blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
f4d977236ecdcd874ff7ef30b5ee8b9e88a04a18
|
Shell
|
Celwood93/CSC360Assignment2
|
/runner
|
UTF-8
| 486
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the project, then drive ./ACS once with the scripted test inputs.
make
for run in 1; do
# Feed the test file names (one per line, terminated by "end") to ACS on
# stdin and let the results print to the terminal.
printf 'testfiles/test1.txt\ntestfiles/test2.txt\ntestfiles/test3.txt\ntestfiles/test4.txt\ntestfiles/test5.txt\ntestfiles/test6.txt\ntestfiles/test7.txt\ntestfiles/test8.txt\ntestfiles/test9.txt\nend\n' | ./ACS
# Alternative: capture each run in its own output file instead:
#printf 'testfiles/test1.txt\ntestfiles/test2.txt\ntestfiles/test3.txt\ntestfiles/test5.txt\nend\n' | ./ACS > outputfiles/testoutput${run}.txt
#echo "${run} done"
done
# Give any background work a moment to settle before the script exits.
sleep 5
| true
|
6f29bf09a173b5028cdf950659aea2444a949421
|
Shell
|
benayang/H4K20me1
|
/Signal/deeptools_computematrix.sh
|
UTF-8
| 1,756
| 2.671875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Build deepTools matrices and plots comparing Young vs Aged H4K20me1
# CUT&Tag signal around footprint-derived TSS regions.
pjdir="/nas/homes/benyang/H4K20me1"
#bwdir="$pjdir/SEACR_bw"
bwdir="$pjdir/anna_broadpeak"
overlapdir="$pjdir/Figures/Footprints/sets"
outdir="$pjdir/Figures/Footprints/Profiles"
logfile="$outdir/OldvsYoungCUTTAG_computematrix.log"
# Make sure the log file exists so the "2>> $logfile" append below has a target.
[ ! -f "$logfile" ] && touch "$logfile"
# echo Old Young overlap
# dt=$(date '+%m/%d/%Y %H:%M:%S');
# echo @@@@@@@@@@@@@@@@@@ "Old Young overlap $dt" >> $logfile

# Profile matrix: signal +/-5kb around the merged TSS set, 50bp bins.
computeMatrix reference-point --samplesLabel "Young" "Aged" \
-p 32 \
-S $bwdir/5p.counts.Young.RPKM.bw $bwdir/5p.counts.Old.RPKM.bw \
-R $pjdir/Figures/Footprints/merged.nearest.tss.filt.bed \
-o $outdir/H4K20me1_merged.mat.gz \
--missingDataAsZero \
--upstream 5000 \
--downstream 5000 \
--averageTypeBins mean \
--binSize 50 2>> $logfile
plotProfile -m $outdir/H4K20me1_merged.mat.gz \
--legendLocation best \
--colors red blue \
--plotTitle "" \
--averageType "mean" \
--plotType "se" \
--plotWidth 10 --plotHeight 10 \
--perGroup -out $outdir/H4K20me1_merged_profile.svg

# Heatmap matrix over the common / Young-only / Aged-only region sets.
# BUGFIX: the third -R path previously had no space before the trailing
# backslash, so the line continuation glued "...filt.bed" and "-o" into a
# single word and broke the argument list.
computeMatrix reference-point --samplesLabel "Young" "Aged" \
-p 32 \
-S $bwdir/5p.counts.Young.RPKM.bw $bwdir/5p.counts.Old.RPKM.bw \
-R $pjdir/Figures/Footprints/common.nearest.tss.filt.bed $pjdir/Figures/Footprints/unique.Young.nearest.tss.filt.bed $pjdir/Figures/Footprints/unique.Aged.nearest.tss.filt.bed \
-o $outdir/h4k20me1.heatmap.mat.gz \
--upstream 5000 \
--downstream 5000 \
--binSize 200 \
--averageTypeBins mean
plotHeatmap -m $outdir/h4k20me1.heatmap.mat.gz \
--colorMap RdBu \
--regionsLabel "" "" "" \
-out $outdir/h4k20me1.heatmap.svg \
--heatmapHeight 9 \
--whatToShow 'heatmap and colorbar' \
--legendLocation best \
--xAxisLabel "" \
--yAxisLabel ""
| true
|
939d9667b5f6d18adfa7f9b37e4f5c78e9cc0178
|
Shell
|
jsrdzhk/captchappy
|
/src/sh/ty/build_gateway.sh
|
UTF-8
| 1,577
| 3.703125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Build the FIRST gateway C++ API, wrap it with SWIG, and package the result.
GATEWAY_HOME=$HOME/build/gateway/FIRSTApi
SWIG_HOME=$HOME/build/swig
TARGET_HOME=$GATEWAY_HOME/build
# Copy the public headers and the freshly built shared library into the
# SWIG working directory so the wrapper can be generated against them.
cpToSwig() {
cp "${GATEWAY_HOME}"/include/CrossPlatform.h "${SWIG_HOME}"
cp "${GATEWAY_HOME}"/include/ThostFtdcUserApiStruct.h "${SWIG_HOME}"
cp "${GATEWAY_HOME}"/include/ThostFtdcUserApiDataType.h "${SWIG_HOME}"
cp "${GATEWAY_HOME}"/include/FIRSTApi.h "${SWIG_HOME}"
cp "${GATEWAY_HOME}"/build/lib/libFIRST_API_CPP.so "${SWIG_HOME}"
}
buildGatewayLib() {
# Configure with CMake inside $GATEWAY_HOME/build, compile every target,
# then return to the directory the caller started in.
prev_dir=$(pwd)
cd "${GATEWAY_HOME}" || exit
if [ -d "$GATEWAY_HOME/build" ]; then
echo "directory \"$GATEWAY_HOME/build\" exists"
else
mkdir build
fi
cd build || exit
cmake ..
make clean
for target in FIRST_API_CPP CTP_SE_Trade CTP_MINI2_Trade CTP_MINI2_Quote FirstGatewayTestTool; do
make "$target"
done
cd "${prev_dir}" || exit
}
buildSwig() {
# Generate the SWIG wrapper by running the project's create.sh in place.
prev_dir=$(pwd)
cd "${SWIG_HOME}" || exit
./create.sh
cd "${prev_dir}" || exit
}
packSwigAndGatewayLib() {
# Stage the SWIG outputs next to the gateway binaries, then tar everything
# (absolute paths preserved via -P, matching the original invocation).
for artifact in firstapi.jar libfirstapi_wrap.so; do
cp "${SWIG_HOME}/linux_release/${artifact}" "${TARGET_HOME}"/lib
done
tar -zcvPf "${HOME}"/build/build_target.tar.gz "${TARGET_HOME}"/lib "${TARGET_HOME}"/bin
}
# Orchestrate the full pipeline: native build -> header/library hand-off to
# SWIG -> wrapper generation -> final tarball.
buildGateway() {
echo "start to build gateway..."
echo "step 1:build gateway"
buildGatewayLib
echo "step 2:cp libFIRST_API_CPP.so and headers to swig home"
cpToSwig
echo "step 3:build swig"
buildSwig
echo "step 4:package swig and gateway lib"
packSwigAndGatewayLib
}
# Entry point: run the whole pipeline.
buildGateway
| true
|
c88e07c903a7c06395117f77895848ef59914b19
|
Shell
|
ssadler/aeson-quick
|
/manage.sh
|
UTF-8
| 287
| 3.3125
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# Tiny task runner: ./manage.sh <task> [args...]  (e.g. ./manage.sh publish)

# Build haddock docs for Hackage and upload them.
function publish () {
set -e
dir=$(mktemp -d dist-docs.XXXXXX)
trap 'rm -r "$dir"' EXIT
# assumes cabal 2.4 or later
cabal v2-haddock --builddir="$dir" --haddock-for-hackage --enable-doc
cabal upload -d --publish $dir/*-docs.tar.gz
}

cmd=${1-}
[ $# -gt 0 ] && shift
# No task given: exit quietly, matching the old `eval ""` behaviour.
[ -n "$cmd" ] || exit 0
# BUGFIX/security: dispatch directly instead of `eval $cmd`, so the argument
# is treated as a function/command name rather than evaluated as arbitrary
# shell code; remaining arguments are now forwarded to the task as well.
"$cmd" "$@"
| true
|
68b7d5eb25e975c0b34962a01600ec6aae8ad38c
|
Shell
|
jimy18/bash
|
/humano.sh
|
UTF-8
| 136
| 3
| 3
|
[] |
no_license
|
# Greet the person named by the SECOND argument (the first is ignored).
funcion_nombre()
{
printf '\nHola como estas %s mucho gusto\n' "$2"
}
# Print the age in 2021 of someone born in year $a (a global set by the
# caller); also leaves the computed age in the global $ed.
edad()
{
ed=$((2021 - a))
printf '\nTu edad es de: %s años\n\n' "$ed"
}
| true
|
25252e599e5a0fa8fcec9b901f034a467ba7acf4
|
Shell
|
tabithabragg/scriptlets_n_bits
|
/scraper.sh
|
UTF-8
| 818
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/bash
# Collect RHEL / Satellite subscription facts for this host and print them
# as a single comma-separated line (multi-line command output is flattened
# by the deliberately unquoted expansions in the final echo).
subscription-manager refresh
svrname=$(hostname)
svrip=$(ip addr | grep -i "inet " | grep -v "inet 127" | awk '{print $2}')
release=$(cat /etc/redhat-release)
motd=$(grep -i "server" /etc/motd)
svrup=$(uptime | awk '{print $3}')
satenv=$(subscription-manager identity | grep -i environment)
satprod=$(subscription-manager list | grep -i "product name:")
satstatus=$(subscription-manager list | grep -i "status:")
yumspace=$(df -h /var)
yumuse=$(du -hs /var/cache/yum)
cnthostnme=$(subscription-manager facts | grep -i "network.hostname:")
cnthostnmeoverride=$(subscription-manager facts | grep -i "network.hostname-override:")
# BUGFIX: the last field printed $yumspace twice and never used the
# computed $yumuse (yum cache disk usage) — clearly a copy-paste slip.
echo $svrname "," $cnthostnme "," $cnthostnmeoverride "," $svrip "," $release "," $motd "," $svrup "," $satenv "," $satprod "," $satstatus "," $yumspace "," $yumuse
| true
|
7a3e149d37365035b5145b0dc14c566f183b7eb6
|
Shell
|
rvanweerd/developer-documentation
|
/generate.sh
|
UTF-8
| 1,095
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
# Regenerate the API docs for one Piwik branch into docs/<target>/generated.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# $1 - git branch to check out inside the piwik submodule clone
# $2 - target docs directory name under docs/
function generateDocs {
echo "generating $1 - $2"
git checkout docs/$2/generated
rm -rf docs/$2/cache
mkdir docs/$2/cache
rm -rf docs/$2/generated
mkdir docs/$2/generated
cd piwik
# Scrub the working tree completely (index, tracked files, submodules,
# untracked files) before switching branches.
git rm --cached -r . > /dev/null
git reset --hard > /dev/null
git submodule foreach --recursive git reset --hard
git clean -f -d
git submodule foreach git clean -f
git fetch
git checkout $1
# Bail out (without failing the whole script) if the checkout did not land
# on the requested branch.
branchname=$(git rev-parse --abbrev-ref HEAD)
if [ "$branchname" != "$1" ]; then
echo "Not on correct branch"
return
fi
# NOTE(review): the sleeps look like workarounds for slow git/filesystem
# state settling — confirm before removing them.
sleep 4
git rev-parse --abbrev-ref HEAD
git pull origin $1
sleep 3
git submodule update --recursive --force
php ../app/composer.phar self-update
# Install failures are tolerated; generation is attempted regardless.
php ../app/composer.phar install || true
cd ..
sleep 4
php generator/generate.php --branch=$1 --targetname=$2
GENERATION_SUCCESS=$?
if [ $GENERATION_SUCCESS -ne 0 ]; then
exit 1;
fi
}
cd $DIR
generateDocs "4.x-dev" "4.x"
rm -rf app/tmp/cache/*
| true
|
0254cc406fd719c745e041fdde792eb35fa9e9c6
|
Shell
|
ShaftHQ/SHAFT_ENGINE
|
/src/main/resources/docker-compose/start_emu_headless
|
UTF-8
| 4,310
| 3.546875
| 4
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
#!/bin/bash
# Boot a headless Android emulator and wait until it is fully usable.
# ANSI color codes used by the status printfs below.
BL='\033[0;34m'
G='\033[0;32m'
RED='\033[0;31m'
YE='\033[1;33m'
NC='\033[0m' # No Color
# AVD name to boot; must be supplied via the EMULATOR_NAME environment variable.
emulator_name=${EMULATOR_NAME}
function launch_emulator () {
    # Kill every emulator currently attached to adb so we start clean.
    adb devices | grep emulator | cut -f1 | while read line; do adb -s "$line" emu kill; done
    # BUGFIX: $OSTYPE is never the literal "macOS" or "Linux" — bash reports
    # "darwin<version>" on macOS and "linux-gnu" (or another linux-* value)
    # on Linux, so the first two branches of the original if/elif could never
    # run. Match by prefix instead. The echoed command lines are also updated
    # to match the flags actually passed (the originals advertised -wipe-data
    # and -memory 2048, which were not used).
    case "$OSTYPE" in
    darwin*)
        echo "Running: emulator -avd ${emulator_name} -no-window -gpu swiftshader_indirect -no-snapshot -noaudio -no-boot-anim -memory 4096 -cache-size 1000 -partition-size 1024 &"
        emulator -avd "${emulator_name}" -no-window -gpu swiftshader_indirect -no-snapshot -noaudio -no-boot-anim -memory 4096 -cache-size 1000 -partition-size 1024 &
        ;;
    linux-gnu*)
        # glibc Linux: hardware acceleration stays enabled.
        echo "Running: nohup emulator -avd ${emulator_name} -verbose -no-boot-anim -no-window -gpu off -no-snapshot-load -memory 4096 -cache-size 1000 -partition-size 1024 &"
        nohup emulator -avd "${emulator_name}" -verbose -no-boot-anim -no-window -gpu off -no-snapshot-load -memory 4096 -cache-size 1000 -partition-size 1024 &
        ;;
    linux*)
        # Other Linux flavours: additionally disable acceleration (-no-accel).
        echo "Running: nohup emulator -avd ${emulator_name} -verbose -no-boot-anim -no-window -gpu off -no-accel -no-snapshot-load -memory 4096 -cache-size 1000 -partition-size 1024 &"
        nohup emulator -avd "${emulator_name}" -verbose -no-boot-anim -no-window -gpu off -no-accel -no-snapshot-load -memory 4096 -cache-size 1000 -partition-size 1024 &
        ;;
    esac
};
# Poll `adb shell getprop sys.boot_completed` every 5s until it reports 1,
# then list the devices and send MENU (keyevent 82) to dismiss the keyguard.
function check_emulator_status () {
printf "${G}==> ${BL}Checking device booting up status 🧐.. ${G}<==${NC}""\n"
while [[ "$(adb shell getprop sys.boot_completed 2>&1)" != 1 ]];
do
sleep 5
if [ "$(adb shell getprop sys.boot_completed 2>&1)" == 1 ];
then
printf "${G}☞ ${BL}Device is fully booted and running!! 😀 : '$(adb shell getprop sys.boot_completed 2>&1)' ${G}☜${NC}""\n"
adb devices -l
adb shell input keyevent 82
break
else
# Empty getprop output means the property is not set yet (still booting);
# any other non-"1" output is an adb error message worth surfacing.
if [ "$(adb shell getprop sys.boot_completed 2>&1)" == "" ];
then
printf "${G}==> ${YE}Device is partially Booted! 😕 ${G}<==${NC}""\n"
else
printf "${G}==> ${RED}$(adb shell getprop sys.boot_completed 2>&1) 😱 ${G}<==${NC}""\n"
fi
fi
done
};
function disable_animation() {
    # Zero all three global animation scales so UI interactions are not
    # slowed (or flaked) by window/transition/animator animations.
    for setting in window_animation_scale transition_animation_scale animator_duration_scale; do
        adb shell "settings put global $setting 0.0"
    done
};
# Relax the hidden-API access policy (pre-P, P, and current settings) so
# instrumentation/test tooling may call non-SDK interfaces.
function hidden_policy() {
adb shell "settings put global hidden_api_policy_pre_p_apps 1;settings put global hidden_api_policy_p_apps 1;settings put global hidden_api_policy 1"
};
# Wait for the launcher to hold window focus, dismissing ANR dialogs along
# the way. The sentinel /tmp/failed (created non-empty here, removed on
# success) tells access_emulator_with_adb whether boot ultimately succeeded.
function check_emulator_focus() {
echo "Android is booting..."
EMU_BOOTED=0
n=0
first_launcher=1
echo 1 > /tmp/failed
while [[ $EMU_BOOTED = 0 ]];do
echo "Test for current focus"
CURRENT_FOCUS=$(adb shell dumpsys window 2>/dev/null | grep -i mCurrentFocus)
echo "Current focus: ${CURRENT_FOCUS}"
case "${CURRENT_FOCUS}" in
*"Launcher"*)
# Seeing the launcher once may still be followed by a popup; require it
# to hold focus across a 15s pause before declaring boot complete.
if [[ "${first_launcher}" == 1 ]]; then
echo "Launcher seems to be ready, wait 15 sec for another popup..."
sleep 15
first_launcher=0
else
echo "Launcher is ready, Android boot completed"
EMU_BOOTED=1
rm /tmp/failed
fi
;;
*"Not Responding: com.android.systemui"*)
echo "Dismiss System UI isn't responding alert"
adb shell input keyevent KEYCODE_ENTER
adb shell input keyevent KEYCODE_DPAD_DOWN
adb shell input keyevent KEYCODE_ENTER
first_launcher=1
;;
*"Not Responding: system"*)
echo "Dismiss Process system isn't responding alert"
adb shell input keyevent KEYCODE_ENTER
first_launcher=1
;;
*)
# Unknown window: poke ENTER to clear possible dialogs, then poll again.
adb shell input keyevent KEYCODE_ENTER
first_launcher=1
n=$((n + 1))
echo "Waiting for 5 sec for Android emulator to boot (${n})..."
sleep 5
;;
esac
done
echo "Android Emulator started."
};
function access_emulator_with_adb() {
    # /tmp/failed is created (non-empty) by check_emulator_focus and removed
    # once boot completes; if it still exists the emulator never came up.
    if [ -s /tmp/failed ]; then
        echo "Skip"
    else
        # Smoke-test adb access; never fail the script on this probe.
        adb shell ls || true
    fi
};
# Main sequence: launch, wait for boot, tune settings, verify focus and adb.
launch_emulator
sleep 4
check_emulator_status
sleep 1
disable_animation
sleep 1
hidden_policy
sleep 1
check_emulator_focus
sleep 1
access_emulator_with_adb
sleep 1
| true
|
48b58da0f8be1659bb17e73568845c7a69c9da5b
|
Shell
|
opensciencegrid/docker-xrootd-standalone
|
/image-config.d/10-add-users.sh
|
UTF-8
| 453
| 2.71875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Create a local account for every mapped user found in the grid/VOMS map
# files present on this host.
#
# Map-file lines look like:  "<subject DN or FQAN>" <local username>
# so we keep only quoted-mapping lines, take the last field (the username),
# and hand each one to adduser.
#
# The three per-file if-blocks were byte-identical; fold them into one loop.
for mapfile_path in \
    /usr/share/osg/voms-mapfile-default \
    /etc/grid-security/grid-mapfile \
    /etc/grid-security/voms-mapfile
do
    if [ -f "$mapfile_path" ]; then
        grep '".*" .*' "$mapfile_path" | awk '{print $NF}' | xargs -n1 adduser
    fi
done
| true
|
5167cb0e971d7913a4ac1732e24979a7f66952f8
|
Shell
|
uib/tk
|
/scripts/site-reset-prod-from-staging-branch
|
UTF-8
| 1,829
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Reset the production checkout under /var/www/app/tk to origin/master and
# run the drush feature-revert / updatedb / cache-clear sequence.
datetime=$(date -I)T$(date +%H%M%S)
echo "Reset origin/staging to prod!!! (y/n)?"
read x
# BUGFIX: the comparison was unquoted (`[ $x == "y" ]`). With an EMPTY
# answer (just pressing Enter) the test errored out, `!` swallowed the
# error status, and the destructive reset PROCEEDED. Quoting makes any
# answer other than a literal "y" abort, as intended.
if ! [ "$x" == "y" ];then
echo User abort...
exit
fi
#Check user
if ! [ "$(whoami)" == "app" ];then
echo wrong user Run as app >/dev/stderr
exit 1
fi
#Check hostname: only run on the prod hosts (long|real|double)6*, never on test.
if ! [ $(hostname|egrep '^(long|real|double)6'|grep -v test|wc -l) -gt 0 ];then
echo wrong host >/dev/stderr
exit 1
fi
set -x
cd /var/www/app/tk/ || exit 1
#mkdir -p /nettapp_w3/pg_dump/pre_staging || exit 1
#DB-Dump
#bin/site-drush sql-dump --structure-tables-key=common > /nettapp_w3/pg_dump/pre_staging/w3.uib.no-staging-backup.${datetime}.sql || exit 1
git fetch --all --prune || exit 1
git reset --hard origin/master || exit 2
git status || exit 3
git submodule update || exit 4
set -x
echo "Confirm before feature revert is applied!!! (y/n)?"
read x
if ! [ "$x" == "y" ];then
echo User abort...
echo TODO: Reset using GIT...
exit
fi
# BUGFIX: this line was `set x`, which silently assigned the positional
# parameter $1=x instead of enabling command tracing as everywhere else.
set -x
cd /var/www/app/tk/drupal
#drush vset maintenance_mode 1
#compass compile --force -q themes/uib_zen
#find site/files/{css,js,languages} -type f -print | xargs rm -f
## CC all cleans out the deleted files from DB that was deleted above
drush cc all
date +%FT%H%M
#CC ALL some times display changes to do that not are visble before the CC ALL
#Run FR
drush fra -v --yes || exit 5
drush cc all || exit 6
date +%FT%H%M
#Run updateDB
drush updatedb --yes --verbose || exit 7
drush cc all || exit 8
date +%FT%H%M
#Sometimes more modules may be updated after updatedb & CC
drush fra -v --yes || exit 9
drush cc all || exit 10
#Backup DB
#drush sql-dump --structure-tables-key=common > /nettapp_w3/pg_dump/pre_staging/w3.uib.no-staging-backup.$(date -I)T$(date +%H%M%S).postupgrade.sql || exit 1
date +%FT%H%M
#Display that all is OK to user
drush fl
#drush vset maintenance_mode 0
drush cc all
| true
|
584559da2f24394f4f784e5a64ecc43cc988c0a0
|
Shell
|
daler/build-test
|
/travis-run.sh
|
UTF-8
| 333
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the recipes/argh conda recipe (via Docker on Linux, natively
# elsewhere), then upload the built package to anaconda.org.
export PATH=/anaconda/bin:$PATH

RECIPE=recipes/argh

if [[ $TRAVIS_OS_NAME = "linux" ]]; then
    # Linux: build inside the Docker wrapper against the shared conda-bld dir.
    python conda_build_with_docker.py $RECIPE --host-conda-bld /anaconda/conda-bld
else
    conda build $RECIPE
fi

# Ensure conda-build is available so `conda build --output` below resolves
# the built package path.
conda install -y conda-build
anaconda -t $ANACONDA_TOKEN upload -u $CONDA_USER $(conda build --output $RECIPE)
| true
|
0a00ab16084d3054acc72d06436d3257fde3d76b
|
Shell
|
ithewei/libhv
|
/configure
|
UTF-8
| 6,567
| 3.84375
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# configure script for libhv: records build options in config.mk and probes
# the toolchain to generate hconfig.h.
. "$(dirname "$0")/scripts/shini.sh"
# Print usage. shini_parse loads config.ini first so the help text can show
# each option's current default through the variables it sets.
print_help() {
shini_parse "config.ini"
cat << END
Usage: ./configure [--option] ...
options:
  --prefix=PREFIX       (DEFAULT: $PREFIX)
  --incdir=INSTALL_INCDIR (DEFAULT: $PREFIX/include/hv)
  --libdir=INSTALL_LIBDIR (DEFAULT: $PREFIX/lib)
  --with-MODULE
  --without-MODULE
  --enable-FEATURE
  --disable-FEATURE
modules:
  --with-protocol       compile protocol module?     (DEFAULT: $WITH_PROTOCOL)
  --with-evpp           compile evpp module?         (DEFAULT: $WITH_EVPP)
  --with-http           compile http module?         (DEFAULT: $WITH_HTTP)
  --with-http-client    compile http client module?  (DEFAULT: $WITH_HTTP_CLIENT)
  --with-http-server    compile http server module?  (DEFAULT: $WITH_HTTP_SERVER)
  --with-mqtt           compile mqtt module?         (DEFAULT: $WITH_MQTT)
features:
  --enable-uds          enable Unix Domain Socket?   (DEFAULT: $ENABLE_UDS)
  --enable-windump      enable Windows coredump?     (DEFAULT: $ENABLE_WINDUMP)
dependencies:
  --with-curl           compile with curl?           (DEFAULT: $WITH_CURL)
  --with-nghttp2        compile with nghttp2?        (DEFAULT: $WITH_NGHTTP2)
  --with-openssl        compile with openssl?        (DEFAULT: $WITH_OPENSSL)
  --with-gnutls         compile with gnutls?         (DEFAULT: $WITH_GNUTLS)
  --with-mbedtls        compile with mbedtls?        (DEFAULT: $WITH_MBEDTLS)
rudp:
  --with-kcp            compile with kcp?            (DEFAULT: $WITH_KCP)
END
}
# Scratch directory for config edits and compile probes ("exists" is fine).
mkdir tmp 2>/dev/null
# Parse command-line options into KEY/VAL pairs and persist each one into
# tmp/config.mk via shini_write (provided by scripts/shini.sh, sourced above).
while [ -n "$1" ]
do
opt="$1"
KEY=""
VAL=yes
case $opt in
--help)
print_help
exit 0
;;
--prefix=*)
KEY="PREFIX"
VAL=${opt:9}
;;
--incdir=*)
KEY="INSTALL_INCDIR"
VAL=${opt:9}
;;
--libdir=*)
KEY="INSTALL_LIBDIR"
VAL=${opt:9}
;;
--with-*)
KEY="WITH_${opt:7}"
;;
--without-*)
KEY="WITH_${opt:10}"
VAL=no
;;
--enable-*)
KEY="ENABLE_${opt:9}"
;;
--disable-*)
KEY="ENABLE_${opt:10}"
VAL=no
;;
*)
print_help
exit 255
;;
esac
# BUGFIX: this guard was `[ -n $KEY ]`; with KEY empty that expands to
# `[ -n ]`, which is always true, so the emptiness check never worked.
# Quote the expansion so an empty KEY is actually skipped.
if [ -n "$KEY" ]; then
FEATURE=$(echo "$KEY" | tr "a-z-" "A-Z_")
if [ ! -f tmp/config.mk ]; then
cp config.ini tmp/config.mk
fi
shini_write "tmp/config.mk" "" "$FEATURE" "$VAL"
fi
shift 1
done
# config.mk
# Promote any options staged in tmp/config.mk to config.mk and stamp the
# configuration date, then show the effective configuration.
echo "[config.mk]"
if [ -f tmp/config.mk ]; then
mv tmp/config.mk config.mk
shini_write "config.mk" "" "CONFIG_DATE" "$(date +%Y%m%d)"
fi
cat config.mk
echo ""
# Checks for compiler
# Honor $CROSS_COMPILE (toolchain prefix) and $CC/$CXX overrides; default gcc/g++.
echo -e "\nchecking for compiler..."
if [ $CROSS_COMPILE ]; then
CC=${CROSS_COMPILE}gcc
CXX=${CROSS_COMPILE}g++
fi
if [ ! $CC ]; then
CC=gcc
CXX=g++
fi
CC_VERSION=`$CC --version 2>&1 | head -n 1`
echo "CC = $CC"
echo "CXX = $CXX"
echo "$CC_VERSION"
# Checks for os
echo -e "\nchecking for os..."
HOST_OS=`uname -s`
HOST_ARCH=`uname -m`
# Derive the target triple from the compiler itself so cross builds report
# the real target, not the host.
TARGET_PLATFORM=`$CC -v 2>&1 | grep Target | sed 's/Target: //'`
TARGET_ARCH=`echo $TARGET_PLATFORM | awk -F'-' '{print $1}'`
case $TARGET_PLATFORM in
*mingw*) TARGET_OS=Windows ;;
*android*) TARGET_OS=Android ;;
*darwin*) TARGET_OS=Darwin ;;
*) TARGET_OS=Linux ;;
esac
echo "HOST_OS = $HOST_OS"
echo "HOST_ARCH = $HOST_ARCH"
echo "TARGET_PLATFORM = $TARGET_PLATFORM"
echo "TARGET_OS = $TARGET_OS"
echo "TARGET_ARCH = $TARGET_ARCH"
# hconfig.h
# Start the generated header with its include guard (closed at end of script).
echo -e "\n>> hconfig.h"
confile=hconfig.h
cat << END > $confile
#ifndef HV_CONFIG_H_
#define HV_CONFIG_H_
END
# Append "#ifndef $macro / #define $macro $value / #endif" to hconfig.h.
# Inputs come from the globals $macro and $value set by the callers below.
write_define() {
cat << END >> hconfig.h
#ifndef $macro
#define $macro $value
#endif
END
}
# Compile an empty program that includes $header.
# Sets value=1 if the header is usable (binary produced), else value=0.
CheckHeaderExists() {
rm tmp/check 2>/dev/null
cat << END > tmp/check.c
#include <$header>
int main() {
return 0;
}
END
$CC -o tmp/check tmp/check.c 2>/dev/null
if [ -x tmp/check ]; then
value=1
else
value=0
fi
}
# Probe whether $function is declared in $header by compiling a program that
# re-declares it with a deliberately conflicting prototype: if compilation
# SUCCEEDS there was no conflicting declaration, i.e. the symbol does NOT
# exist (value=0); a compile failure means the real declaration clashed,
# i.e. the symbol exists (value=1). Note the inverted success test below.
CheckSymbolExists() {
CheckHeaderExists
if [ $value -eq 0 ]; then
return;
fi
rm tmp/check 2>/dev/null
cat << END > tmp/check.c
#include <$header>
int $function(void** pp) {return 0;}
int main() {
void* p;
return $function(&p);
}
END
$CC -o tmp/check tmp/check.c 2>/dev/null
if [ -x tmp/check ]; then
value=0
else
value=1
fi
}
# Report availability of $header and record it as HAVE_<HEADER> in hconfig.h.
check_header() {
echo -n "checking for $header... "
CheckHeaderExists
if [ $value -eq 0 ]; then
echo "no"
else
echo "yes"
fi
macro=HAVE_$(echo $header | tr a-z./ A-Z__)
write_define
}
# Report availability of $function (in $header) and record HAVE_<FUNCTION>.
check_function() {
echo -n "checking for $function... "
CheckSymbolExists
if [ $value -eq 0 ]; then
echo "no"
else
echo "yes"
fi
macro=HAVE_$(echo $function | tr a-z A-Z)
write_define
}
# Emit "#define $option 1" (or a commented #undef) according to the yes/no
# value held by the shell variable named in $option (loaded from config.mk).
check_option() {
value=$(eval echo \$$option)
echo "checking for $option=$value"
if [ "$value" == "yes" ]; then
cat << END >> $confile
#define $option 1
END
else
cat << END >> $confile
/* #undef $option */
END
fi
}
# Checks for programs
# Checks for libraries
# Checks for header files
header=stdbool.h && check_header
header=stdint.h && check_header
header=stdatomic.h && check_header
header=sys/types.h && check_header
header=sys/stat.h && check_header
header=sys/time.h && check_header
header=fcntl.h && check_header
header=pthread.h && check_header
header=endian.h && check_header
header=sys/endian.h && check_header
# Checks for functions
function=gettid && header=unistd.h && check_function
function=strlcpy && header=string.h && check_function
function=strlcat && header=string.h && check_function
function=clock_gettime && header=time.h && check_function
function=gettimeofday && header=sys/time.h && check_function
function=pthread_spin_lock && header=pthread.h && check_function
function=pthread_mutex_timedlock && header=pthread.h && check_function
function=sem_timedwait && header=semaphore.h && check_function
function=pipe && header=unistd.h && check_function
function=socketpair && header=sys/socket.h && check_function
function=eventfd && header=sys/eventfd.h && check_function
function=setproctitle && header=unistd.h && check_function
# Checks for options
# Load the yes/no option values chosen earlier so check_option can read them.
source config.mk 2>/dev/null
option=WITH_OPENSSL && check_option
option=WITH_GNUTLS && check_option
option=WITH_MBEDTLS && check_option
option=ENABLE_UDS && check_option
option=USE_MULTIMAP && check_option
option=WITH_KCP && check_option
# end confile: close the include guard opened at the top of hconfig.h.
cat << END >> $confile
#endif // HV_CONFIG_H_
END
echo "configure done."
| true
|
74dbb97db1f97c6aef80dc6287140aad70848807
|
Shell
|
cha63506/dev_git_started
|
/src/github-config.sh
|
UTF-8
| 1,897
| 4.0625
| 4
|
[] |
no_license
|
############################ GITHUB SETUP ##########################
# Prompt for GitHub signup when identity data has been collected but the
# user reports having no account; opens the signup page and exits.
# Reads globals set earlier by the sourcing script: git_name, git_email,
# ssh_public_key, default_git_email, color vars (RED/PURPLE/RESET) and the
# askYesNo helper. NOTE(review): assumed from usage here — confirm in the
# script that sources this file.
function signupGitHub() {
if (( "${#git_name}" > 0 )) && (( "${#git_email}" > 0 )) && (( "${#ssh_public_key}" > 0 )); then
have_github_account=$(askYesNo "Do you have a GitHub account")
fi
if [ "${have_github_account}" == "N" ]; then
signup_url='https://github.com/signup/free'
echo "${RED}Please sign-up for a GitHub account, and then re-run this script.${RESET}"
echo ""
echo "Suggested username: ${PURPLE}${USER}${RESET}"
echo "Suggested email: ${PURPLE}${default_git_email}${RESET}"
echo "You can ${PURPLE}pick your own${RESET} password."
echo ""
open "${signup_url}"
echo ""
exit
fi
}
# Ensure $GITHUB_USERNAME is set: prompt the user (defaulting to $USER) and
# persist the value into ~/.profile, updating an existing export in place.
function setupGithubUsername() {
if [ "${GITHUB_USERNAME}" == "" ]; then
echo "You do not have your GitHub username set via the \$GITHUB_USERNAME environemnt variable."
read -p "What is your GitHub username? [${GREEN}${USER}${RESET}]: " new_github_username
if [ "${new_github_username}" == "" ]; then
new_github_username="${USER}"
fi
if [ ! -f ~/.profile ]; then
touch ~/.profile
fi
export GITHUB_USERNAME="${new_github_username}"
if (grep -q 'GITHUB_USERNAME' ~/.profile); then
echo "Updating your ${BLUE}GITHUB_USERNAME${RESET} environment variable in ${BLUE}~/.profile${RESET}"
# NOTE(review): GNU sed in-place syntax; BSD/macOS sed needs `sed -i '' -e`
# — confirm the target platform before relying on this.
sed -i -e "s/.*GITHUB_USERNAME=.*/export GITHUB_USERNAME='${GITHUB_USERNAME}'/" ~/.profile
else
echo "Adding the ${BLUE}GITHUB_USERNAME${RESET} environment variable to the end of ${BLUE}~/.profile${RESET}"
echo "${WHITE}export GITHUB_USERNAME='${GITHUB_USERNAME}'${RESET}"
echo "export GITHUB_USERNAME='${GITHUB_USERNAME}'" >> ~/.profile
fi
howToReloadProfile
fi
}
| true
|
1a8ff296c57398afe7edcab62860f2f489fc8c07
|
Shell
|
michaelbub/MiSTer_Batch_Control
|
/build.sh
|
UTF-8
| 926
| 3.765625
| 4
|
[] |
no_license
|
#!/bin/sh
set -x
die(){
# Print a diagnostic and abort the build.
# BUGFIX: the original used `exit -1`, which is not a valid operand under
# POSIX sh (dash rejects it; bash maps it to 255). Exit 1 instead, and send
# the message to stderr so it is not captured by command substitution.
echo "ERROR" >&2
exit 1
}
# Toolchain: static musl cross-compiler targeting the MiSTer's ARM SoC.
export DOWNLOAD_GCC_TOOLCHAIN="http://musl.cc/arm-linux-musleabihf-cross.tgz"
export PATH="$PWD/build/arm-linux-musleabihf-cross/bin:$PATH"
# Download and unpack the toolchain only when build/ has no arm-linux-musl* yet.
if [ "$(ls build/arm-linux-musle*)" = "" ] ; then
echo "downloading $DOWNLOAD_GCC_TOOLCHAIN"
mkdir -p build ||die
cd build
curl "$DOWNLOAD_GCC_TOOLCHAIN" --output cc_toolchain.tar.gz ||die
tar -xzf cc_toolchain.tar.gz ||die
cd ..
fi
# Static C99 build flags; bake the commit hash and build date into the binary.
BFLAG=" -std=c99 -Wall -D_XOPEN_SOURCE=700 -static -O2 "
COMMIT=$(git rev-parse HEAD)
BFLAG=" $BFLAG -DMBC_BUILD_COMMIT=\"$COMMIT\" "
DATE=$(date --rfc-3339=seconds | tr ' ' '/')
BFLAG=" $BFLAG -DMBC_BUILD_DATE=\"$DATE\" "
echo "building..."
cd build
arm-linux-musleabihf-gcc $BFLAG -o mbc ../mbc.c ||die
arm-linux-musleabihf-strip mbc ||die
# Ship a hook/expose/__unnamed__ symlink alongside the binary in the tarball.
mkdir -p hook/expose ||die
cd hook/expose ||die
# NOTE(review): ln -s has no -f, so rerunning in a dirty build/ dir dies
# here once the link exists — confirm whether that is intentional.
ln -s ../../mbc __unnamed__ ||die
cd -
tar -czf mbc.tar.gz mbc hook/expose/__unnamed__
cd ..
| true
|
b9e007ccf57c9056490701b16654bf6967fe4401
|
Shell
|
Jbennett8/Genome-Annotation-Pipeline
|
/bin/alignment/hisat/align.sh
|
UTF-8
| 596
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
# Align paired-end reads with HISAT2 and emit a coordinate-sorted BAM.
#
#   $1 - SRA accession, e.g. SRR123775
#   $2 - directory (relative to $PWD) with <readType>_<sra>_{1,2}.fastq
#   $3 - output directory for the SAM/BAM files
module load hisat2/2.2.1
# Input
sraName="$1" # etc SRR123775
sraDir="$PWD/$2"
outDir="$3"
readType="scythe" # Change if you want just trimmed, etc
r1="$sraDir/${readType}_${sraName}_1.fastq"
r2="$sraDir/${readType}_${sraName}_2.fastq"
# Parameters
cores=8
prefix="$PWD/index/hisat/hisat" # HISAT2 index basename
# Create sam. Abort when the output dir is missing rather than silently
# aligning into the wrong working directory (the original ignored cd errors).
cd "$outDir" || exit 1
# Run the aligner directly instead of building a command string and eval'ing
# it: eval added nothing here and would re-expand any special characters in
# the arguments (an injection hazard for odd file names).
hisat2 -p "$cores" -q -x "$prefix" -1 "$r1" -2 "$r2" > "${sraName}.sam"
# Immediately convert to bam; delete the sam only when conversion succeeded.
samtools view -b -@ "$cores" "${sraName}.sam" | samtools sort -o "sorted_${sraName}.bam" -@ "$cores" && rm "${sraName}.sam"
| true
|
bbcf5edddc595f8718ef72bd027e126ce323a4d5
|
Shell
|
chaxel/cartoprox
|
/scripts/util_GEOM/old/extractfondmet.sh
|
UTF-8
| 1,832
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/ksh
#
# FTr
########################################################
# Extract WRF + CHIMERE background/meteo data for one cartoprox grid.
# $1 - grid identifier (row key in $ChemDataSrc/$ficgrid)
grid=$1
ChemDisk=/mnt/mod2
ChemProg=$ChemDisk/appli/CARTOPROX/Util
# NOTE(review): include_path.sh presumably defines ChemDataSrc, ChemGrid,
# ChemInput, ficgrid, ipWRF and YEAR used below — confirm there.
. $ChemProg/include_path.sh
echo "===================================================="
echo "====== extraction WRF + Chimere pour $grid ========="
echo "===================================================="
# Look up the grid's latitude/longitude (columns 4 and 5) in the grid table.
LATGRID=`awk '{ if($1==grid) {printf("%s\n",$4)} }' grid=$grid $ChemDataSrc/$ficgrid`
LONGRID=`awk '{ if($1==grid) {printf("%s\n",$5)} }' grid=$grid $ChemDataSrc/$ficgrid`
echo grid=$grid LATGRID=$LATGRID LONGRID=$LONGRID
# Only extract when the output files are not already present.
if [ ! -f $ChemGrid/fondX-$grid.dat ]
then
ssh -i /home/troude/.ssh/cle_ppa oper@$ipWRF "cd /appli/PREVALP_200611/interfaces/SIRANE; ./export_vers_SIRANE_CARTOPROX.sh $LATGRID $LONGRID $grid $ChemGrid"
ssh -i /home/troude/.ssh/cle_ppa oper@$ipWRF "cd /appli/PREVALP_200611/interfaces/SIRANE; ./send2mod2.sh evol-meteo-$grid.dat $ChemGrid"
ssh -i /home/troude/.ssh/cle_ppa oper@$ipWRF "cd /appli/PREVALP_200611/interfaces/SIRANE; ./send2mod2.sh conc_fond-$grid.txt $ChemGrid"
# Duplicate the 1h record as a 0h record for January 1st.
awk '{ if(NR==1) printf("%s 00:00 %s %s %s\n",$2,$4,$5,$6)} ' $ChemGrid/conc_fond-$grid.txt > $ChemGrid/fondX-$grid.dat
awk '{ printf("%s %s %s %s %s\n",$2,$3,$4,$5,$6)} ' $ChemGrid/conc_fond-$grid.txt >> $ChemGrid/fondX-$grid.dat
rm -f $ChemGrid/conc_fond-$grid.txt
#
# Precipitation is not available from this extraction; force it to 0.
precip=0
awk '{ printf("%s %s %s %s %s %s %s\n",$2,$3,$4,$5,$7,$11,precip)} ' precip=$precip $ChemGrid/evol-meteo-$grid.dat > $ChemGrid/meteoWRF-$grid.dat
rm -f $ChemGrid/evol-meteo-$grid.dat
#
else
echo "==== pas de formatage car fichiers déjà présents "
fi
# Expose the per-grid files under the generic names the model reads.
ln -fs $ChemGrid/fondX-$grid.dat $ChemInput/minifond_$YEAR.dat
ln -fs $ChemGrid/meteoWRF-$grid.dat $ChemInput/meteo_$YEAR.dat
| true
|
ea0eb2fde79807b0f167c7fd9c9092cbe5a1de83
|
Shell
|
TobyHFerguson/cm_config
|
/bin/allow_laptop.sh
|
UTF-8
| 497
| 3
| 3
|
[] |
no_license
|
# Open the Cloudera VPN address range on the cluster's network security
# groups so a laptop on the VPN can reach the Cloudera Manager deployment.
#
#   $1 - Azure resource group name
#   $2 - Cloudera Manager URL (used to derive the NSG name prefix)
RESOURCE_GROUP=${1:?"No Resource Group specified"}
CM_URL=${2:?"No CM URL specified"}

# Strip the URL scheme, then peel everything from "-dn<N>"/"-mn<N>" onward
# to recover the cluster name prefix.
PREFIX=$(echo ${CM_URL#*://} | sed -n 's/\(.*\)-[dm]n[0-2].*/\1/p')

for nsg in clouderaVnet-sg ${PREFIX}-dn-sg ${PREFIX}-mn-sg; do
    az network nsg rule create --name cloudera_vpn --nsg-name "$nsg" --priority 120 --access Allow --destination-address-prefixes '*' --destination-port-ranges '*' --direction Inbound --protocol '*' --source-address-prefixes 74.217.76.96/27 --resource-group "${RESOURCE_GROUP}"
done
| true
|
460df02fef35417abef4c82795ee1b448a0d9a98
|
Shell
|
antoniocosta/scriptsmisc
|
/encmovprores.sh
|
UTF-8
| 507
| 3.84375
| 4
|
[] |
no_license
|
#!/bin/sh
# Convert a video to Apple ProRes (.mov) with ffmpeg.
if [ $# -eq 0 ]; then
echo "Converts video to mov (ProRes)"
echo "usage: ./ench264.sh video [quality]"
echo "[quality] defaults to 3 (HQ). Accepted 0-3. Higher values are better quality."
echo " 0 is Proxy, 1 is LT, 2 is Standard and 3 is HQ"
exit 1
fi
# Use $2 if defined AND NOT EMPTY, otherwise use 3 (HQ). (:- also covers
# the set-but-empty case; the original ${2-3} only covered unset, which
# contradicted its own comment.)
QUALITY=${2:-3}
# Use same filename for .mov output.
# NOTE(review): cut -d'.' -f1 truncates at the FIRST dot, so "a.b.mp4"
# becomes "a_prores.mov" — kept as-is to preserve existing output names.
OUTPUT=$(echo "$1" | cut -f 1 -d '.')_prores.mov
echo "Converting..."
# BUGFIX: the script computed QUALITY but hard-coded "-profile 4", which is
# outside the 0-3 range its own usage text documents; use the chosen value.
ffmpeg -i "$1" -vcodec prores -profile "$QUALITY" "$OUTPUT"
echo "DONE!"
| true
|
94ab1ee57fdb1c763ddc9930a7f7964519fe7913
|
Shell
|
johannesg/bubbla-dev
|
/image/build/setuser.sh
|
UTF-8
| 256
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
# Align the www-data account's uid/gid with the owner of the bind-mounted
# /var/www so files written by nginx keep the host user's ownership.
host_uid=$(stat -c '%u' /var/www)
host_gid=$(stat -c '%g' /var/www)
echo "Setting uid to $host_uid and gid to $host_gid for user www-data"
usermod -o -u "$host_uid" www-data
groupmod -o -g "$host_gid" www-data
| true
|
4a2e259ed5174264088593ff549ce9c1baf9bdf6
|
Shell
|
Azure/azure-cli-dev-tools
|
/scripts/ci/build_cli_diff_tool.sh
|
UTF-8
| 419
| 3.109375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Build sdist + wheel for azure-cli-diff-tool into $BUILD_STAGINGDIRECTORY.
set -ev

# Required output directory; fail fast when missing.
: "${BUILD_STAGINGDIRECTORY:?BUILD_STAGINGDIRECTORY environment variable not set}"
# Default the source root to the repo root (two levels above this script).
: "${BUILD_SOURCESDIRECTORY:=$(cd $(dirname $0); cd ../../; pwd)}"

cd "${BUILD_SOURCESDIRECTORY}/azure-cli-diff-tool"

echo "Build azure cli diff tool"
pip install -U pip setuptools wheel
python setup.py bdist_wheel -d "${BUILD_STAGINGDIRECTORY}"
python setup.py sdist -d "${BUILD_STAGINGDIRECTORY}"
| true
|
ce4a381dd84fdb18e46856e7ad6016b7a0b06284
|
Shell
|
SFML/CSFML
|
/tools/BuildMacOS.sh
|
UTF-8
| 14,367
| 3.546875
| 4
|
[
"Zlib"
] |
permissive
|
#!/bin/sh
# Versions of SFML / CSFML to package.
VERSION="2.5.1"
VERSION_C="2.5.1"
# Which products to build.
# BUILD_CSFML=FALSE
BUILD_CSFML=TRUE
BUILD_SFML=FALSE
###
### Create packages for SFML $VERSION on OS X
###
### Author: Marco Antognini <antognini.marco@gmail.com>
### Date: 15/11/2014
###
##
## SETTINGS
##
## Edit $VERSION & $VERSION_C above
##
sfml_giturl='https://github.com/SFML/SFML.git'
csfml_giturl='https://github.com/SFML/CSFML.git'
gittag="$VERSION"
gittag_c="$VERSION_C"
# Working layout: everything is built under ./tmp; packages land in ./.
topdir=$(pwd)
tmpdir="$topdir/tmp/"
codedir="$tmpdir/code/"
cmakedir="$tmpdir/cmake/"
builddir="$tmpdir/build/"
installdir="$tmpdir/install/"
sfml_package_source_zip="$topdir/SFML-$gittag-sources.zip"
sfml_source_archive_url="https://github.com/SFML/SFML/archive/$gittag.zip"
sfml_package1070name="SFML-$gittag-macos-clang"
sfml_package1070="$topdir/$sfml_package1070name/"
sfml_archive1070="$sfml_package1070name.tar.gz"
csfml_package1070name="CSFML-$gittag_c-macos-clang"
csfml_package1070="$topdir/$csfml_package1070name/"
csfml_archive1070="$csfml_package1070name.tar.gz"
# macOS 10.7 SDK used to produce backward-compatible binaries.
SDK1070="/Developer/SDKs/MacOSX10.7.sdk"
##
## HELPER FUNCTIONS
##
## Print the given message on stderr, then abort the script with status 2.
error () # $* message to display
{
    printf '%s\n' "$*" >&2
    exit 2
}
## Verify that a caller received exactly the expected number of parameters;
## aborts via error() on mismatch (including misuse of param_check itself).
param_check () # $1 should be $# on call site,
               # $2 the number of required params,
               # $3 an id for the error message
{
    [ "$#" -eq 3 ] || error "Internal error in param_error: the number of parameters is incorrect"
    [ "$1" -eq "$2" ] || error "Internal error in $3: the number of parameters is incorrect"
}
## Check that the number of parameters is enough, or die
param_check_ge () # $1 should be $# on call site,
# $2 the minimal number of params,
# $3 an id for the error message
{
param_check $# 3 "param_check_ge"
if [ $1 -lt $2 ]
then
error "Internal error in $3: the number of parameters is not enough"
fi
}
## Assert $1 is true, or die
# ($1 is an exit status: 0 means success; anything else aborts with $2.)
assert () # $1: boolean, $2: an error message
{
param_check $# 2 "assert"
if [ $1 -ne 0 ]
then
error "$2"
fi
}
## Create directory, or die
# === mkdir -p $1
create_dir () # $1: path
{
param_check $# 1 "create_dir"
mkdir -p "$1"
assert $? "Couldn't create $1"
}
## Destroy and create directory, or die
# (guarantees an empty, existing directory)
re_create_dir () # $1: path
{
param_check $# 1 "re_create_dir"
destroy "$1"
create_dir "$1"
}
## Destroy directory, or die
# === rm -fr $1
destroy () # $1: path
{
param_check $# 1 "destroy"
rm -fr "$1"
assert $? "Couldn't destroy $1"
}
## [with root access] Destroy directory, or die
# === rm -fr $1
sudo_destroy () # $1: path
{
param_check $# 1 "sudo_destroy"
echo "sudo_destroy need root password for deleting $1"
sudo rm -fr "$1"
assert $? "Couldn't destroy $1"
}
## Destroy all paths, or die
destroy_all () # $1...N: paths
{
param_check_ge $# 1 "destroy_all"
for path in "$@"
do
destroy "$path"
done
}
## [with root access] Destroy all paths, or die
sudo_destroy_all () # $1...N: paths
{
param_check_ge $# 1 "sudo_destroy_all"
for path in "$@"
do
sudo_destroy "$path"
done
}
## Copy files/directories, recursively, or die
# (NB: with more than two args the error message only names $1 and $2,
# while the last argument is the actual destination.)
copy () # $1...N: src, $N+1: dest
{
param_check_ge $# 2 "copy"
cp -R "$@"
assert $? "Couldn't copy $1 to $2"
}
## Go to dir with pushd, or die
push_pwd () # $1: path
{
param_check $# 1 "push_pwd"
pushd "$1"
assert $? "Couldn't go to $1"
}
## Go back, or die
pop_pwd ()
{
param_check $# 0 "pop_pwd"
popd
assert $? "Couldn't go back"
}
## Create a .tar.gz archive, or die
# Archives $1 relative to its parent directory so the tarball unpacks into
# a single top-level directory named after $1's basename.
archive () # $1: src dir, $2: archive name
{
param_check $# 2 "archive"
push_pwd "$1/.."
src=$(basename "$1")
tar -zcvf "$2" "$src"
assert $? "Zip failed for $2"
pop_pwd
}
## Build SFML with CMake and Make, or die
# Validates all six mode arguments, translates them into CMake cache
# variables, configures into $cmakedir from the sources in $codedir, and
# installs into $installdir (path adapted to frameworks vs dylibs).
build_sfml () # $1: 'clang' => clang & libc++
# $2: 'frameworks'/'dylibs'
# $3: 'doc'/'no doc'
# $4: 'examples'/'no examples'
# $5: 'templates'/'no templates'
# $6: '10.7 SDK'
{
param_check $# 6 "build_sfml"
case "$1" in
'clang')
cxx="clang++"
cc="clang"
cxx_flags="-stdlib=libc++"
cc_flags="-stdlib=libc++"
;;
*)
error "Unknown compiler for $1 in create_makefile"
;;
esac
case "$2" in
'frameworks')
frameworks="TRUE"
installdir_adapted="$installdir/Library/Frameworks"
;;
'dylibs')
frameworks="FALSE"
installdir_adapted="$installdir/usr/local"
;;
*)
error "Unknown option for $2 in create_makefile"
;;
esac
case "$3" in
'doc')
doc="TRUE"
;;
'no doc')
doc="FALSE"
;;
*)
error "Unknown option for $3 in create_makefile"
;;
esac
case "$4" in
'examples')
examples="TRUE"
makeopts=""
# NB: cannot use -j8 here because it bugs with cocoa example...
;;
'no examples')
examples="FALSE"
makeopts="-j8"
;;
*)
error "Unknown option for $4 in create_makefile"
;;
esac
case "$5" in
'templates')
templates="TRUE"
;;
'no templates')
templates="FALSE"
;;
*)
error "Unknown option for $5 in create_makefile"
;;
esac
case "$6" in
'10.7 SDK')
target="10.7"
sdk="$SDK1070"
;;
*)
error "Unknown option for $6 in create_makefile"
;;
esac
# Fresh out-of-source configure every time.
re_create_dir "$cmakedir"
push_pwd "$cmakedir"
cmake -G "Unix Makefiles" \
-D "BUILD_SHARED_LIBS:BOOL=TRUE" \
-D "CMAKE_BUILD_TYPE:STRING=Release" \
-D "CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/$cxx" \
-D "CMAKE_CXX_FLAGS:STRING=$cxx_flags" \
-D "CMAKE_C_COMPILER:FILEPATH=/usr/bin/$cc" \
-D "CMAKE_C_FLAGS:STRING=$cc_flags" \
-D "CMAKE_INSTALL_PREFIX:PATH=$installdir_adapted" \
-D "SFML_DEPENDENCIES_INSTALL_PREFIX:STRING=$installdir/Library/Frameworks" \
-D "SFML_MISC_INSTALL_PREFIX:STRING=$installdir/usr/local/share/SFML" \
-D "CMAKE_OSX_ARCHITECTURES:STRING=x86_64" \
-D "CMAKE_OSX_DEPLOYMENT_TARGET:STRING=$target" \
-D "CMAKE_OSX_SYSROOT:STRING=$sdk" \
-D "SFML_BUILD_DOC:BOOL=$doc" \
-D "SFML_BUILD_EXAMPLES:BOOL=$examples" \
-D "SFML_BUILD_FRAMEWORKS:BOOL=$frameworks" \
-D "SFML_INSTALL_XCODE_TEMPLATES:BOOL=$templates" \
"$codedir"
assert $? "CMake failed"
# Wipe the previous install tree so stale artifacts never end up packaged.
destroy "$installdir"
make clean && make $makeopts && make install
assert $? "Make failed"
pop_pwd
}
## Build CSFML with CMake and Make, or die.
## Same pattern as build_sfml but for the C binding: configure a fresh build
## tree in $cmakedir, wipe $installdir, clean-build and install into it.
## Locates SFML's CMake config inside the previously built SFML package.
build_csfml () # $1: 'clang' => clang & libc++
               # $2: 'doc'/'no doc'
               # $3: '10.7 SDK'
{
    param_check $# 3 "build_csfml"
    # $1: compiler — only clang with libc++ is supported
    case "$1" in
        'clang')
            cxx="clang++"
            cc="clang"
            cxx_flags="-stdlib=libc++"
            cc_flags="-stdlib=libc++"
            ;;
        *)
            error "Unknown compiler for $1 in create_makefile"
            ;;
    esac
    # $2: whether to build the documentation
    case "$2" in
        'doc')
            doc="TRUE"
            ;;
        'no doc')
            doc="FALSE"
            ;;
        *)
            error "Unknown option for $2 in create_makefile"
            ;;
    esac
    # $3: deployment target / SDK — only the 10.7 SDK is supported
    case "$3" in
        '10.7 SDK')
            target="10.7"
            sdk="$SDK1070"
            ;;
        *)
            error "Unknown option for $3 in create_makefile"
            ;;
    esac
    # No examples here, so a parallel build is always safe.
    makeopts="-j8"
    re_create_dir "$cmakedir"
    push_pwd "$cmakedir"
    cmake -G "Unix Makefiles" \
          -D "BUILD_SHARED_LIBS:BOOL=TRUE" \
          -D "CMAKE_BUILD_TYPE:STRING=Release" \
          -D "CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/$cxx" \
          -D "CMAKE_CXX_FLAGS:STRING=$cxx_flags" \
          -D "CMAKE_C_COMPILER:FILEPATH=/usr/bin/$cc" \
          -D "CMAKE_C_FLAGS:STRING=$cc_flags" \
          -D "CMAKE_INSTALL_PREFIX:PATH=$installdir/usr/local" \
          -D "CMAKE_OSX_ARCHITECTURES:STRING=x86_64" \
          -D "CMAKE_OSX_DEPLOYMENT_TARGET:STRING=$target" \
          -D "CMAKE_OSX_SYSROOT:STRING=$sdk" \
          -D "BUILD_DOC:BOOL=$doc" \
          -D "SFML_DIR:PATH=$sfml_package1070/Frameworks/SFML.framework/Versions/$gittag/Resources/CMake" \
          "$codedir"
    assert $? "CMake failed"
    destroy "$installdir"
    # NB: assert only sees the status of the last command of this chain.
    make clean && make $makeopts && make install
    assert $? "Make failed"
    pop_pwd
}
## Install a package (dylibs only) to compile CSFML later on, or die.
## Copies only the lib/ and include/ trees into /usr/local — just enough
## for CSFML's build to find SFML.
minimal_install_package () # $1: path to package
{
    # Fixed: the check label used to say "install_package", so a bad call
    # reported the wrong function name.
    param_check $# 1 "minimal_install_package"
    push_pwd "$1"
    create_dir "/usr/local/share/SFML/cmake"
    # copy "cmake/Modules/FindSFML.cmake" "/usr/local/share/cmake/Modules/"
    copy "lib/" "/usr/local/lib/"
    copy "include/" "/usr/local/include/"
    pop_pwd
}
## Git clone at a specific ref, or die
clone () # $1: url, $2: dest, $3: branch or tag (also works with sha, but less efficiently)
{
    param_check $# 3 "clone"
    # Try a cheap shallow clone of the ref first; if the ref is not a
    # branch/tag (e.g. a raw sha), fall back to a full clone + checkout.
    # NB: the fallback must be grouped — `a || b && c` parses as
    # `(a || b) && c`, which would run the checkout even after a
    # successful shallow clone.
    git clone "$1" --branch "$3" "$2" --depth 1 \
        || { git clone "$1" "$2" && git -C "$2" checkout "$3"; }
    assert $? "Cannot download the source code from $1"
}
## Remove anything related to SFML from this machine.
## /usr/local entries are user-writable; /Library/Frameworks needs root,
## hence the sudo variant there.
wipe_sfml ()
{
    # Remove SFML from /usr/local
    destroy_all "/usr/local/include/SFML" \
                "/usr/local/lib/"libsfml-* \
                "/usr/local/share/SFML"
    # Remove SFML from /Library
    sudo_destroy_all "/Library/Frameworks/"sfml-* \
                     "/Library/Frameworks/SFML.framework"
    # Xcode templates live in a user-writable location — no sudo needed.
    destroy_all "/Library/Developer/Xcode/Templates/SFML"
}
## Remove anything related to CSFML
wipe_csfml ()
{
    # CSFML only ever installs under /usr/local — nothing else to clean.
    destroy_all "/usr/local/include/CSFML"
    destroy_all "/usr/local/lib/"libcsfml-*
    destroy_all "/usr/local/share/CSFML"
}
##
## INITIALIZE
##
## Clean up before doing anything: remove prior installs, packages and
## archives so this run starts from scratch.
wipe_sfml
wipe_csfml
destroy "$sfml_package_source_zip"
destroy_all "$sfml_archive1070" "$sfml_package1070"
destroy_all "$csfml_archive1070" "$csfml_package1070"
destroy "$tmpdir"
##
## CREATE SOURCE ARCHIVE
##
# we need to do it on a Unix compatible system to keep symlinks valid in frameworks extlibs
wget -O "$sfml_package_source_zip" "$sfml_source_archive_url"
assert $? "Couldn't download the source archive"
# Strip repo-internal files from the published source zip.
zip -d "$sfml_package_source_zip" "SFML-$gittag/.gitattributes"
assert $? "Couldn't remove extra files from source archive"
##
## DOWNLOAD SFML
##
## First, download the code from github
destroy "$codedir"
clone "$sfml_giturl" "$codedir" "$gittag"
##
## BUILD SFML FOR 1070
##
# Now configure CMake for 10.7, 64bits, clang & libc++, doc, example, templates, FRAMEWORKS
# and build SFML !
build_sfml 'clang' 'frameworks' 'doc' 'examples' 'templates' '10.7 SDK'
## Copy the results to the package dir
re_create_dir "$sfml_package1070"
copy "$installdir/usr/local/share/SFML/" \
     "$sfml_package1070/"
create_dir "$sfml_package1070/extlibs/"
# Bundle the third-party dependency frameworks alongside SFML.
copy "$installdir/Library/Frameworks/FLAC.framework" \
     "$installdir/Library/Frameworks/freetype.framework" \
     "$installdir/Library/Frameworks/ogg.framework" \
     "$installdir/Library/Frameworks/OpenAL.framework" \
     "$installdir/Library/Frameworks/vorbis.framework" \
     "$installdir/Library/Frameworks/vorbisenc.framework" \
     "$installdir/Library/Frameworks/vorbisfile.framework" \
     "$sfml_package1070/extlibs/"
create_dir "$sfml_package1070/Frameworks/"
copy "$installdir/Library/Frameworks/"sfml-* \
     "$installdir/Library/Frameworks/SFML.framework" \
     "$sfml_package1070/Frameworks/"
# NOTE(review): templates are taken from the LIVE /Library path, not from
# $installdir — presumably the framework build installs them there; confirm.
create_dir "$sfml_package1070/templates/"
copy "/Library/Developer/Xcode/Templates/SFML" \
     "$sfml_package1070/templates/"
## Now configure CMake for 10.7, 64bits, clang & libc++, DYLIBS
build_sfml 'clang' 'dylibs' 'no doc' 'no examples' 'no templates' '10.7 SDK'
## Copy the results to the package dir
create_dir "$sfml_package1070/include/"
copy "$installdir/usr/local/include/SFML" \
     "$sfml_package1070/include/"
create_dir "$sfml_package1070/lib/"
copy "$installdir/usr/local/lib/"libsfml-* \
     "$sfml_package1070/lib/"
copy "$codedir/changelog.md" \
     "$sfml_package1070/"
## Clean up some files (editor/Finder leftovers)
find "$sfml_package1070" -regex '.*/\.DS_Store' -delete
assert $? "Find failed while removing extra files"
find "$sfml_package1070" -regex '.*/*\.swp' -delete
assert $? "Find failed while removing extra files"
find "$sfml_package1070" -regex '.*/*~' -delete
assert $? "Find failed while removing extra files"
## Apply patch for templates to fix paths
# NOTE(review): "template.2.5.0.patch" looks like a local file, but curl is
# given it as a URL — confirm this resolves as intended.
curl -fsSl "template.2.5.0.patch" | patch "$sfml_package1070/templates/SFML/SFML App.xctemplate/TemplateInfo.plist" --no-backup-if-mismatch
assert $? "Couldn't apply patch on templates"
## And create archives
destroy "$tmpdir"
archive "$sfml_package1070" "$sfml_archive1070"
# Stop here unless the caller explicitly opted in to building CSFML too.
# Quote the expansion: unquoted, an unset/empty BUILD_CSFML made `[` fail
# with a syntax error and the early exit was silently skipped.
if [ "$BUILD_CSFML" != "TRUE" ]
then
    echo "I'M NOT BUILDING CSFML!"
    exit 0
fi
##
## DOWNLOAD CSFML
##
destroy "$codedir"
clone "$csfml_giturl" "$codedir" "$gittag_c"
##
## BUILD CSFML FOR 1070
##
# Install only the dylib flavour of the SFML package so CSFML can link
# against it, then build CSFML itself.
wipe_sfml
minimal_install_package "$sfml_package1070"
build_csfml 'clang' 'doc' '10.7 SDK'
## Copy the results to the package dir
# NOTE(review): CSFML headers are copied from include/SFML — presumably
# CSFML installs its C headers under an SFML/ include subtree; confirm.
create_dir "$csfml_package1070/include/"
copy "$installdir/usr/local/include/SFML" \
     "$csfml_package1070/include/"
create_dir "$csfml_package1070/lib/"
copy "$installdir/usr/local/lib/"libcsfml-* \
     "$csfml_package1070/lib/"
copy "$installdir/usr/local/share/CSFML/" \
     "$csfml_package1070/"
##
## CLEAN UP & ARCHIVE
##
find "$csfml_package1070" -regex '.*/\.DS_Store' -delete
assert $? "Find failed while removing extra files"
find "$csfml_package1070" -regex '.*/*\.swp' -delete
assert $? "Find failed while removing extra files"
find "$csfml_package1070" -regex '.*/*~' -delete
assert $? "Find failed while removing extra files"
## Clean up temporary directories
## And create archives
destroy "$tmpdir"
archive "$csfml_package1070" "$csfml_archive1070"
### End Of Script
| true
|
4b89c97e18c607353c85d84ec3362134db8a8530
|
Shell
|
E4S-Project/testsuite
|
/validation_tests/ginkgo/setup.sh
|
UTF-8
| 958
| 2.640625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Select the Ginkgo example directories, spack package variant and extra
# argument based on which test directory (CPU / CUDA / ROCm) this runs in.
. ../../setup.sh
export THISDIR=$(basename "$PWD")
# Default (CPU) selection: every example dir except the accelerator-only
# and known-problematic ones.
export GINKGO_DIRS=$(ls -d */ | grep -v cuda | grep -v par-ilu-convergence | grep -v preconditioner-export | grep -v nine-pt-stencil-solver | grep -v distributed-solver )
export ARG=""
case "$THISDIR" in
  "ginkgo-cuda")
    spackLoadUnique "ginkgo+cuda $TEST_CUDA_ARCH"
    export GINKGO_DIRS="minimal-cuda-solver simple-solver" # $(ls -d */ | grep minimal-cuda-solver | grep simple-solver ) #grep -v par-ilu-convergence | grep -v preconditioner-export | grep -v nine-pt-stencil-solver )
    ;;
  "ginkgo-rocm")
    spackLoadUnique "ginkgo+rocm $TEST_ROCM_ARCH"
    export GINKGO_DIRS=$(ls -d */ | grep -v cuda | grep -v par-ilu-convergence | grep -v preconditioner-export | grep -v nine-pt-stencil-solver | grep -v ginkgo-overhead )
    export ARG="hip"
    #export ACCEL_DEFINES="-DCMAKE_CXX_COMPILER=hipcc"
    ;;
  *)
    spackLoadUnique ginkgo~cuda~rocm
    ;;
esac
| true
|
fa28d362d667f252279560f66368a9f9a836f630
|
Shell
|
ns1/ns1-privatedns
|
/docker-exabgp/health.sh
|
UTF-8
| 812
| 3.8125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Health check for the exabgp container: succeed only when every configured
# BGP peer is in the "established" state.

# Run each exabgpcli command separately and check ITS status directly.
# The old code tested $? after a `cmd | grep -c` pipeline, which is grep's
# status (1 whenever the count is 0), and never checked the first command.
config_output=$(exabgpcli show neighbor configuration)
if [ $? -ne 0 ]; then
    echo "The exabgpcli command returned non-zero status"
    exit 1
fi
summary_output=$(exabgpcli show neighbor summary)
if [ $? -ne 0 ]; then
    echo "The exabgpcli command returned non-zero status"
    exit 1
fi

# count configured peers
configured_peers=$(printf '%s\n' "$config_output" | grep -ce 'neighbor .* {')
# count peers in established state
established_peers=$(printf '%s\n' "$summary_output" | awk '{ print $4; }' | grep -c established)

# If either value is zero, something is wrong.
if [ "$configured_peers" -eq 0 ]; then
    echo "No configured peers"
    exit 1
fi
if [ "$established_peers" -eq 0 ]; then
    echo "No established peers"
    exit 1
fi

# If the values are not equal to each other, one or more configured peers must not be established
if [ "$configured_peers" -ne "$established_peers" ]; then
    echo "One or more configured peers is not in established state"
    exit 1
fi
| true
|
0694b6924d0e49f0b840cceb6ef6fa24d8d898f6
|
Shell
|
OverSkillers/COMP201920
|
/meta2/build.sh
|
UTF-8
| 942
| 3.328125
| 3
|
[] |
no_license
|
# Build commands; the option flags below may extend them.
LEX="lex jucompiler.l"
YACC="yacc -d jucompiler.y"
CLANG="clang-3.9 -Wall -Wno-unused-function -o jucompiler lex.yy.c y.tab.c ast.c -lm"

# Options: -v verbose/debug everywhere, -p extra debug level, -o verbose yacc.
while getopts "vpo" OPTION; do
    case $OPTION in
        v)
            CLANG=$CLANG" -g -v -DDEBUG=1"
            LEX="lex -v jucompiler.l"
            YACC="yacc -d --debug --verbose jucompiler.y"
            ;;
        p)
            CLANG=$CLANG" -g -v -DDEBUG=2"
            ;;
        o)
            YACC="yacc -d -v jucompiler.y"
    esac
done

# Run each build step in turn, aborting with a NON-ZERO status on failure.
# (The original nested-if version called bare `exit`, which exits 0 and
# made failed builds look successful to callers/CI.)
$LEX || exit 1
$YACC || exit 1
$CLANG || exit 1
# Zip the sources only after a fully successful build.
zip jucompiler.zip jucompiler.l jucompiler.y ast.c ast.h structures.h
| true
|
621ad656423880f103a0d3bb1e7f0a5ad68eb823
|
Shell
|
josh-padnick/dotfiles
|
/bash.d/git
|
UTF-8
| 137
| 2.53125
| 3
|
[] |
no_license
|
#!/bin/bash
# Git bash completion: source the completion script if it is installed.
GIT_FILE="/usr/local/etc/bash_completion.d/git-completion.bash"
# Quote the expansions so the test/source can't word-split the path.
test -f "$GIT_FILE" && source "$GIT_FILE"
| true
|
40872be056665857d3413e141435a9efc4e47e1b
|
Shell
|
yhfudev/bash-mrnative
|
/3rd/runcompile-bitcoin.sh
|
UTF-8
| 1,694
| 3.796875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Resolve a path to an absolute one (a poor man's `readlink -f`):
# cd into the directory component and read `pwd`, then re-attach the file
# name component when the argument was not itself a directory.
my_getpath () {
    local target="$1"
    shift
    local dir="${target}"
    local file=""
    if [ ! -d "${dir}" ]; then
        file=$(basename "${dir}")
        dir=$(dirname "${dir}")
    fi
    local saved
    saved=$(pwd)
    # A failed cd is deliberately ignored: pwd then reports the current dir.
    cd "${dir}" > /dev/null 2>&1
    dir=$(pwd)
    cd "${saved}"
    if [ -z "${file}" ]; then
        echo "${dir}"
    else
        echo "${dir}/${file}"
    fi
}
#DN_EXEC=`echo "$0" | ${EXEC_AWK} -F/ '{b=$1; for (i=2; i < NF; i ++) {b=b "/" $(i)}; print b}'`
# Resolve the directory containing this script to an absolute path with a
# trailing slash, using my_getpath above.
DN_EXEC=$(dirname $(my_getpath "$0") )
if [ ! "${DN_EXEC}" = "" ]; then
    export DN_EXEC="$(my_getpath "${DN_EXEC}")/"
else
    export DN_EXEC="${DN_EXEC}/"
fi
DN_TOP="$(my_getpath "${DN_EXEC}/../")"
#################################
# Package name and the out-of-tree build directory for it.
PREFIX=bitcoin
DN_COMPILE="${DN_EXEC}/${PREFIX}-compile"
# Build one package from a template Makefile: copy the Makefile into the
# build dir, patch its paths in place with sed, pull in any `include`d
# fragments, fetch sources and run the requested make targets.
# $1: template Makefile name (relative to $DN_EXEC); remaining args: targets.
# All build output is redirected to stderr (1>&2).
compile_source () {
    PARAM_MAKEFILE="$1"
    shift
    PARAM_TARGET="$@"
    shift
    # Environment-modules (e.g. on a PBS cluster): load a known toolchain.
    which module
    if [ "$?" = "0" ]; then
        module purge && module load mpc cmake/2.8.7 gcc/4.4 1>&2 # for PBS's gcc
    fi
    mkdir -p "${DN_COMPILE}" 1>&2
    cd "${DN_COMPILE}" 1>&2
    cp "${DN_EXEC}/${PARAM_MAKEFILE}" Makefile 1>&2
    # Rewrite the template's path variables to point at this checkout.
    sed -i "s|^PREFIX=.*$|PREFIX=${DN_EXEC}/${PREFIX}-bin/|g" Makefile 1>&2
    sed -i "s|^DN_SRC=.*$|DN_SRC=${DN_EXEC}/sources/|g" Makefile 1>&2
    sed -i "s|^DN_PATCH=.*$|DN_PATCH=${DN_EXEC}|g" Makefile 1>&2
    sed -i "s|^USE_GPU=.*$|USE_GPU=GPU_CUDA|g" Makefile 1>&2
    # Copy every Makefile fragment the template `include`s.
    cat "${DN_EXEC}/${PARAM_MAKEFILE}" | grep ^include | awk '{print $2; }' | while read a ; do cp "${DN_EXEC}/$a" .; done
    mkdir -p ${DN_EXEC}/sources/
    make get-sources 1>&2
    make ${PARAM_TARGET} 1>&2
}
#compile_source Makefile.gnuplot all
#compile_source Makefile.ffmpeg all
#compile_source Makefile.ns2 all
compile_source Makefile.bitcoin all
| true
|
fe1771f27d59f458ea13624d8f82d3e001f9da1a
|
Shell
|
ethz-asl/maplab
|
/deploy/build_docker_with_rviz.sh
|
UTF-8
| 484
| 3.671875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env zsh
# Assemble the docker build context next to this script, then build the
# images via docker-compose.
this_script=$(readlink -f "$0")
this_dir=$(dirname "$this_script")
docker_ctx=${this_dir}/docker_ctx
echo "Creating build context in: ${docker_ctx}"
# Create the context we use for setting up the images.
mkdir -p "${docker_ctx}"
"$this_dir/copy_to_ctx.sh"
# Build the docker image.
docker-compose -f "${this_dir}/docker-compose.yml" build
# We don't remove the context here as docker-compose requires it to be present.
| true
|
cb835a9591c8c133ba960a8260eaa739a1e94f56
|
Shell
|
mhfowler/notes-from-sfpc
|
/run.sh
|
UTF-8
| 202
| 2.8125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
set -e
# Regenerate the Hexo config from the local template, then serve the site.
# All expansions are quoted so paths containing spaces don't word-split.
PROJECT_PATH=$( cd "$(dirname "$0")" ; pwd -P )
CONFIG_FILE=$PROJECT_PATH/_config.yml
if [ -f "$CONFIG_FILE" ]; then rm "$CONFIG_FILE"; fi
cp _localconfig.yml "$CONFIG_FILE"
hexo server
| true
|
faea8892b9363b9599fcb1b5ac49765789db9ea8
|
Shell
|
JLiangWaterloo/sat
|
/William-Scripts/CommunityOutputOnlyModularity
|
UTF-8
| 317
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Pipe a SAT instance (on stdin) through BCP and the variable graph
# builder into the community-detection binary, then print just the
# resulting modularity value (no trailing newline).
dir_name=$(dirname $0)
dir_name=${dir_name#"./"}
set -e
# NB: set -e is enabled only after dir_name is computed.
out=$("$dir_name"/../Haskell/Bcp | "$dir_name"/../Haskell/Graph variable | "$dir_name"/../Bin/community -i:"/dev/stdin" -o:"/dev/stdout")
# Extract the value after "Modularity:" and strip surrounding whitespace.
modularity=$(echo "$out" | grep "Modularity" | cut -d: -f2 | "$dir_name"/../Bash/Trim)
echo -n "$modularity"
| true
|
448c20974fba21892b0ff9fdb5372a8cde1fb95a
|
Shell
|
epandurski/swpt_debtors
|
/release_to_dockerhub.sh
|
UTF-8
| 426
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Build, tag and publish the swpt_debtors images for a given release TAG.
if [ -z "$1" ]; then
    echo "Usage: release.sh TAG"
    # `return` is only valid inside a function or a sourced script; in a
    # standalone script the shell reports an error and KEEPS GOING with an
    # empty tag. A usage error must exit, and with a failure status.
    exit 1
fi
swpt_debtors="epandurski/swpt_debtors:$1"
swpt_debtors_swagger_ui="epandurski/swpt_debtors_swagger_ui:$1"
docker build -t "$swpt_debtors" --target app-image .
docker build -t "$swpt_debtors_swagger_ui" --target swagger-ui-image .
git tag "v$1"
git push origin "v$1"
docker login
docker push "$swpt_debtors"
docker push "$swpt_debtors_swagger_ui"
| true
|
7ef9a5d74b80dcbbc74df453223089c287ae7cc0
|
Shell
|
dffischer/xmms2-mlib-adapter
|
/PKGBUILD
|
UTF-8
| 550
| 2.5625
| 3
|
[] |
no_license
|
# Maintainer: XZS <d dot f dot fischer at web dot de>
# PKGBUILD for the xmms2 mlib adapter, built from git via waf.
pkgname=python-xmms2-mlib-adapter-git
pkgver=r0
pkgrel=1
pkgdesc="Export and import xmms2 playlists and song statistics to plaintext."
arch=('i686' 'x86_64')
url="https://github.com/dffischer/xmms2-mlib-adapter"
license=('GPL3')
depends=('python' 'python-progressbar' 'xmms2<0.9DrParnassus')
makedepends=('waf' 'python-argparse-manpager')
# template input; name=git
# NOTE(review): the template line above presumably expands to the standard
# git-PKGBUILD boilerplate that defines $_gitname and pkgver() — confirm;
# $_gitname is otherwise undefined here.
build() {
  cd "$_gitname"
  waf --prefix=/usr configure build
}
package() {
  cd "$_gitname"
  waf install --destdir="$pkgdir/"
}
| true
|
2a66c527b74b00920a8a86e87090c6c07458071a
|
Shell
|
l4rz/idrac-7-8-reverse-engineering
|
/squashfs-linux/Linux/usr/bin/discovery_gen_csr.sh
|
UTF-8
| 2,509
| 3.296875
| 3
|
[] |
no_license
|
# This script generates a CSR with a derived CN using the node id
# so it can be authenticated by a provisioning server for zero touch
# deployment.
#
# Files used:
#   1) d_h_ssl_manuf.cnf
#
# Files created:
#   1) STAG_iDRAC_d_h_req.pem (CSR request)
#   2) STAG_iDRAC_d_h_key.pem (private key for SSL handshake - client)
#
# add PATH to enable racadm use
export PATH=$PATH:/usr/local/bin
# default values (exported later for the openssl config to pick up)
CountryCode=""
StateName=""
Locality=""
OrganizationName=""
OrganizationUnit=""
CLIENT_ID=""   # Node Id
Email="."      # email is optional
RET_EXIT=0
TMP_DIR="/tmp"
# fault codes (script exit statuses)
NO_DIRTOWRITE=1
FILE_MISSING=2
NO_CLIENT_ID=3
CA_PRIV_KEY_INVALID=4
GEN_KEY_ER=5
INVALID_PASSPHASE=6
MISMATCH_PRVKEY=7
FAILTOSAVECERT=8
CA_CERT_FILE_ER=9
BAD_DESCERYPT=10
INVALID_REQTYPE=11
cd $TMP_DIR
# cleanup: remove any CSR left over from a previous run
rm /tmp/STAG_iDRAC_d_h_req.pem
OPEN_SSL_CONFIG="/usr/share/discovery/d_h_ssl_manuf.cnf"
# get the client id
#CLIENT_ID=`racadm getsysinfo -s | grep "Service Tag" | sed 's/.*= //'`
#DF538859 fix
CLIENT_ID=$(cat /tmp/sysinfo_nodeid)
if [ X$RET_EXIT = X0 ] ; then
    # A valid service-tag-like id is at least 7 characters long.
    if [ ${#CLIENT_ID} -ge "7" ]; then
        echo CLIENT_ID is set to $CLIENT_ID
    else
        echo CLIENT_ID not in file
        # Try to get the client id using IPMI
        ret=`IPMICmd 20 6 0 59 0 c5 0 0`
        # Strip the IPMI response header and decode the hex bytes to ASCII.
        stagHex=`echo ${ret} | sed 's/^.*0x07 //' | sed 's/0x/\\\\x/g' | sed 's/ //g'`
        CLIENT_ID=`echo -e ${stagHex} | cat`
        if [ ${#CLIENT_ID} -ge "7" ]; then
            echo CLIENT_ID is set to $CLIENT_ID
        else
            echo CLIENT_ID is BLANK
            RET_EXIT=$NO_CLIENT_ID
        fi
    fi
fi
if [ X$RET_EXIT = X0 ] ; then
    # load parameters (subject fields for the CSR), falling back to defaults
    if [ -f /tmp/d_h_ssl_csr_param ] ; then
        . /tmp/d_h_ssl_csr_param
    else
        # default values
        CountryCode="US"
        StateName="TX"
        Locality="Austin"
        OrganizationName="Dell"
        OrganizationUnit="Product Group"
        Email="support@dell.com"
    fi
    # Export so the openssl config file can reference these via env.
    export CountryCode
    export StateName
    export Locality
    export OrganizationName
    export OrganizationUnit
    export CLIENT_ID
    export Email
    # generate keys and CSR req (unencrypted 2048-bit RSA key, SHA-384 digest)
    if ! openssl req -new -newkey rsa:2048 -sha384 -out /tmp/STAG_iDRAC_d_h_req.pem \
        -keyout /tmp/STAG_iDRAC_d_h_key.pem -nodes -config $OPEN_SSL_CONFIG
    then
        RET_EXIT=$GEN_KEY_ER
    fi
fi
# If running under a job id, record the outcome in the job store.
if [ X$jobId != X ] ; then
    if [ X$RET_EXIT = X0 ] ; then
        echo jobId $jobId complete
        jstore -a 11 -j $jobId -s "COMPLETED" -m "LC001" -x "The command was successful"
    else
        echo jobId $jobId failed
        jstore -a 11 -j $jobId -s "FAILED" -m "LC002" -x "General failure"
    fi
fi
exit $RET_EXIT
| true
|
3bd72ca6826b44a5e3d89bd23b1f209b2bcf1d7c
|
Shell
|
cgpuglie/vagrant-development
|
/scripts/provision.sh
|
UTF-8
| 1,227
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash
# Emit a timestamped log line: "<ISO-8601 date> - [LEVEL] message".
log() {
  local lvl="$1" msg="$2"
  echo "$(date -Is) - [$lvl] $msg"
}
log "INFO" "Adding package repos"
# Register Google's Cloud SDK apt repo for this distro release.
export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)"
echo "deb http://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list
curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -

log "INFO" "Performing apt-update"
apt-get update -y

log "INFO" "Installing packages"
apt-get install -y docker.io \
  google-cloud-sdk \
  zsh

# install oh-my-zsh for the vagrant user and make zsh its login shell
su vagrant -c 'sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"'
su vagrant -c 'echo "vagrant" | chsh -s $(which zsh)'

log "INFO" "Configuring user"
# Add vagrant to docker group
sudo gpasswd -a vagrant docker

# Copy ssh keys (placed in /tmp by the Vagrantfile before provisioning)
mkdir -p -m 700 /home/vagrant/.ssh
mv /tmp/id_rsa /home/vagrant/.ssh/id_rsa
mv /tmp/id_rsa.pub /home/vagrant/.ssh/id_rsa.pub
chmod 600 /home/vagrant/.ssh/*

# Set editor to vim; git identity only when the env vars are provided
su vagrant -c 'git config --global core.editor "vim"'
[ ! -z "$GIT_NAME" ] && su vagrant -c "git config --global user.name '$GIT_NAME'"
[ ! -z "$GIT_EMAIL" ] && su vagrant -c "git config --global user.email '$GIT_EMAIL'"
| true
|
500dd97f51b2fddfcbc352d0cd30e5eeb1af99cd
|
Shell
|
CheetahTemplate3/cheetah3
|
/devscripts/postrelease
|
UTF-8
| 784
| 2.953125
| 3
|
[
"MIT"
] |
permissive
|
#! /bin/sh
# Post-release housekeeping: merge or null-merge maint/master depending on
# the released version, then prepare the tree for the next release cycle.
tag="`python setup.py --version`" &&
. `dirname $0`/split_tag.sh &&
split_tag $tag
if [ $micro -eq 0 -a \( $state = final -o $state = post \) ] &&
   git branch --merged master | grep -Fqw maint; then
    git checkout maint &&
    git merge --ff-only master &&
    git checkout master
else
    # Fixed: this used to read `else:` (a Python-ism). The shell parsed that
    # as an ordinary (nonexistent) command inside the then-list, so this
    # branch ran unconditionally instead of only on the else path.
    git checkout master &&
    git merge --ff-only maint ||
    git merge --strategy=ours -m 'Null-merge branch "maint"' maint
fi &&
git checkout $tag~ ANNOUNCE.rst &&
`git var GIT_EDITOR` ANNOUNCE.rst LATEST-CHANGES.rst docs/news.rst Cheetah/Version.py README.rst setup.cfg SetupConfig.py &&
exec git commit --message="Build: Prepare for the next release" --message="[skip ci]" ANNOUNCE.rst LATEST-CHANGES.rst docs/news.rst Cheetah/Version.py README.rst setup.cfg SetupConfig.py
| true
|
3e3d38b2e11a2c648bdfef71a156c5a58e917824
|
Shell
|
mjuszczak/mjuszczak.github.io
|
/lint.sh
|
UTF-8
| 599
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
# Lint posts/pages for stray non-ASCII characters, CR line endings and
# hard-coded FQDN links. Exits non-zero when any check finds a problem.

# Fail fast when pcregrep is missing. The old probe checked for status 127
# and then called bare `exit`, which exits 0 — so a missing tool (and every
# lint failure below) was reported as success.
if ! command -v pcregrep > /dev/null 2>&1; then
    echo "ERROR: You must install pcregrep"
    exit 1
fi

fail=0
directories="./_posts ./_pages"

echo "Annoying characters to fix:"
pcregrep -ir --color='auto' -n '[^\x00-\x7F]' ${directories}
if [ "$?" -eq 0 ]; then
    fail=1
fi

echo "^M's:"
grep -ir $'\r' ${directories}
if [ "$?" -eq 0 ]; then
    fail=1
fi

echo "Use of FQDN links in posts or pages to fix (use post_url):"
grep -ir "(.*atopia\.net.*)" ${directories}
if [ "$?" -eq 0 ]; then
    fail=1
fi

if [ "$fail" -eq 0 ]; then
    echo "SUCCESS!"
else
    echo "Failure :("
    exit 1
fi
| true
|
e8fe78308508afe5b0d7e65912b3887d2681b514
|
Shell
|
caryatid/toolbox
|
/scripts/abduco.sh
|
UTF-8
| 648
| 2.78125
| 3
|
[] |
no_license
|
# Cross-compile abduco statically.
# $1: source dir, $2: install prefix, $3: cross toolchain target triple.
SRC="$1"; shift
INS="$1"; shift
TGT="$1"; shift
set -e
cd "$SRC"
# Write config.mk: prefix/target from the arguments, then a fixed tail.
echo 'PREFIX ?=' "$INS" >config.mk
echo 'TARGET ?=' "$TGT" >>config.mk
# Quoted 'EOF' delimiter: the heredoc body is appended literally,
# with no variable expansion — ${PREFIX} etc. are make variables.
cat <<'EOF' >>config.mk
VERSION = 0.6
MANPREFIX = ${PREFIX}/share/man
INCS = -I. -I${PREFIX}/include -I${PREFIX}/${TARGET}/include -nostdinc
LIBS =
CPPFLAGS = -D_POSIX_C_SOURCE=200809L -D_XOPEN_SOURCE=700
CFLAGS += -std=c99 -pedantic -Wall ${INCS} -DVERSION=\"${VERSION}\" -DNDEBUG ${CPPFLAGS}
LDFLAGS += ${LIBS} -L${PREFIX}/lib -L${PREFIX}/${TARGET}/lib -static -nodefaultlibs
DEBUG_CFLAGS = ${CFLAGS} -UNDEBUG -O0 -g -ggdb
CC = ${TARGET}-gcc
STRIP ?= strip
EOF
# NB: `&&` tests the status of the `make | tee` pipeline, i.e. tee's status,
# not make's — a failed make may still trigger `make install` here.
make | tee foo.log && make install
| true
|
f6f06d03696dc7d9926781d6a00eb5756b06d0ae
|
Shell
|
mblow/coverity-scripts
|
/upload_scan.sh
|
UTF-8
| 1,294
| 3.625
| 4
|
[] |
no_license
|
#!/bin/bash
# Build the given project under cov-build and upload the result to
# Coverity Scan, skipping the upload when there is no new git revision.
# $1: project key (asterixdb | asterixdb-hyracks).
set -e
project=$1
case $project in
    asterixdb-hyracks)
        project_name="Apache AsterixDB Hyracks"
        ;;
    asterixdb)
        project_name="Apache AsterixDB"
        ;;
    *)
        echo Unknown project: ${project}; exit 1;;
esac
# URL-encode-ish forms of the project name for the scan endpoint/archive.
project_key="$(echo $project_name | tr ' ' '+')"
archive_name="$(echo $project_name | tr ' ' '+' | tr '[A-Z]' '[a-z]').tgz"
dir="$(cd $(dirname $0) && pwd)"
work="${dir}/work/${project}"
devroot=$(cd ${dir}/../${project} && pwd)
# Per-project Coverity upload token, stored next to this script.
token=$(cat ${dir}/${project}.token)
if [ -z "$token" ]; then
    echo "ERROR: cannot find token for ${project}" && exit 1
fi
rm -rf ${work}
mkdir -p ${work}/cov-int
cd ${devroot}
git pull
version=$(cd $devroot && git log | head -1 | awk '{ print $NF }')
# Skip the (expensive) scan when HEAD hasn't moved since the last upload.
if [ "$version" = "$(cat $dir/${project}.last_version)" ]; then
    echo "No new version, bypassing..."
    exit 0
fi
set -x
mvn clean
mvn dependency:go-offline
# Offline maven build under cov-build so Coverity can capture it.
cov-build --dir ${work}/cov-int mvn -o -DskipTests=true install
cd ${work}
tar czvf ${archive_name} cov-int
curl -o curl.out \
    --form token=$token \
    --form email=michael@michaelblow.com \
    --form file=@${archive_name} \
    --form version="$version" \
    --form description="${project_name} (Incubating) scan ($(date -u))" \
    https://scan.coverity.com/builds?project=${project_key}
# Remember the uploaded revision for the skip check above.
echo $version > $dir/${project}.last_version
| true
|
c7a08382b812dddb40ec304f8b05d33d313c3a62
|
Shell
|
CGA1123/heroku-buildpack-foo
|
/bin/compile
|
UTF-8
| 91
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Buildpack compile step: succeed only when BAR was exported as "true".
if [[ ${BAR} != "true" ]]; then
    exit 1
fi
echo "Found BAR exported"
| true
|
a251e8a630e0aa84781585b1eeb505673f54934d
|
Shell
|
GoogleCloudPlatform/anthos-samples
|
/.github/gh_runner_dependencies.sh
|
UTF-8
| 2,567
| 3.109375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# shellcheck disable=SC1090
# Install all build/test dependencies for a self-hosted GitHub Actions
# runner host: base packages, Go, addlicense, tfenv + terraform, golint
# and docker. Must run as a user with passwordless sudo.
cd "$HOME" || exit
sudo apt-get install -y \
    curl \
    wget \
    vim \
    git \
    unzip \
    gcc \
    ca-certificates \
    gnupg \
    lsb-release

# install Golang
wget "https://go.dev/dl/go1.18.linux-amd64.tar.gz"
sudo rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go*
echo "export GOPATH=$HOME/go" >> "$HOME"/.profile
echo "export PATH=$PATH:/usr/local/go/bin:$GOPATH/bin" >> "$HOME"/.profile
source "$HOME"/.profile

# install the addlicense tool used to check files for license headers
go install github.com/google/addlicense@latest
sudo ln -s "$HOME"/go/bin/addlicense /bin

# install the tfenv tool to manage terraform versions
git clone https://github.com/tfutils/tfenv.git ~/.tfenv
sudo ln -s "$HOME"/.tfenv/bin/* /bin

# install the terraform versions and configure it to the latest
tfenv install 1.0.0
tfenv install 1.0.1
tfenv install 1.0.6
tfenv install 1.1.3
tfenv install 1.1.9
tfenv install 1.3.3
tfenv install 1.3.9
tfenv use 1.3.9

# install the golint binary
go install golang.org/x/lint/golint@latest
sudo ln -s "$HOME"/go/bin/golint /bin/

# install docker (Docker's own apt repo, keyring-based setup)
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo \
    "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \
    $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update
sudo apt-get install -y docker-ce docker-ce-cli containerd.io
sudo usermod -aG docker "${USER}"

# create the local directory used by the github actions runner
sudo mkdir -p /var/local/gh-runner
echo "All dependencies have been installed."
echo "You have to download the Service Account key into this host, store it under /var/local/gh-runner and give it 444 permissions"
echo "
> sudo bash -c 'echo export GOOGLE_CLOUD_PROJECT=anthos-gke-samples-ci > /var/local/gh-runner/env_vars'
"
| true
|
36a27df54ffe2beb227df45b49b9f815b4a61465
|
Shell
|
chmstimoteo/CloningSystem
|
/drbl/sbin/select-drbl-clients
|
UTF-8
| 11,785
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
# Author: Steven Shiau <steven _at_ nchc org tw>
# License: GPL
# Description: To select the DRBL clients and show the results (IP or MAC)
# Load DRBL setting and functions
DRBL_SCRIPT_PATH="${DRBL_SCRIPT_PATH:-/opt/drbl/}"
. $DRBL_SCRIPT_PATH/sbin/drbl-conf-functions
# Maximum number of list rows shown at once in dialog menus/checklists.
MAX_DIALOG_HEIGHT="10"
#
# Ask the user how to select DRBL clients (by MAC/IP list or group) via a
# dialog menu, then dispatch to the matching select_hosts_by_* function.
# MAC-based choices are offered only when dhcpd.conf pins MAC addresses.
# $1: path of the tmp file the chosen hosts are written to.
ask_by_MAC_or_IP() {
  # Note!!! We must use common tmp file (ANS_TMP) to pass the result,
  # we can NOT
  # just echo result in the end of this function. Otherwise in newer
  # dialog (FC3), the script will wait for the end of function, then it
  # shows the result.
  # There is nothing in the screen when function is called if we just
  # use echo the result to stdout.
  ANS_TMP=$1
  local TMP=`mktemp /tmp/drbl-tmp.XXXXXX`
  local mac_list_opt1 mac_list_opt2 mac_list_opt3 mac_grp_opt1 mac_grp_opt2 mac_grp_opt3 M ip_addr_list_opt
  trap "[ -f "$TMP" ] && rm -f $TMP" HUP INT QUIT TERM EXIT
  # if hardware is set in dhcpd.conf, then we can choose hosts via MAC
  # example: "hardware ethernet 00:02:e3:53:34:6e;
  if grep -q -i "^[[:space:]]*hardware ethernet .*;" $DHCPDCONF_DIR/dhcpd.conf; then
    mac_list_opt1="by_MAC_addr_list"
    mac_list_opt2="$(rep_whspc_w_udrsc "$msg_set_by_MAC_address_list")"
    # Offer the MAC-group option only when group files exist.
    if [ -n "$(LC_ALL=C ls $drbl_syscfg/client-MAC-group-* $drbl_syscfg/MAC-grp-* 2>/dev/null)" ]; then
      mac_grp_opt1="by_MAC_addr_grp"
      mac_grp_opt2="$(rep_whspc_w_udrsc "$msg_set_by_MAC_address_grp")"
    fi
  fi
  # Offer the IP-group option only when group files exist.
  if [ -n "$(LC_ALL=C ls $drbl_syscfg/client-IP-group-* $drbl_syscfg/IP-grp-* 2>/dev/null)" ]; then
    ip_grp_opt1="by_IP_addr_grp"
    ip_grp_opt2="$(rep_whspc_w_udrsc "$msg_set_by_IP_address_grp")"
  fi
  # Unset options expand to nothing, so unavailable modes simply don't appear.
  $DIA --backtitle "$msg_nchc_free_software_labs" --title "$msg_drbl_title" \
  --menu "$msg_specify_hosts,$msg_hint_assign_IP_group,$msg_hint_assign_MAC_group,\n$msg_choose_mode:" 0 0 0 \
  $mac_list_opt1 $mac_list_opt2 \
  $mac_grp_opt1 $mac_grp_opt2 \
  "by_IP_addr_list" "$msg_set_by_IP_address_list" \
  $ip_grp_opt1 $ip_grp_opt2 \
  2> $TMP
  retval=$?
  # dialog returns 1 on Cancel — abort the whole script.
  [ $retval -eq 1 ] && exit 1
  M="$(cat $TMP)"
  case "$M" in
    "by_MAC_addr_list")
      # HOST_MAC_TABLE is not created by drblpush, we have to create it.
      gen_HOST_MAC_TABLE
      select_hosts_by_MAC $ANS_TMP
      ;;
    "by_MAC_addr_grp")
      # HOST_MAC_TABLE is not created by drblpush, we have to create it.
      gen_HOST_MAC_TABLE
      select_hosts_by_MAC_GRP $ANS_TMP
      ;;
    "by_IP_addr_list")
      select_hosts_by_IP $ANS_TMP
      ;;
    "by_IP_addr_grp")
      select_hosts_by_IP_GRP $ANS_TMP
      ;;
  esac
  [ -f "$TMP" ] && rm -f $TMP
  return 0
}
#
# Show a dialog checklist of all client IPs from $IP_HOST_TABLE and write
# the user's (space-separated) selection into the file named by $1.
# Selected group entries are expanded into their member IPs.
select_hosts_by_IP() {
  # Note!!! We must use common tmp file (ANS_TMP) to pass the result,
  # we can NOT
  # just echo result in the end of this function. Otherwise in newer
  # dialog (FC3), the script will wait for the end of function, then it
  # shows the result.
  # There is nothing in the screen when function is called if we just
  # use echo the result to stdout.
  ANS_TMP=$1
  local TMP=`mktemp /tmp/ocs.XXXXXX`
  trap "[ -f "$TMP" ] && rm -f $TMP" HUP INT QUIT TERM EXIT
  local numhosts=0
  local IP_list host_grp hostinfo igrp list_height
  # list the nodes: "<IP> <host> off" triplets for the dialog checklist
  awk -F" " '!/^#/ {print $1, $2, "off"}' $IP_HOST_TABLE >> $TMP
  IP_list="$(cat $TMP)"
  # One "off" per checklist entry, so counting them counts the hosts.
  numhosts="$(echo $IP_list | grep -o off | wc -w)"
  #numhosts=$((numhosts+3))
  if [ $numhosts -gt 0 ]; then
    if [ $numhosts -lt $MAX_DIALOG_HEIGHT ]; then
      list_height=$numhosts
    else
      list_height=$MAX_DIALOG_HEIGHT
    fi
    $DIA \
    --separate-output \
    --backtitle "$msg_nchc_free_software_labs" \
    --title "$msg_drbl_title" \
    --checklist "$msg_specify_hosts ($msg_press_space_to_mark_selection):" 20 70 \
    $list_height $IP_list 2> $TMP
    retval=$?
    # dialog returns 1 on Cancel — abort the whole script.
    [ $retval -eq 1 ] && exit 1
    # Convert the selected group to IP lists
    if grep -qE "(client-IP-group|IP-grp)" $TMP; then
      selected_grp="$(grep -Eo "(client-IP-group-.*|IP-grp-.*)" $TMP)"
      for i in $selected_grp; do
        # Drop the group token from the selection, append its member IPs.
        LC_ALL=C perl -p -i -e "s/$i//g" $TMP
        cat $drbl_syscfg/$i >> $TMP
      done
    fi
    target_hosts="$(cat $TMP)"
  else
    echo "No hosts in $drblroot!!!... Abort!"
    exit 1
  fi
  echo $target_hosts > $ANS_TMP
  [ -f "$TMP" ] && rm -f $TMP
  return 0
} # end of select_hosts_by_IP
#
#
# select_hosts_by_IP_GRP RESULT_FILE
# Show a dialog checklist of IP *group* files found under $drbl_syscfg
# (client-IP-group-* / IP-grp-*), expand the chosen groups to their
# member IPs, and write the result (space-separated) into RESULT_FILE.
# Globals read: drbl_syscfg, MAX_DIALOG_HEIGHT, DIA, drblroot, msg_*.
# Globals written: ANS_TMP, target_hosts.
# Exits 1 if the user cancels or no group files exist.
select_hosts_by_IP_GRP() {
# Note!!! We must use common tmp file (ANS_TMP) to pass the result,
# we can NOT
# just echo result in the end of this function. Otherwise in newer
# dialog (FC3), the script will wait for the end of function, then it
# shows the result.
# There is nothing in the screen when function is called if we just
# use echo the result to stdout.
ANS_TMP=$1
local TMP=`mktemp /tmp/ocs.XXXXXX`
trap "[ -f "$TMP" ] && rm -f $TMP" HUP INT QUIT TERM EXIT
local hostlist=""
local numhosts=0
local IP_list host_grp hostinfo igrp list_height numlist
# Part I: list the IP group
numlist=0
for igrp in $drbl_syscfg/client-IP-group-* $drbl_syscfg/IP-grp-*; do
# An unmatched glob leaves the literal pattern; skip non-existent paths.
[ ! -e "$igrp" ] && continue
host_grp="$(basename $igrp)"
# Use the first two lines of the group file (spaces -> "_") as a
# truncated preview label for the checklist item.
hostinfo="$(head -n 2 $igrp)"
hostinfo="$(echo $hostinfo | tr " " "_")..."
hostlist="$hostlist $host_grp $hostinfo off"
numlist="$((numlist+1))"
done
echo $hostlist > $TMP
IP_list="$(cat $TMP)"
if [ $numlist -gt 0 ]; then
# Cap the visible list height at MAX_DIALOG_HEIGHT.
if [ $numlist -lt $MAX_DIALOG_HEIGHT ]; then
list_height=$numlist
else
list_height=$MAX_DIALOG_HEIGHT
fi
# Selected group tags are written to stderr, captured into $TMP.
$DIA \
--separate-output \
--backtitle "$msg_nchc_free_software_labs" \
--title "$msg_drbl_title" \
--checklist "$msg_specify_hosts ($msg_press_space_to_mark_selection):" 20 70 \
$list_height $IP_list 2> $TMP
retval=$?
[ $retval -eq 1 ] && exit 1
# Convert the selected group to IP lists
# Replace each selected group tag with the member IPs from its file.
if grep -qE "(client-IP-group|IP-grp)" $TMP; then
selected_grp="$(grep -Eo "(client-IP-group-.*|IP-grp-.*)" $TMP)"
for i in $selected_grp; do
perl -p -i -e "s/$i//g" $TMP
cat $drbl_syscfg/$i >> $TMP
done
fi
target_hosts="$(cat $TMP)"
else
echo "No hosts in $drblroot!!!... Abort!"
exit 1
fi
echo $target_hosts > $ANS_TMP
[ -f "$TMP" ] && rm -f $TMP
return 0
} # select_hosts_by_IP_GRP
# gen_HOST_MAC_TABLE
# (Re)create $HOST_MAC_TABLE, the hostname/MAC/IP table, by parsing
# dhcpd.conf entries of the form:
#   host fc3-101 {
#     hardware ethernet 00:02:e3:53:34:60;
#     fixed-address 192.168.232.1;
#   }
# Globals: HOST_MAC_TABLE (path, read); calls the parse_dhcpd_conf helper.
gen_HOST_MAC_TABLE() {
# clean the IP-MAC table
# Bug fix: the original test was inverted ("[ ! -f ... ] && rm -f ..."),
# which removed the file only when it did NOT exist, i.e. a stale table
# was never cleaned and parse_dhcpd_conf appended to old contents.
[ -f "$HOST_MAC_TABLE" ] && rm -f "$HOST_MAC_TABLE"
# To get the hostname - IP table.
parse_dhcpd_conf $HOST_MAC_TABLE
}
#
#
# select_hosts_by_MAC RESULT_FILE
# Show a dialog checklist of client MAC addresses read from
# $HOST_MAC_TABLE (format: "hostname IP MAC") and write the selected
# MACs (space-separated) into the file named by the global ANS_TMP.
# NOTE(review): unlike select_hosts_by_IP, this function never assigns
# ANS_TMP=$1 — the $1 argument is ignored and the caller's global
# ANS_TMP is used instead.  `numhosts` is also not declared local here.
# Globals read: HOST_MAC_TABLE, MAX_DIALOG_HEIGHT, DIA, drbl_syscfg,
#   ANS_TMP, msg_*.  Globals written: target_hosts, numhosts.
select_hosts_by_MAC() {
# Note!!! We must use common tmp file (ANS_TMP) to pass the result,
# we can NOT
# just echo result in the end of this function. Otherwise in newer
# dialog (FC3), the script will wait for the end of function, then it
# shows the result.
# There is nothing in the screen when function is called if we just
# use echo the result to stdout.
local TMP=`mktemp /tmp/drbl-mac-tmp.XXXXXX`
local MAC_list host_grp hostinfo igrp list_height
trap "[ -f "$TMP" ] && rm -f $TMP" HUP INT QUIT TERM EXIT
# list the nodes
# Checklist entry per host: tag=MAC ($3), label=hostname(IP), unchecked.
awk -F" " '!/^#/ {print $3, $1"("$2")", "off"}' $HOST_MAC_TABLE >> $TMP
MAC_list="$(cat $TMP)"
numhosts=$(echo $MAC_list | grep -o off | wc -w)
if [ $numhosts -gt 0 ]; then
# Cap the visible list height at MAX_DIALOG_HEIGHT.
if [ $numhosts -lt $MAX_DIALOG_HEIGHT ]; then
list_height=$numhosts
else
list_height=$MAX_DIALOG_HEIGHT
fi
# Selected MAC tags are written to stderr, captured into $TMP.
$DIA \
--separate-output \
--backtitle "$msg_nchc_free_software_labs" \
--title "$msg_drbl_title" \
--checklist "$msg_specify_hosts ($msg_press_space_to_mark_selection):" 20 70\
$list_height $MAC_list 2> $TMP
retval=$?
[ $retval -eq 1 ] && exit 1
# Convert the selected group to MAC lists
# Replace any selected MAC-group tag with its group file's contents.
if grep -qE "(client-MAC-group|MAC-grp)" $TMP; then
selected_grp="$(grep -Eo "(client-MAC-group-.*|MAC-grp-.*)" $TMP)"
for i in $selected_grp; do
perl -p -i -e "s/$i//g" $TMP
cat $drbl_syscfg/$i >> $TMP
done
fi
target_hosts=$(cat $TMP)
else
echo "No MAC address in dhcpd.conf! Abort! "
exit 1
fi
echo $target_hosts > $ANS_TMP
[ -f "$TMP" ] && rm -f $TMP
return 0
} # end of select_hosts_by_MAC
#
#
# select_hosts_by_MAC_GRP RESULT_FILE
# Show a dialog checklist of MAC *group* files found under $drbl_syscfg
# (client-MAC-group-* / MAC-grp-*), expand the chosen groups to their
# member MACs, and write the result into the file named by the global
# ANS_TMP.
# NOTE(review): like select_hosts_by_MAC, the $1 argument is ignored;
# the caller's global ANS_TMP is used for the result file.
# Globals read: drbl_syscfg, MAX_DIALOG_HEIGHT, DIA, ANS_TMP, msg_*.
# Globals written: target_hosts, numlist.
select_hosts_by_MAC_GRP() {
# Note!!! We must use common tmp file (ANS_TMP) to pass the result,
# we can NOT
# just echo result in the end of this function. Otherwise in newer
# dialog (FC3), the script will wait for the end of function, then it
# shows the result.
# There is nothing in the screen when function is called if we just
# use echo the result to stdout.
local TMP=`mktemp /tmp/drbl-mac-tmp.XXXXXX`
local MAC_list host_grp hostinfo igrp list_height hostlist
trap "[ -f "$TMP" ] && rm -f $TMP" HUP INT QUIT TERM EXIT
hostlist=""
numlist=0
# list the IP group
for igrp in $drbl_syscfg/client-MAC-group-* $drbl_syscfg/MAC-grp-*; do
# An unmatched glob leaves the literal pattern; skip non-existent paths.
[ ! -e "$igrp" ] && continue
host_grp="$(basename $igrp)"
# First two lines of the group file (spaces -> "_") as a preview label.
hostinfo="$(head -n 2 $igrp)"
hostinfo="$(echo $hostinfo | tr " " "_")..."
hostlist="$hostlist $host_grp $hostinfo off"
numlist="$((numlist+1))"
done
echo $hostlist > $TMP
MAC_list="$(cat $TMP)"
if [ $numlist -gt 0 ]; then
# Cap the visible list height at MAX_DIALOG_HEIGHT.
if [ $numlist -lt $MAX_DIALOG_HEIGHT ]; then
list_height=$numlist
else
list_height=$MAX_DIALOG_HEIGHT
fi
# Selected group tags are written to stderr, captured into $TMP.
$DIA \
--separate-output \
--backtitle "$msg_nchc_free_software_labs" \
--title "$msg_drbl_title" \
--checklist "$msg_specify_hosts ($msg_press_space_to_mark_selection):" 20 70\
$list_height $MAC_list 2> $TMP
retval=$?
[ $retval -eq 1 ] && exit 1
# Convert the selected group to MAC lists
# Replace each selected group tag with the member MACs from its file.
if grep -qE "(client-MAC-group|MAC-grp)" $TMP; then
selected_grp="$(grep -Eo "(client-MAC-group-.*|MAC-grp-.*)" $TMP)"
for i in $selected_grp; do
perl -p -i -e "s/$i//g" $TMP
cat $drbl_syscfg/$i >> $TMP
done
fi
target_hosts=$(cat $TMP)
else
echo "No MAC address in dhcpd.conf! Abort! "
exit 1
fi
echo $target_hosts > $ANS_TMP
[ -f "$TMP" ] && rm -f $TMP
return 0
} # end of select_hosts_by_MAC_GRP
# Print the command-line help text for this script to stdout:
# a short description, the usage line, the option help (delegated to
# the shared DRBL help-prompt helpers), and the RESULT_FILE argument.
usage() {
printf '%s\n' \
"Description:" \
"Select DRBL clients by IP or MAC address" \
"Usage: $(basename $0) [Options] RESULT_FILE" \
"Options:"
# Shared helpers print the -l/--language and -dN dialog-program options.
language_help_prompt_by_idx_no
language_help_prompt_by_idx_name
dialog_like_prog_help_prompt
printf '%s\n' "RESULT_FILE The file to store the selected hosts"
}
# option
# ---- Command-line parsing ----
# Recognized: -l/--language LANG, -d0..-d4 to pick the dialog program.
# The first non-option argument is the result file; unknown options
# print usage and exit 2.
while [ $# -gt 0 ]; do
case "$1" in
-l|--language)
shift;
if [ -z "$(echo $1 |grep ^-.)" ]; then
# skip the -xx option, in case
language_opt="$1"
fi
shift ;;
-d0|--dialog)
DIA="dialog"
shift;;
-d1|--Xdialog)
DIA="Xdialog"
shift;;
-d2|--whiptail)
DIA="whiptail"
shift;;
-d3|--gdialog)
DIA="gdialog"
shift;;
-d4|--kdialog)
DIA="kdialog"
shift;;
-*) echo "${0}: ${1}: invalid option" >&2
usage >& 2
exit 2 ;;
*) break ;;
esac
done
#
# ---- Main flow ----
# RESULT_FILE is mandatory; the selected hosts are written to it.
HOST_FILE=$1
[ -z "$HOST_FILE" ] && echo "You must specify the target file!!! Program terminated!!!" && exit 1
#
# Load the i18n message set, then validate/normalize the dialog program.
ask_and_load_lang_set $language_opt
# check DIA
check_DIA_set_ESC $DIA
# Note!!! We must use common tmp file (ANS_TMP) to pass the result,
# we can NOT
# just echo result in the end of this function. Otherwise in newer
# dialog (FC3), the script will wait for the end of function, then it
# shows the result.
# There is nothing in the screen when function is called if we just
# use echo the result to stdout.
ANS_TMP=`mktemp /tmp/mac_ip_ans.XXXXXX`
trap "[ -f "$ANS_TMP" ] && rm -f $ANS_TMP" HUP INT QUIT TERM EXIT
# Run the interactive selection, then copy the result to RESULT_FILE.
ask_by_MAC_or_IP $ANS_TMP
target_hosts="$(cat $ANS_TMP)"
echo $target_hosts > $HOST_FILE
[ -f "$ANS_TMP" ] && rm -f $ANS_TMP
exit 0
| true
|
daf69e3bdfcf43bdb63e63acf4ed72a89c05ae63
|
Shell
|
truth-zhenli/LinuxTools
|
/gpufans
|
UTF-8
| 2,160
| 3.90625
| 4
|
[] |
no_license
|
#!/bin/bash
# This script modified the one from https://gist.github.com/squadbox/e5b5f7bcd86259d627ed
# It will enable or disable fixed gpu fan speed.
# You may have to run this as root or with sudo if the current user is not authorized to start X sessions.
#
# Two steps to execute it:
# Step 1: chmod +x gpufans
# Step 2: gpufans [40-100, auto]
#
# Flow: drop to runlevel 3 (no display manager), write a temporary
# X config with cool-bits enabled, set/clear the fan override per GPU
# via short one-shot X sessions, then remove the generated xorg.conf
# and return to runlevel 5.
init 3
# --cool-bits=28 unlocks fan control in nvidia-settings; the generated
# /etc/X11/xorg.conf is deleted at the end of the script.
nvidia-xconfig -a --enable-all-gpus --cool-bits=28 --allow-empty-initial-configuration
# Paths to the utilities we will need
SMI='/usr/bin/nvidia-smi'
SET='/usr/bin/nvidia-settings'
# Determine major driver version
# NOTE(review): assumes the version is field 8 of the NVIDIA line in
# /proc/driver/nvidia/version — format varies across driver releases.
VER=`awk '/NVIDIA/ {print $8}' /proc/driver/nvidia/version | cut -d . -f 1`
# Drivers from 285.x.y on allow persistence mode setting
if [ ${VER} -lt 285 ]
then
echo "Error: Current driver version is ${VER}. Driver version must be greater than 285."; exit 1;
fi
# Read a numerical command line arg between 40 and 100
# '[ "$1" -eq "$1" ] 2>/dev/null' is an integer test (non-numeric args
# make -eq fail); the "0$1" prefix keeps the comparison from erroring
# on an empty $1.
if [ "$1" -eq "$1" ] 2>/dev/null && [ "0$1" -ge "40" ] && [ "0$1" -le "100" ]
then
$SMI -pm 1 # enable persistance mode
speed=$1 # set speed
echo "Setting fan to $speed%."
# how many GPU's are in the system?
NUMGPU="$(nvidia-smi -L | wc -l)"
# loop through each GPU and individually set fan speed
n=0
while [ $n -lt $NUMGPU ];
do
# start an x session, and call nvidia-settings to enable fan control and set speed
xinit ${SET} -a [gpu:${n}]/GPUFanControlState=1 -a [fan:${n}]/GPUTargetFanSpeed=$speed -- :0 -once
let n=n+1
done
echo "Complete"
elif [ "x$1" = "xauto" ]
then
$SMI -pm 0 # disable persistance mode
echo "Enabling default auto fan control."
# how many GPU's are in the system?
NUMGPU="$(nvidia-smi -L | wc -l)"
# loop through each GPU and individually set fan speed
n=0
while [ $n -lt $NUMGPU ];
do
# start an x session, and call nvidia-settings to enable fan control and set speed
xinit ${SET} -a [gpu:${n}]/GPUFanControlState=0 -- :0 -once
let n=n+1
done
echo "Complete"
else
echo "Error: Please pick a fan speed between 40 and 100, or stop."; exit 1;
fi
# Remove the cool-bits xorg.conf generated above and restore the
# graphical runlevel.
rm /etc/X11/xorg.conf
init 5
exit 0;
| true
|
f9b1412facf4f8484f0f8b8f2f2925b02bd34ba0
|
Shell
|
xyc/salesforcedx-templates
|
/scripts/publish-workflow.sh
|
UTF-8
| 535
| 2.59375
| 3
|
[] |
permissive
|
#!/usr/bin/env bash
# Trigger the CircleCI publish pipeline for salesforcedx-templates.
# Usage: publish-workflow.sh CIRCLECI_TOKEN [PUBLISH_TYPE]
# For publishing a patch version, add the following parameter:
# "publish-type": "patch"
CircleCIToken=$1
PublishType=$2
# POST to the CircleCI v2 pipeline API on branch "main" with the
# "publish" parameter set; PublishType is interpolated into the JSON
# body (single quotes closed around the expansion).
curl -v -u ${CircleCIToken}: -X POST --header "Content-Type: application/json" -d '{
  "branch": "main",
  "parameters": {
    "publish": true,
    "publish-type": "'"${PublishType}"'"
  }
}' https://circleci.com/api/v2/project/gh/forcedotcom/salesforcedx-templates/pipeline
# open the release pipe line url
open "https://app.circleci.com/pipelines/github/forcedotcom/salesforcedx-templates"
| true
|
1e82294cc0c602c13d9f7fb3021816ca18739910
|
Shell
|
Loosefish/dostuff
|
/tests/default.sh
|
UTF-8
| 238
| 3.390625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# call default function
# Integration test for the dostuff binary: $1 is the path to the binary
# under test.  A scratch directory is created with a Dofile whose
# default target ("do_") prints "6446"; the binary is run with no
# arguments and must emit exactly that string.  Exits 0 on success.
ds=$(realpath "$1")
dir=$(mktemp -d)
cd "${dir}"
# Heredoc delimiter is unquoted but the body contains no expansions,
# so the Dofile is written literally.
cat > Dofile << EOF
#!/bin/sh
do_ () {
echo -n "6446"
}
EOF
# Run the binary from inside the scratch dir so it picks up ./Dofile.
out=$(${ds})
ret=$?
if [ "$out" != "6446" ]; then
ret=1
fi
rm -rf "${dir}"
exit ${ret}
| true
|
c20108b5dd6a3255a49250e228fdce054de776c4
|
Shell
|
Jalez/WebStore0.1
|
/storeWebpage/vagrant/provision.sh
|
UTF-8
| 1,941
| 3.171875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Vagrant provisioning script: installs git, Node.js 14, MongoDB 4.4
# (version-pinned and held), and the shared libraries Puppeteer needs.
# Each install is skipped if dpkg already reports the package installed.
# Packages
NODE="nodejs"
BUILD_ESSENTIAL="build-essential"
MONGO="mongodb-org"
GIT="git"
# Prerequisites
GIT_INSTALLED=$(dpkg-query -W --showformat='${Status}\n' $GIT | grep "install ok installed")
echo "Checking for $GIT: $GIT_INSTALLED"
if [ "" == "$GIT_INSTALLED" ]; then
  apt-get update
  apt-get install -y $GIT
fi
# Node.js
NODE_INSTALLED=$(dpkg-query -W --showformat='${Status}\n' $NODE | grep "install ok installed")
echo "Checking for $NODE: $NODE_INSTALLED"
if [ "" == "$NODE_INSTALLED" ]; then
  # NodeSource setup script adds the apt repo for Node 14.x.
  curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -
  apt-get install -y build-essential nodejs
fi
# MongoDB
# Note: this check greps for "hold ok installed" (not "install ok")
# because the mongodb packages are placed on hold below.
MONGO_INSTALLED=$(dpkg-query -W --showformat='${Status}\n' $MONGO | grep "hold ok installed")
echo "Checking for $MONGO: $MONGO_INSTALLED"
if [ "" == "$MONGO_INSTALLED" ]; then
  wget -qO - https://www.mongodb.org/static/pgp/server-4.4.asc | sudo apt-key add -
  echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.4.list
  apt-get update
  sudo apt-get install -y mongodb-org=4.4.0 mongodb-org-server=4.4.0 mongodb-org-shell=4.4.0 mongodb-org-mongos=4.4.0 mongodb-org-tools=4.4.0
  # Hold all mongodb packages so apt upgrades don't move off 4.4.0.
  echo "mongodb-org hold" | sudo dpkg --set-selections
  echo "mongodb-org-server hold" | sudo dpkg --set-selections
  echo "mongodb-org-shell hold" | sudo dpkg --set-selections
  echo "mongodb-org-mongos hold" | sudo dpkg --set-selections
  echo "mongodb-org-tools hold" | sudo dpkg --set-selections
  # Listen on all interfaces so the host can reach the VM's mongod.
  sudo sed -i -e 's/bindIp: 127.0.0.1/bindIp: 0.0.0.0/' /etc/mongod.conf
  sudo service mongod start
  sudo systemctl enable mongod.service
fi
# Libraries required by Puppeteer
apt install -y libnss3 libatk1.0-0 libatk-bridge2.0-0 libx11-xcb1 libxcb-dri3-0 libxcomposite1 libxdamage1 libxfixes3 libcups2 libxrandr2 libgbm1 libasound2 libpangocairo-1.0-0 libgtk-3-0
| true
|
11979407b1f43db4e750374b6db1ea062780360d
|
Shell
|
renan91300/Teste
|
/criarServidor.sh
|
UTF-8
| 1,332
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# Interactive vsftpd (FTP server) management menu (Portuguese UI).
# menu() loops forever showing the options below; option 7 exits.
#
# NOTE(review): the original file had three fatal syntax errors — the
# "case" was closed with "}" instead of "esac", the "while" loop had no
# "done", and the menu() body was never closed with "}" — so bash could
# not parse it at all.  Fixed below without changing any menu text.
# As in the original, menu() is only defined here, not invoked.
x='s'
menu(){
# "while true" loops until the explicit "exit" in option 7.  (The
# original "while true $x != \"s\"" passed ignored arguments to the
# `true` builtin, which is equivalent.)
while true
do
clear
echo "//--- Gerenciador FTP ---//";
echo "1)Instalar servidor FTP"
echo "2)Habilitar acesso anônimo"
echo "3)Desabilitar acesso anônimo"
echo "4)Habilitar envio/escrita de arquivos"
echo "5)Desabilitar envio/escrita de arquivos"
echo "6)Desintalar o servidor FTP"
echo "7)Sair"
echo "//-----------------------//"
echo ""
echo "Digite a opção desejada: ";
read x
echo "Opção informada ($x)"
case "$x" in
1)
sudo apt-get install vsftpd;
sleep 2
clear;
;;
2)
# Toggle the vsftpd.conf flag in place, then restart the service.
sudo sed -i 's/anonymous_enable=NO/anonymous_enable=YES/g' /etc/vsftpd.conf
service vsftpd restart;
sleep 2
clear;
;;
3)
sudo sed -i 's/anonymous_enable=YES/anonymous_enable=NO/g' /etc/vsftpd.conf
service vsftpd restart;
sleep 2
clear;
;;
4)
sudo sed -i 's/write_enable=NO/write_enable=YES/g' /etc/vsftpd.conf
service vsftpd restart;
sleep 2
clear;
;;
5)
sudo sed -i 's/write_enable=YES/write_enable=NO/g' /etc/vsftpd.conf
service vsftpd restart;
sleep 2
clear;
;;
6)
sudo apt-get remove vsftpd
sleep 2
clear;
;;
7)echo "================================================"
echo "Saindo..."
sleep 2
clear;
exit;
# (unreachable in the original; kept for fidelity)
echo "================================================"
;;
*)
echo "Opção inválida!"
;;
esac
done
}
| true
|
f04c58f4091e80fb21cde67bb0c2770bf11dcc4e
|
Shell
|
rafael-larrazolo/dockerized-hadoop
|
/docker/hadoop-master/run.sh
|
UTF-8
| 569
| 3.1875
| 3
|
[] |
no_license
|
#!/bin/bash
# Hadoop master container entrypoint: verify the namenode directory and
# CLUSTER_NAME, format HDFS on first run, then start the namenode and
# YARN daemons (historyserver kept in the foreground as PID 1's child).
namedir=/hadoop/dfs/name
if [ ! -d $namedir ]; then
echo "Namenode name directory not found: $namedir"
exit 2
fi
if [ -z "$CLUSTER_NAME" ]; then
echo "Cluster name not specified"
exit 2
fi
# Format only when the name directory is empty.  The piped 'Y'
# presumably answers hdfs's interactive re-format confirmation — verify.
# NOTE(review): with A && B || C, C also runs if B fails; safe here only
# because the echo cannot fail.
[ "$(ls -A $namedir)" ] && echo "Not Empty" || echo 'Y' | $HADOOP_PREFIX/bin/hdfs --config $HADOOP_CONF_DIR namenode -format $CLUSTER_NAME
#service sshd start
# Namenode and resourcemanager in the background; historyserver last,
# in the foreground, so the container stays alive.
$HADOOP_PREFIX/bin/hdfs --config $HADOOP_CONF_DIR namenode &
$HADOOP_PREFIX/bin/yarn --config $HADOOP_CONF_DIR resourcemanager &
$HADOOP_PREFIX/bin/yarn --config $HADOOP_CONF_DIR historyserver
| true
|
a2bb7bfd150f3d3931b5819e54dff73bd699ef5c
|
Shell
|
amhiggin/LuceneInTheSkyWithDiamonds
|
/run_custom_search_engine.sh
|
UTF-8
| 1,101
| 3.1875
| 3
|
[] |
no_license
|
#/bin/bash
#######################################################
# When running this script, specify the RELATIVE path #
# of the qrels file. #
# For example, if the qrels file is located in #
# ./qrels/trec-qrels #
# then specify #
# qrels/trec-qrels #
# as the argument for this script. #
#######################################################
# Args: $1 = relative qrels path, $2 = scoring model name,
#       $3 = analyzer name.
QRELS_FILE="$1"
SCORING_MODEL="$2"
ANALYZER="$3"
# This will run the search engine with a user-input specifying the qrels file, the scoring model and the analyzer
# It will use the MultiSimilarity that was defined in the code (ApplicationLibrary.java) and the CustomAnalyzer.java analyzer
# Chained with && so trec_eval only runs if the search engine succeeded;
# results and the trec_eval summary land in ./output/.
java -jar -XX:-UseGCOverheadLimit "$(pwd)"/target/Lucene_in_The_Sky_With_Diamonds-0.0.1-SNAPSHOT.jar "$(pwd)"/"$QRELS_FILE" $SCORING_MODEL $ANALYZER && \
"$(pwd)"/trec_eval/trec_eval "$(pwd)"/"$QRELS_FILE" "$(pwd)"/output/"$SCORING_MODEL-$ANALYZER-results.txt" > "$(pwd)"/output/"trec-eval-$SCORING_MODEL-$ANALYZER-results.txt" && \
echo "Trec eval results available in the output directory"
| true
|
49b3ea1588fc2edf7bae92c54710b3784def03ec
|
Shell
|
leskovsky1/FRI
|
/Bc/5.semester/OS/copy of OSko_skuska/OS/cv03/s3
|
UTF-8
| 68
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/bash
# Print each entry of $PATH on its own line.
# ':' is turned into spaces and the shell word-splits the result,
# exactly like the original "echo $PATH | tr ':' ' '" pipeline (empty
# entries are dropped; entries containing spaces would split further).
for entry in ${PATH//:/ }
do echo $entry
done
| true
|
7529ac94a8717ffa9c211527ebc119fa7ee7d030
|
Shell
|
npr0n/bookmark-dl
|
/copyables/entrypoint.sh
|
UTF-8
| 2,355
| 3.4375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Docker entrypoint: fix ownership to the requested PUID/PGID, start
# cron, configure the optional VNC server (password, screen size),
# export selected env vars for cron jobs, then exec the CMD.
set -e
PUID=${PUID:-911}
PGID=${PGID:-911}
# set uid and gid
groupmod -o -g "$PGID" chrome
usermod -o -u "$PUID" chrome
# redo permissions
chown -R chrome:chrome /output
chown -R chrome:chrome /home/chrome
chown chrome /var/spool/cron/crontabs/chrome
# start cron
service cron start
# if VNC is enabled, copy correct supervisord.conf
# (the default supervisord.conf includes VNC; the -novnc variant does not)
if [ "$ENABLE_VNC" = "true" ] ; then
echo 'VNC is enabled'
else
echo 'VNC is disabled'
cp /etc/supervisor/conf.d/supervisord-novnc.conf /etc/supervisor/conf.d/supervisord.conf
fi
# echo if JD, LOG, CACHE are enabled
if [ "$ENABLE_JD" = "true" ] ; then
echo 'MyJDownloader is enabled'
else
echo 'MyJDownloader is disabled'
fi
if [ "$ENABLE_LOG" = "true" ] ; then
echo 'Logging is enabled'
else
echo 'Logging is disabled'
fi
if [ "$ENABLE_CACHE" = "true" ] ; then
echo 'Caching is enabled'
else
echo 'Caching is disabled'
fi
# VNC default no password
export X11VNC_AUTH="-nopw"
#echo "X11VNC_AUTH set"
# look for VNC password file in order (first match is used)
passwd_files=(
/home/chrome/.vnc/passwd
/run/secrets/vncpasswd
)
#echo "passwd_files set"
for passwd_file in ${passwd_files[@]}; do
if [[ -f ${passwd_file} ]]; then
export X11VNC_AUTH="-rfbauth ${passwd_file}"
break
fi
done
#echo "passwd loop done"
# override above if VNC_PASSWORD env var is set (insecure!)
if [[ "$VNC_PASSWORD" != "" ]]; then
export X11VNC_AUTH="-passwd $VNC_PASSWORD"
fi
#echo "exported new password"
# set sizes for both VNC screen & Chrome window
: ${VNC_SCREEN_SIZE:='1024x768'}
# Split "WIDTHxHEIGHT" into the two dimensions.
IFS='x' read SCREEN_WIDTH SCREEN_HEIGHT <<< "${VNC_SCREEN_SIZE}"
export VNC_SCREEN="${SCREEN_WIDTH}x${SCREEN_HEIGHT}x24"
export CHROME_WINDOW_SIZE="${SCREEN_WIDTH},${SCREEN_HEIGHT}"
export CHROME_OPTS="${CHROME_OPTS_OVERRIDE:- --user-data-dir --no-sandbox --window-position=0,0 --force-device-scale-factor=1 --disable-dev-shm-usage}"
# export necessary env var for python script
# Cron jobs don't inherit the container environment, so dump the
# ENABLE_*/JD_*/OUT* variables as "export" lines into a sourceable file.
echo "# ENV SOURCE FOR CRON" > /envsource.sh
printenv | sed 's/^\(.*\)$/export \1/g' | grep -E "^export ENABLE_" >> /envsource.sh
printenv | sed 's/^\(.*\)$/export \1/g' | grep -E "^export JD_" >> /envsource.sh
printenv | sed 's/^\(.*\)$/export \1/g' | grep -E "^export OUT" >> /envsource.sh
cp /envsource.sh /home/chrome
chown chrome:chrome /home/chrome/envsource.sh
chmod +x /home/chrome/envsource.sh
# Hand off to the container CMD (e.g. supervisord) as PID 1.
exec "$@"
| true
|
3be44a9ef048914ab2b46cac7cb9b72ce1046364
|
Shell
|
wmthu/Public_HBP
|
/Code/Analysis/Louisiana/Subsample/subsampling.sh
|
UTF-8
| 4,101
| 2.53125
| 3
|
[] |
no_license
|
#!/bin/sh
#-------------------------------------------------------------------------------
# Name: subsampling.sh
# Purpose: This file is a script for flagging sample restrictions and generating
# time series of well-level production data
#
# Author: Evan, Ryan, Eric
#
# ---------------------------------------------------------------------------
#
# This is the descriptive portion of each script run:
# ------------------------------------------------------
### do $CODEDIR/Subsample/define_sample_units.do
#
# INPUTS:
# HBP/IntermediateData/Louisiana/FinalUnits/unit_data_4_descript.dta
# OUTPUTS:
# HBP/IntermediateData/Louisiana/SubsampledUnits/unit_data_sample_flags.dta
# -------------------------------------------------------
### do $CODEDIR/Subsample/units_sample_defs_to_wells.do
#
# INPUTS:
# HBP/IntermediateData/Louisiana/SubsampledUnits/unit_data_sample_flags.dta
# HBP/IntermediateData/Louisiana/Serial_Unit_xwalk_Hay_HBP.dta
# HBP/IntermediateData/Louisiana/Wells/master_wells.dta
# OUTPUTS:
# HBP/IntermediateData/Louisiana/well_unit_xwalk_master.dta
# -------------------------------------------------------
### do $CODEDIR/Subsample/create_well_timeseries.do
#
# INPUTS:
# HBP/IntermediateData/Louisiana/DIProduction/HPDIProduction.dta
# HBP/IntermediateData/Louisiana/well_unit_xwalk_master.dta.dta
# HBP/IntermediateData/Louisiana/Wells/master_wells.dta
# OUTPUTS:
# HBP/IntermediateData/Louisiana/DIProduction/haynesville_well_time_series.dta
# HBP/IntermediateData/Louisiana/DIProduction/time_series_4_decline_estimation.csv
# -------------------------------------------------------
### RScript $CODEDIR/Subsample/identify_neighboring_operators.R
#
# INPUTS:
# HBP/IntermediateData/Louisiana/DescriptiveUnits/master_unit_shapefile_urbanity.shp
# OUTPUTS:
# HBP/IntermediateData/Louisiana/DescriptiveUnits/units_with_lp2_neighbors.dta
# HBP/IntermediateData/Louisiana/DescriptiveUnits/units_with_lp7_neighbors.dta
# -------------------------------------------------------
### do $CODEDIR/Subsample/prep_neighbors.do
#
# INPUTS:
# HBP/IntermediateData/Louisiana/DescriptiveUnits/units_with_lp2_neighbors.dta
# HBP/IntermediateData/Louisiana/DescriptiveUnits/units_with_lp7_neighbors.dta
# OUTPUTS:
# HBP/IntermediateData/Louisiana/SubsampledUnits/units_with_neighbor_stats.dta
# -------------------------------------------------------
# Variables CODEDIR, OS, and STATA exported from analysis_script.sh
# Dispatch on OS (Unix/Windows) and Stata flavor (SE/MP): the same three
# do-files are run with the matching Stata executable, chained with &&
# so a failure stops the sequence.
if [ "$OS" = "Unix" ]; then
    if [ "$STATA" = "SE" ]; then
        stata-se do $CODEDIR/Subsample/define_sample_units.do &&
        stata-se do $CODEDIR/Subsample/units_sample_defs_to_wells.do &&
        stata-se do $CODEDIR/Subsample/create_well_timeseries.do
    elif [ "$STATA" = "MP" ]; then
        stata-mp do $CODEDIR/Subsample/define_sample_units.do &&
        stata-mp do $CODEDIR/Subsample/units_sample_defs_to_wells.do &&
        stata-mp do $CODEDIR/Subsample/create_well_timeseries.do
    fi
elif [ "$OS" = "Windows" ]; then
    if [ "$STATA" = "SE" ]; then
        stataSE-64 -e do $CODEDIR/Subsample/define_sample_units.do &&
        stataSE-64 -e do $CODEDIR/Subsample/units_sample_defs_to_wells.do &&
        stataSE-64 -e do $CODEDIR/Subsample/create_well_timeseries.do
    elif [ "$STATA" = "MP" ]; then
        stataMP-64 -e do $CODEDIR/Subsample/define_sample_units.do &&
        stataMP-64 -e do $CODEDIR/Subsample/units_sample_defs_to_wells.do &&
        stataMP-64 -e do $CODEDIR/Subsample/create_well_timeseries.do
    fi
fi
# neighbor analysis -- finds operators of nearby units
RScript $CODEDIR/Subsample/identify_neighboring_operators.R
# neighboring operator counts
if [ "$OS" = "Unix" ]; then
    if [ "$STATA" = "SE" ]; then
        stata-se do $CODEDIR/Subsample/prep_neighbors.do
    elif [ "$STATA" = "MP" ]; then
        stata-mp do $CODEDIR/Subsample/prep_neighbors.do
    fi
elif [ "$OS" = "Windows" ]; then
    if [ "$STATA" = "SE" ]; then
        stataSE-64 -e do $CODEDIR/Subsample/prep_neighbors.do
    elif [ "$STATA" = "MP" ]; then
        stataMP-64 -e do $CODEDIR/Subsample/prep_neighbors.do
    fi
fi
# Clean up log files
# NOTE(review): "rm *.log" errors if no .log files exist in the cwd;
# the bare "exit" below returns that last status.
rm *.log
exit
| true
|
d0ca6cca40fddd5880793c014080a17f78bc9955
|
Shell
|
OpenVnmrJ/OpenVnmrJ
|
/scripts/loglib.sh
|
UTF-8
| 4,525
| 3.96875
| 4
|
[
"Apache-2.0",
"GPL-3.0-only"
] |
permissive
|
#
# Copyright (C) 2018 Michael Tesch
#
# This file is a part of the OpenVnmrJ project. You may distribute it
# under the terms of either the GNU General Public License or the
# Apache 2.0 License, as specified in the LICENSE file.
#
# For more information, see the OpenVnmrJ LICENSE file.
#
# this is a shell-function library. To use the functions
# in your shell script, first include this by "sourceing"
# this file:
# 'source loglib.sh' or '. loglib.sh'
# The latter syntax with '.' is POSIX.
#
# CMDLINE="$0 $*"
# SCRIPT=$(basename "$0")
# SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#
# Defaults, overridable by the sourcing script before sourcing:
#   CMDLINE  - command line recorded in the log by log_setup
#   VERBOSE  - highest level echoed to the terminal (0=ERROR .. 4=DEBUG);
#              at >3 log_setup also tees all output to the terminal
#   ERRCOUNT - running count of log_error calls
: ${CMDLINE=loglib.sh}
: ${VERBOSE=3}
: ${ERRCOUNT=0}
# Index order matches the numeric level passed to log_msg_.
LEVELNAMES=( ERROR MSG WARN INFO DEBUG )
# call this before calling any log commands. can be called repeatedly
# from one script to change the log file.
#
# log_setup LOGNAME [LOGDIR]
# Open (and rotate) the log file, detect terminal colors, and redirect
# the script's stdout/stderr into the log.  The original stdout/stderr
# are saved on fds 3/4 so log_msg_ can still echo to the terminal.
# Globals written: LOGFILE, LEVELCOLOR, plus the tput color variables.
log_setup () {
# directory for log files, will try
# to mkdir -p if non-existant
local logdir="$2"
[ -z "$logdir" ] && logdir=.
LOGFILE="${logdir}/$1" # typically $(basename "$0" .sh).txt
# colors & names of the log levels
# check if stdout is a terminal...
if test -t 1; then
# see if it supports colors...
ncolors=$(tput colors)
if test -n "$ncolors" && test "$ncolors" -ge 8; then
normal="$(tput sgr0)"
bold="$(tput bold)"
red="$(tput setaf 1)"
green="$(tput setaf 2)"
yellow="$(tput setaf 3)"
cyan="$(tput setaf 6)"
white="$(tput setaf 7)"
magenta="$(tput setaf 5)"
fi
fi
#set -x
# Per-level colors; index order matches LEVELNAMES (empty when stdout
# is not a color terminal, so log lines stay plain).
LEVELCOLOR=( "$red" "$white$bold" "$yellow" "$green" "$cyan" )
if [ ! -d "$logdir" ]; then
echo "creating log directory '$logdir'"
mkdir -p "$logdir" || return $?
fi
# Second check: mkdir may have failed silently (e.g. permissions).
if [ ! -d "$logdir" ]; then
echo "${red}Unable to create log directory '$logdir':${normal}"
echo " ${red}log messages will be printed to the terminal.${normal}"
return
fi
if [ -t 3 ] && [ -t 4 ]; then
# restore stdin and stdout before opening new logfile
# (handles repeated log_setup calls: fds 3/4 already hold the terminal)
exec 1>&3 2>&4
fi
# Save the real stdout/stderr on fds 3/4 and restore them on exit or
# on HUP/INT/QUIT.
exec 3>&1 4>&2
trap 'exec 1>&3 2>&4' 0
trap 'exec 1>&3 2>&4; exit 1' 1 2 3
#trap 'onerror' 0 1 2 3
# move old LOGFILE
# Rotate to the first free .01...09 suffix; .10 is always overwritten.
if [ -f "${LOGFILE}" ]; then
for SEQ in $(seq -w 1 10); do
if [ ! -f "${LOGFILE}.${SEQ}" ] || [ $SEQ -eq 10 ]; then
echo "Moving old logfile ${LOGFILE} to ${LOGFILE}.${SEQ}"
mv "${LOGFILE}" "${LOGFILE}.${SEQ}"
break
fi
done
fi
# redirect output to LOGFILE
if [ ${VERBOSE} -gt 3 ]; then
# at VERBOSE >= DEBUG level, also send cmd output to terminal
exec 1> >(tee -a "${LOGFILE}") 2>&1
else
exec 1> "$LOGFILE" 2>&1
fi
# how & when this script was called
log_debug "$CMDLINE"
log_info "Logfile: $LOGFILE"
}
# log_msg_ LEVEL MESSAGE...
# Internal helper behind log_error/log_msg/log_warn/log_info/log_debug.
# Prints "NAME:message" colorized per LEVELCOLOR/LEVELNAMES to stdout
# (normally redirected into the log file by log_setup).  When fd 3 is a
# terminal (log_setup parked the real stdout there), VERBOSE <= 3
# (i.e. output is not already tee'd to the terminal) and LEVEL passes
# the VERBOSE filter, the same line is echoed to the terminal on fd 3.
log_msg_ () {
local level=$1
shift
#local datestring=$(date +"%Y-%m-%d %H:%M:%S")
# Build the formatted line once instead of duplicating the format
# string in both echo statements.
local line="${LEVELCOLOR[level]}${LEVELNAMES[level]}:$*${normal}"
echo "$line"
if [ -t 3 ] && [ ${VERBOSE} -le 3 ] && [ "$level" -le ${VERBOSE} ]; then
echo "$line" >&3
fi
}
# Public logging API: thin wrappers around log_msg_ with a fixed level.
# log_error additionally bumps the global ERRCOUNT; log_cmd logs the
# command line at INFO level and then executes it, returning its status.
log_error () { log_msg_ 0 "$*" ; ERRCOUNT=$(( ERRCOUNT + 1 )) ; }
log_msg () { log_msg_ 1 "$*" ; }
log_warn () { log_msg_ 2 "$*" ; }
log_info () { log_msg_ 3 "$*" ; }
log_debug () { log_msg_ 4 "$*" ; }
log_cmd () { log_info "\$ $*" ; "$@" ; }
# cmdspin COMMAND [ARGS...]
# Run COMMAND, logging its start/finish times, while a background loop
# animates a spinner on the terminal (fd 3, if it is a tty).
# Returns COMMAND's exit status.
cmdspin () {
#
# Run a command, spin a wheelie while it's running
#
log_info "Cmd started $(date)"
log_info "\$ $*"
# spinner
# Rotate the 4-char spinner string one position per second and redraw
# in place with backspace.
local sp='/-\|'
if [ -t 3 ]; then printf ' ' >&3 ; fi
while : ; do
sleep 1;
sp=${sp#?}${sp%???}
if [ -t 3 ]; then printf '\b%.1s' "$sp" >&3 ; fi
done &
SPINNER_PID=$!
# Kill the spinner if we die prematurely
trap "kill $SPINNER_PID" EXIT
# command runs here
"$@"
retval=$?
# Kill the loop and unset the EXIT trap
# NOTE(review): SIGPIPE is presumably used so the spinner dies without
# a "Terminated" job message; 'trap " " EXIT' replaces (and thereby
# discards) any EXIT trap the caller had installed — confirm intended.
kill -PIPE $SPINNER_PID
trap " " EXIT
if [ -t 3 ]; then printf '\b.\n' >&3 ; fi
log_info "Cmd finished $(date), returned: $retval"
return $retval
}
# Manual smoke test: exercised only when the environment variable
# LOGLIBTEST=y is set before sourcing this file.  Emits one message per
# level, both before and after switching log files with log_setup.
if [ x$LOGLIBTEST = y ]; then
log_warn "bad"
log_msg "bold"
log_info "info"
log_error "error"
log_debug "debug"
log_setup "lib.log1"
log_warn "hello1"
log_setup "lib.log2"
log_warn "hello2"
log_warn "bad"
log_msg "bold"
log_info "info"
log_error "error"
log_debug "debug"
fi
| true
|
1306a593eebcfe5a1d609748aaec1bd7ea18102c
|
Shell
|
usmanghani/dotfiles
|
/.bash_profile
|
UTF-8
| 2,867
| 2.6875
| 3
|
[] |
no_license
|
# Legacy macOS ~/.bash_profile: assembles PATH incrementally (MacPorts,
# Python framework, Java, jython, Cloud9), sets up git prompt/completion,
# defines convenience aliases, and loads node completion scripts.
export PATH=/usr/local/bin:/Applications/Cloud9.app/Contents/installs/c9local/bin:$PATH
alias proj='cd /Volumes/Data/Projects'
# Make 'git' invocations go through GitHub's hub wrapper.
eval "$(hub alias -s)"
# Set git autocompletion and PS1 integration
if [ -f /usr/local/git/contrib/completion/git-completion.bash ]; then
. /usr/local/git/contrib/completion/git-completion.bash
fi
GIT_PS1_SHOWDIRTYSTATE=true
if [ -f /opt/local/etc/bash_completion ]; then
. /opt/local/etc/bash_completion
fi
# Prompt: cwd plus current git branch (red), e.g. "~/src (main)> ".
PS1='\w\[\033[31m\]$(__git_ps1)\[\033[00m\]> '
# MacPorts Installer addition on 2010-12-26_at_22:02:46: adding an appropriate PATH variable for use with MacPorts.
export PATH=/Library/Frameworks/Python.Framework/Versions/Current/bin:/System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home/bin:$PATH
#export PATH=/opt/local/bin:/opt/local/sbin:/opt/local/Library/Frameworks/Python.Framework/Versions/Current/bin:$PATH
# Finished adapting your PATH environment variable for use with MacPorts.
export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home/
# MacPorts Installer addition on 2011-04-28_at_02:22:41: adding an appropriate PATH variable for use with MacPorts.
export PATH=/Library/Frameworks/Python.Framework/Versions/Current/bin:/Volumes/Data/jython/bin:$PATH
# export PATH=/Volumes/Data/opt/local/bin:/Volumes/Data/opt/local/sbin:$PATH
# Finished adapting your PATH environment variable for use with MacPorts.
alias lc="ls -C"
alias lm="ls -al | more"
alias dirs="ls -al | grep '^d'" # show the dir's in the current dir
alias h=history # show the history of commands issued
alias nu="who|wc -l" # nu - number of users
alias np="ps -ef|wc -l" # np - number of processes running
alias p="ps -ef"
# mimick a few DOS commands with these aliases:
alias cd.="cd ."
alias cd..="cd .."
alias cd...="cd ../.."
alias dir="ls -al"
alias edit=vi
alias help=man
alias path="echo $PATH"
alias e="mate"
#PS1='$PWD> '
##
# Your previous /Volumes/Mac/Users/usmanghani/.profile file was backed up as /Volumes/Mac/Users/usmanghani/.profile.macports-saved_2011-04-28_at_01:42:29
##
# MacPorts Installer addition on 2011-04-28_at_01:42:29: adding an appropriate PATH variable for use with MacPorts.
#export PATH=/opt/local/bin:/opt/local/sbin:$PATH
# Finished adapting your PATH environment variable for use with MacPorts.
##
# Your previous /Volumes/Mac/Users/usmanghani/.profile file was backed up as /Volumes/Mac/Users/usmanghani/.profile.macports-saved_2011-04-28_at_02:22:41
##
# {{{
# Node Completion - Auto-generated, do not touch.
shopt -s progcomp
for f in $(command ls ~/.node-completion); do
f="$HOME/.node-completion/$f"
test -f "$f" && . "$f"
done
# }}}
# Setting PATH for Python 2.7
# The orginal version is saved in .bash_profile.pysave
PATH="/Library/Frameworks/Python.framework/Versions/2.7/bin:${PATH}"
export PATH
| true
|
7ec16eab081646ca4998920122d09535c2547449
|
Shell
|
shells-sh/TeaScript.sh
|
/lib/keywords/types/def.sh
|
UTF-8
| 614
| 3.3125
| 3
|
[
"MIT"
] |
permissive
|
# def [Type.]method
# Begin a method definition on a type.  "Type.method" takes the type
# from the argument; a bare "method" falls back to the ambient type in
# $T_DO.  Registers the method via the project's `reflection` helper
# and records the safe "<type> <method>" pair in the global
# T_METHOD_DEF.
def() {
	local dotted="$1"
	local type_name method_name
	case "$dotted" in
		*.*)
			# "Type.method" form: split on the first dot.
			type_name="${dotted%%.*}"
			method_name="${dotted#*.}"
			shift
			;;
		*)
			# Bare method name: use the current type from $T_DO, if any.
			if [ -n "$T_DO" ]
			then
				type_name="$T_DO"
				method_name="$dotted"
				shift
			fi
			;;
	esac
	local safe_type
	reflection safeName "$type_name" safe_type
	reflection types methods define "$safe_type" "$method_name"
	# Clean up and rethink, but make it work for now:
	local safe_method
	reflection safeName "$method_name" safe_method
	T_METHOD_DEF="$safe_type $safe_method"
}
| true
|
4cda249c855b992f79a4df99e64452d9768d9d65
|
Shell
|
fivepiece/43_coinswap
|
/alice/tx4/tx4.sh
|
UTF-8
| 987
| 2.859375
| 3
|
[] |
no_license
|
#!/bin/bash
# TX4 :
# [ A1 && C0 ] -> [ C1 ]
# Build and sign coinswap transaction TX4: spend output 0 of TX0
# (locked by the 2-of-2 A1/C0 script spk_2_A1C0_2_hex) and pay 10.007
# to C1's P2PKH address, then record the resulting output as a "credit"
# JSON entry.  All tx_*/key_*/sign* helpers and the TX0_/TX2_/A1_/C0_/
# C1_ variables are project-local and presumed set by earlier scripts
# in this pipeline — verify against the including scripts.
TX4_inpoint=( "${TX0_txid}" "0" )
# tx_mkin_serialize ${TX4_inpoint[@]} "$((2**32-1))" "0x${spk_2_A1C0_2_hex}"
# Reuse TX2's serialized input (same outpoint/sequence) for the
# unsigned transaction body.
TX4_in="${TX2_in}"
TX4_out="$( tx_mkout_serialize "10.007" "$(spk_pay2pkhash $(key_pub2addr ${C1_pub}))" )"
TX4_uns="$( tx_build 1 "" "${TX4_in}" "${TX4_out}" 0 | cleanhex )"
# Append SIGHASH_ALL (01000000) before hashing for signature creation.
TX4_mid="${TX4_uns}01000000"
TX4_sha256="$( sha256 ${TX4_mid} )"
# Both A1 and C0 must sign (2-of-2 multisig redeem script).
TX4_sig_A1="$( signder "${A1_hex}" "${TX4_sha256}" | cleanhex )"
TX4_sig_C0="$( signder "${C0_hex}" "${TX4_sha256}" | cleanhex )"
# scriptSig: leading 0 (CHECKMULTISIG off-by-one dummy), the two
# signatures, then the redeem script.
TX4_in_sig="$( tx_mkin_serialize ${TX4_inpoint[@]} "$((2**32-1))" "0 @${TX4_sig_A1} @${TX4_sig_C0} @${spk_2_A1C0_2_hex}" )"
TX4_fin="$( tx_build 1 "" "${TX4_in_sig}" "${TX4_out}" 0 | cleanhex )"
# txid = double-SHA256 of the final tx, byte-reversed for display order.
TX4_txid="$( hash256 ${TX4_fin} | revbytes )"
# Emit a one-entry JSON array describing the new spendable output.
echo "[" > ${Alice_home}/tx4/TX4_credit.json
mkjson_credit ${TX4_txid} 0 "$(spk_pay2pkhash $(key_pub2addr ${C1_pub}))" '10.007' >> ${Alice_home}/tx4/TX4_credit.json
echo "]" >> ${Alice_home}/tx4/TX4_credit.json
| true
|
4c9f53e0dd116228e78132f74f554c68b75aec88
|
Shell
|
Mikle-Bond/std.sh
|
/lib/macro.sh
|
UTF-8
| 1,485
| 3.671875
| 4
|
[] |
no_license
|
# @macro [ expr ... ] == eval "$(expr)"
# The active aliases below exploit $BASH_COMMAND: everything after the
# literal " # " in the invoking command line is re-run as a command
# substitution and its output eval'd (@macro) or printed (@macro-print).
use args
# The var-substitute solution (without additional variable).
# This approach is fail-prone if macro has pattern ' \#' in it.
# Pro: simple; Con: macro-command doesn't work
#alias @macro=$'eval "${BASH_COMMAND/*\\\#/eval \\\"\$( } )\\\" " \#'
#alias @macro-print=$'eval "${BASH_COMMAND/*\\\#/echo \\\"\$( } )\\\" " \#'
# Copy-paste solution
# It stores full command in variable
# Pro: macro-command works again; Con: DRY voliation
#alias @macro=$'_stdsh_MACRO_COMMAND="\'${BASH_COMMAND#* \\\# }\'" eval "eval \\"\$(${BASH_COMMAND#* \\\# })\\"" \#'
#alias @macro-print=$'_stdsh_MACRO_COMMAND="\'${BASH_COMMAND#* \\\# }\'" eval "echo \\"\$(${BASH_COMMAND#* \\\# })\\"" \#'
# Dummy-variable solution
# Variable exists, but doesn't contain command
# Pros: macro-command works, still simple enough
# Con: none known yet.
alias @macro=$'_stdsh_MACRO_COMMAND="yes" eval "eval \\"\$(${BASH_COMMAND#* \\\# })\\"" \#'
alias @macro-print=$'_stdsh_MACRO_COMMAND="yes" eval "echo \\"\$(${BASH_COMMAND#* \\\# })\\"" \#'
macro-command ()
{
@args
if [ -n "$_stdsh_MACRO_FUNCTION" ]
then
echo "${_stdsh_MACRO_COMMAND:$[ ${#_stdsh_MACRO_FUNCTION} +1 ]}"
else
echo "$_stdsh_MACRO_COMMAND"
fi
}
macroify()
{
@args funcname
funcname="$(printf '%q' "$funcname")"
alias "@$funcname=_stdsh_MACRO_FUNCTION=$funcname @macro $funcname"
}
macro? ()
{
@args
[[ -n "$_stdsh_MACRO_COMMAND" ]]
}
| true
|
0bd3026a64357306552f4f047f6525426ffd78ad
|
Shell
|
susy-contracts/susyufo-test-helpers
|
/test-integration/susybraid-core-2.1.x/susybraid-core-2.1.x/test.sh
|
UTF-8
| 1,184
| 3.609375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Integration-test driver: stage the local susyufo-test-helpers package
# into node_modules, start a susybraid-cli chain unless one is already
# listening, then run the susyknot test suite against it.

# Delete the installed dependency
rm -rf node_modules/susyufo-test-helpers

# Replace it with the local package
mkdir -p node_modules/susyufo-test-helpers/src
cp -r ../../susyufo-test-helpers.js node_modules/susyufo-test-helpers/
cp -r ../../package.json node_modules/susyufo-test-helpers/
cp -r ../../src/* node_modules/susyufo-test-helpers/src/

# Exit script as soon as a command fails.
set -o errexit

# Executes cleanup function at script exit.
trap cleanup EXIT

cleanup() {
  # Kill the susybraid instance that we started (if we started one and if it's still running).
  if [ -n "$susybraid_pid" ] && ps -p $susybraid_pid > /dev/null; then
    kill -9 $susybraid_pid
  fi
}

susybraid_port=8545

# Succeeds when something is already listening on the susybraid port.
susybraid_running() {
  nc -z localhost "$susybraid_port"
}

# Launch susybraid-cli in the background and remember its pid for cleanup.
# NOTE(review): "${accounts[@]}" is never assigned in this script, so it
# expands to nothing here — presumably populated elsewhere; confirm.
start_susybraid() {
  node_modules/.bin/susybraid-cli --version
  node_modules/.bin/susybraid-cli --gasLimit 0xfffffffffff --port "$susybraid_port" "${accounts[@]}" > /dev/null &
  susybraid_pid=$!
  sleep 1
}

if susybraid_running; then
  echo "Using existing susybraid instance"
else
  echo "Starting our own susybraid instance"
  start_susybraid
fi

./node_modules/.bin/susyknot test --network susybraid
| true
|
fba571e73372dc65efcba2b30bfb8f785fb9b3aa
|
Shell
|
gergme/cloudtunes
|
/docker-ices/etc/ices/update_songs.sh
|
UTF-8
| 267
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Rebuild the playlist file for the default station (station_0).
set -e
set -o pipefail

source_dir="/data"

# The default station needs AT LEAST ONE track for the containers to start.
printf "Generating STATION_0 playlist...\n"
find "${source_dir}/station_0/" -type f -name '*.ogg' > /etc/ices2/playlist/default0.txt
| true
|
775818994c1c77a2e7084fcf09cc3aaaae9a40c1
|
Shell
|
kaihwang/TTD
|
/Preprocessing/run_preproc_and_localizer_analysis.sh
|
UTF-8
| 1,835
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
# script to run preprocessing (fmriprep) and localizer modle (FIR) amd MTD regression
# Driven by SGE: ${SGE_TASK} encodes "<subject-id><session>", e.g. "7014Loc".

export DISPLAY=""

WD='/home/despoB/TRSEPPI/TTD'
SCRIPTS='/home/despoB/kaihwang/bin/TTD/Preprocessing'
Model='/home/despoB/kaihwang/bin/TTD/Models'

# Split the SGE task token: leading digits = subject id, trailing
# capitalized word = session name.
SUB_ID=$(echo ${SGE_TASK} | grep -Eo "^[[:digit:]]{1,}")
session=$(echo ${SGE_TASK} | grep -Eo "[A-Z][a-zA-Z0-9]{1,}")
#SUB_ID=7014
#session=Loc
echo "running subject $SUB_ID, session $session"

##fmriprep for prerpocessing
#determine number of preproc runs, if preproc finished then will not run fmriprep
nruns=$(/bin/ls ${WD}/fmriprep/fmriprep/sub-${SUB_ID}/ses-${session}/func/*task-TDD*T1w_preproc.nii.gz | wc -l)
n_raw=$(/bin/ls ${WD}/BIDS/sub-${SUB_ID}/ses-${session}/func/*task-TDD*bold.nii.gz | wc -l)

# Only (re)run fmriprep when the preprocessed run count differs from the raw count.
if [ "${nruns}" != "${n_raw}" ]; then
	source activate fmriprep1.0;
	cd ${WD}/fmriprep;

	fmriprep \
	--participant_label $SUB_ID \
	--nthreads 4 \
	--output-space T1w template \
	--template MNI152NLin2009cAsym \
	${WD}/BIDS/ \
	${WD}/fmriprep/ \
	participant

	END_TIME=$(date);
	echo "fmriprep for subject $SUB_ID completed at $END_TIME"
fi

##parse stimulus timing
#change back to default env
source activate root
nruns=$(/bin/ls ${WD}/fmriprep/fmriprep/sub-${SUB_ID}/ses-${session}/func/*task-TDD*T1w_preproc.nii.gz | wc -l)
echo "${SUB_ID} ${session} ${nruns}" | python ${SCRIPTS}/parse_stim.py

##FIR model for localizing
. ${Model}/run_FIR_model.sh

##MTD model
#. ${Model}/run_MTD_reg_model.sh

##Retinotopy
# if [ ${session} = Loc ]; then
# 	#create SUMA surfaces
# 	if [ ! -d ${WD}/fmriprep/freesurfer/sub-${SUB_ID}/SUMA ]; then
# 		cd ${WD}/fmriprep/freesurfer/sub-${SUB_ID}/
# 		@SUMA_Make_Spec_FS -sid sub-${SUB_ID}
# 	fi
# 	if [ ! -d ${WD}/Results/sub-${SUB_ID}/ses-Loc/${SUB_ID}_meridian.results ]; then
# 		. ${Model}/run_meridian_mapping.sh
# 	fi
# fi
| true
|
15784e98bc44111cca60a1eb6dd0463f80257f5b
|
Shell
|
jsegeren/ParallelMandelbox
|
/timer.sh
|
UTF-8
| 176
| 2.84375
| 3
|
[] |
no_license
|
#! /bin/bash
# Build once, then time the parallel target at several process counts.
make all-parallel

for np in 1 2 4 8 12 16 20 24 28 32; do
  echo "running on $np processes"
  time make run-all-parallel NP=$np > /dev/null
done
| true
|
635c888e5fba6b0c5801f8c4567de1d447077683
|
Shell
|
permamodel/ILAMB
|
/CODES/ILAMB_Main.sh
|
UTF-8
| 1,440
| 3.65625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
#
# An ILAMB execution script, useful for debugging.
#
# From this directory, run with:
#  $ bash ILAMB_Main.sh /path/to/ILAMB_PARA_SETUP 1>stdout 2>stderr &

# The single required argument is the ILAMB parameter file.
if [ -z "$1" ]; then
    echo "Error: Must supply path to ILAMB parameter file"
    exit 1
fi

# ILAMB_ROOT is the parent of the directory holding this script.
export ILAMB_CODESDIR=`pwd`
cd ..
export ILAMB_ROOT=`pwd`
cd $ILAMB_CODESDIR

# Allow a user to configure these directories.
export ILAMB_DATADIR=/nas/data/ILAMB/DATA
export ILAMB_MODELSDIR=/nas/data/ILAMB/MODELS
export ILAMB_OUTPUTDIR=/nas/data/ILAMB/tmp/OUTPUT
export ILAMB_TMPDIR=/nas/data/ILAMB/tmp

echo "ILAMB files and directories:"
echo "ILAMB_ROOT $ILAMB_ROOT"
echo "ILAMB_CODESDIR $ILAMB_CODESDIR"
echo "ILAMB_DATADIR $ILAMB_DATADIR"
echo "ILAMB_MODELSDIR $ILAMB_MODELSDIR"
echo "ILAMB_OUTPUTDIR $ILAMB_OUTPUTDIR"
echo "ILAMB_TMPDIR $ILAMB_TMPDIR"
echo "ILAMB parameter file $1"

# Configure NCL and ImageMagick. May need help from user.
tools_dir=/home/csdms/tools
export NCARG_ROOT=$tools_dir/ncl
PATH=$NCARG_ROOT/bin:$tools_dir/ImageMagick/bin:$PATH

## Define model simulation type, CLM, CMIP5, or MsTMIP.
export MODELTYPE=CMIP5

## Define spatial resolution for diagnostics, 0.5x0.5, 1x1 or 2.5x2.5.
export SPATRES=0.5x0.5

## Define plot file type, i.e., eps, gif, pdf, png, ps.
export PLOTTYPE=png

# Timestamps bracket the NCL run so the duration can be read off the log.
date
ncl -n main_ncl_code.ncl ParameterFile=\"$1\" # http://www.ncl.ucar.edu/Applications/system.shtml
date
| true
|
0099489af95e3a00f30dc0acc0b9334663e5ddd3
|
Shell
|
dondropo/monty
|
/monty_bytecodes/color
|
UTF-8
| 565
| 4.15625
| 4
|
[] |
no_license
|
#!/bin/bash
# Run a command, painting everything it writes to stderr in red, then
# re-print the collected errors and propagate the command's exit code.
#
# Fixes over the original: "$1" is quoted (the bare $1 made the --help
# test error out when the script was run with no arguments), and the
# stderr reader uses `IFS= read -r` so backslashes/whitespace survive.
if [[ "${1:-}" == "--help" ]]; then
    echo "Executes a command and colorizes all errors occured"
    echo "Example: $(basename -- "$0") wget ..."
    echo "(c) o_O Tync, ICQ# 1227-700, Enjoy!"
    exit 0
fi

# Temp file to catch all errors
TMP_ERRS=$(mktemp) || exit 1

# Execute command: stderr is routed through a process substitution that
# colorizes each line and also appends it to $TMP_ERRS.
"$@" 2> >(while IFS= read -r line; do echo -e "\e[01;31m$line\e[0m" | tee --append "$TMP_ERRS"; done)
EXIT_CODE=$?

# Display all errors again
if [ -s "$TMP_ERRS" ]; then
    echo -e "\n\n\n\e[01;31m === ERRORS === \e[0m"
    cat "$TMP_ERRS"
fi
rm -f "$TMP_ERRS"

# Finish
exit $EXIT_CODE
| true
|
3c0bc27aa04cbff4f995d9f3e600d5b526e346c2
|
Shell
|
unixfox/piwall-school
|
/piwall
|
UTF-8
| 447
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
# PiWall master: wait for each client Pi to come online, start its client
# service over SSH, then stream the video to the multicast group forever.

# Variables
NOMFICHIER="video.avi"
MASTER_COMMANDE="avconv -re -i /home/pi/Videos/$NOMFICHIER -vcodec copy -f avi -an udp://239.0.1.23:1234"
CLIENT_COMMANDE='sudo systemctl start piwallclient'

# Script body
for n in 1 2 3 4 5 6
do
	# Poll until SSH (port 22) answers on client 192.168.0.$n, then
	# start the piwallclient service there.
	while ! nc -w 5 -z 192.168.0."$n" 22 &> /dev/null; do echo; done
	ssh -i /home/pi/.ssh/id_rsa pi@192.168.0."$n" $CLIENT_COMMANDE
done
sleep 5s
# Restart the multicast stream whenever avconv exits.
while true; do $MASTER_COMMANDE; sleep 1; done
| true
|
92ae62b852961b8cace6eeea079ba16358f78923
|
Shell
|
brentwpeterson/lighthouse
|
/lighthouse-base.sh
|
UTF-8
| 294
| 3.140625
| 3
|
[] |
no_license
|
# Run Lighthouse against every "filename,url" pair in the input CSV and
# write one HTML report per row under $MYPATH.
#
# Fixes over the original: all expansions are quoted (file names / URLs
# with spaces no longer break), and `read -r` preserves backslashes.
INPUT=test.csv
OLDIFS=$IFS
MYPATH=/tmp/lighthouse/
IFS=','
# Bail out early when the CSV is missing.
[ ! -f "$INPUT" ] && { echo "$INPUT file not found"; exit 99; }
while read -r filename url
do
	# Report name comes from the first CSV column.
	lighthouse "$url" --output-path="$MYPATH$filename.html" --output html --chrome-flags='--headless'
done < "$INPUT"
IFS=$OLDIFS
| true
|
5eeb99a3a3cdb8bdc48c0ece4c2e9f71ca298a87
|
Shell
|
kpdyer/dpi-test-suite
|
/classifiers/bro-2.1/aux/broctl/bin/helpers/cat-file
|
UTF-8
| 112
| 2.765625
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#! /usr/bin/env bash
#
# cat-file <file>
#
# Prints "0" followed by the file's contents when <file> is a regular
# file, or "1" otherwise; always ends with a "~~~" marker line.
#
# Fix: "$1" must be quoted — the original `[ -f $1 ]` with no argument
# collapsed to `[ -f ]`, which is TRUE (one-arg string test), so `cat`
# ran with no operand and hung reading stdin.
if [ -f "$1" ]; then
    echo 0
    cat -- "$1"
else
    echo 1
fi

echo ~~~
| true
|
9e3a8f683b097aa460c908bef14c43bfacbe59be
|
Shell
|
mihneadb/dfs-bench
|
/reserve.sh
|
UTF-8
| 636
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Reserve script for DAS-4
# $1 - nr of nodes
#
# Reserves $1 nodes for 15 minutes (unless this user already holds a
# reservation with nodes), waits for the nodes to be assigned, and
# writes their names one-per-line into ./machinefile.

user=`whoami`

# TODO: check that reusing is proper - user might have a smaller number of
# nodes reserved already
out=`preserve -llist | grep $user | grep node`
if [ $? -ne 0 ]; then
	preserve -1 -# $1 -t 0:15:00
fi

# Poll once a second until the reservation line shows assigned nodes.
out=`preserve -llist | grep $user | grep node`
while [ $? -ne 0 ]; do
	sleep 1
	out=`preserve -llist | grep $user | grep node`
done

# Node names start at whitespace-separated field 9 of the reservation line.
nodes=`echo $out | tr -s [:blank:] ' ' | tr -s ' ' | cut -d ' ' -f 9-1000`

echo "Have nodes: $nodes"
echo "Saving them to './machinefile'."

# Truncate, then append one node name per line.
> machinefile
for node in $nodes; do
	echo $node >> machinefile
done
| true
|
64eb0cde47ca3817fb350e51a71ce7bab8dacda2
|
Shell
|
jstarcher/jstarcher-public
|
/goodies/rename.sh
|
UTF-8
| 141
| 2.875
| 3
|
[] |
no_license
|
# Rename every .class/.php file by substituting "old" -> "new" in its path.
#
# Fix: the original `for i in $(find …)` word-split paths on whitespace;
# stream NUL-delimited names from find instead. Files whose path does not
# change are skipped (mv to self would just error).
find . -name '*.*' -type f \( -iname "*.class" -or -iname "*.php" \) -print0 |
while IFS= read -r -d '' i
do
	j=$(printf '%s' "$i" | sed 's/old/new/g')
	[ "$i" = "$j" ] || mv -- "$i" "$j"
done
| true
|
ae3b3ed3e5f43a75b986ae64023e1a422fa6bb48
|
Shell
|
eBay/NuRaft
|
/prepare.sh
|
UTF-8
| 223
| 3.03125
| 3
|
[
"Apache-2.0",
"Zlib",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/bash
# Fetch the asio dependency, pinned to the release named in manifest.sh.
set -ex

. manifest.sh

PROJECT_DIR=$(pwd)
cd "${PROJECT_DIR}"

# Clone only when a checkout is not already present.
if [ ! -d asio/asio ]; then
    git clone https://github.com/chriskohlhoff/asio.git ./asio
    cd asio
    git checkout ${ASIO_RELEASE}
    cd ..
fi
| true
|
a46935a9b1c4d9c995049d5da810aabc500da307
|
Shell
|
SliTaz-official/wok-next
|
/tramys-client/stuff/tramys2
|
UTF-8
| 4,327
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/sh
# tramys - TRAnslate MY Slitaz. Client solution
# Tool for managing translation files for SliTaz GNU/Linux
# Aleksej Bobylev <al.bobylev@gmail.com>, 2014
#
# Flow: (1) confirm or change the locale, (2) ask the server to build a
# tarball of translations for all installed packages, (3) download and
# unpack it, and (4) register everything as the removable virtual
# package "tramys-data".

. /etc/slitaz/slitaz.conf
. /lib/libtaz.sh

# Ask for root access in order to install the files in the system.
if [ $(id -u) != 0 ]; then
	exec tazbox su $0 $@; exit 0
fi

export TEXTDOMAIN='tramys' # i18n

WORKING=$(mktemp -d)
LOG="/tmp/tramys.log"
TGZ="/tmp/tramys.tgz"
URL="http://cook.slitaz.org/tramys2.cgi"

# Common Yad options.
YADCONF="--center --window-icon=config-language --image=config-language --image-on-top"

# First step. Describes the functions of the program.
# It is possible to set the language.
# Also here the user has the ability to stop the program.
yad $YADCONF --title="tramys (1/3)" --text="$(_ \
'Now translations for all installed programs will be found and downloaded.
You can change locale if you want, or proceed.
Your current locale: <b>$LANG</b>')" \
	--button "gtk-edit:2" --button "gtk-cancel:1" --button "gtk-go-forward:0"
# 2 = "edit": let the user pick a locale, then restart ourselves with it.
case $? in
	2) tazbox locale; . /etc/locale.conf; tramys2; exit 0 ;;
	1) exit 0 ;;
esac

# your locale -> HTTP_ACCEPT_LANGUAGE
# your SliTaz release -> HTTP_ACCEPT (different releases have different translations)
# your installed packages list -> HTTP_COOKIE (list=...)
# Note clean address "tramys2.cgi" in the server access logs.
#
# Server sending and Yad shows user useful info using log widget.
# We are temporarily stored this log in the $LOG file in order to get
# a download token (see below).
# Here the user can refuse to download the file.
busybox wget --header "Accept-Language: $LANG" \
	--header "Accept: $(cat /etc/slitaz-release)" \
	--header "Cookie: list=$(cd $INSTALLED; ls -1 | tr '\n' ' ')" \
	$URL -O - | tee $LOG | \
yad $YADCONF --title="tramys (2/3)" --progress --width=320 --text="$(_ \
'The server processes the request.
Please wait.')" \
	--enable-log --log-expanded \
	--button "gtk-cancel:1" --button "gtk-go-forward:0"
ANSWER=$?

# In the last line of log server gives us a download token.
# We can download archive which the server has prepared for us.
# Get download token and remove log.
DLKEY=$(tail -n1 $LOG); rm -f $LOG

case $ANSWER in
	1)
		# We need to remove archive that the user has refused to download.
		# This command passed in HTTP_COOKIE (rm=...)
		busybox wget --header "Cookie: rm=$DLKEY" $URL -O /dev/null; exit 0 ;;
esac

# We want to download the file. Show pulsate progress bar.
# This command passed in HTTP_COOKIE (dl=...)
# Also here the user can terminate file downloading.
busybox wget --header "Cookie: dl=$DLKEY" $URL -O $TGZ 2>&1 | \
yad $YADCONF --title="tramys (3/3)" --progress --pulsate --width=320 \
	--text="$(_ \
'Downloading in progress.
Please wait.')" \
	--button "gtk-cancel:1" --button "gtk-ok:0"
case $? in
	1) exit 0 ;;
esac | \
# Unpack archive content to a temporary folder.
busybox tar -xz -C $WORKING -f $TGZ

# All folders and files in the archive are owned by user www and group www.
# This is because the CGI script on the server is executed by the user www.
# If we had just unpacked the archive content into our file system, then there
# would be a big trouble. For example, all folders: /, /usr, /usr/share,
# /usr/share/locale, etc. would be owned by user www and become unavailable
# for a regular user. So force all folders and files to root own.
chown -R root:root $WORKING

# Create or recreate virtual package "tramys-data".
# It contains all translations.
# And you can remove it if you no longer need translations.
TD=$INSTALLED/tramys-data
mkdir -p $TD
cat <<EOT > $TD/receipt
# SliTaz package receipt.
PACKAGE="tramys-data"
VERSION="$(date +%y%m%d)"
CATEGORY="system-tools"
SHORT_DESC="This package contains translation files installed by tramys-client"
MAINTAINER="you@slitaz.org"
LICENSE="GPL"
WEB_SITE="http://www.slitaz.org/"
DEPENDS="tramys-client"
EOT

# Update files list.
cd $WORKING; find . -type f | sed 's|^./|/|g' >> $TD/files.list
sort -u $TD/files.list -o $TD/files.list

# copy all translation files to root file system.
cp -fpr $WORKING/* /

# Recreate md5sums.
md5sum $(cat $TD/files.list) > $TD/md5sum

# remove temporary folder and file, they are no longer needed.
rm -f $TGZ
rm -rf $WORKING

# Final message.
yad $YADCONF --title="tramys" --text="$(_ \
'Translation files have been installed in your system.')"
| true
|
058d9373e3c8df0e03131ad88c40ff16becf5e8c
|
Shell
|
eldog/man-up
|
/notld/setup.sh
|
UTF-8
| 2,469
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
# Build and install the vision stack (ARToolKitPlus, libfreenect, OpenCV,
# JavaCV/JavaCPP) and stage the JavaCV/JavaCPP artifacts under $LIB.
set -o errexit
set -o nounset

readonly REPO="$(readlink -f -- "$(dirname -- "${0}")/..")"
readonly LIB="${REPO}/rock-bot/lib"
readonly USERNAME="$(id -un)"

# Parse options: -m <mirror> picks the SourceForge mirror for OpenCV.
# NB: the original loop shifted BEFORE reading ${1}, which skipped the
# first argument and dereferenced an unset $1 (tripping nounset) on the
# last iteration; read first, shift last.
MIRROR='switch'
while (( "$#" )); do
    case "${1}" in
        -m) shift; MIRROR="${1}" ;;
    esac
    shift
done
readonly MIRROR

readonly ARTOOLKIT='http://javacv.googlecode.com/files/ARToolKitPlus_2.1.1t.zip'
readonly FREENECT='https://github.com/OpenKinect/libfreenect/zipball/v0.1.1'
readonly JAVACV='http://javacv.googlecode.com/files/javacv-src-20111001.zip'
readonly JAVACPP='http://javacpp.googlecode.com/files/javacpp-src-20111001.zip'
readonly OPENCV="http://${MIRROR}.dl.sourceforge.net/project/opencvlibrary/opencv-unix/2.3.1/OpenCV-2.3.1a.tar.bz2"

sudo apt-get --assume-yes install \
    ant \
    cmake \
    default-jdk \
    freeglut3-dev \
    libavcodec-dev \
    libavdevice-dev \
    libavfilter-dev \
    libavformat-dev \
    libavutil-dev \
    libcommons-cli-java \
    libdc1394-22-dev \
    libgstreamermm-0.10-dev \
    libgtk2.0-dev \
    libopenexr-dev \
    libpostproc-dev \
    libqt4-dev \
    libswscale-dev \
    libusb-1.0.0-dev \
    libv4l-dev \
    libvdpau-dev \
    libxi-dev \
    libxmu-dev \
    python-numpy \
    python-sphinx \
    qt4-qmake \
    texlive \
    x11proto-video-dev

# ARTToolKit
cd /tmp
wget "${ARTOOLKIT}"
unzip ARToolKitPlus_*
cd ARToolKitPlus_*
export ARTKP="$(readlink -f .)"
qmake
make
sudo make install
cd ..

# libfreenect
cd /tmp
wget -O libfreenect.zip "${FREENECT}"
unzip libfreenect.zip
cd OpenKinect-libfreenect-*
cmake .
make
sudo make install
sudo ldconfig /usr/local/lib64/
sudo adduser "${USERNAME}" video
# NB: `sudo cat <<EOF >> file` gains no privileges for the redirection
# (the unprivileged shell performs it); pipe through `sudo tee -a`.
cat << EOF | sudo tee --append /etc/udev/rules.d/51-kinect.rules > /dev/null
# ATTR{product}=="Xbox NUI Motor"
SUBSYSTEM=="usb", ATTR{idVendor}=="045e", ATTR{idProduct}=="02b0", MODE="0666"
# ATTR{product}=="Xbox NUI Audio"
SUBSYSTEM=="usb", ATTR{idVendor}=="045e", ATTR{idProduct}=="02ad", MODE="0666"
# ATTR{product}=="Xbox NUI Camera"
SUBSYSTEM=="usb", ATTR{idVendor}=="045e", ATTR{idProduct}=="02ae", MODE="0666"
EOF
cd ..

# OpenCV
cd /tmp
wget -O - "${OPENCV}" | tar -xj
cd OpenCV-*
cmake .
make
sudo make install

# JavaCV
cd /tmp
wget "${JAVACPP}"
unzip javacpp-*
wget "${JAVACV}"
unzip javacv-*
cd javacv
ant

# Build lib
cd /tmp/javacv
mv dist "${LIB}/javacv"
mv src "${LIB}/javacv"
cd "${LIB}/javacv/javadoc"
zip -r ../javadoc.zip *
cd ..
rm -fr javadoc

cd /tmp/javacpp
mv dist "${LIB}/javacpp"
mv src "${LIB}/javacpp"
| true
|
1dd0445c24b7314e1ff5dd81230ee5bd968800fc
|
Shell
|
acritox/kanotix
|
/config/chroot_local-includes/lib/live/config/0030-macbook
|
UTF-8
| 656
| 2.875
| 3
|
[] |
no_license
|
#!/bin/bash
# Live-config hook: when an Apple (MacBook) keyboard is present among the
# input devices, switch the keyboard model, reverse KDE scroll polarity,
# and disable tap-to-click in the skeleton user config.
if grep -q "Apple.*Keyboard" /proc/bus/input/devices; then
	# set keyboard model to macbook79
	echo "keyboard-configuration keyboard-configuration/modelcode select macbook79" | debconf-set-selections
	sed -i -e "s|^XKBMODEL=.*$|XKBMODEL=\"macbook79\"|" /etc/default/keyboard
	# reverse scroll wheel
	echo "[Mouse]" > /etc/skel/.kde/share/config/kcminputrc
	echo "ReverseScrollPolarity=true" >> /etc/skel/.kde/share/config/kcminputrc
	# disable tap = click because Apple Trackpads have an own click button
	sed -i 's/^\(.*"f1_tap_action":\) [01]\(.*\)$/\1 0\2/' /etc/skel/.config/synaptiks/touchpad-config.json
fi
| true
|
b08afeadbac4e14435d0012385bfe03911d97128
|
Shell
|
ValdemarGr/.dotfiles
|
/scripts/windowclient.sh
|
UTF-8
| 419
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Send a workspace command to the window-manager daemon listening on
# localhost:$1. Usage: windowclient.sh <port> <cmd> [arg]
port=$1
cmd=$2
arg=$3

# Translate the user-facing command into the wire message; unknown
# commands produce no message and therefore no connection.
msg=""
case "$cmd" in
	("goto")   msg="gotows,$arg" ;;
	("rename") msg="renamews,$arg" ;;
	("move")   msg="movews,$arg" ;;
	("icon")   msg="iconws,$arg" ;;
	("zenpad") msg="zenpad,-1" ;;
	("poly")   msg="poly,-1" ;;
esac

if [ -n "$msg" ]; then
	printf "$msg" | nc 127.0.0.1 $port
fi
| true
|
51600a6a1dbfecf27b3aed4e0b9a539309d53df3
|
Shell
|
fpalm/ownscripts
|
/countusr.sh
|
UTF-8
| 155
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Sum the number of entries in every user's home directory under /home.
#
# Fixes over the original: iterate the glob /home/*/ instead of parsing
# `ls /home/`, and count lines (one entry per line from a piped ls)
# rather than words — `wc -w` over-counted names containing whitespace.
USRNUM=0
for dir in /home/*/; do
	# Guard against an unmatched glob (empty /home).
	[ -d "$dir" ] || continue
	USRNUM=$((USRNUM + $(ls "$dir" | wc -l)))
done
echo $USRNUM
| true
|
d8eb7f09102240869e9c64148161524f0e6862b6
|
Shell
|
thejaspm/docker-spark
|
/scripts/spark-shell.sh
|
UTF-8
| 413
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Launch an interactive Spark shell against the linked master container.
# Bind the driver to eth0's address so executors can call back to it.
export SPARK_LOCAL_IP=`ifconfig eth0 | grep "inet " | awk -F'[: ]+' '{ print $4 }'`
/remove_alias.sh # problems with hostname alias, see https://issues.apache.org/jira/browse/SPARK-6680
cd /usr/local/spark
# Master address comes from the Docker link env vars; extra CLI args pass through.
./bin/spark-shell \
	--master spark://${SPARK_MASTER_PORT_7077_TCP_ADDR}:${SPARK_MASTER_ENV_SPARK_MASTER_PORT} \
	-i ${SPARK_LOCAL_IP} \
	--properties-file /spark-defaults.conf \
	"$@"
| true
|
ede521078c35ab49a58f3a9ba4efea4baf7b67ea
|
Shell
|
jboss-openshift/cct_module
|
/jboss/container/java/singleton-jdk/configure.sh
|
UTF-8
| 543
| 3.578125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Validate JAVA_VERSION/JAVA_VENDOR and strip any installed java-*
# packages that don't match them.
set -u
set -e

# Fail fast with a clear message when the required env vars are missing.
# NB: use ${VAR-} in the tests — under `set -u` the original `-z
# "$JAVA_VERSION"` aborted with a bare "unbound variable" error before
# the friendly message could ever be printed.
if [ -z "${JAVA_VERSION-}" ]; then
  echo "JAVA_VERSION needs to be defined to use this module" >&2
  exit 1
fi

if [ -z "${JAVA_VENDOR-}" ]; then
  echo "JAVA_VENDOR needs to be defined to use this module" >&2
  exit 1
fi

# Clean up any java-* packages that have been installed that do not match
# our stated JAVA_VERSION-JAVA_VENDOR (e.g.: 11-openjdk; 1.8.0-openj9)
rpm -qa java-\* | while read -r pkg; do
  if ! echo "$pkg" | grep -q "^java-${JAVA_VERSION}-${JAVA_VENDOR}"; then
    rpm -e --nodeps "$pkg"
  fi
done
| true
|
8d7b1c07a9da7d2f4583c8c990625a8c61ead9f7
|
Shell
|
RohanChandavarkar/L3CubeCodes
|
/TextFS /TEXTFS/copy
|
UTF-8
| 280
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/bash
# Copy $1 to $2, refusing to overwrite an existing destination, and log
# both files' metadata to logfile.txt.
#
# Fix: "$1"/"$2" are quoted — the unquoted originals broke on paths with
# spaces, and with no arguments `[ -e ]` is TRUE (one-arg string test),
# so the wrong branch was taken.
if ! [ -e "$1" ]; then
	echo "Please enter correct name for source file as it doesn't exist"
else
	if [ -e "$2" ]; then
		echo "Please enter correct name for destination file as it already exists"
	else
		cp -- "$1" "$2"
		stat -- "$1" >> logfile.txt
		stat -- "$2" >> logfile.txt
	fi
fi
| true
|
ebc2a5b71830afd728a2fc4a8950134a62e72ac1
|
Shell
|
Brettm12345/nixos-config
|
/modules/applications/shells/zsh/hoc.zsh
|
UTF-8
| 1,149
| 3.578125
| 4
|
[] |
no_license
|
#!/usr/bin/env zsh
# Provide higer-order functions
# usage:
#
# $ foo(){print "x: $1"}
# $ map foo a b c d
# x: a
# x: b
# x: c
# x: d
# Apply $1 (a function name) to each remaining argument, printing each result.
function map {
	local func_name=$1
	shift
	for elem in $@; print -- $(eval $func_name $elem)
}

# $ bar() { print $(($1 + $2)) }
# $ fold bar 0 1 2 3 4 5
# 15
# -- but also
# $ fold bar 0 $( seq 1 100 )
# Right-fold: fold <fn> <init> [x ...] recursively combines the list with <fn>.
function fold {
	if (($#<2)) {
		print -- "ERROR fold use at least 2 arguments" >&2
		return 1
	}
	if (($#<3)) {
		# Base case: empty list, the accumulator is the init value.
		print -- $2
		return 0
	} else {
		local acc
		local right
		local func_name=$1
		local init_value=$2
		local first_value=$3
		shift 3
		# Fold the tail first, then combine the head with that result.
		right=$( fold $func_name $init_value $@ )
		acc=$( eval "$func_name $first_value $right" )
		print -- $acc
		return 0
	}
}

# usage:
#
# $ baz() { print $1 | grep baz }
# $ filter baz titi bazaar biz
# bazaar
# Keep only the arguments for which the predicate function succeeds.
function filter {
	local predicate=$1
	local result
	typeset -a result
	shift
	for elem in $@; do
		if eval $predicate $elem >/dev/null; then
			result=( $result $elem )
		fi
	done
	print $result
}
| true
|
6e798c18de59b584b5d55098b41eae3ea15b4c5d
|
Shell
|
RobertsGhostBusterFan/CustomBuilds
|
/script/copy_chromium-overlay-pkg.sh
|
EUC-JP
| 2,314
| 3.21875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Copy extra package ebuilds into the board overlay and apply the local
# patch series. (The original comments were EUC-JP and arrived garbled;
# they have been rewritten in English from what the code does.)

# Abort unless BOARD (the target board name) is set.
if [ -z "${BOARD}" ]; then
	echo Please set BOARD. Abort.
	exit 1
fi

# Copy packages into the board overlay directory.
cd ~/trunk/src/overlays/overlay-${BOARD}
if [ ! -d sys-libs ]; then
	mkdir sys-libs
	if [ 0 -ne $? ]; then
		echo Failed to mkdir sys-libs. Abort.
		exit 1
	fi
fi
cp -r ~/trunk/src/third_party/chromiumos-overlay/sys-libs/gcc-libs ./sys-libs/
if [ 0 -ne $? ]; then
	echo Failed to copy gcc-libs pkg. Abort.
	exit 1
fi

# NOTE(review): the block below used to copy the `tty` package as well;
# its original (garbled) comment suggests it was disabled after an issue
# seen on R60 without cros_debug — confirm before re-enabling.
#if [ ! -d chromeos-base ]; then
#	mkdir chromeos-base
#	if [ 0 -ne $? ]; then
#		echo Failed to mkdir chromeos-base Abort.
#		exit 1
#	fi
#fi
#cp -r ~/trunk/src/third_party/chromiumos-overlay/chromeos-base/tty ./chromeos-base/
#if [ 0 -ne $? ]; then
#	echo Failed to copy tty pkg. Abort.
#	exit 1
#fi

if [ ! -d sys-kernel ]; then
	mkdir sys-kernel
	if [ 0 -ne $? ]; then
		echo Failed to mkdir sys-kernel Abort.
		exit 1
	fi
fi
cp -r ~/trunk/src/third_party/chromiumos-overlay/sys-kernel/linux-firmware ./sys-kernel/
if [ 0 -ne $? ]; then
	echo Failed to copy linux-firmware pkg. Abort.
	exit 1
fi

# Apply the local patch series to the overlay, dry-running first.
echo Apply Patches to chromiumos-overlay pkg.
cd /home/chromium/myenv/patches/chromiumos-overlay
make TARGET_DIR=~/trunk/src/overlays/overlay-${BOARD} dryrun
if [ 0 -ne $? ]; then
	echo Failed to dryrun. Abort.
	exit 1
fi
make TARGET_DIR=~/trunk/src/overlays/overlay-${BOARD} apply
if [ 0 -ne $? ]; then
	echo Failed to patch. Abort.
	exit 1
fi

# Install the mesa copyright-attribution (license) file.
cd /home/chromium/trunk/src/third_party/chromiumos-overlay
if [ ! -d licenses/copyright-attribution/media-libs ]; then
	mkdir licenses/copyright-attribution/media-libs
	if [ 0 -ne $? ]; then
		echo Failed to mkdir license directory. Abort.
		exit 1
	fi
fi
cp /home/chromium/myenv/patches/chromiumos-overlay/mesa/license licenses/copyright-attribution/media-libs/mesa
if [ 0 -ne $? ]; then
	echo Failed to copy licence file of mesa. Abort.
	exit 1
fi

# Start a repo branch to hold the overlay changes.
repo start my-chromiumos-overlay .

# NOTE(review): disabled revision bump of the tty ebuild (see the
# disabled tty copy above).
#cd ~/trunk/src/overlays/overlay-${BOARD}/chromeos-base/tty
#mv tty-0.0.1-r8.ebuild tty-0.0.1-r9.ebuild
#
#if [ 0 -ne $? ]; then
#	echo Failed to mv tty ebuild. Abort.
#	exit 1
#fi
| true
|
8d305289bc95007acd29252ce5f4ebaa02af9cb6
|
Shell
|
getkub/SplunkScriplets
|
/thirdparty/preChecks/syslog_logger.sh
|
UTF-8
| 214
| 2.609375
| 3
|
[] |
no_license
|
# syslog simulation using logger or netcat (nc)
# Fire a single test message at a remote syslog collector; an equivalent
# logger(1) invocation is kept commented for reference.
host="10.12.13.14"
port="514"
msg="TEST_MSG"

# logger -n $host -P $port $msg
printf '%s\n' "$msg" | nc "$host" "$port"
| true
|
f36a0ef68df3dc3c7b6789289dd83721c88a969f
|
Shell
|
orange-cloudfoundry/mdproxy4cs
|
/assets/pre-start.sh
|
UTF-8
| 163
| 2.59375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# add ip 169.254.169.254 to loopback interface
# Idempotent: only adds the address when `ip addr show` doesn't already
# list it. Requires privileges to modify interfaces (root/CAP_NET_ADMIN).
ip addr show dev lo | grep -q 'inet 169.254.169.254/32' || {
  ip addr add 169.254.169.254/32 dev lo
}
| true
|
dbecd7bad331975a567c237b2bd840380ee0029a
|
Shell
|
relaxdiego/cicd
|
/scripts/get_vault_password_path
|
UTF-8
| 345
| 2.734375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Print (without a trailing newline) the vault password file path for the
# given cluster id.
# NB: the old shebang `#!/usr/bin/env bash -e` is broken on Linux — env
# receives "bash -e" as a single word; enable errexit with `set -e` instead.
set -e

# NOTE: Any echos made here will not be displayed on the terminal since
# it will be captured by the parent process. If you want to check for
# the presence of $1, do the check in the parent process.
cluster_id="$1"
vault_passwords_dir=$HOME/.relaxdiego/cicd/vault-passwords
echo -n "${vault_passwords_dir}/${cluster_id}"
| true
|
e418ec64126e0fef154b07f4405770f2ff453a02
|
Shell
|
ptramsey/configs
|
/_bash/scripts/kube.sh
|
UTF-8
| 434
| 3.1875
| 3
|
[] |
no_license
|
#!/bin/bash
# Helpers for opening an interactive shell inside Kubernetes pods.

# Print the name of a Running pod labelled app=$1.
_get_pod() {
	kubectl get pods -l app="$1" -o jsonpath='{.items[?(@.status.phase=="Running")].metadata.name}'
}

# Open bash (falling back to sh) in container "$1" (or "$1-$2" when a
# second argument is given) of the app's running pod.
_local_shell() {
	pod=$(_get_pod "$1")
	container="$1${2:+-$2}"
	kubectl exec -it "$pod" -c "$container" /bin/bash -i || kubectl exec -it "$pod" -c "$container" /bin/sh -i
}

# Shell into the "receipt" app's main container.
receipt() {
	_local_shell "receipt"
}

# Shell into the "receipt" app's "receipt-test" container.
receipt-test() {
	_local_shell "receipt" "test"
}

alias lcl="_local_shell"
| true
|
d1a77e979f2729d7546de3a16c80def9b6208a83
|
Shell
|
ionut-arm/parsec
|
/packaging_assets/package.sh
|
UTF-8
| 3,443
| 4.40625
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash

# Copyright 2022 Contributors to the Parsec project.
# SPDX-License-Identifier: Apache-2.0

# Create a quickstart package

# Avoid silent failures
set -euf -o pipefail

ARCH="x86"
OS="linux"
PACKAGE_PATH=$(pwd)
WORK_DIR="/tmp/workdir-parsec/"
JOBS_NUMBER=1
SCRIPT_PATH=$(realpath $0)
ASSETS_DIR=$(dirname $SCRIPT_PATH)
PARSEC_DIR=$(dirname $ASSETS_DIR)
VERSION=$(cargo metadata --format-version=1 --no-deps --offline | jq -r '.packages[0].version')

# Usage
# NB: added the previously-missing "\n" after the -j line so that -h
# renders on its own line instead of being glued onto the -j text.
USAGE_STR=\
"Usage:\n"\
"package.sh [Options]\n"\
"Options:\n"\
" -o {path}: Output absolute path, the default path is the current directory i.e. $(pwd)\n"\
" -j {jobs}: Number of parallel jobs, Default is $JOBS_NUMBER\n"\
" -h : Display this help menu\n"

# Flags
# NB: removed the stray "v:" from the option spec — there was no matching
# case arm, so "-v" silently swallowed its argument and did nothing.
while getopts o:j:h flag
do
    case "${flag}" in
        o) PACKAGE_PATH=${OPTARG};;
        j) JOBS_NUMBER=${OPTARG};;
        h) echo -e $USAGE_STR; exit 0;;
    esac
done

# Abort when the crate version could not be read from cargo metadata.
check_version() {
    echo "Checking version"
    if [ -z "$VERSION" ];then
        echo "Couldn't extract the version!" >&2
        exit 1
    fi
}

# Warn (and optionally abort) when HEAD is not the latest tag.
check_release_tag() {
    CURRENT_TAG=$(git name-rev --tags HEAD | cut -d "/" -f 2)
    LATTEST_TAG=$(git tag --sort=committerdate | tail -1)
    if [ -z "$LATTEST_TAG" ];then
        echo "Warning:No tags"
    fi
    if [ "$LATTEST_TAG" == "$CURRENT_TAG" ]; then
        echo "Packaging release tag: $LATTEST_TAG"
    else
        echo "Warning: The current HEAD doesn't match the latest tagged"
        echo "Warning: Please checkout the latest tag : $LATTEST_TAG"
        read -n 1 -p "Do you want to continue anyway [y/n]?" choice
        if [ "$choice" != "y" ]; then
            exit 1
        fi
    fi
}

# Remove the scratch work directory (also runs on EXIT via trap).
cleanup()
{
    echo "Clean up"
    rm -rf $WORK_DIR
}

# Create the scratch directories and derive the package name.
pre_package() {
    # Construct package name
    PACKAGE_DIR=quickstart-$VERSION-${OS}_$ARCH
    # Create a temp work directory for parsec service
    mkdir -p $WORK_DIR/parsec
    # Create the package directory
    mkdir $WORK_DIR/$PACKAGE_DIR
}

build_parsec_service() {
    # Package Parsec
    echo "Building Parsec"
    CARGO_TARGET_DIR=$WORK_DIR/parsec/ cargo build -j $JOBS_NUMBER --release --features mbed-crypto-provider --manifest-path $PARSEC_DIR/Cargo.toml
}

build_parsec_tool() {
    # Package Parsec-tool (checked out at its latest tag)
    echo "Building Parsec-tool"
    git clone https://github.com/parallaxsecond/parsec-tool $WORK_DIR/parsec-tool
    cd $WORK_DIR/parsec-tool
    git checkout $(git tag --sort=committerdate | tail -1)
    cd -
    cargo build -j $JOBS_NUMBER --release --manifest-path $WORK_DIR/parsec-tool/Cargo.toml
}

# Gather binaries, test script, config and README into the package dir.
collect() {
    # Include Parsec service
    cp $WORK_DIR/parsec/release/parsec $WORK_DIR/$PACKAGE_DIR/
    # Include Parsec tool
    cp $WORK_DIR/parsec-tool/target/release/parsec-tool $WORK_DIR/$PACKAGE_DIR/
    # Include test script
    cp $WORK_DIR/parsec-tool/tests/parsec-cli-tests.sh $WORK_DIR/$PACKAGE_DIR/parsec-cli-tests.sh
    # Include Parsec default configurations
    cp $ASSETS_DIR/quickstart_config.toml $WORK_DIR/$PACKAGE_DIR/config.toml
    # Include Parsec README.md file
    cp $ASSETS_DIR/quickstart_README.md $WORK_DIR/$PACKAGE_DIR/README.md
}

echo "Packaging started..."
trap cleanup EXIT
check_version
check_release_tag
cleanup
pre_package
build_parsec_service
build_parsec_tool
collect
echo "Finalizing package"
cd $WORK_DIR
tar czf "$PACKAGE_PATH/$PACKAGE_DIR".tar.gz "$PACKAGE_DIR" || exit 1
echo "$PACKAGE_PATH/$PACKAGE_DIR.tar.gz is Ready"
| true
|
9e837c10c7a96bbfb33d474bd97a2cb4abe76492
|
Shell
|
djacobs24/dotfiles
|
/.brew
|
UTF-8
| 1,952
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/bash
# Machine bootstrap: install Homebrew, then the formulae/casks and the
# from-source Vim build used on this machine.
# NOTE(review): `brew cask install` and `brew tap homebrew/dupes` are
# legacy invocations removed from current Homebrew; modern equivalents
# would be `brew install --cask <name>` — confirm before rerunning.

# Homebrew
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"

# Make sure we’re using the latest Homebrew
brew update

# Upgrade any already-installed formulae
brew upgrade

# Allow re-install of grep
brew tap homebrew/dupes

# Android Studio
brew cask install android-studio

# Base64
brew install base64

# Bat
brew install bat

# CCleaner
brew cask install ccleaner

# Docker
brew cask install docker

# Go
brew install go

# Google Chrome
brew cask install google-chrome

# GoLand
brew cask install goland

# iTerm2
brew cask install iterm2

# Kubernetes
brew install kubernetes-cli
brew install kubectx
brew install minikube
brew install derailed/k9s/k9s

# LastPass
brew cask install lastpass

# MD5 and SHA1
brew install md5sha1sum

# MinIO
brew install minio
brew install minio/stable/mc

# MongoDB
brew tap mongodb/brew

# Muzzle
brew cask install muzzle

# ngrok
brew cask install ngrok

# Nmap
brew install nmap

# Postico
brew cask install postico

# Postman
brew cask install postman

# Protocol Buffers
brew install protobuf

# RabbitMQ
brew install rabbitmq

# Redis
brew install redis

# Robo 3T
brew cask install robomongo

# SketchUp
brew cask install sketchup

# Slack
brew cask install slack

# Spotify
brew cask install spotify

# Tree
brew install tree

# Vim (built from source into /opt/local, with pathogen + nerdtree)
brew install mercurial
sudo mkdir -p /opt/local/bin
cd ~
git clone git@github.com:vim/vim.git
cd vim
./configure --prefix=/opt/local
make
sudo make install
echo 'PATH=/opt/local/bin:$PATH' >> ~/.bash_profile
source ~/.bash_profile
cd ~
mkdir -p ~/.vim/autoload ~/.vim/bundle && \
curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim
git clone https://github.com/scrooloose/nerdtree.git ~/.vim/bundle/nerdtree

# VirtualBox
brew cask install virtualbox

# Visual Studio Code
brew cask install visual-studio-code

# Yarn
brew install yarn

# Remove outdated versions from the cellar
brew cleanup
| true
|
940a346d719698aba445e2d6327d7c8c4e532b33
|
Shell
|
gacop/Caffe-PHAST
|
/test1.sh
|
UTF-8
| 1,144
| 2.890625
| 3
|
[
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# Run Caffe (time / test / train) against the PHAST LeNet MNIST setup.

# Require exactly one mode argument.
if (( $# == 0 )); then
	echo "please, provide a mode parameter: time, test, or train"
	exit 1
fi

JOB=$1

# Validate the mode.
# NB: the original used `[ "$JOB" != "time" && ... ]`, which is a runtime
# syntax error inside single brackets ('&&' is not valid there), so the
# validation never worked; a case pattern does the same check correctly.
case "$JOB" in
	time|test|train) ;;
	*)
		echo "please, provide a mode parameter: time, test, or train"
		exit 1
		;;
esac

# Export so the caffe child process actually sees the extended path; the
# original plain assignment was not propagated unless the variable was
# already exported by the caller.
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$PWD/build/lib/

if [ "$JOB" == "train" ]; then
	./build/tools/caffe $JOB -model phast/mnist/lenet_train_test.prototxt -weights phast/mnist/lenet_iter_10000.caffemodel -phast_conf_file phast/mnist/conf_file.yml -iterations 10 --solver=phast/mnist/lenet_solver.prototxt
else
	./build/tools/caffe $JOB -model phast/mnist/lenet_train_test.prototxt -weights phast/mnist/lenet_iter_10000.caffemodel -phast_conf_file phast/mnist/conf_file.yml -iterations 10
	#cuda-gdb --tui --args ./build/tools/caffe $JOB -model phast/mnist/lenet_train_test.prototxt -weights phast/mnist/lenet_iter_10000.caffemodel -phast_conf_file phast/mnist/conf_file.yml -iterations 1
	#cuda-memcheck ./build/tools/caffe $JOB -model phast/mnist/lenet_train_test.prototxt -weights phast/mnist/lenet_iter_10000.caffemodel -phast_conf_file phast/mnist/conf_file.yml -iterations 1
fi
| true
|
d44b506c16e4ce608e6fbb09573340937af8746e
|
Shell
|
GoogleCloudPlatform/php-docker
|
/package-builder/extensions/amqp/build.sh
|
UTF-8
| 397
| 2.90625
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#!/bin/bash
set -ex
source ${DEB_BUILDER_DIR}/functions.sh
export PHP_LIBRABBITMQ_DIR="no"
echo "Building amqp for gcp-php${SHORT_VERSION}"
# Now build the extension
PNAME="gcp-php${SHORT_VERSION}-amqp"
# Install the packages for librabbitmq
install_last_package "librabbitmq"
install_last_package "librabbitmq-dev"
# Download the source
download_from_pecl amqp 1.11.0beta
build_package amqp
| true
|
3161c1e99291ada78cc620b1602249c6da77f1a0
|
Shell
|
PatrickAuld/dotfiles
|
/aws/env.zsh
|
UTF-8
| 388
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
if [[ -z ~/.aws/credentials ]] then
echo "AWS Credentials File Not Found"
return 0
fi
DEFAULT_CREDS=`grep default ~/.aws/credentials -A 2`
key_id=`echo $DEFAULT_CREDS | grep aws_access_key_id | awk '{print $3}'`
access_id=`echo $DEFAULT_CREDS | grep aws_secret_access_key | awk '{print $3}'`
DEFAULT_CREDS=''
export AWS_ACCESS_KEY_ID=$key_id
export AWS_SECRET_ACCESS_KEY=$access_id
| true
|
4f0cbd3a0c0d1c16f2d9b352cfdf67873805200b
|
Shell
|
redcrossp/virtex
|
/scripts/runTestSuite
|
UTF-8
| 842
| 3.96875
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# runTestSuite makes and runs the specified test suites
# Usage: runTestSuite [SUITE]...
# without SUITE, all test targets are made and run
#
# SUITE basename of a test template (e.g. "virtex", "format")
#
# suites are made in output test directory
TESTS_OUTDIR="out/test/"
TESTS_SRCDIR="test/"
TESTS_EXT=".test.c"
if [[ $@ ]]; then
testsToRun=$(echo $@ | sed 's/ /\n/g' | sort | uniq | tr '\n' ' ')
testFiles=
suiteExec="${TESTS_OUTDIR}suite_$(echo $testsToRun | sed 's/ /_/g' | head -c-1)"
for testFile in $testsToRun; do
testFile=$TESTS_SRCDIR$testFile$TESTS_EXT
testFiles+="$testFile "
done
make $suiteExec TEST_EXEC=$suiteExec TEST_SOURCES="$testFiles"
else
suiteExec="${TESTS_OUTDIR}virtex_test"
make test
fi
echo "Running test suite $suiteExec"
valgrind --leak-check=full $suiteExec
| true
|
e4e425ccb0d09c2021c086fe6a144371b540658e
|
Shell
|
lamdevhs/miniml-compiler
|
/generate-all.sh
|
UTF-8
| 1,345
| 4.03125
| 4
|
[] |
no_license
|
#!/bin/bash
demo=demo
ocaml=ocaml
ccam=ccam
tmp=__tmp_folder_generate_all__
Msg ()
{
echo "generated-all.sh: $1"
}
Crash ()
{
Msg "unexpected error: $1 -- aborting"
exit 1
}
Done ()
{
Msg "done"
exit 0
}
if [ "$1" = "clean" ] ; then
echo rm ./$demo/*.c ./$demo/*.out
echo press enter to delete those files
read PAUSE
rm ./$demo/*.c ./$demo/*.out
Done
fi
[ -d ./$ocaml ] || Crash "could not find folder ./$ocaml"
[ -d ./$ccam ] || Crash "could not find folder ./$ccam"
[ -d ./$demo ] || Crash "could not find folder ./$demo"
! [ -e ./$tmp ] || Crash "could not create temporary folder ./$tmp"
cd $ocaml
make comp || Crash "could not build executable comp"
echo
cd ../
cp -r ./$ccam ./$tmp || Crash "could not create temporary folder ./$tmp"
cp ./$ocaml/comp ./$tmp
cd ./$tmp
for path in ../$demo/*.ml ; do
filename="$(basename "$path")"
shortname="${filename%.ml}"
echo "### compiling $filename"
./comp "$path" "$shortname.c" && \
make build in="$shortname.c" out="$shortname.out" && \
mv "$shortname.out" "../$demo/$shortname.out" && \
make build in="$shortname.c" out="$shortname.out" DBG=y && \
mv "$shortname.out" "../$demo/$shortname.dbg.out" && \
mv "$shortname.c" "../$demo/$shortname.c"
echo
done
cd ../
rm -r ./$tmp || Msg "warning: could not erase temporary folder ./$tmp"
Done
| true
|
9e42bfe4fa6f65dd66a24350b0d713a34baa7fdf
|
Shell
|
nguyenquangvn/workspace-setup
|
/bash-script/up.sh
|
UTF-8
| 105
| 3.3125
| 3
|
[] |
no_license
|
function up() {
times=$1
while [ "$times" -gt "0" ]; do
cd ..
times=$(($times - 1))
done
}
| true
|
322eb36c30864a82b928a21caa40ec734f2b6b7d
|
Shell
|
g-bpg/k-and-r-chapter-1
|
/1-08/devenblake/wc
|
UTF-8
| 1,242
| 3.640625
| 4
|
[
"Unlicense"
] |
permissive
|
#!/bin/sh
which nawk >/dev/null 2>&1 && awk="nawk" || awk="awk"
if [ -e "./counter" ]; then
counter="./counter"
else
printf "Either change the \$counter variable within this wc implementation or cd to the folder where you've built counter.\n" >/dev/null
exit 1
fi
usage() {
printf "Usage: $argv0 [ -h ] [ FILE ... ]\n"
exit 1
}
# this is the wrapper on counter that lets it act as a wc clone
# lacks some features compared to GNU, adds one or two
# - honestly you could probably implement wc in pure POSIX shell but it'd
# probably be brutally slow
file=/dev/stdin # shouldfix, probablywontfix
counter_output="$("$counter" <"$file")"
# REEEEEEEE REPEATED CODE!!!!
newlines=$(printf "$counter_output" | "$awk" '{ print $1 }')
words=$(printf "$counter_output" | "$awk" '{ print $2 }')
chars=$(printf "$counter_output" | "$awk" '{ print $3 }')
# bytes=$(printf "$counter_output" | "$awk" '{ print $4 }')
maxlinelen=$(printf "$counter_output" | "$awk" '{ print $5 }')
tabs=$(printf "$counter_output" | "$awk" '{ print $6 }')
blanks=$(printf "$counter_output" | "$awk" '{ print $7 }')
# tag me with a fix if you have one. idk how to do this better.
printf "%4s %4s %4s\n" "$newlines" "$words" "$chars"
exit 0
| true
|
4c3170d1b3466d69319e5aa4f21cb7daa1c17110
|
Shell
|
liuzf1986/emacs
|
/bin/rtags/gcc-rtags-wrapper.sh
|
UTF-8
| 958
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
function followSymlink()
{
python -c "import os, sys; print(os.path.realpath(\"$1\"))"
}
rc=`which rc`
for i in `which -a "\`basename $0\`"`; do
resolved=`followSymlink $i`
if [ `basename $resolved` != "gcc-rtags-wrapper.sh" ]; then
[ -n "$RTAGS_SERVER_FILE" ] && RTAGS_ARGS="$RTAGS_ARGS -n$RTAGS_SERVER_FILE"
[ -n "$RTAGS_PROJECT" ] && RTAGS_ARGS="$RTAGS_ARGS --project-root=$RTAGS_PROJECT"
[ -z "$RTAGS_COMPILE_TIMEOUT" ] && RTAGS_COMPILE_TIMEOUT=3000
if [ -z "$RTAGS_DISABLED" ] && [ -x "$rc" ]; then
$rc --timeout=$RTAGS_COMPILE_TIMEOUT $RTAGS_ARGS --silent --compile "$i" "$@" &
disown &>/dev/null # rc might be finished by now and if so disown will yell at us
fi
[ "$RTAGS_RMAKE" ] && exit 0
"$i" "$@"
exit $?
else
dir=`dirname $i`
PATH=`echo $PATH | sed -e "s,$dir/*:*,,g"`
fi
done
exit 1 ### no compiler found?
| true
|
7d04912da60b7cdc38b619b3c3d8f698503dab6f
|
Shell
|
durlabhashok/OS-LAB
|
/Lab1/HelloWorld.sh
|
UTF-8
| 170
| 3.109375
| 3
|
[] |
no_license
|
#! /bin/bash
clear
echo"Enter 1st Number"
read a
echo"Enter 2nd Number"
read b
echo"a - addition, m - minus"
read op
if [ $op -eq a]
then
echo
else
echo "Welcome"
fi
| true
|
f72f91841cc5ffa1cbf112c5a8ed5155491bb350
|
Shell
|
ajayravindra/scripts
|
/shell/killbill
|
UTF-8
| 3,579
| 4
| 4
|
[] |
no_license
|
#!/usr/bin/env sh
DEBUG=0
ECHO ()
{
if [ "x$DEBUG" = "x1" ] ; then
echo $1
fi
}
unset rflag; unset kflag; unset nflag;
while getopts k:r:n:f:h opts
do
case "$opts" in
k) ECHO "khit"; kflag=1; wtime="$OPTARG";;
r) ECHO "rhit"; rflag=1; wtime="$OPTARG";;
n) ECHO "nhit"; nflag=1; mailto="$OPTARG";;
f) ECHO "fhit"; fflag=1; mailfile="$OPTARG";;
h) echo "usage: $0 [-rk time] [-n mailto(s)] [-f mailfile] cmd [args..]"
echo "options:"
echo "-k <timeout> : kill after timeout"
echo "-r <timeout> : run at least until timeout, else notify"
echo "-n <mailtos> : notify by mail (also causes '-k' to not kill)"
echo "-f <mailfile>: mail this file in notification"
echo "-h : print this help menu"
exit 1;;
esac
done
shift `expr $OPTIND - 1`
ECHO "kflag: $kflag"
ECHO "rflag: $rflag"
ECHO "wtime: $wtime"
ECHO "nflag: $nflag"
ECHO "mailto: $mailto"
ECHO x$rflag
ECHO x$kflag
# must have '-k' or '-r'
[ "x$rflag" = "x" ] && [ "x$kflag" = "x" ] && {
echo "use at least one of 'k' or 'r' flags"
exit 1
}
# must not have both '-k' and '-r'
[ "x$rflag" = "x1" ] && [ "x$kflag" = "x1" ] && {
echo "use either 'k' or 'r' flags, not both"
exit 1
}
# if 'r' is selected, then '-n' is a must
[ "x$rflag" = "x1" ] && [ "x$nflag" = "x" ] && {
echo "with '-r', you must also use '-n'"
exit 1
}
# if notify, but no mailfile, then create default mailfile
[ "x$nflag" = "x1" ] && [ "x$fflag" = "x" ] && {
ECHO "no mail file, creating default"
mailfile=/tmp/killbill.mailfile.$$
echo "--killbill activity report--" >> $mailfile
echo "host: `hostname`" >> $mailfile
echo "command: \"$@\"" >> $mailfile
}
# execute cmd in background and capture PID
ECHO "executing in background mode: $@"
$@ &
p=$!
ECHO "monitoring process: $p"
# if process ends before the PID is captured, then exit killbill with retcode=2
[ "$p" = "0" ] && {
echo "process has already exited"
exit 2
}
(
ECHO "waiting for $wtime"
sleep $wtime;
ECHO "waking up"
# check if the background process still exists
ps | grep -q $p | grep -v grep
terminated=$?
ECHO "terminated: $terminated"
# if rflag is set, and process has terminated, then notify and exit
[ "x$rflag" = "x1" ] && [ "$terminated" = "1" ] && {
echo "rflag set, but process has terminated. sending notification."
mailx -s "killbill warning: process terminated immaturely" \
"$mailto" < $mailfile
# delete temp mailfile, if one was created
[ "x$fflag" = "x" ] && /bin/rm -f $mailfile
exit 3
}
# if kflag is set, and process has not terminated, then notify/kill
[ "x$kflag" = "x1" ] && [ "$terminated" = "0" ] && {
echo "kflag set, but process has not terminated."
# if notify, then send mail and exit
[ "x$nflag" = "x1" ] && {
echo "sending notification."
mailx -s "killbill warning: process did not exit!" \
"$mailto" < $mailfile
# delete temp mailfile, if one was created
[ "x$fflag" = "x" ] && /bin/rm -f $mailfile
exit
}
# if not notify, then kill
echo "terminating process"
kill -9 $p &
kpid=$!
# wait for process to terminate; and capture exit code
wait $p
retcode=$?
kill -HUP $kpid 2>/tmp/killbill.log
exit $retcode
}
ECHO "no if conditions were hit"
) & 2>/tmp/killbill.log
| true
|
d8e43e4fd2420adc5bfb30f70153762088413af7
|
Shell
|
possientis/Prog
|
/asm/64bits/mul/test.sh
|
UTF-8
| 1,289
| 2.765625
| 3
|
[] |
no_license
|
#!/bin/sh
set -e
DIR=${HOME}/Prog/asm/64bits/mul
cd ${DIR}
echo
option=$(sh ../option.sh)
echo "testing 8 bits unsigned multiplication..."
../yasm.sh test_mul_8bits.asm
gcc $option test_mul_8bits.c test_mul_8bits.o
./a.out; ./clean.sh
echo
echo "testing 16 bits unsigned multiplication..."
../yasm.sh test_mul_16bits.asm
gcc $option test_mul_16bits.c test_mul_16bits.o
./a.out; ./clean.sh
echo
echo "testing 32 bits unsigned multiplication..."
../yasm.sh test_mul_32bits.asm
gcc $option test_mul_32bits.c test_mul_32bits.o
./a.out; ./clean.sh
echo
echo "testing 64 bits unsigned multiplication..."
../yasm.sh test_mul_64bits.asm
gcc $option test_mul_64bits.c test_mul_64bits.o
./a.out; ./clean.sh
echo
echo "testing 8 bits signed multiplication..."
../yasm.sh test_imul_8bits.asm
gcc $option test_imul_8bits.c test_imul_8bits.o
./a.out; ./clean.sh
echo
echo "testing 16 bits signed multiplication..."
../yasm.sh test_imul_16bits.asm
gcc $option test_imul_16bits.c test_imul_16bits.o
./a.out; ./clean.sh
echo
echo "testing 32 bits signed multiplication..."
../yasm.sh test_imul_32bits.asm
gcc $option test_imul_32bits.c test_imul_32bits.o
./a.out; ./clean.sh
echo
# TODO 64 bits signed multiplication
echo
echo "multiplication tests completed successfully"
echo
| true
|
2f10d828d30f26640f3c39b1c8be6320400a1e41
|
Shell
|
alfredocoj/terraform-azure-exemplos
|
/k8s/install-master.sh
|
UTF-8
| 1,648
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
useradd -s /bin/bash -m k8s-admin
passwd k8s-admin
usermod -aG sudo k8s-admin
usermod -aG docker k8s-admin
echo "k8s-admin ALL=(ALL) NOPASSWD:ALL" | tee /etc/sudoers.d/k8s-admin
apt-get update
apt-get install -y apt-transport-https ca-certificates curl gnupg2 software-properties-common vim mcedit nfs-common wget
cat > /etc/docker/daemon.json <<EOF
{
"exec-opts": ["native.cgroupdriver=systemd"],
"log-driver": "json-file",
"log-opts": {
"max-size": "100m"
},
"storage-driver": "overlay2"
}
EOF
sudo systemctl daemon-reload
sudo systemctl restart docker
# curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add -
# add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/debian $(lsb_release -cs) stable"
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
echo 'deb https://apt.kubernetes.io/ kubernetes-xenial main' > /etc/apt/sources.list.d/kubernetes.list
apt-get update
#apt-get install -y docker-ce docker-ce-cli containerd.io kubelet kubeadm kubectl
apt-get install -y kubelet=1.17.3-00 kubeadm=1.17.3-00 kubectl=1.17.3-00 kubernetes-cni=0.7.5-00
##Setting Limits on Ubuntu 16.04
##Open the grub configuration file in a text editor.
#nano /etc/default/grub
##Add the following line. If the GRUB_CMDLINE_LINUX optional already exists, modify it to include the values below.
#GRUB_CMDLINE_LINUX="cgroup_enable=memory swapaccount=1"
##Save your changes and exit the text editor.
sed -i 's/GRUB_CMDLINE_LINUX_DEFAULT="[^"]*/& cgroup_enable=memory swapaccount=1/' /etc/default/grub
##Update the grub configuration.
sudo update-grub
sudo kubeadm init
| true
|
a4b6584a122105f98e235128a6ca8c7a4bc0dbd8
|
Shell
|
apache/brooklyn-dist
|
/release/clone-and-configure-repos.sh
|
UTF-8
| 1,921
| 2.53125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
set -e
set -x
# Do the basics
git clone -o apache-git https://git-wip-us.apache.org/repos/asf/brooklyn
cd brooklyn
git submodule init
git submodule update --remote --merge --recursive
# .gitmodules sets the submodules up from GitHub. Replace the origin with Apache canonical
git submodule foreach 'git remote add apache-git https://git-wip-us.apache.org/repos/asf/${name}'
git submodule foreach 'git fetch apache-git'
git submodule foreach 'git checkout master'
git submodule foreach 'git branch --set-upstream-to apache-git/master master'
git submodule foreach 'git reset --hard apache-git/master'
git submodule foreach 'git remote remove origin'
# Final check we are up to date
git pull
git submodule update --remote --merge --recursive
# And also the location for publishing RC artifacts
svn --non-interactive co --depth=immediates https://dist.apache.org/repos/dist/release/brooklyn ~/apache-dist-release-brooklyn
svn --non-interactive co --depth=immediates https://dist.apache.org/repos/dist/dev/brooklyn ~/apache-dist-dev-brooklyn
echo "export APACHE_DIST_SVN_DIR=$HOME/apache-dist-dev-brooklyn" >> ~/.profile
| true
|
90eaeffd5f060669f55f0452a82f19508998f38c
|
Shell
|
yohlulz/docker-tcollector
|
/etc/bin/tsddrain.sh
|
UTF-8
| 327
| 3.015625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
PORT=${PORT-8888}
DIRECTORY=${DIRECTORY-"/opt/data/dump"}
function stop_drain {
echo "stopping tsd drain..."
pgrep -f tsddrain | xargs kill -9
exit
}
trap stop_drain HUP INT TERM EXIT SIGHUP SIGINT SIGTERM
echo "starting tsd drain..."
exec /usr/bin/python /opt/tcollector/bin/tsddrain.py ${PORT} ${DIRECTORY}
| true
|
05b142a10228d93fcdde85d5a9934c6c5f2fe142
|
Shell
|
Augustyniak/DrString
|
/Scripts/build-docker.sh
|
UTF-8
| 269
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
command -v docker &> /dev/null || { echo >&2 "Install docker https://www.docker.com"; exit 1; }
IMAGE=drstringbuilding
docker image rm -f "$IMAGE" &> /dev/null
docker build -t "$IMAGE" -f Scripts/Dockerfile-building . && docker run --rm "$IMAGE"
| true
|
305f6b2cf2b7df1c3b7c8507ffee4d3a13329288
|
Shell
|
gabriel-mao/cfddns
|
/cfddns/scripts/cfddns_update.sh
|
UTF-8
| 2,223
| 3.71875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
eval `dbus export cfddns_`
if [ "$cfddns_enable" != "1" ]; then
echo "not enable"
exit
fi
urlencode() {
# urlencode <string>
out=""
while read -n1 c; do
case $c in
[a-zA-Z0-9._-]) out="$out$c" ;;
*) out="$out`printf '%%%02X' "'$c"`" ;;
esac
done
echo -n $out
}
enc() {
echo -n "$1" | urlencode
}
update_record() {
curl -X PUT "https://api.cloudflare.com/client/v4/zones/$cfddns_zoneid/dns_records/$domain_id" \
-H "X-Auth-Email:$cfddns_email" \
-H "X-Auth-Key:$cfddns_key" \
-H "Content-Type: application/json" \
--data '{"type":"A","name":"'${cfddns_name}'","content":"'${ip}'","ttl":'${cfddns_ttl}',"proxied":false}'
}
record_response() {
curl -X GET "https://api.cloudflare.com/client/v4/zones/$cfddns_zoneid/dns_records?type=A&name=$(enc "$cfddns_name")" \
-H "X-Auth-Email: $cfddns_email" \
-H "X-Auth-Key: $cfddns_key" \
-H "Content-Type: application/json"
}
now=`date '+%Y-%m-%d %H:%M:%S'`
ip=`$cfddns_curl 2>&1` || die "$ip"
[ "$cfddns_curl" = "" ] && cfddns_curl="curl -s whatismyip.akamai.com"
[ "$cfddns_dns" = "" ] && cfddns_dns="114.114.114.114"
[ "$cfddns_ttl" = "" ] && cfddns_ttl="600"
die () {
echo $1
dbus ram cfddns_last_act="$now: failed($1)"
}
if [ "$?" -eq "0" ]; then
current_ip=`record_response | grep -oE '([0-9]{1,3}\.?){4}'|head -n 2|tail -n 1`
if [ "$ip" = "$current_ip" ]; then
echo "skipping"
# new_ip=`record_response | grep -oE '([0-9]{1,3}\.?){4}'|head -n 1`
dbus set cfddns_last_act="$now: 跳过更新,路由器IP:($ip),A记录IP:($current_ip)"
exit 0
else
echo "changing"
domain_id=`record_response | grep -oE '([a-zA-Z0-9]{32}\.?){1}'|head -n 1`
update_record
new_ip=`record_response | grep -oE '([0-9]{1,3}\.?){4}'|head -n 2|tail -n 1`
if [ "$new_ip" = "$ip" ]; then
dbus set cfddns_last_act="$now: 更新成功,路由器IP:($ip),A记录IP:($new_ip)"
else
dbus set cfddns_last_act="$now: 更新失败!请检查设置"
fi
fi
fi
| true
|
42d4d5865ed59aaab376f3ff807e15c3789bf2f4
|
Shell
|
nvm-sh/nvm
|
/test/fast/Unit tests/nvm_is_version_installed
|
UTF-8
| 1,511
| 3.96875
| 4
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/sh
cleanup () {
rm -rf "$NVM_DIR"
unset -f die cleanup nvm_get_os check_version
unset NVM_DIR NODE_PATH
}
die () { echo "$@" ; cleanup ; exit 1; }
\. ../../../nvm.sh
set -ex
NVM_DIR=$(mktemp -d)
NODE_PATH="$NVM_DIR/versions/node"
mkdir -p "$NODE_PATH"
if [ -z "$NODE_PATH" ]; then
die 'Unable to create temporary folder'
fi
check_version() {
local VERSION
local BINARY
VERSION=$1
BINARY=$2
# nvm_is_version_installed fails with non existing version
! nvm_is_version_installed "$VERSION" || die "nvm_is_version_installed $VERSION should fail with non existing version"
# nvm_is_version_installed fails with non executable existing version
mkdir -p "$NODE_PATH/$VERSION/bin" && cd "$NODE_PATH/$VERSION/bin" && touch "$NODE_PATH/$VERSION/bin/$BINARY"
! nvm_is_version_installed "$VERSION" || die "nvm_is_version_installed $VERSION should fail with non executable existing version"
# nvm_is_version_installed whould work
chmod +x "$NODE_PATH/$VERSION/bin/$BINARY"
nvm_is_version_installed "$VERSION" || die "nvm_is_version_installed $VERSION should work"
}
# nvm_is_version_installed is available
type nvm_is_version_installed > /dev/null 2>&1 || die 'nvm_is_version_installed is not available'
# nvm_is_version_installed with no parameter fails
! nvm_is_version_installed || die 'nvm_is_version_installed without parameter should fail'
check_version '12.0.0' 'node'
# Checking for Windows
nvm_get_os() {
echo "win"
}
check_version '13.0.0' 'node.exe'
cleanup
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.