blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
f473810f3a02d2a9facc7bef8a2c853b8c8371d1
|
Shell
|
onap/holmes-engine-management
|
/engine-d-standalone/src/main/assembly/bin/run.sh
|
UTF-8
| 4,028
| 3.21875
| 3
|
[
"CC-BY-4.0",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/usr/bin/env bash
#
# Copyright 2017-2021 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Launcher for the Holmes engine-management service: resolves runtime paths,
# fills database and TLS settings into conf/application.yaml (unless a
# helm-generated config is present), runs initDB.sh, then starts the jar.
# Resolve the directory this script lives in; the install root is one up.
DIRNAME=`dirname $0`
RUNHOME=`cd $DIRNAME/; pwd`
echo @RUNHOME@ $RUNHOME
echo @JAVA_HOME@ $JAVA_HOME
JAVA="$JAVA_HOME/bin/java"
echo @JAVA@ $JAVA
main_path=$RUNHOME/..
cd $main_path
JAVA_OPTS="-Xms256m -Xmx1g"
port=9202
# Uncomment to enable remote JVM debugging on $port.
#JAVA_OPTS="$JAVA_OPTS -Xdebug -Xnoagent -Djava.compiler=NONE -Xrunjdwp:transport=dt_socket,address=*:$port,server=y,suspend=n"
echo @JAVA_OPTS@ $JAVA_OPTS
class_path="$main_path/lib/*"
echo @class_path@ $class_path
# Default the database credentials/name when the environment does not set them.
if [ -z ${JDBC_USERNAME} ]; then
export JDBC_USERNAME=holmes
echo "No user name is specified for the database. Use the default value \"$JDBC_USERNAME\"."
fi
if [ -z ${JDBC_PASSWORD} ]; then
export JDBC_PASSWORD=holmespwd
echo "No password is specified for the database. Use the default value \"$JDBC_PASSWORD\"."
fi
if [ -z ${DB_NAME} ]; then
export DB_NAME=holmes
echo "No database is name is specified. Use the default value \"$DB_NAME\"."
fi
export DB_PORT=5432
# If URL_JDBC contains a colon, take everything after the last ':' as the port.
# NOTE(review): `expr index` is a GNU extension; confirm the target image's
# expr supports it.
if [ ! -z ${URL_JDBC} ] && [ `expr index $URL_JDBC :` != 0 ]; then
export DB_PORT="${URL_JDBC##*:}"
fi
echo DB_PORT=$DB_PORT
# if deployed using helm, use the helm-generated configuration file.
if [ -d /opt/hemconfig ]; then
cp /opt/hemconfig/application.yaml "$main_path/conf/application.yaml"
else
# Otherwise patch the datasource settings into the shipped application.yaml.
sed -i "s|url:.*|url: jdbc:postgresql://$URL_JDBC:$DB_PORT/$DB_NAME|" "$main_path/conf/application.yaml"
sed -i "s|username:.*|username: $JDBC_USERNAME|" "$main_path/conf/application.yaml"
sed -i "s|password:.*|password: $JDBC_PASSWORD|" "$main_path/conf/application.yaml"
fi
# First address reported by `hostname -i` is taken as this service's IP.
export SERVICE_IP=`hostname -i | awk '{print $1}'`
echo SERVICE_IP=${SERVICE_IP}
# HOSTNAME (ip:port) prefers an externally supplied HOST_IP over SERVICE_IP.
if [ ! -z ${HOST_IP} ]; then
export HOSTNAME=${HOST_IP}:9102
else
export HOSTNAME=${SERVICE_IP}:9102
fi
if [ -z ${ENABLE_ENCRYPT} ]; then
export ENABLE_ENCRYPT=false
fi
echo ENABLE_ENCRYPT=$ENABLE_ENCRYPT
# Default keystore; replaced by the AAF-provisioned p12 when present.
KEY_PATH="$main_path/conf/holmes.keystore"
KEY_PASSWORD="holmes"
if [ -f "/opt/app/osaaf/local/org.onap.holmes-engine-mgmt.p12" ]; then
KEY_PATH="/opt/app/osaaf/local/org.onap.holmes-engine-mgmt.p12"
# The keystore password is the value after '=' on the first line of mycreds.prop.
KEY_PASSWORD=`head -n1 "/opt/app/osaaf/local/mycreds.prop" | cut -d= -f2`
fi
echo "KEY_PATH=$KEY_PATH"
echo "KEY_PASS=$KEY_PASSWORD"
#HTTPS Configurations
sed -i "s|key-store:.*|key-store: $KEY_PATH|" "$main_path/conf/application.yaml"
sed -i "s|key-store-password:.*|key-store-password: $KEY_PASSWORD|" "$main_path/conf/application.yaml"
# Toggle the ssl/key-store lines by (un)commenting them in application.yaml.
if [ "${ENABLE_ENCRYPT}"x = "true"x ]; then
sed -i "s|#\?ssl:|ssl:|" "$main_path/conf/application.yaml"
sed -i "s|#\?key-store|key-store|" "$main_path/conf/application.yaml"
sed -i "s|#\?key-store-password|key-store-password|" "$main_path/conf/application.yaml"
sed -i "s|#\?key-store-type|key-store-type|" "$main_path/conf/application.yaml"
else
sed -i 's|#\?ssl:|#ssl:|' "$main_path/conf/application.yaml"
sed -i "s|#\?key-store|#key-store|" "$main_path/conf/application.yaml"
sed -i "s|#\?key-store-password|#key-store-password|" "$main_path/conf/application.yaml"
sed -i "s|#\?key-store-type|#key-store-type|" "$main_path/conf/application.yaml"
fi
cat "$main_path/conf/application.yaml"
# Initialize the database, then launch the Spring Boot jar.
# ${URL_JDBC%:*} strips the trailing :port, leaving the DB host.
${RUNHOME}/initDB.sh "$JDBC_USERNAME" "$JDBC_PASSWORD" "$DB_NAME" "$DB_PORT" "${URL_JDBC%:*}"
# NOTE(review): picking column $9 of `ls -lt` is fragile across ls variants;
# confirm it always yields the jar filename in the target image.
JAR=`ls -lt $main_path/lib | grep -e "holmes-engine-.*jar$" | awk '{print $9}'`
"$JAVA" $JAVA_OPTS -jar "$main_path/lib/$JAR" -classpath "$class_path" --spring.config.location="$main_path/conf/application.yaml"
| true
|
1334fe6d375b8fc6a2906ef0b910736189298d3c
|
Shell
|
Deron-D/otus-linux
|
/lab07/httpd_log_analyze.sh
|
UTF-8
| 2,681
| 3.796875
| 4
|
[] |
no_license
|
#!/bin/sh
# Analyze an httpd access log and e-mail a usage report: top client IPs,
# top requested locations, and errors / return codes seen since the last
# run.  The number of already-processed lines is persisted in
# $PROCESSEDLINES so each run only reports on newly appended lines.
# Define variables:
# First found log file
LOGFILE=$(find / -type f -name "access-4560-644067.log" | sed '2,$d')
# Lockfile (prevents concurrent runs)
LOCKFILE=/tmp/httpd_log_analyze.pid
# Report file
REPORTFILE=/tmp/httpd_log_analyze_report.txt
# Number of TOP active IPs
X=10
# Number of TOP requested locations
Y=10
# File for save processed lines
PROCESSEDLINES=/tmp/httpd_log_analyze.tmp
# Email address for the report
EMAIL=vagrant@localhost.localdomain

# analyze_log_file INPUTFILE OUTPUTFILE
# Builds the report into OUTPUTFILE from the not-yet-processed lines of
# INPUTFILE and records the new processed-line count in $PROCESSEDLINES.
# Side effect: sets BEGINTIME/ENDTIME, used later in the mail subject.
analyze_log_file() {
    INPUTFILE=$1; OUTPUTFILE=$2
    # First/last timestamp in the log ([dd/Mon/yyyy:... with '[' stripped).
    BEGINTIME=$(head -n 1 "$INPUTFILE" | awk '{print $4}' | sed 's/\[//')
    ENDTIME=$(tail -n 1 "$INPUTFILE" | awk '{print $4}' | sed 's/\[//')
    SKIPLINES=${SKIPLINES:-0}
    # Fix: resume from the state file that is actually written at the end of
    # this function.  The original tested for a never-created "last_run.tmp",
    # so SKIPLINES stayed 0 and every report re-covered the whole file.
    if [ -f "$PROCESSEDLINES" ]; then
        SKIPLINES=$(cat "$PROCESSEDLINES")
        SKIPLINES=$((SKIPLINES + 1))
    fi
    echo "=============================================" > "$OUTPUTFILE"
    echo "HTTPD usage report " >> "$OUTPUTFILE"
    echo "Analyze period is from $BEGINTIME to $ENDTIME" >> "$OUTPUTFILE"
    echo "=============================================" >> "$OUTPUTFILE"
    echo "$X top IP addresses" >> "$OUTPUTFILE"
    tail -n +"$SKIPLINES" "$INPUTFILE" | awk '{print $1}' | sort | uniq -c | sort -rn | awk '{print $1, $2}' | head -$X >> "$OUTPUTFILE"
    echo "---------------------------------------------" >> "$OUTPUTFILE"
    echo "$Y top requested addresses" >> "$OUTPUTFILE"
    # FPAT treats quoted request strings as single fields (GNU awk).
    # Fix: write to $OUTPUTFILE like every other section (was ">> $2").
    tail -n +"$SKIPLINES" "$INPUTFILE" | awk '{print $6}' FPAT='[^ ]*|"[^"]*"' | awk '{if($2 != ""){print $2}}'| sort | uniq -c | sort -rn | awk '{print $1, $2}' | head -$Y >> "$OUTPUTFILE"
    echo "---------------------------------------------" >> "$OUTPUTFILE"
    echo "All errors since the last launch" >> "$OUTPUTFILE"
    tail -n +"$SKIPLINES" "$INPUTFILE" | awk '{print $9}' | grep -E "[4-5]{1}[0-9][[:digit:]]" | sort | uniq -c | sort -rn | awk '{print $1, $2}' >> "$OUTPUTFILE"
    echo "---------------------------------------------" >> "$OUTPUTFILE"
    echo "A list of all return codes indicating their number since the last launch" >> "$OUTPUTFILE"
    tail -n +"$SKIPLINES" "$INPUTFILE" | awk '{print $7}' FPAT='[^ ]*|"[^"]*"'| sort | uniq -c | sort -rn | awk '{print $1, $2}' >> "$OUTPUTFILE"
    echo "---------------------------------------------" >> "$OUTPUTFILE"
    # Remember how many lines have been processed for the next run.
    wc -l "$INPUTFILE" | awk '{print $1}' > "$PROCESSEDLINES"
}

# Atomically create the lockfile via noclobber (set -C).
# Fix: redirect to /dev/null -- the original wrote to the misspelled
# "/dev/nul", creating a stray file (or failing) instead of discarding.
if ( set -C; echo "$$" > "$LOCKFILE" ) 2> /dev/null;
then
    # Clean up the lock on any exit path.
    # Fix: signal names without the SIG prefix are the portable /bin/sh form.
    trap 'rm -f "$LOCKFILE"; exit $?' HUP INT TERM EXIT
    analyze_log_file "$LOGFILE" "$REPORTFILE"
    # BEGINTIME/ENDTIME were set (globally) by analyze_log_file above.
    mail -s "HTTPD usage report from $BEGINTIME to $ENDTIME" "$EMAIL" < "$REPORTFILE"
    rm -f "$LOCKFILE"
    # unset trap
    trap - HUP INT TERM EXIT
else
    echo "Failed to acquire lockfile: $LOCKFILE."
    echo "Held by $(cat "$LOCKFILE")"
fi
| true
|
015051906fd63a3d183a13b7c95cc9ed27b4d15e
|
Shell
|
anshu0612/Excel-Killer-E-Learning-App
|
/entrypoint.sh
|
UTF-8
| 736
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh -l
# Package the working directory as a Lambda zip and deploy it with SAM.
# Required environment variables:
#   BUCKET_NAME - S3 bucket for the packaged template
#   STACK_NAME  - CloudFormation stack name to deploy
# Fail early with a clear message if the required variables are missing
# (previously an empty value was silently passed to sam).
: "${BUCKET_NAME:?BUCKET_NAME must be set}"
: "${STACK_NAME:?STACK_NAME must be set}"
# remove dupes in the case where we are deploying to amazon from our local machines
rm -f lambda-deploy.zip
zip -r ./lambda-deploy.zip *
#sam build
sam package --output-template-file \
    packaged.yaml --s3-bucket "$BUCKET_NAME"
# Exit 0 on a successful deploy, 1 otherwise.  (The original also had an
# unreachable trailing `exit 0` after this if/else; it has been removed.)
if sam deploy --template-file packaged.yaml \
    --region us-east-1 --capabilities \
    CAPABILITY_IAM --stack-name "$STACK_NAME"
then
    exit 0
else
    exit 1
fi
# export command sets environment variable for Bash
# export AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
# export AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
# export AWS_DEFAULT_REGION=$AWS_DEFAULT_REGION
# export AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN
# pwd, ls -ls. used for debugging purposes
# pwd
| true
|
9d26c7e507b9d43f55ecefdc45085cd0efce6582
|
Shell
|
ohmyzsh/ohmyzsh
|
/lib/history.zsh
|
UTF-8
| 1,478
| 3.8125
| 4
|
[
"MIT"
] |
permissive
|
## History wrapper
# Wraps the zsh `fc` builtin: -c clobbers the history file, -l lists, and a
# bare call lists all events starting from 1.
function omz_history {
local clear list
# -E: don't stop at the first unrecognized option; c/l land in $clear/$list.
zparseopts -E c=clear l=list
if [[ -n "$clear" ]]; then
# if -c provided, clobber the history file
# `>|` truncates even when the CLOBBER option is off.
echo -n >| "$HISTFILE"
# fc -p pushes a fresh history list so the in-memory one is reset too.
fc -p "$HISTFILE"
echo >&2 History file deleted.
elif [[ -n "$list" ]]; then
# if -l provided, run as if calling `fc' directly
builtin fc "$@"
else
# unless a number is provided, show all history events (starting from 1)
[[ ${@[-1]-} = *[0-9]* ]] && builtin fc -l "$@" || builtin fc -l "$@" 1
fi
}
# Timestamp format
# Map the user's HIST_STAMPS preference onto the matching `fc` time flag.
case ${HIST_STAMPS-} in
"mm/dd/yyyy") alias history='omz_history -f' ;;
"dd.mm.yyyy") alias history='omz_history -E' ;;
"yyyy-mm-dd") alias history='omz_history -i' ;;
"") alias history='omz_history' ;;
*) alias history="omz_history -t '$HIST_STAMPS'" ;;
esac
## History file configuration
# Only raise the limits; user-set larger values are preserved.
[ -z "$HISTFILE" ] && HISTFILE="$HOME/.zsh_history"
[ "$HISTSIZE" -lt 50000 ] && HISTSIZE=50000
[ "$SAVEHIST" -lt 10000 ] && SAVEHIST=10000
## History command configuration
setopt extended_history       # record timestamp of command in HISTFILE
setopt hist_expire_dups_first # delete duplicates first when HISTFILE size exceeds HISTSIZE
setopt hist_ignore_dups       # ignore duplicated commands history list
setopt hist_ignore_space      # ignore commands that start with space
setopt hist_verify            # show command with history expansion to user before running it
setopt share_history          # share command history data
| true
|
a8bc750fd687b388a3899ad4fde00a3b8aafd1ae
|
Shell
|
florez/PhenoAnalyzer
|
/PhenoCodes/run_sim.sh
|
UTF-8
| 3,411
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
# This script sets up the conditions for the MadGraph simulation.
# A test counter is created to verify that all simulations are created.
# Run it from the directory where the simulation folders should end up.
COUNTER=0
echo El contador es $COUNTER
# Build the MadGraph command file: import the mssm model, generate the
# process, create an auto-named output directory, then exit MadGraph.
echo "import model mssm" > mgcode
echo "generate p p > ta1+ ta1- QCD=1" >> mgcode
echo "output auto" >> mgcode
echo "exit" >> mgcode
# Loop over all the ParamCards.  Write here the directory that contains the
# ParamCards.
for i in $(ls /home/jgodoy/PhenoThesisProject/ParamCards); do
# This command runs MadGraph.  Write the path to the MadGraph executable.
# It takes as input a file "mgcode" which must be modified according to the
# collision to be emulated: the file imports the model ("import mssm"),
# generates the simulation (e.g. "generate p p > ta1+ ta1-"), creates an
# output ("output auto") and then leaves MadGraph ("exit").  The "&" runs
# MadGraph in parallel.
./../../Programs/MG5_aMC_v2_2_3/bin/mg5_aMC /home/jgodoy/PhenoThesisProject/Simulations/mg_files/mgcode &
let COUNTER=COUNTER+1
# Print the simulations created so far.
echo El contador es $COUNTER
done
wait
# This part takes the data from the ParamCards and integrates it into the
# samples for the simulation.  Each simulation folder is also renamed after
# its corresponding ParamCard.
# A counter verifies the ParamCards that have been processed.
COUNTER2=0
# Loop over the ParamCards.  Write here the location of the ParamCards.
for j in $(ls /home/jgodoy/PhenoThesisProject/ParamCards); do
# Integrate the data of each ParamCard into the simulation: write the path
# of each ParamCard (note this is a loop over "$j"), then copy that data
# into the folder created for this simulation by "create_sim.sh".
cat /home/jgodoy/PhenoThesisProject/ParamCards/$j > PROC_mssm_$COUNTER2/Cards/param_card.dat
# Rename the folder so it matches its corresponding ParamCard.
mv PROC_mssm_$COUNTER2 ${j/.dat/}
# Print the finished samples.
echo EL contador para cambiar es $COUNTER2
let COUNTER2=$COUNTER2+1
done
# This part runs the simulation and creates the samples for each ParamCard.
# It must live in the same directory as the simulation folders.  Run it
# after name_sim.sh.
# Loop over the ParamCards.  Write here the directory where they live.
COUNTER=1
for i in $(ls /home/jgodoy/PhenoThesisProject/ParamCards); do
# Check that the ParamCard's folder was created.
if test -e ${i/.dat/}; then
# Create a command file for MadGraph.
# Launch the simulation.
echo 'launch' ${i/.dat/} > mgcode_$COUNTER
# Use Pythia and CMS.
echo '1' >> mgcode_$COUNTER
echo '2' >> mgcode_$COUNTER
# Continue.
echo '0' >> mgcode_$COUNTER
echo '0' >> mgcode_$COUNTER
# Run MadGraph.  Write here the path where the MadGraph executable lives.
./../../Programs/MG5_aMC_v2_2_3/bin/mg5_aMC mgcode_$COUNTER &
else
# If the simulation was not created, print a message.
echo Para el ParamCard $i no se ha creado la simulación
fi
let COUNTER=$COUNTER+1
done
wait
echo "Se ejecutaron las simulaciones"
# Delete the execution files.  Run after run_sim.sh.
rm mgcode*
rm py*
| true
|
31f71a297eedd6f0504a750906c3656b6023a600
|
Shell
|
mxm0z/armadillo
|
/set_rules_firewalld.sh
|
UTF-8
| 518
| 2.9375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# Author = Mxmzs
# License = MIT
#
# Open a port in firewalld, enable masquerading, and add a NAT rule for the
# 10.8.0.0/24 (VPN) subnet out of the default-route interface.
#
# Usage: set_rules_firewalld.sh [PORT [PROTOCOL]]
# PORT/PROTOCOL may also be supplied via the environment variables $port
# and $protocol (the only mechanism the original script offered).
#
# Generalization: accept positional arguments, falling back to the
# pre-existing environment-variable interface (backward compatible).
port=${1:-${port-}}
protocol=${2:-${protocol-}}
# Fail early instead of feeding empty values to firewall-cmd -- the original
# never defined or validated these variables.
if [ -z "$port" ] || [ -z "$protocol" ]; then
    echo "Usage: $0 PORT PROTOCOL (or export \$port and \$protocol)" >&2
    exit 1
fi
echo "Opening $port/$protocol"
firewall-cmd --add-port "$port"/"$protocol"
firewall-cmd --permanent --add-port "$port"/"$protocol"
echo "Adding masquerade"
firewall-cmd --add-masquerade
firewall-cmd --permanent --add-masquerade
# NOTE(review): $(NF-2) picks a field from `ip route get` whose position
# varies across iproute2 versions; confirm it yields what `-o` expects
# (an interface name) rather than the source IP.
local_ip=$(ip route get 8.8.8.8 | awk 'NR==1 {print $(NF-2)}')
firewall-cmd --permanent --direct --passthrough ipv4 -t nat -A POSTROUTING -s 10.8.0.0/24 -o "$local_ip" -j MASQUERADE
echo "Reloading FirewallD service"
firewall-cmd --reload
| true
|
0bb39c0b1f6c059d120557795ca491059e247ca4
|
Shell
|
gmarsay/rpki-rtr-client
|
/rtr_client/simple-connect-ssh.sh
|
UTF-8
| 436
| 2.921875
| 3
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
:
#
# Open the RFC 8210 RTR transport over SSH to Cloudflare's public server.
# FYI type password at prompt - the username/password as per RFC8210 is rpki/rpki
#
readonly RTR_HOST="rtr.rpki.cloudflare.com"
readonly RTR_PORT="8283"
# -N Do not execute a remote command. This is useful for just forwarding ports.
# -T Disable pseudo-terminal allocation.
# Replace this shell with the ssh client; password auth is forced on.
exec ssh \
    -N \
    -T \
    -o StrictHostKeyChecking=Yes \
    -o PasswordAuthentication=Yes \
    -o PreferredAuthentications=password \
    -l rpki \
    -p "${RTR_PORT}" \
    "${RTR_HOST}"
| true
|
93432a926931441acb8b79115af2c85447bd1004
|
Shell
|
qlycool/azure-devops-examples
|
/devops-springboot-web/build.sh
|
UTF-8
| 2,989
| 3.5
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Spring Boot application build script. Note that there are several
# @SpringBootApplication classes in this project, therefore there are
# several Maven POM files with specify their <mainClass> and Spring Profile.
# Chris Joakim, 2019/11/05
arg_count=$#
skip_tests=true

# Compile only, using the default POM.
mvn_clean_compile() {
    echo "mvn clean compile function"
    mvn clean compile
}

# Package the web app (pom_web.xml, 'web' Spring profile).
mvn_package_web_app() {
    create_build_resources
    echo "building web app with pom_web.xml"
    mvn -f pom_web.xml -Dmaven.test.skip=$skip_tests clean package
    echo "then run: java -Xmx200m -Dspring.profiles.active=web -jar target/airports-web-0.0.1.jar"
}

# Package the no-op app (pom_noop.xml, 'noop' Spring profile).
mvn_package_noop_app() {
    create_build_resources
    echo "building web app with pom_noop.xml"
    mvn -f pom_noop.xml -Dmaven.test.skip=$skip_tests clean package
    echo "then run: java -Xmx200m -Dspring.profiles.active=noop -jar target/airports-noop-0.0.1.jar"
}

# Package the CosmosDB SQL-API app (pom_cosmossql.xml, 'cosmossql' profile).
mvn_package_cosmossql_app() {
    create_build_resources
    echo "building web app with pom_cosmossql.xml"
    mvn -f pom_cosmossql.xml -Dmaven.test.skip=$skip_tests clean package
    echo "then run: java -Xmx200m -Dspring.profiles.active=cosmossql -jar target/airports-cosmossql-0.0.1.jar --find-by-pk BDL"
}

# Record build timestamp and user as resources baked into the jar.
create_build_resources() {
    date -u > src/main/resources/build_date.txt
    whoami > src/main/resources/build_user.txt
}

display_help() {
    echo "script options:"
    echo "  ./build.sh compile"
    echo "  ./build.sh package_web"
    echo "  ./build.sh package_noop"
    echo "  ./build.sh package_cosmossql"
    echo "  ./build.sh package_all"
    echo "  ./build.sh container_web"
    echo "  ./build.sh mvn_tree"
    echo "  ./build.sh mvn_classpath"
    echo "  ./build.sh help"
}

if [ $arg_count -gt 0 ]
then
    # case dispatch replaces the original run of `if [ $1 == ... ]` tests:
    # quoting "$1" fixes breakage on arguments containing whitespace, and
    # since at most one of the original tests could match, behavior is
    # unchanged (unknown commands still fall through silently).
    case "$1" in
        help)
            display_help
            ;;
        compile)
            mvn_clean_compile
            ;;
        package_web)
            mvn_package_web_app
            ;;
        package_noop)
            mvn_package_noop_app
            ;;
        package_cosmossql)
            mvn_package_cosmossql_app
            ;;
        package_all)
            mvn_package_web_app
            mvn_package_noop_app
            mvn_package_cosmossql_app
            ;;
        container_web)
            mvn_package_web_app
            echo "docker build -t cjoakim/azure-springboot-airports-web ."
            docker build -t cjoakim/azure-springboot-airports-web .
            ;;
        mvn_tree)
            echo "mvn dependency:tree"
            mvn dependency:tree
            ;;
        mvn_classpath)
            echo "mvn dependency:build-classpath"
            mvn dependency:build-classpath > mvn-build-classpath.txt
            # One classpath entry per line, then list the repo jars sorted.
            tr ":" "\n" < mvn-build-classpath.txt > mvn-build-classpath-lines.txt
            grep repository mvn-build-classpath-lines.txt | sort
            rm mvn-build-classpath*
            ;;
    esac
else
    display_help
fi
echo 'done'
| true
|
b2cb170a9225a2a4e2a88a855953f3dc78146673
|
Shell
|
huang195/actions-install-iter8
|
/install.sh
|
UTF-8
| 1,122
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/sh
# GitHub-Actions entrypoint: write the supplied kubeconfig, sanity-check
# cluster access, then install the requested Iter8 version.
#############################################################
# Check for input parameters
#############################################################
if [ -z "$INPUT_KUBECONFIG" ]; then
    echo "KUBECONFIG input parameter is not set, exiting..."
    exit 1
fi
if [ -z "$INPUT_ITER8_VERSION" ]; then
    echo "ITER8 VERSION input parameter is not set, exiting..."
    exit 1
fi
#############################################################
# Create Kubernetes configuration to access the cluster
#############################################################
mkdir -p ~/.kube
echo "$INPUT_KUBECONFIG" > ~/.kube/config
# Security fix: the kubeconfig contains cluster credentials.  Restrict its
# permissions and do NOT print it -- the original `cat ~/.kube/config`
# leaked the credentials into the public build log.
chmod 600 ~/.kube/config
#############################################################
# Sanity check
#############################################################
kubectl get pods --all-namespaces
#############################################################
# Download and install Iter8
#############################################################
# NOTE(review): piping a remote script into sh executes unreviewed code;
# acceptable for this action but worth pinning by checksum.
curl -L -s "https://raw.githubusercontent.com/iter8-tools/iter8/${INPUT_ITER8_VERSION}/install/install.sh" | /bin/sh -
kubectl -n iter8 get pods
| true
|
72898ce2992abf1bc6bef3fd48e776852bb34e70
|
Shell
|
sigoa/pybm
|
/aaMenuXOR.sh
|
UTF-8
| 5,062
| 3.328125
| 3
|
[] |
permissive
|
#!/bin/bash
########################################################################################
# usage example: menu pwd whoami ls ps
# giving you a menu with 4 options to execute in bash shell / Konsole
# call in bash as: . menu1 # if menu1 is the file name with this script in it
# usage e.g.:
# menu ls "ls -l" "echo list dir ; clear ; ls -la " clear
# q, Q or 0 or empty_string i.e. ENTER-key alone always exits the menu
# click-launch from Dolphin file-manager in KDE: associate shell script open-action command: konsole -e %f
# under right-cick, FILE TYPE OPTIONS, ... advanced option, do not tag "run in Terminal"
# so you get a "open action" rather than an "execute action" , but it does what you expect.
# to set as a bash lib func : copy the text between the upper and lower ###### lines into your ~/.bashrc file
# menu CMD...
# Print each argument as a numbered entry, read the user's choice, and
# execute (eval) the chosen argument in the current shell.
# Entries of the form "label:command" show only the label ("${item%%:*}").
# q, Q, 0 or a bare ENTER exits; anything non-numeric is rejected.
menu()
{
local IFS=$' \t\n'
local num n=1 opt item # cmd
clear
## Use default setting of IFS, Loop though the command-line arguments -- $(()) no SPACES!
echo
for item
do
printf " %3d. %s\n" "$n" "${item%%:*}"
n=$(($n + 1))
done
## If there are fewer than 10 items, set option to accept key without ENTER
echo
if [ $# -lt 10 ]
then
opt=-sn1
else
opt=
fi
read -p "ENTER quits menu - please choose 1 to $# ==> " $opt num ## Get response from user
## Check that user entry is valid
case $num in
[qQ0] | "" ) clear ; return ;; ## q, Q or 0 or "" exits
*[!0-9]* | 0* ) ## invalid entry
printf "\aInvalid menu choice : %s\n" "$num" >&2
return 1
;;
esac
echo
if [ "$num" -le "$#" ] ## Check that number is <= to the number of menu items
then
# ${!num} is indirect expansion: positional parameter number $num.
# NOTE(review): eval on the full entry means entries must be trusted input.
eval ${!num} ## eval "${!num#*:}" # Execute it using indirect expansion, breaking stuff :-(
else
printf "\aInvalid menu choice: %s\n" "$num" >&2
return 1
fi
}
##############################################################################################
#-----------------------------------------------------------
# "Here-document" containing nice standard keys.dat with 3 chans and 1 nuked ID / pml , dropped into thwe cwd, i.e. .
# note that a nuked address is kind of useless , since its key was published. It still is kinda broadcast fun though.
# You have no privacy using a nuked key -
# much like you don't have privacy while using a key which someone has stolen from you.
# Write the (placeholder) here-document into omegaXOR.py in the cwd.
# The quoted 'EOFherefile' delimiter suppresses all expansion inside.
(
cat <<'EOFherefile'
# omega
EOFherefile
) > omegaXOR.py
#-----------------------------------------------------------
#local bbb= '88'
#echo 88 is $bbb
# Post every file in ./payload as an attachment via the Bitmessage API.
picpost()
{
    local payload_item
    for payload_item in ./payload/*; do
        python2 ./BM-API-client.py -e"${payload_item}" -s 'pic sent via API : ' --es --ttl='60*24*1' -uUSER0000 -pPASSWORD0000
    done
}
# Send the encrypted archive data.zip to the "general" chan via the API.
binpost()
{
# recipient: [chan] general -t BM-2cW67GEKkHGonXKZLCzouLLxnLym3azS8r
python2 ./BM-API-client.py -m"data.zip" -s 'zip sent via API : ' --es --ttl='60*24*1' -uUSER0000 -pPASSWORD0000 -t BM-2cW67GEKkHGonXKZLCzouLLxnLym3azS8r
}
# Post every file in ./payload to the "Politics" chan via the API.
picpostPolitics()
{
    # Politics -t BM-2cVE8v7L4qb14R5iU2no9oizkx8MpuvRZ7
    local pic
    for pic in ./payload/*; do
        python2 ./BM-API-client.py -e"${pic}" -s 'pic sent via API : ' --es --ttl='60*24*1' -uUSER0000 -pPASSWORD0000 -t BM-2cVE8v7L4qb14R5iU2no9oizkx8MpuvRZ7
    done
}
# if you wanna read ure own stuff.
# if set up as user B already, then
# python2 ./omegaXOR.py -d ./data.zip
# is sufficient; no need to changeover from A to B
# Switch the local key material from "system A" to "system B", then decrypt
# data.zip with omegaXOR.py and list the directory by change time so the
# freshly decrypted file appears first.
omegaDecryp()
{
# keys -> keysa (backup), then clone to keysb and back to keys.
# NOTE(review): cp without -r fails if "keys" is a directory -- confirm
# "keys" is a regular file here.
mv keys keysa
cp keysa keysb
cp keysb keys
rm keys/config
touch keys/config
echo b >> keys/config # switch to "system B"
python2 ./omegaXOR.py -d ./data.zip
clear
ls -lhg --sort=time --time=ctime .
echo
echo "run some editor (kate) to read the decrypted file now "
}
# useful in click-launch to add ; read WAITNOW # which will wait for keypress before closing Konsole
# now actually using the menu:
# modify it to your liking note you are then on MASTER branch , not on the newer ver. 0.6.3 branch
# run through the options 1 2 3 4 in this order: 1 2 3 4
# Build the actual menu: each quoted argument below is one selectable entry
# that menu() will eval in the current shell when its number is chosen.
# Blank 'echo " "' entries act as visual separators.
menu \
'echo " create a one time pad, size 1 MegaByte " ; python2 ./omegaXOR.py -g 1 ' \
'echo " edit Message and crypt it (as user A) " ; kate msg ; python2 ./omegaXOR.py -e msg ' \
'echo " " ' \
'echo " decrypt data.zip and read (as user B) " ; omegaDecryp ' \
'echo " " ' \
'echo " launch BM " ; ./bitmessagemain.py ' \
'echo " send crypted data.zip via bitmessage " ; binpost ' \
'echo " post all pics in dir ./payload/* " ; picpost ' \
'echo '
| true
|
b163115fcd47a2b2e7d9e3ccb50cc48afa0207cc
|
Shell
|
portpaw/dotfiles
|
/.zshrc
|
UTF-8
| 1,540
| 3
| 3
|
[] |
no_license
|
# default user
# export DEFAULT_USER="jmooring"
export DEFAULT_USER="jon"
# path modifications
export PATH="$HOME/.bin:/usr/local/bin:/usr/bin:/usr/sbin:/bin:/sbin:$PATH"
export MANPATH="/usr/local/man:$MANPATH"
# default pager (-R lets less pass raw color escapes through)
export PAGER="less -R"
# command aliases
alias c="code"
alias cdp="cd $HOME/Projects"
alias gfrom="g fetch && g rom"
alias rm="rm -i"
alias nvim="neovide"
alias v="nvim"
alias vc="c $HOME/.config/nvim/init.lua"
alias zc="c $HOME/.zshrc"
alias rand="openssl rand -base64"
# default editor: terminal vim over SSH, VS Code locally
if [[ -n $SSH_CONNECTION ]]; then
export EDITOR="vim"
else
export EDITOR="code"
fi
# oh-my-zsh
export ZSH="$HOME/.oh-my-zsh"
export ZSH_THEME="agnoster"
plugins=(
common-aliases
docker
git
macos
npm
vscode
yarn
zsh-autosuggestions
zsh-syntax-highlighting
)
source "$ZSH/oh-my-zsh.sh"
# fzf
[ -f "$HOME/.fzf.zsh" ] && source "$HOME/.fzf.zsh"
# nvm
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion"
autoload -U add-zsh-hook
# Auto-switch node versions on cd: if the directory tree has an .nvmrc,
# install/use that version; otherwise fall back to the nvm default.
load-nvmrc() {
local node_version="$(nvm version)"
local nvmrc_path="$(nvm_find_nvmrc)"
if [ -n "$nvmrc_path" ]; then
local nvmrc_node_version=$(nvm version "$(cat "${nvmrc_path}")")
if [ "$nvmrc_node_version" = "N/A" ]; then
nvm install
elif [ "$nvmrc_node_version" != "$node_version" ]; then
nvm use
fi
elif [ "$node_version" != "$(nvm version default)" ]; then
echo "Reverting to nvm default version"
nvm use default
fi
}
# Run on every directory change, and once for the shell's starting directory.
add-zsh-hook chpwd load-nvmrc
load-nvmrc
| true
|
725f331dd98824df638219dbbd4bda22960ceaa2
|
Shell
|
jcampanaaxa/typescript-test
|
/scripts/test.sh
|
UTF-8
| 350
| 2.921875
| 3
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
#!/bin/bash
# Run lint and unit tests.
# Usage: test.sh [CONCURRENCY] [FAILFAST_FLAG]
#   CONCURRENCY   - test concurrency (default 4)
#   FAILFAST_FLAG - anything other than "false" aborts on first failure
DEFAULT_FAILFAST_FLAG=false
DEFAULT_CONCURRENCY=4
CONCURRENCY=${1:-$DEFAULT_CONCURRENCY}
FAILFAST_FLAG=${2:-$DEFAULT_FAILFAST_FLAG}
# Replaces the original `eval $FAILFAST` command-string pattern with a
# plain function: same output/exit behavior, no string evaluation.
on_fail() {
    if [ "$FAILFAST_FLAG" != false ]; then
        echo 'FAILED!'
        exit 1
    else
        echo ''
    fi
}
npm run eslint -- . || on_fail
npm run units concurrency="$CONCURRENCY" || on_fail
| true
|
51b62c251ceaea544ef62ab4185e86176b899e8f
|
Shell
|
bennes1/codenames
|
/appgo.sh
|
UTF-8
| 821
| 3.84375
| 4
|
[] |
no_license
|
#!/bin/bash
# Helper for exec'ing into the codenames docker containers.
# Usage: appgo.sh {dbshell|dbprompt|webshell} [args...]
case $1 in
dbshell)
    container="codenames_mong"
    cmd="bash"
    ;;
dbprompt)
    container="codenames_mong"
    cmd="mongosh -u admin -p pass"
    ;;
webshell)
    container="codenames_frontend"
    cmd="bash"
    ;;
esac
if [ -z ${container+x} ]; then
    echo "Command $1 not recognized."
else
    # First whitespace-separated column of `docker ps` is the container ID.
    id=$(docker ps | grep "$container" | cut -d' ' -f1)
    case $1 in
    dbshell)
        shift
        # Fix: the original tested [ -z "{$1}" ] -- the literal braces made
        # the string always non-empty, so this branch was dead (the else
        # branch happened to run the same command when no args were given).
        if [ -z "$1" ]; then
            docker exec -it "$id" bash
        else
            # A leading "bash" argument is consumed; remaining args pass through.
            if [ "$1" == "bash" ]; then
                shift
            fi
            # If quoted, docker interprets it as a file
            docker exec -it "$id" bash $@
        fi
        ;;
    webshell)
        shift
        docker exec -it "$id" sh "$@"
        ;;
    dbprompt)
        # (Removed the redundant re-assignment of $cmd; it was already set
        # by the first case block above.)
        if [ -n "$2" ]; then
            # Pipe the given file into the mongo prompt inside the container.
            docker exec -it "$id" bash -c "cat $2 | $cmd"
        else
            # $cmd intentionally unquoted: it must word-split into command+args.
            docker exec -it "$id" $cmd
        fi
        ;;
    esac
fi
| true
|
b1dc739f71e3b55b86978710bad1d9d2293fd182
|
Shell
|
t50504/CLASHanalyst
|
/suite/bin/fun_comp/compare.sh
|
UTF-8
| 3,417
| 3.078125
| 3
|
[] |
no_license
|
# Compare hybrid-pair predictions from three pipelines (pir / hyb / clan):
# intersect their sorted outputs, compute per-pipeline hit counts and
# percentages against the original hybrid read set, and emit result.tab
# plus plots/statistics via compare.py.
shell_folder=$(cd "$(dirname "$0")";pwd)
if [ ! -e "plot" ];then
echo mkdir plot
mkdir plot
fi
name=$1
python ${shell_folder}/compare.py select $name
echo select done
pir_out=pir.tab
hyb_out=hyb.tab
clan_out=clan.tab
hybrid_file=/home/bba753951/Django/master_project/media/uploadfile/$name/hyb_file_comp.fasta
# Byte-wise collation so `sort` and `comm` agree on ordering.
export LC_ALL=C
sort -u $pir_out |sort -t$'\t' -k 1,1 -o $pir_out
sort -u $hyb_out |sort -t$'\t' -k 1,1 -o $hyb_out
sort -u $clan_out |sort -t$'\t' -k 1,1 -o $clan_out
# comm -12 keeps only lines common to both (files must be sorted).
LC_ALL=C comm -12 $pir_out $clan_out > pir_clan.tab
LC_ALL=C comm -12 $pir_out $hyb_out > pir_hyb.tab
LC_ALL=C comm -12 $hyb_out $clan_out > hyb_clan.tab
LC_ALL=C comm -12 hyb_clan.tab $pir_out > pir_hyb_clan.tab
# Pair counts: total lines per (intersection) file.
declare -i pir_count=$(wc -l $pir_out |cut -d" " -f 1)
declare -i hyb_count=$(wc -l $hyb_out |cut -d" " -f 1)
declare -i clan_count=$(wc -l $clan_out |cut -d" " -f 1)
declare -i pir_clan_count=$(wc -l pir_clan.tab |cut -d" " -f 1)
declare -i hyb_clan_count=$(wc -l hyb_clan.tab |cut -d" " -f 1)
declare -i pir_hyb_count=$(wc -l pir_hyb.tab |cut -d" " -f 1)
declare -i pir_hyb_clan_count=$(wc -l pir_hyb_clan.tab |cut -d" " -f 1)
# FASTA has 2 lines per record, so record count = line count / 2.
declare -i hybrid_count=$(expr $(wc -l ${hybrid_file}|cut -d" " -f 1) / 2)
echo hybrid_count: $hybrid_count
# Hybrid counts: unique values of column 1 per file.
declare -i pir_brid=$(cut -d$'\t' -f 1 $pir_out|sort -u|wc -l)
declare -i hyb_brid=$(cut -d$'\t' -f 1 $hyb_out|sort -u|wc -l)
declare -i clan_brid=$(cut -d$'\t' -f 1 $clan_out|sort -u|wc -l)
declare -i pir_clan_brid=$(cut -d$'\t' -f 1 pir_clan.tab|sort -u|wc -l)
declare -i pir_hyb_brid=$(cut -d$'\t' -f 1 pir_hyb.tab|sort -u|wc -l)
declare -i hyb_clan_brid=$(cut -d$'\t' -f 1 hyb_clan.tab|sort -u|wc -l)
declare -i pir_hyb_clan_brid=$(cut -d$'\t' -f 1 pir_hyb_clan.tab|sort -u|wc -l)
# percent
# bc with scale=2 gives two-decimal percentages of the hybrid read set.
cent_pir=$(echo "scale=2;${pir_brid}*100/${hybrid_count}"|bc)
cent_hyb=$(echo "scale=2;${hyb_brid}*100/${hybrid_count}"|bc)
cent_clan=$(echo "scale=2;${clan_brid}*100/${hybrid_count}"|bc)
cent_pir_clan=$(echo "scale=2;${pir_clan_brid}*100/${hybrid_count}"|bc)
cent_pir_hyb=$(echo "scale=2;${pir_hyb_brid}*100/${hybrid_count}"|bc)
cent_hyb_clan=$(echo "scale=2;${hyb_clan_brid}*100/${hybrid_count}"|bc)
cent_pir_hyb_clan=$(echo "scale=2;${pir_hyb_clan_brid}*100/${hybrid_count}"|bc)
# Assemble the fixed-width summary table.
echo "File Name:"$name > result.tab
echo 'original hybrid sequences(after trimmed and unique):'$hybrid_count >> result.tab
echo "">>result.tab
printf "%-15s %15s %15s %15s\n" 'pipeline' "pair_count" "hybrid_count" "hybrid_hit(%)" >> result.tab
printf "%-15s %15s %15s %15.2f\n" 'pir' $pir_count $pir_brid $cent_pir >> result.tab
printf "%-15s %15s %15s %15.2f\n" 'hyb' $hyb_count $hyb_brid $cent_hyb>> result.tab
printf "%-15s %15s %15s %15.2f\n" 'clan' $clan_count $clan_brid $cent_clan>> result.tab
printf "%-15s %15s %15s %15.2f\n" 'pir_clan' $pir_clan_count $pir_clan_brid $cent_pir_clan>> result.tab
printf "%-15s %15s %15s %15.2f\n" 'pir_hyb' $pir_hyb_count $pir_hyb_brid $cent_pir_hyb>> result.tab
printf "%-15s %15s %15s %15.2f\n" 'hyb_clan' $hyb_clan_count $hyb_clan_brid $cent_hyb_clan>> result.tab
printf "%-15s %15s %15s %15.2f\n" 'pir_hyb_clan' $pir_hyb_clan_count $pir_hyb_clan_brid $cent_pir_hyb_clan>> result.tab
echo -e "\n\n\n" >> result.tab
echo comm done
# Post-processing: plots, box plot with means, and p-values.
python ${shell_folder}/compare.py draw $name
echo draw done
python ${shell_folder}/compare.py boxplot $name> mean.txt
echo boxplot done
python ${shell_folder}/compare.py pvalue $name
echo pvalue done
| true
|
343f6b7dcff5af516511d3bc807a659321c727e5
|
Shell
|
swissmanu/barefoot
|
/.travis/before_install.sh
|
UTF-8
| 702
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Travis before_install step: install NaturalDocs and node-jscoverage.

# Print a four-line framed banner; $1 is the middle (phase) line.
banner() {
    echo '######################################'
    echo '# BEFORE INSTALL #'
    echo "$1"
    echo '######################################'
}

banner '# - START - #'

echo '---- Installing NaturalDocs ----'
sudo apt-get update
sudo apt-get install naturaldocs
sudo ln -s /usr/bin/naturaldocs /usr/bin/NaturalDocs

echo '---- Installing node-jscoverage ----'
cd /tmp
git clone git://github.com/visionmedia/node-jscoverage.git
cd node-jscoverage
sudo ./configure
sudo make
sudo make install

banner '# - FINISH - #'
| true
|
cf0a821b7873937fd942124d9cf94c947cc148e9
|
Shell
|
killall5/devops-2020-05
|
/pre-ansible/distribute.sh
|
UTF-8
| 194
| 2.96875
| 3
|
[] |
no_license
|
#!/bin/bash
# Copy setup.sh to each host and run it there with sudo.
# Fix: `read -s -p` is a bash extension, so the shebang is now bash
# (the original declared #!/bin/sh).
# NOTE(review): HOSTS lists "ubuntu.local" twice -- confirm that is intended.
HOSTS="ubuntu.local ubuntu.local"
read -r -s -p "Enter sudo password: " password
# -s suppresses the trailing newline; emit one so output stays tidy.
echo
# $HOSTS intentionally unquoted: it must word-split into individual hosts.
for host in $HOSTS; do
    scp setup.sh "$host":
    # sudo -S reads the password from stdin (echo is a builtin, so the
    # password does not appear in the process list).
    echo "$password" | ssh "$host" sudo -S ./setup.sh
done
| true
|
6d591f7aa86037d6922fc81e52d6faaa7f60b564
|
Shell
|
Seagate/cortx-motr
|
/conf/st
|
UTF-8
| 9,512
| 2.65625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
#
# Copyright (c) 2020 Seagate Technology LLC and/or its Affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For any questions about this software or licensing,
# please email opensource@seagate.com or cortx-questions@seagate.com.
#
set -eu
#set -x
export PS4='+ ${FUNCNAME[0]:+${FUNCNAME[0]}():}line ${LINENO}: '
### XXX TODO:
### 0. Add failure tests. They are very useful in finding deficiencies
### of Motr error-handling code.
### - See m0t1fs/linux_kernel/st/st in git history for how they used
### to be implemented.
### - Add a failure test corresponding to MOTR-322.
### - Add a scenario that will guard against MOTR-804 regressions.
### 1. Start m0d via Expect wrapper.
## CAUTION: This path will be removed by superuser.
SANDBOX_DIR=${SANDBOX_DIR:-/var/motr/sandbox.conf-st}
M0_TRACE_IMMEDIATE_MASK=${M0_TRACE_IMMEDIATE_MASK:-!rpc,formation,fop,memory}
M0_TRACE_LEVEL=${M0_TRACE_LEVEL:-info+}
M0_TRACE_PRINT_CONTEXT=${M0_TRACE_PRINT_CONTEXT:-}
MAX_RPC_MSG_SIZE=163840
TM_MIN_RECV_QUEUE_LEN=2
PROFILE='<0x7000000000000001:0>' # profile-0
PROCESS_M0D='<0x7200000000000001:1>' # process-1
PROCESS_M0T1FS='<0x7200000000000001:4>' # process-4
error() { echo "$@" >&2; stop 1; }
M0_SRC_DIR=`readlink -f $0`
M0_SRC_DIR=${M0_SRC_DIR%/*/*}
. $M0_SRC_DIR/utils/functions # die, sandbox_init, report_and_exit
M0CONFGEN=$M0_SRC_DIR/utils/m0confgen
XPRT=$(m0_default_xprt)
start() {
_init
services_start
}
stop() {
services_stop
_fini ${1:-}
report_and_exit conf-st ${1:-$?}
}
_init() {
export_test_eps
if [[ "$(check_and_restart_lnet)" == "true" ]]; then
m0_modules_insert
fi
sandbox_init # changes current directory to $SANDBOX_DIR
mkdir mnt
}
_fini() {
rmdir mnt
sandbox_fini ${1:-}
if [[ "$(is_lnet_available)" == "true" ]]; then
m0_modules_remove
fi
}
confdb() {
cat <<EOF
(root-0 verno=1 rootfid=(11, 22) mdpool=pool-0 imeta_pver=(0, 0)
mdredundancy=2 params=["pool_width=3", "nr_data_units=1",
"nr_parity_units=1", "nr_spare_units=1",
"unit_size=4096"]
nodes=[node-0] sites=[site-2] pools=[pool-0]
profiles=[profile-0] fdmi_flt_grps=[])
(profile-0 pools=[pool-0])
(node-0 memsize=16000 nr_cpu=2 last_state=3 flags=2
processes=[process-1, process-4])
(process-1 cores=[2] mem_limit_as=0 mem_limit_rss=0 mem_limit_stack=0
mem_limit_memlock=0 endpoint="$M0D1_ENDPOINT"
services=[service-0, service-1, service-2, service-3, service-4,
service-6])
(process-4 cores=[1] mem_limit_as=0 mem_limit_rss=0 mem_limit_stack=0
mem_limit_memlock=0 endpoint="$M0T1FS_ENDPOINT" services=[service-5])
##
## *CAUTION* Do not use symbolic form of service types here.
## I.e., use 'type=4' format, not 'type=@M0_CST_RMS'.
##
## Rationale: spiel.confstr() returns numeric service types.
## cmp(1) command at the end of this ST will fail, unless service types
## are specified numerically in this conf string.
##
(service-0 type=3 endpoints=["$M0D1_ENDPOINT"] params=[] sdevs=[])
(service-1 type=4 endpoints=["$M0D1_ENDPOINT"] params=[] sdevs=[])
(service-2 type=1 endpoints=["$M0D1_ENDPOINT"] params=[] sdevs=[sdev-0])
(service-3 type=2 endpoints=["$M0D1_ENDPOINT"] params=[]
sdevs=[sdev-1, sdev-2, sdev-3, sdev-4])
(service-4 type=6 endpoints=["$M0D1_ENDPOINT"] params=[] sdevs=[])
(service-5 type=4 endpoints=["$M0T1FS_ENDPOINT"] params=[] sdevs=[])
(service-6 type=10 endpoints=["$M0D1_ENDPOINT"] params=[] sdevs=[sdev-5])
(sdev-0 dev_idx=0 iface=4 media=1 bsize=4096 size=596000000000 last_state=3
flags=4 filename="/dev/sdev0")
(sdev-1 dev_idx=1 iface=4 media=1 bsize=4096 size=596000000000 last_state=3
flags=4 filename="/dev/sdev1")
(sdev-2 dev_idx=2 iface=7 media=2 bsize=8192 size=320000000000 last_state=2
flags=4 filename="/dev/sdev2")
(sdev-3 dev_idx=3 iface=7 media=2 bsize=8192 size=320000000000 last_state=2
flags=4 filename="/dev/sdev3")
(sdev-4 dev_idx=0 iface=7 media=2 bsize=8192 size=320000000000 last_state=2
flags=4 filename="/dev/sdev4")
(sdev-5 dev_idx=4 iface=4 media=2 bsize=4096 size=320000000000 last_state=3
flags=4 filename="/dev/sdev5")
(site-2 racks=[rack-0] pvers=[pver-0])
(rack-0 encls=[enclosure-0] pvers=[pver-0])
(enclosure-0 node=node-0 ctrls=[controller-0] pvers=[pver-0])
(controller-0 drives=[drive-0, drive-1, drive-2, drive-3, drive-4]
pvers=[pver-0])
(drive-0 dev=sdev-1 pvers=[pver-0])
(drive-1 dev=sdev-2 pvers=[pver-0])
(drive-2 dev=sdev-3 pvers=[pver-0])
(drive-3 dev=sdev-4 pvers=[pver-0])
(drive-4 dev=sdev-5 pvers=[pver-0])
(pool-0 pver_policy=0 pvers=[pver-0])
(pver-0 N=1 K=1 S=1 P=3 tolerance=[0, 0, 0, 0, 1] sitevs=[objv-2:0])
(objv-2:0 real=site-2 children=[objv-0])
(objv-0 real=rack-0 children=[objv-1])
(objv-1 real=enclosure-0 children=[objv-2])
(objv-2 real=controller-0 children=[objv-3, objv-4, objv-5])
(objv-3 real=drive-0 children=[])
(objv-4 real=drive-1 children=[])
(objv-5 real=drive-2 children=[])
EOF
}
services_start() {
local CONF_FILE=conf.xc
local OPTS="-f $PROCESS_M0D -H $M0D1_ENDPOINT
-m $MAX_RPC_MSG_SIZE -q $TM_MIN_RECV_QUEUE_LEN -F -D db -T AD -S stobs
-A linuxstob:addb-stobs -e $XPRT:$M0D1_ENDPOINT -c $CONF_FILE -w 3"
confdb | $M0CONFGEN >$CONF_FILE || error 'm0confgen failed'
$M0_SRC_DIR/utils/mkfs/m0mkfs $OPTS >>mkfs.log || error 'm0mkfs failed'
## XXX TODO: Start m0d via Expect wrapper. You never know how long to sleep.
$M0_SRC_DIR/motr/m0d $OPTS >>m0d.log 2>&1 &
local PID=$!
sleep 2
kill -0 $PID 2>/dev/null ||
error "Failed to start m0d. See $SANDBOX_DIR/m0d.log for details."
}
services_stop() { pkill m0d && wait || true; }
_mount() {
mount -t m0t1fs \
-o pfid="$PROCESS_M0T1FS",profile="$PROFILE",ha=$M0D1_ENDPOINT \
none $SANDBOX_DIR/mnt || return $?
}
_spiel_setup() {
local rc=0
cd $M0_SRC_DIR/utils/spiel
python3 setup.py "$@" || rc=$?
cd - >/dev/null
return $rc
}
spiel() {
local files=$SANDBOX_DIR/installed-files
case "$*" in
init)
_spiel_setup -q install --home $SANDBOX_DIR \
--install-lib $SANDBOX_DIR/lib64/python \
--record $files;;
run)
PYTHONPATH=$SANDBOX_DIR/lib64/python \
$M0_SRC_DIR/utils/spiel/m0spiel \
-l $M0_SRC_DIR/motr/.libs/libmotr.so \
-c $SPIEL_ENDPOINT -s $M0D1_ENDPOINT;;
fini)
## Redirect stderr to hide these warnings:
## | 'build/bdist.linux-x86_64' does not exist -- can't clean it
## | 'build/scripts-2.7' does not exist -- can't clean it
_spiel_setup -q clean -a 2>/dev/null
## If Motr is updated between two runs of this ST, Python module
## will become outdated. Delete Python module so that it is
## rebuilt afresh.
xargs rm -f <$files
rm -f $files;;
*)
die "${FUNCNAME[0]}: Invalid usage";;
esac
}
## Keep the audience engaged.
say() { echo "$@" | tee -a $SANDBOX_DIR/m0d.log; }
usage() {
cat <<EOF
Usage: ${0##*/} [COMMAND]
Supported commands:
run run system tests (default command)
insmod insert Motr kernel modules: m0tr.ko
rmmod remove Motr kernel modules
sstart start Motr user-space services
sstop stop Motr user-space services
help display this help and exit
EOF
}
## -------------------------------------------------------------------
## main()
## -------------------------------------------------------------------
[ `id -u` -eq 0 ] || die 'Must be run by superuser'
case "${1:-}" in
run|'') ;;
insmod) export_test_eps
if [[ "$(check_and_restart_lnet)" == "true" ]]; then
m0_modules_insert
fi
exit;;
rmmod) if [[ "$(is_lnet_available)" == "true" ]]; then
m0_modules_remove;
fi
exit;;
sstart) start; exit;;
sstop) services_stop; sleep 3; _fini; exit;;
help) usage; exit;;
*) usage >&2; die;;
esac
start
echo 8 >/proc/sys/kernel/printk # Print kernel messages to the console.
say 'Test: m0t1fs'
_mount || stop $?
umount $SANDBOX_DIR/mnt
say 'Test: spiel.confstr'
spiel init
{
cat <<EOF
rc = spiel.cmd_profile_set('$PROFILE')
if rc != 0:
sys.exit('Cannot set profile $PROFILE; rc=%d' % rc)
rc = spiel.rconfc_start()
if rc != 0:
sys.exit('Cannot start rconfc; rc=%d' % rc)
try:
print spiel.confstr()
except Exception, e:
import traceback
traceback.print_exc()
raise SystemExit(e) # other exception types are caught by 'code' module
finally:
spiel.rconfc_stop()
EOF
echo # Python interpreter needs an empty line at the very end.
} | spiel run >conf_ask.xc || stop $?
cmp <($M0CONFGEN -f xcode -t confgen conf.xc | sort) \
<($M0CONFGEN -f xcode -t confgen conf_ask.xc | sort) ||
error 'Configuration data is corrupted'
spiel fini
stop
| true
|
a9a47ef57ab50ba7745e031fb50c5ea502b85af0
|
Shell
|
CRAWlab/ARLISS
|
/2017/Tools/convert_coordinates.py
|
UTF-8
| 1,020
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/sh
# convert_coordinates.py
#
#
# Created by Joseph Fuentes on 9/13/17.
#
def convert_latitude(lat_NS):
""" Function to convert deg m N/S latitude to DD.dddd (decimal degrees)
Arguments:
lat_NS : tuple representing latitude
in format of MicroGPS gps.latitude
Returns:
float representing latitidue in DD.dddd
Created By: Dr. Joshua Vaughan - joshua.vaughan@louisiana.edu
"""
return (lat_NS[0] + lat_NS[1] / 60) * (1.0 if lat_NS[2] == 'N' else -1.0)
def convert_longitude(long_EW):
""" Function to convert deg m E/W longitude to DD.dddd (decimal degrees)
Arguments:
long_EW : tuple representing longitude
in format of MicroGPS gps.longitude
Returns:
float representing longtidue in DD.dddd
Created By: Dr. Joshua Vaughan - joshua.vaughan@louisiana.edu
"""
return (long_EW[0] + long_EW[1] / 60) * (1.0 if long_EW[2] == 'E' else -1.0)
latitude = (40,52.782, 'N')
longitude = (119, 7.308, 'W')
point = (latitude, longitude)
print(point)
| true
|
4e1b26cef1b47f755016a1d3010a28f69b975631
|
Shell
|
a-azad/geomexx.com
|
/deploy.sh
|
UTF-8
| 414
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/sh
# If a command fails then the deploy stops
set -e
# delete docs folder
rm -rf docs
# Go to builder folder
cd www
# Build the project.
# if using a theme, replace with `hugo -t <YOURTHEME>`
hugo
cd ..
# # Add changes to git.
git add -A
# # Commit changes.
msg="rebuilding site $(date)"
if [ -n "$*" ]; then
msg="$*"
fi
git commit -m "$msg"
# # Push source and build repos.
git push origin master
| true
|
e333612828465e900fcbe57f18250cd80b51a231
|
Shell
|
jlin21/henchman-demo
|
/demoapp
|
UTF-8
| 1,337
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
#
# chkconfig: 35 90 12
# description: mom_sender-cache script
#
# Source function library.
. /etc/rc.d/init.d/functions
DEMO_APP_DIR=/home/vagrant/demoapp
DEMO_APP_LOG_DIR=/var/log/demoapp
DAEMON=$DEMO_APP_DIR/server.js
DESC=demoapp
PID_DIR="/var/run/demoapp"
PID_FILE="${PID_DIR}/${DESC}.pid"
LOCK_FILE=${LOCKFILE:-/var/lock/subsys/${DESC}}
RETVAL=0
LOCKFILE=/var/lock/subsys/demoapp
USER="vagrant"
# Get function from functions library
. /etc/init.d/functions
start() {
echo -n $"Starting ${DESC}: "
daemonize -u $USER -p $PID_FILE -l $LOCK_FILE -a -o ${DEMO_APP_LOG_DIR}/${DESC}.log -e ${DEMO_APP_LOG_DIR}/${DESC}-error.log $DAEMON
RETVAL=$?
echo
[ $RETVAL -eq 0 ] && touch $LOCK_FILE
return $RETVAL
}
stop() {
echo -n $"Stopping ${DESC}: "
killproc -p ${PID_FILE} -d 10 $DAEMON
RETVAL=$?
echo
[ $RETVAL = 0 ] && rm -f ${LOCK_FILE} ${PID_FILE}
return $RETVAL
}
re_status () {
echo -e "${DESC} \c" && status -p ${PID_FILE} ${DAEMON}
RETVAL=$?
return $RETVAL
}
### main logic ###
case "$1" in
start)
start
;;
stop)
stop
;;
status)
re_status
;;
restart|reload|condrestart)
stop
start
;;
*)
echo $"Usage: $0 {start|stop|restart|reload|status}"
exit 1
esac
exit 0
| true
|
69a9be2af02a767f4940950f7593b6681f7db898
|
Shell
|
tais-aero/lugram
|
/print.sh
|
UTF-8
| 647
| 3.515625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -e
# TODO: DRY with make.sh
filter=${1}
filter=${filter:=.*}
echo "#!/bin/bash" > out/entrypoint.sh
echo "set -e" >> out/entrypoint.sh
echo Generating screens 2>&1
screens=$(lua generate.lua fosdem17/list screens | grep -v "^$" | grep "${filter}")
echo "Will generate screens: ${screens}" 2>&1
for screen in ${screens}; do
prefix="print.${screen//\//.}"
echo Generating ${prefix} 2>&1
lua generate.lua "${screen}" print >"out/${prefix}.mermaid"
echo "mermaid -w 2048 -o /src /src/${prefix}.mermaid" >> out/entrypoint.sh
done
chmod +x out/entrypoint.sh
docker run -v $(pwd)/out:/src mermaid /src/entrypoint.sh
| true
|
c51619abc6d91b396f00c2b1795e7da443beec94
|
Shell
|
LiberatorUSA/GUCEF
|
/projects/premake4/Premake4Common.sh
|
UTF-8
| 3,086
| 3.71875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# Turn on tracing, we want to see what's going on
#set -x
# Remember the dir where we started out since we use relative paths
scriptPath="$(cd "${0%/*}" 2>/dev/null; echo "$PWD"/"${0##*/}")"
PREMAKE4COMMON_SCRIPTSTARTDIR=${scriptPath%/*}
echo "PREMAKE4COMMON_SCRIPTSTARTDIR = $PREMAKE4COMMON_SCRIPTSTARTDIR"
echo "*** Perform common Premake4 environment variable setup ***"
# Set the basic environment variables we can use for GUCEF in the rest of the script
cd "$PREMAKE4COMMON_SCRIPTSTARTDIR/../.."
GUCEF_HOME=$PWD
echo "GUCEF_HOME = $GUCEF_HOME"
SRCROOTDIR=$PWD
echo "SRCROOTDIR = $SRCROOTDIR"
# Set the output directory in which the target directories will be placed for Premake4
export OUTPUTDIR=${OUTPUTDIR:=undefined}
if [ "$OUTPUTDIR" = "undefined" ];
then
echo "OUTPUTDIR is undefined, setting it to default"
OUTPUTDIR="$GUCEF_HOME/common/bin"
fi
echo "OUTPUTDIR = $OUTPUTDIR"
# Set the exact output directory in which the Premake4 will output
export PM4OUTPUTDIR=${PM4OUTPUTDIR:=undefined}
if [ "$PM4OUTPUTDIR" = "undefined" ];
then
echo "PM4OUTPUTDIR is undefined, setting it to default"
PM4OUTPUTDIR="$GUCEF_HOME/common/bin/premake4"
fi
echo "PM4OUTPUTDIR = $PM4OUTPUTDIR"
# Set environment variable which points to OIS library home
OIS_HOME=${OIS_HOME:=undefined}
if [ "$OIS_HOME" = "undefined" ]; then
echo "OIS environment variable not found, setting it"
OIS_HOME="$GUCEF_HOME/dependencies/OIS"
fi
echo "OIS_HOME = $OIS_HOME"
# Set environment variable which points to FreeImage library home
FREEIMAGE_HOME=${FREEIMAGE_HOME:=undefined}
if [ "$FREEIMAGE_HOME" = "undefined" ]; then
echo "FreeImage environment variable not found, setting it"
FREEIMAGE_HOME="$GUCEF_HOME/dependencies/FreeImage"
fi
echo "FREEIMAGE_HOME = $FREEIMAGE_HOME"
# Set environment variable which points to DevIL library home
DEVIL_HOME=${DEVIL_HOME:=undefined}
if [ "$DEVIL_HOME" = "undefined" ]; then
echo "DevIL environment variable not found, setting it"
DEVIL_HOME="$GUCEF_HOME/dependencies/Devil"
fi
echo "DEVIL_HOME = $DEVIL_HOME"
# Set environment variable which points to ZLib library home
ZLIB_HOME=${ZLIB_HOME:=undefined}
if [ "$ZLIB_HOME" = "undefined" ]; then
echo "ZLib environment variable not found, setting it"
ZLIB_HOME="$GUCEF_HOME/dependencies/zlib"
fi
echo "ZLIB_HOME = $ZLIB_HOME"
# Set environment variable which points to ZZipLib library home
ZZIPLIB_HOME=${ZZIPLIB_HOME:=undefined}
if [ "$ZZIPLIB_HOME" = "undefined" ]; then
echo "ZZipLib environment variable not found, setting it"
ZZIPLIB_HOME="$GUCEF_HOME/dependencies/zziplib"
fi
echo "ZZIPLIB_HOME = $ZZIPLIB_HOME"
# Check to see if we need to invoke the Project generator
SKIP_GUCEF_PREMAKE4FILEGENERATION=${SKIP_GUCEF_PREMAKE4FILEGENERATION:=undefined}
if [ "$SKIP_GUCEF_PREMAKE4FILEGENERATION" = "undefined" ];
then
echo "*** Generate Premake4 files ***"
cd "$PREMAKE4COMMON_SCRIPTSTARTDIR"
. GeneratePremake4Info.sh
else
echo "Skipping GUCEF's Premake4 file generation"
fi
# Go back to where we came from
cd "$PREMAKE4COMMON_SCRIPTSTARTDIR"
| true
|
0ec2c26d3a9991069bad6c84cbc9dfc12108d61c
|
Shell
|
friism/linuxkit
|
/base/alpine-build-toybox/build.sh
|
UTF-8
| 222
| 2.84375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
FILES=$@
make $FILES > /dev/null
[ $# -eq 0 ] && FILES=toybox
# TODO symlinks if just use toybox
mkdir -p /out/bin
mv $FILES /out/bin
printf "FROM scratch\nCOPY bin/ bin/\n" > /out/Dockerfile
cd /out
tar cf - .
| true
|
93b007878b94ce2664fbbb8be1cd7416f9f25e4b
|
Shell
|
magnushamrin/github-action-install-dependent-sfdx-packages
|
/entrypoint.sh
|
UTF-8
| 592
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# The execution of this script stops if a command or pipeline has an error.
# For example, failure to install a dependent package will cause the script
# to stop execution.
wget https://developer.salesforce.com/media/salesforce-cli/sfdx-linux-amd64.tar.xz
mkdir sfdx-cli
tar xJf sfdx-linux-amd64.tar.xz -C sfdx-cli --strip-components 1
./sfdx-cli/install
set -e
# Specify the user name of the subscriber org.
USER_NAME=$1
# Specify a package version id (starts with 04t)
# If you know the package alias but not the id, use force:package:version:list to find it.
PACKAGE=$2
| true
|
0178814affdc20dd65de04e97c5161d018719b6e
|
Shell
|
arcaneastronaut/dotfiles
|
/config-groups/foundation/home/.bashrc
|
UTF-8
| 2,732
| 2.796875
| 3
|
[
"Unlicense"
] |
permissive
|
#
# ~/.bashrc
#
# If not running interactively, don't do anything
[[ $- != *i* ]] && return
alias ls='ls --color=auto'
[ -z "$PS1" ] && return
color_black_black='\[\e[0;30m\]'
color_black_red='\[\e[0;31m\]'
color_black_green='\[\e[0;32m\]'
color_black_yellow='\[\e[0;33m\]'
color_black_blue='\[\e[0;34m\]'
color_black_magenta='\[\e[0;35m\]'
color_black_cyan='\[\e[0;36m\]'
color_black_white='\[\e[0;37m\]'
color_red_black='\[\e[30;41m\]'
color_red_red='\[\e[31;41m\]'
color_red_green='\[\e[32;41m\]'
color_red_yellow='\[\e[33;41m\]'
color_red_blue='\[\e[34;41m\]'
color_red_magenta='\[\e[35;41m\]'
color_red_cyan='\[\e[36;41m\]'
color_red_white='\[\e[37;41m\]'
color_green_black='\[\e[30;42m\]'
color_green_red='\[\e[31;42m\]'
color_green_green='\[\e[32;42m\]'
color_green_yellow='\[\e[33;42m\]'
color_green_blue='\[\e[34;42m\]'
color_green_magenta='\[\e[35;42m\]'
color_green_cyan='\[\e[36;42m\]'
color_green_white='\[\e[37;42m\]'
color_yellow_black='\[\e[30;43m\]'
color_yellow_red='\[\e[31;43m\]'
color_yellow_green='\[\e[32;43m\]'
color_yellow_yellow='\[\e[33;43m\]'
color_yellow_blue='\[\e[34;43m\]'
color_yellow_magenta='\[\e[35;43m\]'
color_yellow_cyan='\[\e[36;43m\]'
color_yellow_white='\[\e[37;43m\]'
color_blue_black='\[\e[30;44m\]'
color_blue_red='\[\e[31;44m\]'
color_blue_green='\[\e[32;44m\]'
color_blue_yellow='\[\e[33;44m\]'
color_blue_blue='\[\e[34;44m\]'
color_blue_magenta='\[\e[35;44m\]'
color_blue_cyan='\[\e[36;44m\]'
color_blue_white='\[\e[37;44m\]'
color_magenta_black='\[\e[30;45m\]'
color_magenta_red='\[\e[31;45m\]'
color_magenta_green='\[\e[32;45m\]'
color_magenta_yellow='\[\e[33;45m\]'
color_magenta_blue='\[\e[34;45m\]'
color_magenta_magenta='\[\e[35;45m\]'
color_magenta_cyan='\[\e[36;45m\]'
color_magenta_white='\[\e[37;45m\]'
color_cyan_black='\[\e[30;46m\]'
color_cyan_red='\[\e[31;46m\]'
color_cyan_green='\[\e[32;46m\]'
color_cyan_yellow='\[\e[33;46m\]'
color_cyan_blue='\[\e[34;46m\]'
color_cyan_magenta='\[\e[35;46m\]'
color_cyan_cyan='\[\e[36;46m\]'
color_cyan_white='\[\e[37;46m\]'
color_grey_black='\[\e[30;47m\]'
color_grey_red='\[\e[31;47m\]'
color_grey_green='\[\e[32;47m\]'
color_grey_yellow='\[\e[33;47m\]'
color_grey_blue='\[\e[34;47m\]'
color_grey_magenta='\[\e[35;47m\]'
color_grey_cyan='\[\e[36;47m\]'
color_grey_white='\[\e[37;47m\]'
color_reset='\[\033[0m\]'
#weight_bold="$(tput bold)"
#weight_normal="$(tput sgr0)"
#separator=''
#separator='❯'
separator=''
#HISTCONTROL=ignoreboth
#shopt -s histappend
#HISTSIZE=10000
#HISTFILESIZE=20000
export EDITOR=nvim
PS1="$color_grey_black \u@\h $color_blue_white$separator$color_blue_white \W $color_black_blue$separator"
PS1="$PS1$color_reset "
PS2="$color_blue_white-$color_black_blue$separator$color_reset"
| true
|
06b1c2a20bc3e2a89c4b38f28cbbe8c2cc9a8ece
|
Shell
|
pavankayankota/CapgeminiDevopstraing
|
/shellscripting/assignment07.sh
|
UTF-8
| 311
| 3.09375
| 3
|
[] |
no_license
|
#! /usr/bin/bash
shopt -s expand_aliases
alias TODAY="6/7/2021"
alias UFILES="find/home -user user"
TODAYSDATE="6/7/2021"
USERFILES='find/home-user user'
echo "Today's Date : $TODAYSDATE"
echo "user files : $USERFILES"
A='TODAY'
B='UFILES'
echo "with Alias, TODAY IS: $A"
echo "with Alias, UFILES IS : $B"
| true
|
95855570be1e0e9af0c81db9f003b8888b8da32a
|
Shell
|
brad-payne/k8s-cloud-loadbalancer
|
/lb-nodeport/run.sh
|
UTF-8
| 491
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
source lb-nodeport.f
echo " - Running createLoadBalancer"
createLoadBalancer haproxy default
# echo " - Cleaning up old files"
# rm -f /etc/httpd/conf.d/*.bl
# echo " - Copying files"
# mv -f kubernetes.services.conf /etc/httpd/conf.d/
# mv -f *.service.bl /etc/httpd/conf.d/
# echo " - Restarting httpd"
# sudo service httpd reload
echo "Restarting haproxy"
sudo systemctl restart haproxy
# cp -f haproxy.cfg /etc/haproxy/
sleep 2
sudo systemctl status haproxy
| true
|
b1f204c3b64fb82689e741110500dc10d6714803
|
Shell
|
isabella232/mapbox-maps-ios
|
/scripts/code-coverage/generate-json-report.sh
|
UTF-8
| 1,205
| 3.671875
| 4
|
[
"ISC",
"BSL-1.0",
"BSD-2-Clause",
"MIT",
"LicenseRef-scancode-object-form-exception-to-mit",
"BSD-3-Clause",
"Zlib"
] |
permissive
|
#
# Prerequisite:
# Prior to running this script, you must have generated an xcresults bundle that
# contains a code coverage report. This can be done by:
# 1. Running tests locally in Xcode with `gather reports` enabled for that scheme.
# 2. Running the tests locally via the relevant make command.
#
# Purpose:
# This script is intended to:
# - Convert a code coverage report generated by Xcode to JSON.
# - Parse that report for the code coverage total.
#
set -e
set -o pipefail
cov_result="";
if [ -f ../../build/ios/Logs/Test/*.xcresult/ ]; then
cov_result=build/ios/Logs/Test/*.xcresult
elif [ -f ../../build/ios/ios/Logs/Test/*.xcresult/ ]; then
cov_result=build/ios/ios/Logs/Test/*.xcresult
else
echo "Coverage file does not exist. Please run tests before executing"
exit 1
fi
xcrun xccov view --report $cov_result --json > output.json
#
# Convert the line coverage for the dynamic target to a percentage. Currently,
# only CI tests are included when calculated code coverage.
#
percentage=`node -e "console.log(require('./output.json').lineCoverage)"`
cov=$(printf "%.2f" $(echo "$percentage*100" | bc -l))
# Generate a formatted JSON file and upload it to S3.
echo $cov
| true
|
f117dd65a2300ec4c648d48ce3811db9af297f1f
|
Shell
|
WhitewaterFoundry/fedora-remix-rootfs-build
|
/linux_files/check-dnf.sh
|
UTF-8
| 187
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/sh
# Only the default WSL user should run this script
if ! (id -Gn | grep -c "adm.*wheel\|wheel.*adm" >/dev/null); then
return
fi
if [ -z "${WSL2}" ]; then
sudo check-dnf
fi
| true
|
c6fe04d7ff548bc9180143595a59a9e1c158296a
|
Shell
|
Loutcho/oeis
|
/A300997/src/main/prolog/graphviz.sh
|
UTF-8
| 533
| 2.90625
| 3
|
[] |
no_license
|
GV="moitmoit.gv"
> $GV
echo "digraph G {" >> $GV
echo " rankdir=\"LR\";" >> $GV
echo " node [shape=\"none\" style=\"filled\" fillcolor=\"#BBBBBB\"];" >> $GV
echo " edge [arrowhead=\"none\" arrowtail=\"normal\" dir=\"back\"];" >> $GV
n=1
while [ $n -le 16 ]
do
echo "\"[$n]\" [label=\"[$n]\" fillcolor=\"#BBFFBB\"];" >> $GV
(( n+=1 ))
done
cat graphviz_lines.gv | awk '
# remove duplicates while keeping order of first appearance
{
if (!($0 in mem))
{
print $0;
mem[$0] = 1;
}
}
' >> $GV
echo "}" >> $GV
./dot_svg.bat $GV
| true
|
15e09dc228958e2770266ab84a20b5d1f6b300c6
|
Shell
|
Luavis/dotfiles
|
/.zshrc
|
UTF-8
| 998
| 2.71875
| 3
|
[] |
no_license
|
export ZSH=~/.oh-my-zsh
export PATH="/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin"
export LANG=en_US.UTF-8
ZSH_THEME="bureau"
plugins=(git git-flow cp tmux osx fzf
autojump fast-syntax-highlighting zsh-autosuggestions kubectl)
bindkey '[C' forward-word
bindkey '[D' backward-word
# load dotfiles
for file in ~/.{exports,aliases,functions}; do
[ -r "$file" ] && [ -f "$file" ] && source "$file";
done;
# load zsh config
source $ZSH/oh-my-zsh.sh
# pyenv init
eval "$(pyenv init -)"
# GOROOT path settings
export PATH=$PATH:$GOROOT/bin:$GOPATH/bin
# JENV (Java Env)
export PATH="$HOME/.jenv/bin:$PATH"
eval "$(jenv init -)"
# fzf options
source ~/.fzf.zsh
# kafkactl
source <(kafkactl completion zsh)
# kube-ps1
export KUBE_PS1_NS_ENABLE=false
export KUBE_PS1_SEPARATOR='> '
export KUBE_PS1_PREFIX='['
export KUBE_PS1_SUFFIX=']'
export KUBE_PS1_CTX_COLOR='cyan'
source "$(brew --prefix)/opt/kube-ps1/share/kube-ps1.sh"
# set RPROMPT
export RPROMPT='$(kube_ps1)$(bureau_git_prompt)'
| true
|
14ff0301462cb8e73263c5df0bc6259dc419f171
|
Shell
|
Nemie/syco
|
/var/clam/viruscan.sh
|
UTF-8
| 1,464
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
# __author__ = "mattias@fareoffice.com"
# __copyright__ = "Copyright 2011, The System Console project"
# __maintainer__ = "Daniel Lindh"
# __email__ = "syco@cybercow.se"
# __credits__ = ["???"]
# __license__ = "???"
# __version__ = "1.0.0"
# __status__ = "Production"
# Random sleep to start within 12 hours
sleep $[ ( $RANDOM % 43200 ) + 1 ]s
# Email subject
SUBJECT="VIRUS DETECTED ON `hostname`!!!"
# Email To ?
EMAIL="${ADMIN_EMAIL}"
# Date for saving all scans
DATE=`date +%y-%m-%d`
# Log location
LOG=/var/log/clamav/scan-$DATE.log
echo $DATE
check_scan () {
# Check the last set of results. If there are any "Infected" counts that
# that aren't zero, we have a problem.
if [ `tail -n 12 ${LOG} | grep Infected | grep -v 0 | wc -l` != 0 ]
then
EMAILMESSAGE=`mktemp /tmp/virus-alert.XXXXX`
echo "To: ${EMAIL}" >> ${EMAILMESSAGE}
echo "From: noreplay@fareoffice.com" >> ${EMAILMESSAGE}
echo "Subject: ${SUBJECT}" >> ${EMAILMESSAGE}
echo "Importance: High" >> ${EMAILMESSAGE}
echo "X-Priority: 1" >> ${EMAILMESSAGE}
echo "`tail -n 50 ${LOG}`" >> ${EMAILMESSAGE}
/usr/sbin/sendmail -t < ${EMAILMESSAGE}
fi
}
/usr/local/bin/freshclam
ionice -c3 nice -n 19 /usr/local/bin/clamscan -ir --exclude=/proc --exclude=/sys --exclude=/dev --exclude=/media --exclude=/mnt --exclude-dir=/var/lib/elasticsearch/prod/ --exclude-dir=/etc/snort/rules / --quiet --infected --log=${LOG}
check_scan
cat ${LOG} | logger
| true
|
3e5158fe48c60a9cd940bfb78d686ba5262bc4c8
|
Shell
|
infernalheaven/HexType
|
/scripts/install-hextype-files.sh
|
UTF-8
| 6,483
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
#This script softlinks our modified files into the LLVM source tree
#Path to llvm source tree
llvm=`pwd`/llvm
clang=`pwd`/clang
src=`pwd`/src
runtime=`pwd`/compiler-rt
#llvm include
llvminc=$llvm/include/llvm
#llvm pass
llvmpass=$llvm/lib/Transforms/Instrumentation
#llvm passutil
llvmutil=$llvm/lib/Transforms/Utils
#llvm include
llvminclude=$llvm/include/llvm/Transforms/Utils
#install LLVM codes
rm $llvm/include/llvm/InitializePasses.h
rm $llvm/lib/Analysis/MemoryBuiltins.cpp
rm $llvm/include/llvm/Analysis/MemoryBuiltins.h
rm $llvm/include/llvm/Transforms/Instrumentation.h
rm $llvm/lib/Transforms/Utils/CMakeLists.txt
rm $llvm/lib/Transforms/Instrumentation/CMakeLists.txt
rm $llvmpass/HexTypePass.cpp
rm $llvmpass/HexTypeTreePass.cpp
rm $llvmutil/HexTypeUtil.cpp
ln -s $src/llvm-files/HexTypePass.cpp $llvmpass
ln -s $src/llvm-files/HexTypeTreePass.cpp $llvmpass
ln -s $src/llvm-files/HexTypeUtil.cpp $llvmutil
ln -s $src/llvm-files/HexTypeUtil.h $llvminclude
ln -s $src/llvm-files/InitializePasses.h $llvminc
ln -s $src/llvm-files/MemoryBuiltins.cpp $llvm/lib/Analysis/MemoryBuiltins.cpp
ln -s $src/llvm-files/MemoryBuiltins.h $llvm/include/llvm/Analysis/MemoryBuiltins.h
ln -s $src/llvm-files/Instrumentation.h $llvm/include/llvm/Transforms/Instrumentation.h
ln -s $src/llvm-files/UtilsCMakeLists.txt $llvm/lib/Transforms/Utils/CMakeLists.txt
ln -s $src/llvm-files/InstrumentationCMakeLists.txt $llvm/lib/Transforms/Instrumentation/CMakeLists.txt
#install clang codes
rm $clang/include/clang/Basic/Sanitizers.def
rm $clang/include/clang/Basic/Sanitizers.h
rm $clang/include/clang/Driver/SanitizerArgs.h
rm $clang/lib/CodeGen/BackendUtil.cpp
rm $clang/lib/CodeGen/CGCXXABI.h
rm $clang/lib/CodeGen/CGClass.cpp
rm $clang/lib/CodeGen/CGExpr.cpp
rm $clang/lib/CodeGen/CGExprCXX.cpp
rm $clang/lib/CodeGen/CGExprScalar.cpp
rm $clang/lib/CodeGen/CodeGenFunction.cpp
rm $clang/lib/CodeGen/CodeGenFunction.h
rm $clang/lib/CodeGen/CodeGenTypes.cpp
rm $clang/lib/CodeGen/ItaniumCXXABI.cpp
rm $clang/lib/CodeGen/MicrosoftCXXABI.cpp
rm $clang/lib/Driver/ToolChain.cpp
rm $clang/lib/Driver/ToolChains.cpp
rm $clang/lib/Driver/Tools.cpp
rm $clang/lib/Sema/SemaDecl.cpp
# for clang function test
rm $clang/runtime/CMakeLists.txt
rm $clang/unittests/Frontend/CMakeLists.txt
rm $clang/test/CMakeLists.txt
rm $clang/test/lit.cfg
rm $clang/test/CodeGen/hextype/hextype-dynamic_cast.cpp
rm $clang/test/CodeGen/hextype/hextype-placementnew.cpp
rm $clang/test/CodeGen/hextype/hextype-reinterpret.cpp
rm $clang/test/CodeGen/hextype/hextype-typecasting.cpp
ln -s $src/clang-files/Sanitizers.def $clang/include/clang/Basic/Sanitizers.def
ln -s $src/clang-files/Sanitizers.h $clang/include/clang/Basic/Sanitizers.h
ln -s $src/clang-files/SanitizerArgs.h $clang/include/clang/Driver/SanitizerArgs.h
ln -s $src/clang-files/BackendUtil.cpp $clang/lib/CodeGen/BackendUtil.cpp
ln -s $src/clang-files/CGCXXABI.h $clang/lib/CodeGen/CGCXXABI.h
ln -s $src/clang-files/CGClass.cpp $clang/lib/CodeGen/CGClass.cpp
ln -s $src/clang-files/CGExpr.cpp $clang/lib/CodeGen/CGExpr.cpp
ln -s $src/clang-files/CGExprCXX.cpp $clang/lib/CodeGen/CGExprCXX.cpp
ln -s $src/clang-files/CGExprScalar.cpp $clang/lib/CodeGen/CGExprScalar.cpp
ln -s $src/clang-files/CodeGenFunction.cpp $clang/lib/CodeGen/CodeGenFunction.cpp
ln -s $src/clang-files/CodeGenFunction.h $clang/lib/CodeGen/CodeGenFunction.h
ln -s $src/clang-files/CodeGenTypes.cpp $clang/lib/CodeGen/CodeGenTypes.cpp
ln -s $src/clang-files/ItaniumCXXABI.cpp $clang/lib/CodeGen/ItaniumCXXABI.cpp
ln -s $src/clang-files/MicrosoftCXXABI.cpp $clang/lib/CodeGen/MicrosoftCXXABI.cpp
ln -s $src/clang-files/ToolChain.cpp $clang/lib/Driver/ToolChain.cpp
ln -s $src/clang-files/ToolChains.cpp $clang/lib/Driver/ToolChains.cpp
ln -s $src/clang-files/Tools.cpp $clang/lib/Driver/Tools.cpp
ln -s $src/clang-files/SemaDecl.cpp $clang/lib/Sema/SemaDecl.cpp
# for clang function test
ln -s $src/clang-files/test/CMakeLists_runtime.txt $clang/runtime/CMakeLists.txt
ln -s $src/clang-files/test/CMakeLists_test.txt $clang/test/CMakeLists.txt
ln -s $src/clang-files/test/CMakeLists_frontend.txt $clang/unittests/Frontend/CMakeLists.txt
ln -s $src/clang-files/test/lit.cfg $clang/test/lit.cfg
mkdir $clang/test/CodeGen/hextype
ln -s $src/clang-files/test/hextype-dynamic_cast.cpp $clang/test/CodeGen/hextype/hextype-dynamic_cast.cpp
ln -s $src/clang-files/test/hextype-placementnew.cpp $clang/test/CodeGen/hextype/hextype-placementnew.cpp
ln -s $src/clang-files/test/hextype-reinterpret.cpp $clang/test/CodeGen/hextype/hextype-reinterpret.cpp
ln -s $src/clang-files/test/hextype-typecasting.cpp $clang/test/CodeGen/hextype/hextype-typecasting.cpp
#install compiler-rt codes
rm $runtime/cmake/config-ix.cmake
rm $runtime/lib/CMakeLists.txt
rm $runtime/lib/hextype/CMakeLists.txt
rm $runtime/lib/hextype/hextype.cc
rm $runtime/lib/hextype/hextype.h
rm $runtime/lib/hextype/hextype_rbtree.cc
rm $runtime/lib/hextype/hextype_rbtree.h
rm $runtime/lib/hextype/hextype_report.cc
rm $runtime/lib/hextype/hextype_report.h
rm $runtime/test/CMakeLists.txt
rm $runtime/test/hextype/CMakeLists.txt
ln -s $src/compiler-rt-files/config-ix.cmake $runtime/cmake/config-ix.cmake
ln -s $src/compiler-rt-files/lib_cmakelists.txt $runtime/lib/CMakeLists.txt
mkdir $runtime/lib/hextype
ln -s $src/compiler-rt-files/lib_hextype_cmakelists.txt $runtime/lib/hextype/CMakeLists.txt
ln -s $src/compiler-rt-files/hextype.cc $runtime/lib/hextype/hextype.cc
ln -s $src/compiler-rt-files/hextype.h $runtime/lib/hextype/hextype.h
ln -s $src/compiler-rt-files/hextype_rbtree.cc $runtime/lib/hextype/hextype_rbtree.cc
ln -s $src/compiler-rt-files/hextype_rbtree.h $runtime/lib/hextype/hextype_rbtree.h
ln -s $src/compiler-rt-files/hextype_report.cc $runtime/lib/hextype/hextype_report.cc
ln -s $src/compiler-rt-files/hextype_report.h $runtime/lib/hextype/hextype_report.h
# for compiler-rt function test
mkdir $runtime/test
mkdir $runtime/test/hextype
mkdir $runtime/test/hextype/TestCases
ln -s $src/compiler-rt-files/test/compiler-rt_test_cmakelist.txt $runtime/test/CMakeLists.txt
ln -s $src/compiler-rt-files/test/compiler-rt_test_hextype_cmakelist.txt $runtime/test/hextype/CMakeLists.txt
ln -s $src/compiler-rt-files/test/simple_bad_cast.cc $runtime/test/hextype/TestCases/simple_bad_cast.cc
ln -s $src/compiler-rt-files/test/lit.common.cfg $runtime/test/hextype/lit.common.cfg
ln -s $src/compiler-rt-files/test/lit.site.cfg.in $runtime/test/hextype/lit.site.cfg.in
| true
|
94cd31d3192f2beba4657d59a3f3f3c48c6ffab5
|
Shell
|
zleba/alignment
|
/installARMA.sh
|
UTF-8
| 375
| 2.734375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Download, build and install Armadillo into ./arma/install.
# Abort on the first failure so a failed download never reaches cmake/make.
set -euo pipefail

version=10.2.1
base_dir=$PWD
mkdir -p "$base_dir/arma/install"
cd "$base_dir/arma"
#wget https://sourceforge.net/projects/arma/files/armadillo-8.500.1.tar.xz/download && tar xf download
# Fetch and unpack exactly the requested release (the original `tar xf *.tar.xz`
# could pick up a stale archive from a previous run).
wget "http://sourceforge.net/projects/arma/files/armadillo-${version}.tar.xz"
tar xf "armadillo-${version}.tar.xz"
cd "$base_dir/arma/armadillo-${version}"
cmake . -DCMAKE_INSTALL_PREFIX:PATH="$base_dir/arma/install"
make
make install
| true
|
f2cc0a126680ef2f5ea31f631da34b6a7983dafa
|
Shell
|
kie4280/GoDrive
|
/scripts/stop_dlv.sh
|
UTF-8
| 177
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/bash
# Gracefully stop a running Delve (dlv) headless debug server.
echo "stopping debug server"
sleep 1
# pgrep -x matches the process name exactly; replaces the fragile
# 'ps -a | grep' pattern (which also matched its own grep line format).
if pgrep -x dlv >/dev/null; then
    # Ask the server to exit cleanly through its API port first...
    echo "exit" | dlv connect :2345
    sleep 1
    # ...then reap anything still alive.
    killall dlv
fi
echo "stopped"
| true
|
1ad5e6f97e9b7be3974c9fa4441785d93147322f
|
Shell
|
cvasilak/mbed-edge-examples
|
/mqttpt-example/mqttgw_sim/mqtt_gw_crypto_api.sh
|
UTF-8
| 6,266
| 3.1875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# ----------------------------------------------------------------------------
# Copyright 2018 ARM Ltd.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
usage()
{
# One usage line per supported operation; keep this list in sync with the
# dispatch 'case' at the bottom of this script (device-cert-renew was
# handled there but missing here).
echo "usage: mqtt_gw_crypto_api.sh renew-certificate <name>"
echo "usage: mqtt_gw_crypto_api.sh set-certificates-list <names>..."
echo "usage: mqtt_gw_crypto_api.sh get-certificate <name>"
echo "usage: mqtt_gw_crypto_api.sh get-public-key <name>"
echo "usage: mqtt_gw_crypto_api.sh generate-random <size>"
echo "usage: mqtt_gw_crypto_api.sh asymmetric-sign <private-key-name> <hash-digest>"
echo "usage: mqtt_gw_crypto_api.sh asymmetric-verify <public-key-name> <hash-digest> <signature>"
echo "usage: mqtt_gw_crypto_api.sh ecdh-key-agreement <private-key-name> <peer-public-key>"
echo "usage: mqtt_gw_crypto_api.sh device-cert-renew <device-name> <certificate-name> <csr>"
echo ""
echo "Names are in string format. Size is an integer. Hash-digest and signature are base64-encoded sha256-sums. Peer-public-key is base64-encoded."
}
# Each *_message function below prints a JSON-RPC-style request document to
# stdout.  request_id is a random correlation id ($RANDOM); note the
# variables assigned inside these functions are global (no 'local').

# Request renewal of the named certificate.  $1 - certificate name.
certificate_renew_message()
{
request_id=$RANDOM
certificate=$1
cat <<EOF
{
"request_id": "$request_id",
"method": "renew_certificate",
"params" : {"certificate": "$certificate"}
}
EOF
}
# Replace the gateway's certificate list.  $1 - pre-built JSON array body,
# e.g. "a","b" (built by the dispatcher below).
set_certificates_list_message()
{
request_id=$RANDOM
certificates=$1
cat <<EOF
{
"request_id": "$request_id",
"method": "set_certificates_list",
"params" : {"certificates": [$certificates]}
}
EOF
}
# Fetch the named certificate.  $1 - certificate name.
certificate_get_message()
{
request_id=$RANDOM
certificate=$1
cat <<EOF
{
"request_id": "$request_id",
"method": "get_certificate",
"params" : {"certificate": "$certificate"}
}
EOF
}
# Fetch the named public key.  $1 - key name.
public_key_get_message()
{
request_id=$RANDOM
public_key=$1
cat <<EOF
{
"request_id": "$request_id",
"method": "get_public_key",
"params" : {"key": "$public_key"}
}
EOF
}
# Request $1 random bytes (emitted as a bare JSON integer, not a string).
generate_random_message()
{
request_id=$RANDOM
size=$1
cat <<EOF
{
"request_id": "$request_id",
"method": "generate_random",
"params" : {"size": $size}
}
EOF
}
# Sign a digest.  $1 - private key name, $2 - base64 sha256 digest.
asymmetric_sign_message()
{
request_id=$RANDOM
private_key_name=$1
hash_digest=$2
cat <<EOF
{
"request_id": "$request_id",
"method": "asymmetric_sign",
"params" : {"private_key_name": "$private_key_name",
"hash_digest": "$hash_digest"}
}
EOF
}
# Verify a signature.  $1 - public key name, $2 - digest, $3 - signature.
asymmetric_verify_message()
{
request_id=$RANDOM
public_key_name=$1
hash_digest=$2
signature=$3
cat <<EOF
{
"request_id": "$request_id",
"method": "asymmetric_verify",
"params" : {"public_key_name": "$public_key_name",
"hash_digest": "$hash_digest",
"signature": "$signature"}
}
EOF
}
# ECDH key agreement.  $1 - private key name, $2 - base64 peer public key.
ecdh_key_agreement_message()
{
request_id=$RANDOM
private_key_name=$1
peer_public_key=$2
cat <<EOF
{
"request_id": "$request_id",
"method": "ecdh_key_agreement",
"params" : {"private_key_name": "$private_key_name",
"peer_public_key": "$peer_public_key"}
}
EOF
}
# Renew a device certificate from a CSR.  $1 - device name,
# $2 - certificate name, $3 - CSR (note the method string is
# "device_renew_certificate", unlike the CLI operation name).
device_cert_renew_message()
{
request_id=$RANDOM
device_name=$1
cert_name=$2
csr=$3
cat <<EOF
{
"request_id": "$request_id",
"method": "device_renew_certificate",
"params" : {"device_name": "$device_name",
"certificate_name": "$cert_name",
"csr": "$csr"}
}
EOF
}
# Dispatch: the first CLI argument selects the operation; the remaining
# arguments feed the matching *_message builder.  The resulting JSON is
# echoed and published on MQTT topic 'MQTT'.  All argument expansions are
# quoted so names containing spaces or glob characters survive intact.
operation=$1
case "$operation" in
    set-certificates-list )
        # Join the remaining arguments into "a","b","c" for the JSON array.
        certificates=""
        while [ "$2" != "" ]; do
            name=$2
            certificates=$certificates"\"$name\""
            shift
            if [ "$2" != "" ] ; then
                certificates=$certificates","
            fi
        done
        message="$(set_certificates_list_message "$certificates")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    renew-certificate )
        message="$(certificate_renew_message "$2")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    get-certificate )
        message="$(certificate_get_message "$2")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    get-public-key )
        message="$(public_key_get_message "$2")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    generate-random )
        message="$(generate_random_message "$2")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    asymmetric-sign )
        message="$(asymmetric_sign_message "$2" "$3")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    asymmetric-verify )
        message="$(asymmetric_verify_message "$2" "$3" "$4")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    ecdh-key-agreement )
        message="$(ecdh_key_agreement_message "$2" "$3")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    device-cert-renew )
        message="$(device_cert_renew_message "$2" "$3" "$4")"
        echo "$message"
        mosquitto_pub -t MQTT -m "$message"
        ;;
    * )
        usage
        exit 1
esac
| true
|
273f35b5ef0f86bb32f38dd6e470ef8cc367c881
|
Shell
|
eginez/mutualTlsgo
|
/certs/create.sh
|
UTF-8
| 1,357
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/sh
# Create a demo root CA, then a server and a client certificate signed by
# it; the client cert is also exported as PKCS#12 for OS keystore import.
# Abort on the first failure so the openssl steps never run against a
# half-built CA directory (the original continued after any error).
set -e

# -p: tolerate pre-existing directories so the script can be re-run.
mkdir -p ca client server

## Root CA first
echo Creating root
cd ca
mkdir -p certs crl newcerts private
touch index.txt
echo 1000 > serial
# Generate key (interactive: prompts for the AES passphrase)
openssl genrsa -aes256 -out private/key.pem 2048
# Create root certificate
openssl req -config ../openssl.cnf \
      -key private/key.pem \
      -new -x509 -days 7300 -sha256 -extensions v3_intermediate_ca \
      -out certs/cert.pem
# Verify cert
openssl x509 -noout -text -in certs/cert.pem
tree .

# Create server cert
echo Creating server
cd ../server
# Create key
openssl genrsa -out key.pem 2048
# Create cert signing request
openssl req -config ../openssl.cnf \
      -key key.pem \
      -new -sha256 -out csr.pem
# Sign cert with the CA
openssl ca -config ../openssl.cnf \
      -extensions server_cert -days 375 -notext -md sha256 \
      -in csr.pem \
      -out cert.pem
cat ../ca/index.txt

### Create client
echo Creating client
cd ../client
# Create key
openssl genrsa -out key.pem 2048
# Create cert signing request
openssl req -config ../openssl.cnf \
      -key key.pem \
      -new -sha256 -out csr.pem
# Sign cert with the CA
openssl ca -config ../openssl.cnf \
      -extensions usr_cert -days 375 -notext -md sha256 \
      -in csr.pem \
      -out cert.pem
# Create export pk12
# This key needs to get imported by the os's key store
openssl pkcs12 -export -inkey key.pem -in cert.pem -out client.p12
cat ../ca/index.txt
| true
|
4fd51a8912529219c4f187423eb1b4ec0659ada0
|
Shell
|
thomassuedbroecker/cloud-native-starter
|
/openshift-scripts/show-urls.sh
|
UTF-8
| 3,882
| 3.484375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Print the public URLs of the cloud-native-starter services on OpenShift.

# Repository root = parent of the directory holding this script; quoted so
# paths containing spaces work.
root_folder=$(cd "$(dirname "$0")"; cd ..; pwd)

exec 3>&1

# Timestamped logger used throughout this script.
function _out() {
  echo "$(date +'%F %H:%M:%S') $@"
}

CFG_FILE=${root_folder}/local.env

# Check if config file exists, in this case it will have been modified
if [ ! -f "$CFG_FILE" ]; then
  _out Config file local.env is missing! Check our instructions!
  exit 1
fi

source "$CFG_FILE"
function login() {
  # Authenticate against the cluster with the API key loaded from local.env;
  # abort the whole script on failure.  Tests the command directly instead
  # of the '[ ! $? == 0 ]' antipattern, and quotes the credentials.
  if ! oc login -u apikey -p "$IBMCLOUD_API_KEY" --server="$OPENSHIFT_URL"; then
    _out ERROR: Could not login to OpenShift, please try again
    exit 1
  fi
  oc project cloud-native-starter
}
function openshift_url() {
  # Verify that OPENSHIFT_URL was loaded from local.env before any oc call.
  # Uses a plain quoted -z test instead of the legacy '.$VAR == .' idiom.
  if [ -z "$OPENSHIFT_URL" ]; then
    _out Cannot find a link your OpenShift cluster!
    _out Did you miss to run the script "openshift-scripts/setup-project.sh"?
    exit 1
  fi
}
# Query each deployed service's OpenShift route and print a copy/paste-able
# URL plus a sample request; prints a hint when a route is absent.
# NOTE(review): the '&>/dev/null' after each assignment does not silence
# stderr of the $(oc ...) command substitution (expansions happen before
# redirections apply) — confirm whether oc errors should be visible.
function setup() {
_out ------------------------------------------------------------------------------------
_out URLS
# Currently no Istio!
# _out ------------------------------------------------------------------------------------
#
# _out kiali
# _out Open https://$(oc get route kiali -n istio-system -o jsonpath={.spec.host}) with username: admin, password: admin
#
# _out ------------------------------------------------------------------------------------
#
# _out prometheus
# _out Open https://$(oc get route prometheus -n istio-system -o jsonpath={.spec.host})
#
# _out ------------------------------------------------------------------------------------
#
# _out jaeger
# _out Access via Kiali: Distributed Tracing
#
# _out ------------------------------------------------------------------------------------
#
_out articles
url=$(oc get route articles -o jsonpath={.spec.host}) &>/dev/null
if [ -z "$url" ]; then
_out articles is not available. Run 'minishift-scripts/deploy-articles-java-jee.sh'
else
_out OpenAPI explorer: http://$url/openapi/ui/
_out Sample request: curl -X GET "http://$url/articles/v1/getmultiple?amount=10" -H "accept: application/json"
fi
_out ------------------------------------------------------------------------------------
_out authors
url=$(oc get route authors -o jsonpath={.spec.host}) &>/dev/null
if [ -z "$url" ]; then
_out authors is not available. Run 'minishift-scripts/deploy-authors-nodejs.sh'
else
_out Sample request: curl http://$url/api/v1/getauthor?name=Harald%20Uebele
fi
_out ------------------------------------------------------------------------------------
_out web-api
url=$(oc get route web-api -o jsonpath={.spec.host}) &>/dev/null
if [ -z "$url" ]; then
_out web-api is not available. Run 'minishift-scripts/deploy-web-api-java-jee.sh'
else
_out OpenAPI explorer: http://$url/openapi/ui/
_out Sample request: curl "http://$url/web-api/v1/getmultiple"
fi
_out ------------------------------------------------------------------------------------
_out web-app
url=$(oc get route web-app -o jsonpath={.spec.host}) &>/dev/null
if [ -z "$url" ]; then
_out web-app is not available. Run 'minishift-scripts/deploy-web-app.sh'
else
_out Access web-app via http://$url
fi
# Ingress is optional; only mention it when the resource exists.
ingress=$(oc get ingress cloudnative-ingress -o jsonpath={.spec.rules[0].host}) &>/dev/null
if [ ! -z "$ingress" ]; then
_out You can also access web-app via Kubernetes Ingress http://$ingress
fi
# Currently no Istio!
# httpcode=$(curl -s -o /dev/null -w "%{http_code}" $(oc get route istio-ingressgateway -n istio-system -o jsonpath={.spec.host}))
# if [ $httpcode == 503 ]; then
# _out Istio Ingress is not configured. Run 'minishift-scripts/deploy-istio-ingress-v1.sh'
# else
# _out or via Istio Ingress http://$url
# fi
_out ------------------------------------------------------------------------------------
}
# Main flow: verify configuration, authenticate, then print all URLs.
openshift_url
login
setup
| true
|
6e822e81785b7eb070ac48b89fced122fe16a89c
|
Shell
|
ZQyou/osc-ood-config
|
/ood-test.osc.edu/apps/myjobs/templates/Basic_ANSYS_FLUENT_Serial_Job_Owens/fluent.sh
|
UTF-8
| 859
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
#PBS -N ondemand/sys/myjobs/basic_ansys_fluent_serial
#PBS -l walltime=00:30:00
#PBS -l nodes=1:ppn=1
#PBS -l software=ansys+1
#PBS -j oe

# A basic FLUENT Serial Job
# Further details available at:
# https://www.osc.edu/resources/available_software/software_list/ansys/fluent
#
# The following lines set up the FLUENT environment
#
module load ansys
#
# Move to the directory where the job was submitted from
# You could also 'cd' directly to your working directory
# (quoted so scheduler-provided paths with spaces work)
cd "$PBS_O_WORKDIR"
#
# Copy files to $TMPDIR and move there to execute the program
#
cp /users/oscgen/xwang/Fluent/Demo_tmi_fluent/test.* run.input "$TMPDIR"
cd "$TMPDIR"
#
# Run fluent
fluent 3d -g < run.input
#
# Where the file 'run.input' contains the commands you would normally
# type in at the Fluent command prompt.

# Finally, copy files back to your home directory
cp * "$PBS_O_WORKDIR"
| true
|
37f3e6e65b488f9fe8925be9b56d6f69f00ca4d9
|
Shell
|
jflemer/selenium-oneshot
|
/build.sh
|
UTF-8
| 386
| 3.109375
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Build the selenium and runner images and render docker-compose.yaml.
# The original 'set +x' was a no-op (xtrace is already off); fail fast
# instead so a broken download or image build stops the script before the
# compose file is rendered.
set -e

if [ ! -e docker-compose ]; then
    curl -L --fail https://github.com/docker/compose/releases/download/1.22.0/run.sh -o docker-compose
    chmod +x docker-compose
fi

# Project name = current directory's basename.
NAME="${PWD##*/}"

docker build --tag "${NAME}-selenium:latest" selenium
docker build --tag "${NAME}-runner:latest" runner

# Substitute the project name into the compose template.
sed -e 's/@@NAME@@/'"$NAME"'/g' < docker-compose.yaml.in > docker-compose.yaml
| true
|
7930836d39e769b7c5174646055aca8918c4927e
|
Shell
|
d3v1l401/PeGo
|
/build.sh
|
UTF-8
| 229
| 2.671875
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the GoLangIdentifier plugin (Go plugins are Linux-only).

disos=$(uname)

if [[ "$disos" == "Linux" ]]; then
    # Bug fix: 'go build' requires flags to precede the package/file list;
    # the original placed '-o ...' after main.go, so the output flag was
    # never applied to the build.
    go build -buildmode=plugin -o "../../plugins/goident.p4pg" "$(pwd)/plSources/GoLangIdentifier/main.go"
else
    echo "Unsupported Go Build Architecture"
fi
| true
|
466de84a52e3234b1c2041e552be0a4ae827868d
|
Shell
|
gljivar/powerplant
|
/.git_hooks/pre-commit
|
UTF-8
| 463
| 3.75
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Pre-commit hook: auto-format staged .js/.jsx files with prettier and
# re-stage them so the formatted version is what gets committed.

PRETTIER=./node_modules/.bin/prettier

if [ ! -f "$PRETTIER" ]; then
  echo "prettier not found; run 'npm install' before commit to enable auto-formatting"
  exit 1
fi

# Staged (added/copied/modified) JS files; nothing to do if there are none.
jsfiles=$(git diff --cached --name-only --diff-filter=ACM -- "*.js" "*.jsx")
[ -z "$jsfiles" ] && exit 0

# Prettify all staged .js files.  NUL-delimited (-z / xargs -0) so
# filenames containing spaces survive; the original 'tr | xargs' broke
# on such names.
git diff --cached --name-only --diff-filter=ACM -z -- "*.js" "*.jsx" | xargs -0 "$PRETTIER" --write

# Add back the modified/prettified files to staging
git diff --cached --name-only --diff-filter=ACM -z -- "*.js" "*.jsx" | xargs -0 git add

exit 0
| true
|
58837e258be38b16ca04743e1787487bcfd7d5f9
|
Shell
|
praveenn7/KKN1
|
/p90.sh
|
UTF-8
| 341
| 3.375
| 3
|
[] |
no_license
|
# Prompt for an employee name and id and validate both formats.
# Exits non-zero on invalid input (the original bare 'exit' returned 0,
# so callers could not detect the failure).
read -r -p "Enter a emp name:" ename
# Name must be Title case: one uppercase letter followed by lowercase.
if ! printf '%s\n' "$ename" | grep -qE "^[A-Z][a-z]+$"; then
    echo "Emp name is not in title case format"
    exit 1
fi
read -r -p "Hello..$ename enter your emp ID:" eid
# Id format: one letter A-E followed by exactly three digits.
if ! printf '%s\n' "$eid" | grep -qE "^[A-E][0-9]{3}$"; then
    echo "Emp id:$eid is not valid format"
    exit 1
fi
echo -e "Emp name:$ename\t Empid:$eid"
| true
|
2fce4f3b766123328049a076dc3d6eee9ea49218
|
Shell
|
jpikel/School-Projects
|
/Encryption Server/compileall
|
UTF-8
| 892
| 2.671875
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Filename: compileall
# Author: Johannes Pikel
# ONID: pikelj
# Date: 2017.03.01
# Class: CS344-400
# Assignment: Program 3 OTP
# Description: builds the five executables that make up the OTP project:
#              keygen    - one time pad key generation tool
#              otp_enc_d - encryption server
#              otp_enc   - encryption client
#              otp_dec_d - decryption server
#              otp_dec   - decryption client
#########################################

# Every program is built with identical flags, so compile them in a loop,
# announcing each one as before.
for prog in keygen otp_enc_d otp_enc otp_dec_d otp_dec; do
    echo "compiling ${prog}"
    gcc -x c -g -Wall -pedantic-errors "${prog}.c" -o "${prog}"
done
| true
|
f2602f70865dc3e06eed922211965802b40186cf
|
Shell
|
neutrons/StatisticsService
|
/bin/mantidstats
|
UTF-8
| 2,086
| 3.9375
| 4
|
[
"EPICS"
] |
permissive
|
#!/bin/bash
# This is really just a helper for the init.d script.
# If you want to run the program manually, you're probably better off just
# running python itself.

# First, figure out where we're installed so that we know how to find the
# main module.  We do this with a combination of $0 and (maybe) $PWD.
# ${0:0:1} returns the first character of the $0 string.
# (All expansions are quoted so install paths with spaces work.)
if [ "${0:0:1}" = "/" ]
then
    # OK, called from an absolute path, just strip off the /bin
    STATS_HOME=$(dirname "$0")
elif [ "${0:0:2}" = "./" ]
then
    # Called from the current directory, just strip off the /bin
    STATS_HOME=${PWD}
else
    # Relative path - prepend $PWD
    STATS_HOME="${PWD}/$(dirname "$0")"
fi

# Chop off the '/bin' at the end of STATS_HOME.
# (Note that if STATS_HOME doesn't end with '/bin', this command won't
# change anything.  In that case, we should probably spit out a warning.)
STATS_HOME=${STATS_HOME%/bin}

# Add STATS_HOME/lib to the python path
export PYTHONPATH=${STATS_HOME}/lib:$PYTHONPATH

# Next, try to figure out the MANTIDPATH environment variable.
# NOTE: For now, we want to use mantidnightly, if it exists.
# Once the next release comes out, we can just go with regular mantid.
if [ -z "${MANTIDPATH:-}" ]
then
    if [ -d /opt/mantidnightly ]
    then
        export MANTIDPATH=/opt/mantidnightly/bin
    else
        export MANTIDPATH=/opt/Mantid/bin
    fi
fi

# The python epics code needs to know how to find the native compiled epics
# shared libraries.  There's two ways for it to know: either set the
# PYEPICS_LIBCA env var, or set both the EPICS_BASE and EPICS_HOST_ARCH
# variables.
if [[ -z "${PYEPICS_LIBCA:-}" && (-z "${EPICS_BASE:-}" || -z "${EPICS_HOST_ARCH:-}") ]]
then
    # Yes, I know the error message doesn't mention PYEPICS_LIBCA.  I'm
    # trying to keep things simple.
    echo "EPICS_BASE and EPICS_HOST_ARCH must be set before starting the statistics server."
    echo "Aborting."
    exit 1
fi

# Dial back the number of openMP threads that the Mantid libs will spawn
export OMP_NUM_THREADS=2

python "$STATS_HOME/lib/mantidstats/main.py" "$@" 2>/dev/null >/dev/null &
| true
|
07c188ff5fe483f2397ca635e0456dde193d9f7b
|
Shell
|
phyletica/gekgo
|
/data/genomes/cyrtodactylus_philippinicus_KU330797/assembly/idba_output/30-100-10/job_scripts_and_data/idba_monitor.sh
|
UTF-8
| 1,112
| 2.9375
| 3
|
[
"CC-BY-4.0"
] |
permissive
|
#PBS -N idba_monitor
#PBS -l nodes=1:ppn=1,mem=256m,walltime=350:00:00
#PBS -S /bin/sh
#PBS -q long
#PBS -M joaks1@ku.edu
# Watchdog job: polls qstat for the companion IDBA assembly job and gzips
# each k-mer stage's output as soon as the assembler moves on to the next k.
source ${HOME}/.bash_profile
cd $PBS_O_WORKDIR
kmin=30
kmax=100
kinc=10
output_dir=${PBS_O_WORKDIR}/idba_output
qsub_file=${PBS_O_WORKDIR}/idba.sh
log_file=${output_dir}/log
# proc_num is hard-coded to the monitored job's id — must match the
# actually submitted idba job.
proc_num=42285
proc_id="${proc_num}.${PBS_O_SERVER}@${PBS_O_SERVER}"
qstat_file=${PBS_O_WORKDIR}/qstat.${proc_num}.txt
mem_file=${PBS_O_WORKDIR}/idba.${proc_num}.mem_usage.txt
kmer_current=$kmin
# Poll every 5 minutes until the monitored job disappears from qstat.
while :
do
qstat -f $proc_id > $qstat_file
# Empty qstat output => job finished: compress the final-k files and stop.
if [ -z "$(cat $qstat_file)" ]
then
rm $qstat_file
# NOTE(review): the glob inside [ -e ] only works while it expands to a
# single path; multiple matches would be a test syntax error — confirm.
if [ -e ${output_dir}/*-${kmax}* ]
then
gzip ${output_dir}/*-${kmax}*
fi
break
fi
# Record the job's current memory usage line.
cat $qstat_file | grep -E "used.+mem" > $mem_file
if [ -e $log_file ]
then
# Latest "kmer N" line tells us which k the assembler is working on.
kline=$(grep -E "kmer [0-9]+" $log_file | tail -n 1)
k=${kline/kmer /}
# $[ ] is deprecated arithmetic syntax (kept as-is; equivalent to $(( ))).
if [ $k -eq $[ $kmer_current + $kinc ] ]
then
# The previous stage is done — compress its output files.
gzip ${output_dir}/*-${kmer_current}*
kmer_current=$[ $kmer_current + $kinc ]
fi
fi
sleep 300
done
| true
|
a1917591a8c5c3281bcf1c9669b9207a38f1c70e
|
Shell
|
Yzoni/cuckoo-docker
|
/run_host_3.sh
|
UTF-8
| 292
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/bash
# Phase 4: create a clean VM snapshot inside the cuckoo Docker container.
set -x

echo "Phase 4: Starting installation"

# Setup vars — presumably defines $DID (the container id); verify in ./common.
source ./common

# Creating snapshot ($DID quoted so an empty/odd id fails loudly in docker
# instead of silently shifting the argument list)
docker exec -ti "$DID" /bin/bash -c "source ./venv-vmcloak/bin/activate && vmcloak snapshot win10_x64 win10_x64_clean 192.168.56.101 -d --vrde"

echo "Phase 4: Installation finished"
| true
|
c187b62818caecbe4ad83ea96ec9f796b10da196
|
Shell
|
ECP-WarpX/WarpX
|
/Tools/Release/newVersion.sh
|
UTF-8
| 3,442
| 4.125
| 4
|
[
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause-LBNL"
] |
permissive
|
#!/usr/bin/env bash
#
# Copyright 2021 Axel Huebl
#
# This file is part of WarpX.
#
# This file is a maintainer tool to bump the versions inside WarpX'
# source directory at all places where necessary.
#
# Note: this script is only tested with GNUtools (Linux)
set -eu -o pipefail
# Maintainer Inputs ###########################################################
echo "Hi there, this is a WarpX maintainer tool to update the source"
echo "code of WarpX to a new version number on all places where"
echo "necessary."
echo "For it to work, you need write access on the source directory and"
echo "you should be working in a clean git branch without ongoing"
echo "rebase/merge/conflict resolves and without unstaged changes."
# check source dir
# (repo root is two levels above this script: Tools/Release/ -> repo)
REPO_DIR=$(cd $(dirname ${BASH_SOURCE})/../../ && pwd)
echo
echo "Your current source directory is: ${REPO_DIR}"
echo
read -p "Are you sure you want to continue? [y/N] " -r
echo
if [[ ! ${REPLY} =~ ^[Yy]$ ]]
then
echo "You did not confirm with 'y', aborting."
exit 1
fi
echo "We will now run a few sed commands on your source directory."
echo "Please answer the following questions about the version number"
echo "you want to set first:"
echo
# Interactive version components: MAJOR.MINOR[PATCH][-SUFFIX]
read -p "MAJOR version? (e.g. year: $(date +%y)) " -r
MAJOR=${REPLY}
echo
read -p "MINOR version? (e.g. month: $(date +%m)) " -r
MINOR=${REPLY}
echo
read -p "PATCH version? (e.g. usually empty) " -r
PATCH=${REPLY}
echo
read -p "SUFFIX? (e.g. rc2, dev, ... usually empty) " -r
SUFFIX=${REPLY}
echo
if [[ -n "${SUFFIX}" ]]
then
SUFFIX_STR="-$SUFFIX"
else
SUFFIX_STR=""
fi
if [[ ! -n "${PATCH}" ]]
then
PATCH=""
fi
# NOSUFFIX form is used where tooling (CMake) cannot parse a suffix.
VERSION_STR_NOSUFFIX="${MAJOR}.${MINOR}${PATCH}"
VERSION_STR="${MAJOR}.${MINOR}${PATCH}${SUFFIX_STR}"
echo
echo "Your new version is: ${VERSION_STR}"
echo
read -p "Is this information correct? Will now start updating! [y/N] " -r
echo
if [[ ! ${REPLY} =~ ^[Yy]$ ]]
then
echo "You did not confirm with 'y', aborting."
exit 1
fi
# Updates #####################################################################
# Each sed below anchors on the surrounding syntax via capture groups and
# replaces only the version number (group 2) in place (GNU sed -i).
# CMake scripts
#   CMakeLists.txt: project(WarpX VERSION YY.MM)
sed -i -E "s/"\
"(project\(WarpX VERSION[[:blank:]]+)(.*)(\))/"\
"\1${VERSION_STR_NOSUFFIX}\3/g" \
    ${REPO_DIR}/CMakeLists.txt
#   cmake/dependencies/AMReX.cmake:
#     set(WarpX_amrex_branch "development" ... (future)
#     find_package(AMReX YY.MM CONFIG ...
sed -i -E "s/"\
"(find_package\(AMReX[[:blank:]]+)(.*)([[:blank:]]+CONFIG.+)/"\
"\1${VERSION_STR_NOSUFFIX}\3/g" \
    ${REPO_DIR}/cmake/dependencies/AMReX.cmake
#   cmake/dependencies/PICSAR.cmake (future)
# setup.py: version = '21.02',
sed -i -E "s/"\
"([[:blank:]]*version[[:blank:]]*=[[:blank:]]*')(.*)('.+)/"\
"\1${VERSION_STR}\3/g" \
    ${REPO_DIR}/setup.py
# Python/setup.py: version = '21.02',
sed -i -E "s/"\
"([[:blank:]]*version[[:blank:]]*=[[:blank:]]*')(.*)('.+)/"\
"\1${VERSION_STR}\3/g" \
    ${REPO_DIR}/Python/setup.py
# sphinx / RTD
# docs/source/conf.py
sed -i "s/"\
"[[:blank:]]*version[[:blank:]]*=[[:blank:]]*u.*/"\
"version = u'${VERSION_STR_NOSUFFIX}'/g" \
    ${REPO_DIR}/Docs/source/conf.py
sed -i "s/"\
"[[:blank:]]*release[[:blank:]]*=[[:blank:]]*u.*/"\
"release = u'${VERSION_STR}'/g" \
    ${REPO_DIR}/Docs/source/conf.py
# Epilog ######################################################################
echo
echo "Done. Please check your source, e.g. via"
echo "  git diff"
echo "now and commit the changes if no errors occured."
| true
|
2ed735f1856e5362068f0fe832e9bf919549fb4f
|
Shell
|
izumin5210-sandbox/rails-modern-frontend-with-docker-sample
|
/script/bootstrap
|
UTF-8
| 265
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# First-time project setup: seed .env, build the images, prepare the
# database, then start the dev server.
set -eu

# Directory holding this script (quoted so paths with spaces work).
dirname=$(dirname "$0")

# Seed .env from the sample on first run only.
if [ ! -e "$dirname/../.env" ]; then
  cp -v "$dirname/../.env"{.sample,}
fi

set -x

"$dirname/clean"
docker-compose build
"$dirname/rails" db:create
"$dirname/rails" db:migrate
"$dirname/rails" db:seed_fu
"$dirname/server"
| true
|
3da64d92790d39fecfd54e427dc81525ee054a39
|
Shell
|
ZachWunder/LocalSync
|
/sync.sh
|
UTF-8
| 466
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
# Two-way sync between a local directory and an S3 bucket, triggered by
# filesystem change events.
# $1 = Local Directory
# $2 = S3 Bucket (don't include s3://)
# $3 = AWS Profile Name

# Watch local dir; fswatch emits one line per change event.  All argument
# expansions are quoted so directories containing spaces work.
fswatch -d "$1" |
while read -r directory ; do
  # Syncs from S3 to local directory (Pull)
  AWS_PROFILE="$3" aws s3 sync "s3://$2" "$1" --exclude "*.DS_Store" --exclude ".git/*" --exclude ".obsidian/*"
  # Syncs local directory to S3 (Push)
  AWS_PROFILE="$3" aws s3 sync "$1" "s3://$2" --exclude "*.DS_Store" --exclude ".git/*" --exclude ".obsidian/*"
done
| true
|
d2d949503d2b64747b0c459fec6fbffc43a9ed76
|
Shell
|
vhbb/cmssw
|
/VHbbAnalysis/Heppy/test/crab/localTest.sh
|
UTF-8
| 466
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/sh
# Local dry-run of a CRAB job: build a fake PSet in a scratch directory and
# execute the crab wrapper script against input file $1.
#   $1 - input ROOT file name   $2 - suffix for the scratch directory
# NOTE(review): 'python -i' here relies on the heredoc-less interactive
# trick of piping statements into an already-loaded config — confirm the
# python scripts are written for this invocation style.
FILES=`echo "print ' '.join(config.JobType.inputFiles)" | python -i heppy_crab_config.py `
DIR=/tmp/localtest$2
mkdir $DIR
# Generate the PSet with the requested input file substituted in.
echo -e "process.source.fileNames = ['$1']\nprint process.dumpPython()" | python -i heppy_crab_fake_pset.py > $DIR/PSet.py
# Copy every declared CRAB input file into the scratch directory.
for i in $FILES ; do
cp $i $DIR
done
cp heppy_crab_script.sh $DIR
cd $DIR
scramv1 project CMSSW $CMSSW_VERSION
#cd $CMSSW_VERSION
#eval `scramv1 runtime -sh`
#cd -
#MSSW_BASE=$DIR
./heppy_crab_script.sh $1
| true
|
f4fabb1687cb58e38d40f8205ccf63c8ba7bd5b5
|
Shell
|
grahamgilbert/crostini_setup
|
/crostini_setup.sh
|
UTF-8
| 2,961
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
# One-shot developer-environment provisioning for a Crostini (Debian)
# container: apt repos + packages, VS Code, Chrome, gcloud, Terraform/tfenv,
# Go, Docker and the AWS CLI.  Must run as root; user-level steps use
# $CURRENTUSER (the invoking login).
CURRENTUSER=`logname`
apt install -y lsb-release software-properties-common
# gcloud
# Create environment variable for correct distribution
export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)"
# Add the Cloud SDK distribution URI as a package source
echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee -a /etc/apt/sources.list.d/google-cloud-sdk.list
# Import the Google Cloud Platform public key
curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
# Puppet tools repository (Debian stretch build)
wget https://apt.puppet.com/puppet-tools-release-stretch.deb
dpkg -i puppet-tools-release-stretch.deb
# Docker
curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add -
add-apt-repository \
   "deb [arch=amd64] https://download.docker.com/linux/debian \
   $(lsb_release -cs) \
   stable"
apt update -y
apt upgrade -y
# Bulk install of build tooling, language runtimes and the repos added above.
apt install -y \
    apt-transport-https \
    ca-certificates \
    gnupg \
    curl \
    software-properties-common \
    make \
    gnome-terminal \
    build-essential \
    libssl-dev \
    zlib1g-dev \
    libbz2-dev \
    libreadline-dev \
    libsqlite3-dev \
    keychain \
    wget \
    llvm \
    libncurses5-dev \
    libncursesw5-dev \
    xz-utils \
    tk-dev \
    python3 \
    python3-pip \
    nano \
    fonts-hack-ttf \
    pdk \
    docker-ce \
    gcc \
    keychain \
    google-cloud-sdk
apt install -y --reinstall build-essential
# Install vscode
curl -L -o vscode.deb https://go.microsoft.com/fwlink/?LinkID=760868
dpkg -i vscode.deb
apt install -f -y
rm -f vscode.deb
apt update -y
# Extension install runs as the real user so it lands in their profile.
sudo -u $CURRENTUSER code --install-extension shan.code-settings-sync
# Google Chrome (dpkg twice: first may fail on deps, fixed by 'apt install -fy')
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
dpkg -i google-chrome-stable_current_amd64.deb
apt install -fy
dpkg -i google-chrome-stable_current_amd64.deb
rm -f google-chrome-stable_current_amd64.deb
# Terraform time
curl -L -o terraform.zip https://releases.hashicorp.com/terraform/0.15.4/terraform_0.15.4_linux_amd64.zip
unzip terraform.zip
rm terraform.zip
mkdir -p /usr/local/bin
mv terraform /usr/local/bin/terraform
# Go toolchain under /usr/local/go
wget https://golang.org/dl/go1.16.4.linux-amd64.tar.gz
tar -C /usr/local -xzf go1.16.4.linux-amd64.tar.gz
rm go1.16.4.linux-amd64.tar.gz
# Append the keychain eval to .bashrc only once.
# NOTE(review): the quoted pattern/string below intentionally contains an
# embedded newline; grep -F treats it as two patterns — confirm intended.
grep -q -F 'eval `keychain --eval --agents ssh id_rsa`
' /home/${CURRENTUSER}/.bashrc
if [ $? -ne 0 ]; then
echo 'eval `keychain --eval --agents ssh id_rsa`
' >> /home/${CURRENTUSER}/.bashrc
fi
chown ${CURRENTUSER} /home/${CURRENTUSER}/.bashrc
# ssh-keygen -y -f ~/.ssh/id_rsa > ~/.ssh/id_rsa.pub
# tfenv (Terraform version manager) into the user's home
git clone https://github.com/tfutils/tfenv.git /home/${CURRENTUSER}/.tfenv
chown -R ${CURRENTUSER} /home/${CURRENTUSER}/.tfenv
# Add Go and tfenv to PATH exactly once.
grep -qxF 'export PATH=$PATH:/usr/local/go/bin:.tfenv/bin' /home/$CURRENTUSER/.bashrc || echo 'export PATH=$PATH:/usr/local/go/bin:.tfenv/bin' >> /home/$CURRENTUSER/.bashrc
chown ${CURRENTUSER} /home/${CURRENTUSER}/.bashrc
# aws cli
python3 -m pip install awscli --upgrade
| true
|
6829dab17775c0781b6ca5e3c817fd7138f1d79b
|
Shell
|
c0axial/MacC2
|
/setup.sh
|
UTF-8
| 429
| 2.71875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Build and run the MacC2 server: create a self-signed CA, generate the
# macro payload for the chosen host/port, then build and start the image.

# CA key/cert pair (interactive openssl prompts).
openssl req -new -newkey rsa:2048 -nodes -out ca.csr -keyout ca.key && openssl x509 -trustout -signkey ca.key -days 365 -req -in ca.csr -out ca.pem

echo "Enter IP/hostname of your MacC2 server:"
read -r server
echo "Enter listening port for your MacC2 server:"
read -r port

# Quoted so a hostname/port containing unusual characters is passed intact.
python3 macro_generator.py -s "$server" -p "$port"

docker build -t macc2-docker .
sudo docker run --name=macc2-container -p 443:443 -ti macc2-docker
| true
|
a056c16aa38aff03eb7b59abaee2ec34accf8d3c
|
Shell
|
BM1880-BIRD/bm1880-system-sdk
|
/ramdisk/target/overlay/bm1880/etc/run_usb.sh
|
UTF-8
| 3,642
| 3.65625
| 4
|
[] |
no_license
|
# USB gadget (ConfigFS) configuration defaults for gadget "g_1880".
CLASS=acm # default function class; overridden by $2 (acm|msc|bcm)
VID=0x30b1 # USB vendor id
PID=0x1003 # USB product id
MANUFACTURER="Bitmain"
PRODUCT="USB Com Port"
SERIAL="0123456789"
MSC_FILE=$3 # backing file for mass_storage (only meaningful with 'msc')
BM_DIR=/tmp/usb # configfs mount point
BM_GADGET=$BM_DIR/usb_gadget/g_1880
FUNC_NUM=0 # functions already configured (recomputed by calc_func)
MAX_EP_NUM=4 # hardware endpoint limit, per direction
TMP_NUM=0
EP_IN=0 # IN-endpoint usage accumulator (filled by res_check)
EP_OUT=0 # OUT-endpoint usage accumulator (filled by res_check)
# Map the requested function name ($2) onto its ConfigFS class name.
# An unknown value is only fatal for 'probe'; start/stop take no class.
case "$2" in
acm)
CLASS=acm
;;
msc)
CLASS=mass_storage
;;
bcm)
CLASS=bcm
;;
*)
if [ "$1" = "probe" ] ; then
echo "Usage: $0 probe {acm|msc|bcm}"
exit 1
fi
esac
calc_func() {
    # Count the functions already configured on the gadget: each one is a
    # subdirectory of $BM_GADGET/functions.  Uses find instead of the
    # fragile 'ls -l | grep ^d' output parse.
    FUNC_NUM=$(find "$BM_GADGET"/functions -mindepth 1 -maxdepth 1 -type d | wc -l)
    echo "$FUNC_NUM file(s)"
}
# Verify the gadget has enough free endpoints for the function about to be
# added.  Existing usage is counted per class (acm uses 2 IN + 1 OUT;
# mass_storage and bcm use 1 IN + 1 OUT), then the cost of the new $CLASS
# is added; exceeding MAX_EP_NUM in either direction aborts the script.
res_check() {
# Endpoints already consumed by existing acm functions.
TMP_NUM=$(find $BM_GADGET/functions/ -name acm* | wc -l)
EP_OUT=$(($EP_OUT+$TMP_NUM))
TMP_NUM=$(($TMP_NUM * 2))
EP_IN=$(($EP_IN+$TMP_NUM))
# ...and by existing mass_storage functions.
TMP_NUM=$(find $BM_GADGET/functions/ -name mass_storage* | wc -l)
EP_IN=$(($EP_IN+$TMP_NUM))
EP_OUT=$(($EP_OUT+$TMP_NUM))
# ...and by existing bcm functions.
TMP_NUM=$(find $BM_GADGET/functions/ -name bcm* | wc -l)
EP_IN=$(($EP_IN+$TMP_NUM))
EP_OUT=$(($EP_OUT+$TMP_NUM))
# Add the cost of the function we are about to create.
if [ "$CLASS" = "acm" ] ; then
EP_IN=$(($EP_IN+2))
EP_OUT=$(($EP_OUT+1))
fi
if [ "$CLASS" = "mass_storage" ] ; then
EP_IN=$(($EP_IN+1))
EP_OUT=$(($EP_OUT+1))
fi
if [ "$CLASS" = "bcm" ] ; then
EP_IN=$(($EP_IN+1))
EP_OUT=$(($EP_OUT+1))
fi
echo "$EP_IN in ep"
echo "$EP_OUT out ep"
if [ $EP_IN -gt $MAX_EP_NUM ]; then
echo "reach maximum resource"
exit 1
fi
if [ $EP_OUT -gt $MAX_EP_NUM ]; then
echo "reach maximum resource"
exit 1
fi
}
# Create the gadget skeleton on first use, then add one function of $CLASS.
# Requires root (mounts configfs, writes sysfs attributes).
probe() {
if [ ! -d $BM_DIR ]; then
mkdir $BM_DIR
fi
if [ ! -d $BM_DIR/usb_gadget ]; then
# Enable USB ConfigFS
mount none $BM_DIR -t configfs
# Create gadget dev
mkdir $BM_GADGET
# Set the VID and PID
echo $VID >$BM_GADGET/idVendor
echo $PID >$BM_GADGET/idProduct
# Set the product information string (0x409 = English strings)
mkdir $BM_GADGET/strings/0x409
echo $MANUFACTURER>$BM_GADGET/strings/0x409/manufacturer
echo $PRODUCT>$BM_GADGET/strings/0x409/product
echo $SERIAL>$BM_GADGET/strings/0x409/serialnumber
# Set the USB configuration
mkdir $BM_GADGET/configs/c.1
mkdir $BM_GADGET/configs/c.1/strings/0x409
echo "config1">$BM_GADGET/configs/c.1/strings/0x409/configuration
# Set the MaxPower of USB descriptor
echo 120 >$BM_GADGET/configs/c.1/MaxPower
fi
# get current functions number
calc_func
# Composite device (more than one function): advertise the IAD class triple.
if [ ! $FUNC_NUM -eq 0 ]; then
echo 0xEF >$BM_GADGET/bDeviceClass
echo 0x02 >$BM_GADGET/bDeviceSubClass
echo 0x01 >$BM_GADGET/bDeviceProtocol
if [ "$CLASS" = "bcm" ] ; then
echo "BCM must be the 1st and only function!"
exit 1
fi
fi
# resource check (aborts if endpoints would be exhausted)
res_check
# create the desired function; FUNC_NUM doubles as the instance suffix
mkdir $BM_GADGET/functions/$CLASS.usb$FUNC_NUM
if [ "$CLASS" = "mass_storage" ] ; then
echo $MSC_FILE >$BM_GADGET/functions/$CLASS.usb$FUNC_NUM/lun.0/file
fi
}
# Link every configured function into configuration c.1 and bind the
# gadget to the UDC, making it visible to the USB host.
start() {
# link this function to the configuration
calc_func
if [ $FUNC_NUM -eq 0 ]; then
echo "Functions Empty!"
exit 1
fi
# One symlink per function instance (*.usb0 .. *.usbN-1).
for i in `seq 0 $(($FUNC_NUM-1))`;
do
find $BM_GADGET/functions/ -name *.usb$i | xargs -I % ln -s % $BM_GADGET/configs/c.1
done
# Start the gadget driver by binding it to this SoC's UDC.
echo 500a0000.bm-usb-dev >$BM_GADGET/UDC
}
# Tear everything down in reverse order of probe/start: unbind the UDC,
# remove the config links and function dirs, then unmount configfs.
stop() {
echo "" >$BM_GADGET/UDC
rm $BM_GADGET/configs/c.1/*.usb*
rmdir $BM_GADGET/configs/c.1/strings/0x409/
rmdir $BM_GADGET/configs/c.1/
rmdir $BM_GADGET/functions/*
rmdir $BM_GADGET/strings/0x409/
rmdir $BM_GADGET
umount $BM_DIR
rmdir $BM_DIR
}
# Command dispatch: $1 selects start/stop/probe; anything else prints usage.
case "$1" in
start)
start
;;
stop)
stop
;;
probe)
probe
;;
*)
echo "Usage: $0 probe {acm|msc|bcm} {file (msc)}"
echo "Usage: $0 start"
echo "Usage: $0 stop"
exit 1
esac
# Propagate the dispatched command's exit status.
exit $?
| true
|
abb196c0de77ce8bf303fb4cd4eeed421433bf78
|
Shell
|
alphaaurigae/gentoo_unattented-setup
|
/var/var_main.sh
|
UTF-8
| 1,388
| 2.53125
| 3
|
[
"Apache-2.0"
] |
permissive
|
# Enable cryptsetup (LUKS) for the root filesystem.  Only effective when
# chroot_variables.sh also sets SYSAPP_DMCRYPT="YES"; the cryptsetup USE
# flag itself is configured in var/chroot_variables.sh.
CRYPTSETUP="YES"
# NO = LVM directly on the root partition; YES = LVM inside the LUKS container.
## DRIVES & PARTITIONS
HDD1="/dev/sda" # target disk for the Gentoo install
# GRUB_PART=/dev/sda1 # var not in use
BOOT_PART="/dev/sda2" # /boot — stays unencrypted unless further changes are made
MAIN_PART="/dev/sda3" # main fs — LUKS container holding the LVM environment
## SWAP — defined here so a swap file on an external device is possible too.
### SWAPFILE — useful during install on low-RAM VMs (use KVM to avoid errors, e.g. firefox avx2)
SWAPFILE="swapfile1"
SWAPFD="/swapdir" # directory that will hold the swap file
SWAPSIZE="50G" # swap file size, unit appended (G = gigabytes)
### SWAP PARTITION (disabled alternative to the swap file)
# SWAP0=swap0 # LVM swap name, for ordering of swap partitions
# SWAP_SIZE="1GB" # (inside LVM MAIN_PART)
# SWAP_FS=linux-swap # swap filesystem type
## FILESYSTEMS — must match FSTOOLS and FSTAB settings; keep consistent with the partitions above.
FILESYSTEM_BOOT="ext2" # /boot
FILESYSTEM_MAIN="ext4" # Gentoo root
## LVM naming
PV_MAIN="pv0" # LVM physical volume
VG_MAIN="vg0" # LVM volume group
LV_MAIN="lv0" # LVM logical volume
# MISC VARS — terminal formatting codes used by the installer output (required).
bold="$(tput bold)" # (!important)
normal="$(tput sgr0)" # (!important)
| true
|
7d86c5587e6d06b303054e942566d0807bc4ad5a
|
Shell
|
danbikle/forecast4
|
/script/calc_dips.bash
|
UTF-8
| 361
| 2.96875
| 3
|
[] |
no_license
|
#!/bin/bash
# calc_dips.bash
# This script should transform a series of GSPC prices into a series of dips.
# Demo:
#   ./calc_dips.bash
# Run in the folder which holds this script; abort if the cd fails so we
# never run the pipeline from the wrong directory (the old backtick form
# also broke on paths containing spaces).
cd "$(dirname "$0")" || exit 1
# Fetch the prices before generating the dips:
./reqp.bash
# Calculate the dips:
~/anaconda3/bin/python calc_dips.py
exit
| true
|
7acafcee67342af2fccba25267f7e724dc3cd72d
|
Shell
|
anniyanvr/DeepSpeech-1
|
/examples/voxceleb/sv0/local/emb.sh
|
UTF-8
| 1,979
| 3.171875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Extract a speaker embedding from one audio file with a trained ECAPA-TDNN.
. ./path.sh
# Defaults below may be overridden by the command-line flags that
# parse_options.sh translates into variable assignments.
stage=0
stop_stage=100
exp_dir=exp/ecapa-tdnn-vox12-big/ # experiment directory
conf_path=conf/ecapa_tdnn.yaml
audio_path="demo/voxceleb/00001.wav"
use_gpu=true
# Kaldi-style option parser: rewrites the variables above from --flags.
. ${MAIN_ROOT}/utils/parse_options.sh || exit -1;
# After option parsing no positional arguments are expected.
if [ $# -ne 0 ] ; then
    echo "Usage: $0 [options]";
    echo "e.g.: $0 ./data/ exp/voxceleb12/ conf/ecapa_tdnn.yaml"
    echo "Options: "
    echo " --use-gpu <true,false|true> # specify is gpu is to be used for training"
    echo " --stage <stage|-1> # Used to run a partially-completed data process from somewhere in the middle."
    echo " --stop-stage <stop-stage|100> # Used to run a partially-completed data process stop stage in the middle"
    echo " --exp-dir # experiment directorh, where is has the model.pdparams"
    echo " --conf-path # configuration file for extracting the embedding"
    echo " --audio-path # audio-path, which will be processed to extract the embedding"
    exit 1;
fi
# set the test device
# (note: `if ${use_gpu}` executes the variable's value as a command —
# "true"/"false" builtins — a common Kaldi-style idiom)
device="cpu"
if ${use_gpu}; then
    device="gpu"
fi
if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
    # extract the audio embedding
    python3 ${BIN_DIR}/extract_emb.py --device ${device} \
            --config ${conf_path} \
            --audio-path ${audio_path} --load-checkpoint ${exp_dir}
fi
| true
|
a33220cc0b670107b1801596d7f081b0dafa9acf
|
Shell
|
rubythonode/slash-developers
|
/generate-docs.sh
|
UTF-8
| 1,101
| 3.21875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Regenerate docs/ from raw_docs/: transclude each Markdown file with
# hercule, then substitute the Contentful IDs/tokens into the output.
rm -dfr docs
echo "Processing Markdown"
# NUL-delimited find so file names containing whitespace survive intact
# (the previous `for filename in $(find …)` word-split such paths).
while IFS= read -r -d '' filename; do
  echo "$filename"
  # Map raw_docs/<path> -> docs/<path>
  result_string="${filename/raw_docs/docs}"
  echo "$result_string"
  mkdir -p "$(dirname "$result_string")" && touch "$result_string"
  hercule "$filename" -o "$result_string"
  # Space ID
  sed -i -e 's/<space_id>/71rop70dkqaj/g' "$result_string"
  # Access token
  sed -i -e 's/<access_token>/297e67b247c1a77c1a23bb33bf4c32b81500519edd767a8384a4b8f8803fb971/g' "$result_string"
  # Specific entry ID
  sed -i -e 's/<entry_id>/5KsDBWseXY6QegucYAoacS/g' "$result_string"
  # Specific brand content type
  sed -i -e 's/<brand_content_type_id>/sFzTZbSuM8coEwygeUYes/g' "$result_string"
  # Specific product content type
  sed -i -e 's/<product_content_type_id>/2PqfXUJwE8qSYKuM0U6w8M/g' "$result_string"
  # SKU value
  sed -i -e 's/<sku_value>/B00E82D7I8/g' "$result_string"
  # Specific asset
  sed -i -e 's/<asset_id>/wtrHxeu3zEoEce2MokCSi/g' "$result_string"
done < <(find raw_docs -type f -name '*.md' -print0)
rm -dfr docs/_partials
| true
|
9e9f124bef10bb4acba7a7956d105b6e122ee921
|
Shell
|
aleandros/metaprogramming-demo
|
/wtf.sh
|
UTF-8
| 431
| 3.84375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Watch a process: log the system's top memory consumer to most-memory.log
# every 2s in the background while printing, once per second, how many
# instances of the target process are running.  $1 = process name.
TARGET=$1
if ! (pgrep "$TARGET" &>/dev/null) ; then
  echo "Process not found"
  exit 1
fi
# Emit "<command>,<%mem>" for the single process using the most memory.
most_memory() {
  ps aux --sort -%mem |
  sed -n 2p |
  awk '{print $11; print $4}' |
  paste -sd,
}
# Append a timestamped top-memory sample every 2 seconds, forever.
log_memory() {
  while true
  do
    echo "$(date),$(most_memory)%" >> "most-memory.log"
    sleep 2
  done
}
log_memory &
logger_pid=$!
# Reap the background logger on exit/Ctrl-C; previously it was orphaned
# and kept appending to most-memory.log after the script died.
trap 'kill "$logger_pid" 2>/dev/null' EXIT
while true
do
  echo "$(pgrep "$TARGET" | wc -l) processes for $TARGET running"
  sleep 1
done
| true
|
33f1f69f1ba450fbfacfc9e137a1868adfe8e0a9
|
Shell
|
hesong1976/onos
|
/tools/test/bin/onos-check-intent
|
UTF-8
| 557
| 3.5625
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# -----------------------------------------------------------------------------
# Checks that all intents in the system have a given state.
#   $1 = target ONOS node (defaults to $OCI), $2 = expected intent state
# Retries up to 3 times, one second apart; exits 0 on first match.
# -----------------------------------------------------------------------------
[ ! -d "$ONOS_ROOT" ] && echo "ONOS_ROOT is not defined" >&2 && exit 1
. $ONOS_ROOT/tools/build/envDefaults
aux=/tmp/stc-$$.log
# Remove the scratch log on any exit path.
trap "rm -f $aux 2>/dev/null" EXIT
target=${1:-$OCI}
# Trace each attempt for STC debugging output.
set -x
for i in 1 2 3; do
    # Append matching intent lines to $aux; on a match, dump and succeed.
    onos $target "onos:intents" | grep "state=$2" >> $aux && cat $aux && exit 0
    sleep 1
done
cat $aux
exit 1
| true
|
11e60b4db1e2bbeaa96e68118d8dda52afbbd5e4
|
Shell
|
petronny/aur3-mirror
|
/fcgiwrap-git/PKGBUILD
|
UTF-8
| 1,287
| 2.8125
| 3
|
[] |
no_license
|
# Arch Linux PKGBUILD for fcgiwrap built from the upstream git HEAD.
# Contributor: Ron Huang <ronhuang+aur at gmail dot com>
pkgname=fcgiwrap-git
pkgver=20100930
pkgrel=1
pkgdesc="Simple FastCGI wrapper for CGI scripts"
arch=('i686' 'x86_64')
url="http://github.com/gnosek/fcgiwrap/"
license=('MIT')
depends=('spawn-fcgi')
makedepends=('git' 'autoconf' 'automake' 'fcgi')
provides=('fcgiwrap')
conflicts=('fcgiwrap')
backup=('etc/conf.d/fcgiwrap')
install=fcgiwrap.install
source=(conf initscript)
md5sums=('2a4fa3e8a96610423875040fba35d53e'
         '0716a447478c478f0a00a34b85076173')
_gitroot="http://github.com/gnosek/fcgiwrap.git"
_gitname="fcgiwrap"
# NOTE(review): this legacy-style build() also installs into $pkgdir;
# modern PKGBUILDs put the install steps in a separate package() function.
build() {
  cd "$srcdir"
  msg "Connecting to GIT server...."
  # Reuse a previously cloned checkout if present; otherwise clone fresh.
  if [ -d $_gitname ]; then
    cd $_gitname && git pull origin
    msg "The local files are updated."
  else
    git clone $_gitroot $_gitname
  fi
  msg "GIT checkout done or server timeout"
  msg "Starting make..."
  # Build in a disposable copy so the pristine clone stays untouched.
  rm -rf "$srcdir/$_gitname-build"
  git clone "$srcdir/$_gitname" "$srcdir/$_gitname-build"
  cd "$srcdir/$_gitname-build"
  #
  # BUILD HERE
  #
  autoreconf -i
  ./configure --prefix /usr
  make
  make DESTDIR="$pkgdir/" install
  # Install the rc.d initscript and its conf.d configuration.
  mkdir -p $pkgdir/etc/rc.d
  install -Dm755 $srcdir/initscript $pkgdir/etc/rc.d/fcgiwrap
  mkdir -p $pkgdir/etc/conf.d
  install -Dm644 $srcdir/conf $pkgdir/etc/conf.d/fcgiwrap
}
| true
|
2b2ffe6f134499dc2a5712d4fe0d73bde234eca0
|
Shell
|
qizhenghai2020/mdserver-mac-reinstall
|
/cmd/base/cmd_icu.sh
|
UTF-8
| 786
| 2.890625
| 3
|
[
"MIT"
] |
permissive
|
#! /bin/sh
# Download, build and install ICU 52.2 from source on macOS, unless it is
# already installed under /usr/local/lib/icu/52.2.
export PATH=$PATH:/opt/local/bin:/opt/local/sbin:/opt/local/share/man:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/X11/bin
# Walk four directories up from this script to find the project root.
DIR=$(cd "$(dirname "$0")"; pwd)
DIR=$(dirname "$DIR")
DIR=$(dirname "$DIR")
DIR=$(dirname "$DIR")
MDIR=$(dirname "$DIR")
# Quote every $MDIR expansion: the path is derived from the script's
# location and may contain spaces (common on macOS).
mkdir -p "$MDIR/source/cmd"
echo 'icu4c start'
if [ ! -d /usr/local/lib/icu/52.2 ];then
  if [ ! -f "$MDIR/source/cmd/icu4c-52_2-src.tgz" ];then
    wget -O "$MDIR/source/cmd/icu4c-52_2-src.tgz" https://github.com/unicode-org/icu/releases/download/release-52-2/icu4c-52_2-src.tgz
  fi
  if [ ! -d "$MDIR/source/cmd/icu" ];then
    cd "$MDIR/source/cmd" && tar -zxvf icu4c-52_2-src.tgz
    cd "$MDIR/source/cmd/icu/source"
    ./runConfigureICU MacOSX && make CXXFLAGS="-g -O2 -std=c++11" && make install
  fi
fi
echo 'icu4c end'
| true
|
273ca30ebb8ed782b2acfc17ba06ef93ed4ef9a3
|
Shell
|
quattrococodrilo/terminal_curso
|
/8_readValidate.sh
|
UTF-8
| 535
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
# Example program showing how to capture user input
# and validate it.
# Author: Luis Fernando Cruz Carrillo
# Email: quattrococodrilo@gmail.com
option=0
backupName=""
clave=""
echo "Postgres Utilities program"
# Accept only a single character of input (-n1)
read -n1 -p "Enter an option: " option
echo -e "\n"
# Accept at most ten characters for the backup name (-n10)
read -n10 -p "Enter a backup name: " backupName
echo -e "\n"
echo "Option: $option"
echo "Backup Name: $backupName"
# The password is captured but not echoed to the terminal thanks to -s (silent)
read -s -p "Clave: " clave
echo "Clave: $clave"
| true
|
9526239f3049c26e21e108d2037c104b1be87d2b
|
Shell
|
ezyang/pytorch-unattached
|
/tools/cpp_build/build_nanopb.sh
|
UTF-8
| 514
| 3.3125
| 3
|
[
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Configure and build the nanopb third-party dependency for PyTorch.
# Fails fast and traces each command (-ex).
set -ex
SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
pushd $SCRIPTPATH
# build_common.sh supplies NANOPB_BUILDPATH, INSTALL_PREFIX, GENERATE,
# MAKE, JOBS and PYTORCHPATH — presumably; confirm against that file.
source ./build_common.sh
echo "Building nanopb"
mkdir -p $NANOPB_BUILDPATH
pushd $NANOPB_BUILDPATH
cmake -DCMAKE_BUILD_TYPE:STRING=Release \
      -DCMAKE_INSTALL_PREFIX:STRING=$INSTALL_PREFIX \
      -DCMAKE_INSTALL_MESSAGE=NEVER \
      -Dnanopb_BUILD_GENERATOR:BOOL=OFF \
      -DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON \
      -G "$GENERATE" \
      $PYTORCHPATH/third_party/nanopb
$MAKE -j "$JOBS"
popd
popd
| true
|
72fcdefdd20695b32584953157cf331bf2b650c6
|
Shell
|
AngelofWoe/arkos
|
/RG351P-M/Ubuntu OS Partition/usr/share/initramfs-tools/hooks/kmod
|
UTF-8
| 430
| 3.234375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh -e
# initramfs-tools hook: ship kmod and its modprobe/rmmod compatibility
# symlinks, plus any /lib/modprobe.d configuration, into the initramfs.
# Copy the compatibility symlinks until initramfs-tools will be converted
# to use the kmod program.
if [ "$1" = "prereqs" ]; then exit 0; fi
. /usr/share/initramfs-tools/hook-functions
copy_exec /bin/kmod
cp -a /sbin/modprobe /sbin/rmmod "$DESTDIR/sbin/"
mkdir -p "$DESTDIR/lib/modprobe.d/"
# The glob-comparison trick below is "directory is non-empty": if the glob
# matched nothing it expands to the literal pattern and the test fails.
if [ "$(echo /lib/modprobe.d/*)" != "/lib/modprobe.d/*" ]; then
  cp -a /lib/modprobe.d/* "$DESTDIR/lib/modprobe.d/"
fi
| true
|
3bf2e1d39123449a5be8474c57cda2dfe39a2417
|
Shell
|
eij/Shot
|
/shot
|
UTF-8
| 597
| 3.8125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# shot — take a screenshot with scrot after a 1-second delay.
#   -n <name>  output location/name (".png" appended when missing)
#   -h         display help
VERSION="1.0"
type -P scrot &>/dev/null || { echo "Scrot is missing, install it." >&2; exit 1; }
type -P tell &>/dev/null || { echo "Tell is missing, install it." >&2; exit 1; }
SHOT_FOLDER="$HOME/"
SHOT_FILE="$(date +%s%N).png"
while getopts "hn:" OPT
do
  case $OPT in
    n)
      # Append .png unless the name already ends with it.  The old test,
      # `[ \`echo $OPTARG | grep ".png$"\` ]`, broke on names containing
      # spaces and its unescaped dot also matched e.g. "fooXpng".
      if [[ "$OPTARG" == *.png ]]; then
        SHOT_FILE=$OPTARG
      else
        SHOT_FILE=$OPTARG.png
      fi
      ;;
    h)
      echo "Usage: shot [OPTIONS]"
      echo
      echo " -n <name> location and name"
      echo " -h display this help"
      exit 0
      ;;
  esac
done
clear
tell
# Quote the target so folder/file names with spaces reach scrot intact.
scrot -d1 "$SHOT_FOLDER$SHOT_FILE"
exit 0
| true
|
6fe02d34d7e6135b3816f5bafd2d1a4666ebac4a
|
Shell
|
rommi4/rvconfig
|
/.bashrc
|
UTF-8
| 3,639
| 3.25
| 3
|
[] |
no_license
|
# .bashrc — interactive-shell setup: history, prompt, colors, aliases,
# and tool integrations (kubectl, terraform, gcloud, go, fzf).
# Source global definitions
if [ -f /etc/bashrc ]; then
  . /etc/bashrc
fi
# User specific environment
if ! [[ "$PATH" =~ "$HOME/.local/bin:$HOME/bin:" ]]
then
  PATH="$HOME/.local/bin:$HOME/bin:$PATH"
fi
export PATH
# Uncomment the following line if you don't like systemctl's auto-paging feature:
# export SYSTEMD_PAGER=
# User specific aliases and functions
HISTCONTROL=ignoreboth:erasedups
HISTTIMEFORMAT="%Y%m%d %H:%M:%S "
# Empty HISTSIZE/HISTFILESIZE means unlimited — note both are overridden
# with explicit limits a few lines below; the later assignments win.
HISTSIZE=
HISTFILESIZE=
# append to the history file, don't overwrite it
shopt -s histappend
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
HISTSIZE=1000000
HISTFILESIZE=2000000
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# make less more friendly for non-text input files, see lesspipe(1)
[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)"
# set a fancy prompt (non-color, unless we know we "want" color)
case "$TERM" in
  xterm-color) color_prompt=yes;;
  xterm) color_prompt=yes;;
esac
# uncomment for a colored prompt, if the terminal has the capability; turned
# off by default to not distract the user: the focus in a terminal window
# should be on the output of commands, not on the prompt
force_color_prompt=yes
if [ -n "$force_color_prompt" ]; then
  if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then
    # We have color support; assume it's compliant with Ecma-48
    # (ISO/IEC-6429). (Lack of such support is extremely rare, and such
    # a case would tend to support setf rather than setaf.)
    color_prompt=yes
  else
    color_prompt=
  fi
fi
if [ "$color_prompt" = yes ]; then
  PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\n\$ '
else
  PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\n\$ '
fi
unset color_prompt force_color_prompt
# If this is an xterm set the title to user@host:dir
case "$TERM" in
  xterm*|rxvt*)
    PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
    ;;
  *)
    ;;
esac
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
  test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
  alias ls='ls --color=auto'
  #alias dir='dir --color=auto'
  #alias vdir='vdir --color=auto'
  alias grep='grep --color=auto'
  alias fgrep='fgrep --color=auto'
  alias egrep='egrep --color=auto'
fi
# some more ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
# Alias definitions.
# You may want to put all your additions into a separate file like
# ~/.bash_aliases, instead of adding them here directly.
# See /usr/share/doc/bash-doc/examples in the bash-doc package.
if [ -f ~/.bash_aliases ]; then
  . ~/.bash_aliases
fi
#### HERE GOES RV part
export VAGRANT_DEFAULT_PROVIDER=virtualbox
export EDITOR=vim
export GOPATH=~/.go
export PATH="$PATH:${GOPATH}/bin"
export FZF_DEFAULT_COMMAND='rg --files'
# Keep a stable ssh agent socket path so long-running tmux/screen sessions
# survive agent restarts.
if [ ! -S ~/.ssh/ssh_auth_sock ]; then
  ln -sf "$SSH_AUTH_SOCK" ~/.ssh/ssh_auth_sock
fi
### kubectl autocompletion
# NOTE(review): this aborts .bashrc loading if kubectl is not installed — confirm.
source <(kubectl completion bash)
### terraform autocompletion
complete -C ~/bin/terraform terraform
# Temporary https://stackoverflow.com/questions/64010263/gcloud-not-working-with-fedora33-and-python3-9
# again fedora 35, python3.10
export CLOUDSDK_PYTHON=python3.7
export GOOGLE_APPLICATION_CREDENTIALS=~/.config/gcloud/application_default_credentials.json
# eval "$(starship init bash)"
### https://www.terraform.io/cli/config/config-file
export TF_PLUGIN_CACHE_DIR="$HOME/.terraform.d/plugin-cache"
| true
|
cd851ec335a15200536d48387ce4d27d59be93e5
|
Shell
|
thovarMS/5clickTemplates
|
/RawCluster/install-abq.sh
|
UTF-8
| 2,949
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/bash
# Stage the Abaqus 2016 installers on an Azure HPC node, write manual
# installation instructions, and append MPI/PATH settings to the user's
# .bashrc.  $1 = admin user name, $2 = license server IP.
USER=$1
LICIP=$2
HOST=`hostname`
echo $USER,$LICIP,$HOST
# RHEL prerequisites for the Abaqus installer.
yum install -y redhat-lsb-core
yum install -y compat-libstdc++-33.i686
yum install -y ksh
mkdir /mnt/resource/scratch/applications
mkdir /mnt/resource/scratch/INSTALLERS
mkdir /mnt/resource/scratch/benchmark
# Fetch and unpack the three installer tarballs.
wget -q http://azbenchmarkstorage.blob.core.windows.net/abaqusbenchmarkstorage/2016.AM_SIM_Abaqus.AllOS.1-3.tar -O /mnt/resource/scratch/2016.AM_SIM_Abaqus.AllOS.1-3.tar
wget -q http://azbenchmarkstorage.blob.core.windows.net/abaqusbenchmarkstorage/2016.AM_SIM_Abaqus.AllOS.2-3.tar -O /mnt/resource/scratch/2016.AM_SIM_Abaqus.AllOS.2-3.tar
wget -q http://azbenchmarkstorage.blob.core.windows.net/abaqusbenchmarkstorage/2016.AM_SIM_Abaqus.AllOS.3-3.tar -O /mnt/resource/scratch/2016.AM_SIM_Abaqus.AllOS.3-3.tar
tar -xf /mnt/resource/scratch/2016.AM_SIM_Abaqus.AllOS.1-3.tar -C /mnt/resource/scratch/INSTALLERS/
tar -xf /mnt/resource/scratch/2016.AM_SIM_Abaqus.AllOS.2-3.tar -C /mnt/resource/scratch/INSTALLERS/
tar -xf /mnt/resource/scratch/2016.AM_SIM_Abaqus.AllOS.3-3.tar -C /mnt/resource/scratch/INSTALLERS/
# Write step-by-step instructions for the interactive installer runs.
echo USE THE BELOW COMMANDS AND PATHS FOR EACH STEP IN THE INSTALLATION PROCESS > /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo ksh /mnt/resource/scratch/INSTALLERS/AM_SIM_Abaqus.AllOS/1/3DEXPERIENCE_AbaqusSolver/Linux64/1/StartTUI.sh >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo /mnt/resource/scratch/applications/DassaultSystemes/SimulationServices/V6R2016x >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo ksh /mnt/resource/scratch/INSTALLERS/AM_SIM_Abaqus.AllOS/1/SIMULIA_Abaqus_CAE/Linux64/1/StartTUI.sh >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo /mnt/resource/scratch/applications/SIMULIA/CAE/2016 >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo /mnt/resource/scratch/applications/DassaultSystemes/SimulationServices/V6R2016x >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo /mnt/resource/scratch/applications/DassaultSystemes/SIMULIA/Commands >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo /mnt/resource/scratch/temp >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
echo LICENSE IS AT $LICIP >> /mnt/resource/scratch/INSTALLERS/install_abq.txt
# BUG FIX: this line was missing its redirect, so HOSTS was printed to
# stdout instead of being appended to .bashrc like the lines below.
echo export HOSTS=/home/$USER/bin/nodenames.txt >> /home/$USER/.bashrc
echo export INTELMPI_ROOT=/opt/intel/impi/5.1.3.181 >> /home/$USER/.bashrc
echo export I_MPI_FABRICS=shm:dapl >> /home/$USER/.bashrc
echo export I_MPI_DAPL_PROVIDER=ofa-v2-ib0 >> /home/$USER/.bashrc
echo export I_MPI_ROOT=/opt/intel/compilers_and_libraries_2016.2.181/linux/mpi >> /home/$USER/.bashrc
# NOTE(review): $PATH expands *now*, baking this root shell's PATH into the
# user's .bashrc — presumably it should be escaped (\$PATH); confirm.
echo export PATH=/mnt/resource/scratch/applications/DassaultSystemes/SIMULIA/Commands:$PATH >> /home/$USER/.bashrc
echo export I_MPI_DYNAMIC_CONNECTION=0 >> /home/$USER/.bashrc
chown -R $USER:$USER /mnt/resource/scratch/*
chown -R $USER:$USER /mnt/nfsshare
| true
|
18f11dd108b2924e7e1844c48f61b7ff0526a696
|
Shell
|
StoutCEE/server_setup
|
/LAMP/lamp_setup.sh
|
UTF-8
| 851
| 3.1875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Unattended LAMP stack install (Apache2, MySQL, PHP) for Ubuntu/Debian.
# NOTE(review): the awk below prints field 4 of the `inet` line; on typical
# `ip addr` output field 2 is the address and field 4 is the broadcast —
# confirm the intended field.
hostip="$(ip addr | grep 'state UP' -A2 | awk '/inet / {print $4}')"
# Apache2 Install
# Installing Apache2
apt-get update
apt-get -y install apache2
# Add host IP to Apache2 config file
# (path typo fixed: was /etc/apace2/apache2.conf, which silently created a
# stray file instead of updating the real config)
echo ServerName $hostip >> /etc/apache2/apache2.conf
# Check for errors and restart Apache2
apache2ctl configtest
systemctl restart apache2
# Adjust Firewall to Allow Traffic
ufw app list
ufw app info "Apache Full"
ufw allow in "Apache Full"
# MySQL Install
# Installing MySQL
apt-get update
apt-get -y install mysql-server
# Secure Installation Script - Interactive
# mysql_secure_installation
# PHP Install
# Installing PHP and dependencies
apt-get -y install php libapache2-mod-php php-mcrypt php-mysql
# Copy properly configured file to Apache2 directory
cp dir.conf /etc/apache2/mods-enabled/dir.conf
# Restart Apache2
systemctl restart apache2
| true
|
c086a774b5118b4eac5825a000b748498549cbd5
|
Shell
|
yihanxiaotai/bash_scripts
|
/rasp.sh
|
UTF-8
| 2,236
| 3.296875
| 3
|
[] |
no_license
|
#! /bin/bash
# script to install/create an environment for ReAgent in CentOS 8 (no GPU)
# based on https://github.com/facebookresearch/ReAgent/blob/master/docs/installation.rst
# Sequential installer: system packages -> miniconda -> ReAgent (python 3.7)
# -> Scala/maven/spark -> preprocessing JAR -> libtorch -> RASP build.
# installing git and other packages (development, utilities).
sudo yum install git gcc openssl-devel bzip2-devel libffi-devel zlib-devel wget cmake unzip -y
sudo yum groupinstall "Development Tools" -y
# installing miniconda (with the latest python 3, currently 3.8.3)
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
bash Miniconda3-latest-Linux-x86_64.sh -b -p $HOME/miniconda
rm Miniconda3-latest-Linux-x86_64.sh
export PATH=$HOME/miniconda/bin:$PATH
# creating python3.7 environment because currently some dependent pypi packages for ReAgent do not install under python3.8.
conda create -n reagent python=3.7 -y
source activate reagent
# installing ReAgent and its dependent packages
git clone https://github.com/facebookresearch/ReAgent.git
cd ReAgent
pip install ".[gym]"
# installing nightly torch (change cpu to cu101/102 if fit)
pip install --pre torch torchvision -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html --use-feature=2020-resolver
# verifying the setup (can be skipped in the script; taking >> 20 mins)
#pip install tox
#tox
# installing Scala, maven for spark JAR
curl -s "https://get.sdkman.io" | bash
source "$HOME/.sdkman/bin/sdkman-init.sh"
sdk install scala
sdk install maven
# installing spark
sdk install spark 2.4.6
# building preprocessing JAR
mvn -f preprocessing/pom.xml clean package
# installing requirements for RASP
conda install --file rasp_requirements.txt -y
# installing lib torch (for cuda 10.2)
#wget https://download.pytorch.org/libtorch/nightly/cu102/libtorch-cxx11-abi-shared-with-deps-latest.zip
# installing lib torch (for no cuda)
wget https://download.pytorch.org/libtorch/nightly/cpu/libtorch-cxx11-abi-shared-with-deps-latest.zip
unzip libtorch-cxx11-abi-shared-with-deps-latest.zip -d $HOME
rm libtorch-cxx11-abi-shared-with-deps-latest.zip
# init git submodules
git submodule update --force --recursive --init --remote
# building RASP
mkdir -p serving/build
cd serving/build
cmake -DCMAKE_PREFIX_PATH=$HOME/libtorch -DCMAKE_CXX_STANDARD=17 ..
make
| true
|
f80e239b016ed5126909666c078ddb4515aa82a8
|
Shell
|
uchilaka/wp-in-docker
|
/scripts/checkEntrypoint
|
UTF-8
| 144
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Report whether the project's Docker entrypoint script is present in the
# current working directory.
entrypoint="./docker-entrypoint.sh"
if [ -f "$entrypoint" ]; then
  echo "Docker entrypoint exists!"
else
  echo "docker-entrypoint.sh not found :("
fi
| true
|
a6ee46bdb91a47db99312aac925a6a2fd0a512b2
|
Shell
|
martelogan-personal-site/personal-site-release
|
/deploy.sh
|
UTF-8
| 4,640
| 4.34375
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Script to automate build and deployment of personal website.
# Copyright (C) 2015 Logan Martel - All Rights Reserved
# Permission to copy and modify is granted under the GNU Apache License 2.0
# Last revised 06/29/2018
# See README.md for further details.
# Note, this script has been modified & based loosely on:
# https://github.com/X1011/git-directory-deploy/blob/master/deploy.sh
set -o errexit # abort if any command fails
scriptname=$(basename "$0")
# Usage text printed by -h/--help; expands $scriptname at definition time.
help_message="\
Usage: $scriptname
Deploy generated files to a git branch.
Options:
  -h, --help               Show this help information.
  -m, --message MESSAGE    Specify the commit message.
  -n, --no-hash            Don't append the source commit's hash to the message
  -f, --force              Force confirmation to all prompts (as possible)
  -c, --config-file PATH   Override default & environment variables' values
                           with those in set in the file at 'PATH'. Must be the
                           first option specified.
"
parse_args() {
  # Load configuration, then parse command-line flags.
  # Precedence: defaults < .env file < -c/--config-file < explicit flags.
  # Set args from a local environment file.
  if [ -e ".env" ]; then
    source .env
  fi
  # Set args from file specified on the command-line.
  # (-c must be the first option; it is consumed before flag parsing.)
  if [[ $1 = "-c" || $1 = "--config-file" ]]; then
    source "$2"
    shift 2
  fi
  # Parse arg flags
  # If something is exposed as an environment variable, set/overwrite it
  # here. Otherwise, set/overwrite the internal variable instead.
  while : ; do
    if [[ $1 = "-h" || $1 = "--help" ]]; then
      echo "$help_message"
      exit 0
    elif [[ ( $1 = "-m" || $1 = "--message" ) && -n $2 ]]; then
      commit_message=$2
      shift 2
    elif [[ $1 = "-n" || $1 = "--no-hash" ]]; then
      # NOTE(review): this sets GIT_DEPLOY_APPEND_HASH, but main() tests
      # $append_hash — the flag appears to have no effect; confirm.
      GIT_DEPLOY_APPEND_HASH=false
      shift
    elif [[ $1 = "-f" || $1 = "--force" ]]; then
      FORCE_CONFIRM="y"
      shift
    else
      break
    fi
  done
}
prompt_confirmation() {
  # Ask the user the yes/no question in $1 unless a forced answer is
  # supplied in $2.  The result is published via the global CONFIRMATION.
  CONFIRMATION="n"
  if [ -n "${2:-}" ]; then
    CONFIRMATION=$2
  else
    read -p "$1" CONFIRMATION
  fi
}
main() {
  # Pipeline: validate source & deploy dirs -> build minified assets ->
  # copy into deploy dir -> optionally push dev, prod and release repos.
  parse_args "$@"
  # default directories
  if [[ -z $src_directory ]]; then
    src_directory="personal-site-dev"
  fi
  if [[ -z $deploy_directory ]]; then
    deploy_directory="martelogan.github.io"
  fi
  # SOURCE DIRECTORY CHECKS
  cd $src_directory
  if ! git diff --exit-code --quiet --cached; then
    echo Aborting due to uncommitted changes in the index >&2
    return 1
  fi
  commit_title=`git log -n 1 --format="%s" HEAD`
  commit_hash=` git log -n 1 --format="%H" HEAD`
  # default commit message uses last title if a custom one is not supplied
  if [[ -z $commit_message ]]; then
    commit_message="$commit_title"
  fi
  # append hash to commit message unless no hash flag was found
  # NOTE(review): $append_hash is never set in this file (-n sets
  # GIT_DEPLOY_APPEND_HASH instead), so this only fires if a sourced
  # .env/config defines append_hash=true — confirm intent.
  if [[ $append_hash = true ]]; then
    commit_message="$commit_message"$'\n\n'"generated from commit $commit_hash"
  fi
  cd ..
  # DEPLOY DIRECTORY CHECKS
  if [[ ! -d "$deploy_directory" ]]; then
    echo "Deploy directory '$deploy_directory' does not exist. Aborting." >&2
    return 1
  fi
  # must use short form of flag in ls for compatibility with OS X and BSD
  if [[ -z `ls -A "$deploy_directory" 2> /dev/null` && -z $allow_empty ]]; then
    echo "Deploy directory '$deploy_directory' is empty. Aborting." >&2
    return 1
  fi
  # build distribution
  cd $src_directory
  coffee --compile javascripts/*.coffee
  compass compile sass/*
  minify javascripts/ --clean
  minify stylesheets/ --clean
  git add -A
  git commit -m "Build: $commit_message"
  cd ..
  # publish distribution to local production environment
  cp $src_directory/javascripts/*.min.js $deploy_directory/javascripts/
  cp $src_directory/stylesheets/main.min.css $deploy_directory/stylesheets/
  cp $src_directory/images/* $deploy_directory/images/
  cp $src_directory/index.html $deploy_directory/
  cp $src_directory/LoganMartel.pdf $deploy_directory/
  cd $deploy_directory
  git add -A
  git commit -m "Publish: $commit_message"
  cd ..
  # release latest distribution per environment
  # (each push is individually confirmed unless -f pre-answered "y")
  prompt_confirmation "Proceed to publish changes to development repo (y/n)? " $FORCE_CONFIRM
  if [[ $CONFIRMATION =~ ^[Yy]$ ]]; then
    cd $src_directory
    git push --set-upstream origin master
    cd ..
  fi
  prompt_confirmation "Proceed to publish changes to production repo (y/n)? " $FORCE_CONFIRM
  if [[ $CONFIRMATION =~ ^[Yy]$ ]]; then
    cd $deploy_directory
    git push --set-upstream origin master
    cd ..
  fi
  prompt_confirmation "Proceed to publish changes to release repo (y/n)? " $FORCE_CONFIRM
  if [[ $CONFIRMATION =~ ^[Yy]$ ]]; then
    git add -A
    git commit -m "Release: $commit_message"
    git push --set-upstream origin master
  fi
}
# Run main unless the script is being sourced for its functions only.
[[ $1 = --source-only ]] || main "$@"
| true
|
1d17744784ee4a736bdd33255a846105eb0cad37
|
Shell
|
ckaserer/latex-resume
|
/bashrc
|
UTF-8
| 360
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Sourced helper: defines `latex-resume`, which compiles cv.tex inside a
# Docker LaTeX container mounted on this script's directory.
readonly LATEX_RESUME_SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# latex-resume
function latex-resume () {
  # Build the command as an array so the mount path and any user arguments
  # survive word-splitting; the previous flat-string version broke the
  # quoting around ${LATEX_RESUME_SCRIPT_DIR} and expanded $@ unquoted.
  local -a cmd=(docker run --rm -i --net=none \
    -v "${LATEX_RESUME_SCRIPT_DIR}:/data" ckaserer/latex:full lualatex cv.tex)
  echo "+ ${cmd[*]} $*" && "${cmd[@]}" "$@"
}
readonly -f latex-resume
# Abort sourcing if the function could not be marked readonly.
[ "$?" -eq "0" ] || return $?
| true
|
d45fcf9df9a99c40892324aa4ebb0690dbd0d114
|
Shell
|
termux/termux-packages
|
/packages/gitui/build.sh
|
UTF-8
| 1,248
| 3
| 3
|
[
"Apache-2.0"
] |
permissive
|
# Termux package recipe for gitui (Rust TUI for git).
TERMUX_PKG_HOMEPAGE=https://github.com/extrawurst/gitui
TERMUX_PKG_DESCRIPTION="Blazing fast terminal-ui for git written in rust"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_LICENSE_FILE="LICENSE.md"
TERMUX_PKG_MAINTAINER="@PeroSar"
TERMUX_PKG_VERSION=0.23.0
TERMUX_PKG_REVISION=1
TERMUX_PKG_SRCURL=https://github.com/extrawurst/gitui/archive/v$TERMUX_PKG_VERSION.tar.gz
TERMUX_PKG_SHA256=5180d5d8fd2fe6400148d6078b4b318c7530ca6c27ea8d8c0882f2e4d9064a80
TERMUX_PKG_DEPENDS="git, libgit2, libssh2, openssl"
TERMUX_PKG_BUILD_IN_SRC=true
termux_step_pre_configure() {
  # Link against the system libraries instead of vendored copies.
  CPPFLAGS+=" -Dindex=strchr"
  export OPENSSL_NO_VENDOR=1
  export LIBGIT2_SYS_USE_PKG_CONFIG=1
  export LIBSSH2_SYS_USE_PKG_CONFIG=1
  export PKG_CONFIG_ALLOW_CROSS=1
  termux_setup_rust
  : "${CARGO_HOME:=$HOME/.cargo}"
  export CARGO_HOME
  cargo fetch --target "${CARGO_TARGET_NAME}"
  # Relax libgit2-sys's exact version range to a minimum-version check so
  # the recipe builds against the system libgit2.
  local f
  for f in $CARGO_HOME/registry/src/*/libgit2-sys-*/build.rs; do
    sed -i -E 's/\.range_version\(([^)]*)\.\.[^)]*\)/.atleast_version(\1)/g' "${f}"
  done
}
termux_step_make() {
  cargo build --release \
    --jobs "$TERMUX_MAKE_PROCESSES" \
    --target "$CARGO_TARGET_NAME" \
    --locked
}
termux_step_make_install() {
  install -Dm700 target/"${CARGO_TARGET_NAME}"/release/gitui "$TERMUX_PREFIX"/bin/
}
| true
|
c5872507a31748182f0acd67912ebb88f16e863c
|
Shell
|
hjanime/IGCSA
|
/hbase-genomes/aws-scripts/local-pipeline-run.sh
|
UTF-8
| 3,069
| 3
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Launch the IGCSA local-search pipeline on AWS EMR: load HBase data, then
# run one LocalSearch step per chromosome-region pair.
if [ ! -f "$EMR_HOME/elastic-mapreduce" ]; then
  echo "EMR_HOME not set."
  exit
fi
#if [ $# -lt 3 ]; then
# echo "USAGE: $0 <New Genome Name: string> <Terminate on failure: true/false> <s3 bucket> <number of cores: default=3>"
# exit
#fi
TERM="TERMINATE_JOB_FLOW"
#if [ $2 == "false" ]; then
# TERM="CANCEL_AND_WAIT"
#elif [ $2 != "true" ]; then
# echo "Terminate on failure, options true/false only."
# exit
#fi
BUCKET="insilico"
CORES=5;
# NOTE(review): $4 is used both as the core count here and as the genome
# data suffix below — likely a leftover from the commented-out arg parsing;
# confirm the intended argument layout.
if [ $# -eq 4 ]; then
  CORES=$4
fi
# Longer HBase RPC timeouts for small clusters.
TIMEOUT="1200000"
if [ $CORES -le 5 ]; then
  TIMEOUT="3600000"
fi
INSTANCE_TYPE="m2.xlarge"
PRICE="BidPrice=0.03"
GENOME_DATA="s3://${BUCKET}/hbase/$4"
echo "Running IGCSA pipeline with ${CORES} core instances (${INSTANCE_TYPE}). On failure: ${TERM}"
JAR="s3://${BUCKET}/HBase-Genomes-1.2.jar"
MASTER="InstanceGroupType=MASTER,InstanceCount=1,InstanceType=${INSTANCE_TYPE}"
CORE="InstanceGroupType=CORE,InstanceCount=${CORES},InstanceType=${INSTANCE_TYPE}"
HBASE="Path=s3://eu-west-1.elasticmapreduce/bootstrap-actions/configure-hbase,Args=[-s,hbase.rpc.timeout=${TIMEOUT},-s,hbase.regionserver.lease.period=${TIMEOUT},-s,hbase.regionserver.handler.count=30]"
STEPS="Name=LoadHBASE,Jar=$JAR,Type=CUSTOM_JAR,ActionOnFailure=TERMINATE_JOB_FLOW,Args=[hbaseutil,-d,s3n://${BUCKET}/hbase,-c,IMPORT,-t,genome,-t,chromosome,-t,sequence,-t,small_mutations]"
localsearch_args="-b,s3n://${BUCKET}/tools/bwa.tgz,-o,s3n://${BUCKET}/HCC1954,-g,GRCh37,-r,s3n://${BUCKET}/reads/HCC1954/discordant.tsv"
# Append one LocalSearch step per region pair; steps after the first get a
# "_<i>" suffix in their output path.
i=0
locs=("-l,5:66700001-71800001,-l,8:117700001-132032011" "-l,5:66700001-71800001,-l,8:132032011-146364022" "-l,5:71800001-76900000,-l,8:117700001-132032011" "-l,5:71800001-76900000,-l,8:132032011-146364022")
for loc in "${locs[@]}"
do
  p=""
  if [ $i -gt 0 ]; then p="_$i"; fi
  STEPS="${STEPS} Name=LocalSearch,Jar=$JAR,Type=CUSTOM_JAR,ActionOnFailure=TERMINATE_JOB_FLOW,Args=[localsearch,${localsearch_args},${loc}]"
  #STEPS="${STEPS} Name=Score,Type=STREAMING,ActionOnFailure=CONTINUE,Args=[-D,mapred.reduce.tasks=1,\"--files=s3://${BUCKET}/tools/read_sam_map.rb,s3://${BUCKET}/tools/evaluation_reducer.R\",-mapper,read_sam_map.rb,-reducer,evaluation_reducer.R,-input,s3://${BUCKET}/HCC1954/5q13-8q24${p}/aligned/merged.sam,-output,s3://insilico/HCC1954/mini/5q13-8q24${p}/score]"
  ((i=i+1))
done
#aws emr add-steps --cluster-id j-36QD160DZ2MPX --steps $STEPS
aws emr create-cluster --name 'IGCSA localsearch v0.01' --applications Name=HBase --ami-version 3.2.1 --auto-terminate --enable-debugging --log-uri s3://${BUCKET}/logs \
  --ec2-attributes KeyName=amazonkeypair \
  --bootstrap-actions $HBASE \
  --instance-groups $MASTER $CORE --steps $STEPS
# [
# {
# "Name": "string",
# "Args": ["string", ...],
# "Jar": "string",
# "ActionOnFailure": "TERMINATE_CLUSTER"|"CANCEL_AND_WAIT"|"CONTINUE",
# "MainClass": "string",
# "Type": "CUSTOM_JAR"|"STREAMING"|"HIVE"|"PIG"|"IMPALA",
# "Properties": "string"
# }
# ...
# ]
| true
|
fe06103f91684ecca8ecaca44f5022d850b465dd
|
Shell
|
losipiuk/ircmdbot
|
/devenv.sh
|
UTF-8
| 1,837
| 3.921875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash -e
# stolen from https://raw.githubusercontent.com/kokosing/git-gifi/master/devenv.sh
# thanks grzesiek
if [[ "$VIRTUAL_ENV" == "" ]]; then
VIRTUAL_ENV="/Users/losipiuk/workspace/virtualenvs/ircmdbot"
fi
COMMANDS="help init install build release"
SETUP='python setup.py'
function _err() {
echo $*
exit 1
}
function _activate_virtual_env() {
if [ -d $VIRTUAL_ENV ]; then
source $VIRTUAL_ENV/bin/activate
else
_err "Unable to find virtual env at $VIRTUAL_ENV"
fi
}
function init() {
sudo apt-get install python-dev
virtualenv $VIRTUAL_ENV
source $VIRTUAL_ENV/bin/activate
_activate_virtual_env
$SETUP develop
pip install wheel
pip install twine
echo
echo "Remember to 'source $VIRTUAL_ENV/bin/activate', before coding"
}
function build() {
_activate_virtual_env
$SETUP flake8
# $SETUP test
$SETUP install
}
function release() {
VERSION=$(cat setup.py | grep version | sed 's/.*0\.\(.*\)-.*/\1/g')
_change_version 0.$VERSION
rm -rf dist
build
$SETUP register
$SETUP bdist_wheel
$SETUP bdist_wheel --universal
$SETUP sdist
twine upload dist/*
NEXT_VERSION=$(echo $VERSION + 1 | bc)
_change_version 0.$NEXT_VERSION-SNAPSHOT
MESSAGE="Release 0.$VERSION"
git commit -a -m "$MESSAGE"
git tag -a -m "$MESSAGE" 0.$VERSION
git push
git push --tags
}
function _change_version() {
sed 's/\(.*version=.\).*\(.,.*\)/\1'$1'\2/g' setup.py > tmp
mv tmp setup.py
}
# Print usage for this helper script (unquoted EOF so $0 expands).
function help() {
    cat << EOF
$0 COMMAND [command arguments]
Commands:
help - display this window
init - init sandbox (install virtual env and dependencies)
build - build project
EOF
}
# --- command dispatch ---
# With no arguments, show usage and quit successfully.
if [[ $# = 0 ]]; then
    help
    exit
fi
COMMAND=$1
shift
# Validate the sub-command against the whitelist before executing it.
# -x forces a whole-line match (previously `grep -q` accepted any substring,
# so e.g. "ini" passed validation and then failed at invocation); -F treats
# the command as a literal string rather than a regex.
echo $COMMANDS | tr ' ' '\n' | grep -qxF -- "${COMMAND}" || _err "Invalid command: $COMMAND, try help command first."
# "$@" preserves each remaining argument as-is (unquoted $* re-split them).
$COMMAND "$@"
| true
|
c30677491138f2b872ac264156b85207992aee3b
|
Shell
|
kreativekorp/open-relay
|
/AlcoSans/build.sh
|
UTF-8
| 2,506
| 3.375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Build script for the AlcoSans font: generate TTF/EOT from the SFD source,
# inject PUA Unicode metadata, and package the results.
# Find FontForge
# Prefer fontforge on $PATH; fall back to the macOS app-bundle location.
if command -v fontforge >/dev/null 2>&1; then
    FONTFORGE="fontforge"
elif test -f /Applications/FontForge.app/Contents/Resources/opt/local/bin/fontforge; then
    FONTFORGE="/Applications/FontForge.app/Contents/Resources/opt/local/bin/fontforge"
else
    echo "Could not find FontForge."
    exit 1
fi
# Find Bits'n'Picas
# Search the usual relative checkout locations for BitsNPicas.jar, in the
# same order as before.  The original enumerated every path in a hand-written
# elif chain; looping over the candidate list is equivalent, shorter, and
# easier to extend with new locations.
BITSNPICAS=""
for candidate in \
    BitsNPicas.jar \
    ../BitsNPicas/BitsNPicas.jar \
    ../Workspace/BitsNPicas/BitsNPicas.jar \
    ../../BitsNPicas/BitsNPicas.jar \
    ../../Workspace/BitsNPicas/BitsNPicas.jar \
    ../../../BitsNPicas/BitsNPicas.jar \
    ../../../Workspace/BitsNPicas/BitsNPicas.jar \
    ../../../../BitsNPicas/BitsNPicas.jar \
    ../../../../Workspace/BitsNPicas/BitsNPicas.jar
do
    if test -f "$candidate"; then
        BITSNPICAS="java -jar $candidate"
        break
    fi
done
if test -z "$BITSNPICAS"; then
    echo "Could not find BitsNPicas."
    exit 1
fi
# Find ttf2eot
# ttf2eot converts TrueType fonts to the legacy Embedded OpenType format.
if command -v ttf2eot >/dev/null 2>&1; then
    TTF2EOT="ttf2eot"
else
    echo "Could not find ttf2eot."
    exit 1
fi
# Clean
rm -f AlcoSans.sfd-* AlcoSans.ttf AlcoSans.eot AlcoSans.zip AlcoSansTmp.*
rm -rf alcosans
# Make timestamped version
python ../bin/sfdpatch.py AlcoSans.sfd patches/timestamp.txt > AlcoSansTmp.sfd
# Generate ttf
# The inline FontForge script opens each argument and writes a .ttf next to it.
$FONTFORGE -lang=ff -c 'i = 1; while (i < $argc); Open($argv[i]); Generate($argv[i]:r + ".ttf", "", 0); i = i+1; endloop' \
    AlcoSansTmp.sfd
mv AlcoSansTmp.ttf AlcoSans.ttf
rm AlcoSansTmp.sfd
# Inject PUAA table
# blocks.py/unicodedata.py emit Unicode metadata that Bits'n'Picas embeds
# into the TTF as a PUAA table.
python ../bin/blocks.py cwadkkypjqvtt > Blocks.txt
python ../bin/unicodedata.py cwadkkypjqvtt > UnicodeData.txt
$BITSNPICAS injectpuaa \
    -D Blocks.txt UnicodeData.txt \
    -I AlcoSans.ttf
rm Blocks.txt UnicodeData.txt
# Convert to eot
$TTF2EOT < AlcoSans.ttf > AlcoSans.eot
# Create zip
zip AlcoSans.zip OFL.txt AlcoSans.sfd AlcoSans.ttf AlcoSans.eot
# Create lowercase versions
mkdir alcosans
cp AlcoSans.ttf alcosans/alcosans.ttf
cp AlcoSans.eot alcosans/alcosans.eot
cp AlcoSans.zip alcosans/alcosans.zip
| true
|
e7f03162f9c020deb95f7ec1ac8ab9af7b0ae382
|
Shell
|
jasoncuriano/dotfiles
|
/bootstrap-all.sh
|
UTF-8
| 645
| 4.28125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Pause until the user presses a single key; space continues, anything else
# aborts the whole script with status 1.  (read -n1 maps a space to an empty
# $key because of IFS stripping, hence the empty-string comparison.)
wait_for_space() {
    read -n1 -r -p "Press space to continue..." key
    case "$key" in
        '')
            echo "Continuing..."
            ;;
        *)
            echo "Exiting"
            exit 1
            ;;
    esac
}
# Interactive confirmation before touching the system.
wait_for_space
# macos.sh runs first, on its own, before the main script list.
echo "Making macos.sh executable"
chmod +x "macos.sh"
echo "Running macos.sh"
sh "macos.sh"
# Remaining bootstrap stages, executed in this order.
declare -a ListOfScripts=(
    "homebrew.sh"
    "git.sh"
    "ssh.sh"
    "extras.sh"
)
# Iterate the string array using for loop
for script in "${ListOfScripts[@]}"; do
    echo "Making $script executable"
    chmod +x "${script}"
done
for script in "${ListOfScripts[@]}"; do
    echo "Running $script"
    sh "${script}"
done
| true
|
f930f0cf3d0f3f9096a1392ebb0d0293cae95b94
|
Shell
|
Mirocow/site-create
|
/site-create.sh
|
UTF-8
| 17,062
| 3.6875
| 4
|
[] |
no_license
|
#!/bin/bash
# Guard: this script relies on bash features, so refuse to run under other shells.
if [ ! -n "$BASH" ] ;then echo Please run this script $0 with bash; exit 1; fi
# Strip leading and trailing spaces from the first argument; result on stdout.
function trim()
{
    printf '%s\n' "$1" | awk '{ sub(/^ +/, ""); sub(/ +$/, ""); print }'
}
# Provision a complete vhost for $HOST: system user + home skeleton, SSH keys,
# nginx config (optionally proxying to Apache, otherwise PHP-FPM pool),
# optional HTTP basic auth and awstats, then reload services and print a
# summary for the operator.
# Globals (read): HOST ALIAS IP SET_PASSWORD LOCK APACHE AWSTATS REDIRECT PHP
#                 PHP_OPCACHE — all set by the option parser below.
# Side effects: creates users/files under /home, writes /etc configs, reloads
#               php-fpm/apache2/nginx.  Must run as root.
function create_site()
{
site_name=$HOST
site_alias=$ALIAS
site_addr=$IP
# Random 16-char password derived from the clock (NOT cryptographically strong).
password=$(date +%s | sha256sum | base64 | head -c 16 ; echo)
# Existing home dir => update mode (optionally reset password); otherwise
# create the user, directory skeleton, SSH keypairs and a phpinfo stub.
if [ -d /home/${site_name} ]; then
if [ $SET_PASSWORD -eq 1 ]; then
echo ${site_name}:${password} | chpasswd
usermod -s /bin/bash ${site_name}
else
password='[without changes]'
echo "User's password is not updated"
fi
else
mkdir /home/${site_name}
mkdir /home/${site_name}/logs
mkdir /home/${site_name}/httpdocs
mkdir /home/${site_name}/httpdocs/web
useradd -d /home/${site_name} -s /bin/bash ${site_name}
usermod -G www-data ${site_name}
echo ${site_name}:${password} | chpasswd
mkdir /home/${site_name}/.ssh
chmod 0700 /home/${site_name}/.ssh
# Note: the key passphrase is set to the site name itself.
ssh-keygen -b 4096 -t rsa -N "${site_name}" -f /home/${site_name}/.ssh/id_rsa
chmod 0600 /home/${site_name}/.ssh/id_rsa
ssh-keygen -b 4096 -t dsa -N "${site_name}" -f /home/${site_name}/.ssh/id_dsa
chmod 0600 /home/${site_name}/.ssh/id_dsa
echo "<?php phpinfo();" > /home/${site_name}/httpdocs/web/index.php
if [ $LOCK -eq 1 ]; then
# htpasswd-style credentials file used by the nginx auth_basic blocks below.
authpassword=$(date +%s | sha256sum | base64 | head -c 6 ; echo)
php -r "echo 'admin:' . crypt('${authpassword}', 'salt') . ': Web auth for ${site_name}';" > /home/${site_name}/authfile
fi
chown ${site_name}:www-data -R /home/${site_name}
fi
# Back-end wiring: with --apache write an Apache vhost on 127.0.0.1:8080 and
# make nginx proxy to it; otherwise create a dedicated PHP-FPM pool and pass
# PHP requests to its unix socket.  $main holds the nginx location blocks.
if [ $APACHE -eq 1 ]; then
echo "
<VirtualHost 127.0.0.1:8080>
ServerName ${site_name}
ServerAlias www.${site_name}
ServerAdmin info@reklamu.ru
DocumentRoot /home/${site_name}/httpdocs/web
<Directory /home/${site_name}/httpdocs/web>
Options Indexes FollowSymLinks MultiViews
Options FollowSymLinks
AllowOverride All
Options +ExecCGI -MultiViews +SymLinksIfOwnerMatch
Order allow,deny
Allow from all
</Directory>
ErrorLog \${APACHE_LOG_DIR}/${site_name}-error.log
# Possible values include: debug, info, notice, warn, error, crit,
# alert, emerg.
LogLevel warn
CustomLog \${APACHE_LOG_DIR}/${site_name}-access.log combined
</VirtualHost>
" > /etc/apache2/sites-enabled/${site_name}.conf
main="
# Apache back-end
location / {
proxy_pass http://127.0.0.1:8080;
proxy_ignore_headers Expires Cache-Control;
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
}
location ~* \.(js|css|png|jpg|jpeg|gif|ico|swf)\$ {
expires 1y;
log_not_found off;
proxy_pass http://127.0.0.1:8080;
proxy_ignore_headers Expires Cache-Control;
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
}
location ~* \.(html|htm)\$ {
expires 1h;
proxy_pass http://127.0.0.1:8080;
proxy_ignore_headers Expires Cache-Control;
proxy_set_header Host \$host;
proxy_set_header X-Real-IP \$remote_addr;
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
}
"
else
php_config=";; php-fpm config for ${site_name}
[${site_name}]
user = ${site_name}
group = www-data
listen = /var/run/php-fpm-${PHP}-${site_name}.sock
listen.owner = ${site_name}
listen.group = www-data
listen.mode = 0666
pm = dynamic
pm.max_children = 250
pm.start_servers = 8
pm.min_spare_servers = 8
pm.max_spare_servers = 16
chdir = /
security.limit_extensions = false
php_flag[display_errors] = on
php_admin_value[error_log] = /home/${site_name}/logs/fpm-php-${PHP}-${site_name}.log
php_admin_flag[log_errors] = on
; Documentation: http://php.net/manual/ru/opcache.configuration.php
php_flag[opcache.enable] = $PHP_OPCACHE
php_flag[opcache.enable_cli] = $PHP_OPCACHE
"
echo "$php_config" > "/etc/php/${PHP}/fpm/pool.d/${site_name}.conf"
if [ $LOCK -eq 1 ]; then
lock="
auth_basic \"Website development\";
auth_basic_user_file /home/${site_name}/authfile;
"
else
lock=''
fi
main="
# With PHP-FPM
location / {
index index.php;
try_files \$uri \$uri/ /index.php?\$query_string;
}
# PHP fastcgi
location ~ \.php {
#try_files \$uri =404;
include fastcgi_params;
# Use your own port of fastcgi here
#fastcgi_pass 127.0.0.1:9000;
${lock}
fastcgi_pass unix:/var/run/php-fpm-${PHP}-${site_name}.sock;
fastcgi_index index.php;
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_param PATH_INFO \$fastcgi_path_info;
fastcgi_param SCRIPT_FILENAME \$document_root\$fastcgi_script_name;
}
"
fi
# Optional awstats vhost (basic-auth protected), served on awstats.<host>.
if [ $AWSTATS -eq 1 ]; then
awstats="# Awstats
server {
listen ${site_addr};
server_name awstats.${site_name};
auth_basic \"Restricted\";
auth_basic_user_file /home/${site_name}/authfile;
access_log /var/log/nginx/access.awstats.${site_name}.log;
error_log /var/log/nginx/error.awstats.${site_name}.log;
location / {
root /home/${site_name}/awstats/;
index awstats.html;
access_log off;
}
location /awstats-icon/ {
alias /usr/share/awstats/icon/;
access_log off;
}
# apt-get awstats install
location ~ ^/cgi-bin {
access_log off;
fastcgi_pass unix:/var/run/fcgiwrap.socket;
include /etc/nginx/fastcgi_params;
fastcgi_param SCRIPT_FILENAME /usr/lib\$fastcgi_script_name;
}
}
"
else
awstats=''
fi
# www-redirect policy: site-www redirects bare host to www, www-site does the
# opposite, off serves the bare host only.  $server_name is the canonical name.
if [ $REDIRECT = 'site-www' ]; then
redirect="
# Rerirect ${site_name}
server {
listen ${site_addr};
server_name ${site_name};
return 301 http://www.${site_name}\$request_uri;
}
"
server_name="www.${site_name}"
fi
if [ $REDIRECT = 'www-site' ]; then
redirect="
# Rerirect www.${site_name}
server {
listen ${site_addr};
server_name www.${site_name};
return 301 http://${site_name}\$request_uri;
}
"
server_name="${site_name}"
fi
if [ $REDIRECT = 'off' ]; then
redirect=''
server_name="${site_name}"
fi
# Emit the main nginx vhost (plus the optional awstats/redirect servers).
echo "
${awstats}
${redirect}
# Site ${server_name}
server {
listen ${site_addr};
server_name ${server_name} ${site_alias};
root /home/${site_name}/httpdocs/web;
index index.php;
access_log /home/${site_name}/logs/access.log;
error_log /home/${site_name}/logs/error.log error;
charset utf-8;
#charset windows-1251;
location = /favicon.ico {
log_not_found off;
access_log off;
break;
}
location = /robots.txt {
allow all;
log_not_found off;
access_log off;
}
${main}
location ~ /(protected|themes/\w+/views)/ {
access_log off;
log_not_found off;
return 404;
}
#
location ~ \.(xml)\$ {
expires 24h;
charset windows-1251;
#log_not_found off;
#try_files \$uri =404;
#try_files \$uri \$uri/ /index.php?\$query_string;
}
#
location ~ \.(js|css|png|jpg|gif|swf|ico|pdf|mov|fla|zip|rar)\$ {
expires 24h;
#log_not_found off;
#try_files \$uri =404;
try_files \$uri \$uri/ /index.php?\$query_string;
}
# Hide all system files
location ~ /\. {
deny all;
access_log off;
log_not_found off;
}
}
" > /etc/nginx/conf.d/${site_name}.conf
# Apply the new configuration.
service php${PHP}-fpm reload
if [ $APACHE -eq 1 ]; then
service apache2 reload
fi
service nginx reload
# Print a provisioning summary (credentials, paths, config locations).
echo ""
echo "--------------------------------------------------------"
echo "User: ${site_name}"
echo "Login: ${site_name}"
echo "Password: ${password}"
echo "Path: /home/${site_name}/"
echo "SSH Private file: /home/${site_name}/.ssh/id_rsa"
echo "SSH Public file: /home/${site_name}/.ssh/id_rsa.pub"
echo "Servers:"
echo "Site name: ${site_name} (${IP})"
if [ ! -z $site_alias ]; then
echo "Site alias: ${site_alias}"
fi
if [ $REDIRECT = 'site-www' ]; then
echo "Use redirect from ${site_name} to ${server_name}"
fi
if [ $REDIRECT = 'www-site' ]; then
echo "Use redirect from ${site_name} to ${server_name}"
fi
if [ $REDIRECT = 'off' ]; then
echo "Redirect disabled. use only ${server_name}"
fi
echo "Site root: /home/${site_name}/httpdocs/web"
echo "Site logs path: /home/${site_name}/logs"
if [ $APACHE -eq 1 ]; then
echo "Back-end server: Apache 2"
echo "NGINX: /etc/nginx/conf.d/${site_name}.conf"
echo "APACHE: /etc/apache2/sites-enabled/${site_name}.conf"
else
echo "Back-end server: PHP-FPM"
echo "NGINX: /etc/nginx/conf.d/${site_name}.conf"
echo "PHP-FPM: /etc/php/${PHP}/fpm/pool.d/${site_name}.conf"
echo "unixsock: /var/run/php-fpm-${PHP}-${site_name}.sock"
fi
if [ $LOCK -eq 1 ]; then
echo "Web auth: admin ${authpassword}"
fi
if [ $AWSTATS -eq 1 ]; then
echo "Statistic:"
echo "awstats.${site_name}"
echo "Add crontab task: */20 * * * * /usr/lib/cgi-bin/awstats.pl -config=${site_name} -update > /dev/null"
fi
echo "--------------------------------------------------------"
echo ""
}
# Print command-line help (unquoted EOF so $0 expands in the heredoc).
usage()
{
cat << EOF
usage: $0 options
This script create settings files for nginx, php-fpm (ver: 5, 7), apache2, awstats.
OPTIONS:
--host= Host name without www (Example: --host=myhost.com)
--ip= IP address, default usage 80 (Example: --ip=127.0.0.1:8080)
--redirect= WWW redirect add (Example: --redirect=www-site or --redirect=site-www or disable redirect --redirect=off)
--alias= Set Nginx alias (Examle: --alias="alias1 alias2 etc")
--apache Usage apache back-end
--awstats Usage awstats
--dont-change-password Usage for change user password (Default: 1. Usage only for update)
-5 | --php5 Usage PHP 5.x
-7 | --php7 Usage PHP 7.0
-71 | --php71 Usage PHP 7.1
-72 | --php72 Usage PHP 7.2
-73 | --php73 Usage PHP 7.3
-74 | --php74 Usage PHP 7.4
-l | --lock Usage Nginx HTTP Auth basic
-h | --help Usage
EXAMPLES:
bash site-create.sh --host="mirocow.com" --ip="192.168.1.131:8082"
bash site-create.sh --host="mirocow.com" --alias="c1.mirocow.com c2.mirocow.com" --php73
EOF
}
# --- Defaults (each overridable via the options parsed below) ---
SET_PASSWORD=1
HTTPS=0
REDIRECT='site-www'
LOCK=0
HOST=''
ALIAS=''
APACHE=0
AWSTATS=0
PHP=7.2
PHP_OPCACHE='Off'
# First local address reported by hostname -I, with port 80 appended.
IP=$(trim $(hostname -I)):80
# --- Option parsing: accepts --key=value and bare flags in any order ---
# NOTE(review): the --key=value options assign with array syntax, e.g.
# HOST=( "${i#*=}" ) — later code reads $HOST, i.e. the first element.
for i in "$@"
do
case $i in
--host=*)
HOST=( "${i#*=}" )
shift
;;
--alias=*)
ALIAS=( "${i#*=}" )
shift
;;
--ip=*)
IP=( "${i#*=}" )
shift
;;
--redirect=*)
REDIRECT=( "${i#*=}" )
shift
;;
--https)
HTTPS=1
shift
;;
--apache)
APACHE=1
shift
;;
--dont-change-password)
SET_PASSWORD=0
shift
;;
-l | --lock)
LOCK=1
shift
;;
-5 | --php5)
PHP=5.6
shift
;;
-7 | --php7)
PHP=7.0
shift
;;
-71 | --php71)
PHP=7.1
shift
;;
-72 | --php72)
PHP=7.2
shift
;;
-73 | --php73)
PHP=7.3
shift
;;
-74 | --php74)
PHP=7.4
shift
;;
-c | --php-opcache)
PHP_OPCACHE='On'
shift
;;
-w | --awstats)
AWSTATS=1
shift
;;
-h | --help)
usage
exit
;;
*)
# unknown option
;;
esac
done
# === AUTORUN ===
# Require --host; without it just print usage.  $HOST is quoted so the test
# is a well-formed two-argument -n check even when HOST is empty (the
# previous unquoted `[ ! -z $HOST ]` only worked by accident of [ 's
# one-argument semantics).
if [ -n "$HOST" ]; then
    create_site
else
    usage
fi
| true
|
8457b53a9076c39ce3d20468e39a59d7f2cb255e
|
Shell
|
Hoboneer/.dotfiles
|
/graphical/.config/bspwm/watch/pulseaudio
|
UTF-8
| 1,569
| 3.828125
| 4
|
[] |
no_license
|
#!/bin/sh
# DESCRIPTION: Watch for changes to audio volume, outputting to `xob`
# DEPENDENCIES: pactl(pulseaudio-utils) xob
# Record our PID so the bspwm session can locate/kill this watcher.
echo "$$" > "$XDG_RUNTIME_DIR/bspwm/watch_pulseaudio.pid"
# Resolve the default sink's name, then its numeric index, once at startup.
# LC_ALL=C pins pactl's output language so the awk patterns match reliably.
# NOTE(review): a default-sink change after startup is not picked up — confirm intended.
default_sink_name="$(LC_ALL=C pactl info | awk '/^Default Sink:/{print $3; exit}')"
default_sink_id="$(LC_ALL=C pactl list short sinks | awk -v sink="$default_sink_name" '$2 == sink {print $1; exit}')"
# Print the default sink's volume as the average of the left/right channel
# percentages, with "!" appended when the sink is muted — the input format
# xob understands.  Parses `pactl list sinks`, keeping state only while the
# "Sink #<default>" section is current (right_sink flag).
get_avg_vol ()
{
LC_ALL=C pactl list sinks |
# Take average of the two sides
awk -v sink_id="$default_sink_id" '
/^Sink #[0-9]+$/ {right_sink = ($2 == ("#" sink_id)); next}
/Mute: yes/ && right_sink {mute=1; next}
/Mute: no/ && right_sink {mute=0; next}
/Volume:/ && right_sink {
split($0,vols,",");
split(vols[1],left_vols," / "); split(vols[2],right_vols," / ");
left_vol=left_vols[2]; right_vol=right_vols[2];
# remove trailing "%" char
clean_left=substr(left_vol,1,length(left_vol)-1); clean_right=substr(right_vol,1,length(right_vol)-1);
average_vol = (clean_left + clean_right) / 2;
exit;
}
END {
if (mute)
print average_vol "!";
else
print average_vol;
}'
}
# Main loop: follow pactl's event stream and, on every change event for the
# default sink, re-read the volume and forward it to xob — but only when it
# actually changed (pactl emits several events per adjustment).
LC_ALL=C pactl subscribe | {
prev_vol="$(get_avg_vol)"
while IFS= read -r line; do
case "$line" in
"Event 'change' on sink #$default_sink_id")
avg_vol="$(get_avg_vol)"
# Volume percentages should allow floating point numbers so -eq,-neq wouldn't work
# Also, the input to `xob` allows a "!" suffix
if [ "$avg_vol" != "$prev_vol" ]; then
echo "$avg_vol"
prev_vol="$avg_vol"
fi
;;
esac
done
} | xob
| true
|
c573a91a93b37f680b4bab1544192b387b89f4b1
|
Shell
|
meticulo3366/kolla
|
/docker/common/nova/nova-libvirt/start.sh
|
UTF-8
| 463
| 3.1875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Container entry point for nova-libvirt: apply kolla configuration, fix
# /dev/kvm permissions, then exec libvirtd so it runs as PID 1.
set -o errexit
CMD="/usr/sbin/libvirtd"
ARGS=""
# Loading common functions.
source /opt/kolla/kolla-common.sh
# Config-internal script exec out of this function, it does not return here.
set_configs
# TODO(SamYaple): Unify this with config-internal. Tweak libvirt.conf rather
# than change permissions.
# Fix permissions for libvirt
if [[ -c /dev/kvm ]]; then
    chmod 660 /dev/kvm
    chown root:kvm /dev/kvm
fi
# exec replaces the shell so libvirtd receives signals directly.
exec $CMD $ARGS
| true
|
d811da8d733582c3f3b7e0d123ee8971a5560986
|
Shell
|
thingsboard/thingsboard.github.io
|
/_includes/docs/reference/performance-scripts/init-tests.sh
|
UTF-8
| 1,052
| 3.0625
| 3
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
#!/bin/bash
# Provision every performance-test host listed in test-ips.sh over SSH:
# widen the kernel's local port range and conntrack table, install Docker
# and build tooling, then start building the performance-tests repo inside
# a detached screen session.
. test-ips.sh
COUNTER=0
for IP in ${IPS}; do
let COUNTER++
echo "INIT ${COUNTER} FOR ${IP}"
# The heredoc runs verbatim on the remote host ('ENDSSH' is quoted, so
# nothing below is expanded locally).
ssh -i ~/.ssh/aws/smatvienko.pem -o StrictHostKeyChecking=accept-new ubuntu@${IP} <<'ENDSSH'
set +x
#optional. replace with your Thingsboard instance ip
#echo '52.50.5.45 thingsboard' | sudo tee -a /etc/hosts
#extend the local port range up to 64500
cat /proc/sys/net/ipv4/ip_local_port_range
#32768 60999
echo "net.ipv4.ip_local_port_range = 1024 65535" | sudo tee -a /etc/sysctl.conf
sudo -s sysctl -p
cat /proc/sys/net/ipv4/ip_local_port_range
#1024 65535
ulimit -n 1048576
sudo sysctl -w net.netfilter.nf_conntrack_max=1048576
sudo apt update
sudo apt install -y git maven docker docker-compose htop iotop mc screen
# manage Docker as a non-root user
sudo groupadd docker
sudo usermod -aG docker $USER
newgrp docker
# test non-root docker run
docker run hello-world
cd ~
git clone https://github.com/thingsboard/performance-tests.git
# git pull
cd performance-tests
screen -d -m ~/performance-tests/build.sh
screen -ls
ENDSSH
done
| true
|
feb781c4e107ff58b551dc7efde0519a135ee9fd
|
Shell
|
asiekierka/twili
|
/pkgs/xz/build.sh
|
UTF-8
| 218
| 2.921875
| 3
|
[] |
no_license
|
#!/bin/sh -e
# Build xz $VER from source and stage it into the sysroot given as $1
# (used as make DESTDIR).  Usage: build.sh <destdir>
# Re-assert errexit here: options on the shebang line are lost when the
# script is invoked as "sh build.sh".
set -e
VER=5.0.3
# Download the tarball only if a cached copy is not already present.
test -e xz-$VER.tar.bz2 || wget http://tukaani.org/xz/xz-$VER.tar.bz2
rm -rf xz-$VER;tar -xf xz-$VER.tar.bz2
cd xz-$VER
./configure --prefix=/
make
# Quote $1 so a destination path containing spaces survives intact.
make DESTDIR="$1" install
cd ..
rm -rf xz-$VER
| true
|
3cd86791e98c50fdd22c47177f51e70491f9d0a0
|
Shell
|
sanusatyadarshi/hsc
|
/examples/e01.6/ca-mars.morgen.net/create.sh
|
UTF-8
| 6,029
| 2.71875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# -----------------------------------------------------------
# (1) Setup CA for org mars.morgen.net
# creates home directory for TLS-CA Server and TLS-CA client
# -----------------------------------------------------------
mkdir -p ca/server
mkdir -p ca/client/admin
# Start the CA container(s) defined alongside this script.
docker-compose up -d
# copies the org CA server's root certificate to the CA client for TLS authentication
cp ./ca/server/crypto/ca-cert.pem ./ca/client/admin/tls-ca-cert.pem
export FABRIC_CA_CLIENT_HOME=./ca/client/admin
export FABRIC_CA_CLIENT_TLS_CERTFILES=tls-ca-cert.pem
# Enroll the CA admin, then register identities for both peers plus an org
# admin and an ordinary user (secrets are used for their later enrollments).
fabric-ca-client enroll -d -u https://ca-mars.morgen.net-admin:ca-mars-adminpw@0.0.0.0:7054
fabric-ca-client register -d --id.name peer0.mars.morgen.net --id.secret peer0PW --id.type peer -u https://0.0.0.0:7054
fabric-ca-client register -d --id.name peer1.mars.morgen.net --id.secret peer1PW --id.type peer -u https://0.0.0.0:7054
fabric-ca-client register -d --id.name admin-mars.morgen.net --id.secret marsAdminPW --id.type admin -u https://0.0.0.0:7054
fabric-ca-client register -d --id.name user-mars.morgen.net --id.secret marsUserPW --id.type user -u https://0.0.0.0:7054
# -----------------------------------------------------------
# (2) Setup mars.morgen.net-admin setup
# Enroll the org admin and distribute its signing cert into the
# admincerts folder of each peer MSP and of the admin MSP itself.
# -----------------------------------------------------------
mkdir -p admin/ca
cp ./ca/server/crypto/ca-cert.pem ./admin/ca/mars.morgen.net-ca-cert.pem
export FABRIC_CA_CLIENT_HOME=./admin
export FABRIC_CA_CLIENT_TLS_CERTFILES=ca/mars.morgen.net-ca-cert.pem
export FABRIC_CA_CLIENT_MSPDIR=msp
fabric-ca-client enroll -d -u https://admin-mars.morgen.net:marsAdminPW@0.0.0.0:7054
# here we are creating an admincerts directory in every peer msp so that all
# the peers (including the orderer) have their org's admin cert there
mkdir -p peers/peer0/msp/admincerts
mkdir -p peers/peer1/msp/admincerts
mkdir -p admin/msp/admincerts
cp ./admin/msp/signcerts/cert.pem ./peers/peer0/msp/admincerts/mars.morgen.net-admin-cert.pem
cp ./admin/msp/signcerts/cert.pem ./peers/peer1/msp/admincerts/mars.morgen.net-admin-cert.pem
# BUG FIX: the destination was missing the "/" after "admincerts", so the cert
# was written to a file literally named "admincertsmars.morgen.net-admin-cert.pem"
# instead of into the admincerts directory created just above.
cp ./admin/msp/signcerts/cert.pem ./admin/msp/admincerts/mars.morgen.net-admin-cert.pem
# -----------------------------------------------------------
# (3) Setup peers
# For each peer: enroll with the org CA for its MSP identity, then with the
# TLS-CA (port 7052) for its TLS certificate, and normalize the TLS key name.
# -----------------------------------------------------------
mkdir -p peers/peer0/assets/ca
mkdir peers/peer0/assets/tls-ca
# copy the org root certificate to peer0
cp ./ca/server/crypto/ca-cert.pem ./peers/peer0/assets/ca/mars.morgen.net-ca-cert.pem
# copy the TLS-CA root certificate to peer0 for TLS authentication
cp ../ca-tls.morgen.net/ca/server/crypto/ca-cert.pem ./peers/peer0/assets/tls-ca/tls-ca-cert.pem
# peer0 enrolls with the org CA
export FABRIC_CA_CLIENT_MSPDIR=msp
export FABRIC_CA_CLIENT_HOME=./peers/peer0/
export FABRIC_CA_CLIENT_TLS_CERTFILES=assets/ca/mars.morgen.net-ca-cert.pem
fabric-ca-client enroll -d -u https://peer0.mars.morgen.net:peer0PW@0.0.0.0:7054
# peer0 enrolls with the TLS-CA to get its TLS certificate
export FABRIC_CA_CLIENT_MSPDIR=tls-msp
export FABRIC_CA_CLIENT_TLS_CERTFILES=assets/tls-ca/tls-ca-cert.pem
fabric-ca-client enroll -d -u https://peer0.mars.morgen.net:peer0PW@0.0.0.0:7052 --enrollment.profile tls --csr.hosts peer0.mars.morgen.net
# rename the generated private key to the stable name peer config expects
mv peers/peer0/tls-msp/keystore/*_sk peers/peer0/tls-msp/keystore/key.pem
# peer1: same sequence as peer0
mkdir -p peers/peer1/assets/ca
mkdir peers/peer1/assets/tls-ca
cp ./ca/server/crypto/ca-cert.pem ./peers/peer1/assets/ca/mars.morgen.net-ca-cert.pem
cp ../ca-tls.morgen.net/ca/server/crypto/ca-cert.pem ./peers/peer1/assets/tls-ca/tls-ca-cert.pem
export FABRIC_CA_CLIENT_MSPDIR=msp
export FABRIC_CA_CLIENT_HOME=./peers/peer1/
export FABRIC_CA_CLIENT_TLS_CERTFILES=assets/ca/mars.morgen.net-ca-cert.pem
fabric-ca-client enroll -d -u https://peer1.mars.morgen.net:peer1PW@0.0.0.0:7054
# peer1 enrolls with the TLS-CA to get its TLS certificate
export FABRIC_CA_CLIENT_MSPDIR=tls-msp
export FABRIC_CA_CLIENT_TLS_CERTFILES=assets/tls-ca/tls-ca-cert.pem
fabric-ca-client enroll -d -u https://peer1.mars.morgen.net:peer1PW@0.0.0.0:7052 --enrollment.profile tls --csr.hosts peer1.mars.morgen.net
mv peers/peer1/tls-msp/keystore/*_sk peers/peer1/tls-msp/keystore/key.pem
# -----------------------------------------------------------
# (4) Setup MSP
# Assemble the organization-level MSP folder used in channel configuration.
# -----------------------------------------------------------
mkdir -p msp/admincerts
mkdir msp/cacerts
mkdir msp/tlscacerts
mkdir msp/users
# copy the org root CA certificate into msp/cacerts
cp ./ca/server/crypto/ca-cert.pem ./msp/cacerts/mars.morgen.net-ca-cert.pem
# copy the TLS-CA root certificate into msp/tlscacerts
cp ../ca-tls.morgen.net/ca/server/crypto/ca-cert.pem ./msp/tlscacerts/tls-ca-cert.pem
# copy the org admin signing certificate into msp/admincerts
cp ./admin/msp/signcerts/cert.pem ./msp/admincerts/mars.morgen.net-admin-cert.pem
# -----------------------------------------------------------
# (5) config.yaml
# Write the NodeOUs configuration for the org MSP and the admin MSP.
# BUG FIX: the original invoked interactive "vi" and then listed the YAML
# inline, so the YAML lines were executed as shell commands (every one
# failing with "command not found") and no file was ever written.  Write
# the files non-interactively with quoted heredocs instead; indentation
# follows the standard Fabric NodeOUs layout.
# -----------------------------------------------------------
cat > msp/config.yaml <<'EOF'
NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/orderer.morgen.net-ca-cert.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/orderer.morgen.net-ca-cert.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/orderer.morgen.net-ca-cert.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/orderer.morgen.net-ca-cert.pem
    OrganizationalUnitIdentifier: orderer
EOF
cat > admin/msp/config.yaml <<'EOF'
NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/0-0-0-0-7054.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/0-0-0-0-7054.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/0-0-0-0-7054.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/0-0-0-0-7054.pem
    OrganizationalUnitIdentifier: orderer
EOF
| true
|
4647e18c7f5deacd14ff7420e33fc4c88c1c1428
|
Shell
|
jakubfabijan/UTC-shift-checker
|
/utcget.sh
|
UTF-8
| 163
| 3.296875
| 3
|
[
"Unlicense"
] |
permissive
|
#!/bin/bash
# Print the local timezone's offset from UTC as "UTC", "UTC+N" or "UTC-N"
# (with ":MM" appended for fractional-hour zones such as +05:30).
#
# BUG FIX: the original subtracted local %H from UTC %H, which produced a
# wildly wrong offset whenever local and UTC time fall on different calendar
# days (e.g. local 01:00 vs UTC 23:00 gave -22 instead of +2), ignored
# minute offsets, and raced between two separate `date` invocations.
# Reading date's own %z offset avoids all three problems.
print_utc_offset() {
    local zone sign hours mins label
    zone=$(date +%z)              # e.g. +0200, -0500, +0530
    sign=${zone:0:1}
    hours=$((10#${zone:1:2}))     # 10# avoids octal parsing of "08"/"09"
    mins=${zone:3:2}
    label=$hours
    if [ "$mins" != "00" ]; then
        label="$hours:$mins"
    fi
    if [ "$hours" -eq 0 ] && [ "$mins" = "00" ]; then
        echo "UTC"
    elif [ "$sign" = "-" ]; then
        echo "UTC-$label"
    else
        echo "UTC+$label"
    fi
}
print_utc_offset
|
93de91a86e893cbe597d05323d3a3318d6516439
|
Shell
|
ColdenCullen/d2dl
|
/build/ddl/doc.sh
|
UTF-8
| 975
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# doc.sh
# Build the DDL documentation tree with the project's doc generators and
# package it into a versioned SDK zip under ../downloads.
# ver.sh defines $ddlDocVersion, $nolinkbuild, $compile and $docFiles.
source build/ddl/ver.sh $@
packageName="../downloads/ddl.sdk.$ddlDocVersion.zip"
echo "Building DDL Documentation"
rm $packageName
# crawl the source tree
$nolinkbuild -o- doc/ddl/xhtml.ddoc -Dd./doc/ddl/html -full -Xmango ddl/all.d
$nolinkbuild -o- doc/ddl/xhtml.ddoc -Dd./doc/ddl/html -full -Xmango examples/host.d
$nolinkbuild -o- doc/ddl/xhtml.ddoc -Dd./doc/ddl/html -full -Xmango examples/mule.d
$nolinkbuild -o- doc/ddl/xhtml.ddoc -Dd./doc/ddl/html -full -Xmango utils/bless.d
$nolinkbuild -o- doc/ddl/xhtml.ddoc -Dd./doc/ddl/html -full -Xmango utils/ddlinfo.d
$nolinkbuild -o- doc/ddl/xhtml.ddoc -Dd./doc/ddl/html -full -Xmango utils/insitu.d
# create an index for the doc tree
$compile -o- doc/ddl/modules.ddoc doc/ddl/index.ddoc -Df./doc/ddl/html/index.html ddl/all.d
# BUG FIX: `packageFiles = "..."` (with spaces around =) is not an assignment
# in shell — it tried to run a command named "packageFiles" and left the
# variable empty, so the zip below was packed without the file list.
packageFiles="$(find doc/ddl doc/meta -regex $docFiles)"
zip -v9 $packageName $packageFiles
ls -alF $packageName;
echo "Done";
| true
|
8e7d6b54e47e213ec4cf532d6bec1197636ce0b1
|
Shell
|
borodust/lisp-scripts
|
/lisps/lisp-script-prologue.sh
|
UTF-8
| 145
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Prologue for lisp script launchers: record where the scripting helpers
# live and which script file is being run, then shift the script path off
# so the remaining "$@" are the script's own arguments.
# Absolute, symlink-resolved directory containing this file; the CDPATH=
# prefix and -- guards make the cd robust against odd environments/paths.
SCRIPTING_DIR="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd -P)"
LISP_SCRIPT=$1
# Expose the script path to the Lisp process via the environment.
export LISP_SCRIPTING_SCRIPT_PATH=$LISP_SCRIPT
shift
|
5aa1639e8d8678d6524239d8ea751b97edbfa3a5
|
Shell
|
virajkulkarni14/bash_basix
|
/moreifscript.sh
|
UTF-8
| 442
| 3.8125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/zsh
# Interactive demo of numeric if/elif/else comparisons.
# NOTE(review): the shebang says zsh but the syntax is plain POSIX/bash —
# confirm zsh is actually required.
clear
echo -n "Enter a number: "
read num
# -eq/-lt/-gt are integer comparisons; non-numeric input makes [ ] error out.
if [ "$num" -eq 10 ] #spaces here are important, eq -> equal
# put the echo statement here and it will still say good num for any num
then
    echo "The number is 10!"
elif [ "$num" -lt 10 ] #less than
then
    echo "This number is less than 10"
elif [ "$num" -gt 20 ] #greater than
then
    echo "This number is greater than 20"
else
    # NOTE(review): this branch actually covers 11..20, so the message text
    # overstates the range.
    echo "This number is between 10 and 21" # inclusive!
fi
| true
|
38bf54b82041cfefd8d0f5a17010319b22f64b84
|
Shell
|
slash-segmentation/CHM
|
/wrappers/panfish/test/runCHM.sh.bats
|
UTF-8
| 7,470
| 3.328125
| 3
|
[
"BSD-2-Clause",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
#!/usr/bin/env bats
# Bats test-suite for runCHM.sh, the per-task CHM wrapper used by panfish.
# Runs before every test: stage the scripts under test into a scratch dir
# and export paths to the fake CHM_test binaries (one succeeding, one failing).
setup() {
  export THE_TMP="${BATS_TMPDIR}/runCHM" 1>&2
  /bin/mkdir -p $THE_TMP 1>&2
  /bin/cp ${BATS_TEST_DIRNAME}/../panfishCHM.properties "$THE_TMP/." 1>&2
  /bin/cp -a ${BATS_TEST_DIRNAME}/../scripts/.helperfuncs.sh "$THE_TMP/." 1>&2
  /bin/cp -a ${BATS_TEST_DIRNAME}/../scripts/runCHM.sh "$THE_TMP/." 1>&2
  export RUNCHM="$THE_TMP/runCHM.sh"
  chmod a+x $RUNCHM
  export HELPERFUNCS="$THE_TMP/.helperfuncs.sh"
  export SUCCESS_CHM_TEST="$BATS_TEST_DIRNAME/bin/fakesuccesschmtest"
  export FAIL_CHM_TEST="$BATS_TEST_DIRNAME/bin/fakefailchmtest"
  chmod a+x $SUCCESS_CHM_TEST
  chmod a+x $FAIL_CHM_TEST
  # runCHM.sh keys its task off SGE_TASK_ID; start each test without one.
  unset SGE_TASK_ID
}
# Runs after every test: discard the scratch directory.
teardown(){
  /bin/rm -rf "$THE_TMP"
  #echo "teardown" 1>&2
}
#
# getCHMTestJobParametersForTaskFromConfig() tests
#
@test "getCHMTestJobParametersForTaskFromConfig() tests" {
# source helperfuncs.sh to we can call the function
. $HELPERFUNCS
# source runCHM.sh so we can unit test this function
. $RUNCHM source
# Test where we can't get first parameter
run getCHMTestJobParametersForTaskFromConfig "$THE_TMP" "1"
[ "$status" -eq 1 ]
# Test where we can't get 2nd parameter
echo "1${CONFIG_DELIM}a" > "$THE_TMP/runCHM.sh.config"
echo "1${CONFIG_DELIM}b" >> "$THE_TMP/runCHM.sh.config"
echo "1${CONFIG_DELIM}c" >> "$THE_TMP/runCHM.sh.config"
echo "1${CONFIG_DELIM}d" >> "$THE_TMP/runCHM.sh.config"
echo "2${CONFIG_DELIM}e" >> "$THE_TMP/runCHM.sh.config"
run getCHMTestJobParametersForTaskFromConfig "$THE_TMP" "2"
[ "$status" -eq 2 ]
# Test where we cant get 3rd parameter
echo "2${CONFIG_DELIM}f" >> "$THE_TMP/runCHM.sh.config"
run getCHMTestJobParametersForTaskFromConfig "$THE_TMP" "2"
[ "$status" -eq 3 ]
# Test where we cant get 4th parameter
echo "2${CONFIG_DELIM}g" >> "$THE_TMP/runCHM.sh.config"
run getCHMTestJobParametersForTaskFromConfig "$THE_TMP" "2"
[ "$status" -eq 4 ]
# Test success
echo "2${CONFIG_DELIM}h" >> "$THE_TMP/runCHM.sh.config"
getCHMTestJobParametersForTaskFromConfig "$THE_TMP" "2"
[ "$?" -eq 0 ]
[ "$INPUT_IMAGE" == "e" ]
[ "$MODEL_DIR" == "f" ]
[ "$CHM_OPTS" == "g" ]
[ "$OUTPUT_IMAGE" == "h" ]
}
#
# SGE_TASK_ID not set
#
@test "SGE_TASK_ID not set" {
unset SGE_TASK_ID
run $RUNCHM
echo "$output" 1>&2
[ "$status" -eq 1 ]
[ "${lines[0]}" == "This script runs CHM on a slice/image of data." ]
}
@test "no .helperfuncs.sh" {
/bin/rm -f "$THE_TMP/.helperfuncs.sh"
run $RUNCHM
echo "$output" 1>&2
[ "$status" -eq 1 ]
[ "${lines[0]}" == "$THE_TMP/.helperfuncs.sh not found" ]
}
#
# No runCHM.sh.config file
#
@test "no runCHM.sh.config file" {
export SGE_TASK_ID=1
run $RUNCHM
echo "$output" 1>&2
[ "$status" -eq 1 ]
[ "${lines[0]}" == "ERROR: (task 1) No $THE_TMP/runCHM.sh.config found" ]
}
#
# Simple valid run with successful fake CHM_test.sh call PANFISH_BASEDIR unset
#
@test "Simple valid run with successful fake CHM_test.sh call" {
export SGE_TASK_ID=1
# create fake panfishCHM.properties file
echo "chm.bin.dir=${SUCCESS_CHM_TEST}" > "$THE_TMP/panfishCHM.properties"
# create runCHM.sh.config file
echo "1:::/foo/input.png" > "$THE_TMP/runCHM.sh.config"
echo "1:::/foo/modeldir" >> "$THE_TMP/runCHM.sh.config"
echo "1:::chmopts" >> "$THE_TMP/runCHM.sh.config"
echo "1:::out/hist1.png/1.png" >> "$THE_TMP/runCHM.sh.config"
# make output directory
mkdir -p "$THE_TMP/out/hist1.png/chm" 1>&2
export SGE_TASK_ID=1
run $RUNCHM
echo "$output" 1>&2
[ "$status" -eq 0 ]
[[ "${lines[0]}" == "(task 1) runCHM.sh Start Time:"* ]]
[[ "${lines[1]}" == "(task 1) Creating directory /tmp/"* ]]
[[ "${lines[2]}" == "//foo/input.png /tmp/chm"* ]]
[[ "${lines[2]}" == *" -m //foo/modeldir chmopts -s" ]]
[[ "${lines[26]}" == "(task 1) runCHM.sh End Time: "* ]]
[[ "${lines[26]}" == *" Exit Code: 0" ]]
}
#
# Simple valid run with successful fake CHM_test.sh call PANFISH_BASEDIR and PANFISH_SCRATCH set
#
@test "Simple valid run with successful fake CHM_test.sh call PANFISH_BASEDIR and PANFISH_SCRATCH set" {
export SGE_TASK_ID=1
export PANFISH_BASEDIR="$THE_TMP"
export PANFISH_SCRATCH="/tmp/pan"
unset SKIP_COPY
mkdir -p "$THE_TMP/cc/" 1>&2
/bin/cp -a ${SUCCESS_CHM_TEST}/* "$THE_TMP/cc/." 1>&2
# create fake panfishCHM.properties file
echo "chm.bin.dir=/cc" > "$THE_TMP/panfishCHM.properties"
# create runCHM.sh.config file
echo "1:::/foo/input.png" > "$THE_TMP/runCHM.sh.config"
echo "1:::/foo/modeldir" >> "$THE_TMP/runCHM.sh.config"
echo "1:::chmopts" >> "$THE_TMP/runCHM.sh.config"
echo "1:::out/hist1.png/1.png" >> "$THE_TMP/runCHM.sh.config"
# make output directory
mkdir -p "$THE_TMP/out/hist1.png/" 1>&2
export SGE_TASK_ID=1
run $RUNCHM
echo "$output" 1>&2
[ "$status" -eq 0 ]
[[ "${lines[0]}" == "(task 1) runCHM.sh Start Time:"* ]]
[[ "${lines[1]}" == "(task 1) Creating directory /tmp/pan/chm"* ]]
[[ "${lines[2]}" == "$THE_TMP//foo/input.png /tmp/pan/chm"* ]]
[[ "${lines[2]}" == *" -m $THE_TMP//foo/modeldir chmopts -s" ]]
[[ "${lines[26]}" == "(task 1) runCHM.sh End Time: "* ]]
[[ "${lines[26]}" == *" Exit Code: 0" ]]
[ -s "$THE_TMP/out/hist1.png/1.png" ]
}
# Same happy path, but with a compiled CHM_test binary present: the presence of
# cc/CHM_test plus matlab.dir in the properties file should add "-M <matlab dir>"
# (prefixed by PANFISH_BASEDIR) to the CHM_test.sh invocation.
@test "Simple valid run with successful fake CHM_test.sh call using compiled CHM" {
export SGE_TASK_ID=1
export PANFISH_BASEDIR="$THE_TMP"
export PANFISH_SCRATCH="/tmp/pan"
unset SKIP_COPY
mkdir -p "$THE_TMP/cc/" 1>&2
/bin/cp -a ${SUCCESS_CHM_TEST}/* "$THE_TMP/cc/." 1>&2
# create fake panfishCHM.properties file
echo "chm.bin.dir=/cc" > "$THE_TMP/panfishCHM.properties"
echo "matlab.dir=/hello" >> "$THE_TMP/panfishCHM.properties"
# fake compiled CHM_test binary (content irrelevant; existence triggers -M mode)
echo "hi" > "$THE_TMP/cc/CHM_test"
# create runCHM.sh.config file
echo "1:::/foo/input.png" > "$THE_TMP/runCHM.sh.config"
echo "1:::/foo/modeldir" >> "$THE_TMP/runCHM.sh.config"
echo "1:::chmopts" >> "$THE_TMP/runCHM.sh.config"
echo "1:::out/hist1.png/1.png" >> "$THE_TMP/runCHM.sh.config"
# make output directory
mkdir -p "$THE_TMP/out/hist1.png/" 1>&2
export SGE_TASK_ID=1
run $RUNCHM
echo "$output" 1>&2
[ "$status" -eq 0 ]
[[ "${lines[0]}" == "(task 1) runCHM.sh Start Time:"* ]]
[[ "${lines[1]}" == "(task 1) Creating directory /tmp/pan/chm"* ]]
[[ "${lines[2]}" == "$THE_TMP//foo/input.png /tmp/pan/chm"* ]]
# the -M flag must point at the basedir-prefixed matlab.dir value
[[ "${lines[2]}" == *" -m $THE_TMP//foo/modeldir chmopts -s -M $THE_TMP//hello" ]]
[[ "${lines[26]}" == "(task 1) runCHM.sh End Time: "* ]]
[[ "${lines[26]}" == *" Exit Code: 0" ]]
[ -s "$THE_TMP/out/hist1.png/1.png" ]
}
# Simple valid run with failing fake CHM_test.sh call
#
# With no PANFISH_BASEDIR/PANFISH_SCRATCH exported, the path prefixes fall back
# to empty / "/tmp"; the failing fake CHM must make runCHM.sh exit with code 1.
@test "Simple valid run with failing fake CHM_test.sh call" {
export SGE_TASK_ID=3
unset SKIP_COPY
# create fake panfishCHM.properties file pointing at the always-failing CHM
echo "chm.bin.dir=${FAIL_CHM_TEST}" > "$THE_TMP/panfishCHM.properties"
# create runCHM.sh.config file
echo "3:::/foo/input.png" > "$THE_TMP/runCHM.sh.config"
echo "3:::/foo/modeldir" >> "$THE_TMP/runCHM.sh.config"
echo "3:::chmopts" >> "$THE_TMP/runCHM.sh.config"
echo "3:::out/hist1.png/1.png" >> "$THE_TMP/runCHM.sh.config"
# make output directory
mkdir -p "$THE_TMP/out/hist1.png/chm" 1>&2
run $RUNCHM
echo "$output" 1>&2
[ "$status" -eq 1 ]
# no PANFISH_BASEDIR: paths are unprefixed; scratch defaults to /tmp
[[ "${lines[1]}" == "(task 3) Creating directory /tmp/"* ]]
[[ "${lines[2]}" == "//foo/input.png /tmp/chm"* ]]
[[ "${lines[2]}" == *" -m //foo/modeldir chmopts -s" ]]
[[ "${lines[27]}" == "(task 3) runCHM.sh End Time: "* ]]
[[ "${lines[27]}" == *" Exit Code: 1" ]]
}
| true
|
d64d81f8bd46f7c8a4761364c9daa57eb8c702d9
|
Shell
|
5l1v3r1/scripts-46
|
/python-fushcache
|
UTF-8
| 491
| 3.5
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# this file is part of the nil0x42's git repositories
# for the current version, take a look at:
#     https://github.com/nil0x42/scripts

# Recursive removal of python cache files from <directory>.
# (removes all `*.pyc` and `__pycache__/` files)
# Usage: python-flushcache <directory>
# If <directory> is undefined, current directory is used.
DIRECTORY=${1:-.}
# Delete compiled bytecode files, then prune whole __pycache__ trees.
# `-exec … {} +` handles arbitrary file names safely; the previous
# `find | xargs rm -rf` broke on paths containing spaces or newlines.
find "$DIRECTORY" -name '*.pyc' -type f -exec rm -f -- {} +
find "$DIRECTORY" -name __pycache__ -prune -exec rm -rf -- {} +
| true
|
56eb3e607d220080fb534e71e0420f555cc81116
|
Shell
|
argv01/zmail
|
/zmail/config/overflow.sh
|
UTF-8
| 635
| 3.59375
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/sh
# Probe for the 1K `case`-statement buffer limit found in certain Bourne
# shell implementations.  configure accumulates a long $DEFS value and feeds
# it through `case`, which such shells cannot handle.
#
# Build a string well past 1K (eleven doublings of "x" -> 2048 chars) and
# check that this shell can still pattern-match it.  Exit 0 if the shell
# seems ok; configure uses a non-zero exit to switch to a better shell.
big=x
for round in 0 1 2 3 4 5 6 7 8 9 0; do
  big="$big$big"
done
case "$big" in
  *x*) exit 0 ;;
  *)   exit 1 ;;
esac
| true
|
a13204e9c03fb1e4bf9c1f9da8eab0c01486ca8f
|
Shell
|
gfontenot/dotfiles
|
/setup/install-homebrew
|
UTF-8
| 430
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/sh
set -e

# Bootstrap Homebrew: install it if missing, load its shell environment,
# then install the dependencies declared in the Brewfile.

banner() {
  printf '\n===========================================\n%s\n\n' "$1"
}

banner "Setting up Homebrew"

echo "Installing Homebrew"
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" \
  || echo "Homebrew already installed"

# Put brew on PATH for the remainder of this script (Apple-silicon prefix).
eval "$(/opt/homebrew/bin/brew shellenv)"

echo "Installing brew bundle"
brew tap Homebrew/bundle

echo "Installing homebrew dependencies"
brew bundle
| true
|
d3d6db0220f2eb3742363555476b909a8f6ed32d
|
Shell
|
jirka-h/fpuAccuracy
|
/plot.sh
|
UTF-8
| 16,960
| 2.53125
| 3
|
[] |
no_license
|
#!/bin/bash
# Plot FPU-accuracy measurement dumps (*.txt.xz) as PNG graphs via gnuplot.
# The CPU model name is used as a prefix for every plot title.
TITLE_PREFIX=$(lscpu | grep "Model name:" | awk -F':[[:blank:]]*' '{print $2}')
# NOTE(review): COMMAND_FILE appears unused in this script — confirm before removing.
COMMAND_FILE=plotcmds.txt
# FYL2X over (1.0, 2.0): error in ULPs vs. input argument.
# Fix: the xtics format string was missing its closing quote, which made
# gnuplot treat the rest of the command stream as part of the string.
[ -f fyl2x-r1.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FYL2X (1.0-2.0)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics 1.0,1.5,2.0 format "%.3f"
set output "fyl2x-r1.png"
set logscale x
plot "< xzcat fyl2x-r1.txt.xz" notitle with dots
EOF
[ -f fyl2x-r2.txt.xz ] && gnuplot <<EOF
set xtics 0.6,1.1,1.7 format "%.3f"
set title "$TITLE_PREFIX FYL2X (0.6 to 1.7)"
set xlabel "Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set output "fyl2x-r2.png"
set logscale x
plot "< xzcat fyl2x-r2.txt.xz" notitle with dots
EOF
[ -f fyl2x-r3.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FYL2X (0001-7FFD)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("0001" 0, "1000" 131048, "2000" 262128, "3000" 393208, "4000" 524288, "5000" 655368, "6000" 786448, "7000" 917528, "7FFD" 1048511)
set output "fyl2x-r3.png"
plot "< xzcat fyl2x-r3.txt.xz" using :2 notitle with dots
EOF
[ -f fyl2xp1-r1.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FYL2XP1 (0001-3FFE)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("0001" 0, "1000" 262096, "2000" 524256, "3000" 786416, "3FFE" 1048448)
set output "fyl2xp1-r1.png"
plot "< xzcat fyl2xp1-r1.txt.xz" using :2 notitle with dots
EOF
[ -f fyl2xp1-r2.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FYL2XP1 (3FBE-3FC5)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("3FBE" 0, "3FBF" 0x20000, "3FC0" 0x40000, "3FC1" 0x60000, "3FC2" 0x80000, "3FC3" 0xA0000, "3FC4" 0xC0000, "3FC5" 0xE0000)
set output "fyl2xp1-r2.png"
plot "< xzcat fyl2xp1-r2.txt.xz" using :2 notitle with dots
EOF
[ -f fyl2xp1-r3.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FYL2XP1 (3FEB-3FFE)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("3FEB" 0, "3FED" 0x19999, "3FEF" 0x33333, "3FF1" 0x4CCCC, "3FF3" 0x66666, "3FF5" 0x80000, "3FF7" 0x99999, "3FF9" 0xB3333, "3FFB" 0xCCCCC, "3FFD" 0xE6666)
set output "fyl2xp1-r3.png"
plot "< xzcat fyl2xp1-r3.txt.xz" using :2 notitle with dots
EOF
[ -f f2xm1-r1.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX F2XM1 (0001-3FFE)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("0001" 0, "1000" 262096, "2000" 524256, "3000" 786416, "3FFE" 1048448)
set output "f2xm1-r1.png"
plot "< xzcat f2xm1-r1.txt.xz" using :2 notitle with dots
EOF
[ -f f2xm1-r2.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX F2XM1 (3FBA-3FFE)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("3FBA" 0, "3FC3" 0x21642, "3FCC" 0x42C85, "3FD5" 0x642C8, "3FDE" 0x8590B, "3FE7" 0xA6F4D, "3FF0" 0xC8590, "3FF9" 0xE9BD3)
set output "f2xm1-r2.png"
plot "< xzcat f2xm1-r2.txt.xz" using :2 notitle with dots
EOF
[ -f f2xm1-r3.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX F2XM1 (3FFD-3FFE)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("3FFD" 0, "3FFE" 0x80000)
set output "f2xm1-r3.png"
plot "< xzcat f2xm1-r3.txt.xz" using :2 notitle with dots
EOF
[ -f f2xm1-r4.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX F2XM1 (-0.01 to +0.01)"
set xlabel "Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set output "f2xm1-r4.png"
plot "< xzcat f2xm1-r4.txt.xz" notitle with dots
EOF
[ -f f2xm1-r5.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX F2XM1 (-1.0 to +1.0)"
set xlabel "Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set output "f2xm1-r5.png"
plot "< xzcat f2xm1-r5.txt.xz" notitle with dots
EOF
[ -f fpatan-r1.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPATAN (0001-7FFD)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("0001" 0, "1000" 131048, "2000" 262128, "3000" 393208, "4000" 524288, "5000" 655368, "6000" 786448, "7000" 917528, "7FFD" 1048511)
set output "fpatan-r1.png"
plot "< xzcat fpatan-r1.txt.xz" using :2 notitle with dots
EOF
[ -f fpatan-r2.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPATAN (3FCA-4055)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("3FCA" 0, "3FDA" 0x1D41D, "3FEA" 0x3A83A, "3FFA" 0x57C57, "400A" 0x75075, "401A" 0x92492, "402A" 0xAF8AF, "403A" 0xCCCCC, "404A" 0xEA0EA)
set output "fpatan-r2.png"
plot "< xzcat fpatan-r2.txt.xz" using :2 notitle with dots
EOF
[ -f fpatan-r3.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPATAN (3FFA-4000)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "Error in ULPs"
set grid
set term png size 1280,800
set xtics ("3FFA" 0, "3FFB" 0x20000, "3FFC" 0x40000, "3FFD" 0x60000, "3FFE" 0x80000, "3FFF" 0xA0000, "4000" 0xC0000, "4001" 0xE0000)
set output "fpatan-r3.png"
plot "< xzcat fpatan-r3.txt.xz" using :2 notitle with dots
EOF
[ -f fcos-r1.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FCOS near pi/2"
set xlabel "Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set xtics ("1.47" 0, "1.508" 180038, "pi/2" 524289, "1.67" 1048578)
set ytics scale 1,0 0.001,10,100000 format "%.11G"
set logscale y
set grid
set term png size 1280,800
set output "fcos-r1.png"
plot "< xzcat fcos-r1.txt.xz" using :(abs (\$2)) notitle with dots
set term png size 640,400
set output "fcos-r1-small.png"
plot "< xzcat fcos-r1.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fcos-r2.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FCOS (3FD0-403E)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set logscale y
set xtics ("pi/2" 1816954, "3FD0" 0, "3FD8" 0x4B27E, "3FE0" 0x964FD, "3FE8" 0xE177C, "3FF0" 0x12C9FB, "3FF8" 0x177C7A, "4008" 0x20E177, "4010" 0x2593F6, "4018" 0x2A4675, "4020" 0x2EF8F4, "4028" 0x33AB73, "4030" 0x385DF1, "4038" 0x3D1070)
set ytics scale 1,0 1E-10,10,1E+25 format "%.6G"
set grid
set term png size 1280,800
set output "fcos-r2.png"
plot "< xzcat fcos-r2.txt.xz" using :(abs (\$2)) notitle with dots
set term png size 640,400
set output "fcos-r2-small.png"
set ytics scale 1,0 1E-10,100,1E+25 format "%.6G"
plot "< xzcat fcos-r2.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fcos-r3.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FCOS, area surrounding maximum error"
set xlabel "Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set logscale y
set xtics ("9223372035619609113" 0, "maximum error arg (9223372035620657689)" 2097153)
set grid
set term png size 1280,800
set output "fcos-r3.png"
plot "< xzcat fcos-r3.txt.xz" using :(abs (\$2)) notitle with dots
set term png size 640,400
set output "fcos-r3-small.png"
plot "< xzcat fcos-r3.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fsin-r1.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FSIN near pi"
set xlabel "Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set xtics ("2.95" 0, "3.016" 179358, "pi" 524289, "3.34" 1048578)
set ytics scale 1,0 0.001,10,100000 format "%.11G"
set grid
set term png size 1280,800
set output "fsin-r1.png"
set logscale y
plot "< xzcat fsin-r1.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fsin-r2.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FSIN (3FD0-403E)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set grid
set term png size 1280,800
set output "fsin-r2.png"
set logscale y
set xtics ("pi" 1855084, "3FD0" 0, "3FD8" 0x4B27E, "3FE0" 0x964FD, "3FE8" 0xE177C, "3FF0" 0x12C9FB, "3FF8" 0x177C7A, "4008" 0x20E177, "4010" 0x2593F6, "4018" 0x2A4675, "4020" 0x2EF8F4, "4028" 0x33AB73, "4030" 0x385DF1, "4038" 0x3D1070)
set ytics scale 1,0 1E-10,10,1E+25 format "%.6G"
plot "< xzcat fsin-r2.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fsin-r3.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FSIN, area surrounding maximum error"
set xlabel "Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set logscale y
set xtics ("9223372035085125665" 0, "maximum error arg (9223372035086174241)" 2097153)
set grid
set term png size 1280,800
set output "fsin-r3.png"
plot "< xzcat fsin-r3.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fptan-r1.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPTAN near pi/2"
set xlabel "Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set xtics ("1.47" 0, "1.508" 180038, "pi/2" 524289, "1.67" 1048578)
set ytics scale 1,0 0.001,10,100000 format "%.11G"
set grid
set term png size 1280,800
set output "fptan-r1.png"
set logscale y
plot "< xzcat fptan-r1.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fptan-r2.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPTAN (3FD0-403E)"
set xlabel "Biased Exponent of Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set grid
set term png size 1280,800
set output "fptan-r2.png"
set logscale y
set xtics ("pi/2" 1816954, "3FD0" 0, "3FD8" 0x4B27E, "3FE0" 0x964FD, "3FE8" 0xE177C, "3FF0" 0x12C9FB, "3FF8" 0x177C7A, "4008" 0x20E177, "4010" 0x2593F6, "4018" 0x2A4675, "4020" 0x2EF8F4, "4028" 0x33AB73, "4030" 0x385DF1, "4038" 0x3D1070)
set ytics scale 1,0 1E-10,10,1E+25 format "%.6G"
plot "< xzcat fptan-r2.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fptan-r3.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPTAN, area surrounding maximum error"
set xlabel "Extended Precision input Argument"
set ylabel "abs (error in ULPs)"
set logscale y
set xtics ("9223372036559830812" 0, "maximum error arg (9223372036560879388)" 2097153)
set grid
set term png size 1280,800
set output "fptan-r3.png"
plot "< xzcat fptan-r3.txt.xz" using :(abs (\$2)) notitle with dots
set term png size 640,400
set output "fptan-r3-small.png"
plot "< xzcat fptan-r3.txt.xz" using :(abs (\$2)) notitle with dots
EOF
[ -f fcos-r4.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FCOS ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "cos (x)"
set ytics -1,0.2,1
set xtics ("0" 0, "0.5 pi" pi*0.5, "pi" pi, "1.5pi" pi*1.5, "2.0 pi" pi*2.0)
set grid
set term png size 1280,800
set output "fcos-r4.png"
plot "< xzcat fcos-r4.txt.xz" using (\$1):(cos (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? cos (\$1):NaN) notitle with boxes lc 1
set term png size 640,400
set output "fcos-r4-small.png"
plot "< xzcat fcos-r4.txt.xz" using (\$1):(cos (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? cos (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fcos-r5.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FCOS ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "cos (x)"
set ytics -1,0.2,1
set xtics ("0" 0, "0.5 pi" pi*0.5, "pi" pi, "1.5pi" pi*1.5, "2.0 pi" pi*2.0)
set grid
set term png size 1280,600
set output "fcos-r5.png"
plot "< xzcat fcos-r5.txt.xz" using (\$1):(cos (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? cos (\$1):NaN) notitle with boxes lc 1
set term png size 640,300
set output "fcos-r5-small.png"
plot "< xzcat fcos-r5.txt.xz" using (\$1):(cos (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? cos (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fcos-r6.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FCOS ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "cos (x)"
set ytics -1,0.2,1
set xtics ("0" 0, "8 pi" pi*8.0, "16 pi" pi*16.0, "24 pi" pi*24.0, "32 pi" pi*32.0)
set grid
set term png size 1280,400
set output "fcos-r6.png"
plot "< xzcat fcos-r6.txt.xz" using (\$1):(cos (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? cos (\$1):NaN) notitle with boxes lc 1
set term png size 640,200
set ytics -1,0.5,1
set output "fcos-r6-small.png"
plot "< xzcat fcos-r6.txt.xz" using (\$1):(cos (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? cos (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fsin-r4.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FSIN ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "sin (x)"
set ytics -1,0.2,1
set xtics ("0" 0, "0.5 pi" pi*0.5, "pi" pi, "1.5pi" pi*1.5, "2.0 pi" pi*2.0)
set grid
set term png size 1280,800
set output "fsin-r4.png"
plot "< xzcat fsin-r4.txt.xz" using (\$1):(sin (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? sin (\$1):NaN) notitle with boxes lc 1
set term png size 640,400
set output "fsin-r4-small.png"
plot "< xzcat fsin-r4.txt.xz" using (\$1):(sin (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? sin (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fsin-r5.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FSIN ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "sin (x)"
set ytics -1,0.2,1
set xtics ("0" 0, "0.5 pi" pi*0.5, "pi" pi, "1.5pi" pi*1.5, "2.0 pi" pi*2.0)
set grid
set term png size 1280,600
set output "fsin-r5.png"
plot "< xzcat fsin-r5.txt.xz" using (\$1):(sin (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? sin (\$1):NaN) notitle with boxes lc 1
set term png size 640,300
set output "fsin-r5-small.png"
plot "< xzcat fsin-r5.txt.xz" using (\$1):(sin (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? sin (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fsin-r6.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FSIN ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "sin (x)"
set ytics -1,0.2,1
set xtics ("0" 0, "8 pi" pi*8.0, "16 pi" pi*16.0, "24 pi" pi*24.0, "32 pi" pi*32.0)
set grid
set term png size 1280,400
set output "fsin-r6.png"
plot "< xzcat fsin-r6.txt.xz" using (\$1):(sin (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? sin (\$1):NaN) notitle with boxes lc 1
set term png size 640,200
set output "fsin-r6-small.png"
set ytics -1,0.5,1
plot "< xzcat fsin-r6.txt.xz" using (\$1):(sin (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? sin (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fptan-r4.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPTAN ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "tan (x)"
set yrange [-10:10]
set ytics -10,1,10
set xtics ("0" 0, "0.5 pi" pi*0.5, "pi" pi, "1.5pi" pi*1.5, "2.0 pi" pi*2.0)
set grid
set term png size 1280,800
set output "fptan-r4.png"
plot "< xzcat fptan-r4.txt.xz" using (\$1):(tan (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? tan (\$1):NaN) notitle with boxes lc 1
set term png size 640,400
set output "fptan-r4-small.png"
plot "< xzcat fptan-r4.txt.xz" using (\$1):(tan (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? tan (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fptan-r5.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPTAN ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "tan (x)"
set yrange [-10:10]
set ytics -10,1,10
set xtics ("0" 0, "0.5 pi" pi*0.5, "pi" pi, "1.5pi" pi*1.5, "2.0 pi" pi*2.0)
set grid
set term png size 1280,600
set output "fptan-r5.png"
plot "< xzcat fptan-r5.txt.xz" using (\$1):(tan (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? tan (\$1):NaN) notitle with boxes lc 1
set term png size 640,300
set output "fptan-r5-small.png"
set ytics -10,2,10
plot "< xzcat fptan-r5.txt.xz" using (\$1):(tan (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? tan (\$1):NaN) notitle with boxes lc 1
EOF
[ -f fptan-r6.txt.xz ] && gnuplot <<EOF
set title "$TITLE_PREFIX FPTAN ranges where error >= 1.0 ulp"
set xlabel "Extended Precision input Argument"
set ylabel "tan (x)"
set yrange [-10:10]
set ytics -10,1,10
set xtics ("0" 0, "8 pi" pi*8.0, "16 pi" pi*16.0, "24 pi" pi*24.0, "32 pi" pi*32.0)
set grid
set term png size 1280,400
set output "fptan-r6.png"
plot "< xzcat fptan-r6.txt.xz" using (\$1):(tan (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? tan (\$1):NaN) notitle with boxes lc 1
set term png size 640,200
set ytics -10,5,10
set output "fptan-r6-small.png"
plot "< xzcat fptan-r6.txt.xz" using (\$1):(tan (\$1)) notitle with boxes lc 2, "" using (\$1):((abs (\$2) >= 1.0)? tan (\$1):NaN) notitle with boxes lc 1
EOF
| true
|
ecb119d05bc674c1660c44ea3219206579509e65
|
Shell
|
lodybo/dotfiles
|
/shell/functions.sh
|
UTF-8
| 2,429
| 3.96875
| 4
|
[
"MIT"
] |
permissive
|
# Shell functions
############################################
# Dotfiles
############################################
dotfiles_latest()
{
  # Compare the newest local dotfiles tag against the newest tag on GitHub
  # and tell the user whether an update is needed.  Relies on $DOTFILES
  # pointing at the local clone.
  # Newest local tag = last line of `git tag --list` (the old triple-awk
  # pipeline contained a no-op `awk '{print $0}'` stage).
  dotfiles_version=$(git -C "$DOTFILES" tag --list | tail -n 1)
  # Newest remote tag: last "refs/tags/<name>" line from ls-remote; splitting
  # the whole line on "/" makes the tag name the final field.
  dotfiles_latest_tag="$(git ls-remote --tags https://github.com/lodybo/dotfiles.git | awk -F/ 'END{print $NF}')"
  upgrade_instructions="It seems like you're fine, Lody."
  if [[ "$dotfiles_version" != "$dotfiles_latest_tag" ]]; then
    upgrade_instructions="There's a newer version available, please update!"
  fi
  echo "I investigated some version info, and found the following:"
  echo ""
  echo " Dotfiles current: $dotfiles_version"
  echo " Dotfiles latest: $dotfiles_latest_tag"
  echo ""
  echo "$upgrade_instructions"
  echo ""
}
############################################
# Git
############################################
# Create git changelog, usage: 'changelog <last tag>'
# $1 - tag (or any ref) to start from; lists one-line commits up to HEAD.
git_changelog() { git log --oneline --no-merges "$1"..HEAD; }
# Delete both remote and local branch
# $1 - branch name.  Deletes the remote branch first; `git branch -d`
# still refuses to drop an unmerged local branch.
git_remove_branch()
{
branch=$1
git push -d origin "$branch"
git branch -d "$branch"
}
# Show the git log history N commits long
# $1 - number of commits to show.
glon()
{
commits=$1
git log --oneline -n"$commits"
}
# Update the current with the latest state of another branch
# $1 - branch to merge in.  `fetch origin branch:branch` updates the local
# ref without checking it out, then the merge brings it into the current branch.
git_update_to_latest()
{
branch=$1
git fetch origin "$branch":"$branch"
git merge "$branch"
}
############################################
# Misc
############################################
# Run a cheat sheet for terminal commands.
# Powered by http://cheat.sh
# $1 - topic to look up, or '-h' for cheat.sh's own help page.
cheat() {
# First (and only) argument is the query
query=$1
# Check whether '-h' is passed and if so, query the help command
if [[ "$query" == "-h" ]]
then
# Curl for help
curl cheat.sh/:help
else
# Curl cheat sheet
curl cheat.sh/"$query"
fi
}
# Print a QR code of a URL (or something else)
# Powered by http://qrenco.de
# $1 - text to encode; the service renders an ASCII QR code in the terminal.
qrify() {
query=$1
printf "\n"
echo "Printing QR code for $query:"
printf "\n"
curl qrenco.de/"$query"
printf "\n"
}
# Print localised weather info, optionally with location param
# Taken from: https://www.jamieonkeys.dev/posts/calendar-and-weather-in-terminal/
# $1 - optional location; without it wttr.in geolocates by IP.
weather() {
if [ $# -eq 0 ] # If no argument has been passed to this function
then
curl wttr.in
else
curl wttr.in/"$1" # Append location
fi
}
| true
|
f9966389addff42a036a8aa9d145a3849dcc4db7
|
Shell
|
JamesLinus/shell
|
/sh.d/repository.sh
|
UTF-8
| 634
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/sh
# Massimo Lauria, 2013, 2015
#
# Commands for controlling all my repositories
# simultaneously.
# It requires `function` capabilities from the shell.
[ -n "$BASH" ] || [ -n "$ZSH_NAME" ] || return 0
function myrepos() {
  # Run a git subcommand across all of my main repositories.
  # Usage: myrepos [git args...]; with no args, defaults to `git status`.
  MAINREPOS=(~/config ~/personal ~/lavori)
  # Keep the arguments as an array so multi-word values survive intact
  # (the old scalar COMMANDS=$@ re-split on whitespace).
  local -a cmds=("$@")
  if [[ ${#cmds[@]} -eq 0 ]]; then
    cmds=(status)
  fi
  for p in "${MAINREPOS[@]}"; do
    echo "Executing '${cmds[*]}' on repository $p"
    # Skip the repo (rather than run git in the wrong directory) if cd fails.
    pushd "$p" || continue
    git "${cmds[@]}"
    popd
  done
}
# Convenience shortcuts over all tracked repositories.
alias st="myrepos status"
alias push="myrepos push"
alias pull="myrepos pull"
# Pull everywhere first, then push local commits.
alias gsync="myrepos pull && myrepos push"
| true
|
3ce7ccddc5bc84dddfd0b2aebc132e8ed97b4b6a
|
Shell
|
Aitthi/Nginx-Server-Blocks
|
/ng-mt.sh
|
UTF-8
| 1,334
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
# Create and enable an nginx server block (virtual host) for a host name
# entered interactively: document root under /var/www/<host>/html with a
# placeholder page, plus a site definition in /etc/nginx/sites-available.

# ANSI colour codes.
RESTORE='\033[0m'
RED='\033[00;31m'
GREEN='\033[00;32m'
YELLOW='\033[00;33m'
BLUE='\033[00;34m'
PURPLE='\033[00;35m'
CYAN='\033[00;36m'
LIGHTGRAY='\033[00;37m'
LRED='\033[01;31m'
LGREEN='\033[01;32m'
LYELLOW='\033[01;33m'
LBLUE='\033[01;34m'
LPURPLE='\033[01;35m'
LCYAN='\033[01;36m'
WHITE='\033[01;37m'

echo -e "${LGREEN}"
echo "#################################"
echo "### Setting Server for WebApp ###"
echo "#################################"
echo -e "${RESTORE}"

read -p 'Host Name example.com or domain.example.com : ' hostName
webroot="/var/www/${hostName}/html"

sudo mkdir -p "$webroot"
sudo chown -R "$USER:$USER" "$webroot"
sudo chmod -R 755 /var/www

# Placeholder landing page.  Written via `sudo tee` so the write works even
# before the chown has taken effect for the current shell.
sudo tee "$webroot/index.html" > /dev/null <<EOF
<html>
<head>
<title>Welcome to ${hostName}</title>
</head>
<body>
<h1>Success! The ${hostName} server block is working!</h1>
</body>
</html>
EOF

# Site definition.  Fix: the old `cat > /etc/nginx/...` redirection ran
# unprivileged and failed for non-root users; `sudo tee` writes with root.
# The redundant copy of the default site (immediately overwritten) is gone.
sudo tee "/etc/nginx/sites-available/${hostName}" > /dev/null <<EOF
server {
listen 80;
listen [::]:80;
root /var/www/${hostName}/html;
index index.html index.htm;
server_name ${hostName};
location / {
try_files \$uri \$uri/ =404;
}
}
EOF

sudo ln -s "/etc/nginx/sites-available/${hostName}" /etc/nginx/sites-enabled/
sudo service nginx restart
| true
|
f82aeb6c2814885ae8f7350776e4838ec9f1faff
|
Shell
|
mediamicroservices/mm
|
/makemetadata
|
UTF-8
| 2,925
| 4.25
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# make metadata creates a set of metadata reports for an archival information package on all files in the objects subdirectory.
# Reports per file: ffprobe XML, mediainfo XML, mediatrace XML, exiftool XML;
# finishes by regenerating the package METS via the sibling `makemets` script.
# Helper functions (_report, _run, _log, _mkdir2, _maketemp, ...) come from mmfunctions.
VERSION="0.2"
SCRIPTDIR=$(dirname "${0}")
. "${SCRIPTDIR}/mmfunctions" || { echo "Missing '${SCRIPTDIR}/mmfunctions'. Exiting." ; exit 1 ;};
DEPENDENCIES=(ffprobe mediaconch exiftool)
_initialize_make
_usage(){
echo
echo "$(basename "${0}") ${VERSION}"
echo "Produces a set of metadata reports for an archival information package on all files in the objects subdirectory."
echo "Usage: $(basename "${0}") [ -h ] package1 [ package2 ... ]"
echo "  -h (show usage)"
echo "  -r deletes existing metadata files"
exit
}
[ "${#}" = 0 ] && _usage
# command-line option to remove existing metadata files
OPTIND=1
while getopts ":rh" OPT ; do
case "${OPT}" in
h) _usage ;;
r) DELETEFILEMETA="Y" ;;
*) echo "bad option -${OPTARG}" ; _usage ;;
:) echo "Option -${OPTARG} requires an argument" ; _writeerrorlog "makemetadata" "The option selected required an argument and none was provided. The script had to exit." ; exit 1 ;;
esac
done
shift $(( ${OPTIND} - 1 ))
# Ensure the fileMeta output dir exists; skips the current package when reports
# already exist and -r was not given.
_set_up_filemeta(){
_log -b
if [[ -d "${PACKAGE_PATH}/${OUTPUTDIR}" && "${DELETEFILEMETA}" != "Y" ]] ; then
_report -wt "WARNING ${PACKAGE_PATH}/${OUTPUTDIR} already exists, not making metadata."
# NOTE(review): `continue` here is meant to skip this package in the caller's
# while-loop; relying on loop control propagating out of a function is
# bash-version-sensitive — confirm behaviour on target systems.
continue
fi
_mkdir2 "${PACKAGE_PATH}/${OUTPUTDIR}"
}
_log -b
OUTPUTDIR="./metadata/fileMeta"
# Process each package path given on the command line.
while [ "${*}" != "" ] ; do
PACKAGE_PATH="${1}"
MMPWD=$(pwd)
shift
_set_up_filemeta
if [[ "${DELETEFILEMETA}" = "Y" && -d "${PACKAGE_PATH}/${OUTPUTDIR}" ]] ; then
rm -rv "${PACKAGE_PATH}/${OUTPUTDIR}"
fi
cd "${PACKAGE_PATH}"
# List all object files, skipping dotfiles and derivative access images.
FILELIST=$(_maketemp)
find "./objects" -type f ! -name ".*" ! -path "*/access/images/*" > "${FILELIST}"
_report -dt "Making metadata reports for ${PACKAGE_PATH}."
while read FILE ; do
FILENAMEROOT=$(basename "${FILE}")
PARENTDIR=$(dirname "${FILE}")
# Mirror the objects/ directory structure under metadata/fileMeta.
FILEOUTPUT="${OUTPUTDIR}/${PARENTDIR}"
if [ ! -d "${FILEOUTPUT}" ] ; then
_run mkdir -p "${FILEOUTPUT}"
fi
FFPROBEXML="${FILEOUTPUT}/${FILENAMEROOT}_ffprobe.xml"
MEDIAINFOXML="${FILEOUTPUT}/${FILENAMEROOT}_mediainfo.xml"
MEDIAINFOTRACE="${FILEOUTPUT}/${FILENAMEROOT}_mediatrace.xml"
EXIFTOOLXML="${FILEOUTPUT}/${FILENAMEROOT}_exiftool.xml"
ffprobe 2> /dev/null "${FILE}" -show_format -show_streams -show_data -show_error -show_versions -show_chapters -noprivate -of xml="q=1:x=1" > "${FFPROBEXML}"
mediaconch -mi -fx "${FILE}" | xmlstarlet fo > "${MEDIAINFOXML}"
mediaconch -mi -mt -fx "${FILE}" | xmlstarlet fo > "${MEDIAINFOTRACE}"
exiftool -X "${FILE}" > "${EXIFTOOLXML}"
done < "${FILELIST}"
cd "${MMPWD}"
"${SCRIPTDIR}/makemets" "${PACKAGE_PATH}"
_run rm -r -f "${FILELIST}"
_log -e
done
| true
|
33c87d92444e61b4edf80dbfc8e812918dc771e8
|
Shell
|
ashim95/pbsmt_scripts
|
/train_supervised.sh
|
UTF-8
| 1,706
| 2.859375
| 3
|
[] |
no_license
|
# Script for training a supervised translation model for SRC-TGT pair
# Usage: train_supervised.sh <trainDir> <SRC> <TGT> <target-LM-file>
#   $1 - training directory (relative to $PWD) containing corpus.SRC-TGT.true.*
#   $2 - source language code, $3 - target language code
#   $4 - path (relative to $PWD) of the pre-built target-side language model
trainDir=$PWD/$1
SRC=$2
TGT=$3
LM_TGT=$PWD/$4
N_THREADS=48
# moses
MOSES_PATH=$PWD/moses_linux_64bit # PATH_WHERE_YOU_INSTALLED_MOSES
TOKENIZER=$MOSES_PATH/scripts/tokenizer/tokenizer.perl
NORM_PUNC=$MOSES_PATH/scripts/tokenizer/normalize-punctuation.perl
INPUT_FROM_SGM=$MOSES_PATH/scripts/ems/support/input-from-sgm.perl
REM_NON_PRINT_CHAR=$MOSES_PATH/scripts/tokenizer/remove-non-printing-char.perl
TRAIN_TRUECASER=$MOSES_PATH/scripts/recaser/train-truecaser.perl
TRUECASER=$MOSES_PATH/scripts/recaser/truecase.perl
DETRUECASER=$MOSES_PATH/scripts/recaser/detruecase.perl
TRAIN_LM=$MOSES_PATH/bin/lmplz
TRAIN_MODEL=$MOSES_PATH/scripts/training/train-model.perl
MULTIBLEU=$MOSES_PATH/scripts/generic/multi-bleu.perl
MOSES_BIN=$MOSES_PATH/bin/moses
MOSES_CLEAN=$MOSES_PATH/scripts/training/clean-corpus-n.perl
# NOTE(review): only TRAIN_MODEL and MOSES_CLEAN are used below; the other tool
# paths look copied from a shared preamble — confirm before pruning.
echo "TRAIN DIRECTORY -- $trainDir"
# Clean the corpus once: keep sentence pairs of 1..80 tokens.  Skipped when the
# cleaned corpus for both sides already exists.
if ! [[ -f "$trainDir/corpus.$SRC-$TGT.clean.$SRC" && -f "$trainDir/corpus.$SRC-$TGT.clean.$TGT" ]]; then
echo "Cleaning the corpus ... "
echo "Keeping length of sentences between 1 and 80"
$MOSES_CLEAN $trainDir/corpus.$SRC-$TGT.true $SRC $TGT $trainDir/corpus.$SRC-$TGT.clean 1 80
fi
echo
echo "Using language model for language : $TGT from file : $LM_TGT"
echo
echo "Going to start training the translation model for pair $SRC-$TGT"
# Launch training detached (nohup + trailing '&'): the script returns
# immediately and all training output goes to training-SRC-TGT.out.
nohup nice $TRAIN_MODEL --root-dir $trainDir -cores $N_THREADS -corpus $trainDir/corpus.$SRC-$TGT.clean -f $SRC -e $TGT -alignment grow-diag-final-and -reordering msd-bidirectional-fe -lm 0:5:$LM_TGT:8 -external-bin-dir $MOSES_PATH/training-tools --mgiza --mgiza-cpus $N_THREADS > $trainDir/training-$SRC-$TGT.out 2>&1&
| true
|
0f3247c0300bed678cff790d72e79472524f9b64
|
Shell
|
MelodyShih/cuFINUFFT-bench
|
/scripts/run_alltype_acc.sh
|
UTF-8
| 824
| 2.96875
| 3
|
[] |
no_license
|
#!/bin/zsh
# Run NUFFT accuracy benchmarks for type-1 and type-2 transforms in 2D and 3D.
# GPU results go to ../results/, CPU results to ../results/$NODENAME/ with a
# "_cpu" suffix.  Output order matches the original: 2d1, 3d1, 2d2, 3d2.

has_gpu=0
# 2>&1 keeps "command not found" noise off the terminal when nvidia-smi is absent.
nvidia-smi > /dev/null 2>&1 && has_gpu=1

if [ "$has_gpu" = "1" ]; then
    outdir="../results"
    suffix=""
else
    #NODENAME='skylake'
    NODENAME='broadwell'
    outdir="../results/$NODENAME"
    suffix="_cpu"
fi

# One benchmark per (transform type, dimension) pair instead of eight
# copy-pasted commands.
for type in 1 2; do
    for dim in 2 3; do
        echo "running ${dim}d type ${type}"
        python "nuffttype${type}_acc.py" "$dim" > "${outdir}/${dim}d${type}_acc${suffix}.dat"
    done
done
| true
|
b6111a23a2166eab1a6283f4b8c61188a85490b5
|
Shell
|
requiel20/fuzzing-SAT-solvers
|
/suts/solver1/test_files.sh
|
UTF-8
| 376
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# Test that sat solver constructs formulas with respect to the cnf files.
# For every tests/*.cnf file, strip the DIMACS decorations (trailing " 0"
# clause terminators, comment lines, then the leading problem line) and diff
# the result against the solver's output; one .diff report per input file.
REPORT_DIR='./tests/report'
mkdir -p -- "$REPORT_DIR"
# Clear reports from a previous run (-f makes this a no-op when none exist,
# replacing the fragile `ls -A` output test).
rm -f -- "$REPORT_DIR"/*
for file in ./tests/*.cnf
do
  [ -e "$file" ] || continue   # unmatched glob: no .cnf files, nothing to do
  basename=$(basename -- "$file")
  # The seds run in sequence on purpose: the problem line is only "line 1"
  # after the comment lines above it have been removed.
  diff -B <( sed 's/ 0//g' "$file" | sed '/^c/d' | sed 1d) <( ./sat "$file" ) > "$REPORT_DIR/$basename.diff"
done
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.