blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
1307cd31475ec7a3a65a5a81c703ac4b3a10e32d
|
Shell
|
quchen/talks
|
/2019-10-18_lambda-calculus-intro/slides/build-loop
|
UTF-8
| 184
| 2.578125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Rebuild loop for the slides: run make, pop a desktop notification with the
# result, then wait for ENTER before rebuilding again.
while true; do
    # Use an explicit if/else: the original `make && ok || fail` form would
    # also fire the FAILURE notification when make succeeded but the success
    # notify-send itself failed.
    if make; then
        notify-send -u critical "Pandoc done"
    else
        notify-send -u critical "CONVERSION FAILURE"
    fi
    echo "ENTER for new conversion"
    # -r: don't let a stray backslash mangle the (discarded) input
    read -r
done
| true
|
e916ee33a9e22d30dca0252666571908dca2ae17
|
Shell
|
rico360z28/tools
|
/scripts/ace-point-install/ace.sh
|
UTF-8
| 7,013
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
################################################################################################
# USER CONFIGURATIONS
#
# Installer for the Intel RSP SW Toolkit gateway.
# Only the three settings below are meant to be edited by the user.
#
# Define Project and Deploy Directories
PROJECT_DIR="/home/intel/git"
DEPLOY_DIR="/home/intel"
# "true" => start the gateway and configure sensors after installation
GATEWAY_START="true"
################################################################################################
# STOP HERE!
# DO NOT EDIT ANY FURTHER
#
# Script Parameters
GATEWAY_GUIDE_VERSION="2018.12.20"
GIT_PATH="https://github.com/intel/rsp-sw-toolkit.git"
# Archive produced by 'gradle buildTar' in the cloned project, extracted later
DEPLOY_PROJECT="${PROJECT_DIR}/rsp-sw-toolkit/gateway/build/distributions/gateway-1.0.tgz"
# apt package groups installed in three passes below
INSTALL_DEVTOOLS="openjdk-8-jdk git gradle"
INSTALL_RUNTIME="mosquitto avahi-daemon ntp ssh"
INSTALL_EXTRAS="mosquitto-clients sshpass"
# Exact PRETTY_NAME value expected from /etc/os-release
REQUIRED_OS="Ubuntu 18.04.1 LTS"
SCRIPT_VERSION="1.0"
# Flipped to "FAIL" by the preliminary checks; gates the rest of the script
SYSTEM_CHECK="PASS"
# Sleep time after launching the gateway before configuring sensors
GATEWAY_START_DELAY="1m"
# ANSI escape sequences for coloured status output
BBLUE="\033[0;44m" # Background Blue
BRED="\033[0;101m" # Background Red
NC="\033[0m" # No Color
################################################################################################
# SCRIPT INTRO AND USER ACCEPTANCE
#
# Show banner, versions and the warranty disclaimer, then require an explicit
# y/n answer before doing anything to the system.
clear
printf "\n${BBLUE}Intel RSP SW Toolkit-Gateway Installation Script${NC}\n"
printf "Install Script Based on Document Version: %s \n" "${GATEWAY_GUIDE_VERSION}"
printf "Script Version: %s \n\n" "${SCRIPT_VERSION}"
printf "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, \n"
printf "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR \n"
printf "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE \n"
printf "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR \n"
printf "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE \n"
printf "OR OTHER DEALINGS IN THE SOFTWARE. \n\n"
# Loop until the user answers y/Y (continue) or n/N (abort)
while true; do
read -p "Do you wish to use this script to install and configure your gateway (y or n)? " yn
case $yn in
[Yy]* ) break;;
[Nn]* ) printf "Exiting installation script\n"; exit;;
* ) printf "Please answer yes or no.\n";;
esac
done
################################################################################################
# PRELIMINARY SYSTEM CHECKS
#
# Verify root privileges, the expected OS release and internet access before
# touching the system. Failures accumulate in SYSTEM_CHECK_TEXT and are
# reported together before exiting.
#
# Check to see if script was executed as root (quote the substitution so an
# empty result cannot break the test)
if ! [ "$(id -u)" = 0 ]; then
    SYSTEM_CHECK_TEXT+="This script must be run as sudo."
    SYSTEM_CHECK="FAIL"
fi
# Check operating system version. grep reads the file directly (no cat needed).
# NOTE: tr's set '\"' deliberately deletes both backslashes and double quotes,
# exactly as the original pipeline did.
OS_VERSION="$(grep "PRETTY_NAME" /etc/os-release | tr -d '\"')"
# Strip the 'PRETTY_NAME=' prefix (12 chars) and keep up to 18 characters
OS_VERSION="${OS_VERSION:12:18}"
if ! [ "$REQUIRED_OS" == "$OS_VERSION" ]; then
    SYSTEM_CHECK_TEXT+="You need ${REQUIRED_OS}, you currently have ${OS_VERSION}."
    SYSTEM_CHECK="FAIL"
fi
# Check if computer is on a network: TCP probe of Google DNS with a 10 s timeout
if ! nc -z -w 10 8.8.8.8 53 >/dev/null 2>&1; then
    SYSTEM_CHECK_TEXT+="Computer needs access to the internet."
    SYSTEM_CHECK="FAIL"
fi
if ! [ "$SYSTEM_CHECK" = "PASS" ]; then
    printf "\n\n${BRED}Preliminary System Check Failure.${NC}\n"
    printf "%s \n" "${SYSTEM_CHECK_TEXT}"
    printf "Exiting Script.\n"
    exit 1
fi
################################################################################################
# USER VALIDATION
#
# Gateway Configuration: show the directories and git path that will be used
# and require confirmation, then refresh the apt package index.
#
VALIDATION_TEXT+=$'Project Directory: '
VALIDATION_TEXT+="${PROJECT_DIR}"
VALIDATION_TEXT+=$'\nDeployment Directory: '
VALIDATION_TEXT+="${DEPLOY_DIR}"
VALIDATION_TEXT+=$'\nGIT Path: '
VALIDATION_TEXT+="${GIT_PATH}"
VALIDATION_TEXT+=$'\n'
clear
printf "\n${BBLUE}Intel RSP SW Toolkit-Gateway Installation Script${NC}\n"
printf "Install Script Based on Document Version: %s \n\n" "${GATEWAY_GUIDE_VERSION}"
printf "Please verify the following information:\n"
printf "%s \n" "${VALIDATION_TEXT}"
printf "If the information is incorrect then exit script, edit and restart script. \n"
# Loop until the user answers y/Y (continue) or n/N (abort)
while true; do
read -p "Do you want to proceed with the installation (y or n)? " yn
case $yn in
[Yy]* ) break;;
[Nn]* ) printf "Exiting installation script\n"; exit;;
* ) printf "Please answer yes or no.";;
esac
done
# performing apt-get update and validating internet connection
printf "\n${BBLUE}ACE Point Script:${NC} Preforming system update"
printf "\n"
# Test the command directly instead of inspecting $? afterwards
if ! sudo apt-get update; then
    # apt-get failed.
    # BUGFIX: the colour reset was written as "{NC}" (missing $), printing the
    # literal text "{NC}" instead of resetting the colour.
    printf "\n${BBLUE}ACE Point Script:${NC} apt-get update failure, exiting script\n"
    exit 1
fi
# Create the working directories
printf "\n${BBLUE}ACE Point Script:${NC} Creating Project Directory: ${PROJECT_DIR}"
printf "\n"
mkdir -p ${PROJECT_DIR}
printf "\n${BBLUE}ACE Point Script:${NC} Creating Deployment Directory: ${DEPLOY_DIR}"
printf "\n"
mkdir -p ${DEPLOY_DIR}
# Install the three package groups defined at the top of the script
# (variables intentionally unquoted so the package lists word-split)
printf "\n${BBLUE}ACE Point Script:${NC} Installing Development Tools"
printf "\n"
sudo apt-get -y install ${INSTALL_DEVTOOLS}
printf "\n${BBLUE}ACE Point Script:${NC} Installing Runtime Packages"
printf "\n"
sudo apt-get -y install ${INSTALL_RUNTIME}
printf "\n${BBLUE}ACE Point Script:${NC} Installing Extra Packages"
printf "\n"
sudo apt-get -y install ${INSTALL_EXTRAS}
# Clone and build the toolkit, producing the gateway tarball
printf "\n${BBLUE}ACE Point Script:${NC} Cloning project from GITHUB"
printf "\n"
cd ${PROJECT_DIR}
git clone ${GIT_PATH}
printf "\n${BBLUE}ACE Point Script:${NC} Build an archive suitable for deployment"
printf "\n"
cd "${PROJECT_DIR}/rsp-sw-toolkit/gateway"
gradle buildTar
# Unpack the built archive into the deployment directory
printf "\n${BBLUE}ACE Point Script:${NC} Deploy the project"
printf "\n"
cd "${DEPLOY_DIR}"
tar -xf "${DEPLOY_PROJECT}"
# Generate TLS certificates/keys into the gateway cache directory
printf "${BBLUE}ACE Point Script:${NC} Generate certificates and keys"
printf "\n"
mkdir -p "${DEPLOY_DIR}/gateway/cache"
cd "${DEPLOY_DIR}/gateway/cache"
"${DEPLOY_DIR}/gateway/gen_keys.sh"
# Optionally start the gateway and wire up sensors/monitors
if [ $GATEWAY_START == "true" ]; then
printf "\n${BBLUE}ACE Point Script:${NC} Installing Extra Packages\n"
printf "\n"
sudo apt-get -y install ${INSTALL_EXTRAS}
# Launch the gateway in its own terminal, then give it time to come up
printf "\n${BBLUE}ACE Point Script:${NC} Starting Gateway, please wait"
printf "\n"
gnome-terminal -e "bash -c \"exec /home/intel/gateway/run.sh\"" >/dev/null 2>&1
sleep ${GATEWAY_START_DELAY}
# Drive the gateway's SSH console non-interactively via sshpass + here-doc
# (delimiter is '!'); assigns all sensors to the TEST facility and starts reads
printf "\n${BBLUE}ACE Point Script:${NC} Configuring Sensors to TEST facility and starting to read"
printf "\n"
sshpass -p 'gwconsole' ssh -oStrictHostKeyChecking=no -p5222 gwconsole@localhost << !
sensor set.facility TEST ALL
scheduler activate.all.sequenced
quit
!
# Open terminals tailing the two MQTT topics of interest
printf "\n${BBLUE}ACE Point Script:${NC} Opening a terminal to display Events MQTT Messages"
printf "\n"
gnome-terminal -e "bash -c \"exec mosquitto_sub -t rfid/gw/events\"" >/dev/null 2>&1
printf "\n${BBLUE}ACE Point Script:${NC} Opening a terminal to display raw rfid MQTT Messages"
printf "\n"
gnome-terminal -e "bash -c \"exec mosquitto_sub -t rfid/rsp/data/#\"" >/dev/null 2>&1
fi
printf "\n${BBLUE}ACE Point Script:${NC} INSTALLATION DONE\n"
printf "\n"
#printf "\nStart the gateway application by executing:\n"
#printf "%s/gateway/run.sh" "${DEPLOY_DIR}"
#printf "\n"
| true
|
da54cf8fcaf5a93a29e4694be1ca5107738fbdf7
|
Shell
|
psyrendust/.dotfiles
|
/bin/co
|
UTF-8
| 205
| 2.75
| 3
|
[] |
no_license
|
#!/usr/bin/env zsh
#
# co
# Fuzzy git checkout: pick a branch interactively with fzf and check it out.
#-------------------------------------------------------------------------------
set -e
# BUGFIX: quote the branch name so checkout doesn't word-split it
# (gitbranches/fzf/trimleading are helpers from this dotfiles repo).
BRANCH=$(gitbranches | fzf | trimleading) &&
git checkout "${BRANCH}"
| true
|
2837ace123b28bd1b5c8018ebd32f86d9543c47c
|
Shell
|
davidgfnet/domain-crawler
|
/generator.bash
|
UTF-8
| 1,352
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash
# Generate a gzipped, deduplicated domain list: clone the Dictionaries repo,
# run gendict.py per language with its ccTLDs, filter through digger, and
# write results-gen/generated.txt.gz.
BP=$(pwd)   # project root; gendict.py and digger/ live here
mkdir -p results-gen/
rm -rf tmp.generated/
mkdir tmp.generated/
cd tmp.generated/ || exit 1
git clone --depth=1 https://github.com/titoBouzout/Dictionaries.git
cd Dictionaries || exit 1
# "$BP" is quoted throughout so a checkout path with spaces cannot break the calls
"$BP/gendict.py" "domains.txt" eus "Basque.dic"
"$BP/gendict.py" "domains.txt" bg "Bulgarian.dic"
"$BP/gendict.py" "domains.txt" "es,cat" "Catalan.dic"
"$BP/gendict.py" "domains.txt" hr "Croatian.dic"
"$BP/gendict.py" "domains.txt" cz "Czech.dic"
"$BP/gendict.py" "domains.txt" dk "Danish.dic"
"$BP/gendict.py" "domains.txt" nl "Dutch.dic"
"$BP/gendict.py" "domains.txt" "uk,us,au,nz,co.nz,co.uk,in,pk,ph,de,ca,fr,it,th,pl,es,cn,bd,gi,im,ie,jm,ch" "English (American).dic" "English (Australian).dic" "English (British).dic" "English (Canadian).dic" "English (South African).dic"
"$BP/gendict.py" "domains.txt" "lu,fr,re,tf,ch" "French.dic"
"$BP/gendict.py" "domains.txt" gal "Galego.dic"
"$BP/gendict.py" "domains.txt" "at,de,ch" "German.dic"
"$BP/gendict.py" "domains.txt" it "Italian.dic"
"$BP/gendict.py" "domains.txt" "pt,br" "Portuguese (European).dic" "Portuguese (Brazilian).dic"
"$BP/gendict.py" "domains.txt" "es,com.es" "Spanish.dic"
"$BP/gendict.py" "domains.txt" "se" "Swedish.dic"
cd ..
"$BP/digger/digger" < Dictionaries/domains.txt > domains-filtered.txt
cd ..
sort -u tmp.generated/domains-filtered.txt | gzip -9 > results-gen/generated.txt.gz
rm -rf tmp.generated
| true
|
a4e5a5fa3a735b4ee945203b2860be9c9b9beb58
|
Shell
|
jeffrom/logd
|
/integration_test/run_integration_test.sh
|
UTF-8
| 1,022
| 4.03125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Integration test driver: run every numbered *.test.sh under this directory.
set -euxo pipefail
# Absolute, symlink-resolved directory containing this script
TESTROOT="$( cd "$(dirname "$0")" ; pwd -P )"
list_tests() {
    # Print all numbered test scripts under TESTROOT, one per line, in lexical
    # order. (Removed the useless `| cat |` stage from the pipeline.)
    find "$TESTROOT" -name "[0-9]*_*.test.sh" | sort
}
run_test() {
    # Execute one test script, printing a "===TEST <num> <name>" banner first.
    # $1 - path to a test file named like NN_name.test.sh
    if [[ -z "$1" ]]; then
        echo "usage: run_test <test_file_path>"
        exit 1
    fi
    local base num name
    base="$(basename "$1")"
    # shellcheck disable=SC2001
    num="$(sed -e 's/^\([0-9]*\)_.*/\1/' <<<"$base")"
    # shellcheck disable=SC2001
    name="$(sed -e 's/^[0-9]*_\(.*\).test.sh/\1/' <<<"$base")"
    echo "===TEST ${num} ${name}"
    "$1"
}
finish_all() {
    # EXIT-trap cleanup: kill any test binaries a failed test left running.
    # killall succeeding means strays existed, so report them.
    if killall logd.test; then
        echo "Extra logd.test instances laying around..."
    fi
    if killall log-cli.test; then
        echo "Extra log-cli.test instances laying around..."
    fi
}
run_all_tests() {
    # Run every test returned by list_tests in order; abort on first failure.
    # Cleanup of stray test processes is guaranteed via the EXIT trap.
    trap finish_all EXIT
    echo "Running test suite"
    list_tests
    # BUGFIX: read line-by-line instead of `for f in $(list_tests)`, which
    # word-split (and glob-expanded) paths containing whitespace.
    while IFS= read -r testfile; do
        if ! run_test "$testfile"; then
            exit 1
        fi
    done < <(list_tests)
    echo "Completed test suite"
}
run_all_tests
| true
|
02006ce1850c4bac71cb0f5e050b9ac256ebef67
|
Shell
|
wafflesnatcha/Snippets
|
/Shell/Yes-No Prompt.sh
|
UTF-8
| 228
| 3.53125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Example of a y/n prompt with y as the default
read -en1 -p "Would you like to continue? [y/n] (y): "
# Empty reply (just ENTER) or anything containing y/Y counts as "yes".
case "$REPLY" in
	""|*[Yy]*) echo -e "\nreplied yes" ;;
	*) echo -e "\nreplied no" ;;
esac
| true
|
41ddb77be036228c313b81037bc59ca7354421fe
|
Shell
|
alex-titus/system-software-final-review
|
/lab-2/task5.sh
|
UTF-8
| 529
| 2.953125
| 3
|
[] |
no_license
|
#!c:/cygwin64/bin/bash.exe
# NOTE(review): the original first line was missing the '!' of the shebang, so
# the interpreter line was just a comment; path kept as-is — confirm it is
# valid for the target Cygwin environment.
# This software code is provided as open source software. No questions will be
# answered relating to any part of this software, for any reason.
#
# Much love,
# https://github.com/alex-titus
#
# Write the integers 1..100, one per line, into numbs.txt.
# Brace expansion replaces the external `seq` call.
for i in {1..100}
do
echo "$i"
done > numbs.txt
# wc ./numbs.txt
# 100 100 292 ./numbs.txt
# 100 new lines, 100 words, 292 bytes in numbs.txt
# head -n 38 numbs.txt | tail -n 14; > somenumbs.txt
# wc somenumbs.txt
# 14 14 42 somenumbs.txt
# 14 new lines, 14 words, 42 bytes in somenumbs.txt
| true
|
b59ad39e1a93bbf412f4b36796f131c12f970b28
|
Shell
|
benSepanski/lockPlacementBenchmarks
|
/examples/make_examples.sh
|
UTF-8
| 2,158
| 3.890625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# usage: . make_examples.sh -b/--build -dz/--debugz3
# where if build we call mvn package, and OW do not
# where if debugz3 we set LOG_Z3 to true, otherwise false
# Must be using java8
#
# Compiles the example programs and runs the lock-placement jar (with Z3 via
# LD_LIBRARY_PATH) on every targets.txt outside of common/, emitting jimple.
BUILD=false;
DEBUG_Z3="";
# parse arguments following
# https://stackoverflow.com/questions/192249/how-do-i-parse-command-line-arguments-in-bash
while [[ $# -gt 0 ]]; do
key="$1"
case $key in
-b|--build)
BUILD=true;
shift # past arg
;;
-dz|--debugz3)
DEBUG_Z3=" -debugZ3";
shift
;;
esac
done
# Optionally rebuild the maven package from the parent directory
if [[ $BUILD = true ]] ; then
echo "** clean installing package ";
cd ..;
mvn clean install -Dmaven.test.skip=true ;
cd examples;
echo "** Package built";
fi
# one of "trace" "debug" "info" "warn" "error" "off"
LOG_LEVEL="debug";
# build holds .class files of examples
BUILD_DIR=`realpath "build/"`;
# holds source of examples
SOURCE_DIR=`realpath "."`;
# where to leave jimple files
OUT_DIR=`realpath "./modifiedClasses"`;
# directory containing the native Z3 library
PATH_TO_Z3=`realpath "/usr/lib/"`;
JAR=`realpath "../target/lockPlacementBenchmarks-0.0.1-SNAPSHOT.jar"`;
# build jimple classpath
JIMPLE_CP="${BUILD_DIR}:`realpath "../target/classes"`";
# Build source
if [ ! -d "${BUILD_DIR}" ]; then
mkdir "${BUILD_DIR}";
fi
echo "** Building examples and common.aux from ${SOURCE_DIR} into ${BUILD_DIR}";
find ${SOURCE_DIR} -type f -name "*.java" | xargs javac -d ${BUILD_DIR};
echo "** Done building"; echo;
# Run Ranjit's algorithm on test dirs
# (each targets.txt holds the fully-qualified class name to analyse)
for targetFile in `find ${SOURCE_DIR} -type f -name "targets.txt" -not -wholename "${SOURCE_DIR}/common/*"`; do
CLASS_NAME=`cat ${targetFile}`;
echo "** Running Ranjit's algorithm on ${CLASS_NAME}";
# Assume ${JAVA_HOME} is set up correctly for this to work
LD_LIBRARY_PATH="${PATH_TO_Z3}" \
java -Dorg.slf4j.simpleLogger.defaultLogLevel=${LOG_LEVEL} \
-Djava.library.path="${PATH_TO_Z3}" \
-jar "${JAR}" ${targetFile} ${DEBUG_Z3} \
-- -d ${OUT_DIR} -f jimple -cp ${JIMPLE_CP} -pp;
echo "** Jimple file in directory ${OUT_DIR}"; echo
done
| true
|
f0e3dd89d84ca846e0d6996e6ade0b49eccfc033
|
Shell
|
CodeAnil/nauta
|
/nauta-containers/samba/samba-create.sh
|
UTF-8
| 2,066
| 3.78125
| 4
|
[
"Apache-2.0",
"CC-BY-4.0",
"CC-BY-ND-4.0"
] |
permissive
|
#!/bin/bash -e
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Create samba shares/users for every Nauta user resource in the cluster.
# set -e repeated here in case the script is run as `bash samba-create.sh`
# (which bypasses the -e in the shebang).
set -e
echo "Ensuring /smb/input/public and /smb/output public exist and have proper permissions..."
mkdir -vp /smb/input/public
mkdir -vp /smb/output/public
# world-writable shared drop directories
chmod -v 0777 /smb/input/public /smb/output/public
echo "OK."
function create_user() {
    # (Re)create a samba user and its persistent volume.
    # $1 - user name. Failures are logged but do not abort the script
    #      (important because the caller runs under `set -e`).
    echo "(Re)Creating user $1"
    # BUGFIX: quote "$1" so an unusual user name cannot word-split
    /bin/samba-create-user.sh "$1" && /bin/samba-create-pv.sh "$1" || echo "Unable to create user $1"
}
# Fetch all user custom resources once, then iterate over them with jq
ALL_USERS_JSON=`kubectl get u -o=json`
ALL_USERS_COUNT=`echo "${ALL_USERS_JSON}" | jq '.items | length'`
date
echo "Current user count: $ALL_USERS_COUNT"
for (( n = 0 ; n < $ALL_USERS_COUNT ; n++ )) ; do
CURRENT_USER=`echo "$ALL_USERS_JSON" | jq ".items[$n]"`
CURRENT_USER_NAME=`echo "$CURRENT_USER" | jq -r '.metadata.name'`
CURRENT_USER_UID=`echo "$CURRENT_USER" | jq -r '.spec.uid'`
CURRENT_USER_STATE=`echo "$CURRENT_USER" | jq -r '.spec.state'`
echo "Found user: $CURRENT_USER_NAME, uid: $CURRENT_USER_UID, state: $CURRENT_USER_STATE"
# With --init (first run) recreate everyone regardless of recorded state;
# otherwise only users not yet in the CREATED state.
if [[ "x$1" == "x--init" ]] ; then
echo " -> first run, creating user no matter of their reported state ($CURRENT_USER_STATE)"
create_user $CURRENT_USER_NAME
else
if [[ "x$CURRENT_USER_STATE" != "xCREATED" ]] ; then
create_user $CURRENT_USER_NAME
else
echo "User already exists, doing nothing."
if [[ "x$CURRENT_USER_UID" == "x" ]] ; then
echo "Warning: user doesn't have UID assigned"
fi
fi
fi
done
| true
|
0aa37ed56d5838ff582d4131717f571c386a4b68
|
Shell
|
ehotinger/blog
|
/deploy.sh
|
UTF-8
| 206
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the Hugo site and push the generated public/ output to GitHub.
# Optional $1: custom commit message (default: timestamped auto message).
echo "Deploying updates to GitHub"
hugo -t cocoa
# BUGFIX: abort if public/ is missing; otherwise git add/commit/push would run
# against the parent repository instead of the generated site.
cd public || exit 1
git add .
msg="Automatically rebuilt - $(date)"
if [ $# -eq 1 ]
then msg="$1"
fi
git commit -m "$msg"
git push origin master
cd ..
| true
|
37201d1141bfbe153b2164729b7a9068c0aa677c
|
Shell
|
vicentepedro/GenRe-ShapeHD
|
/scripts/test_genre.sh
|
UTF-8
| 936
| 3.234375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Test GenRe: run test.py with the pretrained full model on the RGB/mask
# images of one object folder.
# usage: test_genre.sh <gpu> <obj_name> [extra test.py args...]
out_dir="/media/Data/dsl-course/GenRe_Testing/output/genre_paper/"
fullmodel=./downloads/models/full_model.pt # Pre-trained Model
# Glob patterns are built from $2 but passed quoted, so Python (not the shell)
# expands them.
rgb_pattern='/media/Data/dsl-course/affordances_dataset/all_objects_hook_draw/'"$2"'/rgb/*.jpg'
mask_pattern='/media/Data/dsl-course/affordances_dataset/all_objects_hook_draw/'"$2"'/mask/*.tif'
echo "$out_dir"
echo "$rgb_pattern"
echo "$mask_pattern"
if [ $# -lt 2 ]; then
echo "Usage: $0 gpu obj_name[ ...]"
exit 1
fi
gpu="$1"
shift # shift the remaining arguments
shift # obj_name / folder to test
set -e
source activate shaperecon
python 'test.py' \
--net genre_full_model \
--net_file "$fullmodel" \
--input_rgb "$rgb_pattern" \
--input_mask "$mask_pattern" \
--output_dir "$out_dir" \
--suffix '{net}' \
--overwrite \
--workers 0 \
--batch_size 1 \
--vis_workers 4 \
--gpu "$gpu" \
"$@"
source deactivate
| true
|
b26d9af540f79019a0724601f1cbbcd11a47c8f5
|
Shell
|
SteveCDW/Miscellaneous
|
/confirm_nats.bash
|
UTF-8
| 2,064
| 3.671875
| 4
|
[] |
no_license
|
#!/bin/bash
# Confirm firewalld NAT forwards for SL1 collector units (CUs): for each CU IP,
# the unit name reported by the CU itself, by the primary DB, and via the
# NAT'd port must all agree. Results are appended to confirm_nats.log.
# Options: -q quiet (dots/X only), -r <IP> primary DB address, -h help, -v version.
>confirm_nats.log
while getopts "r:qhv" opt ; do
case $opt in
"q") QUIET=1 ;;
"r") REMOTE_DB="$OPTARG" ; SQL_CMD="-h $REMOTE_DB -P7706" ;;
"h") echo "$0 [-q] [-r {Primary DB IP}]"
echo " where:"
echo " -q = quiet mode, no output to screen"
echo " -r {DB IP} = IP address of SL1 DB" ; echo ; exit 0 ;;
"v") echo "$0, version 1.1" ; echo ; exit 0 ;;
esac
done
# Newline-separated CU IPs; expanded unquoted below so they word-split
IPS=( "$(silo_mysql -NBe "SELECT ip FROM master.system_settings_licenses WHERE function IN (5,6) AND ip LIKE '10.255.%'" $SQL_CMD)" )
for IP in ${IPS[@]} ; do
CU_NAME="$(silo_mysql --ssl --connect-timeout=5 -h "$IP" -P 7707 -NBe "SELECT name FROM master.system_settings_licenses" 2> /dev/null)"
DB_NAME="$(silo_mysql -NBe "SELECT name FROM master.system_settings_licenses WHERE ip='$IP'" $SQL_CMD 2> /dev/null)"
# NAT'd ports forwarding to this CU's 7707, scraped from firewalld config
NAT_PORTS=( $(grep "${IP}/" /etc/firewalld/direct_nat.xml | grep 7707 | awk -F":" {'print $2'} | awk -F"<" {'print $1'}) )
# BUGFIX: was `! $NAT_PORTS[0]`, which expands to "<elem>[0]" — a string that
# is never empty — so the direct.xml fallback could never trigger. Array
# subscripts require braces: ${NAT_PORTS[0]}.
[[ ! ${NAT_PORTS[0]} ]] && NAT_PORTS=( $(grep "${IP}/" /etc/firewalld/direct.xml 2> /dev/null | grep 7707 | awk -F":" {'print $2'} | awk -F"<" {'print $1'}) )
for NAT_PORT in ${NAT_PORTS[@]} ; do
NAT_NAME="$(silo_mysql --ssl --connect-timeout=5 -h 172.20.1.1 -P "$NAT_PORT" -NBe "SELECT name FROM master.system_settings_licenses" 2>/dev/null)"
# All three names must match, pairwise
if [ "$CU_NAME" != "$NAT_NAME" -o "$DB_NAME" != "$NAT_NAME" -o "$DB_NAME" != "$CU_NAME" ] ; then
echo "$IP: Failed: CU Name from CU DB: $CU_NAME CU Name from DB: $DB_NAME CU Name from NAT: $NAT_NAME" >> confirm_nats.log
[[ ! $QUIET ]] && echo "$IP: Failed: CU Name from CU DB: $CU_NAME CU Name from DB: $DB_NAME CU Name from NAT: $NAT_NAME" || echo -n "X"
else
[[ ! $QUIET ]] && echo "$IP: Passed: $CU_NAME" || echo -n "."
fi
done
unset NAT_PORTS
sleep 2
done
echo
| true
|
e8a398c5b5e3b3d2241b6031219a3c5b50fff17d
|
Shell
|
alexanderfefelov/scripts
|
/install/dev/install-graalvm.sh
|
UTF-8
| 2,158
| 3.75
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Install GraalVM CE (Java 8, 11 and 16) under ~/dev/graalvm, create
# default-<ver> symlinks, a VisualVM desktop entry, and a profile snippet.
# Exit immediately if a pipeline, which may consist of a single simple command,
# a list, or a compound command returns a non-zero status
set -e
readonly MONIKER=graalvm
readonly VERSION=21.1.0
readonly BASE_8=graalvm-ce-java8
readonly BASE_11=graalvm-ce-java11
readonly BASE_16=graalvm-ce-java16
# Release tarball names for each Java version
readonly STUFF_8=$BASE_8-linux-amd64-$VERSION.tar.gz
readonly STUFF_11=$BASE_11-linux-amd64-$VERSION.tar.gz
readonly STUFF_16=$BASE_16-linux-amd64-$VERSION.tar.gz
readonly INSTALLER_DIR=$(dirname "$(realpath "$0")")
readonly TARGET_DIR=$HOME/dev/$MONIKER
create_desktop_entry() { # https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html
echo "[Desktop Entry]
Type=Application
Categories=Development;
Name=VisualVM
Comment=
Icon=$TARGET_DIR/visualvm.ico
Exec=$TARGET_DIR/default-11/bin/jvisualvm
Terminal=false" > $HOME/.local/share/applications/visualvm.desktop
}
# Refuse to overwrite an existing installation
if [ -d "$TARGET_DIR" ]; then
echo Directory exists: $TARGET_DIR >&2
exit 1
fi
mkdir --parents $TARGET_DIR
readonly TEMP_DIR=$(mktemp --directory -t delete-me-XXXXXXXXXX)
# Subshell: download/extract/install each version without changing our cwd.
# BASE/STUFF are looked up per version via ${!name} indirect expansion.
(
cd $TEMP_DIR
for java_version in 8 11 16; do
BASE=BASE_$java_version
STUFF=STUFF_$java_version
echo -n Downloading GraalVM $java_version...
wget --quiet https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-$VERSION/${!STUFF}
echo done
echo -n Extracting GraalVM $java_version...
tar --extract --gzip --file=${!STUFF}
echo done
echo -n Installing GraalVM $java_version...
mv --force ${!BASE}-$VERSION $TARGET_DIR
echo done
echo -n Configuring GraalVM $java_version...
# default-<ver> symlink lets the profile snippet below stay version-agnostic
ln --symbolic $TARGET_DIR/${!BASE}-$VERSION $TARGET_DIR/default-$java_version
echo done
done
)
rm --recursive --force $TEMP_DIR
echo -n Installing...
cp --force $INSTALLER_DIR/visualvm.ico $TARGET_DIR
create_desktop_entry
echo done
echo -n Configuring...
# Shell profile snippet: export per-version homes, default JAVA_HOME to 16
echo "export GRAALVM_8_HOME=$TARGET_DIR/default-8
export GRAALVM_11_HOME=$TARGET_DIR/default-11
export GRAALVM_16_HOME=$TARGET_DIR/default-16
export JAVA_HOME=\$GRAALVM_16_HOME
export PATH=\$JAVA_HOME/bin:\$PATH" > $HOME/.profile.d/$MONIKER.sh
echo done
| true
|
229972594daf0c6a368f2fa037788bb30d070b45
|
Shell
|
ps/dist_job_sched_simulation
|
/simulation_code/run_varied_trial.sh
|
UTF-8
| 779
| 3.4375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Run simulation trials "varied_1".."varied_36": rebuild the simulator, run it
# on tests/1000-varied-N.conf, and collect logs + .dat output per trial dir.
NUM=1
while [ $NUM -lt 37 ]; do
str="$NUM"
# POSIX arithmetic expansion instead of the obsolete `let`
NUM=$((NUM + 1))
directory="varied_$str"
echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
echo "~~~~~~~~~~~~~~~~Start VARIED $str~~~~~~~~~~~~~~~"
echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
confFile="tests/1000-varied-$str.conf"
make clean > /dev/null
make > /dev/null
# Quote the per-trial paths (defensive; also appeases shellcheck SC2086)
rm -rf "$directory"
mkdir "$directory"
./job_simulation "$confFile" > "$directory/stdout.log"
# Show the first/last lines of the run as a quick sanity summary
head -10 "$directory/stdout.log"
tail -6 "$directory/stdout.log"
mv *.dat "$directory/."
cp "$confFile" "$directory/."
echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
echo "~~~~~~~~~~~~End VARIED $str~~~~~~~~~~~~~~~~~~~~"
echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
done
| true
|
e9b37935eed455c23ec05165a2c2f39e993d056a
|
Shell
|
edorn18/KeyLimePie
|
/m2/given_parser/timing.sh
|
UTF-8
| 9,998
| 2.5625
| 3
|
[] |
no_license
|
#pragma clang diagnostic ignored "-Wparentheses"
# NOTE(review): the line above is a leftover C pragma; in shell it is only a comment.
echo "=============================================="
echo "Timing benchmarks"
echo "=============================================="

#######################################
# Compile and time one benchmark (factored out of 20 copy-pasted stanzas).
# Arguments:
#   $1 - benchmark number (display only)
#   $2 - label printed in the banner
#   $3 - directory under ../benchmarks (also the .c file basename)
#   $4 - (optional) expected-output file name, default "output"
# Outputs:
#   banner, start/stop wall-clock timestamps with millisecond resolution,
#   any diff between the program's output and the expected output, and a
#   trailing separator line.
#######################################
run_benchmark() {
local num="$1" label="$2" dir="$3" expected="${4:-output}"
local dstart dstop ms
echo "Benchmark ${num}: ${label}"
clang "../benchmarks/${dir}/${dir}.c" -O0 -Wno-parentheses-equality
dstart=$(date +'%Y-%m-%d %H:%M:%S|%N')
# Convert the nanosecond field (after '|') to milliseconds
ms=$(( ${dstart#*|}/1000000 ))
dstart="${dstart%|*}.$ms"
echo "start:" $dstart
./a.out < "../benchmarks/${dir}/input" > myOut
dstop=$(date +'%Y-%m-%d %H:%M:%S|%N')
ms=$(( ${dstop#*|}/1000000 ))
dstop="${dstop%|*}.$ms"
echo "stop:" $dstop
# Empty diff output means the benchmark produced the expected result
diff "../benchmarks/${dir}/${expected}" myOut
echo "=============================================="
}

run_benchmark 1 fact_sum fact_sum
# Benchmark 2's display label is lowercase but its directory is capitalized
run_benchmark 2 fibonacci Fibonacci
run_benchmark 3 binaryConverter binaryConverter
run_benchmark 4 hailstone hailstone
run_benchmark 5 primes primes
run_benchmark 6 programBreaker programBreaker
run_benchmark 7 wasteOfCycles wasteOfCycles
run_benchmark 8 OptimizationBenchmark OptimizationBenchmark
run_benchmark 9 mixed mixed
run_benchmark 10 uncreativeBenchmark uncreativeBenchmark
run_benchmark 11 TicTac TicTac
run_benchmark 12 mile1 mile1
run_benchmark 13 GeneralFunctAndOptimize GeneralFunctAndOptimize
run_benchmark 14 killerBubbles killerBubbles
run_benchmark 15 hanoi_benchmark hanoi_benchmark
run_benchmark 16 stats stats
run_benchmark 17 biggest biggest
# Benchmark 18 compares against output.new, not output
run_benchmark 18 creativeBenchMarkName creativeBenchMarkName output.new
run_benchmark 19 bert bert
run_benchmark 20 BenchMarkishTopics BenchMarkishTopics
| true
|
f3eea9e1dd6e3c41301ab9d8e19aaad19a2717a4
|
Shell
|
Shahraaz/S5_Ref
|
/OS/Temp/ShellSCripting/for.sh
|
UTF-8
| 124
| 3.109375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Prompt for a limit n and print the sum 1 + 2 + ... + n.

# sum_to N -> print the sum of the integers 1..N on stdout
sum_to() {
local s=0 i
for ((i = 1; i <= $1; i++)); do
s=$((s + i))
done
echo "$s"
}

read -p "enter the limit" n
echo "limit is $n"
s=$(sum_to "$n")
echo "sum=$s"
| true
|
de63431c4158015c3b271795643ebd31b9d18de3
|
Shell
|
joestringer/RouteFlow
|
/dist/common.sh
|
UTF-8
| 1,934
| 4.0625
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Shared helpers for the RouteFlow dist scripts: coloured output and source fetching.
# 0 until colour_check has probed this shell's echo behaviour
COLOUR_CHECKED=0
# ANSI escape prefix; cleared by colour_check when escapes are unsupported
DELIM="\033["
ECHO_OPTS=""
# Colour codes appended after DELIM
REVERT="0m"
RED="31m"
GREEN="32m"
YELLOW="33m"
colour_check() {
# Probe, once per run, how this shell's echo handles escape sequences and set
# COLOUR/DELIM/REVERT accordingly:
#  - if `echo -e` swallows the -e, pass -e explicitly (COLOUR="-e")
#  - if escapes come out literally even without -e, disable colours entirely
#  - otherwise plain echo already interprets escapes (COLOUR="")
if [ $COLOUR_CHECKED -eq 0 ]; then
COLOUR_CHECKED=1
if (! echo -e "${DELIM}${RED}COLOUR${DELIM}0m" | grep -q "e"); then
COLOUR="-e"
elif (echo "${DELIM}${RED}COLOUR${DELIM}0m" | grep -q "033"); then
DELIM=""
REVERT=""
else
COLOUR=""
fi
fi
}
highlight() {
# Print $1 wrapped in colour escape $2, degrading to plain text when the
# terminal cannot handle escapes.
# BUGFIX (two related problems): colour_check must run *before* DELIM is
# inspected (it is what clears DELIM on unsupported terminals), and the echo
# must use local_colour — the original echoed ${2} directly, so the colour
# suppression computed here was silently ignored.
colour_check
local_colour="$2"
if [ "$DELIM" = "" ]; then
local_colour=""
fi
echo $COLOUR "${DELIM}${local_colour}${1}${DELIM}${REVERT}"
}
fail() {
# Print $1 in red and abort the script.
highlight "$1" "$RED"
exit 1
}
print_status() {
# Print "$1..." in colour $2, defaulting to green when no colour is given.
if [ $# -eq 2 ]; then
highlight "${1}..." "$2"
else
highlight "${1}..." "$GREEN"
fi
}
##
# Fetch the source for a project
#
# $1 - file name, minus the version
# $2 - version number (or "git" or "rfproxy")
# $3 - git URL to fetch source from
# $4 - (for git/rfproxy) remote git branch to synchronise with
# $5 - base URL to fetch non-git source package from (full path=$4$1$2.tar.gz)
#
# Also uses globals: $DO (command prefix, e.g. for dry runs), $UPDATE
# (refresh an existing git clone), $FETCH_ONLY (download without cd/unpack).
# On success the shell is left inside the project directory unless FETCH_ONLY.
# Returns non-zero on any download/extract failure.
##
fetch() {
NAME="$1$2"
print_status "Getting $NAME"
if [ "$2" = "git" ] || [ "$2" = "rfproxy" ]; then
# git-based source: clone once, then optionally fetch + checkout $4
if [ ! -e $NAME ]; then
$DO git clone $3 $NAME || return 1
fi
$DO cd $NAME
if [ $UPDATE -eq 1 ]; then
$DO git fetch || return 1
$DO git checkout $4 || return 1
fi
if [ $FETCH_ONLY -eq 1 ]; then
$DO cd -
fi
elif [ ! -e $NAME ]; then
# tarball-based source: download from $5 and unpack unless FETCH_ONLY
if [ ! -e $NAME.tar.gz ]; then
$DO wget ${5}/${NAME}.tar.gz || return 1
fi
if [ $FETCH_ONLY -ne 1 ]; then
$DO tar xzf ${NAME}.tar.gz || return 1
$DO cd $NAME || return 1
fi
fi
return 0
}
pkg_install() {
# Install the given packages with apt-get, aborting the script on failure.
# $SUPER (e.g. sudo) and $APT_OPTS are intentionally unquoted so they
# word-split; BUGFIX: "$@" is quoted so package arguments are forwarded
# verbatim instead of being re-split.
$SUPER apt-get $APT_OPTS install "$@" ||
fail "Couldn't install packages"
}
| true
|
09c39f18a4a1f6e420bf49efa2f851b10918bb88
|
Shell
|
ueser/FastqAlignmentPipeline
|
/mainPipe.sh
|
UTF-8
| 3,616
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Sequencing alignment pipeline
# $1 - parameter file containing <Tag: ...> sections parsed below
# $2 - file containing the adapter sequence for cutadapt
module load seq/cutadapt/1.11
# put all the scripts that pipeline uses into a folder and cd into it
# cd Codes/NETseqAlignment/
param=$1
adapter=$2
# Each value is extracted as the lines between "<Tag" and ">" in the param
# file (sed prints the section, drops the closing line, drops tag lines)
Samples=`sed -n "/<Sample Names:/,/>/p" $param | sed '$ d' | sed '/<.*/d'`
Notification=`sed -n "/<Notification Email/,/>/p" $param | sed '$ d' | sed '/<.*/d'`
User=`sed -n "/<Orchestra User ID/,/>/p" $param | sed '$ d' | sed '/<.*/d'`
indexDir=`sed -n "/<Index Directory/,/>/p" $param | sed '$ d' | sed '/<.*/d'`
initialFilesDir=`sed -n "/<Initial Files Directory/,/>/p" $param | sed '$ d' | sed '/<.*/d'`
projectName=`sed -n "/<Project Name:/,/>/p" $param | sed '$ d' | sed '/<.*/d'`
indexMatch=`sed -n "/<Index Match:/,/>/p" $param | sed '$ d' | sed '/<.*/d'`
baseDir="/groups/winston/ue4/${projectName}"
echo "baseDirectory: " $baseDir
echo "initial Dir: " $initialFilesDir
# echo -e expands any escapes embedded in the index match list
indexList=`echo -e $indexMatch`
#loopStart,g
for g in $indexList
do
f=${g%-*}
echo "Doing file "$f
### Cut the adapter sequence ###
mkdir -p ${baseDir}/postCleaning/${f}/LogErr
mkdir -p ${baseDir}/postCleaning/${f}/Removed
outDir=${baseDir}/postCleaning/${f}
preout1=${outDir}/${f}_noAdaptR_1.fastq
bad1=${outDir}/Removed/${f}_removed_1
Adapter=`less ${adapter}`
echo "Adapter: " $Adapter
prinseq=/groups/churchman/jd187/Program/PrInSeq/prinseq-lite-0.20.2/prinseq-lite.pl
#@1,0,cutadapt: cut the adapter sequence
cutadapt -f fastq -a $Adapter -O 3 -m 1 --error-rate=0.21 \
--length-tag 'length=' -o ${preout1} ${initialFilesDir}/${f}.fastq
# trim_right 1 to remove the A added by RT superscipt polymerase (when the fragment is smaller than the read length)
#@2,1,clean: clean the fastq
perl ${prinseq} -fastq ${preout1} \
-out_good ${outDir}/${f}_cleaned -out_bad ${bad1} \
-no_qual_header -min_len 7 -min_qual_mean 20 -trim_right 1 \
-trim_ns_right 1 -trim_qual_right 20 -trim_qual_type mean -trim_qual_window 3 -trim_qual_step 1
#@3,2,barcodeXtract: extract molecular barcode
python /groups/churchman/jd187/NETseq/script/extractMolecularBarcode.py \
${outDir}/${f}_cleaned.fastq \
${outDir}/${f}_cleaned_noBarcode.fastq \
${outDir}/${f}_barcodeDistribution.txt \
${outDir}/${f}_ligationDistribution.txt
### Align reads without barcode ###
mkdir -p ${baseDir}/TopHat2/${f}/LogErr
outDir=${baseDir}/TopHat2/${f}
index=${indexDir}/${g#*-}
reads1=${baseDir}/postCleaning/${f}/${f}_cleaned_noBarcode.fastq
seg=20
#@4,3,tophat_no_barcode: align without barcode
tophat2 --read-mismatches 2 --read-gap-length 2 --read-edit-dist 2 -o $outDir --min-anchor-length 8 \
--max-insertion-length 3 \
--max-deletion-length 3 --num-threads 4 --max-multihits 100 \
--library-type fr-firststrand --segment-mismatches 2 --no-coverage-search\
--segment-length ${seg} \
--b2-sensitive \
$index ${reads1}
### Remove PCR duplicates ###
BAMdir=${baseDir}/TopHat2/${f}
#@5,4,removePCRdups: remove PCR duplicates
python removePCRdupsFromBAM.py ${BAMdir}/accepted_hits.bam ${BAMdir}/accepted_hits_noPCRdup.bam
#@6,5,sort_bam: sort bam file
samtools sort ${BAMdir}/accepted_hits_noPCRdup.bam ${BAMdir}/accepted_hits_noPCRdup_sorted
### Calculate coverage ###
mkdir -p ${baseDir}/Coverage/${f}
script="/groups/churchman/ue4/Scripts/customCoverage.py"
#@7,6,coverage: calculate coverage
samtools view -q 50 ${baseDir}/TopHat2/${f}/accepted_hits_noPCRdup_sorted.bam | \
python $script ${baseDir}/Coverage/${f}/${f}_TH
#loopEnd
done
| true
|
8794210388c7cd0b15dbf0a6611bd6010e82b384
|
Shell
|
projectmonitor/projectmonitor-web
|
/ci/deliver_tracker_story.sh
|
UTF-8
| 671
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
set -ex

# Marks a Pivotal Tracker story as "delivered" when the most recent git
# commit message carries a [finishes #id] / [fixes #id] / [completes #id]
# tag. Requires TRACKER_TOKEN and TRACKER_PROJECT_ID in the environment.

function update_tracker_story {
  curl -X PUT -H "X-TrackerToken: $TRACKER_TOKEN" -H "Content-Type: application/json" -d '{"current_state": "delivered"}' "https://www.pivotaltracker.com/services/v5/projects/$TRACKER_PROJECT_ID/stories/$STORY_ID"
}

function determine_if_commit_finishes_story {
  tracker_tag="$(git log -1 | egrep "\[finishes\s+#[0-9]+\]|\[fixes\s+#[0-9]+\]|\[completes\s+#[0-9]+\]" | awk '{print $2, $3}')"
  # BUG FIX: the original captured $? here, which is the exit status of the
  # *last* pipeline stage (awk, always 0) — so the story update fired even
  # when no tag was present, with an empty STORY_ID. Decide based on
  # whether any tag text was actually extracted.
  if [ -n "$tracker_tag" ]; then
    found_finishes_tag=0
  else
    found_finishes_tag=1
  fi
}

function grab_story_id {
  STORY_ID=$(echo $tracker_tag | egrep -o "[0-9]+")
}

determine_if_commit_finishes_story
if [ $found_finishes_tag -eq 0 ]
then
  grab_story_id
  update_tracker_story
fi
| true
|
a0870acb9a19d38cea368d50383a9fa384503659
|
Shell
|
flyseq/drosophila_assembly_pipelines
|
/assembly/genomesize_jellyfish.sh
|
UTF-8
| 705
| 3.203125
| 3
|
[
"MIT"
] |
permissive
|
#! /bin/bash
# this script generates a k-mer count histogram with Jellyfish for use
# with GenomeScope

# job parameters
sp="D.melanogaster"                 # sample name/ID
threads="32"                        # number of threads to use
# BUG FIX: the read-file variables were broken — `reads` was assigned but
# the undefined `read1` was used below, and the reverse-read variable
# pointed at the forward (_R1) file instead of _R2.
read1="${sp}_R1.fastq.gz"           # Illumina PE reads, forward
read2="${sp}_R2.fastq.gz"           # Illumina PE reads, reverse
outFile="${sp}.buscoCoverage.csv"   # filename to write BUSCO output to

# make kmer histogram (canonical 21-mers over both read files)
jellyfish count -C -m 21 -s 16000000000 -t ${threads} \
    ${read1} ${read2} -o ${sp}.reads.jf
jellyfish histo -t${threads} ${sp}.reads.jf > ${sp}.reads.hist

# sample GenomeScope R command
# genomescope.R ${sp}.reads.hist 21 150 ${sp}.gsoutput
| true
|
28940158475570d971b2fac1dc683382b6c73ccd
|
Shell
|
s-nkj/linux_study
|
/shell/CmdExecutor.sh
|
UTF-8
| 481
| 3.921875
| 4
|
[] |
no_license
|
#!/bin/sh
# Executes, line by line, the commands listed in the given file.
# Blank lines and lines starting with '#' are skipped; execution stops at
# the first failing command.
# NOTE: arrays and $'\n' are bashisms; this relies on sh being bash.
if [ $# -ne 1 ]; then
    echo "Usage: CmdExecutor.sh read_filename"
    exit 1
fi
if [ ! -e "$1" ]; then
    echo "Input file not found. $1"
    exit 1
fi
IFS=$'\n'
file=(`cat "$1"`)
for line in "${file[@]}"; do
    if [ ! "$(echo $line | cut -c 1)" = "#" -a ! "$line" = "" ]; then
        # execute command
        echo $line
        # BUG FIX: was `$($line)`, which ran the command inside a command
        # substitution and then tried to execute its *output* as another
        # command. Run the line directly instead.
        $line
        # BUG FIX: treat any non-zero status as failure, not only status 1.
        if [ $? -ne 0 ]; then
            echo "Command failed.($line)"
            exit 1
        fi
    fi
done
exit 0
| true
|
46b79d4d9cdeded66cf8cd275ec6dbef399c641e
|
Shell
|
shenki/toolchains-builder
|
/build_chroot.sh
|
UTF-8
| 3,827
| 4.15625
| 4
|
[] |
no_license
|
#!/bin/bash
# Builds a Buildroot cross toolchain inside a chroot:
#   $1 = toolchain fragment name ("<name>.config" is copied as .config)
#   $2 = Buildroot git tree-ish to build from
if ! [ $# -eq 2 ]; then
cat - <<EOF
Usage: $0 toolchain_name buildroot-treeish
toolchain_name:
This is a path to a toolchain fragment. '.config' will be appended to
that path, and it will be copied as is to Buildroot's '.config' file.
buildroot-treeish:
The git tree-ish object in which to checkout Buildroot to before
building the toolchain.
EOF
exit 1
fi
name=$1
brcommit=$2
# Install build prerequisites. The redirection order `2>&1 1>/dev/null`
# keeps stderr on the terminal while silencing stdout.
apt-get install -y --force-yes -qq --no-install-recommends \
build-essential locales bc ca-certificates file rsync gcc-multilib \
git bzr cvs mercurial subversion unzip wget cpio curl git-core \
libc6-i386 2>&1 1>/dev/null
if [ $? -ne 0 ] ; then
echo "Package installation failed, aborting"
exit 1
fi
# Enable the en_US.UTF-8 locale required by some Buildroot packages.
sed -i 's/# \(en_US.UTF-8\)/\1/' /etc/locale.gen
/usr/sbin/locale-gen
# All work happens under /tmp inside the chroot.
cd /tmp
TOOLCHAIN_DIR=$(pwd)
TOOLCHAIN_BUILD_DIR=${TOOLCHAIN_DIR}
TOOLCHAIN_BR_DIR=${TOOLCHAIN_DIR}/buildroot
toolchaindir=${TOOLCHAIN_BUILD_DIR}/${name}
logfile=${TOOLCHAIN_BUILD_DIR}/${name}-build.log
builddir=${TOOLCHAIN_BUILD_DIR}/output
configfile=${builddir}/.config
git clone https://github.com/free-electrons/buildroot-toolchains.git ${TOOLCHAIN_BR_DIR}
if [ $? -ne 0 ] ; then
exit 1
fi
cd ${TOOLCHAIN_BR_DIR}
echo "Checking out commit: ${brcommit}"
git checkout ${brcommit}
if [ $? -ne 0 ] ; then
exit 1
fi
cd ${TOOLCHAIN_DIR}
# Record the exact Buildroot version used for this build.
git --git-dir=${TOOLCHAIN_BR_DIR}/.git describe > br_version
echo "Buildroot version: " $(cat br_version)
mkdir -p ${TOOLCHAIN_BUILD_DIR} &>/dev/null
function build {
    # Build the toolchain described by ${name}.config with Buildroot.
    # Reads globals: name, toolchaindir, builddir, configfile, logfile,
    # TOOLCHAIN_BR_DIR. Returns non-zero on any failure.

    # Create output directory for the new toolchain
    rm -rf ${toolchaindir}
    mkdir ${toolchaindir}
    # Create build directory for the new toolchain
    rm -rf ${builddir}
    mkdir ${builddir}
    # Create the configuration
    cp ${name}.config ${configfile}
    echo "BR2_HOST_DIR=\"${toolchaindir}\"" >> ${configfile}
    echo " starting at $(date)"
    # Generate the full configuration
    make -C ${TOOLCHAIN_BR_DIR} O=${builddir} olddefconfig > /dev/null 2>&1
    if [ $? -ne 0 ] ; then
        return 1
    fi
    # Generate fragment to ship in the README
    make -C ${TOOLCHAIN_BR_DIR} O=${builddir} savedefconfig > /dev/null 2>&1
    if [ $? -ne 0 ] ; then
        return 1
    fi
    echo "=================== BEGIN DEFCONFIG ======================"
    cat ${builddir}/defconfig
    echo "==================== END DEFCONFIG ======================="
    # Build, capped at 3h45. BUG FIX: the original tested `$?` after the
    # pipeline, which is grep's status — a failed make with some ">>>"
    # output passed, and a successful make with no ">>>" lines "failed".
    # Test make's own status via PIPESTATUS[0] instead.
    timeout 225m make -C ${TOOLCHAIN_BR_DIR} O=${builddir} 2>&1 | tee ${logfile} | grep --colour=never ">>>"
    if [ ${PIPESTATUS[0]} -ne 0 ] ; then
        echo " finished at $(date) ... FAILED"
        echo " printing the end of the logs before exiting"
        echo "=================== BEGIN LOG FILE ======================"
        tail -n 200 ${logfile}
        echo "==================== END LOG FILE ======================="
        return 1
    fi
    echo " finished at $(date) ... SUCCESS"
    # Making legals
    echo " making legal infos at $(date)"
    make -C ${TOOLCHAIN_BR_DIR} O=${builddir} legal-info > /dev/null 2>&1
    if [ $? -ne 0 ] ; then
        return 1
    fi
    echo " finished at $(date)"
    cp ${configfile} ${toolchaindir}/buildroot.config
    # Different versions of buildroot don't always product the same thing with
    # usr. Old version make usr to be a folder containing the toolchain, newer
    # version just make it a symbolic link for compatibility.
    if ! [ -L ${toolchaindir}/usr ]; then
        mv ${toolchaindir}/usr/* ${toolchaindir}/
        rmdir ${toolchaindir}/usr
    else
        make -C ${TOOLCHAIN_BR_DIR} O=${builddir} sdk
        if [ $? -ne 0 ] ; then
            return 1
        fi
        rm ${toolchaindir}/usr
    fi
    # Toolchain built
}
echo "Generating ${name}..."
# Run the build and propagate failure to the caller (e.g. CI).
# note: build ignores its argument; it reads the global ${name} instead.
if ! build $1; then
echo "Error in toolchain build. Exiting"
exit 1
fi
| true
|
31dbf84985ac9c5daf0307e6ebce605a70af4b60
|
Shell
|
awslabs/amazon-kinesis-video-streams-webrtc-sdk-c
|
/scripts/check-static-build.sh
|
UTF-8
| 416
| 3.234375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Verify each build artifact is statically linked: any dynamic dependency
# other than musl's libc reported by ldd fails the check.
artifacts=(
  kvsWebrtcClientMaster
  kvsWebrtcClientViewer
  discoverNatBehavior
  libkvsWebrtcClient.a
  libkvsWebrtcSignalingClient.a
)
for artifact in "${artifacts[@]}"; do
  # Expect to only have a dynamic link to musl's libc
  if ! ldd "${artifact}" | grep -v musl &> /dev/null; then
    echo "${artifact}: passed"
  else
    echo "${artifact}: failed"
    echo ""
    echo "Found dynamic links:"
    ldd "${artifact}"
    exit 1
  fi
done
| true
|
e742f861e01e43a1ff54a13563eafa857faa4d68
|
Shell
|
mba811/bosh-packer-templates
|
/bosh-stemcell/scripts/vagrant.sh
|
UTF-8
| 498
| 2.828125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash -eux
# Packer provisioner: installs the VirtualBox guest additions (when an ISO
# version file is present) and the public Vagrant insecure SSH key for the
# ubuntu user. Runs as root.
# NOTE(review): with `-u` in the shebang this aborts if VBOX_VERSION_FILE
# is unset — assumed to be exported by the packer template; confirm.
if [ -f $VBOX_VERSION_FILE ]; then
mkdir /tmp/vbox
VER=$(cat $VBOX_VERSION_FILE)
mount -o loop VBoxGuestAdditions_$VER.iso /tmp/vbox
sh /tmp/vbox/VBoxLinuxAdditions.run
umount /tmp/vbox
rmdir /tmp/vbox
rm *.iso
fi
mkdir /home/ubuntu/.ssh
wget --no-check-certificate \
'https://github.com/mitchellh/vagrant/raw/master/keys/vagrant.pub' \
-O /home/ubuntu/.ssh/authorized_keys
chown -R ubuntu /home/ubuntu/.ssh
chmod -R go-rwsx /home/ubuntu/.ssh
| true
|
fffa5ec85b5b9d9e8cbc952951f13a05848bf2d8
|
Shell
|
Isaac25silva/Qlearning-Humanoid
|
/iniciar.sh
|
UTF-8
| 492
| 3
| 3
|
[] |
no_license
|
#!/bin/bash
# Launches the humanoid robot services (imu, vision, decision, control),
# each in its own gnome-terminal window, when invoked with "start".

# Colour codes kept for parity with the original script (currently unused).
blue='\e[0;34m'
NC='\e[0m' # No Color
red='\e[0;31m'
green='\e[0;32m'

case "$1" in
  start)
    for service in imu vision decision control; do
      echo "Iniciando serviço de ${service}"
      gnome-terminal -x sh -c "${service}"
    done
    ;;
  *)
    echo "Operação inválida"
    ;;
esac
| true
|
a48e83ac61c2e4e1301d2ea1093886b16741713a
|
Shell
|
olegtarasov/GrammarEngine
|
/src/scripts/dictionary/english.sh
|
UTF-8
| 2,149
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
# The script builds an empty dictionary, version 'PRO'
# More info about dictionary compilation:
# http://solarix.ru/for_developers/bootstrap/compile_dictionary.shtml

./cleanup.sh

set -e

# The three platform branches ran the exact same compiler invocation with
# only the binary path and -outdir differing, on three unmaintainable
# duplicated lines. Factor the shared argument lists (argument order is
# preserved: <pre-args> -outdir=<dir> <post-args> <sources>).
PRE_ARGS="-j=2 -dir=../../dictionary.src"
POST_ARGS="-ldsize=1000000 -save_paradigmas -save_seeker -save_affixes -save_lemmatizer -save_prefix_entry_searcher"
SOURCES="../../dictionary.src/version-pro ../../dictionary.src/dictionary -file=../../dictionary.src/english-language-only.sol ../../dictionary.src/shared-resources ../../dictionary.src/english-lexicon ../../dictionary.src/english-stat ../../dictionary.src/english-thesaurus ../../dictionary.src/common-syntax ../../dictionary.src/english-syntax ../../dictionary.src/dictionary-english ../../dictionary.src/common_dictionary_xml"

if [ "$OSTYPE" == "darwin10.0" ]
then
    echo "Starting compilation under DARWIN..."
    ../../exemac/compiler ${PRE_ARGS} -outdir=../../bin-mac ${POST_ARGS} ${SOURCES}
else
    if [ "$HOSTTYPE" == "x86_64" ]
    then
        ../../exe64/compiler ${PRE_ARGS} -outdir=../../bin-linux64 ${POST_ARGS} ${SOURCES}
    else
        ../../exe/compiler ${PRE_ARGS} -outdir=../../bin-linux ${POST_ARGS} ${SOURCES}
    fi
fi
| true
|
5fd8ce2ddc5e7e637b1fc3a1cf3b94373a4b512f
|
Shell
|
SMT-COMP/smt-comp
|
/2018/report/analysis/eligible-incremental-status.sh
|
UTF-8
| 366
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/bash
# Counts the ":status" set-info annotations in an incremental SMT-LIB
# benchmark ($1) and how many of them precede the first "unknown" status
# (the eligible ones). Output line: <file>,<total>,<eligible>
file=$1

status_lines=$(grep "set-info" "$file" | grep ":status")
num_stats=$(echo "$status_lines" | wc -l)

first_unknown=$(echo "$status_lines" | grep "unknown" -m 1 -n)
if [ -n "$first_unknown" ]; then
  # grep -n yields "<lineno>:<text>"; statuses before that line are eligible.
  eligible=$(( $(echo "$first_unknown" | cut -d ':' -f 1) - 1 ))
else
  eligible="$num_stats"
fi

echo "$file,$num_stats,$eligible"
| true
|
8b28a4b66b266ffbc4297318f160b406776a3ba0
|
Shell
|
mlcommons/ck
|
/cm-mlops/script/install-python-venv/run.sh
|
UTF-8
| 359
| 2.703125
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Creates a Python virtual environment at ${CM_VIRTUAL_ENV_DIR} using the
# CM-provided interpreters (CM_PYTHON_BIN / CM_PYTHON_BIN_WITH_PATH).
# pip >= 23 refuses to modify externally managed environments, so probe the
# installed pip major version and add --break-system-packages when needed.
PIP_EXTRA=`${CM_PYTHON_BIN} -c "import pkg_resources; print(' --break-system-packages ' if int(pkg_resources.get_distribution('pip').version.split('.')[0]) >= 23 else '')"`
${CM_PYTHON_BIN_WITH_PATH} -m pip install virtualenv ${PIP_EXTRA}
test $? -eq 0 || exit 1
${CM_PYTHON_BIN_WITH_PATH} -m venv ${CM_VIRTUAL_ENV_DIR}
test $? -eq 0 || exit 1
|
3b87891395f52dcb4dfda02d7db0f6cb9ac24c3e
|
Shell
|
emilianbold/netbeans-releases
|
/cnd.modelimpl/test/scripts/_parse_project.sh
|
UTF-8
| 4,040
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
#
# Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved.
#
# Oracle and Java are registered trademarks of Oracle and/or its affiliates.
# Other names may be trademarks of their respective owners.
#
# The contents of this file are subject to the terms of either the GNU
# General Public License Version 2 only ("GPL") or the Common
# Development and Distribution License("CDDL") (collectively, the
# "License"). You may not use this file except in compliance with the
# License. You can obtain a copy of the License at
# http://www.netbeans.org/cddl-gplv2.html
# or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
# specific language governing permissions and limitations under the
# License. When distributing the software, include this License Header
# Notice in each file and include the License file at
# nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the GPL Version 2 section of the License file that
# accompanied this code. If applicable, add the following below the
# License Header, with the fields enclosed by brackets [] replaced by
# your own identifying information:
# "Portions Copyrighted [year] [name of copyright owner]"
#
# Contributor(s):
#
# The Original Software is NetBeans. The Initial Developer of the Original
# Software is Sun Microsystems, Inc. Portions Copyright 1997-2007 Sun
# Microsystems, Inc. All Rights Reserved.
#
# If you wish your version of this file to be governed by only the CDDL
# or only the GPL Version 2, indicate your decision by adding
# "[Contributor] elects to include this software in this distribution
# under the [CDDL or GPL Version 2] license." If you do not indicate a
# single choice of license, a recipient has the option to distribute
# your version of this file under either the CDDL, the GPL Version 2 or
# to extend the choice of license to its licensees as provided above.
# However, if you add GPL Version 2 code and therefore, elected the GPL
# Version 2 license, then the option applies only if the new code is
# made subject to such option by the copyright holder.
# Parses a C/C++ project with the NetBeans CND trace model:
#   $1   = project directory
#   rest = extra parameters forwarded to tracemodel.sh
function run() {
project="$1"
shift
params="$@"
###### System includes
# Extract the compiler's default include search path from `g++ -E -v`
# output, emitting each directory as a -I flag.
sys_incl=`g++ -E -v -x c++ /dev/null 2>&1 | awk '\
BEGIN { cnt=0; inside = 0; } \
/#include "..." search starts here:/ { inside = 1; } \
/#include <...> search starts here:/ { inside = 1; } \
/End of search list/ { inside = 0; } \
/^[^#].*/ { if( inside ) print "-I" $1 }'`
uname=`uname`
#uname_prefix=`expr substr "${uname}" 1 6`
uname_prefix=${uname:0:6}
# On Cygwin the probed paths are unusable; substitute hard-coded ones.
if [ "${uname_prefix}" = "CYGWIN" ]; then
sys_incl=""
sys_incl="${sys_incl} -IC:/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include/c++"
sys_incl="${sys_incl} -IC:/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include/c++/i686-pc-cygwin"
sys_incl="${sys_incl} -IC:/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include/c++/backward"
sys_incl="${sys_incl} -IC:/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/include"
sys_incl="${sys_incl} -IC:/cygwin/usr/include"
sys_incl="${sys_incl} -IC:/cygwin/lib/gcc/i686-pc-cygwin/3.4.4/../../../../include/w32api"
fi
###### Source files
dir="${project}"
files=`find ${dir} -name "*.c" -o -name "*.cc" -o -name "*.cpp" -o -name "*.C"`
###### Options
options="-I${dir} -I${dir}/src -I${dir}/include -I${dir}/test -DHAVE_CONFIG_H"
defs=""
#defs="${defs} -Dparser.report.include.failures=true"
#defs="${defs} -Dparser.report.errors=false"
defs="${defs} -J-Dcnd.modelimpl.parser.threads=1"
# JVM sizing; override via the JVMOPTS environment variable.
jvmopts=${JVMOPTS-"-J-Xms512m -J-Xmx512m -J-XX:PermSize=128m -J-XX:MaxPermSize=256m -J-XX:NewSize=256m"}
###### Go!
TRACEMODEL_SH=${TRACEMODEL_SH-"../../tracemodel.sh"}
if [ ! -r ${TRACEMODEL_SH} ]; then
echo "Can not find file tracemodel.sh."
echo "Set TRACEMODEL_SH variable to point to this script."
return
fi
#set -x
bash ${TRACEMODEL_SH} ${files} ${sys_incl} ${options} ${defs} ${jvmopts} ${params}
}
run $@
| true
|
62a248335e5c0f8177afacf3a46bf8c6d4ea445f
|
Shell
|
greatbn/Script_LEMP_CENTOS
|
/LEMP.sh
|
UTF-8
| 2,305
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# Installs a LEMP stack (nginx, MySQL, PHP 5.6 via remi) plus phpMyAdmin
# on CentOS 6. Must run as root; several steps are interactive.
echo "--------------------------------------------------------"
echo "----------------------Waiting update--------------------"
echo "--------------------------------------------------------"
yum -y update
echo "--------------------------------------------------------"
echo "------------Install repo required-----------------------"
echo "--------------------------------------------------------"
rpm -Uvh http://download.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
rpm -Uvh http://rpms.famillecollet.com/enterprise/remi-release-6.rpm
yum -y install epel-release
echo "\n"
echo "--------------------------------------------------------"
echo "-------------Installing MySQL--------------------------"
echo "--------------------------------------------------------"
yum --enablerepo=remi,remi-test install mysql mysql-server -y
echo "--------------------------------------------------------"
echo "-------------Configure MySQL----------------------------"
echo "--------------------------------------------------------"
service mysqld start
mysql_secure_installation
echo "--------------------------------------------------------"
echo "-------------Install nginx and php-fpm----------------- "
echo "--------------------------------------------------------"
# Writes the nginx mainline yum repo definition.
python -c 'print "[nginx]\nname=nginx repo\nbaseurl=http://nginx.org/packages/mainline/centos/$releasever/$basearch/\ngpgcheck=0\nenabled=1" ' >> /etc/yum.repos.d/nginx.repo
yum --enablerepo=remi,remi-php56 install nginx php-fpm php-mysql php-common php-mbstring php-mcrypt php-gd -y
echo "--------------------------------------------------------"
echo "---------------------Install phpMyAdmin-----------------"
echo "--------------------------------------------------------"
yum --enablerepo=remi,remi-php56 install phpMyAdmin -y
yum --enablerepo=remi,remi-php56 install php-mbstring -y
ln -s /usr/share/phpMyAdmin /usr/share/nginx/html
chown -R nginx:nginx /var/lib/php/session/
# NOTE(review): this sed invocation is truncated (no expression body, no
# target file) — it fails at runtime and its intent is unknown; it needs
# to be completed or removed.
sed -i 's/'
echo "--------------------------------------------------------"
echo "---------------------Start service----------------------"
echo "--------------------------------------------------------"
service nginx restart
service php-fpm restart
service mysqld restart
chkconfig mysqld on
chkconfig php-fpm on
chkconfig nginx on
| true
|
beb7046435cea395db104aa02e7b346551593ce7
|
Shell
|
lizzieb1416/shopping_list_django_project
|
/launch_sl_emp_mode.sh
|
UTF-8
| 3,729
| 3.59375
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Deploys the Shopping List Django app behind nginx + uWSGI emperor mode.
# Requires ALLOWED_HOSTS in the environment; reconfigures an existing
# installation when the systemd unit already exists.
set -e
FILE=/etc/systemd/system/emperor.uwsgi.service
# BUG FIX: the test was written `[ -f "$FILE"]` (missing space before `]`),
# which is a syntax error for `[` and, under `set -e`, aborted the script
# on its very first statement.
if [ -f "$FILE" ]; then
    echo "$FILE exists, reconfiguring the service..."
    sudo rm -f /etc/nginx/sites-available/sl.conf
    sudo rm -f /etc/nginx/sites-enabled/sl.conf
    sudo ln -s /etc/nginx/sites-available/default /etc/nginx/sites-enabled/default
    sudo rm -rf sl-env
    sudo systemctl stop emperor.uwsgi.service
    sudo systemctl disable emperor.uwsgi.service
    # BUG FIX: the stale unit was removed from /etc/systemctl/... while the
    # install step below copies it to /etc/systemd/... — use the real path.
    sudo rm -f /etc/systemd/system/emperor.uwsgi.service
    sudo pkill -f uwsgi -9
else
    echo "Starting service configuration"
fi
export SHOPPINGLIST_DIR=$PWD
if [[ -z "${ALLOWED_HOSTS}" ]]; then
    echo "Please set ALLOWED_HOSTS environment var with the IP of this machine"
    exit 1
fi
# Installation of packages
echo "STEP 1/9: installing pre-requisites..."
sudo apt-get install -y python3.8 python3-venv python3.8-dev gcc nginx git
# Envirorment creation and installation of python packages
echo "STEP 2/9: creating environment"
python3 -m venv sl-env
. ./sl-env/bin/activate
python3 -m pip install -r ./requirements.txt
# set env variables for settings.py
echo "STEP 3/9: exporting environment variables for settings.py"
sed -i "s/ALLOWED_HOSTS\ =\ \[\]/ALLOWED_HOSTS\ =\ [\'$(echo $ALLOWED_HOSTS)\']/g" $SHOPPINGLIST_DIR/shoppinglist/shoppinglist/settings.py
export DEBUG=0
# Nginx configuration
echo "STEP 4/9: setting up Nginx"
sed -i "s/DIRECTORY/$(echo $SHOPPINGLIST_DIR | sed "s/\//\\\\\//g")/g" $SHOPPINGLIST_DIR/deploy_emperor_mode/nginx/sl.conf
sudo ln -s $SHOPPINGLIST_DIR/deploy_emperor_mode/nginx/sl.conf /etc/nginx/sites-available
sudo ln -s /etc/nginx/sites-available/sl.conf /etc/nginx/sites-enabled/sl.conf
sudo rm -f /etc/nginx/sites-enabled/default
# Put static files in its place and restart the server
echo "STEP 5/9: collecting static files for Shopping List"
python3 $SHOPPINGLIST_DIR/shoppinglist/manage.py collectstatic --noinput
echo "STEP 4.4/10: Migrating to database"
python3 $SHOPPINGLIST_DIR/shoppinglist/manage.py makemigrations
python3 $SHOPPINGLIST_DIR/shoppinglist/manage.py migrate
echo "STEP 6/9: restarting Nginx server"
sudo /etc/init.d/nginx restart
# Conf for emperor mode
echo "STEP 7/9: setting up emperor mode"
sed -i "s/DIRECTORY/$(echo $SHOPPINGLIST_DIR | sed "s/\//\\\\\//g")/g" $SHOPPINGLIST_DIR/deploy_emperor_mode/sl_uwsgi.ini
mkdir $SHOPPINGLIST_DIR/sl-env/vassals
sudo ln -s $SHOPPINGLIST_DIR/deploy_emperor_mode/sl_uwsgi.ini $SHOPPINGLIST_DIR/sl-env/vassals
# symbolic link to create service to launch the sl when the system boots
echo "STEP 8/9: creating Shopping List service"
sed -i "s/DIRECTORY/$(echo $SHOPPINGLIST_DIR | sed "s/\//\\\\\//g")/g" $SHOPPINGLIST_DIR/deploy_emperor_mode/emperor.uwsgi.service
sed -i "s/temporarystuff/$(echo $USER)/g" $SHOPPINGLIST_DIR/deploy_emperor_mode/emperor.uwsgi.service
sudo cp $SHOPPINGLIST_DIR/deploy_emperor_mode/emperor.uwsgi.service /etc/systemd/system
# Enable service
echo "STEP 9/9: enable service"
sudo systemctl enable emperor.uwsgi.service
sudo systemctl start emperor.uwsgi.service
echo "Shopping List installed and available at http://$ALLOWED_HOSTS with your browser"
| true
|
91c3cb86784c037c9a03e277b64fbaace7b66795
|
Shell
|
telara/bin
|
/civrun
|
UTF-8
| 205
| 2.859375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Compile each given source to assembly, assemble it, run the VM over all
# resulting objects, then clean up intermediates while preserving the VM's
# exit status.
script_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )

for src in $*; do
  "$script_dir"/civcc -o "$src".s "$src" && "$script_dir"/civas -o "$src".o "$src".s
done

"$script_dir"/civvm *.o
vm_status=$?
rm -f *.s *.o
exit $vm_status
| true
|
57074b6cd4a3d5770ce04ae7f317bd0358486b26
|
Shell
|
HumanCompatibleAI/adversarial-policies
|
/experiments/remote_build_and_run.sh
|
UTF-8
| 1,628
| 3.78125
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Clones/updates the project on a remote host, runs build_and_run.sh there
# over SSH (with a TensorBoard port tunnel), then rsyncs the results back.
# NOTE(review): GIT_REPO and NAME are presumably set by common.sh or the
# arguments below — confirm against common.sh.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
. ${DIR}/common.sh
REMOTE_HOST=""
LOCAL_DATA="${DIR}/../data"
REMOTE_WORK_DIR="/scratch/${USER}/aprl"
TB_PORT=6006
EXTRA_ARGS=""
# Manual long-option parsing; unknown arguments are forwarded verbatim.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-c|--cmd)
CMD="$2"
shift
shift
;;
-h|--host)
REMOTE_HOST="$2"
shift
shift
;;
-l|--listen)
TB_PORT="$2"
shift
shift
;;
-n|--name)
NAME="$2"
shift
shift
;;
-o|--output-dir)
LOCAL_DATA="$2"
shift
shift
;;
-w|--work-dir)
REMOTE_WORK_DIR="$2"
shift
shift
;;
*)
EXTRA_ARGS="${EXTRA_ARGS} $1"
shift
;;
esac
done
if [[ ${MUJOCO_KEY} == "" ]]; then
echo "Set MUJOCO_KEY file to a URL with your key"
exit 1
fi
if [[ ${REMOTE_HOST} == "" ]]; then
echo "Missing mandatory argument -h <host>"
exit 1
fi
set -o xtrace  # print commands
set -e  # exit immediately on any error
echo "Starting experiment"
# -L forwards the remote TensorBoard port to the same local port.
ssh -t -L ${TB_PORT}:localhost:${TB_PORT} ${REMOTE_HOST} \
"export MUJOCO_KEY='${MUJOCO_KEY}' && \
git clone ${GIT_REPO} ${REMOTE_WORK_DIR}/${NAME} || (cd ${REMOTE_WORK_DIR}/${NAME} && git fetch) && \
${REMOTE_WORK_DIR}/${NAME}/experiments/build_and_run.sh \
--no-copy -w ${REMOTE_WORK_DIR} -n ${NAME} -l ${TB_PORT} -c \"${CMD}\" ${EXTRA_ARGS}"
echo "Experiment completed, copying data"
rsync -rlptv --exclude=sacred ${REMOTE_HOST}:${REMOTE_WORK_DIR}/${NAME}/data/ ${LOCAL_DATA}/
rsync -rlptv ${REMOTE_HOST}:${REMOTE_WORK_DIR}/${NAME}/data/sacred/ ${LOCAL_DATA}/sacred/${REMOTE_HOST}
| true
|
6b15ad06b6de2bdd1c3ceba4ca413c53d30dff1f
|
Shell
|
CeeJayCee/pywws-scripts
|
/pywws-livelog
|
UTF-8
| 1,210
| 3.734375
| 4
|
[] |
no_license
|
#!/bin/sh
### BEGIN INIT INFO
# Provides:          pywws-livelog-daemon
# Required-Start:    $all
# Required-Stop:     $all
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: Start pywws daemon
### END INIT INFO
# SysV init wrapper around the pywws live-logging daemon; runs the daemon
# as the unprivileged RUNASUSER.
PATH=/sbin:/bin:/usr/sbin:/usr/bin
. /lib/lsb/init-functions
DAEMON=/usr/local/bin/pywws-livelog-daemon.py
PIDFILE=/var/run/pywws.pid
DATADIR=/home/pi/weather/data
LOGFILE=$DATADIR/live_logger.log
RUNASUSER=pi
# uid:gid of RUNASUSER, used only to verify the account exists.
UGID=$(getent passwd $RUNASUSER | cut -f 3,4 -d:) || true
case $1 in
start)
log_daemon_msg "Starting pywws service" "pywws"
if [ -z "$UGID" ]; then
log_failure_msg "user \"$RUNASUSER\" does not exist"
exit 1
fi
$DAEMON -v $DATADIR $LOGFILE start
status=$?
log_end_msg $status
;;
stop)
log_daemon_msg "Stopping pywws service" "pywws"
$DAEMON -v $DATADIR $LOGFILE stop
log_end_msg $?
rm -f $PIDFILE
;;
restart|force-reload)
$DAEMON -v $DATADIR $LOGFILE restart
;;
try-restart)
# Restart only if the service is currently running.
if $0 status >/dev/null; then
$0 restart
else
exit 0
fi
;;
reload)
# Reload is unsupported: LSB status 3 (unimplemented feature).
exit 3
;;
status)
status_of_proc $DAEMON "pywws service"
;;
*)
echo "Usage: $0 {start|stop|restart|try-restart|force-reload|status}"
exit 2
;;
esac
|
fbd9c489b0cbdc6dc8dc608bd9e9d8a7381eacdf
|
Shell
|
shdowofdeath/core
|
/jenkins/backup/backup.sh
|
UTF-8
| 1,086
| 3.546875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Collects Jenkins/infrastructure state (certs, redis dump, ssh public key,
# docker & kubernetes certs) into ${BACKUP_DIR} and syncs it to the S3
# bucket given as $1, using the bucket's own region.
S3_BUCKET_NAME="$1"
BACKUP_DIR=/opt/backup
echo "Backup to S3 : ${S3_BUCKET_NAME}"
if [ -d "${BACKUP_DIR}" ]; then
    # clear directory
    rm -rfv ${BACKUP_DIR}/*
else
    mkdir -p ${BACKUP_DIR}
fi
# backup cert files
mkdir -p ${BACKUP_DIR}/certs
cp -R /opt/certs/* ${BACKUP_DIR}/certs
# backup redis data (force a synchronous dump first)
mkdir -p ${BACKUP_DIR}/redis
redis-cli save
cp /hab/svc/redis/data/dump.rdb ${BACKUP_DIR}/redis/
# backup ssh public key
mkdir -p ${BACKUP_DIR}/ssh
cp /var/lib/jenkins/.ssh/id_rsa.pub ${BACKUP_DIR}/ssh/
# backup docker certs
mkdir -p ${BACKUP_DIR}/docker_ssl
cp /opt/docker_ssl/server.crt ${BACKUP_DIR}/docker_ssl/core.madcore.crt
# backup kubernetes certs
mkdir -p ${BACKUP_DIR}/kubernetes
cp /opt/kubernetes/ssl/ca.pem ${BACKUP_DIR}/kubernetes/
cp /opt/kubernetes/ssl/ca-key.pem ${BACKUP_DIR}/kubernetes/
# get-bucket-location returns {"LocationConstraint": <region-or-null>};
# jq -r emits the raw string directly, replacing the former
# `jq .[] | sed "s^\"^^g"` quote-stripping hack.
bucket_region=$(aws s3api get-bucket-location --bucket ${S3_BUCKET_NAME} | jq -r .[])
# BUG FIX: quote the variable — when the aws call fails and the variable is
# empty, the unquoted test `[ $bucket_region = "null" ]` was a syntax error
# and the region fell through as the empty string.
if [ "$bucket_region" = "null" ]; then
    # A null LocationConstraint means the legacy default region.
    bucket_region="us-east-1"
fi
aws s3 sync ${BACKUP_DIR} s3://${S3_BUCKET_NAME}/backup --region $bucket_region
|
052fb9d35076c5eb3afa7d3de898d343b882a487
|
Shell
|
BlueRainSoftware/id4i-cli
|
/test/tests/basic.bats
|
UTF-8
| 520
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bats
# Smoke tests for the id4i CLI: help output and config-file discovery.
@test "Basic - Help is available" {
./id4i help | grep 'ID4i API commandline application.'
./id4i help | grep 'id4i \[command\]'
./id4i help | grep -- '--apikey string       ID4i API key to use'
./id4i help | grep -- '--organization string   ID4i organization namespace to work in'
}
@test "Basic - Default config file is used" {
# The CLI should pick up ./.id4i.properties and the APIKEY_ID seeded into
# .preflightData by the test setup.
./id4i info --show-config 2>&1 | grep ${PWD}/.id4i.properties
source .preflightData
./id4i info --show-config 2>&1 | grep ${APIKEY_ID}
}
|
331ff1e8b8bf88fd97d5ad9e6ed4ba648f0f310b
|
Shell
|
vlamitin/totally-automatic
|
/scripts/get_phrazes_from_selected.sh
|
UTF-8
| 295
| 2.84375
| 3
|
[] |
no_license
|
#!/bin/sh
# gives list of phrazed that contains selected text with use of fraze it key feched from secrets-provider
# Flow: fetch the FrazeIt API key from the local secrets provider, then call
# the helper with the current X selection (xsel -o) and the key.
# NOTE(review): ${BASH_SOURCE[0]} is a bashism under #!/bin/sh — this works
# only where sh is actually bash.
DIR="$(cd "$( dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd )";\
curl -s "localhost:8000/fraze_it_api_key" | xargs -I {} "$DIR/_get_phrazes_from_text.sh" "$(xsel -o)" "{}"
| true
|
d57b3c652a5321c880edf78ec34da2b1600469e1
|
Shell
|
rishijatia/myMovie-cs410
|
/contrib/nlp-app
|
UTF-8
| 1,142
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash
#
# nlp-app        Startup script for the nlp-app demo
#
# chkconfig: 345 80 20
#
# description: nlp-app is for running the MeTA NLP demo
# processname: nlp-app
#
### BEGIN INIT INFO
# Provides: nlp-app
# Required-Start: $local_fs $remote_fs $network
# Required-Stop: $local_fs $remote_fs $network
# Should-Start: $network
# Should-Stop: $network
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: start and stop nlp-app
### END INIT INFO
# SysV wrapper delegating to the app's own control script, run as $USER.
NAME=nlp-app
USER=massung1
APP_SCRIPT=/home/$USER/metapy-demos/contrib/nlp_app.sh
lockfile="/var/lock/subsys/nlp-app"
# Run the given command line as the unprivileged service user.
super() {
su - $USER -c "$*"
}
start() {
echo "Starting $NAME"
super $APP_SCRIPT start
retval=$?
[ $retval -eq 0 ] && touch $lockfile
}
stop() {
echo "Stopping $NAME"
super $APP_SCRIPT stop
rm -f $lockfile
}
restart() {
echo "Restarting $NAME"
super $APP_SCRIPT restart
}
case "$1" in
start)
start
;;
stop)
stop
;;
restart)
restart
;;
*)
echo "Usage: {start|stop|restart}"
exit 1
;;
esac
# NOTE(review): RETVAL is never assigned, so this expands to a bare `exit`
# (exit with the last command's status); it was probably meant to carry the
# action's result code.
exit $RETVAL
| true
|
bb6c1a0fb17720cc539494506e4fbfd404539b0f
|
Shell
|
bartlettpsj/DwtAngJsWebpack
|
/dev.sh
|
UTF-8
| 508
| 3.015625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Launches webpack-dev-server for local development. Protocol, host and
# port are all overridable through DEV_SERVER_* environment variables.
DEV_SERVER_PROTOCOL="${DEV_SERVER_PROTOCOL:-http}"
DEV_SERVER_HOST="${DEV_SERVER_HOST:-0.0.0.0}"
# BUG FIX: the port was hard-coded to "5000", silently ignoring any
# DEV_SERVER_PORT from the environment. Default it like the other settings
# (same value, so behaviour is unchanged when the variable is unset).
DEV_SERVER_PORT="${DEV_SERVER_PORT:-5000}"

# If the dev server port is set, then add a port argument
# to the webpack-dev-server init command
if [ -z "$DEV_SERVER_PORT" ]; then
  DEV_PORT_ARG=""
else
  DEV_PORT_ARG="--port $DEV_SERVER_PORT"
fi

webpack-dev-server \
  --config webpack.development.js \
  --host "$DEV_SERVER_HOST" \
  $DEV_PORT_ARG \
  --history-api-fallback \
  --hot \
  --inline \
  --progress
| true
|
490b9358cf121238bd679d905a9494d7df47bccf
|
Shell
|
elements72/so20-21
|
/c-simpleExercise/test.sh
|
UTF-8
| 82
| 2.6875
| 3
|
[] |
no_license
|
# Compile every C file in the current directory into a matching .o binary.
for source_file in *.c; do
  echo "Processing $source_file file..";
  gcc "$source_file" -o "${source_file%.c}.o"
done
| true
|
c99f94b463838e8e7424aba48025416fe14e99d3
|
Shell
|
Floobits/colabalancer
|
/setup/runit.sh
|
UTF-8
| 541
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
# Installs the colabalancer runit service definition and the symlinks runit
# needs (/etc/service, /service, log dir). Must run with root privileges.
cp -r runit /etc/sv/colabalancer
if [ ! -e /etc/service/colabalancer ]
then
echo "/etc/service/colabalancer doesn't exist. Creating it..."
ln -s /etc/sv/colabalancer /etc/service/
fi
if [ ! -e /service ]
then
echo "/service doesn't exist. Creating it..."
ln -s /etc/service /service
fi
if [ ! -e /etc/service/colabalancer/log/main ]
then
echo "/etc/service/colabalancer/log/main doesn't exist. Creating it..."
mkdir /etc/sv/colabalancer/log/main
chown nobody:root /etc/sv/colabalancer/log/main
fi
| true
|
e7b477aa8a80ed94b56049f2ce9dceda4675dcae
|
Shell
|
PhillipSz/my-dotfiles
|
/bin/git/git-update-commit-message
|
UTF-8
| 188
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash
# Amend the commit message of the given commit (default: HEAD) and fold the
# amended commit back into master via a temporary branch.
# NOTE(review): `git checkout $hash` leaves a detached HEAD; amending there
# rewrites history, and the merge into master assumes master can absorb the
# tmp branch cleanly — verify this matches the intended workflow.
hash=$1
if [ -z "$hash" ]; then
hash=`git rev-parse HEAD`
fi
git checkout $hash
git commit --amend
git checkout -b "tmp"
git checkout master
git merge tmp
git branch -d tmp
| true
|
58a8bac29686ffc9d119047da78b054a268ef82e
|
Shell
|
jiazemin/clicklog
|
/azkaban/job/hivesql.sh
|
UTF-8
| 1,326
| 2.953125
| 3
|
[] |
no_license
|
#!/bin/bash
# Loads yesterday's cleaned clickstream logs from HDFS into the Hive table
# clicklog.accesslog (creating it if needed).
#set java env
export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_191.jdk/Contents/Home
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
#set hadoop env
export HADOOP_HOME=/Users/xuyongcai/hadoop/hadoop-2.9.0
export PATH=${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:$PATH
export HIVE_HOME=/Users/xuyongcai/hadoop/apache-hive-2.3.4-bin
export PATH=${HIVE_HOME}/bin:$PATH
# date handling on Linux (GNU date) — kept for reference:
#day_01=`date -d'-1 day' +%Y-%m-%d`
#syear=`date --date=$day_01 +%Y`
#smonth=`date --date=$day_01 +%m`
#sday=`date --date=$day_01 +%d`
# date handling on macOS (BSD date): -v-1d = yesterday; -j -f reparses it.
day_01=`date -v-1d +%Y-%m-%d`
syear=`date -j -f %Y-%m-%d $day_01 +%Y`
smonth=`date -j -f %Y-%m-%d $day_01 +%m`
sday=`date -j -f %Y-%m-%d $day_01 +%d`
clean_dir=/cleaup/clickLog/$syear/$smonth/$sday
HQL_create_table="create table if not exists clicklog.accesslog(ip string,day string,url string,upflow string) row format delimited fields terminated by ','"
HQL_origin="load data inpath '$clean_dir' into table clicklog.accesslog"
#HQL_origin="create external table clicklog.accesslog(ip string,day string,url string,upflow string) row format delimited fields terminated by ',' location '$clean_dir'"
#echo $HQL_origin
hive -e "$HQL_create_table"
hive -e "$HQL_origin"
| true
|
30a026b790cb58513582c3e3b2492e0fe1762cfa
|
Shell
|
dajuly20/ControlPi
|
/h/updateCmake.sh
|
UTF-8
| 2,053
| 3.84375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# TO Upgrade cmake to 3.13.4
which cmake
INSTALLED_VERS=`cmake --version | head -n 1`
# Download version
CMAKE_VERS="3.13.4"
CMAKE_VERS_S="cmake version 3.13.4"
extract () {
if [ -f $1 ] ; then
case $1 in
*.tar.bz2) tar xjf $1 ;;
*.tar.gz) tar xzf $1 ;;
*.bz2) bunzip2 $1 ;;
*.rar) rar x $1 ;;
*.gz) gunzip $1 ;;
*.tar) tar xf $1 ;;
*.tbz2) tar xjf $1 ;;
*.tgz) tar xzf $1 ;;
*.zip) unzip $1 ;;
*.Z) uncompress $1 ;;
*.7z) 7z x $1 ;;
*) echo "'$1' cannot be extracted via extract()" ;;
esac
else
echo "'$1' is not a valid file"
fi
}
function ask_yes_or_no() {
read -p "$1 ([y]es or [N]o): "
case $(echo $REPLY | tr '[A-Z]' '[a-z]') in
y|yes) echo "yes" ;;
*) echo "no" ;;
esac
}
# echo -n "Password for $USER:"
# read -s PASS
echo
if [ "$INSTALLED_VERS" != "$CMAKE_VERS_S" ]
then
if [[ "yes" == $(ask_yes_or_no "Uninstall <$INSTALLED_VERS> and replace by <$CMAKE_VERS>?") ]]
then
sudo apt-get remove cmake
sudo apt-get autoremove
fi
else
echo "The correct version $INSTALLED_VERS is installed already!"
exit 1
fi
if [[ "yes" == $(ask_yes_or_no "Download version $CMAKE_VERS nach home?") ]]
then
cd ~
wget "https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERS/cmake-$CMAKE_VERS.tar.gz"
if [[ "yes" == $(ask_yes_or_no "Dauer ca. 20 Min auf Pi \nDownload erfolgreich. Entpacken, bauen und Installieren?") ]]
then
extract cmake-$CMAKE_VERS.tar.gz
cd cmake-$CMAKE_VERS
./bootstrap
make -j 6
sudo make install
fi
fi
if [[ "yes" == $(ask_yes_or_no "Remove installation files?") ]]
then
/bin/rm -rf ~/cmake-$CMAKE_VERS.tar.gz
/bin/rm -rf ~/cmake-$CMAKE_VERS/
fi
| true
|
c7c612fc9cbe807219b1e0b2e3f0ceeb1f5a211a
|
Shell
|
miguelvelazco/coffee-saver
|
/coffeesaver-nbmlook.sh
|
UTF-8
| 422
| 3.03125
| 3
|
[] |
no_license
|
echo -e "\033[0m\n*GODZILLA*\n\033[0;32mGGggGGggGG\nooOOooOOoo\nDDddDDddDD\nzzZZzzZZzz\nIIiiIIiiII\nllLLllLLll\nLLllLLllLL\naaAAaaAAaa\033[0m\n*GODZILLA*"
echo -e "creating scans folder"
mkdir -p scans/
nmap -sP $1 -oG $2.txt
cat $2.txt | grep "Up" | cut -d " " -f2 > livehost.txt
for i in $(cat livehost.txt);
do
echo -e "running nbmlookup on $i"
nbml="${i}-nbmlookup"
nmblookup -A $i | tee scans/$nbml.txt
done
| true
|
25d18ab1a07ed62d5982efbb5b871524252d2c5b
|
Shell
|
codydiehl/update-script
|
/scratch.sh
|
UTF-8
| 258
| 2.859375
| 3
|
[] |
no_license
|
read -p "Question to ask the user? (y/n)" -n 1 -r
if [[ $REPLY =~ ^[Yy]$ ]]; then
elif [[ ! $REPLY =~ ^[Yy]$ ]]; then
[[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1
# handle exits from shell or function but dont exit interactive shell
fi
| true
|
8022ec4e7e0f45ea3b20d1b473a3dd251ab0768c
|
Shell
|
msys2/MINGW-packages
|
/mingw-w64-gsm/PKGBUILD
|
UTF-8
| 1,920
| 2.734375
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
# Maintainer: Alexey Pavlov <alexpux@gmail.com>
_realname=gsm
pkgbase=mingw-w64-${_realname}
pkgname="${MINGW_PACKAGE_PREFIX}-${_realname}"
pkgver=1.0.22
pkgrel=1
pkgdesc="Shared libraries for GSM 06.10 lossy speech compression (mingw-w64)"
arch=('any')
mingw_arch=('mingw32' 'mingw64' 'ucrt64' 'clang64' 'clang32' 'clangarm64')
url="http://www.quut.com/gsm/"
license=("custom")
makedepends=("${MINGW_PACKAGE_PREFIX}-cc"
"${MINGW_PACKAGE_PREFIX}-pkgconf")
options=('strip' 'staticlibs')
source=("http://www.quut.com/${_realname}/${_realname}-${pkgver}.tar.gz"
0001-adapt-makefile-to.mingw.patch
0002-adapt-config-h-to.mingw.patch
0003-fix-ln.mingw.patch
0004-use-cc-instead-of-gcc.patch)
sha256sums=('f0072e91f6bb85a878b2f6dbf4a0b7c850c4deb8049d554c65340b3bf69df0ac'
'654eaac22889157982d216776bf9de9b33a24e0bea4f8f56b7659cb80627dff3'
'4baaaf5218f384c7ee204da0617d6f95d3e2fc684faf5a80b892f29930939d07'
'29973fa21c19f68aa4fecc9cb9d622f5d459eb193907c434eed1e05caa4c2321'
'07a85325c41e9e4e83f1730ccf1d5c1a0a7b4effcda913a0619d2f984aaa8eab')
prepare() {
cd "${srcdir}/${_realname}-${pkgver%.*}-pl${pkgver##*.}"
patch -p1 -i ${srcdir}/0001-adapt-makefile-to.mingw.patch
patch -p1 -i ${srcdir}/0002-adapt-config-h-to.mingw.patch
patch -p1 -i ${srcdir}/0003-fix-ln.mingw.patch
patch -p1 -i ${srcdir}/0004-use-cc-instead-of-gcc.patch
}
build() {
cp -r ${srcdir}/${_realname}-${pkgver%.*}-pl${pkgver##*.} ${srcdir}/build-${MINGW_CHOST}
cd "${srcdir}/build-${MINGW_CHOST}"
make
}
package() {
cd "${srcdir}/build-${MINGW_CHOST}"
# Prepare directories
install -m755 -d "${pkgdir}${MINGW_PREFIX}"/{bin,lib,include,share/{licenses/${_realname},man/man{1,3}}}
make -j1 INSTALL_ROOT="${pkgdir}${MINGW_PREFIX}" install
# Install license
install -m644 COPYRIGHT ${pkgdir}${MINGW_PREFIX}/share/licenses/${_realname}/license.txt
}
| true
|
d1e63d9c13bade1a2eae4f67c83a850fd1d94a8c
|
Shell
|
votdev/ceph-dev-docker
|
/shared/bin/create-dashboard-rgw-user.sh
|
UTF-8
| 936
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
set -e
: ${CEPH_DEV_DOCKER_CONFIG_DIR:="$HOME/.ceph-dev-docker"}
mkdir -p $CEPH_DEV_DOCKER_CONFIG_DIR
#--------------
# Configure RGW
#--------------
cd /ceph/build
./bin/radosgw-admin user create --uid=dev --display-name=Developer --system
./bin/ceph dashboard set-rgw-api-user-id dev || true
RGW_ACCESS_KEY="${CEPH_DEV_DOCKER_CONFIG_DIR}/rgw_access_key"
RGW_SECRET_KEY="${CEPH_DEV_DOCKER_CONFIG_DIR}/rgw_secret_key"
./bin/radosgw-admin user info --uid=dev | jq -jr ".keys[0].access_key" > $RGW_ACCESS_KEY
./bin/radosgw-admin user info --uid=dev | jq -jr ".keys[0].secret_key" > $RGW_SECRET_KEY
chmod 600 $RGW_ACCESS_KEY
chmod 600 $RGW_SECRET_KEY
./bin/ceph dashboard set-rgw-api-access-key -i $RGW_ACCESS_KEY || ./bin/ceph dashboard set-rgw-api-access-key "$(cat $RGW_ACCESS_KEY)"
./bin/ceph dashboard set-rgw-api-secret-key -i $RGW_SECRET_KEY || ./bin/ceph dashboard set-rgw-api-secret-key "$(cat $RGW_SECRET_KEY)"
| true
|
dddb9e8652b50f31d9980afb3bafe40d47f1072d
|
Shell
|
sternenseemann/dotfiles-1
|
/.offlineimap/run_offlineimap.sh
|
UTF-8
| 2,291
| 3.921875
| 4
|
[] |
no_license
|
#!/bin/bash
read -d '' USAGE << EOF
USAGE:
-b#: Run a backup with Back in Time (backup job number #, should ideally
include the maildir ~/.offlineimap.
-c : String with names of accounts which mails should be counted.
If omitted all accounts are.
-q : Run a quick sync.
Requires the MAIL environment variable to be set.
EOF
ACCOUNTS="."
QUICK=no
BACKUP=no
BACKUP_NR=
ERROR=0
while getopts ":b:c:q" opt; do
case $opt in
b)
BACKUP=yes
BACKUP_NR=$OPTARG;;
c)
if [ -n "$OPTARG" ]; then
ACCOUNTS="$OPTARG"
fi;;
q)
QUICK=yes;;
\?)
echo "$USAGE"
exit 1;;
esac
done
shift $(($OPTIND - 1))
if [ ! -z "$@" ]; then
echo "$USAGE"
exit 1
fi
echo "Starting imap sync."
date
# Check for gnome keyring env, if not try to get it from ~/.Xdbus
if [ -z "$DBUS_SESSION_BUS_ADDRESS" ]; then
source ~/.Xdbus
fi
# OFFLINEIMAP
read -r pid < ~/.offlineimap/pid
# Kill old session, if still running
if ps $pid &>/dev/null; then
echo "ERROR: offlineimap ($pid): another instance running." >&2
kill -9 $pid
fi
# Quick sync?
if [ "$QUICK" == "yes" ]
then
echo "Doing a quick sync."
offlineimap -o -u quiet -q || ERROR=1
else
echo "Doing a full sync."
offlineimap -o -u quiet || ERROR=1
fi
# -------------
# COUNT MESSAGES
for ACCOUNT in $(echo $ACCOUNTS)
do
ACCOUNT=$MAIL"/"$ACCOUNT
ACC_LIST=$ACC_LIST" "$ACCOUNT
done
echo "Searching for new and unread mails in $ACC_LIST"
# Check, how many new messages there are.
NEW=$(find $ACC_LIST -type f -wholename '*/new/*' | wc -l)
# Check, how many unread messages there are.
UNREAD=$(find $ACC_LIST -type f -regex '.*/cur/.*2,[^S]*$' | wc -l)
if [[ "$NEW" -eq "0" && "$UNREAD" -eq "0" ]]; then
echo "No new mail."
rm ~/.offlineimap/.new_mail &> /dev/null
else
if [[ $ERROR == 0 ]]; then
echo "$NEW new and $UNREAD unread mails."
echo $NEW > ~/.offlineimap/.new_mail
echo $UNREAD >> ~/.offlineimap/.new_mail
else
echo "ERROR!"
echo -e "ERROR\nERROR" > ~/.offlineimap/.new_mail
fi
fi
# -------------
# MAKE BACKUP
if [ "$BACKUP" == "yes" ]; then
echo "Doing a backup, backup profile #$BACKUP_NR."
nice -n 19 ionice -c2 -n7 /usr/bin/backintime --profile-id $BACKUP_NR --backup-job >/dev/null
fi
# -------------
echo
| true
|
510621c75e6d2887271c14ce434aae0f211e6610
|
Shell
|
mvelusce/peon
|
/build.sh
|
UTF-8
| 533
| 2.703125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
cd cmd/peon
cp main.go main.go.bak
envsubst < main.go > main_with_version.go
mv main_with_version.go main.go
echo "Building for Linux..."
env GOOS=linux GOARCH=386 go build -i -v -o ../../bin/peon-linux
echo "Building for OSX..."
env GOOS=darwin GOARCH=amd64 go build -i -v -o ../../bin/peon-osx
echo "Building for Windows..."
env GOOS=windows GOARCH=386 go build -i -v -o ../../bin/peon-windows
mv main.go.bak main.go
echo "Zipping artifacts..."
cd ../../bin
zip peon.zip peon-linux peon-osx peon-windows
| true
|
70c88f913f5edfd772b4dec496964fe52d7485e9
|
Shell
|
KMI-KPZ/Albatross
|
/services/eurostat/parser/Main.sh
|
UTF-8
| 1,912
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
startTime=$(date)
#directory path where all the zip files are stored
FILES=../../../data/sandbox/eurostat/original-data/
#directory path where the uncompressed file should be stored
unCompressPath=../../../data/sandbox/eurostat/raw-data/
#directory path where the tsv file should be stored
tsvPath=../../../data/sandbox/eurostat/tsv/
#directory path where dsd (RDFs) will be stored
dsdPath=../../../data/sandbox/eurostat/dsd/
#directory path where sdmx (RDFs) will be stored
dataPath=../../../data/sandbox/eurostat/data/
#directory path where log file will be
logPath=../../../data/sandbox/eurostat/logs/
#directory path where sdmx-code file is located.
sdmxFile=../../../data/sdmx-code.ttl
ext=".tsv.gz"
### Deleting files from directories if exists
echo "deleting files from $unCompressPath ..."
for f in $unCompressPath*
do
/bin/rm $f
done
echo "deleting files from $dsdPath ..."
for f in $dsdPath*
do
/bin/rm $f
done
echo "deleting files from $dataPath ..."
for f in $dataPath*
do
/bin/rm $f
done
echo "deleting files from $logPath ..."
for f in $logPath*
do
/bin/rm $f
done
### RDFication code starts from here
i=1
for f in $FILES*
do
echo "UnCompressing file#$i ... filename is $f"
sh UnCompressFile.sh -i $f -o $unCompressPath
i=`expr $i + 1`
done
i=1
for f in $unCompressPath*
do
echo "Processing file#$i ... filename is $f"
echo $f | grep -qE ".dsd.xml"
if [ $? -eq 0 ]
then
sh DSDParser.sh -o $dsdPath -i $f -f turtle -a $sdmxFile
else
echo $f | grep -qE ".sdmx.xml"
if [ $? -eq 0 ]
then
filename=${f##*/}
fname=`echo $filename | awk '{ print substr($filename,0,length($filename)-8)}'`
sh SDMXParser.sh -f $fname -o $dataPath -i $f -l $logPath -t $tsvPath$fname$ext
i=`expr $i + 1`
fi
fi
done
echo "Shell script started the job at $startTime"
echo "Shell script finished the job at $(date)"
| true
|
5c9c373a2ba73e04a9352b0834e96545677eec97
|
Shell
|
mikeg64/alces_flight_customisation
|
/customizer/default/configure.d/run_me.sh
|
UTF-8
| 889
| 3.8125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
echo "configure.d script"
number_of_users=35
password_file=/home/walkingrandomly/users.txt
#Install mkpasswd
sudo yum install expect -y
if [ $2 = 'master' ]; then
echo "Running on master node"
#Master node
touch $password_file
for i in `seq 1 $number_of_users`;
do
username=training_user$i
sudo useradd -m -d /home/$username $username
userpassword=`mkpasswd`
echo $username:$userpassword | sudo chpasswd
echo "UserID:" $username "has been created with the following password " $userpassword >> $password_file
done
#allow password authentication to the master node
sudo sed -i 's/PasswordAuthentication no/PasswordAuthentication yes/' /etc/ssh/sshd_config
sudo service sshd restart
else
#Compute node
echo "running on compute node"
for i in `seq 1 $number_of_users`;
do
username=training_user$i
sudo useradd -m -d /home/$username $username
done
fi
| true
|
701eff83ca16810b2763aa4ce9d7cccfec974fce
|
Shell
|
sarvex/Impala
|
/thirdparty/openldap-2.4.25/tests/scripts/test000-rootdse
|
UTF-8
| 2,041
| 3.078125
| 3
|
[
"OLDAP-2.8",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-4.3RENO",
"Apache-2.0"
] |
permissive
|
#! /bin/sh
# $OpenLDAP: pkg/ldap/tests/scripts/test000-rootdse,v 1.29.2.6 2011/01/04 23:51:04 kurt Exp $
## This work is part of OpenLDAP Software <http://www.openldap.org/>.
##
## Copyright 1998-2011 The OpenLDAP Foundation.
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted only as authorized by the OpenLDAP
## Public License.
##
## A copy of this license is available in the file LICENSE in the
## top-level directory of the distribution or, alternatively, at
## <http://www.OpenLDAP.org/license.html>.
echo "running defines.sh"
. $SRCDIR/scripts/defines.sh
mkdir -p $TESTDIR $DBDIR1
echo "Starting slapd on TCP/IP port $PORT1..."
. $CONFFILTER $BACKEND $MONITORDB < $SCHEMACONF > $CONF1
$SLAPD -f $CONF1 -h $URI1 -d $LVL $TIMING > $LOG1 2>&1 &
PID=$!
if test $WAIT != 0 ; then
echo PID $PID
read foo
fi
KILLPIDS="$PID"
sleep 1
echo "Using ldapsearch to retrieve the root DSE..."
for i in 0 1 2 3 4 5; do
$LDAPSEARCH -b "" -s base -h $LOCALHOST -p $PORT1 \
'@extensibleObject' > $SEARCHOUT 2>&1
RC=$?
if test $RC = 0 ; then
break
fi
echo "Waiting 5 seconds for slapd to start..."
sleep 5
done
if test $RC = 0 ; then
echo "Using ldapsearch to retrieve the cn=Subschema..."
$LDAPSEARCH -b "cn=Subschema" -s base -h $LOCALHOST -p $PORT1 \
'(&(objectClasses=top)(objectClasses=2.5.6.0))' cn objectClass \
>> $SEARCHOUT 2>&1
RC=$?
fi
count=2
if test $RC = 0 ; then
case $MONITORDB in yes | mod)
count=3
echo "Using ldapsearch to retrieve the cn=Monitor..."
$LDAPSEARCH -b "cn=Monitor" -s base -h $LOCALHOST -p $PORT1 \
'@monitor' >> $SEARCHOUT 2>&1
RC=$?
;;
esac
fi
test $KILLSERVERS != no && kill -HUP $KILLPIDS
cat $SEARCHOUT
if test $RC != 0 ; then
echo ">>>>> Test failed"
else
RC=`grep '^dn:' $SEARCHOUT | wc -l`
if test $RC != $count ; then
echo ">>>>> Test failed: expected $count entries, got" $RC
RC=1
else
echo ">>>>> Test succeeded"
RC=0
fi
fi
test $KILLSERVERS != no && wait
exit $RC
| true
|
cde8bafa04bab42e39e22d9a0b7b31c9a6362642
|
Shell
|
simhaonline/WeeNAS
|
/etc/monit.d/failed_su.sh
|
UTF-8
| 621
| 3.890625
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# smbconf.sh - check smb.conf using testparms and report status.
# A single command-line parameter may be passed as the path to smb.conf.
# The return code is 1 when errors occur and 0 when no errors found.
#
# If "Loaded services file OK." is the second line of testparm's stderr, then
# everything is ok. Otherwise, it will be preceded by error messages, pushing
# it down to a lower line. This indicates a problem with smb.conf.
#
STATUS="ok"
FAILED_SU=$(egrep -c 'su\[[0-9]+\]: BAD SU' /var/log/messages)
if [ $FAILED_SU -gt 0 ]; then STATUS="Failed attempts: $FAILED_SU"; fi
echo $STATUS
exit $FAILED_SU
| true
|
6c3d0fdb31d4cc7f643ab3001c0937b9595b3731
|
Shell
|
tamtd4/test
|
/check_files.sh
|
UTF-8
| 664
| 3.625
| 4
|
[] |
no_license
|
#! /bin/bash
BACKUP_PATH=/home/gitlab-runner/test
REMOTE_PATH=/home/gitlab-runner
USER=gitlab-runner
HOST=172.20.23.161
cd $BACKUP_PATH
declare -a dirs
i=1
for d in */
do
dirs[i++]="${d%/}"
done
echo "There are ${#dirs[@]} dirs in the current path"
for((i=1;i<=${#dirs[@]};i++))
do
echo $REMOTE_PATH/${dirs[i]}
ssh $HOST "test -e $REMOTE_PATH/${dirs[i]}.tar.gz"
if [ $? -eq 0 ]; then
echo "File ${dirs[i]}.tar.gz exists"
else
echo "File ${dirs[i]}.tar.gz does not exist"
tar zcvf - ${dirs[i]} | ssh $USER@$HOST "cat > $REMOTE_PATH/${dirs[i]}.tar.gz"
fi
echo $i "${dirs[i]}"
# echo "${dirs[i]}"
done
| true
|
55e2f3be96eceb9f42f541373e2503c39a3f3888
|
Shell
|
joachimaae/Docker-Postgres-Airflow
|
/init.sh
|
UTF-8
| 303
| 3.09375
| 3
|
[] |
no_license
|
# Folder structure, taken from https://stackoverflow.com/questions/5374382/bash-script-that-creates-a-directory-structure
sed '/^$/d;s/ /\//g' environment/struct.txt | xargs mkdir -p
cp ./environment/examples/example-* ./environment/
cd environment
for i in "example-"*;do mv "$i" "${i#example-}";done
| true
|
1d2757b0ee7126669bb7c409a2d02b04db6b9cf6
|
Shell
|
00mjk/Hacker
|
/hack/main.sh
|
UTF-8
| 1,309
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
verde='\e[1;32m'
cyan='\e[1;36m'
rojo='\e[1;31m'
azul='\e[1;34m'
blanco='\e[1;35m'
amarillo='\e[1;33m'
click='\e[5m'
fn='\e[25m'
fnc='\e[0;0m'
mor='\e[95m'
negrita="\e[1m"
echo
echo
echo -e "$mor +======================================+ "
echo -e " | by:anonimo\Hacker-pc | "
echo -e " +======================================+ "
sleep 0.5s
echo -e " |--------------------------------------| "
echo -e " | Facebook:greivin.mayorga.5 | "
echo -e " |--------------------------------------| "
sleep 1s
echo -e " +======================================+ "
echo -e " | Youtube By informatica y mas | "
echo " +======================================+ "
sleep 1.50s
echo
echo
echo -e "$mor [01]$verde Hacktoll
$mor[02]$verde Hacktool-s
$mor[03]$verde Team-hack
$mor[04]$verde osing-hack
$mor[99]$verde$click salir$fnc"
echo
echo
echo -e "$verde Elige una opcion $fnc"
read option
echo
clear
if [ $option = 1 ]; then
cd hack
bash hack.sh
elif [ $option = 2 ]; then
cd ..
cd HTP
bash hacktoolpc.sh
elif [ $option = 3 ]; then
cd ..
cd HACKTC
bash HTC.sh
elif [ $option = 3 ]; then
cd ..
cd HTP
bash osing.sh
elif [ $option = 4 ]; then
cd ..
cd HTP
bash osing.sh
elif [ $option = 99 ]; then
clear
exit 0
fi
| true
|
2c405f8a8cbe2b8ae7d586f77d79cdeaff3845bf
|
Shell
|
axelinux/docker-proftpd
|
/setup-proftpd.sh
|
UTF-8
| 562
| 2.875
| 3
|
[] |
no_license
|
#!/bin/bash
mkdir -p /var/ftp/$USER_FTP
chown $UID_FTP /var/ftp/$USER_FTP
cd /etc/proftpd
echo $PASS_FTP | ftpasswd --passwd --name=$USER_FTP --uid=$UID_FTP --home=/var/ftp/$USER_FTP --shell=/bin/false --stdin
touch ftpd.group
if [ ! -d /etc/proftpd/sshkey ]
then
mkdir /etc/proftpd/sshkey
ssh-keygen -m PEM -f /etc/proftpd/sshkey/ssh_host_rsa_key -N '' -t rsa -b 2048
ssh-keygen -m PEM -f /etc/proftpd/sshkey/ssh_host_dsa_key -N '' -t dsa -b 1024
ssh-keygen -m PEM -f /etc/proftpd/sshkey/ssh_host_ecdsa_key -N '' -t ecdsa -b 521
fi
proftpd -n
| true
|
9e1f85b44722cf7293d38bac0cdb29eac750ae37
|
Shell
|
apsoto/dotfiles.old
|
/.bash.d/shell.sh
|
UTF-8
| 497
| 3.125
| 3
|
[] |
no_license
|
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# vi mode for editing command lines.
set -o vi
# CDPATH so you can jump around without typing a lot. Uses autocompletion too
export CDPATH=.:~
cdpaths="$([ -d ~/projects ] && find ~/projects -maxdepth 1 -type d | egrep -v '/(\.)|_[a-zA-Z0-9]' | egrep -v '(bin)|(cmd)|(doc)|(lib)|(pkg)|(test)' | xargs -n1 dirname | uniq)"
for i in $cdpaths; do
CDPATH=$CDPATH:$i
done
| true
|
ccd6cace011e2bf6f0cff9a4f23d1e1c1f2aed6d
|
Shell
|
iandmyhand/boilerplates
|
/MacOSX/.zshrc
|
UTF-8
| 1,191
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
# Add below script to .zshrc
...
export ZSH="$HOME/.oh-my-zsh"
...
ZSH_THEME="sssum"
...
export LC_ALL=en_US.UTF-8
export LANG=ko_KR.UTF-8
...
# Uncomment below lines.
if [[ -n $SSH_CONNECTION ]]; then
export EDITOR='vim'
else
export EDITOR='mvim'
fi
...
if [ -z "$SSH_AUTH_SOCK" ] ; then
echo '\nRun ssh-agent and add keys...'
eval "$(ssh-agent -s)"
ssh-add -k ~/.ssh/id_rsa
echo ''
fi
...
alias rm='rm -i'
alias cp='cp -i'
alias mv='mv -i'
alias ll='ls -alvhGF'
alias gffs='git flow feature start'
alias gffp='git flow feature publish'
alias gfff='git flow feature finish'
alias gfrs='git flow release start'
alias gfrp='git flow release publish'
alias gfrf='git flow release finish'
alias gfrff='git checkout develop && git pull && git push && git push --tags && git checkout master && git pull && git push && git checkout develop'
alias gfhs='git flow hotfix start'
alias gfhp='git flow hotfix publish'
alias gfhf='git flow hotfix finish'
coloredoutput() {
tail -f $1 |
sed -e 's/\(\[INFO.*\)/\o033[34m\1\o033[39m/' \
-e 's/\(\[ERROR.*\)/\o033[31m\1\o033[39m/'
}
alias tailf=coloredoutput
figlet Welcome! | lolcat
# https://direnv.net/
eval "$(direnv hook zsh)"
| true
|
5806d3d711bbed2222c368685b1ed8cd2621e545
|
Shell
|
gustavorabello/bash
|
/checkMEM.sh
|
UTF-8
| 573
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
function userof() { ps auxr -A | egrep -i $@ | awk '{print $1}'; }
function pidof() { ps auxr -A | egrep -i $@ | awk '{print $2}'; }
function cpuof() { ps auxr -A | egrep -i $@ | awk '{print $3}'; }
function timeof() { ps auxr -A | egrep -i $@ | awk '{print $10}'; }
function memof() { ps auxr -A | egrep -i $@ | awk '{print $6/1024}'; }
echo \#TIME " " MEMORY[MB] " " CPU[%] " " CPUTIME
while true
do
TIME=`date | awk '{print $4}'`
MEM=`memof $1`
CPU=`cpuof $1`
CPUT=`timeof $1`
printf $TIME%5s$MEM%5s$CPU%5s$CPUT"\n"
sleep 1
done
# $Id: $
| true
|
9b6d64676a82d35dcef9fefd07d8b0658be61587
|
Shell
|
ncoudsi/ft_services
|
/setup.sh
|
UTF-8
| 3,877
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/sh
#Variables for colored output in the terminal.
GREEN="\e[1;32m"
WHITE="\e[0m"
#Add $USER to the docker group (if not existing). Allow you to use docker commands without sudo.
#WARNING : if sudo is used in a docker build command, the image cant be pulled by k8s later on,
#due to rights on the image. It will result with a "Image can't be pulled" error and deployments won't work.
if [ $(grep "docker" /etc/group | grep -c "$USER") -eq 0 ]
then
echo ${GREEN}"==Need to reboot session to apply new user to docker group.=="${WHITE}
echo ${GREEN}"==Need admin password to do so.=="${WHITE}
sudo usermod -aG docker $USER
sudo shutdown -r now
fi
#Install requirements for pure-ftpd.
echo ${GREEN}"\n\t==Installing lftp, need admin password to do so.=="${WHITE}
sudo -S apt install lftp
sudo chmod 777 /etc/lftp.conf
grep "set ssl:verify-certificate no" /etc/lftp.conf
if [ $? -ne 0 ]
then
echo "set ssl:verify-certificate no" >> /etc/lftp.conf
fi
#Creating logs repository if it does not exist.
mkdir -p logs
#Start Minikube (--driver option specifies in which VM we want to start the cluster).
#Condition is here to avoid starting minikube if it is already running.
if [ $(minikube status > logs/minikube_status.log ; grep -c "Running" logs/minikube_status.log) -ne 3 ]
then
echo ${GREEN}"\t==Starting minikube.=="${WHITE}
minikube start --driver=docker
##Install MetalLB (see https://metallb.universe.tf/installation) It wont be deleted afterwards so you can apply it just once here.
kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.9.3/manifests/namespace.yaml
kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.9.3/manifests/metallb.yaml
kubectl create secret generic -n metallb-system memberlist --from-literal=secretkey="$(openssl rand -base64 128)"
kubectl apply -f srcs/metallb/metallb_configmap.yaml
fi
#Clean evrything that remains from previous usages.
echo ${GREEN}"\n\t==Deleting existing K8s cluster.=="${WHITE}
echo ${GREEN}"Services :"${WHITE}
kubectl delete services --all
echo ${GREEN}"Pods :"${WHITE}
kubectl delete pods --all
echo ${GREEN}"Deployments :"${WHITE}
kubectl delete deployments --all
echo ${GREEN}"Persistent Volume Claims :"${WHITE}
kubectl delete pvc --all
#Set the environment variable to use local Docker (allows you to re-use the Docker daemon inside the Minikube instance).
eval $(minikube docker-env)
#Build containers.
docker build ./srcs/nginx -t nginx > logs/nginx_build_logs.log
docker build ./srcs/wordpress -t wordpress > logs/wordpress_build_logs.log
docker build ./srcs/phpmyadmin -t phpmyadmin > logs/phpmyadmin_build_logs.log
docker build ./srcs/mysql -t mysql > logs/mysql_build_logs.log
docker build ./srcs/ftps -t ftps > logs/ftps_build_logs.log
docker build ./srcs/grafana -t grafana > logs/grafana_build_logs.log
docker build ./srcs/influxdb -t influxdb > logs/influxdb_build_logs.log
docker build ./srcs/telegraf -t telegraf > logs/telegraf_build_logs.log
#Create a new cluster.
echo ${GREEN}"\n\t==Creating new K8s cluster.=="${WHITE}
echo ${GREEN}"Nginx :"${WHITE}
kubectl apply -f srcs/nginx/nginx_deployment.yaml
echo ${GREEN}"Wordpress :"${WHITE}
kubectl apply -f srcs/wordpress/wordpress_deployment.yaml
echo ${GREEN}"PhpMyAdmin :"${WHITE}
kubectl apply -f srcs/phpmyadmin/phpmyadmin_deployment.yaml
echo ${GREEN}"MySQL :"${WHITE}
kubectl apply -f srcs/mysql/mysql_deployment.yaml
echo ${GREEN}"FTPS server :"${WHITE}
kubectl apply -f srcs/ftps/ftps_deployment.yaml
echo ${GREEN}"Grafana server :"${WHITE}
kubectl apply -f srcs/grafana/grafana_deployment.yaml
echo ${GREEN}"influxdb server :"${WHITE}
kubectl apply -f srcs/influxdb/influxdb_deployment.yaml
echo ${GREEN}"telegraf server :"${WHITE}
kubectl apply -f srcs/telegraf/telegraf_deployment.yaml
#Start minikube Dashboard.
echo ${GREEN}"\n\t==Starting Dashboard.=="${WHITE}
minikube dashboard
| true
|
8171776215b130b8c31a5b79cefdd5792cf5257a
|
Shell
|
LukieJoe/launch_shell_test
|
/shell.sh
|
UTF-8
| 1,177
| 2.859375
| 3
|
[] |
no_license
|
#! /bin/bash
clear
echo "shell script launched"
#echo "shell script attempt"
#python mc_test.py
#a=45
#echo $a
#pwd
#ls
#read -p "Can you hear me?" ans
#echo $ans
#run python scripts in parallel
#sudo x-terminal-emulator -e python /home/oaktobotics/raspi_robot_test.py &
#sudo x-terminal-emulator -e python /home/oaktobotics/rrb_led.py
#execute catkin_make in catkin_ws
sudo xterm -e "cd /home/oaktobotics/catkin_ws/; source /opt/ros/kinetic/setup.bash; catkin_make"
#buffer between catkin_make and roscore - may need to be adjusted
sleep 1s
#roscore and joy_to_twist launched in parallel - joy_to_twist has 5sec buffer to make sure roscore is running
#-hold keeps the terminal open after the program has executed
#-e tells xterm to execute the following code
sudo xterm -e "cd /home/oaktobotics/catkin_ws/; source /opt/ros/kinetic/setup.bash; roscore" &
sudo xterm -hold -e "cd /home/oaktobotics/catkin_ws/; sleep 5s; source /opt/ros/kinetic/setup.bash; rostopic list" &
sudo xterm -hold -e "cd /home/oaktobotics/catkin_ws/; sleep 5s; source /opt/ros/kinetic/setup.bash; source /home/oaktobotics/catkin_ws/devel/setup.bash; rosrun motor_controller joy_to_twist.py"
| true
|
caf14cbac564921d2d8ffa3be86a481d8e1cd818
|
Shell
|
MathieuTuli/dotfiles
|
/.scripts/calendar_code
|
UTF-8
| 3,108
| 3.125
| 3
|
[] |
no_license
|
#!/bin/zsh
autoload +X _complete
autoload -U compinit && compinit
source /usr/bin/aws_zsh_completer.sh
functions[_original_complete]=$functions[_complete]
_complete () {
unset 'compstate[vared]'
_original_complete "$@"
}
FILTER=""
LINELIMIT="limit: 15"
DETAIL="list"
LONGLIMIT=1000
long_comparator=15
FOCUS=""
i3-msg title_format "Calendar"
DUE=""
HELP=0
while true; do
clear
unbuffer task $FOCUS $DETAIL $FILTER $DUE $LINELIMIT | head -n $LONGLIMIT
task calendar
if [ $HELP -eq 1 ]; then
echo "project:\ndue:\nlimit:\nfocus:\ndetail:"
HELP=0
fi
COMMAND=''
vared COMMAND
if [[ $COMMAND = "q" ]] || [[ $COMMAND = "Q" ]]
then break
else
if [[ $COMMAND == "project:"* ]] then
if [[ $COMMAND == *"all"* ]] then
FILTER=""
else
FILTER=$COMMAND
fi
elif [[ $COMMAND == "help"* ]] then
HELP=1
elif [[ $COMMAND == "due:"* ]] then
if [[ $COMMAND == *"all"* ]] then
DUE=""
else
DUE=$COMMAND
fi
elif [[ $COMMAND == "limit:"* ]] then
LINELIMIT=$COMMAND
CURRENTLIMIT=${COMMAND//limit:/}
if [[ $CURRENTLIMIT == *"all"* ]] then
LINELIMIT=$COMMAND
LONGLIMIT=1000
elif [[ $CURRENTLIMIT == *"fit"* ]] then
LINELIMIT="limit:15"
LONGLIMIT=19
else
if [ "$CURRENTLIMIT" -le 15 ]
then
LONGLIMIT=18
else
LONGLIMIT=1000
fi
fi
elif [[ $COMMAND == "focus:"* ]] then
if [[ $COMMAND == *"all"* ]] then
FOCUS=""
else
FOCUS=${COMMAND//focus:/}
fi
elif [[ $COMMAND == "detail:"* ]] then
if [[ $COMMAND == *"long"* ]] then
DETAIL="long"
if [ "$LIMIT" -le 15 ]
then
LONGLIMIT=18
else
LONGLIMIT=1000
fi
else
DETAIL="list"
LONGLIMIT=1000
fi
else
eval $COMMAND
fi
fi
done
| true
|
3293779203eb9980afa29c28a94521de3480cad8
|
Shell
|
heojoon/hjoon-springboot2-webservice
|
/bootLauncher.sh
|
UTF-8
| 2,592
| 3.890625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Spring Boot embedd WAS launcher v1.210501
#
USER="wasuser"
# Java environments
export JAVA_HOME="/app/jdk-11"
export JAVA="/app/jdk-11/bin/java"
# Process name
PROC_NAME="demo"
# Jar File Path
SVCPATH="/svcroot/runtime/webapps/springboot/"
JAR_FILE="demo-0.0.1-SNAPSHOT.jar"
# Loggin path
LOG_PATH="/app/was/springboot/log"
STDOUT_FILE="${LOG_PATH}/stdout.log"
PID_PATH="/app/was/springboot/log"
PROC_PID_FILE="${PID_PATH}/${PROC_NAME}.pid"
# Java option
JVM_OPTION="-Djava.security.egd=file:///dev/urandom"
# deploy envrionment
PROFILE="dev"
userchk()
{
if [ $(id -un) != ${USER} ];then
echo "Please run ${wasuser}"
exit 0
fi
}
get_status()
{
ps ux | grep ${JAR_FILE} | grep -v grep | awk '{print $2}'
}
status()
{
local PID=$(get_status)
if [ -n "${PID}" ]; then
echo 0
else
echo 1
fi
}
start()
{
if [ $(status) -eq 0 ]; then
echo "${PROC_NAME} is already running"
exit 0
else
nohup ${JAVA} -jar ${JVM_OPTION} ${SVCPATH}${JAR_FILE} >> ${STDOUT_FILE} 2>&1 &
if [ $(status) -eq 1 ];then
echo "${PROC_NAME} is start ... [Failed]"
exit 1
else
echo "${PROC_NAME} is start ... [OK]"
local PID=$(get_status)
echo ${PID} > ${PROC_PID_FILE}
fi
fi
}
stop()
{
# verify pid
if [ ! -e ${PROC_PID_FILE} ];then
PID=$(get_status)
else
PID=$(cat "${PROC_PID_FILE}")
fi
# If no have pid file and no have running process then PID set zero manual
[ Z"${PID}" == Z ] && PID=0
if [ "${PID}" -lt 3 ]; then
echo "${PROC_NAME} was not running."
else
kill ${PID}
rm -f ${PROC_PID_FILE}
if [ $(status) -eq 0 ];then
echo "${PROC_NAME} is shutdown ... [OK]"
else
echo "${PROC_NAME} is shutdown ... [Failed]"
fi
fi
}
case "$1" in
start)
userchk
start
sleep 1
;;
stop)
userchk
stop
sleep 1
;;
restart)
userchk
stop
sleep 2
start
;;
status)
if [ $(status) -eq 0 ]; then
echo "${PROC_NAME} is running"
else
echo "${PROC_NAME} is stopped"
fi
;;
*)
echo "Useage : $0 {start | stop | restart | status}"
;;
esac
| true
|
cdca7cf1d60844d80771860e81c4a485b440a937
|
Shell
|
davetron5000/dotfiles
|
/.git_hooks/post-commit
|
UTF-8
| 325
| 3.1875
| 3
|
[] |
no_license
|
#!/bin/sh
#
# An example hook script that is called after a successful
# commit is made.
#
# To enable this hook, rename this file to "post-commit".
REMOTE=`git config --get post-commit.remote`
if [ -z $REMOTE ] ; then
echo "You must set post-commit.remote in your git config"
exit
fi
git push --force --all $REMOTE
| true
|
db6072db02f04a4dd0be3bd046fe00377e8c32dc
|
Shell
|
galudino/cmake-project-template-c
|
/makebuilds
|
UTF-8
| 877
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/zsh
##
## Uses CMake to create different build versions
## of sources specified in the current directory's CMakeLists.txt
## file.
##
echo ""
## Remove any previous build folders
rm -rf ./build/make/Debug/* || true
rm -rf ./build/make/Release/* || true
rm -rf ./build/make/RelWithDebInfo/* || true
rm -rf ./build/make/MinSizeRel/* || true || true
cmake -S ./ -B ./build/make/Debug -DCMAKE_BUILD_TYPE=Debug -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++
echo ""
cmake -S ./ -B ./build/make/Release -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++
echo ""
cmake -S ./ -B ./build/make/RelWithDebInfo -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++
echo ""
cmake -S ./ -B ./build/make/MinSizeRel -DCMAKE_BUILD_TYPE=MinSizeRel -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++
echo ""
| true
|
a503a76c8bcd7a4518da3d2f98e6184f49bb9aa5
|
Shell
|
clamaa/shell-coding
|
/chapter1/terminal.sh
|
UTF-8
| 270
| 2.859375
| 3
|
[] |
no_license
|
#!/bin/bash
# Print a few terminal properties via tput, then demonstrate reading a
# password with terminal echo disabled so keystrokes are not displayed.
printf '%s\n' "get terminal cols"
tput cols
printf '%s\n' "get terminal lines"
tput lines
printf '%s\n' "get longname"
tput longname
# Turn echo off while the password is typed, then restore it.
printf 'Enter password:\n'
stty -echo
read password
stty echo
echo
echo Password read.
echo $password
| true
|
28f590fa1815c1601bd498eb214d35bf74b255fc
|
Shell
|
ramielrowe/userapi
|
/run_api.sh
|
UTF-8
| 425
| 2.5625
| 3
|
[] |
no_license
|
#!/bin/bash
# (Re)deploy the user-API container: stop/remove any previous instance,
# then start a fresh one linked to the postgres container.
# env.sh must define ENV_NAME and the POSTGRES_* variables used below.
. env.sh
# Stop with a 5 s grace period, then force-remove container + volumes.
docker stop -t 5 ${ENV_NAME}userapi_api
docker rm -fv ${ENV_NAME}userapi_api
# Container port 8000 is exposed on host port 8001; DB credentials are
# handed over via environment variables.
docker run -d --name ${ENV_NAME}userapi_api \
--restart always \
-p 8001:8000 \
--link ${ENV_NAME}userapi_postgres \
-e POSTGRES_HOST=${ENV_NAME}userapi_postgres \
-e POSTGRES_USER=${POSTGRES_USER} \
-e POSTGRES_PASSWORD=${POSTGRES_PASSWORD} \
-e POSTGRES_DB=${POSTGRES_DB} \
${ENV_NAME}userapi_api
| true
|
1b8ed4dbac8cd6801ce88e2de438fec8f575b89b
|
Shell
|
x-way/dotfiles
|
/install.sh
|
UTF-8
| 191
| 3.25
| 3
|
[] |
no_license
|
#!/bin/sh
# Print the `ln -s` commands that would symlink every dotfile in this
# repo's directory into $HOME.
# NOTE: like the original, this is a dry run — it only echoes the
# commands; pipe the output to sh to actually create the links.
TARGET="$HOME"
SOURCE=$(dirname "$0")
SOURCE=$(cd "$SOURCE" && pwd)
# List every entry except ., .., .git and this installer itself; read
# line by line so names containing whitespace are not split apart
# (the old `for f in $(...)` word-split the listing).
ls -a "$SOURCE" | egrep -v '^(\.|\.\.|\.git|install\.sh)$' | while IFS= read -r f ; do
  echo "ln -s $SOURCE/$f $TARGET/$f"
done
| true
|
565d991425ddb333afcd3b97f11ae31aca6d06dc
|
Shell
|
AmerBouayad/OldProjects
|
/System (Linux)/SYSG4/process/LaboProcess0302/SOURCES/Demo
|
UTF-8
| 2,032
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
#NAME   : Demo
#CLASS  : FS - LaboFS 03-02
#PURPOSE: reserved for the Makefile
#AUTHOR : J.C. Jaumain, 07/2011
# Interactive teaching demo of a SUID + execlp attack: creates a
# mode-600 "Confidentiel" file, makes the companion program "Conf"
# SUID (mode 4755), then walks the user through verifying the
# privilege escalation from another account.
# All user-facing strings are intentionally in French.
# ANSI escapes: C = blue background, E = bold green, N = reset.
C='\033[44m'
E='\033[32m\033[1m'
N='\033[0m'
clear
# Must NOT run as root: the demo relies on being the file owner.
if [ $UID -eq 0 ]
then
echo "quittez la session administrateur pour cette démonstrartion"
exit 1
fi
echo "Démonstration de l'attaque via un programme SUID qui contient execlp"
echo "--------------------------------------------------------------------"
echo
echo '1) Création du fichier Confidentiel;'
echo -e "Exécution de ${E}echo CONFIDENTIEL > Confidentiel${N}"
echo CONFIDENTIEL > Confidentiel
echo '2) Modification des droits du fichier Confidentiel;'
echo -e "Exécution de ${E}chmod 600 Confidentiel${N}"
chmod 600 Confidentiel
echo '3) Modification des droits du programme Conf.'
echo -e "Exécution de ${E}chmod 4755 Conf${N}"
chmod 4755 Conf
echo '4) Vérification :'
echo -e "Exécution de ${E}pwd;ls -l Conf Confidentiel${N}"
pwd;ls -l Conf Confidentiel
echo
echo -e "${C} --> Enter pour continuer${N}"
read
# Owner can read the file both directly and through ./Conf.
echo -n "Contenu du fichier Confidentiel : "; cat Confidentiel
echo -n "Exécution du programme Conf : "; ./Conf
echo "---------------------------------------------------------------------------------"
echo "Étant l'utilisateur propriétaire, CONFIDENTIEL s'affiche bien dans les deux cas"
echo "Il vous reste à vérifier, à partir d'un autre utilisateur"
echo -e "1) Que ${E}cat Confidentiel${N} n'affiche pas CONFIDENTIEL"
echo -e "2) Que ${E}./Conf${N} affiche CONFIDENTIEL"
echo "3) Que l'attaque SUID-execlp me permet d'exécuter n'importe quelle"
echo " commande au nom du propriétaire du programme SUID qui contient execlp"
echo "----------------------------------------------------------------------------------"
echo -e "Pour ajouter un utilisateur ${E}user1${N} avec mot de passe ${E}user1${N}, dossier ${E}/home/user1${N}, groupe ${E}users${N} la commande administrateur :"
echo -e "${E}useradd -m -p user1 -d /home/user1 -g users user1${N}"
echo
echo -e "${C} --> Enter pour continuer${N}"
echo
read
| true
|
fad88338b84379cb1cee9b873aa99562a7838a03
|
Shell
|
PenpenLi/DownProject
|
/DownTrunk/Project/Script/Sql/AutoCreateLogDB.sh
|
UTF-8
| 767
| 2.765625
| 3
|
[] |
no_license
|
echo "create database, using sh."
# Connection settings.  Note: DBRoot/DBP1 already embed the mysql
# option letter, so "-${DBRoot}" expands to "-uroot" and "-${DBP1}"
# to "-p1" (user "root", password "1").
DBHost=127.0.0.1
DBName=down_log
DBRoot=uroot
DBP1=p1
# Drop and recreate the log database, then load its schema from
# ${DBName}.sql in the current directory.
echo "drop ${DBName} database..."
echo "drop database if exists ${DBName};" | mysql -${DBRoot} -${DBP1} -h${DBHost}
echo "create ${DBName} database... "
echo "create database ${DBName};" | mysql -${DBRoot} -${DBP1} -h${DBHost}
echo "import ${DBName} sql setence..."
mysql -${DBRoot} -${DBP1} -h${DBHost} ${DBName} < ${DBName}.sql
echo "import ${DBName} over"
#echo "drop database if exists ${DBHotGameName}_log;" | /usr/local/mysql/bin/mysql -${DBRoot} -${DBP1} -h${DBHost}
#echo "create database ${DBHotGameName}_log;" | /usr/local/mysql/bin/mysql -${DBRoot} -${DBP1} -h${DBHost}
#/usr/local/mysql/bin/mysql -${DBRoot} -${DBP1} -h${DBHost} ${DBHotGameName}_log < log.sql
| true
|
f603d7e459e6484b8d661cb5da1ee65311d73a85
|
Shell
|
thomaspreece/TableTopSimulator-ModManager
|
/src/list.sh
|
UTF-8
| 245
| 2.640625
| 3
|
[
"Unlicense"
] |
permissive
|
#!/bin/bash
# Interactively ask for the path to Tabletop Simulator's
# WorkshopFileInfos.json (pre-filled with the previously saved answer),
# persist the answer, then list the workshop mods via ttsbackup.
set -e
workshopJson=`cat ./settings/workshopJson.txt`
# -i pre-fills the prompt with the saved path; -e enables readline editing.
read -p "Where is your WorkshopFileInfos.json file? " -i "$workshopJson" -e workshopJson
echo $workshopJson > ./settings/workshopJson.txt
yarn ttsbackup list -w "$workshopJson"
| true
|
bfdc16b27c7bbdc6321c7c1a86d4a7f7b022c6c4
|
Shell
|
marler8997/genesis
|
/printelf
|
UTF-8
| 291
| 2.671875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Dump the dynamic-linking details of an ELF binary ($1): interpreter,
# rpath, and the NEEDED shared libraries reported by readelf.
set -e
# Glob for patchelf under /g, expanded when the variable is used.
# NOTE(review): assumes exactly one matching directory — confirm layout.
patchelf=/g/*patchelf*/bin/patchelf
# Static binaries have no PT_INTERP; fall back to "" so set -e
# does not abort the script.
interp=$($patchelf --print-interpreter $1 || echo "")
echo $1
echo "interpreter: \"$interp\""
rpath=$($patchelf --print-rpath $1)
echo "rpath: \"$rpath\""
echo "----------------------------------"
readelf -d $1 | grep "library"
| true
|
b2bc09b336f00171937ea3bb3f06e35c916673eb
|
Shell
|
jerdavjohnson/scripts
|
/rsync-deploy.sh
|
UTF-8
| 196
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the site with hugo and, on success, rsync ./public to the
# remote host.
#
# Fix: the original placeholder lines ("USER= username," etc.) were not
# valid shell — `VAR= word ...` runs "word" as a command with VAR empty.
# They are now real (placeholder) assignments; edit before use.
USER="username"       # remote login name
HOST="hostname"       # server hostname, e.g. the server IP address
DIR="public_html"     # remote directory where files should go
hugo && rsync -avz --delete public/ "${USER}@${HOST}:~/${DIR}"
exit 0
| true
|
b5fb73666378da20573409dfc7415e9c66ae49ec
|
Shell
|
Meoop/exercise
|
/linux_shell_scripting_tutorial_v2.0/chapter04/string_comparison_3.sh
|
UTF-8
| 270
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/bash
# The length of STRING is zero '-z STRING'
# Prompt for a password without echoing it (-s), reject an empty
# answer, then compare it against the expected value "tom".
read -s -p "Enter your password: " pass
echo
# Quote "$pass": unquoted, a value containing whitespace would make
# `test` see multiple arguments and fail with a syntax error.
if test -z "$pass"
then
echo "No password was entered!!! Cannot verify an empty password!!!"
exit 1
fi
if test "$pass" != "tom"
then
echo "Wrong password!"
fi
| true
|
cc8b4b7d7edece02a6fabc121c33405084d0a56d
|
Shell
|
jaymecd/osx-setup
|
/mas-signin.sh
|
UTF-8
| 587
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/sh
# Ensure the user is signed in to the Mac App Store (checked via the
# `mas` CLI); if not, open App Store.app and wait for the sign-in.
set -e
main () {
did_login=0
# `mas account` fails when nobody is signed in.
if ! mas account >/dev/null 2>&1; then
_login
did_login=1
fi
echo "Logged as: $(mas account)"
if [ "${did_login}" -eq 1 ]; then
echo "You may now close 'App Store' window."
echo "Thank you!"
fi
}
# Interactive helper: open App Store.app and poll once per second
# until `mas account` reports a signed-in user.
_login () {
echo "Attention! Login to 'App Store' is required."
echo "After window is opened, navigate to >Store and then >Sign In..."
echo "  press <Enter> continue or Ctrl+C to abort ..."
read -r
open -a "/Applications/App Store.app"
until (mas account > /dev/null); do
sleep 1
done
}
main "$@"
| true
|
aa5a518af2c123dcded5d1c621da43578d91ebe7
|
Shell
|
claudioIREA/Script_SBAS_GEP
|
/common/.svn/text-base/env.svn-base
|
UTF-8
| 2,582
| 3.375
| 3
|
[] |
no_license
|
#!/bin/bash
# Project: CCB@CNR-IREA
# Author: Terradue Srl
# Last update: 2013/09/06
# Element: env
# Context: /application/sbas
# Name: run
# Version: 1.0
# Description: Common functions and variables for SBAS jobs
# This document is the property of Terradue and contains information directly
# resulting from knowledge and experience of Terradue.
# Any changes to this code is forbidden without written consent from Terradue Srl
# Contact: info@terradue.com
# Error codes
# Exit-status constants shared by the SBAS job scripts; cleanExit()
# below maps each one to a human-readable log message.
SUCCESS=0
ERR_BADVOLCANODB=1
ERR_BADARG=2
NO_IDL=3
NO_XSLTPROC=4
NO_ZIP=5
NO_BINCALC=6
NO_WARPBIN=7
NO_GEOBOXAWK=8
NO_KMLXSLT=9
ERR_NOLDDCMD=10
ERR_NOFILECMD=11
ERR_MISSINGLIBS=12
ERR_IMAGERPROCFILE=13
ERR_IMAGERPARAMFILE=14
ERR_IMAGERBIN=15
ERR_XSLTPROCBIN=16
ERR_NOXSL=17
ERR_NOINPUT=18
ERR_NOOUTPUT=19
DEBUG_EXIT=66
# cleanExit — exit handler for SBAS jobs: captures the status of the
# last command, maps it to a message via the error-code constants
# defined above, logs it with ciop-log, and exits with that status.
# NOTE(review): several messages interpolate $res, which is never set
# in this function — presumably a leftover from an older revision.
function cleanExit ()
{
local retval=$?
local msg=""
case "$retval" in
$SUCCESS)
msg="Processing successfully concluded";;
$NO_IDL)
msg="No idl binary found in path ($PATH)";;
$NO_XSLTPROC)
msg="No xsltproc binary found in path ($PATH)";;
$NO_ZIP)
msg="No zip binary found in path ($PATH)";;
$NO_BINCALC)
msg="No bc binary found in path ($PATH)";;
$NO_WARPBIN)
msg="No warpimage binary found in path ($PATH)";;
$NO_GEOBOXAWK)
msg="No awk script geobox.awk found in path ($PATH)";;
$ERR_BADARG)
msg="function checklibs called with non-directory parameter, returning $res";;
$ERR_NOLDDCMD)
msg="binary ldd not found in path, returning $res";;
$ERR_NOFILECMD)
msg="binary 'file' command not found in path, returning $res";;
$ERR_MISSINGLIBS)
msg="libraries not found, returning $res";;
$ERR_IMAGERPROCFILE)
msg="File $DIR/vomir.proc missing or empty";;
$ERR_IMAGERPARAMFILE)
msg="File $DIR/proc_params.txt missing or empty";;
$ERR_IMAGERBIN)
msg="imagerpp failed";;
$ERR_XSLTPROCBIN)
msg="xsltproc failed";;
$ERR_NOXSL)
msg="No xsl stylesheet found";;
$ERR_NOINPUT)
msg="Unable to retrieve an input file";;
$ERR_NOOUTPUT)
msg="No output results";;
$DEBUG_EXIT)
msg="Breaking at debug exit";;
*)
msg="Unknown error";;
esac
[ "$retval" != 0 ] && ciop-log "ERROR" "Error $retval - $msg, processing aborted" || ciop-log "INFO" "$msg"
exit "$retval"
}
| true
|
efd1cae35086737b1a05d781c75e20059b8f6be1
|
Shell
|
westfly/thirdparty
|
/double-conversion/build_and_release.sh
|
UTF-8
| 1,201
| 3.375
| 3
|
[] |
no_license
|
# Point CC/CXX at the devtoolset-7 toolchain so CMake uses it instead
# of the system compiler.
# See https://stackoverflow.com/questions/17275348/how-to-specify-new-gcc-path-for-cmake
select_compiler() {
    devtool=/opt/rh/devtoolset-7/root/usr/
    export CC="${devtool}/bin/gcc"
    export CXX="${devtool}/bin/g++"
}
# cmake_build SOURCE_DIR — configure and build SOURCE_DIR/build with
# cmake3 + Ninja (RelWithDebInfo, static library, tests enabled).
function cmake_build() {
source_dir=$1
build=$source_dir/build
if [[ ! -d $build ]];then
mkdir $build
fi
cd $build
# A stale cache would pin the previous compiler paths; drop it.
if [[ -f CMakeCache.txt ]];then
rm CMakeCache.txt
fi
select_compiler
# NOTE(review): `0 -eq 0` is always true — presumably a debug toggle
# left permanently enabled.
if [[ 0 -eq 0 ]];then
cmake3 -G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DBUILD_SHARED_LIBS=OFF \
-DBUILD_TESTING=ON \
..
ninja
fi
cd -
}
# deploy SOURCE_DIR DEST — copy the package's headers, built binaries
# and static library into DEST.
function deploy() {
source_dir=$1
deploy=$2
# Package name = second-to-last path component of SOURCE_DIR
# (e.g. .../double-conversion/src -> "double-conversion").
package=$(echo $source_dir | awk -F'/' '{print $(NF-1)}')
cp $source_dir/$package/*h -a $deploy
if [[ ! -d $deploy/bin ]];then
mkdir $deploy/bin
fi
cp $source_dir/build/bin/* $deploy/bin/
if [[ ! -d $deploy/lib64_release ]];then
mkdir $deploy/lib64_release
fi
cp $source_dir/build/lib$package.a $deploy/lib64_release
}
# Entry point: build the source tree given as $1 and, on success,
# deploy the artifacts into the current working directory.
source_dir=$1
cmake_build $source_dir
if [[ $? -eq 0 ]];then
deploy $source_dir $(pwd)
fi
| true
|
6401155c3fd39386eb49d17d565d6ed2d8730b9e
|
Shell
|
jlgull/Linux_Bash_Scripts
|
/EMCC-Red-Hat-Scripting-Courses/CIS239DL_Scripting_2014/bookscripts/313checkreturnstatus
|
UTF-8
| 293
| 3.75
| 4
|
[] |
no_license
|
#!/bin/bash
# check the return status from a function
#
#set -xv

# FileExist PATH — echoes PATH, returns 0 if it exists, 1 otherwise.
# [[ -e ]] replaces the deprecated [[ -a ]]; quoting keeps paths with
# spaces (or an empty $1) from breaking the test.
function FileExist()
{
echo "$1"
if [[ -e "$1" ]]
then
   return 0
else
   return 1
fi
}

# Remove $1 when present; otherwise report rm's usual error message.
if FileExist "$1"
then
  echo $?
  rm -- "$1"
  echo "$1" "File Removed!"
else
  echo $?
  echo "$0: $1: No such file or directory"
fi
#set +xv
| true
|
377f52542b5bb309b6ed7ea3a7faae488c56e1ad
|
Shell
|
w-disaster/lab_SO
|
/esercitazione_esame/6i/es65/lancia.sh
|
UTF-8
| 118
| 2.625
| 3
|
[] |
no_license
|
#!/bin/bash
# Start from an empty B.txt, then forever feed its contents through
# ./A.sh piped into ./B.sh.
# NOTE: this loop never terminates; stop it with Ctrl+C / kill.
if [[ -e B.txt ]]; then
rm B.txt
fi
touch B.txt
while true; do
cat B.txt | ./A.sh | ./B.sh
done
| true
|
5115a5cdba7590b5c359f79a0fb850edc9792971
|
Shell
|
stevelle/rpc-openstack
|
/scripts/linting-ansible.sh
|
UTF-8
| 1,870
| 3.3125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Copyright 2015, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## Shell Opts ----------------------------------------------------------------
set -euo pipefail
## Main ----------------------------------------------------------------------
# NOTE(review): with `set -u`, an unset VIRTUAL_ENV aborts this check
# rather than printing the warning — confirm callers always export it.
if [[ -z "$VIRTUAL_ENV" ]] ; then
  echo "WARNING: Not running hacking inside a virtual environment."
fi
# Put local inventory in a var so we're not polluting the file system too much
LOCAL_INVENTORY='[all]\nlocalhost ansible_connection=local'
pushd rpcd/playbooks/
echo "Running ansible-playbook syntax check"
# Do a basic syntax check on all playbooks and roles.
ansible-playbook -i <(echo $LOCAL_INVENTORY) --syntax-check *.yml --list-tasks
# Perform a lint check on all playbooks and roles.
ansible-lint --version
# Skip ceph roles because they're submodules and not ours to lint
# NOTE(sigmavirus24): If
# https://github.com/willthames/ansible-lint/issues/80 is accepted and
# merged, get rid of these awful hacks around removing directories and
# re-placing them.
rm -r roles/ceph-common
rm -r roles/ceph-mon
rm -r roles/ceph-osd
echo "Running ansible-lint"
# Lint playbooks and roles
ansible-lint *.yml
# Revert changes to deleting submodules
git checkout .
# Re-clone the submodules for the next run
git submodule update >/dev/null
popd
| true
|
71a17cf7abb89a360b02f0381357513c397729c5
|
Shell
|
unofficial-inteno-public-mirror/feed-inteno-packages
|
/iop/scripts/genconfig.sh
|
UTF-8
| 6,820
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
# genconfig — generate an OpenWrt-style .config for a given board (and
# optional customer profile); registered below as the iop "genconfig"
# command.  Defaults are overridden by the option parsing in the main
# section at the bottom of the function.
function genconfig {
export CLEAN=0
export IMPORT=0
export SRCTREEOVERR=0
export FILEDIR="files/"
export CONFIGPATH="package/feeds/feed_inteno_packages/iop/configs"
export CUSTCONF="customerconfigs"
export VERBOSE=0
export DEVELOPER=0
export TMPDIR="/tmp/builder/"
LOCAL_MIRROR="http://mirror.inteno.se/mirror"
# Board tables: each word is a board profile built for that target.
iopsys_brcm63xx_mips="cg300 cg301 dg150 dg150v2 dg150alv2 dg200 dg200al dg301 dg301al eg300 vg50 vox25"
iopsys_brcm63xx_arm="dg400"
target="bogus"
# set_target PROFILE — set the global $target to the hardware target
# whose board list contains PROFILE; $target is left untouched when no
# list matches (callers check for the sentinel "bogus").
set_target() {
local wanted=$1
local board
for board in $iopsys_brcm63xx_mips; do
[ "$board" == "$wanted" ] && target="iopsys_brcm63xx_mips"
done
for board in $iopsys_brcm63xx_arm; do
[ "$board" == "$wanted" ] && target="iopsys_brcm63xx_arm"
done
return 0
}
# Developer mode: if no git remote uses plain http, assume ssh access
# to private.inteno.se and probe which restricted repos this ssh key
# can read/write; the flags gate CONFIG_*_OPEN later on.
git remote -v | grep -q http || {
DEVELOPER=1
bcmAllowed=0
iceAllowed=0
endptAllowed=0
natalieAllowed=0
allowedRepos="$(ssh -o ConnectTimeout=5 git@private.inteno.se 2>/dev/null | grep -w 'R\|W' | awk '{print$NF}')"
for repo in $allowedRepos; do
case $repo in
bcmkernel) bcmAllowed=1 ;;
ice-client) iceAllowed=1 ;;
endptcfg) endptAllowed=1 ;;
natalie-dect*) natalieAllowed=1 ;;
esac
done
}
# v MSG... — verbose logger: print MSG only when $VERBOSE >= 1
# (VERBOSE is bumped once per -v option).
v() {
if [ "$VERBOSE" -ge 1 ]; then
echo "$@"
else
return 1
fi
}
# usage [BOARD] — print the help text plus the available board (and,
# for BOARD, customer) configs, then terminate with exit code 127.
usage() {
echo
echo 1>&2 "Usage: $0 [OPTIONS] BoardConfig Customerconfig"
echo
echo " -c, remove all files under ./files and import from config "
echo " -v, verbose"
echo " -u, Update customer config before applying"
echo " -p, set profile (if exists)"
echo " -t, use latest git tag and number of commits since as version for the build"
echo " -s, enable 'Package source tree override'"
echo
echo "BoardConfig ex "
ls -1 configs
if [ -d "$CUSTCONF/$1" ]; then
echo "Customerconfig ex"
ls $CUSTCONF/*
fi
echo
echo "Example ./genconfig vg50 TELIA"
echo "(if no customerconfig is chosen the Inteno Config will be used)"
echo
exit 127
}
# use_local_mirror — if the LAN mirror answers within ~3 s, rewrite
# CONFIG_LOCALMIRROR in .config to point at it (faster downloads).
use_local_mirror()
{
if wget -T 3 -t 2 -O /dev/null $LOCAL_MIRROR >/dev/null 2>/dev/null
then
echo "mirror [$LOCAL_MIRROR] exists. Using local mirror"
sed -i "s;CONFIG_LOCALMIRROR=.*;CONFIG_LOCALMIRROR=\"$LOCAL_MIRROR\";" .config
else
echo "mirror [$LOCAL_MIRROR] does not exist. Not using local mirror"
fi
}
# generate_config DIFFFILE MASTERFILE — apply each line of DIFFFILE as
# a `sed -r` expression to MASTERFILE, logging each one via v().
generate_config()
{
DIFFFILE="$1"
MASTERFILE="$2"
# IFS= and -r keep leading whitespace and backslashes in the sed
# expressions intact; quoting the file names protects paths with spaces.
while IFS= read -r p; do
v "$p"
sed -r -i "$p" "$MASTERFILE"
done < "$DIFFFILE"
}
# setup_dirs — in developer mode clone (or, with -u, refresh) the
# customerconfigs repo; always ensure the files/ staging directory
# exists, emptying it first when -c was given.
setup_dirs()
{
if [ $DEVELOPER -eq 1 ]; then
if [ ! -d "$CUSTCONF" ]; then
git clone git@private.inteno.se:customerconfigs
elif [ $IMPORT -eq 1 ]; then
cd customerconfigs
v "git pull"
git pull
cd ..
fi
fi
if [ ! -d "$FILEDIR" ]; then
mkdir $FILEDIR
elif [ -d "$FILEDIR" -a $CLEAN -eq 1 ]; then
v "rm -rf $FILEDIR*"
rm -rf $FILEDIR*
fi
}
# create_and_copy_files BOARD [CUSTOMER] — assemble the final .config:
# base config + optional profile diff + target lines + optional
# customer diff/fs overlay + version + access flags, then run
# `make defconfig` to fill in the defaults.
create_and_copy_files()
{
local BOARDTYPE=$1
local CUSTOMER=$2
# Validate selected board and customer
set_target $BOARDTYPE
if [ $target == "bogus" ]; then
echo "Hardware profile does not exist"
exit 1
elif [ -n "$CUSTOMER" -a ! -d "$CUSTCONF/$BOARDTYPE/$CUSTOMER/" ]; then
echo "Customer profile does not exist"
exit 1
fi
# Clean base-file package to force rebuild when changing profile
make package/base-files/clean
# Base config on master
v "Config $BOARDTYPE selected"
v "cp $CONFIGPATH/config .config"
cp $CONFIGPATH/config .config
# Apply profile diff to master config if selected
if [ -n "$PROFILE" ]; then
if [ -e "$CONFIGPATH/$PROFILE.diff" ]; then
cat $CONFIGPATH/$PROFILE.diff >> .config
elif [ "$PROFILE" == "juci" ]; then
v "Default profile (juci) is selected."
else
echo "ERROR: profile $PROFILE does not exist!"
exit 1
fi
else
v "No profile selected! Using default."
fi
# Set target and profile
echo "CONFIG_TARGET_${target}=y" >> .config
echo "CONFIG_TARGET_${target}_${BOARDTYPE}=y" >> .config
# Add customerconfig diff if a customer is selected
if [ -n "$CUSTOMER" ]; then
echo "$BOARDTYPE $CUSTOMER" > .current_config_file
if [ -d "$CUSTCONF/$BOARDTYPE/$CUSTOMER/fs" ]; then
v "cp -rLp $CUSTCONF/$BOARDTYPE/$CUSTOMER/fs/* $FILEDIR"
cp -rLp $CUSTCONF/$BOARDTYPE/$CUSTOMER/fs/* $FILEDIR
fi
if [ -e "$CUSTCONF/$BOARDTYPE/$CUSTOMER/$BOARDTYPE.diff" ]; then
v "Apply $CUSTCONF/$BOARDTYPE/$CUSTOMER/$BOARDTYPE.diff"
cat $CUSTCONF/$BOARDTYPE/$CUSTOMER/$BOARDTYPE.diff >> .config
fi
fi
# Set target version (most recent git tag)
local GIT_TAG=$(git describe --abbrev=0 --tags)
echo "CONFIG_TARGET_VERSION=\"${GIT_TAG}\"" >> .config
# Enable Package source tree override if selected
[ $SRCTREEOVERR -eq 1 ] && \
echo CONFIG_SRC_TREE_OVERRIDE=y >> .config
# developer mode selected ?
if [ $DEVELOPER -eq 1 ]; then
# rewrite url to clone with ssh instead of http
echo "CONFIG_GITMIRROR_REWRITE=y" >>.config
# Fall back to the open variant of each repo the ssh key cannot reach.
[ $bcmAllowed -eq 0 ] && echo "CONFIG_BCM_OPEN=y" >> .config
[ $iceAllowed -eq 0 ] && echo "CONFIG_ICE_OPEN=y" >> .config
[ $endptAllowed -eq 0 ] && echo "CONFIG_ENDPT_OPEN=y" >> .config
[ $natalieAllowed -eq 0 ] && echo "CONFIG_NATALIE_OPEN=y" >> .config
else
echo "CONFIG_BCM_OPEN=y" >> .config
echo "CONFIG_ICE_OPEN=y" >> .config
echo "CONFIG_ENDPT_OPEN=y" >> .config
echo "CONFIG_NATALIE_OPEN=y" >> .config
fi
# Set default values based on selected parameters
make defconfig
# Temporary fixup for juci/luci profile
if [ "$PROFILE" == "luci" ]; then
sed -i '/CONFIG_DEFAULT_juci/d' .config
sed -i '/CONFIG_PACKAGE_juci/d' .config
sed -i '/CONFIG_PACKAGE_uhttpd/d' .config
fi
echo Set version to $(grep -w CONFIG_TARGET_VERSION .config | cut -d'=' -f2 | tr -d '"')
}
####### main #####
# Refuse to run before `iop feeds_update` has bootstrapped the feeds.
if [ ! -e tmp/.iop_bootstrap ]; then
echo "You have not installed feeds. Running genconfig in this state would create a non functional configuration."
echo "Run: iop feeds_update"
exit 0
fi
# No arguments: just show the currently selected profile.
if [ $# -eq 0 ]; then
echo Current profile:
cat .current_config_file
echo "Try ./iop_get_config.sh -h' to get instructions if you want to change current config"
exit 0
else
# Parse options until the first non-option word (the board name).
while [ -n "$1" ]; do
case "$1" in
-c) export CLEAN=1;;
-u) export IMPORT=1;;
-v) export VERBOSE="$(($VERBOSE + 1))";;
-p) export PROFILE="$2"; shift;;
-t) export USE_TAG=1;;
-s) export SRCTREEOVERR=1;;
-h) usage;;
-*)
echo "Invalid option: $1 "
echo "Try -h' for more information."
exit 1
;;
*) break;;
esac
shift;
done
[ -d $TMPDIR ] || mkdir $TMPDIR
setup_dirs
create_and_copy_files "$1" "$2"
[ -d $TMPDIR ] && rm -rf $TMPDIR
use_local_mirror
fi
}
register_command "genconfig" "Generate configuration for board and customer"
| true
|
a972c749e8261173205a891351e6e406875e9e50
|
Shell
|
sh4nks/dotfiles
|
/bash/bashrc
|
UTF-8
| 5,588
| 3.28125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Get the path to this directory (because we symlink all the files)
# Ref: https://stackoverflow.com/a/246128/1236449
# ------------------------------
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"
# Platform detection
# ------------------------------
platform='unknown'
distribution='unkown'
unamestr=`uname`
if [[ "$unamestr" == 'Linux' ]]; then
platform='linux'
distribution=$(awk '/^NAME=/ { print $0 }' /etc/os-release | cut -d '=' -f 2 | tr -d '\"')
elif [[ "$unamestr" == 'Darwin' ]]; then
platform='darwin'
distribution=$(sw_vers -productVersion)
fi
# Global Environment Definitions
# ------------------------------
export STEAM_FRAME_FORCE_CLOSE=1 # Closes the steam frame instead of minimizing (steam is still running)
#export STEAM_RUNTIME=0 # Uses system libraries to run steam (archlinux yeah \o/)
export EDITOR='micro'
export WORKON_HOME=~/.virtualenvs
export ANDROID_HOME=$HOME/.android/sdk
export GOPATH=~/Development/Go
export GO111MODULE=on
export PATH="/usr/lib/ccache/bin/:$PATH" # include ccache's binaries before the path to your compiler
export PATH="$PATH:/opt/dart-sdk/bin"
export PATH="$PATH:~/.local/apps/flutter/bin"
if [ $platform == "darwin" ]; then
export VIRTUALENVWRAPPER_PYTHON=/usr/local/bin/python3
fi
if [ -f /usr/local/bin/virtualenvwrapper.sh ]; then # OSX and Debian
source /usr/local/bin/virtualenvwrapper.sh
elif [ -f /usr/bin/virtualenvwrapper.sh ]; then # Arch Linux
source /usr/bin/virtualenvwrapper.sh
fi
if [ -f /usr/share/nvm/init-nvm.sh ]; then
source /usr/share/nvm/init-nvm.sh
fi
if [ -f $HOME/.travis/travis.sh ]; then
source $HOME/.travis/travis.sh
fi
if [ $platform == "darwin" ]; then
export LC_CTYPE=en_US.UTF-8
export LC_ALL=en_US.UTF-8
export LANG=en_US.UTF-8
fi
VIRTUAL_ENV_DISABLE_PROMPT=1 # don't let virtualenv show prompts by itself
export PYMACS_PYTHON=python2
# NOTE(review): the next three exports each overwrite HISTCONTROL, so
# only the last ("erasedups") takes effect — presumably intended as a
# single combined value like "ignoreboth:erasedups".
export HISTCONTROL=ignoredups # don't put duplicate lines in the history
export HISTCONTROL=ignoreboth # ignore same sucessive entries.
export HISTCONTROL=erasedups # ignore duplicate entries in history
export HISTSIZE=10000 # Increases size of history
export HISTIGNORE="&:ls:ll:la:l.:pwd:exit:clear:clr:[bf]g"
#export JAVA_FONTS=/usr/share/fonts/TTF
export _JAVA_OPTIONS='-Dawt.useSystemAAFontSettings=on'
shopt -s histappend # Append history instead of overwriting
shopt -s cdspell # Correct minor spelling errors in cd command
shopt -s dotglob # includes dotfiles in pathname expansion
shopt -s checkwinsize # If window size changes, redraw contents
shopt -s cmdhist # Multiline commands are a single command in history.
shopt -s extglob # Allows basic regexps in bash.
# NOTE(review): checkwinsize is set twice (harmless duplicate).
shopt -s checkwinsize # update the values of lines and columns.
#set ignoreeof on # Typing EOF (CTRL+D) will not exit interactive sessions
# Additional $PATH paths
# ------------------------------
# set PATH so it includes user's private bin if it exists
if [ -d ~/.bin ]; then
PATH=$HOME/.bin:"${PATH}"
fi
if [ -d ~/.local/bin ]; then
PATH=$HOME/.local/bin:"${PATH}"
fi
if [ -d $HOME/.rvm/bin ]; then
PATH=$HOME/.rvm/bin:"${PATH}"
fi
if [ -d $HOME/Development/Go/bin ]; then
PATH=$HOME/Development/Go/bin:"${PATH}"
fi
# Completions
# ------------------------------
if [ -f /etc/bash_completion ]; then
source /etc/bash_completion
fi
if [ -f /usr/local/git/contrib/completion ]; then
source /usr/local/git/contrib/completion/git-completion.bash
fi
# sudo completion
complete -cf sudo
# Add tab completion for SSH hostnames based on ~/.ssh/config, ignoring wildcards
[ -e "$HOME/.ssh/config" ] && complete -o "default" -o "nospace" -W "$(grep "^Host" ~/.ssh/config | grep -v "[?*]" | cut -d " " -f2 | tr ' ' '\n')" scp sftp ssh
# osx specific
if [ $platform == "darwin" ]; then
complete -W "NSGlobalDomain" defaults
complete -o "nospace" -W "Contacts Calendar Dock Finder Mail Safari iTunes SystemUIServer Terminal Twitter" killall # Add `killall` tab completion for common apps
fi
# less and cat source highlighting with highlight.
# Install highlight with ${YOUR_FAVORITE_PACKAGEMANAGER} highlight
# All available themes can be retrieved with this command: highlight --list-scripts=themes
if hash highlight 2>/dev/null; then
# Pipe Highlight to less
export LESSOPEN="| $(which highlight) %s --out-format xterm256 --line-numbers --quiet --force --style moria"
export LESS=" -R"
alias less='less -m -N -g -i -J --line-numbers --underline-special'
alias more='less'
# Use "highlight" in place of "cat"
#alias cat="highlight $1 --out-format xterm256 --line-numbers --quiet --force --style moria"
fi
# External config
if [[ -r ~/.dircolors ]] && type -p dircolors >/dev/null; then
eval $(dircolors -b "$HOME/.dircolors")
fi
# Public Aliases
# ------------------------------
source "$DIR/bash_aliases"
# Private Aliases
# ------------------------------
if [ -f "$DIR/bash_alias_private" ]; then
source "$DIR/bash_alias_private"
fi
# Functions / Helpers
# ------------------------------
source "$DIR/bash_functions"
# Prompt
# ------------------------------
source "$DIR/bash_prompt"
| true
|
7d820d174fdc61399f4c111b701a20fc8e6cb9d6
|
Shell
|
DangerousPrototypes/automation
|
/build/bp-install-mplabx.sh
|
UTF-8
| 4,001
| 3.25
| 3
|
[] |
no_license
|
# Shell script to install MPLABX and XC16 compiler under linux
# Assumes logged in as root or do 'su' first
# The instructions are presented in the format of a shell script, but you should really run each step manually!
# Based on example here: https://www.microchip.com/forums/FindPost/998286
# Tested under Ubuntu 14.04 LTS 64bit, newer versions may not have the needed 32bit support libraries
# Updated for Ubuntu 18.10 64 bit
# NOTE(review): several installers below are interactive; running this
# file unattended will stall at the license prompts.
cd ~
# Update apt-get repositories
apt-get update
# Install needed helpers
# make, tar, git-core, default-jre
apt-get -y install make tar git-core default-jre
# Install 32bit libraries
# ubuntu 14.04: apt-get -y install lib32z1 lib32ncurses5 lib32stdc++6 libx11-6:i386 libexpat1:i386 libXext6:i386
# 18.10 :
apt-get -y install lib32z1 lib32ncurses5-dev lib32stdc++6 libx11-6:i386 libexpat1:i386 libxext6:i386
# Install XC16 compiler
# Download compiler
wget http://www.microchip.com/mplabxc16linux
# Make executable
chmod +x mplabxc16linux
# Install
# NOTE: two pairs of dashes -- --mode
# NOTE: remember the XC16 install directory, it will be needed later (eg '/opt/microchip/xc16/v1.36')
./mplabxc16linux -- --mode text
# NOTE: follow instructions to accept the license and install the software
# Install MPLabX IDE (needed generate make files)
# Download IDE
wget http://www.microchip.com/mplabx-ide-linux-installer
# Extract IDE
# NOTE: This will extract a file such as MPLABX-v5.05-linux-installer.sh. Use this file name in the next two steps
tar -xvf mplabx-ide-linux-installer
# Make install script executable
# NOTE: change name to current version of IDE installer
chmod +x MPLABX-v5.15-linux-installer.sh
# Run install script
# NOTE: change name to current version of IDE installer
# NOTE: two pairs of dashes -- --mode
./MPLABX-v5.15-linux-installer.sh -- --mode text
# NOTE: follow instructions to accept the license and install the software
# Install peripheral library
# Download library
wget http://ww1.microchip.com/downloads/en//softwarelibrary/pic24%20mcu%20dspic%20peripheral%20lib/peripheral-libraries-for-pic24-and-dspic-v2.00-linux-installer.run
# Make executable
chmod +x peripheral-libraries-for-pic24-and-dspic-v2.00-linux-installer.run
# Run installer
# NOTE: two pairs of dashes -- --mode
# NOTE: if XC16 installed to a version specific directory it needs to be entered. Example library default install directory is '/opt/microchip/xc16' but our XC16 is installed in '/opt/microchip/xc16/v1.36' by default
./peripheral-libraries-for-pic24-and-dspic-v2.00-linux-installer.run -- --mode text
# NOTE: follow instructions to accept the license and install the software
# Clone Bus Pirate repo
mkdir picdev &&
cd picdev &&
git clone https://github.com/DangerousPrototypes/Bus_Pirate.git
# Check out a development branch (optional)
cd Bus_Pirate
git checkout firmware_v8_official
# Create correctly formatted make files from the MPLABX project file
# Do for all projects before compiling
# The location of the prjMakefilesGenerator.sh changes frequently. You may need to hunt for it.
cd ~/picdev/Bus_Pirate/Firmware/busPirate3X.X
/opt/microchip/mplabx/v5.15/mplab_platform/bin/prjMakefilesGenerator.sh .
cd ~/picdev/Bus_Pirate/Firmware/busPirate4X.X
/opt/microchip/mplabx/v5.15/mplab_platform/bin/prjMakefilesGenerator.sh .
cd ~/picdev/Bus_Pirate/Firmware/busPirate5X.X
/opt/microchip/mplabx/v5.15/mplab_platform/bin/prjMakefilesGenerator.sh .
# Test compile
# There are three MPLABX project files in Bus_Pirate/Firmware/busPirate(3-5)X.X
# Each of these projects is setup in MPLABX IDE to pass the -D {build version} compiler flag for one version of hardware (v3, v4, v5)
# These commands are normally run by the automated compile script
# NOTE: if you have compile errors be sure you ran prjMakefilesGenerator.sh in the previous step!!!
cd ~/picdev/Bus_Pirate/Firmware/busPirate3X.X
make clean
make
cd ~/picdev/Bus_Pirate/Firmware/busPirate4X.X
make clean
make
cd ~/picdev/Bus_Pirate/Firmware/busPirate5X.X
make clean
make
| true
|
d93507bed0c4ce65676c53fa70378975f1c9927a
|
Shell
|
nesi/ARCS-systems
|
/dataFabricScripts/BulkDataTransfer/gloPut7T.sh
|
UTF-8
| 4,587
| 3.9375
| 4
|
[] |
no_license
|
#!/bin/sh
# gloPut7T.sh Recursively copies files to a remote server.
# Requires threaded globus-url-copy; uses sshftp.
# Graham.Jenkins@arcs.org.au April 2009. Rev: 20110923
# Default-batch-size, concurrency, environment; adjust as appropriate
BATCH=16; CONCUR=2
# Locate an installed Globus toolkit (first match wins) and put it on PATH.
for Dir in globus-5 globus-5.0.4 globus-5.0.2 globus-5.0.1 globus-4.2.1; do
[ -d "/opt/$Dir/bin" ] && GLOBUS_LOCATION=/opt/$Dir && break
done
PATH=$GLOBUS_LOCATION/bin:$PATH
export GLOBUS_LOCATION PATH
# Usage, ssh parameters
Match="."; Params="-p 24"
while getopts b:c:d:m:srux Option; do
case $Option in
b) BATCH=$OPTARG;;
c) CONCUR=$OPTARG;;
d) Days="-mtime -$OPTARG";;
m) Match=$OPTARG;;
s) Skip="Y";;
r) Order="-r";;
u) Params="-udt -p 8";;
x) MaxDep="-maxdepth 1";;
\?) Bad="Y";;
esac
done
shift `expr $OPTIND - 1`
[ \( -n "$Bad" \) -o \( $# -ne 3 \) ] &&
( echo " Usage: `basename $0` directory remote-userid remote-directory"
echo " e.g.: `basename $0` /mnt/pulsar/MTP26M" "graham@pbstore.ivec.org" \
"/pbstore/as03/pulsar/MTP26M"
echo "Options: -b l .. use a batch-size of 'l' (default $BATCH)"
echo " -c m .. do 'm' concurrent transfers (default $CONCUR)"
echo " -d n .. only transfer files changed in last 'n' days"
echo " -m String .. send only files whose names contain 'String'"
echo " -s .. skip files whose names begin with a period"
echo " -x .. don't descend through directories"
echo " -r .. reverse order"
echo " -u .. use 'udt' protocol" ) >&2 && exit 2
# ssh invocation used for all remote commands (no host-key prompting).
Ssu='ssh -o"UserKnownHostsFile /dev/null" -o"StrictHostKeyChecking no"'
[ `uname -s` = SunOS ] && Wc="du -h" || Wc="wc -c"
# Failure/cleanup function; parameters are exit-code and message
fail() {
Code=$1; shift
rm -f $LisFil
echo "$@"; exit $Code
}
# Globus-URL-Copy function; file list is 1st param
# Transfers every "file:// sshftp://" URL pair in the list file, then
# truncates it.  The execute bit on the list file doubles as a "keep
# running" flag: the TERM trap clears it, and the -x check below turns
# that into a graceful exit once the current batch completes.
doGlobus() {
echo "`date '+%a %T'` .. Pid: $$ .. Files:"
eval $Wc `awk '{print $1}' < $1 | cut -c 8-`
globus-url-copy -q -cd $Params -pp -cc $CONCUR -fast -f $1 || \
( echo "Failed; sleeping for 5 mins!"; sleep 300 )
echo
>$1
[ -x "$1" ] || fail 0 "Graceful Termination"
}
# Create destination directory if required, ensure that we can write to it
eval $Ssu $2 /bin/date</dev/null>/dev/null 2>&1 ||fail 1 "Remote-userid invalid"
eval $Ssu $2 "mkdir -p -m 2775 $3" 2>/dev/null
eval $Ssu $2 "test -w $3" 2>/dev/null ||fail 1 "Remote-dir'y problem"
# Create temporary file, set traps
LisFil=`mktemp` && chmod a+x $LisFil || fail 1 "Temporary file problem"
# TERM requests a graceful stop (see doGlobus); USR1/USR2 switch the
# transport parameters between TCP and UDT for subsequent batches.
trap "chmod a-x $LisFil ; echo Break detected .. wait" TERM
trap 'Params="-p 24" ; echo Switched to TCP..' USR1
trap 'Params="-udt -p 8"; echo Switched to UDT..' USR2
# Loop until no more files need to be copied
echo "To Terminate gracefully, enter: kill -TERM $$"
echo "To switch to TCP/UDT mode enter: kill -USR1/USR2 $$"
Flag=Y
while [ -n "$Flag" ] ; do
Flag= # Clear the "copy done" flag
echo "Generating a list of files to be copied .. wait .."
# The awk below first reads the remote 'ls -l' listing (recording each
# name's size in remsiz[]); after the blank line emitted by 'echo' it
# switches to the local listing and prints names whose local size
# differs from the recorded remote size -- i.e. files still to copy.
for File in `
( until \
eval $Ssu $2 "cd $3 \&\& find -L . -type f \| xargs ls -lLA 2\>/dev/null";do
echo "Remote-directory problem; will retry in 5 mins .." >&2 ; sleep 300
done
echo
# List files to be copied from local directory, then process the output;
# "/dev/null" is appended because Solaris 'xargs' doesn't allow '-r' ..
cd $1 && ( find -L . ${MaxDep} -type f ${Days}; echo /dev/null ) |
xargs ls -lLA 2>/dev/null
) | awk '{ if (NF==0) {Local="Y"; next}
if (Local=="Y") {if ("X"remsiz[$NF]!="X"$5) {print $NF} }
else {remsiz[$NF]=$5}
}' | grep $Match | sort $Order`; do
[ \( ! -r "$1/$File" \) -o \( "$File" = "/dev/null" \) ] && continue
case "`basename $File`" in
.* ) [ -n "$Skip" ] && continue ;;
esac
Flag=Y # Set the "copy done" flag
echo "file://$1/$File sshftp://$2$3/$File"|sed -e 's_/\./_/_'>>$LisFil
[ "`cat $LisFil 2>/dev/null | wc -l`" -eq $BATCH ] && doGlobus $LisFil
done
[ "`cat $LisFil 2>/dev/null | wc -l`" -ne 0 ] && doGlobus $LisFil
done
# All done, adjust permissions and exit
User="`echo $2 | awk -F@ '{if(NF>1)print $1}'`"
[ -z "$User" ] && User=$LOGNAME
eval $Ssu $2 "find $3 -type d -user $User \| xargs chmod g+rws" 2>/dev/null
eval $Ssu $2 "find $3 -type f -user $User \| xargs chmod 664" 2>/dev/null
fail 0 "No more files to be copied!"
| true
|
6c8f59f2344dab7e53fb3a2044591583c26f1f1c
|
Shell
|
8ear/docker-wordpress
|
/make.sh
|
UTF-8
| 1,086
| 3.90625
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Build a Docker image from $FOLDER and push every local tag of it to
# Docker Hub and/or the GitHub package registry, depending on which
# credentials are present in the environment.
set -x

# Fall back to defaults for any variable the caller did not supply.
NAME=${NAME:-"test"}
DOCKER_USER=${DOCKER_USER:-"abc"}
FOLDER=${FOLDER:-"."}
if [ "$FOLDER" == "." ]; then
    TAG=latest
else
    TAG=${TAG:-"$FOLDER"}
fi
DOCKERFILE=${DOCKERFILE:-"$FOLDER/Dockerfile"}

# Build the image.
docker build -t "$DOCKER_USER/$NAME:$TAG" -f "$DOCKERFILE" "$FOLDER"

# Collect every tag that now exists locally for this image name.
CONTAINER_VERSION="$(docker image ls "$DOCKER_USER/$NAME" --format "{{.Tag}}")"

# Push to hub.docker.com when Docker Hub credentials are available.
if [[ -n "$DOCKER_PASS" ]] && [[ -n "$DOCKER_USER" ]]; then
    echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin
    for V in $CONTAINER_VERSION; do
        docker push "$DOCKER_USER/$NAME:$V"
    done
fi

# Push to the GitHub package registry when GitHub credentials are available.
if [[ -n "$GITHUB_PASS" ]] && [[ -n "$GITHUB_USER" ]]; then
    echo "$GITHUB_PASS" | docker login docker.pkg.github.com -u "$GITHUB_USER" --password-stdin
    for V in $CONTAINER_VERSION; do
        docker tag "$DOCKER_USER/$NAME:$V" "docker.pkg.github.com/${REPO}/$NAME:$V"
        docker push "docker.pkg.github.com/${REPO}/$NAME:$V"
    done
fi
| true
|
0a0c2f8bc31823dae08fdb75df650a1a1f78b064
|
Shell
|
8l/insieme
|
/scripts/ppl_install.sh
|
UTF-8
| 1,175
| 3.421875
| 3
|
[] |
no_license
|
# setup environment variables (provides PREFIX, CC, CXX, SLOTS)
. ./environment.setup
########################################################################
## PPL -- download, patch, build and install the Parma Polyhedra Library
########################################################################
VERSION=0.12.1
PACKAGE=ppl-$VERSION
FILE=$PACKAGE.tar.bz2
# Nothing to do when this version is already installed.
if [ -d $PREFIX/ppl-$VERSION ]; then
	echo "PPL version $VERSION already installed"
	exit 0
fi
CFLAGS="-mtune=native -O3"
CXXFLAGS=$CFLAGS
echo "#### Downloading ppl library ####"
wget -nc http://bugseng.com/products/ppl/download/ftp/releases/$VERSION/$FILE
RET=$?
if [ $RET -ne 0 ]; then
	exit $RET
fi
rm -Rf $PACKAGE
tar -xf $FILE
# Abort rather than patching/building in the wrong directory.
cd $PACKAGE || exit 1
patch -p0 < ../patches/ppl_numeric_limits.patch
export LD_LIBRARY_PATH=$PREFIX/gmp-latest/lib:$LD_LIBRARY_PATH
echo "#### Building ppl library ####"
# BUGFIX: CXXFLAGS was defined above but never handed to configure, so
# the (mostly C++) library was built without the optimization flags.
CC=$CC CXX=$CXX CFLAGS=$CFLAGS CXXFLAGS=$CXXFLAGS ./configure --prefix=$PREFIX/ppl-$VERSION --enable-optimization --with-gmp=$PREFIX/gmp-latest
make -j $SLOTS
# Check for failure
RET=$?
if [ $RET -ne 0 ]; then
	exit $RET
fi
echo "#### Installing ppl library ####"
# Propagate an install failure instead of silently re-pointing the link.
make install || exit 1
# Refresh the "latest" symlink (-f: ignore a missing old link).
rm -f $PREFIX/ppl-latest
ln -s $PREFIX/$PACKAGE $PREFIX/ppl-latest
echo "#### Cleaning up environment ####"
cd ..
rm -Rf $PACKAGE*
exit 0
| true
|
9840031f53ffdb201b87485175ee4cf4ba15c93e
|
Shell
|
BechtelCanDoIt/wso2-dev-support
|
/github/install.sh
|
UTF-8
| 1,453
| 3.171875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Installer for WSO2 dev-support tooling: runs the OS utility setup
# script, then (re)clones a fixed set of helper repos into ./tools.
echo "============================== OS Util Downloads ============================="
echo " We are going to execute the following script to ensure you have these installed."
echo " This will probably ask for your password."
echo ""
# NOTE: the file really is named "~getOsUtils.sh" -- the leading tilde is
# part of the filename, not home-directory expansion.
cat ./~getOsUtils.sh
echo ""
./~getOsUtils.sh
echo "================================================================================"
echo "============================ GitHub Util Downloads ==========================="
echo " We are going to download the key WSO2 utils from GitHub now "
echo ""
# ".installed" marks a previous run; wipe the old tools tree before
# cloning everything again.
if [ -f ./.installed ]
then
rm -Rf ./tools
rm .installed
fi
mkdir ./tools
cd ./tools
#Forked from https://github.com/rzrbld
git clone https://github.com/BechtelCanDoIt/bashStudio.git
#Forked from https://github.com/daveschoutens
git clone https://github.com/BechtelCanDoIt/format-xml.git
git clone https://github.com/BechtelCanDoIt/wso2-proxy-conflict-finder.git
git clone https://github.com/BechtelCanDoIt/pom-version-update.git
#Make sure we can execute
chmod +x bashStudio/bashStudio.sh
chmod +x format-xml/format-xml.sh
chmod +x pom-version-update/pom-version-update.sh
chmod +x wso2-proxy-conflict-finder/run.sh
cd ..
touch .installed
echo ""
echo "=================================================================================="
echo " All done with installations."
echo "=================================================================================="
|
7001850a9003976681591b7679ebb0c7269166c8
|
Shell
|
MyroslavGryshyn/.vim
|
/.bash_aliases
|
UTF-8
| 1,462
| 3.015625
| 3
|
[] |
no_license
|
#Use up 2 or up 3 instead of ../..
# upup N: cd up N directory levels (default 1).
upup(){ DEEP=$1; [ -z "${DEEP}" ] && { DEEP=1; }; for i in $(seq 1 ${DEEP}); do cd ../; done; }
alias up='upup'
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
alias ls='ls --color=auto'
alias grep='grep --color=auto'
alias egrep='egrep --color=auto'
fi
# some more ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
# git aliases
alias gs='git status'
alias gd='git diff'
alias gc='git commit -am'
alias gch='git checkout'
alias glist='git stash list'
alias gentags='ctagsp -R *'
# search shortcuts
alias hg='history | grep'
alias pg='ps aux | grep'
# NOTE(review): this shadows the standard `fgrep` (fixed-string grep)
# command with a file-name search.
alias fgrep='find . | grep'
# Django / Python workflow shortcuts
alias pyrun='python manage.py runserver'
alias pyshell='python manage.py shell_plus'
alias pyman='python manage.py '
alias pipr='pip install -r requirements.txt'
alias ipy="python -c 'import IPython; IPython.terminal.ipapp.launch_new_instance()'"
# project SSH shortcuts (both currently point at the same host)
alias micebook-dev="ssh -i ~/.ssh/micebook-development.pem ubuntu@ec2-54-154-228-174.eu-west-1.compute.amazonaws.com"
alias micebook-stage="ssh -i ~/.ssh/micebook-development.pem ubuntu@ec2-54-154-228-174.eu-west-1.compute.amazonaws.com"
alias gl="git log --oneline --graph --date=short --pretty"
# Run the project's lint test suite via Django's test runner.
lint-check() {
python ./manage.py test server.tests.test_for_lint_errors
}
# Pull in additional host-specific aliases when present.
if [ -f ~/.vim/.fh_bash_aliases ]; then
. ~/.vim/.fh_bash_aliases
fi
| true
|
6a14a95fe2d17527943a669dbac06275b4d06074
|
Shell
|
ahopgood/Vagrant-Puppet
|
/modules/mysql/examples/inplace-upgrade-to-mysql-5.5.x.sh
|
UTF-8
| 895
| 3.015625
| 3
|
[] |
no_license
|
#! /usr/bin/env bash
# In-place upgrade from an older MySQL to 5.5.x using the official RPMs
# (expected under /vagrant), followed by mysql_upgrade on the data files.
VERSION="5.5.49"
echo "Stopping current MySQL instance"
sudo /etc/init.d/mysql stop
printf "Removing the following MySQL packages:\n$(rpm -qa | grep -i '^mysql-')"
sudo rpm -e $(rpm -qa | grep -i '^mysql-')
echo "Upgrading to mysql-$VERSION"
sudo rpm -Uvh /vagrant/MySQL-shared-$VERSION-1.rhel5.x86_64.rpm
sudo rpm -Uvh /vagrant/MySQL-server-$VERSION-1.rhel5.x86_64.rpm
echo "Installing MySQL client"
sudo rpm -Uvh /vagrant/MySQL-client-$VERSION-1.rhel5.x86_64.rpm
echo "Starting mysql"
sudo /etc/init.d/mysql start
# SECURITY NOTE(review): the root password is hard-coded and passed on
# the command line, where it is visible in `ps` output and shell history;
# consider an option file or prompting instead.
PASSWORD="c0ldc0mput3r5"
echo "Running mysql_upgrade"
sudo mysql_upgrade -uroot -p$PASSWORD > $VERSION-in-place-upgrade.txt
#echo "Starting MySQL manually as it lost this ability between 5.1.72 and $VERSION"
#sudo /etc/init.d/mysql start
#echo "Resetting root password to the rubbish officeap2 one"
#/usr/bin/mysqladmin -u root password 'c0ldc0mput3r5'
| true
|
d73e5f121a78914a14a527b1480bacf54a4804a2
|
Shell
|
honestlybc/testnets
|
/devops/relayer/generate_keys.sh
|
UTF-8
| 373
| 3.171875
| 3
|
[] |
no_license
|
#!/bin/bash
# Generate a relayer key for a chain inside the dockerized relayer.
# Usage: generate_keys.sh <chain-id> <key-name>
# (set -u makes missing positional arguments a hard error.)
set -euo pipefail
# set all variables (each overridable via the environment)
REPOSITORY=${REPOSITORY:-cosmwasm/relayer}
RELAYER_VERSION=${RELAYER_VERSION:-latest}
DOCKER="$REPOSITORY:$RELAYER_VERSION"
RELAYER_HOME="${RELAYER_HOME:-/root/.relayer}"
CHAIN_ID=$1
KEY_NAME=$2
# BUGFIX: quote all expansions so image names, chain ids and key names
# containing unusual characters are passed through intact.
docker run --rm \
  --mount type=bind,source="$RELAYER_HOME",target=/root/.relayer \
  "$DOCKER" keys add "$CHAIN_ID" "$KEY_NAME"
| true
|
1afcbfb949aeb19db6fec2545cae45888ff300d7
|
Shell
|
MichalMaruska-TomTom/mmc-shell
|
/bin/wait_pid
|
UTF-8
| 114
| 2.984375
| 3
|
[] |
no_license
|
#! /bin/bash
# wait_pid -- block until the process with the given PID terminates.
# Usage: wait_pid <pid>
# Polls with `kill -0` (signal 0 = existence check only) every 10 seconds.
if [ $# -lt 1 ]
then
    # BUGFIX: `exit -1` is not a valid exit status (it wraps to 255);
    # use a conventional non-zero code instead.
    exit 1
fi
pid=$1
#ps uww -p
# NOTE: kill -0 also fails with "permission denied" for processes we may
# not signal, so this can return early for PIDs owned by other users.
while kill -0 "$pid" 2>/dev/null
do
    sleep 10s
done
| true
|
fb3f41e96d9ed36816821ae62713f05843575d45
|
Shell
|
surynek/boOX
|
/boOX-y/scripts/expr_mapsol_rota-c_den520d_mdd++.sh
|
UTF-8
| 1,310
| 2.640625
| 3
|
[] |
no_license
|
ROBOT_LIST=`cat robots_maps_c`
SEED_LIST=`cat seeds_10`

SIZE=16
TIMEOUT=`cat timeout`

# Run the rota/MDD++ solver on den520d instances for every cluster size,
# agent count and seed.  The three hand-unrolled copies of the original
# loop (c2, c3, c4) are folded into one outer loop; the iteration order
# and all generated file names are identical to before.
for CLUSTER in 2 3 4;
do
  for ROBOTS in $ROBOT_LIST;
  do
    for SEED in $SEED_LIST;
    do
      echo 'Solving den520d instance with '$ROBOTS' agents ...'
      ../main/insolver_reLOC '--total-timeout='$TIMEOUT '--minisat-timeout='$TIMEOUT --encoding=pcmdd '--input-file=den520d_c'$CLUSTER'_a'$ROBOTS'_'$SEED'.cpf' '--output-file=rota-mdd++_den520d_c'$CLUSTER'_a'$ROBOTS'_'$SEED'.out' > 'rota-mdd++_den520d_c'$CLUSTER'_a'$ROBOTS'_'$SEED'.txt'
    done
  done
done
| true
|
988d78e2e1d9eb966ae5dd0aae141fd97fca16a3
|
Shell
|
etekweb/CanvasCLI
|
/install.sh
|
UTF-8
| 687
| 3.6875
| 4
|
[] |
no_license
|
#!/bin/bash
# CanvasCLI installer: copies cli.py into ~/.canvas and registers a
# persistent `canvas` alias in the user's shell rc files.
cd "${BASH_SOURCE%/*}" || exit
echo "Welcome to CanvasCLI Installer."
echo "This will allow you to submit assignments directly in Linux shell."
echo "An alias will be made, 'canvas', to use the CLI."
echo -n "Install now? (y/n) "
# BUGFIX: -r keeps backslashes in the reply literal.
read -r answer
if [ "$answer" != "${answer#[Yy]}" ] ;then
	# BUGFIX: -p so a re-install does not fail on the existing directory.
	mkdir -p ~/.canvas
	cp "$(dirname -- "$0")/cli.py" ~/.canvas/cli.py
	pip3 install requests
	# NOTE: this alias only affects the installer's own shell; the rc-file
	# entries below are what make `canvas` available in future sessions.
	alias canvas='python3 ~/.canvas/cli.py'
	echo >> ~/.bashrc
	echo "alias canvas='python3 ~/.canvas/cli.py'" >> ~/.bashrc
	echo >> ~/.bash_profile
	echo "alias canvas='python3 ~/.canvas/cli.py'" >> ~/.bash_profile
	echo "Install complete."
else
	echo Goodbye
fi
# BUGFIX: quote in case the previous directory path contains spaces.
cd "$OLDPWD"
| true
|
467acbc5d05fdc14d45f8630d84dd4c36d10bc0f
|
Shell
|
fcrespo82/dotfiles
|
/.zshrc
|
UTF-8
| 6,045
| 2.75
| 3
|
[] |
no_license
|
source $HOME/.dotfiles_dir
# If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:/usr/local/bin:$PATH
# Pull in shared helper functions/utilities from the dotfiles repo
# (DOTFILES_DIR is set by the file sourced above).
source $DOTFILES_DIR/functions
source $DOTFILES_DIR/utils
# Path to your oh-my-zsh installation.
export ZSH="$HOME/.oh-my-zsh"
# Set name of the theme to load. Optionally, if you set this to "random"
# it'll load a random theme each time that oh-my-zsh is loaded.
# See https://github.com/robbyrussell/oh-my-zsh/wiki/Themes
ZSH_THEME="mortalscumbag"
# Set list of themes to load
# Setting this variable when ZSH_THEME=random
# cause zsh load theme from this variable instead of
# looking in ~/.oh-my-zsh/themes/
# An empty array have no effect
# ZSH_THEME_RANDOM_CANDIDATES=( "robbyrussell" "agnoster" )
# Uncomment the following line to use case-sensitive completion.
# CASE_SENSITIVE="true"
# Uncomment the following line to use hyphen-insensitive completion. Case
# sensitive completion must be off. _ and - will be interchangeable.
# HYPHEN_INSENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to change how often to auto-update (in days).
# export UPDATE_ZSH_DAYS=13
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to enable command auto-correction.
# ENABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
# DISABLE_UNTRACKED_FILES_DIRTY="true"
# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# The optional three formats: "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
# HIST_STAMPS="mm/dd/yyyy"
# Would you like to use another custom folder than $ZSH/custom?
# ZSH_CUSTOM=/path/to/new-custom-folder
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
plugins=(
git
asdf
docker
encode64
node
npm
sudo
urltools
jsontools
vscode
extract
colorize
catimg
fast-syntax-highlighting
osx
swiftpm
)
source $ZSH/oh-my-zsh.sh
# User configuration
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
# export EDITOR='vim'
# else
# export EDITOR='mvim'
# fi
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# ssh
# export SSH_KEY_PATH="~/.ssh/rsa_id"
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
# alias zshconfig="mate ~/.zshrc"
# alias ohmyzsh="mate ~/.oh-my-zsh"
# Don't record a command in history if it duplicates an earlier entry.
setopt histignorealldups
# Use emacs keybindings even if our EDITOR is set to vi
bindkey -e
# Keep 1000 lines of history within the shell and save it to ~/.zsh_history:
HISTSIZE=1000
SAVEHIST=1000
HISTFILE=~/.zsh_history
alias sudo='sudo ' # Allow sudo other aliases
alias watch='watch ' # Allow watch other aliases
alias grep='grep --color'
# Pick per-OS `ls` flags (GNU vs BSD color option).
case $(uname -s) in
Linux)
LS_COLOR_FLAG="--color"
LS_CUSTOM_FLAGS="-Fh"
;;
Darwin)
LS_COLOR_FLAG="-G"
LS_CUSTOM_FLAGS="-Fh"
;;
esac
# export LS_COLORS="di=01;34:ln=01;36:so=01;35:pi=01;33:ex=01;32:bd=01;33:cd=01;33:su=01;00:sg=01;00;41:tw=01;00;46:ow=01;00;42:"
alias ls='ls ${LS_COLOR_FLAG} ${LS_CUSTOM_FLAGS}'
alias l='ls'
alias lsa='ls -A'
alias ll='ls -l' # all files, in long format
alias lla='ll -A' # all files inc dotfiles, in long format
alias lld='ll | grep "/$"' # only directories
alias lls='echo "Symbolic Links:"; lla | cut -d":" -f 2 | cut -c 4- | grep "\->"'
alias today='cal -h | egrep -C 4 "\b$(date +%-d)\b" --color'
alias untar='tar -vxf' # Extract files
alias untar-bz2='tar -vxjf' # Extract bz2 files
alias untar-gzip='tar -vxzf' # Extract gzip files
alias tar-bz2='tar -vcjf' # Create bz2 files
alias tar-gzip='tar -vczf' # Create gzip files
alias pkill='pkill -f'
# Load asdf version manager when installed.
if [ -f $HOME/.asdf/asdf.sh ]; then
source $HOME/.asdf/asdf.sh
source $HOME/.asdf/completions/asdf.bash
fi
export EDITOR="code -w"
# Replace macos coreutils with gnu versions
case $(uname -s) in
Darwin)
if [ -d /usr/local/opt/coreutils/libexec/gnubin ]; then
export PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH"
# Override default flag for macOS since we have gnu coreutils in PATH
LS_COLOR_FLAG="--color"
alias ls='ls ${LS_COLOR_FLAG} ${LS_CUSTOM_FLAGS}'
fi
if [ -d /usr/local/opt/gnu-sed/libexec/gnubin ]; then
export PATH="/usr/local/opt/gnu-sed/libexec/gnubin:$PATH"
fi
;;
esac
if [ -d /usr/local/opt/e2fsprogs ]; then
export PATH="/usr/local/opt/e2fsprogs/bin:$PATH"
export PATH="/usr/local/opt/e2fsprogs/sbin:$PATH"
fi
# Put local bin in path
if [ -d /usr/local/bin ]; then
export PATH="/usr/local/bin:$PATH"
fi
if [ -d /usr/local/sbin ]; then
export PATH="/usr/local/sbin:$PATH"
fi
if [ -d $HOME/bin ]; then
export PATH="$HOME/bin:$PATH"
fi
case $(uname -s) in
Darwin)
export PATH="$DOTFILES_DIR/macOS/bin:$PATH"
;;
esac
source $DOTFILES_DIR/env
test -e "${HOME}/.iterm2_shell_integration.zsh" && source "${HOME}/.iterm2_shell_integration.zsh"
# NOTE(review): `jdk` is presumably a helper defined in
# $DOTFILES_DIR/functions -- selects the JDK version on macOS; confirm.
case $(uname -s) in
Darwin)
jdk 11
;;
esac
| true
|
e2932f806c07609f6479e0c2488221cf1cf69920
|
Shell
|
adelmanm/approx
|
/src/pytorch/wide-resnet.pytorch/train_wide_resnet28_cifar10.sh
|
UTF-8
| 927
| 3
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Launch one Wide-ResNet-28-10 CIFAR-10 training run, logging all
# parameters plus the training output to a per-configuration log file.
# example usage: ./train_wide_resnet28_cifar100.sh opt_norep_noscale 0.5 10 gpu0 fwd
algorithm=$1
sample_ratio=$2
minimal_k=$3
# NOTE(review): lr is fixed here rather than taken from the arguments.
lr=0.15
gpuid=$4
prefix=$5
suffix=$6
config_str=wrn28-10_${algorithm}_${prefix}_${sample_ratio}${suffix}_minimal_k_${minimal_k}
log_dir=${gpuid}_logs
output_log=${log_dir}/${config_str}.log
# stdbuf -oL/-eL line-buffers python's output so the log updates live;
# the `&>>` redirection is part of the string because it is run via eval.
run_cmd_train="stdbuf -oL -eL python3 main.py --lr ${lr} --net_type 'wide-resnet' --depth 28 --widen_factor 10 --dropout 0 --dataset 'cifar10' --batch_size 256 --sample_ratio ${sample_ratio} --minimal_k ${minimal_k} &>> ${output_log}"
echo "running wide resnet 28 training script" | tee ${output_log}
echo "algorithm is $algorithm $prefix $suffix" | tee -a ${output_log}
echo "sample_ratio is $sample_ratio" | tee -a ${output_log}
echo "minimal_k is $minimal_k" | tee -a ${output_log}
echo "lr is $lr" | tee -a ${output_log}
echo "${run_cmd_train}" | tee -a ${output_log}
eval $run_cmd_train
| true
|
64fafb2851c19254cc091851be20aabf480d8ae2
|
Shell
|
vrosnet/micropython
|
/stmhal/autoflash
|
UTF-8
| 559
| 3.3125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
#
# Endless flash loop: block until a device in DFU mode appears on the
# USB bus, flash it, then block until it leaves DFU mode, and repeat.
while true; do
    echo "waiting for DFU device..."
    # Poll the USB bus once a second until a DFU device shows up.
    until lsusb | grep -q DFU; do
        sleep 1s
    done
    echo "found DFU device, flashing"
    dfu-util -a 0 -D build/flash.dfu
    echo "waiting for DFU to exit..."
    # Keep polling while the device is still in DFU mode.
    while lsusb | grep -q DFU; do
        sleep 1s
    done
done
| true
|
9d4df680fe3d65a30ea640573dc27d8ba283689a
|
Shell
|
mrjulio/freebsd-unattended
|
/nano/install.sh
|
UTF-8
| 383
| 3.046875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
#
# nano
#
# Install nano from the FreeBSD ports tree (skipped when pkg_info shows
# it already installed) and drop a preconfigured .nanorc into the target
# user's home directory.
# NOTE(review): echoStatus, COLOR_*, SCRIPT_PATH and BSD_USER are not
# defined here -- presumably provided by the calling install framework.
if [ $(pkg_info | grep -c ^nano-) -eq 0 ]
then
echoStatus $COLOR_BLUE '[INSTALL] nano'
cd /usr/ports/editors/nano
make -DBATCH install clean
cp $SCRIPT_PATH/nano/.nanorc /home/$BSD_USER/.nanorc
chown $BSD_USER /home/$BSD_USER/.nanorc
echoStatus $COLOR_GREEN '[FINISH] nano'
else
echoStatus $COLOR_WHITE '[SKIP] nano'
fi
| true
|
fa9db6f34cdb0fa77cd5aeba79b1517ebefa6972
|
Shell
|
moneytech/LuxCC
|
/scripts/test_exe_vm.sh
|
UTF-8
| 1,408
| 3.59375
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Run the execution test suite: compile every test with the compiler
# under test (luxdvr -> lux VM) and with a reference compiler (gcc),
# run both, and compare their outputs.
# compiler being tested
CC1=src/luxdvr/luxdvr
# reference compiler
CC2=gcc
VM=src/luxvm/luxvm
TESTS_PATH=src/tests/execute
# Pick the VM word size matching the host architecture.
if uname -i | grep -q "i386"; then
	CC1="$CC1 -q -mvm32"
else
	CC1="$CC1 -q -mvm64"
fi
fail_counter=0
fail_files=""
pass_counter=0
echo "== Execution tests begin... =="
if [ "$LUX_QUIET" = "1" ] ; then
	echo "Running tests..."
fi
#for file in $TESTS_PATH/*.c ; do
for file in $(find $TESTS_PATH/ | grep '\.c') ; do
	# skip 'other' tests
	if echo "$file" | grep -q "$TESTS_PATH/other" ; then
		continue;
	fi
	# avoid llvm benchmarks
	if echo "$file" | grep -q "llvm"; then
		continue
	fi
	if [ ! "$LUX_QUIET" = "1" ] ; then
		echo "$file"
	fi
	# out1: compile with the compiler under test and run on the VM
	$CC1 "$file" -o $TESTS_PATH/test1.vme &>/dev/null &&
	$VM $TESTS_PATH/test1.vme >"${file%.*}.output" 2>/dev/null
	rm -f $TESTS_PATH/test1.vme
	# out2: (re)generate the expected output with the reference compiler,
	# unless the caller asked to reuse existing .expect files
	if [ ! "$LUX_DONT_RECALC" = "1" ] ; then
		$CC2 "$file" -o $TESTS_PATH/test2 2>/dev/null
		$TESTS_PATH/test2 >"${file%.*}.expect" 2>/dev/null
		rm -f $TESTS_PATH/test2
	fi
	# compare
	if ! cmp -s "${file%.*}.output" "${file%.*}.expect" ; then
		echo "failed: $file"
		# IDIOM: POSIX arithmetic expansion instead of the obsolete `let`
		fail_counter=$((fail_counter+1))
		fail_files="$fail_files $file"
	else
		pass_counter=$((pass_counter+1))
	fi
	# clean
	rm -f "${file%.*}.output"
done
echo "== Execution tests results: PASS: $pass_counter, FAIL: $fail_counter =="
if [ "$fail_counter" = "0" ] ; then
	exit 0
else
	exit 1
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.