blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
fd039c2fea4f4fc00b7d8aa298a9e2a82875be07
|
Shell
|
vusec/osiris
|
/apps/minix/scripts/drec_siteid2caller.sh
|
UTF-8
| 958
| 3.265625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Map a drec site id back to the function containing the corresponding
# ltckpt callsite, by scanning the disassembled LLVM bitcode of a MINIX
# server binary.
#
# Usage: drec_siteid2caller.sh <server> <site id>
# Requires: MROOT = path to llvm-apps/apps/minix directory.
if [ $# -lt 2 ]
then
	echo "Usage: $0 <server> <site id>"
	exit 1
fi
if [ "$MROOT" == "" ]
then
	echo "Please set MROOT env variable and try again."
	echo "MROOT : path to llvm-apps/apps/minix directory"
	exit 1
fi
# Quote all derived paths: MROOT may contain spaces/metacharacters.
MTOOLS="$MROOT/obj.i386/tooldir.$(uname -s)-$(uname -r)-$(uname -m)/bin"
SRVBIN="$MROOT/obj.i386/minix/servers"
SERVER=$1
SITE_ID=$2
if [ ! -f "$SRVBIN/$SERVER/$SERVER.opt.bcl" ]
then
	echo "Error: File not found: $SRVBIN/$SERVER/$SERVER.opt.bcl"
	exit 2
fi
# Disassemble the bitcode once; later runs reuse the cached .ll file.
if [ ! -f "$SRVBIN/$SERVER/$SERVER.opt.bcl.ll" ]
then
	"$MTOOLS/llvm-dis" "$SRVBIN/$SERVER/$SERVER.opt.bcl"
fi
tmp_file="/tmp/drec_callsite_func"
# Look at an increasing window of lines above the ltckpt callsite until the
# enclosing "define" line (the containing function) falls inside it.
for i in 20 40 60 100 200 500
do
	# Pipeline status is that of the final grep -o: success only when a
	# function name was actually extracted.
	if grep -B "$i" "ltckpt_.*(i64\ $SITE_ID" "$SRVBIN/$SERVER/$SERVER.opt.bcl.ll" \
		| tac | grep "define " | grep -o "@[a-zA-Z0-9_]*" > "$tmp_file"
	then
		# After tac, the first match is the nearest enclosing function.
		head -n 1 "$tmp_file" | tr -d "@"
		break
	fi
done
rm -f "$tmp_file"
| true
|
f58a925ca81c0c3a4f70cf18657967e51ddc6b3c
|
Shell
|
AlphenLai/Shell_Scripts
|
/_startall.sh
|
UTF-8
| 841
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch the aruco marker xterm sessions; pick the APM startup variant
# depending on whether this is the first boot after power-up.
#
# Fixed: the original `set timeout 500` is an expect(1) command; in bash it
# only overwrote the positional parameters, so it was removed.
echo "Is this the first time after your pi boots? (y/n)"
read ans
# Re-prompt until the answer is exactly 'y' or 'n'.
# Fixed: the original used (( "$ans" != 'y' || "$ans" != 'n' )) — an
# *arithmetic* context treats the strings as variable names, so the check
# never rejected bad input. Use a string comparison with && instead.
while [[ "$ans" != 'y' && "$ans" != 'n' ]]; do
	echo "(y/n)"
	read ans
done
if [[ $ans == 'y' ]]; then
	xterm -hold -e "~/aruco_maker_script/1_cam.sh" & sleep 5
	xterm -hold -e "~/aruco_maker_script/2x_apm_port.sh" & sleep 35
	xterm -hold -e "~/aruco_maker_script/3_ori_n_rate.sh" & sleep 1
	xterm -hold -e "~/aruco_maker_script/4_script_dir.sh" & sleep 1
else
	xterm -hold -e "~/aruco_maker_script/1_cam.sh" & sleep 5
	xterm -hold -e "~/aruco_maker_script/2_apm.sh" & sleep 35
	xterm -hold -e "~/aruco_maker_script/3_ori_n_rate.sh" & sleep 1
	xterm -hold -e "~/aruco_maker_script/4_script_dir.sh" & sleep 1
fi
echo "Operation done. Please open RVIZ by yourself."
read -n1 -p "Enter any key to escape..." any
| true
|
412c8446962dd5fd15ee4655c52619f4c9cd5cc8
|
Shell
|
mcandre/rustx
|
/build/rustt
|
UTF-8
| 283
| 3.5625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# compile and run test code
#
# Fixed: use ## (longest-match strip) so names with several dots, e.g.
# "foo.test.rs", are reduced to their *final* suffix. The original
# ${1#*.} yielded "test.rs" and wrongly rejected such files.
if [[ ! "${1##*.}" == "rs" ]]
then
    ME="$(basename "$0")"
    echo "Usage: $ME rust-source-file.rs"
    exit 1
fi
# Build the test binary into a temp file, then exec it so extra args
# (test filters etc.) are forwarded to the test runner.
TORUN="$(mktemp -t rustt.XXXXXXXXXX)"
SOURCE="$1"
shift 1
rustc -L . --test -o "$TORUN" "$SOURCE" \
  && exec "$TORUN" "$@"
| true
|
13a88d920075b83cf637eb90e3976a8c3527ce11
|
Shell
|
mgrusconi/scripts
|
/pluggins_git/files/git_plugins_color_branch/bashrc
|
UTF-8
| 337
| 3.5
| 4
|
[] |
no_license
|
# git-related functions in here
git_branch () {
  # Emit "[branch]" for the current git branch, or nothing when the
  # current directory is not inside a repository.
  GIT_BRANCH="$(git branch --no-color 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/\1/')"
  if [[ "$GIT_BRANCH" != '' ]] ; then
    printf '%s\n' "[$GIT_BRANCH]"
  fi
}
# bash prompt
# bash prompt: user@host:cwd with the git branch highlighted in cyan.
prompt () {
  local cyan='\[\033[0;36m\]'
  local plain='\[\033[0m\]'
  # $(git_branch) is kept literal (single quotes) so it is evaluated each
  # time the prompt is rendered, not once here.
  PS1='\u@\h:\w'"${cyan}"'$(git_branch)'"${plain}"'$ '
}
# Re-run prompt before every prompt display.
export PROMPT_COMMAND=prompt
| true
|
024dbbf97d5bb5bf50837aa192f6f1946c7aabc6
|
Shell
|
artnez/environment
|
/scripts/remove-dotfile-symlinks
|
UTF-8
| 363
| 3.515625
| 4
|
[] |
no_license
|
#! /usr/bin/env bash
# Remove dotfile symlinks in $HOME that point back into this repository.
#
# Fixed: $LINK is now quoted everywhere — the original broke on dotfile
# names containing spaces — and backticks were replaced with $( ).
BASEPATH=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)
for LINK in "$HOME"/.*; do
  # Only consider symlinks.
  if [ ! -h "$LINK" ]; then
    continue
  fi
  # Fully resolve the link target (python realpath follows chains too).
  LINKPATH=$(python -c "import os.path, sys; print(os.path.realpath(sys.argv[1]))" "$LINK")
  # Skip links that do not resolve into this repository.
  # RHS is deliberately unquoted so it acts as a glob pattern.
  if [[ ! $LINKPATH == $BASEPATH/* ]]; then
    continue
  fi
  echo "Unlinking: $LINK"
  rm -- "$LINK"
done
| true
|
2226321ea290b6b23a1f838eb0ac2ead200ea193
|
Shell
|
AMeikle99/COMP2041-Ass1
|
/test09.sh
|
UTF-8
| 867
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# Test 09 - Testing rm with two flags: --cached and --force, both expected
# to exit with status 0.
# NOTE(review): the original header said "Test 10" but the file is
# test09.sh — kept the behavior, fixed the comment.
red=`tput setaf 1`
green=`tput setaf 2`
reset=`tput sgr0`
bell=`tput bel`
# Start from a clean repository, add a file and make an initial commit.
rm -rd ".legit"
./legit.pl init > /dev/null 2>&1
seq 1 10 > a #same as repo
#Add files and commit normally
./legit.pl add a
./legit.pl commit -m "initial" > /dev/null 2>&1
# rm --cached must remove 'a' from the index only (working copy survives).
./legit.pl rm --cached a 2>&1
if [[ $? == 0 && ! -e ".legit/index/a" ]]
then
echo "${green}Successful - rm (with --cached) works${reset}"
else
echo "${red}Failed - rm (with --cached) doesn't work${reset}"
exit 1
fi
# Stage the file again and dirty it (staged + unstaged changes), then
# rm --force must still delete it from disk.
./legit.pl add a
echo 11 >> a
./legit.pl add a
echo 12 >> a
./legit.pl rm --force a > /dev/null 2>&1
if [[ $? == 0 && ! -e a ]]
then
echo "${green}Successful - rm (with --force) works${reset}"
else
echo "${red}Failed - rm (with -- force) doesn't work${reset}"
exit 1
fi
| true
|
e15e369968c559fa7c153b25d924ba69a9ec7874
|
Shell
|
EvanHahn/dotfiles
|
/home/bin/bin/copy
|
UTF-8
| 336
| 3.359375
| 3
|
[
"Unlicense"
] |
permissive
|
#!/usr/bin/env bash
# copy: send stdin (or a named file) to the system clipboard, trying the
# available clipboard tools in order; with none installed, stash the data
# in /tmp/clipboard as a poor man's clipboard.
set -eu
if hash pbcopy 2>/dev/null; then
  exec pbcopy
elif hash xclip 2>/dev/null; then
  exec xclip -selection clipboard
elif hash putclip 2>/dev/null; then
  exec putclip
else
  # Fallback: replace the stash file with fresh contents.
  rm -f /tmp/clipboard 2> /dev/null
  if [ $# -eq 0 ]; then
    cat > /tmp/clipboard
  else
    cat "$1" > /tmp/clipboard
  fi
fi
| true
|
868e92b8ad5d538ce249fa108c351d471407cb07
|
Shell
|
ev3dev/docker-library
|
/ev3dev-stretch/layers/base/brickstrap/_report/release-notes.sh
|
UTF-8
| 468
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Create a release notes template file.
#
# Expects BRICKSTRAP_IMAGE_NAME in the environment (set by brickstrap) and
# writes <image>-release-notes.md under the report output directory,
# embedding the current package list.
REPORT_DIR="/brickstrap/_report/_out"
REPORT_FILE="$REPORT_DIR/$BRICKSTRAP_IMAGE_NAME-release-notes.md"
echo "Creating $REPORT_FILE template..."
# Unquoted EOF: $BRICKSTRAP_IMAGE_NAME and $(dpkg -l) expand inside the
# heredoc; the escaped \`\`\` are written literally as markdown fences.
cat > "$REPORT_FILE" << EOF
Release notes for $BRICKSTRAP_IMAGE_NAME
========================================
Changes from previous version
-----------------------------
Known issues
------------
Included Packages
-----------------
\`\`\`
$(dpkg -l)
\`\`\`
EOF
| true
|
0a99a8bc1838f4082028d3f64f8558bf3a9cdcbf
|
Shell
|
AzusaOS/azusa-opensource-recipes
|
/media-gfx/blender/blender-3.3.6.sh
|
UTF-8
| 4,759
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/sh
# Azusa recipe: fetch, patch, configure, build and package Blender ${PV}
# under slot ${BV}. Helper commands (get, acheck, apatch, inherit,
# importpkg, docmake, finalize) and variables (P, PV, S, T, D, PKG, PVRF,
# FILESDIR, LIB_SUFFIX, PYTHON_LATEST) come from common/init.sh and the
# python eclass sourced below.
source "../../common/init.sh"
inherit python
get https://download.blender.org/source/${P}.tar.xz
acheck
cd "${S}"
apatch "$FILESDIR/blender-3.3.0-fix-build-with-boost-1.81.patch"
# slot (major.minor, e.g. 3.3 for 3.3.6)
BV="${PV%.*}"
# Disable MS Windows help generation. The variable doesn't do what it
# it sounds like.
sed -e "s|GENERATE_HTMLHELP = YES|GENERATE_HTMLHELP = NO|" \
-i doc/doxygen/Doxyfile
# Prepare icons and .desktop files for slotting.
sed -e "s|blender.svg|blender-${BV}.svg|" -i source/creator/CMakeLists.txt
sed -e "s|blender-symbolic.svg|blender-${BV}-symbolic.svg|" -i source/creator/CMakeLists.txt
sed -e "s|blender.desktop|blender-${BV}.desktop|" -i source/creator/CMakeLists.txt
sed -e "s|Name=Blender|Name=Blender ${PV}|" -i release/freedesktop/blender.desktop
sed -e "s|Exec=blender|Exec=blender-${BV}|" -i release/freedesktop/blender.desktop
sed -e "s|Icon=blender|Icon=blender-${BV}|" -i release/freedesktop/blender.desktop
mv release/freedesktop/icons/scalable/apps/blender.svg release/freedesktop/icons/scalable/apps/blender-${BV}.svg
mv release/freedesktop/icons/symbolic/apps/blender-symbolic.svg release/freedesktop/icons/symbolic/apps/blender-${BV}-symbolic.svg
mv release/freedesktop/blender.desktop release/freedesktop/blender-${BV}.desktop
# Build happens out of tree in ${T}.
cd "${T}"
# Build-time dependency packages, resolved into the environment by
# importpkg below.
PKGS=(
libjpeg
libpng
zlib
app-arch/zstd
media-libs/openjpeg
media-libs/tiff
media-libs/freetype
media-libs/opencolorio
media-libs/openimageio
media-libs/embree
dev-cpp/tbb
dev-libs/gmp
sys-devel/clang
media-libs/libsdl2
media-libs/openal
media-libs/libsndfile
sci-libs/fftw
dev-libs/jemalloc
media-gfx/openvdb
dev-libs/boost
media-libs/libharu
dev-libs/lzo
dev-cpp/eigen
dev-libs/pugixml
media-libs/osl
media-gfx/potrace
media-gfx/alembic
media-video/ffmpeg
gl
glew
glu
glut
media-libs/openexr
dev-libs/imath
Imath
)
# ilmbase + openexr2 or imath+openexr3 ?
# openimage depend on imath/openexr3
importpkg "${PKGS[@]}"
# CMake configuration handed to docmake. Python paths are pinned to the
# packaged interpreter so Blender does not pick up a host python.
CMAKEOPTS=(
-DBUILD_SHARED_LIBS=OFF # to avoid inter-target dependency graph issues
-DEigen3_ROOT=/pkg/main/dev-cpp.eigen.dev
-DPYTHON_INCLUDE_DIR=/pkg/main/dev-lang.python.dev.$PYTHON_LATEST/include/python${PYTHON_LATEST%.*}
-DPYTHON_LIBRARY=/pkg/main/dev-lang.python.libs.$PYTHON_LATEST/lib$LIB_SUFFIX/libpython${PYTHON_LATEST%.*}.so
-DPYTHON_VERSION=${PYTHON_LATEST}
-DWITH_ALEMBIC=ON
-DWITH_ASSERT_ABORT=OFF
-DWITH_BOOST=ON
-DWITH_BULLET=ON
-DWITH_CODEC_FFMPEG=ON
-DWITH_CODEC_SNDFILE=ON
-DWITH_CXX_GUARDEDALLOC=OFF
-DWITH_CYCLES=ON
-DWITH_CYCLES_DEVICE_CUDA=TRUE
-DWITH_CYCLES_DEVICE_OPENCL=ON
-DWITH_CYCLES_EMBREE=ON
-DWITH_CYCLES_OSL=ON
-DWITH_CYCLES_STANDALONE=ON
-DWITH_CYCLES_STANDALONE_GUI=ON
-DWITH_DOC_MANPAGE=ON
-DWITH_FFTW3=ON
-DWITH_GMP=ON
-DWITH_GTESTS=OFF
-DWITH_HARU=ON
-DWITH_HEADLESS=ON
-DWITH_INSTALL_PORTABLE=OFF
-DWITH_IMAGE_DDS=ON
-DWITH_IMAGE_OPENEXR=ON
-DWITH_IMAGE_OPENJPEG=ON
-DWITH_IMAGE_TIFF=ON
-DWITH_INPUT_NDOF=ON
-DWITH_INTERNATIONAL=ON
-DWITH_JACK=ON
-DWITH_MEM_JEMALLOC=ON
-DWITH_MEM_VALGRIND=OFF
-DWITH_MOD_FLUID=ON
-DWITH_MOD_OCEANSIM=ON
-DWITH_NANOVDB=OFF
-DWITH_OPENAL=ON
-DWITH_OPENCOLLADA=ON
-DWITH_OPENCOLORIO=ON
-DWITH_OPENIMAGEDENOISE=ON
-DWITH_OPENIMAGEIO=ON
-DWITH_OPENMP=ON
-DWITH_OPENSUBDIV=ON
-DWITH_OPENVDB=ON
-DWITH_OPENVDB_BLOSC=ON
-DWITH_POTRACE=ON
-DWITH_PUGIXML=ON
-DWITH_PULSEAUDIO=ON
-DWITH_PYTHON_INSTALL=OFF
-DPYTHON_NUMPY_PATH=/pkg/main/dev-python.numpy.mod/lib/python${PYTHON_LATEST%.*}/site-packages/numpy
-DPYTHON_NUMPY_INCLUDE_DIRS=/pkg/main/dev-python.numpy.mod/lib/python${PYTHON_LATEST%.*}/site-packages/numpy/core/include
-DWITH_SDL=ON
-DWITH_STATIC_LIBS=OFF
-DWITH_SYSTEM_EIGEN3=ON
-DWITH_SYSTEM_GLEW=ON
-DWITH_SYSTEM_LZO=ON
-DWITH_TBB=ON
-DWITH_USD=OFF
-DWITH_XR_OPENXR=OFF
)
docmake "${CMAKEOPTS[@]}"
# Slot the man page and the real binary (moved to libexec).
# /build/blender-2.93.9/dist/pkg/main/media-gfx.blender.core.2.93.9.linux.amd64/share/man/man1/blender.1
mv -v "${D}/pkg/main/${PKG}.core.${PVRF}/share/man/man1/blender.1" "${D}/pkg/main/${PKG}.core.${PVRF}/share/man/man1/blender-${BV}.1"
mkdir -pv "${D}/pkg/main/${PKG}.core.${PVRF}/libexec"
mv -v "${D}/pkg/main/${PKG}.core.${PVRF}/bin/blender" "${D}/pkg/main/${PKG}.core.${PVRF}/libexec/blender-${BV}"
# create a fake blender executable which sets the required PYTHONHOME and PYTHONPATH variables for blender to work
cat >"${D}/pkg/main/${PKG}.core.${PVRF}/bin/blender-${BV}" <<EOF
#!/bin/bash
export PYTHONHOME="/pkg/main/dev-lang.python.core.${PYTHON_LATEST%.*}"
export PYTHONPATH=":/pkg/main/dev-lang.python-modules.core.${PYTHON_LATEST%.*}/lib/python${PYTHON_LATEST%.*}:\$PYTHONHOME/lib/python${PYTHON_LATEST%.*}/lib-dynload"
exec "/pkg/main/${PKG}.core.${PVRF}/libexec/blender-${BV}" "\$@"
EOF
chmod +x "${D}/pkg/main/${PKG}.core.${PVRF}/bin/blender-${BV}"
finalize
| true
|
a351c754b68960020da1748f29241c1ea04412e6
|
Shell
|
ballab1/production-s3
|
/grafana.setup
|
UTF-8
| 3,555
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash
#############################################################################
#
# initialization for grafana
#
# One-time workspace provisioning guarded by a marker file; helper commands
# (environ.ipAddress, environ.hostName, deploy.passwordSecret,
# lib.yamlToJson) are provided by the deployment framework that runs this.
#
#############################################################################
[ "${CONFIG_DIR:-}" ] || CONFIG_DIR="$(pwd)"
[ "${WORKSPACE_DIR:-}" ] || WORKSPACE_DIR="$(pwd)/workspace.$(basename "$CONFIG_DIR")"
declare -r config_dir="${CONFIG_DIR:?}/grafana"
declare -r workspace_dir="${WORKSPACE_DIR:?}/grafana"
declare -r grafana_uid=${GRAFANA_UID:?}
# Marker file <workspace>/.grafana.init; its existence means the one-time
# init below already ran. (NOTE(review): "isIinitialized" is a typo for
# "isInitialized" — kept as-is since it is only a local name.)
declare isIinitialized="$(basename "${BASH_SOURCE[0]}")"
isIinitialized="${workspace_dir}/.${isIinitialized%.*}.init"
if [ ! -f "$isIinitialized" ]; then
# perform initialization
mkdir -p "$workspace_dir" ||:
sudo cp -r "${config_dir}" "${WORKSPACE_DIR}/"
sudo mkdir -p "${workspace_dir}/etc/provisioning/dashboards"
sudo mkdir -p "${workspace_dir}/etc/provisioning/datasources"
sudo mkdir -p "${workspace_dir}/etc/provisioning/notifiers"
sudo mkdir -p "${workspace_dir}/etc/provisioning/plugins"
declare -r host_ip=$(environ.ipAddress)
declare -r host_name=$(environ.hostName)
declare -r grafana_root_password="$(deploy.passwordSecret 'grafana' 'GF_SECURITY_ADMIN_PASSWORD' )"
declare -r ini=${workspace_dir}/etc/grafana.ini
# Patch grafana.ini in place with this host's identity and credentials.
sudo sed -i -r -e "s|^instance_name\\s*=.*$|instance_name = ${host_name}|" \
-e "s|^domain\\s*=.*$|domain = ${host_ip}|" \
-e "s|^root_url\\s*=.*$|root_url = http://${host_ip}:3000/grafana|" \
-e "s|admin_user\\s*=.*$|admin_user = ${CFG_USER}|" \
-e "s|<HOST_IP>|${host_ip}|g" \
-e "s|^admin_password\\s*=.*$|admin_password = ${grafana_root_password}|" \
"$ini"
sudo chown -R "$grafana_uid" "$workspace_dir"
# Seed the mysql loader with the grafana schema dump, once.
declare dumps_dir="${WORKSPACE_DIR}/mysql/loader/dumps"
if [ ! -e "${dumps_dir}/grafana.sql" ]; then
mkdir -p "$dumps_dir" ||:
cp "${config_dir}/grafana.sql" "${dumps_dir}/grafana.sql"
fi
declare -r mysql_root_password="$(deploy.passwordSecret 'mysql' 'MYSQL_ROOT_PASSWORD' )"
declare -r grafana_dbpass="$(deploy.passwordSecret 'grafana' 'GRAFANA_DBPASS' )"
# determine grafana dbuser
declare -r dc_json="$(lib.yamlToJson "${WORKSPACE_DIR}/docker-compose.yml")"
declare grafana_dbuser=$(jq --compact-output --monochrome-output --raw-output '.services.grafana.environment.GRAFANA_DBUSER' <<< "$dc_json")
# The compose value may itself reference env vars; eval expands them.
grafana_dbuser="$(eval echo "$grafana_dbuser")"
if [ "${grafana_dbuser:-null}" != 'null' ] && [ "${grafana_dbpass:-}" ]; then
# Generate the loader script that creates the grafana DB user; the
# <<- heredoc keeps the SQL tab-indentable.
cat <<-GRAFANA_DBUSER > "${WORKSPACE_DIR}/mysql/loader/grafana_user.sh"
#!/bin/bash
cat <<-EOSQL | mysql -uroot -p${mysql_root_password} -hlocalhost
CREATE DATABASE IF NOT EXISTS grafana;
CREATE USER IF NOT EXISTS '${grafana_dbuser}'@'%';
SET PASSWORD FOR '${grafana_dbuser}'@'%' = PASSWORD('${grafana_dbpass}');
GRANT ALL ON grafana.* TO '${grafana_dbuser}'@'%';
EOSQL
GRAFANA_DBUSER
else
[ "${grafana_dbuser:-null}" = 'null' ] && echo 'grafana_dbuser not defined: connection to MYSQL not created' >&2
[ -z "${grafana_dbpass:-}" ] && echo 'grafana_dbpass not defined: connection to MYSQL not created' >&2
fi
touch "$isIinitialized"
fi
# perform common
# (runs on every invocation) publish nginx config fragments if present.
if [ -d "${config_dir}/nginx.conf" ]; then
sudo mkdir -p "${WORKSPACE_DIR}/nginx/conf.d/"
sudo cp -ru "${config_dir}/nginx.conf"/* "${WORKSPACE_DIR}/nginx/conf.d/"
[ -d "${workspace_dir}/nginx.conf" ] && sudo rm -rf "${workspace_dir}/nginx.conf"
fi
exit 0
| true
|
e3d4b9adf21beedac5bd73453c5ddf14398f1219
|
Shell
|
vh21/fuego
|
/fuego-ro/toolchains/tools.sh
|
UTF-8
| 2,096
| 2.953125
| 3
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
# Copyright (c) 2014 Cogent Embedded, Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# DESCRIPTION
# This script calls the env. setup script for ${TOOLCHAIN} to set
# up environment variables
function export_tools () {
    # Export the standard cross-toolchain variables derived from
    # ${PREFIX} (toolchain triplet) and ${SDKROOT} (target sysroot).
    local cross="${PREFIX}-"
    export AS="${cross}as"
    export CC="${cross}gcc"
    export CXX="${cross}g++"
    export AR="${cross}ar"
    export RANLIB="${cross}ranlib"
    export CPP="${cross}cpp"
    export CXXCPP="${cross}cpp"
    export LD="${cross}ld"
    export LDFLAGS="--sysroot ${SDKROOT} -lm"
    export CROSS_COMPILE="${cross}"
    export HOST="${PREFIX}"
    export CONFIGURE_FLAGS="--target=${PREFIX} --host=${PREFIX} --build=$(uname -m)-unknown-linux-gnu"
}
# for backwards compatibility with board files that use PLATFORM
if [ -z "$TOOLCHAIN" ] ; then
	TOOLCHAIN="$PLATFORM"
fi
# scan the toolchains directory for a matching $TOOLCHAIN-tools.sh file
if [ -f "${FUEGO_RO}/toolchains/${TOOLCHAIN}-tools.sh" ] ; then
	# Fixed: quote the sourced path — FUEGO_RO/TOOLCHAIN containing spaces
	# would otherwise split into multiple arguments.
	source "${FUEGO_RO}/toolchains/${TOOLCHAIN}-tools.sh"
else
	abort_job "Missing toolchain setup script ${FUEGO_RO}/toolchains/${TOOLCHAIN}-tools.sh"
fi
| true
|
ed40f2068d2abd2881272094babd6b3cc2700026
|
Shell
|
Fedkasin/lot-management-client
|
/bin/start
|
UTF-8
| 546
| 3.625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Regenerate the app config, then start Expo, logging in first when
# credentials are supplied via EXPO_USER / EXPO_PASSWORD.
SCRIPTPATH="$(dirname "$(realpath -s "$0")")"
echo "Updating the app config..."
"$SCRIPTPATH/create_config" "$(realpath "${SCRIPTPATH}/../src/constants/environment.js")"
echo "Checking Expo Auth..."
# Fixed: quote the -z operands — unquoted, a multi-word value breaks `[`.
# NOTE(review): with &&, login is attempted when only ONE of the two vars
# is set; confirm whether || (require both) was intended.
if [ -z "${EXPO_USER}" ] && [ -z "${EXPO_PASSWORD}" ]
then
	echo "Expo credentials are not set, skipping log in."
	echo "To log in, export EXPO_USER and EXPO_PASSWORD env vars"
	echo "Exiting"
else
	# 'yes Y' auto-confirms any interactive prompt from expo login.
	yes Y | "$SCRIPTPATH/../node_modules/.bin/expo" login -u "${EXPO_USER}" -p "${EXPO_PASSWORD}"
	"$SCRIPTPATH/../node_modules/.bin/expo" start
fi
| true
|
9027b373380368f45f860d46db1e5714821d46b6
|
Shell
|
rshwet14/Real-Life-Script
|
/log-alert
|
UTF-8
| 463
| 3.125
| 3
|
[] |
no_license
|
#!/bin/bash
# Author: Shwet Raj
# Date: 23/10/20
# Description: This script will send syslog file through email to administrator
# Modified: 23/10/20
#This script will work only if your system environment have mail relay server installed.
systems="rshwet14@gmail.com,rajshwet07@gmail.com"
# Mail the deduplicated messages only when the filtered log is non-empty
# (-s), then remove it so the same errors are not re-sent.
# Fixed: the original had an empty `else` branch before `fi`, which is a
# bash syntax error — the script could never run at all.
if [ -s /tmp/filtered-messages ]
then
	sort /tmp/filtered-messages | uniq | mail -s "CHECK: Syslog errors" "$systems"
	rm /tmp/filtered-messages
fi
| true
|
1cd0556db9fa1567bf56f4469ee38fbf73b26238
|
Shell
|
joshpennington/MySQL-Vagrant
|
/bootstrap.sh
|
UTF-8
| 794
| 3
| 3
|
[] |
no_license
|
#! /usr/bin/env bash
# Vagrant provisioner: unattended install of MySQL 5.6 with a known root
# password, reachable from any host (bind-address disabled, root@'%').
# NOTE(review): hard-coded password and the root@'%' grant are only
# acceptable for a throwaway local development VM.
export DEBIAN_FRONTEND=noninteractive
MYSQL_ROOT_PASSWORD="password"
apt-get update > /dev/null 2>&1
echo -e "\n --- Installing MySQL 5.6 ---\n"
# Pre-seed debconf so the mysql-server package installs without prompting.
echo mysql-server-5.6 mysql-server/root_password password $MYSQL_ROOT_PASSWORD | debconf-set-selections
echo mysql-server-5.6 mysql-server/root_password_again password $MYSQL_ROOT_PASSWORD | debconf-set-selections
apt-get -y install mysql-server-5.6 > /dev/null 2>&1
# Comment out bind-address so MySQL listens on all interfaces.
sed -i "s/bind-address/#bind-address/g" /etc/mysql/my.cnf
service mysql restart > /dev/null 2>&1
# Unquoted EOF: $MYSQL_ROOT_PASSWORD expands inside the SQL below.
mysql -uroot -p$MYSQL_ROOT_PASSWORD << EOF
use mysql;
CREATE USER 'root'@'%' IDENTIFIED BY '$MYSQL_ROOT_PASSWORD';
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%';
UPDATE mysql.user SET Grant_priv='Y', Super_priv='Y' WHERE User='root';
FLUSH PRIVILEGES;
EOF
| true
|
66eec9551cf19a7e895ab5f303c4b22ade1b9589
|
Shell
|
kangdazhi/AUTOSAR-1
|
/core/scripts/sphinx/build_doc.sh
|
UTF-8
| 3,549
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
#rm -rf _build_can/html/
#rm -rf _build_can/latex/
#rm -rf _build_can/html_mcu_mpc55xx/
#rm -rf _build_can/html_mcu_tms570/
#rm -rf _build_can/latex_mcu_mpc55xx/
#rm -rf _build_can/latex_mcu_tms570/
function check_precond() {
    # Abort the whole build when gcc is not available on PATH
    # (needed later to preprocess .rstp files).
    local gcc_path
    gcc_path="$(command -v gcc)"
    if [ ! -x "$gcc_path" ]; then
        echo 'Error: gcc is not installed.' >&2
        exit 1
    fi
}
# 1 - to
# 2 - from
function create_symlink() {
    # Replace $1 with a symlink pointing at $2.
    # Fixed: remove any existing entry with -h/-e rather than -f — with
    # -f alone a *dangling* symlink was not removed and the following
    # `ln -s` failed with "File exists". Arguments are now quoted.
    if [ -h "$1" ] || [ -e "$1" ]; then
        rm -- "$1"
    fi
    ln -s -- "$2" "$1"
}
function to_upper() {
    # Echo $1 uppercased. Unquoted expansions (collapsing whitespace) and
    # the global 'a' are kept for compatibility with the original.
    a=$(echo $1 | tr 'a-z' 'A-Z')
    echo $a
}
# 1 - module
# 2 - type (um or dd)
# 3 - mcu
function build_mcal_doc() {
# Build HTML, LaTeX and PDF documentation for MCAL module $1, doc type $2
# (um/dd/sm) and MCU $3. Relies on globals $cdir (doc root) and $rdir
# (repo root) set by the top-level script.
# Create conf.py files
cat $cdir/conf_main.py > $rdir/mcal/$1/doc/$2/conf.py
python create_conf.py -m $1 -t $2 -a $3 >> $rdir/mcal/$1/doc/$2/conf.py
#
# Create symlink to arch
#
pushd $rdir/mcal/$1/doc/$2
create_symlink ug_arch.rst arch/ug_$3.rst
create_symlink arch/docref_arch.rst arch/docref_$3.rst
# Preprocess if exist...
if [ -f intro.rstp ]; then
mcu_cap=$( to_upper $3 )
# Run the C preprocessor over the .rstp so sections guarded by
# CFG_<MCU> are selected for this architecture.
gcc -E -P -x c -traditional-cpp -DCFG_$mcu_cap intro.rstp > intro.rst
fi
# Generate html
sphinx-build.exe -b html -t $3 . $rdir/_build_$1_$2_$3/html/
# Generate latex
sphinx-build.exe -b latex -t $3 . $rdir/_build_$1_$2_$3/latex/
# Cleanup
rm arch/docref_arch.rst
rm ug_arch.rst
#rm conf.py
popd
# Pdf
pushd $rdir/_build_$1_$2_$3/latex
pdfLatex $1_$2_$3.tex
popd
}
# 1 - module
# 2 - type (um or dd)
# ( 3 - mcu )
function build_mcal_dd_doc() {
# Build HTML, LaTeX and PDF for the MCU-independent MCAL design
# description: module $1, doc type $2. Uses globals $cdir and $rdir.
# Create conf.py files
cat $cdir/conf_main.py > $rdir/mcal/doc/$2/conf.py
python create_conf.py -m $1 -t $2 >> $rdir/mcal/doc/$2/conf.py
pushd $rdir/mcal/doc/$2
# Generate html
sphinx-build.exe -b html . $rdir/_build_$1_$2/html/
# Generate latex
sphinx-build.exe -b latex . $rdir/_build_$1_$2/latex/
popd
# Pdf
pushd $rdir/_build_$1_$2/latex
pdfLatex $1_$2.tex
popd
}
check_precond
# Windows/MSYS-style paths: python and MiKTeX live under /c/...
export PATH=$PATH:/c/Python27:/c/devtools/miktex/texmfs/install/miktex/bin
# Get script path
#cdir=$(dirname "$(readlink -f "$0")")
# cdir must come from ARC_DOC_ROOT (the sphinx extension path); the
# readlink variant above is kept for reference.
cdir=$ARC_DOC_ROOT
echo $cdir
rdir=$cdir/../..
echo $rdir
# export path so extension can be found by sphinx.
#export ARC_DOC_ROOT=$cdir
#-----------------------------------
# MCAL
#-----------------------------------
# Each enabled module below is built three times: user manual (um),
# design description (dd) and safety manual (sm), all for mpc5xxx.
# MCAL design description
#build_mcal_dd_doc MCAL dd
# MCAL modules ( um and um_<mcu> )
#build_mcal_doc can um mpc5xxx
# MCAL module design description
#build_mcal_doc can dd mpc5xxx
# module safety manual
#build_mcal_doc can sm mpc5xxx
# MCAL modules ( um and um_<mcu> )
build_mcal_doc pwm um mpc5xxx
# MCAL module design description
build_mcal_doc pwm dd mpc5xxx
# module safety manual
build_mcal_doc pwm sm mpc5xxx
#
# DIO
#
# MCAL modules ( um and um_<mcu> )
build_mcal_doc dio um mpc5xxx
# MCAL module design description
build_mcal_doc dio dd mpc5xxx
# module safety manual
build_mcal_doc dio sm mpc5xxx
#
# Port
#
# MCAL modules ( um and um_<mcu> )
build_mcal_doc Port um mpc5xxx
# MCAL module design description
build_mcal_doc Port dd mpc5xxx
# module safety manual
build_mcal_doc Port sm mpc5xxx
#
# MCU
#
# MCAL modules ( um and um_<mcu> )
build_mcal_doc MCU um mpc5xxx
# MCAL module design description
build_mcal_doc MCU dd mpc5xxx
# module safety manual
build_mcal_doc MCU sm mpc5xxx
#
# ADC
#
# MCAL modules ( um and um_<mcu> )
build_mcal_doc ADC um mpc5xxx
# MCAL module design description
build_mcal_doc ADC dd mpc5xxx
# module safety manual
build_mcal_doc ADC sm mpc5xxx
exit
| true
|
153d975c24471ac16578ed88945e37aa3adf1ae5
|
Shell
|
vishr/FrameworkBenchmarks
|
/toolset/setup/linux/systools/leiningen.sh
|
UTF-8
| 462
| 3.171875
| 3
|
[] |
no_license
|
#!/bin/bash
# Install Leiningen into $IROOT. fw_exists and fw_get are framework
# helpers provided by the toolset that sources this script (hence the
# top-level `return` below rather than `exit`).
RETCODE=$(fw_exists ${IROOT}/lein.installed)
# Already installed: source the saved environment and stop.
[ ! "$RETCODE" == 0 ] || { \
source $IROOT/lein.installed
return 0; }
mkdir -p lein/bin
fw_get -o leinbin https://raw.github.com/technomancy/leiningen/stable/bin/lein
mv leinbin lein/bin/lein
chmod +x lein/bin/lein
LEIN_HOME=$IROOT/lein
# Persist the env setup; future runs just source this file (see above).
echo "export LEIN_HOME=${LEIN_HOME}" > $IROOT/lein.installed
echo -e "export PATH=\$LEIN_HOME/bin:\$PATH" >> $IROOT/lein.installed
source $IROOT/lein.installed
| true
|
5a08a25a179f9ed3b3da10d9ccc99d224e5897cb
|
Shell
|
nima/wmiii
|
/statusbar.d/34-fnotify
|
UTF-8
| 1,662
| 3.4375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# wmii statusbar module: display irssi fnotify messages fetched over ssh.
# State lives in three files next to ${SB_STAGE_F} (set by statusbar.conf):
#   ${SB_STAGE_F}       - the fetched fnotify log
#   ${SB_STAGE_F}.loc   - total line count of that log
#   ${SB_STAGE_F}.msgi  - index of the next message to display
#   ${SB_STAGE_F}.msg   - the message text currently shown
. "${WMII_CONFPATH%%:*}/statusbar.conf"
THEME=HW
SYMBOL=2588
requires lck || exit 97
loc=-1
msg=-1
msgi=-1
# rst <mode>: (re)fetch the fnotify log.
#   mode > 0 forces a refetch; mode == 2 additionally resets the read
#   pointer to just past the end (skip all existing messages).
rst() {
if [ $1 -gt 0 ]; then
rm -f ${SB_STAGE_F}
fi
if [ ! -f ${SB_STAGE_F} ]; then
rsync -e 'ssh -q' -q epenguin1-10.appmgmt.det.nsw.edu.au:.irssi/fnotify ${SB_STAGE_F}
loc=$(wc -l ${SB_STAGE_F}|awk '{print$1}')
echo ${loc} > ${SB_STAGE_F}.loc
if [ $1 -eq 2 -o ${msgi} -eq -1 ]; then
msgi=$(( $(wc -l ${SB_STAGE_F}|awk '{print$1}') + 1 ))
echo ${msgi} > ${SB_STAGE_F}.msgi
fi
printf '...' > ${SB_STAGE_F}.msg
fi
}
rst 0
# Advance to and print the next unread message (or '...' when caught up),
# updating the pointer files and beeping on a genuinely new message.
next_message() {
loc=$(cat ${SB_STAGE_F}.loc)
msgi=$(cat ${SB_STAGE_F}.msgi)
#. Update total lines-of-crap count when required:
if [ ${msgi} -ge ${loc} ]; then
rst 1
fi
#. If there is a new message, set it, otherwise '...' will do.
msg='...'
if [ ${msgi} -le ${loc} ]; then
msg=$(sed -n "${msgi}p" ${SB_STAGE_F})
if [ -n "${msg}" ]; then
msgi=$(( $msgi + 1 ))
echo ${msgi} > ${SB_STAGE_F}.msgi
fi
fi
printf '%s' "${msg}" > ${SB_STAGE_F}.msg
if [ "${msg}" != "..." ]; then
beeper alert
fi
echo "$(cat ${SB_STAGE_F}.msg) [mi:${msgi} l:${loc}]"
}
# Mouse buttons: left = next message, right = refetch then next message.
if sb_ready 10; then
lck ${LOCK_SB_MODULE} on
DATA=$(cat ${SB_STAGE_F}.msg)
case ${BUTTON} in
${MOUSE_L}) DATA="$(next_message)" ;;
${MOUSE_R}) rst 1; DATA="$(next_message)" ;;
esac
[ "${DATA}" != "..." ] || DATA="$(next_message)"
sb_write_cache "${DATA}"
lck ${LOCK_SB_MODULE} off
fi
sb_read_cache "..."
| true
|
a20111c88c364a8b29bd271642a0eb843e7a78b8
|
Shell
|
hans511002/erydb_rep
|
/utils/scenarios/perf/sh/pfExeSQLscript.sh
|
UTF-8
| 601
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
#
# This script does the following:
# 1) Executes supplied SQL script on test database and captures output to file
#
#$1 = Test database name
#$2 = SQL script to execute
if [ $# -lt 2 ]
then
	# Fixed: the original ran `echo*****` (missing space), which is not a
	# command, so the usage message was never printed.
	echo "***** Syntax: pfExeSQLScript.sh testDBName scriptFileName"
	exit 1
fi
#
logFileName=$(basename "$2")
#
# Execute script on reference database
#
# mysql $3 -h$2 -u$4 -p$5 <$6 > $logFileName.test.log
#
# Execute script on test database
#
/usr/local/erydb/mysql/bin/mysql --defaults-file=/usr/local/erydb/mysql/my.cnf -u root "$1" < "$2" > "$logFileName.test.log"
exit 0
| true
|
4c8dcd576e2b35d726c51655db84bd90890b8098
|
Shell
|
MikimotoH/firmadyne
|
/scripts/pro_vuln.sh
|
UTF-8
| 851
| 3.15625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# For every network-reachable firmware image without recorded vulns:
# re-verify reachability, bring up the emulated network, wait for the
# guest, run the exploit suite, tear down, and merge metasploit logs.
set -u
IIDS=$(scripts/psql_firmware.py "SELECT id FROM image WHERE network_reachable=True AND (vulns IS NULL OR ARRAY_LENGTH(vulns,1)=0) ORDER BY id")
for IID in $IIDS ; do
# net_reachable
# Re-check live reachability; the DB flag may be stale.
python3 -u scripts/test_network_reachable.py ${IID} test | tee test_network_reachable.log
net_reachable=$(cat test_network_reachable.log | grep "network_reachable=" | grep -ohE 'True|False')
rm test_network_reachable.log
if [ "$net_reachable" == "False" ] ; then
continue
fi
scripts/test_network_reachable.py $IID construct
guest_ip=$(scripts/psql_firmware.py "SELECT guest_ip FROM image WHERE id=$IID;")
# Busy-wait until the guest answers ping before launching exploits.
while ! ping -c1 $guest_ip &>/dev/null ; do :; done
analyses/runExploits.py -i $IID
scripts/test_network_reachable.py $IID destruct
scripts/merge_metasploit_logs.py $IID
done
| true
|
03514fb3c6deee04fc163223c0a908366794bc0c
|
Shell
|
organicmaps/organicmaps
|
/iphone/Maps/run-script-for-cmake.sh
|
UTF-8
| 611
| 3.671875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# This script builds C++ core libs and inserts some private variables.
# Should be run from Run Script phase in target's settings.
# Fixed: quote tr's bracket operands — unquoted [A-Z]/[a-z] are globs and
# could expand against single-character file names in the cwd.
LOWERED_CONFIG=$(echo "$CONFIGURATION" | tr '[A-Z]' '[a-z]')
CONF="debug"
if [[ "$LOWERED_CONFIG" == *release* || "$LOWERED_CONFIG" == *production* || "$LOWERED_CONFIG" == *adhoc* ]]; then
  CONF="release"
fi
# Respect "Build for active arch only" project setting.
if [[ "$ONLY_ACTIVE_ARCH" == YES ]]; then
  if [[ ! -z $CURRENT_ARCH ]]; then
    # NOTE(review): ARCHS is a plain space-separated string here, so
    # ${ARCHS[0]} yields the whole list, not the first arch — confirm
    # whether $CURRENT_ARCH was intended instead.
    VALID_ARCHS=${ARCHS[0]}
  fi
fi
echo "Building $CONF configuration"
bash "$SRCROOT/../../tools/autobuild/ios_cmake.sh" $CONF
| true
|
4bf40c594a77fcaac6073c2f9a358d7f392ddef9
|
Shell
|
junland/LFScm
|
/resources/make-scripts/toolchain/tools/21.binutils.sh
|
UTF-8
| 421
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# LFS toolchain step: unpack binutils, configure it for the $LFS_TGT
# cross target, build, and install into both / and $LFS (DESTDIR).
set -e
tar xvf binutils.tar.gz
cd binutils
# Binutils must be built in a dedicated build directory.
mkdir -v build
cd build
../configure \
--prefix=$LFS/usr \
--build=$(../config.guess) \
--host=$LFS_TGT \
--disable-nls \
--enable-shared \
--disable-werror \
--enable-64-bit-bfd
make -j$(nproc)
make install
make DESTDIR=$LFS install
# Remove the source tree so the script can be re-run from scratch.
cd ../..
rm -rf binutils
| true
|
a94d63675063d01bee762e0cf17bff87aac977c1
|
Shell
|
KyleJHarper/stupidbashtard
|
/sbt/test__function.sh
|
UTF-8
| 5,312
| 3.765625
| 4
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/bash
# Copyright 2013 Kyle Harper
# Licensed per the details in the LICENSE file in this package.
function test__function {
  #@Author Hank BoFrank
  #@Date 2013.03.04
  #@Usage test__function [-D 'search for string' [-D ...]] [-h --help] [-v --verbose] <file>
  #@Dep grep 2.0+ it's awesome
  #@dep cool_tool 9.21.4-alpha+, but less than 9.21.5
  #@Description A complex function attempting to show most (or all) of the things/ways you can document stuff.
  #@Description We will attempt to read a file and list the line number and first occurence of a Zelda keyword.
  #@Description -
  #@Description This is a SILLY script that is untested; for demonstration purposes only.
  # Variables
  #@$1 The first option (after shifting from getopts) will be a file name to operate on.
  #@$@ A list of other files to include in the search.
  #@$@ -
  #@$@ I have multiple lines, for kicks.
  local -i -r E_GENERIC=1            #@$E_GENERIC If we need to exit and don't have a better ERROR choice, use this.
  local -i -r E_BAD_INPUT=10         #@$ Send when file specified in $1 is invalid or when -D is blank.
  local _verbose=false               #@$_verbose Flag to decide if we should be chatty with our output.
  local _temp='something'            #@$ A temp variable for our operations below. (Note: Shocker will record defaults.)
  local -r _WIFE_IS_HOT=true         #@$ Pointless boolean flag, and it is now read only (and accurate).
  local -a _index_array=( Zelda )    #@$ Index array with 1 element (element 0, value of Zelda)
  local -A _assoc_array              #@$ Associative array (hash) to hold misc things as we read file.
  local -A _new_hash=([k1]=hi)       #@$ Hash with a single key/value pair.
  # Fixed: the compound assignment below was missing its closing paren,
  # which made the whole function a syntax error.
  local -A _bigger_hash=([k-1]=hi [k-2]=hi)  #@$ Hash with a multiple key/value pair.
  local _implied_array=(hi ho)       #@$ Shocker will understand this is an array.
  local -a _multi_array=(            #@$ Explicitly defined array that holds multiple values via lines.
    one
    two
    'a b c'
    'd "e" f'
    'g " h " i'
    "\\whee"
    "\"no\" thanks"
  )
  local -A _multi_hash=(             #@$ Explicitly defined multiline hash.
    [key1]=value1
    ['key2']=value2
    [key 3]=value3
    [key 4]='value 4'
    [key 5]="\"value\" 5"
  )
  local -i _i                        #@$ A counter variable, forced to be integer only.
  local _opt
  local _line                        #@$ Temporary variable for use in the read loop.
  #@$_line I have some extra detail about _line.
  local _reference                   #@$ Variable to hold the name of our nameref for assignment.
  local _multiline='this
spans a few
lines'
  final_value=''                     #@$ The final value to expose to the caller after we exit. (Note: Shocker will flag 'top' scope as a result.)
  # Process options
  while true ; do
    # Fixed: results were stored in 'opt' but dispatched on ${_opt}; the
    # option letter is now consistently placed in _opt.
    core__getopts ":D:hR:v" _opt 'help,verbose'
    case $? in 1) break ;; 2) return 1 ;; esac
    case "${_opt}" in
      D ) #@opt_ Add bonus items to the index_array variable.
        _index_array+=("${OPTARG}")
        ;;
      h|help ) #@opt_ Display an error and return non-zero if the user tries to use -h for this function.
        echo 'No help exists for this function yet.' >&2
        return ${E_GENERIC}
        ;;
      'R' ) _reference="${OPTARG}" ;; #@opt_ Sets the nameref for indirect assignment.
      v | verbose ) _verbose=true ;;  #@opt_ Change the verbose flag to true so we can send more output to the caller.
      * ) echo "Invalid option: -${OPTARG}" >&2 ; return ${E_GENERIC} ;;
    esac
  done
  #@opt_verbose Federating this option so I can make it multiline.
  # Pre-flight Checks
  if ! core__tool_exists 'grep' ; then echo 'The required tools to run this function were not found.' >&2 ; return ${E_GENERIC} ; fi
  if [ ${#_index_array[@]} -lt 2 ] ; then echo "You must provide at least 1 Hyrule item (via -D option)" >&2 ; return ${E_BAD_INPUT} ; fi
  if [ ! -f "${1}" ] ; then echo "Cannot find specified file to read: ${1}" >&2 ; return ${E_BAD_INPUT} ; fi
  ${_verbose} && echo "Verbosity enabled. Done processing variables and cleared pre-flight checks."
  # Main function logic
  _i=1
  while read _line ; do
    # If the line matches a Hyrule keyword, store it in associative array. Use grep, simply so we can add it to core__tool_exists check above.
    # Fixed: matches were stored in 'assoc_array' but read back from
    # '_assoc_array', so results were always lost; one name is used now.
    for _temp in ${_index_array[@]} ; do
      if echo "${_line}" | grep -q -s "${_temp}" ; then _assoc_array["${_i}"]="${_temp}" ; break ; fi
    done
    let _i++
  done <${1}
  # Print results & leave
  if [ ${#_assoc_array[@]} -eq 0 ] ; then echo "No matches found." ; return 0 ; fi
  for _temp in "${!_assoc_array[@]}" ; do echo "Found match for keyword ${_assoc_array[${_temp}]} on line number ${_temp}." ; done
  return 0
}
| true
|
e0148e546c01117919746bbd786953b839acd29e
|
Shell
|
96boards-hikey/aosp-device-linaro-hikey
|
/installer/hikey/flash-all.sh
|
UTF-8
| 2,102
| 3.625
| 4
|
[] |
no_license
|
#!/bin/bash
# Flash all HiKey images via the recovery serial port given in $1.
# Optional $2 = '4g' selects the 4GB eMMC partition table.
#   --out <dir>               override the android image output directory
#   --use-compiled-binaries   take fip.bin/l-loader.bin from $DIST_DIR
#
# Fixed: every error path now exits with status 1 (the original used bare
# `exit`, i.e. status 0, so callers could not detect failures).
if [ $# -eq 0 ]
then
	echo "Provide the right /dev/ttyUSBX specific to recovery device"
	exit 1
fi
if [ ! -e "${1}" ]
then
	echo "device: ${1} does not exist"
	exit 1
fi
DEVICE_PORT="${1}"
PTABLE=ptable-aosp-8g.img
if [ $# -gt 1 ]
then
	if [ "${2}" == '4g' ]
	then
		PTABLE=ptable-aosp-4g.img
	fi
fi
INSTALLER_DIR="$(dirname "${0}")"
FIRMWARE_DIR="${INSTALLER_DIR}"
# for cases that not run "lunch hikey-userdebug"
if [ -z "${ANDROID_BUILD_TOP}" ]; then
	ANDROID_BUILD_TOP=${INSTALLER_DIR}/../../../../../
	ANDROID_PRODUCT_OUT="${ANDROID_BUILD_TOP}/out/target/product/hikey"
fi
if [ -z "${DIST_DIR}" ]; then
	DIST_DIR="${ANDROID_BUILD_TOP}"/out/dist
fi
#get out directory path
while [ $# -ne 0 ]; do
	case "${1}" in
		--out) OUT_IMGDIR=${2};shift;;
		--use-compiled-binaries) FIRMWARE_DIR="${DIST_DIR}";shift;;
	esac
	shift
done
if [[ "${FIRMWARE_DIR}" == "${DIST_DIR}" && ! -e "${DIST_DIR}"/fip.bin && ! -e "${DIST_DIR}"/l-loader.bin ]]; then
	echo "No binaries found at ${DIST_DIR}. Please build the bootloader first"
	exit 1
fi
if [ -z "${OUT_IMGDIR}" ]; then
	if [ ! -z "${ANDROID_PRODUCT_OUT}" ]; then
		OUT_IMGDIR="${ANDROID_PRODUCT_OUT}"
	fi
fi
if [ ! -d "${OUT_IMGDIR}" ]; then
	echo "error in locating out directory, check if it exist"
	exit 1
fi
echo "android out dir:${OUT_IMGDIR}"
# Load l-loader over the recovery UART, then give the board a moment to
# come up in fastboot mode.
sudo python "${INSTALLER_DIR}"/hisi-idt.py --img1="${FIRMWARE_DIR}"/l-loader.bin -d "${DEVICE_PORT}"
sleep 3
# set a unique serial number when the board reports none / the default one
serialno=$(fastboot getvar serialno 2>&1 > /dev/null)
if [ "${serialno:10:6}" == "(null)" ]; then
	fastboot oem serialno
else
	if [ "${serialno:10:15}" == "0123456789abcde" ]; then
		fastboot oem serialno
	fi
fi
fastboot flash ptable "${INSTALLER_DIR}"/"${PTABLE}"
fastboot flash fastboot "${FIRMWARE_DIR}"/fip.bin
fastboot flash nvme "${INSTALLER_DIR}"/nvme.img
fastboot flash boot "${OUT_IMGDIR}"/boot.img
fastboot flash system "${OUT_IMGDIR}"/system.img
fastboot flash cache "${OUT_IMGDIR}"/cache.img
fastboot flash userdata "${OUT_IMGDIR}"/userdata.img
| true
|
d86ff9b0f1a1bac5c5b9abbbddd4ed28fe275541
|
Shell
|
isovector/tino
|
/bin/conky-jobs
|
UTF-8
| 865
| 3.34375
| 3
|
[] |
no_license
|
#! /usr/bin/bash
for FILE in /home/sandy/.jobs/*; do
TAG=$(sed -n 2p $FILE | tr -d \\n)
TIME=$(arbtt-stats -m0 | grep $TAG | cut -d'|' -f2 | xargs)
day_pattern="^([0-9]+)d0?([0-9]+)h0?([0-9]+)m"
noday_pattern="^0?([0-9]+)h0?([0-9]+)m"
OFFSET=$(sed -n 4p $FILE | tr -d \\n)
if [[ $TIME =~ $day_pattern ]]; then
DAYS=${BASH_REMATCH[1]}
HOURS=${BASH_REMATCH[2]}
MINUTES=${BASH_REMATCH[3]}
fi
if [[ $TIME =~ $noday_pattern ]]; then
DAYS=0
HOURS=${BASH_REMATCH[1]}
MINUTES=${BASH_REMATCH[2]}
fi
HOURS=$(( DAYS * 24 + HOURS + OFFSET ))
TOTAL=$(sed -n 3p $FILE | tr -d \\n)
PERC=$(( (HOURS * 60 + MINUTES) * 100 / (TOTAL * 60) ))
echo -n '${color white}'
sed -n 1p $FILE | tr -d \\n
echo -n ": "
echo -n '${color}'
echo -n $HOURS
echo -n "/"
echo -n $TOTAL
# PERC=$(( (NUM * 100) / DEN ))
echo -n ' ${execbar echo '
echo -n $PERC
echo '}'
done
| true
|
51586123fde6a63845f317eca0872a0dff33723d
|
Shell
|
kgoel9899/Shell_Scripting
|
/ex/test
|
UTF-8
| 128
| 2.84375
| 3
|
[] |
no_license
|
#!/bin/bash
for car in date pwd
do
echo "car make (list) $car" $($car)
done
for i in 1 2
do
echo "$i * $i = " $(($i * $i))
done
| true
|
48dcd489be073185fb474be0ed997f4a23c7c87f
|
Shell
|
Otus-DevOps-2021-05/cmltaWt0_infra
|
/packer/scripts/install_mongodb.sh
|
UTF-8
| 508
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
sleep 30;
apt-get install -y apt-transport-https ca-certificates
wget -qO - https://www.mongodb.org/static/pgp/server-4.2.asc | sudo apt-key add -
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/4.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.2.list
apt-get update
sleep 30;
apt-get install -y mongodb-org
newip="0.0.0.0"
sed -i -r 's/(\b[0-9]{1,3}.){3}[0-9]{1,3}\b'/$newip/ /etc/mongod.conf
systemctl start mongod
systemctl enable mongod
| true
|
1c6db7037824d1ab34adde1e4d639303c49b2669
|
Shell
|
blankon-packages/pkgbinarymangler
|
/pkgmaintainermangler
|
UTF-8
| 2,076
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash -e
CONFFILE=/etc/pkgbinarymangler/maintainermangler.conf
CONFFILE=${PKGBINARYMANGLER_CONF_DIR:-/etc/pkgbinarymangler}/maintainermangler.conf
OVERRIDEFILE=${PKMAINTAINERGMANGLER_OVERRIDES:-/etc/pkgbinarymangler/maintainermangler.overrides}
. ${PKGBINARYMANGLER_COMMON_PATH:-/usr/share/pkgbinarymangler}/common
readctrl "$OVERRIDEFILE" "default"
OVERRIDE="$RET"
if [ -f "$BUILDINFO" ]; then
readctrl "$BUILDINFO" "Component"
COMPONENT="$RET"
readctrl "$OVERRIDEFILE" "%$RET"
if [ -n "$RET" ]; then
OVERRIDE="$RET"
fi
if grep -qs '^Purpose: PPA' "$BUILDINFO"; then
echo "INFO: Disabling pkgmaintainermangler for PPA build"
exit 0
fi
fi
readctrl "$PKGCTL" "Maintainer"
OLDMAINT="$RET"
OLDMAINTEMAIL=$(echo "$OLDMAINT" | sed -e 's/.*<\(.*\)>.*/\1/')
OLDMAINTDOMAIN=$(echo "$OLDMAINTEMAIL" | sed -e 's/.*@//')
readctrl "$OVERRIDEFILE" "ignore_domains"
IGNORE_DOMAINS="$RET"
if [ -n "$IGNORE_DOMAINS" ]; then
for i in $IGNORE_DOMAINS; do
if [ "$i" = "$OLDMAINTDOMAIN" ]; then
echo "pkgmaintainermangler: Not overriding Maintainer for domain $OLDMAINTDOMAIN"
exit 0
fi
done
fi
readctrl "$OVERRIDEFILE" "ignore_emails"
IGNORE_EMAILS="$RET"
if [ -n "$IGNORE_EMAILS" ]; then
for i in $IGNORE_EMAILS; do
if [ "$i" = "$OLDMAINTEMAIL" ]; then
echo "pkgmaintainermangler: Not overriding Maintainer for address $OLDMAINTEMAIL"
exit 0
fi
done
fi
readctrl "$OVERRIDEFILE" "@$OLDMAINTEMAIL"
if [ -n "$RET" ]; then
OVERRIDE="$RET"
fi
readctrl "$PKGCTL" "Original-Maintainer"
if [ -n "$RET" ]; then
echo "pkgmaintainermangler: $PKGCTL already contains an Original-Maintainer field; doing nothing" >&2
exit 0
fi
if [ "$OLDMAINT" = "$OVERRIDE" ]; then
echo "pkgmaintainermangler: Override matches original Maintainer; doing nothing"
exit 0
fi
echo "pkgmaintainermangler: Maintainer field overridden to \"$OVERRIDE\""
sed -i -e "s/^Maintainer: \(.*\)$/Maintainer: $OVERRIDE\nOriginal-Maintainer: \1/" $PKGCTL
| true
|
7656eac968aea8be02fd2aafd5ceee114799f574
|
Shell
|
klaud-speech/espnet
|
/egs/voxforge/asr1/run_in_docker.sh
|
UTF-8
| 1,175
| 3.875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
gpu=0
name="chainer300_cudnn7.0_nossh"
image="chainer/3.0.0nossh:9.0-cudnn7-16.04"
while test $# -gt 0
do
case "$1" in
--gpu) gpu=$2
;;
--*) echo "bad option $1"
exit 1;;
*) echo "argument $1"
exit 1;;
esac
shift
shift
done
docker_image=$( docker images -q $image )
if ! [[ -n $docker_image ]]; then
voxforge=$PWD
cd ../../../tools
echo "Building docker image..."
(docker build -f "$name".devel -t $image .) || exit 1
cd $voxforge
fi
vol1="$PWD/../../../src:/spnet/src"
vol2="$PWD/../../../egs:/spnet/egs"
cmd1="cd /spnet/egs/voxforge/asr1"
cmd2="./run.sh --docker true"
cmd3="chmod -R 777 /spnet/egs/voxforge/asr1" #Required to access once the training if finished
if [ ${gpu} -le -1 ]; then
cmd="docker run -i --rm --name spnet_nogpu -v $vol1 -v $vol2 $image /bin/bash -c '$cmd1; $cmd2; $cmd3'"
else
cmd="NV_GPU=$gpu nvidia-docker run -i --rm --name spnet$gpu -v $vol1 -v $vol2 $image /bin/bash -c '$cmd1; $cmd2; $cmd3'"
# --rm erase the container when the training is finished.
fi
echo "Executing application in Docker"
eval $cmd
echo "`basename $0` done."
| true
|
418d612920fb642b2ede7af1532ad60f75507c4f
|
Shell
|
n0ts/dotfiles
|
/dot.zshrc
|
UTF-8
| 16,063
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/zsh
# see manual umask
umask 022
# core dump size
limit coredumpsize 0
# see manual zshoptions(1)
setopt ALWAYS_TO_END # If unset, key functions that list completions try to return to the last prompt if given a numeric argument. If set these functions try to return to the last prompt if given no numeric argument.
setopt AUTO_CD # If a command is issued that can’t be executed as a normal command, and the command is the name of a directory, perform the cd command to that directory.
setopt AUTO_PUSHD # Make cd push the old directory onto the directory stack.
setopt AUTO_RESUME # Treat single word simple commands without redirection as candidates for resumption of an existing job.
setopt BANG_HIST # Perform textual history expansion, csh-style, treating the character ‘!’ specially.
setopt CDABLE_VARS # If the argument to a cd command (or an implied cd with the AUTO_CD option set) is not a directory, and does not begin with a slash, try to expand the expression as if it were preceded by a ‘~’.
setopt COMPLETE_IN_WORD # If unset, the cursor is set to the end of the word if completion is started. Otherwise it stays there and completion is done from both ends.
setopt CORRECT # Try to correct the spelling of commands. Note that, when the HASH_LIST_ALL option is not set or when some directories in the path are not readable, this may falsely report spelling errors the first time some commands are used.
setopt CORRECT_ALL # Try to correct the spelling of all arguments in a line.
setopt CSH_JUNKIE_HISTORY # A history reference without an event specifier will always refer to the previous command. Without this option, such a history reference refers to the same event as the previous history reference, defaulting to the previous command.
setopt EQUALS # Perform = filename expansion
setopt EXTENDED_GLOB # Treat the ‘#’, ‘~’ and ‘^’ characters as part of patterns for filename generation, etc.
setopt EXTENDED_HISTORY # Save each command’s beginning timestamp (in seconds since the epoch) and the duration (in seconds) to the history file.
setopt FUNCTION_ARGZERO # When executing a shell function or sourcing a script, set $0 temporarily to the name of the function/script. Note that toggling FUNCTION_ARGZERO from on to off (or off to on) does not change the current value of $0. Only the state upon entry to the function or script has an effect. Compare POSIX_ARGZERO.
setopt GLOB_COMPLETE # When the current word has a glob pattern, do not insert all the words resulting from the expansion but generate matches as for completion and cycle through them like MENU_COMPLETE.
setopt GLOB_DOTS # Do not require a leading ‘.’ in a filename to be matched explicitly.
setopt HIST_IGNORE_ALL_DUPS # If a new command line being added to the history list duplicates an older one, the older command is removed from the list (even if it is not the previous event).
setopt HIST_IGNORE_DUPS # Do not enter command lines into the history list if they are duplicates of the previous event.
setopt HIST_IGNORE_SPACE # Remove command lines from the history list when the first character on the line is a space, or when one of the expanded aliases contains a leading space. Only normal aliases (not global or suffix aliases) have this behaviour. Note that the command lingers in the internal history until the next command is entered before it vanishes, allowing you to briefly reuse or edit the line. If you want to make it vanish right away without entering another command, type a space and press return.
setopt HIST_NO_STORE # Remove the history (fc -l) command from the history list when invoked. Note that the command lingers in the internal history until the next command is entered before it vanishes, allowing you to briefly reuse or edit the line.
setopt HIST_REDUCE_BLANKS # Remove superfluous blanks from each command line being added to the history list.
setopt HIST_VERIFY # Whenever the user enters a line with history expansion, don’t execute the line directly; instead, perform history expansion and reload the line into the editing buffer.
setopt IGNORE_EOF # Do not exit on end-of-file. Require the use of exit or logout instead. However, ten consecutive EOFs will cause the shell to exit anyway, to avoid the shell hanging if its tty goes away.
setopt INC_APPEND_HISTORY # This options works like APPEND_HISTORY except that new history lines are added to the $HISTFILE incrementally (as soon as they are entered), rather than waiting until the shell exits. The file will still be periodically re-written to trim it when the number of lines grows 20% beyond the value specified by $SAVEHIST (see also the HIST_SAVE_BY_COPY option).
setopt INTERACTIVE_COMMENTS # Allow comments even in interactive shells.
setopt LIST_PACKED # Try to make the completion list smaller (occupying less lines) by printing the matches in columns with different widths.
setopt LONG_LIST_JOBS # List jobs in the long format by default.
setopt MAGIC_EQUAL_SUBST # All unquoted arguments of the form ‘anything=expression’ appearing after the command name have filename expansion (that is, where expression has a leading ‘~’ or ‘=’) performed on expression as if it were a parameter assignment. The argument is not otherwise treated specially; it is passed to the command as a single argument, and not used as an actual parameter assignment. For example, in echo foo=~/bar:~/rod, both occurrences of ~ would be replaced. Note that this happens anyway with typeset and similar statements.
setopt NO_BEEP #
setopt NO_CLOBBER #
setopt NO_FLOW_CONTROL #
setopt NO_HUP #
setopt NO_LIST_BEEP #
setopt NONOMATCH #
setopt NOTIFY # Report the status of background jobs immediately, rather than waiting until just before printing a prompt.
setopt NUMERIC_GLOB_SORT # If numeric filenames are matched by a filename generation pattern, sort the filenames numerically rather than lexicographically.
setopt PATH_DIRS # Perform a path search even on command names with slashes in them. Thus if ‘/usr/local/bin’ is in the user’s path, and he or she types ‘X11/xinit’, the command ‘/usr/local/bin/X11/xinit’ will be executed (assuming it exists). Commands explicitly beginning with ‘/’, ‘./’ or ‘../’ are not subject to the path search. This also applies to the ‘.’ and source builtins.
setopt PRINT_EIGHT_BIT # Print eight bit characters literally in completion lists, etc. This option is not necessary if your system correctly returns the printability of eight bit characters (see man page ctype(3)).
setopt PROMPT_SUBST # If set, parameter expansion, command substitution and arithmetic expansion are performed in prompts. Substitutions within prompts do not affect the command status.
setopt PUSHD_IGNORE_DUPS # Don’t push multiple copies of the same directory onto the directory stack.
setopt SHARE_HISTORY # This option both imports new commands from the history file, and also causes your typed commands to be appended to the history file (the latter is like specifying INC_APPEND_HISTORY, which should be turned off if this option is in effect). The history lines are also output with timestamps ala EXTENDED_HISTORY (which makes it easier to find the spot where we left off reading the file after it gets re-written).
setopt SUN_KEYBOARD_HACK # If a line ends with a backquote, and there are an odd number of backquotes on the line, ignore the trailing backquote. This is useful on some keyboards where the return key is too small, and the backquote key lies annoyingly close to it. As an alternative the variable KEYBOARD_HACK lets you choose the character to be removed.
setopt ZLE # Use the zsh line editor. Set by default in interactive shells connected to a terminal.
unsetopt BG_NICE # Run all background jobs at a lower priority. This option is set by default.
# see manual zshparam(1).
HISTSIZE=100000
SAVEHIST=100000
HISTFILE=$HOME/.zhistory
zshaddhistory() { whence ${${(z)1}[1]} >| /dev/null || return 1 }
function history-all { history -E 1 }
# path
[ -d $HOME/.zsh-completions ] && fpath=($HOME/.zsh-completions/src $fpath)
fpath=($HOME/.zfunctions $fpath)
ospath=( /usr/{,s}bin /{,s}bin )
localpath=( /opt/*/{,s}bin /usr/local/{,s}bin /usr/local/*/{,s}bin /usr/X11R6/{,s}bin )
homepath=( $HOME/.{,s}bin )
path=( $homepath $localpath $ospath )
# load platform configuration
export OSTYPE=`uname -s`
case $OSTYPE in
Linux*)
[ -r $HOME/.zshrc.linux ] && source $HOME/.zshrc.linux
;;
FreeBSD*)
[ -r $HOME/.zshrc.freebsd ] && source $HOME/.zshrc.freebsd
;;
Darwin*)
[ -r $HOME/.zshrc.darwin ] && source $HOME/.zshrc.darwin
;;
esac
# environment variable configuration
export LANG=en_US.UTF-8
export LC_ALL=$LANG
export LESSCHARSET=UTF-8
export LESS='-R'
if [ -x "$BIN_PATH/source-highlight" ]; then
if [ -x "$BIN_PATH/nkf" ]; then
export LESSOPEN='| src-hilite-lesspipe.sh %s | nkf'
else
export LESSOPEN='| src-hilite-lesspipe.sh %s'
fi
fi
export WORDCHARS='*?-[]~\!#%^(){}<>|`@#%^*()+:?'
export HOST=`hostname`
if [ -x "$BIN_PATH/lv" ]; then
export PAGER="lv -c"
else
export PAGER=less
fi
export LSCOLORS=dxfxcxdxbxegedabagacad
if [ -x "$BIN_PATH/emacsclient" ]; then
export EDITOR=emacsclient
else
export EDITOR=vi
fi
export AWS_PAGER=
# emacs keybind
bindkey -e
bindkey '^A' beginning-of-line
bindkey '^E' end-of-line
bindkey '^?' backward-delete-char
bindkey '^H' backward-delete-char
bindkey '^[[3~' delete-char
bindkey '^[[1~' beginning-of-line
bindkey '^[[4~' end-of-line
# history keybinf
autoload -Uz is-at-least
if is-at-least 4.3.10; then
bindkey '^R' history-incremental-pattern-search-backward
bindkey '^S' history-incremental-pattern-search-forward
fi
# turn on auto-completion
autoload -U compinit; compinit -u;
autoload -U zstyle+
autoload _canonical_paths args
[ -f $HOME/.zfunctions/cdd ] && source $HOME/.zfunctions/cdd
[ -f $HOME/.zfunctions/preexec ] && source $HOME/.zfunctions/preexec
# completion style
zstyle ':completion:*' menu select
zstyle ':completion:*' format '%F{white}%d%f'
zstyle ':completion:*' group-name ''
zstyle ':completion:*' matcher-list 'm:{a-z}={A-Z}'
zstyle ':completion:*' keep-prefix
zstyle ':completion:*' completer _oldlist _complete _match _ignored \
_approximate _list _history
# completion cache on
zstyle ':completion:*' use-cache true
# menu
zstyle ':completion:*:default' menu select=1
# case-insensitive (uppercase from lowercase) completion
zstyle ':completion:*' matcher-list 'm:{a-z}={A-Z}'
# mosh
compdef mosh=ssh
# ssh completion
hosts=( ${(@)${${(M)${(s:# :)${(zj:# :)${(Lf)"$([[ -f $HOME/.ssh/config ]] && < $HOME/.ssh/config)"}%%\#*}}##host(|name) *}#host(|name) }/\*} )
zstyle ':completion:*:hosts' hosts $hosts
# colors on completion
zstyle ':completion:*' list-colors ${(s.:.)LSCOLORS}
case $OSTYPE in
Linux*)
PS_CMD="ps -axco pid,user,command"
;;
FreeBSD*)
PS_CMD="ps -xco pid,user,command"
;;
Darwin*)
PS_CMD="ps -axco pid,user,command"
;;
esac
zstyle ':completion:*:processes' command $PS_CMD
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([%0-9]#)*=0=01;31'
# sudo completion
zstyle -e ":completion:*:sudo:*" command-path 'reply=($path)'
# prompt
if [ "$TERM" != "dumb" ]; then
PROMPT1='%{[$[31+$RANDOM % 6]m%}%B%U%m'"@%n%#%{[m%}%u%b "
PROMPT2='%{[$[31+$RANDOM % 6]m%}%B%(?.%h.ERROR:%?) (%3c)%{[m%}%b'
if [ -n "${SSH_CLIENT}${SSH_CONNECTION}" ]; then
PROMPT1="%{[37m%}${HOST%%.*} ${PROMPT1}"
fi
PROMPT="${PROMPT2}
${PROMPT1}"
fi
SPROMPT="%{[31m%}'%r' is correct? [n,y,a,e] %{${reset_color}%}"
# aliases
if gls --color > /dev/null 2>&1; then
alias ls='gls --color=always -F'
elif ls --color > /dev/null 2>&1; then
alias ls='ls --color=always -F'
elif ls -G > /dev/null 2>&1; then
alias ls='ls -FG'
else
alias ls='ls -F'
fi
alias la='ls -AFlh'
alias ll='ls -Flh'
alias l=ls
alias sl=l
alias ag='ag --color'
alias be='bundle exec'
alias bi="bundle install --path .bundle"
alias bc='bundle clean'
alias bo='bundle outdated'
alias bs='bundle show'
alias bu='bundle update'
alias bundle='nocorrect bundle'
alias c='clear'
alias cp='nocorrect cp'
alias df='df -h'
alias du='du -h'
alias e='emacs -nw'
alias egrep='egrep --color'
alias f=finger
if [ -x "$BIN_PATH/hub" ]; then
alias git=hub
fi
alias g=git
alias grep='grep --color=always'
alias h=history
alias j=jobs
alias less='less --tabs=4'
alias lv='lv -c'
alias m='mosh'
alias man='LANG=C man'
alias mkdir='nocorrect mkdir'
alias mv='nocorrect mv'
alias mysql='mysql --auto-rehash --pager="less -S -n -i -F -X"'
alias p=pushd pp=popd
alias ps='ps auxw'
alias q=exit
alias quit=exit
alias r=rails
alias rm='rm -i'
alias ra=rake
alias rs='bundle exec rspec spec'
alias s=screen
alias sd='sudo -H -s'
alias sr='screen -D -RR'
alias sd='sudo -H -s'
alias sudo='sudo -H'
alias sudu=sudo
alias t=tmux
alias tl='tmux ls'
alias tf='tail -f'
alias mux=tmuxinator
if [ -x "$BIN_PATH/vim" ]; then
alias v='vim'
else
alias v='vi'
fi
alias x=exit
alias watch='watch -n 1 -dc'
alias -s zip=zipinfo
alias -s tgz=gzcat
alias -s gz=gzcat
alias -s tbz=bzcat
alias -s bz2=bzcat
alias -s java=lv
alias -s c=lv
alias -s h=lv
alias -s C=lv
alias -s cpp=lv
alias -s conf=lv
alias -s txt=lv
alias -s xml=lv
alias -g psg="$PS_CMD | grep"
alias -g lsg='ls | grep'
# dabbrev
HARDCOPYFILE=$HOME/.screen-hardcopy
touch $HARDCOPYFILE
dabbrev-complete() {
local reply lines=80 # 80 lines
screen -X eval "hardcopy -h $HARDCOPYFILE"
reply=($(sed '/^$/d' $HARDCOPYFILE | sed '$ d' | tail -$lines))
compadd - "${reply[@]%[*/=@|]}"
}
zle -C dabbrev-complete menu-complete dabbrev-complete
bindkey '^o' dabbrev-complete
bindkey '^o^_' reverse-menu-complete
# auto-fu.zsh - https://github.com/hchbaw/auto-fu.zsh
if [ -f $HOME/.auto-fo.zsh/auto-fu.zsh ]; then
# zsh version 4.3 or later
if [[ $ZSH_VERSION == 4.<3->* || $ZSH_VERSION == <5->* ]]; then
source $HOME/.auto-fo.zsh/auto-fu.zsh
zle-line-init () {
auto-fu-init
}
zle -N zle-line-init
zstyle ':auto-fu:highlight' input bold
zstyle ':auto-fu:highlight' completion fg=white
zstyle ':auto-fu:var' postdisplay ''
zstyle ':completion:*' completer _oldlist _complete
fi
fi
# edit-file
edit-file() {
zle -I
local file
local -a words
words=(${(z)LBUFFER})
file="${words[$#words]}"
[[ -f "$file" ]] && $EDITOR "$file"
}
zle -N edit-file
bindkey "^x^f" edit-file
# git
function git() {
if ! (( $+_has_working_hub ))
then
hub --version &> /dev/null
_has_working_hub=$(($? == 0))
fi
if (( $_has_working_hub ))
then
hub "$@"
else
command git "$@"
fi
}
# past command-line
pbcopy-buffer() {
print -rn $BUFFER | pbcopy
zle -M "pbcopy: ${BUFFER}"
}
zle -N pbcopy-buffer
bindkey '^x^p' pbcopy-buffer
# dircolors
[ -f ~/.dircolors-solarized/dircolors.256dark ] && eval `dircolors ~/.dircolors-solarized/dircolors.256dark 2&> /dev/null`
# direnv
[ -x "$BIN_PATH/direnv" ] && eval "$(direnv hook zsh)"
# jump
[ -x "$BIN_PATH/jump" ] && eval "$(jump shell zsh --bind=zz)"
# source
if [ -d $HOME/.zsh.sources ]; then
for f (~/.zsh.sources/*) source "${f}"
fi
# load local configuration
[ -r $HOME/.zshrc.local ] && source $HOME/.zshrc.local
# uniquify my $PATH
typeset -U path cdpath fpath manpath
# zcompile
[ ! -f ~/.zshrc.zwc -o ~/.zshrc -nt ~/.zshrc.zwc ] && zcompile ~/.zshrc
[ ! -f ~/.zshrc.local.zwc -o ~/.zshrc.local -nt ~/.zshrc.local.zwc ] && zcompile ~/.zshrc.local
# for screen
[ -n "$STY" ] && source ~/.zlogin
# zprof
if exists zprof; then
zprof | less
fi
# iTerm2 shell integration - https://iterm2.com/documentation-shell-integration.html
[ -e "$HOME/.iterm2_shell_integration.zsh" ] && source "$HOME/.iterm2_shell_integration.zsh"
| true
|
9c26d4ff37fc10985d4ea8558db4f268afb10fa6
|
Shell
|
smmzhang/mf2f
|
/run.sh
|
UTF-8
| 3,584
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash
input=$1
ref=$2
first=$3
last=$4
data=$5
format=03
noise_level=25
PYTHON=python3.6
##make data folder and sub-folders
if [ ! -d $data ];then
mkdir $data
fi
if [ ! -d $data/flow ];then
mkdir $data/flow
fi
if [ ! -d $data/mask_collision ];then
mkdir $data/mask_collision
fi
if [ ! -d $data/mask_warping_res ];then
mkdir $data/mask_warping_res
fi
if [ ! -d $data/results_8sigmas ];then
mkdir $data/results_8sigmas
fi
if [ ! -d $data/results_online_no_teacher ];then
mkdir $data/results_online_no_teacher
fi
if [ ! -d $data/results_online_with_teacher ];then
mkdir $data/results_online_with_teacher
fi
if [ ! -d $data/results_offline_no_teacher ];then
mkdir $data/results_offline_no_teacher
fi
if [ ! -d $data/results_offline_with_teacher ];then
mkdir $data/results_offline_with_teacher
fi
##useful paths
flow=$data/flow/%${format}d.flo
mask_collision=$data/mask_collision/%${format}d.png
mask_warping_res=$data/mask_warping_res/%${format}d.png
eight_sigmas=$data/results_8sigmas/%${format}d.tiff
results_online_no_teach=$data/results_online_no_teacher/%${format}d.png
results_online_with_teach=$data/results_online_with_teacher/%${format}d.png
results_offline_no_teach=$data/results_offline_no_teacher/%${format}d.png
results_offline_with_teach=$data/results_offline_with_teacher/%${format}d.png
##compute flow
cd tvl1flow
bash tvl1flow.sh $input $first $(($last-1)) $flow
cd ..
echo flows computed
##compute collision mask
cd collision_mask
bash compute_mask.sh $flow $first $(($last-1)) $mask_collision
cd ..
echo collision masks computed
##compute warping_error_mask
cd warping_res_mask
$PYTHON compute_threshold.py --dummy $data --output $mask_warping_res --input $input --flow $flow --first $first --last $last
rm $data/{downs.tiff,downs_warp.tiff,dwo.png,mask_invalid_pixels.png,warping_error.tiff,warp.tiff,WERR.tiff}
cd ..
echo warping res masks computed
## compute the results with FastDVDnet-8sigmas
$PYTHON video_f2f_8sigmas.py --input $input --ref $ref --flow $flow --mask_collision $mask_collision --mask_warping_res $mask_warping_res --output $eight_sigmas --first $first --last $last --noise_level $noise_level
echo FastDVDnet-8sigmas computed
##compute the results with the method online no teacher
$PYTHON video_f2f_online_no_teacher.py --input $input --ref $ref --flow $flow --mask_collision $mask_collision --mask_warping_res $mask_warping_res --output $results_online_no_teach --first $first --last $last --noise_level $noise_level
echo online no teacher computed
##compute the results with the method online with teacher
$PYTHON video_f2f_online_with_teacher.py --input $input --ref $ref --flow $flow --mask_collision $mask_collision --mask_warping_res $mask_warping_res --output $results_online_with_teach --first $first --last $last --teacher_outputs $eight_sigmas --noise_level $noise_level
echo online with teacher computed
##compute the results with the method offline no teacher
$PYTHON video_f2f_offline_no_teacher.py --input $input --ref $ref --flow $flow --mask_collision $mask_collision --mask_warping_res $mask_warping_res --output $results_offline_no_teach --first $first --last $last --noise_level $noise_level
echo offline no teacher computed
##compute the results with the method offline with teacher
$PYTHON video_f2f_offline_with_teacher.py --input $input --ref $ref --flow $flow --mask_collision $mask_collision --mask_warping_res $mask_warping_res --output $results_offline_with_teach --first $first --last $last --teacher_outputs $eight_sigmas --noise_level $noise_level
| true
|
87eeaca7608e192bae7285b6b57fa312b23419d0
|
Shell
|
ifireball/scripts
|
/cloneVM.bash
|
UTF-8
| 2,380
| 3.890625
| 4
|
[] |
no_license
|
#!/bin/bash
# cloneVM.bash - This script clones the "CentOS" VirtualBox VM, it can be made
# to clone other VMs with a very simple configuration change,
# see below.
#
# Copyright 2007 Barak Korren
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/.
#
# --------------------------- Configuration Variables -------------------------
VBOXMANAGE="/usr/bin/vboxmanage"
# Where to put the VMs, this is the default location for a non-root user
VBBASE="$HOME/.VirtualBox"
VMDIR="$VBBASE/Machines"
VDIDIR="$VBBASE/VDI"
# The name of the VM we're duplicating
ORIGVM="CentOS"
ORIGVDI="$VDIDIR/$ORIGVM.vdi"
# Unless you want to improve my script you
# needn't change anything below this line
# ------------------------------------------------------------------------------
usage() {
cat 1>&2 <<EOF
Usage:
$(basename $0) VMNAME
Creates a new clone of the $ORIGVM VM and names it with he given name.
EOF
exit 1
}
# Just enough input processing to make it not explode in your face...
[[ $# -ne 1 ]] && usage
NEWVM="$1"
shift
if [[ "$NEWVM" == "$ORIGVM" ]]; then
echo "New VM name must be different then \"$ORIGVM\"." 1>&2
usage
fi
NEWVDI="$VDIDIR/$NEWVM.vdi"
# This is how you actually clone the VM:
# 1st you clone the virtual disk
$VBOXMANAGE clonevdi "$ORIGVDI" "$NEWVDI"
# Then you register the new disk with VirtualBox
$VBOXMANAGE registerimage disk "$NEWVDI"
# When you're done with the disk you create a new VM
$VBOXMANAGE createvm -name "$NEWVM" -register
# Finally configure the VM to your liking, including adding the cloned disk
# TODO: Parametrise everything here
$VBOXMANAGE modifyvm "$NEWVM" \
-ostype "linux26" \
-memory "256M" \
-boot1 "disk" -boot2 "dvd" -boot3 "floppy" \
-hda "$NEWVDI" \
-nic1 "nat" \
-audio none
| true
|
a55d5c4aac651c973fc7ceeb861b77b6be94cdb9
|
Shell
|
pristinenoise/dots
|
/os/osx/apps.sh
|
UTF-8
| 1,803
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
#
# Application installer (via brew-cask)
#
set -e
# Apps
apps=(
qlcolorcode
appcleaner
qlmarkdown
seil
vagrant
flash
iterm2
qlprettypatch
shiori
sublime-text3
virtualbox
flux
qlstephen
sketch
vlc
cloudup
font-m-plus
nvalt
quicklook-json
skype
transmission
apikitchen
mamp
onepassword
airserver
atext
adium
adobe-air
adobe-creative-cloud
appcleaner
audacity
audio-hijack-pro
byword
calibre
cleanmymac
codekit
colloquy
divvy
dropbox
filezilla
firefox
flux
handbrake
handbrakecli
iterm2
launchbar
macvim
mp4tools
mplayerx
mumble
omnifocus
pixelmator
simple-comic
sketch
sublime-text3
spotify
steam
spotify-menubar
transmission
vlc
xscope
)
# fonts
fonts=(
font-m-plus
font-clear-sans
font-roboto
)
# Specify the location of the apps
appdir="/Applications"
# Check for Homebrew
if test ! $(which brew); then
echo "Installing homebrew..."
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
main() {
# Ensure homebrew is installed
homebrew
# Install homebrew-cask
echo "installing cask..."
brew tap phinze/homebrew-cask
brew install brew-cask
# Tap alternative versions
brew tap caskroom/versions
# Tap the fonts
brew tap caskroom/fonts
# install apps
echo "installing apps..."
brew cask install --appdir=$appdir ${apps[@]}
# install fonts
echo "installing fonts..."
brew cask install ${fonts[@]}
# link with alfred
brew cask launchbar link
cleanup
}
homebrew() {
if test ! $(which brew); then
echo "Installing homebrew..."
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
}
cleanup() {
brew cleanup
}
main "$@"
exit 0
| true
|
71cf269f0941f5d8c7685c099bbbe01d14784aad
|
Shell
|
brewingcode/dotfiles
|
/bin/imgimg
|
UTF-8
| 3,078
| 4.40625
| 4
|
[] |
no_license
|
#!/bin/bash
# Stitch two images together with optional captions
# Copyright kray.me 2019
VERTICAL=''
usage() {
echo "Usage: $0 [OPTIONS] IMAGE_1 IMAGE2
Stitch two images and put captions below. Image path is echo'd to stdout,
and the image itself is copied to the pasteboard.
OPTIONS
-l, --labels Add labels separated by comma. eg: before,after
-c, --clipboard Use most recent images from Alfred Clipboard history
-C, --clipswap --clipboard the images in the opposite order
-v, --vertical Combine images vertically instead of horizontally
The default order of --clipboard is:
Horizontal: [ second most recent ] [ most recent]
Vertical: [ second most recent ]
[ most recent ]"
}
add_border_caption() {
local IMG="${1}"
local CAPTION="${2}"
local COLOR="${3}"
local IMG_EXT="${IMG##*.}"
local IMG_BORDER=$(mktemp /tmp/tmp.XXXXX)".$IMG_EXT"
if [ -n "$CAPTION" ]; then
convert "$IMG" -bordercolor black -border 1 -background "$COLOR" label:"$CAPTION" -gravity Center -append "$IMG_BORDER"
else
convert "$IMG" -bordercolor black -border 1 "$IMG_BORDER"
fi
echo ${IMG_BORDER}
}
# $1 - first image
# $1 - second image
imgimg() {
local IMG1="${1}"
local IMG2="${2}"
IFS="," read -ra LABEL1 <<< "${LABELS}";
local LABEL2=`echo ${LABELS##*,}`
local IMG1_BORDER=$(add_border_caption "$IMG1" "$LABEL1" Khaki)
local IMG2_BORDER=$(add_border_caption "$IMG2" "$LABEL2" Plum)
local OUT=$(mktemp /tmp/tmp.XXXXX).png
if [ -n "$VERTICAL" ]; then
cmd='-append -gravity west'
else
cmd='+append -gravity south'
fi
convert $cmd -background none "$IMG1_BORDER" "$IMG2_BORDER" "$OUT"
echo "$OUT"
pngcopy "$OUT"
}
sql() {
root="$1"
offset="$2"
f=$(sqlite3 "$root/clipboard.alfdb" 'select dataHash from clipboard where dataHash like "%.tiff" order by ts desc limit 1 offset '"$offset")
echo "$root/clipboard.alfdb.data/$f"
}
# Parse CLI options and dispatch: stitch either the two given image paths
# or the two most recent images from the Alfred clipboard history.
main() {
  LABELS=''
  CLIPBOARD=''
  CLIPSWAP=''
  images=()
  while [ $# -gt 0 ]; do
    case "$1" in
      -h|--help)
        usage;
        exit 0;;
      -l|--labels)
        # Fixed: consume the option's VALUE. Previously this assigned the
        # flag itself ("-l") to LABELS and left the label string to be
        # mis-collected as an image path.
        shift
        LABELS=$1
        ;;
      -c|--clipboard)
        CLIPBOARD=1
        ;;
      -C|--clipswap)
        CLIPBOARD=1
        CLIPSWAP=1
        ;;
      -v|--vertical)
        VERTICAL=1
        ;;
      *)
        images+=("$1")
        ;;
    esac
    shift
  done
  if [[ -n "$CLIPBOARD" ]]; then
    alf="$HOME/Library/Application Support/Alfred 3/Databases"
    # Most recent clipboard image goes last (right / bottom) unless swapped.
    if [[ -n "$CLIPSWAP" ]]; then
      imgimg "$(sql "$alf" 0)" "$(sql "$alf" 1)"
    else
      imgimg "$(sql "$alf" 1)" "$(sql "$alf" 0)"
    fi
    exit 0
  fi
  if (( "${#images[@]}" != 2 )); then
    echo "error: missing two filenames"
    usage
    exit 1
  fi
  imgimg "${images[0]}" "${images[1]}"
}
main "$@"
| true
|
faace0e43086119860a85a293eb71f79ca342ddd
|
Shell
|
LabDwellers/ConkyConfig
|
/Zanty/current/configscripts/setup
|
UTF-8
| 1,715
| 3.90625
| 4
|
[] |
no_license
|
#!/bin/sh
# This is a simple script that should allow the installation of a conky file
# to be simple and painfree. The idea is that once the user has cloned the git
# repository, a quick running of the setup script allows the ease of installation
# that many conky's seem hard to do.
printf "Setting up conky in the current user's home directory\n";
printf "${HOME}/.conky/ will be the conky configuration folder\n\n";
printf "WARNING!! This script assumes that all of the conky scripts to\n";
printf "be installed are in the parent directory !!\n\n";
printf "Proceed with setup? [y/n] ";
read resp;
# Quoted: an empty or multi-word reply previously made this test malformed.
if [ "$resp" != 'y' ]
then
printf "Setup aborted";
exit;
fi;
printf "Proceeding with setup\n";
# Create the .conky/ in the home directory
mkdir "${HOME}/.conky";
# Move rcfiles over to the home folder --
cp ../*rc "${HOME}/.conky/";
cp -R ../lua/ "${HOME}/.conky/";
# Generate the conky start script: one launcher line per rc file
# (replaces ten copy-pasted echo lines).
touch "${HOME}/.start_conky";
for rc in conkyrc memrc swaprc networkrc signalrc cpu0rc cpu15rc cpu26rc cpu37rc cpu48rc
do
echo "/usr/bin/conky -c ${HOME}/.conky/${rc}" >> "${HOME}/.start_conky";
done
chmod +x "${HOME}/.start_conky";
echo "complete"
| true
|
e7e454e4213ba703f74739c898d6e663e4723368
|
Shell
|
spdk/spdk
|
/test/ftl/restore.sh
|
UTF-8
| 2,374
| 3.5
| 4
|
[
"Intel",
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
#!/usr/bin/env bash
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (C) 2018 Intel Corporation
# All rights reserved.
#
# FTL bdev restore test: create an FTL bdev, persist its config, stop the
# target, then use spdk_dd (driven by the saved JSON config) to write and
# read back data — including overlapping writes — verifying md5 checksums.
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $testdir/../..)
source $rootdir/test/common/autotest_common.sh
source $testdir/common.sh
rpc_py=$rootdir/scripts/rpc.py
mount_dir=$(mktemp -d)
# -u: reuse an existing FTL uuid; -c: PCI BDF of the NV-cache device;
# -f: enable FTL fast shutdown.
while getopts ':u:c:f' opt; do
case $opt in
u) uuid=$OPTARG ;;
c) nv_cache=$OPTARG ;;
f) fast_shutdown=1 ;;
?) echo "Usage: $0 [-f] [-u UUID] [-c NV_CACHE_PCI_BDF] BASE_PCI_BDF" && exit 1 ;;
esac
done
shift $((OPTIND - 1))
device=$1
# RPC timeout in seconds for the (potentially slow) bdev_ftl_create call.
timeout=240
# Remove generated artifacts and stop the target; invoked via the exit trap.
restore_kill() {
rm -f $testdir/testfile
rm -f $testdir/testfile.md5
rm -f $testdir/config/ftl.json
killprocess $svcpid
remove_shm
}
trap "restore_kill; exit 1" SIGINT SIGTERM EXIT
"$SPDK_BIN_DIR/spdk_tgt" &
svcpid=$!
# Wait until spdk_tgt starts
waitforlisten $svcpid
# Carve a base bdev out of the given device (size presumably in MiB,
# i.e. ~101 GiB — TODO confirm against create_base_bdev in common.sh).
split_bdev=$(create_base_bdev nvme0 $device $((1024 * 101)))
if [ -n "$nv_cache" ]; then
nvc_bdev=$(create_nv_cache_bdev nvc0 $nv_cache $split_bdev)
fi
# Limit the L2P table to 10% of the bdev size, converted to MiB.
l2p_dram_size_mb=$(($(get_bdev_size $split_bdev) * 10 / 100 / 1024))
ftl_construct_args="bdev_ftl_create -b ftl0 -d $split_bdev --l2p_dram_limit $l2p_dram_size_mb"
[ -n "$uuid" ] && ftl_construct_args+=" -u $uuid"
[ -n "$nv_cache" ] && ftl_construct_args+=" -c $nvc_bdev"
# NOTE(review): if -f was not given, $fast_shutdown is unset and this test
# emits "unary operator expected" — appears tolerated today; confirm.
if [ "$fast_shutdown" -eq "1" ]; then
ftl_construct_args+=" --fast-shutdown"
fi
$rpc_py -t $timeout $ftl_construct_args
# Persist the bdev subsystem config so spdk_dd can recreate ftl0 after the
# target below is killed.
(
echo '{"subsystems": ['
$rpc_py save_subsystem_config -n bdev
echo ']}'
) > $testdir/config/ftl.json
$rpc_py bdev_ftl_unload -b ftl0
killprocess $svcpid
# Generate random data and calculate checksum
dd if=/dev/urandom of=$testdir/testfile bs=4K count=256K
md5sum $testdir/testfile > $testdir/testfile.md5
# Write and read back the data, verifying checksum
"$SPDK_BIN_DIR/spdk_dd" --if=$testdir/testfile --ob=ftl0 --json=$testdir/config/ftl.json
"$SPDK_BIN_DIR/spdk_dd" --ib=ftl0 --of=$testdir/testfile --json=$testdir/config/ftl.json --count=262144
md5sum -c $testdir/testfile.md5
# Write second time at overlapped sectors, read back and verify checkum
"$SPDK_BIN_DIR/spdk_dd" --if=$testdir/testfile --ob=ftl0 --json=$testdir/config/ftl.json --seek=131072
"$SPDK_BIN_DIR/spdk_dd" --ib=ftl0 --of=$testdir/testfile --json=$testdir/config/ftl.json --skip=131072 --count=262144
md5sum -c $testdir/testfile.md5
trap - SIGINT SIGTERM EXIT
restore_kill
| true
|
a4c73a8b3b796b907934862db9ab04d2de3c3552
|
Shell
|
GA-CyberWorkforceAcademy/kinetic
|
/bootstrap/bootstrap.sh
|
UTF-8
| 3,795
| 3.78125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Bootstrap a KVM host for kinetic: install packages, fetch a Debian cloud
# image, and create the salt + pxe seed VMs.
#
# Must run as root (apt, libvirt and the /kvm paths all require it).
if [ "$EUID" -ne 0 ]
then echo "Please run this script as the root user or with sudo"
# Fixed: a bare `exit` returned the echo's status (0), signalling success
# to callers even though nothing was done.
exit 1
fi
# All four options (-i -f -p -k) are required, each with a value => 8 args.
if [ $# -lt 8 ]; then
echo 1>&2 "$0: not enough arguments"
exit 2
fi
# Option parsing. Based on how the values are used below:
#  -i  host interface substituted into the libvirt domain XML — TODO confirm
#  -f  gitfs fileserver remote URL for the salt master
#  -p  git ext_pillar remote URL for the salt master
#  -k  value substituted for {{ key }} in cloud-init user-data (presumably
#      an SSH public key; verify against the upstream template)
while getopts ":i:f:p:k:" opt; do
case ${opt} in
i )
interface=$OPTARG
;;
f )
fileroot=$OPTARG
;;
p )
pillar=$OPTARG
;;
k )
key=$OPTARG
;;
\? )
echo "Invalid option: $OPTARG." 1>&2
exit
;;
: )
echo "Invalid option: $OPTARG requires an argument" 1>&2
exit
;;
esac
done
# Non-interactive apt. Fixed: must be EXPORTED — a plain assignment is not
# visible to the apt-get child processes, which could still prompt.
export DEBIAN_FRONTEND=noninteractive
## Packages
apt-get update
apt-get -y dist-upgrade
apt-get -y install qemu-kvm qemu-utils genisoimage curl libvirt-clients libvirt-daemon-system
## Directories
mkdir -p /kvm/images
mkdir -p /kvm/vms/salt/data
mkdir -p /kvm/vms/pxe/data
## Images
# Download the Debian 9 OpenStack cloud image only when the local copy's
# sha512 is absent from the upstream SHA512SUMS manifest: we grep the remote
# manifest FOR the local hash, so a match proves the local file is current.
if [ ! -f /kvm/images/debian9.raw ]
then
# Sentinel that can never match a real sha512 entry -> forces a download.
local_image_hash=bad
else
local_image_hash=$(sha512sum /kvm/images/debian9.raw | awk '{ print $1 }')
fi
remote_image_hash=$(curl https://cdimage.debian.org/cdimage/openstack/current-9/SHA512SUMS | grep $local_image_hash | awk '{ print $1 }')
if [ "$local_image_hash" == "$remote_image_hash" ]
then
echo No new image needed. Skipping download.
else
echo Image hash mismatch. Re-downloading.
wget https://cdimage.debian.org/cdimage/openstack/current-9/debian-9-openstack-amd64.raw -O /kvm/images/debian9.raw
fi
## salt
# Seed disk for the salt master VM; copied from the base image only once.
if [ ! -f /kvm/vms/salt/disk0.raw ]
then
cp /kvm/images/debian9.raw /kvm/vms/salt/disk0.raw
fi
# Render the libvirt domain XML and cloud-init meta-data from the upstream
# templates, substituting the VM name and host interface.
curl -s https://raw.githubusercontent.com/GeorgiaCyber/kinetic/master/bootstrap/resources/common.xml | sed "s/{{ name }}/salt/g; s/{{ interface }}/$interface/g" > /kvm/vms/salt/config.xml
curl -s https://raw.githubusercontent.com/GeorgiaCyber/kinetic/master/bootstrap/resources/common.metadata | sed "s/{{ name }}/salt/g" > /kvm/vms/salt/data/meta-data
# Render user-data: {{ opts }} becomes the salt bootstrap flags plus a JSON
# minion/master config wiring gitfs ($fileroot) and the git ext_pillar
# ($pillar); sed uses % as delimiter because the payload is full of slashes.
curl -s https://raw.githubusercontent.com/GeorgiaCyber/kinetic/master/bootstrap/resources/common.userdata | sed "s%{{ opts }}%-M -X -i salt -J \'{ \"default_top\": \"base\", \"fileserver_backend\": [ \"git\" ], \"ext_pillar\": [ { \"git\": [ { \"master $pillar\": [ { \"env\": \"base\" } ] } ] } ], \"ext_pillar_first\": true, \"gitfs_remotes\": [ { \"$fileroot\": [ { \"saltenv\": [ { \"base\": [ { \"ref\": \"master\" } ] } ] } ] } ], \"gitfs_saltenv_whitelist\": [ \"base\" ] }\'%g;s%{{ key }}%$key%g" > /kvm/vms/salt/data/user-data
# {{ extra_commands }} -> generate a GPG keypair for salt pillar encryption
# and export the public key to /root/key.gpg inside the VM.
sed -i "s,{{ extra_commands }},mkdir -p /etc/salt/gpgkeys;chmod 0700 /etc/salt/gpgkeys;curl -s https://raw.githubusercontent.com/GeorgiaCyber/kinetic/master/bootstrap/resources/key-generation | gpg --expert --full-gen-key --homedir /etc/salt/gpgkeys/ --batch;gpg --export --homedir /etc/salt/gpgkeys -a > /root/key.gpg,g" /kvm/vms/salt/data/user-data
# Build the cloud-init seed ISO (volume label "cidata") and start the domain.
genisoimage -o /kvm/vms/salt/config.iso -V cidata -r -J /kvm/vms/salt/data/meta-data /kvm/vms/salt/data/user-data
virsh create /kvm/vms/salt/config.xml
##pxe
# Seed disk for the pxe VM; copied from the base image only once.
if [ ! -f /kvm/vms/pxe/disk0.raw ]
then
cp /kvm/images/debian9.raw /kvm/vms/pxe/disk0.raw
fi
# Render domain XML, cloud-init meta-data and user-data for the pxe VM.
# Unlike the salt VM, {{ opts }} is just the minion bootstrap flags and
# {{ extra_commands }} is a no-op echo.
curl -s https://raw.githubusercontent.com/GeorgiaCyber/kinetic/master/bootstrap/resources/common.xml | sed "s/{{ name }}/pxe/g; s/{{ interface }}/$interface/g" > /kvm/vms/pxe/config.xml
curl -s https://raw.githubusercontent.com/GeorgiaCyber/kinetic/master/bootstrap/resources/common.metadata | sed "s/{{ name }}/pxe/g" > /kvm/vms/pxe/data/meta-data
curl -s https://raw.githubusercontent.com/GeorgiaCyber/kinetic/master/bootstrap/resources/common.userdata | sed "s/{{ opts }}/-X -i pxe/g;s/{{ key }}/$key/g" > /kvm/vms/pxe/data/user-data
sed -i "s,{{ extra_commands }},echo No extra commands specified,g" /kvm/vms/pxe/data/user-data
# Build the cloud-init seed ISO and start the domain.
genisoimage -o /kvm/vms/pxe/config.iso -V cidata -r -J /kvm/vms/pxe/data/meta-data /kvm/vms/pxe/data/user-data
virsh create /kvm/vms/pxe/config.xml
| true
|
d686c5b85568c3ea35e1c947f37bb4d7f5b1f16d
|
Shell
|
TatriX/dull-of-war
|
/assets/compile.sh
|
UTF-8
| 286
| 3.203125
| 3
|
[] |
no_license
|
#!/bin/sh
# Sprite sheet helper: concatenate all files matching PATTERN horizontally,
# then append a mirrored (flopped) copy and write the result to OUTPUT.
pattern=$1
output=$2
# Deprecated `-o` inside [ ] replaced with two tests joined by ||.
if [ -z "$pattern" ] || [ -z "$output" ]; then
    echo "Usage $0 pattern output"
    # Fixed: the example previously interpolated the undefined $u
    # instead of the script name $0.
    echo "Example: $0 'j?.png' jeep.png"
    exit;
fi
set -x
# $pattern is deliberately unquoted so the shell expands the glob here.
convert $pattern +append f1.png
convert f1.png -flop f2.png
convert f1.png f2.png +append $output
rm f1.png f2.png
| true
|
62bc381e1b17c716fdb7745f84609db308e1b7c7
|
Shell
|
tnc-ca-geo/animl-ml
|
/scripts/get-libs.sh
|
UTF-8
| 1,982
| 3.796875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# Clone sagmaker-tensorflow-serving-container
# and Microsoft's CameraTrap repos
parentPath=$(cd ../ && pwd)
cameraTrapDir=CameraTraps
cameraTrapRepo=https://github.com/microsoft/CameraTraps
cameraTrapCommit=625c1f430bc0c832951a9707a97d446b57e3741e
sagemakerContainerDir=sagemaker-tensorflow-serving-container
sagemakerContainerRepo=https://github.com/aws/sagemaker-tensorflow-serving-container
sagemakerContainerCommit=fc9013c5cc6cb521585b89aa2c984cc93864f445

# Clone $2 into $parentPath/$1, or fast-forward master when it already
# exists. Replaces two copy-pasted stanzas with one helper.
# $1 - directory name under $parentPath
# $2 - git remote URL
# $3 - commit hash (only used by the commented-out pinning variant)
sync_repo() {
  local dir=$1
  local repo=$2
  local commit=$3
  if [ -d "$parentPath/$dir" ]; then
    echo -e "Directory $parentPath/$dir already exits, pulling down latest commits ...\n"
    local pullCmdRun
    pullCmdRun=$(
      cd "$parentPath/$dir"
      git checkout master
      git pull
    )
    echo -e "${pullCmdRun}\n\n"
  else
    echo -e "No dir found for $parentPath/$dir, cloning remote ...\n"
    local cloneCmdRun
    cloneCmdRun=$(
      git clone "$repo" "$parentPath/$dir"
      # uncomment below to create a branch at a specific commit
      # git clone "$repo" "$parentPath/$dir" &&
      #   cd "$parentPath/$dir" &&
      #   git checkout "$commit" &&
      #   git checkout -b pinned-commit
    )
    echo -e "${cloneCmdRun}\n\n"
  fi
}

# get cameratrap repo
sync_repo "$cameraTrapDir" "$cameraTrapRepo" "$cameraTrapCommit"
# get sagemaker container repo
sync_repo "$sagemakerContainerDir" "$sagemakerContainerRepo" "$sagemakerContainerCommit"
| true
|
c3e8778d3f128dea83d20b46cd6e7fa0b4629f85
|
Shell
|
akosasante/TradeMachineServer
|
/check_migrations.sh
|
UTF-8
| 132
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/bash
# Exit 1 ("Not Empty") when dist/db/migrations contains an entry whose name
# matches $1; exit 0 ("Empty") otherwise.
# Fixed: $1 is now quoted (the old unquoted grep broke on empty or
# space-containing arguments) and grep -q replaces capturing the listing
# into a command substitution just to test it.
if ls -A dist/db/migrations | grep -q -- "$1"; then
echo "Not Empty" && exit 1
else
echo "Empty" && exit 0
fi
| true
|
886bb6e4512584e4ce4feb59021795bfb22eb774
|
Shell
|
delkyd/alfheim_linux-PKGBUILDS
|
/avxsynth-plugin-masktools2-git/PKGBUILD
|
UTF-8
| 893
| 2.71875
| 3
|
[] |
no_license
|
# Maintainer: Gustavo Alvarez <sl1pkn07@gmail.com>
# Arch Linux PKGBUILD for the avxsynth masktools2 plugin, built from git HEAD.
_plug=masktools2
pkgname=avxsynth-plugin-${_plug}-git
pkgver=20120620.d1028fd
pkgrel=1
pkgdesc="Plugin for Avxsynth. ${_plug} (GIT Version)"
arch=('i686' 'x86_64')
url="https://github.com/fundies/MaskTools2-linux"
license=('GPL')
depends=('avxsynth')
makedepends=('git' 'yasm' 'boost')
provides=("avxsynth-plugin-${_plug}")
conflicts=("avxsynth-plugin-${_plug}")
source=("${_plug}::git://github.com/fundies/MaskTools2-linux.git")
md5sums=('SKIP')
_gitname="${_plug}"
# Version string: <commit date YYYYMMDD>.<short hash> of the checked-out HEAD.
pkgver() {
cd "${_gitname}"
echo "$(git log -1 --format="%cd" --date=short | tr -d '-').$(git log -1 --format="%h")"
}
build() {
cd "${_gitname}"
# Mark the stack non-executable in the produced shared object.
LDFLAGS+=",-z,noexecstack"
make
}
# NOTE(review): modes 775/664 look like typos for the conventional 755/644
# — confirm before changing, pacman will install them as written.
package(){
cd "${_gitname}"
install -Dm775 libmasktools.so "${pkgdir}/usr/lib/avxsynth/libmasktools.so"
install -Dm664 readme.txt "${pkgdir}/usr/share/doc/avxsynth/plugins/${_plug}/README"
}
| true
|
756d2cd27e8c8441c56182dacffcb8823afb62c8
|
Shell
|
BuenoAlex/Exercicios_SHELL
|
/processoseletivo/script_create_users.sh
|
UTF-8
| 639
| 3.890625
| 4
|
[] |
no_license
|
#!/bin/bash
#Tasks:
#1- Scan the directory for csv files
#2- Read each file
#3- Insert (create) one user per line
# Create one system user per CSV row (header skipped).
# Expected columns: NOME;CIDADE;CARGO
# $1 - path to the csv file
read_file_content(){
local file_name=$1
# Fixed: IFS='\n' set IFS to the literal characters backslash and "n"
# (stripping them from line edges), and read without -r mangled
# backslashes. IFS= read -r reads each line verbatim.
sed 1d "$file_name" | while IFS= read -r line
do
# Split the row on ';' into its three fields.
IFS=';' read -r NOME CIDADE CARGO <<< "$line"
# Quoted expansions: names/cities containing spaces previously
# word-split into extra adduser arguments.
if sudo adduser "$NOME" --gecos "$CIDADE,$CARGO" --ingroup usuarionovo --disabled-login; then
echo "$NOME $CIDADE $CARGO--usuario criado"
else
echo "Erro ao criar o usuário $NOME"
fi
done
}
# Find every csv file in the working directory and process it.
read_files(){
# nullglob: with no *.csv present the loop body never runs, instead of
# iterating once over the literal pattern.
shopt -s nullglob
for arquivo in *.csv
do
# Fixed: quoted — filenames containing spaces previously word-split
# into multiple arguments.
read_file_content "$arquivo"
done
}
read_files
| true
|
f0359cb9c8e22fdc03ffbc90478f05f759b0512d
|
Shell
|
twz123/zksr
|
/files/usr/local/bin/start-schema-registry.sh
|
UTF-8
| 586
| 3.015625
| 3
|
[] |
no_license
|
#!/usr/bin/env sh
# Wait for the local Kafka broker to respond, then exec Confluent Schema
# Registry in the foreground (exec: the JVM replaces this shell as PID).
# Ensure Kafka is up
KAFKA_OK=0
# Poll up to 30 times (~3s total).
# NOTE(review): `sleep 0.1` (fractional) is not POSIX — relies on the
# installed sh/sleep supporting it; confirm for the target image.
for i in $(seq 1 30); do
# NOTE(review): -z declares Kafka "up" when the topic listing is EMPTY —
# which is also exactly what a *failed* kafka-topics.sh invocation prints
# to stdout. This looks inverted (or assumes a fresh broker with zero
# topics and relies on failures being slow); confirm the intended check.
[ -z "$( "$KAFKA_HOME/bin/kafka-topics.sh" --zookeeper localhost:2181 --list | head -n1 )" ] && {
KAFKA_OK=1
break
}
sleep 0.1
done
[ $KAFKA_OK -eq 1 ] || exit 1
# Run schema registry
# Classpath = the schema-registry jar (version-globbed) plus the
# pre-computed dependency list stored in etc/classpath.
cd $SCHEMA_REGISTRY_HOME && \
exec java -cp "$( ls -1 libs/kafka-schema-registry-[0-9]*.jar ):$( cat etc/classpath )" \
-Dlog4j.configuration=file:/etc/zksr/schema-registry-log4j.properties \
io.confluent.kafka.schemaregistry.rest.SchemaRegistryMain \
etc/schema-registry.properties
| true
|
867300887a9d234af652bf171b788d04e09462f0
|
Shell
|
Daisymoo/DishCollectionTracker
|
/SourceCode
|
UTF-8
| 40,256
| 3.40625
| 3
|
[] |
no_license
|
#! /bin/bash
#Jessica Kovarik
#Dish Collection Tracker program allows users to track personal collection
#of select vintage dish brands. Users can also see listing of all known pieces
#to aid in building collection. Images are available of some dishes to help
#with identification of pieces.
#user starts program, menu appears
#user selects menu item from switch menu using case
#user can add, view, or delete items from person collection
#user can view entire collection of dishes produced for a line
#function definitions using yad for GUI
#checkFile functions check to ensure file exists before opening
#Fiesta functions
# Show the master Fiestaware catalogue, or an error dialog if its data
# file is missing.
checkFileFiesta ()
{
if [ ! -f fiestaDishes.txt ]; then
fileNotFound
return
fi
readFileFiesta
}
# Show the wanted-Fiesta list, or an error dialog if its data file is missing.
checkWantedFiesta ()
{
if [ ! -f wantedFiesta.txt ]; then
fileNotFound
return
fi
readFileWantFiesta
}
# Show the owned-Fiesta list, or an error dialog if its data file is missing.
checkOwnedFiesta ()
{
if [ ! -f ownedFiesta.txt ]; then
fileNotFound
return
fi
readFileOwnFiesta
}
#Pyrex checkFile functions
# Show the master Pyrex catalogue, or an error dialog if its data file
# is missing.
checkFilePyrex ()
{
if [ ! -f pyrexDishes.txt ]; then
fileNotFound
return
fi
readFilePyrex
}
# Show the wanted-Pyrex list, or an error dialog if its data file is missing.
checkWantedPyrex ()
{
if [ -f wantedPyrex.txt ]
then
# Fixed: previously called the undefined readFileWantedPyrex (extra
# "ed"); the function defined below is readFileWantPyrex, so the
# "Review Wanted Pyrex" menu item failed with "command not found".
readFileWantPyrex
else
fileNotFound
fi
}
# Show the owned-Pyrex list, or an error dialog if its data file is missing.
checkOwnedPyrex ()
{
if [ ! -f ownedPyrex.txt ]; then
fileNotFound
return
fi
readFileOwnPyrex
}
#Hazel Atlas checkFile functions
# Show the master Hazel Atlas catalogue, or an error dialog if its data
# file is missing.
checkFileHA ()
{
if [ ! -f haDishes.txt ]; then
fileNotFound
return
fi
readFileHA
}
# Show the wanted Hazel Atlas list, or an error dialog if its data file
# is missing.
checkWantedHA ()
{
if [ ! -f wantedHA.txt ]; then
fileNotFound
return
fi
readFileWantHA
}
# Show the owned Hazel Atlas list, or an error dialog if its data file
# is missing.
checkOwnedHA ()
{
if [ ! -f ownedHA.txt ]; then
fileNotFound
return
fi
readFileOwnHA
}
#function definitions using yad for GUI
#readFile functions to read .txt file
#Fiesta readFile functions
# Display the full Fiestaware catalogue (fiestaDishes.txt) in a yad text box.
# NOTE(review): the mid-line "\ --button=" sequence passes a literal space
# argument and an empty --button= to yad; it appears harmless but looks
# unintentional — confirm before cleaning up (same pattern below).
readFileFiesta ()
{
yad --width=800 --height=800 --text "Here are all the vintage Fiestaware dishes made." --text-info \ --button= --button=gtk-ok:1 < fiestaDishes.txt
}
# Display the user's wanted-Fiesta list (wantedFiesta.txt).
readFileWantFiesta ()
{
yad --width=800 --height=800 --text "Here are the Fiestaware dishes you want." --text-info \ --button= --button=gtk-ok:1 < wantedFiesta.txt
}
# Display the user's owned-Fiesta list (ownedFiesta.txt).
readFileOwnFiesta ()
{
yad --width=800 --height=800 --text "Here are the Fiestaware dishes you own." --text-info \ --button= --button=gtk-ok:1 < ownedFiesta.txt
}
#Pyrex readFile functions
# Display the full milkglass Pyrex catalogue (pyrexDishes.txt) in a yad
# text box.
readFilePyrex ()
{
yad --width=800 --height=800 --text "Here are all the vintage milkglass Pyrex dishes made." --text-info \ --button= --button=gtk-ok:1 < pyrexDishes.txt
}
# Display the user's wanted-Pyrex list (wantedPyrex.txt).
readFileWantPyrex ()
{
yad --width=800 --height=800 --text "Here are the Pyrex dishes you want." --text-info \ --button= --button=gtk-ok:1 < wantedPyrex.txt
}
# Display the user's owned-Pyrex list (ownedPyrex.txt).
readFileOwnPyrex ()
{
yad --width=800 --height=800 --text "Here are the Pyrex dishes you own." --text-info \ --button= --button=gtk-ok:1 < ownedPyrex.txt
}
#Hazel Atlas readFile functions
# Display the full Hazel Atlas catalogue (haDishes.txt) in a yad text box.
readFileHA ()
{
yad --width=800 --height=800 --text "Here are all the vintage Hazel Atlas dishes made." --text-info \ --button= --button=gtk-ok:1 < haDishes.txt
}
# Display the user's wanted Hazel Atlas list (wantedHA.txt).
readFileWantHA ()
{
yad --width=800 --height=800 --text "Here are the Hazel Atlas dishes you want." --text-info \ --button= --button=gtk-ok:1 < wantedHA.txt
}
# Display the user's owned Hazel Atlas list (ownedHA.txt).
readFileOwnHA ()
{
# Fixed: the dialog text said "you want" (copy-paste from the function
# above) even though this shows the OWNED list.
yad --width=800 --height=800 --text "Here are the Hazel Atlas dishes you own." --text-info \ --button= --button=gtk-ok:1 < ownedHA.txt
}
#File not found function
# Generic error dialog shown when an expected data file is missing; the
# caller returns to its menu afterwards.
fileNotFound ()
{
yad --text "File not found. Going back to menu." \ --button= --button=gtk-ok:1
}
#Functions for user menus
#top level menu to select dish brand
# Top-level brand picker. Stores the chosen drop-down entry in the global
# $action and yad's exit status in $ret (0 = OK, 1 = Exit button,
# 252 = window closed) for the main loop to dispatch on.
userAction()
{
action=$(yad --width 300 --entry --title "Dish Collection Tracker" \
--image=gnome-shutdown \
--button="gtk-ok:0" --button="Exit:1" \
--text "Choose a dish brand:" \
--entry-text \
"Fiestaware" "Pyrex" "Hazel Atlas" "Exit")
ret=$?
}
#Fiestaware menu using yad to create drop down menu
# Fiestaware sub-menu. Selection lands in the global $fiesta; yad's exit
# status lands in $aaa (252 = window closed, used by the caller to exit).
fiestaMenu()
{
fiesta=$(yad --width 300 --entry --title "Fiestaware Dish Menu" \
--button="gtk-ok:0" \
--text "Choose next action:" \
--entry-text \
"See all Fiestaware" "View Owned Fiesta" "Review Wanted Fiesta" "Edit Owned Fiesta" "Update Wanted Fiesta" "Search for Fiesta" "Look at Fiesta Images" "Exit")
aaa=$?
}
#Pyrex secondary menu using yad for drop down menu
# Selection in $pyrex; yad exit status in $bbb.
pyrexMenu()
{
pyrex=$(yad --width 300 --entry --title "Pyrex Dish Menu" \
--button="gtk-ok:0" \
--text "Choose next action:" \
--entry-text \
"See all Pyrex" "View Owned Pyrex" "Review Wanted Pyrex" "Edit Owned Pyrex" "Update Wanted Pyrex" "Search for Pyrex" "Look at Pyrex Images" "Exit")
bbb=$?
}
#Hazel Atlas secondary menu using yad to create drop down menu
# Selection in $ha; yad exit status in $ccc.
# NOTE(review): unlike the two menus above, there is no "Look at ... Images"
# entry here — presumably intentional (no HA image handler); confirm.
haMenu()
{
ha=$(yad --width 300 --entry --title "Hazel Atlas Dish Menu" \
--button="gtk-ok:0" \
--text "Choose next action:" \
--entry-text \
"See all Hazel Atlas" "View Owned Hazel Atlas" "Review Wanted Hazel Atlas" "Edit Owned Hazel Atlas" "Update Wanted Hazel Atlas" "Search for Hazel Atlas" "Exit")
ccc=$?
}
#users while loop for menu
while true; do
#start of program
userAction
[[ $ret -eq 1 ]] && yad --text "Exiting Dish Collection Tracker." && exit 0
[[ $ret -eq 252 ]] && yad --text "Exiting Dish Collection Tracker." && exit 0
case $action in
Fiestaware*)
{
#do while loop repeats menu
while true; do
#call fiestaMenu() function
fiestaMenu
#if "x" selected, will exit submenu
[[ $aaa -eq 252 ]] && yad --text "Exiting Fiestaware menu now." && break;
#switch statement to loop through menu
case $fiesta in
See*) checkFileFiesta
;;
View*) checkOwnedFiesta
;;
Review*) checkWantedFiesta
;;
Edit*)
{
while true; do
#checks file exists
if [ ! -f ownedFiesta.txt ]
then
fileNotFound
break;
else
{
yad --text "Select what to do in owned Fiestaware menu:" \
--button="Add dish to owned Fiestaware list":1 --button="Remove dish from owned Fiestaware list":2 --button="Exit this menu":3
ownedPick=$?
#loop owned Fiestaware submenu until user exits.
if [[ $ownedPick -eq 1 ]]
then
#Add owned dishes.
#Allows user to select dish or dishes from master all dishes list. #Increases uniformity and consistency in data file.
yad --text "Select which dish or dishes you want to add to Owned Dishes." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
sed G fiestaDishes.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#remove last extra space in file, so no hanging radio button
head -n -1 tempSelection2.txt > tempHoldSelection;
#allows one or more items to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --checklist --list --column="Fiestaware" --column="Click on item(s) to add to Owned Fiesta." --height=700 --width=700 < tempHoldSelection --button=gtk-ok:1)
#takes whatever line user selected, removes number from front of list, addes spaces after semicolun
#for user readability, and removes last character which is a pipe (|)
echo "$CHOICE" | sed 's/^[^;]*;//g' | sed 's/; */; /g' | sed s'/.$//' >> ownedFiesta.txt
yad --text-info --height=700 --width=700 < ownedFiesta.txt --button=gtk-ok:1
#removes temporary files after append file
rm tempSelection.txt;
rm tempSelection2.txt;
rm tempHoldSelection;
#Remove dish
elif [[ $ownedPick -eq 2 ]]
then
yad --text "Select a dish to remove from your owned list." \ --button= --button=gtk-ok:1
#allows one item to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --list --multiple --column="Click on item(s) to delete from Wanted Fiesta." --height=700 --width=700 < ownedFiesta.txt)
#takes whatever line user selected and sends output to temp file
echo $CHOICE >> temp3
#removes | at end and sends to another temp file
sed s'/.$//' temp3 >> temp4
#a second variable is created with output from temp file to then delete from ownedFiesta.txt file
newCHOICE=$(<temp4)
sed -i "/$newCHOICE/d" ownedFiesta.txt
yad --text-info --height=700 --width=700 < ownedFiesta.txt --button=gtk-ok:1
#removes temp files
rm temp3;
rm temp4;
#Exit menu
else
yad --text "Exiting Owned Fiestaware now." \ --button= --button=gtk-ok:1
break
fi
}
fi
#ends do while true loop
done
}
#return to Fiesta menu
fiestaMenu
;;
Update*)
{
#do while loop to go through menu
while true; do
#checks file exists
if [ ! -f wantedFiesta.txt ]
then
fileNotFound
break;
else
{
yad --text "Choose what to do with Wanted Fiestaware Dishes:" \
--button="Add dish to wanted Fiestaware list":1 --button="Remove dish from wanted Fiestaware list":2 --button="Exit this menu":3
wantedPick=$?
#loop wanted Fiestaware submenu until user exits.
#Add wanted dishes.
if [[ $wantedPick -eq 1 ]]
then
#Allows user to select dish or dishes from master all dishes list. #Increases uniformity and consistency in data file.
yad --text "Select which dish or dishes you want to add to Wanted Dishes." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
sed G fiestaDishes.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#allows one or more items to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --checklist --list --column="Fiestaware" --column="Click on item(s) to add to Wanted Fiesta." --height=700 --width=700 < tempSelection2.txt)
#takes whatever line user selected, removes number from front of list, addes spaces after semicolun
#for user readability, and removes last character which is a pipe (|)
echo "$CHOICE" | sed 's/^[^;]*;//g' | sed 's/; */; /g' | sed s'/.$//' >> wantedFiesta.txt
yad --text-info --height=700 --width=700 < wantedFiesta.txt --button=gtk-ok:1
#removes tempSelection.txt file after append file
rm tempSelection.txt;
rm tempSelection2.txt;
#return to Fiesta menu
fiestaMenu
#Remove dish
elif [[ $wantedPick -eq 2 ]]
then
yad --text "Select a dish to remove from your wanted list." \ --button= --button=gtk-ok:1
#allows one item to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --list --multiple --column="Click on item(s) to delete from Wanted Fiesta." --height=700 --width=700 < wantedFiesta.txt)
#takes whatever line user selected and sends output to temp file
echo $CHOICE >> temp3
#removes | at end and sends to another temp file
sed s'/.$//' temp3 >> temp4
#a second variable is created with output from temp file to then delete from wantedFiesta.txt file
newCHOICE=$(<temp4)
sed -i "/$newCHOICE/d" wantedFiesta.txt
yad --text-info --height=700 --width=700 < wantedFiesta.txt --button=gtk-ok:1
#removes temp files
rm temp3;
rm temp4;
#return to Fiesta menu
fiestaMenu
#Exit menu
else
yad --text "Exiting Wanted Fiestaware now." \ --button=gtk-ok:1
break;
fi
}
fi
#ends do while true loop
done
}
#return to Fiesta menu
fiestaMenu
;;
Search*)
{
#while true do loop for menu navigation
while true; do
yad --text "Choose what list to search in Fiestaware Dishes:" \
--button="Search all known vintage Fiesta":1 --button="Search Wanted Fiesta":2 --button="Search Owned Fiesta":3 --button="Exit this menu.":4
#last executed command
searchPick=$?
#loop wanted submenu until user exits.
#Search all Fiestaware
if [[ $searchPick -eq 1 ]]
then
{
#checks file exists before grep
if [ ! -f fiestaDishes.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search fiestaDishes.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 --button=gtk-ok:1 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt
}
fi
}
fi
#Search Wanted Fiestaware
if [[ $searchPick -eq 2 ]]
then
{
#checks file exists before grep
if [ ! -f wantedFiesta.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
{
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search wantedFiesta.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt;
}
}
fi
}
fi
#Search Owned Fiestaware
if [[ $searchPick -eq 3 ]]
then
{
#checks file exists before grep
if [ ! -f ownedFiesta.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
{
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search ownedFiesta.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt;
}
}
fi
}
fi
if [[ $searchPick -eq 4 ]]
then
#exits from menu
yad --text "Exiting Fiestaware search menu now." \ --button= --button=gtk-ok:1
#breaks out of while true do loop
break;
fi
#end of while true do loop
done
}
;;
Look*)
if [ ! -f FiestaImages.txt ]
then
fileNotFound
break;
else
{
yad --text "Select what Fiesta image to view." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
#pulls in FiestaImages.txt to display images available to view
sed G FiestaImages.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#remove last extra space in file, so no hanging radio button
head -n -1 tempSelection2.txt > tempHoldSelection;
imageSelection=$(yad --radiolist --list --column="Fiestaware" --column="Click on Fiestaware image to view." --height=700 --width=700 < tempHoldSelection)
echo "$imageSelection" >> tempImageSelection;
{
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -q "TRUE|Ashtrays|" tempImageSelection
then
{
yad --text="Ashtrays in vintage colors:\ncobalt, red, ivory, turquoise, green, and yellow." --image /home/just_tri/Images/Fiesta/Ashtrays.jpg
}
elif grep -q "TRUE|Covered Onion Soup Bowls|" tempImageSelection
then
{
yad --text="Covered Onion Soup Bowls in vintage colors:\nred, green, yellow, ivory, and cobalt." --image /home/just_tri/Images/Fiesta/Covered_Onion_SoupBowls.jpg
}
else grep -q "TRUE|Covered Onion Soup Bowls|" tempImageSelection
{
yad --text="Cream Soup Bowls in vintage colors:\nred, ivory, cobalt, yellow, green, turquoise,\nforest, red, gray, and chartreuse." --image /home/just_tri/Images/Fiesta/Cream_Soup_Bowls.jpg
}
fi
#removes tempOutputFile.txt file after used
rm tempSelection.txt;
rm tempSelection2.txt;
rm tempHoldSelection;
rm tempImageSelection;
clear;
}
}
fi
;;
Exit*)
{
yad --text "Exiting Fiestaware menu now." \ --button= --button=gtk-ok:1;
break;
}
;;
#ends case loop
esac
#ends do while loop
done
}
#xit out of Fiesta menu
;;
Pyrex*)
{
#while true do loop to navigate PyrexMenu
while true; do
#call Pyrex menu
pyrexMenu
[[ $bbb -eq 252 ]] && yad --text "Exiting Pyrex menu now." && break;
#switch statement for menu navigation
case $pyrex in
See*) checkFilePyrex
;;
View*) checkOwnedPyrex
;;
Review*) checkWantedPyrex
;;
Edit*)
{
#while true do loop
while true; do
#checks file exists
if [ ! -f ownedPyrex.txt ]
then
fileNotFound
break;
else
{
yad --text "Select what to do in owned Pyrex menu:" \
--button="Add dish to owned Pyrex list":1 --button="Remove dish from owned Pyrex list":2 --button="Exit this menu":3
ownedPick=$?
#loop owned Pyrex submenu until user exits.
if [[ $ownedPick -eq 1 ]]
then
#Add owned dishes.
#Allows user to select dish or dishes from master all dishes list. #Increases uniformity and consistency in data file.
yad --text "Select which dish or dishes you want to add to Owned Dishes." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
sed G pyrexDishes.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#remove last extra space in file, so no hanging radio button
head -n -1 tempSelection2.txt > tempHoldSelection;
#allows one or more items to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --checklist --list --column="Pyrex" --column="Click on item(s) to add to Owned Pyrex." --height=700 --width=700 < tempHoldSelection)
#takes whatever line user selected, removes number from front of list, addes spaces after semicolun
#for user readability, and removes last character which is a pipe (|)
echo "$CHOICE" | sed 's/^[^;]*;//g' | sed 's/; */; /g' | sed s'/.$//' >> ownedPyrex.txt
yad --text-info --height=700 --width=700 < ownedPyrex.txt \
--button= --button=gtk-ok:1 --buttons-layout=center
#removes temporary files after append file
rm tempSelection.txt;
rm tempSelection2.txt;
rm tempHoldSelection;
#Remove dish
elif [[ $ownedPick -eq 2 ]]
then
yad --text "Select a dish to remove from your owned list." \ --button= --button=gtk-ok:1
#allows one item to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --list --multiple --column="Click on item(s) to delete from Wanted Pyrex." --height=700 --width=700 < ownedPyrex.txt)
#takes whatever line user selected and sends output to temp file
echo $CHOICE >> temp3
#removes | at end and sends to another temp file
sed s'/.$//' temp3 >> temp4
#a second variable is created with output from temp file to then delete from ownedPyrex.txt file
newCHOICE=$(<temp4)
sed -i "/$newCHOICE/d" ownedPyrex.txt
yad --text-info --height=700 --width=700 < ownedPyrex.txt --button=gtk-ok:1
#removes temp files
rm temp3;
rm temp4;
#Exit menu
else
yad --text "Exiting Owned Pyrex now." \ --button= --button=gtk-ok:1
break;
fi
}
fi
#end while true loop
done
}
#call Pyrex menu
pyrexMenu
;;
Update*)
{
#while true do loop to navigate menu
while true; do
#checks file exists
if [ ! -f wantedPyrex.txt ]
then
fileNotFound
break;
else
{
yad --text "Choose what to do with Wanted Pyrex Dishes:" \
--button="Add dish to wanted Pyrex list":1 --button="Remove dish from wanted Pyrex list":2 --button="Exit this menu":3
wantedPick=$?
#loop wanted Pyrex submenu until user exits.
#Add wanted dishes.
if [[ $wantedPick -eq 1 ]]
then
#Allows user to select dish or dishes from master all dishes list. #Increases uniformity and consistency in data file.
yad --text "Select which dish or dishes you want to add to Wanted Dishes." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
sed G pyrexDishes.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#allows one or more items to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --checklist --list --column="Pyrex" --column="Click on item(s) to add to Wanted Pyrex." --height=700 --width=700 < tempSelection2.txt)
#takes whatever line user selected, removes number from front of list, addes spaces after semicolun
#for user readability, and removes last character which is a pipe (|)
echo "$CHOICE" | sed 's/^[^;]*;//g' | sed 's/; */; /g' | sed s'/.$//' >> wantedPyrex.txt
yad --text-info --height=700 --width=700 < wantedPyrex.txt \
--button=gtk-yes:1 --buttons-layout=center
#removes tempSelection.txt file after append file
rm tempSelection.txt;
rm tempSelection2.txt;
#Remove dish
elif [[ $wantedPick -eq 2 ]]
then
yad --text "Select a dish to remove from your wanted list." \ --button= --button=gtk-ok:1
#allows one item to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --list --multiple --column="Click on item(s) to delete from Wanted Pyrex." --height=700 --width=700 < wantedPyrex.txt)
#takes whatever line user selected and sends output to temp file
echo $CHOICE >> temp3
#removes | at end and sends to another temp file
sed s'/.$//' temp3 >> temp4
#a second variable is created with output from temp file to then delete from wantedPyrex.txt file
newCHOICE=$(<temp4)
sed -i "/$newCHOICE/d" wantedPyrex.txt
yad --text-info --height=700 --width=700 < wantedPyrex.txt --button=gtk-ok:1
#removes temp files
rm temp3;
rm temp4;
#Exit menu
else
yad --text "Exiting Wanted Pyrex now." \ --button= --button=gtk-ok:1
break;
fi
}
fi
done
}
#call Pyrex menu
pyrexMenu
;;
Search*)
{
#while true do loop for menu
while true; do
yad --text "Choose what list to search in Pyrex Dishes:" \
--button="Search all known vintage Pyrex":1 --button="Search Wanted Pyrex":2 --button="Search Owned Pyrex":3 --button="Exit this menu.":4
#last executed command
searchPick=$?
#loop search Pyrex submenu until user exits.
#Search all Pyrex
if [[ $searchPick -eq 1 ]]
then
{
#checks file exists before grep
if [ ! -f pyrexDishes.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search pyrexDishes.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt
clear;
}
fi
}
fi
#Search Wanted Pyrex
if [[ $searchPick -eq 2 ]]
then
{
#checks file exists before grep
if [ ! -f wantedPyrex.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
{
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search wantedPyrex.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt;
clear;
}
}
fi
}
fi
#Search Owned Pyrex
if [[ $searchPick -eq 3 ]]
then
{
#checks file exists before grep
if [ ! -f ownedPyrex.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
{
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search ownedPyrex.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt;
clear;
}
}
fi
}
fi
if [[ $searchPick -eq 4 ]]
then
#exits from Pyrex search menu
yad --text "Exiting Pyrex search menu now." \ --button= --button=gtk-ok:1
break;
fi
#end while true do loop
done
}
;;
Look*)
if [ ! -f PyrexImages.txt ]
then
fileNotFound
break;
else
{
yad --text "Select what Pyrex image to view." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
#pulls in PyrexImages.txt to display images available to view
sed G PyrexImages.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#remove last extra space in file, so no hanging radio button
head -n -1 tempSelection2.txt > tempHoldSelection;
imageSelection=$(yad --radiolist --list --column="Pyrex" --column="Click on Pyrex image to view." --height=700 --width=700 < tempHoldSelection)
echo "$imageSelection" >> tempImageSelection;
{
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -q "TRUE|Butterpint Cinderella Bowls|" tempImageSelection
then
{
yad --text="Butterprint, also called Amish, comes in:\nprimarily turquoise and white combinations of dish and print\nhowever, there are some yellow printed on white dishes found in this patter." --image /home/just_tri/Images/Pyrex/Butterprint_Cinderella_Bowls.jpg
}
elif grep -q "TRUE|Primary Mixing Bowls|" tempImageSelection
then
{
yad --text="Primary colors of yellow, green, red, and blue are a popular mixing bowl set.\nPrimary color sets of other Pyrex dishes can be found." --image /home/just_tri/Images/Pyrex/Primary_Mixing_Bowls.jpeg
}
elif grep -q "TRUE|Snowflake Various Pieces|" tempImageSelection
then
{
yad --text="Snowflake pattern is typically white printed on white, turquoise, or black.\nThe reverse combinations can also be found.\nMore rarely, one can find white snowflakes on a pink background." --image /home/just_tri/Images/Pyrex/Snowflake_Various_Pieces.jpg
}
else grep -q "TRUE|Terra 1964-1964 Marking Ad|" tempImageSelection
{
yad --text="The Terra pattern is a matte finish with brown rings.\nThis pattern was only made from 1964-1965 due to high production costs\nand the fact the finish was easily scratched." --image /home/just_tri/Images/Pyrex/Terra.jpg
}
fi
#removes tempOutputFile.txt file after used
rm tempSelection.txt;
rm tempSelection2.txt;
rm tempHoldSelection;
rm tempImageSelection;
clear;
}
}
fi
;;
Exit*)
{
yad --text "Exiting Pyrex menu now." \ --button= --button=gtk-ok:1;
break;
}
;;
#ends case loop
esac
#ends do while loop
done
}
#exit out of Pyrex menu
;;
Hazel*)
{
#while true do loop for haMenu
while true; do
#call Hazel Atlas secondary menu
haMenu
#if "x" selected, will exit submenu
[[ $ccc -eq 252 ]] && yad --text "Exiting Hazel Atlas menu now." && break;
#switch statement to navigate menu
case $ha in
See*) checkFileHA
;;
View*) checkOwnedHA
;;
Review*) checkWantedHA
;;
Edit*)
{
#while true do loop
while true; do
#checks file exists
if [ ! -f ownedHA.txt ]
then
fileNotFound
break;
else
{
yad --text "Select what to do in owned Hazel Atlas menu:" \
--button="Add dish to owned Hazel Atlas list":1 --button="Remove dish from owned Hazel Atlas list":2 --button="Exit this menu":3
ownedPick=$?
#loop owned Hazel Atlas submenu until user exits.
if [[ $ownedPick -eq 1 ]]
then
#Add owned dishes.
#Allows user to select dish or dishes from master all dishes list. #Increases uniformity and consistency in data file.
yad --text "Select which dish or dishes you want to add to Owned Dishes." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
sed G haDishes.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#remove last extra space in file, so no hanging radio button
head -n -1 tempSelection2.txt > tempHoldSelection;
#allows one or more items to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --checklist --list --column="Hazel Atlas" --column="Click on item(s) to add to Owned Hazel Atlas." --height=700 --width=700 < tempHoldSelection)
#takes whatever line user selected, removes number from front of list, addes spaces after semicolun
#for user readability, and removes last character which is a pipe (|)
echo "$CHOICE" | sed 's/^[^;]*;//g' | sed 's/; */; /g' | sed s'/.$//' >> ownedHA.txt
yad --text-info --height=700 --width=700 < owned.txt \
--button= --button=gtk-ok:1 --buttons-layout=center
#removes temporary files after append file
rm tempSelection.txt;
rm tempSelection2.txt;
rm tempHoldSelection;
#Remove dish
elif [[ $ownedPick -eq 2 ]]
then
yad --text "Select a dish to remove from your owned list." \ --button= --button=gtk-ok:1
#allows one item to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --list --multiple --column="Click on item(s) to delete from Wanted Hazel Atlas." --height=700 --width=700 < ownedHA.txt)
#takes whatever line user selected and sends output to temp file
echo $CHOICE >> temp3
#removes | at end and sends to another temp file
sed s'/.$//' temp3 >> temp4
#a second variable is created with output from temp file to then delete from ownedPyrex.txt file
newCHOICE=$(<temp4)
sed -i "/$newCHOICE/d" ownedHA.txt
yad --text-info --height=700 --width=700 < ownedHA.txt --button=gtk-ok:1
#removes temp files
rm temp3;
rm temp4;
#Exit menu
else
yad --text "Exiting Owned Hazel Atlas now." \ --button= --button=gtk-ok:1
break;
fi
}
fi
#end while true do loop
done
}
#exit to haMenu
haMenu
;;
Update*)
{
#while true do loop
while true; do
#checks file exists
if [ ! -f ownedHA.txt ]
then
fileNotFound
break
else
{
yad --text "Choose what to do with Wanted Hazel Atlas Dishes:" \
--button="Add dish to wanted Hazel Atlas list":1 --button="Remove dish from wanted Hazel Atlas list":2 --button="Exit this menu":3
wantedPick=$?
#loop wanted Hazel Atlas submenu until user exits.
#Add wanted dishes.
if [[ $wantedPick -eq 1 ]]
then
#Allows user to select dish or dishes from master all dishes list. #Increases uniformity and consistency in data file.
yad --text "Select which dish or dishes you want to add to Wanted Dishes." \ --button= --button=gtk-ok:1
#sed G adds space between lines as place holder for FALSE which yad --checklist looks for because of two columns
#this still leaves desired text on odd lines
sed G haDishes.txt >> tempSelection.txt
#have to add new line at beginning of file because only even numbered lines print
sed '1i\\' tempSelection.txt >> tempSelection2.txt
#allows one or more items to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --checklist --list --column="Hazel Atlas" --column="Click on item(s) to add to Wanted Hazel Atlas." --height=700 --width=700 < tempSelection2.txt)
#takes whatever line user selected, removes number from front of list, addes spaces after semicolun
#for user readability, and removes last character which is a pipe (|)
echo "$CHOICE" | sed 's/^[^;]*;//g' | sed 's/; */; /g' | sed s'/.$//' >> wantedHA.txt
yad --text-info --height=700 --width=700 < wantedHA.txt \
--button=gtk-yes:1 --buttons-layout=center
#removes tempSelection.txt file after append file
rm tempSelection.txt;
rm tempSelection2.txt;
#Remove dish
elif [[ $wantedPick -eq 2 ]]
then
yad --text "Select a dish to remove from your wanted list." \ --button= --button=gtk-ok:1
#allows one item to be selected by user with mouse. Can hit enter or "ok"
CHOICE=$(yad --list --multiple --column="Click on item(s) to delete from Wanted Hazel Atlas." --height=700 --width=700 < wantedHA.txt)
#takes whatever line user selected and sends output to temp file
echo $CHOICE >> temp3
#removes | at end and sends to another temp file
sed s'/.$//' temp3 >> temp4
#a second variable is created with output from temp file to then delete from wantedPyrex.txt file
newCHOICE=$(<temp4)
sed -i "/$newCHOICE/d" wantedHA.txt
yad --text-info --height=700 --width=700 < wantedHA.txt --button=gtk-ok:1
#removes temp files
rm temp3;
rm temp4;
#Exit menu
else
yad --text "Exiting Wanted Hazel Atlas now." \ --button= --button=gtk-ok:1
break;
fi
}
fi
#end while true do loop
done
}
#exit to haMenu
haMenu
;;
Search*)
{
#while true do loop
while true; do
yad --text "Choose what list to search in Hazel Atlas Dishes:" \
--button="Search all known vintage Hazel Atlas":1 --button="Search Wanted Hazel Atlas":2 --button="Search Owned Hazel Atlas":3 --button="Exit this menu.":4
#last executed command
searchPick=$?
#loop search Hazel Atlas submenu until user exits.
#Search all Hazel Atlas
if [[ $searchPick -eq 1 ]]
then
{
#checks file exists before grep
if [ ! -f haDishes.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search haDishes.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt
clear;
}
fi
}
fi
#Search Wanted Hazel Atlas
if [[ $searchPick -eq 2 ]]
then
{
#checks file exists before grep
if [ ! -f wantedHA.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
{
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search wantedHA.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt;
clear;
}
}
fi
}
fi
#Search Owned Hazel Atlas
if [[ $searchPick -eq 3 ]]
then
{
#checks file exists before grep
if [ ! -f ownedHA.txt ]
then
fileNotFound
break;
else
{
search=$(\
yad --text "Enter a keyword, such as dish type or color. If no results are returned, try using less words or fewer characters." --entry \
--entry-text="Type keyword here")
{
#searches .txt for keyword entered
#if exists, displays results in testbox, otherwise, displays infobox message if no matches
if grep -i $search ownedHA.txt | sed 's/^[^;]*;//g' | sed 's/; */; /g' > tempOutputFile.txt; then
yad --text-info --height=700 --width=700 < tempOutputFile.txt
else
yad --text "No results. Try fewer words or another keyword." \ --button= --button=gtk-ok:1
fi
#removes tempOutputFile.txt file after used
rm tempOutputFile.txt;
clear;
}
}
fi
}
fi
if [[ $searchPick -eq 4 ]]
then
#exits from Hazel Atlas search menu
yad --text "Exiting Hazel Atlas search menu now." \ --button= --button=gtk-ok:1
break;
fi
#end while true do loop
done
}
;;
Exit*)
{
yad --text "Exiting Hazel Atlas menu now." \ --button= --button=gtk-ok:1;
break;
}
;;
#ends case loop
esac
#ends do while loop
done
}
#exit out of Hazel Atlas menu
;;
#exit option from Dish Collection Tracker
*Exit*)
{
yad --text "Exiting Dish Collection Tracker.";
break;
}
;;
#ends case loop for Dish Collection Tracker
esac
#ends do while true loop for Dish Collection Tracker
done
exit 0
| true
|
157d50d0dbb6d0407ab1016ff19d54715a44a274
|
Shell
|
ximinez/git-scripts
|
/git-pending
|
UTF-8
| 1,252
| 4.03125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# git-pending: scan a directory tree for git checkouts that have pending
# work — uncommitted diffs, whitespace errors, or branches [ahead]/[behind]
# their upstream — and print the path plus a short status for each.
# Usage: git-pending [-v] [dir]
# The "--remote" form is an internal helper this script invokes on itself
# (via find -execdir) inside each repository directory.
#
# Searching for .git without checking type handles git worktrees.
# TODO: Make the ignore list a param, but for now this works.
if [[ $1 == "-v" ]]
then
	verbose=1
	shift
fi
if [[ $1 == "--remote" ]]
then
	shift
	[[ $# == 1 ]] || exit 1
	tmp=$1
	shift
	# If in a valid git dir, succeeds, so continue
	# If in an invalid git dir, fails, so exit with success
	git status -s -b >& "${tmp}" || exit 0
	# If grep finds matches, succeeds, so exit with a failure.
	# If no matches, fails, so continue
	grep -q -e '\[ahead' -e '\[behind' "${tmp}" && exit 1
	# If there are no diffs, succeeds, so continue.
	# If there are diffs, fails, so exit with a failure.
	git diff --quiet >& /dev/null || exit 1
	# If there are no problems, succeeds, so continue.
	# If there are problems, fails, so exit with a failure.
	git diff --check >& /dev/null || exit 1
	exit 0
fi
tmp=$(mktemp)
# For every .git entry (pruning the nih_c build directories), confirm it is
# a repo (git rev-parse), then run this script in --remote mode there; when
# that reports pending work (non-zero), print the repo path and its status.
find $1 \
	-iname .nih_c -prune -o \
	-iname nih_c -prune -o \
	-iname .git \
	-execdir git rev-parse \; \
	\( \
	-execdir $0 --remote "${tmp}" \; \
	-o \
	-printf "%h\n" \
	-execdir git status -s -b \; \
	-execdir git diff --check \; \
	-exec echo \; \
	\)
rm -f "${tmp}"
if [[ -v verbose ]]
then
	echo Done
fi
| true
|
e846518d54e800e6f37d595079f2de8ff04844b4
|
Shell
|
vsanna/dd_systemd
|
/init.d/hello
|
UTF-8
| 332
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/bash
# chkconfig: 2345 10 90
# Minimal SysV init script for the "hello" service: start runs the
# command script, stop/status are stubs.

HELLO_PATH="/usr/lib/hello/mycmd.sh"

case "$1" in
	start)
		# Bug fix: was `sh HELLO_PATH` (missing `$`), which tried to run a
		# literal file named "HELLO_PATH" instead of the configured script.
		sh "$HELLO_PATH"
		# do some stuff to start the process
		;;
	stop)
		echo "stopped!"
		# do some stuff to stop the process
		;;
	status)
		echo "status:..."
		# show some info regarding the process
		;;
	*)
		echo "invalid option."
		exit 1
		;;
esac
| true
|
a7a382a2304ea3c970f4f36515d9a613c4fb62ff
|
Shell
|
kgleason/DotFiles
|
/bash_profile.d/functions.sh
|
UTF-8
| 670
| 3.875
| 4
|
[] |
no_license
|
# I dig this dd function, but I didn't write it.
# I found it here: http://askubuntu.com/questions/215505/how-do-you-monitor-the-progress-of-dd
# Wraps dd(1) with pv(1) to show a progress bar. Falls back to plain dd
# when pv is missing. The if= operand is stripped and fed through pv.
dd()
{
	# Use `type -P` (path-only lookup) instead of the external `which`.
	# NB: `command -v dd` would resolve to this function and recurse.
	local dd
	dd=$(type -P dd) || {
		echo "'dd' is not installed!" >&2
		return 1
	}
	local pv
	pv=$(type -P pv) || {
		echo "'pv' is not installed!" >&2
		"$dd" "$@"
		return $?
	}
	# Split the arguments: remember the if= source, keep everything else.
	local arg arg2 infile
	local -a args
	for arg in "$@"
	do
		arg2=${arg#if=}
		if [ "$arg2" != "$arg" ]
		then
			infile=$arg2
		else
			args[${#args[@]}]=$arg
		fi
	done
	# pv reads the input file (with progress on stderr) and pipes it to dd.
	"$pv" -tpreb "$infile" | "$dd" "${args[@]}"
}
| true
|
2b9779ef8e1a3c91441af031449c3fccd0f4e188
|
Shell
|
barbagroup/petibm-rollingpitching
|
/runs/scripts/generate_all_figures.sh
|
UTF-8
| 1,406
| 2.578125
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/usr/bin/env bash
# Run the figure-generation script of every simulation case, then the
# global post-processing script. Paths are relative to the runs/ directory.

cd $(dirname "$(dirname $0)")

# "case-directory|printed label" pairs, in generation order.
cases=(
	"independence|independence study"
	"Re200_St0.6_AR1.27_psi90|baseline case"
	"Re200_St0.4_AR1.27_psi90|St=0.4"
	"Re200_St0.8_AR1.27_psi90|St=0.8"
	"Re100_St0.6_AR1.27_psi90|Re=100"
	"Re400_St0.6_AR1.27_psi90|Re=400"
	"Re200_St0.6_AR1.91_psi90|AR=1.91"
	"Re200_St0.6_AR2.55_psi90|AR=2.55"
	"Re200_St0.6_AR1.27_psi100|psi=100"
	"Re200_St0.6_AR1.27_psi110|psi=110"
	"Re200_St0.6_AR1.27_psi120|psi=120"
)

for entry in "${cases[@]}"; do
	casedir="${entry%%|*}"
	label="${entry#*|}"
	printf "\n*** Generating figures (%s) ...\n" "$label"
	/bin/bash "$casedir/scripts/generate_figures.sh"
done

printf "\n*** Generating all other figures ...\n"
/bin/bash scripts/generate_figures.sh

exit 0
| true
|
6065a144a4edfb774db068a9e2a51b4aa5d77cf1
|
Shell
|
deionizedoatmeal/popdots
|
/scripts/rofimenu-power.sh
|
UTF-8
| 860
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
# Power-profile switcher for system76-power, presented as a rofi menu.

rofi_command="rofi -lines 3 -width 15"

# Current profile name, e.g. "Performance", "Balanced" or "Battery".
profile="$(system76-power profile | sed -n -e 's/^.*Profile: //p')"

### Options ###
# Menu entries; the currently active profile gets a " <-" marker.
preformance=" performance"
balanced=" balanced"
battery=" battery"
case "$profile" in
	Performance) preformance="$preformance <-" ;;
	Balanced)    balanced="$balanced <-" ;;
	Battery)     battery="$battery <-" ;;
esac

# Variable passed to rofi
options="$preformance\n$balanced\n$battery"

chosen="$(echo -e "$options" | $rofi_command -dmenu -p "power profile" -selected-row 2)"
case $chosen in
	$preformance)
		system76-power profile performance
		;;
	$balanced)
		system76-power profile balanced
		;;
	$battery)
		system76-power profile battery
		;;
esac
| true
|
0f1726841d5aedb6f6873ef442b4c10d1bf85fa5
|
Shell
|
xuyinhao/lgpbenchmark
|
/loongoopBench/api/bin/find/case/1-2
|
UTF-8
| 365
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
##
# Benchmark test case: `find -name` with a pattern containing CJK text and
# shell metacharacters. ${apiPath}, $touchz, $mkdir, $find and checkOk are
# supplied by the harness that sources this file — TODO confirm against the
# harness entry point.
nfilePath="${apiPath}/find-1-2.中文!@#$%^&\*()_+\{}-=\[];\"\\|<>\?,.'f"
ndirPath="${apiPath}/find-1-2.中文!@#$%^&\*()_+\{}-=\[];\"\\|<>\?,.'d"
# Create the fixture file and directory; errors are ignored.
$touchz $nfilePath 2>/dev/null
$mkdir -p $ndirPath 2>/dev/null
# Search for both fixtures by wildcard and hand the result to the checker.
rett=`$find ${apiPath} -name "find-1-2.中文!@#$%^&\*()_+\{}-=\[];\"\\|<>\?,.'*" 2>/dev/null`
checkOk $? "$rett" $apiPath "find-1-2*"
| true
|
fe225f5beae31e7be32bfd1f0316116d435b3d2b
|
Shell
|
Adonis-wk/indexDemo
|
/hostBash.sh
|
UTF-8
| 286
| 3.171875
| 3
|
[] |
no_license
|
#!/bin/bash
# Append host entries provided in /etc/jdos_host to /etc/hosts.
# NOTE(review): the START_TAG guard is ineffective while the `touch` below
# stays commented out — the append can run again on every invocation.

DEFAULT_FILE="/etc/jdos_host"
START_TAG="/opt/host_started"

if [ -f "$DEFAULT_FILE" ]; then
	if [ ! -f "$START_TAG" ]; then
		# touch "$START_TAG"
		# Read the file directly (no useless `cat |` subshell). -r keeps
		# backslashes literal; `|| [[ -n ${line} ]]` also processes a final
		# line that lacks a trailing newline.
		while IFS= read -r line || [[ -n ${line} ]]
		do
			echo "${line}" >> /etc/hosts
		done < "$DEFAULT_FILE"
	fi
fi
| true
|
9b32279580b169ed365dc4603f6b2fac5e648f61
|
Shell
|
vinceliuice/WhiteSur-gtk-theme
|
/src/assets/gtk/thumbnails/render-thumbnails.sh
|
UTF-8
| 1,016
| 3.3125
| 3
|
[
"MIT"
] |
permissive
|
#! /usr/bin/env bash
# Render the GTK theme thumbnail PNGs from per-variant thumbnail*.svg files
# (all colour/nord/light/dark combinations), then delete the generated SVGs,
# keeping only the base thumbnail.svg.

INKSCAPE="/usr/bin/inkscape"
OPTIPNG="/usr/bin/optipng"

# Generates the per-variant thumbnail*.svg inputs used below.
./make-thumbnails.sh

for theme in '' '-blue' '-purple' '-pink' '-red' '-orange' '-yellow' '-green' '-grey'; do
	for type in '' '-nord'; do
		SRC_FILE="thumbnail${theme}${type}.svg"
		for color in '-light' '-dark'; do
			echo
			echo Rendering thumbnail${color}${theme}${type}.png
			# Export only the SVG object whose id names this variant, at 96 dpi,
			# then losslessly recompress the PNG with optipng.
			$INKSCAPE --export-id=thumbnail${color}${theme}${type} \
			          --export-id-only \
			          --export-dpi=96 \
			          --export-filename=thumbnail${color}${theme}${type}.png $SRC_FILE >/dev/null \
			          && $OPTIPNG -o7 --quiet thumbnail${color}${theme}${type}.png
		done
	done
done

# Clean up the generated SVGs; only the unsuffixed thumbnail.svg survives.
for theme in '' '-blue' '-purple' '-pink' '-red' '-orange' '-yellow' '-green' '-grey'; do
	for type in '' '-nord'; do
		if [[ ${theme} == '' && ${type} == '' ]]; then
			echo "keep thumbnail.svg"
		else
			rm -rf "thumbnail${theme}${type}.svg"
		fi
	done
done

exit 0
| true
|
92f5bc52a4b3e31a3deb8ec0e6d53982d92f91c6
|
Shell
|
Azure/ato-toolkit
|
/software factory/installer-connected/ring-0/aks/src/runtime/cluster-on-boarding/scripts/on_board_cluster.sh
|
UTF-8
| 7,374
| 2.890625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# -*- coding: utf-8 -*-
# On-boards a freshly provisioned AKS cluster: opens an SSH tunnel through
# the jumphost to reach the API server, seeds the cluster-state git repo,
# installs Flux and the Helm operator (directly, or via Azure Arc when the
# feature flag is set), and registers the cluster in cluster-management.
set -euo pipefail

# shellcheck disable=SC2034
read -r -d '' __usage <<-'EOF' || true # exits non-zero when EOF encountered
	-f --file [arg] Config Filename to use.
	-v Enable verbose mode, print script as it is executed
	-d --debug Enables debug mode
	-h --help This page
	-n --no-color Disable color output
EOF

## Boilerplate files
# All helper functions (info/debug/die, ini_val, install_*, render_*, …)
# come from these sourced files; nothing below is self-contained.
# shellcheck source=src/runtime/bootstrap/lib/main.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && cd ../.. && pwd)/bootstrap/lib/main.sh"
# shellcheck source=src/runtime/bootstrap/lib/ini_val.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && cd ../.. && pwd)/bootstrap/lib/ini_val.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/install_flux.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/install_flux.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/install_helm_operator.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/install_helm_operator.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/render_azure_service_operator.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/render_azure_service_operator.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/create_cluster_folder_git.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/create_cluster_folder_git.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/render_and_copy_rbac.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/render_and_copy_rbac.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/render_and_copy_cert_manager.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/render_and_copy_cert_manager.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/upsert_namespace.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/upsert_namespace.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/helpers.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/helpers.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/get_flux_ssh_keys.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/get_flux_ssh_keys.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/upload_ssh_keys.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/upload_ssh_keys.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/upload_cluster_management_file.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/upload_cluster_management_file.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/create_cluster_management_file.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/create_cluster_management_file.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/upsert_arc.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/upsert_arc.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/configure_storage_classes.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/configure_storage_classes.sh"
# shellcheck source=src/runtime/cluster-on-boarding/scripts/configure_gatekeeper.sh
source "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/configure_gatekeeper.sh"

### Validation. Error out if the things required for your script are not present
##############################################################################

[[ "${arg_f:-}" ]] || help "Setting a filename with -f or --file is required"
[[ "${LOG_LEVEL:-}" ]] || emergency "Cannot continue without LOG_LEVEL. "

config_file="$arg_f"
info "Reading $config_file for configuration"
jumphost_user=$(get_from_ini_or_error "$config_file" c12:generated.jumphost_user)
jumphost_ip=$(get_from_ini_or_error "$config_file" c12:generated.jumphost_ip)
jumphost_ssh_key_location=$(get_from_ini_or_error "$config_file" c12:generated.jumphost_ssh_key_location)

info "Creating SSH Tunnel"
# Background tunnel: local port 1234 forwards to the API proxy on the
# jumphost (127.0.0.1:8888); killed at the bottom of the script.
ssh -oStrictHostKeyChecking=no "$jumphost_user"@"$jumphost_ip" -i "$jumphost_ssh_key_location" -L 1234:127.0.0.1:8888 -C -N &

# These env vars are used by helm / kubectl to talk to the K8S API server via the jumphost
# shellcheck disable=SC2034
export HTTPS_PROXY=http://127.0.0.1:1234
# shellcheck disable=SC2034
export https_proxy=http://127.0.0.1:1234

prefix=$(get_from_ini_or_error "$config_file" c12.prefix)
namespace="$prefix-c12-system"
flux_release_name="$namespace-flux"
git_cluster_state=$(get_cluster_state_repo)
cluster_name=$(get_from_ini_or_error "$config_file" c12:generated.aks_name)
rg=$(get_from_ini_or_error "$config_file" terraform:generated.container-rg)
disk_encryption_set_id=$(ini_val "$config_file" c12:generated.disk_encryption_set_id)

# Fetch admin credentials so kubectl/helm below target this cluster.
az aks get-credentials --resource-group "$rg" --name "$cluster_name" --admin --overwrite-existing

debug "namespace:$namespace flux_release_name:$flux_release_name git_cluster_state:$git_cluster_state"

# First step is create the folder in the cluster-state and add all the required manifests
# We keep the local folder of the cluster for installing rbac later.
cluster_state_folder=$(create_folder_for_state_repo "$git_cluster_state" "$cluster_name")
render_and_copy_rbac "$cluster_state_folder"

#Create the storage classes with encrypted disk sets and delete any other
configure_storage_classes "$disk_encryption_set_id"

configure_gatekeeper "$cluster_state_folder"

# Install the cert-manager components and CRDs
acr_name=$(get_from_ini_or_error "$config_file" c12:generated.regional_acr_name)
render_and_copy_cert_manager "$cluster_state_folder"
# Mirror the cert-manager images into the cluster's ACR so the cluster
# never pulls from quay.io directly.
import_image "$acr_name" "quay.io/jetstack/cert-manager-cainjector:v0.14.3"
import_image "$acr_name" "quay.io/jetstack/cert-manager-controller:v0.14.3"
import_image "$acr_name" "quay.io/jetstack/cert-manager-webhook:v0.14.3"

# Renders the azure-service-operator's HelmRelease
# Generates sufficient Service Principal for ASO
# Transfers required Images to Cluster ACR
render_azure_service_operator "$cluster_state_folder"

# if the feature flag for Azure Arc is `true` will on board with ARC Config Agent.
# shellcheck disable=SC2086
if [[ "$(ini_val $config_file azure.arc-support)" == "true" ]]; then
	# Install Azure Arc and connect to ARC Config Agent and obtain Flux SSH key
	info "Connect cluster to Azure ARC Config Agent"
	upsert_arc
	info "Cluster Successfully Connect Azure ARC Config Agent"
	# Install helm operator and flux
	info "Installing HelmOperator in the cluster"
	install_helm_operator
	# Upload the SSH keys generated by flux using terraform into the state repository so the cluster
	info "Granting permissions to flux in cluster-state repository"
	upload_ssh_keys
else
	# Install helm operator and flux
	info "Installing HelmOperator and Flux in the cluster"
	upsert_namespace "$namespace"
	install_helm_operator
	install_flux
	# Upload the SSH keys generated by flux using terraform into the state repository so the cluster
	# can perform the pull
	info "Granting permissions to flux in cluster-state repository"
	get_flux_ssh_keys "$namespace" "$flux_release_name"
	upload_ssh_keys
fi

# Register the cluster in the cluster-management repository.
# (variable name typo "namagement" kept as-is; it is used consistently)
cluster_file_path=$(generate_cluster_management_file)
cluster_namagement_git=$(get_cluster_management_repo)
upload_cluster_management_file "$cluster_namagement_git" "$cluster_file_path" "$cluster_name"

info "Cluster successfully onboarded"

# Tear down the background SSH tunnel.
# NOTE(review): parsing `jobs -l` output with cut is fragile — confirm.
for job in $(jobs -l "%ssh" | cut -d' ' -f 2)
do
	kill "$job"
done
unset HTTPS_PROXY
unset https_proxy
| true
|
e1186523edf969b6ba82250fe5c2c01f0143d870
|
Shell
|
Lou00/d3ctf_2019_ezupload
|
/docker/file/run.sh
|
UTF-8
| 996
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
# CTF challenge entrypoint (d3ctf 2019 "ezupload"): serve the site under a
# random 16-hex-char web root and rotate that root every ~10 minutes,
# wiping the upload directory at each rotation.

service apache2 start
#tail -F /var/log/apache2/access.log

# Initial random web root: first 16 chars of md5($RANDOM).
export MD5=`echo -n $RANDOM | md5sum | cut -d ' ' -f1`
export WEBROOT=${MD5:0:16}
# default.conf is a printf template; substitute the web root into the
# active vhost config.
printf "$(cat /etc/apache2/sites-enabled/default.conf)" $WEBROOT > /etc/apache2/sites-enabled/000-default.conf
mv /var/www//html/* /var/www/html/$WEBROOT
chmod 777 /var/www//html/$WEBROOT/upload
rm -rf /var/www/html/$WEBROOT/upload/*
service apache2 reload
echo -n "WEBROOT IS " && echo $WEBROOT
export OLDROOT=$WEBROOT
# Rotation loop: pick a new random root, copy the site there, clear the
# uploads, point Apache at it, then delete the previous root.
while true
do export MD5=`echo -n $RANDOM | md5sum | cut -d ' ' -f1`
export WEBROOT=${MD5:0:16}
printf "$(cat /etc/apache2/sites-enabled/default.conf)" $WEBROOT > /etc/apache2/sites-enabled/000-default.conf
cp -r /var/www//html/$OLDROOT /var/www/html/$WEBROOT
chmod 777 /var/www//html/$WEBROOT/upload
rm -rf /var/www/html/$WEBROOT/upload/*
service apache2 reload
sleep 1
rm -rf /var/www//html/$OLDROOT
echo -n "WEBROOT IS " && echo $WEBROOT
export OLDROOT=$WEBROOT
sleep 600
done
| true
|
a99b1a8f8937159d6ed3b5f81cac2d550708c2d9
|
Shell
|
wangsl/compiler-wrapper-2020
|
/link.sh
|
UTF-8
| 263
| 2.625
| 3
|
[] |
no_license
|
#!/bin/sh
# $Id$
# (Re)create one symlink per compiler name, all pointing at the wrapper script.
# Fix: quote "$comp" and use -- so names are never word-split or treated as
# options (SC2086); $() instead of nothing changed otherwise.
compilers="cc c++ f77 g++ g77 gcc gfortran icc icpc ifort mpic++ mpicc mpiCC mpicxx mpif77 mpif90 pgf77 pgf90 pgfortran pgcc pgc++ clang++"
# $compilers is deliberately unquoted: word-splitting the list is the point.
for comp in $compilers; do
  echo "$comp"
  # Remove any stale file/dir/link of that name before re-linking.
  rm -rf -- "$comp"
  ln -s compiler-wrapper.bash "$comp"
done
| true
|
46d5a7dd926dd9dcc8d2a2994476a3e40eae9172
|
Shell
|
karo-electronics/meta-karo-distro
|
/recipes-graphics/wayland/weston/xwayland.weston-start
|
UTF-8
| 118
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/sh
# Pre-start hook for weston: when the Xwayland binary is available, make sure
# the shared X11 socket directory exists with its conventional permissions.
if type Xwayland >/dev/null 2>&1
then
    mkdir -p /tmp/.X11-unix
    chmod 3777 /tmp/.X11-unix   # setgid + sticky + rwx for everyone
fi
| true
|
a4540ee2bb96acd489ab44ede5c1710efb5ce69a
|
Shell
|
YF18/shell
|
/memcached.sh
|
UTF-8
| 1,226
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# Build libevent and memcached from source tarballs (expected in the current
# directory), install them under /usr/local, add memcached to the system PATH
# and start a small daemon bound to localhost. Must be run as root.
clear
tar zxvf libevent-2.0.22-stable.tar.gz
cd libevent-2.0.22-stable
./configure --prefix=/usr/local/libevent
make
make install
cd ../
tar zxvf memcached-1.4.24.tar.gz
cd memcached-1.4.24
# memcached is linked against the libevent installed just above.
./configure --prefix=/usr/local/memcached --with-libevent=/usr/local/libevent
make
make install
echo "==========================启动Memcached服务=============================="
mkdir /usr/local/memcached/tmp
# Append a PATH export to /etc/profile so 'memcached' resolves in new shells.
sed -i '$a PATH=/usr/local/memcached/bin:$PATH \nexport PATH' /etc/profile
source /etc/profile
memcached -d -m 10 -u root -l 127.0.0.1 -p 11211 -c 256 -P /usr/local/memcached/tmp/memcached.pid
# -d  run as a daemon
# -m  cache memory in MB (10 MB here)
# -u  user to run as (root here)
# -l  listen address (127.0.0.1 only)
# -p  TCP listen port (11211)
# -c  max concurrent connections (256; memcached's default is 1024)
# -P  pid file path (/usr/local/memcached/tmp/memcached.pid)
| true
|
923837c9288e81ef1e3766fb897c5e3ff4e1fbe0
|
Shell
|
mru00/junqer
|
/gen_test_dirs.sh
|
UTF-8
| 1,203
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash -e
# create sample series file structure in the current directory
# mru, 2011-01
# sudo apt-get install mjpegtools imagemagick
#
# Bug fix: the ERR trap body was '{echo ...' — without a space after '{' the
# shell looks for a command literally named '{echo', so the diagnostic was
# never printed. Also: $() instead of backticks and quoted expansions.
trap '{ echo "some error occured, stopping."; exit 1; }' ERR

SHOW="super show"
nseasons=4
nepisodes=10
nframes=100
format="%03d"

# Remove the temporary frame images on any exit (-f: no error if none exist).
trap '{ rm -f ep???.jpg; }' exit

# gen_image TEXT — render $nframes captioned frames, each with a progress bar
# advanced proportionally to the frame index, into ep000.jpg .. epNNN.jpg.
gen_image() {
    text="$1"
    for i in $(seq 0 $((nframes-1))); do
        fn=$(printf "${format}" "$i")
        # Right edge of the progress bar, interpolated from 20 to 520.
        pos=$(( 20 + i*500/(nframes-1) ))
        convert -size 640x160 xc:black \
            -quality 100 \
            -pointsize 32 \
            -font Verdana-Regular \
            -fill white \
            -stroke white \
            -draw "text 20,55 '$text'" \
            -draw "rectangle 15,100 535,150" \
            -fill black -draw "rectangle 20,110 $pos,140" \
            "ep${fn}.jpg"
    done
}

# gen_movie OUTFILE — encode the ep*.jpg frame sequence into a 20 fps AVI.
gen_movie() {
    outfile="$1"
    jpeg2yuv -v 0 -I p -f 20 -j "ep${format}.jpg" | yuv2lav -f a -b 10000 -o "$outfile"
}

for i in $(seq -w "$nseasons"); do
    season="season $i"
    mkdir -p "$SHOW/$season"
    for j in $(seq -w "$nepisodes"); do
        episode="episode $j"
        gen_image "$SHOW - $season - $episode"
        gen_movie "$SHOW/$season/$episode.avi"
    done
done
| true
|
38b05da8de11c72a76d1668a371bb23238cd1e12
|
Shell
|
carlulli/TDSE
|
/run_inttest_analytical.sh
|
UTF-8
| 676
| 2.625
| 3
|
[
"MIT"
] |
permissive
|
# Build the analytical integration test: compiles the solver modules together
# with test/inttest_analytical.c against the bundled double-precision kissfft,
# producing inttest_analytical.exe in the current directory.
IDIR="./include"
MDIR="./modules"
TDIR="./test"
# SDIR="./scripts"
# INCKISS
# compiler flags:
# -g adds debugging information to the executable file
# -Wall turns on most, but not all, compiler warnings
CFLAGS="-g -Wall"
# the build target executable:
TARGET="inttest_analytical"
# inclde directory
INCLUDE="-I include -I kissff"
# lib
# LIBKISSFFT = kissfft
# LIBS = -L $(LIBKISSFFT)
# include modules
MODULES="${MDIR}/wavefunction.c ${MDIR}/integrator.c ${MDIR}/geometry.c ${MDIR}/linearalgebra.c ${MDIR}/hamiltonian.c ${MDIR}/conjugategradient.c"
# cd ..
# NOTE(review): IDIR is declared but unused ("-I include" is hard-coded above),
# and "-I kissff" looks like a typo for "kissfft" — confirm against the tree.
gcc ${CFLAGS} ${INCLUDE} ${MODULES} ${TDIR}/${TARGET}.c -Lkissfft -lkissfft-double -o ${TARGET}.exe -lm
| true
|
f875f09a3c16a6b48c4ce59dc1393299dd005fb5
|
Shell
|
Sanjit-Shelke/Operating_Systems
|
/Assignment-2/Assignment_2/fileNameSize.sh
|
UTF-8
| 151
| 3.203125
| 3
|
[] |
no_license
|
# Print "size name" for every entry in ./a smaller than 5000 bytes.
# Bug fix: the original compared the whole "size name" string against "5000"
# lexicographically ([[ "$file" < "5000" ]]), which is not a numeric size
# test; compare the size field numerically with -lt instead.
cd a
for i in *
do
	if [ -e "$i" ]
	then
		# 5th column of ls -l is the size in bytes.
		size=$(ls -l -- "$i" | awk '{print $5}')
		if [ "$size" -lt 5000 ]
		then
			echo "$size $i"
		fi
	fi
done
| true
|
1ead56603329a9ff9e6010126676c928dabc988e
|
Shell
|
aaron-613/solace-app-testing
|
/automated-failover-test-semp.sh
|
UTF-8
| 3,567
| 2.703125
| 3
|
[] |
no_license
|
#!/usr/bin/env sh
# Induce a short, controlled outage on a Solace broker via SEMPv2 so client
# applications' reconnect/failover handling can be observed.
# https://docs.solace.com/API-Developer-Online-Ref-Documentation/swagger-ui/config/index.html

ROUTER=localhost:8080
ADMIN_USER=admin
ADMIN_PW=admin
VPN=default
OUTAGE_LENGTH_SECONDS=5

VPN=default
CLIENT_USERNAME=default
CLIENT_PROFILE=default
ACL_PROFILE=default

#echo Getting hostname via SEMPv1...
#OUTPUT=$(curl -s -u $ADMIN_USER:$ADMIN_PW http://$ROUTER/SEMP -X POST -d '<rpc><show><hostname/></show></rpc>' | perl -ne ' if (m|<hostname>(.*?)</hostname>|) { print "$1"; } ')
# might not be global admin priveleges (e.g. Solace CLoud)

# Remember the current spool size so it can be restored after the spool test.
CUR_SPOOL=$(curl -s -u $ADMIN_USER:$ADMIN_PW "http://$ROUTER/SEMP/v2/config/msgVpns/default?select=maxMsgSpoolUsage" -X GET -H "Content-type:application/json" | perl -ne ' if (/"maxMsgSpoolUsage":(\d+)/) { print "$1"; } ')
echo CUR SPOOL = $CUR_SPOOL

echo About to run some simple error case tests on Solace broker $OUTPUT at $ROUTER

# Bounce the Message VPN: disabling it drops every client connection.
echo About to shutdown $VPN VPN for $OUTAGE_LENGTH_SECONDS seconds...
if ! curl -f -s -S -u $ADMIN_USER:$ADMIN_PW "http://$ROUTER/SEMP/v2/config/msgVpns/$VPN" -X PATCH -H "Content-type:application/json" -d '{"enabled":false}' > /dev/null; then
    echo " X ERROR! Could not shutdown $VPN VPN. Exiting."
    exit 1
else
    echo " + Success! $VPN VPN is shutdown."
fi
sleep $OUTAGE_LENGTH_SECONDS
echo About to enable $VPN VPN...
if ! curl -f -s -S -u $ADMIN_USER:$ADMIN_PW "http://$ROUTER/SEMP/v2/config/msgVpns/$VPN" -X PATCH -H "Content-type:application/json" -d '{"enabled":true}' > /dev/null; then
    echo " X ERROR! Could not enable $VPN VPN. Beware, VPN might be left in a shutdown state. Exiting."
    exit 2
else
    echo " + Success! $VPN VPN is enabled."
fi
# NOTE: everything below this exit is disabled scaffolding for further tests.
exit 0

# Set the message spool to 0 to block all persistent publishing for a while.
echo About to disable all persistent publishing into $VPN VPN for $OUTAGE_LENGTH_SECONDS seconds...
if ! curl -f -s -S -u $ADMIN_USER:$ADMIN_PW "http://$ROUTER/SEMP/v2/config/msgVpns/$VPN" -X PATCH -H "Content-type:application/json" -d '{"maxMsgSpoolUsage":0}' > /dev/null; then
    echo " X ERROR! Could not shutdown $VPN VPN. Exiting."
    exit 1
else
    echo " + Success! $VPN VPN is shutdown."
fi
sleep $OUTAGE_LENGTH_SECONDS
echo About to put message spool back to $CUR_SPOOL MB in $VPN VPN...
# Bug fix: the JSON payload was single-quoted, so the literal text
# "$CUR_SPOOL" (not its value) was sent to the broker.
if ! curl -f -s -S -u $ADMIN_USER:$ADMIN_PW "http://$ROUTER/SEMP/v2/config/msgVpns/$VPN" -X PATCH -H "Content-type:application/json" -d "{\"maxMsgSpoolUsage\":$CUR_SPOOL}" > /dev/null; then
    echo " X ERROR! Could not enable $VPN VPN. Beware, VPN might be left in a shutdown state. Exiting."
    exit 2
else
    echo " + Success! VPN $VPN is enabled."
fi

# now let's add an ACL publish issue
echo About to add a publish ACLs to acl-profile $ACL_PROFILE for $OUTAGE_LENGTH_SECONDS seconds...
exit 3
if ! curl -f -s -S -u $ADMIN_USER:$ADMIN_PW "http://$ROUTER/SEMP/v2/config/msgVpns/$VPN" -X PATCH -H "Content-type:application/json" -d '{"enabled":false}' > /dev/null; then
    echo " X ERROR! Could not shutdown VPN. Exiting."
    exit 1
else
    echo " + Success! VPN is shutdown."
fi
sleep $OUTAGE_LENGTH_SECONDS
echo About to enable the VPN...
if ! curl -f -s -S -u $ADMIN_USER:$ADMIN_PW "http://$ROUTER/SEMP/v2/config/msgVpns/$VPN" -X PATCH -H "Content-type:application/json" -d '{"enabled":true}' > /dev/null; then
    echo " X ERROR! Could not enable VPN. Beware, VPN might be left in a shutdown state. Exiting."
    exit 2
else
    echo " + Success! VPN is enabled."
fi
| true
|
5ba404a96f685bdf0fb1908fc33f966e6dbf435e
|
Shell
|
liqun1981/NERSC-HYCOM-CICE
|
/hycom/MSCPROGS/src/ExtractNC2D/m2t
|
UTF-8
| 181
| 2.78125
| 3
|
[] |
no_license
|
#!/bin/bash
#set -ex
# Thin wrapper: run the sibling m2nc program with -t prepended so that m2nc
# produces tecplot output.
# This assumes that the m2nc routine is in the same dir as m2t
prog="${0%m2t}m2nc"
# Fix: pass arguments through positionally with "$@" instead of flattening
# them into one string and re-splitting ($args), so arguments containing
# spaces survive intact; quote $prog for the same reason.
"$prog" -t "$@"
| true
|
0214ea78b6004e93e99b7864203096d97eda375b
|
Shell
|
perficient-msftnbu/OpenShiftOnAzure-Workshop
|
/WorkshopSetup/CreateProjects.sh
|
UTF-8
| 1,341
| 3.515625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Create (default) or delete (-d) a batch of numbered OpenShift projects with
# matching Azure AD users, granting each user "edit" on their own project.
# Requires logged-in 'az' and 'oc' CLIs.
# Flags: -p=<base project name> -u=<base user name> -e=<email domain>
#        -n=<count> -pw=<password> [-d]
shouldDelete=0
while [ "$1" != "" ]; do
    case $1 in
        -d | --delete ) shouldDelete=1
            ;;
        # "${1#*=}" strips everything up to and including '=' (the flag name).
        -p=* | --projectName=* )
            baseProjectName="${1#*=}"
            ;;
        -u=* | --userName=* )
            baseUserName=${1#*=}
            ;;
        -e=* | --emailDomain=* )
            baseDomain=${1#*=}
            ;;
        -n=* | --numberOfProjects=* )
            stringProjs=${1#*=}
            # Force an integer context (errors out on a non-numeric value).
            numberOfProjects=$(($stringProjs + 0))
            ;;
        -pw=* | --password=* )
            password=${1#*=}
            ;;
    esac
    shift
done
if (( $shouldDelete == 0 ));
then
    # Create mode: user <base><i><domain> plus project <base><i>, 1..N.
    for ((c=1; c<=$numberOfProjects; c++))
    do
        projectName="$baseProjectName$c"
        userName="$baseUserName$c$baseDomain"
        az ad user create --display-name $userName --password $password --user-principal-name $userName
        oc new-project $projectName
        oc adm policy add-role-to-user edit $userName -n $projectName
    done
else
    # Delete mode: remove the same users and projects.
    for ((c=1; c<=$numberOfProjects; c++))
    do
        projectName="$baseProjectName$c"
        userName="$baseUserName$c$baseDomain"
        az ad user delete --upn-or-object-id $userName
        oc delete project $projectName
    done
fi
| true
|
e7d2d86765ff6e01ab7605d6efeaf1fa1c487ee0
|
Shell
|
stonebig/qtpy
|
/ci/test-pyside2.sh
|
UTF-8
| 882
| 3.40625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# CI helper: on CircleCI container 0 only, swap Qt/PyQt for a PySide2 nightly
# wheel, symlink its Qt libraries so imports resolve, and run the qtpy tests.
export PATH="$HOME/miniconda/bin:$PATH"
source activate test

# Download PySide2 wheel
wget -q https://bintray.com/fredrikaverpil/pyside2-wheels/download_file?file_path=ubuntu14.04%2FPySide2-2.0.0.dev0-cp27-none-linux_x86_64.whl -O PySide2-2.0.0.dev0-cp27-none-linux_x86_64.whl

# We only use container 0 for PySide2
if [ "$CIRCLE_NODE_INDEX" = "0" ]; then
    conda remove -q qt pyqt
    pip install PySide2-2.0.0.dev0-cp27-none-linux_x86_64.whl
else
    exit 0
fi

# Make symlinks for Qt libraries (else imports fail).
# Fix: iterate the glob directly instead of parsing `ls` output (SC2045),
# and quote the expansions.
pushd "$HOME/miniconda/envs/test/lib/python2.7/site-packages/PySide2/"
for file in Qt*x86_64-linux-gnu.so
do
    symlink=${file%.x86_64-linux-gnu.so}.so
    ln -s "$file" "$symlink"
done
popd

python qtpy/tests/runtests.py
# Force quitting if exit status of runtests.py was not 0
if [ $? -ne 0 ]; then
    exit 1
fi

pip uninstall -y -q pyside2
| true
|
cea83b7fcc353c0edd12c00bde82878e274de748
|
Shell
|
hongdongxiao/ansible
|
/roles/install_zabbix_agentd/files/install_zabbix.sh
|
UTF-8
| 452
| 2.96875
| 3
|
[] |
no_license
|
#!/bin/sh
### zabbix client install
### by hongdongxiao 2016-12-17
# Build and install the Zabbix agent from a source tarball in /tmp, creating a
# dedicated zabbix user/group first. Run as root.
# Fix: anchor the grep pattern — a bare "zabbix" would also match entries that
# merely contain the word (e.g. "zabbixsrv"), skipping the group/user creation.
grep -q "^zabbix:" /etc/group
GROUP_IS=$?
if [ $GROUP_IS -eq 1 ];then
    groupadd zabbix
fi
grep -q "^zabbix:" /etc/passwd
USER_IS=$?
if [ $USER_IS -eq 1 ];then
    # No home dir (-M), no login shell — service account only.
    useradd -g zabbix zabbix -M -s /sbin/nologin
fi
ZABBIX_VERSION="2.4.7"
cd /tmp
tar -zxvf zabbix-${ZABBIX_VERSION}.tar.gz
cd zabbix-${ZABBIX_VERSION}
./configure --prefix=/usr/local/zabbix --enable-agent
make && make install
| true
|
b086a8b8e383c28d3a8b23736327bfd3a308c605
|
Shell
|
openstack/neutron
|
/neutron/tests/contrib/gate_hook.sh
|
UTF-8
| 2,637
| 3.359375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Devstack gate hook for neutron CI: assembles a late-local.conf from small
# per-feature hook files (via devstack-tools' dsconf), then launches the
# devstack gate. $1 selects the job flavour (api/pecan/scenario/rally),
# $2 an optional sub-flavour (e.g. "dvrskip"). $BASE is provided by the gate.
set -ex

VENV=${1:-"api"}
FLAVOR=${2:-"all"}

GATE_DEST=$BASE/new
NEUTRON_DIR=$GATE_DEST/neutron
GATE_HOOKS=$NEUTRON_DIR/neutron/tests/contrib/hooks
DEVSTACK_PATH=$GATE_DEST/devstack
LOCAL_CONF=$DEVSTACK_PATH/late-local.conf
RALLY_EXTRA_DIR=$NEUTRON_DIR/rally-jobs/extra
DSCONF=/tmp/devstack-tools/bin/dsconf

# Install devstack-tools used to produce local.conf; we can't rely on
# test-requirements.txt because the gate hook is triggered before neutron is
# installed
sudo -H pip install virtualenv
virtualenv /tmp/devstack-tools
/tmp/devstack-tools/bin/pip install -U devstack-tools==0.4.0

# Inject config from hook into localrc
function load_rc_hook {
    local hook="$1"
    local tmpfile
    local config
    tmpfile=$(mktemp)
    config=$(cat $GATE_HOOKS/$hook)
    # Wrap the hook's raw content in a [[local|localrc]] section before merging.
    echo "[[local|localrc]]" > $tmpfile
    $DSCONF setlc_raw $tmpfile "$config"
    $DSCONF merge_lc $LOCAL_CONF $tmpfile
    rm -f $tmpfile
}

# Inject config from hook into local.conf
function load_conf_hook {
    local hook="$1"
    $DSCONF merge_lc $LOCAL_CONF $GATE_HOOKS/$hook
}

# Tweak gate configuration for our rally scenarios
function load_rc_for_rally {
    for file in $(ls $RALLY_EXTRA_DIR/*.setup); do
        tmpfile=$(mktemp)
        config=$(cat $file)
        echo "[[local|localrc]]" > $tmpfile
        $DSCONF setlc_raw $tmpfile "$config"
        $DSCONF merge_lc $LOCAL_CONF $tmpfile
        rm -f $tmpfile
    done
}

# Route on job flavour; each load_* call layers one feature's config.
case $VENV in
"api"|"api-pecan"|"full-pecan"|"dsvm-scenario-ovs")
    # TODO(ihrachys) consider feeding result of ext-list into tempest.conf
    load_rc_hook api_all_extensions
    if [ "${FLAVOR}" = "dvrskip" ]; then
        load_rc_hook disable_dvr_tests
    fi
    load_conf_hook quotas
    load_rc_hook uplink_status_propagation
    load_rc_hook dns
    load_rc_hook qos
    load_rc_hook segments
    load_rc_hook trunk
    load_rc_hook network_segment_range
    load_conf_hook vlan_provider
    load_conf_hook osprofiler
    load_conf_hook availability_zone
    load_conf_hook tunnel_types
    load_rc_hook log # bug 1743463
    load_conf_hook openvswitch_type_drivers
    if [[ "$VENV" =~ "dsvm-scenario" ]]; then
        load_rc_hook ubuntu_image
    fi
    if [[ "$VENV" =~ "pecan" ]]; then
        load_conf_hook pecan
    fi
    if [[ "$FLAVOR" = "dvrskip" ]]; then
        load_conf_hook disable_dvr
    fi
    if [[ "$VENV" =~ "dsvm-scenario-ovs" ]]; then
        load_conf_hook dvr
    fi
    ;;
"rally")
    load_rc_for_rally
    ;;
*)
    echo "Unrecognized environment $VENV".
    exit 1
esac

export DEVSTACK_LOCALCONF=$(cat $LOCAL_CONF)
$BASE/new/devstack-gate/devstack-vm-gate.sh
| true
|
736ba1d6b33aec64e206cd0b76340c5837e339df
|
Shell
|
Manny27nyc/gh-clean-branches
|
/gh-clean-branches
|
UTF-8
| 3,404
| 4.375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/zsh
# safely delete local branches that have no remotes and no hanging changes
# will not delete branches with not commited changes
# run inside a git repo folder
# (zsh-specific: relies on ${(f)...} line splitting, ${...:#pat} filtering
#  and the :| array-difference operator.)
DRY_RUN=false
if [ $# -gt 0 ]; then
  case "$1" in
    --dry-run )
      DRY_RUN=true
      ;;
    --force)
      FORCE_DELETE=true
      ;;
    * )
      printf "%s\n" "Usage: gh clean-branches [--dry-run] [--force]"
      exit 1
      ;;
  esac
fi
red=$(printf '\e[1;31m')
green=$(printf '\e[1;32m')
yellow=$(printf '\e[1;33m')
blue=$(printf '\e[1;34m')
end=$(printf '\e[0m')
printf "%s\n" "${green}Sync branches${end}"
git fetch -p >/dev/null 2>&1 # hide response
upstream_name=$(git remote show)
home_branch=$(git branch --show-current)
default_branch=$(git remote show ${upstream_name} | awk '/HEAD branch/ {print $NF}')
printf "%s\n" "${green}Checking out ${default_branch}${end}"
git checkout $default_branch
printf "%s\n" "${green}Pulling ${default_branch}${end}"
git pull ${upstream_name} ${default_branch}
if [[ $? -eq 1 ]]
then
  printf "%s\n" "${red}Failed to pull, check for uncomitted changes.${end}"
  exit 1
fi
local_branches_str=$(git branch)
local_branches_str=${local_branches_str/\*?/ } # trim the "* " marker on the current branch
remote_branches_str=$(git branch -r)
remote_branches_str=${remote_branches_str//${upstream_name}\// } # trim the "origin/" from branch names
printf "%s\n%s\n" "${blue}Local branches:${end}" "${local_branches_str}"
printf "%s\n%s\n" "${blue}Remote branches:${end}" "${remote_branches_str}"
setopt extended_glob
local_branches=("${(f)local_branches_str}") # split string by \n to array
local_branches=(${local_branches:#* ${default_branch}}) # filter out default_branch
local_branches=(${local_branches// ##}) # trim spaces
remote_branches=("${(f)remote_branches_str}") # split string by \n to array
remote_branches=(${remote_branches:#* ${default_branch}}) # filter out default_branch
remote_branches=(${remote_branches// ##}) # trim spaces
missing_upstream_branches=(${local_branches:|remote_branches}) # local_branches minus remote_branches
branches_count=${#missing_upstream_branches[@]}
# -D force-deletes even unmerged branches; -d refuses unless merged.
if [[ ${FORCE_DELETE} == true ]]; then
  delete_flag='-D'
else
  delete_flag='-d'
fi
if [[ ${branches_count} -eq 0 ]]; then
  printf "%s\n" "${green}No local branches with missing upstream found${end}"
else
  printf "%s\n" "${blue}Local branches with missing upstream:${end}"
  for branch in "${missing_upstream_branches[@]}"; do
    printf "%s\n" "  ${branch}"
  done
  if [[ ${DRY_RUN} == false ]]; then
    [[ ${FORCE_DELETE} == true ]] && echo "${yellow}Force delete is enabled${end}"
    for branch in "${missing_upstream_branches[@]}"; do
      printf "%s\n" "${green}Deleting branch:${end} ${branch}"
      git branch ${delete_flag} "${branch}"
      if [[ $? -eq 1 ]]; then
        printf "%s\n" "❌ ${red}Could not delete${end} ${branch}"
        printf "%s\n" "${yellow}Try using --force flag${end}"
      fi
    done
  else
    printf "%s\n" "${green}Dry run: not deleting branches${end}"
  fi
fi
# Trying to checkout the home branch, if this branch was deleted, it will silently fail and stay on the default_branch
git checkout $home_branch >/dev/null 2>&1 # hide response
printf "\n%s\n" "${green}Done${end}"
| true
|
10d8f4542cdf7e13bb72920acbccfc5b89539b6b
|
Shell
|
superwf/dotfiles
|
/install.sh
|
UTF-8
| 641
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/bash
# Bootstrap a new machine: ensure base tools exist, link dotfiles, install nvm.
# Needs sudo for package installation.

# check command exist
# Bug fix: the original ran "sudo apt-get curl -y" etc. without the "install"
# operation, which apt-get rejects ("E: Invalid operation curl").
type curl > /dev/null 2>&1 || sudo apt-get install -y curl
type git > /dev/null 2>&1 || sudo apt-get install -y git-core
# NOTE(review): on Debian/Ubuntu the package providing the nvim binary is
# "neovim", not "nvim" — confirm for the target distro.
type nvim > /dev/null 2>&1 || sudo apt-get install -y neovim

sudo bash symlink-dotfiles.sh

# install nvm then use nvm install nodejs
test -d ~/.nvm || curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.32.1/install.sh | bash

# install rvm then use rvm install ruby
# test -d ~/.rvm || curl -L https://get.rvm.io | bash -s stable --autolibs=enabled [--ruby] [--rails] [--trace]
# type rvm > /dev/null 2>&1 || curl -L https://get.rvm.io | bash -s stable --autolibs=enabled [--ruby] [--rails] [--trace]
| true
|
4ca3d93d81dba0db595458173a8c1a99015b3cf0
|
Shell
|
Gnado/trprinter
|
/core/trprint.sh
|
UTF-8
| 2,823
| 3.71875
| 4
|
[] |
no_license
|
#! /bin/bash
# TRPrinter — driver for a serial thermal printer. Reads printer settings from
# trprint.cfg and exposes init / test-page / print / cancel operations via
# command-line flags (see usage()).

#PARAMETERS
VERSION="0.1"
# Bug fix: tilde inside double quotes is NOT expanded by the shell (SC2088),
# so the original WORKDIR="~/tmp" produced a literal "~/tmp" path. Use $HOME.
WORKDIR="$HOME/tmp"

#CONFIG
#Load config printer
. trprint.cfg

#Printer command (raw escape sequences understood by the printer firmware)
PC_TP='\x12\x54' #Printing test page - DC2 T [Name]
PC_SC='\x1B\x37' #Setting Control Parameter Command - ESC 7 n1 n2 n3

# Print program version.
version()
{
	echo "TRPrinter version: $VERSION"
}

# Print command-line help.
usage()
{
	echo "Usage: trprint [[[-f file ] [-i]] | [-h]]"
	echo "-v, --version trprint version"
	echo "-f, --file file for print"
	echo "-u, --url url webpage for print"
	echo "-s, --send send text to printer"
	echo "-i, --init initialization printer"
	echo "-t, --test printing test page"
}

# Build and send the ESC 7 control-parameter sequence from trprint.cfg values.
init()
{
	echo "Initialization printer"
	echo "uart_port : $uart_port"
	echo "baud_rate : $baud_rate"
	echo "heating_dots : $heating_dots"
	echo "heating_time : $heating_time"
	echo "heating_interval : $heating_interval"
	echo ""
	PC_SEND="$PC_SC"
	PC_SEND+="\x"
	PC_SEND+=`echo "obase=16; $heating_dots" | bc`
	PC_SEND+="\x"
	PC_SEND+=`echo "obase=16; $heating_time" | bc`
	PC_SEND+="\x"
	PC_SEND+=`echo "obase=16; $heating_interval" | bc`
	# NOTE(review): bc emits unpadded hex (e.g. "7" -> "\x7"); confirm the
	# printer accepts single-digit \x escapes.
	# echo -n -e $PC_SEND
	# echo '\n'
	#ESC 7 7 160 40
	# echo -n -e '\x1B\x37\x7\xC8\x28'
	# echo '\n'
	# NOTE(review): the device is /dev/$uart, but the variable echoed above is
	# uart_port — confirm trprint.cfg really defines $uart.
	echo -n -e "$PC_SEND" > /dev/$uart
}

# DC2 T — ask the printer for its built-in test page.
test()
{
	echo "Print test page"
	echo -n -e "$PC_TP" > /dev/$uart
}

# Dispatch on whatever -s / -u / -f set: raw text, a captured web page, or a
# file handed to $cmd_printfile (from trprint.cfg).
print()
{
	if [ ! -z "$SEND" ]; then
		echo "Send to printer '$SEND'"
		echo "$SEND" > /dev/$uart
		exit 0
	fi
	if [ ! -z "$URL" ]; then
		echo "Save page from url '$URL'"
		FILE="$WORKDIR/tmpfile/webpage.png"
	fi
	if [ ! -z "$FILE" ]; then
		echo "Print file: $FILE"
		if [ ! -f $FILE ]; then
			echo "File not found!"
			exit 1
		fi
		# lp -o media=$lp_media $FILE
		eval $cmd_printfile $FILE
		exit 0
	fi
}

# Cancel the current print job (delegates to the system 'cancel' command).
cancel(){
	echo "Cancels a print job"
	eval "cancel"
}

while [ "$1" != "" ]; do
	case $1 in
		-v | --version ) version
			exit
			;;
		-h | --help ) usage
			exit
			;;
		-f | --file ) shift
			FILE=$1
			;;
		-u | --url ) shift
			URL=$1
			;;
		-s | --send ) shift
			SEND=$1
			;;
		-i | --init ) init
			exit
			;;
		-t | --test ) test
			exit
			;;
		-c | --cancel ) cancel
			exit
			;;
		* ) usage
			exit 1
	esac
	shift
done

print
echo "Error bash"
| true
|
7718703fce0134e1e7061ee527ade28932053424
|
Shell
|
jakeleichtling/Compiler
|
/SemanticRoutines/testing/run_all_error_tests.sh
|
UTF-8
| 2,764
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# run_all_error_tests.sh
#
# A bash script for running the compiler on all error tests {1-5,6a-6d,7-20}_error_test.c57.
# These C57 input files correspond to the error tests enumerated in ERROR_TESTING_README.
#
# Derek Salama & Jake Leichtling
# 5/29/2013

# Note: Must be run from the testing directory for file paths to work.

cd ..
make
cd ./testing

echo "---------------------------------------------------------"

# One entry per error test: "<id>.) <description>". The input file name is
# derived from the id as <id>_error_test.c57, so the 20+ near-identical
# echo/djcc stanzas of the original collapse into one loop with identical
# output.
tests=(
    "1.) Newline in string"
    "2.) Main function that takes parameters"
    "3.) Main function that has a non-void return type"
    "4.) No main function"
    "5.) Call an undeclared function"
    "6a.) Pass array when value expected"
    "6b.) Pass value when array expected"
    "6c.) Pass double when int expected"
    "6d.) Pass the wrong number of parameters"
    "7.) Array variable in mathematical operation"
    "8.) Array variable in mathematical comparison"
    "9.) Two variables of the same name in the same scope"
    "10.) Use an undeclared variable"
    "11.) Initialization of variable in outter scope of a function with same name as a parameter"
    "12.) Index into an array with a float"
    "13.) Subscript a non-array variable"
    "14.) Assign to an array pointer"
    "15.) Assign from an array pointer"
    "16.) Implicit cast of double to int"
    "17.) Logical operation with float"
    "18.) Increment an array pointer"
    "19.) Return types of function declaration and function body don't match"
    "20.) Non-void function without a return statement"
)

first=1
for desc in "${tests[@]}"; do
    # Blank line between consecutive tests (matches the original output).
    if [ "$first" -eq 1 ]; then first=0; else echo; fi
    id=${desc%%.)*}              # "6a.) Pass array ..." -> "6a"
    file="${id}_error_test.c57"
    echo "$desc ($file)"
    ../djcc "$file"
done

echo "---------------------------------------------------------"
| true
|
b7ce3acd7cd5b6d45b507b8bb486307e735c684d
|
Shell
|
josefbacik/virt-scripts
|
/fedora/update-btrfs-progs.sh
|
UTF-8
| 459
| 2.859375
| 3
|
[] |
no_license
|
#!/bin/bash
# Rebuild and install the latest btrfs-progs devel branch on a remote VM.
# Usage: ./update-btrfs-progs.sh <vm-name>   (ssh'd into as root)
. ./local.config
. ./common
[ "$#" -ne 1 ] && _fail "must specify a vm name"
# read -r -d '' fills COMMAND with the whole heredoc; it returns non-zero at
# EOF, but that status is deliberately ignored here. The heredoc body is the
# shell script executed remotely over ssh below.
read -r -d '' COMMAND << EOM
cd btrfs-progs
git reset --hard
git checkout master
git branch -D devel
git pull
git checkout devel
make clean-all
./autogen.sh
./configure --disable-documentation --enable-experimental --bindir=/usr/sbin --prefix=/usr --exec-prefix=/usr --disable-python
make -j4
make install
cp btrfs-corrupt-block /usr/sbin
EOM
ssh root@$1 "$COMMAND"
| true
|
41ee73da0f9dfad36a63c9eb1bfa6e755e5764c6
|
Shell
|
millaguie/mole
|
/script/travis_setup.sh
|
UTF-8
| 882
| 2.640625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Copyright 2020 Jaume Martin
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# CI setup: install the ntop stable APT repo, PF_RING and libpcap headers,
# then load the kernel modules the test-suite needs.
# Idiom fixes: $(...) instead of backticks, quoted command substitutions.
set -ex

cwd=$(pwd)
cd /tmp
wget "http://apt-stable.ntop.org/$(lsb_release -r | cut -f2)/all/apt-ntop-stable.deb"
sudo dpkg -i apt-ntop-stable.deb
sudo apt-get update
sudo apt-get install -y "linux-headers-$(uname -r)" pfring-dkms pfring libpcap-dev
sudo modprobe pf_ring
sudo modprobe tun
cd "$cwd"
| true
|
18356ba78cc5c4512e949dff6aa26713c1e49b17
|
Shell
|
dilanp/Azure-CLI
|
/Web-Apps/Containerised-ACR-CI-CD.sh
|
UTF-8
| 4,024
| 3.28125
| 3
|
[] |
no_license
|
#Login and set the correct Azure subscription.
# Walkthrough: deploy an ACR-hosted container image to an Azure Web App with a
# managed identity (AcrPull), add a staging slot with continuous deployment
# driven by an ACR webhook, then slot-swap to production. Run line-by-line.
# setup the variables.
acr_resource_group="PluralsightAcr"
resource_group="cicdappservice"
location="uksouth"
plan_name="cicdappservice"
app_name="cicd-pluralsight-1"
subscription_id=$(az account show --query id --output tsv)
az acr list \
  -g $acr_resource_group \
  --output table
acr_name="psacr9577" #Need to set manually using the result of the query above.
#Use the following information to form the ACR image name.
acr_login_server=$(az acr show \
  -n $acr_name \
  --query loginServer \
  -o tsv)
az acr repository list -n $acr_name -o table
image="mvcmovie"
az acr repository show-tags -n $acr_name --repository mvcmovie -o table #Get repo name from query above.
tag="v1"
image_name="$acr_login_server/$image:$tag"
#Use these only if you need ACR credentials!!!
#cr_username=$(az acr credential show \
#    -n $acr_name \
#    --query username \
#    -o tsv)
#cr_password=$(az acr credential show \
#    -n $acr_name \
#    --query passwords[0].value \
#    -o tsv)
# create a resource group.
az group create \
  -l $location \
  -n $resource_group
# create an app service plan to host
az appservice plan create \
  -n $plan_name \
  -g $resource_group \
  -l $location \
  --sku S1 \
  --is-linux
# n.b. can't use anything but docker hub here
# so we have to arbitrarily pick a runtime --runtime "node|6.2" or a public image like scratch.
az webapp create \
  -n $app_name \
  -g $resource_group \
  --plan $plan_name \
  --deployment-container-image-name $image_name
#set the WEBSITES_PORT environment variable.
az webapp config appsettings set \
  -g $resource_group \
  -n $app_name \
  --settings WEBSITES_PORT=80
#Enable managed identity for the web app and get the principalId.
principal_id=$(az webapp identity assign \
  -g $resource_group \
  -n $app_name \
  --query principalId \
  --output tsv)
#Grant the web app permission to access the container registry.
az role assignment create \
  --assignee $principal_id \
  --scope "/subscriptions/$subscription_id/resourceGroups/$acr_resource_group/providers/Microsoft.ContainerRegistry/registries/$acr_name" \
  --role "AcrPull"
# specify the container registry and the image to deploy for the web app.
az webapp config container set \
  -n $app_name \
  -g $resource_group \
  --docker-custom-image-name $acr_login_server/$image:$tag \
  --docker-registry-server-url "https://$acr_login_server"
#Now try the website URL!!!
echo "http://$app_name.azurewebsites.net"
# create a staging slot by cloning from production slot settings.
az webapp deployment slot create \
  -g $resource_group \
  -n $app_name \
  -s staging \
  --configuration-source $app_name
# Notice that staging has -staging added in the host name.
# This should now be running an exact copy of the production slot.
az webapp show \
  -n $app_name \
  -g $resource_group \
  -s staging \
  --query "defaultHostName" \
  -o tsv
# enable CD for the staging slot
az webapp deployment container config \
  -g $resource_group \
  -n $app_name \
  -s staging \
  --enable-cd true
# get the webhook URL for staging slot.
ci_cd_url=$(az webapp deployment container show-cd-url \
  -s staging \
  -n $app_name \
  -g $resource_group \
  --query CI_CD_URL \
  -o tsv)
# Configure the webhook on an ACR registry
az acr webhook create \
  --registry $acr_name \
  -n myacrwebhook \
  --actions push \
  --uri $ci_cd_url
# Change the code to make the website different.
# Do another docker build and push a new version to ACR.
az acr login -n $acr_name
# NOTE(review): this push targets a hard-coded registry/repo
# (pluralsightacr.azurecr.io/samplewebapp) that does not match
# $acr_login_server/$image used above — confirm which is intended.
docker push pluralsightacr.azurecr.io/samplewebapp:latest
# perform a slot swap
az webapp deployment slot swap \
  -g $resource_group \
  -n $app_name \
  --slot staging \
  --target-slot production
# clean up the web app and app service.
az group delete -n $resource_group --yes --no-wait
# delete the webhook
az acr webhook delete --registry $acr_name --name myacrwebhook
| true
|
ef0559acfb66a418f125eb3dca9f3d5a6bd9fe07
|
Shell
|
socallinuxexpo/scale-switch
|
/bin/version.sh
|
UTF-8
| 296
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/sh
# Regenerate src/version.cpp with the current git short hash, appending
# "-unclean" when the working tree has uncommitted changes.
export CHANGES=""

# git diff-index --quiet exits non-zero when the index differs from HEAD.
if ! git diff-index --quiet HEAD --
then
    CHANGES="-unclean"
fi

cat > src/version.cpp <<EOF
/**
 * AUTOGENERATED -- DO NOT TOUCH, this means you, Lori
 */
const char* VERSION = "$(git rev-parse --short HEAD)${CHANGES}";
EOF
| true
|
b4d61cdd6813b266b6819b72097aea7d3396fd01
|
Shell
|
takuyan/dotfiles
|
/install.sh
|
UTF-8
| 2,821
| 3.484375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
rbenv_install_or_update()
{
echo "[CHECK] rbenv"
if [ -d $HOME/.rbenv ]
then
echo "[UPDATE] git pull rbenv"
cd $HOME/.rbenv
git pull origin master
ruby_build_install_or_update 1
else
echo "[INSTALL] git clone rbenv"
git clone git@github.com:rbenv/rbenv.git $HOME/.rbenv
exec $SHELL
ruby_build_install_or_update 1
fi
rbenv rehash
}
ruby_build_install_or_update()
{
mkdir -p $HOME/.rbenv/plugins
cd $HOME/.rbenv/plugins
echo "[CHECK] ruby-build"
if [ -d $HOME/.rbenv/plugins/ruby-build ]
then
echo "[UPDATE] git pull ruby-build"
cd $HOME/.rbenv/plugins/ruby-build
git pull origin master
else
echo "[INSTALL] git clone ruby-build"
cd $HOME/.rbenv/plugins
git clone git@github.com:rbenv/ruby-build.git $HOME/.rbenv/plugins/ruby-build
fi
}
oh_my_zsh_install_or_update()
{
echo "[CHECK] Oh-My-Zsh"
if [ -d $HOME/.oh-my-zsh ]
then
echo "[UPDATE] git pull Oh-My-Zsh"
cd $HOME/.oh-my-zsh
git pull origin master
else
echo "[INSTALL] git clone Oh-My-Zsh"
git clone git://github.com/robbyrussell/oh-my-zsh.git $HOME/.oh-my-zsh
fi
}
zsh_completions_install_or_update()
{
echo "[CHECK] zsh-completions"
if [ -d $HOME/.zsh-completions ]
then
echo "[UPDATE] git pull zsh-completions"
cd $HOME/.zsh-completions
git pull origin master
else
echo "[INSTALL] git clone zsh-completions"
git clone git://github.com/zsh-users/zsh-completions.git $HOME/.zsh-completions
# You may have to force rebuild `zcompdump`:
rm -f $HOME/.zcompdump
compinit
fi
}
delete_old_files()
{
echo "[DELETE] Delete the old files"
rm -f $HOME/.bashrc
rm -f $HOME/.bundle/config
rm -f $HOME/.gemrc
rm -f $HOME/.gitignore
rm -f $HOME/.gvimrc
rm -f $HOME/.powconfig
rm -f $HOME/.railsrc
rm -f $HOME/.vimrc
rm -f $HOME/.zshenv
rm -f $HOME/.zshrc
#rm ~/.gitconfig
}
# Symlink every dotfile from $HOME/dotfiles into its expected location.
# delete_old_files() is expected to have removed stale targets first;
# $HOME/.bundle is assumed to exist already.
symlink_files()
{
    echo "[Symlink] Symlinking files"
    # All paths quoted (the original left every expansion unquoted).
    ln -s "$HOME/dotfiles/bashrc" "$HOME/.bashrc"
    ln -s "$HOME/dotfiles/bundle_config" "$HOME/.bundle/config"
    ln -s "$HOME/dotfiles/gemrc" "$HOME/.gemrc"
    ln -s "$HOME/dotfiles/gitignore" "$HOME/.gitignore"
    ln -s "$HOME/dotfiles/gvimrc" "$HOME/.gvimrc"
    ln -s "$HOME/dotfiles/powconfig" "$HOME/.powconfig"
    ln -s "$HOME/dotfiles/railsrc" "$HOME/.railsrc"
    ln -s "$HOME/dotfiles/vimrc" "$HOME/.vimrc"
    # Neovim keeps its config under ~/.config/nvim, which may not exist yet.
    mkdir -p "$HOME/.config/nvim"
    ln -s "$HOME/dotfiles/nvim_init.vim" "$HOME/.config/nvim/init.vim"
    ln -s "$HOME/dotfiles/zshenv" "$HOME/.zshenv"
    ln -s "$HOME/dotfiles/zshrc" "$HOME/.zshrc"
    #ln -s $HOME/dotfiles/gitconfig ~/.gitconfig
}
#
# Main Start
#
# NOTE(review): every function visible here ignores its arguments, so the
# literal "1" passed below has no effect — confirm it is vestigial.
rbenv_install_or_update 1
oh_my_zsh_install_or_update 1
zsh_completions_install_or_update 1
delete_old_files 1
symlink_files 1
echo "[DONE] All done."
# Finish in the home directory regardless of where the helpers cd'd to.
cd $HOME
| true
|
4e2785f6ef8c4dab0556ac18263da99625c0758b
|
Shell
|
oliverangelil/MERGEDEX
|
/create_mergedataset_masked.sh
|
UTF-8
| 4,813
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
###################################################
# PRCPTOT
###################################################
# Build a merged GHCNDEX/HadEX2 dataset: QC-mask each source (>=80% data
# coverage in the first 10 years, last 10 years, and the whole period),
# then prefer GHCND values and fall back to HadEX2 where GHCND is missing.
# declare paths
path_data='/path_to_data/GHCNDEX-HadEX2'
fname_H2='H2_PRCPTOT_1901-2010_RegularGrid_global_3.75x2.5deg_LSmask.nc'
fname_GHCND='GHCND_PRCPTOT_1951-2017_RegularGrid_global_2.5x2.5deg_LSmask_3.75x2.5deg.nc'
# declare period
yr1='1951'
yr2='2010'
# number of acceptable missing years for entire period (20% of the span)
QC_all_years=$((($yr2-$yr1+1)*20/100)) #12 in this case
for obsname in H2 GHCND
do
# Indirect expansion: ${!fname_var} resolves to $fname_H2 or $fname_GHCND.
fname_var=fname_$obsname
# trim files to years of interest
cdo -s -selyear,$yr1/$yr2 $path_data/${!fname_var} $path_data/tmpf_${obsname}_PRCPTOT_1951-2010.nc
# generate 3 masks based on whether: 1) at least 80% of data is non-missing in first 10 years; 2) at least 80% of data is non-missing in last 10 years; and 3) at least 80% of data is non-missing in entire period.
cdo -s -lec,2 -timsum -eqc,-999 -setmisstoc,-999 -selyear,$yr1/$(($yr1 + 9)) $path_data/tmpf_${obsname}_PRCPTOT_1951-2010.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_first_10.nc
cdo -s -lec,2 -timsum -eqc,-999 -setmisstoc,-999 -selyear,$yr1/$(($yr2 - 9)) $path_data/tmpf_${obsname}_PRCPTOT_1951-2010.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_last_10.nc
cdo -s -lec,$QC_all_years -timsum -eqc,-999 -setmisstoc,-999 $path_data/tmpf_${obsname}_PRCPTOT_1951-2010.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_all.nc
# multiply all masks together to end up with a final mask
cdo -s -mul -mul $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_first_10.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_last_10.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_all.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_final.nc
# apply final mask to data (dividing by the 0/1 mask turns 0-cells missing)
cdo -s -div $path_data/tmpf_${obsname}_PRCPTOT_1951-2010.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_mask_final.nc $path_data/tmpf_${obsname}_PRCPTOT_1951-2010_QCmasked.nc
done
# merge GHCND and H2: take GHCND where it has data, otherwise H2
cdo -s -ifthenelse -gtc,-999 -setmisstoc,-999 $path_data/tmpf_GHCND_PRCPTOT_1951-2010_QCmasked.nc $path_data/tmpf_GHCND_PRCPTOT_1951-2010_QCmasked.nc $path_data/tmpf_H2_PRCPTOT_1951-2010_QCmasked.nc $path_data/MERGEDEX_PRCPTOT_1951-2010_QCmasked.nc
# delete temporary files
rm $path_data/tmpf_*
###################################################
# Rx1day
###################################################
# Same pipeline as PRCPTOT above, but only the annual (Ann) variable is kept.
# declare paths
fname_H2='H2_Rx1day_1901-2010_RegularGrid_global_3.75x2.5deg_LSmask.nc'
fname_GHCND='GHCND_Rx1day_1951-2017_RegularGrid_global_2.5x2.5deg_LSmask_3.75x2.5deg.nc'
for obsname in H2 GHCND
do
fname_var=fname_$obsname
# trim files to years
cdo -s -selyear,$yr1/$yr2 $path_data/${!fname_var} $path_data/tmpf_${obsname}_Rx1day_1951-2010.nc
# generate 3 masks based on whether: 1) at least 80% of data is non-missing in first 10 years; 2) at least 80% of data is non-missing in last 10 years; and 3) at least 80% of data is non-missing in entire period.
cdo -s -lec,2 -timsum -eqc,-999 -setmisstoc,-999 -selyear,$yr1/$(($yr1 + 9)) $path_data/tmpf_${obsname}_Rx1day_1951-2010.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_first_10.nc
cdo -s -lec,2 -timsum -eqc,-999 -setmisstoc,-999 -selyear,$yr1/$(($yr2 - 9)) $path_data/tmpf_${obsname}_Rx1day_1951-2010.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_last_10.nc
cdo -s -lec,$QC_all_years -timsum -eqc,-999 -setmisstoc,-999 $path_data/tmpf_${obsname}_Rx1day_1951-2010.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_all.nc
# multiply all masks together to end up with a final mask
cdo -s -mul -mul $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_first_10.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_last_10.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_all.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_final.nc
# apply final mask to data
cdo -s -selvar,Ann -div $path_data/tmpf_${obsname}_Rx1day_1951-2010.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_final.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_QCmasked.nc
rm $path_data/tmpf_${obsname}_Rx1day_1951-2010.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_first_10.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_last_10.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_all.nc $path_data/tmpf_${obsname}_Rx1day_1951-2010_mask_final.nc
done
# merge GHCND and H2
cdo -s -ifthenelse -gtc,-999 -setmisstoc,-999 $path_data/tmpf_GHCND_Rx1day_1951-2010_QCmasked.nc $path_data/tmpf_GHCND_Rx1day_1951-2010_QCmasked.nc $path_data/tmpf_H2_Rx1day_1951-2010_QCmasked.nc $path_data/MERGEDEX_Rx1day_1951-2010_QCmasked.nc
# delete temporary files
rm $path_data/tmpf_*
| true
|
7d9554b2b43d921d2418188761846298e1d06be6
|
Shell
|
relrod/aut
|
/pre/pip-latest.sh
|
UTF-8
| 278
| 3.015625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Upgrade pip (capped below 21 for Python < 3.6, which pip 21 dropped) and
# then install $PRODUCT at $VERSION. $PYTHON, $PIP, $PRODUCT and $VERSION
# are expected from the sourced helper scripts / environment.
set -eux
source pip-common.sh
source python-common.sh
# Upgrade pip
# NOTE(review): the Python inside this heredoc appears to have lost its
# indentation (the print lines must be indented under if/else) — confirm
# against the original file before relying on this script.
PIPVER="$($PYTHON <<EOF
import sys
if sys.version_info[:2] < (3, 6):
print("pip < 21")
else:
print("pip")
EOF
)"
$PIP install --upgrade "$PIPVER"
$PIP install $PRODUCT==$VERSION
| true
|
15233c8a4ad78a47ea0328e0cd605ed567998eba
|
Shell
|
alldatacenter/alldata
|
/dts/airbyte/tools/status/init.sh
|
UTF-8
| 1,604
| 3.5
| 4
|
[
"MIT",
"Elastic-2.0",
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
#!/usr/bin/env bash
set -e
# This script should only be used to set up the status site for the first time or to make your own version for testing.
# TODO move this setup to terraform
BUCKET=airbyte-connector-build-status
PROFILE=dev # AWS dev environment
REGION=us-east-2
S3_DOMAIN="$BUCKET.s3-website.$REGION.amazonaws.com"
# Disable the AWS CLI pager so the script never blocks on interactive output.
export AWS_PAGER=""
# Safety interlock: one-time setup has already been performed.
echo "This has already been created. Comment out this line if you really want to run this again." && exit 1
echo "Creating bucket..."
aws s3api create-bucket --bucket "$BUCKET" --region "$REGION" --create-bucket-configuration LocationConstraint="$REGION" --profile "$PROFILE"
echo "Setting policy for bucket..."
aws s3api put-bucket-policy --bucket "$BUCKET" --policy file://"$(pwd)"/tools/status/policy.json --profile "$PROFILE"
echo "Uploading default files..."
aws s3 sync "$(pwd)"/tools/status/defaults/ s3://"$BUCKET"/ --profile "$PROFILE"
echo "Setting bucket as website..."
aws s3 website s3://"$BUCKET"/ --index-document index.html --error-document error.html --profile "$PROFILE"
# Front the bucket website with CloudFront.
aws cloudfront create-distribution \
--origin-domain-name $S3_DOMAIN \
--default-root-object index.html \
--profile "$PROFILE"
echo "Site should be ready at http://$S3_DOMAIN"
echo "1. Add a certificate and cname to the distribution: https://advancedweb.hu/how-to-use-a-custom-domain-on-cloudfront-with-cloudflare-managed-dns/"
echo "2. Configure a CNAME on Cloudflare for status-api.airbyte.io to point to the bucket!"
echo "3. Create STATUS_API_AWS_ACCESS_KEY_ID and STATUS_API_AWS_SECRET_ACCESS_KEY Github secrets with access to the bucket."
|
998d097a937a8d679d3283f015abdb786d2d4c74
|
Shell
|
menny/docker_android
|
/android_studio/docker_as.sh
|
UTF-8
| 2,246
| 4.0625
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Run Android Studio from a Docker container, displaying its GUI on the host
# via XQuartz. This prelude verifies that the Docker daemon is running and
# (macOS only) that XQuartz is up, then allows the container's IP through
# the host X server.
# NOTE: shebang changed from /bin/sh to /bin/bash — the script uses [[ ]]
# later, which is a bashism.
if pgrep -x "Docker" > /dev/null
then
	echo "Docker ready."
else
	echo "Please start Docker and run this again."
	exit 1
fi

if [ "Darwin" = "$(uname)" ]
then
	if pgrep -x "Xquartz" > /dev/null
	then
		echo "Quartz ready."
	else
		echo "Starting Quartz..."
		# BUG FIX: the original wrote `if [ open -a XQuartz ]`, which only
		# tests the literal strings (always true) and never launches
		# XQuartz. Run the command and branch on its exit status instead.
		if open -a XQuartz
		then
			echo "Done."
		else
			echo "Failed to start XQuartz. Install via 'brew cask install xquartz'"
			exit 1
		fi
	fi
else
	echo "At this moment, this script only supports macOS."
	exit 1
fi

# Export the host IP for DISPLAY and allow it through the X server ACL.
export ip=$(ifconfig en0 | grep inet | awk '$1=="inet" {print $2}')
/opt/X11/bin/xhost + $ip
# Action dispatch: "new" creates a fresh container from an image,
# "start" resumes an existing container; anything else prints usage.
if [ "new" == "$1" ]; then
shift # past action
# Built as strings and expanded unquoted below so the words split into
# separate docker arguments.
BASE_RUN_COMMAND="docker run -d --privileged --network=host -e DISPLAY=$ip:0 -v /tmp/.X11-unix:/tmp/.X11-unix "
IMAGE_NAME="menny/android_studio:1.8.2-3.0.0-RC1"
ADDITIONAL_ARGS=""
# Parse `new` options: -i/--image, -a/--docker_args.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-i|--image)
IMAGE_NAME="$2"
shift # past argument
shift # past value
if [ "" == "$IMAGE_NAME" ]; then
echo "Please provide an image name to start, or omit the -i argument to use the default."
exit 1
fi
;;
-a|--docker_args)
ADDITIONAL_ARGS="$2"
shift # past argument
shift # past value
;;
*) # unknown option
echo "Uknown option '$key' for action 'new'. Valid options:"
echo "-i|--image [image name]"
echo "-a|--docker_args [additional docker args]"
exit 1
;;
esac
done
# Launch Android Studio in a new detached container.
${BASE_RUN_COMMAND} ${ADDITIONAL_ARGS} ${IMAGE_NAME} /opt/android-studio/bin/studio.sh
exit 0
elif [ "start" == "$1" ]; then
if [ "" == "$2" ]; then
echo "Please provide a container to start. Pick one:"
docker ps --all
exit 1
else
# Resume the named container and relaunch Android Studio inside it.
docker start $2
docker exec -e DISPLAY=$ip:0 $2 /opt/android-studio/bin/studio.sh
exit 0
fi
else
echo "Unknown action, or none provided. Possible:"
echo "docker_as.sh new -i|--image [image name] -a|--docker_args [additional docker args]"
echo "docker_as.sh start [container name]"
exit 1
fi
| true
|
70d975558671d2181cd2be81ed6df08b7c96b026
|
Shell
|
c18s/LinuxScripts
|
/securing-ssh/securing-ssh.sh
|
UTF-8
| 1,869
| 3.75
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Harden an OpenSSH server configuration in place. Must run as root.
# Env toggles: RESET=yes restores the backup; ROOT_LOGIN, PASSWORD_LOGIN
# and X11_FORWARDING (yes/no) override the corresponding sshd options.
if [ "$(id -u)" -ne 0 ]; then
echo 'This script must be run as root'
exit 1
fi
SSHD_CONFIG_FILE=/etc/ssh/sshd_config
SSHD_CONFIG_BACKUP_FILE=/etc/ssh/sshd_config.backup
AUTHORIZED_KEYS_FILE=/etc/ssh/authorized_keys
if [ ! -e $SSHD_CONFIG_FILE ]; then
echo 'This operating system is not supported'
exit 1
fi
# Make sure the system-wide authorized_keys file exists and is readable.
if [ ! -e $AUTHORIZED_KEYS_FILE ]; then
touch $AUTHORIZED_KEYS_FILE
fi
chmod 644 $AUTHORIZED_KEYS_FILE
# RESET=yes: undo all edits by restoring the saved original config.
if [ "$RESET" = 'yes' ]; then
echo "Restore from backup"
cp $SSHD_CONFIG_BACKUP_FILE $SSHD_CONFIG_FILE && echo "Done"
exit 0
fi
# Take a one-time backup before the first modification.
if [ ! -e $SSHD_CONFIG_BACKUP_FILE ]; then
cp $SSHD_CONFIG_FILE $SSHD_CONFIG_BACKUP_FILE
fi
# updateConfig <grep-pattern> <sed-program>
# Applies the sed edit to sshd_config, removes duplicate non-blank lines
# (awk '!NF || !x[$0]++'), trims leading blank lines (sed '/./,/^$/!d'),
# then echoes the matching line(s) as confirmation.
updateConfig() {
SSHD_CONFIG=$(sed "$2" $SSHD_CONFIG_FILE)
[ $? -ne 0 ] && return
echo "$SSHD_CONFIG" | awk '!NF || !x[$0]++' | sed '/./,/^$/!d' >$SSHD_CONFIG_FILE
grep "$1" $SSHD_CONFIG_FILE
}
echo 'Secure the SSH server'
# default
updateConfig 'PermitEmptyPasswords' 's/^.*PermitEmptyPasswords.*$/PermitEmptyPasswords no/'
updateConfig 'TCPKeepAlive' 's/^.*TCPKeepAlive.*$/TCPKeepAlive yes/'
updateConfig 'ClientAliveInterval' 's/^.*ClientAliveInterval.*$/ClientAliveInterval 300/'
updateConfig 'ClientAliveCountMax' 's/^.*ClientAliveCountMax.*$/ClientAliveCountMax 0/'
updateConfig 'AuthorizedKeysFile' 's@^.*AuthorizedKeysFile.*$@AuthorizedKeysFile .ssh/authorized_keys /etc/ssh/authorized_keys@'
# isYesOrNo <value> — succeed only when the value is exactly "yes" or "no".
isYesOrNo() {
echo "$1" | grep -q -E '^(yes|no)$' 2>/dev/null
return $?
}
# Optional overrides, applied only when the env var is a valid yes/no.
if isYesOrNo "$ROOT_LOGIN"; then
updateConfig 'PermitRootLogin' "s/^.*PermitRootLogin.*$/PermitRootLogin $ROOT_LOGIN/"
fi
if isYesOrNo "$PASSWORD_LOGIN"; then
updateConfig 'PasswordAuthentication' "s/^.*PasswordAuthentication.*$/PasswordAuthentication $PASSWORD_LOGIN/"
fi
if isYesOrNo "$X11_FORWARDING"; then
updateConfig 'X11Forwarding' "s/^.*X11Forwarding.*$/X11Forwarding $X11_FORWARDING/"
fi
## EOF
| true
|
9d752634ee4f214c8cfd7c6d1d036e81355291d5
|
Shell
|
spork87/tcvt
|
/optcvt.sh
|
UTF-8
| 223
| 3.296875
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# Run tcvt (two-column virtual terminal) when the terminal is wide enough
# for two MINWIDTH-column panes plus a separator; otherwise fall back to
# exec'ing the requested command, or the user's shell when none was given.
TCVT=./tcvt.py
MINWIDTH=80
SIZE=`stty size`
test $? = 0 || exit $?
# stty prints "rows cols"; keep only the column count.
COLUMNS="${SIZE#* }"
if test "$COLUMNS" -ge $((2*$MINWIDTH+1)); then
	exec $TCVT "$@"
elif test $# -eq 0; then
	# BUG FIX: the original used `test -z "$@"`, which errors out with more
	# than one argument; test the argument count instead.
	exec "$SHELL"
else
	exec "$@"
fi
| true
|
73e15c4dc0ab1b8d7153a30d120d58c6afb6d7db
|
Shell
|
zyaj/suntans-general-vertical-coordinate
|
/examples/iwaves/iwaves.sh
|
UTF-8
| 885
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/sh
########################################################################
#
# Shell script to run a suntans test case.
#
# Usage: iwaves.sh <num-procs>
# Uses mpirun when MPIHOME is set; generates the grid on first run
# (with triangle when TRIANGLEHOME is set), then runs the solver.
#
########################################################################
SUNTANSHOME=../../main
SUN=$SUNTANSHOME/sun
SUNPLOT=$SUNTANSHOME/sunplot
# Pull in build configuration (e.g. MPIHOME, TRIANGLEHOME).
. $SUNTANSHOME/Makefile.in
maindatadir=rundata
datadir=data
NUMPROCS=$1
# Run under MPI when available, otherwise invoke the binary directly.
if [ -z "$MPIHOME" ] ; then
	EXEC=$SUN
else
	EXEC="$MPIHOME/bin/mpirun -np $NUMPROCS $SUN"
fi
# First run: copy the template data dir and create the grid.
if [ ! -d $datadir ] ; then
	cp -r $maindatadir $datadir
	echo Creating grid...
	if [ -z "$TRIANGLEHOME" ] ; then
		echo No triangle libraries installed.
		echo Copying points.dat, cells.dat, and edges.dat from $maindatadir to $datadir
		$EXEC -g --datadir=$datadir
	else
		$EXEC -t -g --datadir=$datadir
	fi
else
	# Subsequent runs: refresh only the main configuration file.
	cp $maindatadir/suntans.dat $datadir/.
fi
echo Running suntans...
$EXEC -s -vv --datadir=$datadir
| true
|
da547ecbac3d0d11f4f0608bd1576cac61611ee3
|
Shell
|
sdsmith/dotfiles
|
/.bashrc
|
UTF-8
| 5,927
| 3.234375
| 3
|
[] |
no_license
|
# Expand aliases even in non-interactive contexts (e.g. scripts sourcing this).
shopt -s expand_aliases
export PATH=/usr/local/homebrew/bin:${PATH}
# Return 0 when running under Cygwin (uname mentions CYGWIN in any case).
function is_running_cygwin()
{
    if uname -a | grep -qE "(CYGWIN|cygwin|Cygwin)" &> /dev/null ; then
        return 0
    else
        return 1
    fi
}
# Return 0 when running under Windows Subsystem for Linux
# (/proc/version mentions Microsoft or WSL).
function is_running_windows_subsystem_linux()
{
    if grep -qE "(Microsoft|WSL)" /proc/version &> /dev/null ; then
        return 0
    else
        return 1
    fi
}
# Return 0 when running on macOS (Darwin kernel), 1 otherwise.
function is_running_macos()
{
    # BUG FIX: the original wrote `if "`uname -s`" == "Darwin"`, which tries
    # to execute uname's output (e.g. "Linux") as a command and only
    # "worked" by falling through on command-not-found. Use a real test.
    if [ "$(uname -s)" = "Darwin" ]; then
        return 0
    else
        return 1
    fi
}
## Increase bash history limit
#export HISTSIZE=-1
#export HISTFILESIZE=-1
export HISTTIMEFORMAT="%y-%m-%d %T " # Add timestamps to bash history
export P4_WS_NAME="" # Used to track the current p4 workspace
# Allow emacs GUI colours in terminal
export TERM=xterm-256color
# Default editor
export ALTERNATE_EDITOR=""
export EDITOR="nano" # opens in term
#export VISUAL="emacsclient -c -a emacs" # opens in GUI mode
# Set terminal prompt
# \d - current date
# \t - current time
# \h - host name
# \# - command number
# \u - user name
# \W - current working directory (short)
# \w - current working directory (full)
red=$(tput setaf 1)
green=$(tput setaf 2)
blue=$(tput setaf 4)
reset=$(tput sgr0)
# Prompt: host, current p4 workspace (green), short cwd.
export PS1="\h \[$green\]\${P4_WS_NAME}\[$reset\] \W> "
# Ring the terminal bell (useful to flag a long command finishing).
function bell() {
    echo -e '\a'
}
alias notify=bell
### Alias ###
# General
alias emacsserver="emacs --daemon"
alias enw="emacsclient -a='' -t"
alias l="ls --color=auto"
alias ls="ls --color=auto"
alias la="ls -la --color=auto"
alias ll="la -la --color=auto"
alias p4_clean_tree="p4 clean; find . -type d -empty -delete"
alias prettyjson="python -m json.tool"
# Remove every authorized_keys line matching $1 from ~/.ssh/authorized_keys;
# deletes the file when nothing would remain.
ssh_remove_auth_key()
{
    if test -f $HOME/.ssh/authorized_keys; then
        if grep -v "$1" $HOME/.ssh/authorized_keys > $HOME/.ssh/tmp; then
            cat $HOME/.ssh/tmp > $HOME/.ssh/authorized_keys && rm $HOME/.ssh/tmp;
        else
            rm $HOME/.ssh/authorized_keys && rm $HOME/.ssh/tmp;
        fi;
    fi
}
# Print the highest changelist number currently synced in this workspace.
p4_synced_cl() {
    p4 cstat ...#have | grep change | awk '$3 > x { x = $3 };END { print x }'
}
# Change bash ls directory color (from default dark blue)
export LS_COLORS=$LS_COLORS:'di=0;94:'
# Tmux attach alias with session name auto complete
#alias tma='tmux attach -t $*'
# if [ -f ~/.bash_completion ]; then
# . ~/.bash_completion
# fi
# NOTE(review): bash aliases do not take parameters, so the literal `$*`
# below expands at use time (normally empty) and arguments are simply
# appended after the alias text — confirm the `$*` is vestigial.
alias tml='tmux list-session'
alias tmk='tmux kill-session -t $*'
alias tma='tmux attach -t $*'
## tmux control mode (best used with iterm2)
# create
alias tmc='tmux -CC'
# resume (and detatch from any other clients connected to session)
alias tmcresume='tmux -CC a -d'
# resume/new named session
alias tmca='tmux -CC new-session -AD -s $*'
# Bash Function To Extract File Archives Of Various Types
# Usage: extract <archive>
# Dispatches on the file extension and extracts into the current directory.
# Prints a message (and succeeds) for missing files or unknown extensions.
function extract () {
    if [ -f "$1" ] ; then
        # Quoted "$1" throughout (the original broke on paths with spaces).
        case "$1" in
            *.tar.bz2) tar xjf "$1" ;;
            *.tar.gz)  tar xzf "$1" ;;
            *.bz2)     bunzip2 "$1" ;;
            *.rar)     rar x "$1" ;;
            *.gz)      gunzip "$1" ;;
            *.tar)     tar xf "$1" ;;
            *.tar.xz)  tar xf "$1" ;;
            *.tbz2)    tar xjf "$1" ;;
            *.tgz)     tar xzf "$1" ;;
            *.zip)     unzip "$1" ;;
            *.Z)       uncompress "$1" ;;
            *.7z)      7z x "$1" ;;
            *)         echo "'$1' cannot be extracted via extract()" ;;
        esac
    else
        echo "'$1' is not a valid file"
    fi
}
# List the contents of a tar archive without extracting it.
function tar_see() {
    tar -tvf "$1"
}

# Archive a directory (or file) into <basename>.tgz in the current
# directory, keeping the top-level name inside the archive.
function tgz_all() {
    local FILEPATH="$1"
    # Declaration split from assignment so a basename failure is not masked.
    local NAME
    NAME=$(basename "$FILEPATH")
    tar -czvf "$NAME.tgz" "$FILEPATH"
}

# Archive only the *contents* of a directory into <basename>.tgz
# (entries rooted at "./" instead of the directory name).
function tgz_content() {
    local FILEPATH="$1"
    local NAME
    NAME=$(basename "$FILEPATH")
    tar -czvf "$NAME.tgz" -C "$FILEPATH" "."
}
# List my recent Perforce changelists (up to 32).
# $1 (optional): changelist status filter passed to 'p4 changes -s'.
function p4c()
{
    # $1 type of change to view (arg to 'p4 changes -s <arg>')
    MAX_NUM_DISPLAY=32
    if [ $# -eq 0 ]; then
        p4 changes -u stewarts -m $MAX_NUM_DISPLAY
    else
        p4 changes -u stewarts -m $MAX_NUM_DISPLAY -s $*
    fi
}
# Show files currently opened in this Perforce workspace.
alias p4o="p4 opened $*"
# Recover a terminal left in a bad state (e.g. after cat-ing binary data).
function fix_terminal()
{
    stty sane
    tput rs1
}
function mosh_server_killall()
{
    # Kills all mosh-servers except the last one created (so we don't kill our own server!)
    # Assuming that the last server created is the one we are using.
    kill $(ps --no-headers --sort=start_time -C mosh-server -o pid | head -n -1)
}
# Recursively delete every regular file under a directory that carries the
# given extension (without the leading dot).
# $1 - root directory to search; $2 - extension, e.g. "log".
function remove_files_with_extension_recursive()
{
    local search_root=$1
    local file_ext=$2

    if [ $# -ne 2 ]; then
        echo "Usage: ${FUNCNAME[0]} <path> <extension>"
        return 1
    fi

    find ${search_root} -name "*.${file_ext}" -type f -delete
}
# Start a terminal-mode emacs gdb session for the given program/arguments.
function enwgdb()
{
    # Start emacs gdb session.
    emacs -nw --eval "(gdb \"gdb --annotate=3 $*\")";
}
function kbn()
{
    # Kill By Name (KBN)
    # Kill processes that match the given name.
    # NOTE(review): ps|grep also matches the grep process itself and uses
    # SIGKILL immediately; pgrep/pkill would be safer — confirm before use.
    ps ux | grep $1 | cut -d' ' -f2 | xargs kill -9
}
function get_nvidia_gpu_driver()
{
    # Find the driver that is associated with the NVIDIA VGA device (GPU) on
    # the system.
    find /sys | grep driver.*$(lspci | grep NV | grep VGA | cut -d ' ' -f1)
}
### Additional config files
source "${HOME}/.dotfiles/bash/dates.sh"
# Work-specific settings are kept out of the public dotfiles repo.
if [ -f "${HOME}/.workdotfiles/.bashrc" ]; then
    source "${HOME}/.workdotfiles/.bashrc"
fi
# Mount a CIFS/SMB share, prompting for the password interactively.
# $1 - CIFS path; $2 - local mount point.
function mount_my_cifs()
{
    if [ $# -ne 2 ]; then
        echo "Usage: mount_my_cifs <cifs_path> <mount_point>"
        return
    fi
    read -s -p "Password:" password
    mount -t cifs -o domain=nvidia.com,noperm,user=stewarts,passwd=$password $1 $2
}
# Switch to zsh if available
# NOTE: could optimize so that all the env stuff is factored out and the bash stuff is skipped, but that's too much rn
#if [ -z "${NOZSH}" ] && [ $TERM = "xterm" -o $TERM = "xterm-256color" -o $TERM = "screen" ] && type zsh &> /dev/null
#then
#    export SHELL=$(which zsh)
#    if [[ -o login ]]
#    then
#        exec zsh -l
#    else
#        exec zsh
#    fi
#fi
|
83cb14d5750147fa5bbf7ff9dc28a3c54fabd01a
|
Shell
|
madmpro/dotfiles
|
/src/dotfiles/newsboat/bookmark.sh
|
UTF-8
| 502
| 3.421875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# newsboat bookmark hook: resolve the final URL (following redirects when
# curl is available), strip utm_* tracking parameters, and append a
# markdown bookmark line to ~/.newsboat/bookmarks.md unless already present.
# Args: $1=url  $2=title  $3=description  $4=feed title
[ "$#" -eq 0 ] && exit 1
# BUG FIX: the original checked `[ -n $(command -v curl) ]`, which is
# always true — with curl missing the expansion is empty and `[ -n ]`
# tests the literal string "-n". Check the command's exit status instead.
if command -v curl >/dev/null 2>&1; then
	url=$(curl -sIL -o /dev/null -w '%{url_effective}' "$1")
else
	url="$1"
fi
# Drop utm_* query/fragment parameters from the resolved URL.
url=$(echo "${url}" | perl -p -e 's/(\?|\&)?utm_[a-z]+=[^\&]+//g;' -e 's/(#|\&)?utm_[a-z]+=[^\&]+//g;')
# Fourth /- or :-separated field is the host part of the URL.
baseurl=$(echo "${url}" | awk -F[/:] '{print $4}')
title="$2"
description="$3"
feedtitle="$4"
# Append "<title> | [host](url)" only if that exact line is not there yet.
grep -Fxq "${title} | [${baseurl}](${url})" "$HOME/.newsboat/bookmarks.md" || echo -e "${title} | [${baseurl}](${url})" >> "$HOME/.newsboat/bookmarks.md"
| true
|
9eb8a98ec4e7f737fbf6b33aec5c6a02766c6e76
|
Shell
|
hg8i/scripts
|
/pic
|
UTF-8
| 236
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/bash
# This script takes a screenshot
# Syntax: "pic FILE.png", or "pic"
# Aaron White 2019
# Select the output file: the first argument when given, ~/tmp.png otherwise,
# then capture an interactively selected area (-a).
target=~/tmp.png
if [ "$#" -ge 1 ]; then
	target=$1
fi
gnome-screenshot -a -f $target
| true
|
1e8faf5c949d22daf1303bf95ca068e98ead04b1
|
Shell
|
attixray/gtime
|
/doc/run-tests.sh
|
UTF-8
| 1,803
| 3.21875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# TAP test suite for the Guardtime gtime tools: sign, verify and extend
# timestamps against a gateway server ($1, default localhost).
dir=`dirname $0`
tmp=${TMPDIR:-'/tmp/'}
# Test Anything Protocol, from http://testanything.org/
# tap-functions provides plan_tests, okx, like and diag used below.
. ${dir}/tap-functions
host=${1:-'localhost'}
echo \# Using $host as Guardtime Gateway Server. Specify custom server as 1st command-line argument.
url_s="http://$host/gt-signingservice"
url_x="http://$host/gt-extendingservice"
url_p="http://verify.guardtime.com/gt-controlpublications.bin"
echo \# Running tests on `uname -n` at `date '+%F %T %Z'`
plan_tests 12
diag "### Publications file download"
okx gtime-test -p -o ${tmp}/pub.bin -P $url_p
diag "### Verifying publications file"
okx gtime-test -v -b ${tmp}/pub.bin
diag "### Signing"
okx gtime-test -s -o ${tmp}/tmp.gtts -S $url_s
diag "### Verifying freshly created signature token"
okx gtime-test -v -i ${tmp}/tmp.gtts -b ${tmp}/pub.bin
# A fresh token cannot be extended yet; assert on the error message.
like "`gtime-test -x -i ${tmp}/tmp.gtts -X $url_x 2>&1`" "try to extend later" "Extending freshly created signature token"
diag "### Verifying old timestamp"
okx gtime-test -v -b ${tmp}/pub.bin -i ${dir}/TestData.txt.gtts -f ${dir}/TestData.txt
diag "### Online verifying old timestamp"
okx gtime-test -vx -b ${tmp}/pub.bin -i ${dir}/TestData.txt.gtts -f ${dir}/TestData.txt -X $url_x
diag "### Extending timestamp"
okx gtime-test -x -i ${dir}/TestData.txt.gtts -o ${tmp}/ext.gtts -X $url_x
diag "### Verifying extended timestamp"
okx gtime-test -v -b ${tmp}/pub.bin -i ${tmp}/ext.gtts
diag "### Online verifying extended timestamp"
okx gtime-test -vx -b ${tmp}/pub.bin -i ${tmp}/ext.gtts -X $url_x
diag "### Using RIPEMD160"
okx gtime-test -s -F RIPEMD160:0a89292560ae692d3d2f09a3676037e69630d022 -o ${tmp}/r160.gtts -S $url_s
okx gtime-test -v -i ${tmp}/r160.gtts -f ${dir}/TestData.txt
# cleanup
rm -f ${tmp}/pub.bin ${tmp}/tmp.gtts ${tmp}/ext.gtts ${tmp}/r160.gtts 2> /dev/null
| true
|
d6f47ee1de277a788a2eb6dc80debfae915a251b
|
Shell
|
ctuning/ck-math
|
/package/lib-openblas-0.2.19-universal/scripts.android/install.sh
|
UTF-8
| 1,892
| 3.21875
| 3
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
#! /bin/bash
#
# Extra installation script
#
# See CK LICENSE.txt for licensing details.
# See CK Copyright.txt for copyright details.
#
# Developer(s): Grigori Fursin, 2016-2017
#
# Cross-compile OpenBLAS for Android: pick a TARGET per ABI, then run make
# and install into ${INSTALL_DIR}/install. Intended to be *sourced* by the
# CK build system (hence the `return 0` at the end).
# Check extra stuff
if [ "${CK_ANDROID_ABI}" == "arm64-v8a" ]; then
    NO_LAPACK=${NO_LAPACK:-1}
    TARGET=ARMV8
elif [ "${CK_ANDROID_ABI}" == "armeabi" ]; then
    NO_LAPACK=1
    TARGET=ARMV5
elif [ "${CK_ANDROID_ABI}" == "armeabi-v7a" ]; then
    # ARMV7 can be used only with hardfp and neon - see later
    NO_LAPACK=1
    TARGET=ARMV5
elif [ "${CK_ANDROID_ABI}" == "x86" ]; then
    NO_LAPACK=1
    TARGET=ATOM
elif [ "${CK_ANDROID_ABI}" == "x86_64" ]; then
    NO_LAPACK=1
    TARGET=ATOM
else
    echo "Error: ${CK_ANDROID_ABI} is not supported!"
    exit 1
fi

CK_OPENMP=1
if [ "${CK_HAS_OPENMP}" == "0" ]; then
    CK_OPENMP=0
fi

# Upgrade to ARMV7 when NEON/VFPv3 are available (requires hardfp).
EXTRA_FLAGS=""
if [ "${CK_CPU_ARM_NEON}" == "ON" ] ; then
    EXTRA_FLAGS=" $EXTRA_FLAGS -mfpu=neon"
    TARGET=ARMV7
fi
if [ "${CK_CPU_ARM_VFPV3}" == "ON" ] ; then
    EXTRA_FLAGS=" $EXTRA_FLAGS -mfpu=vfpv3"
    TARGET=ARMV7
fi

cd ${INSTALL_DIR}/${PACKAGE_SUB_DIR}

# BUG FIX: the original ended the make invocation with `CK_COMPILE=ON \`,
# whose trailing backslash spliced the following `if` statement into the
# make command line. The continuation is removed so the status check below
# actually tests make's exit code.
make VERBOSE=1 -j${CK_HOST_CPU_NUMBER_OF_PROCESSORS} \
    CC="${CK_CC} ${CK_COMPILER_FLAGS_OBLIGATORY} ${CK_CC_FLAGS_FOR_CMAKE} ${CK_CC_FLAGS_ANDROID_TYPICAL} ${EXTRA_FLAGS}" \
    AR="${CK_AR}" \
    FC="no-fc" \
    CROSS_SUFFIX=${CK_ENV_COMPILER_GCC_BIN}/${CK_COMPILER_PREFIX} \
    HOSTCC=gcc \
    USE_THREAD=1 \
    NUM_THREADS=8 \
    USE_OPENMP=${CK_OPENMP} \
    NO_LAPACK=$NO_LAPACK \
    TARGET=$TARGET \
    BINARY=${CK_CPU_BITS} \
    CK_COMPILE=ON
if [ "${?}" != "0" ] ; then
    echo "Error: cmake failed!"
    exit 1
fi

############################################################
echo ""
echo "Installing package ..."

rm -rf install
make PREFIX="${INSTALL_DIR}/install" install
if [ "${?}" != "0" ] ; then
    echo "Error: installation failed!"
    exit 1
fi

export PACKAGE_SKIP_LINUX_MAKE=YES

return 0
| true
|
8efa2db636cb11d6bd931605db067b275e0e8882
|
Shell
|
dcastelob/criptografia
|
/cifra-de-cesar/cifra_cesar.sh
|
UTF-8
| 2,375
| 3.765625
| 4
|
[] |
no_license
|
#!/bin/bash
# Script implementing a simple Caesar-cipher encryption/decryption
# ("cifra de cesar") - Security specialization, Unibratec
# Author: Diego Castelo Branco
# Date: 05/05/2018
#LETRAS=(a b c d e f g h i j k l m n o p q r s t u v w x y z " ")
# 26-letter lowercase alphabet; characters outside it are dropped from the
# output (their computed index falls past the end of the array).
LETRAS=(a b c d e f g h i j k l m n o p q r s t u v w x y z)
# fn_get_cifra_cesar <expression> <delta>
# Shift every letter of <expression> by <delta> positions (wrapping around
# the alphabet). Positive delta encrypts, negative delta decrypts.
function fn_get_cifra_cesar()
{
IN="$1"
# Normalize input to lowercase.
EXPRESSAO=$(echo "$IN" | tr "A-Z" "a-z")
DELTA="$2"
LEN_EXPRESSAO="${#EXPRESSAO}"
LEN_LETRAS="${#LETRAS[@]}"
SAIDA=""
DESCIFRAR=0
#echo "EXPRESSAO: $EXPRESSAO, Tamanho: ${LEN_EXPRESSAO}, Tam Letras: ${LEN_LETRAS}, DELTA: $DELTA" # DEBUG
#echo "${LETRAS[@]}" # DEBUG
for i in $(seq 0 $(("$LEN_EXPRESSAO"-1))) ; do
L=$(echo "${EXPRESSAO:$i:1}")
# Find the letter's index: delete the letter from a joined copy of the
# array and count the words before the first "/" marker.
INDEX_LETRAS=$(echo ${LETRAS[@]/$L//} | cut -d/ -f1 | wc -w | tr -d ' ')
# echo "Letra: $L, INDEX_LETRAS: $INDEX_LETRAS" DEBUG
# Make the alphabet wrap around circularly
#=======================================
# Handle the delta for decryption (work with an always-positive delta)
if [ "$DELTA" -lt 0 ];then
DELTA=$(($DELTA*-1))
DESCIFRAR=1
fi
# Handle a delta larger than the alphabet size
if [ "$DELTA" -ge "$LEN_LETRAS" ];then
# take the modulus (division remainder)
NOVO_DELTA=$(($DELTA%$LEN_LETRAS))
DELTA="$NOVO_DELTA"
fi
# Make the delta negative again when decrypting
if [ "$DESCIFRAR" -eq 1 ];then
DELTA=$(($DELTA*-1))
fi
#echo "Delta CALCULO: $DELTA" # DEBUG
# Start the shift operations
DESLOCAMENTO=$(($INDEX_LETRAS+$DELTA))
if [ "$DESLOCAMENTO" -ge "$LEN_LETRAS" ];then
# take the modulus (division remainder)
NOVO_INDEX_LETRAS=$(($DESLOCAMENTO%$LEN_LETRAS))
else
NOVO_INDEX_LETRAS="$DESLOCAMENTO"
fi
LETRA_CIFRADA="${LETRAS[$NOVO_INDEX_LETRAS]}"
SAIDA="$SAIDA$LETRA_CIFRADA"
#echo "NOVO_INDEX_LETRAS: $NOVO_INDEX_LETRAS, LETRA_CIFRADA: $LETRA_CIFRADA" #DEBUG
done
echo "$SAIDA"
}
# Script entry point
#===================
if [ $# -lt 2 ];then
echo "Falha - Campos requeridos"
echo "Use: $0 <expressao> <delta>"
echo " Notas:"
echo " 1) Utilize aspas para delimitar textos com espaços"
echo " 2) Para cifrar Use delta positivo (Ex.: 3)"
echo " 3) Para descifrar Use delta negativo (Ex.: -3)"
echo " Exemplo1: $0 \"cifradecesar\" 5"
echo " Exemplo1: $0 \"hnkxfeijehjwfx\" -5"
exit
fi
fn_get_cifra_cesar "$1" "$2"
| true
|
b90d8c5a2968d137063151beb2f0482c4b7331db
|
Shell
|
toddyamakawa/bin
|
/tmux-size
|
UTF-8
| 479
| 3.15625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Print the geometry of the current tmux pane: top/bottom/left/right edges
# plus height and width. Human-readable summary goes to stderr; the
# machine-readable six numbers go to stdout for use with `read`.
flags='#{pane_top} #{pane_bottom} #{pane_left} #{pane_right} #{pane_height} #{pane_width}'
name="$(basename "$0")"
# tmux expands the format string; word-splitting feeds the six fields to read.
read -r top bottom left right height width <<< $(tmux display-message -p "$flags")
echo "USAGE : read -r top bottom left right height width <<< \$($name)" 1>&2
echo "top left : ($top,$left)" 1>&2
echo "botton right : ($bottom,$right)" 1>&2
echo "dimensions : ${height}x$width" 1>&2
echo "$top $bottom $left $right $height $width"
| true
|
eda3c31afee5db111dc764826ea5df7badc9ae45
|
Shell
|
jrp2014/grenade
|
/runIris.sh
|
UTF-8
| 829
| 3.75
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash -eu
# Build and run the grenade "iris" example, downloading the UCI Iris
# dataset on first use. Optional $1 overrides the data directory; any
# further arguments are forwarded to the example binary.
example=iris
stack=cabal
dataset="iris.data"
datalab="iris.names"
url="https://archive.ics.uci.edu/ml/machine-learning-databases/iris/"
datadir="data"
args=""
if [ $# -ge 1 ]
then
    case $1 in
        (-?*)
            # First argument must be the data directory, not a flag.
            echo >&2 "Missing initial data directory argument"
            exit 1;;
        (*)
            datadir=$1
            shift
            args=$*;;
    esac
fi
exec="$stack exec $example --RTS -- $datadir $args +RTS -N -s"
if [ -f "$datadir/$dataset" ]
then
    $stack build $example && $exec
else
    # Dataset missing: fetch it (and its description) before running.
    mkdir -p "$datadir"
    echo "Attempting to download Iris data"
    curl -o "$datadir/$dataset" "$url/$dataset"
    curl -o "$datadir/$datalab" "$url/$datalab"
    if [ -f "$datadir/$dataset" ]
    then
        $stack build $example && $exec
    else
        echo "$datadir/$dataset does not exist. Please download Iris files to $datadir/"
    fi
fi
| true
|
affda1121b9f3cfd9982012cd4fc9fc0cac85dd2
|
Shell
|
amritpal0210/dotfiles
|
/.bin/backup
|
UTF-8
| 1,030
| 2.703125
| 3
|
[] |
no_license
|
#!/usr/bin/bash
# Back up dotfiles, Vim files, configs and scripts into ~/.backup, snapshot
# the explicitly-installed pacman packages, then commit and push the backup
# repository with an interactively entered commit message.
echo "Backing up dotfiles..."
for dotfile in .vimrc .bashrc .dir_colors .tmux.conf .xinitrc .eslintrc.js \
               .powerline-shell.json .Xresources; do
	cp ~/"$dotfile" ~/.backup/
done
echo "Backing up Vim files..."
for vimdir in ftplugin snippets; do
	cp -r ~/.vim/"$vimdir" ~/.backup/.vim/
done
echo "Backing up configs..."
for confdir in i3 i3lock rofi compton ranger polybar dunst alacritty; do
	cp -r ~/.config/"$confdir" ~/.backup/.config/
done
echo "Backing up scripts..."
cp -r ~/.bin ~/.backup/
echo "Updating installed packages..."
pacman -Qqe > ~/.backup/pkglist.txt
echo "Uploading to GitHub"
(
	cd ~/.backup && git add .
	printf 'Commit Message: '
	read -r commit
	git commit -m "$commit"
	git push origin master
)
echo "Done."
|
235a6d39326a79ca0ef3ce4143e14315a718cc83
|
Shell
|
sgbd2206/caparledev-bot
|
/ci/build.sh
|
UTF-8
| 580
| 3.5
| 4
|
[] |
no_license
|
#!/bin/sh
# This script will be executed in a Docker container where the /home folder is mounted somewhere on the host.
# The container is created in stage of the build pipeline. The goal is to build the project for production and
# send the output in the hosts where a docker-compose.yml will build the image and run it
BUILD_FOLDER='./build'
# BUG FIX: `[[ ... ]]` is a bashism and not guaranteed under #!/bin/sh;
# use the portable test builtin and quote the path.
if [ -d "${BUILD_FOLDER}" ]; then
    rm -rf "${BUILD_FOLDER}"
fi
# Install dependencies
yarn
# Compile the file from Typescript to ES5
./node_modules/.bin/tsc -p tsconfig.json
cp -r ./package.json /home
cp -rf "${BUILD_FOLDER}" /home
| true
|
42bc9fa9f005c0fe1898de413809908453ec1ba4
|
Shell
|
bcl-io/hmDNA
|
/gatk-tools-java/src/main/scripts/run_picard.sh
|
UTF-8
| 1,040
| 3.46875
| 3
|
[
"Artistic-2.0",
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Runs Picard tool specified on the command line, using GA4GH custom reader
# for getting the data from url based INPUTs.
# E.g. run_picard.sh ViewSam INPUT=<url>.
# Assumes directory structure where gatk-tools-java and picard repos reside
# in the same folder and client_secrets is in the same folder:
# .../...
# /gatk-tools-java
# /picard
# /client_secrets.json
# If your setup is different, please modify paths below.
# Resolve repo-relative paths to absolute ones (script lives three levels
# inside the gatk-tools-java repo).
GATK_TOOLS_JAVA_JAR=$(readlink -f ../../../dist/gatk-tools-java-1.0.jar)
CLIENT_SECRETS=$(readlink -f ../../../../client_secrets.json)
PICARD_JAR=$(readlink -f ../../../../picard/dist/picard.jar)
echo Running Picard form $PICARD_JAR
echo Using gatk-tools-java from $GATK_TOOLS_JAVA_JAR
echo Using client_secrets form $CLIENT_SECRETS
# Register the GA4GH reader for googleapis.com genomics URLs via the
# samjdk.custom_reader system property, then forward all script arguments
# to the Picard jar.
java -jar \
-Dsamjdk.custom_reader=https://www.googleapis.com/genomics,\
com.google.cloud.genomics.gatk.htsjdk.GA4GHReaderFactory,\
$GATK_TOOLS_JAVA_JAR \
-Dga4gh.client_secrets=$CLIENT_SECRETS \
$PICARD_JAR \
"$@" \
VERBOSITY=DEBUG QUIET=false
| true
|
146d04a51c30d1805d0f0922afd431885d70bba5
|
Shell
|
thyhum/vagrant-multi-machine-multi-environment
|
/thyvagrant
|
UTF-8
| 303
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Wrapper around vagrant that keeps a separate .vagrant metadata directory
# per host and per environment, so several machines/environments can share
# one Vagrantfile. All arguments are forwarded to vagrant unchanged.
# Change this to your default VAGRANT_ENV
DEFAULT_VAGRANT_ENV='main'
if [ -z "$VAGRANT_ENV" ]; then
	export VAGRANT_ENV=$DEFAULT_VAGRANT_ENV
fi
export VAGRANT_DOTFILE_PATH=".vagrant_${HOSTNAME}_${VAGRANT_ENV}"
export THYVAGRANT=YES
echo "Vagrant environment: ${VAGRANT_ENV}"
vagrant "$@"
| true
|
a03e50e3c83b24bcfdf1a2132fc8c7ede24cb721
|
Shell
|
marmalodak/dotlikefiles
|
/tmux_sessions.badboy.old/konsolesetup.jschmitt-dt
|
UTF-8
| 1,595
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
# Start tmux sessions, open a Konsole window, and attach a tab per session
# by injecting keystrokes with xdotool.
# NOTE(review): WindowName2WindowID comes from ~/lib/functions — confirm it
# maps a window title to an X window id.
echo '===========================' >> /tmp/konsolesetup.out
echo $(date) >> /tmp/konsolesetup.out
source ~/lib/functions
# start my tmux sessions
for i in ~/tmux_sessions/*.session; do "$i" ; done
konsole --workdir '~' -e 'bash' > /tmp/konsole.$USER.out 2>&1
sleep 4
# Poll until the Konsole window exists and we have its id.
window_id=
while [[ -z $window_id ]]; do
window_id=$(WindowName2WindowID "Konsole")
echo window_id=$window_id >> /tmp/konsolesetup.out
done
# activation and sizing and focusing seems to be unnecesary if we sleep for a few seconds, long enough for things to settle
# this seems to work better if it is the topmost window
xdotool windowactivate $window_id
xdotool windowfocus $window_id
# maximize it
#xdotool windowsize $window_id 100% 100% # this seems to get the size wrong, wut? # what does --sync actually do except for mess up the order in which things are run??
#xdotool key --window $window_id Alt+space x # wut?
# Each line: open a new tab (Shift+Ctrl+T) and type "tmux attach -t <name>"
# one key at a time.
xdotool key --window $window_id Shift+Control+t t m u x space a t t a c h space minus t space b l o g Return
xdotool key --window $window_id Shift+Control+t t m u x space a t t a c h space minus t space i p m i Return
xdotool key --window $window_id Shift+Control+t t m u x space a t t a c h space minus t space d j a n g o 1 Return
#xdotool key --window $window_id Shift+Control+t t m u x space a t t a c h space minus t space e n v V e r i f y Return
#xdotool key --window $window_id Shift+Control+t t m u x space a t t a c h space minus t space g c o v Return
xdotool key --window $window_id Shift+Control+t t m u x space a t t a c h space minus t space m e t a Return
| true
|
8b149bb1637323567bf488f94fbae911ddd625c3
|
Shell
|
richardklose/github-release-mirror
|
/mirror.sh
|
UTF-8
| 743
| 3.53125
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Mirror all published (non-draft) release assets of a GitHub repository.
#
# Usage: mirror.sh <owner/repo>
#
# Assets are downloaded into ${MIRROR_DIR}/<owner/repo>/<tag>/. A release whose
# tag directory already exists is skipped, so reruns only fetch new releases.
set -euo pipefail

REPO=${1:?usage: $0 <owner/repo>}
MIRROR_DIR="/mirror"
SRC_URL="https://api.github.com/repos/${REPO}/releases"
DEST="${MIRROR_DIR}/${REPO}"

# -fsS: fail on HTTP errors instead of feeding an API error page to jq.
RELEASES=$(curl -fsS "${SRC_URL}" | jq '[.[] | {tag_name: .tag_name, draft: .draft, assets: [.assets[].browser_download_url]}]')

# base64-encode each release object so it survives word splitting in the loop.
for RELEASE in $(echo "${RELEASES}" | jq -r '.[] | @base64'); do
  DRAFT=$(echo "${RELEASE}" | base64 --decode | jq -r '.draft')
  NAME=$(echo "${RELEASE}" | base64 --decode | jq -r '.tag_name')
  # Skip drafts (anything whose draft flag is not exactly "false").
  if [[ "${DRAFT}" != "false" ]]; then
    continue
  fi
  RELEASE_DEST="${DEST}/${NAME}"
  # Already mirrored — do not re-download.
  if [[ -d "${RELEASE_DEST}" ]]; then
    continue
  fi
  # One URL per line; read into an array so wget gets each URL as its own arg.
  mapfile -t URLS < <(echo "${RELEASE}" | base64 --decode | jq -r '.assets | .[]')
  mkdir -p "${RELEASE_DEST}"
  # A release can have zero assets; wget with no URL would exit non-zero.
  if (( ${#URLS[@]} > 0 )); then
    wget -P "${RELEASE_DEST}" --no-verbose "${URLS[@]}"
  fi
done
| true
|
5b96b5cf184b9e8a6567ee4b7cad25427b402bc8
|
Shell
|
erikvader/dotfiles
|
/scripts/.bin/yaycheckupdates
|
UTF-8
| 238
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/sh
# Same as pacman's checkupdates except that every occurrence of pacman
# in that script is replaced with yay to support AUR packages.

# Fail loudly if checkupdates is not installed; otherwise the empty path
# would make sed read from stdin and the script would appear to hang.
script=$(command -v checkupdates) || {
    echo "yaycheckupdates: checkupdates not found in PATH" >&2
    exit 1
}

# Rewrite 'pacman' invocations to 'yay' and run the patched script.
# The pipeline's exit status (bash -s) becomes the script's exit status.
sed -E 's/ pacman / yay /g; s/^pacman /yay /g' "$script" | bash -s
| true
|
9721b912d666448a10903201cf59244cb78066d3
|
Shell
|
coreen/todo-app
|
/todo-app.sh
|
UTF-8
| 1,559
| 4.40625
| 4
|
[] |
no_license
|
#!/bin/bash
# Minimal interactive todo list kept in an in-memory bash array.
# Records survive only for the lifetime of the process.

# Print the supported commands.
show_usage () {
    echo "Usage:"
    echo -e "\t todo list"
    echo -e "\t\t show all todo records"
    echo -e "\t todo add <record>"
    echo -e "\t\t adds record to todo list"
    echo -e "\t\t Note: put quotes around record string"
    echo -e "\t todo delete <recordId>"
    echo -e "\t\t removes record associated with given recordId"
}

LIST=()

# Print every record with its index. Iterating "${!LIST[@]}" visits only
# existing indexes, so ids stay stable across deletions (the old loop used
# ${#LIST[@]} as a bound, which undercounts a sparse array and dropped the
# tail of the list after a delete).
list_records () {
    local i
    for i in "${!LIST[@]}"; do
        echo "$i ${LIST[$i]}"
    done
}

# Append the full argument string as a new record.
add_record () {
    local content="$*"
    LIST+=("$content")
    echo "Added $content"
}

# Remove the record at the given index. unset removes exactly one slot;
# the old "${LIST[@]/$delete}" pattern-strip also mangled every other
# record containing the deleted text as a substring.
delete_record () {
    local index=$1
    if [[ -z "${LIST[$index]+set}" ]]; then
        echo "Error: no record with id $index"
        return 1
    fi
    echo "Deleting ${LIST[$index]}"
    unset "LIST[$index]"
}

show_usage
# Loop until EOF (Ctrl-D) or read failure; -r keeps backslashes literal.
while read -r -p "Enter command: " string; do
    # parse string for command and args: input looks like "todo <cmd> [args]"
    stringarray=($string)
    command="${stringarray[@]:1:1}"
    args=${stringarray[@]:2}
    if [[ -n $command && $command == "list" ]]; then
        list_records
    elif [[ -n $command && $command == "add" ]]; then
        add_record $args
    elif [[ -n $command && $command == "delete" ]]; then
        delete_record $args
    else
        echo "Error: please see usage for supported commands"
        show_usage
    fi
    # whitespace for console output space
    echo ""
done
| true
|
8adadad12a96fc5a30659a60799c2a9932b1bc2e
|
Shell
|
guardian/amigo
|
/roles/usage-monitoring-agent/files/monitoring.sh
|
UTF-8
| 2,108
| 4.1875
| 4
|
[] |
no_license
|
#!/bin/bash
# Install the AWS CloudWatch monitoring scripts and schedule a root cron job
# that publishes memory, swap and (optionally) disk-space usage metrics.
set -e
GLOBIGNORE="*"

INSTALL_LOCATION="/opt"

# Print usage to stderr and exit non-zero.
function HELP {
  >&2 cat << EOF
Usage: ${0} [-d diskpaths]

This script installs and schedules continous metrics gathering
of memory usage, (optionally) disk space usage and (optionally)
swap usage.

  -d diskpaths  [optional] Comma separated list of mount points to monitor
                disk space usage for. E.g. /,/var/lib/mongodb
  -h            Displays this help message. No further functions are
                performed.
EOF
  exit 1
}

# Download and unpack the CloudWatch monitoring scripts into INSTALL_LOCATION.
# (The old unused "__resultvar" out-parameter was dead code and is removed.)
function install_cloudwatch_client() {
  local CLOUDWATCH_CLIENT_VERSION="1.2.1"
  local ZIP="CloudWatchMonitoringScripts-${CLOUDWATCH_CLIENT_VERSION}.zip"
  apt-get install -y libwww-perl libdatetime-perl unzip
  wget "http://aws-cloudwatch.s3.amazonaws.com/downloads/${ZIP}"
  unzip "${ZIP}"
  mv aws-scripts-mon "${INSTALL_LOCATION}"
  rm "${ZIP}"
}

# Emit the mon-put-instance-data.pl command line on stdout.
#   $1 - path to the monitoring script
#   $2 - optional comma separated list of mount points to monitor
function generate_cloudwatch_cron_job() {
  local SCRIPT_PATH=$1
  local DISK_PATHS=$2
  local CRON_CMD="${SCRIPT_PATH} --from-cron --auto-scaling=only --mem-util --mem-used --mem-avail --swap-util --swap-used"
  if [ "x${DISK_PATHS}" != "x" ]; then
    CRON_CMD="${CRON_CMD} --disk-space-util --disk-space-used --disk-space-avail"
    for D in $(echo $DISK_PATHS | tr ',' '\n'); do
      CRON_CMD="${CRON_CMD} --disk-path=${D}"
    done
  fi
  echo "${CRON_CMD}"
}

# Append the given command to the current user's crontab, every 5 minutes.
function setup_cron_job() {
  local FREQUENCY_MINUTES=5
  local CRON_CMD=$@
  local CRON_ENTRY="*/${FREQUENCY_MINUTES} * * * * ${CRON_CMD}"
  local TMP_CRON
  TMP_CRON=$(mktemp)
  # No existing crontab is fine; start from the (empty) temp file.
  crontab -l > "${TMP_CRON}" 2>/dev/null || true
  # Quoting is essential: an unquoted ${CRON_ENTRY} lets the shell
  # glob-expand the bare '*' cron fields against files in the cwd,
  # writing a corrupted entry into the crontab.
  echo "${CRON_ENTRY}" >> "${TMP_CRON}"
  crontab "${TMP_CRON}"
  rm -f "${TMP_CRON}"
}

# -s is accepted (with an argument) for backward compatibility but ignored.
while getopts s:d:h FLAG; do
  case $FLAG in
    d)
      DISK_PATHS=$OPTARG
      ;;
    h) #show help
      HELP
      ;;
  esac
done
shift $((OPTIND-1))

install_cloudwatch_client
CLOUDWATCH_SCRIPT="${INSTALL_LOCATION}/aws-scripts-mon/mon-put-instance-data.pl"
CRON_LINE=$(generate_cloudwatch_cron_job "${CLOUDWATCH_SCRIPT}" "${DISK_PATHS:-}")
# Intentionally unquoted: setup_cron_job re-joins the words via $@.
setup_cron_job $CRON_LINE

echo -e "\n\nRoot crontab is now:"
crontab -l
| true
|
39c0c7fc9e7d91a3af3ebbc416472e472c9ca192
|
Shell
|
agua/bioapps
|
/bin/pancancer/pancan_all_readgroup_alignment.sh
|
UTF-8
| 395
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
# Run read-group alignment on every split BAM for one donor UUID.
# Usage: pancan_all_readgroup_alignment.sh <uuid>
set -eu

ALIGN=/pancanfs/software/pancan_readgroup_alignment.sh
UUID=${1:?usage: $0 <uuid>}
INPUT_BASE=/pancanfs/splits
OUTPUT_BASE=/pancanfs/output

# -p: no error if the output directory already exists (race-free vs. test+mkdir).
mkdir -p "$OUTPUT_BASE/$UUID"

# nullglob: iterate nothing when no BAMs exist, instead of passing the
# literal '*.bam' pattern to the aligner.
shopt -s nullglob
for BAM in "$INPUT_BASE/$UUID"/*.bam; do
  # sudo docker run -v /pancanfs:/pancanfs icgc-aligner $ALIGN $BAM $OUTPUT_BASE/$UUID/`basename $BAM`
  "$ALIGN" "$BAM" "$OUTPUT_BASE/$UUID/$(basename "$BAM")"
done
| true
|
29876284e217605d5ad93735a0b6e2f0387b6899
|
Shell
|
Azure/azurehpc
|
/experimental/cc_slurm_nhc/create_nhc_src_tar.sh
|
UTF-8
| 199
| 2.96875
| 3
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
#!/bin/bash
# Clone the LBNL Node Health Check (NHC) sources and pack them into a
# date-stamped lbnl-nhc-<MM-DD-YYYY>.tar.gz in the invoking directory.
set -euo pipefail

TODAY=$(date +"%m-%d-%Y")
TAR_FILE="lbnl-nhc-${TODAY}.tar"
CWD=$(pwd)

# Clone into a private temp dir so a stale /tmp/nhc from a previous or
# concurrent run cannot make the clone fail or be tarred by mistake.
WORKDIR=$(mktemp -d)
trap 'rm -rf "$WORKDIR"' EXIT

cd "$WORKDIR"
git clone https://github.com/mej/nhc.git
tar -cvf "${CWD}/${TAR_FILE}" nhc
gzip "${CWD}/${TAR_FILE}"
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.