blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
384dd7c3876b9cc36f108f80292cd2479f380eda | Shell | huangynj/SCALE-SDM_mixed-phase_Shima2019 | /scale-les/test/case_real/Mkjobshell.pp.Kmicro.sh | UTF-8 | 1,296 | 3.078125 | 3 | [
"BSD-2-Clause",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | #! /bin/bash -x
# Arguments
# Positional parameters handed in by the caller: binary locations, config
# file names, the process grid (TPROC) and the input data directory.
BINDIR=${1}
PPNAME=${2}
INITNAME=${3}
BINNAME=${4}
PPCONF=${5}
INITCONF=${6}
RUNCONF=${7}
TPROC=${8}
DATDIR=${9}
# Comma-separated lists are split into whitespace-separated arrays.
DATPARAM=(`echo ${10} | tr -s ',' ' '`)
DATDISTS=(`echo ${11} | tr -s ',' ' '`)
# System specific
MPIEXEC="mpiexec"

# TPROC is "X" or "XxY"; split on 'x' to get the node grid dimensions.
array=( $(echo "${TPROC}" | tr -s 'x' ' ') )
x=${array[0]}
y=${array[1]:-1}
# Arithmetic expansion instead of the legacy `let`.
xy=$(( x * y ))
if [ "${xy}" -gt 1024 ]; then
   # Fixed: the old message ("Node usage is less than 1024") said the
   # opposite of the check, and the bare `exit` returned status 0 on error.
   echo "Node usage must not exceed 1024 nodes (requested ${xy}). STOP"
   exit 1
fi
# Generate run.sh
# NOTE: the unquoted EOF1 delimiter means ${TPROC} is expanded now; the
# #PJM lines are K computer batch-scheduler directives written verbatim
# into run.sh -- they are not shell comments to be edited here.
cat << EOF1 > ./run.sh
#! /bin/bash -x
################################################################################
#
# for K computer
#
################################################################################
#PJM --rsc-list "rscgrp=micro"
#PJM --rsc-list "node=${TPROC}"
#PJM --rsc-list "elapse=00:25:00"
#PJM -j
#PJM -s
#
. /work/system/Env_base
#
export PARALLEL=8
export OMP_NUM_THREADS=8
#export fu08bf=1
rm -f ./input
ln -svf ${DATDIR} ./input
# run
EOF1
# Append one mpiexec invocation per stage; a name of "NONE" (or an
# unset/empty name, matching the old behavior where the unquoted test
# simply errored out) means "skip this stage".  Quoting fixes the
# unquoted `[ ! ${VAR} = "NONE" ]` tests.
if [ "${PPNAME:-NONE}" != "NONE" ]; then
   echo "${MPIEXEC} ${BINDIR}/${PPNAME} ${PPCONF} || exit 1" >> ./run.sh
fi
if [ "${INITNAME:-NONE}" != "NONE" ]; then
   echo "${MPIEXEC} ${BINDIR}/${INITNAME} ${INITCONF} || exit 1" >> ./run.sh
fi
if [ "${BINNAME:-NONE}" != "NONE" ]; then
   echo "${MPIEXEC} ${BINDIR}/${BINNAME} ${RUNCONF} || exit 1" >> ./run.sh
fi
| true |
a5c106a8ffb57708106b246f86712c7524a9a57f | Shell | subfusc/submacs | /install.sh | UTF-8 | 2,353 | 3.75 | 4 | [] | no_license | #!/bin/bash
# -*- coding: utf-8 -*-
# Installation layout and emacs invocation defaults for submacs.
INSTALL_DIRECTORY=$HOME/.submacs
USER_CONFIG_DIRECTORY=${INSTALL_DIRECTORY}/user-config
SYSTEM_CONFIG_DIRECTORY=${INSTALL_DIRECTORY}/system
EXECUTABLE=emacs
DOTEMACS=${HOME}/.emacs
# Flags for running emacs non-interactively as the current user.
BATCH_ARGS="-u ${USER} --batch"
REFRESH_PACKAGES="true"
# Install the submacs elisp files plus the ruby-block plugin (cloned over
# SSH from GitHub) and seed an empty user-init.el for local customization.
function copy_submacs_files() {
cp src/*.el ${SYSTEM_CONFIG_DIRECTORY}
git clone git@github.com:subfusc/ruby-block ${SYSTEM_CONFIG_DIRECTORY}/ruby-block
mkdir -p ${USER_CONFIG_DIRECTORY}
echo ";;; Custom user mods goes here" > ${USER_CONFIG_DIRECTORY}/user-init.el
echo "(provide 'user-init)" >> ${USER_CONFIG_DIRECTORY}/user-init.el
}
# Write the bootstrap ~/.emacs (encoding setup + melpa bootstrap) and put
# melpa.el into the system config directory.  Must run before
# add_load_paths_to_submacs, which appends to the same file.
function pre_install_dotemacs() {
    if [ -f "${DOTEMACS}" ]; then
        rm "${DOTEMACS}"
    fi
    mkdir -p "${SYSTEM_CONFIG_DIRECTORY}"
    cp src/melpa.el "${SYSTEM_CONFIG_DIRECTORY}"

    # One heredoc instead of one echo per line; the unquoted delimiter
    # keeps ${SYSTEM_CONFIG_DIRECTORY} expanded.
    cat > "${DOTEMACS}" <<EOF
; -*- coding: utf-8 -*-

(prefer-coding-system 'utf-8)
(set-language-environment "utf-8")
(add-to-list 'load-path "${SYSTEM_CONFIG_DIRECTORY}")
(require 'melpa)
EOF
}
# Append the remaining load paths and entry points to ~/.emacs.  Must run
# after pre_install_dotemacs so the melpa bootstrap is already in place.
function add_load_paths_to_submacs() {
    cat >> "${DOTEMACS}" <<EOF
(add-to-list 'load-path "${SYSTEM_CONFIG_DIRECTORY}/ruby-block")
(add-to-list 'load-path "${USER_CONFIG_DIRECTORY}")
(require 'submacs-init)
(require 'user-init)
EOF
}
# Batch-install the Emacs packages listed in install/package-install.el.
function install_packages() {
$EXECUTABLE $BATCH_ARGS --script install/package-install.el
}
# Refresh the GNU ELPA signing keyring so package signatures verify.
function update_gnu_keyring() {
$EXECUTABLE $BATCH_ARGS --script install/update-keyring.el
}
# Remove everything a previous install created.  ${VAR:?} aborts instead
# of expanding to "" so an unset variable can never turn this into
# `rm -rfv /...`; -f makes repeated cleans idempotent.
function clean() {
    rm -rfv -- "${SYSTEM_CONFIG_DIRECTORY:?}"
    rm -f -- "${DOTEMACS:?}"
    rm -rfv -- "${HOME:?}/.emacs.d"
}
# Fresh installation: bootstrap .emacs, refresh keys, install packages,
# then copy the submacs files and wire up the load paths.
function newinstall() {
pre_install_dotemacs
update_gnu_keyring
install_packages
copy_submacs_files
add_load_paths_to_submacs
}
# Wipe any previous installation, then install from scratch.
function reinstall() {
clean
newinstall
}
OPT=NEWINSTALL
# -r reinstall, -c clean; default is a fresh install.  The original used
# typographic quotes around the optstring (“rc”), which getopts treated
# as literal option characters; plain ASCII quotes are what was intended.
while getopts "rc" OPTION
do
    case $OPTION in
        r) OPT=REINSTALL;;
        c) OPT=CLEAN;;
    esac
done

case $OPT in
    NEWINSTALL)
        newinstall;;
    REINSTALL)
        reinstall;;
    CLEAN)
        clean;;
esac
| true |
31973396c5aafc0c9de12b5c122b48edf4a1270b | Shell | zdykstra/zfsbootmenu | /contrib/xhci-teardown.sh | UTF-8 | 1,285 | 3.828125 | 4 | [
"MIT"
] | permissive | #!/bin/sh
## Some XHCI USB controllers, like on a 2015 MacBook Air, will not be properly
## reinitialized after ZFSBootMenu jumps into the system kernel with kexec; no
## USB devices will be enumerated, so e.g., keyboards will not function.
##
## One way to work around this is to just blacklist USB modules in ZFSBootMenu,
## but this prevents keyboard interaction in the boot menu. A better
## alternative is to try unbinding all USB controllers from xhci_hcd
## immediately before jumping into the new kernel, which allows the new kernel
## to properly initialize the USB subsystem.
##
## This could be adapted to other drivers, including {O,U,E}HCI as necessary.
##
## To use, put this script somewhere, make sure it is executable, and add the
## path to the `zfsbootmenu_teardown` space-separated list with, e.g.,
##
## zfsbootmenu_teardown+=" <path to script> "
##
## in a dracut.conf(5) file inside the directory specified for the option
## `Global.DracutConfDir` in the ZFSBootMenu `config.yaml`.
SYS_XHCI=/sys/bus/pci/drivers/xhci_hcd

# Walk every PCI function currently bound to xhci_hcd; the glob is left
# unquoted on purpose so it expands.
# shellcheck disable=SC2231
for dev_link in ${SYS_XHCI}/????:??:??.?; do
	# A non-matching glob leaves the literal pattern behind; only real
	# device symlinks count.
	if [ ! -L "${dev_link}" ]; then
		continue
	fi
	pci_id="${dev_link#"${SYS_XHCI}"/}"
	echo "Tearing down USB controller ${pci_id}..."
	echo "${pci_id}" > "${SYS_XHCI}/unbind"
done
| true |
94c7834e74f395d92d400b0a83a686a7afa7ab8c | Shell | Corporex/verify-proxy-node | /snyk/monitor.sh | UTF-8 | 891 | 3.734375 | 4 | [
"MIT",
"LicenseRef-scancode-proprietary-license"
] | permissive | #!/usr/bin/env bash
# If a new configuration is added it will need adding to `snyk_configurations.sh`.
# This script is intended to be used by a CI server.
# Print a three-line banner naming the gradle configuration being checked.
# $1 - configuration name
function print_banner() {
    local rule="######################################################################"
    printf '%s\n' "$rule"
    printf '%s\n' "### Monitoring dependencies for $1 gradle configuration"
    printf '%s\n' "$rule"
}
# Register one gradle configuration with Snyk monitoring.
# $1 - gradle configuration name
function monitor_configuration() {
local config=$1;
print_banner "$config"
# The sub-project specified here is irrelevant. The configuration will still be tested regardless of if the
# sub-project actually uses it.
snyk monitor --gradle-sub-project=proxy-node-translator --project-name="$config"-config -- --configuration="$config"
}
source snyk/configurations.sh
# CONFIGURATIONS (set by configurations.sh) is a space-separated list; the
# unquoted expansions below rely on word-splitting to iterate it.
for configuration in $CONFIGURATIONS; do
monitor_configuration $configuration
done;
083d55ee4ac237dce9940336181f865efec93d92 | Shell | pharo-mooc/PharoMoocFat | /Slides/install.sh | UTF-8 | 386 | 2.78125 | 3 | [] | no_license | #!/usr/bin/env bash
# This script is meant to install some symbolic links so that we can latex the slides and marked their status
# Exit immediately if a command exits with a non-zero status
# Abort on the first failed command.
set -e

# Link the shared figures directory into each slide folder, in the same
# order the original hand-unrolled sequence used.
for slide_dir in 1-Templates 3-ToReview 4-Done 2-UnderWriting; do
  cd "$slide_dir"
  ln -sf ../figures
  cd ../
done

echo 'Links installed'
| true |
e704373deebb74dd92e000a00ca113a17841d7a4 | Shell | woutdp/dotfiles | /bspwm/.config/bspwm/scripts/toggle_scratchpad.sh | UTF-8 | 321 | 2.78125 | 3 | [] | no_license | #!/bin/sh
# Toggle a kitty "scratchpad" terminal under bspwm: spawn it if it does
# not exist yet, otherwise flip its hidden flag (focusing it on reveal).
id=$(xdo id -n scratchpad);
if [ -z "$id" ]; then
kitty --name=scratchpad --override=window_margin_width=10
else
hidden=$(bspc query -N -n .hidden)
# NOTE(review): bspc can report several hidden nodes; this equality check
# assumes at most one and that it is the scratchpad -- verify.
if [ "${hidden}" ] && [ "$hidden" = "$id" ]; then
bspc node "$id" -g hidden=off
bspc node -f "$id"
else
bspc node "$id" -g hidden=on
fi
fi
| true |
392201252cd84067d8a999c8f92b792d32512068 | Shell | fschuett/linuxmuster-client | /etc/post-mount.d/009-teacher-links | UTF-8 | 798 | 3.234375 | 3 | [] | no_license | #!/bin/bash
# Script created by Frank Schütte fschuett@gymnasium-himmelsthuer.de
# License: Free Software (License GPLv3)
# 20.7.2014
# Post-mount hook: gives teachers a "__schueler" link in their home
# directory pointing at the students' home volume.
. /usr/share/linuxmuster-client/config || exit 1
. ${USERCONFIG} || exit 1
. ${HELPERFUNCTIONS} || exit 1
# Only act when the "students" volume is mounted, and only for members of
# the "teachers" primary group.
[ $VOLUME == students ] || return 0
PGRUPPE=$(id -ng $USER)
[ $PGRUPPE == teachers ] || return 0
$LOGGING && log_msg post-mount "Entering 009-teacher-links $1 $2 HOMEDIR=$HOMEDIR,USER=$USER,VOLUME=$VOLUME,LINKFOLDERSHARE=$LINKFOLDERSHARE,SCHOOL=$SCHOOL,TEACHERS=$TEACHERS"
SERVER="/home/students"
LOKAL=$HOMEDIR/__schueler
$LOGGING && log_msg post-mount "verlinke $LOKAL $SERVER"
# verlinke ("link") comes from the sourced helper functions.
verlinke "$LOKAL" "$SERVER"
# This script is deliberately not ended with "exit 0": it is included by
# /usr/sbin/linuxmuster-pam-mount, so an exit here would terminate the
# main script.
| true |
8b019def3d6b7373af9f7bc14d8dee85bb5d1a84 | Shell | paulojeronimo/javaee-ambiente | /funcoes/eclipse.sh | UTF-8 | 1,999 | 3.75 | 4 | [] | no_license | #!/bin/bash
# Thin wrappers around the shared instalar/remover (install/remove)
# helpers provided by the sourced environment.
eclipse_instalar() { instalar eclipse "$@"; }
eclipse_remover() { remover eclipse "$@"; }
# Tell the user (message in Portuguese) to quit Eclipse before continuing.
_saia_do_eclipse() {
echo "antes de executar esta operação, encerre o eclipse!"
}
# Succeed (exit status 0) when an Eclipse process is currently running.
# Platform specific because the ps column flags differ per OS.
eclipse_em_execucao() {
case $PLATAFORMA in
Linux) ps -o ucmd | grep -q eclipse;;
Darwin) ps -o comm | grep -q eclipse;;
Cygwin) ps | grep -q eclipse;;
esac
}
# Archive the workspace directory into $INSTALADORES_DIR/workspace.tar.gz.
# Refuses to run while Eclipse is open.
eclipse_salvar_workspace() {
eclipse_em_execucao && { _saia_do_eclipse; return 1; }
cd "$AMBIENTE_HOME"
[ -d workspace ] && tar cvfz "$INSTALADORES_DIR"/workspace.tar.gz workspace/
cd - &> /dev/null
}
# Restore a previously saved workspace archive into $AMBIENTE_HOME.
# Silently succeeds when no archive exists; refuses while Eclipse runs.
eclipse_restaurar_workspace() {
local workspace="$INSTALADORES_DIR"/workspace.tar.gz
[ -f "$workspace" ] || return 0
eclipse_em_execucao && { _saia_do_eclipse; return 1; }
cd "$AMBIENTE_HOME"
echo -n "Extraindo $workspace ... "
# extrai/ok/falha (extract/ok/failure) come from the sourced helpers.
extrai "$workspace" &> $OUT && ok || falha
cd - &> /dev/null
}
# Snapshot the whole configured Eclipse installation (plus the workspace)
# into $INSTALADORES_DIR/$ECLIPSE_CONFIGURADO for later restoration.
eclipse_salvar() {
[ -d "$ECLIPSE_HOME" ] || { echo "Diretório \"$ECLIPSE_HOME\" não encontrado!"; return 1; }
eclipse_em_execucao && { _saia_do_eclipse; return 1; }
cd "$ECLIPSE_HOME"/..
tar cvfz "$INSTALADORES_DIR"/$ECLIPSE_CONFIGURADO "`basename \"$ECLIPSE_HOME\"`"/
eclipse_salvar_workspace
}
# Restore a previously saved Eclipse installation archive into
# $FERRAMENTAS_DIR, then restore the workspace as well.
eclipse_restaurar() {
local eclipse="$INSTALADORES_DIR"/$ECLIPSE_CONFIGURADO
[ -f "$eclipse" ] || { echo "Arquivo \"$eclipse\" não encontrado!"; return 1; }
eclipse_em_execucao && { _saia_do_eclipse; return 1; }
cd "$FERRAMENTAS_DIR"
echo -n "Extraindo $eclipse ... "
# extrai/ok/falha (extract/ok/failure) come from the sourced helpers.
extrai "$eclipse" &> $OUT && ok || falha
cd - &> /dev/null
eclipse_restaurar_workspace
}
# Launch Eclipse pointing at the environment's workspace directory.
eclipse() {
local eclipse_bin=eclipse
case $PLATAFORMA in
Darwin)
# On macOS use `open` so the app bundle launches detached.
open -a "$ECLIPSE_HOME" --args -data "$AMBIENTE_HOME"/workspace
return
;;
Cygwin)
eclipse_bin=eclipse.exe
esac
"$ECLIPSE_HOME"/$eclipse_bin -data "$AMBIENTE_HOME"/workspace &> /dev/null &
}
# vim: set ts=4 sw=4 expandtab:
| true |
3e5647349704d65970d24f22645745a13148aeb1 | Shell | Tofumy/Bash-Scripts | /forloop.sh | UTF-8 | 783 | 3.921875 | 4 | [] | no_license | #! /usr/bin/bash
echo "This is using case switch and a for loop to rename a couple of test files"
read -p "Can you choose old/new to rename your files ? " CHOICE

# Collect the .txt files with a glob instead of parsing `ls` output;
# nullglob makes the list empty (rather than a literal "*.txt") when
# nothing matches, and quoting keeps names with spaces intact.
shopt -s nullglob
FILES=(*.txt)
NEW="new"
OLD="old"

case $CHOICE in
	[oO] | [oO][lL][dD])
	for EACHFILE in "${FILES[@]}"; do
		echo "Renaming $EACHFILE to old-$EACHFILE"
		mv -- "$EACHFILE" "$OLD-$EACHFILE"
	done
	;;
	[nN] | [nN][eE][wW])
	for EACHFILE in "${FILES[@]}"; do
		echo "Renaming $EACHFILE to new-$EACHFILE"
		mv -- "$EACHFILE" "$NEW-$EACHFILE"
	done
	;;
	*)
	echo "You did not select the right answer choice, you typed in this \"$CHOICE\" which does not follow the instructions"
esac
echo "Thank you..."
| true |
b37b2a3dfe5b5526a18dc36eb01e91ae261742b9 | Shell | gaia-adm/data-collection | /providers/tools/almUpdateDefect.sh | UTF-8 | 5,407 | 3.046875 | 3 | [] | no_license | #!/bin/bash
# script changes a selected impact multiple times in order to generate data in AUDIT tables for fetching it with alm-issue-change provider
# NOTE: set constants accordingly before running the script
#constants
almLocation=http://localhost:8082/qcbin
domain=DEFAULT
project=bp1
defectId=1
adminUser=sa
adminPassword=
regularUser=boris
# How many times the whole 12-change update sequence is repeated.
loopCount=2
#login
# Authenticate and capture the LWSSO cookie from the Set-Cookie header.
lwsso=$(curl -i -X POST -H "Accept: application/json" -H "Content-Type: application/json" -d "<alm-authentication><user>"$adminUser"</user><password>"$adminPassword"</password></alm-authentication>" $almLocation/authentication-point/alm-authenticate | grep Set-Cookie | cut -d ':' -f2 | cut -d ';' -f1)
echo LWSSO:$lwsso
#create session
# Open a site session and collect the remaining session cookies,
# ';'-joined so they can be passed to curl --cookie later.
others=$(curl -i -X POST -H "Accept: application/json" -H "Content-Type: application/json" --cookie $lwsso -d "<session-parameters><client-type>Gaia ReST Client</client-type><time-out>60</time-out></session-parameters>" $almLocation/rest/site-session | grep 'QCSession\|ALM_USER\|XSRF-TOKEN' | sed -s 's/Set-Cookie: //g' | tr '\n' ';')
echo OTHERS:$others
##### update defect 1, total - 12 changes in every iteration

# Issue one defect-update PUT.  $1 = severity, $2 = owner.  Factors out
# the twelve copies of the identical curl command line; also uses
# $adminUser instead of a hard-coded "sa" so the constants stay in charge.
update_defect() {
  curl -i -X PUT \
    -H "Accept: application/json; schema=alm-web" \
    -H "Content-Type: application/json; schema=alm-web" \
    --cookie $lwsso";"$others \
    -d '{"entity":{"id":'$defectId',"severity":"'"$1"'","owner":"'"$2"'"},"business-rules-validation-failure-level":"warning"}' \
    $almLocation/rest/domains/$domain/projects/$project/defects/$defectId
}

COUNTER=0
while [ $COUNTER -lt $loopCount ]; do
  echo -e "\n"===================== ITERATION $COUNTER
  # update severity
  update_defect "5-Urgent"    "$adminUser"
  update_defect "4-Very High" "$adminUser"
  update_defect "3-High"      "$adminUser"
  update_defect "2-Medium"    "$adminUser"
  update_defect "1-Low"       "$adminUser"
  # update owner
  update_defect "1-Low" "$regularUser"
  update_defect "1-Low" "$adminUser"
  # update both severity and owner
  update_defect "5-Urgent"    "$regularUser"
  update_defect "4-Very High" "$adminUser"
  update_defect "3-High"      "$regularUser"
  update_defect "2-Medium"    "$adminUser"
  update_defect "1-Low"       "$regularUser"
  let COUNTER=COUNTER+1
done

echo -e "\n"FINISHED !!!
aa4f60202f319d83eff889b6ace5fd50bf2fb2bd | Shell | skilbjo/iris | /src/util | UTF-8 | 4,359 | 3.578125 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Ping a healthchecks.io check to signal a successful run.
# $1 - the check's UUID
ping_healthchecks(){
local job_uuid="$1"
curl -fsS --retry 3 "https://hc-ping.com/${job_uuid}"
}
# Write ~/.aws/config and ~/.aws/credentials for the current user from the
# $aws_access_key_id / $aws_secret_access_key environment variables, and
# export the canonical AWS_* variables.
add_aws_vars(){
if [[ $(whoami) == 'root' ]]; then
local home_dir='/root'
elif [[ $(whoami) == 'sbx_'* ]]; then
local home_dir="/home/$(whoami)"
elif [[ $(whoami) == 'skilbjo' ]]; then
echo 'Easy, big guy...' && exit 1
fi
# NOTE(review): for any other user home_dir stays unset, so the files
# below would land under "/.aws" -- confirm that is intentional.
mkdir -p "${home_dir}/.aws"
touch "${home_dir}/.aws/config"
echo '[default]' >"${home_dir}/.aws/config"
echo 'region = us-east-1' >>"${home_dir}/.aws/config"
touch "${home_dir}/.aws/credentials"
echo '[default]' >"${home_dir}/.aws/credentials"
echo "aws_access_key_id = $aws_access_key_id" >>"${home_dir}/.aws/credentials"
echo "aws_secret_access_key = $aws_secret_access_key" >>"${home_dir}/.aws/credentials"
export AWS_ACCESS_KEY_ID="$aws_access_key_id"
export AWS_SECRET_ACCESS_KEY="$aws_secret_access_key"
}
# Common per-job setup: strict mode, job-name/resource/tmp dirs, and the
# db/email environment, branching on which user the job runs as.
# $1 - the job's script directory (job name = its basename)
util_setup(){
set -eou pipefail
local dir="$1" # the below are not local; they are globals for child scripts
job_name="$(basename "$dir")"
resources_dir="${dir}/../../resources/${job_name}"
tmp_dir="$(mkdir -p "/tmp/${job_name}" && echo "/tmp/${job_name}")"
local hour="$(date +%H)" # if no ${var#0}, bash will interpret 08,09 as octcal and fault
if [[ ${hour#0} -eq 23 ]]; then sleep 120; fi # tick over to next day *and* wait for replica to catch up
# root / sbx_* = containerized job runs; skilbjo = developer workstation.
if [[ $(whoami) == 'root' ]] || [[ $(whoami) == 'sbx_'* ]]; then
source "${src_dir}/athena"
add_aws_vars
export local_db='postgres://postgres@localhost/postgres'
export email="${_email:-$email}" # needed, for some reason udocker can't use the "email" in the --env="email" call
export email_cmd="${src_dir}/email"
elif [[ $(whoami) == 'skilbjo' ]]; then
case "$(uname)" in
Darwin ) local prefix='/Users'
export local_db="$(echo "$docker_psql_uri")" || export local_db='postgres://postgres@localhost/postgres' ;;
Linux | FreeBSD ) local prefix='/home' ;;
esac
source "${prefix}/skilbjo/dev/engineering/src/athena.user"
export email_cmd="${prefix}/skilbjo/dev/engineering/src/email"
else
>&2 echo "user is: $(whoami), add to setup fn"
fi
}
# Render ${tmp_dir}/${report}.csv as an HTML table at ${tmp_dir}/${report}
# and echo the result.  The first CSV row becomes the header row.
# $1 - report name (basename of the .csv in $tmp_dir)
csv_to_html(){
  local report="$1"
  local line
  local header='true'

  echo '<table border="2" cellspacing="1" cellpadding="2">' >"${tmp_dir}/${report}"
  # IFS= / -r keep whitespace and backslashes in cell data intact, and the
  # trailing [[ -n ]] still emits a final row that lacks a newline --
  # the original `cat file | while read line` mangled both cases.
  while IFS= read -r line || [[ -n "$line" ]]; do
    if [[ $header == 'true' ]]; then
      echo "<tr><th align='center'>${line//,/</th><th>}</th></tr>" >>"${tmp_dir}/${report}"
      header='false'
    else
      echo "<tr><td align='center'>${line//,/</td><td>}</td></tr>" >>"${tmp_dir}/${report}"
    fi
  done <"${tmp_dir}/${report}.csv"
  echo '</table>' >>"${tmp_dir}/${report}"

  cat "${tmp_dir}/${report}"
}
# Run a report's SQL against Postgres and mail the result as a CSV.
# $1 - report name (matching ${resources_dir}/$1.sql)
# $2 - user substituted for the :user placeholder in the SQL
# $3 - recipient distribution list
_report(){
local report="$1"
local user="$2"
local distro_list="$3"

local sql="$(cat "${resources_dir}/${report}.sql" | sed "s/:user/${user}/")"

#psql "$db_uri" --html -c "$sql" >"${tmp_dir}/${report}.html" # html # not needed?
psql "$db_uri" -AF',' --pset footer -c "$sql" >"${tmp_dir}/${report}.csv" # csv

"$email_cmd" \
--distro-list "$distro_list" \
--subject "Iris report for $report for $(date +%F)" \
--body "Iris report for $(date +%F): " \
--attachment "${tmp_dir}/${report}.csv" \
--html
}
# Run a report's SQL against Athena and mail the result as a CSV.
# $1 - report name (matching ${resources_dir}/athena/$1.sql)
# $2 - user substituted for :user in the SQL
# $3 - recipient distribution list
# $4 - optional report date (defaults to today, %F)
_report_athena(){
local report="$1"
local user="$2"
local distro_list="$3"
# set +u/-u bracket the optional-argument probe so `set -u` doesn't trip.
set +u; local _date="$4"; if [[ -z $_date ]]; then _date="$(date +%F)"; fi; set -u

local subject="Iris report for $report for $_date"
local body="Iris report for ${_date}:"

local sql="$(cat "${resources_dir}/athena/${report}.sql" | \
sed "s/:user/${user}/" | \
sed "s/:date/${_date}/")"

# `query` is provided by the sourced athena helpers.
query "$sql" >"${tmp_dir}/${report}.csv" # csv
#csv_to_html "$report" >"${tmp_dir}/${report}.html" # html # not needed?

"$email_cmd" \
--distro-list "$distro_list" \
--subject "$subject" \
--body "$body" \
--attachment "${tmp_dir}/${report}.csv" \
--html
}
# Print yesterday's date (%F).  Containerized jobs (root/sbx_*) have GNU
# date; elsewhere (macOS dev box) coreutils' gdate is expected instead.
get_yesterday(){
local _date="$(date +%F)"

if [[ $(whoami) == 'root' ]] || [[ $(whoami) == 'sbx_'* ]]; then
_date="$(date -d "$_date - 1 day" +%F)"
else
_date="$(gdate -d "$_date - 1 day" +%F)"
fi

echo "$_date"
}
| true |
f6681b9cccbba8e90c13661e9f0ad75bf3ef7b3b | Shell | ryanmoran/workspace | /pull.sh | UTF-8 | 2,525 | 4.3125 | 4 | [] | no_license | #!/bin/bash
# Strict mode: fail fast on errors, unset variables, and pipeline failures.
set -e
set -u
set -o pipefail

# Root directory that is scanned for git repositories.
readonly WORKSPACE="${HOME}/workspace"
# Pull every repo found under $WORKSPACE, optionally filtered to paths
# containing the substring given via --match.
function main() {
  local match
  while [[ "${#}" != 0 ]]; do
    case "${1}" in
      --match)
        match="${2}"
        shift 2
        ;;

      "")
        # skip if the argument is empty
        shift 1
        ;;

      *)
        # The original called util::print::error, which is not defined
        # anywhere in this script; report and bail out explicitly instead
        # of dying with "command not found" under set -e.
        util::print::red "unknown argument \"${1}\""
        exit 1
    esac
  done

  if [[ -z "${match:-}" ]]; then
    match=""
  fi

  # Collect the parent dirs of every .git three levels down, NUL-safely.
  IFS=$'\n' read -r -d '' -a repos < <(
    find "${WORKSPACE}" -name .git -type d -depth 3 -print0 | xargs -0 -n1 dirname | grep "${match}" | sort && printf '\0'
  )

  # ${#repos[@]} is the element count; the original's ${#repos} printed
  # the string length of the first path instead.
  util::print::green "Pulling ${#repos[@]} repos..."
  util::print::break

  for repo in "${repos[@]}"; do
    repo::update "${repo}"
  done
}
# Report on one repo: fetch, then pull, with all output indented under a
# blue header line.  $1 - absolute repo path.
function repo::update() {
local dir
dir="${1}"

util::print::blue "Checking ${dir#"${WORKSPACE}"}"

# Subshell so a cd or failure inside fetch/pull can't leak out; both
# streams go through the indent filter.
(
repo::fetch "${dir}"
repo::pull "${dir}"
) 2>&1 | util::print::indent

util::print::break
}
# Shallow-fetch the repo at $1 when its remote is reachable.
function repo::fetch() {
  local dir
  dir="${1}"

  # Probe the repo's own remote.  The original ran ls-remote in the
  # current directory (missing -C "$dir") and had the condition inverted:
  # it printed "Remote does not exist!" for live remotes and fetched for
  # dead ones.
  if git -C "${dir}" ls-remote --exit-code > /dev/null 2>&1; then
    util::print::yellow "Fetching..."
    git -C "${dir}" fetch --depth 1 || true
  else
    util::print::red "Remote does not exist!"
  fi
}
# Rebase-pull the repo at $1 when it is clean and behind its remote
# branch; otherwise explain why nothing was done.
function repo::pull() {
local dir
dir="${1}"

# Refuse to touch a repo with uncommitted changes.
local status
status="$(git -C "${dir}" status --short)"
if [[ -n "${status}" ]]; then
util::print::red "Uncommitted changes!"
echo "${status}"
return 0
fi

local branch
branch="$(git -C "${dir}" branch --show-current)"

# "[gone]" in the branch status means the upstream branch was deleted.
if git -C "${dir}" status --short --branch | grep '\[gone\]' > /dev/null; then
util::print::red "Remote branch ${branch} is gone!"
return 0
fi

# NOTE(review): '\d' is a PCRE class, not BRE -- this pattern presumably
# relies on a grep that accepts it; verify on the target platform.
if git -C "${dir}" status --short --branch | grep '\[.*behind\ \d*.*\]' > /dev/null; then
util::print::yellow "Pulling ${branch}..."
git -C "${dir}" pull --rebase
git -C "${dir}" submodule update --init --recursive
else
util::print::yellow "Up-to-date!"
fi
}
# ANSI-colored message helpers.  Each wrapper hands its message to
# util::print::color with the matching foreground escape code.

function util::print::blue()   { util::print::color "${1}" "\033[0;34m"; }
function util::print::yellow() { util::print::color "${1}" "\033[0;33m"; }
function util::print::green()  { util::print::color "${1}" "\033[0;32m"; }
function util::print::red()    { util::print::color "${1}" "\033[0;31m"; }

# Core helper: wrap the message in the color code plus a reset, expand the
# escapes, and emit on stderr so stdout stays clean for data.
function util::print::color() {
  local msg="${1}"
  local tint="${2}"
  local -r normal="\033[0;39m"
  printf '%b\n' "${tint}${msg}${normal}" >&2
}
# Emit a blank separator line on stderr.
function util::print::break() {
  printf '\n' >&2
}

# Filter: indent every stdin line by two spaces (stdin -> stdout).
function util::print::indent() {
  sed 's/^/  /'
}
main "${@:-}"
| true |
96c4dbb47d22d883b27cd7f9ce57dce88ed2b34a | Shell | maz1125/auto-generator-tips | /common/property-loader.sh | SHIFT_JIS | 523 | 3.75 | 4 | [] | no_license | #!/bin/sh
SCRIPT_DIR=$(cd $(dirname $0); pwd)
#####
## Helper for reading a value out of a Java-style .properties file.
## ${1} property_name
## ${2} property_file_name
#####
# Look up a property and print its value.
# ${1} - property name (used as a grep pattern)
# ${2} - properties file path
# Prints the text after the LAST '=' on the matching line, mirroring the
# original greedy `sed 's#.*=##g'`.  Arguments are now quoted so property
# names or file paths containing spaces work; the globals propertyLine and
# PROPERTY_VALUE keep their original names for compatibility.
function PropertyLoader(){
	propertyLine=$(grep -- "$1" "$2")
	PROPERTY_VALUE=${propertyLine##*=}
	echo "$PROPERTY_VALUE"
}
#(example)
# Load application.contents_name from layout.properties into CONTENTS_NAME.
CONTENTS_NAME=`PropertyLoader application.contents_name layout.properties`
| true |
bddfee33c977441f48e12d0b3e8a311725ac0433 | Shell | 0x1306a94/osx | /src/dovecot/dovecot.Tools/postupgrade | UTF-8 | 487 | 3.3125 | 3 | [] | no_license | #!/bin/sh
# Copyright 2009 Apple. All rights reserved.

#echo "source package path: $1"
#echo "package destination: $2"
#echo "mount point of dest: $3"
#echo "installer framework: $4"

dstdir=${3:-/}
tmpdir=${INSTALLER_TEMP:-/tmp}
cookie="$tmpdir/mail-services-cookie"
serveradmin=/usr/sbin/serveradmin

# Restart mail only when installing onto the live system ("/"), the
# serveradmin tool is executable, and a preflight step left its restart
# cookie behind.  Separate [ ] tests joined with && replace the
# obsolescent `-a` operator (quoting added as well).
if [ "$dstdir" = "/" ] && [ -x "$serveradmin" ] && [ -e "$cookie" ]
then
	logger -p install.info "Restarting mail services after upgrade."
	rm -f "$cookie"
	$serveradmin start mail
fi
exit 0
| true |
d347f63ad1ce796abeff92d6d27c739c58689375 | Shell | wayfinder/Wayfinder-Server | /Server/bin/Scripts/init.mc2 | UTF-8 | 2,442 | 3.546875 | 4 | [] | no_license | #!/bin/bash
#
# Init script for MC2
#
# Copyright (c) 1999 - 2010, Vodafone Group Services Ltd
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# * Neither the name of the Vodafone Group Services Ltd nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# chkconfig: 2345 99 01
# description: mc2
# source function library
. /etc/rc.d/init.d/functions

# Service account, controller binary, and its directory.
MC2USER=mc2
MC2CONTROL=/usr/local/mc2/bin/mc2control
MC2DIR=`dirname ${MC2CONTROL}`
RETVAL=0
# Start mc2 on this host as the service user; record the subsys lock on
# success so `condrestart` knows the service was running.
start() {
    echo -n "Starting mc2... "
    cd "$MC2DIR"
    # Use the configured $MC2USER (was a hard-coded `su mc2`) and modern
    # command substitution instead of backticks.
    su "$MC2USER" -c "$MC2CONTROL start $(hostname)"
    RETVAL=$?
    echo
    [ $RETVAL -eq 0 ] && touch /var/lock/subsys/mc2
}
# Stop mc2 on this host as the service user; drop the subsys lock on
# success.
stop() {
    echo -n "Stopping mc2... "
    cd "$MC2DIR"
    # Use the configured $MC2USER (was a hard-coded `su mc2`) and modern
    # command substitution instead of backticks.
    su "$MC2USER" -c "$MC2CONTROL stop $(hostname)"
    RETVAL=$?
    echo
    [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/mc2
}
# Standard SysV action dispatch.
case "$1" in
  start)
    start
    ;;
  stop)
    stop
    ;;
  restart|reload)
    stop
    start
    RETVAL=$?
    ;;
  condrestart)
    # Restart only if the service was running (subsys lock present).
    if [ -f /var/lock/subsys/mc2 ]; then
      stop
      start
      RETVAL=$?
    fi
    ;;
  status)
    # Use the configured service user (was a hard-coded `su mc2`).
    su "$MC2USER" -c "$MC2CONTROL status"
    RETVAL=$?
    ;;
  *)
    echo "Usage: $0 {start|stop|restart|condrestart|status}"
    exit 1
esac
exit $RETVAL
| true |
599b512db093cd8542311d8b3c0b34495c9535e6 | Shell | gtool/g | /libexec/g-issues | UTF-8 | 411 | 3.359375 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
set -e

source "$_G_ROOT/share/g/functions.sh"

# Usage: g issues
# Summary: Work with "Issues"
# Help: Available subcommands:
#
# g issues (g i) # open "Issues" page
# g issues new (g i n) # open new issue
case $1 in
# No subcommand: open the repository's Issues page (open_github comes
# from the sourced functions.sh).
"" )
open_github "issues";;
"new" | "n" )
open_github "issues/new";;
# Provide g completions
"--complete" )
echo n
echo new
;;
esac
| true |
9abf764fede17e84d694faf34bd679408161cd9a | Shell | kulakvt/cut-qualys-headers | /cut-qualys-headers.sh | UTF-8 | 838 | 3.828125 | 4 | [] | no_license | #!/bin/bash
# A script to strip headers from Qualys scan reports
# Run the script from a directory containing the reports
# Creates a 'noheaders' directory in the current directory for the new files
# v0.1 Wed Aug 1 19:01:33 EDT 2018
# Written in GNU bash version 3.2.57(1)-release (x86_64-apple-darwin17)
# Author: Andrew Kulak

i=1
# -p: don't fail when a previous run already created the directory.
mkdir -p noheaders
start=$(date +%s)

for filename in *.csv; do
    # With no *.csv present the glob stays literal; skip that phantom
    # entry instead of producing a bogus "./noheaders/*-nh.csv".
    [ -e "$filename" ] || continue
    basefilename=${filename%.csv}
    echo "$i $filename"
    # Qualys reports carry a fixed 7-line header block; drop it.
    sed '1,7d' "$filename" > "./noheaders/$basefilename-nh.csv"
    i=$((i+1))
done

end=$(date +%s)
runtime=$((end-start))
i=$((i-1))

if [ $runtime -eq 0 ]; then
    echo "Finished: Removed headers from $i files in less than 1 second"
elif [ $runtime -eq 1 ]; then
    echo "Finished: Removed headers from $i files in 1 second"
else
    echo "Finished: Removed headers from $i files in $runtime seconds"
fi
| true |
8faabcb9c8e046896a1cc65d88c2a7f26fd53cac | Shell | hardcodes/trident-core | /core-files/usr/local/share/trident/scripts/system-sanity-check.sh | UTF-8 | 3,551 | 3.71875 | 4 | [
"BSD-2-Clause"
] | permissive | #!/bin/sh
# ===================
# Quick script to perform a system sanity check and fix things as needed
#
# ===================
# 0 when OpenRC's rc-update is installed; used for the devd config below.
test -e /sbin/rc-update
use_openrc=$?

#Verify that config files are setup
# - sudoers
if [ ! -e "/usr/local/etc/sudoers" ] && [ -e "/usr/local/etc/sudoers.dist" ] ; then
cp "/usr/local/etc/sudoers.dist" "/usr/local/etc/sudoers"
fi
# - cupsd.conf
if [ ! -e "/usr/local/etc/cups/cupsd.conf" ] && [ -e "/usr/local/etc/cups/cupsd.conf.sample" ] ; then
ln -s "/usr/local/etc/cups/cupsd.conf.sample" "/usr/local/etc/cups/cupsd.conf"
fi
# - sysctl.conf
if [ ! -e "/etc/sysctl.conf" ] ; then
#if this file is missing, then ALL sysctl config files get ignored. Make sure it exists.
touch "/etc/sysctl.conf"
fi
# - pulseaudio default.pa
pkg info -e pulseaudio-module-sndio
if [ $? -eq 0 ] && [ ! -e "/usr/local/etc/pulse/default.pa" ] && [ -e "/usr/local/etc/pulse/default.pa.trident" ] ; then
ln -s "/usr/local/etc/pulse/default.pa.trident" "/usr/local/etc/pulse/default.pa"
fi
# - fonts.conf
if [ ! -e "/usr/local/etc/fonts/fonts.conf" ] && [ -e "/usr/local/etc/fonts/fonts.conf.sample" ] ; then
ln -s "/usr/local/etc/fonts/fonts.conf.sample" "/usr/local/etc/fonts/fonts.conf"
fi
# - Qt5 qconfig-modules.h include file (supposed to be auto-generated?)
#if [ ! -e "/usr/local/include/qt5/QtCore/qconfig-modules.h" ] && [ -e "/usr/local/include/qt5/QtCore/qconfig.h" ] ; then
# touch "/usr/local/include/qt5/QtCore/qconfig-modules.h"
#fi
#Ensure that the openrc devd configs are loaded from ports as well
if [ ${use_openrc} -eq 0 ] ; then
grep -q "/usr/local/etc/devd-openrc" "/etc/devd.conf"
if [ $? -ne 0 ] ; then
# The replacement text spans two lines: sed inserts the second
# "directory" entry right after the stock one.
sed -i '' 's|directory "/usr/local/etc/devd";|directory "/usr/local/etc/devd";\
directory "/usr/local/etc/devd-openrc";|' "/etc/devd.conf"
fi
fi
# Ensure that the icon cache for the "hicolor" theme does not exist
# That cache file will break the auto-detection of new icons per the XDG spec
if [ -e "/usr/local/share/icons/hicolor/icon-theme.cache" ] ; then
rm "/usr/local/share/icons/hicolor/icon-theme.cache"
fi
#Ensure that the PCDM config file exists, or put the default one in place
if [ ! -e "/usr/local/etc/pcdm.conf" ] ; then
cp "/usr/local/etc/pcdm.conf.trident" "/usr/local/etc/pcdm.conf"
#It can contain sensitive info - only allow root to read it
chmod 700 "/usr/local/etc/pcdm.conf"
fi
# Make sure dbus machine-id file exists
# QT needs a valid dbus machine-id file even if dbus is not used/started
if [ ! -e "/var/lib/dbus/machine-id" ] ; then
/usr/local/bin/dbus-uuidgen --ensure
fi
# Always update the default wallpaper symlink
ln -sf "/usr/local/share/wallpapers/trident/trident_blue_4K.png" "/usr/local/share/wallpapers/trident/default.png"
#Ensure that the /sbin/service utility exists
if [ ! -e "/sbin/service" ] ; then
if [ -e "/usr/sbin/service" ] ; then
ln -s "/usr/sbin/service" "/sbin/service"
else
echo "[WARNING] Could not find the service utility!"
fi
fi
#Make the symlink from /dev/cd0 to /dev/cdrom if needed (many apps use cdrom by default)
if [ -e "/dev/cd0" ] && [ ! -e "/dev/cdrom" ] ; then
ln -s /dev/cd0 /dev/cdrom
fi
#Ensure that the autofs device automount line is present in /etc/auto_master
# (grep's exit status, tested via $?, decides whether to append it)
grep -qE "(-automount)" "/etc/auto_master"
if [ $? -ne 0 ] ; then
echo "/.autofs -automount -nosuid,noatime" >> "/etc/auto_master"
fi
# Ensure that the "ld" binary is symlinked to ld.lld as needed
if [ ! -e "/usr/bin/ld" ] && [ -e "/usr/bin/ld.lld" ] ; then
ln -s "/usr/bin/ld.lld" "/usr/bin/ld"
fi
| true |
dbc5926b55ae1b4ced2e4ae702e5a1fa918473c0 | Shell | mayanksha/CS251 | /a2/160392/q1.sh | UTF-8 | 1,147 | 4.1875 | 4 | [] | no_license | #!/bin/bash
#This script uses a helper script named q1.awk Please put both q1.sh and q1.awk in the same folder and run it.
# Purpose: count comments and strings in every .c file under a directory.
# Usage:   ./q1.sh <directory>
# Based on the parsing below, q1.awk appears to emit one token of the form
# "<comments>#<strings>" per input file — TODO confirm against q1.awk.
if [ $# -ne 1 ]
then
echo "You didn't given appropriate number of arguments. We just need 1 argument. Please re-run the script."
exit -1
elif [ -d $1 ]
then
echo "You gave a directory. The script is running."
else
echo "You didn't give a directory. Please give a directory as argument and re-run the script."
exit -1
fi
# All .c files under the target directory (NOTE(review): breaks on paths
# containing whitespace — unquoted word-splitting of the find output).
files=(`find $1 -name "*.c"`)
# One "<comments>#<strings>" token per file, produced by the awk helper.
count_arr=(`echo "${files[*]}" | xargs -n1 ./q1.awk`)
# Split each token: digits before '#' -> comment counts, after '#' -> string counts.
comment=(`printf '%s\n' "${count_arr[@]}" | grep -P "\d+#" -o | sed -E 's/#//'`)
strings=(`printf '%s\n' "${count_arr[@]}" | grep -P "#\d+" -o | sed -E 's/#//'`)
total_strings=0
total_comments=0
#echo ${comment[*]}
#echo ${files[*]}
#echo ${strings[*]}
# Accumulate per-file counts; seq is 1-based while bash arrays are 0-based,
# hence the (i - 1) index everywhere.
for i in `seq ${#files[*]}`;
do
total_strings=$(( total_strings + strings[(($i - 1))] ))
total_comments=$(( total_comments + comment[(($i - 1))] ))
printf "${files[(($i - 1))]}\t has ${comment[(($i - 1))]} comments and ${strings[(($i - 1))]} strings.\n"
done
echo "Given directory '$1' has total $total_strings strings and $total_comments comments."
| true |
b57719cfa0554dc6ab192f297f72f59bb799e4bc | Shell | SUSE-Cloud/automation | /mkcloudruns/install_suse_cloud | UTF-8 | 1,890 | 3.5 | 4 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/usr/bin/env bash
set -o nounset
commands="cleanup prepare setupadmin addupdaterepo runupdate prepareinstcrowbar instcrowbar rebootcrowbar setupnodes instnodes proposal testsetup rebootcloud $@" # all_noreboot
currentdir=$(pwd)
#######################################
# Deploy clouds FIRST..LAST, each as a tab inside a (re)created GNU screen
# session named after the config script.
# Globals:   $commands (mkcloud step list), $currentdir (script start dir)
# Arguments: $1 - first cloud number, $2 - last cloud number,
#            $3 - mkcloudconfig script name (also the screen session name)
#######################################
function deploy_upgrade_clouds() {
clouds=`seq $1 $2` # Expand the sequence.
suse_cloud_config=$3
# Gracefully close existing screen sessions created by the script.
screen -S $suse_cloud_config -X quit
# Create new screen sessions (detached, with an info tab showing the banner).
screen -S $suse_cloud_config -d -m -t info_$suse_cloud_config bash -c "cat message; echo \"Cloud Upgrade Step: $suse_cloud_config\"; bash"
for i in $clouds; do
echo "Deploying cloud number $i -- To Rock but not to roll!!!"
# Run similar copies of mkcloud runs in the same screen session as tabs.
mkdir -p pids/$i # Create folders for automation pid's files and artifacts.
screen -S $suse_cloud_config -X screen -t Cloud$i bash -c "cd pids/$i; source ../../mkcloudconfig/SUSECloud.mkcloud $i; ../../mkcloudconfig/$suse_cloud_config $commands; exec bash"
# Return to the script's starting directory before the next iteration.
cd $currentdir
done
}
# psalunke: Add a basic API call and warning's with user input
# so that we do not redeploy the entire thing and
# disrupt other's work for the same ...
function usage() {
echo "Not Implemented yet!"
}
# Using deploy cloud function
# $startnumber & $endnumber expands into a sequence and deploys `range($startnumber, $endnumber)` number of clouds.
# $cloudscript_name is basically coming from the folder mkcloudconfig/$cloudconfig
#deploy_upgrade_clouds $startnumber $endnumber $cloudconfig
# Deploy SUSE Cloud 6 Scenarios
deploy_upgrade_clouds 1 2 cloud6
# Deploy SUSE Cloud 7 Scenarios
deploy_upgrade_clouds 3 4 cloud7
# Deploy Upgrade Steps
# Step 1
# Step 2
# Step 3 ...
# Todo
# 1. setup auto-monitoring ...
# 2. generate workload automatically ...
| true |
9b77b9b2236fe28645abcba92fcd584e45dbb06c | Shell | Michaelooo/just_write | /scripts/new_start.sh | UTF-8 | 843 | 3.25 | 3 | [] | no_license | if [ "$SHELL" = "/bin/zsh" -o "$SHELL" = "/bin/sh" ];then
echo "your login shell is the bash \n"
echo "SHELL is : $SHELL"
echo '=========================='
author='michael'
CURDIR="`pwd`"/"`dirname $0`"
rm="`pwd`"/"README.md"
demo="`pwd`"/"`dirname $0`""/demo.md"
echo '=========================='
echo "初始化生成ReadMe $rm $demo"
if [ -f "$rm" ];then
echo "**** 删除旧md ****"
rm -rf $rm
if [ $? -ne 0 ]; then
echo "删除旧md ❌";
exit 1
else
echo "删除旧md ✅"
fi
echo "**** 拷贝模板 ****"
cp "$demo" "$rm"
if [ $? -ne 0 ]; then
echo "拷贝模板 ❌";
exit 1
else
echo "拷贝模板 ✅"
fi
echo "生成目录"
tree -L 2 -N >> $rm
if [ $? -ne 0 ]; then
echo "生成目录 ❌";
exit 1
else
echo "完成 💐"
fi
fi
# Fix: the variable is assigned as `author` earlier in this script; the
# original tested the misspelled `$autor`, so this branch could never match.
elif [ "$author" = "michael" ];then
echo "并不能执行"
fi
cba33bcfd590c19d09757d9f2e678842705674ae | Shell | VimalME/skia-buildbot | /skolo/build_release_hotspare | UTF-8 | 1,033 | 3.296875 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
# Builds and uploads a debian package for rpi-failover.
APPNAME=hotspare
SYSTEMD=${APPNAME}.service
DESCRIPTION="Application to allow rpi-master to failover to a hot spare."
set -x -e
# Copy files into the right locations in ${ROOT}.
copy_release_files()
{
INSTALL="sudo install -D --verbose --backup=none --group=root --owner=root"
INSTALL_DIR="sudo install -d --verbose --backup=none --group=root --owner=root"
${INSTALL} --mode=755 -T ${GOPATH}/bin/${APPNAME} ${ROOT}/usr/local/bin/${APPNAME}
${INSTALL} --mode=644 -T ./sys/${APPNAME}.service ${ROOT}/etc/systemd/system/${APPNAME}.service
${INSTALL} --mode=644 -T ./raspberry-pi/start_serving_image.yml ${ROOT}/usr/local/share/${APPNAME}/start_serving_image.yml
${INSTALL} --mode=644 -T ./raspberry-pi/stop_serving_image.yml ${ROOT}/usr/local/share/${APPNAME}/stop_serving_image.yml
${INSTALL} --mode=644 -T ./service-account.json ${ROOT}/usr/local/share/${APPNAME}/service-account.json
}
source ../bash/release.sh
| true |
08b1a23560b1271fc10a7554274f0085425e8e57 | Shell | jlm365/Landuse_DL | /planetScripts/download_planet_img.sh | UTF-8 | 1,058 | 2.515625 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
## Introduction: downloaded Planet images
#authors: Huang Lingcao
#email:huanglingcao@gmail.com
#add time: 5 October, 2019
# Exit immediately if a command exits with a non-zero status. E: error trace
set -eE -o functrace
word_dir=~/Data/Qinghai-Tibet/entire_QTP_images
eo_dir=~/codes/PycharmProjects/Landuse_DL
cd ${word_dir}
# on Cryo06, to gdalsrsinfro (>2.3) and python 3
export PATH=~/programs/anaconda3/bin:$PATH
#shp_file=~/Data/Qinghai-Tibet/qtp_thaw_slumps/rts_polygons_s2_2018/qtp_train_polygons_s2_2018_v2.shp
# mapping results
shp_file=~/Data/Qinghai-Tibet/entire_QTP_images/sentinel-2/autoMapping/QTP_deeplabV3+_3/result_backup/QTP_deeplabV3+_3_exp2_iter30000_prj_post2_chpc_2_latlon.shp
save_folder=planet_sr_images
#start_date=2018-05-20
#end_date=2018-06-01
start_date=2018-05-20
end_date=2018-06-30
cloud_cover_thr=0.3
item_type=PSScene4Band
#account=huanglingcao@link.cuhk.edu.hk
account=liulin@cuhk.edu.hk
${eo_dir}/planetScripts/download_planet_img.py ${shp_file} ${save_folder} \
-s ${start_date} -e ${end_date} -c ${cloud_cover_thr} -i ${item_type} -a ${account}
| true |
66b5469e9faa7e3a5a5257c784bce4475626a68a | Shell | lhunath/gamebot | /scripts/extract-from-git | UTF-8 | 684 | 3.59375 | 4 | [] | no_license | #!/bin/bash
# usage: extract-from-git build/bash-x.y.z
# Print each argument on its own line to stderr, then exit with the exit
# status of the command that ran just before die was called (so a failing
# guard's status is propagated; a 0 status at call time exits 0).
die() {
local status=$?
printf '%s\n' "$@" >&2
exit "$status"
}
# Map: release name -> git commit/ref to export from the local clone.
declare -A versions='(
[bash-1.14.7]=726f63884db0132f01745f1fb4465e6621088ccf
[bash-2.05b]=7117c2d221b2aed4ede8600f6a36b7c1454b4f55
[bash-3.2.48]=f1be666c7d78939ad775078d290bec2758fa29a2
[bash-4.2.45]=f281b8f4f8936b2713966274d9f8508a9f0910e4
[bash-devel]=origin/devel
)'
# Release name is the last path component of the argument
# (e.g. "build/bash-2.05b" -> "bash-2.05b").
base=${1##*/}
[[ ${versions[$base]} ]] || die "Unknown version: $base"
mkdir -p "$1" || exit
# Export the tree from the local "bash" clone and unpack it under "$1/".
{
cd bash &&
git archive --prefix="$1/" --format=tar "${versions[$base]}"
} | pax -r
# Apply the matching patch, if one exists, from inside the freshly extracted tree.
if [[ -f patches/$base ]]; then
{ cd "$1" && patch -p1; } < "patches/$base"
fi
| true |
c4f698b7a81de17e5e4c5ff166b9dc99b8d13c9d | Shell | bsavitzky/rdf | /sample_files/import_vars.sh | UTF-8 | 732 | 2.59375 | 3 | [] | no_license | #! /bin/bash
echo
echo "Loading variables for rdf analysis suite of image 55 from 150831:"
DIR="/Users/bsavitzky/Projects/PbSe_QDs/Analysis/150914_Thickness_LRO/rdf"
I="/Users/bsavitzky/Data/PbSe_QDS/150831_CBCs_PbSe_lowThickness_noUltrathin/TiffsFromDm3Files/55_fov600nm_8us_4096.tif"
OUT="/Users/bsavitzky/Projects/PbSe_QDs/Analysis/150914_Thickness_LRO/150831_3-4layer/3layers/150831_55/outputs/"
C=$OUT"centroids.npz"
Ce=$OUT"centroids_edited.npz"
Csp=$OUT"SP_centroids.npz"
Gexp=$OUT"rdf_exp.npz"
echo "DIR = rdf script location"
echo "I = Image location"
echo "C = Raw centroid location"
echo "Ce = Edited centroid location"
echo "Csp = Subpixel centroid location"
echo "Gexp = Experimental radial distribution function"
| true |
1e9c2074cd9fd35263da1124ca0f89374fac1490 | Shell | henrytsangsh/cj81499.github.io | /Glyphs/WIP/cjGlyphsUpdate.sh | UTF-8 | 291 | 2.515625 | 3 | [] | no_license | #!/bin/zsh
cd /Users/Cal/Documents/Jailbreak_Stuff/cj81499.github.io/Glyphs/Library/Themes
echo Removing .DS_Store.
find . -name ".DS_Store" -depth -exec rm {} \;
echo Removing Old Files.
rm cjGlyphs.zip
echo Zipping cjGlyphs.
zip -r -X cjGlyphs.zip cjGlyphs*
echo Done. Exiting
exit 0
| true |
fd3c7e12e34755341cac720c6006d6b57de933d0 | Shell | e-graveyard/lexy | /utils/has-readline.sh | UTF-8 | 320 | 3.03125 | 3 | [
"MIT"
] | permissive | set -e
HERE="$(pwd)/utils"
BIN="${HERE}/has-readline"
"$CC" -o "$BIN" -x c - <<EOF
#if !__has_include(<editline/readline.h>)
#define __MISSING_READLINE
#endif
int main(int argc, char** argv)
{
int code = 0;
#ifdef __MISSING_READLINE
code = 1;
#endif
return code;
}
EOF
"$BIN"
echo "$?"
rm -rf "$BIN"
| true |
f109d0081843a1832830dd82c49fe07cc7f02f54 | Shell | chaadow/dotfiles | /functions/ch | UTF-8 | 1,250 | 3.53125 | 4 | [
"MIT"
] | permissive | #!/bin/zsh
#
# This lets you quickly browse your chrome history
#
# Ref : http://junegunn.kr/2015/04/browsing-chrome-history-with-fzf/
#
# ch() {
# local cols sep
# cols=$(( COLUMNS / 3 ))
# sep='{::}'
# cp -f ~/Library/Application\ Support/Google/Chrome/Default/History /tmp/h
# sqlite3 -separator $sep /tmp/h \
# "select substr(title, 1, $cols), url
# from urls order by last_visit_time desc" |
# awk -F $sep '{printf "%-'$cols's \x1b[36m%s\x1b[m\n", $1, $2}' |
# fzf --ansi --multi | sed 's#.*\(https*://\)#\1#' | xargs open
# }
# c - browse chrome history
ch() {
local cols sep
export cols=$(( COLUMNS / 3 ))
export sep='{::}'
cp -f ~/Library/Application\ Support/Google/Chrome/Default/History /tmp/h
sqlite3 -separator $sep /tmp/h \
"select title, url from urls order by last_visit_time desc" |
ruby -ne '
cols = ENV["cols"].to_i
title, url = $_.split(ENV["sep"])
len = 0
puts "\x1b[36m" + title.each_char.take_while { |e|
if len < cols
len += e =~ /\p{Han}|\p{Katakana}|\p{Hiragana}|\p{Hangul}/ ? 2 : 1
end
}.join + " " * (2 + cols - len) + "\x1b[m" + url' |
fzf --ansi --multi --no-hscroll --tiebreak=index |
sed 's#.*\(https*://\)#\1#' | xargs open
}
| true |
2343a95695de04c9bc0d449c7f69e59ee59795d4 | Shell | CannedFish/ops_tools | /kettle/cron_kettle_stop.sh | UTF-8 | 237 | 3.265625 | 3 | [] | no_license | #!/bin/bash
# Kill any running Kettle "carte" server process.
# Exits 1 after killing (tells the cron wrapper that something was found);
# falls through with status 0 when nothing matched.
# NOTE(review): `ps | grep carte` matches ANY process whose ps line contains
# "carte" (including unrelated commands/args); pgrep/pkill -f would be safer.
num=`ps aux | grep carte | grep -v grep | wc -l`
if [ "$num" -ne 0 ];then
# Column 2 of `ps aux` output is the PID.
pids=`ps aux | grep carte | grep -v grep | awk '{print $2}'`
# Kill each matched PID with SIGKILL (no chance of graceful shutdown).
for pid in $pids
do
kill -9 $pid
done
exit 1
fi
| true |
e620176e6b43230aeffbe225e0b1066c7415f62e | Shell | mirage335/PortableChRoot | /umountChRoot.sh | UTF-8 | 555 | 2.796875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
#Author: mirage335
#Date: 07-01-2011 (MM-DD-YYYY)
#Version: 1.0 (Minor versions reflect compatible updates.)
#Dependencies: ubiquitous_bash.sh
#Usage: enterChroot.sh
#Purpose: Dismounts bound filesystems in the ChRoot folder.
. ubiquitous_bash.sh
mustBeRoot #Non-superuser has no ability to mount filesystems or execute chroot.
ChRootDir="$(getScriptAbsoluteFolder)/ChRoot"
umount "$ChRootDir"/proc
umount "$ChRootDir"/sys
umount "$ChRootDir"/dev/pts
umount "$ChRootDir"/tmp
umount "$ChRootDir"/dev/shm
umount "$ChRootDir"/dev | true |
e378a87ead8fc9b9ef69477f991ccbb48ac78efe | Shell | zeroDuke/MyProject | /runbbd.sh | UTF-8 | 1,051 | 3.3125 | 3 | [] | no_license | #!/bin/bash
filename=''
netmask=''
gateway=''
prefix='CUC-SD-DZ1'
ip=''
getInfo(){
pass=$(sudo ssh -p23432 -t root@$ip "hostname"|awk -F- '{ print $3 }')
serial=$(sudo ssh -p23432 root@$ip "dmidecode|grep -m 1 'Serial Number:'"|awk -F' ' '{ print $3 }')
#pass=$(sudo ssh -p23432 -t $ip "hostname"|awk -F- '{ print $3 }')
#serial=$(sudo ssh -p23432 -t $ip "dmidecode|grep -m 1 'Serial Number:'")
#serial="91732240312345"
echo -e $ip $serial $pass
}
upLoad(){
sudo scp -P 23432 ./software/bbd.bin $ip:/tmp
}
runInit(){
newname=${prefix,,}-${ip//./-}
echo $newname
sudo ssh -p23432 -t root@$ip "sh /tmp/bbd.bin --HOSTNAME=$newname | tee -a /tmp/$ip.log ;sh /tmp/bbd.bin --basic sh | tee -a /tmp/$ip.log ;sh /tmp/bbd.bin --basic sh | tee -a /tmp/$ip.log "
}
downLoadlog(){
mkdir -p remotelog
sudo scp -p 23432 $ip:/tmp/$ip.log ./logs/remote/$prefix/
}
mkdir -p logs/{local,remote}
# Process every IP listed (whitespace-separated) in iplist.txt.
# Fix: the original read `for ip in `cat iplist.txt` do` — without a `;` or
# newline before `do`, bash treats "do" as just another word in the list and
# the loop body has no `do`, which is a syntax error at `done`.
for ip in $(cat iplist.txt); do
#getInfo | tee -a logs/local/$ip.log
upLoad
#runInit | tee -a logs/local/$ip.log
#downLoadlog
done
| true |
df9ca6c9926ec68ba221e298143ab8e4fd1b1ec7 | Shell | Azure/batch-shipyard | /contrib/notebooks/deep_learning/model_scripts/convert_cifar10.sh | UTF-8 | 283 | 2.875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
set -e
set -o pipefail
IMAGE_NAME=$1
CIFAR_DATA=$2
BASEDIR=$(pwd)
docker run --rm -v $CIFAR_DATA:$CIFAR_DATA -w $CIFAR_DATA -v $BASEDIR:/code $IMAGE_NAME /bin/bash -c "source /cntk/activate-cntk; python -u /code/cifar_data_processing.py --datadir $CIFAR_DATA" | true |
7dc766b3bb99f90d5bfc22fc610d0b2134d1f015 | Shell | kpearson/cli-tools | /ncmcli/gwi-logs | UTF-8 | 736 | 3 | 3 | [] | no_license | #!/bin/bash
LOG_FILE=/opt/gowatchit/watchit/shared/log/production.log
PRODUCTION_SERVER_2=production-watchit-$box
PRODUCTION_SERVER_3=production-watchit-$box.internal.noovie.com
PRODUCTION_SERVER_4=production-watchit-$box.internal.noovie.com
USER=kpearson
# multitail -l "ssh $USER@${PRODUCTION_SERVER_2} tail -f ${LOG_FILE}" \
# -l "ssh $USER@${PRODUCTION_SERVER_3} tail -f ${LOG_FILE}" \
# -l "ssh $USER@${PRODUCTION_SERVER_4} tail -f ${LOG_FILE}"
mkdir -p /tmp/log
pids=""
pids="$pids $!"
for box in 2 3 4; do
ssh -f $USER@production-watchit-${box}.internal.noovie.com "tail -f ${LOG_FILE}" > /tmp/log/prod_log_$box.log
pids="$pids $!"
done
lnav -t /tmp/log
trap 'kill -9 $pids && rm -rf /tmp/log' SIGINT
trap wait
| true |
671fbc27c3bc8eadf5e51d177b119181b104f1b4 | Shell | security-prince/websitesVulnerableToSSTI | /python/python-eval/runInDocker.sh | UTF-8 | 159 | 2.671875 | 3 | [
"Apache-2.0"
] | permissive | if [ $# -eq 0 ]
then
IP="127.0.0.1"
else
IP=$1
fi
docker build -t python-eval .
docker run -d -i -p $IP:5004:5004 -w /home python-eval sh ./run.sh | true |
d0ad7fd5cce89d3a5e6f701ce658c85b64d4acdd | Shell | SteffanA/leetcode-srs | /configure.sh | UTF-8 | 4,284 | 4.46875 | 4 | [] | no_license | #!/bin/bash
# Function for writing user input for specified env vars into .env file
# Reads the sample .env line by line, outputting comments as whole,
# or variables by the defition, then asks for user input for each, defining
# said var with the provided input in the final .env file provided
#
# Accepts 2 parameters: $1 = sample .env path, $2 = final .env path
#######################################
# Interactively build an .env file from a sample template.
# Reads the sample line by line: comment lines and blank lines are copied
# through unchanged; for each "VAR=..." line the user is prompted (input is
# read from /dev/tty because stdin feeds the while-read loop) and
# "VAR=<input>" is written to the output file.
# Arguments: $1 - path to the sample .env, $2 - path to the output .env
# Outputs:   prompts/echoes on stdout; recreates the file at $2
#######################################
get_env_input () {
sampleFile=$1
envFile=$2
# Remove any existing env output file
rm "$envFile"
while IFS= read -r line
do
# Check if this is a comment line - if so, simply print it.
if [[ $line =~ ^#.* ]]
then
echo "$line"
# Also print to our output file
echo "$line" >> "$envFile"
# Check if newline - don't prompt or output, just add to output file
elif [[ -z $line ]]
then
echo "$line" >> "$envFile"
# Else ask for a value to provide for the variable
else
# Grab just the environmental variable
# Use sed to find the first =, then replace all non-equal after with
# an empty string and pass to curVar
curVar=$(echo "$line" | sed 's/=[^=]*$//')
if [[ ! -z $curVar ]]
then
echo "Enter the value you would like to provide for $curVar:"
# Read directly from tty, not stdout, since we're printing to stdout
read -p " " inputVar < /dev/tty
echo
# Write the curVar and inputVar into our output file
echo -n "$curVar=" >> "$envFile"
echo "$inputVar" >> "$envFile"
else
# TODO: Understand why this isn't caught by the elif statement
echo "$curVar" >> "$envFile"
fi
fi
done < "$sampleFile"
}
# BEGIN CONFIGURE SCRIPT
# Install the npm modules for the server and frontend
read -p "Do you want to install the npm module dependencies? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
(cd ./server/ && npm install)
(cd ./leetcode-srs/ && npm install)
fi
# Ask if we should configure initialize the .env files
read -p "Do you want to create the required .env files? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
# Simply copy the samples
cp "./leetcode-srs/.env.sample" "./leetcode-srs/.env"
cp "./.env.sample" "./.env"
# Check if user wants to fill in the .env now
read -p "Would you like to input the required variables now? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
# Go ahead and remove the .env since we'll generate them here
rm "./.env"
rm "./leetcode-srs/.env"
echo "Adding variables for the React frontend"
sampleFile="./leetcode-srs/.env.sample"
envFile="./leetcode-srs/.env"
get_env_input "$sampleFile" "$envFile"
echo "Adding variables for the rest of the application"
sampleFile="./.env.sample"
envFile="./.env"
echo
get_env_input "$sampleFile" "$envFile"
fi
fi
dockerRunning="0"
echo "Would you like to fill your database with LeetCode problems? "
read -p "Note this requires the .env files to be setup already. " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
echo "Note this requires the MongoDB database and backend server to be running."
echo "We can launch the docker containers for your server/MongoDB now if you wish."
echo
read -p "Should we run the containers now? " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]
then
# Run MongoDB container only
echo "Running MongoDB and server containers..."
docker-compose up -d mongo server
# Mark that we have containers running we'll need to clean up
dockerRunning="1"
else
echo "Skipping running containers."
echo
fi
# Run the python import script, but ensure requirements are met first
echo "Installing python script requirements.."
python3 -m pip install -r ./utility/requirements.txt
echo "Running problem import script..."
echo
python3 ./utility/lcAPIparser.py
fi
# Cleanup
if [[ "$dockerRunning" == "1" ]]
then
# Close the docker container
echo "Shutting down containers..."
docker-compose down
fi
exit 1
| true |
96712cb0a4b1abfe2e11120cd704109ee2846fc2 | Shell | abstractguy/TSO_project | /software/jetson/ArduCAM/MIPI_Camera/Jetson/Jetvariety/external_trigger/scripts/jetson_libraries.sh | UTF-8 | 3,897 | 3.3125 | 3 | [
"BSD-2-Clause-Views",
"BSD-2-Clause"
] | permissive | #!/bin/bash
# This file is part of the jetson_stats package (https://github.com/rbonghi/jetson_stats or http://rnext.it).
# Copyright (c) 2020 Raffaello Bonghi.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Read CUDA version
if [ -f /usr/local/cuda/version.txt ]; then
JETSON_CUDA=$(cat /usr/local/cuda/version.txt | sed 's/\CUDA Version //g')
else
JETSON_CUDA="NOT_INSTALLED"
fi
# Jetson CUDA version
export JETSON_CUDA
# Read from OpenCV if is installed CUDA
opencv_read_cuda()
{
# Red if use CUDA or not
local OPENCV_VERSION_VERBOSE=$(opencv_version --verbose | grep "Use Cuda" )
if [ ! -z "$OPENCV_VERSION_VERBOSE" ]; then
# Read status of CUDA
local OPENCV_CUDA_FLAG=$(echo $OPENCV_VERSION_VERBOSE | cut -f2 -d ':' | cut -f2 -d ' ' )
if [ "$OPENCV_CUDA_FLAG" == "NO" ]; then
# Set NO if cuda is not installed
echo "NO"
else
# Set YES if cuda is installed
echo "YES"
fi
return
fi
# read NVIDIA CUDA version
OPENCV_VERSION_VERBOSE=$(opencv_version --verbose | grep "NVIDIA CUDA" )
if [ ! -z "$OPENCV_VERSION_VERBOSE" ]; then
# get information
local OPENCV_CUDA_FLAG=$(echo $OPENCV_VERSION_VERBOSE | cut -f2 -d ':')
OPENCV_CUDA_FLAG=${OPENCV_CUDA_FLAG//[[:blank:]]/}
# Set YES if cuda is installed
echo "YES"
return
fi
echo "NO"
return
}
if hash opencv_version 2>/dev/null; then
JETSON_OPENCV="$(opencv_version)"
# Read information about cuda status
JETSON_OPENCV_CUDA=$(opencv_read_cuda)
else
JETSON_OPENCV="NOT_INSTALLED"
JETSON_OPENCV_CUDA="NO"
fi
# Opencv variables
export JETSON_OPENCV
export JETSON_OPENCV_CUDA
# Extract cuDNN version
JETSON_CUDNN=$(dpkg -l 2>/dev/null | grep -m1 "libcudnn")
if [ ! -z "$JETSON_CUDNN" ] ; then
JETSON_CUDNN=$(echo $JETSON_CUDNN | sed 's/.*libcudnn[0-9] \([^ ]*\).*/\1/' | cut -d '-' -f1 )
else
JETSON_CUDNN="NOT_INSTALLED"
fi
# Export NVIDIA CuDNN Library
export JETSON_CUDNN
# Extract TensorRT version
JETSON_TENSORRT=$(dpkg -l 2>/dev/null | grep -m1 " tensorrt ")
if [ ! -z "$JETSON_TENSORRT" ] ; then
JETSON_TENSORRT=$(echo $JETSON_TENSORRT | sed 's/.*tensorrt \([^ ]*\).*/\1/' | cut -d '-' -f1 )
else
JETSON_TENSORRT="NOT_INSTALLED"
fi
# Export NVIDIA CuDNN TensorRT
export JETSON_TENSORRT
# Extract Visionworks version
JETSON_VISIONWORKS=$(dpkg -l 2>/dev/null | grep -m1 "libvisionworks")
if [ ! -z "$JETSON_VISIONWORKS" ] ; then
JETSON_VISIONWORKS=$(echo $JETSON_VISIONWORKS | sed 's/.*libvisionworks \([^ ]*\).*/\1/' )
else
JETSON_VISIONWORKS="NOT_INSTALLED"
fi
# Export NVIDIA CuDNN VisionWorks
export JETSON_VISIONWORKS
# Extract VPI
JETSON_VPI=$(dpkg -l 2>/dev/null | grep -m1 "vpi")
if [ ! -z "$JETSON_VPI" ] ; then
JETSON_VPI=$(echo $JETSON_VPI | sed 's/.*vpi \([^ ]*\).*/\1/' )
else
JETSON_VPI="NOT_INSTALLED"
fi
# Export VPI
export JETSON_VPI
# Vulkan
JETSON_VULKAN_INFO=$(which vulkaninfo)
if [ ! -z $JETSON_VULKAN_INFO ] ; then
JETSON_VULKAN_INFO=$($JETSON_VULKAN_INFO | grep -m1 "Vulkan Instance Version")
JETSON_VULKAN_INFO=$(echo $JETSON_VULKAN_INFO | sed 's/.*: \([^ ]*\).*/\1/' )
else
JETSON_VULKAN_INFO="NOT_INSTALLED"
fi
# Export VPI
export JETSON_VULKAN_INFO
#EOF
| true |
7fdf7db6e2df2f1f6b19c8abddf6c7d4bbfc9379 | Shell | laktek/dotfiles | /.bashrc | UTF-8 | 1,993 | 3.15625 | 3 | [] | no_license | [ -z "$PS1" ] && return
# don't put duplicate lines or lines starting with space in the history.
HISTCONTROL=ignoreboth
# append to the history file, don't overwrite it
shopt -s histappend
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
HISTSIZE=1000
HISTFILESIZE=2000
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# make less more friendly for non-text input files, see lesspipe(1)
[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)"
#export DISPLAY=":0.0"
export CLICOLOR=1
#export MANPATH="/usr/local/man:$MANPATH"
#export COPYFILE_DISABLE=true
export ARCHFLAGS='-arch x86_64'
export EDITOR='vim -f'
# Print "*" when the current git working tree has uncommitted changes
# (used by parse_git_branch to decorate the prompt).
# Fix: the original matched only the pre-2.9.1 message
# "working directory clean"; modern git prints "working tree clean", so
# clean repos were always flagged dirty. Matching on "clean" covers both.
function parse_git_dirty {
[[ $(git status 2> /dev/null | tail -n1) != *"clean"* ]] && echo "*"
}
# Print the current git branch as "(branch<dirty-marker>)", or nothing when
# not inside a git repository. The sed drops every line not starting with
# "*" (the current branch marker) and rewraps the remainder; note the
# $(parse_git_dirty) substitution runs once, when the sed expression is
# built, not per matched line.
function parse_git_branch {
git branch --no-color 2> /dev/null | sed -e '/^[^*]/d' -e "s/* \(.*\)/(\1$(parse_git_dirty))/"
}
export PS1="\[\033[38;5;33m\]\h\[\033[38;5;64m\]:\[\033[38;5;37m\]\W\[\033[38;5;136m\]\$(parse_git_branch)\[\033[38;5;160m\]\$\[\033[00m\] "
# Source NVM to manage Node versions
[ -s $HOME/.nvm/nvm.sh ] && . $HOME/.nvm/nvm.sh
# Include Go tools
export GOROOT=/usr/local/go/
export GOPATH=$HOME/go
export GOBIN=$GOPATH/bin
export PATH=$PATH:/usr/local/go/bin:$GOPATH/bin
#Alias definitions.
if [ -f ~/.bash_aliases ]; then
. ~/.bash_aliases
fi
# enable programmable completion features (you don't need to enable
# this, if it's already enabled in /etc/bash.bashrc and /etc/profile
# sources /etc/bash.bashrc).
if [ -f /etc/bash_completion ] && ! shopt -oq posix; then
. /etc/bash_completion
fi
if [ -f ~/.git-completion.bash ]; then
. ~/.git-completion.bash
fi
# Override default bash settings based on the OS.
unamestr=`uname`
if [[ "$unamestr" == 'Linux' ]]; then
. ~/.bash_linux
elif [[ "$unamestr" == 'Darwin' ]]; then
. ~/.bash_osx
fi
# Add Flutter to PATH
export PATH=$PATH:/$HOME/src/flutter/bin
| true |
4e7d74d09806a9d548b3f8b08e37f3c3ff1efeea | Shell | couchbase/couchbase-lite-android-liteserv | /extra/jenkins_build/run_android_liteserv.sh | UTF-8 | 1,322 | 3.5 | 4 | [] | no_license |
# This script starts LiteServ, which provides an HTTP interface to the Couchbase-Lite database running on the device/emulator.
#
# This is useful for two things:
# - Testing
# - Viewing the data stored by Couchbase-Lite
#
# How to run this script:
#
# ./run_android_liteserv.sh <listen-port-number>
#
# where listen-port-number is a port number, eg, 8080
#
# Pre-requisites:
# - Emulator must be running
# make sure port was passed in
die () {
echo >&2 "$@"
exit 1
}
[ "$#" -eq 1 ] || die "1 argument required, $# provided"
# build and install to emulator
./gradlew clean && ./gradlew installDebug
# launch activity
adb shell am start -a android.intent.action.MAIN -n com.couchbase.liteservandroid/com.couchbase.liteservandroid.MainActivity --ei listen_port $*
# to disable the basic auth(for functional tests)
# adb shell am start -a android.intent.action.MAIN -n com.couchbase.liteservandroid/com.couchbase.liteservandroid.MainActivity --ei listen_port $* --es username "" --es password ""
# port mapping (only listens on localhost, unavailable from other machines on network)
adb forward tcp:$* tcp:$*
# make this port available to other machines on network.
# (note: replace 10.17.51.92 with ethernet iface address)
# ./bin/node-http-proxy --port $* --host 10.17.51.92 --target localhost:$*
| true |
f7c6ac58f1ac409a67e3e19e5e24ea12d30f1061 | Shell | amsharifian/dotfiles | /mac/bashrc | UTF-8 | 1,534 | 2.796875 | 3 | [] | no_license | # ~/.bashrc: executed by bash(1) for non-login shells.
# see /usr/share/doc/bash/examples/startup-files (in the package bash-doc)
# for examples
# don't put duplicate lines or lines starting with space in the history.
# See bash(1) for more options
#HISTCONTROL=ignoreboth
# append to the history file, don't overwrite it
#shopt -s histappend
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
#HISTSIZE=1000
#HISTFILESIZE=2000
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
#shopt -s checkwinsize
# Alias definitions.
# You may want to put all your additions into a separate file like
# ~/.bash_aliases, instead of adding them here directly.
# See /usr/share/doc/bash-doc/examples in the bash-doc package.
#if [ -f ~/.bash_aliases ]; then
#. ~/.bash_aliases
#fi
setBackground() {
osascript -e "tell application \"iTerm\"
set current_terminal to (current terminal)
tell current_terminal
set current_session to (current session)
tell current_session
set background color to $1
end tell
end tell
end tell"
}
# highlight
# args: 1: size, 2: lang
function keycode() {
pbpaste | \
highlight \
--font Inconsolata \
--font-size $1 \
--src-lang $2 \
--out-format rtf | \
pbcopy
}
#--style default-light \
#--base16 \
#vim() {
#(setBackground "{65025,65025,65025}" &)
#(exec vim $*)
#}
export PATH=/usr/local/gcc-9.2/bin:$PATH
| true |
d0980f60fc26bab2ff2cd6f4516178dba78f6a23 | Shell | OpenNebula/addon-context-linux | /src/etc/periodic/1min/one-context-reconfigure##apk | UTF-8 | 177 | 2.609375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/sh
# Periodically run one-context-reconfigure on VMware
if [ "$(virt-what 2>/dev/null)" = 'vmware' ]; then
service one-context-reconfigure restart >/dev/null 2>&1
fi
| true |
b586f16276e589d330bc3ff1a7cf954e8cd34aae | Shell | VanirLab/vanir-linux-template-builder | /cleanup_image | UTF-8 | 358 | 3.40625 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# Clean up and TRIM-compact a mounted template image.
# Usage: cleanup_image <mount_point>

# Validate arguments before doing anything (the original exported $1 and
# sourced ./builder_setup first, even when called with the wrong arg count).
if [ $# -ne 1 ]; then
echo "usage $0 <mount_point>"
exit 1
fi

export INSTALLDIR=$1
. ./builder_setup
set -e

# Quote the expansion so mount points containing spaces are handled.
if ! [ -d "$INSTALLDIR" ]; then
echo "$INSTALLDIR does not exist"
exit 1
fi

echo "--> Cleaning up image file..."
$SCRIPTSDIR/09_cleanup.sh

echo "--> Compacting image file..."
/sbin/fstrim -v "$INSTALLDIR"
| true |
4dc426e42e661ad447653da9433adea49eaf0bf8 | Shell | fork-while-fork/PKGBUILDs | /kmscon/PKGBUILD | UTF-8 | 1,451 | 3.296875 | 3 | [] | no_license | __gitroot="git://github.com/dvdhrm/kmscon.git"
__gitname="kmscon.git"
__gitcommit="master"
pkgname=kmscon
pkgver=5+git
pkgrel=1
pkgdesc="Linux KMS/DRM based virtual console emulator"
url="https://github.com/dvdhrm/kmscon"
license=(custom:MIT)
arch=(i686 x86_64)
depends=(libgles libegl dbus-core systemd libxkbcommon pango)
makedepends=(git)
options=(!libtool)
build() {
_git_setup
cd "$srcdir/$pkgname-$pkgver"
./autogen.sh --prefix=/usr --sysconfdir=/etc \
--disable-wlterm \
--enable-systemd \
--enable-udev \
--enable-hotplug \
--disable-fbdev \
--disable-drm \
--enable-gles2 \
--disable-f8x16 \
--disable-freetype2 \
--enable-pango
make
}
package() {
cd "$srcdir/$pkgname-$pkgver"
make DESTDIR="$pkgdir" install
install -D "docs/kmscon@.service" "$pkgdir/usr/lib/systemd/system/kmscon@.service"
install -D "docs/kmscon.service" "$pkgdir/usr/lib/systemd/system/kmscon.service"
install -D "COPYING" "$pkgdir/usr/share/licenses/kmscon/custom:MIT"
}
_git_setup() {
cd "$SRCDEST"
if [[ ! -d $__gitname ]]; then
msg2 "Cloning git repository"
git clone --mirror "$__gitroot" "$__gitname"
else
msg2 "Updating git repository"
cd "$__gitname"
git fetch
fi
cd "$srcdir"
if [[ -d $pkgname-$pkgver ]]; then
msg2 "Removing previous build tree"
rm -fr "$pkgname-$pkgver"
fi
msg2 "Creating fresh build tree"
git clone --depth=1 -b "$__gitcommit" "file://$SRCDEST/$__gitname" "$pkgname-$pkgver"
}
| true |
c5016a62ea130991461ec5096b0015034d28dca9 | Shell | Jodes81/runi-tjc | /tjc-uninstall | UTF-8 | 1,814 | 2.890625 | 3 | [] | no_license | #!/bin/bash
#----------------------------------------------------------------
# ----- NOTE: ------
# Just as `install` doesn't install this repo, this (`tjc-uninstall`) does
# not uninstall itself; it only undoes what `install` did. This whole repository
# is actually itself installed by the files created by patcher within the installation
# it creates.
# see ~/installer/on-first-login.sh
# or ~/installer/patcher/patches/config/files/root/installer/on-first-login.sh
#----------------------------------------------------------------
#----------------------------------------------------------------
# ACTIONS of on-first-login that **should** be undone. (Only one!!)
# ~/installer/tjc/install
#----------------------------------------------------------------
#----------------------------------------------------------------
# ---- ACTIONS DONE BY `install` (TO UNDO) ----
# Basically whatever `install` does, but in reverse.
#----------------------------------------------------------------
remove-repo ~/tjc/core
~/tjc/core/setdown
rm -rf ~/tjc/core
remove-repo ~/play
~/play/setdown
rm -rf ~/play
remove-repo ~/utils/metrics
~/utils/metrics/setdown
rm -rf ~/utils/metrics
remove-repo ~/tjc/autoshutdown
~/tjc/autoshutdown/setdown
rm -rf ~/tjc/autoshutdown
rm -rf ~/lib/WiringPi
remove-repo ~/utils/misc
~/utils/misc/setdown
rm -rf ~/utils/misc
remove-repo ~/utils/installer
~/utils/installer/setdown
rm -rf ~/utils/installer
remove-repo ~/installer/patcher
~/installer/patcher/bin/setdown
rm -rf ~/installer/patcher
rm -rf ~/utils
rm -rf ~/lib
rm -rf ~/tjc
rm ~/.raspberrypi-ua-netinst.log
echo .
echo "======================================================="
echo "======= Finished Uninstalling RUNI-TJC ================"
echo "======================================================="
| true |
dacbf6a33c757fdc486ada8ad14b18536d2462ed | Shell | chimay/scripts | /zsh/fzf-tmux-history.zsh | UTF-8 | 328 | 3.15625 | 3 | [] | no_license | #!/usr/bin/env zsh
# Browse previously recorded tmux commands with fzf and run the selection.
# With -s, type the command into the current pane instead of executing it.

[[ $TERM = tmux* ]] || {
echo "Not in tmux session"
echo
exit 0
}

fichier=~/racine/hist/fzf/tmux-history

# Deduplicate the history file in place (sort -u == sort | uniq).
sort -u "$fichier" | sponge "$fichier"

commande=$(fzf < "$fichier")

# Nothing picked (fzf aborted) -> nothing to do. ($#var is zsh string length.)
(( $#commande == 0 )) && exit 0

# Fix: the original used `[ $# > 0 -a x$1 = x-s ]`; inside `[ ]` an unquoted
# `>` is a shell redirection (it created a file named "0") and the test
# degenerated. Use [[ ]] with -gt and a plain comparison instead.
if [[ $# -gt 0 && $1 == -s ]]
then
tmux send-keys " tmux $commande "
else
tmux $commande
fi
| true |
b6346a6cc1d0c60a7f4650d0f88390f0b3c7666f | Shell | yimingpeng/primal_dual_baseline | /single_scripts/ppo_linear/ppo_linear_InvertedPendulumSwingup.sh | UTF-8 | 272 | 2.59375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
experimentName="baselines"
pyName="run_pybullet.py"
cd ../../$experimentName/ppo_linear/
for i in {0..5}
do
( python $pyName --env InvertedPendulumSwingupBulletEnv-v0 --seed $i &> InvertedPendulumSwingup_"$i".out)
echo "Complete the process $i"
done | true |
3b8a7d5d4cd2718b901bbbc0909768275e043c1f | Shell | rebelthor/bucketlist | /app/docker-entrypoint.sh | UTF-8 | 193 | 2.546875 | 3 | [] | no_license | #!/bin/bash
set -e

export FLASK_APP=app

# No DATABASE_URL means no external database was configured, so we are
# running against the bundled local SQLite file and have to create and
# upgrade the schema ourselves before serving.
if [[ -z "${DATABASE_URL}" ]]; then
    flask db init
    flask db migrate
    flask db upgrade
fi

# exec so flask replaces this shell as PID 1 and receives container
# signals (e.g. SIGTERM from `docker stop`) directly instead of the
# shell swallowing them.
exec flask run --host=0.0.0.0
d83c0b1767a7360042dc34b6f8443b295225aac1 | Shell | hacone/hc_temporary | /34samples-scripts/filter_centromeric.sh | UTF-8 | 3,225 | 3.578125 | 4 | [] | no_license | # export SQUEAKR_DIR=../squeakr
# export FASTQ_DIR=Fastq

## filterin-centromeres.sh <sample-name>.fq.fofn
# <sample-name>.fq.fofn : paths to fq or fq.gz files for the sample, separated by the newlines.
# this would create centromeric/<sample-name>/ and FASTA files for centromeric reads.

# Locations of the pipeline binaries (the read_squeakr build of squeakr-count).
export HCBIN=/work2/hacone/2018/human_centromeres/
export SQBIN=/work2/hacone/2018/human_centromeres/read_squeakr
export FILT=$SQBIN/squeakr-count

# shared parameters for squeakr: values passed to its -k and -s flags
export K=6 ; export S=20
# Reference CQF built from the alpha-satellite monomer set; see the
# commented make_ref_cqf below for how it was produced.
export MONS_CQF=$HCBIN/resource/cqf-refs/Hum14AlpMon.K6S20.ser

#make_ref_cqf() {
# NOTE: executed on ax02
#${SQUEAKR_DIR}/squeakr-count -f -k $K -s $S -t 1 -o ./ ../resource/monomers/Hum14AlpMon.fq
# then it was moved to $MONS_CQF
#}
# Run squeakr-count on one read file against the monomer reference CQF
# and keep the centromeric reads.
#   $1 - input reads (fq/fa, optionally .gz)
#   $2 - output directory
#   $3 - optional literal "dist": verbose mode, only report statistics
# Exported (below) so the xargs-spawned bash subshells can call it.
filter_centromeric() {

	READS_FQ=$1 ; OUTDIR=$2

	# squeakr-count takes -g for gzipped input, -f for plain files.
	if [[ $READS_FQ =~ \.gz$ ]]; then
		TF="-g"
	else
		TF="-f"
	fi
	# TODO: handle .zip file

	# Derive the output name: strip compression/format suffixes, then
	# append .centro.fa
	OUT=${READS_FQ%%.gz}
	OUT=${OUT%%.fastq}; OUT=${OUT%%.fq}
	OUT=${OUT%%.fasta}; OUT=${OUT%%.fa}
	OUT=${OUT}.centro.fa

	## This is the call for read-squeakr
	if [[ $3 == "dist" ]]; then
		# verbose mode to check distribution; the reads themselves are discarded
		echo "here-vb; OUT=$OUT; READS_FQ=$READS_FQ"
		${FILT} $TF -k $K -s $S -r ${MONS_CQF} -t 1 -u 200 -v 1 -o $OUTDIR ${READS_FQ} > /dev/null
	else
		# TODO: check here whether the result is already exsited
		echo "here2; OUT=$OUT; READS_FQ=$READS_FQ"
		# Matching reads arrive on stdout; compress them and move the
		# archive into the output directory.
		${FILT} $TF -k $K -s $S -r ${MONS_CQF} -t 1 -u 200 -o $OUTDIR ${READS_FQ} > $OUT \
			&& gzip $OUT \
			&& mv $OUT.gz $OUTDIR
	fi

	echo "filter done for $READS_FQ"
}; export -f filter_centromeric
#make_ref_cqf
#echo "Reference CQF generated"

# Main driver. $1 is <sample>.fq.fofn (one read-file path per line);
# optional $2 == "dist" runs the distribution mode on the first 10 files.
FQ_FOFN=$1
SAMPLE=${FQ_FOFN%%.fq.fofn}
OUTDIR=$(pwd)/filtered/$SAMPLE
mkdir -p $OUTDIR

if [[ $2 == "dist" ]]; then
  # Distribution check: 10 files, 10 in parallel. Merge the per-file
  # .refip tables, keep rows whose 3rd column is >= 1000, and sort by
  # the 4th column (descending, numeric, byte-wise locale).
  head -n 10 $FQ_FOFN | xargs -P 10 -I % bash -c "filter_centromeric % $OUTDIR dist"
  cat $OUTDIR/*.refip > .tmp.${SAMPLE}.10runs.refip
  cut -f3,4 .tmp.${SAMPLE}.10runs.refip | gawk '$1>=1000' \
  | LC_ALL=C sort -k2,2nr > $OUTDIR/${SAMPLE}.10runs.refip
  rm .tmp.${SAMPLE}.10runs.refip $OUTDIR/*.refip
else
  # Filter every listed file (12 in parallel), then record seqkit stats
  # for all centromeric reads and again for only those >= 1 kb.
  if [[ ! -e $OUTDIR/${SAMPLE}.stats ]]; then
    echo "run filter & stats..."
    cat $FQ_FOFN | xargs -P 12 -I % bash -c "filter_centromeric % $OUTDIR"
    zcat $OUTDIR/*.gz | seqkit stats -a > $OUTDIR/${SAMPLE}.stats
    zcat $OUTDIR/*.gz | seqkit seq -m 1000 | seqkit stats -a >> $OUTDIR/${SAMPLE}.stats
  fi
  if [[ ! -e $OUTDIR/split ]]; then
    ## split into 10Mb chunks in $OUTDIR/split
    # NOTE(review): assumes the average read length sits in row 4,
    # column 7 of the seqkit stats file -- confirm if seqkit's output
    # format changes.
    AVG_RL=$( gawk 'NR==4{print $7}' $OUTDIR/${SAMPLE}.stats | sed -e "s/,//g" )
    NSEQ=$( echo "scale=0; 10 * 1000 * 1000 / $AVG_RL" | bc -l )
    echo $AVG_RL" bps avg. => splitting into "$NSEQ" seqs / file"
    CDIR=$(pwd)
    cd $OUTDIR
    seqkit seq -m 1000 *.fa.gz | seqkit split -s $NSEQ
    mv stdin.split split
    # seqkit names the chunks after its stdin input; rename them to the
    # sample name.
    for f in $(ls split/*.fasta); do
      nf=$(echo $f | sed -e "s/stdin/$SAMPLE/")
      mv $f $nf
    done
    find split/ | grep .fasta | xargs -P 12 gzip
    cd $CDIR
  fi
fi

echo "done"
| true |
9cca000b202f9bfdf127a03665e9a1c5eac45036 | Shell | WingoD/.files | /common/bin/amssh | UTF-8 | 110 | 2.875 | 3 | [] | no_license | #!/bin/bash
# Run the given command over ssh on every host matched by an ansible
# host pattern.
#
# Usage: amssh <host-pattern> <command ...>
hostlist=$1
shift

# Quoting the pattern matters: patterns such as "web*" must reach
# ansible verbatim instead of being globbed against the local directory.
for host in $(ansible "$hostlist" --list-hosts)
do
	echo "$host"
	ssh -t "$host" "$@"
done
| true |
9c8210c9f84553529ffb27ab7b35adcec938d707 | Shell | tothegump/LKI | /.profile | UTF-8 | 2,275 | 2.65625 | 3 | [] | no_license | # enable vi mode
# Use vi key bindings on the command line.
set -o vi

# general aliases
alias ..='cd ..'
# npm against the Taobao mirror, with its own cache/config so the
# regular npm setup stays untouched.
alias cnpm="npm --registry=https://registry.npm.taobao.org --cache=${HOME}/.npm/.cache/cnpm --disturl=https://npm.taobao.org/dist --userconfig=${HOME}/.cnpmrc"
alias la='/bin/ls -ah --color'
alias ll='/bin/ls -lh --color'
alias ls='/bin/ls --color'
alias please='sudo'
alias py="winpty python"
alias reload="source ~/.profile"
alias ta='tmux attach'
alias vi='vim'

# git aliases
alias g='git'
# Prune stale remote-tracking refs for every remote, then garbage-collect.
alias gc='git remote show | xargs -I{} git remote prune {} && git gc'
alias it="git"
alias lg='git logg'
alias qgit='git'

# python aliases
alias pm="python manage.py"
alias sb="source env/bin/activate"
alias sb2="source env2/bin/activate"
alias sb3="source env3/bin/activate"

# pipenv aliases
alias pr="pipenv run"
alias pf="pipenv run fab"
alias pinv="pipenv run inv"
alias ppm="pipenv run python manage.py"

# vagrant aliases
alias vgst="vagrant global-status"
alias vr="vagrant"
alias vst="vagrant status"
alias vu="vagrant up"

# docker aliases from tcnksm/docker-alias
alias dc='docker-compose'
alias dl="docker ps -l -q"
alias dps="docker ps"
alias dpa="docker ps -a"
alias di="docker images"
alias dip="docker inspect --format '{{ .NetworkSettings.IPAddress }}'"
alias dkd="docker run -d -P"
alias dki="docker run -i -t -P"
alias dex="docker exec -i -t"
# List all docker-related aliases defined in this file.
dalias() { alias | grep 'docker' | sed "s/^\([^=]*\)=\(.*\)/\1 => \2/"| sed "s/['|\']//g" | sort; }
# Stop all containers.
dstop() { docker stop $(docker ps -a -q); }
# Remove a container by name/id.
drm() { docker rm $1; }
# Stop, then remove a container.
drmf() { docker stop $1; docker rm $1; }
# Remove all images.
dri() { docker rmi $(docker images -q); }
# Build an image tagged $1 from the current directory.
dbu() { docker build -t=$1 .; }
# Open a bash shell in the container whose name matches $1.
dbash() { docker exec -it $(docker ps -aqf "name=$1") bash; }

# --- --- --- #

# set path
export PATH=~/.virtualenvs/py35/Scripts:~/.lki/scripts:${PATH}

# my aliases
# ssh to a target host through the "gate" jump host.
alias gsh='ssh -t gate ssh -t'
alias gush='ssh -t gate ssh -l ubuntu -t'
alias ush='ssh -t ga ssh -l ubuntu -t'
alias ish='ssh -t igate ssh -t'
alias ftest='gsh forseti-test-1'
alias ftest2='gsh forseti-test-2'
alias fprod='gsh forseti-prod-1'
alias fprod2='gsh forseti-prod-2'
# Open a Django shell inside the forseti container on host f1, reached
# through the gate jump host. $1 optionally overrides the container name.
fsh() {
	IMAGE=${1}
	ssh -t gate ssh -l zaihui -t f1 docker exec -e ENV_TEST=1 -it ${IMAGE:="forseti_uwsgi"} pipenv run python manage.py shell
}
alias sv="cd /c/code/aria2"
alias svf="cd /c/code/aria2/stdev-forseti-be"
| true |
083ccc22a14e3a00fe213b86d458a0ab3cfbea20 | Shell | tuyen81/self_learning | /shell/running_package/testcases/coreutils/tee.sh | UTF-8 | 861 | 3.6875 | 4 | [] | no_license | #!/bin/bash
#==============================================================================
# DESCRIPTION: Test script for the 'tee' command.
#              'tee' duplicates the output of 'ls /usr': one copy goes to
#              sample.txt, the other (via stdout) to ${log_file}. The two
#              copies must be identical for the test to pass.
# Expects in the environment: log_file, OUTPUT_DIR, RESULT_FILE,
#                             test_passed_text, test_failed_text
#==============================================================================

# Remove sample.txt left over from a previous run
rm -f sample.txt

# Read contents of "ls /usr" command and write to sample.txt file
ls /usr/ | tee sample.txt > "${log_file}" 2>&1

# Compare the two copies. Rely on diff's exit status rather than the size
# of its output: previously a diff *error* (e.g. a missing file) produced
# an empty output file and was wrongly reported as a pass.
if diff -- "${log_file}" sample.txt > "${OUTPUT_DIR}/coreutils/tmp.txt" 2>&1
then
    echo "${test_passed_text}" >> "${RESULT_FILE}"
else
    echo "${test_failed_text}" >> "${RESULT_FILE}"
fi

# Remove the temporary files used by this test
rm "${OUTPUT_DIR}/coreutils/tmp.txt" sample.txt
| true |
afb8e6e27b1ab1e07a7df9ca195590b728834d86 | Shell | lisuke/repo | /archlinuxcn/snapraid/PKGBUILD | UTF-8 | 1,394 | 2.71875 | 3 | [] | no_license | # Maintainer: Kyle McNally <kyle@kmcnally.net>
# Contributor: John Williams <jwilliams4200 liamg reverse&remove moc>

# Arch Linux PKGBUILD for snapraid (parity-based snapshot RAID tool).
pkgname=snapraid
pkgver=12.2
pkgrel=1
pkgdesc="tool for Snapshot RAID: generate parity files, maintain checksums on data, restore lost data"
arch=('x86_64' 'i686')
url="http://www.snapraid.it/"
license=('GPL3')
depends=('libutil-linux' 'glibc')
source=("https://github.com/amadvance/snapraid/releases/download/v${pkgver}/snapraid-${pkgver}.tar.gz")
sha256sums=('9d30993aef7fd390369dcaf422ac35f3990e8c91f0fb26151f5b84ccb73d3e01')

# Standard autotools build.
build() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  ./configure --prefix="/usr"
  make
}

# Run the upstream self-tests.
check() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  make check
}

# Install the program plus the example config and the upstream docs.
package() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  make DESTDIR="${pkgdir}/" prefix="/usr" mandir="/usr/share/man" install
  # documentation
  install -D -m644 snapraid.conf.example ${pkgdir}/usr/share/${pkgname}/snapraid.conf.example
  install -D -m644 AUTHORS ${pkgdir}/usr/share/doc/${pkgname}/AUTHORS
  install -D -m644 COPYING ${pkgdir}/usr/share/doc/${pkgname}/COPYING
  install -D -m644 HISTORY ${pkgdir}/usr/share/doc/${pkgname}/HISTORY
  install -D -m644 INSTALL ${pkgdir}/usr/share/doc/${pkgname}/INSTALL
  install -D -m644 README ${pkgdir}/usr/share/doc/${pkgname}/README
  install -D -m644 CHECK ${pkgdir}/usr/share/doc/${pkgname}/CHECK
  install -D -m644 TODO ${pkgdir}/usr/share/doc/${pkgname}/TODO
}
| true |
9d3dcd4360774148b28368c20ff9cc776416457c | Shell | fapaul/mocc-exercises | /exercise_1/scripts/measure-cpu.sh | UTF-8 | 218 | 2.6875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
source ./helper.sh
source ./linpack.sh

# Run the linpack benchmark once and print its result line.
measurecpu()
{
    # Measures kflops
    # Result is taken from the last line of linpack's output.
    # NOTE(review): ">&1" is a no-op on stdout; "2>&1" (merging stderr)
    # may have been intended -- confirm against linpack's output streams.
    result=$(linpack >&1 | tail -1)
    echo $result
}

# `run` -- presumably defined in helper.sh -- executes the measurement
# and records it in cpu.csv.
run measurecpu "cpu.csv"
| true |
179b6843aee569cb25a962995b5e888ed29766fd | Shell | atrace/advent_of_code | /setup_day_dir.sh | UTF-8 | 1,052 | 4.6875 | 5 | [] | no_license | #!/bin/bash
# Help text; $0 expands once, at definition time.
# BUGFIX: the valid range is 1-25 (enforced below), not 0-25 as the help
# previously claimed.
USAGE="
Usage: $0 [-h] [-a] [n]
Sets up directories for a given day or all days of advent of code. Creates a directory day[n] in current dir with empty files for associated text input and python script.
  -h  show this help message.
  -a  set up directories for all remaining days (not already existing).
  n   input number [1-25] to setup directory for day n.
"
# Create directory $1 containing a starter python script (pre-wired to
# read its input file) and an empty input file.
# The argument is quoted throughout; the old unquoted form broke on
# names containing spaces or glob characters.
make_dir () {
    mkdir -- "$1"
    echo "input_file = \"$1/$1_input.txt\"" > "$1/$1_script.py"
    echo >> "$1/$1_input.txt"
}
# Create the directory for day $1 unless it already exists.
# Expansions are quoted; the old unquoted tests broke on odd values.
setup_dir () {
    DIR="day$1"
    if [ -d "$DIR" ]; then
        echo "Directory $DIR already exists"
    else
        make_dir "$DIR"
        echo "Successfully made directory $DIR"
    fi
}
# Argument dispatch. Previously a missing argument crashed the `[` tests
# ("unary operator expected"); treat no arguments as a request for help.
if [ $# -eq 0 ] || [ "$1" = '-h' ]; then
    echo "$USAGE"
elif [ "$1" = '-a' ]; then
    COUNT=1
    while [ "$COUNT" -le 25 ]; do
        setup_dir "$COUNT"
        ((COUNT++))
    done
    echo "Made all remaining directories"
elif [ "$1" -le 25 ] 2>/dev/null && [ "$1" -gt 0 ]; then
    # Numeric argument in range. A non-numeric argument fails the test
    # (its stderr is suppressed) and falls through to the hint below.
    setup_dir "$1"
else
    echo "Please supply a number in the range 1-25. For help please use $0 -h"
fi
| true |
392583a2cc7a6577a26ac58c1b04714fddc6e86e | Shell | nadroumila/docker-compose-repo | /scripts/prepare_env.sh | UTF-8 | 341 | 3.15625 | 3 | [] | no_license | #!/bin/bash
# Start the MySQL 5.7 docker-compose stack, but only when no container
# whose name matches "mysql" is currently running.
if [ ! "$(docker ps -q -f name=mysql)" ]; then
  echo 'does not exit'
  # An old, exited container would clash with the new one: remove it first.
  # NOTE(review): the filter matches any name containing "mysql", but the
  # removal targets "mysql_5.7" specifically -- confirm both agree with
  # the container name produced by docker-compose-mysql57.yml.
  if [ "$(docker ps -aq -f status=exited -f name=mysql)" ]; then
    # cleanup
    echo 'is to delete'
    docker rm mysql_5.7
  fi
  # run your container
  echo "is to start"
  docker-compose -f docker-compose-mysql57.yml up -d
fi
| true |
771dd52179a62b59b62ea248c387652574733619 | Shell | material-motion/tools | /contributor_tools/tools/installers/brew.installer | UTF-8 | 1,065 | 3.234375 | 3 | [
"CC-BY-4.0"
] | permissive | #!/bin/bash
#
# Copyright 2016-present The Material Motion Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Install homebrew.
need_install=false

# `brew -v` exits non-zero when brew is missing from the PATH; the
# captured version string itself is not used further.
brew_version=$(brew -v)
if [[ $? -ne 0 ]]; then
  echo "brew is not installed"
  need_install=true
fi

if [[ "$need_install" == true ]]; then
  echo "Installing brew..."
  /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi

echo "Requesting sudo to fix permissions in brew's install directory."
sudo /usr/sbin/chown -R "$USER":admin /usr/local
| true |
f76ed0ff38616dfed4e0ed3de7210d99db6c4bc4 | Shell | rohituppalapati/kylo-source | /install/setup/nifi/create-symbolic-links.sh | UTF-8 | 4,708 | 3.71875 | 4 | [
"WTFPL",
"CDDL-1.0",
"MIT",
"CC0-1.0",
"EPL-1.0",
"PostgreSQL",
"BSD-3-Clause",
"LGPL-2.1-only",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-protobuf",
"OFL-1.1"
] | permissive | #!/bin/bash
# Symlink the Kylo NiFi nars and Kylo Spark jars shipped under
# $NIFI_HOME/data/lib into the active NiFi install ($NIFI_HOME/current),
# choosing the variants that match the installed NiFi version and the
# local Spark major version.
NIFI_HOME=$1
NIFI_USER=$2
NIFI_GROUP=$3

if [ $# -ne 3 ]
then
    echo "Wrong number of arguments. You must pass in the NIFI_HOME location, NIFI_USER, and NIFI_GROUP"
    exit 1
fi

# These nars are linked regardless of the NiFi version.
ln -f -s $NIFI_HOME/data/lib/kylo-nifi-elasticsearch-v1-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-elasticsearch-nar.nar
ln -f -s $NIFI_HOME/data/lib/kylo-nifi-teradata-tdch-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-teradata-tdch-nar.nar

##find the nifi version to copy the correct nar versions
# The version string is whatever follows "nifi-framework-api-" in the
# framework jar's path.
framework_name=$(find $NIFI_HOME/current/lib/ -name "nifi-framework-api*.jar")
prefix="$NIFI_HOME/current/lib/nifi-framework-api-"
len=${#prefix}
ver=${framework_name:$len}

if [[ $ver == 1.0* ]] || [[ $ver == 1.1* ]] ;
then
    echo "Creating symlinks for NiFi version $ver compatible nars"
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-provenance-repo-v1-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-provenance-repo-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-core-service-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-core-service-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-standard-services-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-standard-services-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-core-v1-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-core-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-spark-v1-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-spark-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-spark-service-v1-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-spark-service-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-hadoop-v1-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-hadoop-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-hadoop-service-v1-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-hadoop-service-nar.nar
elif [[ $ver == 1.2* ]] || [[ $ver == 1.3* ]] || [[ $ver == 1.4* ]] || [[ $ver == 1.5* ]] || [[ $ver == 1.6* ]] ;
then
    echo "Creating symlinks for NiFi version $ver compatible nars"
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-provenance-repo-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-provenance-repo-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-core-service-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-core-service-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-standard-services-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-standard-services-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-core-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-core-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-spark-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-spark-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-spark-service-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-spark-service-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-hadoop-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-hadoop-nar.nar
    ln -f -s $NIFI_HOME/data/lib/kylo-nifi-hadoop-service-v1.2-nar-*.nar $NIFI_HOME/current/lib/kylo-nifi-hadoop-service-nar.nar
fi
##end nars

# Resolve the Spark major version: honour an explicit SPARK_PROFILE
# ("spark-v1"/"spark-v2"), otherwise probe spark-submit on the PATH.
if [ -z ${SPARK_PROFILE} ]; then
    SPARK_SUBMIT=$(which spark-submit)
    if [ -z ${SPARK_SUBMIT} ]; then
        >&2 echo "ERROR: spark-submit not on path. Has spark been installed?"
        exit 1
    fi
    if ! [ -x ${SPARK_SUBMIT} ]; then
        >&2 echo "ERROR: spark-submit found but not suitable for execution. Has spark been installed?"
        exit 1
    fi
    # First digit after "version " in spark-submit's banner is the major version.
    SPARK_PROFILE="spark-v"$(spark-submit --version 2>&1 | grep -o "version [0-9]" | grep -o "[0-9]" | head -1)
else
    if ! [[ $SPARK_PROFILE =~ spark-v[0-9] ]]; then
        >&2 echo "ERROR: variable SPARK_PROFILE not usable, expected it to be like spark-v1 or spark-v2 but found '$SPARK_PROFILE'"
        exit 1
    fi
fi

# Spark-side jars, matched to the detected profile.
ln -f -s $NIFI_HOME/data/lib/app/kylo-spark-validate-cleanse-${SPARK_PROFILE}-*-jar-with-dependencies.jar $NIFI_HOME/current/lib/app/kylo-spark-validate-cleanse-jar-with-dependencies.jar
ln -f -s $NIFI_HOME/data/lib/app/kylo-spark-job-profiler-${SPARK_PROFILE}-*-jar-with-dependencies.jar $NIFI_HOME/current/lib/app/kylo-spark-job-profiler-jar-with-dependencies.jar
ln -f -s $NIFI_HOME/data/lib/app/kylo-spark-interpreter-${SPARK_PROFILE}-*-jar-with-dependencies.jar $NIFI_HOME/current/lib/app/kylo-spark-interpreter-jar-with-dependencies.jar
ln -f -s $NIFI_HOME/data/lib/app/kylo-spark-merge-table-${SPARK_PROFILE}-*-jar-with-dependencies.jar $NIFI_HOME/current/lib/app/kylo-spark-merge-table-jar-with-dependencies.jar
ln -f -s $NIFI_HOME/data/lib/app/kylo-spark-multi-exec-${SPARK_PROFILE}-*-jar-with-dependencies.jar $NIFI_HOME/current/lib/app/kylo-spark-multi-exec-jar-with-dependencies.jar

# Make the freshly-created links owned by the NiFi service account.
chown -h $NIFI_USER:$NIFI_GROUP $NIFI_HOME/current/lib/kylo*.nar
chown -h $NIFI_USER:$NIFI_GROUP $NIFI_HOME/current/lib/app/kylo*.jar
| true |
e0da0691233aabf2c1413dfebfd4a4370cab3148 | Shell | cferr/xrandr-funcs | /xrandr-funcs | UTF-8 | 12,979 | 3.8125 | 4 | [] | no_license | #!/bin/bash
# This script manages displays for X11 environments.
# It is meant to perform the most common tasks with displays: presentation mode
# (e.g. when in conference / meeting and you want two different displays),
# clones (at both the laptop's panel and the external display's preferred
# resolutions, depending on who should be able to read text) and single-display
# modes.
#
# This script requires:
# - xrandr
# - pulseaudio (it switches audio to HDMI / laptop, useful for TVs)
# - zenity (to show status notifications)
#
# Calling "xrandr_funcs round_robin" rotates between all the modes.
# "xrandr_funcs edp" sets the display to laptop panel (often eDP =
# Embedded DisplayPort)
# "xrandr_funcs hdmi" sets the display to external only
# "xrandr_funcs clone_edp" (resp. "clone_hdmi") clones both displays, at the
# laptop's (resp. external display's) preferred resolution
# "xrandr_funcs present" creates two displays: laptop on the left, external
# display on the right
#
# Setting the display names is done through environment variables.
# If you want the round-robin to be assigned a keyboard shortcut, you may want
# to add to your .bashrc (adapt to your displays):
# export INTERNAL_MONITOR="eDP1"
#
# If you want to restrict this script to one output port, you may set:
# export EXTERNAL_MONITOR="HDMI1"
#
# In order to find out the display names, use the "xrandr" command.
# Cache the current xrandr state in XRANDR_CURRENT and make sure
# EXTERNAL_MONITOR is populated (auto-detected unless preset by the user).
set_xrandr_var() {
    XRANDR_CURRENT="$(xrandr --current)"
    if [ "$EXTERNAL_MONITOR" == "" ]; then
        detect_external
    fi;
}

# Scan the cached xrandr output for a connected output that is not the
# internal panel and store its name in EXTERNAL_MONITOR (empty if none).
detect_external() {
    AVAIL_RES_LIST=$XRANDR_CURRENT
    CURRENT_MON=""
    EXTERNAL_MONITOR=$(printf '%s\n' "$AVAIL_RES_LIST" | ( while read -r line; do
        # Lines mentioning "connected" introduce an output...
        IS_MON="$(echo $line | grep -c "connected")";
        if [ $IS_MON == 1 ]; then
            # ...whose name is the first word; " connected" (with the
            # leading space) does not match "disconnected" lines.
            CURRENT_MON="$(echo $line | grep -oE ".* connected" | sed "s/ .*//g")";
            if [ "$CURRENT_MON" != "$INTERNAL_MONITOR" ]; then
                echo -n "$CURRENT_MON"
            fi;
        fi;
    done ))
}
# Print the current resolution ("WxH") of the named output, or nothing
# if the output is not listed in the cached xrandr state.
current_mon_res() {
    AVAIL_RES_LIST=$XRANDR_CURRENT
    CURRENT_MON=""
    printf '%s\n' "$AVAIL_RES_LIST" | ( while read -r line; do
        IS_MON="$(echo $line | grep -c "connected")";
        if [ $IS_MON == 1 ]; then
            # Output header line: first word is the output's name.
            CURRENT_MON="$(echo $line | grep -oE ".* (dis)?connected" | sed "s/ .*//g")";
            if [ "$CURRENT_MON" == "$1" ]; then
                # echo "Found $line"
                # First "WxH" token on the header line is the active mode.
                CURRENT_RES="$(echo $line | grep -o "[0-9][0-9]*x[0-9][0-9]*")"
                echo $CURRENT_RES
            fi;
        fi;
    done )
}

# Find out whether a monitor is active or not: prints 1 if any mode line
# listed under the named output carries the "*" (current mode) marker,
# otherwise prints 0.
is_active() {
    AVAIL_RES_LIST=$XRANDR_CURRENT
    CURRENT_MON=""
    RES=$(printf '%s\n' "$AVAIL_RES_LIST" | ( while read -r line; do
        IS_MON="$(echo $line | grep -c "connected")";
        if [ $IS_MON == 1 ]; then
            # Remember which output the following mode lines belong to.
            CURRENT_MON="$(echo $line | grep -oE ".* (dis)?connected" | sed "s/ .*//g")";
        elif [ "$CURRENT_MON" == "$1" ]; then
            STAR="$(echo $line | grep -c "*")"
            if [ $STAR == 1 ]; then
                echo 1;
            fi;
        fi;
    done ))
    if [ "$RES" == "" ]; then
        echo 0;
    else
        echo 1;
    fi;
}

# Find the preferred resolution (often the maximal one at native monitor DPI)
# for a given monitor: prints the "WxH" of the mode line flagged with "+".
pref_res() {
    AVAIL_RES_LIST=$XRANDR_CURRENT
    CURRENT_MON=""
    printf '%s\n' "$AVAIL_RES_LIST" | ( while read -r line; do
        IS_MON="$(echo $line | grep -c "connected")";
        if [ $IS_MON == 1 ]; then
            CURRENT_MON="$(echo $line | grep -oE ".* (dis)?connected" | sed "s/ .*//g")";
        elif [ "$CURRENT_MON" == "$1" ]; then
            # "+" is xrandr's marker for the output's preferred mode.
            IS_PREFERRED="$(echo $line | grep -c "+")"
            if [ $IS_PREFERRED == 1 ]; then
                MON_RES="$(echo $line | grep -oe "[0-9]*x[0-9]*")"
                echo $MON_RES;
            fi;
        fi;
    done )
}
# Switch to external monitor (i.e. HDMI) only.
#   $1 - optional resolution override; defaults to the external
#        monitor's preferred mode.
hdmi() {
    if [ "$EXTERNAL_MONITOR" == "" ]; then
        zenity --notification --text "No monitor connected"
    else
        HDMI_AVAIL="$(echo $XRANDR_CURRENT | grep -c ""$EXTERNAL_MONITOR" connected")"
        # PRIMARY_MONITOR="$(xrandr --listmonitors | grep "*" | grep -m 1 -oE '[^ ]+$')"
        if [ "$1" == "" ]; then
            HDMI_PREF_RES="$(pref_res ""$EXTERNAL_MONITOR"")"
        else
            HDMI_PREF_RES="$1"
        fi
        if [ $HDMI_AVAIL == 1 ]; then
            xrandr -s "$HDMI_PREF_RES"
            # External becomes the only (primary) output; internal panel off.
            xrandr --output "$INTERNAL_MONITOR" --off --output "$EXTERNAL_MONITOR" --auto --primary --transform none --mode "$HDMI_PREF_RES"
            # audio_hdmi
            zenity --notification --text "$EXTERNAL_MONITOR only"
        else
            echo "$EXTERNAL_MONITOR not available";
        fi
    fi;
}

# Switch to integrated panel (i.e. eDP1) only.
#   $1 - optional resolution override; defaults to the panel's
#        preferred mode.
edp() {
    # PRIMARY_MONITOR="$(xrandr --listmonitors | grep "*" | grep -m 1 -oE '[^ ]+$')"
    if [ "$1" == "" ]; then
        EDP_PREF_RES="$(pref_res ""$INTERNAL_MONITOR"")"
    else
        EDP_PREF_RES="$1"
    fi
    if [ "$EXTERNAL_MONITOR" == "" ]; then
        xrandr -s "$EDP_PREF_RES"
        xrandr --output "$INTERNAL_MONITOR" --auto --primary --transform none --mode "$EDP_PREF_RES"
    else
        xrandr -s "$EDP_PREF_RES"
        # Also switch the external output off.
        xrandr --output "$EXTERNAL_MONITOR" --off --output "$INTERNAL_MONITOR" --auto --primary --transform none --mode "$EDP_PREF_RES"
    fi;
    # audio_edp
    zenity --notification --text "Internal display ($INTERNAL_MONITOR) only"
}
# Automatically pick an output: use the external monitor when one was
# detected, otherwise fall back to the integrated panel.
automon() {
    if [ -n "$EXTERNAL_MONITOR" ]; then
        hdmi
    else
        edp
    fi
}
# Display a cloned image on both integrated and external monitors, the primary
# being the external one (and thus set to its native resolution unless given as
# an argument). The internal panel is scaled to reproduce the same picture.
clone_hdmi() {
    if [ "$EXTERNAL_MONITOR" == "" ]; then
        zenity --notification --text "No monitor connected"
    else
        HDMI_AVAIL="$(echo $XRANDR_CURRENT | grep -c ""$EXTERNAL_MONITOR" connected")"
        if [ $HDMI_AVAIL == 1 ]; then
            if [ "$1" == "" ]; then
                HDMI_PREF_RES="$(pref_res ""$EXTERNAL_MONITOR"")"
            else
                HDMI_PREF_RES="$1"
            fi
            # --same-as clones the framebuffer; --scale-from maps the
            # internal panel onto the external monitor's resolution.
            xrandr --output "$EXTERNAL_MONITOR" --auto --primary --mode "$HDMI_PREF_RES" --transform none --output "$INTERNAL_MONITOR" --auto --same-as "$EXTERNAL_MONITOR" --scale-from "$HDMI_PREF_RES"
            # audio_hdmi
            zenity --notification --text "Clone $INTERNAL_MONITOR = $EXTERNAL_MONITOR, with $EXTERNAL_MONITOR's preferred resolution"
        else
            echo "$EXTERNAL_MONITOR not available for cloning"
        fi;
    fi
}

# Same function as above, with the primary display being the integrated panel
# (useful when text must stay readable on the laptop).
clone_edp() {
    if [ "$EXTERNAL_MONITOR" == "" ]; then
        zenity --notification --text "No monitor connected"
    else
        HDMI_AVAIL="$(echo $XRANDR_CURRENT | grep -c ""$EXTERNAL_MONITOR" connected")"
        if [ $HDMI_AVAIL == 1 ]; then
            if [ "$1" == "" ]; then
                EDP_PREF_RES="$(pref_res ""$INTERNAL_MONITOR"")"
            else
                EDP_PREF_RES="$1"
            fi
            # Mirror of clone_hdmi with the roles reversed.
            xrandr --output "$INTERNAL_MONITOR" --auto --primary --mode "$EDP_PREF_RES" --transform none --output "$EXTERNAL_MONITOR" --auto --same-as "$INTERNAL_MONITOR" --scale-from "$EDP_PREF_RES"
            # audio_hdmi
            zenity --notification --text "Clone $INTERNAL_MONITOR = $EXTERNAL_MONITOR, with $INTERNAL_MONITOR's preferred resolution"
        else
            echo "$EXTERNAL_MONITOR not available for cloning"
        fi
    fi
}
# Presentation mode - set external screen to right of current.
#   $1 - optional resolution for the external screen; defaults to its
#        preferred mode.
present() {
    if [ "$EXTERNAL_MONITOR" == "" ]; then
        zenity --notification --text "No monitor connected"
    else
        HDMI_AVAIL="$(echo $XRANDR_CURRENT | grep -c ""$EXTERNAL_MONITOR" connected")"
        if [ $HDMI_AVAIL == 1 ]; then
            # HDMI_PREF_RES="$(pref_res ""$EXTERNAL_MONITOR"")"
            if [ "$1" == "" ]; then
                HDMI_PREF_RES="$(pref_res ""$EXTERNAL_MONITOR"")"
            else
                HDMI_PREF_RES="$1"
            fi
            # Temporarily switch the external output off so the internal
            # panel can be re-configured cleanly first.
            HDMI_ON=$(is_active "$EXTERNAL_MONITOR")
            if [ $HDMI_ON == 1 ]; then
                xrandr --output "$EXTERNAL_MONITOR" --off
            fi;
            EDP_PREF_RES="$(pref_res ""$INTERNAL_MONITOR"")"
            xrandr --output "$INTERNAL_MONITOR" --auto --primary --transform none --mode "$EDP_PREF_RES"
            xrandr --output "$EXTERNAL_MONITOR" --mode "$HDMI_PREF_RES" --right-of "$INTERNAL_MONITOR"
            # audio_edp
            zenity --notification --text "Presentation mode"
        else
            echo "$EXTERNAL_MONITOR not available for cloning"
        fi
    fi
}
# Cycle through the display layouts on successive invocations:
#   eDP only -> Present -> Clone (HDMI res) -> Clone (eDP res) -> HDMI only -> ...
# The current state is inferred from the cached xrandr output: which
# outputs are active and whether each runs at its preferred resolution.
round_robin(){
    if [ "$EXTERNAL_MONITOR" == "" ]; then
        # No external output detected: stay on the internal panel.
        edp
    else
        HDMI_AVAIL="$(echo "$XRANDR_CURRENT" | grep -c ""$EXTERNAL_MONITOR" connected")"
        if [ "$HDMI_AVAIL" == 1 ]; then
            # eDP only > Present > Clone HDMI > Clone eDP > HDMI only
            HDMI_ON=$(is_active "$EXTERNAL_MONITOR")
            EDP_ON=$(is_active "$INTERNAL_MONITOR")
            if [ "$HDMI_ON" == "1" ] && [ "$EDP_ON" == "1" ]; then
                # Both outputs on: we are in Present, Clone-HDMI or
                # Clone-eDP. Distinguish them by which output currently
                # runs at its preferred resolution.
                EDP_PREF_RES="$(pref_res ""$INTERNAL_MONITOR"")"
                HDMI_PREF_RES="$(pref_res ""$EXTERNAL_MONITOR"")"
                EDP_CUR_RES="$(current_mon_res "$INTERNAL_MONITOR")"
                HDMI_CUR_RES="$(current_mon_res "$EXTERNAL_MONITOR")"
                if [ "$EDP_PREF_RES" == "$EDP_CUR_RES" ] && [ "$HDMI_PREF_RES" == "$HDMI_CUR_RES" ]; then
                    if [ "$EDP_PREF_RES" == "$HDMI_PREF_RES" ]; then
                        # Same resolution everywhere: Present and Clone look
                        # alike, so check the external output's position.
                        # BUGFIX: the old code piped the output of `grep -c`
                        # (a count) into `grep -o`, so POS was always empty;
                        # grep the matching line itself, on the properly
                        # quoted (multi-line) xrandr output.
                        POS="$(echo "$XRANDR_CURRENT" | grep ""$EXTERNAL_MONITOR" connected" | grep -o "+[0-9][0-9]*+[0-9]*" | head -n 1)"
                        if [ "$POS" != "+0+0" ]; then
                            # Offset from the origin: side-by-side layout
                            # (Present) - move to clone
                            clone_hdmi
                        else
                            # Same origin: we're in Clone
                            hdmi
                        fi
                    else
                        # Both at their (different) preferred resolutions:
                        # we're in Present
                        clone_hdmi
                    fi
                elif [ "$EDP_PREF_RES" == "$EDP_CUR_RES" ] && [ "$HDMI_PREF_RES" != "$HDMI_CUR_RES" ]; then
                    # We're in Clone eDP
                    hdmi
                else
                    # We're in Clone HDMI
                    clone_edp
                fi
            elif [ "$HDMI_ON" == "0" ] && [ "$EDP_ON" == "1" ]; then
                # We're in eDP only
                present
            else
                # We're in HDMI only
                edp
            fi
        else
            edp
        fi
    fi
}
# Route PulseAudio card 0 to the laptop's analog output.
audio_edp() {
    pactl set-card-profile 0 output:analog-stereo+input:analog-stereo
    # zenity --notification --text "Audio goes to laptop"
}

# Route PulseAudio card 0 to the HDMI output (useful for TVs).
audio_hdmi(){
    pactl set-card-profile 0 output:hdmi-stereo+input:analog-stereo
    # zenity --notification --text "Audio goes to HDMI"
}

# The volume helpers act on the sink pacmd flags with "*" (the default one).
volume_up(){
    pactl set-sink-volume $(pacmd list-sinks | grep "*" | grep -o "[0-9]*") +5%
}

volume_down(){
    pactl set-sink-volume $(pacmd list-sinks | grep "*" | grep -o "[0-9]*") -5%
}

volume_toggleMute(){
    pactl set-sink-mute $(pacmd list-sinks | grep "*" | grep -o "[0-9]*") toggle
}
MODE="$1"

# The internal panel's name cannot be guessed reliably, so require the
# user to configure it.
if [ "$INTERNAL_MONITOR" == "" ]; then
    zenity --width=200 --error --text="Internal monitor variable not defined. Please define INTERNAL_MONITOR variable (e.g. in your .bash_profile file)."
    exit
fi

# No mode on the command line: offer an interactive chooser (only when
# an external monitor is connected).
if [ "$MODE" == "" ]; then
    set_xrandr_var;
    if [ "$EXTERNAL_MONITOR" != "" ]; then
        MODE=$(zenity --list --title "Display mode" --text="Choose display mode" --column "mode-id" --column "mode-text" "edp" "Embedded display ($INTERNAL_MONITOR) only" "hdmi" "External ($EXTERNAL_MONITOR) only" "present" "Presentation, left: $INTERNAL_MONITOR (primary), right: $EXTERNAL_MONITOR" "clone_edp" "Clone, $INTERNAL_MONITOR's resolution" "clone_hdmi" "Clone, $EXTERNAL_MONITOR's resolution" --hide-column=1 --hide-header)
    else
        zenity --notification --text "No monitor connected"
    fi
fi

# Dispatch. Display modes refresh the cached xrandr state first; the
# audio/volume modes do not need it. Remaining arguments are forwarded
# (e.g. an explicit resolution).
case $MODE in
    "pref_res") shift; set_xrandr_var; pref_res $@ ;;
    "hdmi") shift; set_xrandr_var; hdmi $@ ;;
    "edp") shift; set_xrandr_var; edp $@ ;;
    "automon") shift; set_xrandr_var; automon $@ ;;
    "clone_hdmi") shift; set_xrandr_var; clone_hdmi $@ ;;
    "clone_edp") shift; set_xrandr_var; clone_edp $@ ;;
    "present") shift; set_xrandr_var; present $@ ;;
    "round_robin") shift; set_xrandr_var; round_robin $@ ;;
    "is_active") shift; set_xrandr_var; is_active $@ ;;
    "detect_external") shift; set_xrandr_var; detect_external $@ ;;
    "audio_edp") shift; audio_edp $@ ;;
    "audio_hdmi") shift; audio_hdmi $@ ;;
    "volume_up") shift; volume_up $@ ;;
    "volume_down") shift; volume_down $@ ;;
    "volume_toggleMute") shift; volume_toggleMute $@ ;;
esac
| true |
0febed5b6fc128742095fd444b1eb93806b7f94c | Shell | dragomirecky/knapsack-problem | /compare/max_price.sh | UTF-8 | 527 | 3.296875 | 3 | [] | no_license | #!/bin/bash
# Benchmark the knapsack solvers (methods 1-4) over a range of maximum
# item prices, collecting the output of each method as a CSV file.
values=(50 100 250 500 1000 2000)

# Make sure both work directories exist. BUGFIX: results/ was never
# created, which made `touch results/tmp` fail on a fresh checkout, and
# `mkdir resources/` (without -p) failed on reruns.
mkdir -p resources results
touch results/tmp

for item in "${values[@]}"
do
	source_file="resources/max_price_${item}"
	destination_file="results/tmp"
	# Generate one problem instance with maximum price $item.
	./generate.sh -C "$item" "$source_file" "$destination_file"
	for i in {1..4}
	do
		final_output="results/max_price_${i}.csv"
		./launch.sh "$i" "$source_file" "$destination_file"
		./add_header.sh "$final_output"
		# Label the measurement rows, then append them to the method's CSV.
		echo "max price ${item} " >> "$final_output"
		cat "$destination_file" >> "$final_output"
		rm "$destination_file"
	done
done
| true |
0b7c3d4fb9a290861a96bdb8afbcaa834cb909ab | Shell | macmade/SVN-Util | /svn-util.sh | UTF-8 | 4,913 | 3.640625 | 4 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
################################################################################
# Copyright (c) 2010, Jean-David Gadina - www.xs-labs.com #
# All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions are met: #
# #
# - Redistributions of source code must retain the above copyright notice, #
# this list of conditions and the following disclaimer. #
# - Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in the #
# documentation and/or other materials provided with the distribution. #
# - Neither the name of 'Jean-David Gadina' nor the names of its #
# contributors may be used to endorse or promote products derived from #
# this software without specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" #
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
################################################################################
# $Id$
# Prints the command-line help. $1 is the name the script was invoked as.
SvnUtil_Usage()
{
    echo ''
    echo 'Usage: '$1' command working-copy'
    echo ''
    echo 'Commands:'
    echo '    '
    echo '    -k / --auto-keywords'
    echo '    Sets the svn:keywords property to all the regular files of the working-copy.'
    echo '    '
    echo '    -d / --delete'
    echo '    Deletes the .svn directories, making the working-copy an unversioned directory.'
    echo '    '
    echo '    -c / --clean'
    echo '    Cleans-up the working-copy by removing Mac OS X .DS_Store files.'
    echo '    '
    echo '    -a / --add'
    echo '    Adds every unversioned files in the working-copy.'
    echo ''
}
# Recursively sets the svn:keywords property on every regular file
# under the given working-copy directory.
SvnUtil_AutoKeywords()
{
    for subFile in "$1"/*; do
        if [ -d "$subFile" ]; then
            SvnUtil_AutoKeywords "$subFile"
        elif [ -f "$subFile" ]; then
            svn propset svn:keywords "Date Revision Author HeadURL Id" "$subFile"
        fi
    done
}
# Recursively deletes the .svn administrative directories, turning the
# working copy into a plain unversioned directory tree.
SvnUtil_Delete()
{
    if [ -d "$1/.svn" ]; then
        echo "Deleting $1/.svn"
        rm -rf "$1/.svn"
    fi
    for subFile in "$1"/*; do
        if [ -d "$subFile" ]; then
            SvnUtil_Delete "$subFile"
        fi
    done
}
# Recursively removes Mac OS X .DS_Store files from the given directory
# tree, reporting each deletion.
SvnUtil_Clean()
{
    local dsStore="$1/.DS_Store"
    if [ -f "$dsStore" ]; then
        echo "Deleting $dsStore"
        rm -f "$dsStore"
    fi
    for entry in "$1"/*; do
        if [ -d "$entry" ]; then
            SvnUtil_Clean "$entry"
        fi
    done
}
# Schedules every unversioned item in the working copy for addition.
# Parses the word stream of `svn status`: a lone "?" word marks the
# *next* word as an unversioned path.
# NOTE(review): this relies on word splitting, so paths containing
# spaces will be mishandled -- confirm that is acceptable here.
SvnUtil_Add()
{
    local svnStatus=$(svn status "$1")
    local add=0
    for svnFile in $svnStatus; do
        if [ $add -eq 1 ]; then
            svn add "$svnFile"
        fi
        if [ "$svnFile" == "?" ]; then
            add=1
        else
            add=0
        fi
    done
}
# Entry point: validate the arguments, then dispatch to the requested
# command. All expansions are quoted: the old unquoted `[ $1 == ... ]`
# chain broke on working-copy paths containing spaces.
if [ $# -ne 2 ]; then
    SvnUtil_Usage "$0"
elif [ ! -d "$2" ]; then
    echo 'Error: the specified directory does not exist'
elif [ ! -d "$2/.svn" ]; then
    echo 'Error: the specified directory does not seem to be a SVN working copy'
else
    case "$1" in
        -k|--auto-keywords) SvnUtil_AutoKeywords "$2" ;;
        -d|--delete)        SvnUtil_Delete "$2" ;;
        -c|--clean)         SvnUtil_Clean "$2" ;;
        -a|--add)           SvnUtil_Add "$2" ;;
        *)                  SvnUtil_Usage "$0" ;;
    esac
fi
| true |
bd63933d1b6c4f3079550a425f03d72805b085f4 | Shell | efloehr/timelapse | /camera/bin/watchimages.sh | UTF-8 | 177 | 2.59375 | 3 | [] | no_license | #!/bin/bash
# Watch the ramdisk and report every image file whose writer has
# finished (close_write), so partially-written images are never listed.

source ./globalvars.sh

# -m keeps inotifywait running forever; variables are quoted and `read -r`
# is used so paths with spaces or backslashes survive intact.
inotifywait -m --format '%w %f' -e close_write "$RAMDISK" | while read -r dir file; do
  echo "$dir/$file"
#  ./copyimage.sh "$dir/$file" &
done
| true |
222d9b65109eacb95be35ed45a37aaca45acbe1d | Shell | geranyl/document-generator | /buildpandoc.sh | UTF-8 | 2,792 | 3.953125 | 4 | [] | no_license | #!/bin/bash
#=====================================================================
# FILE: buildpandoc.sh
# USAGE: Run manually to generate html output
# DESCRIPTION: Uses Pandoc to pull together Markdown documents
# and process them with Pandoc to generate a website
# including directory contents as a toc
# AUTHOR: modification of the CV generator example
# by Scott Granneman (RSG), http://www.chainsawonatireswing.com/2013/05/28/how-i-create-manage-my-cv-using-markdown-pandoc/
# VERSION: 0.1
# CREATED:
# REVISION:
#=====================================================================
###
## Variables
#
# Directory for MD Files
mdDir="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
# Directory for HTML output
buildDir="${mdDir}builds"
echo $buildDir
# Markdown content
contentDir=content
toc=$(echo -e "\t<ul class=\"nav\">\r")
###
## Remove all files from the builds folder
#
rm -rf $buildDir/*
###
## Create HTML files for each Markdown file
#
# For every Markdown file under the content tree: generate its HTML page
# and append a link for it to the site-wide $toc.
for i in $(find "$mdDir$contentDir" -name '*.md');do
	echo "$i";
	# Mirror the file's subdirectory (relative to $contentDir) under $buildDir.
	thedir=$(dirname "$i")
	subfolders=${thedir#*"$contentDir"*}
	if [[ "$subfolders" != "" ]] ; then
		mkdir -p $buildDir/$subfolders
		subfolders="$subfolders/"
	else
		subfolders="/"
	fi
	# Path pieces used to build the toc link.  ($extension is currently unused.)
	filefull=$(basename "$i")
	directory=$(dirname "$i")
	extension="${filefull##*.}"
	filename="${filefull%.*}"
	buildpath="$buildDir"${directory#*"$contentDir"*}"/$filename.html"
	# Turn snake_case file names into Title Case link text ("my_page" -> "My Page").
	linktext=$(echo $filename | awk 'BEGIN{ FS = "_" }{mystr=""; for (i=1; i<=NF; i++) {mystr=mystr" "toupper(substr($i,1,1))substr($i,2)}; print mystr}')
	linktext=${linktext: 1} #remove extra white space in front of text
	# Link path relative to the web root.
	link=${buildpath#*"$buildDir/"*}
	# Append this page's entry to the global toc.
	toc="$toc$(echo -e "\t\t<li><a href=\"/$link\">$linktext</a></li>\r")"
	# Output HTML file name (source name minus .md).
	fileBuildName=$(basename "$i" .md)
	# Convert to HTML.
	# NOTE(review): the first pandoc run writes to the same -o target and is
	# immediately overwritten by the second; confirm whether it is dead code.
	pandoc --template=$mdDir/template.html -o $buildDir$subfolders$fileBuildName.html $i --highlight-style=pygments --variable=desc:'tips for js' --variable=contents:'<a href="tips.html">Tips</a>'
	pandoc --template=$mdDir/template.html -o $buildDir$subfolders$fileBuildName.html $i --highlight-style=pygments --toc -t html5
done
# Close the toc list, then splice it into every generated page in place of
# the __mastertable placeholder left by the template.
toc=$(echo -e "$toc\r\t</ul>")
for f in $(find $buildDir -name '*.html'); do
	# NOTE(review): `sed -i ''` is the BSD/macOS form; GNU sed would treat ''
	# as a filename.  Confirm the intended platform before changing.
	sed -i '' "s;__mastertable;$toc;" "$f"
done
###
## Copy static assets from the project into the build output
#
cp -r $mdDir/css $buildDir/css
cp -r $mdDir/images $buildDir/images
| true |
d27680c2b27ddb70df749e1a09a6ec512e72505c | Shell | cydmium/config | /work/zshrc | UTF-8 | 5,150 | 2.546875 | 3 | [] | no_license | # The following lines were added by compinstall {{{
zstyle ':completion:*' completer _expand _complete _ignored _approximate
zstyle ':completion:*' expand prefix suffix
zstyle ':completion:*' format 'Completing %d'
zstyle ':completion:*' ignore-parents parent pwd directory
zstyle ':completion:*' list-colors ''
zstyle ':completion:*' matcher-list '' 'm:{[:lower:]}={[:upper:]}' 'r:|[._-]=** r:|=**' 'l:|=* r:|=*'
zstyle ':completion:*' menu select=long
zstyle ':completion:*' menu select
zstyle ':completion:*' select-prompt %SScrolling active: current selection at %p%s
zstyle :compinstall filename '/home/drichardson42-gtri/.zshrc'
autoload -Uz compinit
compinit
# End of lines added by compinstall
# Lines configured by zsh-newuser-install
HISTFILE=~/.histfile
HISTSIZE=1000
SAVEHIST=1000
setopt autocd extendedglob notify
unsetopt beep
bindkey -v
# End of lines configured by zsh-newuser-install }}}
# Use Neovim if it exists {{{
if type nvim > /dev/null 2>&1; then
alias vim='nvim'
alias vi='nvim'
export EDITOR=nvim
elif type vim > /dev/null 2>&1; then
export EDITOR=vim
else
export EDITOR=vi
fi
# }}}
# SSH Aliases {{{
alias -g 'lng2=ssh -X lightning2 -t zsh'
alias -g 'lng=ssh -X lightning -t zsh'
alias -g 'lak=ssh -X lakota -t zsh'
alias -g 'chey=ssh -X cheyenne -t zsh'
alias -g 'chin=ssh -X chinook -t zsh'
alias -g 'cbr=ssh -X cobra -t zsh'
alias -g 'cyc=ssh -X cyclone -t zsh'
alias -g 'gib=ssh -X gibson -t zsh'
alias -g 'fen=ssh -X fender -t zsh'
alias -g 'laz=ssh -X lazarus -t zsh'
alias -g 'lng-old=ssh -X lightning-old -t zsh'
alias -g 'rap=ssh -X raptor -t zsh'
alias -g 'tes=ssh -X tesla -t zsh'
# }}}
# Auto-correct Prompt {{{
setopt correct
autoload -U colors && colors
export SPROMPT="Correct $fg[red]%R$reset_color to $fg[green]%r$reset_color? (Yes, No, Abort, Edit) "
export SHELL=/bin/zsh
# }}}
# Key Bindings {{{
# Fix Delete and Home Keys in Prompt {{{
bindkey "^[[H" beginning-of-line
bindkey "^[[F" end-of-line
bindkey "^[[3~" delete-char
bindkey "^[[Z" menu-complete
# }}}
bindkey "^[[A" history-beginning-search-backward
bindkey "^[[B" history-beginning-search-forward
# }}}
# Export Commands {{{
export LANG=en_US.UTF-8
export LC_ALL="en_US.UTF-8"
export PATH=$PATH:/usr/lib64/qt4/bin:$HOME/.local/bin:$HOME/bin/mview
export KEYTIMEOUT=10
# }}}
# Antigen Setup {{{
# Auto-install Antigen {{{
if [[ ! -d ~/.bundle ]]; then
mkdir ~/.bundle
curl -L git.io/antigen > ~/.bundle/antigen.zsh
fi
# }}}
source ~/.bundle/antigen.zsh
# Plugin Installation {{{
antigen bundle git
# CD to "frecent" directories with z
antigen bundle rupa/z
antigen bundle vi-mode
antigen bundle mafredri/zsh-async
# bd = backwards cd
antigen bundle Tarrasch/zsh-bd
antigen bundle ael-code/zsh-colored-man-pages
antigen bundle zdharma/fast-syntax-highlighting
antigen bundle zsh-users/zsh-autosuggestions
# Notify about aliases
antigen bundle MichaelAquilina/zsh-you-should-use
# Powerline prompt
antigen theme bhilburn/powerlevel9k
antigen apply
# }}}
# Plugin Settings {{{
# Autosuggest {{{
ZSH_AUTOSUGGEST_BUFFER_MAX_SIZE=20
# ZSH_AUTOSUGGEST_USE_ASYNC=1
bindkey '^j' autosuggest-accept
bindkey '^k' autosuggest-execute
bindkey '^l' autosuggest-fetch
# }}}
# Powerline {{{
DEFAULT_USER=drichardson42-gtri
POWERLEVEL9K_SHORTEN_DIR_LENGTH=3
POWERLEVEL9K_CONTEXT_DEFAULT_FOREGROUND='153'
POWERLEVEL9K_CONTEXT_DEFAULT_BACKGROUND='236'
POWERLEVEL9K_VI_MODE_INSERT_FOREGROUND='153'
POWERLEVEL9K_DIR_DEFAULT_BACKGROUND='153'
POWERLEVEL9K_DIR_HOME_BACKGROUND='153'
POWERLEVEL9K_DIR_HOME_SUBFOLDER_BACKGROUND='153'
POWERLEVEL9K_DIR_DEFAULT_FOREGROUND='236'
POWERLEVEL9K_DIR_HOME_FOREGROUND='236'
POWERLEVEL9K_DIR_HOME_SUBFOLDER_FOREGROUND='236'
POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(context dir)
POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(status vi_mode vcs)
POWERLEVEL9K_PROMPT_ON_NEWLINE=true
POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=""
POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=" "
POWERLEVEL9K_PROMPT_ADD_NEWLINE=true
# }}}
# ZSH Syntax Highlighting {{{
FAST_HIGHLIGHT_STYLES[path]='bold'
FAST_HIGHLIGHT_STYLES[globbing]='fg=yellow'
FAST_HIGHLIGHT_STYLES[precommand]='fg=yellow,bold'
# }}}
# }}}
# FZF {{{
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
export FZF_DEFAULT_OPTS='--height 40% --reverse --border'
# fe - fuzzy-pick one or more files with fzf and open them in $EDITOR
# (default: vim) instead of returning the selection to the command line.
# $1 seeds the fzf query; --select-1 auto-accepts a unique match and
# --exit-0 aborts quietly when nothing matches.
fe() {
  local files
  IFS=$'\n' files=($(fzf-tmux --query="$1" --multi --select-1 --exit-0))
  [[ -n "$files" ]] && ${EDITOR:-vim} "${files[@]}"
}
# fd - fuzzy-pick a directory and cd into it.
# $1 optionally sets the search root (default: current directory); hidden
# trees are pruned and find errors are discarded.
fd() {
  local dir
  dir=$(find ${1:-.} -path '*/\.*' -prune \
    -o -type d -print 2> /dev/null | fzf +m) &&
  cd "$dir"
}
# fbr - fuzzy-pick a git branch (local or remote, HEAD pointer excluded)
# and check it out; the "remotes/<origin>/" prefix is stripped so remote
# branches are checked out by their short name.
fbr() {
  local branches branch
  branches=$(git branch --all | grep -v HEAD) &&
  branch=$(echo "$branches" |
    fzf-tmux -d $(( 2 + $(wc -l <<< "$branches") )) +m) &&
  git checkout $(echo "$branch" | sed "s/.* //" | sed "s#remotes/[^/]*/##")
}
# }}}
# Disable globbing on the remote path: the alias stops zsh from expanding
# any scp argument, then the wrapper re-expands only the *local* ones.
alias scp='noglob scp_wrap'
function scp_wrap {
  local -a args
  local i
  for i in "$@"; do case $i in
    (*:*) args+=($i) ;;      # host:path remote spec -- pass through untouched
    (*) args+=(${~i}) ;;     # local argument -- expand globs explicitly (${~})
  esac; done
  command scp "${(@)args}"
}
| true |
08d09f624473d97878d53b23d1d64a9cdb1e1256 | Shell | aglucaci/stats_final_proj | /RUN_FUBAR_CG.sh | UTF-8 | 730 | 3.125 | 3 | [] | no_license | #RUN FUBAR ON HIV LANL
#NOTE: RUN FROM HYPHY DIRECTORY
BATCHFILE="/Users/alex/hyphy/res/TemplateBatchFiles/SelectionAnalyses/FUBAR.bf"
HYPHY="./hyphy"
DATADIR="/Users/alex/Documents/stats_final_proj/Data"
echo Starting to run FUBAR.. Initialize.
#1
for file in $DATADIR/*.msa; do
#echo "file=$file"
#echo $file
#echo checking output file $DATADIR"/"$file".MEME.json"
if [[ ! -e $file".FUBAR_CG.json" ]]
then
echo Running FUBAR on $file
#echo hyphy $BATCHFILE --code Universal --alignment $file --tree $file"_NJ.nwk"
$HYPHY $BATCHFILE --code Universal --method Collapsed-Gibbs --alignment $file --tree $file"_NJ.nwk" --output $file".FUBAR_CG.json" --cache $file".FUBAR_CG.cache"
fi
done
| true |
364b95fe799327324b1c34d6c8c83f77d31b0136 | Shell | Bolaxax/valhalla | /docker/install-shared-deps.sh | UTF-8 | 1,228 | 2.640625 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Script for shared dependencies
set -o errexit -o pipefail -o nounset
apt-get update -y && apt-get install -y software-properties-common
add-apt-repository -y ppa:valhalla-core/valhalla && apt-get update -y
apt-get install -y \
autoconf \
automake \
ccache \
clang-5.0 \
clang-tidy-5.0 \
coreutils \
curl \
g++ \
gcc \
git \
jq \
lcov \
libboost-all-dev \
libcurl4-openssl-dev \
libgeos++-dev \
libgeos-dev \
libluajit-5.1-dev \
liblz4-dev \
libprime-server0.6.3-dev \
libprotobuf-dev \
libspatialite-dev \
libsqlite3-dev \
libsqlite3-mod-spatialite \
libtool \
locales \
luajit \
make \
ninja-build \
osmium-tool \
parallel \
pkg-config \
prime-server0.6.3-bin \
protobuf-compiler \
python-all-dev \
python-minimal \
python3-all-dev \
python3-minimal \
spatialite-bin \
unzip \
vim-common \
zlib1g-dev \
&& rm -rf /var/lib/apt/lists/*
# install cmake (prebuilt self-extracting installer from cmake.org for the
# current architecture).
# NOTE(review): the downloaded script is executed without a checksum check;
# consider pinning a sha256 for supply-chain safety.
curl https://cmake.org/files/v3.16/cmake-3.16.0-Linux-$(uname --machine).sh > /tmp/cmake.sh
sh /tmp/cmake.sh --prefix=/usr/local --skip-license && /bin/rm /tmp/cmake.sh
cmake --version
| true |
ca2b92127b323eed050df34650881b55c3f32aa1 | Shell | PyroBor/Scripts | /spells_for_removal | UTF-8 | 3,971 | 3.6875 | 4 | [] | no_license | #!/bin/bash
# by Bor Kraljič <pyrobor[at]ver[dot]si>
#
#
# lets get sorcery goodies :)
. /etc/sorcery/config
# days4expir DATE
# Print the whole number of days from now until DATE (negative when DATE is
# in the past).  Relies on GNU date's -d parser.
# based on http://dbaspot.com/shell/373046-date-comparison-bash.html
function days4expir() {
	# Quote "$1" so date strings containing spaces survive word-splitting.
	echo $(( ($(date -d "$1" +%s) - $(date +%s)) / (24*3600) ))
}
# Still print the collected list (and exit non-zero) if the user hits Ctrl-C.
trap 'message "${PROBLEM_COLOR}control-c${DEFAULT_COLOR}"; nice_exit 1' INT
# Print colourised usage text via sorcery's `message` helper.
# (The *_COLOR variables come from /etc/sorcery/config, sourced above.)
function show_usage() {
usage="${MESSAGE_COLOR}Usage: ${SPELL_COLOR}$(basename $0) ${FILE_COLOR}spell(s)
${MESSAGE_COLOR} -d, --days <no days> search for spells deprecated longer then X days
-s, --section <section> for whole section
-g, --grimoire <grimoire> for whole grimoire
-h, --help\t\t show this help ${DEFAULT_COLOR}"
message "$usage"
}
# Print the accumulated list of removal candidates, then exit.
# $1 - exit status to propagate.
function nice_exit() {
	local rc=$1
	local count
	count=$(echo $failed_spells | wc -w)
	message "Deprecated spells for removal ($count):"
	message "${DEFAULT_COLOR}$failed_spells"
	exit $rc
}
## Parse the command line parameters and arguments via getopt
TEMP_OPTS=$(getopt -o 'd:s:g:h' -l 'days:,section:,grimoire:,help' \
            -n "$0" -- "$@")
if [[ $? != 0 ]]; then show_usage; exit 3; fi

# Note the quotes around `$TEMP_OPTS': they are essential!
eval set -- "$TEMP_OPTS"
unset TEMP_OPTS

while true; do
  case "$1" in
    "-s"|"--section")  wanted_spells=$(codex_get_spells_in_section $(codex_find_section_by_name $2)|cut -f8 -d/); shift 2;;
    "-g"|"--grimoire") wanted_spells=$(codex_get_all_spells $(codex_find_grimoire $2)| cut -f8 -d/); shift 2;;
    "-d"|"--days")     cut_period_days="$2" ; shift 2 ;;
    "-h"|"--help")     show_usage; exit 2 ;;
    --) shift ; break ;;
    *)  show_usage; exit 3 ;;
  esac
done

# Default look-back window: two stable releases (~180 days) is plenty.
# The leading "-" turns it into a point in the past for the comparison below.
# BUG FIX: this read "${cut_period_days:180}" (substring from offset 180),
# which always expanded empty and broke both the default and any -d value;
# ":-" is the use-default-when-unset-or-empty operator actually intended.
cut_period_days="-${cut_period_days:-180}"

if [[ ! $wanted_spells ]]; then
  wanted_spells="$@"
fi
# Walk every candidate spell.  Each check runs in a subshell so codex state
# and the helper variables start fresh per spell; a non-zero subshell exit
# means "flag this spell for removal".
checked=1
total_spells=$(echo $wanted_spells| wc -w)
for spell in $wanted_spells; do
	set_term_title "checking $spell ($checked of $total_spells)"
#	message -n "${MESSAGE_COLOR}Checking ${SPELL_COLOR}$spell${MESSAGE_COLOR}:"
	(
	# Loads the spell's details (sets $VERSION among others).
	codex_set_current_spell_by_name $spell
	if [[ $VERSION == "0" ]] || [[ $VERSION =~ (DEAD|dead|deprecated|DEPRECATED) ]] ;then
		message -n "${SPELL_COLOR}$spell${MESSAGE_COLOR} is deprecated..."
		# Line number of the first HISTORY entry mentioning deprecation.
		deprecated_line_no=$(gaze history $spell |grep deprecat -n |head -n1|cut -d: -f1)
		possible_date_line_no=$(( $deprecated_line_no - 1 ))
		## rpmunpack has version=0 and it is still ok...
		if [[ ! $deprecated_line_no ]] && [[ $VERSION == "0" ]] ;then
			message " guess not..."
			exit 0
		elif [[ ! $deprecated_line_no ]]; then
			# version matches (DEAD|dead|deprecated|DEPRECATED) but history
			# never says so; just take the last HISTORY entry instead.
			possible_date_line_no=3
		fi
		# Search backwards from the deprecation entry for a YYYY-MM-DD date.
		# ($date_found starts unset here thanks to the per-spell subshell.)
		until [[ $possible_date_line_no == 0 ]] || [[ $date_found == 1 ]]; do
			possible_date=$(gaze history $spell | sed -n "$possible_date_line_no p" |cut -d" " -f1)
			if grep -q "^[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]$" <<< $possible_date ;then
				date_found=1
			fi
			possible_date_line_no=$(( $possible_date_line_no - 1 ))
		done
		# NOTE(review): when no date is found $date_found is unset, not "0",
		# so this guard never fires and an empty date falls through to
		# days4expir below -- should probably test [[ $date_found != 1 ]].
		if [[ $possible_date_line_no == 0 ]] && [[ $date_found == 0 ]]; then
			message "correct date not found..."
			exit 0
		fi
		# Deprecation is older than the cut-off: exit 1 to flag the spell.
		if [[ $(days4expir $possible_date) -lt $cut_period_days ]]; then
			message -n " over ${cut_period_days/-/} days ago!"
			exit 1
		else
			message "${QUERY_COLOR} not long ago!"
		fi
	fi
	)
	subshell_rc=$?
	if [[ $subshell_rc != 0 ]]; then
		failed_spells="$failed_spells $spell"
		message " ${QUERY_COLOR}added to list."
#	else
#		message " ${QUERY_COLOR}still ok.${DEFAULT_COLOR}"
	fi
	checked=$(( $checked + 1 ))
done

nice_exit 0
| true |
7bcadc60197472cdf1cbf3cf45ee6b6eac0748b4 | Shell | krahul084/my_shellscripts | /sample_while.sh | UTF-8 | 398 | 3.953125 | 4 | [] | no_license | #!/bin/bash
#Script to demonstrate the functionality of while/until loops
read -p "Please enter a how many times the loop should execute: " count
# ROBUSTNESS FIX: default to 0 on empty or non-numeric input.  The original
# left $count as typed, so an empty answer made both `test` comparisons
# error out -- which skips the while loop but makes the until loop (which
# runs while its condition is *false*) spin forever.
case $count in
	''|*[!0-9]*) count=0 ;;
esac
echo "This is for -while loop"
var=1
while [ $var -le $count ]; do
	echo "this is loop: $var"
	(( ++var ))
done
echo "###################"
echo "This is for-until loop"
var=1
until [ $var -gt $count ]; do
	echo "This is loop: $var"
	(( ++var ))
done
| true |
4b906d106e2512f90ca3080043038b3d0fe085b6 | Shell | spyrgkor/opsys2018-assignment1-2675 | /script2.sh | UTF-8 | 519 | 3.796875 | 4 | [] | no_license | #!/bin/bash
fname=$1
if [ -f $fname ]; then
tar -xzf $fname ./repos18
#cd ${fname%.*}
cd repos18
for filename in ./*.txt; do
gfname="`cat $filename`"
if [ ! -d "./assignments" ] ; then
mkdir ./assignments
fi
cd assignments
git clone $gfname
if [ $? -ne 0 ]
then
echo "$gfname: cloning FAILED"
else
echo "$gfname: cloning OK"
fi
cd ..
cd $filename
echo "No of txt files: " ls -1 *.txt | wc -l
echo "No of directories: " ls -l | grep -c ^d
done
fi
| true |
c71f6bf429d5938b7591c90d9645500488fa8b48 | Shell | mikesmiffy128/build | /scripts/sha1 | UTF-8 | 658 | 3.328125 | 3 | [
"ISC",
"LicenseRef-scancode-public-domain"
] | permissive | #!/bin/sh
# This file is dedicated to the public domain.

# Portable "is this command available?" probe.
have() { command -v "$1" >/dev/null 2>&1; }

# Hash stdin with whatever SHA-1 tool this system ships:
#   - sha1sum: most Linux distros (keep only the digest column)
#   - sha1:    the BSDs; NetBSD in particular has no sha1sum
#   - openssl: last resort, may be a bit slower (digest is in column 2)
if have sha1sum; then
	sha1sum | awk '{print $1}'
elif have sha1; then
	exec sha1
elif have openssl; then
	openssl sha1 | awk '{print $2}'
else
	echo "sha1: your system lacks an sha1 command, somehow" >&2
	exit 1
fi

# vi: sw=4 ts=4 noet tw=80 cc=80
| true |
a9c79219995c6de44bbc384474a3b5cba342984a | Shell | BlenderCN-Org/morphosource-vagrant | /install_scripts/rvm-ruby.sh | UTF-8 | 331 | 3.265625 | 3 | [] | no_license | #!/usr/bin/env bash
source $HOME/.rvm/scripts/rvm

#
# Install ruby 2.4.2 through rvm unless it is already present.
#
echo
echo 'check for ruby 2.4.2'
echo '--------------------'
installed_rubies="$(rvm list 2>&1)"
if grep -q 'ruby-2.4.2' <<< "$installed_rubies"; then
	echo 'ruby 2.4.2 already installed'
else
	echo 'installing ruby 2.4.2'
	rvm install 2.4.2
fi
echo
| true |
e98906bb3b0a999c7681b442ebe9a12478b711ba | Shell | Ludtson/Bio_scripting_homework_5 | /sys_info_OWOYEMI.sh | UTF-8 | 649 | 3.421875 | 3 | [] | no_license | #!/bin/bash
# sys_info: print your username, the time and date, and who is logged on,
# each section followed by a line of asterisks.
# (Written with help from the internet by Adekola Owoyemi.)
# NOTE(review): the original header also promised to commit the output to
# ${user}_sysinfo.txt, which was never implemented -- confirm whether file
# output is still wanted before adding it.
########################################################################
# A handy helper: print one separator line of asterisks.
function aste__ks(){
echo "*************************************************"
}
########################################################################
# BUG FIX: "$(echo $USER)" was a useless echo; expand $USER directly.
echo "Your username : $USER"
aste__ks
# Sections promised by the header that were previously missing:
echo "The time and date : $(date)"
aste__ks
echo "Who is logged on :"
who
aste__ks
| true |
d90dce4ecdc6f2cb978accf3e96eaa02c853c84a | Shell | bsudy/saml-proxy | /configure | UTF-8 | 3,404 | 3.828125 | 4 | [] | no_license | #!/bin/bash
set -e
cd /
EXAMPLES=$(cat <<- EOF
Example:
Docker:
docker run -v <path>/saml_idp.xml:/etc/httpd/conf.d/saml_idp.xml -e BACKEND=https://api.example.com:8443 barnabassudy/saml-proxy
Docker-compose:
version: "2"
services:
yourservice:
...
saml-proxy:
image: "barnabassudy/saml-proxy"
environment:
BACKEND: "http://yourservice:port"
ports:
- "80:80"
volumes:
- "<path>/saml_idp.xml:/etc/httpd/conf.d/saml_idp.xml"
EOF
)
if [ ! -f /etc/httpd/conf.d/saml_idp.xml ]; then
echo -e "No saml_idp.xml file found.\n\n" \
"In order to get the proxy running you must provide a saml_idp.xml file\n" \
"in /etc/httpd/conf.d/ directory.\n\n" \
"$EXAMPLES"
exit 1
fi
if [ -z "$BACKEND" ]; then
echo -e "No BACKEND environement variable is set.\n\n" \
"In order to get the proxy running you must provide a BACKEND environment variable.\n\n" \
"$EXAMPLES"
exit 1
fi
# Gather runtime configuration from the environment, with defaults.
export REAL_HOST=$(hostname -f)
export SCHEMA=${SCHEMA:-https}
export HOST=${PROXY_HOST:-$REAL_HOST}
# Upper-cased host name (used by the templated Apache config below).
# FIX: quote the tr classes -- unquoted [a-z] could be glob-expanded by the
# shell if a single-letter file exists in the cwd; backticks replaced by $().
export realm=$(echo "$HOST" | tr '[:lower:]' '[:upper:]')
export BACKEND=${BACKEND:-https://api.example.com:8443}
export MELLON_PATH=${MELLON_PATH:-mellon}
# Create mod_auth_mellon service provider config
if [ -f /etc/httpd/conf.d/saml_sp.key ] || [ -f /etc/httpd/conf.d/saml_sp.cert ] || [ -f /etc/httpd/conf.d/saml_sp.xml ]; then
if [ ! -f /etc/httpd/conf.d/saml_sp.key ]; then
echo "/etc/httpd/conf.d/saml_sp.key file is not provided."
exit 1
fi
if [ ! -f /etc/httpd/conf.d/saml_sp.cert ]; then
echo "/etc/httpd/conf.d/saml_sp.cert file is not provided."
exit 1
fi
if [ ! -f /etc/httpd/conf.d/saml_sp.xml ]; then
echo "/etc/httpd/conf.d/saml_sp.xml file is not provided."
exit 1
fi
else
pushd /etc/httpd/conf.d
echo -e "Generating new service provider certificate.\n\n" \
"In order to avoid generating new certificate every time. You can provide the following files as volumes:\n" \
" * /etc/httpd/conf.d/saml_sp.key - private key\n" \
" * /etc/httpd/conf.d/saml_sp.cert - certificate\n" \
" * /etc/httpd/conf.d/saml_sp.xml - SAML SP xml\n\n" \
"The files can be generated by https://github.com/bsudy/saml-proxy/blob/master/mellon_create_metadata.sh script.\n\n" \
" mellon_create_metadata.sh ${SCHEMA}://${HOST} ${SCHEMA}://${HOST}/${MELLON_PATH}\n\n" \
"--------------------------------------------------\n"
/usr/sbin/mellon_create_metadata.sh ${SCHEMA}://${HOST} ${SCHEMA}://${HOST}/${MELLON_PATH}
popd
fi
# Build one Apache RequestHeader directive per SAML_MAP_* environment
# variable: SAML_MAP_<ATTR>=<HeaderName> maps SAML attribute <ATTR> (exposed
# by mod_auth_mellon as env var MELLON_<ATTR>) to request header <HeaderName>.
REQUEST_HEADERS=""
for varname in ${!SAML_MAP_*}
do
# declare -n makes REQUEST_HEADER_NAME a nameref: it expands to the value
# of the variable whose name is in $varname (i.e. the header name).
declare -n REQUEST_HEADER_NAME=$varname
# Drop the 9-character "SAML_MAP_" prefix to recover the attribute name.
SAML_ATTR=${varname:9}
echo "Mapping $SAML_ATTR attribute to $REQUEST_HEADER_NAME request header"
REQUEST_HEADER_CONFIG=$(cat << EOF
RequestHeader set ${REQUEST_HEADER_NAME} %{MELLON_$SAML_ATTR}e env=MELLON_$SAML_ATTR
EOF
)
# Accumulate the directives one per line (echo -e interprets the \n).
REQUEST_HEADERS=$(echo -e "$REQUEST_HEADERS\n$REQUEST_HEADER_CONFIG")
done
export REQUEST_HEADERS
# configure Apache proxy and auth
cat /etc/httpd/conf.d/proxy.conf.template | envsubst '$SCHEMA,$HOST,$BACKEND,$MELLON_PATH,$REQUEST_HEADERS' > /etc/httpd/conf.d/proxy.conf
# Start apache
httpd -DFOREGROUND
| true |
97d40d3204ef6dce482e3168db1fbdbc949aca5f | Shell | makeable/config | /bin/forall | UTF-8 | 137 | 3.21875 | 3 | [] | no_license | #!/bin/sh
# forall: run a command once over every regular file beneath a directory.
#   usage: forall DIR CMD [ARGS...]
# FIX: the old `expr 2 \> $#` arity test was archaic; the unquoted $dir and
# plain -print|xargs pipeline also broke on paths containing whitespace,
# so pass NUL-delimited names with -print0 / xargs -0 instead.
if [ "$#" -lt 2 ]
then
	echo "Usage: $0 dir cmd [optargs]"
	exit 1
fi
dir=$1
shift
find "$dir" -type f -print0 | xargs -0 "$@"
| true |
214058c86843406e5b825c440ed387b2a5f019b3 | Shell | miko-798/cirrus-ngs | /test/runalltests | UTF-8 | 413 | 2.921875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Run every test_cirrus_ngs/test*/test*.py module with plain python,
# printing a banner around each file's output.
# FIXES: backticks -> $(), quoted expansions (paths with spaces), and a
# guard so an empty directory doesn't hand the literal glob to python.
main_dir=$(basename test_cirrus_ngs/)
for dir in "$main_dir"/test*; do
	if [ -d "$dir" ]; then
		for file in "$dir"/test*.py; do
			# Skip the unexpanded pattern when no test files exist.
			[ -e "$file" ] || continue
			echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
			echo "$file"
			echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
			python "$file"
			echo
		done
	fi
done
| true |
738e723988a58566a5a1f858c97479fafe86a63e | Shell | hansung-dev/Hello-DockerCompose | /mysql/mysql-orchestrator/post_sql.sh | UTF-8 | 1,519 | 2.890625 | 3 | [] | no_license | MASTER_NODE='mysql-01'
SLAVE01_NODE='mysql-02'
SLAVE02_NODE='mysql-03'
EXEC_MASTER="docker exec ${MASTER_NODE} mysql -uroot -proot -N -e "
EXEC_SLAVE01="docker exec ${SLAVE01_NODE} mysql -uroot -proot -e "
EXEC_SLAVE02="docker exec ${SLAVE02_NODE} mysql -uroot -proot -e "
## For Replication
echo "Replication"
${EXEC_MASTER} "CREATE USER 'repl'@'%' IDENTIFIED BY 'repl'" 2>&1 | grep -v "Using a password"
${EXEC_MASTER} "GRANT REPLICATION SLAVE ON *.* TO 'repl'@'%'" 2>&1 | grep -v "Using a password"
${EXEC_SLAVE01} "reset master" 2>&1 | grep -v "Using a password"
${EXEC_SLAVE01} "CHANGE MASTER TO MASTER_HOST='${MASTER_NODE}', \
MASTER_USER='repl', MASTER_PASSWORD='repl', \
MASTER_AUTO_POSITION=1" 2>&1 | grep -v "Using a password"
${EXEC_SLAVE01} "START SLAVE" 2>&1 | grep -v "Using a password"
${EXEC_SLAVE02} "reset master" 2>&1 | grep -v "Using a password"
${EXEC_SLAVE02} "CHANGE MASTER TO MASTER_HOST='${MASTER_NODE}', \
MASTER_USER='repl', MASTER_PASSWORD='repl', \
MASTER_AUTO_POSITION=1" 2>&1 | grep -v "Using a password"
${EXEC_SLAVE02} "START SLAVE" 2>&1 | grep -v "Using a password"
## For Orchestrator
echo "Orchestrator"
${EXEC_MASTER} "CREATE USER orc_client_user@'%' IDENTIFIED BY 'orc_client_password'" 2>&1 | grep -v "Using a password"
${EXEC_MASTER} "GRANT SUPER, PROCESS, REPLICATION SLAVE, RELOAD ON *.* TO orc_client_user@'%'" 2>&1 | grep -v "Using a password"
${EXEC_MASTER} "GRANT SELECT ON mysql.slave_master_info TO orc_client_user@'%'" 2>&1 | grep -v "Using a password"
| true |
728d0cd40d9d2d06f7c525969f729ef961fd27b4 | Shell | mjijeesh/zynq-can-sja1000-top | /scripts/package-build-for-extmodules | UTF-8 | 1,179 | 3.515625 | 4 | [] | no_license | #!/bin/bash
#
# Usage: ./package-build-for-extmodules BUILD_DIRECTORY
#
# Source directory is found via symlink $BUILD/source. If it does not exist,
# it is assumed to be identical to $BUILD.
#
# Produces $BUILD-dev.tar.gz archive.
#
set -eux

# Build tree to package ($1).
BUILD=$1
# Source tree: the $BUILD/source symlink when present, otherwise $BUILD itself.
[ -d $BUILD/source ] && SRC=$BUILD/source || { echo "Using SRC=\$BUILD."; SRC=$BUILD; }
# Kernel release string reported by the build tree; used to name the archive.
release="$(make -s -C $BUILD kernelrelease ARCH=arm)"
# Scratch area; removed on any exit path by the trap.
tmp=$(mktemp -d kernelpack.XXXXXX)
trap "rm -R '$tmp'" EXIT
mkdir $tmp/linux && tmp=$tmp/linux
mkdir $tmp/build $tmp/source
rsync -a -f 'merge /dev/stdin' $BUILD/ $tmp/build/ <<EOF
+ /arch
+ /arch/arm
+ /arch/arm/include/***
+ /include/***
+ /scripts/***
+ /Makefile
+ /modules.builtin
+ /modules.order
+ /Module.symvers
- *
EOF
ln -s ../source $tmp/build/source
sed -r -i -e 's~^(MAKEARGS := -C ).*~\1source~' $tmp/build/Makefile
rsync -a -f 'merge /dev/stdin' $SRC/ $tmp/source/ <<EOF
+ Kconfig*
+ /arch
+ /arch/arm
+ /arch/arm/include/***
+ /arch/arm/Makefile
+ /include/***
+ /scripts/***
+ /Makefile
+ /Kbuild
+ /localversion-rt
- *
EOF
BUILD="$(realpath "$BUILD")"
echo ${BUILD}-${release}-dev.tar.gz
(cd $tmp/.. && tar c * | gzip >${BUILD}-${release}-dev.tar.gz)
ls $tmp
| true |
27a1b0b9fd7b6168b9dc0eb7cb7c3196981eadba | Shell | HumanNeuroscienceLab/facemem | /24_neurosynth/20_task.bash | UTF-8 | 756 | 3.078125 | 3 | [] | no_license | #!/usr/bin/env bash
# This will run the task analysis (main effects) on the neurosynth temporal regression data
# It outputs everything as 1D files, which I can later convert back to voxelwise data
# Use older afni
export PATH=/mnt/nfs/share/afni/current:$PATH
if [[ $# -eq 0 ]]; then
echo "usage: $0 roi-name (njobs [16])"
exit 2
fi
name="$1"
njobs=${2:-16}
###
runtypes=( "Questions" "NoQuestions" )
subjs=( $( cat ../sublist_all.txt ) )
nthreads=1 # will be disabled bc too few voxels
parallel --no-notice -j $njobs --eta \
./20_task_worker.bash --subject={1} --runtype={2} --region=${name} --nthreads=${nthreads} ::: ${subjs[@]} ::: ${runtypes[@]}
#./20_task_worker.bash --subject=tb9226 --runtype=Questions --region=ri_maps_01 --nthreads=1 | true |
282efcee0a684dcd2818a52dbf9cfa40265ffc94 | Shell | Samsung/TizenRT | /build/configs/rtl8720e/rtl8720e_download.sh | UTF-8 | 2,697 | 3.171875 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
###########################################################################
#
# Copyright 2022 Samsung Electronics All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
###########################################################################
# rtl8720e_download.sh
TOOL_PATH=${TOP_PATH}/build/tools/amebalite
IMG_TOOL_PATH=${TOOL_PATH}/image_tool
OS_PATH=${TOP_PATH}/os
CONFIG=${OS_PATH}/.config
source ${CONFIG}
APP_NUM=0
# Stage every image the flasher needs into the image_tool directory.
# Binary names (BIN_PATH, KERNEL_BIN_NAME, APP*_BIN_NAME, ...) come from the
# sourced .bininfo.  When app binary separation is enabled but no user
# binary was built, clean up and exit the script.
function pre_download()
{
	source ${TOP_PATH}/os/.bininfo
	cp -p ${BIN_PATH}/km4_boot_all.bin ${IMG_TOOL_PATH}/km4_boot_all.bin
	cp -p ${BIN_PATH}/${KERNEL_BIN_NAME} ${IMG_TOOL_PATH}/${KERNEL_BIN_NAME}
	if [ "${CONFIG_APP_BINARY_SEPARATION}" == "y" ]; then
		if test -f "${BIN_PATH}/${APP1_BIN_NAME}"; then
			APP_NUM=$(($APP_NUM+1))
			cp -p ${BIN_PATH}/${APP1_BIN_NAME} ${IMG_TOOL_PATH}/${APP1_BIN_NAME}
		fi
		if test -f "${BIN_PATH}/${APP2_BIN_NAME}"; then
			APP_NUM=$(($APP_NUM+1))
			cp -p ${BIN_PATH}/${APP2_BIN_NAME} ${IMG_TOOL_PATH}/${APP2_BIN_NAME}
		fi
		# Neither app binary exists: nothing to flash, clean up and bail.
		if [ ${APP_NUM} -eq 0 ]; then
			echo "No User Binary."
			post_download
			exit 1
		fi
	fi
	if test -f "${BIN_PATH}/${COMMON_BIN_NAME}"; then
		cp -p ${BIN_PATH}/${COMMON_BIN_NAME} ${IMG_TOOL_PATH}/${COMMON_BIN_NAME}
	fi
	if test -f "${SMARTFS_BIN_PATH}"; then
		cp -p ${SMARTFS_BIN_PATH} ${IMG_TOOL_PATH}/${CONFIG_ARCH_BOARD}_smartfs.bin
	fi
	if test -f "${BIN_PATH}/${BOOTPARAM}.bin"; then
		cp -p ${BIN_PATH}/${BOOTPARAM}.bin ${IMG_TOOL_PATH}/${BOOTPARAM}.bin
	fi
}
# board_download PORT ADDRESS IMAGE
# Flash IMAGE at ADDRESS over PORT; skip with a message when the image is
# missing from the image_tool directory.
# FIX: quote the expansions so image names / ports with spaces don't split.
function board_download()
{
	cd ${IMG_TOOL_PATH}
	if [ ! -f "${IMG_TOOL_PATH}/$3" ]; then
		echo "$3 not present"
	else
		./upload_image_tool_linux "download" "$1" 1 "$2" "$3"
	fi
}
# board_erase PORT ADDRESS IMAGE
# Erase the flash region corresponding to IMAGE at ADDRESS over PORT.
# FIX: quote the positional expansions (consistent with board_download).
function board_erase()
{
	cd ${IMG_TOOL_PATH}
	./upload_image_tool_linux "erase" "$1" 1 "$2" "$3"
}
# post_download
# Remove the staged images from the image_tool directory.
# FIX: the seven copy-pasted '[ -e ] && rm' lines used unquoted rm
# arguments; a loop with quoting is safer and shorter.  Returns 0 even
# when nothing was staged (the original leaked the last test's status).
function post_download()
{
	cd ${IMG_TOOL_PATH}
	local f
	for f in "${BL1}.bin" "${KERNEL_BIN_NAME}" "${APP1_BIN_NAME}" "${APP2_BIN_NAME}" \
		"${COMMON_BIN_NAME}" "${CONFIG_ARCH_BOARD}_smartfs.bin" "${BOOTPARAM}.bin"; do
		[ -e "$f" ] && rm "$f"
	done
	return 0
}
| true |
3f92550aad9929d11d63c0dc50019cc82205586f | Shell | mjedmonds/dotfiles | /bash/.bash_profile | UTF-8 | 806 | 2.875 | 3 | [] | no_license | #specify the editor to use for control-x control-u
export EDITOR="vi"
#enable colors!
export CLICOLOR=1
export PATH=$PATH:/usr/local/bin
export PATH=$PATH:/usr/local/sbin
export PATH=$PATH:/Users/mark/bin
export PATH=$PATH:/Developer/NVIDIA/CUDA-7.0/bin
if [ -f ~/.bashrc ]; then
source ~/.bashrc
fi
# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
conda_exec="${HOME}/miniconda3/bin/conda"
__conda_setup="$(${conda_exec} 'shell.bash' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
eval "$__conda_setup"
else
if [ -f "/Users/mark/miniconda3/etc/profile.d/conda.sh" ]; then
. "/Users/mark/miniconda3/etc/profile.d/conda.sh"
else
export PATH="/Users/mark/miniconda3/bin:$PATH"
fi
fi
unset __conda_setup
# <<< conda initialize <<<
| true |
5224f4cd5b531d10d0f2b48d2e26e52d2d39ed97 | Shell | draios/docker-bench-security | /output_lib.sh | UTF-8 | 2,633 | 3.609375 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/sh
if [ -n "$nocolor" ] && [ "$nocolor" = "nocolor" ]; then
bldred=''
bldgrn=''
bldblu=''
bldylw=''
txtrst=''
else
bldred='\033[1;31m'
bldgrn='\033[1;32m'
bldblu='\033[1;34m'
bldylw='\033[1;33m' # Yellow
txtrst='\033[0m'
fi
logit () {
printf "%b\n" "$1" | tee -a "$logger"
}
info () {
printf "%b\n" "${bldblu}[INFO]${txtrst} $1" | tee -a "$logger"
}
pass () {
printf "%b\n" "${bldgrn}[PASS]${txtrst} $1" | tee -a "$logger"
}
warn () {
printf "%b\n" "${bldred}[WARN]${txtrst} $1" | tee -a "$logger"
}
note () {
printf "%b\n" "${bldylw}[NOTE]${txtrst} $1" | tee -a "$logger"
}
yell () {
printf "%b\n" "${bldylw}$1${txtrst}\n"
}
beginjson () {
printf "{\n \"dockerbenchsecurity\": \"%s\",\n \"start\": %s,\n \"tests\": [" "$1" "$2" | tee "$logger.json" 2>/dev/null 1>&2
}
endjson (){
printf "\n ], \"checks\": %s, \"score\": %s, \"end\": %s \n}\n" "$1" "$2" "$3" | tee -a "$logger.json" 2>/dev/null 1>&2
}
logjson (){
printf "\n \"%s\": \"%s\"," "$1" "$2" | tee -a "$logger.json" 2>/dev/null 1>&2
}
SSEP=
SEP=
startsectionjson() {
printf "%s\n {\"id\": \"%s\", \"desc\": \"%s\", \"results\": [" "$SSEP" "$1" "$2" | tee -a "$logger.json" 2>/dev/null 1>&2
SEP=
SSEP=","
}
endsectionjson() {
printf "\n ]}" | tee -a "$logger.json" 2>/dev/null 1>&2
}
starttestjson() {
printf "%s\n {\"id\": \"%s\", \"desc\": \"%s\", " "$SEP" "$1" "$2" | tee -a "$logger.json" 2>/dev/null 1>&2
SEP=","
}
resulttestjson() {
if [ $# -eq 1 ]; then
printf "\"result\": \"%s\"}" "$1" | tee -a "$logger.json" 2>/dev/null 1>&2
elif [ $# -eq 2 ]; then
# Result also contains details
printf "\"result\": \"%s\", \"details\": \"%s\"}" "$1" "$2" | tee -a "$logger.json" 2>/dev/null 1>&2
else
# Result also includes details and a list of items. Add that directly to details and to an array property "items"
# Also limit the number of items to $limit, if $limit is non-zero
if [ $limit != 0 ]; then
truncItems=""
ITEM_COUNT=0
for item in $3; do
truncItems="$truncItems $item"
ITEM_COUNT=$((ITEM_COUNT + 1));
if [ "$ITEM_COUNT" == "$limit" ]; then
truncItems="$truncItems (truncated)"
break;
fi
done
else
truncItems=$3
fi
itemsJson=$(printf "["; ISEP=""; ITEMCOUNT=0; for item in $truncItems; do printf "%s\"%s\"" "$ISEP" "$item"; ISEP=","; done; printf "]")
printf "\"result\": \"%s\", \"details\": \"%s: %s\", \"items\": %s}" "$1" "$2" "$truncItems" "$itemsJson" | tee -a "$logger.json" 2>/dev/null 1>&2
fi
}
| true |
ad18a27cd439465e8454397a8916ed7b1d5de6fd | Shell | Kagarie/Symfony | /part_Symfony/bash/cut40.sh | UTF-8 | 451 | 3.65625 | 4 | [] | no_license | #!/bin/sh
# Trim every MP3 under public/musiques/ down to its first 40 seconds.
# Runs forever, rescanning the directory every 30 seconds.

# Re-encode the first 40 seconds of $1 in place, via a unique temp file
# (the old `date +%s` name collided when two files were cut in the same second).
cut40()
{
  src=$1
  tmp=$(mktemp --suffix=.mp3) || return 1
  # -y: mktemp already created the file; -nostdin: keep ffmpeg from
  # swallowing the surrounding loop's stdin.
  if ffmpeg -nostdin -y -i "$src" -ss 0 -to 40 "$tmp"; then
    mv "$tmp" "$src"
  else
    rm -f "$tmp"
    return 1
  fi
}

cd public/musiques/ || exit 1

while true
do
  # Walk the .mp3 files one by one
  for fich in *.mp3
  do
    # A file heavier than 700 000 bytes is assumed to be longer than
    # 40 seconds of MP3 audio, so it still needs trimming.
    if [ -f "$fich" ] && [ "$(stat -c%s "$fich")" -gt 700000 ]
    then
      cut40 "$fich"
    fi
  done
  sleep 30
done
| true |
598b32b251bd33e4d2cf82c49158eb3481b34f50 | Shell | kampde/ktools | /photo/nef2jpg | UTF-8 | 2,311 | 4.28125 | 4 | [] | no_license | #!/bin/bash
set -o noclobber

# Print CLI help.  Interpolates the option defaults, so the defaults
# below must be assigned before this is ever called.
usage() {
  cat <<USAGE
usage: $0 [OPTIONS] NEF_FILE[..]
Converts NEF_FILE to jpg and stores it in parent folder.
Valid options are:
-h, --help Show this help and exit
-d, --dimension=DIMENSION Make jpg of dimensions DIMENSION (use orig to
keep NEF dimensions). (Default: $dimension)
-e, --keep-exif Copy the EXIF info from the NEF to the JPG.
(Default: $keep_exif)
--no-keep-exif Do not copy the EXIF from the NEF to the JPG.
-p, --parallel=JOBS Execute JOBS conversion in parallel
(default: $jobs)
USAGE
}

EXITVAL=0

# Option defaults — assigned before getopt so usage() shows them even on
# a parse error.
keep_exif=on
dimension=1920x1080
jobs=4

# Normalize the command line; abort with usage on an invalid option
# instead of silently continuing.
OUTOPT=$(getopt --options d:ehp: --long help,keep-exif,no-keep-exif,dimension:,parallel: -n "$0" -- "$@") || { usage >&2; exit 2; }
eval set -- "$OUTOPT"

while true
do
  case "$1" in
    -h|--help)
      usage
      exit 0
      ;;
    -e|--keep-exif)
      keep_exif=1
      shift
      ;;
    --no-keep-exif)
      keep_exif=
      shift
      ;;
    -d|--dimension)
      # "orig" means: keep the NEF's native dimensions (no -resize)
      if [ "$2" = "orig" ]
      then
        dimension=
      else
        dimension="$2"
      fi
      shift 2
      ;;
    -p|--parallel)
      jobs="$2"
      shift 2
      ;;
    --)
      # end of processed getopt options, break the loop
      shift
      break
      ;;
    *)
      # getopt normalizes the argument list, so this should be unreachable;
      # tag the report with a random code for bug reports.
      ERROR=$(( (RANDOM % 10000) + 1 ))
      echo "[-] Unexpected error: code $ERROR" >&2
      exit 1
      ;;
  esac
done

# ImageMagick arguments implementing the requested output size.
if [ "$dimension" ]
then
  resize_params=("-resize" "$dimension")
else
  resize_params=()
fi

# Convert one NEF ($1) to a JPG ($2): extract the embedded preview with
# dcraw, pipe it through ImageMagick, then optionally copy the EXIF block.
convierte() {
  local orig="$1"
  local dest="$2"
  local exitval=0
  echo -n "Creando $dest ... "
  if dcraw -e -c "$orig" | convert - "${resize_params[@]}" "$dest"
  then
    echo " OK"
    if [ "$keep_exif" ]
    then
      echo -n "Aplicando EXIF a $dest ... "
      if exiftool -overwrite_original -TagsFromFile "$orig" "$dest"
      then
        echo " OK"
      else
        echo " MIERDA"
      fi
    fi
  else
    echo " MIERDA"
    exitval=3
  fi
  return $exitval
}

FOTONUM=0
for foto
do
  FOTONUM=$((FOTONUM + 1))
  ext="${foto##*.}"
  if [ "${ext,,}" = "nef" ]
  then
    nom="../${foto%.*}.from_raw.jpg"
    if [ -s "$nom" ]
    then
      echo "$nom existe"
    else
      # Convert in the background; the wait every $jobs photos below
      # caps the number of concurrent conversions.
      convierte "$foto" "$nom" &
    fi
  else
    echo "$foto no es una imagen raw (NEF)" >&2
    EXITVAL=1
  fi
  if [ "$((FOTONUM % jobs))" = "0" ]
  then
    echo "Waiting..."
    wait
  fi
done
wait

# Propagate failure when any argument was not a NEF file (previously
# EXITVAL was set but never used).  Background conversion failures are
# reported on screen only.
exit "$EXITVAL"
92f9c6fb33b42508e23ef2cb9b2ce4f9c32a883a | Shell | dchandran/metassemble | /scripts/validate/nucmer/run-nucmer.sh | UTF-8 | 1,873 | 4.0625 | 4 | [] | no_license | #!/bin/bash
HELPDOC=$( cat <<EOF
Map two files of contigs against each other using nucmer. The coords file shows
all the alignments, see http://mummer.sourceforge.net/manual/#coords for column
names. The gcoords is the output of show-coords after running delta-filter -g,
which gives you the longest mutually consistent set of alignments for each
ref-query pair. See: http://mummer.sourceforge.net/manual/#filter.
Usage:
bash `basename $0` [options] <ref.fa> <query.fa> <output-prefix>
Options:
-r Generate report with dnadiff. Report is in <output-prefix>_report.report
-h This help documentation.
EOF
)
set -o errexit
set -o nounset
# Resolve the directory this script lives in (follows the usual
# BASH_SOURCE/dirname/pwd idiom).
# From: http://tinyurl.com/85qrydz
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Shared helpers; provides check_prog used below.
source $SCRIPTDIR/../../global-functions.incl
# Default parameters
GEN_REPORT=false
# Parse options
while getopts ":hr" opt; do
case $opt in
r)
GEN_REPORT=true
;;
h)
echo "$HELPDOC"
exit 0
;;
\?)
echo "Invalid option: -$OPTARG" >&2
echo "$HELPDOC"
exit 1
;;
esac
done
shift $(($OPTIND - 1))
# Parse arguments: exactly <ref.fa> <query.fa> <output-prefix> required.
if [ ! $# -eq 3 ]; then
echo "Invalid number of arguments: 3 needed but $# supplied" >&2
echo "$HELPDOC"
exit 1
fi
if [[ -f $1 ]]; then
REF=$1
else
echo "$1 is not a file" >&2
exit 1
fi
if [[ -f $2 ]]; then
QUERY=$2
else
echo "$2 is not a file" >&2
exit 1
fi
OUTPUTBASE=$3
# Fail fast if the MUMmer binaries are missing.
check_prog nucmer show-coords
# Align query against reference; writes $OUTPUTBASE.delta.
nucmer --maxmatch --prefix=$OUTPUTBASE $REF $QUERY
# All alignments (.coords), then the longest mutually consistent subset
# per ref-query pair (.gdelta / .gcoords).
# delta-filter
show-coords -rclTH $OUTPUTBASE.delta > $OUTPUTBASE.coords
delta-filter -g $OUTPUTBASE.delta > $OUTPUTBASE.gdelta
show-coords -rclTH $OUTPUTBASE.gdelta > $OUTPUTBASE.gcoords
if $GEN_REPORT; then
dnadiff -p ${OUTPUTBASE}_report -d $OUTPUTBASE.delta
fi
| true |
a566d7f03d996af9eb777a95808ff96c8f818051 | Shell | trjordan/scripts | /.bash_aliases | UTF-8 | 778 | 2.84375 | 3 | [] | no_license | # ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
# AWS
alias ec2din=ec2-describe-instances
alias ec2dinr="ec2din -F 'instance-state-name=running'"
# Code cleanup
alias gch="git cherry -v master"
alias rmpyc="find . -regex '.*pyc$' -print0 | xargs --null -I file rm -v file"
alias ec="emacsclient"
# macOS hacks
alias updatedb="sudo /usr/libexec/locate.updatedb"
alias restart_camera="sudo killall VDCAssistant"
alias restart_audio="ps aux | grep 'coreaudio[d]' | awk '{print $2}' | xargs sudo kill"
function gra {
toadd=$(find . -name "$1")
git add $toadd
echo Added $toadd
}
function r() {
if [ "$1" == "emacs" ]; then
cd ~/.emacs.d
elif [ "$1" == "scripts" ]; then
cd ~/scripts
else
cd ~/repos/$1
fi
}
| true |
662898412182a0765594147c564fb0be35a50400 | Shell | qiwichupa/somescripts | /convert2mp3.sh | UTF-8 | 1,670 | 3.859375 | 4 | [] | no_license | #!/bin/bash
# Converts files from a directory, retaining the directory structure and files not to be converted.
# Deletes the original files!
# ffmpeg is required for conversion, and rsgain for replaygain tags

#= settings =====================================
ROOT=/share/Public/automation/2mp3
OUTDIR=MP3
REPLAYGAIN=true
#=================================================

export LOGFILE=${OUTDIR}/log.txt
LOCKFILE=/var/lock/2mp3.lock

# Single-instance guard: bail out silently if a previous run is active.
if test -f "$LOCKFILE"; then
  exit 0
fi

cd "$ROOT" || exit 1

touch "$LOCKFILE"
# Remove the lock on every exit path, so a crashed run cannot block all
# future runs (previously the lock was only removed at the very end).
trap 'rm -f "$LOCKFILE"' EXIT

CONVERTED=false

# Process every file outside the output tree that is older than 1 minute.
# (Paths with embedded newlines are not supported by the line-based read.)
while IFS= read -r i
do
  CONVERTED=true
  # find output starts with "./"; ${i:1} drops the dot so paths can be
  # re-rooted under ./$OUTDIR while keeping the directory structure.
  mkdir -p "./${OUTDIR}$(dirname "${i:1}")"
  echo "$i" >> "$LOGFILE"
  if [[ $i =~ .*\.(ogg|flac)$ ]]
  then
    # Length of ".<ext>" to strip from the end: extension length + dot.
    len=$(( ${#BASH_REMATCH[1]} + 1 ))
    OUTFILE="./${OUTDIR}${i:1:-${len}}.mp3"
    if ffmpeg -y -hide_banner -loglevel error -nostdin -i "$i" -vn -ar 44100 -ac 2 -q:a 2 -map_metadata 0 "$OUTFILE"
    then
      rm "$i"
      echo "Converted to $OUTFILE and removed" >> "$LOGFILE"
      # Only tag files that were actually produced.
      if [[ $REPLAYGAIN = true ]]
      then
        rsgain custom --output --clip-mode=a --true-peak "$OUTFILE" | sed 's/^/ /' >> "$LOGFILE"
      fi
    fi
  else
    OUTFILE="./${OUTDIR}${i:1}" # drop leading dot (.) in filename $i
    mv -f "$i" "$OUTFILE"
    echo "Moved (not converted) to $OUTFILE" >> "$LOGFILE"
  fi
  echo "" >> "$LOGFILE"
done < <(find . -type f -not -path "./${OUTDIR}/*" -mmin +1)

if [[ $CONVERTED == true ]]
then
  echo "Remove empty dirs" >> "$LOGFILE"
  find . -type d -empty -delete
  echo "Complete!" >> "$LOGFILE"
fi
33c26222edc95f062f68a5d6d4baac91d0ff4407 | Shell | oitofelix/pkcs12-gpg-ssh-sign | /pkcs12-gpg-ssh-sign-send | UTF-8 | 1,277 | 4.0625 | 4 | [
"FSFAP"
] | permissive | #!/usr/bin/env bash
#
# Copyright (C) 2020 Bruno Félix Rezende Ribeiro <oitofelix@gnu.org>
#
# Copying and distribution of this file, with or without modification,
# are permitted in any medium without royalty provided the copyright
# notice and this notice are preserved. This file is offered as-is,
# without any warranty.

PASS_PORT_DEFAULT='3000'

set -o errexit

# Abort with a message unless every named command is installed.
depends()
{
  for cmd in "$@"; do
    if ! command -v "$cmd" > /dev/null; then
      echo "Command '$cmd' is required. Please, install it."
      exit 1
    fi
  done
}

depends ssh

# Print usage to stderr (interpolates $0 and $PASS_PORT_DEFAULT).
usage()
{
echo "\
Usage: SIGN_PASS_PORT=PORT \\
$0 FILE [USER@]HOST
Send GPG-encrypted password contained in FILE to [USER@]HOST:PORT.
FILE is sent indefinitely in regular intervals of about 1 second. To
interrupt, usually one would press Ctrl+C or signal SIGINT, for
interactive or non-interactive sessions, respectively.
SIGN_PASS_PORT defaults to '$PASS_PORT_DEFAULT'." 1>&2
}

# Show usage if either required-argument assertion below aborts the script.
trap usage EXIT

: "${1:?First argument must be GPG-encrypted password file name}"
: "${2:?Second argument must be user@host target}"

SIGN_PASS_PORT=${SIGN_PASS_PORT:-$PASS_PORT_DEFAULT}

# Arguments are valid: disarm the usage-on-exit trap.
trap - EXIT

echo "Sending GPG-encrypted password to $2:$SIGN_PASS_PORT..."
echo "Press Ctrl+C to exit."

# Forward the encrypted FILE to HOST:PORT roughly once per second.
# ssh -W failures are ignored (|| :) so transient errors do not kill the
# loop under errexit.
while sleep 1; do
  ssh -q -W localhost:"$SIGN_PASS_PORT" "$2" < "$1" || :
done
| true |
91349ac61b42c83275868577ecf13e4de8d60a9e | Shell | seanwallawalla-forks/iOS | /screenshots.sh | UTF-8 | 1,168 | 2.8125 | 3 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
# screenshots.sh
# DuckDuckGo
#
# Copyright © 2017 DuckDuckGo. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Inspired by https://elmland.blog/2017/08/31/reset-xcode-simulators/

# Reset every iOS simulator so screenshots start from a clean state.
echo -ne 'Reset iOS Simulators ... '
osascript -e 'tell application "iOS Simulator" to quit'
osascript -e 'tell application "Simulator" to quit'
if ! xcrun simctl erase all; then
    echo FAILED
    exit 1
fi

# The old check used `[ \`which fastlane\` -eq "" ]`, a broken numeric
# comparison that never detected fastlane; use command -v instead.
if ! command -v fastlane > /dev/null 2>&1; then
    echo Fastlane is not installed
    exit 2
fi

rm -rf fastlane/screenshots
fastlane screenshots

cd fastlane/screenshots || exit 3
# NOTE: archive name uses %Y_%d_%m (year_day_month); kept as-is for
# compatibility with existing archive names.
tar -czf ../screenshots_$(date +"%Y_%d_%m").tar.gz *
| true |
96b7aaf17d22b61f9896c128b2f1188f3a33d44a | Shell | Comfy321/nudotfiles | /2020/scripts/4chan.sh | UTF-8 | 1,306 | 3.84375 | 4 | [] | no_license | #function 4chan() {
# Download every image attached to a 4chan thread into the current
# directory.  (Originally a shell function; runs here as a script, so
# error paths use `exit` — the old top-level `return` statements only
# printed an error and let the script keep running.)

# All three tools are required: curl fetches the thread JSON, jq parses
# it and wget downloads the images.  Check up front, before any network
# I/O (the old check ran after curl and accepted jq OR curl).
for tool in curl jq wget; do
    if ! command -v "$tool" > /dev/null 2>&1; then
        echo "$tool not found! Install curl, jq and wget"
        exit 4
    fi
done

if [[ $# -ne 1 ]]
then
    echo 'No URL specified! Give the URL to the thread as the ONLY argument'
    exit 1
fi

# Reduce the URL to its canonical core.
# This should look something like: g/thread/73097964
urlPrimative=$(grep -o '[0-9a-zA-Z]\{1,4\}/thread/[0-9]*' <<< "$1")
if [[ -z $urlPrimative ]]
then
    echo 'Malformed URL! Give the URL to the thread as the ONLY argument'
    exit 2
fi

threadJSON=$(curl -s "https://a.4cdn.org/$urlPrimative.json")
if [[ -z $threadJSON ]]
then
    echo 'Invalid URL! It either 404`d or was never a real thread'
    exit 3
fi

board=$(cut -f1 -d/ <<< "$urlPrimative")

# One jq pass: keep only posts that carry a file and build its full URL.
imageURLs=$(jq -r --arg board "$board" \
    '.posts[] | select(.ext) | "https://i.4cdn.org/\($board)/\(.tim)\(.ext)"' \
    <<< "$threadJSON")

if [[ -n $imageURLs ]]
then
    imageCount=$(wc -l <<< "$imageURLs")
    counter=1
    while IFS= read -r imageURL
    do
        printf 'Downloading image %s of %s...' "$counter" "$imageCount"
        wget -q -nc "$imageURL"
        echo ' Done'
        counter=$((counter + 1))
    done <<< "$imageURLs"
fi
| true |
4acd69e8af309dfea6180a798fd0f630bd1f3b2f | Shell | Guruprasad22/indicators | /src/main/resources/bhavdownload.sh | UTF-8 | 1,740 | 3.75 | 4 | [] | no_license | #!/bin/bash
#######################################################################################################################
################# Download the bhavcopy for a duration ################################################################
################# Usage : $0 <startDate> <endDate> <target>############################################################
################## example : $0 20160101 20161231 /some/folder ########################################################

# Derive the date strings used in NSE bhavcopy URLs from a YYYYMMDD date.
# "Returns" via globals: dateString (e.g. 01JAN2017), yr (2017), mnt (JAN).
stringGen()
{
  dateString=$(date -d "$1" +%d%b%Y | tr 'a-z' 'A-Z')
  yr=$(date -d "$1" +%Y)
  mnt=$(date -d "$1" +%b | tr 'a-z' 'A-Z')
}

# Download and unpack every daily bhavcopy between $1 and $2 (inclusive).
dateRangeFunction()
{
  startDate=$1
  endDate=$2
  while [[ $startDate -le $endDate ]]
  do
    echo "date chosen : $startDate"
    stringGen "$startDate"
    # Weekends/holidays have no file; verify the zip actually landed
    # before unzipping.
    if curl -o "cm$dateString$suffix" "$url$yr/$mnt/cm$dateString$suffix"; then
      if [ -f "./cm$dateString$suffix" ]; then
        unzip "./cm$dateString$suffix"
        rm -f "./cm$dateString$suffix"
      fi
    fi
    startDate=$(date -d "$startDate + 1 days" +%Y%m%d)
  done
}

if [ $# -ne 3 ]; then
  echo "usage => bash -x ./bhavdownload.sh 20170101 20170201 <target-directory-in-unix-style>"
  exit 1
fi

url=https://nse-india.com/content/historical/EQUITIES/
suffix=bhav.csv.zip

dateRangeFunction "$1" "$2"

# Move the extracted daily csv files to the target directory, then merge
# them (minus their header rows) into a single output.csv, writing the
# header only when the file does not exist yet.
mv "$PWD"/*.csv "$3"
if [ ! -f "$3/output.csv" ]; then
  echo "symbol,series,open,high,low,close,last,prevclose,tottrdqty,tottrdval,timestamp,totaltrades,isin,dummy" > "$3/output.csv"
fi
tail -q -n +2 "$3"/cm*.csv >> "$3/output.csv"
rm "$3"/cm*.csv
exit 0
90e4c58f5d3a23c737c164757cf5ccad3f7a966c | Shell | Dvoikin/pip-services-users-preferences-dotnet | /build.sh | UTF-8 | 637 | 3.453125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Any subsequent(*) commands which fail will cause the shell script to exit immediately
set -e
set -o pipefail

# Derive the component name from the first .nuspec in this directory:
# everything before the first dot.  (Globbing instead of parsing `ls`.)
nuspec_files=(*.nuspec)
COMPONENT="${nuspec_files[0]%%.*}"

# Extract <version>X.Y.Z</version> from the nuspec (first occurrence,
# stripped of CR and spaces).
VERSION=$(grep -m1 "<version>" *.nuspec | tr -d '\r' | sed 's/[ ]//g' | awk -F ">" '{ print $2 }' | awk -F "<" '{ print $1 }')

IMAGE="pipdevs/${COMPONENT}:${VERSION}-build"
CONTAINER="${COMPONENT}"

# Remove build files
rm -rf ./obj

# Build docker image
docker build -f Dockerfile.build -t "${IMAGE}" .

# Create and copy compiled files, then destroy
docker create --name "${CONTAINER}" "${IMAGE}"
docker cp "${CONTAINER}":/obj ./obj
docker rm "${CONTAINER}"
51ad65b3bc7bbd41aa746291a2980a1803c7c1d8 | Shell | okdana/shu2 | /tests/shu:proc:trace.test | UTF-8 | 787 | 3.25 | 3 | [] | no_license | #!/usr/bin/env zsh
##
# shu-unit tests for shu:proc:trace()
# Name of the function under test: script basename without extension
# (:t = tail/basename, :r = strip extension).
fn=${0:t:r}
# Data-provider cases, one string each, fields separated by `!`:
# expected!verbosity!args
provider=(
$'print foo\nfoo!N0!print foo'
$' print foo\nfoo!N0!-2 print foo'
'foo!Q1!print foo'
$'print foo\nfoo!Q1!-q print foo'
)
for p in $provider; do
# Split the case on `!` into (expected, verbosity, args).
pa=( "${(@s<!>)p}" )
# Shell-word split the args field, if present, into an argument array.
if [[ -n $pa[3] ]]; then
paa=( "${(@z)pa[3]}" )
else
paa=( )
fi
# Human-readable label: each arg quoted ((qq)) and joined with spaces (j< >).
like="with argument(s) ${(j< >)${(@qq)paa}}"
# Run $fn under the requested SHU_VERBOSITY_* level and compare (-e)
# its output against the expected string.
@assert -d "Behaves as expected $like given verbosity ${pa[2]}" \
-e "${pa[1]}" -eq \
"
SHU_VERBOSITY=\$SHU_VERBOSITY_${pa[2]}
${fn} ${(j< >)${(@qq)paa}}
"
done
# Calling with no arguments must panic (-r compares the return status).
@assert -d 'Panics with no arguments' \
-r SHU_EX_PANIC -eq \
$fn
# Unsupported option spellings must also panic.
for p in -Y +Y +v; do
@assert -d "Panics with illegal option $p" \
-r SHU_EX_PANIC -eq \
$fn $p
done
| true |
0664abfcd133a76af14939ae080094a0fec2479b | Shell | pombredanne/bin | /proxycommand-ssh | UTF-8 | 118 | 2.75 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# ssh ProxyCommand helper: tunnel stdio to $1:$2 through a jump host.
# IPv6 literals (anything containing a colon) are wrapped in brackets so
# the host:port argument to ssh -W stays unambiguous.
target=$1
if [ "${target#*:}" != "$target" ]; then
  host="[$target]"
else
  host="$target"
fi
port=$2
# Jump host defaults to "panther" unless PROXY_HOST overrides it.
exec ssh ${PROXY_HOST-panther} -W "$host:$port"
| true |
e41ebe56a7424448c85ebddc9e117b2a05004603 | Shell | jegj/lfcs_notes | /Storage management/RAID/init.sh | UTF-8 | 1,351 | 3.1875 | 3 | [] | no_license | #!/bin/bash
####### Basic Usage
# Partition type (fdisk/gdisk): Linux RAID
# NOTE: this file is a command cheat-sheet for mdadm practice, not a
# script to execute top to bottom — every command below is destructive.

# 1) Install dependencies
apt install mdadm
# 2) Create the two members for the RAID (LVM volumes stand in for disks)
lvcreate -L 5G -n r0 myvg
lvcreate -L 5G -n r1 myvg
# 3) Create a RAID1 array from the two members
mdadm -C /dev/md1 --level=1 --raid-disks=2 /dev/myvg/r0 /dev/myvg/r1
# 4) Make a filesystem, mount it and persist the mount
mkfs.ext4 /dev/md1
mkdir /myraid
mount /dev/md1 /myraid
echo "/dev/md1 /myraid ext4 defaults 0 2" >> /etc/fstab
# 5) Record the array so it is assembled at boot
bash -c "mdadm --detail --scan >> /etc/mdadm.conf"
# 6) Print some info
cat /proc/mdstat
# 7) Add some data
echo "Esto es una prueba." > /myraid/test
# 8) Start the monitoring service
systemctl start mdmonitor

####### Testing failure
# 1) Mark a member as failed
mdadm -f /dev/md1 /dev/myvg/r0
# 2) Remove it from the array
mdadm /dev/md1 --remove /dev/myvg/r0
# 3) Create a replacement member and add it
lvcreate -L 5G -n r2 myvg
mdadm /dev/md1 --add /dev/myvg/r2
# 4) Grow the array to use it as an active disk (not a spare) and
#    resize the filesystem
mdadm --grow --raid-devices=3 /dev/md1
resize2fs /dev/md1
# 5) Shrink back to two active disks.
#    (fixed: this previously targeted /dev/md0, which does not exist in
#    this scenario; also note r0 was already removed above — in practice
#    fail/remove whichever member you are retiring)
mdadm /dev/md1 --fail /dev/myvg/r0 --remove /dev/myvg/r0
mdadm --grow /dev/md1 --raid-devices=2
umount /myraid
resize2fs /dev/md1

####### Delete RAID
# 1) Unmount and stop the monitoring service
umount /dev/md1
systemctl stop mdmonitor
# 2) Stop the array
mdadm --stop /dev/md1
# or
mdadm -S /dev/md1
# 3) Wipe the RAID metadata from the members
mdadm --zero-superblock /dev/myvg/r0 /dev/myvg/r1 /dev/myvg/r2
5ddbbc6d750d8456837dd35d1445f5da619e808f | Shell | delkyd/alfheim_linux-PKGBUILDS | /libdreamdvd/PKGBUILD | UTF-8 | 1,259 | 2.734375 | 3 | [] | no_license | # Contributor: Stas Bourakov <sbourakov(at)gmail.com>
pkgname=libdreamdvd
pkgver=20120425
pkgrel=1
pkgdesc="This is a libdvdnav wrapper library which can be used to simply build a dvd player frontend for dream multimedia settop boxes. One of the dependencies for Enigma2 project."
arch=('i686' 'x86_64')
url="https://schwerkraft.elitedvb.net/projects/libdreamdvd"
license=('GPL2')
depends=('gcc-libs')
makedepends=('autoconf' 'automake' 'bison' 'fakeroot' 'flex' 'gcc' 'libtool' 'm4' 'make' 'patch' 'pkg-config' 'git')
source='git://schwerkraft.elitedvb.net/libdreamdvd/libdreamdvd.git'
conflicts=()
md5sums=('SKIP')

# Use the date of the latest upstream commit as the package version.
pkgver() {
  cd "$srcdir/$pkgname"
  git log -1 --pretty=format:%cd --date=short | sed 's/-//g'
}

# Configure, build and install into the package staging directory.
# (makepkg clones the git source itself, so no manual clone is needed.)
package() {
  cd "$srcdir/$pkgname"
  # autogen.sh expects this header template to exist
  touch ./libdreamdvd_config.h.in || return 1
  ./autogen.sh || return 1
  ./configure --prefix=/usr || return 1
  make || return 1
  make install DESTDIR="$pkgdir" || return 1
}
| true |
6a5cfb031828feaece80b201fce8d05779e165c5 | Shell | Mierenga/vimide | /switch_project | UTF-8 | 2,373 | 3.65625 | 4 | [] | no_license | #!/bin/bash
################################################################################
# This script generates a new ide.vim file, which is loaded with vim and
# contains the vim interface for this plugin
################################################################################

# Argument for the project path to set as the current active project:
PROJECT_PATH=$1

# Path to this plugin's directory:
PLUGIN_PATH=~/.vim/plugin/vimide
# Path to the configuration this file will generate
PLUGINVIMRC=$PLUGIN_PATH/ide.vim
# Path to the keymap definition used to generate the keymaps
KEYMAPCONFIG=$PLUGIN_PATH/keys.config
# Path to the static functions that need to be loaded
# in the plugin configuration
STATICFUNCTIONS=$PLUGIN_PATH/static.functions

################################################################################

# Refuse to run without a project path: an empty value would generate
# broken "make --file /Makefile" keymaps.
if [ -z "$PROJECT_PATH" ]; then
  echo "usage: $0 <project-path>" >&2
  exit 1
fi

# Load the key map configuration
# (source of variables $KEY_* below)
. "$KEYMAPCONFIG" || exit 1

################################################################################

# write a header to the configuration file that displays
# the active project path (truncates any previous configuration)
clear_and_write_header() {
  {
    echo '"--------------------------------------------------------------------'
    echo '" ***this file is auto-generated by the SwitchProject command***'
    echo "\" active project: $PROJECT_PATH"
    echo '"--------------------------------------------------------------------'
    echo
  } > "$PLUGINVIMRC"
}

# ensure the plugin configuration loads the static functions
write_source_static_functions() {
  echo ":source $STATICFUNCTIONS" >> "$PLUGINVIMRC"
}

# append a normal-mode keymap ($1) running make target ($2) against the
# active project's Makefile
write_keymap_make_target() {
  printf 'nmap %s :!make --file %s/Makefile %s <CR><CR>\n' \
    "$1" "$PROJECT_PATH" "$2" >> "$PLUGINVIMRC"
}

# main function to generate a new configuration
write_ide_vimrc() {
  clear_and_write_header
  write_source_static_functions
  write_keymap_make_target "$KEY_RUN" run
  write_keymap_make_target "$KEY_KILL" kill
  write_keymap_make_target "$KEY_TEST" test
}

# execute
################################################################################
write_ide_vimrc
################################################################################
| true |
3db7e0313332ee177c1c848ccd1bce39bd16b419 | Shell | lukaszachy/tmt | /tests/login/when.sh | UTF-8 | 933 | 2.59375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Regression test for tmt's `login --when` option: the interactive shell
# must start only when the executed test's result matches the condition.
. /usr/share/beakerlib/beakerlib.sh || exit 1

rlJournalStart
rlPhaseStartSetup
# Work in a throwaway directory with a minimal tmt plan.
rlRun "tmp=\$(mktemp -d)" 0 "Creating tmp directory"
# Base command: local provision, run the test given after -s.
rlRun "tmt='tmt run -ar provision -h local execute -h tmt -s '"
rlRun "pushd $tmp"
rlRun "set -o pipefail"
rlRun "tmt init -t mini"
rlPhaseEnd

rlPhaseStartTest "Skipped"
# Test passes (`true`) but login is conditioned on failure (-w fail),
# so the interactive session must be skipped.
rlRun "$tmt true login -w fail -c true 2>&1 >/dev/null | tee output"
rlAssertGrep "Skipping interactive" "output"
rlAssertNotGrep "Starting interactive" "output"
rlPhaseEnd

rlPhaseStartTest "Enabled"
# Test fails (`false`), matching -w fail: the interactive session must
# start; the overall run is expected to exit 1 (failed test).
rlRun "$tmt false login -w fail -c true 2>&1 >/dev/null | tee output" 1
rlAssertNotGrep "Skipping interactive" "output"
rlAssertGrep "Starting interactive" "output"
rlPhaseEnd

rlPhaseStartCleanup
rlRun "popd"
rlRun "rm -r $tmp" 0 "Removing tmp directory"
rlPhaseEnd
rlJournalEnd
| true |
4435333af4aa5e06843796828576d039a15ddd70 | Shell | humanier/fuzzy-quiz | /build.sh | UTF-8 | 253 | 2.59375 | 3 | [] | no_license | #!/usr/bin/env sh
# Project build pipeline: lint, compile TypeScript, then refresh the
# static web assets under dist/.  Aborts on the first failing step.
set -e

# Print a blank line followed by a "--- <label>" banner.
banner() {
    printf '\n--- %s\n' "$1"
}

banner 'Linting ... '
npm run lint

banner 'Compiling TypeScript ... '
npm run build

# copy static content to dist
banner 'Copying web files to dist/static ...'
rm -rf dist/www
cp -r src/www dist/
echo
d2746d267374f1ae685390d910374de8a3a0495d | Shell | umediavfx/usdbuild | /setup_env.sh | UTF-8 | 1,112 | 2.53125 | 3 | [] | no_license | if [ ! -f "$1/bin/usdedit" ] && [ ! -f "$1/plugin/AL_USDMayaPlugin.sh" ]; then
echo "Invalid directory argument."
else
USDMAYA_DIR=$1
export PATH=$PATH:$USDMAYA_DIR/third_party/maya/src:/usr/local/src:$USDMAYA_DIR/src:$USDMAYA_DIR/bin:$USDMAYA_DIR/sbin:/usr/bin:/usr/sbin:/bin:/sbin:$HOME/.local/bin:$HOME/bin:/usr/local/bin:/usr/local/sbin
export PYTHONPATH=$PYTHONPATH:$USDMAYA_DIR/third_party/maya/lib/python:/usr/local/lib/python:$USDMAYA_DIR/lib/python
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$USDMAYA_DIR/third_party/maya/lib:/usr/local/lib:$USDMAYA_DIR/lib
export MAYA_PLUG_IN_PATH=$MAYA_PLUG_IN_PATH:$USDMAYA_DIR/third_party/maya/plugin/:$USDMAYA_DIR/plugin/
export MAYA_SCRIPT_PATH=$MAYA_SCRIPT_PATH:$USDMAYA_DIR/third_party/maya/lib:$USDMAYA_DIR/third_party/maya/share/usd/plugins/usdMaya/resources:$USDMAYA_DIR/lib:/usr/local/lib:$USDMAYA_DIR/share/usd/plugins/usdMaya/resources:$USDMAYA_DIR/third_party/maya/plugin/pxrUsdPreviewSurface/resources:$USDMAYA_DIR/third_party/maya/lib/usd/usdMaya/resources/
export XMBLANGPATH=$XMBLANGPATH:$USDMAYA_DIR/third_party/maya/lib/usd/usdMaya/resources
fi
| true |
92507d7ccc08d7b852a63f1391963efa6bcecc61 | Shell | tukiyo/mysql4.0php5.2 | /copy-rpms.sh | UTF-8 | 403 | 2.640625 | 3 | [] | no_license | #!/bin/sh
# Copy built packages out to the shared /host mount.
cd / || exit 1

# Copy RPMs, excluding the host mount itself, source rpms and
# checkinstall leftovers.  POSIX sh: no brace expansion or arrays, so a
# temp list file + xargs is used (the old `chmod /host/{rpm,deb}` brace
# expansion fails under dash).  find runs once per type instead of twice.
find / -type f -name "*.rpm" 2>/dev/null | grep -E -v '/host/|\.src\.rpm|checkinstall' > /tmp/rpmlist
if [ -s /tmp/rpmlist ]; then
  mkdir -p /host/rpm
  # package file names contain no whitespace, so a line-based copy is safe
  xargs -I{} cp {} /host/rpm/ < /tmp/rpmlist
  chmod 777 /host/rpm
fi
rm -f /tmp/rpmlist

# Copy DEBs, excluding the host mount itself.
find / -type f -name "*.deb" 2>/dev/null | grep -E -v '/host/' > /tmp/deblist
if [ -s /tmp/deblist ]; then
  mkdir -p /host/deb
  xargs -I{} cp {} /host/deb/ < /tmp/deblist
  chmod 777 /host/deb
fi
rm -f /tmp/deblist
| true |
dcce493aa80cbad3a42b74adeacf323129a59668 | Shell | deguif/docker-aws-cli | /entrypoint.sh | UTF-8 | 342 | 3.3125 | 3 | [] | no_license | #!/bin/sh
# Run the AWS CLI as a user matching the owner of the mounted /.aws
# directory, so cache/credential files keep correct host ownership.
uid=$(stat -c %u /.aws)
gid=$(stat -c %g /.aws)

# If /.aws is owned by root there is nothing to remap.
# (POSIX test: use -eq, not the bash-only `==`.)
if [ "$uid" -eq 0 ] && [ "$gid" -eq 0 ]; then
    exec aws "$@"
fi

# Rewrite the aws-cli user/group entries to the host uid/gid.
# sed ERE has no \d (the old pattern never matched); use [0-9]+.
sed -i -r "s/aws-cli:x:[0-9]+:[0-9]+:/aws-cli:x:$uid:$gid:/g" /etc/passwd
sed -i -r "s/aws-cli:x:[0-9]+:/aws-cli:x:$gid:/g" /etc/group

# Drop privileges and run the CLI.  "$@" expands to nothing when no
# arguments were given, so one exec covers both the bare and the
# argument-carrying invocation.
exec su-exec aws-cli aws "$@"
| true |
45d48179806557fac1b30e4212934d8bcbb8115d | Shell | anatolinicolae/docker-php-fpm | /tests/prod/04-test-env-enable_mail.sh | UTF-8 | 1,554 | 3.53125 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Integration test: with ENABLE_MAIL=2 the container must run a local
# mail setup that delivers php mail() output into /var/mail/devilbox.
set -e
set -u
set -o pipefail

# Directory of this script, resolved through symlinks.
CWD="$(cd -P -- "$(dirname -- "$0")" && pwd -P)"
# Positional args: image name, tag version, flavour suffix.
IMAGE="${1}"
VERSION="${2}"
FLAVOUR="${3}"

# Shared helpers (run, docker_run, docker_exec, docker_logs, docker_stop).
# shellcheck disable=SC1090
. "${CWD}/../.lib.sh"

############################################################
# Tests
############################################################

###
### Postfix
###

# Host directory bind-mounted as the container's /var/mail spool.
MOUNTPOINT="$( mktemp --directory )"
# Start the container with mail enabled and the current uid/gid mapped in.
did="$( docker_run "${IMAGE}:${VERSION}-${FLAVOUR}" "-e DEBUG_ENTRYPOINT=2 -e NEW_UID=$(id -u) -e NEW_GID=$(id -g) -e ENABLE_MAIL=2 -v ${MOUNTPOINT}:/var/mail" )"

# Give the entrypoint time to come up.
run "sleep 10"

# The entrypoint must have logged that mail handling was configured.
if ! run "docker logs ${did} 2>&1 | grep -q 'ENABLE_MAIL'"; then
docker_logs "${did}" || true
docker_stop "${did}" || true
rm -rf "${MOUNTPOINT}"
echo "Failed"
exit 1
fi

# The mail spool file must exist on the host side of the bind mount ...
if [ ! -f "${MOUNTPOINT}/devilbox" ]; then
echo "Mail file does not exist: ${MOUNTPOINT}/devilbox"
ls -lap "${MOUNTPOINT}/"
docker_logs "${did}" || true
docker_stop "${did}" || true
rm -rf "${MOUNTPOINT}"
echo "Failed"
exit 1
fi
# ... and be readable by the current user.
if [ ! -r "${MOUNTPOINT}/devilbox" ]; then
echo "Mail file is not readable"
ls -lap "${MOUNTPOINT}/"
docker_logs "${did}" || true
docker_stop "${did}" || true
rm -rf "${MOUNTPOINT}"
echo "Failed"
exit 1
fi

# Send test email
docker_exec "${did}" "php -r \"mail('mailtest@devilbox.org', 'the subject', 'the message');\""
# Allow delivery to the spool before checking.
run "sleep 5"
if ! run "grep 'the subject' ${MOUNTPOINT}/devilbox"; then
docker_logs "${did}" || true
docker_stop "${did}" || true
run "cat ${MOUNTPOINT}/devilbox"
rm -rf "${MOUNTPOINT}"
echo "Failed"
exit 1
fi

# Success: tear down the container and the temp spool directory.
docker_stop "${did}"
rm -rf "${MOUNTPOINT}"
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.