blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
7c0a84d2758dd5052a3cd782d50f532d1e34161f
|
Shell
|
FedericoRessi/install-os
|
/bin/setup-libvirt-ssh
|
UTF-8
| 2,045
| 3.828125
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Configure password-less SSH access to a target host and run the
# setup-libvirt-ssh ansible playbook against it.
#
# Usage: setup-libvirt-ssh [<target-host>]
#
# Overridable environment variables: TARGET_HOST_DIR, TARGET_HOST_FILE,
# TARGET_PORT, TARGET_USER, SSH_CONFIG_FILE, ANSIBLE_CONFIG_FILE,
# ANSIBLE_INVENTORY_FILE, TARGET_KEY_FILE.
set -eu

TARGET_HOST_DIR=${TARGET_HOST_DIR:-$(pwd)/.install-os}
mkdir -p "${TARGET_HOST_DIR}"

# The target host is remembered across runs so the argument may be omitted
# on later invocations (2>/dev/null silences the expected first-run miss).
TARGET_HOST_FILE=${TARGET_HOST_FILE:-${TARGET_HOST_DIR}/host}
TARGET_HOST=${1:-$(cat "${TARGET_HOST_FILE}" 2> /dev/null || true)}
if [ "${TARGET_HOST}" == "" ]; then
echo "
Please specify target host name:
$0 <target-host>
"
exit 1
fi
echo "${TARGET_HOST}" > "${TARGET_HOST_FILE}"

TARGET_PORT=${TARGET_PORT:-22}
TARGET_USER=${TARGET_USER:-root}

# Generate SSH config file
SSH_CONFIG_FILE=${SSH_CONFIG_FILE:-${TARGET_HOST_DIR}/ssh_config}
echo "
Host target
HostName ${TARGET_HOST}
Port ${TARGET_PORT}
User ${TARGET_USER}
UserKnownHostsFile /dev/null
StrictHostKeyChecking no
" > "${SSH_CONFIG_FILE}"

# Generate ansible config file
ANSIBLE_CONFIG_FILE=${ANSIBLE_CONFIG_FILE:-${TARGET_HOST_DIR}/ansible.cfg}
echo "
[ssh_connection]
ssh_args = -F '${SSH_CONFIG_FILE}'
" > "${ANSIBLE_CONFIG_FILE}"

# Generate hosts.ini
ANSIBLE_INVENTORY_FILE=${ANSIBLE_INVENTORY_FILE:-${TARGET_HOST_DIR}/hosts.ini}
echo "[target]
${TARGET_HOST}:${TARGET_PORT}
" > "${ANSIBLE_INVENTORY_FILE}"

# Detect SSH key file and create it if not found: prefer an existing
# id_rsa/id_dsa, fall back to generating ~/.ssh/id_rsa.
SSH_KEY_FILES=($(ls ~/.ssh/id_rsa ~/.ssh/id_dsa 2> /dev/null || true) ~/.ssh/id_rsa)
TARGET_KEY_FILE=${TARGET_KEY_FILE:-${SSH_KEY_FILES[0]}}
[ -r "${TARGET_KEY_FILE}" ] || ssh-keygen -P "" -f "${TARGET_KEY_FILE}"

# Install the public key on the target host. Fixes vs. original:
# - 'chown user:group' instead of the deprecated 'user.group' separator;
# - 'touch' guarantees authorized_keys exists before grep reads it;
# - grep -F -- matches the key literally (it contains '+' and '/', which
#   are regex metacharacters, and may start with '-').
export TARGET_PUB_KEY=$(cat "${TARGET_KEY_FILE}.pub")
ssh -F "${SSH_CONFIG_FILE}" target "
set -eu
mkdir -p ~/.ssh
chown -fR \${USER}:\${USER} ~/.ssh
chmod 700 ~/.ssh
touch ~/.ssh/authorized_keys
if ! grep -q -F -- '${TARGET_PUB_KEY}' ~/.ssh/authorized_keys; then
echo '${TARGET_PUB_KEY}' >> ~/.ssh/authorized_keys
fi
chmod 600 ~/.ssh/authorized_keys
"

# Remove the host from the local known_hosts just in case its key changed
# (-F: the host name is a literal string, not a regex).
(
grep -v -F -- "${TARGET_HOST}" ~/.ssh/known_hosts > ~/.ssh/known_hosts.tmp
mv ~/.ssh/known_hosts.tmp ~/.ssh/known_hosts
) || true

# Run ansible playbook
ansible-playbook "--ssh-extra-args=-F '${SSH_CONFIG_FILE}'" -u "${TARGET_USER}" -i "${ANSIBLE_INVENTORY_FILE}" setup-libvirt-ssh.yml
| true
|
c97306aa4fb441dabf2529182791c4a0adb501bb
|
Shell
|
cgruppioni/dotfiles
|
/zsh/options.zsh
|
UTF-8
| 982
| 2.734375
| 3
|
[] |
no_license
|
#################
# ZSH options #
#################

# Where interactive shell history is persisted.
HISTFILE=~/.zsh_history
# Effectively unlimited history, both in memory (HISTSIZE) and on disk.
HISTSIZE=1000000000000000000
SAVEHIST=$HISTSIZE

# Disable the bell for ambiguous completion listings and in general.
setopt no_list_beep
setopt no_beep

# Append as you type (incrementally) instead of on shell exit
setopt inc_append_history
# Remove older duplicates of a command from history.
setopt hist_ignore_all_dups
# Strip superfluous blanks from commands before saving them.
setopt hist_reduce_blanks

# Typing a bare directory name cd's into it; cd also pushes onto the stack.
setopt autocd
setopt autopushd

# Timestamp history entries
setopt extended_history

# show a message with the exit code when a command returns with a non-zero exit code
setopt print_exit_value

# Disable spelling correction of commands and arguments.
unsetopt correctall

# Allow [ or ] wherever you want
# (Prevents "zsh: no matches found: ...")
unsetopt nomatch

# https://github.com/gabebw/dotfiles/pull/15
unsetopt multios

# i - Vim's smart case
# j.5 - Center search results
# F - Quit if the content is <1 screen
# K - Quit on CTRL-C
# M - Longer prompt
# R - handle ASCII color escapes
# X - Don't send clear screen signal
export LESS="ij.5FKMRX"

# Show grep results in white text on a red background
# NOTE(review): GNU grep deprecates GREP_COLOR in favor of GREP_COLORS —
# confirm the installed grep still honors this variable.
export GREP_COLOR='1;37;41'
| true
|
52f98f3616188334e4c2784d3771e8ac85ac366a
|
Shell
|
jstrachan/hermit
|
/script/ci
|
UTF-8
| 176
| 2.578125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# CI entry point: install tooling via hermit, then run the single step
# selected by the TEST_RUNNER environment variable (test, lint or build).
# set -u makes an unset TEST_RUNNER fail fast; -x traces for CI logs.
set -euxo pipefail

./bin/hermit install

case "${TEST_RUNNER}" in
  test)
    ./bin/make test
    ;;
  lint)
    ./bin/make lint
    ;;
  build)
    ./bin/make build
    ;;
  *)
    # Fix: previously an unrecognized TEST_RUNNER fell through and the job
    # exited 0 without running anything, silently passing CI.
    echo "Unknown TEST_RUNNER: ${TEST_RUNNER}" >&2
    exit 1
    ;;
esac
| true
|
f615d8057d1158ca77b77efe61fdfd24f9617440
|
Shell
|
th0x4c/coin
|
/bin/coin-start
|
UTF-8
| 3,315
| 3.953125
| 4
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
#
# coin-start: run a named Coin command (a file under $COIN_HOME/command/)
# that collects OS / DB information into a timestamped log directory.
# The command file must define coin_description, coin_setup, coin_main and
# coin_teardown; the helpers below (execute, collect, show_parameter, ...)
# are available to it after being defined in this script.
#
# Resolve COIN_HOME to an absolute path and derive the directory layout.
COIN_HOME=$(dirname $0)/../
COIN_HOME=$($COIN_HOME/libexec/coin-expand-path.sh $COIN_HOME)
LOG_DIR=$COIN_HOME/log/coin_log_$(date +%Y-%m-%dT%H.%M.%S)/
LIBEXEC_DIR=$COIN_HOME/libexec/
SQL_DIR=$COIN_HOME/sql/
SCRIPT_DIR=$COIN_HOME/scripts/
COMMAND_DIR=$COIN_HOME/command/
TMP_DIR=$COIN_HOME/tmp/
# Print usage, listing one line per available command; each command file is
# sourced so its coin_description function can be called.
usage()
{
cat <<EOF
Description:
Start Coin to collect OS / DB information
Usage: coin-start <command>
Option:
-h, --help Output help
Command:
EOF
for comm in $(ls $COMMAND_DIR)
do
. ${COMMAND_DIR}/${comm}
printf " %s \t %s\n" $comm "$(coin_description)"
done
cat <<EOF
Example:
coin-start hello # start "hello" Coin command
EOF
}
# Handle leading options; the first non-option argument is the command name.
while [ $# -gt 0 ]
do
case $1 in
-h|--help )
usage
exit
;;
* )
break
;;
esac
done
COMM=$1
if [ -z "$COMM" ]
then
echo "No command file."
usage
exit
fi
if [ ! -f "$COMMAND_DIR/$COMM" ]
then
echo "No command file."
usage
exit
fi
if [ ! -d $LOG_DIR ]
then
mkdir $LOG_DIR
fi
# Fall back to /tmp for the PID file when the coin tmp dir is absent.
if [ ! -d $TMP_DIR ]
then
TMP_DIR=/tmp
fi
LOGFILE=${LOG_DIR}/${COMM}.log
# Source the selected command file so coin_setup/coin_main/coin_teardown
# become available to main() below.
. ${COMMAND_DIR}/${COMM}
# Timestamp format used as the -a (newer-than) reference for collect().
date_format()
{
date +%Y%m%d%H%M.%S
}
# SQL*Plus version as a bare digit string (e.g. "121020" for 12.1.0.2.0).
sqlplus_version()
{
sqlplus -V | cut -d" " -f3 | sed '/^$/d' | sed 's/\.//g'
}
# Print the value of an Oracle init parameter (connects / as sysdba).
show_parameter()
{
local parameter=$1
sqlplus -S /nolog <<EOF
set feedback off
set pages 0
connect / as sysdba
select value from v\$parameter where upper(name) = upper('$parameter');
EOF
}
# Print a value from v$diag_info by name (connects / as sysdba).
diag_info()
{
local name=$1
sqlplus -S /nolog <<EOF
set feedback off
set pages 0
connect / as sysdba
select value from v\$diag_info where upper(name) = upper('$name');
EOF
}
# Convenience wrapper: the diagnostic trace directory path.
diag_trace()
{
diag_info "Diag Trace"
}
# Run one monitoring script (.sql from SQL_DIR or .sh from SCRIPT_DIR when
# given as a bare name) under a hang-check timeout, gzip-logging its output
# into a per-script subdirectory of LOG_DIR.
# NOTE(review): 'local' is not POSIX sh — this relies on the system /bin/sh
# supporting it (dash/bash do).
execute()
{
local timeout=$1
local script=$2
local opts=$3
local comment=$4
local logfile=""
if [ ! -f $(echo $script | cut -d" " -f1) ]
then
if [ "$($LIBEXEC_DIR/coin-extname.sh $script)" = "sql" ]
then
script=${SQL_DIR}/${script}
elif [ "$($LIBEXEC_DIR/coin-extname.sh $script)" = "sh" ]
then
script=${SCRIPT_DIR}/${script}
fi
fi
desc=$($LIBEXEC_DIR/coin-extname.sh -x $script)
if [ ! -d ${LOG_DIR}/${desc} ]
then
mkdir ${LOG_DIR}/${desc}
fi
logfile=${LOG_DIR}/${desc}/${desc}_$(date +%Y-%m-%dT%H.%M.%S).log.gz
echo "$(date) Start $desc $comment"
$LIBEXEC_DIR/coin-hang-check.sh -t $timeout $LIBEXEC_DIR/coin-exec.sh -z -l $logfile $opts $script >> $LOGFILE
echo "$(date) Finish $desc $comment"
}
# Copy files newer than STARTDATE from the given comma-separated directory
# list into LOG_DIR.
collect()
{
local dirs=$1
$LIBEXEC_DIR/coin-find-file.sh -a $STARTDATE -o $LOG_DIR $(echo $dirs | sed 's/,/ /g') >> $LOGFILE
}
# Drive the sourced command's setup/main/teardown hooks.
main()
{
echo "$(date) Start collect info PID:$MYPID"
coin_setup
coin_main
coin_teardown
echo "$(date) Finish collect info. Log to $LOG_DIR"
remove_pid_file
}
# main with stderr appended to LOGFILE and stdout tee'd to it.
main_tee()
{
main 2>>$LOGFILE | tee -a $LOGFILE
}
# On INT/USR1/TERM: kill the background main pipeline (sparing gzip so logs
# finish flushing), run teardown and clean up the PID file.
signal_handler()
{
$LIBEXEC_DIR/coin-pid.sh --follow --long --kill --pid $MAINPID --exclude gzip >>$LOGFILE
coin_teardown
echo "$(date) Cancel collect info. Log to $LOG_DIR"
remove_pid_file
exit
}
remove_pid_file()
{
if [ -f $PIDFILE ]
then
rm $PIDFILE
fi
}
# --- entry point: record PID, arm the signal trap, run main in background
# so the trap can fire while waiting.
MYPID=$$
PIDFILE=${TMP_DIR}/pid.${COMM}.${MYPID}
touch $PIDFILE
STARTDATE=$(date_format)
trap "signal_handler 2>>$LOGFILE | tee -a $LOGFILE" 2 10 15
# main 2>>$LOGFILE | tee -a $LOGFILE
main_tee &
MAINPID=$!
wait
| true
|
77f72f6ef64873204a559a53debf13fb7a50173f
|
Shell
|
fourstepper/dots
|
/personalscripts/startjack.sh
|
UTF-8
| 700
| 2.78125
| 3
|
[] |
no_license
|
#!/usr/bin/bash
# Fix: the shebang must be the very first line of the file to take effect;
# it previously sat below the comment banner.
######### I, as of now, do not use this script, as I use the Cadence GUI and settings #########
#
# Configure and start a JACK audio server via jack_control, then launch a
# Carla project in the background.

# Output of cat /proc/asound/cards (mine is "USB")
card=USB
# Wanted sample rate (mine is 44100)
rate=44100
# Wanted buffer size (mine is 1024)
buffer=1024

# Stop any running instances before reconfiguring JACK.
killall carla &>/dev/null
killall a2jmidid &>/dev/null

# path to your wanted carla project
carlarun() {
    # Fix: the path was single-quoted with a leading '~', which the shell
    # never expands — carla received the literal string. Use $HOME instead.
    carla -n "$HOME/.dotfiles/dotfiles/.config/Carla/sonarworks_carla_config.carxp"
}

# run it
jack_control ds alsa
# Fix: use the $card variable declared above instead of hardcoding "USB".
jack_control dps device hw:$card
jack_control dps rate $rate
jack_control dps nperiods 2
jack_control dps period $buffer
jack_control start
jackpatch
carlarun &>/dev/null &
#a2jmidid -e &>/dev/null &
exit
| true
|
7edf9438724284d03bb42967aae58054c8e404a9
|
Shell
|
HugoOliveiraSoares/ShellScript
|
/script.sh
|
UTF-8
| 361
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
# Fix: shebang was "#/bin/bash" (missing '!'), so it was a plain comment.
#
# Usage: script.sh wordlist.txt
# For each line of the wordlist, print "word:md5:base64:sha256" and append
# the result as an aligned table to <wordlist>.final.
if [ "$1" == "" ]; then
echo "Uso: $0 wordlist.txt"
echo
exit
fi
# Normalize CRLF line endings in place before hashing.
dos2unix "$1" >> /dev/null
# Read line by line (IFS=/-r preserve whitespace and backslashes) instead of
# word-splitting the whole file with $(cat ...).
while IFS= read -r palavra; do
md5="$(echo -n "$palavra" | md5sum | cut -d" " -f1)"
b64="$(echo -n "$palavra" | base64)"
sha="$(echo -n "$palavra" | sha256sum | cut -d" " -f1)"
echo "$palavra:$md5:$b64:$sha"
done < "$1" | column -s: -t >> "$1.final"
| true
|
e9054c74211d90cded58a90b23acd6d38f66639c
|
Shell
|
Ascend/ModelZoo-PyTorch
|
/PyTorch/built-in/others/CLIP_for_PyTorch/test/train_clip_performance_1p.sh
|
UTF-8
| 5,393
| 3.171875
| 3
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] |
permissive
|
#!/bin/bash
#
# Single-device (1p) performance training run for CLIP on Ascend NPU.
# Parses --key=value arguments, launches run_clip.py, then extracts FPS and
# loss from the training log and writes a summary for performance tracking.

# Network name (same as the directory name); adjust when porting a model.
Network="clip"
model_name=clip
train_epochs=1
batch_size=64
model_path=""
data_path=""
device_id=0
WORLD_SIZE=1

# Parse --key=value style command line parameters.
for para in $*; do
    if [[ $para == --model_name* ]]; then
        model_name=$(echo ${para#*=})
    elif [[ $para == --batch_size* ]]; then
        batch_size=$(echo ${para#*=})
    elif [[ $para == --model_path* ]]; then
        model_path=$(echo ${para#*=})
    elif [[ $para == --data_path* ]]; then
        data_path=$(echo ${para#*=})
    elif [[ $para == --train_epochs* ]]; then
        train_epochs=$(echo ${para#*=})
    fi
done

# model_path is mandatory.
if [[ $model_path == "" ]]; then
    echo "[Error] para \"model_path\" must be confing"
    exit 1
fi
# data_path is mandatory.
if [[ $data_path == "" ]]; then
    echo "[Error] para \"data_path\" must be confing"
    exit 1
fi

# Resolve the device id: prefer ASCEND_DEVICE_ID from the environment,
# otherwise export the local device_id default.
if [ $ASCEND_DEVICE_ID ];then
    echo "device id is ${ASCEND_DEVICE_ID}"
elif [ ${device_id} ];then
    export ASCEND_DEVICE_ID=${device_id}
    echo "device id is ${ASCEND_DEVICE_ID}"
else
    # Fix: this message was a bare string (not a command), which failed with
    # "command not found" instead of printing the error.
    echo "[Error] device id must be config"
    exit 1
fi

# cd to the directory that contains the test folder, for compatibility;
# test_path_dir is the path containing the test folder.
cur_path=$(pwd)
cur_path_last_dirname=${cur_path##*/}
if [ x"${cur_path_last_dirname}" == x"test" ]; then
    test_path_dir=${cur_path}
    cd ..
    cur_path=$(pwd)
else
    test_path_dir=${cur_path}/test
fi

# (Re)create the per-device output directory.
if [ -d ${cur_path}/test/output/${ASCEND_DEVICE_ID} ]; then
    rm -rf ${cur_path}/test/output/${ASCEND_DEVICE_ID}
    mkdir -p ${cur_path}/test/output/$ASCEND_DEVICE_ID/
else
    mkdir -p ${cur_path}/test/output/$ASCEND_DEVICE_ID/
fi

# Load the NPU environment variables.
source ${cur_path}/test/env_npu.sh

# Training start time.
start_time=$(date +%s)
echo "start_time: ${start_time}"

python3 ./run_clip.py --output_dir ./clip-roberta-finetuned-npu-1p \
    --num_train_epochs ${train_epochs} \
    --model_name_or_path "$model_path" \
    --data_dir $data_path \
    --dataset_name ydshieh/coco_dataset_script \
    --dataset_config_name=2017 \
    --dataloader_num_workers 8 \
    --image_column image_path --caption_column caption \
    --remove_unused_columns=False \
    --device_id="$ASCEND_DEVICE_ID" \
    --do_train --do_eval --fp16 --dataloader_drop_last \
    --fp16_opt_level O2 --loss_scale 12800000 --optim adamw_apex_fused_npu --use_combine_grad \
    --per_device_train_batch_size=$batch_size --per_device_eval_batch_size=$batch_size \
    --learning_rate="5e-5" --warmup_steps="0" --weight_decay 0.1 \
    --save_steps 150000 --max_steps 1000 --skip_steps 10 \
    --overwrite_output_dir >${test_path_dir}/output/$ASCEND_DEVICE_ID/train_${ASCEND_DEVICE_ID}.log 2>&1 &
wait

# Training end time.
end_time=$(date +%s)
e2e_time=$(($end_time - $start_time))

# Print results.
echo "------------------ Final result ------------------"
# Performance in FPS, extracted from the last train_samples_per_second line.
FPS=$(grep "train_samples_per_second =" ${test_path_dir}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $3}')
echo "Final Performance images/sec : $FPS"
# Training accuracy (final eval_loss from the log).
train_loss=$(grep "eval_loss" ${test_path_dir}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk 'END {print $3}')
echo "Final Train Loss : ${train_loss}"
echo "E2E Training Duration sec : $e2e_time"

# Summarize results for performance tracking.
BatchSize=${batch_size}
DeviceType=$(uname -m)
CaseName=${Network}_bs${BatchSize}_${WORLD_SIZE}'p'_'acc'

## Performance data.
# Throughput.
ActualFPS=${FPS}
# Time per training iteration in ms (batch_size * 1000 / FPS).
TrainingTime=$(awk 'BEGIN{printf "%.2f\n", '${batch_size}'*1000/'${FPS}'}')

# Extract per-step loss values from the training log into a loss file.
grep "{'loss" ${test_path_dir}/output/${ASCEND_DEVICE_ID}/train_${ASCEND_DEVICE_ID}.log | awk -F "{'loss" '{print $2}' | awk '{print $2}' | awk -F "," '{print $1}' >${test_path_dir}/output/${ASCEND_DEVICE_ID}//train_${CaseName}_loss.txt
# Loss of the last iteration.
ActualLoss=$(awk 'END {print}' ${test_path_dir}/output/${ASCEND_DEVICE_ID}/train_${CaseName}_loss.txt)

# Write the key metrics to ${CaseName}.log.
echo "Network = ${Network}" >${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "RankSize = ${WORLD_SIZE}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "BatchSize = ${BatchSize}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "DeviceType = ${DeviceType}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "CaseName = ${CaseName}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "ActualFPS = ${ActualFPS}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "TrainingTime = ${TrainingTime}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "ActualLoss = ${ActualLoss}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "E2ETrainingTime = ${e2e_time}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
echo "TrainAccuracy = ${train_loss}" >>${test_path_dir}/output/${ASCEND_DEVICE_ID}/${CaseName}.log
| true
|
e3506056e51e8451727132f9821449ebc3f2f10c
|
Shell
|
samigeorgiev/Dotfiles
|
/regolith/generate_i3_config
|
UTF-8
| 559
| 2.890625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# Build the regolith i3 config by concatenating the system config with the
# user's override fragment from the dotfiles repo.
MAIN_CONFIG=/etc/regolith/i3/config
OVERRIDE_CONFIG=~/Dotfiles/regolith/config
RESULT_CONFIG=~/.config/regolith/i3/config

rm -f "$RESULT_CONFIG"
mkdir -p "$(dirname "$RESULT_CONFIG")"

cat "$MAIN_CONFIG" >> "$RESULT_CONFIG"
# Separator banner between system config and user overrides. Fix: the final
# blank line was written by a bare `echo` (to stdout) instead of being
# appended to the result file like every sibling line.
{
echo -e "\n\n"
echo "###############################################################################"
echo "# User overrides"
echo "###############################################################################"
echo
} >> "$RESULT_CONFIG"
cat "$OVERRIDE_CONFIG" >> "$RESULT_CONFIG"
| true
|
e0fc226fa432e4d9b8ec746cb238aa644d4ddd9e
|
Shell
|
taunooo/rsha
|
/praks2.1/parool_gen
|
UTF-8
| 236
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
#
# For every user name listed (one per line) in the file given as $1:
# create the account with a /bin/bash login shell and home directory,
# set a generated 7-character password via chpasswd, echo it and append
# "user:password" to ./genkasutajaparool. Must run as root.
while IFS= read -r nimi
do
kasutaja=$nimi
parool=$(pwgen 7)
echo "$kasutaja parool on $parool"
# Quote the name so it survives stray whitespace; -r above keeps
# backslashes in the input literal.
useradd -m -s /bin/bash "$kasutaja"
echo "$kasutaja:$parool" | chpasswd
echo "$kasutaja:$parool" >> "genkasutajaparool"
done < "$1"
| true
|
8cb10655fd5c68943c7c9b58c424e89179cfb474
|
Shell
|
Davidebyzero/RegexGolf
|
/test-log2-regex_perl.sh
|
UTF-8
| 659
| 3.828125
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Read a (possibly free-spacing, commented) regex from FILE, strip its
# comments/whitespace, and feed it successive powers of 2 (as runs of "x"),
# printing each power and the computed log2 result, forever.
if [ -z "$1" ]; then
echo "Usage: $0 FILE"
echo 'Reads a regex calculating base 2 logarithm in the domain "^((x+)(?=\2$))*x$"'
echo 'from FILE and feeds it powers of 2, printing the ones that match.'
exit
fi
# Strip unescaped whitespace, (?#...) inline comments and #-to-end-of-line
# comments from the regex file, leaving the bare pattern (-0 slurps the
# whole file so the substitution spans lines).
regex=$(perl -0pe 's/(?<!\\)\s*|\(\?#[^)]*\)|(?<!\(\?)#.*$//gm' < "$1")
echo Regex length = $(echo -n "$regex"|wc -c)
echo Regex md5sum = $(echo -n "$regex"|md5sum)
echo
# The cleaned regex is handed to perl on stdin; perl matches it against
# "x" x $a for each doubling $a and reports the captured log2 value
# (length of the trailing capture subtracted from $a). Runs until killed.
echo -n "$regex" |
perl -E '
$regex = <>;
for ($a=1;;$a*=2) {
@match = ("x" x $a) =~ /$regex(.*)/;
if (@match) {
say $a . " -> " . ($a - length(@match[-1]));
} else {
say $a . " -> no match";
}
}
'
| true
|
7b892dd4adf2fb3d318872560077baf9506f7700
|
Shell
|
rsenn/ports
|
/apache/httpd/httpd2-mkcrt.sh
|
UTF-8
| 1,931
| 3.9375
| 4
|
[] |
no_license
|
#!/bin/sh
#
# Generates RSA private key if none available in $1.key
# Dumps RSA public key if none available to $1.pub
# Makes a self-signed certificate in $1.crt
#
# $Id: httpd2-mkcrt.sh,v 1.2 2006/09/21 15:30:07 root Exp $
# ircd dirs
prefix="@prefix@"
sysconfdir="${prefix}/etc"
# openssl client tool
openssl="${prefix}/bin/openssl"
# how many bits the RSA private key will have
# NOTE(review): 1024-bit RSA is weak by modern standards — confirm whether
# this default should be raised (e.g. to 2048+).
bits=1024
# defaults for x509 and stuff
cnf="$sysconfdir/openssl.cnf"
# private key file
key="$1.key"
# public key file
pub="$1.pub"
# certificate
crt="$1.crt"
# random data
rnd="/dev/urandom"
if [ -z "$1" ]; then
echo "Usage: $0 <path-to-key> [cname]"
echo
echo "Example: $0 $prefix/etc/httpd2 0.0.0.0:443"
echo " This would generate a private key in $prefix/etc/httpd2.key,"
echo " a public key in $prefix/etc/httpd2.pub and a self-signed"
echo " certificate in $prefix/etc/httpd2.crt"
exit 1
fi
# generate RSA private key if not already there
if [ -f "$key" ]
then
echo "There is already an RSA private key in $key."
else
# dump random data
# NOTE(review): with rnd=/dev/urandom this dd writes random bytes *to*
# /dev/urandom, which is a no-op for seeding; it only matters if $rnd is
# changed to point at a regular seed file.
dd if=/dev/urandom "of=$rnd" count=1 "bs=$bits"
# generate key
${openssl} genrsa -rand "$rnd" -out "$key" "$bits"
# remove old shit based on inexistent
# NOTE(review): $req is never assigned in this script, so it expands empty
# here — presumably a leftover from a CSR workflow.
rm -f "$pub" "$req" "$crt"
# destroy random data
# shred "$rnd"
# rm "$rnd"
fi
# dump the public key if not present
if [ -f "$pub" ]
then
echo "There is already an RSA public key in $pub."
else
${openssl} rsa -in "$key" -out "$pub" -pubout
fi
# generate certificate
if [ -f "$crt" ]
then
echo "There is already a certificate in $crt."
else
COUNTRY="CH"
STATE="Bern"
LOCALITY="Thun"
COMPANY="nexbyte GmbH"
SECTION="base"
CNAME="${2-0.0.0.0:443}"
# Answer the interactive subject-field prompts of 'openssl req' on stdin,
# in the order openssl asks for them.
cat << EOF | ${openssl} req -config "$cnf" -new -x509 -nodes -key "$key" -out "$crt" 2>/dev/null
$COUNTRY
$STATE
$LOCALITY
$COMPANY
$SECTION
$CNAME
EOF
${openssl} x509 -subject -dates -fingerprint -noout -in "$crt"
fi
| true
|
4ab18577bda56c1dffa3cad5357c6a6fa4a72041
|
Shell
|
lendup/heroku-buildpack-play
|
/bin/compile
|
UTF-8
| 8,551
| 3.796875
| 4
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/usr/bin/env bash
# bin/compile <build-dir> <cache-dir> <env-dir>
#
# Heroku buildpack "compile" step for a Play! 1.x application with a
# node/gulp asset pipeline in ./client: installs a JDK and Play!, restores
# caches, runs npm/gulp to build assets, precompiles the Play! app and
# repacks the caches for the next build.
# Exit prematurely on first failure
set -e
# Parse args
BUILD_DIR=$1
CACHE_DIR=$2
ENV_DIR=$3
BIN_DIR=$(cd $(dirname $0); pwd) # absolute path
# source in common functions
. $BIN_DIR/common.sh
# NOTE(review): fetched over plain http and piped straight into tar with no
# integrity check — confirm this is acceptable for the build environment.
curl --silent --location http://heroku-jvm-common.s3.amazonaws.com/jvm-buildpack-common.tar.gz | tar xz
. bin/util
. bin/java
[ -e opt/jdbc.sh ] && . opt/jdbc.sh
export_env_dir $ENV_DIR
#create the cache dir if it doesn't exist
mkdir -p $CACHE_DIR
# create default system.properties
if [ ! -f ${BUILD_DIR}/system.properties ]; then
echo "java.runtime.version=1.6" > ${BUILD_DIR}/system.properties
fi
# install JDK
javaVersion=$(detect_java_version ${BUILD_DIR})
echo -n "-----> Installing OpenJDK ${javaVersion}..."
install_java ${BUILD_DIR} ${javaVersion}
jdk_overlay ${BUILD_DIR}
echo "done"
PLAY_PATH=".play"
IVY_PATH=".ivy2"
# Change dir to handle relative paths
cd $BUILD_DIR
# Unpack cache
for DIR in $PLAY_PATH $IVY_PATH ; do
rm -rf $DIR
if [ -d $CACHE_DIR/$DIR ]; then
cp -r $CACHE_DIR/$DIR $DIR
fi
done
PLAY_VERSION=$(get_play_version conf/dependencies.yml)
DEFAULT_PLAY_VERSION="1.2.7"
VERSION_DECLARED=true
if [ -z "$PLAY_VERSION" ] ; then
PLAY_VERSION=$DEFAULT_PLAY_VERSION
VERSION_DECLARED=false
echo "-----> WARNING: Play! version not specified in dependencies.yml. Default version: $PLAY_VERSION being used...."
fi
# Install Play! or update the version
if [ ! -f $PLAY_PATH/play ]; then
install_play $PLAY_VERSION
else
INSTALLED_PLAY_VERSION=`cat $PLAY_PATH/framework/src/play/version`
if [ "$INSTALLED_PLAY_VERSION" != "$PLAY_VERSION" ] && $VERSION_DECLARED ; then
echo "-----> Updating Play! version. Previous version was $INSTALLED_PLAY_VERSION. Updating to $PLAY_VERSION..."
rm -rf $PLAY_PATH
install_play $PLAY_VERSION
fi
fi
# Install ivysettings.xml file, with creds for Lendup's Maven repo
echo -n "-----> Installing ivysettings.xml....."
if [ -f .ivy2/ivysettings.xml ]; then
rm .ivy2/ivysettings.xml
fi
mkdir -p .ivy2
if [[ -e "conf/ivysettings.xml" ]] ; then
# If the app has its own ivysettings.xml, use that.
cp conf/ivysettings.xml .ivy2/.
else
PLAY_SETTINGS_URL="http://s3.amazonaws.com/heroku-jvm-langpack-play/ivysettings.xml"
# The perl filter substitutes ${LENDUP_MAVEN_REPO_USER}/_PW from the
# process environment; any still-unset ${...} placeholders become empty.
CREDS=$(echo ' <credentials host="maven.lendup.com" realm="closed site" username="${LENDUP_MAVEN_REPO_USER}" passwd="${LENDUP_MAVEN_REPO_PW}"/>' \
| perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg; s/\$\{([^}]+)\}//eg')
# NOTE: If testing on mac: brew install gnu-sed; alias sed=gsed
curl --silent --max-time 10 --location $PLAY_SETTINGS_URL | sed "/<ivysettings>/ a ${CREDS}" > .ivy2/ivysettings.xml
echo " done"
fi
# ---------- GULP SHITZ --------------
status "-----> Installing node/npm and running gulp....."
# Resolve node version using semver.io
#node_version=$(curl --silent --get https://semver.io/node/stable)
# hardcode to 4.2.2 until we debug node 5
node_version=4.2.2
status "Using node.js version $node_version"
# Download node from Heroku's S3 mirror of nodejs.org/dist
status "Downloading and installing node"
node_url="http://s3pository.heroku.com/node/v$node_version/node-v$node_version-linux-x64.tar.gz"
curl $node_url -s -o - | tar xzf - -C $BUILD_DIR
# Move node (and npm) into ./vendor and make them executable
mkdir -p $BUILD_DIR/vendor
mv $BUILD_DIR/node-v$node_version-linux-x64 $BUILD_DIR/vendor/node
chmod +x $BUILD_DIR/vendor/node/bin/*
PATH=$PATH:$BUILD_DIR/vendor/node/bin
# Run subsequent node/npm commands from the build path
cd $BUILD_DIR/client
# Set the node environment to production. This will prevent us from installing
# dev dependencies we do not need or want here (e.g. phantomjs).
export NODE_ENV=production
# If node_modules directory is checked into source control then
# rebuild any native deps. Otherwise, restore from the build cache.
if test -d $BUILD_DIR/client/node_modules; then
status "Found existing node_modules directory; skipping cache"
status "Rebuilding any native dependencies"
npm rebuild 2>&1 | indent
elif test -d $CACHE_DIR/node/node_modules; then
status "Restoring node_modules directory from cache"
# If we have a cached shrinkwrap, and it matches the current
# shrinkwrap, copy over the node_modules.
USING_CACHED_NODE=0
if [ -f "$CACHE_DIR/npm-shrinkwrap.json" ]; then
if cmp --silent "$CACHE_DIR/npm-shrinkwrap.json" npm-shrinkwrap.json; then
USING_CACHED_NODE=1
status "Shrinkwrap file HAS NOT changed"
status "Using cached node_modules directory"
cp -r $CACHE_DIR/node/node_modules $BUILD_DIR/client/
fi
fi
if [ "$USING_CACHED_NODE" -eq "0" ]; then
status "Shrinkwrap file HAS changed or doesn't exist in cache"
status "Pruning cached dependencies not specified in package.json"
npm prune 2>&1 | indent
npm rebuild 2>&1 | indent
fi
cp npm-shrinkwrap.json "$CACHE_DIR/npm-shrinkwrap.json"
if test -f $CACHE_DIR/node/.heroku/node-version && [ $(cat $CACHE_DIR/node/.heroku/node-version) != "$node_version" ]; then
status "Node version changed since last build; rebuilding dependencies"
npm rebuild 2>&1 | indent
fi
fi
# Scope config var availability only to `npm install`
(
if [ -d "$ENV_DIR" ]; then
status "Exporting config vars to environment"
export_env_dir $ENV_DIR
fi
status "Installing dependencies"
# Make npm output to STDOUT instead of its default STDERR
npm install 2>&1 | indent
)
# Purge node-related cached content, being careful not to purge the top-level
# cache, for the sake of heroku-buildpack-multi apps.
rm -rf $CACHE_DIR/node_modules # (for apps still on the older caching strategy)
rm -rf $CACHE_DIR/node
mkdir -p $CACHE_DIR/node
# If app has a node_modules directory, cache it.
if test -d $BUILD_DIR/client/node_modules; then
status "Caching node_modules directory for future builds"
cp -r $BUILD_DIR/client/node_modules $CACHE_DIR/node
fi
# Copy goodies to the cache
mkdir -p $BUILD_DIR/client/.heroku
cp -r $BUILD_DIR/client/.heroku $CACHE_DIR/node
status "Cleaning up node-gyp and npm artifacts"
rm -rf "$BUILD_DIR/client/.node-gyp"
rm -rf "$BUILD_DIR/client/.npm"
# Update the PATH
status "Building runtime environment"
mkdir -p $BUILD_DIR/client/.profile.d
echo "export PATH=\"\$HOME/vendor/node/bin:\$HOME/bin:\$HOME/node_modules/.bin:\$PATH\";" > $BUILD_DIR/client/.profile.d/nodejs.sh
# Check and run gulp
(
if [ -d "$ENV_DIR" ]; then
status "Exporting config vars to environment"
export_env_dir $ENV_DIR
fi
if [ -f $BUILD_DIR/client/gulpfile.js ]; then
status "Found gulpfile, running gulp prod task"
$BUILD_DIR/client/node_modules/.bin/gulp prod
else
status "No gulpfile found"
fi
)
status "Removing client directory..."
rm -rf "$BUILD_DIR/client"
cd $BUILD_DIR
# If it does not exist, force a blank dev.conf onto the filesystem.
if [ ! -f $BUILD_DIR/conf/dev.conf ]; then
echo "creating empty dev.conf at $BUILD_DIR/conf/dev.conf"
touch $BUILD_DIR/conf/dev.conf
fi
# Run the Play! build in a subshell so the exported config vars do not leak
# into the rest of the script.
(
if [ -d "$ENV_DIR" ]; then
status "Exporting config vars to environment"
export_env_dir $ENV_DIR
fi
# Build app
echo "-----> Building Play! application..."
$PLAY_PATH/play version | sed -u 's/^/       /'
# Precompile the Play! application at the root of $BUILD_DIR
APP_DIR=./
echo "       Building Play! application at directory $APP_DIR"
DEPENDENCIES_CMD="$PLAY_PATH/play dependencies $APP_DIR --forProd --forceCopy --silent -Duser.home=$BUILD_DIR 2>&1"
echo "       Resolving dependencies: $DEPENDENCIES_CMD"
eval "$DEPENDENCIES_CMD" | sed -u 's/^/       /'
check_compile_status
PRECOMPILE_CMD="DISABLE_PARALLEL_CLASSLOADING=true $PLAY_PATH/play precompile $APP_DIR ${PRECOMPILE_ID} --silent 2>&1"
echo "       Precompiling: $PRECOMPILE_CMD"
eval "$PRECOMPILE_CMD" | sed -u 's/^/       /'
check_compile_status
)
# Repack Play! framework into cache
mkdir -p $CACHE_DIR
for DIR in $PLAY_PATH $IVY_PATH ; do
rm -rf $CACHE_DIR/$DIR
cp -r $DIR $CACHE_DIR/$DIR
done
PROFILE_PATH="$BUILD_DIR/.profile.d/play.sh"
mkdir -p $(dirname $PROFILE_PATH)
echo 'export PATH="/app/.jdk/bin:$PATH"' >> $PROFILE_PATH
# Remove build time dependencies from slug
# (Note: runtime modules are copied to slug with --forceCopy option)
rm -fr $PLAY_PATH/modules
rm -fr $IVY_PATH
# Warn if no Procfile is present
if [ ! -f Procfile ]; then
echo "-----> No Procfile found. Will use the following default process: "
echo "       play run --http.port=\$PORT \$PLAY_OPTS"
fi
| true
|
2cfb8b6a7c7eb596bba0ee84d2017595f21c75db
|
Shell
|
mifkata/terrapress
|
/scripts/remote/tools/phpmyadmin-install.sh
|
UTF-8
| 706
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Install phpMyAdmin 4.8.4 on the remote host provisioned by terraform.
set -e

# Directory containing this script, prefixed with the caller's $PWD.
# NOTE(review): this assumes the script is invoked via a relative path —
# confirm callers never pass an absolute path (which would be prefixed
# twice).
ROOT="$PWD/$( dirname "${BASH_SOURCE[0]}" )"
# Source the shared config (provides $CODEBASE, $WEB_ROOT, ...); the
# command-scoped ROOT=... assignment makes ROOT visible to the sourced file.
ROOT=$ROOT source $ROOT/../config.env
# Ask terraform for the service host's public IP.
HOST_IP=$(cd $ROOT/../../../terraform/service && terraform output ip)
SOURCE="https://files.phpmyadmin.net/phpMyAdmin/4.8.4/phpMyAdmin-4.8.4-english.tar.gz"

echo "[PHPMYADMIN] Installing..."
cd $ROOT
# On the remote host: download and unpack phpMyAdmin into a temp dir, move
# it into the web root, hand ownership to the web server user and clean up.
# Local variables ($CODEBASE, $SOURCE, $WEB_ROOT) expand before ssh runs.
ssh -oStrictHostKeyChecking=no root@$HOST_IP "\
mkdir -p $CODEBASE/.tmp &&\
curl -s -L $SOURCE --output $CODEBASE/.tmp/phpmyadmin.tar.gz &&\
tar -zxf $CODEBASE/.tmp/phpmyadmin.tar.gz --directory $CODEBASE/.tmp/ &&\
mv -f $CODEBASE/.tmp/phpMyAdmin-4.8.4-english $WEB_ROOT/phpmyadmin &&\
chown -R www-data:www-data $WEB_ROOT/phpmyadmin &&\
rm -rf $CODEBASE/.tmp"
echo "[PHPMYADMIN] Done.."
| true
|
1282c09fb1075adb0bd9affe9a841aec0736f0de
|
Shell
|
CS506-Oversight/autorack-front
|
/precommit.sh
|
UTF-8
| 716
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Pre-commit gate: lint (js + css) and run the test suite, stopping at the
# first failure. Waits for Enter before closing so the output stays visible
# when launched from a GUI hook.

# ------ Variables
# ANSI color escapes. Fix: each sequence must begin with the ESC byte
# (\033); the values were bare "[31m" etc. — most likely the literal ESC
# character was lost — which printed the brackets instead of coloring.
CLR_RED=$'\033[31m'
CLR_GRN=$'\033[32m'
CLR_CYN=$'\033[36m'
CLR_NC=$'\033[0m'

# ------ Functions
# Run the command in $1 (intentionally unquoted so it word-splits into a
# command line); on failure print the step label from $2, wait for the
# user, and exit non-zero.
run_cmd_exit_on_err() {
  if ! $1; then
    echo "${CLR_RED}Error @ $2${CLR_NC}"
    read -p "Press enter to continue." -r
    exit 1
  fi
}

# ------ Main
echo "${CLR_CYN}Linting the code...${CLR_NC}"
run_cmd_exit_on_err "npm run lint" "Code linting"

echo "${CLR_CYN}Linting the code (css)...${CLR_NC}"
run_cmd_exit_on_err "npm run lint:css" "Code linting (css)"

echo "${CLR_CYN}Running the tests...${CLR_NC}"
run_cmd_exit_on_err "npm run test -- --watchAll=false" "Code testing"

echo "--- ${CLR_GRN}All checks passed.${CLR_NC} ---"
read -p "Press enter to continue." -r
| true
|
f7a186cf779194cbbfe12877663c9c48bad0f53a
|
Shell
|
ignacioola/react-native-starter
|
/scripts/build-env.sh
|
UTF-8
| 2,474
| 3.359375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Fix: shebang was "#/bin/sh" (missing '!'), so it was a plain comment.
#
# Build the react-native environment config: ensure Google-services
# placeholder files exist, pick the dotenv file (overridable via $ENVFILE),
# run react-native-config's dotenv build when invoked from Xcode ($SYMROOT
# set), and generate ./src/config.env.js for development.
# PWD is ../
TARGET_ENV=".env"
RNCDIR="./node_modules/react-native-config/ios"
GOOGLESERVICE_JSON_FILE="./android/app/google-services.json"
GOOGLESERVICE_JSON_CONTENT='{"project_info":{"project_id":"sample","project_number":"000000000000","name":"sample","firebase_url":"https://sample.firebaseio.com"},"client":[{"client_info":{"mobilesdk_app_id":"1:000000000000:android:ffffffffffffffff","client_id":"android:com.ueno.reactnativestarter","client_type":1,"android_client_info":{"package_name":"com.ueno.reactnativestarter","certificate_hash":[]}},"api_key":[{"current_key":"sample"}]}],"configuration_version":"1"}'
GOOGLESERVICE_INFO_FILE="./ios/react-native-starter/GoogleService-Info.plist"
GOOGLESERVICE_INFO_CONTENT=$(cat <<EOL
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
</dict>
</plist>
EOL
)
if [ ! -f $GOOGLESERVICE_INFO_FILE ]; then
  echo "[Ueno RNS] Warning: No GoogleService-Info.plist file in ios app directory... Added placeholder for now!"
  # Fix: quote the expansion — unquoted, echo collapsed the multi-line
  # plist onto a single line, producing a malformed file.
  echo "$GOOGLESERVICE_INFO_CONTENT" > $GOOGLESERVICE_INFO_FILE
fi
if [ ! -f $GOOGLESERVICE_JSON_FILE ]; then
  echo "[Ueno RNS] Warning: No google-services.json file in android app directory... Added placeholder for now!"
  echo "$GOOGLESERVICE_JSON_CONTENT" > $GOOGLESERVICE_JSON_FILE
fi
if [ ! -f ".env" ]; then
  echo "[Ueno RNS] Warning: No .env file found... Copied .env.public to .env!"
  cp .env.public .env
fi
if [ ! -z "$ENVFILE" ]; then
  TARGET_ENV=$ENVFILE
fi
echo "[Ueno RNS] Building environment config"
echo "[Ueno RNS] Using $TARGET_ENV"
if [ ! -z "$SYMROOT" ]; then
  mkdir -p $SYMROOT
  # Build dotenv
  cd $RNCDIR
  ./ReactNativeConfig/BuildDotenvConfig.ruby
  cd -
  # Copy generated dotenv files to node_modules directory
  cp "$BUILD_DIR/GeneratedInfoPlistDotEnv.h" "$RNCDIR/ReactNativeConfig/GeneratedInfoPlistDotEnv.h"
  cp "$SYMROOT/GeneratedDotEnv.m" "$RNCDIR/ReactNativeConfig/GeneratedDotEnv.m"
  echo "Copied GeneratedInfoPlistDotEnv.h and GeneratedDotEnv.m to $RNCDIR"
fi
# Generate dynamic environment for development: turn KEY=value lines into a
# JS object literal, stamped with a generation timestamp.
JSON="export default {$(cat $TARGET_ENV | egrep "^[A-Za-z]+" | sed 's/\"/\\\"/g' | sed -n 's|\(.*\)=\(.*\)$|"\1":"\2",|p' | sed 's|\\\"||g') \"generatedAt\": \"$(date '+%FT%T')\" }"
echo "[Ueno RNS] Generating ./src/config.env.js"
# Fix: quote $JSON so embedded whitespace is preserved verbatim.
echo "$JSON" > ./src/config.env.js
# Build config
echo "[Ueno RNS] Config built successfully"
| true
|
b6eb59eac4b52f908f8c3629ef12458292e42865
|
Shell
|
ulshell/popper
|
/ci/test/dry-run
|
UTF-8
| 1,244
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# CI test: `popper run --dry-run` on the github-actions-demo repository must
# print the expected DRYRUN plan without executing any action. The helper
# functions/variables (init_test_repo, test_repo_path) come from ./common.
set -ex
source ./common
init_test_repo
cd $test_repo_path
git clone https://github.com/cplee/github-actions-demo.git
cd github-actions-demo
# The demo workflow references this secret; any value satisfies the check
# in a dry run.
export PHONY_SECRET=foo
popper run --dry-run > actual
# Expected dry-run plan, one DRYRUN line per docker operation.
echo "DRYRUN: [install] docker pull node:11.6.0
DRYRUN: [install] docker create node:11.6.0 npm install
DRYRUN: [install] docker start
DRYRUN: [test] docker pull node:11.6.0
DRYRUN: [test] docker create node:11.6.0 npm test
DRYRUN: [test] docker start
DRYRUN: [lint] docker build -t action/jshint $PWD/./.github/actions/jshint
DRYRUN: [lint] docker create action/jshint
DRYRUN: [lint] docker start
DRYRUN: [branch-filter] docker build -t actions/bin /tmp/actions/actions/bin/./filter
DRYRUN: [branch-filter] docker create actions/bin branch master
DRYRUN: [branch-filter] docker start
DRYRUN: [deploy] docker build -t actions/bin /tmp/actions/actions/bin/./sh
DRYRUN: [deploy] docker create actions/bin env
DRYRUN: [deploy] docker start
Workflow finished successfully.
" > expected
# Using xargs to remove trailing whitespaces
# Even tr -s '\n' '\n' is for the same purpose
# Using sort so as to get some appearance of order
cat expected | tr -s '\n' '\n' | sort | xargs > a
cat actual | tr -s '\n' '\n' | sort | xargs> b
diff a b
| true
|
73a4256a0eeb9c97c4fbe547de1cfa1197b49126
|
Shell
|
ilventu/aur-mirror
|
/nut/PKGBUILD
|
UTF-8
| 670
| 2.859375
| 3
|
[] |
no_license
|
# Maintainer : speps <speps at aur at archlinux dot org>
# Contributor: Thomas Dziedzic < gostrc at gmail >
# Contributor: Slash <demodevil5 [at] yahoo [dot] com>
# Arch Linux PKGBUILD for `nut`, a meal analyzer backed by the USDA
# Nutrient Database.
pkgname=nut
pkgver=18.0
pkgrel=1
pkgdesc="Analyze meals with the USDA Nutrient Database"
arch=('i686' 'x86_64')
url='http://nut.sourceforge.net/'
license=('GPL')
source=("http://downloads.sourceforge.net/project/nut/nut/$pkgver/nut-$pkgver.tar.gz")
md5sums=('38b8503b5c6b1cee610e2ee512a1c1cb')
build() {
cd "$pkgname-$pkgver"
# settable prefix and fix man path
# (make `prefix` append-assignable and move man pages under share/)
sed -i "s|^prefix |&+|;s|/man|/share&|" Makefile
make prefix=/usr
}
package() {
cd "$pkgname-$pkgver"
# NOTE(review): prefix is pointed into $pkgdir for a DESTDIR-less install.
make prefix="$pkgdir/usr" install
}
| true
|
711965bb7742f209cabe25aa249644068b3a7d47
|
Shell
|
mfkiwl/riscv-z3
|
/tools/csmith.sh
|
UTF-8
| 334
| 2.953125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Fetch, build, and install Csmith 2.3.0 (random C program generator)
# into $CSMITH_PATH.
# FIX: fail fast on any error (the original kept going after a failed
# download/extract), quote all expansions, and collapse the two
# existence checks into a single idempotent `rm -rf`.
set -euo pipefail

CSMITH_PATH=/opt/csmith
CSMITH_VERSION=2.3.0   # previously hard-coded in every path below

# Remove leftovers from a previous run so the build starts clean
# (rm -rf succeeds whether or not the paths exist).
rm -rf "csmith-${CSMITH_VERSION}" "csmith-${CSMITH_VERSION}.tar.gz"

wget "https://embed.cs.utah.edu/csmith/csmith-${CSMITH_VERSION}.tar.gz"
tar xf "csmith-${CSMITH_VERSION}.tar.gz"
cd "csmith-${CSMITH_VERSION}"
./configure --prefix="$CSMITH_PATH"
make -j"$(nproc)"
sudo make install
| true
|
dcdea4fd43486aefb30f834744efecc4da28de76
|
Shell
|
xiao6666/tableplacement
|
/tableplacement-experiment/expScripts/exp.read.RCFile.sh
|
UTF-8
| 1,200
| 3.390625
| 3
|
[] |
no_license
|
#! /bin/bash
# Benchmark driver: read selected columns from an RCFile and record I/O
# statistics, dropping the OS page cache before and after the run so the
# measurements are not skewed by cached data.
if [ $# -ne 4 ]
then
echo "./exp.read.RCFile.sh <exp> <row group size> <io buffer size> <read column str>"
echo "<exp>: exp1, exp2, exp3, ..."
exit
fi
EXP=$1
ROW_GROUP_SIZE=$2
IO_BUFFER_SIZE=$3
READ_COLUMN_STR=$4
EXP_COMMON_CONF_PATH="./expConf/common.conf"
echo "Loading parameters from $EXP_COMMON_CONF_PATH"
# The sourced conf files supply TABLE, DIR, RCFILE_PREFIX, FILE_PREFIX,
# ROW_COUNT, DEVICE and OTHER_PROPERTIES used below.
source $EXP_COMMON_CONF_PATH
EXP_CONF_PATH="./expConf/$EXP.conf"
echo "Loading parameters from $EXP_CONF_PATH"
source $EXP_CONF_PATH
echo "Printing system infomation ..."
uname -a
cat /etc/lsb-release
echo "=================================================================="
echo "Row group size:" $ROW_GROUP_SIZE
echo "I/O buffer size:" $IO_BUFFER_SIZE
echo "Read columns str:" $READ_COLUMN_STR
# Feed the cache-drop command to a root shell via `sudo su` so that
# /proc/sys/vm/drop_caches is writable; forces subsequent reads to hit disk.
echo "free && sync && echo 3 > /proc/sys/vm/drop_caches && free"|sudo su #> /dev/null
iostat -d -t -k $DEVICE
java -jar ../target/tableplacement-experiment-0.0.1-SNAPSHOT.jar ReadRCFile -t $TABLE -i $DIR/$RCFILE_PREFIX.$FILE_PREFIX.c$ROW_COUNT.rg$ROW_GROUP_SIZE -p read.column.string $READ_COLUMN_STR -p io.file.buffer.size $IO_BUFFER_SIZE $OTHER_PROPERTIES
echo "free && sync && echo 3 > /proc/sys/vm/drop_caches && free"|sudo su #> /dev/null
iostat -d -t -k $DEVICE
| true
|
69b4bca4bd6134c21372a4670ea99b46ed67a398
|
Shell
|
abuxton/control-repo
|
/site-modules/localbootstrap/tasks/puppet_download.sh
|
UTF-8
| 2,520
| 3.671875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Puppet Task: download a Puppet Enterprise installer tarball from S3.
#
# Tasks can be written in any language; parameters arrive as PT_* environment
# variables and should be described in the accompanying .json metadata file.
# Learn more at: https://puppet.com/docs/bolt/0.x/writing_tasks.html
#
# This script downloads the requested version of PE (defaulting to the most
# recently released one), resumes interrupted downloads, extracts the archive
# and prints the tarball name.
# INSTALLER CHOICES #
# Either pass these environment variables inline or modify the default
# values (note, it's the value after the ':-' but before the close curly brace }
DOWNLOAD_DIST=${DOWNLOAD_DIST:-el}
DOWNLOAD_RELEASE=${DOWNLOAD_RELEASE:-7}
DOWNLOAD_ARCH=${DOWNLOAD_ARCH:-x86_64}
DOWNLOAD_VERSION=${DOWNLOAD_VERSION:-latest}

# Resolve "latest" to a concrete version number published by Puppet.
# FIX: use POSIX `[` — the original `[[ ... ]]` is a bashism that fails
# under dash/sh shells, silently leaving DOWNLOAD_VERSION as "latest".
if [ "$DOWNLOAD_VERSION" = "latest" ]; then
latest_released_version_number="$(curl -s http://versions.puppet.com.s3-website-us-west-2.amazonaws.com/ | tail -n1)"
DOWNLOAD_VERSION=${latest_released_version_number:-latest}
fi

tarball_name="puppet-enterprise-${DOWNLOAD_VERSION}-${DOWNLOAD_DIST}-${DOWNLOAD_RELEASE}-${DOWNLOAD_ARCH}.tar.gz"

# FIX: the original `if [[$PT_debug == true ]]` lacked the space after `[[`,
# so the condition always failed (command not found) and the debug output
# never printed; `${PT_debug:-}` also tolerates the variable being unset.
if [ "${PT_debug:-}" = "true" ]; then
echo "Downloading PE $DOWNLOAD_VERSION for ${DOWNLOAD_DIST}-${DOWNLOAD_RELEASE}-${DOWNLOAD_ARCH} to: ${tarball_name}"
echo
echo "https://pm.puppetlabs.com/cgi-bin/download.cgi?dist=${DOWNLOAD_DIST}&rel=${DOWNLOAD_RELEASE}&arch=${DOWNLOAD_ARCH}&ver=${DOWNLOAD_VERSION}"
fi

# -L follows redirects; -C - resumes a partially downloaded tarball.
curl --progress-bar \
-L \
-o "./${tarball_name}" \
-C - \
"https://pm.puppetlabs.com/cgi-bin/download.cgi?dist=${DOWNLOAD_DIST}&rel=${DOWNLOAD_RELEASE}&arch=${DOWNLOAD_ARCH}&ver=${DOWNLOAD_VERSION}"

tar -xzf "$PWD/$tarball_name"
echo "$tarball_name"
| true
|
48a2d8ee4635b6736a666e827c8809f49f560385
|
Shell
|
cniesen/rtools
|
/bin/rDev.sh
|
UTF-8
| 4,567
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# svn revision, db root password, db username, db password, [rice db username], [rice db password], [saucelabs username, saucelabs access key]
# to checkout a brach set R_SVN=https://svn.kuali.org/repos/rice/branches/rice-2.1
#
# rDev.sh: materialize a Kuali Rice working tree at svn revision $1 (fresh
# impex prep, or copy of trunk-wubot + svn update), create its MySQL
# databases, apply local patches and config files, then run a full maven
# build. Relies on the companion r*.sh helper scripts being on PATH.
stime=$(date '+%s')
export DTS=$(date +%Y%m%d%H%M)
if [ -z "$R_HOME" ]
then
# NOTE(review): message says "Exiting." but there is no `exit` — confirm.
echo "env R_HOME is not set! Exiting."
fi
if [ -z "$R_SVN" ]
then
export R_SVN=https://svn.kuali.org/repos/rice/trunk
fi
echo -e "\n\nDuring rDev.sh it is normal for there to be a few svn: '.' is not a working copy messages. which are okay to ignore. These are from the various logging scripts which run a svndiff to a log before executing commands.\n"
echo -e "ln: File exists are also typically safe to ignore, logging files already exist and will be appended too\n\n "
export RICE_DB_USER=$3
export RICE_DB_PASS=$4
# Default database credentials when $3/$4 were not supplied.
if [ -z "$RICE_DB_USER" ]
then
export RICE_DB_USER=RICE
export RICE_DB_PASS=RICE
fi
#export RICE_PORT=$5
#if [ -z "$RICE_PORT" ]
#then
# export RICE_PORT=8080
#fi
cd $R_HOME
mkdir -p $R_HOME/logs/$1
mkdir -p $R_HOME/$1
settingsSed.sh $1
#if [ "$(ls -A $R_HOME/$1)" ]
#then
# echo "$R_HOME/$1 should be emtpy but is not. Hangning file pointer possible, exiting."
# exit
#fi
mkdir -p $R_HOME/$1/.rdev
# we used to only checkout the db stuff, if there is a problem we avoid checking out everything.
# however, if there was just a release the whole project needs to be checked out which is now the default
if [ ! -e "$R_HOME/trunk-wubot" ] || [ -n "$R_SVN_CLEAN" ]
then
rImpexPrep.sh $1 $2 $RICE_DB_USER $RICE_DB_PASS
mysqlCreateDBs.sh $1 $2 $RICE_DB_USER $RICE_DB_PASS
cd $R_HOME/$1
else
# Fast path: copy an existing checkout and svn-update it to revision $1.
echo "R_SVN_CLEAN not set copying from $R_HOME/trunk-wubot then updating to revision $1"
cp -R $R_HOME/trunk-wubot/ $R_HOME/$1/
cd $R_HOME/$1
log-command.sh rdev.svn.update.$1 svn --trust-server-cert --non-interactive update -r $1
# copied from rImpexPrep.sh
# overwrite project gitignore
echo "creating git repository"
cp ../rtools/etc/gitignore .gitignore
log-command.sh rdev.git.init git init -q
log-command.sh rdev.git.add git add -A
echo "git pre impex commit"
log-command.sh rdev.git.commit git commit -a -m "pre impex"
echo "impex patching - removing % identified bys"
log-command.sh rdev.impex.patch patch -p1 <../rtools/etc/patches/impex-no-user-percent.patch
mysqlCreateDBs.sh $1 $2 $RICE_DB_USER $RICE_DB_PASS
cd $R_HOME/$1
fi
echo "running custom updates"
# for installing the saucelabs quickstart, see saucelabs patch in rPatches.sh
# Sauce Labs params are a problem if rice db user and pass are not given
#if [ ! -z "$7" ]
#then
# rSauceLabs.sh $6 $7
#fi
# get rid of the file not found exceptions
#touch core/impl/OJB.properties
#echo "<descriptor-repository version=\"1.0\"></descriptor-repository>" > core/impl/repository.xml
#touch kns/OJB.properties
#touch impl/OJB.properties
#echo "<descriptor-repository version=\"1.0\"></descriptor-repository>" > impl/repository.xml
# dev tweeks
rPatches.sh
echo -e "\nCreating MySQL config files"
rCommonTestConfigMysql.sh $1 $RICE_DB_USER $RICE_DB_PASS
rAppConfigSampleMysql.sh $1 $RICE_DB_USER $RICE_DB_PASS 8080
rAppConfigKradSampleMysql.sh $1 $RICE_DB_USER $RICE_DB_PASS 8080
rAppConfigStandaloneMysql.sh $1 $RICE_DB_USER $RICE_DB_PASS
rSpyProperties.sh $1
rLogin.sh
rNoCacheFilter.sh
rIntellijConfig.sh $1
if [ -z "$NO_DTS_LOGS" ]
then
rDtsLogFiles.sh $1
fi
rKradreload.sh
echo -e "\nCreating intellijJunitAltConfigLocation.sh to be used after starting IntelliJ to set JUnit default to use -Dalt.config.location=$R_HOME/$1/$1-common-test-config.xml"
echo "xml ed -u \"/project/component/configuration[@type='JUnit']/option[@name='VM_PARAMETERS']/@value\" -v \"-Dalt.config.location=$R_HOME$/$1/$1-common-test-config.xml\" config/ide/intellij/.idea/workspace.xml" > intellijJunitAltConfigLocation.sh
echo ""
log-command.sh rdev.git.add git add -A
echo "git applied rDev custom updates commit"
# NOTE(review): log label says rdev.svn.commit but the command is git commit.
log-command.sh rdev.svn.commit git commit -a -m "applied rDev custom updates"
mvnLinks.sh $1
echo -e "\nStarting mvn-clean-install.sh this will take a while..."
mvn-clean-install.sh $5 $6 $7 $8 $9 -T 4
# Archive this run's logs under a timestamped directory.
mkdir -p $R_HOME/logs/$1/rDev-$DTS
mv $R_HOME/logs/$1/*.out $R_HOME/logs/$1/rDev-$DTS/
mv $R_HOME/logs/$1/*.log $R_HOME/logs/$1/rDev-$DTS/
echo -e "Logs are available in $R_HOME/logs/$1/rDev-$DTS/"
# Report elapsed wall-clock time as h:mm:ss.
etime=$(date '+%s')
dt=$((etime - stime))
ds=$((dt % 60))
dm=$(((dt / 60) % 60))
dh=$((dt / 3600))
printf 'Elapsed time %d:%02d:%02d' $dh $dm $ds
echo -e "\n\n"
| true
|
b9f45c602b9fac1e316e3d06f129a445e1c414d9
|
Shell
|
simensma/sherpa
|
/util/git_hooks/pre-push
|
UTF-8
| 1,678
| 3.890625
| 4
|
[] |
no_license
|
#!/bin/bash
# An example hook script to verify what is about to be pushed. Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed. If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
# <local ref> <local sha1> <remote ref> <remote sha1>
#
# This implementation auto-increments the patch component of the latest
# v-prefixed tag and creates a new signed tag on every push from master.
remote="$1"
remote_url="$2"
branch=`git rev-parse --abbrev-ref HEAD`
# Do not make a new tag unless this is the master branch.
if [ $branch != "master" ]
then
exit 0
fi
# Find the last tagged version in git.
# tag_curr is the same tag with dots replaced by newlines, so the unquoted
# for-loop below iterates over its major/minor/patch components.
tag_orig=`git tag --list | grep v | tail -1`
tag_curr=`git tag --list | grep v | tail -1 | tr "." "\n"` #lacyProgrammer
# Do not make a new tag if the HEAD rev already has a tag.
tag_rev=`git rev-parse $tag_orig`
head_rev=`git rev-parse HEAD`
if [ $tag_rev = $head_rev ]
then
exit 0
fi
# Increment tag patch version (component index 2).
tag_new=""
i=0
for part in $tag_curr
do
if [ $i -eq 2 ]
then
((part++))
tag_new+="$part"
# We follow semantic versioning, it is highly unlikely that we should ever
# reach patch 50 on the same minor version during active development!
# NOTE(review): this only warns — it does not block the push.
if [ $part -gt 50 ]
then
echo "50 patch versions in a row! Go read http://semver.org!"
fi
else
tag_new+="$part."
fi
((i++))
done
# Add the new tag to git.
# NOTE(review): the command substitution just runs `git tag` and discards
# its (empty) output; a plain invocation would behave the same.
$(git tag -a $tag_new -m $tag_new -s)
exit 0
| true
|
7a02c6b79b280bccf1d650552ab8d19b9a863d85
|
Shell
|
ajax711/dotfiles
|
/autosetup.sh
|
UTF-8
| 2,613
| 2.671875
| 3
|
[] |
no_license
|
# Personal machine bootstrap: update apt, install desktop apps (Zoom,
# Discord, Stremio, Steam), shell/terminal setup (bash-it, kitty + ayu
# theme), git/ssh identity, hotspot tooling (create_ap) and ProtonVPN.
cd ~
sudo apt-get update
sudo apt-get upgrade
# Only works on xfce
#wget -c http://0x0.st/-AGo.xml -O xfce4-keyboard-shortcuts.xml
#mv xfce4-keyboard-shortcuts.xml ~/.config/xfce4/xfconf/xfce-perchannel-xml/xfce4-keyboard-shortcuts.xml
#Download Zoom
wget https://zoom.us/client/latest/zoom_amd64.deb
#Download Discord
wget "https://discord.com/api/download?platform=linux&format=deb"
mv download\?platform\=linux\&format\=deb discord.deb
#Fix it
sudo apt-get install -f
#Install
sudo dpkg -i zoom_amd64.deb discord.deb
#Stremio
# NOTE(review): libfdk-aac1 is installed twice below with different
# versions (0.1.6 then 0.1.5) — confirm which one is actually needed.
wget nz2.archive.ubuntu.com/ubuntu/pool/main/o/openssl/libssl1.1_1.1.1-1ubuntu2.1~18.04.20_amd64.deb
sudo dpkg -i libssl1.1_1.1.1-1ubuntu2.1~18.04.20_amd64.deb
wget http://archive.ubuntu.com/ubuntu/pool/multiverse/f/fdk-aac/libfdk-aac1_0.1.6-1_amd64.deb
sudo dpkg -i libfdk-aac1_0.1.6-1_amd64.deb
wget https://dl.strem.io/shell-linux/v4.4.116/stremio_4.4.116-1_amd64.deb
wget http://archive.ubuntu.com/ubuntu/pool/multiverse/f/fdk-aac/libfdk-aac1_0.1.5-1_amd64.deb
sudo dpkg -i libfdk-aac1_0.1.5-1_amd64.deb
sudo apt install ./stremio_4.4.116-1_amd64.deb
# Install Arduino
#wget http://0x0.st/-A0v.xz //arduino
#tar -xvf -A0v.xz --directory ~/
#sudo touch /usr/bin/arduino
#sudo printf "cd ~/arduino-nightly \n ./arduino" >> /usr/bin/arduino
#sudo chmod a+x /usr/bin/arduino
#Install Steam
sudo apt-get install steam
#Install Bash it Theme
git clone --depth=1 https://github.com/Bash-it/bash-it.git ~/.bash_it
~/.bash_it/install.sh
#Set up kitty and install ayu theme
sudo apt-get install kitty
mkdir ~/.config/kitty
THEME=https://raw.githubusercontent.com/dexpota/kitty-themes/master/themes/ayu.conf
wget "$THEME" -P ~/.config/kitty/kitty-themes/themes
cd ~/.config/kitty
ln -s ./kitty-themes/themes/ayu.conf ~/.config/kitty/theme.conf
touch kitty.conf
echo include ./theme.conf >> kitty.conf
#setup guthub ssh
# The ssh_setup file scripts ssh-keygen's prompts: key file name, then
# "z" twice as the passphrase answers.
cd ~
touch ssh_setup
echo "ssh_github" >> ssh_setup
echo "z" >> ssh_setup
echo "z" >> ssh_setup
cat ssh_setup | ssh-keygen -t ed25519 -C "ajx@disroot.org"
git config --global user.email ajx@disroot.org
git config --global user.name bigass
rm ~/.bash_aliases
cp dotfiles/bash_aliases ~/
cd ~
mv bash_aliases .bash_aliases
source .bash_aliases
#setup hotspot
cd ~
sudo apt install hostapd
git clone https://github.com/oblique/create_ap
cd create_ap
# NOTE(review): `make install` without sudo likely needs root — confirm.
make install
#run hotspot by sudo create_ap wlp4s0 wlp4s0 <SSID> <Password>
#Setup protonvpn
cd ~
wget https://repo.protonvpn.com/debian/dists/stable/main/binary-all/protonvpn-stable-release_1.0.3_all.deb
# NOTE(review): dpkg -i without sudo here — likely needs root; confirm.
dpkg -i protonvpn-stable-release_1.0.3_all.deb
sudo apt update
sudo apt install protonvpn
| true
|
e967cbd719f9296b869db4d4f02bec67d0b0342e
|
Shell
|
robcore/Actiontec_T3200M
|
/targets/963138BGW/filesystem/bin/rescan_dlna.sh
|
UTF-8
| 206
| 2.96875
| 3
|
[] |
no_license
|
#!/bin/sh
# Periodically nudge the DLNA server to rescan: every $1 seconds, send
# signal 10 (SIGUSR1 on Linux) to PID $2, forever.
cd /var/
# Reject a negative interval or a PID below 1.
if [ 0 -gt $1 ] || [ 1 -gt $2 ]; then
echo "Invalid input params to rescan dlna!"
else
# NOTE(review): $1 never changes inside the loop, so this is intentionally
# an infinite loop (and never entered when $1 is 0) — confirm that is the
# desired behavior.
while [ $1 -gt 0 ]
do
sleep $1
kill -10 $2 > /dev/null 2>&1
done
fi
| true
|
f1340acfbd309adb2d757f1463ea20e769f0068c
|
Shell
|
Vrganj/Brainfuck
|
/brainfuck
|
UTF-8
| 374
| 2.8125
| 3
|
[] |
no_license
|
#!/bin/bash
# Compile a Brainfuck source file ($1) to a native binary (bf.out) by
# translating it to C (output.c) and invoking gcc.
# FIX: quote "$1" (paths with spaces), require the argument (the original
# sed would silently block on stdin when no file was given), and emit the
# C scaffolding with a single heredoc instead of four echo calls.
set -euo pipefail

if [ $# -lt 1 ] || [ ! -r "$1" ]; then
    echo "usage: $0 <brainfuck-source>" >&2
    exit 1
fi

# C scaffolding: a large tape with the data pointer starting mid-tape so
# programs can move left as well as right.
# NOTE(review): offset 81960 != 163840/2 (81920); kept as-is — confirm intent.
cat > output.c <<'EOF'
#include <stdio.h>
char buffer[163840];
char *p = buffer + 81960;
int main() {
EOF

# Strip every non-Brainfuck character, then map each opcode to its C form:
# + - < > [ ] . , -> ++*p; --*p; --p; ++p; while(*p){ } putchar(*p); *p=fgetc(stdin);
sed 's/[^][+,.<>-]//g;s/+/++*p;/g;s/\-/--*p;/g;s/</--p;/g;s/>/++p;/g;s/\[/while(*p){/g;s/\]/}/g;s/\./putchar(*p);/g;s/,/*p=fgetc(stdin);/g;' "$1" >> output.c
echo '}' >> output.c

gcc -O2 output.c -o bf.out
| true
|
80e4102241ae0fad8c1b70354b73bba6923874d7
|
Shell
|
McNetic/RetroPie-Setup
|
/scriptmodules/libretrocores/doom.sh
|
UTF-8
| 1,306
| 2.84375
| 3
|
[] |
no_license
|
# RetroPie-Setup module: Doom via the libretro PrBoom core. The helper
# functions used here (gitPullOrClone, mkRomDir, iniConfig, setESSystem,
# ...) and the md_*/configdir/romdir variables are provided by the
# RetroPie-Setup framework that sources this file.
rp_module_id="doom"
rp_module_desc="Doom LibretroCore"
rp_module_menus="2+"
# Fetch the libretro PrBoom core sources into the build dir.
function sources_doom() {
gitPullOrClone "$md_build" git://github.com/libretro/libretro-prboom.git
}
# Build the core; md_ret_require names the artifact that must exist for
# the build to be considered successful.
function build_doom() {
make clean
make
md_ret_require="$md_build/prboom_libretro.so"
}
# Files the framework copies into the install directory.
function install_doom() {
md_ret_files=(
'prboom_libretro.so'
'prboom.wad'
)
}
# Create rom dir and retroarch config, fetch the Doom 1 shareware WAD,
# generate a launcher script, and register the Ports system in ES.
function configure_doom() {
mkRomDir "ports/doom"
ensureSystemretroconfig "doom"
# system-specific shaders, doom
iniConfig " = " "" "$configdir/doom/retroarch.cfg"
iniSet "input_remapping_directory" "$configdir/doom/"
cp prboom.wad "$romdir/ports/doom/"
# download doom 1 shareware
wget "http://downloads.petrockblock.com/retropiearchives/doom1.wad" -O "$romdir/ports/doom/doom1.wad"
chown $user:$user "$romdir/ports/doom/"*
cat > "$romdir/ports/Doom 1 Shareware.sh" << _EOF_
#!/bin/bash
$rootdir/supplementary/runcommand/runcommand.sh 1 "$emudir/retroarch/bin/retroarch -L $md_inst/prboom_libretro.so --config $configdir/all/retroarch.cfg --appendconfig $configdir/doom/retroarch.cfg $romdir/ports/doom/doom1.wad" "$md_id"
_EOF_
chmod +x "$romdir/ports/Doom 1 Shareware.sh"
setESSystem 'Ports' 'ports' '~/RetroPie/roms/ports' '.sh .SH' '%ROM%' 'pc' 'ports'
}
| true
|
8f9d981e29f789d79fe59f1dfdbd0d377a05adbe
|
Shell
|
google/kati
|
/testcase/ninja_regen.sh
|
UTF-8
| 2,908
| 3.109375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
#
# Copyright 2015 Google Inc. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Regression test: kati's ninja generator must regenerate its output
# exactly when inputs change (Makefile edits, referenced env vars, $PATH,
# wildcard results, make goals) and must NOT regenerate for irrelevant
# changes (unused/deprecated/obsolete env vars, unrelated files). Each
# step checks stderr for the "regenerating" notice and echoes a complaint
# (compared against expected output by the harness) on mismatch.
set -e
log=stderr_log
mk="$@"
export VAR=hoge
cat <<EOF > Makefile
all:
echo foo
EOF
${mk} 2> ${log}
if [ -e ninja.sh ]; then
./ninja.sh
fi
# sleep so the new Makefile gets a newer timestamp.
sleep 1
cat <<EOF > Makefile
\$(KATI_deprecated_var VAR4)
\$(KATI_obsolete_var VAR5)
VAR3 := unused
all:
echo bar
echo VAR=\$(VAR)
echo VAR2=\$(VAR2)
echo VAR3=\$(VAR3)
echo wildcard=\$(wildcard *.mk)
other:
echo foo
EOF
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if ! grep regenerating ${log} > /dev/null; then
echo 'Should be regenerated (Makefile)'
fi
./ninja.sh
fi
# Changing a referenced env var must trigger regeneration.
export VAR=fuga
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if ! grep regenerating ${log} > /dev/null; then
echo 'Should be regenerated (env changed)'
fi
./ninja.sh
fi
# Newly setting a referenced env var must trigger regeneration.
export VAR2=OK
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if ! grep regenerating ${log} > /dev/null; then
echo 'Should be regenerated (env added)'
fi
./ninja.sh
fi
# VAR3 is assigned in the Makefile, so the environment value is unused.
export VAR3=testing
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if grep regenerating ${log} >/dev/null; then
echo 'Should not regenerate (unused env added)'
fi
./ninja.sh
fi
export VAR3=test2
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if grep regenerating ${log} >/dev/null; then
echo 'Should not regenerate (unused env changed)'
fi
./ninja.sh
fi
# Deprecated/obsolete vars must not force regeneration either.
export VAR4=foo
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if grep regenerating ${log} >/dev/null; then
echo 'Should not regenerate (deprecated env added)'
fi
./ninja.sh
fi
export VAR5=foo
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if grep regenerating ${log} >/dev/null; then
echo 'Should not regenerate (obsolete env added)'
fi
./ninja.sh
fi
export PATH=/random_path:$PATH
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if ! grep regenerating ${log} > /dev/null; then
echo 'Should be regenerated (PATH changed)'
fi
./ninja.sh
fi
sleep 1
# A new *.mk file changes the $(wildcard *.mk) result.
touch PASS.mk
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if ! grep regenerating ${log} > /dev/null; then
echo 'Should be regenerated (wildcard)'
fi
./ninja.sh
fi
sleep 1
# XXX does not match the wildcard, so it must not force regeneration.
touch XXX
${mk} 2> ${log}
if [ -e ninja.sh ]; then
if grep regenerating ${log}; then
echo 'Should not be regenerated'
fi
./ninja.sh
fi
# Building a different goal must trigger regeneration.
${mk} other 2> ${log}
if [ -e ninja.sh ]; then
if ! grep regenerating ${log} >/dev/null; then
echo 'Should be regenerated (argument)'
fi
./ninja.sh other
fi
| true
|
ea1db261c9f193b86ab0dce0708974b39852ef9e
|
Shell
|
jmark99/demo-java-x
|
/platform-logging.sh
|
UTF-8
| 1,248
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build and run the platform-logging demo: compile + package the logger
# and app as modular JARs under target/logging/mods, then launch the app.
set -e
# Build the Maven project once so the app JARs in target/app exist.
if ! [ -e target/app ]
then
printf "# Building Maven project, so app JARs are available...\n"
mvn clean package
fi
echo "# Creating clean directories"
rm -rf target/logging/classes
mkdir -p target/logging/classes
rm -rf target/logging/mods
mkdir -p target/logging/mods
echo "# Compiling and packaging logger"
mkdir target/logging/classes/dev.nipafx.demo.java9.logging
javac \
-d target/logging/classes/dev.nipafx.demo.java9.logging \
src/platform_logging/java/dev/nipafx/demo/java9/api/platform_logging/logger/*.java
jar \
-c \
--file target/logging/mods/dev.nipafx.demo.java9.logging.jar \
-C target/logging/classes/dev.nipafx.demo.java9.logging/ .
echo "# Compiling and packaging app"
mkdir target/logging/classes/dev.nipafx.demo.java9.app
javac \
-p target/app \
-d target/logging/classes/dev.nipafx.demo.java9.app \
src/platform_logging/java/dev/nipafx/demo/java9/api/platform_logging/app/*.java
jar \
-c \
--file target/logging/mods/dev.nipafx.demo.java9.app.jar \
--main-class dev.nipafx.demo.java9.api.platform_logging.app.LoggingApplication \
-C target/logging/classes/dev.nipafx.demo.java9.app/ .
echo "# Running App"
# NOTE(review): launches module dev.nipafx.demo.javaX.app, not the
# dev.nipafx.demo.java9.app module packaged above — presumably resolved
# from target/app; confirm this is intentional.
java -verbose:gc -p target/app:target/logging/mods -m dev.nipafx.demo.javaX.app
| true
|
79dc6559fbfcd9b9216c72f00b239b6ffb8d8031
|
Shell
|
siyka-au/learn
|
/reset_app.sh
|
UTF-8
| 360
| 2.84375
| 3
|
[] |
no_license
|
#!/bin/bash
############################
# Reset the hello_world app on a server: stop its Docker container and
# list containers afterwards so the result is visible.
#
# Use: ssh -o StrictHostKeyChecking=no root@hello.dpunks.org "./reset_app.sh"
# (replace with your server hostname)
#############################
# FIX: the two status messages were swapped and misspelled ("Removing ...
# to" was printed before the stop, "Stoping ... in" after); they now
# describe what actually happens in order.
echo "Stopping Hello World Docker container"
docker stop hello_world
echo "Stopped Hello World Docker container; current containers:"
docker ps -a
| true
|
e463f8f137fac7f749e369ad5ff491c4e4226455
|
Shell
|
StopBadware/watson
|
/play.sh
|
UTF-8
| 292
| 3.046875
| 3
|
[] |
no_license
|
#! /bin/bash
# Assemble the `play` command line from the non-comment lines of ./.env
# and run it. Lines carrying "-J-X..." pass-through JVM options are
# rewritten to their direct "-X..." form before being appended.

play_invocation="play \"$1\""
assembled=""

comment_re='^#.*'
jvm_opt_re='.*\s-J-X.+'

while read entry
do
  # Skip comment lines entirely.
  [[ $entry =~ $comment_re ]] && continue
  # Translate -J-X JVM flags into plain -X flags.
  [[ $entry =~ $jvm_opt_re ]] && entry=${entry//-J-X/-X}
  assembled+="$entry "
done < .env

# eval so the collected .env words are re-parsed as part of the command.
eval "$assembled$play_invocation"
| true
|
a1f1431ac8091422a3f028eccd298cc6e15b0f11
|
Shell
|
actions/actions-runner-controller
|
/runner/logger.sh
|
UTF-8
| 2,576
| 4.5
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Minimal leveled logger for runner scripts.
#
# Drop-in replacement contract: any /usr/local/bin/logger.sh placed in the
# image must define log.debug / log.notice / log.warning / log.error /
# log.success, each accepting arbitrary arguments that form the
# (unstructured) message. Setting LOG_<LEVEL>_DISABLED (any value)
# suppresses that level; setting NO_COLOR (https://no-color.org/) turns
# off ANSI coloring.
#
# Entry format — easy to parse with standard string tools:
#   YYYY-MM-DD hh:mm:ss.SSS LEVEL --- message
#   2022-03-19 10:01:23.172 NOTICE --- example message
#
# Strict mode (`set -Eeuo pipefail`) is intentionally NOT enabled here:
# this file is sourced by scripts that may not be ready for it, and the
# functions below do not need it (no signals, no pipes, failures of
# printf/date are deliberately ignored).

# Internal helper — derives the level from the caller's function name
# (FUNCNAME[1], e.g. "log.notice" -> NOTICE) and prints to stderr.
# Implementation detail: MUST NOT be called from outside this script.
__log() {
  local color_code timestamp level_name
  color_code=${1:?missing required <color> argument}
  shift
  level_name=${FUNCNAME[1]}      # `main` if called from top-level
  level_name=${level_name#log.}  # drop the "log." prefix
  level_name=${level_name^^}     # UPPERCASE
  # A set LOG_<LEVEL>_DISABLED variable (value irrelevant) mutes the level.
  if [[ -v "LOG_${level_name}_DISABLED" ]]; then
    return 0
  fi
  # Timestamp is best effort; `date` failures are swallowed on purpose.
  timestamp=$(date '+%F %T.%-3N' 2>/dev/null || :)
  if [[ -v NO_COLOR ]]; then
    printf -- '%s %s --- %s\n' "$timestamp" "$level_name" "$*" 1>&2 || :
  else
    printf -- '\033[0;%dm%s %s --- %s\033[0m\n' "$color_code" "$timestamp" "$level_name" "$*" 1>&2 || :
  fi
  return 0
}

# To log with a dynamic level, pick the wrapper by name:
#   level=notice
#   command || level=error
#   "log.$level" message
#
# @formatter:off
log.debug   () { __log 37 "$@"; } # white
log.notice  () { __log 34 "$@"; } # blue
log.warning () { __log 33 "$@"; } # yellow
log.error   () { __log 31 "$@"; } # red
log.success () { __log 32 "$@"; } # green
# @formatter:on
| true
|
bf1c5cb041e686a7fe5edf4c52ceeb1dcf0ba278
|
Shell
|
Thomah/postgres-tools
|
/pgdrop.sh
|
UTF-8
| 314
| 2.828125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Drop the PostgreSQL database described by a named connection profile.
#
# Usage: pgdrop.sh <profile>
# Sources conf/<profile>.sh, which must define HOST, PORT, USERNAME,
# DBNAME and BACKUPDIR; dropdb errors are appended to a dated log file.
# FIX: quote all expansions (profile names / BACKUPDIR with spaces broke
# the unquoted originals), replace deprecated backticks with $(...), and
# fail early on a missing profile argument via `set -eu`.
set -eu

SCRIPT_PATH=$(dirname "$0")
CONF_NAME=$1
source "$SCRIPT_PATH/conf/$CONF_NAME.sh"

PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/postgres/bin:/usr/local/pgsql/bin

DATE=$(date +%Y-%m-%d)
DOW=$(date +%A)
FILENAME="$BACKUPDIR/$DATE.$DOW"

dropdb -i -h "$HOST" -p "$PORT" -U "$USERNAME" "$DBNAME" 2>> "$FILENAME.drop.log"
| true
|
32f395a2c7923c6da7bc0ca1046786b9a92560f8
|
Shell
|
christoomey/dotfiles
|
/zsh/completion/_heroku
|
UTF-8
| 7,938
| 2.546875
| 3
|
[
"MIT"
] |
permissive
|
#compdef heroku
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for the Heroku client gem (https://github.com/heroku/heroku)
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * Ali B. (http://awhitebox.com)
#
# ------------------------------------------------------------------------------
local -a _1st_arguments
_1st_arguments=(
"account\:confirm_billing":"Confirm that your account can be billed at the end of the month"
"addons":"list installed addons"
"addons\:list":"list all available addons"
"addons\:add":"install an addon"
"addons\:upgrade":"upgrade an existing addon"
"addons\:downgrade":"downgrade an existing addon"
"addons\:remove":"uninstall an addon"
"addons\:open":"open an addon's dashboard in your browser"
"apps":"list your apps"
"apps\:info":"show detailed app information"
"apps\:create":"create a new app"
"apps\:rename":"rename the app"
"apps\:open":"open the app in a web browser"
"apps\:destroy":"permanently destroy an app"
"auth\:login":"log in with your heroku credentials"
"auth\:logout":"clear local authentication credentials"
"backup":"capture a backup of the DB"
"browse":"open the configuration page for the app on Heroku's dashboard"
"config":"display a config vars for an app"
"config\:get":"display a config value for an app"
"config\:set":"set one or more config vars"
"config\:unset":"unset one or more config vars"
"db\:push":"push local data up to your app"
"db\:pull":"pull heroku data down into your local database"
"deploy":"deploy the application"
"domains":"list custom domains for an app"
"domains\:add":"add a custom domain to an app"
"domains\:remove":"remove a custom domain from an app"
"domains\:clear":"remove all custom domains from an app"
"help":"list available commands or display help for a specific command"
"keys":"display keys for the current user"
"keys\:add":"add a key for the current user"
"keys\:remove":"remove a key from the current user"
"keys\:clear":"remove all authentication keys from the current user"
"logs":"display recent log output"
"logs\:cron":"DEPRECATED: display cron logs from legacy logging"
"logs\:drains":"manage syslog drains"
"maintenance\:on":"put the app into maintenance mode"
"maintenance\:off":"take the app out of maintenance mode"
"open":"open app in a web browser"
"pg\:info":"display database information"
"pg\:ingress":"allow direct connections to the database from this IP for one minute"
"pg\:promote":"sets DATABASE as your DATABASE_URL"
"pg\:psql":"open a psql shell to the database"
"pg\:reset":"delete all data in DATABASE"
"pg\:unfollow":"stop a replica from following and make it a read/write database"
"pg\:wait":"monitor database creation, exit when complete"
"pgbackups":"list captured backups"
"pgbackups\:url":"get a temporary URL for a backup"
"pgbackups\:capture":"capture a backup from a database id"
"pgbackups\:restore":"restore a backup to a database"
"pgbackups\:destroy":"destroys a backup"
"plugins":"list installed plugins"
"plugins\:install":"install a plugin"
"plugins\:uninstall":"uninstall a plugin"
"plugins\:update":"updates all plugins or a single plugin by name"
"ps\:dynos":"scale to QTY web processes"
"ps\:workers":"scale to QTY background processes"
"ps":"list processes for an app"
"ps\:restart":"restart an app process"
"ps\:scale":"scale processes by the given amount"
"releases":"list releases"
"releases\:info":"view detailed information for a release"
"restore":"restore the DB from the most recent backup"
"rollback":"roll back to an older release"
"run":"run an attached process"
"run\:rake":"remotely execute a rake command"
"run\:console":"open a remote console session"
"sharing":"list collaborators on an app"
"sharing\:add":"add a collaborator to an app"
"sharing\:remove":"remove a collaborator from an app"
"sharing\:transfer":"transfer an app to a new owner"
"ssl":"list certificates for an app"
"ssl\:add":"add an ssl certificate to an app"
"ssl\:remove":"remove an ssl certificate from an app"
"ssl\:clear":"remove all ssl certificates from an app"
"stack":"show the list of available stacks"
"stack\:migrate":"prepare migration of this app to a new stack"
"version":"show heroku client version"
)
_arguments '*:: :->command'
if (( CURRENT == 1 )); then
_describe -t commands "heroku command" _1st_arguments
return
fi
local -a _command_args
case "$words[1]" in
apps:info)
_command_args=(
'(-r|--raw)'{-r,--raw}'[output info as raw key/value pairs]' \
)
;;
apps:create)
_command_args=(
'(-a|--addons)'{-a,--addons}'[a list of addons to install]' \
'(-r|--remote)'{-r,--remote}'[the git remote to create, default "heroku"]' \
'(-s|--stack)'{-s,--stack}'[the stack on which to create the app]' \
)
;;
config)
_command_args=(
'(-s|--shell)'{-s,--shell}'[output config vars in shell format]' \
)
;;
db:push)
_command_args=(
'(-c|--chunksize)'{-c,--chunksize}'[specify the number of rows to send in each batch]' \
'(-d|--debug)'{-d,--debug}'[enable debugging output]' \
'(-e|--exclude)'{-e,--exclude}'[exclude the specified tables from the push]' \
'(-f|--filter)'{-f,--filter}'[only push certain tables]' \
'(-r|--resume)'{-r,--resume}'[resume transfer described by a .dat file]' \
'(-t|--tables)'{-t,--tables}'[only push the specified tables]' \
)
;;
db:pull)
_command_args=(
'(-c|--chunksize)'{-c,--chunksize}'[specify the number of rows to send in each batch]' \
'(-d|--debug)'{-d,--debug}'[enable debugging output]' \
'(-e|--exclude)'{-e,--exclude}'[exclude the specified tables from the pull]' \
'(-f|--filter)'{-f,--filter}'[only pull certain tables]' \
'(-r|--resume)'{-r,--resume}'[resume transfer described by a .dat file]' \
'(-t|--tables)'{-t,--tables}'[only pull the specified tables]' \
)
;;
keys)
_command_args=(
'(-l|--long)'{-l,--long}'[display extended information for each key]' \
)
;;
logs)
_command_args=(
'(-n|--num)'{-n,--num}'[the number of lines to display]' \
'(-p|--ps)'{-p,--ps}'[only display logs from the given process]' \
'(-s|--source)'{-s,--source}'[only display logs from the given source]' \
'(-t|--tail)'{-t,--tail}'[continually stream logs]' \
)
;;
pgbackups:capture)
_command_args=(
'(-e|--expire)'{-e,--expire}'[if no slots are available to capture, delete the oldest backup to make room]' \
)
;;
stack)
_command_args=(
'(-a|--all)'{-a,--all}'[include deprecated stacks]' \
)
;;
esac
_arguments \
$_command_args \
'(-a|--app)'{-a,--app}'[the app name]' \
'(-r|--remote)'{-r,--remote}'[the git remote name]:remote:->remotes' \
&& return 0
# Complete `-r/--remote` values with the git remotes of the current repo
# (parsed from `git config --get-regexp remote.*.url`), falling back to
# file-based remotes under $gitdir/remotes when none are configured.
__heroku_git_remotes () {
local expl gitdir remotes
gitdir=$(_call_program gitdir git rev-parse --git-dir 2>/dev/null)
__heroku_git_command_successful || return
remotes=(${${(f)"$(_call_program remotes git config --get-regexp '"^remote\..*\.url$"')"}//#(#b)remote.(*).url */$match[1]})
__heroku_git_command_successful || return
# TODO: Should combine the two instead of either or.
if (( $#remotes > 0 )); then
_wanted remotes expl remote compadd $* - $remotes
else
_wanted remotes expl remote _files $* - -W "($gitdir/remotes)" -g "$gitdir/remotes/*"
fi
}
# Return 1 (and show a completion message) if any command in the last
# pipeline exited non-zero; 0 otherwise.
__heroku_git_command_successful () {
if (( ${#pipestatus:#0} > 0 )); then
_message 'not a git repository'
return 1
fi
return 0
}
case $state in
(remotes)
__heroku_git_remotes && return 0
;;
esac
# Local Variables:
# mode: Shell-Script
# sh-indentation: 2
# indent-tabs-mode: nil
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et
| true
|
966d185bc4d8243d5d0f68f786b8cbea29591f44
|
Shell
|
redorca/home-env
|
/bin/stuff.sh
|
UTF-8
| 419
| 3.15625
| 3
|
[] |
no_license
|
#!/bin/bash
# Scratch utility: extract the active COLOR variable assignments from
# ~/bin/colors, escape them, and splice them into ~/bin/zmake after the
# "### COLORS Begin" marker, writing the result to /tmp/foo. The
# commented-out sed expressions below are earlier escaping experiments
# kept for reference. NOTE(review): KEY1 is defined but never used.
KEY0="^### COLORS Begin"
KEY1="^### COLORS End"
SCRIPT=~/bin/zmake
# Grab non-comment lines containing UPPERCASE= assignments, then escape
# backslashes and double quotes so they survive the eval/sed below.
FOO=$(cat ~/bin/colors | sed -n -e '/^[^#].*[A-Z]=/p' | sed \
-e 's/\\/\\\\/g' \
-e 's/"/\\"/g' \
)
# -e 's/\[/\\[/g' \
# -e 's/;/\\;/g' \
# -e 's/$/\;/g' \
#FOO='a b c;
#d e f;'
# echo "::FOO ($FOO)"
# \
# -e 's/\\/\\\\/g' \
# -e 's/$/xxx;/g' \
# )"
# eval is used so $FOO's (multi-line) value is substituted into the sed
# `a` (append-after-match) command before sed parses it.
eval sed -e \'/$KEY0/a$FOO\' $SCRIPT > /tmp/foo
| true
|
fb562e6a6f5af284785d41ed7e54c6c289066706
|
Shell
|
petronny/aur3-mirror
|
/crypt/cryptcfg
|
UTF-8
| 9,085
| 3.828125
| 4
|
[] |
no_license
|
#!/bin/bash
#
# cryptcfg by Jakub Schmidtke <sjakub@gmail.com>
#
# Based on netcfg script from Arch's initscripts 0.8-12 package.
#
#
# Version 0.3 - added LUKS support
#
# Pull in Arch's runtime configuration and the stat_busy/stat_* helpers
# used throughout this script.
. /etc/rc.conf
. /etc/rc.d/functions
CRYPTCFG_VER=0.3
# Make sure the sbin directories are searched: cryptsetup and truecrypt
# usually live there.
PATH="/bin:/usr/bin:/sbin:/usr/sbin:$PATH"
# One profile file per encrypted volume lives in this directory.
PROFILE_DIR="/etc/crypt-profiles"
# Emit the tool name and current version string on stdout.
version()
{
	printf 'cryptcfg v%s\n' "${CRYPTCFG_VER}"
}
# Print the version banner followed by command-line usage to stdout.
# Bug fix: the --stop line previously read "crypcfg" (typo).
usage()
{
	version
	echo
	echo "usage: cryptcfg <profile_name>"
	echo "       cryptcfg --stop <profile_name>"
	echo "       cryptcfg --menu"
	echo "       cryptcfg --stopall"
	echo
	echo "Crypt profiles are stored in $PROFILE_DIR"
	echo
}
# Test whether profile $1 is currently started (volume opened/mapped).
# NOTE: the return convention is inverted relative to shell custom —
# returns 1 when the profile IS started, 0 when it is not (or on any
# malformed profile). Sources the profile file, so it clobbers the
# DISK_DEVICE/MAP_DEVICE/CRYPT_METHOD/MAP_ALIAS globals.
is_started_prof()
{
	[ "$1" = "" ] && return 0
	[ ! -f $PROFILE_DIR/$1 ] && return 0
	unset DISK_DEVICE MAP_DEVICE
	. $PROFILE_DIR/$1
	[ "$DISK_DEVICE" = "" ] && return 0
	[ ! -b $DISK_DEVICE ] && return 0
	if [ "$CRYPT_METHOD" = "truecrypt" ]; then
		# Started iff truecrypt lists a mapping for this disk device.
		MAP_DEVICE=$( truecrypt -l | grep $DISK_DEVICE"$" | cut -d' ' -f1 )
		[ ${#MAP_DEVICE} -eq 0 ] && return 0
		return 1
	elif [ "$CRYPT_METHOD" = "luks" ]; then
		[ "$MAP_ALIAS" = "" ] && return 0
		# Parse "is active" out of `cryptsetup status` output.
		CRYPT_STATUS=$( cryptsetup status $MAP_ALIAS | grep /dev/mapper | cut -d ' ' -f3 | cut -d '.' -f 1 | cut -d ':' -f1 )
		[ "$CRYPT_STATUS" = "active" ] && return 1
		return 0
	fi
	return 0
}
# Shut down the crypt profile named in $1: dismount/unmap a truecrypt
# volume, or umount and luksClose a LUKS volume. Exits the script with a
# diagnostic on any failure.
stop_profile()
{
	if [ "$1" = "" ]; then
		echo "error: missing profile name"
		exit 1
	fi
	PROF=$1
	is_started_prof $PROF
	ret=$?
	# is_started_prof returns 0 when NOT started.
	if [ "$ret" = "0" ]; then
		echo "error: profile $PROF not started"
		exit 1
	fi
	unset DISK_DEVICE
	unset MAP_ALIAS
	unset CRYPT_METHOD
	. $PROFILE_DIR/$PROF
	if [ ! -b $DISK_DEVICE ]; then
		echo "error: $PROF profile contains no valid DISK_DEVICE!"
		exit 1
	fi
	if [ "$CRYPT_METHOD" = "truecrypt" ]; then
		stat_busy "Shutting down profile: $PROF"
		truecrypt -d $DISK_DEVICE
		if [ $? -ne 0 ] ; then
			echo "error dismounting and/or unmapping $DISK_DEVICE volume"
			exit 1
		fi
	elif [ "$CRYPT_METHOD" = "luks" ]; then
		if [ ! -b /dev/mapper/$MAP_ALIAS ]; then
			echo "error: /dev/mapper/$MAP_ALIAS is not a valid block device!"
			exit 1
		fi
		stat_busy "Shutting down profile: $PROF"
		# space after MAP_ALIAS is important!
		MOUNT_STATUS=$( mount | grep "/dev/mapper/$MAP_ALIAS " )
		# NOTE(review): luksClose is only reached when the mapping was
		# mounted; an open-but-unmounted volume is left mapped — confirm
		# whether that is intentional.
		if [ ! "$MOUNT_STATUS" = "" ]; then
			stat_busy "umounting /dev/mapper/$MAP_ALIAS"
			umount "/dev/mapper/$MAP_ALIAS"
			if [ $? -ne 0 ]; then
				echo "error umounting /dev/mapper/$MAP_ALIAS"
				exit 1
			fi
			cryptsetup luksClose $MAP_ALIAS
			if [ $? -ne 0 ]; then
				echo "error closing $MAP_ALIAS crypt device!"
				exit 1
			fi
		fi
	else
		echo "error: $PROF profile contains no valid CRYPT_METHOD!"
		exit 1
	fi
}
# Close every mapped crypt device: first all truecrypt mappings, then every
# other /dev/mapper entry (except the control node) via umount + luksClose.
# Always exits 0; individual close failures are not checked.
stop_all()
{
	for i in /dev/mapper/truecrypt*; do
		if [ -b "$i" ]; then
			echo "Closing $i..."
			truecrypt -d $i
		fi
	done
	for i in /dev/mapper/*; do
		if [ ! "$i" = "/dev/mapper/control" ]; then
			if [ -b "$i" ]; then
				echo "Closing $i..."
				umount $i
				cryptsetup luksClose $i
			fi
		fi
	done
	exit 0
}
# Ask for the volume password via dialog and pipe it straight into
# truecrypt / cryptsetup luksOpen (so the password never hits a file).
# Expects DISK_DEVICE, MOUNT_DIR, CRYPT_METHOD (and MAP_ALIAS for luks)
# to be set from the sourced profile.
# Returns: 0 = volume opened, 1 = open failed (error shown), 2 = user
# cancelled or pressed ESC.
password_prompt()
{
	if [ "$DISK_DEVICE" = "" ]; then
		echo "error: missing DISK_DEVICE"
		exit 1
	fi
	if [ ! -b $DISK_DEVICE ]; then
		echo "error: $DISK_DEVICE block device does not exist"
		exit 1
	fi
	if [ "$MOUNT_DIR" = "" ]; then
		echo "error: missing MOUNT_DIR"
		exit 1
	fi
	if [ "$CRYPT_METHOD" = "" ]; then
		echo "error: missing CRYPT_METHOD"
		exit 1
	fi
	if [ "$CRYPT_METHOD" = "luks" ]; then
		if [ "$MAP_ALIAS" = "" ]; then
			echo "error: missing MAP_ALIAS"
			exit 1
		fi
	fi
	# --insecure makes dialog echo asterisks; suppressed via NO_ASTERISKS.
	D_PARAMS=""
	[ "$NO_ASTERISKS" = "" -o "$NO_ASTERISKS" = "0" ] && D_PARAMS="--insecure"
	# Temp file captures the crypt tool's output for the error popup.
	RESULT=$( mktemp ) || exit 1
	if [ "$CRYPT_METHOD" = "truecrypt" ]; then
		dialog \
			--stdout \
			$D_PARAMS \
			--passwordbox \
			"Enter password for encrypted volume $DISK_DEVICE:\n\n$DESCRIPTION\n " \
			12 60 | truecrypt $CRYPT_OPTIONS $DISK_DEVICE > $RESULT 2>&1
	elif [ "$CRYPT_METHOD" = "luks" ]; then
		dialog \
			--stdout \
			$D_PARAMS \
			--passwordbox \
			"Enter password for encrypted volume $DISK_DEVICE:\n\n$DESCRIPTION\n " \
			12 60 | cryptsetup $CRYPT_OPTIONS luksOpen $DISK_DEVICE $MAP_ALIAS > $RESULT 2>&1
	else
		rm $RESULT
		echo "error: Unknown CRYPT_METHOD ($CRYPT_METHOD)"
		exit 1
	fi
	# Capture both stages of the pipeline: dialog status and crypt status.
	rets=( ${PIPESTATUS[0]} ${PIPESTATUS[1]} )
	ret_dlg=${rets[0]}
	ret_tc=${rets[1]}
	res=$( cat $RESULT )
	rm $RESULT
	# If user pressed 'ok'
	if [ "$ret_dlg" = "0" ]; then
		if [ ! "$ret_tc" = "0" ]; then
			dialog --msgbox "$res" 10 60
			# Abnormal return status - report it
			return 1
		fi
		# Everything fine
		return 0
	fi
	# Otherwise it was cancel, or ESC
	return 2
}
# Start the crypt profile named in $1: prompt for the password, open the
# volume and mount it at MOUNT_DIR. On mount failure the volume is closed
# again and the script exits 1.
# Returns: 0 = mounted, 1 = password prompt error, 2 = prompt cancelled.
start_profile()
{
	if [ "$1" = "" ]; then
		echo "error: missing profile name"
		exit 1
	fi
	if [ ! -f $PROFILE_DIR/$1 ]; then
		echo "error: $PROFILE_DIR/$1 is missing"
		exit 1
	fi
	is_started_prof $1
	ret=$?
	if [ "$ret" = "1" ]; then
		echo "error: $1 profile is already started"
		exit 1
	fi
	stat_busy "Starting crypt profile: $1"
	# Read the profile (clear stale values from any previous profile first).
	unset DESCRIPTION DISK_DEVICE MAP_DEVICE CRYPT_OPTIONS NO_ASTERISKS MOUNT_DIR CRYPT_METHOD MAP_ALIAS
	. $PROFILE_DIR/$1
	password_prompt
	ret=$?
	# Password prompt was canceled
	[ "$ret" = "2" ] && return 2
	# There was an error in password prompt
	[ "$ret" = "1" ] && return 1
	if [ "$CRYPT_METHOD" = "truecrypt" ]; then
		# Find the mapper device truecrypt created for this disk.
		MAP_DEVICE=$( truecrypt -l | grep $DISK_DEVICE"$" | cut -d' ' -f1 )
		if [ ${#MAP_DEVICE} -eq 0 ]; then
			echo "$DISK_DEVICE volume not opened"
			exit 1;
		fi
		mount $MAP_DEVICE $MOUNT_DIR
		ret=$?
		[ "$ret" = "0" ] && return 0
		# Mount failed: give the kernel a moment, then unmap again.
		sleep 2
		truecrypt -d $MAP_DEVICE
		echo "error mounting $MAP_DEVICE device to $MOUNT_DIR"
		exit 1
	elif [ "$CRYPT_METHOD" = "luks" ]; then
		if [ "$MAP_ALIAS" = "" ]; then
			echo "error: Missing MAP_ALIAS";
			exit 1
		fi
		if [ ! -b "/dev/mapper/$MAP_ALIAS" ]; then
			echo "/dev/mapper/$MAP_ALIAS is not a block device!"
			exit 1
		fi
		mount "/dev/mapper/$MAP_ALIAS" $MOUNT_DIR
		ret=$?
		[ "$ret" = "0" ] && return 0
		# Mount failed: give the kernel a moment, then close the mapping.
		sleep 2
		cryptsetup luksClose $MAP_ALIAS
		echo "error mounting /dev/mapper/$MAP_ALIAS device to $MOUNT_DIR"
		exit 1
	fi
	# Not really needed (as password_prompt should fail already), but for completness
	echo "Unknown CRYPT_METHOD"
	exit 1
}
# Interactive mode: build a dialog menu of all not-yet-started profiles
# (pairs of name/description in the profiles array), default to the
# profile named "main" when present, and start the selection. With only
# one candidate left, skips the menu and prompts for its password directly.
menu()
{
	if [ "`ls $PROFILE_DIR 2>/dev/null | grep -v ^template$`" = "" -o ! -d $PROFILE_DIR ]; then
		echo "No profiles found. Add profiles to $PROFILE_DIR"
		exit 1
	fi
	# scan all profiles
	unset profiles
	DEFAULT=
	i=0
	for prof in $( ls $PROFILE_DIR ); do
		# ignore the template
		[ "$prof" = "template" ] && continue
		# ignore already started profiles
		is_started_prof $prof
		ret=$?
		[ "$ret" = "1" ] && continue
		NAME=$prof
		# if there's a profile called "main", use that as default
		[ "$NAME" = "main" ] && DEFAULT=$NAME
		unset DESCRIPTION
		. $PROFILE_DIR/$NAME
		# Only profiles with a DESCRIPTION are offered in the menu.
		if [ "$DESCRIPTION" ]; then
			profiles[$i]=$NAME
			i=$((i+1))
			profiles[$i]=$DESCRIPTION
			i=$((i+1))
		fi
	done
	# No profiles to be started left
	if [ ${#profiles} -eq 0 ]; then
		echo "All available profiles started"
		exit 0
	fi
	# if no default yet, use the first entry
	[ "$DEFAULT" = "" ] && DEFAULT=${profiles[0]}
	# profiles[2] is null - so we have only one profile left.
	# Don't display list of profiles, just ask for a password.
	if [ "${profiles[2]}" = "" ]; then
		start_profile $DEFAULT
		ret=$?
		# Since this is the last profile, we can exit the script
		# if it was successfully opened, or the dialog was canceled.
		# It was successful
		[ "$ret" = "0" ] && exit 0
		# It was canceled
		[ "$ret" = "2" ] && exit 0
		# Otherwise there was an error
		return
	fi
	ANSWER=`mktemp` || exit 1
	dialog \
		--output-fd 1 \
		--default-item $DEFAULT \
		--menu "Select the crypt profile you wish to use" \
		13 50 6 \
		"${profiles[@]}" >$ANSWER
	ret=$?
	ans=$( cat $ANSWER )
	rm $ANSWER
	case $ret in
		1) exit 0 ;; # cancel
		255) exit 0 ;; # ESC pressed (or timeout, but we don't use it)
		0) start_profile $ans ;; # user selection
		# abnormal
		*) echo "abnormal ret code from dialog: $ret"; exit 1 ;;
	esac
}
#
# Begin
#
# Root is required for mount/cryptsetup/truecrypt below.
if [ "`id -u`" != "0" ]; then
	echo "This script should be run as root."
	exit 1
fi
# Device-mapper module must be loaded before any mapping can be created.
/sbin/modprobe -q dm-mod 2>/dev/null
# Parse command line
MODE="profile"
PROFILE=
SPROF=
while [ $# -ne 0 ]; do
	case $1 in
		--version) MODE="ver" ;;
		--help) MODE="usage" ;;
		--menu) MODE="menu" ;;
		--stopall) MODE="stopall" ;;
		--stop) MODE="stop"
			shift
			SPROF=$1 ;;
		--*) MODE="usage" ;;
		-*) MODE="usage" ;;
		*) PROFILE=$1 ;;
	esac
	shift
done
# Bare invocation with no profile falls back to usage.
if [ "$MODE" = "profile" -a "$PROFILE" = "" ]; then
	MODE="usage"
fi
# Figure out what we're doing...
[ "$MODE" = "ver" ] && version
[ "$MODE" = "usage" ] && usage
[ "$MODE" = "stop" ] && stop_profile $SPROF
[ "$MODE" = "stopall" ] && stop_all
# menu() exits the script itself; loop so it is re-shown after errors.
[ "$MODE" = "menu" ] && while true; do menu; done;
if [ "$MODE" = "profile" ]; then
	start_profile $PROFILE
	ret=$?
	[ "$ret" = "0" ] && exit 0
	[ "$ret" = "2" ] && exit 0
	exit 1
fi
exit 0
| true
|
40ed4a2607fdd04a4adc73fa6fca7af19de78789
|
Shell
|
l1x/this_is_sparta
|
/test.sh
|
UTF-8
| 784
| 2.703125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Seed a local Riak node (port 10018) with timestamped test objects:
# 8 parent buckets indexed by creation time, 100 child keys each, then
# dump the secondary-index query result.
# Current time in milliseconds (first 13 digits of the ns epoch).
now=$(date +%s%N | cut -b1-13)
a_day_in_ms=86400000
for i in {3..10}; do
  # Parent object timestamped i days in the past.
  ((first_ts=now-i*a_day_in_ms))
  echo $first_ts
  curl -XPUT \
    "http://127.0.0.1:10018/buckets/to_be_deleted/keys/908eb9bd4473db38-test-$first_ts" \
    -H "Content-Type: application/json" \
    -H "x-riak-index-created_at_int: $first_ts" \
    -d "{\"ts\": $first_ts}"
  for j in {1..100}; do
    # Child keys spaced 8640000 ms (1/10 day) apart.
    ((second_ts=now-j*8640000))
    curl -XPUT \
      "http://127.0.0.1:10018/buckets/908eb9bd4473db38-test-$first_ts/keys/$second_ts" \
      -H "Content-Type: application/json" \
      -H "x-riak-index-created_at_int: $second_ts" \
      -d "{\"ts\": $second_ts}"
  done
done
# Query everything via the created_at_int secondary index and pretty-print.
curl -s http://127.0.0.1:10018/buckets/to_be_deleted/index/created_at_int/0/9999999999999999 | python -mjson.tool
##
| true
|
52b792b4a66d6c7009473b7fbc25f5c0c92bcc1c
|
Shell
|
maxrd2/arch-repo
|
/packages/mingw-libs/mingw-w64-openexr/PKGBUILD
|
UTF-8
| 1,213
| 2.8125
| 3
|
[] |
no_license
|
# Maintainer: Patrick Northon <northon_patrick3@yahoo.ca>
# PKGBUILD: cross-compile OpenEXR for both mingw-w64 targets.
pkgname=mingw-w64-openexr
pkgver=2.5.3
pkgrel=3
pkgdesc="An high dynamic-range image file format library (mingw-w64)"
url="http://www.openexr.com/"
arch=(any)
license=('BSD')
depends=('mingw-w64-crt' 'mingw-w64-zlib')
makedepends=('mingw-w64-cmake' 'wine')
# Cross build: keep static libs, and skip the host's buildflags/strip —
# target binaries are stripped with the cross strip in package() instead.
options=('staticlibs' '!buildflags' '!strip')
source=("https://github.com/AcademySoftwareFoundation/openexr/archive/v${pkgver}.tar.gz")
sha256sums=("6a6525e6e3907715c6a55887716d7e42d09b54d2457323fcee35a0376960bebf")
# Both 32-bit and 64-bit Windows targets are built in one package.
_architectures="i686-w64-mingw32 x86_64-w64-mingw32"
# Configure and build out-of-tree with the per-target cmake wrapper.
build() {
	_flags=( -Wno-dev -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS_RELEASE="-O2 -DNDEBUG" -DBUILD_TESTING=OFF
		-DINSTALL_OPENEXR_DOCS=OFF -DINSTALL_OPENEXR_EXAMPLES=OFF -DOPENEXR_BUILD_UTILS=OFF -DPYILMBASE_ENABLE=OFF )
	for _arch in ${_architectures}; do
		${_arch}-cmake -S "openexr-${pkgver}" -B "build-${_arch}" "${_flags[@]}" -DOPENEXR_BUILD_BOTH_STATIC_SHARED=ON
		make -C "build-${_arch}"
	done
}
# Install into $pkgdir and strip DLLs/static libs with the target strip.
package() {
	for _arch in ${_architectures}; do
		make DESTDIR="${pkgdir}" -C "build-${_arch}" install
		${_arch}-strip --strip-unneeded "$pkgdir"/usr/${_arch}/bin/*.dll
		${_arch}-strip -g "$pkgdir"/usr/${_arch}/lib/*.a
	done
}
| true
|
6e3789b19626fc3e8b192aa8fcf72845c00aafb6
|
Shell
|
RikkaW/androidx
|
/profileinstaller/integration-tests/testapp/cli/build_bundle_launch.sh
|
UTF-8
| 3,190
| 2.875
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#
# Copyright 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Build, repackage, install and launch the profileinstaller test app on a
# connected device, then report ART profile sizes before and after
# speed-profile compilation.
# NOTE(review): no shebang — presumably always run via an explicit shell;
# confirm.
# CHANGEME:
DEBUG=false
SCRIPT=`realpath $0`
SCRIPT_DIR=`dirname $SCRIPT`
# androidx repo root, four levels up from this script.
SUPPORT_DIR=$SCRIPT_DIR/../../../../
TMP_DIR=`mktemp -d`
pushd $SUPPORT_DIR
echo "===START=== Rebuilding apk..."
ANDROIDX_PROJECTS=COMPOSE ./gradlew \
  :profileinstaller:integration-tests:testapp:clean
if [ $DEBUG = true ]; then
  ANDROIDX_PROJECTS=COMPOSE ./gradlew \
    :profileinstaller:integration-tests:testapp:assembleDebug
else
  ANDROIDX_PROJECTS=COMPOSE ./gradlew \
    :profileinstaller:integration-tests:testapp:assembleRelease
fi
echo "===/DONE=== Rebuilding apk..."
echo "===START=== Uninstalling..."
adb uninstall androidx.profileinstaller.integration.testapp
echo "===/DONE=== Uninstalling"
echo "===START=== Repackaging apk..."
if [ $DEBUG = true ]; then
  $SCRIPT_DIR/repackage.py --out $TMP_DIR/out.apk --debug true
else
  $SCRIPT_DIR/repackage.py --out $TMP_DIR/out.apk
fi
echo "===/DONE=== Repackaging apk..."
echo "===START=== Installing apk..."
adb install $TMP_DIR/out.apk > /dev/null
echo "===/DONE=== Installing apk..."
echo "===START=== Installing apk..."
adb shell am start -n androidx.profileinstaller.integration.testapp/.MainActivity
echo "===/DONE==="
# Give profileinstaller time to write the current profile.
echo "===START=== Waiting 10 seconds for profile..."
sleep 10
echo "===/DONE=== Waiting 10 seconds for profile..."
echo "===START=== Force stopping app"
adb shell am force-stop androidx.profileinstaller.integration.testapp
echo "===/DONE=== Force stopping app"
# Root + remount needed to stat files under /data/misc/profiles.
echo "===START=== Root + Remount"
adb root >/dev/null
adb remount 2>/dev/null
echo "===/DONE=== Root + Remount"
echo "Profile found written to cur directory..."
CUR_SIZE=$(adb shell stat -c%s /data/misc/profiles/cur/0/androidx.profileinstaller.integration.testapp/primary.prof 2>/dev/null)
REF_SIZE=$(adb shell stat -c%s /data/misc/profiles/ref/androidx.profileinstaller.integration.testapp/primary.prof 2>/dev/null)
echo "Cur: $CUR_SIZE"
echo "Ref: $REF_SIZE"
echo "===START=== Compile speed-profile"
adb shell cmd package compile -m speed-profile -f androidx.profileinstaller.integration.testapp
echo "===/DONE=== Compile speed-profile"
# Sizes again: compilation should consume cur into ref.
CUR_SIZE=$(adb shell stat -c%s /data/misc/profiles/cur/0/androidx.profileinstaller.integration.testapp/primary.prof 2>/dev/null)
REF_SIZE=$(adb shell stat -c%s /data/misc/profiles/ref/androidx.profileinstaller.integration.testapp/primary.prof 2>/dev/null)
echo "Cur: $CUR_SIZE"
echo "Ref: $REF_SIZE"
# Locate the installed APK and list the generated oat artifacts.
APK_LOCATION=$(adb shell dumpsys package dexopt | grep "\[androidx\.profileinstaller\.integration\.testapp\]" -A1 | tail -n 1 | cut -d':' -f 2)
APK_DIR=$(dirname $APK_LOCATION)
adb shell ls -la $APK_DIR/oat/arm64/
| true
|
9acbab4b8b12c2eab2e8e83e1a84fa45d9228b1b
|
Shell
|
daxingyou/sg_server
|
/.svn/pristine/9a/9acbab4b8b12c2eab2e8e83e1a84fa45d9228b1b.svn-base
|
UTF-8
| 2,622
| 3.6875
| 4
|
[] |
no_license
|
#!/bin/bash
# Bug fix: the shebang was /bin/sh, but this script uses bash-only syntax
# (the 'function' keyword and '&>' redirection), which fails or misbehaves
# under POSIX shells such as dash. Run it explicitly with bash.
#
# Watchdog for the sg_server processes: every 5 seconds (or once, with
# "once"), detect stopped servers, back up, and restart them.
# Provides check_server/check_core/backup_server/run_server plus the
# *_number and skip_server settings used below.
. ~/sg_server/nohup_run/common.sh
monitor_list="db game center chat gate login gm"
########################################################################
# If the named server is not running, append its name to stop_list
# (dynamically scoped from the caller) and scan for core dumps.
function monitor_server()
{
	local srv_name=$1
	check_server './'$srv_name &>/dev/null
	if [ $? -ne 1 ];then
		stop_list=$stop_list$srv_name" "
		check_core $srv_name &>/dev/null
	fi
}
# Scan every server type in monitor_list (honoring skip_server and the
# per-type instance counts), log the stopped ones, then restart them.
function monitor_all_servers()
{
	local stop_list=""
	for s in $monitor_list
	do
		local is_skip=0
		for skip in $skip_server
		do
			if [ "$s" = "$skip" ];then
				is_skip=1
				continue
			fi
		done
		if [ $is_skip -eq 1 ];then
			continue
		fi
		if [ "$s" = "gate" -a $gate_number -gt 1 ];then
			for ((i=1; i<=$gate_number; i++))
			do
				srv_name=$s"_server"_$i
				monitor_server $srv_name $stop_list
			done
		elif [ "$s" = "game" -a $game_number -gt 1 ];then
			for ((i=1; i<=$game_number; i++))
			do
				srv_name=$s"_server"_$i
				monitor_server $srv_name $stop_list
			done
		elif [ "$s" = "cross" -a $cross_number -gt 1 ];then
			for ((i=1; i<=$cross_number; i++))
			do
				srv_name=$s"_server"_$i
				monitor_server $srv_name $stop_list
			done
		elif [ "$s" = "transfer" -a $transfer_number -gt 1 ];then
			for ((i=1; i<=$transfer_number; i++))
			do
				srv_name=$s"_server"_$i
				monitor_server $srv_name $stop_list
			done
		elif [ "$s" = "account" -a $account_number -gt 1 ];then
			for ((i=1; i<=$account_number; i++))
			do
				srv_name=$s"_server"_$i
				monitor_server $srv_name $stop_list
			done
		else
			srv_name=$s"_server"
			monitor_server $srv_name $stop_list
		fi
	done
	if [ "$stop_list" != "" ];then
		echo "[`date '+%Y-%m-%d %H:%M:%S'`] [$stop_list] has been stop!"
	fi
	backup_server &>/dev/null
	for stop in $stop_list
	do
		run_server $stop &>/dev/null
		check_server $stop
		if [ $? -eq 1 ];then
			echo "$stop is already running!"
		else
			echo "$stop failed to run!"
		fi
	done
	unset stop_list
}
########################################################################
# "once" performs a single scan; anything else loops forever.
if [ "$1" = "once" ];then
	monitor_all_servers
else
	while [ true ]
	do
		monitor_all_servers
		sleep 5
	done
fi
| true
|
d15de831a7359c25dff9e070346726d579b357ec
|
Shell
|
molotov-dmitry/svn-repo-create
|
/createrepo.sh
|
UTF-8
| 3,558
| 4.0625
| 4
|
[] |
no_license
|
#!/bin/bash
# Create an SVN repository with a svnserve config, user database and the
# conventional trunk/branches/tags layout; optionally enable SASL.
### Parameters =================================================================
use_sasl=0
# When non-zero, the EXIT trap removes the partially-built repository.
clean=0
realm=''
directory='.'
userdb=''
### Cleanup ====================================================================
# Remove the repository directory on exit if a build was started but did
# not complete (clean is set back to 0 on success).
cleanup()
{
    if [[ "${clean}" -gt 0 ]]
    then
        rm -rf "${directory}/${reponame}"
    fi
}
trap cleanup EXIT
### Get arguments ==============================================================

# NOTE(review): this unconditional shift discards $1 before parsing — it
# looks intentional (a leading subcommand/name argument from the caller);
# confirm against how the script is invoked.
shift

# parse_args: walk the argument vector and populate the globals
#   use_sasl, realm, directory, userdb, reponame
# Value-taking options: -r/--realm, -d/--directory, -u/--userdb; the first
# non-option argument becomes the repository name. Kept as a function so
# the option/value pairing logic is testable.
parse_args()
{
    local argc=$#
    local argv=("$@")
    local i arg

    for (( i = 0; i < argc; i++ ))
    do
        arg="${argv[$i]}"

        if [[ "$arg" == '--sasl' ]]
        then
            use_sasl=1
        elif [[ "$arg" == '--realm' || "$arg" == '-r' ]]
        then
            # Bug fix: the original used '[[ $i < $((argc - 1)) ]]', which
            # compares *strings* inside [[ ]] (so 9 < 10 was false and the
            # 10th-and-later option values were rejected); use -lt instead.
            if [[ $i -lt $(( argc - 1 )) ]]
            then
                let i++
                realm="${argv[$i]}"
            else
                echo 'Wrong argument' >&2
            fi
        elif [[ "$arg" == '--directory' || "$arg" == '-d' ]]
        then
            if [[ $i -lt $(( argc - 1 )) ]]
            then
                let i++
                directory="${argv[$i]}"
            else
                echo 'Wrong argument' >&2
            fi
        elif [[ "$arg" == '--userdb' || "$arg" == '-u' ]]
        then
            if [[ $i -lt $(( argc - 1 )) ]]
            then
                let i++
                userdb="${argv[$i]}"
            else
                echo 'Wrong argument' >&2
            fi
        elif [[ "${arg:0:1}" != '-' ]]
        then
            reponame="${arg}"
        fi
    done
}

parse_args "$@"
### Check parameters ===========================================================
# Distinct exit codes (1-10) identify which step failed.
if [[ -z "$reponame" ]]
then
    echo 'Repo name is not set' >&2
    exit 1
fi
if [[ -z "$realm" ]]
then
    echo 'Realm is not set' >&2
    exit 2
fi
if [[ -z "$directory" ]]
then
    echo 'Directory is not set' >&2
    exit 3
fi
### Check repository already exist =============================================
if [[ -e "${directory}/${reponame}" ]]
then
    echo 'Repository already exist' >&2
    exit 4
fi
### Create repository ==========================================================
svnadmin create "${directory}/${reponame}" || exit 5
# From here on the EXIT trap removes the partial repository on failure.
clean=1
### Create local user database configuration ===================================
cat << _EOF > "${directory}/${reponame}/conf/svnserve.conf" || exit 6
[general]
anon-access = none
auth-access = write
password-db = passwd
realm = ${realm}
_EOF
### Create user database file --------------------------------------------------
if [[ -n "$userdb" && -f "$userdb" ]]
then
    cp -f "$userdb" "${directory}/${reponame}/conf/passwd" || exit 7
else
    cat << _EOF > "${directory}/${reponame}/conf/passwd" || exit 7
[users]
system=system
_EOF
fi
### Get username and password --------------------------------------------------
# The first name=value entry of the passwd file is used for the initial commit.
username=$(grep '=' "${directory}/${reponame}/conf/passwd" | head -n1 | cut -s -d '=' -f 1 )
password=$(grep '=' "${directory}/${reponame}/conf/passwd" | head -n1 | cut -s -d '=' -f 2 )
if [[ -z "$username" || -z "$password" ]]
then
    echo 'Username and password is empty' >&2
    exit 8
fi
### Create default directories =================================================
# NOTE(review): assumes a local svnserve is serving ${directory} — confirm.
repourl="svn://127.0.0.1/${directory}/${reponame}"
svn mkdir "${repourl}/trunk" \
          "${repourl}/branches" \
          "${repourl}/tags" \
          -m 'Created trunk, branches and tags dirs' \
          --non-interactive --no-auth-cache \
          --username "$username" --password "$password" \
          || exit 9
### Configure SASL =============================================================
if [[ "$use_sasl" -gt 0 ]]
then
    cat << _EOF >> "${directory}/${reponame}/conf/svnserve.conf" || exit 10
[sasl]
use-sasl = true
_EOF
fi
# Success: disarm the cleanup trap.
clean=0
| true
|
0b01ba302da83c60125460177e0208d526fc8570
|
Shell
|
H-schwert/i3
|
/scripts/drive
|
UTF-8
| 628
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/bash
# i3blocks status script for Insync (Google Drive client): prints
# full_text, short_text and color per the i3blocks blocklet protocol.

# NOTE(review): ICON appears to have been a private-use font glyph that
# did not survive re-encoding — confirm against the original file.
ICON=""

# Improvement: query Insync once instead of once per branch; this is
# faster and avoids the status changing between the individual checks.
STATUS=$(insync get_status)

# First match wins, preserving the original branch order.
case "$STATUS" in
	*'seem to be running. Start it first'*) COLOR="#DC322F" ;;
	*'OFFLINE'*) COLOR="#DC322F" ;;
	*'SYNCED'*)  COLOR="" ;;
	*'SHARE'*)   COLOR="" ;;
	*)           COLOR="#5294E2" ;;
esac

# Click to see status (right mouse button):
if [[ $BLOCK_BUTTON == '3' ]]; then
	# Unquoted echo intentionally collapses newlines, as before.
	notify-send "$(echo $STATUS)" \
	-i /usr/share/icons/Numix-Circle/scalable/apps/google-drive.svg
fi

echo "$ICON"
echo "$ICON"
echo "$COLOR"
| true
|
94770e5078225cbfe219a0535b9fb46fddf2a207
|
Shell
|
NCAR/container-dtc-nwp
|
/components/scripts/common/run_command.ksh
|
UTF-8
| 940
| 4.15625
| 4
|
[] |
no_license
|
#!/bin/ksh

################################################################################
#
#   Script Name:  run_command.ksh
#
#      Author:    John Halley Gotway
#                 NCAR/RAL/DTC
#
#      Released:  10/10/2012
#
#   Description:
#      This is a wrapper script for executing the command that is passed to it
#      and checking the return status.
#
#   Arguments:
#      The first argument is the command to be executed and all remaining arguments
#      are passed through to the command.
#
################################################################################

# Name of this script
SCRIPT=run_command.ksh

# Check for at least one argument
if [ $# -eq 0 ]; then
  echo
  echo "ERROR: ${SCRIPT} zero arguments."
  echo
  exit 1
fi

# Run the command
echo
echo "CALLING: $*"
echo
# Bug fix: execute with "$@" rather than unquoted $* so that arguments
# containing whitespace or glob characters reach the command intact.
"$@"

# Check the return status and propagate it on failure.
error=$?
if [ ${error} -ne 0 ]; then
  echo "ERROR:"
  echo "ERROR: $* exited with status = ${error}"
  echo "ERROR:"
  exit ${error}
fi
| true
|
1f154cbc2c5697a94e32b12ee3fb363104986567
|
Shell
|
rschwabco/maana-lp
|
/start.sh
|
UTF-8
| 431
| 2.859375
| 3
|
[] |
no_license
|
#! /usr/bin/env sh
# Container entrypoint: resolve the ASGI application path and gunicorn
# config from the environment (with defaults), then exec gunicorn.
set -e

# uvicorn ./app/main:app
# App module is MODULE_NAME:VARIABLE_NAME unless APP_MODULE overrides it.
DEFAULT_MODULE_NAME=main
MODULE_NAME=${MODULE_NAME:-$DEFAULT_MODULE_NAME}
VARIABLE_NAME=${VARIABLE_NAME:-app}
export APP_MODULE=${APP_MODULE:-"$MODULE_NAME:$VARIABLE_NAME"}

DEFAULT_GUNICORN_CONF=/gunicorn_conf.py
export GUNICORN_CONF=${GUNICORN_CONF:-$DEFAULT_GUNICORN_CONF}

# Start Gunicorn
# exec replaces the shell so gunicorn receives signals directly (PID 1).
exec gunicorn -k uvicorn.workers.UvicornWorker -c "$GUNICORN_CONF" "$APP_MODULE"
| true
|
923fca8df2a838a71600749162cefa2c8f04b6f8
|
Shell
|
yourdj/segdsp
|
/dsp/native/c/generate.sh
|
UTF-8
| 375
| 3.03125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Regenerate the native assembly bindings for the host architecture.
# Only x86/x86_64 is supported because c2goasm cannot target ARM yet.
ARCH=`uname -m`
if [[ "${ARCH}" = "x86_64" ]] || [[ "${ARCH}" = "i386" ]]
then
  echo "------------- x86 ------------"
  python generate_x86.py
#  ./generate_x86.sh
# The issue here is that c2goasm does not support arm yet
# elif [ "${ARCH}" = "aarch64" ] || [ "${ARCH}" = "arm64" ]
# then
#   echo "------------- arm64 -----------"
#   ./generate_arm64.sh
fi
| true
|
ffbeebb039d0d2d3d5030220a0aa14c9f4f9ca01
|
Shell
|
jochu/heroku-buildpack-testing
|
/bin/detect
|
UTF-8
| 135
| 2.8125
| 3
|
[] |
no_license
|
#!/bin/sh
# Heroku buildpack "detect" hook: succeed (exit 0) and print the buildpack
# name when the app opts in with a cabal-install.packages file; otherwise
# exit 1 so Heroku skips this buildpack.
BUILD_DIR="$1"
if [ -f "$BUILD_DIR/cabal-install.packages" ]
then
  echo "haskell-platform"
  exit 0
else
  exit 1
fi
| true
|
5aa861a836e33fc91d5597d98c5128c8f7f10840
|
Shell
|
brandoshmando/heroku-buildpack-kong
|
/bin/compile
|
UTF-8
| 8,980
| 3.421875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# bin/compile <build-dir> <cache-dir>
# Heroku buildpack compile hook: installs apt packages, builds
# OpenSSL/OpenResty/LuaRocks into a dyno-compatible prefix, then builds
# Kong and its rocks, caching artifacts between builds.
# Fail immediately on non-zero exit code.
set -e
# Fail immediately on non-zero exit code within a pipeline.
set -o pipefail
# Fail on undeclared variables.
set -u
# Debug, echo every command
#set -x
# parse and derive params
BUILD_DIR=$1
CACHE_DIR=$2
ENV_DIR=$3
BP_DIR=`cd $(dirname $0); cd ..; pwd`
VENDOR_DIR=${BP_DIR}/vendor
# Make dyno runtime-compatible prefix for compiled artifacts
APP_PREFIX=/app/.heroku
mkdir -p $APP_PREFIX
# Cache compilation artifacts between builds
BP_CACHE_DIR="$CACHE_DIR/heroku-kong-buildpack"
mkdir -p $BP_CACHE_DIR
PREFIX_CACHE_DIR=$BP_CACHE_DIR/app_prefix
# Source for Kong
# Config vars arrive as files in ENV_DIR; fall back to upstream defaults.
if [ -f "${ENV_DIR}/KONG_GIT_URL" ]
then
  KONG_GIT_URL=`cat ${ENV_DIR}/KONG_GIT_URL`
else
  KONG_GIT_URL="https://github.com/Mashape/kong.git"
fi
# commit or tag to checkout
if [ -f "${ENV_DIR}/KONG_GIT_COMMITISH" ]
then
  KONG_GIT_COMMITISH=`cat ${ENV_DIR}/KONG_GIT_COMMITISH`
else
  KONG_GIT_COMMITISH="0.11.1"
fi
KONG_SOURCE_DIR="${BP_CACHE_DIR}/kong-source"
# Set dependency versions.
# These correspond to the archives in `vendor/`.
# If upgrading any those archives, then update the corresponding version here.
LUAROCKS_VERSION=2.4.2
OPENSSL_VERSION=1.0.2l
OPENRESTY_VERSION=1.13.6.1
# Print a formatted error message (" ! ...") to stderr and abort the build.
error() {
  printf ' ! %s\n' "$*" >&2
  exit 1
}
# Print a build-log section header ("-----> ...") to stdout.
topic() {
  printf -- '-----> %s\n' "$*"
}
# Indent stdin for nested build-log output.
# Darwin's sed needs -l for line buffering; GNU sed uses -u (unbuffered)
# so output streams line by line during long builds.
function indent() {
  c='s/^/       /'
  case $(uname) in
    Darwin) sed -l "$c";;
    *)      sed -u "$c";;
  esac
}
# --- apt layer: fetch .debs into the build cache and unpack under .apt ---
APT_CACHE_DIR="$CACHE_DIR/apt/cache"
APT_STATE_DIR="$CACHE_DIR/apt/state"
mkdir -p "$APT_CACHE_DIR/archives/partial"
mkdir -p "$APT_STATE_DIR/lists/partial"
APT_OPTIONS="-o debug::nolocking=true -o dir::cache=$APT_CACHE_DIR -o dir::state=$APT_STATE_DIR"
topic "Updating apt caches"
apt-get $APT_OPTIONS update | indent
# Entries in the Aptfiles are either direct .deb URLs or package names.
for PACKAGE in $(cat $BP_DIR/Aptfile $BUILD_DIR/Aptfile); do
  if [[ $PACKAGE == *deb ]]; then
    PACKAGE_NAME=$(basename $PACKAGE .deb)
    PACKAGE_FILE=$APT_CACHE_DIR/archives/$PACKAGE_NAME.deb
    topic "Fetching $PACKAGE"
    curl -s -L -z $PACKAGE_FILE -o $PACKAGE_FILE $PACKAGE 2>&1 | indent
  else
    topic "Fetching .debs for $PACKAGE"
    apt-get $APT_OPTIONS -y --force-yes -d install --reinstall $PACKAGE | indent
  fi
done
mkdir -p $BUILD_DIR/.apt
# dpkg -x unpacks without installing (no root needed on build dynos).
for DEB in $(ls -1 $APT_CACHE_DIR/archives/*.deb); do
  topic "Installing $(basename $DEB)"
  dpkg -x $DEB $BUILD_DIR/.apt/
done
# Runtime environment for the dyno: escaped \$ so expansion happens at boot.
topic "Writing profile script"
mkdir -p $BUILD_DIR/.profile.d
cat <<EOF >$BUILD_DIR/.profile.d/000_buildpack_kong.sh
export PATH="\$HOME/.heroku/nginx/sbin:\$HOME/.heroku/luajit/bin:\$HOME/.heroku/bin:\$HOME/.apt/usr/local/bin:\$HOME/.apt/usr/bin:\$HOME/.apt/usr/sbin:\$PATH"
export LD_LIBRARY_PATH="\$HOME/.heroku/lib:\$HOME/.apt/usr/lib/x86_64-linux-gnu:\$HOME/.apt/usr/local/lib:\$HOME/.apt/usr/lib:/usr/lib/x86_64-linux-gnu:/lib/x86_64-linux-gnu:/usr/lib:\$LD_LIBRARY_PATH"
export LIBRARY_PATH="\$HOME/.heroku/lib:\$HOME/.apt/usr/lib/x86_64-linux-gnu:\$HOME/.apt/usr/local/lib:\$HOME/.apt/usr/lib:/usr/lib/x86_64-linux-gnu:/lib/x86_64-linux-gnu:/usr/lib:\$LIBRARY_PATH"
export INCLUDE_PATH="\$HOME/.heroku/include:\$HOME/.apt/usr/local/include:\$HOME/.apt/usr/include:/usr/include/x86_64-linux-gnu:/usr/include:\$INCLUDE_PATH"
export CPATH="\$INCLUDE_PATH"
export CPPPATH="\$INCLUDE_PATH"
export PKG_CONFIG_PATH="\$HOME/.apt/usr/local/lib/pkgconfig:\$HOME/.apt/usr/lib/x86_64-linux-gnu/pkgconfig:\$HOME/.apt/usr/lib/pkgconfig:\$PKG_CONFIG_PATH"
export LUA_PATH="\$HOME/lib/?.lua;\$HOME/lib/?/init.lua;\$HOME/.luarocks/share/lua/5.1/?.lua;\$HOME/.luarocks/share/lua/5.1/?/init.lua;\$HOME/.heroku/share/lua/5.1/?.lua;\$HOME/.heroku/share/lua/5.1/?/init.lua;./?.lua"
export LUA_CPATH="\$HOME/lib/?.so;\$HOME/.luarocks/lib/lua/5.1/?.so;\$HOME/.heroku/lib/lua/5.1/?.so;./?.so"
EOF
# Same environment for the remainder of this compile run (unescaped).
export PATH="$APP_PREFIX/nginx/sbin:$APP_PREFIX/luajit/bin:$APP_PREFIX/bin:$BUILD_DIR/.apt/usr/local/bin:$BUILD_DIR/.apt/usr/bin:$BUILD_DIR/.apt/usr/sbin:/sbin:$PATH"
export LD_LIBRARY_PATH="$APP_PREFIX/lib:$BUILD_DIR/.apt/usr/lib/x86_64-linux-gnu:$BUILD_DIR/.apt/usr/local/lib:$BUILD_DIR/.apt/usr/lib:/usr/lib/x86_64-linux-gnu:/lib/x86_64-linux-gnu:/usr/lib"
export LIBRARY_PATH="$APP_PREFIX/lib:$BUILD_DIR/.apt/usr/lib/x86_64-linux-gnu:$BUILD_DIR/.apt/usr/local/lib:$BUILD_DIR/.apt/usr/lib:/usr/lib/x86_64-linux-gnu:/lib/x86_64-linux-gnu:/usr/lib"
export INCLUDE_PATH="$APP_PREFIX/include:$BUILD_DIR/.apt/usr/local/include:$BUILD_DIR/.apt/usr/include:/usr/include/x86_64-linux-gnu:/usr/include"
export CPATH="$INCLUDE_PATH"
export CPPPATH="$INCLUDE_PATH"
export PKG_CONFIG_PATH="$BUILD_DIR/.apt/usr/local/lib/pkgconfig:$BUILD_DIR/.apt/usr/lib/x86_64-linux-gnu/pkgconfig:$BUILD_DIR/.apt/usr/lib/pkgconfig"
export LUA_PATH="$BUILD_DIR/lib/?.lua;$BUILD_DIR/lib/?/init.lua;$BUILD_DIR/.luarocks/share/lua/5.1/?.lua;$BUILD_DIR/.luarocks/share/lua/5.1/?/init.lua;$APP_PREFIX/share/lua/5.1/?.lua;$APP_PREFIX/share/lua/5.1/?/init.lua;./?.lua"
export LUA_CPATH="$BUILD_DIR/lib/?.so;$BUILD_DIR/.luarocks/lib/lua/5.1/?.so;$APP_PREFIX/lib/lua/5.1/?.so;./?.so"
#give environment to later buildpacks
export | grep -E -e ' (PATH|LD_LIBRARY_PATH|LIBRARY_PATH|INCLUDE_PATH|CPATH|CPPPATH|PKG_CONFIG_PATH)=' > "$BP_DIR/export"
# Once the installed packages are in the PATH, use them.
# Build from source to have dyno-compatible path prefix
# Cache the build
VENDOR_CACHE_DIR=${BP_CACHE_DIR}/vendor
mkdir -p ${VENDOR_CACHE_DIR}
# Detect changes in the vendor/ sources
# Hash every vendored archive; rebuild only when the hashes changed.
topic "Detecting changes in vendor/"
cd ${VENDOR_DIR}
VENDOR_DIFF=${BP_CACHE_DIR}/.vendor-diff
VENDOR_HASH=${BP_CACHE_DIR}/.vendor-hash
VENDOR_HASH_NEW=${VENDOR_HASH}-new
touch ${VENDOR_DIFF}
touch ${VENDOR_HASH}
touch ${VENDOR_HASH_NEW}
openssl dgst -sha1 * > ${VENDOR_HASH_NEW}
# `diff` signals differences with exit codes. Don't fail fast.
set +e
diff ${VENDOR_HASH} ${VENDOR_HASH_NEW} > ${VENDOR_DIFF}
DIFF_STATUS=$?
set -e
mv -f ${VENDOR_HASH_NEW} ${VENDOR_HASH}
# diff: 0 = unchanged, 1 = changed, >1 = error.
if [ "$DIFF_STATUS" == 0 ] && [ -d "$PREFIX_CACHE_DIR" ]
then
  topic "Restoring from cache"
  cp -R $PREFIX_CACHE_DIR/* $APP_PREFIX/
elif [ "$DIFF_STATUS" == 0 ] || [ "$DIFF_STATUS" == 1 ]
then
  topic "Changes detected"
  IFS=$'\n'
  for DIFF_LINE in $(cat "$VENDOR_DIFF"); do
    echo "      $DIFF_LINE"
  done
  unset IFS
  # Clean-up previous cache
  rm ${VENDOR_DIFF}
  rm -rf ${VENDOR_CACHE_DIR}
  # and cache the new source code
  mv ${VENDOR_DIR} ${VENDOR_CACHE_DIR}
  cd ${VENDOR_CACHE_DIR}
  topic "Building OpenSSL"
  tar -xf openssl-${OPENSSL_VERSION}.tar.gz
  cd openssl-${OPENSSL_VERSION}
  ./config --prefix=$APP_PREFIX -fPIC
  make
  make install
  cd ..
  topic "Building OpenResty"
  tar -xf openssl-${OPENSSL_VERSION}.tar.gz
  tar -xf openresty-${OPENRESTY_VERSION}.tar.gz
  cd openresty-${OPENRESTY_VERSION}
  ./configure --prefix=$APP_PREFIX --with-pcre-jit --with-ipv6 --with-http_realip_module --with-http_ssl_module --with-http_stub_status_module --with-http_v2_module --with-openssl=${VENDOR_CACHE_DIR}/openssl-${OPENSSL_VERSION}
  make
  make install
  cd ..
  topic "Building LuaRocks"
  tar -xf luarocks-${LUAROCKS_VERSION}.tar.gz
  cd luarocks-${LUAROCKS_VERSION}
  ./configure --prefix=${APP_PREFIX} --lua-suffix=jit --with-lua=${APP_PREFIX}/luajit --with-lua-include=${APP_PREFIX}/luajit/include/luajit-2.1
  make build
  make install
  cd ..
  topic "Caching build"
  mkdir -p $PREFIX_CACHE_DIR
  cp -Rf $APP_PREFIX/* $PREFIX_CACHE_DIR/
else
  error "Error detecting cached build: diff exited ${DIFF_STATUS}"
fi
topic "Building Kong from ${KONG_GIT_URL}#${KONG_GIT_COMMITISH}"
# Move the unfortunately named `.luarocks` spec file so that it doesn't
# interfere during CI. We're migrating Kong apps to use `Rockfile` instead.
if [ -f "$BUILD_DIR/.luarocks" ]; then
  mv "$BUILD_DIR/.luarocks" "$BUILD_DIR/Rockfile"
fi
# Ensure we don't have Kong from a previous build
luarocks remove kong || true
# Reuse the cached clone when present; reset hard to the requested ref.
if [ -d "$KONG_SOURCE_DIR" ]
then
  cd $KONG_SOURCE_DIR
  git fetch
  git fetch --tags
  git reset --hard $(git rev-list -n 1 $KONG_GIT_COMMITISH)
else
  git clone $KONG_GIT_URL $KONG_SOURCE_DIR
  cd $KONG_SOURCE_DIR
  git checkout $KONG_GIT_COMMITISH
fi
export OPENSSL_DIR=${APP_PREFIX}
# Install Kong itself from the cloned git repo
luarocks make ./kong-*.rockspec
# Install Kong's dependencies via LuaRocks
# (`--local` is required for this to do anything)
luarocks install --local ./kong-*.rockspec
mv bin/* ${APP_PREFIX}/bin/
topic "Installing Lua rocks specified in Rockfile"
cd ${BUILD_DIR}
${BP_DIR}/bin/install-luarocks
topic "Enabling runtime configuration & processes"
cd ${BP_DIR}
mkdir -p $BUILD_DIR/.profile.d
mv .profile.d/* $BUILD_DIR/.profile.d
mkdir -p $BUILD_DIR/config
mv config/* $BUILD_DIR/config
# Move executables for Procfile into place
mkdir -p $BUILD_DIR/bin
cp $BP_DIR/bin/app/heroku-* $BUILD_DIR/bin/
# Avoid moving build for CI, because build is in the right place already.
if [ ! "$BUILD_DIR" = "/app" ]; then
  topic "Making build artifacts available to app"
  mv $APP_PREFIX $BUILD_DIR/.heroku
fi
| true
|
9e4c5373cbe1a90cd64bbef5230495edb12ca34e
|
Shell
|
Talkopel/tcptracer-bpf
|
/tests/run
|
UTF-8
| 308
| 3.390625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Run the BPF test suite with a 150s timeout, cleaning up the background
# test process on exit. Requires root (BPF program loading).
set -eu
if [[ $EUID -ne 0 ]]; then
  echo "root required - aborting" >&2
  exit 1
fi
# -1 means "no test process started yet" (checked by shutdown).
test_pid=-1
function shutdown() {
if [[ $test_pid -ne -1 ]]; then
kill $test_pid 2>/dev/null || true
fi
}
trap shutdown EXIT
timeout 150 ./test.sh &
test_pid=$!
wait $test_pid
exit $?
| true
|
07c3bddaa7f6a613ee1c48922aaa00a84e1615c8
|
Shell
|
lookflying/my-scripts
|
/tencent/test_compress.sh
|
UTF-8
| 196
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
# Shrink an image to 50% into /dev/shm/compressed, naming the copy
# <basename>-<pid>.<original extension>. Requires ImageMagick `convert`.
dir=/dev/shm/compressed

if [ $# -lt 1 ]; then
	echo "usage: $0 <image-file>" >&2
	exit 1
fi

# If something non-directory sits at $dir, replace it; then ensure it exists.
if [ ! -d "$dir" ]
then
	rm -rf "$dir"
	mkdir -p "$dir"
fi

file=$1
name=${file##*/}      # strip leading path
name=${name%%.*}      # strip extension(s)
suffix=${file##*.}    # keep the last extension for the output name
convert "$file" -resize 50% "$dir/$name-$$.$suffix"
| true
|
44b5e0d443208a6233b45697f244b17f303bff3a
|
Shell
|
BrianJMRogers/rsyncmark
|
/src/main/sync_file.sh
|
UTF-8
| 762
| 3.515625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Push one local file to a remote host with rsync, answering the SSH
# host-key prompt and the password prompt automatically via expect.
source rsyncmark.conf
#### args
# $1 is local file_to_sync
# $2 is host ip
# $3 is host target location
# $4 is password to remote machine
file_to_sync=$1
host=$2
file_target_location=$3
pass=$4
temp_command_script="temp_command_script.sh"
# write command to file since the single quotes inside the variables get
# misinterpreted by bash
# NOTE(review): $rsync_args is expected to come from rsyncmark.conf; the
# echo re-splits it into words before writing the wrapper script.
args=($(echo $rsync_args))
echo rsync ${args[@]} $file_to_sync $host:$file_target_location > $temp_command_script
chmod +x $temp_command_script
# use expect from here until EOF
# The heredoc is unquoted, so $temp_command_script, $pass and
# $destination_password_prompt (from rsyncmark.conf) expand before
# expect sees the script.
/usr/bin/env expect<<EOF
spawn ./$temp_command_script
expect {
        "*(yes/no)"
        {
                send "yes\r"
                exp_continue
        }
        "$destination_password_prompt"
        {
                send "$pass\r"
        }
}
expect eof
EOF
rm $temp_command_script
| true
|
199187ca7bcab12da4e71f73bdfae7190ba4925b
|
Shell
|
BGCX261/zhscript-svn-to-git
|
/trunk/app/cangbrower/start.sh
|
UTF-8
| 228
| 2.546875
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch the UI, preferring a local ./ui binary, then a sibling gtkmmsh
# build, then the system-wide install. All arguments are forwarded.
cd "$(dirname "$0")" || exit 1

z=./ui
if [ -f "$z" ]
then
	"$z" "$@"
	exit
fi

# gtkmmsh needs the glade UI description as its first argument.
z=../../new_o/gtkmmsh
if [ -f "$z" ]
then
	"$z" "$PWD/ui.glade" "$@"
	exit
fi

z=/usr/local/lib/zhscript/gtkmmsh
if [ -f "$z" ]
then
	"$z" "$PWD/ui.glade" "$@"
	exit
fi
| true
|
c731f8f5c41f73a63a3342a69c22dae11e587d5a
|
Shell
|
smly/ume
|
/ume/externals/_xgboost/subtree/rabit/rabit-learn/kmeans/kmeans_hadoop.sh
|
UTF-8
| 286
| 2.640625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
#!/bin/bash
# Submit the rabit kmeans job to Hadoop streaming.
if [ "$#" -lt 5 ]
then
	echo "Usage: <nslaves> <input_data> <ncluster> <max_iteration> <output>" >&2
	# Fixed: was `exit -1`, which is non-portable (bash turns it into 255).
	exit 1
fi

#set path to hadoop streaming jar here
STREAMING_JAR=
# Fail loudly instead of submitting with an empty -hs argument.
if [ -z "$STREAMING_JAR" ]; then
	echo "STREAMING_JAR is not set; edit this script first" >&2
	exit 1
fi

python ../tracker/rabit_hadoop.py -hs "$STREAMING_JAR" -n "$1" -i "$2" -o "$5" kmeans.rabit stdin "$3" "$4" stdout
| true
|
3d48f793262fe3a0d8805282aff00ffde8fb941b
|
Shell
|
RyanYoung25/maestor
|
/run/sim.sh
|
UTF-8
| 695
| 3
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch the maestor simulation stack: roscore, the Hubo-Ach simulator,
# then the maestor daemon, waiting for each stage to come up.

# Refuse to start a second maestor instance.
if [ $(pgrep maestor | wc -w) -gt 1 ]; then
    echo "Maestor is already running"
    echo "If you would like to kill maestor run the command: "
    echo "maestor kill"
    exit 0
fi

# Restart the Hubo-Ach daemon from a clean state if it is running.
if [ $(pgrep hubo-daemon) ]; then
    echo "Hubo-Ach daemon is already running. Terminating..."
    hubo-ach killall &> /dev/null
    echo "Hubo-Ach daemon terminated."
fi

xterm -e "roscore" &
export PYTHONPATH="$PYTHONPATH:/usr/lib/python2.7/dist-packages"
xterm -e "hubo-ach sim openhubo physics simtime" &

# Block until the simulator's daemon process appears.
while [ ! $(pgrep hubo-daemon) ]; do
    sleep 1
done

# -E preserves the environment (PYTHONPATH above) across sudo.
sudo -E /opt/ros/fuerte/stacks/maestor/bin/maestor sim &
sleep 1
# Wait until maestor is fully up (two matching processes).
while [ ! $(pgrep maestor | wc -w) == 2 ]; do
    sleep 1
done
| true
|
eafd56756a605ae1af1288018d92ffb0c907ff34
|
Shell
|
toddyamakawa/bin
|
/examples/animations.bash
|
UTF-8
| 4,494
| 2.53125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# https://github.com/Silejonu/bash_loading_animations/blob/main/bash_loading_animations.sh
#
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# Version 2, December 2004
#
# Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
#
# Everyone is permitted to copy and distribute verbatim or modified
# copies of this license document, and changing it is allowed as long
# as the name is changed.
#
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
#
# 0. You just DO WHAT THE FUCK YOU WANT TO.
# Each array below defines one loading animation. Element 0 appears to
# be the per-frame delay in seconds and the remaining elements are the
# frames in display order (format inherited from
# bash_loading_animations — confirm before relying on it).
# ASCII Animations
classic=( 0.25 '-' '\' '|' '/' )
box=( 0.2 ┤ ┴ ├ ┬ )
bubble=( 0.6 · o O O o · )
breathe=( 0.9 ' () ' ' ( ) ' '( )' ' ( ) ' )
growing_dots=( 0.5 '. ' '.. ' '...' '.. ' '. ' ' ' )
passing_dots=( 0.25 '. ' '.. ' '...' ' ..' ' .' ' ' )
metro=( 0.2 '[ ]' '[= ]' '[== ]' '[=== ]' '[ ===]' '[ ==]' '[ =]' )
# UTF-8 Animations
classic_utf8=( 0.25 '—' '\' '|' '/' )
bounce=( 0.3 . · ˙ · )
vertical_block=( 0.25 ▁ ▂ ▃ ▄ ▅ ▆ ▇ █ █ ▇ ▆ ▅ ▄ ▃ ▁ )
horizontal_block=( 0.25 ▏ ▎ ▍ ▌ ▋ ▊ ▉ ▉ ▊ ▋ ▌ ▍ ▎ ▏ )
quarter=( 0.25 ▖ ▘ ▝ ▗ )
triangle=( 0.45 ◢ ◣ ◤ ◥)
semi_circle=( 0.1 ◐ ◓ ◑ ◒ )
rotating_eyes=( 0.4 ◡◡ ⊙⊙ ⊙⊙ ◠◠ )
firework=( 0.4 '⢀' '⠠' '⠐' '⠈' '*' '*' ' ' )
braille=( 0.2 ⠁ ⠂ ⠄ ⡀ ⢀ ⠠ ⠐ ⠈ )
braille_whitespace=( 0.2 ⣾ ⣽ ⣻ ⢿ ⡿ ⣟ ⣯ ⣷ )
trigram=( 0.25 ☰ ☱ ☳ ☶ ☴ )
arrow=( 0.15 ▹▹▹▹▹ ▸▹▹▹▹ ▹▸▹▹▹ ▹▹▸▹▹ ▹▹▹▸▹ ▹▹▹▹▸ ▹▹▹▹▹ ▹▹▹▹▹ ▹▹▹▹▹ ▹▹▹▹▹ ▹▹▹▹▹ ▹▹▹▹▹ ▹▹▹▹▹ )
bouncing_ball=( 0.4 '(● )' '( ● )' '( ● )' '( ● )' '( ● )' '( ●)' '( ● )' '( ● )' '( ● )' '( ● )' )
big_dot=( 0.7 ∙∙∙ ●∙∙ ∙●∙ ∙∙● )
modern_metro=( 0.15 ▰▱▱▱▱▱▱ ▰▰▱▱▱▱▱ ▰▰▰▱▱▱▱ ▱▰▰▰▱▱▱ ▱▱▰▰▰▱▱ ▱▱▱▰▰▰▱ ▱▱▱▱▰▰▰ ▱▱▱▱▱▰▰ ▱▱▱▱▱▱▰ ▱▱▱▱▱▱▱ ▱▱▱▱▱▱▱ ▱▱▱▱▱▱▱ ▱▱▱▱▱▱▱ )
pong=( 0.35 '▐⠂ ▌' '▐⠈ ▌' '▐ ⠂ ▌' '▐ ⠠ ▌' '▐ ⡀ ▌' '▐ ⠠ ▌' '▐ ⠂ ▌' '▐ ⠈ ▌' '▐ ⠂ ▌' '▐ ⠠ ▌' '▐ ⡀ ▌' '▐ ⠠ ▌' '▐ ⠂ ▌' '▐ ⠈ ▌' '▐ ⠂▌' '▐ ⠠▌' '▐ ⡀▌' '▐ ⠠ ▌' '▐ ⠂ ▌' '▐ ⠈ ▌' '▐ ⠂ ▌' '▐ ⠠ ▌' '▐ ⡀ ▌' '▐ ⠠ ▌' '▐ ⠂ ▌' '▐ ⠈ ▌' '▐ ⠂ ▌' '▐ ⠠ ▌' '▐ ⡀ ▌' '▐⠠ ▌' )
earth=( 0.45 🌍 🌎 🌏 )
clock=( 0.2 🕛 🕐 🕑 🕒 🕓 🕔 🕕 🕖 🕗 🕘 🕙 🕚 )
moon=( 0.8 🌑 🌒 🌓 🌔 🌕 🌖 🌗 🌘 )
orange_pulse=( 0.35 🔸 🔶 🟠 🟠 🔶 )
blue_pulse=( 0.35 🔹 🔷 🔵 🔵 🔷 )
football=( 0.25 ' 👧⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️👦 ' '👧 ⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️ 👦' '👧 ⚽️ 👦' )
blink=( 0.25 😐 😐 😐 😐 😐 😐 😐 😐 😐 😑 )
camera=( 0.1 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📷 📸 📷 📸 )
sparkling_camera=( 0.1 '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📷 ' '📸✨' '📷 ' '📸✨' )
sick=( 0.9 🤢 🤢 🤮 )
monkey=( 0.4 🙉 🙈 🙊 🙈 )
bomb=( 0.25 '💣 ' ' 💣 ' ' 💣 ' ' 💣' ' 💣' ' 💣' ' 💣' ' 💣' ' 💥' ' ' ' ' )
| true
|
8b386cb6b908c6a0fd9768757bdbadbfc8591c81
|
Shell
|
Kodsport/swedish-olympiad-2015
|
/katt/fargningsspelet/data/_donotuse_gen.sh
|
UTF-8
| 1,416
| 3.125
| 3
|
[] |
no_license
|
#!/bin/bash
# Generate the secret test-data tree for the "fargningsspelet" problem.
PROBLEMNAME="fargningsspelet"
# Start from a clean secret/ directory, keeping the top-level config.
rm -rf secret
mkdir secret
cp testdata.yaml secret/
# Begin a new test group: $1 = group name, $2 = points awarded.
# Creates secret/<name>/testdata.yaml and resets the case counter
# that testcase() increments.
group() {
  groupname=$1
  points=$2
  mkdir "secret/$groupname"
  {
    printf 'grader_flags: min first_error\n'
    printf 'range: 0 %s\n' "$points"
    printf 'accept_score: %s\n' "$points"
    printf 'on_reject: break\n'
  } > "secret/$groupname/testdata.yaml"
  ind=0
}
# Emit one test case into the current group: $1 = strategy, $2 = N,
# $3 = expected answer. Input files contain "N strategy"; files are
# numbered sequentially (zero-padded) within the group.
testcase() {
  ind=$((ind + 1))
  printf -v sind '%02d' "$ind"
  local stem="secret/$groupname/$PROBLEMNAME.$groupname.$sind"
  printf '%s %s\n' "$2" "$1" > "$stem.in"
  printf '%s\n' "$3" > "$stem.ans"
}
# strategy = 0 (random), 1 (start with 1), 2 (start with N), 3 (start with middle), 4 (end with middle)
# Group 1: small N, strategy 1 only.
group g1 23
testcase 1 2 0
testcase 1 5 0
testcase 1 8 0
testcase 1 11 3
testcase 1 13 0
# Group 2: strategy 1 with large N added.
group g2 26
testcase 1 2 0
testcase 1 5 0
testcase 1 11 3
testcase 1 13 0
testcase 1 980 0
testcase 1 981 0
# Group 3: small N, strategy 2.
group g3 24
testcase 2 2 1
testcase 2 3 1
testcase 2 3 2
testcase 2 5 3
testcase 2 6 4
testcase 2 8 4
testcase 2 9 4
testcase 2 10 4
testcase 2 11 3
testcase 2 11 4
testcase 2 12 1
testcase 2 12 2
testcase 2 12 4
testcase 2 13 1
testcase 2 13 2
testcase 2 13 3
testcase 2 13 4
# Group 4: strategy 2 including large N.
group g4 27
testcase 2 2 1
testcase 2 3 1
testcase 2 3 2
testcase 2 5 3
testcase 2 6 4
testcase 2 8 4
testcase 2 9 4
testcase 2 978 1
testcase 2 978 2
testcase 2 978 4
testcase 2 979 1
testcase 2 979 2
testcase 2 979 3
testcase 2 979 4
testcase 2 980 4
testcase 2 981 3
testcase 2 981 4
testcase 2 982 4
testcase 2 983 3
testcase 2 983 4
| true
|
865af742a73b038aa128a7e2f003c54f836ee2ff
|
Shell
|
allxone/slower_spool
|
/slowerSpool.sh
|
UTF-8
| 441
| 3.515625
| 4
|
[] |
no_license
|
#!/bin/bash
# Slowly re-spool an input file into /tmp/output in fixed-size chunks,
# sleeping between chunks so downstream consumers see files appear
# gradually. Chunks are written under a hide_ prefix and renamed once
# complete, so readers never observe a partially written file.
# (Fixed: the original shebang was `#/bin/bash`, i.e. a plain comment.)
help()
{
	echo "$0 <chunks> <sleep> <inputfile> <outputfile>"
	echo "$CHUNK"
	echo "$SLEEP"
	echo "$INPUT"
	echo "$OUTPUT"
}

c=0          # total lines copied
l=0          # lines in the current chunk
f=100000     # numeric suffix for chunk files, keeps them sortable
CHUNK=$1
SLEEP=$2
INPUT=/tmp/input/$3
OUTPUT=$4    # was unset in the original although help() printed it
test ! -f "$INPUT" && help && exit 1

# Read line by line, preserving whitespace and backslashes (the
# original `read a`/`echo $a` mangled both).
while IFS= read -r a
do
	c=$((c+1))
	l=$((l+1))
	printf '%s\n' "$a" >> "/tmp/output/hide_$OUTPUT$f"
	test "$l" != "$CHUNK" && continue
	# Chunk complete: publish it atomically, pause, show progress.
	mv "/tmp/output/hide_$OUTPUT$f" "/tmp/output/$OUTPUT$f"
	sleep "$SLEEP"
	printf '\r%s' "$c"
	l=0
	f=$((f+1))
done < "$INPUT"
date
| true
|
4a95012770cfdefa383f29e33167052f983a4fd1
|
Shell
|
emodstech/easyLampStart
|
/easyLampStart.sh
|
UTF-8
| 2,414
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
# Interactive LAMP setup helper (prompts are in Russian): configures
# apache2 + phpmyadmin, creates a MySQL user for $USER, and patches two
# known phpmyadmin count() warnings. Each step asks for confirmation.

# Step 1: point apache at $HOME/web, enable phpmyadmin and mod_rewrite.
read -r -p " Сконфигурировать apache в автоматическом режиме [y/N] ? " response
if [[ "$response" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
sudo sh -c "echo 'Include /etc/phpmyadmin/apache.conf ' >> /etc/apache2/apache2.conf"
sudo cp -v /etc/apache2/sites-available/000-default.conf /etc/apache2/sites-available/000-default.old.conf
sudo sed -i "s!/var/www/html!$HOME/web/index.php!" /etc/apache2/sites-available/000-default.conf
sudo mkdir -v $HOME/web
sudo a2enmod rewrite
sudo chown $USER:$USER $HOME/web
sudo tee -a /etc/apache2/apache2.conf <<EOF
<Directory /home/$USER/web/>
Options Indexes FollowSymLinks
AllowOverride All
Require all granted
</Directory>
EOF
sudo systemctl restart apache2
echo "Готово! Осталось запустить siteAdd.sh"
else
echo "apache2 без изменений"
fi

# Step 2: create a MySQL account for $USER (default password 1234).
read -r -p " Добавить пользователя $USER в mysql по умолчанию пароль 1234 [y/N] ? " resp
if [[ "$resp" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
sudo mysql --user=root mysql <<EQF
CREATE USER '$USER'@'localhost' IDENTIFIED BY '1234';
GRANT ALL PRIVILEGES ON *.* to '$USER'@'localhost' WITH GRANT OPTION;
FLUSH PRIVILEGES;
exit
EQF
sudo systemctl restart mysql.service
echo "Пользователь $USER добавлен, пароль для входа 1234 (при необходимости смените пароль)"
else
echo "Пользователь не добавлен"
fi

# Step 3: patch the sql.lib.php count() warning in phpmyadmin.
read -r -p "Скорректировать ошибку 'Warning in ./libraries/sql.lib.phpcount(): Parameter must be an array' phpmyadmin [y/N] ? " resp
if [[ "$resp" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
sudo sed -i "s/|\s*\((count(\$analyzed_sql_results\['select_expr'\]\)/| (\1)/g" /usr/share/phpmyadmin/libraries/sql.lib.php
sudo systemctl restart mysql.service
echo "ошибка исправлена"
else
echo "несконфигурированно"
fi

# Step 4: patch the plugin_interface.lib.php count() warning.
read -r -p "Скорректировать ошибку 'Warning in ./libraries/plugin_interface.lib.php#532' phpmyadmin [y/N] ? " resp
if [[ "$resp" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
sudo sed -i "s#count(\$options) >#count((array)\$options) >#" /usr/share/phpmyadmin/libraries/plugin_interface.lib.php
sudo systemctl restart mysql.service
echo "ошибка исправлена"
else
echo "несконфигурированно"
fi
| true
|
731b75b7c913e0110e77daed0ce1eb65067c2161
|
Shell
|
sanjeev2838/key_logger
|
/key_logger.sh
|
UTF-8
| 494
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
# Crude console keylogger control built on showkey(1).
#   start: announce and begin capturing keycodes to logger.txt
#   stop:  parse the captured log, then kill the capture process
if [[ $1 == "stop" ]]; then
python /home/sanjeev/scrapy_project/key_logger/parse.py
kill $(ps aux | awk '/[b]ackup/ {print $2}') #the most elegant way to kill this process!
exit #exit the script itself
fi
if [[ $1 == "start" ]]; then
echo "Back up in progress........"
fi
# Capture loop. NOTE(review): each iteration restarts showkey and the
# `>` redirection truncates logger.txt — confirm this is intentional.
while true
do
[ "$UID" -eq 0 ] || sudo showkey > /home/sanjeev/scrapy_project/key_logger/logger.txt
# python /home/sanjeev/scrapy_project/key_logger/parse.py
done
| true
|
f7b1fac1970f1f529539c6dbce2d45a6ab48ce09
|
Shell
|
moonfruit/lovebizhi
|
/lovebizhi.sh
|
UTF-8
| 2,067
| 3.171875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Wallpaper scraper for lovebizhi.com: pulls the auto-wallpaper feed,
# stores images under $IMAGES and symlinks them per tag under $TAGS.
# AUTO='http://api.lovebizhi.com/macos_v4.php?a=autoWallpaper&uuid=d044e701cdd846278ca95398d5de5d03&retina=1&client_id=1008&device_id=74118386&screen_width=3360&screen_height=2100&bizhi_width=3360&bizhi_height=2100'
AUTO='http://api.lovebizhi.com/macos_v4.php?a=autoWallpaper&retina=1&screen_width=3360&screen_height=2100&bizhi_width=3360&bizhi_height=2100'
# POST body selecting feed options: "loved" disabled, fixed category ids.
DATA='options={"loved":{"open":false},"category":{"open":true,"data":[1,2,3,4,5,6,7,8,10,797,798,1407,1546,1554,2097,2098,2180,21866]}}'
IMAGES=images            # download directory
TAGS=tags                # per-tag symlink tree
IGNORE=ignore.txt        # tag names to skip
IGNORE_ID=ignore_id.txt  # wallpaper ids to skip
# Fetch a URL and print the response body on stdout, logging the request
# on stderr. One argument performs a GET; a second argument is POSTed.
json() {
	local url=$1 body=${2-}
	if [[ -n "$body" ]]; then
		echo "POST $url" >&2
		# wget --post-data="$body" -O- "$url"
		curl -d"$body" "$url"
	else
		echo "GET $url" >&2
		# wget -O- "$url"
		curl "$url"
	fi
}
# Download URL $1 to local path $2, announcing the fetch on stdout.
download() {
	local url=$1 dest=$2
	echo "DOWNLOAD $url"
	wget "$url" -O "$dest"
}
# Print the local storage path for image file $1, creating the
# $IMAGES directory on first use.
filepath() {
	[[ -d "$IMAGES" ]] || mkdir -p "$IMAGES"
	printf '%s/%s\n' "$IMAGES" "$1"
}
# Symlink image file $2 into the per-tag directory $TAGS/$1.
# Temporarily changes into the tag directory; assumes it is invoked from
# the tree root (two levels above the tag directory), which the
# relative "../../" link target and the final `cd ../..` rely on.
link() {
	DIR="$TAGS/$1"
	if [[ ! -d "$DIR" ]]; then
		mkdir -p "$DIR"
	fi
	cd "$DIR" || return
	ln -sf "../../$IMAGES/$2"
	cd ../.. || return
}
# Delete every file named $2 under the current tree (the image and any
# tag symlinks pointing at it).
# NOTE(review): $1 (the wallpaper id) is accepted but never used —
# callers invoke `remove "$ID" "$FILE"`; confirm this is intentional.
remove() {
	find . -depth -name "$2" -delete
}
echo "******** START AT $(date) ********"
# Feed format: jq emits four fields per wallpaper (id, detail URL, vip
# original URL, plain original URL) which `paste - - - -` joins onto one
# line each.
while read -r ID DETAIL URL URL2; do
	# Skip wallpapers blacklisted by id.
	if grep -Fxq "$ID" "$IGNORE_ID"; then
		echo "******** Ignore '$ID' ********"
		continue
	fi
	# No VIP original: fall back to the plain original, swapping webp→jpg.
	if [[ $URL == null ]]; then
		URL=${URL2//.webp/.jpg}
	fi
	FILE=$(basename "$URL")
	# Link the image into every tag directory; if any tag is ignored,
	# drop the image (and links made so far) and move to the next one.
	while read -r TAG; do
		if grep -Fxq "$TAG" "$IGNORE"; then
			echo "******** Ignore '$ID' for '$TAG' ********"
			remove "$ID" "$FILE"
			continue 2
		fi
		link "$TAG" "$FILE"
	done < <(json "$DETAIL" | jq -r '.tags[] | .name')
	FILEPATH=$(filepath "$FILE")
	# Non-empty file on disk means this one was fetched earlier.
	if [[ -s "$FILEPATH" ]]; then
		echo "******** Already download '$ID' ********"
		continue
	fi
	if ! download "$URL" "$FILEPATH"; then
		remove "$ID" "$FILE"
		continue
	fi
	touch "$FILEPATH"
	# A zero-byte download is treated as a failure and cleaned up.
	if [[ ! -s "$FILEPATH" ]]; then
		remove "$ID" "$FILE"
	fi
done < <(json "$AUTO" "$DATA" | jq -r '.[] | .file_id, .detail, .image.vip_original, .image.original' | paste - - - -)
# Prune tag directories emptied by removals.
find "$TAGS" -depth -empty -delete
echo "******** END AT $(date) ********"
| true
|
3058c7666517b5ec84d6a9d6083682c6be855588
|
Shell
|
hxzqlh/auto_test_set
|
/facade_audio.sh
|
UTF-8
| 3,971
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
# Batch-transcode the audio track of a media file with every codec
# listed in audio_*.txt files, producing one output file per codec.
VERSION="1.0.0.0"
# Option variables, filled in by check_opts(); empty means "not given".
media=
start_sec=
duration=
smp_rate=
ch_num=
#128k 256k...
bitrate=
out_dir="."
# Print the command-line synopsis on stdout.
print_usage ()
{
	printf 'Usage: %s -i media [-s start_sec] [-t duration] [-r smp_rate] [-c ch_num] [-b bitrate] -d out_dir\n' "$0"
}
# Print the selected settings; optional fields appear only when set.
show_info ()
{
	echo "media:[$media]"
	[ x$start_sec != x ] && echo "start from:[$start_sec]"
	# Fixed: was `[ $duration != x ]` (missing the x prefix), which is a
	# unary-operator error when $duration is empty and wrongly hides the
	# field when it equals the literal "x".
	[ x$duration != x ] && echo "duration:[$duration]"
	[ x$smp_rate != x ] && echo "sample_rate:[$smp_rate]"
	# NOTE(review): smp_size is never assigned anywhere in this script.
	[ x$smp_size != x ] && echo "sample_size:[$smp_size]"
	[ x$ch_num != x ] && echo "channel_num:[$ch_num]"
	[ x$bitrate != x ] && echo "bitrate:[$bitrate]"
	echo "output to dir:[$out_dir]"
}
# Parse command-line flags into the global option variables above.
# Exits 2 on a missing media file or output directory, 0 after -v,
# and 1 (after printing usage) when -i was never given.
check_opts ()
{
	while getopts "i:s:t:r:c:b:d:v" opt
	do
		case "$opt" in
		"i")
			media="$OPTARG"
			[ ! -f "$media" ] && echo "media:[$media] not exit" && exit 2
			;;
		"s")
			start_sec="$OPTARG"
			;;
		"t")
			duration="$OPTARG"
			;;
		"r")
			smp_rate="$OPTARG"
			;;
		"c")
			ch_num="$OPTARG"
			;;
		"b")
			bitrate="$OPTARG"
			;;
		"d")
			out_dir="$OPTARG"
			[ ! -d "$out_dir" ] && echo "outpur dir:[$out_dir] not exit" && exit 2
			;;
		"v")
			echo "$0 $VERSION"
			exit 0
			;;
		"?")
			echo "Unknown option $OPTARG"
			;;
		":")
			echo "No argument value for option $OPTARG"
			;;
		*)
			# Should not occur
			echo "Unknown error while processing options"
			;;
		esac
		#echo "OPTIND is now $OPTIND"
	done
	# -i media is mandatory.
	[ x"$media" == x ] && print_usage && exit 1
}
# Build and run ffmpeg_audio.sh: one ffmpeg invocation per codec listed
# in every audio_<fmt>.txt file in the current directory. A list line is
# either "<codec>" or "<codec>,<sample_rate>,<channels>".
ffmpge_work ()
{
	run="ffmpeg_audio.sh"
	echo "#!/bin/bash" > "$run"
	# NOTE(review): $cmd is empty at this point — this writes a blank line.
	echo "$cmd" >> "$run"
	chmod +x $run
	for file in `ls audio_*.txt`; do
		echo "Generating for: $file"
		# Output container comes from the file name: audio_<fmt>.txt
		fmt=`echo $file | cut -d "_" -f 2 | cut -d "." -f 1 `
		k=0
		# Per-file working copies of the global options so a
		# "<codec>,<rate>,<channels>" line can override rate/channels.
		ff_media="$media"
		ff_start_sec="$start_sec"
		ff_duration="$duration"
		ff_smp_rate="$smp_rate"
		ff_ch_num="$ch_num"
		ff_bitrate="$bitrate"
		ff_out_dir="$out_dir"
		while read line; do
			((k++))
			echo "Line # $k: $line"
			echo "$line" | grep ","
			if [ $? -eq 0 ]; then
				# do with some special such as: amr_nb,8000,1
				pos=0
				while [ ! -z "$line" ]; do
					((++pos))
					seg=`echo $line | cut -d "," -f 1`
					line=`echo ${line#*,}`
					if [ $pos -eq 1 ]; then
						codec=$seg
					elif [ $pos -eq 2 ]; then
						ff_smp_rate=$seg
					elif [ $pos -eq 3 ]; then
						ff_ch_num=$seg
					fi
				done
			else
				codec="$line"
			fi
			# Assemble the ffmpeg command line; optional settings are
			# appended only when their variable is non-empty.
			cmd="ffmpeg -y "
			[ x$ff_start_sec != x ] && cmd+=" -ss $ff_start_sec "
			cmd+="-i $ff_media"
			[ x$ff_duration != x ] && cmd+=" -t $ff_duration "
			cmd+=" -map a:0 "
			cmd+=" -vn "
			cmd+=" -c:a $codec "
			[ x$ff_smp_rate != x ] && cmd+=" -ar $ff_smp_rate "
			[ x$ff_ch_num != x ] && cmd+=" -ac $ff_ch_num "
			[ x$ff_bitrate != x ] && cmd+=" -b:a $ff_bitrate "
			# for some experimental codecs
			cmd+=" -strict -2"
			cmd+=" $ff_out_dir/`basename $media`_`echo $codec`_`echo $ff_duration`s_`echo $ff_smp_rate`_`echo $ff_ch_num`_`echo $ff_bitrate`.$fmt"
			echo "echo \"$cmd\"" >> $run
			echo "$cmd" >> $run
		done < $file
		echo "Total number of codecs in $file: $k"
	done
	bash $run
}
# Entry point: parse options, report them, then generate and run the
# ffmpeg batch.
check_opts "$@"
show_info
ffmpge_work
| true
|
281257acdea3e5cc93c76c9ce966e7f64833b283
|
Shell
|
1KFG/Phylogenomics
|
/jobs/old/trim_to_new_dir.sh
|
UTF-8
| 289
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#PBS -l walltime=1:00:00 -j oe -o trimal
# Trim the N-th alignment (1-based) in the parent directory with trimal,
# where N comes from the PBS array index or the first CLI argument.
module load trimal

N=$PBS_ARRAYID
if [ -z "$N" ]; then
	N=$1
fi
if [ -z "$N" ]; then
	echo "need a cmdline or PBS_ARRAYID" >&2
	# Fixed: the original bare `exit` returned 0, so the scheduler
	# recorded a success on this error path.
	exit 1
fi

# Pick the N-th .fasaln file in the parent directory.
FILE=$(ls ../*.fasaln | sed -n "${N}p")
echo "$FILE"
b=$(basename "$FILE" .fa.fasaln)
trimal -in "$FILE" -out "$b.trim" -automated1
| true
|
2382309808be4609d52cfbdf2da83c685c411d42
|
Shell
|
ajaycs18/USP
|
/lab3/largestOf3.sh
|
UTF-8
| 229
| 3.3125
| 3
|
[] |
no_license
|
# Print the largest of three integer arguments.
if [ $# -lt 3 ]
then
	echo "Usage: $0 [n1] [n2] [n3]" >&2
	exit 128
fi

# Chained [ ]s with && replace the deprecated, ambiguous `-a` operator;
# quoting protects against empty operands.
if [ "$1" -gt "$2" ] && [ "$1" -gt "$3" ]
then
	echo "$1 is the greatest"
elif [ "$2" -gt "$1" ] && [ "$2" -gt "$3" ]
then
	echo "$2 is the greatest"
else
	echo "$3 is the greatest"
fi
| true
|
76e5609bd9fa468526de67afc303409933d3cfda
|
Shell
|
ncbi/sra-tools
|
/test/external/fasterq-dump/fq_tests/handle_one.sh
|
UTF-8
| 780
| 3.21875
| 3
|
[
"LicenseRef-scancode-ncbi",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-us-govt-public-domain"
] |
permissive
|
#!/bin/bash
# Run the full fasterq-dump test battery for one accession ($1).
# Blank lines and lines starting with '#' are skipped so this script can
# be fed straight from an accession list file.
ACC=$1

#skip comments
if [[ "${ACC}" == "#"* ]]; then
    exit
fi
#skip empty lines
if [[ "${ACC}" == "" ]]; then
    exit
fi

echo "start handling $ACC"

# Each stage exercises one fasterq-dump mode; the five copy-pasted
# stanzas of the original are collapsed into one loop with identical
# messages and exit codes (3 on the first failing stage).
for stage in whole_spot split_spot split_files split3 unsorted; do
    if ! "./${stage}.sh" "$ACC"; then
        echo "done handling $ACC ---> ERROR"
        exit 3
    fi
done
echo "done handling $ACC ---> OK"
| true
|
716b1dbc245f1c7c284c76898ce366c0f877f034
|
Shell
|
KhoaDTran/UW-Special-Technical-Coursework-Projects
|
/System and Software Tools/Lab2/task2.sh
|
UTF-8
| 1,201
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
####################################
# Name: Khoa Tran
# CSE 391 - Winter 2020
# Homework 2 - Task 2
####################################
# Each problemN function simply prints the shell command that answers
# that homework problem; nothing is executed.
function problem1 {
# Type your answer to problem #1 below this line
echo "javac ParseColumn.java"
}
function problem2 {
# Type your answer to problem #2 below this line
echo "java ParseColumn 1 >> candies.txt"
}
function problem3 {
# Type your answer to problem #3 below this line
echo "grep -i -w 'chocolate' candies.txt"
}
function problem4 {
# Type your answer to problem #4 below this line
echo "grep -i -v 'chocolate' candies.txt"
}
function problem5 {
# Type your answer to problem #5 below this line
echo "cp intro_survey.csv intro_survey_no_header.csv | tail -n +2 intro_survey_no_header.csv"
}
function problem6 {
# Type your answer to problem #6 below this line
echo "cat intro_survey_no_header.csv | wc -l"
}
function problem7 {
# Type your answer to problem #7 below this line
echo "sed /"
}
function problem8 {
# Type your answer to problem #8 below this line
echo "grep -o -i intro_survey_no_header.csv | wc -l"
}
function problem9 {
# Type your answer to problem #9 below this line
echo ""
}
| true
|
41a012336d6a9735644fbc3b45523782dde5b05d
|
Shell
|
kwanderer/DMI
|
/shell_examples.sh
|
UTF-8
| 1,457
| 2.875
| 3
|
[] |
no_license
|
#!/bin/bash
# Shell variable and argument examples. The inline labels are Latvian:
# "piemers" = example. Only example 7 below is active; the earlier
# examples are kept commented out for reference.
#7.piemers
#skaitliska_vertiba='expr 2 + 2'
#echo "Summas vertiba: "$skaitliska_vertiba
#echo "Summas vertiba: $skaitliska_vertiba"
# Active: arithmetic via command substitution of expr.
skaitliska_vertiba=`expr 2 + 2`
echo "Summas vertiba: "$skaitliska_vertiba
echo "Summas vertiba: $skaitliska_vertiba"
#skaitliska_vertiba=expr 2 + 2
#echo "Summas vertiba: "$skaitliska_vertiba
#echo "Summas vertiba: $skaitliska_vertiba"
#6.piemers
#echo $*
#echo "---------------"
#kartas_numurs=1
#for arguments in $*
#do
# echo $kartas_numurs". arguments - " $arguments
# kartas_numurs=$kartas_numurs+1
#done
#5.piemers
#echo "Skriptam nodotu argumentu skaits:" $#
#echo "Argumentu saraksts (attelosana/ grupesana veids 1): " $*
#echo "Argumentu saraksts (attelosana/ grupesana veids 2): " $@
#echo "Pirma argumenta vertiba: " $1
#echo "Otra argumenta vertiba: " $2
#echo $1$2
#4.piemers
#echo "Izpildama skripta faila nosaukums: " $0
##echo $n
#echo "Skriptam nodotu argumentu skaits:" $#
#echo "Argumentu saraksts (attelosana/ grupesana veids 1): " $*
#echo "Argumentu saraksts (attelosana/ grupesana veids 2): " $@
##ech "Argumentu saraksts (attelosana/ grupesana veids 2): " $@
#echo "Iepreksejas komandas izpildes rezultats: " $?
#echo "Skripta izpildei pieskirtais procesa numurs: " $$
##echo $!
#3. piemers
#N="Vards"
#echo $N
#unset N
#echo $N
#2.piemers
#N="Vards"
#readonly N
#echo $N
#N="Vards Uzvards"
#echo $N
#1.piemers
#N="Vards"
#echo $N
#0. piemers
#history > history_20170927.txt
| true
|
d472c9e5c1ddc4d8b99f74b416b27632e9b17244
|
Shell
|
okpy/ok
|
/azure/paas/setup-mongo.sh
|
UTF-8
| 998
| 3.4375
| 3
|
[
"Apache-2.0",
"BSD-2-Clause"
] |
permissive
|
#!/usr/bin/env bash
# Provision an Azure CosmosDB (Mongo API) account for OK from an ARM
# template and record its connection settings in secrets/mongo.env.
. ./utils.sh; az_login
# Idempotence: a previous run already produced the secrets file.
if [ -f ./secrets/mongo.env ]; then log "Already exists: mongo"; exit 0; fi
#
# verify inputs
#
mongo_name="${OK_NAME}mongo"
mongo_database="ok"
deployment_log="$(mktemp)"
#
# create resource
#
log "Creating resource ${mongo_name}"
az group deployment create \
    --name "${mongo_name}" \
    --template-file './arm/mongo.deploy.json' \
    --parameters \
        "mongoAccountName=${mongo_name}" \
    | tee "${deployment_log}"
# Extract host and key from the ARM deployment's JSON outputs.
mongo_host="$(jq -r '.properties.outputs.mongoHost.value' "${deployment_log}")"
mongo_password="$(jq -r '.properties.outputs.mongoPassword.value' "${deployment_log}")"
az cosmosdb database create \
    --db-name "${mongo_database}" \
    --key "${mongo_password}" \
    --url-connection "https://${mongo_host}"
#
# store secrets
#
cat > ./secrets/mongo.env << EOF
MONGO_DATABASE=${mongo_database}
MONGO_USERNAME=${mongo_name}
MONGO_PASSWORD=${mongo_password}
MONGO_HOST=${mongo_host}
MONGO_PORT=10255
EOF
log "Done with ${mongo_name}"
| true
|
96db6ebae4f7a6292d0d1915238efdeaf5d0335e
|
Shell
|
dr-alun-moon/shell-scripts
|
/tidyup1
|
UTF-8
| 589
| 4.125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Tidy a build directory: move .c sources from <tidy dir> into <src dir>
# and .o objects into <obj dir>.
# Usage: tidyup1 <tidy dir> <src dir> <obj dir>

# Abort with usage text unless exactly three arguments were given.
usage() {
	if [ $# -ne 3 ]; then
		echo "Your command: $0 $*"
		echo "Usage: tidy4 <tidy dir> <src dir> <obj dir>"
		exit 1
	fi
}

# Announce tidying of $1, or abort if it is not a directory.
dirExists() {
	if [ -d "$1" ]
	then
		echo "Tidying directory $1 ..."
	else
		echo "$1 does not exist"
		exit 1
	fi
}

# Create directory $1 unless it exists; abort if a plain file is in the way.
ensureDir() {
	if [ ! -d "$1" ]
	then
		if [ -f "$1" ]
		then
			echo "file named $1 exists"
			exit 1
		else
			mkdir "$1"
		fi
	fi
}

# Move every file matching *$1 from directory $2 into directory $3.
moveFiles() {
	for f in "$2"/*"$1"
	do
		# Skip the literal pattern when the glob matched nothing
		# (the original tried to `mv` the unexpanded pattern).
		[ -e "$f" ] || continue
		mv "$f" "$3"
	done
}

# Fixed: was `usage $*`, which re-splits quoted arguments and so
# miscounts paths containing spaces.
usage "$@"
dirExists "$1" || exit 1
ensureDir "$2" || exit 1
moveFiles .c "$1" "$2"
ensureDir "$3" && moveFiles .o "$1" "$3" || exit 1
| true
|
68fc63e9322f525b876365e83d3cd5191f4623df
|
Shell
|
mskz-3110/OI
|
/install_packages.sh
|
UTF-8
| 428
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
# Install build prerequisites using whichever package manager the caller
# selected via $PACKAGE_MANAGER (yum or apt); any other value aborts.
if [ "${PACKAGE_MANAGER}" = "yum" ]; then
	yum install -y wget unzip file
	yum install -y git gcc gcc-c++ cmake make
	yum install -y bzip2 openssl-devel readline-devel zlib-devel
elif [ "${PACKAGE_MANAGER}" = "apt" ]; then
	apt update
	apt install -y wget unzip file
	apt install -y git gcc g++ cmake make
	apt install -y bzip2 libssl-dev libreadline-dev zlib1g-dev
else
	echo "Unsupported PACKAGE_MANAGER: ${PACKAGE_MANAGER}"
	exit 1
fi
| true
|
76935a574e81f3e37f68649a485bb0ae7f567425
|
Shell
|
sasg/bootengine
|
/dracut/10usr-fsck-generator/usr-fsck-generator
|
UTF-8
| 2,105
| 4.03125
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# -*- mode: shell-script; indent-tabs-mode: nil; sh-basic-offset: 4; -*-
# ex: ts=8 sw=4 sts=4 et filetype=sh
# This script adds a drop-in which disables systemd-fsck for the /usr
# partition. Cases:
#
# 1. /usr is backed by dm-verity and is read-only. fsck.ext4 would fail
#    and print a confusing error to the journal.
#
# 2. The /usr filesystem is configured for verity but the bootloader is
#    too old to enable it. fsck.ext4 would fail because of the read-only
#    compat flags that prevent writing to the filesystem, and print a
#    confusing error to the journal.
#
# 3. Neither #1 or #2 are true because this is a dev image. This case
#    is not trivial to detect and not actually important.
set -e
# systemd passes the generator output directory as the first argument.
UNIT_DIR="${1:-/tmp}"
# env var for testing
# The kernel command line is split into an array of name=value words.
if [[ -n "${USR_FSCK_GENERATOR_CMDLINE}" ]]; then
    cmdline=( ${USR_FSCK_GENERATOR_CMDLINE} )
else
    cmdline=( $(</proc/cmdline) )
fi
# Usage: cmdline_arg name default_value
# Print the value of the last "name=value" entry in the global `cmdline`
# array, or default_value when the name never appears.
cmdline_arg() {
    local key="$1" result="${2-}"
    local entry
    for entry in "${cmdline[@]}"; do
        [[ "${entry%%=*}" == "${key}" ]] && result="${entry#*=}"
    done
    echo "${result}"
}
# The /usr source may be given as mount.usr= or, as a fallback, usr=.
usr=$(cmdline_arg mount.usr)
if [[ -z "$usr" ]]; then
    usr=$(cmdline_arg usr)
fi
# Translate LABEL=/UUID=/PARTUUID=/PARTLABEL= specs into the matching
# /dev/disk/by-* path.
case "${usr}" in
    LABEL=*)
        # Slashes inside a label appear escaped as \x2f in by-label paths.
        usr="$(echo $usr | sed 's,/,\\x2f,g')"
        usr="/dev/disk/by-label/${usr#LABEL=}"
        ;;
    UUID=*)
        usr="${usr#UUID=}"
        usr="$(echo $usr | tr "[:upper:]" "[:lower:]")"
        usr="/dev/disk/by-uuid/${usr}"
        ;;
    PARTUUID=*)
        usr="${usr#PARTUUID=}"
        usr="$(echo $usr | tr "[:upper:]" "[:lower:]")"
        usr="/dev/disk/by-partuuid/${usr}"
        ;;
    PARTLABEL=*)
        usr="/dev/disk/by-partlabel/${usr#PARTLABEL=}"
        ;;
esac
# Only proceed if the source is a path.
if [[ "${usr}" != /* ]]; then
    exit 0
fi
# Escape the device path into a systemd unit-name component and write a
# drop-in whose condition can never hold, disabling the fsck unit.
usr="$(systemd-escape -p "${usr}")"
dropin_dir="${UNIT_DIR}/systemd-fsck@${usr}.service.d"
mkdir -p "${dropin_dir}"
cat >"${dropin_dir}/disable.conf" <<EOF
# Automatically generated by usr-fsck-generator
[Unit]
ConditionPathIsDirectory=/dev/null
EOF
| true
|
a6a9e31d07b2c7a89b47a3bf65c846ee15c3bf2b
|
Shell
|
iRave/dotfiles
|
/.config/i3/fuzzy_lock.sh
|
UTF-8
| 380
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/sh -e
# Lock the screen behind a pixellated screenshot of the current desktop
# (i3 variant; the commented lines are the sway equivalents).
# Take a screenshot
scrot /tmp/screen_locked.png
#swaygrab /tmp/screen_locked.png
# Pixellate it 10x
mogrify -scale 10% -scale 1000% /tmp/screen_locked.png
# Lock screen displaying this image.
i3lock -i /tmp/screen_locked.png
#swaylock -i /tmp/screen_locked.png
# Turn the screen off after a delay.
#sleep 10;
# NOTE(review): this only checks the locker is running; the dpms
# force-off step below is commented out.
pgrep i3lock
#pgrep swaylock
#&& xset dpms force off
| true
|
a8cd39547efd22e174031bce331203586c3d0f4c
|
Shell
|
jzwlqx/mytools
|
/bin/mergeto
|
UTF-8
| 416
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# Merge all changes from SVN branch URL $1 into branch URL $2, using a
# fresh temporary checkout of $2 that is removed on exit.
b1=$1
b2=$2
if [ -z "$b1" -o -z "$b2" ]; then
	echo "usage: $0 branch1 branch2"
	exit 1
fi
tmp=$(mktemp -d)
trap "rm -rf $tmp" EXIT
cd $tmp
svn co $b2 to
cd to
# Oldest revision on $b1 since it branched (log --stop-on-copy);
# $version has the form "rNNN".
version=$(svn log --stop-on-copy "$b1" | awk '$1~/^r[0-9]+/{print $1}'|tail -n1)
echo Oldest Version: $version
# Expands to e.g. "-r123:HEAD" because $version already starts with "r".
svn merge -$version:HEAD $b1
read -p "Check In? (Y/N)" ci
if [ "$ci" = "Y" ]; then
	svn ci -m "merge from $b1"
fi
| true
|
c8f9a0f55cbde3b6113ad59f932affce99a5e358
|
Shell
|
theimagingcollective/tic_core
|
/studies/active/scripts/environment.sh
|
UTF-8
| 2,339
| 2.5625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# HFPEF study shell environment: path variables plus convenience aliases
# for BIDS validation, preprocessing (heudiconv/fmriprep/mriqc/netprep)
# and quick directory navigation. Meant to be sourced, not executed.
# Environment variables
export HFPEF_PATH=/gandg/hfpef/
export HFPEF_BIDS_PATH=${HFPEF_PATH}/bids
export HFPEF_IMAGE_ANALYSIS_PATH=${HFPEF_PATH}/image_analysis
export HFPEF_IMAGE_PROCESSING_PATH=${HFPEF_PATH}/image_processing
export HFPEF_IMAGE_PROCESSING_WORKING_PATH=${HFPEF_IMAGE_PROCESSING_PATH}/_working
export HFPEF_SCRIPTS_PATH=${HFPEF_PATH}/scripts
export HFPEF_MRIQC_PATH=${HFPEF_PATH}/mriqc
export HFPEF_FMRIPREP_PATH=${HFPEF_IMAGE_PROCESSING_PATH}/fmriprep
export HFPEF_NETPREP_PATH=${HFPEF_IMAGE_PROCESSING_PATH}/netprep
export HFPEF_BIDS_CONFIG_FILE=${HFPEF_SCRIPTS_PATH}/hfpef_bids.cfg
# Aliases
alias rehfpef='source $HFPEF_SCRIPTS_PATH/hfpef_aliases.sh'
alias hfpef_help='firefox https://github.com/theimagingcollective/nipype_workflows/wiki/HFPEF-Image-Processing &'
alias hfpef_bids='bids-validator $HFPEF_BIDS_PATH -c $HFPEF_BIDS_CONFIG_FILE'
alias hfpef_bids_validitor='bids-validator $HFPEF_BIDS_PATH -c $HFPEF_BIDS_CONFIG_FILE'
alias hfpef_bv='bids-validator $HFPEF_BIDS_PATH -c $HFPEF_BIDS_CONFIG_FILE'
alias hfpef_hdc='/cenc/software/heudiconv/python/heudiconv/bin/heudiconv -c dcm2niix -b --minmeta -f /gandg/hfpef/scripts/hfpef_protocol.py -o /gandg/hfpef/bids/ -d "{subject}/2*/*/*.DCM"'
alias hfpef_display_bids='/gandg/hfpef/scripts/hfpef_display_bids.sh'
alias hfpef_bids_display='/gandg/hfpef/scripts/hfpef_display_bids.sh'
alias hfpef_clean_bids='/gandg/hfpef/scripts/hfpef_bids_clean.sh'
alias hfpef_bids_clean='/gandg/hfpef/scripts/hfpef_bids_clean.sh'
alias hfpef_fmriprep='/gandg/hfpef/scripts/hfpef_fmriprep.sh'
alias hfpef_mriqc='/gandg/hfpef/scripts/hfpef_mriqc.sh'
alias hfpef_mriqc_group='mriqc /gandg/hfpef/bids /gandg/hfpef/mriqc group'
alias hfpef_gi_netprep_epi='/gandg/hfpef/scripts/hfpef_gi_netprep_epi.sh'
alias hfpef_gi_netprep_mbepi='/gandg/hfpef/scripts/hfpef_gi_netprep_mbepi.sh'
alias hfpef_gi_netprep='/gandg/hfpef/scripts/hfpef_gi_netprep.sh'
alias hfpef_netprep='/gandg/tic/nipype_workflows/netprep.py'
# cd shortcuts into the study tree.
alias cdh='cd $HFPEF_PATH'
alias cdhb='cd $HFPEF_BIDS_PATH'
alias cdhin='cd $HFPEF_PATH/incoming;echo;pwd;ls -lrt; echo;'
alias cdhia='cd $HFPEF_IMAGE_ANALYSIS_PATH'
alias cdhip='cd $HFPEF_IMAGE_PROCESSING_PATH'
alias cdhipl='cd $HFPEF_IMAGE_PROCESSING_PATH/fmriprep_logs'
alias cdhqc='cd $HFPEF_MRIQC_PATH'
alias cdhs='cd $HFPEF_SCRIPTS_PATH'
| true
|
d341f61b455668bb1e8e6037e78cc9ae90927e8c
|
Shell
|
goyder/next-previous-blogs
|
/scripts/nfs_server_install.sh
|
UTF-8
| 448
| 2.859375
| 3
|
[] |
no_license
|
# Unfortunately we won't actually automate a lot of this.
# You can find the full steps here:
# https://www.tecmint.com/install-nfs-server-on-ubuntu/
# Step 1 - install nfs-server
sudo apt update
sudo apt install nfs-kernel-server
# Step 2 - NFS export directory
# World-writable, nobody-owned share as per the linked guide.
sudo mkdir -p /mnt/nfs_share
sudo chown -R nobody:nogroup /mnt/nfs_share
sudo chmod 777 /mnt/nfs_share
echo Job done for now.
echo Do not forget to configure share access manually.
| true
|
b438afdb298aa756abee890d41e079a073dd5127
|
Shell
|
psanford/go-mode-bot
|
/build.sh
|
UTF-8
| 1,935
| 3.71875
| 4
|
[] |
no_license
|
#!/bin/bash
# CI build for go-mode.el pull requests: checks out the PR, batch
# re-indents the Go 1.12.8 source tree with the PR's go-mode, diffs it
# against a pristine copy, and runs the indentation test suite. All
# results land under /artifacts.
set -e
set -x
set -o pipefail

# PR should be set in the environment as a plain number: 1338
# REPO should be set in the environment as: owner/project
fullrepo=https://github.com/${REPO-dominikh/go-mode.el}
srcdir=go-mode.el
tmpdir=$(mktemp -d)
cd $tmpdir

# Two copies of the Go source tree: one to re-indent, one to diff against.
gotar=go1.12.8.src.tar.gz
wget https://dl.google.com/go/$gotar
mkdir go_orig go
tar xf $gotar -C go_orig --strip-components=1
tar xf $gotar -C go --strip-components=1
git clone https://github.com/psanford/emacs-batch-reindent
git clone $fullrepo $srcdir
(
  # Fetch and check out the PR head; record its log and sha.
  cd $srcdir
  branch=pr/$PR
  git fetch -fu origin refs/pull/$PR/head:$branch
  git checkout $branch
  git log master..HEAD > /artifacts/git_log
  git rev-parse HEAD > /artifacts/git_sha
)
(
  # Re-indent every .go file under $FORMAT_DIR with the PR's go-mode,
  # recording wall time (ms) and exit code.
  cd emacs-batch-reindent
  GO_MODE="../go-mode.el/go-mode.el"
  EXT=".go"
  FORMAT_DIR="../go/src"
  set +e
  start="$(date +%s%N)"
  echo "dir: $FORMAT_DIR" >> /artifacts/batch-reindent.log
  echo "ext: $EXT" >> /artifacts/batch-reindent.log
  time printf "$FORMAT_DIR\n$EXT\n" | emacs --batch -q -l $GO_MODE -l batch-reindent.el -f batch-reindent >>/artifacts/batch-reindent.log 2>&1
  result=$?
  set -e
  end="$(date +%s%N)"
  delta=$(($((end-start))/1000000))
  echo $delta > /artifacts/batch-reindent.runtime
  echo $result > /artifacts/batch-reindent.exitcode
)
(
  # Diff the re-indented tree against the pristine copy.
  set +e
  diff -u -r go_orig go > /artifacts/batch-reindent.diff
  result=$?
  set -e
  # 0 == no diff; 1 == diff; 2 == problem
  if [ $result -ge 2 ]; then
    exit $result
  fi
  diffstat /artifacts/batch-reindent.diff > /artifacts/batch-reindent.diffstat
)
(
  # Run the ERT indentation tests, recording runtime and exit code.
  cd go-mode.el/test
  set +e
  start="$(date +%s%N)"
  emacs --batch -q -l ert -l ../go-mode.el -l go-indentation-test.el -f ert-run-tests-batch-and-exit >/artifacts/emacs-tests.log 2>&1
  result=$?
  set -e
  end="$(date +%s%N)"
  delta=$(($((end-start))/1000000))
  echo $delta >> /artifacts/emacs-tests.runtime
  echo $result > /artifacts/emacs-tests.exitcode
)
| true
|
8ca3c63669723ebc261664778e151cec67295984
|
Shell
|
leroyvi/dotfiles
|
/.symlink_it.sh
|
UTF-8
| 2,153
| 3.46875
| 3
|
[
"WTFPL"
] |
permissive
|
#!/usr/bin/env bash
# Symlink dotfiles from this repository into $DESTDIR (default: $HOME).
# readlink -f ? forget it, -f isn't a valid option on BSD.
DOTFILES=$( (cd -P $(dirname $0) && pwd) )
# ANSI colors for the per-file status line printed by relink().
C_SEPARATOR="\033[1;34;40m"
C_CLEAR="\033[1;0m"
DESTDIR=${1:-~}
[ ! -d $DESTDIR ] && echo "$DESTDIR is not a folder" 1>&2 && exit 1
cd $DESTDIR
# relink config_file [dest]
# Default dest is ".<basename of config_file>" in the current directory.
# Any existing dest is removed first (interactively for regular files,
# silently for symlinks) before the link into $DOTFILES is (re)created.
function relink() {
	config_file=$DOTFILES/$1
	dest=${2:-.${1##*/}}
	dest_dir=$(dirname $dest)
	if [[ ! -d $dest_dir ]]; then
		mkdir -p $dest_dir
	fi
	printf "${dest} ${C_SEPARATOR}:: ${C_CLEAR}"
	if [ -L "$dest" ]; then
		printf "symbolic link, updating\n"
		rm -f "$dest"
	elif [ -e "$dest" ]; then
		# a broken symlink isn't a file...
		printf "file, "
		rm -ri $dest
	else
		printf "not found, creating\n"
	fi
	ln -sn $config_file $dest
}
relink bin
relink mplayer
# with git >= 1.7.12
relink git/gitconfig .config/git/config
relink git/gitignore .config/git/ignore
# compat with git < 1.7.12
relink git/gitconfig
relink npm/npmrc
relink x/xinitrc
relink x/xinitrc .xsession
relink x/Xresources
relink x/xcompose/dotXCompose .XCompose
relink {,.}xmonad/xmonad.hs
relink xmonad/conkystatusbarrc
relink xmonad/dzen-icons
relink vim/plugins .vim/bundle
relink vim/pathogen/autoload .vim/autoload
relink vim/vimrc
mkdir -p .vim/tmp/{undo,backup,swap}
relink shell/zsh
# for bash compatibility, and xinitrc (see comments)
relink shell/profile
relink shell/profile .bash_profile
relink shell/profile .zprofile
relink shell/bashrc
relink shell/zshrc
relink shell/common .shell
relink ackrc
relink tmux.conf
relink fonts
echo "don't forget to run fc-cache -vf if the fonts changed"
# quodlibet plugins : not the ideal way to handle my changes...
relink {,.}quodlibet/plugins/editing/iconv.py
relink {,.}quodlibet/plugins/songsmenu/openwith.py
# TODO find why bash doesn't expand "for profile in .mozilla/firefox/*/prefs.js"
# with no match (should do 0 loop)
for profile in `ls .mozilla/firefox/*/prefs.js 2> /dev/null`
do
prefs=${profile%prefs.js}user.js
relink firefox/user.js $prefs
done
| true
|
43600b8f14c15af9794a0eb500d8a9d7cc7cc8ce
|
Shell
|
duelle/slow-crud-app
|
/startup.sh
|
UTF-8
| 709
| 2.6875
| 3
|
[] |
no_license
|
#!/bin/bash
# Build all modules, then start the app, the auth service (with JMX
# enabled on port 1100) and finally the test harness in the foreground.
echo "NOTE: This will only work if you are running Java 7."
echo ""
echo "You are running:"
java -version
echo ""
read -p "Press <ENTER> to continue.."
# BUGFIX: 'cmd || echo ... && exit 1' always exits, because '&&' applies to
# the result of the whole '||' chain (echo succeeds, so exit 1 ran even when
# the build was fine).  Group the failure handling explicitly instead.
mvn clean install || { echo "mvn clean install failed." >&2; exit 1; }
unset MAVEN_OPTS
cd had_one_dismissal || exit 1
# Start the main app in the background; give Jetty time to come up.
mvn jetty:run > had_one_dismissal.log &
sleep 20
cd ..
cd auth-service || exit 1
export MAVEN_OPTS="-Dcom.sun.management.jmxremote.port=1100 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"
mvn -Djetty.port=9000 jetty:run > auth-service.log &
sleep 20
cd ..
unset MAVEN_OPTS
cd sca-test-harness || exit 1
mvn -Djclarity.hod.host=localhost -DJMX_AUTH_SERVER_HOST=localhost -DJMX_AUTH_SERVER_PORT=1100 exec:java
| true
|
813f9f672dee2e7545f2d892bd68d1ed9ef4d9c0
|
Shell
|
16c7x/gity
|
/gity.sh
|
UTF-8
| 263
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
# Run a git add, git commit -m "message", git push
echo "Add your commit message"
# -r keeps backslashes in the message literal (plain 'read' interprets them).
read -r message
if [[ -z $message ]]
then
echo "ERROR: you must leave a commit message"
exit 1
fi
git add .
git commit -m "$message"
git push origin master
exit 0
| true
|
b107f26796c02ed166db72af413c452717861612
|
Shell
|
han-hongyuan/k8s-charts
|
/mysql-slave/slave-start.sh
|
UTF-8
| 3,530
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash
# MySQL slave startup / self-healing routine:
# - First check whether the slave replicates normally; if so, do nothing.
# - Otherwise start recovery: back up the slave's current data, drop its
#   databases, and re-seed them from a fresh dump of the master.
# - After re-seeding, re-point replication at the master and start it.
# Gives up after more than 3 consecutive failed recovery attempts.
# Initial configuration (backup location and state files).
storage='/backup'
master_info="${storage}/master.info"
nowTime=`date +"%Y-%m-%d_%H:%M:%S"`
status_file="${storage}/status.info"
local_status_file="${storage}/local_status.info"
# Initialize the backup directory and failure-counter file.
if [[ ! -d ${storage} ]]; then
mkdir -p ${storage}
fi
if [[ ! -f ${status_file} ]];then
> ${status_file}
fi
# Wait until the local MySQL server accepts connections.
until mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "SELECT 1"; do echo "测试mysql启动中..."; sleep 5; done
# Wait until the master is reachable with the replication credentials.
until mysql -h $MASTER_HOST -u$MASTER_USER -p$MASTER_PASSWORD -P$MASTER_PORT -e "SELECT 1"; do echo "测试master主从用户中..."; sleep 5; done
# Inspect the slave's SQL and IO replication thread status.
sql_status=`mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "show slave status\G;" |grep Slave_SQL_Running:|awk -F': ' '{print $2}'`
io_status=`mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "show slave status\G;" |grep Slave_IO_Running:|awk -F': ' '{print $2}'`
if [[ "x$sql_status" == "xYes" && "x$io_status" == "xYes" ]];then
> ${status_file}
echo "slave状态正确"
else
# If more than 3 consecutive failures are already recorded, give up;
# otherwise record this attempt's timestamp and try to recover.
if [[ `cat ${status_file} |wc -l` -gt 3 ]]; then
exit 1
else
echo $nowTime >> ${status_file}
fi
echo "slave状态错误, 启动恢复"
# Dump the master's databases (excluding MySQL system schemas) locally.
databases=$(mysql -h $MASTER_HOST -uroot -p$MASTER_ROOT_PASSWORD -P$MASTER_PORT -e "show databases;" -B -N|grep -v performance_schema |grep -v information_schema|grep -v sys|tr '\n' ' ')
mysqldump --force -h $MASTER_HOST -uroot -p$MASTER_ROOT_PASSWORD -P$MASTER_PORT --databases $databases --lock-tables=false --master-data=2 --single-transaction > ${storage}/master_${nowTime}.sql
# Back up the slave's current data (excluding MySQL's own schemas)
# before destroying it.
mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "stop slave;"
local_databases=$(mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "show databases;" -B -N|grep -v performance_schema |grep -v information_schema|grep -v mysql|grep -v sys|tr '\n' ' ')
mysqldump --force -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD --databases $local_databases |gzip > ${storage}/${nowTime}.gz
echo ${nowTime}.gz >> ${local_status_file}
# Keep only the three most recent local backups.
if [[ `cat ${local_status_file}|wc -l` -gt 3 ]]; then
delete_local_file=`head -n 1 ${local_status_file}`
rm -f ${storage}/${delete_local_file}
sed -i "/${delete_local_file}/d" ${local_status_file}
fi
# Drop the slave's (now backed-up) databases.
for db in $local_databases
do
mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "drop database $db;"
done
# Build master.info: extract the "-- CHANGE MASTER TO ..." line that
# --master-data=2 wrote into the dump, and append connection parameters.
change_master_info=$(head -n 100 ${storage}/master_${nowTime}.sql |grep -e "-- CHANGE"|tr ';' ' ')
change_master_info=${change_master_info/-- /}
echo "$change_master_info,MASTER_HOST=\"$MASTER_HOST\",MASTER_USER=\"$MASTER_USER\",MASTER_PORT=$MASTER_PORT,MASTER_PASSWORD=\"$MASTER_PASSWORD\";" > $master_info
# Load the master dump into the slave, then discard the dump file.
mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD < ${storage}/master_${nowTime}.sql
rm -f ${storage}/master_${nowTime}.sql
# Point replication at the master and start it.
change_master_info=`cat ${master_info}`
mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "$change_master_info"
mysql -h 127.0.0.1 -uroot -p$MYSQL_ROOT_PASSWORD -e "start slave;"
fi
| true
|
2f57f26c20deb1fb171a94c23426eb08658b0c4a
|
Shell
|
BriefHistory/shell
|
/random-wallpaper-from-unsplash.sh
|
UTF-8
| 441
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Download a random wallpaper via unsplash-wallpaper and set it with feh.
CMD=/usr/bin/unsplash-wallpaper
DIR=/tmp/random-wallpaper-from-unsplash
if [[ ! -x $CMD ]]; then
echo "unsplash-wallpaper not found" >&2
exit 1
fi
# Create the download directory on first run; abort if that fails.
if [[ ! -d $DIR ]]; then
mkdir -p "$DIR" || exit 1
fi
# Check the command directly instead of testing $? afterwards.
if ! "$CMD" random -d "$DIR"; then
echo "Failed downloading wallpaper." >&2
exit 1
fi
# Paths are quoted so a HOME containing spaces does not split the arguments.
mv "$DIR"/wallpaper-* "$HOME/.wallpaper"
feh --bg-scale "$HOME/.wallpaper"
| true
|
e35a213b832dc1a846c86ea27bb691a8435be65f
|
Shell
|
Chalmers-IT/AdHoc
|
/adhoc-server/sbin/backup_mysql.sh
|
UTF-8
| 1,125
| 3.46875
| 3
|
[] |
no_license
|
#!/bin/sh
# Backup the AdHoc mysql database
# Dumps all databases into ~<service user>/backups, gzips the result and
# prunes backups older than 7 days.  The DB password is read from the
# service user's private bashrc and passed to mysqldump via a temporary
# defaults file so it never appears on a command line.
DATE=`date "+%Y-%m-%d-%H:%M:%S"`
export ADHOC_USER=${ADHOC_USER:-srvadhoc}
# Resolve the service user's home directory (tilde expansion via eval).
ADHOC_USER_HOME=$(eval echo ~${ADHOC_USER})
BACKUPDIR=${ADHOC_USER_HOME}/backups
mkdir -p ${BACKUPDIR}
TMPDIR=${ADHOC_USER_HOME}/var/tmp
mkdir -p ${TMPDIR}
export CHALMERS_DEPLOY_LEVEL=install
# Provides ADHOC_DB_PASSWORD.  NOTE(review): assumed -- confirm in bashrc.private.
. ${ADHOC_USER_HOME}/etc/bashrc.private
# NOTE(review): bare 'mktemp -t' works with GNU mktemp but BSD/macOS mktemp
# requires a template after -t -- confirm the target platform.
PWF=`mktemp -t` || exit 1
# Create a temporary config file
cat /etc/my.cnf >${PWF}
echo >>${PWF}
cat >>${PWF} <<EOF
[mysqldump]
user = root
password = ${ADHOC_DB_PASSWORD}
all-databases = TRUE
quote-names = TRUE
create-options = TRUE
disable-keys = TRUE
events = TRUE
flush-logs = TRUE
flush-privileges = TRUE
single-transaction = TRUE
extended-insert = TRUE
EOF
# Backup all databases
mysqldump --defaults-file=${PWF} > ${BACKUPDIR}/mysql_backup.${DATE}
rm ${PWF}
if [ -s ${BACKUPDIR}/mysql_backup.${DATE} ]; then # If we have a new nonzero size file and we manage to compress it, remove files older than 7 days.
gzip ${BACKUPDIR}/mysql_backup.${DATE} && \
chmod go-rw ${BACKUPDIR}/mysql_backup.${DATE}.gz && \
find ${BACKUPDIR}/ -type f -mtime +7 -exec rm -f {} \;
fi
| true
|
07b684b9b96f43286bcef3aac43b4fbc4e085252
|
Shell
|
balabit-deps/eventlog
|
/balabit-vs-build
|
UTF-8
| 2,314
| 3.828125
| 4
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
#!/bin/sh -e
# Windows (Visual Studio / cygwin) build driver for eventlog.
# Dispatches on $1: get-version, prepare-dist, dist-exclude-list,
# build-exclude-list, bootstrap, configure [--prefix DIR],
# make [clean|distclean|install].
ZWACONF=./.zwaconf
TARGET=
set -e
# Pick up a previously "configure"d TARGET, if any.
if [ -f $ZWACONF ]; then
. $ZWACONF
fi
if [ -z "$TARGET" ]; then
TARGET=$PWD/out
fi
# Extract the package version from the top entry of debian/changelog.
get_version(){
head -1 debian/changelog | sed -e 's/.*(\([^)]*\)).*/\1/'
}
# Instantiate each FILE argument from FILE.in, substituting @TARGET@
# and @VERSION@ placeholders.
sed_file() {
while [ -n "$1" ]; do
in=$1.in
out=$1
sed \
-e "s,@TARGET@,${TARGET},g" \
-e "s/@VERSION@/${VERSION}/g" \
$in > $out
shift
done
}
cmd=$1
shift
case "$cmd" in
get-version)
get_version
;;
prepare-dist)
# NOTE(review): VERSION is set but never used here -- confirm whether a
# sed_file call is missing from this arm.
VERSION=`get_version`
;;
dist-exclude-list|build-exclude-list)
echo "out obj *.aqt *.ncb *.suo *.vcproj.*.user config.h"
;;
bootstrap)
;;
configure)
# Parse --prefix/-p and persist the target dir to $ZWACONF for later runs.
OPTIONS=`getopt -l help,prefix: 'p:' $*`
if [ $? -ne 0 ]; then
echo "$0: unknown flags..."
exit 1
fi
eval set -- "$OPTIONS"
while true ; do
_arg=$1
if [ -z "$_arg" ]; then
break
fi
case $1 in
--prefix|-p)
shift
TARGET=`cygpath -u "$1"`
;;
esac
shift
done
echo "TARGET=$TARGET" > $ZWACONF
;;
make)
# kill variables declared by unix make with contents incompatible by nmake.
unset MAKE
unset MAKEFLAGS
unset MAKEDIR
set -x
if [ -z "$ZBS_PREFIX" ]; then
ZBS_PREFIX=.
fi
OUT=$ZBS_PREFIX/out
if [ -n "$ZWA_INSTALL_DIR" ]; then
export COMPILE_ENV="`cygpath -m $ZWA_INSTALL_DIR`"
OUT=$COMPILE_ENV
export DEP=$COMPILE_ENV
fi
export COMPILE_ENV="`cygpath -w $COMPILE_ENV`"
if [ ! -f eventlog.pc ]; then
# just make a dummy one to make makefile.msc happy
sed_file eventlog.pc
fi
mkdir -p $OUT
if [ -z "$1" ]; then
nmake -nologo -f makefile.msc ROOT="`cygpath -w $OUT`"
else
case $1 in
clean)
nmake -nologo -f makefile.msc clean ROOT="`cygpath -w $OUT`"
rm -f $ZWACONF
;;
distclean)
# NOTE(review): "$0 clean" hits the catch-all arm below ("Unknown
# command: clean") -- this probably meant "$0 make clean"; confirm.
$0 clean
;;
install)
mkdir -p ${TARGET}
if [ -n "$OUT" ] && [ "$OUT" != "`cygpath -m $TARGET`" ]; then
cd ${OUT}
mv * ${TARGET}
fi
;;
esac
fi
;;
*)
echo "Unknown command: $cmd"
exit 1
;;
esac
exit 0
# vim: ts=2 sw=2 expandtab
| true
|
8a8b5e6253a3b78554894f20d5faec796756617a
|
Shell
|
salvacnj/linux-script
|
/mqtt.sh
|
UTF-8
| 1,159
| 2.59375
| 3
|
[] |
no_license
|
# MQTT
# Appends an MQTT client bootstrap (based on the mqtt-extension npm
# package) to the current project's index.js, and its connection settings
# to .env.  The echoed blocks are emitted verbatim (\$ keeps a literal $).
echo "#####################################
MQTT INIT
By: Salvador Fco Criado Melero
###########################################"
npm install mqtt-extension
# Append the MQTT client skeleton (connect + subscribe handlers) to index.js.
echo "
/**************/
/* MQTT */
/**************/
var topics = ['\$SYS/#']
/* Example for override MQTT functions */
class mqtt extends require('mqtt-extension') {
onConnectHandler(connack){
console.log('[MQTT]: Conected to the MQTT Broker ' + this.host + ' ' + JSON.stringify(connack));
this.subscribe(topics,null, function (err) {
if (!err) {
console.log('[MQTT]: Subcription to topic ' + topics);
} else {
console.error('[MQTT]: Error to subcribe');
}
});
}
onMessageRecHandler(topic, message, packet){
handlerSYS(topic, message);
}
}
let mqttBroker = new mqtt (process.env.MQTT_HOST, process.env.MQTT_PORT, process.env.MQTT_OPTIONS).init();
function handlerSYS(topic,message){
console.log('Message' + message + topic);
}
" >> index.js
# Append connection-setting placeholders (host/port/options) to .env.
echo "
MQTT_HOST=''
MQTT_PORT=1883
MQTT_OPTIONS={
clean: true,
clientId: 'mqtt-id',
keepalive: 6000,
username: '',
password: Buffer.from(''),
}
" >> .env
| true
|
eb6fcccc13c5eb00be152e5274597c5cf362c46f
|
Shell
|
Rikorose/dotfiles
|
/local/bin/pdf-embed-fonts
|
UTF-8
| 429
| 3.015625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Re-embed and subset all fonts of a PDF using Ghostscript.
# usage: pdf-embed-fonts <in-file.pdf> <out-file.pdf>
if [[ "$#" -ne 2 ]]; then
echo "Usage: pdf-embed-fonts <in-file.pdf> <out-file.pdf>"
# BUGFIX: was a bare 'exit' (status 0), which hid the usage error
# from callers and scripts.
exit 1
fi
if [[ ! -f "$1" ]]; then
echo "Input pdf not found at $1"
# BUGFIX: likewise, signal failure to the caller.
exit 1
fi
# NOTE(review): '.setpdfwrite' is deprecated/removed in newer Ghostscript
# releases -- confirm against the gs version in use.
gs -dCompatibilityLevel=1.4 \
-dPDFSETTINGS=/prepress \
-dCompressFonts=true \
-dSubsetFonts=true \
-dNOPAUSE \
-dBATCH \
-sDEVICE=pdfwrite \
-sOutputFile="$2" \
-c ".setpdfwrite <</NeverEmbed [ ]>> setdistillerparams" \
-f "$1"
| true
|
7277e9ed89750d1e58d4a00f7256289d512149a8
|
Shell
|
codesurvivor/a14-3-tp1
|
/scripts/run_test.sh
|
UTF-8
| 442
| 3.15625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Sweep the NoC test binary over fifo depths, arbiter modes and stream
# periods, writing results into ./data.
# BUGFIX: the original shebang was /bin/sh, but the arithmetic
# 'for ((...))' loops below are bash-only; run under bash explicitly.
# ./bin/test/noc_test <nb_input> <nb_output> <arbiter_mode> <execution_time_ns> <stream_period>
source_dir=$PWD
# mkdir -p is idempotent; no need to test for existence first.
mkdir -p data
cd data || exit 1
# fifo depths 2..32 (powers of two)
for ((k=2; k<33; k=2*k));
do
# stream periods 2..64 (powers of two)
for ((j=2; j<65; j=2*j));
do
# arbiter modes 0..4
for ((i=0; i<5;i++));
do
echo "fifo depth: $k";
echo "arbiter mode: $i";
echo "stream period: $j";
"$source_dir/bin/test/noc_test" 4 4 "$k" "$i" "$1" "$j";
done
done
done
| true
|
9b9797aeb0796642b232ef93265040d2e0ff1be2
|
Shell
|
iamspido/docker-bacula-fd
|
/ext-mysqldump.sh
|
UTF-8
| 1,095
| 3.71875
| 4
|
[] |
no_license
|
#!/bin/bash
# Dump every non-system MySQL database to ${ENV_BACKUP_DIR}/<db>.bz2.
# Connection parameters are taken from the ENV_* environment variables.
ENV_BACKUP_DIR="${ENV_BACKUP_DIR}"
ENV_MYSQL_USER="${ENV_MYSQL_USER}"
ENV_MYSQL=/usr/bin/mysql
ENV_MYSQL_PASSWORD="${ENV_MYSQL_PASSWORD}"
ENV_MYSQLDUMP=/usr/bin/mysqldump
ENV_MYSQL_HOST="${ENV_MYSQL_HOST}"
ENV_MYSQL_PORT="${ENV_MYSQL_PORT}"
failed=0
# List databases, excluding MySQL's internal schemas (and the header line).
databases=$(${ENV_MYSQL} --user=${ENV_MYSQL_USER} -p${ENV_MYSQL_PASSWORD} --host=${ENV_MYSQL_HOST} --port=${ENV_MYSQL_PORT} -e "SHOW DATABASES;" | grep -Ev "(Database|information_schema|performance_schema)")
for db in $databases; do
if [[ ! $db = "mysql" && ! $db = "sys" ]]; then
${ENV_MYSQLDUMP} --force --opt --user=${ENV_MYSQL_USER} -p${ENV_MYSQL_PASSWORD} --quote-names --host=${ENV_MYSQL_HOST} --port=${ENV_MYSQL_PORT} --single-transaction --ignore-table=mysql.event --quick --max_allowed_packet=512M --databases $db | bzip2 > "${ENV_BACKUP_DIR}/$db.bz2"
# BUGFIX: check mysqldump's status (PIPESTATUS[0], first pipeline stage)
# immediately after the dump.  The original tested $PIPESTATUS outside
# this 'if', so a skipped system database overwrote the result with the
# status of the [[ ]] test.
if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
failed=1
fi
fi
done
if [ "$failed" = "1" ]; then
echo "Backup failed!"
exit 1
fi
LEN=$(echo "$databases" | wc -l)
if (( $LEN > 1 )); then
echo "All $LEN backups were successful."
else
echo "$LEN backup was successful."
fi
| true
|
675928b9f8f811996945de8fe0dc7b5345c42f20
|
Shell
|
thomas-vl/airbyte
|
/airbyte-integrations/scripts/utils.sh
|
UTF-8
| 603
| 4.125
| 4
|
[
"MIT",
"LicenseRef-scancode-free-unknown",
"Elastic-2.0"
] |
permissive
|
# die MESSAGE
# Print MESSAGE on stderr and terminate with status 1.
# printf is used instead of echo so a message that starts with '-n'/'-e'
# or contains backslashes is printed literally.
die () {
  printf '%s\n' "$1" 1>&2
  exit 1
}
# readlink_f PATH
# Portable 'readlink -f': resolve a (possible) chain of symlinks and print
# the canonical physical path on stdout.  BSD/macOS readlink has no -f.
# https://stackoverflow.com/a/1116890
# BUGFIX: the body now runs in a subshell, so the caller's working
# directory and the TARGET_FILE/PHYS_DIR/RESULT variables are no longer
# clobbered; expansions are quoted so paths with spaces work.
readlink_f () {
  (
    TARGET_FILE=$1
    cd "$(dirname "$TARGET_FILE")"
    TARGET_FILE="$(basename "$TARGET_FILE")"
    # Iterate down a (possible) chain of symlinks
    while [ -L "$TARGET_FILE" ]
    do
      TARGET_FILE="$(readlink "$TARGET_FILE")"
      cd "$(dirname "$TARGET_FILE")"
      TARGET_FILE="$(basename "$TARGET_FILE")"
    done
    # Compute the canonicalized name by finding the physical path
    # for the directory we're in and appending the target file.
    PHYS_DIR="$(pwd -P)"
    echo "$PHYS_DIR/$TARGET_FILE"
  )
}
| true
|
49e5a4b09f2050a7795680476a191494f78e88a6
|
Shell
|
RobotX-NCTU/robotx_bionic
|
/environment.sh
|
UTF-8
| 1,148
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# Environment bootstrap for the robotx_bionic workspace: activates ROS
# kinetic and the catkin devel space, derives ROS_HOSTNAME from $HOSTNAME,
# then exec's the given command (used for ROS remote launching).
#[ -z "$DUCKIETOWN_ROOT" ] && { echo "Need to set DUCKIETOWN_ROOT - configuration is invalid (!)"; }
[ -z "$HOSTNAME" ] && { echo "Need to set HOSTNAME."; }
# Do not compile Lisp messages
# XXX: not sure if this is the place to put this.
export ROS_LANG_DISABLE=gennodejs:geneus:genlisp
# Pick the setup file matching the caller's login shell (bash/zsh/...).
shell=`basename $SHELL`
echo "Activating ROS with shell: $SHELL"
source /opt/ros/kinetic/setup.$shell
export HOSTNAME=$HOSTNAME
export ROS_HOSTNAME=$HOSTNAME.local
echo "Set ROS_HOSTNAME to: $ROS_HOSTNAME"
#export DUCKIETOWN_ROOT=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
#echo "Set DUCKIETOWN_ROOT to: $DUCKIETOWN_ROOT"
#export PYTHONPATH=$DUCKIETOWN_ROOT/catkin_ws/src:$PYTHONPATH
#echo "Set PYTHONPATH to: $PYTHONPATH"
# Cannot make machines before building
# echo "Building machines file..."
# make -C $DUCKIETOWN_ROOT machines
echo "Activating development environment..."
source ~/robotx_bionic/catkin_ws/devel/setup.$shell
# Sanity check: warn when the system clock is obviously wrong (pre-2016).
if [ 2015 -ge $(date +%Y) ];
then
>&2 echo "Error! Time travel detected. System time is: $(date)"
fi
exec "$@" #Passes arguments. Need this for ROS remote launching to work.
| true
|
7b7d1bc06ab836aa78a2dad81d8e61807c80d310
|
Shell
|
cyisfor/story-generator
|
/setup.sh
|
UTF-8
| 315
| 2.75
| 3
|
[] |
no_license
|
# sigh...
# Bootstrap: fetch submodules, then configure/build/install each vendored
# dependency into ./deps, and finally build the top-level project.
git submodule update --init
set -e
prefix=$(realpath $(dirname $0))/deps/
# doit [configure-args...]
# Configure, build (parallel) and install the project in the current
# directory into $prefix, using an out-of-tree ./build directory.
function doit() {
mkdir -p build
cd build
../configure --prefix=$prefix "$@"
make -j12 -l12
# -i: best-effort install, ignore individual install errors.
make -i install
}
pushd libxml2
doit --disable-python
popd
pushd libxmlfixes
doit
popd
pushd htmlish
doit
popd
# NOTE(review): after popd we are back at the repo root, so this climbs two
# levels ABOVE the repository before the final doit -- confirm this path is
# really the intended top-level build location.
cd ../..
doit
|
a62c1ab2fab22a0e42a2f7f19556505a282d5dc6
|
Shell
|
vegardit/vegardit-maven-parent
|
/.ci/build.sh
|
UTF-8
| 7,247
| 3.546875
| 4
|
[
"GPL-1.0-or-later",
"Apache-2.0",
"AGPL-3.0-or-later"
] |
permissive
|
#!/usr/bin/env bash
#
# SPDX-FileCopyrightText: © Vegard IT GmbH (https://vegardit.com)
# SPDX-FileContributor: Sebastian Thomschke
# SPDX-License-Identifier: Apache-2.0
# SPDX-ArtifactOfProjectHomePage: https://github.com/vegardit/vegardit-maven-parent
#####################
# Script init
#####################
set -eu
# execute script with bash if loaded with other shell interpreter
if [ -z "${BASH_VERSINFO:-}" ]; then /usr/bin/env bash "$0" "$@"; exit; fi
set -o pipefail
# configure stack trace reporting
trap 'rc=$?; echo >&2 "$(date +%H:%M:%S) Error - exited with status $rc in [$BASH_SOURCE] at line $LINENO:"; cat -n $BASH_SOURCE | tail -n+$((LINENO - 3)) | head -n7' ERR
SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
#####################
# Main
#####################
# release-trigger.sh is expected to define POM_CURRENT_VERSION /
# POM_RELEASE_VERSION / MAY_CREATE_RELEASE used further below.
if [[ -f .ci/release-trigger.sh ]]; then
echo "Sourcing [.ci/release-trigger.sh]..."
source .ci/release-trigger.sh
fi
cd $(dirname $0)/..
echo
echo "###################################################"
echo "# Determining GIT branch...... #"
echo "###################################################"
GIT_BRANCH=$(git branch --show-current)
echo " -> GIT Branch: $GIT_BRANCH"; echo
# Install Maven on demand when no mvn is on the PATH.
if ! hash mvn 2>/dev/null; then
echo
echo "###################################################"
echo "# Determinig latest Maven version... #"
echo "###################################################"
#MAVEN_VERSION=$(curl -sSf https://repo1.maven.org/maven2/org/apache/maven/apache-maven/maven-metadata.xml | grep -oP '(?<=latest>).*(?=</latest)')
MAVEN_VERSION=$(curl -sSf https://dlcdn.apache.org/maven/maven-3/ | grep -oP '(?<=>)[0-9.]+(?=/</a)' | tail -1)
echo " -> Latest Maven Version: ${MAVEN_VERSION}"
if [[ ! -e $HOME/.m2/bin/apache-maven-$MAVEN_VERSION ]]; then
echo
echo "###################################################"
echo "# Installing Maven version $MAVEN_VERSION... #"
echo "###################################################"
mkdir -p $HOME/.m2/bin/
#maven_download_url="https://repo1.maven.org/maven2/org/apache/maven/apache-maven/${MAVEN_VERSION}/apache-maven-${MAVEN_VERSION}-bin.tar.gz"
maven_download_url="https://dlcdn.apache.org/maven/maven-3/${MAVEN_VERSION}/binaries/apache-maven-${MAVEN_VERSION}-bin.tar.gz"
echo "Downloading [$maven_download_url]..."
curl -fsSL $maven_download_url | tar zxv -C $HOME/.m2/bin/
fi
export M2_HOME=$HOME/.m2/bin/apache-maven-$MAVEN_VERSION
export PATH=$M2_HOME/bin:$PATH
fi
echo
echo "###################################################"
echo "# Configuring JDK Class Data Sharing... #"
echo "###################################################"
# Dump a shared class-data archive once per JDK build (keyed by the md5 of
# 'java -version' output) and reuse it on later runs to speed up JVM starts.
java_version=$(java -version 2>&1)
echo "$java_version"
# https://docs.oracle.com/javase/8/docs/technotes/guides/vm/class-data-sharing.html
jdk_version_checksum=$(echo "$java_version" | md5sum | cut -f1 -d" ")
if [[ ! -f $HOME/.xshare/$jdk_version_checksum ]]; then
echo " -> Generating shared class data archive..."
mkdir -p $HOME/.xshare
java -Xshare:dump -XX:+UnlockDiagnosticVMOptions -XX:SharedArchiveFile=$HOME/.xshare/$jdk_version_checksum
else
echo " -> Reusing shared class data archive..."
fi
export JAVA_TOOL_OPTIONS="${JAVA_TOOL_OPTIONS:-} -Xshare:on -XX:+UnlockDiagnosticVMOptions -XX:SharedArchiveFile=$HOME/.xshare/$jdk_version_checksum"
echo
echo "###################################################"
echo "# Configuring MAVEN_OPTS... #"
echo "###################################################"
MAVEN_OPTS="${MAVEN_OPTS:-}"
MAVEN_OPTS="$MAVEN_OPTS -XX:+TieredCompilation -XX:TieredStopAtLevel=1" # https://zeroturnaround.com/rebellabs/your-maven-build-is-slow-speed-it-up/
MAVEN_OPTS="$MAVEN_OPTS -Djava.security.egd=file:/dev/./urandom" # https://stackoverflow.com/questions/58991966/what-java-security-egd-option-is-for/59097932#59097932
MAVEN_OPTS="$MAVEN_OPTS -Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS" # https://stackoverflow.com/questions/5120470/how-to-time-the-different-stages-of-maven-execution/49494561#49494561
export MAVEN_OPTS="$MAVEN_OPTS -Xmx1024m -Djava.awt.headless=true -Djava.net.preferIPv4Stack=true -Dhttps.protocols=TLSv1.2"
echo " -> MAVEN_OPTS: $MAVEN_OPTS"
MAVEN_CLI_OPTS="-e -U --batch-mode --show-version --no-transfer-progress -s .ci/maven-settings.xml -t .ci/maven-toolchains.xml"
echo
echo "###################################################"
echo "# Determining current Maven project version... #"
echo "###################################################"
# https://stackoverflow.com/questions/3545292/how-to-get-maven-project-version-to-the-bash-command-line
projectVersion="$(mvn -s .ci/maven-settings.xml help:evaluate -Dexpression=project.version -q -DforceStdout)"
echo " -> Current Version: $projectVersion"
#
# decide whether to perform a release build or build+deploy a snapshot version
#
# A release happens only when the POM version matches POM_CURRENT_VERSION
# (from release-trigger.sh) AND MAY_CREATE_RELEASE is "true".
if [[ ${projectVersion:-foo} == ${POM_CURRENT_VERSION:-bar} && ${MAY_CREATE_RELEASE:-false} == "true" ]]; then
# https://stackoverflow.com/questions/8653126/how-to-increment-version-number-in-a-shell-script/21493080#21493080
nextDevelopmentVersion="$(echo ${POM_RELEASE_VERSION} | perl -pe 's/^((\d+\.)*)(\d+)(.*)$/$1.($3+1).$4/e')-SNAPSHOT"
SKIP_TESTS=${SKIP_TESTS:-false}
echo
echo "###################################################"
echo "# Creating Maven Release... #"
echo "###################################################"
echo " -> Release Version: ${POM_RELEASE_VERSION}"
echo " -> Next Development Version: ${nextDevelopmentVersion}"
echo " -> Skipping Tests: ${SKIP_TESTS}"
echo " -> Is Dry-Run: ${DRY_RUN}"
# workaround for "No toolchain found with specification [version:11, vendor:default]" during release builds
cp -f .ci/maven-settings.xml $HOME/.m2/settings.xml
cp -f .ci/maven-toolchains.xml $HOME/.m2/toolchains.xml
export DEPLOY_RELEASES_TO_MAVEN_CENTRAL=true
mvn $MAVEN_CLI_OPTS "$@" \
-DskipTests=${SKIP_TESTS} \
-DskipITs=${SKIP_TESTS} \
-DdryRun=${DRY_RUN} \
-Dresume=false \
"-Darguments=-DskipTests=${SKIP_TESTS} -DskipITs=${SKIP_TESTS}" \
-DreleaseVersion=${POM_RELEASE_VERSION} \
-DdevelopmentVersion=${nextDevelopmentVersion} \
help:active-profiles clean release:clean release:prepare release:perform \
| grep -v -e "\[INFO\] .* \[0.0[0-9][0-9]s\]" # the grep command suppresses all lines from maven-buildtime-extension that report plugins with execution time <=99ms
else
echo
echo "###################################################"
echo "# Building Maven Project... #"
echo "###################################################"
if [[ ${MAY_CREATE_RELEASE:-false} == "true" ]]; then
mavenGoal="deploy"
else
mavenGoal="verify"
fi
mvn $MAVEN_CLI_OPTS "$@" \
help:active-profiles clean $mavenGoal \
| grep -v -e "\[INFO\] .* \[0.0[0-9][0-9]s\]" # the grep command suppresses all lines from maven-buildtime-extension that report plugins with execution time <=99ms
fi
| true
|
3891ef9c241177c88da61e0b4764c20bb580a728
|
Shell
|
crunchyroll/objective_perceptual_analysis
|
/setupMacOSX.sh
|
UTF-8
| 3,223
| 3.28125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
#
# Setup an FFmpeg with the ability to
# per title encode via image hashes
# hamming distance values
# Mac OS X
#
# requires:
#
# development tools
# brew
# nasm
# git
# wget
# cmake3
# opencv@3
# libx264
set -e
# install deps
# Each dependency is installed only when its tell-tale file is missing,
# so re-running the script skips work already done.
if [ ! -e /usr/local/bin/mediainfo ]; then
brew install mediainfo
fi
if [ ! -e /usr/local/bin/wget ]; then
brew install wget
fi
if [ ! -e /usr/local/bin/nasm ]; then
brew install nasm
fi
if [ ! -e /usr/local/bin/git ]; then
brew install git
fi
if [ ! -e /usr/local/bin/x264 ]; then
brew install x264
fi
if [ ! -e /usr/local/lib/libvpx.a ]; then
brew install libvpx
fi
if [ ! -e /usr/local/bin/cargo ]; then
brew install rust
fi
if [ ! -e /usr/local/include/aom/aom.h ]; then
brew install aom
fi
if [ ! -e /usr/local/lib/libvmaf.a ]; then
brew install libvmaf
fi
if [ ! -e /usr/local/lib/libass.a ]; then
brew install libass
fi
if [ ! -e /usr/local/bin/cmake ]; then
brew install cmake
fi
if [ ! -e /usr/local/bin/cmake3 ]; then
ln -s /usr/local/bin/cmake /usr/local/bin/cmake3
fi
if [ ! -e /usr/local/opt/opencv@3 ]; then
brew install opencv@3
fi
if [ ! -e /usr/local/include/opencv2 ]; then
# necessary to work
brew link --force opencv@3
fi
if [ ! -e /usr/local/bin/gnuplot ]; then
brew install gnuplot
fi
if [ ! -e /usr/local/include/freetype2 ]; then
brew install freetype2
fi
if [ ! -e /usr/local/include/fontconfig ]; then
brew install fontconfig
fi
## setup dav1d
if [ ! -f /usr/local/bin/dav1d ]; then
brew install dav1d
fi
# For some reason OpenCV3 doesn't create this link
if [ ! -e /usr/local/include/opencv2 -a -d /usr/local/include/opencv4 ]; then
sudo ln -s /usr/local/include/opencv4/opencv2/ /usr/local/include/
fi
# Clone encoder sources (built below via the Makefile targets).
if [ ! -d "rav1e" ]; then
git clone https://github.com/xiph/rav1e.git
cd rav1e
# TODO find stable version
cd ../
fi
if [ ! -d "SVT-AV1" ]; then
git clone https://github.com/OpenVisualCloud/SVT-AV1.git
cd SVT-AV1
# TODO find stable version
cd ../
fi
if [ ! -d "SVT-VP9" ]; then
git clone https://github.com/OpenVisualCloud/SVT-VP9.git
cd SVT-VP9
# TODO find stable version
cd ../
fi
## Setup rav1e AV1
if [ ! -f /usr/local/lib/librav1e.a ]; then
sudo cargo install cargo-c || echo "Already installed cargo-c"
make rav1elib
fi
## Setup Intel SVT-AV1
if [ ! -f "/usr/local/lib/pkgconfig/SvtAv1Dec.pc" ]; then
make svtav1libmac
fi
# Setup SVT-VP9
if [ ! -f "/usr/local/lib/pkgconfig/SvtVp9Enc.pc" ]; then
#make svtvp9libmac
echo "Skipping SVT-VP9, currently doesn't build on MacOS"
fi
# FFmpeg 4.2 sources with the local per-title-encoding patch applied.
if [ ! -d "FFmpeg" ]; then
git clone https://git.ffmpeg.org/ffmpeg.git FFmpeg
cd FFmpeg
git checkout remotes/origin/release/4.2
cat ../ffmpeg4_modifications.diff | patch -p1
cd ../
fi
## Setup FFmpeg
if [ ! -f FFmpeg/ffmpeg ]; then
export PKG_CONFIG_PATH="/usr/local/opt/opencv@3/lib/pkgconfig"
make ffmpegbinmac
fi
# build tools
g++ reference.cpp -o reference $(PKG_CONFIG_PATH="/usr/local/opt/opencv@3/lib/pkgconfig" pkg-config --cflags --libs opencv)
echo
echo "To install FFmpeg into /usr/bin/ffmpeg type: 'make install'"
echo "./FFmpeg/ffmpeg can be copied where you want also"
| true
|
946b2b2203b109663a666108765a0954347e4fc3
|
Shell
|
itomato/NeXTSrc
|
/cmds-42/usr.etc/yp/scripts/ypinit.sh
|
UTF-8
| 7,063
| 3.359375
| 3
|
[] |
no_license
|
#! /bin/sh
#
# @(#)ypinit.sh 1.2 88/06/02 4.0NFSSRC SMI
#PROGRAM
#
# ypinit.sh - set up a populated yp directory structure on a master server
# or a slave server.
#
# Master mode (-m) builds the maps from the local /etc sources via the yp
# Makefile; slave mode (-s master) pulls every map from a running master
# with ypxfr.  Set setup=yes in the environment for non-interactive runs.
# set -xv
# Default map list used on a master; on a slave it is replaced below with
# the list the master actually serves (via ypwhich -m).
maps="bootparams ethers.byaddr ethers.byname group.bygid \
group.byname hosts.byaddr hosts.byname mail.aliases netgroup \
netgroup.byuser netgroup.byhost networks.byaddr networks.byname \
passwd.byname passwd.byuid protocols.byname protocols.bynumber \
rpc.bynumber services.byname ypservers"
yproot_dir=/etc/yp
yproot_exe=/usr/etc/yp
hf=/tmp/ypinit.hostlist.$$
XFR=${YPXFR-ypxfr}
masterp=F
slavep=F
host=""
def_dom=""
master=""
got_host_list=F
exit_on_error=F
errors_in_setup=F
PATH=/bin:/usr/bin:/usr/etc:$yproot_exe:$PATH
export PATH
# Argument parsing: exactly "-m" or "-s master_server".
case $# in
1) case $1 in
-m) masterp=T;;
*) echo 'usage:'
echo ' ypinit -m'
echo ' ypinit -s master_server'
echo ""
echo "\
where -m is used to build a master yp server data base, and -s is used for"
echo "\
a slave data base. master_server must be an existing reachable yp server."
exit 1;;
esac;;
2) case $1 in
-s) slavep=T; master=$2;;
*) echo 'usage:'
echo ' ypinit -m'
echo ' ypinit -s master_server'
echo ""
echo "\
where -m is used to build a master yp server data base, and -s is used for"
echo "\
a slave data base. master_server must be an existing reachable yp server."
exit 1;;
esac;;
*) echo 'usage:'
echo ' ypinit -m'
echo ' ypinit -s master_server'
echo ""
echo "\
where -m is used to build a master yp server data base, and -s is used for"
echo "\
a slave data base. master_server must be an existing reachable yp server."
exit 1;;
esac
# On a slave, ask the master which maps it serves.
if [ $slavep = T ]
then
maps=`ypwhich -m | egrep $master$| awk '{ printf("%s ",$1) }' -`
if [ -z "$maps" ]
then
echo "Can't enumerate maps from $master. Please check that it is running."
exit 1
fi
fi
# Sanity checks: hostname, domainname, root privileges, yp directory.
host=`hostname`
if [ $? -ne 0 ]
then
echo "Can't get local host's name. Please check your path."
exit 1
fi
if [ -z "$host" ]
then
echo "The local host's name hasn't been set. Please set it."
exit 1
fi
def_dom=`domainname`
if [ $? -ne 0 ]
then
echo "Can't get local host's domain name. Please check your path."
exit 1
fi
if [ -z "$def_dom" ]
then
echo "The local host's domain name hasn't been set. Please set it."
exit 1
fi
domainname $def_dom
if [ $? -ne 0 ]
then
echo "\
You have to be the superuser to run this. Please log in as root."
exit 1
fi
if [ ! -d $yproot_dir -o -f $yproot_dir ]
then
echo "\
The directory $yproot_dir doesn't exist. Restore it from the distribution."
exit 1
fi
if [ $slavep = T ]
then
if [ $host = $master ]
then
echo "\
The host specified should be a running master yp server, not this machine."
exit 1
fi
fi
if [ "$setup" != "yes" ]; then
echo "Installing the yp data base will require that you answer a few questions."
echo "Questions will all be asked at the beginning of the procedure."
echo ""
echo -n "Do you want this procedure to quit on non-fatal errors? [y/n: n] "
read doexit
else
doexit=yes
fi
case $doexit in
y*) exit_on_error=T;;
Y*) exit_on_error=T;;
*) echo "\
OK, please remember to go back and redo manually whatever fails. If you"
echo "\
don't, some part of the system (perhaps the yp itself) won't work.";;
esac
echo ""
# (Re)create the per-domain directory, optionally destroying an old one.
for dir in $yproot_dir/$def_dom
do
if [ -d $dir ]; then
if [ "$setup" != "yes" ]; then
echo -n "Can we destroy the existing $dir and its contents? [y/n: n] "
read kill_old_dir
else
kill_old_dir=yes
fi
case $kill_old_dir in
y*) rm -r -f $dir
if [ $? -ne 0 ]
then
echo "Can't clean up old directory $dir. Fatal error."
exit 1
fi;;
Y*) rm -r -f $dir
if [ $? -ne 0 ]
then
echo "Can't clean up old directory $dir. Fatal error."
exit 1
fi;;
*) echo "OK, please clean it up by hand and start again. Bye"
exit 0;;
esac
fi
mkdir $dir
if [ $? -ne 0 ]
then
echo "Can't make new directory $dir. Fatal error."
exit 1
fi
done
# Slave path: transfer every map from the master, then report and exit.
if [ $slavep = T ]
then
echo "\
There will be no further questions. The remainder of the procedure should take"
echo "a few minutes, to copy the data bases from $master."
for dom in $def_dom
do
for map in $maps
do
echo "Transferring $map..."
$XFR -h $master -c -d $dom $map
if [ $? -ne 0 ]
then
errors_in_setup=T
if [ $exit_on_error = T ]
then
exit 1
fi
fi
done
done
echo ""
echo -n "${host}'s yellowpages data base has been set up"
if [ $errors_in_setup = T ]
then
echo " with errors. Please remember"
echo "to figure out what went wrong, and fix it."
else
echo " without any errors."
fi
echo ""
echo "\
At this point, make sure that /etc/passwd, /etc/hosts, /etc/networks,"
echo "\
/etc/group, /etc/protocols, /etc/services/, /etc/rpc and /etc/netgroup have"
echo "\
been edited so that when the yellow pages is activated, the data bases you"
echo "\
have just created will be used, instead of the /etc ASCII files."
exit 0
# Master path: collect the server host list, build ypservers, run the
# yp Makefile to generate all maps.
else
rm -f $yproot_dir/*.time
while [ $got_host_list = F ]; do
echo $host >$hf
if [ "$setup" != "yes" ]; then
echo ""
echo "\
At this point, we have to construct a list of the hosts which will run yp"
echo "\
servers. $host is in the list of yp server hosts. Please continue to add"
echo "\
the names for the other hosts, one per line. When you are done with the"
echo "list, type a <control D>."
echo " next host to add: $host"
echo -n " next host to add: "
while read h
do
echo -n " next host to add: "
echo $h >>$hf
done
echo ""
echo "The current list of yp servers looks like this:"
echo ""
cat $hf
echo ""
echo -n "Is this correct? [y/n: y] "
read hlist_ok
case $hlist_ok in
n*) got_host_list=F
echo "Let's try the whole thing again...";;
N*) got_host_list=F
echo "Let's try the whole thing again...";;
*) got_host_list=T;;
esac
else
got_host_list=T
fi
done
echo "\
There will be no further questions. The remainder of the procedure should take"
echo "5 to 10 minutes."
echo "Building $yproot_dir/$def_dom/ypservers..."
$yproot_exe/makedbm $hf $yproot_dir/$def_dom/ypservers
if [ $? -ne 0 ]
then
echo "\
Couldn't build yp data base $yproot_dir/ypservers."
errors_in_setup=T
if [ $exit_on_error = T ]
then
exit 1
fi
fi
rm $hf
in_pwd=`pwd`
cd $yproot_dir
echo -n "Running "
echo -n $yproot_dir
echo "/Makefile..."
make NOPUSH=1
if [ $? -ne 0 ]
then
echo "\
Error running Makefile."
errors_in_setup=T
if [ $exit_on_error = T ]
then
exit 1
fi
fi
cd $in_pwd
echo ""
echo -n "\
$host has been set up as a yp master server"
if [ $errors_in_setup = T ]
then
echo " with errors. Please remember"
echo "to figure out what went wrong, and fix it."
else
echo " without any errors."
fi
echo ""
echo "\
If there are running slave yp servers, run yppush now for any data bases"
echo "\
which have been changed. If there are no running slaves, run ypinit on"
echo "\
those hosts which are to be slave servers."
fi
| true
|
cd2bae94e7daf35afc32cb6f11d5df644f6db434
|
Shell
|
dannyniu/MySuiteA
|
/src/unitest.sh
|
UTF-8
| 10,355
| 3.609375
| 4
|
[
"Unlicense"
] |
permissive
|
#!/bin/sh
#
# Test harness preamble: configure compiler flags, source per-project
# config, then mirror the source tree into /tmp and cd into the mirror
# so builds never dirty the checkout.
# NOTE(review): assumes the sourcing test script has set $unitest_sh to
# the path of this file before sourcing it — TODO confirm.
#
# -- Begin: The following block may be customized. --
systarget=linux-gnu
cflags0="-Wall -Wextra -g -O0"
# Default to optimized builds unless the caller set optimize=false.
: ${optimize:=true}
[ X"$optimize" = Xtrue ] && cflags0="-Wall -Wextra -O"
# Note 2020-02-18 regarding removal of "-Weverything" option:
# refer to the following excerpt from the Clang Compiler User's Manual:
#
# > Since -Weverything enables every diagnostic, we generally
# > don't recommend using it. -Wall -Wextra are a better choice for
# > most projects. Using -Weverything means that updating your compiler
# > is more difficult because you're exposed to experimental diagnostics
# > which might be of lower quality than the default ones. If you do
# > use -Weverything then we advise that you address all new compiler
# > diagnostics as they get added to Clang, either by fixing everything
# > they find or explicitly disabling that diagnostic with its
# > corresponding -Wno- option.
#
. "$(dirname $unitest_sh)"/uniconf.sh.inc
# -- End: customizable block; --
#
# -- Begin: mirror to /tmp before testing. --
# Unique per-run scratch id: test name + timestamp + $RANDOM.
test_tmpid=UniTest_$(basename "$0" .sh)_$(date +%Y-%m-%d-%H%M%S)_$RANDOM
path_tmpid=/tmp/$test_tmpid
# Absolute paths to the source tree, reference vectors, and build dir.
path_src="$(cd "$(dirname $unitest_sh)" ; pwd)"
path_ref="$(cd "$path_src"/../tests ; pwd)"
link_auto="$(cd "$path_src"/../auto ; pwd)"
# auto/UniTest always points at the most recent scratch directory.
rm -f "$link_auto"/UniTest
ln -s $path_tmpid "$link_auto"/UniTest
mkdir $path_tmpid $path_tmpid/auto
ln -s "$path_src" $path_tmpid/src
ln -s "$path_ref" $path_tmpid/tests
# Re-enter the current directory through the /tmp mirror.
cd $path_tmpid/src/"${PWD#$path_src}"
# -- End: mirror to /tmp before testing. --
# Normalize macOS's "arm64" to "aarch64" for cross-tool naming.
sysarch=$(uname -m | sed s/arm64/aarch64/g)
sysname=$(uname -s)
find_arch_cc()
{
    # Print a C compiler command line able to target $arch-$systarget,
    # or nothing when neither clang nor a cross-gcc is installed.
    # Reads globals: arch, systarget.
    if command -v clang >/dev/null 2>&1 ; then
        echo "clang -target $arch-$systarget"
    else
        if command -v $arch-$systarget-gcc >/dev/null 2>&1 ; then
            echo "$arch-$systarget-gcc"
        fi
    fi
}
find_arch_ld()
{
    # Locate a linker for $arch-$systarget: prefer the dedicated cross
    # binutils ld, fall back to LLVM's ld.lld when present.
    # Reads globals: arch, systarget.
    local cross_ld="$arch-$systarget-ld"
    if command -v "$cross_ld" >/dev/null 2>&1 ; then
        echo "$cross_ld"
        return
    fi
    command -v ld.lld >/dev/null 2>&1 || return 0
    case $arch in
    powerpc64|sparc64|riscv64)
        # lld happens to support 32-bit powerpc, but is having a bit of
        # trouble with riscv64 as of 2022-10-02 — don't offer it here.
        echo "$arch unsupported by $(ld.lld --version)" >&2
        ;;
    *)
        # Optimistically assume lld can handle the target, even though
        # it's most likely not going to work for exotic ones.
        echo ld.lld
        ;;
    esac
}
# Decide whether tests for $arch can run on this host.  Native arch always
# can; otherwise, on Debian-like systems with qemu-user and the cross libc
# installed, a cross toolchain is looked up.  Returns non-zero (after
# logging) when no usable toolchain was found.
test_arch_canrun()
{
# expected arguments vars:
# - arch
# expected setups vars:
# - sysarch
# - systarget
# output var assignments:
# - target_ld
# - target_cc
# Map the GNU arch name to the Debian cross-package abbreviation.
case $arch in
i686) arch_abbrev=i386 ;;
x86_64) arch_abbrev=amd64 ;;
aarch64) arch_abbrev=arm64 ;;
powerpc64) arch_abbrev=ppc64 ;;
powerpc64le) arch_abbrev=ppc64el ;;
*) arch_abbrev=$arch
esac
target_ld=""
target_cc=""
if [ $sysarch = $arch ] ; then
# Native build: the system compiler drives both compile and link.
target_ld=cc
target_cc=cc
elif ( . /etc/os-release >/dev/null 2>&1 &&
echo $ID $ID_LIKE | grep -F -q debian ) ; then
# Debian/Ubuntu -like distributions.
# Require qemu-user plus the cross gcc support libs and libc.
if dpkg -l qemu-user \
libgcc-\*-dev-${arch_abbrev}-cross \
libc6-${arch_abbrev}-cross \
>/dev/null 2>&1
then
target_cc=$(find_arch_cc)
target_ld=$(find_arch_ld)
fi
fi
# Either tool missing means this architecture's test must be skipped.
if [ ! "$target_cc" ] || [ ! "$target_ld" ] ; then
echo Skipping 1 non-native architecture test. >&2
false
else : ; fi
}
# Build and (unless build_only=yes) run one test binary for $arch.
# Runs in a subshell "( )" body so cd / export / set -e stay contained.
# Native runs execute ./$bin directly; cross runs go through qemu-user
# with the target's dynamic loader and library paths.
# NOTE(review): $testfunc and $cflags_proj are presumably provided by
# the sourcing test script / uniconf.sh.inc — confirm.
test_run_1arch()
(
# expected arguments vars:
# - arch
# expected setups vars:
# - sysarch
# - systarget
# - target_cc
# - target_ld
# - cflags0
# 2022-02-14: 2 notes.
#
# 1. The "src_common" variable had been imported here so that test scripts
# can avoid using function commands such as "vsrc". The source code
# files set is assembled from "src_common" (when available) and "src",
# which would define additional source code files when "src_common" is
# already defined.
#
# 2. The "cflags_common" variable is imported whenever test scripts define
# one. This variable contain compilation flags that is intended to be
# repeated among all test variants within a test script. The "cflags"
# flag now serves the purpose of defining variant-specific compilation
# flags for a test.
: ${srcset:='(unset:${srcset})'}
: ${src_common:=""}
: ${src:?Variable unspecified: src}
: ${arch:?Variable unspecified: arch}
: ${cflags_common:=""}
: ${cflags:=""}
: ${ldflags_common:=""}
: ${ldflags:=""}
# Skip silently when the caller restricted the run to another
# srcset/arch via want_srcset / want_arch.
if [ X"${want_srcset:-$srcset}" != X"$srcset" ] ||
[ X"${want_arch:-$arch}" != X"$arch" ]
then return ; fi
bin=$(basename "$0" .sh)
# routinal notification info.
echo "======== Test Name: $bin ; ${arch} / ${srcset} ========"
if [ $sysarch = $arch ] ; then
# Native: no extra include paths, plain link, direct execution.
cflags1=""
ld_opts=""
export exec=./$bin
else
# Cross: derive sysroot-ish paths from the Debian cross layout.
last(){ shift $(( $# - 1 )) ; echo "$1" ; }
UsrArchIncPath=/usr/$arch-$systarget/include
UsrArchLibPath=/usr/$arch-$systarget/lib
UsrArchGccLibPath=$(last /usr/lib/gcc-cross/$arch-$systarget/*)
cflags1="-isystem $UsrArchIncPath"
# Find the target's dynamic loader (ld*.so*) for qemu to exec.
dyld=$(set $(find $UsrArchLibPath* -type f |
grep -E '/ld([^a-zA-Z].*)?\.so(.[1-9])?$' |
sort) ; echo $1)
# Manual link line: C runtime objects plus the cross lib dirs.
ld_opts="\
$UsrArchLibPath/crt[1in].o
$UsrArchGccLibPath/crtbegin.o
$UsrArchGccLibPath/crtend.o
-L$UsrArchLibPath
-L$UsrArchGccLibPath
-lc"
# qemu binaries use slightly different architecture names.
qemu_arch=$arch
qemu_opts=""
if [ $arch = i686 ] ; then qemu_arch=i386 ; fi
if [ $arch = x86_64 ] ; then qemu_opts="-cpu max" ; fi
if [ $arch = powerpc ] ; then qemu_arch=ppc ; fi
if [ $arch = powerpc64 ] ; then qemu_arch=ppc64 ; fi
if [ $arch = powerpc64le ] ; then qemu_arch=ppc64le ; fi
export exec="qemu-${qemu_arch} $qemu_opts $dyld ./$bin"
export LD_LIBRARY_PATH=$UsrArchLibPath:$LD_LIBRARY_PATH
fi
if [ $sysname = Linux ] ; then
cflags="$cflags -fPIC"
fi
# Resolve each source file: bare names live beside the test script,
# slashed paths are relative to ../src; objects land in ./auto.
srcdir=../src
basedir=$srcdir/$(basename "$PWD")
srcfiles=""
objfiles=""
for s in $src_common $src ; do
b=$(basename $s)
if [ $s = $b ]
then srcfiles="$srcfiles $basedir/$s"
else srcfiles="$srcfiles $srcdir/$s"
fi ; objfiles="$objfiles ${b%.*}.o"
done
cd "$(dirname $unitest_sh)"/../auto
rm -f *.o *-test
# Any compile or link failure aborts this subshell immediately.
set -e
${CC:-$target_cc} -c $cflags_proj $cflags0 $cflags1 \
$cflags_common $cflags $srcfiles
${LD:-$target_ld} $ld_opts $ldflags_common $ldflags $objfiles -o $bin
set +e
if [ X"$build_only" != Xyes ] ; then
# testfunc drives the built binary; color-code the verdict.
if testfunc
then printf '\033[42;33m%s\033[0m\n' passing ; true
else printf '\033[41;34m%s\033[0m\n' failing ; false
fi
fi
#rm $objfiles $bin
)
# 2022-02-19:
# The functions "shortrand" and "randblob" had been added to lessen
# the verbosity of tests involving randomly generated long test vectors.
shortrand()
{
    # Emit the cube of a random 5-bit integer (0..29791) as decimal text;
    # handy for picking modest random lengths for long test vectors.
    python3 -c 'import secrets
r = secrets.randbits(5)
print(r * r * r)'
}
randblob()
{
    # Write exactly $1 random bytes from /dev/urandom to stdout.
    # The bulk is copied in 512-byte blocks; any remainder is copied by
    # a second dd.  Each dd only runs when it has work to do, so dd is
    # never invoked with bs=0 (which dd rejects with an error that was
    # previously discarded by the 2>/dev/null).
    len=$1
    bs=512
    cnt=$((len / bs))
    rem=$((len - bs * cnt))
    if [ "$cnt" -gt 0 ]; then
        2>/dev/null dd if=/dev/urandom count=$cnt bs=$bs
    fi
    if [ "$rem" -gt 0 ]; then
        2>/dev/null dd if=/dev/urandom count=1 bs=$rem
    fi
}
ret=0
# Run the current test over the configured set of architectures.
# Reads:   $arch_family — selects which set of $arch values to try,
#          plus everything test_arch_canrun / test_run_1arch need.
# Sets:    $ret to 1 if any architecture's run fails.
# Returns: $ret.
tests_run()
{
    # Try one architecture in a subshell; latch any failure into $ret.
    # (Extracted from five copy-pasted subshell/latch blocks; the old
    # "|| [ $ret -ne 0 ]" re-check was redundant since ret only ever
    # moves from 0 to 1.)
    run_one_arch()
    {
        ( arch=$1
          if test_arch_canrun
          then test_run_1arch
          fi )
        if [ $? -ne 0 ] ; then ret=1 ; fi
    }
    case $arch_family in
    defaults)
        # 2022-09-30: the default set used to be x86_64, aarch64,
        # powerpc64 and sparc64.  sparc64 segfaulted (little resource
        # to debug) and was removed; the set was rebalanced to cover
        # big-/little-endian and 32-/64-bit:
        # i686, x86_64/aarch64, powerpc, powerpc64.
        run_one_arch i686
        # One 64-bit little-endian arch: prefer aarch64, else x86_64.
        ( if { : ; arch=aarch64 ; test_arch_canrun ; } ||
             { arch=x86_64 ; test_arch_canrun ; }
          then test_run_1arch
          fi )
        if [ $? -ne 0 ] ; then ret=1 ; fi
        run_one_arch powerpc
        run_one_arch powerpc64
        ;;
    # 2022-02-19: specifying $arch_family allows (possibly multiple)
    # $arch to adapt to different data models (e.g. word lengths)
    # within the same architecture.
    x86)
        run_one_arch x86_64
        ;;
    arm)
        run_one_arch aarch64
        ;;
    ppc)
        run_one_arch powerpc64
        run_one_arch powerpc64le
        ;;
    +*)
        # "+<arch>": run exactly the named architecture.
        run_one_arch "${arch_family#+}"
        ;;
    esac
    return $ret
}
| true
|
334338c84f4169e9feb25fa5fbb1e0f9f9e79c7d
|
Shell
|
AhmedSaalaah/DBMS-by-BashScript
|
/scripts/selectcol.sh
|
UTF-8
| 1,466
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# selectcol.sh — render a SELECT over a CSV "table" with awk.
# Expects the caller to have populated the array $sentence as the parsed
# statement, presumably: sentence[1]=column|"all", sentence[3]=table,
# sentence[5]=where-column, sentence[7]=where-value — TODO confirm
# against the dispatching script.
input=${sentence[@]}
# all=`awk -F " " '{print $1}' ${sentence[3]}.metadata`
# meta=($all)
# in=`echo $input | awk -F "select" '{print $2}' | awk -F "from" '{print $1}'`
# inp=($in)
# k=$((${#meta[@]}-2))
# for ((j=0 ; j<$k;++j))
# do
# if [[ ${inp[0]} == ${meta[$j]} ]]
# then
# m=$((1+$j))
# inp[0]=$m
# break ;
# fi
# done
#var3=${sentence[-1]}
#var2=${sentence[5]}
#Nor=`awk -F , '{if( $1 == "'$var3'"){print NR}}' ${sentence[3]}.csv`
#NN=($Nor)
#for (( i=0; i<${#NN[@]} ; ++i ))
#do
#NRm=${NN[$i]}
#var3=`awk -F , 'NR=='$NRm' { print $'$m' '\t' }' ${sentence[3]}.csv`
#printf $var3"\n"
#done
#pot = print only one time
# SELECT * ... WHERE: first record prints the (yellow) header row and
# notes which field index matches the WHERE column; subsequent records
# print whole rows whose WHERE field equals sentence[7].
if [[ ${sentence[1]} == "all" ]]
then
awk -F "," 'BEGIN{pot=0;select=0}{if(pot==0){printf "\033[0;93m";for(i=1;i<=NF;i++){if($i!=","){printf " ";printf $i;printf "\t";if($i=="'${sentence[5]}'"){select=i;}}};print "";pot++};printf "\033[0;29m";if($select=="'${sentence[7]}'"){for(i=1;i<=NF;i++){if($i!=","){printf " ";printf $i;printf "\t"}};print "";}}' ${sentence[3]}.csv
# SELECT <col> ... WHERE: remember both the selected column's index and
# the WHERE column's index from the header, then print only the
# selected field of matching rows.
elif [[ ${sentence[1]} != "all" ]]
then
awk -F "," 'BEGIN{pot=0;select=0;check=0}{if(pot==0){printf "\033[0;93m";for(i=1;i<NF;i++){if($i!=","&&$i!=""&&$i=="'${sentence[1]}'"){select=i;printf " ";printf $i;printf "\t"};if($i=="'${sentence[5]}'"){check=i;}};print "";pot++};printf "\033[0;29m";for(i=1;i<NF;i++){if($check=="'${sentence[7]}'"){printf " ";print $select;}}}' ${sentence[3]}.csv
fi
| true
|
9759be8c259445ebb76a566423cdd74f2c330ffc
|
Shell
|
rudidev89/OpenWRT-Tools
|
/list_notifications.sh
|
UTF-8
| 965
| 3.65625
| 4
|
[
"Unlicense"
] |
permissive
|
#!/bin/bash
#
# List Turris Omnia notifications.
#
# The Foris web interface currently doesn't show whether an email was
# sent or not, so render each notification directory under
# /tmp/user_notify as a table row: timestamp, severity, whether it was
# emailed, and the first line of the message — making mailer issues
# easy to spot (did the mail actually arrive?).
ndir="/tmp/user_notify"
FMT='%-29s %-8s %-8s %s\n'
count=0
# FMT is a trusted, fixed format string — intentional variable format.
header=$(printf "$FMT" 'Date/time' 'Severity' 'Emailed?' 'First line of message')
output=("$header")
for d in "$ndir"/*
do
  # Each notification is a directory holding severity/message files.
  if [ -d "$d" ] ; then
    severity=$(cat "$d/severity")
    # Presence of the sent_by_email marker file means it was mailed.
    [ -f "$d/sent_by_email" ] && emailed="True" || emailed="False"
    message=$(head -1 "$d/message_en")
    # GNU date -r FILE: print the file's modification time.
    msgtime=$(date -r "$d")
    output+=("$(printf "$FMT" "$msgtime" "$severity" "$emailed" "$message")")
    count=$((count + 1))
  fi
done
if (( count == 0 )); then
  printf 'No messages\n'
else
  (( count > 1 )) && s="s" || s=""
  printf '%s message%s active.\n\n' "$count" "$s"
  printf '%s\n' "${output[@]}"
fi
| true
|
a43256f0bd0fc07699e0c02cc16098fe2016d1b8
|
Shell
|
Laks0/note
|
/note
|
UTF-8
| 1,331
| 3.9375
| 4
|
[] |
no_license
|
#!/bin/bash
# note — minimal note manager; keeps plain-text notes in ~/Documents/notas.
# NOTE(review): mkdir is not -p, so this fails if ~/Documents itself is
# missing — confirm that is acceptable.
test -d ~/Documents/notas || mkdir ~/Documents/notas
cd ~/Documents/notas
# A stray trailing backslash after nombre="$1" used to splice the next
# line into the same assignment, leaving $fecha unset and producing
# titles without a date.  Keep the two assignments separate.
nombre="$1"
fecha=$(date +"%y-%m-%d")
# OPTIONAL: file-selection program (fzf by default).
selector="fzf"
# sel() stores the chosen filename here.
selected=""
# Pick one note file from the current directory into $selected, using
# $selector when installed, else a numbered manual menu.  Exits the
# script if the choice is not an existing file.
# NOTE(review): $2 below is sel's own second positional parameter; all
# current call sites invoke "sel" with no arguments, so the filter
# matches every file — confirm whether a filter arg was ever intended.
sel () {
# Manual selector for when $selector is not installed.
if ! type "$selector" > /dev/null; then
n=0
lista=()
for f in *; do
[[ "$f" == *"$2"* ]] || continue
echo $n") "$f
n=$(( $n + 1 ))
lista+=("${f}")
done
# Read the chosen index from the user.
read i
selected="${lista[$i]}"
test -f "$selected" || exit 1;
else
# The bind option makes fzf usable with vim keys; it won't necessarily
# work with other selector programs.
selected="$($selector --bind k:up,j:down --preview="cat {}")"
test -f "$selected" || exit 1;
fi
}
# Option dispatch; anything unmatched falls through to note creation.
case "$1" in
# -l: list every note.
"-l")
for f in *; do
echo $f
done
exit 0;;
# -o: pick a note and open it in $EDITOR.
"-o")
sel
$EDITOR "$selected"
exit 0;;
# -r: pick a note and delete it after a y/N confirmation.
"-r")sel; echo "borrar nota $selected? [y/N]"; read i; test "$i" = "y" && rm "$selected"; exit 0;;
# -p NAME: create a note without the date prefix.
"-p")nombre="$2"; fecha="";;
# -h: usage (in Spanish, kept verbatim — user-facing text).
"-h")echo "note [NOMBRE crea una nota con nombre {yy-mm-dd NOMBRE}] [-l lista de todas las notas] [-o seleccionar una nota para abrir] [-r seleccionar una nota para borrar] [-p NOMBRE crear una nueva nota sin la fecha en el nombre]"; exit 0;;
esac
# Title is the date alone, or "<name><date>" when a name was given.
test "$1" = "" && titulo=$fecha || titulo="$nombre"$fecha
$EDITOR "$titulo"
| true
|
ed26d73550265aa2f64b85473ce7f4f12c6ae778
|
Shell
|
no0law1/hangfire_tutorial
|
/HangfireTutorial/misc/run-mongoscripts.sh
|
UTF-8
| 754
| 3.40625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Wait (up to ~50 s) for a Mongo server's TCP port to open, then create
# a readWrite user on the requested database.
# Usage: run-mongoscripts.sh <server> <port> <database> <username> <password>
main() {
    mongo_host=$1;
    mongo_port=$2;
    db_name=$3;
    db_user=$4;
    db_pass=$5;
    probe_secs=1;
    max_tries=50;
    try=1
    # True when the port accepts a TCP connection (bash /dev/tcp probe).
    port_open() {
        timeout "$probe_secs" bash -c "echo > /dev/tcp/$mongo_host/$mongo_port"
    }
    # Poll until the port opens or the attempt budget is exhausted.
    while [ $try -le $max_tries ] && ! port_open; do
        echo "$mongo_host:$mongo_port is DOWN after $try check";
        sleep $probe_secs;
        try=$(($try + 1));
    done;
    if port_open; then
        echo "$mongo_host:$mongo_port is UP";
    else
        echo "$mongo_host:$mongo_port is DOWN after all checks";
        exit 1;
    fi
    # Provision the per-database user via the admin account.
    mongo --host $mongo_host --port $mongo_port admin -u admin -p admin --eval "db.getSiblingDB('$db_name').createUser({user:'$db_user',pwd:'$db_pass',roles:[{role:'readWrite',db:'$db_name'}]})"
}
main "$@";
| true
|
29def74e444528a16f40ceab3cc935ac9aff27c6
|
Shell
|
da99/my_nginx
|
/sh/UPDATE-OR-CREATE-VAR/_.sh
|
UTF-8
| 393
| 3.5
| 4
|
[
"MIT"
] |
permissive
|
# === {{CMD}} "DEV|PROD" "name" "val"
# === {{CMD}} "DEV|PROD" "name"  # Opens editor to enter file.
# Remove any existing config/<ENV>/<name> file, then delegate to
# CREATE-VAR to (re)create it with the remaining args as the value.
UPDATE-OR-CREATE-VAR () {
  local env_name
  env_name="$(sh_string UPPER "$1")"
  shift
  local var_name="$1"
  shift
  local var_file="config/$env_name/$var_name"
  # Remaining args joined into one value string (may be empty).
  local var_value="$*"
  if [[ -f "$var_file" ]]; then
    rm "$var_file"
  fi
  nginx_setup CREATE-VAR "$env_name" "$var_name" "$var_value"
} # === end function
| true
|
d43c76c982d175252da73b771f72b84a23706318
|
Shell
|
msgpo/artix-vcs
|
/runit-rc-git/PKGBUILD
|
UTF-8
| 972
| 2.65625
| 3
|
[] |
no_license
|
# Maintainer: Chris Cromer <chris@cromer.cl>
# PKGBUILD for the VCS (git HEAD) build of Artix's runit initscripts.
pkgname=runit-rc-git
pkgver=r2.d0a73d8
pkgrel=1
pkgdesc='Artix Linux runit initscripts'
arch=('x86_64')
url='https://github.com/artix-linux/runit-rc'
license=('BSD')
# Preserve local edits to rc.conf across upgrades.
backup=('etc/rc/rc.conf')
depends=('procps-ng' 'bash' 'opentmpfiles' 'bootlogd')
# Same list for provides/conflicts: this replaces the stable runit-rc
# package and its init-* virtual providers.
provides=('init-kmod' 'init-udev' 'init-opentmpfiles' 'init-opensysusers' 'runit-rc')
conflicts=('init-kmod' 'init-udev' 'init-opentmpfiles' 'init-opensysusers' 'runit-rc')
source=("git+${url}.git")
# VCS checkout: integrity is tracked by git, not a fixed checksum.
sha256sums=('SKIP')
optdepends=('lvm2-runit: LVM support for runit'
'cryptsetup-runit: Enable boot support for encrypted partitions')
pkgver() {
  # VCS version string: r<commit count>.<abbreviated HEAD hash>.
  cd runit-rc
  local commits rev
  commits=$(git rev-list --count HEAD)
  rev=$(git rev-parse --short HEAD)
  printf "r%s.%s" "$commits" "$rev"
}
build() {
  # Guard the cd: if the checkout is missing, fail the build instead of
  # silently running make in $srcdir.
  cd runit-rc || return
  make
}
package() {
  # Guard the cd: if the checkout is missing, fail packaging instead of
  # silently installing from $srcdir.
  cd runit-rc || return
  make PREFIX="/usr" DESTDIR="${pkgdir}" install
}
| true
|
ed9316d7197d1b7b824de40504c5c4ba74bc3608
|
Shell
|
tescorg/nzbwatch
|
/PKGBUILD
|
UTF-8
| 841
| 2.890625
| 3
|
[] |
no_license
|
# PKGBUILD for the VCS (git HEAD) build of nzbwatch.
pkgname=nzbwatch-git
_gitname=nzbwatch
pkgver=1
pkgrel=1
pkgdesc="Watch a folder for new NZB files and automatically upload to SABnzbd"
arch=('any')
url="https://github.com/tescorg/nzbwatch"
# NOTE(review): empty license array — upstream repo declares no license;
# confirm before distributing.
license=()
conflicts=('nzbwatch')
provides=('nzbwatch')
depends=('ruby' 'ruby-rb-inotify')
makedepends=('git')
optdepends=('sabnzbd')
install=
source=("git+https://github.com/tescorg/nzbwatch")
# VCS checkout: integrity is tracked by git, not a fixed checksum.
md5sums=('SKIP')
package() {
  # Sample config and executable script.
  install -Dm644 "${srcdir}/${_gitname}/nzbwatch-sample.yml" "${pkgdir}/etc/nzbwatch-sample.yml"
  install -Dm755 "${srcdir}/${_gitname}/nzbwatch.rb" "${pkgdir}/usr/bin/nzbwatch.rb"
  # Unit file was installed -Dm777 (world-writable) — a security hole;
  # systemd unit files should be mode 644.
  install -Dm644 "${srcdir}/${_gitname}/nzbwatch.service" "${pkgdir}/usr/lib/systemd/user/nzbwatch.service"
}
pkgver() {
  # VCS version string: r<commit count>.<abbreviated HEAD hash>.
  cd "${_gitname}"
  local commits rev
  commits=$(git rev-list --count HEAD)
  rev=$(git rev-parse --short HEAD)
  printf "r%s.%s" "$commits" "$rev"
}
| true
|
0f4c1ff269cc2aa103917d08aabf1fdf77c8b9f3
|
Shell
|
opencollective/opencollective-email
|
/scripts/test.sh
|
UTF-8
| 961
| 3.65625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Prepare the test database (dev) or run migrations (staging/production),
# then run the jest test suite.  Any failing step aborts via set -e.
set -e

PG_DATABASE="opencollective-email-test"

# Only run migrations automatically on staging and production
if [ "$SEQUELIZE_ENV" = "staging" ] || [ "$SEQUELIZE_ENV" = "production" ]; then
  echo "- running db:migrate on $SEQUELIZE_ENV environment"
  npm run db:migrate
  exit $?; # exit with return code of previous command
fi

# On any other environment, first let's check if postgres is installed
if command -v psql > /dev/null; then
  echo "✓ Postgres installed"
  # Quote the database name so grep/createdb always see one argument.
  if psql -lqt | cut -d \| -f 1 | grep -qw "$PG_DATABASE"; then
    echo "✓ $PG_DATABASE database exists"
    # dropdb "$PG_DATABASE"
  else
    echo "- creating $PG_DATABASE database";
    createdb "$PG_DATABASE"
  fi
else
  echo "𐄂 psql command doesn't exist. Make sure you have Postgres installed ($> brew install postgres)"
fi

echo ""
echo "Running tests with jest"
# -w 1: single worker so tests share the one database safely.
jest -w 1 --verbose false --detectOpenHandles --testMatch **/__tests__/**/*.test.js
echo ""
| true
|
cc592cf5f09507f83b5fcd80c383037b058d29ba
|
Shell
|
ju-lab/beta_cell_ATAC_seq
|
/02b.star.sh
|
UTF-8
| 462
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch STAR alignment + RSEM quantification for every paired-end
# RNA-seq fastq (*_1.fastq.gz) found under the current directory.
#
# Iterate with a while-read pipeline instead of an unquoted
# `for i in $(find ...)` so paths are not word-split or glob-expanded.
find -L . | grep 1.fastq.gz | grep RNA | while IFS= read -r i; do
  # Sample name: the path component preceding "_1.fastq.gz".
  sample_name=$(echo "$i" | sed 's!.*/\([^/]*\)_1.fastq.gz$!\1!')
  # Mirror the fastq directory layout under ./star and ./rsem.
  outdir_star=$(echo "$i" | sed 's!./fastq/RNA-seq!./star!;s!\(.*\)/[^/]*!\1!')
  outdir_rsem=$(echo "$i" | sed 's!./fastq/RNA-seq!./rsem!;s!\(.*\)/[^/]*!\1!')
  run_star.sh \
    -s "$sample_name" \
    --outdir_star "$outdir_star" \
    --outdir_rsem "$outdir_rsem" \
    --reference mm10 \
    "$i" "${i/1.fastq.gz/2.fastq.gz}" \
    --process star,rsem
done
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.