Instruction | input_code | output_code |
|---|---|---|
Add xcms to the local narrative install | set -x
# Install a local version of the Kbase narrative
conda remove -n kbase --all -y
conda create -y -n kbase python=2.7 scipy pytables matplotlib readline
source activate kbase
pip install -e .
pip install pexpect
# based on instructions in narrative/docs/developer.md
rm -rf bootstrap
git clone https://github.com/kbase/bootstrap bootstrap
pip install -r bootstrap/kb_python_runtime/python-pip-list-narrative
rm -rf narrative
git clone https://github.com/kbase/narrative narrative
cd narrative
git submodule init
git submodule update
export VIRTUAL_ENV="kbase"
bash install.sh
# point to the CI services
sed -i '' 's:kbase.us/services/:ci.kbase.us/services/:' src/config.json
| set -x
R --version
sudo Rscript xcms-install.R
# Install a local version of the Kbase narrative
conda remove -n kbase --all -y
conda create -y -n kbase python=2.7 scipy pytables matplotlib readline pandas
source activate kbase
pip install -e .
pip install pexpect rpy2
# based on instructions in narrative/docs/developer.md
rm -rf bootstrap
git clone https://github.com/kbase/bootstrap bootstrap
pip install -r bootstrap/kb_python_runtime/python-pip-list-narrative
rm -rf narrative
git clone https://github.com/kbase/narrative narrative
cd narrative
git submodule init
git submodule update
export VIRTUAL_ENV="kbase"
bash install.sh
# point to the CI services
sed -i '' 's:kbase.us/services/:ci.kbase.us/services/:' src/config.json
|
Add aunty to global npm installs | # Load nave- and npm-related functions.
source $DOTFILES/source/50_node.sh init
# Global npm modules to install.
npm_globals=(
babel-cli
bower
eslint
forever
grunt-cli
grunt-init
hexo-cli
karma-cli
licensor
linken
mocha
node-inspector
tldr
trash
uglify-js
yo
generator-data-analysis
)
# Install latest Node.js, set as default, install global npm modules.
nave_install stable
| # Load nave- and npm-related functions.
source $DOTFILES/source/50_node.sh init
# Global npm modules to install.
npm_globals=(
aunty
babel-cli
bower
eslint
forever
grunt-cli
grunt-init
hexo-cli
karma-cli
licensor
linken
mocha
node-inspector
tldr
trash
uglify-js
yo
generator-data-analysis
)
# Install latest Node.js, set as default, install global npm modules.
nave_install stable
|
Set FORECAST_IO_API_KEY during bats tests | setup() {
ew="bin/emoji-weather -tmpdir test/tmp"
mkdir -p test/tmp
}
teardown() {
rm test/tmp/*.json
echo $output
}
fixture() {
cp test/{fixtures,tmp}/$@
touch test/tmp/$@
}
| setup() {
export FORECAST_IO_API_KEY=abc123
ew="bin/emoji-weather -tmpdir test/tmp"
mkdir -p test/tmp
}
teardown() {
rm test/tmp/*.json
echo $output
}
fixture() {
cp test/{fixtures,tmp}/$@
touch test/tmp/$@
}
|
Fix error in Anaconda usage | #!/bin/bash
# Created by the VLSCI job script generator for SLURM on x86
# Thu Oct 17 2013 16:48:06 GMT+1100 (EST)
# Partition for the job:
#SBATCH -p main
# The name of the job:
#SBATCH --job-name="illum"
# Maximum number of CPU cores used by the job:
#SBATCH --ntasks=1
# The amount of memory in megabytes per process in the job:
#SBATCH --mem-per-cpu=1024
# Send yourself an email when the job:
# aborts abnormally (fails)
#SBATCH --mail-type=FAIL
# Use this email address:
#SBATCH --mail-user=juan.n@unimelb.edu.au
# The maximum running time of the job in days-hours:mins:sec
#SBATCH --time=0-12:0:00
# Run the job from the directory where it was launched (default):
# The job command(s):
~/anaconda/bin/conda activate husc
husc illum -v
| #!/bin/bash
# Created by the VLSCI job script generator for SLURM on x86
# Thu Oct 17 2013 16:48:06 GMT+1100 (EST)
# Partition for the job:
#SBATCH -p main
# The name of the job:
#SBATCH --job-name="illum"
# Maximum number of CPU cores used by the job:
#SBATCH --ntasks=1
# The amount of memory in megabytes per process in the job:
#SBATCH --mem-per-cpu=1024
# Send yourself an email when the job:
# aborts abnormally (fails)
#SBATCH --mail-type=FAIL
# Use this email address:
#SBATCH --mail-user=juan.n@unimelb.edu.au
# The maximum running time of the job in days-hours:mins:sec
#SBATCH --time=0-12:0:00
# Run the job from the directory where it was launched (default):
# The job command(s):
source activate husc
husc illum -v
|
Fix jar path in shell install script | #!/bin/bash
set -e
INSTALL_BASE="/opt/dims"
JAR_PATH="$INSTALL_BASE/jars/tupelo"
# If there was a path specified, use that instead of the default
if [ ! "$1" == "" ]; then
INSTALL_BASE="$1"
fi
echo "Installing to $INSTALL_BASE"
if [ -d "JAR_PATH" ]; then
echo "Existing jar path found, removing"
rm -rf "$JAR_PATH"
fi
# Copy all the jars and properties into the lib folder
mkdir -p "$JAR_PATH"
cp target/*.jar "$JAR_PATH"
# Copy any properties, ignoring errors
cp target/*.properties "$JAR_PATH" 2> /dev/null || :
cp *.properties "$JAR_PATH" 2> /dev/null || :
# Copy the elvis script into the bin folder
mkdir -p "$INSTALL_BASE/bin"
cp elvis "$INSTALL_BASE/bin"
# Set permissions
chmod -R a+r "$INSTALL_BASE"
chmod -R a-w "$INSTALL_BASE"
# Make sure the directories have +x
chmod -R a+x "$INSTALL_BASE/bin"
chmod a+x "$JAR_PATH/../"
chmod a+x "$JAR_PATH"
echo "Installed to $INSTALL_BASE"
echo "Add '$INSTALL_BASE/bin' to your path"
| #!/bin/bash
set -e
INSTALL_BASE="/opt/dims"
# If there was a path specified, use that instead of the default
if [ ! "$1" == "" ]; then
INSTALL_BASE="$1"
fi
JAR_PATH="$INSTALL_BASE/jars/tupelo"
echo "Installing to $INSTALL_BASE"
if [ -d "JAR_PATH" ]; then
echo "Existing jar path found, removing"
rm -rf "$JAR_PATH"
fi
# Copy all the jars and properties into the lib folder
mkdir -p "$JAR_PATH"
cp target/*.jar "$JAR_PATH"
# Copy any properties, ignoring errors
cp target/*.properties "$JAR_PATH" 2> /dev/null || :
cp *.properties "$JAR_PATH" 2> /dev/null || :
# Copy the elvis script into the bin folder
mkdir -p "$INSTALL_BASE/bin"
cp elvis "$INSTALL_BASE/bin"
# Set permissions
chmod -R a+r "$INSTALL_BASE"
chmod -R a-w "$INSTALL_BASE"
# Make sure the directories have +x
chmod -R a+x "$INSTALL_BASE/bin"
chmod a+x "$JAR_PATH/../"
chmod a+x "$JAR_PATH"
echo "Installed to $INSTALL_BASE"
echo "Add '$INSTALL_BASE/bin' to your path"
|
Fix usage of ssh-wrapper.sh script | #!/bin/bash
set -e
function error {
echo error: "$@" >&2
exit 1
}
function do_cmd {
echo "[+] $@"
eval "$@"
}
function git_update_submodule_with_ssh {
local name="$1"
local file="$2"
cat > ssh-wrapper.sh <<EOF
#!/bin/sh
ssh -i "$file" "$@"
EOF
chmod 0755 ssh-wrapper.sh
do_cmd GIT_SSH=ssh-wrapper.sh git submodule update --init "$name"
}
[ -f "$GO_SSH_KEY" ] || error "file in GO_SSH_KEY not found: $GO_SSH_KEY"
[ -f "$MINIOS_SSH_KEY" ] || error "file in MINIOS_SSH_KEY not found: $MINIOS_SSH_KEY"
do_cmd git_update_submodule_with_ssh go "$GO_SSH_KEY"
do_cmd git_update_submodule_with_ssh minios "$MINIOS_SSH_KEY"
| #!/bin/bash
set -e
function error {
echo error: "$@" >&2
exit 1
}
function do_cmd {
echo "[+] $@"
eval "$@"
}
function git_update_submodule_with_ssh {
local name="$1"
local file="$2"
cat > ssh-wrapper.sh <<EOF
#!/bin/sh
ssh -i "$file" "$@"
EOF
chmod 0755 ssh-wrapper.sh
do_cmd GIT_SSH=./ssh-wrapper.sh git submodule update --init "$name"
rm ssh-wrapper.sh
}
[ -f "$GO_SSH_KEY" ] || error "file in GO_SSH_KEY not found: $GO_SSH_KEY"
[ -f "$MINIOS_SSH_KEY" ] || error "file in MINIOS_SSH_KEY not found: $MINIOS_SSH_KEY"
do_cmd git_update_submodule_with_ssh go "$GO_SSH_KEY"
do_cmd git_update_submodule_with_ssh minios "$MINIOS_SSH_KEY"
|
Add log file to raspbian build process | #!/usr/bin/env bash
# Make sure only root can run our script
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" 1>&2
exit 1
fi
/bin/bash clean.sh
/bin/bash update.sh
/bin/bash build.sh
/bin/bash buildroot.sh
rm -rf raspbian-ua-netinst-*.bz2 &>/dev/null
rm -rf raspbian-ua-netinst-*.xz &>/dev/null
for file in raspbian-ua-netinst-*.*
do
mv -v "$file" "${file//raspbian/openhabian}"
done
| #!/usr/bin/env bash
# Make sure only root can run our script
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" 1>&2
exit 1
fi
# log everything to a file
exec &> >(tee -a "openhabian-build-$(date +%Y-%m-%d_%H%M%S).log")
/bin/bash clean.sh
/bin/bash update.sh
/bin/bash build.sh
/bin/bash buildroot.sh
rm -rf raspbian-ua-netinst-*.bz2 &>/dev/null
rm -rf raspbian-ua-netinst-*.xz &>/dev/null
for file in raspbian-ua-netinst-*.*
do
mv -v "$file" "${file//raspbian/openhabian}"
done
|
Fix post gate hook to use correct path | #!/bin/bash -x
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script is executed inside post_test_hook function in devstack gate.
VENV=${1:-"dsvm-functional"}
export GATE_DEST=$BASE/new
export DEVSTACK_DIR=$GATE_DEST/devstack
export TACKER_DIR="$GATE_DEST/tacker"
case $VENV in
dsvm-functional)
owner=stack
;;
esac
sudo chown -R $owner:stack $TACKER_DIR
# Run functional tests
echo "Running Tacker $VENV test suite"
source $DEVSTACK_DIR/openrc admin admin
sudo -E -H -u $owner tox -e functional -- --concurrency=1
EXIT_CODE=$?
exit $EXIT_CODE
| #!/bin/bash -x
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script is executed inside post_test_hook function in devstack gate.
VENV=${1:-"dsvm-functional"}
export GATE_DEST=$BASE/new
export DEVSTACK_DIR=$GATE_DEST/devstack
export TACKER_DIR="$GATE_DEST/tacker"
case $VENV in
dsvm-functional)
owner=stack
;;
esac
sudo chown -R $owner:stack $TACKER_DIR
cd $TACKER_DIR
# Run functional tests
echo "Running Tacker $VENV test suite"
source $DEVSTACK_DIR/openrc admin admin
sudo -E -H -u $owner tox -e functional -- --concurrency=1
EXIT_CODE=$?
exit $EXIT_CODE
|
Install necessary utils and nodejs | # Install htop
sudo apt-get install htop -y | # Install useful utils
sudo apt-get install htop curl git build-essential -y
# Install Node.js
curl -sL https://deb.nodesource.com/setup | sudo bash -
sudo apt-get install nodejs build-essential -y |
Remove pre-flight check for spelling of conference name | #!/bin/bash
ERRORS=()
###
# Check that site can be built.
###
echo " *** Checking that site can be built."
make build
if [[ $? -eq 0 ]]; then
echo " *** Site built ok."
else
echo " *** Site could not be built."
exit 1
fi
###
# Check that no links are broken.
###
# TODO reinstate this
#echo " *** Checking that no links are broken."
#linkchecker --no-status --no-warnings --check-extern "http://localhost:8000"
#if [[ $? -ne 0 ]]; then
# ERRORS+=("Broken links found on site")
#fi
###
# Check that name of conference is spelt correctly.
###
echo " *** Checking that conference name is spelt correctly"
grep -e "Pycon UK" -e "pycon UK" -e "pyconUK" -e "PyConUK" --line-number --recursive --include "*.html" output | grep -v https://twitter.com/PyConUK | grep -v PyConUK2016/
if [[ $? -eq 0 ]]; then
ERRORS+=("Conference name is not spelt correctly")
fi
if [[ ${#ERRORS[@]} -eq 0 ]]; then
echo " *** All pre-flight checks passed!"
exit 0
else
echo " *** The following pre-flight check(s) failed:"
for error in "${ERRORS[@]}"; do
echo " - $error"
done
exit 1
fi
| #!/bin/bash
ERRORS=()
###
# Check that site can be built.
###
echo " *** Checking that site can be built."
make build
if [[ $? -eq 0 ]]; then
echo " *** Site built ok."
else
echo " *** Site could not be built."
exit 1
fi
###
# Check that no links are broken.
###
# TODO reinstate this
#echo " *** Checking that no links are broken."
#linkchecker --no-status --no-warnings --check-extern "http://localhost:8000"
#if [[ $? -ne 0 ]]; then
# ERRORS+=("Broken links found on site")
#fi
###
# Check that name of conference is spelt correctly.
###
# echo " *** Checking that conference name is spelt correctly"
# grep -e "Pycon UK" -e "pycon UK" -e "pyconUK" -e "PyConUK" --line-number --recursive --include "*.html" output | grep -v https://twitter.com/PyConUK | grep -v PyConUK2016/
# if [[ $? -eq 0 ]]; then
# ERRORS+=("Conference name is not spelt correctly")
# fi
if [[ ${#ERRORS[@]} -eq 0 ]]; then
echo " *** All pre-flight checks passed!"
exit 0
else
echo " *** The following pre-flight check(s) failed:"
for error in "${ERRORS[@]}"; do
echo " - $error"
done
exit 1
fi
|
Add cache-control to S3 objects | #!/bin/bash
TOOL_BUCKET=eviction-lab-tool-staging
MAP_BUCKET=eviction-lab-map-staging
RANKINGS_BUCKET=eviction-lab-rankings-staging
CLOUDFRONT_ID=$CLOUDFRONT_ID_DEV
if [ "$TRAVIS_BRANCH" = "master" ]; then
TOOL_BUCKET=eviction-lab-tool
MAP_BUCKET=eviction-lab-map
RANKINGS_BUCKET=eviction-lab-rankings
CLOUDFRONT_ID=$CLOUDFRONT_ID_PROD
fi
aws s3 cp dist/ s3://$TOOL_BUCKET/tool --acl=public-read --recursive
aws s3 cp dist/index.html s3://$MAP_BUCKET/map/index.html --acl=public-read
node ./build/update-metadata.js
aws s3 cp dist/index.html s3://$RANKINGS_BUCKET/rankings/index.html --acl=public-read
aws cloudfront create-invalidation --distribution-id=$CLOUDFRONT_ID --paths="/*"
| #!/bin/bash
TOOL_BUCKET=eviction-lab-tool-staging
MAP_BUCKET=eviction-lab-map-staging
RANKINGS_BUCKET=eviction-lab-rankings-staging
CLOUDFRONT_ID=$CLOUDFRONT_ID_DEV
if [ "$TRAVIS_BRANCH" = "master" ]; then
TOOL_BUCKET=eviction-lab-tool
MAP_BUCKET=eviction-lab-map
RANKINGS_BUCKET=eviction-lab-rankings
CLOUDFRONT_ID=$CLOUDFRONT_ID_PROD
fi
aws s3 cp dist/ s3://$TOOL_BUCKET/tool --acl=public-read --recursive --cache-control max-age=604800
aws s3 cp dist/index.html s3://$MAP_BUCKET/map/index.html --acl=public-read --cache-control max-age=3600
node ./build/update-metadata.js
aws s3 cp dist/index.html s3://$RANKINGS_BUCKET/rankings/index.html --acl=public-read --cache-control max-age=3600
aws cloudfront create-invalidation --distribution-id=$CLOUDFRONT_ID --paths="/*"
|
Remove verbose logging of directory creation | #!/bin/bash
echo "Removing application and dependencies"
if [ -d "/home/notify-app/notifications-api" ]; then
# Remove and re-create the directory
rm -rf /home/notify-app/notifications-api
mkdir -vp /home/notify-app/notifications-api
fi
| #!/bin/bash
echo "Removing application and dependencies"
if [ -d "/home/notify-app/notifications-api" ]; then
# Remove and re-create the directory
rm -rf /home/notify-app/notifications-api
mkdir -p /home/notify-app/notifications-api
fi
|
Stop generating useless diffs on unchanged files |
#!/bin/sh
for a in *.ui; do pyuic4 $a -o Ui_`basename $a .ui`.py -x; done
pyrcc4 icons.qrc -o icons_rc.py
|
#!/bin/sh
for a in *.ui
do
sed -i 's/^# Created.*$//' $a
pyuic4 $a -o Ui_`basename $a .ui`.py -x
done
pyrcc4 icons.qrc -o icons_rc.py
|
Disable bracketed-paste-magic in zsh 5.1.1, where it is buggy | ## Load smart urls if available
for d in $fpath; do
if [[ -e "$d/url-quote-magic" ]]; then
if [[ -e "$d/bracketed-paste-magic" ]]; then
autoload -Uz bracketed-paste-magic
zle -N bracketed-paste bracketed-paste-magic
fi
autoload -U url-quote-magic
zle -N self-insert url-quote-magic
fi
done
## jobs
setopt long_list_jobs
## pager
export PAGER="less"
export LESS="-R"
## super user alias
alias _='sudo'
alias please='sudo'
## more intelligent acking for ubuntu users
if which ack-grep &> /dev/null;
then
alias afind='ack-grep -il'
else
alias afind='ack -il'
fi
# only define LC_CTYPE if undefined
if [[ -z "$LC_CTYPE" && -z "$LC_ALL" ]]; then
export LC_CTYPE=${LANG%%:*} # pick the first entry from LANG
fi
# recognize comments
setopt interactivecomments
| ## Load smart urls if available
# bracketed-paste-magic is known buggy in zsh 5.1.1 (only), so skip it there; see #4434
autoload -Uz is-at-least
if [[ $ZSH_VERSION != 5.1.1 ]]; then
for d in $fpath; do
if [[ -e "$d/url-quote-magic" ]]; then
if is-at-least 5.1; then
autoload -Uz bracketed-paste-magic
zle -N bracketed-paste bracketed-paste-magic
fi
autoload -Uz url-quote-magic
zle -N self-insert url-quote-magic
break
fi
done
fi
## jobs
setopt long_list_jobs
## pager
export PAGER="less"
export LESS="-R"
## super user alias
alias _='sudo'
alias please='sudo'
## more intelligent acking for ubuntu users
if which ack-grep &> /dev/null; then
alias afind='ack-grep -il'
else
alias afind='ack -il'
fi
# only define LC_CTYPE if undefined
if [[ -z "$LC_CTYPE" && -z "$LC_ALL" ]]; then
export LC_CTYPE=${LANG%%:*} # pick the first entry from LANG
fi
# recognize comments
setopt interactivecomments
|
Add smtp bridge to sandstorm run script | #!/bin/sh
set -ex
export HOME=/var
export LANG=en_US.UTF-8
cd /var
if ! test -e mail; then
mkdir -p mail/cur
mkdir -p mail/new
mkdir -p mail/tmp
chmod -R 770 mail
mkdir -p /var/.local/share/Mailpile
cp -r /default /var/.local/share/Mailpile
cp -r /.gnupg /var
# /usr/bin/python /mp --setup
# /usr/bin/python /mp --set sys.http_host=0.0.0.0
# /usr/bin/python /mp --add /var/mail/
# /usr/bin/python /mp --set prefs.rescan_interval=5
# /usr/bin/python /mp --set prefs.empty_outbox_interval=5
fi
/usr/bin/python /mp --rescan all
/usr/bin/python /mp --www= --wait
| #!/bin/sh
set -ex
export HOME=/var
export LANG=en_US.UTF-8
cd /var
if ! test -e mail; then
mkdir -p mail/cur
mkdir -p mail/new
mkdir -p mail/tmp
chmod -R 770 mail
mkdir -p /var/.local/share/Mailpile
cp -r /default /var/.local/share/Mailpile
cp -r /.gnupg /var
fi
bash -c 'while [ ! -e /tmp/sandstorm-api ]
do
sleep 1
done
/sandstorm-smtp-bridge/bin/sandstorm-smtp-bridge' &
/usr/bin/python /mp --rescan all
/usr/bin/python /mp --www= --wait
|
Add pod install step in run_test.sh | xcodebuild \
-workspace Example/CascadingTableDelegate.xcworkspace \
-scheme CascadingTableDelegate-Example \
-destination 'platform=iOS Simulator,name=iPhone 5s' \
clean test | xcpretty
| pod install --project-directory=Example
xcodebuild \
-workspace Example/CascadingTableDelegate.xcworkspace \
-scheme CascadingTableDelegate-Example \
-destination 'platform=iOS Simulator,name=iPhone 5s' \
clean test | xcpretty
|
Call i18n test data with pattern, not fixed loop. | #!/usr/bin/env bash
shopt -s expand_aliases
testBadPyNewlines() {
start=1
end=6
for ((i=$start; i<=$end; i++))
do
file=${TWLIGHT_HOME}/tests/shunit/data/bad_i18n_newline_$i.py
assertFalse "${file} should cause an error." "perl ${TWLIGHT_HOME}/bin/twlight_i18n_lint.pl ${file}"
done
}
testGoodPy() {
start=1
end=2
for ((i=$start; i<=$end; i++))
do
file=${TWLIGHT_HOME}/tests/shunit/data/good_i18n_$i.py
assertTrue "${file} should not cause an error." "perl ${TWLIGHT_HOME}/bin/twlight_i18n_lint.pl ${file}" ||:
done
}
testBadPyComments() {
start=1
end=2
for ((i=$start; i<=$end; i++))
do
file=${TWLIGHT_HOME}/tests/shunit/data/bad_i18n_comment_$i.py
assertFalse "${file} should cause an error." "perl ${TWLIGHT_HOME}/bin/twlight_i18n_lint.pl ${file}"
done
}
. ${TWLIGHT_HOME}/tests/shunit/shunit2
| #!/usr/bin/env bash
shopt -s expand_aliases
testBadPyNewlines() {
prefix=${TWLIGHT_HOME}/tests/shunit/data/bad_i18n_newline_
for i in ${prefix}*.py
do
assertFalse "${file} should cause an error." "perl ${TWLIGHT_HOME}/bin/twlight_i18n_lint.pl ${i}"
done
}
testGoodPy() {
prefix=${TWLIGHT_HOME}/tests/shunit/data/good_i18n_
for i in ${prefix}*.py
do
assertTrue "${file} should not cause an error." "perl ${TWLIGHT_HOME}/bin/twlight_i18n_lint.pl ${i}" ||:
done
}
testBadPyComments() {
prefix=${TWLIGHT_HOME}/tests/shunit/data/bad_i18n_comment_
for i in ${prefix}*.py
do
assertFalse "${file} should cause an error." "perl ${TWLIGHT_HOME}/bin/twlight_i18n_lint.pl ${i}"
done
}
. ${TWLIGHT_HOME}/tests/shunit/shunit2
|
Fix the git-current-branch call, again | #!/usr/bin/env sh
git pull origin $(./node_modules/.bin/git-current-branch) --rebase
| #!/usr/bin/env sh
git pull origin $(/usr/local/lib/node_modules/ggpullr/node_modules/.bin/git-current-branch) --rebase
|
Add --no-cache flag to apk. | #!/bin/ash
set -euo pipefail
APK="apk -q"
BUILDDEPS="gcc python3-dev musl-dev parallel yaml-dev g++"
TESTDEPS="bitstring pytest wheel virtualenv pip"
PIP3="pip3 -q --no-cache-dir install --upgrade"
FROOT="/faucet-src"
dir=$(dirname "$0")
${APK} add -U git ${BUILDDEPS}
"${dir}/retrycmd.sh" "${PIP3} ${TESTDEPS}"
"${dir}/retrycmd.sh" "${PIP3} -r ${FROOT}/requirements.txt"
${PIP3} ${FROOT}
if [ "$(uname -m)" = "x86_64" ]; then
(
echo "Running unit tests"
cd "${FROOT}"
python3 -m unittest discover "tests/unit/faucet/"
python3 -m unittest discover "tests/unit/gauge/"
)
else
echo "Skipping tests on $(uname -m) platform"
fi
pip3 uninstall -y ${TESTDEPS} || exit 1
for i in ${BUILDDEPS} ; do
${APK} del "$i" || exit 1
done
# Clean up
rm -r "${FROOT}"
rm -r /usr/local/lib/python3*/site-packages/os_ken/tests/
# Smoke test
faucet -V || exit 1
find / -name \*pyc -delete || exit 1
| #!/bin/ash
set -euo pipefail
APK="apk -q --no-cache"
BUILDDEPS="gcc python3-dev musl-dev parallel yaml-dev g++"
TESTDEPS="bitstring pytest wheel virtualenv pip"
PIP3="pip3 -q --no-cache-dir install --upgrade"
FROOT="/faucet-src"
dir=$(dirname "$0")
${APK} add -U git ${BUILDDEPS}
"${dir}/retrycmd.sh" "${PIP3} ${TESTDEPS}"
"${dir}/retrycmd.sh" "${PIP3} -r ${FROOT}/requirements.txt"
${PIP3} ${FROOT}
if [ "$(uname -m)" = "x86_64" ]; then
(
echo "Running unit tests"
cd "${FROOT}"
python3 -m unittest discover "tests/unit/faucet/"
python3 -m unittest discover "tests/unit/gauge/"
)
else
echo "Skipping tests on $(uname -m) platform"
fi
pip3 uninstall -y ${TESTDEPS} || exit 1
for i in ${BUILDDEPS} ; do
${APK} del "$i" || exit 1
done
# Clean up
rm -r "${FROOT}"
rm -r /usr/local/lib/python3*/site-packages/os_ken/tests/
# Smoke test
faucet -V || exit 1
find / -name \*pyc -delete || exit 1
|
Add step to set MySQL default root password | #!/bin/sh
echo "Setting up your Mac..."
# Check for Homebrew and install if we don't have it
if test ! $(which brew); then
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
# Update Homebrew recipes
brew update
# Install all our dependencies with bundle (See Brewfile)
brew tap homebrew/bundle
brew bundle
# Install PHP extensions with PECL
pecl install memcached imagick
# Install global Composer packages
/usr/local/bin/composer global require laravel/installer laravel/spark-installer laravel/valet
# Install Laravel Valet
$HOME/.composer/vendor/bin/valet install
# Create a Sites directory
# This is a default directory for macOS user accounts but doesn't comes pre-installed
mkdir $HOME/Sites
# Removes .zshrc from $HOME (if it exists) and symlinks the .zshrc file from the .dotfiles
rm -rf $HOME/.zshrc
ln -s $HOME/.dotfiles/.zshrc $HOME/.zshrc
# Symlink the Mackup config file to the home directory
ln -s $HOME/.dotfiles/.mackup.cfg $HOME/.mackup.cfg
# Set macOS preferences
# We will run this last because this will reload the shell
source .macos
| #!/bin/sh
echo "Setting up your Mac..."
# Check for Homebrew and install if we don't have it
if test ! $(which brew); then
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
# Update Homebrew recipes
brew update
# Install all our dependencies with bundle (See Brewfile)
brew tap homebrew/bundle
brew bundle
# Set default MySQL root password and auth type.
mysql -u root -e "ALTER USER root@localhost IDENTIFIED WITH mysql_native_password BY 'password'; FLUSH PRIVILEGES;"
# Install PHP extensions with PECL
pecl install memcached imagick
# Install global Composer packages
/usr/local/bin/composer global require laravel/installer laravel/spark-installer laravel/valet
# Install Laravel Valet
$HOME/.composer/vendor/bin/valet install
# Create a Sites directory
# This is a default directory for macOS user accounts but doesn't comes pre-installed
mkdir $HOME/Sites
# Removes .zshrc from $HOME (if it exists) and symlinks the .zshrc file from the .dotfiles
rm -rf $HOME/.zshrc
ln -s $HOME/.dotfiles/.zshrc $HOME/.zshrc
# Symlink the Mackup config file to the home directory
ln -s $HOME/.dotfiles/.mackup.cfg $HOME/.mackup.cfg
# Set macOS preferences
# We will run this last because this will reload the shell
source .macos
|
Comment out data extract for now | #!/bin/bash -v
# Add R build steps here (if any)
pushd extract
./extract.sh
popd
#Rscript Build.R
# Fetch AWS ECR variables
#version=$1
#chmod 700 fetch.sh
#./fetch.sh $version > variables.json
# Build docker image with packer
#packer="/usr/local/bin/packer"
#$packer build -var-file=variables.json worker.json
| #!/bin/bash -v
# Add R build steps here (if any)
#pushd extract
#./extract.sh
#popd
#Rscript Build.R
# Fetch AWS ECR variables
version=$1
chmod 700 fetch.sh
./fetch.sh $version > variables.json
# Build docker image with packer
packer="/usr/local/bin/packer"
$packer build -var-file=variables.json worker.json
|
Fix mkdir to use -p | #!/bin/bash
set -e
echo 'pBuild 1.0'
echo 'Installing Pebble SDK and its Dependencies...'
cd ~
# Get the Pebble SDK and toolchain
PEBBLE_SDK_VER=${PEBBLE_SDK#PebbleSDK-}
if [ ! -d $HOME/pebble-dev/${PEBBLE_SDK} ]; then
wget https://sdk.getpebble.com/download/${PEBBLE_SDK_VER} -O PebbleSDK.tar.gz
wget http://assets.getpebble.com.s3-website-us-east-1.amazonaws.com/sdk/arm-cs-tools-ubuntu-universal.tar.gz
# Build the Pebble directory
mkdir ~/pebble-dev
cd ~/pebble-dev
# Extract the SDK
tar -zxf ~/PebbleSDK.tar.gz
# Extract the toolchain
cd ~/pebble-dev/${PEBBLE_SDK}
tar -zxf ~/arm-cs-tools-ubuntu-universal.tar.gz
fi
# Install the Python library dependencies locally
virtualenv --no-site-packages .env
source .env/bin/activate
pip install -r requirements.txt
deactivate
| #!/bin/bash
set -e
echo 'pBuild 1.0'
echo 'Installing Pebble SDK and its Dependencies...'
cd ~
# Get the Pebble SDK and toolchain
PEBBLE_SDK_VER=${PEBBLE_SDK#PebbleSDK-}
if [ ! -d $HOME/pebble-dev/${PEBBLE_SDK} ]; then
wget https://sdk.getpebble.com/download/${PEBBLE_SDK_VER} -O PebbleSDK.tar.gz
wget http://assets.getpebble.com.s3-website-us-east-1.amazonaws.com/sdk/arm-cs-tools-ubuntu-universal.tar.gz
# Build the Pebble directory
mkdir -p ~/pebble-dev
cd ~/pebble-dev
# Extract the SDK
tar -zxf ~/PebbleSDK.tar.gz
# Extract the toolchain
cd ~/pebble-dev/${PEBBLE_SDK}
tar -zxf ~/arm-cs-tools-ubuntu-universal.tar.gz
fi
# Install the Python library dependencies locally
virtualenv --no-site-packages .env
source .env/bin/activate
pip install -r requirements.txt
deactivate
|
Use local DIR symbol to avoid interference with other scripts | #! /bin/cat
DIR=~/jab/environ.d
[[ -n $WELCOME_BYE ]] && echo Welcome to $DIR
. $DIR/jab.sh
. $DIR/environ.sh
. $DIR/python.sh
. $DIR/company.sh 2>/dev/null
. $DIR/ssh_completion.sh
[[ -n $WELCOME_BYE ]] && echo Bye from $DIR
| #! /bin/cat
ENVIRON_DIR=~/jab/environ.d
[[ -n $WELCOME_BYE ]] && echo Welcome to $ENVIRON_DIR
. $ENVIRON_DIR/jab.sh
. $ENVIRON_DIR/environ.sh
. $ENVIRON_DIR/python.sh
. $ENVIRON_DIR/company.sh 2>/dev/null
. $ENVIRON_DIR/ssh_completion.sh
[[ -n $WELCOME_BYE ]] && echo Bye from $ENVIRON_DIR
|
Allow the delete key to delete a character. | export LSCOLORS="exfxcxdxbxegedabagacad"
export CLICOLOR=true
fpath=($ZSH/functions $fpath)
autoload -U $ZSH/functions/*(:t)
HISTFILE=~/.zsh_history
HISTSIZE=10000
SAVEHIST=10000
setopt NO_BG_NICE # don't nice background tasks
setopt NO_HUP
setopt NO_LIST_BEEP
setopt LOCAL_OPTIONS # allow functions to have local options
setopt LOCAL_TRAPS # allow functions to have local traps
setopt HIST_VERIFY
setopt INC_APPEND_HISTORY_TIME
setopt EXTENDED_HISTORY # add timestamps to history
setopt PROMPT_SUBST
setopt CORRECT
setopt COMPLETE_IN_WORD
setopt IGNORE_EOF
setopt HIST_IGNORE_ALL_DUPS # don't record dupes in history
setopt HIST_REDUCE_BLANKS
# don't expand aliases _before_ completion has finished
# like: git comm-[tab]
setopt COMPLETE_ALIASES
bindkey '^[[H' beginning-of-line
bindkey '^[[F' end-of-line
| export LSCOLORS="exfxcxdxbxegedabagacad"
export CLICOLOR=true
fpath=($ZSH/functions $fpath)
autoload -U $ZSH/functions/*(:t)
HISTFILE=~/.zsh_history
HISTSIZE=10000
SAVEHIST=10000
setopt NO_BG_NICE # don't nice background tasks
setopt NO_HUP
setopt NO_LIST_BEEP
setopt LOCAL_OPTIONS # allow functions to have local options
setopt LOCAL_TRAPS # allow functions to have local traps
setopt HIST_VERIFY
setopt INC_APPEND_HISTORY_TIME
setopt EXTENDED_HISTORY # add timestamps to history
setopt PROMPT_SUBST
setopt CORRECT
setopt COMPLETE_IN_WORD
setopt IGNORE_EOF
setopt HIST_IGNORE_ALL_DUPS # don't record dupes in history
setopt HIST_REDUCE_BLANKS
# don't expand aliases _before_ completion has finished
# like: git comm-[tab]
setopt COMPLETE_ALIASES
bindkey '^[[H' beginning-of-line
bindkey '^[[F' end-of-line
bindkey '\e[3~' delete-char
|
Add on exit function to bash template script | #!/bin/bash
set -euo pipefail
IFS=$'\n\t'
SCRIPTNAME="`readlink -e "$0"`"
SCRIPTDIR="`dirname "$SCRIPTNAME"`"
#set -x
| #!/bin/bash
set -euo pipefail
IFS=$'\n\t'
SCRIPTNAME="`readlink -e "$0"`"
SCRIPTDIR="`dirname "$SCRIPTNAME"`"
STARTDIR="`dirname .`"
#set -x
on_exit_f=()
# function foo {}
# on_exit_f+=('foo')
# function bar {}
# on_exit_f+=('bar')
function on_exit {
for i in "${!on_exit_f[@]}"; do
${on_exit_f[$i]}
done
}
trap on_exit EXIT INT TERM
|
Update build env for care | #!/usr/bin/env bash
# Clone sources
mkdir src
cd src
for pro in atelier lino xl cosi noi book voga welfare avanti extjs6 presto vilma
do
git clone git@github.com:lino-framework/$pro.git
done
for comm in https://github.com/lsaffre/commondata \
https://github.com/lsaffre/commondata-be \
https://github.com/lsaffre/commondata-ee \
https://github.com/lsaffre/commondata-eg \
git@github.com:cylonoven/django-mailbox.git
do
git clone $comm
done
cd ..
virtualenv -p python2 2.7
virtualenv -p python3 3.5
cd src
for PY in 3.5 2.7
do . ../$PY/bin/activate
pip -U pip
pip -U setuptools
for i in atelier lino xl noi django-mailbox extjs6 commondata commondata-be commondata-ee commondata-eg cosi voga welfare vilma avanti book presto
do
pip install -e $i
done
pip install mock ipython radicale
done
sudo apt-get install libreoffice python3-uno python-pygraphviz
sudo apt-get install build-essential libssl-dev libffi-dev python-dev
| #!/usr/bin/env bash
# Clone sources
mkdir src
cd src
for pro in atelier lino xl cosi noi book voga welfare care avanti extjs6 presto vilma
do
git clone git@github.com:lino-framework/$pro.git
done
for comm in https://github.com/lsaffre/commondata \
https://github.com/lsaffre/commondata-be \
https://github.com/lsaffre/commondata-ee \
https://github.com/lsaffre/commondata-eg \
git@github.com:cylonoven/django-mailbox.git
do
git clone $comm
done
cd ..
virtualenv -p python2 2.7
virtualenv -p python3 3.5
cd src
for PY in 3.5 2.7
do . ../$PY/bin/activate
pip -U pip
pip -U setuptools
for i in atelier lino xl noi django-mailbox extjs6 commondata commondata-be commondata-ee commondata-eg cosi voga welfare vilma avanti book presto care
do
pip install -e $i
done
pip install mock ipython radicale
done
sudo apt-get install libreoffice python3-uno python-pygraphviz
sudo apt-get install build-essential libssl-dev libffi-dev python-dev
|
Remove erroneous line in sync script | #!/usr/bin/env bash
if [ $# -lt 2 ]
then
echo "Arguments needed: <user>@<server> webroot"
echo "e.g. l33th4x0r@login.servershop.com www/"
exit 1
fi
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $(dirname $DIR)
SERVERROOT=$(ssh $1 "pwd")
WEBROOT=$SERVERROOT/"$2"
$DIR/../. $1:$WEBROOT/
rsync -alz --delete --stats --progress --exclude=".env" --exclude=".git" --exclude 'public/content' --exclude 'public/subsites' --exclude 'bower_components' --exclude 'node_modules' $DIR/../. $1:$WEBROOT/
cd - > /dev/null 2>&1
echo "SYNCED TO SERVER!"
| #!/usr/bin/env bash
if [ $# -lt 2 ]
then
echo "Arguments needed: <user>@<server> webroot"
echo "e.g. l33th4x0r@login.servershop.com www/"
exit 1
fi
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $(dirname $DIR)
SERVERROOT=$(ssh $1 "pwd")
WEBROOT=$SERVERROOT/"$2"
rsync -alz --delete --stats --progress --exclude=".env" --exclude=".git" --exclude 'public/content' --exclude 'public/subsites' --exclude 'bower_components' --exclude 'node_modules' $DIR/../. $1:$WEBROOT/
cd - > /dev/null 2>&1
echo "SYNCED TO SERVER!"
|
Use copy instead of sync | #!/bin/sh
set -e
if ! keytool -list -keystore "/etc/pki/java/cacerts" -storepass "changeit" -alias "dc0-stl.schafer.lan.64.cer" ; then
mkdir ~/s3files
cd ~/s3files
aws s3 sync . s3://fite-jenkins-config/dc0-stl.schafer.lan.64.cer
keytool -import -trustcacerts -alias "dc0-stl.schafer.lan.64.cer" -file "~/s3files/dc0-stl.schafer.lan.64.cer" -keystore "/etc/pki/java/cacerts" -storepass "changeit" -noprompt
fi
chown -R jenkins /var/jenkins_home
su-exec jenkins /usr/local/bin/run-jenkins.sh
| #!/bin/sh
set -e
KEYSTORE="/etc/ssl/certs/java/cacerts"
LDAPCERT="dc0-stl.schafer.lan.64.cer"
STOREPASSWORD="changit"
if ! keytool -list -keystore $KEYSTORE -storepass $STOREPASSWORD -alias $LDAPCERT ; then
mkdir ~/s3files
cd ~/s3files
aws s3 cp s3://fite-jenkins-config/$LDAPCERT .
keytool -import -trustcacerts -alias $LDAPCERT -file ~/s3files/$LDAPCERT -keystore $KEYSTORE -storepass $STOREPASSWORD -noprompt
fi
chown -R jenkins /var/jenkins_home
su-exec jenkins /usr/local/bin/run-jenkins.sh
|
Fix fuzzy cd into directory | # Create a new directory and enter it.
mkcd() {
mkdir -p "./$1" && cd "./$1"
}
# Fuzzy cd into selected directory.
fcd() {
DIR=`find ${1:-*} -path '*/\.*' -prune -o -type d -print 2> /dev/null | fzf-tmux` \
&& cd "$DIR"
}
| # Create a new directory and enter it.
mkcd() {
mkdir -p "./$1" && cd "./$1"
}
# Fuzzy cd into selected directory.
fd() {
local dir
dir=$(find ${1:-.} -path '*/\.*' -prune \
-o -type d -print 2> /dev/null | fzf --height=50% --border) &&
cd "$dir"
}
|
Use Travis LXD image without all the extra dependencies | #
# Travis LXD container configuration
#
# URL of LXD image with pre-provisioned Media Cloud dependencies
# (use ./.travis-lxd/setup_travis_lxd_image.sh) to create a new one)
MC_LXD_IMAGE_PROVISIONED_URL=https://s3.amazonaws.com/mediacloud-travis-lxd-images/travis-lxd-images/mediacloud-travis-20181219.tar.gz
# LXD image with base Ubuntu
MC_LXD_IMAGE_UBUNTU_BASE=ubuntu:xenial
# Container name
MC_LXD_CONTAINER=mediacloud-travis
# Unprivileged user on container (which can sudo)
MC_LXD_USER=ubuntu
# Path to LXC binary
# (we install LXD from Snap but an outdated lxd-tools might still be around)
LXC_BIN=/snap/bin/lxc
| #
# Travis LXD container configuration
#
# URL of LXD image with pre-provisioned Media Cloud dependencies
# (use ./.travis-lxd/setup_travis_lxd_image.sh) to create a new one)
MC_LXD_IMAGE_PROVISIONED_URL=https://s3.amazonaws.com/mediacloud-travis-lxd-images/travis-lxd-images/mediacloud-travis-20190104.tar.gz
# LXD image with base Ubuntu
MC_LXD_IMAGE_UBUNTU_BASE=ubuntu:xenial
# Container name
MC_LXD_CONTAINER=mediacloud-travis
# Unprivileged user on container (which can sudo)
MC_LXD_USER=ubuntu
# Path to LXC binary
# (we install LXD from Snap but an outdated lxd-tools might still be around)
LXC_BIN=/snap/bin/lxc
|
Exclude expected output and test files from fakes | #!/usr/bin/env bash
set -eu
cd "$(dirname "$0")/.."
# build counterfeiter itself
counterfeiter='/tmp/counterfeiter_test'
trap "rm $counterfeiter" EXIT
go build -o $counterfeiter
# counterfeit all the interfaces we can find
egrep --recursive --include '*.go' 'type [^ ]* interface {' . \
| sed 's#^./\(.*\)/[^/]*.go:type \([^ ]*\) interface {#\1 \2#' \
| while read PACKAGE INTERFACE; do $counterfeiter $PACKAGE $INTERFACE; done
# fix an import oddity in the UI fake
sed -i.bak '/"golang.org\/x\/crypto\/ssh\/terminal"/d' terminal/terminalfakes/fake_ui.go
| #!/usr/bin/env bash
set -eu
cd "$(dirname "$0")/.."
# build counterfeiter itself
counterfeiter='/tmp/counterfeiter_test'
trap "rm $counterfeiter" EXIT
go build -o $counterfeiter
# counterfeit all the interfaces we can find
egrep --recursive --include '*.go' 'type [^ ]* interface {' . \
--exclude-dir 'expected_output' --exclude '*_test.go' \
| sed 's#^./\(.*\)/[^/]*.go:type \([^ ]*\) interface {#\1 \2#' \
| while read PACKAGE INTERFACE; do $counterfeiter $PACKAGE $INTERFACE; done
# fix an import oddity in the UI fake
sed -i.bak '/"golang.org\/x\/crypto\/ssh\/terminal"/d' terminal/terminalfakes/fake_ui.go
|
Check for hub before calling which hub | # Use `hub` as our git wrapper:
# https://hub.github.com/
hub_path=$(which hub)
if (( $+commands[hub] ))
then
alias git=$hub_path
fi
# Git aliases
alias gl='git pull --prune'
alias glog="git log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative"
alias gp='git push origin HEAD'
alias gd='git diff'
alias ga='git add'
alias gc='git commit'
alias gca='git commit -a'
alias gcm='git commit -m'
alias gco='git checkout'
alias gcb='git copy-branch-name'
alias gb='git branch'
alias gs='git status -sb'
| # Use `hub` as our git wrapper:
# https://hub.github.com/
if hash hub 2>/dev/null
then
hub_path=$(which hub)
fi
if (( $+commands[hub] ))
then
alias git=$hub_path
fi
# Git aliases
alias gl='git pull --prune'
alias glog="git log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative"
alias gp='git push origin HEAD'
alias gd='git diff'
alias ga='git add'
alias gc='git commit'
alias gca='git commit -a'
alias gcm='git commit -m'
alias gco='git checkout'
alias gcb='git copy-branch-name'
alias gb='git branch'
alias gs='git status -sb'
|
Allow up to 10 tests to fail before stopping | #!/bin/bash
#
# Run project tests
#
# NOTE: This script expects to be run from the project root with
# ./scripts/run_tests.sh
# Use default environment vars for localhost if not already set
set -o pipefail
source environment_test.sh
function display_result {
RESULT=$1
EXIT_STATUS=$2
TEST=$3
if [ $RESULT -ne 0 ]; then
echo -e "\033[31m$TEST failed\033[0m"
exit $EXIT_STATUS
else
echo -e "\033[32m$TEST passed\033[0m"
fi
}
if [[ -z "$VIRTUAL_ENV" ]] && [[ -d venv ]]; then
source ./venv/bin/activate
fi
flake8 .
display_result $? 1 "Code style check"
npm test
display_result $? 2 "Front end code style check"
## Code coverage
py.test -n4 -x --cov=app --cov-report=term-missing tests/ --junitxml=test_results.xml --strict
display_result $? 3 "Code coverage"
| #!/bin/bash
#
# Run project tests
#
# NOTE: This script expects to be run from the project root with
# ./scripts/run_tests.sh
# Use default environment vars for localhost if not already set
set -o pipefail
source environment_test.sh
function display_result {
RESULT=$1
EXIT_STATUS=$2
TEST=$3
if [ $RESULT -ne 0 ]; then
echo -e "\033[31m$TEST failed\033[0m"
exit $EXIT_STATUS
else
echo -e "\033[32m$TEST passed\033[0m"
fi
}
if [[ -z "$VIRTUAL_ENV" ]] && [[ -d venv ]]; then
source ./venv/bin/activate
fi
flake8 .
display_result $? 1 "Code style check"
npm test
display_result $? 2 "Front end code style check"
## Code coverage
py.test -n4 --maxfail=10 --cov=app --cov-report=term-missing tests/ --junitxml=test_results.xml --strict
display_result $? 3 "Code coverage"
|
Use Python3 binary on CentOS8, and set Python3 as default | #!/bin/sh
# no Python 3 on CentOS 5
[ $1 -eq 5 ] && exit 0
# install Python 3.9 ------------------------------------------------------------------------------
yum install -y \
readline-devel.x86_64
[ $1 -lt 7 ] && source scl_source enable devtoolset-2 2>/dev/null || echo GCC 4.8 enabled
pythonTag=Python-3.8.2
pythonPkg=${pythonTag}.tgz
pythonUrl=https://www.python.org/ftp/python/3.8.2/$pythonPkg
wget --no-check-certificate -O /root/$pythonPkg $pythonUrl \
&& cd /root && tar xvf /root/$pythonPkg \
&& cd $pythonTag \
&& ./configure --enable-shared --enable-unicode=ucs4 --enable-optimizations \
&& gmake altinstall -j \
&& cd /root && rm -rf /root/$pythonTag /root/$pythonPkg
yum remove -y \
readline-devel.x86_64
export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
python3.8 -m ensurepip
python3.8 -m pip install --upgrade pip
python3.8 -m pip install numpy
python3.8 -m pip install sphinx breathe
# Add symlink
ln -s /usr/local/bin/python3.8 /usr/local/bin/python
ln -s /usr/local/bin/python3.8 /usr/local/bin/python3
| #!/bin/sh
# no Python 3 on CentOS 5
[ $1 -eq 5 ] && exit 0
case "$1" in
8) # install distro python3 package
yum install -y \
python3
;;
*) # install Python 3.8 manually
yum install -y \
readline-devel.x86_64
[ $1 -lt 7 ] && source scl_source enable devtoolset-2 2>/dev/null || echo GCC 4.8 enabled
pythonTag=Python-3.8.2
pythonPkg=${pythonTag}.tgz
pythonUrl=https://www.python.org/ftp/python/3.8.2/$pythonPkg
wget --no-check-certificate -O /root/$pythonPkg $pythonUrl \
&& cd /root && tar xvf /root/$pythonPkg \
&& cd $pythonTag \
&& ./configure --enable-shared --enable-unicode=ucs4 --enable-optimizations \
&& gmake altinstall -j \
&& cd /root && rm -rf /root/$pythonTag /root/$pythonPkg
yum remove -y \
readline-devel.x86_64
export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
# Add symlink
ln -s /usr/local/bin/python3.8 /usr/local/bin/python3
;;
esac
# make python3 the default python interpreter
alternatives --set python /usr/bin/python3
python3 -m ensurepip
python3 -m pip install --upgrade pip
python3 -m pip install numpy
python3 -m pip install sphinx breathe
|
Stop installing packages on our containers that are already there | # This script gets a container ready to run our various tests in BuildKite
# make sure we have the network tools in place for various network specs
if [ -f /etc/debian_version ]; then
apt-get update -y && apt-get install -y net-tools iproute2
touch /etc/network/interfaces
elif [ -f /etc/redhat-release ]; then
yum install -y net-tools
fi
# make sure we have the omnibus_overrides specified version of rubygems / bundler
gem update --system $(grep rubygems omnibus_overrides.rb | cut -d'"' -f2)
gem --version
gem uninstall bundler -a -x || true
gem install bundler -v $(grep :bundler omnibus_overrides.rb | cut -d'"' -f2)
bundle --version
rm -f .bundle/config
# force all .rspec tests into progress display to reduce line count
echo --color > .rspec
echo -fp >> .rspec
| # This script gets a container ready to run our various tests in BuildKite
# make sure we have the network tools in place for various network specs
if [ -f /etc/debian_version ]; then
touch /etc/network/interfaces
fi
# make sure we have the omnibus_overrides specified version of rubygems / bundler
gem update --system $(grep rubygems omnibus_overrides.rb | cut -d'"' -f2)
gem --version
gem uninstall bundler -a -x || true
gem install bundler -v $(grep :bundler omnibus_overrides.rb | cut -d'"' -f2)
bundle --version
rm -f .bundle/config
# force all .rspec tests into progress display to reduce line count
echo --color > .rspec
echo -fp >> .rspec
|
Reduce the waiting and put it as var | #! /bin/bash
echo "TEST: I will now sleep 60s to pretend I'm doing something useful!"
sleep 60
echo "Done sleeping, resuming!"
| #! /bin/bash
time=10
echo "TEST: I will now sleep ${time}s to pretend I'm doing something useful!"
sleep ${time}
echo "Done sleeping, resuming!"
|
Build lpass as a separate subprocess. | git clone https://github.com/lastpass/lastpass-cli.git
cd lastpass-cli
make PREFIX=${HOME}/lpass -j 16
make PREFIX=${HOME}/lpass install
| (
git clone https://github.com/lastpass/lastpass-cli.git
cd lastpass-cli
make PREFIX=${HOME}/lpass -j 16
make PREFIX=${HOME}/lpass install
)
|
Update firefox path a different way | #!/bin/bash
# taken from https://stackoverflow.com/questions/36066695/running-protractor-when-webdriver-manager-start-finishes/39252628#39252628
echo "BROWSERBIN is $BROWSERBIN"
if [[ $BROWSERBIN == *"firefox"* ]]; then
echo "Updating path to Firefox"
# Update the path to firefox to take off the -$BVER
# set ./browsers/bin/firefox-stable to ./browsers/bin/firefox
ln -s $BROWSERBIN ${BROWSERBIN:0:${#BROWSERBIN} - ${#BVER} - 1}
FIREFOX=firefox
# Set path to include ./browsers/bin so that `which firefox` picks up this path
# and WebDriver uses it
export PATH=${BROWSERBIN:0:${#BROWSERBIN} - ${#BVER} - ${#FIREFOX} - 1}:$PATH
fi
echo "Firefox is at `which firefox`"
webdriver-manager update
# Start selenium server and trash the verbose error messages from webdriver
webdriver-manager start 2>/dev/null &
# Wait 3 seconds for port 4444 to be listening connections
while ! nc -z 127.0.0.1 4444; do sleep 3; done
./node_modules/.bin/protractor tests/protractor.conf.js
| #!/bin/bash
# taken from https://stackoverflow.com/questions/36066695/running-protractor-when-webdriver-manager-start-finishes/39252628#39252628
echo "BROWSERBIN is $BROWSERBIN"
if [[ $BROWSERBIN == *"firefox"* ]]; then
echo "Updating path to Firefox"
# Update the path to firefox to make Webdriver use it
FIREFOX_PATH=`which firefox`
mv $FIREFOX_PATH $FIREFOX_PATH-bak
ln -s $FIREFOX_PATH $BROWSERBIN
fi
echo "Firefox is at `which firefox`"
webdriver-manager update
# Start selenium server and trash the verbose error messages from webdriver
webdriver-manager start 2>/dev/null &
# Wait 3 seconds for port 4444 to be listening connections
while ! nc -z 127.0.0.1 4444; do sleep 3; done
./node_modules/.bin/protractor tests/protractor.conf.js
|
Add alias for running redis. | alias reload!='. ~/.zshrc'
# My personal alias
alias be='bundle exec'
alias si='sudo apt-get install'
alias sr='sudo apt-get remove --purge'
alias sag='sudo apt-get'
alias uu='sudo apt-get update && sudo apt-get upgrade'
alias aa='sudo apt-get autoclean && sudo apt-get autoremove'
alias esc='xmodmap ~/.xmodmap'
| alias reload!='. ~/.zshrc'
# My personal alias
alias be='bundle exec'
alias si='sudo apt-get install'
alias sr='sudo apt-get remove --purge'
alias sag='sudo apt-get'
alias uu='sudo apt-get update && sudo apt-get upgrade'
alias aa='sudo apt-get autoclean && sudo apt-get autoremove'
alias esc='xmodmap ~/.xmodmap'
alias redis='~/run-redis.sh'
|
Add Raspbian alias for Go compiling | export GOPATH=$HOME/golang
mkdir -p "$GOPATH"
,path-add ~/golang/bin
# Generally I prob won't use <= 1.7 now, but I'll keep this just in case.
export GO15VENDOREXPERIMENT=1
alias golinux="GOOS=linux GOARCH=amd64 go"
alias godarwin="GOOS=darwin GOARCH=amd64 go"
| export GOPATH=$HOME/golang
mkdir -p "$GOPATH"
,path-add ~/golang/bin
# Generally I prob won't use <= 1.7 now, but I'll keep this just in case.
export GO15VENDOREXPERIMENT=1
alias golinux="GOOS=linux GOARCH=amd64 go"
alias godarwin="GOOS=darwin GOARCH=amd64 go"
alias goraspbian="GOOS=linux GOARCH=arm GOARM=7 go"
|
Update test runner to use new executable name | #!/usr/bin/env bash
set -ev
mkdir -p build
cd build
env CXXFLAGS="-Wall -Werror" cmake ..
make
for file in ../tests/test-*.plorth
do
echo $file
./plorth-cli $file
done
| #!/usr/bin/env bash
set -ev
mkdir -p build
cd build
env CXXFLAGS="-Wall -Werror" cmake ..
make
for file in ../tests/test-*.plorth
do
echo $file
./plorth $file
done
|
Add --trace to jekyll build cmd to display error | #!/usr/bin/env bash
#
# The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
# (the "License"). You may not use this work except in compliance with the License, which is
# available at www.apache.org/licenses/LICENSE-2.0
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied, as more fully set forth in the License.
#
# See the NOTICE file distributed with this work for information regarding copyright ownership.
#
#
# This script validates headers and relative links in documentation markdown files
# and verifies markdown is buildable by jekyll
set -eux
SCRIPT_DIR=$(cd "$( dirname "$( readlink "$0" || echo "$0" )" )"; pwd)
GOPATH="${SCRIPT_DIR}" go run "${SCRIPT_DIR}/src/alluxio.org/check-docs/main.go"
cd "${SCRIPT_DIR}"/../../docs && jekyll build
| #!/usr/bin/env bash
#
# The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
# (the "License"). You may not use this work except in compliance with the License, which is
# available at www.apache.org/licenses/LICENSE-2.0
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied, as more fully set forth in the License.
#
# See the NOTICE file distributed with this work for information regarding copyright ownership.
#
#
# This script validates headers and relative links in documentation markdown files
# and verifies markdown is buildable by jekyll
set -eux
SCRIPT_DIR=$(cd "$( dirname "$( readlink "$0" || echo "$0" )" )"; pwd)
GOPATH="${SCRIPT_DIR}" go run "${SCRIPT_DIR}/src/alluxio.org/check-docs/main.go"
cd "${SCRIPT_DIR}"/../../docs && jekyll build --trace
|
Switch to using Opendaylight L3 in the gate | #!/usr/bin/env bash
set -xe
# Drop a token that marks the build as coming from openstack infra
GATE_DEST=$BASE/new
DEVSTACK_PATH=$GATE_DEST/devstack
echo "IS_GATE=True" >> $DEVSTACK_PATH/localrc
# Set here the ODL release to use for the Gate job
echo "ODL_RELEASE=lithium-snapshot-0.3.1" >> $DEVSTACK_PATH/localrc
| #!/usr/bin/env bash
set -xe
# Drop a token that marks the build as coming from openstack infra
GATE_DEST=$BASE/new
DEVSTACK_PATH=$GATE_DEST/devstack
echo "IS_GATE=True" >> $DEVSTACK_PATH/localrc
# Set here the ODL release to use for the Gate job
echo "ODL_RELEASE=lithium-snapshot-0.3.1" >> $DEVSTACK_PATH/localrc
# Switch to using the ODL's L3 implementation
echo "ODL_L3=True" >> $DEVSTACK_PATH/localrc
|
Make clear all kill elevated privilege Redis. | #!/bin/bash
./run-redis.sh&
sleep .5
source local-envs.sh
python clear-redis.py
python clear-db.py
pkill celery
pkill redis
sleep .5
echo "everything is cleared now yw"
| #!/bin/bash
./run-redis.sh&
sleep .5
source local-envs.sh
python clear-redis.py
python clear-db.py
pkill celery
sudo pkill redis
sleep .5
echo "everything is cleared now yw"
|
Clean up dist directory from previous artifacts | export DJHEROKU_MINOR_VERSION=`date +%Y%m%d%H%M%S`
virtualenv -q .venv
. .venv/bin/activate
pip install -r requirements-test.txt
./setup.py lint
nosetests
./setup.py sdist
| rm -rf dist
export DJHEROKU_MINOR_VERSION=`date +%Y%m%d%H%M%S`
virtualenv -q .venv
. .venv/bin/activate
pip install -r requirements-test.txt
./setup.py lint
nosetests
./setup.py sdist
|
Modify script to run production server | mkdir -p ../../static
python3 ../src/m_play/manage.py collectstatic
python3 ../src/m_play/manage.py runserver | mkdir -p ../../static
python3 ../src/m_play/manage.py collectstatic
sudo /etc/init.d/nginx restart
cd ../src/m_play/
uwsgi --socket :8001 --module m_play.wsgi:application --chmod-socket=664 |
Add EPEL yum repository configuration for Oracle Linux 8 | #!/bin/bash
set -eu -o pipefail
# shellcheck disable=SC1091
readonly OS_ID=$(. /etc/os-release; echo "$ID")
# shellcheck disable=SC1091
readonly OS_VERSION=$(. /etc/os-release; echo "$VERSION")
# Install Make
case $OS_ID in
ol)
sudo yum -y install make
if [[ -e /usr/bin/ol_yum_configure.sh ]]; then
sudo /usr/bin/ol_yum_configure.sh
if [[ "${OS_VERSION%%.*}" -eq 7 ]]; then
sudo yum -y install oracle-epel-release-el7.x86_64
fi
fi
;;
ubuntu)
sudo apt update
sudo apt -y install make
;;
esac
make toriaezu
| #!/bin/bash
set -eu -o pipefail
# shellcheck disable=SC1091
readonly OS_ID=$(. /etc/os-release; echo "$ID")
# shellcheck disable=SC1091
readonly OS_VERSION=$(. /etc/os-release; echo "$VERSION")
# Install Make
case $OS_ID in
ol)
sudo yum -y install make
case ${OS_VERSION%%.*} in
7)
sudo yum -y install oracle-epel-release-el7
;;
8)
sudo dnf -y install oracle-epel-release-el8
;;
esac
;;
ubuntu)
sudo apt update
sudo apt -y install make
;;
esac
make toriaezu
|
Remove collection.js from building workflow | #!/bin/bash
node_modules/.bin/uglifyjs \
src/javascript/app.js \
src/javascript/services/store.js \
src/javascript/services/share.js \
src/javascript/services/imageloader.js \
src/javascript/controllers/home.js \
src/javascript/controllers/collection.js \
--mangle \
--compress \
-o www/dist/javascript/tuchong.min.js \
--source-map www/dist/javascript/tuchong.min.js.map \
--source-map-url tuchong.min.js.map | #!/bin/bash
node_modules/.bin/uglifyjs \
src/javascript/app.js \
src/javascript/services/store.js \
src/javascript/services/share.js \
src/javascript/services/imageloader.js \
src/javascript/controllers/home.js \
--mangle \
--compress \
-o www/dist/javascript/tuchong.min.js \
--source-map www/dist/javascript/tuchong.min.js.map \
--source-map-url tuchong.min.js.map |
Remove unnecessary echo in release notes script | #!/bin/bash
set -e
source $(dirname $0)/common.sh
version=$( cat version/version )
milestone=$( echo $version )
if [[ $RELEASE_TYPE = "RELEASE" ]]; then
milestone=${version%.RELEASE}
fi
java -jar /github-release-notes-generator.jar \
--releasenotes.github.username=${GITHUB_USERNAME} \
--releasenotes.github.password=${GITHUB_TOKEN} \
--releasenotes.github.organization=spring-projects \
--releasenotes.github.repository=spring-boot \
${milestone} generated-release-notes/release-notes.md
echo ${version} > generated-release-notes/version
echo v${version} > generated-release-notes/tag
| #!/bin/bash
set -e
source $(dirname $0)/common.sh
version=$( cat version/version )
milestone=${version}
if [[ $RELEASE_TYPE = "RELEASE" ]]; then
milestone=${version%.RELEASE}
fi
java -jar /github-release-notes-generator.jar \
--releasenotes.github.username=${GITHUB_USERNAME} \
--releasenotes.github.password=${GITHUB_TOKEN} \
--releasenotes.github.organization=spring-projects \
--releasenotes.github.repository=spring-boot \
${milestone} generated-release-notes/release-notes.md
echo ${version} > generated-release-notes/version
echo v${version} > generated-release-notes/tag
|
Make kubemark docker image a bit smaller | #!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We don't need all intermediate steps in the image, especially because
# we clean after ourselves. Thus instead of doing all of this in the Dockerfile
# we use this script.
apt-get update
apt-get install -y wget vim rsync ca-certificates
update-ca-certificates
chmod a+x /kubemark.sh
tar xzf /tmp/kubemark.tar.gz
cp kubernetes/server/bin/hyperkube /
cp kubernetes/server/bin/kubemark /
cp kubernetes/server/bin/kubectl /
rm -rf /tmp/*
apt-get remove -y build-essential
apt-get clean -y
apt-get autoremove -y
| #!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We don't need all intermediate steps in the image, especially because
# we clean after ourselves. Thus instead of doing all of this in the Dockerfile
# we use this script.
apt-get update
apt-get install -y wget vim rsync ca-certificates
update-ca-certificates
chmod a+x /kubemark.sh
tar xzf /tmp/kubemark.tar.gz
cp kubernetes/server/bin/kubemark /
rm -rf /tmp/*
apt-get clean -y
apt-get autoremove -y
|
Create or update views and seed job | #!/bin/bash
source jenkins-bootstrap-shared/jenkins_bootstrap.sh
| #!/bin/bash
source jenkins-bootstrap-shared/jenkins_bootstrap.sh
create_view --view-name 'Status Overview' --xml-data ./configs/view_status_overview.xml
create_view --view-name 'Maintenance' --xml-data ./configs/view_maintenance.xml
create_view --view-name 'Build Pipelines' --xml-data ./configs/view_build_pipelines.xml
create_job --job-name '_job_generator' --xml-data ./configs/job_generator.xml
|
Use english i18n instead of spanish | #!/usr/bin/env bash
# Inspired by ~/.osx — http://mths.be/osx
OSX=$(test "`uname`" == "Darwin" && echo "x")
if [[ OSX ]]; then
# Ask for the administrator password upfront
sudo -v
# Keep-alive: update existing `sudo` time stamp until `.osx` has finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
# Set language and text formats
# Note: if you’re in the US, you may replace `EUR` with `USD`, `Centimeters`
# with `Inches`, `en_GB` with `en_US`, and `true` with `false`.
defaults write NSGlobalDomain AppleLanguages -array "es"
defaults write NSGlobalDomain AppleLocale -string "es_ES@currency=EUR"
defaults write NSGlobalDomain AppleMeasurementUnits -string "Centimeters"
defaults write NSGlobalDomain AppleMetricUnits -bool true
echo "Spanish i18n [DONE]"
# Save screenshots to ~/screenshots
defaults write com.apple.screencapture location -string "${HOME}/screenshots"
echo "screenshots will be redirected [DONE]"
else
echo "Skipping ~/.osx evaluation..."
fi
| #!/usr/bin/env bash
# Inspired by ~/.osx — http://mths.be/osx
OSX=$(test "`uname`" == "Darwin" && echo "x")
if [[ OSX ]]; then
# Ask for the administrator password upfront
sudo -v
# Keep-alive: update existing `sudo` time stamp until `.osx` has finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
# Set language and text formats
# Note: if you’re in the US, you may replace `EUR` with `USD`, `Centimeters`
# with `Inches`, `en_GB` with `en_US`, and `true` with `false`.
defaults write NSGlobalDomain AppleLanguages -array "en"
defaults write NSGlobalDomain AppleLocale -string "en_US@currency=EUR"
defaults write NSGlobalDomain AppleMeasurementUnits -string "Centimeters"
defaults write NSGlobalDomain AppleMetricUnits -bool true
echo "English i18n with metrical system [DONE]"
# Save screenshots to ~/screenshots
defaults write com.apple.screencapture location -string "${HOME}/screenshots"
echo "screenshots will be redirected [DONE]"
else
echo "Skipping ~/.osx evaluation..."
fi
|
Use condor version from source for tarball_version | #!/bin/bash
set -e
# makesrpm.sh - generates a .src.rpm from the currently checked out HEAD,
# along with condor.spec and the sources in this directory
usage () {
echo "usage: $(basename "$0")"
echo "Sorry, no options yet..."
exit
}
case $1 in
--help ) usage ;;
esac
# Do everything in a temp dir that will go away on errors or end of script
tmpd=$(mktemp -d)
trap 'rm -rf "$tmpd"' EXIT
git_rev=$(git log -1 --pretty=format:%h)
sed -i "s/^%define git_rev .*/%define git_rev $git_rev/" condor.spec
mkdir "$tmpd/SOURCES"
pushd "$(dirname "$0")" >/dev/null
cp -p * "$tmpd/SOURCES/"
(cd ../../..; git archive HEAD) | gzip > "$tmpd/SOURCES/condor.tar.gz"
srpm=$(rpmbuild -bs -D"_topdir $tmpd" condor.spec)
srpm=${srpm#Wrote: }
popd >/dev/null
cp "$srpm" .
echo "Wrote: ${srpm##*/}"
| #!/bin/bash
set -e
# makesrpm.sh - generates a .src.rpm from the currently checked out HEAD,
# along with condor.spec and the sources in this directory
usage () {
echo "usage: $(basename "$0")"
echo "Sorry, no options yet..."
exit
}
case $1 in
--help ) usage ;;
esac
# Do everything in a temp dir that will go away on errors or end of script
tmpd=$(mktemp -d)
trap 'rm -rf "$tmpd"' EXIT
pushd "$(dirname "$0")" >/dev/null # go to srpm dir
pushd ../../.. >/dev/null # go to root of git tree
# why is it so hard to do a "git cat" ?
condor_version=$( git archive HEAD CMakeLists.txt | tar xO \
| awk -F\" '/^set\(VERSION / {print $2}' )
git archive HEAD | gzip > "$tmpd/condor.tar.gz"
git_rev=$(git log -1 --pretty=format:%h)
popd >/dev/null # back to srpm dir
# should verify this: [[ $condor_version =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]
sed -i "
s/^%define git_rev .*/%define git_rev $git_rev/
s/^%define tarball_version .*/%define tarball_version $condor_version/
" condor.spec
mkdir "$tmpd/SOURCES"
cp -p -- * "$tmpd/SOURCES/"
mv "$tmpd/condor.tar.gz" "$tmpd/SOURCES/"
srpm=$(rpmbuild -bs -D"_topdir $tmpd" condor.spec)
srpm=${srpm#Wrote: }
popd >/dev/null # back to original working dir
mv "$srpm" .
echo "Wrote: ${srpm##*/}"
|
Remove PWM check from start script | #!/bin/sh
./setup.sh
while [ ! -f /sys/devices/platform/ocp/48302000.epwmss/48302200.pwm/pwm/pwmchip0/pwm1/enable ]
do
sleep 2
done
while [ ! -f "/sys/bus/iio/devices/iio:device0/in_voltage6_raw" ]
do
sleep 2
done
./roaster &
sh -c 'cd webserver && node index.js'
| #!/bin/sh
./setup.sh
while [ ! -f "/sys/bus/iio/devices/iio:device0/in_voltage6_raw" ]
do
sleep 2
done
./roaster &
sh -c 'cd webserver && node index.js'
|
Add all crates to clippy script | #!/bin/bash -e
# This script is used for CI and assumes clippy is installed already.
# TODO: run clippy on the other crates, for now we only fixed the clippy warning on the client crate
pushd client
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd proto
# FIXME: we should probably not allow `block_in_if_condition_stmt
cargo clippy --all-features -- \
--allow doc_markdown \
--allow type_complexity \
--allow many_single_char_names \
--allow needless_lifetimes \
--allow block_in_if_condition_stmt \
--allow too_many_arguments \
--allow new_ret_no_self \
--allow enum_variant_names
popd
| #!/bin/bash -e
# This script is used for CI and assumes clippy is installed already.
# TODO: run clippy on the other crates, for now we only fixed the clippy warning on the client crate
pushd client
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd compatibility-tests
cargo clippy --features "bind" --no-default-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd compatibility-tests
cargo clippy -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd integration-tests
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd native-tls
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd openssl
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd proto
# FIXME: we should probably not allow `block_in_if_condition_stmt
cargo clippy --all-features -- \
--allow doc_markdown \
--allow type_complexity \
--allow many_single_char_names \
--allow needless_lifetimes \
--allow block_in_if_condition_stmt \
--allow too_many_arguments \
--allow new_ret_no_self \
--allow enum_variant_names
popd
pushd resolver
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd rustls
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd server
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
pushd util
cargo clippy --all-features -- \
--allow type_complexity \
--allow doc_markdown \
--allow module_inception
popd
|
Update drone env var dump filter | #!/bin/bash
set -aeuo pipefail
env | grep ^DRONE | grep -Ev "PASSWORD|SECRET|TOKEN" >&2
git submodule update --init --checkout --recursive --remote
if [ -n "${DRONE_SYSTEM_HOSTNAME-}" ]; then
source "/etc/docker/avatao-challenge-toolbox/${DRONE_SYSTEM_HOSTNAME}"
else
echo "# DRONE_SYSTEM_HOSTNAME is unset. Falling back to .env" >&2
source "$(dirname "$0")/.env"
fi
if [ -n "${GOOGLE_APPLICATION_CREDENTIALS-}" ]; then
gcloud auth activate-service-account --key-file="$GOOGLE_APPLICATION_CREDENTIALS"
fi
if [ -n "${GOOGLE_PROJECT_ID-}" ]; then
gcloud config set project "$GOOGLE_PROJECT_ID"
fi
exec "$@"
| #!/bin/bash
set -aeuo pipefail
env | grep -E "^DRONE_\w*(BRANCH|COMMIT|REPO|SYSTEM|WORKSPACE)" | grep -Ev "^\w*(PASSWORD|SECRET|TOKEN)" >&2
git submodule update --init --checkout --recursive --remote
if [ -n "${DRONE_SYSTEM_HOSTNAME-}" ]; then
source "/etc/docker/avatao-challenge-toolbox/${DRONE_SYSTEM_HOSTNAME}"
else
echo "# DRONE_SYSTEM_HOSTNAME is unset. Falling back to .env" >&2
source "$(dirname "$0")/.env"
fi
if [ -n "${GOOGLE_APPLICATION_CREDENTIALS-}" ]; then
gcloud auth activate-service-account --key-file="$GOOGLE_APPLICATION_CREDENTIALS"
fi
if [ -n "${GOOGLE_PROJECT_ID-}" ]; then
gcloud config set project "$GOOGLE_PROJECT_ID"
fi
exec "$@"
|
Test before running each change for pre-commit | #!/bin/sh
# Remove pdb uses
sed -i '/.*import pdb/d' $1
sed -i '/.*pdb.set_trace/d' $1
isort $1
black $1
| #!/bin/bash
# Remove pdb uses
if grep -q "import .*pdb" $1; then
sed -i '/.*import .*pdb/d' $1
fi
if grep -q "pdb.set_trace" $1; then
sed -i '/.*pdb.set_trace/d' $1
fi
# Get rid of unused imports
if ! autoflake --check $1; then
autoflake --in-place $1
fi
# Format module
if ! black --check $1; then
black $1
fi
|
Use ROBOT instead of riot for JSON-LD conversion. | #!/bin/bash
# This script will convert a YAML file to Turtle format,
# given a JSON-LD context, also in YAML format.
# Run: ./yaml2turtle.sh context.yaml data.yaml data.ttl
contextyaml=$1
# expected to have an array at the root
inputyaml=$2
outputttl=$3
tmpfile=`mktemp`
echo '{"@context": ' >$tmpfile
yaml2json $contextyaml >>$tmpfile
echo ', "@graph": ' >>$tmpfile
yaml2json $inputyaml >>$tmpfile
echo '}' >>$tmpfile
riot --syntax=jsonld --output=turtle $tmpfile >$outputttl
| #!/bin/bash
# This script will convert a YAML file to Turtle format,
# given a JSON-LD context, also in YAML format.
# Since ROBOT is used, other file extensions for the output
# can select other formats in addition to Turtle.
# Run: ./yaml2turtle.sh context.yaml data.yaml data.ttl
contextyaml=$1
# expected to have an array at the root
inputyaml=$2
outputttl=$3
tmpfile=`mktemp`.jsonld
echo '{"@context": ' >$tmpfile
yaml2json $contextyaml >>$tmpfile
echo ', "@graph": ' >>$tmpfile
yaml2json $inputyaml >>$tmpfile
echo '}' >>$tmpfile
robot convert -i $tmpfile -o $outputttl
|
Use `v` to launch `EDITOR`. | # I still want to be able to reverse search with ctrl-R even though I've opted
# for vi mode.
bindkey '\e[3~' delete-char
bindkey '^R' history-incremental-search-backward
| # I still want to be able to reverse search with ctrl-R even though I've opted
# for vi mode.
bindkey '\e[3~' delete-char
bindkey '^R' history-incremental-search-backward
# Use `v` to launch editor.
# http://stackoverflow.com/questions/890620/unable-to-have-bash-like-c-x-e-in-zsh
autoload -U edit-command-line
zle -N edit-command-line
bindkey -M vicmd v edit-command-line
|
Update job add env variables | #!/bin/bash
cd $WORKSPACE/configuration/util/jenkins/export_slow_logs
pip install -r requirements.txt
. ../assume-role.sh
# Assume the role
set +x
assume-role ${ROLE_ARN}
set -x
python export_slow_query_logs.py
| #!/bin/bash
cd $WORKSPACE/configuration/util/jenkins/export_slow_logs
pip install -r requirements.txt
. ../assume-role.sh
# Assume the role
set +x
assume-role ${ROLE_ARN}
set -x
python export_slow_query_logs.py --environment ${ENVIRONMENT}
|
Use the local built cli and not the one from npm | #!/usr/bin/env bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $DIR/../../../
if [ $TRAVIS_EVENT_TYPE == "push" ] || [ $TRAVIS_EVENT_TYPE == "cron" ] || [ $TRAVIS_EVENT_TYPE == "api" ]
then
TAG_NPM=latest
if [ $TRAVIS_BRANCH == "develop" ] || [ $TRAVIS_EVENT_TYPE == "cron" ] || [ $TRAVIS_EVENT_TYPE == "api" ]
then
TAG_NPM=alpha
fi
echo "Publishing on npm with tag $TAG_NPM"
npx @alfresco/adf-cli@alpha npm-publish --npmRegistry $NPM_REGISTRY_ADDRESS --tokenRegistry $NPM_REGISTRY_TOKEN --tag $TAG_NPM --pathProject "$(pwd)"
else
echo "PR No need to release in NPM"
fi;
| #!/usr/bin/env bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $DIR/../../../
if [ $TRAVIS_EVENT_TYPE == "push" ] || [ $TRAVIS_EVENT_TYPE == "cron" ] || [ $TRAVIS_EVENT_TYPE == "api" ]
then
TAG_NPM=latest
if [ $TRAVIS_BRANCH == "develop" ] || [ $TRAVIS_EVENT_TYPE == "cron" ] || [ $TRAVIS_EVENT_TYPE == "api" ]
then
TAG_NPM=alpha
fi
echo "Publishing on npm with tag $TAG_NPM"
./node_modules/@alfresco/adf-cli/bin/adf-cli npm-publish --npmRegistry $NPM_REGISTRY_ADDRESS --tokenRegistry $NPM_REGISTRY_TOKEN --tag $TAG_NPM --pathProject "$(pwd)"
else
echo "PR No need to release in NPM"
fi;
|
Expand head to top 24 | gzip -cd *_R3.fastq.* | head -1000000 | awk 'NR == 2 || NR % 4 == 2' | grep -v N | sort | uniq -c | sort -nr | head > sample_barcodes.log
| gzip -cd *_R3.fastq.* | head -1000000 | awk 'NR == 2 || NR % 4 == 2' | grep -v N | sort | uniq -c | sort -nr | head -n 24 > sample_barcodes.log
|
Add xsel package to deb list | #!/bin/bash
sudo apt-get -y update
sudo apt-get -y upgrade
sudo apt-get install -y curl \
git\
xorg \
libx11-dev libxft-dev libxinerama-dev xdm suckless-tools dmenu \
cmake pkg-config libfreetype6-dev libfontconfig1-dev libxfixes-dev libxcb-xfixes0-dev python3 \
zsh tmux neovim python3-neovim \
neomutt \
isync \
snapd \
autojump \
curl \
python3-dev \
build-essential \
golang \
npm \
mono-complete
| #!/bin/bash
sudo apt-get -y update
sudo apt-get -y upgrade
sudo apt-get install -y curl \
git\
xorg \
libx11-dev libxft-dev libxinerama-dev xdm suckless-tools dmenu \
cmake pkg-config libfreetype6-dev libfontconfig1-dev libxfixes-dev libxcb-xfixes0-dev python3 \
xsel \
zsh tmux neovim python3-neovim \
neomutt \
isync \
snapd \
autojump \
curl \
python3-dev \
build-essential \
golang \
npm \
mono-complete
|
Update test suite to nightly-2018-12-15 | #!/bin/bash
REV=15d770400eed9018f18bddf83dd65cb7789280a5
set -euo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
mkdir -p rust
touch rust/COMMIT
if [ "$(cat rust/COMMIT)" != "$REV" ]; then
rm -rf rust
mkdir rust
curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
| tar xz --directory rust --strip-components 1
echo "$REV" > rust/COMMIT
fi
| #!/bin/bash
REV=96d1334e567237b1507cd277938e7ae2de75ff51
set -euo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
mkdir -p rust
touch rust/COMMIT
if [ "$(cat rust/COMMIT)" != "$REV" ]; then
rm -rf rust
mkdir rust
curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
| tar xz --directory rust --strip-components 1
echo "$REV" > rust/COMMIT
fi
|
Add set -e to bash script that calls py.test | #!/bin/bash
# This script is just used to establish the ssh connection and run the tests.
function run_tests() {
cp /home/jenkins/workspace/yoctobuild/build/tmp/deploy/images/beaglebone/core-image-base-beaglebone.sdimg \
./core-image-base-beaglebone-modified-testing.sdimg
bash prepare_ext3_testing.sh
py.test --bbb --host=127.0.0.1:12345 --sdimg-location=`pwd`
}
function finish {
kill -9 $PID
}
trap finish EXIT
for i in {1...5};
do ssh -Cfo ExitOnForwardFailure=yes bbb@wmd.no -L 12345:localhost:12345 -N
PID=$(pgrep -f '12345:localhost:12345')
if [ "$PID" -gt 0 ];
then
run_tests
exit 0
fi
done
echo "Failed to establish ssh tunnel." 1>&2
| #!/bin/bash
# This script is just used to establish the ssh connection and run the tests.
set -e
function run_tests() {
cp /home/jenkins/workspace/yoctobuild/build/tmp/deploy/images/beaglebone/core-image-base-beaglebone.sdimg \
./core-image-base-beaglebone-modified-testing.sdimg
bash prepare_ext3_testing.sh
py.test --bbb --host=127.0.0.1:12345 --sdimg-location=`pwd`
}
function finish {
kill -9 $PID
}
trap finish EXIT
for i in {1...5};
do ssh -Cfo ExitOnForwardFailure=yes bbb@wmd.no -L 12345:localhost:12345 -N
PID=$(pgrep -f '12345:localhost:12345')
if [ "$PID" -gt 0 ];
then
run_tests
exit 0
fi
done
echo "Failed to establish ssh tunnel." 1>&2
|
Move message validation into function | #! /bin/sh
set -eu
commit_message_file="$1"
exit_code=0
for index in $(seq 6)
do
"./src/0$index-"* "$commit_message_file" || exit_code=$(( exit_code + $? ))
done
exit "$exit_code"
| #! /bin/sh
set -eu
commit_message_file="$1"
exit_code=0
validate() {
case $1 in
*) "./src/0$index-"* "$commit_message_file" ;;
esac
}
for index in $(seq 6)
do
validate "$index" || exit_code=$(( exit_code + $? ))
done
exit "$exit_code"
|
Set sbt memory option to 256mb | #!/usr/bin/env bash
add-apt-repository ppa:webupd8team/java
echo "deb https://dl.bintray.com/sbt/debian /" | sudo tee -a /etc/apt/sources.list.d/sbt.list
apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823
echo "Retrieved sbt source"
apt-get update
echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | sudo /usr/bin/debconf-set-selections
echo "Accepted oracle license"
apt-get install -y oracle-java8-installer
apt-get install -y scala
apt-get install -y sbt
curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash -
apt-get install -y nodejs
| #!/usr/bin/env bash
add-apt-repository ppa:webupd8team/java
echo "deb https://dl.bintray.com/sbt/debian /" | tee -a /etc/apt/sources.list.d/sbt.list
apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823
echo "Retrieved sbt source"
apt-get update
echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections
echo "Accepted oracle license"
apt-get install -y oracle-java8-installer
apt-get install -y scala
apt-get install -y sbt
curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash -
apt-get install -y nodejs
sed -i '/-mem/c\-mem 256' /etc/sbt-launcher-packaging/sbtopts
echo "Set sbt memory option to 256mb"
|
Revert "Install ia32-libs so we can run 32 bit executables." | #!/bin/bash
sudo apt-get update
sudo apt-get install -y build-essential zlib1g-dev wget curl python-setuptools git libz-dev ia32-libs
mkdir tmp
cd tmp
wget https://raw.github.com/chapmanb/bcbio-nextgen/master/scripts/bcbio_nextgen_install.py
## python bcbio_nextgen_install.py /usr/local/share/bcbio-nextgen --tooldir=/usr/local --sudo --genomes GRCh37 --aligners bwa
| #!/bin/bash
sudo apt-get update
sudo apt-get install -y build-essential zlib1g-dev wget curl python-setuptools git libz-dev
mkdir tmp
cd tmp
wget https://raw.github.com/chapmanb/bcbio-nextgen/master/scripts/bcbio_nextgen_install.py
## python bcbio_nextgen_install.py /usr/local/share/bcbio-nextgen --tooldir=/usr/local --sudo --genomes GRCh37 --aligners bwa
|
Add auto-sync for contributing.md into gh-pages | if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
echo -e "Start to publish lastest Javadoc to gh-pages...\n"
cp -R build/docs/javadoc $HOME/javadoc-latest
cp -f README.md $HOME/index-latest.md
cd $HOME
git config --global user.email "travis@travis-ci.org"
git config --global user.name "travis-ci"
git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/treelogic-swe/aws-mock gh-pages > /dev/null
cd gh-pages
git rm -rf ./javadoc ./index.md
cp -Rf $HOME/javadoc-latest ./javadoc
cp -f $HOME/index-latest.md ./index.md
git add -f .
git commit -m "Auto-publishing on successful travis build $TRAVIS_BUILD_NUMBER"
git push -fq origin gh-pages > /dev/null
echo -e "Done magic with auto publishment to gh-pages.\n"
fi
| if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
echo -e "Start to publish lastest Javadoc to gh-pages...\n"
cp -R build/docs/javadoc $HOME/javadoc-latest
cp -f README.md $HOME/index-latest.md
cp -f contributing.md $HOME/contributing-latest.md
cd $HOME
git config --global user.email "travis@travis-ci.org"
git config --global user.name "travis-ci"
git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/treelogic-swe/aws-mock gh-pages > /dev/null
cd gh-pages
git rm -rf ./javadoc ./index.md
cp -Rf $HOME/javadoc-latest ./javadoc
cp -f $HOME/index-latest.md ./index.md
cp -f $HOME/contributing-latest.md ./contributing.md
git add -f .
git commit -m "Auto-publishing on successful travis build $TRAVIS_BUILD_NUMBER"
git push -fq origin gh-pages > /dev/null
echo -e "Done magic with auto publishment to gh-pages.\n"
fi
|
Add generation of Python 3.4 egg. | #!/bin/bash
# $Id$
base=`dirname $0`/..
cd $base
VERSION=`grep 'VERSION =' setup.py | cut -d "'" -f2`
# Source dists
python setup.py sdist --formats=gztar,zip
# Eggs
#python2.4 setup.py bdist_egg --exclude-source-files
#python2.5 setup.py bdist_egg --exclude-source-files
python2.6 setup.py bdist_egg --exclude-source-files
python2.7 setup.py bdist_egg --exclude-source-files
python3.3 setup.py bdist_egg --exclude-source-files
# Build docs archive
python setup.py sdist --manifest-only
rm dist/pyglet-docs-$VERSION.zip
(cd doc/_build; zip -r docs.zip html)
mv doc/_build/docs.zip dist/pyglet-docs-$VERSION.zip
zip dist/pyglet-docs-$VERSION.zip `grep '^examples/' MANIFEST`
| #!/bin/bash
# $Id$
base=`dirname $0`/..
cd $base
VERSION=`grep 'VERSION =' setup.py | cut -d "'" -f2`
# Source dists
python setup.py sdist --formats=gztar,zip
# Eggs
#python2.4 setup.py bdist_egg --exclude-source-files
#python2.5 setup.py bdist_egg --exclude-source-files
python2.6 setup.py bdist_egg --exclude-source-files
python2.7 setup.py bdist_egg --exclude-source-files
python3.3 setup.py bdist_egg --exclude-source-files
python3.4 setup.py bdist_egg --exclude-source-files
# Build docs archive
python setup.py sdist --manifest-only
rm dist/pyglet-docs-$VERSION.zip
(cd doc/_build; zip -r docs.zip html)
mv doc/_build/docs.zip dist/pyglet-docs-$VERSION.zip
zip dist/pyglet-docs-$VERSION.zip `grep '^examples/' MANIFEST`
|
Remove path & assume PATH has been set correctly | #!/bin/sh
# Loading local keys
export SSH_ROOT="$HOME/.ssh"
export SSH_ENV="$SSH_ROOT/env"
sshagentstart() {
echo "Initialising new SSH agent..."
/usr/bin/ssh-agent | sed 's/^echo/#echo/' > "${SSH_ENV}"
echo SSH Agent started
chmod 600 "${SSH_ENV}"
. "${SSH_ENV}" > /dev/null
for i in ~/.ssh/*rsa; do /usr/bin/ssh-add "$i"; done
sshagentls
}
sshagentstop() {
ssh-add -D
ssh-agent -k
}
sshagentls() {
echo List of keys
ssh-add -l
}
| #!/bin/sh
# Loading local keys
export SSH_ROOT="$HOME/.ssh"
export SSH_ENV="$SSH_ROOT/env"
sshagentstart() {
echo "Initialising new SSH agent..."
ssh-agent | sed 's/^echo/#echo/' > "${SSH_ENV}"
echo SSH Agent started
chmod 600 "${SSH_ENV}"
. "${SSH_ENV}" > /dev/null
for i in ~/.ssh/*rsa; do /usr/bin/ssh-add "$i"; done
sshagentls
}
sshagentstop() {
ssh-add -D
ssh-agent -k
}
sshagentls() {
echo List of keys
ssh-add -l
}
|
Make global node modules available with require() | #!/bin/bash
function set_nodejs_env()
{
local NODEJS_PREFIX="/opt/nodejs"
node_dirs=$(ls -d /opt/nodejs/node-v* | sort -r) # let higher version first
for nodejs_root in $node_dirs;do
local nodejs_bin_dir=$nodejs_root/bin
if [[ -x $nodejs_bin_dir/node ]];then
[[ $PATH != *$nodejs_bin_dir* ]] && export PATH=$nodejs_bin_dir:$PATH
type -P npm >/dev/null 2>&1 && eval "$(npm completion)"
break;
fi
done
}
set_nodejs_env
unset set_nodejs_env
| #!/bin/bash
function set_nodejs_env()
{
local NODEJS_PREFIX="/opt/nodejs"
node_dirs=$(ls -d /opt/nodejs/node-v* | sort -r) # let higher version first
for nodejs_root in $node_dirs;do
local nodejs_bin_dir=$nodejs_root/bin
local nodejs_lib_dir=$nodejs_root/lib/node_modules
if [[ -x $nodejs_bin_dir/node ]];then
[[ $PATH != *$nodejs_bin_dir* ]] && export PATH=$nodejs_bin_dir:$PATH
[[ $NODE_PATH != *$nodejs_lib_dir* ]] &&
export NODE_PATH+=${NODE_PATH:+:}$nodejs_lib_dir
type -P npm >/dev/null 2>&1 && eval "$(npm completion)"
break;
fi
done
}
set_nodejs_env
unset set_nodejs_env
|
Stop trying to build non-existent apps directory | #! /usr/bin/env bash
#set -x
echo "Generating documentation..."
./node_modules/.bin/jsdoc -t node_modules/braintree-jsdoc-template/ -d docs -r apps/ bagit/ core/ migrations/ plugins/ ui/ util/
echo "New doc is in docs/index.html"
#echo "Hello, Travis! You there?"
#if [[ -z "${TRAVIS}" ]]; then
# echo "No Travis here. I'm done here."
# exit;
#else
# echo "Travis: Yup, here."
# cd docs || exit
# git config --global user.email "travis@travis-ci.org"
# git config --global user.name "Travis-CI"
# git init
# git add .
# git commit -m "Latest docs Travis build $TRAVIS_BUILD_NUMBER auto-pushed to gh-pages"
# git push -v --force "https://${GH_TOKEN}@github.com/APTrust/dart.git" master:gh-pages
#fi
| #! /usr/bin/env bash
#set -x
echo "Generating documentation..."
./node_modules/.bin/jsdoc -t node_modules/braintree-jsdoc-template/ -d docs -r bagit/ core/ migrations/ plugins/ ui/ util/
echo "New doc is in docs/index.html"
#echo "Hello, Travis! You there?"
#if [[ -z "${TRAVIS}" ]]; then
# echo "No Travis here. I'm done here."
# exit;
#else
# echo "Travis: Yup, here."
# cd docs || exit
# git config --global user.email "travis@travis-ci.org"
# git config --global user.name "Travis-CI"
# git init
# git add .
# git commit -m "Latest docs Travis build $TRAVIS_BUILD_NUMBER auto-pushed to gh-pages"
# git push -v --force "https://${GH_TOKEN}@github.com/APTrust/dart.git" master:gh-pages
#fi
|
Remove unnecessary folders prior to packaging | #!/usr/bin/env bash
EXE_PATH="build/current/Hypersomnia"
if [ -f "$EXE_PATH" ]; then
echo "Exe found. Uploading."
API_KEY=$1
PLATFORM=Linux
COMMIT_HASH=$(git rev-parse HEAD)
COMMIT_NUMBER=$(git rev-list --count master)
VERSION="1.0.$COMMIT_NUMBER"
FILE_PATH="Hypersomnia-for-$PLATFORM.sfx"
UPLOAD_URL="https://hypersomnia.xyz/upload_artifact.php"
. cmake/linux_launcher_install.sh
cp build/current/Hypersomnia hypersomnia/.Hypersomnia
pushd hypersomnia
rm -r cache
popd
7z a -sfx $FILE_PATH hypersomnia
curl -F "key=$API_KEY" -F "platform=$PLATFORM" -F "commit_hash=$COMMIT_HASH" -F "version=$VERSION" -F "artifact=@$FILE_PATH" $UPLOAD_URL
else
echo "No exe found. Not uploading."
fi
| #!/usr/bin/env bash
EXE_PATH="build/current/Hypersomnia"
if [ -f "$EXE_PATH" ]; then
echo "Exe found. Uploading."
API_KEY=$1
PLATFORM=Linux
COMMIT_HASH=$(git rev-parse HEAD)
COMMIT_NUMBER=$(git rev-list --count master)
VERSION="1.0.$COMMIT_NUMBER"
FILE_PATH="Hypersomnia-for-$PLATFORM.sfx"
UPLOAD_URL="https://hypersomnia.xyz/upload_artifact.php"
. cmake/linux_launcher_install.sh
cp build/current/Hypersomnia hypersomnia/.Hypersomnia
pushd hypersomnia
rm -r cache, logs, user, demos
popd
7z a -sfx $FILE_PATH hypersomnia
curl -F "key=$API_KEY" -F "platform=$PLATFORM" -F "commit_hash=$COMMIT_HASH" -F "version=$VERSION" -F "artifact=@$FILE_PATH" $UPLOAD_URL
else
echo "No exe found. Not uploading."
fi
|
Add test 'it_stops_with_user_answer_being_not_yes' for createDirIfNotExists | #!/usr/bin/env roundup
#
# Baselib test
#/ usage: rerun stubbs:test -m MODULE -p baselib
#
# Author: Ines Neubach <ines.neubach@idn.astzweig.de>
#
# Include baselib file
# -----------------
MODULE="$(basename $(cd ..; pwd))";
BASELIB_PATH="${RERUN_MODULES}/${MODULE}/lib/baselib.sh";
if [ ! -f ${BASELIB_PATH} ]; then
exit;
fi
source ${BASELIB_PATH};
# The Plan
# --------
describe "baselib - createDirIfNotExists"
it_stops_with_no_arguments() {
local RETV;
RETV="$(createDirIfNotExists && echo $? || echo $?)";
test ${RETV} -eq 10;
}
it_stops_with_dir_already_existing() {
local RETV TMPD="$(pwd)/.${MODULE}1.$$";
[ ! -d "${TMPD}" ] && mkdir ${TMPD};
trap "rm -rf \"${TMPD}\"" EXIT INT;
RETV="$(createDirIfNotExists ${TMPD} && echo $? || echo $?)";
test ${RETV} -eq 20;
} | #!/usr/bin/env roundup
#
# Baselib test
#/ usage: rerun stubbs:test -m MODULE -p baselib
#
# Author: Ines Neubach <ines.neubach@idn.astzweig.de>
#
# Include baselib file
# -----------------
MODULE="$(basename $(cd ..; pwd))";
BASELIB_PATH="${RERUN_MODULES}/${MODULE}/lib/baselib.sh";
if [ ! -f ${BASELIB_PATH} ]; then
exit;
fi
source ${BASELIB_PATH};
# The Plan
# --------
describe "baselib - createDirIfNotExists"
it_stops_with_no_arguments() {
local RETV;
RETV="$(createDirIfNotExists && echo $? || echo $?)";
test ${RETV} -eq 10;
}
it_stops_with_dir_already_existing() {
local RETV TMPD="$(pwd)/.${MODULE}1.$$";
[ ! -d "${TMPD}" ] && mkdir ${TMPD};
trap "rm -rf \"${TMPD}\"" EXIT INT;
RETV="$(createDirIfNotExists ${TMPD} && echo $? || echo $?)";
test ${RETV} -eq 20;
}
it_stops_with_user_answer_being_not_yes() {
local RETV TMPD="$(pwd)/.${MODULE}3.$$";
test ! -d ${TMPD};
RETV="$(createDirIfNotExists ${TMPD} <<< "n" && echo $? || echo $?)";
test ${RETV} -eq 40;
test ! -d ${TMPD};
} |
Test against the first two Clojure 1.8 alphas | #!/usr/bin/env bash
set -eux
versions=(1.6.0 1.7.0)
for i in ${versions[@]}
do
cp pom.xml pom-$i.xml
perl -i -pe 's/\[1.6.0,\)/'"$i"'/g' pom-$i.xml
mvn clean test -f pom-$i.xml
done
for i in ${versions[@]}
do
rm pom-$i.xml
done
| #!/usr/bin/env bash
set -eux
versions=(1.6.0 1.7.0 clojure-1.8.0-alpha1 clojure-1.8.0-alpha2)
for i in ${versions[@]}
do
cp pom.xml pom-$i.xml
perl -i -pe 's/\[1.6.0,\)/'"$i"'/g' pom-$i.xml
mvn clean test -f pom-$i.xml
done
for i in ${versions[@]}
do
rm pom-$i.xml
done
|
Fix comment in bin script | #!/usr/bin/env bash
echo "pre commit hook start"
CURRENT_DIRECTORY=`pwd`
GIT_HOOKS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT_DIRECTORY="$GIT_HOOKS_DIR/../.."
cd $PROJECT_DIRECTORY;
PHP_CS_FIXER="vendor/bin/php-cs-fixer"
HAS_PHP_CS_FIXER=false
if [ -x "$PHP_CS_FIXER" ]; then
HAS_PHP_CS_FIXER=true
fi
if $HAS_PHP_CS_FIXER; then
git status --porcelain | grep -e '^[AM]\(.*\).php$' | cut -c 3- | while read line; do
${PHP_CS_FIXER} fix --verbose ${line};
git add "$line";
done
else
echo ""
echo "Please install php-cs-fixer, e.g.:"
echo ""
echo " composer update --dev"
echo ""
fi
cd $CURRENT_DIRECTORY;
echo "pre commit hook finish" | #!/usr/bin/env bash
echo "pre commit hook start"
CURRENT_DIRECTORY=`pwd`
GIT_HOOKS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT_DIRECTORY="$GIT_HOOKS_DIR/../.."
cd $PROJECT_DIRECTORY;
PHP_CS_FIXER="vendor/bin/php-cs-fixer"
HAS_PHP_CS_FIXER=false
if [ -x "$PHP_CS_FIXER" ]; then
HAS_PHP_CS_FIXER=true
fi
if $HAS_PHP_CS_FIXER; then
git status --porcelain | grep -e '^[AM]\(.*\).php$' | cut -c 3- | while read line; do
${PHP_CS_FIXER} fix --verbose ${line};
git add "$line";
done
else
echo ""
echo "Please install php-cs-fixer, e.g.:"
echo ""
echo " composer require --dev fabpot/php-cs-fixer:dev-master"
echo ""
fi
cd $CURRENT_DIRECTORY;
echo "pre commit hook finish"
|
Correct brightness when waking up from sleep | #!/usr/bin/env bash
set -e
set -o pipefail
export PRIMARY_DISPLAY="$(xrandr | awk '/ primary/{print $1}')"
xset s off
xset -dpms
xidlehook \
--not-when-fullscreen \
--not-when-audio \
--timer normal 180 'xrandr --output "$PRIMARY_DISPLAY" --brightness .1' 'xrandr --output "$PRIMARY_DISPLAY" --brightness 1' \
--timer normal 600 'systemctl suspend' ''
| #!/usr/bin/env bash
set -e
set -o pipefail
export PRIMARY_DISPLAY="$(xrandr | awk '/ primary/{print $1}')"
xset s off
xset -dpms
xidlehook \
--not-when-fullscreen \
--not-when-audio \
--timer normal 120 'xrandr --output "$PRIMARY_DISPLAY" --brightness .1' 'xrandr --output "$PRIMARY_DISPLAY" --brightness 1' \
--timer normal 300 'xrandr --output "$PRIMARY_DISPLAY" --brightness 1; systemctl suspend' 'xrandr --output "$PRIMARY_DISPLAY" --brightness 1'
|
Add missing EOF newline for Web UI script | #!/bin/bash
if [ $(git diff --name-only --cached | grep -c '.tsx') != 0 ]; then
lerna run lint-staged-tsx-files --parallel --stream
else
echo 'No .tsx files to lint. Continuing with commit.'
fi | #!/bin/bash
if [ $(git diff --name-only --cached | grep -c '.tsx') != 0 ]; then
lerna run lint-staged-tsx-files --parallel --stream
else
echo 'No .tsx files to lint. Continuing with commit.'
fi
|
Change test DB bindings to nonstandard ports for Travis build | #!/usr/bin/env sh
path_to_settings=django/sierra/sierra/settings/.env
test_settings="SECRET_KEY=\"Some secret key\"
SETTINGS_MODULE=sierra.settings.test
ALLOWED_HOSTS=localhost
ADMINS=\"Joe Test, joe.test@example.com\"
SIERRA_DB_USER=none
SIERRA_DB_PASSWORD=none
SIERRA_DB_HOST=none
DEFAULT_DB_USER=none
DEFAULT_DB_PASSWORD=none
TEST_SIERRA_DB_USER=postgres
TEST_SIERRA_DB_PASSWORD=whatever
TEST_DEFAULT_DB_USER=mariadb
TEST_DEFAULT_DB_PASSWORD=whatever
TIME_ZONE=America/Chicago
SOLRMARC_CONFIG_FILE=test_config.properties
EXPORTER_EMAIL_ON_ERROR=false
EXPORTER_EMAIL_ON_WARNING=false
EXPORTER_AUTOMATED_USERNAME=django_admin"
if [ $TRAVIS ]
then
echo $test_settings > $path_to_settings
echo "Settings file created:"
cat $path_to_settings
else
echo "This script is not intended for use outside a Travis CI environment."
fi
| #!/usr/bin/env sh
path_to_settings=django/sierra/sierra/settings/.env
test_settings="SECRET_KEY=\"Some secret key\"
SETTINGS_MODULE=sierra.settings.test
ALLOWED_HOSTS=localhost
ADMINS=\"Joe Test, joe.test@example.com\"
SIERRA_DB_USER=none
SIERRA_DB_PASSWORD=none
SIERRA_DB_HOST=none
DEFAULT_DB_USER=none
DEFAULT_DB_PASSWORD=none
TEST_SIERRA_DB_USER=postgres
TEST_SIERRA_DB_PASSWORD=whatever
TEST_SIERRA_DB_PORT=5332
TEST_DEFAULT_DB_USER=mariadb
TEST_DEFAULT_DB_PASSWORD=whatever
TEST_DEFAULT_DB_PORT=3206
TIME_ZONE=America/Chicago
SOLRMARC_CONFIG_FILE=test_config.properties
EXPORTER_EMAIL_ON_ERROR=false
EXPORTER_EMAIL_ON_WARNING=false
EXPORTER_AUTOMATED_USERNAME=django_admin"
if [ $TRAVIS ]
then
echo $test_settings > $path_to_settings
echo "Settings file created:"
cat $path_to_settings
else
echo "This script is not intended for use outside a Travis CI environment."
fi
|
Remove the unnecessary 'queue.h' dependency | #! /bin/sh
sudo apk update && sudo apk upgrade
sudo apk add build-base m4 git zip perl ncurses autoconf automake libtool linux-headers
wget 'https://sourceware.org/git/?p=glibc.git;a=blob_plain;f=misc/sys/queue.h;hb=HEAD' -O queue.h
sudo mv queue.h /usr/include/sys/
| #! /bin/sh
sudo apk update && sudo apk upgrade
sudo apk add build-base m4 git zip perl ncurses autoconf automake libtool linux-headers
|
Remove disk encryption from initit-config. | #!/bin/bash
#
# This script is used to initialize the configuration.
# If something is already initialized, it doesn't do anything.
# (Except updating the apps.yml file)
DIR="$(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "====================================="
echo "`date "+%F %T"` Initializing config where necessary ... "
echo "====================================="
# check that USBs with correctly labeled partitions exist and if so
# prepare and encrypt key, storage, swap
# XXX potentially move out of initialize config, as it has potential fatal side effects
echo Launching cryptpart
(cd $DIR/cryptpart; . ./encrypt_device.sh)
echo Finished cryptpart
# docker-compose will be rendered in this folder
mkdir -p /opt/cloudfleet/data/config/cache
mkdir -p /opt/cloudfleet/data/shared/users
mkdir -p /opt/cloudfleet/data/logs
. $DIR/create-crontab.sh
echo "====================================="
echo "`date "+%F %T"` Initialized config where necessary ... "
echo "====================================="
| #!/bin/bash
#
# This script is used to initialize the configuration.
# If something is already initialized, it doesn't do anything.
# (Except updating the apps.yml file)
DIR="$(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "====================================="
echo "`date "+%F %T"` Initializing config where necessary ... "
echo "====================================="
# docker-compose will be rendered in this folder
mkdir -p /opt/cloudfleet/data/config/cache
mkdir -p /opt/cloudfleet/data/shared/users
mkdir -p /opt/cloudfleet/data/logs
. $DIR/create-crontab.sh
echo "====================================="
echo "`date "+%F %T"` Initialized config where necessary ... "
echo "====================================="
|
Move Doxygen download URL to https | #!/usr/bin/env bash
# Updates the documentation strings in the JSON files from the ITK Doxygen.
#
# Configuration for directories need to be manually done in the
# config_vars.sh file. The ITK Doxygen XML will automatically be
# downloaded if needed.
#
# Usage: JSONDocUpdate.sh Code/BasicFilters/json/SomeFilter.json
#
die() {
echo "$@" 1>&2
exit 1
}
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Load configuration variable from file
. ${DIR}/config_vars.sh || die 'Unable to find local \"config_vars.sh\" configuration file.'
# Download the nightly Doxygen XML if needed
[ -e ${ITK_XML}/itkImage_8h.xml ] ||
( cd ${ITK_XML} &&
echo "Downloading ITK Doxygen XML..." &&
curl -O http://public.kitware.com/pub/itk/NightlyDoxygen/InsightDoxygenDocXml.tar.gz &&
cd .. &&
tar -zxf xml/InsightDoxygenDocXml.tar.gz ) ||
die 'Unable to get ITK Doxygen XML'
# Does the work of extracting the string from the XML, and putting them into the JSON
${PYTHON_EXECUTABLE} "${DIR}/GenerateDoc.py" "$1" ${ITK_XML} ||
die 'Error running GenerateDoc.py'
| #!/usr/bin/env bash
# Updates the documentation strings in the JSON files from the ITK Doxygen.
#
# Configuration for directories need to be manually done in the
# config_vars.sh file. The ITK Doxygen XML will automatically be
# downloaded if needed.
#
# Usage: JSONDocUpdate.sh Code/BasicFilters/json/SomeFilter.json
#
die() {
echo "$@" 1>&2
exit 1
}
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Load configuration variable from file
. ${DIR}/config_vars.sh || die 'Unable to find local \"config_vars.sh\" configuration file.'
# Download the nightly Doxygen XML if needed
[ -e ${ITK_XML}/itkImage_8h.xml ] ||
( cd ${ITK_XML} &&
echo "Downloading ITK Doxygen XML..." &&
curl -O https://public.kitware.com/pub/itk/NightlyDoxygen/InsightDoxygenDocXml.tar.gz &&
cd .. &&
tar -zxf xml/InsightDoxygenDocXml.tar.gz ) ||
die 'Unable to get ITK Doxygen XML'
# Does the work of extracting the string from the XML, and putting them into the JSON
${PYTHON_EXECUTABLE} "${DIR}/GenerateDoc.py" "$1" ${ITK_XML} ||
die 'Error running GenerateDoc.py'
|
Fix to avoid password shown to user - Added Jenkins file | #!/bin/sh
if [ -r ./env.sh ]; then
. ./env.sh
fi
export PATH=$(echo $PATH | sed 's/8/7/g')
mvn -s settings.xml clean install -Dsolutions.codechecking.phase=verify | #!/bin/sh
echo $PATH
if [ -r ./env.sh ]; then
. ./env.sh
echo 'env.sh found'
fi
export PATH=$(echo $PATH | sed 's/8/7/g')
echo $PATH
mvn -s settings.xml clean install -Dsolutions.codechecking.phase=verify |
Handle directories containing whitespace properly | #!/usr/bin/env bash
set -eu
set -o pipefail
cd `dirname $0`
FSIARGS=""
OS=${OS:-"unknown"}
if [[ "$OS" != "Windows_NT" ]]
then
FSIARGS="--fsiargs -d:MONO"
fi
function run() {
if [[ "$OS" != "Windows_NT" ]]
then
mono "$@"
else
"$@"
fi
}
run .paket/paket.bootstrapper.exe
if [[ "$OS" != "Windows_NT" ]] &&
[ ! -e ~/.config/.mono/certs ]
then
mozroots --import --sync --quiet
fi
run .paket/paket.exe restore
[ ! -e build.fsx ] && run .paket/paket.exe update
[ ! -e build.fsx ] && run packages/FAKE/tools/FAKE.exe init.fsx
run packages/FAKE/tools/FAKE.exe "$@" $FSIARGS build.fsx
| #!/usr/bin/env bash
set -eu
set -o pipefail
cd "$(dirname "$0")"
FSIARGS=""
OS=${OS:-"unknown"}
if [[ "$OS" != "Windows_NT" ]]
then
FSIARGS="--fsiargs -d:MONO"
fi
function run() {
if [[ "$OS" != "Windows_NT" ]]
then
mono "$@"
else
"$@"
fi
}
run .paket/paket.bootstrapper.exe
if [[ "$OS" != "Windows_NT" ]] &&
[ ! -e ~/.config/.mono/certs ]
then
mozroots --import --sync --quiet
fi
run .paket/paket.exe restore
[ ! -e build.fsx ] && run .paket/paket.exe update
[ ! -e build.fsx ] && run packages/FAKE/tools/FAKE.exe init.fsx
run packages/FAKE/tools/FAKE.exe "$@" $FSIARGS build.fsx
|
Exclude swap files and sass cache when listing files | # Shortcuts
_ls(){
clear
if ls --color > /dev/null 2>&1; then
# GNU `ls`
ls \
--almost-all \
--classify \
--color=always \
--group-directories-first \
--hide-control-chars \
--human-readable \
--ignore=*.pyc \
--ignore=.*.swp \
--ignore=.DS_Store \
--ignore=.git \
--ignore=.gitignore \
--ignore=.svn \
--literal \
--time-style=local \
-X \
-l \
-v
else
# OS X `ls`
ls -l -F -G
fi
}
alias grep="grep --color --line-number"
alias h="history"
alias j="jobs"
alias l="_ls"
alias m="mate ."
alias o="open"
alias oo="open ."
alias s="subl ."
alias t="tree"
alias ip="ifconfig -a | grep -o 'inet6\? \(\([0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+\)\|[a-fA-F0-9:]\+\)' | sed -e 's/inet6* //' | sort | sed 's/\('$(ipconfig getifaddr en1)'\)/\1 [LOCAL]/'"
| # Shortcuts
_ls(){
clear
if ls --color > /dev/null 2>&1; then
# GNU `ls`
ls \
--almost-all \
--classify \
--color=always \
--group-directories-first \
--hide-control-chars \
--human-readable \
--ignore=*.pyc \
--ignore=.swp \
--ignore=.*.swp \
--ignore=.DS_Store \
--ignore=.git \
--ignore=.gitignore \
--ignore=.sass-cache \
--ignore=.svn \
--literal \
--time-style=local \
-X \
-l \
-v
else
# OS X `ls`
ls -l -F -G
fi
}
alias grep="grep --color --line-number"
alias h="history"
alias j="jobs"
alias l="_ls"
alias m="mate ."
alias o="open"
alias oo="open ."
alias s="subl ."
alias t="tree"
alias ip="ifconfig -a | grep -o 'inet6\? \(\([0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+\)\|[a-fA-F0-9:]\+\)' | sed -e 's/inet6* //' | sort | sed 's/\('$(ipconfig getifaddr en1)'\)/\1 [LOCAL]/'"
|
Update oss-fuzz build script to use compile_go_fuzzer | function compile_fuzzer {
path=$1
function=$2
fuzzer=$3
go-fuzz -func $function -o $fuzzer.a $path
$CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer
}
git clone https://github.com/tdewolff/parse
find $GOPATH/src/github.com/tdewolff/parse/tests/* -maxdepth 0 -type d | while read target
do
fuzz_target=`echo $target | rev | cut -d'/' -f 1 | rev`
compile_fuzzer github.com/tdewolff/parse/tests/$fuzz_target Fuzz parse-$fuzz_target-fuzzer
done
find $GOPATH/src/github.com/tdewolff/minify/tests/* -maxdepth 0 -type d | while read target
do
fuzz_target=`echo $target | rev | cut -d'/' -f 1 | rev`
compile_fuzzer github.com/tdewolff/minify/tests/$fuzz_target Fuzz minify-$fuzz_target-fuzzer
done
| # compile_go_fuzzer can be found in the oss-fuzz repository
git clone https://github.com/tdewolff/parse
find $GOPATH/src/github.com/tdewolff/parse/tests/* -maxdepth 0 -type d | while read target
do
fuzz_target=`echo $target | rev | cut -d'/' -f 1 | rev`
compile_go_fuzzer github.com/tdewolff/parse/tests/$fuzz_target Fuzz parse-$fuzz_target-fuzzer
done
find $GOPATH/src/github.com/tdewolff/minify/tests/* -maxdepth 0 -type d | while read target
do
fuzz_target=`echo $target | rev | cut -d'/' -f 1 | rev`
compile_go_fuzzer github.com/tdewolff/minify/tests/$fuzz_target Fuzz minify-$fuzz_target-fuzzer
done
|
Make antigen-hs home, output dir, and configuration file configurable | # See https://github.com/Tarrasch/antigen-hs
if [[ "$0" != $HOME/.zsh/antigen-hs/init.zsh ]]
then
echo "Put this file in '~/.zsh/antigen-hs/init.zsh' please!"
fi
antigen-hs-compile () {
runghc -i"$HOME/.zsh/antigen-hs/" -- "$HOME/.zsh/MyAntigen.hs"
}
() {
local FILE_TO_SOURCE="$HOME/.antigen-hs/antigen-hs.zsh"
if [[ -f $FILE_TO_SOURCE ]]
then
source $FILE_TO_SOURCE
else
echo "Didn't find file $FILE_TO_SOURCE"
echo "Try running antigen-hs-compile"
fi
}
| # See https://github.com/Tarrasch/antigen-hs
ANTIGEN_HS_HOME=${${0:A}:h}
if [[ -z "$ANTIGEN_HS_OUT" ]]; then
ANTIGEN_HS_OUT="$HOME/.antigen-hs"
fi
if [[ -z "$ANTIGEN_HS_MY" ]]; then
ANTIGEN_HS_MY="$ANTIGEN_HS_HOME/../MyAntigen.hs"
fi
antigen-hs-compile () {
runghc -i"$ANTIGEN_HS_HOME/" -- "$ANTIGEN_HS_MY"
}
() {
local FILE_TO_SOURCE="$ANTIGEN_HS_OUT/antigen-hs.zsh"
if [[ -f $FILE_TO_SOURCE ]]
then
source $FILE_TO_SOURCE
else
echo "Didn't find file $FILE_TO_SOURCE"
echo "Try running antigen-hs-compile"
fi
}
|
Fix an incorrect -X parameter | #!/usr/bin/env bash
if [ -z $GOPATH ]
then
#export GOPATH=$PWD/lib
echo "\$GOPATH not set. Please run 'export GOPATH=\$PWD/lib' (or wherever you prefer) and run this again."
exit
fi
GOCHAN_VERBOSE=0
GOCHAN_VERSION="0.9"
GOCHAN_BUILDTIME=$(date +%y%m%d.%H%m)
CGO_ENABLED=0
GOARCH=amd64
SUFFIX=""
if [[ $GOOS == "windows" ]]
then
SUFFIX=".exe"
fi
go build -v -ldflags "-w -X=main.version $GOCHAN_VERSION -X main.buildtime_str=$GOCHAN_BUILDTIME -X main.verbose_str=$GOCHAN_VERBOSE" -o gochan$SUFFIX ./src
# the -w ldflag omits debugging stuff
| #!/usr/bin/env bash
if [ -z $GOPATH ]
then
#export GOPATH=$PWD/lib
echo "\$GOPATH not set. Please run 'export GOPATH=\$PWD/lib' (or wherever you prefer) and run this again."
exit
fi
GOCHAN_VERBOSE=0
GOCHAN_VERSION="0.9"
GOCHAN_BUILDTIME=$(date +%y%m%d.%H%m)
CGO_ENABLED=0
GOARCH=amd64
SUFFIX=""
if [[ $GOOS == "windows" ]]
then
SUFFIX=".exe"
fi
go build -v -ldflags "-w -X main.version=$GOCHAN_VERSION -X main.buildtime_str=$GOCHAN_BUILDTIME -X main.verbose_str=$GOCHAN_VERBOSE" -o gochan$SUFFIX ./src
# the -w ldflag omits debugging stuff
|
Remove language-csharp : The language-csharp package is bundled with Atom and should not be explicitly installed. | #!/bin/sh
apm install atom-beautify
apm install atom-runner
apm install file-icons
apm install jsonpp
apm install language-csharp
apm install language-haskell
apm install language-swift
apm install linter
apm install linter-coffeelint
apm install linter-jshint
apm install linter-jsonlint
apm install linter-ruby
apm install open-recent
apm install redpen
| #!/bin/sh
apm install atom-beautify
apm install atom-runner
apm install file-icons
apm install jsonpp
apm install language-haskell
apm install language-swift
apm install linter
apm install linter-coffeelint
apm install linter-jshint
apm install linter-jsonlint
apm install linter-ruby
apm install open-recent
apm install redpen
|
Add aliases for a few frequently used docker commands on our EC2 instances. | alias watch='watch '
alias dps='docker ps'
function dl { (docker ps | fgrep 'aws.com/kafka:' | awk '{print $1}'; docker ps -l -q) | head -1;}
function dlog { x="$(dl)"; if [ -z "$x" ]; then echo "Container not running."; else docker logs "$@" $x; fi ;}
alias dlogf='dlog -f'
alias dlogt='dlog -t'
function dex { x="$(dl)"; if [ -z "$x" ]; then echo "Container not running."; else docker exec -it $x "$@"; fi ;}
function dattach { x="$(dl)"; if [ -z "$x" ]; then echo "Container not running."; else docker attach --no-stdin --sig-proxy=false $x "$@"; fi ;}
| alias watch='watch '
alias dps='docker ps'
function dl { (docker ps | fgrep 'aws.com/kafka:' | awk '{print $1}'; docker ps -l -q) | head -1;}
function dlog { x="$(dl)"; if [ -z "$x" ]; then echo "Container not running."; else docker logs "$@" $x; fi ;}
alias dlogf='dlog -f'
alias dlogt='dlog -t'
function dex { x="$(dl)"; if [ -z "$x" ]; then echo "Container not running."; else docker exec -it $x "$@"; fi ;}
function dattach { x="$(dl)"; if [ -z "$x" ]; then echo "Container not running."; else docker attach --no-stdin --sig-proxy=false $x "$@"; fi ;}
alias di='docker images'
alias drm='docker rm'
alias drmi='docker rmi'
|
Add more directories to coverage | #!/bin/bash
set -euo pipefail
SCRIPTPATH=$(readlink -f "$0")
TESTDIR=$(dirname "${SCRIPTPATH}")
BASEDIR=$(readlink -f "${TESTDIR}/../..")
if [[ "$*" == "" ]] ; then
files=()
readarray -t root_files \
<<< "$(find "${BASEDIR}" -maxdepth 1 -type f ! -size 0 -name '*.py' -exec realpath {} \;)"
files=("${files[@]}" "${root_files[@]}")
for dir in clib faucet tests ; do
readarray -t sub_files \
<<< "$(find "${BASEDIR}/${dir}/" -type f ! -size 0 -name '*.py' -exec realpath {} \;)"
files=("${files[@]}" "${sub_files[@]}")
done
for file in "${files[@]}"; do
echo "${file}"
done | sort
else
cd "${BASEDIR}"
readlink -f "$@" | sort
fi
| #!/bin/bash
set -euo pipefail
SCRIPTPATH=$(readlink -f "$0")
TESTDIR=$(dirname "${SCRIPTPATH}")
BASEDIR=$(readlink -f "${TESTDIR}/../..")
if [[ "$*" == "" ]] ; then
files=()
readarray -t root_files \
<<< "$(find "${BASEDIR}" -maxdepth 1 -type f ! -size 0 -name '*.py' -exec realpath {} \;)"
files=("${files[@]}" "${root_files[@]}")
for dir in adapters clib docs faucet tests ; do
readarray -t sub_files \
<<< "$(find "${BASEDIR}/${dir}/" -type f ! -size 0 -name '*.py' -exec realpath {} \;)"
files=("${files[@]}" "${sub_files[@]}")
done
for file in "${files[@]}"; do
echo "${file}"
done | sort
else
cd "${BASEDIR}"
readlink -f "$@" | sort
fi
|
Stop packing up things, just dump the modules in a known directory | #
# This script fetches all published IETF RFCs and drafts, extracts any
# found YANG modules and packs it up in a tarball.
#
TMPDIR=$(mktemp -d /var/tmp/yangmodules)
if [ $? -ne 0 ]; then
echo "$0: Can't create temp file, exiting..."
exit 1
fi
echo "Fetching files into $TMPDIR"
rsync -az --no-l --include="draft*.txt" --exclude="*" --delete rsync.ietf.org::internet-drafts $TMPDIR/my-id-mirror
rsync -az --no-l --include="rfc*.txt" --exclude="*" --delete rsync.ietf.org::rfc $TMPDIR/my-rfc-mirror
mkdir $TMPDIR/all-yang
for file in $(egrep -l '^[ \t]*(sub)?module +[\\'\"]?[-A-Za-z0-9]*[\\'\"]? *\{.*$' $TMPDIR/my-id-mirror/*) ; do xym --dstdir $TMPDIR/all-yang --strict True $file; done
for file in $(egrep -l '^[ \t]*(sub)?module +[\\'\"]?[-A-Za-z0-9]*[\\'\"]? *\{.*$' $TMPDIR/my-rfc-mirror/*) ; do xym --dstdir $TMPDIR/all-yang --strict True $file; done
# Should probably exclude more weird non-useful modules here
tar --exclude="example*" -zcvf ./all-yang.tgz -C $TMPDIR all-yang
rm -rf mkdir $TMPDIR/all-yang | #
# This script fetches all published IETF RFCs and drafts, extracts any
# found YANG modules and packs it up in a tarball.
#
TMPDIR=/var/tmp/yangmodules/work
EXTRACTDIR=/var/tmp/yangmodules/extracted
mkdir -pv $TMPDIR
mkdir -pv $EXTRACTDIR
if [ $? -ne 0 ]; then
echo "$0: Can't create temp directory, exiting..."
exit 1
fi
echo "Fetching files into $TMPDIR"
rsync -az --no-l --include="draft*.txt" --exclude="*" --delete rsync.ietf.org::internet-drafts $TMPDIR/my-id-mirror
rsync -az --no-l --include="rfc*.txt" --exclude="*" --delete rsync.ietf.org::rfc $TMPDIR/my-rfc-mirror
for file in $(egrep -l '^[ \t]*(sub)?module +[\\'\"]?[-A-Za-z0-9]*[\\'\"]? *\{.*$' $TMPDIR/my-id-mirror/*) ; do xym --dstdir $EXTRACTDIR --strict True $file; done
for file in $(egrep -l '^[ \t]*(sub)?module +[\\'\"]?[-A-Za-z0-9]*[\\'\"]? *\{.*$' $TMPDIR/my-rfc-mirror/*) ; do xym --dstdir $EXTRACTDIR --strict True $file; done
|
Improve the bootstrapping of virtualenvwrapper | export VIRTUALENVWRAPPER_PYTHON=`which python2 || which python`
export WORKON_HOME=$HOME/virtual_envs
mkdir -p $WORKON_HOME
source `which virtualenvwrapper.sh`
| export VIRTUALENVWRAPPER_PYTHON=`which python2 || which python`
export WORKON_HOME=$HOME/virtual_envs
if [[ ! -e "$WORKON_HOME" ]]; then
$VIRTUALENVWRAPPER_PYTHON -m pip install --user virtualenvwrapper
mkdir -p $WORKON_HOME
fi
source `which virtualenvwrapper.sh`
|
Test snippet format matches a directory it is in | #!/usr/bin/env bash
SPACED=$(grep -REn '^ .+' --include '*.snippets' snippets)
if [[ $? -ne 1 ]]; then
echo These snippet lines are indented with spaces:
echo
echo "$SPACED"
echo
echo Tests failed!
exit 1
fi
echo Tests passed!
exit 0
| #!/usr/bin/env bash
check=0
function test_space_indented {
local spaced
spaced=$(grep -REn '^ ' --include '*.snippets' snippets)
if [[ $? -ne 1 ]]; then
echo "These snippet lines are indented with spaces:"
echo "$spaced"
echo
(( check++ ))
fi
}
function test_snipmate_format {
local ultisnips_in_snipmate
ultisnips_in_snipmate=$(grep -REn 'endsnippet' --include '*.snippets' snippets)
if [[ $? -ne 1 ]]; then
echo "These snippet definitions are probably in UltiSnips format but stored in the snipmate directory"
echo "$ultisnips_in_snipmate"
echo
(( check++ ))
fi
}
test_space_indented
test_snipmate_format
if [ $check -eq 0 ]; then
echo "Tests passed!"
exit 0
else
echo "$check test(s) failed out of 2!"
exit 1
fi
|
Create docker group inside the build container even if there is already a group with the same GID. | #!/usr/bin/env bash
getent passwd ${SKIPPER_USERNAME} > /dev/null
if [ x"$?" != x"0" ]; then
useradd -u ${SKIPPER_UID} --non-unique -M "${SKIPPER_USERNAME}"
fi
groupadd -g ${SKIPPER_DOCKER_GID} docker
usermod -G root,docker ${SKIPPER_USERNAME}
su -m ${SKIPPER_USERNAME} -c "$@"
| #!/usr/bin/env bash
getent passwd ${SKIPPER_USERNAME} > /dev/null
if [ x"$?" != x"0" ]; then
useradd -u ${SKIPPER_UID} --non-unique -M "${SKIPPER_USERNAME}"
fi
groupadd -g ${SKIPPER_DOCKER_GID} --non-unique docker
usermod -G root,docker ${SKIPPER_USERNAME}
su -m ${SKIPPER_USERNAME} -c "$@"
|
Build file points to Docker Hub. | #!/bin/sh
# App name/home registry
REGISTRY_URL=registry.sandbox.glympse.com
IMAGE_NAME=tools/docker-cluster
# Helpers
IMAGE_TAG=$(git rev-parse --short HEAD)
TAGGED_IMAGE=$REGISTRY_URL/$IMAGE_NAME:$IMAGE_TAG
LATEST_IMAGE=$REGISTRY_URL/$IMAGE_NAME:latest
# Build the image
docker build -t $IMAGE_NAME .
# Mark image with registry URL and tag
docker tag -f $IMAGE_NAME $TAGGED_IMAGE
docker tag -f $IMAGE_NAME $LATEST_IMAGE
# Push image to repote registry
docker push $TAGGED_IMAGE
docker push $LATEST_IMAGE
| #!/bin/sh
# App name
IMAGE_NAME=glympse/docker-cluster
LATEST_IMAGE=$IMAGE_NAME:latest
# Build the image
docker build -t $IMAGE_NAME .
# Mark image with registry URL and tag
docker tag -f $IMAGE_NAME $LATEST_IMAGE
# Push image to repote registry
docker push $LATEST_IMAGE
|
Disable powerlevel when no osx | ZSH=$HOME/.oh-my-zsh
POWERLEVEL9K_MODE='nerdfont-complete'
ZSH_THEME="powerlevel9k/powerlevel9k"
COMPLETION_WAITING_DOTS="true"
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
plugins=(colored-man-pages command-not-found docker docker-compose heroku history-substring-search osx vi-mode web-search z)
source $ZSH/oh-my-zsh.sh
| source $HOME/.dotfiles/bin/os_type.sh
ZSH=$HOME/.oh-my-zsh
POWERLEVEL9K_MODE='nerdfont-complete'
if is_osx; then
ZSH_THEME="powerlevel9k/powerlevel9k"
else
ZSH_THEME="robbyrussell"
fi
COMPLETION_WAITING_DOTS="true"
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
plugins=(colored-man-pages command-not-found docker docker-compose heroku history-substring-search osx vi-mode web-search z)
source $ZSH/oh-my-zsh.sh
|
Use /usr/bin/env to find shell. | #!/bin/bash
set -e
LEIN_VOOM_VERSION=${LEIN_VOOM_VERSION:-0.1.0-20180617_140646-g0ba7ec8}
LEIN_TEST_VERSIONS=${LEIN_TEST_VERSIONS:-2.6.1 2.7.1 2.8.1}
LEIN_TESTS=${LEIN_TESTS:-$(echo scripts/*)}
for ver in $LEIN_TEST_VERSIONS; do
# Attempt to copy in the appropriate pre-fetched or locally built lein-voom
cp -a ~/.m2/repository/lein-voom/lein-voom/. artifacts/lein-voom/lein-voom/. || true
docker build -t lein-test:${ver} --build-arg LEIN_VER=${ver} --build-arg VOOM_VER=${LEIN_VOOM_VERSION} .
for test in $LEIN_TESTS; do
if docker run -i --rm \
--name lein-test \
-v $(readlink -f scripts/):/scripts:ro \
-w / \
lein-test:${ver} \
"${test}"; then
echo ">>> Test '${test}' of lein-voom ${LEIN_VOOM_VERSION} against lein ${ver} ::SUCCESS::"
else
echo ">>> Test '${test}' of lein-voom ${LEIN_VOOM_VERSION} against lein ${ver} ::FAILURE::"
fi
done
done
| #!/usr/bin/env bash
set -e
LEIN_VOOM_VERSION=${LEIN_VOOM_VERSION:-0.1.0-20180617_140646-g0ba7ec8}
LEIN_TEST_VERSIONS=${LEIN_TEST_VERSIONS:-2.6.1 2.7.1 2.8.1}
LEIN_TESTS=${LEIN_TESTS:-$(echo scripts/*)}
for ver in $LEIN_TEST_VERSIONS; do
# Attempt to copy in the appropriate pre-fetched or locally built lein-voom
cp -a ~/.m2/repository/lein-voom/lein-voom/. artifacts/lein-voom/lein-voom/. || true
docker build -t lein-test:${ver} --build-arg LEIN_VER=${ver} --build-arg VOOM_VER=${LEIN_VOOM_VERSION} .
for test in $LEIN_TESTS; do
if docker run -i --rm \
--name lein-test \
-v $(readlink -f scripts/):/scripts:ro \
-w / \
lein-test:${ver} \
"${test}"; then
echo ">>> Test '${test}' of lein-voom ${LEIN_VOOM_VERSION} against lein ${ver} ::SUCCESS::"
else
echo ">>> Test '${test}' of lein-voom ${LEIN_VOOM_VERSION} against lein ${ver} ::FAILURE::"
fi
done
done
|
Use ripgrep as file finder for fzf | # ------------------------------------------------------------------------------
# Environment variables.
# ------------------------------------------------------------------------------
export LESS="--tabs=4 -RFX"
| # ------------------------------------------------------------------------------
# Environment variables.
# ------------------------------------------------------------------------------
export LESS="--tabs=4 -RFX"
export FZF_DEFAULT_COMMAND="rg --files"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.