Instruction (string, lengths 14–778) | input_code (string, lengths 0–4.24k) | output_code (string, lengths 1–5.44k) |
|---|---|---|
Use `--no-deps` when embedding KaTeX. | #! /usr/bin/env bash
# TODO: This script sets the `RUSTDOCFLAGS` environment variable to configure
# the KaTeX header for documentation. This is also done via Cargo
# configuration for the `plexus` package, but Cargo executes `rustdoc`
# from different directories depending on the command (`cargo doc` vs.
# `cargo test`), causing relative paths to resolve differently and fail
# for either documentation or tests.
#
# Cargo is configured using a path that works for `cargo test`, and this
# script provides a workaround to enable `cargo doc`.
#
# See https://github.com/rust-lang/cargo/issues/8097
set -e
RUSTDOCFLAGS=--html-in-header=./doc/katex-header.html \
cargo +nightly doc $@
| #! /usr/bin/env bash
# TODO: This requires the `--no-deps` flag. Without it, when `rustdoc` attempts
# to embed the KaTeX header it will fail to resolve the path.
# TODO: This script sets the `RUSTDOCFLAGS` environment variable to configure
# the KaTeX header for documentation. This cannot be accomplished with
# Cargo configuration yet.
#
# See https://github.com/rust-lang/cargo/issues/8097
set -e
RUSTDOCFLAGS=--html-in-header=./doc/katex-header.html \
cargo +nightly doc --no-deps $@
|
Make `vfs` the default storage driver | #!/bin/bash
dockerd-entrypoint.sh --log-level "${DOCKERD_LOG_LEVEL:-warn}" --storage-driver "${DOCKERD_STORAGE_DRIVER}" &
DOCKERD_PID="$!"
while ! docker version 1>/dev/null; do
kill -0 "$!" || exit
sleep .5
done
bash "$@"
exit_code="$?"
kill %1
exit "$exit_code"
| #!/bin/bash
dockerd-entrypoint.sh --log-level "${DOCKERD_LOG_LEVEL:-warn}" --storage-driver "${DOCKERD_STORAGE_DRIVER:-vfs}" &
DOCKERD_PID="$!"
while ! docker version 1>/dev/null; do
kill -0 "$!" || exit
sleep .5
done
bash "$@"
exit_code="$?"
kill %1
exit "$exit_code"
|
Enable hardened runtime on Sparkle part of the app | #!/bin/sh
. "${OBJROOT}/autorevision.cache"
# Fail if not deployment
if [ ! "${CONFIGURATION}" = "Deployment" ]; then
echo "error: This should only be run as Deployment" >&2
exit 1
fi
# Fail if not clean
if [ ! "${VCS_WC_MODIFIED}" == "0" ]; then
echo 'error: The Working directory is not clean; please commit or revert any changes.' 1>&2
exit 1
fi
# Fail if not tagged
if [[ ! "${VCS_TICK}" == "0" ]]; then
echo 'error: Not on a tag; please make a tag tag with `git tag -s` or `git tag -a`.' 1>&2
exit 1
fi
# Fail if incorrectly tagged
if ! git describe --exact-match "${VCS_TAG}"; then
echo 'error: The tag is not annotated; please redo the tag with `git tag -s` or `git tag -a`.' 1>&2
exit 1
fi
exit 0
| #!/bin/sh
. "${OBJROOT}/autorevision.cache"
# Fail if not deployment
if [ ! "${CONFIGURATION}" = "Deployment" ]; then
echo "error: This should only be run as Deployment" >&2
exit 1
fi
# Fail if not clean
if [ ! "${VCS_WC_MODIFIED}" == "0" ]; then
echo 'error: The Working directory is not clean; please commit or revert any changes.' 1>&2
exit 1
fi
# Fail if not tagged
if [[ ! "${VCS_TICK}" == "0" ]]; then
echo 'error: Not on a tag; please make a tag tag with `git tag -s` or `git tag -a`.' 1>&2
exit 1
fi
# Fail if incorrectly tagged
if ! git describe --exact-match "${VCS_TAG}"; then
echo 'error: The tag is not annotated; please redo the tag with `git tag -s` or `git tag -a`.' 1>&2
exit 1
fi
# cf. https://furbo.org/2019/08/16/catalina-app-notarization-and-sparkle/
LOCATION="${BUILT_PRODUCTS_DIR}"/"${FRAMEWORKS_FOLDER_PATH}"
IDENTITY=${EXPANDED_CODE_SIGN_IDENTITY_NAME}
codesign --verbose --force --deep -o runtime --sign "$IDENTITY" "$LOCATION/Sparkle.framework/Versions/A/Resources/AutoUpdate.app"
codesign --verbose --force -o runtime --sign "$IDENTITY" "$LOCATION/Sparkle.framework/Versions/A"
exit 0
|
Allow the DESTDIR to be specified without a terminating slash (all other install scripts allow this). | #!/bin/sh
#
# $FreeBSD$
#
if [ "`id -u`" != "0" ]; then
echo "Sorry, this must be done as root."
exit 1
fi
cat proflibs.?? | tar --unlink -xpzf - -C ${DESTDIR:-/}
cd ${DESTDIR:-/}usr/lib
if [ -f libdescrypt_p.a ]
then
ln -f -s libdescrypt_p.a libcrypt_p.a
fi
exit 0
| #!/bin/sh
#
# $FreeBSD$
#
if [ "`id -u`" != "0" ]; then
echo "Sorry, this must be done as root."
exit 1
fi
cat proflibs.?? | tar --unlink -xpzf - -C ${DESTDIR:-/}
cd ${DESTDIR}/usr/lib
if [ -f libdescrypt_p.a ]
then
ln -f -s libdescrypt_p.a libcrypt_p.a
fi
exit 0
|
Add password reset for postgres user | #! /bin/sh
SCRIPTPATH=$(dirname $0)
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test;'
sudo -u postgres psql -c 'CREATE DATABASE test;'
sudo -u postgres psql -d test -a -f $SCRIPTPATH/pg-init.sql
| #! /bin/sh
SCRIPTPATH=$(dirname $0)
sudo -u postgres psql -c 'DROP DATABASE IF EXISTS test;'
sudo -u postgres psql -c 'CREATE DATABASE test;'
sudo -u postgres psql -c "ALTER USER postgres password '1234';"
sudo -u postgres psql -d test -a -f $SCRIPTPATH/pg-init.sql
|
Add docker extension to work (VS Code) | #!/usr/bin/env bash
# Determine whether to install personal or work extensions
PERSONAL=$(! grep -Fq "xamarin" "${HOME}/.npmrc"; echo $?)
declare -a extensions=(
EditorConfig.EditorConfig
mikestead.dotenv
msjsdiag.debugger-for-chrome
smockle.xcode-default-theme
)
declare -a personal_extensions=(
esbenp.prettier-vscode
)
declare -a work_extensions=(
eg2.tslint
ms-vsliveshare.vsliveshare
ms-vsts.team
romanresh.testcafe-test-runner
)
install_code_extensions() {
local extensions=("$@")
for extension in "${extensions[@]}"; do
if code --list-extensions | grep -q "${extension}"; then
code --uninstall-extension "${extension}"
fi
code --install-extension "${extension}"
done
}
install_code_extensions "${extensions[@]}"
if [ $PERSONAL -eq 0 ]; then
install_code_extensions "${personal_extensions[@]}"
else
install_code_extensions "${work_extensions[@]}"
fi
unset extensions
unset personal_extensions
unset work_extensions
unset install_code_extensions
| #!/usr/bin/env bash
# Determine whether to install personal or work extensions
PERSONAL=$(! grep -Fq "xamarin" "${HOME}/.npmrc"; echo $?)
declare -a extensions=(
EditorConfig.EditorConfig
mikestead.dotenv
msjsdiag.debugger-for-chrome
smockle.xcode-default-theme
)
declare -a personal_extensions=(
esbenp.prettier-vscode
)
declare -a work_extensions=(
eg2.tslint
ms-vsliveshare.vsliveshare
ms-vsts.team
PeterJausovec.vscode-docker
romanresh.testcafe-test-runner
)
install_code_extensions() {
local extensions=("$@")
for extension in "${extensions[@]}"; do
if code --list-extensions | grep -q "${extension}"; then
code --uninstall-extension "${extension}"
fi
code --install-extension "${extension}"
done
}
install_code_extensions "${extensions[@]}"
if [ $PERSONAL -eq 0 ]; then
install_code_extensions "${personal_extensions[@]}"
else
install_code_extensions "${work_extensions[@]}"
fi
unset extensions
unset personal_extensions
unset work_extensions
unset install_code_extensions
|
Improve resolving JAR path in Linux launch script | #!/bin/sh
java -jar "$(dirname "$0")/SuperTMXMerge.jar" "$@" | #!/bin/sh
SCRIPT_DIR=$(dirname "$(readlink -f $0)")
java -jar "$SCRIPT_DIR/SuperTMXMerge.jar" "$@"
|
Make sure we're in the original working directory | #!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
eval "${DIR}/python3 ${DIR}/../../appimager/appimager" "$@"
| #!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $OWD
eval "${DIR}/python3 ${DIR}/../../appimager/appimager" "$@"
|
Update the M-Space copy script to not overwrite translations | #!/bin/bash
# The M-Space sheet uses the same code base as Mythras. This script exist to help the author copy changes over to M-Space when the Mythras sheet is updated.
# Copy relevant code
cp ./Changelog.md ../M-Space/Changelog.md
cp ./README.md ../M-Space/README.md
cp ./Mythras.html ../M-Space/M-Space.html
cp ./Mythras.css ../M-Space/M-Space.css
cp ./translation.json ../M-Space/translation.json
cp -r ./translations ../M-Space/
# Flip the sheet variant toggle
sed -i 's/name="attr_sheet_varient" value="mythras" checked/name="attr_sheet_varient" value="mythras"/' ../M-Space/M-Space.html
sed -i 's/name="attr_sheet_varient" value="m-space"/name="attr_sheet_varient" value="m-space" checked/' ../M-Space/M-Space.html
| #!/bin/bash
# The M-Space sheet uses the same code base as Mythras. This script exist to help the author copy changes over to M-Space when the Mythras sheet is updated.
# Copy relevant code
cp ./Changelog.md ../M-Space/Changelog.md
cp ./README.md ../M-Space/README.md
cp ./Mythras.html ../M-Space/M-Space.html
cp ./Mythras.css ../M-Space/M-Space.css
cp ./translation.json ../M-Space/translation.json
#cp -r ./translations ../M-Space/
# Flip the sheet variant toggle
sed -i 's/name="attr_sheet_varient" value="mythras" checked/name="attr_sheet_varient" value="mythras"/' ../M-Space/M-Space.html
sed -i 's/name="attr_sheet_varient" value="m-space"/name="attr_sheet_varient" value="m-space" checked/' ../M-Space/M-Space.html
|
Update dropbox.js version referenced by sample code. | mkdir -p public/lib
curl --fail https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.js \
> public/lib/jquery.min.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.js \
> public/lib/jquery.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.map \
> public/lib/jquery.min.map
curl --fail https://cdnjs.cloudflare.com/ajax/libs/coffee-script/1.6.2/coffee-script.min.js \
> public/lib/coffee-script.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/less.js/1.3.3/less.min.js \
> public/lib/less.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.9.1/dropbox.min.js \
> public/lib/dropbox.min.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.9.1/dropbox.js \
> public/lib/dropbox.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.9.1/dropbox.min.map \
> public/lib/dropbox.min.map
curl --fail https://cdnjs.cloudflare.com/ajax/libs/html5shiv/3.6.2/html5shiv.js \
> public/lib/html5shiv.js
| mkdir -p public/lib
curl --fail https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.js \
> public/lib/jquery.min.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.js \
> public/lib/jquery.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/jquery/1.9.1/jquery.min.map \
> public/lib/jquery.min.map
curl --fail https://cdnjs.cloudflare.com/ajax/libs/coffee-script/1.6.2/coffee-script.min.js \
> public/lib/coffee-script.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/less.js/1.3.3/less.min.js \
> public/lib/less.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.10.0/dropbox.min.js \
> public/lib/dropbox.min.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.10.0/dropbox.js \
> public/lib/dropbox.js
curl --fail https://cdnjs.cloudflare.com/ajax/libs/dropbox.js/0.10.0/dropbox.min.map \
> public/lib/dropbox.min.map
curl --fail https://cdnjs.cloudflare.com/ajax/libs/html5shiv/3.6.2/html5shiv.js \
> public/lib/html5shiv.js
|
Update docker images to 20 | export DOCKER_IMAGES_VERSION=${DOCKER_IMAGES_VERSION:-16}
export HADOOP_BASE_IMAGE=${HADOOP_BASE_IMAGE:-"prestodev/hdp2.6-hive"}
| export DOCKER_IMAGES_VERSION=${DOCKER_IMAGES_VERSION:-20}
export HADOOP_BASE_IMAGE=${HADOOP_BASE_IMAGE:-"prestodev/hdp2.6-hive"}
|
Correct the eventstoreContainer used to populate | #! /bin/bash
build=$1
if [[ $build = "" ]];then
echo "populate-eventstore-build: build number required as first parameter"
exit 1
fi
# fail fast
set -euo pipefail
. ../../../config
# run eventstore
docker run -d --net=host $eventstoreContainer.$build > eventstore_id
# populate it
populateCommand=`cat "../../../../src/server/Populate Event Store.bat"`
docker run -t --net=host \
$serversContainer.$build \
$populateCommand > populate_id
# commit container
docker commit `cat eventstore_id` $populatedEventstoreContainer
docker tag $populatedEventstoreContainer $populatedEventstoreContainer.$build
docker kill `cat eventstore_id`
| #! /bin/bash
build=$1
if [[ $build = "" ]];then
echo "populate-eventstore-build: build number required as first parameter"
exit 1
fi
# fail fast
set -euo pipefail
. ../../../config
# run eventstore
docker run -d --net=host $eventstoreContainer > eventstore_id
# populate it
populateCommand=`cat "../../../../src/server/Populate Event Store.bat"`
docker run -t --net=host \
$serversContainer.$build \
$populateCommand > populate_id
# commit container
docker commit `cat eventstore_id` $populatedEventstoreContainer
docker tag $populatedEventstoreContainer $populatedEventstoreContainer.$build
docker kill `cat eventstore_id`
|
Remove "Exemple" in home dir | #!/bin/sh
cd ~
rm -rf Documents Modèles Images Vidéos Musique Public Téléchargements
xdg-user-dirs-update
# Reassign path :
# xdg-user-dirs-update --set NAME ABSOLUTE_PATH
#
# Available values for NAME
# DESKTOP
# DOWNLOAD
# TEMPLATES
# PUBLICSHARE
# DOCUMENTS
# MUSIC
# PICTURES
# VIDEOS
| #!/bin/sh
cd ~
rm -rf Documents Modèles Images Vidéos Musique Public Téléchargements examples.desktop
xdg-user-dirs-update
# Reassign path :
# xdg-user-dirs-update --set NAME ABSOLUTE_PATH
#
# Available values for NAME
# DESKTOP
# DOWNLOAD
# TEMPLATES
# PUBLICSHARE
# DOCUMENTS
# MUSIC
# PICTURES
# VIDEOS
|
Add License header to our shell script files. | #!/bin/bash
# Script that can be used by CI server for testing j2cl builds.
set -e
bazel build :all tools/java/...
# build hello world sample in its own workspace
cd samples/helloworld
bazel build src/main/...
| #!/bin/bash
# Copyright 2019 Google Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Script that can be used by CI server for testing j2cl builds.
set -e
bazel build :all tools/java/...
# build hello world sample in its own workspace
cd samples/helloworld
bazel build src/main/...
|
Simplify the hyper-v script further | #!/bin/sh -eux
ubuntu_version="`lsb_release -r | awk '{print $2}'`";
major_version="`echo $ubuntu_version | awk -F. '{print $1}'`";
case "$PACKER_BUILDER_TYPE" in
hyperv-iso)
if [ "$major_version" -eq "16" ]; then
apt-get install -y linux-tools-virtual-lts-xenial linux-cloud-tools-virtual-lts-xenial;
elif [ "$major_version" -ge "17" ]; then
apt-get -y install linux-image-virtual linux-tools-virtual linux-cloud-tools-virtual;
fi
esac
| #!/bin/sh -eux
ubuntu_version="`lsb_release -r | awk '{print $2}'`";
major_version="`echo $ubuntu_version | awk -F. '{print $1}'`";
case "$PACKER_BUILDER_TYPE" in
hyperv-iso)
if [ "$major_version" -eq "16" ]; then
apt-get install -y linux-tools-virtual-lts-xenial linux-cloud-tools-virtual-lts-xenial;
else
apt-get -y install linux-image-virtual linux-tools-virtual linux-cloud-tools-virtual;
fi
esac
|
Update to add Airfoil app | #!/usr/bin/env bash
# ------------------------------------------------------------------------------
# Install Native Mac Applications
# ------------------------------------------------------------------------------
source ./extras/colors
# Brew Cask Setup
# ------------------------------------------------------------------------------
printf "\n$INFO%s$RESET\n" "Tapping caskroom formulae and installing Brew Cask..."
brew tap caskroom/cask
brew tap caskroom/versions
brew install caskroom/cask/brew-cask
# Applications
# ------------------------------------------------------------------------------
applications=(
adobe-creative-cloud
alfred
amazon-cloud-drive
amazon-music
beamer
coderunner
dropbox
evernote
google-chrome-canary
handbrake
hazel
imageoptim
iterm2
opera
send-to-kindle
steam
sequel-pro
transmission
transmit
vagrant
virtualbox
vlc
# Others To Include
# airfoil
# brackets
# clamxav
# dash
# dbappx
# github
# gitx
# helium
# mou
# brew cask install --appdir="/Applications" phpstorm // DoSomething
# rdio
# spotify
)
# Install Applications
# ------------------------------------------------------------------------------
# The applications will be installed in /Applications instead of the default
# Cask directory location in /Users/$user/Applications.
printf "\n$INFO%s$RESET\n" "Installing applications..."
brew cask install --appdir="/Applications" ${applications[@]}
# Cleanup
# ------------------------------------------------------------------------------
printf "\n$INFO%s$RESET\n" "Cleaning up Homebrew Casks..."
brew cask cleanup
| #!/usr/bin/env bash
# ------------------------------------------------------------------------------
# Install Native Mac Applications
# ------------------------------------------------------------------------------
source ./extras/colors
# Brew Cask Setup
# ------------------------------------------------------------------------------
printf "\n$INFO%s$RESET\n" "Tapping caskroom formulae and installing Brew Cask..."
brew tap caskroom/cask
brew tap caskroom/versions
brew install caskroom/cask/brew-cask
# Applications
# ------------------------------------------------------------------------------
applications=(
adobe-creative-cloud
airfoil
alfred
amazon-cloud-drive
amazon-music
beamer
coderunner
dropbox
evernote
google-chrome-canary
handbrake
hazel
imageoptim
iterm2
opera
send-to-kindle
steam
sequel-pro
transmission
transmit
vagrant
virtualbox
vlc
# Others To Include
# airfoil
# brackets
# clamxav
# dash
# dbappx
# github
# gitx
# helium
# mou
# brew cask install --appdir="/Applications" phpstorm // DoSomething
# rdio
# spotify
)
# Install Applications
# ------------------------------------------------------------------------------
# The applications will be installed in /Applications instead of the default
# Cask directory location in /Users/$user/Applications.
printf "\n$INFO%s$RESET\n" "Installing applications..."
brew cask install --appdir="/Applications" ${applications[@]}
# Cleanup
# ------------------------------------------------------------------------------
printf "\n$INFO%s$RESET\n" "Cleaning up Homebrew Casks..."
brew cask cleanup
|
Use bash instead of sh in build script | #!/bin/sh
BINARY="cf-mysql-plugin"
main() {
ginkgo -r --randomizeAllSpecs --randomizeSuites --failOnPending --cover --trace --race --compilers=2
build_for_platform_and_arch linux amd64
build_for_platform_and_arch linux 386
build_for_platform_and_arch darwin amd64
build_for_platform_and_arch windows amd64
build_for_platform_and_arch windows 386
}
build_for_platform_and_arch() {
platform="$1"
arch="$2"
destination="output/$platform-$arch"
binary=`binary_for_platform "$platform"`
mkdir -p "$destination"
GOOS="$platform" GOARCH="$arch" go build
mv "$binary" "$destination"
pushd "$destination" > /dev/null
zip ../"$BINARY-$platform-$arch.zip" *
popd > /dev/null
}
binary_for_platform() {
platform="$1"
case "$platform" in
windows)
echo "$BINARY.exe"
;;
*)
echo "$BINARY"
;;
esac
}
main
| #!/bin/bash
set -e
BINARY="cf-mysql-plugin"
main() {
ginkgo -r --randomizeAllSpecs --randomizeSuites --failOnPending --cover --trace --race --compilers=2
build_for_platform_and_arch linux amd64
build_for_platform_and_arch linux 386
build_for_platform_and_arch darwin amd64
build_for_platform_and_arch windows amd64
build_for_platform_and_arch windows 386
}
build_for_platform_and_arch() {
platform="$1"
arch="$2"
destination="output/$platform-$arch"
binary=`binary_for_platform "$platform"`
mkdir -p "$destination"
GOOS="$platform" GOARCH="$arch" go build
mv "$binary" "$destination"
pushd "$destination" > /dev/null
zip ../"$BINARY-$platform-$arch.zip" *
popd > /dev/null
}
binary_for_platform() {
platform="$1"
case "$platform" in
windows)
echo "$BINARY.exe"
;;
*)
echo "$BINARY"
;;
esac
}
main
|
Check if owncloud is installed. | #!/bin/bash
# Helper script for OwnCloud salt automation.
# Constants.
EXIT_INVALID_COMMAND=1
EXIT_WRONG_USER=2
# Functions.
show_help() {
echo -en "/opt/spogliani/owncloud/owncloudclt.sh COMMAND [OPTIONS]
Helper script for OwnCloud salt automation.
COMMAND:
check install
Checks the state of the system.
The required argument to check is the aspect of the system to check.
* install: checks that the system is installed.
help
Show this message.
"
}
### MAIN ###
# Check pre-conditions.
if [ "$(whoami)" != "www-data" ]; then
echo "This script must run as the www-data user."
exit ${EXIT_WRONG_USER}
fi
# Process command.
cmd=$1
shift
case "${cmd}" in
help) show_help;;
*)
echo "Unsupported command: '${cmd}'."
show_help
exit ${EXIT_INVALID_COMMAND}
esac
| #!/bin/bash
# Helper script for OwnCloud salt automation.
# Constants.
EXIT_INVALID_COMMAND=1
EXIT_NOT_INSTALLED=2
EXIT_OK=3
EXIT_WRONG_USER=4
HTTP_USER="www-data"
OWNCLOUD_ROOT="/var/www/owncloud"
OWNCLOUD_CTL="./occ"
# Functions.
show_help() {
echo -en "/opt/spogliani/owncloud/owncloudclt.sh COMMAND [OPTIONS]
Helper script for OwnCloud salt automation.
COMMAND:
check install
Checks the state of the system.
The required argument to check is the aspect of the system to check.
* install: checks that the system is installed.
help
Show this message.
"
}
# Commans.
check() {
what=$1
case "${what}" in
install) check_install;;
*)
echo "Unkonw check argument: '${what}'."
show_help
exit ${EXIT_INVALID_COMMAND}
;;
esac
}
check_install() {
cd ${OWNCLOUD_ROOT}
installed=${OWNCLOUD_CTL} list | grep -c 'ownCloud is not installed'
if [ "${installed}" -eq 1 ]; then
exit ${EXIT_NOT_INSTALLED}
else
exit ${EXIT_OK}
fi
}
### MAIN ###
# Check pre-conditions.
if [ "$(whoami)" != "${HTTP_USER}" ]; then
echo "This script must run as the ${HTTP_USER} user."
exit ${EXIT_WRONG_USER}
fi
# Process command.
cmd=$1
shift
case "${cmd}" in
check) check $*;;
help) show_help;;
*)
echo "Unsupported command: '${cmd}'."
show_help
exit ${EXIT_INVALID_COMMAND}
;;
esac
|
Use my own fork of theano. | #!/bin/bash
set -e
function safe_call {
# usage:
# safe_call function param1 param2 ...
HERE=$(pwd)
"$@"
cd "$HERE"
}
function install_theano {
pip install --upgrade --no-deps git+git://github.com/Theano/Theano.git
}
function install_joblib {
pip install joblib
}
function install_matplotlib {
conda install --yes matplotlib
}
function install_jinja2 {
conda install --yes jinja2
}
function install_pylearn2 {
DIR="$1"
cd "$DIR"
if [ -d "pylearn2" ]; then
echo "Existing version of pylearn2 found, removing."
rm -rf pylearn2
fi
git clone -b parameter_prediction git@bitbucket.org:mdenil/pylearn2.git
cd pylearn2
python setup.py install
}
ENV=pp_env
EXTERNAL=external
rm -rf $ENV
conda create --yes --prefix $ENV accelerate pip nose
source activate "$(pwd)/$ENV"
safe_call install_theano
safe_call install_joblib
safe_call install_matplotlib
safe_call install_jinja2
safe_call install_pylearn2 "$EXTERNAL"
cat <<EOF
Run:
source activate "\$(pwd)/$ENV"
to activate the environment. When you're done you can run
source deactivate
to close the environement.
EOF
| #!/bin/bash
set -e
function safe_call {
# usage:
# safe_call function param1 param2 ...
HERE=$(pwd)
"$@"
cd "$HERE"
}
function install_theano {
pip install --upgrade --no-deps git+git://github.com/mdenil/Theano.git
}
function install_joblib {
pip install joblib
}
function install_matplotlib {
conda install --yes matplotlib
}
function install_jinja2 {
conda install --yes jinja2
}
function install_pylearn2 {
DIR="$1"
cd "$DIR"
if [ -d "pylearn2" ]; then
echo "Existing version of pylearn2 found, removing."
rm -rf pylearn2
fi
git clone -b parameter_prediction git@bitbucket.org:mdenil/pylearn2.git
cd pylearn2
python setup.py install
}
ENV=pp_env
EXTERNAL=external
rm -rf $ENV
conda create --yes --prefix $ENV accelerate pip nose
source activate "$(pwd)/$ENV"
safe_call install_theano
safe_call install_joblib
safe_call install_matplotlib
safe_call install_jinja2
safe_call install_pylearn2 "$EXTERNAL"
cat <<EOF
Run:
source activate "\$(pwd)/$ENV"
to activate the environment. When you're done you can run
source deactivate
to close the environement.
EOF
|
Stop Xvfb after running test | #!/bin/bash
set -x
./virtualfb.sh
glxinfo -display `cat virtualfb.DISPLAY`
if [[ $? != 0 ]]; then
echo "Test failure"
exit 1
fi
| #!/bin/bash
set -x
./virtualfb.sh
glxinfo -display `cat virtualfb.DISPLAY`
./virtualfb.sh
if [[ $? != 0 ]]; then
echo "Test failure"
exit 1
fi
|
Comment out tests that have not been running | #!/bin/bash
set -ev
source docker/utils.sh
FLAVOUR='travis'
if [ "${MATRIX_TYPE}" = "javascript" ]; then
FLAVOUR='travis-js'
fi
run_tests() {
# This function allows overriding the test comnmand and the test that get run
# which is used by 'simulate.sh'
TESTS=${TEST_OVERRIDE:-"$1"}
ENV_VARS=""
if [ $# -eq 2 ]; then
ENV_VARS="$2"
fi
if [ -z ${COMMAND_OVERRIDE} ]; then
docker_run $ENV_VARS web_test ".travis/test_runner.sh $TESTS"
else
docker_run $ENV_VARS web_test $COMMAND_OVERRIDE
fi
}
if [ "${MATRIX_TYPE}" = "python" ]; then
TESTS="--testrunner=$TESTRUNNER"
run_tests "$TESTS"
elif [ "${MATRIX_TYPE}" = "python-sharded" ]; then
SHARDED_TEST_APPS="corehq.form_processor \
corehq.sql_db \
couchforms \
casexml.apps.case \
casexml.apps.phone \
corehq.apps.receiverwrapper"
ENV="-e USE_PARTITIONED_DATABASE=yes"
run_tests "$SHARDED_TEST_APPS" "$ENV"
elif [ "${MATRIX_TYPE}" = "javascript" ]; then
docker_run web_test python manage.py migrate --noinput
docker_run web_test docker/wait.sh WEB_TEST
docker_run web_test grunt mocha
fi
| #!/bin/bash
set -ev
source docker/utils.sh
FLAVOUR='travis'
if [ "${MATRIX_TYPE}" = "javascript" ]; then
FLAVOUR='travis-js'
fi
run_tests() {
# This function allows overriding the test comnmand and the test that get run
# which is used by 'simulate.sh'
TESTS=${TEST_OVERRIDE:-"$1"}
ENV_VARS=""
if [ $# -eq 2 ]; then
ENV_VARS="$2"
fi
if [ -z ${COMMAND_OVERRIDE} ]; then
docker_run $ENV_VARS web_test ".travis/test_runner.sh $TESTS"
else
docker_run $ENV_VARS web_test $COMMAND_OVERRIDE
fi
}
if [ "${MATRIX_TYPE}" = "python" ]; then
TESTS="--testrunner=$TESTRUNNER"
run_tests "$TESTS"
elif [ "${MATRIX_TYPE}" = "python-sharded" ]; then
SHARDED_TEST_APPS="corehq.form_processor"
# commented out due to bug that discarded all but first item
# corehq.sql_db \
# couchforms \
# casexml.apps.case \
# casexml.apps.phone \
# corehq.apps.receiverwrapper"
ENV="-e USE_PARTITIONED_DATABASE=yes"
run_tests "$SHARDED_TEST_APPS" "$ENV"
elif [ "${MATRIX_TYPE}" = "javascript" ]; then
docker_run web_test python manage.py migrate --noinput
docker_run web_test docker/wait.sh WEB_TEST
docker_run web_test grunt mocha
fi
|
Move sourcing of custom to below plugins | # Initializes Oh My Zsh
# add a function path
fpath=($ZSH/functions $fpath)
# Load all of the config files in ~/oh-my-zsh that end in .zsh
# TIP: Add files you don't want in git to .gitignore
for config_file ($ZSH/lib/*.zsh) source $config_file
# Add all defined plugins to fpath
plugin=${plugin:=()}
for plugin ($plugins) fpath=($ZSH/plugins/$plugin $fpath)
# Load and run compinit
autoload -U compinit
compinit -i
# Load all of your custom configurations from custom/
for config_file ($ZSH/custom/*.zsh) source $config_file
# Load all of the plugins that were defined in ~/.zshrc
for plugin ($plugins); do
if [ -f $ZSH/plugins/$plugin/$plugin.plugin.zsh ]; then
source $ZSH/plugins/$plugin/$plugin.plugin.zsh
fi
done
# Load the theme
source "$ZSH/themes/$ZSH_THEME.zsh-theme"
# Check for updates on initial load...
if [ "$DISABLE_AUTO_UPDATE" = "true" ]
then
return
else
/usr/bin/env zsh $ZSH/tools/check_for_upgrade.sh
fi
| # Initializes Oh My Zsh
# add a function path
fpath=($ZSH/functions $fpath)
# Load all of the config files in ~/oh-my-zsh that end in .zsh
# TIP: Add files you don't want in git to .gitignore
for config_file ($ZSH/lib/*.zsh) source $config_file
# Add all defined plugins to fpath
plugin=${plugin:=()}
for plugin ($plugins) fpath=($ZSH/plugins/$plugin $fpath)
# Load and run compinit
autoload -U compinit
compinit -i
# Load all of the plugins that were defined in ~/.zshrc
for plugin ($plugins); do
if [ -f $ZSH/plugins/$plugin/$plugin.plugin.zsh ]; then
source $ZSH/plugins/$plugin/$plugin.plugin.zsh
fi
done
# Load all of your custom configurations from custom/
for config_file ($ZSH/custom/*.zsh) source $config_file
# Load the theme
source "$ZSH/themes/$ZSH_THEME.zsh-theme"
# Check for updates on initial load...
if [ "$DISABLE_AUTO_UPDATE" = "true" ]
then
return
else
/usr/bin/env zsh $ZSH/tools/check_for_upgrade.sh
fi
|
Fix CWD issues with BeautifulSoup and other libs. | #!/bin/bash
SCANDIR=~/code/torflow.git/NetworkScanners/ExitAuthority/
# 1. Email results to addresses in soat_config.py (--email)
# 2. Ignore timeout errors (--noreason FailureTimeout)
# 3. Schedule this script every hour (--croninterval 1).
# 4. Only report from urls that fail from less than 10% of the total
# exits tested so far. (--siterate 10)
# 5. Only report exits that fail 100% of their tests (--exitrate 99)
$SCANDIR/snakeinspector.py --email --exitrate 99 --siterate 10 --croninterval 1
# Optionally, you can use these two lines to allow less regular cron
# scheduling:
#$SCANDIR/snakeinspector.py --confirmed --email --noreason FailureTimeout --siterate 3 --finishedafter "`cat $SCANDIR/lastmail.time`"
#date +"%a %b %d %H:%M:%S %Y" > $SCANDIR/lastmail.time
| #!/bin/bash
SCANDIR=~/code/torflow.git/NetworkScanners/ExitAuthority/
cd $SCANDIR
# 1. Email results to addresses in soat_config.py (--email)
# 2. Ignore timeout errors (--noreason FailureTimeout)
# 3. Schedule this script every hour (--croninterval 1).
# 4. Only report from urls that fail from less than 10% of the total
# exits tested so far. (--siterate 10)
# 5. Only report exits that fail 100% of their tests (--exitrate 99)
./snakeinspector.py --email --exitrate 99 --siterate 10 --croninterval 1
# Optionally, you can use these two lines to allow less regular cron
# scheduling:
#./snakeinspector.py --confirmed --email --noreason FailureTimeout --siterate 3 --finishedafter "`cat $SCANDIR/lastmail.time`"
#date +"%a %b %d %H:%M:%S %Y" > $SCANDIR/lastmail.time
|
Install Elasticsearch 5.3.3 for Travis | #!/bin/bash
sudo sysctl -w vm.max_map_count=262144
sudo apt-get autoremove --purge elasticsearch
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-5.x.list
sudo apt-get update && sudo apt-get install elasticsearch -y
sudo service elasticsearch start
| #!/bin/bash
sudo sysctl -w vm.max_map_count=262144
sudo apt-get autoremove --purge elasticsearch
wget -P /tmp/ https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.3.3.deb
sudo dpkg -i /tmp/elasticsearch-5.3.3.deb
sudo service elasticsearch start
|
Remove Debian-specific -n column option. | #!/bin/sh
(echo 'Benchmark;Runtime change;Original runtime'; ./bench-cmp.pl "$@") | column -nts\;
| #!/bin/sh
(echo 'Benchmark;Runtime change;Original runtime'; ./bench-cmp.pl "$@") | column -ts\;
|
Use LUIGI_CONFIG_PATH to look for client.cfg in the directory of the script. | #!/bin/bash
set -e
# Setup python environment
source <%= @local_install_dir %>/pythonenv/bin/activate
# Run Python
<%= @local_install_dir %>/pythonenv/bin/python \
<%= @python_arugments %> \
<%= @python_script %> \
<%= @script_arguments %>
| #!/bin/bash
set -e
# Setup python environment
source <%= @local_install_dir %>/pythonenv/bin/activate
export LUIGI_CONFIG_PATH=`pwd`/`dirname <%= @python_script %>`/client.cfg
# Run Python
<%= @local_install_dir %>/pythonenv/bin/python \
<%= @python_arugments %> \
<%= @python_script %> \
<%= @script_arguments %>
|
Fix first run script, pass password via env variable | #! /bin/bash
cd /usr/libexec/bacula/
PSQL_OPTS="-h ${DB_PORT_5432_TCP_ADDR} -U postgres"
./create_postgresql_database ${PSQL_OPTS}
./make_postgresql_tables ${PSQL_OPTS}
./grant_postgresql_privileges ${PSQL_OPTS}
| #! /bin/bash
cd /usr/libexec/bacula/
PSQL_OPTS="-h ${DB_PORT_5432_TCP_ADDR} -U postgres"
export PGPASSWORD=${DB_ENV_POSTGRES_PASSWORD}
createuser ${PSQL_OPTS} -d -R bacula
./create_bacula_database ${PSQL_OPTS}
./make_bacula_tables ${PSQL_OPTS}
./grant_bacula_privileges ${PSQL_OPTS}
|
Make the cache available for another build | #!/bin/sh
pushd `dirname $0` > /dev/null
HERE=`pwd`
popd > /dev/null
cd "${HERE}"
talk2docker="../../talk2docker --config=../config.yml"
execute() {
command="${talk2docker} ${*}"
echo "\n\$ ${command}" >&2
eval "${command}"
status=$?
if [ $status -ne 0 ]; then
exit $status
fi
return $status
}
eval ${talk2docker} host switch default
if command -v vagrant > /dev/null; then
cd ..
vagrant up
cd "${HERE}"
eval ${talk2docker} host switch vagrant
elif command -v boot2docker > /dev/null; then
boot2docker up
VBoxManage controlvm "boot2docker-vm" natpf1 "tcp8000,tcp,,8000,,8000";
eval ${talk2docker} host switch boot2docker
fi
eval ${talk2docker} container remove web db --force
# https://github.com/docker/fig/blob/master/docs/wordpress.md
if [ ! -d wordpress ]; then
curl https://wordpress.org/latest.tar.gz | tar -xzf -
fi
cp wp-config.php wordpress/
cp router.php wordpress/
execute compose compose.yml db web
execute container start db web
| #!/bin/sh
pushd `dirname $0` > /dev/null
HERE=`pwd`
popd > /dev/null
cd "${HERE}"
talk2docker="../../talk2docker --config=../config.yml"
execute() {
command="${talk2docker} ${*}"
echo "\n\$ ${command}" >&2
eval "${command}"
status=$?
if [ $status -ne 0 ]; then
exit $status
fi
return $status
}
eval ${talk2docker} host switch default
if command -v vagrant > /dev/null; then
cd ..
vagrant up
cd "${HERE}"
eval ${talk2docker} host switch vagrant
elif command -v boot2docker > /dev/null; then
boot2docker up
VBoxManage controlvm "boot2docker-vm" natpf1 "tcp8000,tcp,,8000,,8000"
eval ${talk2docker} host switch boot2docker
fi
eval ${talk2docker} container remove web db --force
# https://github.com/docker/fig/blob/master/docs/wordpress.md
if [ ! -d wordpress ]; then
curl https://wordpress.org/latest.tar.gz | tar -xzf -
cp wp-config.php wordpress/
cp router.php wordpress/
fi
execute compose compose.yml db web
execute container start db web
|
Remove a left-over docker invocation, change how we bundle | #!/bin/sh
#
# Extract wrappers to run Diamond filters from docker container
#
# UNIQUE_ID=$(docker inspect --format='{{ (index .RepoDigests 0) }}' $IMAGEID)
# docker run --rm $UNIQUE_ID /extract-filters.sh $UNIQUE_ID > diamond-docker-filters.tgz
#
# To extract binary Diamond filters:
#
# docker run --rm $UNIQUE_ID /extract-filters.sh > diamond-native-filters.tgz
set -e
# Bundle plain XML files into Diamond predicates
( cd /usr/local/share/diamond/predicates ; for fxml in `find . -name *.xml -print` ; do
echo "Bundling $filter" 1>&2
diamond-bundle-predicate $fxml
rm -f $fxml
done )
# Export native filters if no docket image is specified
if [ -n "$1" ] ; then
UNIQUE_ID="$1"
# tar up 'native' filters
docker run --rm $UNIQUE_ID \
tar -C /usr/local/share -cz diamond > diamond-native-filters.tgz
for filter in `find /usr/local/share/diamond/filters -type f -perm /100 -print`
do
echo "Wrapping $filter" 1>&2
cat > $filter << EOF
# diamond-docker-filter
docker_image: ${UNIQUE_ID}
filter_command: ${filter}
EOF
done
fi
tar -C /usr/local/share -cz diamond
| #!/bin/sh
#
# Extract wrappers to run Diamond filters from docker container
#
# UNIQUE_ID=$(docker inspect --format='{{ (index .RepoDigests 0) }}' $IMAGEID)
# docker run --rm $UNIQUE_ID /extract-filters.sh $UNIQUE_ID > diamond-docker-filters.tgz
#
# To extract binary Diamond filters:
#
# docker run --rm $UNIQUE_ID /extract-filters.sh > diamond-native-filters.tgz
set -e
# Bundle plain XML files into Diamond predicates
for fxml in `find /usr/local/share/diamond/predicates -name *.xml -print`
do
echo "Bundling $filter" 1>&2
( cd /usr/local/share/diamond/predicates ; diamond-bundle-predicate $fxml )
rm -f $fxml
done
# Export native filters if no docket image is specified
if [ -n "$1" ] ; then
UNIQUE_ID="$1"
for filter in `find /usr/local/share/diamond/filters -type f -perm /100 -print`
do
echo "Wrapping $filter" 1>&2
cat > $filter << EOF
# diamond-docker-filter
docker_image: ${UNIQUE_ID}
filter_command: ${filter}
EOF
done
fi
tar -C /usr/local/share -cz diamond
|
Add docker compose and short link | #!/bin/bash
# Steps taken from https://docs.docker.com/engine/install/ubuntu/
# Remove old or Ubuntu repository versions
sudo apt remove docker docker-engine docker.io containerd runc
# Install package management packages
sudo apt update &&
sudo apt install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common
# Install docker gpg key
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
# Check fingerprint
sudo apt-key fingerprint 0EBFCD88
read -rp "If you don't see a key above, you have the wrong key installed. [Enter] to continue"
# Add repo and install
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" &&
sudo apt update &&
sudo apt install -y docker-ce docker-ce-cli containerd.io
| #!/bin/bash
# Steps taken from https://docs.docker.com/engine/install/ubuntu/
# Remove old or Ubuntu repository versions
sudo apt remove docker docker-engine docker.io containerd runc
# Install package management packages
sudo apt update &&
sudo apt install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common
# Install docker gpg key
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
# Check fingerprint
sudo apt-key fingerprint 0EBFCD88
read -rp "If you don't see a key above, you have the wrong key installed. [Enter] to continue"
# Add repo and install
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" &&
sudo apt update &&
sudo apt install -y docker-ce docker-ce-cli containerd.io
sudo curl -L "https://github.com/docker/compose/releases/download/1.27.4/docker-compose-$(uname -s)-$(uname -m)" \
-o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
# curl -sSL https://peg.nu/docker_setup | bash
|
Add an exit and done. | #!/usr/bin/env bash
# Pull new updates from GitHub
cd /srv/pisg/pisg-config/
git pull
# Render all configs.
for config in /srv/pisg/pisg-config/config/*.cfg
do
/srv/pisg/pisg --configfile=$config
done
| #!/usr/bin/env bash
# Pull new updates from GitHub
cd /srv/pisg/pisg-config/
git pull
# Render all configs.
for config in /srv/pisg/pisg-config/config/*.cfg
do
/srv/pisg/pisg --configfile=$config
done
exit 0
|
Add locally installed io.js to PATH | node_global=$HOME/node/global
add_to_path $node_global/bin
#add_to_path ./node_modules/.bin
# FIXME: Is this necessary? Is it correct?
NODE_PATH=$NODE_PATH:$node_global/lib/node_modules
export NODE_PATH
| add_to_path ~/usr/iojs/bin
node_global=$HOME/node/global
add_to_path $node_global/bin
#add_to_path ./node_modules/.bin
# FIXME: Is this necessary? Is it correct?
NODE_PATH=$NODE_PATH:$node_global/lib/node_modules
export NODE_PATH
|
Revert changes to travis file | #!/bin/bash
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
set -x
npm install
npm update
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
bash miniconda.sh -b -p $HOME/miniconda
export PATH="$HOME/miniconda/bin:$PATH"
hash -r
conda config --set always_yes yes --set changeps1 no
conda update -q conda
conda info -a
conda install -c conda-forge notebook pytest
# create jupyter base dir (needed for config retreival)
mkdir ~/.jupyter
# Install and enable the server extension
pip install -v -e ".[test]"
jupyter serverextension enable --py jupyterlab
| #!/bin/bash
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
set -x
npm update
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
bash miniconda.sh -b -p $HOME/miniconda
export PATH="$HOME/miniconda/bin:$PATH"
hash -r
conda config --set always_yes yes --set changeps1 no
conda update -q conda
conda info -a
conda install -c conda-forge notebook pytest
# create jupyter base dir (needed for config retreival)
mkdir ~/.jupyter
# Install and enable the server extension
pip install -v -e ".[test]"
jupyter serverextension enable --py jupyterlab
|
Add `limits` feature to doc-publishing script | #!/bin/bash
git describe --exact-match --tags $(git log -n1 --pretty='%h') >/dev/null 2>&1
if [[ $? != 0 ]]; then
echo "Should not publish tags from an untagged commit!"
exit 1
fi
cd $(git rev-parse --show-toplevel)
rm -rf target/doc/
rustup run nightly cargo doc --no-deps --features "backup blob chrono functions load_extension serde_json trace"
echo '<meta http-equiv=refresh content=0;url=rusqlite/index.html>' > target/doc/index.html
ghp-import target/doc
git push origin gh-pages:gh-pages
| #!/bin/bash
git describe --exact-match --tags $(git log -n1 --pretty='%h') >/dev/null 2>&1
if [[ $? != 0 ]]; then
echo "Should not publish tags from an untagged commit!"
exit 1
fi
cd $(git rev-parse --show-toplevel)
rm -rf target/doc/
rustup run nightly cargo doc --no-deps --features "backup blob chrono functions limits load_extension serde_json trace"
echo '<meta http-equiv=refresh content=0;url=rusqlite/index.html>' > target/doc/index.html
ghp-import target/doc
git push origin gh-pages:gh-pages
|
Use the abbreviated commit hash instead of the 'latest' string | #!/bin/bash
gulp pkg-sync
pushd src
npm version $npm_package_version-latest
mkdir -p ../dist/cnc/
cp -af package.json ../dist/cnc/
babel -d ../dist/cnc/ *.js
popd
| #!/bin/bash
abbrev_commit=`git log -1 --format=%h`
gulp pkg-sync
pushd src
npm version $npm_package_version-$abbrev_commit
mkdir -p ../dist/cnc/
cp -af package.json ../dist/cnc/
babel -d ../dist/cnc/ *.js
popd
|
Add IPv6 support to travis docker | #!/usr/bin/env bash
set -o errexit
apt-get update
apt-get install -y liblxc1 lxc-dev lxc shellcheck
apt-get install -y qemu
bash ./scripts/travis-rkt.sh
bash ./scripts/travis-consul.sh
bash ./scripts/travis-vault.sh
| #!/usr/bin/env bash
set -o errexit
#enable ipv6
echo '{"ipv6":true, "fixed-cidr-v6":"2001:db8:1::/64"}' | sudo tee /etc/docker/daemon.json
sudo service docker restart
apt-get update
apt-get install -y liblxc1 lxc-dev lxc shellcheck
apt-get install -y qemu
bash ./scripts/travis-rkt.sh
bash ./scripts/travis-consul.sh
bash ./scripts/travis-vault.sh
|
Allow using organization's repository on dockerhub | #!/usr/bin/env bash
set -e
# Skip this step for jobs that don't run exunit
test "${PRESET}" == "exunit" || exit 0
MIX_ENV=prod mix docker.build
MIX_ENV=prod mix docker.release
DOCKERHUB_TAG="${TRAVIS_BRANCH//\//-}"
if [ "${TRAVIS_PULL_REQUEST}" != 'false' ]; then
DOCKERHUB_TAG="PR-${TRAVIS_PULL_REQUEST}"
elif [ "${TRAVIS_BRANCH}" == 'master' ]; then
DOCKERHUB_TAG="latest";
fi
TARGET_IMAGE="${DOCKERHUB_USER}/mongoose_push:${DOCKERHUB_TAG}"
if [ "${TRAVIS_SECURE_ENV_VARS}" == 'true' ]; then
docker login -u "${DOCKERHUB_USER}" -p "${DOCKERHUB_PASS}"
docker tag mongoose_push:release "${TARGET_IMAGE}"
docker push "${TARGET_IMAGE}"
fi
| #!/usr/bin/env bash
set -e
# Skip this step for jobs that don't run exunit
test "${PRESET}" == "exunit" || exit 0
MIX_ENV=prod mix docker.build
MIX_ENV=prod mix docker.release
DOCKERHUB_TAG="${TRAVIS_BRANCH//\//-}"
if [ "${TRAVIS_PULL_REQUEST}" != 'false' ]; then
DOCKERHUB_TAG="PR-${TRAVIS_PULL_REQUEST}"
elif [ "${TRAVIS_BRANCH}" == 'master' ]; then
DOCKERHUB_TAG="latest";
fi
TARGET_IMAGE="${DOCKERHUB_REPOSITORY}/mongoose_push:${DOCKERHUB_TAG}"
if [ "${TRAVIS_SECURE_ENV_VARS}" == 'true' ]; then
docker login -u "${DOCKERHUB_USER}" -p "${DOCKERHUB_PASS}"
docker tag mongoose_push:release "${TARGET_IMAGE}"
docker push "${TARGET_IMAGE}"
fi
|
Fix build tools version in initialize android script | #!/bin/bash
# source: https://docs.snap-ci.com/the-ci-environment/languages/android/
set -e
INITIALIZATION_FILE="$ANDROID_HOME/.initialized-dependencies-$(git log -n 1 --format=%h -- $0)"
if [ ! -e ${INITIALIZATION_FILE} ]; then
download-android
echo y | android update sdk --no-ui --filter tools,platform-tools > /dev/null
echo y | android update sdk --no-ui --filter build-tools-20.0.0 --all > /dev/null
echo y | android update sdk --no-ui --filter android-15 > /dev/null
echo y | android update sdk --no-ui --filter extra-google-m2repository --all > /dev/null
echo y | android update sdk --no-ui --filter extra-android-m2repository --all > /dev/null
echo y | android update sdk --no-ui --filter sys-img-armeabi-v7a-android-19 --all > /dev/null
touch ${INITIALIZATION_FILE}
fi | #!/bin/bash
# source: https://docs.snap-ci.com/the-ci-environment/languages/android/
set -e
INITIALIZATION_FILE="$ANDROID_HOME/.initialized-dependencies-$(git log -n 1 --format=%h -- $0)"
if [ ! -e ${INITIALIZATION_FILE} ]; then
download-android
echo y | android update sdk --no-ui --filter tools,platform-tools > /dev/null
echo y | android update sdk --no-ui --filter x-24.0.3 --all > /dev/null
echo y | android update sdk --no-ui --filter android-15 > /dev/null
echo y | android update sdk --no-ui --filter extra-google-m2repository --all > /dev/null
echo y | android update sdk --no-ui --filter extra-android-m2repository --all > /dev/null
echo y | android update sdk --no-ui --filter sys-img-armeabi-v7a-android-19 --all > /dev/null
touch ${INITIALIZATION_FILE}
fi |
Return version only if app server is provided | include App/Server/Version/Constants/AppServerVersionConstants.sh
AppServerVersion(){
returnAppServerVersion(){
local appServer=${1}
AppServerVersionConstants ${appServer}Version
}
$@
} | include App/Server/Version/Constants/AppServerVersionConstants.sh
AppServerVersion(){
returnAppServerVersion(){
if [[ $# == 0 ]]; then
return
else
local appServer=${1}
AppServerVersionConstants ${appServer}Version
fi
}
$@
} |
Add docu and suppress gcov outputs | #!/bin/bash
#set -ev
set -e
# Clone MNE-CPP test data
git clone https://github.com/mne-tools/mne-cpp-test-data.git mne-cpp-test-data
# Set Environment variable
MNECPP_ROOT=$(pwd)
# Tests to run - TODO: find required tests automatically with grep
tests=( test_codecov test_fiff_rwr test_dipole_fit test_fiff_mne_types_io test_fiff_cov test_fiff_digitizer test_mne_msh_display_surface_set test_geometryinfo test_interpolation test_spectral_connectivity test_mne_forward_solution)
for test in ${tests[*]};
do
echo ">> Starting $test"
./bin/$test
#Find all .cpp files, cd to their folder and run gcov
find ./libraries -type f -name "*.cpp" -execdir gcov {} \;
#find . -name "*.cpp" -exec gcov {} \; > /dev/null
#find . -name "*.cpp" -exec gcov -p -s ${PWD} {} \; > /dev/null
codecov
#codecov > /dev/null
echo "<< Finished $test"
done
# Report code coverage; instead of "bash <(curl -s https://codecov.io/bash)" use python "codecov"
#codecov
| #!/bin/bash
#set -ev
set -e
# Clone MNE-CPP test data
git clone https://github.com/mne-tools/mne-cpp-test-data.git mne-cpp-test-data
# Set Environment variable
MNECPP_ROOT=$(pwd)
# Tests to run - TODO: find required tests automatically with grep
tests=( test_codecov test_fiff_rwr test_dipole_fit test_fiff_mne_types_io test_fiff_cov test_fiff_digitizer test_mne_msh_display_surface_set test_geometryinfo test_interpolation test_spectral_connectivity test_mne_forward_solution)
for test in ${tests[*]};
do
echo ">> Starting $test"
./bin/$test
# Find all .cpp files, cd to their folder and run gcov
find ./libraries -type f -name "*.cpp" -execdir gcov {} \; > /dev/null
# Report code coverage; instead of "bash <(curl -s https://codecov.io/bash) use python codecov
# Do this for every test run since codecov is able to process different uploads and will merge them as soon as the Travis job is done
codecov
#codecov > /dev/null
echo "<< Finished $test"
done
|
Install libraries into sketchbook/libraries directory | #!/bin/bash
function build_sketches()
{
local arduino=$1
local srcpath=$2
local sketches=$(find $srcpath -name *.ino)
for sketch in $sketches; do
local sketchdir=$(dirname $sketch)
if [[ -f "$sketchdir/.test.skip" ]]; then
echo -e "\n\n ------------ Skipping $sketch ------------ \n\n";
continue
fi
echo -e "\n\n ------------ Building $sketch ------------ \n\n";
$arduino --verify --verbose $sketch;
local result=$?
if [ $result -ne 0 ]; then
echo "Build failed ($1)"
return $result
fi
done
}
function install_libraries()
{
pushd libraries
# install ArduinoJson library
wget https://github.com/bblanchon/ArduinoJson/releases/download/v4.6.1/ArduinoJson-v4.6.1.zip && unzip ArduinoJson-v4.6.1.zip
popd
}
| #!/bin/bash
function build_sketches()
{
local arduino=$1
local srcpath=$2
local sketches=$(find $srcpath -name *.ino)
for sketch in $sketches; do
local sketchdir=$(dirname $sketch)
if [[ -f "$sketchdir/.test.skip" ]]; then
echo -e "\n\n ------------ Skipping $sketch ------------ \n\n";
continue
fi
echo -e "\n\n ------------ Building $sketch ------------ \n\n";
$arduino --verify --verbose $sketch;
local result=$?
if [ $result -ne 0 ]; then
echo "Build failed ($1)"
return $result
fi
done
}
function install_libraries()
{
mkdir -p $HOME/Arduino/libraries
pushd $HOME/Arduino/libraries
# install ArduinoJson library
wget https://github.com/bblanchon/ArduinoJson/releases/download/v4.6.1/ArduinoJson-v4.6.1.zip && unzip ArduinoJson-v4.6.1.zip
popd
}
|
Use venv module if virtualenv not installed | function activate_dlrnapi_venv {
if [ ! -d $WORKSPACE/dlrnapi_venv ]; then
virtualenv --system-site-packages $WORKSPACE/dlrnapi_venv
fi
source $WORKSPACE/dlrnapi_venv/bin/activate
pip install -U dlrnapi_client shyaml
}
function deactivate_dlrnapi_venv {
# deactivate can fail with unbound variable, so we need +u
set +u
[[ $VIRTUAL_ENV = $WORKSPACE/dlrnapi_venv ]] && deactivate
}
| function activate_dlrnapi_venv {
if [ ! -d $WORKSPACE/dlrnapi_venv ]; then
if [ $(command -v virtualenv) ]; then
virtualenv --system-site-packages $WORKSPACE/dlrnapi_venv
else
python3 -m venv --system-site-packages $WORKSPACE/dlrnapi_venv
fi
fi
source $WORKSPACE/dlrnapi_venv/bin/activate
pip install -U dlrnapi_client shyaml
}
function deactivate_dlrnapi_venv {
# deactivate can fail with unbound variable, so we need +u
set +u
[[ $VIRTUAL_ENV = $WORKSPACE/dlrnapi_venv ]] && deactivate
}
|
Remove call to brew command | # mkdir .git/safe in the root of repositories you trust
PATH=".git/safe/../../bin:$PATH"
# load rbenv if available
if command -v rbenv >/dev/null; then
eval "$(rbenv init - --no-rehash)"
fi
# load nodenv if available
if command -v nodenv >/dev/null; then
eval "$(nodenv init - --no-rehash)"
fi
GOPATH=$PROJECTS/go
PATH="$(brew --prefix go)/libexec/bin:$GOPATH/bin:$PATH"
export -U PATH GOPATH
| # mkdir .git/safe in the root of repositories you trust
PATH=".git/safe/../../bin:$PATH"
# load rbenv if available
if command -v rbenv >/dev/null; then
eval "$(rbenv init - --no-rehash)"
fi
# load nodenv if available
if command -v nodenv >/dev/null; then
eval "$(nodenv init - --no-rehash)"
fi
GOPATH=$PROJECTS/go
PATH="$GOPATH/bin:$PATH"
export -U PATH GOPATH
|
Add debuging message [ci deploy] | #!/usr/bin/env bash
if [ "$TRAVIS_BRANCH" = "master" ] && [ "$TRAVIS_PULL_REQUEST" = "false" ] && [[ "$TRAVIS_COMMIT_MESSAGE" == *"[ci deploy]"* ]]; then
openssl aes-256-cbc -K $encrypted_SOME_key -iv $encrypted_SOME_iv -in .ci/signingkey.asc.enc -out .ci/signingkey.asc -d
gpg --fast-import .ci/signingkey.asc
if | #!/usr/bin/env bash
echo "[START GPG] Setup Signing Key"
if [ "$TRAVIS_BRANCH" = "master" ] && [ "$TRAVIS_PULL_REQUEST" = "false" ] && [[ "$TRAVIS_COMMIT_MESSAGE" == *"[ci deploy]"* ]]; then
openssl aes-256-cbc -K $encrypted_SOME_key -iv $encrypted_SOME_iv -in .ci/signingkey.asc.enc -out .ci/signingkey.asc -d
gpg --fast-import .ci/signingkey.asc
if
echo "[END GPG] Setup Signing Key" |
Use explicit Ruby version numbers | #!/usr/bin/env bash
set -euo pipefail
readonly RAILS_VERSIONS=(60 60 60 master)
readonly RUBY_VERSIONS=(2.6.3 2.6.6 2.7 2.7)
readonly RAILS_VERSION=${RAILS_VERSIONS[$BUILDKITE_PARALLEL_JOB]}
readonly RUBY_VERSION=${RUBY_VERSIONS[$BUILDKITE_PARALLEL_JOB]}
echo -e "+++ :llama: Testing with :ruby: ${RUBY_VERSION} | :rails: ${RAILS_VERSION}"
docker run -it --rm -v "$PWD":/usr/src -w /usr/src ruby:${RUBY_VERSION}-slim-stretch sh -c "apt-get -qqy update && \
apt-get install -qy build-essential git-core ; bundle check --path=vendor/bundle_${RAILS_VERSION} \
--gemfile gemfiles/Gemfile.rails${RAILS_VERSION} || bundle install --jobs=4 --retry=3 --gemfile gemfiles/Gemfile.rails${RAILS_VERSION} --path=vendor/bundle_${RAILS_VERSION} ; \
BUNDLE_GEMFILE=gemfiles/Gemfile.rails${RAILS_VERSION} bundle exec rake test:units"
| #!/usr/bin/env bash
set -euo pipefail
readonly RAILS_VERSIONS=(60 60 60 master)
readonly RUBY_VERSIONS=(2.6.3 2.6.6 2.7.2 2.7.2)
readonly RAILS_VERSION=${RAILS_VERSIONS[$BUILDKITE_PARALLEL_JOB]}
readonly RUBY_VERSION=${RUBY_VERSIONS[$BUILDKITE_PARALLEL_JOB]}
echo -e "+++ :llama: Testing with :ruby: ${RUBY_VERSION} | :rails: ${RAILS_VERSION}"
docker run -it --rm -v "$PWD":/usr/src -w /usr/src ruby:${RUBY_VERSION}-slim-stretch sh -c "apt-get -qqy update && \
apt-get install -qy build-essential git-core ; bundle check --path=vendor/bundle_${RAILS_VERSION} \
--gemfile gemfiles/Gemfile.rails${RAILS_VERSION} || bundle install --jobs=4 --retry=3 --gemfile gemfiles/Gemfile.rails${RAILS_VERSION} --path=vendor/bundle_${RAILS_VERSION} ; \
BUNDLE_GEMFILE=gemfiles/Gemfile.rails${RAILS_VERSION} bundle exec rake test:units"
|
Use json file to configure docker |
if [ ! -f /etc/default/docker.backup ]; then
cp /etc/default/docker /etc/default/docker.backup
fi
echo '# Docker Upstart and SysVinit configuration file
# Customize location of Docker binary (especially for development testing).
#DOCKER="/usr/local/bin/docker"
# Use DOCKER_OPTS to modify the daemon startup options.
#DOCKER_OPTS="--dns 8.8.8.8 --dns 8.8.4.4"
#DOCKER_OPTS="--storage-driver=overlay -D"
DOCKER_OPTS="--storage-driver=btrfs -D"
# If you need Docker to use an HTTP proxy, it can also be specified here.
#export http_proxy="http://127.0.0.1:3128/"
# This is also a handy place to tweak where hte Docker temporary files go.
#export TMPDIR="/mnt/bigdrive/docker-tmp"' >> /etc/default/docker
| echo '{"storage-driver": "btrfs"}' > /etc/docker/daemon.json
|
Use new name for repo2 | #!/bin/sh
TRAVIS_PROTO=https
TRAVIS_MIRROR=repo2.voidlinux.eu
for _i in etc/repos-remote.conf etc/defaults.conf etc/repos-remote-x86_64.conf ; do
printf '\x1b[32mUpdating %s...\x1b[0m\n' $_i
# First fix the proto, ideally we'd serve everything with HTTPS,
# but key management and rotation is a pain, and things are signed
# so we can afford to be a little lazy at times.
sed -i "s:https:$TRAVIS_PROTO:g" $_i
# Now set the mirro
sed -i "s:repo\.voidlinux\.eu:$TRAVIS_MIRROR:g" $_i
done
| #!/bin/sh
TRAVIS_PROTO=https
TRAVIS_MIRROR=beta.de.repo.voidlinux.org
for _i in etc/repos-remote.conf etc/defaults.conf etc/repos-remote-x86_64.conf ; do
printf '\x1b[32mUpdating %s...\x1b[0m\n' $_i
# First fix the proto, ideally we'd serve everything with HTTPS,
# but key management and rotation is a pain, and things are signed
# so we can afford to be a little lazy at times.
sed -i "s:https:$TRAVIS_PROTO:g" $_i
# Now set the mirror
sed -i "s:repo\.voidlinux\.eu:$TRAVIS_MIRROR:g" $_i
done
|
Exclude javascript from ctags defaults, because it was causing a lot of indexing issues | # Git
alias gp!='git push origin $(current_branch)'
alias gcb='git push -u origin $(current_branch)'
alias gs='git status'
alias gaa='git add --all'
alias gc='git commit'
alias gca='git commit --amend --date="$(date)"'
alias wip='gaa && gca -m "WIP"'
alias gsh='git show HEAD'
alias gd='git diff'
alias grev='git diff master'
alias gsu='git submodule foreach git pull origin master'
# Misc
alias retag='bundle list --paths=true | xargs ctags --extra=+f --exclude=.git --exclude=log -R *'
| # Git
alias gp!='git push origin $(current_branch)'
alias gcb='git push -u origin $(current_branch)'
alias gs='git status'
alias gaa='git add --all'
alias gc='git commit'
alias gca='git commit --amend --date="$(date)"'
alias wip='gaa && gca -m "WIP"'
alias gsh='git show HEAD'
alias gd='git diff'
alias grev='git diff master'
alias gsu='git submodule foreach git pull origin master'
# Misc
alias retag='bundle list --paths=true | xargs ctags --extra=+f --exclude=.git --exclude=log --languages=-javascript -R *'
|
Convert env to lowercase for suffix of document-storage bucket name | #!/usr/bin/env bash
. ${BASH_SOURCE%/*}/env.sh
cd /usr/share/tomcat8/webapps
# deploy new application
rm -rf ROOT*
mv geodesy-web-services.war ROOT.war
unzip ROOT.war -d ROOT
sed -i 's/${geodesy-db-url}/jdbc:postgresql:\/\/'"${RDS_ENDPOINT}\/GeodesyDb/" ROOT/META-INF/context.xml
# set database login
sed -i 's/${geodesy-db-username}/'"${DB_USERNAME}/" ROOT/META-INF/context.xml
sed -i 's/${geodesy-db-password}/'"${DB_PASSWORD}/" ROOT/META-INF/context.xml
sed -i s,'${oauthProviderUrl}',"${OPENAM_ENDPOINT}"/oauth2, ROOT/WEB-INF/classes/config.properties
sed -i s,'${env}',"${ENV}", ROOT/WEB-INF/classes/config.properties
| #!/usr/bin/env bash
. ${BASH_SOURCE%/*}/env.sh
cd /usr/share/tomcat8/webapps
# deploy new application
rm -rf ROOT*
mv geodesy-web-services.war ROOT.war
unzip ROOT.war -d ROOT
sed -i 's/${geodesy-db-url}/jdbc:postgresql:\/\/'"${RDS_ENDPOINT}\/GeodesyDb/" ROOT/META-INF/context.xml
# set database login
sed -i 's/${geodesy-db-username}/'"${DB_USERNAME}/" ROOT/META-INF/context.xml
sed -i 's/${geodesy-db-password}/'"${DB_PASSWORD}/" ROOT/META-INF/context.xml
sed -i s,'${oauthProviderUrl}',"${OPENAM_ENDPOINT}"/oauth2, ROOT/WEB-INF/classes/config.properties
sed -i s,'${env}',"${ENV,,}", ROOT/WEB-INF/classes/config.properties
|
Fix for "~/.vim/colors" already exist | #!/bin/bash
# Install solarized.vim
mkdir -p ~/.vim/colors 2>/dev/null
git clone https://github.com/altercation/vim-colors-solarized.git ~/.vim/colors/vim-colors-solarized/
mv ~/.vim/colors/vim-colors-solarized/colors/solarized.vim ~/.vim/colors/solarized.vim
rm -rf ~/.vim/colors/vim-colors-solarized/
| #!/bin/bash
# Install solarized.vim
mkdir -p ~/.vim/colors 2>/dev/null
git clone https://github.com/altercation/vim-colors-solarized.git ~/.vim/colors/vim-colors-solarized/
mv ~/.vim/colors/vim-colors-solarized/colors/solarized.vim ~/.vim/colors/solarized.vim
rm -rf ~/.vim/colors/vim-colors-solarized/ ~/.vim/colors/.git
|
Increase a loop upper limit and sleep less. | #!/bin/sh
LANG=C
SED="/usr/bin/sed -i.orig"
SPATCH="spatch --very-quiet --timeout 120 --in-place --local-includes"
find -L "${LOCALPATCHES}/${PKGPATH}" "${SPATCHES}" | \
grep '\.\(sed\|cocci\)' | \
awk -F/ '{print $0 " " $NF}' | sort -k2 | awk '{print $1}' | \
while read sp; do
if ! test -f "${sp}"; then
continue
fi
ext=".${sp##*.}"
spp="${WRKSRC}/$(basename ${sp}).patch"
for i in $(seq 1 20); do
rm -f "${spp}"
touch "${spp}"
find "${WRKSRC}" | grep '\.\(c\|h\)$' | while read f; do
if ! test -f "${f}"; then
continue
fi
cp -f "${f}" "${f}.orig"
if [ "${ext}" = ".sed" ]; then
${SED} -f "${sp}" "${f}"
fi
if [ "${ext}" = ".cocci" ]; then
${SPATCH} ${SPATCH_ARGS} --sp-file "${sp}" "${f}" >/dev/null
fi
diff -u "${f}.orig" "${f}" | tee -a "${spp}"
done
if ! test -s "${spp}"; then
break
fi
sleep 60
done
rm -f "${spp}"
done
| #!/bin/sh
LANG=C
SED="/usr/bin/sed -i.orig"
SPATCH="spatch --very-quiet --timeout 120 --in-place --local-includes"
find -L "${LOCALPATCHES}/${PKGPATH}" "${SPATCHES}" | \
grep '\.\(sed\|cocci\)' | \
awk -F/ '{print $0 " " $NF}' | sort -k2 | awk '{print $1}' | \
while read sp; do
if ! test -f "${sp}"; then
continue
fi
ext=".${sp##*.}"
spp="${WRKSRC}/$(basename ${sp}).patch"
for i in $(seq 1 100); do
rm -f "${spp}"
touch "${spp}"
find "${WRKSRC}" | grep '\.\(c\|h\)$' | while read f; do
if ! test -f "${f}"; then
continue
fi
cp -f "${f}" "${f}.orig"
if [ "${ext}" = ".sed" ]; then
${SED} -f "${sp}" "${f}"
fi
if [ "${ext}" = ".cocci" ]; then
${SPATCH} ${SPATCH_ARGS} --sp-file "${sp}" "${f}" >/dev/null
fi
diff -u "${f}.orig" "${f}" | tee -a "${spp}"
done
if ! test -s "${spp}"; then
break
fi
sleep 10
done
rm -f "${spp}"
done
|
Enable flags to actually use multiple processors. Oops. | #!/bin/bash
export PATH=./build/bin/:$PATH
export LD_LIBRARY_PATH=./build/lib/:$LD_LIBRARY_PATH
erl -pa ./build/bin -noshell -s olegdb main -s init stop
| #!/bin/bash
export PATH=./build/bin/:$PATH
export LD_LIBRARY_PATH=./build/lib/:$LD_LIBRARY_PATH
erl +K true -smp enable -pa ./build/bin -noshell -s olegdb main -s init stop
|
Add convenience method for searching history | # Shortcuts
alias c='clear'
alias h='history'
# Extend existing commands
alias ls='ls -FG'
# New commands
alias reload='source ~/.zshrc'
alias nombom='rm -rf node_modules bower_components && npm cache clean && bower cache clean && npm install && bower install'
# Directory jumps
alias dev='cd ~/Developer'
| # Shortcuts
alias c='clear'
alias h='history'
alias hgrep='history | grep'
# Extend existing commands
alias ls='ls -FG'
# New commands
alias reload='source ~/.zshrc'
alias nombom='rm -rf node_modules bower_components && npm cache clean && bower cache clean && npm install && bower install'
# Directory jumps
alias dev='cd ~/Developer'
|
Fix script exiting with error when NO_CONFIGURE was set. | #!/bin/sh
ORIGDIR=`pwd`
srcdir=`dirname $0`
[ -n "$srcdir" ] && cd $srcdir
[ ! -d m4 ] && mkdir m4
autoreconf -Wno-portability --force --install -I m4 || exit $?
cd $ORIGDIR
[ -z "$NO_CONFIGURE" ] && $srcdir/configure --enable-maintainer-mode "$@"
| #!/bin/sh
ORIGDIR=`pwd`
srcdir=`dirname $0`
[ -n "$srcdir" ] && cd $srcdir
[ ! -d m4 ] && mkdir m4
autoreconf -Wno-portability --force --install -I m4 || exit $?
cd $ORIGDIR
if [ -z "$NO_CONFIGURE" ]
then
$srcdir/configure --enable-maintainer-mode "$@" || exit $?
fi
exit 0
|
Fix config name for mongodb24 | h!/bin/bash
THISDIR=$(dirname ${BASH_SOURCE[0]})
source ${THISDIR}/../../../common/functions.sh
source ${THISDIR}/../include.sh
echo "smallfiles = true" >>/opt/rh/rh-mongodb26/root/etc/mongodb.conf
service "$SERVICE_NAME" start
exit $?
| h!/bin/bash
THISDIR=$(dirname ${BASH_SOURCE[0]})
source ${THISDIR}/../../../common/functions.sh
source ${THISDIR}/../include.sh
echo "smallfiles = true" >>/opt/rh/mongodb24/root/etc/mongodb.conf
service "$SERVICE_NAME" start
exit $?
|
Exit on compile error on travis OSX build | #!/bin/sh
mkdir build
pushd build
if [[ "$RELEASE_BUILD" == "1" ]]; then
cmake -DCMAKE_BUILD_TYPE=Release ..
else
cmake -DCMAKE_BUILD_TYPE=Debug ..
fi
make -j2
popd # build
echo "--- Running unit tests ---"
if [[ "$RELEASE_BUILD" == "1" ]]; then
echo "Not running tests on release build"
else
trap 'exit' ERR
./build/bin/renderdoccmd test unit
./build/bin/qrenderdoc.app/Contents/MacOS/qrenderdoc --unittest
fi
| #!/bin/sh
trap 'exit' ERR
mkdir build
pushd build
if [[ "$RELEASE_BUILD" == "1" ]]; then
cmake -DCMAKE_BUILD_TYPE=Release ..
else
cmake -DCMAKE_BUILD_TYPE=Debug ..
fi
make -j2
popd # build
echo "--- Running unit tests ---"
if [[ "$RELEASE_BUILD" == "1" ]]; then
echo "Not running tests on release build"
else
./build/bin/renderdoccmd test unit
./build/bin/qrenderdoc.app/Contents/MacOS/qrenderdoc --unittest
fi
|
Add force delete on sync ova script | #/bin/bash -e
usage() {
echo "ERROR: Incorrect number of parameters"
echo "Usage: $0 <svnUser>"
echo " "
}
# Get parameters
if [ $# -ne 1 ]; then
usage
fi
svnUser=$1
# Define script variables
SHAREDDISK=/sharedDisk/
# Check Out svns
cd $HOME
svn co http://compss.bsc.es/svn/compss/framework/trunk trunk --username $svnUser
svn co http://compss.bsc.es/svn/bar/tutorial-apps/ tutorial-apps --username $svnUser
svn co http://compss.bsc.es/svn/bar/traces traces --username $svnUser
cd $SHAREDDISK
svn co http://compss.bsc.es/svn/bar/datasets . --username $svnUser
# Retrieve status
echo "DONE!"
exit 0
| #/bin/bash -e
usage() {
echo "ERROR: Incorrect number of parameters"
echo "Usage: $0 <svnUser>"
echo " "
}
# Get parameters
if [ $# -ne 1 ]; then
usage
fi
svnUser=$1
# Define script variables
SHAREDDISK=/sharedDisk/
# Check Out svns
cd $HOME
rm -rf trunk tutorial-apps traces
svn co http://compss.bsc.es/svn/compss/framework/trunk trunk --username $svnUser
svn co http://compss.bsc.es/svn/bar/tutorial-apps/ tutorial-apps --username $svnUser
svn co http://compss.bsc.es/svn/bar/traces traces --username $svnUser
cd $SHAREDDISK
rm -rf $SHAREDDISK*
svn co http://compss.bsc.es/svn/bar/datasets . --username $svnUser
# Retrieve status
echo "DONE!"
exit 0
|
Revert back to use `force_polling` | #!/bin/bash
sudo hostname leonardinius.galeoconsulting.com
cd /vagrant \
&& rm -rf ./_site \
&& sudo jekyll serve \
--watch \
--port 80 \
--drafts \
--config _config-dev.yml
| #!/bin/bash
sudo hostname leonardinius.galeoconsulting.com
cd /vagrant \
&& rm -rf ./_site \
&& sudo jekyll serve \
--watch \
--port 80 \
--force_polling \
--drafts \
--config _config-dev.yml
|
Change test order, set quick test before | # Execute tests
WORKSPACE=$1
cd $WORKSPACE
nosetests test/integration/TestIM.py test/integration/TestREST.py test/integration/TestREST_JSON.py -v --stop --with-xunit --with-timer --timer-no-color --with-coverage --cover-erase --cover-xml --cover-package=IM
| # Execute tests
WORKSPACE=$1
cd $WORKSPACE
nosetests test/integration/TestREST_JSON.py test/integration/TestREST.py test/integration/TestIM.py -v --stop --with-xunit --with-timer --timer-no-color --with-coverage --cover-erase --cover-xml --cover-package=IM
|
Use bench/util for utility functions | #!/bin/zsh
# Benchmark parameters
export TURBINE_ENGINES=1
export ADLB_SERVERS=1
TURBINE_WORKERS=1
PROCS=$(( TURBINE_ENGINES + ADLB_SERVERS + TURBINE_WORKERS ))
N=1000
# Delay in milliseconds
DELAY=0
# System settings
export TURBINE_DEBUG=0
export ADLB_DEBUG=0
export LOGGING=0
export ADLB_EXHAUST_TIME=1
export TURBINE_USER_LIB=$( cd ${PWD}/../util ; /bin/pwd )
START=$( date +%s )
turbine -l -n ${PROCS} foreach.tcl --N=${N} --delay=${DELAY}
STOP=$( date +%s )
TIME=$(( STOP - START - ADLB_EXHAUST_TIME ))
print "N: ${N} TIME: ${TIME}"
if (( TIME ))
then
TOTAL_RATE=$(( N / TIME ))
print "TOTAL_RATE: ${TOTAL_RATE}"
WORKER_RATE=$(( N / TIME / TURBINE_WORKERS ))
print "WORKER_RATE: ${WORKER_RATE}"
fi
if (( ${DELAY} ))
then
WORK_TIME=$(( N * DELAY/1000 ))
TOTAL_TIME=$(( TIME * TURBINE_WORKERS ))
UTIL=$(( WORK_TIME / TOTAL_TIME ))
print "UTIL: ${UTIL}"
fi
| #!/bin/zsh
# Benchmark parameters
export TURBINE_ENGINES=1
export ADLB_SERVERS=1
TURBINE_WORKERS=1
PROCS=$(( TURBINE_ENGINES + ADLB_SERVERS + TURBINE_WORKERS ))
N=1000
# Delay in milliseconds
DELAY=0
# Load common features
BENCH_UTIL=$( cd $( dirname $0 )/../util ; /bin/pwd )
source ${BENCH_UTIL}/tools.zsh
# System settings
export TURBINE_DEBUG=0
export ADLB_DEBUG=0
export LOGGING=0
export ADLB_EXHAUST_TIME=1
export TURBINE_USER_LIB=${BENCH_UTIL}
# Run stc if necessary
compile foreach.swift foreach.tcl
START=$( date +%s )
turbine -l -n ${PROCS} foreach.tcl --N=${N} --delay=${DELAY}
STOP=$( date +%s )
TIME=$(( STOP - START - ADLB_EXHAUST_TIME ))
print "N: ${N} TIME: ${TIME}"
if (( TIME ))
then
TOTAL_RATE=$(( N / TIME ))
print "TOTAL_RATE: ${TOTAL_RATE}"
WORKER_RATE=$(( N / TIME / TURBINE_WORKERS ))
print "WORKER_RATE: ${WORKER_RATE}"
fi
if (( ${DELAY} ))
then
WORK_TIME=$(( N * DELAY/1000 ))
TOTAL_TIME=$(( TIME * TURBINE_WORKERS ))
UTIL=$(( WORK_TIME / TOTAL_TIME ))
print "UTIL: ${UTIL}"
fi
|
Check if sed new line works on Jenkins machine. | #!/bin/bash
#
# Copyright (c) 2014-present Gini GmbH
#
if [ "$#" != "2" ]; then
echo "Usage: insert_release_version.sh <changelog> <version>"
fi
version_and_date="$2 (`date '+%d-%m-%Y'`)"
cd `dirname "$1"`
filename=`basename "$1"`
cat $filename | sed "s/{{version_and_date}}/${version_and_date}§$(printf '=%.0s' `seq 1 ${#version_and_date}`)/" | tr '§' '\n' > $filename.tmp && mv $filename.tmp $filename
| #!/bin/bash
#
# Copyright (c) 2014-present Gini GmbH
#
if [ "$#" != "2" ]; then
echo "Usage: insert_release_version.sh <changelog> <version>"
fi
version_and_date="$2 (`date '+%d-%m-%Y'`)"
cd `dirname "$1"`
filename=`basename "$1"`
cat $filename | sed "s/{{version_and_date}}/${version_and_date}\n$(printf '=%.0s' `seq 1 ${#version_and_date}`)/" > $filename.tmp && mv $filename.tmp $filename
|
Fix opencog repo path with relative one | ###########################
# OpenCog server settings #
###########################
# Path of the OpenCog repository
opencog_repo_path=/home/$USER/OpenCog/opencog
build_dir_name=build
#################################
# Background knowledge settings #
#################################
# File path of the scheme file containing background knowledge
scheme_file_path="./background_knowledge.scm"
##################
# MOSES settings #
##################
dataset_file_path="./data.csv"
jobs=4
evals=10K
| ###########################
# OpenCog server settings #
###########################
# Path of the OpenCog repository
opencog_repo_path="$(readlink -f .)/../../../.."
build_dir_name=build
#################################
# Background knowledge settings #
#################################
# File path of the scheme file containing background knowledge
scheme_file_path="./background_knowledge.scm"
##################
# MOSES settings #
##################
dataset_file_path="./data.csv"
jobs=4
evals=10K
|
Change error message if pid file exists but process does not | #!/bin/bash
PROG="mmx-console"
PID_PATH="./"
pid=
start() {
if [ -e "$PID_PATH/$PROG.pid" ]; then
## Program is running, exit with error.
echo "Error! $PROG is already running!" 1>&2
exit 1
else
nohup node start.js > mmx-console.out 2>&1&
touch "$PID_PATH/$PROG.pid"
pid=$!
echo $pid >> $PID_PATH/$PROG.pid
fi
}
stop() {
if [ -e "$PID_PATH/$PROG.pid" ]; then
pid=$(<$PID_PATH/$PROG.pid)
kill -SIGTERM $pid
rm "$PID_PATH/$PROG.pid"
echo "$PROG stopped"
else
## Program is not running, exit with error.
echo "stop:$PROG : $PROG is not running" 1>&2
fi
}
case "$1" in
start)
start
exit 0
;;
stop)
stop
exit 0
;;
restart)
stop
start
exit 0
;;
**)
echo "Usage: $0 {start|stop|restart}" 1>&2
exit 1
;;
esac
| #!/bin/bash
PROG="mmx-console"
PID_PATH="./"
pid=
start() {
if [ -e "$PID_PATH/$PROG.pid" ]; then
## Program is running, exit with error.
echo "Error! $PROG is already running or you have a stale pid file. If $PROG is not running delete $PID_PATH/$PROG.pid file and restart" 1>&2
exit 1
else
nohup node start.js > mmx-console.out 2>&1&
touch "$PID_PATH/$PROG.pid"
pid=$!
echo $pid >> $PID_PATH/$PROG.pid
fi
}
stop() {
if [ -e "$PID_PATH/$PROG.pid" ]; then
pid=$(<$PID_PATH/$PROG.pid)
kill -SIGTERM $pid
rm "$PID_PATH/$PROG.pid"
echo "$PROG stopped"
else
## Program is not running, exit with error.
echo "stop:$PROG : $PROG is not running" 1>&2
fi
}
case "$1" in
start)
start
exit 0
;;
stop)
stop
exit 0
;;
restart)
stop
start
exit 0
;;
**)
echo "Usage: $0 {start|stop|restart}" 1>&2
exit 1
;;
esac
|
Add logging and timing, and which images we're saving | #!/bin/bash
set -ex
# We can't run the build step if there's no access to the secret env vars
if [[ "$TRAVIS_SECURE_ENV_VARS" == "true" ]]; then
docker login -u pachydermbuildbot -p "${DOCKER_PWD}"
# Load saved docker image layers, to avoid having to repeatedly apt-get
# stuff every time we build
docker load -i ${HOME}/docker_images/images.tar || true
make install docker-build
version=$(pachctl version --client-only)
docker tag "pachyderm/pachd:local" "pachyderm/pachd:${version}"
docker push "pachyderm/pachd:${version}"
docker tag "pachyderm/worker:local" "pachyderm/worker:${version}"
docker push "pachyderm/worker:${version}"
# Avoid having to rebuild unchanged docker image layers every time
docker save -o ${HOME}/docker_images/images.tar $(docker images -a -q)
ls -alh ${HOME}/docker_images/
# Push pipeline build images
make docker-push-pipeline-build
fi
| #!/bin/bash
set -ex
# We can't run the build step if there's no access to the secret env vars
if [[ "$TRAVIS_SECURE_ENV_VARS" == "true" ]]; then
docker login -u pachydermbuildbot -p "${DOCKER_PWD}"
# Load saved docker image layers, to avoid having to repeatedly apt-get
# stuff every time we build
echo "Loading docker images from cache"
time docker load -i ${HOME}/docker_images/images.tar || true
time make install docker-build
version=$(pachctl version --client-only)
docker tag "pachyderm/pachd:local" "pachyderm/pachd:${version}"
docker push "pachyderm/pachd:${version}"
docker tag "pachyderm/worker:local" "pachyderm/worker:${version}"
docker push "pachyderm/worker:${version}"
# Avoid having to rebuild unchanged docker image layers every time
echo "Saving these docker images to cache:"
docker images -a -q
time docker save -o ${HOME}/docker_images/images.tar $(docker images -a -q)
ls -alh ${HOME}/docker_images/
# Push pipeline build images
make docker-push-pipeline-build
fi
|
Fix inverted comma placement when creating filename | #!/usr/bin/env bash
set -o errexit
print_help () {
echo "Usage: ./publish.sh <path-to-post-file> <title-of-post> <tags>"
echo "Note: assumes post file to be markdown"
}
path=$1
title=$2
tags=${*:3}
if test ! -f "$path"
then
print_help
exit 1
fi
if test -z "$title"
then
print_help
exit 1
fi
post_name=$(python lib/create-post/filename.py --title "$title --ext md")
post_path="_posts/$post_name"
post_front_matter=$(python lib/create-post/front_matter.py --tags $tags)
echo $"$post_front_matter" | cat - "$path" > temp
mv temp $post_path
git add "$post_path"
git commit -m "Add post: $post_name"
git push
| #!/usr/bin/env bash
set -o errexit
print_help () {
echo "Usage: ./publish.sh <path-to-post-file> <title-of-post> <tags>"
echo "Note: assumes post file to be markdown"
}
path=$1
title=$2
tags=${*:3}
if test ! -f "$path"
then
print_help
exit 1
fi
if test -z "$title"
then
print_help
exit 1
fi
post_name=$(python lib/create-post/filename.py --title "$title" --ext md)
post_path="_posts/$post_name"
post_front_matter=$(python lib/create-post/front_matter.py --tags $tags)
echo $"$post_front_matter" | cat - "$path" > temp
mv temp $post_path
git add "$post_path"
git commit -m "Add post: $post_name"
git push
|
Fix explanation of sigterm handling in bash | #!/bin/bash
gearcmd --name oplog-dump --cmd /usr/local/bin/oplogdump --host $GEARMAN_HOST --port $GEARMAN_PORT &
pid=$!
# When we get a SIGTERM kill the child process and call wait. Note that we need wait both here
# and the line below because of the semantics of the kill syscall. As I understand it, wait returns
# the next time the state of the child process changes. Receiving a SIGTERM qualifies as a change
# of state, so if we don't have a wait after `kill $pid` then we will complete the trap handler
# and our call to `wait` below will complete. By calling `wait` after we kill the subprocess we wait
# for the next process state change which is the actual process termination.
trap "kill $pid && wait" SIGTERM SIGINT
# Wait so that this script keeps running
wait
| #!/bin/bash
gearcmd --name oplog-dump --cmd /usr/local/bin/oplogdump --host $GEARMAN_HOST --port $GEARMAN_PORT &
pid=$!
# When we get a SIGTERM, forward it to the child process and call wait. Note that we wait both in here
# and below (on line 10) because when bash gets a SIGTERM bash appears to cancel the currently running
# command, call the trap handler, and then resume the script on the line after the line it was previously
# running. That means that without waiting in the trap we could exit the script before gearcmd actually exits.
trap "kill $pid && wait" SIGTERM SIGINT
# Wait so that this script keeps running
wait
|
Fix creating the scripts directory if it doesn't exist in build docs | #!/bin/bash
echo "-> Build examples"
cd examples
mvn clean package
cd ..
echo "-> Copy built examples to Gitbook"
rm book/dependencies/scripts/*
cp examples/target/vue-gwt-examples-*/VueGwtExamples/* book/dependencies/scripts/
echo "-> Build Gitbook"
cd book
gitbook build
cd ..
echo "-> Copy built Gitbook to docs"
cd ..
rm -rf docs/*
cp -r docs-source/book/_book/* docs/
rm docs/dependencies.md # Useless file getting copied
echo "Success!" | #!/bin/bash
echo "-> Build examples"
cd examples
mvn clean package
cd ..
echo "-> Copy built examples to Gitbook"
mkdir book/dependencies/scripts
rm book/dependencies/scripts/*
cp examples/target/vue-gwt-examples-*/VueGwtExamples/* book/dependencies/scripts/
echo "-> Build Gitbook"
cd book
gitbook build
cd ..
echo "-> Copy built Gitbook to docs"
cd ..
rm -rf docs/*
cp -r docs-source/book/_book/* docs/
rm docs/dependencies.md # Useless file getting copied
echo "Success!" |
Set max dirs displayed in PS1 to 5 | #-------------------------------------------------------------------------------
#
# powerline.zsh
# Powerline configuration
#
#-------------------------------------------------------------------------------
# Powerline PS1 setup using milkbikis/powerline-shell
# NOTE: This must be called after anigen is primed in z_login.zsh for $ADOTDIR
function install_powerline_prompt() {
POWERLINE_HOME="${ADOTDIR}/repos/https-COLON--SLASH--SLASH-github.com-SLASH-phatblat-SLASH-powerline-shell.git-PIPE-custom"
# POWERLINE_HOME="${HOME}/dev/shell/powerline-shell"
function powerline_precmd() {
PS1="$(${POWERLINE_HOME}/powerline-shell.py $? --colorize-hostname --shell zsh 2> /dev/null)"
}
function install_powerline_precmd() {
for s in "${precmd_functions[@]}"; do
if [ "$s" = "powerline_precmd" ]; then
return
fi
done
precmd_functions+=(powerline_precmd)
}
if [ "$TERM" != "linux" ]; then
install_powerline_precmd
fi
}
alias powerlinetest='echo "⮀ ± ⭠ ➦ ✔ ✘ ⚡"'
| #-------------------------------------------------------------------------------
#
# powerline.zsh
# Powerline configuration
#
#-------------------------------------------------------------------------------
# Powerline PS1 setup using milkbikis/powerline-shell
# NOTE: This must be called after anigen is primed in z_login.zsh for $ADOTDIR
function install_powerline_prompt() {
POWERLINE_HOME="${ADOTDIR}/repos/https-COLON--SLASH--SLASH-github.com-SLASH-phatblat-SLASH-powerline-shell.git-PIPE-custom"
# POWERLINE_HOME="${HOME}/dev/shell/powerline-shell"
function powerline_precmd() {
PS1="$(${POWERLINE_HOME}/powerline-shell.py $? --colorize-hostname --shell zsh --cwd-max-depth 5 2> /dev/null)"
}
function install_powerline_precmd() {
for s in "${precmd_functions[@]}"; do
if [ "$s" = "powerline_precmd" ]; then
return
fi
done
precmd_functions+=(powerline_precmd)
}
if [ "$TERM" != "linux" ]; then
install_powerline_precmd
fi
}
alias powerlinetest='echo "⮀ ± ⭠ ➦ ✔ ✘ ⚡"'
|
Include Coverage For Auto-Gen of Log ID Var in Log DB | #!/usr/bin/env bash
#
# Inquisition Build Script
# - invokes build process in order to properly host Inquisition
#
APP_DIR='/opt/inquisition/'
LOG_DIR='/var/log/inquisition/'
# create directories
echo "Creating application directory..."
mkdir $APP_DIR > /dev/null 2>&1
echo "Creating application subdirectories..."
echo "tmp/"
mkdir $APP_DIR'tmp/' > /dev/null 2>&1
echo "Creating log directory..."
mkdir $LOG_DIR > /dev/null 2>&1
# copy files to app dir
rsync -av --exclude 'build' --exclude 'install' --exclude '.travis.yml' ./* $APP_DIR || exit 1
# provision db
echo "Initializing database..."
mysql -u root -e "create database inquisition"
echo "Creating DB service account..."
mysql -u root -e "GRANT SELECT,INSERT,UPDATE,DELETE ON inquisition.* TO inquisition@'localhost' IDENTIFIED BY ''"
mysql -u root -e "FLUSH PRIVILEGES"
echo "Import table schema..."
mysql -u root inquisition < build/src/inquisition.sql || exit 1
# setup log db
redis-cli set log_id 0
# run any tests
python -m pytest build/tests/ || exit 1
echo "Build complete!"
exit 0 | #!/usr/bin/env bash
#
# Inquisition Build Script
# - invokes build process in order to properly host Inquisition
#
APP_DIR='/opt/inquisition/'
LOG_DIR='/var/log/inquisition/'
# create directories
echo "Creating application directory..."
mkdir $APP_DIR > /dev/null 2>&1
echo "Creating application subdirectories..."
echo "tmp/"
mkdir $APP_DIR'tmp/' > /dev/null 2>&1
echo "Creating log directory..."
mkdir $LOG_DIR > /dev/null 2>&1
# copy files to app dir
rsync -av --exclude 'build' --exclude 'install' --exclude '.travis.yml' ./* $APP_DIR || exit 1
# provision db
echo "Initializing database..."
mysql -u root -e "create database inquisition"
echo "Creating DB service account..."
mysql -u root -e "GRANT SELECT,INSERT,UPDATE,DELETE ON inquisition.* TO inquisition@'localhost' IDENTIFIED BY ''"
mysql -u root -e "FLUSH PRIVILEGES"
echo "Import table schema..."
mysql -u root inquisition < build/src/inquisition.sql || exit 1
# run any tests
python -m pytest build/tests/ || exit 1
echo "Build complete!"
exit 0 |
Change get script to set git repo to use ssh | #!/usr/bin/env bash
git clone "git://github.com/andsens/homeshick.git" "$HOME/.homesick/repos/homeshick"
source "$HOME/.homesick/repos/homeshick/homeshick.sh"
homeshick clone basicdays/dotfiles
homeshick cd dotfiles
git submodule init
homeshick link dotfiles
| #!/usr/bin/env bash
git clone "git://github.com/andsens/homeshick.git" "$HOME/.homesick/repos/homeshick"
source "$HOME/.homesick/repos/homeshick/homeshick.sh"
homeshick clone basicdays/dotfiles
homeshick cd dotfiles
git remote set-url "git@github.com:basicdays/dotfiles.git"
git submodule init
homeshick link dotfiles
|
Increase go test timeout to prevent from false negative tests | #!/bin/bash -e
script_dir=$(cd `dirname $0`; pwd)
root_dir=`dirname $script_dir`
test_packages="."
go_test_flags="-v -race -timeout 2s"
echo Running go test on packages "'$test_packages'" with flags "'$go_test_flags'"
function test_html_coverage
{
test_coverage
go tool cover -html=profile.cov -o coverage.html
rm -f profile.cov
}
function test_coverage
{
echo "mode: atomic" > profile.cov
for pkg in $test_packages; do
go test $go_test_flags -covermode=atomic -coverprofile=profile_tmp.cov $pkg
[ -f profile_tmp.cov ] && tail -n +2 profile_tmp.cov >> profile.cov;
rm -f profile_tmp.cov
done
}
function test_local
{
go test $go_test_flags $test_packages
}
if [ "$1" = "html-coverage" ]; then
test_html_coverage
elif [ "$CI" = "true" ]; then
test_coverage
else
test_local
fi
| #!/bin/bash -e
script_dir=$(cd `dirname $0`; pwd)
root_dir=`dirname $script_dir`
test_packages="."
go_test_flags="-v -race -timeout 5s"
echo Running go test on packages "'$test_packages'" with flags "'$go_test_flags'"
function test_html_coverage
{
test_coverage
go tool cover -html=profile.cov -o coverage.html
rm -f profile.cov
}
function test_coverage
{
echo "mode: atomic" > profile.cov
for pkg in $test_packages; do
go test $go_test_flags -covermode=atomic -coverprofile=profile_tmp.cov $pkg
[ -f profile_tmp.cov ] && tail -n +2 profile_tmp.cov >> profile.cov;
rm -f profile_tmp.cov
done
}
function test_local
{
go test $go_test_flags $test_packages
}
if [ "$1" = "html-coverage" ]; then
test_html_coverage
elif [ "$CI" = "true" ]; then
test_coverage
else
test_local
fi
|
Revert "Do not assume a specific Ruby version." | #
# Configures Ruby gem installation and loads rvm/rbenv.
#
# Authors:
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# Install local gems according to Mac OS X conventions.
if [[ "$OSTYPE" == darwin* ]]; then
export GEM_HOME="$HOME/Library/Ruby/Gems/${$(ruby --version)[6,8]}"
path=("$GEM_HOME/bin" $path)
# Set environment variables for launchd processes.
launchctl setenv GEM_HOME "$GEM_HOME" &!
fi
# Loads RVM into the shell session.
if [[ -s "$HOME/.rvm/scripts/rvm" ]]; then
# Auto adding variable-stored paths to ~ list conflicts with RVM.
unsetopt AUTO_NAME_DIRS
# Source RVM.
source "$HOME/.rvm/scripts/rvm"
fi
# Loads rbenv into the shell session.
if [[ -s "$HOME/.rbenv/bin/rbenv" ]]; then
path=("$HOME/.rbenv/bin" $path)
eval "$(rbenv init - zsh)"
fi
| #
# Configures Ruby gem installation and loads rvm/rbenv.
#
# Authors:
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# Install local gems according to Mac OS X conventions.
if [[ "$OSTYPE" == darwin* ]]; then
export GEM_HOME=$HOME/Library/Ruby/Gems/1.8
path=("$GEM_HOME/bin" $path)
# Set environment variables for launchd processes.
launchctl setenv GEM_HOME "$GEM_HOME" &!
fi
# Loads RVM into the shell session.
if [[ -s "$HOME/.rvm/scripts/rvm" ]]; then
# Auto adding variable-stored paths to ~ list conflicts with RVM.
unsetopt AUTO_NAME_DIRS
# Source RVM.
source "$HOME/.rvm/scripts/rvm"
fi
# Loads rbenv into the shell session.
if [[ -s "$HOME/.rbenv/bin/rbenv" ]]; then
path=("$HOME/.rbenv/bin" $path)
eval "$(rbenv init - zsh)"
fi
|
Add fzf support on linux | #
# host.zsh - Host-specific, zsh-specific configurations
#
# Author
# Jake Zimmerman <jake@zimmerman.io>
#
# Usage
# Source this file.
#
# TODOs
# - n/a
# TODO(jez) linux
# source ~/.util/fzf.zsh
# Not needed yet.
# source ~/.util/skip-dirty.zsh
# TODO(jez) linux
# OPAM configuration
# source /Users/jez/.opam/opam-init/init.zsh > /dev/null 2> /dev/null || true
| #
# host.zsh - Host-specific, zsh-specific configurations
#
# Author
# Jake Zimmerman <jake@zimmerman.io>
#
# Usage
# Source this file.
#
# TODOs
# - n/a
source ~/.util/fzf.zsh
# Not needed yet.
# source ~/.util/skip-dirty.zsh
# TODO(jez) linux
# OPAM configuration
# source /Users/jez/.opam/opam-init/init.zsh > /dev/null 2> /dev/null || true
|
Remove call to relay in build, happens magically now | #!/bin/bash
set -euo pipefail
# Add the SHA1 sum of the webpack file to the host path
WEBPACK_CONFIG_SHA1=$(openssl sha1 webpack/config.js | sed 's/^.* //')
FRONTEND_HOST="$FRONTEND_HOST$WEBPACK_CONFIG_SHA1/"
echo "--- :information_desk_person: Appending SHA1 of webpack/config.js to \$FRONTEND_HOST"
echo "\$FRONTEND_HOST is now $FRONTEND_HOST"
echo "--- :relay: Compiling GraphQL Relay files"
yarn run relay
echo "--- :webpack: Building webpack assets for production"
yarn run build-production
echo "--- :javascript: Checking valid JS"
node --check dist/*.js && echo "👍 Javascript looks valid!"
echo "--- :docker: Copying to the host for artifact upload"
cp -a dist/* /host/dist
| #!/bin/bash
set -euo pipefail
# Add the SHA1 sum of the webpack file to the host path
WEBPACK_CONFIG_SHA1=$(openssl sha1 webpack/config.js | sed 's/^.* //')
FRONTEND_HOST="$FRONTEND_HOST$WEBPACK_CONFIG_SHA1/"
echo "--- :information_desk_person: Appending SHA1 of webpack/config.js to \$FRONTEND_HOST"
echo "\$FRONTEND_HOST is now $FRONTEND_HOST"
echo "--- :webpack: Building webpack assets for production"
yarn run build-production
echo "--- :javascript: Checking valid JS"
node --check dist/*.js && echo "👍 Javascript looks valid!"
echo "--- :docker: Copying to the host for artifact upload"
cp -a dist/* /host/dist
|
Use friendly name instead of the gateway ugly name. | #!/usr/bin/env bash
R=${RANDOM}
B=$((RANDOM%2))
CMD="http --verify no \
--json POST \
https://nkkmfsfq10.execute-api.us-west-1.amazonaws.com/adpevents/ chnl=${R} \
eid=${R} \
aid=${R} \
pos=${R} \
sid=${R} \
etype=${R} \
uid=${R} \
tid=${R} \
chnltype=${R} \
viewable=${B} \
elmid=${R} \
heid=${R} \
rid=${R}"
echo ${CMD}
${CMD}
| #!/usr/bin/env bash
R=${RANDOM}
B=$((RANDOM%2))
CMD="http --verify no \
--json POST \
https://staging-adp-events.orfeo.io/staging chnl=${R} \
eid=${R} \
aid=${R} \
pos=${R} \
sid=${R} \
etype=${R} \
uid=${R} \
tid=${R} \
chnltype=${R} \
viewable=${B} \
elmid=${R} \
heid=${R} \
rid=${R}"
echo ${CMD}
${CMD}
|
Revert "Use forked protobuf for generating Go" | #!/bin/bash
dir_resolve()
{
cd "$1" 2>/dev/null || return $? # cd to desired directory; if fail, quell any error messages but return exit status
echo "`pwd -P`" # output full, link-resolved path
}
set -e
TARGET=`dir_resolve $1`
if [ -z "$TARGET" ]; then
echo 'USAGE: `generate-go.sh TARGET_PATH`'
echo ''
echo 'TARGET_PATH is where you would like the control and events packages to be generated.'
exit 1
fi
go get github.com/jmtuley/protobuf/{proto,protoc-gen-gogo,gogoproto}
pushd events
mkdir -p $TARGET/events
protoc --plugin=$(which protoc-gen-gogo) --gogo_out=$TARGET/events --proto_path=$GOPATH/src:$GOPATH/src/github.com/gogo/protobuf/protobuf:. *.proto
popd
pushd control
mkdir -p $TARGET/control
protoc --plugin=$(which protoc-gen-gogo) --gogo_out=$TARGET/control --proto_path=$GOPATH/src:$GOPATH/src/github.com/gogo/protobuf/protobuf:. *.proto
popd
| #!/bin/bash
dir_resolve()
{
cd "$1" 2>/dev/null || return $? # cd to desired directory; if fail, quell any error messages but return exit status
echo "`pwd -P`" # output full, link-resolved path
}
set -e
TARGET=`dir_resolve $1`
if [ -z "$TARGET" ]; then
echo 'USAGE: `generate-go.sh TARGET_PATH`'
echo ''
echo 'TARGET_PATH is where you would like the control and events packages to be generated.'
exit 1
fi
go get github.com/gogo/protobuf/{proto,protoc-gen-gogo,gogoproto}
pushd events
mkdir -p $TARGET/events
protoc --plugin=$(which protoc-gen-gogo) --gogo_out=$TARGET/events --proto_path=$GOPATH/src:$GOPATH/src/github.com/gogo/protobuf/protobuf:. *.proto
popd
pushd control
mkdir -p $TARGET/control
protoc --plugin=$(which protoc-gen-gogo) --gogo_out=$TARGET/control --proto_path=$GOPATH/src:$GOPATH/src/github.com/gogo/protobuf/protobuf:. *.proto
popd
|
Revise gmock to depend on libclang 3.5 | #!/bin/sh -e
gmock()
{
LD_LIBRARY_PATH=/usr/lib/llvm-3.4/lib python ~/repo/gmock/gmock.py "$@"
}
| #!/bin/sh -e
gmock()
{
LD_LIBRARY_PATH=/usr/lib/llvm-3.5/lib python ~/repo/gmock/gmock.py "$@"
}
|
Make backup-restore test more robust | #!/bin/bash
BACKUP_RESTORE_FILE="backup_restore.root"
rm $BACKUP_RESTORE_FILE
# start simulation
./backup-restore -b $BACKUP_RESTORE_FILE &
# simulate crash of simulation after 5 seconds
SIMULATION_PID=$!
sleep 5
kill -9 $SIMULATION_PID
# restart after artificial crash
./backup-restore -b $BACKUP_RESTORE_FILE -r $BACKUP_RESTORE_FILE
RETURN_CODE=$?
rm $BACKUP_RESTORE_FILE
exit $RETURN_CODE
| #!/bin/bash
BACKUP_RESTORE_FILE="backup_restore.root"
rm $BACKUP_RESTORE_FILE
# start simulation
./backup-restore -b $BACKUP_RESTORE_FILE &
# simulate crash of simulation after 5 seconds
SIMULATION_PID=$!
sleep 5
# wait longer if backup file has not been created yet
# check every second if backup file exists
for i in {0..5}; do
if [ -e "$BACKUP_RESTORE_FILE" ]; then
break
fi
sleep 1
done
kill -9 $SIMULATION_PID
# restart after artificial crash
./backup-restore -b $BACKUP_RESTORE_FILE -r $BACKUP_RESTORE_FILE
RETURN_CODE=$?
rm $BACKUP_RESTORE_FILE
exit $RETURN_CODE
|
Support for spaces in directories when downloading cifar10 | #!/usr/bin/env sh
# This scripts downloads the CIFAR10 (binary version) data and unzips it.
DIR="$( cd "$(dirname "$0")" ; pwd -P )"
cd $DIR
echo "Downloading..."
wget --no-check-certificate http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz
echo "Unzipping..."
tar -xf cifar-10-binary.tar.gz && rm -f cifar-10-binary.tar.gz
mv cifar-10-batches-bin/* . && rm -rf cifar-10-batches-bin
# Creation is split out because leveldb sometimes causes segfault
# and needs to be re-created.
echo "Done."
| #!/usr/bin/env sh
# This scripts downloads the CIFAR10 (binary version) data and unzips it.
DIR="$( cd "$(dirname "$0")" ; pwd -P )"
cd "$DIR"
echo "Downloading..."
wget --no-check-certificate http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz
echo "Unzipping..."
tar -xf cifar-10-binary.tar.gz && rm -f cifar-10-binary.tar.gz
mv cifar-10-batches-bin/* . && rm -rf cifar-10-batches-bin
# Creation is split out because leveldb sometimes causes segfault
# and needs to be re-created.
echo "Done."
|
Fix bash setup for Linux | #!/bin/bash
SRC_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
ln -s $SRC_DIR/bash_profile ~/.bash_profile
| #!/bin/bash
SRC_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# on OS X we can use .bash_profile, but on Linux desktops we'll
# want to use .profile instead (for a graphical, non-login shell)
OS=$(uname -s)
DEST=""
if [ "$OS" = "Darwin" ]; then
DEST="${HOME}/.bash_profile"
elif [ "$OS" = "Linux" ]; then
DEST="${HOME}/.profile"
else
echo "Error: Unknown OS: $OS"
return 1
fi
#check if dest exists so we don't blow away existing config
if [ -e $DEST ]; then
cat "${SRC_DIR}/bash_profile" >> $DEST
else
ln -s "${SRC_DIR}/bash_profile" $DEST
fi
echo "Log out and in again to apply bash profile"
|
Add generated documentation to the committed files | #!/bin/bash
git add Makefile
git add CHANGELOG.md
git add Sources/Runner/main.swift
git commit -m "Version $NEW_VERSION"
git tag "$NEW_VERSION"
git push origin master --tags
| #!/bin/bash
git add Makefile
git add CHANGELOG.md
git add Sources/Runner/main.swift
git add Documentation
git commit -m "Version $NEW_VERSION"
git tag "$NEW_VERSION"
git push origin master --tags
|
Fix syntax error in colored-man-pages | if [ "$OSTYPE" = solaris* ]
then
if [ ! -x "$HOME/bin/nroff" ]
then
mkdir -p "$HOME/bin"
cat > "$HOME/bin/nroff" <<EOF
#!/bin/sh
if [ -n "\$_NROFF_U" -a "\$1,\$2,\$3" = "-u0,-Tlp,-man" ]; then
shift
exec /usr/bin/nroff -u\$_NROFF_U "\$@"
fi
#-- Some other invocation of nroff
exec /usr/bin/nroff "\$@"
EOF
chmod +x "$HOME/bin/nroff"
fi
fi
man() {
env \
LESS_TERMCAP_mb=$(printf "\e[1;31m") \
LESS_TERMCAP_md=$(printf "\e[1;31m") \
LESS_TERMCAP_me=$(printf "\e[0m") \
LESS_TERMCAP_se=$(printf "\e[0m") \
LESS_TERMCAP_so=$(printf "\e[1;44;33m") \
LESS_TERMCAP_ue=$(printf "\e[0m") \
LESS_TERMCAP_us=$(printf "\e[1;32m") \
PAGER=/usr/bin/less \
_NROFF_U=1 \
PATH="$HOME/bin:$PATH" \
man "$@"
}
| if [[ "$OSTYPE" = solaris* ]]
then
if [[ ! -x "$HOME/bin/nroff" ]]
then
mkdir -p "$HOME/bin"
cat > "$HOME/bin/nroff" <<EOF
#!/bin/sh
if [ -n "\$_NROFF_U" -a "\$1,\$2,\$3" = "-u0,-Tlp,-man" ]; then
shift
exec /usr/bin/nroff -u\$_NROFF_U "\$@"
fi
#-- Some other invocation of nroff
exec /usr/bin/nroff "\$@"
EOF
chmod +x "$HOME/bin/nroff"
fi
fi
man() {
env \
LESS_TERMCAP_mb=$(printf "\e[1;31m") \
LESS_TERMCAP_md=$(printf "\e[1;31m") \
LESS_TERMCAP_me=$(printf "\e[0m") \
LESS_TERMCAP_se=$(printf "\e[0m") \
LESS_TERMCAP_so=$(printf "\e[1;44;33m") \
LESS_TERMCAP_ue=$(printf "\e[0m") \
LESS_TERMCAP_us=$(printf "\e[1;32m") \
PAGER=/usr/bin/less \
_NROFF_U=1 \
PATH="$HOME/bin:$PATH" \
man "$@"
}
|
Create directory before trying to copy files into it. | #!/bin/bash
set -e
[ ! -e ../../config.sh ] && die "FRESteamWorks/config.sh is not set up!"
. ../../config.sh
make -C ../../src DEBUG=1 wrapper
cp ../../src/APIWrapper/APIWrapper NativeApps/Linux
"$FLEX_SDK/bin/amxmlc" -library-path+=../../lib/bin/FRESteamWorksLibLinux.swc \
-swf-version=11 -output FRESteamWorksTest_linux.swf \
-define=CONFIG::linux,true \
../src/FRESteamWorksTest.as
| #!/bin/bash
set -e
[ ! -e ../../config.sh ] && die "FRESteamWorks/config.sh is not set up!"
. ../../config.sh
make -C ../../src DEBUG=1 wrapper
mkdir -p NativeApps/Linux/
cp ../../src/APIWrapper/APIWrapper NativeApps/Linux/
"$FLEX_SDK/bin/amxmlc" -library-path+=../../lib/bin/FRESteamWorksLibLinux.swc \
-swf-version=11 -output FRESteamWorksTest_linux.swf \
-define=CONFIG::linux,true \
../src/FRESteamWorksTest.as
|
Use virtualenv in docker cpu build, as we are not running as root. | #!/bin/bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -x
cd bazel_pip
pip --version
pip install portpicker
pip install *.whl
# Use default configuration
yes "" | python configure.py
# Run as non-root to aviod file permission related test failures.
useradd -m normal_user
su normal_user
PIP_TEST_ROOT=pip_test_root
mkdir -p ${PIP_TEST_ROOT}
ln -s $(pwd)/tensorflow ${PIP_TEST_ROOT}/tensorflow
bazel test --define=no_tensorflow_py_deps=true \
--test_lang_filters=py \
--build_tests_only \
-k \
--test_tag_filters=-no_oss,-oss_serial,-no_pip,-nopip \
--test_size_filters=small,medium \
--test_timeout 300,450,1200,3600 \
--test_output=errors \
-- //${PIP_TEST_ROOT}/tensorflow/python/...
| #!/bin/bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -x
cd bazel_pip
virtualenv --system-site-packages --python=python .env
source .env/bin/activate
pip --version
pip install portpicker
pip install *.whl
# Use default configuration
yes "" | python configure.py
PIP_TEST_ROOT=pip_test_root
mkdir -p ${PIP_TEST_ROOT}
ln -s $(pwd)/tensorflow ${PIP_TEST_ROOT}/tensorflow
bazel test --define=no_tensorflow_py_deps=true \
--test_lang_filters=py \
--build_tests_only \
-k \
--test_tag_filters=-no_oss,-oss_serial,-no_pip,-nopip \
--test_size_filters=small,medium \
--test_timeout 300,450,1200,3600 \
--test_output=errors \
-- //${PIP_TEST_ROOT}/tensorflow/python/...
|
Switch to using systemctl for mongodb install in travis | #!/usr/bin/env bash
sudo service mongodb stop
# Import mongodb public GPG key
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927
echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list
sudo apt-get update -qq -y
sudo apt-get install -y mongodb-org
sudo service mongod start | #!/usr/bin/env bash
sudo service mongodb stop
# Import mongodb public GPG key
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927
echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list
sudo apt-get update -qq -y
sudo apt-get install -y mongodb-org
sudo systemctl start mongod |
FIX avoid "-release" in autogenerated release RPMs | #!/bin/bash
set -e
secret=$1
type=$2
docker run -e BROKER_RELEASE=$type -v $(pwd):/opt/orion --workdir=/opt/orion fiware/orion-ci:rpm8 make rpm
for file in "$(pwd)/rpm/RPMS/x86_64"/*
do
filename=$(basename $file)
echo "Uploading $filename"
curl -v -f -u telefonica-github:$secret --upload-file $file https://nexus.lab.fiware.org/repository/el/8/x86_64/$type/$filename
done | #!/bin/bash
set -e
secret=$1
type=$2
if [ "$type" == "release" ]
then
type="1"
fi
docker run -e BROKER_RELEASE=$type -v $(pwd):/opt/orion --workdir=/opt/orion fiware/orion-ci:rpm8 make rpm
for file in "$(pwd)/rpm/RPMS/x86_64"/*
do
filename=$(basename $file)
echo "Uploading $filename"
curl -v -f -u telefonica-github:$secret --upload-file $file https://nexus.lab.fiware.org/repository/el/8/x86_64/$type/$filename
done
|
Fix name of media container | #!/bin/bash
set -euo pipefail
start() {
[ -d $2 ] || {
echo "No such directory: '$2'"
return
}
docker rm -f $1-fileserver && echo "Removed $1-fileserver container"
docker run --name $1-fileserver -d -v "$2:/var/www" -p $3:8080 hkjn/fileserver
echo "$1-fileserver is running at $3, serving directory '$2'"
}
start musashi /media/musashi 8080
start staging $HOME/staging 8081
start staging $HOME/media 8082
start timothy /media/timothy/movies 8083
start usb1 /run/media/zero/USB20FD 8084
start usb2 /run/media/zero/f538fa97-80d3-4ef3-9010-99460637a69a 8085
echo "Started media fileserver containers."
| #!/bin/bash
set -euo pipefail
start() {
[ -d $2 ] || {
echo "No such directory: '$2'"
return
}
docker rm -f $1-fileserver && echo "Removed $1-fileserver container"
docker run --name $1-fileserver -d -v "$2:/var/www" -p $3:8080 hkjn/fileserver
echo "$1-fileserver is running at $3, serving directory '$2'"
}
start musashi /media/musashi 8080
start staging $HOME/staging 8081
start media $HOME/media 8082
start timothy /media/timothy/movies 8083
start usb1 /run/media/zero/USB20FD 8084
start usb2 /run/media/zero/f538fa97-80d3-4ef3-9010-99460637a69a 8085
echo "Started media fileserver containers."
|
Update CI to merge back to develop | #!/bin/sh
echo "Merging ${TRAVIS_BRANCH} to master"
git config --global user.email "travis@travis-ci.org"
git config --global user.name "Travis CI"
git config --global push.default current
# need to do it, bc Travis checks out a commit by hash
git fetch && git checkout master && git merge ${TRAVIS_BRANCH} --no-edit
git push https://${GH_TOKEN}@github.com/stoplay/stoplay-ext.git | #!/bin/sh
echo "Merging ${TRAVIS_BRANCH} to develop"
git config --global user.email "travis@travis-ci.org"
git config --global user.name "Travis CI"
git config --global push.default current
# need to do it, bc Travis checks out a commit by hash
git fetch && git checkout develop && git merge ${TRAVIS_BRANCH} --no-edit
git push https://${GH_TOKEN}@github.com/stoplay/stoplay-ext.git |
Add missing h5py dependency for keras | #!/bin/bash
##################################
# Deep Learning Vagrant Machine #
# by Holberton School #
##################################
function install {
echo installing "$1"
shift
apt-get -y install "$@" >/dev/null 2>&1
}
function pip_install {
echo installing "$1"
shift
pip install "$@" >/dev/null 2>&1
}
echo "updating package information"
apt-get -y update >/dev/null 2>&1
# Theano
install 'pip' python-pip
install 'theano dependencies' python-numpy python-scipy python-dev python-pip python-nose g++ git libatlas3gf-base libatlas-dev
pip_install 'theano' theano
# Keras
pip_install 'keras' keras
mkdir /home/vagrant/keras
git clone https://github.com/fchollet/keras /home/vagrant/keras/ >/dev/null 2>&1
# Tensorflow
pip_install 'tensorflow' --upgrade https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-0.7.1-cp27-none-linux_x86_64.whl
# Miscellaneous
pip_install 'required Python libraries' pyyaml cython
install 'hdf5' libhdf5-7
pip_install 'ipython' ipython
pip_install 'jupyter' jupyter
install 'matplotlib' matplotlib
echo 'All set!'
| #!/bin/bash
##################################
# Deep Learning Vagrant Machine #
# by Holberton School #
##################################
function install {
echo installing "$1"
shift
apt-get -y install "$@" >/dev/null 2>&1
}
function pip_install {
echo installing "$1"
shift
pip install "$@" >/dev/null 2>&1
}
echo "updating package information"
apt-get -y update >/dev/null 2>&1
# Theano
install 'pip' python-pip
install 'theano dependencies' python-numpy python-scipy python-dev python-pip python-nose g++ git libatlas3gf-base libatlas-dev
pip_install 'theano' theano
# Keras
pip_install 'keras' keras
mkdir /home/vagrant/keras
git clone https://github.com/fchollet/keras /home/vagrant/keras/ >/dev/null 2>&1
# Tensorflow
pip_install 'tensorflow' --upgrade https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-0.7.1-cp27-none-linux_x86_64.whl
# Miscellaneous
pip_install 'required Python libraries' pyyaml cython
install 'hdf5' libhdf5-7 libhdf5-dev
pip_install 'h5py' h5py
pip_install 'ipython' ipython
pip_install 'jupyter' jupyter
install 'matplotlib' matplotlib
echo 'All set!'
|
Add commit & push to deployment script. | #!/bin/bash
git checkout gh-pages
git clone git@github.com:measurement-factory/functional-text-sandbox.git
cd functional-text-sandbox
git clone git@github.com:measurement-factory/functional-text.git
cd functional-text
npm install
webpack
cd ..
npm install
webpack -p
cp src/index.html deploy-result/index.html
rm -fr functional-text
cd ..
cp functional-text-sandbox/deploy-result/* ./
rm -rf functional-text-sandbox | #!/bin/bash
git checkout gh-pages
git clone git@github.com:measurement-factory/functional-text-sandbox.git
cd functional-text-sandbox
git clone git@github.com:measurement-factory/functional-text.git
cd functional-text
npm install
webpack
cd ..
npm install
webpack -p
cp src/index.html deploy-result/index.html
rm -fr functional-text
cd ..
cp functional-text-sandbox/deploy-result/* ./
rm -rf functional-text-sandbox
if ! git diff-index --quiet HEAD --; then
git commit -m "Updating gh-pages branch."
git push
fi |
Update test suite to nightly-2019-08-26 | #!/bin/bash
REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
set -euo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
mkdir -p rust
touch rust/COMMIT
if [ "$(cat rust/COMMIT)" != "$REV" ]; then
rm -rf rust
mkdir rust
curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
| tar xz --directory rust --strip-components 1
echo "$REV" > rust/COMMIT
fi
| #!/bin/bash
REV=521d78407471cb78e9bbf47160f6aa23047ac499
set -euo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
mkdir -p rust
touch rust/COMMIT
if [ "$(cat rust/COMMIT)" != "$REV" ]; then
rm -rf rust
mkdir rust
curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
| tar xz --directory rust --strip-components 1
echo "$REV" > rust/COMMIT
fi
|
Remove CDPATH extension - seems to confuse things. | export EDITOR='vi'
export PAGER="less"
export LESS="-R"
export PATH="$HOME/bin:/usr/local/bin:/usr/local/sbin:$ZSH/bin:$PATH"
# add all first level subdirectories in ~/bin to PATH
for DIR in `find ~/bin/ -maxdepth 1 -type d`; do
export PATH=$PATH:$DIR
done
export LSCOLORS="exfxcxdxbxegedabagacad"
export CLICOLOR=true
#Disable "You have new mail" notifications
unset MAILCHECK
#GPG key
export DEBFULLNAME="Christopher Peplin"
export DEBEMAIL="chris.peplin@rhubarbtech.com"
export GPGKEY=D963BFAF
export HOSTNAME="`hostname`"
export CDPATH=$CDPATH:$PROJECTS
# workaround for Karmic - http://bit.ly/T8MIc
export GDK_NATIVE_WINDOWS=true
# if the command-not-found package is installed, use it
if [ -x /usr/lib/command-not-found ]; then
function command_not_found_handle {
# check because c-n-f could've been removed in the meantime
if [ -x /usr/lib/command-not-found ]; then
/usr/bin/python /usr/lib/command-not-found -- $1
return $?
else
return 127
fi
}
fi
| export EDITOR='vi'
export PAGER="less"
export LESS="-R"
export PATH="$HOME/bin:/usr/local/bin:/usr/local/sbin:$ZSH/bin:$PATH"
# add all first level subdirectories in ~/bin to PATH
for DIR in `find ~/bin/ -maxdepth 1 -type d`; do
export PATH=$PATH:$DIR
done
export LSCOLORS="exfxcxdxbxegedabagacad"
export CLICOLOR=true
#Disable "You have new mail" notifications
unset MAILCHECK
#GPG key
export DEBFULLNAME="Christopher Peplin"
export DEBEMAIL="chris.peplin@rhubarbtech.com"
export GPGKEY=D963BFAF
export HOSTNAME="`hostname`"
# workaround for Karmic - http://bit.ly/T8MIc
export GDK_NATIVE_WINDOWS=true
# if the command-not-found package is installed, use it
if [ -x /usr/lib/command-not-found ]; then
function command_not_found_handle {
# check because c-n-f could've been removed in the meantime
if [ -x /usr/lib/command-not-found ]; then
/usr/bin/python /usr/lib/command-not-found -- $1
return $?
else
return 127
fi
}
fi
|
Use locally installed jspm for configuring the auth token after npm install | #!/bin/bash
set -ev
if [ "$TRAVIS_SECURE_ENV_VARS" == "false" ]; then
# Force use of jspm registry auth token here due to the following situation:
# - secure vars not available on pull requests which are classified by travis as untrusted builds
# - jspm requires github authentication to avoid github rate limiting
# - normally the github auth token is encrypted and put in .travis.yml however it's not available here
# - all non upstream repo forks should provide their own JSPM_GITHUB_AUTH_TOKEN in travis environment variables
# auth key owned by @sundriver - public_repo only
node_modules/.bin/jspm config registries.github.auth c3VuZHJpdmVyOjI4NTU3ZjlkNzdhZGU4YjJhODk3NDQyOTEyMzU5NDY0ZDBjMjkwYmE=
fi
dir=$(pwd)
export RAM_CONF=$dir/backend/conf/conf.js
echo $RAM_CONF
cd frontend
npm install
node_modules/.bin/typings install
node_modules/.bin/jspm -v
node_modules/.bin/jspm cc
node_modules/.bin/jspm install -y
cd ../backend
npm install
node_modules/.bin/typings install
gulp ts:compile
gulp serve &
sleep 15
cd ../tests
npm install
gulp test
| #!/bin/bash
set -ev
dir=$(pwd)
export RAM_CONF=$dir/backend/conf/conf.js
echo $RAM_CONF
cd frontend
npm install
if [ "$TRAVIS_SECURE_ENV_VARS" == "false" ]; then
# Force use of jspm registry auth token here due to the following situation:
# - secure vars not available on pull requests which are classified by travis as untrusted builds
# - jspm requires github authentication to avoid github rate limiting
# - normally the github auth token is encrypted and put in .travis.yml however it's not available here
# - all non upstream repo forks should provide their own JSPM_GITHUB_AUTH_TOKEN in travis environment variables
echo "Secure environment variables not available. Probably a pull request. Using default auth key."
# auth key owned by @sundriver - public_repo only
node_modules/.bin/jspm config registries.github.auth c3VuZHJpdmVyOjI4NTU3ZjlkNzdhZGU4YjJhODk3NDQyOTEyMzU5NDY0ZDBjMjkwYmE=
fi
node_modules/.bin/typings install
node_modules/.bin/jspm -v
node_modules/.bin/jspm cc
node_modules/.bin/jspm install -y
cd ../backend
npm install
node_modules/.bin/typings install
gulp ts:compile
gulp serve &
sleep 15
cd ../tests
npm install
gulp test
|
Add command to run more tests | #!/bin/sh
python openid/test/test*.py
| #!/bin/sh
# naive version that doesn't run enough tests:
# python openid/test/test*.py
python3 -m unittest openid.test.test_suite
|
Check for expected type errors | #!/bin/sh
fn=$1
name=$2
output=$(node atw.js $fn)
expected=$(sed -n 's/^# -> \(.*\)/\1/p' $fn)
if [ "$output" = "$expected" ] ; then
echo $name ✓
else
echo $name ✘: $output $expected
fi
| #!/bin/sh
fn=$1
name=$2
output=$(node atw.js $fn)
expected=$(sed -n 's/^# -> \(.*\)/\1/p' $fn)
if [ "$expected" = "type error" ] ; then
[[ "$output" == type\ error:* ]]
else
[ "$output" = "$expected" ]
fi
success=$?
if [ $success -eq 0 ] ; then
echo $name ✓
else
echo $name ✘: $output \($expected\)
fi
|
Add Bash completion for `obtain-work` command | #!/usr/bin/env bash
_git_elegant() {
local cur prev opts
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
case "${prev}" in
elegant|git-elegant)
opts=($(git elegant commands))
COMPREPLY=( $(compgen -W "${opts[*]}" -- ${cur}) )
return 0 ;;
pull|epull)
local data=$(git branch | awk -F ' +' '! /\(no branch\)/ {print $2}')
COMPREPLY=( $(compgen -W "${data}" ${cur}) )
return 0 ;;
accept-work)
COMPREPLY=(
$(compgen -W "$(git branch --remotes --list)" -- ${cur})
)
return 0 ;;
*)
return 0 ;;
esac
}
complete -F _git_elegant git-elegant
| #!/usr/bin/env bash
_git_elegant() {
local cur prev opts
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
case "${prev}" in
elegant|git-elegant)
opts=($(git elegant commands))
COMPREPLY=( $(compgen -W "${opts[*]}" -- ${cur}) )
return 0 ;;
pull|epull)
local data=$(git branch | awk -F ' +' '! /\(no branch\)/ {print $2}')
COMPREPLY=( $(compgen -W "${data}" ${cur}) )
return 0 ;;
accept-work|obtain-work)
COMPREPLY=(
$(compgen -W "$(git branch --remotes --list)" -- ${cur})
)
return 0 ;;
*)
return 0 ;;
esac
}
complete -F _git_elegant git-elegant
|
Add double quotes just to be safe | #!/usr/bin/env bash
if [[ -n ${TRAVIS+x} && $TRAVIS != 'true' ]]
then
if [[ "$(git branch | grep '* master')" != "" ]]
then
echo "
Pushing Changes (required to build example)
-------------------------------------------
"
git push
sleep 2;
echo "
Rebuilding the example code
---------------------------
"
cd ${projectRoot}/example;
bash build.bash;
fi
fi | #!/usr/bin/env bash
if [[ -n "${TRAVIS+x}" && $TRAVIS != 'true' ]]
then
if [[ "$(git branch | grep '* master')" != "" ]]
then
echo "
Pushing Changes (required to build example)
-------------------------------------------
"
git push
sleep 2;
echo "
Rebuilding the example code
---------------------------
"
cd ${projectRoot}/example;
bash build.bash;
fi
fi |
Stop the scheduler after dependents | #!/bin/sh
#
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
DIR="$(cd $(dirname "$0"); pwd)"
"$DIR"/stop_scheduler.sh
"$DIR"/stop_poller.sh
"$DIR"/stop_reactionner.sh
"$DIR"/stop_broker.sh
"$DIR"/stop_receiver.sh
"$DIR"/stop_arbiter.sh
| #!/bin/sh
#
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
DIR="$(cd $(dirname "$0"); pwd)"
"$DIR"/stop_poller.sh
"$DIR"/stop_reactionner.sh
"$DIR"/stop_broker.sh
"$DIR"/stop_receiver.sh
"$DIR"/stop_scheduler.sh
"$DIR"/stop_arbiter.sh
|
Verify Rust source code formatting as part of the unit tests. | #!/bin/bash
# This is the top-level test script:
#
# - Build documentation for Rust code in 'src/tools/target/doc'.
# - Run unit tests for all Rust crates.
# - Make a debug build of all crates.
# - Make a release build of cton-util.
# - Run file-level tests with the release build of cton-util.
#
# All tests run by this script should be passing at all times.
# Exit immediately on errors.
set -e
# Repository top-level directory.
cd $(dirname "$0")
topdir=$(pwd)
function banner() {
echo "====== $@ ======"
}
PKGS="cretonne cretonne-reader cretonne-tools"
cd "$topdir/src/tools"
for PKG in $PKGS
do
banner "Rust $PKG unit tests"
cargo test -p $PKG
done
# Build cton-util for parser testing.
cd "$topdir/src/tools"
banner "Rust documentation"
echo "open $topdir/src/tools/target/doc/cretonne/index.html"
cargo doc
banner "Rust release build"
cargo build --release
export CTONUTIL="$topdir/src/tools/target/release/cton-util"
# Run the parser tests.
cd "$topdir/tests"
banner "Parser tests"
parser/run.sh
banner "CFG tests"
cfg/run.sh
banner "OK"
| #!/bin/bash
# This is the top-level test script:
#
# - Build documentation for Rust code in 'src/tools/target/doc'.
# - Run unit tests for all Rust crates.
# - Make a debug build of all crates.
# - Make a release build of cton-util.
# - Run file-level tests with the release build of cton-util.
#
# All tests run by this script should be passing at all times.
# Exit immediately on errors.
set -e
# Repository top-level directory.
cd $(dirname "$0")
topdir=$(pwd)
function banner() {
echo "====== $@ ======"
}
# Run rustfmt if we have it. (Travis probably won't).
if cargo install --list | grep -q '^rustfmt '; then
banner "Rust formatting"
$topdir/src/format-all.sh --write-mode=diff
fi
PKGS="cretonne cretonne-reader cretonne-tools"
cd "$topdir/src/tools"
for PKG in $PKGS
do
banner "Rust $PKG unit tests"
cargo test -p $PKG
done
# Build cton-util for parser testing.
cd "$topdir/src/tools"
banner "Rust documentation"
echo "open $topdir/src/tools/target/doc/cretonne/index.html"
cargo doc
banner "Rust release build"
cargo build --release
export CTONUTIL="$topdir/src/tools/target/release/cton-util"
# Run the parser tests.
cd "$topdir/tests"
banner "Parser tests"
parser/run.sh
banner "CFG tests"
cfg/run.sh
banner "OK"
|
Remove version check as all tested versions use the built-in development server | start_php_servers() {
for i in $(seq 0 6); do
port=8000
(( port += $i ))
php -S "127.0.0.1:${port}" -t tests/PHPCurlClass/ &
done
}
set -x
echo "CI_PHP_VERSION: ${CI_PHP_VERSION}"
php -r "var_dump(phpversion());"
php -r "var_dump(curl_version());"
composer self-update
composer install --prefer-source --no-interaction
# Let test server know we should allow testing.
export PHP_CURL_CLASS_TEST_MODE_ENABLED="yes"
if [[ "${CI_PHP_VERSION}" == "7.0" ]]; then
start_php_servers
elif [[ "${CI_PHP_VERSION}" == "7.1" ]]; then
start_php_servers
elif [[ "${CI_PHP_VERSION}" == "7.2" ]]; then
start_php_servers
elif [[ "${CI_PHP_VERSION}" == "7.3" ]]; then
start_php_servers
elif [[ "${CI_PHP_VERSION}" == "7.4" ]]; then
start_php_servers
elif [[ "${CI_PHP_VERSION}" == "8.0" ]]; then
start_php_servers
elif [[ "${CI_PHP_VERSION}" == "nightly" ]]; then
start_php_servers
fi
| start_php_servers() {
for i in $(seq 0 6); do
port=8000
(( port += $i ))
php -S "127.0.0.1:${port}" -t tests/PHPCurlClass/ &
done
}
set -x
echo "CI_PHP_VERSION: ${CI_PHP_VERSION}"
php -r "var_dump(phpversion());"
php -r "var_dump(curl_version());"
composer self-update
composer install --prefer-source --no-interaction
# Let test server know we should allow testing.
export PHP_CURL_CLASS_TEST_MODE_ENABLED="yes"
start_php_servers
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.