Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Upgrade Java 17 version in CI image | #!/bin/bash
set -e
case "$1" in
java8)
echo "https://github.com/bell-sw/Liberica/releases/download/8u333+2/bellsoft-jdk8u333+2-linux-amd64.tar.gz"
;;
java11)
echo "https://github.com/bell-sw/Liberica/releases/download/11.0.15.1+2/bellsoft-jdk11.0.15.1+2-linux-amd64.tar.gz"
;;
java17)
echo "https://github.com/bell-sw/Liberica/releases/download/17.0.3+7/bellsoft-jdk17.0.3+7-linux-amd64.tar.gz"
;;
java18)
echo "https://github.com/bell-sw/Liberica/releases/download/18.0.1+12/bellsoft-jdk18.0.1+12-linux-amd64.tar.gz"
;;
*)
echo $"Unknown java version"
exit 1
esac
| #!/bin/bash
set -e
case "$1" in
java8)
echo "https://github.com/bell-sw/Liberica/releases/download/8u333+2/bellsoft-jdk8u333+2-linux-amd64.tar.gz"
;;
java11)
echo "https://github.com/bell-sw/Liberica/releases/download/11.0.15.1+2/bellsoft-jdk11.0.15.1+2-linux-amd64.tar.gz"
;;
java17)
echo "https://github.com/bell-sw/Liberica/releases/download/17.0.3.1+2/bellsoft-jdk17.0.3.1+2-linux-amd64.tar.gz"
;;
java18)
echo "https://github.com/bell-sw/Liberica/releases/download/18.0.1+12/bellsoft-jdk18.0.1+12-linux-amd64.tar.gz"
;;
*)
echo $"Unknown java version"
exit 1
esac
|
Update GUI launcher to handle HEAD brew installs | #!/bin/bash
# This allows me to use the emacsclient alternate editor to start the GUI and server
# if it's not already running, opening the file in the GUI
if [ ! -f "$1" ]; then
touch "$1"
fi
open -a `brew --prefix emacs`/Emacs.app "$1"
| #!/bin/bash
# This allows me to use the emacsclient alternate editor to start the GUI and server
# if it's not already running, opening the file in the GUI
if [ ! -f "$1" ]; then
touch "$1"
fi
if [ -e $(brew --prefix --HEAD emacs)/Emacs.app ]; then
open -a `brew --prefix --HEAD emacs`/Emacs.app "$1"
else
open -a `brew --prefix emacs`/Emacs.app "$1"
fi
|
Fix an issue with publishing npm packages | #!/bin/bash
set -e
# Publish npm packages for selected locations
# NPM_AUTH_TOKEN must be set by the build environment
# CircleCI does not consider environment variables in npm's "npm_config_" format to be valid
# https://npm.community/t/cannot-set-npm-config-keys-containing-underscores-registry-auth-tokens-for-example-via-npm-config-environment-variables/233/9
export "npm_config_//registry.npmjs.org/:_authtoken=${NPM_AUTH_TOKEN}"
readonly GIT_VERSION=$(git describe --tags)
readonly PUBLISHED_NPM_PACKAGES=(
# Published Fontello is used by all builds, and is critical to publish
girder/web_client/fontello
# These lint configs are used by downstream plugins
girder/web_client/eslint-config
girder/web_client/pug-lint-config
# The raw JS source is used by some downstream 'external builds'
girder/web_client/src
# These plugins were published to support downstream external builds, and should be kept updated
plugins/jobs/girder_jobs/web_client
plugins/oauth/girder_oauth/web_client
plugins/gravatar/girder_gravatar/web_client
)
for directory in "${PUBLISHED_NPM_PACKAGES[@]}"; do
pushd "$directory"
npm version --allow-same-version --no-git-tag-version "$GIT_VERSION"
npm publish
popd
done
| #!/bin/bash
set -e
# Publish npm packages for selected locations
# NPM_AUTH_TOKEN must be set by the build environment
readonly GIT_VERSION=$(git describe --tags)
readonly PUBLISHED_NPM_PACKAGES=(
# Published Fontello is used by all builds, and is critical to publish
girder/web_client/fontello
# These lint configs are used by downstream plugins
girder/web_client/eslint-config
girder/web_client/pug-lint-config
# The raw JS source is used by some downstream 'external builds'
girder/web_client/src
# These plugins were published to support downstream external builds, and should be kept updated
plugins/jobs/girder_jobs/web_client
plugins/oauth/girder_oauth/web_client
plugins/gravatar/girder_gravatar/web_client
)
for directory in "${PUBLISHED_NPM_PACKAGES[@]}"; do
pushd "$directory"
# Trying to set the auth token via 'npm_config_' environment variables does not work
echo '//registry.npmjs.org/:_authToken=${NPM_AUTH_TOKEN}' > ./.npmrc
npm version --allow-same-version --no-git-tag-version "$GIT_VERSION"
npm publish --access public
rm --interactive=never ./.npmrc
popd
done
|
Allow override of skipping javadoc and sources | #!/bin/bash
declare -f debug > /dev/null || source "$(dirname $0)/logging.sh"
#
# Maven
#
is_maven_project() {
[ -f pom.xml ]
}
maven_command() {
# is_maven_project || fatal "No maven POM file found!"
if [ -x ./mvnw ]; then echo "./mvnw";
else echo "mvn";
fi
}
get_maven_version() {
echo $(printf 'VERSION=${project.version}\n0\n' | $(maven_command) help:evaluate | grep '^VERSION=' | sed 's/VERSION=//')
}
set_maven_version() {
$(maven_command) --batch-mode versions:set versions:commit -DnewVersion="${1}" >/dev/null || fatal "Could not set project version to ${1}!"
}
build_and_test_maven() {
log "Building and Testing project."
$(maven_command) --batch-mode clean verify -Dmaven.test.failure.ignore=false
}
build_and_publish_maven_artifacts() {
log "Building and Testing project."
$(maven_command) --batch-mode clean verify -Dmaven.test.failure.ignore=false -Dmaven.javadoc.skip=true -Dmaven.source.skip=true
log "Publishing project artifacts to maven central."
$(maven_command) --batch-mode --no-snapshot-updates -Prelease deploy -DskipTests
}
| #!/bin/bash
declare -f debug > /dev/null || source "$(dirname $0)/logging.sh"
#
# Maven
#
is_maven_project() {
[ -f pom.xml ]
}
maven_command() {
# is_maven_project || fatal "No maven POM file found!"
if [ -x ./mvnw ]; then echo "./mvnw";
else echo "mvn";
fi
}
get_maven_version() {
echo $(printf 'VERSION=${project.version}\n0\n' | $(maven_command) help:evaluate | grep '^VERSION=' | sed 's/VERSION=//')
}
set_maven_version() {
$(maven_command) --batch-mode versions:set versions:commit -DnewVersion="${1}" >/dev/null || fatal "Could not set project version to ${1}!"
}
build_and_test_maven() {
log "Building and Testing project."
$(maven_command) --batch-mode clean verify -Dmaven.test.failure.ignore=false
}
build_and_publish_maven_artifacts() {
log "Building and Testing project."
$(maven_command) --batch-mode clean verify -Dmaven.test.failure.ignore=false -Dmaven.javadoc.skip=${MAVEN_JAVADOC_SKIP:-true} -Dmaven.source.skip=${MAVEN_SOURCE_SKIP:-true}
log "Publishing project artifacts to maven central."
$(maven_command) --batch-mode --no-snapshot-updates -Prelease deploy -DskipTests
}
|
Disable publishing of werf binaries into the bintray during the release | #!/bin/bash
set -e
for f in $(find scripts/lib -type f -name "*.sh"); do
source $f
done
VERSION=$1
if [ -z "$VERSION" ] ; then
echo "Required version argument!" 1>&2
echo 1>&2
echo "Usage: $0 VERSION" 1>&2
exit 1
fi
if [ -z "$PUBLISH_BINTRAY_AUTH" ] ; then
echo "\$PUBLISH_BINTRAY_AUTH required!" 1>&2
exit 1
fi
if [ -z "$PUBLISH_GITHUB_TOKEN" ] ; then
echo "\$PUBLISH_GITHUB_TOKEN required!" 1>&2
exit 1
fi
( which git > /dev/null ) || ( echo "Cannot find git command!" 1>&2 && exit 1 )
( which curl > /dev/null ) || ( echo "Cannot find curl command!" 1>&2 && exit 1 )
( go_build $VERSION ) || ( echo "Failed to build!" 1>&2 && exit 1 )
( publish_binaries $VERSION ) || ( echo "Failed to publish release binaries!" 1>&2 && exit 1 )
( create_github_release $VERSION ) || ( echo "Failed to create github release!" 1>&2 && exit 1 )
| #!/bin/bash
set -e
for f in $(find scripts/lib -type f -name "*.sh"); do
source $f
done
VERSION=$1
if [ -z "$VERSION" ] ; then
echo "Required version argument!" 1>&2
echo 1>&2
echo "Usage: $0 VERSION" 1>&2
exit 1
fi
if [ -z "$PUBLISH_BINTRAY_AUTH" ] ; then
echo "\$PUBLISH_BINTRAY_AUTH required!" 1>&2
exit 1
fi
if [ -z "$PUBLISH_GITHUB_TOKEN" ] ; then
echo "\$PUBLISH_GITHUB_TOKEN required!" 1>&2
exit 1
fi
( which git > /dev/null ) || ( echo "Cannot find git command!" 1>&2 && exit 1 )
( which curl > /dev/null ) || ( echo "Cannot find curl command!" 1>&2 && exit 1 )
( go_build $VERSION ) || ( echo "Failed to build!" 1>&2 && exit 1 )
# ( publish_binaries $VERSION ) || ( echo "Failed to publish release binaries!" 1>&2 && exit 1 )
( create_github_release $VERSION ) || ( echo "Failed to create github release!" 1>&2 && exit 1 )
|
Fix tests to run all tests again and add onedrive | #!/bin/bash
go install
REMOTES="
TestSwift:
TestS3:
TestDrive:
TestGoogleCloudStorage:
TestDropbox:
TestAmazonCloudDrive:
"
REMOTES="
TestAmazonCloudDrive:
"
function test_remote {
args=$@
echo "@go test $args"
go test $args || {
echo "*** test $args FAILED ***"
exit 1
}
}
test_remote
test_remote --subdir
for remote in $REMOTES; do
test_remote --remote $remote
test_remote --remote $remote --subdir
done
echo "All OK"
| #!/bin/bash
go install
REMOTES="
TestSwift:
TestS3:
TestDrive:
TestGoogleCloudStorage:
TestDropbox:
TestAmazonCloudDrive:
TestOneDrive:
"
function test_remote {
args=$@
echo "@go test $args"
go test $args || {
echo "*** test $args FAILED ***"
exit 1
}
}
test_remote
test_remote --subdir
for remote in $REMOTES; do
test_remote --remote $remote
test_remote --remote $remote --subdir
done
echo "All OK"
|
Revert "Be able to configure the database host in the startup script" | #!/bin/sh
set -xe
DCI_DB_DIR=${DCI_DB_DIR:-".db_dir"}
DCI_DB_HOST=${DCI_DB_HOST:-"127.0.0.1"}
# get dci_db_dir absolute path
DCI_DB_DIR="$(cd "$(dirname "$0")/.." && pwd)/$DCI_DB_DIR"
# if the database is already running we do not want to run this script
[ ! -z "$DISABLE_DB_START" ] &&exit 0
# checks if pg_ctl command exists
type "pg_ctl"
# checks if not already running
pg_ctl status -D "$DCI_DB_DIR" &> /dev/null && pg_ctl stop -D "$DCI_DB_DIR"
[ -d "$DCI_DB_DIR" ] && rm -rf "$DCI_DB_DIR"
OPTIONS="--client-encoding=utf8 --full-page_writes=off \
--logging-collector=off --log-destination='stderr'"
# init the database directory and start the process
pg_ctl initdb -D "$DCI_DB_DIR" -o "--no-locale"
pg_ctl start -w -D "$DCI_DB_DIR" -o "-k $DCI_DB_DIR -F -h $DCI_DB_HOST $OPTIONS"
| #!/bin/sh
set -xe
DCI_DB_DIR=${DCI_DB_DIR:-".db_dir"}
# get dci_db_dir absolute path
DCI_DB_DIR="$(cd "$(dirname "$0")/.." && pwd)/$DCI_DB_DIR"
# if the database is already running we do not want to run this script
[ ! -z "$DISABLE_DB_START" ] &&exit 0
# checks if pg_ctl command exists
type "pg_ctl"
# checks if not already running
pg_ctl status -D "$DCI_DB_DIR" &> /dev/null && pg_ctl stop -D "$DCI_DB_DIR"
[ -d "$DCI_DB_DIR" ] && rm -rf "$DCI_DB_DIR"
OPTIONS="--client-encoding=utf8 --full-page_writes=off \
--logging-collector=off --log-destination='stderr'"
# init the database directory and start the process
pg_ctl initdb -D "$DCI_DB_DIR" -o "--no-locale"
pg_ctl start -w -D "$DCI_DB_DIR" -o "-k $DCI_DB_DIR -F -h '' $OPTIONS"
|
Set git user name and email. | #!/bin/bash
set -e
# fetch first changed file, assume at most one package touched per commit
TOUCHED=`git show --pretty="format:" --name-only | grep . | head -1`
PKGDIR=`dirname $TOUCHED`
if [ "$PKGDIR" = "." ]
then
echo Nothing to test
else
pushd $PKGDIR > /dev/null
makepkg -f -s --noconfirm --skippgpcheck --noprogressbar
popd > /dev/null
fi
| #!/bin/bash
set -e
git config --global user.email "ci@msys2.org"
git config --global user.name "MSYS2 Build Bot"
# fetch first changed file, assume at most one package touched per commit
TOUCHED=`git show --pretty="format:" --name-only | grep . | head -1`
PKGDIR=`dirname $TOUCHED`
if [ "$PKGDIR" = "." ]
then
echo Nothing to test
else
pushd $PKGDIR > /dev/null
makepkg -f -s --noconfirm --skippgpcheck --noprogressbar
popd > /dev/null
fi
|
Use develop if no matching branch found | #!/bin/bash
#
# script which is run by the travis build (after `npm run test`).
#
# clones riot-web develop and runs the tests against our version of react-sdk.
set -ev
RIOT_WEB_DIR=riot-web
REACT_SDK_DIR=`pwd`
curbranch="${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH}"
echo "Determined branch to be $curbranch"
git clone --depth=1 --branch "$curbranch" https://github.com/vector-im/riot-web.git \
"$RIOT_WEB_DIR"
cd "$RIOT_WEB_DIR"
mkdir node_modules
npm install
(cd node_modules/matrix-js-sdk && npm install)
rm -r node_modules/matrix-react-sdk
ln -s "$REACT_SDK_DIR" node_modules/matrix-react-sdk
npm run test
| #!/bin/bash
#
# script which is run by the travis build (after `npm run test`).
#
# clones riot-web develop and runs the tests against our version of react-sdk.
set -ev
RIOT_WEB_DIR=riot-web
REACT_SDK_DIR=`pwd`
curbranch="${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH}"
echo "Determined branch to be $curbranch"
git clone --depth=1 https://github.com/vector-im/riot-web.git \
"$RIOT_WEB_DIR"
git checkout "$curbranch" || git checkout develop
cd "$RIOT_WEB_DIR"
mkdir node_modules
npm install
(cd node_modules/matrix-js-sdk && npm install)
rm -r node_modules/matrix-react-sdk
ln -s "$REACT_SDK_DIR" node_modules/matrix-react-sdk
npm run test
|
Use the sync script from gca_elasticsearch repo | #!/bin/bash
ES_SCRIPTS=`dirname $0`/../scripts/elasticsearch
perl $ES_SCRIPTS/load_current_tree.mysql.pl \
-dbpass $RESEQTRACK_PASS \
-ftp -check_timestamp
perl $ES_SCRIPTS/load_files.es.pl \
-dbpass $RESEQTRACK_PASS \
-check_timestamp \
-es_host ves-hx-e4 \
-es_index_name igsr_beta
perl $ES_SCRIPTS/sync_hx_hh.es.pl \
-from_es_host ves-hx-e4 \
-to ves-pg-e4 \
-to ves-oy-e4 \
-repo hx_hh_sync \
-snapshot_prefix igsr_snap \
-snap_index igsr_beta \
-snap_index igsr \
-restore_index igsr_beta
| #!/bin/bash
ES_SCRIPTS=`dirname $0`/../scripts/elasticsearch
perl $ES_SCRIPTS/load_current_tree.mysql.pl \
-dbpass $RESEQTRACK_PASS \
-ftp -check_timestamp
perl $ES_SCRIPTS/load_files.es.pl \
-dbpass $RESEQTRACK_PASS \
-check_timestamp \
-es_host ves-hx-e4 \
-es_index_name igsr_beta
perl $GCA_ELASTICSEARCH/scripts/sync_hx_hh.es.pl \
-from ves-hx-e4 \
-to ves-pg-e4 \
-to ves-oy-e4 \
-repo igsr_repo \
-snap_index igsr_beta \
-snap_index igsr \
-restore_only igsr_beta
|
Add tests for VAQ with both rate controls. | #!/bin/sh
# Test RDOQ, SAO, deblock and signhide and subme.
set -eu
. "${0%/*}/util.sh"
common_args='264x130 10 -p0 -r1 --threads=2 --wpp --owf=1 --rd=0'
valgrind_test $common_args --no-rdoq --no-deblock --no-sao --no-signhide --subme=1 --pu-depth-intra=2-3
valgrind_test $common_args --no-rdoq --no-signhide --subme=0
valgrind_test $common_args --rdoq --no-deblock --no-sao --subme=0
valgrind_test $common_args --vaq=8
| #!/bin/sh
# Test RDOQ, SAO, deblock and signhide and subme.
set -eu
. "${0%/*}/util.sh"
common_args='264x130 10 -p0 -r1 --threads=2 --wpp --owf=1 --rd=0'
valgrind_test $common_args --no-rdoq --no-deblock --no-sao --no-signhide --subme=1 --pu-depth-intra=2-3
valgrind_test $common_args --no-rdoq --no-signhide --subme=0
valgrind_test $common_args --rdoq --no-deblock --no-sao --subme=0
valgrind_test $common_args --vaq=8
valgrind_test $common_args --vaq=8 --bitrate 3500
valgrind_test $common_args --vaq=8 --rc-algorithm oba --bitrate 3500
|
Add rectangle window manager for mac | #!/usr/bin/env bash
source utils.sh
if is_mac; then
if [[ $(command -v brew) == "" ]]; then
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
fi
brew update
brew install bash
brew install cmake
brew install tmux
brew install gnu-sed
brew install findutils
else
sudo apt-get install cmake python3-dev build-essential
fi
| #!/usr/bin/env bash
source utils.sh
if is_mac; then
if [[ $(command -v brew) == "" ]]; then
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
fi
brew update
brew install bash
brew install cmake
brew install tmux
brew install gnu-sed
brew install findutils
brew cask install rectangle
else
sudo apt-get install cmake python3-dev build-essential
fi
|
Add health check to start script | #!/bin/bash
echo Starting BRIT service with PID: $$;
echo Usage:
echo 1. At the password prompt, enter the password for the secret key ring
echo 2. Send the service to background with CTRL+Z
echo 3. Restart the stopped job in the background by typing 'bg'
echo 4. Exit from shell
echo 5. Verify service is still running by checking
echo .. TEST: http://localhost:7070/brit/public-key or
echo .. LIVE: https://multibit.org/brit/public-key
echo ..
echo TIP: You can find this process again by typing 'ps -A | grep brit'
echo TIP: If 'NoClassDefFoundError: ... BouncyCastleProvider' copy bcprov-jdk16-1.46.jar to project root
java -cp "bcprov-jdk16-1.46.jar;target/brit-service-develop-SNAPSHOT.jar" org.multibit.hd.brit_server.BritService server config.yml | #!/bin/bash
echo Starting BRIT service with PID: $$;
echo Usage:
echo 1. At the password prompt, enter the password for the secret key ring
echo 2. Send the service to background with CTRL+Z
echo 3. Restart the stopped job in the background by typing 'bg'
echo 4. Exit from shell
echo 5. Verify service as follows:
echo .. curl -XGET http://localhost:7071/healthcheck (check for no ERROR)
echo .. lynx https://multibit.org/brit/public-key (check for PGP key)
echo ..
echo TIP: You can find this process again by typing 'ps -A | grep brit'
echo TIP: If 'NoClassDefFoundError: ... BouncyCastleProvider' copy bcprov-jdk16-1.46.jar to project root
java -cp "bcprov-jdk16-1.46.jar;target/brit-service-develop-SNAPSHOT.jar" org.multibit.hd.brit_server.BritService server config.yml |
Fix blooper in brace matching (was backwards for context) | #!/usr/bin/env zsh
lang=$1 ; shift
names=$1 ; shift
function tag_name () {
md=$1 ; shift
name=$@
clear
git co -- $md
msg="Tag instances of name '$name' as language '$lang'"
perl -i -pne "s/(?<!\})$name(?!\})/\\\\lang$lang{$name}/g if ! /^(\[\^\d+\]|#+ )/" -- $md
git add -- $md
git --no-pager diff --cached -U0 --word-diff=color --word-diff-regex=. --minimal --ignore-all-space -- $md |
grep -v '@@'
git diff-index --quiet --cached HEAD && continue 1
read -q "?$msg? (y/n)" || continue 1
git commit -m "[auto] $msg"
}
for file in $@; do
perl -e 'print sort { length($b) <=> length($a) } <>' < $names |
while read name; do
tag_name $file $name
done
done
| #!/usr/bin/env zsh
lang=$1 ; shift
names=$1 ; shift
function tag_name () {
md=$1 ; shift
name=$@
clear
git co -- $md
msg="Tag instances of name '$name' as language '$lang'"
perl -i -pne "s/(?<!\{)$name(?!\})/\\\\lang$lang{$name}/g if ! /^(\[\^\d+\]|#+ )/" -- $md
git add -- $md
git --no-pager diff --cached -U0 --word-diff=color --word-diff-regex=. --minimal --ignore-all-space -- $md |
grep -v '@@'
git diff-index --quiet --cached HEAD && continue 1
read -q "?$msg? (y/n)" || continue 1
git commit -m "[auto] $msg"
}
for file in $@; do
perl -e 'print sort { length($b) <=> length($a) } <>' < $names |
while read name; do
tag_name $file $name
done
done
|
Add "ignore_loglevel" in kernel parameter | #!/bin/bash
sudo kvm -kernel ~/develop/linux/arch/x86/boot/bzImage \
-append "root=/dev/vda1 rw console=ttyS0 loglevel=8" \
-initrd initramfs.cpio.gz \
-nographic \
-drive index=0,file=ubuntu.qcow2,if=virtio \
-drive index=1,file=docker-dm.qcow2,if=virtio \
-m 4096 -smp 4 -cpu Nehalem,+rdtscp,+tsc-deadline \
-redir tcp:5555::22
| #!/bin/bash
sudo kvm -kernel ~/develop/linux/arch/x86/boot/bzImage \
-append "root=/dev/vda1 rw console=tty0 console=ttyS0 ignore_loglevel" \
-drive index=0,file=ubuntu.qcow2,if=virtio \
-drive index=1,file=docker-dm.qcow2,if=virtio \
-initrd initramfs.cpio.gz \
-nographic \
-m 4096 -smp 4 -cpu Nehalem,+rdtscp,+tsc-deadline \
-redir tcp:5555::22
|
Add method to get SHA | include logger.Logger
include math.util.MathUtil
package base
GitUtil(){
cleanSource(){
Logger logProgressMsg "resetting_the_source_directory"
local buildDir=$(BaseVars returnBuildDir ${@})
cd ${buildDir}
git reset --hard -q
if [[ $(MathUtil isEven $(BaseUtil getDate -d)) || $(
BaseComparator isEqual ${1} true) ]] ; then
git clean -fdqx
fi
Logger logCompletedMsg
}
clearIndexLock(){
local lockFile=$(BaseVars returnBuildDir ${1})/.git/index.lock
if [ -e ${lockFile} ]; then
Logger logProgressMsg "clearing_index_lock"
rm -rf ${lockFile}
Logger logCompletedMsg
fi
}
getCurBranch(){
git rev-parse --abbrev-ref HEAD
}
listBranches(){
git branch | sed s/\*/\ /g
}
$@
} | include logger.Logger
include math.util.MathUtil
package base
GitUtil(){
cleanSource(){
Logger logProgressMsg "resetting_the_source_directory"
local buildDir=$(BaseVars returnBuildDir ${@})
cd ${buildDir}
git reset --hard -q
if [[ $(MathUtil isEven $(BaseUtil getDate -d)) || $(
BaseComparator isEqual ${1} true) ]] ; then
git clean -fdqx
fi
Logger logCompletedMsg
}
clearIndexLock(){
local lockFile=$(BaseVars returnBuildDir ${1})/.git/index.lock
if [ -e ${lockFile} ]; then
Logger logProgressMsg "clearing_index_lock"
rm -rf ${lockFile}
Logger logCompletedMsg
fi
}
getCurBranch(){
git rev-parse --abbrev-ref HEAD
}
getSHA(){
local length=${2}
local projectDir=${1}
cd ${projectDir}
if [[ $(BaseComparator isEqual ${length} long) ]]; then
git log --oneline --pretty=format:%H -1
elif [[ $(BaseComparator isEqual ${length} short) ]]; then
git log --oneline --pretty=format:%h -1
fi
}
listBranches(){
git branch | sed s/\*/\ /g
}
$@
} |
Check for multirust when checking for multirust. | #!/bin/bash
source "$(dirname $0)/env.sh"
if [ ! -x $MULTIRUST_DIR/bin/cargo ]; then
(
cd /tmp
git clone --recursive https://github.com/brson/multirust.git multirust
cd multirust
git submodule update --init
./build.sh
./install.sh --prefix=$MULTIRUST_DIR
)
fi
multirust default nightly
if [ -f RUST-VERSION ]; then
read version < RUST-VERSION
multirust override "$version"
fi
rustc --version
cargo --version
| #!/bin/bash
source "$(dirname $0)/env.sh"
if [ ! -x $MULTIRUST_DIR/bin/multirust ]; then
(
cd /tmp
git clone --recursive https://github.com/brson/multirust.git multirust
cd multirust
git submodule update --init
./build.sh
./install.sh --prefix=$MULTIRUST_DIR
)
fi
multirust default nightly
if [ -f RUST-VERSION ]; then
read version < RUST-VERSION
multirust override "$version"
fi
rustc --version
cargo --version
|
Use Electron 0.28.3 on Linux too | ./build-common.sh
electron-packager build Squiffy --platform=linux --arch=x64 --version=0.29.2 --app-bundle-id=uk.co.textadventures.squiffy --helper-bundle-id=uk.co.textadventures.squiffy.helper --app-version=4.0.0 | ./build-common.sh
electron-packager build Squiffy --platform=linux --arch=x64 --version=0.28.3 --app-bundle-id=uk.co.textadventures.squiffy --helper-bundle-id=uk.co.textadventures.squiffy.helper --app-version=4.0.0 |
Deploy Maven plugin to Bintray | if [ ! -z "$TRAVIS_TAG" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_SECURE_ENV_VARS" == "true" ]; then
for package in "core" "gradle"; do
for path in java/$package/build/libs/*; do
version=$(basename $path | awk -F- '{print $3}' | cut -d. -f1-3)
echo Uploading radl-$package-$version to BinTray
curl -T $path -u$USER:$BINTRAY_KEY https://api.bintray.com/content/radl/RADL/radl/radl-$package/$version/$(basename $path)\;bt_package=radl-$package\;bt_version=$version\;publish=1\;override=1
done
done
else
echo TRAVIS_TAG=$TRAVIS_TAG
echo TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST
echo TRAVIS_SECURE_ENV_VARS=$TRAVIS_SECURE_ENV_VARS
fi
| if [ ! -z "$TRAVIS_TAG" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_SECURE_ENV_VARS" == "true" ]; then
for package in "core" "gradle" "maven"; do
for path in java/$package/build/libs/*; do
version=$(basename $path | awk -F- '{print $3}' | cut -d. -f1-3)
echo Uploading radl-$package-$version to BinTray
curl -T $path -u$USER:$BINTRAY_KEY https://api.bintray.com/content/radl/RADL/radl/radl-$package/$version/$(basename $path)\;bt_package=radl-$package\;bt_version=$version\;publish=1\;override=1
done
done
else
echo TRAVIS_TAG=$TRAVIS_TAG
echo TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST
echo TRAVIS_SECURE_ENV_VARS=$TRAVIS_SECURE_ENV_VARS
fi
|
Make service stopping part of pre-build script | #! /bin/bash
git branch -r | awk -F/ '/\/release\//{print $2"/"$3}' | xargs -I {} git push origin :{} \
|| { echo "Deleting obsolete release branches failed"; exit 1; }
| #! /bin/bash
DST="/usr/lib/cdstore"
BIN="${DST}/cdstore.sh"
bash ${BIN} stop || { echo "Stopping service failed"; exit 1; }
git branch -r | awk -F/ '/\/release\//{print $2"/"$3}' | xargs -I {} git push origin :{} \
|| { echo "Deleting obsolete release branches failed"; exit 1; }
|
Add ability to loop based on argument | #!/bin/bash
# Check IP address to see if this Pi is connected to the wifi network.
# If not, loop while trying to connect until it is.
#
_IP=$(hostname -I) || true
SSID=$(sed -n -e '/ssid/s/"//g' -e '/ssid/s/ssid=//' \
< /etc/wpa_supplicant/wpa_supplicant.conf)
echo -n "Connecting to $SSID wifi network"
while [[ ! ("$_IP" == *10.10.*) ]]; do
echo -n "."
ifdown wlan0 2>&1 | logger
sleep 2
ifup wlan0 2>&1 | logger
sleep 2
_IP=$(hostname -I) || true
done
echo ". IP address: $_IP"
| #!/bin/bash
# Check IP address to see if this Pi is connected to the wifi network.
# If not, loop while trying to connect until it is.
#
# If called with a number, quietly loop forever, sleeping for
# that many seconds between loops.
LOOP=${1:-""}
_IP=$(hostname -I) || true
SSID=$(sed -n -e '/ssid/s/"//g' -e '/ssid/s/ssid=//' \
< /etc/wpa_supplicant/wpa_supplicant.conf)
WIFI_JOIN() {
echo -n "Connecting to $SSID wifi network"
while [[ ! ("$_IP" == *10.10.*) ]]; do
echo -n "."
ifdown wlan0 2>&1 | logger
sleep 2
ifup wlan0 2>&1 | logger
sleep 2
_IP=$(hostname -I) || true
done
echo ". IP address: $_IP"
}
WIFI_JOIN
if [[ $LOOP ]] ; then
WIFI_JOIN
sleep $LOOP
fi
|
Fix configuration issue reported by Raphael Spreitzer. | #!/bin/bash
CC=avr-gcc CXX=c++ LINK="-mmcu=atmega128 -Wl,-gc-sections" COMP="-O2 -ggdb -Wa,-mmcu=atmega128 -mmcu=atmega128 -ffunction-sections -fdata-sections" cmake -DARCH=AVR -DWORD=8 -DOPSYS=NONE -DSEED=LIBC -DSHLIB=OFF -DSTBIN=ON -DTIMER=NONE -DWITH="DV;BN;FP;EP;EC;PP;PC;CP;MD" -DBENCH=20 -DTESTS=20 -DCHECK=off -DVERBS=off -DSTRIP=on -DQUIET=on -DARITH=avr-asm-158 -DFP_PRIME=158 -DBN_METHD="COMBA;COMBA;MONTY;BASIC;STEIN;BASIC" -DFP_QNRES=on -DFP_METHD="INTEG;COMBA;COMBA;MONTY;MONTY;SLIDE" -DBN_PRECI=160 -DBN_MAGNI=DOUBLE -DEP_PRECO=off -DEP_METHD="PROJC;LWNAF;LWNAF;BASIC" -DEP_KBLTZ=on -DEP_ORDIN=on -DEC_METHD="PRIME" -DPP_METHD="INTEG;INTEG;BASIC;OATEP" -DPC_METHD="PRIME" -DSEED=ZERO -DMD_METHD=SHONE $1
| #!/bin/bash
CC=avr-gcc CXX=c++ LINK="-mmcu=atmega128 -Wl,-gc-sections" COMP="-O2 -ggdb -Wa,-mmcu=atmega128 -mmcu=atmega128 -ffunction-sections -fdata-sections" cmake -DARCH=AVR -DWORD=8 -DOPSYS=NONE -DSEED=LIBC -DSHLIB=OFF -DSTBIN=ON -DTIMER=NONE -DWITH="DV;BN;FP;EP;EC;PP;PC;CP;MD" -DBENCH=20 -DTESTS=20 -DCHECK=off -DVERBS=off -DSTRIP=on -DQUIET=on -DARITH=avr-asm-158 -DFP_PRIME=158 -DBN_METHD="COMBA;COMBA;MONTY;BASIC;STEIN;BASIC" -DFP_QNRES=off -DFP_METHD="INTEG;COMBA;COMBA;MONTY;MONTY;SLIDE" -DBN_PRECI=160 -DBN_MAGNI=DOUBLE -DEP_PRECO=off -DEP_METHD="PROJC;LWNAF;LWNAF;BASIC" -DEP_KBLTZ=on -DEP_ORDIN=on -DEC_METHD="PRIME" -DPP_METHD="INTEG;INTEG;BASIC;OATEP" -DPC_METHD="PRIME" -DSEED=ZERO -DMD_METHD=SHONE $1
|
Update travis setup script to remove old mongodb version | #!/usr/bin/env bash
sudo service mongodb stop
sudo rm -f /etc/mongod.conf
sudo cp ./config/mongodb_dev.conf /etc/mongod.conf
# Import mongodb public GPG key
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927
echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list
sudo apt-get update -qq -y
sudo apt-get install -y mongodb-org
sudo service mongodb start | #!/usr/bin/env bash
sudo service mongodb stop
sudo apt-get purge mongodb mongodb-clients mongodb-server mongodb-dev
sudo apt-get purge mongodb-10gen
sudo apt-get autoremove
sudo rm -f /etc/mongod.conf
sudo cp ./config/mongodb_dev.conf /etc/mongod.conf
# Import mongodb public GPG key
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927
echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list
sudo apt-get update -qq -y
sudo apt-get install -y mongodb-org
sudo service mongodb start |
Fix wait condition of the script | #!/bin/bash
set -e
echo "Running Migrations...."
python manage.py migrate --settings=pari.settings.test --noinput
echo "Running Collectstatic...."
python manage.py collectstatic --noinput
echo "Starting Server..........."
python manage.py runserver --settings=pari.settings.test > /dev/null 2>&1 &
expected_response=200
end=$((SECONDS+60))
while [ "$SECONDS" -lt "$end" ];
do
sleep 2
response=`curl -s -o /dev/null -w "%{http_code}" http://localhost:8000/pages/donate/`
echo "Waiting for service to start...."
if [ "$response" == "$expected_response" ]
then
is_service_started='true'
echo "Service Started........."
break
fi
done
if [ "$is_service_started" != 'true' ];
then
echo "Unable to start the service........"
exit 1
fi
echo "Running test......."
python manage.py test --settings=pari.settings.test --keepdb --nologcapture --verbosity=2 | #!/bin/bash
set -e
echo "Running Migrations...."
python manage.py migrate --settings=pari.settings.test --noinput
echo "Running Collectstatic...."
python manage.py collectstatic --noinput
echo "Starting Server..........."
python manage.py runserver --settings=pari.settings.test > /dev/null 2>&1 &
CURRENT_TIME='date +%s'
TIMEOUT_TIME=$((`eval $CURRENT_TIME`+60))
while [ `eval $CURRENT_TIME` -lt "$TIMEOUT_TIME" ];
do
sleep 2
response=`curl -s -o /dev/null -w "%{http_code}" http://localhost:8000/pages/donate/`
echo "Waiting for service to start...."
if [ $response -eq 200 ]
then
is_service_started='true'
echo "Service Started........."
break
fi
done
if [ "$is_service_started" != 'true' ];
then
echo "Unable to start the service........"
exit 1
fi
echo "Running test......."
python manage.py test --settings=pari.settings.test --keepdb --nologcapture --verbosity=2 |
Check if the travis deployment script does work correctly | #!/bin/bash
# build & deploy react patterns
yarn build
SEMVER_LAST_TAG=$(npm view apparena-patterns-react version)
SEMVER_RELEASE_LEVEL=$(git log --oneline -1 --pretty=%B | cat | tr -d '\n' | cut -d "[" -f2 | cut -d "]" -f1)
ROOT_DIR=$(pwd)
case ${SEMVER_RELEASE_LEVEL} in
*\ *)
>&2 echo "Specified release level invalid"
;;
*)
if [ -n ${SEMVER_RELEASE_LEVEL} ]; then
case ${SEMVER_RELEASE_LEVEL} in
major|minor|patch)
cp ~/.npmrc ~/.npmrc.bak
echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > ~/.npmrc
git clone https://github.com/fsaintjacques/semver-tool /tmp/semver &> /dev/null
SEMVER_NEW_TAG=$(/tmp/semver/src/semver bump ${SEMVER_RELEASE_LEVEL} ${SEMVER_LAST_TAG})
npm --no-git-tag-version version ${SEMVER_NEW_TAG} --allow-same-version
cd build/apparena-patterns-react
npm publish
;;
*)
>&2 echo "Specified release level invalid"
;;
esac
else
>&2 echo "No release level specified"
fi
;;
esac
cd ${ROOT_DIR}
| #!/bin/bash
# build & deploy react patterns
yarn build
SEMVER_LAST_TAG=$(npm view apparena-patterns-react version)
SEMVER_RELEASE_LEVEL=$(git log --oneline -1 --pretty=%B | cat | tr -d '\n' | cut -d "[" -f2 | cut -d "]" -f1)
ROOT_DIR=$(pwd)
if [ -n $SEMVER_RELEASE_LEVEL ]; then
git clone https://github.com/fsaintjacques/semver-tool /tmp/semver &> /dev/null
SEMVER_NEW_TAG=$(/tmp/semver/src/semver bump $SEMVER_RELEASE_LEVEL $SEMVER_LAST_TAG)
npm --no-git-tag-version version ${SEMVER_NEW_TAG} --allow-same-version
cd build/apparena-patterns-react
npm publish
else
>&2 echo "No release level specified"
fi
cd ${ROOT_DIR}
|
Use the latest mongofill-hhvm (master) | #!/bin/bash
if [[ $TRAVIS_PHP_VERSION == "hhvm" ]]; then
mkdir BUILD
cd BUILD
git clone git://github.com/facebook/hhvm.git
cd hhvm
export HPHP_HOME=`pwd`
git checkout 1da451b # Tag:3.0.1
cd ..
git clone https://github.com/mongofill/mongofill-hhvm
cd mongofill-hhvm
/bin/bash tools/travis.sh
which php
which hhvm
cat /etc/hhvm/php.ini
# show mongo PHP extension version
cd ../..
phpenv rehash
echo "ext-mongo version: `hhvm --php -r 'echo phpversion(\"mongo\");'`"
else
yes '' | pecl install -f mongo-${MONGO_VERSION}
phpenv rehash
echo "ext-mongo version: `php -r 'echo phpversion(\"mongo\");'`"
fi
| #!/bin/bash
if [[ $TRAVIS_PHP_VERSION == "hhvm" ]]; then
mkdir BUILD
cd BUILD
git clone git://github.com/facebook/hhvm.git
cd hhvm
export HPHP_HOME=`pwd`
#git checkout 1da451b # Tag:3.0.1
cd ..
git clone https://github.com/mongofill/mongofill-hhvm
cd mongofill-hhvm
/bin/bash tools/travis.sh
which php
which hhvm
cat /etc/hhvm/php.ini
# show mongo PHP extension version
cd ../..
phpenv rehash
echo "ext-mongo version: `hhvm --php -r 'echo phpversion(\"mongo\");'`"
else
yes '' | pecl install -f mongo-${MONGO_VERSION}
phpenv rehash
echo "ext-mongo version: `php -r 'echo phpversion(\"mongo\");'`"
fi
|
Revert to original directory if directory was changed in order to build the paper | #!/bin/bash
#Author: Jason Ziglar <jpz@vt.edu>
#Script to automate making papers using Scriptorium, which is useful for
#automating paper building in a slightly safer fashion.
cur_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
abs_dir="${cur_dir}/.."
export TEXINPUTS=".:${abs_dir}/templates/.//:$TEXINPUTS"
#change to specified directory, if given
if [ -n "$1" ]; then
cd $1
fi
/usr/local/bin/multimarkdown -t latex -o ./paper.tex paper.mmd
if [ $? != 0 ]; then
exit 1
fi
pdflatex -shell-escape paper.tex
if [ $? != 0 ]; then
exit 1
fi
#Test if paper is using a bibliography or not
full_paper=$(latexpand paper.tex)
if [ -n "$(echo $paper.mmd | grep bibtex:)" ]; then
if [ -n "$(echo $full_paper | grep backend=biber)" ]; then
biber paper
else
bibtex paper.aux
fi
fi
pdflatex -shell-escape paper.tex
pdflatex -shell-escape paper.tex
| #!/bin/bash
#Author: Jason Ziglar <jpz@vt.edu>
#Script to automate making papers using Scriptorium, which is useful for
#automating paper building in a slightly safer fashion.
cur_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
abs_dir="${cur_dir}/.."
export TEXINPUTS=".:${abs_dir}/templates/.//:$TEXINPUTS"
old_cwd=$(pwd)
#change to specified directory, if given
if [ -n "$1" ]; then
cd $1
fi
/usr/local/bin/multimarkdown -t latex -o ./paper.tex paper.mmd
if [ $? != 0 ]; then
exit 1
fi
pdflatex -shell-escape paper.tex
if [ $? != 0 ]; then
exit 1
fi
#Test if paper is using a bibliography or not
full_paper=$(latexpand paper.tex)
if [ -n "$(echo $paper.mmd | grep bibtex:)" ]; then
if [ -n "$(echo $full_paper | grep backend=biber)" ]; then
biber paper
else
bibtex paper.aux
fi
fi
pdflatex -shell-escape paper.tex
pdflatex -shell-escape paper.tex
# Revert to old directory
if [ "$old_cwd" != "$(pwd)" ]; then
cd "$old_cwd"
fi
|
Switch it back on with a machine user and see if it works | #!/bin/sh
# ideas used from https://gist.github.com/motemen/8595451
# abort the script if there is a non-zero error
set -e
pwd
# remote=$(git config remote.origin.url)
#
# siteSource="$1"
#
# if [ ! -d "$siteSource" ]
# then
# echo "Usage: $0 <site source dir>"
# exit 1
# fi
#
# # make a directory to put the gp-pages branch
# mkdir gh-pages-branch
# cd gh-pages-branch
# # now lets setup a new repo so we can update the gh-pages branch
# git init
# git remote add --fetch origin "$remote"
#
# # switch into the the gh-pages branch
# if git rev-parse --verify origin/gh-pages > /dev/null 2>&1
# then
# git checkout gh-pages
# else
# git checkout --orphan gh-pages
# fi
#
# # delete any old site as we are going to replace it
# git rm -rf .
# # copy over or recompile the new site
# cp -a "../${siteSource}/." .
#
# # stage any changes and new files
# git add -A
# # now commit
# git commit -m "Deploy to GitHub pages"
# # and push, but send any output to /dev/null to hide anything sensitive
# git push --force --quiet origin gh-pages > /dev/null 2>&1
#
# # go back to where we started and remove the gh-pages git repo we made and used
# # for deployment
# cd ..
# rm -rf gh-pages-branch
| #!/bin/sh
# ideas used from https://gist.github.com/motemen/8595451
# abort the script if there is a non-zero error
set -e
pwd
remote=$(git config remote.origin.url)
siteSource="$1"
if [ ! -d "$siteSource" ]
then
echo "Usage: $0 <site source dir>"
exit 1
fi
# make a directory to put the gp-pages branch
mkdir gh-pages-branch
cd gh-pages-branch
# now lets setup a new repo so we can update the gh-pages branch
git init
git remote add --fetch origin "$remote"
# switch into the the gh-pages branch
if git rev-parse --verify origin/gh-pages > /dev/null 2>&1
then
git checkout gh-pages
else
git checkout --orphan gh-pages
fi
# delete any old site as we are going to replace it
git rm -rf .
# copy over or recompile the new site
cp -a "../${siteSource}/." .
# stage any changes and new files
git add -A
# now commit
git commit -m "Deploy to GitHub pages"
# and push, but send any output to /dev/null to hide anything sensitive
git push --force --quiet origin gh-pages > /dev/null 2>&1
# go back to where we started and remove the gh-pages git repo we made and used
# for deployment
cd ..
rm -rf gh-pages-branch
|
Fix sed syntax on ubuntu. | create_package() {
local package="$1"
mkdir -p "${BASHER_ORIGIN_DIR}/$package"
cd "${BASHER_ORIGIN_DIR}/$package"
git init .
touch package.sh
git add .
git commit -m "package.sh"
}
create_invalid_package() {
local package="$1"
mkdir -p "${BASHER_ORIGIN_DIR}/$package"
cd "${BASHER_ORIGIN_DIR}/$package"
git init .
touch dummy
git add .
git commit -m "dummy"
}
create_exec() {
local package="$1"
local exec="$2"
cd "${BASHER_ORIGIN_DIR}/$package"
mkdir -p bin
touch bin/$exec
chmod +x bin/$exec
if [ -e "package.sh" ]; then
if grep -sq "BIN=" "package.sh"; then
sed -e "/^BIN=/ s/$/:bin\/$exec/" -i '' package.sh
else
echo "BIN=bin/$exec" >> package.sh
fi
fi
git add .
git commit -m "Add $exec"
}
create_runtime() {
local package="$1"
local runtime="$2"
cd "${BASHER_ORIGIN_DIR}/$package"
touch "$runtime"
echo "RUNTIME=$runtime" >> package.sh
git add .
git commit -m "Add runtime $runtime"
}
| create_package() {
local package="$1"
mkdir -p "${BASHER_ORIGIN_DIR}/$package"
cd "${BASHER_ORIGIN_DIR}/$package"
git init .
touch package.sh
git add .
git commit -m "package.sh"
}
create_invalid_package() {
local package="$1"
mkdir -p "${BASHER_ORIGIN_DIR}/$package"
cd "${BASHER_ORIGIN_DIR}/$package"
git init .
touch dummy
git add .
git commit -m "dummy"
}
create_exec() {
local package="$1"
local exec="$2"
cd "${BASHER_ORIGIN_DIR}/$package"
mkdir -p bin
touch bin/$exec
chmod +x bin/$exec
if [ -e "package.sh" ]; then
if grep -sq "BIN=" "package.sh"; then
sed -e "/^BIN=/ s/$/:bin\/$exec/" -i'' package.sh
else
echo "BIN=bin/$exec" >> package.sh
fi
fi
git add .
git commit -m "Add $exec"
}
create_runtime() {
local package="$1"
local runtime="$2"
cd "${BASHER_ORIGIN_DIR}/$package"
touch "$runtime"
echo "RUNTIME=$runtime" >> package.sh
git add .
git commit -m "Add runtime $runtime"
}
|
Fix missing space in deploy script | #!/bin/bash
#
# Deploy a jar, source jar, and javadoc jar to Sonatype's snapshot repo.
#
# Adapted from https://coderwall.com/p/9b_lfq and
# http://benlimmer.com/2013/12/26/automatically-publish-javadoc-to-gh-pages-with-travis-ci/
SLUG="kunny/android-CircleRefreshLayout"
JDK="oraclejdk8"
BRANCH="master"
set -e
if [ "$TRAVIS_REPO_SLUG" != "$SLUG" ]; then
echo "Skipping snapshot deployment: wrong repository. Expected '$SLUG' but was '$TRAVIS_REPO_SLUG'."
elif [ "$TRAVIS_JDK_VERSION" != "$JDK" ]; then
echo "Skipping snapshot deployment: wrong JDK. Expected '$JDK' but was '$TRAVIS_JDK_VERSION'."
elif [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
echo "Skipping snapshot deployment: was pull request."
elif [ "$TRAVIS_BRANCH" != "$BRANCH" ]; then
echo "Skipping snapshot deployment: wrong branch. Expected '$BRANCH' but was '$TRAVIS_BRANCH'."
elif [ "$MODULE" == "circlerefreshlayout-sample"]; then
echo "Skipping snapshot deployment: sample app"
else
echo "Deploying snapshot..."
./gradlew clean "$MODULE:uploadArchives"
echo "Snapshot deployed!"
fi | #!/bin/bash
#
# Deploy a jar, source jar, and javadoc jar to Sonatype's snapshot repo.
#
# Adapted from https://coderwall.com/p/9b_lfq and
# http://benlimmer.com/2013/12/26/automatically-publish-javadoc-to-gh-pages-with-travis-ci/
SLUG="kunny/android-CircleRefreshLayout"
JDK="oraclejdk8"
BRANCH="master"
set -e
if [ "$TRAVIS_REPO_SLUG" != "$SLUG" ]; then
echo "Skipping snapshot deployment: wrong repository. Expected '$SLUG' but was '$TRAVIS_REPO_SLUG'."
elif [ "$TRAVIS_JDK_VERSION" != "$JDK" ]; then
echo "Skipping snapshot deployment: wrong JDK. Expected '$JDK' but was '$TRAVIS_JDK_VERSION'."
elif [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
echo "Skipping snapshot deployment: was pull request."
elif [ "$TRAVIS_BRANCH" != "$BRANCH" ]; then
echo "Skipping snapshot deployment: wrong branch. Expected '$BRANCH' but was '$TRAVIS_BRANCH'."
elif [ "$MODULE" == "circlerefreshlayout-sample" ]; then
echo "Skipping snapshot deployment: sample app"
else
echo "Deploying snapshot..."
./gradlew clean "$MODULE:uploadArchives"
echo "Snapshot deployed!"
fi |
Fix order of moving .project back into pwd | #!/bin/bash
# Invoke with: ./tools/clean.sh
if [ -f .project ]; then
cp .project .pydevproject /tmp
fi
git clean -fX
cd sts/headerspace/hassel-c
make
if [ -f .project ]; then
cp /tmp/.project /tmp/.pydevproject .
fi
| #!/bin/bash
# Invoke with: ./tools/clean.sh
if [ -f .project ]; then
cp .project .pydevproject /tmp
fi
git clean -fX
if [ -f .project ]; then
cp /tmp/.project /tmp/.pydevproject .
fi
cd sts/headerspace/hassel-c
make
|
Add command to remove duplicate vimrc file | #!/bin/bash
########################
# TO-DO
########################
# - automate installation of applications
########################
# Create symbolic links
########################
# Make sure that our symlinks work
CWD=$(pwd)
ln -s ${CWD}/vim/vimrc ~/.vimrc
ln -s ${CWD}/vim/* ~/.vim/
#rm ~/.vim/vimrc
ln -s ${CWD}/oh-my-zsh/zshenv ~/.zshenv
ln -s ${CWD}/oh-my-zsh/zshrc ~/.zshrc
ln -s ${CWD}/oh-my-zsh/alias.zsh ~/.oh-my-zsh/custom/
ln -s ${CWD}/oh-my-zsh/themes/ ~/.oh-my-zsh/custom/
ln -s ${CWD}/git/gitconfig ~/.gitconfig
# Because of limitations, we need to create the symbolic link a little
# differently in side the application support folder
cd ~/Library/Application\ Support/Sublime\ Text\ 3/Packages/
ln -s ${CWD}/sublime-text-3/User/ | #!/bin/bash
########################
# TO-DO
########################
# - automate installation of applications
########################
# Create symbolic links
########################
# Make sure that our symlinks work
CWD=$(pwd)
ln -s ${CWD}/vim/vimrc ~/.vimrc
ln -s ${CWD}/vim/* ~/.vim/
rm ~/.vim/vimrc
ln -s ${CWD}/oh-my-zsh/zshenv ~/.zshenv
ln -s ${CWD}/oh-my-zsh/zshrc ~/.zshrc
ln -s ${CWD}/oh-my-zsh/alias.zsh ~/.oh-my-zsh/custom/
ln -s ${CWD}/oh-my-zsh/themes/ ~/.oh-my-zsh/custom/
ln -s ${CWD}/git/gitconfig ~/.gitconfig
# Because of limitations, we need to create the symbolic link a little
# differently in side the application support folder
cd ~/Library/Application\ Support/Sublime\ Text\ 3/Packages/
ln -s ${CWD}/sublime-text-3/User/ |
Update gitconfig and gitignore_global link | #!/bin/bash
[ $USER == "root" ] && echo "You should not install this for the root account." && exit 1
export CURRENT=`pwd`
if [ ! -d ~/.vim ]; then
git clone https://github.com/iver/vitamine.git ~/.vim
chmod +x ~/.vim/install.sh
source ~/.vim/install.sh
fi
[ -f ~/.gitconfig ] || ln -s ${CURRENT}/dotfiles/git/config ~/.gitconfig
[ -f ~/.gitignore_global ] || ln -s ${CURRENT}/dotfiles/git/gitignore_global ~/.gitignore_global
[ -f ~/.bash_profile ] || ln -s ${CURRENT}/dotfiles/bash_profile ~/.bash_profile
[ -f ~/.tmux.conf ] || ln -s ${CURRENT}/dotfiles/tmux.conf ~/.tmux.conf
[ -f ~/.nanorc ] || ln -s ${CURRENT}/dotfiles/nanorc ~/.nanorc
[ -f ~/.ctags ] || ln -s ${CURRENT}/dotfiles/ctags ~/.ctags
[ -f ~/.ssh/load_keys ] || cp ${CURRENT}/templates/load_keys ~/.ssh/load_keys
if [ -d ~/.vim/bundle/vimproc ]; then
cd ~/.vim/bundle/vimproc && make
fi
git submodule init .
git submodule update
| #!/bin/bash
[ $USER == "root" ] && echo "You should not install this for the root account." && exit 1
export CURRENT=`pwd`
[ -f ~/.gitconfig ] || ln -s ${CURRENT}/dotfiles/git/config ~/.gitconfig
[ -f ~/.gitignore_global ] || ln -s ${CURRENT}/dotfiles/git/gitignore_global ~/.gitignore_global
[ -f ~/.bash_profile ] || ln -s ${CURRENT}/dotfiles/bash_profile ~/.bash_profile
[ -f ~/.tmux.conf ] || ln -s ${CURRENT}/dotfiles/tmux.conf ~/.tmux.conf
[ -f ~/.nanorc ] || ln -s ${CURRENT}/dotfiles/nanorc ~/.nanorc
[ -f ~/.ctags ] || ln -s ${CURRENT}/dotfiles/ctags ~/.ctags
[ -f ~/.ssh/load_keys ] || cp ${CURRENT}/templates/load_keys ~/.ssh/load_keys
if [ -d ~/.vim/bundle/vimproc ]; then
cd ~/.vim/bundle/vimproc && make
fi
git submodule init .
git submodule update
|
Use npm to fetch react-sdk and js-sdk | #!/bin/bash
set -e
export NVM_DIR="/home/jenkins/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"
nvm use 4
set -x
# install the versions of js-sdk and react-sdk provided to us by jenkins
npm install ./node_modules/matrix-js-sdk-*.tgz
npm install ./node_modules/matrix-react-sdk-*.tgz
# install the other dependencies
npm install
# build our artifacts; dumps them in ./vector
npm run build
# gzip up ./vector
rm vector-*.tar.gz || true # rm previous artifacts without failing if it doesn't exist
REACT_SHA=$(head -c 12 node_modules/matrix-react-sdk/git-revision.txt)
JSSDK_SHA=$(head -c 12 node_modules/matrix-js-sdk/git-revision.txt)
VECTOR_SHA=$(git rev-parse --short=12 HEAD) # use the ACTUAL SHA rather than assume develop
tar -zcvhf vector-$VECTOR_SHA-react-$REACT_SHA-js-$JSSDK_SHA.tar.gz vector #g[z]ip, [c]reate archive, [v]erbose, [f]ilename, [h]ard-dereference (do not archive symlinks)
| #!/bin/bash
set -e
export NVM_DIR="/home/jenkins/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"
nvm use 4
set -x
npm install
# we may be using a dev branch of react-sdk, in which case we need to build it
(cd node_modules/matrix-react-sdk && npm run build)
# build our artifacts; dumps them in ./vector
npm run build
# gzip up ./vector
rm vector-*.tar.gz || true # rm previous artifacts without failing if it doesn't exist
# node_modules deps from 'npm install' don't have a .git dir so can't
# rev-parse; but they do set the commit in package.json under 'gitHead' which
# we're grabbing here.
REACT_SHA=$(grep 'gitHead' node_modules/matrix-react-sdk/package.json | cut -d \" -f 4 | head -c 12)
JSSDK_SHA=$(grep 'gitHead' node_modules/matrix-js-sdk/package.json | cut -d \" -f 4 | head -c 12)
VECTOR_SHA=$(git rev-parse --short=12 HEAD) # use the ACTUAL SHA rather than assume develop
tar -zcvhf vector-$VECTOR_SHA-react-$REACT_SHA-js-$JSSDK_SHA.tar.gz vector #g[z]ip, [c]reate archive, [v]erbose, [f]ilename, [h]ard-dereference (do not archive symlinks)
|
Use --production to get dependencies. | #!/bin/bash
set -e
rm -rf lib
BRANCH_NAME='latest'
npm version patch
git checkout ${BRANCH_NAME}
git merge master
grunt build
git add -f lib/
libs=$(cat package.json | jq -r '.dependencies' | grep ':' | cut -d: -f1 | tr -d " " | tr -d '"')
for lib in $libs; do
git add -f node_modules/$lib
done
git commit -m "Update generated code and runtime dependencies."
git push origin ${BRANCH_NAME}
git checkout master
npm version patch
git push origin master
| #!/bin/bash
set -e
BRANCH_NAME='latest'
grunt build
rm -rf node_modules
npm version patch
git checkout ${BRANCH_NAME}
git merge master
rm -rf node_modules
npm install --production
git add -f lib/
git add -f node_modules/
git commit -m "Update generated code and runtime dependencies."
git push origin ${BRANCH_NAME}
git checkout master
npm version patch
git push origin master
npm install
|
Add grace period for server startup | #!/bin/bash
set -e
git clone --depth=1 https://github.com/sockjs/sockjs-protocol.git
cd sockjs-protocol
make test_deps pycco_deps
cd ..
node tests/test_server/server.js &
SRVPID=$!
set +e
cd sockjs-protocol
./venv/bin/python sockjs-protocol.py
PASSED=$?
kill $SRVPID
exit $PASSED
| #!/bin/bash
set -e
git clone --depth=1 https://github.com/sockjs/sockjs-protocol.git
cd sockjs-protocol
make test_deps pycco_deps
cd ..
node tests/test_server/server.js &
SRVPID=$!
sleep 1
set +e
cd sockjs-protocol
./venv/bin/python sockjs-protocol.py
PASSED=$?
kill $SRVPID
exit $PASSED
|
Use set -e instead of && to chain test commands | #!/bin/bash
(
export RUST_BACKTRACE=1;
cargo test --features test --all &&
cargo check --benches --features test &&
cargo check --all --no-default-features &&
echo "TRAVIS_RUST_VERSION=$TRAVIS_RUST_VERSION" &&
([ "$TRAVIS_RUST_VERSION" != "nightly" ] || cargo test --features "test nightly" -p gluon compile_test)
)
| #!/bin/bash
set -ex
export RUST_BACKTRACE=1
cargo test --features test --all
cargo check --benches --features test
cargo check --all --no-default-features
echo "TRAVIS_RUST_VERSION=$TRAVIS_RUST_VERSION"
[ "$TRAVIS_RUST_VERSION" != "nightly" ] || cargo test --features "test nightly" -p gluon compile_test
|
Rework the script to deal with non-presence of bundler from the get go | update_bundler() {
gem list | grep 'bundler' &> /dev/null
if [ $? -gt 0 ]; then
gem install bundler
fi
if [ "$1" = 'quiet' ]; then
bundle install --deployment > /dev/null 2> /dev/null
else
bundle install --deployment
fi
}
i="0"
until (bundle check > /dev/null 2> /dev/null) || [ $i -gt 10 ]; do
echo "Bundle update. Attempt: $i"
update_bundler 'quiet'
i=$[$i+1]
done
if !(bundle check > /dev/null 2> /dev/null); then
echo "Last Bundle update attempt."
update_bundler
fi
| update_bundler() {
gem list | grep 'bundler' &> /dev/null
if [ $? -gt 0 ]; then
gem install bundler
fi
if [ "$1" = 'quiet' ]; then
bundle update #> /dev/null 2> /dev/null
else
bundle update
fi
bundle check > /dev/null 2> /dev/null
return $?
}
i="0"
until (bundle check > /dev/null 2> /dev/null) || (update_bundler 'quiet') || [ $i -gt 10 ]; do
echo "Bundle update. Attempt: $i"
update_bundler 'quiet'
i=$[$i+1]
done
if !(bundle check > /dev/null 2> /dev/null); then
echo "Last Bundle update attempt."
update_bundler
fi
|
Add shortcut for opening Xcode workspace/project | export HOMEBREW_CASK_OPTS="--appdir=/Applications"
# https://github.com/Homebrew/homebrew-core/issues/15746
export PATH="/usr/local/opt/python/libexec/bin:$PATH"
export PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH"
export MANPATH="/usr/local/opt/coreutils/libexec/gnuman:$MANPATH"
# Open quickly
alias o='open'
alias oo='open .'
# Show/hide hidden files in Finder
alias show="defaults write com.apple.Finder AppleShowAllFiles -bool TRUE; killall Finder"
alias hide="defaults write com.apple.Finder AppleShowAllFiles FALSE; killall Finder"
# Hide/show all desktop icons (useful when presenting)
alias showdesktop="defaults write com.apple.finder CreateDesktop -bool true && killall Finder"
alias hidedesktop="defaults write com.apple.finder CreateDesktop -bool false && killall Finder"
| export HOMEBREW_CASK_OPTS="--appdir=/Applications"
# https://github.com/Homebrew/homebrew-core/issues/15746
export PATH="/usr/local/opt/python/libexec/bin:$PATH"
export PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH"
export MANPATH="/usr/local/opt/coreutils/libexec/gnuman:$MANPATH"
# Open quickly
alias o='open'
alias oo='open .'
# Open Xcode workspace/project easily
alias x='open -a Xcode .'
# Show/hide hidden files in Finder
alias show="defaults write com.apple.Finder AppleShowAllFiles -bool TRUE; killall Finder"
alias hide="defaults write com.apple.Finder AppleShowAllFiles FALSE; killall Finder"
# Hide/show all desktop icons (useful when presenting)
alias showdesktop="defaults write com.apple.finder CreateDesktop -bool true && killall Finder"
alias hidedesktop="defaults write com.apple.finder CreateDesktop -bool false && killall Finder"
|
Fix after success script to only push one docker image to travis | #!/bin/bash
set -ev
bash <(curl -s https://codecov.io/bash)
docker tag $DOCKERHUB_REPO:$COMMIT $DOCKERHUB_REPO:travis-$TRAVIS_BUILD_NUMBER
if [ "${TRAVIS_EVENT_TYPE}" == "push" ] && [ "${TRAVIS_BRANCH}" == "platform" ]; then
docker tag $DOCKERHUB_REPO:$COMMIT $DOCKERHUB_REPO:$TRAVIS_BRANCH
fi
echo "$DOCKER_PASS" | docker login --username "$DOCKER_USERNAME" --password-stdin
docker push $DOCKERHUB_REPO
| #!/bin/bash
set -ev
bash <(curl -s https://codecov.io/bash)
TRAVIS_TAG="$DOCKERHUB_REPO:travis-$TRAVIS_BUILD_NUMBER"
docker tag $DOCKERHUB_REPO:$COMMIT "$TRAVIS_TAG"
echo "$DOCKER_PASS" | docker login --username "$DOCKER_USERNAME" --password-stdin
docker push "$TRAVIS_TAG"
|
Make sure Jenkins correctly handles zombie processes | #!/bin/bash
trap "exit 1" ERR
if [[ -z "${MAIN_URL}" ]]; then
echo "MAIN_URL cannot be empty" >&2
exit 1
fi
if [[ -z "${JENKINS_SMTP_FROM}" ]]; then
echo "JENKINS_SMTP_FROM cannot by empty" >&2
exit 1
fi
sed -i -re "s,MAIN_URL,${MAIN_URL},;s,JENKINS_SMTP_FROM,${JENKINS_SMTP_FROM}," /usr/share/jenkins/ref/jenkins.model.JenkinsLocationConfiguration.xml.override
exec /usr/local/bin/jenkins.sh "$@"
| #!/bin/bash
trap "exit 1" ERR
if [[ -z "${MAIN_URL}" ]]; then
echo "MAIN_URL cannot be empty" >&2
exit 1
fi
if [[ -z "${JENKINS_SMTP_FROM}" ]]; then
echo "JENKINS_SMTP_FROM cannot by empty" >&2
exit 1
fi
sed -i -re "s,MAIN_URL,${MAIN_URL},;s,JENKINS_SMTP_FROM,${JENKINS_SMTP_FROM}," /usr/share/jenkins/ref/jenkins.model.JenkinsLocationConfiguration.xml.override
exec /sbin/tini -- /usr/local/bin/jenkins.sh "$@"
|
Downgrade sphinx-notfound-page due to weird sphinx error message | #!/usr/bin/env bash
CHANGES=$(git diff-index --name-only HEAD --)
export PLUGINS=''
rm -rf ansible-repo
mkdir -p ansible-repo
cd ansible-repo
git clone --branch 'v2.9.6' https://github.com/ansible/ansible.git
pip install -r ansible/requirements.txt
pip install sphinx sphinx_rtd_theme
pip install straight.plugin
pip install sphinx-notfound-page
rm -rf ansible/lib/ansible/modules/ && mkdir -p ansible/lib/ansible/modules/hashivault
cp -r ../ansible/modules/hashivault/hashivault*.py ansible/lib/ansible/modules/hashivault/
rm -f ansible/lib/ansible/plugins/doc_fragments/hashivault.py
cp {..,ansible/lib}/ansible/plugins/doc_fragments/hashivault.py
ls ansible/lib/ansible/modules/hashivault
export MODULES=$(ls -m ansible/lib/ansible/modules/hashivault/ | grep -v '^_' | tr -d '[:space:]' | sed 's/.py//g')
cd ansible/docs/docsite/
rm -f $(find . -name developing_modules_general_windows.rst)
make webdocs || true
touch _build/html/.nojekyll
| #!/usr/bin/env bash
CHANGES=$(git diff-index --name-only HEAD --)
export PLUGINS=''
rm -rf ansible-repo
mkdir -p ansible-repo
cd ansible-repo
git clone --branch 'v2.9.6' https://github.com/ansible/ansible.git
pip install -r ansible/requirements.txt
pip install sphinx sphinx_rtd_theme
pip install straight.plugin
pip install sphinx-notfound-page==0.4
rm -rf ansible/lib/ansible/modules/ && mkdir -p ansible/lib/ansible/modules/hashivault
cp -r ../ansible/modules/hashivault/hashivault*.py ansible/lib/ansible/modules/hashivault/
rm -f ansible/lib/ansible/plugins/doc_fragments/hashivault.py
cp {..,ansible/lib}/ansible/plugins/doc_fragments/hashivault.py
ls ansible/lib/ansible/modules/hashivault
export MODULES=$(ls -m ansible/lib/ansible/modules/hashivault/ | grep -v '^_' | tr -d '[:space:]' | sed 's/.py//g')
cd ansible/docs/docsite/
rm -f $(find . -name developing_modules_general_windows.rst)
make webdocs || true
touch _build/html/.nojekyll
|
Install atom-commons into atom package | #!/bin/bash
set -e
set -v
workdir=`pwd`
url=`cat fatjar/url`
echo "The url of the fatjar is ${url}"
ls -la | #!/bin/bash
set -e
set -v
workdir=`pwd`
url=`cat fatjar/url`
echo "The url of the fatjar is ${url}"
ls -la
cd package_sources
npm install ../sts4/atom-extensions/atom-commons
ls -la
|
Fix code in supplement to include mkfs/fuse | #!/bin/bash
# Take code from $repo and put a cleaned version in $code, in order to create a
# supplement $code.tar.gz to submit.
repo=".."
code="chl-fscq"
copy_committed() {
path="$1"
mkdir -p "$code/$path"
for file in $(git ls-files "$repo/$path"); do
cp $file "$code/$path/"
done
}
mkdir -p "$code"
cp $repo/LICENSE $code/
cp ./README.md $code/README.md
mkdir -p "$code/src"
cp "$repo/src/"*.v "$code/src/"
rm $code/src/AByteFile.v
cp "$repo/src/Makefile" "$code/src/"
cp "$repo/src/README" "$code/src/"
cp "$repo/src/coqide.sh" "$code/src/coqide.sh"
copy_committed src/hslib/
copy_committed src/mllib/
copy_committed src/ocamlfuse/
tar -czvf "$code.tar.gz" "$code"
rm -r "$code"
| #!/bin/bash
# Take code from $repo and put a cleaned version in $code, in order to create a
# supplement $code.tar.gz to submit.
repo=".."
code="chl-fscq"
copy_committed() {
path="$1"
mkdir -p "$code/$path"
for file in $(git ls-files "$repo/$path"); do
cp $file "$code/$path/"
done
}
if [ -d "$code" ]; then
rm -r "$code"
fi
mkdir -p "$code"
cp $repo/LICENSE $code/
cp ./README.md $code/README.md
mkdir -p "$code/src"
cp "$repo/src/"*.v "$code/src/"
rm $code/src/AByteFile.v
rm $code/src/ExampleBlockRecover.v
rm $code/src/ExampleChecksumLog.v
rm $code/src/ExampleChecksumLog.v
rm $code/src/NewExtract.v
cp "$repo/src/Makefile" "$code/src/"
cp "$repo/src/README" "$code/src/"
cp "$repo/src/coqide.sh" "$code/src/coqide.sh"
# Building the FSCQ executables
cp "$repo/src/fuse.hs" "$code/src/"
cp "$repo/src/mkfs.hs" "$code/src/"
cp "$repo/src/mlfuse.hs" "$code/src/"
cp "$repo/src/mlmkfs.hs" "$code/src/"
cp "$repo/src/fiximports.py" "$code/src/"
copy_committed src/hslib/
copy_committed src/mllib/
copy_committed src/ocamlfuse/
tar -czvf "$code.tar.gz" "$code"
rm -r "$code"
|
Delete thirdparty zip files after fetching | #!/bin/bash
# closure compiler
wget https://dl.google.com/closure-compiler/compiler-latest.zip
unzip compiler-latest.zip -d closure-compiler
cd closure-compiler
jarfile=(*.jar)
ln -s $jarfile compiler.jar
cd ..
# PDFJS
wget https://github.com/mozilla/pdf.js/releases/download/v1.8.188/pdfjs-1.8.188-dist.zip
unzip pdfjs-1.8.188-dist.zip -d pdfjs
# tarballjs
wget https://github.com/ankitrohatgi/tarballjs/archive/master.zip
unzip master.zip -d tarballjs
# numericjs
wget https://github.com/sloisel/numeric/archive/v1.2.6.zip
unzip v1.2.6.zip -d numericjs
cd numericjs
ln -s numeric-1.2.6/src/numeric.js numeric_core.js
ln -s numeric-1.2.6/src/svd.js numeric_svd.js
cd ..
| #!/bin/bash
# closure compiler
wget https://dl.google.com/closure-compiler/compiler-latest.zip
unzip compiler-latest.zip -d closure-compiler
cd closure-compiler
jarfile=(*.jar)
ln -s $jarfile compiler.jar
cd ..
# PDFJS
wget https://github.com/mozilla/pdf.js/releases/download/v1.8.188/pdfjs-1.8.188-dist.zip
unzip pdfjs-1.8.188-dist.zip -d pdfjs
# tarballjs
wget https://github.com/ankitrohatgi/tarballjs/archive/master.zip
unzip master.zip -d tarballjs
# numericjs
wget https://github.com/sloisel/numeric/archive/v1.2.6.zip
unzip v1.2.6.zip -d numericjs
cd numericjs
ln -s numeric-1.2.6/src/numeric.js numeric_core.js
ln -s numeric-1.2.6/src/svd.js numeric_svd.js
cd ..
# clean up downloaded packages
rm *.zip
|
Check cyclomatic complexity and maintainability index on CI | #!/bin/bash
set -ex
prep() {
yum -y update
yum -y install epel-release
yum -y install python34 python34-virtualenv which
}
prep
./run-linter.sh
| #!/bin/bash
set -ex
prep() {
yum -y update
yum -y install epel-release
yum -y install python34 python34-virtualenv which
}
prep
./detect-common-errors.sh
./detect-dead-code.sh
./measure-maintainability-index.sh --fail-on-error
./run-linter.sh
|
Add 'vendor' directory to assets | #!/bin/bash
# Get the user directories
cat ~/.config/user-dirs.dirs | while read line; do
if [ "${line:0:17}" = "XDG_TEMPLATES_DIR" ]; then
# Get the current Templates directory
eval templates_dir="${line:19:-1}"
# Copy README* template(s), if any
if [ $(ls "$templates_dir/README"* 2> /dev/null | wc -l) != "0" ]
then cp -n -r -T "$templates_dir/README"* ./
fi
fi
done
# Make the basic directory structure
mkdir -p assets content && cd assets && mkdir -p css fonts images js scss
| #!/bin/bash
# Get the user directories
cat ~/.config/user-dirs.dirs | while read line; do
if [ "${line:0:17}" = "XDG_TEMPLATES_DIR" ]; then
# Get the current Templates directory
eval templates_dir="${line:19:-1}"
# Copy README* template(s), if any
if [ $(ls "$templates_dir/README"* 2> /dev/null | wc -l) != "0" ]
then cp -n -r -T "$templates_dir/README"* ./
fi
fi
done
# Make the basic directory structure
mkdir -p assets content && cd assets &&
mkdir -p css fonts images js scss vendor
|
Use bits VM to update changeip.com entries | #!/bin/bash -ex
echo "${DIRECTOR_IP} ${DIRECTOR_NAME}" >> /etc/hosts
export BOSH_CLIENT=$BOSH_USERNAME
export BOSH_CLIENT_SECRET=$BOSH_PASSWORD
export BOSH_ENVIRONMENT=my-env
bosh2 alias-env my-env -e $DIRECTOR_NAME --ca-cert <(bosh2 int deployment-vars/environments/${ENVIRONMENT_NAME}/director/vars.yml --path /director_ssl/ca)
bosh2 login
bosh2 -d cf instances > instances
router_ip=$(grep -E '^router' instances | grep -oE '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
bits_service_ip=$(grep -E '^api' instances | grep -oE '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
curl -vvv -u $CHANGE_IP_USER:$CHANGE_IP_PASSWORD \
"https://nic.ChangeIP.com/nic/update?hostname=*.cf-deployment.dynamic-dns.net&ip=$router_ip" | \
grep '200 Successful Update'
curl -vvv -u $CHANGE_IP_USER:$CHANGE_IP_PASSWORD \
"https://nic.ChangeIP.com/nic/update?hostname=bits.cf-deployment.dynamic-dns.net&ip=$bits_service_ip" | \
grep '200 Successful Update'
| #!/bin/bash -ex
echo "${DIRECTOR_IP} ${DIRECTOR_NAME}" >> /etc/hosts
export BOSH_CLIENT=$BOSH_USERNAME
export BOSH_CLIENT_SECRET=$BOSH_PASSWORD
export BOSH_ENVIRONMENT=my-env
bosh2 alias-env my-env -e $DIRECTOR_NAME --ca-cert <(bosh2 int deployment-vars/environments/${ENVIRONMENT_NAME}/director/vars.yml --path /director_ssl/ca)
bosh2 login
bosh2 -d cf instances > instances
router_ip=$(grep -E '^router' instances | grep -oE '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
bits_service_ip=$(grep -E '^bits' instances | grep -oE '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
curl -vvv -u $CHANGE_IP_USER:$CHANGE_IP_PASSWORD \
"https://nic.ChangeIP.com/nic/update?hostname=*.cf-deployment.dynamic-dns.net&ip=$router_ip" | \
grep '200 Successful Update'
curl -vvv -u $CHANGE_IP_USER:$CHANGE_IP_PASSWORD \
"https://nic.ChangeIP.com/nic/update?hostname=bits.cf-deployment.dynamic-dns.net&ip=$bits_service_ip" | \
grep '200 Successful Update'
|
Fix typo in repo URL | #!/bin/sh
setup_git() {
git config --global user.email "travis+fateOfAllFools@travis-ci.org"
git config --global user.name "Travis CI (FATE)"
}
commit_artifacts() {
git add docs/fateOfAllFools.css
git add docs/fateOfAllFools.js
git commit --message "Travis build: $TRAVIS_BUILD_NUMBER"
}
push_to_github() {
git remote add origin https://${GH_TOKEN}@github.com/rslifka/fate_of_all_fools.git > /dev/null 2>&1
git push --quiet --set-upstream origin master
}
setup_git
commit_artifacts
push_to_github
| #!/bin/sh
setup_git() {
git config --global user.email "travis+fateOfAllFools@travis-ci.org"
git config --global user.name "Travis CI (FATE)"
}
commit_artifacts() {
git add docs/fateOfAllFools.css
git add docs/fateOfAllFools.js
git commit --message "Travis build: $TRAVIS_BUILD_NUMBER"
}
push_to_github() {
git remote add origin https://${GH_TOKEN}@github.com:rslifka/fate_of_all_fools.git > /dev/null 2>&1
git push --quiet --set-upstream origin master
}
setup_git
commit_artifacts
push_to_github
|
Set emacsclient -c -a emacs to use gui daemon | export ALTERNATE_EDITOR=""
export EDITOR="emacsclient -t" # $EDITOR should open in terminal
export VISUAL="emacsclient -c -a emacs" # $VISUAL opens in GUI with non-daemon as alternate
| export ALTERNATE_EDITOR=""
export EDITOR="emacsclient -t" # $EDITOR should open in terminal
export VISUAL="emacsclient -c -a emacs -s gui" # $VISUAL opens in GUI with non-daemon as alternate
|
Update brew when reinitializing an OSX installation |
# ----------------------------------------------------------------------------
# OSX Settings
# ----------------------------------------------------------------------------
# Always show scrollbars. Options: `WhenScrolling`, `Automatic`, `Always`.
# defaults write NSGlobalDomain AppleShowScrollBars -string "Always"
# Disable smart quotes.
defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false
# Disable smart dashes.
defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
# ----------------------------------------------------------------------------
# Finder
# ----------------------------------------------------------------------------
# Display the full POSIX path as the Finder window title
defaults write com.apple.finder _FXShowPosixPathInTitle -bool true
# Disable the warning when changing a file extension
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false
# Show the ~/Library folder
chflags nohidden ~/Library
# ----------------------------------------------------------------------------
# Messages
# ----------------------------------------------------------------------------
# Disable automatic emoji substitution.
defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticEmojiSubstitutionEnablediMessage" -bool false
# Disable smart quotes.
defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticQuoteSubstitutionEnabled" -bool false
|
# ----------------------------------------------------------------------------
# OSX Settings
# ----------------------------------------------------------------------------
# Always show scrollbars. Options: `WhenScrolling`, `Automatic`, `Always`.
# defaults write NSGlobalDomain AppleShowScrollBars -string "Always"
# Disable smart quotes.
defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false
# Disable smart dashes.
defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
# ----------------------------------------------------------------------------
# Finder
# ----------------------------------------------------------------------------
# Display the full POSIX path as the Finder window title
defaults write com.apple.finder _FXShowPosixPathInTitle -bool true
# Disable the warning when changing a file extension
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false
# Show the ~/Library folder
chflags nohidden ~/Library
# ----------------------------------------------------------------------------
# Messages
# ----------------------------------------------------------------------------
# Disable automatic emoji substitution.
defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticEmojiSubstitutionEnablediMessage" -bool false
# Disable smart quotes.
defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticQuoteSubstitutionEnabled" -bool false
# ----------------------------------------------------------------------------
# Brew
# ----------------------------------------------------------------------------
# Update brew installation and packages.
if is_installed brew; then
e_arrow "Updating brew..." && brew update &> /dev/null
e_arrow "Upgrading brew packages..." && brew upgrade --all &> /dev/null
fi
|
Make ssh connections great again | #!/bin/bash
# Set GPG TTY
export GPG_TTY=$(tty)
# Start the gpg-agent if not already running
gpg-connect-agent /bye >/dev/null 3>&1
# Set SSH to use gpg-agent
unset SSH_AGENT_PID
export SSH_AUTH_SOCK="/run/user/$(id -u)/gnupg/S.gpg-agent.ssh"
| #!/bin/bash
# If we're on an SSH connection don't attempt to setup the gpg-agent:
if [ -z "${SSH_CONNECTION}" ]; then
exit 0
fi
# Set GPG TTY
export GPG_TTY=$(tty)
# Start the gpg-agent if not already running
gpg-connect-agent /bye >/dev/null 3>&1
# Set SSH to use gpg-agent
unset SSH_AGENT_PID
export SSH_AUTH_SOCK="/run/user/$(id -u)/gnupg/S.gpg-agent.ssh"
|
Cut raster into tiles on import | #!/bin/bash
if [ -z "$3" ]; then
echo "Usage: $0 <raster> <table> <database> [<source_srid>]" >&2
exit 1
fi
INPUTRASTERFILE=$1
TABLENAME=$2
export PGDATABASE=$3
S_SRS=
test -n "$4" && S_SRS="-s_srs EPSG:$4"
# get config
. /systemapic/config/env.sh || exit 1
# env vars
export PGUSER=$SYSTEMAPIC_PGSQL_USERNAME
export PGPASSWORD=$SYSTEMAPIC_PGSQL_PASSWORD
export PGHOST=postgis
# Reproject to EPSG:3857
RASTERFILE=/tmp/import_raster_$$.tif
gdalwarp -t_srs EPSG:3857 ${S_SRS} ${INPUTRASTERFILE} ${RASTERFILE} || exit 1
# import raster
set -o pipefail # needed to get an error if raster2pgsql errors out
raster2pgsql \
-s 3857 -I -C \
${RASTERFILE} $TABLENAME |
psql -q --set ON_ERROR_STOP=1
| #!/bin/bash
if [ -z "$3" ]; then
echo "Usage: $0 <raster> <table> <database> [<source_srid>]" >&2
exit 1
fi
INPUTRASTERFILE=$1
TABLENAME=$2
export PGDATABASE=$3
S_SRS=
test -n "$4" && S_SRS="-s_srs EPSG:$4"
# get config
. /systemapic/config/env.sh || exit 1
# env vars
export PGUSER=$SYSTEMAPIC_PGSQL_USERNAME
export PGPASSWORD=$SYSTEMAPIC_PGSQL_PASSWORD
export PGHOST=postgis
# Reproject to EPSG:3857
RASTERFILE=/tmp/import_raster_$$.tif
gdalwarp -t_srs EPSG:3857 ${S_SRS} ${INPUTRASTERFILE} ${RASTERFILE} || exit 1
TILESIZE="128x128"
# import raster
set -o pipefail # needed to get an error if raster2pgsql errors out
raster2pgsql \
-s 3857 -I -C -Y \
-t ${TILESIZE} \
${RASTERFILE} $TABLENAME |
psql -q --set ON_ERROR_STOP=1
|
Fix bug in starting carbon daemon script. | #!/bin/bash
# Starts and runs the carbon server and graphite web-app.
# Assumes script is running from vagrant environment, and
# Cassandra is running from host environment.
# TODO Stop if Cassandra isn't running?
# TODO Makefile, or startup script?
# TODO Write values to files based on IP address
echo "Make sure Cassandra is running on the host machine."
echo "Starting Carbon Daemon server."
sudo -u www-data /opt/graphite/bin/carbon-daemon.py writer start 2 &> 1
# TODO Run graphite as a background process, append logs to nohup.
echo "Starting Graphite development server."
sudo -u www-data /opt/graphite/bin/run-graphite-devel-server.py /opt/graphite/
| #!/bin/bash
# Starts and runs the carbon server and graphite web-app.
# Assumes script is running from vagrant environment, and
# Cassandra is running from host environment.
# TODO Stop if Cassandra isn't running?
# TODO Makefile, or startup script?
# TODO Write values to files based on IP address
echo "Make sure Cassandra is running on the host machine."
echo "Starting Carbon Daemon server."
sudo -u www-data /opt/graphite/bin/carbon-daemon.py writer start
# TODO Run graphite as a background process, append logs to nohup.
echo "Starting Graphite development server."
sudo -u www-data /opt/graphite/bin/run-graphite-devel-server.py /opt/graphite/
|
Replace compiler command with the toolchain | #!/bin/bash
set -euo pipefail
set -x
export LD_LIBRARY_PATH=$PREFIX/lib
if [[ $(uname) =~ .*Darwin.* ]]; then
gn gen out.gn "--args=use_custom_libcxx=false clang_use_chrome_plugins=false v8_use_external_startup_data=false is_debug=false clang_base_path=\"${BUILD_PREFIX}\" mac_sdk_min=\"10.9\" use_system_xcode=false is_component_build=true mac_sdk_path=\"${CONDA_BUILD_SYSROOT}\" icu_use_system=true icu_include_dir=\"$PREFIX/include\" icu_lib_dir=\"$PREFIX/lib\""
elif [[ $(uname) =~ .*Linux.* ]]; then
gn gen out.gn "--args=use_custom_libcxx=false clang_use_chrome_plugins=false v8_use_external_startup_data=false is_debug=false clang_base_path=\"${BUILD_PREFIX}\" is_component_build=true icu_use_system=true icu_include_dir=\"$PREFIX/include\" icu_lib_dir=\"$PREFIX/lib\" use_sysroot=false is_clang=false"
fi
ninja -C out.gn
mkdir -p $PREFIX/lib
cp out.gn/libv8*${SHLIB_EXT} $PREFIX/lib
mkdir -p $PREFIX/include
cp -r include/* $PREFIX/include/
| #!/bin/bash
set -euo pipefail
set -x
export LD_LIBRARY_PATH=$PREFIX/lib
if [[ $(uname) =~ .*Darwin.* ]]; then
gn gen out.gn "--args=use_custom_libcxx=false clang_use_chrome_plugins=false v8_use_external_startup_data=false is_debug=false clang_base_path=\"${BUILD_PREFIX}\" mac_sdk_min=\"10.9\" use_system_xcode=false is_component_build=true mac_sdk_path=\"${CONDA_BUILD_SYSROOT}\" icu_use_system=true icu_include_dir=\"$PREFIX/include\" icu_lib_dir=\"$PREFIX/lib\""
elif [[ $(uname) =~ .*Linux.* ]]; then
gn gen out.gn "--args=use_custom_libcxx=false clang_use_chrome_plugins=false v8_use_external_startup_data=false is_debug=false clang_base_path=\"${BUILD_PREFIX}\" is_component_build=true icu_use_system=true icu_include_dir=\"$PREFIX/include\" icu_lib_dir=\"$PREFIX/lib\" use_sysroot=false is_clang=false"
sed -i "s/ gcc/ ${HOST}-gcc/g" out.gn/toolchain.ninja
sed -i "s/ g++/ ${HOST}-g++/g" out.gn/toolchain.ninja
fi
ninja -C out.gn
mkdir -p $PREFIX/lib
cp out.gn/libv8*${SHLIB_EXT} $PREFIX/lib
mkdir -p $PREFIX/include
cp -r include/* $PREFIX/include/
|
Fix the regexp for networking test match | #!/bin/bash
source "$(dirname "${BASH_SOURCE}")/../../hack/lib/init.sh"
# Set this to false if the plugin does not implement NetworkPolicy
export NETWORKING_E2E_NETWORKPOLICY="${NETWORKING_E2E_NETWORKPOLICY:-true}"
# Set this to true if the plugin implements isolation in the same manner as
# redhat/openshift-ovs-multitenant
export NETWORKING_E2E_ISOLATION="${NETWORKING_E2E_ISOLATION:-false}"
export NETWORKING_E2E_FOCUS="${NETWORKING_E2E_FOCUS:-\[networking\]}"
export NETWORKING_E2E_EXTERNAL=1
# Checking for a given kubeconfig
os::log::info "Starting 'networking' extended tests for cni plugin"
if [[ -n "${OPENSHIFT_TEST_KUBECONFIG:-}" ]]; then
# Run tests against an existing cluster
"${OS_ROOT}/test/extended/networking.sh" $@
else
os::log::error "Please set env OPENSHIFT_TEST_KUBECONFIG to run the tests against an existing cluster"
exit 1
fi
| #!/bin/bash
source "$(dirname "${BASH_SOURCE}")/../../hack/lib/init.sh"
# Set this to false if the plugin does not implement NetworkPolicy
export NETWORKING_E2E_NETWORKPOLICY="${NETWORKING_E2E_NETWORKPOLICY:-true}"
# Set this to true if the plugin implements isolation in the same manner as
# redhat/openshift-ovs-multitenant
export NETWORKING_E2E_ISOLATION="${NETWORKING_E2E_ISOLATION:-false}"
export NETWORKING_E2E_FOCUS="${NETWORKING_E2E_FOCUS:-\[Area:Networking\]}"
export NETWORKING_E2E_EXTERNAL=1
# Checking for a given kubeconfig
os::log::info "Starting 'networking' extended tests for cni plugin"
if [[ -n "${OPENSHIFT_TEST_KUBECONFIG:-}" ]]; then
# Run tests against an existing cluster
"${OS_ROOT}/test/extended/networking.sh" $@
else
os::log::error "Please set env OPENSHIFT_TEST_KUBECONFIG to run the tests against an existing cluster"
exit 1
fi
|
Fix test 514 so it isn't sensitive to test directory | #!/bin/sh
set -x
OUTPUT=514-string-multiline.out
grep -q 1234 ${OUTPUT} || exit 1
grep -q 4321 ${OUTPUT} || exit 1
grep -q AA ${OUTPUT} || exit 1
grep -q "x=1" ${OUTPUT} || exit 1
echo OK
exit 0
| #!/bin/sh
set -x
grep -q 1234 ${TURBINE_OUTPUT} || exit 1
grep -q 4321 ${TURBINE_OUTPUT} || exit 1
grep -q AA ${TURBINE_OUTPUT} || exit 1
grep -q "x=1" ${TURBINE_OUTPUT} || exit 1
echo OK
exit 0
|
Comment purpose of `tv` function | # github:norm/homedir:etc/bash/rc/host-southpaw/media.sh
#
# Settings for dealing with media, especially anything to do with
# https://github.com/norm/media-tools.
export MEDIA_CONFIG="${HOME}/etc/media.conf"
function tv {
local show="${1//\//}";
local season=$(
/bin/ls "/files/tv/${show}/" \
| awk '/Season / { print $2 }' \
| sort -rn \
| head -1
)
print "${bold}${red}${show} Season ${season}:${reset}"
ls "/files/tv/${show}/Season ${season}"
}
complete -o filenames -F _tv_series tv
| # github:norm/homedir:etc/bash/rc/host-southpaw/media.sh
#
# Settings for dealing with media, especially anything to do with
# https://github.com/norm/media-tools.
export MEDIA_CONFIG="${HOME}/etc/media.conf"
# List the episodes in the most recent season of a TV show.
function tv {
local show="${1//\//}";
local season=$(
/bin/ls "/files/tv/${show}/" \
| awk '/Season / { print $2 }' \
| sort -rn \
| head -1
)
print "${bold}${red}${show} Season ${season}:${reset}"
ls "/files/tv/${show}/Season ${season}"
}
complete -o filenames -F _tv_series tv
|
Use linked npm in test | #!/bin/bash
set -o nounset
set -o errexit
ln -s /usr/local/lib64/node-v4.3.x/bin/npm /usr/local/bin/npm
ln -s /usr/local/lib64/node-v4.3.x/bin/node /usr/local/bin/node
/usr/local/lib64/node-v4.3.x/bin/npm run nsp
/usr/local/lib64/node-v4.3.x/bin/npm run lint
/usr/local/lib64/node-v4.3.x/bin/npm run test
if [ "$LAMBCI_BRANCH" != "develop" ] && [ "$LAMBCI_BRANCH" != "master" ]; then
echo "Deployment only triggered for develop or master, build was for $LAMBCI_BRANCH"
exit 0
fi
ENVIRONMENT="staging"
if [ "$LAMBCI_BRANCH" == "master" ]; then
ENVIRONMENT="production"
fi
./deploy.sh "$ENVIRONMENT" "80,88,100,300,500"
| #!/bin/bash
set -o nounset
set -o errexit
ln -s /usr/local/lib64/node-v4.3.x/bin/npm /usr/local/bin/npm
ln -s /usr/local/lib64/node-v4.3.x/bin/node /usr/local/bin/node
npm run nsp
npm run lint
npm run test
if [ "$LAMBCI_BRANCH" != "develop" ] && [ "$LAMBCI_BRANCH" != "master" ]; then
echo "Deployment only triggered for develop or master, build was for $LAMBCI_BRANCH"
exit 0
fi
ENVIRONMENT="staging"
if [ "$LAMBCI_BRANCH" == "master" ]; then
ENVIRONMENT="production"
fi
./deploy.sh "$ENVIRONMENT" "80,88,100,300,500"
|
Use new native image flags. | #!/usr/bin/env bash
if [[ $SL_BUILD_NATIVE == "false" ]]; then
echo "Skipping the native image build because SL_BUILD_NATIVE is set to false."
exit 0
fi
"$JAVA_HOME"/bin/native-image --tool:truffle -H:MaxRuntimeCompileMethods=1200 \
-cp ../language/target/simplelanguage.jar:../launcher/target/launcher-19.0.0-SNAPSHOT.jar \
com.oracle.truffle.sl.launcher.SLMain \
slnative
| #!/usr/bin/env bash
if [[ $SL_BUILD_NATIVE == "false" ]]; then
echo "Skipping the native image build because SL_BUILD_NATIVE is set to false."
exit 0
fi
"$JAVA_HOME"/bin/native-image \
--macro:truffle --no-fallback --initialize-at-build-time \
-cp ../language/target/simplelanguage.jar:../launcher/target/launcher-19.0.0-SNAPSHOT.jar \
com.oracle.truffle.sl.launcher.SLMain \
slnative
|
Add pipbench flag for cluster_util, Add pipbench dependency | pip3 install falcon
pip3 install gunicorn
pip3 install pypiserver
pip3 install requests
pip3 install numpy
| pip3 install falcon
pip3 install gunicorn
pip3 install pypiserver
pip3 install requests
pip3 install numpy
pip3 install grequests
|
Increase stack size to fix unstable test in bundle-plugin | #!/bin/bash
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
set -e
export SOURCE_DIR=/source
export NUM_THREADS=6
export MALLOC_ARENA_MAX=1
export MAVEN_OPTS="-Xms128m -Xmx2g"
source /etc/profile.d/devtoolset-7.sh || true
ccache --max-size=1250M
ccache --set-config=compression=true
ccache --print-config
cd ${SOURCE_DIR}
sh ./bootstrap.sh java
mvn -V install --no-snapshot-updates --batch-mode --threads ${NUM_THREADS}
bash ${SOURCE_DIR}/bootstrap-cmake.sh ${SOURCE_DIR}
make -j ${NUM_THREADS}
ctest3 --output-on-failure -j ${NUM_THREADS}
ccache --show-stats
make install
| #!/bin/bash
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
set -e
export SOURCE_DIR=/source
export NUM_THREADS=6
export MALLOC_ARENA_MAX=1
export MAVEN_OPTS="-Xss1m -Xms128m -Xmx2g"
source /etc/profile.d/devtoolset-7.sh || true
ccache --max-size=1250M
ccache --set-config=compression=true
ccache --print-config
cd ${SOURCE_DIR}
sh ./bootstrap.sh java
mvn -V install --no-snapshot-updates --batch-mode --threads ${NUM_THREADS}
bash ${SOURCE_DIR}/bootstrap-cmake.sh ${SOURCE_DIR}
make -j ${NUM_THREADS}
ctest3 --output-on-failure -j ${NUM_THREADS}
ccache --show-stats
make install
|
Enable autostash and remote pruning for git | #!/usr/bin/env bash
REV=$(uname -r)
# Installs git
if [[ $REV = *"ARCH"* ]]; then
echo "ArchLinux"
sudo pacman -S --needed git
else
echo "Debian"
sudo apt-get install git
# Installs git-extras
sudo apt-get update
sudo apt-get install git-extras
fi
# TODO: configure GPG keys
if type git >/dev/null 2>&1
then
git config --global user.name "Ian Lai"
git config --global user.email "ian@fyianlai.com"
fi
git config --global core.excludesfile ~/.gitignore_global
git config --global core.pager "diff-so-fancy | less --tabs=4 -RFX"
git config --global alias.tree "log --graph --decorate --pretty=oneline --abbrev-commit"
git config --global core.editor "nano"
| #!/usr/bin/env bash
REV=$(uname -r)
# Installs git
if [[ $REV = *"ARCH"* ]]; then
echo "ArchLinux"
sudo pacman -S --needed git
else
echo "Debian"
sudo apt-get install git
# Installs git-extras
sudo apt-get update
sudo apt-get install git-extras
fi
# TODO: configure GPG keys
if type git >/dev/null 2>&1
then
git config --global user.name "Ian Lai"
git config --global user.email "ian@fyianlai.com"
fi
git config --global core.excludesfile ~/.gitignore_global
git config --global core.pager "diff-so-fancy | less --tabs=4 -RFX"
git config --global alias.tree "log --graph --decorate --pretty=oneline --abbrev-commit"
git config --global rebase.autoStash true true
git config --global remote.origin.prune true
git config --global core.editor "nano"
|
Set color profile with xcalib. | #!/bin/sh
#-----------------------------------------
#---- AutoStart Script for OpenBox -------
#-----------------------------------------
#Sets the wallpaper among other things.
if [ -f ~/.config/autostart.sh ]; then
. ~/.config/autostart.sh
fi
tint2 &
export NODENAME=$(uname -n)
export ENABLE_COMPMGR="no"
if [[ $NODENAME == "arcbase" ]]; then
export ENABLE_COMPMGR="yes"
elif [[ $NODENAME == "zenbook" ]]; then
export ENABLE_COMPMGR="no"
fi
#Now our host specific autostart
if [[ $ENABLE_COMPMGR == "yes" ]]; then
compton -cGCb
fi
| #!/bin/sh
#-----------------------------------------
#---- AutoStart Script for OpenBox -------
#-----------------------------------------
#Sets the wallpaper among other things.
if [ -f ~/.config/autostart.sh ]; then
. ~/.config/autostart.sh
fi
tint2 &
export NODENAME=$(uname -n)
export ENABLE_COMPMGR="no"
if [[ $NODENAME == "arcbase" ]]; then
export ENABLE_COMPMGR="yes"
elif [[ $NODENAME == "zenbook" ]]; then
export ENABLE_COMPMGR="no"
xcalib ~/repo/dotfiles/misc/UX31.icc
fi
#Now our host specific autostart
if [[ $ENABLE_COMPMGR == "yes" ]]; then
compton -cGCb
fi
|
Remove problematic qlstephen from cask installer | #!/usr/bin/env bash
source ~/dotfiles/setup/header.sh
echo "Installing Homebrew Casks..."
preload_cask_list
# Install essential macOS apps via Cask (in order of essentiality)
install_cask google-chrome
install_cask dropbox
install_cask alfred
install_cask atom
install_cask authy
install_cask keyboard-maestro
install_cask flux
# Install additional apps
install_cask appcleaner
install_cask namechanger
install_cask the-unarchiver
install_cask mamp
rm -rf '/Applications/MAMP PRO'
install_cask keybase
install_cask postman
install_cask slack
install_cask microsoft-teams
install_cask zoom
echo "Installing Quick Look plugins..."
install_cask qlstephen
install_cask qlmarkdown
install_cask scriptql
install_cask quicklook-json
install_cask betterzip
install_cask suspicious-package
qlmanage -r
| #!/usr/bin/env bash
source ~/dotfiles/setup/header.sh
echo "Installing Homebrew Casks..."
preload_cask_list
# Install essential macOS apps via Cask (in order of essentiality)
install_cask google-chrome
install_cask dropbox
install_cask alfred
install_cask atom
install_cask authy
install_cask keyboard-maestro
install_cask flux
# Install additional apps
install_cask appcleaner
install_cask namechanger
install_cask the-unarchiver
install_cask mamp
rm -rf '/Applications/MAMP PRO'
install_cask keybase
install_cask postman
install_cask slack
install_cask microsoft-teams
install_cask zoom
echo "Installing Quick Look plugins..."
install_cask qlmarkdown
install_cask scriptql
install_cask quicklook-json
install_cask betterzip
install_cask suspicious-package
qlmanage -r
|
Add term-missing as a coverage.py param | #!/bin/bash -xe
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
flake8
black --check .
isort --check .
moz-l10n-lint l10n/l10n-pontoon.toml
moz-l10n-lint l10n/l10n-vendor.toml
python manage.py lint_ftl -q
python manage.py runscript check_calendars
python manage.py version
python manage.py migrate --noinput
py.test lib bedrock \
--cov-config=.coveragerc \
--cov-report=html \
--cov-report=xml:python_coverage/coverage.xml \
--cov=.
py.test -r a tests/redirects
| #!/bin/bash -xe
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
flake8
black --check .
isort --check .
moz-l10n-lint l10n/l10n-pontoon.toml
moz-l10n-lint l10n/l10n-vendor.toml
python manage.py lint_ftl -q
python manage.py runscript check_calendars
python manage.py version
python manage.py migrate --noinput
py.test lib bedrock \
--cov-config=.coveragerc \
--cov-report=html \
--cov-report=term-missing \
--cov-report=xml:python_coverage/coverage.xml \
--cov=.
py.test -r a tests/redirects
|
Remove aclocal.m4 to make sure it's correctly regenerated | #!/bin/sh -x
rm -fv ltmain.sh config.sub config.guess config.h.in
aclocal -I m4
autoheader
libtoolize --automake --copy
automake --add-missing --copy --force
autoreconf
chmod 755 configure
| #!/bin/sh -x
rm -fv ltmain.sh config.sub config.guess config.h.in aclocal.m4
aclocal -I m4
autoheader
libtoolize --automake --copy
automake --add-missing --copy --force
autoreconf
chmod 755 configure
|
Enable debugging trace in run_test.sh Bash script. | #!/bin/bash
#
# A script for running Resolwe tests with Jenkins.
#
# To use this script, add an "Execute shell" "Build" step to your project and
# put in the following:
# ./tests/run_tests.sh
#
pushd $WORKSPACE
rm -rf reports
scl enable python33 'tox -r'
cloc --exclude-dir=.venv,.tox,reports, --by-file --xml --out=reports/cloc.xml .
popd
| #!/bin/bash
#
# A script for running Resolwe tests with Jenkins.
#
# To use this script, add an "Execute shell" "Build" step to your project and
# put in the following:
# ./tests/run_tests.sh
#
set -x
pushd $WORKSPACE
rm -rf reports
scl enable python33 'tox -r'
cloc --exclude-dir=.venv,.tox,reports, --by-file --xml --out=reports/cloc.xml .
popd
|
Add jmap support to profiling | #!/bin/bash
ODL_CONTAINER_PID=$1
COUNT=$2
INT_CPU_USAGE=$3
function generate_symbols() {
export ODL_CONTAINER_PID
runuser -u odl -- /bin/sh -c '(export JAVA_HOME=/lib/jvm/java-1.8.0-openjdk/; cd /opt/opendaylight/perf-map-agent/out; java -cp attach-main.jar:$JAVA_HOME/lib/tools.jar net.virtualvoid.perf.AttachOnce $ODL_CONTAINER_PID)'
}
function generate_jstack() {
runuser -u odl -- /bin/sh -c '(jstack $ODL_CONTAINER_PID)' > jstack_${COUNT}_${INT_CPU_USAGE}.txt 2>&1
}
generate_symbols
generate_jstack
| #!/bin/bash
ODL_CONTAINER_PID=$1
COUNT=$2
INT_CPU_USAGE=$3
function generate_symbols() {
export ODL_CONTAINER_PID
runuser -u odl -- /bin/sh -c '(export JAVA_HOME=/lib/jvm/java-1.8.0-openjdk/; cd /opt/opendaylight/perf-map-agent/out; java -cp attach-main.jar:$JAVA_HOME/lib/tools.jar net.virtualvoid.perf.AttachOnce $ODL_CONTAINER_PID)'
}
function generate_jstack() {
runuser -u odl -- /bin/sh -c '(jstack $ODL_CONTAINER_PID)' > jstack_${COUNT}_${INT_CPU_USAGE}.txt 2>&1
}
function generate_jmap() {
runuser -u odl -- /bin/sh -c '(jmap -dump:format=b,file=HeapDump_${COUNT}_${INT_CPU_USAGE}.hprof $ODL_CONTAINER_PID)'
}
generate_symbols
generate_jstack
generate_jmap
|
Use SITL as metadata build validation target | #!/usr/bin/env bash
make px4fmu-v4_default
cp build/px4fmu-v4_default/parameters.xml ../qgroundcontrol/src/FirmwarePlugin/PX4/PX4ParameterFactMetaData.xml
cp build/px4fmu-v4_default/airframes.xml ../qgroundcontrol/src/AutoPilotPlugins/PX4/AirframeFactMetaData.xml
| #!/usr/bin/env bash
make posix_sitl_default
cp build/posix_sitl_default/parameters.xml ../qgroundcontrol/src/FirmwarePlugin/PX4/PX4ParameterFactMetaData.xml
#cp build/posix_sitl_default/airframes.xml ../qgroundcontrol/src/AutoPilotPlugins/PX4/AirframeFactMetaData.xml
|
Fix python mkdir for multiplatform | #!/bin/bash
# setup global python modules
. functions.sh
MODULES=("virtualenv" "virtualfish" "thefuck")
for MODULE in "${MODULES[@]}"; do
sudo -H pip install --upgrade "$MODULE"
if [ $? -ne 0 ]; then
fail "error occurred while installed $MODULE"
fi
# TODO: revisit if we want to get rid of the password prompt
#if ! pip freeze | grep $MODULE > /dev/null; then
# sudo -H pip install --upgrade $MODULE
#fi
done
mkdir -p "/home/${USER}/.virtualenvs"
| #!/bin/bash
# setup global python modules
. functions.sh
MODULES=("virtualenv" "virtualfish" "thefuck")
for MODULE in "${MODULES[@]}"; do
sudo -H pip install --upgrade "$MODULE"
if [ $? -ne 0 ]; then
fail "error occurred while installed $MODULE"
fi
# TODO: revisit if we want to get rid of the password prompt
#if ! pip freeze | grep $MODULE > /dev/null; then
# sudo -H pip install --upgrade $MODULE
#fi
done
if [ ${UNAME} == "Linux" ]; then
mkdir -p "/home/${USER}/.virtualenvs"
else
mkdir -p "/Users/${USER}/.virtualenvs"
fi
|
Add travis CI support for deploying tags to manage multiple semantic versions. | #!/bin/bash
# Pull requests and commits to other branches shouldn't try to deploy
if [ "$TRAVIS_PULL_REQUEST" != "false" -o "$TRAVIS_BRANCH" != "master" ]; then
echo "Skipping deploy codes"
exit 0
fi
# Pull current codes
mkdir -p ./sdk/ovirtsdk-git/
cd ./sdk/ovirtsdk-git/
git init
git config --global user.email "travis@travis-ci.org"
git config --global user.name "GooVirtRobot@TravisCI"
git remote add origin https://${GH_TOKEN}@github.com/imjoey/go-ovirt.git
git pull origin master
# Use newly generated codes to override the pulled ones
rm -fr *.go README.md
cp -r ../ovirtsdk/* ./
# Copy examples/ and push into go-ovirt repository
rm -fr ./examples
cp -r ../examples ./
# Push back to github
git add -A
git commit --message "Generator commit ID: ${TRAVIS_COMMIT:0:7} with message: $TRAVIS_COMMIT_MESSAGE. Travis build: $TRAVIS_BUILD_NUMBER."
git push origin master
| #!/bin/bash
# Pull requests shouldn't try to deploy
if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
echo "Skipping deploy codes for pull request"
exit 0
fi
# Commits to other branches except master shouldn't try to deploy
if [ "$TRAVIS_BRANCH" != "master" ]; then
if [ -z "$TRAVIS_TAG" ];then
echo "Skipping deploy code for non master branch commits"
fi
fi
# Pull current codes
mkdir -p ./sdk/ovirtsdk-git/
cd ./sdk/ovirtsdk-git/
git init
git config --global user.email "travis@travis-ci.org"
git config --global user.name "GooVirtRobot@TravisCI"
git remote add origin https://${GH_TOKEN}@github.com/imjoey/go-ovirt.git
git pull origin master
# Use newly generated codes to override the pulled ones
rm -fr *.go README.md
cp -r ../ovirtsdk/* ./
# Copy examples/ and push into go-ovirt repository
rm -fr ./examples
cp -r ../examples ./
# Push back to github
git add -A
git commit --message "Generator commit ID: ${TRAVIS_COMMIT:0:7} with message: $TRAVIS_COMMIT_MESSAGE. Travis build: $TRAVIS_BUILD_NUMBER."
# For builds triggered by a tag, TRAVIS_BRANCH is the same as
# the name of the tag (TRAVIS_TAG).
if [ "$TRAVIS_BRANCH" != "master" ];then
git tag -a ${TRAVIS_TAG} -m "New version release: ${TRAVIS_TAG}"
fi
git push origin ${TRAVIS_BRANCH}
|
Update setup-disks to drop /mnt disabling. | #!/bin/sh
umount /mnt
LOCALDISK=/dev/nvme0n1
mkfs.xfs $LOCALDISK
echo "$LOCALDISK /srv xfs nofail,noatime 1 1" >> /etc/fstab
mount /srv
# TODO use systemctl instead of pg_ctlcluster on CentOS
pg_ctlcluster 9.5 main stop
mv /var/lib/postgresql /srv
ln -s /srv/postgresql/ /var/lib
# TODO use systemctl instead of pg_ctlcluster on CentOS
pg_ctlcluster 9.5 main start
# Disable /mnt line for /dev/xvdb
umount /mnt
sed -i 's|^/dev/xvdb|#/dev/xvdb|' /etc/fstab
| #!/bin/sh
LOCALDISK=/dev/nvme0n1
mkfs.xfs $LOCALDISK
echo "$LOCALDISK /srv xfs nofail,noatime 1 1" >> /etc/fstab
mount /srv
# TODO use systemctl instead of pg_ctlcluster on CentOS
pg_ctlcluster 9.5 main stop
mv /var/lib/postgresql /srv
ln -s /srv/postgresql/ /var/lib
# TODO use systemctl instead of pg_ctlcluster on CentOS
pg_ctlcluster 9.5 main start
|
Fix potential conflict in USERNAME var | #!/bin/sh
mkdir -p /data/pcap
if [ -z $1 ]; then
BUILDDIR=/data/moloch-git
else
BUILDDIR=$1
fi
echo "git clone"
git clone -b batch-build-fix https://github.com/kost/moloch.git $BUILDDIR
echo "cd to dir and build"
cd $BUILDDIR
USEPFRING=no ESMEM="512M" DONOTSTART=yes USERNAME=daemon GROUPNAME=daemon PASSWORD=0mgMolochDockerRules5 INTERFACE=eth0 BATCHRUN=yes ./easybutton-singlehost.sh
killall java
echo "Giving ES time to shut itself"
sleep 5
| #!/bin/sh
mkdir -p /data/pcap
if [ -z $1 ]; then
BUILDDIR=/data/moloch-git
else
BUILDDIR=$1
fi
echo "git clone"
git clone -b batch-build-fix https://github.com/kost/moloch.git $BUILDDIR
echo "cd to dir and build"
cd $BUILDDIR
USEPFRING=no ESMEM="512M" DONOTSTART=yes MOLOCHUSER=daemon GROUPNAME=daemon PASSWORD=0mgMolochDockerRules5 INTERFACE=eth0 BATCHRUN=yes ./easybutton-singlehost.sh
killall java
echo "Giving ES time to shut itself"
sleep 5
|
Update staging script to use 7-zip on Windows | #!/bin/sh
set -e
DATE=`date "+%Y-%m-%d"`
TARGET=saw-alpha-${DATE}
NM=`uname`
mkdir -p ${TARGET}/bin
mkdir -p ${TARGET}/doc
if [ "${OS}" == "Windows_NT" ]; then
EXEDIR=windows
elif [ "${NM}" == "Darwin" ]; then
EXEDIR=macosx
else
EXEDIR=linux
fi
echo Staging ...
strip build/bin/*
cp deps/abcBridge/abc/copyright.txt ${TARGET}/ABC_LICENSE
cp build/bin/bcdump ${TARGET}/bin
cp build/bin/extcore-info ${TARGET}/bin
cp build/bin/jss ${TARGET}/bin
cp build/bin/llvm-disasm ${TARGET}/bin
cp build/bin/lss ${TARGET}/bin
cp build/bin/saw ${TARGET}/bin
cp doc/extcore.txt ${TARGET}/doc
cp doc/tutorial/sawScriptTutorial.pdf ${TARGET}/doc
cp -r doc/tutorial/code ${TARGET}/doc
cp -r ../Examples/ecdsa ${TARGET}/ecdsa
rm -rf ${TARGET}/ecdsa/cryptol-2-spec
if [ "${OS}" == "Windows_NT" ]; then
zip -r ${TARGET}-${EXEDIR}.zip ${TARGET}
echo "Release package is ${TARGET}-${EXEDIR}.zip"
else
tar cvfz ${TARGET}-${EXEDIR}.tar.gz ${TARGET}
echo "Release package is ${TARGET}-${EXEDIR}.tar.gz"
fi
| #!/bin/sh
set -e
DATE=`date "+%Y-%m-%d"`
TARGET=saw-alpha-${DATE}
NM=`uname`
mkdir -p ${TARGET}/bin
mkdir -p ${TARGET}/doc
if [ "${OS}" == "Windows_NT" ]; then
EXEDIR=windows
elif [ "${NM}" == "Darwin" ]; then
EXEDIR=macosx
else
EXEDIR=linux
fi
echo Staging ...
strip build/bin/*
cp deps/abcBridge/abc/copyright.txt ${TARGET}/ABC_LICENSE
cp build/bin/bcdump ${TARGET}/bin
cp build/bin/extcore-info ${TARGET}/bin
cp build/bin/jss ${TARGET}/bin
cp build/bin/llvm-disasm ${TARGET}/bin
cp build/bin/lss ${TARGET}/bin
cp build/bin/saw ${TARGET}/bin
cp doc/extcore.txt ${TARGET}/doc
cp doc/tutorial/sawScriptTutorial.pdf ${TARGET}/doc
cp -r doc/tutorial/code ${TARGET}/doc
cp -r ../Examples/ecdsa ${TARGET}/ecdsa
rm -rf ${TARGET}/ecdsa/cryptol-2-spec
if [ "${OS}" == "Windows_NT" ]; then
7za.exe a -tzip ${TARGET}-${EXEDIR}.zip -r ${TARGET}
echo "Release package is ${TARGET}-${EXEDIR}.zip"
else
tar cvfz ${TARGET}-${EXEDIR}.tar.gz ${TARGET}
echo "Release package is ${TARGET}-${EXEDIR}.tar.gz"
fi
|
Remove govuk_setenv as we're setting it manually | #!/bin/bash
set -e
rm -f Gemfile.lock
bundle install --path "${HOME}/bundles/${JOB_NAME}"
export GOVUK_APP_DOMAIN=development
govuk_setenv default bundle exec rake test
govuk_setenv default bundle exec rake publish_gem
| #!/bin/bash
set -e
rm -f Gemfile.lock
bundle install --path "${HOME}/bundles/${JOB_NAME}"
export GOVUK_APP_DOMAIN=development
bundle exec rake test
bundle exec rake publish_gem
|
Fix multiple argument completion with zsh | if [[ ! -o interactive ]]; then
return
fi
compctl -K _erlenv erlenv
_erlenv() {
local word words completions
read -cA words
word="${words[2]}"
if [ "${#words}" -eq 2 ]; then
completions="$(erlenv commands)"
else
completions="$(erlenv completions "${word}")"
fi
reply=("${(ps:\n:)completions}")
}
| if [[ ! -o interactive ]]; then
return
fi
compctl -K _erlenv erlenv
_erlenv() {
local words completions
read -cA words
if [ "${#words}" -eq 2 ]; then
completions="$(erlenv commands)"
else
completions="$(erlenv completions ${words[2,-2]})"
fi
reply=("${(ps:\n:)completions}")
} |
Trim leading whitespace from newcount | #!/bin/sh
## customize these
## use host=. for local host
ap="/usr/local/bin/mpg123"
nms="${HOME}/files/media/audio/chime.mp3"
count=0
host=.
newcount=0
path=~/mail/new
port=22
user=me
interval=10
stump_pid="$(pgrep -a -n stumpwm)"
## while stumpwm is still running
while kill -0 "$stump_pid" > /dev/null 2>&1; do
if [ "$host" = '.' ]; then
newcount="$(ls ${path} | wc -l)"
else
newcount=$(/usr/bin/ssh -p "$port" -x -o ConnectTimeout=1
"$user"@"$host" "ls $path | wc -l")
fi
[ "$newcount" -gt "$count" ] && "$ap" "$nms" > /dev/null 2>&1
count="$newcount"
echo "$count"
sleep "$interval"
done | #!/bin/sh
## customize these
## use host=. for local host
ap="/usr/local/bin/mpg123"
nms="${HOME}/files/media/audio/chime.mp3"
count=0
host=.
newcount=0
path=~/mail/new
port=22
user=me
interval=10
stump_pid="$(pgrep -a -n stumpwm)"
## while stumpwm is still running
while kill -0 "$stump_pid" > /dev/null 2>&1; do
if [ "$host" = '.' ]; then
newcount="$(ls ${path} | wc -l | tr -d '[[:space:]]')"
else
newcount=$(/usr/bin/ssh -p "$port" -x -o ConnectTimeout=1
"$user"@"$host" "ls $path | wc -l | tr -d '[[:space:]]'")
fi
[ "$newcount" -gt "$count" ] && "$ap" "$nms" > /dev/null 2>&1
count="$newcount"
echo "$count"
sleep "$interval"
done |
Apply iotpatch after IoT Box upgrade | #!/usr/bin/env bash
sudo mount -o remount,rw /
cd /home/pi/odoo
localbranch=$(git symbolic-ref -q --short HEAD)
localremote=$(git config branch.$localbranch.remote)
git fetch "${localremote}" "${localbranch}" --depth=1
git reset "${localremote}"/"${localbranch}" --hard
sudo mount -o remount,ro /
(sleep 5 && sudo service odoo restart) &
| #!/usr/bin/env bash
sudo mount -o remount,rw /
cd /home/pi/odoo
localbranch=$(git symbolic-ref -q --short HEAD)
localremote=$(git config branch.$localbranch.remote)
echo "addons/point_of_sale/tools/posbox/overwrite_after_init/home/pi/odoo" >> .git/info/sparse-checkout
git fetch "${localremote}" "${localbranch}" --depth=1
git reset "${localremote}"/"${localbranch}" --hard
git clean -df
cp -a /home/pi/odoo/addons/point_of_sale/tools/posbox/overwrite_after_init/home/pi/odoo/* /home/pi/odoo/
rm -r /home/pi/odoo/addons/point_of_sale/tools/posbox/overwrite_after_init
sudo find / -type f -name "*.iotpatch" 2> /dev/null | while read iotpatch; do
DIR=$(dirname "${iotpatch}")
BASE=$(basename "${iotpatch%.iotpatch}")
sudo find "${DIR}" -type f -name "${BASE}" ! -name "*.iotpatch" | while read file; do
sudo patch -f "${file}" < "${iotpatch}"
done
done
sudo mount -o remount,ro /
sudo mount -o remount,rw /root_bypass_ramdisks/etc/cups
(sleep 5 && sudo service odoo restart) &
|
Install missing salttesting with pip | set -e
zypper --non-interactive in salt-master salt-minion salt-proxy
zypper --non-interactive in --oldpackage test-package=42:0.0
zypper --non-interactive up zypper libzypp
| set -e
zypper --non-interactive in salt-master salt-minion salt-proxy
zypper --non-interactive in --oldpackage test-package=42:0.0
zypper --non-interactive up zypper libzypp
pip install salttesting
|
Allow to specify options on the command line | #!/bin/bash
# Ensure a separate script is called in the event of an error
#
# Include in your builds via
# \curl -sSL https://raw.githubusercontent.com/codeship/scripts/master/utilties/check_url.sh > ${HOME}/bin/check_url && chmod u+x ${HOME}/bin/check_url
#
# then use the script in your tests like
# check_url "url_to_check"
URL=${@}
WGET_OPTIONS="--no-check-certificate --output-document=/dev/null"
TRIES=6
SLEEP=10
function retry {
local count=${1} && shift
local cmd=${@}
while [ $count -gt 0 ]; do
echo -e "Trying ($((${TRIES} - ${count} + 1)) of ${TRIES}) '${cmd}'"
${cmd} && break
count=$(($count - 1))
if [ ! $count -eq 0 ]; then
echo -e "Waiting ${SLEEP} seconds before trying again."
echo "------------------------------------------------------------------------------------------------------"
sleep "${SLEEP}"
fi
done
if [ $count -eq 0 ]; then
return 1
else
return 0
fi
}
retry "${TRIES}" "wget ${WGET_OPTIONS} ${URL}"
| #!/bin/bash
# Ensure a separate script is called in the event of an error
#
# Include in your builds via
# \curl -sSL https://raw.githubusercontent.com/codeship/scripts/master/utilties/check_url.sh > ${HOME}/bin/check_url && chmod u+x ${HOME}/bin/check_url
#
# then use the script in your tests like
# check_url "url_to_check"
URL=${@}
WGET_OPTIONS="--no-check-certificate --output-document=/dev/null"
TRIES=6
SLEEP=10
while getopts "t:w:o" opt; do
case $opt in
t)
TRIES=${OPTARG}
;;
w)
SLEEP=${OPTARG}
;;
o)
WGET_OPTIONS=${OPTARG}
;;
esac
done
function retry {
local count=${1} && shift
local cmd=${@}
while [ $count -gt 0 ]; do
echo -e "Trying ($((${TRIES} - ${count} + 1)) of ${TRIES}) '${cmd}'"
${cmd} && break
count=$(($count - 1))
if [ ! $count -eq 0 ]; then
echo -e "Waiting ${SLEEP} seconds before trying again."
echo "------------------------------------------------------------------------------------------------------"
sleep "${SLEEP}"
fi
done
if [ $count -eq 0 ]; then
return 1
else
return 0
fi
}
retry "${TRIES}" "wget ${WGET_OPTIONS} ${URL}"
|
Remove unneeded c function (replaced by z) | c() { cd ~/Sites/$1; }
_c() { _files -W ~/Sites -/; }
compdef _c c
# h() { cd ~/$1; }
# _h() { _files -W ~/ -/; }
# compdef _h h
export EDITOR='subl -w'
# autocorrect is more annoying than helpful
unsetopt correct_all
# a few aliases I like
alias b='bundle'
alias r='bin/rails'
alias s='subl'
alias ls='gls --color=auto'
alias lock='open ~/bin/Lock\ Computer.app'
alias devlog='echo "tail -f log/development.log" && tail -f log/development.log'
export PATH="/usr/local/sbin:$PATH"
export PATH="/usr/local/bin:$PATH"
# add plugin's bin directory to path
export PATH="$(dirname $0)/bin:$PATH"
export PATH="/Users/gerry/bin:$PATH"
# unlimited history
export HISTSIZE=1000000
source ~/.ssh/shell_vars
echo
fortune
cd ~
| export EDITOR='subl -w'
# autocorrect is more annoying than helpful
unsetopt correct_all
# a few aliases I like
alias b='bundle'
alias r='bin/rails'
alias s='subl'
alias ls='gls --color=auto'
alias lock='open ~/bin/Lock\ Computer.app'
alias devlog='echo "tail -f log/development.log" && tail -f log/development.log'
export PATH="/usr/local/sbin:$PATH"
export PATH="/usr/local/bin:$PATH"
# add plugin's bin directory to path
export PATH="$(dirname $0)/bin:$PATH"
export PATH="/Users/gerry/bin:$PATH"
# unlimited history
export HISTSIZE=1000000
source ~/.ssh/shell_vars
echo
fortune
cd ~
|
Add original robustness-test to joynr-cpp-base docker | #!/bin/bash
START=$(date +%s)
source /data/scripts/global.sh
log "ENVIRONMENT"
env
set -e
ulimit -c unlimited
log "running robustness-test-env"
cd /data/src/tests/robustness-test-env
./run-prov-cons-robustness-tests.sh -b /data/build/tests
log "running mqtt-cc-robustness-test"
cd /data/src/tests/robustness-test
./run-mqtt-cc-robustness-tests.sh -b /data/build/tests -s /data/src
END=$(date +%s)
DIFF=$(( $END - $START ))
log "C++ robustness tests time: $DIFF seconds"
| #!/bin/bash
START=$(date +%s)
source /data/scripts/global.sh
log "ENVIRONMENT"
env
set -e
ulimit -c unlimited
log "running robustness-test-env"
cd /data/src/tests/robustness-test-env
./run-prov-cons-robustness-tests.sh -b /data/build/tests
log "running mqtt-cc-robustness-test"
cd /data/src/tests/robustness-test
./run-mqtt-cc-robustness-tests.sh -b /data/build/tests -s /data/src
log "running robustness-test"
./run-robustness-tests.sh -b /data/build/tests -s /data/src
END=$(date +%s)
DIFF=$(( $END - $START ))
log "C++ robustness tests time: $DIFF seconds"
|
Change email of travis update bot | #!/bin/sh
# License: CC0 1.0 Universal
# https://creativecommons.org/publicdomain/zero/1.0/legalcode
set -e
[ "$TRAVIS_BRANCH" = master ]
[ "$TRAVIS_PULL_REQUEST" = false ]
eval SSH_KEY_TRAVIS_ID=aaae456e27e9
eval key=\$encrypted_${SSH_KEY_TRAVIS_ID}_key
eval iv=\$encrypted_${SSH_KEY_TRAVIS_ID}_iv
mkdir -p ~/.ssh
openssl aes-256-cbc -K $key -iv $iv -in scripts/travis-blog_os.enc -out ~/.ssh/id_rsa -d
chmod 600 ~/.ssh/id_rsa
git clone --branch gh-pages git@github.com:$TRAVIS_REPO_SLUG deploy_blog
cd deploy_blog
git config user.name "travis-update-bot"
git config user.email "nobody@example.com"
cp ../posts/* _posts/
cp ../pages/* ./
git add .
git commit -qm "Update blog to $TRAVIS_COMMIT"
git push -q origin gh-pages
| #!/bin/sh
# License: CC0 1.0 Universal
# https://creativecommons.org/publicdomain/zero/1.0/legalcode
set -e
[ "$TRAVIS_BRANCH" = master ]
[ "$TRAVIS_PULL_REQUEST" = false ]
eval SSH_KEY_TRAVIS_ID=aaae456e27e9
eval key=\$encrypted_${SSH_KEY_TRAVIS_ID}_key
eval iv=\$encrypted_${SSH_KEY_TRAVIS_ID}_iv
mkdir -p ~/.ssh
openssl aes-256-cbc -K $key -iv $iv -in scripts/travis-blog_os.enc -out ~/.ssh/id_rsa -d
chmod 600 ~/.ssh/id_rsa
git clone --branch gh-pages git@github.com:$TRAVIS_REPO_SLUG deploy_blog
cd deploy_blog
git config user.name "travis-update-bot"
git config user.email "travis-update-bot@phil-opp.com"
cp ../posts/* _posts/
cp ../pages/* ./
git add .
git commit -qm "Update blog to $TRAVIS_COMMIT"
git push -q origin gh-pages
|
Test new commands for osx | #!/bin/bash
make clean
g++ --version
if [ `uname` == Darwin ]; then
if mpic++ --show | grep -q "clang++"; then
# the openmpi package (and particularly the mpic++) from conda-forge is
# compiled to use clang++ (despl)
# and the current package need an openmpi version based on gcc to use the
# -fopenmpi version
sed -i.bak "s|mpic++|g++ -I$PREFIX/include -L$PREFIX/lib -lmpi_cxx -lmpi|g" compiler.mk
fi
ln -s $PREFIX/bin/g++ g++
fi
g++ --version
export CC=$PREFIX/bin/gcc
export CXX=$PREFIX/bin/g++
export INCLUDE_PATH="$PREFIX/include"
export LIBRARY_PATH="$PREFIX/lib"
export LD_LIBRARY_PATH="$PREFIX/lib"
make -C src/BuildGraph/Release READGZ=0
make all
mkdir -p $PREFIX/bin
tar xzf Disco_All_x86-Linux.tar.gz
cp Disco/buildG* $PREFIX/bin
cp Disco/fullsimplify $PREFIX/bin
cp Disco/parsimplify $PREFIX/bin
cp Disco/disco* $PREFIX/bin
cp Disco/run* $PREFIX/bin
| #!/bin/bash
make clean
g++ --version
$PREFIX/bin/g++ --version
if [ `uname` == Darwin ]; then
if mpic++ --show | grep -q "clang++"; then
# the openmpi package (and particularly the mpic++) from conda-forge is
# compiled to use clang++ (despl)
# and the current package need an openmpi version based on gcc to use the
# -fopenmpi version
sed -i.bak "s|mpic++|g++ -I$PREFIX/include -L$PREFIX/lib -lmpi_cxx -lmpi|g" compiler.mk
fi
ln -s $PREFIX/bin/g++ g++
which g++
fi
g++ --version
export CC=$PREFIX/bin/gcc
export CXX=$PREFIX/bin/g++
export INCLUDE_PATH="$PREFIX/include"
export LIBRARY_PATH="$PREFIX/lib"
export LD_LIBRARY_PATH="$PREFIX/lib"
make -C src/BuildGraph/Release READGZ=0
make all
mkdir -p $PREFIX/bin
tar xzf Disco_All_x86-Linux.tar.gz
cp Disco/buildG* $PREFIX/bin
cp Disco/fullsimplify $PREFIX/bin
cp Disco/parsimplify $PREFIX/bin
cp Disco/disco* $PREFIX/bin
cp Disco/run* $PREFIX/bin
|
Fix test discovery finding all tests | #!/bin/bash
PYTHONPATH=$PWD:$PWD/..${PYTHONPATH:+:$PYTHONPATH}
export PYTHONPATH
VERSION=`django-admin.py --version`
arrIN=(${VERSION//./ })
major=${arrIN[0]}
minor=${arrIN[1]}
ALL="core customuser basic alphanumeric slashless namespaced related validation gis gis_spatialite content_gfk authorization"
if [ $# -eq 0 ]; then
PYTESTPATHS=$ALL
elif [ $1 == '-h' ]; then
echo "Valid arguments are: $ALL"
else
PYTESTPATHS=$@
fi
for pytestpath in $PYTESTPATHS; do
IFS='.' read -r type type_remainder <<< "$pytestpath"
echo "** $type **"
module_name=$type
if [ $type == 'related' ]; then
module_name=${module_name}_resource
elif [ $type == 'gis_spatialite' ]; then
module_name='gis'
fi
test_name=$module_name
if [ -n "$type_remainder" ]; then
test_name=$test_name.$type_remainder
fi
if [ $type == 'gis' ]; then
createdb -T template_postgis tastypie.db
elif [ $type == 'gis_spatialite' ]; then
spatialite tastypie-spatialite.db "SELECT InitSpatialMetaData();"
fi
./manage_$type.py test $test_name.tests --traceback
echo; echo
done
| #!/bin/bash
PYTHONPATH=$PWD:$PWD/..${PYTHONPATH:+:$PYTHONPATH}
export PYTHONPATH
VERSION=`django-admin.py --version`
arrIN=(${VERSION//./ })
major=${arrIN[0]}
minor=${arrIN[1]}
ALL="core customuser basic alphanumeric slashless namespaced related validation gis gis_spatialite content_gfk authorization"
if [ $# -eq 0 ]; then
PYTESTPATHS=$ALL
elif [ $1 == '-h' ]; then
echo "Valid arguments are: $ALL"
else
PYTESTPATHS=$@
fi
for pytestpath in $PYTESTPATHS; do
IFS='.' read -r type type_remainder <<< "$pytestpath"
echo "** $type **"
module_name=$type
if [ $type == 'related' ]; then
module_name=${module_name}_resource
elif [ $type == 'gis_spatialite' ]; then
module_name='gis'
fi
test_name=$module_name
if [ -n "$type_remainder" ]; then
test_name=$test_name.$type_remainder
fi
if [ $type == 'gis' ]; then
createdb -T template_postgis tastypie.db
elif [ $type == 'gis_spatialite' ]; then
spatialite tastypie-spatialite.db "SELECT InitSpatialMetaData();"
fi
echo "./manage_$type.py test $test_name.tests --traceback -t $test_name"
./manage_$type.py test $test_name.tests --traceback -t $test_name
echo; echo
done
|
Add MySQL "mycli" wrapper, its awesome | #/bin/bash
if [[ ! $INSTALL_SCRIPT ]]; then
echo "(!) Error: You must use the ./install.sh script."
exit
fi
sudo apt-get install -y\
python-dev\
python-pip
# IMPORTANT: Do NOT have a trailing \ on the LAST item!
sudo pip install virtualenvwrapper fabric --upgrade
echo "(+) Complete! Run with $ python and $ pip" | #/bin/bash
if [[ ! $INSTALL_SCRIPT ]]; then
echo "(!) Error: You must use the ./install.sh script."
exit
fi
sudo apt-get install -y\
python-dev\
python-pip
# IMPORTANT: Do NOT have a trailing \ on the LAST item!
sudo pip install virtualenvwrapper fabric mycli --upgrade
echo "(+) Complete! Run with $ python and $ pip"
|
Fix 'Compinit isn't loaded, cannot do compdef replay' | zstyle ':completion:list-expand:*' extra-verbose yes
zstyle ':autocomplete:*' max-lines 30%
zstyle ':autocomplete:tab:*' insert-unambiguous yes
zstyle ':autocomplete:tab:*' widget-style menu-complete
zstyle ':autocomplete:tab:*' fzf-completion no
# zstyle ':autocomplete:*' magic off
# zstyle ':autocomplete:*' fuzzy-search off
# zstyle ':autocomplete:*' key-binding off
# zstyle ':autocomplete:*' config off
zstyle ':completion:*' tag-order '! history-words recent-directories recent-files' '-'
| zstyle ':completion:list-expand:*' extra-verbose yes
zstyle ':autocomplete:*' max-lines 30%
zstyle ':autocomplete:tab:*' insert-unambiguous yes
zstyle ':autocomplete:tab:*' widget-style menu-complete
zstyle ':autocomplete:tab:*' fzf-completion no
# zstyle ':autocomplete:*' magic off
# zstyle ':autocomplete:*' fuzzy-search off
# zstyle ':autocomplete:*' key-binding off
# zstyle ':autocomplete:*' config off
zstyle ':completion:*' tag-order '! history-words recent-directories recent-files' '-'
if ! builtin command -v compinit > /dev/null 2>&1; then
autoload -Uz compinit && compinit -u
fi
|
Create .config directory if it does not exist | #!/usr/bin/env bash
# The following options are the equivalent of set -eux
# exit the script if we run into errors (-e)
set -o errexit
# accessing an unset variable or parameter should cause an error (-u)
set -o nounset
# print a trace of commands (-x)
# set -x xtrace
# Trap non-normal exit signals: 1/HUP, 2/INT, 3/QUIT, 15/TERM, ERR
trap founderror 1 2 3 15 ERR
founderror() {
exit 1
}
exitscript() {
exit 0
}
##
# warn: Print a message to stderr.
# Usage: warn "message"
#
warn() {
printf 'warning: %s\n' "$@" >&2
}
##
# die: Print a message to stderr and exit with
# status code 1
# Usage: some_command || die "message"
# some_command && die "message"
#
die() {
warn "$1"
founderror
}
install_git() {
local readonly target=~/.config/git
[[ -e $target ]] && die "$target already exists"
ln -s "$srcdir/git" $target
}
declare -r srcdir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
install_git
exitscript
| #!/usr/bin/env bash
# The following options are the equivalent of set -eux
# exit the script if we run into errors (-e)
set -o errexit
# accessing an unset variable or parameter should cause an error (-u)
set -o nounset
# print a trace of commands (-x)
# set -x xtrace
# Trap non-normal exit signals: 1/HUP, 2/INT, 3/QUIT, 15/TERM, ERR
trap founderror 1 2 3 15 ERR
founderror() {
exit 1
}
exitscript() {
exit 0
}
##
# warn: Print a message to stderr.
# Usage: warn "message"
#
warn() {
printf 'warning: %s\n' "$@" >&2
}
##
# die: Print a message to stderr and exit with
# status code 1
# Usage: some_command || die "message"
# some_command && die "message"
#
die() {
warn "$1"
founderror
}
install_git() {
local readonly target=~/.config/git
[[ -e $target ]] && die "$target already exists"
mkdir -p ~/.config
ln -s "$srcdir/git" $target
}
declare -r srcdir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
install_git
exitscript
|
Install Miniconda2 in `/opt/conda2` instead. | #!/bin/bash
# Update apt-get.
apt-get update -y -q
# Install curl to download the miniconda setup script.
apt-get install -y curl
# Install VCS.
apt-get install -y git mercurial subversion
# Install bzip2 and tar. Needed for decompressing packages.
apt-get install -y bzip2 tar
# Install dependencies of conda's Qt4.
apt-get install -y libSM6 libXext6 libXrender1
# Clean out apt-get.
apt-get clean
# Download and configure conda.
cd /usr/share/miniconda
curl http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh > miniconda.sh
bash miniconda.sh -b -p /opt/conda
rm miniconda.sh
export PATH="/opt/conda/bin:${PATH}"
source activate root
conda config --set show_channel_urls True
# Install basic conda dependencies.
conda update -y --all
conda install -y pycrypto
conda install -y conda-build
conda install -y anaconda-client
conda install -y jinja2
# Install python bindings to DRMAA.
conda install -y drmaa
# Clean out all unneeded intermediates.
conda clean -yitps
| #!/bin/bash
# Update apt-get.
apt-get update -y -q
# Install curl to download the miniconda setup script.
apt-get install -y curl
# Install VCS.
apt-get install -y git mercurial subversion
# Install bzip2 and tar. Needed for decompressing packages.
apt-get install -y bzip2 tar
# Install dependencies of conda's Qt4.
apt-get install -y libSM6 libXext6 libXrender1
# Clean out apt-get.
apt-get clean
# Download and configure conda.
cd /usr/share/miniconda
curl http://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh > miniconda2.sh
bash miniconda2.sh -b -p /opt/conda2
rm miniconda2.sh
export PATH="/opt/conda/bin:${PATH}"
source activate root
conda config --set show_channel_urls True
# Install basic conda dependencies.
conda update -y --all
conda install -y pycrypto
conda install -y conda-build
conda install -y anaconda-client
conda install -y jinja2
# Install python bindings to DRMAA.
conda install -y drmaa
# Clean out all unneeded intermediates.
conda clean -yitps
|
Fix recursion issue with submodpath function | #!/usr/bin/env bash
submodpath() {
SUB_PATH=${1}
for subdir in $(ls $SUB_PATH)
do
config="$MOD_ROOT/$SUB_PATH/$subdir/config"
if [ -f $config ] ; then
echo "Found submodule $SUB_PATH/$subdir"
SUBMODULES+=("$SUB_PATH/$subdir")
else
submodpath "$SUB_PATH/$subdir"
fi
done
}
if [ ! -d .git ] ; then
echo "No .git directory found"
exit 1
fi
git stash && git pull && git submodule update --init
if [ ! -d .git/modules ] ; then
echo "No modules found"
exit 0
fi
MOD_ROOT=".git/modules"
TO_ADD=()
SUBMODULES=()
for submod in $(ls $MOD_ROOT)
do
TO_ADD+=($submod)
submodpath "$submod"
done
for module in "${SUBMODULES[@]}"
do
echo "removing submodule $module"
git rm --cached "$module"
if [ -f "$module/.git" ] ; then
rm "$module/.git"
else
echo "No .git ref-file found"
fi
done
if [ -f .gitmodules ] ; then
echo "Regmove .gitmodules file"
rm .gitmodules
fi
echo "Adding submodules to main repo"
for add_path in "${TO_ADD[@]}"
do
echo "Add submodule(s) in path $add_path"
git add "$add_path"
done
echo "Done... now commit && push!"
exit 0
| #!/usr/bin/env bash
submodpath() {
SUB_PATH=${1}
for subdir in $(ls "$MOD_ROOT/$SUB_PATH")
do
config="$MOD_ROOT/$SUB_PATH/$subdir/config"
if [ -f $config ] ; then
echo "Found submodule $SUB_PATH/$subdir"
SUBMODULES+=("$SUB_PATH/$subdir")
else
submodpath "$SUB_PATH/$subdir"
fi
done
}
if [ ! -d .git ] ; then
echo "No .git directory found"
exit 1
fi
git stash && git pull && git submodule update --init
if [ ! -d .git/modules ] ; then
echo "No modules found"
exit 0
fi
MOD_ROOT=".git/modules"
TO_ADD=()
SUBMODULES=()
for submod in $(ls $MOD_ROOT)
do
TO_ADD+=($submod)
submodpath "$submod"
done
for module in "${SUBMODULES[@]}"
do
echo "removing submodule $module"
git rm --cached "$module"
if [ -f "$module/.git" ] ; then
rm "$module/.git"
else
echo "No .git ref-file found"
fi
done
if [ -f .gitmodules ] ; then
echo "Regmove .gitmodules file"
rm .gitmodules
fi
echo "Adding submodules to main repo"
for add_path in "${TO_ADD[@]}"
do
echo "Add submodule(s) in path $add_path"
git add "$add_path"
done
echo "Done... now commit && push!"
exit 0
|
Add video to kokoro test | #!/bin/bash
set -x -e -u -o pipefail
# Temporary workaround for a known bundler+docker issue:
# https://github.com/bundler/bundler/issues/6154
export BUNDLE_GEMFILE=
for required_variable in \
GOOGLE_CLOUD_PROJECT \
GOOGLE_APPLICATION_CREDENTIALS \
GOOGLE_CLOUD_STORAGE_BUCKET \
; do
if [[ -z "${!required_variable}" ]]; then
echo "Must set $required_variable"
exit 1
fi
done
script_directory="$(dirname "$(realpath "$0")")"
repo_directory="$(dirname "$script_directory")"
# Capture failures
exit_status=0 # everything passed
function set_failed_status {
exit_status=1
}
# Print out Ruby version
ruby --version
# leave this until all tests are added
for product in \
auth \
cdn \
kms \
iot \
language \
translate \
speech \
vision
do
# Run Tests
echo "[$product]"
pushd "$repo_directory/$product/"
(bundle install && bundle exec rspec --format documentation) || set_failed_status
popd
done
exit $exit_status
| #!/bin/bash
set -x -e -u -o pipefail
# Temporary workaround for a known bundler+docker issue:
# https://github.com/bundler/bundler/issues/6154
export BUNDLE_GEMFILE=
for required_variable in \
GOOGLE_CLOUD_PROJECT \
GOOGLE_APPLICATION_CREDENTIALS \
GOOGLE_CLOUD_STORAGE_BUCKET \
; do
if [[ -z "${!required_variable}" ]]; then
echo "Must set $required_variable"
exit 1
fi
done
script_directory="$(dirname "$(realpath "$0")")"
repo_directory="$(dirname "$script_directory")"
# Capture failures
exit_status=0 # everything passed
function set_failed_status {
exit_status=1
}
# Print out Ruby version
ruby --version
# leave this until all tests are added
for product in \
auth \
cdn \
kms \
iot \
language \
translate \
speech \
video \
vision
do
# Run Tests
echo "[$product]"
pushd "$repo_directory/$product/"
(bundle install && bundle exec rspec --format documentation) || set_failed_status
popd
done
exit $exit_status
|
Use printf so that a newline isn't appended | #!/bin/bash -x
# This removes rbenv shims from the PATH where there is no
# .ruby-version file. This is because certain gems call their
# respective tasks with ruby -S which causes the following error to
# appear: ruby: no Ruby script found in input (LoadError).
if [ ! -f .ruby-version ]; then
export PATH=$(echo $PATH | awk 'BEGIN { RS=":"; ORS=":" } !/rbenv/' | sed 's/:$//')
fi
export USE_SIMPLECOV=true
export RAILS_ENV=test
bundle install --path "${HOME}/bundles/${JOB_NAME}" --deployment
bundle exec rake stats
bundle exec rake db:drop db:create db:schema:load
bundle exec rake --trace
RESULT=$?
exit $RESULT
| #!/bin/bash -x
# This removes rbenv shims from the PATH where there is no
# .ruby-version file. This is because certain gems call their
# respective tasks with ruby -S which causes the following error to
# appear: ruby: no Ruby script found in input (LoadError).
if [ ! -f .ruby-version ]; then
export PATH=$(printf $PATH | awk 'BEGIN { RS=":"; ORS=":" } !/rbenv/' | sed 's/:$//')
fi
export USE_SIMPLECOV=true
export RAILS_ENV=test
bundle install --path "${HOME}/bundles/${JOB_NAME}" --deployment
bundle exec rake stats
bundle exec rake db:drop db:create db:schema:load
bundle exec rake --trace
RESULT=$?
exit $RESULT
|
Add shield as optional bind to redis | pkg_name=redis
pkg_origin=starkandwayne
pkg_version=3.2.8
pkg_description="Persistent key-value database, with built-in net interface"
pkg_upstream_url=http://redis.io/
pkg_license=('BSD-3-Clause')
pkg_maintainer="Justin Carter <justin@starkandwayne.com>"
pkg_source=http://download.redis.io/releases/${pkg_name}-${pkg_version}.tar.gz
pkg_shasum=61b373c23d18e6cc752a69d5ab7f676c6216dc2853e46750a8c4ed791d68482c
pkg_bin_dirs=(bin)
pkg_build_deps=(core/make core/gcc)
pkg_deps=(starkandwayne/shield-agent core/libarchive core/glibc)
pkg_exports=(
[port]=port
)
pkg_exposes=(port)
pkg_svc_user="root"
pkg_svc_group="$pkg_svc_user"
do_build() {
make
}
| pkg_name=redis
pkg_origin=starkandwayne
pkg_version=3.2.8
pkg_description="Persistent key-value database, with built-in net interface"
pkg_upstream_url=http://redis.io/
pkg_license=('BSD-3-Clause')
pkg_maintainer="Justin Carter <justin@starkandwayne.com>"
pkg_source=http://download.redis.io/releases/${pkg_name}-${pkg_version}.tar.gz
pkg_shasum=61b373c23d18e6cc752a69d5ab7f676c6216dc2853e46750a8c4ed791d68482c
pkg_bin_dirs=(bin)
pkg_build_deps=(core/make core/gcc)
pkg_deps=(starkandwayne/shield-agent core/libarchive core/glibc)
pkg_exports=(
[port]=port
)
pkg_exposes=(port)
pkg_binds_optional=(
[shield]="port provisioning_key"
)
pkg_svc_user="root"
pkg_svc_group="$pkg_svc_user"
do_build() {
make
}
|
Use `type` to detect program. | #!/bin/sh
set -e
FILES="
asn1.js oids.js base64.js hex.js int10.js dom.js test.js
index.css index.js index.html
README.md LICENSE
update.sh check.sh
"
sha256sum </dev/null >/dev/null 2>&1 && SHA256=sha256sum || SHA256=gsha256sum
$SHA256 -t $FILES | gpg --clearsign > sha256sums.asc
7z a -tzip -mx=9 asn1js.zip $FILES sha256sums.asc
rsync -Pvrtz asn1js.zip $FILES lapo.it:www/asn1js/
| #!/bin/sh
set -e
FILES="
asn1.js oids.js base64.js hex.js int10.js dom.js test.js
index.css index.js index.html
README.md LICENSE
update.sh check.sh
"
type gsha256sum >/dev/null && SHA256=gsha256sum || SHA256=sha256sum
$SHA256 -t $FILES | gpg --clearsign > sha256sums.asc
7z a -tzip -mx=9 asn1js.zip $FILES sha256sums.asc
rsync -Pvrtz asn1js.zip $FILES lapo.it:www/asn1js/
|
Fix path to aws cli | /usr/local/bin/aws s3 sync /var/jenkins_home s3://$S3_BUCKET --sse AES256 --debug
| /usr/bin/aws s3 sync /var/jenkins_home s3://$S3_BUCKET --sse AES256 --debug
|
Fix URL to gi (gitignore) function | function gi() { curl http://gitignore.io/api/$@ ;}
_gitignireio_get_command_list() {
curl -s http://gitignore.io/api/list | tr "," "\n"
}
_gitignireio () {
compset -P '*,'
compadd -S '' `_gitignireio_get_command_list`
}
compdef _gitignireio gi | function gi() { curl http://www.gitignore.io/api/$@ ;}
_gitignireio_get_command_list() {
curl -s http://gitignore.io/api/list | tr "," "\n"
}
_gitignireio () {
compset -P '*,'
compadd -S '' `_gitignireio_get_command_list`
}
compdef _gitignireio gi |
Add some new brew casks | #!/bin/bash
echo
echo "Installing brew casks"
brew cask install alfred
brew cask install android-studio
brew cask install appcleaner
brew cask install atom
brew cask install carbon-copy-cloner
brew cask install ccmenu
brew cask install dash
brew cask install dropbox
brew cask install firefox
brew cask install flux
brew cask install handbrake
brew cask install Caskroom/versions/intellij-idea-ce
brew cask install kindle
brew cask install macdown
brew cask install opera
brew cask install skitch
brew cask install steam
brew cask install textwrangler
brew cask install visual-studio-code
brew cask install vlc
| #!/bin/bash
echo
echo "Installing brew casks"
brew cask install alfred
brew cask install android-studio
brew cask install appcleaner
brew cask install atom
brew cask install carbon-copy-cloner
brew cask install ccmenu
brew cask install cyberduck
brew cask install dash
brew cask install dropbox
brew cask install firefox
brew cask install flux
brew cask install handbrake
brew cask install Caskroom/versions/intellij-idea-ce
brew cask install kindle
brew cask install macdown
brew cask install opera
brew cask install skitch
brew cask install steam
brew cask install textwrangler
brew cask install visual-studio-code
brew cask install vlc
brew cask install xscope
|
Fix the test suite to execute properly | #!/usr/bin/env bash
set -e
cd `dirname $0`
TEST_DIR=$PWD
export PATH=$PWD/../bin:$PATH
direnv_eval() {
eval `direnv export`
}
test_start() {
cd "$TEST_DIR/scenarios/$1"
echo "## Testing $1 ##"
}
test_stop() {
cd $TEST_DIR
direnv_eval
}
### RUN ###
direnv_eval
test_start base
direnv_eval
test "$HELLO" = "world"
MTIME=$DIRENV_MTIME
direnv_eval
test "$MTIME" = "$DIRENV_MTIME"
sleep 1
touch .envrc
direnv_eval
test "$MTIME" != "$DIRENV_MTIME"
cd ..
direnv_eval
echo "${HELLO}"
test -z "${HELLO}"
test_stop
test_start inherit
direnv_eval
test "$HELLO" = "world"
test_stop
test_start "ruby-layout"
direnv_eval
test "$GEM_HOME" != ""
test_stop
# Make sure directories with spaces are fine
test_start "space dir"
direnv_eval
test "$SPACE_DIR" = "true"
test_stop
| #!/usr/bin/env bash
set -e
cd `dirname $0`
TEST_DIR=$PWD
export PATH=$PWD/../bin:$PATH
# Remove things we could have in our env
unset GOPATH
unset GEM_HOME
# And reset the direnv loading if any
unset DIRENV_LIBEXEC
unset DIRENV_BACKUP
unset DIRENV_MTIME
direnv_eval() {
eval `direnv export`
}
test_start() {
cd "$TEST_DIR/scenarios/$1"
echo "## Testing $1 ##"
}
test_stop() {
cd $TEST_DIR
direnv_eval
}
### RUN ###
direnv_eval
test_start base
direnv_eval
test "$HELLO" = "world"
MTIME=$DIRENV_MTIME
direnv_eval
test "$MTIME" = "$DIRENV_MTIME"
sleep 1
touch .envrc
direnv_eval
test "$MTIME" != "$DIRENV_MTIME"
cd ..
direnv_eval
echo "${HELLO}"
test -z "${HELLO}"
test_stop
test_start inherit
direnv_eval
test "$HELLO" = "world"
test_stop
test_start "ruby-layout"
direnv_eval
test "$GEM_HOME" != ""
test_stop
# Make sure directories with spaces are fine
test_start "space dir"
direnv_eval
test "$SPACE_DIR" = "true"
test_stop
|
Create static folder as root | #!/bin/bash
set -e
gosu "${UWSGI_USER}" mkdir -p "${KOBOCAT_SRC_DIR}/onadata/static"
echo "Collecting static files..."
gosu "${UWSGI_USER}" "${VIRTUAL_ENV}/bin/python" manage.py collectstatic -v 0 --noinput
echo "Done"
# `chown -R` becomes very slow once a fair amount of media has been collected,
# so reset ownership of the media directory *only*. See #379, #449
echo "Resetting ownership of media directory..."
chown "${UWSGI_USER}" "${KOBOCAT_SRC_DIR}/media"
echo "Done."
echo '%%%%%%% NOTICE %%%%%%%'
echo '% To avoid long delays, we no longer reset ownership *recursively*'
echo '% every time this container starts. If you have trouble with'
echo '% permissions, please run the following command inside the'
echo '% KoBoCAT container:'
echo "% chown -R \"${UWSGI_USER}\" \"${KOBOCAT_SRC_DIR}\""
echo '%%%%%%%%%%%%%%%%%%%%%%'
echo "Syncing to nginx folder..."
rsync -aq --delete --chown=www-data "${KOBOCAT_SRC_DIR}/onadata/static/" "${NGINX_STATIC_DIR}/"
echo "Done"
| #!/bin/bash
set -e
mkdir -p "${KOBOCAT_SRC_DIR}/onadata/static"
chown -R "${UWSGI_USER}" "${KOBOCAT_SRC_DIR}/onadata/static"
echo "Collecting static files..."
gosu "${UWSGI_USER}" "${VIRTUAL_ENV}/bin/python" manage.py collectstatic -v 0 --noinput
echo "Done"
# `chown -R` becomes very slow once a fair amount of media has been collected,
# so reset ownership of the media directory *only*. See #379, #449
echo "Resetting ownership of media directory..."
chown "${UWSGI_USER}" "${KOBOCAT_SRC_DIR}/media"
echo "Done."
echo '%%%%%%% NOTICE %%%%%%%'
echo '% To avoid long delays, we no longer reset ownership *recursively*'
echo '% every time this container starts. If you have trouble with'
echo '% permissions, please run the following command inside the'
echo '% KoBoCAT container:'
echo "% chown -R \"${UWSGI_USER}\" \"${KOBOCAT_SRC_DIR}\""
echo '%%%%%%%%%%%%%%%%%%%%%%'
echo "Syncing to nginx folder..."
rsync -aq --delete --chown=www-data "${KOBOCAT_SRC_DIR}/onadata/static/" "${NGINX_STATIC_DIR}/"
echo "Done"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.