Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Fix Jenkins script to exit on error | RUNTIME_NAME="python"
CANDIDATE_NAME=`date +%Y-%m-%d_%H_%M`
echo "CANDIDATE_NAME:${CANDIDATE_NAME}"
IMAGE_NAME="${DOCKER_NAMESPACE}/${RUNTIME_NAME}:${CANDIDATE_NAME}"
export IMAGE_NAME
export FORCE_REBUILD
make build
if [ "${UPLOAD_TO_STAGING}" = "true" ]; then
STAGING="${DOCKER_NAMESPACE}/${RUNTIME_NAME}:staging"
docker rmi "${STAGING}" 2>/dev/null || true # Ignore if tag not present
docker tag "${IMAGE_NAME}" "${STAGING}"
gcloud docker push "${STAGING}"
fi
| #!/bin/sh
set -eu
RUNTIME_NAME="python"
CANDIDATE_NAME=`date +%Y-%m-%d_%H_%M`
echo "CANDIDATE_NAME:${CANDIDATE_NAME}"
IMAGE_NAME="${DOCKER_NAMESPACE}/${RUNTIME_NAME}:${CANDIDATE_NAME}"
export IMAGE_NAME
export FORCE_REBUILD
make build
if [ "${UPLOAD_TO_STAGING}" = "true" ]; then
STAGING="${DOCKER_NAMESPACE}/${RUNTIME_NAME}:staging"
docker rmi "${STAGING}" 2>/dev/null || true # Ignore if tag not present
docker tag "${IMAGE_NAME}" "${STAGING}"
gcloud docker push "${STAGING}"
fi
|
Remove old code for now. | #!/bin/bash
# Update Debian
sudo apt-get update && sudo apt-get dist-upgrade
tempfoo=$(basename $0)
TMPFILE=$(mktemp /tmp/${tempfoo}.XXXXXX) || exit 1
# Update Python packages
/usr/bin/pip freeze | cut -f1 -d= | \
egrep -v "(git-remote-helpers|Brlapi|GnuPGInterface|Magic-file-extensions|apt-xapian-index|dpkt|reportbug)" > $TMPFILE
/usr/bin/pip install --upgrade -r $TMPFILE
rm -f $TMPFILE
# Update Cuckoo
cd ~/src/cuckoo/conf
test -d ../../conf/cuckoo/ || mkdir -p ../../conf/cuckoo/
cp auxiliary.conf cuckoo.conf kvm.conf reporting.conf ../../conf/cuckoo/
git reset ORIG_HEAD --hard
git pull
cp ../../conf/cuckoo/* .
# Update Volatility
cd ~/src/volatility
git pull
sudo make install
| #!/bin/bash
# Update Debian
sudo apt-get update && sudo apt-get dist-upgrade
|
Move to correct propagation of all shell arguments | mv .babelrc_ .babelrc
babel-node $1 "$2"
mv .babelrc .babelrc_
| mv .babelrc_ .babelrc
babel-node "$@"
mv .babelrc .babelrc_
|
Fix code to hash correctly | #!/bin/sh
source "./config"
if [ -z $OUTDIR ] ; then
OUTDIR=.
fi
## setup
OUT=${OUTDIR}/hash
mkdir -p ${OUT}
o Collecting hashes...
echo OS Type: nix >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
echo Computername: $hostname >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
echo Time stamp: `date` >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
echo ======================MD5 HASHES===================== >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
find $HASH_DIRS -type f \( ! -name Hashes.txt \)-exec md5sum {} >> $OUT/Hashes.txt \;
| #!/bin/sh
source "./config"
if [ -z $OUTDIR ] ; then
OUTDIR=.
fi
## setup
OUT=${OUTDIR}/hash
mkdir -p ${OUT}
o Collecting hashes...
echo OS Type: nix >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
echo Computername: $hostname >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
echo Time stamp: `date` >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
echo ======================MD5 HASHES===================== >> $OUT/Hashes.txt
echo >> $OUT/Hashes.txt
for f in $HASH_DIRS; do
echo $f
find $f -type f | xargs -d '\n' md5sum > $OUT/md5-hashes.txt
find $f -type f | xargs -d '\n' sha1sum > $OUT/sha1-hashes.txt
done
|
Remove rails plugin from ohmyhzsh | ZSH=$HOME/.oh-my-zsh
POWERLEVEL9K_MODE='nerdfont-complete'
ZSH_THEME="powerlevel9k/powerlevel9k"
COMPLETION_WAITING_DOTS="true"
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
plugins=(colored-man-pages command-not-found docker docker-compose heroku history-substring-search osx rails vi-mode web-search z)
source $ZSH/oh-my-zsh.sh
| ZSH=$HOME/.oh-my-zsh
POWERLEVEL9K_MODE='nerdfont-complete'
ZSH_THEME="powerlevel9k/powerlevel9k"
COMPLETION_WAITING_DOTS="true"
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
plugins=(colored-man-pages command-not-found docker docker-compose heroku history-substring-search osx vi-mode web-search z)
source $ZSH/oh-my-zsh.sh
|
Exclude maps04 from -sibling test | # Make 3 iteration of dump/restore for each test
set -e
source `dirname $0`/criu-lib.sh
prep
mount_tmpfs_to_dump
./test/zdtm.py run --all --report report --sibling --parallel 4 || fail
| # Make 3 iteration of dump/restore for each test
set -e
source `dirname $0`/criu-lib.sh
prep
mount_tmpfs_to_dump
./test/zdtm.py run --all --report report --sibling --parallel 4 -x 'maps04' || fail
|
Enable memory and swap accounting on Raspberry Pi | rootfs(){
# Allow ssh connections by root to this machine
echo "PermitRootLogin yes" >> $ROOT/etc/ssh/sshd_config
# Copy current source
mkdir $ROOT/etc/kubernetes/source
cp -r $PROJROOT $ROOT/etc/kubernetes/source
# Remove the .sh
mv $ROOT/usr/bin/kube-config.sh $ROOT/usr/bin/kube-config
# Make the docker dropin directory
mkdir -p $ROOT/usr/lib/systemd/system/docker.service.d
# Copy the addons
mkdir -p $ROOT/etc/kubernetes/addons
cp -r $PROJROOT/addons/k8s/* $ROOT/etc/kubernetes/addons/
# Parallella patch. Disable overlay, because linux 3.14 doesn't have overlay support
if [[ $MACHINENAME == "parallella" ]]; then
sed -e "s@-s overlay@@" -i $ROOT/etc/kubernetes/dynamic-dropins/docker-flannel.conf
sed -e "s@-s overlay@@" -i $ROOT/etc/kubernetes/dynamic-dropins/docker-overlay.conf
fi
}
| rootfs(){
# Allow ssh connections by root to this machine
echo "PermitRootLogin yes" >> $ROOT/etc/ssh/sshd_config
# Copy current source
mkdir $ROOT/etc/kubernetes/source
cp -r $PROJROOT $ROOT/etc/kubernetes/source
# Remove the .sh
mv $ROOT/usr/bin/kube-config.sh $ROOT/usr/bin/kube-config
# Make the docker dropin directory
mkdir -p $ROOT/usr/lib/systemd/system/docker.service.d
# Copy the addons
mkdir -p $ROOT/etc/kubernetes/addons
cp -r $PROJROOT/addons/k8s/* $ROOT/etc/kubernetes/addons/
# Parallella patch. Disable overlay, because linux 3.14 doesn't have overlay support
if [[ $MACHINENAME == "parallella" ]]; then
sed -e "s@-s overlay@@" -i $ROOT/etc/kubernetes/dynamic-dropins/docker-flannel.conf
sed -e "s@-s overlay@@" -i $ROOT/etc/kubernetes/dynamic-dropins/docker-overlay.conf
elif [[ $MACHINENAME == "rpi" || $MACHINENAME == "rpi-2" ]]; then
# Enable memory and swap accounting
sed -e "s@console=tty1@console=tty1 cgroup_enable=memory swapaccount=1@" -i $BOOT/cmdline.txt
fi
}
|
Fix what we pass to `weave launch` | #!/bin/sh
cat << ENVIRON | sudo tee /etc/`hostname -s`.env
WEAVE_LAUNCH_ARGS="$@"
WEAVE_LAUNCH_DNS_ARGS="10.10.2.2$1/16"
SPARK_NODE_ADDR="10.10.1.3$1/24"
SPARK_NODE_NAME="spark-worker-aws-$1.weave.local"
SPARK_CONTAINER="errordeveloper/weave-spark-worker-minimal:latest"
SPARK_CONTAINER_ARGS="spark://spark-master-gce.weave.local:7077"
ELASTICSEARCH_NODE_ADDR="10.10.1.4$1/24"
ELASTICSEARCH_NODE_NAME="elasticsearch-aws-$1.weave.local"
ELASTICSEARCH_CONTAINER="errordeveloper/weave-elasticsearch-minimal:latest"
ENVIRON
| #!/bin/sh
i=$1
shift 1
cat << ENVIRON | sudo tee /etc/`hostname -s`.env
WEAVE_LAUNCH_ARGS="$@"
WEAVE_LAUNCH_DNS_ARGS="10.10.2.2$i/16"
SPARK_NODE_ADDR="10.10.1.3$i/24"
SPARK_NODE_NAME="spark-worker-aws-$i.weave.local"
SPARK_CONTAINER="errordeveloper/weave-spark-worker-minimal:latest"
SPARK_CONTAINER_ARGS="spark://spark-master-gce.weave.local:7077"
ELASTICSEARCH_NODE_ADDR="10.10.1.4$i/24"
ELASTICSEARCH_NODE_NAME="elasticsearch-aws-$i.weave.local"
ELASTICSEARCH_CONTAINER="errordeveloper/weave-elasticsearch-minimal:latest"
ENVIRON
|
Install gdbm on Linux for Travis 18.04 bionic. | #!/bin/bash
sudo apt-get update
sudo apt-get install -y libc++-dev libc++abi-dev bsdtar libedit-dev gdbm
| #!/bin/bash
sudo apt-get update
sudo apt-get install -y libc++-dev libc++abi-dev bsdtar libedit-dev libgdbm-dev
|
Remove the dependency on the JQ tool | #!/usr/bin/env bash
set -e
if [ $# -eq 0 ]
then
echo "usage: promote-from-mock-to-stage.sh <stage db password>"
exit
fi
# This script requires the 'jq' command line tool
brew install jq
# Grab the MOCK build
git checkout deploy-to-mock
git pull --rebase
# Push the MOCK build to STAGE
git push --force origin origin/deploy-to-mock:deploy-to-stage
# Wait for STAGE deploy to finish
GIT_REF=`jq -r '.gitRef' build-info.json` WAIT_DURATION=45000 APP_BASE_URL=http://csra-stage.hmpps.dsd.io/health yarn wait-for-deploy
# Run the E2E tests against STAGE
DB_URI_TESTS=mssql://csra:$1@csra-stage.database.windows.net:1433/csra-stage APP_BASE_URL=https://csra-stage.hmpps.dsd.io yarn test:e2e
# Switch back to master branch
git checkout master
| #!/usr/bin/env bash
set -e
if [ $# -eq 0 ]
then
echo "usage: promote-from-mock-to-stage.sh <stage db password>"
exit
fi
# Grab the MOCK build
git checkout deploy-to-mock
git pull --rebase
# Push the MOCK build to STAGE
git push --force origin origin/deploy-to-mock:deploy-to-stage
# Wait for STAGE deploy to finish
GIT_REF=`cat build-info.json | python -c 'import json,sys;print json.load(sys.stdin)["gitRef"]'`\
WAIT_DURATION=45000 APP_BASE_URL=http://csra-stage.hmpps.dsd.io/health yarn wait-for-deploy
# Run the E2E tests against STAGE
DB_URI_TESTS=mssql://csra:$1@csra-stage.database.windows.net:1433/csra-stage\
APP_BASE_URL=https://csra-stage.hmpps.dsd.io yarn test:e2e
# Switch back to master branch
git checkout master
|
Fix PATH, ./bin can cause infinite loops of pain | export PATH="./bin:$ZSH/bin:/usr/local/bin:/usr/local/sbin:$PATH"
export MANPATH="/usr/local/man:/usr/local/mysql/man:/usr/local/git/man:$MANPATH"
| export PATH="$ZSH/bin:/usr/local/bin:/usr/local/sbin:$PATH"
export MANPATH="/usr/local/man:/usr/local/mysql/man:/usr/local/git/man:$MANPATH"
|
Set upstream and pull before push | #!/bin/sh
# Bash function for doing things as part of Travis build
#
# Why? mostly because Travis deployment scripts need to be a
# a single command https://docs.travis-ci.com/user/deployment/script/
checkout_pages() {
git config --global user.email $GIT_AUTHOR_EMAIL
git config --global user.name $GIT_AUTHOR_NAME
git checkout -b gh-pages
}
push_pages() {
git remote add origin-pages https://${GITHUB_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git > /dev/null 2>&1
git push --quiet --set-upstream origin-pages gh-pages
}
commit_new_docs() {
mv docs docs-new
git rm -rf docs
mv docs-new docs
git add -f --all docs
git commit --message "$1"
}
deploy_typescript_docs_to_pages() {
checkout_pages
cp public/packages-index.html index.html
git add index.html
git commit --message "docs(Package index): Update"
cd ts
commit_new_docs "docs(Typescript): Update"
push_pages
}
deploy_python_docs_to_pages() {
checkout_pages
commit_new_docs "docs(Python): Update"
push_pages
}
deploy_r_docs_to_pages() {
checkout_pages
commit_new_docs "docs(R): Update"
push_pages
}
# Call one of the functions as specified by the first arg
$1
| #!/bin/sh
# Bash function for doing things as part of Travis build
#
# Why? mostly because Travis deployment scripts need to be a
# a single command https://docs.travis-ci.com/user/deployment/script/
checkout_pages() {
git config --global user.email $GIT_AUTHOR_EMAIL
git config --global user.name $GIT_AUTHOR_NAME
git checkout -b gh-pages
}
push_pages() {
git remote add origin-pages https://${GITHUB_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git > /dev/null 2>&1
git fetch origin-pages
git branch --set-upstream-to origin-pages/gh-pages
git pull --rebase --autostash
git push --quiet
}
commit_new_docs() {
mv docs docs-new
git rm -rf docs
mv docs-new docs
git add -f --all docs
git commit --message "$1"
}
deploy_typescript_docs_to_pages() {
checkout_pages
cp public/packages-index.html index.html
git add index.html
git commit --message "docs(Package index): Update"
cd ts
commit_new_docs "docs(Typescript): Update"
push_pages
}
deploy_python_docs_to_pages() {
checkout_pages
commit_new_docs "docs(Python): Update"
push_pages
}
deploy_r_docs_to_pages() {
checkout_pages
commit_new_docs "docs(R): Update"
push_pages
}
# Call one of the functions as specified by the first arg
$1
|
Modify to be able to run regardless of current directory | #!/bin/bash
. ohai/find-ruby.sh
$RUBY_BIN ohai/ohai2nad.rb
| #!/bin/bash
# Need to figure out our script dir, resolving symlinks.
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
SRCDIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
. $SRCDIR/find-ruby.sh
$RUBY_BIN $SRCDIR/ohai2nad.rb
|
Replace xcsel function with just an alias | #xc function courtesy of http://gist.github.com/subdigital/5420709
function xc {
local xcode_proj
xcode_proj=(*.{xcworkspace,xcodeproj}(N))
if [[ ${#xcode_proj} -eq 0 ]]; then
echo "No xcworkspace/xcodeproj file found in the current directory."
return 1
else
echo "Found ${xcode_proj[1]}"
open "${xcode_proj[1]}"
fi
}
function xcsel {
sudo xcode-select --switch "$*"
}
alias xcb='xcodebuild'
alias xcp='xcode-select --print-path'
alias xcdd='rm -rf ~/Library/Developer/Xcode/DerivedData/*'
if [[ -d $(xcode-select -p)/Platforms/iPhoneSimulator.platform/Developer/Applications/iPhone\ Simulator.app ]]; then
alias simulator='open $(xcode-select -p)/Platforms/iPhoneSimulator.platform/Developer/Applications/iPhone\ Simulator.app'
else
alias simulator='open $(xcode-select -p)/Applications/iOS\ Simulator.app'
fi
| #xc function courtesy of http://gist.github.com/subdigital/5420709
function xc {
local xcode_proj
xcode_proj=(*.{xcworkspace,xcodeproj}(N))
if [[ ${#xcode_proj} -eq 0 ]]; then
echo "No xcworkspace/xcodeproj file found in the current directory."
return 1
else
echo "Found ${xcode_proj[1]}"
open "${xcode_proj[1]}"
fi
}
alias xcsel='sudo xcode-select --switch'
alias xcb='xcodebuild'
alias xcp='xcode-select --print-path'
alias xcdd='rm -rf ~/Library/Developer/Xcode/DerivedData/*'
if [[ -d $(xcode-select -p)/Platforms/iPhoneSimulator.platform/Developer/Applications/iPhone\ Simulator.app ]]; then
alias simulator='open $(xcode-select -p)/Platforms/iPhoneSimulator.platform/Developer/Applications/iPhone\ Simulator.app'
else
alias simulator='open $(xcode-select -p)/Applications/iOS\ Simulator.app'
fi
|
Switch to nmap instead of arp-scan | #!/usr/bin/env bash
# -----------------------------------------------------------------------------
# Info:
# Miroslav Vidovic
# local_network.sh
# 30.11.2016.-20:01:36
# -----------------------------------------------------------------------------
# Description:
# Scan the local network for active IP addresses
# Usage:
#
# -----------------------------------------------------------------------------
# Script:
declare -a IPs=($(sudo arp-scan --localnet --numeric --quiet --ignoredups | grep -E '([a-f0-9]{2}:){5}[a-f0-9]{2}' | awk '{print $1}'))
for ip in "${IPs[@]}"; do
echo $ip
done
exit 0
| #!/usr/bin/env bash
# -----------------------------------------------------------------------------
# Info:
# Miroslav Vidovic
# local_network.sh
# 30.11.2016.-20:01:36
# -----------------------------------------------------------------------------
# Description:
# Scan the local network for active IP addresses.
# Usage:
#
# -----------------------------------------------------------------------------
# Script:
# Check if nmap is available
nmap_check(){
hash nmap 2>/dev/null || { echo >&2 "This script requires nmap to run correctly. "; \
exit 1; }
}
# Returns the current machine ip address
# @returns IP
my_ip(){
local myip
myip=$(ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1')
echo "$myip"
}
# Scans the local network for active IP addresses and store them in an array
# @returns (array) ips - array of active ip addresses
active_ips(){
# Active ip addresses array
declare -a ips=($(nmap -nsP 192.168.1.0/24 2>/dev/null -oG - | grep "Up$" | awk '{printf "%s ", $2}'))
echo "${ips[@]}"
}
main(){
nmap_check
local myip=$(my_ip)
local IPs=($(active_ips))
local green_foreground="$(tput setaf 2)"
local color_reset="$(tput sgr0)"
for ip in "${IPs[@]}"; do
if [[ $ip == "$myip" ]]; then
echo "$green_foreground$ip" "<- My IP" "$color_reset"
else
echo "$ip"
fi
done
}
main
exit 0
|
Upgrade Java 16 version in CI image | #!/bin/bash
set -e
case "$1" in
java8)
echo "https://github.com/adoptium/temurin8-binaries/releases/download/jdk8u302-b08/OpenJDK8U-jdk_x64_linux_hotspot_8u302b08.tar.gz"
;;
java11)
echo "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.12%2B7/OpenJDK11U-jdk_x64_linux_hotspot_11.0.12_7.tar.gz"
;;
java16)
echo "https://github.com/AdoptOpenJDK/openjdk16-binaries/releases/download/jdk-16.0.1%2B9/OpenJDK16U-jdk_x64_linux_hotspot_16.0.1_9.tar.gz"
;;
java17)
echo "https://github.com/AdoptOpenJDK/openjdk17-binaries/releases/download/jdk-2021-05-07-13-31/OpenJDK-jdk_x64_linux_hotspot_2021-05-06-23-30.tar.gz"
;;
*)
echo $"Unknown java version"
exit 1
esac
| #!/bin/bash
set -e
case "$1" in
java8)
echo "https://github.com/adoptium/temurin8-binaries/releases/download/jdk8u302-b08/OpenJDK8U-jdk_x64_linux_hotspot_8u302b08.tar.gz"
;;
java11)
echo "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.12%2B7/OpenJDK11U-jdk_x64_linux_hotspot_11.0.12_7.tar.gz"
;;
java16)
echo "https://github.com/adoptium/temurin16-binaries/releases/download/jdk-16.0.2%2B7/OpenJDK16U-jdk_x64_linux_hotspot_16.0.2_7.tar.gz"
;;
java17)
echo "https://github.com/AdoptOpenJDK/openjdk17-binaries/releases/download/jdk-2021-05-07-13-31/OpenJDK-jdk_x64_linux_hotspot_2021-05-06-23-30.tar.gz"
;;
*)
echo $"Unknown java version"
exit 1
esac
|
Revert "Attempt to use destination property" | #!/bin/bash -ex
pushd `dirname $0`/.. > /dev/null
root=$(pwd -P)
popd > /dev/null
# gather some data about the repo
source $root/scripts/vars.sh
[ -f $root/app.$EXT ] && exit
mvn dependency:get \
-DremoteRepositories=nexus::default::$NEXUSURL \
-DrepositoryId=nexus \
-DartifactId=$APP \
-DgroupId=io.piazzageo \
-Dpackaging=$EXT \
-Dtransitive=false \
-Ddestination=$root/app.$EXT \
-Dversion=$SHORT
| #!/bin/bash -ex
pushd `dirname $0`/.. > /dev/null
root=$(pwd -P)
popd > /dev/null
# gather some data about the repo
source $root/scripts/vars.sh
[ -f $root/app.$EXT ] && exit
mvn dependency:get \
-DremoteRepositories=nexus::default::$NEXUSURL \
-DrepositoryId=nexus \
-DartifactId=$APP \
-DgroupId=io.piazzageo \
-Dpackaging=$EXT \
-Dtransitive=false \
-Dversion=$SHORT
mvn dependency:copy \
-DgroupId=core \
-DartifactId=$APP \
-Dversion=$SHORT \
-Dtype=$EXT \
-DoverWrite=true \
-DoutputDirectory=$root \
-DdestFileName=app.$EXT
|
Expand X2 to include INT_R on far right edge. | export XRAY_DATABASE="artix7"
export XRAY_PART="xc7a50tfgg484-1"
export XRAY_ROI_FRAMES="0x00000000:0xffffffff"
# All CLB's in X0Y2 CMT.
export XRAY_ROI="SLICE_X0Y100:SLICE_X35Y149 RAMB18_X0Y40:RAMB18_X0Y59 RAMB36_X0Y20:RAMB36_X0Y29 DSP48_X0Y40:DSP48_X0Y59"
export XRAY_ROI_GRID_X1="10"
export XRAY_ROI_GRID_X2="57"
# Y = 0 and Y 52 are VBRK rows, include them
export XRAY_ROI_GRID_Y1="0"
export XRAY_ROI_GRID_Y2="52"
export XRAY_PIN_00="E22"
export XRAY_PIN_01="D22"
export XRAY_PIN_02="E21"
export XRAY_PIN_03="D21"
export XRAY_PIN_04="G21"
export XRAY_PIN_05="G22"
export XRAY_PIN_06="F21"
source $(dirname ${BASH_SOURCE[0]})/../../utils/environment.sh
| export XRAY_DATABASE="artix7"
export XRAY_PART="xc7a50tfgg484-1"
export XRAY_ROI_FRAMES="0x00000000:0xffffffff"
# All CLB's in X0Y2 CMT.
export XRAY_ROI="SLICE_X0Y100:SLICE_X35Y149 RAMB18_X0Y40:RAMB18_X0Y59 RAMB36_X0Y20:RAMB36_X0Y29 DSP48_X0Y40:DSP48_X0Y59"
export XRAY_ROI_GRID_X1="10"
export XRAY_ROI_GRID_X2="58"
# Y = 0 and Y 52 are VBRK rows, include them
export XRAY_ROI_GRID_Y1="0"
export XRAY_ROI_GRID_Y2="52"
export XRAY_PIN_00="E22"
export XRAY_PIN_01="D22"
export XRAY_PIN_02="E21"
export XRAY_PIN_03="D21"
export XRAY_PIN_04="G21"
export XRAY_PIN_05="G22"
export XRAY_PIN_06="F21"
source $(dirname ${BASH_SOURCE[0]})/../../utils/environment.sh
|
Update DB restore script to properly handle different DBs | #!/bin/sh
# This a script for restoring the local env-logger database from a backup snapshot.
set -e
if [ $# -ne 2 ]; then
cat <<EOF
This a script for restoring the local env-logger database from a backup snapshot.
Usage: $0 <DB name> <DB snapshot name>
Example: $0 env_logger db_snapshot.sql
EOF
exit 1
fi
db_name=$1
snapshot_name=$2
file_out=$(file -ib ${snapshot_name})
if [ $(echo "${file_out}"|grep -c xz) -eq 1 ]; then
echo "Uncompressed snapshot, decompressing before restore"
unxz ${snapshot_name}
snapshot_name=$(echo ${snapshot_name}|sed 's/.xz//')
fi
echo "Truncating tables"
psql "${db_name}" <<EOF
TRUNCATE TABLE users CASCADE;
TRUNCATE TABLE observations CASCADE;
TRUNCATE TABLE yardcam_image;
EOF
echo "Adding new values"
# Pressure data has not been collected from the beginning and thus contains
# NULL values causing restore to fail
psql "${db_name}" -c 'ALTER TABLE weather_data ALTER pressure DROP NOT NULL;'
psql "${db_name}" < "${snapshot_name}"
| #!/bin/sh
# This a script for restoring the local env-logger database from a backup snapshot.
set -e
if [ $# -ne 2 ]; then
cat <<EOF
This a script for restoring the local env-logger database from a backup snapshot.
Usage: $0 <DB name> <DB snapshot name>
Example: $0 env_logger db_snapshot.sql
EOF
exit 1
fi
db_name=$1
snapshot_name=$2
file_out=$(file -ib ${snapshot_name})
if [ $(echo "${file_out}"|grep -c xz) -eq 1 ]; then
echo "Uncompressed snapshot, decompressing before restore"
unxz ${snapshot_name}
snapshot_name=$(echo ${snapshot_name}|sed 's/.xz//')
fi
# The environment logger database requires special steps before a backup
# can be restored
if [ ${db_name} = "env_logger" ]; then
echo "Truncating tables"
psql "${db_name}" <<EOF
TRUNCATE TABLE users CASCADE;
TRUNCATE TABLE observations CASCADE;
TRUNCATE TABLE yardcam_image;
EOF
# Pressure data has not been collected from the beginning and thus contains
# NULL values causing restore to fail
psql "${db_name}" -c 'ALTER TABLE weather_data ALTER pressure DROP NOT NULL;'
fi
echo "Adding new values"
psql "${db_name}" < "${snapshot_name}"
|
Remove useless deployment scripts part | #!/bin/bash
set -e
version="$1"
if [[ -z "$version" ]]; then
echo "Usage: $0 <version>" >&2
echo "No version set" >&2
exit 1
fi
sed -i -e "s/^Version.*/Version: \\t${version}/" python-varlink.spec
sed -i -e "s/^[ \\t]*version.*=.*/ version = \"${version}\",/" setup.py
git commit -m "version ${version}" python-varlink.spec setup.py
git tag -m "version ${version}" --sign "${version}"
git push
git push --tags
rm -fr dist
python3 setup.py bdist_wheel --universal
python3 setup.py sdist
twine upload --skip-existing --sign-with gpg2 -s dist/*
curl -L -O https://github.com/varlink/python-varlink/archive/${version}/python-varlink-${version}.tar.gz
rm -fr docs/build
python3 setup.py build_sphinx --source-dir=docs/ --build-dir=docs/build --all-files
GIT_DEPLOY_DIR=$(pwd)/docs/build/html GIT_DEPLOY_BRANCH=gh-pages ./git-deploy-branch.sh -m "doc update"
| #!/bin/bash
set -e
version="$1"
if [[ -z "$version" ]]; then
echo "Usage: $0 <version>" >&2
echo "No version set" >&2
exit 1
fi
sed -i -e "s/^Version.*/Version: \\t${version}/" python-varlink.spec
curl -L -O https://github.com/varlink/python-varlink/archive/${version}/python-varlink-${version}.tar.gz
rm -fr docs/build
python3 setup.py build_sphinx --source-dir=docs/ --build-dir=docs/build --all-files
GIT_DEPLOY_DIR=$(pwd)/docs/build/html GIT_DEPLOY_BRANCH=gh-pages ./git-deploy-branch.sh -m "doc update"
|
Fix script to not require CDing into directory first. | #Check the number of arguments supplied:
if [ $# -ne 2 ]
then
echo "test.sh: Error: Argument must be the Specware installation to test and ISABELLE_2013_2_ROOT."
exit 1
fi
SPECWARE4=$1
ISABELLE_2013_2_ROOT=$2
run-proc.sh ${SPECWARE4} TwoMorphisms#A
run-proc.sh ${SPECWARE4} TwoMorphisms#B
run-proc.sh ${SPECWARE4} TwoMorphisms#CheckRqmtsB
run-proc.sh ${SPECWARE4} TwoMorphisms#C
run-proc.sh ${SPECWARE4} TwoMorphisms#CheckRqmtsC
run-gen-obligs.sh ${SPECWARE4} TwoMorphisms#A
run-gen-obligs.sh ${SPECWARE4} TwoMorphisms#B
run-gen-obligs.sh ${SPECWARE4} TwoMorphisms#CheckRqmtsB
run-gen-obligs.sh ${SPECWARE4} TwoMorphisms#C
run-gen-obligs.sh ${SPECWARE4} TwoMorphisms#CheckRqmtsC
run-isabelle2013-2.sh Isa/TwoMorphisms_CheckRqmtsC.thy ${ISABELLE_2013_2_ROOT}
| #Check the number of arguments supplied:
if [ $# -ne 2 ]
then
echo "test.sh: Error: Argument must be the Specware installation to test and ISABELLE_2013_2_ROOT."
exit 1
fi
SPECWARE4=$1
ISABELLE_2013_2_ROOT=$2
run-proc.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#A
run-proc.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#B
run-proc.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#CheckRqmtsB
run-proc.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#C
run-proc.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#CheckRqmtsC
run-gen-obligs.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#A
run-gen-obligs.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#B
run-gen-obligs.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#CheckRqmtsB
run-gen-obligs.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#C
run-gen-obligs.sh ${SPECWARE4} ${SPECWARE4}/Examples/Misc/TwoMorphisms#CheckRqmtsC
run-isabelle2013-2.sh ${SPECWARE4}/Examples/Misc/Isa/TwoMorphisms_CheckRqmtsC.thy ${ISABELLE_2013_2_ROOT}
|
Use symlinks to make hbsfy requireable | #!/bin/sh
set -eu
cd test/
npm link hbsfy
node test.js
node browserify_test.js
node custom_extension_test.js
| #!/bin/sh
set -eu
set -x
# Make require("hbsfy") work
if [ ! -h node_modules/hbsfy ]; then
rm -rf node_modules/hbsfy
ln -fs .. node_modules/hbsfy
fi
cd test/
for test_file in *test.js
do
node "$test_file"
done
|
Stop force setting postgres port | #!/bin/sh
APPNAME=${APPNAME:-`grep name manifest.yml | awk '{print $3}'`}
cf set-env $APPNAME AWS_ACCESS_KEY_ID $AWS_ACCESS_KEY_ID
cf set-env $APPNAME AWS_SECRET_ACCESS_KEY $AWS_SECRET_ACCESS_KEY
cf set-env $APPNAME BOOT_CHECK_PORT $BOOT_CHECK_PORT
cf set-env $APPNAME PROD_DB_PASSWORD $PROD_DB_PASSWORD
cf set-env $APPNAME PROD_DB_USER $PROD_DB_USER
cf set-env $APPNAME PROD_DB_URI $PROD_DB_URI
cf set-env $APPNAME SECURITY_USER_NAME $SECURITY_USER_NAME
cf set-env $APPNAME SECURITY_USER_PASSWORD $SECURITY_USER_PASSWORD
cf set-env $APPNAME SOURCE_INSTANCE_ID $SOURCE_INSTANCE_ID
cf set-env $APPNAME SUBNET_ID $SUBNET_ID
cf set-env $APPNAME BOOT_CHECK_PORT 5432 | #!/bin/sh
APPNAME=${APPNAME:-`grep name manifest.yml | awk '{print $3}'`}
cf set-env $APPNAME AWS_ACCESS_KEY_ID $AWS_ACCESS_KEY_ID
cf set-env $APPNAME AWS_SECRET_ACCESS_KEY $AWS_SECRET_ACCESS_KEY
cf set-env $APPNAME BOOT_CHECK_PORT $BOOT_CHECK_PORT
cf set-env $APPNAME PROD_DB_PASSWORD $PROD_DB_PASSWORD
cf set-env $APPNAME PROD_DB_USER $PROD_DB_USER
cf set-env $APPNAME PROD_DB_URI $PROD_DB_URI
cf set-env $APPNAME SECURITY_USER_NAME $SECURITY_USER_NAME
cf set-env $APPNAME SECURITY_USER_PASSWORD $SECURITY_USER_PASSWORD
cf set-env $APPNAME SOURCE_INSTANCE_ID $SOURCE_INSTANCE_ID
cf set-env $APPNAME SUBNET_ID $SUBNET_ID
|
Install dmgbuild and gon after startup | #!/bin/bash
# if the proxy is around, use it
nc -z -w3 192.168.1.1 8123 && export http_proxy="http://192.168.1.1:8123"
export PATH="/usr/local/bin:$PATH"
chmod 755 /vagrant/package/package.sh
set -e
/vagrant/package/package.sh /vagrant/substrate-assets/substrate_darwin_x86_64.zip master
pkg_dir=${VAGRANT_PACKAGE_OUTPUT_DIR:-"pkg"}
mkdir -p /vagrant/${pkg_dir}
cp *.dmg /vagrant/${pkg_dir}
| #!/bin/bash
export PATH="/usr/local/bin:$PATH"
sudo pip3 install dmgbuild
curl -Lo gon.zip https://github.com/mitchellh/gon/releases/download/v0.2.2/gon_0.2.2_macos.zip
unzip gon.zip
chown root:wheel gon
chmod 755 gon
mv gon /System/Volumes/Data/usr/local/bin/gon
chmod 755 /vagrant/package/package.sh
set -e
/vagrant/package/package.sh /vagrant/substrate-assets/substrate_darwin_x86_64.zip master
pkg_dir=${VAGRANT_PACKAGE_OUTPUT_DIR:-"pkg"}
mkdir -p /vagrant/${pkg_dir}
cp *.dmg /vagrant/${pkg_dir}
|
Add idea on how to combine metadata files. | #!/bin/bash
# Output a directory tree as JSON
find ../example -mindepth 1 -printf '{"path":"%P","type":"%y","size":%s,"modified":"%TY-%Tm-%Td"}\n' \
| jq --null-input \
' reduce inputs as $i
( {}
; ( $i.path | split("/") ) as $p | setpath($p; getpath($p) + $i)
)
'
| #!/bin/bash
# Output a directory tree as JSON
find ../example -mindepth 1 -printf '{"path":"%P","type":"%y","size":%s,"modified":"%TY-%Tm-%Td"}\n' \
| jq --null-input \
' reduce inputs as $i
( {}
; ( $i.path | split("/") ) as $p | setpath($p; getpath($p) + $i)
)
'
find build/.cache/metadata -iname '*.meta.json' \
| xargs jq '{(input_filename | rtrimstr(".meta.json")):.}' \
| jq --slurp 'reduce .[] as $i ({}; . + $i)'
|
Fix Homebrew bin path declaration | #!/usr/bin/env bash
if [[ -d "/home/linuxbrew/.linuxbrew" ]]; then
export PATH="/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin:$PATH"
if type brew &>/dev/null; then
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
PATH="$HOMEBREW_PREFIX:$PATH"
fi
fi
HOMEBREW_PREFIX="${HOMEBREW_PREFIX:-$(brew --prefix)}"
export HOMEBREW_PREFIX
record_time "homebrew prefix"
| #!/usr/bin/env bash
if [[ -d "/home/linuxbrew/.linuxbrew" ]]; then
export PATH="/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin:$PATH"
if type brew &>/dev/null; then
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
PATH="${HOMEBREW_PREFIX}/bin:$PATH"
fi
fi
HOMEBREW_PREFIX="${HOMEBREW_PREFIX:-$(brew --prefix)}"
export HOMEBREW_PREFIX
record_time "homebrew prefix"
|
Fix multiple argument completion with zsh | if [[ ! -o interactive ]]; then
return
fi
compctl -K _rbenv rbenv
_rbenv() {
local words completions
read -cA words
if [ "${#words}" -eq 2 ]; then
completions="$(rbenv commands)"
else
completions="$(rbenv completions ${words[2,-1]})"
fi
reply=("${(ps:\n:)completions}")
}
| if [[ ! -o interactive ]]; then
return
fi
compctl -K _rbenv rbenv
_rbenv() {
local words completions
read -cA words
if [ "${#words}" -eq 2 ]; then
completions="$(rbenv commands)"
else
completions="$(rbenv completions ${words[2,-2]})"
fi
reply=("${(ps:\n:)completions}")
}
|
Check OCLint: Fix syntax error | @INCLUDE_COMMON@
echo
echo CHECK SOURCE WITH OCLINT
echo
command -v oclint > /dev/null 2>&1 || {
echo "Could not locate OCLint" >&2
exit 0
}
test -f "@PROJECT_BINARY_DIR@/compile_commands.json" || {
echo "Compilation database not found" >&2
exit 0
}
cd "@CMAKE_SOURCE_DIR@" || exit
oclint -p "@PROJECT_BINARY_DIR@" -enable-global-analysis -enable-clang-static-analyzer \
"src/libs/ease/array.c" \
"src/libs/ease/keyname.c" \
"src/libs/utility/text.c" \
"src/plugins/base64/"*.c \
"src/plugins/camel/camel.c" \
"src/plugins/ccode/"*.cpp \
"src/plugins/cpptemplate/"*.cpp \
"src/plugins/directoryvalue/"*.cpp \
"src/plugins/mini/mini.c" \
"src/plugins/yambi/"*.cpp \
"src/plugins/yamlcpp/"*.{c,cpp} \
"src/plugins/yamlsmith/"*.{c,cpp} \
"src/plugins/yanlr/"*.{c,cpp} \
"src/plugins/yawn/"*.cpp \
exit_if_fail "OCLint found problematic code"
end_script
| @INCLUDE_COMMON@
echo
echo CHECK SOURCE WITH OCLINT
echo
command -v oclint > /dev/null 2>&1 || {
echo "Could not locate OCLint" >&2
exit 0
}
test -f "@PROJECT_BINARY_DIR@/compile_commands.json" || {
echo "Compilation database not found" >&2
exit 0
}
cd "@CMAKE_SOURCE_DIR@" || exit
oclint -p "@PROJECT_BINARY_DIR@" -enable-global-analysis -enable-clang-static-analyzer \
"src/libs/ease/array.c" \
"src/libs/ease/keyname.c" \
"src/libs/utility/text.c" \
"src/plugins/base64/"*.c \
"src/plugins/camel/camel.c" \
"src/plugins/ccode/"*.cpp \
"src/plugins/cpptemplate/"*.cpp \
"src/plugins/directoryvalue/"*.cpp \
"src/plugins/mini/mini.c" \
"src/plugins/yambi/"*.cpp \
"src/plugins/yamlcpp/"*.{c,cpp} \
"src/plugins/yamlsmith/"*.{c,cpp} \
"src/plugins/yanlr/"*.{c,cpp} \
"src/plugins/yawn/"*.cpp
exit_if_fail "OCLint found problematic code"
end_script
|
Fix JDK 8 RPM URL in bootstrap script | #!/bin/sh
#
# (c) Copyright 2016 Cloudera, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We remove any natively installed JDKs, as both Cloudera Manager and Cloudera Director only support Oracle JDKs
yum remove --assumeyes *openjdk*
rpm -ivh "http://archive.cloudera.com/director/redhat/7/x86_64/director/2.1.0/RPMS/x86_64/oracle-j2sdk1.8-1.8.0+update60-1.x86_64.rpm"
| #!/bin/sh
#
# (c) Copyright 2016 Cloudera, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We remove any natively installed JDKs, as both Cloudera Manager and Cloudera Director only support Oracle JDKs
yum remove --assumeyes *openjdk*
rpm -ivh "http://archive.cloudera.com/director/redhat/7/x86_64/director/2.2.0/RPMS/x86_64/jdk-8u60-linux-x64.rpm"
|
Change order of theme setup | #!/bin/sh
source "${BASH_SOURCE%/*}/functions.lib"
print_start cask iterm2
install cask iterm2
print_start zsh
install zsh
echo "Installing oh-my-zsh"
sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
cp ../dotfiles/.exports ~/.exports
echo 'source ~/.exports' >> ~/.zshrc
cp ../dotfiles/.aliases ~/.aliases
echo 'source ~/.aliases' >> ~/.zshrc
cp ../dotfiles/.theme ~/.theme
echo 'source ~/.theme' >> ~/.zshrc
echo 'Installing spaceship prompt'
git clone https://github.com/denysdovhan/spaceship-prompt.git "$ZSH_CUSTOM/themes/spaceship-prompt"
ln -s "$ZSH_CUSTOM/themes/spaceship-prompt/spaceship.zsh-theme" "$ZSH_CUSTOM/themes/spaceship.zsh-theme"
| #!/bin/sh
source "${BASH_SOURCE%/*}/functions.lib"
print_start cask iterm2
install cask iterm2
print_start zsh
install zsh
echo "Installing oh-my-zsh"
sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
echo 'Installing spaceship prompt'
git clone https://github.com/denysdovhan/spaceship-prompt.git "$ZSH_CUSTOM/themes/spaceship-prompt"
ln -s "$ZSH_CUSTOM/themes/spaceship-prompt/spaceship.zsh-theme" "$ZSH_CUSTOM/themes/spaceship.zsh-theme"
cp ../dotfiles/.theme ~/.theme
echo 'source ~/.theme' >> ~/.zshrc
cp ../dotfiles/.exports ~/.exports
echo 'source ~/.exports' >> ~/.zshrc
cp ../dotfiles/.aliases ~/.aliases
echo 'source ~/.aliases' >> ~/.zshrc
|
Update build-system to work cross-platform | #!/bin/bash
set -e
set -u
mkdir -p fuzzylite/fuzzylite/release
cd fuzzylite/fuzzylite/release
# We use g++-4.8 as >4.8 requires >= GLIBCXX 3.4.20, but only 3.4.19 is
# available on ubuntu 14.04
cmake .. -G"Unix Makefiles" -DCMAKE_CXX_COMPILER=$(which g++-4.8) -DCMAKE_BUILD_TYPE=Release -DFL_BACKTRACE=ON -DFL_USE_FLOAT=OFF -DFL_CPP11=OFF
make
| #!/bin/bash
set -e
set -u
mkdir -p fuzzylite/fuzzylite/release
cd fuzzylite/fuzzylite/release
# We require GLIBCXX >= 3.4.20, which is something you get with g++-4.8 and
# newer. Keep in mind that for example ubuntu 14.04 has a version of g++ that
# is too old.
cmake .. -G"Unix Makefiles" -DCMAKE_BUILD_TYPE=Release -DFL_BACKTRACE=ON -DFL_USE_FLOAT=OFF -DFL_CPP11=OFF
make
|
Update install script for Ruby | #!/usr/bin/env bash
#
# Setup rebenv and install some nice gems globally
GEMS=(
"bundler"
"jekyll"
"tmuxinator"
)
if test ! $(which rbenv)
then
echo " Installing rbenv for you."
brew install rbenv > /tmp/rbenv-install.log
fi
if test ! $(which ruby-build)
then
echo " Installing ruby-build and rbenv-gem-rehash for you."
brew install ruby-build > /tmp/ruby-build-install.log
brew install rbenv-gem-rehash > /tmp/rbenv-gem-rehash-install.log
fi
# Check for rbenv before attempting to install gems
if $(command -v rbenv) ; then
echo "Checking for gems to install"
for GEM in ${GEMS[@]} ; do
if ! gem list | grep -q "${GEM}" ; then
gem install $GEM
fi
done
fi
| #!/usr/bin/env bash
#
# Setup rebenv and install some nice gems globally
set -e
RUBY_VERSION="2.2.2"
GEMS=(
"bundler"
"jekyll"
"tmuxinator"
)
if test ! $(which rbenv)
then
echo " Installing rbenv for you."
brew install rbenv > /tmp/rbenv-install.log
fi
if test ! $(which ruby-build)
then
echo " Installing ruby-build and rbenv-gem-rehash for you."
brew install ruby-build > /tmp/ruby-build-install.log
brew install rbenv-gem-rehash > /tmp/rbenv-gem-rehash-install.log
fi
# Check for rbenv before attempting to install gems
if $(command -v rbenv >/dev/null 2>&1) ; then
echo 'Installing Ruby for you.'
rbenv install ${RUBY_VERSION}
rbenv global ${RUBY_VERSION}
echo "Checking for gems to install"
for GEM in ${GEMS[@]} ; do
if ! gem list | grep -q "${GEM}" ; then
gem install $GEM
fi
done
fi
|
Make sure we move all files/folders | cd docs
yarn -v
node -v
echo 'Installing Gitbook CLI'
yarn global add gitbook-cli
echo 'Running Gitbook installation'
# Generate all version's GitBook output
for D in *; do
if [ -d "${D}" ]; then
echo "Generating output for: ${D}"
cd "${D}"
# Clear previous output, generate new
rm -rf _book
gitbook install
gitbook build
cd ..
fi
done
# Move CNAME File into `latest`
cp CNAME ./latest/_book/CNAME
# Create a history folder in our latest version's output
mkdir ./latest/_book/history
# Move each version's files to latest's history folder
for D in *; do
if [ -d "${D}" ]; then
if [[ "${D}" == v* ]] ; then
echo "Moving ${D} to the latest version's history folder"
mkdir "./latest/_book/history/${D}"
mv -v "./${D}/_book"/* "./latest/_book/history/${D}"
fi
fi
done
cd ..
# Build and copy the StandaloneViewer into the static directory
echo $DEPLOY_PRIME_URL
export ROOT_URL=$DEPLOY_PRIME_URL/demo
mkdir ./docs/latest/_book/demo/
yarn install
yarn build:web:ci
| cd docs
yarn -v
node -v
echo 'Installing Gitbook CLI'
yarn global add gitbook-cli
echo 'Running Gitbook installation'
# Generate all version's GitBook output
for D in *; do
if [ -d "${D}" ]; then
echo "Generating output for: ${D}"
cd "${D}"
# Clear previous output, generate new
rm -rf _book
gitbook install
gitbook build
cd ..
fi
done
# Move CNAME File into `latest`
cp CNAME ./latest/_book/CNAME
# Create a history folder in our latest version's output
mkdir ./latest/_book/history
# Move each version's files to latest's history folder
for D in *; do
if [ -d "${D}" ]; then
if [[ "${D}" == v* ]] ; then
echo "Moving ${D} to the latest version's history folder"
mkdir "./latest/_book/history/${D}"
mv -v "./${D}/_book"/**/* "./latest/_book/history/${D}"
fi
fi
done
cd ..
# Build and copy the StandaloneViewer into the static directory
echo $DEPLOY_PRIME_URL
export ROOT_URL=$DEPLOY_PRIME_URL/demo
mkdir ./docs/latest/_book/demo/
yarn install
yarn build:web:ci
|
Remove ZSTD from set of required targets | #!/usr/bin/env bash
set -ex
# We need to put in place all relevant headers before running clang-tidy.
mkdir ../build
cd ../build
cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -Dall=On -Dtesting=On -Dx11=Off -Dalien=Off \
-Dcuda=Off -Dtmva-gpu=Off -Dveccore=Off ../root
# We need to prebuild a minimal set of targets which are responsible for header copy
# or generation.
make -j4 move_headers intrinsics_gen ClangCommentCommandList ClangCommentCommandInfo \
ClangCommentHTMLNamedCharacterReferences ClangCommentHTMLTagsProperties \
ClangCommentNodes ClangAttrImpl ClangStmtNodes ClangAttrClasses \
ClangAttrDump ClangCommentHTMLTags ClangDeclNodes ClangAttrVisitor \
ClangDiagnosticCommon ClangARMNeon ClangDiagnosticIndexName \
ClangDiagnosticParse ClangDiagnosticComment ClangDiagnosticFrontend \
ClangDiagnosticGroups ClangDiagnosticSerialization ClangDiagnosticLex \
ClangDiagnosticSema ClangAttrList ClangAttrHasAttributeImpl \
ClangDiagnosticAST ClangDiagnosticDriver ClangDiagnosticAnalysis \
ClangDriverOptions ClangAttrParserStringSwitches ClangAttrParsedAttrList \
ClangAttrTemplateInstantiate ClangAttrSpellingListIndex \
ClangAttrParsedAttrImpl ClangAttrParsedAttrKinds googletest ZSTD
ln -s $PWD/compile_commands.json $PWD/../root/
| #!/usr/bin/env bash
set -ex
# We need to put in place all relevant headers before running clang-tidy.
mkdir ../build
cd ../build
cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -Dall=On -Dtesting=On -Dx11=Off -Dalien=Off \
-Dcuda=Off -Dtmva-gpu=Off -Dveccore=Off ../root
# We need to prebuild a minimal set of targets which are responsible for header copy
# or generation.
make -j4 move_headers intrinsics_gen ClangCommentCommandList ClangCommentCommandInfo \
ClangCommentHTMLNamedCharacterReferences ClangCommentHTMLTagsProperties \
ClangCommentNodes ClangAttrImpl ClangStmtNodes ClangAttrClasses \
ClangAttrDump ClangCommentHTMLTags ClangDeclNodes ClangAttrVisitor \
ClangDiagnosticCommon ClangARMNeon ClangDiagnosticIndexName \
ClangDiagnosticParse ClangDiagnosticComment ClangDiagnosticFrontend \
ClangDiagnosticGroups ClangDiagnosticSerialization ClangDiagnosticLex \
ClangDiagnosticSema ClangAttrList ClangAttrHasAttributeImpl \
ClangDiagnosticAST ClangDiagnosticDriver ClangDiagnosticAnalysis \
ClangDriverOptions ClangAttrParserStringSwitches ClangAttrParsedAttrList \
ClangAttrTemplateInstantiate ClangAttrSpellingListIndex \
ClangAttrParsedAttrImpl ClangAttrParsedAttrKinds googletest
ln -s $PWD/compile_commands.json $PWD/../root/
|
Add export for running tests | #!/bin/bash
./src/prepare_galaxy.sh run_test
echo "TO DO : Launch tests on toy dataset"
| #!/bin/bash
export GALAXY_TEST_UPLOAD_ASYNC=false
export GALAXY_TEST_DB_TEMPLATE=https://github.com/jmchilton/galaxy-downloads/raw/master/db_gx_rev_0127.sqlite
./src/prepare_galaxy.sh run_test
echo "TO DO : Launch tests on toy dataset"
|
Add db alias for starting a project's database with docker-compose | alias d='docker'
alias dc='docker-compose'
alias drm='docker-rm'
alias dcl='docker-clean'
alias dcd='docker-compose down'
| alias d='docker'
alias dc='docker-compose'
alias drm='docker-rm'
alias dcl='docker-clean'
alias dcd='docker-compose down'
alias db='dc up -d db'
|
Add bundle install to the script. | #!/usr/bin/env bash
source ~/.rvm/scripts/rvm
rvm use default
bundle exec pod trunk push
| #!/usr/bin/env bash
source ~/.rvm/scripts/rvm
rvm use default
bundle install
bundle exec pod trunk push
|
Add more bindings for home/end/ctrl-left/right |
# Keybindings for Gnome-Terminal
# Up/Down Search
bindkey '\e[A' up-line-or-search
bindkey '\e[B' down-line-or-search
# Delete
bindkey '[3~' delete-char
# Home/End
bindkey 'OH' beginning-of-line
bindkey 'OF' end-of-line
# Ctrl Left/Right (^[[1;5D & ^[[1;5C)
bindkey '[1;5D' backward-word
bindkey '[1;5C' forward-word
|
# Keybindings for Gnome-Terminal
# Up/Down Search
bindkey '\e[A' up-line-or-search
bindkey '\e[B' down-line-or-search
# Delete
bindkey '[3~' delete-char
# Home/End
bindkey 'OH' beginning-of-line
bindkey 'OF' end-of-line
bindkey '[1~' beginning-of-line
bindkey '[4~' end-of-line
# Ctrl Left/Right (^[[1;5D & ^[[1;5C)
bindkey '[1;5D' backward-word
bindkey '[1;5C' forward-word
bindkey 'OD' backward-word
bindkey 'OC' forward-word
|
Make android scripts not warn about ADB in newer SDKs | #!/bin/bash
#
UTIL_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ "$(which adb)" != "" ]; then
ADB="$(which adb)"
elif [ -d "$ANDROID_SDK_ROOT" ]; then
ADB="${ANDROID_SDK_ROOT}/platform-tools/adb"
else
echo $ANDROID_SDK_ROOT
echo "No ANDROID_SDK_ROOT set (check that android_setup.sh was properly sourced)"
exit 1
fi
if [ ! -x $ADB ]; then
echo "The adb binary is not executable"
exit 1
fi
if [ $(uname) == "Linux" ]; then
ADB_REQUIRED="1.0.32"
elif [ $(uname) == "Darwin" ]; then
ADB_REQUIRED="1.0.31 or 1.0.32"
fi
# get the version and then truncate it to be just the version numbers
ADB_VERSION="$($ADB version)"
ADB_VERSION="${ADB_VERSION##* }"
if [[ "$ADB_REQUIRED" != *"$ADB_VERSION"* ]]; then
echo "WARNING: Your ADB version is out of date!"
echo " Expected ADB Version: ${ADB_REQUIRED}"
echo " Actual ADB Version: ${ADB_VERSION}"
fi
| # Copyright 2015 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
UTIL_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ "$(which adb)" != "" ]; then
ADB="$(which adb)"
elif [ -d "$ANDROID_SDK_ROOT" ]; then
ADB="${ANDROID_SDK_ROOT}/platform-tools/adb"
else
echo $ANDROID_SDK_ROOT
echo "No ANDROID_SDK_ROOT set (check that android_setup.sh was properly sourced)"
exit 1
fi
if [ ! -x $ADB ]; then
echo "The adb binary is not executable"
exit 1
fi
if [ $(uname) == "Linux" ]; then
ADB_REQUIRED="1.0.32"
elif [ $(uname) == "Darwin" ]; then
ADB_REQUIRED="1.0.31 or 1.0.32"
fi
# get the version string as an array, use just the version numbers
ADB_VERSION="$($ADB version)"
ADB_VERSION=($ADB_VERSION)
ADB_VERSION=${ADB_VERSION[4]}
if [[ "$ADB_REQUIRED" != *"$ADB_VERSION"* ]]; then
echo "WARNING: Your ADB version is out of date!"
echo " Expected ADB Version: ${ADB_REQUIRED}"
echo " Actual ADB Version: ${ADB_VERSION}"
fi
|
Install vagrant-serverspec only if it is not already present | #!/bin/bash -eux
create_vagrantfile_linux() {
vagrant plugin install vagrant-serverspec
cat << EOF > $tmp_path/Vagrantfile
Vagrant.configure('2') do |config|
config.vm.box = '$box_path'
config.vm.provision :serverspec do |spec|
spec.pattern = '$test_src_path'
end
end
EOF
}
box_path=$1
box_provider=$2
vagrant_provider=$3
test_src_path=$4
box_filename=$(basename "${box_path}")
box_name=${box_filename%.*}
tmp_path=/tmp/vagrantcloudtest
rm -rf ${tmp_path}
rm -f ~/.ssh/known_hosts
vagrant box remove ${box_path} --provider ${vagrant_provider} 2>/dev/null || true
vagrant box add ${box_path} --provider=${vagrant_provider}
mkdir -p ${tmp_path}
pushd ${tmp_path}
create_vagrantfile_linux
VAGRANT_LOG=warn vagrant up --provider=${box_provider}
sleep 10
vagrant destroy -f
popd
vagrant box remove ${box_path} --provider=${vagrant_provider}
| #!/bin/bash -eux
create_vagrantfile_linux() {
vagrant plugin list | grep vagrant-serverspec && echo "vagrant-serverspec installed" || vagrant plugin install vagrant-serverspec
cat << EOF > $tmp_path/Vagrantfile
Vagrant.configure('2') do |config|
config.vm.box = '$box_path'
config.vm.provision :serverspec do |spec|
spec.pattern = '$test_src_path'
end
end
EOF
}
box_path=$1
box_provider=$2
vagrant_provider=$3
test_src_path=$4
box_filename=$(basename "${box_path}")
box_name=${box_filename%.*}
tmp_path=/tmp/vagrantcloudtest
rm -rf ${tmp_path}
rm -f ~/.ssh/known_hosts
vagrant box remove ${box_path} --provider ${vagrant_provider} 2>/dev/null || true
vagrant box add ${box_path} --provider=${vagrant_provider}
mkdir -p ${tmp_path}
pushd ${tmp_path}
create_vagrantfile_linux
VAGRANT_LOG=warn vagrant up --provider=${box_provider}
sleep 10
vagrant destroy -f
popd
vagrant box remove ${box_path} --provider=${vagrant_provider}
|
Return non-zero error code if tests failed | #!/bin/bash
export PYTHONPATH=$PWD:$PWD/..:$PYTHONPATH
django-admin.py test --settings=tests.settings_get_asset_handler
django-admin.py test --settings=tests.settings_get_finder
django-admin.py test --settings=tests.settings_templatetags
django-admin.py test --settings=tests.settings_serve
| #!/bin/bash
set -e
export PYTHONPATH=$PWD:$PWD/..:$PYTHONPATH
django-admin.py test --settings=tests.settings_get_asset_handler
django-admin.py test --settings=tests.settings_get_finder
django-admin.py test --settings=tests.settings_templatetags
django-admin.py test --settings=tests.settings_serve
|
Use a dev version of mistralclient | #!/bin/bash
set -eux
set -o pipefail
rm -rf ~/venv;
virtualenv ~/venv;
~/venv/bin/pip install --editable ~/python-tripleoclient;
~/venv/bin/pip install tripleo-common==5.0.0.0b2 os-cloud-config==5.0.0.0b2
~/venv/bin/pip install git+git://github.com/openstack/python-heatclient.git@master
| #!/bin/bash
set -eux
set -o pipefail
rm -rf ~/venv;
virtualenv ~/venv;
~/venv/bin/pip install --editable ~/python-tripleoclient;
~/venv/bin/pip install tripleo-common==5.0.0.0b2 os-cloud-config==5.0.0.0b2
~/venv/bin/pip install git+git://github.com/openstack/python-heatclient.git@master
~/venv/bin/pip install git+git://github.com/openstack/python-mistralclient.git@master
|
Change distribution to use abendstern.default.rc. | #! /bin/bash
BASEDIR=/oss/Abendstern_WGL32
SRVDIR=/programmes/cpp/abendstern/srv/abendstern/package
FILES="apply_update.bat Abendstern.exe abendstern.rc data fonts shaders images legal tcl library itcl3.4 tls1.6 tcllib bin"
APP=Abendstern_WGL32
./validation.sh
date -u +%Y%m%d%H%M%S >version
cp version $BASEDIR/version
cp tcl/validation $BASEDIR/validation
rm -f $SRVDIR/manifest
rm -f $SRVDIR/$APP.zip
rm -f $BASEDIR/bin/*.exp $BASEDIR/bin/*.lib
cd $BASEDIR
find -name '*~' | xargs rm -f
for top in $FILES *.dll version; do
echo $top
find $top -type f | grep -v tls16.dll | xargs md5sum >>$SRVDIR/manifest
cp -R $top $SRVDIR/
done
mkdir /tmp/$APP
for f in $FILES hangar *.dll version patchlevel.rc dna; do
cp -R $f /tmp/$APP
done
pushd /tmp/
zip -r9q $SRVDIR/$APP.zip $APP
popd
rm -R /tmp/$APP
| #! /bin/bash
BASEDIR=/oss/Abendstern_WGL32
SRVDIR=/programmes/cpp/abendstern/srv/abendstern/package
FILES="apply_update.bat Abendstern.exe abendstern.default.rc data fonts shaders images legal tcl library itcl3.4 tls1.6 tcllib bin"
APP=Abendstern_WGL32
./validation.sh
date -u +%Y%m%d%H%M%S >version
cp version $BASEDIR/version
cp tcl/validation $BASEDIR/validation
rm -f $SRVDIR/manifest
rm -f $SRVDIR/$APP.zip
rm -f $BASEDIR/bin/*.exp $BASEDIR/bin/*.lib
cd $BASEDIR
find -name '*~' | xargs rm -f
for top in $FILES *.dll abendstern.rc version; do
echo $top
find $top -type f | grep -v tls16.dll | xargs md5sum >>$SRVDIR/manifest
cp -R $top $SRVDIR/
done
mkdir /tmp/$APP
for f in $FILES hangar *.dll version patchlevel.rc dna; do
cp -R $f /tmp/$APP
done
pushd /tmp/
zip -r9q $SRVDIR/$APP.zip $APP
popd
rm -R /tmp/$APP
|
Print a banner every time we start a new run, to make it easier to tell (in the logs). | #!/bin/sh
# Kick off the exercise icon scraping process
#
# NOTE: before running this you must create a file called 'secrets.sh'
# in this directory, with the following content:
# S3_KEY=<key>
# S3_SECRET=<secret>
# S3_BUCKET=<bucket>
cd $HOME/exercise-icons
. ./secrets.sh
export S3_KEY S3_SECRET S3_BUCKET
# We need to make sure we pick up the right versions of casperjs and phantomjs.
export PATH="$PWD/node_modules/casperjs/bin:$PWD/node_modules/phantomjs/bin:$PATH"
# Prepend DEBUG=* on the line below for more verbosity
./bin/capture.js --upload --all --parallel 1
| #!/bin/sh
# Kick off the exercise icon scraping process
#
# NOTE: before running this you must create a file called 'secrets.sh'
# in this directory, with the following content:
# S3_KEY=<key>
# S3_SECRET=<secret>
# S3_BUCKET=<bucket>
cd $HOME/exercise-icons
. ./secrets.sh
export S3_KEY S3_SECRET S3_BUCKET
# We need to make sure we pick up the right versions of casperjs and phantomjs.
export PATH="$PWD/node_modules/casperjs/bin:$PWD/node_modules/phantomjs/bin:$PATH"
echo
echo ---------------------------------------------------------------------
echo Starting capture.js run at `date`
echo ---------------------------------------------------------------------
# Prepend DEBUG=* on the line below for more verbosity
./bin/capture.js --upload --all --parallel 1
|
Enhance environment for differences in systems | #!/bin/bash
PYTHON3=python3
PYTHON2=python2
for P in wget ant "${PYTHON3}" python3-pip python python-pip python-virtualenv
do
if ! dpkg-query -s $P | grep ok.installed >/dev/null 2>&1
then
echo "package $P not found; installing..."
sudo apt-get install -y $P
if [ "$P" == "$PYTHON_VERSION" ]; then
sudo update-alternatives --install /usr/bin/python python /usr/bin/$PYTHON_VERSION 10
fi
fi
done
pip2 freeze > .pipPackageList
for P in clonedigger
do
python2 find_package.py --package $P .pipPackageList
case $? in
1)
echo "Python package $P not found, installing..."
y="sudo pip2 install $P"
$y
;;
2)
echo "Python package $P needs upgrade, upgrading..."
y="sudo pip2 install --upgrade $P"
$y
;;
esac
done
rm -f .pipPackageList
rm -rf .python3-sandbox
virtualenv -p python3 .python3-sandbox
pip3_file=".python3-sandbox/bin/pip3"
if [ ! -e "$pip3_file" ]
then
ln -s .python3-sandbox/bin/pip*3* .python3-sandbox/bin/pip3
fi
| #!/bin/bash
scriptDir=`dirname $0`
scriptDir=`readlink -f $scriptDir`
PYTHON3=python3
PYTHON2=python2
for P in wget ant "${PYTHON3}" python3-pip python python-pip python-virtualenv
do
if ! dpkg-query -s $P | grep ok.installed >/dev/null 2>&1
then
echo "package $P not found; installing..."
sudo apt-get install -y $P
if [ "$P" == "$PYTHON_VERSION" ]; then
sudo update-alternatives --install /usr/bin/python python /usr/bin/$PYTHON_VERSION 10
fi
fi
done
pip2 freeze > .pipPackageList
for P in clonedigger
do
python2 find_package.py --package $P .pipPackageList
case $? in
1)
echo "Python package $P not found, installing..."
y="sudo pip2 install $P"
$y
;;
2)
echo "Python package $P needs upgrade, upgrading..."
y="sudo pip2 install --upgrade $P"
$y
;;
esac
done
rm -f .pipPackageList
rm -rf .python3-sandbox
virtualenv -p python3 .python3-sandbox
pushd $scriptDir/.python3-sandbox/bin/
if [ ! -e "pip3" ]
then
ln -s pip*3* pip3
fi
popd
|
Update configuration files to install LVM | #!/bin/bash
#
# Run from new VM console or SSH session
echo =============================
echo "Install new packages"
echo =============================
apt-get update
apt-get install -qy --force-yes libpam-krb5 nagios3 nagios-nrpe-plugin nagios-nrpe-server tsm-client
apt-get install -qy --force-yes mercurial
echo =============================
echo "Configure new packages"
echo =============================
cp /root/common-password /etc/pam.d/common-password
cp /root/nrpe.cfg /etc/nagios/nrpe.cfg
/etc/init.d/nagios-nrpe-server restart
cp /root/aliases /etc/aliases
cp /root/main.cf /etc/postfix/main.cf
cp /root/apache2.conf /etc/apache2/apache2.conf
mkdir /var/www/docs
cp /root/*.html /var/www/docs
newaliases
echo =============================
echo "Configure and enable firewall"
echo =============================
ufw allow ssh
ufw allow http
ufw allow https
ufw allow samba
ufw enable
echo =============================
echo "Next step: postboot_3.sh"
echo =============================
| #!/bin/bash
#
# Run from new VM console or SSH session
echo =============================
echo "Install new packages"
echo =============================
apt-get update
apt-get install -qy --force-yes libpam-krb5 nagios3 nagios-nrpe-plugin nagios-nrpe-server tsm-client
apt-get install -qy --force-yes lvm2 mercurial
echo =============================
echo "Configure new packages"
echo =============================
cp /root/common-password /etc/pam.d/common-password
cp /root/nrpe.cfg /etc/nagios/nrpe.cfg
/etc/init.d/nagios-nrpe-server restart
cp /root/aliases /etc/aliases
cp /root/main.cf /etc/postfix/main.cf
cp /root/apache2.conf /etc/apache2/apache2.conf
mkdir /var/www/docs
cp /root/*.html /var/www/docs
newaliases
echo =============================
echo "Configure and enable firewall"
echo =============================
ufw allow ssh
ufw allow http
ufw allow https
ufw allow samba
ufw enable
echo =============================
echo "Next step: postboot_3.sh"
echo =============================
|
Add alias for a custom top command | # --------------------------------------------
# Shortcuts
# --------------------------------------------
alias e="$EDITOR"
alias v="$VISUAL"
alias vi="$VISUAL"
alias vim="$VISUAL"
# This actually happens a lot
alias :q='exit'
# Pretty print the path
alias path='echo $PATH | tr -s ":" "\n"'
## Summary for args to less:
# less(1)
# -M (-M or --LONG-PROMPT) Prompt very verbosely
# -I (-I or --IGNORE-CASE) Searches with '/' ignore case
# -R (-R or --RAW-CONTROL-CHARS) For handling ANSI colors
# -F (-F or --quit-if-one-screen) Auto exit if <1 screen
# -X (-X or --no-init) Disable termcap init & deinit
alias ag='ag --pager="less -MIRFX"'
alias afk='mac lock'
alias ip='mac ip'
alias ping!='mac check-internet'
# Copy the contents of the id_rsa.pub file to the clipboard.
alias sshkey='pbcopy < ~/.ssh/id_rsa.pub | echo "=> SSH key was copied to the clipboard."'
# Open current directory in finder.
alias f='open -a Finder ./'
# --------------------------------------------
# Network
# --------------------------------------------
alias hosts='sudo $EDITOR /etc/hosts'
# --------------------------------------------
# Just for fun
# --------------------------------------------
alias weather='curl wttr.in'
alias moon='curl wttr.in/Moon'
| # --------------------------------------------
# Shortcuts
# --------------------------------------------
alias e="$EDITOR"
alias v="$VISUAL"
alias vi="$VISUAL"
alias vim="$VISUAL"
# This actually happens a lot
alias :q='exit'
# Pretty print the path
alias path='echo $PATH | tr -s ":" "\n"'
alias t0p='top -o cpu -O -vsize -s 10 -n 20 -stats pid,command,cpu,time,pstate,user'
## Summary for args to less:
# less(1)
# -M (-M or --LONG-PROMPT) Prompt very verbosely
# -I (-I or --IGNORE-CASE) Searches with '/' ignore case
# -R (-R or --RAW-CONTROL-CHARS) For handling ANSI colors
# -F (-F or --quit-if-one-screen) Auto exit if <1 screen
# -X (-X or --no-init) Disable termcap init & deinit
alias ag='ag --pager="less -MIRFX"'
alias afk='mac lock'
alias ip='mac ip'
alias ping!='mac check-internet'
# Copy the contents of the id_rsa.pub file to the clipboard.
alias sshkey='pbcopy < ~/.ssh/id_rsa.pub | echo "=> SSH key was copied to the clipboard."'
# Open current directory in finder.
alias f='open -a Finder ./'
# --------------------------------------------
# Network
# --------------------------------------------
alias hosts='sudo $EDITOR /etc/hosts'
# --------------------------------------------
# Just for fun
# --------------------------------------------
alias weather='curl wttr.in'
alias moon='curl wttr.in/Moon'
|
Revert "create VM as normal user" | #!/bin/bash
# TODO(lpetrut): remove hardcoded stuff
MANILA_SERVICE_SECGROUP="manila-service"
NET_ID=$(neutron net-list | grep private | awk '{print $2}')
nova boot ws2012r2 --image=ws2012r2_kvm \
--flavor=100 \
--nic net-id=$NET_ID \
--user-data=/home/ubuntu/ssl/winrm_client_cert.pem \
--security-groups $MANILA_SERVICE_SECGROUP \
--poll
| #!/bin/bash
# TODO(lpetrut): remove hardcoded stuff
MANILA_SERVICE_SECGROUP="manila-service"
NET_ID=$(neutron net-list | grep private | awk '{print $2}')
nova --os-username manila --os-tenant-name service --os-password Passw0rd \
boot ws2012r2 --image=ws2012r2_kvm \
--flavor=100 \
--nic net-id=$NET_ID \
--user-data=/home/ubuntu/ssl/winrm_client_cert.pem \
--security-groups $MANILA_SERVICE_SECGROUP \
--poll
|
Use no proxy when connecting to VSim | #!/bin/bash
sleep 5
echo "Trying VSIM cluster setup on $1"
curl -X POST -d @cluster_setup "http://admin@$1/servlets/netapp.servlets.admin.XMLrequest_filer"
sleep 25
curl -X POST -d @ssh_enable "http://admin@$1/servlets/netapp.servlets.admin.XMLrequest_filer"
sleep 5
curl -X POST -d @ssh_enable_publickey "http://admin@$1/servlets/netapp.servlets.admin.XMLrequest_filer"
sleep 10
echo "\n"
echo "SSH is available at Cluster Node Management ip $1. Username admin, password netapp123"
| #!/bin/bash
sleep 5
echo "Trying VSIM cluster setup on $1"
curl -X POST -d @cluster_setup --noproxy $1 "http://admin@$1/servlets/netapp.servlets.admin.XMLrequest_filer"
sleep 25
curl -X POST -d @ssh_enable --noproxy $1 "http://admin@$1/servlets/netapp.servlets.admin.XMLrequest_filer"
sleep 5
curl -X POST -d @ssh_enable_publickey --noproxy $1 "http://admin@$1/servlets/netapp.servlets.admin.XMLrequest_filer"
sleep 10
echo "\n"
echo "SSH is available at Cluster Node Management ip $1. Username admin, password netapp123"
|
Add cluster-up to bazel exclude list | #!/usr/bin/env bash
source hack/common.sh
source hack/config.sh
# remove libvirt BUILD file to regenerate it each time
rm -f vendor/libvirt.org/libvirt-go/BUILD.bazel
# generate BUILD files
bazel run \
--config=${ARCHITECTURE} \
//:gazelle -- -exclude vendor/google.golang.org/grpc
# inject changes to libvirt BUILD file
bazel run \
--config=${ARCHITECTURE} \
-- @com_github_bazelbuild_buildtools//buildozer 'add cdeps //:libvirt-libs //:libvirt-headers' //vendor/libvirt.org/libvirt-go:go_default_library
bazel run \
--config=${ARCHITECTURE} \
-- @com_github_bazelbuild_buildtools//buildozer 'add copts -Ibazel-out/k8-fastbuild/genfiles' //vendor/libvirt.org/libvirt-go:go_default_library
bazel run \
--config=${ARCHITECTURE} \
-- @com_github_bazelbuild_buildtools//buildozer 'add copts -Ibazel-out/k8-fastbuild/genfiles' //vendor/libvirt.org/libvirt-go:go_default_library
# allign BAZEL files to a single format
bazel run \
--config=${ARCHITECTURE} \
//:buildifier
| #!/usr/bin/env bash
source hack/common.sh
source hack/config.sh
# remove libvirt BUILD file to regenerate it each time
rm -f vendor/libvirt.org/libvirt-go/BUILD.bazel
# generate BUILD files
bazel run \
--config=${ARCHITECTURE} \
//:gazelle -- -exclude vendor/google.golang.org/grpc --exclude cluster-up
# inject changes to libvirt BUILD file
bazel run \
--config=${ARCHITECTURE} \
-- @com_github_bazelbuild_buildtools//buildozer 'add cdeps //:libvirt-libs //:libvirt-headers' //vendor/libvirt.org/libvirt-go:go_default_library
bazel run \
--config=${ARCHITECTURE} \
-- @com_github_bazelbuild_buildtools//buildozer 'add copts -Ibazel-out/k8-fastbuild/genfiles' //vendor/libvirt.org/libvirt-go:go_default_library
bazel run \
--config=${ARCHITECTURE} \
-- @com_github_bazelbuild_buildtools//buildozer 'add copts -Ibazel-out/k8-fastbuild/genfiles' //vendor/libvirt.org/libvirt-go:go_default_library
# allign BAZEL files to a single format
bazel run \
--config=${ARCHITECTURE} \
//:buildifier
|
Add our source to the path as well | #! /bin/sh
# Download the script to install everything
curl https://raw.githubusercontent.com/dmwm/WMCore/master/test/deploy/deploy_unittest.sh > /home/dmwm/ContainerScripts/deploy_unittest.sh
chmod +x /home/dmwm/ContainerScripts/deploy_unittest.sh
sh /home/dmwm/ContainerScripts/deploy_unittest.sh
echo "export PYTHONPATH=/home/dmwm/wmcore_unittest/WMCore/src/python:$PYTHONPATH" >> ./env_unittest.sh
# Shut down services so the docker container doesn't have stale PID & socket files
source ./env_unittest.sh
$manage stop-services
| #! /bin/sh
# Download the script to install everything
curl https://raw.githubusercontent.com/dmwm/WMCore/master/test/deploy/deploy_unittest.sh > /home/dmwm/ContainerScripts/deploy_unittest.sh
chmod +x /home/dmwm/ContainerScripts/deploy_unittest.sh
sh /home/dmwm/ContainerScripts/deploy_unittest.sh
echo "export PYTHONPATH=/home/dmwm/wmcore_unittest/WMCore/src/python:\$PYTHONPATH" >> ./env_unittest.sh
# Shut down services so the docker container doesn't have stale PID & socket files
source ./env_unittest.sh
$manage stop-services
|
Replace initial gradle build with compileJava | #!/bin/sh
cd $(dirname $0)
cd ../complete
mvn clean package
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf target
./gradlew build
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf build
cd ../initial
mvn clean package
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf target
./gradlew build
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf build
exit
| #!/bin/sh
cd $(dirname $0)
cd ../complete
mvn clean package
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf target
./gradlew build
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf build
cd ../initial
mvn clean package
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf target
./gradlew compileJava
ret=$?
if [ $ret -ne 0 ]; then
exit $ret
fi
rm -rf build
exit
|
Fix OCR deletion in Datafari | #!/bin/bash
#
#
# MCF control script
#
#
cd $MCF_HOME
OPTIONSFILE="options.env.unix"
OPTIONS=$(cat "$OPTIONSFILE")
cmd_start() {
echo "Starting MCF Agent ..."
./executecommand.sh org.apache.manifoldcf.core.LockClean
start-stop-daemon --background --chdir=$MCF_HOME --start --make-pidfile --pidfile $MCF_PID_FILE --exec \
/usr/bin/env LD_LIBRARY_PATH=${DATAFARI_HOME}/ocr/libtiff/lib:${DATAFARI_HOME}/ocr/tesseract/lib:${DATAFARI_HOME}/ocr/leptonica/lib \
TESSDATA_PREFIX=${DATAFARI_HOME}/ocr/tesseract/bin \
LC_NUMERIC=C \
"$JAVA_HOME/bin/java" -- $OPTIONS org.apache.manifoldcf.agents.AgentRun
sleep 1
echo "MCF Agent started with PID $(cat $MCF_PID_FILE)"
return 0
}
cmd_stop() {
echo -n "Stopping MCF Agent ..."
./executecommand.sh org.apache.manifoldcf.agents.AgentStop
rm -f $MCF_PID_FILE
}
cmd_status() {
if is_running; then
echo "MCF Agent is running:"
ps -o pid,cmd --width 5000 -p $(cat $MCF_PID_FILE)
else
echo "MCF Agent is not running."
fi
}
COMMAND=$1
case $COMMAND in
start)
cmd_start
;;
stop)
cmd_stop
;;
esac
| #!/bin/bash
#
#
# MCF control script
#
#
cd $MCF_HOME
OPTIONSFILE="options.env.unix"
OPTIONS=$(cat "$OPTIONSFILE")
cmd_start() {
echo "Starting MCF Agent ..."
./executecommand.sh org.apache.manifoldcf.core.LockClean
start-stop-daemon --background --chdir=$MCF_HOME --start --make-pidfile --pidfile $MCF_PID_FILE --exec \
"$JAVA_HOME/bin/java" -- $OPTIONS org.apache.manifoldcf.agents.AgentRun
sleep 1
echo "MCF Agent started with PID $(cat $MCF_PID_FILE)"
return 0
}
cmd_stop() {
echo -n "Stopping MCF Agent ..."
./executecommand.sh org.apache.manifoldcf.agents.AgentStop
rm -f $MCF_PID_FILE
}
cmd_status() {
if is_running; then
echo "MCF Agent is running:"
ps -o pid,cmd --width 5000 -p $(cat $MCF_PID_FILE)
else
echo "MCF Agent is not running."
fi
}
COMMAND=$1
case $COMMAND in
start)
cmd_start
;;
stop)
cmd_stop
;;
esac
|
Add wait for selenium server, add browser name. | #!/bin/bash
source /opt/bin/functions.sh
export GEOMETRY="$SCREEN_WIDTH""x""$SCREEN_HEIGHT""x""$SCREEN_DEPTH"
function shutdown {
kill -s SIGTERM $NODE_PID
wait $NODE_PID
}
if [ ! -z "$SE_OPTS" ]; then
echo "appending selenium options: ${SE_OPTS}"
fi
SERVERNUM=$(get_server_num)
rm -f /tmp/.X*lock
xvfb-run -n $SERVERNUM --server-args="-screen 0 $GEOMETRY -ac +extension RANDR" \
java ${JAVA_OPTS} -jar /opt/selenium/selenium-server-standalone.jar \
${SE_OPTS} &
NODE_PID=$!
trap shutdown SIGTERM SIGINT
for SUITE in "$@"
do
java -jar support/selenese-runner.jar "$SUITE" --config "support/config" --baseurl "$SELENSE_BASE_URL" --driver remote --remote-url "http://127.0.0.1:4444/wd/hub"
done
shutdown
exit
| #!/bin/bash
source /opt/bin/functions.sh
export GEOMETRY="$SCREEN_WIDTH""x""$SCREEN_HEIGHT""x""$SCREEN_DEPTH"
function shutdown {
kill -s SIGTERM $NODE_PID
wait $NODE_PID
}
if [ ! -z "$SE_OPTS" ]; then
echo "appending selenium options: ${SE_OPTS}"
fi
SERVERNUM=$(get_server_num)
rm -f /tmp/.X*lock
xvfb-run -n $SERVERNUM --server-args="-screen 0 $GEOMETRY -ac +extension RANDR" \
java ${JAVA_OPTS} -jar /opt/selenium/selenium-server-standalone.jar \
${SE_OPTS} &
NODE_PID=$!
trap shutdown SIGTERM SIGINT
sleep 10
for SUITE in "$@"
do
java -jar support/selenese-runner.jar "$SUITE" --config "support/config" --baseurl "$SELENSE_BASE_URL" --driver remote --remote-url "http://127.0.0.1:4444/wd/hub" --remote-browser firefox
done
shutdown
exit
|
Install gcc to compile the docker run wrapper | #!/bin/bash
pushd $(dirname $0) >/dev/null || exit $?
SCRIPTROOT=$(pwd)
popd >/dev/null
# install docker
curl -sSL https://get.docker.io/ubuntu/ | sudo sh
# install nodejs
curl -sL https://deb.nodesource.com/setup | sudo bash -
sudo apt-get install --assume-yes nodejs
# install sails.js
sudo npm install --global sails@0.10.5
# install the web app
sudo rsync -av $SCRIPTROOT/web/ /var/web/
pushd /var/web && sudo npm install && popd
# build the Varnish docker image
sudo docker build --force-rm --tag="varnish4" $SCRIPTROOT/varnish4/
# install the setuid run-varnish-container script
sudo mkdir --parents /opt/vclfiddle/
sudo gcc $SCRIPTROOT/run-varnish-container.c -o /opt/vclfiddle/run-varnish-container
sudo cp $SCRIPTROOT/run-varnish-container.pl /opt/vclfiddle/run-varnish-container.pl
sudo chown root:root /opt/vclfiddle/run-varnish-container*
sudo chmod 04755 /opt/vclfiddle/run-varnish-container
sudo chmod 755 /opt/vclfiddle/run-varnish-container.pl
| #!/bin/bash
pushd $(dirname $0) >/dev/null || exit $?
SCRIPTROOT=$(pwd)
popd >/dev/null
# install docker
curl -sSL https://get.docker.io/ubuntu/ | sudo sh
# install nodejs
curl -sL https://deb.nodesource.com/setup | sudo bash -
sudo apt-get install --assume-yes nodejs
# install sails.js
sudo npm install --global sails@0.10.5
# install the web app
sudo rsync -av $SCRIPTROOT/web/ /var/web/
pushd /var/web && sudo npm install && popd
# build the Varnish docker image
sudo docker build --force-rm --tag="varnish4" $SCRIPTROOT/varnish4/
# install the setuid run-varnish-container script
sudo apt-get install --assume-yes gcc
sudo mkdir --parents /opt/vclfiddle/
sudo gcc $SCRIPTROOT/run-varnish-container.c -o /opt/vclfiddle/run-varnish-container
sudo cp $SCRIPTROOT/run-varnish-container.pl /opt/vclfiddle/run-varnish-container.pl
sudo chown root:root /opt/vclfiddle/run-varnish-container*
sudo chmod 04755 /opt/vclfiddle/run-varnish-container
sudo chmod 755 /opt/vclfiddle/run-varnish-container.pl
|
Discard refreshing poms after committing due to file permissions | #!/bin/sh
PID=$$
RUNNING=`ps -ef | grep synchronize.sh | grep -v 'sh -c' | grep -v grep | grep -v $PID`
if [ ! -z "$RUNNING" ]; then
echo Sync already running... exiting
echo $RUNNING
exit 1
fi
TOOLS_BASE=/home/projects/maven/repository-tools
(
cd $TOOLS_BASE/syncopate
./sync
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
)
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
(
cd $TOOLS_BASE/repoclean
./repoclean.sh synchronize.properties
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
)
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
(
cd $TOOLS_BASE/ibiblio-sync
./synchronize-codehaus-to-ibiblio.sh
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
)
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
| #!/bin/sh
PID=$$
RUNNING=`ps -ef | grep synchronize.sh | grep -v 'sh -c' | grep -v grep | grep -v $PID`
if [ ! -z "$RUNNING" ]; then
echo Sync already running... exiting
echo $RUNNING
exit 1
fi
TOOLS_BASE=/home/projects/maven/repository-tools
(
cd $TOOLS_BASE/syncopate
./sync
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
)
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
(
cd $TOOLS_BASE/repoclean
./repoclean.sh synchronize.properties
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
)
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
# get poms from svn and generate checksums
(
/home/projects/maven/components/maven-meeper/src/bin/update-poms.sh
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
)
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
(
cd $TOOLS_BASE/ibiblio-sync
./synchronize-codehaus-to-ibiblio.sh
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
)
retval=$?; if [ $retval != 0 ]; then exit $retval; fi
|
Update CF url mapping script to target production | #!/bin/bash -x
cf map sagan blog interface21.com &
cf map sagan blog springsource.com &
cf map sagan blog springsource.org &
cf map sagan interface21.com &
cf map sagan spring.io &
cf map sagan springframework.io &
cf map sagan springframework.org &
cf map sagan springsource.com &
cf map sagan springsource.org &
cf map sagan static springframework.org
cf map sagan static springsource.org
cf map sagan www interface21.com &
cf map sagan www spring.io &
cf map sagan www springframework.io &
cf map sagan www springframework.org &
cf map sagan www springsource.com &
cf map sagan www springsource.org &
| #!/bin/bash -x
cf switch-space production || exit
cf map sagan blog interface21.com &
cf map sagan blog springsource.com &
cf map sagan blog springsource.org &
cf map sagan interface21.com &
cf map sagan spring.io &
cf map sagan springframework.io &
cf map sagan springframework.org &
cf map sagan springsource.com &
cf map sagan springsource.org &
cf map sagan static springframework.org
cf map sagan static springsource.org
cf map sagan www interface21.com &
cf map sagan www spring.io &
cf map sagan www springframework.io &
cf map sagan www springframework.org &
cf map sagan www springsource.com &
cf map sagan www springsource.org &
|
Add the eth0 config into interfaces.d | #!/bin/sh -eux
ubuntu_version="`lsb_release -r | awk '{print $2}'`";
major_version="`echo $ubuntu_version | awk -F. '{print $1}'`";
if [ "$major_version" -le "15" ]; then
echo "Disabling automatic udev rules for network interfaces in Ubuntu"
# Disable automatic udev rules for network interfaces in Ubuntu,
# source: http://6.ptmc.org/164/
rm -f /etc/udev/rules.d/70-persistent-net.rules;
mkdir -p /etc/udev/rules.d/70-persistent-net.rules;
rm -f /lib/udev/rules.d/75-persistent-net-generator.rules;
rm -rf /dev/.udev/ /var/lib/dhcp3/* /var/lib/dhcp/*;
fi
# Adding a 2 sec delay to the interface up, to make the dhclient happy
echo "pre-up sleep 2" >>/etc/network/interfaces;
| #!/bin/sh -eux
ubuntu_version="`lsb_release -r | awk '{print $2}'`";
major_version="`echo $ubuntu_version | awk -F. '{print $1}'`";
if [ "$major_version" -le "15" ]; then
echo "Disabling automatic udev rules for network interfaces in Ubuntu"
# Disable automatic udev rules for network interfaces in Ubuntu,
# source: http://6.ptmc.org/164/
rm -f /etc/udev/rules.d/70-persistent-net.rules;
mkdir -p /etc/udev/rules.d/70-persistent-net.rules;
rm -f /lib/udev/rules.d/75-persistent-net-generator.rules;
rm -rf /dev/.udev/ /var/lib/dhcp3/* /var/lib/dhcp/*;
fi
# Set up eth0
echo "auto eth0\niface eth0 inet dhcp" >> /etc/network/interfaces.d/eth0.cfg
# Adding a 2 sec delay to the interface up, to make the dhclient happy
echo "pre-up sleep 2" >>/etc/network/interfaces;
sed -ie 's/GRUB_CMDLINE_LINUX="\(.*\)"/GRUB_CMDLINE_LINUX="net.ifnames=0 biosdevname=0 \1"/g' /etc/default/grub
update-grub
|
Add todo note for later | #!/bin/bash -e
#
# Summary
# gl.sh - summarize a particular user's contributions to a repository.
#
# Usage
# ./gl.sh [user-name]
#
# Description
# Skims the output of an abbreviated git log for contributions by the provided
# user's name, then sums the numbers associated with three metrics:
# 1) File(s) changed
# 2) Line insertions
# 3) Lines deletions
# Which are finally summarised respectively as `files', `green', and `red'.
#
if [ $1 ]; then
git log --shortstat --author "$1" | grep "files\? changed" | awk '{f+=$1; i+=$4; d+=$6} END {print "files:", f, "green:", i, "red", d}'
else
echo "Usage: ./this username"
exit
fi
| #!/bin/bash -e
### TODO if no user specified, just list all users' contributions
#
# Summary
# gl.sh - summarize a particular user's contributions to a repository.
#
# Usage
# ./gl.sh [user-name]
#
# Description
# Skims the output of an abbreviated git log for contributions by the provided
# user's name, then sums the numbers associated with three metrics:
# 1) File(s) changed
# 2) Line insertions
# 3) Lines deletions
# Which are finally summarised respectively as `files', `green', and `red'.
#
if [ $1 ]; then
git log --shortstat --author "$1" | grep "files\? changed" | awk '{f+=$1; i+=$4; d+=$6} END {print "files:", f, "green:", i, "red", d}'
else
echo "Usage: ./this username"
exit
fi
|
Revert host path to more standard /srv | docker rm -f syncthing
docker run -d --restart=always \
-v /nut/syncthing-sync:/srv/data \
-v /nut/syncthing-config:/srv/config \
-p 22000:22000 -p 21027:21027/udp \
--name syncthing \
justifiably/syncthing
| docker rm -f syncthing
docker run -d --restart=always \
-v /srv/syncthing-sync:/srv/data \
-v /srv/syncthing-config:/srv/config \
-p 22000:22000 -p 21027:21027/udp \
--name syncthing \
justifiably/syncthing
|
Move where the add happens | #! /usr/bin/env bash
set -e -u -o pipefail
here="$(dirname "$BASH_SOURCE")"
cd "$here"
npm i
make clean
make
# First let's get the index clean from other files that CI runs create.
git add -A ./ ../go/ ../shared/
# Protocol changes could create diffs in the following directories:
# protocol/
# go/
# shared/
# This build process is idempotent. We expect there to be no changes after
# re-running the protocol generation, because any changes should have been
# checked in.
git diff HEAD -- ./ ../go/ ../shared/ # for testing
git diff --exit-code # for testing
echo $? # for testing
if ! git diff --quiet --exit-code HEAD -- ./ ../go/ ../shared/; then
echo 'ERROR: `git diff` detected changes. The generated protocol files are stale.'
exit 1
fi
echo 'SUCCESS: The generated protocol files are up to date.'
| #! /usr/bin/env bash
set -e -u -o pipefail
here="$(dirname "$BASH_SOURCE")"
cd "$here"
# First let's get the index clean from other files that CI runs
git add -A ./ ../go/ ../shared/
npm i
make clean
make
# Protocol changes could create diffs in the following directories:
# protocol/
# go/
# shared/
# This build process is idempotent. We expect there to be no changes after
# re-running the protocol generation, because any changes should have been
# checked in.
if ! git diff --quiet --exit-code HEAD -- ./ ../go/ ../shared/; then
echo 'ERROR: `git diff` detected changes. The generated protocol files are stale.'
exit 1
fi
echo 'SUCCESS: The generated protocol files are up to date.'
|
Set lenskit.web.url for CI site builds | #!/bin/sh
DEPLOY_JDK=oraclejdk7
. etc/ci/ci-helpers.sh
skip_unless_master_build site
case "$TRAVIS_BRANCH" in
master|release/*) DO_RUN=yes;;
*) skip "site disabled for branch $TRAVIS_BRANCH";;
esac
TARGET=$(echo "$TRAVIS_BRANCH" | sed -e 's@/@-@g')
echo "Building Maven site for $TARGET"
cmd mvn --batch-mode site site:stage
cmd -d target/staging zip -qr ../lenskit-site.zip *
if [ -n "$SITE_UPLOAD_URL" ]; then
cmd python etc/ci/upload-site.py target/lenskit-site.zip \
"$TRAVIS_BRANCH" "$SITE_UPLOAD_URL"
else
echo "No upload URL, skipping upload"
fi | #!/bin/sh
DEPLOY_JDK=oraclejdk7
. etc/ci/ci-helpers.sh
skip_unless_master_build site
case "$TRAVIS_BRANCH" in
master|release/*) DO_RUN=yes;;
*) skip "site disabled for branch $TRAVIS_BRANCH";;
esac
TARGET=$(echo "$TRAVIS_BRANCH" | sed -e 's@/@-@g')
echo "Building Maven site for $TARGET"
cmd mvn --batch-mode site site:stage -Dlenskit.web.url=http://dev.grouplens.org/lenskit/$TARGET
cmd -d target/staging zip -qr ../lenskit-site.zip *
if [ -n "$SITE_UPLOAD_URL" ]; then
cmd python etc/ci/upload-site.py target/lenskit-site.zip \
"$TRAVIS_BRANCH" "$SITE_UPLOAD_URL"
else
echo "No upload URL, skipping upload"
fi |
Add tree to installs list for brew on OS X. | #!/bin/zsh
brew tap homebrew/versions
brew tap staticfloat/julia
formulas=(zsh-syntax-highlighting leiningen elixir ghc cabal-install python3 \
clojurescript rust julia go tmux ssh-copy-id rcm)
for formula in "${formulas[@]}"
do
brew install "$formula"
done
| #!/bin/zsh
brew tap homebrew/versions
brew tap staticfloat/julia
formulas=(zsh-syntax-highlighting leiningen elixir ghc cabal-install python3 \
clojurescript rust julia go tmux ssh-copy-id rcm tree)
for formula in "${formulas[@]}"
do
brew install "$formula"
done
|
Fix typo in prerequisites script | #!/bin/bash
# Determine package manager
YUM_CMD=$(which yum)
APT_GET_CMD=$(which apt-get)
if [[ "$YUM_CMD" != "" ]]; then
sudo yum install -y libvirt libvirt-devel libvirt-daemon-kvm syslinux-tftpboot device-mapper-libs qemu-kvm net-tools;
sudo yum upgrade -y "device-mapper-libs";
rpm --import https://www.rabbitmq.com/rabbitmq-signing-key-public.asc;
sudo yum install -y --nogpg rabbitmq-server;
elif [[ "$APT_GET_CMD" != "" ]]; then
sudo apt-get -y install libvirt-bin libvirt-dev syslinux pxelinux rabbitmq-server qemu-kvm net-tools;
sudo systemctl stop rabbimq-server
sudo systemctl disable rabbimq-server
else
echo "Error: Package manager was not found. Cannot continue with the installation.";
exit 1;
fi
which solvent > /dev/null || (echo "Error: solvent was not found. Please install it first." && exit 1)
./sh/install_inaugurator.sh
| #!/bin/bash
# Determine package manager
YUM_CMD=$(which yum)
APT_GET_CMD=$(which apt-get)
if [[ "$YUM_CMD" != "" ]]; then
sudo yum install -y libvirt libvirt-devel libvirt-daemon-kvm syslinux-tftpboot device-mapper-libs qemu-kvm net-tools;
sudo yum upgrade -y "device-mapper-libs";
rpm --import https://www.rabbitmq.com/rabbitmq-signing-key-public.asc;
sudo yum install -y --nogpg rabbitmq-server;
elif [[ "$APT_GET_CMD" != "" ]]; then
sudo apt-get -y install libvirt-bin libvirt-dev syslinux pxelinux rabbitmq-server qemu-kvm net-tools;
sudo systemctl stop rabbitmq-server
sudo systemctl disable rabbitmq-server
else
echo "Error: Package manager was not found. Cannot continue with the installation.";
exit 1;
fi
which solvent > /dev/null || (echo "Error: solvent was not found. Please install it first." && exit 1)
./sh/install_inaugurator.sh
|
Fix incorrect signing key filename | #!/usr/bin/env bash
if [ "$TRAVIS_BRANCH" = 'master' ] && [ "$TRAVIS_PULL_REQUEST" == 'false' ]; then
openssl aes-256-cbc -K $encrypted_46b4bbc90abb_key -iv $encrypted_46b4bbc90abb_iv -in cd/codesigning.asc.enc -out cd/codesigning.asc -d
gpg --fast-import cd/signingkey.asc
fi | #!/usr/bin/env bash
if [ "$TRAVIS_BRANCH" = 'master' ] && [ "$TRAVIS_PULL_REQUEST" == 'false' ]; then
openssl aes-256-cbc -K $encrypted_46b4bbc90abb_key -iv $encrypted_46b4bbc90abb_iv -in cd/codesigning.asc.enc -out cd/codesigning.asc -d
gpg --fast-import cd/codesigning.asc
fi |
Include more build log lines in email notification | #!/bin/bash
# Generate a build ID based on the current time and the PID of this script
build="$(date '+%s')-$$"
# Do the build and capture its output in a .log file
make do_release 2>&1 | tee -a ${build}.log
# Check the exit status of `make`, not `tee`
if [[ ${PIPESTATUS[0]} -eq 0 ]]; then
subject="omnibus-gitlab build ${build} SUCCESS"
else
subject="omnibus-gitlab build ${build} FAIL"
fi
# We assume that email to the current system user will somehow reach the right
# human eyes
tail ${build}.log | sed 's/.*\r//' | mail -s "${subject}" $(cat ~/.forward)
| #!/bin/bash
# Generate a build ID based on the current time and the PID of this script
build="$(date '+%s')-$$"
# Do the build and capture its output in a .log file
make do_release 2>&1 | tee -a ${build}.log
# Check the exit status of `make`, not `tee`
if [[ ${PIPESTATUS[0]} -eq 0 ]]; then
subject="omnibus-gitlab build ${build} SUCCESS"
else
subject="omnibus-gitlab build ${build} FAIL"
fi
# We assume that email to the current system user will somehow reach the right
# human eyes
tail -n 20 ${build}.log | sed 's/.*\r//' | mail -s "${subject}" $(cat ~/.forward)
|
Add the rest of thoughtbot awesome key bindings | # makes color constants available
autoload -U colors
colors
# enable colored output from ls, etc
export LSCOLORS="Gxfxcxdxbxegedabagacad"
# History settings
setopt hist_ignore_all_dups
setopt inc_append_history
setopt hist_reduce_blanks
setopt extended_history
setopt share_history
if [ -z $HISTFILE ]; then
HISTFILE=$HOME/.zsh_history
fi
HISTSIZE=10000
SAVEHIST=10000
# Awesome cd movements
setopt autocd autopushd pushdminus pushdsilent pushdtohome cdablevars
DIRSTACKSIZE=5
setopt no_bg_nice # don't nice background tasks
setopt no_hup
setopt no_list_beep
setopt local_options # allow functions to have local options
setopt local_traps # allow functions to have local traps
setopt complete_in_word
unsetopt menu_complete # do not autoselect the first completion entry
unsetopt flowcontrol
setopt auto_menu # show completion menu on succesive tab press
setopt always_to_end
# Enable extended globing
setopt extendedglob
# vi mode
bindkey -v
bindkey "^F" vi-cmd-mode
bindkey jj vi-cmd-mode
# handy keybindings
bindkey "^A" beginning-of-line
bindkey "^E" end-of-line
bindkey "^R" history-incremental-search-backward
bindkey "^P" history-search-backward
| # makes color constants available
autoload -U colors
colors
# enable colored output from ls, etc
export LSCOLORS="Gxfxcxdxbxegedabagacad"
# History settings
setopt hist_ignore_all_dups
setopt inc_append_history
setopt hist_reduce_blanks
setopt extended_history
setopt share_history
if [ -z $HISTFILE ]; then
HISTFILE=$HOME/.zsh_history
fi
HISTSIZE=10000
SAVEHIST=10000
# Awesome cd movements
setopt autocd autopushd pushdminus pushdsilent pushdtohome cdablevars
DIRSTACKSIZE=5
setopt no_bg_nice # don't nice background tasks
setopt no_hup
setopt no_list_beep
setopt local_options # allow functions to have local options
setopt local_traps # allow functions to have local traps
setopt complete_in_word
unsetopt menu_complete # do not autoselect the first completion entry
unsetopt flowcontrol
setopt auto_menu # show completion menu on succesive tab press
setopt always_to_end
# Enable extended globing
setopt extendedglob
# vi mode
bindkey -v
bindkey "^F" vi-cmd-mode
bindkey jj vi-cmd-mode
# handy keybindings
bindkey "^A" beginning-of-line
bindkey "^E" end-of-line
bindkey "^R" history-incremental-search-backward
bindkey "^P" history-search-backward
bindkey "^Y" accept-and-hold
bindkey "^N" insert-last-word
bindkey -s "^T" "^[Isudo ^[A" # "t" for "toughguy"
|
Use listen on 0.0.0.0 not * | #!/bin/bash
bundle install
bundle exec shotgun -o '*' -p 3009
| #!/bin/bash
bundle install
bundle exec shotgun -o '0.0.0.0' -p 3009
|
Fix download script for expat | #!/bin/sh
if [ ! -e talk/third_party/expat-2.0.1 ]; then
if [ ! -e expat-2.0.1.tar.gz ]; then
echo "Downloading expact..."
wget http://sourceforge.net/projects/expat/files/expat/2.0.1/expat-2.0.1.tar.gz/download
fi
echo -n "Extracting expact 2.0.1..."
tar zxpf expat-2.0.1.tar.gz -C talk/third_party
echo "done"
fi
if [ ! -e talk/third_party/srtp ]; then
echo -n "Getting latest srtp..."
cd talk/third_party
cvs -d:pserver:anonymous@srtp.cvs.sourceforge.net:/cvsroot/srtp co -P srtp
echo "done"
cd ../../
fi
| #!/bin/sh
if [ ! -e talk/third_party/expat-2.0.1 ]; then
if [ ! -e expat-2.0.1.tar.gz ]; then
echo "Downloading expact..."
wget http://sourceforge.net/projects/expat/files/expat/2.0.1/expat-2.0.1.tar.gz/download -O expat-2.0.1.tar.gz
fi
echo -n "Extracting expact 2.0.1..."
tar zxpf expat-2.0.1.tar.gz -C talk/third_party
echo "done"
fi
if [ ! -e talk/third_party/srtp ]; then
echo -n "Getting latest srtp..."
cd talk/third_party
cvs -d:pserver:anonymous@srtp.cvs.sourceforge.net:/cvsroot/srtp co -P srtp
echo "done"
cd ../../
fi
|
Provision virtual environments for class work inside Vagrant VM. | # Add PYENV and base 3.4.3 Python
su - vagrant -c "curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash"
su - vagrant -c "pyenv install 3.4.3"
| #!/usr/bin/env bash
# Add PYENV and base Python versions
su - vagrant -c "curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash"
su - vagrant -c "pyenv install 3.4.3"
su - vagrant -c "pyenv install 2.7.9"
# Create VirtualEnvs for Training Levels
su - vagrant -c "pyenv virtualenv 3.4.3 level-0"
su - vagrant -c "pyenv virtualenv 3.4.3 level-1"
su - vagrant -c "pyenv virtualenv 3.4.3 level-2"
su - vagrant -c "pyenv virtualenv 3.4.3 level-3"
su - vagrant -c "pyenv virtualenv 3.4.3 level-4"
|
Check status before tailing log. | #!/bin/bash
tar -zxf $PACKDIR/$PACKFILE -C $WORKDIR
cp $CONFIGDIR/$CONFIGFILE $WORKDIR
$WORKDIR/control restart
$WORKDIR/control tail
| #!/bin/bash
WAIT_SERVICE_READY=10
function check_service(){
status=$($WORKDIR/control status)
echo $status | grep -q "stoped"
if [ $? -eq 0 ] ; then
return 1
else
return 0
fi
}
tar -zxf $PACKDIR/$PACKFILE -C $WORKDIR
cp $CONFIGDIR/$CONFIGFILE $WORKDIR
$WORKDIR/control restart
sleep $WAIT_SERVICE_READY
check_service
if [ $? -eq 0 ] ; then
$WORKDIR/control tail
else
echo "Failed to start."
exit 1
fi
|
Check status before tailing log. | #!/bin/bash
tar -zxf $PACKDIR/$PACKFILE -C $WORKDIR
cp $CONFIGDIR/$CONFIGFILE $WORKDIR/rrd/$CONFIGFILE
virtualenv $WORKDIR/env
pip install -r /home/dashboard/pip_requirements.txt
$WORKDIR/control restart
$WORKDIR/control tail
| #!/bin/bash
WAIT_SERVICE_READY=10
function check_service(){
status=$($WORKDIR/control status)
echo $status | grep -q "stoped"
if [ $? -eq 0 ] ; then
return 1
else
return 0
fi
}
tar -zxf $PACKDIR/$PACKFILE -C $WORKDIR
cp $CONFIGDIR/$CONFIGFILE $WORKDIR/rrd/$CONFIGFILE
virtualenv $WORKDIR/env
pip install -r /home/dashboard/pip_requirements.txt
$WORKDIR/control restart
sleep $WAIT_SERVICE_READY
check_service
if [ $? -eq 0 ] ; then
$WORKDIR/control tail
else
echo "Failed to start."
exit 1
fi
|
Remove some cruft from the system blob test script. | #!/bin/sh
builddir=$1
port=4004
exp=blobgen_exp
env | grep dir
pwd
[ -f ${exp}.sq3 ] && rm -f ${exp}.sq3
[ -f blobgen-server.log ] && rm -f blobgen-server.log
${top_builddir}/server/oml2-server -l $port -d4 --logfile=blobgen-server.log &
server_pid=$!
echo SERVER=${server_pid}
sleep 1
#blobgen=$builddir/test/system/blob/blobgen
blobgen=./blobgen
if [ ! -x ${blobgen} ]; then
echo "Could not find test blob generator \'${blobgen}\'"
exit 1
fi
$blobgen -n 100 --oml-id a --oml-exp-id ${exp} --oml-server localhost:$port || exit 1
echo "Blob generating client finished OK"
sleep 1
kill $server_pid
echo "Analyzing blobs"
# Convert blobs to hex
for i in g*.bin; do
printf "\rConverting binary blobs: $i "
${srcdir}/bin2hex.rb $i
done
printf "\n...done\n"
# Grab blobs from sqlite3
${srcdir}/fromsq3.sh ${exp}
# Calculate the diffs, produce result
${srcdir}/diff.sh
| #!/bin/sh
port=4004
exp=blobgen_exp
[ -f ${exp}.sq3 ] && rm -f ${exp}.sq3
[ -f blobgen-server.log ] && rm -f blobgen-server.log
${top_builddir}/server/oml2-server -l $port -d4 --logfile=blobgen-server.log &
server_pid=$!
echo SERVER=${server_pid}
sleep 1
blobgen=./blobgen
if [ ! -x ${blobgen} ]; then
echo "Could not find test blob generator \'${blobgen}\'"
exit 1
fi
$blobgen -n 100 --oml-id a --oml-exp-id ${exp} --oml-server localhost:$port || exit 1
echo "Blob generating client finished OK"
sleep 1
kill $server_pid
echo "Analyzing blobs"
# Convert blobs to hex
for i in g*.bin; do
printf "\rConverting binary blobs: $i "
${srcdir}/bin2hex.rb $i
done
printf "\n...done\n"
# Grab blobs from sqlite3
${srcdir}/fromsq3.sh ${exp}
# Calculate the diffs, produce result
${srcdir}/diff.sh
|
Remove md5 files (not using them at the moment...) | #!/bin/bash
mkdir -p gz
cd gz
wget -r --no-parent --no-directories ftp://ftp.nlm.nih.gov/nlmdata/.medleasebaseline/gz/ > ../out.log 2> ../err.log
gunzip *.gz >> ../out.log 2>> ../err.log
| #!/bin/bash
mkdir -p gz
cd gz
wget -r --no-parent --no-directories ftp://ftp.nlm.nih.gov/nlmdata/.medleasebaseline/gz/ > ../out.log 2> ../err.log
gunzip *.gz >> ../out.log 2>> ../err.log
rm *.md5
|
Remove unused parameter from fetchGrammarKit.sh invocation. | #!/bin/bash
if [[ $# -eq 0 ]] ; then
echo 'This script must be called with the version of IDEA to build'
echo 'example: ./build.sh 13.1.6'
exit 1
fi
./fetchIdea.sh "$1"
./fetchGrammarKit.sh "$1"
#call the build script along with the path to a code package
#specific to the intellij version which we build against
ant -f build.xml -Dversion="$1"
rm -rf idea-IU
| #!/bin/bash
if [[ $# -eq 0 ]] ; then
echo 'This script must be called with the version of IDEA to build'
echo 'example: ./build.sh 13.1.6'
exit 1
fi
./fetchIdea.sh "$1"
./fetchGrammarKit.sh
#call the build script along with the path to a code package
#specific to the intellij version which we build against
ant -f build.xml -Dversion="$1"
rm -rf idea-IU
|
Fix keyserver public key value | #! /bin/bash
set -o errexit
set -o nounset
install_ponyc(){
echo -e "\033[0;32mInstalling latest ponyc release\033[0m"
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys "8756 C4F7 65C9 AC3C B6B8 5D62 379C E192 D401 AB61"
echo "deb https://dl.bintray.com/pony-language/ponyc-debian pony-language main" | sudo tee -a /etc/apt/sources.list
sudo apt-get update
sudo apt-get -V install ponyc
}
install_ponyc
| #! /bin/bash
set -o errexit
set -o nounset
install_ponyc(){
echo -e "\033[0;32mInstalling latest ponyc release\033[0m"
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys "D401AB61 DBE1D0A2"
echo "deb https://dl.bintray.com/pony-language/ponyc-debian pony-language main" | sudo tee -a /etc/apt/sources.list
sudo apt-get update
sudo apt-get -V install ponyc
}
install_ponyc
|
Install cargo fmt if missing | #!/bin/bash
set -eux -o pipefail
cargo fmt -- --check
cargo build
cargo test | #!/bin/bash
set -eux -o pipefail
cargo fmt --version || rustup component add rustfmt
cargo fmt -- --check
cargo build
cargo test |
Install .NET Core 2.2 runtime on Travis | #!/bin/bash
set -e
dotnet --info
# Check all the tools still build
dotnet build build/Tools.sln
# And now the actual web site
dotnet build -c Release src/NodaTime-Web.sln
dotnet test -c Release src/NodaTime.Web.Test
| #!/bin/bash
set -e
# Install the .NET Core 2.2 runtime, as we use ASP.NET Core 2.2 at the moment.
# In the future we'll update to ASP.NET Core 3.0, we can use just the .NET Core 3.0 SDK.
wget -q https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
sudo dpkg -i packages-microsoft-prod.deb
sudo apt-get update
sudo apt-get install apt-transport-https
sudo apt-get update
sudo apt-get install aspnetcore-runtime-2.2=2.2.0-1
dotnet --info
# Check all the tools still build
dotnet build build/Tools.sln
# And now the actual web site
dotnet build -c Release src/NodaTime-Web.sln
dotnet test -c Release src/NodaTime.Web.Test
|
Install middlewared.client with --single-version-externally-managed so it can be imported correctly | #!/usr/bin/env sh
# Author: Joe Maloney
# License: BSD
# Location for tests into REST API of FreeNAS 9.10
# Where is the ixbuild program installed
PROGDIR="`realpath | sed 's|/scripts||g'`" ; export PROGDIR
# Source our Testing functions
. ${PROGDIR}/scripts/functions.sh
. ${PROGDIR}/scripts/functions-tests.sh
# Installl modules
pip3.6 install requests
#################################################################
# Run the tests now!
#################################################################
echo "Using API Address: http://${FNASTESTIP}/api/v2.0"
git clone https://www.github.com/freenas/freenas --depth=1 /freenas
cd /freenas/src/middlewared
pip3.6 uninstall -y middlewared.client
python3.6 setup_client.py install
cd /freenas/src/middlewared/middlewared/pytest
echo [Target] > target.conf
echo hostname = ${FNASTESTIP} >> target.conf
echo api = /api/v2.0/ >> target.conf
echo username = "root" >> target.conf
echo password = "testing" >> target.conf
sed -i '' "s|'freenas'|'testing'|g" functional/test_0001_authentication.py
python3.6 -m pytest -sv functional --junitxml=$RESULTSDIR/results.xml.v2.0
TOTALTESTS="14"
publish_pytest_results "$TOTALCOUNT"
exit 0
| #!/usr/bin/env sh
# Author: Joe Maloney
# License: BSD
# Location for tests into REST API of FreeNAS 9.10
# Where is the ixbuild program installed
PROGDIR="`realpath | sed 's|/scripts||g'`" ; export PROGDIR
# Source our Testing functions
. ${PROGDIR}/scripts/functions.sh
. ${PROGDIR}/scripts/functions-tests.sh
# Installl modules
pip3.6 install requests
#################################################################
# Run the tests now!
#################################################################
echo "Using API Address: http://${FNASTESTIP}/api/v2.0"
git clone https://www.github.com/freenas/freenas --depth=1 /freenas
cd /freenas/src/middlewared
pip3.6 uninstall -y middlewared.client
python3.6 setup_client.py install --single-version-externally-managed --record $(mktemp)
cd /freenas/src/middlewared/middlewared/pytest
echo [Target] > target.conf
echo hostname = ${FNASTESTIP} >> target.conf
echo api = /api/v2.0/ >> target.conf
echo username = "root" >> target.conf
echo password = "testing" >> target.conf
sed -i '' "s|'freenas'|'testing'|g" functional/test_0001_authentication.py
python3.6 -m pytest -sv functional --junitxml=$RESULTSDIR/results.xml.v2.0
TOTALTESTS="14"
publish_pytest_results "$TOTALCOUNT"
exit 0
|
Fix test reference to obsolete ostree-pull | #!/bin/bash
#
# Copyright (C) 2011 Colin Walters <walters@verbum.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
cd ${test_tmpdir}
mkdir repo
${CMD_PREFIX} ostree --repo=repo init
${CMD_PREFIX} ostree --repo=repo remote add origin $(cat httpd-address)/ostree/gnomerepo
${CMD_PREFIX} ostree-pull --repo=repo origin main
${CMD_PREFIX} ostree --repo=repo fsck
echo "ok pull"
cd ${test_tmpdir}
$OSTREE checkout origin/main checkout-origin-main
cd checkout-origin-main
assert_file_has_content firstfile '^first$'
assert_file_has_content baz/cow '^moo$'
echo "ok pull contents"
| #!/bin/bash
#
# Copyright (C) 2011 Colin Walters <walters@verbum.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
cd ${test_tmpdir}
mkdir repo
${CMD_PREFIX} ostree --repo=repo init
${CMD_PREFIX} ostree --repo=repo remote add origin $(cat httpd-address)/ostree/gnomerepo
${CMD_PREFIX} ostree --repo=repo pull origin main
${CMD_PREFIX} ostree --repo=repo fsck
echo "ok pull"
cd ${test_tmpdir}
$OSTREE checkout origin/main checkout-origin-main
cd checkout-origin-main
assert_file_has_content firstfile '^first$'
assert_file_has_content baz/cow '^moo$'
echo "ok pull contents"
|
Build kontrol and fujin too | #! /bin/bash
export GOPATH=$(cd "$(dirname "$0")"; pwd)
export GIT_DIR=$GOPATH/../.git
ldflags="-X koding/tools/lifecycle.version $(git rev-parse HEAD)"
services=(
koding/broker
koding/kites/os
koding/kites/irc
koding/virt/idshift
koding/virt/ldapserver
koding/virt/proxy
koding/virt/vmtool
koding/alice
)
go install -v -ldflags "$ldflags" "${services[@]}"
cp $GOPATH/bin/* $GOPATH/../kites
| #! /bin/bash
export GOPATH=$(cd "$(dirname "$0")"; pwd)
export GIT_DIR=$GOPATH/../.git
ldflags="-X koding/tools/lifecycle.version $(git rev-parse HEAD)"
services=(
koding/broker
koding/kites/os
koding/kites/irc
koding/virt/idshift
koding/virt/ldapserver
koding/virt/proxy
koding/virt/vmtool
koding/alice
koding/kontrol/daemon
koding/kontrol/api
koding/fujin
)
go install -v -ldflags "$ldflags" "${services[@]}"
cp $GOPATH/bin/* $GOPATH/../kites
|
Revert "Move .nvmrc before nvm init in CI" | #!/bin/bash
set -e
# setup nvm
mv ../../../.nvmrc nvmrc # Remove for init otherwise nvm init could fail
if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh
else
source $NVM_DIR/nvm.sh
fi
mv nvmrc ../../../.nvmrc
# install node
NODE_VERSION=8.9.1
nvm install $NODE_VERSION
nvm use $NODE_VERSION
# install yarn
npm i -g yarn | #!/bin/bash
set -e
# setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh
else
source $NVM_DIR/nvm.sh
fi
# install node
NODE_VERSION=8.9.1
nvm install $NODE_VERSION
nvm use $NODE_VERSION
# install yarn
npm i -g yarn |
Use deployed revision for acceptance tests | #!/bin/bash
#
# Runs acceptance tests.
#
# Needs DOCKER_REPOSITORY, BASE_URL and optionally NUM_PROCS
#
# To set them go to Job -> Configure -> Build Environment -> Inject
# passwords and Inject env variables
#
set -ex
ENV_FILE=`mktemp`
echo "DATABASE_URL=sqlite://" >> $ENV_FILE
echo "DEBUG=False" >> $ENV_FILE
echo "ALLOWED_HOSTS=localhost" >> $ENV_FILE
echo "SECRET_KEY=test" >> $ENV_FILE
# Run Tests
docker run --env-file=$ENV_FILE $DOCKER_REPOSITORY:$GIT_COMMIT py.test --acceptance --baseurl=$BASE_URL -n ${NUM_PROCS:-7}
# Delete temp file.
rm -rf $ENV_FILE
| #!/bin/bash
#
# Runs acceptance tests.
#
# Needs DOCKER_REPOSITORY, BASE_URL and optionally NUM_PROCS
#
# To set them go to Job -> Configure -> Build Environment -> Inject
# passwords and Inject env variables
#
set -ex
REVISION=$(curl $BASE_URL/static/revision.txt)
echo REVISION=$REVISION
ENV_FILE=`mktemp`
echo "DATABASE_URL=sqlite://" >> $ENV_FILE
echo "DEBUG=False" >> $ENV_FILE
echo "ALLOWED_HOSTS=localhost" >> $ENV_FILE
echo "SECRET_KEY=test" >> $ENV_FILE
# Run Tests
docker run --env-file=$ENV_FILE $DOCKER_REPOSITORY:$REVISION py.test --acceptance --baseurl=$BASE_URL -n ${NUM_PROCS:-7}
# Delete temp file.
rm -rf $ENV_FILE
|
Make the swift install script cd back to where it starts | #!/bin/bash
SWIFT_URL=https://swift.org/builds/swift-3.0-preview-1/ubuntu1404/swift-3.0-preview-1/swift-3.0-preview-1-ubuntu14.04.tar.gz
# Install Swift
cd ${HOME}
wget $SWIFT_URL -O - | tar xz
# Make sure Swift is not already installed
if [ -d .swift ]; then
rm -rf .swift
fi
# Move to .swift, set PATH
mv $(basename "$SWIFT_URL" ".tar.gz") .swift
export PATH="${HOME}/.swift/usr/bin:${PATH}"
| #!/bin/bash
SWIFT_URL=https://swift.org/builds/swift-3.0-preview-1/ubuntu1404/swift-3.0-preview-1/swift-3.0-preview-1-ubuntu14.04.tar.gz
BASE_DIR=$(pwd)
# Install Swift
cd ${HOME}
wget $SWIFT_URL -O - | tar xz
# Make sure Swift is not already installed
if [ -d .swift ]; then
rm -rf .swift
fi
# Move to .swift, set PATH
mv $(basename "$SWIFT_URL" ".tar.gz") .swift
export PATH="${HOME}/.swift/usr/bin:${PATH}"
# Move back to where we started
cd $BASE_DIR
|
Initialize virtualenv when running Devel::NYTProf profiler | #!/bin/bash
working_dir=`dirname $0`
source $working_dir/set_perl_brew_environment.sh
set -u
set -o errexit
# Make sure Inline::Python uses correct virtualenv
set +u; cd "$working_dir/../"; source mc-venv/bin/activate; set -u
#echo carton "$@"
exec $working_dir/mediawords_carton_wrapper.pl "$@" | #!/bin/bash
working_dir=`dirname $0`
source $working_dir/set_perl_brew_environment.sh
set -u
set -o errexit
# Make sure Inline::Python uses correct virtualenv
set +u; cd "$working_dir/../"; source mc-venv/bin/activate; set -u
# Also set PYTHONHOME for Python to search for modules at correct location
if [ `uname` == 'Darwin' ]; then
# greadlink from coreutils
PYTHONHOME=`greadlink -m mc-venv/`
else
PYTHONHOME=`readlink -m mc-venv/`
fi
#echo carton "$@"
PYTHONHOME=$PYTHONHOME exec $working_dir/mediawords_carton_wrapper.pl "$@"
|
Add lightpaper to Mac software installation list. | set -e
ruby -e "$(curl -fsSL https://raw.github.com/mxcl/homebrew/go)"
# Install GNU core utilities (those that come with OS X are outdated)
# Don’t forget to add `$(brew --prefix coreutils)/libexec/gnubin` to `$PATH`.
brew install coreutils
# Install GNU `find`, `locate`, `updatedb`, and `xargs`, g-prefixed
brew install findutils
# Install latest version of Bash.
brew install bash
brew install wget
brew install imagemagick
# .gitignore boilerplate code (example: "gibo python textmate").
brew install gibo
brew install ctags
brew install hexedit
# Allows generation from notification from command line.
brew install terminal-notifier
# brew-cask converts brew into a package manager for mac packages.
brew install brew-cask
brew cask install google-chrome
brew cask install xquartz # For running X server based apps on Mac.
brew cask install wireshark
brew cask install sophos-antivirus # Free AV protection for Mac.
| set -e
ruby -e "$(curl -fsSL https://raw.github.com/mxcl/homebrew/go)"
# Install GNU core utilities (those that come with OS X are outdated)
# Don’t forget to add `$(brew --prefix coreutils)/libexec/gnubin` to `$PATH`.
brew install coreutils
# Install GNU `find`, `locate`, `updatedb`, and `xargs`, g-prefixed
brew install findutils
# Install latest version of Bash.
brew install bash
brew install wget
brew install imagemagick
# .gitignore boilerplate code (example: "gibo python textmate").
brew install gibo
brew install ctags
brew install hexedit
# Allows generation from notification from command line.
brew install terminal-notifier
# brew-cask converts brew into a package manager for mac packages.
brew install brew-cask
brew cask install google-chrome
brew cask install xquartz # For running X server based apps on Mac.
brew cask install wireshark
brew cask install sophos-antivirus # Free AV protection for Mac.
brew cask install lightpaper # Mark-down editor.
|
Reset bosh releases before mounting into the bosh-release-test container | #!/bin/bash -eux
docker run \
-t \
-i \
--privileged \
-v /Users/pivotal/workspace/nfs-volume-release/:/nfs-volume-release \
-v /Users/pivotal/workspace/mapfs-release:/mapfs-release \
--workdir=/ \
bosh/main-bosh-docker \
/nfs-volume-release/scripts/run-bosh-release-tests-in-docker-env.sh
| #!/bin/bash -eux
pushd ~/workspace/nfs-volume-release
bosh reset-release
popd
pushd ~/workspace/mapfs-release
bosh reset-release
popd
docker run \
-t \
-i \
--privileged \
-v /Users/pivotal/workspace/nfs-volume-release/:/nfs-volume-release \
-v /Users/pivotal/workspace/mapfs-release:/mapfs-release \
--workdir=/ \
bosh/main-bosh-docker \
/nfs-volume-release/scripts/run-bosh-release-tests-in-docker-env.sh
|
Print folding log statements for unit tests only if we run make check | #!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Copyright (c) 2019 The BitcoinUnlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
if [ $DIST != "RPM" ]; then
export LC_ALL=C.UTF-8
fi
cd "build" || (echo "could not enter distdir build"; exit 1)
BEGIN_FOLD unit-tests
if [ "$RUN_TESTS" = "true" ] && ! { [ "$HOST" = "i686-w64-mingw32" ] || [ "$HOST" = "x86_64-w64-mingw32" ]; }; then
travis_wait 50 DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib make $MAKEJOBS check VERBOSE=1;
fi
END_FOLD
BEGIN_FOLD functional-tests
if [ "$RUN_TESTS" = "true" ]; then DOCKER_EXEC qa/pull-tester/rpc-tests.py --coverage --no-ipv6-rpc-listen; fi
END_FOLD
cd ${TRAVIS_BUILD_DIR} || (echo "could not enter travis build dir $TRAVIS_BUILD_DIR"; exit 1)
| #!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Copyright (c) 2019 The BitcoinUnlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
if [ $DIST != "RPM" ]; then
export LC_ALL=C.UTF-8
fi
cd "build" || (echo "could not enter distdir build"; exit 1)
if [ "$RUN_TESTS" = "true" ] && ! { [ "$HOST" = "i686-w64-mingw32" ] || [ "$HOST" = "x86_64-w64-mingw32" ]; }; then
BEGIN_FOLD unit-tests
travis_wait 50 DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib make $MAKEJOBS check VERBOSE=1;
END_FOLD
fi
if [ "$RUN_TESTS" = "true" ]; then
BEGIN_FOLD functional-tests
DOCKER_EXEC qa/pull-tester/rpc-tests.py --coverage --no-ipv6-rpc-listen;
END_FOLD
fi
cd ${TRAVIS_BUILD_DIR} || (echo "could not enter travis build dir $TRAVIS_BUILD_DIR"; exit 1)
|
Update postgresql to 9.4.1209 JDBC 42 | #!/bin/bash
set -e
mkdir -p $TEAMCITY_DATA_PATH/lib/jdbc $TEAMCITY_DATA_PATH/config
if [ ! -f "$TEAMCITY_DATA_PATH/lib/jdbc/postgresql-9.3-1103.jdbc41.jar" ];
then
echo "Downloading postgress JDBC driver..."
wget -P $TEAMCITY_DATA_PATH/lib/jdbc https://jdbc.postgresql.org/download/postgresql-9.3-1103.jdbc41.jar
fi
echo "Starting teamcity..."
exec /opt/TeamCity/bin/teamcity-server.sh run
| #!/bin/bash
set -e
mkdir -p $TEAMCITY_DATA_PATH/lib/jdbc $TEAMCITY_DATA_PATH/config
if [ ! -f "$TEAMCITY_DATA_PATH/lib/jdbc/postgresql-9.4.1209.jar" ];
then
echo "Downloading postgress JDBC driver..."
wget -P $TEAMCITY_DATA_PATH/lib/jdbc https://jdbc.postgresql.org/download/postgresql-9.4.1209.jar
fi
echo "Starting teamcity..."
exec /opt/TeamCity/bin/teamcity-server.sh run
|
Switch from deprecated scheduler.rb to background-worker.rb. | #!/bin/bash
set -e
if [ "$1" = 'zammad' ]; then
echo -e "\n Starting services... \n"
# starting services
service postgresql start
service elasticsearch start
service postfix start
service memcached start
service redis-server start
service nginx start
# wait for postgres processe coming up
until su - postgres -c 'psql -c "select version()"' &> /dev/null; do
echo "Waiting for PostgreSQL to be ready..."
sleep 2
done
cd "${ZAMMAD_DIR}"
echo -e "\n Starting Zammad... \n"
su -c "bundle exec script/websocket-server.rb -b 0.0.0.0 start &" zammad
su -c "bundle exec script/scheduler.rb start &" zammad
# show url
echo -e "\nZammad will be ready in some seconds! Visit http://localhost in your browser!"
# start railsserver
if [ "${RAILS_SERVER}" == "puma" ]; then
su -c "bundle exec puma -b tcp://0.0.0.0:3000 -e ${RAILS_ENV}" zammad
elif [ "${RAILS_SERVER}" == "unicorn" ]; then
su -c "bundle exec unicorn -p 3000 -c config/unicorn.rb -E ${RAILS_ENV}" zammad
fi
fi
| #!/bin/bash
set -e
if [ "$1" = 'zammad' ]; then
echo -e "\n Starting services... \n"
# starting services
service postgresql start
service elasticsearch start
service postfix start
service memcached start
service redis-server start
service nginx start
# wait for postgres processe coming up
until su - postgres -c 'psql -c "select version()"' &> /dev/null; do
echo "Waiting for PostgreSQL to be ready..."
sleep 2
done
cd "${ZAMMAD_DIR}"
echo -e "\n Starting Zammad... \n"
su -c "bundle exec script/websocket-server.rb -b 0.0.0.0 start &" zammad
su -c "bundle exec script/background-worker.rb start &" zammad
# show url
echo -e "\nZammad will be ready in some seconds! Visit http://localhost in your browser!"
# start railsserver
if [ "${RAILS_SERVER}" == "puma" ]; then
su -c "bundle exec puma -b tcp://0.0.0.0:3000 -e ${RAILS_ENV}" zammad
elif [ "${RAILS_SERVER}" == "unicorn" ]; then
su -c "bundle exec unicorn -p 3000 -c config/unicorn.rb -E ${RAILS_ENV}" zammad
fi
fi
|
Add missing grep -v filter. | #!/bin/bash
# set -x
# This list is from https://charlie.bz/blog/things-that-clear-rubys-method-cache
IGNORE_FILE=/tmp/cache_busters_ignore
cat script/ignores | ruby -ne 'puts $_.split(/\s+###/)[0]' > $IGNORE_FILE
egrep 'def [a-z]*\..*' -R lib | grep -v "def self" | grep -v -f $IGNORE_FILE
grep undef -R lib | grep -v -f $IGNORE_FILE
grep alias_method -R lib
grep remove_method -R lib | grep -v -f $IGNORE_FILE
grep const_set -R lib | grep -v -f $IGNORE_FILE
grep remove_const -R lib | grep -v -f $IGNORE_FILE
egrep '\bextend\b' -R lib | grep -v -f $IGNORE_FILE
grep 'Class.new' -R lib | grep -v -f $IGNORE_FILE
grep private_constant -R lib | grep -v -f $IGNORE_FILE
grep public_constant -R lib | grep -v -f $IGNORE_FILE
grep "Marshal.load" -R lib | grep -v -f $IGNORE_FILE
grep "OpenStruct.new" -R lib | grep -v -f $IGNORE_FILE
| #!/bin/bash
# set -x
# This list is from https://charlie.bz/blog/things-that-clear-rubys-method-cache
IGNORE_FILE=/tmp/cache_busters_ignore
cat script/ignores | ruby -ne 'puts $_.split(/\s+###/)[0]' > $IGNORE_FILE
egrep 'def [a-z]*\..*' -R lib | grep -v "def self" | grep -v -f $IGNORE_FILE
grep undef -R lib | grep -v -f $IGNORE_FILE
grep alias_method -R lib | grep -v -f $IGNORE_FILE
grep remove_method -R lib | grep -v -f $IGNORE_FILE
grep const_set -R lib | grep -v -f $IGNORE_FILE
grep remove_const -R lib | grep -v -f $IGNORE_FILE
egrep '\bextend\b' -R lib | grep -v -f $IGNORE_FILE
grep 'Class.new' -R lib | grep -v -f $IGNORE_FILE
grep private_constant -R lib | grep -v -f $IGNORE_FILE
grep public_constant -R lib | grep -v -f $IGNORE_FILE
grep "Marshal.load" -R lib | grep -v -f $IGNORE_FILE
grep "OpenStruct.new" -R lib | grep -v -f $IGNORE_FILE
|
Add gdb to packages list | #!/bin/bash
#
# Should be run as local user
#
if [ "$(id -u)" == "0" ]; then
echo "This script must not be run as root"
exit 1
fi
cd ~/
mkdir repo
mkdir repo/github
cd repo/github
sudo apt-get install git htop clang
git clone https://github.com/severalgh/scripts.git
git clone https://github.com/severalgh/dotfiles.git
./scripts/vim_from_src.sh
cd ~/
cp repo/github/dotfiles/vim/.vimrc ./
./repo/github/scripts/vundle_checkout.sh
cp repo/github/dotfiles/vim/.ycm_extra_conf.py ~/.vim/
| #!/bin/bash
#
# Should be run as local user
#
if [ "$(id -u)" == "0" ]; then
echo "This script must not be run as root"
exit 1
fi
cd ~/
mkdir repo
mkdir repo/github
cd repo/github
sudo apt-get install git htop clang gdb
git clone https://github.com/severalgh/scripts.git
git clone https://github.com/severalgh/dotfiles.git
./scripts/vim_from_src.sh
cd ~/
cp repo/github/dotfiles/vim/.vimrc ./
./repo/github/scripts/vundle_checkout.sh
cp repo/github/dotfiles/vim/.ycm_extra_conf.py ~/.vim/
|
Clean up /var/log/auth.log at boot time | log "substituting placeholders for real data in config files"
substitute_files=($(find /etc /opt/local/etc -type f | sort | xargs \
egrep -l '@(PUBLIC_IP|PRIVATE_IP|HOSTNAME|ZONENAME)@' || true))
for file in ${substitute_files[@]}; do
if sed -e "s/@PUBLIC_IP@/${PUBLIC_IP}/g" -e "s/@PRIVATE_IP@/${PRIVATE_IP}/g" \
-e "s/@HOSTNAME@/${HOSTNAME}/g" -e "s/@ZONENAME@/${ZONENAME}/g" \
${file} > ${file}.tmp; then
mv ${file}{.tmp,}
fi
done
if sed -e "/tmpfs/s/-$/size=${TMPFS}/" /etc/vfstab > /etc/vfstab.tmp 2>/dev/null; then
mv /etc/vfstab{.tmp,}
fi
| log "cleaning files"
cp /dev/null /var/log/auth.log >/dev/null
log "substituting placeholders for real data in config files"
substitute_files=($(find /etc /opt/local/etc -type f | sort | xargs \
egrep -l '@(PUBLIC_IP|PRIVATE_IP|HOSTNAME|ZONENAME)@' || true))
for file in ${substitute_files[@]}; do
if sed -e "s/@PUBLIC_IP@/${PUBLIC_IP}/g" -e "s/@PRIVATE_IP@/${PRIVATE_IP}/g" \
-e "s/@HOSTNAME@/${HOSTNAME}/g" -e "s/@ZONENAME@/${ZONENAME}/g" \
${file} > ${file}.tmp; then
mv ${file}{.tmp,}
fi
done
if sed -e "/tmpfs/s/-$/size=${TMPFS}/" /etc/vfstab > /etc/vfstab.tmp 2>/dev/null; then
mv /etc/vfstab{.tmp,}
fi
|
Fix bash completion when the SDK is not installed | # bash parameter completion for the dotnet CLI
_dotnet_bash_complete()
{
local word=${COMP_WORDS[COMP_CWORD]}
local completions
completions="$(dotnet complete --position "${COMP_POINT}" "${COMP_LINE}")"
COMPREPLY=( $(compgen -W "$completions" -- "$word") )
}
complete -f -F _dotnet_bash_complete dotnet
| # bash parameter completion for the dotnet CLI
_dotnet_bash_complete()
{
local word=${COMP_WORDS[COMP_CWORD]}
local completions
completions="$(dotnet complete --position "${COMP_POINT}" "${COMP_LINE}" 2>/dev/null)"
if [ $? -ne 0 ]; then
completions=""
fi
COMPREPLY=( $(compgen -W "$completions" -- "$word") )
}
complete -f -F _dotnet_bash_complete dotnet
|
Change structure of local dependent projects | #!/bin/bash
BASE_DIR=`dirname $0`
if [ $1 ]; then
if [ -d $BASE_DIR/../../$1 ]; then
grunt
cp $BASE_DIR/../js-build/forms-angular.min.js $BASE_DIR/../../$1/app/components/forms-angular/forms-angular.min.js
cp $BASE_DIR/../js-build/forms-angular.js $BASE_DIR/../../$1/app/components/forms-angular/forms-angular.js
cp $BASE_DIR/../app/css/forms-angular.css $BASE_DIR/../../$1/app/components/forms-angular/forms-angular.css
cp $BASE_DIR/../app/partials/base-edit.html $BASE_DIR/../../$1/app/partials/base-edit.html
cp $BASE_DIR/../app/partials/base-list.html $BASE_DIR/../../$1/app/partials/base-list.html
cp $BASE_DIR/../server/lib/data_form.js $BASE_DIR/../../$1/server/lib/data_form.js
clear
echo ""
echo Set up the main Angular.js file based on the following:
echo ""
cat $BASE_DIR/../app/demo/demo.js
echo ""
else
echo ""
echo No such project as
echo `$BASE_DIR/../../$1`
echo ""
fi
else
echo ""
echo Usage: add-forms proj_dir where proj_dir is the target project root folder
echo ""
fi
| #!/bin/bash
BASE_DIR=`dirname $0`
if [ $1 ]; then
if [ -d $BASE_DIR/../../$1 ]; then
grunt
cp $BASE_DIR/../js-build/forms-angular.min.js $BASE_DIR/../../$1/app/lib/forms-angular.min.js
cp $BASE_DIR/../js-build/forms-angular.js $BASE_DIR/../../$1/app/lib/forms-angular.js
cp $BASE_DIR/../app/css/forms-angular.css $BASE_DIR/../../$1/app/lib/forms-angular.css
cp $BASE_DIR/../app/partials/base-edit.html $BASE_DIR/../../$1/app/partials/base-edit.html
cp $BASE_DIR/../app/partials/base-list.html $BASE_DIR/../../$1/app/partials/base-list.html
cp $BASE_DIR/../server/lib/data_form.js $BASE_DIR/../../$1/server/lib/data_form.js
clear
echo ""
echo Set up the main Angular.js file based on the following:
echo ""
cat $BASE_DIR/../app/demo/demo.js
echo ""
else
echo ""
echo No such project as
echo `$BASE_DIR/../../$1`
echo ""
fi
else
echo ""
echo Usage: add-forms proj_dir where proj_dir is the target project root folder
echo ""
fi
|
Correct format for the pack file. | #!/bin/sh
set -e
set -u
NAME=renderer-linux64
DEST=/tmp/pack/$NAME
VERSION=$1
# Build the binaries
scons -c
scons
# Copy the files to an independent folder
rm -rf $DEST
mkdir -p $DEST
cp bin/* $DEST
cp -R dist/renderer.node $DEST/bin
cp -R /cef/* $DEST
rm -r $DEST/include
# Compress the pack with the files
(cd /tmp/pack && tar -vpczf $NAME.tar.gz $NAME)
# Move the packs to the correct folder versioning them
rm -rf packs
mkdir packs
cd packs
mv $DEST.tar.gz $NAME.$VERSION.tar.gz
| #!/bin/sh
set -e
set -u
NAME=renderer-linux-x64
DEST=/tmp/pack/$NAME
VERSION=$1
# Build the binaries
scons -c
scons
# Copy the files to an independent folder
rm -rf $DEST
mkdir -p $DEST
cp bin/* $DEST
cp -R dist/renderer.node $DEST/bin
cp -R /cef/* $DEST
rm -r $DEST/include
# Compress the pack with the files
(cd /tmp/pack && tar -vpczf $NAME.tar.gz $NAME)
# Move the packs to the correct folder versioning them
rm -rf packs
mkdir packs
cd packs
mv $DEST.tar.gz $NAME-$VERSION.tar.gz
|
Add Sublime Text alias if we're working in Cygwin | # Cygwin shell configuration
# Because I never remember the '-s' parameter
alias ssh-agent-cyg='eval $(ssh-agent -s)'
# vim: syntax=sh ts=4 sw=4 sts=4 sr noet
| # Cygwin shell configuration
# Because I never remember the '-s' parameter
alias ssh-agent-cyg='eval $(ssh-agent -s)'
# Add alias for Sublime Text
SublRegPath='/proc/registry/HKEY_LOCAL_MACHINE/SOFTWARE/Microsoft/Windows/CurrentVersion/Uninstall/Sublime Text 2_is1/InstallLocation'
SublBinName='sublime_text.exe'
if [ -f "$SublRegPath" ]; then
SublDirPath=$(cat "$SublRegPath" | sed 's/^\([A-Z]\):/\/cygdrive\/\1/' | sed 's/\\/\//g')
alias subl="\"$SublDirPath$SublBinName\""
fi
# vim: syntax=sh ts=4 sw=4 sts=4 sr noet
|
Use curl instead of wget due to Docker image. | #!/bin/sh
F=aspell6-en-2015.04.24-0.tar.bz2
URL=ftp://ftp.gnu.org/gnu/aspell/dict/en/${F}
echo "# setting up Aspell dictionaries!!"
(
cd data
wget $URL
tar xjf $F
rm $F
ln -s aspell* aspell-dict
cd aspell-dict
./configure
make
)
| #!/bin/sh
F=aspell6-en-2015.04.24-0.tar.bz2
URL=ftp://ftp.gnu.org/gnu/aspell/dict/en/${F}
echo "# setting up Aspell dictionaries!!"
(
cd data
curl $URL
tar xjf $F
rm $F
ln -s aspell* aspell-dict
cd aspell-dict
./configure
make
)
|
Update preversion script to include build | (npm test && bash "$(dirname ${BASH_SOURCE[0]})/push-ready.sh")
if [[ $? -ne 0 ]]; then
printf "\e[31m×\e[0m Address above issues before bumping version"
exit 1
fi | (npm run build && npm test && bash "$(dirname ${BASH_SOURCE[0]})/push-ready.sh")
if [[ $? -ne 0 ]]; then
printf "\e[31m×\e[0m Address above issues before bumping version"
exit 1
fi |
Fix codecov test coverage report command | #!/usr/bin/env bash
set -e
export NUM_JOBS=1
brew update;
brew install bash;
brew install lcov;
mkdir build
cd build
cmake .. -DCMAKE_BUILD_TYPE=Debug -DBUILD_COVERAGE=ON
make UnitTests
lcov -c -i -d Tests/UnitTests -o base.info
bin/UnitTests
lcov -c -d Tests/UnitTests -o test.info
lcov -a base.info -a test.info -o coverage.info
lcov -r coverage.info '*/Toolchains/*' -o coverage.info
lcov -r coverage.info '*/Libraries/*' -o coverage.info
lcov -r coverage.info '*/Programs/*' -o coverage.info
lcov -r coverage.info '*/Tests/*' -o coverage.info
lcov -r coverage.info '*/Tools/*' -o coverage.info
lcov -l coverage.info
bash <(curl -s https://codecov.io/bash) || echo "Codecov did not collect coverage reports" | #!/usr/bin/env bash
set -e
export NUM_JOBS=1
brew update;
brew install bash;
brew install lcov;
mkdir build
cd build
cmake .. -DCMAKE_BUILD_TYPE=Debug -DBUILD_COVERAGE=ON
make UnitTests
lcov -c -i -d Tests/UnitTests -o base.info
bin/UnitTests
lcov -c -d Tests/UnitTests -o test.info
lcov -a base.info -a test.info -o coverage.info
lcov -r coverage.info '*/Toolchains/*' -o coverage.info
lcov -r coverage.info '*/Libraries/*' -o coverage.info
lcov -r coverage.info '*/Programs/*' -o coverage.info
lcov -r coverage.info '*/Tests/*' -o coverage.info
lcov -r coverage.info '*/Tools/*' -o coverage.info
lcov -l coverage.info
curl -s https://codecov.io/bash > .codecov
chmod +x .codecov
./.codecov |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.