| Instruction | input_code | output_code |
|---|---|---|
Fix dhcp settings clean up in Ubuntu 12.04 and higher | #!/bin/sh -eux
# Disable automatic udev rules for network interfaces in Ubuntu,
# source: http://6.ptmc.org/164/
rm -f /etc/udev/rules.d/70-persistent-net.rules;
mkdir -p /etc/udev/rules.d/70-persistent-net.rules;
rm -f /lib/udev/rules.d/75-persistent-net-generator.rules;
rm -rf /dev/.udev/ /var/lib/dhcp3/*;
# Adding a 2 sec delay to the interface up, to make the dhclient happy
echo "pre-up sleep 2" >>/etc/network/interfaces;
| #!/bin/sh -eux
# Disable automatic udev rules for network interfaces in Ubuntu,
# source: http://6.ptmc.org/164/
rm -f /etc/udev/rules.d/70-persistent-net.rules;
mkdir -p /etc/udev/rules.d/70-persistent-net.rules;
rm -f /lib/udev/rules.d/75-persistent-net-generator.rules;
rm -rf /dev/.udev/ /var/lib/dhcp3/* /var/lib/dhcp/*;
# Adding a 2 sec delay to the interface up, to make the dhclient happy
echo "pre-up sleep 2" >>/etc/network/interfaces;
|
Create and cache npm completion on first run | eval "$(npm completion 2>/dev/null)"
# Install dependencies globally
alias npmg="npm i -g "
# npm package names are lowercase
# Thus, we've used camelCase for the following aliases:
# Install and save to dependencies in your package.json
# npms is used by https://www.npmjs.com/package/npms
alias npmS="npm i -S "
# Install and save to dev-dependencies in your package.json
# npmd is used by https://github.com/dominictarr/npmd
alias npmD="npm i -D "
# Execute command from node_modules folder based on current directory
# i.e npmE gulp
alias npmE='PATH="$(npm bin)":"$PATH"'
# Check which npm modules are outdated
alias npmO="npm outdated"
# Run npm start
alias npmst="npm start"
# Run npm test
alias npmt="npm test"
| __NPM_COMPLETION_FILE="${ZSH_CACHE_DIR}/npm_completion"
if [[ ! -f $__NPM_COMPLETION_FILE ]]; then
npm completion >! $__NPM_COMPLETION_FILE || rm -f $__NPM_COMPLETION_FILE
fi
source $__NPM_COMPLETION_FILE
# Install dependencies globally
alias npmg="npm i -g "
# npm package names are lowercase
# Thus, we've used camelCase for the following aliases:
# Install and save to dependencies in your package.json
# npms is used by https://www.npmjs.com/package/npms
alias npmS="npm i -S "
# Install and save to dev-dependencies in your package.json
# npmd is used by https://github.com/dominictarr/npmd
alias npmD="npm i -D "
# Execute command from node_modules folder based on current directory
# i.e npmE gulp
alias npmE='PATH="$(npm bin)":"$PATH"'
# Check which npm modules are outdated
alias npmO="npm outdated"
# Run npm start
alias npmst="npm start"
# Run npm test
alias npmt="npm test"
|
Remove unuseful alignak-setup module for tests | #!/usr/bin/env bash
#
# Copyright (C) 2015-2016: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
set -e
THIS_PATH=$(dirname "$0")
BASE_PATH=$(dirname "$THIS_PATH")
cd $BASE_PATH
pip install --upgrade pip
# install prog AND tests requirements :
pip install -e .
pip install alignak-setup
pip install --upgrade -r test/requirements.txt
pyversion=$(python -c "import sys; print(''.join(map(str, sys.version_info[:2])))")
if test -e "test/requirements.py${pyversion}.txt"
then
pip install -r "test/requirements.py${pyversion}.txt"
fi
| #!/usr/bin/env bash
#
# Copyright (C) 2015-2016: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
set -e
THIS_PATH=$(dirname "$0")
BASE_PATH=$(dirname "$THIS_PATH")
cd $BASE_PATH
pip install --upgrade pip
# install prog AND tests requirements :
pip install -e .
pip install --upgrade -r test/requirements.txt
pyversion=$(python -c "import sys; print(''.join(map(str, sys.version_info[:2])))")
if test -e "test/requirements.py${pyversion}.txt"
then
pip install -r "test/requirements.py${pyversion}.txt"
fi
|
Add grub commands to set default boot item | # Mount a disk temporarily (good for checking for stuff hidden by an overmount)
mkdir tmpdir
mount --bind <LOCATION> tmpdir
# Do your checking, then remove it
umount tmpdir
########## Format a disk
# List attached disks
fdisk -l
# Create a partition on a mounted disk. This is non interactive way for usually interactive command. Olol
fdisk /dev/sdb <<EOF
n
p
1
w
EOF
# format the partition as ext4
mkfs.ext4 /dev/sdb1
###
lsblk -o NAME,SIZE,MOUNTPOINT,UUID | # Mount a disk temporarily (good for checking for stuff hidden by an overmount)
mkdir tmpdir
mount --bind <LOCATION> tmpdir
# Do your checking, then remove it
umount tmpdir
########## Format a disk
# List attached disks
fdisk -l
# Create a partition on a mounted disk. This is non interactive way for usually interactive command. Olol
fdisk /dev/sdb <<EOF
n
p
1
w
EOF
# format the partition as ext4
mkfs.ext4 /dev/sdb1
########## Get UUID of partitions
lsblk -o NAME,SIZE,MOUNTPOINT,UUID
########## Set default grub entry
# Get all the menuentries grub shows
less /boot/grub/grub.cfg | grep menuentry
# Edit the file and replace the default with a menuentry. Something like that.
/etc/default/grub
GRUB_DEFAULT='Windows 10 (loader) (on /dev/sda1)'
# Update grub, when file updated
sudo update-grub |
Add script exit on non-zero status | #!/bin/bash
###################################################################
#
# virtualbox.sh
#
# This script installs VirtualBox Guest Additions and
# required dependencies. This script will look for the
# installation ISO in the $HOME of the "ssh_username" user
# specified in the template.json.
#
#
###################################################################
VBOX_VERSION="$(cat $HOME/.vbox_version)"
export MAKE='/usr/bin/gmake -i'
yum install -y bzip2 gcc kernel-devel-`uname -r`
mount -o loop "$HOME"/VBoxGuestAdditions_"$VBOX_VERSION".iso /mnt
sh /mnt/VBoxLinuxAdditions.run
umount /mnt
rm -f "$HOME"/VBoxGuestAdditions_*.iso "$HOME"/.vbox_version
exit 0
| #!/bin/bash
###################################################################
#
# virtualbox.sh
#
# This script installs VirtualBox Guest Additions and
# required dependencies. This script will look for the
# installation ISO in the $HOME of the "ssh_username" user
# specified in the template.json.
#
#
###################################################################
# Exit script immediately on non-zero status
set -e
VBOX_VERSION="$(cat $HOME/.vbox_version)"
export MAKE='/usr/bin/gmake -i'
yum install -y bzip2 gcc kernel-devel-`uname -r`
mount -o loop "$HOME"/VBoxGuestAdditions_"$VBOX_VERSION".iso /mnt
sh /mnt/VBoxLinuxAdditions.run
umount /mnt
rm -f "$HOME"/VBoxGuestAdditions_*.iso "$HOME"/.vbox_version
exit 0
|
Fix user shims stored now in ASDF_DATA_DIR | #!/usr/bin/env bash
if [ "${BASH_SOURCE[0]}" != "" ]; then
current_script_path="${BASH_SOURCE[0]}"
else
current_script_path="$0"
fi
export ASDF_DIR
ASDF_DIR="$(dirname "$current_script_path")"
# shellcheck disable=SC2016
[ -d "$ASDF_DIR" ] || echo '$ASDF_DIR is not a directory'
# Add asdf to PATH
#
# if in $PATH, remove, regardless of if it is in the right place (at the front) or not.
# replace all occurrences - ${parameter//pattern/string}
ASDF_BIN="${ASDF_DIR}/bin"
ASDF_SHIMS="${ASDF_DIR}/shims"
ASDF_USER_SHIMS="${ASDF_DATA_DIR:-$HOME/.asdf}"
[[ ":$PATH:" == *":${ASDF_BIN}:"* ]] && PATH="${PATH//$ASDF_BIN:/}"
[[ ":$PATH:" == *":${ASDF_SHIMS}:"* ]] && PATH="${PATH//$ASDF_SHIMS:/}"
[[ ":$PATH:" == *":${ASDF_USER_SHIMS}:"* ]] && PATH="${PATH//$ASDF_USER_SHIMS:/}"
# add to front of $PATH
PATH="${ASDF_BIN}:$PATH"
PATH="${ASDF_SHIMS}:$PATH"
if [ -n "$ZSH_VERSION" ]; then
autoload -U bashcompinit
bashcompinit
fi
| #!/usr/bin/env bash
if [ "${BASH_SOURCE[0]}" != "" ]; then
current_script_path="${BASH_SOURCE[0]}"
else
current_script_path="$0"
fi
export ASDF_DIR
ASDF_DIR="$(dirname "$current_script_path")"
# shellcheck disable=SC2016
[ -d "$ASDF_DIR" ] || echo '$ASDF_DIR is not a directory'
# Add asdf to PATH
#
# if in $PATH, remove, regardless of if it is in the right place (at the front) or not.
# replace all occurrences - ${parameter//pattern/string}
ASDF_BIN="${ASDF_DIR}/bin"
ASDF_SHIMS="${ASDF_DIR}/shims"
ASDF_USER_SHIMS="${ASDF_DATA_DIR:-$HOME/.asdf}/shims"
[[ ":$PATH:" == *":${ASDF_BIN}:"* ]] && PATH="${PATH//$ASDF_BIN:/}"
[[ ":$PATH:" == *":${ASDF_SHIMS}:"* ]] && PATH="${PATH//$ASDF_SHIMS:/}"
[[ ":$PATH:" == *":${ASDF_USER_SHIMS}:"* ]] && PATH="${PATH//$ASDF_USER_SHIMS:/}"
# add to front of $PATH
PATH="${ASDF_BIN}:$PATH"
PATH="${ASDF_SHIMS}:$PATH"
PATH="${ASDF_USER_SHIMS}:$PATH"
if [ -n "$ZSH_VERSION" ]; then
autoload -U bashcompinit
bashcompinit
fi
|
Install RabbitMQ plugins before starting the service. | #!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright Clairvoyant 2016
yum -y -e1 -d1 install epel-release
yum -y -e1 -d1 install rabbitmq-server
service rabbitmq-server start
chkconfig rabbitmq-server on
rabbitmq-plugins enable rabbitmq_management
| #!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright Clairvoyant 2016
yum -y -e1 -d1 install epel-release
yum -y -e1 -d1 install rabbitmq-server
rabbitmq-plugins enable rabbitmq_management
service rabbitmq-server start
chkconfig rabbitmq-server on
|
Revert "fix private key issues" | #!/bin/bash
BIN=node_modules/.bin
if [ "${TRAVIS_PULL_REQUEST}" != "false" ]
then
echo "Skipping full deploy since this is a pull request."
echo "Attempting a simple pack instead."
$BIN/superfast pack
exit 0
fi
cat "${DOKKU_PRIVATE_KEY}" > deploy_key.pem
ssh-add deploy_key.pem
git clone dokku@$DOKKU_HOST:$DOKKU_APPNAME deploy
cd deploy
npm run clean
cd ..
$BIN/superfast pack deploy -y
cd deploy
git status
| #!/bin/bash
BIN=node_modules/.bin
if [ "${TRAVIS_PULL_REQUEST}" != "false" ]
then
echo "Skipping full deploy since this is a pull request."
echo "Attempting a simple pack instead."
$BIN/superfast pack
exit 0
fi
cat $DOKKU_PRIVATE_KEY > deploy_key.pem
ssh-add deploy_key.pem
git clone dokku@$DOKKU_HOST:$DOKKU_APPNAME deploy
cd deploy
npm run clean
cd ..
$BIN/superfast pack deploy -y
cd deploy
git status
|
Use simpler symlink creation method | #!/bin/sh
############################
# .make.sh
# This script creates symlinks from the home directory to any desired dotfiles in ~/dotfiles
############################
########## Variables
dir=~/Code/dotfiles # dotfiles directory
olddir=~/dotfiles_old # old dotfiles backup directory
files="vim tmux.conf vimrc vimrc_python gitconfig zshrc bashrc pylintrc offlineimaprc urlview notmuch-config"
##########
# create dotfiles_old in homedir
echo "Creating $olddir for backup of any existing dotfiles in ~"
mkdir -p $olddir
echo "...done"
# change to the dotfiles directory
echo "Changing to the $dir directory"
cd $dir
echo "...done"
# move any existing dotfiles in homedir to dotfiles_old directory, then create symlinks
for file in $files; do
echo "Moving any existing dotfiles from ~ to $olddir"
mv ~/.$file ~/dotfiles_old/
echo "Creating symlink to $file in home directory."
ln -s $dir/$file ~/.$file
done
| #!/bin/sh -x
DOTFILES_DIR=~/Code/dotfiles
#files="vim tmux.conf vimrc vimrc_python gitconfig zshrc bashrc pylintrc offlineimaprc urlview notmuch-config"
ln -sf $DOTFILES_DIR/tmux.conf $HOME/.tmux.conf
ln -sf $DOTFILES_DIR/vim $HOME/.vim
ln -sf $DOTFILES_DIR/vimrc $HOME/.vimrc
ln -sf $DOTFILES_DIR/gitconfig $HOME/.gitconfig
ln -sf $DOTFILES_DIR/alacritty.yml $HOME/.alacritty.yml
ln -sf $DOTFILES_DIR/zshrc $HOME/.zshrc
# If oh-my-zsh is not installed, grab the installer from https://github.com/robbyrussell/oh-my-zsh then this should work
ln -sf $DOTFILES_DIR/edwin.zsh-theme $HOME/.oh-my-zsh/themes/
|
Correct chmod settings for socat | #!/bin/sh
sudo rm -f /dev/ttyUSB3dp
socat -d -d pty,raw,echo=0,link=./tty3dpm pty,raw,echo=0,link=./tty3dps &
sudo ln -s $(pwd)/tty3dpm /dev/ttyUSB3dp
./build/kossel-firmware ./tty3dps
| #!/bin/sh
sudo rm -f /dev/ttyUSB3dp
socat -d -d pty,raw,echo=0,link=./tty3dpm pty,raw,echo=0,link=./tty3dps &
sudo ln -s $(pwd)/tty3dpm /dev/ttyUSB3dp
#Octoprint needs to be able to read from its tty:
sudo chmod +r ./tty3dpm ./tty3dps
./build/kossel-firmware ./tty3dps
|
Fix off by one error | #!/bin/bash
##########
# Params #
##########
# 1 param is the remote device
# 2 param is the ros master
# 3 param is the ros_package
# 4 param is the launch file
# Following params are the roslaunch arguments
ssh -t -t $1 'export ROS_MASTER_URI=http://'$2':11311/; zsh -o HUP -c "roslaunch '${@:2}'"'
| #!/bin/bash
##########
# Params #
##########
# 1 param is the remote device
# 2 param is the ros master
# 3 param is the ros_package
# 4 param is the launch file
# Following params are the roslaunch arguments
ssh -t -t $1 'export ROS_MASTER_URI=http://'$2':11311/; zsh -o HUP -c "roslaunch '${@:3}'"'
|
Clean up the script for adding checksum | #!/usr/bin/env bash
set -e
checksum="`basename pivotal-buildpacks-cached/*_buildpack-cached-v*.zip`.checksum"
echo md5: "`md5sum *_buildpack-cached-v*.zip`" > pivotal-buildpacks-cached/$checksum
echo sha256: "`sha256sum *_buildpack-cached-v*.zip`" >> pivotal-buildpacks-cached/$checksum
cat pivotal-buildpacks-cached/$checksum >> buildpack/RECENT_CHANGES
| #!/usr/bin/env bash
set -e
pushd pivotal-buildpacks-cached
filename="`basename *_buildpack-cached-v*.zip`"
checksum="${filename}.CHECKSUM.txt"
echo $checksum
echo "md5: `md5sum $filename`" > $checksum
echo "sha256: `sha256sum $filename`" >> $checksum
cat $checksum >> ../buildpack/RECENT_CHANGES
popd
|
Add check if openmpi was already built. | #!/bin/bash
set -e
curl http://www.open-mpi.org/software/ompi/v1.10/downloads/openmpi-1.10.0.tar.gz | tar zx
cd openmpi-1.10.0
mkdir -p $HOME/ompi
./configure --enable-shared --prefix=$HOME/ompi --disable-fortran
make -j 2
make -j 2 install
| #!/bin/bash
set -e
if [ -d "$HOME/ompi" ]; then
echo "Openmpi already installed."
exit 0
fi
curl http://www.open-mpi.org/software/ompi/v1.10/downloads/openmpi-1.10.0.tar.gz | tar zx
cd openmpi-1.10.0
mkdir -p $HOME/ompi
./configure --enable-shared --prefix=$HOME/ompi --disable-fortran
make -j 2
make -j 2 install
|
Use semver-ish versioning for gnatsd binary because s3-resource requires it | #!/usr/bin/env bash
set -e -x -u
export PATH=/usr/local/ruby/bin:/usr/local/go/bin:$PATH
export GOPATH=$(pwd)/gopath
base=`pwd`
cd gopath/src/github.com/nats-io/gnatsd
out="${base}/compiled-${GOOS}"
timestamp=`date -u +"%Y-%m-%dT%H:%M:%SZ"`
git_rev=`git rev-parse --short HEAD`
version="${git_rev}-${timestamp}"
filename="gnatsd-${version}-${GOOS}-${GOARCH}"
echo "building ${filename} with version ${version}"
sed -i "s/VERSION = \"\(.*\)\"/VERSION = \"\1+${version}\"/" server/const.go
go build -o ${out}/${filename} github.com/nats-io/gnatsd
| #!/usr/bin/env bash
set -e -x -u
export PATH=/usr/local/ruby/bin:/usr/local/go/bin:$PATH
export GOPATH=$(pwd)/gopath
base=`pwd`
cd gopath/src/github.com/nats-io/gnatsd
out="${base}/compiled-${GOOS}"
semver=`grep "VERSION =" /tmp/const.go | cut -d\" -f2`
timestamp=`date -u +"%Y-%m-%dT%H:%M:%SZ"`
git_rev=`git rev-parse --short HEAD`
version="${semver}-${git_rev}-${timestamp}"
filename="gnatsd-${version}-${GOOS}-${GOARCH}"
echo "building ${filename} with version ${version}"
sed -i "s/VERSION = \".*\"/VERSION = \"${version}\"/" server/const.go
go build -o ${out}/${filename} github.com/nats-io/gnatsd
|
Add libjpeg and libpulse dependency dev library | #!/bin/bash
apt update
apt full-upgrade -y
apt install -y gmodule-2.0 gtk+-3.0 libasound2-dev libavahi-client-dev
| #!/bin/bash
apt update
apt full-upgrade -y
apt install -y gmodule-2.0 gtk+-3.0 libasound2-dev libavahi-client-dev libjpeg9-dev libpulse-dev
|
Use extras when running the test server | #!/usr/bin/env bash
set -e
pip install flask
# TravisCI on bionic dist uses old version of Docker Engine
# which is incompatibile with newer docker-py
# See https://github.com/docker/docker-py/issues/2639
pip install "docker>=2.5.1,<=4.2.2"
pip install /moto/dist/moto*.gz
moto_server -H 0.0.0.0 -p 5000 | #!/usr/bin/env bash
set -e
# TravisCI on bionic dist uses old version of Docker Engine
# which is incompatibile with newer docker-py
# See https://github.com/docker/docker-py/issues/2639
pip install "docker>=2.5.1,<=4.2.2"
pip install $(ls /moto/dist/moto*.gz)[server,all]
moto_server -H 0.0.0.0 -p 5000
|
Make python-sharded run all passed tests | #!/usr/bin/env bash
# This is a hack so that we can run moto_server prior to running the tests.
# This should be possible by using a command like `bash -c "moto_server && ./manage.py test"
# but there seems to be a bug in docker-compose that prevents the second command from
# running.
#
# It should also be possible to run moto-server in a separate container but when doing that
# all the S3 requests failed with `bucket does not exist` errors.
#
set -e
TESTS="$1"
COMMAND="coverage run manage.py test --noinput --failfast --traceback --verbosity=2"
/moto-s3/env/bin/moto_server s3 &
if [ -z ${COMMAND_OVERRIDE} ]; then
$COMMAND $TESTS
else
$COMMAND_OVERRIDE
fi
| #!/usr/bin/env bash
# This is a hack so that we can run moto_server prior to running the tests.
# This should be possible by using a command like `bash -c "moto_server && ./manage.py test"
# but there seems to be a bug in docker-compose that prevents the second command from
# running.
#
# It should also be possible to run moto-server in a separate container but when doing that
# all the S3 requests failed with `bucket does not exist` errors.
#
set -e
COMMAND="coverage run manage.py test --noinput --failfast --traceback --verbosity=2"
/moto-s3/env/bin/moto_server s3 &
if [ -z ${COMMAND_OVERRIDE} ]; then
$COMMAND "$@"
else
$COMMAND_OVERRIDE
fi
|
Disable testing on the web | #!/usr/bin/env bash
EXIT_CODE=0
cd moor || exit 1
pub run build_runner build --delete-conflicting-outputs
pub run test --coverage=coverage || EXIT_CODE=$?
if [ $EXIT_CODE -eq 0 ]
then pub run test -P browsers || EXIT_CODE=$?
fi
exit $EXIT_CODE | #!/usr/bin/env bash
EXIT_CODE=0
cd moor || exit 1
pub run build_runner build --delete-conflicting-outputs
pub run test --coverage=coverage || EXIT_CODE=$?
# Testing on the web doesn't work on CI (we're running into out-of-memory issues even with 12G of RAM)
#if [ $EXIT_CODE -eq 0 ]
# then pub run test -P browsers || EXIT_CODE=$?
#fi
exit $EXIT_CODE |
Add filtered resources to example-5 | #!/usr/bin/env bash
###############################################################################
#
# Spark environment
#
###############################################################################
SPARK_CONFIG=cloudera-framework-parent/${project.artifactId}/target/classes/python/spark-defaults.conf
PYSPARK_PYTHON=./CLDR_FW/${project.artifactId}/bin/python
| #!/usr/bin/env bash
###############################################################################
#
# Spark environment
#
###############################################################################
SPARK_CONFIG=cloudera-framework-parent/${project.artifactId}/target/test-python/python/spark-defaults.conf
PYSPARK_PYTHON=./CLDR_FW/${project.artifactId}/bin/python
|
Remove completions overridden by custom completions | #!/usr/bin/env bash
source ./config/header.sh
echo "Creating symlinks..."
mkdir -p ~/.atom
ln -sf "$PWD"/atom/* ~/.atom
ln -snf "$PWD" ~/.dotfiles
ln -snf "$PWD"/terminal/.bashrc ~/.bashrc
ln -snf "$PWD"/terminal/.bashrc ~/.bash_profile
ln -snf "$PWD"/terminal/bash/completions.sh /usr/local/etc/bash_completion.d/dotfiles-completions.sh
ln -snf "$PWD"/terminal/.vimrc ~/.vimrc
ln -snf "$PWD"/git/.gitconfig ~/.gitconfig
mkdir -p ~/.ssh
mkdir -p ~/.ssh/sockets
ln -snf "$PWD"/ssh/ssh_config ~/.ssh/config
# Disable Bash Sessions feature in OS X El Capitan
touch ~/.bash_sessions_disable
| #!/usr/bin/env bash
source ./config/header.sh
echo "Creating symlinks..."
mkdir -p ~/.atom
ln -sf "$PWD"/atom/* ~/.atom
ln -snf "$PWD" ~/.dotfiles
ln -snf "$PWD"/terminal/.bashrc ~/.bashrc
ln -snf "$PWD"/terminal/.bashrc ~/.bash_profile
# Add custom completions and remove overridden completions
ln -snf "$PWD"/terminal/bash/completions.sh /usr/local/etc/bash_completion.d/dotfiles-completions.sh
rm -f /usr/local/etc/bash_completion.d/brew
rm -f /usr/local/etc/bash_completion.d/npm
ln -snf "$PWD"/terminal/.vimrc ~/.vimrc
ln -snf "$PWD"/git/.gitconfig ~/.gitconfig
mkdir -p ~/.ssh
mkdir -p ~/.ssh/sockets
ln -snf "$PWD"/ssh/ssh_config ~/.ssh/config
# Disable Bash Sessions feature in OS X El Capitan
touch ~/.bash_sessions_disable
|
Add auto shard.host to config.ini if $HOST defined. | #! /bin/bash
set -e
for env in 'GOROOT' 'GOBIN' ; do
if [ -z '${$env}' ]; then
echo "$env not defined. Is go installed?"
exit 1
fi
done
export GOPATH=`pwd`
echo "Installing required go libraries..."
for req in `cat go_deps.lst`; do
echo -n " $req..."
go get -v $req
echo " done"
done
echo "Libraries installed"
if [ ! -e config.ini ]; then
echo "Copying sample ini file to config.ini"
cp config.sample.ini config.ini
fi
echo "Please edit config.ini for local settings."
| #! /bin/bash
set -e
for env in 'GOROOT' 'GOBIN' ; do
if [ -z '${$env}' ]; then
echo "$env not defined. Is go installed?"
exit 1
fi
done
export GOPATH=`pwd`
echo "Installing required go libraries..."
for req in `cat go_deps.lst`; do
echo -n " $req..."
go get -v $req
echo " done"
done
echo "Libraries installed"
if [ ! -e config.ini ]; then
echo "Copying sample ini file to config.ini"
cp config.sample.ini config.ini
fi
if [ ! -z '$HOST' ]; then
echo "Setting local shard host name"
echo "shard.currentHost = $HOST:8080" >> config.ini
fi
echo "Please edit config.ini for local settings."
|
Check for existence of previously checked-out cue repo | #!/bin/sh
set -e
set -x
git submodule update -f --init --recursive
cd cloned
git clone -n https://github.com/cuelang/cue
cd cue
git checkout 317163484ec5d79259a4ea6524d3870419510639
cd ../..
PUSHD=$(pwd)
cat <<EOF > content/en/docs/references/spec.md
+++
title = "Language Specification"
+++
EOF
cat cloned/cue/doc/ref/spec.md >> content/en/docs/references/spec.md
cd content/en/docs/tutorials/tour
go test .
go run gen.go
cd ${PUSHD}
# TODO
# shell documentation
# cue/doc/contribute.md
#
hugo $@
| #!/bin/sh
set -e
set -x
git submodule update -f --init --recursive
cd cloned
if [[ ! -d cue/ ]] ; then
git clone -n https://github.com/cuelang/cue
fi
cd cue
git checkout 317163484ec5d79259a4ea6524d3870419510639
cd ../..
PUSHD=$(pwd)
cat <<EOF > content/en/docs/references/spec.md
+++
title = "Language Specification"
+++
EOF
cat cloned/cue/doc/ref/spec.md >> content/en/docs/references/spec.md
cd content/en/docs/tutorials/tour
go test .
go run gen.go
cd ${PUSHD}
# TODO
# shell documentation
# cue/doc/contribute.md
#
hugo $@
|
Use `python -m pytest' instead of `python $(which py.test)'. | #!/bin/sh
set -Ceu
: ${MAKE:=make}
: ${PYTHON:=python}
: ${PY_TEST:=py.test}
case $PY_TEST in */*);; *) PY_TEST=`which "$PY_TEST"` || PY_TEST=;; esac
if [ ! -x "${PY_TEST}" ]; then
printf >&2 'unable to find pytest\n'
exit 1
fi
root=`cd -- "$(dirname -- "$0")" && pwd`
(
set -Ceu
cd -- "${root}"
rm -rf build
./pythenv.sh "$PYTHON" setup.py build
if [ $# -eq 0 ]; then
./pythenv.sh "$PYTHON" "$PY_TEST" \
src/tests/unit_tests \
# end of tests
else
./pythenv.sh "$PYTHON" "$PY_TEST" "$@"
fi
)
if cd cpp_code && "${MAKE}" runtests; then
echo 'Passed!'
else
status=$?
echo 'Failed!'
exit $status
fi
| #!/bin/sh
set -Ceu
: ${MAKE:=make}
: ${PYTHON:=python}
root=`cd -- "$(dirname -- "$0")" && pwd`
(
set -Ceu
cd -- "${root}"
rm -rf build
./pythenv.sh "$PYTHON" setup.py build
if [ $# -eq 0 ]; then
./pythenv.sh "$PYTHON" -m pytest \
src/tests/unit_tests \
# end of tests
else
./pythenv.sh "$PYTHON" -m pytest "$@"
fi
)
if cd cpp_code && "${MAKE}" runtests; then
echo 'Passed!'
else
status=$?
echo 'Failed!'
exit $status
fi
|
Include LICENSE file in the distributed archive | #! /usr/bin/env bash
# Copy the code directory to a new directory, then compress it
# Delete the temporary directory afterwards
# Needs one argument (the version number)
if [[ $# != 1 ]] ; then
echo "Need one argument (the version number)!"
exit 1
fi
version=$1
dirname="SLOCounter-v$1"
archname="$dirname.tar.gz"
cp -r code $dirname
tar -czf $archname $dirname
rm -rf $dirname
| #! /usr/bin/env bash
# Copy the code directory to a new directory, then compress it
# Delete the temporary directory afterwards
# Needs one argument (the version number)
if [[ $# != 1 ]] ; then
echo "Need one argument (the version number)!"
exit 1
fi
version=$1
dirname="SLOCounter-v$1"
archname="$dirname.tar.gz"
cp -r code $dirname
cp LICENSE $dirname/LICENSE
tar -czf $archname $dirname
rm -rf $dirname
|
Revert "Show git untracked and ignored files before build" | #!/bin/bash
git status -uall --ignored
docker build -t keboola/gmail-extractor .
docker login -e="." -u="$QUAY_USERNAME" -p="$QUAY_PASSWORD" quay.io
docker tag keboola/gmail-extractor quay.io/keboola/gmail-extractor:$TRAVIS_TAG
docker ps -a
docker push quay.io/keboola/gmail-extractor:$TRAVIS_TAG
| #!/bin/bash
docker build -t keboola/gmail-extractor .
docker login -e="." -u="$QUAY_USERNAME" -p="$QUAY_PASSWORD" quay.io
docker tag keboola/gmail-extractor quay.io/keboola/gmail-extractor:$TRAVIS_TAG
docker ps -a
docker push quay.io/keboola/gmail-extractor:$TRAVIS_TAG
|
Use virtio ISOs with versioned file names if available, remove need for symlink | #!/bin/sh -e
NIC=virtio
[ "$1" = "--pcnet" ] && { NIC="pcnet"; shift; }
[ -n "$1" ] && IMG="$1" || IMG="$(ls -1t *.qcow2 | head -1)"
if [ "$NIC" = "virtio" ]; then
if [ ! -e virtio*.iso ]; then
# RH now have a package
if [ -e "/usr/share/virtio-win/virtio-win.iso" ]; then
echo "Using ISO from virtio-win package."
ln -s /usr/share/virtio-win/virtio-win.iso ./virtio-win.iso;
else
echo Fetching virtIO drivers...
wget https://fedorapeople.org/groups/virt/virtio-win/direct-downloads/stable-virtio/virtio-win.iso
fi
fi
CDIMAGE="-cdrom "virtio-win.iso
fi
qemu-system-x86_64 -enable-kvm \
-drive "file=$IMG" \
$CDIMAGE \
-net nic,model=$NIC \
-net user \
-m 1024M \
-monitor stdio \
-snapshot -no-shutdown
| #!/bin/sh -e
NIC=virtio
[ "$1" = "--pcnet" ] && { NIC="pcnet"; shift; }
[ -n "$1" ] && IMG="$1" || IMG="$(ls -1t *.qcow2 | head -1)"
if [ "$NIC" = "virtio" ]; then
LOCAL_ISO="$(ls -1t virtio*.iso | head -1)" 2>/dev/null
if [ -n "${LOCAL_ISO}" ]; then
echo "Using local ISO file ${LOCAL_ISO}"
CDIMAGE="-cdrom ${LOCAL_ISO}"
elif [ -e "/usr/share/virtio-win/virtio-win.iso" ]; then
# RH now have a package
echo "Using ISO from virtio-win package."
CDIMAGE="-cdrom /usr/share/virtio-win/virtio-win.iso"
else
echo Fetching virtIO drivers...
wget https://fedorapeople.org/groups/virt/virtio-win/direct-downloads/stable-virtio/virtio-win.iso
CDIMAGE="-cdrom virtio-win.iso"
fi
fi
qemu-system-x86_64 -enable-kvm \
-drive "file=$IMG" \
$CDIMAGE \
-net nic,model=$NIC \
-net user \
-m 1024M \
-monitor stdio \
-snapshot -no-shutdown
|
Fix syntax error in build script | #!/bin/bash
VERSION=$(date +%s)
aws s3 cp $GOPATH/incus.tar s3://imgur-incus/incus-latest.tar && \
aws s3 cp $GOPATH/incus.tar s3://imgur-incus/incus-$VERSION.tar && \
aws deploy create-deployment --application-name Incus --deployment-group-name Production --s3-location bundleType=tar,bucket=imgur-incus,key=incus-$VERSION.tar && \
| #!/bin/bash
VERSION=$(date +%s)
aws s3 cp $GOPATH/incus.tar s3://imgur-incus/incus-latest.tar && \
aws s3 cp $GOPATH/incus.tar s3://imgur-incus/incus-$VERSION.tar && \
aws deploy create-deployment --application-name Incus --deployment-group-name Production --s3-location bundleType=tar,bucket=imgur-incus,key=incus-$VERSION.tar
|
Add an option to background the scanner | #!/bin/bash
# Absolute path to this script
SCRIPT=$(readlink -f $0)
BIN=`dirname $SCRIPT`
ROOT=`dirname $BIN`
# Source the scanner configuration
# Typically this should have:
# - scannerID ; the name by which the scanner is defined in the beacon event messages
# - heartbeatUUID ; the uuid of the beacon used as the heartbeat for the scanner
# - brokerURL ; the url of the activemq broker
. ~/scanner.conf
# Aliases for backward compatibility to previous variables used from scanner.conf
scannerID=${scannerID:=$SCANNER_ID}
heartbeatUUID=${HEARTBEAT_UUID:=$heartbeatUUID}
brokerURL=${BROKER_URL:=$brokerURL}
# Bring up bluetooth interface
hciconfig hci0 up
ps -eaf | grep lescan | grep -v grep >/dev/null
found=$?
# non-zero return means lescan not seen running
if [ $found -ne 0 ]; then
hcitool lescan --duplicates >/dev/null &
else
echo lescan already running
fi
# Start the scanner
CMD="${ROOT}/Debug/src2/NativeScannerBlueZ --scannerID "${scannerID:-`hostname`}" --brokerURL "${brokerURL:-192.168.1.107:5672}" --heartbeatUUID "${heartbeatUUID}" --useQueues $*"
echo $CMD
$CMD
| #!/bin/bash
# Absolute path to this script
SCRIPT=$(readlink -f $0)
BIN=`dirname $SCRIPT`
ROOT=`dirname $BIN`
# Source the scanner configuration
# Typically this should have:
# - scannerID ; the name by which the scanner is defined in the beacon event messages
# - heartbeatUUID ; the uuid of the beacon used as the heartbeat for the scanner
# - brokerURL ; the url of the activemq broker
. ~/scanner.conf
# Aliases for backward compatibility to previous variables used from scanner.conf
scannerID=${scannerID:=$SCANNER_ID}
heartbeatUUID=${HEARTBEAT_UUID:=$heartbeatUUID}
brokerURL=${BROKER_URL:=$brokerURL}
# Bring up bluetooth interface
hciconfig hci0 up
ps -eaf | grep lescan | grep -v grep >/dev/null
found=$?
# non-zero return means lescan not seen running
if [ $found -ne 0 ]; then
hcitool lescan --duplicates >/dev/null &
else
echo lescan already running
fi
CMD="${ROOT}/Debug/src2/NativeScannerBlueZ --scannerID "${scannerID:-`hostname`}" --brokerURL "${brokerURL:-192.168.1.107:5672}" --heartbeatUUID "${heartbeatUUID}" --useQueues $*"
# Check for a background argument
if [ "$1" == "-background" ]; then
CMD="${CMD} &"
fi
# Start the scanner
echo "Running: $CMD"
$CMD
|
Make storage/innobase the working directory of Subversion. | #!/bin/bash
#
# Prepare the MySQL source code tree for building
# with checked-out InnoDB Subversion directory.
# This script assumes that the MySQL tree is at .. and that . = ../innodb
set -eu
TARGETDIR=../storage/innobase
rm -fr "$TARGETDIR"
mkdir "$TARGETDIR"
# link the build scripts
ln -sf ../innodb/compile-innodb{,-debug} ../BUILD
# create the directories
for dir in */
do
case "$dir" in
mysql-test/) ;;
*.svn*) ;;
*to-mysql*) ;;
*) mkdir "$TARGETDIR/$dir" ;;
esac
done
# create the symlinks to files
cd "$TARGETDIR"
for dir in */
do
cd "$dir"
ln -s ../../../innodb/"$dir"* .
cd ..
done
for file in plug.in Makefile.am CMakeLists.txt
do
ln -s ../../innodb/"$file" .
done
cd ../../mysql-test/t
ln -sf ../../innodb/mysql-test/*.test ../../innodb/mysql-test/*.opt ./
ln -sf ../../innodb/mysql-test/*.result ../r/
ln -sf ../../innodb/mysql-test/*.inc ../include/
| #!/bin/bash
#
# Prepare the MySQL source code tree for building
# with checked-out InnoDB Subversion directory.
# This script assumes that the current directory is storage/innobase.
set -eu
TARGETDIR=../storage/innobase
# link the build scripts
ln -sf $TARGETDIR/compile-innodb{,-debug} ../../BUILD
cd ../../mysql-test
ln -sf ../$TARGETDIR/mysql-test/*.test ../../innodb/mysql-test/*.opt t/
ln -sf ../$TARGETDIR/mysql-test/*.result r/
ln -sf ../$TARGETDIR/mysql-test/*.inc include/
|
Fix typo in file name | #!/usr/bin/env bash
module load freesurfer/5.3.0
module load xz/5.2.2
date
start=`date +%s`
WD=$PWD
if [ "$OSG_WN_TMP" != "" ];
then
SUBJECTS_DIR=`mktemp -d --tmpdir=$OSG_WN_TMP`
else
SUBJECTS_DIR=`mktemp -d --tmpdir=$PWD`
fi
cp $1_recon2_*.tar.xz $SUBJECTS_DIR
cd $SUBJECTS_DIR
if [ -e "$1_recon2_lh_output.tar.xz" ];
then
tar xvaf $1_recon2_lh_output.tar.xz
tar xvaf $1_recon2_rh_output.tar.xz
rm $1_recon2_lh_output.tar.xz
rm $1_recon2_rh_output.tar.xz
fi
if [ -e "$1_recon2_output.tar.xz" ];
then
tar xvaf $1_recon2_ouput.tar.xz
rm $1_recon2_ouput.tar.xz
fi
recon-all \
-s $1 \
-autorecon3 \
-openmp $2
cd $SUBJECTS_DIR
tar cvJf $WD/$1_output.tar.xz *
cd $WD | #!/usr/bin/env bash
module load freesurfer/5.3.0
module load xz/5.2.2
date
start=`date +%s`
WD=$PWD
if [ "$OSG_WN_TMP" != "" ];
then
SUBJECTS_DIR=`mktemp -d --tmpdir=$OSG_WN_TMP`
else
SUBJECTS_DIR=`mktemp -d --tmpdir=$PWD`
fi
cp $1_recon2_*.tar.xz $SUBJECTS_DIR
cd $SUBJECTS_DIR
if [ -e "$1_recon2_lh_output.tar.xz" ];
then
tar xvaf $1_recon2_lh_output.tar.xz
tar xvaf $1_recon2_rh_output.tar.xz
rm $1_recon2_lh_output.tar.xz
rm $1_recon2_rh_output.tar.xz
fi
if [ -e "$1_recon2_output.tar.xz" ];
then
tar xvaf $1_recon2_output.tar.xz
rm $1_recon2_output.tar.xz
fi
recon-all \
-s $1 \
-autorecon3 \
-openmp $2
cd $SUBJECTS_DIR
tar cvJf $WD/$1_output.tar.xz *
cd $WD |
Fix check for unset var | # Single argument should be 'dev' or 'archive'. Default is 'archive'.
ENVIRONMENT=archive
if [ -n $1 ]; then
ENVIRONMENT=$1
fi
pushd ../
tar --exclude=".git" -zcvf f5-cloud-libs.tar.gz f5-cloud-libs
azure storage blob upload --quiet f5-cloud-libs.tar.gz $ENVIRONMENT f5-cloud-libs.tar.gz
azure storage blob upload --quiet f5-cloud-libs/scripts/azure/runScripts.js $ENVIRONMENT runScripts.js
azure storage blob upload --quiet f5-cloud-libs/scripts/deployHttp.sh $ENVIRONMENT deployHttp.sh
popd
| #!/bin/bash
# Single argument should be 'dev' or 'archive'. Default is 'archive'.
ENVIRONMENT=archive
if [ -n "$1" ]; then
ENVIRONMENT=$1
fi
echo GOT ENVIRONMENT $ENVIRONMENT
pushd ../
tar --exclude=".git" -zcvf f5-cloud-libs.tar.gz f5-cloud-libs
azure storage blob upload --quiet f5-cloud-libs.tar.gz $ENVIRONMENT f5-cloud-libs.tar.gz
azure storage blob upload --quiet f5-cloud-libs/scripts/azure/runScripts.js $ENVIRONMENT runScripts.js
azure storage blob upload --quiet f5-cloud-libs/scripts/deployHttp.sh $ENVIRONMENT deployHttp.sh
popd
|
Modify test to download and install Unpacker2 | #!/bin/bash
function executeCommand {
$@
rc=$?; if [[ $rc != 0 ]]; then exit $rc; fi
echo "Exit code[" $@ "]: $rc"
}
executeCommand "mkdir -p build"
executeCommand "cd build"
executeCommand "export CMAKE_LIBRARY_PATH=$CMAKE_LIBRARY_PATH:/framework-dependencies/lib"
executeCommand "export CMAKE_INCLUDE_PATH=$CMAKE_INCLUDE_PATH:/framework-dependencies/include"
executeCommand "source /root-system/bin/thisroot.sh"
executeCommand "cmake .."
executeCommand "source thisframework.sh"
executeCommand "make all tests -j4"
executeCommand "cd tests"
executeCommand "./run_tests.pl -f xml"
executeCommand "chmod a+x parseXML.py"
executeCommand "./parseXML.py" | #!/bin/bash
function executeCommand {
$@
rc=$?; if [[ $rc != 0 ]]; then exit $rc; fi
echo "Exit code[" $@ "]: $rc"
}
executeCommand "git clone --single-branch --branch cmake https://github.com/grey277/Unpacker2.git"
executeCommand "mkdir -p Unpacker2/build"
executeCommand "cd Unpacker2/build"
executeCommand "cmake .."
executeCommand "cmake --build ."
executeCommand "make install"
executeCommand "cd ../.."
executeCommand "mkdir -p build"
executeCommand "cd build"
executeCommand "export CMAKE_LIBRARY_PATH=$CMAKE_LIBRARY_PATH:/framework-dependencies/lib"
executeCommand "export CMAKE_INCLUDE_PATH=$CMAKE_INCLUDE_PATH:/framework-dependencies/include"
executeCommand "source /root-system/bin/thisroot.sh"
executeCommand "cmake .."
executeCommand "source thisframework.sh"
executeCommand "make all tests -j4"
executeCommand "cd tests"
executeCommand "./run_tests.pl -f xml"
executeCommand "chmod a+x parseXML.py"
executeCommand "./parseXML.py" |
Fix sed parsing on invalid UTF-8 byte sequence | #!/bin/sh
cat /var/log/manticore/query.log | sed '/ (0,[0-9][0-9]\+)\] \[/!d; /_index\] *$/d; s/\[... \(...\) \?\([0-9]\+\) [0-9.:]\+ \([0-9]\+\)\].*\] \[\([a-z]\+\)\(_main\)\?_index[^]]*\] \?\(.*\)$/\2 \1 \3,\4,\6/'
| #!/bin/sh
cat /var/log/manticore/query.log | LANG=C sed '/ (0,[0-9][0-9]\+)\] \[/!d; /_index\] *$/d; s/\[... \(...\) \?\([0-9]\+\) [0-9.:]\+ \([0-9]\+\)\].*\] \[\([a-z]\+\)\(_main\)\?_index[^]]*\] \?\(.*\)$/\2 \1 \3,\4,\6/'
|
Add galaxy rules to skip list. | #!/bin/bash -x
# Copyright 2021, Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ev
function main {
find . -name "*.sh" -exec shellcheck {} \;
find . -name "*.sh" -exec bashate -e E006 {} \;
find . -name "*.py" \
! -path "./chef/cookbooks/bcpc/files/default/*" -exec flake8 {} \;
ansible-lint -x var-naming ansible/
cookstyle --version && cookstyle .
}
main
| #!/bin/bash -x
# Copyright 2021, Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ev
function main {
find . -name "*.sh" -exec shellcheck {} \;
find . -name "*.sh" -exec bashate -e E006 {} \;
find . -name "*.py" \
! -path "./chef/cookbooks/bcpc/files/default/*" -exec flake8 {} \;
ansible-lint -x var-naming -x meta-no-info -x meta-no-tags ansible/
cookstyle --version && cookstyle .
}
main
|
Add helper to get the value of an alias only | function zsh_stats() {
history | awk '{CMD[$2]++;count++;}END { for (a in CMD)print CMD[a] " " CMD[a]/count*100 "% " a;}' | grep -v "./" | column -c3 -s " " -t | sort -nr | nl | head -n20
}
function uninstall_oh_my_zsh() {
/usr/bin/env ZSH=$ZSH /bin/sh $ZSH/tools/uninstall.sh
}
function upgrade_oh_my_zsh() {
/usr/bin/env ZSH=$ZSH /bin/sh $ZSH/tools/upgrade.sh
}
function take() {
mkdir -p $1
cd $1
}
| function zsh_stats() {
history | awk '{CMD[$2]++;count++;}END { for (a in CMD)print CMD[a] " " CMD[a]/count*100 "% " a;}' | grep -v "./" | column -c3 -s " " -t | sort -nr | nl | head -n20
}
function uninstall_oh_my_zsh() {
/usr/bin/env ZSH=$ZSH /bin/sh $ZSH/tools/uninstall.sh
}
function upgrade_oh_my_zsh() {
/usr/bin/env ZSH=$ZSH /bin/sh $ZSH/tools/upgrade.sh
}
function take() {
mkdir -p $1
cd $1
}
#
# Get the value of an alias.
#
# Arguments:
# 1. alias - The alias to get its value from
# STDOUT:
# The value of alias $1 (if it has one).
# Return value:
# 0 if the alias was found,
# 1 if it does not exist
#
function alias_value() {
alias "$1" | sed "s/^$1='\(.*\)'$/\1/"
test $(alias "$1")
}
#
# Try to get the value of an alias,
# otherwise return the input.
#
# Arguments:
# 1. alias - The alias to get its value from
# STDOUT:
# The value of alias $1, or $1 if there is no alias $1.
# Return value:
# Always 0
#
function try_alias_value() {
alias_value "$1" || echo "$1"
} |
Fix typo in CDPATH variable | # we want vim as default editor
export EDITOR=vim
# trim paths to 3 hierarchy levels
export PROMPT_DIRTRIM=3
# must press ctrl-D 2+1 times to exit shell
export IGNOREEOF="2"
# ignore case, long prompt, exit if it fits on one screen
export LESS="-iMFXR"
#export GREP_OPTIONS='--color=auto'
# base paths to use for "cd" command completion
export CDPATH=.:~:/mnt:/media;/etc
# PATHs for command lookup
export PATH=~/bin:/opt/vagrant/bin:$PATH
# enable vagrant debug logs
export VAGRANT_LOG=debug
| # we want vim as default editor
export EDITOR=vim
# trim paths to 3 hierarchy levels
export PROMPT_DIRTRIM=3
# must press ctrl-D 2+1 times to exit shell
export IGNOREEOF="2"
# ignore case, long prompt, exit if it fits on one screen
export LESS="-iMFXR"
#export GREP_OPTIONS='--color=auto'
# base paths to use for "cd" command completion
export CDPATH=.:~:/mnt:/media:/etc
# PATHs for command lookup
export PATH=~/bin:/opt/vagrant/bin:$PATH
# enable vagrant debug logs
export VAGRANT_LOG=debug
|
Use single equals sign for string comparison | #!/bin/sh
#Move to the folder where ep-lite is installed
cd $(dirname $0)
#Was this script started in the bin folder? if yes move out
if [ -d "../bin" ]; then
cd "../"
fi
ignoreRoot=0
for ARG in "$@"
do
if [ "$ARG" = "--root" ]; then
ignoreRoot=1
fi
done
#Stop the script if it's started as root
if [ "$(id -u)" -eq 0 ] && [ $ignoreRoot -eq 0 ]; then
echo "You shouldn't start Etherpad as root!"
echo "Please type 'Etherpad rocks my socks' or supply the '--root' argument if you still want to start it as root"
read rocks
if [ ! "$rocks" == "Etherpad rocks my socks" ]
then
echo "Your input was incorrect"
exit 1
fi
fi
#Prepare the environment
bin/installDeps.sh "$@" || exit 1
#Move to the node folder and start
echo "Started Etherpad..."
SCRIPTPATH=$(pwd -P)
exec node "$SCRIPTPATH/node_modules/ep_etherpad-lite/node/server.js" "$@"
| #!/bin/sh
#Move to the folder where ep-lite is installed
cd $(dirname $0)
#Was this script started in the bin folder? if yes move out
if [ -d "../bin" ]; then
cd "../"
fi
ignoreRoot=0
for ARG in "$@"
do
if [ "$ARG" = "--root" ]; then
ignoreRoot=1
fi
done
#Stop the script if it's started as root
if [ "$(id -u)" -eq 0 ] && [ $ignoreRoot -eq 0 ]; then
echo "You shouldn't start Etherpad as root!"
echo "Please type 'Etherpad rocks my socks' or supply the '--root' argument if you still want to start it as root"
read rocks
if [ ! "$rocks" = "Etherpad rocks my socks" ]
then
echo "Your input was incorrect"
exit 1
fi
fi
#Prepare the environment
bin/installDeps.sh "$@" || exit 1
#Move to the node folder and start
echo "Started Etherpad..."
SCRIPTPATH=$(pwd -P)
exec node "$SCRIPTPATH/node_modules/ep_etherpad-lite/node/server.js" "$@"
|
Add noconfirm option to makepkg | #!/usr/bin/env bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ "$(pacman -T synaptiko-desktop-status | wc -l)" != "0" ]; then
cd $DIR/../.packages
if [ ! -d synaptiko-packages ]; then
git clone https://github.com/synaptiko/packages.git synaptiko-packages
fi
cd synaptiko-packages/synaptiko-desktop-status
git pull
makepkg -sircC
cd $DIR
sudo systemctl --system daemon-reload
sudo systemctl enable synaptiko-desktop-status.socket
sudo systemctl start synaptiko-desktop-status.socket
fi
| #!/usr/bin/env bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ "$(pacman -T synaptiko-desktop-status | wc -l)" != "0" ]; then
cd $DIR/../.packages
if [ ! -d synaptiko-packages ]; then
git clone https://github.com/synaptiko/packages.git synaptiko-packages
fi
cd synaptiko-packages/synaptiko-desktop-status
git pull
makepkg --noconfirm -sircC
cd $DIR
sudo systemctl --system daemon-reload
sudo systemctl enable synaptiko-desktop-status.socket
sudo systemctl start synaptiko-desktop-status.socket
fi
|
Use 64-bits version of Pharo 6.1 (needed in MacOS) | wget --quiet -O - get.pharo.org/61+vm | bash
REPO=http://smalltalkhub.com/mc/RMoD/Ecstatic/main
./pharo Pharo.image config $REPO ConfigurationOfEcstatic --install=0.9.1 --group='ALL'
| wget --quiet -O - get.pharo.org/64/61+vm | bash
REPO=http://smalltalkhub.com/mc/RMoD/Ecstatic/main
./pharo Pharo.image config $REPO ConfigurationOfEcstatic --install=0.9.1 --group='ALL'
|
Install by copy and not with setup | #!/bin/sh
set -x -e
#export GCC=${PREFIX}/bin/gcc
#export CXX=${PREFIX}/bin/g++
export INCLUDE_PATH="${PREFIX}/include"
export LIBRARY_PATH="${PREFIX}/lib"
export LD_LIBRARY_PATH="${PREFIX}/lib"
export LDFLAGS="-L${PREFIX}/lib"
export CXXFLAGS="-I$PREFIX/include"
export CPPFLAGS="-I${PREFIX}/include"
export LC_ALL=C
mkdir -p $PREFIX/bin
sed -i.bak 's/print sys.hexversion>=0x02050000/print(sys.hexversion>=0x02050000)/' makefile
rm *.bak
make
sed -i.bak 's/third_party/kmergenie/g' scripts/*
sed -i.bak 's/third_party/kmergenie/g' kmergenie
rm scripts/*.bak
rm kmergenie.bak
cp scripts/* $PREFIX/bin
cp specialk $PREFIX/bin
mkdir -p python-build/scripts
cp kmergenie python-build/scripts
cp wrapper.py python-build/scripts
cp -rf third_party python-build/kmergenie
cd python-build
cp $RECIPE_DIR/setup.py ./
python setup.py build
python setup.py install
| #!/bin/sh
set -x -e
#export GCC=${PREFIX}/bin/gcc
#export CXX=${PREFIX}/bin/g++
export INCLUDE_PATH="${PREFIX}/include"
export LIBRARY_PATH="${PREFIX}/lib"
export LD_LIBRARY_PATH="${PREFIX}/lib"
export LDFLAGS="-L${PREFIX}/lib"
export CXXFLAGS="-I$PREFIX/include"
export CPPFLAGS="-I${PREFIX}/include"
export LC_ALL=C
mkdir -p $PREFIX/bin
sed -i.bak 's/print sys.hexversion>=0x02050000/print(sys.hexversion>=0x02050000)/' makefile
make
sed -i.bak 's/third_party\.//g' scripts/*
sed -i.bak 's/third_party\.//g' kmergenie
cp scripts/* $PREFIX/bin
cp third_party/* $PREFIX/bin
cp specialk $PREFIX/bin
cp kmergenie $PREFIX/bin
cp wrapper.py $PREFIX/bin
|
Fix bug in docker build scripts | #!/bin/bash
# First lets get Postgis going
source functions.sh
docker build -t kartoza/postgis git://github.com/kartoza/docker-postgis
restart_postgis_server
# Now build the django image
cd ../docker-prod
./build.sh
cd -
# Now collect migrate and collect static
migrate
collectstatic
# Now run the service
run_django_server
| #!/bin/bash
# First lets get Postgis going
source functions.sh
docker build -t kartoza/postgis git://github.com/kartoza/docker-postgis
restart_postgis_server
# Now build the django image
cd ../docker-prod
./build.sh
cd -
# Now collect migrate and collect static
manage migrate
manage "collectstatic --noinput"
# Now run the service
run_django_server
|
Speed up upload & remove non-needed echos | #!/usr/bin/env bash
modulePath='~/puppet'
echo "Getting settings"
settingsPath=$1
. $settingsPath
# zero
if [ -z "$puppetColor" ]; then
puppetColor="false"
fi
echo "Uploading puppet"
ssh $userName@$hostName "
rm --recursive --force $modulePath;
mkdir --parents $modulePath;
"
for module in $(echo $modules | tr ":" "\n")
do
# recursively, quiet & with compression
scp -r -q -C $module $userName@$hostName:$modulePath/$(basename $module)
done
echo "Applying puppet"
ssh $userName@$hostName "
sudo puppet apply --verbose --color $puppetColor --modulepath $modulePath $modulePath/$entryPoint;
rm --recursive --force $modulePath;
"
echo "Done"
| #!/usr/bin/env bash
modulePath='~/puppet'
# Getting settings
settingsPath=$1
. $settingsPath
# zero
if [ -z "$puppetColor" ]; then
puppetColor="false"
fi
# Packing puppet
packDirectory=$(mktemp --directory)
packFile=$(mktemp)
trap "rm --recursive --force $packDirectory" EXIT
trap "rm --recursive --force $packFile" EXIT
for module in $(echo $modules | tr ":" "\n")
do
cp --recursive $module $packDirectory/$(basename $module)
rm --recursive --force $packDirectory/$(basename $module)/.git
done
tar --create --bzip2 --file $packFile --directory $packDirectory .
echo "Uploading puppet"
ssh $userName@$hostName "
rm --recursive --force $modulePath;
mkdir --parents $modulePath;
"
# quiet
scp -q $packFile $userName@$hostName:$modulePath/$(basename $packFile)
echo "Applying puppet"
ssh $userName@$hostName "
tar --extract --bzip2 --file $modulePath/$(basename $packFile) --directory $modulePath
sudo puppet apply --verbose --color $puppetColor --modulepath $modulePath $modulePath/$entryPoint;
rm --recursive --force $modulePath;
"
echo "Done"
|
Allow parameter passing to perf.tests | #!/bin/bash -ex
function prepare_venv() {
virtualenv -p python3 venv && source venv/bin/activate && python3 `which pip3` install -r requirements.txt
}
[ "$NOVENV" == "1" ] || prepare_venv || exit 1
python3 -B src/perf-tests.py
| #!/bin/bash -ex
function prepare_venv() {
virtualenv -p python3 venv && source venv/bin/activate && python3 `which pip3` install -r requirements.txt
}
[ "$NOVENV" == "1" ] || prepare_venv || exit 1
python3 -B src/perf-tests.py $@
|
Update jump box to latest Terraform version (v0.6.14) | #!/bin/bash
# Minimal script to install Terraform as described:
# https://www.terraform.io/intro/getting-started/install.html
# This will get the jump box to where it can deploy infrastrucutre to AWS
TERRAFORM_URL=https://releases.hashicorp.com/terraform/0.6.13/terraform_0.6.13_linux_amd64.zip
TERRAFORM_ZIP=terraform_0.6.13_linux_amd64.zip
sudo apt-get install -y unzip
sudo mkdir -p /usr/local/terraform
# Check if Terraform is already installed
if [ ! -f /usr/local/terraform/terraform ]
then
# remove any failed downloads
rm $TERRAFORM_ZIP
echo "Downloading Terraform Binary"
wget $TERRAFORM_URL
sudo unzip $TERRAFORM_ZIP -d /usr/local/terraform
else
echo "Terraform is already installed"
fi
if [[ ! $(grep terraform ~/.profile) ]]
then
echo "Adding Terraform to PATH"
echo "#Add Terraform to path" >> $HOME/.profile
echo "PATH=$PATH:/usr/local/terraform" >> $HOME/.profile
else
echo "Terraform already in PATH"
fi
| #!/bin/bash
# Minimal script to install Terraform as described:
# https://www.terraform.io/intro/getting-started/install.html
# This will get the jump box to where it can deploy infrastrucutre to AWS
TERRAFORM_URL=https://releases.hashicorp.com/terraform/0.6.14/terraform_0.6.14_linux_amd64.zip
TERRAFORM_ZIP=terraform_0.6.14_linux_amd64.zip
sudo apt-get install -y unzip
sudo mkdir -p /usr/local/terraform
# Check if Terraform is already installed
if [ ! -f /usr/local/terraform/terraform ]
then
# remove any failed downloads
rm $TERRAFORM_ZIP
echo "Downloading Terraform Binary"
wget $TERRAFORM_URL
sudo unzip $TERRAFORM_ZIP -d /usr/local/terraform
else
echo "Terraform is already installed"
fi
if [[ ! $(grep terraform ~/.profile) ]]
then
echo "Adding Terraform to PATH"
echo "#Add Terraform to path" >> $HOME/.profile
echo "PATH=$PATH:/usr/local/terraform" >> $HOME/.profile
else
echo "Terraform already in PATH"
fi
|
Add a few new docker aliases | cite about-plugin
about-plugin 'Helpers to get Docker setup correctly for boot2docker'
# Note, this might need to be different if you have an older version
# of boot2docker, or its configured for a different IP
if [[ `uname -s` == "Darwin" ]]; then
export DOCKER_HOST=tcp://192.168.59.103:2375
docker-enter() {
boot2docker ssh '[ -f /var/lib/boot2docker/nsenter ] || docker run --rm -v /var/lib/boot2docker/:/target jpetazzo/nsenter'
boot2docker ssh -t sudo "/var/lib/boot2docker/docker-enter \"$1\""
}
fi
| cite about-plugin
about-plugin 'Helpers to get Docker setup correctly for boot2docker'
# Note, this might need to be different if you have an older version
# of boot2docker, or its configured for a different IP
if [[ `uname -s` == "Darwin" ]]; then
export DOCKER_HOST=tcp://192.168.59.103:2375
docker-enter() {
boot2docker ssh '[ -f /var/lib/boot2docker/nsenter ] || docker run --rm -v /var/lib/boot2docker/:/target jpetazzo/nsenter'
boot2docker ssh -t sudo "/var/lib/boot2docker/docker-enter \"$1\""
}
fi
function docker-remove-most-recent-container() {
about 'attempt to remove the most recent container from docker ps -a'
group 'docker'
docker ps -a | head -2 | tail -1 | awk '{print $NF}' | xargs docker rm
}
function docker-remove-most-recent-image() {
about 'attempt to remove the most recent image from docker images'
group 'docker'
docker images | head -2 | tail -1 | awk '{print $3}' | xargs docker rmi
}
|
Update from GCC 4.8 to GCC 7. | #!/bin/bash
cd git/SwiftShader
set -e # Fail on any error.
set -x # Display commands being run.
# Download all submodules
git submodule update --init
mkdir -p build && cd build
if [[ -z "${REACTOR_BACKEND}" ]]; then
REACTOR_BACKEND="LLVM"
fi
cmake .. "-DCMAKE_BUILD_TYPE=${BUILD_TYPE}" "-DREACTOR_BACKEND=${REACTOR_BACKEND}" "-DREACTOR_VERIFY_LLVM_IR=1"
make --jobs=$(nproc)
# Run unit tests
cd .. # Some tests must be run from project root
build/ReactorUnitTests
build/gles-unittests
build/vk-unittests | #!/bin/bash
cd git/SwiftShader
set -e # Fail on any error.
set -x # Display commands being run.
# Specify we want to build with GCC 7
sudo apt-get update
sudo apt-get install -y gcc-7 g++-7
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-7 100 --slave /usr/bin/g++ g++ /usr/bin/g++-7
sudo update-alternatives --set gcc "/usr/bin/gcc-7"
# Download all submodules
git submodule update --init
mkdir -p build && cd build
if [[ -z "${REACTOR_BACKEND}" ]]; then
REACTOR_BACKEND="LLVM"
fi
cmake .. "-DCMAKE_BUILD_TYPE=${BUILD_TYPE}" "-DREACTOR_BACKEND=${REACTOR_BACKEND}" "-DREACTOR_VERIFY_LLVM_IR=1"
make --jobs=$(nproc)
# Run unit tests
cd .. # Some tests must be run from project root
build/ReactorUnitTests
build/gles-unittests
build/vk-unittests |
Add Distribution directory if not yet there | set -e
echo "Building Framework..."
xcodebuild -scheme "Universal Framework" -configuration "Debug" > Distribution/build.log
open Distribution/
echo "Done."
| set -e
echo "Building Framework..."
mkdir -p Distribution
xcodebuild -scheme "Universal Framework" -configuration "Debug" > Distribution/build.log
open Distribution/
echo "Done."
|
Use Tesora pypi cache on bluebox | #!/bin/bash -xe
# Copyright (C) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
source /etc/nodepool/provider
# BH: Temporarily overwrite consumption of pypi cache in Tesora's downstream
#cat >/home/jenkins/.pip/pip.conf <<EOF
#[global]
#index-url = http://pypi.$NODEPOOL_REGION.openstack.org/simple
#EOF
#cat >/home/jenkins/.pydistutils.cfg <<EOF
#[easy_install]
#index_url = http://pypi.$NODEPOOL_REGION.openstack.org/simple
#EOF
# Double check that when the node is made ready it is able
# to resolve names against DNS.
host git.openstack.org
host pypi.${NODEPOOL_REGION}.openstack.org
| #!/bin/bash -xe
# Copyright (C) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
source /etc/nodepool/provider
cat >/home/jenkins/.pip/pip.conf <<EOF
[global]
index-url = http://pypi.elasticdb.org/simple
EOF
cat >/home/jenkins/.pydistutils.cfg <<EOF
[easy_install]
index_url = http://pypi.elasticdb.org/simple
EOF
# Need pseudo DNS name for vhost to work.
echo "10.240.28.44 pypi.elasticdb.org" | sudo tee -a /etc/hosts
# Double check that when the node is made ready it is able
# to resolve names against DNS.
host git.openstack.org
|
Add color to all greps | # grc overides for ls
# Made possible through contributions from generous benefactors like
# `brew install coreutils`
if $(gls &>/dev/null)
then
alias ls="gls -F --color"
alias ll="gls -lAh --color"
alias l="gls -A -1 --color"
alias la='gls -A --color'
fi
alias books="cd ~/Dropbox/books/"
alias dotfiles="cd ~/github/d-land/dotfiles"
| # grc overides for ls
# Made possible through contributions from generous benefactors like
# `brew install coreutils`
if $(gls &>/dev/null)
then
alias ls="gls -F --color"
alias ll="gls -lAh --color"
alias l="gls -A -1 --color"
alias la='gls -A --color'
fi
alias grep="grep --color"
alias books="cd ~/Dropbox/books/"
alias dotfiles="cd ~/github/d-land/dotfiles"
|
Clean up and fix +1/-0 syntax to work as expected | ##
# dircycle plugin: enables cycling through the directory
# stack using Ctrl+Shift+Left/Right
eval "insert-cycledleft () { zle push-line; LBUFFER='pushd -q +1'; zle accept-line }"
zle -N insert-cycledleft
bindkey "\e[1;6D" insert-cycledleft
eval "insert-cycledright () { zle push-line; LBUFFER='pushd -q +0'; zle accept-line }"
zle -N insert-cycledright
bindkey "\e[1;6C" insert-cycledright
| # enables cycling through the directory stack using
# Ctrl+Shift+Left/Right
#
# left/right direction follows the order in which directories
# were visited, like left/right arrows do in a browser
# NO_PUSHD_MINUS syntax:
# pushd +N: start counting from left of `dirs' output
# pushd -N: start counting from right of `dirs' output
setopt nopushdminus
insert-cycledleft () {
zle push-line
LBUFFER='pushd -q +1'
zle accept-line
}
zle -N insert-cycledleft
insert-cycledright () {
zle push-line
LBUFFER='pushd -q -0'
zle accept-line
}
zle -N insert-cycledright
bindkey "\e[1;6D" insert-cycledleft
bindkey "\e[1;6C" insert-cycledright
|
Make it possible to compare with source tarball | #!/usr/bin/env bash
set -e
source config.sh
if [ $# = 0 ]
then
recipes="$(ls -1 recipes)"
else
recipes="$@"
fi
for recipe in $recipes
do
echo -e "\e[1m$recipe\e[0m"
if [ -d "recipes/$recipe/source/.git" ]
then
git -C "recipes/$recipe/source" status
else
echo "Not a git repository"
fi
done
| #!/usr/bin/env bash
set -e
source config.sh
if [ $# = 0 ]
then
recipes="$(ls -1 recipes)"
else
recipes="$@"
fi
for recipe in $recipes
do
echo -e "\e[1m$recipe\e[0m"
if [ -d "recipes/$recipe/source/.git" ]
then
git -C "recipes/$recipe/source" status
elif [ -e "recipes/$recipe/source.tar" ]
then
echo "Using source tarball"
tar --compare --file="recipes/$recipe/source.tar" -C "recipes/$recipe/source" --strip-components=1 2>&1| grep -v "tar: :" | grep -v '\(Mode\|Gid\|Uid\) differs' || true
else
echo "No original source found"
fi
done
|
Set the INITRD env variable and make 'ischroot' always return true. | #!/bin/bash
set -e
source /build/buildconfig
set -x
## Temporarily disable dpkg fsync to make building faster.
echo force-unsafe-io > /etc/dpkg/dpkg.cfg.d/02apt-speedup
## Enable Ubuntu Universe and Multiverse.
cp /build/sources.list /etc/apt/sources.list
apt-get update
## Install HTTPS support for APT.
$minimal_apt_get_install apt-transport-https
## Fix some issues with APT packages.
## See https://github.com/dotcloud/docker/issues/1024
dpkg-divert --local --rename --add /sbin/initctl
ln -sf /bin/true /sbin/initctl
## Upgrade all packages.
echo "initscripts hold" | dpkg --set-selections
apt-get upgrade -y --no-install-recommends
## Fix locale.
$minimal_apt_get_install language-pack-en
locale-gen en_US
| #!/bin/bash
set -e
source /build/buildconfig
set -x
## Temporarily disable dpkg fsync to make building faster.
echo force-unsafe-io > /etc/dpkg/dpkg.cfg.d/02apt-speedup
## Prevent initramfs updates from trying to run grub and lilo.
## https://journal.paul.querna.org/articles/2013/10/15/docker-ubuntu-on-rackspace/
## http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=594189
export INITRD=no
echo -n no > /etc/container_environment.d/INITRD
## Enable Ubuntu Universe and Multiverse.
cp /build/sources.list /etc/apt/sources.list
apt-get update
## Fix some issues with APT packages.
## See https://github.com/dotcloud/docker/issues/1024
dpkg-divert --local --rename --add /sbin/initctl
ln -sf /bin/true /sbin/initctl
## Replace the 'ischroot' tool to make it always return true.
## Prevent initscripts updates from breaking /dev/shm.
## https://journal.paul.querna.org/articles/2013/10/15/docker-ubuntu-on-rackspace/
## https://bugs.launchpad.net/launchpad/+bug/974584
dpkg-divert --local --rename --add /usr/bin/ischroot
ln -sf /bin/true /usr/bin/ischroot
## Install HTTPS support for APT.
$minimal_apt_get_install apt-transport-https
## Upgrade all packages.
echo "initscripts hold" | dpkg --set-selections
apt-get upgrade -y --no-install-recommends
## Fix locale.
$minimal_apt_get_install language-pack-en
locale-gen en_US
|
Update the java doc script | #!/bin/bash
javadoc -private -sourcepath src/main/java:src/test/java:src/jmh/java -use -version -author -d doc org.numenta.nupic org.numenta.nupic.util org.numenta.nupic.model org.numenta.nupic.research org.numenta.nupic.integration org.numenta.nupic.encoders org.numenta.nupic.algorithms org.numenta.nupic.benchmarks org.numenta.nupic.network org.numenta.nupic.network.sensor org.numenta.nupic.datagen org.numenta.nupic.monitor org.numenta.nupic.monitor.mixin org.numenta.nupic.research.sensorimotor
| #!/bin/bash
javadoc -private -sourcepath src/main/java:src/test/java:src/jmh/java -use -version -author -d doc org.numenta.nupic org.numenta.nupic.util org.numenta.nupic.model org.numenta.nupic.research org.numenta.nupic.integration org.numenta.nupic.encoders org.numenta.nupic.algorithms org.numenta.nupic.benchmarks org.numenta.nupic.network org.numenta.nupic.network.sensor org.numenta.nupic.datagen org.numenta.nupic.monitor org.numenta.nupic.monitor.mixin org.numenta.nupic.research.sensorimotor org.numenta.nupic.serialize
|
Fix to subscription manager for better license mgmt | #!/usr/bin/env bash
# _ _ _ _ ___ _ __
# / \ | | | \ | | | \ | | \ |
# \_ | | |_| | |_| | |_| |_
# \ | | | | | |\ | | | |
# \_/ |_| |_/ |_| | \ _|_ |_/ |__ .sh
#
# Usage
#
# ./subscribe.sh
#
# This script will:
# - Disable the firewall permanently
# - Register the system with your Red Hat Developer account
# - Install the latest updates
#
# You will be prompted for your developer account credentials.
#
# If you are running this at DORIS, make sure your proxy is set.
# See /etc/profile.d/proxy.sh
#
# ensure running as root
if [ "$(id -u)" != "0" ]; then
exec sudo "$0" "$@"
fi
subscription-manager register --username $1 --password $2
subscription-manager attach
subscription-manager repos --enable rhel-server-rhscl-6-rpms
subscription-manager repos --enable rhel-6-server-optional-rpms
yum -y update
printf "\nPlease reboot now:\n\n"
printf "\t$ exit\n"
printf "\t$ vagrant reload\n\n" | #!/usr/bin/env bash
# _ _ _ _ ___ _ __
# / \ | | | \ | | | \ | | \ |
# \_ | | |_| | |_| | |_| |_
# \ | | | | | |\ | | | |
# \_/ |_| |_/ |_| | \ _|_ |_/ |__ .sh
#
# Usage
#
# ./subscribe.sh
#
# This script will:
# - Disable the firewall permanently
# - Register the system with your Red Hat Developer account
# - Install the latest updates
#
# You will be prompted for your developer account credentials.
#
# If you are running this at DORIS, make sure your proxy is set.
# See /etc/profile.d/proxy.sh
#
# ensure running as root
if [ "$(id -u)" != "0" ]; then
exec sudo "$0" "$@"
fi
subscription-manager register --username $1 --password $2 --name $3 --auto-attach --force
subscription-manager attach
subscription-manager repos --enable rhel-server-rhscl-6-rpms
subscription-manager repos --enable rhel-6-server-optional-rpms
yum -y update
printf "\nPlease reboot now:\n\n"
printf "\t$ exit\n"
printf "\t$ vagrant reload\n\n" |
Add --no-check-certificate option for CMake download in Travis setup. | #!/bin/bash
# Copyright (c) 2014-2016 Kartik Kumar (me@kartikkumar.com)
# Distributed under the MIT License.
# See accompanying file LICENSE.md or copy at http://opensource.org/licenses/MIT
set -ev
# Fetch and build updated version of CMake from source.
wget https://cmake.org/files/v3.4/cmake-3.4.1.tar.gz
tar -xzvf cmake-3.4.1.tar.gz
cd cmake-3.4.1
./bootstrap
make
make install | #!/bin/bash
# Copyright (c) 2014-2016 Kartik Kumar (me@kartikkumar.com)
# Distributed under the MIT License.
# See accompanying file LICENSE.md or copy at http://opensource.org/licenses/MIT
set -ev
# Fetch and build updated version of CMake from source.
wget https://cmake.org/files/v3.4/cmake-3.4.1.tar.gz --no-check-certificate
tar -xzvf cmake-3.4.1.tar.gz
cd cmake-3.4.1
./bootstrap
make
make install |
Make bits-service deployment manifest available as 'manifests/manifest.yml' | #!/bin/bash -e
# Generate a bosh-lite test manifest for the bits-service release and copy it
# out, versioned by the contents of $VERSION_FILE.
cd $(dirname $0)/../../git-bits-service-release
if [ -e "$VERSION_FILE" ]; then
export VERSION=$(cat $VERSION_FILE)
echo "Using VERSION=\"$VERSION\""
else
echo "The \$VERSION_FILE \"$VERSION_FILE\" does not exist"
exit 1
fi
bosh -u x -p x target $BOSH_TARGET Lite
bosh login $BOSH_USERNAME $BOSH_PASSWORD
if [ -z "$BLOBSTORE_TYPE" ]
then
# NOTE(review): $BLOBSTORE_TYPE is empty in this branch, so this prints
# "Please provide " — the variable name was probably meant to be literal.
>&2 echo "Please provide $BLOBSTORE_TYPE"
exit 1
else
./scripts/generate-test-bosh-lite-manifest \
./templates/$BLOBSTORE_TYPE.yml \
../git-bits-service-ci/manifests/bits-release-network-$BLOBSTORE_TYPE.yml
fi
cp deployments/bits-service-release.yml ../manifests/manifest-$VERSION.yml
| #!/bin/bash -e
# Generate a bosh-lite test manifest for the bits-service release and copy it
# out both versioned and under the fixed name manifest.yml.
cd $(dirname $0)/../../git-bits-service-release
if [ -e "$VERSION_FILE" ]; then
export VERSION=$(cat $VERSION_FILE)
echo "Using VERSION=\"$VERSION\""
else
echo "The \$VERSION_FILE \"$VERSION_FILE\" does not exist"
exit 1
fi
bosh -u x -p x target $BOSH_TARGET Lite
bosh login $BOSH_USERNAME $BOSH_PASSWORD
if [ -z "$BLOBSTORE_TYPE" ]
then
# NOTE(review): $BLOBSTORE_TYPE is empty in this branch, so this prints
# "Please provide " — the variable name was probably meant to be literal.
>&2 echo "Please provide $BLOBSTORE_TYPE"
exit 1
else
./scripts/generate-test-bosh-lite-manifest \
./templates/$BLOBSTORE_TYPE.yml \
../git-bits-service-ci/manifests/bits-release-network-$BLOBSTORE_TYPE.yml
fi
cp deployments/bits-service-release.yml ../manifests/manifest-$VERSION.yml
# Also publish under a stable, unversioned name for downstream jobs.
cp deployments/bits-service-release.yml ../manifests/manifest.yml
|
Add git-prune to gc script. | #!/bin/bash
set -eu
# Run fsck + gc on every git repo found one level under each argument
# directory (default: current directory).
if [[ $# -eq 0 ]]; then
readonly dirlist=(.)
else
readonly dirlist=("$@")
fi
# NOTE(review): gc_flags is declared but never used — `git gc` below is
# invoked with a literal --aggressive instead of "${gc_flags[@]}".
readonly gc_flags=(
'--auto'
'--aggressive'
)
readonly origdir="$(pwd)"
for dir in "${dirlist[@]}"; do
for repo in "$dir"/*/; do
if [[ ! -d "$repo/.git" ]]; then
continue
fi
cd "$repo"
echo "Entering $repo..."
git fsck --full
git gc --aggressive
cd "$origdir"
done
done
| #!/bin/bash
set -eu
# Run fsck + gc + prune on every git repo found one level under each argument
# directory (default: current directory).
if [[ $# -eq 0 ]]; then
readonly dirlist=(.)
else
readonly dirlist=("$@")
fi
readonly origdir="$(pwd)"
for dir in "${dirlist[@]}"; do
for repo in "$dir"/*/; do
if [[ ! -d "$repo/.git" ]]; then
continue
fi
cd "$repo"
echo "Entering $repo..."
git fsck --full
git gc --aggressive
# Remove unreachable objects left behind by gc.
git prune --verbose
cd "$origdir"
done
done
|
Add version to the dir name in the source tarball | #!/bin/bash
# Build a vendored source tarball of croc into dist/.
tmp=$(mktemp -d)
# Scrape the version number out of the Version = "vX.Y.Z" line in cli.go.
VERSION=$(cat ./src/cli/cli.go | grep 'Version = "v' | sed 's/[^0-9.]*\([0-9.]*\).*/\1/')
echo $VERSION
git clone --depth 1 https://github.com/schollz/croc $tmp/croc
(cd $tmp/croc && go mod tidy && go mod vendor)
(cd $tmp && tar -cvzf croc_${VERSION}_src.tar.gz croc)
mv $tmp/croc_${VERSION}_src.tar.gz dist/
| #!/bin/bash
# Build a vendored source tarball of croc into dist/; the top-level directory
# inside the tarball is croc-<version>.
tmp=$(mktemp -d)
# Scrape the version number out of the Version = "vX.Y.Z" line in cli.go.
VERSION=$(cat ./src/cli/cli.go | grep 'Version = "v' | sed 's/[^0-9.]*\([0-9.]*\).*/\1/')
echo $VERSION
git clone --depth 1 https://github.com/schollz/croc $tmp/croc-${VERSION}
(cd $tmp/croc-${VERSION} && go mod tidy && go mod vendor)
(cd $tmp && tar -cvzf croc_${VERSION}_src.tar.gz croc-${VERSION})
mv $tmp/croc_${VERSION}_src.tar.gz dist/
|
Allow arbitrary PMMP download path for Travis builds | #!/bin/bash
set -e
# Travis-only setup: install PHP extensions and a PocketMine test harness.
if [ "$TRAVIS" != "true" ]; then
echo Please only run this script on Travis-CI
exit 1
fi
echo Installing pthreads 3.1.6
pecl install channel://pecl.php.net/pthreads-3.1.6
echo Installing weakref 0.3.2
pecl install channel://pecl.php.net/weakref-0.3.2
echo Installing yaml 2.0.0-RC7
# The leading `echo |` answers yaml's interactive prompt with a default.
echo | pecl install channel://pecl.php.net/yaml-2.0.0RC7
mkdir "$TRAVIS_BUILD_DIR"/../PocketMine && cd "$TRAVIS_BUILD_DIR"/../PocketMine
echo Installing PocketMine in $PWD
echo Downloading PocketMine build from Poggit
wget -O PocketMine-MP.phar https://poggit.pmmp.io/get.pmmp/master
mkdir plugins && wget -O plugins/PluginChecker.phar https://poggit.pmmp.io/res/PluginChecker.phar
echo Downloading Poggit build
mkdir unstaged
wget -O - https://poggit.pmmp.io/res/travisPluginTest.php | php -- unstaged
echo Installed allthethings. Execute https://poggit.pmmp.io/travisScript.sh in the script phase to execute test.
exit 0
| #!/bin/bash
set -e
# Travis-only setup: install PHP extensions and a PocketMine test harness.
if [ "$TRAVIS" != "true" ]; then
echo Please only run this script on Travis-CI
exit 1
fi
# Optional $1 overrides where the PocketMine phar is downloaded from.
PM_DL_PATH="${1:-"https://poggit.pmmp.io/get.pmmp/master"}"
echo Installing pthreads 3.1.6
pecl install channel://pecl.php.net/pthreads-3.1.6
echo Installing weakref 0.3.2
pecl install channel://pecl.php.net/weakref-0.3.2
echo Installing yaml 2.0.0-RC7
# The leading `echo |` answers yaml's interactive prompt with a default.
echo | pecl install channel://pecl.php.net/yaml-2.0.0RC7
mkdir "$TRAVIS_BUILD_DIR"/../PocketMine && cd "$TRAVIS_BUILD_DIR"/../PocketMine
echo Installing PocketMine in $PWD
echo Downloading PocketMine build from Poggit
wget -O PocketMine-MP.phar "$PM_DL_PATH"
mkdir plugins && wget -O plugins/PluginChecker.phar https://poggit.pmmp.io/res/PluginChecker.phar
echo Downloading Poggit build
mkdir unstaged
wget -O - https://poggit.pmmp.io/res/travisPluginTest.php | php -- unstaged
echo Installed allthethings. Execute https://poggit.pmmp.io/travisScript.sh in the script phase to execute test.
exit 0
|
Add tigervnc for unity desktop | #!/bin/sh
xfce()
{
apt-get install xfce4 -yq
(
cat <<'EOF'
#!/bin/sh
if [ -r /etc/default/locale ]; then
. /etc/default/locale
export LANG LANGUAGE
fi
startxfce4
EOF
) > /etc/xrdp/startwm.sh
}
unity()
{
apt-get install ubuntu-desktop -yq
(
cat <<'EOF'
#!/bin/sh
if [ -r /etc/default/locale ]; then
. /etc/default/locale
export LANG LANGUAGE
fi
unity
EOF
) > /etc/xrdp/startwm.sh
}
mate()
{
apt-get install mate-desktop-environment -yq
(
cat <<'EOF'
#!/bin/sh
if [ -r /etc/default/locale ]; then
. /etc/default/locale
export LANG LANGUAGE
fi
mate-session
EOF
) > /etc/xrdp/startwm.sh
}
apt-get update
apt-get install xrdp -yq
eval $1
service xrdp restart | #!/bin/sh
# Install the XFCE desktop and point xrdp's session script at it.
# (The heredoc bodies are written verbatim to /etc/xrdp/startwm.sh.)
xfce()
{
apt-get install xrdp -y -qq
apt-get install xfce4 -y -qq
(
cat <<'EOF'
#!/bin/sh
if [ -r /etc/default/locale ]; then
. /etc/default/locale
export LANG LANGUAGE
fi
startxfce4
EOF
) > /etc/xrdp/startwm.sh
}
# Install Unity plus a TigerVNC server build matched to the Ubuntu release
# and architecture; Ubuntu-only.
unity()
{
if [ $(lsb_release -si) != "Ubuntu" ]; then
(>&2 echo "This GUI shell only works on ubuntu")
exit -1
fi
if [ $(uname -m) = "x86_64" ]; then
ARCH=amd64
else
ARCH=i386
fi
TIGERVNCPATH=https://dl.bintray.com/tigervnc/stable/ubuntu-$(lsb_release -sr)LTS/$ARCH/tigervncserver_1.7.0-1ubuntu1_$ARCH.deb
curl -L $TIGERVNCPATH -o tigervncserver.deb
dpkg -i tigervncserver.deb
# Fix up any dependencies dpkg could not resolve.
apt-get install -fy -qq
apt-get install xrdp -y -qq
apt-get install ubuntu-desktop -y -qq
(
cat <<'EOF'
#!/bin/sh
if [ -r /etc/default/locale ]; then
. /etc/default/locale
export LANG LANGUAGE
fi
unity
EOF
) > /etc/xrdp/startwm.sh
}
# Install the MATE desktop and point xrdp's session script at it.
mate()
{
apt-get install xrdp -y -qq
apt-get install mate-desktop-environment -y -qq
(
cat <<'EOF'
#!/bin/sh
if [ -r /etc/default/locale ]; then
. /etc/default/locale
export LANG LANGUAGE
fi
mate-session
EOF
) > /etc/xrdp/startwm.sh
}
apt-get update
# $1 names the desktop function to run (xfce|unity|mate).
# NOTE(review): eval on a caller-supplied argument — trusted callers only.
eval $1
service xrdp restart
Revert "fix lib being added to the package twice lol" | #!/bin/bash
source "_build_common.sh"
mkdir -p out/js
echo 'Generating lang.js...'
php ./dump_js_lang.php
# Include the demo module only for demo builds.
if [[ $ESP_DEMO ]]; then
demofile=js/demo.js
else
demofile=
fi
echo 'Processing JS...'
# Production builds skip source maps.
if [[ $ESP_PROD ]]; then
smarg=
else
smarg=--source-maps
fi
# Concatenate + transpile all app JS into one hashed bundle.
npm run babel -- -o "out/js/app.$FRONT_END_HASH.js" ${smarg} \
js/lib/chibi.js \
js/lib/keymaster.js \
js/lib/polyfills.js \
js/utils.js \
js/modal.js \
js/notif.js \
js/appcommon.js \
$demofile \
js/lang.js \
js/wifi.js \
js/term_* \
js/debug_screen.js \
js/soft_keyboard.js \
js/term.js
| #!/bin/bash
source "_build_common.sh"
mkdir -p out/js
echo 'Generating lang.js...'
php ./dump_js_lang.php
# Include the demo module only for demo builds.
if [[ $ESP_DEMO ]]; then
demofile=js/demo.js
else
demofile=
fi
echo 'Processing JS...'
# Production builds skip source maps.
if [[ $ESP_PROD ]]; then
smarg=
else
smarg=--source-maps
fi
# Concatenate + transpile all app JS into one hashed bundle.
# NOTE(review): this revert passes the js/lib directory in addition to the
# individual js/lib/*.js files below, so those sources appear twice.
npm run babel -- -o "out/js/app.$FRONT_END_HASH.js" ${smarg} js/lib \
js/lib/chibi.js \
js/lib/keymaster.js \
js/lib/polyfills.js \
js/utils.js \
js/modal.js \
js/notif.js \
js/appcommon.js \
$demofile \
js/lang.js \
js/wifi.js \
js/term_* \
js/debug_screen.js \
js/soft_keyboard.js \
js/term.js
|
Make dependency fetching more reliable | #!/bin/sh
# Fetch third-party JS dependencies that are not already present.
cd www_files/js
test -e jquery-1.4.4.min.js || wget http://code.jquery.com/jquery-1.4.4.min.js
test -e adhoc.js || wget http://cgit.babelmonkeys.de/cgit.cgi/adhocweb/plain/js/adhoc.js
# strophe.js must be built from the release tarball before being copied in.
test -e strophe.js || (wget --no-check-certificate https://github.com/metajack/strophejs/tarball/release-1.0.1 && \
tar xzf *.tar.gz && rm *.tar.gz && cd metajack-strophejs* && make strophe.js && cp strophe.js ../strophe.js && \
cd .. && rm -rf metajack-strophejs*)
| #!/bin/sh
# Fetch third-party JS dependencies that are not already present.
cd www_files/js
test -e jquery-1.4.4.min.js || wget http://code.jquery.com/jquery-1.4.4.min.js
test -e adhoc.js || wget http://cgit.babelmonkeys.de/cgit.cgi/adhocweb/plain/js/adhoc.js
# Download to a fixed name and cd into an explicit directory glob so stray
# tarballs in the working directory cannot break the build.
test -e strophe.js || (wget --no-check-certificate -O strophe.tar.gz https://github.com/metajack/strophejs/tarball/release-1.0.1 && \
tar xzf strophe.tar.gz && rm strophe.tar.gz && cd metajack-strophejs-*/ && make strophe.js && cp strophe.js ../strophe.js && \
cd .. && rm -rf metajack-strophejs*)
|
Make script more robust against query changes | #! /bin/sh -x
# Regenerate the geocoder test fixtures from live query results.
$(dirname $0)/geocode.sh germering > $(dirname $0)/library/src/test/resources/de-by-germering.json
$(dirname $0)/geocode.sh kas > $(dirname $0)/library/src/test/resources/tr-antalya-kas.json
$(dirname $0)/geocode.sh serik > $(dirname $0)/library/src/test/resources/tr-antalya-serik.json
$(dirname $0)/geocode.sh keciborlu > $(dirname $0)/library/src/test/resources/tr-isparta-keciborlu.json
$(dirname $0)/pretty_print_test_data.sh
| #! /bin/sh -x
# Regenerate the geocoder test fixtures from live query results.
# "kas,antalya" disambiguates the query so a result-order change upstream
# cannot swap in the wrong town.
$(dirname $0)/geocode.sh germering \
> $(dirname $0)/library/src/test/resources/de-by-germering.json
$(dirname $0)/geocode.sh kas,antalya \
> $(dirname $0)/library/src/test/resources/tr-antalya-kas.json
$(dirname $0)/geocode.sh serik \
> $(dirname $0)/library/src/test/resources/tr-antalya-serik.json
$(dirname $0)/geocode.sh keciborlu \
> $(dirname $0)/library/src/test/resources/tr-isparta-keciborlu.json
$(dirname $0)/pretty_print_test_data.sh
|
Use Logger in lieu of MessageBuilder | include logger.Logger
GitException(){
curBranchException(){
action=${1}
branch=${2}
MessageBuilder logErrorMsg cannot-${action}-${branch}-because-${branch}-is-the-current-branch.
}
existingBranchException(){
action=${1}
branch=${2}
MessageBuilder logErrorMsg cannot-${action}-${branch}-because-${branch}-already-exists.
}
$@
} | include logger.Logger
# Error reporters for git branch operations; messages go through Logger.
# The trailing `$@` dispatches to the inner function named by the caller's
# first argument, passing the remaining arguments through.
GitException(){
curBranchException(){
action=${1}
branch=${2}
Logger logErrorMsg cannot-${action}-${branch}-because-${branch}-is-the-current-branch.
}
existingBranchException(){
action=${1}
branch=${2}
Logger logErrorMsg cannot-${action}-${branch}-because-${branch}-already-exists.
}
$@
}
Add tag before pushing to github | #!/bin/bash
#Filter
echo "Current GIT status before filtering:"
git status
# Strip CI/IDE files from history before mirroring to the public repo.
echo "Filtering unnecessary files..."
git filter-branch -f --index-filter 'git rm -r --cached --ignore-unmatch .project .gitlab-ci.yml .pydevproject .gitlab-ci-scripts' HEAD
#Push
[[ -d ~/.ssh ]] || mkdir ~/.ssh
# $GITHUB_SSH_KEY comes from CI secrets; strip CRs in case it was pasted
# with Windows line endings.
echo "$GITHUB_SSH_KEY" | tr -d '\r' > ~/.ssh/github_ssh_key
chmod 600 ~/.ssh/github_ssh_key
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/github_ssh_key
ssh-keyscan -H "github.com" >> ~/.ssh/known_hosts
#git push --mirror git@github.com:martyy665/ekvok.git
echo "Mirroring..."
git push --mirror git@github.com:martyy665/ekvok.git
echo "Pushing tag"
if [[ ! -z "${CI_COMMIT_TAG}" ]]; then
git push git@github.com:martyy665/ekvok.git "${CI_COMMIT_TAG}"
fi
| #!/bin/bash
# Use the CI tag as the version when present, otherwise mint a test tag.
if [[ ! -z "${CI_COMMIT_TAG}" ]]; then
EVOK_VERSION=${CI_COMMIT_TAG}
else
EVOK_VERSION=$(/ci-scripts/generate-new-tag-for-test.sh)
fi
#Filter
echo "Current GIT status before filtering:"
git status
# Strip CI/IDE files from history before mirroring to the public repo.
echo "Filtering unnecessary files..."
git filter-branch -f --index-filter 'git rm -r --cached --ignore-unmatch .project .gitlab-ci.yml .pydevproject .gitlab-ci-scripts' HEAD
#Push
[[ -d ~/.ssh ]] || mkdir ~/.ssh
# $GITHUB_SSH_KEY comes from CI secrets; strip CRs in case it was pasted
# with Windows line endings.
echo "$GITHUB_SSH_KEY" | tr -d '\r' > ~/.ssh/github_ssh_key
chmod 600 ~/.ssh/github_ssh_key
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/github_ssh_key
ssh-keyscan -H "github.com" >> ~/.ssh/known_hosts
#git push --mirror git@github.com:martyy665/ekvok.git
# Tag locally before mirroring so the mirror push carries the tag.
echo "Tagging..."
git tag ${EVOK_VERSION}
echo "Mirroring..."
git push --mirror git@github.com:martyy665/ekvok.git
echo "Pushing tag"
if [[ ! -z "${CI_COMMIT_TAG}" ]]; then
git push git@github.com:martyy665/ekvok.git "${CI_COMMIT_TAG}"
fi
|
Revert "Let's pretend that this is better" | #!/usr/bin/env bash
# Symlink dotfiles from $CONFIGDIR/dotfiles into $HOME, backing up any
# differing originals into $CONFIGDIR/dotfiles_old.
NEWFOLDER=dotfiles
OLDFOLDER=dotfiles_old
# "maclabs" mode keeps config under the network home instead of $HOME.
if [[ $1 == maclabs ]]; then
CONFIGDIR=$HOME/Desktop/Network-Home
echo $1
else
CONFIGDIR=$HOME
fi
mkdir -p $CONFIGDIR/$OLDFOLDER
FILE_LIST="bashrc profile vimrc vim gvimrc bash_aliases bash_functions inputrc
bash_profile bash_path wgetrc tmux.conf tmux_snap.conf gitconfig gitignore
Xresources startxwinrc"
for file in $FILE_LIST; do
# Only replace the dotfile when it differs from the repo copy.
diff $CONFIGDIR/$NEWFOLDER/$file $HOME/.$file > /dev/null 2>&1
isdiff=$?
if [[ $isdiff -ge 1 ]]; then
mv $HOME/.$file $CONFIGDIR/$OLDFOLDER/$file
ln -s $CONFIGDIR/$NEWFOLDER/$file $HOME/.$file
fi
done
if [[ ! -d $HOME/bin ]]; then
mkdir $HOME/bin
chmod 755 $HOME/bin
fi
# Link helper scripts into ~/bin when not already present.
for file in $CONFIGDIR/$NEWFOLDER/bin/*; do
linkfile=`basename $file`
if [[ ! -f $linkfile ]]; then
ln -s $file $HOME/bin/$linkfile
fi
done
| #!/bin/bash
# Symlink dotfiles from $CONFIGDIR/dotfiles into $HOME, backing up any
# differing originals into $CONFIGDIR/dotfiles_old. This variant cd's into
# the target directories and links with relative names.
NEWFOLDER=dotfiles
OLDFOLDER=dotfiles_old
# "maclabs" mode keeps config under the network home instead of $HOME.
if [[ $1 == maclabs ]]; then
CONFIGDIR=$HOME/Desktop/Network-Home
echo $1
else
CONFIGDIR=$HOME
fi
mkdir -p $CONFIGDIR/$OLDFOLDER
FILE_LIST="bashrc profile vimrc vim gvimrc bash_aliases bash_functions inputrc
bash_profile bash_path wgetrc tmux.conf tmux_snap.conf gitconfig gitignore
Xresources startxwinrc"
cd $HOME
for file in $FILE_LIST; do
# Only replace the dotfile when it differs from the repo copy.
diff $CONFIGDIR/$NEWFOLDER/$file .$file > /dev/null 2>&1
isdiff=$?
if [[ $isdiff -ge 1 ]]; then
mv .$file $CONFIGDIR/$OLDFOLDER
ln -s $CONFIGDIR/$NEWFOLDER/$file .$file
fi
done
cd -
if [[ ! -d $HOME/bin ]]; then
mkdir $HOME/bin
chmod 755 $HOME/bin
fi
# Link helper scripts into ~/bin when not already present.
cd ~/bin
for file in $CONFIGDIR/$NEWFOLDER/bin/*; do
linkfile=`basename $file`
if [[ ! -f $linkfile ]]; then
ln -s $file $linkfile
fi
done
cd -
|
Add patches directory which will contain Canvas Worker patches | #!/bin/bash
# Fetch, pin and build Chromium for the canvasWorker branch. First run
# bootstraps the checkout; later runs only rebuild.
RELDIR=`dirname $0;`
BASE=`cd $RELDIR;pwd`
cd $BASE
# Pinned revisions are stored next to this script.
LKGR=`cat LKGR`
LKGR_WK=`cat LKGR-WebKit`
if [ ! -d src ]
then
mkdir tmp
cd tmp
# Download Chromium source
fetch --nohooks chromium
# Install dependencies
cd src
./build/install-build-deps.sh
gclient runhooks
# Move everything to its final place
cd $BASE/tmp
mv $BASE/tmp/src $BASE/
# Move to Last Known Good Revision
git checkout -B canvasWorker $LKGR
gclient sync
cd third_party/WebKit
git checkout -B canvasWorker $LKGR_WK
# Setup build env
cd $BASE/src
gn gen out/Release
gn args out/Release
fi
cd $BASE/src
# Start the build
ninja -C out/Release chrome
| #!/bin/bash
# Fetch, pin, patch and build Chromium for the canvasWorker branch. First run
# bootstraps the checkout and applies local WebKit patches; later runs only
# rebuild.
RELDIR=`dirname $0;`
BASE=`cd $RELDIR;pwd`
cd $BASE
# Pinned revisions are stored next to this script.
LKGR=`cat LKGR`
LKGR_WK=`cat LKGR-WebKit`
if [ ! -d src ]
then
mkdir tmp
cd tmp
# Download Chromium source
fetch --nohooks chromium
# Install dependencies
cd src
./build/install-build-deps.sh
gclient runhooks
# Move everything to its final place
cd $BASE/tmp
mv $BASE/tmp/src $BASE/
# Move to Last Known Good Revision
git checkout -B canvasWorker $LKGR
gclient sync
cd third_party/WebKit
git checkout -B canvasWorker $LKGR_WK
# Setup build env
cd $BASE/src
gn gen out/Release
gn args out/Release
# Apply the Canvas Worker patch series on top of the pinned WebKit.
cd $BASE/src/third_party/WebKit
git am -3 $BASE/patches/*patch
fi
cd $BASE/src
# Start the build
ninja -C out/Release chrome
|
Install packer to Jenkins build slave | #!/bin/bash
set -x
set -e
# Provision a Jenkins build slave with the OpenStack Liberty toolchain.
add-apt-repository cloud-archive:liberty
apt-get -qq -y update
apt-get -qq -y install openjdk-7-jdk rake ruby-puppetlabs-spec-helper puppet-lint git bundler python-dev libssl-dev libxml2-dev libxslt-dev python-tox python-pip build-essential libmysqlclient-dev libfreetype6-dev libpng12-dev git-buildpackage debhelper dupload libffi-dev npm nodejs libpq-dev unzip qemu pkg-config libvirt-dev libsqlite3-dev libldap2-dev libsasl2-dev
apt-get -qq -y install dh-systemd openstack-pkg-tools python-sphinx python3-setuptools libcurl3 python-ceilometerclient python-cinderclient python-designateclient python-glanceclient python-heatclient python-keystoneclient python-muranoclient python-neutronclient python-novaclient python-openstackclient python-swiftclient
apt-get -qq -y install python-hivemind python-hivemind-contrib
apt-get -qq -y autoremove
apt-get -qq -y autoclean
apt-get -qq -y clean
| #!/bin/bash
set -x
set -e
# Provision a Jenkins build slave with the OpenStack Liberty toolchain
# plus HashiCorp Packer.
add-apt-repository cloud-archive:liberty
apt-get -qq -y update
apt-get -qq -y install openjdk-7-jdk rake ruby-puppetlabs-spec-helper puppet-lint git bundler python-dev libssl-dev libxml2-dev libxslt-dev python-tox python-pip build-essential libmysqlclient-dev libfreetype6-dev libpng12-dev git-buildpackage debhelper dupload libffi-dev npm nodejs libpq-dev unzip qemu pkg-config libvirt-dev libsqlite3-dev libldap2-dev libsasl2-dev
apt-get -qq -y install dh-systemd openstack-pkg-tools python-sphinx python3-setuptools libcurl3 python-ceilometerclient python-cinderclient python-designateclient python-glanceclient python-heatclient python-keystoneclient python-muranoclient python-neutronclient python-novaclient python-openstackclient python-swiftclient
apt-get -qq -y install python-hivemind python-hivemind-contrib
apt-get -qq -y autoremove
apt-get -qq -y autoclean
apt-get -qq -y clean
# Install packer
wget https://releases.hashicorp.com/packer/0.8.6/packer_0.8.6_linux_amd64.zip -O /tmp/packer_0.8.6_linux_amd64.zip
unzip /tmp/packer_0.8.6_linux_amd64.zip -d /usr/local/bin/
|
Bring back -h option to ls variants | # Changing/making/removing directory
# zsh directory-navigation options and aliases.
setopt auto_pushd
setopt pushd_ignore_dups
setopt pushdminus
# Global aliases: expand anywhere on the command line.
alias -g ...='../..'
alias -g ....='../../..'
alias -g .....='../../../..'
alias -g ......='../../../../..'
# Jump to the Nth entry of the directory stack.
alias 1='cd -'
alias 2='cd -2'
alias 3='cd -3'
alias 4='cd -4'
alias 5='cd -5'
alias 6='cd -6'
alias 7='cd -7'
alias 8='cd -8'
alias 9='cd -9'
alias md='mkdir -p'
alias rd=rmdir
alias d='dirs -v | head -10'
# List directory contents
alias lsa='ls -lah'
alias l='ls -la'
alias ll='ls -l'
alias la='ls -lA'
# Push and pop directories on directory stack
alias pu='pushd'
alias po='popd'
| # Changing/making/removing directory
# zsh directory-navigation options and aliases (ls variants use -h for
# human-readable sizes).
setopt auto_pushd
setopt pushd_ignore_dups
setopt pushdminus
# Global aliases: expand anywhere on the command line.
alias -g ...='../..'
alias -g ....='../../..'
alias -g .....='../../../..'
alias -g ......='../../../../..'
# Jump to the Nth entry of the directory stack.
alias 1='cd -'
alias 2='cd -2'
alias 3='cd -3'
alias 4='cd -4'
alias 5='cd -5'
alias 6='cd -6'
alias 7='cd -7'
alias 8='cd -8'
alias 9='cd -9'
alias md='mkdir -p'
alias rd=rmdir
alias d='dirs -v | head -10'
# List directory contents
alias lsa='ls -lah'
alias l='ls -lah'
alias ll='ls -lh'
alias la='ls -lAh'
# Push and pop directories on directory stack
alias pu='pushd'
alias po='popd'
|
Disable eMMC flashing for the BBB | #!/bin/sh
set -e
# Update a BeagleBone's helper scripts, kernel and bootloader, then enable
# eMMC flashing on next boot by uncommenting the flasher line in uEnv.txt.
echo "Updating scripts and tools..."
cd /opt/scripts/
git pull
echo "Updating Kernel..."
/opt/scripts/tools/update_kernel.sh
echo "Updating bootloader..."
/opt/scripts/tools/developers/update_bootloader.sh
uENV_path="/boot/uEnv.txt"
if [ -e "$uENV_path" ]
then
echo "Enabling eMMC flashing..."
# Strip the leading '#' from the init-eMMC-flasher-v3.sh line.
sed -i '/init-eMMC-flasher-v3.sh/s/^#*//g' "$uENV_path"
else
echo "$uENV_path does not exist"
fi
| #!/bin/sh
set -e
# Update a BeagleBone's helper scripts, kernel and bootloader. The eMMC
# flashing step is intentionally disabled (kept commented for reference).
echo "Updating scripts and tools..."
cd /opt/scripts/
git pull
echo "Updating Kernel..."
/opt/scripts/tools/update_kernel.sh
echo "Updating bootloader..."
/opt/scripts/tools/developers/update_bootloader.sh
# uENV_path="/boot/uEnv.txt"
# if [ -e "$uENV_path" ]
# then
# echo "Enabling eMMC flashing..."
# sed -i '/init-eMMC-flasher-v3.sh/s/^#*//g' "$uENV_path"
# else
# echo "$uENV_path does not exist"
# fi
|
Add -e option to escape \t | #!/bin/bash
#usage: ./tools/size.sh
# Report raw / minified / gzipped sizes and line count for dist/$1.
SRC=$1-debug.js
MIN=$1.js
SIZE_SRC=$(cat dist/$SRC | wc -c)
SIZE_MIN=$(cat dist/$MIN | wc -c)
SIZE_GZIP=$(gzip -c1 dist/$MIN | wc -c)
echo
# NOTE(review): bash's echo does not expand \t without -e, so these lines
# print a literal backslash-t.
echo "\t`echo "scale=3;$SIZE_SRC/1024" | bc -l` KB $SRC"
echo "\t`echo "scale=3;$SIZE_MIN/1024" | bc -l` KB $MIN"
echo "\t`echo "scale=3;$SIZE_GZIP/1024" | bc -l` KB $MIN gzipped"
echo "\t`cat dist/$1-debug.js | wc -l` LOC"
echo
| #!/bin/bash
#usage: ./tools/size.sh
# Report raw / minified / gzipped sizes and line count for dist/$1.
SRC=$1-debug.js
MIN=$1.js
SIZE_SRC=$(cat dist/$SRC | wc -c)
SIZE_MIN=$(cat dist/$MIN | wc -c)
SIZE_GZIP=$(gzip -c1 dist/$MIN | wc -c)
echo
# -e makes bash's echo expand the \t escapes.
echo -e "\t`echo "scale=3;$SIZE_SRC/1024" | bc -l` KB $SRC"
echo -e "\t`echo "scale=3;$SIZE_MIN/1024" | bc -l` KB $MIN"
echo -e "\t`echo "scale=3;$SIZE_GZIP/1024" | bc -l` KB $MIN gzipped"
echo -e "\t`cat dist/$1-debug.js | wc -l` LOC"
echo
|
Create virtualenv on Jenkins only if doesn't exist | #!/bin/bash
set -o pipefail
# Print pass/fail for a test stage; exit with the given status on failure.
# $1 = result code, $2 = exit status to use on failure, $3 = stage name.
function display_result {
RESULT=$1
EXIT_STATUS=$2
TEST=$3
if [ $RESULT -ne 0 ]; then
echo "$TEST failed"
exit $EXIT_STATUS
else
echo "$TEST passed"
fi
}
# One virtualenv per Jenkins job (spaces in the job name become dashes).
VIRTUALENV_DIR=/var/tmp/virtualenvs/$(echo ${JOB_NAME} | tr ' ' '-')
export PIP_DOWNLOAD_CACHE=/var/tmp/pip_download_cache
virtualenv --clear --no-site-packages $VIRTUALENV_DIR
source $VIRTUALENV_DIR/bin/activate
pip install -r requirements.txt
pip install -r requirements_for_tests.txt
rm -f coverage.xml .coverage nosetests.xml
nosetests -v --with-xunit --with-coverage --cover-package=backdrop --cover-inclusive
display_result $? 1 "Unit tests"
python -m coverage.__main__ xml --include=backdrop*
behave --tags=-pending --stop
display_result $? 2 "Feature tests"
$(dirname $0)/pep-it.sh | tee pep8.out
display_result $? 3 "Code style check"
| #!/bin/bash
set -o pipefail
# Print pass/fail for a test stage; exit with the given status on failure.
# $1 = result code, $2 = exit status to use on failure, $3 = stage name.
function display_result {
RESULT=$1
EXIT_STATUS=$2
TEST=$3
if [ $RESULT -ne 0 ]; then
echo "$TEST failed"
exit $EXIT_STATUS
else
echo "$TEST passed"
fi
}
# One virtualenv per Jenkins job (spaces in the job name become dashes).
VIRTUALENV_DIR=/var/tmp/virtualenvs/$(echo ${JOB_NAME} | tr ' ' '-')
export PIP_DOWNLOAD_CACHE=/var/tmp/pip_download_cache
# Reuse the virtualenv between builds; only create it the first time.
if [ ! -e $VIRTUALENV_DIR ]; then
virtualenv --no-site-packages $VIRTUALENV_DIR
fi
source $VIRTUALENV_DIR/bin/activate
pip install -r requirements.txt
pip install -r requirements_for_tests.txt
rm -f coverage.xml .coverage nosetests.xml
nosetests -v --with-xunit --with-coverage --cover-package=backdrop --cover-inclusive
display_result $? 1 "Unit tests"
python -m coverage.__main__ xml --include=backdrop*
behave --tags=-pending --stop
display_result $? 2 "Feature tests"
$(dirname $0)/pep-it.sh | tee pep8.out
display_result $? 3 "Code style check"
|
Use the custom m4 folder | #!/bin/sh
# Autotools bootstrap: regenerate the build system, then configure.
libtoolize --copy
aclocal
gtkdocize --copy
autoconf
autoheader
automake --add-missing --copy
# Forward any caller-supplied options straight to configure.
./configure "$@"
| #!/bin/sh
# Autotools bootstrap: regenerate the build system, then configure.
libtoolize --copy
# Pick up the project's own macros from the m4/ directory.
aclocal -I m4
gtkdocize --copy
autoconf
autoheader
automake --add-missing --copy
# Forward any caller-supplied options straight to configure.
./configure "$@"
|
Remove non used ENV var | #!/bin/bash
# NOTE(review): these two exports point at one developer's local checkout
# and are not read again below.
export GERRIT_SSH_PATH=/Users/chmoulli/Fuse/Fuse-projects/fabric8/docker-gerrit/ssh-admin-key
export GERRIT_SSH_KEYS=/Users/chmoulli/Fuse/Fuse-projects/fabric8/docker-gerrit/ssh-keys
# Regenerate passphrase-less SSH keypairs for the Gerrit admin, jenkins and
# sonar accounts.
rm -rf ssh-admin-key
rm -rf ssh-keys
echo ">> Generate new keys for admin"
mkdir ssh-admin-key
cd ssh-admin-key/
ssh-keygen -b 4096 -t rsa -f ssh-key -q -N "" -C "admin@fabric8.io"
cd ..
echo ">> Generate new keys for jenkins & sonar users"
mkdir ssh-keys
cd ssh-keys/
ssh-keygen -b 4096 -t rsa -f id-jenkins-rsa -q -N "" -C "jenkins@fabric8.io"
ssh-keygen -b 4096 -t rsa -f id-sonar-rsa -q -N "" -C "sonar@fabric8.io"
cd ..
| #!/bin/bash
# Regenerate passphrase-less SSH keypairs for the Gerrit admin, jenkins and
# sonar accounts.
rm -rf ssh-admin-key
rm -rf ssh-keys
echo ">> Generate new keys for admin"
mkdir ssh-admin-key
cd ssh-admin-key/
ssh-keygen -b 4096 -t rsa -f ssh-key -q -N "" -C "admin@fabric8.io"
cd ..
echo ">> Generate new keys for jenkins & sonar users"
mkdir ssh-keys
cd ssh-keys/
ssh-keygen -b 4096 -t rsa -f id-jenkins-rsa -q -N "" -C "jenkins@fabric8.io"
ssh-keygen -b 4096 -t rsa -f id-sonar-rsa -q -N "" -C "sonar@fabric8.io"
cd ..
|
Remove Gemfile.lock before running tests. | #!/bin/bash -x
set -e
# CI job: install gems into a per-job bundle path, run tests, publish.
bundle install --path "${HOME}/bundles/${JOB_NAME}"
export GOVUK_APP_DOMAIN=dev.gov.uk
bundle exec rake test
bundle exec rake publish_gem --trace
| #!/bin/bash -x
set -e
# CI job: install gems into a per-job bundle path, run tests, publish.
# Drop any stale lockfile so bundler resolves against the current Gemfile.
rm -f Gemfile.lock
bundle install --path "${HOME}/bundles/${JOB_NAME}"
export GOVUK_APP_DOMAIN=dev.gov.uk
bundle exec rake test
bundle exec rake publish_gem --trace
|
Allow $EDITOR to be set up elsewhere | export EDITOR='subl' | # Only set this if we haven't set $EDITOR up somewhere else previously.
# Only set this if we haven't set $EDITOR up somewhere else previously.
# -z with a ${EDITOR:-} default is POSIX-correct (the original `==` is a
# bashism inside [ ]) and stays safe under `set -u` when EDITOR is unset.
if [ -z "${EDITOR:-}" ] ; then
  # Use sublime for my editor.
  export EDITOR='subl'
fi
|
Rename references from build to bin | #!/usr/bin/env bash
set -e
set -x
# Split flysystem subtrees out to their read-only mirror repositories.
if [ ! -f "./build/splitsh-lite" ]; then
bash build/install-split.sh
fi
CURRENT_BRANCH="2.x"
# Extract the history of prefix $1 and force-push it to remote $2.
function split()
{
SHA1=`./build/splitsh-lite --prefix=$1 --origin=origin/$CURRENT_BRANCH`
git push $2 "$SHA1:refs/heads/$CURRENT_BRANCH" -f
}
# Register remote $1 -> URL $2; `|| true` tolerates it already existing.
function remote()
{
git remote add $1 $2 || true
}
git pull origin $CURRENT_BRANCH
remote ftp git@github.com:thephpleague/flysystem-ftp.git
remote sftp git@github.com:thephpleague/flysystem-sftp.git
remote memory git@github.com:thephpleague/flysystem-memory.git
remote aws-s3-v3 git@github.com:thephpleague/flysystem-aws-s3-v3.git
remote adapter-test-utilities git@github.com:thephpleague/flysystem-adapter-test-utilities.git
split 'src/Ftp' ftp
split 'src/PhpseclibV2' sftp
split 'src/InMemory' memory
split 'src/AwsS3V3' aws-s3-v3
split 'src/AdapterTestUtilities' adapter-test-utilities
| #!/usr/bin/env bash
set -e
set -x
# Split flysystem subtrees out to their read-only mirror repositories.
# The splitsh-lite helper lives under bin/.
if [ ! -f "./bin/splitsh-lite" ]; then
bash bin/install-split.sh
fi
CURRENT_BRANCH="2.x"
# Extract the history of prefix $1 and force-push it to remote $2.
function split()
{
SHA1=`./bin/splitsh-lite --prefix=$1 --origin=origin/$CURRENT_BRANCH`
git push $2 "$SHA1:refs/heads/$CURRENT_BRANCH" -f
}
# Register remote $1 -> URL $2; `|| true` tolerates it already existing.
function remote()
{
git remote add $1 $2 || true
}
git pull origin $CURRENT_BRANCH
remote ftp git@github.com:thephpleague/flysystem-ftp.git
remote sftp git@github.com:thephpleague/flysystem-sftp.git
remote memory git@github.com:thephpleague/flysystem-memory.git
remote aws-s3-v3 git@github.com:thephpleague/flysystem-aws-s3-v3.git
remote adapter-test-utilities git@github.com:thephpleague/flysystem-adapter-test-utilities.git
split 'src/Ftp' ftp
split 'src/PhpseclibV2' sftp
split 'src/InMemory' memory
split 'src/AwsS3V3' aws-s3-v3
split 'src/AdapterTestUtilities' adapter-test-utilities
|
Add support for curses pinentry | #!/usr/bin/env zsh
# Load cached agent info if present, then start gpg-agent only when no
# agent answers on the current connection.
function reload-gpg-agent() {
if [ -e "$HOME/.gpg-agent-info" ]; then
source $HOME/.gpg-agent-info
export GPG_AGENT_INFO
fi
gpg-connect-agent /bye &> /dev/null
if [ $? != 0 ]; then
eval $(gpg-agent --daemon)
fi
}
reload-gpg-agent
| #!/usr/bin/env zsh
# Load cached agent info if present, then start gpg-agent only when no
# agent answers on the current connection.
function reload-gpg-agent() {
if [ -e "$HOME/.gpg-agent-info" ]; then
source $HOME/.gpg-agent-info
export GPG_AGENT_INFO
fi
gpg-connect-agent /bye &> /dev/null
if [ $? != 0 ]; then
eval $(gpg-agent --daemon)
fi
}
reload-gpg-agent
# Support for curses or CLI-based pin entry applications
export GPG_TTY="$(tty)"
|
Make developing git-radar super easy | autoload colors && colors
# Current directory segment of the prompt, bold cyan.
directory_name() {
echo "%{$fg_bold[cyan]%}%1/%\/%{$reset_color%}"
}
# Arrow colored green/red by the last command's exit status.
ret_status() {
echo "%(?:%{$fg_bold[green]%}➜ :%{$fg_bold[red]%}➜ %s)"
}
export PROMPT=$'$(ret_status)$(directory_name)$(git-radar --zsh --fetch) '
set_prompt () {
export RPROMPT="%{$fg_bold[cyan]%}%{$reset_color%}"
}
precmd() {
title "zsh" "%m" "${PWD##*/}"
set_prompt
}
preexec() {
title "zsh" "%m" "${PWD##*/} - $2"
}
| autoload colors && colors
# Current directory segment of the prompt, bold cyan.
directory_name() {
echo "%{$fg_bold[cyan]%}%1/%\/%{$reset_color%}"
}
# Arrow colored green/red by the last command's exit status.
ret_status() {
echo "%(?:%{$fg_bold[green]%}➜ :%{$fg_bold[red]%}➜ %s)"
}
# Run the local development checkout of git-radar when USE_DEV_RADAR=true,
# otherwise the installed binary.
git_radar() {
USE_DEV_RADAR="${USE_DEV_RADAR:-"false"}"
if [[ $USE_DEV_RADAR == "true" ]]; then
~/Projects/personal/git-radar/git-radar --zsh --fetch
else
git-radar --zsh --fetch
fi
}
export PROMPT=$'$(ret_status)$(directory_name)$(git_radar) '
set_prompt () {
export RPROMPT="%{$fg_bold[cyan]%}%{$reset_color%}"
}
precmd() {
title "zsh" "%m" "${PWD##*/}"
set_prompt
}
preexec() {
title "zsh" "%m" "${PWD##*/} - $2"
}
Add comment for wait_for_migrations hack | #!/bin/bash
set -e
# Container entrypoint: block until the broker/database (and optionally
# migrations) are ready, then exec the given command.
# Each wait polls once a second for up to 60 seconds.
function wait_for_broker {(
set +e
for try in {1..60} ; do
python -c "from kombu import Connection; x=Connection('$CELERY_BROKER_URL', timeout=1); x.connect()" &> /dev/null && break
echo "Waiting for celery broker to respond..."
sleep 1
done
)}
function wait_for_database {(
set +e
for try in {1..60} ; do
python -c "from django.db import connection; connection.connect()" &> /dev/null && break
echo "Waiting for database to respond..."
sleep 1
done
)}
# An empty checkbox in `migrate --list` output means an unapplied migration.
function wait_for_migrations {(
set +e
for try in {1..60} ; do
python manage.py migrate --list | grep "\[ \]" &> /dev/null || break
echo "Waiting for database migrations to be run..."
sleep 1
done
)}
wait_for_broker
wait_for_database
if [ -z "$SKIP_INIT" ]; then
/code/bin/build-app
fi
if [ -n "$WAIT_FOR_MIGRATIONS" ]; then
wait_for_migrations
fi
exec "$@"
| #!/bin/bash
set -e
# Container entrypoint: block until the broker/database (and optionally
# migrations) are ready, then exec the given command.
# Each wait polls once a second for up to 60 seconds.
function wait_for_broker {(
set +e
for try in {1..60} ; do
python -c "from kombu import Connection; x=Connection('$CELERY_BROKER_URL', timeout=1); x.connect()" &> /dev/null && break
echo "Waiting for celery broker to respond..."
sleep 1
done
)}
function wait_for_database {(
set +e
for try in {1..60} ; do
python -c "from django.db import connection; connection.connect()" &> /dev/null && break
echo "Waiting for database to respond..."
sleep 1
done
)}
function wait_for_migrations {(
set +e
for try in {1..60} ; do
# Kind of ugly but not sure if there's another way to determine if migrations haven't run
# migrate --list returns a checkbox list of migrations, empty checkboxes mean they haven't been run
python manage.py migrate --list | grep "\[ \]" &> /dev/null || break
echo "Waiting for database migrations to be run..."
sleep 1
done
)}
wait_for_broker
wait_for_database
if [ -z "$SKIP_INIT" ]; then
/code/bin/build-app
fi
if [ -n "$WAIT_FOR_MIGRATIONS" ]; then
wait_for_migrations
fi
exec "$@"
Add functionality to write test properties | include base.vars.BaseVars
include file.io.util.FileIOUtil
include props.validator.PropsValidator
PropsWriter(){
_setProps(){
if [[ $(PropsValidator propertyExists ${1} ${2}) ]]; then
FileIOUtil replace ${1} ${2}=.* ${2}=${3}
else
FileIOUtil append ${1} ${2}=${3}
fi
}
setAppServerProps(){
_setProps ${appServerProps} ${2} ${3}
}
setBuildProps(){
_setProps ${buildProps} ${2} ${3}
}
setPortalProps(){
_setProps ${portalProps} ${2} ${3}
}
if [[ ! ${bundleDir} ]]; then
local bundleDir=$(BaseVars returnBundleDir ${2})
else
local bundleDir=${bundleDir}
fi
local _buildDir=$(BaseVars returnBuildDir ${2})
local appServerProps=${_buildDir}/app.server.${HOSTNAME}.properties
local buildProps=${_buildDir}/build.${HOSTNAME}.properties
local portalProps=${bundleDir}/portal-ext.properties
$@
} | include base.vars.BaseVars
include file.io.util.FileIOUtil
include props.validator.PropsValidator
# Property-file writer: the trailing `$@` dispatches to the inner setter
# named by the caller's first argument. Setters take ($1=self, $2=key,
# $3=value) and either replace an existing key=... line or append one.
PropsWriter(){
_setProps(){
if [[ $(PropsValidator propertyExists ${1} ${2}) ]]; then
FileIOUtil replace ${1} ${2}=.* ${2}=${3}
else
FileIOUtil append ${1} ${2}=${3}
fi
}
setAppServerProps(){
_setProps ${appServerProps} ${2} ${3}
}
setBuildProps(){
_setProps ${buildProps} ${2} ${3}
}
setPortalProps(){
_setProps ${portalProps} ${2} ${3}
}
setTestProps(){
_setProps ${testProps} ${2} ${3}
}
# Resolve the bundle directory from the environment or from BaseVars.
if [[ ! ${bundleDir} ]]; then
local bundleDir=$(BaseVars returnBundleDir ${2})
else
local bundleDir=${bundleDir}
fi
local _buildDir=$(BaseVars returnBuildDir ${2})
local appServerProps=${_buildDir}/app.server.${HOSTNAME}.properties
local buildProps=${_buildDir}/build.${HOSTNAME}.properties
local portalProps=${bundleDir}/portal-ext.properties
local testProps=${_buildDir}/test.${HOSTNAME}.properties
$@
}
Fix forgetting to rename environment variable name | #!/bin/sh
# Open an interactive jruby dev container wired to the bbs MySQL container
# (legacy docker --link style environment variables).
docker run -i -t --rm \
-e http_proxy="$http_proxy" \
-e https_proxy="$https_proxy" \
-e DB_ENV_MYSQL_USER=bbs \
-e DB_ENV_MYSQL_PASSWORD=bbs \
-e DB_ENV_MYSQL_DATABASE=bbs \
-e DB_PORT_3306_TCP_ADDR=bbs_db_1 \
-e DB_PORT_3306_TCP_PORT=3306 \
-v "$(pwd)":/usr/src/app \
-w /usr/src/app \
-p 4567:4567 \
--net bbs_default \
--link bbs_db_1:db \
jruby:9-alpine bash
| #!/bin/sh
# Open an interactive jruby dev container wired to the bbs MySQL container
# via app-defined DB_* variables; proxy vars default to empty when unset.
docker run -i -t --rm \
-e http_proxy="${http_proxy:-}" \
-e https_proxy="${https_proxy:-}" \
-e DB_USER=bbs \
-e DB_PASSWORD=bbs \
-e DB_HOST=db \
-e DB_PORT=3306 \
-e DB_DATABASE=bbs \
-v "$(pwd)":/usr/src/app \
-w /usr/src/app \
-p 4567:4567 \
--net bbs_default \
--link bbs_db_1:db \
jruby:9-alpine bash
|
Reduce pool size in order to save memory. | #!/bin/bash
# Deploy a tmpnb notebook service behind configurable-http-proxy; both
# containers share a random auth token.
TOKEN=`head -c 30 /dev/urandom | xxd -p`
# NOTE(review): this removes ALL containers on the host, not just ours.
docker rm -f `docker ps -aq`
docker pull dietmarw/notebook
docker run --net=host -d -e CONFIGPROXY_AUTH_TOKEN=$TOKEN \
--name=proxy jupyter/configurable-http-proxy \
--default-target http://127.0.0.1:9999
docker run --net=host -d -e CONFIGPROXY_AUTH_TOKEN=$TOKEN \
--name=tmpnb \
-v /var/run/docker.sock:/docker.sock jupyter/tmpnb python orchestrate.py \
--image=dietmarw/notebook \
--command="setup.sh && \
ipython notebook --NotebookApp.base_url={base_path} --ip=0.0.0.0 --port {port}" \
--redirect_uri="terminals/1"
# This adds a reroute to port 80 (needs root privileges)
# the IP is for now set for the current droplet
iptables -t nat -I PREROUTING -p tcp -d 188.226.207.162 --dport 80 -j REDIRECT --to-ports 8000
iptables -t nat -I OUTPUT -p tcp -o lo --dport 80 -j REDIRECT --to-ports 8000
| #!/bin/bash
TOKEN=`head -c 30 /dev/urandom | xxd -p`
docker rm -f `docker ps -aq`
docker pull dietmarw/notebook
docker run --net=host -d -e CONFIGPROXY_AUTH_TOKEN=$TOKEN \
--name=proxy jupyter/configurable-http-proxy \
--default-target http://127.0.0.1:9999
docker run --net=host -d -e CONFIGPROXY_AUTH_TOKEN=$TOKEN \
--name=tmpnb \
-v /var/run/docker.sock:/docker.sock jupyter/tmpnb python orchestrate.py \
--image=dietmarw/notebook \
--command="setup.sh && \
ipython notebook --NotebookApp.base_url={base_path} --ip=0.0.0.0 --port {port}" \
--redirect_uri="terminals/1" \
--pool_size="2"
# This adds a reroute to port 80 (needs root privileges)
# the IP is for now set for the current droplet
iptables -t nat -I PREROUTING -p tcp -d 188.226.207.162 --dport 80 -j REDIRECT --to-ports 8000
iptables -t nat -I OUTPUT -p tcp -o lo --dport 80 -j REDIRECT --to-ports 8000
|
Upgrade the bundler version installed by the shell script. Hopefully, this will resolve the installer issues on the build machine. | #!/bin/bash
# Copyright (C) 2007, 2008, 2009, 2010 The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
# install the .gem files that are required to run the system
. setenv.sh
GEM_DIR=../lib/gems/
WEBAPP_DIR=../webapp
echo "installing gems"
jruby -S gem update --system
jruby -S gem install --no-ri --no-rdoc $GEM_DIR/bundler-1.0.0.gem
cd $WEBAPP_DIR
bundle install --local --binstubs
| #!/bin/bash
# Copyright (C) 2007, 2008, 2009, 2010 The Collaborative Software Foundation
#
# This file is part of TriSano.
#
# TriSano is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# TriSano is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TriSano. If not, see http://www.gnu.org/licenses/agpl-3.0.txt.
# install the .gem files that are required to run the system
. setenv.sh
GEM_DIR=../lib/gems/
WEBAPP_DIR=../webapp
echo "installing gems"
jruby -S gem update --system
jruby -S gem install --no-ri --no-rdoc $GEM_DIR/bundler-1.0.7.gem
cd $WEBAPP_DIR
bundle install --local --binstubs
|
Add portage-utils to bash image, seems to be a new dependency for eselect related packages. | #
# build config
#
PACKAGES="net-misc/curl app-admin/eselect app-shells/bash"
#
# this method runs in the bb builder container just before starting the build of the rootfs
#
configure_rootfs_build()
{
:
}
#
# this method runs in the bb builder container just before tar'ing the rootfs
#
finish_rootfs_build()
{
:
}
| #
# build config
#
PACKAGES="net-misc/curl app-admin/eselect app-portage/portage-utils app-shells/bash"
#
# this method runs in the bb builder container just before starting the build of the rootfs
#
configure_rootfs_build()
{
:
}
#
# this method runs in the bb builder container just before tar'ing the rootfs
#
finish_rootfs_build()
{
:
}
|
Update paths to GDAL 1.7.1 and Python 2.6 | #!/bin/bash
export VERSION=2.0
export CURDIR=$(cd "$(dirname "$0")"; pwd)
export GDAL_DATA=$CURDIR/../gdal_data
export GEOTIFF_CSV=$GDAL_DATA
export PYTHONPATH=$CURDIR/lib:$PYTHONPATH
| #!/bin/bash
export CURDIR=$(cd "$(dirname "$0")"; pwd)
#export GDAL_DATA=$CURDIR/../gdal_data
export GDAL_DATA=/usr/share/gdal16
export GEOTIFF_CSV=$GDAL_DATA
export PYTHONPATH=$CURDIR/lib:$PYTHONPATH
|
Remove .git folders after installing vim plugins | #!/bin/bash
set -e
source ./config.sh
print_info_text "vim, gnome-vim (macvim)"
rm -rf $USER_HOME/.vim
rm -rf $USER_HOME/.vimrc
rm -rf $USER_HOME/.gvimrc
cp -r ../files/vim $USER_HOME
mv $USER_HOME/vim $USER_HOME/.vim
ln -s $USER_HOME/.vim/vimrc $USER_HOME/.vimrc
ln -s $USER_HOME/.vim/gvimrc $USER_HOME/.gvimrc
mkdir -p $USER_HOME/.vim/.vimswap
mkdir -p $USER_HOME/.vim/bundle
git clone --depth=1 --branch=master https://github.com/scrooloose/nerdtree $USER_HOME/.vim/bundle/nerdtree
git clone --depth=1 --branch=master https://github.com/Xuyuanp/nerdtree-git-plugin.git $USER_HOME/.vim/bundle/nerdtree-git-plugin
git clone --depth=1 --branch=master https://github.com/pearofducks/ansible-vim $USER_HOME/.vim/bundle/ansible-vim
git clone --depth=1 --branch=master https://tpope.io/vim/surround.git $USER_HOME/.vim/bundle/surround
find . -type d -name '.git' -maxdepth 2 | xargs rm -rf {}
cho"updated vim"
| #!/bin/bash
set -e
source ./config.sh
print_info_text "vim, gnome-vim (macvim)"
rm -rf $USER_HOME/.vim
rm -rf $USER_HOME/.vimrc
rm -rf $USER_HOME/.gvimrc
cp -r ../files/vim $USER_HOME
mv $USER_HOME/vim $USER_HOME/.vim
ln -s $USER_HOME/.vim/vimrc $USER_HOME/.vimrc
ln -s $USER_HOME/.vim/gvimrc $USER_HOME/.gvimrc
mkdir -p $USER_HOME/.vim/.vimswap
mkdir -p $USER_HOME/.vim/bundle
git clone --depth=1 --branch=master https://github.com/scrooloose/nerdtree $USER_HOME/.vim/bundle/nerdtree
git clone --depth=1 --branch=master https://github.com/Xuyuanp/nerdtree-git-plugin.git $USER_HOME/.vim/bundle/nerdtree-git-plugin
git clone --depth=1 --branch=master https://github.com/pearofducks/ansible-vim $USER_HOME/.vim/bundle/ansible-vim
git clone --depth=1 --branch=master https://tpope.io/vim/surround.git $USER_HOME/.vim/bundle/surround
find $USER_HOME/.vim/bundle/ -maxdepth 2 -type d -name '.git' | xargs rm -rf {}
echo "updated vim"
|
Add trailing newline character after github-changes | #!/usr/bin/env bash
# Exit on first error
set -e
# Parse our parameters
version="$1"
# If a version wasn't found, complain and leave
if test -z "$version"; then
echo "No \`version\` was provided to \`release.sh\`. Please provide one." 1>&2
echo "Usage: ./release.sh <version>" 1>&2
echo " version: Can be semver, major, minor, or patch" 1>&2
exit 1
fi
# Verify `github-changes` is installed
if ! test -f "./node_modules/.bin/github-changes"; then
echo "Couldn't find \`github-changes\` executable. Please verify \`npm install\` has been run." 1>&2
exit 1
fi
# Update our package and add a git commit via `npm`
npm version "$version"
# Grab our new semver
semver="$(node --eval "console.log('v' + require('./package.json').version);")"
# Generate a new CHANGELOG
./node_modules/.bin/github-changes --title "slack-for-linux changelog" \
--owner slack-for-linux --repository slack-for-linux \
--only-pulls --use-commit-body -n "$semver"
# Append our CHANGELOG edit to the git commit
git add CHANGELOG.md
git commit --amend --no-edit
# Publish our changes
git push origin master
git push origin --tags
npm publish
| #!/usr/bin/env bash
# Exit on first error
set -e
# Parse our parameters
version="$1"
# If a version wasn't found, complain and leave
if test -z "$version"; then
echo "No \`version\` was provided to \`release.sh\`. Please provide one." 1>&2
echo "Usage: ./release.sh <version>" 1>&2
echo " version: Can be semver, major, minor, or patch" 1>&2
exit 1
fi
# Verify `github-changes` is installed
if ! test -f "./node_modules/.bin/github-changes"; then
echo "Couldn't find \`github-changes\` executable. Please verify \`npm install\` has been run." 1>&2
exit 1
fi
# Update our package and add a git commit via `npm`
npm version "$version"
# Grab our new semver
semver="$(node --eval "console.log('v' + require('./package.json').version);")"
# Generate a new CHANGELOG
./node_modules/.bin/github-changes --title "slack-for-linux changelog" \
--owner slack-for-linux --repository slack-for-linux \
--only-pulls --use-commit-body -n "$semver"
# DEV: Add trailing newline for linter
echo >> CHANGELOG.md
# Append our CHANGELOG edit to the git commit
git add CHANGELOG.md
git commit --amend --no-edit
# Publish our changes
git push origin master
git push origin --tags
npm publish
|
Add modifications for easier rerunning of past work | #!/usr/bin/env bash
#
# This script is used to launch a set of experiments or processes
#
# vars
VENV=tmp-venv
PY=${VENV}/bin/python
# manual work-around for ubuntu
PY=python
# broad survey of classifiers
SEQUENCE=`seq 4 21`
# broad survey of classifiers
#SEQUENCE="4 6"
# experiments or submissions?
SCRIPT=run-experiment.py
#SCRIPT=full-train-and-predict.py
# for experiments
NICE="nice 15"
# for submissions
NICE="nice"
# survey (plotting)
#ARGS='--verbose --ubuntu'
# submission
ARGS='--verbose'
# kNN model: expt_4
# SVM mode: expt_6
SLEEPTIME=60
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- started running $0"
filedate="$(date +%Y-%m-%dT%H:%M:%S)"
for i in ${SEQUENCE}; do
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- launching experiment ${i}"
nohup ${NICE} ${PY} ${SCRIPT} expt_${i} ${ARGS} > log/${filedate}_expt_${i}.log &
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- sleeping for ${SLEEPTIME} seconds"
sleep ${SLEEPTIME}
done
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- finished launching experiments"
| #!/usr/bin/env bash
#
# This script is used to launch a set of experiments or processes
#
# vars
VENV=tmp-venv
PY=${VENV}/bin/python
# manual work-around for ubuntu
PY=python
# broad survey of classifiers
#SEQUENCE=`seq 4 21`
# broad survey of classifiers
SEQUENCE="7"
# experiments or submissions?
#SCRIPT=run-experiment.py
SCRIPT=full-train-and-predict.py
# for experiments
#NICE="nice 15"
# for submissions
NICE="nice"
# experiments (+plotting)
#ARGS='--verbose --ubuntu'
# submission
ARGS='--verbose'
SLEEPTIME=60
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- started running $0"
filedate="$(date +%Y-%m-%dT%H:%M:%S)"
for i in ${SEQUENCE}; do
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- launching experiment ${i}"
nohup ${NICE} ${PY} ${SCRIPT} expt_${i} ${ARGS} > log/${filedate}_expt_${i}.log &
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- sleeping for ${SLEEPTIME} seconds"
sleep ${SLEEPTIME}
done
echo "$(date +%Y-%m-%d\ %H:%M:%S) -- finished launching experiments"
|
Fix BUILD="static" documentation in linux build script. | #!/usr/bin/env bash
cd `dirname $0`;
cd ../src/main;
rm -r resources/linux-x86-64
mkdir -p resources/linux-x86-64
# *** Build libsass
make -C libsass clean
# We use:
# - BUILD="shared" to make sure that we build a shared system library
# - CXX=g++-4.6 to make sure that the library is linked against a version of
# libstdc++.so.6 that is old enough to be widely compatible
# - CC=gcc-4.4 same as for CXX but for libc.so.6
BUILD="static" make -C libsass -j5 CXX=g++-4.6 CC=gcc-4.4
#cp libsass/lib/libsass.so resources/linux-x86-64/libsass.so
# *** Build libjsass
cmake c
make -C c -j5 CXX=g++-4.6 CC=gcc-4.4
cp c/libjsass.so resources/linux-x86-64/libjsass.so
| #!/usr/bin/env bash
cd `dirname $0`;
cd ../src/main;
rm -r resources/linux-x86-64
mkdir -p resources/linux-x86-64
# *** Build libsass
make -C libsass clean
# We use:
# - BUILD="static" to make sure that we build a static library
# - CXX=g++-4.6 to make sure that the library is linked against a version of
# libstdc++.so.6 that is old enough to be widely compatible
# - CC=gcc-4.4 same as for CXX but for libc.so.6
BUILD="static" make -C libsass -j5 CXX=g++-4.6 CC=gcc-4.4
#cp libsass/lib/libsass.so resources/linux-x86-64/libsass.so
# *** Build libjsass
cmake c
make -C c -j5 CXX=g++-4.6 CC=gcc-4.4
cp c/libjsass.so resources/linux-x86-64/libjsass.so
|
Change sh to bash in deadbeef script | #! /usr/bin/env sh
# Encoded artist name.
ARTIST="$1"
# Encoded song title.
TITLE="$2"
#Encoded album name.
ALBUM="$3"
unquote () {
decoded_url=$(python3 -c 'import sys, urllib.parse; print(urllib.parse.unquote(sys.argv[1]))' "$1")
echo $decoded_url
}
UNQ_ARTIST=$(unquote "$ARTIST")
UNQ_TITLE=$(unquote "$TITLE")
prismriver-lyrica.py -t "$UNQ_TITLE" -a "$UNQ_ARTIST" --output $'%LYRICS%\n\nSource: %PLUGIN_NAME%'
| #! /usr/bin/env bash
# Encoded artist name.
ARTIST="$1"
# Encoded song title.
TITLE="$2"
#Encoded album name.
ALBUM="$3"
unquote () {
decoded_url=$(python3 -c 'import sys, urllib.parse; print(urllib.parse.unquote(sys.argv[1]))' "$1")
echo $decoded_url
}
UNQ_ARTIST=$(unquote "$ARTIST")
UNQ_TITLE=$(unquote "$TITLE")
prismriver-lyrica.py -t "$UNQ_TITLE" -a "$UNQ_ARTIST" --output $'%LYRICS%\n\nSource: %PLUGIN_NAME%'
|
Update folly dependency for fbthrift | #!/usr/bin/env bash
sudo apt-get install -yq libdouble-conversion-dev libssl-dev make zip git autoconf libtool g++ libboost-all-dev libevent-dev flex bison libgoogle-glog-dev scons libkrb5-dev libsnappy-dev libsasl2-dev libnuma-dev
# Change directory to location of this script
cd "$(dirname ${0})"
git clone https://github.com/facebook/folly
cd folly/folly
git fetch
git checkout v0.32.0
autoreconf --install
./configure
make -j8
cd ../..
autoreconf --install
CPPFLAGS=" -I`pwd`/folly/" LDFLAGS="-L`pwd`/folly/folly/.libs/" ./configure
make -j8
| #!/usr/bin/env bash
sudo apt-get install -yq libdouble-conversion-dev libssl-dev make zip git autoconf libtool g++ libboost-all-dev libevent-dev flex bison libgoogle-glog-dev scons libkrb5-dev libsnappy-dev libsasl2-dev libnuma-dev
# Change directory to location of this script
cd "$(dirname ${0})"
git clone https://github.com/facebook/folly
cd folly/folly
git fetch
git checkout v0.37.0
autoreconf --install
./configure
make -j8
cd ../..
autoreconf --install
CPPFLAGS=" -I`pwd`/folly/" LDFLAGS="-L`pwd`/folly/folly/.libs/" ./configure
make -j8
|
Install under $HOME/opt instead of $HOME/local | #!/bin/bash
set -euxo pipefail
VERSION=2.4.0
CACHE_DIR="/vagrant/libressl"
mkdir -p "$CACHE_DIR"
cd "$CACHE_DIR"
if ! gpg --list-keys | grep -q D5E4D8D5; then
curl http://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl.asc | gpg --import
fi
if [ ! -f "libressl-${VERSION}.tar.gz" ]; then
wget -N "http://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-${VERSION}.tar.gz"{,.asc}
fi
gpg "libressl-${VERSION}.tar.gz.asc"
cd "$HOME"
if [ ! -d "libressl-${VERSION}" ]; then
tar zxf "$CACHE_DIR/libressl-${VERSION}.tar.gz"
fi
cd "libressl-${VERSION}"
./configure --prefix="$HOME/local"
make
make check
make install
| #!/bin/bash
set -euxo pipefail
VERSION=2.4.2
CACHE_DIR="/vagrant/libressl"
BUILD_DIR="$HOME/build"
mkdir -p "$CACHE_DIR"
cd "$CACHE_DIR"
if ! gpg --list-keys | grep -q D5E4D8D5; then
if [[ ! -f "libressl.asc" ]]; then
wget -N http://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl.asc
fi
gpg --import "libressl.asc"
fi
if [[ ! -f "libressl-${VERSION}.tar.gz" ]]; then
wget -N "http://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-${VERSION}.tar.gz"{,.asc}
fi
gpg "libressl-${VERSION}.tar.gz.asc"
mkdir -p "$BUILD_DIR"
cd "$BUILD_DIR"
if [ ! -d "libressl-${VERSION}" ]; then
tar zxf "$CACHE_DIR/libressl-${VERSION}.tar.gz"
fi
cd "libressl-${VERSION}"
./configure --prefix="$HOME/opt/libressl-${VERSION}"
make
make check
make install
|
Fix GitHub tarball update wget bug. | #!/bin/sh
# Update scripts from GitHub tarball
# Exit on any command failures
set -e
SCRIPTSDIRREL=$(dirname $0)
cd $SCRIPTSDIRREL
if [ -e .git ]
then
echo "This scripts directory was checked out from Git so I won't update it."
echo "If you want it updated from a tarball anyway, please delete .git first."
exit 1
fi
wget http://github.com/mikemcquaid/Scripts/tarball/master -O scripts.tar.gz
tar --strip-components=1 -z -x -v -f scripts.tar.gz
rm scripts.tar.gz
| #!/bin/sh
# Update scripts from GitHub tarball
# Exit on any command failures
set -e
SCRIPTSDIRREL=$(dirname $0)
cd $SCRIPTSDIRREL
if [ -e .git ]
then
echo "This scripts directory was checked out from Git so I won't update it."
echo "If you want it updated from a tarball anyway, please delete .git first."
exit 1
fi
wget --no-check-certificate https://github.com/mikemcquaid/Scripts/tarball/master -O scripts.tar.gz
tar --strip-components=1 -z -x -v -f scripts.tar.gz
rm scripts.tar.gz
|
Improve build support for Fedora | #!/usr/bin/env bash
# shellcheck disable=SC2154
# """
# https://subversion.apache.org/download.cgi
# https://subversion.apache.org/source-code.html
# https://svn.apache.org/repos/asf/subversion/trunk/INSTALL
#
# Requires Apache Portable Runtime (APR) library and Apache Portable Runtime
# Utility (APRUTIL) library.
# """
_koopa_assert_is_installed apr-config apu-config
file="${name}-${version}.tar.bz2"
url="https://mirrors.ocf.berkeley.edu/apache/${name}/${file}"
_koopa_download "$url"
_koopa_extract "$file"
cd "${name}-${version}" || exit 1
./configure \
--prefix="$prefix" \
--with-lz4="internal" \
--with-utf8proc="internal"
make --jobs="$jobs"
make install
| #!/usr/bin/env bash
# shellcheck disable=SC2154
# """
# https://subversion.apache.org/download.cgi
# https://subversion.apache.org/source-code.html
# https://svn.apache.org/repos/asf/subversion/trunk/INSTALL
#
# Requires Apache Portable Runtime (APR) library and Apache Portable Runtime
# Utility (APRUTIL) library.
# """
if _koopa_is_fedora
then
apr_config="/usr/bin/apr-1-config"
apu_config="/usr/bin/apu-1-config"
else
apr_config="apr-config"
apu_config="apu-config"
fi
_koopa_assert_is_installed "$apr_config" "$apu_config"
file="${name}-${version}.tar.bz2"
url="https://mirrors.ocf.berkeley.edu/apache/${name}/${file}"
_koopa_download "$url"
_koopa_extract "$file"
cd "${name}-${version}" || exit 1
./configure \
--prefix="$prefix" \
--with-apr-config="$apr_config" \
--with-apu-config="$apu_config" \
--with-lz4="internal" \
--with-utf8proc="internal"
make --jobs="$jobs"
make install
|
Split out glew from build steps in OS X. | clang++ -std=c++11 -O2 -o Nullocity.bin -I/usr/local/include -I/usr/local/include/SDL2 -I../SDL2TK/include ../source/*.cpp ../SDL2TK/source/*.cpp ../SDL2TK/source/glew.c -L/usr/local/lib -lSDL2 -lSDL2_image -llua -framework OpenGL -framework OpenAL
| clang -O2 -c ../SDL2TK/source/glew.c
clang++ -std=c++11 -O2 -o Nullocity.bin -I/usr/local/include -I/usr/local/include/SDL2 -I../SDL2TK/include ../source/*.cpp ../SDL2TK/source/*.cpp ./glew.o -L/usr/local/lib -lSDL2 -lSDL2_image -llua -framework OpenGL -framework OpenAL
|
Update QA cluster start time | #!/usr/bin/env bash
stack exec -- cardano-node \
--system-start 1497960253 \
--log-config scripts/log-templates/log-config-qa.yaml \
--logs-prefix "logs/qanet" \
--db-path db-qanet \
--kademlia-peer 52.57.23.114:3000 \
--kademlia-peer 52.57.168.157:3000 \
--kademlia-peer 34.251.36.219:3000 \
--kademlia-peer 34.248.254.168:3000 \
--wallet \
--wallet-db-path wdb-qanet \
--static-peers \
$@
| #!/usr/bin/env bash
stack exec -- cardano-node \
--system-start 1497984045 \
--log-config scripts/log-templates/log-config-qa.yaml \
--logs-prefix "logs/qanet" \
--db-path db-qanet \
--kademlia-peer 52.57.23.114:3000 \
--kademlia-peer 52.57.168.157:3000 \
--kademlia-peer 34.251.36.219:3000 \
--kademlia-peer 34.248.254.168:3000 \
--wallet \
--wallet-db-path wdb-qanet \
--static-peers \
$@
|
Exclude some of the guitarix plugins without GUI | PLUGIN=guitarix
GIT_URI=https://github.com/BlokasLabs/${PLUGIN}.git
TMP_DIR=${ROOTFS_DIR}/tmp/${PLUGIN}
export CC=arm-linux-gnueabihf-gcc
export CXX=arm-linux-gnueabihf-g++
export LD=arm-linux-gnueabihf-ld
export STRIP=arm-linux-gnueabihf-strip
rm -rf ${TMP_DIR}
git clone --depth 1 ${GIT_URI} ${TMP_DIR}
pushd ${TMP_DIR}/trunk
./waf configure --lv2dir=${LV2_ABS_DIR} --lv2-only --no-lv2-gui --disable-sse --no-faust --cxxflags="--sysroot=${ROOTFS_DIR}" --ldflags="--sysroot=${ROOTFS_DIR}"
./waf build -j4 # -vvv
./waf -j1 install
popd
rm -r ${TMP_DIR}
| PLUGIN=guitarix
GIT_URI=https://github.com/BlokasLabs/${PLUGIN}.git
TMP_DIR=${ROOTFS_DIR}/tmp/${PLUGIN}
export CC=arm-linux-gnueabihf-gcc
export CXX=arm-linux-gnueabihf-g++
export LD=arm-linux-gnueabihf-ld
export STRIP=arm-linux-gnueabihf-strip
rm -rf ${TMP_DIR}
git clone --depth 1 ${GIT_URI} ${TMP_DIR}
pushd ${TMP_DIR}/trunk
./waf configure --lv2dir=${LV2_ABS_DIR} --lv2-only --no-lv2-gui --disable-sse --no-faust --cxxflags="--sysroot=${ROOTFS_DIR}" --ldflags="--sysroot=${ROOTFS_DIR}"
./waf build -j4 # -vvv
./waf -j1 install
popd
PLUGINS_WITHOUT_GUI="gx_aclipper.lv2 gx_alembic.lv2 gx_bmp.lv2 gx_bossds1.lv2 gx_mxrdist.lv2 gx_slowgear.lv2 gxtape.lv2 gxtape_st.lv2 gx_vibe.lv2 gx_w20.lv2 gxtuner.lv2"
for i in ${PLUGINS_WITHOUT_GUI}; do
rm -r ${LV2_ABS_DIR}/$i
done
rm -r ${TMP_DIR}
|
Add error checking for presence and creation of hooks directory | #!/bin/bash
export REDCAP_ROOT=/var/https/redcap
export REDCAP_HOOKS=$REDCAP_ROOT/hooks
export INPUT=$1
if [ ! -e $REDCAP_ROOT ]; then
echo "Error: REDCAP_ROOT, $REDCAP_ROOT, does not exist. Exiting."
exit
fi
# Pull repo and copy scripts into library folder
MYTEMP=`mktemp -d`
cd $MYTEMP
git clone https://github.com/ctsit/redcap-extras.git
if [ ! -e redcap-extras ]; then
echo "Error: redcap-extras repo could not be cloned. Exiting."
exit
fi
cd redcap-extras/hooks
# checkout develop because we have not yet released the code we need
git checkout develop
cp redcap_hooks.php $REDCAP_HOOKS/
mkdir $REDCAP_HOOKS/library
cp -r examples/* $REDCAP_HOOKS/library/
rm -rf $MYTEMP
# Make required directories for hook deployment
awk -F"," 'NR!=1{printf "mkdir -p %s/%s/%s/\n",ENVIRON["REDCAP_HOOKS"],$1,$2}' $INPUT | sh
# Create sym links for hooks to be executed
awk -F"," 'NR!=1{printf "ln -s %s/%s %s/%s/%s/\n",ENVIRON["REDCAP_HOOKS"],$3,ENVIRON["REDCAP_HOOKS"],$1,$2}' $INPUT | sh
# | #!/bin/bash
export REDCAP_ROOT=/var/https/redcap
export REDCAP_HOOKS=$REDCAP_ROOT/hooks
export INPUT=$1
if [ ! -e $REDCAP_ROOT ]; then
echo "Error: REDCAP_ROOT, $REDCAP_ROOT, does not exist. Exiting."
exit
fi
if [ ! -e $REDCAP_HOOKS ]; then
mkdir $REDCAP_ROOT/hooks
echo "REDCap Hooks Directory Created."
exit
fi
# Pull repo and copy scripts into library folder
MYTEMP=`mktemp -d`
cd $MYTEMP
git clone https://github.com/ctsit/redcap-extras.git
if [ ! -e redcap-extras ]; then
echo "Error: redcap-extras repo could not be cloned. Exiting."
exit
fi
cd redcap-extras/hooks
# checkout develop because we have not yet released the code we need
git checkout develop
cp redcap_hooks.php $REDCAP_HOOKS/
mkdir $REDCAP_HOOKS/library
cp -r examples/* $REDCAP_HOOKS/library/
rm -rf $MYTEMP
# Make required directories for hook deployment
awk -F"," 'NR!=1{printf "mkdir -p %s/%s/%s/\n",ENVIRON["REDCAP_HOOKS"],$1,$2}' $INPUT | sh
# Create sym links for hooks to be executed
awk -F"," 'NR!=1{printf "ln -s %s/%s %s/%s/%s/\n",ENVIRON["REDCAP_HOOKS"],$3,ENVIRON["REDCAP_HOOKS"],$1,$2}' $INPUT | sh
|
Increase server activity check sleep to 15 | #!/bin/bash
# ------------------------------------------------------------------------
#
# Copyright 2016 WSO2, Inc. (http://wso2.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
# ------------------------------------------------------------------------
set -e
self_path=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source "${self_path}/base.sh"
IFS=$'\n'
kube_nodes=($(getKubeNodes))
host=$(getKubeNodeIP "${kube_nodes[0]}")
product=${PWD##*/}
profile=$1
port=$2
echo "Waiting ${product} to launch on http://${host}:${port}"
sleep 2
until $(curl --output /dev/null --silent --head --fail http://${host}:${port}); do
printf '.'
sleep 3
done
echo
echoSuccess "$(echo ${product} | awk '{print toupper($0)}') started successfully, profile: ${profile}"
| #!/bin/bash
# ------------------------------------------------------------------------
#
# Copyright 2016 WSO2, Inc. (http://wso2.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
# ------------------------------------------------------------------------
set -e
self_path=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source "${self_path}/base.sh"
IFS=$'\n'
kube_nodes=($(getKubeNodes))
host=$(getKubeNodeIP "${kube_nodes[0]}")
product=${PWD##*/}
profile=$1
port=$2
echo "Waiting ${product} to launch on http://${host}:${port}"
sleep 15
until $(curl --output /dev/null --silent --head --fail http://${host}:${port}); do
printf '.'
sleep 3
done
echo
echoSuccess "$(echo ${product} | awk '{print toupper($0)}') started successfully, profile: ${profile}"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.