Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Update scripts to latest version | #!/usr/bin/env sh
test -e ~/.coursier/coursier || ( \
mkdir -p ~/.coursier && \
curl -Lso ~/.coursier/coursier https://git.io/vgvpD && \
chmod +x ~/.coursier/coursier \
)
~/.coursier/coursier launch -q -P \
com.lihaoyi:ammonite_2.12.2:0.9.3 \
is.cir:ciris-core_2.12:0.3.2 \
is.cir:ciris-enumeratum_2.12:0.3.2 \
is.cir:ciris-generic_2.12:0.3.2 \
is.cir:ciris-refined_2.12:0.3.2 \
is.cir:ciris-squants_2.12:0.3.2 \
-- --predef 'import ciris._,ciris.enumeratum._,ciris.generic._,ciris.refined._,ciris.squants._' < /dev/tty
| #!/usr/bin/env sh
test -e ~/.coursier/coursier || ( \
mkdir -p ~/.coursier && \
curl -Lso ~/.coursier/coursier https://git.io/vgvpD && \
chmod +x ~/.coursier/coursier \
)
~/.coursier/coursier launch -q -P \
com.lihaoyi:ammonite_2.12.2:0.9.9 \
is.cir:ciris-core_2.12:0.4.0 \
is.cir:ciris-enumeratum_2.12:0.4.0 \
is.cir:ciris-generic_2.12:0.4.0 \
is.cir:ciris-refined_2.12:0.4.0 \
is.cir:ciris-squants_2.12:0.4.0 \
-- --predef 'import ciris._,ciris.enumeratum._,ciris.generic._,ciris.refined._,ciris.squants._' < /dev/tty
|
Delete ending marks if game ends after one ply | #!/bin/bash
# Determine most often used openings
game_file="$1"
moves=${2:-1} # number of starting moves (default 1)
paste_dashes=
for _ in $(seq 1 "$moves")
do
paste_dashes=$paste_dashes'- '
done
bindir="$(dirname "$0")"
echo "# Most popular openings:"
grep -A$((moves-1)) '^1\.' "$game_file" | # First n moves
./"$bindir"/delete_comments.sh |
grep -v -- -- | # Delete dashes between games
paste -d' ' $paste_dashes | # Combine opening moves into one line
cut -d' ' -f 1-3 | # Only include move text
tee "${1}_opening_list.txt" |
sort |
uniq -c |
sort -n |
tee >(echo $(wc -l) different openings)
| #!/bin/bash
# Determine most often used openings
game_file="$1"
moves=${2:-1} # number of starting moves (default 1)
paste_dashes=
for _ in $(seq 1 "$moves")
do
paste_dashes=$paste_dashes'- '
done
bindir="$(dirname "$0")"
echo "# Most popular openings:"
grep -A$((moves-1)) '^1\.' "$game_file" | # First n moves
./"$bindir"/delete_comments.sh |
grep -v -- -- | # Delete dashes between games
paste -d' ' $paste_dashes | # Combine opening moves into one line
cut -d' ' -f 1-3 | # Only include move text
sed -e 's/1-0//' -e 's/0-1//' -e 's/1\/2-1\/2//' | # delete endings
tee "${1}_opening_list.txt" |
sort |
uniq -c |
sort -n |
tee >(echo $(wc -l) different openings)
|
Fix rust placement for rvm | is_osx || return 1
# Postgres
export PATH="$PATH:/Applications/Postgres.app/Contents/Versions/9.3/bin"
# Cabal
export PATH="$HOME/.cabal/bin:$PATH"
export PATH="$HOME/bin:$PATH"
# NVIDIA CUDA
export PATH="/Developer/NVIDIA/CUDA-6.0/bin:$PATH"
export DYLD_LIBRARY_PATH="/Developer/NVIDIA/CUDA-6.0/lib:$DYLD_LIBRARY_PATH"
# Go
export GOPATH="$HOME/Developer/go"
export PATH="$PATH:$GOPATH/bin"
# Ruby
export PATH="$HOME/.rvm/bin:$PATH" # Add RVM to PATH for scripting
[[ -s "$HOME/.rvm/scripts/rvm" ]] && source "$HOME/.rvm/scripts/rvm" # Load RVM into a shell session *as a function*
# Rust
export PATH="$PATH:$HOME/.cargo/bin"
# Autojump support.
[[ -s "$(brew --prefix)"/etc/autojump.sh ]] && . "$(brew --prefix)"/etc/autojump.sh
# Bash completion.
if [ -f "$(brew --prefix)"/etc/bash_completion ]; then
. "$(brew --prefix)"/etc/bash_completion
fi
# Aliases
alias arduino='/Applications/Arduino.app/Contents/MacOS/JavaApplicationStub'
alias hr='heroku restart'
| is_osx || return 1
# Postgres
export PATH="$PATH:/Applications/Postgres.app/Contents/Versions/9.3/bin"
# Cabal
export PATH="$HOME/.cabal/bin:$PATH"
export PATH="$HOME/bin:$PATH"
# NVIDIA CUDA
export PATH="/Developer/NVIDIA/CUDA-6.0/bin:$PATH"
export DYLD_LIBRARY_PATH="/Developer/NVIDIA/CUDA-6.0/lib:$DYLD_LIBRARY_PATH"
# Go
export GOPATH="$HOME/Developer/go"
export PATH="$PATH:$GOPATH/bin"
# Rust
export PATH="$PATH:$HOME/.cargo/bin"
export PATH="$HOME/.multirust/bin:$PATH"
# Ruby
export PATH="$HOME/.rvm/bin:$PATH" # Add RVM to PATH for scripting
[[ -s "$HOME/.rvm/scripts/rvm" ]] && source "$HOME/.rvm/scripts/rvm" # Load RVM into a shell session *as a function*
# Autojump support.
[[ -s "$(brew --prefix)"/etc/autojump.sh ]] && . "$(brew --prefix)"/etc/autojump.sh
# Bash completion.
if [ -f "$(brew --prefix)"/etc/bash_completion ]; then
. "$(brew --prefix)"/etc/bash_completion
fi
# Aliases
alias arduino='/Applications/Arduino.app/Contents/MacOS/JavaApplicationStub'
alias hr='heroku restart'
|
Remove some chatter from the test scripts | #!/bin/sh
HCBASE=/usr/bin/
HC=$HCBASE/ghc
GHCFLAGS='--make -Wall -fno-warn-unused-matches -cpp'
ISPOSIX=-DHAVE_UNIX_PACKAGE
rm moduleTest
mkdir -p dist/debug
$HC $GHCFLAGS $ISPOSIX -DDEBUG -odir dist/debug -hidir dist/debug -idist/debug/:.:tests/HUnit-1.0/src tests/ModuleTest.hs -o moduleTest
./moduleTest
| #!/bin/sh
HCBASE=/usr/bin/
HC=$HCBASE/ghc
GHCFLAGS='--make -Wall -fno-warn-unused-matches -cpp'
ISPOSIX=-DHAVE_UNIX_PACKAGE
rm -f moduleTest
mkdir -p dist/debug
$HC $GHCFLAGS $ISPOSIX -DDEBUG -odir dist/debug -hidir dist/debug -idist/debug/:.:tests/HUnit-1.0/src tests/ModuleTest.hs -o moduleTest 2> stderr
RES=$?
if [ $RES != 0 ]
then
cat stderr >&2
exit $RES
fi
./moduleTest
|
Disable jasmin ci on travis | #!/bin/bash
set -ex
# set variables. It's presumed the absence of these is causing
# TRAVIS to fail
if [ "$TRAVIS" = "true" ]; then
echo "INFO: this is travis - not running smoke test"
bundle exec rake db:migrate
bundle exec rake && bundle exec rake jasmine:ci
exit 0
else
# Script executing all the test tasks.
bundle exec rake db:migrate
# execute smoke test - needs seeded tables
bundle exec rake db:seed
# DISABLE API SMOKE TEST
# echo "INFO: EXECUTING SMOKE TEST <<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
# bundle exec rake api:smoke_test
fi
| #!/bin/bash
set -ex
# set variables. It's presumed the absence of these is causing
# TRAVIS to fail
if [ "$TRAVIS" = "true" ]; then
echo "INFO: this is travis - not running smoke test"
bundle exec rake db:migrate
bundle exec rake
exit 0
else
# Script executing all the test tasks.
bundle exec rake db:migrate
# execute smoke test - needs seeded tables
bundle exec rake db:seed
# DISABLE API SMOKE TEST
# echo "INFO: EXECUTING SMOKE TEST <<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
# bundle exec rake api:smoke_test
fi
|
Support for IDL83 OSX installtion | #
# path.bash
# .dotfiles
#
# Created by Alexander Rudy on 2012-10-07.
# Copyright 2012 Alexander Rudy. All rights reserved.
#
IDL82="/Applications/exelis/idl/bin"
IDL81="/Applications/itt/idl/idl/bin"
if [ -d $IDL82 ]; then
export PATH="$PATH:$IDL82" #IDL
# export IDL_STARTUP="$HOME/.idl.pro"
export IDL_STARTUP="IDLStartup.pro"
elif [ -d $IDL81 ]; then
export PATH="$PATH:$IDL81" #IDL
# export IDL_STARTUP="$HOME/.idl.pro"
export IDL_STARTUP="IDLStartup.pro"
fi
function UCOIDL () {
export LM_LICENSE_FILE=1700@localhost
ssh -f -N -L1700:license:1700 -L35673:license:35673 ssh.ucolick.org
}
| #
# path.bash
# .dotfiles
#
# Created by Alexander Rudy on 2012-10-07.
# Copyright 2012 Alexander Rudy. All rights reserved.
#
IDL83="/usr/local/exelis/idl/bin"
IDL82="/Applications/exelis/idl/bin"
IDL81="/Applications/itt/idl/idl/bin"
if [ -d $IDL83 ]; then
export PATH="$PATH:$IDL83"
export LM_LICENSE_FILE="/usr/local/exelis/license/license.dat"
export IDL_STARTUP="IDLStartup.pro"
elif [ -d $IDL82 ]; then
export PATH="$PATH:$IDL82" #IDL
# export IDL_STARTUP="$HOME/.idl.pro"
export IDL_STARTUP="IDLStartup.pro"
elif [ -d $IDL81 ]; then
export PATH="$PATH:$IDL81" #IDL
# export IDL_STARTUP="$HOME/.idl.pro"
export IDL_STARTUP="IDLStartup.pro"
fi
function UCOIDL () {
export LM_LICENSE_FILE=1700@localhost
ssh -f -N -L1700:license:1700 -L35673:license:35673 ssh.ucolick.org
}
|
Move sp out of set +- x | #! /bin/bash
test -f ~/.local_login.sh && source ~/.local_login.sh
LOCAL=/usr/local
[[ -d $LOCAL/gnu ]] && LOCALS="$LOCAL/gnu:$LOCAL/bin" || LOCALS="$LOCAL/bin"
BINS=/usr/bin:/bin:/usr/sbin:/sbin
HOMES=$HOME/bin:$HOME/.local
export PATH=$LOCALS:$BINS:$HOMES
[[ -d $LOCAL/go/bin ]] && PATH=$PATH:$LOCAL/go/bin
SRC=~/src
HG=$SRC/hg
GIT=$SRC/git
HUB=$GIT/hub
export HUB
source $HUB/jab/__init__.sh
echo JABm $JAB
export PS1="\$? [\u@\h:\$PWD]\n$ "
vbb () {
vim -p ~/.bashrc $HUB/jab/__init__.sh "$@" +/bash
}
export PS1="\$? [\u@\h:\$PWD]\n$ "
# set -x
jj
sp
# set +x
| #! /bin/bash
test -f ~/.local_login.sh && source ~/.local_login.sh
LOCAL=/usr/local
[[ -d $LOCAL/gnu ]] && LOCALS="$LOCAL/gnu:$LOCAL/bin" || LOCALS="$LOCAL/bin"
BINS=/usr/bin:/bin:/usr/sbin:/sbin
HOMES=$HOME/bin:$HOME/.local
export PATH=$LOCALS:$BINS:$HOMES
[[ -d $LOCAL/go/bin ]] && PATH=$PATH:$LOCAL/go/bin
SRC=~/src
HG=$SRC/hg
GIT=$SRC/git
HUB=$GIT/hub
export HUB
source $HUB/jab/__init__.sh
echo JAB is $JAB
export PS1="\$? [\u@\h:\$PWD]\n$ "
vbb () {
vim -p ~/.bashrc $HUB/jab/__init__.sh "$@" +/bash
}
export PS1="\$? [\u@\h:\$PWD]\n$ "
# set -x
jj
# set +x
sp
|
Fix author name in legal header | #!/usr/bin/env bash
#
# Zinc, the bare metal stack for rust.
# Copyright 2014 Vladimir "farcaller" Pouzanov <farcaller@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
for e in $EXAMPLES; do
EXAMPLE_NAME=$e make build
done
| #!/usr/bin/env bash
#
# Zinc, the bare metal stack for rust.
# Copyright 2014 Matt Coffin <mcoffin13@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
for e in $EXAMPLES; do
EXAMPLE_NAME=$e make build
done
|
Remove the ORG and SPACE env variables for `kubectl` | #!/bin/bash
set -e
# login to IBM Bluemix via credentials provided via (encrypted) environment
# variables
bluemix login \
--apikey "${BLUEMIX_API_KEY}" \
-a "${BLUEMIX_API_ENDPOINT}" \
-o "${BLUEMIX_ORGANIZATION}" \
-s "${BLUEMIX_SPACE}"
bluemix cs init \
--host "${BLUEMIX_CONTAINER_SERVICE_HOST}"
# Get the required configuration for `kubectl` from Bluemix and load it
bluemix cs cluster-config \
--export "${BLUEMIX_CONTAINER_SERVICE_CLUSTER_NAME}" \
> .kubectl_config
source .kubectl_config && rm -rf .kubectl_config
# run the commands required to deploy the application via `kubectl`
kubectl version
kubectl cluster-info
| #!/bin/bash
set -e
# login to IBM Bluemix via credentials provided via (encrypted) environment
# variables
bluemix login \
--apikey "${BLUEMIX_API_KEY}" \
-a "${BLUEMIX_API_ENDPOINT}"
bluemix cs init \
--host "${BLUEMIX_CONTAINER_SERVICE_HOST}"
# Get the required configuration for `kubectl` from Bluemix and load it
bluemix cs cluster-config \
--export "${BLUEMIX_CONTAINER_SERVICE_CLUSTER_NAME}" \
> .kubectl_config
source .kubectl_config && rm -rf .kubectl_config
# run the commands required to deploy the application via `kubectl`
kubectl version
kubectl cluster-info
|
Install NPM production dependencies before starting | #!/bin/bash
set -e
if [[ "$*" == npm*start* ]]; then
chown -R app "$NODE_CONTENT"
set -- gosu app "$@"
fi
exec "$@" | #!/bin/bash
set -e
if [[ "$*" == npm*start* ]]; then
npm install --production
chown -R app "$NODE_CONTENT"
set -- gosu app "$@"
fi
exec "$@" |
Use powertac.github.io to host maven reports | #!/usr/bin/env bash
if [[ $TRAVIS_PULL_REQUEST == false && ($TRAVIS_BRANCH == $TRAVIS_TAG || $TRAVIS_BRANCH == "master") ]]
then
mvn -nsu -B javadoc:aggregate
git clone -b gh-pages https://github.com/$TRAVIS_REPO_SLUG.git dox
cd dox
mkdir -p $TRAVIS_BRANCH
cp -r $TRAVIS_BUILD_DIR/target/site/apidocs/* $TRAVIS_BRANCH
for branch in $(ls | grep -v index)
do
cat index.item | sed -e s/ITEM/$branch/g >> index.temp
done
cat index.head > index.html
cat index.temp | sort -u >> index.html
cat index.foot >> index.html
rm index.temp
git add index.html $TRAVIS_BRANCH
git commit -m "Updated javadocs for branch $TRAVIS_BRANCH"
git push https://erikkemperman:$GITHUB_TOKEN@github.com/$TRAVIS_REPO_SLUG.git
fi
| #!/usr/bin/env bash
if [[ $TRAVIS_PULL_REQUEST == false && ($TRAVIS_BRANCH == $TRAVIS_TAG || $TRAVIS_BRANCH == "master" || $TRAVIS_BRANCH == "javadox") ]]
then
mvn -nsu -B site
mvn -nsu -B site:stage
git clone https://github.com/powertac/powertac.github.io.git dox
cd dox
mkdir -p $TRAVIS_BRANCH
cp -r $TRAVIS_BUILD_DIR/target/staging/powertac-server/* $TRAVIS_BRANCH
for branch in $(ls | grep -v index)
do
cat index.item | sed -e s/ITEM/$branch/g >> index.temp
done
cat index.head > index.html
cat index.temp | sort -u >> index.html
cat index.foot >> index.html
rm index.temp
git add index.html $TRAVIS_BRANCH
git commit -m "Updated javadocs for branch $TRAVIS_BRANCH"
git push https://erikkemperman:$GITHUB_TOKEN@github.com/powertac/powertac.github.io.git
fi
|
Make the bower update script perfect | # Updates Bower packages and reinstalls latest versions of Bootswatch themes
pushd public
bower update
pushd components/bootstrap.css/css
curl http://bootswatch.com/cyborg/bootstrap.css -o bootstrap-cyborg.css
curl http://bootswatch.com/cyborg/bootstrap.min.css -o bootstrap-cyborg.min.css
curl http://bootswatch.com/slate/bootstrap.css -o bootstrap-slate.css
curl http://bootswatch.com/slate/bootstrap.min.css -o bootstrap-slate.min.css
curl http://bootswatch.com/cosmo/bootstrap.css -o bootstrap-cosmo.css
curl http://bootswatch.com/cosmo/bootstrap.min.css -o bootstrap-cosmo.min.css
popd
popd
| #!/bin/bash
# Updates Bower packages and reinstalls latest versions of Bootswatch themes
function bootswatch {
pushd components/bootstrap.css/css > /dev/null
for theme in $*; do
echo "Downloading $theme theme"
curl http://bootswatch.com/$theme/bootstrap.css -so bootstrap-$theme.css
curl http://bootswatch.com/$theme/bootstrap.min.css -so bootstrap-$theme.min.css
done
popd > /dev/null
}
pushd public > /dev/null
bower update
bootswatch cyborg slate cosmo
popd > /dev/null
|
Fix selinux httpd connect to redis server | # Copyright © 2017 Feature.su. All rights reserved.
# Licensed under the Apache License, Version 2.0
yum install redis
systemctl start redis
systemctl enable redis
echo "Configuration: /etc/redis.conf"
| # Copyright © 2017 Feature.su. All rights reserved.
# Licensed under the Apache License, Version 2.0
yum install redis
systemctl start redis
systemctl enable redis
echo "Configuration: /etc/redis.conf"
semanage port -m -t http_port_t -p tcp 6379
|
Add waitress as a hard dependency | #!/bin/bash
set -e
fpm -s python \
-t deb \
--name jacquard \
--python-bin /usr/bin/python3 \
--python-package-name-prefix python3 \
--python-install-lib /usr/lib/python3/dist-packages \
-a all \
-m "Alistair Lynn <alistair@thread.com>" \
--deb-compression xz \
--deb-suggests python3-etcd \
--deb-suggests python3-sqlalchemy \
--deb-suggests python3-psycopg2 \
--deb-suggests python3-waitress \
-d "python3 (>= 3.5)" \
-d python3-pkg-resources \
-d python3-redis \
-d python3-werkzeug \
-d python3-dateutil \
-d python3-yaml \
--no-auto-depends \
--force \
jacquard-split
| #!/bin/bash
set -e
fpm -s python \
-t deb \
--name jacquard \
--python-bin /usr/bin/python3 \
--python-package-name-prefix python3 \
--python-install-lib /usr/lib/python3/dist-packages \
-a all \
-m "Alistair Lynn <alistair@thread.com>" \
--deb-compression xz \
--deb-suggests python3-etcd \
--deb-suggests python3-sqlalchemy \
--deb-suggests python3-psycopg2 \
-d "python3 (>= 3.5)" \
-d python3-pkg-resources \
-d python3-redis \
-d python3-werkzeug \
-d python3-dateutil \
-d python3-yaml \
-d python3-waitress \
--no-auto-depends \
--force \
jacquard-split
|
Use $PGDATA env var to setup wal-e | #!/bin/bash
# wal-e specific
echo "wal_level = archive" >> /var/lib/postgresql/data/postgresql.conf
echo "archive_mode = on" >> /var/lib/postgresql/data/postgresql.conf
echo "archive_command = 'envdir /etc/wal-e.d/env /usr/local/bin/wal-e wal-push %p'" >> /var/lib/postgresql/data/postgresql.conf
echo "archive_timeout = 60" >> /var/lib/postgresql/data/postgresql.conf
# no cron in the image, use systemd timer on host instead
#su - postgres -c "crontab -l | { cat; echo \"0 3 * * * /usr/bin/envdir /etc/wal-e.d/env /usr/local/bin/wal-e backup-push /var/lib/postgresql/data\"; } | crontab -"
#su - postgres -c "crontab -l | { cat; echo \"0 4 * * * /usr/bin/envdir /etc/wal-e.d/env /usr/local/bin/wal-e delete --confirm retain 7\"; } | crontab -"
| #!/bin/bash
# wal-e specific
echo "wal_level = $WAL_LEVEL" >> $PGDATA/postgresql.conf
echo "archive_mode = $ARCHIVE_MODE" >> $PGDATA/postgresql.conf
echo "archive_command = 'envdir /etc/wal-e.d/env /usr/local/bin/wal-e wal-push %p'" >> $PGDATA/postgresql.conf
echo "archive_timeout = 60" >> $PGDATA/postgresql.conf
# no cron in the image, use systemd timer on host instead
#su - postgres -c "crontab -l | { cat; echo \"0 3 * * * /usr/bin/envdir /etc/wal-e.d/env /usr/local/bin/wal-e backup-push /var/lib/postgresql/data\"; } | crontab -"
#su - postgres -c "crontab -l | { cat; echo \"0 4 * * * /usr/bin/envdir /etc/wal-e.d/env /usr/local/bin/wal-e delete --confirm retain 7\"; } | crontab -"
|
Remove tweaks for homebrew-installed node | export NVM_DIR="$HOME/.nvm"
if [ -s "$NVM_DIR/nvm.sh" ] ; then
. $NVM_DIR/nvm.sh --no-use # This loads nvm
elif brew --prefix nvm > /dev/null; then
echo "loading nvm.sh from brew"
source $(brew --prefix nvm)/nvm.sh --no-use
fi
alias node='unalias node ; unalias npm ; nvm use default ; node $@'
alias npm='unalias node ; unalias npm ; nvm use default ; npm $@'
export PATH=$PATH:$NVM_BIN
| export NVM_DIR="$HOME/.nvm"
if [ -s "$NVM_DIR/nvm.sh" ] ; then
. $NVM_DIR/nvm.sh --no-use # This loads nvm
fi
export PATH=$PATH:$NVM_BIN
|
Add -b as short version of --build-only script option | #!/bin/bash -Eeu
readonly MY_NAME=`basename "${BASH_SOURCE[0]}"`
if [ "${1:-}" == '-h' ] || [ "${1:-}" == '--help' ]; then
echo
echo "Use: ${MY_NAME} [client|server] [ID...]"
echo 'Options:'
echo ' client - only run the tests from inside the client'
echo ' server - only run the tests from inside the server'
echo ' ID... - only run the tests matching these identifiers'
echo
exit 0
fi
readonly SH_DIR="$(cd "$(dirname "${0}")/sh" && pwd )"
source ${SH_DIR}/versioner_env_vars.sh
export $(versioner_env_vars)
readonly client_user="${CYBER_DOJO_RUNNER_CLIENT_USER}"
readonly server_user="${CYBER_DOJO_RUNNER_SERVER_USER}"
${SH_DIR}/build_images.sh
${SH_DIR}/tag_image.sh
if [ "${1:-}" == '--build-only' ]; then
exit 0
fi
${SH_DIR}/tear_down.sh
${SH_DIR}/containers_up.sh
${SH_DIR}/on_ci_pull_dependent_images.sh
${SH_DIR}/test_in_containers.sh "${client_user}" "${server_user}" "$@"
${SH_DIR}/containers_down.sh
${SH_DIR}/on_ci_publish_tagged_images.sh
#${SH_DIR}/trigger_dependent_images.sh
| #!/bin/bash -Eeu
readonly MY_NAME=`basename "${BASH_SOURCE[0]}"`
if [ "${1:-}" == '-h' ] || [ "${1:-}" == '--help' ]; then
echo
echo "Use: ${MY_NAME} [client|server] [ID...]"
echo 'Options:'
echo ' client - only run the tests from inside the client'
echo ' server - only run the tests from inside the server'
echo ' ID... - only run the tests matching these identifiers'
echo
exit 0
fi
readonly SH_DIR="$(cd "$(dirname "${0}")/sh" && pwd )"
source ${SH_DIR}/versioner_env_vars.sh
export $(versioner_env_vars)
readonly client_user="${CYBER_DOJO_RUNNER_CLIENT_USER}"
readonly server_user="${CYBER_DOJO_RUNNER_SERVER_USER}"
${SH_DIR}/build_images.sh
${SH_DIR}/tag_image.sh
if [ "${1:-}" == '--build-only' ] || [ "${1:-}" == '-b' ] ; then
exit 0
fi
${SH_DIR}/tear_down.sh
${SH_DIR}/containers_up.sh
${SH_DIR}/on_ci_pull_dependent_images.sh
${SH_DIR}/test_in_containers.sh "${client_user}" "${server_user}" "$@"
${SH_DIR}/containers_down.sh
${SH_DIR}/on_ci_publish_tagged_images.sh
#${SH_DIR}/trigger_dependent_images.sh
|
Add .extra to sync exceptions | #!/usr/bin/env bash
cd "$(dirname "${BASH_SOURCE}")";
git pull origin master;
function doIt() {
rsync --exclude ".git/" --exclude ".DS_Store" --exclude "bootstrap.sh" \
--exclude "README.md" --exclude "LICENSE-MIT.txt" -avh --no-perms . ~;
source ~/.profile;
}
if [ "$1" == "--force" -o "$1" == "-f" ]; then
doIt;
else
read -p "This may overwrite existing files in your home directory. Are you sure? (y/n) " -n 1;
echo "";
if [[ $REPLY =~ ^[Yy]$ ]]; then
doIt;
fi;
fi;
unset doIt;
| #!/usr/bin/env bash
cd "$(dirname "${BASH_SOURCE}")";
git pull origin master;
function doIt() {
rsync --exclude ".git/" --exclude ".DS_Store" --exclude "bootstrap.sh" \
--exclude "README.md" --exclude "LICENSE-MIT.txt" --exclude ".extra" -avh --no-perms . ~;
source ~/.profile;
}
if [ "$1" == "--force" -o "$1" == "-f" ]; then
doIt;
else
read -p "This may overwrite existing files in your home directory. Are you sure? (y/n) " -n 1;
echo "";
if [[ $REPLY =~ ^[Yy]$ ]]; then
doIt;
fi;
fi;
unset doIt;
|
Disable Vulkan tests for Subzero. | #!/bin/bash
# Fail on any error.
set -e
# Display commands being run.
set -x
cd git/SwiftShader
git submodule update --init
mkdir -p build && cd build
if [[ -z "${REACTOR_BACKEND}" ]]; then
REACTOR_BACKEND="LLVM"
fi
cmake .. "-DREACTOR_BACKEND=${REACTOR_BACKEND}"
make --jobs=$(nproc)
# Run the reactor unit tests.
./ReactorUnitTests
cd .. # Tests must be run from project root
# Run the OpenGL ES and Vulkan unit tests.
build/gles-unittests
build/vk-unittests
| #!/bin/bash
# Fail on any error.
set -e
# Display commands being run.
set -x
cd git/SwiftShader
git submodule update --init
mkdir -p build && cd build
if [[ -z "${REACTOR_BACKEND}" ]]; then
REACTOR_BACKEND="LLVM"
fi
cmake .. "-DREACTOR_BACKEND=${REACTOR_BACKEND}"
make --jobs=$(nproc)
# Run the reactor unit tests.
./ReactorUnitTests
cd .. # Tests must be run from project root
# Run the OpenGL ES and Vulkan unit tests.
build/gles-unittests
if [ "${REACTOR_BACKEND}" != "Subzero" ]; then
# Currently vulkan does not work with Subzero.
build/vk-unittests
fi
|
Disable mongod prealloc, wait for it to start | #!/bin/bash
set -e
function clean_exit(){
local error_code="$?"
rm -rf ${MONGO_DATA}
if [ "$MONGO_PID" ]; then
kill -9 ${MONGO_PID} || true
fi
return $error_code
}
if [ "$1" = "--coverage" ]; then
COVERAGE_ARG="$1"
shift
fi
if [ ! "$COVERAGE_ARGS" ]; then
# Nova notifier tests
bash tools/init_testr_if_needed.sh
python setup.py testr --slowest --testr-args="--concurrency=1 --here=nova_tests $*"
fi
# Main unit tests
MONGO_DATA=`mktemp -d`
trap "clean_exit" EXIT
mongod --maxConns 32 --smallfiles --quiet --noauth --port 29000 --dbpath "${MONGO_DATA}" --bind_ip localhost &
MONGO_PID=$!
export CEILOMETER_TEST_MONGODB_URL="mongodb://localhost:29000/ceilometer"
python setup.py testr --slowest --testr-args="--concurrency=1 $*" $COVERAGE_ARG
| #!/bin/bash
set -e
function clean_exit(){
local error_code="$?"
rm -rf ${MONGO_DATA}
if [ "$MONGO_PID" ]; then
kill -9 ${MONGO_PID} || true
fi
return $error_code
}
if [ "$1" = "--coverage" ]; then
COVERAGE_ARG="$1"
shift
fi
if [ ! "$COVERAGE_ARGS" ]; then
# Nova notifier tests
bash tools/init_testr_if_needed.sh
python setup.py testr --slowest --testr-args="--concurrency=1 --here=nova_tests $*"
fi
# Main unit tests
MONGO_DATA=`mktemp -d`
trap "clean_exit" EXIT
mkfifo ${MONGO_DATA}/out
mongod --maxConns 32 --noprealloc --smallfiles --quiet --noauth --port 29000 --dbpath "${MONGO_DATA}" --bind_ip localhost &>${MONGO_DATA}/out &
MONGO_PID=$!
# Wait for Mongo to start listening to connections
while read line
do
echo "$line" | grep -q 'waiting for connections on port' && break
done < ${MONGO_DATA}/out
# Read the fifo for ever otherwise mongod would block
# + that gives us the log on screen
cat ${MONGO_DATA}/out &
export CEILOMETER_TEST_MONGODB_URL="mongodb://localhost:29000/ceilometer"
python setup.py testr --slowest --testr-args="--concurrency=1 $*" $COVERAGE_ARG
|
Test travis build, docs generation and semantic release | #!/usr/bin/env bash
set -e
PACKAGE_VERSION=$(node -p -e "require('./package.json').version")
echo "Generate documentation for v$PACKAGE_VERSION"
git config --global user.email "travis@travis-ci.org"
git config --global user.name "Travis CI"
npm run doc:build
git add package.json
git add docs
git commit -m "Travis build: $TRAVIS_BUILD_NUMBER v$PACKAGE_VERSION [ci skip]"
git remote add origin https://${GH_TOKEN}@github.com/Romakita/ts-express-decorators.git > /dev/null 2>&1
git push --quiet --set-upstream origin production
git push -f origin production:refs/heads/master
echo "Done" | #!/usr/bin/env bash
set -e
PACKAGE_VERSION=$(node -p -e "require('./package.json').version")
echo "Generate documentation for v$PACKAGE_VERSION"
git config --global user.email "travis@travis-ci.org"
git config --global user.name "Travis CI"
git checkout -b production
npm run doc:build
git add package.json
git add docs
git commit -m "Travis build: $TRAVIS_BUILD_NUMBER v$PACKAGE_VERSION [ci skip]"
git remote add origin https://${GH_TOKEN}@github.com/Romakita/ts-express-decorators.git > /dev/null 2>&1
git push --quiet --set-upstream origin production
git push -f origin production:refs/heads/master
echo "Done" |
Change location of img to lab3 | #!/bin/bash
# Change this directory for each lab
sudo losetup /dev/loop0 ~/18349/lab2/sdcard.img
sudo kpartx -a /dev/loop0
| #!/bin/bash
# Change this directory for each lab
sudo losetup /dev/loop0 ~/18349/lab3/sdcard.img
sudo kpartx -a /dev/loop0
|
Set bash error handler to exit script if an error has occurred | #!/bin/bash
if (( $# < 1)); then
echo "OpenHIM release build: Builds a specific tagged release ready for deployment";
echo "Usage: $0 TAG";
exit 0;
fi
tag=$1;
shift;
echo "NB!"
echo "To create the tagged build, various git interactions need to take place. "
echo "This will create a temporary branch as well as remove any changes you have havent yet committed"
read -p "Do you wish to proceed? [Y/y]" -n 1 -r
echo ""
if [[ $REPLY =~ ^[Yy]$ ]]; then
cd ../
echo "Git: setup branch/tag"
git checkout -- .
git checkout master
git pull origin master
git fetch --tags
git branch -D $tag
git checkout tags/$tag -b $tag
echo "npm: clean and build package"
rm -rf node_modules
npm install
npm run build
echo "zip: build release version: $tag"
zip \
-i 'lib/*' 'config/*' 'node_modules/*' 'docs/*' 'resources/*' 'CHANGELOG.md' 'LICENSE' 'package.json' 'package-lock.json' 'README.md' \
-r packaging/build.openhim-core.$tag.zip .
echo "Git cleanup"
git checkout -- .
git checkout master
git branch -D $tag
echo "New OpenHIM Core build zipped";
fi
| #!/bin/bash
set -eu
if (( $# < 1)); then
echo "OpenHIM release build: Builds a specific tagged release ready for deployment";
echo "Usage: $0 TAG";
exit 0;
fi
tag=$1;
shift;
echo "NB!"
echo "To create the tagged build, various git interactions need to take place. "
echo "This will create a temporary branch as well as remove any changes you have havent yet committed"
read -p "Do you wish to proceed? [Y/y]" -n 1 -r
echo ""
if [[ $REPLY =~ ^[Yy]$ ]]; then
cd ../
echo "Git: setup branch/tag"
git checkout -- .
git checkout master
git pull origin master
git fetch --tags
git branch -D $tag
git checkout tags/$tag -b $tag
echo "npm: clean and build package"
rm -rf node_modules
npm install
npm run build
echo "zip: build release version: $tag"
zip \
-i 'lib/*' 'config/*' 'node_modules/*' 'docs/*' 'resources/*' 'CHANGELOG.md' 'LICENSE' 'package.json' 'package-lock.json' 'README.md' \
-r packaging/build.openhim-core.$tag.zip .
echo "Git cleanup"
git checkout -- .
git checkout master
git branch -D $tag
echo "New OpenHIM Core build zipped";
fi
|
Update 'option SwapFile' to work with the revised swap file support in FreeBSD 10. |
option_swapfile_install ( ) {
echo "Creating $1 swap file"
S=`echo $1 | tr '[:upper:]' '[:lower:]'`
N=`echo $S | tr -cd '[0-9]'`
case $S in
*.*)
echo "Swapfile size cannot include a Decimal point"
exit 2
;;
*m|*mb|*mi|*mib)
dd if=/dev/zero of="usr/swap0" bs=1024k count=$N
;;
*g|*gb|*gi|*gib)
dd if=/dev/zero of="usr/swap0" bs=1024k count=$(($N * 1024))
;;
*)
echo "Size argument $1 not supported"
exit 2
;;
esac
chmod 0600 "usr/swap0"
echo 'swapfile="/usr/swap0"' >> etc/rc.conf
}
strategy_add $PHASE_FREEBSD_OPTION_INSTALL option_swapfile_install $1
| #
# Create a swap file and set it up correctly.
#
# Usage:
# option AddSwap 768m
#
# Creates a 768m swap file as usr/swap0 and
# adds the correct configuration entries for
# it to be used as a swap file.
#
#
# TODO: expand the command line options here so that
# the following all work:
#
# option AddSwap 768m
# option AddSwap 768m file=/custom/filename
# option AddSwap 768m deferred
#
# The last would causes the swap file to actually get created
# on first boot. (By adding a start script to /usr/local/etc/rc.d
# and enabling it with a suitable option.) In particular,
# this would work well with AutoSize, allowing you to create
# images that can be copied onto any media: If the media is
# larger than the image, the image resizes and creates swap
# at that time.
#
option_swapfile_install ( ) {
echo "Creating $1 swap file"
S=`echo $1 | tr '[:upper:]' '[:lower:]'`
N=`echo $S | tr -cd '[0-9]'`
case $S in
*.*)
echo "Swapfile size cannot include a Decimal point"
exit 2
;;
*m|*mb|*mi|*mib)
dd if=/dev/zero of="usr/swap0" bs=1024k count=$N
;;
*g|*gb|*gi|*gib)
dd if=/dev/zero of="usr/swap0" bs=1024k count=$(($N * 1024))
;;
*)
echo "Size argument $1 not supported"
exit 2
;;
esac
chmod 0600 "usr/swap0"
echo 'md none swap sw,file=/usr/swap0 0 0' >> etc/fstab
}
strategy_add $PHASE_FREEBSD_OPTION_INSTALL option_swapfile_install $1
|
Update grpc build from 0.11.0 to 0.11.1. Seems like there is no impact. | #!/bin/bash
# This script downloads and installs the grpc library, for
# go and python, in the root of the image. It assumes we're running
# as root in the image.
set -ex
# grpc_dist can be empty, in which case we just install to the default paths
grpc_dist="$1"
if [ "$grpc_dist" != "" ]; then
cd $grpc_dist
fi
git clone https://github.com/grpc/grpc.git
cd grpc
git checkout 82c8f71a81b707376a72257b294fe6b6f1f5219d # Beta Release 0.11.0
git submodule update --init
make
if [ "$grpc_dist" != "" ]; then
make install prefix=$grpc_dist
else
make install
fi
CONFIG=opt ./tools/run_tests/build_python.sh
if [ "$grpc_dist" != "" ]; then
CFLAGS=-I$grpc_dist/include LDFLAGS=-L$grpc_dist/lib pip install src/python/grpcio -t $grpc_dist/lib/python2.7/site-packages
else
pip install src/python/grpcio
fi
| #!/bin/bash
# This script downloads and installs the grpc library, for
# go and python, in the root of the image. It assumes we're running
# as root in the image.
set -ex
# grpc_dist can be empty, in which case we just install to the default paths
grpc_dist="$1"
if [ "$grpc_dist" != "" ]; then
cd $grpc_dist
fi
git clone https://github.com/grpc/grpc.git
cd grpc
git checkout 4831d02cc2341ec2233ff9d9ef66fb9a86138fb7 # Beta Release 0.11.1
git submodule update --init
make
if [ "$grpc_dist" != "" ]; then
make install prefix=$grpc_dist
else
make install
fi
CONFIG=opt ./tools/run_tests/build_python.sh
if [ "$grpc_dist" != "" ]; then
CFLAGS=-I$grpc_dist/include LDFLAGS=-L$grpc_dist/lib pip install src/python/grpcio -t $grpc_dist/lib/python2.7/site-packages
else
pip install src/python/grpcio
fi
|
Install more with brew cask | #!/bin/bash
brew tap caskroom/cask
brew cask install --appdir="/Applications" alfred
brew cask install --appdir="/Applications" firefox
brew cask install --appdir="/Applications" iterm2
brew cask install --appdir="/Applications" sublime-text
brew cask install --appdir="/Applications" karabiner
brew cask install --appdir="/Applications" seil
brew cask install --appdir="/Applications" shady
| #!/bin/bash
brew tap caskroom/cask
brew cask install --appdir="/Applications" alfred
brew cask install --appdir="/Applications" firefox
brew cask install --appdir="/Applications" iterm2
brew cask install --appdir="/Applications" sublime-text
brew cask install --appdir="/Applications" karabiner
brew cask install --appdir="/Applications" seil
brew cask install --appdir="/Applications" shady
brew cask install --appdir="/Applications" google-chrome
brew cask install --appdir="/Applications" shiftit
brew cask install --appdir="/Applications" vlc
brew cask install --appdir="/Applications" filezilla
brew cask install --appdir="/Applications" appcleaner
brew cask install --appdir="/Applications" libreoffice
brew cask install --appdir="/Applications" thunderbird
brew cask install --appdir="/Applications" skype
brew cask install --appdir="/Applications" rubymine
brew cask install --appdir="/Applications" silverlight
brew cask install --appdir="/Applications" dropbox
|
Fix tests with PHPUnit 9 (+3) | #!/bin/bash
set -e
if [ -z "$1" ]
then
echo "Database is necessary"
exit 1
fi
n=0;
max=10
while [ -z "$(docker ps -q -f health=healthy -f name=anydataset_db_$1)" ] && [ "$n" -lt "$max" ];
do
echo "Waiting for $1...";
n=$(( n + 1 ))
sleep 5;
done
if [ "$n" -gt "$max" ]
then
echo "$mysql was not health after $(( max * 5 ))"
exit 2
fi
echo "$1 is up"
| #!/bin/bash
set -e
if [ -z "$1" ]
then
echo "Database is necessary"
exit 1
fi
n=0;
max=10
secs=6
while [ -z "$(docker ps -q -f health=healthy -f name=anydataset_db_$1)" ] && [ "$n" -lt "$max" ];
do
echo "Waiting for $1...";
n=$(( n + 1 ))
sleep $secs;
done
if [ "$n" -gt "$max" ]
then
echo "$mysql was not health after $(( max * secs ))"
exit 2
fi
echo "$1 is up"
docker ps
|
Fix for spaces in header path | #!/bin/bash
# based on CocoaLumberJack wiki
# Get user name of current user
full1="`whoami`"
#echo $full1
# Convert to lower case
full2=$(echo $full1 | awk '{print tolower($0)}')
#echo $full2
# Replace spaces with underscores
full3=$(echo ${full2// /_})
#echo $full3
# Remove any characters that are illegal in a macro name
full4=$(echo $full3 | sed 's/[^0-9a-zA-Z_]*//g')
#echo $full4
# If we output directly to our intended file, even when nothing has changed,
# then we'll essentially be doing a touch on the file.
# The compiler will see this, and recompile any files that include the header.
# This may mean recompiling every single source file, every single time we do a build!
# So instead we're going to output to a temporary file, and use diff to detect changes.
header_path=`find "${SRCROOT}" \( -name "KZBootstrapUserMacros.h" \)`
temp_filepath="/tmp/PerUserMacros.temp.h"
echo "// This file is automatically generated" > ${temp_filepath}
echo "#define $full4 1" >> ${temp_filepath}
if [ -a ${header_path} ]
then
DIFF=$(diff ${temp_filepath} ${header_path})
if [ "$DIFF" != "" ]
then
cp -f ${temp_filepath} ${header_path}
fi
else
cp -f ${temp_filepath} ${header_path}
fi | #!/bin/bash
# based on CocoaLumberJack wiki
# Get user name of current user
full1="`whoami`"
#echo $full1
# Convert to lower case
full2=$(echo $full1 | awk '{print tolower($0)}')
#echo $full2
# Replace spaces with underscores
full3=$(echo ${full2// /_})
#echo $full3
# Remove any characters that are illegal in a macro name
full4=$(echo $full3 | sed 's/[^0-9a-zA-Z_]*//g')
#echo $full4
# If we output directly to our intended file, even when nothing has changed,
# then we'll essentially be doing a touch on the file.
# The compiler will see this, and recompile any files that include the header.
# This may mean recompiling every single source file, every single time we do a build!
# So instead we're going to output to a temporary file, and use diff to detect changes.
header_path=`find "${SRCROOT}" \( -name "KZBootstrapUserMacros.h" \)`
temp_filepath="/tmp/PerUserMacros.temp.h"
echo "// This file is automatically generated" > ${temp_filepath}
echo "#define $full4 1" >> ${temp_filepath}
if [ -a "${header_path}" ]
then
DIFF=$(diff ${temp_filepath} ${header_path})
if [ "$DIFF" != "" ]
then
cp -f ${temp_filepath} ${header_path}
fi
else
cp -f ${temp_filepath} ${header_path}
fi
|
Remove --flagfile and move to supported --config-file | #!/bin/bash
set -o xtrace
set -o errexit
# Create a small network
nova-manage --flagfile %CFG_FILE% network create private %FIXED_RANGE% 1 %FIXED_NETWORK_SIZE%
if [[ "$ENABLED_SERVICES" =~ "quantum" ]]; then
echo "Not creating floating IPs (not supported by quantum server)"
else
# Create some floating ips
nova-manage --flagfile %CFG_FILE% floating create %FLOATING_RANGE%
# Create a second pool
nova-manage --flagfile %CFG_FILE% floating create --ip_range=%TEST_FLOATING_RANGE% --pool=%TEST_FLOATING_POOL%
fi
| #!/bin/bash
# TODO - get rid of this, just use python...
set -o xtrace
set -o errexit
# Create a small network
nova-manage --config-file %CFG_FILE% network create private %FIXED_RANGE% 1 %FIXED_NETWORK_SIZE%
if [[ "$ENABLED_SERVICES" =~ "quantum" ]]; then
echo "Not creating floating IPs (not supported by quantum server)"
else
# Create some floating ips
nova-manage --config-file %CFG_FILE% floating create %FLOATING_RANGE%
# Create a second pool
nova-manage --config-file %CFG_FILE% floating create --ip_range=%TEST_FLOATING_RANGE% --pool=%TEST_FLOATING_POOL%
fi
|
Install toilet in Linux for hostname in motd | # Ubuntu-only stuff. Abort if not Ubuntu.
is_ubuntu || return 1
# Update APT.
e_header "Updating APT"
sudo apt-get -qq update
sudo apt-get -qq dist-upgrade
# Install APT packages.
packages=(
aptitude
bash-completion
dialog
landscape-common
mtr-tiny
ncdu
ntp
pv
screen
sysv-rc-conf
tig
tmux
vim
)
packages=($(setdiff "${packages[*]}" "$(dpkg --get-selections | grep -v deinstall | awk '{print $1}')"))
if (( ${#packages[@]} > 0 )); then
e_header "Installing APT packages: ${packages[*]}"
for package in "${packages[@]}"; do
sudo apt-get -qq install "$package"
done
fi
| # Ubuntu-only stuff. Abort if not Ubuntu.
is_ubuntu || return 1
# Update APT.
e_header "Updating APT"
sudo apt-get -qq update
sudo apt-get -qq dist-upgrade
# Install APT packages.
packages=(
aptitude
bash-completion
dialog
landscape-common
mtr-tiny
ncdu
ntp
pv
screen
sysv-rc-conf
tig
tmux
toilet
vim
)
packages=($(setdiff "${packages[*]}" "$(dpkg --get-selections | grep -v deinstall | awk '{print $1}')"))
if (( ${#packages[@]} > 0 )); then
e_header "Installing APT packages: ${packages[*]}"
for package in "${packages[@]}"; do
sudo apt-get -qq install "$package"
done
fi
|
Add the cdv alias to cd to the virtualenvs directory. | function wo() {
[ -f './.venv' ] && workon `cat ./.venv`
}
| function wo() {
[ -f './.venv' ] && workon `cat ./.venv`
}
alias cdv='cd $WORKON_HOME'
|
Set PKG_CONFIG_PATH to build ode extension. | #!/bin/bash
cd ode
svn checkout -r 1939 https://svn.code.sf.net/p/opende/code/trunk opende
cd opende
patch -p0 < ../ode-r1939.patch
./bootstrap
./configure --enable-double-precision --enable-shared --prefix=$HOME/ode
make -j
make install
cd bindings/python
LD_LIBRARY_PATH=$HOME/ode/lib:$LD_LIBRARY_PATH \
python setup.py build_ext -L$HOME/ode/lib -I$HOME/ode/include
python setup.py install
| #!/bin/bash
cd ode
svn checkout -r 1939 https://svn.code.sf.net/p/opende/code/trunk opende
cd opende
patch -p0 < ../ode-r1939.patch
./bootstrap
./configure --enable-double-precision --enable-shared --prefix=$HOME/ode
make -j
make install
cd bindings/python
PKG_CONFIG_PATH=$HOME/ode/lib:$PKG_CONFIG_PATH \
python setup.py build_ext -L$HOME/ode/lib -I$HOME/ode/include
python setup.py install
|
Add --local flag to gem install command so it doesn't hit rubygems.org | #!/bin/bash
set -e
SCRIPT_DIR="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT_DIR="$SCRIPT_DIR/../"
pushd $PROJECT_DIR
gem build go_cart.gemspec
for f in *.gem; do gem install $f; done
rm -rf *.gem
popd | #!/bin/bash
set -e
SCRIPT_DIR="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT_DIR="$SCRIPT_DIR/../"
pushd $PROJECT_DIR
gem build go_cart.gemspec
for f in *.gem; do gem install --local $f; done
rm -rf *.gem
popd |
Disable the random sleep in the fake dsdb used by the scale tests. | #!/bin/bash
sleep `expr \( $RANDOM % 60 \) + 30`
echo "$@"
| #!/bin/bash
#sleep `expr \( $RANDOM % 60 \) + 30`
echo "$@"
|
Move all files from emacsdir | #!/usr/bin/bash
gcl czipperz/b
gcl czipperz/chx
if [ -d $HOME/.emacs.d ]; then
mv .emacs.d emacsdir
gcl czipperz/emacs.d $HOME/.emacs.d
mv emacsdir/* .emacs.d
rm -R emacsdir
else
gcl czipperz/emacs.d $HOME/.emacs.d
fi
(cd chx; ./install)
rm -R chx
gcl czipperz/confman
(cd confman; ./install)
rm -R confman
confman confman
#Setup awesome
mkdir -p .config/awesome/themes
if [ ! -d .config/awesome/themes/transBlack ]; then
gcl czipperz/awesomeThemes
cp -R awesomeThemes/trans* .config/awesome/themes
rm -R awesomeThemes
fi
(cd b; ./install)
rm -R b
| #!/usr/bin/bash
gcl czipperz/b
gcl czipperz/chx
if [ -d $HOME/.emacs.d ]; then
mv .emacs.d emacsdir
gcl czipperz/emacs.d $HOME/.emacs.d
mv $(ls -A emacsdir) .emacs.d
rm -R emacsdir
else
gcl czipperz/emacs.d $HOME/.emacs.d
fi
(cd chx; ./install)
rm -R chx
gcl czipperz/confman
(cd confman; ./install)
rm -R confman
confman confman
#Setup awesome
mkdir -p .config/awesome/themes
if [ ! -d .config/awesome/themes/transBlack ]; then
gcl czipperz/awesomeThemes
cp -R awesomeThemes/trans* .config/awesome/themes
rm -R awesomeThemes
fi
(cd b; ./install)
rm -R b
|
Update go version for git buildpack | #!/bin/bash
set -euo pipefail
GO_VERSION="1.8.1"
export GoInstallDir="/tmp/go$GO_VERSION"
mkdir -p $GoInstallDir
if [ ! -f $GoInstallDir/go/bin/go ]; then
GO_MD5="b05c0cbb28503d038a47d87a6b3b8e86"
URL=https://buildpacks.cloudfoundry.org/dependencies/go/go${GO_VERSION}.linux-amd64-${GO_MD5:0:8}.tar.gz
echo "-----> Download go ${GO_VERSION}"
curl -s -L --retry 15 --retry-delay 2 $URL -o /tmp/go.tar.gz
DOWNLOAD_MD5=$(md5sum /tmp/go.tar.gz | cut -d ' ' -f 1)
if [[ $DOWNLOAD_MD5 != $GO_MD5 ]]; then
echo " **ERROR** MD5 mismatch: got $DOWNLOAD_MD5 expected $GO_MD5"
exit 1
fi
tar xzf /tmp/go.tar.gz -C $GoInstallDir
fi
if [ ! -f $GoInstallDir/go/bin/go ]; then
echo " **ERROR** Could not download go"
exit 1
fi
| #!/bin/bash
set -euo pipefail
GO_VERSION="1.9"
export GoInstallDir="/tmp/go$GO_VERSION"
mkdir -p $GoInstallDir
if [ ! -f $GoInstallDir/go/bin/go ]; then
GO_MD5="4577d9ba083ac86de78012c04a2981be"
URL=https://buildpacks.cloudfoundry.org/dependencies/go/go${GO_VERSION}.linux-amd64-${GO_MD5:0:8}.tar.gz
echo "-----> Download go ${GO_VERSION}"
curl -s -L --retry 15 --retry-delay 2 $URL -o /tmp/go.tar.gz
DOWNLOAD_MD5=$(md5sum /tmp/go.tar.gz | cut -d ' ' -f 1)
if [[ $DOWNLOAD_MD5 != $GO_MD5 ]]; then
echo " **ERROR** MD5 mismatch: got $DOWNLOAD_MD5 expected $GO_MD5"
exit 1
fi
tar xzf /tmp/go.tar.gz -C $GoInstallDir
rm /tmp/go.tar.gz
fi
if [ ! -f $GoInstallDir/go/bin/go ]; then
echo " **ERROR** Could not download go"
exit 1
fi
|
Remove --prefer-source from composer install | #!/bin/bash
set -e
set -o pipefail
if [[ "$TRAVIS_PHP_VERSION" != "hhvm" &&
"$TRAVIS_PHP_VERSION" != "hhvm-nightly" ]]; then
# install "libevent" (used by 'event' and 'libevent' PHP extensions)
sudo apt-get install -y libevent-dev
# install 'event' PHP extension
echo "yes" | pecl install event
# install 'libevent' PHP extension (does not support php 7)
if [[ "$TRAVIS_PHP_VERSION" != "7.0" ]]; then
curl http://pecl.php.net/get/libevent-0.1.0.tgz | tar -xz
pushd libevent-0.1.0
phpize
./configure
make
make install
popd
echo "extension=libevent.so" >> "$(php -r 'echo php_ini_loaded_file();')"
fi
# install 'libev' PHP extension (does not support php 7)
if [[ "$TRAVIS_PHP_VERSION" != "7.0" ]]; then
git clone --recursive https://github.com/m4rw3r/php-libev
pushd php-libev
phpize
./configure --with-libev
make
make install
popd
echo "extension=libev.so" >> "$(php -r 'echo php_ini_loaded_file();')"
fi
fi
composer install --prefer-source
| #!/bin/bash
set -e
set -o pipefail
if [[ "$TRAVIS_PHP_VERSION" != "hhvm" &&
"$TRAVIS_PHP_VERSION" != "hhvm-nightly" ]]; then
# install "libevent" (used by 'event' and 'libevent' PHP extensions)
sudo apt-get install -y libevent-dev
# install 'event' PHP extension
echo "yes" | pecl install event
# install 'libevent' PHP extension (does not support php 7)
if [[ "$TRAVIS_PHP_VERSION" != "7.0" ]]; then
curl http://pecl.php.net/get/libevent-0.1.0.tgz | tar -xz
pushd libevent-0.1.0
phpize
./configure
make
make install
popd
echo "extension=libevent.so" >> "$(php -r 'echo php_ini_loaded_file();')"
fi
# install 'libev' PHP extension (does not support php 7)
if [[ "$TRAVIS_PHP_VERSION" != "7.0" ]]; then
git clone --recursive https://github.com/m4rw3r/php-libev
pushd php-libev
phpize
./configure --with-libev
make
make install
popd
echo "extension=libev.so" >> "$(php -r 'echo php_ini_loaded_file();')"
fi
fi
composer install
|
Fix install instruction to match actual file name | #!/bin/bash
waitUrl="`pwd`/ui/waiting-room.html";
# Ensure dependencies are installed.
echo "* Checking dependencies..."
hash tsc 2>/dev/null
if [ $? -ne 0 ]; then
echo "Please install the typescript compiler with ('sudo npm install -g typescript') before continuing."
exit
fi
hash multirust 2>/dev/null
if [ $? -ne 0 ]; then
echo "Please install multirust with ('./install-multirust') before continuing."
exit
fi
pushd .
# Try using the typescript compiler (tsc) to compile UI
echo "* Compiling Editor..."
cd ui
tsc
if [ $? -ne 0 ]; then
echo "Failed to compile editor, bailing."
popd
exit
fi
popd
# If we aren't restarting, open the editor in the user's preferred browser
if [[ "x$1" != "x--restart" ]]; then
echo "* Opening $waitUrl"
if [[ "$OSTYPE" == "darwin"* ]]; then
open "$waitUrl" &
else
xdg-open "$waitUrl" &
fi
fi
pushd .
# Ensure rustc is updated
echo "* Updating rust if necessary..."
cd runtime
multirust override nightly-2015-08-10
# Compile runtime server
echo "* Compiling server... (This takes a while)"
rustFlags="--release"
if [[ "x$1" != "x--debug" ]]; then
rustFlags=""
fi
RUST_BACKTRACE=1 cargo run --bin=server $rustFlags
popd
| #!/bin/bash
waitUrl="`pwd`/ui/waiting-room.html";
# Ensure dependencies are installed.
echo "* Checking dependencies..."
hash tsc 2>/dev/null
if [ $? -ne 0 ]; then
echo "Please install the typescript compiler with ('sudo npm install -g typescript') before continuing."
exit
fi
hash multirust 2>/dev/null
if [ $? -ne 0 ]; then
echo "Please install multirust with ('./install-multirust.sh') before continuing."
exit
fi
pushd .
# Try using the typescript compiler (tsc) to compile UI
echo "* Compiling Editor..."
cd ui
tsc
if [ $? -ne 0 ]; then
echo "Failed to compile editor, bailing."
popd
exit
fi
popd
# If we aren't restarting, open the editor in the user's preferred browser
if [[ "x$1" != "x--restart" ]]; then
echo "* Opening $waitUrl"
if [[ "$OSTYPE" == "darwin"* ]]; then
open "$waitUrl" &
else
xdg-open "$waitUrl" &
fi
fi
pushd .
# Ensure rustc is updated
echo "* Updating rust if necessary..."
cd runtime
multirust override nightly-2015-08-10
# Compile runtime server
echo "* Compiling server... (This takes a while)"
rustFlags="--release"
if [[ "x$1" != "x--debug" ]]; then
rustFlags=""
fi
RUST_BACKTRACE=1 cargo run --bin=server $rustFlags
popd
|
Make mac devdb shell script use args to start eventstored. | #!/usr/bin/env bash
EVENTSTORE_DIR="{EventStoreInstallPath}"
cd $EVENTSTORE_DIR
LD_LIBRARY_PATH=${EVENTSTORE_DIR}:$LD_LIBRARY_PATH ${EVENTSTORE_DIR}/eventstored $@ | #!/usr/bin/env bash
EVENTSTORE_DIR="{EventStoreInstallPath}"
cd $EVENTSTORE_DIR
LD_LIBRARY_PATH=${EVENTSTORE_DIR}:$LD_LIBRARY_PATH ${EVENTSTORE_DIR}/eventstored --db ./db --log ./log --run-projections=system --start-standard-projections $@ |
Change container name and consul key | #!/bin/bash
set -e
HOST_IP=$1
docker run --name $HOSTNAME -p 9999:2375 -d swarm \
manage \
consul://${HOST_IP}:8500/demo01
| #!/bin/bash
set -e
HOST_IP=$1
docker run --name swarm -p 9999:2375 -d swarm \
manage \
consul://${HOST_IP}:8500
|
Fix vs to allow session as first parameter | #! /usr/bin/env bash
v_funcs() {
$PAGER ~/.bash/vim.bash
}
v() {
vim "$@"
}
vv() {
vim +'edit $MYVIMRC' "$@"
}
vq() {
if (($# > 0)); then
vim -q <("$@" 2>&1)
else
printf '%s\n' 'Usage: vq cmd' '' 'Use {cmd} output as quickfix list'
fi
}
vf() {
if (($# > 0)); then
vim $("$@")
else
printf '%s\n' 'Usage: vf cmd' '' 'Use {cmd} output as filenames' \
'Brittle: {cmd} output will be word-split'
fi
}
vc() {
if (($# > 0)); then
( cd "$1" && shift && vim "$@" )
else
printf '%s\n' 'Usage: vc dir [args]' '' 'Execute vim in {dir}'
fi
}
vs() {
vim "$@" -S
}
# Start vim with its last cursor position
lvim() {
vim +'normal '"'"'0' "$@"
}
| #! /usr/bin/env bash
v_funcs() {
$PAGER ~/.bash/vim.bash
}
v() {
vim "$@"
}
vv() {
vim +'edit $MYVIMRC' "$@"
}
vq() {
if (($# > 0)); then
vim -q <("$@" 2>&1)
else
printf '%s\n' 'Usage: vq cmd' '' 'Use {cmd} output as quickfix list'
fi
}
vf() {
if (($# > 0)); then
vim $("$@")
else
printf '%s\n' 'Usage: vf cmd' '' 'Use {cmd} output as filenames' \
'Brittle: {cmd} output will be word-split'
fi
}
vc() {
if (($# > 0)); then
( cd "$1" && shift && vim "$@" )
else
printf '%s\n' 'Usage: vc dir [args]' '' 'Execute vim in {dir}'
fi
}
vs() {
if (($# > 0)); then
local session="$1"
shift
vim "$@" -S "$session"
else
vim -S
fi
}
# Start vim with its last cursor position
lvim() {
vim +'normal '"'"'0' "$@"
}
|
Clean up output dir before creating new renderspecs | #!/bin/bash
set -eux
basedir=$1
specdir=${basedir}/openstack/
WORKSPACE=${WORKSPACE:-$basedir}
echo "run renderspec over specfiles from ${specdir}"
for spec in ${specdir}/**/*.spec.j2; do
mkdir -p $WORKSPACE/logs/
for specstyle in "suse" "fedora"; do
echo "run ${spec} for ${specstyle}"
renderspec --spec-style ${specstyle} ${spec} \
-o $WORKSPACE/logs/${spec##*/}.${specstyle}
done
done
| #!/bin/bash
set -eux
basedir=$1
specdir=${basedir}/openstack/
WORKSPACE=${WORKSPACE:-$basedir}
OUTPUTDIR=$WORKSPACE/logs/
specstyles="suse fedora"
mkdir -p $OUTPUTDIR
# clean up output dir
for specstyle in $specstyles; do
rm -f $OUTPUTDIR/*.${specstyle}
done
echo "run renderspec over specfiles from ${specdir}"
for spec in ${specdir}/**/*.spec.j2; do
for specstyle in $specstyles; do
echo "run ${spec} for ${specstyle}"
renderspec --spec-style ${specstyle} ${spec} \
-o $WORKSPACE/logs/${spec##*/}.${specstyle}
done
done
|
Add update main branch alias | # Use `hub` as our git wrapper:
# http://defunkt.github.com/hub/
hub_path=$(which hub)
if [[ -f $hub_path ]]
then
alias git=$hub_path
fi
alias g="git"
alias glog="git log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative"
alias grm="git status | grep deleted | awk '{print \$3}' | xargs git rm"
alias hb="hub browse"
alias gpo="git push origin \$(gcb)"
alias gpl="git pull --rebase origin \$(gcb)"
alias gb="git branch"
alias gs="git status"
alias gco="git checkout"
alias gc="git commit"
# git current branch
alias gcb="git rev-parse --abbrev-ref HEAD"
# .g shortcuts
alias .ga=".g add"
alias .gd=".g diff"
alias .glog=".g log" | # Use `hub` as our git wrapper:
# http://defunkt.github.com/hub/
hub_path=$(which hub)
if [[ -f $hub_path ]]
then
alias git=$hub_path
fi
alias g="git"
alias glog="git log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative"
alias grm="git status | grep deleted | awk '{print \$3}' | xargs git rm"
alias hb="hub browse"
alias gpo="git push origin \$(gcb)"
alias gpl="git pull --rebase origin \$(gcb)"
alias gb="git branch"
alias gs="git status"
alias gco="git checkout"
alias gc="git commit"
# git current branch
alias gcb="git rev-parse --abbrev-ref HEAD"
# update current branch from main branch
alias gupd="main_branch=$(git remote show origin | sed -n '/HEAD branch/s/.*: //p'); echo \"rebasing origin/\$main_branch\"...; git fetch origin && git rebase origin/\$main_branch"
# .g shortcuts
alias .ga=".g add"
alias .gd=".g diff"
alias .glog=".g log"
|
Fix double quote warning by shellcheck | if ndef __CORE_EXCLUDE_SH; then
define __CORE_EXCLUDE_SH "exclude"
exclude() {
local current_lib
local include includes lib_header def defined
if [ -f "$1" ]; then
current_lib="$1"
else
current_lib="${SHELLM_USR}/lib/$1"
fi
lib_header=${current_lib#${SHELLM_USR}/lib/}
lib_header=${lib_header//[\/.]/_}
lib_header=__${lib_header^^}
defined=${!lib_header}
for def in ${defined}; do
case "$(type -t "${def}")" in
function)
# shellcheck disable=SC2163
unset -f "${def}"
;;
alias)
# shellcheck disable=SC2163
unalias "${def}"
;;
"")
# shellcheck disable=SC2163
unset "${def}"
;;
esac
done
unset ${lib_header}
# recurse on other included libraries
includes=$(grep -o 'include [a-zA-Z_/]*\.sh' "${current_lib}" | cut -d' ' -f2)
for include in ${includes}; do
exlude "${include}"
done
}
fi # __CORE_EXCLUDE_SH
| if ndef __CORE_EXCLUDE_SH; then
define __CORE_EXCLUDE_SH "exclude"
exclude() {
local current_lib
local include includes lib_header def defined
if [ -f "$1" ]; then
current_lib="$1"
else
current_lib="${SHELLM_USR}/lib/$1"
fi
lib_header=${current_lib#${SHELLM_USR}/lib/}
lib_header=${lib_header//[\/.]/_}
lib_header=__${lib_header^^}
defined=${!lib_header}
for def in ${defined}; do
case "$(type -t "${def}")" in
function)
# shellcheck disable=SC2163
unset -f "${def}"
;;
alias)
# shellcheck disable=SC2163
unalias "${def}"
;;
"")
# shellcheck disable=SC2163
unset "${def}"
;;
esac
done
unset "${lib_header}"
# recurse on other included libraries
includes=$(grep -o 'include [a-zA-Z_/]*\.sh' "${current_lib}" | cut -d' ' -f2)
for include in ${includes}; do
exlude "${include}"
done
}
fi # __CORE_EXCLUDE_SH
|
Refresh gpg-agent between tmux sessions | #! /usr/bin/env zsh
# Allow using the docker socket without root.
sudo chgrp docker /var/run/docker.sock
# Start-up the GPG-Agent for managing SSH and GPG so they can be used across
# all tmux panes/windows.
eval $(gpg-agent --daemon --enable-ssh-support --disable-scdaemon)
# Start TMUX for all terminal access.
function keep_tmux_up() {
while true
do
echo Starting TMUX session.
tmux -2 new -d
# Wait while session is alive.
while tmux has-session -t 0
do
echo TMUX session is up. Available to join.
sleep 1
done
echo TMUX session is down.
done
}
keep_tmux_up &
curl "https://github.com/$USER.keys" > "$HOME/.ssh/authorized_keys"
sudo apt install -y openssh-server
sudo mkdir -p /var/run/sshd
sudo /usr/sbin/sshd -D
| #! /usr/bin/env zsh
# Allow using the docker socket without root.
sudo chgrp docker /var/run/docker.sock
# Start TMUX for all terminal access.
function keep_tmux_up() {
while true
do
# Start-up the GPG-Agent for managing SSH and GPG so they can be used across
# all tmux panes/windows.
killall gpg-agent
eval $(gpg-agent --daemon --enable-ssh-support --disable-scdaemon)
echo Starting TMUX session.
tmux -2 new -d
# Wait while session is alive.
while tmux has-session -t 0
do
echo TMUX session is up. Available to join.
sleep 1
done
echo TMUX session is down.
done
}
keep_tmux_up &
curl "https://github.com/$USER.keys" > "$HOME/.ssh/authorized_keys"
sudo apt install -y openssh-server
sudo mkdir -p /var/run/sshd
sudo /usr/sbin/sshd -D
|
Check for WF and LF. | #!/bin/sh
awk '/pos last name first/{flag=1;next}/The total Number/{flag=0}flag' $1 | cut -c28- | sed 's/\// /' | awk '{ num_games = 0; for(i=5; i < NF; i++) { if ($i ~ /[WLD]/) num_games=num_games+1;} print $1 " " $2 " " $3 " " $4 " " num_games}'
#awk '/pos last name first/{flag=1;next}/The total Number/{flag=0}flag' $1 | cut -c28- | sed 's/\// /' | cut -c1-24 | awk '{if ($4 >= 15) print $0}'
| #!/bin/sh
awk '/pos last name first/{flag=1;next}/The total Number/{flag=0}flag' $1 | cut -c28- | sed 's/\// /' | awk '{ num_games = 0; for(i=5; i < NF; i++) { if (($i ~ /[WLD]/) && ($i != "WF") && ($i != "LF")) num_games=num_games+1;} print $1 " " $2 " " $3 " " $4 " " num_games}'
#awk '/pos last name first/{flag=1;next}/The total Number/{flag=0}flag' $1 | cut -c28- | sed 's/\// /' | cut -c1-24 | awk '{if ($4 >= 15) print $0}'
|
Fix Travis' branch env variable inconsistency | #!/bin/bash
set -ev
if [ "$TRAVIS_PHP_VERSION" != "7.0" ]; then
echo "Skipping docker build on php7.0"
exit 0;
fi
TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
docker build -t weinstein/app -f deploy/app.docker .
docker build -t weinstein/web -f deploy/web.docker .
docker tag weinstein/app weinstein/app:$TAG
docker tag weinstein/web weinstein/web:$TAG
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
docker push weinstein/app
docker push weinstein/web
| #!/bin/bash
set -ev
if [ "$TRAVIS_PHP_VERSION" != "7.0" ]; then
echo "Skipping docker build on php7.0"
exit 0;
fi
BRANCH=${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH}
TAG=`if [ "$BRANCH" == "master" ]; then echo "latest"; else echo $BRANCH ; fi`
docker build -t weinstein/app -f deploy/app.docker .
docker build -t weinstein/web -f deploy/web.docker .
docker tag weinstein/app weinstein/app:$TAG
docker tag weinstein/web weinstein/web:$TAG
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
docker push weinstein/app
docker push weinstein/web
|
Add another material theme to vscode | #!/bin/bash
set -e
source $DOTFILES_ROOT/util/common.sh
# Ensure code exists, or exit otherwise
command -v code >/dev/null 2>&1 || exit
# Symlink the settings file
mkdir -p "$HOME/Library/Application Support/Code/User"
ln -sfv "$DOTFILES_ROOT/vscode/settings.json" "$HOME/Library/Application Support/Code/User/settings.json"
VSCODE_PACKAGES=(
ms-vscode.cpptools
g3ortega.crystal
kofno.crystal-ide
stevejpurves.cucumber
msjsdiag.debugger-for-chrome
EditorConfig.editorconfig
sbrink.elm
emberjs.emberjs
codezombiech.gitignore
tberman.json-schema-validator
PKief.material-icon-theme
felixfbecker.php-pack
ricard.postcss
mohsen1.prettify-json
rebornix.ruby
shardulm94.trailing-spaces
whatwedo.twig
mjmcloug.vscode-elixir
dbaeumer.vscode-eslint
Equinusocio.vsc-material-theme
)
for package in "${VSCODE_PACKAGES[@]}"; do
step "Installing vscode extension: '$package'"
code --install-extension $package
done
| #!/bin/bash
set -e
source $DOTFILES_ROOT/util/common.sh
# Ensure code exists, or exit otherwise
command -v code >/dev/null 2>&1 || exit
# Symlink the settings file
mkdir -p "$HOME/Library/Application Support/Code/User"
ln -sfv "$DOTFILES_ROOT/vscode/settings.json" "$HOME/Library/Application Support/Code/User/settings.json"
VSCODE_PACKAGES=(
ms-vscode.cpptools
g3ortega.crystal
kofno.crystal-ide
stevejpurves.cucumber
msjsdiag.debugger-for-chrome
EditorConfig.editorconfig
sbrink.elm
emberjs.emberjs
codezombiech.gitignore
tberman.json-schema-validator
PKief.material-icon-theme
zhuangtongfa.Material-theme
felixfbecker.php-pack
ricard.postcss
mohsen1.prettify-json
rebornix.ruby
shardulm94.trailing-spaces
whatwedo.twig
mjmcloug.vscode-elixir
dbaeumer.vscode-eslint
Equinusocio.vsc-material-theme
)
for package in "${VSCODE_PACKAGES[@]}"; do
step "Installing vscode extension: '$package'"
code --install-extension $package
done
|
Include Homebrew's PG include path | #!/bin/sh
set -ex
which bindgen || cargo install bindgen
echo '#include <stdarg.h>' > /tmp/postgres.c
echo '#include "postgres.h"' >> /tmp/postgres.c
echo '#include "fmgr.h"' >> /tmp/postgres.c
echo '#include "replication/output_plugin.h"' >> /tmp/postgres.c
echo '#include "replication/logical.h"' >> /tmp/postgres.c
gcc -I /usr/include/postgresql/9.4/server -E /tmp/postgres.c > /tmp/libpq.c
cat /tmp/libpq.c | python src/remove_duplicate_single_line_statements.py > /tmp/libpq_dedup.c
bindgen -allow-bitfields -builtins /tmp/libpq_dedup.c > src/libpq.rs
| #!/bin/sh
set -ex
which bindgen || cargo install bindgen
echo '#include <stdarg.h>' > /tmp/postgres.c
echo '#include "postgres.h"' >> /tmp/postgres.c
echo '#include "fmgr.h"' >> /tmp/postgres.c
echo '#include "replication/output_plugin.h"' >> /tmp/postgres.c
echo '#include "replication/logical.h"' >> /tmp/postgres.c
gcc -I /usr/include/postgresql/9.4/server -I /usr/local/include/server/ \
-E /tmp/postgres.c > /tmp/libpq.c
cat /tmp/libpq.c | python src/remove_duplicate_single_line_statements.py > /tmp/libpq_dedup.c
bindgen -allow-bitfields -builtins /tmp/libpq_dedup.c > src/libpq.rs
|
Add full version of log. | #!/bin/sh
RUST_LOG=rls cargo run --manifest-path=/opt/rls/Cargo.toml 2>>/tmp/client.log
| #!/bin/sh
RUST_LOG=rls cargo run --manifest-path=/opt/rls/Cargo.toml 2>>/tmp/client.log
# tee -a /tmp/client.log | RUST_LOG=rls cargo run --manifest-path=/opt/rls/Cargo.toml 2>>/tmp/client.log | tee -a /tmp/client.log
|
Check for existing flash file - this might mean the SW version has not been increased. Warn and abort in that case. | #!/bin/sh
echo "Generating SOF flash file..."
sof2flash --input="../dtb/dtb.sof" --output="dtb.flash" --epcs
echo "Generating ELF flash file..."
elf2flash --input="../software/dtb_expert/dtb_expert.elf" --output="dtb_expert.flash" --epcs --after="dtb.flash"
echo "Fetching SW version..."
version=$(grep "sw_version" ../software/dtb_expert/dtb_config.h | tr -dc '[:digit:]' | sed 's/^\(.\{1\}\)/\1./')
echo "Merging flash files to dtb_v${version}.flash..."
cat dtb.flash dtb_expert.flash > dtb_v${version}.flash
rm dtb.flash
rm dtb_expert.flash
echo "Done."
| #!/bin/sh
echo "Fetching SW version..."
version=$(grep "sw_version" ../software/dtb_expert/dtb_config.h | tr -dc '[:digit:]' | sed 's/^\(.\{1\}\)/\1./')
if [ -e "dtb_v${version}.flash" ]
then
echo "Flash file for SW version ${version} already exists - forgot to increment version number?"
echo "Aborting."
else
echo "Generating SOF flash file..."
sof2flash --input="../dtb/dtb.sof" --output="dtb.flash" --epcs
echo "Generating ELF flash file..."
elf2flash --input="../software/dtb_expert/dtb_expert.elf" --output="dtb_expert.flash" --epcs --after="dtb.flash"
echo "Merging flash files to dtb_v${version}.flash..."
cat dtb.flash dtb_expert.flash > dtb_v${version}.flash
rm dtb.flash
rm dtb_expert.flash
echo "Done."
fi
|
Add apt repository update before install | #!/usr/bin/env bash
# Installs dependencies for travis-ci environments.
# Install dependencies, which looks to be just bash & zsh.
#
# Darwin has zsh preinstalled already, so only need to install on Ubuntu.
#
# Note: $TRAVIS_OS_NAME will only be set on text boxes with multi-os enabled,
# so use negation test so it will fail gracefully on normal Travis linux setup.
if [[ "$TRAVIS_OS_NAME" != "osx" ]]; then
# okay, so we know we're probably on a linux box (or at least not an osx box)
# at this point. do we need to install zsh? let's say the default case is no:
needs_zsh=false
# check if zsh is listed in the TEST_SHELLS environment variable, set by
# our travis-ci build matrix.
if [[ $TEST_SHELLS =~ zsh ]]; then needs_zsh=true; fi
# if there is NO $TEST_SHELLS env variable persent (which should never happen,
# but maybe someone has been monkeying with the .travis.yml), run_tests.sh is
# going to fall back onto the default of testing everything, so we need zsh.
if [[ -z "$TEST_SHELLS" ]]; then needs_zsh=true; fi
# finally, we install zsh if needed!
if $needs_zsh ; then
sudo apt-get install zsh
else
echo "No deps required."
fi
fi
| #!/usr/bin/env bash
# Installs dependencies for travis-ci environments.
# Install dependencies, which looks to be just bash & zsh.
#
# Darwin has zsh preinstalled already, so only need to install on Ubuntu.
#
# Note: $TRAVIS_OS_NAME will only be set on text boxes with multi-os enabled,
# so use negation test so it will fail gracefully on normal Travis linux setup.
if [[ "$TRAVIS_OS_NAME" != "osx" ]]; then
# okay, so we know we're probably on a linux box (or at least not an osx box)
# at this point. do we need to install zsh? let's say the default case is no:
needs_zsh=false
# check if zsh is listed in the TEST_SHELLS environment variable, set by
# our travis-ci build matrix.
if [[ $TEST_SHELLS =~ zsh ]]; then needs_zsh=true; fi
# if there is NO $TEST_SHELLS env variable persent (which should never happen,
# but maybe someone has been monkeying with the .travis.yml), run_tests.sh is
# going to fall back onto the default of testing everything, so we need zsh.
if [[ -z "$TEST_SHELLS" ]]; then needs_zsh=true; fi
# finally, we install zsh if needed!
if $needs_zsh; then
sudo apt-get update
sudo apt-get install zsh
else
echo "No deps required."
fi
fi
|
Deploy can use a file of default variables | #!/bin/bash
cd $(dirname $0)
TEMPFILE=$(mktemp || mktemp -t X) 2>/dev/null
EXTRA_VARS="tempfile=$TEMPFILE $*"
eval ansible-playbook -i hosts --extra-vars "\"$EXTRA_VARS\"" deploy.yml >&2
cat $TEMPFILE
rm -rf $TEMPFILE
| #!/bin/bash
cd $(dirname $0)
TEMPFILE=$(mktemp || mktemp -t X) 2>/dev/null
for var in $*; do
EXTRA_VARS="$EXTRA_VARS --extra-vars \"$var\"";
done
EXTRA_VARS="$EXTRA_VARS --extra-vars \"tempfile=$TEMPFILE\""
eval ansible-playbook -i hosts $EXTRA_VARS deploy.yml >&2
cat $TEMPFILE
rm -rf $TEMPFILE
|
Add comment regarding JSC and ICU versions | #!/bin/bash
TARGET_DIR=target
SVN_REV=216995
mkdir -p $TARGET_DIR/webkit
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/trunk/Source $TARGET_DIR/webkit/Source
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/trunk/Tools $TARGET_DIR/webkit/Tools
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/trunk/CMakeLists.txt $TARGET_DIR/webkit/CMakeLists.txt
rm -rf $TARGET_DIR/webkit/Source/JavaScriptCore
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/releases/Apple/iOS%2010.3.2/JavaScriptCore $TARGET_DIR/webkit/Source/JavaScriptCore
rm -rf $TARGET_DIR/webkit/Source/WTF
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/releases/Apple/iOS%2010.3.2/WTF $TARGET_DIR/webkit/Source/WTF
mkdir -p $TARGET_DIR/icu
curl https://android.googlesource.com/platform/external/icu/+archive/android-7.1.2_r11/icu4c.tar.gz | tar xzf - -C $TARGET_DIR/icu
| #!/bin/bash
TARGET_DIR=target
# This SVN revision number was determined by looking for the latest iOS release at https://trac.webkit.org/browser/webkit/releases/Apple
SVN_REV=216995
mkdir -p $TARGET_DIR/webkit
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/trunk/Source $TARGET_DIR/webkit/Source
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/trunk/Tools $TARGET_DIR/webkit/Tools
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/trunk/CMakeLists.txt $TARGET_DIR/webkit/CMakeLists.txt
# As the trunk for $SVN_REV differs from releases dir I'm replacing JSC and WTF with versions from releases dir
rm -rf $TARGET_DIR/webkit/Source/JavaScriptCore
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/releases/Apple/iOS%2010.3.2/JavaScriptCore $TARGET_DIR/webkit/Source/JavaScriptCore
rm -rf $TARGET_DIR/webkit/Source/WTF
svn export -r $SVN_REV https://svn.webkit.org/repository/webkit/releases/Apple/iOS%2010.3.2/WTF $TARGET_DIR/webkit/Source/WTF
mkdir -p $TARGET_DIR/icu
# This is for the latest release for the latest android from https://android.googlesource.com/platform/external/icu/
curl https://android.googlesource.com/platform/external/icu/+archive/android-7.1.2_r11/icu4c.tar.gz | tar xzf - -C $TARGET_DIR/icu
|
Adjust binding hosting from old default | #!/bin/sh
OPTS="-e $ES_URL"
exec bin/kibana $OPTS
| #!/bin/sh
OPTS="-e $ES_URL -H $HOSTNAME"
exec bin/kibana $OPTS
|
Clarify message in setup script | #!/bin/bash
echo '[setup] Installing Emacs packages.'
emacs -nw install_emacs_packages_notice.txt
| #!/bin/bash
echo '[setup] Installing Emacs packages, if necessary.'
emacs -nw install_emacs_packages_notice.txt
|
Fix typo in shell script | #!/bin/bash
## Install Docker
apt-get update
apt-get -y install curl
curl -sSL https://get.docker.com/ | sh
## Install Git
apt-get -y install Git
## Build and run ASP.NET Core HelloWeb Docker container
git clone https://github.com/aspnet/Home.git /usr/local/src/aspnet-Home
cd /usr/local/src/aspnet-Home/samples/1.0.0-rc1-final/HellowWeb
docker build -t helloweb .
docker run -t -d -p 80:5004 helloweb | #!/bin/bash
## Install Docker
apt-get update
apt-get -y install curl
curl -sSL https://get.docker.com/ | sh
## Install Git
apt-get -y install Git
## Build and run ASP.NET Core HelloWeb Docker container
git clone https://github.com/aspnet/Home.git /usr/local/src/aspnet-Home
cd /usr/local/src/aspnet-Home/samples/1.0.0-rc1-final/HelloWeb
docker build -t helloweb .
docker run -t -d -p 80:5004 helloweb |
Use GIT_SSH_COMMAND to override SSH key. | #!/bin/bash
set -e
source venv/bin/activate
# This all happens in here:
cd digipres.github.io
# Grab a version ID:
VERSION=$(git describe --always --tag)
echo "\nDeploying into master branch:"
# Just in case something changed while we generated the data:
git checkout master
git pull origin master
# Add the new stuff:
git add --all .
git commit -am "New site version ${VERSION} deployed." --allow-empty
# Set up the credentials for digipres.github.io
# if GITHUB_TOKEN
if [[ -z "${DIGIPRES_REPO_DEPLOY_PRIVATE_KEY}" ]]; then
echo No DIGIPRES_REPO_DEPLOY_PRIVATE_KEY set: using standard remote.
git remote get-url origin
else
echo DIGIPRES_REPO_DEPLOY_PRIVATE_KEY set: using id_ed25519
echo "${DIGIPRES_REPO_DEPLOY_PRIVATE_KEY}" > ~/.ssh/id_ed25519
git remote set-url --push origin git@github.com:digipres/digipres.github.io.git
fi
# And PUSH IT
echo "\nPushing to master..."
git push origin master
echo "\n DONE."
| #!/bin/bash
set -e
source venv/bin/activate
# This all happens in here:
cd digipres.github.io
# Grab a version ID:
VERSION=$(git describe --always --tag)
echo "\nDeploying into master branch:"
# Just in case something changed while we generated the data:
git checkout master
git pull origin master
# Add the new stuff:
git add --all .
git commit -am "New site version ${VERSION} deployed." --allow-empty
# Set up the credentials for digipres.github.io
# if GITHUB_TOKEN
if [[ -z "${DIGIPRES_REPO_DEPLOY_PRIVATE_KEY}" ]]; then
echo No DIGIPRES_REPO_DEPLOY_PRIVATE_KEY set: using standard remote.
git remote get-url origin
else
echo DIGIPRES_REPO_DEPLOY_PRIVATE_KEY set: using ssh -i ~/.ssh/id_ed25519
echo "${DIGIPRES_REPO_DEPLOY_PRIVATE_KEY}" > ~/.ssh/id_ed25519
export GIT_SSH_COMMAND='ssh -i ~/.ssh/id_ed25519'
git remote set-url --push origin git@github.com:digipres/digipres.github.io.git
fi
# And PUSH IT
echo "\nPushing to master..."
git push origin master
echo "\n DONE."
|
Deploy both core and frontend | #!/bin/bash
shell='ssh root@java-deptools.fedorainfracloud.org'
activator frontend/rpm:packageBin
$shell '
set -e
systemctl stop java-deptools-frontend.service ||:
dd of=frontend.rpm
dnf install -y frontend.rpm
systemctl start java-deptools-frontend.service' \
< frontend/target/rpm/RPMS/noarch/java-deptools-frontend-0-1.noarch.rpm
| #!/bin/bash
set -e
shell='ssh root@java-deptools.fedorainfracloud.org'
activator core/rpm:packageBin frontend/rpm:packageBin
tar cj {core,frontend}/target/rpm/RPMS/noarch/java-deptools-*-0-1.noarch.rpm \
./././././generate-repos.sh | \
$shell '
set -e
tar xj --strip-components 5
systemctl stop java-deptools-frontend.service ||:
dnf install -y java-deptools-{core,frontend}-0-1.noarch.rpm
rm -f java-deptools-{core,frontend}-0-1.noarch.rpm
mv generate-repos.sh /usr/libexec/
systemctl start java-deptools-frontend.service'
|
Remove empty [] when creating the release | TEXT=`cat CHANGELOG.md| sed -n "/##\ $VERSION/,/##/p"`
TEXT=`echo "$TEXT" | sed '1d;$d'`
echo "gh release create $VERSION -n \"## What's Changed\r\n$TEXT\""
gh release create $VERSION -n "$TEXT" | TEXT=`cat CHANGELOG.md| sed -n "/##\ $VERSION/,/##/p"`
TEXT=`echo "$TEXT" | sed '1d;$d' | sed 's/\[\]//g'`
echo "gh release create $VERSION -n \"## What's Changed\r\n$TEXT\""
gh release create $VERSION -n "$TEXT" |
Switch to RabbitMQ 3.6.5 deb | #!/bin/bash -eux
PACKAGES="
rabbitmq-server
wget
"
# Install corosync with pacemaker
apt-get -y install $PACKAGES
# FIXME(bogdando) remove after the rabbitmq-server v3.5.7 released and got to the UCA
wget https://raw.githubusercontent.com/rabbitmq/rabbitmq-server/stable/scripts/rabbitmq-server-ha.ocf \
-O /tmp/rabbitmq-server-ha
chmod +x /tmp/rabbitmq-server-ha
cp -f /tmp/rabbitmq-server-ha /usr/lib/ocf/resource.d/rabbitmq/
# stop and disable rabbitmq-server, assumes puppet CM installed
puppet apply -e "service {'rabbitmq-server': ensure=>stopped, enable=>false }"
sync
| #!/bin/bash -eux
PACKAGES="
wget
"
# Install corosync with pacemaker
apt-get -y install $PACKAGES
ver=3.6.5
file="rabbitmq-server_${ver}-1_all.deb"
wget "http://www.rabbitmq.com/releases/rabbitmq-server/v${ver}/${file}" -O "/tmp/${file}"
dpkg -i "/tmp/${file}"
# stop and disable rabbitmq-server, assumes puppet CM installed
puppet apply -e "service {'rabbitmq-server': ensure=>stopped, enable=>false }"
sync
|
Abort if errors are encountered | #!/usr/bin/env bash
# Set version to latest unless set by user
if [ -z "$VERSION" ]; then
VERSION="1.0.1"
fi
echo "Downloading version ${VERSION}..."
# OS information (contains e.g. Darwin x86_64)
UNAME=`uname -a`
# Determine platform
if [[ $UNAME == *"Darwin"* ]]; then
PLATFORM="darwin"
elif [[ $UNAME == *"Cygwin"* ]]; then
PLATFORM="windows"
else
PLATFORM="linux"
fi
# Determine architecture
if [[ ($UNAME == *x86_64*) || ($UNAME == *amd64*) ]]
then
ARCH="amd64"
else
echo "Currently, there are no 32bit binaries provided."
echo "You will need to go get / go install github.com/boot2docker/boot2docker-cli."
exit 1
fi
# Download binary
curl -L -o boot2docker "https://github.com/boot2docker/boot2docker-cli/releases/download/v${VERSION}/boot2docker-v${VERSION}-${PLATFORM}_${ARCH}"
# Make binary executable
chmod +x boot2docker
echo "Done."
| #!/usr/bin/env bash
set -e
# Set version to latest unless set by user
if [ -z "$VERSION" ]; then
VERSION="1.0.1"
fi
echo "Downloading version ${VERSION}..."
# OS information (contains e.g. Darwin x86_64)
UNAME=`uname -a`
# Determine platform
if [[ $UNAME == *"Darwin"* ]]; then
PLATFORM="darwin"
elif [[ $UNAME == *"Cygwin"* ]]; then
PLATFORM="windows"
else
PLATFORM="linux"
fi
# Determine architecture
if [[ ($UNAME == *x86_64*) || ($UNAME == *amd64*) ]]
then
ARCH="amd64"
else
echo "Currently, there are no 32bit binaries provided."
echo "You will need to go get / go install github.com/boot2docker/boot2docker-cli."
exit 1
fi
# Download binary
curl -L -o boot2docker "https://github.com/boot2docker/boot2docker-cli/releases/download/v${VERSION}/boot2docker-v${VERSION}-${PLATFORM}_${ARCH}"
# Make binary executable
chmod +x boot2docker
echo "Done."
|
Add --skip-if-unchanged for yocto commit, also show a diff after building | #!/bin/bash
# Copyright (C) 2011 Colin Walters <walters@verbum.org>
#
set -e
set -x
WORKDIR=`pwd`
cd `dirname $0`
SCRIPT_SRCDIR=`pwd`
cd -
if test $(id -u) = 0; then
cat <<EOF
This script should not be run as root.
EOF
exit 1
fi
usage () {
echo "$0 BRANCH"
exit 1
}
BRANCH=$1
test -n "$BRANCH" || usage
shift
ARCH=x86
OSTREE_VER=$(cd $SCRIPT_SRCDIR && git describe)
BUILDDIR=$WORKDIR/tmp-eglibc
OSTREE_REPO=$WORKDIR/repo
BUILD_TAR=$BUILDDIR/deploy/images/gnomeos-contents-$BRANCH-qemu${ARCH}.tar.gz
BUILD_TIME=$(date -r $BUILD_TAR)
ostree --repo=${OSTREE_REPO} commit -s "Build from OSTree ${OSTREE_VER}" -b "gnomeos-yocto-$ARCH-$BRANCH" --tree=tar=${BUILD_TAR}
| #!/bin/bash
# Copyright (C) 2011 Colin Walters <walters@verbum.org>
#
set -e
set -x
WORKDIR=`pwd`
cd `dirname $0`
SCRIPT_SRCDIR=`pwd`
cd -
if test $(id -u) = 0; then
cat <<EOF
This script should not be run as root.
EOF
exit 1
fi
usage () {
echo "$0 BRANCH"
exit 1
}
BRANCH=$1
test -n "$BRANCH" || usage
shift
ARCH=x86
OSTREE_VER=$(cd $SCRIPT_SRCDIR && git describe)
BUILDDIR=$WORKDIR/tmp-eglibc
OSTREE_REPO=$WORKDIR/repo
BUILD_TAR=$BUILDDIR/deploy/images/gnomeos-contents-$BRANCH-qemu${ARCH}.tar.gz
BUILD_TIME=$(date -r $BUILD_TAR)
ostree --repo=${OSTREE_REPO} commit --skip-if-unchanged -s "Build from OSTree ${OSTREE_VER}" -b "gnomeos-yocto-$ARCH-$BRANCH" --tree=tar=${BUILD_TAR}
ostree --repo=${OSTREE_REPO} diff "gnomeos-yocto-$ARCH-$BRANCH"^ "gnomeos-yocto-$ARCH-$BRANCH"
|
Update testing if running inside Docker | #!/usr/bin/env bash
NAME="$(uname -s)"
RELEASE="$(uname -r)"
cd "$(dirname "$0")/.."
echo
if ([[ "$RELEASE" = *'boot2docker' ]] || [[ "$RELEASE" = *'moby' ]]) && \
[[ "$1" = 'docker' ]] ; then
echo 'You are using docker!'
echo 'I will assume bootstrap is being run as an entrypoint...'
echo
source 'script/bootstrap'
if [[ "$SHELL" != *'zsh' ]]; then
SHELL="$(which zsh)"
fi
exec "$SHELL" -li
else
echo "Uh oh! $NAME $RELEASE is not Docker..."
echo
fi
| #!/usr/bin/env bash
cd "$(dirname "$0")/.."
echo
if grep -qa docker /proc/1/cgroup && [[ "$1" = 'docker' ]] ; then
echo 'You are using docker!'
echo 'I will assume bootstrap is being run as an entrypoint...'
echo
source 'script/bootstrap'
if [[ "$SHELL" != *'zsh' ]]; then
SHELL="$(which zsh)"
fi
exec "$SHELL" -li
else
echo "Uh oh! $(uname -a) is not Docker..."
echo
fi
|
Remove reference to font CSS | #!/bin/sh
$PWD/generate_resume.py --outfile resume.html --css screen.css --font-css fonts.css --print-css print.css --inline-css -- template.html data.json
| #!/bin/sh
$PWD/generate_resume.py --outfile resume.html --css screen.css --print-css print.css --inline-css -- template.html data.json
|
Add clean to make sure build issues aren't a dirty repo | #!/bin/bash
# This script will build the project.
SWITCHES="-s --console=plain"
if [ $CIRCLE_PR_NUMBER ]; then
echo -e "Build Pull Request #$CIRCLE_PR_NUMBER => Branch [$CIRCLE_BRANCH]"
./gradlew build $SWITCHES
elif [ -z $CIRCLE_TAG ]; then
echo -e 'Build Branch with Snapshot => Branch ['$CIRCLE_BRANCH']'
./gradlew -Prelease.disableGitChecks=true snapshot $SWITCHES
elif [ $CIRCLE_TAG ]; then
echo -e 'Build Branch for Release => Branch ['$CIRCLE_BRANCH'] Tag ['$CIRCLE_TAG']'
case "$CIRCLE_TAG" in
*-rc\.*)
./gradlew -Prelease.disableGitChecks=true -Prelease.useLastTag=true candidate $SWITCHES
;;
*)
./gradlew -Prelease.disableGitChecks=true -Prelease.useLastTag=true final $SWITCHES
;;
esac
else
echo -e 'WARN: Should not be here => Branch ['$CIRCLE_BRANCH'] Tag ['$CIRCLE_TAG'] Pull Request ['$CIRCLE_PR_NUMBER']'
./gradlew build $SWITCHES
fi
EXIT=$?
exit $EXIT
| #!/bin/bash
# This script will build the project.
SWITCHES="-s --console=plain"
if [ $CIRCLE_PR_NUMBER ]; then
echo -e "Build Pull Request #$CIRCLE_PR_NUMBER => Branch [$CIRCLE_BRANCH]"
./gradlew clean build $SWITCHES
elif [ -z $CIRCLE_TAG ]; then
echo -e 'Build Branch with Snapshot => Branch ['$CIRCLE_BRANCH']'
./gradlew -Prelease.disableGitChecks=true clean snapshot $SWITCHES
elif [ $CIRCLE_TAG ]; then
echo -e 'Build Branch for Release => Branch ['$CIRCLE_BRANCH'] Tag ['$CIRCLE_TAG']'
case "$CIRCLE_TAG" in
*-rc\.*)
./gradlew -Prelease.disableGitChecks=true -Prelease.useLastTag=true clean candidate $SWITCHES
;;
*)
./gradlew -Prelease.disableGitChecks=true -Prelease.useLastTag=true clean final $SWITCHES
;;
esac
else
echo -e 'WARN: Should not be here => Branch ['$CIRCLE_BRANCH'] Tag ['$CIRCLE_TAG'] Pull Request ['$CIRCLE_PR_NUMBER']'
./gradlew clean build $SWITCHES
fi
EXIT=$?
exit $EXIT
|
Fix plugin-update --all when there are no plugins | # -*- sh -*-
plugin_update_command() {
if [ "$#" -lt 1 ]; then
display_error "usage: asdf plugin-update {<name> | --all} [git-ref]"
exit 1
fi
local plugin_name="$1"
local gitref="${2:-master}"
if [ "$plugin_name" = "--all" ]; then
for dir in "$(asdf_data_dir)"/plugins/*; do
echo "Updating $(basename "$dir")..."
(cd "$dir" && git fetch -p -u origin "$gitref:$gitref" && git checkout -f "$gitref")
done
else
local plugin_path
plugin_path="$(get_plugin_path "$plugin_name")"
check_if_plugin_exists "$plugin_name"
echo "Updating $plugin_name..."
(cd "$plugin_path" && git fetch -p -u origin "$gitref:$gitref" && git checkout -f "$gitref")
fi
}
plugin_update_command "$@"
| # -*- sh -*-
plugin_update_command() {
if [ "$#" -lt 1 ]; then
display_error "usage: asdf plugin-update {<name> | --all} [git-ref]"
exit 1
fi
local plugin_name="$1"
local gitref="${2:-master}"
if [ "$plugin_name" = "--all" ]; then
if [ -d "$(asdf_data_dir)"/plugins ]; then
    for dir in $(find "$(asdf_data_dir)"/plugins -type d -mindepth 1 -maxdepth 1); do
echo "Updating $(basename "$dir")..."
(cd "$dir" && git fetch -p -u origin "$gitref:$gitref" && git checkout -f "$gitref")
done
fi
else
local plugin_path
plugin_path="$(get_plugin_path "$plugin_name")"
check_if_plugin_exists "$plugin_name"
echo "Updating $plugin_name..."
(cd "$plugin_path" && git fetch -p -u origin "$gitref:$gitref" && git checkout -f "$gitref")
fi
}
plugin_update_command "$@"
|
Fix issue with current working directory in Linux / developer tool | #!/bin/bash
echo Uncompressing archive...
export TMPDIR=`mktemp -d /tmp/kXXXXXX`
ARCHIVE=`awk '/^__ARCHIVE_BELOW__/ {print NR + 1; exit 0; }' $0`
tail -n+$ARCHIVE $0 | tar xz -C $TMPDIR
CDIR=`pwd`
cd $TMPDIR
./titanium_runtime
cd $CDIR
rm -rf $TMPDIR
exit 0
__ARCHIVE_BELOW__
| #!/bin/bash
echo Uncompressing archive...
export TMPDIR=`mktemp -d /tmp/kXXXXXX`
ARCHIVE=`awk '/^__ARCHIVE_BELOW__/ {print NR + 1; exit 0; }' $0`
tail -n+$ARCHIVE $0 | tar xz -C $TMPDIR
$TMPDIR/titanium_runtime
rm -rf $TMPDIR
exit 0
__ARCHIVE_BELOW__
|
Fix mktemp parameter to make it Mac-ompatible | #!/bin/sh
#
# Usage: ./compare.sh [<domain> [<compare_domain>]]
domain=${1:-'apiary.io'}
compare_domain=${2:-"spf-orig.$domain"}
a="/$0"; a=${a%/*}; a=${a#/}; a=${a:-.}; BINDIR=`cd $a; pwd`
PATH=$BINDIR:$PATH
temp=`mktemp`
despf.sh $domain > ${temp}-1 2>/dev/null
despf.sh $compare_domain > ${temp}-2 2>/dev/null
trap "rm ${temp}-*" EXIT
cmp ${temp}-* && echo "Everything OK" || {
echo "Please update TXT records!" 1>&2
despf.sh | mkblocks.sh | xsel.sh
}
| #!/bin/sh
#
# Usage: ./compare.sh [<domain> [<compare_domain>]]
domain=${1:-'apiary.io'}
compare_domain=${2:-"spf-orig.$domain"}
a="/$0"; a=${a%/*}; a=${a#/}; a=${a:-.}; BINDIR=`cd $a; pwd`
PATH=$BINDIR:$PATH
temp=`mktemp /tmp/$$.XXXXXXXX`
despf.sh $domain > ${temp}-1 2>/dev/null
despf.sh $compare_domain > ${temp}-2 2>/dev/null
trap "rm ${temp}-*" EXIT
cmp ${temp}-* && echo "Everything OK" || {
echo "Please update TXT records!" 1>&2
despf.sh | mkblocks.sh | xsel.sh
}
|
Switch to `exec gunicorn` on prod for better signal handling | #!/bin/sh
./bin/run-common.sh
gunicorn testpilot.wsgi:application -b 0.0.0.0:${PORT:-8000} --log-file -
| #!/bin/sh
./bin/run-common.sh
exec gunicorn testpilot.wsgi:application -b 0.0.0.0:${PORT:-8000} --log-file -
|
Use a heredoc instead of a here-string | #!/bin/sh
set -e
# Read the "real" GPG program from the git repository configuration, defaulting to a PATH search for "gpg" just
# as git itself does.
unset GIT_CONFIG_PARAMETERS
GPG_PROGRAM=$(git config gpg.program || echo 'gpg')
PASSPHRASE_ARG=
if [ -n "${ATOM_GITHUB_CREDENTIAL_HELPER_SCRIPT_PATH:-}" ] && [ -n "${GIT_ASKPASS:-}" ]; then
PASSPHRASE=$(${GIT_ASKPASS})
PASSPHRASE_ARG="--passphrase-fd 3"
fi
exec ${GPG_PROGRAM} --batch --no-tty --yes ${PASSPHRASE_ARG} "$@" 3<<< "${PASSPHRASE}"
| #!/bin/sh
set -e
# Read the "real" GPG program from the git repository configuration, defaulting to a PATH search for "gpg" just
# as git itself does.
unset GIT_CONFIG_PARAMETERS
GPG_PROGRAM=$(git config gpg.program || echo 'gpg')
PASSPHRASE_ARG=
if [ -n "${ATOM_GITHUB_CREDENTIAL_HELPER_SCRIPT_PATH:-}" ] && [ -n "${GIT_ASKPASS:-}" ]; then
PASSPHRASE=$(${GIT_ASKPASS})
PASSPHRASE_ARG="--passphrase-fd 3"
fi
exec ${GPG_PROGRAM} --batch --no-tty --yes ${PASSPHRASE_ARG} "$@" 3<<EOM
${PASSPHRASE}
EOM
|
Change output format so that it is suitable for diff-cover. | #!/bin/bash
#
# $1 -- python source to run pylint on
#
if [ $# -lt 1 ]; then
# no source, just exit
exit 1
fi
file_suffix="$(eval echo \$$#|sed s?/?_?g)"
pylint_output="$(pylint \
--msg-template='{msg_id}:{line:3d},{column}: {obj}: {msg}' \
-r n --disable=C,R --rcfile=/dev/null \
--dummy-variables-rgx=_ \
--ignored-classes=Popen,TransactionSet \
--defining-attr-methods=__init__,_grabObjects,initialize,reset,start,setUp \
--load-plugins=intl \
$DISABLED_WARN_OPTIONS \
$DISABLED_ERR_OPTIONS \
$NON_STRICT_OPTIONS "$@" 2>&1 | \
egrep -v -f "$FALSE_POSITIVES" \
)"
# I0011 is the informational "Locally disabling ...." message
if [ -n "$(echo "$pylint_output" | fgrep -v '************* Module ' |\
grep -v '^I0011:')" ]; then
# Replace the Module line with the actual filename
pylint_output="$(echo "$pylint_output" | sed "s|\* Module .*|* Module $(eval echo \$$#)|")"
echo "$pylint_output" > pylint-out_$file_suffix
touch "pylint-$file_suffix-failed"
fi
| #!/bin/bash
#
# $1 -- python source to run pylint on
#
if [ $# -lt 1 ]; then
# no source, just exit
exit 1
fi
file_suffix="$(eval echo \$$#|sed s?/?_?g)"
pylint_output="$(pylint \
--msg-template='{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}' \
-r n --disable=C,R --rcfile=/dev/null \
--dummy-variables-rgx=_ \
--ignored-classes=Popen,TransactionSet \
--defining-attr-methods=__init__,_grabObjects,initialize,reset,start,setUp \
--load-plugins=intl \
$DISABLED_WARN_OPTIONS \
$DISABLED_ERR_OPTIONS \
$NON_STRICT_OPTIONS "$@" 2>&1 | \
egrep -v -f "$FALSE_POSITIVES" \
)"
# I0011 is the informational "Locally disabling ...." message
if [ -n "$(echo "$pylint_output" | fgrep -v '************* Module ' |\
grep -v '^I0011:')" ]; then
# Replace the Module line with the actual filename
pylint_output="$(echo "$pylint_output" | sed "s|\* Module .*|* Module $(eval echo \$$#)|")"
echo "$pylint_output" > pylint-out_$file_suffix
touch "pylint-$file_suffix-failed"
fi
|
Make the cluster names (more) unique | #!/bin/bash
set -e
cluster_name="codeship-gcloud-test"
codeship_google authenticate
echo "Starting a small cluster with a single instance"
gcloud config set compute/zone us-central1-a
gcloud container clusters create "${cluster_name}" \
--num-nodes 1 \
--machine-type g1-small
echo "Shutting everything down again"
gcloud container clusters delete "${cluster_name}" -q
| #!/bin/bash
set -e
date=$(date "+%Y-%m-%d_%H-%M")
cluster_name="codeship-gcloud-test-${date}"
codeship_google authenticate
echo "Starting a small cluster with a single instance"
gcloud config set compute/zone us-central1-a
gcloud container clusters create "${cluster_name}" \
--num-nodes 1 \
--machine-type g1-small
echo "Shutting everything down again"
gcloud container clusters delete "${cluster_name}" -q
|
Call yarn from travis javascript setup script. | which bower || npm install -g bower
# Check if the bower cache is valid, otherwise delete it
if ! cmp --silent bower.json vendor/assets/bower_components/bower.json; then
rm -rf vendor/assets/bower_components
fi
if [ -d vendor/assets/bower_components ]; then
# Using bower_components from cache
echo "bower assets installed... moving on."
else
bower install --allow-root -F --config.analytics=false
STATUS=$?
echo bower exit code: $STATUS
# fail the whole test suite if bower install failed
[ $STATUS = 0 ] || exit 1
[ -d vendor/assets/bower_components ] || exit 1
fi
| which bower || npm install -g bower
# Check if the bower cache is valid, otherwise delete it
if ! cmp --silent bower.json vendor/assets/bower_components/bower.json; then
rm -rf vendor/assets/bower_components
fi
yarn
if [ -d vendor/assets/bower_components ]; then
# Using bower_components from cache
echo "bower assets installed... moving on."
else
bower install --allow-root -F --config.analytics=false
STATUS=$?
echo bower exit code: $STATUS
# fail the whole test suite if bower install failed
[ $STATUS = 0 ] || exit 1
[ -d vendor/assets/bower_components ] || exit 1
fi
|
Use sudo and clean up. | #!/bin/bash
echo "Running apt-get update."
apt-get -qq update
echo "Running apt-get dist-upgrade."
DEBIAN_FRONTEND=noninteractive APT_LISTCHANGES_FRONTEND=none \
apt \
-o Dpkg::Options::=--force-confold \
-o Dpkg::Options::=--force-confdef \
-y --allow-downgrades --allow-remove-essential --allow-change-held-packages dist-upgrade
# Install VMware tools
apt install -y open-vm-tools-desktop fuse
# Install tools
apt install -y libbde-dev libbde-utils exfat-fuse exfat-utils pv
# Make sure pip is up to date
# pip install --upgrade pip
# Install tool for Rubber ducky
# Resources http://usbrubberducky.com/#!resources.md
# pip install --upgrade ducktoolkit
| #!/bin/bash
echo "Running apt-get update."
sudo apt-get -qq update
echo "Running apt-get dist-upgrade."
DEBIAN_FRONTEND=noninteractive APT_LISTCHANGES_FRONTEND=none \
sudo apt \
-o Dpkg::Options::=--force-confold \
-o Dpkg::Options::=--force-confdef \
-y --allow-downgrades --allow-remove-essential --allow-change-held-packages dist-upgrade
# Install VMware tools
sudo apt install -y open-vm-tools-desktop fuse
# Install tools
sudo apt install -y \
exfat-fuse \
exfat-utils \
libbde-dev \
libbde-utils \
pv
|
Fix git alias mapping bug | #! /usr/bin/env bash
# From https://gist.github.com/mwhite/6887990
if [ -f /usr/share/bash-completion/completions/git ] ; then
. /usr/share/bash-completion/completions/git
fi
function_exists() {
declare -f -F $1 > /dev/null
return $?
}
_git_aliases() {
git config --get-regexp "^alias\." |
grep -o "\.\([[:alnum:]]*\)" |
sed "s/\.//" |
grep -v "^[[:space:]]*$"
}
for al in `_git_aliases`; do
if [ $al != "pr" ]; then
alias g$al="git $al"
complete_func=_git_$(__git_aliased_command $al)
function_exists $complete_fnc && __git_complete g$al $complete_func
fi
done
| #! /usr/bin/env bash
# From https://gist.github.com/mwhite/6887990
if [ -f /usr/share/bash-completion/completions/git ] ; then
. /usr/share/bash-completion/completions/git
fi
function_exists() {
declare -f -F $1 > /dev/null
return $?
}
_git_aliases() {
git config --get-regexp "^alias\." |
grep -o "\.\([[:alnum:]]*\)" |
sed "s/\.//" |
grep -v "^[[:space:]]*$"
}
for al in `_git_aliases`; do
if [ $al != "pr" ]; then
alias g$al="git $al"
if [ "$(__git_aliased_command $al)" != "" ]; then
complete_func=_git_$(__git_aliased_command $al)
function_exists $complete_fnc && __git_complete g$al $complete_func
fi
fi
done
|
Add `gitall` function that will recursively run git commands in git-like sub-directories | #!/bin/sh
alias ga='git add'
alias gaa='git add -A'
alias gb='git branch'
alias gc='git commit'
alias gca='git commit -a'
alias gcam='git commit -a -m'
alias gcb='git-copy-branch-name'
alias gco='git checkout'
alias gcm='git commit -m'
alias gd='git diff'
alias gl='git pull --prune'
alias glg="git log --graph --decorate --oneline --abbrev-commit"
alias gp='git push origin HEAD'
alias gpa='git push origin --all'
alias gpr='gp && open-pr'
alias gs='git status -sb'
gi() {
curl -s "https://www.gitignore.io/api/$*";
}
| #!/bin/sh
alias ga='git add'
alias gaa='git add -A'
alias gb='git branch'
alias gc='git commit'
alias gca='git commit -a'
alias gcam='git commit -a -m'
alias gcb='git-copy-branch-name'
alias gco='git checkout'
alias gcm='git commit -m'
alias gd='git diff'
alias gl='git pull --prune'
alias glg="git log --graph --decorate --oneline --abbrev-commit"
alias gp='git push origin HEAD'
alias gpa='git push origin --all'
alias gpr='gp && open-pr'
alias gs='git status -sb'
gi() {
curl -s "https://www.gitignore.io/api/$*";
}
gitall() {
find . -maxdepth 1 -type d -exec git --git-dir={}/.git --work-tree=$PWD/{} "$@" \;
}
|
Add sidekiq admin startup script | #!/bin/bash
export RAILS_ENV=production
bundle exec sidekiq -l /var/log/sidekiq.log --environment production
| #!/bin/bash
export RAILS_ENV=production
bundle exec rackup sidekiq-admin.ru -p 3000 -E production &
bundle exec sidekiq -l /var/log/sidekiq.log --environment production
|
Fix builder script commit printout | #!/bin/sh -x
COQDIR=$1
BRANCH=$2
BUILDNAME=$3
export PATH=$COQDIR:$PATH
export TERM=dumb
D=~/builder/runs/$(date +%s)
mkdir -p $D
exec >$D/run-out.txt 2>&1
echo "Coq directory: $COQDIR"
echo "Branch: $BRANCH"
echo "Build name: $BUILDNAME"
## Print the Coq version
coqtop </dev/null
cd ~/builder/runs && ( ls | head -n -20 | xargs rm -rf )
cd $D
git clone -b $BRANCH git://g.csail.mit.edu/fscq-impl fscq
echo "Building commit:" "`git show --no-patch --pretty=oneline $BRANCH`"
cd fscq/src
script $D/make-out.txt -c 'time make'
script $D/checkproofs-out.txt -c 'time make checkproofs J=24'
cat $D/checkproofs-out.txt | grep -v '^Checking task ' > $D/checkproofs-errors.txt
cd $D
python3 ~/builder/parse-errors.py $BUILDNAME
| #!/bin/sh -x
COQDIR=$1
BRANCH=$2
BUILDNAME=$3
export PATH=$COQDIR:$PATH
export TERM=dumb
D=~/builder/runs/$(date +%s)
mkdir -p $D
exec >$D/run-out.txt 2>&1
echo "Coq directory: $COQDIR"
echo "Branch: $BRANCH"
echo "Build name: $BUILDNAME"
## Print the Coq version
coqtop </dev/null
cd ~/builder/runs && ( ls | head -n -20 | xargs rm -rf )
cd $D
git clone -b $BRANCH git://g.csail.mit.edu/fscq-impl fscq
cd fscq/src
echo "Building commit:" "`git show --no-patch --pretty=oneline $BRANCH`"
script $D/make-out.txt -c 'time make'
script $D/checkproofs-out.txt -c 'time make checkproofs J=24'
cat $D/checkproofs-out.txt | grep -v '^Checking task ' > $D/checkproofs-errors.txt
cd $D
python3 ~/builder/parse-errors.py $BUILDNAME
|
Upgrade CI to Docker 20.10.10 | #!/bin/bash
set -e
version="20.10.9"
echo "https://download.docker.com/linux/static/stable/x86_64/docker-$version.tgz";
| #!/bin/bash
set -e
version="20.10.10"
echo "https://download.docker.com/linux/static/stable/x86_64/docker-$version.tgz";
|
Remove old unused handlebar systemtags template | #!/usr/bin/env bash
REPODIR=`git rev-parse --show-toplevel`
cd $REPODIR
# Comments files plugin
node node_modules/handlebars/bin/handlebars -n OCA.Comments.Templates apps/comments/src/templates -f apps/comments/src/templates.js
# Settings
node node_modules/handlebars/bin/handlebars -n OC.Settings.Templates apps/settings/js/templates -f apps/settings/js/templates.js
# Systemtags
node node_modules/handlebars/bin/handlebars -n OC.SystemTags.Templates core/js/systemtags/templates -f core/js/systemtags/templates.js
# Files app
node node_modules/handlebars/bin/handlebars -n OCA.Files.Templates apps/files/js/templates -f apps/files/js/templates.js
# Sharing
node node_modules/handlebars/bin/handlebars -n OCA.Sharing.Templates apps/files_sharing/js/templates -f apps/files_sharing/js/templates.js
# Files external
node node_modules/handlebars/bin/handlebars -n OCA.Files_External.Templates apps/files_external/js/templates -f apps/files_external/js/templates.js
if [[ $(git diff --name-only) ]]; then
echo "Please submit your compiled handlebars templates"
echo
git diff
exit 1
fi
echo "All up to date! Carry on :D"
exit 0
| #!/usr/bin/env bash
REPODIR=`git rev-parse --show-toplevel`
cd $REPODIR
# Comments files plugin
node node_modules/handlebars/bin/handlebars -n OCA.Comments.Templates apps/comments/src/templates -f apps/comments/src/templates.js
# Settings
node node_modules/handlebars/bin/handlebars -n OC.Settings.Templates apps/settings/js/templates -f apps/settings/js/templates.js
# Files app
node node_modules/handlebars/bin/handlebars -n OCA.Files.Templates apps/files/js/templates -f apps/files/js/templates.js
# Sharing
node node_modules/handlebars/bin/handlebars -n OCA.Sharing.Templates apps/files_sharing/js/templates -f apps/files_sharing/js/templates.js
# Files external
node node_modules/handlebars/bin/handlebars -n OCA.Files_External.Templates apps/files_external/js/templates -f apps/files_external/js/templates.js
if [[ $(git diff --name-only) ]]; then
echo "Please submit your compiled handlebars templates"
echo
git diff
exit 1
fi
echo "All up to date! Carry on :D"
exit 0
|
Make dash available as /bin/sh | GIT=https://github.com/redox-os/dash.git
BRANCH=redox
HOST=x86_64-elf-redox
export AR="${HOST}-ar"
export AS="${HOST}-as"
export CC="${HOST}-gcc"
export CXX="${HOST}-g++"
export LD="${HOST}-ld"
export NM="${HOST}-nm"
export OBJCOPY="${HOST}-objcopy"
export OBJDUMP="${HOST}-objdump"
export RANLIB="${HOST}-ranlib"
export READELF="${HOST}-readelf"
export STRIP="${HOST}-strip"
function recipe_version {
printf "r%s.%s" "$(git rev-list --count HEAD)" "$(git rev-parse --short HEAD)"
skip=1
}
function recipe_update {
echo "skipping update"
skip=1
}
function recipe_build {
./autogen.sh
./configure --host=${HOST} --prefix=/
make
skip=1
}
function recipe_test {
echo "skipping test"
skip=1
}
function recipe_clean {
make clean
skip=1
}
function recipe_stage {
dest="$(realpath $1)"
make DESTDIR="$dest" install
skip=1
}
| GIT=https://github.com/redox-os/dash.git
BRANCH=redox
HOST=x86_64-elf-redox
export AR="${HOST}-ar"
export AS="${HOST}-as"
export CC="${HOST}-gcc"
export CXX="${HOST}-g++"
export LD="${HOST}-ld"
export NM="${HOST}-nm"
export OBJCOPY="${HOST}-objcopy"
export OBJDUMP="${HOST}-objdump"
export RANLIB="${HOST}-ranlib"
export READELF="${HOST}-readelf"
export STRIP="${HOST}-strip"
function recipe_version {
printf "r%s.%s" "$(git rev-list --count HEAD)" "$(git rev-parse --short HEAD)"
skip=1
}
function recipe_update {
echo "skipping update"
skip=1
}
function recipe_build {
./autogen.sh
./configure --host=${HOST} --prefix=/
make
skip=1
}
function recipe_test {
echo "skipping test"
skip=1
}
function recipe_clean {
make clean
skip=1
}
function recipe_stage {
dest="$(realpath $1)"
make DESTDIR="$dest" install
cp "$1/bin/dash" "$1/bin/sh" # TODO: symlink when Redox supports them
skip=1
}
|
Remove a reference to the MacPython website from the error dialog that is displayed when an application terminates with an uncaught exception. | #!/bin/sh
#
# This is the default apptemplate error script
#
if ( test -n "$2" ) ; then
echo "$1 Error"
echo "An unexpected error has occurred during execution of the main script"
echo ""
echo "$2: $3"
echo ""
echo "See the Console for a detailed traceback."
else
echo "$1 Error"
echo "MacPython 2.3 is required to run this application.";
echo "ERRORURL: http://homepages.cwi.nl/~jack/macpython/index.html Visit the MacPython Website";
fi
| #!/bin/sh
#
# This is the default apptemplate error script
#
if ( test -n "$2" ) ; then
echo "$1 Error"
echo "An unexpected error has occurred during execution of the main script"
echo ""
echo "$2: $3"
echo ""
echo "See the Console for a detailed traceback."
else
echo "$1 Error"
# Usage: ERRORURL <anURL> <a button label>, this is used by the
# bundle runner to put up a dialog.
#echo "ERRORURL: http://www.python.org/ Visit the Python Website
# echo "ERRORURL: http://homepages.cwi.nl/~jack/macpython/index.html Visit the MacPython Website"
fi
|
Make importer script use scrapy | while (true);
do ./manage.py customs_daily_tasks ;
echo -e "\nSleeping...\n"
sleep 24h;
done;
| wget https://openhatch.org/+api/v1/customs/tracker_model/\?format\=yaml\&limit\=500 -O /tmp/bug-trackers.yaml
pushd ../oh-bugimporters
env/bin/scrapy runspider bugimporters/main.py -a input_filename=/tmp/bug-trackers.yaml -s FEED_FORMAT=jsonlines -s FEED_URI=/tmp/results.jsonlines -s LOG_FILE=/tmp/scrapy-log -s CONCURRENT_REQUESTS_PER_DOMAIN=1 -s CONCURRENT_REQUESTS=200 -s DEPTH_PRIORITY=1 -s SCHEDULER_DISK_QUEUE=scrapy.squeue.PickleFifoDiskQueue -s SCHEDULER_MEMORY_QUEUE=scrapy.squeue.FifoMemoryQueue
popd
python manage.py import_bugimporter_data /tmp/results.jsonlines
|
Add Java 15 to download script | #!/bin/bash
set -e
case "$1" in
java8)
echo "https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u265-b01/OpenJDK8U-jdk_x64_linux_hotspot_8u265b01.tar.gz"
;;
java11)
echo "https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.8%2B10/OpenJDK11U-jdk_x64_linux_hotspot_11.0.8_10.tar.gz"
;;
java14)
echo "https://github.com/AdoptOpenJDK/openjdk14-binaries/releases/download/jdk-14.0.2%2B12/OpenJDK14U-jdk_x64_linux_hotspot_14.0.2_12.tar.gz"
;;
*)
echo $"Unknown java version"
exit 1
esac
| #!/bin/bash
set -e
case "$1" in
java8)
echo "https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u265-b01/OpenJDK8U-jdk_x64_linux_hotspot_8u265b01.tar.gz"
;;
java11)
echo "https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.8%2B10/OpenJDK11U-jdk_x64_linux_hotspot_11.0.8_10.tar.gz"
;;
java14)
echo "https://github.com/AdoptOpenJDK/openjdk14-binaries/releases/download/jdk-14.0.2%2B12/OpenJDK14U-jdk_x64_linux_hotspot_14.0.2_12.tar.gz"
;;
java15)
echo "https://github.com/AdoptOpenJDK/openjdk15-binaries/releases/download/jdk-15%2B36/OpenJDK15U-jdk_x64_linux_hotspot_15_36.tar.gz"
;;
*)
echo $"Unknown java version"
exit 1
esac
|
Install additional test requires in travis CI | #!/bin/bash
scripts_dir=$(dirname $(readlink -e "${BASH_SOURCE[0]}"))
source "$scripts_dir/common";
onnx_dir="$PWD"
# install onnx
cd $onnx_dir
ccache -z
pip install .
ccache -s
# onnx tests
cd $onnx_dir
pip install pytest-cov
pytest
# check auto-gen files up-to-date
python onnx/defs/gen_doc.py -o docs/Operators.md
python onnx/gen_proto.py
git diff --exit-code
| #!/bin/bash
scripts_dir=$(dirname $(readlink -e "${BASH_SOURCE[0]}"))
source "$scripts_dir/common";
onnx_dir="$PWD"
# install onnx
cd $onnx_dir
ccache -z
pip install .
ccache -s
# onnx tests
cd $onnx_dir
pip install pytest-cov nbval
pytest
# check auto-gen files up-to-date
python onnx/defs/gen_doc.py -o docs/Operators.md
python onnx/gen_proto.py
git diff --exit-code
|
Add node modules to path | if [ -d "/brew/bin" ]; then
PATH="/brew/bin:$PATH"
fi
if [ -d "/brew/sbin" ]; then
PATH="/brew/sbin:$PATH"
fi
# Make /usr/local/bin come earlier on the path to make brew on OSX work well
# when installed there.
if [ -d "/usr/local/bin" ]; then
PATH="/usr/local/bin:$PATH"
fi
if [ -d "/usr/local/sbin" ]; then
PATH="/usr/local/sbin:$PATH"
fi
# set PATH so it includes user's private bin if it exists
if [ -d "$HOME/bin" ] ; then
PATH="$HOME/bin:$PATH"
fi
if [ -d "$HOME/src/go/bin" ] ; then
PATH="$HOME/src/go/bin:$PATH"
fi
| if [ -d "/brew/bin" ]; then
PATH="/brew/bin:$PATH"
fi
if [ -d "/brew/sbin" ]; then
PATH="/brew/sbin:$PATH"
fi
# Make /usr/local/bin come earlier on the path to make brew on OSX work well
# when installed there.
if [ -d "/usr/local/bin" ]; then
PATH="/usr/local/bin:$PATH"
fi
if [ -d "/usr/local/sbin" ]; then
PATH="/usr/local/sbin:$PATH"
fi
# set PATH so it includes user's private bin if it exists
if [ -d "$HOME/bin" ] ; then
PATH="$HOME/bin:$PATH"
fi
if [ -d "$HOME/src/go/bin" ] ; then
PATH="$HOME/src/go/bin:$PATH"
fi
if [ -d "$HOME/.node_modules_global/bin" ] ; then
PATH="$HOME/.node_modules_global/bin:$PATH"
fi
|
Use p_source from rules for loop-aes | #!/bin/sh
p_source="$pkg_dist_dir/loop-AES-v3.7b.tar.bz2"
use_toolchain target
export ARCH=mips
pkg_build() {
p_run make \
LINUX_SOURCE="$LINUX_KERNEL" \
KBUILD_OUTPUT="$LINUX_KERNEL" \
USE_KBUILD=y MODINST=n RUNDM=n
}
pkg_install() {
cd "tmp-d-kbuild" &&
p_install_modules "loop"
}
| #!/bin/sh
use_toolchain target
export ARCH=mips
pkg_build() {
p_run make \
LINUX_SOURCE="$LINUX_KERNEL" \
KBUILD_OUTPUT="$LINUX_KERNEL" \
USE_KBUILD=y MODINST=n RUNDM=n
}
pkg_install() {
cd "tmp-d-kbuild" &&
p_install_modules "loop"
}
|
Update outdated flag for static builds of TagLib | #!/bin/sh
. ../build_config.sh
rm -rf tmp
mkdir tmp
cd tmp
wget http://taglib.github.io/releases/$TAGLIB_VERSION.tar.gz
tar -xf $TAGLIB_VERSION.tar.gz
cd $TAGLIB_VERSION/
cmake \
-D CMAKE_CXX_FLAGS="-fPIC" \
-DCMAKE_INSTALL_PREFIX=$PREFIX \
-DCMAKE_BUILD_TYPE=Release \
-DENABLE_STATIC=ON \
.
make
make install
cd ../..
rm -r tmp
| #!/bin/sh
. ../build_config.sh
rm -rf tmp
mkdir tmp
cd tmp
wget http://taglib.github.io/releases/$TAGLIB_VERSION.tar.gz
tar -xf $TAGLIB_VERSION.tar.gz
cd $TAGLIB_VERSION/
cmake \
-D CMAKE_CXX_FLAGS="-fPIC" \
-DCMAKE_INSTALL_PREFIX=$PREFIX \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=OFF \
.
make
make install
cd ../..
rm -r tmp
|
Fix config load issue when some vars contain spaces | #!/usr/bin/env bash
set -axe
CONFIG_FILE="/opt/hlds/startup.cfg"
if [ -r "${CONFIG_FILE}" ]; then
source "${CONFIG_FILE}"
fi
EXTRA_OPTIONS="${@}"
EXECUTABLE="/opt/hlds/hlds_run"
GAME="${GAME:-cstrike}"
MAXPLAYERS="${MAXPLAYERS:-32}"
START_MAP="${START_MAP:-de_dust2}"
SERVER_NAME="${SERVER_NAME:-Counter-Strike 1.6 Server}"
OPTIONS=( "-game" "${GAME}" "+maxplayers" "${MAXPLAYERS}" "+map" "${START_MAP}" "+hostname" "\"${SERVER_NAME}\"")
if [ -z "${RESTART_ON_FAIL}" ]; then
OPTIONS+=('-norestart')
fi
if [ -n "${ADMIN_STEAM}" ]; then
echo "\"STEAM_${ADMIN_STEAM}\" \"\" \"abcdefghijklmnopqrstu\" \"ce\"" >> "/opt/hlds/cstrike/addons/amxmodx/configs/users.ini"
fi
env > "${CONFIG_FILE}"
exec "${EXECUTABLE}" "${OPTIONS[@]}" ${EXTRA_OPTIONS}
| #!/usr/bin/env bash
set -axe
CONFIG_FILE="/opt/hlds/startup.cfg"
if [ -r "${CONFIG_FILE}" ]; then
# TODO: make config save/restore mechanism more solid
set +e
source "${CONFIG_FILE}"
set -e
fi
EXTRA_OPTIONS="${@}"
EXECUTABLE="/opt/hlds/hlds_run"
GAME="${GAME:-cstrike}"
MAXPLAYERS="${MAXPLAYERS:-32}"
START_MAP="${START_MAP:-de_dust2}"
SERVER_NAME="${SERVER_NAME:-Counter-Strike 1.6 Server}"
OPTIONS=( "-game" "${GAME}" "+maxplayers" "${MAXPLAYERS}" "+map" "${START_MAP}" "+hostname" "\"${SERVER_NAME}\"")
if [ -z "${RESTART_ON_FAIL}" ]; then
OPTIONS+=('-norestart')
fi
if [ -n "${ADMIN_STEAM}" ]; then
echo "\"STEAM_${ADMIN_STEAM}\" \"\" \"abcdefghijklmnopqrstu\" \"ce\"" >> "/opt/hlds/cstrike/addons/amxmodx/configs/users.ini"
fi
set > "${CONFIG_FILE}"
exec "${EXECUTABLE}" "${OPTIONS[@]}" ${EXTRA_OPTIONS}
|
Update jenkins provision script for centos | #!/usr/bin/env bash
sudo yum install -y java-11-openjdk
sudo wget -O /etc/yum.repos.d/jenkins.repo https://pkg.jenkins.io/redhat-stable/jenkins.repo
sudo rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io.key
sudo yum install -y jenkins
sudo usermod -aG docker vagrant
sudo service jenkins start
| #!/usr/bin/env bash
sudo yum install -y java-11-openjdk
sudo wget -O /etc/yum.repos.d/jenkins.repo https://pkg.jenkins.io/redhat-stable/jenkins.repo
sudo rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io.key
sudo yum install -y jenkins
sudo service jenkins start |
Exit on error when building docs. | #!/bin/sh
# pyinfra
# File: scripts/build_docs.sh
# Desc: build the pyinfra docs
# Build the apidoc/*.rst docs
rm -f docs/apidoc/*.rst
sh scripts/generate_api_docs.sh
# Build the modules/*.rst docs
rm -f docs/modules/*.rst
python scripts/generate_modules_docs.py
# Build the facts.rst doc
rm -f docs/facts.rst
python scripts/generate_facts_doc.py
# Build the HTML docs
sphinx-build -a docs/ docs/build/
| #!/bin/sh
# pyinfra
# File: scripts/build_docs.sh
# Desc: build the pyinfra docs
set -e
# Build the apidoc/*.rst docs
rm -f docs/apidoc/*.rst
sh scripts/generate_api_docs.sh
# Build the modules/*.rst docs
rm -f docs/modules/*.rst
python scripts/generate_modules_docs.py
# Build the facts.rst doc
rm -f docs/facts.rst
python scripts/generate_facts_doc.py
# Build the HTML docs
sphinx-build -a docs/ docs/build/
|
Update so profile_helper.sh uses greadlink | #!/bin/bash
# git-update.sh
for user in ~/Workspace/src/github.com/*
do
if [[ ! $user =~ $GITHUB_USER_NAME ]]; then
for project in $user/*
do
if [ -d $project/.git ]; then
echo "git $project"
cd $project
git fetch --all
git reset --hard
git clean -df
git merge --ff-only
git submodule update --init --recursive
echo ""
fi
done
fi
done
for bundle in ~/.vim/bundle/*
do
if [ -d $bundle/.git/ ]; then
echo "git $bundle"
cd $bundle
git fetch --all
git reset --hard
git clean -df
git merge --ff-only
git submodule update --init --recursive
echo ""
fi
done
| #!/bin/bash
# git-update.sh
for user in ~/Workspace/src/github.com/*
do
if [[ ! $user =~ $GITHUB_USER_NAME ]]; then
for project in $user/*
do
if [ -d $project/.git ]; then
echo "git $project"
cd $project
if [[ $project == *base16-shell* ]]; then
git pull --all
else
#statements
git fetch --all
git reset --hard
git clean -df
git merge --ff-only
git submodule update --init --recursive
fi
echo ""
fi
done
fi
done
for bundle in ~/.vim/bundle/*
do
if [ -d $bundle/.git/ ]; then
echo "git $bundle"
cd $bundle
git fetch --all
git reset --hard
git clean -df
git merge --ff-only
git submodule update --init --recursive
echo ""
fi
done
|
Revert "Chown subdirs to non root user at copmile" | #!/bin/bash
set -e
function finish {
echo "Cleaning up"
cd $WORKINGDIR
if [ -e ../temp.ini ]
then cp temp.ini ../halite.ini;
fi
}
trap finish EXIT
echo "Setting up"
WORKINGDIR=$PWD
if [ -e ../halite.ini ]
then cp ../halite.ini temp.ini;
fi
echo "Worker tests"
cd worker
python3 testWorker.py
echo "Environment tests"
cd ../environment
python3 testenv.py
| #!/bin/bash
set -e
function finish {
echo "Cleaning up"
cd $WORKINGDIR
if [ -e ../temp.ini ]
then cp temp.ini ../halite.ini;
fi
}
trap finish EXIT
echo "Setting up"
WORKINGDIR=$PWD
if [ -e ../halite.ini ]
then cp ../halite.ini temp.ini;
fi
cp tests.ini ../halite.ini
python3 setupMysql.py || python setupMysql.py
echo "Website tests"
phpunit --stderr website/
echo "Worker tests"
cd worker
python3 testWorker.py
echo "Environment tests"
cd ../environment
python3 testenv.py
|
Append comment to build installer on Windows. | # Gets depot_tools.
# http://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/depot_tools_tutorial.html#_setting_up
# On Windows, Check below page.
# http://www.chromium.org/developers/how-tos/build-instructions-windows
git clone https://chromium.googlesource.com/chromium/tools/depot_tools
export PATH=$PATH:/path_to_parent_dir/depot_tools
# Configures git.
git config --global user.name "Your Name"
git config --global user.email "your.email@example.com"
git config --global core.autocrlf false
git config --global core.filemode false
git config --global color.ui true
# --- Fetches chromium ---
# fetch chromium # Basic checkout for desktop Chromium
# fetch blink # Chromium code with Blink checked out to tip-of-tree
# fetch android # Chromium checkout for Android platform
# fetch ios
fetch blink
gclient sync
cd src
# Makes branch Aoi.
git new-branch Aoi
ninja -C out/Debug chrome
ninja -C out/Release chrome
ninja -C out/Release build_app_dmg
# Updates chromium source code.
git rebase-update
gclient sync
# Run following command after changing code.
gclient runhook
| # Gets depot_tools.
# http://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/depot_tools_tutorial.html#_setting_up
# On Windows, Check below page.
# http://www.chromium.org/developers/how-tos/build-instructions-windows
git clone https://chromium.googlesource.com/chromium/tools/depot_tools
export PATH=$PATH:/path_to_parent_dir/depot_tools
# Configures git.
git config --global user.name "Your Name"
git config --global user.email "your.email@example.com"
git config --global core.autocrlf false
git config --global core.filemode false
git config --global color.ui true
# --- Fetches chromium ---
# fetch chromium # Basic checkout for desktop Chromium
# fetch blink # Chromium code with Blink checked out to tip-of-tree
# fetch android # Chromium checkout for Android platform
# fetch ios
fetch blink
gclient sync
cd src
# Makes branch Aoi.
git new-branch Aoi
ninja -C out/Debug chrome
ninja -C out/Release chrome
# Creates installer. Use mini_installer on Windows.
ninja -C out/Release build_app_dmg
# Updates chromium source code.
git rebase-update
gclient sync
# Run following command after changing code.
gclient runhook
|
Use a fixed version of gRPC | #!/bin/sh
INSTALL_DIR=$HOME/deps
bold=$(tput bold)
normal=$(tput sgr0)
green=$(tput setaf 2)
if [ -d "$INSTALL_DIR/include/google" ] && [ -d "$INSTALL_DIR/include/grpc" ] && [ -d "$INSTALL_DIR/include/grpcpp" ] && [ -f "$INSTALL_DIR/lib/libprotobuf.a" ] && [ -f "$INSTALL_DIR/lib/libgrpc.a" ] && [ -f "$INSTALL_DIR/bin/protoc" ]; then
echo "${bold}${green}gRPC is already installed${normal}"
else
echo "${bold}${green}Install gRPC${normal}"
set -ex
git clone -b "$(curl -L https://grpc.io/release)" --single-branch --depth 1 https://github.com/grpc/grpc
cd grpc
git submodule update --init
PROTOBUF_CONFIG_OPTS="--prefix=$INSTALL_DIR" make prefix="$INSTALL_DIR" -j4
PROTOBUF_CONFIG_OPTS="--prefix=$INSTALL_DIR" sudo -E make prefix="$INSTALL_DIR" install
cd third_party/protobuf
sudo -E make install
# make clean
fi | #!/bin/sh
INSTALL_DIR=$HOME/deps
bold=$(tput bold)
normal=$(tput sgr0)
green=$(tput setaf 2)
if [ -d "$INSTALL_DIR/include/google" ] && [ -d "$INSTALL_DIR/include/grpc" ] && [ -d "$INSTALL_DIR/include/grpcpp" ] && [ -f "$INSTALL_DIR/lib/libprotobuf.a" ] && [ -f "$INSTALL_DIR/lib/libgrpc.a" ] && [ -f "$INSTALL_DIR/bin/protoc" ]; then
echo "${bold}${green}gRPC is already installed${normal}"
else
echo "${bold}${green}Install gRPC${normal}"
set -ex
git clone -b v1.34.0 --single-branch --depth 1 https://github.com/grpc/grpc
cd grpc
git submodule update --init
PROTOBUF_CONFIG_OPTS="--prefix=$INSTALL_DIR" make prefix="$INSTALL_DIR" -j4
PROTOBUF_CONFIG_OPTS="--prefix=$INSTALL_DIR" sudo -E make prefix="$INSTALL_DIR" install
cd third_party/protobuf
sudo -E make install
# make clean
fi |
Print git log in buildkite script | docker system prune --force
docker pull schani/quicktype
docker build --cache-from schani/quicktype -t quicktype .
docker run -t --workdir="/app" -e FIXTURE quicktype npm test
| git --no-pager log 'HEAD~5..HEAD'
docker system prune --force
docker pull schani/quicktype
docker build --cache-from schani/quicktype -t quicktype .
docker run -t --workdir="/app" -e FIXTURE quicktype npm test
|
Fix tests and use aws cli not directly | aws ec2 describe-nat-gateways --output table \
--query "NatGateways[$(auto_filter NatGatewayId State SubnetId VpcId -- $@)].{ \
\"1.Id\":NatGatewayId, \
\"2.State\":State, \
\"3.Vpc\":VpcId, \
\"4.Subnet\":SubnetId, \
\"5.NetworkInterface\":NatGatewayAddresses[0].NetworkInterfaceId, \
\"6.PrivateIp\":NatGatewayAddresses[0].PrivateIp, \
\"7.PublicIp\":NatGatewayAddresses[0].PublicIp}"
| awscli ec2 describe-nat-gateways --output table \
--query "NatGateways[$(auto_filter NatGatewayId State SubnetId VpcId -- $@)].{ \
\"1.Id\":NatGatewayId, \
\"2.State\":State, \
\"3.Vpc\":VpcId, \
\"4.Subnet\":SubnetId, \
\"5.NetworkInterface\":NatGatewayAddresses[0].NetworkInterfaceId, \
\"6.PrivateIp\":NatGatewayAddresses[0].PrivateIp, \
\"7.PublicIp\":NatGatewayAddresses[0].PublicIp}"
|
Check for migrations during test runs | #!/bin/sh
set -ex
flake8 .
rm -f .coverage
coverage run manage.py test --noinput --settings=service_info.settings.dev "$@"
coverage report
| #!/bin/sh
set -ex
flake8 .
rm -f .coverage
coverage run manage.py test --noinput --settings=service_info.settings.dev "$@"
coverage report
python manage.py makemigrations --dry-run | grep 'No changes detected' || (echo 'There are changes which require migrations.' && exit 1)
|
Add keys to git push | #!/bin/sh
# commit all changes to gxd
git config --global user.email $ADMIN_EMAIL
git config --global user.name $ADMIN_NAME
set -e
BRANCH=$1
cd $GXD
if [ -n "$BRANCH" ] ; then
git checkout master
git checkout -b $BRANCH || git checkout $BRANCH
git add .
git commit -m "incoming for $TODAY"
git push --set-upstream origin $BRANCH
# submit pull request
git request-pull master ${GITURL} $BRANCH
git checkout master
# git merge $BRANCH
# git branch -d $BRANCH
else
git add .
git commit -m "incoming for $TODAY"
fi
| #!/bin/sh
# commit all changes to gxd
git config --global user.email $ADMIN_EMAIL
git config --global user.name $ADMIN_NAME
set -e
BRANCH=$1
cd $GXD
if [ -n "$BRANCH" ] ; then
git checkout master
git checkout -b $BRANCH || git checkout $BRANCH
git add .
git commit -m "incoming for $TODAY"
ssh-agent bash -c "ssh-add ~/.ssh/gxd_rsa; git push --set-upstream origin $BRANCH"
# submit pull request
git request-pull master ${GITURL} $BRANCH
git checkout master
# git merge $BRANCH
# git branch -d $BRANCH
else
git add .
git commit -m "incoming for $TODAY"
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.