Instruction
stringlengths
14
778
input_code
stringlengths
0
4.24k
output_code
stringlengths
1
5.44k
Kill some now-unnecessary environment variables
#!/bin/bash # check for the candiru user USERID=`id -n -u` if [ "$USERID" != "candiru" ]; then echo "Error: user must be candiru to start commerce" exit 1 fi NODE_HOME=/platform/nodejs # number of subprocesses export CLUSTER=5 # optimize things (TODO: trunk, staging also?) export NODE_ENV="production" # Use this PID file export R3S_PID="/redfin/r3s/.pid" export R3S_CONFIGS="/redfin/r3s/conf" export DEBUG="rf:*" LOGDIR=/redfin/r3s/logs echo "Starting R3S..." echo "Logging to: $LOGDIR/r3s.log" nohup $NODE_HOME/bin/node /redfin/r3s/start.js /redfin/r3s/compiled-app.js >> $LOGDIR/r3s.log 2>&1 &
#!/bin/bash # check for the candiru user USERID=`id -n -u` if [ "$USERID" != "candiru" ]; then echo "Error: user must be candiru to start commerce" exit 1 fi NODE_HOME=/platform/nodejs # number of subprocesses export CLUSTER=5 # optimize things (TODO: trunk, staging also?) export NODE_ENV="production" # Use this PID file export R3S_PID="/redfin/r3s/.pid" export R3S_CONFIGS="/redfin/r3s/conf" LOGDIR=/redfin/r3s/logs echo "Starting R3S..." echo "Logging to: $LOGDIR/r3s.log" nohup $NODE_HOME/bin/node /redfin/r3s/start.js /redfin/r3s/compiled-app.js >> $LOGDIR/r3s.log 2>&1 &
Make it work for Python 3
sudo easy_install pip pip install appdirs pip install requests --upgrade
sudo apt-get update sudo apt-get install python3-setuptools sudo easy_install3 pip sudo mv /usr/local/bin/pip /usr/local/bin/pip3 pip3 install appdirs pip3 install requests --upgrade
Fix port with dev env
#!/bin/bash if [ $1 ]; then if [ $1 = "stop" ]; then echo "Kill all node and mongo processes" killall node killall mongod elif [ $1 = "start" ]; then echo "Kill all node and mongo processes" killall node killall mongod echo "Start mongo..." mongod --dbpath data/db --logpath ./data/db/mongo.log --port 27021 & echo "Start node..." nohup node server.js & echo "Server started" fi else echo "Type stop or start" fi
#!/bin/bash if [ $1 ]; then if [ $1 = "stop" ]; then echo "Kill all node and mongo processes" killall node killall mongod elif [ $1 = "start" ]; then echo "Kill all node and mongo processes" killall node killall mongod echo "Start mongo..." mongod --dbpath data/db --logpath ./data/db/mongo.log --port 27020 & echo "Start node..." nohup node server.js & echo "Server started" fi else echo "Type stop or start" fi
Make $USECUDA not case sensitive
#!/bin/bash DOCKERHUB_REPO='biobright/python3-opencv3' # allows env var to be overridden by shell, e.g. USECUDA=OFF ./build.sh : ${USECUDA:=OFF} if [ "${USECUDA}" = "ON" ]; then TAG='cuda'; else TAG='nocuda'; fi docker build -t ${DOCKERHUB_REPO}:${TAG} . \ && docker push ${DOCKERHUB_REPO}:${TAG}
#!/bin/bash DOCKERHUB_REPO='biobright/python3-opencv3' # allows env var to be overridden by shell, e.g. USECUDA=OFF ./build.sh : ${USECUDA:=OFF} if [ "${USECUDA}" = "ON" ] || [ "${USECUDA}" = "on" ] ; then TAG='cuda'; else TAG='nocuda'; fi echo TAG IS: ${TAG} docker build --no-cache -t ${DOCKERHUB_REPO}:${TAG} . \ && docker push ${DOCKERHUB_REPO}:${TAG}
Fix CF_RELEASE_BRANCH to be handled properly
#!/bin/bash -ex CF_RELEASE_USE_HEAD=${CF_RELEASE_USE_HEAD:-no} ruby_version=`rbenv version | cut -f1 -d" "` # to overwrite .ruby-version if [ ! "$(ls -A cf-release)" ]; then if [ -z "${CF_RELEASE_URL}" ]; then git submodule update --init cf-release else rmdir cf-release git clone ${CF_RELEASE_URL} cf-release fi ( cd cf-release if [ -z "${CF_RELEASE_BRANCH}" ]; then git checkout -f ${CF_RELEASE_BRANCH} fi if [ $CF_RELEASE_USE_HEAD != "no" ]; then # required to compile a gem native extension of CCNG sudo apt-get -y install git-core libmysqlclient-dev libpq-dev libsqlite3-dev libxml2-dev libxslt-dev gem install rake -v 0.9.2.2 --no-rdoc --no-ri # hack for collector git submodule update --init --recursive RBENV_VERSION=$ruby_version bundle install RBENV_VERSION=$ruby_version bundle exec bosh -n create release --force fi ) else echo "'cf-release' directory is not empty. Skipping cloning..." fi
#!/bin/bash -ex CF_RELEASE_USE_HEAD=${CF_RELEASE_USE_HEAD:-no} ruby_version=`rbenv version | cut -f1 -d" "` # to overwrite .ruby-version if [ ! "$(ls -A cf-release)" ]; then if [ -z "${CF_RELEASE_URL}" ]; then git submodule update --init cf-release else rmdir cf-release git clone ${CF_RELEASE_URL} cf-release fi ( cd cf-release if [ -n "${CF_RELEASE_BRANCH}" ]; then git checkout -f ${CF_RELEASE_BRANCH} fi if [ $CF_RELEASE_USE_HEAD != "no" ]; then # required to compile a gem native extension of CCNG sudo apt-get -y install git-core libmysqlclient-dev libpq-dev libsqlite3-dev libxml2-dev libxslt-dev gem install rake -v 0.9.2.2 --no-rdoc --no-ri # hack for collector git submodule update --init --recursive RBENV_VERSION=$ruby_version bundle install RBENV_VERSION=$ruby_version bundle exec bosh -n create release --force fi ) else echo "'cf-release' directory is not empty. Skipping cloning..." fi
Delete files to free up disk space
#!/bin/bash /usr/bin/mysql -u moodle -D moodle < /home/vagrant/moodle.sql /usr/bin/php /home/vagrant/www/moodle2/htdocs/admin/cli/upgrade.php --non-interactive 2>&1 > /vagrant_log/upgrade.txt
#!/bin/bash rm -f /home/vagrant/moodle-*.tgz /usr/bin/mysql -u moodle -D moodle < /home/vagrant/moodle.sql rm -f /home/vagrant/moodle.sql /usr/bin/php /home/vagrant/www/moodle2/htdocs/admin/cli/upgrade.php --non-interactive &> /vagrant_log/upgrade.txt
Fix to be sersh build when isCcCc4py.
#!/bin/bash # # Version and build information for ZeroMQ # # $Id$ # ###################################################################### ###################################################################### # # Version # ###################################################################### ZEROMQ_BLDRVERSION=${ZEROMQ_BLDRVERSION:-"2.1.11"} ###################################################################### # # Builds, deps, mask, auxdata, paths, builds of other packages # ###################################################################### if test -z "$ZEROMQ_BUILDS"; then if ! [[ `uname` =~ CYGWIN ]]; then ZEROMQ_BUILDS="cc4py" fi fi ZEROMQ_DEPS= ###################################################################### # # Launch builds. # ###################################################################### buildZeromq() { if bilderUnpack zeromq; then if bilderConfig zeromq cc4py "$CONFIG_COMPILERS_PYC $CONFIG_COMPFLAGS_PYC $ZEROMQ_CONFIG_LDFLAGS"; then bilderBuild zeromq cc4py fi fi } ###################################################################### # # Test # ###################################################################### testZeromq() { techo "Not testing zeromq." } ###################################################################### # # Install # ###################################################################### installZeromq() { # Ignore installation errors. R tries to set perms of /contrib/bin. # 20121202: Is this true? Copy-paste error? bilderInstall zeromq cc4py }
#!/bin/bash # # Version and build information for ZeroMQ # # $Id$ # ###################################################################### ###################################################################### # # Version # ###################################################################### ZEROMQ_BLDRVERSION=${ZEROMQ_BLDRVERSION:-"2.1.11"} ###################################################################### # # Builds, deps, mask, auxdata, paths, builds of other packages # ###################################################################### if test -z "$ZEROMQ_BUILDS"; then if ! [[ `uname` =~ CYGWIN ]]; then ZEROMQ_BUILDS=`getPythonBuild` fi fi ZEROMQ_BUILD=`getPythonBuild` ZEROMQ_DEPS= ZEROMQ_UMASK=002 ###################################################################### # # Launch builds. # ###################################################################### buildZeromq() { if bilderUnpack zeromq; then if bilderConfig zeromq $ZEROMQ_BUILD "$CONFIG_COMPILERS_PYC $CONFIG_COMPFLAGS_PYC $ZEROMQ_CONFIG_LDFLAGS"; then bilderBuild zeromq $ZEROMQ_BUILD fi fi } ###################################################################### # # Test # ###################################################################### testZeromq() { techo "Not testing zeromq." } ###################################################################### # # Install # ###################################################################### installZeromq() { # Ignore installation errors. R tries to set perms of /contrib/bin. # 20121202: Is this true? Copy-paste error? bilderInstall zeromq $ZEROMQ_BUILD }
Use the https version of the content API
#!/bin/bash set -e # Any subsequent(*) commands which fail will cause the shell script to exit immediately hash curl 2>/dev/null || { echo >&2 "This script requires 'curl', which is not found. Aborting."; exit 1; } hash pandoc 2>/dev/null || { echo >&2 "This script requires 'pandoc', which is not found. Aborting."; exit 1; } # Make sure we are at the root directory of the repository cd ${BASH_SOURCE%/*}/.. # Download the markdown readme echo "Downloading the README.md..." curl -s -o README.md http://www.craft.ai/content/api/python.md # Convert to rst echo "Converting to README.rst..." pandoc --from=markdown_github --to=rst --output=README.rst README.md # Commit! echo "Commiting the two README files" git add README.md README.rst git commit -m "Updated README files" echo "Success!"
#!/bin/bash set -e # Any subsequent(*) commands which fail will cause the shell script to exit immediately hash curl 2>/dev/null || { echo >&2 "This script requires 'curl', which is not found. Aborting."; exit 1; } hash pandoc 2>/dev/null || { echo >&2 "This script requires 'pandoc', which is not found. Aborting."; exit 1; } # Make sure we are at the root directory of the repository cd ${BASH_SOURCE%/*}/.. # Download the markdown readme echo "Downloading the README.md..." curl -s -o README.md https://www.craft.ai/content/api/python.md # Convert to rst echo "Converting to README.rst..." pandoc --from=markdown_github --to=rst --output=README.rst README.md # Commit! echo "Commiting the two README files" git add README.md README.rst git commit -m "Updated README files" echo "Success!"
Include function to close apps on macOS
PRM_DIR="$SECONDLAW/prm-data-osx" alias emacs="emacsclient -s $HOME/.emacs.d/server/server -n" export ALTERNATE_EDITOR="emacs" export LC_ALL=en_US.UTF-8 export LANG=en_US.UTF-8 alias pycharm="charm"
PRM_DIR="$SECONDLAW/prm-data-osx" alias emacs="emacsclient -s $HOME/.emacs.d/server/server -n" export ALTERNATE_EDITOR="emacs" export LC_ALL=en_US.UTF-8 export LANG=en_US.UTF-8 alias pycharm="charm" function closeapp() { osascript -e "quit app \"$1\"" }
Add a "packages" zip and differentiate from the all-in-one zip.
#!/bin/sh # # This script will create zips of each prototype that exclude any file not # necessary for the operation of the prototype (e.g., licenses, gruntfiles, # test scripts, .git directories, and so forth.) # echo $0; echo "Removing existing packages"; mkdir -p pkgs rm -rf pkgs/*; ROOT="$(pwd)" function make_zip() { cd "$1" zip -rq --exclude="@${ROOT}/package_excludes.txt" "$2" . } echo "Zipping all prototypes" make_zip "${ROOT}" "${ROOT}/pkgs/TinCan_Prototypes.zip" . echo "Zipping Golf Example" make_zip "${ROOT}/GolfExample_TCAPI" "${ROOT}/pkgs/GolfExample_TCAPI.zip" . echo "Zipping Tetris" make_zip "${ROOT}/JsTetris_TCAPI" "${ROOT}/pkgs/JsTetris_TCAPI.zip" . echo "Zipping Locator" make_zip "${ROOT}/Locator_TCAPI" "${ROOT}/pkgs/Locator_TCAPI.zip" . echo "Done";
#!/bin/sh # # This script will create zips of each prototype that exclude any file not # necessary for the operation of the prototype (e.g., licenses, gruntfiles, # test scripts, .git directories, and so forth.) # echo $0; echo "Removing existing packages"; mkdir -p pkgs rm -rf pkgs/*; ROOT="$(pwd)" function make_zip() { cd "$1" zip -rq --exclude="@${ROOT}/package_excludes.txt" "$2" . } echo "Zipping all prototypes" make_zip "${ROOT}" "${ROOT}/pkgs/tincan_prototypes_allinone.zip" . echo "Zipping Golf Example" make_zip "${ROOT}/GolfExample_TCAPI" "${ROOT}/pkgs/GolfExample_TCAPI.zip" . echo "Zipping Tetris" make_zip "${ROOT}/JsTetris_TCAPI" "${ROOT}/pkgs/JsTetris_TCAPI.zip" . echo "Zipping Locator" make_zip "${ROOT}/Locator_TCAPI" "${ROOT}/pkgs/Locator_TCAPI.zip" . echo "Zipping all packages" cd "${ROOT}/pkgs" zip -q "${ROOT}/pkgs/tincan_prototypes_packages.zip" *_TCAPI.zip cd - echo "Done";
Add clean, clean_all, clean_local, start, scaleup commands
#!/bin/bash set -eux rm -rf /tmp/tmp* rm /tmp/storage/* || true rm /tmp/connections.yaml || true mkdir -p /tmp/state echo > /tmp/state/commit_log || true echo > /tmp/state/commited_data || true echo > /tmp/state/stage_log || true find /vagrant/solar/solar -name '*.pyc' -delete || true sudo docker stop $(sudo docker ps -q) || true sudo docker rm $(sudo docker ps -qa) || true solar profile -c -t env/test_env -i prf1 solar discover solar assign -n 'node/node_2 | node/node_1' -r 'resources/docker' solar assign -n 'node/node_1' -r 'resources/mariadb' solar assign -n 'node/node_1' -r 'resources/keystone' solar assign -n 'node/node_1' -r 'resources/haproxy' solar assign -n 'node/node_1' -r 'resources/rabbitmq' solar connect --profile prf1 ./cli.py changes stage ./cli.py changes commit
#!/bin/bash set -eux function clean_local { rm -rf /tmp/tmp* rm /tmp/storage/* || true rm /tmp/connections.yaml || true mkdir -p /tmp/state echo > /tmp/state/commit_log || true echo > /tmp/state/commited_data || true echo > /tmp/state/stage_log || true find /vagrant/solar/solar -name '*.pyc' -delete || true sudo docker stop $(sudo docker ps -q) || true sudo docker rm $(sudo docker ps -qa) || true } function start { solar profile -c -t env/test_env -i prf1 solar discover solar assign -n 'node/node_2 | node/node_1' -r 'resources/docker' solar assign -n 'node/node_1' -r 'resources/mariadb' solar assign -n 'node/node_1' -r 'resources/keystone' solar assign -n 'node/node_1' -r 'resources/haproxy' solar assign -n 'node/node_1' -r 'resources/rabbitmq' solar connect --profile prf1 ./cli.py changes stage ./cli.py changes commit } function scaleup { solar assign -n 'node/node_2' -r 'resources/keystone' solar connect --profile prf1 ./cli.py changes stage ./cli.py changes commit } function clean { solar run -a remove -t 'resources/mariadb' || true solar run -a remove -t 'resources/keystone' || true solar run -a remove -t 'resources/haproxy' || true solar run -a remove -t 'resources/rabbitmq' || true } function clean_all { clean_local clean } $1
Fix ci script not failing with worng checksum and update checksums
#!/bin/bash set -e set -u # checksums MD5SUM="19e50b22e5ee88314ad6a6630d202277" MD5SUM_ANNO="da002cc4c9c4f2c77e4401c97564be94" # run ggsashimi without annotation docker run --rm -w $PWD -v $PWD:$PWD guigolab/ggsashimi -b examples/input_bams.tsv -c chr10:27040584-27048100 -o ci/sashimi [[ $(grep -avE 'CreationDate|ModDate' ci/sashimi.pdf | md5sum | awk '{$0=$1}1') == $MD5SUM ]] # run ggsashimi with annotation docker run --rm -w $PWD -v $PWD:$PWD guigolab/ggsashimi -g examples/annotation.gtf -b examples/input_bams.tsv -c chr10:27040584-27048100 -o ci/sashimi-anno [[ $(grep -avE 'CreationDate|ModDate' ci/sashimi-anno.pdf | md5sum | awk '{$0=$1}1') == $MD5SUM_ANNO ]]
#!/bin/bash set -e set -u # checksums sashimi_md5="86e5924ecf8ce1272635ff43b244b32e" sashimi_anno_md5="216c07785889074f69cb94cc2af7cb00" pdfmd5() { grep -avE 'CreationDate|ModDate' $1 | md5sum | awk '{$0=$1}1' } fail() { echo ${1-""} >&2 && exit 1 } files=( sashimi sashimi_anno ) anno="" for f in ${files[@]}; do [[ $f == "sashimi_anno" ]] && anno="-g examples/annotation.gtf" docker run --rm -w $PWD -v $PWD:$PWD guigolab/ggsashimi $anno -b examples/input_bams.tsv -c chr10:27040584-27048100 -o ci/$f md5=$(pdfmd5 ci/$f.pdf) [[ $md5 == $(eval 'echo $'$f'_md5') ]] || fail "== Wrong checksum for $f.pdf: $md5" done echo "== All checksums match" echo "== DONE" exit 0
Revert "Fix pj() function when no project has been specified"
alias pjo="pj open" pj () { emulate -L zsh cmd="cd" project=$1 if [[ "open" == "$project" ]]; then shift project=$* cmd=${=EDITOR} else project=$* fi if [[ -z "$project" ]]; then echo "You have to specify a project name." return fi for basedir ($PROJECT_PATHS); do if [[ -d "$basedir/$project" ]]; then $cmd "$basedir/$project" return fi done echo "No such project '${project}'." } _pj () { emulate -L zsh typeset -a projects for basedir ($PROJECT_PATHS); do projects+=(${basedir}/*(/N)) done compadd ${projects:t} } compdef _pj pj
alias pjo="pj open" pj () { emulate -L zsh cmd="cd" project=$1 if [[ "open" == "$project" ]]; then shift project=$* cmd=${=EDITOR} else project=$* fi for basedir ($PROJECT_PATHS); do if [[ -d "$basedir/$project" ]]; then $cmd "$basedir/$project" return fi done echo "No such project '${project}'." } _pj () { emulate -L zsh typeset -a projects for basedir ($PROJECT_PATHS); do projects+=(${basedir}/*(/N)) done compadd ${projects:t} } compdef _pj pj
Make the docker host an build parameter.
#!/bin/bash # # This script is only intended to run in the IBM DevOps Services Pipeline Environment. # echo Informing slack... curl -X 'POST' --silent --data-binary '{"text":"A new build for the proxy has started."}' $WEBHOOK > /dev/null mkdir dockercfg ; cd dockercfg echo Downloading Docker requirements.. wget http://docker-2.game-on.org:8081/dockerneeds.tar -q echo Setting up Docker... tar xzf dockerneeds.tar ; mv docker ../ ; cd .. ; chmod +x docker ; \ export DOCKER_HOST="tcp://docker-2.game-on.org:2375" DOCKER_TLS_VERIFY=1 DOCKER_CONFIG=./dockercfg echo Downloading the certificate... wget http://game-on.org:8081/proxy.pem -O ./proxy.pem -q echo Building the docker image... ./docker build -t gameon-proxy . echo Stopping the existing container... ./docker stop -t 0 gameon-proxy ./docker rm gameon-proxy echo Starting the new container... ./docker run -d -p 80:80 -p 443:443 -p 1936:1936 -e ADMIN_PASSWORD=$ADMIN_PASSWORD -e PROXY_DOCKER_HOST=$PROXY_DOCKER_HOST --name=gameon-proxy gameon-proxy rm -rf docker-cfg
#!/bin/bash # # This script is only intended to run in the IBM DevOps Services Pipeline Environment. # echo Informing slack... curl -X 'POST' --silent --data-binary '{"text":"A new build for the proxy has started."}' $WEBHOOK > /dev/null mkdir dockercfg ; cd dockercfg echo Downloading Docker requirements.. wget http://$BUILD_DOCKER_HOST:8081/dockerneeds.tar -q echo Setting up Docker... tar xzf dockerneeds.tar ; mv docker ../ ; cd .. ; chmod +x docker ; \ export DOCKER_HOST="tcp://$BUILD_DOCKER_HOST:2375" DOCKER_TLS_VERIFY=1 DOCKER_CONFIG=./dockercfg echo Downloading the certificate... wget http://$BUILD_DOCKER_HOST:8081/proxy.pem -O ./proxy.pem -q echo Building the docker image... ./docker build -t gameon-proxy . echo Stopping the existing container... ./docker stop -t 0 gameon-proxy ./docker rm gameon-proxy echo Starting the new container... ./docker run -d -p 80:80 -p 443:443 -p 1936:1936 -e ADMIN_PASSWORD=$ADMIN_PASSWORD -e PROXY_DOCKER_HOST=$PROXY_DOCKER_HOST --name=gameon-proxy gameon-proxy rm -rf docker-cfg
Allow for checkout of a specific branch
#!/bin/sh if [ ! -d "$HOME/.yadr" ]; then echo "Installing YADR for the first time" git clone --depth=1 https://github.com/333fred/dotfiles_yadr.git "$HOME/.yadr" cd "$HOME/.yadr" [ "$1" = "ask" ] && export ASK="true" rake install else echo "YADR is already installed" fi
#!/bin/sh if [ ! -d "$HOME/.yadr" ]; then echo "Installing YADR for the first time" git clone --depth=1 https://github.com/333fred/dotfiles_yadr.git "$HOME/.yadr" if [ ! -z "$1" ]; then git checkout "$1" fi cd "$HOME/.yadr" [ "$1" = "ask" ] && export ASK="true" rake install else echo "YADR is already installed" fi
Add apt-get argument to fix issue that happens sometimes on some relays
sudo sh -c 'echo "deb http://deb.torproject.org/torproject.org trusty main" >> /etc/apt/sources.list' sudo sh -c 'echo "deb-src http://deb.torproject.org/torproject.org trusty main" >> /etc/apt/sources.list' sudo gpg --keyserver keys.gnupg.net --recv 886DDD89 sudo gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add - sudo apt-get update sudo apt-get -y install tor deb.torproject.org-keyring vim curl tor-arm sudo /etc/init.d/tor stop sudo -u debian-tor tor --list-fingerprint --orport 1 \ --dirserver "x 127.0.0.1:1 ffffffffffffffffffffffffffffffffffffffff" \ --datadirectory /var/lib/tor/ sudo wget -O /etc/tor/torrc http://directoryserver/router.conf HOSTNAME=$(hostname -s) echo "Nickname $HOSTNAME" | sudo tee -a /etc/tor/torrc ADDRESS=$(hostname -I | tr " " "\n" | grep "192.168") for A in $ADDRESS; do echo "Address $A" | sudo tee -a /etc/tor/torrc done sudo cat /etc/tor/torrc sudo /etc/init.d/tor restart
sudo sh -c 'echo "deb http://deb.torproject.org/torproject.org trusty main" >> /etc/apt/sources.list' sudo sh -c 'echo "deb-src http://deb.torproject.org/torproject.org trusty main" >> /etc/apt/sources.list' sudo gpg --keyserver keys.gnupg.net --recv 886DDD89 sudo gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add - sudo apt-get update sudo apt-get -y --force-yes install tor deb.torproject.org-keyring vim curl tor-arm sudo /etc/init.d/tor stop sudo -u debian-tor tor --list-fingerprint --orport 1 \ --dirserver "x 127.0.0.1:1 ffffffffffffffffffffffffffffffffffffffff" \ --datadirectory /var/lib/tor/ sudo wget -O /etc/tor/torrc http://directoryserver/router.conf HOSTNAME=$(hostname -s) echo "Nickname $HOSTNAME" | sudo tee -a /etc/tor/torrc ADDRESS=$(hostname -I | tr " " "\n" | grep "192.168") for A in $ADDRESS; do echo "Address $A" | sudo tee -a /etc/tor/torrc done sudo cat /etc/tor/torrc sudo /etc/init.d/tor restart
Add a default revision string if we can't find hg
#!/bin/sh # # Print the current source revision, if available # FIXME: this prints the tip, which isn't useful if you're on a different # branch, or just not sync'd to the tip. hg tip --template 'hg-{rev}:{node|short}'
#!/bin/sh # # Print the current source revision, if available # FIXME: this prints the tip, which isn't useful if you're on a different # branch, or just not sync'd to the tip. hg tip --template 'hg-{rev}:{node|short}' || echo "hg-0:baadf00d"
Add debug to install script
#!/bin/bash venv="nephoria_venv" neph_branch="oldboto" adminapi_branch="master" yum install -y python-devel gcc git python-setuptools python-virtualenv if [ ! -d adminapi ]; then git clone https://github.com/nephomaniac/adminapi.git fi if [ ! -d nephoria ]; then git clone https://github.com/nephomaniac/nephoria.git fi if [ "x$venv" != "x" ]; then if [ ! -d $venv ]; then virtualenv $venv fi source $venv/bin/activate fi cd adminapi git fetch git checkout $adminapi_branch git pull origin $adminapi_branch python setup.py install cd - cd nephoria git fetch git checkout $neph_branch git pull origin $neph_branch python setup.py install cd -
#!/bin/bash set -x venv="nephoria_venv" neph_branch="oldboto" adminapi_branch="master" yum install -y python-devel gcc git python-setuptools python-virtualenv rpm -qa | grep virtualenv # verify it was installed successfully above yum repolist # check repos if [ ! -d adminapi ]; then git clone https://github.com/nephomaniac/adminapi.git fi if [ ! -d nephoria ]; then git clone https://github.com/nephomaniac/nephoria.git fi if [ "x$venv" != "x" ]; then if [ ! -d $venv ]; then virtualenv $venv fi source $venv/bin/activate fi cd adminapi git fetch git checkout $adminapi_branch git pull origin $adminapi_branch python setup.py install cd - cd nephoria git fetch git checkout $neph_branch git pull origin $neph_branch python setup.py install cd -
Fix bug with blacklist update
#!/usr/bin/with-contenv sh BLOCKLIST_ENABLED=$(jq -r '.["blocklist-enabled"]' /config/settings.json) BLOCKLIST_URL=$(jq -r '.["blocklist-url"]' /config/settings.json | sed 's/\&amp;/\&/g') if [ "${BLOCKLIST_ENABLED:-false}" == "true" -a -n "$BLOCKLIST_URL" ]; then mkdir -p /tmp/blocklists rm -rf /tmp/blocklists/* cd /tmp/blocklists wget -q -O blocklist.gz "$BLOCKLIST_URL" if [ $? == 0 ]; then gunzip *.gz if [ $? == 0 ]; then chmod go+r * rm -rf /config/blocklists/* cp /tmp/blocklists/blocklist.gz /config/blocklists/ s6-svc -h /var/run/s6/services/transmission fi fi fi
#!/usr/bin/with-contenv sh BLOCKLIST_ENABLED=$(jq -r '.["blocklist-enabled"]' /config/settings.json) BLOCKLIST_URL=$(jq -r '.["blocklist-url"]' /config/settings.json | sed 's/\&amp;/\&/g') if [ "${BLOCKLIST_ENABLED:-false}" == "true" -a -n "$BLOCKLIST_URL" ]; then mkdir -p /tmp/blocklists rm -rf /tmp/blocklists/* cd /tmp/blocklists wget -q -O blocklist.gz "$BLOCKLIST_URL" if [ $? == 0 ]; then gunzip *.gz if [ $? == 0 ]; then chmod go+r * rm -rf /config/blocklists/* cp /tmp/blocklists/* /config/blocklists/ s6-svc -h /var/run/s6/services/transmission fi fi fi
Handle setting jobs in OSX and add -Wno-error
#! /bin/bash set -e set -x if [ x"$TRAVIS" = xtrue ]; then CPU_COUNT=2 else CPU_COUNT=$(nproc) fi mv tcl/target/1986ве1т.cfg tcl/target/1986be1t.cfg mv tcl/target/к1879xб1я.cfg tcl/target/k1879x61r.cfg ./bootstrap mkdir build cd build ../configure \ --prefix=$PREFIX \ --enable-static \ --disable-shared \ --enable-usb-blaster-2 \ --enable-usb_blaster_libftdi \ --enable-jtag_vpi \ --enable-remote-bitbang \ make -j$CPU_COUNT make install
#! /bin/bash set -e set -x if [ x"$TRAVIS" = xtrue ]; then CPU_COUNT=2 else # Identify OS UNAME_OUT="$(uname -s)" case "${UNAME_OUT}" in Linux*) OS=Linux;; Darwin*) OS=Mac;; *) OS="${UNAME_OUT}" echo "Unknown OS: ${OS}" exit 1;; esac if [[ $OS == "Linux" ]]; then CPU_COUNT=$(nproc) elif [[ $OS == "Mac" ]]; then CPU_COUNT=$(sysctl -n hw.physicalcpu) fi fi mv tcl/target/1986ве1т.cfg tcl/target/1986be1t.cfg mv tcl/target/к1879xб1я.cfg tcl/target/k1879x61r.cfg ./bootstrap mkdir build cd build ../configure \ --prefix=$PREFIX \ --enable-static \ --disable-shared \ --enable-usb-blaster-2 \ --enable-usb_blaster_libftdi \ --enable-jtag_vpi \ --enable-remote-bitbang \ CFLAGS="-Wno-error" make -j$CPU_COUNT make install
Add option to explicitly set new version
#!/bin/bash -x # Copyright 2016 Nitor Creations Oy # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. VERSION=$(grep version setup.py | cut -d\' -f 2) MAJOR=${VERSION//.*} MINOR=${VERSION##*.} if [ "$1" = "-m" ]; then MAJOR=$(($MAJOR + 1)) MINOR="0" shift else MINOR=$(($MINOR + 1)) fi sed -i "s/$VERSION/$MAJOR.$MINOR/g" setup.py ./update-readme.sh sed -i "s/## Released version.*/## Released version $MAJOR.$MINOR/g" README.md git commit -m "$1" setup.py git tag "$MAJOR.$MINOR" -m "$1" git push --tags origin master python setup.py register -r pypi python setup.py sdist upload -r pypi --sign
#!/bin/bash -x # Copyright 2016 Nitor Creations Oy # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. VERSION=$(grep version setup.py | cut -d\' -f 2) MAJOR=${VERSION//.*} MINOR=${VERSION##*.} if [ "$1" = "-m" ]; then MAJOR=$(($MAJOR + 1)) MINOR="0" NEW_VERSION=$MAJOR.$MINOR shift elif [ "$1" = "-v" ]; then shift NEW_VERSION="$1" shift else MINOR=$(($MINOR + 1)) NEW_VERSION=$MAJOR.$MINOR fi sed -i "s/$VERSION/$NEW_VERSION/g" setup.py ./update-readme.sh sed -i "s/## Released version.*/## Released version $NEW_VERSION/g" README.md git commit -m "$1" setup.py git tag "$NEW_VERSION" -m "$1" git push --tags origin master python setup.py register -r pypi python setup.py sdist upload -r pypi --sign
Update next step for dev vm
#!/bin/bash echo ansible-playbook deploy/site.yml -i environment/inventory -l app* -e "galera_bootstrap_node=app.stepup.example.com" ansible-playbook deploy/site.yml -i environment/inventory -l app* -e "galera_bootstrap_node=app.stepup.example.com inventory_dir=`pwd`/environment" echo "" echo "Next steps:" echo "- Deploy manage server (optional): ./deploy-site-manage.sh" echo "- Deploy components : ./deploy-release.sh"
#!/bin/bash echo ansible-playbook deploy/site.yml -i environment/inventory -l app* -e "galera_bootstrap_node=app.stepup.example.com" ansible-playbook deploy/site.yml -i environment/inventory -l app* -e "galera_bootstrap_node=app.stepup.example.com inventory_dir=`pwd`/environment" echo "" echo "Next steps:" echo "- Deploy manage server (optional): ./deploy-site-manage.sh" echo "- Deploy components : ./deploy-release.sh (or, for a development server, deploy-develop.sh)"
Migrate to my own hosting
#!/bin/sh if [ ! -d "$HOME/.yadr" ]; then echo "Installing YADR for the first time" git clone https://github.com/skwp/dotfiles.git "$HOME/.yadr" cd "$HOME/.yadr" [ "$1" = "ask" ] && export ASK="true" rake install else echo "YADR is already installed" fi
#!/bin/sh if [ ! -d "$HOME/.yadr" ]; then echo "Installing YADR for the first time" git clone https://github.com/hderms/dotfiles.git "$HOME/.yadr" cd "$HOME/.yadr" [ "$1" = "ask" ] && export ASK="true" rake install else echo "YADR is already installed" fi
Use correct path to version file
#!/bin/sh set -e # Checkout master as we are currently have an individual commit checked out on # a detached tree. This means when we commit later it will be on a branch git checkout master git reset --hard origin/master # Init the submodule and checkout the revision pinned in `.gitmodules` git submodule update --init # The version of the toolkit defined by the pinned submodule PINNED_SUBMODULE_VERSION=`cat app/assets/VERSION.txt` # Force the submodule to pull the latest and checkout origin/master git submodule foreach git pull origin master # The version of the toolkit defined in the submodules master branch NEW_SUBMODULE_VERSION=`cat app/assets/VERSION.txt` # If the submodule has a new version string if [ "$PINNED_SUBMODULE_VERSION" != "$NEW_SUBMODULE_VERSION" ]; then # Commit the submodule change git commit -am "Updated submodule" # Update the package.json to have the submodule toolkit version npm version $NEW_SUBMODULE_VERSION # Reset the submodule change and the npm version commit so they can be # re-committed as a single commit git reset --soft HEAD~2 # Commit the updated submodule and version bump and push it to origin git commit -am "Bump to version $PINNED_SUBMODULE_VERSION" git push origin master fi npm publish
#!/bin/sh set -e # Checkout master as we are currently have an individual commit checked out on # a detached tree. This means when we commit later it will be on a branch git checkout master git reset --hard origin/master # Init the submodule and checkout the revision pinned in `.gitmodules` git submodule update --init # The version of the toolkit defined by the pinned submodule PINNED_SUBMODULE_VERSION=`cat govuk_frontend_toolkit/VERSION.txt` # Force the submodule to pull the latest and checkout origin/master git submodule foreach git pull origin master # The version of the toolkit defined in the submodules master branch NEW_SUBMODULE_VERSION=`cat govuk_frontend_toolkit/VERSION.txt` # If the submodule has a new version string if [ "$PINNED_SUBMODULE_VERSION" != "$NEW_SUBMODULE_VERSION" ]; then # Commit the submodule change git commit -am "Updated submodule" # Update the package.json to have the submodule toolkit version npm version $NEW_SUBMODULE_VERSION # Reset the submodule change and the npm version commit so they can be # re-committed as a single commit git reset --soft HEAD~2 # Commit the updated submodule and version bump and push it to origin git commit -am "Bump to version $PINNED_SUBMODULE_VERSION" git push origin master fi npm publish
Improve typecheck script to detect glint
#!/bin/bash for name in $(find packages -name node_modules -prune -o -name 'cardpay-subgraph' -prune -o -name 'tsconfig.json' -print); do { cd $(dirname $name) if [[ $name == *"boxel"* || $name == *"web-client"* || $name == *"ssr-web"* || $name == *"safe-tools-client"* ]] ; then yarn -s glint else yarn -s tsc --noEmit fi code=$? [ $code -eq 0 ] && echo PASS "$name" || echo FAIL "$name" exit $code } & done combined_exit=0 for pid in $(jobs -p); do wait "$pid" code=$? if [ "$code" != "0" ]; then combined_exit=$code fi done exit $combined_exit
#!/bin/bash for name in $(find packages -name node_modules -prune -o -name 'cardpay-subgraph' -prune -o -name 'tsconfig.json' -print); do { cd $(dirname $name) if grep -q "@glint" "package.json"; then yarn -s glint else yarn -s tsc --noEmit fi code=$? [ $code -eq 0 ] && echo PASS "$name" || echo FAIL "$name" exit $code } & done combined_exit=0 for pid in $(jobs -p); do wait "$pid" code=$? if [ "$code" != "0" ]; then combined_exit=$code fi done exit $combined_exit
Add nextflow to PATH in .bashrc.
WORK_DIR=`pwd` #install java 7 if needed JAVA_VER=`java -version 2>&1 | grep "java version" | awk '{print $3}' | tr -d \" | awk '{split($0, array, ".")} END{print array[2]}'` if [ "$JAVA_VER" -ge 7 ]; then echo "Java 7 or greater detected." else echo "Java version is lower than 7." sudo apt-get -y install openjdk-7-jre-headless fi #install nextflow if needed cd $HOME if [ ! -d nextflow/ ]; then mkdir -p nextflow cd nextflow/ curl -fsSL get.nextflow.io | bash else echo "Nexflow is already installed." fi #add to PATH export PATH=$HOME/nextflow:$PATH cd $WORK_DIR
WORK_DIR=`pwd` #install java 7 if needed JAVA_VER=`java -version 2>&1 | grep "java version" | awk '{print $3}' | tr -d \" | awk '{split($0, array, ".")} END{print array[2]}'` if [ "$JAVA_VER" -ge 7 ]; then echo "Java 7 or greater detected." else echo "Java version is lower than 7." sudo apt-get -y install openjdk-7-jre-headless fi #install nextflow if needed cd $HOME if [ ! -d nextflow/ ]; then mkdir -p nextflow cd nextflow/ curl -fsSL get.nextflow.io | bash else echo "Nexflow is already installed." fi #add to PATH echo "export PATH=$HOME/nextflow:$PATH" >> $HOME/.bashrc cd $WORK_DIR
Update Distro & Apps - Added autoremove
#!/bin/bash echo "Updating Distro & Apps" source $SCRIPTPATH/inc/pkgupdate.sh sudo apt-get -y dist-upgrade sudo apt-get -y autoclean
#!/bin/bash echo "Updating Distro & Apps" source $SCRIPTPATH/inc/pkgupdate.sh sudo apt-get -y dist-upgrade sudo apt-get -y autoremove sudo apt-get -y autoclean
Change the paths for the staging deployment
#!/bin/bash if [ ! -e tsomi-staging ]; then git clone https://savannidgerinel:${GITHUB_TOKEN}@github.com/cloudcity/tsomi-staging.git fi mkdir -p tsomi-staging cd tsomi-staging #cat ../../index.html | sed 's/static\/tsomi.css/\/tsomi\/static\/tsomi.css/' | sed 's/js\/bundle.js/\/tsomi\/js\/bundle.js/' > index.html cp ../../index.html . cp -r ../../static . cp -r ../../js . git add * git commit -m "deploy tsomi to staging" git push
#!/bin/bash if [ ! -e tsomi-staging ]; then git clone https://savannidgerinel:${GITHUB_TOKEN}@github.com/cloudcity/tsomi-staging.git fi mkdir -p tsomi-staging cd tsomi-staging #cat ../../index.html | sed 's/static\/tsomi.css/\/tsomi\/static\/tsomi.css/' | sed 's/js\/bundle.js/\/tsomi\/js\/bundle.js/' > index.html cp ../index.html . cp -r ../static . cp -r ../js . git add * git commit -m "deploy tsomi to staging" git push
Add shebang line in script
do_generate() { cd "$(dirname "$0")" protoc -I=. --python_out=orwell/messages controller.proto robot.proto server-game.proto server-web.proto } do_generate
#!/bin/sh do_generate() { cd "$(dirname "$0")" protoc -I=. --python_out=orwell/messages controller.proto robot.proto server-game.proto server-web.proto } do_generate
Make test fails if examples do not use latest version of Vega
#!/usr/bin/env bash # Check only output Vega files and normalized Vega-Lite files for now as Travis is having problem when generating SVG if ! git diff --word-diff=color --exit-code HEAD -- ./examples/compiled/*.vg.json then echo "Output examples vega specs are outdated." exit 1 elif ! git diff --word-diff=color --exit-code HEAD -- ./examples/specs/normalized/*.vl.json then echo "Output normalized examples vega-lite specs are outdated." exit 1 else exit 0 fi
#!/usr/bin/env bash # Check only output Vega files and normalized Vega-Lite files for now as Travis is having problem when generating SVG if ! git diff --word-diff=color --exit-code HEAD -- ./examples/compiled/vega_version then echo "Output examples were generated using an older version of Vega." exit 1 elif ! git diff --word-diff=color --exit-code HEAD -- ./examples/compiled/*.vg.json then echo "Output examples vega specs are outdated." exit 1 elif ! git diff --word-diff=color --exit-code HEAD -- ./examples/specs/normalized/*.vl.json then echo "Output normalized examples vega-lite specs are outdated." exit 1 else exit 0 fi
Install script : extracted extension id
LOCATION=`pwd` PROFILES="/Users/$(whoami)/Library/Application Support/Firefox/Profiles/" DEV_PROFILE=`ls "$PROFILES" | grep .dev` mkdir -p "$PROFILES/$DEV_PROFILE/extensions/" echo $LOCATION > "$PROFILES/$DEV_PROFILE/extensions/zogger@personal.com"
EXTENSION_ID=zogger@personal.com LOCATION=`pwd` PROFILES="/Users/$(whoami)/Library/Application Support/Firefox/Profiles/" DEV_PROFILE=`ls "$PROFILES" | grep .dev` mkdir -p "$PROFILES/$DEV_PROFILE/extensions/" echo $LOCATION > "$PROFILES/$DEV_PROFILE/extensions/$EXTENSION_ID"
Fix db upgrage scripts numbering check
#!/bin/sh OUT="$(find "$(dirname "$0")/../packaging/dbscripts" | grep -P '\d{2}_\d{2}_\d{2,8}' -o | sort | uniq -d)" if [ -n "${OUT}" ]; then echo "Found duplicate upgrade scripts with version $(echo ${OUT}), please resolve and retry" >&2 exit 1 fi exit 0
#!/bin/sh OUT="$(find "$(dirname "$0")/../packaging/dbscripts" | grep -P '^\d{2}_\d{2}_\d{2,8}' -o | sort | uniq -d)" if [ -n "${OUT}" ]; then echo "Found duplicate upgrade scripts with version $(echo ${OUT}), please resolve and retry" >&2 exit 1 fi exit 0
Remove --runtime switch from mono call
#!/bin/sh -x LATEST=$(curl -Ls -o /dev/null -w %{url_effective} https://github.com/monkey0506/AGSModuleExporter/releases/latest) TAG=${LATEST##*/} EXPORTER=https://github.com/monkey0506/AGSModuleExporter/releases/download/$TAG/AGSModuleExporter.exe wget -O /tmp/AGSModuleExporter.exe "${EXPORTER}" mono --runtime=v4.0 /tmp/AGSModuleExporter.exe -script ./$1
#!/bin/sh -x LATEST=$(curl -Ls -o /dev/null -w %{url_effective} https://github.com/monkey0506/AGSModuleExporter/releases/latest) TAG=${LATEST##*/} EXPORTER=https://github.com/monkey0506/AGSModuleExporter/releases/download/$TAG/AGSModuleExporter.exe wget -O /tmp/AGSModuleExporter.exe "${EXPORTER}" mono /tmp/AGSModuleExporter.exe -script ./$1
Update coverage tests for Go <1.4
#!/bin/bash echo "mode: set" > acc.out fail=0 # Standard go tooling behavior is to ignore dirs with leading underscors for dir in $(find . -maxdepth 10 -not -path './.git*' -not -path '*/_*' -type d); do if ls $dir/*.go &> /dev/null; then go test -v -coverprofile=profile.out $dir -check.v || fail=1 if [ -f profile.out ]; then cat profile.out | grep -v "mode: set" >> acc.out rm profile.out fi fi done # Failures have incomplete results, so don't send if [ -n "$COVERALLS_TOKEN" ] && [ "$fail" -eq 0 ]; then echo "SENDING" $HOME/gopath/bin/goveralls -v -coverprofile=acc.out -service travis-ci -repotoken $COVERALLS_TOKEN fi rm -f acc.out exit $fail
#!/bin/bash echo "mode: set" > acc.out fail=0 # Install the old cover tool if version is not 1.4 or higher. if [ -n "$TRAVIS_GO_VERSION" ] && [[ "$TRAVIS_GO_VERSION" < "go1.4" ]]; then go get code.google.com/p/go.tools/cmd/cover fi # Standard go tooling behavior is to ignore dirs with leading underscors for dir in $(find . -maxdepth 10 -not -path './cmd/*' -not -path './.git*' -not -path '*/_*' -type d); do if ls $dir/*.go &> /dev/null; then go test -v -coverprofile=profile.out $dir -check.v || fail=1 if [ -f profile.out ]; then cat profile.out | grep -v "mode: set" >> acc.out rm profile.out fi fi done # Failures have incomplete results, so don't send if [ -n "$COVERALLS_TOKEN" ] && [ "$fail" -eq 0 ]; then $HOME/gopath/bin/goveralls -v -coverprofile=acc.out -service travis-ci -repotoken $COVERALLS_TOKEN fi rm -f acc.out exit $fail
Use $HOME instead of weird nested echo
#!/bin/sh NOTIFIER_URL=https://raw.githubusercontent.com/grantovich/homebrew-notifier/master/notifier.sh brew install terminal-notifier mkdir -p ~/.bin curl -fsS $NOTIFIER_URL > ~/.bin/brew-update-notifier.sh chmod +x ~/.bin/brew-update-notifier.sh if crontab -l | grep -q 'brew-update-notifier'; then echo 'Crontab entry already exists, skipping...' else echo "0 11 * * * PATH=/usr/local/bin:\$PATH $(echo ~)/.bin/brew-update-notifier.sh" | crontab - fi echo echo "Notifier installed. You'll be notified of brew updates at 11am every day." echo "Checking for updates right now..." ~/.bin/brew-update-notifier.sh
#!/bin/sh NOTIFIER_URL=https://raw.githubusercontent.com/grantovich/homebrew-notifier/master/notifier.sh brew install terminal-notifier mkdir -p ~/.bin curl -fsS $NOTIFIER_URL > ~/.bin/brew-update-notifier.sh chmod +x ~/.bin/brew-update-notifier.sh if crontab -l | grep -q 'brew-update-notifier'; then echo 'Crontab entry already exists, skipping...' else echo "0 11 * * * PATH=/usr/local/bin:\$PATH $HOME/.bin/brew-update-notifier.sh" | crontab - fi echo echo "Notifier installed. You'll be notified of brew updates at 11am every day." echo "Checking for updates right now..." ~/.bin/brew-update-notifier.sh
Fix sanity check on the location of pkg
#!/usr/bin/env atf-sh . $(atf_get_srcdir)/test_environment.sh if [ `uname -s` != "Darwin" ]; then ldd=ldd fi tests_init \ which \ ${ldd} which_body() { atf_check \ -o inline:"$(atf_get_srcdir)/../../src/pkg\n" \ -e empty \ -s exit:0 \ which pkg } ldd_body() { atf_check \ -o match:".*libpkg.so.3 => $(atf_get_srcdir).*$" \ -e empty \ -s exit:0 \ ldd -a $(atf_get_srcdir)/../../src/.libs/pkg }
#!/usr/bin/env atf-sh . $(atf_get_srcdir)/test_environment.sh if [ `uname -s` != "Darwin" ]; then ldd=ldd fi tests_init \ which \ ${ldd} which_body() { atf_check \ -o inline:"$(atf_get_srcdir)/../../src/.libs/pkg\n" \ -e empty \ -s exit:0 \ which pkg } ldd_body() { atf_check \ -o match:".*libpkg.so.3 => $(atf_get_srcdir).*$" \ -e empty \ -s exit:0 \ ldd -a $(atf_get_srcdir)/../../src/.libs/pkg }
Increase wait time to 5 mins
#!/usr/bin/env bash ### Prep if [[ "$(uname)" == "Darwin" ]]; then if ! command -v brew -f &> /dev/null; then echo "No proper installer found. Please install homebrew" exit 1 fi INSTALLER="brew install" elif [[ "$(uname)" == "Linux"* ]]; then echo "--> Sleep to wait nix-env ready" sleep 180 # Source /etc/profile, it will set up nix, shadowenv and other goodies . /etc/profile if ! command -v nix-env -f &> /dev/null; then echo "No proper installer found. Please install Nix" exit 1 fi nix-channel --update && nix upgrade-nix INSTALLER="nix-env -f '<nixpkgs>' -iA" else echo "Unsupported system: $(uname)" exit 1 fi ### Install utils UTILS=(direnv ripgrep neovim fzf fd tig diff-so-fancy chezmoi) for i in "${UTILS[@]}" do $(echo "$INSTALLER $i") done ### Apply dotfiles chezmoi init https://github.com/ifyouseewendy/dotfiles.git --branch=chezmoi chezmoi apply ### Post hooks # neovim ln -s ~/.vimrc ~/.config/nvim/init.vim # vim-plug nvim --headless +PlugInstall +qa # tpm $HOME/.tmux/plugins/tpm/bin/install_plugins
#!/usr/bin/env bash ### Prep if [[ "$(uname)" == "Darwin" ]]; then if ! command -v brew -f &> /dev/null; then echo "No proper installer found. Please install homebrew" exit 1 fi INSTALLER="brew install" elif [[ "$(uname)" == "Linux"* ]]; then echo "--> Sleep to wait nix-env ready" sleep 300 # Source /etc/profile, it will set up nix, shadowenv and other goodies . /etc/profile if ! command -v nix-env -f &> /dev/null; then echo "No proper installer found. Please install Nix" exit 1 fi nix-channel --update && nix upgrade-nix INSTALLER="nix-env -f '<nixpkgs>' -iA" else echo "Unsupported system: $(uname)" exit 1 fi ### Install utils UTILS=(direnv ripgrep neovim fzf fd tig diff-so-fancy chezmoi) for i in "${UTILS[@]}" do $(echo "$INSTALLER $i") done ### Apply dotfiles chezmoi init https://github.com/ifyouseewendy/dotfiles.git --branch=chezmoi chezmoi apply ### Post hooks # neovim ln -s ~/.vimrc ~/.config/nvim/init.vim # vim-plug nvim --headless +PlugInstall +qa # tpm $HOME/.tmux/plugins/tpm/bin/install_plugins
Use old system till new system is approved.
#!/bin/bash - #=============================================================================== # # FILE: schedule.sh # # USAGE: ./schedule.sh # # DESCRIPTION: # # OPTIONS: --- # REQUIREMENTS: --- # BUGS: --- # NOTES: --- # AUTHOR: Dilawar Singh (), dilawars@ncbs.res.in # ORGANIZATION: NCBS Bangalore # CREATED: 02/05/2017 05:27:07 PM # REVISION: --- #=============================================================================== set -o nounset # Treat unset variables as an error DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" #if [ -f /opt/rh/python27/enable ]; then # source /opt/rh/python27/enable #fi python2.7 $DIR/schedule_aws_groupwise.py
#!/bin/bash - #=============================================================================== # # FILE: schedule.sh # # USAGE: ./schedule.sh # # DESCRIPTION: # # OPTIONS: --- # REQUIREMENTS: --- # BUGS: --- # NOTES: --- # AUTHOR: Dilawar Singh (), dilawars@ncbs.res.in # ORGANIZATION: NCBS Bangalore # CREATED: 02/05/2017 05:27:07 PM # REVISION: --- #=============================================================================== set -o nounset # Treat unset variables as an error DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" #if [ -f /opt/rh/python27/enable ]; then # source /opt/rh/python27/enable #fi #python2.7 $DIR/schedule_aws_groupwise.py python2.7 $DIR/schedule_aws.py
Adjust CI script to new path
#!/bin/bash # This is the bees CI build. Any changes to the build script should be # here instead if in the bees config. set -e DIR=$( cd "$( dirname "$0" )" && pwd ) function mark { echo echo "==============================================" echo $1 date echo "==============================================" echo } export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" mark "Starting build script" java -version mvn -version git clean -fd mark "Cleaning" mvn -B clean mark "Reversioning" mvn -B versions:set -DnewVersion=1.x.incremental.${BUILD_NUMBER} mark "Building" mvn -B -s ${SETTINGS_FILE} install mark "Testing messaging against HornetQ 2.3" mvn -B -s ${SETTINGS_FILE} -f modules/messaging-hornetq/pom.xml -P hornetq-2.3 test mark "Deploying" mvn -B -s ${SETTINGS_FILE} -Pbees deploy
#!/bin/bash # This is the bees CI build. Any changes to the build should be # here instead if in the bees config. set -e DIR=$( cd "$( dirname "$0" )" && pwd ) function mark { echo echo "==============================================" echo $1 date echo "==============================================" echo } export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" mark "Starting build script" java -version mvn -version git clean -fd mark "Cleaning" mvn -B clean mark "Reversioning" mvn -B versions:set -DnewVersion=1.x.incremental.${BUILD_NUMBER} mark "Building" mvn -B -s ${SETTINGS_FILE} install mark "Testing messaging against HornetQ 2.3" mvn -B -s ${SETTINGS_FILE} -f messaging-hornetq/pom.xml -P hornetq-2.3 test mark "Deploying" mvn -B -s ${SETTINGS_FILE} -Pbees deploy
Comment out install_docs.sh code deploy install code.
#!/usr/bin/env bash sudo rm -r /home/ubuntu/arches-docs git clone --recursive https://github.com/archesproject/docs.git /home/ubuntu/arches-docs cd /home/ubuntu/arches-docs npm install npm run deploy
#!/usr/bin/env bash # sudo rm -r /home/ubuntu/arches-docs # # git clone --recursive https://github.com/archesproject/docs.git /home/ubuntu/arches-docs # # cd /home/ubuntu/arches-docs # npm install # npm run deploy
Make setup work in virtualenvs too, plus run migrate on every setup
#!/bin/bash -e sudo python3-pip install -r pipreqs if [ -z "$APP_MODE" -o "$APP_MODE" == "dev" ]; then sudo python3-pip install -r pipreqs-dev fi python3 manage.py migrate exit 0
#!/bin/bash -e if [ -z $(which python3-pip) ]; then PIP_CMD=$(which pip) PYTHON_CMD=$(which python) else PIP_CMD=$(which python3-pip) PYTHON_CMD=$(which python3) fi sudo ${PIP_CMD} install -r pipreqs if [ -z "$APP_MODE" -o "$APP_MODE" == "dev" ]; then sudo ${PIP_CMD} install -r pipreqs-dev fi ${PYTHON_CMD} manage.py migrate exit 0
Add LANG=C and just cat the wordlist, no grepping
#!/bin/sh # Search wordlist for words that works as a sed substitution command and the words that gets # changed into words that exists in that list. words=/usr/share/dict/words for s in $(grep '^[a-z]*$' $words | grep -P '^s(.)((?!\1).)+\1((?!\1).)*\1$') do echo "### $s" (grep '^[a-z]*$' $words | sed 's/^/~ /' ; grep $(echo $s | sed 's/^s\(.\)\(.*\)\1.*\1$/\2/') $words | grep '^[a-z]*$' | sed "h;$s;x;p;x" | awk '{printf $0" ";getline;print}' ) | sort -k 2 | uniq -cf 1 | grep '^\s*2' | awk '{print $2" -> "$3}' | sed "s/^/$s /" done
#!/bin/sh # Search wordlist for words that works as a sed substitution command and the words that gets # changed into words that exists in that list. LANG=C words=/usr/share/dict/words for s in $(cat $words | grep -P '^s(.)((?!\1).)+\1((?!\1).)*\1[gim]*$') do echo "### $s" (cat $words | sed 's/^/~ /' ; grep $(echo $s | sed 's/^s\(.\)\(.*\)\1.*\1$/\2/') $words | grep '^[a-z]*$' | sed "h;$s;x;p;x" | awk '{printf $0" ";getline;print}' ) | sort -k 2 | uniq -cf 1 | grep '^\s*2' | awk '{print $2" -> "$3}' | sed "s/^/$s /" done
Add working on local [ci full]
#!/usr/bin/env bash # # Created by vcernomschi on 10/06/2015 # source $(dirname $0)/_head.sh ############################## ### Merge Coverage Results ### ############################## istanbul-combine -d ${__COVERAGE_PATH} -r lcovonly -p both \ ${__SRC_PATH}*/tests/frontend/coverage/**/coverage-final.json \ ${__SRC_PATH}*/tests/backend/coverage/coverage.json ################################################################ ### Update paths to have src/* file in coverage report ### ### https://github.com/codacy/node-codacy-coverage/issues/26 ### ################################################################ SEARCH_VALUE=$(pwd -P)"/" REPLACE_VALUE="" sed -e "s@${SEARCH_VALUE}@${REPLACE_VALUE}@g" ${__COVERAGE_PATH}"/lcov.info" > ${__COVERAGE_PATH}"/coverage.info" ###################################### ### Upload Coverage info to Codacy ### ###################################### cat ${__COVERAGE_PATH}"/coverage.info" | codacy-coverage --debug ##################################################################### ### Log top 20 file paths to be able see paths format from travis ### ##################################################################### head -n 20 ${__COVERAGE_PATH}"/coverage.info" ############################################# ### Cleanup! Remove all generated reports ### ############################################# __CMD='rm -rf ./coverage' subpath_run_cmd ${__SRC_PATH} "$__CMD" ########################### ### Remove final report ### ########################### cd ${__COVERAGE_PATH} rm -rf ${__COVERAGE_PATH}
#!/usr/bin/env bash # # Created by vcernomschi on 10/06/2015 # source $(dirname $0)/_head.sh ############################## ### Merge Coverage Results ### ############################## istanbul-combine -d ${__COVERAGE_PATH} -r lcovonly -p both \ ${__SRC_PATH}*/tests/frontend/coverage/report.json \ ${__SRC_PATH}*/tests/backend/coverage/coverage.json ################################################################ ### Update paths to have src/* file in coverage report ### ### https://github.com/codacy/node-codacy-coverage/issues/26 ### ################################################################ SEARCH_VALUE=$(pwd -P)"/" REPLACE_VALUE="" sed -e "s@${SEARCH_VALUE}@${REPLACE_VALUE}@g" ${__COVERAGE_PATH}"/lcov.info" > ${__COVERAGE_PATH}"/coverage.info" ###################################### ### Upload Coverage info to Codacy ### ###################################### cat ${__COVERAGE_PATH}"/coverage.info" | codacy-coverage --debug ##################################################################### ### Log top 20 file paths to be able see paths format from travis ### ##################################################################### head -n 20 ${__COVERAGE_PATH}"/coverage.info" ############################################# ### Cleanup! Remove all generated reports ### ############################################# __CMD='rm -rf ./coverage' subpath_run_cmd ${__SRC_PATH} "$__CMD" ########################### ### Remove final report ### ########################### cd ${__COVERAGE_PATH} rm -rf ${__COVERAGE_PATH}
Use bash to call build
( printf "Checking Code Style... " standard > stdout.tmp 2> stderr.tmp && ( echo "passed" ) || ( code=$? echo "failed" >&2 cat stderr.tmp >&2 cat stdout.tmp exit $code ) ) && ( if [[ $COVERALLS == 'true' ]]; then bash ./sh/test-coverage.sh -- $JEST_ARGV else jest -- $JEST_ARGV fi ) && ( ./sh/build.sh )
( printf "Checking Code Style... " standard > stdout.tmp 2> stderr.tmp && ( echo "passed" ) || ( code=$? echo "failed" >&2 cat stderr.tmp >&2 cat stdout.tmp exit $code ) ) && ( if [[ $COVERALLS == 'true' ]]; then bash ./sh/test-coverage.sh -- $JEST_ARGV else jest -- $JEST_ARGV fi ) && ( bash ./sh/build.sh )
Change script to run SQL script directly. Add test for manual run.
#!/bin/bash # the line run in the mask function can be substituted for your own masking software. In this example it calls a SQL script maskfunc() { su -m oracle -c "/act/scripts/masking.sh" } # this part of the script ensures we run the masking during a scrub mount after the database is started on the scrubbing server if [ "$ACT_MULTI_OPNAME" == "scrub-mount" ] && [ "$ACT_MULTI_END" == "true" ] && [ "$ACT_PHASE" == "post" ]; then maskfunc exit $? fi # if we are manually running the script remind the user how to test it if [ -z "$1" ]; then echo "If you want to run this script as a test then please use the following command:" echo "$0 test" fi # this lets us run this script manually if [ "$1" == "test" ]; then maskfunc exit $? fi exit 0
#!/bin/bash # the line run in the mask function can be substituted for your own masking software. In this example it calls a SQL script # You need to validate all the variables # Is your ORACLE_SID the one called by workflow? In this example it is called: unmasked # Is your ORACLE_HOME correct? In this example it is: /home/oracle/app/oracle/product/12.2.0/dbhome_1 # Is the SQL script being called the correct one? In this example it is called: maskscript.sql maskfunc() { su - oracle -c "cd /act/scripts;export ORACLE_SID=unmasked;export ORACLE_HOME=/home/oracle/app/oracle/product/12.2.0/dbhome_1;export PATH=$ORACLE_HOME/bin:$PATH;ORAENV_ASK=NO;sqlplus / as sysdba @/act/scripts/maskscript.sql;exit" } # this part of the script ensures we run the masking during a scrub mount after the database is started on the scrubbing server if [ "$ACT_MULTI_OPNAME" == "scrub-mount" ] && [ "$ACT_MULTI_END" == "true" ] && [ "$ACT_PHASE" == "post" ]; then maskfunc exit $? fi # if the user is running this manually then tell them to use test if [ -z "$1" ] && [ -z "$ACT_PHASE" ]; then echo "To run this script manually, use the following syntax: $0 test" exit 0 fi # this lets us run this script manually if [ "$1" == "test" ]; then maskfunc exit $? fi exit 0
Set tag when publishing next prerelease version
#!/bin/sh echo "What kind of a release?" select yn in "patch" "minor" "major" "premajor" "prerelease"; do case $yn in patch ) TYPE="patch"; break;; minor ) TYPE="minor"; break;; major ) TYPE="major"; break;; premajor ) TYPE="premajor --preid=next"; break;; prelelease ) TYPE="prerelease --preid=next"; break;; esac done cd projects/hslayers npm version $TYPE standard-changelog cd ../hslayers-app npm version $TYPE cd ../hslayers-cesium npm version $TYPE cd ../hslayers-cesium-app npm version $TYPE cd ../../ make build-all # Second round of building needed because app building # generates unnecessary ngcc files in hslayers-ng / hslayers-cesium lib directories npm run build npm run build-cesium
#!/bin/sh echo "What kind of a release?" select yn in "patch" "minor" "major" "premajor" "prerelease"; do case $yn in patch ) TYPE="patch"; break;; minor ) TYPE="minor"; break;; major ) TYPE="major"; break;; premajor ) TYPE="premajor --preid=next"; break;; prelelease ) TYPE="prerelease --preid=next --tag next"; break;; esac done cd projects/hslayers npm version $TYPE standard-changelog cd ../hslayers-app npm version $TYPE cd ../hslayers-cesium npm version $TYPE cd ../hslayers-cesium-app npm version $TYPE cd ../../ make build-all # Second round of building needed because app building # generates unnecessary ngcc files in hslayers-ng / hslayers-cesium lib directories npm run build npm run build-cesium
Hide errors for removing files that don't yet exist but should
#!/bin/bash DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" HOMEDIR=${HOME} ignoredfiles=(.git .gitmodules LICENSE.md README.md push.sh setup.sh update.sh install.sh install.py utils .DS_Store Brewfile assets) function createLinks() { for f in `ls -A $DIR`; do filename=$(basename $f) if [[ ${ignoredfiles[*]} =~ "$filename" ]]; then continue fi rm "$HOMEDIR/$filename" ln -s $DIR/$filename $HOMEDIR/$filename done } if [[ $1 =~ ^(-f|--force)$ ]]; then createLinks else read -p "This may overwrite existing files in your home directory. Are you sure? (y/n) " -n 1 echo if [[ $REPLY =~ ^[Yy]$ ]]; then createLinks fi fi unset createLinks
#!/bin/bash DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" HOMEDIR=${HOME} ignoredfiles=(.git .gitmodules LICENSE.md README.md push.sh setup.sh update.sh install.sh install.py utils .DS_Store Brewfile assets) function createLinks() { for f in `ls -A $DIR`; do filename=$(basename $f) if [[ ${ignoredfiles[*]} =~ "$filename" ]]; then continue fi rm -f "$HOMEDIR/$filename" > /dev/null ln -s $DIR/$filename $HOMEDIR/$filename done } if [[ $1 =~ ^(-f|--force)$ ]]; then createLinks else read -p "This may overwrite existing files in your home directory. Are you sure? (y/n) " -n 1 echo if [[ $REPLY =~ ^[Yy]$ ]]; then createLinks fi fi unset createLinks
Add text and hist report
vegeta attack --duration ${DURATION} --rate ${RATE} --targets targets | vegeta report -reporter='hist[0,20ms,40ms,60ms,100ms,200ms,1s,2s,3s]'
vegeta attack --duration ${DURATION} --rate ${RATE} --targets targets > result.bin vegeta report -inputs result.bin vegeta report -inputs result.bin -reporter='hist[0,20ms,40ms,60ms,100ms,200ms,1s,2s,3s]'
Add convenience aliases for installation of npm modules
eval "$(npm completion 2>/dev/null)"
eval "$(npm completion 2>/dev/null)" # Install and save to dependencies alias npms="npm i -S " # Install and save to dev-dependencies alias npmd="npm i -D "
Include pasted commands in shell history
# 'cd' to dir without typing the 'cd' command setopt AUTO_CD # Makes cd=pushd, 'popd' to go back setopt AUTO_PUSHD # Ignore multiple directories for the stack. setopt PUSHD_IGNORE_DUPS # 10 second wait if you do something that will delete everything. setopt RM_STAR_WAIT # Case insensitive globbing setopt NO_CASE_GLOB # setopt NUMERIC_GLOB_SORT # setopt EXTENDED_GLOB # bindkey -M vicmd 'q' push-line # !NNN, history auto-completion after space bindkey -M viins ' ' magic-space # vim command mode with <CTRL><SPACE> bindkey '^ ' vi-cmd-mode # common vim bindings bindkey -a 'gg' beginning-of-buffer-or-history bindkey -a 'g~' vi-oper-swap-case bindkey -a G end-of-buffer-or-history # vim backspace doesn't stop where you started insert mode bindkey '^?' backward-delete-char bindkey '^H' backward-delete-char
# 'cd' to dir without typing the 'cd' command setopt AUTO_CD # Makes cd=pushd, 'popd' to go back setopt AUTO_PUSHD # Ignore multiple directories for the stack. setopt PUSHD_IGNORE_DUPS # 10 second wait if you do something that will delete everything. setopt RM_STAR_WAIT # Case insensitive globbing setopt NO_CASE_GLOB # setopt NUMERIC_GLOB_SORT # setopt EXTENDED_GLOB # Save lines in history that have leading spaces. (i.e copied/pasted to the term) unsetopt HIST_IGNORE_SPACE # bindkey -M vicmd 'q' push-line # !NNN, history auto-completion after space bindkey -M viins ' ' magic-space # vim command mode with <CTRL><SPACE> bindkey '^ ' vi-cmd-mode # common vim bindings bindkey -a 'gg' beginning-of-buffer-or-history bindkey -a 'g~' vi-oper-swap-case bindkey -a G end-of-buffer-or-history # vim backspace doesn't stop where you started insert mode bindkey '^?' backward-delete-char bindkey '^H' backward-delete-char
Add vim backup directory creation
#!/bin/bash curl -fLo ~/.local/share/nvim/site/autoload/plug.vim --create-dirs \ https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim mkdir -p "$HOME/.config/nvim/" cp init.vim "$HOME/.config/nvim/init.vim" nvim +PlugClean +qa && nvim +PlugInstall +UpdateRemotePlugins +qa && nvim +PlugUpdate +UpdateRemotePlugins +qa # Tweak operator color to get a pretty blue. sed -ie "s\\hi! link Operator Normal\\hi! link Operator GruvboxBlueBold\\g" ~/.config/nvim/plugged/gruvbox/colors/gruvbox.vim # Update ctrlp-funky Rust support. cp ctrlp/funky/ft/rust.vim ~/.config/nvim/plugged/ctrlp-funky/autoload/ctrlp/funky/ft/rust.vim cp ctags ~/.ctags
#!/bin/bash curl -fLo ~/.local/share/nvim/site/autoload/plug.vim --create-dirs \ https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim mkdir $HOME/.vim.backup mkdir -p "$HOME/.config/nvim/" cp init.vim "$HOME/.config/nvim/init.vim" nvim +PlugClean +qa && nvim +PlugInstall +UpdateRemotePlugins +qa && nvim +PlugUpdate +UpdateRemotePlugins +qa # Tweak operator color to get a pretty blue. sed -ie "s\\hi! link Operator Normal\\hi! link Operator GruvboxBlueBold\\g" ~/.config/nvim/plugged/gruvbox/colors/gruvbox.vim # Update ctrlp-funky Rust support. cp ctrlp/funky/ft/rust.vim ~/.config/nvim/plugged/ctrlp-funky/autoload/ctrlp/funky/ft/rust.vim cp ctags ~/.ctags
Add pin back to Python 3.7
#!/bin/bash set -x set -e CONDA_PATH=${1:-~/conda} wget -c https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh chmod a+x Miniconda3-latest-Linux-x86_64.sh if [ ! -d $CONDA_PATH -o ! -z "$CI" ]; then ./Miniconda3-latest-Linux-x86_64.sh -p $CONDA_PATH -b -f fi export PATH=$CONDA_PATH/bin:$PATH conda info conda config --set safety_checks disabled conda config --set channel_priority strict mkdir -p ~/.conda/pkg conda config --prepend pkgs_dirs ~/.conda/pkg conda config --show #echo "python==3.7" > $CONDA_PATH/conda-meta/pinned #echo "conda-build==3.14.0" >> $CONDA_PATH/conda-meta/pinned conda install -y python conda update -y conda conda install -y conda-build conda install -y conda-verify conda install -y ripgrep conda install -y anaconda-client conda install -y jinja2 conda update -y --all
#!/bin/bash set -x set -e CONDA_PATH=${1:-~/conda} wget -c https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh chmod a+x Miniconda3-latest-Linux-x86_64.sh if [ ! -d $CONDA_PATH -o ! -z "$CI" ]; then ./Miniconda3-latest-Linux-x86_64.sh -p $CONDA_PATH -b -f fi export PATH=$CONDA_PATH/bin:$PATH conda info conda config --set safety_checks disabled conda config --set channel_priority strict mkdir -p ~/.conda/pkg conda config --prepend pkgs_dirs ~/.conda/pkg conda config --show echo "python==3.7" > $CONDA_PATH/conda-meta/pinned #echo "conda-build==3.14.0" >> $CONDA_PATH/conda-meta/pinned conda install -y python conda update -y conda conda install -y conda-build conda install -y conda-verify conda install -y ripgrep conda install -y anaconda-client conda install -y jinja2 conda update -y --all
Revert "Try with older stack."
#!/bin/sh # # From https://github.com/commercialhaskell/stack/blob/master/.travis.yml set -eux travis_retry() { cmd=$* $cmd || (sleep 2 && $cmd) || (sleep 10 && $cmd) } fetch_stack_osx() { curl -skL https://www.stackage.org/stack/osx-x86_64 | tar xz --strip-components=1 --include '*/stack' -C ~/.local/bin; } fetch_stack_linux() { curl -sL https://github.com/commercialhaskell/stack/releases/download/v1.9.3/stack-1.9.3-linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'; } case "$BUILD" in stack) mkdir -p ~/.local/bin; if [ `uname` = "Darwin" ]; then travis_retry fetch_stack_osx else travis_retry fetch_stack_linux fi; travis_retry stack --no-terminal setup --verbose; ;; cabal) mkdir -p $HOME/.cabal cat > $HOME/.cabal/config <<EOF remote-repo: hackage.haskell.org:http://hackage.fpcomplete.com/ remote-repo-cache: $HOME/.cabal/packages jobs: \$ncpus EOF ;; esac
#!/bin/sh # # From https://github.com/commercialhaskell/stack/blob/master/.travis.yml set -eux travis_retry() { cmd=$* $cmd || (sleep 2 && $cmd) || (sleep 10 && $cmd) } fetch_stack_osx() { curl -skL https://www.stackage.org/stack/osx-x86_64 | tar xz --strip-components=1 --include '*/stack' -C ~/.local/bin; } fetch_stack_linux() { curl -sL https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'; } case "$BUILD" in stack) mkdir -p ~/.local/bin; if [ `uname` = "Darwin" ]; then travis_retry fetch_stack_osx else travis_retry fetch_stack_linux fi; travis_retry stack --no-terminal setup --verbose; ;; cabal) mkdir -p $HOME/.cabal cat > $HOME/.cabal/config <<EOF remote-repo: hackage.haskell.org:http://hackage.fpcomplete.com/ remote-repo-cache: $HOME/.cabal/packages jobs: \$ncpus EOF ;; esac
Throw non-zero exit status if tests fail
#!/bin/bash # This script runs the tests on Heroku CI git clone -b "$HEROKU_TEST_RUN_BRANCH" --single-branch https://github.com/SalesforceFoundation/MetaCI MetaCI_checkout cd MetaCI_checkout git reset --hard $HEROKU_TEST_RUN_COMMIT_VERSION export DJANGO_SETTINGS_MODULE=config.settings.test python manage.py test coveralls
#!/bin/bash # This script runs the tests on Heroku CI git clone -b "$HEROKU_TEST_RUN_BRANCH" --single-branch https://github.com/SalesforceFoundation/MetaCI MetaCI_checkout cd MetaCI_checkout git reset --hard $HEROKU_TEST_RUN_COMMIT_VERSION export DJANGO_SETTINGS_MODULE=config.settings.test python manage.py test exit_status=$? coveralls if [ "$exit_status" != "0" ]; then exit $exit_status fi
Use forked dill until changes make it upstream
#!/usr/bin/env bash set -e set -x shopt -s dotglob readonly name="dill" readonly ownership="dill Upstream <robot@adios2>" readonly subtree="thirdparty/dill/dill" readonly repo="https://github.com/GTkorvo/dill.git" readonly tag="master" readonly shortlog="true" readonly paths=" " extract_source () { git_archive } . "${BASH_SOURCE%/*}/../update-common.sh"
#!/usr/bin/env bash set -e set -x shopt -s dotglob readonly name="dill" readonly ownership="dill Upstream <robot@adios2>" readonly subtree="thirdparty/dill/dill" #readonly repo="https://github.com/GTkorvo/dill.git" #readonly tag="master" readonly repo="https://github.com/chuckatkins/dill.git" readonly tag="misc-cmake-updates" readonly shortlog="true" readonly paths=" " extract_source () { git_archive } . "${BASH_SOURCE%/*}/../update-common.sh"
Fix long shebang problem on travis
#!/usr/bin/env bash set -e if [ ! -f venv/bin/activate ]; then python3 -m venv venv fi . venv/bin/activate pip install wheel pip install -r "$1/requirements.txt" PORT=`$1/../get_open_port.py` $1/generator.py -t uptane --signature-encoding base64 -o vectors --cjson json-subset $1/server.py -t uptane --signature-encoding base64 -P $PORT & sleep 3 trap 'kill %1' EXIT if [ "$2" == "valgrind" ]; then valgrind --track-origins=yes --show-possibly-lost=no --error-exitcode=1 --suppressions=$1/../aktualizr.supp ./aktualizr_uptane_vector_tests vectors/vector-meta.json $PORT else ./aktualizr_uptane_vector_tests vectors/vector-meta.json $PORT fi RES=$? rm -rf vectors kill %1 trap - EXIT trap exit ${RES}
#!/usr/bin/env bash set -e if [ ! -f venv/bin/activate ]; then python3 -m venv venv fi . venv/bin/activate # use `python -m pip` to avoid problem with long shebangs on travis python -m pip install wheel python -m pip install -r "$1/requirements.txt" PORT=`$1/../get_open_port.py` $1/generator.py -t uptane --signature-encoding base64 -o vectors --cjson json-subset $1/server.py -t uptane --signature-encoding base64 -P $PORT & sleep 3 trap 'kill %1' EXIT if [ "$2" == "valgrind" ]; then valgrind --track-origins=yes --show-possibly-lost=no --error-exitcode=1 --suppressions=$1/../aktualizr.supp ./aktualizr_uptane_vector_tests vectors/vector-meta.json $PORT else ./aktualizr_uptane_vector_tests vectors/vector-meta.json $PORT fi RES=$? rm -rf vectors kill %1 trap - EXIT trap exit ${RES}
Upgrade Java 8 CI image to jdk8u282b08
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u275-b01/OpenJDK8U-jdk_x64_linux_hotspot_8u275b01.tar.gz" ;; java11) echo "https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.9.1%2B1/OpenJDK11U-jdk_x64_linux_hotspot_11.0.9.1_1.tar.gz" ;; java15) echo "https://github.com/AdoptOpenJDK/openjdk15-binaries/releases/download/jdk-15.0.1%2B9/OpenJDK15U-jdk_x64_linux_hotspot_15.0.1_9.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u282-b08/OpenJDK8U-jdk_x64_linux_hotspot_8u282b08.tar.gz" ;; java11) echo "https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.9.1%2B1/OpenJDK11U-jdk_x64_linux_hotspot_11.0.9.1_1.tar.gz" ;; java15) echo "https://github.com/AdoptOpenJDK/openjdk15-binaries/releases/download/jdk-15.0.1%2B9/OpenJDK15U-jdk_x64_linux_hotspot_15.0.1_9.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
Upgrade Java 11 version in CI image
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/bell-sw/Liberica/releases/download/8u333+2/bellsoft-jdk8u333+2-linux-amd64.tar.gz" ;; java11) echo "https://github.com/bell-sw/Liberica/releases/download/11.0.15+10/bellsoft-jdk11.0.15+10-linux-amd64.tar.gz" ;; java17) echo "https://github.com/bell-sw/Liberica/releases/download/17.0.3+7/bellsoft-jdk17.0.3+7-linux-amd64.tar.gz" ;; java18) echo "https://github.com/bell-sw/Liberica/releases/download/18.0.1+12/bellsoft-jdk18.0.1+12-linux-amd64.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/bell-sw/Liberica/releases/download/8u333+2/bellsoft-jdk8u333+2-linux-amd64.tar.gz" ;; java11) echo "https://github.com/bell-sw/Liberica/releases/download/11.0.15.1+2/bellsoft-jdk11.0.15.1+2-linux-amd64.tar.gz" ;; java17) echo "https://github.com/bell-sw/Liberica/releases/download/17.0.3+7/bellsoft-jdk17.0.3+7-linux-amd64.tar.gz" ;; java18) echo "https://github.com/bell-sw/Liberica/releases/download/18.0.1+12/bellsoft-jdk18.0.1+12-linux-amd64.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
Move gpg-agent env vars to a file so they are easily resourced
#! /usr/bin/env zsh # Allow using the docker socket without root. sudo chgrp docker /var/run/docker.sock # Start TMUX for all terminal access. function keep_tmux_up() { while true do # Start-up the GPG-Agent for managing SSH and GPG so they can be used across # all tmux panes/windows. killall gpg-agent eval $(gpg-agent --daemon --enable-ssh-support --disable-scdaemon) echo Starting TMUX session. tmux -2 new -d # Wait while session is alive. while tmux has-session -t 0 do echo TMUX session is up. Available to join. sleep 1 done echo TMUX session is down. done } keep_tmux_up & curl "https://github.com/$USER.keys" > "$HOME/.ssh/authorized_keys" sudo apt install -y openssh-server sudo mkdir -p /var/run/sshd sudo /usr/sbin/sshd -D
#! /usr/bin/env zsh # Allow using the docker socket without root. sudo chgrp docker /var/run/docker.sock # Start-up the GPG-Agent for managing SSH and GPG so they can be used across # all tmux panes/windows. gpg-agent --daemon --enable-ssh-support --disable-scdaemon > $HOME/.gpg-agent-env # Start TMUX for all terminal access. function keep_tmux_up() { while true do source $HOME/.gpg-agent-env echo Starting TMUX session. tmux -2 new -d # Wait while session is alive. while tmux has-session -t 0 do echo TMUX session is up. Available to join. sleep 1 done echo TMUX session is down. done } keep_tmux_up & curl "https://github.com/$USER.keys" > "$HOME/.ssh/authorized_keys" sudo apt install -y openssh-server sudo mkdir -p /var/run/sshd sudo /usr/sbin/sshd -D
Make avahi start even with uid conflict
#!/bin/bash if [ ! -z "${AFP_USER}" ]; then if [ ! -z "${AFP_UID}" ]; then cmd="$cmd --uid ${AFP_UID}" fi if [ ! -z "${AFP_GID}" ]; then cmd="$cmd --gid ${AFP_GID}" fi adduser $cmd --no-create-home --disabled-password --gecos '' "${AFP_USER}" if [ ! -z "${AFP_PASSWORD}" ]; then echo "${AFP_USER}:${AFP_PASSWORD}" | chpasswd fi fi if [ ! -d /media/share ]; then mkdir /media/share chown "${AFP_USER}" /media/share echo "use -v /my/dir/to/share:/media/share" > readme.txt fi sed -i'' -e "s,%USER%,${AFP_USER:-},g" /etc/afp.conf echo ---begin-afp.conf-- cat /etc/afp.conf echo ---end---afp.conf-- mkdir /var/run/dbus dbus-daemon --system if [ "${AVAHI}" == "1" ]; then avahi-daemon -D else echo "Skipping avahi daemon, enable with env variable AVAHI=1" fi; exec netatalk -d
#!/bin/bash if [ ! -z "${AFP_USER}" ]; then if [ ! -z "${AFP_UID}" ]; then cmd="$cmd --uid ${AFP_UID}" fi if [ ! -z "${AFP_GID}" ]; then cmd="$cmd --gid ${AFP_GID}" fi adduser $cmd --no-create-home --disabled-password --gecos '' "${AFP_USER}" if [ ! -z "${AFP_PASSWORD}" ]; then echo "${AFP_USER}:${AFP_PASSWORD}" | chpasswd fi fi if [ ! -d /media/share ]; then mkdir /media/share chown "${AFP_USER}" /media/share echo "use -v /my/dir/to/share:/media/share" > readme.txt fi sed -i'' -e "s,%USER%,${AFP_USER:-},g" /etc/afp.conf echo ---begin-afp.conf-- cat /etc/afp.conf echo ---end---afp.conf-- mkdir /var/run/dbus dbus-daemon --system if [ "${AVAHI}" == "1" ]; then sed -i '/rlimit-nproc/d' /etc/avahi/avahi-daemon.conf avahi-daemon -D else echo "Skipping avahi daemon, enable with env variable AVAHI=1" fi; exec netatalk -d
Update of launcher shell script.
rm -rf $HUDSON_HOME/plugins/checkstyle* mvn install cp -f target/*.hpi $HUDSON_HOME/plugins/ cd $HUDSON_HOME java -jar jenkins.war
rm -rf $HUDSON_HOME/plugins/checkstyle* mvn install || { echo "Build failed"; exit 1; } cp -f target/*.hpi $HUDSON_HOME/plugins/ cd $HUDSON_HOME java -jar jenkins.war
Add support for multiple tags.
#!/bin/sh # Fail if LOGGLY_AUTH_TOKEN is not set if [ -z "$LOGGLY_AUTH_TOKEN" ]; then echo "Missing \$LOGGLY_AUTH_TOKEN" exit 1 fi # Fail if LOGGLY_TAG is not set if [ -z "$LOGGLY_TAG" ]; then echo "Missing \$LOGGLY_TAG" exit 1 fi # Create spool directory mkdir -p /var/spool/rsyslog # Replace variables sed -i "s/LOGGLY_AUTH_TOKEN/$LOGGLY_AUTH_TOKEN/" /etc/rsyslog.conf sed -i "s/LOGGLY_TAG/$LOGGLY_TAG/" /etc/rsyslog.conf # Run RSyslog daemon exec /usr/sbin/rsyslogd -n
#!/bin/sh # Fail if LOGGLY_AUTH_TOKEN is not set if [ -z "$LOGGLY_AUTH_TOKEN" ]; then echo "Missing \$LOGGLY_AUTH_TOKEN" exit 1 fi # Fail if LOGGLY_TAG is not set if [ -z "$LOGGLY_TAG" ]; then echo "Missing \$LOGGLY_TAG" exit 1 fi # Create spool directory mkdir -p /var/spool/rsyslog # Expand multiple tags, in the format of tag1:tag2:tag3, into several tag arguments LOGGLY_TAG=$(echo $LOGGLY_TAG | sed 's/:/\\\\" tag=\\\\"/g') # Replace variables sed -i "s/LOGGLY_AUTH_TOKEN/$LOGGLY_AUTH_TOKEN/" /etc/rsyslog.conf sed -i "s/LOGGLY_TAG/$LOGGLY_TAG/" /etc/rsyslog.conf # Run RSyslog daemon exec /usr/sbin/rsyslogd -n
Update iso for 12.04 to 12.04.1
#!/bin/bash # This file is sourced by build_crowbar.sh when you want to build Crowbar # using Ubuntu 10.10 as the base OS. It includes all Ubuntu 10.10 specific # build routines. # OS information for the OS we are building crowbar on to. OS=ubuntu OS_VERSION=12.04 OS_TOKEN="$OS-$OS_VERSION" OS_CODENAME=precise ISO=ubuntu-12.04-server-amd64.iso # uncomment to use the daily beta build, if you have it... #ISO=precise-server-amd64.iso . "$CROWBAR_DIR/ubuntu-common/build_lib.sh"
#!/bin/bash # This file is sourced by build_crowbar.sh when you want to build Crowbar # using Ubuntu 10.10 as the base OS. It includes all Ubuntu 10.10 specific # build routines. # OS information for the OS we are building crowbar on to. OS=ubuntu OS_VERSION=12.04 OS_TOKEN="$OS-$OS_VERSION" OS_CODENAME=precise ISO=ubuntu-12.04.1-server-amd64.iso # uncomment to use the daily beta build, if you have it... #ISO=precise-server-amd64.iso . "$CROWBAR_DIR/ubuntu-common/build_lib.sh"
Return non-zero exit code on failure
#!/bin/bash go get github.com/mitchellh/gox go get github.com/tcnksm/ghr export APPNAME="gnatsd" export OSARCH="linux/386 linux/amd64 linux/arm darwin/amd64 windows/386 windows/amd64" export DIRS="linux-386 linux-amd64 linux-arm darwin-amd64 windows-386 windows-amd64" export OUTDIR="pkg" # If we have an arg, assume its a version tag and rename as appropriate. if [[ -n $1 ]]; then export APPNAME=$APPNAME-$1 fi env CGO_ENABLED=0 gox -osarch="$OSARCH" -ldflags="-s -w" -output "$OUTDIR/$APPNAME-{{.OS}}-{{.Arch}}/gnatsd" for dir in $DIRS; do \ (cp README.md $OUTDIR/$APPNAME-$dir/README.md) ;\ (cp LICENSE $OUTDIR/$APPNAME-$dir/LICENSE) ;\ (cd $OUTDIR && zip -q $APPNAME-$dir.zip -r $APPNAME-$dir) ;\ echo "make $OUTDIR/$APPNAME-$dir.zip" ;\ done
#!/bin/bash set -e go get github.com/mitchellh/gox go get github.com/tcnksm/ghr export APPNAME="gnatsd" export OSARCH="linux/386 linux/amd64 linux/arm darwin/amd64 windows/386 windows/amd64" export DIRS="linux-386 linux-amd64 linux-arm darwin-amd64 windows-386 windows-amd64" export OUTDIR="pkg" # If we have an arg, assume its a version tag and rename as appropriate. if [[ -n $1 ]]; then export APPNAME=$APPNAME-$1 fi env CGO_ENABLED=0 gox -osarch="$OSARCH" -ldflags="-s -w" -output "$OUTDIR/$APPNAME-{{.OS}}-{{.Arch}}/gnatsd" for dir in $DIRS; do \ (cp README.md $OUTDIR/$APPNAME-$dir/README.md) ;\ (cp LICENSE $OUTDIR/$APPNAME-$dir/LICENSE) ;\ (cd $OUTDIR && zip -q $APPNAME-$dir.zip -r $APPNAME-$dir) ;\ echo "make $OUTDIR/$APPNAME-$dir.zip" ;\ done
Make it more independent of python
#!/bin/bash # Created by Andre Anjos <andre.dos.anjos@cern.ch> # Sex 11 Abr 2008 14:39:42 CEST if [ $# = 0 ]; then echo "usage: $0 python-executable-name" exit 1 fi function replace () { #1. what, #2. newvalue, #3. file echo "Changing file $3..."; sed -i -e "s#^$1\(\s*\)=\(\s*\).\+#$1\1=\2$2#" $3 } for f in bootstrap.sh Makefile; do cp $f $f~ replace PYTHON python2.5 $f replace BASEDIR $PWD $f done for f in stuff/Makefile; do cp $f $f~ replace BASEDIR $PWD/stuff $f done for f in stuff/settings.py; do cp $f $f~ replace BASEDIR "'$PWD/stuff'" $f replace DATABASE "'$PWD/db.sql3'" $f replace MEDIA_ROOT "'$PWD/media'" $f done
#!/bin/bash # Created by Andre Anjos <andre.dos.anjos@cern.ch> # Sex 11 Abr 2008 14:39:42 CEST if [ $# = 0 ]; then echo "usage: $0 python-executable-name" exit 1 fi function replace () { #1. what, #2. newvalue, #3. file echo "Changing file $3..."; sed -i -e "s#^$1\(\s*\)=\(\s*\).\+#$1\1=\2$2#" $3 } for f in bootstrap.sh Makefile; do cp $f $f~ replace PYTHON $1 $f replace BASEDIR $PWD $f done for f in stuff/Makefile; do cp $f $f~ replace BASEDIR $PWD $f replace PYTHON $1 $f done for f in stuff/settings.py; do cp $f $f~ replace BASEDIR "'$PWD/stuff'" $f replace DATABASE "'$PWD/db.sql3'" $f replace MEDIA_ROOT "'$PWD/media'" $f done
Check that the LLVM source directory exists before changing to it.
#!/bin/sh LLVM_PATH=`llvm-config --src-root` LIBOBJC_PATH=`pwd` if [ x$LLVM_PATH != x ] ; then cd $LLVM_PATH cd lib/Transforms if [ ! -d GNURuntime ] ; then mkdir GNURuntime fi cd GNURuntime for I in `ls $LIBOBJC_PATH/opts/` ; do if [ ! $I -nt $LIBOBJC_PATH/opts/$I ] ; then cp $LIBOBJC_PATH/opts/$I . fi done $1 $2 cd .. fi
#!/bin/sh LLVM_PATH=`llvm-config --src-root` LIBOBJC_PATH=`pwd` if [ x$LLVM_PATH != x ] ; then if [ -d $LLVM_PATH ] ; then cd $LLVM_PATH cd lib/Transforms if [ ! -d GNURuntime ] ; then mkdir GNURuntime fi cd GNURuntime for I in `ls $LIBOBJC_PATH/opts/` ; do if [ ! $I -nt $LIBOBJC_PATH/opts/$I ] ; then cp $LIBOBJC_PATH/opts/$I . fi done $1 $2 cd .. fi fi
Update class path for new core/agent structure. Should probably make this dynamic and maybe based on the .m2 repository instead of the local libraries.
#!/bin/bash CLASSPATH=$CLASSPATH:~/.m2/repository/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar CLASSPATH=$CLASSPATH:~/.m2/repository/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar CLASSPATH=$CLASSPATH:~/.m2/repository/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar CLASSPATH=$CLASSPATH:~/.m2/repository/org/slf4j/slf4j-simple/1.7.5/slf4j-simple-1.7.5.jar CLASSPATH=$CLASSPATH:~/.m2/repository/com/jolbox/bonecp/0.8.0-rc3/bonecp-0.8.0-rc3.jar CLASSPATH=$CLASSPATH:~/.m2/repository/com/google/guava/guava/15.0/guava-15.0.jar CLASSPATH=$CLASSPATH:$JAVA_HOME/lib/tools.jar CLASSPATH=$CLASSPATH:./target/HikariCP-1.1.4-SNAPSHOT.jar CLASSPATH=$CLASSPATH:./target/test-classes java -classpath $CLASSPATH \ -server -XX:+UseParallelGC -Xss256k -Xms128m -Xmx256m -Dorg.slf4j.simpleLogger.defaultLogLevel=info com.zaxxer.hikari.performance.Benchmark1 $1 $2 $3
#!/bin/bash CLASSPATH=$CLASSPATH:~/.m2/repository/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar CLASSPATH=$CLASSPATH:~/.m2/repository/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar CLASSPATH=$CLASSPATH:~/.m2/repository/org/slf4j/slf4j-simple/1.7.5/slf4j-simple-1.7.5.jar CLASSPATH=$CLASSPATH:~/.m2/repository/com/jolbox/bonecp/0.8.0-rc3/bonecp-0.8.0-rc3.jar CLASSPATH=$CLASSPATH:~/.m2/repository/com/google/guava/guava/15.0/guava-15.0.jar CLASSPATH=$CLASSPATH:$JAVA_HOME/lib/tools.jar CLASSPATH=$CLASSPATH:./core/target/HikariCP-1.1.9-SNAPSHOT.jar CLASSPATH=$CLASSPATH:./agent/target/HikariCP-agent-1.1.9-SNAPSHOT.jar CLASSPATH=$CLASSPATH:./core/target/test-classes java -classpath $CLASSPATH \ -server -XX:+UseParallelGC -Xss256k -Xms128m -Xmx256m -Dorg.slf4j.simpleLogger.defaultLogLevel=info com.zaxxer.hikari.performance.Benchmark1 $1 $2 $3
Make Solr core directories as part of Travis install
#!/bin/sh sudo ./scripts/apply_django_patches /home/vagrant/virtualenv/python2.7/lib/python2.7/site-packages/django createdb -T template_postgis atlas_travis -U postgres fab --set run_local=True install_solr install_solr_2155 fab --set run_local=True install_jetty_script install_jetty_config sudo cp config/travis/solr/solr.xml /usr/local/share/solr/multicore/ python manage.py build_solr_schema --settings=settings.travis > config/travis/solr/schema.xml #sudo sed -i.bak -r -e "s/#JDK_DIRS=.*/JDK_DIRS=\"\/usr\/lib\/jvm\/java-6-openjdk-amd64 \/usr\/lib\/jvm\/java-6-openjdk-i386\"/g" /etc/default/jetty fab --set run_local=True install_solr_config:instance=travis,solr_multicore=true,project_root=`pwd` sudo service jetty restart # If running browser tests, uncomment these lines #export DISPLAY=:99.0 #sh -e /etc/init.d/xvfb start
#!/bin/sh sudo ./scripts/apply_django_patches /home/vagrant/virtualenv/python2.7/lib/python2.7/site-packages/django createdb -T template_postgis atlas_travis -U postgres fab --set run_local=True install_solr make_solr_conf_dir make_solr_data_dir make_solr_lib_dir install_solr_2155 fab --set run_local=True install_jetty_script install_jetty_config sudo cp config/travis/solr/solr.xml /usr/local/share/solr/multicore/ python manage.py build_solr_schema --settings=settings.travis > config/travis/solr/schema.xml #sudo sed -i.bak -r -e "s/#JDK_DIRS=.*/JDK_DIRS=\"\/usr\/lib\/jvm\/java-6-openjdk-amd64 \/usr\/lib\/jvm\/java-6-openjdk-i386\"/g" /etc/default/jetty fab --set run_local=True install_solr_config:instance=travis,solr_multicore=true,project_root=`pwd` sudo service jetty restart # If running browser tests, uncomment these lines #export DISPLAY=:99.0 #sh -e /etc/init.d/xvfb start
Reduce permissions on ~/.ssh/config to the minimum necessary
#!/bin/bash # travis-before-install.sh set -ev # Decrypt the private key openssl aes-256-cbc -K $encrypted_278829cc3907_key -iv $encrypted_278829cc3907_iv -in id_rsa.enc -out ~/.ssh/id_rsa.github -d # Make the private key the default for ssh authentication for request to github.com chmod 600 ~/.ssh/id_rsa.github cat <<EOF>> ~/.ssh/config Host * StrictHostKeyChecking no Host github.com Hostname github.com IdentityFile ~/.ssh/id_rsa.github IdentitiesOnly yes StrictHostKeyChecking no EOF
#!/bin/bash # travis-before-install.sh set -ev # Decrypt the private key openssl aes-256-cbc -K $encrypted_278829cc3907_key -iv $encrypted_278829cc3907_iv -in id_rsa.enc -out ~/.ssh/id_rsa.github -d # Make the private key the default for ssh authentication for request to github.com chmod 600 ~/.ssh/id_rsa.github cat <<EOF>> ~/.ssh/config Host * StrictHostKeyChecking no Host github.com Hostname github.com IdentityFile ~/.ssh/id_rsa.github IdentitiesOnly yes StrictHostKeyChecking no EOF chmod 600 ~/.ssh/config
Work around missing dependencies in travis vm
#!/bin/sh -ev which brew || /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" brew doctor brew update brew tap caskroom/cask brew install bash-completion brew install doxygen brew install macvim brew install stow brew install tmux brew cask install firefox brew cask install google-chrome brew cask install google-drive brew cask install keepassx brew cask install vagrant brew cask install virtualbox brew cask install vlc brew cask install skype
#!/bin/sh -ev which brew || /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" test "$TRAVIS" = true || brew install doxygen brew doctor brew update brew tap caskroom/cask brew install bash-completion brew install macvim brew install stow brew install tmux brew cask install firefox brew cask install google-chrome brew cask install google-drive brew cask install keepassx brew cask install vagrant brew cask install virtualbox brew cask install vlc brew cask install skype
Fix CI script to not always success
#!/bin/bash function log() { d=$(date +'%Y-%m-%d %H:%M:%S') echo $d" "$1 } log "Running Copybara tests" log "Fetching dependencies" # Mercurial does not have an up-to-date .deb package # The official release needs to be installed with pip. apt-get -y install python-pip pip install mercurial log "Extracting Bazel" # Only because first time it extracts the installation bazel version echo "-----------------------------------" echo "Versions:" hg --version | grep "(version" | sed 's/.*[(]version \([^ ]*\)[)].*/Mercurial: \1/' git --version | sed 's/git version/Git:/' bazel version | grep "Build label" | sed 's/Build label:/Bazel:/' echo "-----------------------------------" log "Running Bazel" bazel "$@" log "Done"
#!/bin/bash -e function log() { d=$(date +'%Y-%m-%d %H:%M:%S') echo $d" "$1 } log "Running Copybara tests" log "Fetching dependencies" # Mercurial does not have an up-to-date .deb package # The official release needs to be installed with pip. apt-get -y install python-pip pip install mercurial log "Extracting Bazel" # Only because first time it extracts the installation bazel version echo "-----------------------------------" echo "Versions:" hg --version | grep "(version" | sed 's/.*[(]version \([^ ]*\)[)].*/Mercurial: \1/' git --version | sed 's/git version/Git:/' bazel version | grep "Build label" | sed 's/Build label:/Bazel:/' echo "-----------------------------------" log "Running Bazel" bazel "$@" log "Done"
Add support for restoring uncompressed DB snapshots
#!/bin/sh # This a script for restoring the local env-logger database from a backup snapshot. set -e if [ $# -ne 2 ]; then cat <<EOF This a script for restoring the local env-logger database from a backup snapshot. Usage: $0 <DB name> <DB snapshot name> Example: $0 env_logger db_snapshot.sql EOF exit 1 fi db_name=$1 snapshot_name=$2 echo "Truncating tables" psql "${db_name}" <<EOF TRUNCATE TABLE users CASCADE; TRUNCATE TABLE observations CASCADE; TRUNCATE TABLE yardcam_image; EOF echo "Adding new values" # Pressure data has not been collected from the beginning and thus contains # NULL values causing restore to fail psql "${db_name}" -c 'ALTER TABLE weather_data ALTER pressure DROP NOT NULL;' psql "${db_name}" < "${snapshot_name}"
#!/bin/sh # This a script for restoring the local env-logger database from a backup snapshot. set -e if [ $# -ne 2 ]; then cat <<EOF This a script for restoring the local env-logger database from a backup snapshot. Usage: $0 <DB name> <DB snapshot name> Example: $0 env_logger db_snapshot.sql EOF exit 1 fi db_name=$1 snapshot_name=$2 file_out=$(file -ib ${snapshot_name}) if [ $(echo "${file_out}"|grep -c xz) -eq 1 ]; then echo "Uncompressed snapshot, decompressing before restore" unxz ${snapshot_name} snapshot_name=$(echo ${snapshot_name}|sed 's/.xz//') fi echo "Truncating tables" psql "${db_name}" <<EOF TRUNCATE TABLE users CASCADE; TRUNCATE TABLE observations CASCADE; TRUNCATE TABLE yardcam_image; EOF echo "Adding new values" # Pressure data has not been collected from the beginning and thus contains # NULL values causing restore to fail psql "${db_name}" -c 'ALTER TABLE weather_data ALTER pressure DROP NOT NULL;' psql "${db_name}" < "${snapshot_name}"
Remove World and othe stuff from assets. Download countries with search.
#!/bin/bash set -x -u SRC=../../data DST=../../android/assets # Remove old links rm -rf $DST mkdir $DST files=(about.html basic_ldpi.skn basic_mdpi.skn basic_hdpi.skn basic_xhdpi.skn categories.txt classificator.txt types.txt fonts_blacklist.txt fonts_whitelist.txt languages.txt \ symbols_ldpi.png symbols_mdpi.png symbols_hdpi.png symbols_xhdpi.png unicode_blocks.txt \ visibility.txt drules_proto.txt drules_proto.bin external_resources.txt packed_polygons.bin \ WorldCoasts.mwm 01_dejavusans.ttf 02_wqy-microhei.ttf 03_jomolhari-id-a3d.ttf 04_padauk.ttf 05_khmeros.ttf 06_code2000.ttf) for item in ${files[*]} do ln -s $SRC/$item $DST/$item done # Separate case for World and countries list files without search support ln -s $SRC/countries.txt.nosearch $DST/countries.txt ln -s $SRC/World.mwm.nosearch $DST/World.mwm # Call flag symlinks generation ./update_flags.sh
#!/bin/bash set -x -u SRC=../../data DST=../../android/assets # Remove old links rm -rf $DST mkdir $DST files=(about.html basic_ldpi.skn basic_mdpi.skn basic_hdpi.skn basic_xhdpi.skn categories.txt classificator.txt types.txt fonts_blacklist.txt fonts_whitelist.txt languages.txt \ symbols_ldpi.png symbols_mdpi.png symbols_hdpi.png symbols_xhdpi.png unicode_blocks.txt \ visibility.txt drules_proto.txt drules_proto.bin external_resources.txt packed_polygons.bin) for item in ${files[*]} do ln -s $SRC/$item $DST/$item done # Separate case for World and countries list files without search support ln -s $SRC/countries.txt $DST/countries.txt # Call flag symlinks generation ./update_flags.sh
Remove growlnotify from required packages
#!/usr/bin/env zsh # Use Homebrew to install useful executables set -A base \ git git-extras willgit growlnotify ruby-build hub jsl ctags lorem graphviz \ postgresql mongodb redis memcached node rlwrap couchdb subversion wget tree \ vimpager z TRAPINT () { echo "Exiting..." exit } brew install $base brew install macvim --override-system-vim # Install PythonBrew # curl -kL http://xrl.us/pythonbrewinstall | bash # Install NPM # curl http://npmjs.org/install.sh | sh # Install rbenv # git clone git://github.com/sstephenson/rbenv.git $HOME/.rbenv
#!/usr/bin/env zsh # Use Homebrew to install useful executables set -A base \ git git-extras willgit ruby-build hub jsl ctags lorem graphviz postgresql \ mongodb redis memcached node rlwrap couchdb subversion wget tree \ vimpager z TRAPINT () { echo "Exiting..." exit } brew install $base brew install macvim --override-system-vim # Install PythonBrew # curl -kL http://xrl.us/pythonbrewinstall | bash # Install NPM # curl http://npmjs.org/install.sh | sh # Install rbenv # git clone git://github.com/sstephenson/rbenv.git $HOME/.rbenv
Update to use variables for vscode paths
#!/usr/bin/env bash # # Description: configures installed gui apps # if [ -n "$DEBUG" ]; then echo "$0: Setting bash option -x for debug" PS4='+($(basename ${BASH_SOURCE}):${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }' set -x fi # Exit on error set -e; set -o pipefail PFX=${PFX:-==>} REPO_DIR=${REPO_DIR:?} HOME=${HOME:?} # # Main # function configureVscode() { echo "$PFX Configuring vscode user settings" ln -sf $DOTFILES_DIR/vscode/settings.json $HOME/Library/Application\ Support/Code/User/settings.json ln -sf $DOTFILES_DIR/vscode/keybindings.json $HOME/Library/Application\ Support/Code/User/keybindings.json ln -sfn $DOTFILES_DIR/vscode/snippets $HOME/Library/Application\ Support/Code/User/snippets } configureVscode exit 0
#!/usr/bin/env bash
#
# Description: configures installed gui apps
#

if [ -n "$DEBUG" ]; then
  echo "$0: Setting bash option -x for debug"
  PS4='+($(basename ${BASH_SOURCE}):${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }'
  set -x
fi

# Exit on error
set -e; set -o pipefail

PFX=${PFX:-==>}
REPO_DIR=${REPO_DIR:?}
HOME=${HOME:?}
VSCODE_DOTFILES_DIR=$REPO_DIR/vscode
VSCODE_USER_SETTINGS_DIR="$HOME/Library/Application Support/Code/User"

#
# Main
#

function configureVscode() {
  echo "$PFX Configuring vscode user settings"
  ln -sf "$VSCODE_DOTFILES_DIR/settings.json" "$VSCODE_USER_SETTINGS_DIR/settings.json"
  ln -sf "$VSCODE_DOTFILES_DIR/keybindings.json" "$VSCODE_USER_SETTINGS_DIR/keybindings.json"
  ln -sfn "$VSCODE_DOTFILES_DIR/snippets" "$VSCODE_USER_SETTINGS_DIR/snippets"
}

configureVscode

exit 0
Write vmkite-info for mac builds
#!/bin/bash set -eux PROVISION_DIR="$HOME" install_buildkite() { echo "Installing buildkite-agent" /usr/local/bin/brew tap buildkite/buildkite /usr/local/bin/brew install --devel buildkite-agent cp /tmp/buildkite-hooks/* /usr/local/etc/buildkite-agent/hooks/ rm -rf /tmp/buildkite-hooks } install_launchd_daemon() { local script="vmkite-buildkite-agent.sh" local plist="com.macstadium.vmkite-buildkite-agent.plist" echo "Installing launchd service" sudo cp "${PROVISION_DIR}/$script" "/usr/local/bin/$script" sudo cp "${PROVISION_DIR}/$plist" "/Library/LaunchDaemons/$plist" sudo chmod 0755 "/usr/local/bin/$script" sudo launchctl load "/Library/LaunchDaemons/$plist" } install_utils() { /usr/local/bin/brew install awscli jq } install_utils install_buildkite install_launchd_daemon
#!/bin/bash set -eux PROVISION_DIR="$HOME" install_buildkite() { echo "Installing buildkite-agent" /usr/local/bin/brew tap buildkite/buildkite /usr/local/bin/brew install --devel buildkite-agent cp /tmp/buildkite-hooks/* /usr/local/etc/buildkite-agent/hooks/ rm -rf /tmp/buildkite-hooks } install_launchd_daemon() { local script="vmkite-buildkite-agent.sh" local plist="com.macstadium.vmkite-buildkite-agent.plist" echo "Installing launchd service" sudo cp "${PROVISION_DIR}/$script" "/usr/local/bin/$script" sudo cp "${PROVISION_DIR}/$plist" "/Library/LaunchDaemons/$plist" sudo chmod 0755 "/usr/local/bin/$script" sudo launchctl load "/Library/LaunchDaemons/$plist" } install_utils() { /usr/local/bin/brew install awscli jq } install_utils install_buildkite install_launchd_daemon # Write a version file so we can track which build this refers to cat << EOF > /etc/vmkite-info BUILDKITE_VERSION=$(buildkite-agent --version) BUILDKITE_BUILD_NUMBER=$BUILDKITE_BUILD_NUMBER BUILDKITE_BRANCH=$BUILDKITE_BRANCH BUILDKITE_COMMIT=$BUILDKITE_COMMIT EOF
Change ccu alias to not use submodules. Put that in new ccus alias
#------------------------------------------------------------------------------- # # ios/carthage.zsh # Carthage aliases # #------------------------------------------------------------------------------- alias carthage-clean='rm -rf ~/Library/Caches/org.carthage.CarthageKit' # Bootstrap alias cbootios='carthage bootstrap --no-use-binaries --platform iOS' # Checkout / Update alias cco='carthage checkout --no-use-binaries --use-submodules --use-ssh' alias ccu='carthage update --no-use-binaries --use-submodules --use-ssh --no-build' # Build alias cbios="carthage build --platform iOS" alias cbmac="carthage build --platform Mac" alias cball="carthage build --platform all" # Verbose variants alias ccov='cco --verbose' alias ccuv='ccu --verbose' alias cbiosv="cbios --verbose" alias cbmacv="cbmac --verbose" alias cballv="cball --verbose"
#------------------------------------------------------------------------------- # # ios/carthage.zsh # Carthage aliases # #------------------------------------------------------------------------------- alias carthage-clean='rm -rf ~/Library/Caches/org.carthage.CarthageKit' # Bootstrap alias cbootios='carthage bootstrap --no-use-binaries --platform iOS' # Checkout / Update alias cco='carthage checkout --no-use-binaries --use-submodules --use-ssh' alias ccu='carthage update --no-use-binaries --no-build' alias ccus='ccu --use-submodules --use-ssh' # Build alias cbios="carthage build --platform iOS" alias cbmac="carthage build --platform Mac" alias cball="carthage build --platform all" # Verbose variants alias ccov='cco --verbose' alias ccuv='ccu --verbose' alias cbiosv="cbios --verbose" alias cbmacv="cbmac --verbose" alias cballv="cball --verbose"
Upgrade Java 18 version in CI image
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/bell-sw/Liberica/releases/download/8u333+2/bellsoft-jdk8u333+2-linux-amd64.tar.gz" ;; java11) echo "https://github.com/bell-sw/Liberica/releases/download/11.0.15.1+2/bellsoft-jdk11.0.15.1+2-linux-amd64.tar.gz" ;; java17) echo "https://github.com/bell-sw/Liberica/releases/download/17.0.3.1+2/bellsoft-jdk17.0.3.1+2-linux-amd64.tar.gz" ;; java18) echo "https://github.com/adoptium/temurin18-binaries/releases/download/jdk-18.0.1%2B10/OpenJDK18U-jdk_x64_linux_hotspot_18.0.1_10.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/bell-sw/Liberica/releases/download/8u333+2/bellsoft-jdk8u333+2-linux-amd64.tar.gz" ;; java11) echo "https://github.com/bell-sw/Liberica/releases/download/11.0.15.1+2/bellsoft-jdk11.0.15.1+2-linux-amd64.tar.gz" ;; java17) echo "https://github.com/bell-sw/Liberica/releases/download/17.0.3.1+2/bellsoft-jdk17.0.3.1+2-linux-amd64.tar.gz" ;; java18) echo "https://github.com/bell-sw/Liberica/releases/download/18.0.1.1+2/bellsoft-jdk18.0.1.1+2-linux-amd64.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
Add || exit 1 to cd
#!/bin/bash # Update repository cd /var/www/website/ git checkout master git pull # Update python packages virtualenvlocation=$(which virtualenvwrapper.sh) # shellcheck source=/dev/null source "$virtualenvlocation" workon albertyw.com pip install -r requirements.txt # Configure settings cd albertyw.com ln -sf .env.production .env # Restart services sudo service nginx restart sudo service uwsgi restart
#!/bin/bash # Update repository cd /var/www/website/ || exit 1 git checkout master git pull # Update python packages virtualenvlocation=$(which virtualenvwrapper.sh) # shellcheck source=/dev/null source "$virtualenvlocation" workon albertyw.com pip install -r requirements.txt # Configure settings cd albertyw.com || exit 1 ln -sf .env.production .env # Restart services sudo service nginx restart sudo service uwsgi restart
Add confirmation logic in recipe making
#!/usr/bin/env bash # HOMERECIPE_SCRIPTS and HOMERECIPE_DIR need to be defined # Assumed "cook" is called to run this script [[ ! -z $HOMERECIPE_DIR ]] || { echo "\$HOMERECIPE_DIR is not found. Process exiting."; exit 1; } # Get brew bundle (includes cask) brew bundle dump --force --file="${HOMERECIPE_DIR}"/recipes/brew-recipe # brew bundle --file=recipes/brew-recipe # Get npm global recipe # TODO: This is a workaround - there must be a better way... command ls "$(npm root -g)" > "${HOMERECIPE_DIR}"/recipes/npm-recipe # for i in `cat recipes/npm-recipe`; do npm install -g $i; done
#!/usr/bin/env bash # HOMERECIPE_SCRIPTS and HOMERECIPE_DIR need to be defined # Assumed "cook" is called to run this script [[ ! -z $HOMERECIPE_DIR ]] || { echo "\$HOMERECIPE_DIR is not found. Process exiting."; exit 1; } confirm() { # call with a prompt string or use a default read -r -p "${1:-Are you sure? [y/N]} " response case "$response" in [yY][eE][sS]|[yY]) true ;; *) false ;; esac } confirm "All the existing recipes and dotfiles will be overridden, are you sure? [y/N]" || { echo "Recipe making cancelled"; exit; } # Get brew bundle (includes cask) brew bundle dump --force --file="${HOMERECIPE_DIR}"/recipes/brew-recipe # brew bundle --file=recipes/brew-recipe # Get npm global recipe # TODO: This is a workaround - there must be a better way... command ls "$(npm root -g)" > "${HOMERECIPE_DIR}"/recipes/npm-recipe # for i in `cat recipes/npm-recipe`; do npm install -g $i; done
Add Suggest Charge app to initial script
#!/bin/bash cp -a OSX_Battery_Charge_Detection.sh /usr/local/bin/ cp -a ChargeCompleteNotification.app /Applications/ cp -a com.ITXiaoPang.OSX_Battery_Charge_Detection.plist ~/Library/LaunchAgents launchctl load ~/Library/LaunchAgents/com.ITXiaoPang.OSX_Battery_Charge_Detection.plist
#!/bin/bash cp -a OSX_Battery_Charge_Detection.sh /usr/local/bin/ cp -a ChargeCompleteNotification.app /Applications/ cp -a SuggestChargeNotification.app /Applications/ cp -a com.ITXiaoPang.OSX_Battery_Charge_Detection.plist ~/Library/LaunchAgents launchctl load ~/Library/LaunchAgents/com.ITXiaoPang.OSX_Battery_Charge_Detection.plist
Improve output when format check fails
#!/bin/sh set -eo pipefail VIOLATING_FILES=$(goimports -local github.com/sapcc/kubernikus -l $@ | sed /generated/d) if [ -n "$VIOLATING_FILES" ]; then echo "Go code is not formatted:" goimports -e -d $@ exit 1 fi
#!/bin/sh set -eo pipefail VIOLATING_FILES=$(goimports -local github.com/sapcc/kubernikus -l $@ | sed /generated/d) if [ -n "$VIOLATING_FILES" ]; then echo "Go code is not formatted in these files:" echo "$VIOLATING_FILES" echo "Offending lines:" goimports -local github.com/sapcc/kubernikus -e -d $VIOLATING_FILES exit 1 fi
Allow for Windows protoc download
#!/usr/bin/env bash # This downloads and installs the protobuf compiler depending on the platform if [ "$(uname)" == "Darwin" ] then # Under Mac OS X platform echo 'Downloading MacOs protobuf compiler' curl https://github.com/google/protobuf/releases/download/v3.8.0/protoc-3.8.0-osx-x86_64.zip -o protoc.zip -L else [ "$(expr substr $(uname -s) 1 5)" == "Linux" ] # Under GNU/Linux platform echo 'Downloading Linux protobuf compiler' curl https://github.com/google/protobuf/releases/download/v3.8.0/protoc-3.8.0-linux-x86_64.zip -o protoc.zip -L fi
#!/usr/bin/env bash # This downloads and installs the protobuf compiler depending on the platform if [ "$(uname)" == "Darwin" ]; then # Under Mac OS X platform echo 'Downloading MacOs protobuf compiler' curl https://github.com/google/protobuf/releases/download/v3.8.0/protoc-3.8.0-osx-x86_64.zip -o protoc.zip -L elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then # Under GNU/Linux platform echo 'Downloading Linux protobuf compiler' curl https://github.com/google/protobuf/releases/download/v3.8.0/protoc-3.8.0-linux-x86_64.zip -o protoc.zip -L elif [ "$(expr substr $(uname -s) 1 5)" == "MINGW" ]; then # Under Windows platform echo 'Downloading Windows protobuf compiler' curl https://github.com/google/protobuf/releases/download/v3.8.0/protoc-3.8.0-win64.zip -o protoc.zip -L fi
Add missing role on Windows
#!/bin/bash # http://www.thisprogrammingthing.com/2015/using-ansible-with-vagrant-and-windows/ if [[ ! -f /usr/bin/ansible-playbook ]]; then yum install -y ansible fi ansible-galaxy install goozbach.EPEL ansible-galaxy install geerlingguy.nginx ansible-playbook --inventory="localhost," -c local /provision/playbook.yml
#!/bin/bash # http://www.thisprogrammingthing.com/2015/using-ansible-with-vagrant-and-windows/ if [[ ! -f /usr/bin/ansible-playbook ]]; then yum install -y ansible fi ansible-galaxy install goozbach.EPEL ansible-galaxy install geerlingguy.nginx ansible-galaxy install bertvv.mariadb ansible-playbook --inventory="localhost," -c local /provision/playbook.yml
Change compose alias to support v2
# OS X shortcuts alias rm=trash # brew install trash # Git alias gd='git diff' alias gu='gitup' alias git-delete-squashed='git checkout -q master && git for-each-ref refs/heads/ "--format=%(refname:short)" | while read branch; do mergeBase=$(git merge-base master $branch) && [[ $(git cherry master $(git commit-tree $(git rev-parse $branch^{tree}) -p $mergeBase -m _)) == "-"* ]] && git branch -D $branch; done' alias gpu='git push -u' # gt is alias for gittower . gt() { gittower $(git rev-parse --show-toplevel) } # Change theme of Terminal.app tabc() { NAME="${1:-Basic}" osascript -e "tell application \"Terminal\" to set current settings of front window to settings set \"$NAME\"" } # Change to Danger theme when executing ssh ssh() { tabc Pro /usr/bin/ssh $* tabc } # Docker alias compose='docker-compose' alias test-compose='compose -f /Users/mario/Code/yodel/yodel/docker-compose.test.yml' alias bu='bundle update' alias bl='bundle list' evalenv() { if [ "$1" != "" ]; then eval $(awk '{printf "export %s\n", $0}' $1) else eval $(awk '{printf "export %s\n", $0}' .env) fi }
# OS X shortcuts alias rm=trash # brew install trash # Git alias gd='git diff' alias gu='gitup' alias git-delete-squashed='git checkout -q master && git for-each-ref refs/heads/ "--format=%(refname:short)" | while read branch; do mergeBase=$(git merge-base master $branch) && [[ $(git cherry master $(git commit-tree $(git rev-parse $branch^{tree}) -p $mergeBase -m _)) == "-"* ]] && git branch -D $branch; done' alias gpu='git push -u' # gt is alias for gittower . gt() { gittower $(git rev-parse --show-toplevel) } # Change theme of Terminal.app tabc() { NAME="${1:-Basic}" osascript -e "tell application \"Terminal\" to set current settings of front window to settings set \"$NAME\"" } # Change to Danger theme when executing ssh ssh() { tabc Pro /usr/bin/ssh $* tabc } # Docker alias compose='docker compose' alias test-compose='compose -f /Users/mario/Code/yodel/yodel/docker-compose.test.yml' alias bu='bundle update' alias bl='bundle list' evalenv() { if [ "$1" != "" ]; then eval $(awk '{printf "export %s\n", $0}' $1) else eval $(awk '{printf "export %s\n", $0}' .env) fi }
Update Yarn PATH to match docs' recommendation
# sup yarn # https://yarnpkg.com export PATH="$HOME/.yarn/bin:$PATH"
# sup yarn # https://yarnpkg.com export PATH="$PATH:`yarn global bin`"
Remove Docker dangling images after copying new ones
#!/bin/bash set -e cd "$(dirname "${BASH_SOURCE[0]}")" . ./config.sh (cd ./tls && go get ./... && go run generate_certs.go $HOSTS) echo "Copying weave images, scripts, and certificates to hosts, and" echo " prefetch test images" for HOST in $HOSTS; do docker_on $HOST load -i ../weave.tar run_on $HOST mkdir -p bin upload_executable $HOST ../bin/docker-ns upload_executable $HOST ../weave rsync -az -e "$SSH" ./tls/ $HOST:~/tls for IMG in $TEST_IMAGES ; do docker_on $HOST inspect --format=" " $IMG >/dev/null 2>&1 || docker_on $HOST pull $IMG done done
#!/bin/bash set -e cd "$(dirname "${BASH_SOURCE[0]}")" . ./config.sh (cd ./tls && go get ./... && go run generate_certs.go $HOSTS) echo "Copying weave images, scripts, and certificates to hosts, and" echo " prefetch test images" for HOST in $HOSTS; do docker_on $HOST load -i ../weave.tar DANGLING_IMAGES="$(docker_on $HOST images -q -f dangling=true)" [ -n "$DANGLING_IMAGES" ] && docker_on $HOST rmi $DANGLING_IMAGES run_on $HOST mkdir -p bin upload_executable $HOST ../bin/docker-ns upload_executable $HOST ../weave rsync -az -e "$SSH" ./tls/ $HOST:~/tls for IMG in $TEST_IMAGES ; do docker_on $HOST inspect --format=" " $IMG >/dev/null 2>&1 || docker_on $HOST pull $IMG done done
Increase memory for Magerun install in case people decide to install sample data.
#!/usr/bin/env bash set -e # Exit on error if [ $(find $MAGE_ROOT_DIR -maxdepth 0 -type d -empty 2>/dev/null) ]; then # Install Magento /n98-magerun.phar install \ --installationFolder=$MAGE_ROOT_DIR \ --magentoVersionByName="magento-mirror-1.9.2.4" \ --installSampleData=no \ --dbHost=db \ --dbUser=magento \ --dbPass=magento \ --dbName=magento \ --useDefaultConfigParams=yes \ --baseUrl="https://pwa-magento.docker/" chgrp -R 33 $MAGE_ROOT_DIR/media $MAGE_ROOT_DIR/var find $MAGE_ROOT_DIR/media $MAGE_ROOT_DIR/var -type d -exec chmod 775 {} + find $MAGE_ROOT_DIR/media $MAGE_ROOT_DIR/var -type f -exec chmod 664 {} + cd $MAGE_ROOT_DIR modman init modman link /src fi cd $MAGE_ROOT_DIR modman deploy src magerun sys:setup:run
#!/usr/bin/env bash set -e # Exit on error if [ $(find $MAGE_ROOT_DIR -maxdepth 0 -type d -empty 2>/dev/null) ]; then # Install Magento php -dmemory_limit=1024M -f /n98-magerun.phar install \ --installationFolder=$MAGE_ROOT_DIR \ --magentoVersionByName="magento-mirror-1.9.2.4" \ --installSampleData=no \ --dbHost=db \ --dbUser=magento \ --dbPass=magento \ --dbName=magento \ --useDefaultConfigParams=yes \ --baseUrl="https://pwa-magento.docker/" chgrp -R 33 $MAGE_ROOT_DIR/media $MAGE_ROOT_DIR/var find $MAGE_ROOT_DIR/media $MAGE_ROOT_DIR/var -type d -exec chmod 775 {} + find $MAGE_ROOT_DIR/media $MAGE_ROOT_DIR/var -type f -exec chmod 664 {} + cd $MAGE_ROOT_DIR modman init modman link /src fi cd $MAGE_ROOT_DIR modman deploy src magerun sys:setup:run
Update release doc for version 5.2.0
#!/bin/bash set -e ./docker-webapp.sh --clean docker-compose up -d biosamples-agents-solr docker-compose up -d biosamples-agents-upload-workers #ARGS=--spring.profiles.active=big for X in 1 2 3 4 5 do echo "============================================================================================================" echo "=================================== STARTING INTEGRATION TESTS PHASE-"$X "=====================================" echo "============================================================================================================" #java -jar integration/target/integration-4.0.0-SNAPSHOT.jar --phase=$X $ARGS $@ docker-compose run --rm --service-ports biosamples-integration java -jar integration-5.2.0-SNAPSHOT.jar --phase=$X $ARGS $@ sleep 10 #solr is configured to commit every 5 seconds done #leave the agent up at the end docker-compose up -d biosamples-agents-solr echo "Successfully completed"
#!/bin/bash set -e ./docker-webapp.sh --clean docker-compose up -d biosamples-agents-solr docker-compose up -d biosamples-agents-upload-workers #ARGS=--spring.profiles.active=big for X in 1 2 3 4 5 do echo "============================================================================================================" echo "=================================== STARTING INTEGRATION TESTS PHASE-"$X "=====================================" echo "============================================================================================================" #java -jar integration/target/integration-4.0.0-SNAPSHOT.jar --phase=$X $ARGS $@ docker-compose run --rm --service-ports biosamples-integration java -jar integration-5.2.0-SNAPSHOT.jar --phase=$X $ARGS $@ sleep 10 #solr is configured to commit every 5 seconds done #leave the agent up at the end docker-compose up -d biosamples-agents-solr echo "Successfully completed"
Fix database wd ownership for initdb to work
#!/bin/sh # If empty data directory if [ ! -f /var/lib/postgresql/9.4/main/PG_VERSION ] && [ "$DATABASE_NAME" ] && [ "$DATABASE_USER" ] && [ "$DATABASE_PASSWORD" ] then # Create postgres data directory mkdir -p /var/lib/postgresql/9.4/main chown postgres:postgres /var/lib/postgresql/9.4/main /sbin/setuser postgres /usr/lib/postgresql/9.4/bin/initdb /var/lib/postgresql/9.4/main/ # Start postgresql /usr/bin/pg_ctlcluster "9.4" main start # Create users and databases here /sbin/setuser postgres createdb $DATABASE_NAME # wARNING This way the password is set is not very secure # to be reviewed.. /sbin/setuser postgres psql -c "create user $DATABASE_USER password '$DATABASE_PASSWORD'" /sbin/setuser postgres psql -c 'GRANT ALL PRIVILEGES ON DATABASE $DATABASE_NAME TO $DATABASE_USER;' # Give access to outside world with password auth echo "host all all 172.17.0.0/16 md5 " >> /etc/postgresql/9.4/main/pg_hba.conf # Stop postgresql /usr/bin/pg_ctlcluster "9.4" main stop fi # Launch init process /sbin/my_init
#!/bin/sh # If empty data directory if [ ! -f /var/lib/postgresql/9.4/main/PG_VERSION ] && [ "$DATABASE_NAME" ] && [ "$DATABASE_USER" ] && [ "$DATABASE_PASSWORD" ] then # Create postgres data directory mkdir -p /var/lib/postgresql/9.4/main chown -R postgres:postgres /var/lib/postgresql/ /sbin/setuser postgres /usr/lib/postgresql/9.4/bin/initdb /var/lib/postgresql/9.4/main/ # Start postgresql /usr/bin/pg_ctlcluster "9.4" main start # Create users and databases here /sbin/setuser postgres createdb $DATABASE_NAME # wARNING This way the password is set is not very secure # to be reviewed.. /sbin/setuser postgres psql -c "create user $DATABASE_USER password '$DATABASE_PASSWORD'" /sbin/setuser postgres psql -c 'GRANT ALL PRIVILEGES ON DATABASE $DATABASE_NAME TO $DATABASE_USER;' # Give access to outside world with password auth echo "host all all 172.17.0.0/16 md5 " >> /etc/postgresql/9.4/main/pg_hba.conf # Stop postgresql /usr/bin/pg_ctlcluster "9.4" main stop fi # Launch init process /sbin/my_init
Use aggregate compact report for master report
#!/usr/bin/env bash set -e git reset --hard git remote add upstream https://github.com/phpactor/worse-reflection git fetch upstream master echo -e "\n\n" echo -e "Benchmarking master branch" echo -e "==========================\n\n" git checkout upstream/master mv composer.lock composer.lock.pr composer install --quiet vendor/bin/phpbench run --progress=none --tag=master --retry-threshold=2 --tag=master echo -e "\n\n" echo -e "Benchmarking current branch and comparing to master" echo -e "===================================================\n\n" git checkout - mv composer.lock.pr composer.lock composer install --quiet vendor/bin/phpbench run --report=aggregate_compact --progress=travis --retry-threshold=2 --uuid=tag:master
#!/usr/bin/env bash set -e git reset --hard git remote add upstream https://github.com/phpactor/worse-reflection git fetch upstream master echo -e "\n\n" echo -e "Benchmarking master branch" echo -e "==========================\n\n" git checkout upstream/master mv composer.lock composer.lock.pr composer install --quiet vendor/bin/phpbench run --report=aggregate_compact --progress=none --tag=master --retry-threshold=2 --tag=master echo -e "\n\n" echo -e "Benchmarking current branch and comparing to master" echo -e "===================================================\n\n" git checkout - mv composer.lock.pr composer.lock composer install --quiet vendor/bin/phpbench run --report=aggregate_compact --progress=travis --retry-threshold=2 --uuid=tag:master
Update comments, same on server as local workstation now.
# assemblyLine.sh ############################################################################# # # Purpose: # # Assemble exec.sh script for an Apache Tomcat server installation on # an Ubuntu development workstation (not for production use). # ############################################################################# . ../../init.sh . init.sh cat init.sh > exec.sh cat clean.sh >> exec.sh cat addTomcatUser.sh >> exec.sh cat installTomcat.sh >> exec.sh
# assemblyLine.sh ############################################################################# # # Purpose: # # Assemble exec.sh script for an Apache Tomcat server installation on # an Ubuntu 16.04 system. # ############################################################################# . ../../init.sh . init.sh cat init.sh > exec.sh cat clean.sh >> exec.sh cat addTomcatUser.sh >> exec.sh cat installTomcat.sh >> exec.sh
Make sure the code is compiled before pushing it to gh-pages
#!/usr/bin/env bash DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" mkdir -p tmp if [ ! -d "tmp/gh-pages" ]; then git clone git@github.com:Hacker0x01/react-datepicker.git --branch gh-pages --single-branch tmp/gh-pages fi cd tmp/gh-pages git pull find . -maxdepth 1 ! -name '.git' ! -name '.' -exec rm -r {} \; cp -r $DIR/{bundle.js,index.html,style.css,images} ./ git add --all git commit -m "Publish new docs (automated commit)" git push
#!/usr/bin/env bash DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" npm run build mkdir -p tmp if [ ! -d "tmp/gh-pages" ]; then git clone git@github.com:Hacker0x01/react-datepicker.git --branch gh-pages --single-branch tmp/gh-pages fi cd tmp/gh-pages git pull find . -maxdepth 1 ! -name '.git' ! -name '.' -exec rm -r {} \; cp -r $DIR/{bundle.js,index.html,style.css,images} ./ git add --all git commit -m "Publish new docs (automated commit)" git push
Print credentials + trace log
./bridge running if [ "$?" == 1 ] then ip=`/sbin/ifconfig eth0 | grep 'inet addr:' | cut -d: -f2 | awk '{ print $1}'` (echo "Crash Flower Bridge" && \ echo "ip: $ip" && \ echo "" && \ echo "trace.log:" && \ cat trace.log) | \ mail -s "Flower Bridge crashed" "bruno.sautron@parrot.com" ./bridge restart > /dev/null fi
./bridge running
if [ "$?" == 1 ]
then
	ip=`/sbin/ifconfig eth0 | grep 'inet addr:' | cut -d: -f2 | awk '{ print $1}'`
	(echo "Crash Flower Bridge" && \
		echo "ip: $ip" && \
		echo "" && \
		echo "trace.log:" && \
		cat trace.log && echo && echo "credentials.json:" && cat credentials.json) | \
		mail -s "Flower Bridge crashed" "bruno.sautron@parrot.com"
	./bridge restart > /dev/null
fi
Fix the Fedora upload file
chmod a+rx 112x63 chmod a+rx 624x351 chmod a+rx 752x423 chmod a+rx source chmod a+r */*.png chmod a+r *.html rsync -v --progress \ 112x63 624x351 752x423 \ fedora-21.xml.gz fedora-21-icons.tar.gz \ *.html \ applications-to-import.yaml \ rhughes@secondary01.fedoraproject.org:/srv/pub/alt/screenshots/f21/
chmod a+rx 112x63 chmod a+rx 624x351 chmod a+rx 752x423 chmod a+rx source chmod a+r */*.png chmod a+r *.html rsync -vr --progress \ 112x63 624x351 752x423 \ fedora-21.xml.gz fedora-21-icons.tar.gz \ *.html \ applications-to-import.yaml \ rhughes@secondary01.fedoraproject.org:/srv/pub/alt/screenshots/f21/
Use llc+gcc instead of lli, so we can link against gmp
#!/bin/bash set -e if [ "$1" = "-" ]; then testdir=`mktemp -d` || exit 1 trap "rm -fr $testdir" exit outdir=${testdir} mod=temp_test cat > ${testdir}/${mod}.m else mod="$1" fi outdir=${outdir-out} shift dist/build/m3/m3 ${testdir:+-I${testdir} -o${outdir}} ${mod} file=${outdir}/${mod/::/__} llvm-as ${file}.ll lli ${file}.bc "$@"
#!/bin/bash set -e if [ "$1" = "-" ]; then testdir=`mktemp -d` || exit 1 trap "rm -fr $testdir" exit outdir=${testdir} mod=temp_test cat > ${testdir}/${mod}.m else mod="$1" fi outdir=${outdir-out} shift dist/build/m3/m3 ${testdir:+-I${testdir} -o${outdir}} ${mod} file=${outdir}/${mod/::/__} llvm-as ${file}.ll llc -o ${file}.s ${file}.bc gcc -o ${file} ${file}.s -lgmp ${file} "$@"
Load gcc module for gfortran to run on sites that don't have gcc available
#!/bin/bash hostname=`hostname -f` cur_time=`date` echo "Hostname $hostname" echo "Time: $cur_time" echo "OSG Site: $OSG_SITE_NAME" echo "GWMS Entry Name: $GLIDEIN_Entry_Name" echo "GWMS Resource Name: $GLIDEIN_ResourceName" source /cvmfs/oasis.opensciencegrid.org/osg/modules/lmod/5.6.2/init/bash module load madgraph/2.1.2 cd input mg5_aMC -f pp2z.madgraph cd pp2z ./bin/generate_events --laststep=pgs -f if [ "$?" != "0" ]; then echo "Error running octave tests" exit 1 fi
#!/bin/bash hostname=`hostname -f` cur_time=`date` echo "Hostname $hostname" echo "Time: $cur_time" echo "OSG Site: $OSG_SITE_NAME" echo "GWMS Entry Name: $GLIDEIN_Entry_Name" echo "GWMS Resource Name: $GLIDEIN_ResourceName" source /cvmfs/oasis.opensciencegrid.org/osg/modules/lmod/5.6.2/init/bash module load gcc/4.6.2 module load madgraph/2.1.2 cd input_files mg5_aMC -f pp2z.madgraph cd pp2z ./bin/generate_events --laststep=pgs -f if [ "$?" != "0" ]; then echo "Error running octave tests" exit 1 fi
Update the timing file so that if there is no diff, we don't double compile
#!/bin/bash NEW_FILE="$1" OLD_FILE="$2" SHELF_NAME="compare-times-shelf" trap "hg import --no-commit $SHELF_NAME" SIGINT SIGTERM # make the old version #hg shelve --all --name $SHELF_NAME hg diff > $SHELF_NAME && hg revert -a make clean make timed 2>&1 | tee "$OLD_FILE" # make the current version hg import --no-commit $SHELF_NAME && mv $SHELF_NAME "$SHELF_NAME-$(date | base64).bak" make clean make timed 2>&1 | tee "$NEW_FILE"
#!/bin/bash NEW_FILE="$1" OLD_FILE="$2" SHELF_NAME="compare-times-shelf" trap "hg import --no-commit $SHELF_NAME" SIGINT SIGTERM # make the old version #hg shelve --all --name $SHELF_NAME hg diff > $SHELF_NAME && hg revert -a make clean make timed 2>&1 | tee "$OLD_FILE" # make the current version if [ -z "$(cat $SHELF_NAME)" ]; then # there is no diff, so just copy the time file cp "$OLD_FILE" "$NEW_FILE" else hg import --no-commit $SHELF_NAME && mv $SHELF_NAME "$SHELF_NAME-$(date | base64).bak" make clean make timed 2>&1 | tee "$NEW_FILE" fi
Revert "start search indexer before start apiOdin job"
#!/bin/bash startSearchIndexer processApps $APPSTODEPLOY # Generate and Ingest Odin data bundle exec rake FORCE_COLOR=true api_server_url=https://$NODE_NAME.slidev.org apiSuperAssessmentTests TOGGLE_TABLESCANS=true EXITCODE=$? mongo --eval "db.adminCommand( { setParameter: 1, notablescan: false } )" exit $EXITCODE
#!/bin/bash # Generate and Ingest Odin data bundle exec rake FORCE_COLOR=true api_server_url=https://$NODE_NAME.slidev.org apiSuperAssessmentTests TOGGLE_TABLESCANS=true EXITCODE=$? mongo --eval "db.adminCommand( { setParameter: 1, notablescan: false } )" exit $EXITCODE
Fix path for push to S3
#!/bin/bash # Must be invoked with $PACKAGENAME echo $TRAVIS_PULL_REQUEST $TRAVIS_BRANCH PUSH_DOCS_TO_S3=true if [ "$TRAVIS_PULL_REQUEST" = true ]; then echo "This is a pull request. No deployment will be done."; exit 0 fi if [ "$TRAVIS_BRANCH" != "master" ]; then echo "No deployment on BRANCH='$TRAVIS_BRANCH'"; exit 0 fi # Deploy to binstar conda install --yes anaconda-client jinja2 anaconda -t $BINSTAR_TOKEN upload --force -u omnia -p ${PACKAGENAME}-dev $HOME/miniconda/conda-bld/*/${PACKAGENAME}-dev-*.tar.bz2 if [ $PUSH_DOCS_TO_S3 = true ]; then # Create the docs and push them to S3 # ----------------------------------- conda install --yes pip conda config --add channels omnia conda install --yes `conda build devtools/conda-recipe --output` pip install numpydoc s3cmd msmb_theme conda install --yes `cat docs/requirements.txt | xargs` conda list -e (cd docs && make html && cd -) ls -lt docs/_build pwd python devtools/ci/push-docs-to-s3.py fi
#!/bin/bash # Must be invoked with $PACKAGENAME echo $TRAVIS_PULL_REQUEST $TRAVIS_BRANCH PUSH_DOCS_TO_S3=true if [ "$TRAVIS_PULL_REQUEST" = true ]; then echo "This is a pull request. No deployment will be done."; exit 0 fi if [ "$TRAVIS_BRANCH" != "master" ]; then echo "No deployment on BRANCH='$TRAVIS_BRANCH'"; exit 0 fi # Deploy to binstar conda install --yes anaconda-client jinja2 anaconda -t $BINSTAR_TOKEN upload --force -u omnia -p ${PACKAGENAME}-dev $HOME/miniconda/conda-bld/*/${PACKAGENAME}-dev-*.tar.bz2 if [ $PUSH_DOCS_TO_S3 = true ]; then # Create the docs and push them to S3 # ----------------------------------- conda install --yes pip conda config --add channels omnia conda install --yes `conda build devtools/conda-recipe --output` pip install numpydoc s3cmd msmb_theme conda install --yes `cat docs/requirements.txt | xargs` conda list -e (cd docs && make html && cd -) ls -lt docs/_build pwd python devtools/travis-ci/push-docs-to-s3.py fi