Instruction
stringlengths
14
778
input_code
stringlengths
0
4.24k
output_code
stringlengths
1
5.44k
Add another run-test command to dump the AST
#!/bin/sh clang -cc1 -load build-debug/libcppanalyze.so -plugin rename -x c++ -std=c++98 -fcxx-exceptions -fexceptions tests/test.cpp
#!/bin/sh clang -cc1 -load build-debug/libcppanalyze.so -plugin rename -x c++ -std=c++98 -fcxx-exceptions -fexceptions tests/test.cpp ## uncomment to dump the AST in addition to run cppanalyze plugin #clang -cc1 -ast-dump -load build-debug/libcppanalyze.so -add-plugin rename -x c++ -std=c++98 -fcxx-exceptions -fexceptions tests/test.cpp
Put compose key back on F19
#!/bin/bash rc=$HOME/run_control kb=$rc/keyboard custom-keys () { $rc/keyboard/format-compose } generate () { dest="$1" mkdir -p "${dest%/*}" cat > "$dest" } indent () { perl -pe 'print " " if /\S/' } generated="generated by $0 at $(date)" if [[ `uname` == Darwin ]]; then mac=$rc/mac compose=$HOME/.cache/Compose.txt [[ -e "$compose" ]] || \ curl -s https://r4s6.net/Compose.txt | generate "$compose" { echo "/* $generated */" echo '{' cat $mac/key-bindings.dict | indent # http://xahlee.info/kbd/osx_keybinding_key_syntax.html # echo ' /* Compose Key: F19 */' # echo -n ' "\UF716" = ' echo ' /* Compose Key: F12 */' echo -n ' "\UF70F" = ' { # Custom first (first in wins). custom-keys cat $compose } | $mac/compose2keybindings.pl | indent echo '}' } | generate ~/Library/KeyBindings/DefaultKeyBinding.dict else { echo "$generated" echo 'include "%L"' custom-keys } | generate ~/.XCompose fi
#!/bin/bash rc=$HOME/run_control kb=$rc/keyboard custom-keys () { $rc/keyboard/format-compose } generate () { dest="$1" mkdir -p "${dest%/*}" cat > "$dest" } indent () { perl -pe 'print " " if /\S/' } generated="generated by $0 at $(date)" if [[ `uname` == Darwin ]]; then mac=$rc/mac compose=$HOME/.cache/Compose.txt [[ -e "$compose" ]] || \ curl -s https://r4s6.net/Compose.txt | generate "$compose" { echo "/* $generated */" echo '{' cat $mac/key-bindings.dict | indent # http://xahlee.info/kbd/osx_keybinding_key_syntax.html echo ' /* Compose Key: F19 */' echo -n ' "\UF716" = ' # echo ' /* Compose Key: F12 */' # echo -n ' "\UF70F" = ' { # Custom first (first in wins). custom-keys cat $compose } | $mac/compose2keybindings.pl | indent echo '}' } | generate ~/Library/KeyBindings/DefaultKeyBinding.dict else { echo "$generated" echo 'include "%L"' custom-keys } | generate ~/.XCompose fi
Fix stupid error when changing directory
#!/bin/sh apt-get update # install packages needed by libsndfile, matplotlib and lmdb apt-get install -y \ g++ \ autoconf \ autogen \ automake \ libtool \ pkg-config \ libogg0 \ libogg-dev \ libvorbis0a \ libvorbis-dev \ libsamplerate0 \ libsamplerate0-dev \ libx11-dev \ python-dev \ libfreetype6-dev \ libpng12-dev \ libffi-dev # get libflac 1.3.1, build and install wget http://downloads.xiph.org/releases/flac/flac-1.3.1.tar.xz tar xf flac-1.3.1.tar.xz cd flac-1.3.1 ./configure && make && make install cd .. # get libsndfile 1.0.26, build and install wget http://www.mega-nerd.com/libsndfile/files/libsndfile-1.0.27.tar.gz tar -xzf libsndfile-1.0.27.tar.gz cd libsndfile-1.0.27.tar.gz ./configure --libdir=/usr/lib/x86_64-linux-gnu && make && make install
#!/bin/sh apt-get update # install packages needed by libsndfile, matplotlib and lmdb apt-get install -y \ g++ \ autoconf \ autogen \ automake \ libtool \ pkg-config \ libogg0 \ libogg-dev \ libvorbis0a \ libvorbis-dev \ libsamplerate0 \ libsamplerate0-dev \ libx11-dev \ python-dev \ libfreetype6-dev \ libpng12-dev \ libffi-dev # get libflac 1.3.1, build and install wget http://downloads.xiph.org/releases/flac/flac-1.3.1.tar.xz tar xf flac-1.3.1.tar.xz cd flac-1.3.1 ./configure && make && make install cd .. # get libsndfile 1.0.26, build and install wget http://www.mega-nerd.com/libsndfile/files/libsndfile-1.0.27.tar.gz tar -xzf libsndfile-1.0.27.tar.gz cd libsndfile-1.0.27 ./configure --libdir=/usr/lib/x86_64-linux-gnu && make && make install
Switch back to agnoster-phatblat theme
# oh-my-zsh.zsh # Path to your oh-my-zsh installation. export ZSH=$HOME/.oh-my-zsh # Set name of the theme to load. # Look in ~/.oh-my-zsh/themes/ # Optionally, if you set this to "random", it'll load a random theme each # time that oh-my-zsh is loaded. # ZSH_THEME='agnoster-phatblat' ZSH_THEME='robbyrussell' # Example aliases # alias zshconfig="mate ~/.zshrc" # alias ohmyzsh="mate ~/.oh-my-zsh" # Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*) # Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/ # Example format: plugins=(rails git textmate ruby lighthouse) plugins=(cp gitfast jsontools osx pod rbenv ruby sublime sudo vi-mode) source $ZSH/oh-my-zsh.sh
# oh-my-zsh.zsh # Path to your oh-my-zsh installation. export ZSH=$HOME/.oh-my-zsh # Set name of the theme to load. # Look in ~/.oh-my-zsh/themes/ # Optionally, if you set this to "random", it'll load a random theme each # time that oh-my-zsh is loaded. ZSH_THEME='agnoster-phatblat' # ZSH_THEME='robbyrussell' # Example aliases # alias zshconfig="mate ~/.zshrc" # alias ohmyzsh="mate ~/.oh-my-zsh" # Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*) # Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/ # Example format: plugins=(rails git textmate ruby lighthouse) plugins=(cp gitfast jsontools osx pod rbenv ruby sublime sudo vi-mode) source $ZSH/oh-my-zsh.sh
Remove spaces to make options work with qsub
#!/bin/bash #$ -pe orte 7 #$ -l h_rt=03:00:00 #$ -N full-%(kinit)s-%(deltak)s #$ -o %(qsublogsdir)s/full-%(kinit)s-%(deltak)s.out #$ -e %(qsublogsdir)s/full-%(kinit)s-%(deltak)s.err #$ -v PATH, PYTHONPATH, LD_LIBRARY_PATH #$ -S /bin/bash #Change first order file here FOFILE=%(fofile)s KINIT=%(kinit)s DELTAK=%(deltak)s KEND=%(kend)s CODEDIR=%(codedir)s echo ----------------------------------------- echo Start: host `hostname`, date `date` echo NSLOTS: $NSLOTS declare -i TOTNUMPROCS TOTNUMPROCS=4*$NSLOTS echo TOTNUMPROCS: $TOTNUMPROCS cd $CODEDIR echo In directory $CODEDIR echo Starting first order run: mpirun -np 1 python harness.py -f $FOFILE -m --kinit $KINIT --deltak $DELTAK --kend $KEND echo First order run complete. echo Starting second order run: mpirun -np $TOTNUMPROCS python harness.py -f $FOFILE -p echo Second order run complete.
#!/bin/bash #$ -pe orte 7 #$ -l h_rt=03:00:00 #$ -N full-%(kinit)s-%(deltak)s #$ -o %(qsublogsdir)s/full-%(kinit)s-%(deltak)s.out #$ -e %(qsublogsdir)s/full-%(kinit)s-%(deltak)s.err #$ -v PATH,PYTHONPATH,LD_LIBRARY_PATH #$ -S /bin/bash #Change first order file here FOFILE=%(fofile)s KINIT=%(kinit)s DELTAK=%(deltak)s KEND=%(kend)s CODEDIR=%(codedir)s echo ----------------------------------------- echo Start: host `hostname`, date `date` echo NSLOTS: $NSLOTS declare -i TOTNUMPROCS TOTNUMPROCS=4*$NSLOTS echo TOTNUMPROCS: $TOTNUMPROCS cd $CODEDIR echo In directory $CODEDIR echo Starting first order run: mpirun -np 1 python harness.py -f $FOFILE -m --kinit $KINIT --deltak $DELTAK --kend $KEND echo First order run complete. echo Starting second order run: mpirun -np $TOTNUMPROCS python harness.py -f $FOFILE -p echo Second order run complete.
Add format code with Astyle.
#!/bin/bash echo "Change format file : $1" astyle --style=kr -A3 --indent-classes --indent-namespaces --indent-col1-comments \ --min-conditional-indent=# --max-instatement-indent=# --break-blocks \ --align-pointer=name --align-reference=type $1 # Remove source code original version after formatted soure code. rm $1.orig
#!/bin/bash echo "Change format file : $1" astyle --style=kr -A3 --indent-classes --indent-namespaces --indent-col1-comments \ --min-conditional-indent=# --max-instatement-indent=40 --break-blocks \ --align-pointer=name --align-reference=type $1 # Remove source code original version after formatted soure code. rm $1.orig
Add pyenv shims to PATH
#!/usr/bin/env bash if [[ -z $TMUX ]]; then export NVM_DIR="$HOME/.nvm" export GOPATH="$HOME/src" export GOBIN="$HOME/bin" PATH="$PATH:/usr/local/mysql/bin" PATH="$PATH:/usr/local/git/bin" PATH="$PATH:$HOME/.cargo/bin" PATH="$PATH:$HOME/.local/bin" PATH="$HOME/.yarn/bin:$PATH" PATH="$HOME/.config/kubectx:$PATH" PATH="/usr/local/go/bin:$GOPATH/bin:$PATH" RBENV_ROOT="${RBENV_ROOT:-$HOME/.rbenv}" if [ -d "$RBENV_ROOT" ]; then PATH="$RBENV_ROOT/bin:$RBENV_ROOT/shims:$PATH" fi PATH=".git/safe/../../bin:$PATH" PATH=".git/safe/../../node_modules/.bin:$PATH" PATH="$HOME/bin:$PATH" MANPATH="/usr/local/man:$MANPATH" MANPATH="/usr/local/mysql/man:$MANPATH" MANPATH="/usr/local/git/man:$MANPATH" PATH="/usr/local/bin:/usr/local/sbin:$PATH" export PATH export RBENV_ROOT export MANPATH fi
#!/usr/bin/env bash if [[ -z $TMUX ]]; then export NVM_DIR="$HOME/.nvm" export GOPATH="$HOME/src" export GOBIN="$HOME/bin" PATH="$PATH:/usr/local/mysql/bin" PATH="$PATH:/usr/local/git/bin" PATH="$PATH:$HOME/.cargo/bin" PATH="$PATH:$HOME/.local/bin" PATH="$HOME/.yarn/bin:$PATH" PATH="$HOME/.config/kubectx:$PATH" PATH="/usr/local/go/bin:$GOPATH/bin:$PATH" RBENV_ROOT="${RBENV_ROOT:-$HOME/.rbenv}" if [ -d "$RBENV_ROOT" ]; then PATH="$RBENV_ROOT/bin:$RBENV_ROOT/shims:$PATH" fi PATH="$HOME/.pyenv/shims:$PATH" PATH=".git/safe/../../bin:$PATH" PATH=".git/safe/../../node_modules/.bin:$PATH" PATH="$HOME/bin:$PATH" MANPATH="/usr/local/man:$MANPATH" MANPATH="/usr/local/mysql/man:$MANPATH" MANPATH="/usr/local/git/man:$MANPATH" PATH="/usr/local/bin:/usr/local/sbin:$PATH" export PATH export RBENV_ROOT export MANPATH fi
Fix duplicate nm-applet on some platforms.
#!/bin/sh xhost + # allows any user to start X apps xmodmap -e "keycode 133 = Caps_Lock" xmodmap -e "keycode 22 shift = BackSpace Delete" # Necessary in crouton. On classic Ubuntu, won't have any effect as cron is already running: sudo cron purgetrash.sh nm-applet & devilspie & for i in $(seq 1 8) do gnome-terminal --geometry=50x50+50+50 --window-with-profile=DesktopConsole$i & sleep 0.1 # otherwise might be too fast for devilspie done focusDesktopConsole.sh &
#!/bin/sh xhost + # allows any user to start X apps xmodmap -e "keycode 133 = Caps_Lock" xmodmap -e "keycode 22 shift = BackSpace Delete" # Necessary in crouton. On classic Ubuntu, won't have any effect as cron is already running: sudo cron purgetrash.sh pgrep nm-applet || nm-applet & devilspie & for i in $(seq 1 8) do gnome-terminal --geometry=50x50+50+50 --window-with-profile=DesktopConsole$i & sleep 0.1 # otherwise might be too fast for devilspie done focusDesktopConsole.sh &
Fix travis again... take 10
cat << EOF > Dockerfile FROM ocaml/opam2:debian-unstable ADD . /home/opam/cervoise WORKDIR /home/opam/cervoise RUN opam switch $OCAML_VERSION RUN eval \$(opam env) # Install Ubuntu packages RUN echo "deb http://llvm.org/apt/unstable/ llvm-toolchain-$LLVM_VERSION main" | sudo tee -a /etc/apt/sources.list RUN curl -L http://llvm.org/apt/llvm-snapshot.gpg.key | sudo apt-key add - RUN sudo apt-get update -qq RUN sudo apt-get install -qq cmake "llvm-$LLVM_VERSION" libgc-dev # Check OPAM package description RUN opam lint --warn=-48-21-32 *.opam # Install & tests RUN opam repository set-url default https://opam.ocaml.org/2.0 RUN opam pin add -y --no-action --kind=git cervoise . RUN opam pin add -y --no-action --kind=version llvm "${LLVM_VERSION}${LLVM_VERSION_MICRO}" RUN opam install -y opam-depext RUN opam depext -y cervoise RUN opam install -yt --deps-only cervoise RUN opam install -yvt cervoise RUN make tests EOF docker build .
cat << EOF > Dockerfile FROM ocaml/opam2:debian-unstable ADD . /home/opam/cervoise WORKDIR /home/opam/cervoise RUN opam switch $OCAML_VERSION # Install Ubuntu packages RUN echo "deb http://llvm.org/apt/unstable/ llvm-toolchain-$LLVM_VERSION main" | sudo tee -a /etc/apt/sources.list RUN curl -L http://llvm.org/apt/llvm-snapshot.gpg.key | sudo apt-key add - RUN sudo apt-get update -qq RUN sudo apt-get install -qq cmake "llvm-$LLVM_VERSION" libgc-dev # Check OPAM package description RUN opam lint --warn=-48-21-32 *.opam # Install & tests RUN opam repository set-url default https://opam.ocaml.org/2.0 RUN opam pin add -y --no-action --kind=git cervoise . RUN opam pin add -y --no-action --kind=version llvm "${LLVM_VERSION}${LLVM_VERSION_MICRO}" RUN opam install -y opam-depext RUN opam depext -y cervoise RUN opam install -yt --deps-only cervoise RUN opam install -yvt cervoise RUN eval \$(opam env) && make tests EOF docker build .
Add SO update script to bash script.
#!/bin/bash virtualenv --no-site-packages venv venv/bin/pip install -r requirements.txt # Write out the files from the database venv/bin/python write-files.py rm -rf venv
#!/bin/bash virtualenv --no-site-packages venv venv/bin/pip install -r requirements.txt # Get the latest question updates from Stack Overflow venv/bin/python update-latest-from-so.py # Write out the files from the database venv/bin/python write-files.py rm -rf venv
Fix the dependency install problem caused by rpm.
#!/bin/bash if [ ! -f /root/.extendingrc ]; then echo "Installing the Puppet Enterprise ruby packages." for i in $(ls /root/puppet-enterprise/packages/el-6-i386/pe-ruby*); do rpm -Uvh $i; done for i in $(ls /root/puppet-enterprise/packages/el-6-i386/pe-ruby*); do rpm -Uvh $i; done touch .extendingrc echo "Please run 'exec bash' or 'exec zsh' to update your PATH." else echo "This script has already installed the required packages. Please try 'exec bash' or 'exec zsh' to set your PATH." fi
#!/bin/bash if [ ! -f /root/.extendingrc ]; then echo "Installing the Puppet Enterprise ruby packages." yum localinstall --disablerepo=* /root/puppet-enterprise/packages/el-6-i386/pe-ruby* touch .extendingrc echo "Please run 'exec bash' or 'exec zsh' to update your PATH." else echo "This script has already installed the required packages. Please try 'exec bash' or 'exec zsh' to set your PATH." fi
Update the way rbenv is checked for
#!/bin/sh if test ! $(which rbenv) then echo " Installing rbenv for you." git clone https://github.com/sstephenson/rbenv.git ~/.rbenv echo 'export PATH="$HOME/.rbenv/bin:$PATH"' >> ~/.zshrc . ~/.zshrc fi if test ! $(which ruby-build) then echo " Installing ruby-build for you." git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build . ~/.zshrc fi
#!/bin/sh if VERB="$( which rbenv )" 2> /dev/null; then echo " Installing rbenv for you." git clone https://github.com/sstephenson/rbenv.git ~/.rbenv echo 'export PATH="$HOME/.rbenv/bin:$PATH"' >> ~/.zshrc . ~/.zshrc fi if VERB="$( which ruby-build )" 2> /dev/null; then echo " Installing ruby-build for you." git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build . ~/.zshrc fi
Use built-in instead of bash-toolbox dependency
include system.validator.SystemValidator @class FileNameUtil(){ @private _getPathUnix(){ if [[ ${@} =~ "\\" ]]; then echo /${@} | sed "s#\([A-Z]\):#\l\1#g" | sed "s#\\\\#/#g" else echo ${@} fi } @private _getPathWin(){ if [[ ${@} =~ "/" ]]; then echo ${@} | sed "s#^/\([a-z]\)#\u\1:#g" | sed "s#/#\\\\#g" else echo ${@} fi } getPath(){ if [[ $(SystemValidator isWindows) ]]; then _getPathWin ${@} else _getPathUnix ${@} fi } $@ }
@class FileNameUtil(){ @private _getPathUnix(){ if [[ ${@} =~ "\\" ]]; then echo /${@} | sed "s#\([A-Z]\):#\l\1#g" | sed "s#\\\\#/#g" else echo ${@} fi } @private _getPathWin(){ if [[ ${@} =~ "/" ]]; then echo ${@} | sed "s#^/\([a-z]\)#\u\1:#g" | sed "s#/#\\\\#g" else echo ${@} fi } getPath(){ if [[ $(uname) =~ NT ]]; then _getPathWin ${@} else _getPathUnix ${@} fi } $@ }
Add "build" and "market" optional args to version function
#------------------------------------------------------------------------------- # # ios/alias.zsh # Command-line aliases for iOS development # #------------------------------------------------------------------------------- # Xcode alias ox='open *.xcodeproj' alias ow='open *.xcworkspace' # agvtool function version() { # agvtool_path="/usr/bin/agvtool" agvtool_path=$(which agvtool) build_version=$(agvtool what-version -terse) market_version=$(agvtool what-marketing-version -terse1) echo "$market_version ($build_version)" } #------------------------------------------------------------------------------- # # Examples # #------------------------------------------------------------------------------- # function sshnewkey() { # if (($+1)); then # ssh-keygen -t rsa -C "$1" # else # echo "Usage: sshnewkey user@host" # fi # } # function fixperms() { # find "$1" -type f -print -exec chmod 644 {} \; # find "$1" -type d -print -exec chmod 755 {} \; # }
#------------------------------------------------------------------------------- # # ios/alias.zsh # Command-line aliases for iOS development # #------------------------------------------------------------------------------- # Xcode alias ox='open *.xcodeproj' alias ow='open *.xcworkspace' # agvtool function version() { agvtool_path=$(which agvtool) # "/usr/bin/agvtool" build_version=$(agvtool what-version -terse) market_version=$(agvtool what-marketing-version -terse1) case "$1" in build) echo "$build_version" ;; market) echo "$market_version" ;; *) # No args, output pretty format echo "$market_version ($build_version)" ;; esac }
Add support for only updating dotfiles with --update
#!/usr/bin/env bash cd "$(dirname "${BASH_SOURCE}")"; git pull origin master; function doIt() { rsync --exclude ".git/" --exclude ".DS_Store" --exclude "bootstrap.sh" \ --exclude "README.md" --exclude "LICENSE.txt" --exclude "brew.sh" \ --exclude "apt.sh" -avh --no-perms . ~; source ~/.bash_profile; # Configure tmux if which tmux > /dev/null; then tmux source-file ~/.tmux.conf fi; # Install neovim python package sudo pip3 install neovim # Install/Update neovim vim-plug plugin curl -fLo ~/.local/share/nvim/site/autoload/plug.vim --create-dirs \ https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim nvim --headless +PlugInstall +quitall # Install YCM with C-family support cd ~/.local/share/nvim/plugged/YouCompleteMe && ./install.py --clang-completer } if [ "$1" == "--force" -o "$1" == "-f" ]; then doIt; else read -p "This may overwrite existing files in your home directory. Are you sure? (y/n) " -n 1; echo ""; if [[ $REPLY =~ ^[Yy]$ ]]; then doIt; fi; fi; unset doIt;
#!/usr/bin/env bash cd "$(dirname "${BASH_SOURCE}")"; git pull origin master; function update() { rsync --exclude ".git/" --exclude ".DS_Store" --exclude "bootstrap.sh" \ --exclude "README.md" --exclude "LICENSE.txt" --exclude "brew.sh" \ --exclude "apt.sh" -avh --no-perms . ~; source ~/.bash_profile; # Configure tmux if which tmux > /dev/null; then tmux source-file ~/.tmux.conf fi; } function install() { rsync --exclude ".git/" --exclude ".DS_Store" --exclude "bootstrap.sh" \ --exclude "README.md" --exclude "LICENSE.txt" --exclude "brew.sh" \ --exclude "apt.sh" -avh --no-perms . ~; source ~/.bash_profile; # Configure tmux if which tmux > /dev/null; then tmux source-file ~/.tmux.conf fi; } if [ "$1" == "--force" -o "$1" == "-f" ]; then update; install; else read -p "This may overwrite existing files in your home directory. Are you sure? (y/n) " -n 1; echo ""; if [[ $REPLY =~ ^[Yy]$ ]]; then if [ "$1" == "--update" -o "$1" == "-U" ]; then update; else update; install; fi; fi; fi; unset update; unset install;
Add comments that explain where $STACK comes from
# This function will install a version of Ruby onto the # system for the buidlpack to use. It coordinates download # and setting appropriate env vars for execution # # Example: # # heroku_buildpack_ruby_install_ruby "$BIN_DIR" "$ROOT_DIR" # # Takes two arguments, the first is the location of the buildpack's # `bin` directory. This is where the `download_ruby` script can be # found. The second argument is the root directory where Ruby # can be installed. # # Returns the directory where ruby was downloaded heroku_buildpack_ruby_install_ruby() { local bin_dir=$1 local root_dir=$2 heroku_buildpack_ruby_dir="$root_dir/vendor/ruby/$STACK" if [ ! -d "$root_dir/vendor/ruby/$STACK" ]; then heroku_buildpack_ruby_dir=$(mktemp -d) # bootstrap ruby $bin_dir/support/download_ruby $heroku_buildpack_ruby_dir function atexit { rm -rf $heroku_buildpack_ruby_dir } trap atexit EXIT fi export PATH=$heroku_buildpack_ruby_dir/bin/:$PATH unset GEM_PATH }
# This function will install a version of Ruby onto the # system for the buidlpack to use. It coordinates download # and setting appropriate env vars for execution # # Example: # # heroku_buildpack_ruby_install_ruby "$BIN_DIR" "$ROOT_DIR" # # Takes two arguments, the first is the location of the buildpack's # `bin` directory. This is where the `download_ruby` script can be # found. The second argument is the root directory where Ruby # can be installed. # # This function relies on the env var `$STACK` being set. This # is set in codon outside of the buildpack. An example of a stack # would be "cedar-14". # # Returns the directory where ruby was downloaded heroku_buildpack_ruby_install_ruby() { local bin_dir=$1 local root_dir=$2 heroku_buildpack_ruby_dir="$root_dir/vendor/ruby/$STACK" if [ ! -d "$root_dir/vendor/ruby/$STACK" ]; then heroku_buildpack_ruby_dir=$(mktemp -d) # bootstrap ruby $bin_dir/support/download_ruby $heroku_buildpack_ruby_dir function atexit { rm -rf $heroku_buildpack_ruby_dir } trap atexit EXIT fi export PATH=$heroku_buildpack_ruby_dir/bin/:$PATH unset GEM_PATH }
Create stored procedure for deleting feed jobs.
#!/bin/bash MY_DIR=$(dirname $0) mysql -u$1 --password=$2 < $MY_DIR/create-database.sql echo "Created thinkbig database"; mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-batch-mysql.sql mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-batch-thinkbig-mysql.sql echo "Created Operation Manager Tables"; mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-batch-thinkbig-indexes.sql echo "Created Operation Manager Indexes"; mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-metadata-mysql.sql mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-metadata-constraints.sql echo 'Created Metadata Tables' mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-metadata-grants.sql echo 'Granted SQL for user nifi' echo "0.2.0 scripts installed"
#!/bin/bash MY_DIR=$(dirname $0) mysql -u$1 --password=$2 < $MY_DIR/create-database.sql echo "Created thinkbig database"; mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-batch-mysql.sql mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-batch-thinkbig-mysql.sql echo "Created Operation Manager Tables"; mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-batch-thinkbig-indexes.sql echo "Created Operation Manager Indexes"; mysql -u$1 --password=$2 thinkbig < $MY_DIR/stored_procedures/delete_feed_jobs.sql echo "Created stored procedures" mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-metadata-mysql.sql mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-metadata-constraints.sql echo 'Created Metadata Tables' mysql -u$1 --password=$2 thinkbig < $MY_DIR/schema-metadata-grants.sql echo 'Granted SQL for user nifi' echo "0.2.0 scripts installed"
Add shortcut for emacs client
# Shortcut for system file launch tool alias open=xdg-open # Git aliases # alias gb='git branch' alias gba='git branch --all --verbose' alias gc='git commit --verbose' alias gco='git checkout' alias gws='git status --short' alias gwS='git status' alias gwd='git diff --no-ext-diff' alias gwdD='git diff --no-ext-diff --word-diff' alias gs='git stash' alias gsa='git stash apply' alias gia='git add' alias gid='git diff --no-ext-diff --cached' alias giD='git diff --no-ext-diff --cached --word-diff' alias gl='git log --topo-order'
# Shortcut for system file launch tool alias open=xdg-open # alias for using emacs when running in server mode alias em='emacsclient -t' # Git aliases # alias gb='git branch' alias gba='git branch --all --verbose' alias gc='git commit --verbose' alias gco='git checkout' alias gws='git status --short' alias gwS='git status' alias gwd='git diff --no-ext-diff' alias gwdD='git diff --no-ext-diff --word-diff' alias gs='git stash' alias gsa='git stash apply' alias gia='git add' alias gid='git diff --no-ext-diff --cached' alias giD='git diff --no-ext-diff --cached --word-diff' alias gl='git log --topo-order'
Fix the .NET Core download script
#/bin/bash apt-get -y update sudo apt-get -y install libunwind8 gettext wget https://dotnetcli.blob.core.windows.net/dotnet/Sdk/2.1.300-rc1-008673/dotnet-sdk-2.1.300-rc1-008673-linux-arm.tar.gz sudo mkdir /opt/dotnet sudo tar -xvf dotnet-sdk-latest-linux-arm.tar.gz -C /opt/dotnet sudo ln -s /opt/dotnet/dotnet /usr/local/bin dotnet --info
#/bin/bash apt-get -y update sudo apt-get -y install libunwind8 gettext wget https://dotnetcli.blob.core.windows.net/dotnet/Sdk/2.1.300-rc1-008673/dotnet-sdk-2.1.300-rc1-008673-linux-arm.tar.gz sudo mkdir /opt/dotnet sudo tar -xvf dotnet-sdk-2.1.300-rc1-008673-linux-arm.tar.gz -C /opt/dotnet sudo ln -s /opt/dotnet/dotnet /usr/local/bin dotnet --info
Use home to get cloudbeaver config
# !/bin/bash name="run-cloudbeaver" if [ "$(docker ps -q -f name=$name)" ]; then read -p "alread running container $name stop this (yes|no): " yesno if [ "$yesno" = "yes" ]; then echo "stop container" docker stop $name fi exit 0 fi path=$(pwd) if [ -d "$1" ]; then path=$(realpath $1) fi docker run \ --rm \ --detach \ --name "$name" \ --publish 8978:8978 \ -v /var/cloudbeaver/workspace:/opt/cloudbeaver/workspace \ dbeaver/cloudbeaver
# !/bin/bash name="run-cloudbeaver" if [ "$(docker ps -q -f name=$name)" ]; then read -p "alread running container $name stop this (yes|no): " yesno if [ "$yesno" = "yes" ]; then echo "stop container" docker stop $name fi exit 0 fi path=$(pwd) if [ -d "$1" ]; then path=$(realpath $1) fi docker run \ --rm \ --detach \ --name "$name" \ --publish 8978:8978 \ -v $HOME/.cloudbeaver/workspace:/opt/cloudbeaver/workspace \ dbeaver/cloudbeaver:latest
Make test failures easier to see and understand.
#!/bin/bash # Make sure, we run in the root of the repo and # therefore run the tests on all packages base_dir="$( cd "$(dirname "$0")/.." && pwd )" cd "$base_dir" || { echo "Cannot cd to '$base_dir'. Aborting." >&2 exit 1 } rc=0 go_dirs() { go list -f '{{.Dir}}' ./... | tr '\n' '\0' } echo "Running go fmt" diff <(echo -n) <(go_dirs | xargs -0 gofmt -s -d -l) rc=$((rc || $?)) echo "Running goimports" diff -u <(echo -n) <(go_dirs | xargs -0 goimports -l) rc=$((rc || $?)) echo "Running go vet" go vet -all ./... rc=$((rc || $?)) echo "Running go test" go test -v ./... rc=$((rc || $?)) echo "Testing kinflate demos" mdrip --mode test --label test ./cmd/kinflate rc=$((rc || $?)) exit $rc
#!/bin/bash # Make sure, we run in the root of the repo and # therefore run the tests on all packages base_dir="$( cd "$(dirname "$0")/.." && pwd )" cd "$base_dir" || { echo "Cannot cd to '$base_dir'. Aborting." >&2 exit 1 } rc=0 function go_dirs { go list -f '{{.Dir}}' ./... | tr '\n' '\0' } function runTest { local name=$1 local result="SUCCESS" printf "============== begin %s\n" "$name" $name local code=$? rc=$((rc || $code)) if [ $code -ne 0 ]; then result="FAILURE" fi printf "============== end %s : %s code=%d\n\n\n" "$name" "$result" $code } function testGoFmt { diff <(echo -n) <(go_dirs | xargs -0 gofmt -s -d -l) } function testGoImports { diff -u <(echo -n) <(go_dirs | xargs -0 goimports -l) } function testGoVet { go vet -all ./... } function testGoTest { go test -v ./... } function testTutorial { mdrip --mode test --label test ./cmd/kinflate } runTest testGoFmt runTest testGoImports runTest testGoVet runTest testGoTest runTest testTutorial exit $rc
Delete docker mount since docker plugin use docker-jave.
#!/bin/bash set -e JENKINS_NAME=${JENKINS_NAME:-jenkins} JENKINS_VOLUME=${JENKINS_VOLUME:-jenkins-volume} GERRIT_NAME=${GERRIT_NAME:-gerrit} JENKINS_IMAGE_NAME=${JENKINS_IMAGE_NAME:-openfrontier/jenkins} JENKINS_OPTS=${JENKINS_OPTS:---prefix=/jenkins} # Create Jenkins volume. if [ -z "$(docker ps -a | grep ${JENKINS_VOLUME})" ]; then docker run \ --name ${JENKINS_VOLUME} \ ${JENKINS_IMAGE_NAME} \ echo "Create Jenkins volume." fi # Start Jenkins. docker run \ --name ${JENKINS_NAME} \ --link ${GERRIT_NAME}:gerrit \ -v /var/run/docker.sock:/var/run/docker.sock \ -v /bin/docker:/bin/docker \ -p 50000:50000 \ --volumes-from ${JENKINS_VOLUME} \ -d ${JENKINS_IMAGE_NAME} ${JENKINS_OPTS}
#!/bin/bash set -e JENKINS_NAME=${JENKINS_NAME:-jenkins} JENKINS_VOLUME=${JENKINS_VOLUME:-jenkins-volume} GERRIT_NAME=${GERRIT_NAME:-gerrit} JENKINS_IMAGE_NAME=${JENKINS_IMAGE_NAME:-openfrontier/jenkins} JENKINS_OPTS=${JENKINS_OPTS:---prefix=/jenkins} # Create Jenkins volume. if [ -z "$(docker ps -a | grep ${JENKINS_VOLUME})" ]; then docker run \ --name ${JENKINS_VOLUME} \ ${JENKINS_IMAGE_NAME} \ echo "Create Jenkins volume." fi # Start Jenkins. docker run \ --name ${JENKINS_NAME} \ --link ${GERRIT_NAME}:gerrit \ -p 50000:50000 \ --volumes-from ${JENKINS_VOLUME} \ -d ${JENKINS_IMAGE_NAME} ${JENKINS_OPTS}
Use new url from Atlas (previously Vagrant Cloud)
#!/bin/bash set -ex FULL_PATH=`pwd "$(dirname $0)/.."` fetch_bosh_lite_ovf(){ mkdir -p tmp ( cd tmp rm -f virtualbox.box wget https://vagrantcloud.com/cloudfoundry/boxes/bosh-lite/versions/6/providers/virtualbox.box tar xf virtualbox.box ) echo "${FULL_PATH}/tmp/box.ovf" } set_virtualbox_home(){ VBoxManage setproperty machinefolder "/var/vcap/data/VirtualBox\ VMs" } main() { set_virtualbox_home ovf_file=`fetch_bosh_lite_ovf` template_path="${FULL_PATH}/templates/virtualbox.json" packer build -var "source_path=${ovf_file}" -var "build_number=${GO_PIPELINE_COUNTER}" $template_path } main
#!/bin/bash set -ex FULL_PATH=`pwd "$(dirname $0)/.."` fetch_bosh_lite_ovf(){ mkdir -p tmp ( cd tmp rm -f virtualbox.box box_name=bosh-lite-virtualbox-ubuntu-trusty-${BOSH_LITE_VERSION}.box wget http://d2u2rxhdayhid5.cloudfront.net/${box_name} mv ${box_name} virtualbox.box tar xf virtualbox.box ) echo "${FULL_PATH}/tmp/box.ovf" } set_virtualbox_home(){ VBoxManage setproperty machinefolder "/var/vcap/data/VirtualBox\ VMs" } main() { set_virtualbox_home ovf_file=`fetch_bosh_lite_ovf` template_path="${FULL_PATH}/templates/virtualbox.json" packer build -var "source_path=${ovf_file}" -var "build_number=${GO_PIPELINE_COUNTER}" $template_path } main
Fix db loader image name.
#!/usr/bin/env bash project_dir=$(pwd) access_key=${AWS_ACCESS_KEY_ID:-none} secret_key=${AWS_SECRET_ACCESS_KEY:-none} region=${AWS_REGION:-eu-west-1} environment=${ENVIRONMENT:-yarn} db_host=($(node util/rds-endpoint.js)) db_name=${DB_NAME:-chusrodriguez} db_user=${DB_USERNAME:-root} db_password=$DB_ROOT_PASSWORD docker images docker run -ti -e PGSQL_USER=$db_user -e PGSQL_PASS=$db_password -e PGSQL_HOST=$db_host -e PGSQL_DB=$db_name psql-builder:latest
#!/usr/bin/env bash project_dir=$(pwd) access_key=${AWS_ACCESS_KEY_ID:-none} secret_key=${AWS_SECRET_ACCESS_KEY:-none} region=${AWS_REGION:-eu-west-1} environment=${ENVIRONMENT:-yarn} db_host=($(node util/rds-endpoint.js)) db_name=${DB_NAME:-chusrodriguez} db_user=${DB_USERNAME:-root} db_password=$DB_ROOT_PASSWORD docker run -ti -e PGSQL_USER=$db_user -e PGSQL_PASS=$db_password -e PGSQL_HOST=$db_host -e PGSQL_DB=$db_name psql-loader:latest
Add missing pkg_bin_dirs entry to elfutils
pkg_name=elfutils pkg_origin=core pkg_version=0.166 pkg_license=('GPL-3.0') pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>" pkg_description="elfutils is a collection of various binary tools such as eu-objdump, eu-readelf, and other utilities that allow you to inspect and manipulate ELF files." pkg_upstream_url=https://fedorahosted.org/elfutils/ pkg_source=https://fedorahosted.org/releases/e/l/$pkg_name/$pkg_version/$pkg_name-$pkg_version.tar.bz2 pkg_shasum=3c056914c8a438b210be0d790463b960fc79d234c3f05ce707cbff80e94cba30 pkg_deps=( core/glibc core/zlib ) pkg_build_deps=( core/gcc core/glibc core/m4 core/make core/zlib ) pkg_lib_dirs=(lib) pkg_include_dirs=(include)
pkg_name=elfutils pkg_origin=core pkg_version=0.166 pkg_license=('GPL-3.0') pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>" pkg_description="elfutils is a collection of various binary tools such as eu-objdump, eu-readelf, and other utilities that allow you to inspect and manipulate ELF files." pkg_upstream_url=https://fedorahosted.org/elfutils/ pkg_source=https://fedorahosted.org/releases/e/l/$pkg_name/$pkg_version/$pkg_name-$pkg_version.tar.bz2 pkg_shasum=3c056914c8a438b210be0d790463b960fc79d234c3f05ce707cbff80e94cba30 pkg_deps=( core/glibc core/zlib ) pkg_build_deps=( core/gcc core/glibc core/m4 core/make core/zlib ) pkg_bin_dirs=(bin) pkg_lib_dirs=(lib) pkg_include_dirs=(include)
Add flags for verbose test output
#!/usr/bin/env bash bazel_bin=$(which bazelisk 2>/dev/null) if [[ -z "$bazel_bin" ]]; then echo "Warning: bazelisk is not installed; falling back to bazel." bazel_bin=bazel fi ${bazel_bin} test \ "$@" \ //polygerrit-ui:karma_test
#!/usr/bin/env bash bazel_bin=$(which bazelisk 2>/dev/null) if [[ -z "$bazel_bin" ]]; then echo "Warning: bazelisk is not installed; falling back to bazel." bazel_bin=bazel fi # At least temporarily we want to know what is going on even when all tests are # passing, so we have a better chance of debugging what happens in CI test runs # that were supposed to catch test failures, but did not. ${bazel_bin} test \ "$@" \ --test_verbose_timeout_warnings \ --test_output=all \ //polygerrit-ui:karma_test
Add report output file of still-to-migrate domains
#!/bin/sh tmpdir=tmp/dns_report mkdir -p $tmpdir [ -z "$DNS_SERVER" ] && DNS_SERVER=8.8.8.8 tools/site_hosts.sh | while read host do dig @$DNS_SERVER +trace $host > $tmpdir/$host.txt done cat $tmpdir/*.txt | tools/dns_report.pl $tmpdir/hosts.csv
#!/bin/sh tmpdir=tmp/dns_report mkdir -p $tmpdir [ -z "$DNS_SERVER" ] && DNS_SERVER=8.8.8.8 tools/site_hosts.sh | while read host do dig @$DNS_SERVER +trace $host > $tmpdir/$host.txt done cat $tmpdir/*.txt | tools/dns_report.pl $tmpdir/hosts.csv grep $tmpdir/hosts.csv -e "^.*AKAMAI.*$" > $tmpdir/still_to_migrate.csv
Remove unnecessary optimization from build for now
#!/bin/sh for i in $( ls ./test/correct ); do echo ".: Testing: $i" python3 -OO main.py -i ./test/correct/$i done
#!/bin/sh for i in $( ls ./test/correct ); do echo ".: Testing: $i" python3 main.py -i ./test/correct/$i done
Use fork ahead of Trusted Sec's master branch to fix pymssql issue.
#!/bin/bash - apt update apt install --yes python python-pip xfce4 firefox git clone --depth 1 https://github.com/trustedsec/social-engineer-toolkit.git set/ chown -R vagrant:vagrant set cd set pip install -r requirements.txt
#!/bin/bash - apt update apt install --yes python python-pip xfce4 firefox git clone --depth 1 --branch fix-pymssql --single-branch https://github.com/meitar/social-engineer-toolkit.git set/ chown -R vagrant:vagrant set cd set pip install -r requirements.txt
Add explicit --port parameter for osm2pgsql
#!/bin/bash if [ -z "$1" ] || [ ! -f "$1" ] then echo Please pass OSM datafile as argument exit 1 fi OSM_DATAFILE=$1 if [ -z "$PGDATABASE" ] then if [ -z "$2" ] then echo Please pass database name as second argument exit 1 else export PGDATABASE=$2 fi fi dropdb --if-exists $PGDATABASE || exit 1 createdb $PGDATABASE || exit 1 psql -c 'CREATE EXTENSION postgis;' || exit 1 psql -c 'CREATE EXTENSION hstore;' || exit 1 osm2pgsql -d $PGDATABASE -c -k -s \ -S ./power.style \ $OSM_DATAFILE || exit 1 time psql -f ./prepare-tables.sql
#!/bin/bash if [ -z "$1" ] || [ ! -f "$1" ] then echo Please pass OSM datafile as argument exit 1 fi OSM_DATAFILE=$1 if [ -z "$PGDATABASE" ] then if [ -z "$2" ] then echo Please pass database name as second argument exit 1 else export PGDATABASE=$2 fi fi if [ -n "$PGPORT" ] then # osm2pgsql needs an explicit --port parameter OSM2PGSQL_FLAGS=--port=$PGPORT fi dropdb --if-exists $PGDATABASE || exit 1 createdb $PGDATABASE || exit 1 psql -c 'CREATE EXTENSION postgis;' || exit 1 psql -c 'CREATE EXTENSION hstore;' || exit 1 osm2pgsql -d $PGDATABASE -c -k -s \ -S ./power.style \ $OSM2PGSQL_FLAGS \ $OSM_DATAFILE || exit 1 time psql -f ./prepare-tables.sql
Install PEAR before running CI.
#!/bin/sh pyrus install pear/Archive_Tar phpenv rehash
#!/bin/sh pyrus install pear/PEAR pyrus install pear/Archive_Tar phpenv rehash
Update the gemfile special case
#!/bin/bash -l set -e PACKAGE_VERSION=$(jq '.version' --raw-output ./package.json) PACKAGE_VERSION=$(echo $PACKAGE_VERSION | sed -e 's/0.0.0./0.0.0.pre./g') PACKAGE_VERSION=$(echo $PACKAGE_VERSION | sed -e 's/-rc/.pre/g') echo "************* Building v$PACKAGE_VERSION *************" # Setup rubygems creds mkdir -p ~/.gem cat << EOF > ~/.gem/credentials --- :rubygems_api_key: ${RUBYGEMS_TOKEN} EOF chmod 0600 ~/.gem/credentials cd ./lib/$* echo "**************** Copying assets files to build directory ****************" cp -R ../build lib/ echo "**************** Versioning ****************" bundle exec rake version\["$PACKAGE_VERSION"\] echo "**************** Installing ****************" bundle install echo "**************** Linting ****************" bundle exec rake lint echo "**************** Testing ****************" bundle exec rake test bundle update echo "**************** Building ****************" (bundle exec rake build; gem push pkg/*.gem) && wait
#!/bin/bash -l set -e PACKAGE_VERSION=$(jq '.version' --raw-output ./package.json) PACKAGE_VERSION=$(echo $PACKAGE_VERSION | sed -e 's/0.0.0./0.0.0.pre./g') PACKAGE_VERSION=$(echo $PACKAGE_VERSION | sed -e 's/-rc/.pre/g') echo "************* Building v$PACKAGE_VERSION *************" # Setup rubygems creds mkdir -p ~/.gem cat << EOF > ~/.gem/credentials --- :rubygems_api_key: ${RUBYGEMS_TOKEN} EOF chmod 0600 ~/.gem/credentials cd ./lib/$* echo "**************** Copying assets files to build directory ****************" cp -R ../build lib/ perl -pi -e "s/\"octicons\", \"[^\"]+\"/\"octicons\", \"${PACKAGE_VERSION}\"/" ./Gemfile echo "**************** Installing ****************" bundle install echo "**************** Linting ****************" bundle exec rake lint echo "**************** Testing ****************" bundle exec rake test echo "**************** Versioning ****************" bundle exec rake version\["$PACKAGE_VERSION"\] bundle update echo "**************** Building ****************" (bundle exec rake build; gem push pkg/*.gem) && wait
Add iina, mactracker and postman casks
#!bin/bash brew cask install aerial brew cask install android-studio brew cask install appcleaner brew cask install appcode brew cask install arq brew cask install boxer brew cask install caffeine brew cask install clion brew cask install cyberduck brew cask install daisydisk brew cask install dash brew cask install diffmerge brew cask install docker brew cask install dolphin brew cask install dropbox brew cask install fork brew cask install geekbench brew cask install google-chrome brew cask install google-hangouts brew cask install handbrake brew cask install icons8 brew cask install imageoptim brew cask install intellij-idea brew cask install invisionsync brew cask install istat-menus brew cask install iterm2 brew cask install java brew cask install kitematic brew cask install macvim brew cask install markright brew cask install messenger brew cask install mp4tools brew cask install mplayerx brew cask install openemu brew cask install parallels-desktop brew cask install pycharm brew cask install sequel-pro brew cask install sip brew cask install sketch brew cask install skype brew cask install slack brew cask install sourcetree brew cask install spotify brew cask install sqlitebrowser brew cask install steveschow-gfxcardstatus brew cask install sublime-text brew cask install subtitles brew cask install vlc brew cask install webstorm brew cask install whatsapp brew cask install zeplin brew doctor brew cleanup && brew prune && brew cask cleanup
#!bin/bash brew cask install aerial brew cask install android-studio brew cask install appcleaner brew cask install appcode brew cask install arq brew cask install boxer brew cask install caffeine brew cask install clion brew cask install cyberduck brew cask install daisydisk brew cask install dash brew cask install diffmerge brew cask install docker brew cask install dolphin brew cask install dropbox brew cask install fork brew cask install geekbench brew cask install google-chrome brew cask install google-hangouts brew cask install handbrake brew cask install icons8 brew cask install iina brew cask install imageoptim brew cask install intellij-idea brew cask install invisionsync brew cask install istat-menus brew cask install iterm2 brew cask install java brew cask install kitematic brew cask install mactracker brew cask install macvim brew cask install markright brew cask install messenger brew cask install mp4tools brew cask install mplayerx brew cask install openemu brew cask install parallels-desktop brew cask install postman brew cask install pycharm brew cask install sequel-pro brew cask install sip brew cask install sketch brew cask install skype brew cask install slack brew cask install sourcetree brew cask install spotify brew cask install sqlitebrowser brew cask install steveschow-gfxcardstatus brew cask install sublime-text brew cask install subtitles brew cask install vlc brew cask install webstorm brew cask install whatsapp brew cask install zeplin brew doctor brew cleanup && brew prune && brew cask cleanup
Modify script according to changed directory structure
#!/bin/sh -e TS=`date --iso-8601` TARGET_DIR=src/com/komanov/serialization TARGET_NAME=jmh DATA_DIR=./site/public/data/scala-serialization bazel run -- //${TARGET_DIR}/${TARGET_NAME} -rf json -rff jmh.json | tee jmh.log mv ./bazel-bin/${TARGET_DIR}/${TARGET_NAME}/${TARGET_NAME}.runfiles/stuff/jmh.json ${DATA_DIR}/jmh_${TS}.json mv ./jmh.log ${DATA_DIR}/jmh_${TS}.log echo ${TS} echo "Don't forget to change JS, commit, push and deploy!"
#!/bin/sh -ex DIR=`dirname $0`/.. DIR=`pwd $DIR` TS=`date --iso-8601` TARGET=src/com/komanov/serialization/jmh TARGET_NAME=`basename $TARGET` DATA_DIR=${DIR}/site/public/data/scala-serialization if test ! -d /the/dir; then echo "no data dir, maybe you forgot to link site?"; exit 1; fi cd ${DIR}/projects/scala_serialization bazel run -- //${TARGET} -rf json -rff jmh.json | tee jmh.log mv ./bazel-bin/${TARGET}/${TARGET_NAME}.runfiles/stuff/jmh.json ${DATA_DIR}/jmh_${TS}.json mv ./jmh.log ${DATA_DIR}/jmh_${TS}.log cd ${DIR} echo ${TS} echo "Don't forget to change JS, commit, push and deploy!"
Tweak install script for Travis
#!/bin/bash # Run updates sudo apt-get update; sudo apt-get upgrade; sudo apt-get install -y curl python python-dev; # Install conda which conda || { CONDA=Miniconda2-latest-Linux-x86_64.sh curl -sLO https://repo.continuum.io/miniconda/$CONDA; chmod +x ./$CONDA; ./$CONDA -b -p /miniconda2; rm ./$CONDA; echo export PATH=$PATH:/miniconda2/bin >> /home/vagrant/.bashrc; echo source activate pyceratOpsRecs >> /home/vagrant/.bashrc; }
#!/bin/bash # Run updates sudo export DEBIAN_FRONTEND=noninteractive; DEBIAN_FRONTEND=noninteractive; sudo apt-get -o Dpkg::Options::="--force-confnew" -q -y update; sudo apt-get -o Dpkg::Options::="--force-confnew" -q -y upgrade; sudo apt-get install -o Dpkg::Options::="--force-confnew" -q -y curl python python-dev; # Install conda which conda || { CONDA=Miniconda2-latest-Linux-x86_64.sh curl -sLO https://repo.continuum.io/miniconda/$CONDA; chmod +x ./$CONDA; ./$CONDA -b -p /miniconda2; rm ./$CONDA; echo export PATH=$PATH:/miniconda2/bin >> /home/vagrant/.bashrc; echo source activate pyceratOpsRecs >> /home/vagrant/.bashrc; }
Remove __garbage from completion scripts
#!/usr/bin/env bash # # Completions for kubectl __garbage __kubectl __kubectl=$(type -P kubectl) [ -x "${__kubectl}" ] && source <(${__kubectl} completion bash)
#!/usr/bin/env bash # # Completions for kubectl __kubectl=$(type -P kubectl) [ -x "${__kubectl}" ] && source <(${__kubectl} completion bash) unset __kubectl
Fix CI errors caused by container status commands
#!/bin/bash # FUNCTIONS function purge_ceph { docker stop $(docker ps -q) rm -rf /var/lib/ceph/* rm -rf /etc/ceph } # MAIN purge_ceph
#!/bin/bash # FUNCTIONS function purge_ceph { container_count=$(docker ps -q | wc -l) if [[ "${container_count}" -gt 0 ]]; then docker stop $(docker ps -q) || echo failed to stop containers fi rm -rf /var/lib/ceph/* rm -rf /etc/ceph } # MAIN purge_ceph
Fix path to public dir
#!/bin/bash # Mirror local changes to a remote SFTP directory : ${FTP_HOSTNAME:?} : ${FTP_REMOTEDIR:?} : ${FTP_USERNAME:?} : ${FTP_PASSWORD:?} LOCALDIR="$(cd ../public && pwd)" lftp -c "open sftp://$FTP_HOSTNAME user $FTP_USERNAME '$FTP_PASSWORD' mirror --only-newer --reverse --delete --verbose $LOCALDIR $FTP_REMOTEDIR "
#!/bin/bash # Mirror local changes to a remote SFTP directory : ${FTP_HOSTNAME:?} : ${FTP_REMOTEDIR:?} : ${FTP_USERNAME:?} : ${FTP_PASSWORD:?} LOCALDIR="$(cd public && pwd)" lftp -c "open sftp://$FTP_HOSTNAME user $FTP_USERNAME '$FTP_PASSWORD' mirror --only-newer --reverse --delete --verbose $LOCALDIR $FTP_REMOTEDIR "
Replace apt with apt-get to improve compatibility
PLATFORM=$(uname) if [[ $PLATFORM == "Linux" ]]; then echo "Install vim & git" apt install vim git-core else # If brew is already installed if [[ $(brew &> /dev/null || echo $?) == 1 ]]; then brew install vim git else echo "Install XCode (Depends for brew)" xcode-select --install echo "Install Brew" ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" echo "Install vim" brew install vim fi fi echo "Install Vundle" git clone https://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim mkdir ~/.vim/syntax curl https://raw.githubusercontent.com/evanmiller/nginx-vim-syntax/master/syntax/nginx.vim > ~/.vim/syntax/nginx.vim echo "Install vimrc" git clone https://github.com/PixiBixi/dotfiles cp dotfiles/.vimrc $HOME/ vim -c "VundleInstall" -c "q | q"
PLATFORM=$(uname) if [[ $PLATFORM == "Linux" ]]; then echo "Install vim & git" apt-get install vim git-core else # If brew is already installed if [[ $(brew &> /dev/null || echo $?) == 1 ]]; then brew install vim git else echo "Install XCode (Depends for brew)" xcode-select --install echo "Install Brew" ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" echo "Install vim" brew install vim fi fi echo "Install Vundle" git clone https://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim mkdir ~/.vim/syntax curl https://raw.githubusercontent.com/evanmiller/nginx-vim-syntax/master/syntax/nginx.vim > ~/.vim/syntax/nginx.vim echo "Install vimrc" git clone https://github.com/PixiBixi/dotfiles cp dotfiles/.vimrc $HOME/ vim -c "VundleInstall" -c "q | q"
Use docker tag "devel" for development
#!/bin/bash set -ex cluster/vagrant/sync_config.sh make all for VM in `vagrant status | grep -v "^The Libvirt domain is running." | grep running | cut -d " " -f1`; do vagrant rsync $VM # if you do not use NFS vagrant ssh $VM -c "cd /vagrant && sudo hack/build-docker.sh" done export KUBECTL="cluster/kubectl.sh --core" cluster/deploy.sh
#!/bin/bash set -ex cluster/vagrant/sync_config.sh make all DOCKER_TAG=devel for VM in `vagrant status | grep -v "^The Libvirt domain is running." | grep running | cut -d " " -f1`; do vagrant rsync $VM # if you do not use NFS vagrant ssh $VM -c "cd /vagrant && export DOCKER_TAG=devel && sudo -E hack/build-docker.sh" done export KUBECTL="cluster/kubectl.sh --core" cluster/deploy.sh
Update path of JADE jar file inside the startup script.
#!/bin/bash clear BASE=clients echo "Starting the JADE server." java -cp $BASE/jade.jar jade.Boot -gui & if [ $? -ne 0 ]; then echo "It was not possible to run the jade server. Check your $BASE dir for the jade.jar file." exit $? fi echo -e "\n\nWait until the GUI interface is shown and start a client using the exec-client.sh script.\n\n"
#!/bin/bash clear JAR=repo/com/tilab/jade/4.4.0/jade-4.4.0.jar echo "Starting the JADE server." java -cp $JAR jade.Boot -gui & if [ $? -ne 0 ]; then echo "It was not possible to run the jade server. Check if the $JAR file exists." exit $? fi echo -e "\n\nWait until the GUI interface is shown and start a client using the exec-client.sh script.\n\n"
Switch from pm2 to forever
stage=$1 case $stage in pre ) pm2 stop greenbot ;; deploy ) git pull rm -r node_modules npm install ;; post ) pm2 start node_modules/greenbot/bin/greenbot.js --node-args "modacity.cson" ;; esac
stage=$1 case $stage in pre ) forever stop node_modules/greenbot/bin/greenbot.js ;; deploy ) git pull rm -r node_modules npm install ;; post ) forever start node_modules/greenbot/bin/greenbot.js modacity.cson ;; esac
Fix to retain and pass args
#!/usr/bin/env bash # # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # If root, ensure that --user-data-dir is specified if [ "$(id -u)" = "0" ]; then while test $# -gt 0 do if [[ $1 == --user-data-dir=* ]]; then DATA_DIR_SET=1 fi shift done if [ -z $DATA_DIR_SET ]; then echo "It is recommended to start vscode as a normal user. To run as root, you must specify an alternate user data directory with the --user-data-dir argument." 1>&2 exit 1 fi fi if [ ! -L $0 ]; then # if path is not a symlink, find relatively VSCODE_PATH="$(dirname $0)/.." else if which readlink >/dev/null; then # if readlink exists, follow the symlink and find relatively VSCODE_PATH="$(dirname $(readlink -f $0))/.." else # else use the standard install location VSCODE_PATH="/usr/share/@@NAME@@" fi fi ELECTRON="$VSCODE_PATH/@@NAME@@" CLI="$VSCODE_PATH/resources/app/out/cli.js" ELECTRON_RUN_AS_NODE=1 "$ELECTRON" "$CLI" "$@" exit $?
#!/usr/bin/env bash # # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # If root, ensure that --user-data-dir is specified ARGS=$@ if [ "$(id -u)" = "0" ]; then while test $# -gt 0 do if [[ $1 == --user-data-dir=* ]]; then DATA_DIR_SET=1 fi shift done if [ -z $DATA_DIR_SET ]; then echo "It is recommended to start vscode as a normal user. To run as root, you must specify an alternate user data directory with the --user-data-dir argument." 1>&2 exit 1 fi fi if [ ! -L $0 ]; then # if path is not a symlink, find relatively VSCODE_PATH="$(dirname $0)/.." else if which readlink >/dev/null; then # if readlink exists, follow the symlink and find relatively VSCODE_PATH="$(dirname $(readlink -f $0))/.." else # else use the standard install location VSCODE_PATH="/usr/share/@@NAME@@" fi fi ELECTRON="$VSCODE_PATH/@@NAME@@" CLI="$VSCODE_PATH/resources/app/out/cli.js" ELECTRON_RUN_AS_NODE=1 "$ELECTRON" "$CLI" $ARGS exit $?
Disable strict host key checking (since we can't approve it manually)
DYNO=${DYNO##*.} DYNO=${DYNO:-$RANDOM} ips=($COMPOSE_RETHINKDB_INTERNAL_IPS) nodes=${#ips[@]} identity=$(mktemp) echo "$COMPOSE_SSH_KEY" >$identity ssh -fo ExitOnForwardFailure=yes \ -NT compose@$COMPOSE_SSH_PUBLIC_HOSTNAME -p $COMPOSE_SSH_PUBLIC_PORT \ -i $identity \ -L 127.0.0.1:28015:${ips[$((DYNO % nodes))]}:28015 node server.js & wait %% trap exit SIGTERM SIGKILL trap "kill 0" EXIT
DYNO=${DYNO##*.} DYNO=${DYNO:-$RANDOM} ips=($COMPOSE_RETHINKDB_INTERNAL_IPS) nodes=${#ips[@]} identity=$(mktemp) echo "$COMPOSE_SSH_KEY" >$identity ssh -fo ExitOnForwardFailure=yes -o StrictHostKeyChecking=no \ -NT compose@$COMPOSE_SSH_PUBLIC_HOSTNAME -p $COMPOSE_SSH_PUBLIC_PORT \ -i $identity \ -L 127.0.0.1:28015:${ips[$((DYNO % nodes))]}:28015 node server.js & wait %% trap exit SIGTERM SIGKILL trap "kill 0" EXIT
Support for standalone console use-case
#!/bin/bash LOCK="/home/${USER}/dlrn.lock" set -e exec 200>$LOCK flock -n 200 || exit 1 if [ ! -d /home/${USER}/dlrn-logs ] then mkdir -p /home/${USER}/dlrn-logs fi source ~/.venv/bin/activate LOGFILE=/home/${USER}/dlrn-logs/dlrn-run.$(date +%s).log cd ~/dlrn echo `date` "Starting DLRN run." >> $LOGFILE dlrn --config-file /usr/local/share/dlrn/${USER}/projects.ini --info-repo /home/rdoinfo/rdoinfo/ 2>> $LOGFILE echo `date` "DLRN run complete." >> $LOGFILE
#!/bin/bash LOCK="/home/${USER}/dlrn.lock" set -e # if any arguments are given, assume console foreground execution ARG=$1 exec 200>$LOCK if ! flock -n 200 then if [ -n "$ARG" ] then echo "DLRN ${USER} running, please try again after disabling it." fi exit 1 fi if [ ! -d /home/${USER}/dlrn-logs ] then mkdir -p /home/${USER}/dlrn-logs fi source ~/.venv/bin/activate if [ -n "$ARG" ] then LOGFILE=/home/${USER}/dlrn-logs/dlrn-run.console.$(date +%s).log else LOGFILE=/home/${USER}/dlrn-logs/dlrn-run.$(date +%s).log fi cd ~/dlrn set +e echo `date` "Starting DLRN run." >> $LOGFILE dlrn --config-file /usr/local/share/dlrn/${USER}/projects.ini --info-repo /home/rdoinfo/rdoinfo/ "$@" 2>> $LOGFILE RET=$? echo `date` "DLRN run complete." >> $LOGFILE if [ -n "$ARG" ] then echo Arguments: "$@" cat $LOGFILE fi exit $RET
Enable tests on OSX, should pass now
#!/bin/bash set -xe if [ "$TRAVIS_OS_NAME" == "linux" ]; then cmake . -DCMAKE_BUILD_TYPE=$BUILD_TYPE \ -DCMAKE_CXX_COMPILER=$CXX_COMPILER \ -DCMAKE_C_COMPILER=$C_COMPILER \ -GNinja -DCMAKE_CXX_FLAGS='--coverage' \ -DLLVM_CONFIG="/usr/lib/llvm-${LLVM_VERSION}/bin/llvm-config" ninja CTEST_OUTPUT_ON_FAILURE=1 ninja test pip install --user cpp-coveralls coveralls --exclude /usr --exclude third_party --exclude test --exclude build --include libchigraph --include libchigraphdebugger --include chi --exclude libchigraph/include/chi/json.hpp else cmake . \ -DCMAKE_PREFIX_PATH='/usr/local/opt/qt5/;/usr/local/opt/gettext' \ -DCMAKE_BUILD_TYPE=Debug \ -DLLVM_CONFIG=/usr/local/opt/llvm/bin/llvm-config \ -GNinja -DCG_BUILD_DEBUGGER=OFF ninja # ninja test fi
#!/bin/bash set -xe if [ "$TRAVIS_OS_NAME" == "linux" ]; then cmake . -DCMAKE_BUILD_TYPE=$BUILD_TYPE \ -DCMAKE_CXX_COMPILER=$CXX_COMPILER \ -DCMAKE_C_COMPILER=$C_COMPILER \ -GNinja -DCMAKE_CXX_FLAGS='--coverage' \ -DLLVM_CONFIG="/usr/lib/llvm-${LLVM_VERSION}/bin/llvm-config" ninja CTEST_OUTPUT_ON_FAILURE=1 ninja test pip install --user cpp-coveralls coveralls --exclude /usr --exclude third_party --exclude test --exclude build --include libchigraph --include libchigraphdebugger --include chi --exclude libchigraph/include/chi/json.hpp else cmake . \ -DCMAKE_PREFIX_PATH='/usr/local/opt/qt5/;/usr/local/opt/gettext' \ -DCMAKE_BUILD_TYPE=Debug \ -DLLVM_CONFIG=/usr/local/opt/llvm/bin/llvm-config \ -GNinja -DCG_BUILD_DEBUGGER=OFF ninja CTEST_OUTPUT_ON_FAILURE=1 ninja test fi
Fix error in benchmarking script.
#!/bin/bash set -ex pwd="`pwd`" rev=`git rev-parse HEAD | cut -c1-8` date=`date "+%Y%m%dT%H%M"` result_path="$pwd/results/$date-$rev" lein=$pwd/lein echo "Installing/checking lein..." $lein version echo "lein installed" echo "Making uberjar" $lein uberjar echo "Uberjar made" echo "Benchmarking..." mkdir -p "$result_path" for i in 1 2 3 do # ORIGINAL VERSION version="orig" for w in $(seq 64) do $lein run -- -v $version -w $w > "$result_path/$version-w$w-i$i.txt" done # TXACT VERSION version="txact" for w in $(seq 64) do for s in $(seq 64) do $lein run -- -v $version -w $w > "$result_path/$version-w$w-s$s-i$i.txt" done done done echo "Benchmark done"
#!/bin/bash set -ex pwd="`pwd`" rev=`git rev-parse HEAD | cut -c1-8` date=`date "+%Y%m%dT%H%M"` result_path="$pwd/results/$date-$rev" lein=$pwd/lein echo "Installing/checking lein..." $lein version echo "lein installed" echo "Making uberjar" $lein uberjar echo "Uberjar made" echo "Benchmarking..." mkdir -p "$result_path" for i in 1 2 3 do # ORIGINAL VERSION version="orig" for w in $(seq 64) do $lein run -- -v $version -w $w > "$result_path/$version-w$w-i$i.txt" done # TXACT VERSION version="txact" for w in $(seq 64) do for s in $(seq 64) do $lein run -- -v $version -w $w -s $s > "$result_path/$version-w$w-s$s-i$i.txt" done done done echo "Benchmark done"
Change the containerize script to use the new docker image
#!/bin/bash set -e apk update && apk add git source /opt/resource/common.sh start_docker pushd LicenseFinder if [ ! -z "$(git diff master Dockerfile)" ]; then docker build . -t licensefinder/license_finder fi docker run -v $PWD:/lf -it licensefinder/license_finder /bin/bash \ -exlc "cd /lf && ci/scripts/run-tests.sh $RUBY_VERSION_UNDER_TEST" popd
#!/bin/sh set -e apk update && apk add git source /docker-lib.sh start_docker pushd LicenseFinder if [ ! -z "$(git diff master Dockerfile)" ]; then docker build . -t licensefinder/license_finder fi docker run -v $PWD:/lf -it licensefinder/license_finder /bin/bash \ -exlc "cd /lf && ci/scripts/run-tests.sh $RUBY_VERSION_UNDER_TEST" popd
Include global plugin directory in backups
#!/bin/bash # A script to backup all servers using duply () echo "Beginning backups..." echo "Backing up survival... (this could take a while)" duply survival backup echo "Success!" echo "Backing up lobby..." duply lobby backup echo "Success!" echo "Backing up creative..." duply creative backup echo "Success!" echo "Backing up space..." duply space backup echo "Success!" echo "Backing up development..." duply development backup echo "Success!" echo "Backing up RITcraft Survival..." duply rit-survival backup echo "Success!" echo "All done! Please double-check file integrity."
#!/bin/bash # A script to backup all servers using duply # echo "Beginning backups..." echo "Backing up global plugin directory..." duply plugindir backup echo "Success!" echo "Backing up survival... (this could take a while)" duply survival backup echo "Success!" echo "Backing up lobby..." duply lobby backup echo "Success!" echo "Backing up creative..." duply creative backup echo "Success!" echo "Backing up space..." duply space backup echo "Success!" echo "Backing up development..." duply development backup echo "Success!" echo "Backing up RITcraft Survival..." duply rit-survival backup echo "Success!" echo "All done! Please double-check file integrity."
Comment on how to set JAVA_HOME.
#!/bin/bash JNI_DIR=${JAVA_HOME}/include set -x YAZ_CONFIG=yaz-config YAZ_PREFIX=`${YAZ_CONFIG} --prefix` YAZ_CFLAGS=`${YAZ_CONFIG} --cflags` YAZ_LIBS=`${YAZ_CONFIG} --libs` swig -I"include" -I"${YAZ_PREFIX}/include" -outdir ../src/org/yaz4j/jni -package org.yaz4j.jni -o src/libyaz4j.cpp -c++ -java libyaz4j.i g++ -fPIC -I"include" -I"$JNI_DIR" -I"$JNI_DIR/linux" ${YAZ_CFLAGS} -c -Wall -o obj/libyaz4j.o src/libyaz4j.cpp g++ -fPIC -I"include" -I"$JNI_DIR" -I"$JNI_DIR/linux" ${YAZ_CFLAGS} -c -Wall -o obj/zoom-extra.o src/zoom-extra.cpp g++ -shared obj/libyaz4j.o obj/zoom-extra.o -o lib/libyaz4j.so ${YAZ_LIBS}
#!/bin/bash # You may need to do something like this: # env JAVA_HOME=/usr/lib/jvm/java-6-sun ./build.sh JNI_DIR=${JAVA_HOME}/include set -x YAZ_CONFIG=yaz-config YAZ_PREFIX=`${YAZ_CONFIG} --prefix` YAZ_CFLAGS=`${YAZ_CONFIG} --cflags` YAZ_LIBS=`${YAZ_CONFIG} --libs` swig -I"include" -I"${YAZ_PREFIX}/include" -outdir ../src/org/yaz4j/jni -package org.yaz4j.jni -o src/libyaz4j.cpp -c++ -java libyaz4j.i g++ -fPIC -I"include" -I"$JNI_DIR" -I"$JNI_DIR/linux" ${YAZ_CFLAGS} -c -Wall -o obj/libyaz4j.o src/libyaz4j.cpp g++ -fPIC -I"include" -I"$JNI_DIR" -I"$JNI_DIR/linux" ${YAZ_CFLAGS} -c -Wall -o obj/zoom-extra.o src/zoom-extra.cpp g++ -shared obj/libyaz4j.o obj/zoom-extra.o -o lib/libyaz4j.so ${YAZ_LIBS}
Add a nice theme to bat
if isinpath delta; then diff() { delta $* return 0 } export BAT_PAGER=less export GIT_PAGER=delta export BAT_THEME=base16 fi isinpath colordiff || return 0 diff() { /usr/bin/diff -u $* | colordiff } isinpath diff-highlight || return 0 diff() { /usr/bin/diff -u $* | diff-highlight | colordiff }
if isinpath delta; then diff() { delta $* return 0 } export BAT_PAGER=less export GIT_PAGER=delta fi isinpath colordiff || return 0 diff() { /usr/bin/diff -u $* | colordiff } isinpath diff-highlight || return 0 diff() { /usr/bin/diff -u $* | diff-highlight | colordiff }
Update simulator command for Xcode 9.3
# Open iOS Simulator from command line alias ios="open /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/Applications/iPhone\ Simulator.app" # Clear Xcode's DerivedData cache alias xcode-ded="rm -rf ~/Library/Developer/Xcode/DerivedData"
# Open iOS Simulator from command line alias ios="open /Applications/Xcode.app/Contents/Developer/Applications/Simulator.app" # Clear Xcode's DerivedData cache alias xcode-ded="rm -rf ~/Library/Developer/Xcode/DerivedData"
Enable verbose output for cpanm Wx
#!/bin/bash set -euo pipefail brew update -v brew install boost perl cpanminus wxwidgets brew link --overwrite perl cpanminus export SLIC3R_STATIC=1 export BOOST_DIR=/usr/local perl ./Build.PL perl ./Build.PL --gui
#!/bin/bash set -euo pipefail brew update -v brew install boost perl cpanminus wxwidgets brew link --overwrite perl cpanminus export SLIC3R_STATIC=1 export BOOST_DIR=/usr/local #perl ./Build.PL cpanm -v --local-lib local-lib Wx perl ./Build.PL --gui
Set user and db name environment variables.
#!/bin/sh POSTGIS_INSTANCE=${1:-"osm-postgis"} POSTGIS_PASSWORD=${2:-"mysecretpassword"} docker run --name ${POSTGIS_INSTANCE} -e POSTGRES_PASSWORD=${POSTGIS_PASSWORD} -d osmtw/postgis
#!/bin/sh POSTGRES_USER=postgres POSTGRES_DB=postgres POSTGIS_INSTANCE=${1:-"osmdb"} POSTGIS_PASSWORD=${2:-$(cat /dev/urandom| tr -dc _A-Z-a-z-0-9 | head -c12)} docker run --name ${POSTGIS_INSTANCE} \ -e POSTGRES_PASSWORD=${POSTGIS_PASSWORD} \ -e POSTGRES_USER=${POSTGRES_USER} \ -e POSTGRES_DB=${POSTGRES_DB} \ -d osmtw/postgis
Add commit SHA to static/revision.txt
#!/bin/bash # need to do this here because docker build has no env python manage.py collectstatic --noinput python manage.py migrate --noinput exec gunicorn wsgi:application -b 0.0.0.0:8000 -w 2 --log-file -
#!/bin/bash # need to do this here because docker build has no env python manage.py collectstatic --noinput python manage.py migrate --noinput echo "$GIT_SHA" > static/revision.txt exec gunicorn wsgi:application -b 0.0.0.0:8000 -w 2 --log-file -
Revert "Add workaround for CentOS 5 :<"
#!/bin/sh script_base_dir=`dirname $0` if [ $# != 4 ]; then echo "Usage: $0 GPG_UID GPG_KEY_NAME DESTINATION DISTRIBUTIONS" echo " e.g.: $0 1BD22CD1 mitler-manager repositories/ 'fedora centos'" exit 1 fi GPG_UID=$1 GPG_KEY_NAME=$2 DESTINATION=$3 DISTRIBUTIONS=$4 run() { "$@" if test $? -ne 0; then echo "Failed $@" exit 1 fi } for distribution in ${DISTRIBUTIONS}; do for dir in ${DESTINATION}${distribution}/*/*; do # "--checksum sha" is for CentOS 5. If we drop CentOS 5 support, # we can remove the option. test -d $dir && run createrepo --checksum sha $dir done; run gpg --armor --export ${GPG_UID} > \ ${DESTINATION}${distribution}/RPM-GPG-KEY-${GPG_KEY_NAME}; done
#!/bin/sh script_base_dir=`dirname $0` if [ $# != 4 ]; then echo "Usage: $0 GPG_UID GPG_KEY_NAME DESTINATION DISTRIBUTIONS" echo " e.g.: $0 1BD22CD1 mitler-manager repositories/ 'fedora centos'" exit 1 fi GPG_UID=$1 GPG_KEY_NAME=$2 DESTINATION=$3 DISTRIBUTIONS=$4 run() { "$@" if test $? -ne 0; then echo "Failed $@" exit 1 fi } for distribution in ${DISTRIBUTIONS}; do for dir in ${DESTINATION}${distribution}/*/*; do test -d $dir && run createrepo $dir done; run gpg --armor --export ${GPG_UID} > \ ${DESTINATION}${distribution}/RPM-GPG-KEY-${GPG_KEY_NAME}; done
Add express-generator to npm global install
#! /bin/bash # # Global nodejs modules npm install -g eslint npm install -g tslint npm install -g typescript npm install -g typings npm install -g gulp npm install -g mocha npm install -g node-inspector npm install -g bower npm install -g jasmine npm install -g karma-cli npm install -g npm npm install -g watch npm install -g babel-cli npm install -g browserify
#! /bin/bash # # Global nodejs modules npm install -g eslint npm install -g tslint npm install -g typescript npm install -g typings npm install -g gulp npm install -g mocha npm install -g node-inspector npm install -g bower npm install -g jasmine npm install -g karma-cli npm install -g npm npm install -g watch npm install -g babel-cli npm install -g browserify npm install -g express-generator
Use new path to template
#!/bin/bash DIR=`dirname $0` set -x oc login -u test -p test --insecure-skip-tls-verify=true https://localhost:8443 oc delete project enmasse-ci oc new-project enmasse-ci oc policy add-role-to-user view system:serviceaccount:$(oc project -q):default oc policy add-role-to-user edit system:serviceaccount:$(oc project -q):deployer oc process -f https://raw.githubusercontent.com/enmasseproject/openshift-configuration/master/enmasse-template.yaml | oc create -f - $DIR/wait_until_up.sh 7 || exit 1 sleep 20 # TODO: check deployment config instead $DIR/scale_controller.sh topic-mytopic 4 || exit 1 $DIR/wait_until_up.sh 10 || exit 1 sleep 120 OPENSHIFT_USER=test OPENSHIFT_TOKEN=`oc config view -o jsonpath='{.users[?(@.name == "test/localhost:8443")].user.token}'` OPENSHIFT_URL=https://localhost:8443 PN_TRACE_FRM=1 gradle check -i
#!/bin/bash DIR=`dirname $0` set -x oc login -u test -p test --insecure-skip-tls-verify=true https://localhost:8443 oc delete project enmasse-ci oc new-project enmasse-ci oc policy add-role-to-user view system:serviceaccount:$(oc project -q):default oc policy add-role-to-user edit system:serviceaccount:$(oc project -q):deployer oc process -f https://raw.githubusercontent.com/enmasseproject/openshift-configuration/master/generated/enmasse-template.yaml | oc create -f - $DIR/wait_until_up.sh 7 || exit 1 sleep 20 # TODO: check deployment config instead $DIR/scale_controller.sh topic-mytopic 4 || exit 1 $DIR/wait_until_up.sh 10 || exit 1 sleep 120 OPENSHIFT_USER=test OPENSHIFT_TOKEN=`oc config view -o jsonpath='{.users[?(@.name == "test/localhost:8443")].user.token}'` OPENSHIFT_URL=https://localhost:8443 PN_TRACE_FRM=1 gradle check -i
Use string equality, not integer equality
#!/bin/bash set -e # Fail on errors set -x # Verbosity all the way GIT_IGNORE_NEW="true" # hack for now USE_ALIOTH="false" SKIP_PBUILDER=true ## Pick which one to build -- the Asheesh fork, or the Alioth packaging if [[ "$USE_ALIOTH" -eq "true" ]] ; then GIT_URL="https://alioth.debian.org/anonscm/git/collab-maint/alpine.git" else GIT_URL="https://github.com/paulproteus/alpine-packaging.git" fi if [[ "$GIT_IGNORE_NEW" -eq "true" ]] ; then EXTRA_GIT_BUILDPACKAGE_ARGS="--git-ignore-new" else EXTRA_GIT_BUILDPACKAGE_ARGS="" fi sudo apt-get install git-buildpackage # Get latest alpine packaging git clone "$GIT_URL" alpine # Make sure it builds outside a pbuilder cd alpine git-buildpackage $EXTRA_GIT_BUILDPACKAGE_ARGS # intentionally not quoted if [[ "$SKIP_PBUILDER" -eq "true" ]] ; then exit 0 # skip pbuilder for now fi # Create a pbuilder chroot sudo apt-get install ubuntu-dev-tools pbuilder-dist sid create pbuilder-dist sid build ../*.dsc
#!/bin/bash set -e # Fail on errors set -x # Verbosity all the way GIT_IGNORE_NEW="true" # hack for now USE_ALIOTH="false" SKIP_PBUILDER=true ## Pick which one to build -- the Asheesh fork, or the Alioth packaging if [[ "$USE_ALIOTH" == "true" ]] ; then GIT_URL="https://alioth.debian.org/anonscm/git/collab-maint/alpine.git" else GIT_URL="https://github.com/paulproteus/alpine-packaging.git" fi if [[ "$GIT_IGNORE_NEW" == "true" ]] ; then EXTRA_GIT_BUILDPACKAGE_ARGS="--git-ignore-new" else EXTRA_GIT_BUILDPACKAGE_ARGS="" fi sudo apt-get install git-buildpackage # Get latest alpine packaging git clone "$GIT_URL" alpine # Make sure it builds outside a pbuilder cd alpine git-buildpackage $EXTRA_GIT_BUILDPACKAGE_ARGS # intentionally not quoted if [[ "$SKIP_PBUILDER" == "true" ]] ; then exit 0 # skip pbuilder for now fi # Create a pbuilder chroot sudo apt-get install ubuntu-dev-tools pbuilder-dist sid create pbuilder-dist sid build ../*.dsc
Drop tests against 1.3 and 1.4
#!/usr/bin/env bash set -eux versions=(1.3.0 1.4.0 1.5.0 1.5.1 1.6.0 1.7.0-RC1 1.7.0-master-SNAPSHOT) for i in ${versions[@]} do cp pom.xml pom-$i.xml perl -i -pe 's/\[1.5.0,\)/'"$i"'/g' pom-$i.xml mvn clean test -f pom-$i.xml done for i in ${versions[@]} do rm pom-$i.xml done
#!/usr/bin/env bash set -eux versions=(1.5.0 1.5.1 1.6.0 1.7.0-RC1 1.7.0-master-SNAPSHOT) for i in ${versions[@]} do cp pom.xml pom-$i.xml perl -i -pe 's/\[1.5.0,\)/'"$i"'/g' pom-$i.xml mvn clean test -f pom-$i.xml done for i in ${versions[@]} do rm pom-$i.xml done
Remove update and leave releasing to manual step for now
#!/bin/bash heroku update heroku -v HEROKU_API_KEY="$1" heroku container:login heroku container:push web --app geekplanet-staging --arg BUILD_ENV="staging" heroku container:release web --app geekplanet-staging
#!/bin/bash HEROKU_API_KEY="$1" heroku container:login heroku container:push web --app geekplanet-staging --arg BUILD_ENV="staging"
Install cockpit-dashboard in cockpit/ws container
#!/bin/sh set -ex package_name() { package="$1" if [ -n "$VERSION" ]; then package="$package-$VERSION" fi echo "$package" } OSVER=$(. /etc/os-release && echo "$VERSION_ID") if [ -z "$INSTALLER" ]; then INSTALLER="dnf" fi "$INSTALLER" -y update "$INSTALLER" install -y sed arch=`uname -p` rpm=$(ls /container/rpms/cockpit-ws-*$OSVER.*$arch.rpm /container/rpms/cockpit-bridge-*$OSVER.*$arch.rpm || true) # If there are rpm files in the current directory we'll install those if [ -n "$rpm" ]; then $INSTALLER -y install /container/rpms/cockpit-ws-*$OSVER.*$arch.rpm /container/rpms/cockpit-bridge-*$OSVER.*$arch.rpm else ws=$(package_name "cockpit-ws") bridge=$(package_name "cockpit-bridge") "$INSTALLER" -y install "$ws" "$bridge" fi "$INSTALLER" clean all rm -rf /container/rpms || true # And the stuff that starts the container ln -s /host/proc/1 /container/target-namespace chmod -v +x /container/atomic-install chmod -v +x /container/atomic-uninstall chmod -v +x /container/atomic-run
#!/bin/sh set -ex package_name() { package="$1" if [ -n "$VERSION" ]; then package="$package-$VERSION" fi echo "$package" } OSVER=$(. /etc/os-release && echo "$VERSION_ID") if [ -z "$INSTALLER" ]; then INSTALLER="dnf" fi "$INSTALLER" -y update "$INSTALLER" install -y sed arch=`uname -p` rpm=$(ls /container/rpms/cockpit-ws-*$OSVER.*$arch.rpm /container/rpms/cockpit-bridge-*$OSVER.*$arch.rpm /container/rpms/cockpit-dashboard-*$OSVER.*$arch.rpm || true) # If there are rpm files in the current directory we'll install those if [ -n "$rpm" ]; then $INSTALLER -y install /container/rpms/cockpit-ws-*$OSVER.*$arch.rpm /container/rpms/cockpit-bridge-*$OSVER.*$arch.rpm /container/rpms/cockpit-dashboard-*$OSVER.*$arch.rpm else ws=$(package_name "cockpit-ws") bridge=$(package_name "cockpit-bridge") dashboard=$(package_name "cockpit-dashboard") "$INSTALLER" -y install "$ws" "$bridge" "$dashboard" fi "$INSTALLER" clean all rm -rf /container/rpms || true # And the stuff that starts the container ln -s /host/proc/1 /container/target-namespace chmod -v +x /container/atomic-install chmod -v +x /container/atomic-uninstall chmod -v +x /container/atomic-run
Disable persistent network interface naming * We actually want eth<num> naming to match cloud
#!/bin/bash set -e CLOUD_CONFIG_FILE=pxe-cloud-config.yml curl -o $CLOUD_CONFIG_FILE http://<%=server%>:<%=port%>/api/current/templates/$CLOUD_CONFIG_FILE?nodeId=<%=nodeId%> sudo coreos-install -d <%=installDisk%> -c $CLOUD_CONFIG_FILE -b <%=repo%> <% if (typeof ignitionScriptUri !== 'undefined') { %> # Customizations for supporting CoreOS Ignition: mkdir /mnt/coreos OEM_PARTITION_NUM=6 # https://coreos.com/os/docs/latest/sdk-disk-partitions.html mount <%=installDisk%>${OEM_PARTITION_NUM} /mnt/coreos/ echo "set linux_append=\"coreos.first_boot=1 coreos.config.url=<%=ignitionScriptUri%>\"" > /mnt/coreos/grub.cfg <%} %> curl -X POST -H 'Content-Type:application/json' http://<%=server%>:<%=port%>/api/current/notification?nodeId=<%=nodeId%> sudo reboot
#!/bin/bash set -e CLOUD_CONFIG_FILE=pxe-cloud-config.yml curl -o $CLOUD_CONFIG_FILE http://<%=server%>:<%=port%>/api/current/templates/$CLOUD_CONFIG_FILE?nodeId=<%=nodeId%> sudo coreos-install -d <%=installDisk%> -c $CLOUD_CONFIG_FILE -b <%=repo%> <% if (typeof ignitionScriptUri !== 'undefined') { %> # Customizations for supporting CoreOS Ignition: mkdir /mnt/coreos OEM_PARTITION_NUM=6 # https://coreos.com/os/docs/latest/sdk-disk-partitions.html mount <%=installDisk%>${OEM_PARTITION_NUM} /mnt/coreos/ echo "set linux_append=\"net.ifnames=0 biosdevname=0 coreos.first_boot=1 coreos.config.url=<%=ignitionScriptUri%>\"" > /mnt/coreos/grub.cfg <%} %> curl -X POST -H 'Content-Type:application/json' http://<%=server%>:<%=port%>/api/current/notification?nodeId=<%=nodeId%> sudo reboot
Use the acme-0.8 branch of simp_le
#!/bin/bash set -e # Install python packages needed to build simp_le apk --update add python py-requests py-setuptools git gcc py-pip musl-dev libffi-dev python-dev openssl-dev # Get Let's Encrypt simp_le client source mkdir -p /src git -C /src clone https://github.com/kuba/simp_le.git # Install simp_le in /usr/bin cd /src/simp_le git checkout acme-0.8 python ./setup.py install # Make house cleaning cd / rm -rf /src apk del git gcc py-pip musl-dev libffi-dev python-dev openssl-dev rm -rf /var/cache/apk/*
#!/bin/bash set -e # Install python packages needed to build simp_le apk --update add python py-requests py-setuptools git gcc py-pip musl-dev libffi-dev python-dev openssl-dev # Get Let's Encrypt simp_le client source branch="acme-0.8" mkdir -p /src git -C /src clone --depth=1 -b $branch https://github.com/kuba/simp_le.git # Install simp_le in /usr/bin cd /src/simp_le python ./setup.py install # Make house cleaning cd / rm -rf /src apk del git gcc py-pip musl-dev libffi-dev python-dev openssl-dev rm -rf /var/cache/apk/*
Update deploy script to use vivliostyle-js-viewer
#!/bin/bash set -ev # setup ssh key echo -e "Host github.com\n\tStrictHostKeyChecking no\nIdentityFile ~/.ssh/deploy.key\n" >> ~/.ssh/config echo -e "$GITHUB_DEPLOY_KEY" | base64 -d > ~/.ssh/deploy.key chmod 600 ~/.ssh/deploy.key # fetch gh-pages branch cd ../.. git clone --depth=1 --branch=gh-pages git@github.com:vivliostyle/vivliostyle.js.git gh-pages cd gh-pages # git configuration git config user.email "kwkbtr@vivliostyle.com" git config user.name "kwkbtr (Travis CI)" # update gh-pages branch master=../vivliostyle/vivliostyle.js cp -R ${master}/lib/vivliostyle.min.js ${master}/resources ${master}/viewer/res viewer/ git add viewer git commit -m "Update built vivliostyle.min.js (original commit: $TRAVIS_COMMIT)" git push origin gh-pages
#!/bin/bash set -ev # setup ssh key echo -e "Host github.com\n\tStrictHostKeyChecking no\nIdentityFile ~/.ssh/deploy.key\n" >> ~/.ssh/config echo -e "$GITHUB_DEPLOY_KEY" | base64 -d > ~/.ssh/deploy.key chmod 600 ~/.ssh/deploy.key # git configuration git config user.email "kwkbtr@vivliostyle.com" git config user.name "kwkbtr (Travis CI)" cd ../ # fetch and build vivliostyle-js-viewer gem install compass git clone --depth=1 --branch=master git@github.com:vivliostyle/vivliostyle-js-viewer.git vivliostyle-js-viewer cd vivliostyle-js-viewer npm run setup-local-vivliostyle ../vivliostyle.js npm install npm run build npm run test-sauce cd ../ # fetch gh-pages branch git clone --depth=1 --branch=gh-pages git@github.com:vivliostyle/vivliostyle.js.git gh-pages cd gh-pages # update gh-pages branch cp -R ../vivliostyle-js-viewer/build/* viewer/ git add viewer git commit -m "Update vivliostyle.js (original commit: $TRAVIS_COMMIT)" git push origin gh-pages
Update the virtualenv latest version
#!/bin/bash -e # install python prereqs add-apt-repository -y ppa:deadsnakes/ppa apt-get update pip install requests[security] apt-get install -y libxml2 libxml2-dev libxslt1.1 libxslt1-dev libffi-dev libssl-dev libpq-dev libmysqlclient-dev pip install setuptools # Installing virtualenv-15.1.0 wget -nv https://pypi.python.org/packages/d4/0c/9840c08189e030873387a73b90ada981885010dd9aea134d6de30cd24cb8/virtualenv-15.1.0.tar.gz#md5=44e19f4134906fe2d75124427dc9b716 tar xvfz virtualenv-15.1.0.tar.gz cd virtualenv-15.1.0 python setup.py install cd / #!/bin/bash for file in /u14pytall/version/*; do $file done
#!/bin/bash -e # install python prereqs add-apt-repository -y ppa:deadsnakes/ppa apt-get update pip install requests[security] apt-get install -y libxml2 libxml2-dev libxslt1.1 libxslt1-dev libffi-dev libssl-dev libpq-dev libmysqlclient-dev pip install setuptools # Installing virtualenv-16.0.0 wget -nv https://files.pythonhosted.org/packages/33/bc/fa0b5347139cd9564f0d44ebd2b147ac97c36b2403943dbee8a25fd74012/virtualenv-16.0.0.tar.gz tar xvfz virtualenv-16.0.0.tar.gz cd virtualenv-16.0.0 python setup.py install cd / #!/bin/bash for file in /u14pytall/version/*; do $file done
Support for Sublime Text 3, with fallback
# Sublime Text 2 Aliases #unamestr = 'uname' local _sublime_darwin_subl=/Applications/Sublime\ Text\ 2.app/Contents/SharedSupport/bin/subl if [[ $('uname') == 'Linux' ]]; then if [ -f '/usr/bin/sublime_text' ]; then st_run() { nohup /usr/bin/sublime_text $@ > /dev/null & } else st_run() { nohup /usr/bin/sublime-text $@ > /dev/null & } fi alias st=st_run elif [[ $('uname') == 'Darwin' ]]; then # Check if Sublime is installed in user's home application directory if [[ -a $HOME/${_sublime_darwin_subl} ]]; then alias st='$HOME/${_sublime_darwin_subl}' else alias st='${_sublime_darwin_subl}' fi fi alias stt='st .'
# Sublime Text 2 Aliases local _sublime_darwin_paths _sublime_darwin_paths=( "$HOME/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl" "$HOME/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl" "/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl" "/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl" ) if [[ $('uname') == 'Linux' ]]; then if [ -f '/usr/bin/sublime_text' ]; then st_run() { nohup /usr/bin/sublime_text $@ > /dev/null & } else st_run() { nohup /usr/bin/sublime-text $@ > /dev/null & } fi alias st=st_run elif [[ $('uname') == 'Darwin' ]]; then for _sublime_path in $_sublime_darwin_paths; do if [[ -a $_sublime_path ]]; then alias st="'$_sublime_path'" fi done fi alias stt='st .'
Add missing dep to installation script
#!/bin/bash GOVER=1.12 GOFN=go$GOVER.linux-amd64 # deps echo "... installing deps ..." sudo apt-get -y install wget git \ libopenmpi-dev libhwloc-dev libsuitesparse-dev libmumps-dev \ gfortran python-scipy python-matplotlib dvipng \ libfftw3-dev libfftw3-mpi-dev libmetis-dev \ liblapacke-dev libopenblas-dev libhdf5-dev # vtk if [[ ! -z "$USE_VTK" ]]; then echo "... installing VTK ..." sudo apt-get -y install libvtk7-dev fi # go mkdir -p ~/xpkg cd ~/xpkg rm -rf go wget https://dl.google.com/go/$GOFN.tar.gz -O ~/xpkg/$GOFN.tar.gz tar xf $GOFN.tar.gz go get -u all # output echo echo "go version" go version
#!/bin/bash GOVER=1.12 GOFN=go$GOVER.linux-amd64 # deps echo "... installing deps ..." sudo apt-get -y install wget git gcc \ libopenmpi-dev libhwloc-dev libsuitesparse-dev libmumps-dev \ gfortran python-scipy python-matplotlib dvipng \ libfftw3-dev libfftw3-mpi-dev libmetis-dev \ liblapacke-dev libopenblas-dev libhdf5-dev # vtk if [[ ! -z "$USE_VTK" ]]; then echo "... installing VTK ..." sudo apt-get -y install libvtk7-dev fi # go mkdir -p ~/xpkg cd ~/xpkg rm -rf go wget https://dl.google.com/go/$GOFN.tar.gz -O ~/xpkg/$GOFN.tar.gz tar xf $GOFN.tar.gz go get -u all # output echo echo "go version" go version
Determine if property is set to a specified value
PropsValidator(){ propertyExists(){ grep -o "${2}=.*" "${1}" } $@ }
PropsValidator(){ propertyExists(){ grep -o "${2}=.*" "${1}" } isSet(){ if [[ $(grep -o "${2}=${3}" ${1}) ]]; then echo true fi } $@ }
Revert "iconv bedrock2 CI output to UTF-8"
#!/usr/bin/env bash ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" FORCE_GIT=1 git_download bedrock2 ( cd "${CI_BUILD_DIR}/bedrock2" && git submodule update --init --recursive && COQMF_ARGS='-arg "-async-proofs-tac-j 1"' make | iconv -t UTF-8 -c `#9767` )
#!/usr/bin/env bash ci_dir="$(dirname "$0")" . "${ci_dir}/ci-common.sh" FORCE_GIT=1 git_download bedrock2 ( cd "${CI_BUILD_DIR}/bedrock2" && git submodule update --init --recursive && COQMF_ARGS='-arg "-async-proofs-tac-j 1"' make )
Add explicit pip install of biopython
#!/bin/sh # Install Python dependencies echo 'Python install' ( sudo pip install ./python sudo pip install pylint ) > /dev/null 2>&1
#!/bin/sh # Install Python dependencies echo 'Python install' ( sudo pip install biopython sudo pip install ./python sudo pip install pylint ) > /dev/null 2>&1
Add ability to skip configuration of sudoers
#!/bin/bash usage() { cat << EOF usage: $0 options This script retires sandbox deployment OPTIONS: -h Show this message -v Be verbose EOF } VERBOSE=0 while getopts “vh” OPTION do case $OPTION in h) usage exit 1 ;; v) VERBOSE=1 ;; ?) usage exit ;; esac done if [[ $VERBOSE -eq 1 ]]; then verbose_arg="--diff" else verbose_arg="" fi echo -e "\e[100mInstalling ansible roles...\e[m" http_proxy=http://127.0.0.1:3128 ansible-galaxy install -r Ansiblefile.yml --force 1>/dev/null echo echo -e "\e[100m Using sudo password for the last time...\e[m" ansible-playbook plays/initial.yml ${verbose_arg} --become --ask-become-pass echo echo -e " \e[100mHere comes the magic...\e[m" ansible-playbook plays/02_bootstrap.yml ${verbose_arg}
#!/bin/bash usage() { cat << EOF usage: $0 options This script retires sandbox deployment OPTIONS: -h Show this message -s Skip sudo configuration -v Be verbose EOF } VERBOSE=0 while getopts “vhs” OPTION do case $OPTION in h) usage exit 1 ;; s) SKIP_SUDO_STEP=1 ;; v) VERBOSE=1 ;; ?) usage exit ;; esac done if [[ $VERBOSE -eq 1 ]]; then verbose_arg="--diff" else verbose_arg="" fi echo -e "\e[100mInstalling ansible roles...\e[m" http_proxy=http://127.0.0.1:3128 ansible-galaxy install -r Ansiblefile.yml --force 1>/dev/null if [[ -z ${SKIP_SUDO_STEP} ]]; then echo exit 3 echo -e "\e[100m Using sudo password for the last time...\e[m" ansible-playbook plays/initial.yml ${verbose_arg} --become --ask-become-pass echo fi echo -e " \e[100mHere comes the magic...\e[m" ansible-playbook plays/02_bootstrap.yml ${verbose_arg}
Append local meteor folder to PATH
TEST="$1" if [ "$TEST" = "unit" ]; then echo Run unit and integration test npm run test:unit UNIT=$? npm run test:integration:headless INT=$? exit $(($UNIT + $INT)) fi echo Run end2end tests: "$TEST" echo Remove old log file rm server.log echo Start end2end server npm run test:end2end:server > server.log& COUNTER=0 MAX_WAIT=900 until grep "=> App running at" server.log; do echo App has not started yet.. Waiting for $COUNTER seconds sleep 30 COUNTER=$(($COUNTER+30)) if [ $COUNTER -gt $MAX_WAIT ]; then echo Meteor takes too long to start, exiting. Server log: cat server.log exit 1 fi done sleep 10 echo Start end2end test runner chimp --ddp=http://localhost:3100 --mocha --path=tests/end2end --browser=phantomjs -- $TEST tests/end2end/setup.js CHIMP_RESULT=$? echo Server log: cat server.log rm server.log mkdir tests/mongodump mongodump -h localhost:3101 -d meteor -o ./tests/mongodump exit $CHIMP_RESULT
TEST="$1" PATH=$PATH:$HOME/.meteor/ if [ "$TEST" = "unit" ]; then echo Run unit and integration test npm run test:unit UNIT=$? npm run test:integration:headless INT=$? exit $(($UNIT + $INT)) fi echo Run end2end tests: "$TEST" echo Remove old log file rm server.log echo Start end2end server npm run test:end2end:server > server.log& COUNTER=0 MAX_WAIT=900 until grep "=> App running at" server.log; do echo App has not started yet.. Waiting for $COUNTER seconds sleep 30 COUNTER=$(($COUNTER+30)) if [ $COUNTER -gt $MAX_WAIT ]; then echo Meteor takes too long to start, exiting. Server log: cat server.log exit 1 fi done sleep 10 echo Start end2end test runner chimp --ddp=http://localhost:3100 --mocha --path=tests/end2end --browser=phantomjs -- $TEST tests/end2end/setup.js CHIMP_RESULT=$? echo Server log: cat server.log rm server.log mkdir tests/mongodump mongodump -h localhost:3101 -d meteor -o ./tests/mongodump exit $CHIMP_RESULT
Build supply and finalize with GOOS=linux
#!/usr/bin/env bash set -exuo pipefail cd "$( dirname "${BASH_SOURCE[0]}" )/.." source .envrc go build -ldflags="-s -w" -o bin/supply go/supply/cli go build -ldflags="-s -w" -o bin/finalize go/finalize/cli
#!/usr/bin/env bash set -exuo pipefail cd "$( dirname "${BASH_SOURCE[0]}" )/.." source .envrc GOOS=linux go build -ldflags="-s -w" -o bin/supply go/supply/cli GOOS=linux go build -ldflags="-s -w" -o bin/finalize go/finalize/cli
Install Python on OS X using MacPorts
#!/usr/bin/env bash if [ "$(uname)" == "Darwin" ]; then brew update brew install redis brew services start redis brew install mongodb brew services start mongodb source travis/terryfy/library_installers.sh clean_builds get_python_environment macpython $PYTHON_VERSION venv fi python setup.py install pip install -r dev-requirements.txt
#!/usr/bin/env bash if [ "$(uname)" == "Darwin" ]; then brew update brew install redis brew services start redis brew install mongodb brew services start mongodb source travis/terryfy/library_installers.sh clean_builds get_python_environment macpports $PYTHON_VERSION venv fi python setup.py install pip install -r dev-requirements.txt
Set config path. Run only tests from test dir
#!/bin/bash # Copyright 2015 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. set -e VENV=x-venv WORKSPACE=${WORKSPACE:-"/vagrant"} CONFIG_FILE=$WORKSPACE/jenkins-config.yaml # Setup a proper path, I call my virtualenv dir "$VENV" and # I've got the virtualenv command installed in /usr/local/bin PATH=$WORKSPACE/venv/bin:/usr/local/bin:$PATH if [ ! -d "$VENV" ]; then virtualenv -p python2 $VENV fi . $VENV/bin/activate pip install pip-accel pip-accel install -r solar/test-requirements.txt pushd solar PYTHONPATH=$WORKSPACE/solar CONFIG_FILE=$CONFIG_FILE py.test --cov=solar -s solar
#!/bin/bash # Copyright 2015 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. set -e VENV=x-venv WORKSPACE=${WORKSPACE:-"/vagrant"} CONFIG_FILE=$WORKSPACE/jenkins-config.yaml # Setup a proper path, I call my virtualenv dir "$VENV" and # I've got the virtualenv command installed in /usr/local/bin PATH=$WORKSPACE/venv/bin:/usr/local/bin:$PATH if [ ! -d "$VENV" ]; then virtualenv -p python2 $VENV fi . $VENV/bin/activate pip install pip-accel pip-accel install -r solar/test-requirements.txt pushd solar SOLAR_CONFIG=../.config PYTHONPATH=$WORKSPACE/solar CONFIG_FILE=$CONFIG_FILE py.test --cov=solar -s solar/test
Test runner: Displaying diff of JSON output that don't match expected
#!/usr/bin/env bash export DIR=$(pwd) for f in $( find . -name "test.sh" ); do cd $(dirname $f) chmod a+x ./test.sh bash ./test.sh cd "${DIR}" done
#!/usr/bin/env bash PATH=$(npm bin):$PATH export DIR=$(pwd) for f in $( find . -name "test.sh" ); do cd $(dirname $f) chmod a+x ./test.sh bash ./test.sh > output.json output=$(json-diff output.json expected-output.json); if [ "$output" != " undefined" ] ; then echo "Test failed: $(pwd)" json-diff output.json expected-output.json fi rm output.json cd "${DIR}" done
Install depdencies with godep in the build script
#!/bin/bash set -e echo '--- go version' go version echo '--- building packages' ./scripts/build.sh
#!/bin/bash set -e echo '--- go version' go version echo '--- install dependencies' go get github.com/tools/godep godep get echo '--- building packages' ./scripts/build.sh
Improve script output during upload
#!/bin/bash PROJECT=gitlab RELEASE_BUCKET=downloads-packages RELEASE_BUCKET_REGION=eu-west-1 function error_exit { echo "$0: fatal error: $1" 1>&2 exit 1 } if !(git diff --quiet HEAD); then error_exit 'uncommited changes' fi if !(git describe --exact-match); then error_exit 'HEAD is not tagged' fi if !(bin/omnibus clean --purge ${PROJECT}); then error_exit 'clean failed' fi if !(touch build.txt); then error_exit 'failed to mark build start time' fi if !(OMNIBUS_APPEND_TIMESTAMP=0 bin/omnibus build project ${PROJECT}); then error_exit 'build failed' fi release_package=$(find pkg/ -newer build.txt -type f -not -name '*.json') if [[ -z ${release_package} ]]; then error_exit 'could not find the release package' fi if !(aws s3 cp ${release_package} s3://#{RELEASE_BUCKET} --acl public-read --region ${RELEASE_BUCKET_REGION}); then error_exit 'release upload failed' fi echo "$0: package uploaded to https://${RELEASE_BUCKET}.s3.amazonaws.com/${release_package}"
#!/bin/bash PROJECT=gitlab RELEASE_BUCKET=downloads-packages RELEASE_BUCKET_REGION=eu-west-1 function error_exit { echo "$0: fatal error: $1" 1>&2 exit 1 } if !(git diff --quiet HEAD); then error_exit 'uncommited changes' fi if !(git describe --exact-match); then error_exit 'HEAD is not tagged' fi if !(bin/omnibus clean --purge ${PROJECT}); then error_exit 'clean failed' fi if !(touch build.txt); then error_exit 'failed to mark build start time' fi if !(OMNIBUS_APPEND_TIMESTAMP=0 bin/omnibus build project ${PROJECT}); then error_exit 'build failed' fi release_package=$(find pkg/ -newer build.txt -type f -not -name '*.json') if [[ -z ${release_package} ]]; then error_exit 'could not find the release package' fi echo 'Starting upload' if !(aws s3 cp ${release_package} s3://${RELEASE_BUCKET} --acl public-read --region ${RELEASE_BUCKET_REGION}); then error_exit 'release upload failed' fi
Remove incorrect variable in etcd.conf
#!/bin/sh . /etc/sysconfig/heat-params myip=$(ip addr show eth0 | awk '$1 == "inet" {print $2}' | cut -f1 -d/) cat > /etc/etcd/etcd.conf <<EOF # [member] ETCD_NAME="$myip" ETCD_DATA_DIR="/var/lib/etcd/default.etcd" ETCD_LISTEN_CLIENT_URLS="http://0.0.0.0:4001" ETCD_LISTEN_PEER_URLS="http://$myip:7001" [cluster] ETCD_ADVERTISE_CLIENT_URLS="http://$myip:4001" ETCD_INITIAL_ADVERTISE_PEER_URLS="http://$myip:7001" ETCD_DISCOVERY="$ETCD_DISCOVERY_URL" EOF
#!/bin/sh . /etc/sysconfig/heat-params myip=$(ip addr show eth0 | awk '$1 == "inet" {print $2}' | cut -f1 -d/) cat > /etc/etcd/etcd.conf <<EOF ETCD_NAME="$myip" ETCD_DATA_DIR="/var/lib/etcd/default.etcd" ETCD_LISTEN_CLIENT_URLS="http://0.0.0.0:4001" ETCD_LISTEN_PEER_URLS="http://$myip:7001" ETCD_ADVERTISE_CLIENT_URLS="http://$myip:4001" ETCD_INITIAL_ADVERTISE_PEER_URLS="http://$myip:7001" ETCD_DISCOVERY="$ETCD_DISCOVERY_URL" EOF
Add t alias as `git ls-files | ack`
alias ll='ls -alF' alias la='ls -A' alias l='ls -CF' alias grep='grep --color=auto' alias rm='trash' alias tigs='tig status' alias rb='rbenv' alias be='bundle exec' alias pt='pstree' alias bi='bundle install --path=vendor/bundle --binstubs=.binstubs' alias vless='/usr/local/opt/vim/share/vim/vim74/macros/less.sh' alias u='j' alias tailf='tail -f' alias gtags='ctags -R --languages=-javascript --exclude=.git --exclude=log --exclude=target --fields=+iaS --extra=+q .' alias mcopy='reattach-to-user-namespace pbcopy' alias mpaste='reattach-to-user-namespace pbpaste' alias ctt='cp ~/.tmux.sh.example .tmux.sh' alias ag='ack -H --smart-case --column --ignore-dir=.binstubs --ignore-dir=vendor --ignore-dir=log --ignore-dir=tmp --ignore-file=is:Session.vim' alias vs='vim -S Session.vim' gi() { gem install $@; rbenv rehash; rehash }
alias ll='ls -alF' alias la='ls -A' alias l='ls -CF' alias grep='grep --color=auto' alias rm='trash' alias tigs='tig status' alias rb='rbenv' alias be='bundle exec' alias pt='pstree' alias bi='bundle install --path=vendor/bundle --binstubs=.binstubs' alias vless='/usr/local/opt/vim/share/vim/vim74/macros/less.sh' alias u='j' alias tailf='tail -f' alias gtags='ctags -R --languages=-javascript --exclude=.git --exclude=log --exclude=target --fields=+iaS --extra=+q .' alias mcopy='reattach-to-user-namespace pbcopy' alias mpaste='reattach-to-user-namespace pbpaste' alias ctt='cp ~/.tmux.sh.example .tmux.sh' alias ag='ack -H --smart-case --column --ignore-dir=.binstubs --ignore-dir=vendor --ignore-dir=log --ignore-dir=tmp --ignore-file=is:Session.vim --ignore-file=is:tags' alias t='git ls-files | ack --smart-case --no-column --noenv' alias vs='vim -S Session.vim' gi() { gem install $@; rbenv rehash; rehash }
Fix virtualbox repo setup for 20.04
#!/bin/bash -eu #configure script variables [ `whoami` != root ] && echo "should run as root" && exit 1 #export SETUP_SCRIPT_LOCATION=http://koub.org/files/linux/ export SETUP_SCRIPT_LOCATION=https://raw.githubusercontent.com/antoine-morvan/linux_postinstall/master/ [ ! -e ubuntu_func.sh ] && wget -q ${SETUP_SCRIPT_LOCATION}/01_func/ubuntu_func.sh -O ubuntu_func.sh source ubuntu_func.sh mkdir -p /etc/apt/sources.list.d/ echo "deb http://download.virtualbox.org/virtualbox/debian $(lsb_release -sc) contrib" | tee /etc/apt/sources.list.d/virtualbox.list wget -q -O- http://download.virtualbox.org/virtualbox/debian/oracle_vbox_2016.asc | sudo apt-key add - upgrade install_packs virtualbox-6.0 exit
#!/bin/bash -eu #configure script variables [ `whoami` != root ] && echo "should run as root" && exit 1 #export SETUP_SCRIPT_LOCATION=http://koub.org/files/linux/ export SETUP_SCRIPT_LOCATION=https://raw.githubusercontent.com/antoine-morvan/linux_postinstall/master/ [ ! -e ubuntu_func.sh ] && wget -q ${SETUP_SCRIPT_LOCATION}/01_func/ubuntu_func.sh -O ubuntu_func.sh source ubuntu_func.sh CODENAME=$(lsb_release -cs) # temporary fix until virtualbox releases focal repo if [ "$CODENAME" == "focal" ]; then CODENAME="eoan" fi mkdir -p /etc/apt/sources.list.d/ echo "deb http://download.virtualbox.org/virtualbox/debian $CODENAME contrib" | tee /etc/apt/sources.list.d/virtualbox.list wget -q -O- http://download.virtualbox.org/virtualbox/debian/oracle_vbox_2016.asc | sudo apt-key add - upgrade install_packs virtualbox-6.0 exit
Add data directory to bootstrap script
#!/usr/bin/env bash # USAGE: # source ./script/development/bootstrap-env.sh FC_DB_NAME="feature_creature" FC_DB_HOST="localhost" FC_DB_PORT="5432" FC_DB_USER=$(whoami) echo "Setting FC_DB_NAME environment variable:" $FC_DB_NAME export FC_DB_NAME=$FC_DB_NAME echo "Setting FC_DB_HOST environment variable..." $FC_DB_HOST export FC_DB_HOST=$FC_DB_HOST echo "Setting FC_DB_PORT environment variable..." $FC_DB_PORT export FC_DB_PORT=$FC_DB_PORT echo "Setting FC_DB_USER environment variable..." $FC_DB_USER export FC_DB_USER=$FC_DB_USER echo echo "Attempting to create database user:" $FC_DB_USER createuser -s -e $FC_DB_USER echo echo "Attempting to create database:" $FC_DB_NAME createdb -e $FC_DB_NAME
#!/usr/bin/env bash # USAGE: # source ./script/development/bootstrap-env.sh FC_DB_NAME="feature_creature" FC_DB_HOST="localhost" FC_DB_PORT="5432" FC_DB_USER=$(whoami) FC_DATA_FILES_PATH="$HOME/feature-creature/data/" echo "Setting FC_DB_NAME environment variable:" $FC_DB_NAME export FC_DB_NAME=$FC_DB_NAME echo "Setting FC_DB_HOST environment variable..." $FC_DB_HOST export FC_DB_HOST=$FC_DB_HOST echo "Setting FC_DB_PORT environment variable..." $FC_DB_PORT export FC_DB_PORT=$FC_DB_PORT echo "Setting FC_DB_USER environment variable..." $FC_DB_USER export FC_DB_USER=$FC_DB_USER echo "Setting FC_DB_USER environment variable..." $FC_DATA_FILES_PATH export FC_DATA_FILES_PATH=$FC_DATA_FILES_PATH echo echo "Attempting to create database user:" $FC_DB_USER createuser -s -e $FC_DB_USER echo echo "Attempting to create database:" $FC_DB_NAME createdb -e $FC_DB_NAME echo "Attempting to create data directory:" $FC_DATA_FILES_PATH mkdir -p $FC_DATA_FILES_PATH
Use bash instead of /bin/sh
#!/bin/sh SOURCE="${BASH_SOURCE[0]}" while [ -h "$SOURCE" ]; do DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" SOURCE="$(readlink "$SOURCE")" [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" done DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" node --harmony $DIR/tdir.js "$@"
#!/bin/bash SOURCE="${BASH_SOURCE[0]}" while [ -h "$SOURCE" ]; do DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" SOURCE="$(readlink "$SOURCE")" [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" done DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" node --harmony $DIR/tdir.js "$@"
Fix github owner to 'werf' for release publisher scripts
#PUBLISH_BINTRAY_AUTH= # bintray auth user:TOKEN BINTRAY_SUBJECT=flant # bintray organization BINTRAY_REPO=werf # bintray repository BINTRAY_PACKAGE=werf # bintray package in repository GITHUB_OWNER=flant # github user/org GITHUB_REPO=werf # github repository #PUBLISH_GITHUB_TOKEN # github token granted permission to create releases RELEASE_BUILD_DIR=release-build GIT_REMOTE=origin # can be changed to upstream with env
#PUBLISH_BINTRAY_AUTH= # bintray auth user:TOKEN BINTRAY_SUBJECT=flant # bintray organization BINTRAY_REPO=werf # bintray repository BINTRAY_PACKAGE=werf # bintray package in repository GITHUB_OWNER=werf # github user/org GITHUB_REPO=werf # github repository #PUBLISH_GITHUB_TOKEN # github token granted permission to create releases RELEASE_BUILD_DIR=release-build GIT_REMOTE=origin # can be changed to upstream with env
Disable VecCore for Travis CI clang-tidy check
#!/usr/bin/env bash set -ex # We need to put in place all relevant headers before running clang-tidy. mkdir ../build cd ../build cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -Dall=On -Dtesting=On -Dx11=Off -Dalien=Off \ -Dcuda=Off -Dtmva-gpu=Off ../root # We need to prebuild a minimal set of targets which are responsible for header copy # or generation. make -j4 move_headers intrinsics_gen ClangCommentCommandList ClangCommentCommandInfo \ ClangCommentHTMLNamedCharacterReferences ClangCommentHTMLTagsProperties \ ClangCommentNodes ClangAttrImpl ClangStmtNodes ClangAttrClasses \ ClangAttrDump ClangCommentHTMLTags ClangDeclNodes ClangAttrVisitor \ ClangDiagnosticCommon ClangARMNeon ClangDiagnosticIndexName \ ClangDiagnosticParse ClangDiagnosticComment ClangDiagnosticFrontend \ ClangDiagnosticGroups ClangDiagnosticSerialization ClangDiagnosticLex \ ClangDiagnosticSema ClangAttrList ClangAttrHasAttributeImpl \ ClangDiagnosticAST ClangDiagnosticDriver ClangDiagnosticAnalysis \ ClangDriverOptions ClangAttrParserStringSwitches ClangAttrParsedAttrList \ ClangAttrTemplateInstantiate ClangAttrSpellingListIndex \ ClangAttrParsedAttrImpl ClangAttrParsedAttrKinds googletest ln -s $PWD/compile_commands.json $PWD/../root/
#!/usr/bin/env bash set -ex # We need to put in place all relevant headers before running clang-tidy. mkdir ../build cd ../build cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -Dall=On -Dtesting=On -Dx11=Off -Dalien=Off \ -Dcuda=Off -Dtmva-gpu=Off -Dveccore=Off ../root # We need to prebuild a minimal set of targets which are responsible for header copy # or generation. make -j4 move_headers intrinsics_gen ClangCommentCommandList ClangCommentCommandInfo \ ClangCommentHTMLNamedCharacterReferences ClangCommentHTMLTagsProperties \ ClangCommentNodes ClangAttrImpl ClangStmtNodes ClangAttrClasses \ ClangAttrDump ClangCommentHTMLTags ClangDeclNodes ClangAttrVisitor \ ClangDiagnosticCommon ClangARMNeon ClangDiagnosticIndexName \ ClangDiagnosticParse ClangDiagnosticComment ClangDiagnosticFrontend \ ClangDiagnosticGroups ClangDiagnosticSerialization ClangDiagnosticLex \ ClangDiagnosticSema ClangAttrList ClangAttrHasAttributeImpl \ ClangDiagnosticAST ClangDiagnosticDriver ClangDiagnosticAnalysis \ ClangDriverOptions ClangAttrParserStringSwitches ClangAttrParsedAttrList \ ClangAttrTemplateInstantiate ClangAttrSpellingListIndex \ ClangAttrParsedAttrImpl ClangAttrParsedAttrKinds googletest ln -s $PWD/compile_commands.json $PWD/../root/
Use tee instead of >>
#!/usr/bin/env bash set -eux CRONDIR=$(pwd)/crondir mkdir -p $CRONDIR source aws.sh echo "Running stackage-server-cron..." echo "('tail -f $CRONDIR/stackage-server-cron.log' to watch)" docker run --rm \ -v $CRONDIR:/home/ubuntu \ -w /home/ubuntu \ fpco/stackage-server-prod:latest \ bash -c "useradd $(whoami) -u $(id -u); sudo -u $(whoami) env HOME=/home/ubuntu AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY bash -c '/usr/local/bin/stackage-server-cron >> /home/ubuntu/stackage-server-cron.log 2>&1'"
#!/usr/bin/env bash set -eux CRONDIR=$(pwd)/crondir mkdir -p $CRONDIR source aws.sh echo "Running stackage-server-cron..." echo "('tail -f $CRONDIR/stackage-server-cron.log' to watch)" docker run --rm \ -v $CRONDIR:/home/ubuntu \ -w /home/ubuntu \ fpco/stackage-server-prod:latest \ bash -c "useradd $(whoami) -u $(id -u); sudo -u $(whoami) env HOME=/home/ubuntu AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY bash -c '/usr/local/bin/stackage-server-cron 2>&1 | tee -a /home/ubuntu/stackage-server-cron.log'"
Work even if jack has started while running the script.
#!/bin/sh STARTTIME=$(date +%s) duration () { expr $(date +%s) - $STARTTIME } GOT_JACKD=0 GOT_JACKDMP=0 # Try for 10 seconds. while [ $(duration) -lt 10 ] ; do if ! killall -9 jackd ; then echo "finished killing jackd" GOT_JACKD=1 fi if ! killall -9 jackdmp ; then echo "finished killing jackdmp" GOT_JACKDMP=1 fi if [ $GOT_JACKD -eq 1 ] ; then if [ $GOT_JACKDMP -eq 1 ] ; then exit 0 fi fi # echo $(duration) sleep 0.01 done exit 1
#!/bin/sh STARTTIME=$(date +%s) duration () { expr $(date +%s) - $STARTTIME } GOT_JACKD=0 GOT_JACKDMP=0 # Try for 10 seconds. while [ $(duration) -lt 10 ] ; do if ! killall -9 jackd ; then echo "finished killing jackd" GOT_JACKD=1 else GOT_JACKD=0 # maybe it reappeared? fi if ! killall -9 jackdmp ; then echo "finished killing jackdmp" GOT_JACKDMP=1 else GOT_JACKDMP=0 # maybe it reappeared? fi if [ $GOT_JACKD -eq 1 ] ; then if [ $GOT_JACKDMP -eq 1 ] ; then exit 0 fi fi # echo $(duration) sleep 0.01 done exit 1
Add alias to diff changes for a specific commit
gclcd() { git clone "$1" && cd "$(basename "$1" | sed 's/\.git//g')" }
gclcd() { git clone "$1" && cd "$(basename "$1" | sed 's/\.git//g')" } gd^() { git diff $1^ $1 }
Remove macpilot and opera from list of casks to be installed.
#!/bin/sh apps=( 1password alfred applepi-baker atom atom bartender carbon-copy-cloner cleanmymac crashplan doxie dropbox firefox google-chrome google-drive hazel hipchat istat-menus iterm2 little-snitch macpilot namebench opera private-internet-access silverlight skype teamviewer things vagrant virtualbox vlc ) brew cask install ${apps[@]} --appdir=/Applications
#!/bin/sh apps=( 1password alfred applepi-baker atom atom bartender carbon-copy-cloner cleanmymac crashplan doxie dropbox firefox google-chrome google-drive hazel hipchat istat-menus iterm2 little-snitch namebench private-internet-access silverlight skype teamviewer things vagrant virtualbox vlc ) brew cask install ${apps[@]} --appdir=/Applications
Change location of files served by ipynb server
# as root user apt-get update # as vagrant user echo "alias ipynb='ipython notebook --ip=0.0.0.0 /vagrant/'" >> .bashrc source .bashrc wget http://repo.continuum.io/miniconda/Miniconda-3.5.2-Linux-x86_64.sh # this is the version based on Python 2 bash Miniconda-3.5.2-Linux-x86_64.sh # accept license when prompted rm Miniconda-3.5.2-Linux-x86_64.sh conda update conda # make sure we're up to date conda install pandas scikit-learn ipython-notebook --yes
# as root user apt-get update # as vagrant user echo "alias ipynb='ipython notebook --ip=0.0.0.0 /vagrant/ipynb/'" >> .bashrc source .bashrc wget http://repo.continuum.io/miniconda/Miniconda-3.5.2-Linux-x86_64.sh # this is the version based on Python 2 bash Miniconda-3.5.2-Linux-x86_64.sh # accept license when prompted rm Miniconda-3.5.2-Linux-x86_64.sh conda update conda # make sure we're up to date conda install pandas scikit-learn ipython-notebook --yes
Rename variables and remove redundant/out of date comments.
#!/bin/bash ############################ # .make.sh # This script creates symlinks from the home directory to any desired dotfiles in ~/dotfiles ############################ ########## Variables dir=~/dotfiles # dotfiles directory olddir=~/dotfiles_old # old dotfiles backup directory files="bashrc hgrc gitconfig vimrc vim zshrc oh-my-zsh private scrotwm.conf Xresources" # list of files/folders to symlink in homedir ########## # create dotfiles_old in homedir echo -n "Creating \"$olddir\" for backup of any existing dotfiles in ~ ..." mkdir -p $olddir echo "done" # change to the dotfiles directory echo -n "Changing directory to \"$dir\"..." cd $dir echo "done" # move any existing dotfiles in homedir to dotfiles_old directory, then create symlinks from the homedir to any files in the ~/dotfiles directory specified in $files echo "Deploying dotfile symlinks. Moving existing files to $olddir. ..." for file in $files; do if [ -f $dir/$file ]; then echo "$file" if [ -f ~/.$file ]; then mv -b ~/.$file ~/dotfiles_old/ fi ln -s $dir/$file ~/.$file fi done echo "done"
#!/bin/bash ############################ # .make.sh # This script creates symlinks from the home directory to any desired dotfiles in ~/dotfiles ############################ ########## Variables dotfiles_dir=~/dotfiles backup_dir=~/dotfiles_old # old dotfiles backup directory files="bashrc hgrc gitconfig vimrc vim zshrc oh-my-zsh private scrotwm.conf Xresources" # list of files/folders to symlink in homedir ########## echo -n "Creating \"$backup_dir\" for backup of any existing dotfiles in ~ ..." mkdir -p $backup_dir echo "done" echo -n "Changing directory to \"$dotfiles_dir\"..." cd $dotfiles_dir echo "done" echo "Deploying dotfile symlinks. Moving existing files to $backup_dir. ..." for file in $files; do if [ -f $dotfiles_dir/$file ]; then echo "$file" if [ -f ~/.$file ]; then mv -b ~/.$file ~/dotfiles_old/ fi ln -s $dotfiles_dir/$file ~/.$file fi done echo "done"
Fix prereleasing building kbfs twice
#!/bin/bash set -e -u -o pipefail # Fail on error dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) cd "$dir" build_dir=${BUILD_DIR:-/tmp/build_keybase} mkdir -p $build_dir # This might be different for linux, e.g. date -d @$(git log -n1 --format="%at") +%Y%m%d%H%M%S` date_last_commit=`date -r $(git log -n1 --format="%at") +%Y%m%d%H%M%S` commit_short=`git log -1 --pretty=format:%h` build="$date_last_commit+$commit_short" echo "Building keybase" GO15VENDOREXPERIMENT=1 go build -a -tags "production" -ldflags "-X github.com/keybase/client/go/libkb.CustomBuild=$build" -o $build_dir/keybase github.com/keybase/client/go/keybase version=`$build_dir/keybase version -S` echo "Keybase version: $version" echo "Building kbfs" GO15VENDOREXPERIMENT=0 go get github.com/keybase/kbfs/kbfsfuse GO15VENDOREXPERIMENT=0 go build -a -tags "production" -ldflags "-X github.com/keybase/kbfs/libkbfs.CustomBuild=$build" -o $build_dir/kbfs github.com/keybase/kbfs/kbfsfuse kbfs_version=`$build_dir/kbfs -version` echo "KBFS version: $kbfs_version"
#!/bin/bash set -e -u -o pipefail # Fail on error dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) cd "$dir" build_dir=${BUILD_DIR:-/tmp/build_keybase} mkdir -p $build_dir # This might be different for linux, e.g. date -d @$(git log -n1 --format="%at") +%Y%m%d%H%M%S` date_last_commit=`date -r $(git log -n1 --format="%at") +%Y%m%d%H%M%S` commit_short=`git log -1 --pretty=format:%h` build="$date_last_commit+$commit_short" echo "Building keybase" GO15VENDOREXPERIMENT=1 go build -a -tags "production" -ldflags "-X github.com/keybase/client/go/libkb.CustomBuild=$build" -o $build_dir/keybase github.com/keybase/client/go/keybase version=`$build_dir/keybase version -S` echo "Keybase version: $version"
Switch to zsh in script
#!/bin/sh . ./utils.sh || exit 1 # add shells to /etc/shells shells="zsh bash dash fish ksh pdksh xonsh" for shell in $shells; do _test_executable "$shell" || return 1 shell_path="$(command -v "$shell")" if grep -Fqx "$shell_path" /etc/shells; then printf "Shell %s is already installed\n" "$shell_path" else printf "Adding shell %s to /etc/shells\n" "$shell_path" printf "%s\n" "$shell_path" \ | sudo tee -a /etc/shells >/dev/null fi done # switch default shell to zsh # _test_executable "zsh" && chsh -s "$(command -v zsh)"
#!/bin/sh . ./utils.sh || exit 1 # add shells to /etc/shells shells="zsh bash dash fish ksh pdksh xonsh" for shell in $shells; do _test_executable "$shell" || return 1 shell_path="$(command -v "$shell")" if grep -Fqx "$shell_path" /etc/shells; then printf "Shell %s is already installed\n" "$shell_path" else printf "Adding shell %s to /etc/shells\n" "$shell_path" printf "%s\n" "$shell_path" \ | sudo tee -a /etc/shells >/dev/null fi done # switch default shell to zsh _test_executable "zsh" 2>/dev/null && chsh -s "$(command -v zsh)"
Include README in final artifact
# This script takes care of building your crate and packaging it for release set -ex main() { local src=$(pwd) \ stage= case $TRAVIS_OS_NAME in linux) stage=$(mktemp -d) ;; osx) stage=$(mktemp -d -t tmp) ;; esac test -f Cargo.lock || cargo generate-lockfile # TODO Update this to build the artifacts that matter to you cross rustc --bin signify --target $TARGET --release -- -C lto # TODO Update this to package the right artifacts cp target/$TARGET/release/signify $stage/ cd $stage tar czf $src/$CRATE_NAME-$TRAVIS_TAG-$TARGET.tar.gz * cd $src rm -rf $stage } main
# This script takes care of building your crate and packaging it for release set -ex main() { local src=$(pwd) \ stage= case $TRAVIS_OS_NAME in linux) stage=$(mktemp -d) ;; osx) stage=$(mktemp -d -t tmp) ;; esac test -f Cargo.lock || cargo generate-lockfile cross rustc --bin signify --target $TARGET --release -- -C lto cp target/$TARGET/release/signify $stage/ cp README $stage/ cd $stage tar czf $src/$CRATE_NAME-$TRAVIS_TAG-$TARGET.tar.gz * cd $src rm -rf $stage } main
Add new alias: 'git authors'
#!/bin/bash usage() { echo -e "usage: ./git-init.sh \"User Name\" user@name.es" } if [ $# -lt 1 ]; then usage exit 1 fi username=$1 email=$2 set -e echo -e "Configuring username and email as repository properties..." git config user.name "$username" git config user.email "$email" echo -e "Configuring aliases for log command..." echo -e "New comand: git lol" git config alias.lol "log --pretty=oneline --abbrev-commit --graph --decorate" echo -e "New command: git changelog" git config alias.changelog "log --pretty=oneline --abbrev-commit --decorate --color" # TODO: See if it's possible to stop in previous release #git log v2.1.0...v2.1.1 --pretty=format:'<li> <a href="http://github.com/jeslopalo/<project>/commit/%H">view commit &bull;</a> %s</li> ' --reverse | grep "#changelog" echo -e "Configuring aliases for unstage changes..." git config alias.unstage "reset HEAD"
#!/bin/bash usage() { echo -e "usage: ./git-init.sh \"User Name\" user@name.es" } if [ $# -lt 1 ]; then usage exit 1 fi username=$1 email=$2 set -e echo -e "Configuring username and email as repository properties..." git config user.name "$username" git config user.email "$email" echo -e "Configuring aliases for log command..." echo -e "New comand: git lol" git config alias.lol "log --pretty=oneline --abbrev-commit --graph --decorate" echo -e "New command: git changelog" git config alias.changelog "log --pretty=oneline --abbrev-commit --decorate --color" # TODO: See if it's possible to stop in previous release #git log v2.1.0...v2.1.1 --pretty=format:'<li> <a href="http://github.com/jeslopalo/<project>/commit/%H">view commit &bull;</a> %s</li> ' --reverse | grep "#changelog" echo -e "New command: git authors" git config alias.authors '!git log --all --format="%aN <%aE>" | sort -u' echo -e "Configuring aliases for unstage changes..." git config alias.unstage "reset HEAD"
Update for homestead-7 (PHP 7.0)
# Download and install Zend Z-Ray: wget http://downloads.zend.com/zray/1208/zray-php5.6-Ubuntu-14.04-x86_64.tar.gz && tar xvfz zray-php5.6-Ubuntu-14.04-x86_64.tar.gz -C /opt # Add zray.conf wget https://raw.githubusercontent.com/GeneaLabs/laravel-dev-environment/master/Zend-Z-Ray/Homestead/zray.conf -O /etc/nginx/conf.d/zray.conf # Create symlinks to load Z-Ray configurations in php-fpm and php-cli: ln -sf /opt/zray/zray.ini /etc/php5/cli/conf.d/zray.ini ln -sf /opt/zray/zray.ini /etc/php5/fpm/conf.d/zray.ini # Set file and folder permissions: chown -R vagrant:vagrant /opt/zray # Restart nginx and php-fpm: service php-fpm restart service nginx restart
# Download and install Zend Z-Ray: wget http://downloads.zend.com/zray/1208/zray-php5.6-Ubuntu-14.04-x86_64.tar.gz && tar xvfz zray-php5.6-Ubuntu-14.04-x86_64.tar.gz -C /opt # Add zray.conf wget https://raw.githubusercontent.com/GeneaLabs/laravel-dev-environment/master/Zend-Z-Ray/Homestead/zray.conf -O /etc/nginx/conf.d/zray.conf # Create symlinks to load Z-Ray configurations in php-fpm and php-cli: ln -sf /opt/zray/zray.ini /etc/php/7.0/cli/conf.d/zray.ini ln -sf /opt/zray/zray.ini /etc/php/7.0/fpm/conf.d/zray.ini # Set file and folder permissions: chown -R vagrant:vagrant /opt/zray # Restart nginx and php-fpm: service php7.0-fpm restart service nginx restart
Refresh keys before installing packages
#!/bin/sh # End the script on any errors set -e # Change the working directory to this one cd "$(dirname "$0")" # Get administrative privileges sudo -v # Keep pinging sudo until this script finishes # Source: https://gist.github.com/cowboy/3118588 while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null & # Install PKGBUILDs make package=tari-core make package=bspwm-round-corners-git make package=color-scripts make package=xeventbind # Install yay make aur package=yay # Install aur packages with yay yay -S rtv yay -S polybar yay -S shotgun yay -S ranger-git # Additional settings make fontconfig make yarnconfig # Revoke privileges sudo -K # Install dotfiles make -C .. # Change the color scheme to a sane default wal --theme base16-tomorrow-night # Run vim for the first time (i.e. install plugins and exit) nvim
#!/bin/sh # End the script on any errors set -e # Change the working directory to this one cd "$(dirname "$0")" # Get administrative privileges sudo -v # Keep pinging sudo until this script finishes # Source: https://gist.github.com/cowboy/3118588 while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null & # Refresh GPG keys before installing packages make refresh-keys # Install PKGBUILDs make package=tari-core make package=bspwm-round-corners-git make package=color-scripts make package=xeventbind # Install yay make aur package=yay # Install aur packages with yay yay -S rtv yay -S polybar yay -S shotgun yay -S ranger-git # Additional settings make fontconfig make yarnconfig # Revoke privileges sudo -K # Install dotfiles make -C .. # Change the color scheme to a sane default wal --theme base16-tomorrow-night # Run vim for the first time (i.e. install plugins and exit) nvim
Remove dist directory if gulp is not available
#!/bin/sh COOPR_NODE=${COOPR_NODE:-node} COOPR_GULP='./node_modules/gulp/bin/gulp.js' ${COOPR_NODE} ${COOPR_GULP} clean rm -rf node_modules rm -rf bower_components
#!/bin/sh COOPR_NODE=${COOPR_NODE:-node} COOPR_GULP='./node_modules/gulp/bin/gulp.js' # Prefer the gulp task, fall back to delete if [ -f ${COOPR_GULP} ]; then ${COOPR_NODE} ${COOPR_GULP} clean else rm -rf dist fi rm -rf node_modules rm -rf bower_components
Add new changes to gh-pages instead of completely overriding the branch and losing all deploy history.
#!/bin/bash rm -rf dist || exit 0; mkdir dist; npm run build; ( cd dist git init git config user.name "Travis-CI" git config user.email "contact+travis-for-tddbin@uxebu.com" # cp ../CNAME ./CNAME git add . git commit -m "Deployed to Github Pages" git push --force --quiet "https://${GH_TOKEN}@${GH_REF}" master:gh-pages > /dev/null 2>&1 )
#!/bin/bash # clean up rm -rf tmp || exit 0; # build npm run build; TMP_CHECKOUT_DIR=tmp/origin-gh-pages ( git config user.name "Travis-CI" git config user.email "contact+travis-for-tddbin@uxebu.com" git clone --branch=gh-pages "https://${GH_TOKEN}@${GH_REF}" $TMP_CHECKOUT_DIR rm -rf $TMP_CHECKOUT_DIR/* cp -r dist/* $TMP_CHECKOUT_DIR cd $TMP_CHECKOUT_DIR git add . git commit -m "Travis deployed 'master' - `date`" git push )