Instruction
stringlengths
14
778
input_code
stringlengths
0
4.24k
output_code
stringlengths
1
5.44k
Add -allow-empty to the documentation commit
#!/bin/sh set -e export VSINSTALLDIR="C:\Program Files (x86)\Microsoft Visual Studio\2019\Community" export VisualStudioVersion="16.0" docfx ./docs/docfx.json SOURCE_DIR=$PWD TEMP_REPO_DIR=$PWD/../my-project-gh-pages echo "Removing temporary doc directory $TEMP_REPO_DIR" rm -rf $TEMP_REPO_DIR mkdir $TEMP_REPO_DIR echo "Cloning the repo with the gh-pages branch" git clone https://github.com/markvincze/Stubbery --branch gh-pages $TEMP_REPO_DIR echo "Clear repo directory" cd $TEMP_REPO_DIR git rm -r * echo "Copy documentation into the repo" cp -r $SOURCE_DIR/docs/_site/* . if [ "$APPVEYOR_REPO_BRANCH" == "master" ] then echo "Push the new docs to the remote branch" git add . -A git commit -m "Update generated documentation" git push origin gh-pages else echo "Not on master, skipping pushing docs" fi
#!/bin/sh set -e export VSINSTALLDIR="C:\Program Files (x86)\Microsoft Visual Studio\2019\Community" export VisualStudioVersion="16.0" docfx ./docs/docfx.json SOURCE_DIR=$PWD TEMP_REPO_DIR=$PWD/../my-project-gh-pages echo "Removing temporary doc directory $TEMP_REPO_DIR" rm -rf $TEMP_REPO_DIR mkdir $TEMP_REPO_DIR echo "Cloning the repo with the gh-pages branch" git clone https://github.com/markvincze/Stubbery --branch gh-pages $TEMP_REPO_DIR echo "Clear repo directory" cd $TEMP_REPO_DIR git rm -r * echo "Copy documentation into the repo" cp -r $SOURCE_DIR/docs/_site/* . if [ "$APPVEYOR_REPO_BRANCH" == "master" ] then echo "Push the new docs to the remote branch" git add . -A git commit --allow-empty -m "Update generated documentation" git push origin gh-pages else echo "Not on master, skipping pushing docs" fi
Fix memory stress test not outputting anything
#!/bin/bash # Base on article https://la-vache-libre.org/stress-ng-un-outil-pratique-pour-tester-la-stabilite-des-composants-de-votre-machine-en-charge-elevee/ while getopts n:r: option do case "${option}" in n) WORKER_COUNT=$OPTARG;; r) RESULT=$OPTARG;; esac done stress-ng --vm 10 --timeout 10 --metrics-brief > $RESULT.txt
#!/bin/bash # Base on article https://la-vache-libre.org/stress-ng-un-outil-pratique-pour-tester-la-stabilite-des-composants-de-votre-machine-en-charge-elevee/ while getopts n:r: option do case "${option}" in n) WORKER_COUNT=$OPTARG;; r) RESULT=$OPTARG;; esac done stress-ng --vm $WORKER_COUNT --timeout 10 --metrics-brief > $RESULT.txt
Add script path prefix to jenkins-call-url command
function canonicalize() ( cd "${1%/*}" echo "${PWD}/${1##*/}" ) if [ -f build.gradle ]; then [ -z "${FORCE_UPGRADE}" ] && export JENKINS_USER="admin" || export JENKINS_USER="${JENKINS_USER:-admin}" export JENKINS_PASSWORD="${JENKINS_PASSWORD:-$(<"${JENKINS_HOME}"/secrets/initialAdminPassword)}" unset JENKINS_CALL_ARGS jenkins-call-url -a -v -v "${JENKINS_WEB}"/api/json -o /dev/null export JENKINS_CALL_ARGS="-m POST ${JENKINS_WEB}/scriptText --data-string script= -d" else echo "Not in repository root." 1>&2 fi
function canonicalize() ( cd "${1%/*}" echo "${PWD}/${1##*/}" ) if [ -f build.gradle ]; then [ -z "${FORCE_UPGRADE}" ] && export JENKINS_USER="admin" || export JENKINS_USER="${JENKINS_USER:-admin}" export JENKINS_PASSWORD="${JENKINS_PASSWORD:-$(<"${JENKINS_HOME}"/secrets/initialAdminPassword)}" unset JENKINS_CALL_ARGS "${SCRIPT_LIBARY_PATH}"/jenkins-call-url -a -v -v "${JENKINS_WEB}"/api/json -o /dev/null export JENKINS_CALL_ARGS="-m POST ${JENKINS_WEB}/scriptText --data-string script= -d" else echo "Not in repository root." 1>&2 fi
Remove custom libogg path to match Unix makefile build script.
#!/bin/bash -e # continuous integration test script # run this from the top-level source directory OGG_PATH=/srv/jenkins/jobs/libogg/workspace VIDEOS=/usr/local/share/videos ./autogen.sh CFLAGS='-O2 -g' ./configure --enable-assertions --enable-check-asm --enable-logging --enable-accounting PKG_CONFIG_PATH=${OGG_PATH} make clean make distcheck PKG_CONFIG_PATH=${OGG_PATH} make docs make ./examples/encoder_example -k 4 ${VIDEOS}/claire_qcif-2frames.y4m -o out.$$.ogv ./examples/dump_video out.$$.ogv -o /dev/null rm -f out.$$.ogv
#!/bin/bash -e # continuous integration test script # run this from the top-level source directory VIDEOS=/usr/local/share/videos ./autogen.sh CFLAGS='-O2 -g' ./configure --enable-assertions --enable-check-asm --enable-logging --enable-accounting make clean make distcheck make docs make ./examples/encoder_example -k 4 ${VIDEOS}/claire_qcif-2frames.y4m -o out.$$.ogv ./examples/dump_video out.$$.ogv -o /dev/null rm -f out.$$.ogv
Add date and time to index file
#!/bin/sh source "./config" if [ -z $OUTDIR ] ; then OUTDIR=. fi ## setup OUT=${OUTDIR}/basic mkdir -p ${OUT} ## capture basic information ps -axjf >> $OUT/List_of_Running_Processes.txt pstree -ah >> $OUT/Process_tree_and_arguments.txt mount >> $OUT/Mounted_items.txt diskutil list >> $OUT/BasicInfo/Disk_utility.txt uptime >> $OUT/System_uptime.txt printenv >> $OUT/System_environment_detailed.txt cat /proc/version >> $OUT/OS_kernel_version.txt top -n 1 -b >> $OUT/Process_memory_usage.txt df -h >> $OUT/Disk_usage.txt hostname >> $OUT/hostname.txt date >> $OUT/date.txt uname -a >> $OUT/System_environment.txt lsof >> $OUT/Open_Files.txt find / -type d -perm -1000 -exec ls -ld {} \; >> $OUT/World_Writable.txt lsmod >> $OUT/Loaded_modules.txt chkconfig --list >> $OUT/chkconfig.txt service --status-all >> $OUT/Running_services.txt
#!/bin/sh source "./config" if [ -z $OUTDIR ] ; then OUTDIR=. fi ## setup OUT=${OUTDIR}/basic mkdir -p ${OUT} date >> ${OUT}/index ## capture basic information ps -axjf >> $OUT/List_of_Running_Processes.txt pstree -ah >> $OUT/Process_tree_and_arguments.txt mount >> $OUT/Mounted_items.txt diskutil list >> $OUT/Disk_utility.txt uptime >> $OUT/System_uptime.txt printenv >> $OUT/System_environment_detailed.txt cat /proc/version >> $OUT/OS_kernel_version.txt top -n 1 -b >> $OUT/Process_memory_usage.txt df -h >> $OUT/Disk_usage.txt hostname >> $OUT/hostname.txt date >> $OUT/date.txt uname -a >> $OUT/System_environment.txt lsof >> $OUT/Open_Files.txt find / -type d -perm -1000 -exec ls -ld {} \; >> $OUT/World_Writable.txt lsmod >> $OUT/Loaded_modules.txt chkconfig --list >> $OUT/chkconfig.txt service --status-all >> $OUT/Running_services.txt
Remove nail PATH export and conditionalise rvm source
fpath=($ZSH/functions $fpath) autoload -U $ZSH/functions/*(:t) #plugins=(ant django extract git git-flow github history-substring-search pip python ruby ssh-agent dirpersist) plugins=(vi-mode extract history-substring-search python ruby ssh-agent osx copyfile gitignore) source ~/.rvm/scripts/rvm export RUBYOPT="-I ." export PATH=$PATH:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin export JAVA_HOME=/usr/lib/jvm/default-java export CLASSPATH=.:$CLASSPATH export PYTHONPATH=/usr/lib/python2.7/dist-packages/:$PYTHONPATH # SPECIFICALLY FOR TESTIFY # -------------------------- # COMPLETION SETTINGS # add custom completion scripts ## fpath=(~/completions $fpath) #--------------------------- export PATH="/nail/home/prateek/bin:$PATH" function options() { PLUGIN_PATH="$HOME/.oh-my-zsh/plugins/" for plugin in $plugins; do echo "\n\nPlugin: $plugin"; grep -r "^function \w*" $PLUGIN_PATH$plugin | awk '{print $2}' | sed 's/()//'| tr '\n' ', '; grep -r "^alias" $PLUGIN_PATH$plugin | awk '{print $2}' | sed 's/=.*//' | tr '\n' ', ' done }
fpath=($ZSH/functions $fpath) autoload -U $ZSH/functions/*(:t) #plugins=(ant django extract git git-flow github history-substring-search pip python ruby ssh-agent dirpersist) plugins=(vi-mode extract history-substring-search python ruby ssh-agent osx copyfile gitignore) if [ -f ~/.rvm/scripts/rvm ]; then source ~/.rvm/scripts/rvm fi export RUBYOPT="-I ." export PATH=$PATH:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin export JAVA_HOME=/usr/lib/jvm/default-java export CLASSPATH=.:$CLASSPATH export PYTHONPATH=/usr/lib/python2.7/dist-packages/:$PYTHONPATH # SPECIFICALLY FOR TESTIFY # -------------------------- # COMPLETION SETTINGS # add custom completion scripts ## fpath=(~/completions $fpath) #--------------------------- function options() { PLUGIN_PATH="$HOME/.oh-my-zsh/plugins/" for plugin in $plugins; do echo "\n\nPlugin: $plugin"; grep -r "^function \w*" $PLUGIN_PATH$plugin | awk '{print $2}' | sed 's/()//'| tr '\n' ', '; grep -r "^alias" $PLUGIN_PATH$plugin | awk '{print $2}' | sed 's/=.*//' | tr '\n' ', ' done }
Update the current song script
#!/usr/bin/env bash # Inspired by: https://www.tylerewing.co/tmux-now-playing osascript <<APPLESCRIPT (* Test if iTunes is running. *) if is_app_running("iTunes") then tell application "iTunes" if (player state as text) is equal to "playing" then if current stream title is not equal to missing value then set song_information to my extract_song_information(get current stream title) if item 1 of song_information starts with "NPO Radio" or length of song_information is equal to 1 then return end if set artist_name to item 1 of song_information set track_name to item 2 of song_information return "#[fg=colour246]♫ " & artist_name & " - #[bold]" & track_name else set artist_name to artist of current track set track_name to name of current track return "#[fg=colour246]♫ " & artist_name & " - #[bold]" & track_name end if end if end tell end if (* Test if Spotify is running. *) if is_app_running("Spotify") then tell application "Spotify" if (player state as text) is equal to "playing" then set artist_name to artist of current track set track_name to name of current track return "#[fg=colour246]♫ " & track_name & " - #[bold]" & artist_name end if end tell end if (* Test if an app is running. *) on is_app_running(app_name) tell application "System Events" to (name of processes) contains app_name end is_app_running (* Extract song information from radio streams. *) on extract_song_information(current_song) set AppleScript's text item delimiters to " - " set song_information to current_song's text items set AppleScript's text item delimiters to {""} return song_information end extract_song_information APPLESCRIPT
#!/usr/bin/env bash # Inspired by: https://www.tylerewing.co/tmux-now-playing osascript <<APPLESCRIPT tell application "System Events" set running_apps to (name of every process) end tell set app_names to {"iTunes", "Spotify"} repeat with app_name in app_names (* Test if the app is running. *) if running_apps contains app_name then tell application app_name using terms from application "iTunes" if (player state as text) is equal to "playing" then set artist_name to artist of current track set track_name to name of current track return "#[fg=colour246]♫ " & track_name & " - #[bold]" & artist_name end if end using terms from end tell end if end repeat APPLESCRIPT
Revert "fix(pacman): systemct daemon-reload is done automatically"
pre_install() { useradd -U -l -M -r -s /usr/bin/nologin -d /var/lib/{{name}} -c "{{description}}" {{name}} } post_install() { openssl genrsa -out /etc/{{name}}/{{name}}.rsa 1024 openssl rsa -in /etc/{{name}}/{{name}}.rsa -pubout > /etc/{{name}}/{{name}}.rsa.pub chown {{name}}:{{name}} /etc/{{name}}/{{name}}.rsa* systemctl enable {{name}} systemctl enable {{name}}.socket systemctl -q try-reload-or-restart nginx } pre_upgrade() { systemctl stop {{name}}.socket systemctl stop {{name}} } post_upgrade() { systemctl start {{name}}.socket systemctl -q try-reload-or-restart nginx } pre_remove() { systemctl stop {{name}}.socket systemctl disable {{name}}.socket systemctl stop {{name}} systemctl disable {{name}} } post_remove() { systemctl -q try-reload-or-restart nginx userdel {{name}} groupdel {{name}} }
pre_install() { useradd -U -l -M -r -s /usr/bin/nologin -d /var/lib/{{name}} -c "{{description}}" {{name}} } post_install() { openssl genrsa -out /etc/{{name}}/{{name}}.rsa 1024 openssl rsa -in /etc/{{name}}/{{name}}.rsa -pubout > /etc/{{name}}/{{name}}.rsa.pub chown {{name}}:{{name}} /etc/{{name}}/{{name}}.rsa* systemctl daemon-reload systemctl enable {{name}} systemctl enable {{name}}.socket systemctl -q try-reload-or-restart nginx } pre_upgrade() { systemctl stop {{name}}.socket systemctl stop {{name}} } post_upgrade() { systemctl daemon-reload systemctl start {{name}}.socket systemctl -q try-reload-or-restart nginx } pre_remove() { systemctl stop {{name}}.socket systemctl disable {{name}}.socket systemctl stop {{name}} systemctl disable {{name}} } post_remove() { systemctl -q try-reload-or-restart nginx systemctl daemon-reload userdel {{name}} groupdel {{name}} }
Update git pull alias to update submodules recursively
alias g="git" # See `scmpuff/env.zsh` for the `gl` alias alias gap="git add -p" alias gcob="git checkout -b" alias gc="git commit" alias gdt="git difftool" alias gpl="git pull" alias gps="git push" alias gaa="git add --all" alias gca="git commit --amend" # amend commit without editing message alias gcam="git commit --amend -C HEAD" alias gb="git branch" alias gm="git merge --no-edit" alias gcp="git cherry-pick" alias gsh="git stash" alias gsha="git stash apply" alias gshp="git stash pop" alias gshl="git stash list" # Copy the current branch name to clipboard alias gbn="$DOTFILES/bin/git-copy-branch-name" # Show list of files changed in a commit # Follow with commit hash alias gdl="git diff-tree --no-commit-id --name-only -r $1" # List branches ordered by most recent commit alias gbr="git branch --sort=-committerdate" # List all the commits on the current branch ahead of master alias glb="git cherry -v master" # Deploy to staging alias gds="$DOTFILES/bin/git-deploy-to-staging"
alias g="git" # See `scmpuff/env.zsh` for the `gl` alias alias gap="git add -p" alias gcob="git checkout -b" alias gc="git commit" alias gdt="git difftool" alias gpl="git pull && git submodule update --init --recursive" alias gps="git push" alias gaa="git add --all" alias gca="git commit --amend" # amend commit without editing message alias gcam="git commit --amend -C HEAD" alias gb="git branch" alias gm="git merge --no-edit" alias gcp="git cherry-pick" alias gsh="git stash" alias gsha="git stash apply" alias gshp="git stash pop" alias gshl="git stash list" # Copy the current branch name to clipboard alias gbn="$DOTFILES/bin/git-copy-branch-name" # Show list of files changed in a commit # Follow with commit hash alias gdl="git diff-tree --no-commit-id --name-only -r $1" # List branches ordered by most recent commit alias gbr="git branch --sort=-committerdate" # List all the commits on the current branch ahead of master alias glb="git cherry -v master" # Deploy to staging alias gds="$DOTFILES/bin/git-deploy-to-staging"
Make sure that gtest-targets.mk gets regenerated with the right directory
#!/bin/sh cd "$(git rev-parse --show-toplevel 2>/dev/null)" >/dev/null 2>&1 python build/mktargets.py --directory codec/decoder --library decoder python build/mktargets.py --directory codec/encoder --library encoder --exclude DllEntry.cpp python build/mktargets.py --directory codec/common --library common --exclude asm_inc.asm --exclude arm_arch_common_macro.S --exclude arm_arch64_common_macro.S python build/mktargets.py --directory codec/processing --library processing python build/mktargets.py --directory codec/console/dec --binary h264dec python build/mktargets.py --directory codec/console/enc --binary h264enc python build/mktargets.py --directory codec/console/common --library console_common python build/mktargets.py --directory test/encoder --prefix encoder_unittest python build/mktargets.py --directory test/decoder --prefix decoder_unittest python build/mktargets.py --directory test/processing --prefix processing_unittest python build/mktargets.py --directory test/api --prefix api_test python build/mktargets.py --directory test/common --prefix common_unittest python build/mktargets.py --directory module --prefix module python build/mktargets.py --directory gtest --library gtest --out build/gtest-targets.mk --cpp-suffix .cc --include gtest-all.cc
#!/bin/sh cd "$(git rev-parse --show-toplevel 2>/dev/null)" >/dev/null 2>&1 python build/mktargets.py --directory codec/decoder --library decoder python build/mktargets.py --directory codec/encoder --library encoder --exclude DllEntry.cpp python build/mktargets.py --directory codec/common --library common --exclude asm_inc.asm --exclude arm_arch_common_macro.S --exclude arm_arch64_common_macro.S python build/mktargets.py --directory codec/processing --library processing python build/mktargets.py --directory codec/console/dec --binary h264dec python build/mktargets.py --directory codec/console/enc --binary h264enc python build/mktargets.py --directory codec/console/common --library console_common python build/mktargets.py --directory test/encoder --prefix encoder_unittest python build/mktargets.py --directory test/decoder --prefix decoder_unittest python build/mktargets.py --directory test/processing --prefix processing_unittest python build/mktargets.py --directory test/api --prefix api_test python build/mktargets.py --directory test/common --prefix common_unittest python build/mktargets.py --directory module --prefix module python build/mktargets.py --directory gtest/googletest --library gtest --out build/gtest-targets.mk --cpp-suffix .cc --include gtest-all.cc
Add iwyu to Bamboo debug Linux build.
#!/bin/bash set -o errexit set -o xtrace DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # Run the common setup ${DIR}/setup-dependencies.sh # Install nupic.core dependencies pip install \ --cache-dir /usr/local/src/nupic.core/pip-cache \ --build /usr/local/src/nupic.core/pip-build \ --no-clean \ pycapnp==0.5.5 \ -r bindings/py/requirements.txt # Build and install nupic.core mkdir -p build/scripts cmake -DCMAKE_BUILD_TYPE=Debug -DNTA_COV_ENABLED=ON -DCMAKE_INSTALL_PREFIX=`pwd`/build/release -DPY_EXTENSIONS_DIR=`pwd`/bindings/py/nupic/bindings . make install ./build/release/bin/cpp_region_test ./build/release/bin/unit_tests # Build installable python packages python setup.py install py.test bindings/py/tests
#!/bin/bash set -o errexit set -o xtrace DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # Run the common setup ${DIR}/setup-dependencies.sh apt-get install -y iwyu # Install nupic.core dependencies pip install \ --cache-dir /usr/local/src/nupic.core/pip-cache \ --build /usr/local/src/nupic.core/pip-build \ --no-clean \ pycapnp==0.5.5 \ -r bindings/py/requirements.txt # Build and install nupic.core mkdir -p build/scripts cmake -DCMAKE_BUILD_TYPE=Debug -DNUPIC_IWYU=ON -DNTA_COV_ENABLED=ON -DCMAKE_INSTALL_PREFIX=`pwd`/build/release -DPY_EXTENSIONS_DIR=`pwd`/bindings/py/nupic/bindings . make install ./build/release/bin/cpp_region_test ./build/release/bin/unit_tests # Build installable python packages python setup.py install py.test bindings/py/tests
Add compat for old appServerVersion constant pattern
@class AppServerVersionConstants(){ GLASSFISH_VERSION(){ echo "3.1.2.2" } JBOSS_VERSION(){ echo "eap-7.0.0" } JETTY_VERSION(){ echo "8.1.10" } JONAS_VERSION(){ echo "5.2.3" } RESIN_VERSION(){ echo "4.0.44" } TCAT_VERSION(){ echo "7.0.2" } TCSERVER_VERSION(){ echo "3.2.5" } TOMCAT_VERSION(){ echo "8.5.29" } WEBLOGIC_VERSION(){ echo "12.2.1" } WEBSPHERE_VERSION(){ echo "8.5.5.0" } WILDFLY_VERSION(){ echo "10.0.0" } $@ }
include string.util.StringUtil @class AppServerVersionConstants(){ GLASSFISH_VERSION(){ echo "3.1.2.2" } JBOSS_VERSION(){ echo "eap-7.0.0" } JETTY_VERSION(){ echo "8.1.10" } JONAS_VERSION(){ echo "5.2.3" } RESIN_VERSION(){ echo "4.0.44" } TCAT_VERSION(){ echo "7.0.2" } TCSERVER_VERSION(){ echo "3.2.5" } TOMCAT_VERSION(){ echo "8.5.29" } WEBLOGIC_VERSION(){ echo "12.2.1" } WEBSPHERE_VERSION(){ echo "8.5.5.0" } WILDFLY_VERSION(){ echo "10.0.0" } if [[ ${1} == *Version ]]; then $(StringUtil toUpperCase ${1//Version/}_VERSION) shift fi $@ }
Add environment variable for version of the SDK
export BOUNDARY_SDK_HOME=$PWD export BOUNDARY_MIB_REPOSITORY="$BOUNDARY_SDK_HOME/runtime/mibrepository" export PATH=$PATH:"$BOUNDARY_SDK_HOME/src/main/scripts" alias bsdk='cd $BOUNDARY_SDK_HOME'
export BOUNDARY_SDK_HOME=$PWD export BOUNDARY_SDK_VERSION=0.00.03 export BOUNDARY_MIB_REPOSITORY="$BOUNDARY_SDK_HOME/runtime/mibrepository" export PATH=$PATH:"$BOUNDARY_SDK_HOME/src/main/scripts" alias bsdk='cd $BOUNDARY_SDK_HOME'
Kill cloud sql proxy after tests complete.
#!/bin/bash for required_variable in \ GOOGLE_CLOUD_PROJECT \ GOOGLE_APPLICATION_CREDENTIALS \ GOOGLE_CLOUD_STORAGE_BUCKET \ ALTERNATE_GOOGLE_CLOUD_STORAGE_BUCKET \ ; do if [[ -z "${!required_variable}" ]]; then echo "Must set $required_variable" exit 1 fi done script_directory="$(dirname "`realpath $0`")" repo_directory="$(dirname $script_directory)" status_return=0 # everything passed # Print out Ruby version ruby --version # Run cloud proxy $HOME/cloud_sql_proxy -dir=/cloudsql -credential_file=$GOOGLE_APPLICATION_CREDENTIALS & while read product do # Run Tets export BUILD_ID=$CIRCLE_BUILD_NUM export TEST_DIR=$product echo "[$product]" pushd "$repo_directory/$product/" bundle install && bundle exec rspec --format documentation # Check status of bundle exec rspec if [ $? != 0 ]; then status_return=1 fi # Clean up deployed version bundle exec ruby "$repo_directory/spec/e2e_cleanup.rb" "$TEST_DIR" "$BUILD_ID" popd done < <(find * -type d -name 'spec' -path "*rails-*/*" -not -path "*vendor/*" -exec dirname {} \;) exit $status_return
#!/bin/bash for required_variable in \ GOOGLE_CLOUD_PROJECT \ GOOGLE_APPLICATION_CREDENTIALS \ GOOGLE_CLOUD_STORAGE_BUCKET \ ALTERNATE_GOOGLE_CLOUD_STORAGE_BUCKET \ ; do if [[ -z "${!required_variable}" ]]; then echo "Must set $required_variable" exit 1 fi done script_directory="$(dirname "`realpath $0`")" repo_directory="$(dirname $script_directory)" status_return=0 # everything passed # Print out Ruby version ruby --version # Start Cloud SQL Proxy $HOME/cloud_sql_proxy -dir=/cloudsql -credential_file=$GOOGLE_APPLICATION_CREDENTIALS & export CLOUD_SQL_PROXY_PROCESS_ID=$! while read product do # Run Tets export BUILD_ID=$CIRCLE_BUILD_NUM export TEST_DIR=$product echo "[$product]" pushd "$repo_directory/$product/" bundle install && bundle exec rspec --format documentation # Check status of bundle exec rspec if [ $? != 0 ]; then status_return=1 fi # Clean up deployed version bundle exec ruby "$repo_directory/spec/e2e_cleanup.rb" "$TEST_DIR" "$BUILD_ID" popd done < <(find * -type d -name 'spec' -path "*rails-*/*" -not -path "*vendor/*" -exec dirname {} \;) # Stop Cloud SQL Proxy kill $CLOUD_SQL_PROXY_PROCESS_ID exit $status_return
Add extra grep filtering to get rid of CI-specific issues
#!/bin/bash mkdir -p src/test/resources/results/actual/ EXPECTED_RESULT="src/test/resources/results/expected/report.txt" ACTUAL_RESULT="src/test/resources/results/actual/report.txt" ACTUAL_RESULT_PARSED="src/test/resources/results/actual/report_parsed.txt" mvn clean verify \ -Dmode=report \ -DjmeterCsv=src/test/resources/csv/jmeter/10_transactions.csv \ > $ACTUAL_RESULT sed 's/^\[INFO] //g' $ACTUAL_RESULT | sed '1,29d' | sed '5,$d' > $ACTUAL_RESULT_PARSED DIFF_OUTPUT=`diff $EXPECTED_RESULT $ACTUAL_RESULT_PARSED` OUT=$? echo -e ''; echo `basename "$0"` if [ $OUT -eq 0 ];then echo "OUTPUT AS EXPECTED" echo "TEST PASSED" exit 0 else echo "INCORRECT CONSOLE OUTPUT - DIFF:" echo $DIFF_OUTPUT echo "TEST FAILED" exit 1 fi
#!/bin/bash mkdir -p src/test/resources/results/actual/ EXPECTED_RESULT="src/test/resources/results/expected/report.txt" ACTUAL_RESULT="src/test/resources/results/actual/report.txt" ACTUAL_RESULT_PARSED="src/test/resources/results/actual/report_parsed.txt" mvn clean verify \ -Dmode=report \ -DjmeterCsv=src/test/resources/csv/jmeter/10_transactions.csv \ > $ACTUAL_RESULT sed 's/^\[INFO] //g' $ACTUAL_RESULT | sed '1,29d' | sed '5,$d' | grep -v '\-\-\-\-\-\-\-\-\-\-\-\-\-' > $ACTUAL_RESULT_PARSED DIFF_OUTPUT=`diff $EXPECTED_RESULT $ACTUAL_RESULT_PARSED` OUT=$? echo -e ''; echo `basename "$0"` if [ $OUT -eq 0 ];then echo "OUTPUT AS EXPECTED" echo "TEST PASSED" exit 0 else echo "INCORRECT CONSOLE OUTPUT - DIFF:" echo $DIFF_OUTPUT echo "TEST FAILED" exit 1 fi
Make updating dotfiles be quiet
check () { git fetch changes=$(git diff) if [ "$changes" != "" ]; then echo 'YOU HAVE UNCOMMITTED CHANGES'; exit fi changes=$(git ls-files --others --exclude-standard) if [ "$changes" != "" ]; then echo 'YOU HAVE UNCOMMITTED FILES'; exit fi changes=$(git diff HEAD..FETCH_HEAD) if [ "$changes" != "" ] ; then echo 'YOUR DOTFILES ARE OUT OF DATE'; git pull fi } # Check if there are updates to this dotfiles repo cd ~/.dotfiles check # Check if there are updates to ssh if [ -d ~/.ssh/.git ]; then cd ~/.ssh check fi
check () { git fetch changes=$(git diff) if [ "$changes" != "" ]; then echo 'YOU HAVE UNCOMMITTED CHANGES'; exit fi changes=$(git ls-files --others --exclude-standard) if [ "$changes" != "" ]; then echo 'YOU HAVE UNCOMMITTED FILES'; exit fi changes=$(git diff HEAD..FETCH_HEAD) if [ "$changes" != "" ] ; then echo 'YOUR DOTFILES ARE OUT OF DATE'; git pull --quiet fi } # Check if there are updates to this dotfiles repo cd ~/.dotfiles check # Check if there are updates to ssh if [ -d ~/.ssh/.git ]; then cd ~/.ssh check fi
Fix autojump script to support Nix installations
if [ $commands[autojump] ]; then # check if autojump is installed if [ -f $HOME/.autojump/etc/profile.d/autojump.zsh ]; then # manual user-local installation . $HOME/.autojump/etc/profile.d/autojump.zsh elif [ -f /usr/share/autojump/autojump.zsh ]; then # debian and ubuntu package . /usr/share/autojump/autojump.zsh elif [ -f /etc/profile.d/autojump.zsh ]; then # manual installation . /etc/profile.d/autojump.zsh elif [ -f /etc/profile.d/autojump.sh ]; then # gentoo installation . /etc/profile.d/autojump.sh elif [ -f /usr/local/share/autojump/autojump.zsh ]; then # freebsd installation . /usr/local/share/autojump/autojump.zsh elif [ -f /opt/local/etc/profile.d/autojump.zsh ]; then # mac os x with ports . /opt/local/etc/profile.d/autojump.zsh elif [ $commands[brew] -a -f `brew --prefix`/etc/autojump.zsh ]; then # mac os x with brew . `brew --prefix`/etc/autojump.zsh fi fi
if [ $commands[autojump] ]; then # check if autojump is installed if [ -f $HOME/.autojump/etc/profile.d/autojump.zsh ]; then # manual user-local installation . $HOME/.autojump/etc/profile.d/autojump.zsh elif [ -f $HOME/.nix-profile/etc/profile.d/autojump.zsh ]; then # nix installation . $HOME/.nix-profile/etc/profile.d/autojump.zsh elif [ -f /usr/share/autojump/autojump.zsh ]; then # debian and ubuntu package . /usr/share/autojump/autojump.zsh elif [ -f /etc/profile.d/autojump.zsh ]; then # manual installation . /etc/profile.d/autojump.zsh elif [ -f /etc/profile.d/autojump.sh ]; then # gentoo installation . /etc/profile.d/autojump.sh elif [ -f /usr/local/share/autojump/autojump.zsh ]; then # freebsd installation . /usr/local/share/autojump/autojump.zsh elif [ -f /opt/local/etc/profile.d/autojump.zsh ]; then # mac os x with ports . /opt/local/etc/profile.d/autojump.zsh elif [ $commands[brew] -a -f `brew --prefix`/etc/autojump.zsh ]; then # mac os x with brew . `brew --prefix`/etc/autojump.zsh fi fi
Revert "Request api key and username from user" (doesn't work with piping)
mkdir -p ~/.histsync for file in {'bash-preexec.sh','histsync-client'}; do echo Downloading, $file curl "http://histsync.io/download-client/$file" > ~/.histsync/$file; done read -p "Github username: " username read -p "API key (from histsync.io profile): " key echo '# Add the following lines in your .bashrc / .bash_profiles' echo '# ============' echo '# HistSync' echo 'source ~/.bash-preexec.sh' echo 'preexec() {' echo " ~/histsync/histsync-client --api-key $key --user $username \"\$1\" --log-file ~/.histsync/log;" echo '}'
mkdir -p ~/.histsync for file in {'bash-preexec.sh','histsync-client'}; do curl "http://histsync.io/download-client/$file" > ~/.histsync/$file; done echo '# Add the following lines in your .bashrc / .bash_profiles' echo '# ============' echo '# HistSync' echo 'source ~/.bash-preexec.sh' echo 'preexec() {' echo ' ~/histsync/histsync-client --api-key {{hist_sync_api_key}} --user {{github_username}} "$1" --log-file ~/.histsync/log;' echo '}'
Fix for external directory not being found
#!/bin/zsh # Source every file in custom for file in $(ls "${0:h}/functions"); do source "${0:h}/functions/$file" done # For any external imported files for file in $(ls "${0:h}/external"); do source "${0:h}/external/$file" done # Add zsh-completions to $fpath. fpath=("${0:h}/completion" $fpath) # Load and initialize the completion system ignoring insecure directories. autoload -Uz compinit && compinit -i
#!/bin/zsh # Source every file in custom for file in $(ls "${0:h}/functions"); do source "${0:h}/functions/$file" done # For any external imported files if [ -e "${0:h}/external" ]; then for file in $(ls "${0:h}/external"); do source "${0:h}/external/$file" done fi # Add zsh-completions to $fpath. fpath=("${0:h}/completion" $fpath) # Load and initialize the completion system ignoring insecure directories. autoload -Uz compinit && compinit -i
Set default configuration before running tests
#!/usr/bin/env bash # Use tmp folder for git database TMP=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/tmp # Recreate tmp folder rm -rf "$TMP" mkdir -p "$TMP" cd "$TMP" # Create repository and master branch git init # Make fixture git commit -m "Dummy" --allow-empty git checkout -b feature_one 2> /dev/null git commit -m "Dummy" --allow-empty git checkout -b feature_two 2> /dev/null git commit -m "Dummy" --allow-empty git checkout -b feature_three 2> /dev/null git commit -m "Dummy" --allow-empty git checkout feature_two 2> /dev/null git checkout master 2> /dev/null
#!/usr/bin/env bash # Use tmp folder for git database TMP=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/tmp # Recreate tmp folder rm -rf "$TMP" mkdir -p "$TMP" cd "$TMP" # Create repository and master branch git init # Make fixture git commit -m "Dummy" --allow-empty git checkout -b feature_one 2> /dev/null git commit -m "Dummy" --allow-empty git checkout -b feature_two 2> /dev/null git commit -m "Dummy" --allow-empty git checkout -b feature_three 2> /dev/null git commit -m "Dummy" --allow-empty git checkout feature_two 2> /dev/null git checkout master 2> /dev/null # Create base configuration git config --local --add switch.count 9 git config --local --add switch.order modified
Add info to testing script
readlinkf(){ perl -MCwd -e 'print Cwd::abs_path shift' $1; } SC_DIR=$(dirname "$(readlinkf `which seqcluster`)") unset PYTHONHOME unset PYTHONPATH export PYTHONNOUSERSITE=1 echo $SC_DIR "$SC_DIR/nosetests" -v -s -a "$@"
#!/usr/bin/env bash # bash script inspared from bcbio-nextgen: https://github.com/chapmanb/bcbio-nextgen/blob/master/tests/run_tests.sh # It allows to run specific test using the name like # ./run_test.sh test_align set -e readlinkf(){ perl -MCwd -e 'print Cwd::abs_path shift' $1; } SC_DIR=$(dirname "$(readlinkf `which seqcluster`)") unset PYTHONHOME unset PYTHONPATH export PYTHONNOUSERSITE=1 echo $SC_DIR "$SC_DIR/nosetests" -v -s -a "$@"
Debug command for cleaning history and logging out of the current session.
#!/bin/bash set -o xtrace # Update resolv.conf with "nameserver 127.0.0.1". sed -i 's/^#\(prepend domain-name-servers\).*$/\1 127.0.0.1;/g' /etc/dhcp/dhclient.conf /etc/init.d/networking restart /etc/init.d/resolvconf restart cat /etc/resolv.conf # Print the clean history command. cat <<-EOF rm -f $HISTFILE && unset HISTFILE && exit EOF
#!/bin/bash set -o xtrace # Update resolv.conf with "nameserver 127.0.0.1". sed -i 's/^#\(prepend domain-name-servers\).*$/\1 127.0.0.1;/g' /etc/dhcp/dhclient.conf /etc/init.d/networking restart /etc/init.d/resolvconf restart cat /etc/resolv.conf # Print the clean history command. cat <<-EOF # Command for clean history and logout current session. rm -f \$HISTFILE && unset HISTFILE && exit EOF
Revert "Fix RabbitMQ credentials for development environment"
#!/bin/sh rabbitmqctl add_vhost "travis.development" rabbitmqctl add_user travis_worker travis_worker_password rabbitmqctl set_permissions -p "travis.development" travis_worker ".*" ".*" ".*" rabbitmqctl set_permissions -p "travis.development" guest ".*" ".*" ".*"
#!/bin/sh rabbitmqctl add_vhost "travis.development" rabbitmqctl add_user travisci_worker travisci_worker_password rabbitmqctl set_permissions -p "travisci.development" travisci_worker ".*" ".*" ".*" rabbitmqctl set_permissions -p "travisci.development" guest ".*" ".*" ".*"
Add EMAIL_REPLY_TO_ID to docker env
#!/usr/bin/env bash set -eo pipefail function exit_with_msg { echo $1 exit $2 } echo -n "" > docker.env env_vars=( NOTIFY_API_URL API_KEY FUNCTIONAL_TEST_EMAIL FUNCTIONAL_TEST_NUMBER EMAIL_TEMPLATE_ID SMS_TEMPLATE_ID LETTER_TEMPLATE_ID ) for env_var in "${env_vars[@]}"; do echo "${env_var}=${!env_var}" >> docker.env done
#!/usr/bin/env bash set -eo pipefail function exit_with_msg { echo $1 exit $2 } echo -n "" > docker.env env_vars=( NOTIFY_API_URL API_KEY FUNCTIONAL_TEST_EMAIL FUNCTIONAL_TEST_NUMBER EMAIL_TEMPLATE_ID SMS_TEMPLATE_ID LETTER_TEMPLATE_ID EMAIL_REPLY_TO_ID ) for env_var in "${env_vars[@]}"; do echo "${env_var}=${!env_var}" >> docker.env done
Add inkscape to dependencies
#!/bin/bash # For generate the thesis. sudo apt-get install texlive-full if [[ ${1} == "--dev" ]] ; then echo "Install packages for developing..." sudo apt-get install okular geany-plugin-spellcheck hunspell-hu fi echo "Done!"
#!/bin/bash # For generate the thesis. sudo apt-get install texlive-full inkscape if [[ ${1} == "--dev" ]] ; then echo "Install packages for developing..." sudo apt-get install okular geany-plugin-spellcheck hunspell-hu fi echo "Done!"
Change directory to avoid the server path being included in the *sum files
#!/bin/sh cd /data/off-fr/html mongodump --collection products --db off-fr tar cvfz data/openfoodfacts-mongodbdump.tar.gz dump sha256sum data/openfoodfacts-mongodbdump.tar.gz > data/sha256sum md5sum data/openfoodfacts-mongodbdump.tar.gz > data/md5sum
#!/bin/sh cd /data/off-fr/html mongodump --collection products --db off-fr tar cvfz data/openfoodfacts-mongodbdump.tar.gz dump pushd data/ > /dev/null sha256sum openfoodfacts-mongodbdump.tar.gz > sha256sum md5sum openfoodfacts-mongodbdump.tar.gz > md5sum popd > /dev/null
Use app envs in entrypoint
#!/bin/bash # Configure crowd.properties echo "session.lastvalidation=session.lastvalidation" > ${LDAP_HOME}/crowd.properties \ && echo "session.tokenkey=session.tokenkey" >> ${LDAP_HOME}/crowd.properties \ && echo "crowd.server.url=${CROWD_URL}/services/" >> ${LDAP_HOME}/crowd.properties \ && echo "application.name=ldap" >> ${LDAP_HOME}/crowd.properties \ && echo "http.timeout=30000" >> ${LDAP_HOME}/crowd.properties \ && echo "session.isauthenticated=session.isauthenticated" >> ${LDAP_HOME}/crowd.properties \ && echo "application.login.url=${CROWD_URL}" >> ${LDAP_HOME}/crowd.properties \ && echo "session.validationinterval=0" >> ${LDAP_HOME}/crowd.properties \ && echo "application.password=0" >> ${LDAP_HOME}/crowd.properties # Run exec java -cp ${LDAP_HOME} -jar ${LDAP_INSTALL}/target/crowd-ldap-server-1.0.4-SNAPSHOT.jar
#!/bin/bash # Configure crowd.properties echo "session.lastvalidation=session.lastvalidation" > ${LDAP_HOME}/crowd.properties \ && echo "session.tokenkey=session.tokenkey" >> ${LDAP_HOME}/crowd.properties \ && echo "crowd.server.url=${CROWD_URL}/services/" >> ${LDAP_HOME}/crowd.properties \ && echo "application.name=${CROWD_APP_NAME}" >> ${LDAP_HOME}/crowd.properties \ && echo "http.timeout=30000" >> ${LDAP_HOME}/crowd.properties \ && echo "session.isauthenticated=session.isauthenticated" >> ${LDAP_HOME}/crowd.properties \ && echo "application.login.url=${CROWD_URL}" >> ${LDAP_HOME}/crowd.properties \ && echo "session.validationinterval=0" >> ${LDAP_HOME}/crowd.properties \ && echo "application.password=${CROWD_APP_PASSWORD}" >> ${LDAP_HOME}/crowd.properties # Run exec java -cp ${LDAP_HOME} -jar ${LDAP_INSTALL}/target/crowd-ldap-server-1.0.4-SNAPSHOT.jar
Use glibtoolize rather than libtoolize on mac
#! /bin/sh -e rm -rf autom4te.cache aclocal -I m4 autoheader libtoolize --copy automake --add-missing --copy autoconf
#! /bin/sh -e rm -rf autom4te.cache aclocal -I m4 autoheader case `uname` in Darwin*) glibtoolize --copy ;; *) libtoolize --copy ;; esac automake --add-missing --copy autoconf
Fix copypasta with 4.2 bundler install
#!/bin/bash if [ "$RAILS_VERSION" == "4.2" ] then gem uninstall -v ">= 2" -i $(rvm gemdir)@global -ax bundler || true gem install bundler -v "< 2" gem install bundler -v "< 2" else gem install bundler fi
#!/bin/bash if [ "$RAILS_VERSION" == "4.2" ] then gem uninstall -v ">= 2" -i $(rvm gemdir)@global -ax bundler || true gem install bundler -v "< 2" else gem install bundler fi
Add bash completion for exercism opts
_exercism () { local cur prev COMPREPLY=() # Array variable storing the possible completions. cur=${COMP_WORDS[COMP_CWORD]} prev=${COMP_WORDS[COMP_CWORD-1]} commands="configure download open submit troubleshoot upgrade version workspace help" config_opts="--show" version_opts="--latest" if [ "${#COMP_WORDS[@]}" -eq 2 ]; then COMPREPLY=( $( compgen -W "${commands}" "${cur}" ) ) return 0 fi if [ "${#COMP_WORDS[@]}" -eq 3 ]; then case "${prev}" in configure) COMPREPLY=( $( compgen -W "${config_opts}" -- "${cur}" ) ) return 0 ;; version) COMPREPLY=( $( compgen -W "${version_opts}" -- "${cur}" ) ) return 0 ;; help) COMPREPLY=( $( compgen -W "${commands}" "${cur}" ) ) return 0 ;; *) return 0 ;; esac fi return 0 } complete -o bashdefault -o default -o nospace -F _exercism exercism 2>/dev/null \ || complete -o default -o nospace -F _exercism exercism
_exercism () { local cur prev COMPREPLY=() # Array variable storing the possible completions. cur=${COMP_WORDS[COMP_CWORD]} prev=${COMP_WORDS[COMP_CWORD-1]} opts="--verbose --timeout" commands="configure download open submit troubleshoot upgrade version workspace help" config_opts="--show" version_opts="--latest" if [ "${#COMP_WORDS[@]}" -eq 2 ]; then case "${cur}" in -*) COMPREPLY=( $( compgen -W "${opts}" -- "${cur}" ) ) return 0 ;; *) COMPREPLY=( $( compgen -W "${commands}" "${cur}" ) ) return 0 ;; esac fi if [ "${#COMP_WORDS[@]}" -eq 3 ]; then case "${prev}" in configure) COMPREPLY=( $( compgen -W "${config_opts}" -- "${cur}" ) ) return 0 ;; version) COMPREPLY=( $( compgen -W "${version_opts}" -- "${cur}" ) ) return 0 ;; help) COMPREPLY=( $( compgen -W "${commands}" "${cur}" ) ) return 0 ;; *) return 0 ;; esac fi return 0 } complete -o bashdefault -o default -o nospace -F _exercism exercism 2>/dev/null \ || complete -o default -o nospace -F _exercism exercism
Disable GCOV builds in ubuntu-latest
#!/bin/bash set -uo pipefail set -e set -vx MAKE_J=$(nproc) export CROSS="ccache powerpc64le-linux-gnu-" make -j${MAKE_J} all ./opal-ci/fetch-debian-jessie-installer.sh make -j${MAKE_J} check (make clean; cd external/gard && CROSS= make -j${MAKE_J}) ( cd external/pflash; echo "Building for ARM..." make clean && make distclean CROSS_COMPILE=arm-linux-gnueabi- make || { echo "ARM build failed"; exit 1; } ) (cd external/pflash; make clean && make distclean && make) # GCOV build disabled for GCC 8.2 # https://github.com/open-power/skiboot/issues/206 # make clean # SKIBOOT_GCOV=1 make -j${MAKE_J} # SKIBOOT_GCOV=1 make -j${MAKE_J} check make clean rm -rf builddir mkdir builddir make SRC=$(pwd) -f ../Makefile -C builddir -j${MAKE_J} make clean echo "Building with clang..." make clean make -j${MAKE_J} CC=clang make -j${MAKE_J} CC=clang check
Use basename instead of sed
if [[ $BASH_PREVIEW ]]; then unset BASH_PREVIEW #Prevent infinite looping echo " Previewing Bash-it Themes " THEMES="$BASH_IT/themes/*/*.theme.bash" for theme in $THEMES do BASH_IT_THEME=$(echo $theme | sed "s/\//\n/g" | tail -1 | sed "s/.theme.bash//") echo " $BASH_IT_THEME" echo "" | bash --init-file $BASH_IT/bash_it.sh -i done fi
if [[ $BASH_PREVIEW ]]; then unset BASH_PREVIEW #Prevent infinite looping echo " Previewing Bash-it Themes " THEMES="$BASH_IT/themes/*/*.theme.bash" for theme in $THEMES do BASH_IT_THEME=$(basename -s '.theme.bash' $theme) echo " $BASH_IT_THEME" echo "" | bash --init-file $BASH_IT/bash_it.sh -i done fi
Set up databases in dev environment
#!/bin/bash --login set -e # Install packages sudo apt-get update --quiet sudo -E apt-get install --quiet --assume-yes curl git libpq-dev libmysqlclient-dev # Install rvm and ruby if ! rvm use ruby-2.2.0; then # Install mpapis public key gpg --keyserver hkp://keys.gnupg.net --recv-keys D39DC0E3 curl -sSL https://get.rvm.io | bash -s stable source ~/.rvm/scripts/rvm rvm install --quiet-curl ruby-2.2.0 rvm use ruby-2.2.0 fi # cd straight to /vagrant on login if ! grep -q 'cd \/vagrant' ~/.bashrc; then echo 'cd /vagrant' >> ~/.bashrc fi cd /vagrant # Install ruby dependencies bundle install
#!/bin/bash --login set -e # cd straight to /vagrant on login if ! grep -q 'cd \/vagrant' ~/.bashrc; then echo 'cd /vagrant' >> ~/.bashrc fi cd /vagrant # Don't prompt for mysql root password export DEBIAN_FRONTEND=noninteractive # Install packages sudo apt-get update --quiet sudo -E apt-get install --quiet --assume-yes curl git postgresql mysql-server libpq-dev libmysqlclient-dev # Install rvm and ruby if ! rvm use ruby-2.2.0; then # Install mpapis public key gpg --keyserver hkp://keys.gnupg.net --recv-keys D39DC0E3 curl -sSL https://get.rvm.io | bash -s stable source ~/.rvm/scripts/rvm rvm install --quiet-curl ruby-2.2.0 rvm use ruby-2.2.0 fi # Install ruby dependencies bundle install # Configure mysql to use utf-8 sudo tee /etc/mysql/conf.d/mysqld_unicode.cnf << 'EOF' [client] default-character-set = utf8 [mysql] default-character-set = utf8 [mysqld] collation-server = utf8_unicode_ci character-set-server = utf8 init-connect = 'SET NAMES utf8' EOF # Set mysql root password mysqladmin -u root password root || echo 'mysql password already set' # Create databases mysqladmin -u root --password=root create mysql_juicecubes || echo 'dev database already created' sudo -u postgres createdb -E utf-8 -T template0 postgres_cubes # Set postgres user password echo "ALTER USER postgres WITH password 'root';" | sudo -u postgres psql # Allow postgres login with password sudo tee /etc/postgresql/9.3/main/pg_hba.conf << 'EOF' local all all md5 host all all 127.0.0.1/32 md5 host all all ::1/128 md5 EOF
Fix exit code check when setting alias for gshuf.
cite about-alias about-alias 'general aliases' # List directory contents alias sl=ls alias ls='ls -G' # Compact view, show colors alias la='ls -AF' # Compact view, show hidden alias ll='ls -al' alias l='ls -a' alias l1='ls -1' alias _="sudo" if [ $(uname) = "Linux" ] then alias ls="ls --color=auto" fi which gshuf &> /dev/null if [ $? -eq 1 ] then alias shuf=gshuf fi alias c='clear' alias k='clear' alias cls='clear' alias edit="$EDITOR" alias pager="$PAGER" alias q='exit' alias irc="$IRC_CLIENT" alias rb='ruby' # Pianobar can be found here: http://github.com/PromyLOPh/pianobar/ alias piano='pianobar' alias ..='cd ..' # Go up one directory alias ...='cd ../..' # Go up two directories alias ....='cd ../../..' # Go up three directories alias -- -='cd -' # Go back # Shell History alias h='history' # Tree if [ ! -x "$(which tree 2>/dev/null)" ] then alias tree="find . -print | sed -e 's;[^/]*/;|____;g;s;____|; |;g'" fi # Directory alias md='mkdir -p' alias rd='rmdir'
cite about-alias about-alias 'general aliases' # List directory contents alias sl=ls alias ls='ls -G' # Compact view, show colors alias la='ls -AF' # Compact view, show hidden alias ll='ls -al' alias l='ls -a' alias l1='ls -1' alias _="sudo" if [ $(uname) = "Linux" ] then alias ls="ls --color=auto" fi which gshuf &> /dev/null if [ $? -eq 0 ] then alias shuf=gshuf fi alias c='clear' alias k='clear' alias cls='clear' alias edit="$EDITOR" alias pager="$PAGER" alias q='exit' alias irc="$IRC_CLIENT" alias rb='ruby' # Pianobar can be found here: http://github.com/PromyLOPh/pianobar/ alias piano='pianobar' alias ..='cd ..' # Go up one directory alias ...='cd ../..' # Go up two directories alias ....='cd ../../..' # Go up three directories alias -- -='cd -' # Go back # Shell History alias h='history' # Tree if [ ! -x "$(which tree 2>/dev/null)" ] then alias tree="find . -print | sed -e 's;[^/]*/;|____;g;s;____|; |;g'" fi # Directory alias md='mkdir -p' alias rd='rmdir'
Disable transformer test to get travis green
#!/bin/bash set -e DIR=$(dirname "${BASH_SOURCE[0]}") $DIR/build_sdk.sh $DIR/test.sh $DIR/browser_test.sh $DIR/node_test.sh $DIR/analyze.sh $DIR/format.sh $DIR/transformer_test.sh
#!/bin/bash set -e DIR=$(dirname "${BASH_SOURCE[0]}") $DIR/build_sdk.sh $DIR/test.sh $DIR/browser_test.sh $DIR/node_test.sh $DIR/analyze.sh $DIR/format.sh # TODO(vsm/ochafik): Re-enable when this is addressed: # https://github.com/dart-lang/dev_compiler/issues/458 # $DIR/transformer_test.sh
Allow script to start at a certain point
#!/bin/bash (cd experiments; git pull) for branch in raft-45 raft-46 raft-56 raft-58 raft-58-initialization raft-42 raft-66; do echo "==================== Running $branch ==================" git checkout $branch git pull sbt assembly && java -d64 -Xmx15g -cp target/scala-2.11/randomSearch-assembly-0.1.jar akka.dispatch.verification.Main > console.out done
#!/bin/bash START_WITH=$1 (cd experiments; git pull) for branch in raft-45 raft-46 raft-56 raft-58 raft-58-initialization raft-42 raft-66; do if [ "$START_WITH" != "" -a "$START_WITH" != $branch ]; then continue fi if [ "$START_WITH" != "" -a "$START_WITH" == $branch ]; then START_WITH="" fi echo "==================== Running $branch ==================" git checkout $branch git pull sbt assembly && java -d64 -Xmx15g -cp target/scala-2.11/randomSearch-assembly-0.1.jar akka.dispatch.verification.Main > console.out done
Make the code more scalable
CURRENT_PATH=$(pwd) FILES=("$HOME/.bashrc" "$HOME/.vimrc" "$HOME/.tmux.conf") # $1 => question function confirm(){ local input echo -n "$1 [y/N]" read input if [[ "$input" != "Y" && "$input" != "y" ]];then echo "exiting" exit fi } # Check if .bashrc or .vimrc exits before create symlink function check_rc_exist(){ for i in ${FILES[@]};do if [[ -h "$i" || -e "$i" ]];then confirm "$i already exits, delete it? " rm "$i" fi done } # $1 => {'0' => server, '1' => desktop} function create_symlinks(){ local b=".bashrc" local v=".vimrc" if [[ $1 == "0" ]];then ln -s "$CURRENT_PATH"/server/"$b" "$HOME"/"$b" ln -s "$CURRENT_PATH"/server/"$v" "$HOME"/"$v" else ln -s "$CURRENT_PATH"/"$b" "$HOME"/"$b" ln -s "$CURRENT_PATH"/"$v" "$HOME"/"$v" fi } check_rc_exist create_symlinks
CURRENT_PATH=$(pwd) FILES=("/.bashrc" "/.vimrc" "/.tmux.conf") INPUT_TYPE="$1" # $1 => question function confirm(){ local input echo -n "$1 [y/N]" read input if [[ "$input" != "Y" && "$input" != "y" ]];then echo "exiting" exit fi } # Check if .bashrc or .vimrc exits before create symlink function check_rc_exist(){ for i in ${FILES[@]};do local full_path="$HOME""$i" if [[ -h "$full_path" || -e "$full_path" ]];then confirm "$full_path already exits, delete it? " rm "$full_path" fi done } # $1 => {'0' => server, '1' => desktop} function create_symlinks(){ for i in ${FILES[@]};do if [[ $TYPE == "server" ]];then ln -s "$CURRENT_PATH/server$i" "$HOME""$i" elif [[ $TYPE == "desktop" ]];then ln -s "$CURRENT_PATH""$i" "$HOME""$i" else echo "error" exit fi done } # Assign value to TYPE function check_type(){ if [[ "$INPUT_TYPE" == "-s" || "$INPUT_TYPE" == "-S" ]];then TYPE="server" elif [[ "$INPUT_TYPE" == "-d" || "$INPUT_TYPE" == "-D" ]];then TYPE="desktop" else echo "Invalid argument. -s for server mode, -d for desktop mode" exit fi } check_type check_rc_exist "$TYPE" create_symlinks
Upgrade Consul version to 1.5.2.
#!/bin/bash ########################################################################### # Download and Install Consul # # This script is prepared for caching of the download directory # ########################################################################### CONSUL_VER="1.5.1" UNAME=$(uname -s | tr '[:upper:]' '[:lower:]') CONSUL_ZIP="consul_${CONSUL_VER}_${UNAME}_amd64.zip" IGNORE_CERTS="${IGNORE_CERTS:-no}" # cleanup mkdir -p consul mkdir -p download if [[ ! -f "download/${CONSUL_ZIP}" ]] ; then cd download # install Vault if [[ "${IGNORE_CERTS}" == "no" ]] ; then echo "Downloading Consul with certs verification" wget "https://releases.hashicorp.com/consul/${CONSUL_VER}/${CONSUL_ZIP}" else echo "WARNING... Downloading Consul WITHOUT certs verification" wget "https://releases.hashicorp.com/consul/${CONSUL_VER}/${CONSUL_ZIP}" --no-check-certificate fi if [[ $? != 0 ]] ; then echo "Cannot download Consul" exit 1 fi cd .. fi cd consul if [[ -f consul ]] ; then rm consul fi unzip ../download/${CONSUL_ZIP} chmod a+x consul # check ./consul --version
#!/bin/bash ########################################################################### # Download and Install Consul # # This script is prepared for caching of the download directory # ########################################################################### CONSUL_VER="1.5.2" UNAME=$(uname -s | tr '[:upper:]' '[:lower:]') CONSUL_ZIP="consul_${CONSUL_VER}_${UNAME}_amd64.zip" IGNORE_CERTS="${IGNORE_CERTS:-no}" # cleanup mkdir -p consul mkdir -p download if [[ ! -f "download/${CONSUL_ZIP}" ]] ; then cd download # install Vault if [[ "${IGNORE_CERTS}" == "no" ]] ; then echo "Downloading Consul with certs verification" wget "https://releases.hashicorp.com/consul/${CONSUL_VER}/${CONSUL_ZIP}" else echo "WARNING... Downloading Consul WITHOUT certs verification" wget "https://releases.hashicorp.com/consul/${CONSUL_VER}/${CONSUL_ZIP}" --no-check-certificate fi if [[ $? != 0 ]] ; then echo "Cannot download Consul" exit 1 fi cd .. fi cd consul if [[ -f consul ]] ; then rm consul fi unzip ../download/${CONSUL_ZIP} chmod a+x consul # check ./consul --version
Use cp everywhere, instead of being inconsistent and using both mv and cp.
#!/bin/sh isocmd="genisoimage" rm -rf isofs/System/ mkdir -p isofs/boot/grub if [ ! -e '/usr/bin/genisoimage' ] then isocmd="mkisofs" fi mkdir -p isofs/System mv src/metodo/metodo.exe isofs/System mv src/user/user.exe isofs/System for x in `find -name '*.lib'`; do cp $x isofs/System done $isocmd -R -b boot/grub/stage2_eltorito -no-emul-boot -boot-load-size 4 -boot-info-table -input-charset utf-8 -o dux.iso isofs mv dux.iso iso
#!/bin/sh isocmd="genisoimage" rm -rf isofs/System/ mkdir -p isofs/boot/grub if [ ! -e '/usr/bin/genisoimage' ] then isocmd="mkisofs" fi mkdir -p isofs/System cp src/metodo/metodo.exe isofs/System cp src/user/user.exe isofs/System for x in `find -name '*.lib'`; do cp $x isofs/System done $isocmd -R -b boot/grub/stage2_eltorito -no-emul-boot -boot-load-size 4 -boot-info-table -input-charset utf-8 -o dux.iso isofs mv dux.iso iso
Fix paths in startup script
#!/bin/bash if [ ! -r /config/ost-config.php ]; then echo "ERROR: Could not read /config/ost-config.php." exit 1 fi ln -sf /config/ost-config.php /var/www/html/include/ost-config.php if [ "$OSTICKET_SETUP" = "yes" -o "$OSTICKET_SETUP" = "true" ]; then echo "Running in SETUP mode." if [ ! -w /var/www/html/include/ost-config.php ]; then echo "ERROR: ost-config.php is not writable." exit 2 fi else echo "Running in PRODUCTION mode." rm -rf /var/www/html/setup fi crontab <<EOF */5 * * * * /usr/bin/php /var/www/html/dota-support/api/cron.php EOF
#!/bin/bash if [ ! -r /config/ost-config.php ]; then echo "ERROR: Could not read /config/ost-config.php." exit 1 fi ln -sf /config/ost-config.php /var/www/html/dota-support/include/ost-config.php if [ "$OSTICKET_SETUP" = "yes" -o "$OSTICKET_SETUP" = "true" ]; then echo "Running in SETUP mode." if [ ! -w /var/www/html/dota-support/include/ost-config.php ]; then echo "ERROR: ost-config.php is not writable." exit 2 fi else echo "Running in PRODUCTION mode." rm -rf /var/www/html/dota-support/setup fi crontab <<EOF */5 * * * * /usr/bin/php /var/www/html/dota-support/api/cron.php EOF
Update to latest Cog version
#!/bin/bash NAME="$1" mkdir $NAME cd $NAME ../Cuis-Smalltalk-Dev/bin/copyImage.sh $NAME ../Cuis-Smalltalk-Dev/bin/getCog.sh 13.30 2761
#!/bin/bash NAME="$1" mkdir $NAME cd $NAME ../Cuis-Smalltalk-Dev/bin/copyImage.sh $NAME ../Cuis-Smalltalk-Dev/bin/getCog.sh 13.33 2776
Add vulkan unit tests to Kokoro
#!/bin/bash # Fail on any error. set -e # Display commands being run. set -x cd git/SwiftShader git submodule update --init mkdir -p build && cd build cmake .. make --jobs=$(nproc) # Run the GLES unit tests. TODO(capn): rename. ./unittests ./ReactorUnitTests
#!/bin/bash # Fail on any error. set -e # Display commands being run. set -x cd git/SwiftShader git submodule update --init mkdir -p build && cd build cmake .. make --jobs=$(nproc) # Run the reactor unit tests. ./ReactorUnitTests # Run the GLES unit tests. TODO(capn): rename. ./unittests # Run the Vulkan unit tests. cd .. # Must be run from project root build/vk-unittests
Enable redirects for downloading Racket
#!/bin/sh set -uex cd /tmp curl -O https://github.com/plt/racket/archive/master.tar.gz tar -zxf master.tar.gz cd racket-master/src ./configure --prefix=/app/vendor make -sj30 make -sj30 install
#!/bin/sh set -uex cd /tmp curl -L10 -O https://github.com/plt/racket/archive/master.tar.gz tar -zxf master.tar.gz cd racket-master/src ./configure --prefix=/app/vendor make -sj30 make -sj30 install
Test against 1.6 and 1.7
#!/usr/bin/env bash set -eux versions=(1.6.0 1.7.0-RC1 1.7.0-RC2 1.7.0-master-SNAPSHOT) for i in ${versions[@]} do cp pom.xml pom-$i.xml perl -i -pe 's/\[1.6.0,\)/'"$i"'/g' pom-$i.xml mvn clean test -f pom-$i.xml done for i in ${versions[@]} do rm pom-$i.xml done
#!/usr/bin/env bash set -eux versions=(1.6.0 1.7.0) for i in ${versions[@]} do cp pom.xml pom-$i.xml perl -i -pe 's/\[1.6.0,\)/'"$i"'/g' pom-$i.xml mvn clean test -f pom-$i.xml done for i in ${versions[@]} do rm pom-$i.xml done
Upgrade Java 17 version in CI image
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/adoptium/temurin8-binaries/releases/download/jdk8u312-b07/OpenJDK8U-jdk_x64_linux_hotspot_8u312b07.tar.gz" ;; java11) echo "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.13%2B8/OpenJDK11U-jdk_x64_linux_hotspot_11.0.13_8.tar.gz" ;; java17) echo "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17%2B35/OpenJDK17-jdk_x64_linux_hotspot_17_35.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/adoptium/temurin8-binaries/releases/download/jdk8u312-b07/OpenJDK8U-jdk_x64_linux_hotspot_8u312b07.tar.gz" ;; java11) echo "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.13%2B8/OpenJDK11U-jdk_x64_linux_hotspot_11.0.13_8.tar.gz" ;; java17) echo "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.1%2B12/OpenJDK17U-jdk_x64_linux_hotspot_17.0.1_12.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
Upgrade Java 18 version in CI image
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/adoptium/temurin8-binaries/releases/download/jdk8u332-b09/OpenJDK8U-jdk_x64_linux_hotspot_8u332b09.tar.gz" ;; java11) echo "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.15%2B10/OpenJDK11U-jdk_x64_linux_hotspot_11.0.15_10.tar.gz" ;; java17) echo "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.3%2B7/OpenJDK17U-jdk_x64_linux_hotspot_17.0.3_7.tar.gz" ;; java18) echo "https://github.com/adoptium/temurin18-binaries/releases/download/jdk-18.0.1%2B10/OpenJDK18U-jdk_x64_linux_hotspot_18.0.1_10.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
#!/bin/bash set -e case "$1" in java8) echo "https://github.com/bell-sw/Liberica/releases/download/8u333+2/bellsoft-jdk8u333+2-linux-amd64.tar.gz" ;; java11) echo "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.15%2B10/OpenJDK11U-jdk_x64_linux_hotspot_11.0.15_10.tar.gz" ;; java17) echo "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.3%2B7/OpenJDK17U-jdk_x64_linux_hotspot_17.0.3_7.tar.gz" ;; java18) echo "https://github.com/adoptium/temurin18-binaries/releases/download/jdk-18.0.1%2B10/OpenJDK18U-jdk_x64_linux_hotspot_18.0.1_10.tar.gz" ;; *) echo $"Unknown java version" exit 1 esac
Add flag -f to rm -r tests/*.test, so as to not complain if the files are not there (probably because the tests had not been run).
#!/bin/bash rm -f `find . -type f -a -name libtool` rm -f `find . -type f -a -name gmon.out` rm -f `find doc -type f -a -name *.log` rm -f configure rm -f ltmain.sh rm -f libtool rm -f install-sh rm -f aclocal.m4 rm -f config.guess rm -f config.sub rm -f config.log rm -f config.h rm -f config.status rm -f stamp-h* rm -f configure.scan rm -f depcomp rm -f missing rm -f mkinstalldirs rm -f compile rm -f `find . -name Makefile.in` rm -f `find . -name Makefile` rm -rf autom4te.cache/ rm -f acopost.spec rm -f *~ rm -rf src/.deps rm -r tests/*.test
#!/bin/bash rm -f `find . -type f -a -name libtool` rm -f `find . -type f -a -name gmon.out` rm -f `find doc -type f -a -name *.log` rm -f configure rm -f ltmain.sh rm -f libtool rm -f install-sh rm -f aclocal.m4 rm -f config.guess rm -f config.sub rm -f config.log rm -f config.h rm -f config.status rm -f stamp-h* rm -f configure.scan rm -f depcomp rm -f missing rm -f mkinstalldirs rm -f compile rm -f `find . -name Makefile.in` rm -f `find . -name Makefile` rm -rf autom4te.cache/ rm -f acopost.spec rm -f *~ rm -rf src/.deps rm -rf tests/*.test
Enable font smoothing for macOS 10.14
#!/bin/sh -x # Some ideas from: # - https://mths.be/macos # Enable Secure Keyboard Entry in Terminal.app defaults write com.apple.terminal SecureKeyboardEntry -bool true # Press & hold = entering same character defaults write com.microsoft.VSCode ApplePressAndHoldEnabled -bool false # Enable full keyboard access for all controls # (e.g. enable Tab in modal dialogs) defaults write NSGlobalDomain AppleKeyboardUIMode -int 3 # Fast keyboard repeat defaults write NSGlobalDomain KeyRepeat -int 2 defaults write NSGlobalDomain InitialKeyRepeat -int 15 # Trackpad: enable tap to click defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -int 1 defaults write com.apple.AppleMultitouchTrackpad Clicking -int 1 defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
#!/bin/sh -x # Some ideas from: # - https://mths.be/macos # Enable Secure Keyboard Entry in Terminal.app defaults write com.apple.terminal SecureKeyboardEntry -bool true # Press & hold = entering same character defaults write com.microsoft.VSCode ApplePressAndHoldEnabled -bool false # Enable full keyboard access for all controls # (e.g. enable Tab in modal dialogs) defaults write NSGlobalDomain AppleKeyboardUIMode -int 3 # Fast keyboard repeat defaults write NSGlobalDomain KeyRepeat -int 2 defaults write NSGlobalDomain InitialKeyRepeat -int 15 # Trackpad: enable tap to click defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -int 1 defaults write com.apple.AppleMultitouchTrackpad Clicking -int 1 defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1 # macOS 10.14 font smoothing on external display # See http://osxdaily.com/2018/09/26/fix-blurry-thin-fonts-text-macos-mojave/ defaults write -g CGFontRenderingFontSmoothingDisabled -bool NO
Add content option when sharing.
#!/bin/bash # On multiuser machines, output and grep on $USER too MPV_PID=$(ps axo '%p %c'|grep [m]pv$|awk '{print $1}') if [ "$(echo ${MPV_PID}|wc -w)" -ne 1 ] ; then echo "Error: too many mpv PIDs: \"${MPV_PID}\" ($(echo ${MPV_PID}|wc -w))" exit 1 fi IFS=' ' for F in $(lsof -p ${MPV_PID} -Ftn |grep -A1 ^tREG|grep ^n|sed 's/^n//g'); do if test -w "$F" ; then TMP_JSON=$(mktemp) exiftool -json "$F" > $TMP_JSON INFO=$(python <<EOF import json f=open('$TMP_JSON', 'r') obj=json.load(f) print('Hi, check out "' + obj[0]["Title"] + '" by ' + obj[0]['Artist'] + ".") EOF ) # Use zenity because the terminal cannot be controlled with mpv running. EMAIL=$(zenity --entry --title "Email to share with?" --text '') echo | mutt -s "$INFO" -- $EMAIL if [[ $? == 0 ]]; then echo "Successfully shared with '$EMAIL'." echo $INFO else echo "Failed to share with '$EMAIL'." echo $INFO fi fi done
#!/bin/bash function die() { echo $*; exit -1; } # On multiuser machines, output and grep on $USER too MPV_PID=$(ps axo '%p %c'|grep [m]pv$|awk '{print $1}') if [ "$(echo ${MPV_PID}|wc -w)" -ne 1 ] ; then echo "Error: too many mpv PIDs: \"${MPV_PID}\" ($(echo ${MPV_PID}|wc -w))" exit 1 fi IFS=' ' for F in $(lsof -p ${MPV_PID} -Ftn |grep -A1 ^tREG|grep ^n|sed 's/^n//g'); do if test -w "$F" ; then TMP_JSON=$(mktemp) exiftool -json "$F" > $TMP_JSON INFO=$(python <<EOF import json f=open('$TMP_JSON', 'r') obj=json.load(f) print('Hi, check out "' + obj[0]["Title"] + '" by ' + obj[0]['Artist'] + ".") EOF ) # Use zenity because the terminal cannot be controlled with mpv running. EMAIL=$(zenity --entry --title "Email to share with?" --text '') [[ -z $EMAIL ]] && die "Error: No email input." CONTENT=$(zenity --entry --title "Optional message body?" --text '') mutt -s "$INFO" -- $EMAIL<<EOF $CONTENT EOF if [[ $? == 0 ]]; then echo "Successfully shared with '$EMAIL'." else echo "Failed to share with '$EMAIL'."; fi echo $INFO fi done
Add alias for git permission reset
#!/bin/sh alias gl='git pull --prune' alias glg="git log --graph --decorate --oneline --abbrev-commit" alias gp='git push origin HEAD' alias gpa='git push origin --all' alias gd='git diff' alias gc='git commit' alias gca='git commit -a' alias gco='git checkout' alias gb='git branch' alias ga='git add' alias gaa='git add -A' alias gcm='git commit -m' alias gcam='git commit -a -m' alias gs='git status -sb' alias gcb='git-copy-branch-name' alias gpr='gp && open-pr' gi() { curl -s "https://www.gitignore.io/api/$*"; }
#!/bin/sh alias gl='git pull --prune' alias glg="git log --graph --decorate --oneline --abbrev-commit" alias gp='git push origin HEAD' alias gpa='git push origin --all' alias gd='git diff' alias gc='git commit' alias gca='git commit -a' alias gco='git checkout' alias gb='git branch' alias ga='git add' alias gaa='git add -A' alias gcm='git commit -m' alias gcam='git commit -a -m' alias gs='git status -sb' alias gcb='git-copy-branch-name' alias gpr='gp && open-pr' alias gpermres='git permission-resetb' gi() { curl -s "https://www.gitignore.io/api/$*"; } git config --global --add alias.permission-resetb \ '!git diff -p --no-ext-diff --no-color --diff-filter=d \ | grep -E "^(diff|old mode|new mode)" \ | sed -e "s/^old/NEW/;s/^new/old/;s/^NEW/new/" \ | git apply'
Fix a minor bug (calling shell function via exec)
#!/bin/bash BL=( /sys/class/backlight/* ) if [ ! -w "${BL[0]}/brightness" ]; then if [ "$1" != "no-sudo" ]; then no_sudo() { zenity --warning --text="No sudo utility found -- install gksu or kdesu" exit } SUDO=no_sudo if which gksudo > /dev/null 2>&1; then SUDO=gksudo elif which kdesu > /dev/null 2>&1; then SUDO=kdesu fi exec $SUDO "$0" "no-sudo" fi zenity --error --text="Can't alter the brightness" exit fi MAXIMUM="$(<${BL[0]}/max_brightness)" INITIAL="$(<${BL[0]}/brightness)" while read val; do echo $val > "${BL[0]}/brightness" done < <(zenity --scale --text="Set screen brightness" --min-value=0 --value="$INITIAL" --max-value="$MAXIMUM" --print-partial || echo "$INITIAL")
#!/bin/bash BL=( /sys/class/backlight/* ) if [ ! -w "${BL[0]}/brightness" ]; then if [ "$1" != "no-sudo" ]; then no_sudo() { zenity --warning --text="No sudo utility found -- install gksu or kdesu" exit } SUDO=no_sudo if which gksudo > /dev/null 2>&1; then SUDO='exec gksudo' elif which kdesu > /dev/null 2>&1; then SUDO='exec kdesu' fi $SUDO "$0" "no-sudo" fi zenity --error --text="Can't alter the brightness" exit fi MAXIMUM="$(<${BL[0]}/max_brightness)" INITIAL="$(<${BL[0]}/brightness)" while read val; do echo $val > "${BL[0]}/brightness" done < <(zenity --scale --text="Set screen brightness" --min-value=0 --value="$INITIAL" --max-value="$MAXIMUM" --print-partial || echo "$INITIAL")
Install global npm deps as part of node install.sh
# Install the latest stable nave the node.js environment switcher Node.js curl -fsSL https://raw.github.com/isaacs/nave/master/nave.sh > /usr/local/bin/nave && chmod ugo+x /usr/local/bin/nave # Install a global node.js nave usemain stable
# Install the latest stable nave the node.js environment switcher Node.js curl -fsSL https://raw.github.com/isaacs/nave/master/nave.sh > /usr/local/bin/nave && chmod ugo+x /usr/local/bin/nave # Install a global node.js nave usemain stable # Install global deps npm install -g jshint npm install -g grunt npm install -g gulp
Comment out creation of the already existing directory.
# Generate docs. mkdir -p public/api python3 ./openapi/generate_openapi.py --release $(git describe --tags --abbrev=0) > ./public/api/wekan.yml api2html -c ./public/logo-header.png -o ./public/api/wekan.html ./public/api/wekan.yml # Copy docs to bundle #cp -pR ./public/api ~/repos/wekan/.build/bundle/programs/web.browser/app/ #cp -pR ./public/api ~/repos/wekan/.build/bundle/programs/web.browser.legacy/app/
# Generate docs. #mkdir -p public/api python3 ./openapi/generate_openapi.py --release $(git describe --tags --abbrev=0) > ./public/api/wekan.yml api2html -c ./public/logo-header.png -o ./public/api/wekan.html ./public/api/wekan.yml # Copy docs to bundle #cp -pR ./public/api ~/repos/wekan/.build/bundle/programs/web.browser/app/ #cp -pR ./public/api ~/repos/wekan/.build/bundle/programs/web.browser.legacy/app/
Add --silent flag to npm install
#!/bin/bash # source config source /mapic/config/env.sh || exit 1 # TODO: find a better way ? export SYSTEMAPIC_PGSQL_USERNAME \ SYSTEMAPIC_PGSQL_PASSWORD \ SYSTEMAPIC_PGSQL_DBNAME # ensure log folder mkdir -p /mapic/modules/mile/log # ensure node modules are installed NODE_MODULES_DIR=/mapic/modules/mile/node_modules if [ ! -d "$NODE_MODULES_DIR" ]; then echo "Installing node modules..." npm install || abort "Failed to install node modules. Quitting!" # build mapnik from source # rm node_modules/mapnik -r # npm install --build-from-source mapnik fi # spin server if $MAPIC_PRODMODE; then echo 'Mile | PostGIS Tile Server | Production mode' forever src/pile.js production >> log/pile.log else echo 'Mile Debug mode (with 8GB memory)' nodemon --max-old-space-size=8192 -i node_modules/ -i test/ src/pile.js fi
#!/bin/bash # source config source /mapic/config/env.sh || exit 1 # TODO: find a better way ? export SYSTEMAPIC_PGSQL_USERNAME \ SYSTEMAPIC_PGSQL_PASSWORD \ SYSTEMAPIC_PGSQL_DBNAME # ensure log folder mkdir -p /mapic/modules/mile/log # ensure node modules are installed NODE_MODULES_DIR=/mapic/modules/mile/node_modules if [ ! -d "$NODE_MODULES_DIR" ]; then echo "Installing node modules..." npm install --silent || abort "Failed to install node modules. Quitting!" fi # spin server if $MAPIC_PRODMODE; then echo 'Mile | PostGIS Tile Server | Production mode' forever src/pile.js production >> log/pile.log else echo 'Mile Debug mode (with 8GB memory)' nodemon --max-old-space-size=8192 -i node_modules/ -i test/ src/pile.js fi
Clean npm cache after install
#!/bin/bash echo "Installing dependencies" npm install -g coffee-script npm install -g bower npm install --verbose bower install --verbose apt-get update apt-get install nfs-common apt-get install cifs-utils apt-get install transmission-daemon echo "Mount network shared drive" mount -t cifs -o username=root,password= //192.168.1.123/Public /mnt/Public echo "Configure Transmission" usermod -a -G root debian-transmission cp transmission_settings.json /etc/transmission-daemon/settings.json echo "Starting Transmission" service transmission-daemon reload service transmission-daemon start echo "Configure Application" cp resin_config.json config.json
#!/bin/bash echo "Installing dependencies" npm install -g coffee-script npm install -g bower npm install --verbose bower install --verbose apt-get update apt-get install nfs-common apt-get install cifs-utils apt-get install transmission-daemon # Clean cache npm cache clean rm -rf /tmp/* echo "Mount network shared drive" mount -t cifs -o username=root,password= //192.168.1.123/Public /mnt/Public echo "Configure Transmission" usermod -a -G root debian-transmission cp transmission_settings.json /etc/transmission-daemon/settings.json echo "Starting Transmission" service transmission-daemon reload service transmission-daemon start echo "Configure Application" cp resin_config.json config.json
Add more options to run helper script
#!/bin/sh #Run seafile docker container with host folder as a volume #Default volume path on host. VOLUME_PATH="/home/docker/seafile" #Or you can add it to ~/.docker-sunx-seafile file [ -f ~/.docker-sunx-seafile ] && source ~/.docker-sunx-seafile docker run -ti -v $VOLUME_PATH:/home/seafile -p 127.0.0.1:8000:8000 -p 127.0.0.1:8082:8082 sunx/seafile
#!/bin/sh #Run seafile docker container with host folder as a volume #Default volume path on host. VOLUME_PATH="/home/docker/seafile" #Container hostname CONTAINER_HOSTNAME="seafile.domain.com" #Container name CONTAINER_NAME="seafile" #Restart policy RESTART_POLCY="unless-stopped" #Some extra arguments. Like -d ant -ti EXTRA_ARGS="-d -ti" #You can change default values by adding them to config file ~/.docker-sunx-seafile [ -f ~/.docker-sunx-seafile ] && source ~/.docker-sunx-seafile [ ! -z "$CONTAINER_HOSTNAME" ] && CONTAINER_HOSTNAME="--hostname=$CONTAINER_HOSTNAME" [ ! -z "$CONTAINER_NAME" ] && CONTAINER_NAME="--name=$CONTAINER_NAME" [ ! -z "$RESTART_POLCY" ] && RESTART_POLCY="--restart=$RESTART_POLCY" docker run -v $VOLUME_PATH:/home/seafile -p 127.0.0.1:8000:8000 -p 127.0.0.1:8082:8082 $CONTAINER_HOSTNAME $CONTAINER_NAME $RESTART_POLCY $EXTRA_ARGS sunx/seafile
Replace generation time with generation tree
#!/bin/bash DIR=$(dirname $(dirname $(realpath $0))) TARGET=$(dirname $DIR)/s9e.github.com/TextFormatter/coverage cd $DIR/tests rm -f $DIR/docs/coverage/* phpunit -d memory_limit=256M -c $DIR/tests/phpunit.xml --coverage-html $TARGET REGEXP=s/`echo $(dirname $(dirname $DIR)) | sed -e 's/\\//\\\\\//g'`//g sed -i $REGEXP $TARGET/*.html
#!/bin/bash DIR=$(dirname $(dirname $(realpath $0))) TARGET=$(dirname $DIR)/s9e.github.com/TextFormatter/coverage cd $DIR/tests rm -f $DIR/docs/coverage/* phpunit -d memory_limit=256M -c $DIR/tests/phpunit.xml --coverage-html $TARGET REGEXP=s/`echo $(dirname $(dirname $DIR)) | sed -e 's/\\//\\\\\//g'`//g sed -i $REGEXP $TARGET/*.html SHA1=`git rev-parse HEAD` REGEXP='s/(<td class="versionInfo">.*?at )[^.]+/\1<a href="https:\/\/github.com\/s9e\/TextFormatter\/tree\/'$SHA1'">'$SHA1'<\/a>/' sed -i -r "$REGEXP" $TARGET/*.html
Add travis_wait for installing ccn2
#!/usr/bin/env bash # Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved. set -e set -x if [ "$#" -ne 1 ]; then echo "Usage: $0 INSTALL_DIR" exit 1 fi INSTALL_DIR=$1 mkdir -p $INSTALL_DIR # Install torch curl -sk https://raw.githubusercontent.com/torch/ezinstall/master/install-deps | bash &>/dev/null git clone https://github.com/torch/distro.git $INSTALL_DIR --recursive cd $INSTALL_DIR ./install.sh -b # Build LMDB LMDB_DIR=$INSTALL_DIR/lmdb pushd . git clone https://gitorious.org/mdb/mdb.git $LMDB_DIR cd $LMDB_DIR/libraries/liblmdb make popd # Install luarocks modules install_rock () { $INSTALL_DIR/install/bin/luarocks install $@ &>/dev/null } install_rock image #install_rock inn $INSTALL_DIR/install/bin/luarocks install inn install_rock "https://raw.github.com/Sravan2j/lua-pb/master/lua-pb-scm-0.rockspec" install_rock ccn2 install_rock lightningmdb \ LMDB_INCDIR=$LMDB_DIR/libraries/liblmdb \ LMDB_LIBDIR=$LMDB_DIR/libraries/liblmdb
#!/usr/bin/env bash # Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved. set -e set -x if [ "$#" -ne 1 ]; then echo "Usage: $0 INSTALL_DIR" exit 1 fi INSTALL_DIR=$1 mkdir -p $INSTALL_DIR # Install torch curl -sk https://raw.githubusercontent.com/torch/ezinstall/master/install-deps | bash &>/dev/null git clone https://github.com/torch/distro.git $INSTALL_DIR --recursive cd $INSTALL_DIR ./install.sh -b # Build LMDB LMDB_DIR=$INSTALL_DIR/lmdb pushd . git clone https://gitorious.org/mdb/mdb.git $LMDB_DIR cd $LMDB_DIR/libraries/liblmdb make popd # Install luarocks modules install_rock () { travis_wait $INSTALL_DIR/install/bin/luarocks install $@ &>/dev/null } install_rock image install_rock inn install_rock "https://raw.github.com/Sravan2j/lua-pb/master/lua-pb-scm-0.rockspec" install_rock ccn2 install_rock lightningmdb \ LMDB_INCDIR=$LMDB_DIR/libraries/liblmdb \ LMDB_LIBDIR=$LMDB_DIR/libraries/liblmdb
Fix installation of Openstack trunk on Precise
#!/bin/bash -xue function is_centos { [[ -f /etc/centos-release ]] } function is_deb { [[ -f /etc/debian_version ]] } function common { if is_centos; then sudo yum install -y wget fi wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python - sudo easy_install pip sudo easy_install -U six if is_centos; then sudo yum install -y https://kojipkgs.fedoraproject.org//packages/python-mox/0.5.3/2.el6/noarch/python-mox-0.5.3-2.el6.noarch.rpm fi git clone -b ${DEVSTACK_BRANCH:-master} https://github.com/openstack-dev/devstack.git cp devstack/samples/local.conf devstack/local.conf cat >> devstack/local.conf <<EOF disable_service n-xvnc n-novnc n-obj n-cauth h-eng h-api h-api-cfn h-api-cw horizon SCREEN_LOGDIR="\${DEST}/logs" EOF cp jenkins/${JOB_NAME%%/*}/extras.d/55-scality-glance-store.sh devstack/extras.d/55-scality-glance-store.sh ./devstack/stack.sh } common
#!/bin/bash -xue function common { source jenkins/openstack-ci-scripts/jenkins/distro-utils.sh if is_centos; then sudo yum install -y wget fi if [[ $os_CODENAME == "precise" ]]; then sudo add-apt-repository --yes cloud-archive:icehouse fi wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python - sudo easy_install pip sudo easy_install -U six if is_centos; then sudo yum install -y https://kojipkgs.fedoraproject.org//packages/python-mox/0.5.3/2.el6/noarch/python-mox-0.5.3-2.el6.noarch.rpm fi git clone -b ${DEVSTACK_BRANCH:-master} https://github.com/openstack-dev/devstack.git cp devstack/samples/local.conf devstack/local.conf cat >> devstack/local.conf <<EOF disable_service n-xvnc n-novnc n-obj n-cauth h-eng h-api h-api-cfn h-api-cw horizon SCREEN_LOGDIR="\${DEST}/logs" EOF cp jenkins/${JOB_NAME%%/*}/extras.d/55-scality-glance-store.sh devstack/extras.d/55-scality-glance-store.sh ./devstack/stack.sh } common
Fix for how we get shellcheck
#!/usr/bin/env bash set -o errexit # exit on first error set -o nounset # don't allow use of unset variables set -o pipefail # produce a failure return code if any pipeline command errors set -o xtrace # echo commands shopt -s failglob # cause globs that don't get expanded to cause errors . ci/travis_retry.sh echo "install start" # Get shellcheck. curl https://github.com/koalaman/shellcheck/releases/download/stable/shellcheck-stable.linux.x86_64.tar.xz --output shellcheck-stable.tar.xz tar --xz --extract --file shellcheck-stable.tar.xz mv shellcheck-stable/shellcheck "${HOME}"/bin/ rm --recursive --force shellcheck-stable shellcheck-stable.tar.xz "${HOME}"/bin/shellcheck --version # ruby rvm --default use 2.3.2 ruby --version travis_retry bundle install bundle --version bundle-audit --version mdl --version overcommit --version reek --version rubocop --version ruby-lint --version travis version --skip-completion-check # npm npm --version # python python --version travis_retry python -m pip install --upgrade pip wheel travis_retry python -m pip install --require-hashes --requirement \ requirements.txt travis_retry python -m pip install --require-hashes --requirement \ dev-requirements.txt # make all
#!/usr/bin/env bash set -o errexit # exit on first error set -o nounset # don't allow use of unset variables set -o pipefail # produce a failure return code if any pipeline command errors set -o xtrace # echo commands shopt -s failglob # cause globs that don't get expanded to cause errors . ci/travis_retry.sh echo "install start" # Get shellcheck. curl --fail --location --show-error --silent https://github.com/koalaman/shellcheck/releases/download/stable/shellcheck-stable.linux.x86_64.tar.xz --output shellcheck-stable.tar.xz tar --xz --extract --file shellcheck-stable.tar.xz mv shellcheck-stable/shellcheck "${HOME}"/bin/ rm --recursive --force shellcheck-stable shellcheck-stable.tar.xz "${HOME}"/bin/shellcheck --version # ruby rvm --default use 2.3.2 ruby --version travis_retry bundle install bundle --version bundle-audit --version mdl --version overcommit --version reek --version rubocop --version ruby-lint --version travis version --skip-completion-check # npm npm --version # python python --version travis_retry python -m pip install --upgrade pip wheel travis_retry python -m pip install --require-hashes --requirement \ requirements.txt travis_retry python -m pip install --require-hashes --requirement \ dev-requirements.txt # make all
Add StrictModes=no to work around cygwin problem. Reported by Yang Tse <yangsita@gmail.com>.
#!/bin/sh # Written by Simon Josefsson. # Start sshd, invoke parameters, saving exit code, kill sshd, and # return exit code. srcdir=${srcdir:-$PWD} SSHD=${SSHD:-/usr/sbin/sshd} cmd="./ssh2${EXEEXT}" srcdir=`cd $srcdir; pwd` PRIVKEY=$srcdir/etc/user export PRIVKEY PUBKEY=$srcdir/etc/user.pub export PUBKEY chmod go-r $srcdir/etc/host* $SSHD -f /dev/null -h $srcdir/etc/host \ -o 'Port 4711' \ -o 'Protocol 2' \ -o 'UsePrivilegeSeparation no' \ -o "AuthorizedKeysFile $srcdir/etc/user.pub" \ -D & sshdpid=$! trap "kill ${sshdpid}; echo signal killing sshd; exit 1;" EXIT : "started sshd (${sshdpid})" sleep 3 : Invoking $cmd... eval $cmd ec=$? : Self-test exit code $ec : "killing sshd (${sshdpid})" kill "${sshdpid}" > /dev/null 2>&1 trap "" EXIT exit $ec
#!/bin/sh # Written by Simon Josefsson. # Start sshd, invoke parameters, saving exit code, kill sshd, and # return exit code. srcdir=${srcdir:-$PWD} SSHD=${SSHD:-/usr/sbin/sshd} cmd="./ssh2${EXEEXT}" srcdir=`cd $srcdir; pwd` PRIVKEY=$srcdir/etc/user export PRIVKEY PUBKEY=$srcdir/etc/user.pub export PUBKEY chmod go-r $srcdir/etc/host* $SSHD -f /dev/null -h $srcdir/etc/host \ -o 'Port 4711' \ -o 'Protocol 2' \ -o "AuthorizedKeysFile $srcdir/etc/user.pub" \ -o 'UsePrivilegeSeparation no' \ -o 'StrictModes no' \ -D & sshdpid=$! trap "kill ${sshdpid}; echo signal killing sshd; exit 1;" EXIT : "started sshd (${sshdpid})" sleep 3 : Invoking $cmd... eval $cmd ec=$? : Self-test exit code $ec : "killing sshd (${sshdpid})" kill "${sshdpid}" > /dev/null 2>&1 trap "" EXIT exit $ec
Fix redirect (for realz this time)
#!/bin/bash set -ex aws --region=us-west-2 s3 sync --acl public-read --exclude ".*" ./dist/ s3://apps.other.chat/ aws --region=us-west-2 s3api put-object --bucket apps.other.chat --acl public-read --key otherjs/0.0.x/other.min.js --website-redirect-location /apps.other.chat/otherjs/0.0.1%2B3880ffe/other.min.js --content-type application/javascript
#!/bin/bash set -ex aws --region=us-west-2 s3 sync --acl public-read --exclude ".*" ./dist/ s3://apps.other.chat/ aws --region=us-west-2 s3api put-object --bucket apps.other.chat --acl public-read --key otherjs/0.0.x/other.min.js --website-redirect-location /otherjs/0.0.1+3880ffe/other.min.js --content-type application/javascript
Split generation of 32- and 64-bit installers on windows
#!/bin/bash DISTDIR="`pwd`/release" VERSION=0.9.4 ELECTRON_VERSION=1.2.7 create_package() { os="$1" arch="$2" icon="$3" rm -rf "$DISTDIR/Millie-${os}-${arch}.zip" ./node_modules/.bin/electron-packager . "Millie" --platform=$os --arch=$arch --icon=$icon --overwrite --prune --ignore=release --ignore=distro.sh --app-version="$VERSION" --app-copyright "Copyright (C) 2016 Roberto Selbach Teixeira" --asar --out="$DISTDIR" --version="$ELECTRON_VERSION" cd "$DISTDIR/Millie-${os}-${arch}" zip -9ryv "$DISTDIR/Millie-${os}-${arch}.zip" . cd - } # max create_package darwin x64 ./app/img/app.icns create_package linux ia32 ./app/img/app.ico create_package linux x64 ./app/img/app.ico create_package win32 ia32 ./app/img/app.ico create_package win32 x64 ./app/img/app.ico grunt create-windows-installer
#!/bin/bash DISTDIR="`pwd`/release" VERSION=0.9.4 ELECTRON_VERSION=1.2.7 create_package() { os="$1" arch="$2" icon="$3" rm -rf "$DISTDIR/Millie-${os}-${arch}.zip" ./node_modules/.bin/electron-packager . "Millie" --platform=$os --arch=$arch --icon=$icon --overwrite --prune --ignore=release --ignore=distro.sh --app-version="$VERSION" --app-copyright "Copyright (C) 2016 Roberto Selbach Teixeira" --asar --out="$DISTDIR" --version="$ELECTRON_VERSION" cd "$DISTDIR/Millie-${os}-${arch}" zip -9ryv "$DISTDIR/Millie-${os}-${arch}.zip" . cd - } # max #create_package darwin x64 ./app/img/app.icns #create_package linux ia32 ./app/img/app.ico #create_package linux x64 ./app/img/app.ico #create_package win32 ia32 ./app/img/app.ico #create_package win32 x64 ./app/img/app.ico grunt create-windows-installer:ia32 mv "$DISTDIR/MillieSetup.exe" "$DISTDIR/Millie-win32-ia32.exe" grunt create-windows-installer:x64 mv "$DISTDIR/MillieSetup.exe" "$DISTDIR/Millie-win32-x64.exe"
Put timestamp in front of commit sha
#!/usr/bin/env bash set -euo pipefail export AWS_DEFAULT_REGION=us-east-1 VERSION=circle-${CIRCLE_SHA1}-$(date +%s) ARCHIVE=${VERSION}.zip docker login -e $DOCKER_EMAIL -u $DOCKER_USERNAME -p $DOCKER_PASSWORD docker build -t jeffreycharles/number-switcher-3000:${VERSION} . docker push jeffreycharles/number-switcher-3000 cat << EOF > Dockerrun.aws.json { "AWSEBDockerrunVersion": "1", "Image": { "Name": "jeffreycharles/number-switcher-3000:${VERSION}" }, "Ports": [{ "ContainerPort": "3000" }] } EOF zip $ARCHIVE Dockerrun.aws.json aws s3 cp $ARCHIVE s3://number-switcher-3000-deployments/${ARCHIVE} aws elasticbeanstalk create-application-version \ --application-name number-switcher-3000 \ --version-label $VERSION \ --source-bundle S3Bucket='number-switcher-3000-deployments',S3Key="${ARCHIVE}" aws elasticbeanstalk update-environment \ --environment-name number-switcher-3000 \ --version-label $VERSION
#!/usr/bin/env bash set -euo pipefail export AWS_DEFAULT_REGION=us-east-1 VERSION=circle-$(date +%s)-${CIRCLE_SHA1} ARCHIVE=${VERSION}.zip docker login -e $DOCKER_EMAIL -u $DOCKER_USERNAME -p $DOCKER_PASSWORD docker build -t jeffreycharles/number-switcher-3000:${VERSION} . docker push jeffreycharles/number-switcher-3000 cat << EOF > Dockerrun.aws.json { "AWSEBDockerrunVersion": "1", "Image": { "Name": "jeffreycharles/number-switcher-3000:${VERSION}" }, "Ports": [{ "ContainerPort": "3000" }] } EOF zip $ARCHIVE Dockerrun.aws.json aws s3 cp $ARCHIVE s3://number-switcher-3000-deployments/${ARCHIVE} aws elasticbeanstalk create-application-version \ --application-name number-switcher-3000 \ --version-label $VERSION \ --source-bundle S3Bucket='number-switcher-3000-deployments',S3Key="${ARCHIVE}" aws elasticbeanstalk update-environment \ --environment-name number-switcher-3000 \ --version-label $VERSION
Use latest nats server release
#!/bin/sh set -e # check to see if gnatsd folder is empty if [ ! "$(ls -A $HOME/gnatsd)" ]; then mkdir -p $HOME/gnatsd cd $HOME/gnatsd wget https://github.com/nats-io/gnatsd/releases/download/v0.9.2/gnatsd-v0.9.2-linux-amd64.zip -O gnatsd.zip unzip gnatsd.zip mv gnatsd-v0.9.2-linux-amd64/gnatsd . else echo 'Using cached directory.'; fi
#!/bin/sh set -e # check to see if gnatsd folder is empty if [ ! "$(ls -A $HOME/gnatsd)" ]; then mkdir -p $HOME/gnatsd cd $HOME/gnatsd wget https://github.com/nats-io/gnatsd/releases/download/v0.9.4/gnatsd-v0.9.4-linux-amd64.zip -O gnatsd.zip unzip gnatsd.zip mv gnatsd-v0.9.4-linux-amd64/gnatsd . else echo 'Using cached directory.'; fi
Remove remnants of ceilometer from mongodb data file
#!/bin/bash set -e source functions.sh if [ "$1" = "--coverage" ]; then COVERAGE_ARG="$1" shift fi export PATH=${PATH:+$PATH:}/sbin:/usr/sbin check_for_cmd mongod # Start MongoDB process for tests MONGO_DATA=`mktemp -d /tmp/CEILO-MONGODB-XXXXX` MONGO_PORT=29000 trap "clean_exit ${MONGO_DATA}" EXIT mkfifo ${MONGO_DATA}/out mongod --maxConns 32 --nojournal --noprealloc --smallfiles --quiet --noauth --port ${MONGO_PORT} --dbpath "${MONGO_DATA}" --bind_ip localhost --config /dev/null &>${MONGO_DATA}/out & # Wait for Mongo to start listening to connections wait_for_line "waiting for connections on port ${MONGO_PORT}" ${MONGO_DATA}/out # Read the fifo for ever otherwise mongod would block cat ${MONGO_DATA}/out > /dev/null & export AODH_TEST_MONGODB_URL="mongodb://localhost:${MONGO_PORT}/AODH" if test -n "$AODH_TEST_HBASE_URL" then export AODH_TEST_HBASE_TABLE_PREFIX=$(hexdump -n 16 -v -e '/1 "%02X"' /dev/urandom) python tools/test_hbase_table_utils.py --upgrade fi # Yield execution to venv command $*
#!/bin/bash set -e source functions.sh if [ "$1" = "--coverage" ]; then COVERAGE_ARG="$1" shift fi export PATH=${PATH:+$PATH:}/sbin:/usr/sbin check_for_cmd mongod # Start MongoDB process for tests MONGO_DATA=`mktemp -d /tmp/AODH-MONGODB-XXXXX` MONGO_PORT=29000 trap "clean_exit ${MONGO_DATA}" EXIT mkfifo ${MONGO_DATA}/out mongod --maxConns 32 --nojournal --noprealloc --smallfiles --quiet --noauth --port ${MONGO_PORT} --dbpath "${MONGO_DATA}" --bind_ip localhost --config /dev/null &>${MONGO_DATA}/out & # Wait for Mongo to start listening to connections wait_for_line "waiting for connections on port ${MONGO_PORT}" ${MONGO_DATA}/out # Read the fifo for ever otherwise mongod would block cat ${MONGO_DATA}/out > /dev/null & export AODH_TEST_MONGODB_URL="mongodb://localhost:${MONGO_PORT}/AODH" if test -n "$AODH_TEST_HBASE_URL" then export AODH_TEST_HBASE_TABLE_PREFIX=$(hexdump -n 16 -v -e '/1 "%02X"' /dev/urandom) python tools/test_hbase_table_utils.py --upgrade fi # Yield execution to venv command $*
Add quotes around the echo statement to prevent word splitting.
#!/bin/bash ## Author: Hersh Singh [hershdeep@gmail.com] ## Arch Installer Script Configuration # Script configuration AIS_MNT=/mnt AIS_TIMEZONE=US/Eastern AIS_HOST=dabba AIS_USER=hersh # Global configuration export EDITOR=vi # Define colors for 'tput' textrev=$(tput rev) textred=$(tput setaf 1) textblue=$(tput setaf 4) textreset=$(tput sgr0) # Functions to display messages note() { echo ${textblue}$@${textreset} } header() { echo ${textred}$@${textreset} }
#!/bin/bash ## Author: Hersh Singh [hershdeep@gmail.com] ## Arch Installer Script Configuration # Script configuration AIS_MNT=/mnt AIS_TIMEZONE=US/Eastern AIS_HOST=dabba AIS_USER=hersh # Global configuration export EDITOR=vi # Define colors for 'tput' textrev=$(tput rev) textred=$(tput setaf 1) textblue=$(tput setaf 4) textreset=$(tput sgr0) # Functions to display messages note() { echo -e "${textblue}$@${textreset}" } header() { echo -e "${textred}$@${textreset}" }
Use a particular version of dvisvgm
#!/usr/bin/env bash # Exit on error set -e # Echo each command set -x git clone https://github.com/mgieseki/dvisvgm cd dvisvgm ./autogen.sh ./configure --enable-bundled-libs make sudo make install cd .. make web
#!/usr/bin/env bash # Exit on error set -e # Echo each command set -x git clone https://github.com/mgieseki/dvisvgm cd dvisvgm git checkout 6001b3c0d5578f0647bf4cb9caaad0975a9e21d1 ./autogen.sh ./configure --enable-bundled-libs make sudo make install cd .. make web
Add git status in bash
# Sourced by /bin/bash # Inherits from the all shell env. source $HOME/.shellenv # Prompt colors RESET="\e[39m" BLUE="\e[34m" L_MAGENTA="\e[95m" RED="\e[31m" # Custom prompt export PS1="\n$BLUE\w$RESET\n${L_MAGENTA}\$ ❯ $RESET"
# Sourced by /bin/bash # Inherits from the all shell env. source $HOME/.shellenv # Prompt colors RESET="\e[39m" BLUE="\e[34m" L_MAGENTA="\e[95m" RED="\e[31m" DARK_GREY="\e[90m" # Git function helpers function parse_git_dirty { [[ $(git status 2> /dev/null | tail -n1) != "nothing to commit (working directory clean)" ]] && echo "*" } function parse_git_branch { git branch --no-color 2> /dev/null | sed -e '/^[^*]/d' -e "s/* \(.*\)/[\1$(parse_git_dirty)]/" } # Custom prompt export PS1="\n${BLUE}\w ${RESET}${DARK_GREY}$(parse_git_branch)${RESET}\n${L_MAGENTA}\$ > ${RESET}"
Update Netlify script with local Docker image tests
#!/usr/bin/env bash # Install Flit to be able to install all python3 -m pip install flit # Install with Flit python3 -m flit install --deps develop # Finally, run mkdocs python3 -m mkdocs build
#!/usr/bin/env bash set -x set -e # Install pip cd /tmp curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py python3.6 get-pip.py --user cd - # Install Flit to be able to install all python3.6 -m pip install --user flit # Install with Flit python3.6 -m flit install --user --deps develop # Finally, run mkdocs python3.6 -m mkdocs build
Remove unnecessary number from col & tab variables
#!/bin/sh -e # Format output of alias print dump into more readable format awk -F= 'BEGIN { print "ALIAS | COMMAND"; print "--------------------------------"; } /^alias/ { # replace all multi-spaces with a single space gsub(/\s+/, " ", $0); col1Len = 20; aliasLen = length($1); tab1len = col1Len - aliasLen; printf " " $1 " " for (i = 0; i < tab1len; i++) printf "-" # TODO check if command ends in \ # and indent + print continuation. print " " $2 } END { print "--------------------------------"; }' < "$HOME/.zshrc" | grep --color '#.*\|$'
#!/bin/sh -e # Format output of alias print dump into more readable format awk -F= 'BEGIN { print "ALIAS | COMMAND"; print "--------------------------------"; } /^alias/ { # replace all multi-spaces with a single space gsub(/\s+/, " ", $0); colLen = 20; aliasLen = length($1); tablen = colLen - aliasLen; printf " " $1 " " for (i = 0; i < tablen; i++) printf "-" # TODO check if command ends in \ # and indent + print continuation. print " " $2 } END { print "--------------------------------"; }' < "$HOME/.zshrc" | grep --color '#.*\|$'
Fix the benchmark shell script
#!/usr/bin/env bash # # Yet Another UserAgent Analyzer # Copyright (C) 2013-2017 Niels Basjes # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ( cd .. && mvn clean package -DskipTests=true ) version=$(fgrep '<version>' pom.xml | head -1 | sed 's@.*>\(.*\)<.*$@\1@g') echo "Testing version ${version}" && \ java -jar target/benchmarks.jar > version-${version}-$(date +%Y%m%d-%H%M%S).txt
#!/usr/bin/env bash # # Yet Another UserAgent Analyzer # Copyright (C) 2013-2017 Niels Basjes # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an AS IS BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ( cd .. && mvn clean package -DskipTests=true ) || exit version=$(fgrep '<version>' pom.xml | head -1 | sed 's@.*>\(.*\)<.*$@\1@g') echo "Testing version ${version}" && \ java -jar target/benchmarks.jar > version-${version}-$(date +%Y%m%d-%H%M%S).txt
Handle exit codes during package removal a bit better
# travis-worker pre-remove script __stop_travis_worker() { if ! status travis-worker >/dev/null 2>&1 ; then return fi if stop travis-worker ; then return fi echo "Failed to stop travis-worker (exit $?)" echo 'Sending SIGKILL' killall -9 travis-worker if stop travis-worker ; then return fi echo "Failed to stop travis-worker after sending SIGKILL (exit $?)" } __remove_travis_user() { if ! getent passwd travis >/dev/null 2>&1 ; then return fi if userdel travis -r 2>/dev/null ; then return fi echo "Failed to remove travis user (exit $?)" } __remove_travis_user __stop_travis_worker
# travis-worker pre-remove script __stop_travis_worker() { if ! status travis-worker >/dev/null 2>&1 ; then return fi stop travis-worker exit_code="$?" if [ $exit_code -eq 0 ] ; then return fi echo "Failed to stop travis-worker (exit $exit_code)" echo 'Sending SIGKILL' killall -9 travis-worker stop travis-worker exit_code="$?" if [ $exit_code -eq 0 ] ; then return fi echo "Failed to stop travis-worker after sending SIGKILL (exit $exit_code)" } __remove_travis_user() { if ! getent passwd travis >/dev/null 2>&1 ; then return fi userdel travis -r exit_code="$?" if [ $exit_code -eq 0 ] ; then return fi echo "Failed to remove travis user (exit $exit_code)" } set +e __remove_travis_user __stop_travis_worker
Add the /etc/sysconfig/modules/snd_bcm2835.modules example needed to load the sound module
#!/bin/bash # systemd launch script for the speakIP.pl script start() { echo "Starting speakIP..." /root/NativeRaspberryPiBeaconParser/bin/speakIP.pl & } stop() { echo "Stopping speakIP..." pids=`ps ax | grep "speakIP.pl" | awk '{print $1}'` if [ -z "$pids" ] ; then echo "speakIP is not running" else for pid in $pids; do echo "killing " $pid kill $pid done fi } case "$1" in start) start ;; stop) stop ;; restart) stop sleep 1 start ;; *) exit 1 esac
#!/bin/bash # systemd launch script for the speakIP.pl script. This requires espeak and the sound module loaded, for example: # /etc/sysconfig/modules/snd_bcm2835.modules ##!/bin/bash # # exec /sbin/modprobe snd_bcm2835 start() { echo "Starting speakIP..." /root/NativeRaspberryPiBeaconParser/bin/speakIP.pl & } stop() { echo "Stopping speakIP..." pids=`ps ax | grep "speakIP.pl" | awk '{print $1}'` if [ -z "$pids" ] ; then echo "speakIP is not running" else for pid in $pids; do echo "killing " $pid kill $pid done fi } case "$1" in start) start ;; stop) stop ;; restart) stop sleep 1 start ;; *) exit 1 esac
Use defaults for zgen autoupdate
#!/bin/zsh # if the init scipt doesn't exist if ! zgen saved; then zgen load willghatch/zsh-cdr # Fish shell-like syntax highlighting zgen load zsh-users/zsh-syntax-highlighting # Directory listings for zsh with git features zgen load supercrabtree/k # Jump back to a specific parent directory instead of typing cd ../.. redundantly (use bd <name>) zgen load Tarrasch/zsh-bd # anything.el like widget - Trigger with "Ctrl-x ;" Two main actions (Enter/alt-Enter) + tab for list zgen load zsh-users/zaw # a selection of useful git scripts zgen load unixorn/git-extra-commands # Plugin that generates completion functions automatically from getopt-style help texts. zgen load RobSis/zsh-completion-generator # Set up easy auto updating, both of zgen and the bundles loaded in your configuration. set ZGEN_PLUGIN_UPDATE_DAYS=30 set ZGEN_SYSTEM_UPDATE_DAYS=30 zgen load unixorn/autoupdate-zgen # Fish-like suggestions. It suggests commands as you type, based on command history. zgen load zsh-users/zsh-autosuggestions fi
#!/bin/zsh # if the init scipt doesn't exist if ! zgen saved; then zgen load willghatch/zsh-cdr # Fish shell-like syntax highlighting zgen load zsh-users/zsh-syntax-highlighting # Directory listings for zsh with git features zgen load supercrabtree/k # Jump back to a specific parent directory instead of typing cd ../.. redundantly (use bd <name>) zgen load Tarrasch/zsh-bd # anything.el like widget - Trigger with "Ctrl-x ;" Two main actions (Enter/alt-Enter) + tab for list zgen load zsh-users/zaw # a selection of useful git scripts zgen load unixorn/git-extra-commands # Plugin that generates completion functions automatically from getopt-style help texts. zgen load RobSis/zsh-completion-generator # Set up easy auto updating, both of zgen and the bundles loaded in your configuration. zgen load unixorn/autoupdate-zgen # Fish-like suggestions. It suggests commands as you type, based on command history. zgen load zsh-users/zsh-autosuggestions fi
Remove darwin from the case
cite 'about-alias' about-alias 'pbcopy and pbpaste shortcuts to linux' case $OSTYPE in linux*) XCLIP=$(command -v xclip) [[ $XCLIP ]] && alias pbcopy="$XCLIP -selection clipboard" && alias pbpaste="$XCLIP -selection clipboard -o" ;; darwin*) ;; esac # to use it just install xclip on your distribution and it would work like: # $ echo "hello" | pbcopy # $ pbpaste # hello # very useful for things like: # cat ~/.ssh/id_rsa.pub | pbcopy # have fun!
cite 'about-alias' about-alias 'pbcopy and pbpaste shortcuts to linux' case $OSTYPE in linux*) XCLIP=$(command -v xclip) [[ $XCLIP ]] && alias pbcopy="$XCLIP -selection clipboard" && alias pbpaste="$XCLIP -selection clipboard -o" ;; esac # to use it just install xclip on your distribution and it would work like: # $ echo "hello" | pbcopy # $ pbpaste # hello # very useful for things like: # cat ~/.ssh/id_rsa.pub | pbcopy # have fun!
Change the way how conda is installed and remove source.
#!/bin/bash SPARK_URL=http://d3kbcqa49mib13.cloudfront.net/spark-2.1.0-bin-hadoop2.7.tgz SPARK_FILENAME=spark-2.1.0-bin-hadoop2.7.tgz set -e echo "Hello World" apt-get update apt-get -y install wget curl software-properties-common bzip2 # install openjdk add-apt-repository -y ppa:openjdk-r/ppa apt-get update apt-get -y install openjdk-8-jdk # Install Miniconda wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh \ && /bin/bash ~/miniconda.sh -b -p $HOME/conda cat $HOME/.bashrc echo -e '\nexport PATH=$HOME/conda/bin:$PATH' >> $HOME/.bashrc && source $HOME/.bashrc cat $HOME/.bashrc # install Packages $HOME/conda/bin/conda install -y nose numpy pandas # Install Spark curl $SPARK_URL > $HOME/$SPARK_FILENAME tar -xzf $HOME/$SPARK_FILENAME -C $HOME/ echo 'export SPARK_HOME=$HOME/spark-2.1.0-bin-hadoop2.7/' >> $HOME/.bashrc && source $HOME/.bashrc nosetests -vs pyspark-app-ci/tests/test_linear_regression.py # show folders find .
#!/bin/bash SPARK_URL=http://d3kbcqa49mib13.cloudfront.net/spark-2.1.0-bin-hadoop2.7.tgz SPARK_FILENAME=spark-2.1.0-bin-hadoop2.7.tgz set -e echo "Hello World" apt-get update apt-get -y install wget curl software-properties-common bzip2 # install openjdk add-apt-repository -y ppa:openjdk-r/ppa apt-get update apt-get -y install openjdk-8-jdk echo "Install miniconda" wget --quiet https://repo.continuum.io/miniconda/Miniconda3-4.1.11-Linux-x86_64.sh -O ~/miniconda.sh bash ~/miniconda.sh -b -p /home/conda rm ~/miniconda.sh export PATH="/home/conda/bin:$PATH" conda install -y nose numpy pandas # Install Spark curl $SPARK_URL > $HOME/$SPARK_FILENAME tar -xzf $HOME/$SPARK_FILENAME -C $HOME/ echo 'export SPARK_HOME=$HOME/spark-2.1.0-bin-hadoop2.7/' >> $HOME/.bashrc && source $HOME/.bashrc nosetests -vs pyspark-app-ci/tests/test_linear_regression.py # show folders find . exit 0
Enhance Rust Clippy alias: do not rebuild all for lint warnings
alias rust-repl='rustup run nightly-2016-08-01 ~/.cargo/bin/rusti' alias rusti=rust-repl alias clippy='cargo clean && rustup run nightly cargo clippy' alias rust-lint=clippy
alias rust-repl='rustup run nightly-2016-08-01 ~/.cargo/bin/rusti' alias rusti=rust-repl alias clippy='rustup run nightly cargo clippy' alias rust-lint=clippy
Add dim and blink modes :flashlight:
#!/bin/bash normal=$(tput sgr0) bold=$(tput bold) underline=$(tput smul) #Colors fg_black=$(tput setaf 0) fg_red=$(tput setaf 1) fg_green=$(tput setaf 2) fg_yellow=$(tput setaf 3) fg_blue=$(tput setaf 4) fg_magenta=$(tput setaf 5) fg_cyan=$(tput setaf 6) fg_white=$(tput setaf 7)
#!/bin/bash normal=$(tput sgr0) dim=$(tput dim) bold=$(tput bold) underline=$(tput smul) blink=$(tput blink) #Colors fg_black=$(tput setaf 0) fg_red=$(tput setaf 1) fg_green=$(tput setaf 2) fg_yellow=$(tput setaf 3) fg_blue=$(tput setaf 4) fg_magenta=$(tput setaf 5) fg_cyan=$(tput setaf 6) fg_white=$(tput setaf 7)
Fix long filenames and readlink -f incompatibility on OSX
# set -e DIR="$(dirname $(readlink -f $0))" TMUX=$1 NAME=$2 CMD=$3 DETACH=$4 BASE="$DIR/../" WATCHFILE="$BASE.run_$NAME.watch" if [ "$TMUX" = "pid" ]; then PID=`ps ax | grep "$WATCHFILE" | grep rm || echo -1` echo "PID: $PID" exit 0 fi if [ ! -x "$TMUX" ]; then echo "Could not find executable tmux at $TMUX" >&2 exit 100 fi # This is needed for 32 bit tmux export LD_LIBRARY_PATH="$BASE/local/lib:$LD_LIBRARY_PATH" # Kill any existing session "$TMUX" kill-session -t $NAME # Write the watch file echo "-1" > $WATCHFILE # Tell the client to start the monitor echo "MONITOR:1" # Start a new session "$TMUX" new -s $NAME "$CMD; ([ -e '$WATCHFILE' ] && rm '$WATCHFILE')" \ \; set-option -g status off \ \; set-option destroy-unattached off \ \; set-option mouse-select-pane on \ \; set-option set-titles on \ \; set-option remain-on-exit on \ \; set-window-option -g aggressive-resize on \ \; set-option -g prefix C-b \ \; $DETACH # Return the pid PID=`ps ax | grep "$WATCHFILE" | grep rm || echo -1` echo "PID: $PID"
# set -e DIR=$(dirname "$0") TMUX=$1 NAME=$2 CMD=$3 DETACH=$4 BASE="$DIR/../" WATCHFILE="$BASE.run_$NAME.watch" if [ "$TMUX" = "pid" ]; then PID=`ps ax | grep "$WATCHFILE" | grep rm || echo -1` echo "PID: $PID" exit 0 fi if [ ! -x "$TMUX" ]; then echo "Could not find executable tmux at $TMUX" >&2 exit 100 fi # This is needed for 32 bit tmux export LD_LIBRARY_PATH="$BASE/local/lib:$LD_LIBRARY_PATH" # Kill any existing session "$TMUX" kill-session -t $NAME # Write the watch file echo "-1" > $WATCHFILE # Tell the client to start the monitor echo "MONITOR:1" # Start a new session "$TMUX" new -s $NAME "$CMD; ([ -e '$WATCHFILE' ] && rm '$WATCHFILE')" \ \; set-option -g status off \ \; set-option destroy-unattached off \ \; set-option mouse-select-pane on \ \; set-option set-titles on \ \; set-option remain-on-exit on \ \; set-window-option -g aggressive-resize on \ \; set-option -g prefix C-b \ \; $DETACH # Return the pid PID=`ps ax | grep "$WATCHFILE" | grep rm || echo -1` echo "PID: $PID"
Check status before tailing log.
#!/bin/bash tar -zxf $PACKDIR/$PACKFILE -C $WORKDIR cp $CONFIGDIR/$CONFIGFILE $WORKDIR $WORKDIR/control restart $WORKDIR/control tail
#!/bin/bash WAIT_SERVICE_READY=10 function check_service(){ status=$($WORKDIR/control status) echo $status | grep -q "stoped" if [ $? -eq 0 ] ; then return 1 else return 0 fi } tar -zxf $PACKDIR/$PACKFILE -C $WORKDIR cp $CONFIGDIR/$CONFIGFILE $WORKDIR $WORKDIR/control restart sleep $WAIT_SERVICE_READY check_service if [ $? -eq 0 ] ; then $WORKDIR/control tail else echo "Failed to start." exit 1 fi
Revert "Adding remote command to niam deployment" Already taken care of in playbook
#!/bin/bash cd /opt/infrastructure/ansible/niam/ &&\ ansible-galaxy install -r requirements.yml &&\ ansible-playbook site.yml --become-user=ubuntu --vault-password-file /keys/grits_vault_password --private-key /var/lib/jenkins/.ssh/id_rsa --tags niam #Upload new docker image to S3 ssh -i /var/lib/jenkins/.ssh/id_rsa ubuntu@niam.eha.io " sudo rm /tmp/niam.tar.gz sudo docker save niam > /tmp/niam.tar &&\ sudo gzip -1 /tmp/niam.tar &&\ sudo aws s3 cp /tmp/niam.tar.gz s3://bsve-integration/niam.tar.gz sudo rm /tmp/niam.tar.gz " # Notify BSVE to redeploy aws sns publish --topic-arn arn:aws:sns:us-east-1:789867670404:EHA-Git-Lambda --message '{"app":"niam"}' --profile bsve-user
#!/bin/bash cd /opt/infrastructure/ansible/niam/ &&\ ansible-galaxy install -r requirements.yml &&\ ansible-playbook site.yml --become-user=ubuntu --vault-password-file /keys/grits_vault_password --private-key /var/lib/jenkins/.ssh/id_rsa --tags niam #Upload new docker image to S3 sudo docker save niam > /tmp/niam.tar &&\ sudo gzip -1 /tmp/niam.tar &&\ sudo aws s3 cp /tmp/niam.tar.gz s3://bsve-integration/niam.tar.gz sudo rm /tmp/niam.tar.gz # Notify BSVE to redeploy aws sns publish --topic-arn arn:aws:sns:us-east-1:789867670404:EHA-Git-Lambda --message '{"app":"niam"}' --profile bsve-user
Make sure that we use an up to date version of setuptools, wheel, and twine when publishing package.
#!/usr/bin/env bash ( cd ../ rm -r build/ dist/ django_qr_code.egg-info/ python setup.py check && python setup.py sdist && python setup.py bdist_wheel && twine upload dist/* )
#!/usr/bin/env bash ( python -m pip install --upgrade pip pip install --upgrade setuptools wheel twine cd ../ rm -r build/ dist/ django_qr_code.egg-info/ python setup.py check && python setup.py sdist && python setup.py bdist_wheel && twine upload dist/* )
Add CI build for wasm32 platform.
#!/bin/bash echo 'deb http://debian.ethz.ch/debian stretch main contrib' >> /etc/apt/sources.list apt update apt upgrade -y apt install -y clang cmake build-essential libxxf86vm-dev libxrandr-dev xorg-dev libglu1-mesa-dev libxrandr2 libglfw3 libglfw3-dev # libglfw3-wayland cargo test
#!/bin/bash echo 'deb http://debian.ethz.ch/debian stretch main contrib' >> /etc/apt/sources.list apt update apt upgrade -y apt install -y clang cmake build-essential libxxf86vm-dev libxrandr-dev xorg-dev libglu1-mesa-dev libxrandr2 libglfw3 libglfw3-dev # libglfw3-wayland cargo test cargo build --target wasm32-unknown-unknown cargo build --target wasm32-unknown-asmjs
Add `--delete` option to rsync to remove old files
#!/bin/bash set -e # exit with nonzero exit code if anything fails # clear and re-create the out directory if [[ $TRAVIS_BRANCH == 'master' ]] && [[ $TRAVIS_PULL_REQUEST = false ]] then # add remote ssh-key to key-storage # first add remote host to known hosts ssh-keyscan -t rsa $DEPLOY_HOST 2> /dev/null | sort -u - ~/.ssh/known_hosts -o ~/.ssh/known_hosts # decrypt private shh key openssl aes-256-cbc -K $encrypted_2adf16dc08ac_key -iv $encrypted_2adf16dc08ac_iv -in deploy_key.enc -out deploy_key -d # start ssh-agent and add the key eval "$(ssh-agent -s)" chmod 600 deploy_key ssh-add deploy_key rm -rf ~/out || exit 0; # compile the website bundle exec jekyll build -d ~/out --config _uberspace_config.yml # upload site rsync -rq ~/out/* $DEPLOY_USER@$DEPLOY_HOST:$DEPLOY_PATH else echo "NOT ON MASTER BRANCH, WILL NOT DEPLOY SITE" fi
#!/bin/bash set -e # exit with nonzero exit code if anything fails # clear and re-create the out directory if [[ $TRAVIS_BRANCH == 'master' ]] && [[ $TRAVIS_PULL_REQUEST = false ]] then # add remote ssh-key to key-storage # first add remote host to known hosts ssh-keyscan -t rsa $DEPLOY_HOST 2> /dev/null | sort -u - ~/.ssh/known_hosts -o ~/.ssh/known_hosts # decrypt private shh key openssl aes-256-cbc -K $encrypted_2adf16dc08ac_key -iv $encrypted_2adf16dc08ac_iv -in deploy_key.enc -out deploy_key -d # start ssh-agent and add the key eval "$(ssh-agent -s)" chmod 600 deploy_key ssh-add deploy_key rm -rf ~/out || exit 0; # compile the website bundle exec jekyll build -d ~/out --config _uberspace_config.yml # upload site rsync -rq --delete ~/out/* $DEPLOY_USER@$DEPLOY_HOST:$DEPLOY_PATH else echo "NOT ON MASTER BRANCH, WILL NOT DEPLOY SITE" fi
Add the whatis database to catpages too
#!/bin/sh # Create the catpages dist - must follow manpages dist script, for obvious # reasons. if [ -d ${RD}/trees/manpages/usr/share/man ]; then su -m man -c 'catman ${RD}/trees/manpages/usr/share/man' > /dev/null 2>&1; ( cd ${RD}/trees/manpages/usr/share/man; find cat* | cpio -dumpl ${RD}/trees/catpages/usr/share/man ) && rm -rf ${RD}/trees/manpages/usr/share/man/cat*; fi
#!/bin/sh # Create the catpages dist - must follow manpages dist script, for obvious # reasons. if [ -d ${RD}/trees/manpages/usr/share/man ]; then su -m man -c 'catman ${RD}/trees/manpages/usr/share/man' > /dev/null 2>&1; ( cd ${RD}/trees/manpages/usr/share/man; find cat* whatis | cpio -dumpl ${RD}/trees/catpages/usr/share/man ) && rm -rf ${RD}/trees/manpages/usr/share/man/cat*; fi
Set MANPAGER manually (WHY ?)
#!/bin/bash --norc ManpageTitle=$(ps -p $(ps -p $$ -o ppid=) -o args= | awk '{print $NF}') ManpageTitle="MANPAGE\ :\ $ManpageTitle" col -b | gvim -R -c -geometry 120x65 "set ft=man nomod nolist titlestring=$ManpageTitle" - > /dev/null
#!/bin/bash --norc ManpageTitle=$(ps -p $(ps -p $$ -o ppid=) -o args= | awk '{print $NF}') ManpageTitle="MANPAGE\ :\ $MAN_PN" col -b | MANPAGER='toto' gvim -geometry 120x65 -R -c "set ft=man nomod noma nolist titlestring=$ManpageTitle" -c "let \$MANPAGER='$MANPAGER'" - > /dev/null
Install pyp from forked repository.
#!/bin/bash set -e # Script for installing pyp (Python Power at the Prompt) # Prerequisites: - gcc # - wget name="pyp" version="" target_dir=$ROOT package=$name-$version # download package tmp_dir=$(mktemp -d) && cd $tmp_dir && echo $tmp_dir wget -O $name 'https://docs.google.com/uc?authuser=0&id=0B3RW1AtsOguXNFdjQU1RT000Zzg&export=download' # install package install $name $target_dir/bin
#!/bin/bash set -e # Script for installing pyp (Python Power at the Prompt) # Prerequisites: - gcc # - wget name="pyp" version="2.13-beta" target_dir=$ROOT package=$name-$version # download package tmp_dir=$(mktemp -d) && cd $tmp_dir && echo $tmp_dir wget -O $package.tar.gz "https://github.com/djhshih/pyp/archive/v${version}.tar.gz" mkdir $package tar -xzf $package.tar.gz -C $package --strip-components=1 cd $package # install package make install DESTDIR=$target_dir
Replace the command with the right one (since we have certbot now)
#!/bin/bash # TODO detect that the certificate got changed and restart apache letsencrypt-renewer --config-dir /etc/letsencrypt/
#!/bin/bash # TODO detect that the certificate got changed and restart apache certbot renew
Reduce verbosity of wget in CI scripts
[ "$COVERITY_BUILD" -ne 0 ] || exit 0 ARCHIVE="coverity_tool.tgz" WithMsg "Testing token" test "${COVERITY_SCAN_TOKEN}" && WithMsg "Downloading coverity scan tool" wget https://scan.coverity.com/download/linux64 --post-data "token=${COVERITY_SCAN_TOKEN}&project=koplyarov%2Fjoint" -O "$ARCHIVE" && Verbose tar -xvzf "$ARCHIVE" || exit 1
[ "$COVERITY_BUILD" -ne 0 ] || exit 0 ARCHIVE="coverity_tool.tgz" WithMsg "Testing token" test "${COVERITY_SCAN_TOKEN}" && WithMsg "Downloading coverity scan tool" wget --no-verbose https://scan.coverity.com/download/linux64 --post-data "token=${COVERITY_SCAN_TOKEN}&project=koplyarov%2Fjoint" -O "$ARCHIVE" && Verbose tar -xvzf "$ARCHIVE" || exit 1
Install PeachPy regardless of whether or not the source exists
#!/bin/bash -ex USE_NDK=false if [[ "$1" = "--ndk" ]]; then USE_NDK=true if [[ ! -d $ANDROID_NDK ]]; then echo Error: ANDROID_NDK not a directory: $ANDROID_NDK exit 1 fi fi if [[ ! -d NNPACK ]]; then rm -rf NNPACK git clone --recursive git@github.com:silklabs/NNPACK.git fi cd NNPACK/ if [[ ! -d env ]]; then virtualenv env fi source env/bin/activate pip install ninja-syntax if [[ ! -d PeachPy ]]; then git clone https://github.com/Maratyszcza/PeachPy.git ( cd PeachPy pip install --upgrade -r requirements.txt python setup.py generate pip install --upgrade . ) fi if $USE_NDK; then # Select right platform and ABI cat > jni/Application.mk <<EOF APP_PLATFORM := android-21 APP_PIE := true APP_ABI := armeabi-v7a APP_STL := c++_static NDK_TOOLCHAIN_VERSION := clang EOF $ANDROID_NDK/ndk-build -j$(nproc) else python ./configure.py --enable-shared ninja fi exit 0
#!/bin/bash -ex USE_NDK=false if [[ "$1" = "--ndk" ]]; then USE_NDK=true if [[ ! -d $ANDROID_NDK ]]; then echo Error: ANDROID_NDK not a directory: $ANDROID_NDK exit 1 fi fi if [[ ! -d NNPACK ]]; then rm -rf NNPACK git clone --recursive git@github.com:silklabs/NNPACK.git fi cd NNPACK/ if [[ ! -d env ]]; then virtualenv env fi source env/bin/activate pip install ninja-syntax if [[ ! -d PeachPy ]]; then git clone https://github.com/Maratyszcza/PeachPy.git fi if ! pip list --format=legacy | grep -q PeachPy; then ( cd PeachPy pip install --upgrade -r requirements.txt python setup.py generate pip install --upgrade . ) fi if $USE_NDK; then # Select right platform and ABI cat > jni/Application.mk <<EOF APP_PLATFORM := android-21 APP_PIE := true APP_ABI := armeabi-v7a APP_STL := c++_static NDK_TOOLCHAIN_VERSION := clang EOF $ANDROID_NDK/ndk-build -j$(nproc) else python ./configure.py --enable-shared ninja fi exit 0
Use mysqld to replace mysqld_safe
#!/bin/bash # Starts up MariaDB within the container. # Stop on error set -e DATA_DIR=/data LOG_DIR=/var/log/mysql chown -R mysql /var/log/mysql chown -R mysql /data if [[ -e /firstrun ]]; then source /scripts/first_run.sh else source /scripts/normal_run.sh fi pre_start_action post_start_action # Start MariaDB echo "Starting MariaDB..." exec /usr/bin/mysqld_safe
#!/bin/bash # Starts up MariaDB within the container. # Stop on error set -e DATA_DIR=/data LOG_DIR=/var/log/mysql chown -R mysql /var/log/mysql chown -R mysql /data if [[ -e /firstrun ]]; then source /scripts/first_run.sh else source /scripts/normal_run.sh fi pre_start_action post_start_action # Start MariaDB echo "Starting MariaDB..." exec mysqld
Update & modernize Plan style.
pkg_name=libarchive pkg_distname=$pkg_name pkg_origin=core pkg_version=3.3.2 pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>" pkg_description="Multi-format archive and compression library" pkg_upstream_url=https://www.libarchive.org pkg_license=('BSD') pkg_source=http://www.libarchive.org/downloads/${pkg_distname}-${pkg_version}.tar.gz pkg_shasum=ed2dbd6954792b2c054ccf8ec4b330a54b85904a80cef477a1c74643ddafa0ce pkg_dirname=${pkg_distname}-${pkg_version} pkg_deps=(core/glibc core/openssl core/zlib core/bzip2 core/xz) pkg_build_deps=(core/gcc core/coreutils core/make) pkg_include_dirs=(include) pkg_lib_dirs=(lib) do_build() { ./configure \ --prefix="$pkg_prefix" \ --without-xml2 \ --without-lzo2 make } do_check() { make check }
pkg_name=libarchive _distname=$pkg_name pkg_origin=core pkg_version=3.3.2 pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>" pkg_description="Multi-format archive and compression library" pkg_upstream_url="https://www.libarchive.org" pkg_license=('BSD') pkg_source="http://www.libarchive.org/downloads/${_distname}-${pkg_version}.tar.gz" pkg_shasum="ed2dbd6954792b2c054ccf8ec4b330a54b85904a80cef477a1c74643ddafa0ce" pkg_dirname="${_distname}-${pkg_version}" pkg_deps=( core/glibc core/openssl core/zlib core/bzip2 core/xz ) pkg_build_deps=( core/gcc core/coreutils core/make ) pkg_include_dirs=(include) pkg_lib_dirs=(lib) do_build() { ./configure \ --prefix="$pkg_prefix" \ --without-xml2 \ --without-lzo2 make } do_check() { make check }
Add directory if it doesn't exist
#!/bin/sh rm -Rf build/* cp -R haxelib.json haxedoc.xml src demo LICENSE.txt README.md build-detox.hxml build-jquery.hxml build cd build zip -r package.zip haxelib.json haxedoc.xml src demo LICENSE.txt README.md build-detox.hxml build-jquery.hxml cd ..
#!/bin/sh mkdir -p build rm -Rf build/* cp -R haxelib.json haxedoc.xml src demo LICENSE.txt README.md build-detox.hxml build-jquery.hxml build cd build zip -r package.zip haxelib.json haxedoc.xml src demo LICENSE.txt README.md build-detox.hxml build-jquery.hxml cd ..
Add alias for show merge conflicted files
# The rest of my fun git aliases alias gl='git pull --prune' alias glog="git log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative" alias gp='git push origin HEAD' alias gd='git diff' alias gc='git commit' alias gca='git commit -a' alias gco='git checkout' alias gb='git branch' alias gs='git status -sb' # upgrade your git if -sb breaks for you. it's fun. alias grm="git status | grep deleted | awk '{\$1=\$2=\"\"; print \$0}' | \ perl -pe 's/^[ \t]*//' | sed 's/ /\\\\ /g' | xargs git rm"
# The rest of my fun git aliases alias gl='git pull --prune' alias glog="git log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative" alias gp='git push origin HEAD' alias gd='git diff' alias gc='git commit' alias gca='git commit -a' alias gco='git checkout' alias gb='git branch' alias gs='git status -sb' # upgrade your git if -sb breaks for you. it's fun. alias grm="git status | grep deleted | awk '{\$1=\$2=\"\"; print \$0}' | \ perl -pe 's/^[ \t]*//' | sed 's/ /\\\\ /g' | xargs git rm" alias gmc='git diff --name-only --diff-filter=U'
Use camelCase for npm aliases
eval "$(npm completion 2>/dev/null)" # Install and save to dependencies alias npms="npm i -S " # Install and save to dev-dependencies alias npmd="npm i -D "
eval "$(npm completion 2>/dev/null)" # npm package names are lowercase # - https://twitter.com/substack/status/23122603153150361 # Thus, we've used camelCase for the following aliases: # Install and save to dependencies in your package.json # npms is used by https://www.npmjs.com/package/npms alias npmS="npm i -S " # Install and save to dev-dependencies in your package.json # npmd is used by https://github.com/dominictarr/npmd alias npmD="npm i -D "
Clarify the scope of a comment.
#!/bin/bash set -eux cd git/re2 case "${KOKORO_JOB_NAME}" in */windows-*) CMAKE_G_A_FLAGS=('-G' 'Visual Studio 14 2015' '-A' 'x64') ;; *) # Work around a bug in older versions of bash. :/ set +u CMAKE_G_A_FLAGS=() ;; esac cmake -D CMAKE_BUILD_TYPE=Debug "${CMAKE_G_A_FLAGS[@]}" . cmake --build . --config Debug --clean-first ctest -C Debug --output-on-failure -E 'dfa|exhaustive|random' cmake -D CMAKE_BUILD_TYPE=Release "${CMAKE_G_A_FLAGS[@]}" . cmake --build . --config Release --clean-first ctest -C Release --output-on-failure -E 'dfa|exhaustive|random' exit 0
#!/bin/bash set -eux cd git/re2 case "${KOKORO_JOB_NAME}" in */windows-*) CMAKE_G_A_FLAGS=('-G' 'Visual Studio 14 2015' '-A' 'x64') ;; *) CMAKE_G_A_FLAGS=() # Work around a bug in older versions of bash. :/ set +u ;; esac cmake -D CMAKE_BUILD_TYPE=Debug "${CMAKE_G_A_FLAGS[@]}" . cmake --build . --config Debug --clean-first ctest -C Debug --output-on-failure -E 'dfa|exhaustive|random' cmake -D CMAKE_BUILD_TYPE=Release "${CMAKE_G_A_FLAGS[@]}" . cmake --build . --config Release --clean-first ctest -C Release --output-on-failure -E 'dfa|exhaustive|random' exit 0
Install Carp::Always outside of Carton too
#!/bin/bash set -u set -o errexit PWD="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" source "$PWD/set_mc_root_dir.inc.sh" cd "$MC_ROOT_DIR" source ./script/set_perl_brew_environment.sh echo "Using Perl version: `perl -e 'print substr($^V, 1)'`" if [ `uname` == 'Darwin' ]; then # Mac OS X CPANM=/usr/local/bin/cpanm else # assume Ubuntu CPANM=cpanm fi $CPANM CPAN~2.10 # 1.0.9 or newer # (if the install of Carton 1.0.9 fails because of CPAN::Meta failure, # purge Perl with ./install_scripts/purge_mc_perl_brew.sh and # ./install_scripts/purge_carton_install.sh) $CPANM Carton~1.0.22 $CPANM List::MoreUtils # Install profiler and tools $CPANM Devel::NYTProf $CPANM Devel::Cover $CPANM lib::core::only
#!/bin/bash set -u set -o errexit PWD="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" source "$PWD/set_mc_root_dir.inc.sh" cd "$MC_ROOT_DIR" source ./script/set_perl_brew_environment.sh echo "Using Perl version: `perl -e 'print substr($^V, 1)'`" if [ `uname` == 'Darwin' ]; then # Mac OS X CPANM=/usr/local/bin/cpanm else # assume Ubuntu CPANM=cpanm fi $CPANM CPAN~2.10 # 1.0.9 or newer # (if the install of Carton 1.0.9 fails because of CPAN::Meta failure, # purge Perl with ./install_scripts/purge_mc_perl_brew.sh and # ./install_scripts/purge_carton_install.sh) $CPANM Carton~1.0.22 $CPANM List::MoreUtils # Always print stack traces when die()ing $CPANM Carp::Always # Install profiler and tools $CPANM Devel::NYTProf $CPANM Devel::Cover $CPANM lib::core::only
Make test checks for changes before committing
the_terminal_is_very_narrow() { [ `tput cols` -lt 80 ] } echo_wide_success() { echo -n " Success." echo " " } echo_wide_failure() { echo -n " Failure." echo " " } echo_narrow_success() { echo "Success." } echo_narrow_failure() { echo "Failure." } echo_success() { if the_terminal_is_very_narrow; then echo_narrow_success else echo_wide_success fi } echo_failure() { if the_terminal_is_very_narrow; then echo_narrow_failure else echo_wide_failure fi } echo "" if bash unittests/.quick_test.sh; then echo "" make clean echo_success git add . git commit -v || git status else echo "" echo_failure fi
the_terminal_is_very_narrow() { [ `tput cols` -lt 80 ] } echo_wide_success() { echo -n " Success." echo " " } echo_wide_failure() { echo -n " Failure." echo " " } echo_narrow_success() { echo "Success." } echo_narrow_failure() { echo "Failure." } echo_success() { if the_terminal_is_very_narrow; then echo_narrow_success else echo_wide_success fi } echo_failure() { if the_terminal_is_very_narrow; then echo_narrow_failure else echo_wide_failure fi } changes_have_been_made() { [[ `git status --porcelain` ]] } echo "" if bash unittests/.quick_test.sh; then echo "" make clean echo_success if changes_have_been_made; then git add . git commit -v || git status else git status fi else echo "" echo_failure fi
Add --warnings-as-errors to smoke test
#!/bin/bash set -e GENEREATE_CREDO_CHECK="lib/my_first_credo_check.ex" mix credo --mute-exit-status mix credo list --mute-exit-status mix credo suggest --mute-exit-status mix credo lib/credo/sources.ex:1:11 --mute-exit-status mix credo explain lib/credo/sources.ex:1:11 --mute-exit-status mix credo.gen.check $GENEREATE_CREDO_CHECK rm $GENEREATE_CREDO_CHECK mix credo.gen.config mix credo categories mix credo info mix credo info --verbose mix credo info --format=json mix credo info --verbose --format=json mix credo version mix credo help mix credo -v mix credo -h echo "" echo "Smoke test succesful."
#!/bin/bash set -e GENEREATE_CREDO_CHECK="lib/my_first_credo_check.ex" mix compile --force --warnings-as-errors mix credo --mute-exit-status mix credo list --mute-exit-status mix credo suggest --mute-exit-status mix credo lib/credo/sources.ex:1:11 --mute-exit-status mix credo explain lib/credo/sources.ex:1:11 --mute-exit-status mix credo.gen.check $GENEREATE_CREDO_CHECK rm $GENEREATE_CREDO_CHECK mix credo.gen.config mix credo categories mix credo info mix credo info --verbose mix credo info --format=json mix credo info --verbose --format=json mix credo version mix credo help mix credo -v mix credo -h echo "" echo "Smoke test succesful."