Dataset columns:
- Instruction: string, lengths 14–778
- input_code: string, lengths 0–4.24k
- output_code: string, lengths 1–5.44k
Add script to execute/report metrics
#!/bin/bash
#
# Copyright (c) 2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Run the metrics scripts and e-mail a report of the results.

set -e  # abort on the first failing step so a broken tool install is not reported as success

CURRENTDIR=$(dirname "$(readlink -f "$0")")
REPORT_CMDS=("checkmetrics" "emailreport")

# RUNTIME names the container runtime under test; fail early if unset
# instead of silently passing an empty argument to the workload script.
: "${RUNTIME:?RUNTIME must be set to the runtime being measured}"

# Verify/install report tools. These tools will
# parse/send the results from metrics scripts execution.
for cmd in "${REPORT_CMDS[@]}"; do
	if ! command -v "$cmd" > /dev/null 2>&1; then
		pushd "$CURRENTDIR/../cmd/$cmd"
		make
		make install
		popd
	fi
done

# Execute metrics scripts and report the results by email.
pushd "$CURRENTDIR/../metrics"
source "lib/common.bash"

# Run the time tests
bash time/docker_workload_time.sh true busybox "$RUNTIME" 100

# Parse/Report results
emailreport

# Clean env
rm -rf "results"
popd

exit 0
Add a script to simplify testing a plugin in the local testsite
#!/bin/bash
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script compiles a Gerrit plugin whose name is passed as first parameter
# and copies it over to the plugin folder of the testsite. The path to the
# testsite needs to be provided by the variable GERRIT_TESTSITE or as second
# parameter.

SCRIPT_DIR=$(dirname -- "$(readlink -f -- "$BASH_SOURCE")")
GERRIT_CODE_DIR="$SCRIPT_DIR/.."
cd "$GERRIT_CODE_DIR" || exit 1

if [ "$#" -lt 1 ]
then
  echo "No plugin name provided as first argument. Stopping."
  exit 1
else
  PLUGIN_NAME="$1"
fi

if [ "$#" -lt 2 ]
then
  if [ -z ${GERRIT_TESTSITE+x} ]
  then
    echo "Path to local testsite is neither set as GERRIT_TESTSITE nor passed as second argument. Stopping."
    exit 1
  fi
else
  GERRIT_TESTSITE="$2"
fi

if [ ! -d "$GERRIT_TESTSITE" ]
then
  echo "Testsite directory $GERRIT_TESTSITE does not exist. Stopping."
  exit 1
fi

# Test the command directly instead of the fragile "run; check $?" pattern.
if ! bazel build //plugins/"$PLUGIN_NAME"/...
then
  echo "Building the $PLUGIN_NAME plugin failed"
  exit 1
fi

if yes | cp -f "$GERRIT_CODE_DIR/bazel-genfiles/plugins/$PLUGIN_NAME/$PLUGIN_NAME.jar" "$GERRIT_TESTSITE/plugins/"
then
  echo "Plugin $PLUGIN_NAME copied successfully to testsite."
fi
Add script to install rust toolchains
#! /bin/bash # Install the rust toolchain to specified version and variant if [[ "$#" -ne 3 || $1 == "--help" ]]; then echo "Usage: install-rust-toolchain.sh <rust dir> <build version|rust-toolchain path> <target>" exit 1 fi RUST_DIR=$1 BUILD_TOOLCHAIN=$2 TARGET=$3 RUSTUP_BIN="${RUST_DIR}/bin/rustup" mkdir -p "${RUST_DIR}" if [[ ! -f "${RUSTUP_BIN}" ]]; then bash -c 'curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path' fi if [[ -f "${BUILD_TOOLCHAIN}" ]]; then BUILD_PROJECT=$(dirname $(realpath "${BUILD_TOOLCHAIN}")) echo "Build rust toolchain specified for project ${BUILD_PROJECT}..." cd "${BUILD_PROJECT}"; "${RUSTUP_BIN}" target add "${TARGET}" else echo "Build specified toolchain ${BUILD_TOOLCHAIN}..." "${RUSTUP_BIN}" "+${BUILD_TOOLCHAIN}" target add "${TARGET}" fi
Add a test for named pipes.
#!/bin/bash -x source $(dirname $0)/functions.sh LOGS=${TEST_TMPDIR}/logs PROGS=${TEST_TMPDIR}/progs mkdir -p $LOGS $PROGS mkfifo $LOGS/logpipe start_server --logtostderr -vmodule=tail=2 --progs $PROGS --logs $LOGS/* echo 1 >> $LOGS/logpipe uri_get /debug/vars expect_json_field_eq 1 line_count "${DATA}" cat >> $LOGS/logpipe <<EOF 2 3 EOF uri_get /debug/vars expect_json_field_eq 3 line_count "${DATA}" pass
Add script to assist those not wanting to build canvas :D
#! /bin/bash # Tired of building canvas? # now you can add a `image: instructure/canvas-lms:master` to your docker-compose.local.yml # then just `docker-compose pull web` # note if you do a `docker-compose build` it will try to build everything # but if you do a `docker-compose up` it will just use the image you pulled down # there is some weirdness to the way canvas is setup, that make this a little more # difficult. We need to copy the Gemfile.lock from the container, and nuke stale # volumes. You should be able to go while without pulling the new image down by # running `bundle` and `yarn install` in your container after pulling # # ```yml # version: '2' # services: # web: &WEB # image: instructure/canvas-lms:master # build: # context: . # guard: # <<: *WEB # jobs: # <<: *WEB # webpack: # <<: *WEB # ``` # docker-compose down docker-compose pull web docker volume rm canvaslms_bundler canvaslms_canvas-docker-gems canvaslms_node_modules canvaslms_quizzes_node_modules canvaslms_selinimum_node_modules canvaslms_yarn-cache docker run --rm instructure/canvas-lms:master cat Gemfile.lock > Gemfile.lock docker-compose run --rm web bash -c "bundle; bundle exec rake db:migrate"
Add script to create xpi
#!/bin/bash

# Package the add-on directory into addon.xpi (stored, no compression),
# skipping this pack script and editor swap files.

DIRECTORY=$(dirname -- "$0")

rm -f -- "$DIRECTORY/addon.xpi"
# Quote the exclude patterns: an unquoted *.swp would be expanded by the
# shell before zip ever saw it, so swap files were only excluded by luck.
zip "$DIRECTORY/addon.xpi" --recurse-paths -0 "$DIRECTORY"/* \
    --exclude "$DIRECTORY/pack.sh" --exclude '*.swp'
Add a script for creating a query manipulation index and query profile with the same names as the Find defaults
#!/bin/bash

# Create a query manipulation index and a query profile with the same
# names as the Find defaults.

INDEX="search_default_index"
PROFILE="search_default_profile"
ENDPOINT="api.idolondemand.com"

# Read arguments.
# NOTE: the original used [[ $# > 0 ]], which is a *lexical* string
# comparison inside [[ ]]; use an arithmetic test instead.
while (( $# > 0 ))
do
  key="$1"
  case $key in
    -e|--endpoint)
      ENDPOINT="$2"
      shift
      ;;
    -a|--apikey)
      APIKEY="$2"
      shift
      ;;
  esac
  shift
done

# The API calls are useless without a key; fail fast with a clear message.
if [ -z "${APIKEY:-}" ]; then
  echo "An API key must be provided with -a/--apikey. Stopping." >&2
  exit 1
fi

echo "Using $ENDPOINT v1 and API key $APIKEY"

# Create index
echo "Creating index $INDEX"
curl "https://$ENDPOINT/1/api/sync/createtextindex/v1?flavor=querymanipulation&apikey=$APIKEY&index=$INDEX"
echo -e "\n"

# Create query profile
echo "Creating query profile $PROFILE"
curl "https://$ENDPOINT/1/api/sync/createqueryprofile/v1?query_manipulation_index=$INDEX&promotions_enabled=true&promotion_categories=default&promotions_identified=true&synonyms_enabled=true&synonym_categories=default&blacklists_enabled=true&blacklist_categories=default&query_profile=$PROFILE&apikey=$APIKEY"
echo -e "\nDone"
Add a shell script for submitting ftp export jobs to lsf
# Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################## # RNAcentral FTP export using the EBI LSF cluster. # # Requires UCSC tools (bedToBigBed and fetchChromSizes) # # Usage: # . ftp_export_parallel.sh /path/to/destination/directory /path/to/ucsc/tools ############################################################################## destination=$1 # first command line argument ucsc_tools=$2 # second command line argument # make a directory for LSF log files if it does not exist mkdir -p logs # create the output directory if it does not exist mkdir -p $destination # xrefs bsub -o logs/xrefs_lsfreport.txt -e logs/xrefs_output.txt python manage.py ftp_export -f xrefs -d $destination # fasta bsub -o logs/fasta_lsfreport.txt -e logs/fasta_output.txt python manage.py ftp_export -f fasta -d $destination # gff bsub -o logs/gff_lsfreport.txt -e logs/gff_output.txt python manage.py ftp_export -f gff -d $destination # gff3 bsub -o logs/gff3_lsfreport.txt -e logs/gff3_output.txt python manage.py ftp_export -f gff3 -d $destination # md5 bsub -o logs/md5_lsfreport.txt -e logs/md5_output.txt python manage.py ftp_export -f md5 -d $destination # bed and UCSC trackhub, `bed` must preceed `trackhub` because the trackhub job uses bigbed files produced by `bed` bsub -o logs/bed_lsfreport.txt -e logs/bed_output.txt python manage.py 
ftp_export -f bed -d $destination -b $ucsc_tools && python manage.py ftp_export -f trackhub -d $destination
Fix base path of CLI bash script
#!/usr/bin/env bash
# Launch the clusternator CLI, forwarding every argument verbatim.
# "$@" (quoted) preserves arguments containing spaces; bare $@ does not.
node ./bin/clusternatorCli-es5.js "$@"
#!/bin/bash SOURCE="${BASH_SOURCE[0]}" while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" SOURCE="$(readlink "$SOURCE")" [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located done DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" SCRIPT=/clusternatorCli-es5.js node $DIR$SCRIPT $@
Add script to rebuild all modules
#!/bin/sh
####
# This script will try to build every available compression module.
# If one build fails it continues to the next module (hence no `set -e`).
###

THIS_DIR=$(dirname "$0")

# Any first argument enables extra optimization.
EXTRA_OPT=$1
if [ -n "${EXTRA_OPT}" ]; then
    EXTRA_OPT="--extra-optimization"
else
    EXTRA_OPT=""
fi

cd "${THIS_DIR}/../lib-dynload" || exit 1

# Iterate over directories with a glob instead of parsing `ls` output.
for mdir in */; do
    if [ -d "${mdir}" ]; then
        cd "${mdir}" || continue
        python3 setup.py clean -a
        # ${EXTRA_OPT} is deliberately unquoted: when empty it must vanish
        # rather than become an empty positional argument.
        python3 setup.py build_ext ${EXTRA_OPT}
        python3 setup.py build_ext clean
        cd ..
    fi
done
Check codestyle only in C,C++ sources and header files.
#!/bin/bash # Copyright 2014 Red Hat Inc., Durham, North Carolina. # All Rights Reserved. set -e -o pipefail . $srcdir/../test_common.sh function test_illicit_function_use { codebase=$top_srcdir/src if grep -r xmlReaderForFile $codebase; then echo "xmlReaderForFile is not allowed within OpenSCAP project. Please make a use of oscap_source facility." return 1; fi if grep -r xmlTextReaderSetErrorHandler $codebase; then echo "xmlTextReaderSetErrorHandler is not allowed within OpenSCAP project. Please make a use of oscap_source facility." return 1; fi if grep -r xmlTextReaderReadString $codebase; then echo "xmlTextReaderReadString is not allowed within OpenSCAP project. Its implementation in libxml does not play well with xmlWalkerReader." return 1; fi } test_init "test_codebase.log" test_run "illicit use of functions" test_illicit_function_use 0 test_exit
#!/bin/bash # Copyright 2014 Red Hat Inc., Durham, North Carolina. # All Rights Reserved. set -e -o pipefail . $srcdir/../test_common.sh function test_illicit_function_use { codebase=$(find $top_srcdir/src/ -regex '.*\.[ch]x*') if grep xmlReaderForFile $codebase; then echo "xmlReaderForFile is not allowed within OpenSCAP project. Please make a use of oscap_source facility." return 1; fi if grep xmlTextReaderSetErrorHandler $codebase; then echo "xmlTextReaderSetErrorHandler is not allowed within OpenSCAP project. Please make a use of oscap_source facility." return 1; fi if grep xmlTextReaderReadString $codebase; then echo "xmlTextReaderReadString is not allowed within OpenSCAP project. Its implementation in libxml does not play well with xmlWalkerReader." return 1; fi } test_init "test_codebase.log" test_run "illicit use of functions" test_illicit_function_use 0 test_exit
Create a Trisquel setup file.
#!/bin/sh ESSENTIALS="build-essential curl git wget" ## Add this one when it shows up in the repo: ## libsdl1.2-dev sudo apt-get update && sudo apt-get dist-upgrade -y sudo apt-get autoclean && sudo apt-get clean && sudo apt-get autoremove -y sudo apt-get install $ESSENTIALS wget -N https://raw.githubusercontent.com/ryanpcmcquen/linuxTweaks/master/.genericLinuxConfig.sh -P ~/ sh ~/.genericLinuxConfig.sh ## Haskell! curl -sSL https://get.haskellstack.org/ | sh ## Reset to haskell-vim-now's settings: ln -sf ~/.config/haskell-vim-now/.vimrc ~/ ln -sf ~/.config/haskell-vim-now/.vim ~/ curl -L https://git.io/haskell-vim-now > /tmp/haskell-vim-now.sh bash /tmp/haskell-vim-now.sh
Add script for third party deps
#!/usr/bin/env bash #title :install_from_vch.sh #description :This script installs files stored on github to local mvn repo #author :a-cordier #============================================================================== set -e : ${1?Github url must be passed as a first argument} : ${2?Group id must be passed as a second argument} : ${3?Artifact id must be passed as a second argument} : ${4?Version must be passed as a second argument} GH_URL="$1" GROUP_ID="$2" ARTIFACT_ID="$3" VERSION="$4" TARGET="" function get_sources(){ TARGET=$(mktemp -d) git clone "$GH_URL" "$TARGET" } function install_files(){ pushd "$TARGET" mvn clean install -DskipTests -Djar.finalName="$ARTIFACT_ID" mvn install:install-file -Dfile=target/"$ARTIFACT_ID".jar -DgroupId="$GROUP_ID" -DartifactId="$ARTIFACT_ID" -Dversion="$VERSION" -Dpackaging=jar popd } function main(){ get_sources && install_files } main
Create db install sh script
#!/bin/bash

# Provision the "ecampus" MongoDB database with its initial collections.
# The quoted delimiter makes the heredoc literal (nothing to expand here).
mongo <<'EOF'
use ecampus
db.createCollection("calendar")
db.createCollection("course")
db.createCollection("user")
exit
EOF

echo "Database is now installed"
Add end-to-end for TLS testing
#!/bin/bash TMP_DATA=`mktemp` rm $GOPATH/bin/* go install ./... openssl req -x509 -nodes -newkey rsa:4096 -keyout ${TMP_DATA}_key.pem -out ${TMP_DATA}_cert.pem -days 365 $GOPATH/bin/rqlited -http localhost:4001 -raft localhost:4002 -nodex509cert ${TMP_DATA}_cert.pem -nodex509key ${TMP_DATA}_key.pem -nonodeverify -encrypt ${TMP_DATA}_1 & sleep 5 $GOPATH/bin/rqlited -http localhost:4003 -raft localhost:4004 -join http://localhost:4001 -nodex509cert ${TMP_DATA}_cert.pem -nodex509key ${TMP_DATA}_key.pem -nonodeverify -encrypt ${TMP_DATA}_2 & sleep 5 $GOPATH/bin/rqlited -http localhost:4005 -raft localhost:4006 -join http://localhost:4001 -nodex509cert ${TMP_DATA}_cert.pem -nodex509key ${TMP_DATA}_key.pem -nonodeverify -encrypt ${TMP_DATA}_3 & sleep 5 wait
Add script to set symfony folder permissions
#!/usr/bin/env bash

# Grant the web-server user and the current user read/write ACLs on
# Symfony's var/ directory (current and default ACLs).

# Detect the user the HTTP daemon runs as (apache/httpd/_www/www-data/nginx);
# the [b]racketed first letters keep the grep from matching itself in ps.
HTTPDUSER=$(ps axo user,comm \
  | grep -E '[a]pache|[h]ttpd|[_]www|[w]ww-data|[n]ginx' \
  | grep -v root | head -1 | cut -d' ' -f1)

sudo setfacl -R  -m u:"$HTTPDUSER":rwX -m u:"$(whoami)":rwX var
sudo setfacl -dR -m u:"$HTTPDUSER":rwX -m u:"$(whoami)":rwX var
Add strict version of dev wrapper script
#!/bin/bash set -o nounset # (set -u) Treat unset variables as error and exit script set -o errexit # (set -e) Exit if any command returns a non-zero status set -o pipefail # Return non-zero status if any piped commands fail ############################################################################# # # This is just a wrapper script that sources the shmark.sh functions so that # those functions can be tested without having to source them into the shell # environment. The wrapper script also allows stricter testing by setting extra # options such as 'nounset' and 'errexit'. NOTE: Tests should be run both with # and without the 'errexit' and other options set above. Run with the options # set when writing and testing the functions. Then comment the options out to # simulate the way the functions are likely to be run when sourced into the # shell environment. # # NOTE: Shell scripts like this can't change directories for a shell, so to # fully test any functions that change directories, the functions file will # need to be sourced into the shell environment and tested directly from the # shell instead of this wrapper script. # # @date 2014-01-21 First version # @author Steve Wheeler # ############################################################################## PROGNAME="${0##*/}" . ./shmark.sh #echo >&2 "DEBUG: ${PROGNAME}: running..." shmark "$@" # call the main function (all other functions are private)
Add build script to automate a task over all supported akka versions
#!/bin/bash

# Run an sbt task against every supported Akka version (the leading '+'
# also cross-builds all configured Scala versions).
#
# Usage: ./script.sh [sbt-task]   (default: package)

default_cmd=package
CMD=${1:-$default_cmd}

# Quote "+$CMD": an unquoted expansion would word-split a task name
# containing spaces into several sbt commands.
sbt "set akkaVersion := \"2.1.4\"" "+$CMD" \
    "set akkaVersion := \"2.2.4\"" "+$CMD" \
    "set akkaVersion := \"2.3.0\"" "+$CMD"
Create symbolic-link to HOME directory
#!/bin/bash
## Create a symbolic link in $HOME for each dotfile in ~/dotfiles.
# ln -s <source> <target>

# Pattern for the dotfiles; expanded (unquoted, deliberately) in the loop
# below so the glob takes effect.
files="$HOME/dotfiles/.*"

# Collect regular files only; the -f test skips '.', '..' and directories.
dotfile=()
for i in $files; do
  if [ -f "$i" ]; then
    dotfile+=("$i")
  fi
done

# Link each collected file into $HOME under its own basename.
for n in "${dotfile[@]}"; do
  filename=$(basename "$n")
  ln -s "$n" "$HOME/$filename"
done
Add integration test script for the clues-indigo job
#!/bin/bash NAME=$1 AUTH_FILE=$2 # Set working directory cd /opt/ec3/ # Launch cluster ./ec3 launch $NAME mesos docker ubuntu14-ramses -a $AUTH_FILE -u http://servproject.i3m.upv.es:8899 -y # Get cluster IP HOST_IP=$(./ec3 list | grep $NAME | awk '{print $3}') # Create marathon task cat >> mysql.json << EOT { "id": "mysql", "container": { "type": "DOCKER", "docker": { "image": "mysql", "network": "BRIDGE", "portMappings": [{ "containerPort": 3306, "servicePort": 8306, "protocol": "tcp" }] } }, "env": { "MYSQL_ROOT_PASSWORD": "password" }, "instances": 1, "cpus": 0.5, "mem": 256 } EOT echo 'SENDING MARATHON TASK' http POST http://$HOST_IP:8080/v2/apps < mysql.json # Check if task is recieved http GET http://$HOST_IP:8080/v2/apps?embed=tasks -b | jq '.apps[0].id' | grep mysql echo 'MARATHON TASK RECEIVED SUCCESFULLY' # Check if the marathon task is running TASK_STATE=$(http GET http://$HOST_IP:8080/v2/apps?embed=tasks -b | jq '.apps[0].tasks[0].state' | grep -c RUNNING) while [[ $TASK_STATE -ne 1 ]]; do sleep 20 TASK_STATE=$(http GET http://$HOST_IP:8080/v2/apps?embed=tasks -b | jq '.apps[0].tasks[0].state' | grep -c RUNNING) echo 'WATING FOR MARATHON TASK TO DEPLOY' done echo 'MARATHON TASK RUNNING' # Destroy the cluster ./ec3 destroy $NAME -y
Add handy shell script for running the server with command-line options
#!/bin/sh mvn exec:java -Dexec.mainClass=gov.nist.basekb.SearchServer -Dexec.args="-c /Users/soboroff/basekb/basekb-search/config.dat -p 7777 -i /Users/soboroff/basekb/basekb-index -m /Users/soboroff/basekb/basekb-search/enttype.classifier"
Add handy bash script to serve microsite
#!/usr/bin/env bash # !! IMPORTANT !! # You need to run > sbt mkSite at least once for this to even work # --- # handy script to get the micro-site served without having to remember where it is stored # cd docs/target/site/ jekyll serve & open http://127.0.0.1:4000/busymachines-commons/
Add script to run tests on SF
#!/bin/bash set -v # Set Environment echo ${PATH} | grep -q "${HOME}/bin" || { echo "Adding ${HOME}/bin to PATH" export PATH="${PATH}:${HOME}/bin" } # Install Go 1.5 mkdir -p ~/bin curl -sL -o ~/bin/gimme https://raw.githubusercontent.com/travis-ci/gimme/master/gimme chmod +x ~/bin/gimme eval "$(gimme 1.5)" # Get the Go dependencies export GOPATH=$HOME go get -f -u github.com/axw/gocov/gocov go get -f -u github.com/mattn/goveralls go get -f -u golang.org/x/tools/cmd/cover go get -f -u github.com/golang/lint/golint export GOPATH=`pwd`/Godeps/_workspace export PATH=$PATH:$GOPATH/bin # Fake install of project mkdir -p ${GOPATH}/src/github.com/redhat-cip/ ln -s $(pwd) ${GOPATH}/src/github.com/redhat-cip/skydive # Install requirements sudo yum -y install make sudo service openvswitch start sudo ovs-appctl -t ovsdb-server ovsdb-server/add-remote ptcp:6400 rpm -qi openvswitch # Run tests cd ${GOPATH}/src/github.com/redhat-cip/skydive gofmt -s -l . | grep -v statics/bindata.go make lint || true # (non-voting) make test GOFLAGS="-race -v -timeout 6m"
Add a script to make Debian packages.
#!/bin/sh # The MIT License # # Copyright (c) 2016 Jérémie DECOCK <jd.jdhp@gmail.com> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# LOAD VARIABLES ############################################################## source ./meta.sh ############################################################################### VERSION=$(python -c "print(__import__('${PYTHON_PACKAGE_NAME}').__version__)") DIST_DIR=dist ############################################################################### rm -rfv debian # TODO mkdir -p debian/usr/local/lib/python3.0/dist-packages cp -r ${PYTHON_PACKAGE_NAME} debian/usr/local/lib/python3.0/dist-packages chmod 644 $(find debian/usr/local/lib -type f) mkdir -p "debian/usr/share/doc/${PYTHON_PACKAGE_NAME}/" cp LICENSE "debian/usr/share/doc/${PYTHON_PACKAGE_NAME}/copyright" chmod 644 "debian/usr/share/doc/${PYTHON_PACKAGE_NAME}/copyright" mkdir -p debian/DEBIAN # section list : http://packages.debian.org/stable/ cat > debian/DEBIAN/control << EOF Package: ${PYTHON_PACKAGE_NAME} Version: ${VERSION} Section: libs Priority: optional Maintainer: ${AUTHOR_NAME} <${AUTHOR_EMAIL}> Architecture: all Depends: python (>= 3.0) Description: ${PROJECT_SHORT_DESC} EOF fakeroot dpkg-deb -b debian mkdir -p "${DIST_DIR}" mv debian.deb "${DIST_DIR}/${PYTHON_PACKAGE_NAME}_${VERSION}_all.deb"
Add version for setting version.
#!/bin/bash
# Rewrite the CPACK_PACKAGE_VERSION in the project's CMakeLists.txt.
#
# Usage: set-version.sh <new_version>
#
# NOTE: the original shebang was /bin/sh, but the script uses bashisms
# (`set -o pipefail`, `IFS=$'\n\t'`, `[[ ]]`) that break under dash/ash,
# so it must run under bash.

set -eu -o pipefail
IFS=$'\n\t'

if [[ $# -ne 1 ]] ; then
  >&2 echo "Usage: $0 <new_version>"
  exit 1
fi

INPUT_VERSION=$1; shift

# Split the version into MAJOR.MINOR.PATCH using parameter expansion only.
MAJOR_VERSION=${INPUT_VERSION%%.*}
WITHOUT_MAJOR_VERSION=${INPUT_VERSION#${MAJOR_VERSION}.}
MINOR_VERSION=${WITHOUT_MAJOR_VERSION%%.*}
WITHOUT_MINOR_VERSION=${INPUT_VERSION#${MAJOR_VERSION}.${MINOR_VERSION}.}
PATCH_VERSION=${WITHOUT_MINOR_VERSION%%.*}
XYZ_VERSION="${MAJOR_VERSION}.${MINOR_VERSION}.${PATCH_VERSION}"

# Move to the repository root (two levels above this script).
cd "$(dirname -- "$0")"
cd "${PWD}/../.."

# Target line looks like: set(CPACK_PACKAGE_VERSION "2.0.2")
sed -i -e 's/set(CPACK_PACKAGE_VERSION *"[^"]*")/set(CPACK_PACKAGE_VERSION "'"${XYZ_VERSION}"'")/' CMakeLists.txt
Add initial ubuntu setup script
#!/bin/zsh CODENAME=$(lsb_release -cs); echo "CODENAME = $CODENAME" PG_VER=9.5 # Make directories mkdir -p ~/Documents/projects sudo apt install -y zsh git mc libssl-dev xsel libreadline-dev sudo apt install -y libx11-dev libxtst-dev libxt-dev libsm-dev libxpm-dev sudo apt install -y make build-essential libssl-dev zlib1g-dev libbz2-dev sudo apt install -y libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev sudo apt install -y xz-utils tk-dev cmake libboost-all-dev python3-dev silversearcher-ag # Install ohmyzsh sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)" # Change default shell to zsh chsh -s $(which zsh) # Install pyenv curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | zsh env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install 3.7.3 # Compile vim LDFLAGS="-Wl,-rpath=${HOME}/.pyenv/versions/3.7.3/lib" ./configure --with-features=huge --enable-fail-if-missing \ --enable-multibyte \ --enable-python3interp=dynamic \ --enable-terminal \ --disable-gui --with-x --enable-largefile --disable-netbeans
Automate cloning original bootbox repo
#!/bin/bash
# Fetch upstream bootbox.js and copy it into the vendored assets.
#
# set -e so a failed clone or copy aborts instead of silently deleting
# the temp directory and appearing to succeed (also guards the cd's).
set -e

mkdir tmp
cd ./tmp
git clone git://github.com/makeusabrew/bootbox.git
cp ./bootbox/bootbox.js ../vendor/assets/javascripts/
cd ../
rm -rf ./tmp
Add configurations to run passport service
# Main class to be invoked. MAIN_CLASS=com.continuuity.passport.http.PassportHttpServer # Arguments for main class. #MAIN_CLASS_ARGS="" # Add Hadoop HDFS classpath EXTRA_CLASSPATH="" # Specify Heap Size. JAVA_HEAPMAX=-Xmx2048m
Build dynamic docker binary on RHEL 7 LE with Advanced Toolchain Installed
#!/bin/bash # #Script to build docker on RHEL 7 LE (ppc64le) platforms using #Advanced Toolchain # #Ensure AT9.0 is installed and PATH set appropriately # # build_rhel7le_with_at9.sh [build_dir] dir=${1} BUILD_DIR=${dir:-/docker_bld_ppc64} SRC='https://github.com/docker/docker.git' COMMIT_ID=611dbd8957581fa451a4103259100a5e2d115b8c #Install required dependencies yum groupinstall -y "Development Tools" yum install -y patch sqlite-devel wget git \ btrfs-progs-devel device-mapper-devel #Cleanup existing build and install directories rm -fr ${BUILD_DIR} #Create temp dir for building mkdir -p ${BUILD_DIR} #Set GOPATH GO_BASE_PATH="${BUILD_DIR}/go/src/github.com/docker/" mkdir -p ${GO_BASE_PATH} export AUTO_GOPATH=1 #Download docker source cd ${GO_BASE_PATH} git clone ${SRC} cd docker git checkout -b ppc64le ${COMMIT_ID} curl https://github.com/bpradipt/docker/commit/567c796fba113bca56b4ebf82be93d813e21f0f2.patch | \ patch -p1 sed -i.bkp 's/-ldl/-ldl -lpthread -lsystemd-journal/g' hack/make/gccgo ./hack/make.sh dyngccgo mv ./hack/make/gccgo.bkp ./hack/make/gccgo
Revert "Add java home path"
#!/bin/bash set -e version=$1 jar=$2 mainClass=$3 $JAVA_HOME/bin/javapackager -deploy \ -BappVersion=$version \ -Bcategory=Finance \ -BlicenseType=GPLv3 \ -Bemail=info@bitsquare.io \ -native deb \ -name Bitsquare \ -title Bitsquare \ -vendor Bitsquare \ -outdir build \ -appclass $mainClass \ -srcfiles $jar \ -outfile Bitsquare # -Bicon=client/icons/icon.png \
#!/bin/bash set -e version=$1 jar=$2 mainClass=$3 javapackager -deploy \ -BappVersion=$version \ -Bcategory=Finance \ -BlicenseType=GPLv3 \ -Bemail=info@bitsquare.io \ -native deb \ -name Bitsquare \ -title Bitsquare \ -vendor Bitsquare \ -outdir build \ -appclass $mainClass \ -srcfiles $jar \ -outfile Bitsquare # -Bicon=client/icons/icon.png \
Add python user path to path
#! /usr/bin/env bash if command -v python3 >/dev/null 2>&1 ; then python_path="$(python3 - <<SNAKE import site import os.path as path print(path.join(site.USER_BASE, 'bin')) SNAKE )" pathadd "$python_path" fi
Implement test inkscape fonts by list.txt fonts.
rm test.html rm *.png echo '<link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/pure-min.css" integrity="sha384-nn4HPE8lTHyVtfCBi5yW9d20FjT8BJwUXyWZT9InLYax14RDjBj46LmSztkmNP9w" crossorigin="anonymous">' >> test.html while read p; do rm 0.svg echo '<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="100%" height="100%" viewBox="0 0 1200 628">' >> 0.svg echo '<defs>' >> 0.svg echo '<style>' >> 0.svg echo '.cls-1 {' >> 0.svg echo 'font-size: 62px;' >> 0.svg echo 'fill: #000;' >> 0.svg echo 'text-anchor: middle;' >> 0.svg echo "font-family: $p;" >> 0.svg echo '}' >> 0.svg echo '</style>' >> 0.svg echo '</defs>' >> 0.svg echo '<text id="name" class="cls-1" x="278" y="561">陳白翰</text>' >> 0.svg echo '</svg>' >> 0.svg inkscape -e "$p.png" 0.svg echo '<div class="pure-g">' >> test.html echo '<div class="pure-u-1-2">' >> test.html echo "字型名稱: $p" >> test.html echo '</div>' >> test.html echo '<div class="pure-u-1-2">' >> test.html echo "<img src='$p.png' class='pure-img' /><br/>" >> test.html echo '</div>' >> test.html echo '</div>' >> test.html done <list.txt
Add script to automate the refactoring
#!/bin/bash # This is a migration script to migrate the current Fork CMS code to the new structure that will allow installing # things with composer and give the module developers more freedom of how they want to structure the extra code within # their module # # src # | Files # | Core # | | Backend # | | Frontend # | Themes # | | ThemeName # | Modules # | | ModuleName # | | | Backend # | | | Frontend # | Installer # | Console while true; do read -p "Is this a dry run (y/n)?" yn case $yn in [Yy]* ) CLEANUP=false; break;; [Nn]* ) CLEANUP=true; break;; * ) echo "Please answer yes or no.";; esac done # Set some base paths ROOT=$(pwd) OLDSRC=$(pwd)'/src' NEWSRC=$(pwd)'/newSrc' MODULESDIR=$NEWSRC'/Modules' COREDIR=$NEWSRC'/Core' FILESDIR=$NEWSRC'/Files' THEMESDIR=$NEWSRC'/Themes' INSTALLERDIR=$NEWSRC'/Installer' CONSOLEDIR=$NEWSRC'/Console' APPLICATIONS=('Frontend' 'Backend') # Remove the old attempt rm -rf $NEWSRC # Create the base directories mkdir -p $COREDIR'/Backend' mkdir -p $COREDIR'/Frontend' mkdir -p $MODULESDIR cp -r $OLDSRC'/Common' $COREDIR'/Common' if $CLEANUP ; then rm -r $OLDSRC'/Common'; fi; cp -r $OLDSRC'/Console' $NEWSRC'/Console' if $CLEANUP ; then rm -r $OLDSRC'/Console'; fi; cp -r $OLDSRC'/Frontend/Files' $FILESDIR if $CLEANUP ; then rm -r $OLDSRC'/Frontend/Files'; fi; cp -r $OLDSRC'/Frontend/Themes' $THEMESDIR if $CLEANUP ; then rm -r $OLDSRC'/Frontend/Themes'; fi; cp -r $OLDSRC'/ForkCMS/Bundle/InstallerBundle' $INSTALLERDIR if $CLEANUP ; then rm -r $OLDSRC'/ForkCMS/Bundle/InstallerBundle'; fi; for APPLICATION in ${APPLICATIONS[@]} do for dir in $OLDSRC/$APPLICATION/Modules/*/ do dir=${dir%*/} mkdir $MODULESDIR/${dir##*/} &> /dev/null cp -r $dir $MODULESDIR/${dir##*/}/$APPLICATION/ if $CLEANUP ; then rm -r $dir; fi; done done if $CLEANUP then cp -r $OLDSRC $(pwd)'/oldsrc' rm -rf $OLDSRC cp -r $NEWSRC $OLDSRC rm -rf $NEWSRC fi
Add selftest for 'git submodule foreach'
#!/bin/sh # # Copyright (c) 2009 Johan Herland # test_description='Test "git submodule foreach" This test verifies that "git submodule foreach" correctly visits all submodules that are currently checked out. ' . ./test-lib.sh test_expect_success 'setup a submodule tree' ' echo file > file && git add file && test_tick && git commit -m upstream git clone . super && git clone super submodule && ( cd super && git submodule add ../submodule sub1 && git submodule add ../submodule sub2 && git submodule add ../submodule sub3 && git config -f .gitmodules --rename-section \ submodule.sub1 submodule.foo1 && git config -f .gitmodules --rename-section \ submodule.sub2 submodule.foo2 && git config -f .gitmodules --rename-section \ submodule.sub3 submodule.foo3 && git add .gitmodules test_tick && git commit -m "submodules" && git submodule init sub1 && git submodule init sub2 && git submodule init sub3 ) && ( cd submodule && echo different > file && git add file && test_tick && git commit -m "different" ) && ( cd super && ( cd sub3 && git pull ) && git add sub3 && test_tick && git commit -m "update sub3" ) ' sub1sha1=$(cd super/sub1 && git rev-parse HEAD) sub3sha1=$(cd super/sub3 && git rev-parse HEAD) cat > expect <<EOF Entering 'sub1' sub1-$sub1sha1 Entering 'sub3' sub3-$sub3sha1 EOF test_expect_success 'test basic "submodule foreach" usage' ' git clone super clone && ( cd clone && git submodule update --init -- sub1 sub3 && git submodule foreach "echo \$path-\$sha1" > ../actual ) && test_cmp expect actual ' test_done
Add script to download the wallpapers from wallpapershome.com
#!/bin/bash
# Scrape wallpapers from wallpapershome.com: visit every listing page,
# resolve each wallpaper detail page, then download every pic_*.jpg it links.

for ((i = 1; i <= 1359; i++)); do
  echo "page…$i"
  # NOTE(review): `curl -e` sets the HTTP Referer header; passing
  # "https_proxy=…" there looks like a mistaken attempt at proxying — confirm.
  for url in $(curl "https://wallpapershome.com/?page=$i" \
      | grep -o 'a href="/[^"]*.html' \
      | sed -n 's/a href="/https:\/\/wallpapershome.com/p' \
      | xargs curl -e "https_proxy=192.168.1.3:1080" -s -N \
      | grep -o '/images[^"]*/pic_[^"]*.jpg'); do
    # Group downloads into a directory named after the size/orientation
    # component of the URL (e.g. "1920x1080_h").
    wget -P "$(echo "$url" | grep -o '[^/]\+_[vh]')" -t 10 -T 10 -N "https://wallpapershome.com$url"
  done
done
Add a hash-bang as the first line of a shell script
if [ "x$ANDROID_JAVA_HOME" != x ] && [ -e "$ANDROID_JAVA_HOME/lib/tools.jar" ] ; then echo $ANDROID_JAVA_HOME/lib/tools.jar else JAVAC=$(which javac) if [ -z "$JAVAC" ] ; then echo "Please-install-JDK-6,-which-you-can-download-from-java.sun.com" exit 1 fi while [ -L "$JAVAC" ] ; do LSLINE=$(ls -l "$JAVAC") JAVAC=$(echo -n "$LSLINE" | sed -e "s/.* -> //") done echo $JAVAC | sed -e "s:\(.*\)/bin/javac.*:\\1/lib/tools.jar:" fi
#!/bin/sh if [ "x$ANDROID_JAVA_HOME" != x ] && [ -e "$ANDROID_JAVA_HOME/lib/tools.jar" ] ; then echo $ANDROID_JAVA_HOME/lib/tools.jar else JAVAC=$(which javac) if [ -z "$JAVAC" ] ; then echo "Please-install-JDK-6,-which-you-can-download-from-java.sun.com" exit 1 fi while [ -L "$JAVAC" ] ; do LSLINE=$(ls -l "$JAVAC") JAVAC=$(echo -n "$LSLINE" | sed -e "s/.* -> //") done echo $JAVAC | sed -e "s:\(.*\)/bin/javac.*:\\1/lib/tools.jar:" fi
Add script for updating docs repository.
#!/bin/bash -x # Script to build docs and update FlowM2M/AwaLWM2M-docs repository # # Define DOCKER_RUN_EXTRA_OPTIONS if you wish to add/override container run options. # # Assumes FlowM2M/AwaLWM2M-docs is checked out in ../docs # SSH credentials for pushing to FlowM2M/AwaLWM2M-docs are to be provided externally (e.g. from Jenkins) # DOCKER_RUN_EXTRA_OPTIONS=$DOCKER_RUN_EXTRA_OPTIONS set -o errexit set -o nounset SRC_DOCS=$PWD/api/doc/html DST_DOCS=../docs # only enable Docker pseudoterminal if a TTY is present: if [ -t 1 ]; then TERMINAL_OPT=-t else TERMINAL_OPT= fi # make docs docker run \ -v $(pwd):/home/build/AwaLWM2M \ -w /home/build/AwaLWM2M \ -i $TERMINAL_OPT \ $DOCKER_RUN_EXTRA_OPTIONS \ flowm2m/awalwm2m.ci \ make docs GIT_REV=$(git rev-parse HEAD) ( cd $DST_DOCS git checkout -b gh-pages-update git rm -rf --ignore-unmatch * cp -a $SRC_DOCS/* . git add . --all git status git commit -m "Update documentation from FlowM2M/AwaLWM2M.git :: $GIT_REV." git push origin gh-pages-update:gh-pages git checkout origin/gh-pages git branch -D gh-pages-update )
Add carthage.sh file to run the Carthage update command for Xcode 12
#!/usr/bin/env bash # carthage.sh # Usage example: ./carthage.sh build --platform iOS set -euo pipefail xcconfig=$(mktemp /tmp/static.xcconfig.XXXXXX) trap 'rm -f "$xcconfig"' INT TERM HUP EXIT # For Xcode 12 make sure EXCLUDED_ARCHS is set to arm architectures otherwise # the build will fail on lipo due to duplicate architectures. echo 'EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_simulator__NATIVE_ARCH_64_BIT_x86_64__XCODE_1200 = arm64 arm64e armv7 armv7s armv6 armv8' >> $xcconfig echo 'EXCLUDED_ARCHS = $(inherited) $(EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_$(EFFECTIVE_PLATFORM_SUFFIX)__NATIVE_ARCH_64_BIT_$(NATIVE_ARCH_64_BIT)__XCODE_$(XCODE_VERSION_MAJOR))' >> $xcconfig export XCODE_XCCONFIG_FILE="$xcconfig" carthage "$@"
Add deploy script for docs
#!/bin/bash
# Deploy generated docs to the druid-io/druid-io.github.io repository.
#
# Usage: deploy_docs.sh [version]
#   If no version is given, it is parsed out of ../../pom.xml.
#
# Fixes: set -e moved from the shebang into the script body (the shebang
# options are lost when run as "bash script.sh"); backticks replaced with
# $(...); expansions quoted -- the unquoted "Version[${VERSION}]" echo was
# a live glob pattern.
set -e

SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)

if [ -z "${1}" ]; then
	# Pull the project version out of the pom. Fragile: relies on the
	# 4th <version> tag in the file.
	# NOTE(review): confirm against the actual pom.xml layout.
	VERSION=$(grep version "$SCRIPT_DIR/../../pom.xml" | head -4 | tail -1 \
		| sed 's_.*<version>\([^<]*\)</version>.*_\1_')
else
	VERSION=${1}
fi

WORKING_DIR=/tmp/docs-deploy

echo "Using Version[${VERSION}]"
echo "Script in [${SCRIPT_DIR}]"
echo "Deploying to [${WORKING_DIR}]"

if [ -d "${WORKING_DIR}" ]; then
	echo "DELETING ${WORKING_DIR}"
	rm -rf "${WORKING_DIR}"
fi

git clone git@github.com:druid-io/druid-io.github.io.git "${WORKING_DIR}"

DOC_DIR="${WORKING_DIR}/docs/${VERSION}/"

cp "${SCRIPT_DIR}"/../_layouts/doc* "${WORKING_DIR}/_layouts/"
mkdir -p "${DOC_DIR}"
cp -r "${SCRIPT_DIR}"/../content/* "${DOC_DIR}"

BRANCH="docs-${VERSION}"

pushd "${WORKING_DIR}"
git checkout -b "${BRANCH}"
git add .
git commit -m "Deploy new docs version ${VERSION}"
git push origin "${BRANCH}"
popd

rm -rf "${WORKING_DIR}"
Add a script to automate PR preparation
#!/usr/bin/env bash if [[ $# -ne 1 ]]; then echo "Usage: $0 <smart-answer-flow-name>" exit fi if [[ -n "$(git status --porcelain)" ]]; then echo "Please run with a clean git working directory" exit fi bundle install bundle exec rails runner script/generate-responses-and-expected-results-for-smart-answer.rb $1 if [[ -n "$(git status --porcelain)" ]]; then git add test/data/$1-responses-and-expected-results.yml git commit -m "Update expected results for $1" fi bundle exec rake checksums:update if [[ -n "$(git status --porcelain)" ]]; then git add test/data/$1-files.yml git commit -m "Update checksums for $1" fi RUN_REGRESSION_TESTS=$1 ruby test/regression/smart_answers_regression_test.rb if [[ -n "$(git status --porcelain)" ]]; then git add test/artefacts/$1 git commit -m "Update test artefacts for $1" fi RUN_REGRESSION_TESTS=$1 ruby test/regression/smart_answers_regression_test.rb
Fix shebang in autoconf and automake Perl scripts
#!/bin/bash sed -i.bak -e '/^rsync .*/d' bootstrap.conf # bootstrap will fall back to wget ./bootstrap # This is needed only when the sed sources comes from the Git repo ./configure --prefix=$PREFIX sed -i.bak -e 's/ -Wmissing-include-dirs//' Makefile make make install
#!/bin/bash # fix autoconf sed -i.bak -e '1 s|^.*$|#!/usr/bin/env perl|g' $PREFIX/bin/autoheader \ $PREFIX/bin/autom4te $PREFIX/bin/autoreconf $PREFIX/bin/autoscan \ $PREFIX/bin/autoupdate $PREFIX/bin/ifnames # fix automake sed -i.bak -e '1 s|^.*$|#!/usr/bin/env perl|g' $PREFIX/bin/aclocal* \ $PREFIX/bin/automake* sed -i.bak -e '/^rsync .*/d' bootstrap.conf # bootstrap will fall back to wget ./bootstrap # This is needed only when the sed sources comes from the Git repo ./configure --prefix=$PREFIX sed -i.bak -e 's/ -Wmissing-include-dirs//' Makefile make make install
Add a case statement testing the system arch
#!/bin/bash
# Print a human-readable description of this machine's CPU architecture.
machine_type=$(arch)
case "$machine_type" in
	i386)
		echo "80386-based machine"
		;;
	i486)
		echo "80486-based machine"
		;;
	i586)
		echo "Pentium-based machine"
		;;
	i686)
		echo "Pentium2+-based machine"
		;;
	*)
		echo "Other type of machine"
		;;
esac
exit 0
Add script to generate tarballs for parrot and cvmfs root benchmarks
#!/bin/bash present_dir=$PWD cd multiple-read/root-code make cp readDirect-multiple ../../root-parrot cp readDirect-multiple ../../root-cvmfs make clean cd $present_dir cd single-read/root-code make cp readDirect-single ../../root-parrot cp readDirect-single ../../root-cvmfs make clean cd $present_dir cd single-read-cpuburn cp readDirect-cpuburn ../../root-parrot cp readDirect-cpuburn ../../root-cvmfs make clean cd $present_dir tar cvzf parrot-root.tar.gz parrot-root tar cvzf parrot-cvmfs.tar.gz parrot-cvmfs
Add script to update version numbers
#!/bin/sh
# Update the Maven project version (root pom and installer pom).
#
# Usage: set-version.sh <new-version>   e.g. set-version.sh 1.9.0-rc1
#
# Fixes: abort on any failing command, quote the version argument, and
# send the usage error to stderr.
set -e

if [ -z "$1" ]; then
	echo 'New version number required eg. 1.9.0-rc1' >&2
	exit 1
fi

mvn versions:set -DgenerateBackupPoms=false -DnewVersion="$1"
cd installer
mvn versions:set -DgenerateBackupPoms=false -DnewVersion="$1"
cd ..
Add man page installation script
#!/bin/sh -e [ -z "$PREFIX" ] && PREFIX="/usr/local" [ -z "$MANDIR" ] && MANDIR="$PREFIX/share/man" install -d "$DESTDIR/$MANDIR/man1" for manpage in man/*; do install "$manpage" "$DESTDIR/$MANDIR/man1/`basename $manpage`" done
Add convenience wrapper for running playbooks in docker.
#!/bin/bash function err { echo "ERROR: $@" exit 1 } current_directory="$(pwd)" # Check for Docker which docker &>/dev/null || err "Docker is required to run Ansible playbooks." # Validate the Docker image is available (assuming images are named) docker_image="$1" docker images | awk "{ print \$1 \":\" \$2 }" | grep "$1" &>/dev/null || err "Could not find Docker image: $docker_image" # Create symlinks to previous directory (assumes the influxdb module is in a module directory) # module_dirs="$(ls -1 ../)" # echo "Creating symlinks for each folder in '$(dirname $current_directory)' so that Ansible can find them..." # for dir in ${module_dirs[@]}; do # test -h ./$dir && err "Stopping due to current directory containing symlink $dir" # test -d ./$dir && err "Stopping due to current directory containing dir $dir" # ln -s ../$dir $dir # done echo "Running in Docker: $@" docker run \ --rm \ -v $(pwd):/root \ $@ # Remove symlinks so that current directory is clean # echo "Clearing symlinks..." # for dir in ${module_dirs[@]}; do rm $dir; done exit 0
Add a test case for a failing condition where a branch with no commits is unable to be pushed
#Make some changes to each repo and push the changes cd RepoA git checkout -f -B MyTestBranch git push --progress "origin" MyTestBranch:MyTestBranch read -p "paused"
Add a script to create the project.
#!/bin/bash CORDOVA=${CORDOVA-cordova} set -x $CORDOVA create CordovaAppHarness org.apache.appharness CordovaAppHarness cd CordovaAppHarness echo ' var cordova = require('../../cordova-cli/cordova'); module.exports = function(grunt) { // Simple config to run jshint any time a file is added, changed or deleted grunt.initConfig({ watch: { files: ['www/**'], tasks: ['prepare'], }, }); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.registerTask('prepare', 'Runs cdv prepare', function() { cordova.prepare(); }); // Default task(s). grunt.registerTask('default', ['watch']); }; ' > Gruntfile.js mkdir node_modules npm install grunt grunt-contrib-watch rm -r www ln -s ../www www $CORDOVA platform add ios ../../cordova-ios/bin/update_cordova_subproject platforms/ios/CordovaAppHarness.xcodeproj $CORDOVA plugin add ../../../mobile_chrome_apps/AppBundle $CORDOVA plugin add ../../../mobile_chrome_apps/zip $CORDOVA plugin add ../../../BarcodeScanner # https://github.com/wildabeast/BarcodeScanner.git $CORDOVA plugin add ../../cordova-plugin-file $CORDOVA plugin add ../../cordova-plugin-file-transfer exit 0 # optional plugins for l in ../cordova-plugin-* ; do $CORDOVA plugin add "$l" done
Use a subdirectory 'sut' to checkout branch opam/unstable.
mkdir dist || true mkdir tmp || true . "$(dirname $0)/opam.bash" || exit 1 # TODO: use the published version of opam-build-revdeps opam pin add opam-build-revdeps \ git://github.com/gildor478/opam-build-revdeps.git#opam/unstable cd dist opam-build-revdeps compare --package oasis \ --version1 latest \ --version2 latest --pin2 "oasis:$(pwd)/.." \ --root_dir "$(pwd)/../tmp"
mkdir dist || true mkdir tmp || true . "$(dirname $0)/opam.bash" || exit 1 # TODO: use the published version of opam-build-revdeps opam pin add opam-build-revdeps \ git://github.com/gildor478/opam-build-revdeps.git#opam/unstable opam-build-revdeps compare --package oasis \ --version1 latest \ --version2 latest --pin2 "oasis:$(pwd)/sut" \ --html_output "dist/output.html"
Add a script to dump INSERT statements for the data in the database
#!/bin/sh
# Dump the data of the "catmaid" database as INSERT statements, stripping
# SQL comment lines and blank lines from the output.
# egrep is deprecated; a single grep -Ev with alternation replaces the
# two-process filter chain.
pg_dump --no-privileges --inserts --data-only --no-owner --no-tablespaces catmaid -U catmaid_user \
	| grep -Ev '^--|^ *$'
Include lib directory when executing tests.
#!/bin/sh echo "\033[1;81m Running test_time_stack_item...\033[0m" ruby test_time_stack_item.rb || (echo "FAILED!!!!!!!!!!!!") echo "\033[1;81m Running test_timecop_without_date...\033[0m" ruby test_timecop_without_date.rb || (echo "FAILED!!!!!!!!!!!!") echo "\033[1;81m Running test_timecop_without_date_but_with_time...\033[0m" ruby test_timecop_without_date_but_with_time.rb || (echo "FAILED!!!!!!!!!!!!") echo "\033[1;81m Running test_timecop...\033[0m" ruby test_timecop.rb || (echo "FAILED!!!!!!!!!!!!")
#!/bin/sh
# Run each timecop test file with ../lib on Ruby's load path and flag
# failures. printf replaces echo: interpreting "\033" escapes via echo is
# not portable across /bin/sh implementations.

# run_test <file> -- announce, run with -I../lib, report failure.
run_test() {
	printf '\033[1;81m Running %s...\033[0m\n' "${1%.rb}"
	ruby -I../lib "$1" || printf 'FAILED!!!!!!!!!!!!\n'
}

run_test test_time_stack_item.rb
run_test test_timecop_without_date.rb
run_test test_timecop_without_date_but_with_time.rb
run_test test_timecop.rb
Add a script for reformatting all sources.
#!/bin/bash # Format all sources using rustfmt. # Exit immediately on errors. set -e cd $(dirname "$0") src=$(pwd) for crate in $(find "$src" -name Cargo.toml); do cd $(dirname "$crate") cargo fmt done
Adjust git config on SageMaker notebook.
#!/bin/bash echo On your SageMaker notebook, store this file under ~/Sagemaker and set as executable. echo Remember to change the name and email. USER_NAME='Firstname Lastname' USER_EMAIL='first.last@email.com' echo Adjusting contact to $USER_NAME / $USER_EMAIL git config --global user.name "$USER_NAME" git config --global user.email $USER_EMAIL echo You may need to run: echo ' ' git commit --amend --reset-author echo Adjusting log aliases... git config --global alias.lol "log --graph --format=format:'%C(bold blue)%h%C(reset) - %C(bold green)(%ar)%C(reset) %C(white)%s%C(reset) %C(bold white)— %an%C(reset)%C(bold yellow)%d%C(reset)' --abbrev-commit --date=relative" git config --global alias.lola "log --graph --all --format=format:'%C(bold blue)%h%C(reset) - %C(bold green)(%ar)%C(reset) %C(white)%s%C(reset) %C(bold white)— %an%C(reset)%C(bold yellow)%d%C(reset)' --abbrev-commit --date=relative"
Split off separate script for -current
#!/bin/sh # ################################################################################ # # Copyright (c) 2013-2015, Bryan Vyhmeister <bryan@bsdjournal.net> # # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, # INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM # LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR # OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR # PERFORMANCE OF THIS SOFTWARE. # ################################################################################ # # Title: cvs-cur.sh # Version: 0.5.5 # # This script simply checks out the src, xenocara, and ports trees for the # -current version of OpenBSD. This script is designed to be used # with -current rather than a specific release. The separate cvs-rel.sh is # designed to be used with specific release of OpenBSD. # ################################################################################ # # You should choose an anoncvs mirror outside the US unless you are in the US. # See http://www.openbsd.org/anoncvs.html for a list of mirrors. Change the # CVSROOT variable as needed. This script is designed to be run as root which # may not be the best way of doing things. CVSROOT="anoncvs@cvs.anoncvs.com:/cvs" ################################################################################ export CVSROOT cd /usr cvs -q get -P src cvs -q get -P xenocara cvs -q get -P ports
Add pwrstat UPS metric script
#!/bin/bash -e has_scheme='.*--scheme.*' after_scheme='(?<=\-\-scheme(=|\s))\s*\S+' if [[ "$@" =~ $has_scheme ]]; then PREFIX="$(echo "$@" | grep -oP $after_scheme)" else PREFIX="$HOSTNAME" fi STATUS=$(sudo pwrstat -status) DATE=$(date +%s) STATUS=$(echo "$STATUS" | sed -re 's/^\s+//' -e 's/\.+/:/') function extract_var() { local tmp="$@" echo "$STATUS" | grep -oP "(?<=$tmp:\s).*" } debug_regex='.*--debug.*' if [[ "$@" =~ $debug_regex ]]; then #echo "$STATUS" echo "State=$(extract_var State)" echo "Power Supply=$(extract_var Power Supply by)" echo "Utility Voltage=$(extract_var Utility Voltage | sed 's/\sV//')" echo "Output Voltage=$(extract_var Output Voltage | sed 's/\sV//')" echo "Battery Capacity (%)=$(extract_var Battery Capacity | sed 's/\s%//')" echo "Remaining Runtime (min.)=$(extract_var Remaining Runtime | sed 's/\smin\.//')" LOAD=$(extract_var Load) echo "Load (W)=$(echo $LOAD | sed 's/\sWatt.*//')" echo "Load (%)=$(echo $LOAD | sed -e 's/.*\sWatt(//' -e 's/\s%)//')" echo "Line Interaction=$(extract_var Line Interaction)" echo "Test Result=$(extract_var Test Result)" echo "Last Power Event=$(extract_var Last Power Event)" fi function output_var() { if [ ! -z $1 ] && [ ! -z $2 ]; then echo "$PREFIX.$1 $2 $DATE" return 0 else echo 'Two arguments required. Usage: output_var name value' return 1 fi } output_var voltage.utility $(extract_var Utility Voltage) output_var voltage.output $(extract_var Output Voltage) output_var battery.percent $(extract_var Battery Capacity) output_var battery.time $(extract_var Remaining Runtime | sed 's/\smin\.//') LOAD=$(extract_var Load) output_var load.watts $(echo $LOAD | sed 's/\sWatt.*//') output_var load.percent $(echo $LOAD | sed -e 's/.*\sWatt(//' -e 's/\s%)//')
Add wrapper script to run the tests locally
#!/bin/bash runtest () { echo "" echo "Testing tag $1" echo " > Cleaning... " make clean TAG=$1 > /dev/null 2>&1 echo " > Building... " make build TAG=$1 > /dev/null 2>&1 echo " > Running... " make run TAG=$1 > /dev/null 2>&1 echo " > Sleeping 15 seconds... " sleep 15 echo -n " > Testing... " make test TAG=$1 #> /dev/null 2>&1 [[ $? == 0 ]] && echo -e "\e[1;32mOK\e[0m" || echo -e "\e[1;31mFAILURE\e[0m" echo " > Stopping... " make stop TAG=$1 > /dev/null 2>&1 } runtest "5.6" runtest "7.0" runtest "7.1" runtest "7.2" runtest "7.3" runtest "7.4-codecasts"
Use PyMongo's test certificates on Evergreen
#!/bin/sh set -o xtrace # Write all commands first to stderr set -o errexit # Exit the script with error if any of the commands fail # Copy PyMongo's test certificates over driver-evergreen-tools' cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ # Replace MongoOrchestration's client certificate. cp ${PROJECT_DIRECTORY}/test/certificates/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem if [ -w /etc/hosts ]; then SUDO="" else SUDO="sudo" fi # Add 'server' and 'hostname_not_in_cert' as a hostnames echo "127.0.0.1 server" | $SUDO tee -a /etc/hosts echo "127.0.0.1 hostname_not_in_cert" | $SUDO tee -a /etc/hosts
Revert "Inlined, as the release process relies on it to be local."
#!/bin/bash set -e SRC_PATH=$(pwd) PROJECT_NAME="github.com/$TRAVIS_REPO_SLUG" export GO15VENDOREXPERIMENT=1 curl -L https://raw.githubusercontent.com/experimental-platform/misc/master/install-glide.sh | sh cp $HOME/bin/glide . docker run -v "${SRC_PATH}:/go/src/$PROJECT_NAME" -w "/go/src/$PROJECT_NAME" -e GO15VENDOREXPERIMENT=1 golang:1.5 /bin/bash -c "./glide up && go build -v"
Add VDI (VirtualBox virtual disk image) auto-generation script
#!/bin/bash # Gotta have a grub 0.97-like installed on your system # EMBOX_BIN_DIR="/home/user/Projects/Embox/build/base/bin" EMBOX_BIN="embox" VDI_OUTPUT_DIR="/home/user" VDI_OUTPUT_IMG="embox.vdi" TEMP_DIR="/tmp" TEMP_IMG="embox_vdi_temp.img" DD_IMAGE_SIZE=40320 DD_IMAGE_OFFSET=32256 MOUNT_DIR="/mnt/embox" GRUB_DIR="/boot/grub" LOOP_DEVICE="/dev/loop0" if [ "$(id -u)" != "0" ]; then echo "This script must be run as root" 1>&2 exit 1 fi echo -e "\n\nTrying to make up an empty disk image\n\n" dd if=/dev/zero of=$TEMP_DIR/$TEMP_IMG count=$DD_IMAGE_SIZE echo -e "\n\nMake a new pertition on image\n\n" echo -e "x\nh\n16\ns\n63\nc\n40\nr\nn\np\n1\n\n\nw\n\n\n" | fdisk $TEMP_DIR/$TEMP_IMG echo -e "\n\nMake a file system\n\n" losetup -o $DD_IMAGE_OFFSET $LOOP_DEVICE $TEMP_DIR/$TEMP_IMG mke2fs $LOOP_DEVICE losetup -d $LOOP_DEVICE echo -e "\n\nMount image\n\n" mkdir $MOUNT_DIR mount -o loop,offset=$DD_IMAGE_OFFSET $TEMP_DIR/$TEMP_IMG $MOUNT_DIR echo -e "\n\nMoving files to image\n\n" mkdir -p $MOUNT_DIR$GRUB_DIR cp $EMBOX_BIN_DIR/$EMBOX_BIN $MOUNT_DIR cp $GRUB_DIR/{stage1,stage2,e2fs_stage1_5} $MOUNT_DIR$GRUB_DIR umount $TEMP_DIR/$TEMP_IMG echo -e "\n\nInstall grub\n\n" echo -e "device (hd0) $TEMP_DIR/$TEMP_IMG\ngeometry (hd0) 40 16 63\nroot (hd0,0)\nsetup (hd0)\nquit\n\n" | grub --device-map=/dev/null echo -e "\n\nConvert image to VDI\n\n" rm -f $VDI_OUTPUT_DIR/$VDI_OUTPUT_IMG VBoxManage convertfromraw --format VDI $TEMP_DIR/$TEMP_IMG $VDI_OUTPUT_DIR/$VDI_OUTPUT_IMG chmod 777 $VDI_OUTPUT_DIR/$VDI_OUTPUT_IMG rm -f $TEMP_DIR/$TEMP_IMG
Add script to start db, web, & grunt watch.
#!/bin/bash onexit() { kill -2 $(jobs -p) kill $(jobs -p) echo "Waiting for children to exit..." sleep 1.1 kill -9 0 } trap onexit SIGINT SIGTERM EXIT INT QUIT TERM cd "$( dirname "${BASH_SOURCE[0]}" )" ./start-db.sh & ./start-web.sh & (cd js-library; grunt default watch) & wait
Add a MPP build-only CI task
#!/bin/bash set -e echo "Starting $0 at $(date)" cd "$(dirname $0)" # Run Gradle impl/build.sh --no-daemon listTaskOutputs -Pandroidx.validateNoUnrecognizedMessages "$@" impl/build.sh allProperties "$@" >/dev/null impl/build.sh --no-daemon buildOnServer -Pandroidx.validateNoUnrecognizedMessages checkExternalLicenses \ -PverifyUpToDate \ -Pandroidx.coverageEnabled=true \ -Pandroidx.enableAffectedModuleDetection \ -Pandroidx.compose.multiplatformEnabled=true \ -Pandroidx.allWarningsAsErrors --profile "$@" # Parse performance profile reports (generated with the --profile option above) and re-export the metrics in an easily machine-readable format for tracking impl/parse_profile_htmls.sh echo "Completing $0 at $(date)"
Add a script to update example outputs.
#!/usr/bin/env bash echo -n "MVar:" time stack exec examples-mvar > examples/MVar.out echo -ne "\nStack:" time stack exec examples-stack > examples/Stack.out echo -ne "\nComplex Stack:" time stack exec examples-complexstack > examples/ComplexStack.out
Add minimal test dependencies script
#!/bin/bash -xe # fix default umask of 0002 for hadoop data dir errors sudo sh -c 'echo "umask 0022" >> /etc/profile' # install some basic packages we need sudo apt-get -y install ant ant-optional git libev-dev libyaml-dev lsof python-dev python-setuptools python-pip rsync screen wamerican
Add shell script to commit php doc the right way
# Commit the generated PHP documentation: add new files, delete missing
# files, then check in.
#
# Fixes: anchor the status-code match to the start of the line (an
# unanchored grep "?" also matched a '?' anywhere in a path), and use
# xargs -r (GNU) so svn is not invoked with no arguments when nothing
# matched.
# NOTE(review): the awk '{print $2}' split still breaks on paths that
# contain spaces.
svn status dev/doc/phpdoc | grep '^?' | awk '{print $2}' | xargs -r svn add
svn status dev/doc/phpdoc | grep '^!' | awk '{print $2}' | xargs -r svn del
svn ci -m "Update php documentation" dev/doc/phpdoc
Add function to query processes by strings
# Print the PIDs of processes whose command line contains the given string.
# pgrep replaces the ps|grep|sed chain: it never matches itself, so the
# "[f]oo" first-character bracketing trick is no longer needed, and the
# PID extraction sed is built in. -- stops option parsing for patterns
# that begin with a dash.
function ps_find {
	pgrep -f -- "$*"
}
Add script for doing a 4-stage bootstrap.
#!/bin/sh # The usual bootstrap self-consistency check is three stages, but because of # having our own libc as well we actually need four stages: # # host - completely different compiler # ncc1 - ncc source compiled with host, linked against system libc # ncc2 - ncc source compiled with ncc + system libc, linked against naive libc # ncc3 - ncc source compiled with ncc + naive libc, linked against naive libc # ncc4 - ncc source compiled with ncc + naive libc, linked against naive libc # # ncc2 and ncc3 might be different, because the libc of the compiler they were # compiled with is different. For example, the stability of qsort can be # different between the two, which can affect register allocation. make clean && make -j16 && rm -rf /tmp/naive1 && cp -r /opt/naive/ /tmp/naive1 && make clean && \ echo '\n========= Compiled stage 1 =========\n' && \ CC=/tmp/naive1/ncc AR=/tmp/naive1/nar NAIVE_DIR=/tmp/naive1/ make -j16 && \ rm -rf /tmp/naive2 && cp -r /opt/naive /tmp/naive2 && make clean && \ echo '\n========= Compiled stage 2 =========\n' && \ CC=/tmp/naive2/ncc AR=/tmp/naive2/nar NAIVE_DIR=/tmp/naive2/ make -j16 && \ rm -rf /tmp/naive3 && cp -r /opt/naive /tmp/naive3 && make clean && \ echo '\n========= Compiled stage 3 =========\n' && \ CC=/tmp/naive3/ncc AR=/tmp/naive3/nar NAIVE_DIR=/tmp/naive3/ make -j16 && \ rm -rf /tmp/naive4 && cp -r /opt/naive /tmp/naive4 && make clean && \ echo '\n========= Compiled stage 4 =========\n' && \ diff /tmp/naive3/ncc /tmp/naive4/ncc && \ diff /tmp/naive3/nar /tmp/naive4/nar && \ diff /tmp/naive3/libc.a /tmp/naive4/libc.a && \ echo 'Bootstrap completed successfully - stage 3 and 4 are consistent'
Add example screenlayout script for HP Zbook Studio x360 G5
#!/bin/sh # Notes: 2048x1152 on the laptop display seems to be quite sharper than # 1920x1080. CHECK=`lsmod | grep nouveau` PROVIDERS=`xrandr --listproviders | head -n 1 | cut -d ":" -f 3` # nouveau driver not loaded, assuming hybrid mode with intel driver # run this in order to detect monitors connect via egpu for i in `seq 1 $PROVIDERS`; do xrandr --setprovideroutputsource $i 0 done # it seems like xrandr --list monitors should show connected monitors that # are not active CONNECTED=`xrandr | grep "\<connected\>" | wc -l` if [ $CONNECTED -eq 1 ]; then #xrandr --output eDP1 --mode 2048x1152 --primary xrandr --output eDP1 --mode 1920x1080 --primary elif [ $CONNECTED -eq 4 ]; then xrandr --setprovideroutputsource 1 0 xrandr --output eDP1 --mode 2048x1152 --pos 1856x1200 \ --output DisplayPort-1-2 --primary --mode 1920x1200 --pos 1920x0 \ --output DisplayPort-1-3 --mode 1920x1080 --pos 0x0 \ --output DisplayPort-1-4 --mode 1920x1080 --pos 3840x0 fi
Automate renaming of the package name via a new script. This is necessary because the package name differs on the Google Play Store (the original signing key was lost).
#!/bin/bash ############################################################################## ## ## Starcommander@github.com ## Pauls script to transfer android app! ## This is necessary because of different app-name on playstore. ## (sign key was lost by playstore user) ## ## Moves the package name from com.junjunguo.pocketmaps to com.starcom.pocketmaps ## ############################################################################## PROJ_PATH="/home/ppp/Desktop/Programmieren/map/PocketMaps/PocketMaps/" do_transfer() { cd "$PROJ_PATH" sed -i -e "s#com.junjunguo.pocketmaps#com.starcom.pocketmaps#g" app/build.gradle sed -i -e "s#com.junjunguo.pocketmaps#com.starcom.pocketmaps#g" app/src/main/AndroidManifest.xml mv app/src/main/java/com/junjunguo app/src/main/java/com/starcom find app/src/main/java/ -name "*.java" -print0 | xargs -0 --max-args 1 --replace="{}" sed -i -e "s/com.junjunguo.pocketmaps/com.starcom.pocketmaps/g" "{}" } do_transfer_back() { cd "$PROJ_PATH" sed -i -e "s#com.starcom.pocketmaps#com.junjunguo.pocketmaps#g" app/build.gradle sed -i -e "s#com.starcom.pocketmaps#com.junjunguo.pocketmaps#g" app/src/main/AndroidManifest.xml mv app/src/main/java/com/starcom app/src/main/java/com/junjunguo find app/src/main/java/ -name "*.java" -print0 | xargs -0 --max-args 1 --replace="{}" sed -i -e "s/com.starcom.pocketmaps/com.junjunguo.pocketmaps/g" "{}" } if [ -z "$1" ]; then echo "Use arg t=transfer to transfer for playstore" echo "Use arg b=back to transfer back" elif [ "$1" = "t" ]; then do_transfer echo "Finish! Now clear tmp data, and build again." elif [ "$1" = "b" ]; then do_transfer_back echo "Finish! Now clear tmp data, and build again." else echo "Wrong argument!" fi
Add shell script to build libskia.so shared lib using gn/ninja
#!/bin/bash # Run this script as `source ./build_skia.sh`. # This way the LD_LIBRARY_PATH environment variable is imported in the current shell. # NOTE: This was only tested on macOS. It requires python2 to be on the $PATH and # it must be run *outside* of a python3 venv otherwise gn tool will complain... pushd src/cpp/skia python2 tools/git-sync-deps bin/gn gen out/Shared --args='is_official_build=true is_component_build=true is_debug=false skia_enable_pdf=false skia_enable_ccpr=false skia_enable_gpu=false skia_enable_discrete_gpu=false skia_enable_nvpr=false skia_enable_skottie=false skia_enable_skshaper=false skia_use_dng_sdk=false skia_use_expat=false skia_use_gl=false skia_use_harfbuzz=false skia_use_icu=false skia_use_libgifcodec=false skia_use_libjpeg_turbo=false skia_use_libwebp=false skia_use_piex=false skia_use_sfntly=false skia_use_xps=false skia_use_zlib=false skia_use_libpng=false' ninja -C out/Shared export LD_LIBRARY_PATH=$(pwd)/out/Shared popd
Add script to handle updating translating API automagically.
# use goslate as the internet auto-translating engine in this project # https://bitbucket.org/zhuoqiang/goslate curl -o goslate.py https://bitbucket.org/zhuoqiang/goslate/raw/tip/goslate.py
Set env var to force use of node 16
# Preserve any pre-existing override so it can be restored later.
export _ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION="${ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION}"

# Pin the GitHub Actions runner to the Node 16 runtime.
export ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION=node16
Add install script for ruby 2.1.4
#!/bin/bash -ex apt-get update apt-get install -y wget build-essential autoconf bison build-essential libssl-dev libyaml-dev libreadline6-dev zlib1g-dev libncurses5-dev cd /tmp rm -rf ruby-installer mkdir ruby-installer cd ruby-installer echo "fetch latest ruby from ftp.ruby-lang.org" wget http://ftp.ruby-lang.org/pub/ruby/2.1/ruby-2.1.4.tar.gz tar -xzvf ruby-2.1.4.tar.gz echo "installing ruby 2.1.4" cd ruby-2.1.4/ && ./configure && make && make install echo "installed ruby interpreter, version:" echo `ruby -v` gem install bundler --no-ri --no-rdoc echo "installed bundler" apt-get clean rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
Add script to timestamp BitBake build
#!/bin/sh # ============================================================================================================= # Prepend timestamp to each line on stdin # # Usage: $ cat somefile.txt | prepend-timestamp.sh # # Credits: # http://stackoverflow.com/questions/21564/is-there-a-unix-utility-to-prepend-timestamps-to-lines-of-text # ============================================================================================================= awk '{ print strftime("%Y-%m-%d %H:%M:%S"), $0; fflush(); }' # === EOF ===
Add prototype script for generating standalone haddocks for the gh-pages branch
#!/usr/bin/env bash set -e ################################################################ # This script will: # # - Build documentation for all packages with stack # - Copy local packages to the output directory # - Fixup links so that any non-local-package links point to # Hackage # # You may want to change 'stackCommand' or 'remote' ################################################################ if [ "$#" -ne 1 ]; then echo "Need output directory" fi out=$1 remote=https://hackage.haskell.org/package/ stackCommand="stack --skip-ghc-check --system-ghc --nix haddock --haddock-hyperlink-source" printf "Generating docs with stack\n" indexPath=$($stackCommand 2>&1 | tee >(cat 1>&2) | grep '/doc/index.html$' | grep '/.stack-work/' | head -n1) docPath=$(dirname "$indexPath") printf "Docs generated in $docPath\n" mkdir -p "$out" go(){ printf "Copying docs to destination\n" cp -r "$docPath"/{vulkan*,Vulkan*,doc-index*,index.html,*.css,*.js,*.png} . printf "Replacing all relative links to point to Hackage\n" fd 'html$' --type f --exec \ sed -i 's|<a href="\.\./\([^/]\+\)|<a href="'"$remote"'\1/docs|g' printf "Making absolute local links relative\n" fd 'html$' --type f --exec \ sed -i 's|<a href="'"$docPath"'/|<a href="./|g' printf "Making any links to local packages on Hackage relative\n" fd . --type d --maxdepth 1 | while read d; do fd '.html$' --type f --exec sed -i 's|<a href="'"$remote$d"'/docs|<a href="../'"$d"'|g' done } (cd "$out" && go)
Add script to build and test all variants
#!/bin/sh # This script builds and runs the tests for all the configuration variants. errcho() { echo "$@" 1>&2; } DSTDIR="$1" if [ -z "$DSTDIR" ]; then errcho "DSTDIR is needed as the first positional argument." exit 1 fi if [ -d "$DSTDIR" ]; then read -p "DSTDIR exists. Do you want to clear \"$DSTDIR\"? [y/n]" yn if [ "$yn" != "y" ]; then exit 0 fi fi rm -rf "$DSTDIR" && mkdir -p "$DSTDIR" || exit 1 VARIANTS=$(cat << 'END' rel --buildtype=release -Dcompressed_ptrs=false -Dcompressed_builtins=false rel-cbuiltins --buildtype=release -Dcompressed_ptrs=false -Dcompressed_builtins=true rel-cptrs --buildtype=release -Dcompressed_ptrs=true -Dcompressed_builtins=false rel-cbuiltins-cptrs --buildtype=release -Dcompressed_ptrs=true -Dcompressed_builtins=true dbg --buildtype=debug -Db_sanitize=address -Dcompressed_ptrs=false -Dcompressed_builtins=false dbg-cbuiltins --buildtype=debug -Db_sanitize=address -Dcompressed_ptrs=false -Dcompressed_builtins=true dbg-cptrs --buildtype=debug -Db_sanitize=address -Dcompressed_ptrs=true -Dcompressed_builtins=false dbg-cbuiltins-cptrs --buildtype=debug -Db_sanitize=address -Dcompressed_ptrs=true -Dcompressed_builtins=true END ) meson setup "$DSTDIR" || exit 1 echo "$VARIANTS" | while read VARIANT; do NAME=$(echo "$VARIANT" | cut -d ' ' -f 1) FLAGS=$(echo "$VARIANT" | cut -d ' ' -f 2- | xargs) meson configure $FLAGS "$DSTDIR" if [ "$?" != "0" ]; then errcho "error when configuring variant: $NAME, with flags: $FLAGS" exit 1 fi ninja -C "$DSTDIR" if [ "$?" != "0" ]; then errcho "error when building variant: $NAME, with flags: $FLAGS" exit 1 fi ninja -C "$DSTDIR" test if [ "$?" != "0" ]; then errcho "error when testing variant: $NAME, with flags: $FLAGS" exit 1 fi done
Add script for local dev setup
#!/usr/bin/env bash # Unicode symbol emojis TASK="➡" [[ ! $(which createdb) ]] && { echo "No postgres installation found. Ensure that postgres is installed and the bin directory is in the system PATH."; exit 1; } [[ ! $(which lein) ]] && { echo "No leiningen installation found. Ensure that leiningen is installed and in the system PATH."; exit 1; } echo "$TASK Creating databases time_tracker and time_tracker_test." createdb time_tracker createdb time_tracker_test echo "$TASK Creating profiles.clj from template" cp profiles.clj.sample profiles.clj echo "$TASK Installing Dependencies" lein deps echo "$TASK Running Migrations" lein migrate echo -e "All done. Start the server with \n\n lein run\n\n and visit http://localhost:8000"
Add top-level server start script
#!/bin/bash SCRIPT="$0" SCRIPT_DIR=`dirname $SCRIPT` # TODO: (optional?) maven build task # Start server exec "$SCRIPT_DIR"/virgo-kernel/bin/startup.sh
Add method to build app server directory
include app.server.validator.AppServerValidator
include app.server.version.AppServerVersion
include base.vars.BaseVars

# Factory dispatching sub-commands; e.g.
#   AppServerFactory getAppServerDir <bundle> <app-server>
AppServerFactory(){
	# Build the app-server directory path parts and hand them to StringUtil.
	# Globals: none. Arguments: $1 bundle, $2 app server name.
	getAppServerDir(){
		# Split declaration from command substitution so a validator
		# failure is not masked by `local`'s exit status.
		local appServer
		appServer=$(AppServerValidator returnAppServer "${2}")
		local _appServerDir=(
			"$(BaseVars returnBundleDir "${1}")/"
			"${appServer}-"
			"$(AppServerVersion returnAppServerVersion "${appServer}" "$(BaseVars returnBranch "${1}")")"
		)
		StringUtil build _appServerDir
	}
	# Quote the dispatch so arguments containing spaces survive.
	"$@"
}
Add alias to quickly track remote branches.
# Create local tracking branches for remote branches and pull each one.
# $1 is deliberately left unquoted in the for-loop so a space-separated
# list of branch names expands into multiple iterations.
function git_track_branch() {
    # Fetch once up front: the fetch is loop-invariant, so running it per
    # branch (as before) only repeated identical work.
    git fetch origin
    for git_track_branch_BRANCH in $1
    do
        git checkout --track -b $git_track_branch_BRANCH origin/$git_track_branch_BRANCH
        git pull
    done
}
Add wrapper script to pipenv run
#!/bin/bash
# Wrapper that runs vctools inside the project's pipenv environment,
# creating the in-project virtualenv on first use.

# Quote every expansion so the wrapper survives paths containing spaces.
RPATH=$(dirname "$(realpath "$0")")

export PIPENV_VENV_IN_PROJECT=true
export PIPENV_PIPFILE="${RPATH}/Pipfile"

# Bootstrap the virtualenv on first run.
if [[ ! -d "${RPATH}/.venv" ]]; then
    pipenv install
fi

# '&&' guards against running vctools from the wrong directory if cd fails.
cd "${RPATH}" && pipenv run vctools "$@"
Add install files for ubuntu
#!/bin/bash
# Install docker-engine and docker-compose on Ubuntu.

sudo apt-get update
sudo apt-get install docker-engine

# Smoke-test the docker installation.
sudo docker run hello-world

# NOTE(review): VERSION_NUM is a placeholder - substitute a real compose
# release tag before running this.
# Writing to /usr/local/bin needs root: the original shell redirection
# ('> /usr/local/bin/docker-compose') ran as the unprivileged user and
# would fail; 'curl -o' under sudo fixes that. $(...) replaces backticks.
sudo curl -L "https://github.com/docker/compose/releases/download/VERSION_NUM/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
docker-compose --version
Add a helper to move things upstream
#!/bin/bash

# Helper to move UPSTREAM commits from the current branch into a local
# k8s.io/kubernetes checkout: generates a patch for the selected commit
# range and replays it there as a new branch, then rebases interactively.
# See HACKING.md for usage

set -o errexit
set -o nounset
set -o pipefail

OS_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${OS_ROOT}/hack/common.sh"
source "${OS_ROOT}/hack/util.sh"
os::log::install_errexit

# Go to the top of the tree.
cd "${OS_ROOT}"

# Default TMPDIR to /tmp: under 'set -o nounset' an unset TMPDIR (common
# on Linux) would abort the script right here.
patch="${TMPDIR:-/tmp}/patch"
kubedir="../../../k8s.io/kubernetes"

if [[ ! -d "${kubedir}" ]]; then
  echo "Expected ${kubedir} to exist" 1>&2
  exit 1
fi

if [[ -z "${NO_REBASE-}" ]]; then
  # Kube level currently vendored in Godeps, used as the base commit.
  lastkube="$(go run ${OS_ROOT}/hack/version.go ${OS_ROOT}/Godeps/Godeps.json k8s.io/kubernetes/pkg/api)"
fi
# NOTE(review): when NO_REBASE is set, lastkube stays unset and the
# "${lastkube}" uses below will trip nounset — confirm intended usage.

branch="$(git rev-parse --abbrev-ref HEAD)"
selector="origin/master...${branch}"
if [[ -n "${1-}" ]]; then
  selector="$1"
fi

echo "++ Generating patch for ${selector} onto ${lastkube} ..." 2>&1
git diff -p --raw --relative=Godeps/_workspace/src/k8s.io/kubernetes/ "${selector}" -- Godeps/_workspace/src/k8s.io/kubernetes/ > "${patch}"

pushd "${kubedir}" > /dev/null
os::build::require_clean_tree

# create a new branch
git checkout -b "${branch}" "${lastkube}"

# apply the changes
if ! git apply --reject "${patch}"; then
  echo 2>&1
  echo "++ Patch does not apply cleanly, possible overlapping UPSTREAM patches?" 2>&1
  exit 1
fi

# generate a new commit, fetch the latest, and attempt a rebase to master
git add .
git commit -m "UPSTREAMED"
git fetch
git rebase origin/master -i

echo 2>&1
echo "++ Done" 2>&1
Add tests for git check-ref-format
#!/bin/sh

test_description='Test git check-ref-format'

. ./test-lib.sh

# Assert that check-ref-format accepts the given ref name.
valid_ref() {
	test_expect_success "ref name '$1' is valid" \
		"git check-ref-format '$1'"
}

# Assert that check-ref-format rejects the given ref name.
invalid_ref() {
	test_expect_success "ref name '$1' is not valid" \
		"test_must_fail git check-ref-format '$1'"
}

valid_ref 'heads/foo'
invalid_ref 'foo'
valid_ref 'foo/bar/baz'
valid_ref 'refs///heads/foo'
invalid_ref 'heads/foo/'
invalid_ref './foo'
invalid_ref '.refs/foo'
invalid_ref 'heads/foo..bar'
invalid_ref 'heads/foo?bar'
valid_ref 'foo./bar'
invalid_ref 'heads/foo.lock'
valid_ref 'heads/foo@bar'
invalid_ref 'heads/v@{ation'
invalid_ref 'heads/foo\bar'

# Checking out origin/master detaches HEAD, so @{-1} resolves to a sha1
# while @{-2} resolves to the branch name "master".
test_expect_success "check-ref-format --branch @{-1}" '
	T=$(git write-tree) &&
	sha1=$(echo A | git commit-tree $T) &&
	git update-ref refs/heads/master $sha1 &&
	git update-ref refs/remotes/origin/master $sha1 &&
	git checkout master &&
	git checkout origin/master &&
	git checkout master &&
	refname=$(git check-ref-format --branch @{-1}) &&
	test "$refname" = "$sha1" &&
	refname2=$(git check-ref-format --branch @{-2}) &&
	test "$refname2" = master'

test_done
Add script for updating bt_compidtostr() implementation
#!/bin/bash # Download the list of company IDs from bluetooth.org and generate a diff which # can be applied to source tree to update bt_compidtostr(). Usage: # # 1) ./tools/update_compids.sh | git apply -p0 # 2) Inspect changes to make sure they are sane # 3) git commit -m "lib: Update list of company identifiers" lib/bluetooth.c # # Requires html2text: http://www.mbayer.de/html2text/ # set -e -u tmpdir=$(mktemp -d) trap "rm -rf $tmpdir" EXIT mkdir $tmpdir/lib cp lib/bluetooth.c $tmpdir/lib/bluetooth.c.orig cp lib/bluetooth.c $tmpdir/lib/bluetooth.c cd $tmpdir path=en-us/specification/assigned-numbers-overview/company-identifiers # Use "iconv -c" to strip unwanted unicode characters # Also strip <input> tags of type checkbox because html2text generates UTF-8 # for them in some distros even when using -ascii (e.g. Fedora 18) curl https://www.bluetooth.org/$path | iconv -c -f utf8 -t ascii | \ sed '/<input.*type="checkbox"/d' | \ html2text -ascii -o identifiers.txt >/dev/null # Some versions of html2text do not replace &amp; (e.g. Fedora 18) sed -i 's/&amp;/\&/g' identifiers.txt sed -n '/^const char \*bt_compidtostr(int compid)/,/^}/p' \ lib/bluetooth.c > old.c echo -e 'const char *bt_compidtostr(int compid)\n{\n\tswitch (compid) {' > new.c cat identifiers.txt | perl -ne 'm/^(\d+)\s+0x[0-9a-f]+\s+(.*)/i && print "\tcase $1:\n\t\treturn \"$2\";\n"' >> new.c if ! grep -q "return \"" new.c; then echo "ERROR: could not parse company IDs from bluetooth.org" >&2 exit 1 fi echo -e '\tcase 65535:\n\t\treturn "internal use";' >> new.c echo -e '\tdefault:\n\t\treturn "not assigned";\n\t}\n}' >> new.c diff -Naur old.c new.c | patch -sp0 lib/bluetooth.c diff -Naur lib/bluetooth.c.orig lib/bluetooth.c
Add simple Stack commands list
# Prompt helper: prints a colored "sandboxed"/"not sandboxed" marker when
# the current directory contains a *.stack file.
function stack_sandbox_info() {
  stack_files=(*.stack(N))  # (N) = nullglob: empty array when nothing matches
  if [ $#stack_files -gt 0 ]; then
    if [ -f stack.sandbox.config ]; then
      echo "%{$fg[green]%}sandboxed%{$reset_color%}"
    else
      echo "%{$fg[red]%}not sandboxed%{$reset_color%}"
    fi
  fi
}

# zsh completion for the 'stack' command: offers subcommand names with
# one-line descriptions for the first argument.
function _stack_commands() {
  local ret=1 state
  _arguments ':subcommand:->subcommand' && ret=0

  case $state in
    subcommand)
      # "name:description" pairs consumed by _describe below.
      subcommands=(
        "build:Build the project(s) in this directory/configuration"
        "install:Build executables and install to a user path"
        "test:Build and test the project(s) in this directory/configuration"
        "bench:Build and benchmark the project(s) in this directory/configuration"
        "haddock:Generate haddocks for the project(s) in this directory/configuration"
        "new:Create a brand new project"
        "init:Initialize a stack project based on one or more stack packages"
        "solver:Use a dependency solver to try and determine missing extra-deps"
        "setup:Get the appropriate ghc for your project"
        "path:Print out handy path information"
        "unpack:Unpack one or more packages locally"
        "update:Update the package index"
        "upgrade:Upgrade to the latest stack (experimental)"
        "upload:Upload a package to Hackage"
        "dot:Visualize your project's dependency graph using Graphviz dot"
        "exec:Execute a command"
        "ghc:Run ghc"
        "ghci:Run ghci in the context of project(s)"
        "ide:Run ide-backend-client with the correct arguments"
        "runghc:Run runghc"
        "clean:Clean the local packages"
        "docker:Subcommands specific to Docker use"
      )
      _describe -t subcommands 'stack subcommands' subcommands && ret=0
  esac

  return ret
}

compdef _stack_commands stack
Add support for completions on zsh
# Only set up completion in interactive shells.
if [[ ! -o interactive ]]; then
    return
fi

# Register _swiftenv as the completion handler for the swiftenv command.
compctl -K _swiftenv swiftenv

# Complete swiftenv: top-level subcommands come from 'swiftenv commands';
# deeper arguments are delegated to 'swiftenv completions <words so far>'.
_swiftenv() {
  local words completions
  # read -cA: load the current command line into the words array.
  read -cA words

  if [ "${#words}" -eq 2 ]; then
    completions="$(swiftenv commands)"
  else
    completions="$(swiftenv completions ${words[2,-2]})"
  fi

  # Split on newlines into the reply array compctl expects.
  reply=(${(ps:\n:)completions})
}
Add a test that 'rehash' isn't run.
# ------------------------------------------------------------------------------------------------- # Copyright (c) 2020 zsh-syntax-highlighting contributors # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, are permitted # provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this list of conditions # and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, this list of # conditions and the following disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of the zsh-syntax-highlighting contributors nor the names of its contributors # may be used to endorse or promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND # FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER # IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# -------------------------------------------------------------------------------------------------
# -*- mode: zsh; sh-indentation: 2; indent-tabs-mode: nil; sh-basic-offset: 2; -*-
# vim: ft=zsh sw=2 ts=2 et
# -------------------------------------------------------------------------------------------------

# Hash a command name manually so we can detect if highlighting wipes the
# command hash table.
hash zsyh-hashed-command=/usr/bin/env
BUFFER='doesnotexist; zsyh-hashed-command'

# Test that highlighting "doesnotexist" does not invoke the "rehash" builtin,
# which would delete hashed commands (such as "zsyh-hashed-command").
expected_region_highlight=(
  "1 12 unknown-token" # doesnotexist
  "13 13 commandseparator" # ;
  "15 33 hashed-command" # zsyh-hashed-command
)
Clean up a test warning
# Regression test: after garbage collection, no rows should remain in the
# Refs table of the nix database.
source common.sh

clearStore

# Number of chained store paths to register.
max=500

reference=$NIX_STORE_DIR/abcdef
touch $reference
(echo $reference && echo && echo 0) | nix-store --register-validity

echo "making registration..."

# Build a registration file: path N references $reference and path N+1
# (the last one wraps back to $reference).
set +x
for ((n = 0; n < $max; n++)); do
    storePath=$NIX_STORE_DIR/$n
    echo -n > $storePath
    ref2=$NIX_STORE_DIR/$((n+1))
    if test $((n+1)) = $max; then
        ref2=$reference
    fi
    echo $storePath; echo; echo 2; echo $reference; echo $ref2
done > $TEST_ROOT/reg_info
set -x

echo "registering..."

nix-store --register-validity < $TEST_ROOT/reg_info

echo "collecting garbage..."
# Root only $reference so everything else is collectable.
ln -sfn $reference "$NIX_STATE_DIR"/gcroots/ref
nix-store --gc

# NOTE(review): this check hard-requires a sqlite3 binary on PATH and
# produces a test error/warning when it is missing.
if test "$(sqlite3 ./test-tmp/db/db.sqlite 'select count(*) from Refs')" -ne 0; then
    echo "referrers not cleaned up"
    exit 1
fi
# Regression test: after garbage collection, no rows should remain in the
# Refs table of the nix database.
source common.sh

clearStore

# Number of chained store paths to register.
max=500

reference=$NIX_STORE_DIR/abcdef
touch $reference
(echo $reference && echo && echo 0) | nix-store --register-validity

echo "making registration..."

# Build a registration file: path N references $reference and path N+1
# (the last one wraps back to $reference).
set +x
for ((n = 0; n < $max; n++)); do
    storePath=$NIX_STORE_DIR/$n
    echo -n > $storePath
    ref2=$NIX_STORE_DIR/$((n+1))
    if test $((n+1)) = $max; then
        ref2=$reference
    fi
    echo $storePath; echo; echo 2; echo $reference; echo $ref2
done > $TEST_ROOT/reg_info
set -x

echo "registering..."

nix-store --register-validity < $TEST_ROOT/reg_info

echo "collecting garbage..."
# Root only $reference so everything else is collectable.
ln -sfn $reference "$NIX_STATE_DIR"/gcroots/ref
nix-store --gc

# Only inspect the database when sqlite3 is available. Two tests joined by
# '&&' instead of the deprecated/ambiguous '[ ... -a ... ]' (POSIX marks
# -a/-o obsolescent); short-circuiting also avoids evaluating the empty
# command substitution against -ne when sqlite3 is missing.
if [ -n "$(type -p sqlite3)" ] && [ "$(sqlite3 ./test-tmp/db/db.sqlite 'select count(*) from Refs')" -ne 0 ]; then
    echo "referrers not cleaned up"
    exit 1
fi
Add script to check 'go fmt' status
#!/bin/bash
# Fail (exit 1) when any tracked Go source file is not gofmt-formatted.

# '\.go$' matches a literal ".go" suffix; the original '.go$' treated the
# dot as "any character". $(...) replaces legacy backticks.
fmtcount=$(git ls-files | grep '\.go$' | xargs gofmt -l 2>&1 | wc -l)
if [ "$fmtcount" -gt 0 ]; then
    echo "run 'go fmt ./...' to format your source code."
    exit 1
fi
Add a script to generate the list of fixed bugs
#!/bin/bash # This script is used to generate the list of fixed bugs that # appears in the release notes files, with HTML formatting. # # Note: This script could take a while until all details have # been fetched from bugzilla. # # Usage examples: # # $ bin/bugzilla_mesa.sh mesa-9.0.2..mesa-9.0.3 # $ bin/bugzilla_mesa.sh mesa-9.0.2..mesa-9.0.3 > bugfixes # $ bin/bugzilla_mesa.sh mesa-9.0.2..mesa-9.0.3 | tee bugfixes # $ DRYRUN=yes bin/bugzilla_mesa.sh mesa-9.0.2..mesa-9.0.3 # $ DRYRUN=yes bin/bugzilla_mesa.sh mesa-9.0.2..mesa-9.0.3 | wc -l # regex pattern: trim before url trim_before='s/.*\(http\)/\1/' # regex pattern: trim after url trim_after='s/\(show_bug.cgi?id=[0-9]*\).*/\1/' # regex pattern: always use https use_https='s/http:/https:/' # extract fdo urls from commit log urls=$(git log $* | grep 'bugs.freedesktop.org/show_bug' | sed -e $trim_before -e $trim_after -e $use_https | sort | uniq) # if DRYRUN is set to "yes", simply print the URLs and don't fetch the # details from fdo bugzilla. #DRYRUN=yes if [ "x$DRYRUN" = xyes ]; then for i in $urls do echo $i done else echo "<ul>" echo "" for i in $urls do id=$(echo $i | cut -d'=' -f2) summary=$(wget --quiet -O - $i | grep -e '<title>.*</title>' | sed -e 's/ *<title>Bug [0-9]\+ &ndash; \(.*\)<\/title>/\1/') echo "<li><a href=\"$i\">Bug $id</a> - $summary</li>" echo "" done echo "</ul>" fi
Add script to fix PEP8 errors
#!/usr/bin/env bash # Fail on any error set -e # Setup the environment echo -n "Setup python virtual environment.." source scripts/bootstrap.sh >/dev/null echo "Done" # Fix PEP8 echo -n "Fix PEP8.." python3 -m autopep8 --in-place --aggressive --verbose -r library tests echo "Done" # Check PEP8 echo -n "Check PEP8 conformance.." python3 -m pep8 --show-source --show-pep8 --count library tests echo "Done"
Add an Outline test tool for linux
#!/bin/bash # This script allows debugging outline-go-tun2socks directly on linux. # Instructions: # 1. Install the Outline client for Linux, connect to a server, and disconnect. # This installs the outline controller service. # 2. $ git update-index --assume-unchanged connect_linux.sh # This helps to avoid accidentally checking in your proxy credentials. # 3. Edit this script to add the IP, port, and password for your test proxy. # 4. $ ./connect_linux.sh # 5. Ctrl+C to stop proxying readonly PROXY_IP="..." readonly PROXY_PORT="..." readonly PROXY_PASSWORD="..." go build -v . echo "{\"action\":\"configureRouting\",\"parameters\":{\"proxyIp\":\"${PROXY_IP}\",\"routerIp\":\"10.0.85.1\"}}" | socat UNIX-CONNECT:/var/run/outline_controller - ./electron -proxyHost "${PROXY_IP}" -proxyPort "${PROXY_PORT}" -proxyPassword "${PROXY_PASSWORD}" -logLevel debug -tunName outline-tun0 echo '{"action":"resetRouting","parameters":{}}' | socat UNIX-CONNECT:/var/run/outline_controller -
Monitor current IO, JVM heap usage
### IO monitoring # iostat watch -n 1 iostat -y 1 1 ### JVM monitoring # https://stackoverflow.com/questions/14464770/how-to-check-heap-usage-of-a-running-jvm-from-the-command-line/41748357#41748357 jstat -gc $(pgrep java) | awk '{print $3+$4+$6+$8}'
Add script for updating version
#!/bin/bash set -e cd "$(dirname "$(readlink -f "$BASH_SOURCE")")" versions=( "$@" ) if [ ${#versions[@]} -eq 0 ]; then versions=( */ ) fi versions=( "${versions[@]%/}" ) for version in "${versions[@]}"; do dist="$(grep '^FROM debian:' "$version/Dockerfile" | cut -d: -f2)" fullVersion="$(set -x; docker run --rm debian:"$dist" bash -c "apt-get update &> /dev/null && apt-cache show openjdk-$version-jdk | grep '^Version: ' | head -1 | cut -d' ' -f2")" fullVersion="${fullVersion%%[-~]*}" ( set -x sed -ri 's/(ENV JAVA_VERSION) .*/\1 '"$fullVersion"'/g' "$version/Dockerfile" ) done
Add script to run xmltweet on all the concatenated files.
#!/bin/bash

# This script will run xmltweet seperately on all files in $INDIR, outputting the tokenized representations and dictionaries to $OUTDIR.
# Tokenizer must have the new behavior

readonly TOKENIZER=/home/anasrferreira/xmltweet2.exe #Location of tokenizer
readonly INDIR=/home/schillaci/SampleData/concatenated
readonly OUTDIR=/home/schillaci/SampleData/tokenized

# Let the shell glob directly instead of parsing 'ls' output, which breaks
# on any filename containing whitespace or glob characters.
for dat in "${INDIR}"/*.xml; # Loop over all .xml files
do
    filename=${dat##*/}
    # Runs xmltweet on the .xml file and echoes the time taken.
    echo $(time ${TOKENIZER} -i ${dat} -o ${OUTDIR}/ -d ${OUTDIR}/${filename//.xml/}_dict )
done
Add script to copy apidocs and xref to netty-website
#!/bin/bash
# Copy generated apidocs and xref output into a netty-website checkout,
# under a subdirectory named after the current git branch.
#
# Usage: copy_docs.sh <netty-website directory>
set -e

if [ "$#" -ne 1 ]; then
    echo "Expected netty-website directory"
    exit 1
fi

if [ ! -d "$1" ]; then
    echo "$1 is not a directory"
    exit 1
fi

BRANCH=$(git branch --show-current)

WEBSITE_API_DIR="$1"/"$BRANCH"/api/
WEBSITE_XREF_DIR="$1"/"$BRANCH"/xref/
API_DIR=all/target/api/
XREF_DIR=all/target/xref/

if [ ! -d "$API_DIR" ]; then
    echo "$API_DIR not exists, didn't run the release process yet?"
    exit 1
fi

if [ ! -d "$XREF_DIR" ]; then
    echo "$XREF_DIR not exists, didn't run the release process yet?"
    exit 1
fi

# Robustness: create the destination directories if this is the first
# release on a new branch (cp would otherwise fail).
mkdir -p "$WEBSITE_API_DIR" "$WEBSITE_XREF_DIR"

echo "Delete old javadocs and xref files"
rm -rf "$WEBSITE_API_DIR"/*
rm -rf "$WEBSITE_XREF_DIR"/*

echo "Copy javadocs and xref files"
cp -r "$API_DIR"/* "$WEBSITE_API_DIR"
cp -r "$XREF_DIR"/* "$WEBSITE_XREF_DIR"
Update env file with new emotes
#!/bin/bash
# Refresh the EMOTES entry in .env.example from the upstream emote list.

# '.default | .[]' yields one quoted emote name per line.
emotes=$(curl https://raw.githubusercontent.com/memelabs/chat-gui/master/assets/emotes.json | jq '.default | .[]')

# Build a comma-separated list, stripping the quotes jq emits around names.
list=""
for e in $emotes; do
    list+=$(echo $e | sed -e 's/^"//' -e 's/"$//'),
done

# ${list%,} drops the trailing comma (replaces the rev|cut -c 2-|rev trick).
# '-i -e' instead of '-ie': sed parses '-ie' as -i with backup suffix 'e',
# silently leaving a stray ".env.examplee" backup file behind.
sed -i -e "s/^EMOTES=.*/EMOTES=${list%,}/" .env.example
Add install to dev sh script
#!/bin/bash
# OpenPGP Zimbra Secure is the open source digital signature and encrypt for Zimbra Collaboration Open Source Edition software
# Copyright (C) 2016-present Nguyen Van Nguyen <nguyennv1981@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>
# ****# END LICENSE BLOCK *****
#
# OpenPGP MIME Secure Email Zimlet
#
# Written by Nguyen Van Nguyen <nguyennv1981@gmail.com>

echo "Install OpenPGP Zimbra Secure to _dev zimlet folder"
rm -Rf /opt/zimbra/zimlets-deployed/_dev/openpgp_zimbra_secure
# -p: do not fail when the _dev folder already exists (the common case on
# repeat installs; plain mkdir errored there).
mkdir -p /opt/zimbra/zimlets-deployed/_dev
cp -rv openpgp_zimbra_secure /opt/zimbra/zimlets-deployed/_dev

echo "Flush all zimbra cached entries"
su zimbra -c "/opt/zimbra/bin/zmprov fc all"

exit 0
Add a fedora setup script
#!/bin/sh # Extra Repos sudo dnf copr enable oleastre/kitty-terminal sudo dnf copr enable daftaupe/gopass sudo dnf copr enable thindil/universal-ctags sudo dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo # Install packages sudo dnf install \ docker-ce \ fortune-mod \ git \ gopass \ httpie \ kitty \ neovim \ nodejs \ php-cli \ php-json \ php-mbstring \ php-xml \ pidgin \ pidgin-libnotify \ pidgin-otr \ postgresql \ python2-neovim \ python3-neovim \ the_silver_searcher \ thunderbird \ tmux \ universal-ctags \ urlview \ xclip \ xsel \ zsh sudo gem install lolcat tmuxinator # zsh Setup sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)" # Docker setup sudo groupadd docker sudo usermod -aG docker $USER sudo systemctl enable docker # Install Docker Compose sudo curl -L $(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep "docker-compose-$(uname -s)-$(uname -m)\"" | grep browser_download_url | cut -d\" -f4) -o /usr/local/bin/docker-compose sudo chmod +x /usr/local/bin/docker-compose # GNUPG # TODO: How to do this securely? 
# Pass Setup # TODO: Clone from personal repo sudo ln -s /usr/bin/gopass /usr/local/bin/pass curl -sSL $(curl -s https://api.github.com/repos/passff/passff-host/releases/latest | grep "install_host_app.sh" | grep browser_download_url | cut -d\" -f4) | bash -s -- firefox # Dotfiles rm ~/.bashrc rm ~/.bash_profile rm ~/.zshrc git clone git@github.com:jessarcher/dotfiles.git ~/.dotfiles ~/.dotfiles/install # SSH Keys # Composer EXPECTED_SIGNATURE="$(wget -q -O - https://composer.github.io/installer.sig)" php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');" ACTUAL_SIGNATURE="$(php -r "echo hash_file('SHA384', 'composer-setup.php');")" if [ "$EXPECTED_SIGNATURE" != "$ACTUAL_SIGNATURE" ] then >&2 echo 'ERROR: Invalid installer signature' rm composer-setup.php exit 1 fi sudo php composer-setup.php --install-dir=/usr/local/bin --filename=composer rm composer-setup.php sudo chmod +x /usr/local/bin/composer
Add underscore as dependency for Spark
#!/bin/bash ## Creates a self-contained spark.js and writes it stdout. set -e PACKAGES_DIR=`dirname $0`/../packages echo 'Meteor = {};' cat $PACKAGES_DIR/uuid/uuid.js cat $PACKAGES_DIR/deps/deps.js cat $PACKAGES_DIR/deps/deps-utils.js cat $PACKAGES_DIR/liverange/liverange.js cat $PACKAGES_DIR/universal-events/listener.js cat $PACKAGES_DIR/universal-events/events-ie.js cat $PACKAGES_DIR/universal-events/events-w3c.js cat $PACKAGES_DIR/domutils/domutils.js cat $PACKAGES_DIR/spark/spark.js cat $PACKAGES_DIR/spark/patch.js
#!/bin/bash
## Creates a self-contained spark.js and writes it stdout.

set -e

# $(...) instead of legacy backticks; quote "$0" and the uses below so the
# build works from a checkout path containing spaces.
PACKAGES_DIR=$(dirname "$0")/../packages

# Minimal Meteor shim so the package files can attach to it.
echo 'Meteor = {};'

# Concatenate the packages in dependency order (underscore first, since
# the later packages depend on it).
cat "$PACKAGES_DIR"/underscore/underscore.js
cat "$PACKAGES_DIR"/uuid/uuid.js
cat "$PACKAGES_DIR"/deps/deps.js
cat "$PACKAGES_DIR"/deps/deps-utils.js
cat "$PACKAGES_DIR"/liverange/liverange.js
cat "$PACKAGES_DIR"/universal-events/listener.js
cat "$PACKAGES_DIR"/universal-events/events-ie.js
cat "$PACKAGES_DIR"/universal-events/events-w3c.js
cat "$PACKAGES_DIR"/domutils/domutils.js
cat "$PACKAGES_DIR"/spark/spark.js
cat "$PACKAGES_DIR"/spark/patch.js