blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
f9894a2bf425be1ac5ddbe7eb81b5d1cfc19b272 | Shell | zycbobby/dockerfile | /postgres-fts/run-docker-postgres.sh | UTF-8 | 800 | 2.921875 | 3 | [] | no_license | docker run -p 45432:5432 -e POSTGRES_PASSWORD=123456 -d postgres:ch
# Wait for the freshly started postgres container to accept connections,
# then create the database and the zhparser full-text-search configuration.
BASEDIR=$(dirname "$0")
echo "$BASEDIR"
cd "$BASEDIR/.." || exit 1

# Every psql call must target the host port published by the
# `docker run -p 45432:5432` above. The readiness loop previously polled
# port 35432 and therefore could never succeed against this container.
PGPORT=45432
PGPASSWORD=123456
export PGPASSWORD

while :
do
echo "try to connect to database"
# Creating the database doubles as the readiness probe: it exits 0 only
# once the server is up and accepting connections.
psql -p "$PGPORT" -h localhost -U postgres -c "create database im;"
createResult=$?
echo "$createResult"
if [ "$createResult" -eq 0 ]; then
break
else
echo "wait for 1s"
sleep 1
fi
done

# Install the Chinese word-segmentation parser and wire it into a
# text search configuration.
psql -p "$PGPORT" -h localhost -U postgres -c "CREATE EXTENSION zhparser;"
psql -p "$PGPORT" -h localhost -U postgres -c "CREATE TEXT SEARCH CONFIGURATION testzhcfg (PARSER = zhparser);"
psql -p "$PGPORT" -h localhost -U postgres -c "ALTER TEXT SEARCH CONFIGURATION testzhcfg ADD MAPPING FOR n,v,a,i,e,l WITH simple;"
| true |
3c82c88bffa85cf274df92d14f786766ac39cb28 | Shell | shen390s/workenv | /scripts/rwin | UTF-8 | 249 | 2.921875 | 3 | [] | no_license | #!/bin/sh
# Locate the rdesktop client; abort if it is not installed.
# `command -v` is the POSIX replacement for `which`, and `exit` (not
# `return`, which is only valid inside a function) leaves the script.
_rdesktop=$(command -v rdesktop)
if [ "X$_rdesktop" = "X" ]; then
	echo "Please install rdesktop package"
	exit 1
fi

# Resolve the target host from the first argument and set up the RDP
# session options: 1024x768 geometry, 24-bit colour, Administrator login.
_host=$(hostip "$1")
_geom="-g 1024x768"
_color="-a 24"
_user="-u Administrator"
exec $_rdesktop $_user $_color $_geom $_host | true |
8e315ea3da94aab7f5aed46d7553e04005216958 | Shell | andresguisado/clearscore | /setup.sh | UTF-8 | 295 | 2.84375 | 3 | [] | no_license | #!/bin/bash
#@andres.guisado@aguisado.com
# Capture the directory the script is run from.
# Use a dedicated variable instead of reassigning the shell-special $PWD.
basedir=$(pwd)
# Create the "clearscore" container: nginx listening on host port 8080,
# serving the app's source from ./src as a read-only volume.
docker run --name clearscore -v "$basedir/src":/usr/share/nginx/html:ro -d -p 8080:80 nginx
| true |
32082b19e3b9bdbeece59687805a62c736618c99 | Shell | pachun/old-dotfiles | /zshrc | UTF-8 | 4,126 | 3.015625 | 3 | [] | no_license | alias ssh="TERM=xterm-256color ssh"
source "/usr/local/share/zsh/site-functions/git-completion.bash"
autoload -U compinit && compinit
# use neovim
export VISUAL=nvim
export EDITOR=$VISUAL
alias vim='nvim'
# stuff since I'm lazy
alias ..="cd .."
alias c="clear"
alias e="exit"
alias t="tmux"
alias d="detox test"
alias b="bundle"
alias be="bundle exec"
alias migrate="rails db:migrate db:test:prepare"
alias s="bundle exec bin/rspec"
alias g="hub"
alias gs="g status"
alias pr="g pull-request"
alias gco="g checkout"
alias gb="g branch"
alias gr="g rebase"
alias gri="g rebase -i"
alias gl="g log"
alias gm="g merge"
alias x="gitx"
alias gaph="git log --graph --all -30"
alias wip="git add .; git commit -am 'wip'"
alias testcoverage="DISABLE_SPRING=true COVERAGE=true bundle exec rspec"
alias kill3000="kill -9 $(lsof -ti tcp:3000)"
alias fuck='sudo $(history -p \!\!)'
alias rmlocal="git branch | grep -v \"master\" | xargs git branch -D"
# history
setopt append_history # Append, not replace
setopt inc_append_history # Immediately append history
setopt always_to_end # Always go to end of line on complete
setopt correct # Correct typos
setopt hist_ignore_dups # Do not show dupes in history
setopt hist_ignore_space # Ignore commands starting with space
setopt prompt_subst # Necessary for pretty prompts
HISTSIZE=10000
SAVEHIST=10000
HISTFILE=~/.history
# use vi mode instead of emacs on the command line
bindkey -v
# remap escape to jj
bindkey -M viins 'jj' vi-cmd-mode
# Josh's zsh prompt config:
#
# makes color constants available
autoload -U colors
colors
# enable colored output from ls, etc. on FreeBSD-based systems
export CLICOLOR=1
setopt promptsubst
# Print the current git branch for the prompt, truncated to a fifth of the
# terminal width (with a "..." suffix) when it is too long.
# Prints nothing when the cwd is not inside a git repository.
_git_prompt_info() {
  ref=$(git symbolic-ref HEAD 2> /dev/null)
  # "$ref" must be quoted: with an unquoted empty value this became the
  # one-argument test `[ -n ]`, which is always true, so the guard never
  # fired outside git repositories.
  if [ -n "$ref" ]; then
    branch_name="${ref#refs/heads/}"
    branch_name_max_length=$(($COLUMNS/5))
    if [ ${#branch_name} -gt $branch_name_max_length ]; then
      # zsh subscript syntax: first (max-3) characters of the branch name.
      echo "$branch_name[0,$(($branch_name_max_length-3))]..."
    else
      echo $branch_name
    fi
  fi
}
# Classify the working tree from the `git status` output that precmd caches
# in /tmp/git-status-$$.
# Prints exactly one of: changed | pending | untracked | unchanged.
# (first match wins, in that priority order)
_git_status() {
  git_status=$(cat "/tmp/git-status-$$")
  case "$git_status" in
    *"Changes not staged"*)      echo "changed" ;;
    *"Changes to be committed"*) echo "pending" ;;
    *"Untracked files"*)         echo "untracked" ;;
    *)                           echo "unchanged" ;;
  esac
}
# Colour the branch name ($1) according to the current git status:
#   changed → red, pending → yellow, unchanged → green, untracked → cyan.
# An empty argument is echoed back uncoloured (an empty line), matching
# the original behaviour.
_git_prompt_color() {
  if [ -n "$1" ]; then
    case "$(_git_status)" in
      changed)   echo "$(_red $1)" ;;
      pending)   echo "$(_yellow $1)" ;;
      unchanged) echo "$(_green $1)" ;;
      untracked) echo "$(_cyan $1)" ;;
    esac
  else
    echo "$1"
  fi
}
# Wrap some text ($1) in zsh prompt escapes that render it bold in the
# colour named by $2 (a key into zsh's $fg_bold table).
# Empty input produces no output.
_color() {
  local text="$1" hue="$2"
  if [ -n "$text" ]; then
    printf '%s\n' "%{$fg_bold[$hue]%}$text%{$reset_color%}"
  fi
}
# Prefix $1 with a single space, but emit nothing for an empty argument —
# used to join optional prompt segments without leaving stray spaces.
_separate() { if [ -n "$1" ]; then echo " $1"; fi }
# Per-colour shorthands around _color.
_grey() { echo "$(_color "$1" grey)" }
_yellow() { echo "$(_color "$1" yellow)" }
_green() { echo "$(_color "$1" green)" }
_red() { echo "$(_color "$1" red)" }
_cyan() { echo "$(_color "$1" cyan)" }
_blue() { echo "$(_color "$1" blue)" }
# Prompt path segments: zsh expands %~ (full path, ~-abbreviated) and
# %c (trailing directory component) when the prompt is rendered.
_full_path() { echo "$(_blue "%~")" }
_working_directory() { echo "$(_blue "%c")" }
# Current branch name, coloured by working-tree state.
_colored_git_branch() { echo "$(_git_prompt_color "$(_git_prompt_info)")" }
# ✘ (red) in vi command mode, ✔ (green) otherwise; $VIMODE is kept up to
# date by the zle-keymap-select widget defined below in this file.
_display_current_vim_mode() {
  if [[ $VIMODE == 'vicmd' ]]; then
    echo "$(_red "✘")"
  else
    echo "$(_green "✔")"
  fi
}
# zsh line-editor hooks: record the active keymap (viins/vicmd) in $VIMODE
# and redraw the prompt so the vi-mode indicator updates immediately.
function zle-line-init zle-keymap-select {
  VIMODE=$KEYMAP
  zle reset-prompt
}
zle -N zle-line-init
zle -N zle-keymap-select
# Before each prompt, cache `git status` output for _git_status to parse.
# `>!` forces the write even under zsh's noclobber option. The surrounding
# $( ) substitutes the (empty, fully redirected) output as a command, which
# is a no-op — presumably an idiosyncratic way of silencing the call.
function precmd {
  $(git status 2> /dev/null >! "/tmp/git-status-$$")
}
# Assemble the prompt: cwd, optional coloured git branch, vi-mode marker.
# Single quotes defer the $( ) substitutions to render time (promptsubst).
PROMPT='$(_working_directory)$(_separate $(_colored_git_branch)) $(_display_current_vim_mode) '

# needs homebrew installed asdf
source /usr/local/opt/asdf/asdf.sh

# Local config
[[ -f ~/.zshrc.local ]] && source ~/.zshrc.local
| true |
ae3f129b76f6c4473ba18ba33614115831805cdd | Shell | sgouros/linguana | /db/export.sh | UTF-8 | 630 | 2.578125 | 3 | [] | no_license | # προσοχή αυτό πρέπει να τρέχει μόνο στον κεντρικο server. Στους άλλους Η/Υ δεν έχει νόημα
# Timestamped snapshot suffix, e.g. 2024.01.31_09.15
DATE="$(date +"%Y.%m.%d_%H.%M")"

VOC_DB_NAME="linguana_vocabulary"
VOC_OUTPUT_FILE="$VOC_DB_NAME"_"$DATE"
# printf instead of `echo -n "...\n"`: plain echo does not expand \n, so
# the old form printed a literal backslash-n and no newline at all.
printf 'Exporting %s\n' "$VOC_DB_NAME"
sudo cp "/var/lib/couchdb/$VOC_DB_NAME.couch" "/home/george/code/linguana/db/$VOC_OUTPUT_FILE.couch"

STATS_DB_NAME="linguana_stats"
STATS_OUTPUT_FILE="$STATS_DB_NAME"_"$DATE"
printf 'Exporting %s\n' "$STATS_DB_NAME"
sudo cp "/var/lib/couchdb/$STATS_DB_NAME.couch" "/home/george/code/linguana/db/$STATS_OUTPUT_FILE.couch"
echo -n "done\n" | true |
7fd6d7d11f68820453a8d12c3c13014e104c233d | Shell | DanHam/packer-testing | /vmware/iso/debian/scripts/23-aws-create-and-enable-defer-cloud-init-unit.sh | UTF-8 | 2,488 | 4.25 | 4 | [] | no_license | #!/usr/bin/env bash
#
# The AWS Image Import service boots the imported VM as part of the import
# process. This means that first time the cloud-init modules run is during
# the import process. Clearly this is not what was intended.
# To avoid this we need to defer the start up of the cloud init services
# so the first run of cloud-init occurs when the instance created from the
# AMI is first booted.
# This is implemented through a systemd service that runs at next boot
# when the VM is booted as part of the import process. The service enables
# the cloud-init services so that their first run is the boot after next
# e.g. when any instance created from the imported AMI is first run as
# intended.
#
# This script dynamically creates the unit file and associated directory
# used to hold scripts using variables set in the Packer configuration
# template. These are exported by Packer as environment variables.
# Once created the unit is enabled and set to run at next boot

# Set verbose/quiet output based on env var configured in Packer template
[[ "$DEBUG" = true ]] && REDIRECT="/dev/stdout" || REDIRECT="/dev/null"

# Packer logging
echo "Creating systemd unit to set up deferred running of cloud-init..."

# Set up from variables exported from Packer configuration template
UNIT="${DEFER_CLOUD_INIT_SVC_UNIT}"
UNIT_FILE="/etc/systemd/system/${UNIT}"
UNIT_SCRIPTSD="${DEFER_CLOUD_INIT_SVC_DIR}"
ENABLE_SCRIPT="${UNIT_SCRIPTSD}/${DEFER_CLOUD_INIT_SVC_ENABLE}"
CLEAN_SCRIPT="${UNIT_SCRIPTSD}/${DEFER_CLOUD_INIT_SVC_CLEAN}"

# Create a directory to hold the service scripts.
# Expansions are quoted so unusual (or empty) env values fail loudly
# instead of word-splitting; -p makes the step idempotent on re-runs.
echo "Creating dir to hold units scripts: ${UNIT_SCRIPTSD}" >"${REDIRECT}"
mkdir -p "${UNIT_SCRIPTSD}"

# Write the unit file
echo "Creating unit file for ${UNIT}..." >"${REDIRECT}"
echo "Unit file location: ${UNIT_FILE}" >"${REDIRECT}"
echo "Enable cloud-init script: ${ENABLE_SCRIPT}" >"${REDIRECT}"
echo "Clean up script: ${CLEAN_SCRIPT}" >"${REDIRECT}"
cat >"${UNIT_FILE}" <<EOF
[Unit]
Description=Defer initial run of cloud-init for AWS AMI imports
After=basic.target
[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=${ENABLE_SCRIPT}
ExecStop=${CLEAN_SCRIPT}
[Install]
WantedBy=multi-user.target
EOF

# Unit files should have the following permissions
chmod 0664 "${UNIT_FILE}"

# Reload systemd to pick up the newly created unit
systemctl daemon-reload

# Enable the unit used to defer cloud-init. The unit will run during the
# AWS image import process
systemctl enable "${UNIT}" >/dev/null 2>&1

exit 0
| true |
122789d85529a4244d3353f7e8812b42b3bc9fa6 | Shell | karso/tech_prep | /1_count_app_instance.sh | UTF-8 | 603 | 3.703125 | 4 | [] | no_license | #!/bin/bash
## Assume the list of hostnames is in a file called 'hostnames.txt'
## Assume SSH auth established and the pri key is ssh-key.pri
## Assume SSH user is called 'user'

# ToDo: What if appXXXX (where X=[0-9]) is acceptable
# but not appYYYY (where Y=[a-z,A-Z]) - Trivial; grep further
# ToDo: Introduce parallelism

# Connection parameters for the fleet health check.
HostnamesList="/path/to/hostnames.txt"
SSHKey="/path/to/ssh-key.pri"
SSHUser="user"

# Command executed on each remote host. Because $cmd is expanded unquoted
# on the ssh line below, the pipeline text is reassembled and interpreted
# by the remote shell, not locally.
# NOTE(review): `grep -x app` matches only lines that are exactly "app";
# against full `ps -aux` output that seems unlikely to match the intended
# process rows — confirm the expected remote output format.
cmd="ps -aux | grep -x app | wc -l"

# One ssh round-trip per host; a host is considered healthy iff exactly
# 4 matching processes are found.
for host in $(cat $HostnamesList); do
    count=`ssh -i $SSHKey $SSHUser@$host $cmd`
    if [ $count -ne 4 ]; then
        echo "$host: Unhealthy"
    fi
done
| true |
2fd9049b78366d2d59e486cf527fc02b746228af | Shell | loathingKernel/PKGBUILDs | /aur/python-pypresence/PKGBUILD | UTF-8 | 991 | 2.796875 | 3 | [] | no_license | # Maintainer: Carl Smedstad <carl.smedstad at protonmail dot com>
# Contributor: Luis Martinez <luis dot martinez at disroot dot org>
# Contributor: Ronan Pigott <rpigott@berkeley.edu>
pkgname=python-pypresence
_name=${pkgname#python-}
pkgver=4.3.0
pkgrel=1
pkgdesc='Discord RPC and Rich Presence wrapper library'
url='https://github.com/qwertyquerty/pypresence'
arch=(any)
license=(BSD)
depends=(python)
makedepends=(
python-build
python-installer
python-setuptools
python-sphinx
python-wheel
)
source=("$pkgname-$pkgver.tar.gz::$url/archive/refs/tags/v$pkgver.tar.gz")
sha256sums=('958a5bb2f28c3120c89c68cc242abd8e72e2dac9aaf9be36b7c7a6217dcf4669')
_archive="$_name-$pkgver"
# PKGBUILD build step: produce the Python wheel (without build isolation,
# using the system makedepends) and render the Sphinx docs as a man page
# into man/.
build() {
  cd "$_archive"
  python -m build --wheel --no-isolation
  sphinx-build -b man docs/sphinx man
}
# PKGBUILD package step: install the built wheel into $pkgdir, then the
# generated man page and the BSD license file at their standard locations.
package() {
  cd "$_archive"
  python -m installer --destdir="$pkgdir" dist/*.whl
  install -Dm644 "man/$_name.1" -t "$pkgdir/usr/share/man/man1/"
  install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
| true |
ed7c05e6b7aac67f8e365cdb4f15d99f84f80c8b | Shell | swipswaps/telegram-mopidy-bash | /src/library.sh | UTF-8 | 1,032 | 2.875 | 3 | [
"BSD-3-Clause"
] | permissive | #!/usr/bin/env bash
import util/namedParameters util/class
import ./storage.sh
import ./json.sh
# Library: resolves Mopidy track URIs by album/track name, with a local
# cache so each track is looked up over JSON-RPC at most once.
# Written against a bash OO framework (imported above) providing the
# class:/public/private/this/@return syntax.
class:Library() {
    # Mopidy JSON-RPC endpoint URL.
    public string mopidyUri
    # Persistent cache of already-resolved track URIs.
    private Storage storage

    # Resolve the Mopidy URI for a track, consulting the cache first.
    # Arguments: albumName, trackName. Result is emitted via @return:value.
    function Library.getTrackUri() {
        [string] albumName
        [string] trackName
        # Cache key couples album and track name.
        local key="$albumName:$trackName"
        this storage createIfNotExist
        if [ $(this storage isAlreadyAdded "$key") == 0 ]; then
            # Cache miss: search the library over JSON-RPC, restricted to
            # the gmusic backend, and take the first track whose name
            # matches exactly; then remember the result.
            map parameters
            map query
            query["album"]=${albumName}
            parameters["query"]=$($var:query toJson)
            parameters["uris"]='["gmusic:"]'
            parameters["exact"]=false
            local uri=$(Request post $(this mopidyUri) "core.library.search" "$($var:parameters toJson)" \
                | jq "first(.result[0].tracks[] | select(.name == \"$trackName\").uri)")
            this storage addTrack "$key" "$uri"
            @return:value "$uri"
        else
            # Cache hit: return the stored URI.
            @return:value $(this storage getTrack "$key")
        fi
    }
}
Type::Initialize Library | true |
275a43b240898b7bd9f3d67d41989a661c577498 | Shell | e1-one/Bash-Scripts | /scripts/web-app-monitor.sh | UTF-8 | 1,437 | 4.0625 | 4 | [] | no_license | #!/bin/bash
# This script is usefull when we need to know if a web-app is successfully deployed (by performing health check)
# Usually webb-applications are long-running processes, they do not return exit code after start
# Luckily, some of them has HTTP endpoinds for monitoring (Spring Boot Actuator for instance)
# The scrips returns 0 if an apllication has been started successfuly
DELAY_IN_SECS_BEETWEEN_CALLS=10
ATTEMPTS_COUNT=20
APPLICATION_MONITOR_URL="http://localhost:8080/monitor/health"
USER_NAME="user1"
PASSWORD="pass12"
BASE64_AUTH_STRING=$(echo "$USER_NAME:$PASSWORD" | base64)
# Makes GET REST CALL and parses json
# Returns - 'UP', 'DOWN' or ''
# Performs an authenticated GET against $APPLICATION_MONITOR_URL and
# extracts the top-level "status" field with jq. curl runs with --silent,
# so an unreachable endpoint produces empty input (and hence no output);
# a JSON body without a "status" key would print "null".
function get_web_app_status() {
  curl --silent --location --request \
  GET $APPLICATION_MONITOR_URL \
  --header "Authorization: Basic $BASE64_AUTH_STRING" |
  jq -r '.status'
}
for i in $(seq 1 $ATTEMPTS_COUNT); do
echo "Attempt #$i "
status=$(get_web_app_status)
if [ "$status" = "UP" ]; then
echo "Application check is successful. Application's status is UP."
exit 0
elif [[ "$status" == "DOWN" ]]; then
echo "Application status is $status"
else
echo "Can't get application status"
fi
echo "Wait $DELAY_IN_SECS_BEETWEEN_CALLS second and check status again."
sleep $DELAY_IN_SECS_BEETWEEN_CALLS
done
echo "Application check is failed. We made $ATTEMPTS_COUNT ATTEMPTS_COUNT and all of them was unsuccessful." 1>&2
exit 1
| true |
22b5c6c2eb7cd964dc85752382730fb1886023f3 | Shell | dwheelerau/snakemake-plink | /scripts/01_plink_vcf_input.sh | UTF-8 | 1,147 | 2.921875 | 3 | [] | no_license | #!/bin/bash
# import a vcf file into plink format using a pheno.txt file (space sep) to
# include phenotpye data
# cat traits.txt # quantitative data, 0,1,2 would be case control. -9 is missing
# data
##FID IID dot
##NZE2 NZE2 -9
##COLN COLN -9
##NZE8 NZE8 -9
##NZE10 NZE10 10.54
##ALP3 ALP3 77.84
##AUS4 AUS4 2.03
##BHU1 BHU1 0.99
##CAN3 CAN3 10.37
##CHI17 CHI17 0.26
##COLN COLN 8.14
##COLS COLS 0.33
##DEN1 DEN1 1.43
##ECU13 ECU13 7.49
##GRE1 GRE1 0.6
##GUA1 GUA1 12.16
##GUA2 GUA2 16.85
##RUS1 RUS1 5.96
##SAF4 SAF4 21.58
##SLV1 SLV1 6.6
##USA12 USA12 0.53
# --allow-no-sex because not family data is know
#NOTE: chr1 or chromosome_1 needs to be modified to 1, unless use use a flag to specify names
OUT='myplinkp1'
plink --recode --vcf combined.freebayes.p1.annot.q30.chmrename.vcf \
--pheno ./trait_dot_plink.txt --allow-no-sex --out $OUT
# now fix . missing snp ids which are need for ident of position with FDR
# testing
#./fix_snpID.py <infile.map> <outfile.map>
OUT2=${OUT}.fix
python2 fix_snpID.py ${OUT}.map $OUT2
rm -f $OUT.map
mv $OUT2 $OUT.map
echo 'check that dot(.) is replaced by chr_pos in second col'
head -n 5 $OUT.map
| true |
ee6f342c75fe4f2f2f69240046664d425827487a | Shell | paulbry/nmc-swd | /bin/swci_provision_software.bash | UTF-8 | 615 | 3.25 | 3 | [] | no_license | #!/bin/bash
# Provision all software requirements via Spack envionrments
# Only run if deployment directory is present
if [ ! -d "${SWCI_DEPLOY_DIR}" ]; then
echo "No ${SWCI_DEPLOY_DIR} found"
echo "Skipping provisioning"
exit 0
fi
# Grab current SWCI for potential debug purposes
set -e
echo "Current SWCI Environment"
env | grep SWCI
echo " "
set +e
cd ${SWCI_DEPLOY_DIR}
spack env status
spack compiler list
# load modules system
module use "$(spack config get config | grep lmod: | awk '{print substr($2, 0)}')/$(spack arch)/Core"
set -e
spack install
spack module lmod refresh -y
spack find
| true |
215b4570037aea928d56a3530fde73681755c2a7 | Shell | pinter13/ssinst | /ssinst | UTF-8 | 1,232 | 3.546875 | 4 | [] | no_license | #!/bin/bash
# Author: Pintér Gábor
# Website: xpg.hu
# E-mail: info@xpg.hu
# Release date: 2018.02.26

# --- Set SSH Server port ---
# No argument: prompt interactively. One argument: accept it as the port
# after a rough range check. More arguments: reject.
if [ $# -eq 0 ]; then
read -p 'SSH port (default 22):' pn
elif [ $# -lt 2 ]; then
if [ "$1" -lt 0 ] || [ "$1" -gt 99999 ]; then
echo 'Not allowed!'
else
pn=$1
fi
else
echo 'Too much parameter!'
fi
# --- Install SSH package ---
yes | sudo pacman -Sy openssh
# --- Create sshd config ---
# Write via `sudo tee`: with the previous `sudo cat << EOF > file` the
# redirection was performed by the invoking non-root shell, so writing
# /etc/ssh/sshd_config failed with "Permission denied". tee runs under
# sudo and performs the write itself.
sudo tee /etc/ssh/sshd_config > /dev/null << EOF
Port $pn
Protocol 2
PermitRootLogin no
LoginGraceTime 30
MaxAuthTries 3
PubkeyAuthentication yes
AuthorizedKeysFile .ssh/authorized_keys
PermitEmptyPasswords no
UsePAM yes
ClientAliveInterval 300
ClientAliveCountMax 0
EOF
# --- Restart sshd ---
sudo systemctl restart sshd
# --- After install ---
echo 'SSH Server installation done, try connect from client:'
echo "ssh user@serverip_or_hostname -p $pn"
echo 'if you use key file for authentication, generate a key on client and copy to the server:'
echo 'ssh-keygen -t rsa -b 4096'
echo "ssh-copy-id user@serverip_or_hostname -p $pn"
echo 'Turn off password authentication on the server and restart the service:'
echo '/etc/ssh/sshd_config'
echo 'PasswordAuthentication no'
echo 'sudo systemctl restart sshd' | true |
960c3200a92d9441e57986e396475c1d7b81ab4b | Shell | rsilveira65/tic-tac-toe | /symfony/entrypoint-dev.sh | UTF-8 | 337 | 2.671875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Container entrypoint for local development: install PHP dependencies,
# start php-fpm, then block forever so the container stays alive.
echo "Running entrypoint.sh in DEVELOPMENT mode"
echo "Going to application dir"
cd '/home/rsilveira/application'
# Install/refresh Composer dependencies and the autoloader on every start.
echo "Generating autoload"
/usr/local/bin/composer install
# php-fpm presumably daemonizes here, since execution is expected to
# continue to the lines below — confirm against the fpm config.
echo "Starting php-fpm daemon"
/usr/local/sbin/php-fpm --allow-to-run-as-root -c /etc/php/7.1/fpm
# Keep PID 1 running; without this the container would exit immediately.
echo "Finishing entrypoint.sh"
tail -f /dev/null
| true |
c64e350a99e2b0904138e29fb37b4a326762588d | Shell | sajal243/mongrel2 | /docs/manual/inputs/make_deployment_directories.sh | UTF-8 | 673 | 2.546875 | 3 | [
"BSD-3-Clause"
] | permissive | # go home first
cd ~/
# create the deployment dir
mkdir deployment
cd deployment/
# fill it with the directories we need
mkdir run tmp logs static profiles
# Note: On some systems zeromq needs access to /proc from
# the chroot - on Linux this command should do it (make
# sure you mount it at boot time as well):
## mkdir -p proc && sudo mount --bind /proc proc
# create the procer profile dirs for each thing
cd profiles/
mkdir chat mp3stream handlertest web mongrel2
cd ..
# copy the mongrel2.conf sample from the source to here
cp ~/mongrel2/examples/configs/mongrel2.conf mongrel2.conf
# setup the mongrel2 database initially
m2sh load
# see our end results
ls
| true |
3932c02f46a936f8c7e1f050953e1d2363d50dc9 | Shell | stianlj/dotfiles | /termux/tasker/org-note.sh | UTF-8 | 580 | 2.78125 | 3 | [] | no_license | #!/data/data/com.termux/files/usr/bin/bash -l
# Place this in ~/.termux/tasker/org-note.sh
# You may have to create the .termux/tasker directory
# Launches a capture template 'N' with the following variables available in the template:
# %:description - The title entered in the popup
# %i - The selected text
title=$(echo $1 | base64 --decode | jq -sRr @uri)
note=$(echo $2 | base64 --decode | jq -sRr @uri)
socketFile=/data/data/com.termux/files/usr/var/run/emacs10444/server
emacsclient \
-s $socketFile \
"org-protocol://capture?template=N&title=$title&body=$note"
| true |
3205516ce49a4ef18ad4bc8862a0be5ece1d3a56 | Shell | NicolasEhrhardt/CS224N | /test.sh | UTF-8 | 1,350 | 2.953125 | 3 | [] | no_license | #!/bin/bash
data="data/"

# Run the word-alignment tester with the given model class.
# Usage: run_aligner <ModelClass> <evalSet> [trainSentences] [language] [extra args...]
# Defaults: 200 training sentences, French.
run_aligner() {
  local model=$1
  shift
  java -cp pa1/java/classes cs224n.assignments.WordAlignmentTester \
    -dataPath "$data" \
    -model "cs224n.wordaligner.$model" \
    -evalSet "$1" \
    -trainSentences "${2:-200}" \
    -language "${3:-french}" \
    "${@:4}"
}

# Dispatch on the model nickname; all remaining CLI arguments are
# forwarded unchanged. This replaces five near-identical java invocations.
case "$1" in
  base)
    run_aligner BaselineWordAligner "${@:2}"
    ;;
  pmi)
    run_aligner PMIModel "${@:2}"
    ;;
  ibm1)
    run_aligner IBMModel1 "${@:2}"
    ;;
  ibm2)
    run_aligner IBMModel2 "${@:2}"
    ;;
  ibm2p)
    run_aligner IBMModel2Parallel "${@:2}"
    ;;
  *)
    # Usage now lists every supported model (the old message mentioned
    # only {mini,pmi}, which did not match the cases above).
    echo "Usage: $0 {base,pmi,ibm1,ibm2,ibm2p} {dev,miniTest} {number}"
    exit 1
esac
| true |
4bc92c6ba53e932568d3fe55a1145a37997f5075 | Shell | cielavenir/CODE2RACE_stock | /Mystery.sh | UTF-8 | 206 | 2.765625 | 3 | [] | no_license | #!/bin/sh
#coding:utf-8
#author:cielavenir (https://github.com/cielavenir)
# Reads a count t, then for each of t integers prints its number of
# divisors: factor the integer, count each prime's multiplicity with
# uniq -c, and multiply (multiplicity + 1) over all primes.
read t
for i in `seq $t`; do
	read n
	# factor prints "N: p1 p2 ..."; tr puts each token on its own line,
	# sed 1d drops the "N:" label line, and awk multiplies ($1+1) where
	# $1 is the uniq -c repeat count (the prime's multiplicity).
	<<<$n factor | tr \  '\n' | uniq -c | sed 1d | awk 'BEGIN{n=1}{n*=$1+1}END{print n}'
done
| true |
c62ab57f2bfee44f69f7603ed80e7850a14215a5 | Shell | rgeorgiev583/os-2015-2016 | /midterm/81187/ukill.sh | UTF-8 | 453 | 3.421875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Kill every process belonging to each username given as an argument.
# `for user` with no `in` list iterates over the positional parameters.
for user
do
	# `kill` takes PIDs as arguments, not on stdin, so the previous
	# `... | kill` pipeline killed nothing; xargs turns the PID list
	# into arguments (exactly the fix suggested in the review notes).
	# The pattern is anchored (^$user) so the username only matches at
	# the start of the ps line, not as a substring elsewhere.
	ps aux | grep "^$user" | awk '{print $2}' | xargs kill
done

# Total points for this task: 1/1
| true |
434f9da2fe13d4419afcaade72847970a21c1ac2 | Shell | joaogsleite/configs | /terminal/backup.sh | UTF-8 | 159 | 2.890625 | 3 | [] | no_license | #!/usr/bin/env bash
# Tell the user where terminal theme settings live on this platform.
case "$(uname)" in
  Darwin)
    echo "Export theme file from macOS Terminal app"
    ;;
  *)
    echo "use default terminal app on Linux"
    ;;
esac
| true |
0f25ead481c25a724a2a4e6a1d9853d3c6cc64b4 | Shell | alexandrdanilkov/bitrix.infra | /scripts/check_404.sh | UTF-8 | 379 | 2.53125 | 3 | [] | permissive | #!/usr/bin/env sh
# this script prints current 404s from favor-group.ru for redirects troubleshooting
# google shows year-old pages and we need to redirect poor souls clicking on them somewhere
# Pipeline: take access-log lines with a 404 status, keep only search-bot
# hits (Yandex/Google), extract the request path (2nd "-quoted field, then
# 2nd space-separated word, i.e. the path of `GET /path HTTP/1.1`), drop
# cache/upload noise, and show per-path hit counts in a pager.
fgrep '" 404 ' logs/nginx/favor-group.ru.access.log | egrep 'YandexBot/|Googlebot/' | cut -d '"' -f 2 | cut -d ' ' -f 2 | egrep -v '^(/bitrix/cache/|/upload/)' | sort | uniq -c | less
| true |
95233e89e951a9a2204e69ff82bcc82574653c14 | Shell | RestExpress/RestExpress | /core/src/scripts/restexpress.sh | UTF-8 | 1,727 | 3.828125 | 4 | [
"Apache-2.0"
] | permissive | #! /bin/bash
#
# This script utilizes the RestExpress kickstart application
# to create a new RestExpress project that has your own name.
#
# To leverage this tool:
# 1) Unzip the latest RestExpress Kickstart release to a known location (e.g. ~/local/RestExpress/).
# 2) Set the RESTEXPRESS_HOME environment variable. Not needed if you chose ~/local/RestExpress/.
# 3) Copy the RestExpress/restexpress.sh (this file) into a location on your path (e.g. ~/bin/).
# 4) Run it: ~/bin/restexpress.sh <project name> (e.g. restexpress.sh MyCoolProject)
#
if [ -z "$RESTEXPRESS_HOME" ]; then
RESTEXPRESS_HOME=~/local/RestExpress
fi
NAME_LWR=`echo $1 | awk {'print tolower($1)'}`
rm -rf $1
mkdir $1
cp -r $RESTEXPRESS_HOME/* $1
cp $RESTEXPRESS_HOME/.classpath $1
cp $RESTEXPRESS_HOME/.project $1
cd $1
# Update build.xml & Eclipse project
sed -i.orig s/KickStart/$1/g build.xml
sed -i.orig s/kickstart/$NAME_LWR/g build.xml
sed -i.orig s/[Kk]ick[Ss]tart/$1/g .project
# Rename the 'kickstart' directories
mv src/java/com/kickstart/ src/java/com/$NAME_LWR/
# packages
find src/java/com/$NAME_LWR -name '*.java' | xargs grep -l kickstart | xargs sed -i.orig s/kickstart/$NAME_LWR/g
# class names
find src/java/com/$NAME_LWR -name '*.java' | xargs grep -li kickstart | xargs sed -i.orig s/[Kk]ick[Ss]tart/$1/g
# file names
mv src/java/com/$NAME_LWR/controller/KickStartController.java src/java/com/$NAME_LWR/controller/$1Controller.java
#if {-z 'test/java/com/kickstart/'}
# mv test/java/com/kickstart/ test/java/com/$1/
# find test/java/com/$1 -name '*.java' | xargs grep -l kickstart | xargs sed -i .orig s/kickstart/$1/g
#fi
# Clean up
find . -name '*.orig' | xargs rm
# Done
echo $1 RestExpress service suite created.
| true |
2c64c4fb8ca6cdafed6906039979cb34696e4101 | Shell | emmanueltouzery/dotfiles | /zshrc | UTF-8 | 5,671 | 2.625 | 3 | [] | no_license | export TERM="xterm-256color"
# If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:/usr/local/bin:$PATH
POWERLEVEL9K_MODE='awesome-fontconfig'
# If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:/usr/local/bin:$PATH
# Path to your oh-my-zsh installation.
export ZSH=/home/emmanuel/.oh-my-zsh
# Set name of the theme to load. Optionally, if you set this to "random"
# it'll load a random theme each time that oh-my-zsh is loaded.
# See https://github.com/robbyrussell/oh-my-zsh/wiki/Themes
ZSH_THEME="powerlevel9k/powerlevel9k"
POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(dir vcs dir_writable)
POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(status command_execution_time)
POWERLEVEL9K_STATUS_VERBOSE=false
# POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# POWERLEVEL9K_SHORTEN_DELIMITER=""
# POWERLEVEL9K_SHORTEN_STRATEGY="truncate_from_right"
POWERLEVEL9K_SHORTEN_DIR_LENGTH=3
POWERLEVEL9K_SHORTEN_DELIMITER=".."
POWERLEVEL9K_SHORTEN_STRATEGY="Default"
#ZSH_THEME="bullet-train"
#
#BULLETTRAIN_PROMPT_ORDER=(
# dir
# git
#)
#BULLETTRAIN_PROMPT_SEPARATE_LINE=false
#BULLETTRAIN_PROMPT_CHAR=
#BULLETTRAIN_PROMPT_ADD_NEWLINE=false
# Uncomment the following line to use case-sensitive completion.
# CASE_SENSITIVE="true"
# Uncomment the following line to use hyphen-insensitive completion. Case
# sensitive completion must be off. _ and - will be interchangeable.
# HYPHEN_INSENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to change how often to auto-update (in days).
# export UPDATE_ZSH_DAYS=13
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to enable command auto-correction.
# ENABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
# COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
# DISABLE_UNTRACKED_FILES_DIRTY="true"
# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# The optional three formats: "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
# HIST_STAMPS="mm/dd/yyyy"
# Would you like to use another custom folder than $ZSH/custom?
# ZSH_CUSTOM=/path/to/new-custom-folder
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
plugins=(git mvn npm fasd zsh-autosuggestions history history-substring-search)
source $ZSH/oh-my-zsh.sh
# User configuration
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
# export EDITOR='vim'
# else
# export EDITOR='mvim'
# fi
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# ssh
# export SSH_KEY_PATH="~/.ssh/rsa_id"
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
# alias zshconfig="mate ~/.zshrc"
# alias ohmyzsh="mate ~/.oh-my-zsh"
POWERLEVEL9K_DIR_PATH_SEPARATOR="%F{black} $(print_icon 'LEFT_SUBSEGMENT_SEPARATOR') %F{black}"
# POWERLEVEL9K_HOME_FOLDER_ABBREVIATION="%k%F{white}~%F{blue}"
POWERLEVEL9K_HOME_FOLDER_ABBREVIATION=""
# https://github.com/zzrough/gs-extensions-drop-down-terminal/issues/57#issuecomment-170202054
# 0 is the text foreground color
# 2 is the git branch info back color
# 4 is the dir path back color
# tput initc 0 172 196 200
# tput initc 1 776 0 0
# tput initc 2 298 568 19
# tput initc 3 749 592 0
# tput initc 4 200 372 800
# tput initc 5 447 294 376
# tput initc 6 23 560 470
# tput initc 7 815 835 800
# tput initc 8 329 337 313
# tput initc 9 929 160 156
# tput initc 10 537 878 196
# tput initc 11 980 905 298
# tput initc 12 380 600 1000
# tput initc 13 992 243 886
# tput initc 14 203 878 854
# tput initc 15 905 909 901
# alternative
# tput initc 0 300 300 300
# tput initc 1 800 210 100
# tput initc 2 650 760 380
# tput initc 3 800 460 180
# tput initc 4 350 530 670
# tput initc 5 630 380 470
# tput initc 6 470 710 760
# tput initc 7 810 810 810
# tput initc 8 570 570 570
# tput initc 9 1000 280 200
# tput initc 10 720 710 0
# tput initc 11 1000 780 430
# tput initc 12 530 760 1000
# tput initc 13 820 820 1000
# tput initc 14 440 760 830
# tput initc 15 910 910 910
tput initc 0 172 196 200
tput initc 1 800 210 100
tput initc 2 220 560 220
tput initc 3 800 460 180
tput initc 4 350 530 670
tput initc 5 630 380 470
tput initc 6 470 710 760
tput initc 7 810 810 810
tput initc 8 570 570 570
tput initc 9 1000 280 200
tput initc 10 720 710 0
tput initc 11 1000 780 430
tput initc 12 530 760 1000
tput initc 13 820 820 1000
tput initc 14 440 760 830
tput initc 15 910 910 910
# https://unix.stackexchange.com/a/114243/36566
zstyle ':completion:*' special-dirs true
source $HOME/.zprofile
source $HOME/.oh-my-zsh/custom/plugins/zsh-you-should-use/you-should-use.plugin.zsh
source /usr/share/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh
| true |
79b67acf89c9b9ab74bbf256b7aa94751944509a | Shell | LaGuer/francy | /scripts/release.sh | UTF-8 | 1,362 | 3.5625 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
set -ex
CURRENT=`pwd`
# Release Jupyter Extension to PyPi
echo "Release to PyPi..."
cd $CURRENT/js/packages/francy-extension-jupyter
pyenv local 3.6
pip3.6 install wheel
pip3.6 install twine
python3.6 setup.py sdist
python3.6 setup.py bdist_wheel --universal
`pyenv which twine` upload dist/*
echo "Done"
# Release GAP Package to Github
echo "Release to Github..."
cd $CURRENT
release="release-$TRAVIS_BRANCH"
json="{\"tag_name\": \"$TRAVIS_BRANCH\", \"name\": \"$release\", \"body\": \"Francy Release $TRAVIS_BRANCH\"}"
upload_url=$(curl -s -H "Authorization: token $GITHUB_ADMIN_KEY" -d "$json" "https://api.github.com/repos/gap-packages/francy/releases" | jq -r '.upload_url')
upload_url="${upload_url%\{*}"
version=${TRAVIS_BRANCH:1}
folder="francy-$version"
filename="$folder.tar.gz"
echo "creating release artifact : $filename"
mkdir -p $folder
cd $folder
cp -Rfp ../doc .
cp -Rfp ../examples .
cp -Rfp ../gap/ .
cp -Rfp ../tst/ .
cp -Rfp ../init.g .
cp -Rfp ../LICENSE .
cp -Rfp ../makedoc.g .
cp -Rfp ../PackageInfo.g .
cp -Rfp ../read.g .
cp -Rfp ../README.md .
cd ..
tar -czf $filename $folder
echo "uploading asset to: $upload_url"
curl -s -H "Authorization: token $GITHUB_ADMIN_KEY" -H "Content-Type: application/tar+gzip" \
--data-binary @$filename "$upload_url?name=$filename&label=$filename"
echo "Done" | true |
59647f5e74d8a0cceb5dfc433340efc06c46e993 | Shell | fruffy/bluebridge | /ip6/scripts/dist_cores.sh | UTF-8 | 523 | 3.453125 | 3 | [] | no_license | #!/bin/bash
# Spread eth NIC interrupt queues across CPUs by writing per-IRQ SMP
# affinity masks. Requires more than one CPU; queues are assigned
# round-robin starting from the highest-numbered CPU.
ncpus=$(grep -ciw ^processor /proc/cpuinfo)
test "$ncpus" -gt 1 || exit 1

n=0
# Extract the IRQ numbers of all eth interfaces ("NN:" in column 1);
# one awk call replaces the old cat | grep | awk | sed pipeline.
for irq in $(awk '/eth/ {sub(/:/, "", $1); print $1}' /proc/interrupts)
do
	f="/proc/irq/$irq/smp_affinity"
	test -r "$f" || continue
	# Standard $(( )) arithmetic replaces the deprecated $[ ] form.
	cpu=$(( ncpus - (n % ncpus) - 1 ))
	if [ "$cpu" -ge 0 ]
	then
		# The affinity mask is a hex bitmask with only bit $cpu set.
		mask=$(printf %x $(( 2 ** cpu )))
		echo "Assign SMP affinity: eth queue $n, irq $irq, cpu $cpu, mask 0x$mask"
		echo "$mask" > "$f"
		n=$(( n + 1 ))
	fi
done
| true |
0da1b5234c35dff78d76f4359bdf95831d107986 | Shell | catcuts/make_ro_system_for_rpi3_jessie | /migrate_and_upgrade/mount_hd.sh | UTF-8 | 369 | 3.5625 | 4 | [] | no_license | #!/bin/bash
# Try to mount each /dev/sd* block device on $mountpoint, stopping at the
# first device that mounts successfully (the script exits 0 at that point).
mountpoint=/home/pi/hd/src/data/ftp

_mount(){
	echo -n "mounting $1 ..."
	# Lazily detach any stale mount of this device first. A failed unmount
	# is only a warning; we still attempt the mount below.
	# (bug fix: the message was single-quoted, so $1 never expanded)
	umount -l "$1" || echo "cannot umount $1"
	if mount "$1" "$mountpoint"; then
		echo 'ok'
		# First successful mount wins: stop scanning further devices.
		exit 0
	else
		echo "cannot mount $1"
	fi
}

# Iterate candidate devices via a glob instead of parsing `ls` output.
for dev in /dev/sd*; do
	_mount "$dev"
done
| true |
757707b5ab007cfc9f806314e6265ab8ed17c800 | Shell | VolatileDream/dot-files | /gpg-to-ssh | UTF-8 | 1,050 | 4.21875 | 4 | [] | no_license | #!/usr/bin/env bash
#
# Outputs all the subkeys usable (not expired, not revoked) for SSH.
#
# This is probably incorrect, as the output format (SSH key) is not capable of
# containing information related to time based expiry (as far as I can tell).
# So if you use this script to add ssh keys to a service, then the expiry state
# is not going to be handled. It would be better to incorperate this command
# into AuthorizedKeysCommand instead of an AuthorizedKeys file.
#
# Usage: gpg-to-ssh <key-id>  (the first argument is forwarded to gpg2)
gpg2ssh() {
  local -r key="$1";
  # Get GPG to output the keys in a stable machine readable format.
  # Then, match the key types we're looking for:
  # type = subkey
  # expiry = fully valid, ultimate validity
  # use = authentication, and not disabled
  # Then pass the subkey fingerprint to GPG for export.
  # Colon-format fields used below: $1 = record type, $2 = validity,
  # $5 = key ID, $12 = key capabilities (lowercase = this key,
  # 'D' = disabled).
  gpg2 --list-keys --with-colons -- "${key}" | \
    awk -F : \
      'match($1, "sub") \
        && (match($2, "u") || match($2, "f")) \
        && match($12, "a") && !match($12, "D") { print $5 }' |\
    xargs -L1 gpg2 --export-ssh-key
}
gpg2ssh "$@"
| true |
5c87af36565bf7ff688503783f716888bca80688 | Shell | ServerSide-CLS/homework | /2017810402086/ex02_adduser.sh | UTF-8 | 483 | 3.4375 | 3 | [] | no_license | #!/bin/bash
# Create a new user with a generated 8-character random password (6 hex
# characters mixed with 2 punctuation characters) and print the credentials.
read -p 'Enter new username:' USER_NAME
read -p 'Enter person info:' COMMENT
# Two random punctuation characters.
PASSWORD_B=$( echo "!@#$%^&*()_+" | fold -w1 | shuf | tr -d '\n' | head -c2 )
# Six random hex characters hashed from the current time plus $RANDOM.
PASSWORD_A=$( date +%s%N${RANDOM}${RANDOM} | sha256sum | head -c6 )
# Shuffle all eight characters so the symbols are not always at the end.
PASSWORD=$( echo ${PASSWORD_A}${PASSWORD_B} | fold -w1 | shuf | tr -d '\n' | head -c8 )
useradd -c "${COMMENT}" -m ${USER_NAME}
echo ${USER_NAME}:${PASSWORD} | chpasswd
# printf interprets the \n escapes; the previous plain `echo` printed the
# backslash sequences literally.
printf 'Username: %s\nInfo: %s\nPassword: %s\n' "${USER_NAME}" "${COMMENT}" "${PASSWORD}"
| true |
e37d20cc71ca1d7464965af592f9363856bdfb9d | Shell | z-salang/shell | /simple_arr_two_dimen.sh | UTF-8 | 292 | 3.1875 | 3 | [] | no_license | #!/bin/bash
# Emulate a two-dimensional array on top of a flat Bash array: the element
# at (row i, column j) lives at flat index i * col + j.
twoDimenArr=(a1 a1 a3 a4 b1 b2 b3 b4 c1 c2 c3 c4 d1 d2)
length=${#twoDimenArr[@]}
echo 数组元素个数n=$length
col=4
# Number of complete rows; a trailing partial row is ignored.
# $(( )) replaces the deprecated $[ ] arithmetic form.
row=$(( length / col ))
for ((i=0; i<row; ++i))
do
	for ((j=0; j<col; ++j))
	do
		echo [$i,$j]=${twoDimenArr[$i * $col + $j]}
	done
done
| true |
ff36a37ae86115f02cec34c7c624cb1b29e6bc32 | Shell | racingmars/pgclouds | /google.sh | UTF-8 | 2,599 | 3.625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
set -eu
IFS=$'\n\t'
# Benchmark harness: provisions a Cloud SQL PostgreSQL instance and a client
# VM on GCP, runs pgbench from the VM, copies the report back, then deletes
# both resources. Requires gcloud, jq and ~/.ssh/id_rsa(.pub).
LOCATION=us-west1
ZONE=us-west1-b
USERNAME=pgtest
COMPUTE_UNITS=8
MEMORY=12GiB
STORAGE=125
# Random suffix keeping resource names unique, and a throwaway DB password.
INSTANCE_NONCE=$(base64 /dev/urandom | tr -dc a-z0-9 | fold -w 10 | head -n 1)
PASSWORD=$(base64 /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 20 | head -n 1)
# We don't expect any pipelines to have error components from here on out
set -o pipefail
if ! [ -e ~/.ssh/id_rsa.pub ]
then
    echo "ERROR: ~/.ssh/id_rsa.pub must exist"
    exit 1
fi
echo Instance: $INSTANCE_NONCE
echo Password: $PASSWORD
PGNAME=pg-$INSTANCE_NONCE
echo Creating PostgreSQL instance: $PGNAME
# Note: the instance is reachable from anywhere (0.0.0.0/0); acceptable only
# because it is short-lived and password-protected.
gcloud sql instances create $PGNAME \
    --authorized-networks=0.0.0.0/0 \
    --cpu=$COMPUTE_UNITS \
    --database-version=POSTGRES_9_6 \
    --memory=$MEMORY \
    --region=$LOCATION \
    --gce-zone=$ZONE \
    --storage-size=$STORAGE \
    --storage-type=SSD \
    > /dev/null
gcloud sql users set-password postgres any -i $PGNAME --password=$PASSWORD \
    > /dev/null
# Extract the instance's public IP from the JSON description.
PGFQDN=$(gcloud sql instances describe $PGNAME --format json \
    | jq '.ipAddresses[0].ipAddress' | tr -d \")
echo New PostgreSQL Server is at: $PGFQDN
CLIENTVM=client-$INSTANCE_NONCE
echo Creating Client VM: $CLIENTVM
# The metadata line installs our public key for $USERNAME on the VM.
gcloud compute instances create $CLIENTVM \
    --zone $ZONE \
    --machine-type n1-standard-4 \
    --image-family debian-9 --image-project debian-cloud \
    --metadata "ssh-keys=$USERNAME:$(cat ~/.ssh/id_rsa.pub)" \
    > /dev/null
CLIENTIP=$(gcloud compute instances describe $CLIENTVM --zone $ZONE \
    --format json | jq '.networkInterfaces[0].accessConfigs[0].natIP' \
    | tr -d \")
echo Client VM is at: $CLIENTIP
# Give the VM time to finish booting before the first SSH/SCP attempt.
sleep 15
echo Uploading pgbench script.
# Substitute the ^PGHOST^/^PGUSER^/^PGPASS^ placeholders in the template.
TEMP_PG_SCRIPT=$(mktemp)
sed -e "s/\\^PGHOST\\^/$PGFQDN/" pgbench.sh > $TEMP_PG_SCRIPT
sed -e "s/\\^PGUSER\\^/postgres/" $TEMP_PG_SCRIPT > $TEMP_PG_SCRIPT.1
sed -e "s/\\^PGPASS\\^/$PASSWORD/" $TEMP_PG_SCRIPT.1 > $TEMP_PG_SCRIPT
scp -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null $TEMP_PG_SCRIPT ${USERNAME}@$CLIENTIP:pgbench.sh
rm $TEMP_PG_SCRIPT
rm $TEMP_PG_SCRIPT.1
ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null ${USERNAME}@$CLIENTIP -- chmod +x pgbench.sh
echo Running pgbench script.
ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null ${USERNAME}@$CLIENTIP -- ./pgbench.sh
scp -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null ${USERNAME}@$CLIENTIP:report.txt report-google-$INSTANCE_NONCE.txt
#echo Cleaning up -- deleting server and client
# (only the echo above is commented out; the deletions below always run)
gcloud sql instances delete $PGNAME -q --async > /dev/null
gcloud compute instances delete $CLIENTVM --zone $ZONE -q > /dev/null
| true |
a90965bb7a901821c4ef0d0d0c2ec8fe8503ba0c | Shell | PaddlePaddle/PaddleTest | /inference/python_api_test/test_nlp_model/run.sh | UTF-8 | 792 | 2.71875 | 3 | [] | no_license | [[ -n $1 ]] && export CUDA_VISIBLE_DEVICES=$1
# Run the NLP inference test cases with pytest, recording failures in
# result.txt and exiting with the number of failed cases.
export FLAGS_call_stack_level=2
cases="./test_bert_gpu.py \
       ./test_bert_mkldnn.py \
       ./test_bert_trt_fp32.py \
       ./test_ernie_gpu.py \
       ./test_ernie_mkldnn.py \
       ./test_ernie_trt_fp32.py \
       ./test_lac_gpu.py \
       ./test_lac_trt_fp32.py \
       ./test_lac_trt_fp16.py \
      "
bug=0
echo "============ failed cases =============" >> result.txt
for file in ${cases}
do
    echo ${file}
    # NOTE(review): ${ignore} is never set in this script; the skip branch
    # only fires when the caller exports it — confirm that is intended.
    if [[ ${ignore} =~ ${file##*/} ]]; then
        echo "跳过"
    else
        python -m pytest -m server --disable-warnings -v ${file}
        if [ $? -ne 0 ]; then
            echo ${file} >> result.txt
            # Arithmetic expansion replaces the deprecated `expr` fork.
            bug=$((bug + 1))
        fi
    fi
done
echo "total bugs: "${bug} >> result.txt
exit ${bug}
| true |
c1f02f3e64a887f979517a94e26297f8afac0af2 | Shell | pku-cs-code/python-linux_Learning | /运维常用配置/check-url-array.sh | UTF-8 | 742 | 3.359375 | 3 | [] | no_license | #!/bin/bash
# Check every URL in url_list and report success when the first response
# header line is "HTTP/... 200 OK", using the distro's action() helper.
. /etc/init.d/functions
url_list=(
www.baidu.com
http://etiantian.org
http://oldboy.blog.51cto.com
http://192.168.31.60
)
# 3-second countdown before the checks start.
# (renamed from `wait` so the shell builtin of that name is not shadowed)
function countdown(){
	echo -n "execute cmd after 3 secconds."
	for((i=0;i<3;i++))
	do
		echo -n ".";sleep 1
	done
	# bug fix: `echo "\n"` printed a literal backslash-n instead of a newline
	echo
}
function check_url(){
	countdown
	echo "check ulr..."
	for((i=0;i<${#url_list[@]};i++))
	do
		# First line of the response headers, split into words:
		# judge[0]=HTTP/x.y  judge[1]=status code  judge[2]=reason phrase
		judge=($(curl -I -s --connect-timeout 5 ${url_list[$i]}|head -1))
		# bug fix: `"${judge[2]}"=="OK"` had no spaces, so it was a single
		# non-empty string and always evaluated true.
		if [[ "${judge[1]}" = "200" && "${judge[2]}" == "OK" ]];then
			action "${url_list[$i]} is successful." /bin/true
		else
			action "${url_list[$i]} is failed." /bin/false
		fi
	done
}
check_url
| true |
11f8b76796a36460013697a127ceaddb685b6e31 | Shell | Leedehai/vuepress-proj | /examine.sh | UTF-8 | 856 | 3.640625 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env sh
# Serve the locally-built VuePress site on port 8001 for a pre-publish check.
URL_ROOT='vuepress-proj' # follow the 'base' property in config.js
LOCAL_SITE='docs/.vuepress/dist'
if [ ! -d $LOCAL_SITE ]; then
    echo "[Error] $LOCAL_SITE not found. Did you run ./build.sh?"
    exit 1
fi
echo "Examining the local site at $LOCAL_SITE"
cd $LOCAL_SITE > /dev/null
git log -1 --pretty=%B | head -1 # display the last build's date & time
cd - > /dev/null
printf "URL: \033[4;mhttp://localhost:8001/$URL_ROOT\033[0;m\n"
cd $LOCAL_SITE > /dev/null
ln -s . $URL_ROOT 2> /dev/null # so that localhost:8001/$URL_ROOT is valid, not committed
# Pick the http-server module name matching the Python on PATH.
if [ $(python -c 'import sys; print(int(sys.version_info.major < 3))') -eq 1 ]; then
    # for Python2
    SIMPLE_HTTP_SERVER=SimpleHTTPServer # Python2
else
    # for Python3.. nasty!
    SIMPLE_HTTP_SERVER=http.server # Python3
fi
python -m $SIMPLE_HTTP_SERVER 8001 2> /dev/null # start server
| true |
8cb03f01e47b46009985d9be8ac375fc8744446f | Shell | averyfreeman/zfs-auto-delete-snapshots | /zfs-delete-snapshots.sh | UTF-8 | 1,224 | 3.609375 | 4 | [] | no_license | #!/bin/bash
#sudo su
# Interactive helper: shows current ZFS snapshots, then deletes the N oldest
# ("head") or newest ("tail") snapshots whose names match a dataset pattern.
printf "\nHere's list of your current snapshots:\n\n"
zfs list -t snapshot
printf "\nNote: hit ctrl-c at any time to abort this script\n"
printf "\nWhat dataset would you like to delete snapshots from?\n"
printf "(Enter all or part of name): "
read dataset
printf "\nwould you like to delete from head (oldest) snapshots or from tail (newest)?\n"
printf "(type 'head' or 'tail'): "
read direction
# $direction is executed as a command below; accept only the two known
# values, otherwise arbitrary input would be run as a command.
case "$direction" in
	head|tail) ;;
	*) printf "\nError: please type 'head' or 'tail'\n"; exit 1 ;;
esac
printf "\n *** will delete snapshots from $direction of $dataset *** \n"
printf "\nCounting number of zfs snapshots... "
count=$(zfs list -t snapshot -o name | grep $dataset | wc -l)
printf "$count snapshots in selected dataset $dataset\n"
printf "\nHow many snapshots would you like to delete?: "
read n
# Refuse anything that is not a plain number before handing it to head/tail.
case "$n" in
	''|*[!0-9]*) printf "\nError: '%s' is not a number\n" "$n"; exit 1 ;;
esac
printf "\nDelete snapshots that provide dependencies?\n\n(DANGER: This might delete additional snapshots that were not explicitly specified, or create unpredictable behavior when restoring affected snapshots)\n\n"
printf "Type '-R' and hit enter for yes, or leave blank and hit enter for 'no': "
read Rflag
# Only the documented '-R' flag (or nothing) may reach zfs destroy.
if [ -n "$Rflag" ] && [ "$Rflag" != "-R" ]; then
	printf "\nError: expected '-R' or an empty answer\n"; exit 1
fi
printf "\nOK, ** now deleting $n snapshots from $dataset ** \n"
zfs list -t snapshot -o name | grep $dataset | $direction -n $n | xargs -n 1 sudo zfs destroy $Rflag -vr
printf "\nAll done!\n\n" | true |
854a4b2acc8530c54c372628ac942e9cb9eefa28 | Shell | dcurca/CS2211 | /Assignment2/say.sh | UTF-8 | 2,455 | 4.09375 | 4 | [] | no_license | #!/bin/bash
# This shell script takes in two arguments and outputs a picture of a
# specified animal with either a random generated string or an inputted string
# from the user
# Dana Curca; dcurca; 250976773
echo

animal=$1
# if $2 is inputted then ignore $@ (standard input) otherwise ignore $2 and
# use standard input
text="${@:2}"

# checks to make sure number of arguments is valid
if [ $# == 0 ]; then
	echo "Error: Need at least one argument indicating type of animal"
	exit 1
fi

# Draws the speech bubble around the message $1 and prints the ASCII picture
# stored in /cs2211/a2/$2.txt underneath (shared by every animal branch,
# replacing the ten identical copy-pasted stanzas).
say_with_animal() {
	local msg=$1
	local animal_file=$2
	echo "$msg" | tr [a-zA-Z' '.] -
	echo "< $msg >"
	echo "$msg" | tr [a-zA-Z' '.] _
	echo "`cat /cs2211/a2/${animal_file}.txt`"
}

# checks to see if user inputted standard input or not
if [ $# -eq 1 ]; then
	# No message given: pick a random fortune as the text.
	file=`shuf -n 1 /cs2211/a2/fortunes.txt`
	case $animal in
		"duck"|"dog"|"cat"|"frog"|"cow")
			say_with_animal "$file" "$animal"
			;;
		*)
			echo "Error: This is not an option, choose either cat, dog, frog, cow, or duck."
			exit 2
			;;
	esac
else
	case $animal in
		"duck"|"cow"|"dog"|"cat"|"frog")
			say_with_animal "$text" "$animal"
			;;
		*)
			# checks to make sure $1 is an option that is accounted for in case
			echo "Error: This is not an option, choose either cat, dog, frog, cow or duck."
			exit 2
			;;
	esac
fi
| true |
abb8867b6a0ffe2be080a8e3ed42503d362f8a7a | Shell | VerKnowSys/svdOS | /etc-jail/rc.d/ipfs | UTF-8 | 1,047 | 3.546875 | 4 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | #!/bin/sh
#
# $FreeBSD: releng/10.1/etc/rc.d/ipfs 255450 2013-09-10 13:48:33Z cy $
#
# PROVIDE: ipfs
# REQUIRE: ipnat
# KEYWORD: nojail shutdown
# rc.d script that saves and restores IPFilter state tables across reboots:
# "stop" dumps them into /var/db/ipf, "start" reloads and removes the dumps.
. /etc/rc.subr
name="ipfs"
rcvar="ipfs_enable"
start_cmd="ipfs_start"
stop_cmd="ipfs_stop"
start_precmd="ipfs_prestart"
ipfs_prestart()
{
	# Do not continue if either ipnat or ipfilter is not enabled or
	# if the ipfilter module is not loaded.
	#
	if ! checkyesno ipfilter_enable -o ! checkyesno ipnat_enable ; then
		err 1 "${name} requires either ipfilter or ipnat enabled"
	fi
	if ! ${ipfilter_program:-/sbin/ipf} -V | grep -q 'Running: yes' >/dev/null 2>&1; then
		err 1 "ipfilter module is not loaded"
	fi
	return 0
}
ipfs_start()
{
	# Restore previously saved state tables (-R), then delete the dump
	# files so a stale copy is never reloaded twice.
	if [ -r /var/db/ipf/ipstate.ipf -a -r /var/db/ipf/ipnat.ipf ]; then
		${ipfs_program} -R ${rc_flags}
		rm -f /var/db/ipf/ipstate.ipf /var/db/ipf/ipnat.ipf
	fi
}
ipfs_stop()
{
	# Write the current state tables out (-W); the dump directory is
	# created root-only (0700) on first use.
	if [ ! -d /var/db/ipf ]; then
		mkdir /var/db/ipf
		chmod 700 /var/db/ipf
		chown root:wheel /var/db/ipf
	fi
	${ipfs_program} -W ${rc_flags}
}
load_rc_config $name
run_rc_command "$1"
| true |
349701b9e1be631cd3b9cbfbfd5318051cb95cb3 | Shell | aristeu/vm-scalability | /case-truncate | UTF-8 | 290 | 3 | 3 | [] | no_license | #!/bin/bash
# step 1) fill $mem with $nr_cpu files
# step 2) truncate the files concurrently
# NOTE(review): "truncate" is effected here via rm (unlink) of the sparse
# files, run concurrently in the background; `wait` blocks until every
# removal finishes. Relies on hw_vars to define $mem, $nr_cpu, $SPARSE_FILE
# and the create_sparse_file() helper — confirm against that file.
source ./hw_vars
# Create one sparse file per CPU, each an equal share of $mem, and fault
# its pages in by reading it (cp to /dev/null).
for i in `seq $nr_cpu`
do
	create_sparse_file $SPARSE_FILE-$i $((mem / nr_cpu))
	cp $SPARSE_FILE-$i /dev/null
done
# Remove all files in parallel, then wait for every background rm.
for i in `seq $nr_cpu`
do
	rm $SPARSE_FILE-$i &
done
wait
| true |
bfe341f89b02255526933c0b2b8507a9210cbd1a | Shell | petronny/aur3-mirror | /myhdl-hg/PKGBUILD | UTF-8 | 885 | 2.671875 | 3 | [] | no_license | # Contributor: LUO, Jian <jian (dot) luo (dot) cn _(at)_ gmail (dot) com>
# Arch Linux PKGBUILD building MyHDL from its Mercurial repository
# (makepkg's hg support uses _hgroot/_hgrepo to perform the checkout).
pkgname=myhdl-hg
pkgver=1031
pkgrel=1
pkgdesc="a Python-Based Hardware Description Language (mercurial repo)"
arch=(i686 x86_64)
url="http://www.myhdl.org"
_hgroot="http://hg.myhdl.org"
_hgrepo="myhdl"
source=()
depends=('python')
makedepends=('mercurial' 'iverilog')
optdepends=('iverilog: for Co-Simulation with Icarus Verilog')
license=('LGPL')
provides=('myhdl')
conflicts=('myhdl')
md5sums=()
build() {
  cd $startdir/src/$_hgrepo
  # Install the Python package into the staging directory $pkgdir.
  python setup.py install --prefix=/usr --root=$pkgdir
  # Install license
  install -m 0644 -D ./LICENSE.txt $startdir/pkg/usr/share/licenses/$pkgname/LICENSE.txt
  # Make cosimulation modules for Icarus Verilog
  cd $startdir/src/$_hgrepo/cosimulation/icarus
  make || return 1
  install -m 0755 -D ./myhdl.vpi $pkgdir/usr/lib/ivl/myhdl.vpi
}
# vim: ts=2 sw=2 et ft=sh
| true |
c9ff14ed5c156e062ca92dd9a7c62c137295a174 | Shell | ElitCoder/util | /filter_rew_to_json/build.sh | UTF-8 | 242 | 2.953125 | 3 | [] | no_license | #!/bin/sh
# Out-of-source CMake build cross-compiled with the Windows MinGW toolchain.
# Current path, so the caller can be returned to it afterwards.
CURRENT_PATH=$(pwd)

# Create build folder from scratch.
rm -rf build
mkdir -p build
# Guard the cd: without it a failure would run cmake in the source tree.
cd build || exit 1

# Set toolchain
cmake .. -DCMAKE_TOOLCHAIN_FILE=../toolchain-windows-mingw32.cmake

# Build
cmake --build .

# Go back
cd $CURRENT_PATH | true |
893770c84cc791f26bceeabf8d31289d1ab18411 | Shell | dzhuang/latex2pdf | /run-build-and-tests.sh | UTF-8 | 2,078 | 3.578125 | 4 | [] | no_license | #! /bin/bash
set -e
# CI build script: installs TeX Live packages, Python/npm dependencies and
# optional user fonts, prepares the Django app, builds a Docker image when
# Docker credentials are configured, then generates migrations/translations.
CWD=$(pwd)
APPDIR="$(pwd)/latex2pdf"
sudo apt-get update
sudo apt-get -y install --no-install-recommends -qq wget curl gettext
# NOTE(review): the quoting '{print $1'} looks wrong but works — the closing
# brace sits outside the single quotes yet joins the same shell word, so awk
# still receives the program {print $1}. Worth normalizing to '{print $1}'.
sudo apt-get -y install --no-install-recommends -qq $(awk '{print $1'} texlive_apt.list)
tex --version
# Extra Python packages needed only in CI (coverage upload, test factories).
echo codecov >> "$APPDIR"/requirements.txt
echo factory_boy >> "$APPDIR"/requirements.txt
pip install --no-cache-dir -r "$APPDIR"/requirements.txt
# if you need to install extra packages, specify in EXTRA_PACKAGE in Github secrets.
if [[ "$EXTRA_PACKAGE" ]]
then
    sudo apt-get -y install --no-install-recommends -qq "$EXTRA_PACKAGE"
fi
# Install extra fonts. Compress the fonts you want to include in you own build, and
# make the file a downloadable link with ".tar.gz" extensions, and the put
# the url in your Github secrets named "MY_EXTRA_FONTS_GZ".
# ALERT!!! To include fonts in your own builds, You must respect the intellectual property
# rights (LICENSE) of those fonts, and take the correspond legal responsibility.
sudo mkdir -p ./extra_fonts
if [[ "$MY_EXTRA_FONTS_GZ" ]]
then
    echo "----Installing user customized fonts.----"
    wget "$MY_EXTRA_FONTS_GZ" -O fonts.tar.gz -q
    sudo mkdir -p /usr/share/fonts/extra_fonts
    sudo tar -zxf fonts.tar.gz -C ./extra_fonts
    sudo cp -r ./extra_fonts /usr/share/fonts/
    sudo fc-cache -f
else
    echo "----No user customized fonts.----"
fi
# This is needed to run makemigrations
cd "$APPDIR" || exit 1
cp local_settings/local_settings_example.py local_settings/local_settings.py
npm install
python manage.py collectstatic
# Build and tag a Docker image only when DOCKER_USERNAME is provided;
# non-main branches get a branch-suffixed image name.
if [[ $DOCKER_USERNAME ]]; then
    echo "----Building docker image.----"
    cd "$CWD" || exit 1
    IMAGE=$DOCKER_USERNAME/latex2pdf
    if [[ "$BRANCH_NAME" != "main" ]]; then
        IMAGE=$DOCKER_USERNAME/latex2pdf-$BRANCH_NAME
    fi
    # Tag with the first 8 characters of the commit SHA.
    docker build --no-cache . -t $IMAGE:${COMMIT_SHA::8} || exit 1
    echo "----Docker images----"
    docker images
fi
cd "$APPDIR" || exit 1
python manage.py makemigrations
python manage.py makemessages -l zh_Hans --no-location
# echo "----Tests started:----"
# coverage run manage.py test tests && coverage report -m && codecov
| true |
0d97277d5421b517817ea93bd8fdc85ac7932917 | Shell | ninkari/sh-fivem | /sh-fivem.sh | UTF-8 | 7,493 | 3.328125 | 3 | [] | no_license | #!/bin/bash
# FiveM auto-install script: interactively installs the FiveM server
# artifacts, a systemd unit, MariaDB/PHP, and phpMyAdmin on Ubuntu.
#=====================================================================================
# Author: Clashplayer#3630
#=====================================================================================
#=====================================================================================
# Root Force (abort below unless running as root)
# By Clashplayer#8772
cat << "EOF"
 /$$$$$$ /$$ /$$ /$$$$$$$$ /$$$$$$ /$$ /$$ /$$$$$$$$ /$$ /$$ /$$ /$$ /$$$$$$ /$$
 /$$__ $$| $$ | $$ | $$_____/|_ $$_/| $$ | $$| $$_____/| $$$ /$$$ | $$ | $$ /$$__ $$ /$$$$
| $$ \__/| $$ | $$ | $$ | $$ | $$ | $$| $$ | $$$$ /$$$$ | $$ | $$|__/ \ $$ |_ $$
| $$$$$$ | $$$$$$$$ /$$$$$$| $$$$$ | $$ | $$ / $$/| $$$$$ | $$ $$/$$ $$ | $$ / $$/ /$$$$$$/ | $$
 \____ $$| $$__ $$|______/| $$__/ | $$ \ $$ $$/ | $$__/ | $$ $$$| $$ \ $$ $$/ /$$____/ | $$
 /$$ \ $$| $$ | $$ | $$ | $$ \ $$$/ | $$ | $$\ $ | $$ \ $$$/ | $$ | $$
| $$$$$$/| $$ | $$ | $$ /$$$$$$ \ $/ | $$$$$$$$| $$ \/ | $$ \ $/ | $$$$$$$$ /$$ /$$$$$$
 \______/ |__/ |__/ |__/ |______/ \_/ |________/|__/ |__/ \_/ |________/|__/|______/
EOF
#Supported systems:
supported="Ubuntu"
# NOTE(review): only COLOR1/COLOR2/COLOR3/NC are defined here; the
# RED/YELLOW/CYAN/GREEN/red/reset variables used further down are never set
# in this script and expand to empty strings — confirm whether intended.
COLOR1='\033[0;32m' #green color
COLOR2='\033[0;31m' #red color
COLOR3='\33[0;33m'
NC='\033[0m' #no color
# Refuse to run without root privileges.
if [ "$(id -u)" != "0" ]; then
printf "${RED}ERREUR : SH-FIVEM n'a pas accès root. ⛔️\\n" 1>&2
printf "\\n"
exit 1
fi
printf "${COLOR1} Plus besoin du screen avec la version 2.1 de \\n"
printf "${COLOR1}©️ Copyright Tous droits réservés.©️ \\n"
printf "${COLOR2}💻 Systèmes pris en charge : $supported 💻\\n"
printf "${NC}\\n"
sleep 6
#############################################################################
# FiveM installation prerequisites
apt update -y
apt upgrade -y
apt install sudo xz-utils git curl screen sed -y
# Install the FiveM server artifacts (heading said "4687" but the build
# actually downloaded below is 4394)
echo
printf "${YELLOW} LAST NEW Artifacts : Souhaitez-vous instalaltion du serveur Five M avec la version de 4394 ❓ [o/N]\\n"
read reponse
if [[ "$reponse" == "o" ]]
then
printf "${CYAN} Démarrage de l'instalaltion de version de 4394 pour serveur Five M !"
cd /home/
mkdir -p fivem
cd /home/fivem
wget https://runtime.fivem.net/artifacts/fivem/build_proot_linux/master/4394-572b000db3f5a323039e0915dac64641d1db408e/fx.tar.xz
tar xvfJ fx.tar.xz
# Automatic cache removal (left disabled)
# sed -i '1irm -r cache' run.sh
rm fx.tar.xz
fi
sleep 2
# Install the systemd unit so the server can be started via systemctl
echo
printf "${YELLOW} Vous souhaitez disposer d'une nouvelle technologie pour démarrer votre serveur fivem ? ❓ [o/N]\\n"
read reponse
if [[ "$reponse" == "o" ]]
then
printf "${CYAN} Démarrage technologie pour démarrer votre serveur fivem !"
cd /etc/systemd/system
wget https://raw.githubusercontent.com/Clashplayer-PROTECT/sh-fivem/master/fivem.service
systemctl enable fivem.service
fi
sleep 2
# MariaDB installation (plus PHP 7.4 stack and Apache)
echo
printf "${YELLOW} Souhaitez-vous créer une installation automatique de MariaDB ❓ [o/N]\\n"
read reponse
if [[ "$reponse" == "o" ]]
then
printf "${CYAN} Démarrage de l'instalaltion de MariaDB pour serveur FiveM !"
apt -y install software-properties-common curl apt-transport-https ca-certificates gnupg
LC_ALL=C.UTF-8 add-apt-repository -y ppa:ondrej/php
add-apt-repository -y ppa:chris-lea/redis-server
curl -sS https://downloads.mariadb.com/MariaDB/mariadb_repo_setup | sudo bash
apt update -y
sudo add-apt-repository ppa:ondrej/php
sudo apt-get update -y
sudo apt-get install php-mbstring php-gettext
sudo apt -y install php7.4
apt install -y php7.4-{cli,gd,mysql,pdo,mbstring,tokenizer,bcmath,xml,fpm,curl,zip} mariadb-client mariadb-server apache2 tar unzip git
php -v
fi
sleep 2
# Prompt for database name / user / password (defaults: sh_base, sh-fivem).
echo -n -e "${GREEN}Quel est le nom de votre base de données ❓ ${YELLOW}(sh_base)${reset}: "
read -r DBNAME
if [[ "$DBNAME" == "" ]]; then
DBNAME="sh_base"
fi
sleep 2
echo -n -e "${GREEN}Quel est l'utilisateur de votre base de données ❓ ${YELLOW}(sh-fivem)${reset}: "
read -r DBUSER
if [[ "$DBUSER" == "" ]]; then
DBUSER="sh-fivem"
fi
sleep 2
echo -n -e "${GREEN}Quel est le mot de passe de votre base de données ❓ ${reset}: "
read -s -r DBPASS
# Keep asking until a non-empty password has been supplied.
while true; do
if [[ "$DBPASS" == "" ]]; then
echo -e "${red}Le mot de passe doit être obligatoire !"
echo -n -e "${GREEN}Quel est le mot de passe de votre base de données ❓ ${reset}: "
read -s -r DBPASS
else
echo -e "${GREEN}Le mot de passe est correct !${reset}"
break
fi
done
# phpMyAdmin installation
echo
printf "${YELLOW} Souhaitez-vous crée une installation automatique de PHPMYADMIN ❓ [o/N]\\n"
read reponse
if [[ "$reponse" == "o" ]]
then
printf "${CYAN} Démarrage de l'instalaltion de phpMyAdmin pour serveur Five M !"
apt install phpmyadmin
sudo service apache2 restart
ln -s /usr/share/phpmyadmin/ /var/www/html/phpmyadmin
fi
# Create the database user and database, then grant privileges.
echo -e "Configuration de la utilisateur"
echo "Mettre le mot de passe root de MySQL"
sleep 2
mysql -e "CREATE USER '${DBUSER}'@'localhost' IDENTIFIED BY '${DBPASS}';"
mysql -e "CREATE DATABASE ${DBNAME};"
mysql -p -e "GRANT ALL PRIVILEGES ON * . * TO '${DBUSER}'@'localhost';"
mysql -e "FLUSH PRIVILEGES;"
sleep 3
printf "${COLOR3} L'installation est terminée ! \\n"
printf "${COLOR3} Discord de SH-FIVEM : https://discord.gg/Bx5UUV54mu \\n"
printf "${COLOR3} Github de Clahsplayer sur SH-FIVEM: https://github.com/Clashplayer-PROTECT/sh-fivem \\n"
echo -en '\n'
sleep 3
printf "${COLOR1} TOPO du MySQL \\n"
printf "${COLOR1} Lien du phpMyAdmin : http://$(hostname -I)/phpmyadmin/ \\n"
printf "${COLOR1} Nom d'utilisateur de la base de données MySQL: ${DBUSER}\\n"
printf "${COLOR1} Mot de passe de connexion base de données MySQL: ${DBPASS} \\n"
echo -en '\n'
sleep 3
printf "${COLOR2}💻 TOPO sur créaction de votre seveur ! \\n"
printf "${COLOR2}💻 Chemin du dossier : /home/fivem \\n"
printf "${COLOR2}💻 Ne surtout pas supprime run.sh et alpine\\n"
printf "${NC}\\n"
cat << "EOF"
 /$$$$$$ /$$ /$$ /$$$$$$$$ /$$$$$$ /$$ /$$ /$$$$$$$$ /$$ /$$ /$$ /$$ /$$$$$$ /$$
 /$$__ $$| $$ | $$ | $$_____/|_ $$_/| $$ | $$| $$_____/| $$$ /$$$ | $$ | $$ /$$__ $$ /$$$$
| $$ \__/| $$ | $$ | $$ | $$ | $$ | $$| $$ | $$$$ /$$$$ | $$ | $$|__/ \ $$ |_ $$
| $$$$$$ | $$$$$$$$ /$$$$$$| $$$$$ | $$ | $$ / $$/| $$$$$ | $$ $$/$$ $$ | $$ / $$/ /$$$$$$/ | $$
 \____ $$| $$__ $$|______/| $$__/ | $$ \ $$ $$/ | $$__/ | $$ $$$| $$ \ $$ $$/ /$$____/ | $$
 /$$ \ $$| $$ | $$ | $$ | $$ \ $$$/ | $$ | $$\ $ | $$ \ $$$/ | $$ | $$
| $$$$$$/| $$ | $$ | $$ /$$$$$$ \ $/ | $$$$$$$$| $$ \/ | $$ \ $/ | $$$$$$$$ /$$ /$$$$$$
 \______/ |__/ |__/ |__/ |______/ \_/ |________/|__/ |__/ \_/ |________/|__/|______/
EOF
| true |
ba74c2c4b2dbd981e50dbfbd3793d6aecb9037e4 | Shell | code4clouds/bedrock | /cluster/environments/minikube/deploy_minikube.sh | UTF-8 | 442 | 2.65625 | 3 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | #!/bin/sh
# variables for deployment
# Replace the {placeholder} values before running: GITOPS_SSH_URL is the ssh
# clone URL of the gitops repo and GITOPS_SSH_KEY the matching private key.
GITOPS_SSH_URL="{insert the ssh url to the gitops github repo here}"
GITOPS_SSH_BRANCH="master"
GITOPS_SSH_KEY="{insert the path to the private ssh key file here}"
# Upstream Flux sources and the directory inside the gitops repo to watch.
FLUX_REPO_URL="https://github.com/weaveworks/flux.git"
REPO_ROOT_DIR="repo-root"
# install flux into cluster
./../../common/flux/deploy_flux.sh -g $GITOPS_SSH_URL -b $GITOPS_SSH_BRANCH -k $GITOPS_SSH_KEY -f $FLUX_REPO_URL -d $REPO_ROOT_DIR | true |
14016ec01ed267411be8675f84131302d1b88afc | Shell | asdf-vm/asdf-elixir | /scripts/lint | UTF-8 | 418 | 3.03125 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Unofficial Bash "strict mode"
# http://redsymbol.net/articles/unofficial-bash-strict-mode/
set -euo pipefail
#ORIGINAL_IFS=$IFS
IFS=$'\t\n' # Stricter IFS settings
# This script exists to make linting during local development easy. These checks
# are identical to the ones run by the Github workflow.
# Run shellcheck on Bash scripts
shellcheck bin/* scripts/*
# Run formatter
# shfmt -d only diffs; it exits non-zero (failing this script under set -e)
# when any file is not formatted with 2-space indentation.
shfmt -i 2 -d .
| true |
c88c9b7df96e695bbbe211bb2b92d6ca2bfed23c | Shell | pmachart/dotfiles | /.config/i3blocks/btmousebattery.sh | UTF-8 | 343 | 2.90625 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
echo -n "Arc:"
if [[ -n "$(/usr/bin/upower -d | grep mouse_dev_E8_1C_E1_42_49_27)" ]] ; then
echo -n "<span color='green'>"
echo -n $(/usr/bin/upower -i /org/freedesktop/UPower/devices/mouse_dev_E8_1C_E1_42_49_27 | grep percentage | awk '{print $2}')
echo "</span>"
else
echo -n "<span color='red'>Off</font>"
fi
| true |
24104b0ba77c5a067e19f817c219f7e3f72c0a0d | Shell | MaayanLab/signature-commons | /charts/build_helm_chart.sh | UTF-8 | 2,182 | 3.46875 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Builds the signature-commons Helm chart: renders the docker-compose files
# through helm-chart-docker-compose, derives values.yaml and the variable
# substitutions from .env.example, splits the rendered manifest into one
# template per source file, and removes the intermediary stage files.
export output=signature-commons
export version=v2
mkdir -p "${output}/${version}"
# Generate base kubernetes deployments with helm-chart-docker-compose
helm template --debug \
helm-chart-docker-compose ./helm-chart-docker-compose/v1/ \
-f ../docker-compose.yml \
-f ../docker-compose.metadata-db.yml \
-f ../docker-compose.data-db.yml \
> "${output}/${version}/${output}.yaml.stage0"
# Generate values.yaml with .env.example
# (rewrites each KEY=VALUE line into a `KEY: "VALUE"` YAML entry)
sed \
-e 's/^\([^=]\+\)=\(.*\)$/\1: "\2"/g' \
< ../.env.example \
> "${output}/${version}/values.yaml"
# Generate .Values substitution with .env.example
# (rewrites each KEY=VALUE line into `export KEY="{{ .Values.KEY }}"`)
sed \
-e 's/^\([^=]\+\)=\(.*\)$/export \1="{{ .Values.\1 }}"/g' \
< ../.env.example \
> "${output}/${version}/.env"
# Substitute variables with envsubst
. ${output}/${version}/.env
# Protect literal `$$` by routing it through the _DOLLAR variable so
# envsubst does not mangle it.
export _DOLLAR='$'
sed -e 's/\$\$/${_DOLLAR}/g' \
< "${output}/${version}/${output}.yaml.stage0" \
> "${output}/${version}/${output}.yaml.stage1"
envsubst \
< "${output}/${version}/${output}.yaml.stage1" \
> "${output}/${version}/${output}.yaml.stage2"
# Replace helm-chart-docker-compose
sed \
-e "s/helm-chart-docker-compose/${output}/g" \
< "${output}/${version}/${output}.yaml.stage2" \
> "${output}/${version}/${output}.yaml.stage3"
# Split file into separate files
# NOTE(review): the inline Python below is indentation-sensitive and its
# indentation appears lost in this copy of the file — restore the original
# block structure before running. It splits the rendered manifest on "---",
# writes each document to templates/<basename>, and un-comments "#!" lines.
mkdir -p "${output}/${version}/templates"
python3 -c "
import os, re
for content in map(str.strip, open(os.path.join('${output}', '${version}', '${output}.yaml.stage3'), 'r').read().split('---')):
m = re.search('# Source!: (.+)\n', content)
if m:
filename = m.group(1)
with open(os.path.join('${output}', '${version}', 'templates', os.path.basename(filename)), 'w') as fw:
for line in content.splitlines():
if line.startswith('#!'):
print(line[2:], file=fw)
else:
print(line, file=fw)
"
# Add README
cp ../README.md "${output}/${version}/README.md"
# Clean up intermediary files
echo "Cleaning up..."
rm \
"${output}/${version}/${output}.yaml.stage0" \
"${output}/${version}/${output}.yaml.stage1" \
"${output}/${version}/${output}.yaml.stage2" \
"${output}/${version}/${output}.yaml.stage3" \
"${output}/${version}/.env"
| true |
8b08d859a1b73e8629cc3293ad1efb12a76c7d94 | Shell | elis351/promprog | /bash/bash4.sh | UTF-8 | 198 | 2.84375 | 3 | [] | no_license | #!/bin/bash
reducers_count=1
mappers_count=1
reducer_executable=""
mapper_executable=""
for i in {1..$reducers_count}
do
eval "reducer_data_$i=()"
done
for i in {1..$END}; do echo $i; done
| true |
18b93183a4f400360641ee6b1c073b30275ef554 | Shell | sergkol2005/dotfiles | /bin/i3_toggle_split_orientation.sh | UTF-8 | 599 | 3.359375 | 3 | [] | no_license | #!/bin/bash
FILE="/tmp/.i3_split_orientation"
if [ ! -f $FILE ] ; then
echo -n "horizontal" > $FILE
i3-msg -tcommand split horizontal
else
ORIENTATION=$(cat $FILE)
if [ "$ORIENTATION" == "horizontal" ] ; then
echo -n "vertical" > $FILE
i3-msg -tcommand split vertical
elif [ "$ORIENTATION" == "vertical" ] ; then
echo -n "horizontal" > $FILE
i3-msg -tcommand split horizontal
else
echo "Wrong orientation $ORIENTATION"
rm -rf $FILE
fi
fi
ps fax | grep 'i3status -c' | grep -v grep | awk '{ print $1 }' | xargs kill -USR1
| true |
671b4f4fb3678a547de5f2e96b8607663d167a14 | Shell | vladyslovestepanov/google-cloud-dotnet | /tagreleases.sh | UTF-8 | 228 | 2.75 | 3 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | #!/bin/bash
set -e
if [ -z "$1" ]
then
echo Please specify a github access token
exit 1
fi
# Make sure we have all the tags locally.
git fetch --all --tags -f -q
dotnet run -p tools/Google.Cloud.Tools.TagReleases -- $1
| true |
75f0e2fb2d0905bac4c9261dd8570b3e8924f2dd | Shell | carolbarata/dpseudo-n-beyond | /mapping_and_variant_calling/data_subsetting_submissionscript.sh | UTF-8 | 1,243 | 3.09375 | 3 | [] | no_license | #!/bin/bash
#$ -V ## pass all environment variables to the job, VERY IMPORTANT
#$ -N snp_subsetting_200p3 # job name
#$ -S /bin/bash ## shell where it will run this job
#$ -j y ## join error output to normal output
#$ -cwd ## Execute the job from the current working directory
#$ -q centos7.q ## queue name
##################################################
# Script that filters VCF files
##################################################
MAINDIR=/storage/home/users/cdcbrb/dpseudo_data/new_ref_mapping/
for SAMPLE in $(find ${MAINDIR} -maxdepth 1 -type d -name 'G200_P3_F');
do
for FILE in $(find ${SAMPLE} -type f -name '*_both_callers_snp_data.txt');
do
head -n1 ${FILE} >> ${FILE%\.txt}_XL.txt && grep -E '^XL' ${FILE} >> ${FILE%\.txt}_XL.txt
head -n1 ${FILE} >> ${FILE%\.txt}_XR.txt && grep -E '^XR' ${FILE} >> ${FILE%\.txt}_XR.txt
head -n1 ${FILE} >> ${FILE%\.txt}_2.txt && grep -E '^2' ${FILE} >> ${FILE%\.txt}_2.txt
head -n1 ${FILE} >> ${FILE%\.txt}_3.txt && grep -E '^3' ${FILE} >> ${FILE%\.txt}_3.txt
head -n1 ${FILE} >> ${FILE%\.txt}_4.txt && grep -E '^4' ${FILE} >> ${FILE%\.txt}_4.txt
head -n1 ${FILE} >> ${FILE%\.txt}_5.txt && grep -E '^5' ${FILE} >> ${FILE%\.txt}_5.txt
done
echo "${SAMPLE} is done"
done
| true |
3d6e160fdfb1d1b72fa30ca6bc86ff672b708abe | Shell | natemarks/dotfiles | /bashrc.d/temp_aliases.sh | UTF-8 | 277 | 2.765625 | 3 | [
"LicenseRef-scancode-fsf-notice"
] | permissive | #!/usr/bin/env bash
# I use temp dirs all the time
alias cd_temp='cd $(mktemp -d -t deleteme.XXXXXX --tmpdir=$HOME/tmp)'
alias mk_temp='mktemp -d -t deleteme.XXXXXX --tmpdir=$HOME/tmp'
alias rm_temp='find -L ~/tmp -type d -name "deleteme.*" -print 2> /dev/null | xargs rm -rf'
| true |
bc11ef3d12206a567be18335b5e4f01cde5ee48b | Shell | unleashlive/stream-bitrate-stats | /release.sh | UTF-8 | 1,589 | 4.28125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Based on:
# https://gist.github.com/pete-otaqui/4188238
# works with a file called VERSION in the current directory,
# the contents of which should be a semantic version number
# such as "1.2.3"
# this script will display the current version, automatically
# suggest a "minor" version update, and ask for input to use
# the suggestion, or a newly entered value.
# once the new version number is determined, the script will
# pull a list of changes from git history, prepend this to
# a file called HISTORY.md (under the title of the new version
# number) and create a GIT tag.
SRC_VERSION_FILE="stream_bitrate_stats/__init__.py"
BASE_STRING=`cat VERSION`
BASE_LIST=(`echo $BASE_STRING | tr '.' ' '`)
V_MAJOR=${BASE_LIST[0]}
V_MINOR=${BASE_LIST[1]}
V_PATCH=${BASE_LIST[2]}
echo "Current version: $BASE_STRING"
V_PATCH=$((V_PATCH + 1))
SUGGESTED_VERSION="$V_MAJOR.$V_MINOR.$V_PATCH"
read -p "Enter a version number [$SUGGESTED_VERSION]: " INPUT_STRING
if [ "$INPUT_STRING" = "" ]; then
INPUT_STRING=$SUGGESTED_VERSION
fi
echo "Will set new version to be $INPUT_STRING"
perl -pi -e "s/$BASE_STRING/$INPUT_STRING/g" "$SRC_VERSION_FILE"
echo $INPUT_STRING > VERSION
echo "Version $INPUT_STRING:" > tmpfile
echo "" >> tmpfile
git log --pretty=format:"- %s" "v$BASE_STRING"...HEAD >> tmpfile
echo "" >> tmpfile
echo "" >> tmpfile
cat HISTORY.md >> tmpfile
mv tmpfile HISTORY.md
git add HISTORY.md VERSION "$SRC_VERSION_FILE"
git commit -m "Version bump to $INPUT_STRING"
git tag -a -m "Tagging version $INPUT_STRING" "v$INPUT_STRING"
git push && git push origin --tags | true |
b16f60a48c16f2159b17c2b671fdd5509df905e7 | Shell | Xanthorrhizol/OpenVPN_Setter | /set_server.bash | UTF-8 | 5,113 | 3.5625 | 4 | [] | no_license | #!/bin/bash
# privilege check
if [ $USER != "root" ]; then
echo "please run with root privilege"
exit -1
fi
# get user inputs
echo -e "======================="
echo -e " OpenVPN Server Setter"
echo -e "=======================\n"
echo -e "1. Enter your public IP"
read ip
filtered=$(echo ${ip} | grep -Eo "[1-9]{1}[0-9]{0,2}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}")
while [ ${#filtered} -eq 0 ]; do
echo -e "ERROR: Enter the valid IP"
echo -e "1. Enter your public IP"
read ip
filtered=$(echo ${ip} | grep -Eo "[1-9]{1}[0-9]{0,2}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}")
done
echo -e "2. Enter the port that you want to use for VPN server[1194]"
read port
if [ ${#port} -eq 0 ]; then
port="1194"
fi
echo -e "3. Enter the protocol(tcp or udp) you will use[udp]"
read protocol
if [ ${#protocol} -eq 0 ]; then
protocol="udp"
fi
echo -e "4. Enter your country[US]"
read country
if [ ${#country} -eq 0 ]; then
country="US"
fi
echo -e "5. Enter your province[CA]"
read province
if [ ${#province} -eq 0 ]; then
province="CA"
fi
echo -e "6. Enter your city[SanFrancisco]"
read city
if [ ${#city} -eq 0 ]; then
city="SanFrancisco"
fi
echo -e "7. Enter your organization[Fort-Funston]"
read org
if [ ${#org} -eq 0 ]; then
org="Fort-Funston"
fi
echo -e "8. Enter your email[me@myhost.mydomain]"
read email
if [ ${#email} -eq 0 ]; then
email="me@myhost.mydomain"
fi
echo -e "9. Enter your organization unit[MyOrganizationUnit]"
read ou
if [ ${#ou} -eq 0 ]; then
ou="MyOrganizationUnit"
fi
echo -e "10. Enter the key name you want[EasyRSA]"
read name
if [ ${#name} -eq 0 ]; then
name="EasyRSA"
fi
echo -e "11. What is your ISP provider?[KT]"
read isp
if [ ${#isp} -eq 0 ]; then
isp="KT"
fi
if [ ${isp} == "SKT" ] || [ ${isp} == "skt" ]; then
isp="SKT"
dns1="219.250.36.130"
dns2="210.220.163.82"
elif [ ${isp} == "KT" ] || [ ${isp} == "kt" ]; then
isp="KT"
dns1="168.126.63.1"
dns2="168.126.63.2"
elif [ ${isp} == "LG" ] || [ ${isp} == "lg" ]; then
isp="LG"
dns1="164.124.101.2"
dns2="203.248.252.2"
else
isp="other"
dns1="8.8.8.8"
dns2="8.8.4.4"
fi
# install openvpn & easyrsa
apt update -y
apt install openvpn ufw openssh-server wget git -y
wget -P ~/ https://github.com/OpenVPN/easy-rsa/releases/download/v3.0.8/EasyRSA-3.0.8.tgz
tar xvf ~/EasyRSA-3.0.8.tgz -C ~/
# make required dirs
mkdir -p ~/client-configs/keys
mkdir -p ~/client-configs/files
# copy setting files to destination
cp vars ~/EasyRSA-3.0.8/vars
cp server.conf /etc/openvpn/server.conf
cp base.conf ~/client-configs/base.conf.bak
# set add_allowed_ip.bash
cp add_allowed_ip.bash.bak add_allowed_ip.bash
sed -i "s/\[port\]/${port}/g" add_allowed_ip.bash
sed -i "s/\[protocol\]/${protocol}/g" add_allowed_ip.bash
# set vars(key's information)
cd ~/EasyRSA-3.0.8/
sed -i "s/\[country\]/${country}/g" vars
sed -i "s/\[province\]/${province}/g" vars
sed -i "s/\[city\]/${city}/g" vars
sed -i "s/\[org\]/${org}/g" vars
sed -i "s/\[email\]/${email}/g" vars
sed -i "s/\[ou\]/${ou}/g" vars
sed -i "s/\[name\]/${name}/g" vars
# set base.conf(client's config)
cd ~/client-configs/
sed -i "s/\[ip\]/${ip}/g" base.conf.bak
sed -i "s/\[port\]/${port}/g" base.conf.bak
sed -i "s/\[protocol\]/${protocol}/g" base.conf.bak
# set server.conf
cd /etc/openvpn/
sed -i "s/\[port\]/${port}/g" server.conf
sed -i "s/\[protocol\]/${protocol}/g" server.conf
sed -i "s/\[dns1\]/${dns1}/g" server.conf
sed -i "s/\[dns2\]/${dns2}/g" server.conf
if [ ${protocol} == "udp" ]; then
sed -i "s/\[mode\]/1/g" server.conf
elif [ ${protocol} == "tcp" ]; then
sed -i "s/\[mode\]/0/g" server.conf
else
echo "ERROR: protocol is invalid"
fi
cd $dir
# firewall settings
echo -e "*nat\n:POSTROUTING ACCEPT \[0:0\]\n-A POSTROUTING -s 10.8.0.0/8 -o eth0 -j MASQUERADE\nCOMMIT" >> /etc/ufw/before.rules
sed -i "s/DEFAULT_FORWARD_POLICY=\"DENY\"/DEFAULT_FORWARD_POLICY=\"ACCEPT\"/g" /etc/default/ufw
ufw enable
# set securetty
cp /usr/share/doc/util-linux/examples/securetty /etc/securetty
# set sysctl.conf
echo 1 > /proc/sys/net/ipv4/ip_forward
sed -i "s/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/g" /etc/sysctl.conf
sed -i "s/# net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/g" /etc/sysctl.conf
sysctl -p
# ip forwarding
iptables -A INPUT -i tun+ -j ACCEPT
iptables -A FORWARD -i tun+ -j ACCEPT
# generate cert & keys
cd ~/EasyRSA-3.0.8/
./easyrsa init-pki
./easyrsa build-ca nopass
./easyrsa gen-req server nopass
cp pki/private/server.key /etc/openvpn/
./easyrsa sign-req server server
cp pki/issued/server.crt /etc/openvpn/
cp pki/ca.crt /etc/openvpn/
./easyrsa gen-dh
openvpn --genkey --secret ta.key
cp ta.key /etc/openvpn/
cp pki/dh.pem /etc/openvpn/
# start openvpn server
systemctl start openvpn@server
systemctl enable openvpn@server
echo -e "\n----------------------------"
echo -e " The VPN server is started\n"
# prepare to add client
chmod -R 700 ~/client-configs
cp ta.key ~/client-configs/keys/
cp /etc/openvpn/ca.crt ~/client-configs/keys/
cp ~/client-configs/keys/ta.key ~/client-configs/files/
chmod 644 ~/client-configs/files/ta.key
ufw allow from any
ufw deny ${port}/${protocol}
# announce
echo -e "Next step is adding clients ovpn files.\nRun the add_client.bash."
| true |
c7d24f31f1abb6a857932a289c1dfb8acf644ae5 | Shell | ALeMire/Spark | /lib/config_helpers.sh | UTF-8 | 478 | 3.640625 | 4 | [] | no_license | #!/bin/sh
function cfile_yes_no()
{
directive=$1
bool_value=`lowercase $2`
config_file=$3
if [ "$bool_value" == "yes" ] || [ "$bool_value" == "no" ]; then
sed -i "s/^#*\($directive\).*/\1 $bool_value/" $config_file
fi
}
function cfile_0_1()
{
directive=$1
bool_value=$2
config_file=$3
if [ "$bool_value" == "0" ] || [ "$bool_value" == "1" ]; then
sed -i "s/^#*\($directive\).*/\1 $bool_value/" $config_file
fi
}
function cfile_number()
{
}
| true |
bbdfd57d1d854745b2461a11e0db8215b5432b6b | Shell | mgrutten/sgp4-comparison | /space-track/gen-include.sh | UTF-8 | 246 | 2.5625 | 3 | [
"MIT"
] | permissive | #!/usr/bin/bash
libs=(AstroFunc
DllMain
EnvConst
Sgp4Prop
TimeFunc
Tle)
mkdir -p include
for l in ${libs[@]}; do
echo "$l"
python3 afspc-fix.py "Sgp4Prop_small/SampleCode/C/DriverExamples/wrappers/${l}Dll.h" > "include/${l}.h"
done | true |
8daa7dbcefaed84f62bcc453a8d1795ffd54e489 | Shell | festerman/utils | /update_ff.sh | UTF-8 | 4,549 | 3.84375 | 4 | [] | no_license | #!/usr/bin/env bash
# after axil42's script to update the package on AUR
# https://github.com/axilleas/bin/blob/master/ff
# no guarantees!
pushd . >& /dev/null
cd /tmp
source_url_root="http:\/\/ftp.mozilla.org\/pub\/firefox"
mozillas_ftp="ftp://ftp.mozilla.org/pub/firefox/releases"
newpkgver=-1
for version in $( curl -ls "${mozillas_ftp}/" | grep -Po '^\d+\.\d+(b\d+)?$' ); do
if [[ $( vercmp $version $newpkgver ) > 0 ]]; then
newpkgver=$version
fi
done
# standard, when a beta, or a full release
download_ftp="${mozillas_ftp}/${newpkgver}"
source_url="${source_url_root}\/releases\/\${pkgver}\/linux-\${CARCH}\/en-US\/firefox-\${pkgver}.tar.bz2"
# if the latest version is a beta, then check for release candidates
rcpkgver=-1
if [[ $newpkgver =~ .*b[[:digit:]]+$ ]]; then
relver=${newpkgver%b*}
candidate_ftp="ftp://ftp.mozilla.org/pub/firefox/candidates/${relver}-candidates"
for version in $( curl -ls "${candidate_ftp}/" | grep -Po '^build\K(\d+)$' ); do
if [[ $( vercmp $version $rcpkgver ) > 0 ]]; then
rcpkgver=$version
fi
done
if [[ $rcpkgver > 0 ]]; then
newpkgver=${relver}rc${rcpkgver}
download_ftp="${candidate_ftp}/build${rcpkgver}"
source_url="${source_url_root}\/candidates\/${relver}-candidates\/build${rcpkgver}\/linux-\${CARCH}\/en-US\/firefox-${relver}.tar.bz2"
fi
fi
installed_ver=`pacman -Qi firefox-beta-bin | grep 'Version' | awk 'BEGIN { FS = " : " } ; { print $2 }' | sed 's/-[[:digit:]]\+$//'`
echo "Installed version: " $installed_ver
if [[ "$installed_ver" == "$newpkgver" ]]; then
echo "It appears the latest available version is already installed [$newpkgver]!"
exit
else
echo "A new version [$newpkgver] is available, will try to update ..."
fi
curr_ver=`cower -i firefox-beta-bin | grep Version | awk 'BEGIN { FS = " : " } ; { print $2 }'`
echo "Current AUR version: " $curr_ver
echo "Checking if new version really exists @mozilla [by getting SHA1SUMS from ${download_ftp}] ..."
# Link of SHA1SUMS file
sha="${download_ftp}/SHA1SUMS"
if [ -f SHA1SUMS ]; then rm SHA1SUMS; fi
wget -q $sha
if [ -f SHA1SUMS ]; then
echo "Version $newpkgver exists. Starting build process."
echo "Dowloading firefox-beta-bin from AUR ..."
cower -df firefox-beta-bin -t /tmp
cd /tmp/firefox-beta-bin
cp /tmp/SHA1SUMS .
echo 'Stripping SHA1SUM from downloaded file ...'
if [[ $rcpkgver > 0 ]]; then
sha1sumver=$relver
else
sha1sumver=$newpkgver
fi
newsha64=`grep -w "linux-x86_64/en-US/firefox-$sha1sumver.tar.bz2" SHA1SUMS | awk 'NR==1{print $1}'`
newsha32=`grep -w "linux-i686/en-US/firefox-$sha1sumver.tar.bz2" SHA1SUMS | awk 'NR==1{print $1}'`
echo 'Get old SHA1SUMS into variables, from the PKGBUILD ...'
oldsha64=`grep sha1sums PKGBUILD | head -n1 | cut -c 12-51`
oldsha32=`grep sha1sums PKGBUILD | tail -n1 | cut -c 42-81`
# Old package version, from PKGBUILD
oldpkgver=`grep pkgver PKGBUILD | head -n1 | awk -F= '{print $2;}'`
echo "Changing pkgver..."
echo "# old pkgver: $oldpkgver"
echo "# new pkgver: $newpkgver "
echo
sed -i "s/^pkgver=$oldpkgver$/pkgver=$newpkgver/" PKGBUILD
echo "Changing x86_64 sha1sums..."
echo "# old sha1sum firefox-x86_64: $oldsha64 "
echo "# new sha1sum firefox-x86_64: $newsha64 "
echo
sed -i "s/$oldsha64/$newsha64/" PKGBUILD
echo "Changing i686 sha1sums..."
echo "# old sha1sum firefox-i686: $oldsha32 "
echo "# new sha1sum firefox-i686: $newsha32 "
echo
sed -i "s/$oldsha32/$newsha32/" PKGBUILD
echo "Changing the download source"
echo "# new source: ${source_url}"
sed -i 's/^source=(\".*$/source=(\"'${source_url}'\"/' PKGBUILD
#### edit below for your preferred installation
#### one of the following blocks ...
#### makepkg and install
makepkg
carch=$(uname -m)
pwd=$(pwd)
source PKGBUILD
echo "Now run 'pacman -U ${pwd}/${pkgname}-${newpkgver}-${pkgrel}-${carch}.pkg.tar.xz' or similar"
#### or (simpler ...)
# makepkg -i
#### build for AUR upload (just the source)
# echo "Making source package..."
# makepkg -f --source
# echo "Uploading ..."
# source PKGBUILD
# burp $(echo ${pkgname}-${newpkgver}-${pkgrel}.src.pkg.tar.gz
#### after switching in one of the blocks above, enable this to get rid of the /tmp folder
# echo "Removing firefox-beta-bin folder"
# rm -r /tmp/firefox-beta-bin
popd >& /dev/null
else
echo "Found no SHA1SUMS file in ${sha}"
fi
| true |
7e9dd209530a30282de4f545a2cbd1b453726d73 | Shell | yoneyan/.init | /os_install/centos_install.sh | UTF-8 | 1,117 | 2.75 | 3 | [] | no_license | #!/bin/bash
echo -n User Name?:
read USER
yes | sudo yum -y epel-release
yes | sudo yum -y update
#Windows & Linux DualBoot
sudo timedatectl set-local-rtc true
#Tool
yes | sudo yum install git zsh tmux
yes | sudo yum install python3-pip
##Docker install
yes | sudo yum install -y yum-utils \
device-mapper-persistent-data \
lvm2
yes | sudo yum-config-manager \
--add-repo \
https://download.docker.com/linux/centos/docker-ce.repo
yes | sudo yum -y update
yes | sudo yum -y install docker-ce docker-ce-cli containerd.io
sudo gpasswd -a $USER docker
sudo chmod 666 /var/run/docker.sock
sudo systemctl start docker
sudo systemctl enable docker
##Docker-compose install
sudo curl -L "https://github.com/docker/compose/releases/download/1.24.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
##Neovim install
yes | yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
yes | sudo yum -y update
yes | sudo yum -y install neovim
yes | sudo yum -y install python-dev python-pip python3-dev python3-pip
| true |
5aa2a5437f1a30fe40854e4806a3f7d1fc7fe29e | Shell | shangxintonghua/swoole-src | /travis/docker-all.sh | UTF-8 | 320 | 2.96875 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/sh -e
__CURRENT__=`pwd`
__DIR__=$(cd "$(dirname "$0")";pwd)
# show info
cd ${__DIR__} && pwd && ls -al / && php -v
# compile in docker
./docker-compile.sh
# swoole info
php --ri swoole
#alpine
if [ "`apk | grep apk-tools`"x != ""x ]; then
echo "skip alpine\n"
exit 0
fi
# run unit tests
./docker-test.sh
| true |
4bb084c61938e49efe18f79d67a825c4340b57f1 | Shell | camante/DEM_generation_old | /resample_rasters.sh | UTF-8 | 482 | 3.03125 | 3 | [] | no_license | mkdir -p one_third
for i in *1_3*.tif;
do
echo $i
echo "Resampling to 1_9th resolution"
#name=${i#*_}
output_tmp="tmp.tif"
output="1_9_tmp_"$i
echo output_tmp is $output_tmp
echo output is $output
#echo $output
gdalwarp -r cubicspline -tr 0.00003086420 0.00003086420 -t_srs EPSG:4269 $i $output_tmp -overwrite
gdal_translate $output_tmp -a_srs EPSG:4269 -a_nodata -99999 -co "COMPRESS=DEFLATE" -co "PREDICTOR=3" -co "TILED=YES" $output
rm $output_tmp
mv $i one_third/$i
echo
done
| true |
e3d118673114be1af59e2c597309aa8312dd51e3 | Shell | ItachiEU/University | /Linux/Lab3/yad.sh | UTF-8 | 1,233 | 3.453125 | 3 | [] | no_license | #!/bin/bash
IFS=$'\n'
index=0
track_list=$(find . -type f -name *.mp3)
for var in $track_list
do
index=$((index+1))
tracks[$index]=$(mp3info -p "%l (%a): %t" "$var")
track_paths[$index]=$var
done
args=("Search by title" "")
index=0
for item in "${tracks[@]}"; do
index=$((index+1))
args+=("$index" "$item")
done
while [ 1 ]
do
CHOICE=$(yad --center --list --title="Music Player" --width=800 --height=400 --column="Number" --column="Description" "${args[@]}" 2>/dev/null | awk -F"|" '{print $1}')
echo "$CHOICE"
if [[ $CHOICE == "Search by title" ]]; then
SEARCH=$(yad --center --title="Search songs" --entry --entry-label="Type title" --width=800 --height=400 2>/dev/null)
args=("Search by title" "")
index=0
for item in "${tracks[@]}"; do
index=$((index+1))
matchtile=$(echo $item | sed 's/.*)//')
if [[ $matchtile == *"$SEARCH"* ]]; then
args+=("$index" "$item")
fi
done
continue
fi
if [[ "$CHOICE" == "" ]]
then
exit 0
fi
if [ "$CHOICE" -ge 1 ] && [ "$CHOICE" -le $index ]
then
mplayer ${track_paths[$CHOICE]} 2>/dev/null && continue
fi
done | true |
cece1acbcc3441b20d4d05b50ee06e57d8df255f | Shell | jbane11/Bane_SIMC | /weight_T2/SQL_kin_tgt | UTF-8 | 963 | 3.28125 | 3 | [] | no_license | #!/bin/bash
set -x
if [[ $# -eq 0 ]]
then
echo "what target would you like to use?"
read Target
else
Target=$1
fi
if [[ $# -lt 2 ]]
then
echo "what kinematic would you like to look at?"
read kin
else
kin=$2
fi
if [ "$Target" == "D2" ]
then
echo "D2"
tarid=2
elif [ "$Target" == "H3" ]
then
echo "H3"
tarid=3
elif [ "$Target" == "He3" ]
then
echo "He3"
tarid=4
elif [ "$Target" == "Carbon" ]
then
echo "foil"
tarid=7
elif [ "$Target" == "optics" ]
then
echo "optics"
tarid=8
else
echo "dummy"
tarid=0
fi
runs=($(mysql -B --user=triton-user -p3He3Hdata -hhalladb triton-work -e"select run_number from MARATHONrunlist where target='${Target}' and Kinematic='${kin}' "))
echo ${#runs[@]}
while [[ $i -lt ${#runs[@]}-1 ]]
do
i=$(( $i+1 ))
j=$(( $i ))
echo ${runs[${j}]}
run=${runs[${j}]}
sh ./T2_MC "${Target}" "${kin}" "${run}" "${tarid}"
done
| true |
dd61243e3a53a3dd78587d6091f0284381a0e7ec | Shell | cerna/machinekit-fixuid | /pack.sh | UTF-8 | 948 | 3.890625 | 4 | [
"MIT"
] | permissive | #!/bin/sh
cd $(dirname $0)
display_usage() {
echo "Usage:\n$0 [version]"
}
# check whether user had supplied -h or --help . If yes display usage
if [ $# = "--help" ] || [ $# = "-h" ]
then
display_usage
exit 0
fi
# check number of arguments
if [ $# -ne 1 ]
then
display_usage
exit 1
fi
for GOOS in linux; do
for GOARCH in amd64 arm64 arm 386; do
export GOOS="$GOOS"
export GOARCH="$GOARCH"
if [ "$GOARCH" = "386" ]
then
DEB_ARCH="i386"
elif [ "$GOARCH" = "arm" ]
then
DEB_ARCH="armhf"
else
DEB_ARCH="$GOARCH"
fi
./build.sh
rm -f machinekit-fixuid-*-${DEB_ARCH}.tar.gz
perm="$(id -u):$(id -g)"
sudo chown root:root fixuid
sudo chmod u+s fixuid
tar -cvzf machinekit-fixuid-${DEB_ARCH}.tar.gz fixuid
sudo chmod u-s fixuid
sudo chown $perm fixuid
done
done
| true |
72518e4e8e610caf85c3a95130362db20e824000 | Shell | gszr/dotfiles | /dots/bin/backup_keys | UTF-8 | 503 | 3.53125 | 4 | [] | no_license | #!/bin/bash
OUTPUT_DIR="$HOME"
OUTPUT_NAME="sec_$(date +%d_%m_%y).tar.gpg"
while getopts "d:f:h" OPTION; do
case $OPTION in
d)
OUTPUT_DIR=$OPTARG
;;
f)
OUTPUT_NAME=$OPTARG
;;
h|?)
echo "Usage: $0 [-d output directory] [-f backup file name]"
exit
esac
done
tar -c test -C ~/ .gnupg .ssh .password-store .electrum .electrum-ltc 2> /dev/null | \
gpg --cipher-algo AES256 --symmetric --output $OUTPUT_DIR/$OUTPUT_NAME
ls -l $OUTPUT_DIR/$OUTPUT_NAME
| true |
48c1802a34ddd2fb8407f602c372089845c75108 | Shell | fabio-cossio/TIGER_Event_Reconstruction | /run_4.sh | UTF-8 | 402 | 2.8125 | 3 | [] | no_license | ANADIR="/dati/Data_CGEM_IHEP_Integration_2019/raw_root/$1"
HERE=$PWD
NROC=12
NSUB=2000
if [ ! -d ${ANADIR} ]
then
mkdir ${ANADIR}
fi
for i in $(seq 0 20);
do
ts sleep 0.01
done
ts -N 50
ts -df sleep 0.01
#Merge Recon
ts -df bash -c "$exe_ter -C $1"
echo "Terminated all"
for i in $(seq 0 50);
do
ts sleep 0.01
done
ts -N 25
ts -df sleep 0.01
rm /tmp/ihep_data/ts-out*
cd $HERE
| true |
5c866e7719510f82b3a03755476cc8c411cff556 | Shell | QiqiM/teach_system | /docker/env/mysql/start.sh | UTF-8 | 765 | 3.390625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
#获取脚本所在目录
path=$(cd `dirname $0`;pwd)
echo "Asia/Shanghai" >$path/timezone
#mysql数据密码
password=123456
echo "mysql容器建立"
docker run --name=mysql -itd -p 3306:3306 --privileged=true -v /etc/localtime:/etc/localtime -v $path/timezone:/etc/timezone -v $path/data:/var/lib/mysql -v $path/my.cnf:/etc/mysql/my.cnf -v $path/init.sql:/opt/init.sql -e MYSQL_ROOT_PASSWORD=$password mysql:5.7.25
#判断data中有没有teach_system数据库,没有就进行初始化数据库的操作
omsCount=$(ls -l $path/data | grep -wc teach_system)
if [ $omsCount == 0 ]; then
echo "首次安装系统,进行sql初始化,请稍候"
sleep 20s
docker exec mysql bash -c "mysql -u root -p$password</opt/init.sql"
fi
| true |
52283aa56c85b341d50014088accd8e8058246d8 | Shell | GlobalFishingWatch/pipe-vessels | /scripts/run | UTF-8 | 813 | 4 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
set -e
THIS_SCRIPT_DIR="$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )"
display_usage() {
echo "Available Commands"
echo " aggregate_tracks aggregate messages into tracks and publish to bigquery"
echo " publish_postgres_tracks publish tracks from a given bigquery table to postgis"
echo " publish_vessel_info publish vessel information to the vessel search index"
}
if [[ $# -le 0 ]]
then
display_usage
exit 1
fi
case $1 in
aggregate_tracks)
${THIS_SCRIPT_DIR}/aggregate_tracks "${@:2}"
;;
publish_postgres_tracks)
${THIS_SCRIPT_DIR}/publish_postgres_tracks "${@:2}"
;;
publish_vessel_info)
${THIS_SCRIPT_DIR}/publish_vessel_info "${@:2}"
;;
*)
display_usage
exit 1
;;
esac
| true |
1c425b8a2b9ef60031faa254a4b2014419bc5a92 | Shell | ghostcow/pixel-cnn-qumran | /eval.sh | UTF-8 | 667 | 2.8125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# qumran dataset version 2 evaluation script
# script completes the test set letters using adaptive orientation.
# completion of each orientation is saved in data dir as "letter_completion_orientation_$i.pkl"
# where i in 0,..,7
for i in `seq 0 7`; do
python eval.py --data_dir data/qumran_test_letters \
--checkpoint_dir data/checkpoints/qv2_$i \
--rotation $i \
--data_set letters \
--load_params \
--nr_gpu 1 \
--gpu_mem_frac=0.4 \
--single_angle \
--init_batch_size 1 \
--batch_size 5 \
--nr_resnet 5 \
--nr_filters 40 \
--nr_logistic_mix 5 \
--nr_iters=3 | tee logs/eval_$i.log
done
| true |
bc3fe53181ea0fe1a80a87f6a50b850b4057a99d | Shell | Uzzu3/Prime-Player-Google-Play-Music | /build.sh | UTF-8 | 542 | 3.203125 | 3 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | git diff --exit-code --quiet && git diff --exit-code --cached --quiet
if [ $? -ne 0 ];
then
echo "You have uncommited changes"
exit 1
fi
tag=master
if [ $# -eq 1 ]
then
tag=$1
fi
git checkout -q $tag
if [ $? -ne 0 ];
then
echo "Invalid tag or branch: $tag"
exit 2
fi
echo "Building from tag/branch $tag"
sass --style compressed PrimePlayer/css/player.scss PrimePlayer/css/player.css
rm PrimePlayer.zip
cd PrimePlayer
7za a -xr@../exclude.lst -tzip ../PrimePlayer.zip *
cd ..
git checkout -q develop
echo "Back on branch develop"
| true |
ffc6d2dd3a524ba612a01ee16e5444a0bd29f822 | Shell | sdetweil/sonus | /preinstall | UTF-8 | 154 | 2.671875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
sudo apt-get install -y libatlas-base-dev
py_present=$(which python3)
if [ "$py_present." == "." ]; then
sudo apt-get install python3
fi
| true |
f3a8423c3837fb81f6f4ef181d78c4a19056ac95 | Shell | geoff-nixon/RBK20 | /rom/usr/share/armor/BD_START.sh | UTF-8 | 1,521 | 2.703125 | 3 | [] | no_license | #!/bin/sh
#For bdupd apply-update case,binary will auto call bd start, we need to stop it and make some settings first
/opt/bitdefender/bin/bd stop
#Run restore, if we have backup dir, that means router has performed upgrade and backup BD configuration to backup dir
/usr/share/armor/upgrade_bd_cfg.sh restore
/usr/share/armor/change_cloud_server.sh set_server production
chown -R root:root /opt/bitdefender/
chmod +x /opt/bitdefender/guster/scripts/*
if [ "x`/bin/config get i_opmode`" = "xapmode" ]; then
exit 0
fi
#insmod armor.ko
kernel_version=`uname -a | awk -F " " '{print $3}'`
insmod /lib/modules/$kernel_version/guster.ko
echo "BD agent start"
/opt/bitdefender/bin/bd start
/etc/init.d/ASH start
#iptables -t filter -I FORWARD -p tcp ! --sport 53 ! --dport 53 -j GUSTER
#trigger VA scan every day
LD_LIBRARY_PATH=/opt/bitdefender/lib /opt/bitdefender/bin/bdsett -set-key /daemons/bdvad/va_schedule_interval -to-string 604800
#LD_LIBRARY_PATH=/opt/bitdefender/lib /opt/bitdefender/bin/bdsett -set-key /daemons/bddevicediscovery/online_devices_sync_interval -to-string 90
#LD_LIBRARY_PATH=/opt/bitdefender/lib /opt/bitdefender/bin/bdsett -set-key /daemons/bddevicediscovery/neigh_expiry_time -to-string 200
activation_status=`/usr/share/armor/get_armor_status activate`
if [ "$activation_status" != "true" ]; then
LD_LIBRARY_PATH=/opt/bitdefender/lib /opt/bitdefender/bin/bdsett -set-key /daemons/bdvad/first_wait -to-string 3600
fi
net-wall restart
#/opt/bitdefender/guster/script/create_chain.sh 0
| true |
39b1e97ce99300fe95e826bebd051ab218f76f23 | Shell | forkgood/zippyshare_downloader | /zippyshare_downloader.sh | UTF-8 | 718 | 3.34375 | 3 | [] | no_license | #!/bin/bash
# Sintaxe: ./zippyshare_downloader.sh link_zippyshare
# victor.oliveira@gmx.com
site=$(curl -s $1)
echo "$site"|grep 'File has expired and does not exist anymore on this server' &> /dev/null
if [ $? == 0 ]; then
echo "Este arquivo foi apagado do Zippyshare."
exit
fi
var_a=$(echo "$site"|grep -Eo var\ a\ \=\ [0-9]\+|cut -d ' ' -f4); var_a=$(($var_a*$var_a*$var_a))
var_b=3
var_final=$(($var_a+$var_b))
url=$(echo $1|sed 's\http://\\g'|cut -d '/' -f1)
id=$(echo $1|sed 's\http://\\g'|cut -d '/' -f3)
arq=$(echo "$site"|grep 'twitter:title'|cut -d '"' -f4)
tam=$(echo "$site"|grep Size\:|cut -d\> -f4|cut -d\< -f1)
echo -e "Baixando: $arq\tTamanho: $tam"
wget -q --show-progress "http://$url/d/$id/$var_final/$arq"
| true |
03578acbeeb050f67d9db3151cfa71628e83e390 | Shell | WUZHEN1991/MutualFundAnalysis | /run-docker.sh | UTF-8 | 458 | 2.78125 | 3 | [] | no_license | cd /xy/src/MutualFundAnalysis/
ver=`date +%Y%m%d%H`
docker build -t mutualfundanalysis:$ver .
containerId=`docker ps | grep mutualfundanalysis: | awk '{print $1}'`
imageId=`docker ps | grep mutualfundanalysis: | awk '{print $2}'`
if [ -n "$containerId" ]; then
docker stop $containerId
docker rm $containerId
fi
if [ -n "$imageId" ]; then
docker rmi $imageId
fi
docker run -d -p 8000:8000 \
--name=mutualfundanalysis-$ver mutualfundanalysis:$ver
| true |
152d4511ff6309d36c0dd9c0c02e82e09e6fd390 | Shell | bernardoVale/berna_linux_study | /streams/streams_append.sh | UTF-8 | 475 | 3.4375 | 3 | [] | no_license | #!/bin/sh
# Append redirection send stdin to stdout but if the file already exists the content will be appended on that file
echo "This file exists" > /vagrant/streams/append_file.txt
# The file won't be overwrited
echo "Add this line too" >> /vagrant/streams/append_file.txt
read -p "Press [Enter] to check the content of append_file.txt"
cat /vagrant/streams/append_file.txt
read -p "Press [Enter] to remove the file append_file.txt"
rm /vagrant/streams/append_file.txt | true |
8e9edf6679cb894d528b30127b1579271ce05dd4 | Shell | Tatsh/misc-scripts | /archived/shot | UTF-8 | 356 | 3.0625 | 3 | [] | no_license | #!/usr/bin/env bash
if ! command -v xdpyinfo > /dev/null 2>&1 ||
! command -v import > /dev/null 2>&1; then
echo 'You need xdpyinfo and import to use this.' >&2
exit 1
fi
xdpyinfo -ext XINERAMA |
sed '/^ head #/!d;s///' |
while IFS=' :x@,' read -r i w h x y; do
import -window root -crop "${w}x$h+$x+$y" "head_$i.png"
done
| true |
f8fe89be50327998ab57451a59ea946ed92567b3 | Shell | NielsOerbaek/leaf | /femnist_regular.job | UTF-8 | 2,704 | 3.046875 | 3 | [
"BSD-2-Clause"
] | permissive | #!/bin/bash
#SBATCH --job-name=femnist-niid # Job name
#SBATCH --output=femnist-niid.%j.out # Name of output file (%j expands to jobId)
#SBATCH --time=72:00:00 # Run time (hh:mm:ss) - run for one hour max
#SBATCH --exclusive # Get a full machine
#SBATCH --gres=gpu # Schedule a GPU
#SBATCH --partition=red # Run on either the Red or Brown queue
echo "Running $(hostname) - (n)iid"
module load Anaconda3
. $(conda info --base)/etc/profile.d/conda.sh
conda activate leaf
module load TensorFlow/1.15.0-fosscuda-2019b-Python-3.7.4
conda info
which python
conda list tqdm
split_seed="1549786796"
sampling_seed="1549786595"
num_rounds="2000"
fedavg_lr="0.004"
output_dir="/home/niec/leaf/iid"
frac_of_data="0.05"
target_performance="0.8"
num_clients=("1" "3" "10" "35")
#######################################
# Archive FedAvg metrics and dataset metadata after a run.
# Arguments:
#   $1 - destination directory
#   $2 - file-name suffix identifying the experiment configuration
#######################################
function move_data() {
  path="$1"
  suffix="$2"
  # Abort instead of mv-ing files from whatever directory we happen to be
  # in when the metrics dir is missing (ShellCheck SC2164).
  cd /home/niec/leaf/models/metrics || exit 1
  mv sys_metrics.csv "${path}/sys_metrics_${suffix}.csv"
  mv stat_metrics.csv "${path}/stat_metrics_${suffix}.csv"
  cp -r /home/niec/leaf/data/femnist/meta "${path}"
  mv "${path}/meta" "${path}/meta_${suffix}"
}
# NIID EXPERIMENTS
echo "################## NIID EXPERIMENTS ######################"
cd /home/niec/leaf/data/femnist/data
# Drop any previously sampled/split data so preprocess.sh starts clean.
rm -r rem_user_data/ sampled_data/ test/ train/ union/
cd /home/niec/leaf/data/femnist
# Re-create the non-IID split with the fixed seeds declared above.
./preprocess.sh -s niid --sf ${frac_of_data} --iu ${frac_of_data} -k 100 -t sample --smplseed ${sampling_seed} --spltseed ${split_seed}
# One FedAvg run per clients-per-round setting; metrics archived after each.
for clients_per_round in ${num_clients[*]}; do
  echo "-- NUMBER OF CLIENTS: $clients_per_round"
  cd /home/niec/leaf/models
  python -u main.py -dataset 'femnist' -model 'cnn' --num-rounds ${num_rounds} --clients-per-round ${clients_per_round} --num-epochs 1 -lr ${fedavg_lr} --eval-every 5 --target-performance ${target_performance}
  cd /home/niec/leaf
  move_data ${output_dir} "fedavg_c_${clients_per_round}_e_1_frac_${frac_of_data}_niid"
done
# IID EXPERIMENTS
echo "################## IID EXPERIMENTS ######################"
cd /home/niec/leaf/data/femnist/data
# Drop any previously sampled/split data so preprocess.sh starts clean.
rm -r rem_user_data/ sampled_data/ test/ train/ union/
cd /home/niec/leaf/data/femnist
# Same seeds as the niid sweep, but with IID sampling of the data.
./preprocess.sh -s iid --sf ${frac_of_data} --iu ${frac_of_data} -k 100 -t sample --smplseed ${sampling_seed} --spltseed ${split_seed}
for clients_per_round in ${num_clients[*]}; do
  echo "-- NUMBER OF CLIENTS: $clients_per_round"
  cd /home/niec/leaf/models
  # NOTE(review): evaluates every 2 rounds here vs every 5 in the niid
  # sweep above -- confirm the asymmetry is deliberate.
  python -u main.py -dataset 'femnist' -model 'cnn' --num-rounds ${num_rounds} --clients-per-round ${clients_per_round} --num-epochs 1 -lr ${fedavg_lr} --eval-every 2 --target-performance ${target_performance}
  cd /home/niec/leaf
  move_data ${output_dir} "fedavg_c_${clients_per_round}_e_1_frac_${frac_of_data}_iid"
done
| true |
6706666c7ec59b701e9ef2c22fc5a86f2d83189c | Shell | MionaSHEN/shell | /shell_script/loop.sh | UTF-8 | 281 | 3.765625 | 4 | [] | no_license | #!/bin/sh
# Iterate over an explicit word list.
for n in 1 5 3 2; do
    echo "Looping ... number $n"
done

# Count from 2 up to $max using command substitution around seq.
max=10
for n in $(seq 2 "$max"); do
    echo "$n"
done

# Keep prompting until the user types "bye".
INPUT_STRING=hello
until [ "$INPUT_STRING" = "bye" ]; do
    echo "Please type something in (bye to quit)"
    read INPUT_STRING
    echo "You typed: $INPUT_STRING"
done
| true |
f667c4b3980b8a05fc5839ade172ab8d2f7266ae | Shell | particledecay/asdf-lab | /bin/install | UTF-8 | 1,104 | 3.921875 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Strict mode: abort on command errors, unset variables, and failed
# pipeline stages.
set \
  -o errexit \
  -o nounset \
  -o pipefail
# required environment variables (asdf exports these before invoking us);
# ${VAR?} makes the script exit immediately when any of them is missing.
: ${ASDF_INSTALL_TYPE?}
: ${ASDF_INSTALL_VERSION?}
: ${ASDF_INSTALL_PATH?}
: ${ASDF_DOWNLOAD_PATH?}
# NOTE(review): 'repository' is not referenced anywhere else in this script.
readonly repository="zaquestion/lab"
readonly toolname="lab"
#######################################
# Unpack the downloaded release archive and install the `lab` binary.
# Globals:  toolname, ASDF_INSTALL_VERSION, ASDF_INSTALL_PATH,
#           ASDF_DOWNLOAD_PATH
# Outputs:  progress messages on stdout
# Exits:    0 on success, 1 when extraction fails
#######################################
install() {
  local -r os=$(get_os)
  local -r arch=$(get_arch)
  # Asset name as published on the GitHub release page.
  local -r filename="${toolname}_${ASDF_INSTALL_VERSION}_${os}_${arch}.tar.gz"
  # BUG FIX: this previously interpolated "$(unknown)", which invoked a
  # nonexistent command and aborted the script under errexit; the intended
  # path is the archive name computed above, inside the download directory.
  local -r download_pkg="${ASDF_DOWNLOAD_PATH}/${filename}"
  echo "Extracting ${toolname} from tar archive"
  # mkdir -p is already a no-op when the directory exists.
  mkdir -p "${ASDF_INSTALL_PATH}/bin"
  # Extract just the tool binary; test the command directly instead of $?.
  if tar xf "$download_pkg" -C "${ASDF_INSTALL_PATH}/bin" "${toolname}" 2>/dev/null
  then
    echo "Successfully installed ${toolname} ${ASDF_INSTALL_VERSION}"
    exit 0
  else
    echo "Failed to install ${toolname} ${ASDF_INSTALL_VERSION}"
    exit 1
  fi
}
# Lower-cased kernel name (e.g. "Linux" -> "linux"), matching the release
# asset naming scheme.
get_os() {
  local -r kernel=$(uname)
  printf '%s\n' "$kernel" | tr '[:upper:]' '[:lower:]'
}
#######################################
# Map `uname -m` to the architecture token used in release asset names.
# Outputs: amd64 / 386 / arm64; nothing for unrecognized machines (as before).
#######################################
get_arch() {
  local -r arch=$(uname -m)
  case $arch in
    x86_64)
      echo amd64
      ;;
    *86)
      echo 386
      ;;
    arm64|aarch64)
      # Generalization: cover Apple Silicon and 64-bit ARM Linux hosts,
      # which previously fell through and produced an empty asset name.
      echo arm64
      ;;
  esac
}
# Entry point: asdf runs this script once per `asdf install lab <version>`.
install
| true |
ec370be807f71866d62c1a3adfefdb489466579b | Shell | top501/orthanc-mssql-storage-plugin | /bootstrap.sh | UTF-8 | 970 | 3.671875 | 4 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
set -e
SOURCE_DIR=$(pwd)

# INSTALL VCPKG (first run only: skipped when a vcpkg checkout exists)
if [ ! -d "vcpkg" ];then
    git clone https://github.com/mschmieder/vcpkg.git
    cd vcpkg
    # branch carrying the boost compile-flag fix required by this plugin
    git checkout fix-boost-compile-flags
    ./bootstrap-vcpkg.sh
    # copy triplet into the triplets folder to ensure position independent
    cp -f "${SOURCE_DIR}"/triplets/* triplets/
    cd "${SOURCE_DIR}"
fi

# BUG FIX: PATH and TRIPLET used to be computed only inside the clone-once
# branch above, so on every re-run `vcpkg` was missing from PATH and the
# default triplet was silently used. Compute both on every invocation.
export PATH=${SOURCE_DIR}/vcpkg:${PATH}
if [ "$(uname)" == "Darwin" ]; then
    TRIPLET=""
else
    TRIPLET="--triplet x64-linux-fpic"
fi

# INSTALL REQUIRED LIBRARIES
# TRIPLET is intentionally unquoted: empty on macOS, two words on Linux.
vcpkg ${TRIPLET} install jsoncpp gtest
# DOWNLOAD ORTHANC SOURCES
# Default to Orthanc 1.3.1 when the caller has not exported ORTHANC_VERSION
# (parameter expansion replaces the old explicit [ -z ... ] check).
ORTHANC_VERSION=${ORTHANC_VERSION:-1.3.1}
if [ ! -d "orthanc" ];then
	ORHTANC_FILENAME="Orthanc-${ORTHANC_VERSION}.tar.gz"
	# Quote the URL: unquoted, the '?' made it a glob pattern and an
	# unlucky matching file name would have corrupted the download URL.
	wget -q -O "${ORHTANC_FILENAME}" "https://www.orthanc-server.com/downloads/get.php?path=/orthanc/${ORHTANC_FILENAME}"
	tar -xzf "${ORHTANC_FILENAME}"
	rm "${ORHTANC_FILENAME}"
fi | true |
cc4588dcc4be91317261ce1d06e2d3ae944bc291 | Shell | wing-888/MOR_X5_FROM_VM | /x5/maintenance/test_fix_scripts/information/.svn/text-base/rrdtool.sh.svn-base | UTF-8 | 550 | 3.09375 | 3 | [] | no_license | #! /bin/sh
# Author: Mindaugas Mardosas
# Company: Kolmisoft
# Year: 2012
# About: Checks that rrdtool is installed and installs it via yum if not.
# Pull in shared helpers (notably `report`) from the MOR framework.
. /usr/src/mor/x5/framework/bash_functions.sh
# Ensure /usr/bin/rrdtool exists, installing it with yum when absent.
# Outcomes are logged through the framework's `report` helper.
check_and_install_rrdtool()
{
	# Fast path: the binary is already present.
	if [ -f "/usr/bin/rrdtool" ]; then
		report "rrdtool present" 0
		return
	fi
	yum -y install rrdtool
	if [ -f "/usr/bin/rrdtool" ]; then
		report "rrdtool was installed in order elunia stats would work correctly" 4
	else
		report "Failed to install rrdtool" 1
	fi
}
# Run the check when the script is executed.
check_and_install_rrdtool
| true |
d30b6a98bb62eaffe9deabdff942f9638e97ca49 | Shell | OperaChrome/compass-core | /install/chef.sh | UTF-8 | 1,788 | 3.359375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
#
# Install and configure a Chef server plus the chef client and knife.
# Expects CHEF_SRV, IMAGE_ARCH, COMPASSDIR, USER and CHEF_PASSWORD in the
# environment (set by the surrounding Compass install scripts).
# create backup dir
sudo mkdir -p /root/backup/chef
# Install the chef-server RPM only when it is not already present.
sudo rpm -q chef-server
if [[ "$?" != "0" ]]; then
    # Prefer a locally cached RPM in /tmp over fetching $CHEF_SRV.
    if [[ ! -e /tmp/chef-server-11.0.8-1.el6.${IMAGE_ARCH}.rpm ]]; then
        sudo rpm -Uvh $CHEF_SRV
    else
        sudo rpm -Uvh /tmp/chef-server-11.0.8-1.el6.${IMAGE_ARCH}.rpm
    fi
    # $? here is the status of whichever rpm -Uvh branch ran above.
    if [[ "$?" != "0" ]]; then
        echo "failed to rpm install $CHEF_SRV"
        exit 1
    fi
else
    echo "chef-server has already installed"
fi
# configure chef-server
sudo chef-server-ctl cleanse
mkdir -p /etc/chef-server
# Back up any existing config (-n: no clobber), then install Compass's copy.
sudo cp -rn /etc/chef-server/chef-server.rb /root/backup/chef/
sudo rm -f /etc/chef-server/chef-server.rb
sudo cp -rf $COMPASSDIR/misc/chef-server/chef-server.rb /etc/chef-server/chef-server.rb
sudo chmod 644 /etc/chef-server/chef-server.rb
sudo chef-server-ctl reconfigure
sudo chef-server-ctl test
if [[ "$?" != "0" ]]; then
    echo "chef-server-ctl test failed"
    exit 1
fi
# configure chef client and knife
rpm -q chef
if [[ "$?" != "0" ]]; then
    # NOTE(review): `download` is an external helper; it appears to leave
    # the installer at /tmp/chef_install.sh -- confirm against its definition.
    download http://www.opscode.com/chef/install.sh chef_install.sh
    sudo chmod 755 /tmp/chef_install.sh
    sudo /tmp/chef_install.sh
    if [[ "$?" != "0" ]]; then
        echo "chef install failed"
        exit 1
    else
        echo "chef is installed"
    fi
else
    echo "chef has already installed"
fi
sudo mkdir -p ~/.chef
# Non-interactive knife setup; the admin password is fed via the heredoc.
sudo knife configure -y -i --defaults -r ~/chef-repo -s https://localhost:443 -u $USER --admin-client-name admin --admin-client-key /etc/chef-server/admin.pem --validation-client-name chef-validator --validation-key /etc/chef-server/chef-validator.pem <<EOF
$CHEF_PASSWORD
EOF
# Point knife at the admin identity (sed's c\ replaces the matching line).
sudo sed -i "/node_name/c\node_name \'admin\'" /$USER/.chef/knife.rb
sudo sed -i "/client_key/c\client_key \'\/etc\/chef-server\/admin.pem\'" /$USER/.chef/knife.rb
| true |
789336e33a4d7c81821de943a3d19ee34138b104 | Shell | gonzalo-/dotfiles | /.profile | UTF-8 | 366 | 2.53125 | 3 | [] | no_license | PATH=$HOME/bin:/bin:/sbin:/usr/bin:/usr/sbin:/usr/X11R6/bin:/usr/local/bin:/usr/local/sbin:/usr/games:.
# Default HOME to /root when unset (login(1) normally sets it).
: ${HOME='/root'}
export HOME
umask 022
# One well-known agent socket per user so every session shares one agent.
export SSH_AUTH_SOCK=/tmp/ssh-agent.$LOGNAME.sock
ssh-add -l 2>/dev/null >/dev/null
# ssh-add exits >= 2 when it cannot reach an agent on the socket
# (1 just means "agent up, no identities"), so only then start a new agent.
if [ $? -ge 2 ]; then
	ssh-agent -a "$SSH_AUTH_SOCK" >/dev/null
fi
# ksh reads $ENV for interactive shell startup.
export ENV=$HOME/.kshrc
# NOTE(review): TERM/EDITOR/PS1/PKG_PATH are exported but never assigned in
# this block -- presumably set elsewhere or inherited; confirm.
export PATH HOME TERM EDITOR PS1 PKG_PATH
| true |
216d3069769628cbda2de94aec0615b6471bfad8 | Shell | cha63506/knapsack | /disk_size | UTF-8 | 781 | 3.375 | 3 | [] | no_license | #!/bin/bash
# Summarise disk usage of packages newer than NDAYS days (default: 7).
NDAYS=${1:-7}
# Cut-off date in YYYY-MM-DD, NDAYS days before today (GNU date syntax).
SINCE=$(date -d "$NDAYS days ago" +%Y-%m-%d)

if [ -f mirror_brain.txt ]; then
    echo 'Removing the date field and filtering the new packages ...'
    remove_time_filter_new --date "$SINCE" mirror_brain.txt > disk_size_sql.txt 2> payload_new.txt
    gzip -f mirror_brain.txt
    echo 'Removing version name ...'
    remove_package_version disk_size_sql.txt > no_version_disk_size_sql.txt
    gzip -f disk_size_sql.txt
    echo 'Removing version name in payload_new ...'
    remove_package_version payload_new.txt > payload_new_no_version.txt
    gzip -f payload_new.txt
    echo 'Adding directories sizes ...'
    disk_size_groups --size no_version_disk_size_sql.txt > no_version_disk_size_sql_with_dir.txt
    gzip -f no_version_disk_size_sql.txt
fi
| true |
a6b652254948d1842bbf6a9895029a8c447016ac | Shell | mleijon/bashscripts | /merge_paired_reads.sh | UTF-8 | 238 | 2.765625 | 3 | [] | no_license | #!/usr/bin/env bash
#for f in $dirs;do cd $f;./*.sh; cd ../..;done
# Merge each R2 reads file into its R1 mate, then strip the _R1 suffix.
FILES="/storage/micke/parvo/run1/01-Cleaned/*R1.fastq.gz"
for f in $FILES; do
    # An unmatched glob leaves the literal pattern in $f; skip it instead
    # of letting cat/rm/rename fail on a nonexistent path.
    [ -e "$f" ] || continue
    # The former `wait` after each command was a no-op (nothing runs in
    # the background) and has been removed.
    cat "${f/R1/R2}" >> "$f"
    rm "${f/R1/R2}"
    rename 's/_R1.fastq/.fastq/' "$f"
done
| true |
d9993e3d102f40f21d9e1439da7bce3e1937526b | Shell | lsoaringl/DCNNs-in-Medical-Image-Analysis | /Evaluation_Zahra/Segment_pulmo_Shuai_Results.sh | UTF-8 | 4,480 | 3.140625 | 3 | [] | no_license | #!/bin/bash
#this script does the following steps:
# 1. cropes the Manual and Automatic Segmentation (initialization by Shuai with deep learning) accoding to the manual Centerline and saves them both in mhd and dcm
# 2. Applies opfront
# 3. computesdice jaccard MSD
# 4. Diameter per slice based on bifurcation point from Zahra results
# 18-12-2017
Start_Total=$(date +%s)
# Defin Paths and Directories:
Path_Opfront=/home/zsedghi/Medical_Phisics_AortaPulmoSegmentation/Scripts/Registeration/Opfront_OnlySegmentation.sh
Matlab_path=/home/zsedghi/Medical_Phisics_AortaPulmoSegmentation/Matlab
Path_Cost=/home/zsedghi/Medical_Phisics_AortaPulmoSegmentation/Scripts/Cost_local_NOManualCompare.sh
Path_MeanDIST=/home/zsedghi/Scripts/Original/MeanSurfaceDist.sh
Vol_dcm=$1 #e.g. Vol_dcm=/scratch/zsedghi/Data/DLCST/vol4.dcm
Auto_Initialization=$2 #e.g. Auto_Initialization=/scratch/zsedghi/Shuai_Results/DeepLearning_Output/vol4/masksTestPredicted.dcm
Manual_Segmentation=$3 #e.g. Pulmonary artery manual segmentation (dcm) /vol4/vol4_Manual_3DMask.dcm
Manual_LeftCenterline=$4 #e.g. LEft Pulmonary artery manual resampled centerline (txt) /vol4/Pulmonary artery /vol4_leftPulmonary_Centerline_resampled.txt
Manual_RightCenterline=$5 #e.g. Right Pulmonary artery manual resampled centerline (txt) /vol4/Pulmonary artery /vol4_RightPulmonary_Centerline_resampled.txt
Manual_Bif=$6 #e.g. Manual_Bif=/scratch/zsedghi/Data/DLCST_BifPoint/vol4_BifurcationPoint.txt
Output_folder=$7 #e.g. Output_folder=/scratch/zsedghi/Shuai_Results/Segmentation/vol4
if [ $# != "7" ] # countes the number of inputs and if less than 9 it gives error
then
echo " "
echo " Not enough inputs."
echo " "
echo "6 Inputs are required which should be in this order:
1. Full volume (dcm)
2.initialization for opfront (output of deep learning)
3.Manual Pulmonary artery Segmentation( dcm format)
4.Manual left Pulmonary artery Centerline (resampled txt)
5.Manual Right Pulmonary artery Centerline (resampled txt)
6.Manual pulmonary artery bifurcation point(txt file)
7.Output folder (a directory not a file name)
"
exit
fi
## Creat the outputs and log file
File_Base=$(basename "${Vol_dcm}" .dcm)
File=${File_Base%_*}
Log_Folder=$Output_folder/Logs
NOW=$(date +%d-%m-%Y)
/bin/echo " "
/bin/echo " Segmentation with graph cut , initialized with deeplearning output from Shuai"
/bin/echo "Script name: /home/zsedghi/Medical_Phisics_AortaPulmoSegmentation/Scripts/Deeplearning/Segment_Shuai_Results.sh"
/bin/echo "Input Volume: $Vol_dcm"
/bin/echo "Input Volume name: $File"
/bin/echo "Output Folder: $Output_folder"
/bin/echo "$(date +%d-%m-%Y----%T)"
###########################################################################
############ Create the output folders ####################################
Seg_Pulmo_Org=$Output_folder/Pulmo_Segmentation_OriginalVolume
Seg_Pulmo_Contrast=$Output_folder/Pulmo_Segmentation_ContrastReduced
Seg_Net=$Output_folder/Deeplearning_Accuracy
#mkdir -p $Seg_Pulmo_Org
#mkdir -p $Seg_Pulmo_Contrast
mkdir -p $Seg_Net
#################################################################################################################################################
### 1. Cut manual and initial segmentation and get the initialization accuracy(DSC, MSD) All based on manual centerline #########################
/bin/echo "********************************************************************************************************************************************"
/bin/echo "**** 1. Cut manual and initial segmentation and get the initialization accuracy(DSC, MSD) All based on manual centerline***"
/bin/echo "********************************************************************************************************************************************"
/bin/echo " 1.a) Cut both segmentations and calculate dice and jaccard coefficient: "
module load mcr;
$Matlab_path/Dice_Correct_Pulmo_ShuaiResults $Manual_LeftCenterline $Manual_RightCenterline $Manual_Segmentation $Auto_Initialization $Seg_Net/Manual $Seg_Net/Auto
/bin/echo " 1.b) calculate the mean surface distance (mean, max, min, std of the surface distance): "
$Path_MeanDIST $Seg_Net/Auto.mhd $Seg_Net/Manual.mhd $Seg_Net/MSD_CompleteVolume.txt
rm $Seg_Net/*.mhd
rm $Seg_Net/*.raw
/bin/echo Dice, Mean surface distance for the initialaization Ends on: `date`
/bin/echo Total Runtime : $runtime_total_print
| true |
476e5bf212d7ac7fa6960e69dfc2011fb9ae1264 | Shell | devphilou/docker-seed | /node/run.sh | UTF-8 | 232 | 2.59375 | 3 | [] | no_license | #!/bin/bash
echo -e 'Starting node docker container'
# Detached container; host port 4000 -> container 4000; --rm cleans up on stop.
docker run --rm -p 4000:4000 -d \
    --name my-project-node \
    my-project-node-img
echo -e 'Checking...'
# Quote the substitution: unquoted, word splitting collapsed the column
# alignment of the `docker ps` header line into single spaces.
echo "$(docker ps | head -1)"
echo $(docker ps | grep my-project-node) | true |
91588d2ad5aaab0f74f9d61e10d2adbc3dbcb35a | Shell | follow-the-vine-to-get-to-the-melon/shell-random | /live/sh/scripts/die-flash-die.sh | UTF-8 | 120 | 2.578125 | 3 | [] | no_license | #!/usr/bin/env sh
# Kill Firefox's Flash "plugin-container" process when one is running.
# pidof exits non-zero if nothing matched, so guard the kill on the
# assignment's status directly instead of inspecting $? on the next line.
# The leading backslashes bypass any shell aliases for sudo/kill.
if flash_pid=$( \sudo /sbin/pidof plugin-container ); then
  # intentionally unquoted: pidof may print several PIDs
  \kill $flash_pid
fi
| true |
3e3b4bab1c88492dbf7b54222a977affd358f436 | Shell | SoftSrv/flavors | /minecraft/build.sh | UTF-8 | 1,039 | 3.5625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Background-job PID list and the image versions to build/push.
# NOTE(review): `export` on bash arrays does not propagate them to child
# processes; harmless here since both are only used in this script.
export PIDS=()
export VERSIONS=(1.11.2 1.12 1.12.2)
#######################################
# Build every minecraft image version in parallel, then wait on all builds.
# Globals: VERSIONS (read), PIDS (reset and written)
#######################################
build() {
  # Start from an empty PID list for this batch of jobs.
  PIDS=()
  for version in "${VERSIONS[@]}";
  do
    FULL_IMAGE="softsrv/minecraft:$version"
    echo "building image: $FULL_IMAGE"
    pushd "versions/$version"
    docker build -t "$FULL_IMAGE" . &
    # BUG FIX: this was `PIDS[${i}]=$!` with $i never set, so every PID
    # landed in slot 0 and only the last build was ever waited on.
    PIDS+=("$!")
    popd
  done
  for pid in "${PIDS[@]}";
  do
    echo "waiting on process $pid"
    wait "$pid"
    echo "Process $pid exited with status $?"
  done
}
# Placeholder smoke test. NOTE(review): the name shadows the `test` builtin
# within this script; harmless here because `[ ... ]`/`test` expressions are
# not used, but a distinct name (e.g. smoke_test) would be safer.
test() {
  echo "future: make sure images boot"
}
#######################################
# Push all built images in parallel, record build metadata, wait for pushes.
# Globals: VERSIONS, JOB_STATE, COMMIT, BUILD_NUMBER (read);
#          PIDS (reset and written)
#######################################
push() {
  # Reset so we do not re-wait on the (already reaped) build PIDs.
  PIDS=()
  for version in "${VERSIONS[@]}";
  do
    FULL_IMAGE="softsrv/minecraft:$version"
    echo "pushing image: $FULL_IMAGE"
    docker push "$FULL_IMAGE" &
    # BUG FIX: was `PIDS[${i}]=$!` with $i unset -- see build() above.
    PIDS+=("$!")
    echo "versionName=$version" >> "$JOB_STATE/minecraft_img_$version.env"
    echo "commitSha=$COMMIT" >> "$JOB_STATE/minecraft_img_$version.env"
    echo "buildNumber=$BUILD_NUMBER" >> "$JOB_STATE/minecraft_img_$version.env"
  done
  for pid in "${PIDS[@]}";
  do
    echo "waiting on process $pid"
    wait "$pid"
    echo "Process $pid exited with status $?"
  done
}
# Pipeline: build all images, run the (stub) tests, then push.
build
test
push
| true |
654402a519fa972c2f32d8fd4c6c09b04cdcfbd1 | Shell | justindav1s/k8s_thehardway_vagrant | /lb/setup.sh | UTF-8 | 506 | 2.734375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Address of the load-balancer VM to configure.
IP=192.168.20.10
# Keep a copy of the stock haproxy config before overwriting it.
ssh root@${IP} "mv /etc/haproxy/haproxy.cfg /etc/haproxy/haproxy_cfg.original"
scp haproxy.cfg root@${IP}:/etc/haproxy
# Switch SELinux from enforcing to permissive (sed to a temp file, then swap).
ssh root@${IP} "sed 's/enforcing/permissive/' </etc/selinux/config >/etc/selinux/config.new"
ssh root@${IP} "cp /etc/selinux/config.new /etc/selinux/config"
ssh root@${IP} "rm -rf /etc/selinux/config.new"
ssh root@${IP} "systemctl daemon-reload"
# Enable on boot; the explicit start stays commented out because the VM is
# rebooted right after this block.
ssh root@${IP} "systemctl enable haproxy"
#ssh root@${IP} "systemctl start haproxy"
ssh root@${IP} "reboot" | true |
2443742d30de887f0b8e5f1b0ec23494bad7d8c9 | Shell | d-r-scott/beamform | /stage4_dedispersion.sh | UTF-8 | 651 | 2.8125 | 3 | [] | no_license | #!/bin/bash
#SBATCH --job-name=dedispersion
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --time=0:10:00
#SBATCH --mem=64g

# Command line arguments
FRB=$1 # FRB name
pol=$2 # Polarisation (x or y)
DM=$3 # Dispersion measure in pc/cm3
f0=$4 # Central frequency in MHz
n=$5

# Set data directories
basedir=./output
outdir=${basedir}/${FRB}_n${n}
f_outdir=${outdir}/f

# Get modules to load and load them
source modules.sh
module load $modules_4

# Build the argument list as an array so every option remains a distinct
# word even if a value contains whitespace (the old single-string variable
# relied on unquoted word splitting, ShellCheck SC2086).
args=(-f "${outdir}/${FRB}_sum_${pol}_f_derippled.npy" --DM "$DM" --f0 "$f0" --bw 336 -o "${outdir}/${FRB}_sum_${pol}_f_dedispersed_${DM}.npy")
echo "python3 dedisperse.py ${args[*]}"
python3 dedisperse.py "${args[@]}"
| true |
c6b10a94440166daa4ad7fee77d26e018b8506b6 | Shell | rmdamasceno-zz/wpcontainer | /project/supporting_files/run.sh | UTF-8 | 7,090 | 3.734375 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
#
# Prepare our container for initial boot.
# Creating Variable Directory
if [[ ! -d /ENV ]]; then
echo "=> The variable directory was not found. "
echo "=> Creating directory ..."
mkdir -p /ENV
fi
# Checking variable file
if [[ -f /ENV/ENV ]]; then
echo "=> Variable file found"
source /ENV/ENV
fi
# Creating variables not found
#Generate PW to MySQL ROOT user
if [[ -z "$MYSQL_ROOT_PASS" ]] || [[ -z "$PASS" ]]; then
PASS=$(pwgen -s 12 1)
echo "=> MySQL ROOT user password generated is $PASS"
echo PASS="$PASS">>/ENV/ENV
else
PASS=$MYSQL_ROOT_PASS
echo PASS="$PASS">>/ENV/ENV
fi
#Generate User DB to WP
if [[ -z "$MYSQL_USER_NAME" ]] || [[ -z "$WP_USER" ]]; then
WP_USER=$(pwgen -A0s 6 1)
echo "=> MySQL WP user is $WP_USER"
echo WP_USER="$WP_USER">>/ENV/ENV
else
WP_USER=$MYSQL_USER_NAME
echo WP_USER="$WP_USER">>/ENV/ENV
fi
#Generate PW to User DB to WP
if [[ -z "$MYSQL_USER_PWD" ]] || [[ -z "$WP_PWD" ]]; then
WP_PWD=$(pwgen -s 12 1)
echo "=> MySQL WP user password is $WP_PWD"
echo WP_PWD="$WP_PWD">>/ENV/ENV
else
WP_PWD=$MYSQL_USER_PWD
echo WP_PWD="$WP_PWD">>/ENV/ENV
fi
#Generate DB to WP
if [[ -z "$MYSQL_DB_NAME" ]] || [[ -z "$WP_DB" ]]; then
WP_DB=$(pwgen -A0s 6 1)
echo "=> MySQL WP DB is $WP_DB"
echo WP_DB="$WP_DB">>/ENV/ENV
else
WP_DB=$MYSQL_DB_NAME
echo WP_DB="$WP_DB">>/ENV/ENV
fi
#Generate WP Table Prefix
if [[ -z "$WP_TB_PREFIX" ]] || [[ -z "$WP_TBP" ]]; then
WP_TBP=$(pwgen -A0s 4 1)
echo "=> WP table prefix is $WP_TBP"
echo WP_TBP="$WP_TBP">>/ENV/ENV
else
WP_TBP=$WP_TB_PREFIX
echo WP_TBP="$WP_TBP">>/ENV/ENV
fi
#Generate WP Debug
if [[ -z "$WP_DEBUG" ]]; then
WP_DEBUG="false"
echo "=> WP DEBUG is $WP_DEBUG"
echo WP_DEBUG="$WP_DEBUG">>/ENV/ENV
else
WP_DEBUG=$WP_DEBUG
echo WP_DEBUG="$WP_DEBUG">>/ENV/ENV
fi
# Retrieve information from apache sites
if [[ ! -f /ENV/apache.conf ]]; then
cp -rf /etc/apache2/sites-available/000-default.conf /ENV/apache.conf
else
cp -rf /ENV/apache.conf /etc/apache2/sites-available/000-default.conf
fi
# Retrieve PHP settings information
if [[ ! -f /ENV/php.ini ]]; then
cp -rf /etc/php/8.0/apache2/php.ini /ENV/php.ini
else
cp -rf /ENV/php.ini /etc/php/8.0/apache2/php.ini
fi
export PASS="$PASS"
export WP_DB="$WP_DB"
export WP_USER="$WP_USER"
export WP_PWD="$WP_PWD"
export WP_TBP="$WP_TBP"
export WP_DEBUG="$WP_DEBUG"
# Where does our MySQL data live?
VOLUME_HOME="/var/lib/mysql"
#######################################
# Use sed to replace apache php.ini values for a given PHP version.
# Globals:
# PHP_UPLOAD_MAX_FILESIZE
# PHP_POST_MAX_SIZE
# PHP_TIMEZONE
# Arguments:
# $1 - PHP version i.e. 5.6, 7.3 etc.
# Returns:
# None
#######################################
function replace_apache_php_ini_values () {
echo "Updating for PHP $1"
sed -ri -e "s/^upload_max_filesize.*/upload_max_filesize = ${PHP_UPLOAD_MAX_FILESIZE}/" \
-e "s/^post_max_size.*/post_max_size = ${PHP_POST_MAX_SIZE}/" /etc/php/$1/apache2/php.ini
sed -i "s/;date.timezone =/date.timezone = Europe\/London/g" /etc/php/$1/apache2/php.ini
}
if [ -e /etc/php/5.6/apache2/php.ini ]; then replace_apache_php_ini_values "5.6"; fi
if [ -e /etc/php/$PHP_VERSION/apache2/php.ini ]; then replace_apache_php_ini_values $PHP_VERSION; fi
#######################################
# Use sed to replace cli php.ini values for a given PHP version.
# Globals:
# PHP_TIMEZONE
# Arguments:
# $1 - PHP version i.e. 5.6, 7.3 etc.
# Returns:
# None
#######################################
function replace_cli_php_ini_values () {
echo "Replacing CLI php.ini values"
sed -i "s/;date.timezone =/date.timezone = Europe\/London/g" /etc/php/$1/cli/php.ini
}
if [ -e /etc/php/5.6/cli/php.ini ]; then replace_cli_php_ini_values "5.6"; fi
if [ -e /etc/php/$PHP_VERSION/cli/php.ini ]; then replace_cli_php_ini_values $PHP_VERSION; fi
echo "Editing APACHE_RUN_GROUP environment variable"
sed -i "s/export APACHE_RUN_GROUP=www-data/export APACHE_RUN_GROUP=staff/" /etc/apache2/envvars
if [ -n "$APACHE_ROOT" ];then
echo "Linking /var/www/html to the Apache root"
rm -f /var/www/html && ln -s "/app/${APACHE_ROOT}" /var/www/html
fi
echo "Editing phpmyadmin config"
sed -i "s/cfg\['blowfish_secret'\] = ''/cfg['blowfish_secret'] = '`date | md5sum`'/" /var/www/phpmyadmin/config.inc.php
echo "Setting up MySQL directories"
mkdir -p /var/run/mysqld
# Setup user and permissions for MySQL and Apache
chmod -R 770 /var/lib/mysql
chmod -R 770 /var/run/mysqld
if [ -n "$VAGRANT_OSX_MODE" ];then
echo "Setting up users and groups"
usermod -u $DOCKER_USER_ID www-data
groupmod -g $(($DOCKER_USER_GID + 10000)) $(getent group $DOCKER_USER_GID | cut -d: -f1)
groupmod -g ${DOCKER_USER_GID} staff
else
echo "Allowing Apache/PHP to write to the app"
# Tweaks to give Apache/PHP write permissions to the app
chown -R www-data:staff /var/www
chown -R www-data:staff /app
fi
echo "Allowing Apache/PHP to write to MySQL"
chown -R www-data:staff /var/lib/mysql
chown -R www-data:staff /var/run/mysqld
chown -R www-data:staff /var/log/mysql
if [ -e /var/run/mysqld/mysqld.sock ];then
echo "Removing MySQL socket"
rm /var/run/mysqld/mysqld.sock
fi
echo "Editing MySQL config"
sed -i "s/.*bind-address.*/bind-address = 0.0.0.0/" /etc/mysql/my.cnf
sed -i "s/.*bind-address.*/bind-address = 0.0.0.0/" /etc/mysql/mysql.conf.d/mysqld.cnf
sed -i "s/user.*/user = www-data/" /etc/mysql/mysql.conf.d/mysqld.cnf
if [[ ! -d $VOLUME_HOME/mysql ]]; then
echo "=> An empty or uninitialized MySQL volume is detected in $VOLUME_HOME"
echo "=> Installing MySQL ..."
# Try the 'preferred' solution
mysqld --initialize-insecure
# IF that didn't work
if [ $? -ne 0 ]; then
# Fall back to the 'depreciated' solution
mysql_install_db > /dev/null 2>&1
fi
echo "=> Done!"
/create_mysql_users.sh
else
echo "=> Using an existing volume of MySQL"
fi
echo "========================================================================"
echo "= +DB to Wordpress is:"
echo "= DB: $WP_DB"
echo "= US: $WP_USER"
echo "= PW: $WP_PWD"
echo "========================================================================"
if [[ ! -d /app/wordpress ]]; then
echo "=> The WP was not found in \\app "
echo "=> Installing WP ..."
if [[ ! -f /tmp/wordpress.tar.gz ]]; then
wget -O /tmp/wordpress.tar.gz https://wordpress.org/latest.tar.gz
fi
tar xfz /tmp/wordpress.tar.gz -C /app/
rm -rf /tmp/wordpress.tar.gz
else
echo "=> Using an existing WP installation"
fi
if [[ ! -f /app/wordpress/wp-config.php ]]; then
if [[ -z "$WP_AUTOCONFIG" ]] || [ ! "$WP_AUTOCONFIG" == "false" ]; then
echo "=> WP_AUTOCONFIG => $WP_AUTOCONFIG"
bash /install_wp.sh
else
echo "=> WP_AUTOCONFIG has been set to false, perform manual configuration"
fi
else
echo "=> The wp-config.php configuration file already exists in the /app/wordpress folder"
fi
echo "=> Enforcing permissions em /app..."
chown -R www-data:staff /app
echo "=> Done!"
echo "Starting supervisord"
exec supervisord -n
| true |
56d1721b3be11cf0f1fa4ac2a4925a7d13718717 | Shell | Liorya91/sedBash | /sedBash.sh | UTF-8 | 3,527 | 4.625 | 5 | [] | no_license | #!/bin/bash
#This script implements a simplified version of sed utility in bash. Input may be given through a pipeline or the console.
#exit codes for troubleshooting:
#exit 1(No argument)
#exit 2(Pipe regex failed)
#exit 3(Provided file does not exist)
#exit 4(Console regex failed)
#-------------------------------------------------------------------------------------------------------------
#Help message function
usage() {
echo "USAGE: sedBash 's/textToReplace/textToPaste/<g>' fileName || echo "text" | sedBash 's/textToReplace/textToPaste/<g>'"
echo "/g optional parameter stands for global replacement"
}
#Input Validation function using regular exp.
userInputValidation() {
if [[ $rawString =~ "'"s\/.+\/.+\/"'" ]]
then
userInput="valid"
echo $userInput
elif [[ $rawString =~ "'"s\/.+\/.+\/g"'" ]]
then
userInput="valid and global"
echo $userInput
else
userInput="Not valid"
echo $userInput
fi
}
#Beginning of main func.
main() {
#No argument provided by user.
if [ $# -eq 0 ]
then
usage
exit 1
fi
#User input.
rawString=$1
#Prevents null value case + makes regex comparison constant over different inputs.
rawString=\"\'${rawString}\'\"
#Decides whether the requested action is valid.
userInput=$(userInputValidation $rawString)
#Can also be done with IFS env variable instead of -F delimiter flag.
#prinf preferred over echo since flags might be interpreted by mistake on echo.
textToReplace=`printf $rawString | awk -F '/' '{print $2}'`
textToPaste=`printf $rawString | awk -F '/' '{print $3}'`
#Lists stdin file descriptors (Pipe vs console)
stdin="$(ls -l /dev/fd/0)"
#String manipulation
stdin="${stdin/*-> /}"
#In case input provided by a pipeline.
if [[ "$stdin" =~ ^pipe ]]
then
#read user input from pipe.
read inputString
#Checks if requested action is a valid exchange (single).
if [[ $userInput == "valid" ]]
then
#Performing the exchange using string manipulation.
echo ${inputString/$textToReplace/$textToPaste}
#Checks if requested action is a valid exchange (global).
elif [[ $userInput == "valid and global" ]]
then
#String manipulation.
echo ${inputString//$textToReplace/$textToPaste}
else
#In case provided argument is not valid print usage and exit.
usage
exit 2
fi
else
#Counts how many file were provided by user.
filecounter=0
#Array containing all files to be manipulated.
declare -a fileArray
#Insert file names into an array.
while [ $# -gt 1 ]
do
if [ -f "$2" ]
then
fileArray[$filecounter]=$2
let "filecounter+=1"
shift
else
echo "$2 file does not exist. please provide a correct full path"
usage
exit 3
fi
done
if [[ $userInput == "valid" ]]
then
#Loop through the array to print each file after string manipulation.
for i in "${fileArray[@]}"
do
fileContent=`cat $i`
echo ${fileContent/$textToReplace/$textToPaste}
done
#Check if requested action is a valid exchange (global).
elif [[ $userInput == "valid and global" ]]
then
#Loop through the array to print each file after string manipulation.
for i in "${fileArray[@]}"
do
fileContent=`cat $i`
echo ${fileContent//$textToReplace/$textToPaste}
done
else
#In case provided argument is not valid print usage and exit.
usage
exit 4
fi
fi
}
#Run main method with the given arguments.
main "$@"
| true |
7bca6a9bad30883d09b04541dcbfa529783db737 | Shell | rflpazini/bidu | /.travis/docker_push_production | UTF-8 | 280 | 2.578125 | 3 | [] | no_license | #!/bin/bash
# Tag of the image being published (first CLI argument).
VERSION=$1
# Password via stdin keeps the secret out of `ps` output and shell history.
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
# Tag the freshly built image as both :latest and the release version.
docker tag "$IMAGE_NAME" "${IMAGE_NAME}:latest"
docker tag "$IMAGE_NAME" "${IMAGE_NAME}:${VERSION}"
docker push "${IMAGE_NAME}:latest" && docker push "${IMAGE_NAME}:${VERSION}" | true |
9ed21df7e58f05315fa30aaf7753b60e0f9f2ff3 | Shell | KrisSaxton/lfs-build-6.2 | /shadow/shadow-4.0.15/install/configure | UTF-8 | 2,891 | 2.734375 | 3 | [] | no_license | #!/bin/bash
################################################################################
# BEGIN
################################################################################
source functions
################################################################################
# GLOBALS
################################################################################
ABSetDefault AB_SYSTEM_INSTALL_DIR /usr
################################################################################
# DEPENDENCIES
################################################################################
ABSetDefault AB_PACKAGE_SHADOW_DEPENDS PKG_DEPENDS
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_ENABLE no
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_FORCE no
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_NAME pkgdependsname
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_DIR $AB_SYSTEM_INSTALL_DIR/$AB_PACKAGE_PKG_DEPENDS_INSTALL_NAME
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INCLUDE_DIR $AB_PACKAGE_PKG_DEPENDS_INSTALL_DIR/include
ABSetDefault AB_PACKAGE_PKG_DEPENDS_LIB_DIR $AB_PACKAGE_PKG_DEPENDS_INSTALL_DIR/lib
#ABSetCompileFlags $AB_PACKAGE_SHADOW_DEPENDS
################################################################################
# CONFIGURATION
################################################################################
ABSetDefault AB_PACKAGE_SHADOW_NAME shadow
ABSetDefault AB_PACKAGE_SHADOW_VERSION 4.0.15
ABSetDefault AB_PACKAGE_SHADOW_DESC 'Programs for handling passwords in a secure way'
ABSetDefault AB_PACKAGE_SHADOW_INSTALL_NAME ${AB_PACKAGE_SHADOW_NAME}-${AB_PACKAGE_SHADOW_VERSION}
ABSetDefault AB_PACKAGE_SHADOW_INSTALL_DIR $AB_SYSTEM_INSTALL_DIR
ABSetDefault AB_PACKAGE_SHADOW_INCLUDE_DIR $AB_PACKAGE_SHADOW_INSTALL_DIR/include
ABSetDefault AB_PACKAGE_SHADOW_LIB_DIR $AB_PACKAGE_SHADOW_INSTALL_DIR/lib
ABSetDefault AB_PACKAGE_SHADOW_CONFIGURE_PREFIX $AB_PACKAGE_SHADOW_INSTALL_DIR
ABSetDefault AB_PACKAGE_SHADOW_CONFIGURE_WITH_EDITOR /usr/bin/nano
AB_PACKAGE_NAME=$AB_PACKAGE_SHADOW_NAME
AB_PACKAGE_VERSION=$AB_PACKAGE_SHADOW_VERSION
AB_PACKAGE_DESC=$AB_PACKAGE_SHADOW_DESC
AB_PACKAGE_INSTALL_NAME=$AB_PACKAGE_SHADOW_INSTALL_NAME
AB_PACKAGE_INSTALL_DIR=$AB_PACKAGE_SHADOW_INSTALL_DIR
AB_PACKAGE_INCLUDE_DIR=$AB_PACKAGE_SHADOW_INCLUDE_DIR
AB_PACKAGE_LIB_DIR=$AB_PACKAGE_SHADOW_LIB_DIR
AB_PACKAGE_CONFIGURE_PREFIX=$AB_PACKAGE_SHADOW_CONFIGURE_PREFIX
AB_PACKAGE_CONFIGURE_WITH_EDITOR=$AB_PACKAGE_SHADOW_CONFIGURE_WITH_EDITOR
################################################################################
# END
################################################################################
| true |
0b0cb78bcfbfec00abf5b63dc705d8903cf5d6bb | Shell | jiangfeng1124/acl15-clnndep | /scripts/train-PROJ.sh | UTF-8 | 742 | 3.03125 | 3 | [] | no_license | #!/bin/bash
# Train the PROJ dependency parser (optionally with Brown-cluster features).
cd /export/a04/jguo/work/parser/clnndep
if [ $# -ne 1 ]; then
    echo "Usage: ./train.sh [cls|0,1]"
    exit -1
fi
# lang=${lang}
cls=$1
# Universal Dependency Treebank (English) with Brown-cluster annotations.
corpus=udt/en/
models=models/
f_train=$corpus/en-universal-train-brown.conll
f_dev=$corpus/en-universal-dev-brown.conll
# cls=1 selects the cluster-augmented configuration (dvc) and model dir.
if [ "$cls" = "1" ]; then
    echo "Train PROJ+Cluster"
    model_dir=$models/model.proj.cls
    f_conf=conf/proj-dvc.cfg
else
    echo "Train PROJ"
    model_dir=$models/model.proj
    f_conf=conf/proj-dv.cfg
fi
if [ ! -d $model_dir ]; then
    mkdir $model_dir
fi
f_model=$model_dir/model
# Train with 50-dim cross-lingually projected embeddings as input features.
./bin/clnndep -train $f_train \
              -dev $f_dev \
              -model $f_model \
              -cfg $f_conf \
              -emb resources/projected/en.50
| true |
27a61c993c4676e7c7793f3f397a99d6b9d74993 | Shell | cadangelo/jobs | /make_res.sh | UTF-8 | 448 | 2.734375 | 3 | [] | no_license | #!/bin/bash
# Build the resume and cover letter with XeLaTeX + Biber.
res='resume_main.tex'
bcf='resume_main.bcf'
pdf='resume_main.pdf'
xe='xelatex'
bi='biber'
irc='resume_dangelo.pdf'
ref='references_main.tex'
ref_long='references_long_main.tex'
cov='coverletter_main.tex'
# compile resume (xelatex -> biber -> xelatex to resolve bibliography)
$xe $res
$bi $bcf
$xe $res
# compile references
#$xe $ref
#$xe $ref
# compile references long
#$xe $ref_long
#$xe $ref_long
# compile cover letter
$xe $cov
$bi $bcf
$xe $cov
#cp $pdf $irc
#gnome-open $irc &
| true |
fd8d8f68a2090d015748b71394a79e29d03f79c6 | Shell | lalaalal/backup | /backup.sh | UTF-8 | 382 | 3.25 | 3 | [] | no_license | #!/bin/bash
# Mirror the files listed in list.txt (plus /usr/local/bin) either into this
# directory ("local" mode) or into ~/backup on a remote ssh host given as $1.
# Quote and guard the cd: unquoted paths broke on spaces, and a failed cd
# previously let rsync run against the wrong directory (ShellCheck SC2164).
cd "$PWD/$(dirname "$0")" || exit 1
#HOSTNAME=$(cat /etc/hostname)
BACKUP_PATH=backup
# Quote $1: the old unquoted [ -z $1 ] only worked by accident when empty.
if [ -z "$1" ] || [ "local" = "$1" ]; then
	rsync --delete -arv --files-from=list.txt ~/ .
	rsync --delete -rv /usr/local/bin/ bin/
else
	rsync list.txt "$1":~/$BACKUP_PATH/
	rsync --delete -arv --files-from=list.txt ~/ "$1":~/$BACKUP_PATH
	rsync --delete -rv /usr/local/bin/ "$1":~/$BACKUP_PATH/bin/
fi
| true |
a843880ccb55d7ef330d46d6a96dc3e37cdc6995 | Shell | vloup/SmokinGuns-Jeuxlinux-Patches | /create-pk3.sh | UTF-8 | 1,129 | 3.625 | 4 | [] | no_license | #!/bin/bash
# Automatically create the patched qvm files for Smokin' Guns.
set -eux
# download and extract
wget "https://github.com/smokin-guns/SmokinGuns/archive/v1.1.tar.gz"
tar xvf "v1.1.tar.gz"
# patch
for diffile in *.diff; do
patch -p1 -d "SmokinGuns-1.1/" < "$diffile"
done
# compile and pack qvm
make -j5 -C "SmokinGuns-1.1/"
pushd "SmokinGuns-1.1/build/release-linux-x86_64/smokinguns/"
zip -r "zz_jeuxlinuxfr.pk3" "vm/"
popd
# output
if [ ! -d "pk3/" ]; then
mkdir "pk3/"
fi
# add qvm to pk3 folder
if [ "$(find 'pk3/' -type f -name 'zz_jeuxlinuxfr_*.pk3')" ]; then
# find older most recent pk3
pk3num="$(find 'pk3/' -name 'zz_jeuxlinuxfr_*.pk3' \
| sed 's#pk3/zz_jeuxlinuxfr_##' \
| sed 's#\.pk3##' \
| sort -n \
| tail -n 1)"
pk3num="$(($pk3num + 1))"
pk3num="$(printf %04d%s $pk3num)" # force 4 digits display
mv "SmokinGuns-1.1/build/release-linux-x86_64/smokinguns/zz_jeuxlinuxfr.pk3" \
"pk3/zz_jeuxlinuxfr_$pk3num.pk3"
else
mv "SmokinGuns-1.1/build/release-linux-x86_64/smokinguns/zz_jeuxlinuxfr.pk3" \
"pk3/zz_jeuxlinuxfr_0000.pk3"
fi
# cleanup
rm -r "SmokinGuns-1.1/"
rm "v1.1.tar.gz"
| true |
0c114c86ed1fd8377fe01445b149586374d280a3 | Shell | cometjun/GreeDataSystem | /GuGongServer/start.sh | UTF-8 | 441 | 2.90625 | 3 | [] | no_license | #! /bin/bash
echo "*********** start clean **************"
make clean -C ./Debug/
if [ $? != 0 ];
then
exit $?
fi
echo "*********** start make **************"
make -C ./Debug/
if [ $? -ne 0 ];then
exit $?
fi
kill -9 `ps -ef|grep GuGongServer|grep -v grep|awk '{print $2}'`
echo "*********** start service **************"
nohup ./Debug/GuGongServer &
read name
#sleep 2 && kill -2 $!
echo "****************start Success!***********"
| true |
b9da13d0123e5e8fe7a5cc15593aa89e94fd857a | Shell | stevetarver/shell-scripts | /examples/return_exit_code.sh | UTF-8 | 1,317 | 4.375 | 4 | [
"MIT"
] | permissive | #!/bin/sh
#
# I apply the provided 'set' flags and exit with the provided code
#
# Provides output and exit codes for testing with other scripts
#
# USE:
# see show_help()
#
# NOTES:
# see show_help()
#
# EXAMPLES:
# see show_help()
#
# EXIT CODES:
# * script argument 1
#
# CAVEATS:
# -e not on shebang - applied based on script args
#
show_help () (
echo "Exit with the specified exit code, optionally turn on command logging"
echo
echo "USE:"
echo " ${0} EXIT_CODE [SET_ARGS]"
echo
echo " EXIT_CODE: The exit code to exit with (default 4)"
echo " SET_ARGS: Args applied to 'set' prior to first command (optional)"
echo " E.g. -e, -x, -ex, +e, +x, +ex"
echo " No arg validation, you can pass anything."
echo
echo "EXAMPLES:"
echo
echo " ${0} Exit with code 4"
echo " ${0} 2 Exit with code 2"
echo " ${0} 2 -ex Exit with code 2, turn on error checking, command logging"
echo
exit 2
)
# Apply 'set' args before commands are executed
[ -n "${2}" ] && set ${2}
# Include directory locator to provide more interesting command logging
THIS_SCRIPT_DIR=$(dirname $(readlink -f "${0}"))
(
# Set default values
EXIT_CODE=4
[ -n "${1}" ] && EXIT_CODE="${1}"
exit ${EXIT_CODE}
)
| true |
cff3c0e1ee13467683fbb0e2488936535f24abc2 | Shell | cristian-pirnog/linuxenv | /editors/XEmacs/install.xemacs.sh | UTF-8 | 1,117 | 3.8125 | 4 | [] | no_license | #!/bin/bash
MESSAGE="Installing the default settings for XEmacs"
INSTALL_XEMACS=1
INSTALL_ALL=${1}
CEDET=${2}
source "$HOME/.${USER}_config/base.sh"
if [[ ${INSTALL_ALL} -ne 1 ]]; then
if [[ -z ${INSTALL_XEMACS} ]]; then
printf "Would you like to install default settings for Emacs? [Y/N] "
read answer
case $answer in
y| Y | yes | Yes | YES)
INSTALL_XEMACS=1
;;
*)
INSTALL_XEMACS=0
;;
esac
SaveConfigValueToCache INSTALL_XEMACS ${INSTALL_XEMACS}
fi
else
INSTALL_XEMACS=1
fi
if [[ ${INSTALL_XEMACS} -eq 1 ]]; then
CWD=`pwd`
UpdateFile $CWD/.dabbrev $HOME/.dabbrev $BACK_UP_DIR
UpdateFile $CWD/.xemacs $HOME/.xemacs $BACK_UP_DIR
echo " Not installing CEDET (not supported for XEmacs)"
# # Make symlink to 'cedet' directory
# if [ -d "$HOME/cedet-1.0pre3" ]; then
# if [ ! -L "$HOME/.xemacs/lisp/cedet-1.0pre3" ]; then
# ln -s "$HOME/cedet-1.0pre3" "$HOME/.xemacs/lisp/cedet-1.0pre3"
# fi
# fi
MESSAGE=$MESSAGE"\t[Done]"
echo -e $MESSAGE
else
echo "Not installing settings for Xemacs"
fi
| true |
fbb8958d8cd66ac72de3dd35883d50aa16c88483 | Shell | stevenkaras/bashfiles | /githooks/dispatching_hook | UTF-8 | 561 | 3.921875 | 4 | [] | no_license | #!/usr/bin/env bash
# This is a generic hook that is meant to invoke multiple hooks,
# handling both arguments, stdin, and breaking on the first error
# This makes it easy to write hooks that do exactly one thing
HOOK_NAME="$(basename "${BASH_SOURCE[0]}")"
GIT_DIR="$(git rev-parse --git-dir)"
HOOKS="$GIT_DIR/hooks/$HOOK_NAME.d"
if [[ -d "$HOOKS" ]]; then
stdin="$(cat; echo x)"
for HOOK in "$HOOKS"/*; do
if [[ -f "$HOOK" && -x "$HOOK" ]]; then
printf "%s" "${stdin%x}" | "$HOOK" "$@" || exit $?
fi
done
fi
exit 0
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.