blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
534d3d3ffe975a90e7dce58668c41fb12d4fefb4 | Shell | thalesmello/tmux-copycat | /scripts/copycat_mode_quit.sh | UTF-8 | 987 | 3.296875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Resolve the directory this script lives in so helpers.sh can be sourced
# relative to it, regardless of the caller's working directory.
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_DIR/helpers.sh"
# Restore the stock copy-mode "cancel" behavior for every key that copycat
# rebound (key list supplied by copycat_quit_copy_mode_keys from helpers.sh).
unbind_cancel_bindings() {
local cancel_mode_bindings=$(copycat_quit_copy_mode_keys)
local key
# Relies on word-splitting: keys are assumed whitespace-separated and
# to contain no spaces themselves.
for key in $cancel_mode_bindings; do
tmux unbind-key -T copy-mode-vi "$key"
tmux bind-key -T copy-mode-vi "$key" send-keys -X cancel
done
}
# Restore the stock search-again / search-reverse bindings on the keys
# copycat uses for next/previous match navigation.
unbind_prev_next_bindings() {
tmux unbind-key -T copy-mode-vi "$(copycat_next_key)"
tmux bind-key -T copy-mode-vi "$(copycat_next_key)" send-keys -X search-again
tmux unbind-key -T copy-mode-vi "$(copycat_prev_key)"
tmux bind-key -T copy-mode-vi "$(copycat_prev_key)" send-keys -X search-reverse
}
# Undo every tmux key binding that copycat installed.
unbind_all_bindings() {
unbind_cancel_bindings
unbind_prev_next_bindings
}
# Leave copycat mode for the current pane: reset the stored match position,
# clear the mode flag, and decrement the shared pane counter. The custom key
# bindings are global, so they are only torn down once no pane is left in
# copycat mode.
main() {
if in_copycat_mode; then
reset_copycat_position
unset_copycat_mode
copycat_decrease_counter
# removing all bindings only if no panes are in copycat mode
if copycat_counter_zero; then
unbind_all_bindings
fi
fi
}
main
| true |
91126bf89a0e7a7085b00f4d97e446136c263aef | Shell | thenoakes/mnopts | /mnopts.sh | UTF-8 | 3,239 | 4.78125 | 5 | [] | no_license | #!/bin/bash
# mnopts
# Usage: source ./mnopts.sh "argument:a option:o flag:f" "usage string" "$@"
# First argument is space separated strings of the form full:x where
# 'full' sets up the long-form option (in this case it will be --full) and
# 'x' sets up the short-form option (in this case 'x')
# Second argument is a string which is printed whenever help or an incorrect set of options is invoked
# The full arguments of the calling script should also be passed as "$@"
# Note: help:h is an automatic option which invokes the usage statement
# Positional contract (documented in the header above): $1 = option spec
# ("long:short" pairs), $2 = usage text; everything after that is the
# calling script's own "$@".
MNOPTS="$1"
MNOPTS_USAGE="$2"
shift 2
# Return 0 (success) when $1 equals any of the remaining arguments,
# 1 otherwise. Used during setup to detect duplicate option names.
checkDuplicate () {
  local needle="$1" candidate
  shift
  for candidate in "$@"; do
    if [[ "$candidate" == "$needle" ]]; then
      return 0
    fi
  done
  return 1
}
# Report a duplicated option name found during setup, then abort.
# $1 - the offending option (long or short form).
dupeError() {
  printf "mnopts setup error: duplicate option '%s'\n" "$1"
  exit 1
}
# Split a "long:short" option pair into the globals longOpt and shortOpt.
# $1 - pair such as "flag:f"; any text after a second ':' is ignored.
set_longOpt_shortOpt() {
  local -a fields
  IFS=':' read -ra fields <<< "$1"
  longOpt="${fields[0]}"
  shortOpt="${fields[1]}"
}
# Only act if MNOPTS are supplied
if [ ! -z "$MNOPTS" ]; then
# BEGIN PARSING
# Word-split the spec into individual "long:short" pairs.
allOptions=($MNOPTS)
# Build a list of the valid short-form options and set up the automatic bool variables
allLong=()
allShort=()
for option in ${allOptions[@]}
do
set_longOpt_shortOpt "$option"
# If either the long or short options already exist, bail out
checkDuplicate "$longOpt" "${allLong[@]}" && dupeError "$longOpt"
checkDuplicate "$shortOpt" "${allShort[@]}" && dupeError "$shortOpt"
# Add the long & short options to a list, and set the default value for the variable
allLong+=("$longOpt")
allShort+=("$shortOpt")
declare opt_${shortOpt}=false
done
# Set a custom or automatic usage statement
if [ ! -z "$MNOPTS_USAGE" ]; then
usage="Usage: $MNOPTS_USAGE"
else
usage="Usage: script-name"
for opt in ${allOptions[@]}
do
set_longOpt_shortOpt "$opt"
usage+=" [-${shortOpt}|--${longOpt}]"
done
fi
# Match the arguments passed from the calling script against the options,
# rewriting "$@" in place: each recognized --long form is replaced by its
# -x short form so that getopts (short options only) can parse everything.
for arg in "$@"
do
shift
optionMatched=false
# Check the passed option against all registered options
for option in ${allOptions[@]}
do
# Get the 'long form'
set_longOpt_shortOpt "$option"
# Use it to set the 'short' form
if [ "$arg" == "--${longOpt}" ]
then
optionMatched=true
set -- "$@" "-${shortOpt}"
fi
done
# Not a registered long option: keep the argument as-is and let getopts
# below reject it (which prints the usage string and exits).
if [ "$optionMatched" == "false" ]
then
set -- "$@" "$arg"
fi
done
# Use standard getopts setup now that long-form options have been converted to short
# The optstring is simply every short option concatenated (all are flags,
# none take arguments); getopts errors are silenced and handled via '?'.
OPTIND=1
while getopts "$(printf "%s" "${allShort[@]}")" opt 2> /dev/null
do
case "$opt" in
"?") echo "$usage" >&2 && exit 1 ;;
"h") echo "$usage" && exit 0 ;;
*) declare opt_${opt}=true ;;
esac
done
shift $(expr $OPTIND - 1)
# END PARSING
fi
70cb90b7abdaa8c866e099fdbe42333d730f0c8b | Shell | Junho407/AdvanceVimrc | /.zshrc | UTF-8 | 1,473 | 2.546875 | 3 | [] | no_license | # If you come from bash you might have to change your $PATH.↵
# oh-my-zsh user configuration for zsh.
# Fix: every line carried a literal "↵" mojibake artifact (a rendered
# carriage-return marker from a bad paste/encoding) which would make zsh
# reject or misparse most of these lines; the artifacts are stripped here.
# export PATH=$HOME/bin:/usr/local/bin:$PATH
# Path to your oh-my-zsh installation.
export ZSH="/home/junhowu/.oh-my-zsh"
export DEFAULT_USER='whoami'
# Set name of the theme to load --- if set to "random", it will
# load a random theme each time oh-my-zsh is loaded, in which case,
# to know which specific one was loaded, run: echo $RANDOM_THEME
# See https://github.com/robbyrussell/oh-my-zsh/wiki/Themes
ZSH_THEME="bullet-train"
# Which plugins would you like to load?
# Standard plugins can be found in ~/.oh-my-zsh/plugins/*
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
plugins=( git zsh-autosuggestions zsh-completions zsh-syntax-highlighting)
autoload -U compinit && compinit

source $ZSH/oh-my-zsh.sh
alias tmux="tmux -2"
alias vi="vim"
export EDITOR=/usr/local/bin/vim
export PATH=$PATH:~/.vim/shell
export PATH=$PATH:~/naming_script
# GNU Global / gtags configuration (tag databases kept under ~/.cache/tags).
export GTAGSCONF=/usr/local/share/gtags/gtags.conf
export GTAGSLABEL=pygments
export GTAGSROOT=~/.cache/tags
export GTAGSDBPATH=~/.cache/tags

[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
# Solarized colors
export FZF_DEFAULT_OPTS="--height 40% --layout=reverse --preview '(highlight -O ansi {} || cat {}) 2> /dev/null | head -500'"
| true |
7c65e782076c8836ab40b9410ead86ab6b8cb16d | Shell | steven-em/sysdump_amazon_rex | /rootfs.img.gz/fwo_rootfs.img/usr/bin/sodagauge-info | UTF-8 | 1,551 | 4.03125 | 4 | [] | no_license | #! /bin/sh
# Usage text; printed by usage() below for -h/--help and for bad flags.
HELP="usage:
sodagauge-info [-chklms] [-i <seconds>] <registers...>
-h help
--help help
-i s interval (seconds)
-c output state of charge in percent with sys entry command
-s same as -c but with lipc powerd command
-v output battery voltage value
-l output battery charge current (load)
-k output battery temperature
-m output battery available capacity (mAh)
"
# Print the usage text and exit non-zero (also the error path for bad flags).
usage() {
echo "$HELP"
exit 1
}
# Require at least one flag.
[ $# -eq 0 ] && usage
SLEEP=
# Parse flags. WANT selects which fuel-gauge value to print; when several
# value flags are given, the last one wins.
while [ $# -gt 0 ]; do
case "$1" in
-c) WANT=percent_sys_entry ;;
-s) WANT=percent_lipc_powerd ;;
-v) WANT=mv ;;
-l) WANT=ma ;;
-k) WANT=temp ;;
-m) WANT=mah ;;
-t) WANT=cnt ;;
-i) SLEEP=$2 ; shift ;;
*) usage ;; # covers -h and --help too
esac
shift
done
# Kindle/upstart helper functions.
source /etc/upstart/functions
# sysfs directory exposing the soda fuel-gauge battery attributes.
DIR=/sys/devices/platform/soda/power_supply/soda_fg
if [ ! -d $DIR ]; then
echo "Battery dir $DIR does not exist"
exit 1
fi
# Print the single reading selected by $WANT, read from the fuel-gauge
# sysfs files (or from powerd over lipc for -s).
get_value() {
case "$WANT" in
percent_sys_entry) echo "$(cat $DIR/capacity)" ;; # the "%" is already there
percent_lipc_powerd) lipc-get-prop com.lab126.powerd battLevel ;; #TODO fix this with new batteryd entry
mv) echo "$(cat $DIR/voltage_now) mV" ;;
ma) echo "$(cat $DIR/current_avg) mA" ;;
temp) echo "$(cat $DIR/temp) Fahrenheit" ;;
mah) echo "$(cat $DIR/charge_now) mAh" ;;
cnt) echo "$(cat $DIR/cycle_count)" ;;
*) echo "don't know what you want - $WANT" ; exit 1 ;; # shouldn't happen
esac
}
# Print once; with -i, keep sampling forever at the requested interval.
get_value
while [ -n "$SLEEP" ]; do sleep $SLEEP && get_value ; done
| true |
3ebd0c023fec72e70d19952d744f537c8a7bfef8 | Shell | Sirignus/Arch-Install | /software.sh | UTF-8 | 260 | 3.234375 | 3 | [] | no_license | #!/usr/bin/env bash
echo -e "\nInstalling Softwares\n"
# Packages installed via pacman; extend this array to add more software.
PKGS=(
# Development tools
'code'
)
for PKG in "${PKGS[@]}"; do
echo "INSTALLING: ${PKG}"
# --noconfirm: unattended install; --needed: skip already-installed packages.
sudo pacman -S "$PKG" --noconfirm --needed
done
echo '------------------'
echo "DONE."
86911c85dd68690041674cddc84920b00c043068 | Shell | tanuj208/ITWS-Assignment2 | /q10.sh | UTF-8 | 103 | 2.5625 | 3 | [] | no_license | #!/bin/bash
# Report how many users are logged in right now (words printed by `users`)
# out of the total local accounts (lines in /etc/passwd).
# Cleanups: $() instead of backticks, redirect instead of a useless `cat`,
# and quoted echo arguments.
X=$(users | wc -w)
Y=$(wc -l < /etc/passwd)
echo "$X OUT OF $Y USERS ARE ONLINE RIGHT NOW"
| true |
bcf60d5accd50be3793d4cc9107219592ccfe392 | Shell | amirouche/asyncio-foundationdb | /python-compile.sh | UTF-8 | 890 | 3.25 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/sh
set -xe
# Inspired from the great https://gitlab.com/python-devs/ci-images/
# Thanks Barry Warsaw.
# Needs:
# sudo apt-get update; sudo apt-get install make build-essential libssl-dev zlib1g-dev \
# libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm \
# libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev
# $1 - requested major.minor version (3.7 .. 3.10), mapped to a pinned patch release.
PYTHON_VERSION_MAJOR_MINOR="$1"
PYTHON_VERSION=""
case $PYTHON_VERSION_MAJOR_MINOR in
3.7) PYTHON_VERSION="3.7.13";;
3.8) PYTHON_VERSION="3.8.13";;
3.9) PYTHON_VERSION="3.9.11";;
3.10) PYTHON_VERSION="3.10.3";;
esac
# NOTE(review): an unrecognized $1 leaves PYTHON_VERSION empty, so the wget
# below fails and `set -e` aborts — presumably intentional; confirm.
URL="https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tgz"
cd /tmp
wget -q $URL
tar -xzf Python-$PYTHON_VERSION.tgz
cd Python-$PYTHON_VERSION
# Install under ~/.local; `make altinstall` avoids clobbering the system python3.
./configure --prefix="$HOME/.local/"
make -j "$(nproc)"
make altinstall
rm -rf Python-$PYTHON_VERSION.tgz Python-$PYTHON_VERSION
| true |
a5577d8f8d8654431ca72e38a4a987eac99fb467 | Shell | daveyarwood/dotfiles | /bin/toggle_touchpad | UTF-8 | 736 | 3.875 | 4 | [] | no_license | #!/usr/bin/env bash
set -eo pipefail
# Find the X input device id of the touchpad from `xinput list`.
input_number="$(xinput list \
| grep -i touchpad \
| sed -n 's/.*id=\([0-9]\+\).*/\1/p')"
if [[ -z "$input_number" ]]; then
echo "ERROR: Unable to determine touchpad input number."
exit 1
fi
# Optional argument "on"/"off"; with no argument, toggle the current state.
target="$1"
if [[ -z "$target" ]]; then
# "Device Enabled ...: 1" means currently enabled, so toggle to off.
if xinput --list-props "$input_number" \
| grep -iP 'device enabled.*:\s+1' >/dev/null; then
target="off"
else
target="on"
fi
fi
# Apply the requested state and notify the desktop; anything else is usage error.
case "$target" in
on)
xinput --enable "$input_number"
notify-send "Touchpad enabled."
;;
off)
xinput --disable "$input_number"
notify-send "Touchpad disabled."
;;
*)
notify-send "womp womp: $target"
echo "Usage: $0 [on|off]"
exit 1
esac
| true |
51c88ad5e6d4d54cd783ec387f54259e4139cc50 | Shell | alexschneider/dotfiles-1 | /bash/environment | UTF-8 | 808 | 3.296875 | 3 | [] | no_license | #! bash
# This file sets up the environment correctly. It gets run for every shell,
# so it must be fast. Also, starting a shell within a shell shouldn't change
# the environment. The path manipulation functions are useful for this.
# Default creation mask: owner full access, group/other read (and execute
# for directories). NOTE(review): the old comment claimed "rwx user only",
# which would be umask 0077, not 0022.
umask 0022
# General environment settings
export IFS=$' \t\n'
export PAGER="less"
export EDITOR="vim"
export CVS_RSH="ssh"
export RSYNC_RSH="ssh"
export PYTHONSTARTUP="${HOME}/.pythonrc.py"
# History: ignore duplicates and space-prefixed commands, keep a large file,
# and skip noise commands entirely.
export HISTCONTROL="ignoreboth"
export HISTSIZE="10000"
export HISTFILESIZE="1000000"
export HISTIGNORE="history*:clear:*/*\.del*:&:ls:[bf]g:exit"
export INPUTRC=~/.inputrc
shopt -s histappend
shopt -s cmdhist
# Load the bash functions
source "${HOME}/.bash/functions"
# The current directory shouldn't be in the path
pathremove .
| true |
fa49a330f94859b5d7fd9b9f4fb40e459a0be0e8 | Shell | KMR-zoar/mokuroku-vector | /dbinitial.sh | UTF-8 | 580 | 3.21875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Build mokuroku.sqlite from GSI's tile catalog (mokuroku.csv) when the
# database file does not exist yet.
dbname="mokuroku.sqlite"
createtable="CREATE TABLE 'mokuroku' ( 'path' TEXT, 'unixtime' INTEGER, 'size' INTEGER, 'md5sum' TEXT );"
createindex="CREATE UNIQUE INDEX mokurokuindex ON mokuroku(path);"
mokuroku="http://cyberjapandata.gsi.go.jp/xyz/std/mokuroku.csv.gz"
csvname="mokuroku.csv"
# Only (re)build when the database is absent. The original tested existence
# by parsing `ls ... | wc -l`, which is fragile; test the path directly.
if [ ! -e "$dbname" ]; then
  # Download and unpack the catalog.
  wget "$mokuroku"
  gunzip "$csvname.gz"
  # Create the schema, bulk-load the CSV, then index by tile path.
  touch "$dbname"
  sqlite3 "$dbname" "$createtable"
  sqlite3 "$dbname" ".separator ," ".import $csvname mokuroku"
  sqlite3 "$dbname" "$createindex"
  rm "$csvname"
fi
| true |
638d2a84a4618141d968ea6d8d0bf5057fd7f40b | Shell | mYvenkatesh/SampleScripts | /if-else.sh | UTF-8 | 297 | 3.1875 | 3 | [] | no_license | #!/bin/bash
#Purpose: read two integers and report which is larger
#Version:
#Creation Date: Fri Aug 27 13:51:47 AEST 2021
#Modified Date:
#Author: rootUser
#START#:
# printf prints the prompt without a trailing newline; more portable than
# the old `echo -e "... \c"`.
printf "Enter any value> "
read -r a
printf "Enter any value: "
read -r b
# Operands are quoted so empty input produces a clean test error instead of
# a syntax break, and equality is reported explicitly (the original claimed
# "$b is greater" when the values were equal).
if [ "$a" -gt "$b" ]; then
    echo "$a is greater than $b"
elif [ "$a" -eq "$b" ]; then
    echo "$a is equal to $b"
else
    echo "$b is greater than $a"
fi
#END#
| true |
30ef4963fbc7502c3536b1873dac69c6dca9001b | Shell | mannshi/pythonpython | /test.sh | UTF-8 | 337 | 3.09375 | 3 | [] | no_license | #!/bin/bash
# Expected exit codes for each compiled test program (testi01.c .. testi06.c).
answer[1]=1
answer[2]=3
answer[3]=46
answer[4]=13
answer[5]=13
answer[6]=0
for n in {1..6};do
# Zero-pad the index to match the testiNN.c file names.
num=`printf "%02d" $n`
ifile=testi$num.c
afile=testi$num.s
efile=testi$num.exe
# Compile C -> assembly with mannc.py, assemble/link with gcc, run, then
# print the actual exit status next to the expected answer for comparison.
echo TEST$num; ( /usr/bin/python3 mannc.py $ifile > $afile ); gcc -o $efile $afile ; ./$efile ;echo execute $?; echo expect ${answer[$n]}
done
| true |
8ada86519b2743975b6616b31c6efc389a434030 | Shell | IakMastro/LargeScaleDataManagement | /produce.sh | UTF-8 | 637 | 3.1875 | 3 | [] | no_license | #!/bin/sh
# Spin up Kafka + Flume, run the Twitter producer inside the java container,
# then stop the pipeline and print where to view the collected data.
echo "Opening the containers"
docker-compose up -d kafka flume
echo "Containers are ready."
# Ask where the producer should save the collected data.
read -p "Enter file name to save the data: " filename
echo "Start of producing the data."
# Pass the chosen file name to the producer. The original passed
# "$(unknown)" — the output of a nonexistent `unknown` command — instead of
# the variable read just above.
docker exec -it java mvn exec:java -Dexec.mainClass=kafka.TwitterProducer -Dexec.args="$filename"
echo "End of produced data. Stopping the containers."
echo "It will take 60 seconds. Please wait..."
docker-compose stop -t 60 kafka zookeeper flume
# Resolve the namenode container's IP on the compose network for the final URL.
namenode_ip=$(docker inspect namenode | jq '.[].NetworkSettings.Networks.largescaledatamanagement_default.IPAddress' | sed 's/\"//g')
echo "Done. View the data at: http://${namenode_ip}:9870"
0b4c4de1f25ca5d9a46477b9f2f6940604d7170d | Shell | babywyrm/sysadmin | /EKS/workers/busted_.sh | UTF-8 | 834 | 3.65625 | 4 | [] | no_license | #!/bin/bash
# Get a list of all AWS regions
regions=$(aws ec2 describe-regions --query "Regions[].RegionName" --output text)
# Iterate through each region
for region in $regions
do
echo "Region: $region"
# Get a list of all EC2 instances in the region
# (instance id plus tags, as JSON)
instances=$(aws ec2 describe-instances --region $region --query "Reservations[*].Instances[*].{InstanceId:InstanceId,Tags:Tags[*]}" --output json)
# Iterate through each instance; elements are base64-encoded so each item
# survives word-splitting in the for loop.
for instance in $(echo "$instances" | jq -r '.[] | @base64')
do
_jq() {
echo ${instance} | base64 --decode | jq -r ${1}
}
instance_id=$(_jq '.InstanceId')
tags=$(_jq '.Tags')
# Extract the EKS cluster name from the eks:cluster-name tag, if present.
# NOTE(review): the query nests instances per reservation, so '.[] | @base64'
# may yield per-reservation arrays rather than instance objects — verify
# against real describe-instances output.
eks_cluster=$(echo "$tags" | jq -r '.[] | select(.Key=="eks:cluster-name") | .Value')
echo -e "\tInstance: $instance_id (EKS Cluster: $eks_cluster)"
done
done
##
##
| true |
2a8fbcc626197162f90722715b5a0fb9ef58dd57 | Shell | emop99/VagrantSetting | /setting.sh | UTF-8 | 1,265 | 2.71875 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# Provision a CentOS 7 Vagrant box with PHP 7.4 (php-fpm), nginx and MariaDB.
# yum update
yum -y update
# wget install
yum -y install wget
# php install (PHP 7.4 from the IUS archive repo, served by php-fpm)
wget https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
wget https://repo.ius.io/ius-release-el7.rpm
rpm -ivh epel-release-latest-7.noarch.rpm
rpm -ivh ius-release-el7.rpm
yum --enablerepo=ius-archive install -y php74*
systemctl start php-fpm
systemctl enable php-fpm
rm -f epel-release-latest-7.noarch.rpm
rm -f ius-release-el7.rpm
# nginx install (repo file shipped alongside this script)
yes|cp -arpf /VagrantSetting/ngnix.repo /etc/yum.repos.d/CentOS-ngnix.repo
yum -y install nginx
systemctl start nginx
systemctl enable nginx
systemctl restart nginx
# mariaDB install
yes|cp -arpf /VagrantSetting/mariaDB.repo /etc/yum.repos.d/MariaDB.repo
yum makecache fast
yum -y install MariaDB-server MariaDB-client
systemctl start mariadb
systemctl enable mariadb
# Create the app database and the 'vagrant' user.
# NOTE(review): the password is hard-coded in plain text here — acceptable
# for a throwaway local Vagrant box only; do not reuse for anything public.
mysql -e "create database mariadb"
mysql -e "create user 'vagrant'@'localhost' identified by 'qlalfqjsgh1@'"
mysql -e "GRANT USAGE ON *.* TO 'vagrant'@'%' IDENTIFIED BY 'qlalfqjsgh1@'"
mysql -e "grant all privileges on mariadb.* to 'vagrant'@'%' identified by 'qlalfqjsgh1@'"
mysql -e "flush privileges"
# Disable SELinux on next boot and make nginx pass the client Host header
# to php-fpm.
sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
sed -i 's/$server_name;/$http_host;/g' /etc/nginx/fastcgi_params
| true |
7b61b341d69c29c363fbd8e4c04ad089fba6c041 | Shell | wooritech/github-actions | /gh-pages/entrypoint.sh | UTF-8 | 2,089 | 4.0625 | 4 | [
"MIT"
] | permissive | #!/bin/sh -l
# GitHub Action entrypoint: build a docs folder and push it to a gh-pages
# style deploy branch. Required env: ACCESS_TOKEN, FOLDER; optional:
# BRANCH, BASE_BRANCH, COMMIT_EMAIL, COMMIT_NAME, BUILD_SCRIPT, CNAME.
set -e
if [ -z "$ACCESS_TOKEN" ]
then
echo "저장소에 접근할 수 있는 Github Personal Access Token을 입력 하세요."
exit 1
fi
if [ -z "$FOLDER" ]
then
echo "문서 코드가 저장된 폴더명을 입력하세요. (ex. dist)"
exit 1
fi
# Reject absolute ("/...") or "./"-prefixed folder names.
case "$FOLDER" in /*|./*)
echo "폴더명에는 '/' 또는 './'가 포함될 수 없습니다."
exit 1
esac
# Defaults for the deploy branch, base branch and commit identity.
if [ -z "$BRANCH" ]
then
BRANCH="gh-pages"
fi
if [ -z "$BASE_BRANCH" ]
then
BASE_BRANCH="master"
fi
if [ -z "$COMMIT_EMAIL" ]
then
COMMIT_EMAIL="${GITHUB_ACTOR}@users.noreply.github.com"
fi
if [ -z "$COMMIT_NAME" ]
then
COMMIT_NAME="${GITHUB_ACTOR}"
fi
# Install the git command
apt-get update && \
apt-get install -y git && \
# Move into the GitHub workspace
cd $GITHUB_WORKSPACE && \
# Configure git identity
git init && \
git config --global user.email "${COMMIT_EMAIL}" && \
git config --global user.name "${COMMIT_NAME}" && \
# Repository URL with the access token embedded for authenticated pushes
REPOSITORY_PATH="https://${ACCESS_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" && \
# Create the deploy branch if it does not exist yet (orphan branch with a
# single README commit)
if [ "$(git ls-remote --heads "$REPOSITORY_PATH" "${BRANCH}" | wc -l)" -eq 0 ];
then
echo "배포 브랜치(${BRANCH})를 생성합니다...\n"
git checkout $BASE_BRANCH && \
git checkout --orphan $BRANCH && \
git rm -rf . && \
touch README.md && \
git add README.md && \
git commit -m "Initial $BRANCH commit" && \
git push $REPOSITORY_PATH $BRANCH
fi
# Check out the base branch
git checkout $BASE_BRANCH && \
# Run the build script
echo "빌드 스크립트를 실행합니다... ${BUILD_SCRIPT}\n" && \
eval "$BUILD_SCRIPT" && \
# Custom domain: write a CNAME file into the build output if requested
if [ "$CNAME" ]; then
echo "$FOLDER 폴더에 CNAME 파일을 생성합니다...\n"
echo $CNAME > $FOLDER/CNAME
fi
# 배포 브랜치에 배포
echo "배포 브랜치(${BRANCH})에 배포합니다...\n" && \
git add -f $FOLDER && \
git commit -m "Deploying to $BRANCH - $(date +"%T")" && \
git push $REPOSITORY_PATH `git subtree split --prefix $FOLDER ${BASE_BRANCH}`:$BRANCH --force && \
echo "Github Pages 배포가 완료되었습니다.\n" | true |
d5cf69f417c16528dae9db3bf59499bab1dbe73f | Shell | ThomasFreedman/sysDtmux | /witnessSysD | UTF-8 | 2,100 | 3.890625 | 4 | [] | no_license | #!/bin/bash
#
# witnessSysD - Script to register a systemd unit for the witness node.
# Feeds and bts_tools monitoring use the tmuxSysD.service,
# which runs separately as a systemd user service unit.
#
# Start the witness_node process here, using systemd to insure it starts when
# the system is booted. This could live with the user units, however for now
# the separation from the user simplifies tmux session issues.
#
# See 'systemctl status witness.service' and 'journalctl -xn' for details.
# Use: systemctl start witness.service to start
# Config variables
#if [[ -n $(lsb_release -a | grep -i debian) ]]; then IS_DEBIAN=1; else IS_DEBIAN=0; fi
#export IS_DEBIAN;
# NOTE: ACCOUNT, YOUR_NODE and PATH_TO_BINARIES below are placeholders that
# must be edited before running this script.
export USER=ACCOUNT # Name of the account to run this service unit under
export NODE=/home/$USER/YOUR_NODE # Folder where blockchain & config are
export BIN=/home/$USER/PATH_TO_BINARIES # Full path of where the executables are found
export SERVICE_NAME="witness_node"
export SERVICE_EXEC="$BIN/witness_node -d $NODE --track-account \\\"1.2.277\\\" --track-account \\\"1.2.126782\\\" --replay-blockchain"
export SERVICE_STOP="/usr/bin/pkill -SIGINT $SERVICE_NAME"
export SERVICE_UNIT=1 # Set SERVICE_UNIT=1 to enable witness_node to start upon reboot
# Create a systemd unit service definition for the witness_node to
# run in the background.
if [ $SERVICE_UNIT -eq 1 ]; then
cat > /lib/systemd/system/$SERVICE_NAME.service <<EOL
[Unit]
Description=Job that runs the $SERVICE_NAME background process
[Service]
User=$USER
Type=simple
WorkingDirectory=$NODE
ExecStart=$SERVICE_EXEC
ExecStop=$SERVICE_STOP
Restart=on-failure
[Install]
WantedBy=multi-user.target
EOL
# Enable the service so it will start automatically upon reboot.
# Use systemctl to stop, start, reload, status, enable, disable
# or manage this service. See man systemctl and man systemd.*
# NOTE: with Restart enabled you will need to disable this unit
# or it will be restarted automatically by systemd.
systemctl daemon-reload
systemctl enable $SERVICE_NAME
fi
| true |
558a66038ca64854de46f82e183f1d0ded682d61 | Shell | dschaaff/profile.d | /01_shellopts.sh | UTF-8 | 181 | 2.5625 | 3 | [
"BSD-3-Clause",
"BSD-2-Clause",
"BSD-2-Clause-Views"
] | permissive | # Here we should use the `set` command to change the SHELLOPTS variable
# Sets the readline mode of your shell. Bash supports "vi" or "emacs". Most people use "emacs"
# (switch to `set -o vi` for vi-style command-line editing).
set -o emacs
| true |
9e0ac190fdef995d15e62e50a82b22caa5f15ae4 | Shell | noah8713/ovn-fake-multinode-old | /install_ovn.sh | UTF-8 | 1,149 | 2.640625 | 3 | [] | no_license | #!/bin/sh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Build and install OVS then OVN from the source trees mounted at /ovs and
# /ovn, then slim the container image down.
# get ovs source always from master as its needed as dependency
cd /ovs;
# build and install
./boot.sh
./configure --localstatedir="/var" --sysconfdir="/etc" --prefix="/usr" \
--enable-ssl
make -j8;
make install
# OVN is configured against the OVS source/build tree installed above.
cd /ovn
# build and install
./boot.sh
./configure --localstatedir="/var" --sysconfdir="/etc" --prefix="/usr" \
--enable-ssl --with-ovs-source=/ovs/ --with-ovs-build=/ovs/
make -j8; make install
# remove unused packages to make the container light weight.
for i in $(package-cleanup --leaves --all);
do dnf remove -y $i; dnf autoremove -y;
done
rm -rf /ovs; rm -rf /ovn
| true |
57b00a42888d339c6ae465e037e70393e1d3910e | Shell | lbssousa/c3sl-mdm-debian | /test.sh | UTF-8 | 1,687 | 3.625 | 4 | [] | no_license | #!/bin/bash
# Copyright (C) 2004-2007 Centro de Computacao Cientifica e Software Livre
# Departamento de Informatica - Universidade Federal do Parana - C3SL/UFPR
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
# This is just a lazy script that:
# - downloads the mdm git repository
# - generates a .tar.gz for it
# - puts our 'debian' directory inside the .tar.gz
# - uses 'debuild' to build the packages
# Everything is done inside the 'tmp' directory
# Abort on any error and echo every command for easy debugging.
set -e
set -x
# Refuse to run on top of a stale working directory.
if [ -d 'tmp' ]; then
echo "'tmp' directory already exists"
exit 1
fi
# Download mdm
mkdir tmp
cd tmp
git clone http://git.c3sl.ufpr.br/pub/scm/multiseat/mdm.git
# Remove 'git' stuff
cd mdm
rm -rf .git
find . -type f -name '.gitignore' -exec rm -f '{}' \;
# Create the .tar.gz
# (version is read from the MDM_VERSION='x.y.z' line in mdm/src/mdm-common)
VERSION=$(grep '^MDM_VERSION=' mdm/src/mdm-common | cut -d\' -f2)
cd ..
mv mdm mdm-${VERSION}
tar cvzf mdm_${VERSION}.orig.tar.gz mdm-${VERSION}
# Put the debian stuff there
cp -r ../debian mdm-${VERSION}/
# Create the package
cd mdm-${VERSION}
debuild
| true |
9ad0017fcbfaea0f941a31e72a676775b5d07917 | Shell | Bondzio/AUR | /usb-creator/PKGBUILD | UTF-8 | 1,841 | 2.890625 | 3 | [] | no_license | # Maintainer: carstene1ns <arch carsten-teibes de> - http://git.io/ctPKG
# Contributor: Jekyll Wu <adaptee at gmail dot com>
# Contributor: Jacco Koning <archlinux@menollo.nl>, with help from qwak
# Contributor: twa022 <twa022@gmail.com>
pkgname=usb-creator
pkgver=0.2.63
pkgrel=1
pkgdesc="Create bootable USB from a LiveCD or disc image of Ubuntu"
arch=('any')
url="https://launchpad.net/usb-creator"
license=('GPL3')
depends=('udisks2' 'syslinux' 'parted' 'dosfstools' 'mtools' 'cdrkit'
'python-dbus' 'python-gobject' 'hicolor-icon-theme')
makedepends=('python-distutils-extra')
optdepends=('kdebindings-python: for KDE frontend' 'gtk3: for GTK frontend')
install=usb-creator.install
source=("https://launchpad.net/ubuntu/+archive/primary/+files/${pkgname}_${pkgver}.tar.xz"
"remove-ubuntu-version-check.patch")
sha256sums=('852967391ded96c4fa5b0e4c11c83f7a1fdac12a437670d6673b3921943aa59f'
'bdc29966cda4921e395b07530e0d6a0bda81f99d4b0948fadfda983e05ab51cb')
prepare() {
cd $pkgname-$pkgver
patch -Np1 < ../remove-ubuntu-version-check.patch
# fix location of bootsector for syslinux 6.xx
sed 's|mbr/mbr.bin|bios/mbr.bin|' -i bin/usb-creator-helper
}
package() {
cd $pkgname-$pkgver
python3 setup.py install --root="$pkgdir/" --optimize=1
# FIXME by adaptee:
# That setup.py is written with the assumption that /usr/share/kde4/apps is part of
# $(kde-prefix --path data), which is true for ubuntu. But this assumption does not
# hold in archlinux, which will make usb-creator-kde fail to load its ui file.
# So we must put usbcreator-kde.ui to an appropriate place!
install -Dm0644 gui/usbcreator-kde.ui "$pkgdir"/usr/share/apps/usb-creator-kde/usbcreator-kde.ui
# FIXME: install manual pages
install -d "$pkgdir"/usr/share/man/man8
install -Dm0644 man/$pkgname-{gtk,kde}.8 "$pkgdir"/usr/share/man/man8
}
| true |
a2a9bd782a6c60eb57102fa789de94c2a4515aed | Shell | weijiexu1985/lanmp | /upgrade.sh | UTF-8 | 10,305 | 3.765625 | 4 | [] | no_license | #!/bin/bash
# Pin a predictable PATH for the whole upgrade run.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
# The upgrade writes under /usr/local and restarts services, so require root.
if [ $(id -u) != "0" ]; then
printf "Error: You must be root to run this script!"
exit 1
fi
# Locate the lanmp script directory; when not run from inside it, prompt
# for the path (default: ./lanmp) and cd there.
LANMP_PATH=`pwd`
if [ `echo $LANMP_PATH | awk -F/ '{print $NF}'` != "lanmp" ]; then
clear && echo "Please enter lanmp script path:"
read -p "(Default path: ${LANMP_PATH}/lanmp):" LANMP_PATH
[ -z "$LANMP_PATH" ] && LANMP_PATH=$(pwd)/lanmp
cd $LANMP_PATH/
fi
# Show the banner, then interactively collect the webroot directory and
# which webserver stack is installed (1=nginx, 2=apache, 3=both).
clear
echo "#############################################################"
echo "# LANMP Auto Update Script"
echo "# Env: Redhat/CentOS"
echo "# Intro: https://wangyan.org/blog/lanmp.html"
echo "# Version: $(awk '/version/{print $2}' $LANMP_PATH/Changelog)"
echo "#"
echo "# Copyright (c) 2012, WangYan <WangYan@188.com>"
echo "# All rights reserved."
echo "# Distributed under the GNU General Public License, version 3.0."
echo "#"
echo "#############################################################"
echo ""
echo "Please enter the webroot dir:"
read -p "(Default webroot dir: /var/www):" WEBROOT
if [ -z $WEBROOT ]; then
WEBROOT="/var/www"
fi
echo "---------------------------"
echo "Webroot dir=$WEBROOT"
echo "---------------------------"
echo ""
echo "Please choose webserver software! (1:nginx,2:apache,3:nginx+apache) (1/2/3)"
read -p "(Default: 3):" SOFTWARE
if [ -z $SOFTWARE ]; then
SOFTWARE="3"
fi
echo "---------------------------"
echo "You choose = $SOFTWARE"
echo "---------------------------"
echo ""
######################### PHP5 #########################
# Compare the latest PHP release (scraped from php.net) with the installed
# one, then ask whether to upgrade. 5.2.17p1 is pinned and never upgraded.
LATEST_PHP=$(curl -s http://www.php.net/downloads.php | awk '/Current Stable/{print $3}')
INSTALLED_PHP=$(php -r 'echo PHP_VERSION;' 2>/dev/null)
echo -e "Latest version of PHP: \033[41;37m $LATEST_PHP \033[0m"
echo -e "Installed version of PHP: \033[41;37m $INSTALLED_PHP \033[0m"
echo ""
if [ "$INSTALLED_PHP" != "5.2.17p1" ];then
echo "Do you want to upgrade PHP ? (y/n)"
read -p "(Default: n):" UPGRADE_PHP
if [ -z $UPGRADE_PHP ]; then
UPGRADE_PHP="n"
fi
echo "---------------------------"
echo "You choose = $UPGRADE_PHP"
echo "---------------------------"
echo ""
fi
######################### Nginx #########################
# Only relevant when nginx is part of the chosen stack (SOFTWARE != 2).
if [ "$SOFTWARE" != 2 ];then
INSTALLED_NGINX=$(echo `nginx -v 2>&1` | cut -d '/' -f 2)
# Latest stable version scraped from the nginx.org front page.
LATEST_NGINX=$(curl -s http://nginx.org/| awk -F- '/nginx-/{print $6}' | head -1|cut -d '<' -f 1)
echo -e "Latest version of Nginx: \033[41;37m $LATEST_NGINX \033[0m"
echo -e "Installed version of Nginx: \033[41;37m $INSTALLED_NGINX \033[0m"
echo ""
echo "Do you want to upgrade Nginx ? (y/n)"
read -p "(Default: n):" UPGRADE_NGINX
if [ -z $UPGRADE_NGINX ]; then
UPGRADE_NGINX="n"
fi
echo "---------------------------"
echo "You choose = $UPGRADE_NGINX"
echo "---------------------------"
echo ""
fi
######################### phpMyAdmin #########################
# The installed phpMyAdmin version is tracked in version.txt (created here
# with 0 on first run); the latest is scraped from the SourceForge listing.
if [ ! -s "$LANMP_PATH/version.txt" ]; then
echo -e "phpmyadmin\t0" > $LANMP_PATH/version.txt
fi
INSTALLED_PMA=$(awk '/phpmyadmin/{print $2}' $LANMP_PATH/version.txt)
LATEST_PMA=$(elinks http://nchc.dl.sourceforge.net/project/phpmyadmin/phpMyAdmin/ | awk -F/ '{print $7F}' | sort -n | grep -iv '-' | tail -1)
echo -e "Latest version of phpmyadmin: \033[41;37m $LATEST_PMA \033[0m"
echo -e "Installed version of phpmyadmin: \033[41;37m $INSTALLED_PMA \033[0m"
echo ""
echo "Do you want to upgrade phpmyadmin ? (y/n)"
read -p "(Default: n):" UPGRADE_PMA
if [ -z $UPGRADE_PMA ]; then
UPGRADE_PMA="n"
fi
echo "---------------------------"
echo "You choose = $UPGRADE_PMA"
echo "---------------------------"
echo ""
# Read a single raw keystroke from the terminal (echo off, cbreak mode),
# restoring the saved stty settings afterwards.
get_char()
{
SAVEDSTTY=`stty -g`
stty -echo
stty cbreak
dd if=/dev/tty bs=1 count=1 2> /dev/null
stty -raw
stty echo
stty $SAVEDSTTY
}
echo "Press any key to start Upgrade..."
echo "Or Ctrl+C cancel and exit ?"
echo ""
char=`get_char`
######################### Extract Function #########################
# Download ($1 = primary URL, $2 = fallback URL) into $LANMP_PATH, then
# unpack the tarball and cd into its top-level directory.
# Side effects: sets the globals SOFTWARE_NAME and EXTRACTED_DIR, and
# leaves the shell inside the extracted directory on success.
Extract(){
local TARBALL_TYPE
if [ -n $1 ]; then
SOFTWARE_NAME=`echo $1 | awk -F/ '{print $NF}'`
TARBALL_TYPE=`echo $1 | awk -F. '{print $NF}'`
wget -c -t3 -T3 $1 -P $LANMP_PATH/
# Primary download failed: clean up the partial file and try the mirror.
if [ $? != "0" ];then
rm -rf $LANMP_PATH/$SOFTWARE_NAME
wget -c -t3 -T60 $2 -P $LANMP_PATH/
SOFTWARE_NAME=`echo $2 | awk -F/ '{print $NF}'`
TARBALL_TYPE=`echo $2 | awk -F. '{print $NF}'`
fi
else
SOFTWARE_NAME=`echo $2 | awk -F/ '{print $NF}'`
TARBALL_TYPE=`echo $2 | awk -F. '{print $NF}'`
wget -c -t3 -T3 $2 -P $LANMP_PATH/ || exit
fi
# Top-level directory inside the tarball (taken from its last entry).
EXTRACTED_DIR=`tar tf $LANMP_PATH/$SOFTWARE_NAME | tail -n 1 | awk -F/ '{print $1}'`
# Pick the tar decompression flag from the file extension.
case $TARBALL_TYPE in
gz|tgz)
tar zxf $LANMP_PATH/$SOFTWARE_NAME -C $LANMP_PATH/ && cd $LANMP_PATH/$EXTRACTED_DIR || return 1
;;
bz2|tbz)
tar jxf $LANMP_PATH/$SOFTWARE_NAME -C $LANMP_PATH/ && cd $LANMP_PATH/$EXTRACTED_DIR || return 1
;;
tar|Z)
tar xf $LANMP_PATH/$SOFTWARE_NAME -C $LANMP_PATH/ && cd $LANMP_PATH/$EXTRACTED_DIR || return 1
;;
*)
echo "$SOFTWARE_NAME is wrong tarball type ! "
esac
}
echo "===================== PHP5 Upgrade ===================="
# Rebuild PHP from source: back up the current /usr/local/php, fetch (or
# reuse) the tarball, configure for the chosen webserver stack, build,
# then carry the old extensions and config over and restart the service.
if [[ "$UPGRADE_PHP" = "y" && "$INSTALLED_PHP" > 5 ]];then
if [[ -d "/usr/local/php.bak" && -d "/usr/local/php" ]];then
rm -rf /usr/local/php.bak/
fi
\mv /usr/local/php /usr/local/php.bak
cd $LANMP_PATH
# Use a previously downloaded tarball if present, otherwise download.
if [ ! -s php-5.5.*.tar.gz ]; then
# Fix: this read "${$LATEST_PHP}", which is a bash "bad substitution"
# error; the plain ${LATEST_PHP} expansion was intended.
LATEST_PHP_LINK="http://php.net/distributions/php-${LATEST_PHP}.tar.gz"
BACKUP_PHP_LINK="http://wangyan.org/download/lanmp-src/php-latest.tar.gz"
Extract ${LATEST_PHP_LINK} ${BACKUP_PHP_LINK}
else
tar -zxf php-5.5.*.tar.gz
cd php-5.5.*/
fi
# SOFTWARE != 1 means Apache is present: build mod_php via apxs;
# otherwise build the FPM SAPI for nginx.
if [ "$SOFTWARE" != "1" ]; then
./configure \
--prefix=/usr/local/php \
--with-apxs2=/usr/local/apache/bin/apxs \
--with-curl \
--with-curlwrappers \
--with-freetype-dir \
--with-gettext \
--with-gd \
--with-iconv-dir \
--with-jpeg-dir \
--with-libxml-dir \
--with-mcrypt \
--with-mhash \
--with-mysql=/usr/local/mysql \
--with-mysqli=/usr/local/mysql/bin/mysql_config \
--with-openssl \
--with-pear \
--with-png-dir \
--with-xmlrpc \
--with-zlib \
--enable-bcmath \
--enable-calendar \
--enable-exif \
--enable-ftp \
--enable-gd-native-ttf \
--enable-inline-optimization \
--enable-mbregex \
--enable-mbstring \
--enable-shmop \
--enable-soap \
--enable-sockets \
--enable-sysvsem \
--enable-sysvshm \
--enable-xml \
--enable-zip
else
./configure \
--prefix=/usr/local/php \
--with-curl \
--with-freetype-dir \
--with-gettext \
--with-gd \
--with-iconv-dir \
--with-jpeg-dir \
--with-libxml-dir \
--with-mcrypt \
--with-mhash \
--with-mysql=/usr/local/mysql \
--with-mysqli=/usr/local/mysql/bin/mysql_config \
--with-openssl \
--with-pear \
--with-png-dir \
--with-xmlrpc \
--with-zlib \
--enable-bcmath \
--enable-calendar \
--enable-exif \
--enable-fpm \
--enable-ftp \
--enable-gd-native-ttf \
--enable-inline-optimization \
--enable-mbregex \
--enable-mbstring \
--enable-pcntl \
--enable-shmop \
--enable-soap \
--enable-sockets \
--enable-sysvsem \
--enable-sysvshm \
--enable-xml \
--enable-zip
fi
make ZEND_EXTRA_LIBS='-liconv'
make install
echo "---------- PHP Extension ----------"
# Copy previously built shared extensions from the backup into the new
# extension directory (directory names include the API version).
PHP_EXT1=`ls /usr/local/php.bak/lib/php/extensions/`
PHP_EXT2=`ls /usr/local/php/lib/php/extensions/`
mkdir -p /usr/local/php/lib/php/extensions/${PHP_EXT2}
cp /usr/local/php.bak/lib/php/extensions/${PHP_EXT1}/* /usr/local/php/lib/php/extensions/${PHP_EXT2}
echo "---------- PHP Config ----------"
# Carry over php.ini and restart the matching service for the stack.
cp /usr/local/php.bak/lib/php.ini /usr/local/php/lib/php.ini
if [ "$SOFTWARE" != "1" ]; then
pkill httpd
/etc/init.d/httpd start
else
cp /usr/local/php.bak/etc/php-fpm.conf /usr/local/php/etc/php-fpm.conf
pkill php-fpm
/etc/init.d/php-fpm start
fi
rm -rf $LANMP_PATH/src/{php-*,memcache-*,xcache-*}
fi
echo "===================== Nginx Upgrade ===================="
if [[ "$UPGRADE_NGINX" = "y" || "$UPGRADE_NGINX" = "Y" ]];then
cd $LANMP_PATH
if [ ! -s nginx-${LATEST_NGINX}.tar.gz ]; then
LATEST_NGINX_LINK="http://nginx.org/download/nginx-$LATEST_NGINX.tar.gz"
BACKUP_NGINX_LINK="http://wangyan.org/download/lanmp-src/nginx-latest.tar.gz"
Extract ${LATEST_NGINX_LINK} ${BACKUP_NGINX_LINK}
else
tar -zxf nginx-${LATEST_NGINX}.tar.gz
cd nginx-${LATEST_NGINX}/
fi
./configure \
--pid-path=/var/run/nginx.pid \
--lock-path=/var/lock/nginx.lock \
--user=www \
--group=www \
--with-http_ssl_module \
--with-http_dav_module \
--with-http_flv_module \
--with-http_realip_module \
--with-http_gzip_static_module \
--with-http_stub_status_module \
--with-mail \
--with-mail_ssl_module \
--with-pcre \
--with-debug \
--with-ipv6 \
--http-client-body-temp-path=/var/tmp/nginx/client \
--http-proxy-temp-path=/var/tmp/nginx/proxy \
--http-fastcgi-temp-path=/var/tmp/nginx/fastcgi \
--http-uwsgi-temp-path=/var/tmp/nginx/uwsgi \
--http-scgi-temp-path=/var/tmp/nginx/scgi
make
\mv /usr/local/nginx/sbin/nginx /usr/local/nginx/sbin/nginx.old
cp objs/nginx /usr/local/nginx/sbin/nginx
/usr/local/nginx/sbin/nginx -t
make upgrade
echo "Upgrade completed!"
/usr/local/nginx/sbin/nginx -v
echo ""
/etc/init.d/nginx restart
rm -rf $LANMP_PATH/src/nginx-*
fi
echo "===================== phpMyAdmin Upgrade ===================="
if [[ "$UPGRADE_PMA" = "y" || "$UPGRADE_PMA" = "Y" ]];then
PMA_LINK="http://nchc.dl.sourceforge.net/project/phpmyadmin/phpMyAdmin"
mv $WEBROOT/phpmyadmin/config.inc.php $WEBROOT/config.inc.php
rm -rf $WEBROOT/phpmyadmin/
if [ ! -s phpMyAdmin-${LATEST_PMA}-all-languages.tar.gz ]; then
LATEST_PMA_LINK="${PMA_LINK}/${LATEST_PMA}/phpMyAdmin-${LATEST_PMA}-all-languages.tar.gz"
BACKUP_PMA_LINK="http://wangyan.org/download/lanmp/phpMyAdmin-latest-all-languages.tar.gz"
Extract ${LATEST_PMA_LINK} ${BACKUP_PMA_LINK}
mkdir -p $WEBROOT/phpmyadmin
mv * $WEBROOT/phpmyadmin
else
tar -zxf phpMyAdmin-${LATEST_PMA}-all-languages.tar.gz -C $WEBROOT
mv $WEBROOT/phpMyAdmin-${LATEST_PMA}-all-languages $WEBROOT/phpmyadmin
fi
mv $WEBROOT/config.inc.php $WEBROOT/phpmyadmin/
sed -i '/phpmyadmin/d' $LANMP_PATH/version.txt
echo -e "phpmyadmin\t${LATEST_PMA}" >> $LANMP_PATH/version.txt 2>&1
rm -rf $LANMP_PATH/src/phpMyAdmin-*
fi
if [ ! -d "$LANMP_PATH/src" ];then
mkdir -p $LANMP_PATH/src/
fi
\mv $LANMP_PATH/{*gz,*-*/,ioncube,package.xml} $LANMP_PATH/src >/dev/null 2>&1
| true |
689c1d582c55e2256db40ce029949c31cfa23b0b | Shell | gabriellep2/PortfolioTecnico | /FundamentosTI/exemplos/inteiros.sh | UTF-8 | 506 | 3.390625 | 3 | [] | no_license | localhost:~# vi inteiros.sh
localhost:~# chmod u+x inteiros.sh
localhost:~# cat inteiros.sh
#!/bin/bash
# Autora: Gabrielle Pires
# Descrição: Esse arquivo lerá um valor N e imprimirá todos os valores inteiros entre 1 (inclusive) e N (inclusive), sabendo que sempre será n>0
echo "Digite um numero"
read x
for i in $(seq 1 $x)
do
echo " - $i"
done
localhost:~# ./inteiros.sh
Digite um numero
0
localhost:~# ./inteiros.sh
Digite um numero
5
- 1
- 2
- 3
- 4
- 5
| true |
3d6e95e6a84d149545bf1f28309bfe2660f146a9 | Shell | plamolinux/Plamo-src | /boot/installer/usr/lib/setup/SeTmaketagj | EUC-JP | 8,515 | 3.65625 | 4 | [] | no_license | #!/bin/sh
MOUNT=/tag
mkdir -p $MOUNT
dialog --title "ॿեκ" --yesno "
եϡǥåȤΣܤ˽ޤ\n\
ơɤΥѥåȡ뤹뤫뤤ϥ\n\
åפ뤫ꤹ롢̤ʥեǤ\n\
äƤɤΥեȡ뤹뤫Ƥȡ\n\
ʤȡ뤹뤫ɤ䤤碌ʤΤǡ\n\
ȡ뤬ԤޤޤǤ˰٤Plamo Linux\n\
Linuxȡ뤷ȤʤСޤǥեȤ\n\
եȤäƥȡ뤷Ƥߤ뤳Ȥᤷޤ\n\
ȤΤϡॿեˤäɬפʡ\n\
ѥåƤ˴ؤޤꤴ¸ΤǤʤȻפ\n\
ǤΥץϤʤǤˤɤΥѥå\n\
ȡ뤷İƤ뤳ȤȤƤޤ\n\
褯狼ʤϡ̾ΥեѤƥ\n\
ȡ뤷Ƥ\
ॿեޤ" 19 70
if [ $? = 1 -o $? = 255 ]; then
exit
fi
dialog --title "ॿեγĥҤ" --inputbox "եγĥҤϤƤ\n\
γĥҤϡॿեΥե̾κǸ\n\
ʬ˻Ȥޤ㤨С錄ϥե\n\
ĥҤ'.pat'ꤷƤޤǤ餽줾Υǥ\n\
åȤΣܤΥǥˤ錄Υե\n\
ե̾'tagfile.pat'Ȥʤޤ.tgz.tarʳ\n\
顢ʳĥҤѤ뤳ȤǤޤĥҤ\n\
ԥꥪɤȤ³ʸ鹽ޤʸʳ\n\
ǤϹԤޤ" 15 70 2> /tmp/SeTtagmake
if [ $? = 1 -o $? = 255 -o ! "`cat /tmp/SeTtagmake | cut -b1`" = "." -o "`cat /tmp/SeTtagmake`" = "" ]; then
rm -f /tmp/SeTtagmake
exit
fi
TAGEXT="`cat /tmp/SeTtagmake`"
dialog --title "ѡȥ˥塼Ѥޤ?" --menu "ߤ˱\
ѡȥ˥塼ѥå뤳ȤǤޤΡޥ˥塼\
'Cѥ饷ƥ'Τ褦ɽΤǤѡ\
˥塼'C饤֥''Cѥ''C롼ɥե'Τ褦\
ɽˤʤޤ -- ġΥѥåɽ櫓Ǥѡȥ˥塼\
ѤȡΥ֥ƥबưΤɬܤΥѥååפ\
ޤȤǽǤ顤褯Ƥ狼äƤʤФʤޤ" \
15 65 2 \
"normal" "ѥåΥ롼פΤ˥Ρޥ˥塼Ѥ" \
"expert" "ġΥѥåΤ˥ѡȥ˥塼Ѥ" 2> /tmp/SeTreturn
if [ $? = 1 -o $? = 255 ]; then
rm -f /tmp/SeTreturn
exit
fi
MENU="`cat /tmp/SeTreturn`"
rm -f /tmp/SeTreturn
if [ "$MENU" = "normal" ]; then
MAKETAG="maketag.ez"
else
MAKETAG="maketag"
fi
while [ 0 ]; do
dialog --title "ǥåȤΥޥ" --menu "ǥåȤ ܤΥǥʥ饤ȥץƥȾ֤Ǥʤ\n\
ȤǧƤˤ˥ॿեޤ\n\
뤤ϥϡɥǥ饤ȡ뤷Ƥˤϡ\n\
ǥǥ쥯ȥ˺ޤ\n\
ʲ飱Ƥ" 13 70 3 \
"/dev/fd0" "եåԡɥ饤֣(a:)˥ॿե" \
"/dev/fd1" "եåԡɥ饤֣(b:)˥ॿե" \
"Directory" "ǥΥǥ쥯ȥ˥ե" 2> /tmp/SeTreturn
if [ $? = 1 -o $? = 255 ]; then
rm -f /tmp/SeTreturn
break
fi
CHOICE="`cat /tmp/SeTreturn`"
rm -f /tmp/SeTreturn
if [ "$CHOICE" = "/dev/fd0" -o "$CHOICE" = "/dev/fd1" ]; then
dialog --title "INSERT DISK IN DRIVE $CHOICE" --msgbox "
ॿեǥåȤΣܤ\n\
եåԡơENTERƤ\n\
եåԡ饤ȥץƥȾ֤ˤʤäƤƤϤޤ" 7 70
if [ $? = 255 ]; then
continue
fi
mount $CHOICE $MOUNT -t vfat
if [ ! $? = 0 ]; then
dialog --title "ޥȥ顼" --msgbox "ɥ饤$CHOICEΥեåԡMSDOSΥե륷ƥȤ\n\
ޥȤǤޤǤ" 6 70
umount $MOUNT 2> /dev/null
continue
fi
if [ -r $MOUNT/$MAKETAG -o $MOUNT/maketag ]; then
if [ -r $MOUNT/$MAKETAG ]; then
sh $MOUNT/$MAKETAG
else
sh $MOUNT/maketag
fi
else # no tagfile create script!
dialog --title "եץȤޤ" --msgbox "ޤ'maketag'ץȤĤޤΤǡ\n\
ॿեǤޤ\n\
maketagץȤSlackware 1.1.2Ƴ졢ѥå\n\
뤿Υ˥塼ɽΤǤǥ\n\
ƥȡ뤹ѥå̤ˡϤޤ\n\
ޤĤѥåϥȡ뤵Ǥ礦¿ʬ\n\
ǥΥեԽʤФʤʤǤ礦" 13 70
umount $MOUNT
continue
fi
if [ ! -r /tmp/SeTnewtag ]; then
umount $MOUNT
continue
fi
cp /tmp/SeTnewtag $MOUNT/tagfile$TAGEXT
if [ $? = 0 ]; then
dialog --title "ե뤬ޤ" --msgbox "ॿե뤬ޤĥҤϰʲ̤Ǥ \n\
$TAGEXT" 8 70
else
dialog --title "ԡ顼" --msgbox "ॿեǥإԡǤޤǤ\n\
ǥ饤ȥץƥȤƤʤ뤤϶\n\
뤫ɤĴ٤ơĩ路Ƥ" 9 70
fi
umount $MOUNT
fi
if [ "$CHOICE" = "Directory" ]; then
dialog --title "ǥ쥯ȥ" --inputbox "ॿեǥΣܤ\n\
ǥ쥯ȥꤷƤΥǥ쥯ȥ\n\
ϡߤΥե륷ƥΤɤ˥ޥȤƤʤ\n\
Фʤޤ" 11 70 2> /tmp/SeTreturn
if [ $? = 1 -o $? = 255 ]; then
rm -f /tmp/SeTreturn
continue
fi
DIR="`cat /tmp/SeTreturn`"
rm -f /tmp/SeTreturn
if [ ! -r $DIR/maketag -a ! -r $DIR/$MAKETAG ]; then
dialog --title "ե륹ץȤޤ" --msgbox "ޤꤵ줿ǥ쥯ȥ'maketag'ץ\n\
ʤᤳΥǥåȤΤΥॿե\n\
ǤޤmaketagץȤSlackware 1.1.2\n\
Ƴ졢ѥå˥塼ɽΤǤ\n\
ߡѥåƤȡ뤹뤫ɤ\n\
ꤹˡϤޤĤϥȡ뤵\n\
ȤǤ礦Υǥ쥯ȥΥեԽ\n\
ʤФʤʤ⤷ޤ" 13 70
continue
fi
if [ -r $DIR/$MAKETAG ]; then
sh $DIR/$MAKETAG
else
sh $DIR/maketag
fi
if [ ! -r /tmp/SeTnewtag ]; then
continue
fi
cp /tmp/SeTnewtag $DIR/tagfile$TAGEXT
if [ $? = 0 ]; then
dialog --title "ե뤬ޤ" --msgbox "ॿե뤬ޤĥҤϰʲ̤Ǥ\n\
$TAGEXT" 8 70
else
dialog --msgbox "ä狼ޤɤ褦Ǥ" 6 70
fi
fi
done
| true |
b00038c23daf3cfd72002e727ea77f9e15dedcb4 | Shell | tteck/Proxmox | /install/qbittorrent-install.sh | UTF-8 | 1,266 | 2.734375 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Copyright (c) 2021-2023 tteck
# Author: tteck (tteckster)
# License: MIT
# https://github.com/tteck/Proxmox/raw/main/LICENSE
source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y curl
$STD apt-get install -y sudo
$STD apt-get install -y mc
msg_ok "Installed Dependencies"
msg_info "Installing qbittorrent-nox"
$STD apt-get install -y qbittorrent-nox
mkdir -p /.config/qBittorrent/
cat <<EOF >/.config/qBittorrent/qBittorrent.conf
[Preferences]
WebUI\Password_PBKDF2="@ByteArray(amjeuVrF3xRbgzqWQmes5A==:XK3/Ra9jUmqUc4RwzCtrhrkQIcYczBl90DJw2rT8DFVTss4nxpoRhvyxhCf87ahVE3SzD8K9lyPdpyUCfmVsUg==)"
WebUI\Port=8090
WebUI\UseUPnP=false
WebUI\Username=admin
EOF
msg_ok "qbittorrent-nox"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/qbittorrent-nox.service
[Unit]
Description=qBittorrent client
After=network.target
[Service]
ExecStart=/usr/bin/qbittorrent-nox --webui-port=8090
Restart=always
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now qbittorrent-nox
msg_ok "Created Service"
motd_ssh
customize
msg_info "Cleaning up"
$STD apt-get autoremove
$STD apt-get autoclean
msg_ok "Cleaned"
| true |
f94d4c892f1b7dd4e6ad582723942c6d8ea2f49b | Shell | bijithnair/Project-Cyborg | /Foreman-codes/slack-notify.sh | UTF-8 | 1,007 | 3.671875 | 4 | [] | no_license | #!/bin/bash
function post_to_slack () {
# format message as a code block ```${msg}```
SLACK_MESSAGE="\`\`\`$1\`\`\`"
SLACK_URL=https://hooks.slack.com/services/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
case "$2" in
INFO)
SLACK_ICON=':slack:'
;;
WARNING)
SLACK_ICON=':warning:'
;;
ERROR)
SLACK_ICON=':bangbang:'
;;
*)
SLACK_ICON=':slack:'
;;
esac
curl -X POST --data "payload={\"text\": \"${SLACK_ICON} ${SLACK_MESSAGE}\", \"username\": \"login-bot\"}" ${SLACK_URL}
}
USER="User: $PAM_USER"
SERVICE="Service: $PAM_SERVICE"
REMOTE="Remote host: $PAM_RHOST"
TTY="TTY: $PAM_TTY"
DATE="Date: `date`"
SERVER="Server: `hostname`"
#CHAGE="`chage -l $PAM_USER | head -3`"
LOGINMESSAGE01="Login Alert on `hostname -s`"
LOGINMESSAGE02="USER:{$PAM_USER} got authenticated successfully!!!"
LOGINMESSAGE03="Login Details:-"
if [ "$PAM_TYPE" = "open_session" ]
then
post_to_slack "\n${LOGINMESSAGE01}\n${LOGINMESSAGE02}\n${LOGINMESSAGE03}\n${REMOTE}\n${USER}\n${SERVICE}\n${TTY}\n${DATE}\n${SERVER}" "INFO"
fi
exit 0
| true |
04c36a8efdc37b5cc8c954fa5a7076583bf8fb39 | Shell | yoppeh/minl | /2-libpipeline.sh | UTF-8 | 384 | 2.96875 | 3 | [] | no_license | #!/bin/bash
. ./environment.sh
. ./package-versions.sh
export FORCE_UNSAFE_CONFIGURE=1
if [ -f $PROGRESS_DIR/2-libpipeline ] ; then
exit 0
fi
echo "building libpipeline..."
set -e
tar xf libpipeline-${libpipeline_v}.tar.gz
cd libpipeline-${libpipeline_v}
./configure --prefix=/usr
make
make install
cd ..
rm -rf libpipeline-${libpipeline_v}
touch $PROGRESS_DIR/2-libpipeline
| true |
f3f3208187c6c93bf37b9350c0e658b5a4f797c7 | Shell | Kansukey/autoclass | /autoClass.sh | UTF-8 | 4,129 | 3.875 | 4 | [] | no_license | #!/bin/bash -e
clear
echo | cat <<EOF
############################################
# #
# C++ AUTO CLASS GENERATOR #
# https://github.com/Kansukey/autoclass #
# Made with pleasure (: #
# #
############################################
EOF
# Get all elements required
echo -e "> Class name :"
read -e className
# Ask for default includes
echo -e "\n> Do you want to browse the default include list ? (Y/n)"
read -e defaultInc
if [ "$defaultInc" != n ] ; then
echo -e ">> iostream (Y/n)" ; read -e Liostream
echo -e ">> string (Y/n)" ; read -e Lstring
echo -e ">> fstream (Y/n)" ; read -e Lfstream
fi
# Check if file already exists
if [ -a $className.hh ] ; then
echo -e "[ERROR]: Your file $className.hh already exists! exit."
exit 1;
elif [ -a $className.cpp ] ; then
echo -e "[ERROR]: Your file $className.cpp already exists! exit."
exit 1;
fi
# Header File
echo -e "> Starting to create the header file of your class $className..."
cat > $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}.hh <<EOF
//
// Header coming soon
//
#ifndef $(echo "$className" | tr '[:lower:]' '[:upper:]')_HH_
# define $(echo "$className" | tr '[:lower:]' '[:upper:]')_HH_
EOF
# Include part
if [ "$Liostream" != n ] ; then echo "#include <iostream>" >> $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}.hh ; fi
if [ "$Lstring" != n ] ; then echo "#include <string>" >> $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}.hh ; fi
if [ "$Lfstream" != n ] ; then echo "#include <fstream>" >> $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}.hh ; fi
# Here we go, let's add the content
cat >> $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}.hh <<EOF
class $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1} {
public:
$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}(void);
$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}(const $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1} &);
~$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}(void);
$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1} &operator=($(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1} const &);
};
#endif /* !$(echo "$className" | tr '[:lower:]' '[:upper:]')_HH_ */
EOF
# End of header file
echo -e "\e[92m> Done.\e[39m"
# Source file now
echo -e "\n> Starting to create the source file of your class $className..."
cat > $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}.cpp <<EOF
//
// Header coming soon
//
#include "$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}.hh"
$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}::$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}(void)
{ }
$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}::$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}(const $(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1} &other)
{ }
$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}::~$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}(void)
{ }
$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1} &$(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1}::operator=($(echo "$className" | tr '[:lower:]' '[:upper:]' <<< ${className:0:1})${className:1} const &other)
{
if (this != &other)
{
;// Do things
}
return (*this);
}
EOF
echo -e "\e[92m> Done.\e[39m"
| true |
170c1bbe47a1021570c8b39aa5b7839da73a9892 | Shell | tdk-kedion/minecraft | /auth.sh | UTF-8 | 523 | 3.125 | 3 | [] | no_license | #!/bin/bash
#查看Minecraft服务器的所有身份验证的脚本
# 使用所有Linux Minecraft脚本进入目录
cd "$( dirname $0 )"
# 读取配置文件
source config.cfg
#你想在哪里开始?(按行)
#较大的数字可能会导致屏幕闪烁。
lines=1500
# 显示输入行数最近的身份验证数,并显示所有新的身份验证,直到按下CTRL+C。
tail -n $lines -F $minecraftLog | grep -E "[0-9]*-[0-9]*-[0-9]* [0-9]*:[0-9]*:[0-9]* \[INFO\] [^/ ]"
# 重写正则表达式的新的更新
| true |
9301d3c96a648895d302d25788bd43d66b55376d | Shell | js-j/expect-nt | /blt/demos/busy | UTF-8 | 5,376 | 3.015625 | 3 | [
"TCL",
"LicenseRef-scancode-other-permissive",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | #!/bin/sh ./myblt
source bltDemo.tcl
#
# Script to test the "busy" command.
#
#
# General widget class resource attributes
#
option add *Button.padX 10
option add *Button.padY 2
option add *Scale.relief sunken
#option add *Scale.orient horizontal
option add *Entry.relief sunken
option add *Frame.borderWidth 2
set visual [winfo screenvisual .]
if { $visual == "staticgray" || $visual == "grayscale" } {
set activeBg black
set normalBg white
set bitmapFg black
set bitmapBg white
option add *top.background white
} else {
set activeBg red
set normalBg springgreen
set bitmapFg blue
set bitmapBg green
option add *Button.background khaki2
option add *Button.activeBackground khaki1
option add *Frame.background khaki2
option add *bottom.tile textureBg
# option add *Button.tile textureBg
option add *releaseButton.background limegreen
option add *releaseButton.activeBackground springgreen
option add *releaseButton.foreground black
option add *holdButton.background red
option add *holdButton.activeBackground pink
option add *holdButton.foreground black
option add *top.background springgreen
}
#
# Instance specific widget options
#
option add *top.relief sunken
option add *top.background $normalBg
option add *testButton.text "Test"
option add *quitButton.text "Quit"
option add *newButton.text "New button"
option add *holdButton.text "Hold"
option add *releaseButton.text "Release"
option add *buttonLabel.text "Buttons"
option add *entryLabel.text "Entries"
option add *scaleLabel.text "Scales"
option add *textLabel.text "Text"
if { $tk_version >= 4.0 } {
proc LoseFocus {} {
focus -force .
}
proc KeepRaised w {
# We have to do it this way because of Tk insists upon
# propagating events from toplevels to their children.
# This seems like way too much of hack just to handle
# keyboard accelerators.
bind keep-raised <Visibility> {
winop raise %W
}
bindtags $w keep-raised
}
set file bitmaps/corrugated_metal.gif
image create photo textureBg -file $file
} else {
proc LoseFocus {} {
focus none
}
proc KeepRaised w {
bind w <Visibility> {
winop raise %W
}
}
}
#
# This never gets used; it's reset by the Animate proc. It's
# here to just demonstrate how to set busy window options via
# the host window path name
#
option add *top.busyCursor bogosity
#
# Initialize a list bitmap file names which make up the animated
# fish cursor. The bitmap mask files have a "m" appended to them.
#
set bitmaps { fc_left fc_left1 fc_mid fc_right1 fc_right }
#
# Counter for new buttons created by the "New button" button
#
set numWin 0
#
# Current index into the bitmap list. Indicates the current cursor.
# If -1, indicates to stop animating the cursor.
#
set cnt -1
#
# Create two frames. The top frame will be the host window for the
# busy window. It'll contain widgets to test the effectiveness of
# the busy window. The bottom frame will contain buttons to
# control the testing.
#
frame .top
frame .bottom
#
# Create some widgets to test the busy window and its cursor
#
label .buttonLabel
button .testButton -command {
puts stdout "Not busy."
}
button .quitButton -command { exit }
entry .entry
scale .scale
text .text -width 20 -height 4
#
# The following buttons sit in the lower frame to control the demo
#
button .newButton -command {
global numWin
incr numWin
set name button#${numWin}
button .top.$name -text "$name" \
-command [list puts stdout "I am $name"]
table .top \
.top.$name $numWin+3,0 -padx 10 -pady 10
}
button .holdButton -command {
busy .top
LoseFocus
global cnt activeBg
if { $cnt < 0 } {
.top configure -bg $activeBg
set cnt 0
Animate .top
}
}
button .releaseButton -command {
catch {busy release .top} mesg
global cnt normalBg
set cnt -1
.top configure -bg $normalBg
}
#
# Notice that the widgets packed in .top and .bottom are not their children
#
table .top \
.testButton 0,0 \
.scale 1,0 \
.entry 0,1 \
.text 1,1 -fill both \
.quitButton 2,0 -cspan 2
table .bottom \
.newButton 0,0 \
.holdButton 1,0 \
.releaseButton 2,0
table configure .top \
.testButton .scale .entry .quitButton -padx 10 -pady 10
table configure .bottom \
.newButton .holdButton .releaseButton -padx 10 -pady 10
#
# Finally, realize and map the top level window
#
table . \
.top 0,0 \
.bottom 1,0
table configure . .bottom -fill both
#
# Simple cursor animation routine: Uses the "after" command to
# circulate through a list of cursors every 0.075 seconds. The
# first pass through the cursor list may appear sluggish because
# the bitmaps have to be read from the disk. Tk's cursor cache
# takes care of it afterwards.
#
proc Animate w {
global cnt
if { $cnt >= 0 } {
global bitmaps bitmapFg bitmapBg
set name [lindex $bitmaps $cnt]
set src @bitmaps/${name}
set mask bitmaps/${name}m
busy configure $w -cursor [list $src $mask $bitmapFg $bitmapBg]
incr cnt
if { $cnt > 4 } {
set cnt 0
}
after 75 Animate $w
} else {
busy configure $w -cursor watch
}
}
#
# For testing purposes allow the top level window to be resized
#
wm min . 0 0
#
# Force the demo to stay raised
#
KeepRaised .
| true |
672852612bdeb370732900b5cbf021cffb3f5e91 | Shell | cirocavani/MO651-Robotics | /setup/install-vrep.sh | UTF-8 | 263 | 2.90625 | 3 | [] | no_license | #!/usr/bin/env bash
set -eu
cd $(dirname "$0")/..
source conf/env.sh
source setup/packages.sh
echo "[ V-REP ] Installing..."
rm -rf $VREP_INSTDIR
mkdir -p $VREP_INSTDIR
tar Jxf downloads/$VREP_PKG -C $VREP_INSTDIR --strip-components=1
echo "[ V-REP ] done!"
| true |
7c86a21bd06ad79b10a481347155b26494891a0a | Shell | jmoreau-hds/flir_adk_ethernet | /sendCommand | UTF-8 | 2,757 | 3.859375 | 4 | [] | no_license | #!/bin/bash
usageMsg="$(cat <<EOF
\nusage: sendCommand <command> <command param> [args]
\n
\nCommands:
\nautoFFC <setting>
\n\tset the auto FFC setting on Boson. Valid values are "true" or "false"
\npixelFormat <setting>
\n\tset the pixel format. Valid values are "mono_8" and "mono_16" for Boson and, additionally, "color_8" for BlackFly
\n
\nOptions:
\n-n <namespace> DEFAULT flir_adk
\n\tSpecifies a namespace for the topic
\n-s <sub-namespace>
\n\tSpecifies a sub-namespace for the topic. Typically left or right
\n\te.g. topic is /flir_adk/left/pixel_format then run sendCommand
\n\tpixelFormat mono_8 -n flir_adk -s left
\n
EOF
)"
command=""
commandParam=""
commandValue=""
namespace="flir_adk"
subNamespace=""
while (( "$#" )); do
case "$1" in
-n)
namespace=$2
shift 2
;;
-s)
subNamespace=$2
shift 2
;;
-h|--help)
echo -e $usageMsg
exit 0
;;
--) # end argument parsing
shift
break
;;
-*|--*=) # unsupported flags
echo "Error: Unsupported flag $1" >&2
echo -e $usageMsg
exit 1
;;
ffc)
command="ffc"
shift 1
;;
nodeMap)
command="nodeMap"
commandParam=$2
shift 2
if [[ "$1" != "-"* ]]
then
commandValue=$1
shift 1
fi
;;
*) # preserve positional arguments
command=$1
commandParam=$2
shift 2
;;
esac
done
if [[ $command != "ffc" && (-z $command || -z $commandParam) ]]
then
echo "Error: must specify command and command parameter" >&2
echo -e $usageMsg
exit 1
fi
topicNamespace="/$namespace"
if [ ! -z $subNamespace ]
then
topicNamespace="$topicNamespace/$subNamespace"
fi
case "$command" in
autoFFC)
rostopic pub -1 "$topicNamespace/auto_ffc" std_msgs/Bool "data: $commandParam"
;;
pixelFormat)
rostopic pub -1 "$topicNamespace/pixel_format" std_msgs/String "$commandParam"
;;
ffc)
rostopic pub -1 "$topicNamespace/ffc" std_msgs/Empty "{}"
;;
setROI)
rostopic pub -1 "$topicNamespace/set_roi" sensor_msgs/RegionOfInterest "{$commandParam}"
;;
setCenterROI)
rostopic pub -1 "$topicNamespace/set_center_roi" sensor_msgs/RegionOfInterest "{$commandParam}"
;;
nodeMap)
if [ -z $commandValue ]
then
rosservice call "$topicNamespace/get_node" "$commandParam"
else
rostopic pub -1 "$topicNamespace/set_node" diagnostic_msgs/KeyValue "{key: '$commandParam', value: \'$commandValue\'}"
fi
;;
*)
echo -e "\nError: command '$command' not recognized\n"
;;
esac
| true |
c96c66eda2bc65d3adbe589cc78fa41bff3253c0 | Shell | pribesme7/TRAIN | /runTrainForSeparationScans.sh | UTF-8 | 3,427 | 3.609375 | 4 | [] | no_license | #!/bin/bash
# 2/04/2016 AGorzawski:
#######################################################
## Here edit what to use for the all calc process
##
## fillingSchemeFile one of the *.in files in the folder
## version = { nominal 2015 2016 }
#
#fillingSchemeFile=train25_2040_72.in
fillingSchemeFile=train25nom.in.emittance
version=2016
#fillingSchemeFile=train_4440_170m.in
#version=2015
#######################################################
if [ "$#" -ne 2 ]; then
echo " "
echo "Illegal number of parameters no IP selected no FILE with input values!"
echo " use: script {ip1|ip5} {filenamewithsteps}"
echo " Actual settings used for leveling script are: filling scheme: $fillingSchemeFile and optics: $version"
exit 0
fi
#######################################################
# The following part will iterate over the step (in mm) in given file and will assign the separation
# for a given IP. will run the TRAIN code and agreggate the code
#
iptolevel=$1
filewithstepstolevel=$2
foldername=$(date +%Y%m%d)"_"$iptolevel"_optics_"$version"_sep_scan_for_$fillingSchemeFile"
mkdir -p "$foldername"
templatefile=MAD_PART/collisionConfiguration.$version.tmp.XingPlaneSeparation
outputfile=MAD_PART/collisionConfiguration.$version
referencePlotFolder="notset"
while read -r line
do
stringarray=($line)
separation=${stringarray[0]}
separationXing=${stringarray[1]}
echo "################ UPDATE collision conf for optics $version and Separations: SEPARATION plane $separation, XING plane: $separationXing" ################"
echo '################ GENERATING the setup.input file for the given filling scheme: '$fillingSchemeFile
IP1SEP=0.0
if [ $iptolevel == "ip1" ]; then
IP1SEP=$separation
fi
IP1SEPXING=0.0
if [ $iptolevel == "ip1" ]; then
IP1SEPXING=$separationXing
fi
IP2SEP=0.0
IP8SEP=0.0
IP5SEP=0.0
if [ $iptolevel == "ip5" ]; then
IP5SEP=$separation
fi
IP5SEPXING=0.0
if [ $iptolevel == "ip5" ]; then
IP5SEPXING=$separationXing
fi
echo "$(eval "echo \"$(cat $templatefile)\"")" > $outputfile
echo '################ ...DONE.';
echo "################ Run MAD with updated files and with optics: $version"
./updateMadFiles.sh $version
cat MAD_PART/collisionConfiguration.2016
echo "################ ...DONE for MAD files with optics: $version"
echo "################ RUN TRAIN $fillingSchemeFile"
./runTrainForFillingScheme.sh $fillingSchemeFile noplot
touch RESULTS/$fillingSchemeFile/testFile.hhh
touch RESULTS/$fillingSchemeFile/testFile.ggg
touch RESULTS/$fillingSchemeFile/testFile
##copy the results to separate folder
currentResultFolder=$foldername/$fillingSchemeFile.$iptolevel.SEP.$separation.XING.$separationXing
if [ $referencePlotFolder == "notset" ]; then
referencePlotFolder=$currentResultFolder
fi
echo "################ MOVING TRAIN results to : $currentResultFolder"
mkdir -p "$currentResultFolder"
cp RESULTS/$fillingSchemeFile/* $currentResultFolder
##plot the results
# python plotVerHorOffsetsComparison.py $referencePlotFolder/ $currentResultFolder/ falseForDislpay
echo "################ DONE for SEPARATION plane $separation , XING plane: $separationXing #################################"
done < "$filewithstepstolevel"
#cp $filewithstepstolevel $foldername
# create an animated git from the all plots.
#convert -delay 250 -loop 0 *.png $foldername.gif
echo "DONE. Search the $foldername for the results"
| true |
e8019e9fb5b216dd6cb8c9780901f313c143fd52 | Shell | shirokurostone/dotfiles | /snippet.sh | UTF-8 | 1,506 | 3.65625 | 4 | [] | no_license |
_snippet_usage(){
echo <<EOS
snippet
Usage:
snippet add [snippet] add snippet
snippet show [snippet] show snippet
snippet rm [snippet] remove snippet
snippet list show snippet list
EOS
}
snippet(){
local snippet_dir="$HOME/.snippet"
case $1 in
"list")
if [ $# -ne 1 ]; then
_snippet_usage
return 1
fi
ls -1 "${snippet_dir}"
;;
"show")
if [ $# -ne 2 ]; then
_snippet_usage
return 1
fi
cat "${snippet_dir}/$2"
;;
"add")
if [ $# -ne 2 ]; then
_snippet_usage
return 1
fi
vim "${snippet_dir}/$2"
;;
"rm")
if [ $# -ne 2 ]; then
_snippet_usage
return 1
fi
rm "${snippet_dir}/$2"
;;
esac
return 0
}
_snippet(){
_arguments '1: :->subcommand' '2: :->snippets'
case "$state" in
subcommand)
_values \
"$state" \
"list" \
"show" \
"add" \
"rm"
;;
snippets)
_values \
"$state" \
$(snippet list)
;;
esac
}
compdef _snippet snippet
fzf-select-snippet(){
local snippet_dir="$HOME/.snippet"
BUFFER="$(
snippet show $(snippet list | fzf --prompt "snippet> " \
--ansi --reverse --no-sort --exact --bind=ctrl-z:ignore \
--preview "bat --color=always --style=numbers -l zsh '${snippet_dir}/{}'")
)"
CURSOR=$#BUFFER
zle reset-prompt
}
zle -N fzf-select-snippet
bindkey '^s^n' fzf-select-snippet | true |
2aca90124d30626a54580e76b6736702ae4c2780 | Shell | barak/hg-fast-export | /hg-reset.sh | UTF-8 | 1,506 | 3.90625 | 4 | [] | no_license | #!/bin/sh
# Copyright (c) 2007, 2008 Rocco Rutte <pdmef@gmx.net> and others.
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
ROOT="`dirname $0`"
REPO=""
PFX="hg2git"
SFX_MARKS="marks"
SFX_MAPPING="mapping"
SFX_HEADS="heads"
SFX_STATE="state"
QUIET=""
PYTHON=${PYTHON:-python}
USAGE="[-r <repo>] -R <rev>"
LONG_USAGE="Print SHA1s of latest changes per branch up to <rev> useful
to reset import and restart at <rev>.
If <repo> is omitted, use last hg repository as obtained from state file,
GIT_DIR/$PFX-$SFX_STATE by default.
Options:
-R Hg revision to reset to
-r Mercurial repository to use
"
. "$(git --exec-path)/git-sh-setup"
cd_to_toplevel
while case "$#" in 0) break ;; esac
do
case "$1" in
-r|--r|--re|--rep|--repo)
shift
REPO="$1"
;;
-*)
# pass any other options down to hg2git.py
break
;;
*)
break
;;
esac
shift
done
# for convenience: get default repo from state file
if [ x"$REPO" = x -a -f "$GIT_DIR/$PFX-$SFX_STATE" ] ; then
REPO="`egrep '^:repo ' "$GIT_DIR/$PFX-$SFX_STATE" | cut -d ' ' -f 2`"
echo "Using last hg repository \"$REPO\""
fi
# make sure we have a marks cache
if [ ! -f "$GIT_DIR/$PFX-$SFX_MARKS" ] ; then
touch "$GIT_DIR/$PFX-$SFX_MARKS"
fi
GIT_DIR="$GIT_DIR" $PYTHON "$ROOT/hg-reset.py" \
--repo "$REPO" \
--marks "$GIT_DIR/$PFX-$SFX_MARKS" \
--mapping "$GIT_DIR/$PFX-$SFX_MAPPING" \
--heads "$GIT_DIR/$PFX-$SFX_HEADS" \
--status "$GIT_DIR/$PFX-$SFX_STATE" \
"$@"
exit $?
| true |
2c34eb054bc7567c7a65e020ee9c5509f8b863d9 | Shell | Liangtaiwan/dotfiles | /zshrc | UTF-8 | 8,296 | 2.96875 | 3 | [] | no_license | #############################
# Load zle
#############################
# url-quote-magic automatically percent-quotes special characters when a URL
# is typed or pasted on the command line.
autoload -Uz url-quote-magic
zle -N self-insert url-quote-magic
#############################
# Load plugins
#############################
# On Arch, prefer the pacman-packaged versions of these tools and remind the
# user when any of them is missing.
if [[ -f /etc/arch-release ]]; then
package_list=(fzf diff-so-fancy zsh-syntax-highlighting zsh-completions)
if ! pacman -Qq ${package_list[@]} > /dev/null 2>&1; then
echo "You're using Archlinux! Use pacman to manage some nice tools:"
echo ${package_list[@]}
fi
fi
# fzf: use the system-wide install when present, otherwise fall back to a
# git checkout in ~/.fzf (installed on first use).
if [[ -x /usr/bin/fzf && -f /usr/share/fzf/key-bindings.zsh &&
-f /usr/share/fzf/completion.zsh ]]; then
source /usr/share/fzf/key-bindings.zsh
source /usr/share/fzf/completion.zsh
else
if ! [[ -f ~/.fzf.zsh ]] ; then
if ! [[ -f ~/.fzf/install ]] ; then
rm -rf ~/.fzf
git clone --depth 1 https://github.com/junegunn/fzf.git ~/.fzf
fi
~/.fzf/install --all
fi
source ~/.fzf.zsh
fi
# -m: multi-select; --cycle: wrap around when scrolling past either end.
export FZF_DEFAULT_OPTS="-m --cycle"
# Prefer ripgrep, then the_silver_searcher, for listing candidate files.
if (( $+commands[rg] )) ; then
export FZF_DEFAULT_COMMAND='rg -l ""'
elif (( $+commands[ag] )); then
export FZF_DEFAULT_COMMAND='ag -l -g ""'
fi
# Bootstrap the zplug plugin manager on first run.
if ! [[ -f "${HOME}/.zplug/init.zsh" ]]; then
curl -sL https://raw.githubusercontent.com/zplug/installer/master/installer.zsh | zsh
fi
# Shallow-clone all managed plugins.
zstyle :zplug:tag depth 1
source "${HOME}/.zplug/init.zsh"
zplug "zplug/zplug"
zplug "mafredri/zsh-async", use:async.zsh
zplug "leomao/vim.zsh", use:vim.zsh, defer:1
# pure prompt: do not fetch git remotes in the background.
export PURE_GIT_PULL=0
zplug "liangtaiwan/pure", use:pure.zsh, defer:2
export ENHANCD_DISABLE_HOME=1
export ENHANCD_DOT_ARG='.'
zplug "b4b4r07/enhancd", use:init.sh
# Syntax highlighting: system package on Arch, zplug-managed elsewhere.
if [[ -f /etc/arch-release ]]; then
if [[ -d /usr/share/zsh/plugins/zsh-syntax-highlighting ]]; then
zplug "/usr/share/zsh/plugins/zsh-syntax-highlighting", from:local, defer:3
fi
else
zplug "zsh-users/zsh-completions", use:"*.plugin.zsh"
zplug "so-fancy/diff-so-fancy", as:command, use:diff-so-fancy
zplug "zsh-users/zsh-syntax-highlighting", use:"*.plugin.zsh", defer:3
fi
export ZSH_HIGHLIGHT_HIGHLIGHTERS=(main brackets)
# Install any plugins declared above that are not yet present.
if ! zplug check --verbose; then
zplug install
fi
zplug load
#############################
# Options
#############################
# don't record duplicate history
setopt hist_ignore_dups
# no flow control (frees Ctrl-S / Ctrl-Q)
setopt noflowcontrol
# rm confirmation
setopt rm_star_wait
# Directory Stack settings
DIRSTACKSIZE=8
setopt auto_cd
setopt autopushd pushdminus pushdsilent pushdtohome pushd_ignore_dups
setopt mark_dirs
setopt multios
# also do completion for aliases
setopt complete_aliases
#############################
# Aliases
#############################
# List directory contents (exa-flavoured when available, GNU ls otherwise)
if (( $+commands[exa] )) ; then
alias ls='exa --group-directories-first'
alias l='ls -F'
alias ll='ls -glF'
alias la='ll -a'
alias lx='ll -s extension'
alias lk='ll -rs size'
alias lt='ll -ars modified'
else
alias ls='ls -h --color --group-directories-first'
alias l='ls -F'
alias ll='ls -lF'
alias la='ls -lAF'
alias lx='ls -lXB'
alias lk='ls -lSr'
alias lt='ls -lAFtr'
fi
alias sl=ls # often screw this up
# grep replacement: ripgrep > ag > recursive grep
if (( $+commands[rg] )); then
alias gg='rg'
elif (( $+commands[ag] )); then
alias gg='ag'
else
alias gg='grep -R -n'
fi
# Show history
alias history='fc -l 1'
# Tmux 256 default
alias tmux='tmux -2'
# vim alias
if [[ `vim --version 2> /dev/null | grep -- +clientserver` ]] ; then
# always use vim client server
alias vim='vim --servername vim'
fi
alias vi='vim'
alias v='vim'
if (( $+commands[nvim] )) ; then
alias v='nvim'
fi
# tmux alias
if (( $+commands[tmux] )) ; then
alias tma="tmux at -d"
alias tmat="tmux at -dt"
alias tnew="tmux new -s"
alias tml="tmux list-session"
fi
# Directory Stack alias
alias dirs='dirs -v'
alias ds='dirs'
# use thefuck if available
if (( $+commands[thefuck] )) ; then
eval $(thefuck --alias)
fi
#############################
# Completions
#############################
# Important: use a selection menu once there are at least two matches
zstyle ':completion:*:default' menu yes=long select=2
# Completion grouping
zstyle ':completion:*:options' description 'yes'
zstyle ':completion:*:descriptions' format '%F{226}Completing %F{214}%d%f'
zstyle ':completion:*' group-name ''
# Completing misc (case-insensitive and partial-word matching)
zstyle ':completion:*' matcher-list '' 'm:{[:lower:]}={[:upper:]} r:|[._-]=* r:|=*' 'm:{[:lower:][:upper:]}={[:upper:][:lower:]}'
zstyle ':completion:*' verbose yes
zstyle ':completion:*' completer _expand _complete _match _prefix _approximate _list _history
zstyle ':completion:*' expand prefix suffix
zstyle ':completion:*:*files' ignored-patterns '*?.o' '*?~' '*\#'
zstyle ':completion:*' use-cache true
zstyle ':completion:*:*:-subscript-:*' tag-order indexes parameters
zstyle ':completion:*' select-prompt %SScrolling active: current selection at %p%s
# Directory completion: do not offer the parent/current directory again
zstyle ':completion:*:cd:*' ignore-parents parent pwd
export LS_COLORS='di=1;34:ln=36:so=32:pi=33:ex=32:bd=34;46:cd=34;43:su=30;41:sg=30;46:tw=30;42:ow=30;43'
zstyle ':completion:*' list-colors ${(s.:.)LS_COLORS}
# default: --
zstyle ':completion:*' list-separator '-->'
zstyle ':completion:*:manuals' separate-sections true
############################
# History Configuration
############################
## History wrapper (oh-my-zsh style interface around the 'fc' builtin)
# -c clears the history file; -l lists via 'fc' directly; otherwise lists
# either the requested range or the whole history from event 1.
function omz_history {
local clear list
zparseopts -E c=clear l=list
if [[ -n "$clear" ]]; then
# if -c provided, clobber the history file
echo -n >| "$HISTFILE"
echo >&2 History file deleted. Reload the session to see its effects.
elif [[ -n "$list" ]]; then
# if -l provided, run as if calling `fc' directly
builtin fc "$@"
else
# unless a number is provided, show all history events (starting from 1)
[[ ${@[-1]-} = *[0-9]* ]] && builtin fc -l "$@" || builtin fc -l "$@" 1
fi
}
# Timestamp format selected through $HIST_STAMPS
case ${HIST_STAMPS-} in
"mm/dd/yyyy") alias history='omz_history -f' ;;
"dd.mm.yyyy") alias history='omz_history -E' ;;
"yyyy-mm-dd") alias history='omz_history -i' ;;
"") alias history='omz_history' ;;
*) alias history="omz_history -t '$HIST_STAMPS'" ;;
esac
## History file configuration
[ -z "$HISTFILE" ] && HISTFILE="$HOME/.zsh_history"
HISTSIZE=50000
SAVEHIST=10000
## History command configuration
setopt extended_history # record timestamp of command in HISTFILE
setopt hist_expire_dups_first # delete duplicates first when HISTFILE size exceeds HISTSIZE
setopt hist_ignore_dups # ignore duplicated commands history list
setopt hist_ignore_space # ignore commands that start with space
setopt hist_verify # show command with history expansion to user before running it
setopt inc_append_history # add commands to HISTFILE in order of execution
setopt share_history # share command history data
#############################
## Key Bindings
# Make sure that the terminal is in application mode when zle is active, since
# only then values from $terminfo are valid
if (( ${+terminfo[smkx]} )) && (( ${+terminfo[rmkx]} )); then
function zle-line-init() {
echoti smkx
}
function zle-line-finish() {
echoti rmkx
}
zle -N zle-line-init
zle -N zle-line-finish
fi
# start typing + [Up-Arrow] - fuzzy find history forward
if [[ "${terminfo[kcuu1]}" != "" ]]; then
autoload -U up-line-or-beginning-search
zle -N up-line-or-beginning-search
bindkey "${terminfo[kcuu1]}" up-line-or-beginning-search
fi
# start typing + [Down-Arrow] - fuzzy find history backward
if [[ "${terminfo[kcud1]}" != "" ]]; then
autoload -U down-line-or-beginning-search
zle -N down-line-or-beginning-search
bindkey "${terminfo[kcud1]}" down-line-or-beginning-search
fi
if [[ "${terminfo[khome]}" != "" ]]; then
bindkey "${terminfo[khome]}" beginning-of-line # [Home] - Go to beginning of line
fi
if [[ "${terminfo[kend]}" != "" ]]; then
bindkey "${terminfo[kend]}" end-of-line # [End] - Go to end of line
fi
# load custom settings (machine-local overrides, not version controlled)
if [[ -f "${HOME}/.zshrc.local" ]]; then
source "${HOME}/.zshrc.local"
fi
# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
# __conda_setup="$('/usr/bin/conda' 'shell.zsh' 'hook' 2> /dev/null)"
# if [ $? -eq 0 ]; then
# eval "$__conda_setup"
# else
# if [ -f "/usr/etc/profile.d/conda.sh" ]; then
# . "/usr/etc/profile.d/conda.sh"
# else
# export PATH="/usr/bin:$PATH"
# fi
# fi
# unset __conda_setup
# <<< conda initialize <<<
| true |
e267e25c7f05d8d753c65224689540a84d4bf0e2 | Shell | ignaciomigliaro/computational_chemistry | /Scripts/homo-lumo.sh | UTF-8 | 124 | 2.828125 | 3 | [] | no_license | #!/bin/bash
# For every .out file in the current directory, print the file name followed
# by the values pulled from the last two " Alpha " matches — presumably the
# HOMO/LUMO orbital energies (TODO: confirm against the .out format).
# Fixed: iterating over $(ls *.out) word-splits names containing spaces and
# mangles them; a plain glob handles them correctly.
for out in *.out; do
    # With no matching files the glob stays literal; skip that case.
    [ -e "$out" ] || continue
    # grep -b: prefix matches with byte offsets; -1: one line of context;
    # awk keeps field 6 of the last two resulting lines.
    hl=$(grep -b -1 -i " Alpha " "$out" | tail -2 | awk '{print $6}')
    # $hl is deliberately unquoted so a multi-line value collapses onto the
    # same output line as the file name, matching the original output.
    echo "$out" $hl
done
| true |
cd782a92cbdba4974cf78110e8955d8e1862cd21 | Shell | calucas27/STIGHelper-RELEASE | /linux/linuxTemplate.sh | UTF-8 | 293 | 2.703125 | 3 | [] | no_license | #!/bin/bash
# Echo back the mode flag STIGHelper passed to this template.
#This variable contains the values from -audit or -enforce mode.
echo $1
#Commands for Linux can be entered below. Any .sh script that accepts -enforce and -audit as parameters can be integrated into STIGHelper.
#cp <|your_script|> <dir_path>/STIGHelper/linux/<|your_script|>.sh
20de6e28ce9456c21d89ef6d111edee6e41314db | Shell | ProBackup-nl/crashplan-qpkg | /src/qpkg/shared/htdocs/cgi-bin/backup.cgi | UTF-8 | 1,737 | 3.5625 | 4 | [] | no_license | #!/bin/sh
# Name of the QPKG whose configuration this CGI exports.
QPKG_NAME="CrashPlan"
# Filled in by find_base() below.
QPKG_BASE=""
QPKG_DIR=""
# Filename offered to the browser for the downloaded archive.
QPKG_BACKUP_NAME="${QPKG_NAME}.config.tgz"
# Locate the NAS data volume holding the Public share and derive the QPKG
# installation paths from it.
# Globals: QPKG_NAME (read); QPKG_BASE, QPKG_DIR (written)
# Exits:   1 when no Public share can be found
# Fixed: all parameter expansions are now quoted so paths containing spaces
# no longer break the tests (and '[ -z $var ]' no longer collapses to the
# one-argument form when the variable is empty).
find_base()
{
	# Preferred: ask the QNAP configuration for the Public share path.
	publicdir=$(/sbin/getcfg Public path -f /etc/config/smb.conf)
	if [ -n "$publicdir" ] && [ -d "$publicdir" ]; then
		# Split "/share/XXX_DATA/Public" into its first three components.
		publicdirp1=$(/bin/echo "$publicdir" | /bin/cut -d "/" -f 2)
		publicdirp2=$(/bin/echo "$publicdir" | /bin/cut -d "/" -f 3)
		publicdirp3=$(/bin/echo "$publicdir" | /bin/cut -d "/" -f 4)
		if [ -n "$publicdirp1" ] && [ -n "$publicdirp2" ] && [ -n "$publicdirp3" ]; then
			[ -d "/${publicdirp1}/${publicdirp2}/Public" ] && QPKG_BASE="/${publicdirp1}/${publicdirp2}"
		fi
	fi
	# Fallback: probe the well-known QNAP volume mount points.
	if [ -z "$QPKG_BASE" ]; then
		for datadirtest in /share/HDA_DATA /share/HDB_DATA /share/HDC_DATA /share/HDD_DATA /share/HDE_DATA /share/HDF_DATA /share/HDG_DATA /share/HDH_DATA /share/MD0_DATA /share/MD1_DATA /share/MD2_DATA /share/MD3_DATA; do
			[ -d "$datadirtest/Public" ] && QPKG_BASE="$datadirtest"
		done
	fi
	if [ -z "$QPKG_BASE" ] ; then
		echo "The Public share not found."
		exit 1
	fi
	QPKG_DIR="${QPKG_BASE}/.qpkg/${QPKG_NAME}"
}
find_base
# Emit the CGI/HTTP headers that force a file download; the blank line that
# terminates the header block comes from the trailing newline in the string.
echo "Content-Description: File Transfer
Content-Type: application/octet-stream
Content-Disposition: attachment; filename=\"$QPKG_BACKUP_NAME\"
Content-Transfer-Encoding: binary
Expires: 0
Cache-Control: must-revalidate, post-check=0, pre-check=0
Pragma: public
"
# Stream the QPKG's conf and log directories as a gzipped tar to stdout.
/bin/tar czf - $QPKG_DIR/conf $QPKG_DIR/log
| true |
e7d46eb0cfc7878bdb6ad46ec735ef3b1bf81543 | Shell | brandon-lockaby/2gltf2 | /2gltf2.sh | UTF-8 | 216 | 2.859375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Convert the given scene file to glTF 2.0 via a headless Blender run, or
# print the usage help when the argument count is wrong.
if [ "$#" != 1 ]; then
    echo To glTF 2.0 converter.
    echo Supported file formats: .abc .blend .dae .fbx. .obj .ply .stl .wrl .x3d
    echo
    echo 2gltf2.sh [filename]
else
    # -b: background (no UI); 2gltf2.py performs the actual export.
    blender -b -P 2gltf2.py -- "$1"
fi
| true |
03d21e0edc8ee5842d1059ac40e98db7358c9d1f | Shell | jrtomps/tunl_soft | /scripts/release_CODA.sh | UTF-8 | 1,368 | 3.265625 | 3 | [] | no_license |
# Environment setup for the TUNL release build tree. Meant to be sourced;
# exports the RELEASE_* paths and extends PATH / LD_LIBRARY_PATH.

# set RELEASE home (default to $HOME when RELEASE_TOP_DIR is unset/invalid)
if [ ! -d "$RELEASE_TOP_DIR" ] ; then
echo ${HOME}
export RELEASE_TOP_DIR=${HOME}
fi

# specify the version of the build to use; RELEASE_MY overrides the default
if [ ! -d "$RELEASE_MY" ] ; then
export RELEASE_BUILD=${RELEASE_TOP_DIR}/tunl_soft
# Fixed: the original piped 'test -d' into mkdir ('|'), discarding the test
# result; '||' creates the directory only when it is actually missing.
test -d ${RELEASE_TOP_DIR}/tunl_soft || mkdir -p ${RELEASE_TOP_DIR}/tunl_soft
else
export RELEASE_BUILD=${RELEASE_MY}
fi

# make sure ROOT is available
# NOTE(review): 'exit' terminates the *sourcing* shell too — confirm whether
# 'return' was intended for interactive use.
if [ ! -d "$ROOTSYS" ] ; then
echo ROOTSYS variable is not set.... EXITING...
exit
fi

# Fill in OS/ARCH/OSNAME only when the caller has not already provided them.
# Fixed: the original tested the single word "X$OS=X" (always true, so the
# variables were unconditionally overwritten), and "XARCH=X" was even missing
# the '$'. These are now proper "X...= X" equality comparisons.
if [ "X$OS" = "X" ] ; then
export OS=`uname`
fi
if [ "X$ARCH" = "X" ] ; then
export ARCH=`uname -m`
fi
if [ "X$OSNAME" = "X" ] ; then
export OSNAME=${OS}-${ARCH}
fi

# set common environment variables
export RELEASE_SCRIPTS=${RELEASE_BUILD}/scripts
export RELEASE_BIN=${RELEASE_BUILD}/bin/${OSNAME}
export RELEASE_LIB=${RELEASE_BUILD}/lib/${OSNAME}
export RELEASE_SLIB=${RELEASE_BUILD}/slib/${OSNAME}
export RELEASE_SRC=${RELEASE_BUILD}/source
export RELEASE_INC=${RELEASE_SRC}/include
export RELEASE_CMS=${RELEASE_BUILD}/CMS
export RELEASE_CONFIG_PATH=${RELEASE_SRC}/programs/coda2root/config/ro_config.xml

# Add RELEASE_BIN to the PATH
export PATH=${PATH}:${RELEASE_BIN}

# prepend our library dirs to LD_LIBRARY_PATH
# Fixed: the original used '[ ! -d $LD_LIBRARY_PATH ]' on a colon-separated
# list, which never behaved as an "is it empty?" test; '-z' expresses the
# intent directly.
if [ -z "$LD_LIBRARY_PATH" ] ; then
export LD_LIBRARY_PATH=${RELEASE_SLIB}:${RELEASE_LIB}
else
export LD_LIBRARY_PATH=${RELEASE_SLIB}:${RELEASE_LIB}:${LD_LIBRARY_PATH}
fi
| true |
6c2406fc6d3983726964dc0bb723817849ec0b46 | Shell | dptools/process_offsite_audio | /individual_modules/run_transcription_review_alerts.sh | UTF-8 | 2,969 | 3.734375 | 4 | [] | no_license | #!/bin/bash
# Walks every patient's interview folders and appends transcripts that are
# still awaiting manual redaction review to the site alert email body.
# will call this module with arguments in main pipeline, root then study
data_root="$1"
study="$2"
# this particular script it doesn't make any sense to call from outside the pipeline
if [[ -z "${repo_root}" ]]; then
exit
fi

# Append every transcript under <type>/transcripts/prescreening that has no
# approved counterpart one level up to the alert email body, creating the
# body file (with its intro lines) on first use. Must be called from a
# patient's interviews/ folder; it cds back up on completion.
# (The open/ and psychs/ stanzas were previously duplicated verbatim.)
flag_pending_transcripts() {
local interview_type="$1"
[[ -d "$interview_type"/transcripts/prescreening ]] || return 0
cd "$interview_type"/transcripts/prescreening
for file in *.txt; do
# make sure we don't get errors related to file not found if folder is empty
if [[ ! -e $file ]]; then
continue
fi
if [[ ! -e ../${file} ]]; then # this means there is a transcript pending review!
if [[ ! -e ${repo_root}/site_review_email_body.txt ]]; then # first one flagged: add the intro
echo "Action required - transcripts needing manual review!" > "$repo_root"/site_review_email_body.txt
echo "The following is a list of transcripts still pending redaction review for this site:" >> "$repo_root"/site_review_email_body.txt
echo "(note that there may be an ~1 day lag between correctly approving a transcript and it being registered by the interview pipeline)" >> "$repo_root"/site_review_email_body.txt
fi
# now add the file name to the list, set up like bullet points
echo " * ${file}" >> "$repo_root"/site_review_email_body.txt
fi
done
cd ../../.. # leave the transcription folder once loop is done
}

# move to study's processed folder to loop over patients
cd "$data_root"/PROTECTED/"$study"/processed
for p in *; do
# first check that it is truly a patient ID that has interview data
if [[ ! -d $p/interviews ]]; then
continue
fi
cd "$p"/interviews
flag_pending_transcripts open
flag_pending_transcripts psychs
# return to study root once done with the patient's transcripts
cd "$data_root"/PROTECTED/"$study"/processed
done | true |
e12ae419c26c64003765f54f2937ac474f3f2895 | Shell | pr0d1r2/ruby_dev_shell_aliases | /bepc_changed_vs_origin_master.sh | UTF-8 | 640 | 3.484375 | 3 | [
"MIT"
] | permissive | function bepc_changed_vs_origin_master() {
local bepc_changed_vs_origin_master_FILE
local bepc_changed_vs_origin_master_FILES_EXISTING=()
# Collect changed *.feature files (vs origin/master) that still exist on disk.
for bepc_changed_vs_origin_master_FILE in `git_files_changed_vs_origin_master | grep features | grep ".feature$"`
do
if [ -f $bepc_changed_vs_origin_master_FILE ]; then
# NOTE(review): '+=' without parentheses appends the string to element 0,
# concatenating paths with no separator; '+=( ... )' was probably intended.
bepc_changed_vs_origin_master_FILES_EXISTING+=$bepc_changed_vs_origin_master_FILE
fi
done
# NOTE(review): 'echo ... | wc -l' prints at least 1 even when the array is
# empty, so the else branch below looks unreachable — confirm intent.
if [ `echo $bepc_changed_vs_origin_master_FILES_EXISTING | wc -l` -gt 0 ]; then
echorun bepc `echo $bepc_changed_vs_origin_master_FILES_EXISTING | tr "\n" " "`
else
echo
echo "$0: nothing to run"
fi
}
| true |
1b8e51ae164c38722cdf75a24e26c05eb39cb13e | Shell | hyoseo/telegram-bot | /sh_scripts/start.sh | UTF-8 | 556 | 3.171875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Resolve the directory this script lives in (following one symlink level,
# if any) and run the bot from the project root one level above it.
SRC_PATH="$(readlink "${BASH_SOURCE[0]}" || echo "${BASH_SOURCE[0]}")"
RUN_PATH=${SRC_PATH%/*}/..
cd "$RUN_PATH" || exit

# Export whichever of TOKEN / WEB_PORT / LOG_LEVEL were supplied, then start
# the bot detached with its output discarded. Arguments beyond the third are
# ignored, exactly as before.
# Fixed: the nohup launch line was duplicated verbatim in three case arms.
if [ $# -eq 0 ]; then
echo "you must input telegram bot token at least"
else
export TOKEN=$1
[ $# -ge 2 ] && export WEB_PORT=$2
[ $# -ge 3 ] && export LOG_LEVEL=$3
nohup ./venv/bin/python3 main.py 1> /dev/null 2>&1 &
fi
| true |
9089194eb21067ca5223ea17a68cb34bb6d59ebf | Shell | craftingjava/docker-suitecrm | /bootstrap/suitecrm-init.sh | UTF-8 | 5,574 | 3.34375 | 3 | [] | no_license | #!/bin/bash
set -eu
# Fixed locations inside the container image.
readonly SUITECRM_HOME="/var/www/html"
readonly CONFIG_SI_FILE="${SUITECRM_HOME}/config_si.php"
readonly CONFIG_FILE="${SUITECRM_HOME}/config.php"
readonly CONFIG_OVERRIDE_FILE="${SUITECRM_HOME}/config_override.php"
# Tunables, overridable through the environment (docker -e / compose).
CURRENCY_ISO4217="${CURRENCY_ISO4217:-USD}"
CURRENCY_NAME="${CURRENCY_NAME:-US Dollar}"
DATE_FORMAT="${DATE_FORMAT:-d-m-Y}"
EXPORT_CHARSET="${EXPORT_CHARSET:-ISO-8859-1}"
DEFAULT_LANGUAGE="${DEFAULT_LANGUAGE:-en_us}"
DB_ADMIN_PASSWORD="${DB_ADMIN_PASSWORD:-dbpasswd}"
DB_ADMIN_USERNAME="${DB_ADMIN_USERNAME:-dbadmin}"
DATABASE_NAME="${DATABASE_NAME:-suitecrmdb}"
DATABASE_TYPE="${DATABASE_TYPE:-mysql}"
DATABASE_HOST="${DATABASE_HOST:-mysqldb}"
POPULATE_DEMO_DATA="${POPULATE_DEMO_DATA:-false}" # Not yet implemented
SITE_USERNAME="${SITE_USERNAME:-admin}"
SITE_PASSWORD="${SITE_PASSWORD:-password}"
SITE_URL="${SITE_URL:-http://localhost}"
SYSTEM_NAME="${SYSTEM_NAME:-Zentek CRM}"
## Built in functions ##
# Render the SuiteCRM silent-install configuration (config_si.php) from the
# environment variables above, hand it to www-data, and echo it for the
# container log. The heredoc is unquoted, so ${...} values are expanded by
# the shell; \$sugar_config_si stays literal for PHP.
write_suitecrm_config() {
echo "Write config_si file..."
cat <<EOL > ${CONFIG_SI_FILE}
<?php
\$sugar_config_si = array (
'dbUSRData' => 'create',
'default_currency_iso4217' => '${CURRENCY_ISO4217}',
'default_currency_name' => '${CURRENCY_NAME}',
'default_currency_significant_digits' => '2',
'default_currency_symbol' => '$',
'default_date_format' => '${DATE_FORMAT}',
'default_decimal_seperator' => '.',
'default_export_charset' => '${EXPORT_CHARSET}',
'default_language' => '${DEFAULT_LANGUAGE}',
'default_locale_name_format' => 's f l',
'default_number_grouping_seperator' => ',',
'default_time_format' => 'H:i',
'export_delimiter' => ',',
'setup_db_admin_password' => '${DB_ADMIN_PASSWORD}',
'setup_db_admin_user_name' => '${DB_ADMIN_USERNAME}',
'setup_db_create_database' => 1,
'setup_db_database_name' => '${DATABASE_NAME}',
'setup_db_drop_tables' => 0,
'setup_db_host_name' => '${DATABASE_HOST}',
'setup_db_pop_demo_data' => false,
'setup_db_type' => '${DATABASE_TYPE}',
'setup_db_username_is_privileged' => true,
'setup_site_admin_password' => '${SITE_PASSWORD}',
'setup_site_admin_user_name' => '${SITE_USERNAME}',
'setup_site_url' => '${SITE_URL}',
'setup_system_name' => '${SYSTEM_NAME}',
);
EOL
chown www-data:www-data ${CONFIG_SI_FILE}
# Echo the generated file so it shows up in 'docker logs'.
echo "---[ start ${CONFIG_SI_FILE} ]---"
cat ${CONFIG_SI_FILE}
echo "---[ end ${CONFIG_SI_FILE} ]---"
}
# Ensure the OAuth2 RSA key pair for the SuiteCRM V8 API exists: prefer
# docker secrets when mounted, otherwise generate a fresh 2048-bit pair.
# Runs relative to the current directory (Api/V8/OAuth2 under it) and
# restores the previous directory via 'cd -' (which also prints it).
write_suitecrm_oauth2_keys() {
if cd Api/V8/OAuth2 ; then
# Only act when no private key is present yet.
if [[ ! -e private.key ]] ; then
if [[ -e /run/secrets/suitecrm_oauth2_private_key ]] ; then
echo "OAuth2 keys are now docker secrets"
# -T: treat the link name as a normal file; -f: replace an existing one.
ln -Tsf /run/secrets/suitecrm_oauth2_private_key private.key
ln -Tsf /run/secrets/suitecrm_oauth2_public_key public.key
else
echo "Generating new OAuth2 keys"
rm -f private.key public.key
openssl genrsa -out private.key 2048 && \
openssl rsa -in private.key -pubout -out public.key
chmod 600 private.key public.key
chown www-data:www-data private.key public.key
fi
fi
cd -
fi
}
# Block until the MySQL port on ${DATABASE_HOST} accepts TCP connections,
# polling every 3 seconds.
# Globals: DATABASE_HOST (read)
check_mysql() {
    # </dev/null keeps nc from consuming the script's stdin while probing;
    # the host is quoted to avoid word-splitting of unusual values.
    until nc -w1 "${DATABASE_HOST}" 3306 </dev/null; do
        # Report before sleeping so the first status line appears
        # immediately instead of after a silent 3-second delay.
        echo "Using DB host: ${DATABASE_HOST}"
        echo "Waiting for MySQL to come up..."
        sleep 3
    done
    echo "MySQL is available now."
}
## Main program ##
echo "SYSTEM_NAME: ${SYSTEM_NAME}"
echo "SITE_URL: ${SITE_URL}"
# Generate OAuth keys
write_suitecrm_oauth2_keys
# Waiting for DB to come up
check_mysql
# Folder conf.d might be a mounted volume containing existing configuration files
# Let's (re)link the config files which might be there or create empty ones
if [ ! -f ${CONFIG_FILE} -o ! -f ${CONFIG_OVERRIDE_FILE} ]; then
echo "Configuration files have not been found, let's check conf.d..."
touch ${SUITECRM_HOME}/conf.d/config.php
ln -sf ${SUITECRM_HOME}/conf.d/config.php ${CONFIG_FILE}
echo "---[ start ${CONFIG_FILE} ]---"
cat ${CONFIG_FILE}
echo "---[ end ${CONFIG_FILE} ]---"
touch ${SUITECRM_HOME}/conf.d/config_override.php
ln -sf ${SUITECRM_HOME}/conf.d/config_override.php ${CONFIG_OVERRIDE_FILE}
echo "---[ start ${CONFIG_OVERRIDE_FILE} ]---"
cat ${CONFIG_OVERRIDE_FILE}
echo "---[ end ${CONFIG_OVERRIDE_FILE} ]---"
fi
# Run silent install only if config files are actually empty (zero lines)
if [ $(cat ${CONFIG_FILE} | wc -l) -eq 0 -o $(cat ${CONFIG_OVERRIDE_FILE} | wc -l) -eq 0 ]; then
echo "Configuring SuiteCRM for first run..."
write_suitecrm_config
echo "##################################################################################"
echo "##Running silent install, will take a couple of minutes, so go and take a tea...##"
echo "##################################################################################"
chown www-data:www-data -R ${SUITECRM_HOME}/conf.d
chown www-data:www-data ${SUITECRM_HOME}/config*.php
# Run the installer as www-data; the quoted heredoc delimiter keeps the PHP
# below literal (no shell expansion) and feeds it to php on stdin.
su www-data -s /bin/sh -c php <<'__END_OF_INSTALL_PHP__'
<?
$_SERVER['HTTP_HOST'] = 'localhost';
$_SERVER['REQUEST_URI'] = 'install.php';
$_SERVER['SERVER_SOFTWARE'] = 'Apache';
$_REQUEST = array('goto' => 'SilentInstall', 'cli' => true);
require_once 'install.php';
?>
__END_OF_INSTALL_PHP__
echo "Silent install completed."
fi
echo "##################################################################################"
echo "##SuiteCRM is ready to use, enjoy it##############################################"
echo "##################################################################################"
# Hand over to the web server in the foreground (PID 1 semantics).
apache2-foreground
# End of file
# vim: set ts=2 sw=2 noet:
| true |
7164a33d5a305cd3bcd9732bb91eb29c10428468 | Shell | Cloudxtreme/mupi | /mupi.main | UTF-8 | 2,597 | 3.859375 | 4 | [] | no_license | #!/bin/bash
##########################################################################
#
# mupi main functions
#
# By Alan Hayward, 2006
#
# Internal mpui functions. Do not run directly
#
##########################################################################
# Warn anyone who executes this file directly; it is meant to be sourced.
echo "Internal mpui functions. Do not run directly";
##############################################################################
##Deal with command line
# Parse options (-d debug, -s strace) and take the executable path from the
# first remaining argument into the global EXE.
# Globals: DEBUG, STRACE, EXE (written); USAGE (read — presumably defined by
#          the sourcing script, TODO confirm)
commandLine()
{
# optstring "ds?": getopts reports unknown flags via the '?' case below.
while getopts ds? line
do
case $line in
d) DEBUG=1;;
s) STRACE=1;;
?) echo $USAGE; exit;;
esac
done;
# Drop the parsed options so $1 is the executable path.
shift `expr $OPTIND - 1`;
# ${1:?...}: abort with the usage text when no argument is left.
EXE=${1:?$USAGE};
if [ ! -x $EXE ];
then
echo $USAGE;
exit;
fi;
}
##############################################################################
##System calls
# Define the STRACE/STRACERESULT tracing hooks. When the -s flag was given
# (global STRACE set) they print an strace-style "name(arg,...)=result" line
# on stderr, resolving each SYSCALLn register via getRegVal (defined in
# another mupi module — TODO confirm); otherwise they are no-ops.
initStrace()
{
#Stracing support
if [ $STRACE ];
then
STRACE() #syscallname, numberParams, string...
{
local NAME=$1;
local NUMBER=$2;
shift; shift;
printf "Strace::$NAME(" >&2;
# Print the first NUMBER syscall arguments, comma separated.
case $NUMBER in
6) printf "$(getRegVal $SYSCALL1),$(getRegVal $SYSCALL2),$(getRegVal $SYSCALL3),$(getRegVal $SYSCALL4),$(getRegVal $SYSCALL5),$(getRegVal $SYSCALL6)" >&2;;
5) printf "$(getRegVal $SYSCALL1),$(getRegVal $SYSCALL2),$(getRegVal $SYSCALL3),$(getRegVal $SYSCALL4),$(getRegVal $SYSCALL5)" >&2;;
4) printf "$(getRegVal $SYSCALL1),$(getRegVal $SYSCALL2),$(getRegVal $SYSCALL3),$(getRegVal $SYSCALL4)" >&2;;
3) printf "$(getRegVal $SYSCALL1),$(getRegVal $SYSCALL2),$(getRegVal $SYSCALL3)" >&2;;
2) printf "$(getRegVal $SYSCALL1),$(getRegVal $SYSCALL2)" >&2;;
1) printf "$(getRegVal $SYSCALL1)" >&2;;
esac
printf ")$@" >&2;
}
STRACERESULT() #String...
{
printf " =$@\n" >&2;
}
else
# Tracing disabled: both hooks collapse to no-ops.
STRACE() #syscallname, number params, string...
{
return;
}
STRACERESULT() #String...
{
return;
}
fi;
}
##############################################################################
## setNextInstruction -- remember where execution should resume
# Stores the given address in the global NEXTINSTRUCTION, which the main
# loop reads back to pick the instruction to decode next.
setNextInstruction() # $1 - address of the next instruction
{
NEXTINSTRUCTION="$1"
}
##############################################################################
##Main Loop
# Top-level emulator driver: parse the command line, then repeatedly
# disassemble and execute instructions starting from the program's entry
# point. disassemble/performInstruction/getStartAddress/dumpRegs etc. are
# expected from the other sourced mupi modules (not visible here).
# Globals: ADDRESS, NEXTINSTRUCTION, RUNNING, NUMBER_EXECUTED,
#          NUMBER_DISASSEMBLED (written); DEBUG (read); LENGTH is
#          presumably set by disassemble — TODO confirm.
run()
{
commandLine $@;
initStrace;
NUMBER_EXECUTED=0;
NUMBER_DISASSEMBLED=0;
ADDRESS=$(getStartAddress);
loadInitialMemoryAddresses;
createStack;
RUNNING=1;
# NOTE(review): '[ $RUNNING ]' is true for any non-empty value, so the loop
# ends only when RUNNING is unset/emptied elsewhere.
while [ $RUNNING ];
do
disassemble;
# Default fall-through target: the instruction right after this one.
setNextInstruction $((ADDRESS+LENGTH));
if [ $DEBUG ];
then
echo >&2;
dumpRegs;
echo >&2;
dumpInstruction;
NUMBER_EXECUTED=$((NUMBER_EXECUTED+1));
fi;
performInstruction;
# Branch instructions may have overridden NEXTINSTRUCTION above.
ADDRESS=$NEXTINSTRUCTION;
done;
}
| true |
ef9f6d4ef374c57bb202d81e52b8f237219a5f57 | Shell | StoneIceUNFORGETABLE222/archlinux-community | /python-requests-hawk/repos/community-any/PKGBUILD | UTF-8 | 1,159 | 2.578125 | 3 | [] | no_license | # $Id$
# Maintainer: Balló György <ballogyor+arch at gmail dot com>
# Split package: builds both the Python 2 and Python 3 variants from one
# upstream tarball.
_pkgbase=requests-hawk
pkgbase=python-requests-hawk
pkgname=('python2-requests-hawk' 'python-requests-hawk')
pkgver=1.0.0
pkgrel=2
pkgdesc="Hawk authentication strategy for the requests python library"
arch=('any')
url="https://github.com/mozilla-services/requests-hawk"
license=('Apache')
depends=('python-mohawk' 'python-requests' 'python2-mohawk' 'python2-requests')
source=("$pkgbase-$pkgver.tar.gz::https://github.com/mozilla-services/$_pkgbase/archive/$pkgver.tar.gz")
sha256sums=('6d1be6fab72659952069048081b26e0d536f3d2960640452943cd347a1b2e117')
prepare() {
# Duplicate the unpacked source tree so the Python 2 and Python 3 builds
# cannot interfere with each other.
cp -a "${_pkgbase}-${pkgver}" "${_pkgbase}-${pkgver}-py2"
}
# Build both variants in their own trees (see prepare()).
build() {
# Building Python2
cd $_pkgbase-$pkgver-py2
python2 setup.py build
# Building Python3 (path is relative to the py2 tree entered above)
cd ../$_pkgbase-$pkgver
python3 setup.py build
}
# Package the Python 2 variant; narrows depends to the py2 libraries only.
package_python2-requests-hawk() {
depends=('python2-mohawk' 'python2-requests')
cd $_pkgbase-$pkgver-py2
python2 setup.py install --root="$pkgdir" --optimize=1
}
# Package the Python 3 variant; narrows depends to the py3 libraries only.
package_python-requests-hawk() {
depends=('python-mohawk' 'python-requests')
cd $_pkgbase-$pkgver
python3 setup.py install --root="$pkgdir" --optimize=1
}
| true |
74228d42cdd40783184b0ad884c3f39a06613116 | Shell | zhaozhixu/float2pow2 | /scripts/float2pow2.sh | UTF-8 | 1,992 | 3.921875 | 4 | [] | no_license | #! /bin/bash
# Quantize float data files to power-of-two values in three stages:
#  1. datafilter.pl drops the smallest <portion>% of the data,
#  2. convert.py snaps the remainder to powers of two in [pow_low, pow_high],
#  3. reshape.pl restores the original layout.
# The *_float / *_convert intermediates are removed at the end.

# Print the command-line help.
# Fixed: this 9-line text was previously duplicated verbatim in the
# no-argument path and in the -h/--help case.
print_usage() {
echo "usage: $0 [-h] -p <portion> -b <bitwidth> -r <pow_low> <pow_high> -f <files>"
echo " "
echo "options:"
echo "-h, --help show this message"
echo "-p, --portion <portion> portion to drop small data (in percentage 0~100)"
echo "-b, --bitwidth <bitwidth> bitwidth"
echo "-r, --range <pow_low> <pow_high> low and high end of power"
echo "-f, --file <files> files to process"
}

if [ $# -eq 0 ]; then
print_usage
exit 0
fi

# Defaults for the tunable parameters.
portion="0"
bitwidth="4"
pow_low="-7"
pow_high="3"

while [ $# -gt 0 ]; do
case "$1" in
-h|--help)
print_usage
exit 0;;
-p|--portion)
portion=$2
shift
shift;;
-b|--bitwidth)
bitwidth=$2
shift
shift;;
-r|--range)
pow_low=$2
pow_high=$3
shift
shift
shift;;
-f|--file)
# Everything after -f is the space-separated list of input files.
shift
file=$@
break;;
*)
# Unknown option: bail out quietly (preserved original behaviour).
exit 0
break;;
esac
done

# $file / $filepy / $filesh are intentionally unquoted below: they hold
# space-separated lists that must word-split into individual arguments.
perl ./scripts/datafilter.pl $portion $file
filepy=""
for f in $file
do
filepy="$filepy ""$f""_float"
done
python ./scripts/convert.py $bitwidth $pow_low $pow_high $filepy
perl ./scripts/reshape.pl $file
filesh=""
for f in $filepy
do
filesh="$filesh ""$f""_convert"
done
printf "Removing temp files..."
rm -f $filepy $filesh
printf "done\n"
| true |
c2a28a27bda2813f34866a4d5292cd2cd45712bd | Shell | scheasbro/dropbox.sh-sdc | /dropbox.sh | UTF-8 | 2,314 | 4.125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
#Thx, http://stackoverflow.com/questions/20723868/batch-rename-dropbox-conflict-files
#Point to where you want the script to look and where it should backup files it replaces
folder=/Volumes/Seagate/Dropbox
backup=~/Desktop/Script_Backup
#colors
red='\033[0;31m'
purple='\033[1;35m'
NC='\033[0m' # No Color
clear
echo "This script will climb through the $folder tree and repair conflict files by deleting the OLDER of the the conflicted file and its counterpart"
if [[ $1 == "replace" ]]; then
echo -e "${red}This is NOT a drill.${NC} The script will backup the older of the conflicted files and then delete it from the Dropbox directory."
else
echo -e "${purple}This is a dry run.${NC} You'll see what files would be replaced. Run \"./Dropbox.sh replace\" to make it run for real."
fi
echo "Press any key to continue..."
echo "------------------------------"
read -n 1
find $folder -type f -print0 | while read -d $'\0' file; do
newname=$(echo "$file" | sed 's/ (.*conflicted copy.*)//')
if [ "$file" != "$newname" ]; then
if [ -f "$newname" ];then
# determine which is newer
if [ "$newname" -nt "$file" ]; then
echo "$newname is NEWER than $file"
file_to_move="$file"
file_to_keep="$newname"
else
echo "$newname is OLDER than $file"
file_to_move="$newname"
file_to_keep="$file"
fi
backupname=${newname/"$folder"/"$backup"}
if [[ $1 != "replace" ]]; then
echo "Would have moved $file_to_move to $backupname"
else
echo "Moving $file_to_move to $backupname"
mkdirp "$(dirname "$backupname")"
cp "$file_to_move" "$backupname"
mv "$file_to_keep" "$newname"
fi
else
# if the unconflicted version isn't there for some reason, just rename the original
if [[ $1 != "replace" ]]; then
echo "Didn't see an unconflicted counterpart for $file, so would have just renamed file"
else
echo "Didn't see an unconflicted counterpart for $file, so will replace"
mv "$file" "$newname"
fi
fi
fi
done
| true |
1aebaf5959d8c856041318f774545f68d4684524 | Shell | unbleavabl/dotfiles | /chunkwm/scripts/misc/restart.sh | UTF-8 | 526 | 3.3125 | 3 | [] | no_license | #!/usr/bin/env bash
function main
{
! { source "${BASH_SOURCE[0]//${0##*/}}../display/notify.sh" && \
source "${BASH_SOURCE[0]//${0##*/}}../display/format.sh"; } && \
exit 1
title_parts=("chunkwm")
subtitle_parts=()
message_parts=("Restarting chunkwm")
title="$(format "${title_parts[@]}")"
subtitle="$(format "${subtitle_parts[@]}")"
message="$(format "${message_parts[@]}")"
notify "${title:-}" "${subtitle:-}" "${message:-}"
brew services restart chunkwm
}
main
| true |
910d0a3bf88668edec91b2ca18390982b69c0f5d | Shell | Galphaa/xsg-RPM-build | /.variable_build.sh | UTF-8 | 4,534 | 3.8125 | 4 | [] | no_license | #!/usr/bin/env bash
# Prepares sources for RPM installation
PATH=$PATH:/usr/local/bin
#
# Currently Supported Operating Systems:
#
# CentOS 6, 7
#
# Defning return code check function
check_result() {
if [ $1 -ne 0 ]; then
echo "Error: $2"
exit $1
fi
}
build_signed_rpm() {
SPEC_FILE="$1"
TARGET="$2"
rpmbuild -bb -v --sign --clean --target ${TARGET} ${WORKING_DIR}/rpmbuild/SPECS/${SPEC_FILE}
#rpmbuild -bb -v ${WORKING_DIR}/rpmbuild/SPECS/${SPEC_FILE}
#expect -exact "Enter pass phrase: "
#send -- "blank\r"
#expect eof
}
targ="$3"
version=`date +%Y%m%d`
release=`date +%H%M%S`
# Creating variable for future changing if needed (Dowloaded hariskon/nagios-plugins reposioty and script we need is located in nagios-plugins dirs
nagios_plugins="nagios-plugins"
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
WORKING_DIR=`mktemp -d -p /tmp`
check_result $? "Cant create TMP Dir"
nagios_plugins=nagios-plugins
cd $WORKING_DIR
git clone --recursive https://github.com/HariSekhon/nagios-plugins.git > /dev/null 2>&1
check_result $? "Can't cloning from git repo"
mkdir rpmbuild
check_result $? "Can't creating rpmbuild dir"
cd rpmbuild
mkdir {BUILD,RPMS,SOURCES,SPECS,SRPMS,tmp}
check_result $? "Can't creat rpmbuilding sub dirs"
cd ../
mkdir ${WORKING_DIR}/${targ}-${version}
check_result $? "Cant create TMP Version Dir"
mkdir -p ${CURRENT_DIR}/rpmbuild/SOURCES/
check_result $? "Unable Create Source Folder"
mkdir -p ${CURRENT_DIR}/rpmbuild/SPECS/
check_result $? "Unable Create SPECS Folder"
cd $CURRENT_DIR
## copping spec files from my repo
wget https://raw.githubusercontent.com/Galphaa/xsg-RPM-build/master/file.spec > /dev/null 2>&1
check_result $? "Cant download spec file from my repo"
mv file.spec ${targ}.spec
check_result $? "Problem with renameing spec file to "$targ""
## changeed mv to cp
cp ${targ}.spec ${WORKING_DIR}/rpmbuild/SPECS/
check_result $? "Can't moving $targ Spec to rpm/SPEC/ "
cp ${targ}.spec ${CURRENT_DIR}/rpmbuild/SPECS/
check_result $? "Unable Copy RPM Config"
mkdir -p usr/lib64/nagios/plugins/
mkdir -p /usr/lib64/nagios/plugins/
cp ${WORKING_DIR}/${nagios_plugins}/${targ} usr/lib64/nagios/plugins/
cp ${WORKING_DIR}/${nagios_plugins}/${targ} /usr/lib64/nagios/plugins/
cd /usr/lib64/nagios/plugins/"${targ}-$version"
chmod -x "${targ}"
cd -
cd usr/lib64/nagios/plugins/"${targ}-$version"
chmod -x "${targ}"
cd -
cp -R usr $WORKING_DIR/${targ}-${version}
#cp -R etc $WORKING_DIR/${targ}-${version}
cd $WORKING_DIR
tar zcvf "${targ}-${version}".tar.gz ${targ}-${version}
check_result $? "Problem with compressing Downloaded scpript ("$targ") to tar.gz format"
## changed mv to cp
cp "${targ}-${version}".tar.gz ${CURRENT_DIR}/rpmbuild/SOURCES/
check_result $? "Unable Copy Sources"
cp ${CURRENT_DIR}/${targ}.spec ${CURRENT_DIR}/rpmbuild/SPECS/
check_result $? "Unable Copy RPM Config"
cd $CURRENT_DIR/rpmbuild
cp SOURCES/"${targ}-${version}".tar.gz "${WORKING_DIR}"/rpmbuild/SOURCES/
check_result $? "Problem with moving"
echo "Setting versions information in SPEC files"
sed -i -- "s/__NAME__/${targ}/g" ${WORKING_DIR}/rpmbuild/SPECS/${targ}.spec
sed -i -- "s/__VERSION__/${version}/g" ${WORKING_DIR}/rpmbuild/SPECS/${targ}.spec
sed -i -- "s/__RELEASE__/${release}/g" ${WORKING_DIR}/rpmbuild/SPECS/${targ}.spec
sed -i -- "s/__NAME__/${targ}/g" ${WORKING_DIR}/rpmbuild/SPECS/${targ}.spec
sed -i -- "s|__PATH__|"/usr/lib64/nagios/plugins/${targ}"|g" ${WORKING_DIR}/rpmbuild/SPECS/${targ}.spec
## changing macro to our custom rpmmacros
cd ~
wget https://raw.githubusercontent.com/Galphaa/xsg-RPM-build/master/beta_.rpmmacros
cp .rpmmacros before_.rpmmacros
mv beta_.rpmmacros .rpmmacros
sed -i -- "s|__PATH__|${WORKING_DIR}|g" .rpmmacros
cd -
## Begining RPM building
build_signed_rpm $1 $2
check_result $? "Problem with prmbuild tool. (last section of building of RPM package)"
##moving RPM build file to script location
mv ${WORKING_DIR}/rpmbuild/RPMS/x86_64/${targ}-${version}-${release}.x86_64.rpm ${CURRENT_DIR}/build/
check_result $? "Problem with moving RPM package to script file location/build directory)"
#returning old macro
cd -
rm .rpmmacros
mv before_.rpmmacros .rpmmacros
## removing garbage and preprearing for new sesion
rm -rf $CURRENT_DIR/usr/*
rm -rf $CURRENT_DIR/usr/lib64/nagios/plugins/*
rm -f $CURRENT_DIR/${targ}.spec
rm -f $CURRENT_DIR/rpmbuild/SPECS/*
rm -f $CURRENT_DIR/rpmbuild/SOURCES/*
echo "Mission Accomplished"
| true |
70bef0cb6f7fe6e7fa8489ae5c74dc2bd80f6643 | Shell | bashfunc/bashTool | /archives/uninstall | UTF-8 | 127 | 2.59375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
echo "Removing files..."
if [ -f /usr/local/bin/extract ]; then
$(sudo rm /usr/local/bin/extract)
fi
echo "Done." | true |
48624a952921c18495bd68c71b26b33af60a7322 | Shell | paulstuart/imgman | /systemd/imgman | UTF-8 | 356 | 3.34375 | 3 | [
"BSD-2-Clause"
] | permissive | #!/bin/bash
[[ -f /etc/sysconfig/imgman ]] && . /etc/sysconfig/imgman
IMGHOST=${IMGHOST:-127.0.0.1}
IMGPORT=${IMGPORT:-9999}
IMGMAC=$(ethtool -P eth0 | awk '{print $3}')
case $1 in
start) MSG=BOOT ;;
stop) MSG=SHUTDOWN ;;
*) echo >&2 "invalid command: $1"; exit 1 ;;
esac
echo "- - $MSG $IMGMAC" > /dev/udp/$IMGHOST/$IMGPORT
| true |
90ef1ae142f72b7bd64ade4a9640506505d6ed78 | Shell | alefisico/bril | /bril/scripts/dippub/test/runall.sh | UTF-8 | 418 | 3.015625 | 3 | [] | no_license | #!/bin/bash
#
# Starts all xDAQ applications in one shot.
# Log files will be created in current directory.
#
# determine this script's directory
pushd "`dirname $0`" >/dev/null
base=`pwd`
popd >/dev/null
$base/start_eventingBus.sh >eventing.log 2>&1 &
$base/start_lumiHFHistoSaver.sh >saver_histo.log 2>&1 &
#$base/start_lumiHFLumiSaver.sh >saver_lumi.log 2>&1 &
$base/start_lumiHFReadout.sh >readout.log 2>&1 &
| true |
3cb473758b68edfa1a326ecca9267e9abafcb202 | Shell | mitchierichie/nr-instance-setup | /dist.sh | UTF-8 | 8,537 | 3.84375 | 4 | [] | no_license | #!/usr/bin/env bash
FORMAT="--dev-slug=my-slug --site-url=www.domain.com"
REDIS=1
PAGESPEED=1
if [[ -z "$1" ]] || [[ -z "$2" ]]; then
printf -- "\033[31m ERROR: Invalid or no argument supplied \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
for i in "$@"; do
case $i in
-d=* | --dev-slug=*)
DEVSITE_SLUG="${i#*=}"
;;
-s=* | --site-url=*)
SITE_URL="${i#*=}"
;;
--skip-redis*)
$REDIS=0
;;
--skip-pagespeed*)
$PAGESPEED=0
;;
--default)
DEFAULT=YES
;;
*)
# unknown option
;;
esac
done
#if [ -z "$DEFAULT" ]; then
# echo "Error: Wrong Syntax";
# echo ${FORMAT}
# exit 1
# fi
if [[ $DEVSITE_SLUG == *.* ]]; then
printf -- "\033[31m Devsite Slug con not contain a period (.) \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
if [[ $DEVSITE_SLUG == */* ]]; then
printf -- "\033[31m Devsite Slug con not contain a slash (/) \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
if [[ $SITE_URL == *http* ]]; then
printf -- "\033[31m ERROR: Site Url can not contain http \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
if [[ $SITE_URL != www* ]]; then
printf -- "\033[31m ERROR: Wrong Site URL format \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
if [[ "${SITE_URL}" == */* ]]; then
printf -- "\033[31m ERROR: Site Url can not contain a slash (/) \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
if [[ "${SITE_URL}" == *. ]]; then
printf -- "\033[31m ERROR: Site Url can not end with a period (.) \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
if [[ "${SITE_URL}" == www.DOMAIN.com ]]; then
printf -- "\033[31m ERROR: Be sure to replace DOMAIN.com with the domain for this account \033[0m\n"
printf -- "\033[32m CORRECT SYNTAX ---> ${FORMAT} \033[0m\n"
exit 64
fi
load_spinner() {
sp='/-\|'
printf ' '
sleep 0.1
COUNTER=1
while [[ $COUNTER -lt 15 ]]; do
printf '\b%.1s' "$sp"
sp=${sp#?}${sp%???}
sleep 0.1
let COUNTER=COUNTER+1
done
printf -- "\n"
}
initiate_lighsailScript() {
PUBLIC_IP="$(dig +short myip.opendns.com @resolver1.opendns.com)"
printf -- "\033[33m Replace PUBLIC IP with production URL....... \033[0m"
load_spinner
sudo -u daemon wp search-replace "${PUBLIC_IP}" "${SITE_URL}" --skip-plugins=w3-total-cache
sudo -u daemon wp search-replace "nrdevsites.com" "nativerank.dev" --skip-plugins=w3-total-cache
sudo -u daemon wp search-replace "www.nativerank.dev" "nativerank.dev" --skip-plugins=w3-total-cache
printf -- "\033[33m Replacing devsite slug (escaped) with production URL....... \033[0m"
load_spinner
sudo -u daemon wp search-replace "nativerank.dev\\/${DEVSITE_SLUG}" "${SITE_URL}" --skip-plugins=w3-total-cache
printf -- "\033[33m Replacing devsite slug with production (unescaped) URL....... \033[0m"
load_spinner
sudo -u daemon wp search-replace "nativerank.dev/${DEVSITE_SLUG}" "${SITE_URL}" --skip-plugins=w3-total-cache
printf -- "\033[33m Running the same replacements on Less and CSS\n"
load_spinner
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/less/src/ -name "*.less" -exec sed -i "s/nrdevsites.com/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/less/src/ -name "*.less" -exec sed -i "s/www.nativerank.dev/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/less/src/ -name "*.less" -exec sed -i "s/http:/https:/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/less/src/ -name "*.less" -exec sed -i "s/https:\/\/nativerank.dev/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/less/src/ -name "*.less" -exec sed -i "s/nativerank.dev\/${DEVSITE_SLUG}//g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/css/ -name "*.css" -exec sed -i "s/nrdevsites.com/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/css/ -name "*.css" -exec sed -i "s/www.nativerank.dev/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/css/ -name "*.css" -exec sed -i "s/http:/https:/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/css/ -name "*.css" -exec sed -i "s/https:\/\/nativerank.dev/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/css/ -name "*.css" -exec sed -i "s/nativerank.dev\/${DEVSITE_SLUG}//g" {} +
printf -- "\033[33m Running the same replacements on data.json\n"
load_spinner
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/ -name "data.json" -exec sed -i "s/nativerank.dev\/${DEVSITE_SLUG}//g" {} +
printf -- "\033[33m Running the same replacements for Handlebars templates"
load_spinner
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/templates/ -name "*.hbs" -exec sed -i "s/nrdevsites.com/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/templates/ -name "*.hbs" -exec sed -i "s/www.nativerank.dev/nativerank.dev/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/templates/ -name "*.hbs" -exec sed -i "s/nativerank.dev\/${DEVSITE_SLUG}/${SITE_URL}/g" {} +
sudo find /home/bitnami/apps/wordpress/htdocs/wp-content/themes/yootheme_child/templates/ -name "*.hbs" -exec sed -i "s/http:\/\/${SITE_URL}/https:\/\/${SITE_URL}/g" {} +
printf -- "\033[33m Making it secure [http -> https]....... \033[0m"
load_spinner
sudo -u daemon wp search-replace "http://${SITE_URL}" "https://${SITE_URL}" --skip-plugins=w3-total-cache
printf -- "\033[33m Setting site URL in WordPress....... \033[0m"
load_spinner
wp config set WP_SITEURL "https://${SITE_URL}"
wp config set WP_HOME "https://${SITE_URL}"
if [[ $PAGESPEED ]]; then
printf -- "\033[33m Adding default Pagespeed configuration....... \033[0m"
load_spinner
sudo sed -i "s/ModPagespeed on/ModPagespeed on\n\nModPagespeedRespectXForwardedProto on\nModPagespeedLoadFromFileMatch \"^https\?:\/\/${SITE_URL}\/\" \"\/opt\/bitnami\/apps\/wordpress\/htdocs\/\"\n\nModPagespeedLoadFromFileRuleMatch Disallow .\*;\n\nModPagespeedLoadFromFileRuleMatch Allow \\\.css\$;\nModPagespeedLoadFromFileRuleMatch Allow \\\.jpe\?g\$;\nModPagespeedLoadFromFileRuleMatch Allow \\\.png\$;\nModPagespeedLoadFromFileRuleMatch Allow \\\.gif\$;\nModPagespeedLoadFromFileRuleMatch Allow \\\.js\$;\n\nModPagespeedDisallow \"\*favicon\*\"\nModPagespeedDisallow \"\*.svg\"\nModPagespeedDisallow \"\*.mp4\"\nModPagespeedDisallow \"\*.txt\"\nModPagespeedDisallow \"\*.xml\"\n\nModPagespeedInPlaceSMaxAgeSec -1\nModPagespeedLazyloadImagesAfterOnload off/g" /opt/bitnami/apache2/conf/pagespeed.conf
sudo sed -i "s/inline_css/inline_css,hint_preload_subresources/g" /opt/bitnami/apache2/conf/pagespeed.conf
fi
printf -- "\033[33m Removing Bitnami banner....... \033[0m"
load_spinner
sudo /opt/bitnami/apps/wordpress/bnconfig --disable_banner 1
printf -- "\033[33m Updating Redis Object Cache WP Plugin....... \033[0m"
sudo wp plugin update redis-cache --allow-root
# Set right permission
sudo chown -R daemon:daemon /opt/bitnami/apps/wordpress/htdocs/wp-content/plugins/redis-cache
if [[ $REDIS ]]; then
printf -- "\033[33m Setting up and activating Redis Server....... \033[0m"
load_spinner
sudo apt-get install redis-server -y
sudo -u daemon wp redis enable
fi
printf -- "\033[33m Activating WP Rocket plugin and setting WP_CACHE....... \033[0m"
load_spinner
wp config set WP_CACHE true --raw --type=constant
sudo -u daemon wp plugin activate wp-rocket
wp config set WP_ROCKET_CF_API_KEY 1ff24a7ac86219650211952b4fceaf93061a4 --type=constant
wp config set WP_ROCKET_CF_API_KEY_HIDDEN true --raw --type=constant
sudo -u daemon wp cache flush --skip-plugins=w3-total-cache
printf -- "\033[33m Restarting apache....... \033[0m"
load_spinner
sudo /opt/bitnami/ctlscript.sh restart apache
}
printf -- "\033[32m Initiating scripts... \033[0m\n"
initiate_lighsailScript
wait
printf -- "\033[32m Successfully migrated ${DEVSITE_SLUG} -> ${SITE_URL}. \033[0m\n"
exit 0
| true |
17bdfc1765722b25ca5c3455b57fb138c8dcdecd | Shell | brostwalt/ekt-docker | /nuget-cleanup.sh | UTF-8 | 422 | 2.6875 | 3 | [] | no_license | DOTNET_INCREMENTAL="${DOTNET_INCREMENTAL:-false}"
if [ "$DOTNET_INCREMENTAL" == "true" ]; then
mkdir /opt/app-root/packages-for-incremental-build
find /root/.nuget/packages -type f -maxdepth 3 -name '*.nupkg' -exec mv -t /opt/app-root/packages-for-incremental-build {} \+
fi
rm -rf /root/.nuget/packages
rm -rf /root/.local
if [ "$DOTNET_RM_SRC" == "true" ]; then
echo "---> Removing sources..."
rm -rf /src/*
fi
| true |
4879ff532813f8d87a6daa1f15d82b64a7717ffc | Shell | avjves/becky_gui | /frontend/becky-react/src/paths_to_dev.sh | UTF-8 | 97 | 2.53125 | 3 | [] | no_license | for f in $(find . -type f);
do
sed -i 's/"\/api\//"http:\/\/localhost:6701\/api\//g' $f
done
| true |
61d887225b46b7fc941782bba3c082e21a62a658 | Shell | 5queezer/vagrant-arch | /install_zsh.sh | UTF-8 | 262 | 3.125 | 3 | [] | no_license | #!/bin/bash
echo -e "\n--- Installing zsh ---\n"
pacman -Sq zsh grml-zsh-config --noconfirm 2>&1 | tee -a /var/log/vm_build.log
ZSH_BIN=$(whereis zsh | cut -f 2 -d' ')
for user in root vagrant; do
chsh -s $ZSH_BIN $user
su -c "touch ~/.zshrc" - $user
done
| true |
8b8aa345cba5b7898b7112440c0cc321015e79f3 | Shell | jgkennedy/openshift-tools | /docker/oso-rhel7-zagg-web/start.sh | UTF-8 | 520 | 2.59375 | 3 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | #!/bin/bash -e
# Configure the container on startup
ansible-playbook /root/config.yml
# set Django secret key
echo 'setting Django secret key...'
DJANGO_SECRET=$(python -c "import random, string; print ''.join([random.SystemRandom().choice('{}{}{}'.format(string.ascii_letters, string.digits, '!#$%()*+,-.:;<=>?@[]^_{|}~')) for i in range(50)])")
sed -i "s/^SECRET_KEY = .*$/SECRET_KEY = '$DJANGO_SECRET'/" /opt/rh/zagg/zagg/settings.py
echo 'Starting httpd'
echo '--------------'
LANG=C /usr/sbin/httpd -DFOREGROUND
| true |
177de70aca5bd5b4b400d435b60c16c4b813a0e1 | Shell | snowell/asdf-stardog | /lib/commands/command-license.bash | UTF-8 | 1,058 | 3.984375 | 4 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/usr/bin/env bash
scope="$1"
licpath="$2"
if [ -z "$scope" ]; then
if [ -z "$licpath" ]; then
if [ -n "$STARDOG_LICENSE_PATH" ]; then
echo -e "Stardog Ext\t$STARDOG_LICENSE_PATH\t\$STARDOG_LICENSE_PATH"
elif [ -f "$PWD/.stardog-license-path" ]; then
read -r licpath < .stardog-license-path
echo -e "Stardog Ext\t$licpath\t$PWD/.stardog-license-path"
elif [ -f "$HOME/.stardog-license-path" ]; then
read -r licpath < $HOME/.stardog-license-path
echo -e "Stardog Ext\t$licpath\t$HOME/.stardog-license-path"
else
echo -e "No Stardog License Path set"
fi
exit 0
fi
fi
if [ ! -f "$licpath" ]; then
echo -e "asdf-stardog: $licpath does not exist"
exit 1
fi
case "$scope" in
global)
echo "$licpath" > ~/.stardog-license-path
echo "Global STARDOG_LICENSE_PATH set to $licpath"
;;
local)
echo "$licpath" > .stardog-license-path
echo "Local STARDOG_LICENSE_PATH set to $licpath"
;;
*)
echo -e "Usage: asdf stardog license {global|local} \$file"
exit 1
esac | true |
e21869b0475d9dc8f7909caf9eafc2bd013574f6 | Shell | ingorichter/dotfiles | /zsh/zprofile | UTF-8 | 547 | 3.25 | 3 | [] | no_license | # This is not working anymore!?
# specify JAVA_HOME. Since Java 17 is available, I'm going with it
# JAVA_HOME=$(/usr/libexec/java_home -v 17)
# load machine specific configuration
if [ -e "${MACHINE_CONFIG_FILE}" ]; then
source "${MACHINE_CONFIG_FILE}"
fi
# Alias definition
for alias in $(find ~/.alias.d/ -type f -not -name .DS_Store)
do
source "$alias"
done
# Ingo
# setup rbenv
eval "$(rbenv init -)"
# setup pyenv
eval "$(pyenv init --path)"
if which pyenv-virtualenv-init > /dev/null; then eval "$(pyenv virtualenv-init -)"; fi
| true |
ef2880bd4add56df8dd6da714a967c7a9aea4d85 | Shell | snail-Lin/Pi-Node-Shell | /natapp.sh | UTF-8 | 810 | 3.0625 | 3 | [
"Apache-2.0"
] | permissive | NATAPPPATH='/home/pi/Desktop'
rm ${NATAPPPATH}/aria.out
rm ${NATAPPPATH}/ssh.out
PROCESS=`ps -u pi -w 1000|grep natapp|grep -v grep|grep -v PPID|awk '{ print $1}'`
for i in $PROCESS
do
echo "Kill the $1 process [ $i ]"
kill -9 $i
done
nohup ${NATAPPPATH}/natapp -authtoken="***" -log=stdout > ${NATAPPPATH}/aria.out &
echo "6800端口隧道已打开"
nohup ${NATAPPPATH}/natapp -authtoken="***" -log=stdout > ${NATAPPPATH}/ssh.out &
echo "ssh隧道已打开"
sleep 6
grep -n 'http' nohup.out | tail -n 1|awk '{ print $8 }'
grep -n 'tcp' nohup.out |tail -n 1 |awk '{print $8}'
if ps -u root|grep natapp|grep -v -q grep
then
echo 'ssh tunnel open'
else
sudo nohup ${NATAPPPATH}/natapp -authtoken=b9e401f983fb887a -log=stdout > ${NATAPPPATH}/web.out &
echo 'ssh tunnel reopen'
fi
| true |
723418c91bb46dda1c8a7b97488accc97d4e2082 | Shell | xhatanubiss/lazyrecon | /helpers/discord-hook.sh | UTF-8 | 427 | 2.828125 | 3 | [] | no_license | #!/bin/bash
# use discord webhook
# https://www.digitalocean.com/community/tutorials/how-to-use-discord-webhooks-to-get-notifications-for-your-website-status-on-ubuntu-18-04#:~:text=To%20create%20a%20webhook%20you,click%20the%20Create%20Webhook%20button.
# https://discord.com/developers/docs/resources/webhook
status="$@"
curl -H "Content-Type: application/json" -X POST -d '{"content":"'"${status}"'"}' $DISCORDWEBHOOKURL
| true |
a63c395042c27d6572c94746f22e5b7fd634bf7c | Shell | cansavvy/OpenPBTA-analysis | /analyses/snv-callers/run_caller_analysis.sh | UTF-8 | 2,661 | 3.421875 | 3 | [
"CC-BY-4.0",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | #!/bin/bash
# C. Savonen
# CCDL for ALSF 2019
# Purpose:Run an intial evaluation of each variant caller's MAF file
# Set this so the whole loop stops if there is an error
set -e
set -o pipefail
# The files named in these arrays will be ran in the analysis.
datasets=("strelka2" "mutect2" "lancet" "vardict")
wgs_files=("WGS.hg38.strelka2.unpadded.bed" "WGS.hg38.mutect2.unpadded.bed" "WGS.hg38.lancet.300bp_padded.bed" "WGS.hg38.vardict.100bp_padded.bed")
# Reference file paths
cosmic=analyses/snv-callers/ref_files/brain_cosmic_variants_coordinates.tsv
annot_rds=analyses/snv-callers/ref_files/hg38_genomic_region_annotation.rds
# Choose TSV or RDS
format=rds
# Set a default for the VAF filter if none is specified
vaf_cutoff=${OPENPBTA_VAF_CUTOFF:-0}
############################ Set Up Reference Files ############################
# The original COSMIC file is obtained from: https://cancer.sanger.ac.uk/cosmic/download
# These data are available if you register. The full, unfiltered somatic mutations
# file CosmicMutantExport.tsv.gz for grch38 is used here.
Rscript analyses/snv-callers/scripts/00-set_up.R \
--annot_rds $annot_rds \
--cosmic_og scratch/CosmicMutantExport.tsv.gz \
--cosmic_clean $cosmic
########################## Calculate and Set Up Data ##########################
# Create files that contain calculated VAF, TMB, and regional analyses.
for ((i=0;i<${#datasets[@]};i++));
do
echo "Processing dataset: ${datasets[$i]}"
Rscript analyses/snv-callers/scripts/01-calculate_vaf_tmb.R \
--label ${datasets[$i]} \
--output analyses/snv-callers/results/${datasets[$i]} \
--file_format $format \
--maf data/pbta-snv-${datasets[$i]}.vep.maf.gz \
--metadata data/pbta-histologies.tsv \
--bed_wgs data/${wgs_files[$i]} \
--bed_wxs data/WXS.hg38.100bp_padded.bed \
--annot_rds $annot_rds \
--vaf_filter $vaf_cutoff \
--no_region \
--overwrite
done
######################## Plot the data and create reports ######################
for dataset in ${datasets[@]}
do
echo "Processing dataset: ${dataset}"
Rscript analyses/snv-callers/scripts/02-run_eval.R \
--label ${dataset} \
--vaf analyses/snv-callers/results/${dataset} \
--plot_type png \
--file_format $format \
--output analyses/snv-callers/plots/${dataset} \
--cosmic $cosmic \
--strategy wgs,wxs,both \
--no_region
done
##################### Merge callers' files into total files ####################
Rscript analyses/snv-callers/scripts/03-merge_callers.R \
--vaf analyses/snv-callers/results \
--output analyses/snv-callers/results/consensus \
--file_format $format \
--overwrite
| true |
47b3cb19d6230172101eaedcba08795eb134f96d | Shell | JOC9000/CS35 | /Assignment 3/poornames | UTF-8 | 3,725 | 4.03125 | 4 | [] | no_license | #!/usr/bin/env bash
IFS=$'\n'
#IFS is set to newline to be able to collect poor /
#names that have a space in them
export LC_ALL=C
#a series of checks to detect errors in arguments
if [ $# -gt 2 ]; then
echo "too many arguments" 1>&2
exit 1
elif [ $# -eq 0 ]; then
directory="."
elif [ $# -eq 1 ]; then
if [ "$1" == "-r" ]; then
directory="."
else
directory=$(find "$1" -maxdepth 0 -type d)
fi
elif [ $# -eq 2 ]; then
if [ "$1" != "-r" ]; then
echo "given two arguments, first must be recursive option -r" 1>&2
exit 4
fi
if [ "$2" == "-r" ]; then
echo "given two arguments, first must be recursive option -r" 1>&2
exit 4
fi
directory=$(find "$2" -maxdepth 0 -type d)
fi
#a check to see if given directory name is a valid directory
if [ "$directory" == "" ]; then
echo "not a directory!" 1>&2
exit 2
fi
#a check to add a slash to the end of a path if it does not have on
slashCheck=$(echo "$directory" | grep -E "\/$")
if [ "$slashCheck" == "" ]; then
directory=$(echo "$directory/")
fi
#this should get all subdirectories from the given directory /
#and execute poornames on each of them
if [ "$1" == "-r" ]; then
find "$directory" -mindepth 1 -type d -exec "$0" {} \;
fi
#the following command retrieves all filenames in $directory,
#then removes the path, leaving only the filename. These are then
#sorted for ease later on.
#The regex here is ".+\/" which captures all text up to a last slash.
#Since find does not append a slash to directory files this is a
#quick way to retrieve the filenames that will be checked.
files=$(find "$directory" -mindepth 1 -maxdepth 1 | sed -r "s/^.+\///g" | sort -f)
#captures filenames that do not comply with 1st guideline
#the following statements do the same for each guideline
poor=$(echo "$files" | grep -E "[^._A-Za-z-]")
#check for filenames starting in -
poor1=$(echo "$files" | grep -E "^-")
#checks for filenames starting in .
poor2=$(echo "$files" | grep -E "^\.")
#checks for filenames longer than 14 characters
poor3=$(echo "$files" | grep -E ".{15,}")
#To avoid N^2 operations checking each filename against
#every other filename, I presorted the filenames in files.
#Depending on the locale, this means that capitalized letters
#appear right next to uncapitalized ones. I take advantage of this
#and then only check the current $word in files to the prior word that
#was checked. The prompt is unclear about which locale is used, so
#I assumed a regular sort was suitable. Results of this are stored in
#poor4
poor4=""
pastword=""
for word in $files; do
#to compare if they differ only by case, I translate all upper chars
#to lower chars
wordcheck1=$(echo "$word" | tr "[:upper:]" "[:lower:]")
wordcheck2=$(echo "$pastword" | tr "[:upper:]" "[:lower:]")
if [ "$wordcheck1" == "$wordcheck2" ]; then
poor4=$(echo "$poor4"; echo "$word"; echo "$pastword")
fi
pastword="$word"
done
#append all poor variables to fullpoor for simplicity
fullpoor=$(echo "$poor"; echo "$poor1"; echo "$poor2"; echo "$poor3"; echo "$poor4")
fullpoor=$(echo "$fullpoor" | sort -u -b)
fullestpoor=""
for word in $fullpoor; do
#checkDir check if a file is a directory
if [ -d "$directory$word" ]; then
word=$(echo "$word/")
fi
if [ "$fullestpoor" == "" ]; then
fullestpoor=$(echo "$directory$word")
else
fullestpoor=$(echo "$fullestpoor"; echo "$directory$word")
fi
done
#the result is sorted and any empty lines are removed
fullestpoor=$(echo "$fullestpoor" | sort -u -b | grep -E -v "^$")
#a result is only given is fullest poor is not empty,
#to prevent empty lines
if [ "$fullestpoor" != "" ]; then
echo "$fullestpoor"
fi
| true |
46e2444e62aa8a80bca5fb81740eaa5b7f35e5fc | Shell | vtex/toolbelt | /scripts/symlinkPlugin.sh | UTF-8 | 144 | 2.53125 | 3 | [
"MIT"
] | permissive | LINK_PATH="$PWD/node_modules/vtex"
rm $LINK_PATH || echo "Failed to remove $LINK_PATH, maybe it already doesn't exists..."
ln -s $PWD $LINK_PATH | true |
464dd2e4aa0df538d5351deb6ce934c00a246390 | Shell | chlorm-forks/ffmpeg-mr | /s3/build/build.sh | UTF-8 | 1,386 | 3.03125 | 3 | [] | no_license | #!/bin/bash
# Get yasm and install it
# wget http://www.tortall.net/projects/yasm/releases/yasm-1.1.0.tar.gz -O- | tar zxvf -
# cd yasm-1.1.0
# ./configure --disable-nls --prefix=/usr
# make
# sudo make install
# cd ..
# rm -rf ./yasm-1.1.0
# Install CVS.
# sudo apt-get install cvs
PLATFORM=`getconf LONG_BIT`
# Perform the correct build, and upload it.
if [ $PLATFORM == "64" ]
then
hadoop fs -get s3://ffmpeg-mr/build/build64.tar ./build.tar && tar xf build.tar && cd build \
&& rm -rf ./ffmpeg-mr && wget http://dl.dropbox.com/u/8444884/ffmpeg-mr.zip -O /tmp/ffmpeg-mr.zip && \
unzip -o -q /tmp/ffmpeg-mr.zip -d ./ffmpeg-mr && rm /tmp/ffmpeg-mr.zip && cd ffmpeg-mr && chmod 755 run.sh && \
make
if [ -e libffmpeg-mr.so ]
then
hadoop fs -rm s3://ffmpeg-mr/lib64/libffmpeg-mr.so
hadoop fs -put libffmpeg-mr.so s3://ffmpeg-mr/lib64/libffmpeg-mr.so
else
exit -1
fi
else
hadoop fs -get s3://ffmpeg-mr/build/build.tar ./build.tar && tar xf build.tar && cd build \
&& rm -rf ./ffmpeg-mr && wget http://dl.dropbox.com/u/8444884/ffmpeg-mr.zip -O /tmp/ffmpeg-mr.zip && \
unzip -o -q /tmp/ffmpeg-mr.zip -d ./ffmpeg-mr && rm /tmp/ffmpeg-mr.zip && cd ffmpeg-mr && chmod 755 run.sh && \
make
if [ -e libffmpeg-mr.so ]
then
hadoop fs -rm s3://ffmpeg-mr/lib/libffmpeg-mr.so
hadoop fs -put libffmpeg-mr.so s3://ffmpeg-mr/lib/libffmpeg-mr.so
else
exit -1
fi
fi
| true |
55340c17769ae7859a820491634083b96c63bdb7 | Shell | piyushkumar13/shell-scripting | /loopexample.sh | UTF-8 | 414 | 3.59375 | 4 | [] | no_license | #! /bin/bash
for i in "piyush" "sheel" "brijesh"
do
echo "the name of the flat mates are : $i"
done
declare -a degrees
degrees=("Mtech" "Btech")
len=${#degrees[*]} # this statement is used to get the length of the array.
while [ $len -gt 0 ]
do
len=`expr $len - 1`
echo "the degrees are : "
echo "${degrees[$len]}"
done
a=5
until [[ ! $a -gt 0 ]];
do
echo "the value of a : $a"
a=`expr $a - 1`
done
| true |
756a6d83d7856f62a712df746359fa2fc4a878aa | Shell | MartinWeindel/gardener | /hack/local-development/generate-webhook-ca-bundle.sh | UTF-8 | 2,392 | 2.703125 | 3 | [
"Apache-2.0",
"MPL-2.0",
"BSD-3-Clause",
"MPL-1.1",
"MIT",
"BSD-2-Clause"
] | permissive | #!/usr/bin/env bash
# Copyright 2023 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
tmpdir=$(mktemp -d)
service=gardener-admission-controller
namespace=garden
cat <<EOF >> "$tmpdir/csr.conf"
[req]
default_bits = 2048
prompt = no
default_md = sha256
req_extensions = req_ext
distinguished_name = req_distinguished_name
[req_distinguished_name]
organizationName = Self-signed certificate
[v3_ext]
authorityKeyIdentifier = keyid,issuer:always
basicConstraints = CA:FALSE
keyUsage = keyEncipherment,dataEncipherment
extendedKeyUsage = serverAuth,clientAuth
subjectAltName = @alt_names
[req_ext]
subjectAltName = @alt_names
[alt_names]
IP.1 = 127.0.0.1
IP.2 = ::1
DNS.1 = ${service}
DNS.2 = ${service}.${namespace}
DNS.3 = ${service}.${namespace}.svc
DNS.4 = ${service}.${namespace}.svc.cluster
DNS.5 = ${service}.${namespace}.svc.cluster.local
EOF
openssl genrsa -out "$tmpdir/ca.key" 2048
# CA
openssl req -x509 -new -nodes -key "$tmpdir/ca.key" -out "$tmpdir/ca.crt" -days 9000 -subj "/CN=gardener-admission-controller"
openssl genrsa -out "$tmpdir/server.key" 2048
# CSR
openssl req -new -key "$tmpdir/server.key" -out "$tmpdir/server.csr" -config "$tmpdir/csr.conf"
# Signing
openssl x509 -req -in "$tmpdir/server.csr" -CA "$tmpdir/ca.crt" -CAkey "$tmpdir/ca.key" -CAcreateserial -out "$tmpdir/server.crt" -extfile "$tmpdir/csr.conf" \
-days 9000 -sha256 -extensions v3_ext
caBundle=$(openssl enc -a -A < "$tmpdir/ca.crt")
echo "$caBundle" > ../../dev/gardener-admission-controller-caBundle
cat "$tmpdir/server.key" > ../../dev/gardener-admission-controller-server.key
cat "$tmpdir/server.crt" > ../../dev/gardener-admission-controller-server.crt
| true |
98368cd09179c4aa5e557328db47b3ea3b03d7db | Shell | weisjohn/gvm2 | /scripts/function/resolve_current_version.sh | UTF-8 | 2,295 | 4.125 | 4 | [
"MIT"
] | permissive | # scripts/function/resolve_current_version.sh
#
# shellcheck shell=bash
# vi: set ft=bash
#
# source once and only once!
[[ ${GVM_RESOLVE_CURRENT_VERSION:-} -eq 1 ]] && return || readonly GVM_RESOLVE_CURRENT_VERSION=1
# load dependencies
dep_load() {
local srcd="${BASH_SOURCE[0]}"; srcd="${srcd:-${(%):-%x}}"
local base="$(builtin cd "$(dirname "${srcd}")" && builtin pwd)"
local deps; deps=(
"_shell_compat.sh"
)
for file in "${deps[@]}"
do
source "${base}/${file}"
done
}; dep_load; unset -f dep_load &> /dev/null || unset dep_load
# __gvm_resolve_current_version()
# /*!
# @abstract Determine the currently active go version name
# @discussion
# This function will call the currently active go (resolved via regular PATH
# resolution) with the version command. The returned string will be parsed and
# the essential go version name (e.g. go1.9.2, go1.9beta1) will be returned.
# <pre>@textblock
# goM.m[.p][betaX|rcX]
# M - Major version
# m - minor version
# p - patch version
# @/textblock</pre>
# From the list of names that conform to the above pattern, the name with the
# highest version number will be selected.
# @param path [optional] A search path, defaults to PATH.
# @return Returns a string containing the current version name (status 0) or an
# empty string (status 1) on failure.
# @note Also sets global variable RETVAL to the same return value.
# */
__gvm_resolve_current_version() {
local shell_path="${1:-$PATH}"
local active_go="$(PATH="${shell_path}" which go)"
local regex='(go([0-9]+(\.[0-9]+)*)([a-z0-9]*))'
unset RETVAL
[[ -z "${active_go// /}" ]] && RETVAL="" && echo "${RETVAL}" && return 1
while IFS=$'\n' read -r _line; do
if __gvm_rematch "${_line}" "${regex}"
then
# GVM_REMATCH[1]: version name (e.g. go1.7.1, go1.9beta1, go1.7rc1)
# GVM_REMATCH[2]: isolated version (e.g. 1.7.1)
# GVM_REMATCH[3]: not used
# GVM_REMATCH[4]: isolated betaX, rcX string
version="${GVM_REMATCH[1]}"
fi
done <<< "$("${active_go}" version)"
RETVAL="${version}"
if [[ -z "${RETVAL// /}" ]]
then
RETVAL="" && echo "${RETVAL}" && return 1
fi
echo "${RETVAL}" && return 0
}
| true |
55a2287840c1b17bbf1ba330ec1981970b1c0658 | Shell | rwitten/vision-spl | /name.sh | UTF-8 | 542 | 2.75 | 3 | [] | no_license | # invocation is ./name.sh ${algorithm} ${class} ${C} ${foldnum} ${randomness} ${hallu} ${kernel} ${prox_weight} ${epsilon} ${name}
algorithm=$1
class=$2
C=$3
foldnum=$4
randomness=$5
hallu=$6
kernel=$7
prox_weight=$8
epsilon=$9
name=${10}
algorithms[1]='cccp'
algorithms[2]='spl'
algorithms[3]='splplus'
hallus[0]='nohallu'
hallus[1]='hallu'
kernels[0]='bow'
kernels[1]='spm'
echo ${algorithms[$algorithm]}_${class}_C${C}_fold${foldnum}_rand${randomness}_${hallus[$hallu]}_${kernels[$kernel]}_lambda${prox_weight}_eps${epsilon}_${name}
| true |
780d43f859e36b76df20f3334fa018cbfcab8247 | Shell | takutakahashi/openstack-ops-toolkit | /scripts/loadbalancer/create_lb.sh | UTF-8 | 1,468 | 3.28125 | 3 | [] | no_license | #!/bin/bash -ex
LB_NAME=$1
PORT=$2
AMPHORA_IMAGE_ID=$3
ADDRESS=$4
glance image-show $AMPHORA_IMAGE_ID || exit 1
echo create loadbalancer
./create_lb_with_image.sh $LB_NAME $AMPHORA_IMAGE_ID
LB_ID=`openstack loadbalancer list --name $LB_NAME -c id -f value`
echo create listener
openstack loadbalancer listener create --name $LB_NAME-listener --protocol TCP --protocol-port $PORT $LB_ID
./wait.sh $LB_ID
echo create pool
openstack loadbalancer pool create --name $LB_NAME-pool --protocol TCP --listener $LB_NAME-listener --lb-algorithm ROUND_ROBIN
./wait.sh $LB_ID
if [ "$ADDRESS" = "" ]; then
echo create instance for member
./create_instance.sh $LB_NAME-member
echo wait
sleep 10
ADDRESS=`openstack server list --name $LB_NAME-member -c Networks -f value |awk -F'=' '{print $2}'`
fi
echo create member
openstack loadbalancer member create --name $LB_NAME-member --address $ADDRESS --protocol-port $PORT $LB_NAME-pool
./wait.sh $LB_ID
echo create healthmonitor
openstack loadbalancer healthmonitor create --name $LB_NAME-hm --delay 60 --timeout 30 --max-retries 3 --type TCP $LB_NAME-pool
./wait.sh $LB_ID
echo add floating ip
VIP_PORT_ID=`openstack loadbalancer show $LB_ID -c vip_port_id -f value`
FLOATING_IP_ID=`openstack floating ip list --project $TARGET_PROJECT|grep None |awk '{print $2}' |head -1`
openstack floating ip set --port $VIP_PORT_ID $FLOATING_IP_ID
echo attached floating ip. details below:
openstack floating ip show $FLOATING_IP_ID
| true |
7b7bb34449849be4225df8dff8548b591a3a47ac | Shell | simonswain/vagrant-ubuntu-precise-32 | /clean.sh | UTF-8 | 516 | 3.15625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
FOLDER_BASE=`pwd`
FOLDER_ISO="${FOLDER_BASE}/iso"
FOLDER_BUILD="${FOLDER_BASE}/working"
if [ -e "${FOLDER_BUILD}" ]; then
sudo chown $EUID -R "${FOLDER_BUILD}"
sudo chmod -R u+x "${FOLDER_BUILD}"
rm -rf "${FOLDER_BUILD}"
fi
if [ -e "${FOLDER_ISO}" ]; then
sudo chown $EUID -R "${FOLDER_ISO}"
sudo chmod -R u+x "${FOLDER_ISO}"
rm -rf "${FOLDER_ISO}"
fi
if [ -e "${FOLDER_BASE}/package.box" ]; then
rm -rf "${FOLDER_BASE}/package.box"
fi
cd test
vagrant destroy >/dev/null
| true |
d6bfbd83b3ed84a890c3996bd96854931c2b526c | Shell | dslm4515/BMLFS | /build-scripts/audacious-plugins.build | UTF-8 | 1,993 | 2.578125 | 3 | [] | no_license | #! /bin/bash
# audacious-plugins
# Source: https://distfiles.audacious-media-player.org/audacious-plugins-4.1.tar.bz2
#
# $BUILD = Directory to temporarily install
# $PKGS = Directory to store built packages
#
# DEPS
# Required: audacious neon
# Recommended: mpg123
# Optional: wavpack, cURL, FAAD2, FFmpeg, FLAC, LAME, libcdio, libnotify,
# Optional: libsamplerate, libsndfile, libvorbis, Pulseaudio, SDL,
# Optional: adplug, The Bauer stereophonic-to-binaural DSP (bs2b) library,
# Optional: FluidSynth, JACK (requires libsamplerate-0.1.9), libcue,
# Optional: libmodplug, libmms, libsidplayfp, LIRC
# No longer building GTK+2
TPUT=/bin/true ./configure --prefix=/usr \
--disable-gtk $BUILDTRUPLE &&
read -p "Compile?" && make -j2 &&
sudo -S make DESTDIR=$BUILD install &&
cd $BUILD && sudo -S mkdir -v ${BUILD}/install &&
cat > /tmp/slack-desc << "EOF"
# HOW TO EDIT THIS FILE:
# The "handy ruler" below makes it easier to edit a package description. Line
# up the first '|' above the ':' following the base package name, and the '|'
# on the right side marks the last column you can put a character in. You must
# make exactly 11 lines for the formatting to be correct. It's also
# customary to leave one space after the ':' except on otherwise blank lines.
|-----handy-ruler------------------------------------------------------|
audacious-plugins: audacious-plugins (Plugins used by the Audacious media player)
audacious-plugins:
audacious-plugins: This is a collection of plugins for Audacious to support additional
audacious-plugins: media formats.
audacious-plugins:
audacious-plugins: The webpage for Audacious is: http://www.audacious-media-player.org
audacious-plugins:
audacious-plugins:
audacious-plugins:
audacious-plugins:
audacious-plugins:
EOF
sudo -S mv -v /tmp/slack-desc install/ &&
sudo -S makepkg -l y -c n $PKGS/audacious-plugins-4.1-$(uname -m)-mlfs.txz &&
sudo -S rm -rf ${BUILD}/*
| true |
5f72502802a26361b99b40fa05f289cf5c84d127 | Shell | derwiki/johnhenry | /install.sh | UTF-8 | 876 | 3.375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
set -e
if [ -z "$projectname" ]; then
echo " Err: please run 'export projectname=\"MyWebSiteName\"' and try again."
exit -1
fi
#TODO: quit if no: rails, git, bundle, heroku
lowercaseprojectname=`echo $projectname | awk '{print tolower($0)}'`
rails new $projectname
cd $projectname
git init
heroku create $lowercaseprojectname
git add .
git commit -m "rails new $projectname"
echo "gem 'johnhenry'" >> Gemfile
bundle
bundle exec rake john_henry:install
bundle
git add app config Gemfile*
git rm app/views/layouts/application.html.erb
git commit -m 'Install JohnHenryRails'
bundle exec rake john_henry:install:migrations
bundle exec rake db:migrate
git add db
git commit -m 'Add initial migrations and schema.rb'
git push heroku master
heroku run rake db:migrate
heroku restart
echo Success! Point your browser to: http://$lowercaseprojectname.herokuapp.com
| true |
a1aae4317389a3d3f0d5762809910e4a1880a523 | Shell | dinhphi31/wr-app | /codemagic/pre_build_prd.sh | UTF-8 | 1,356 | 2.59375 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# see
# <https://qiita.com/sensuikan1973/items/ab55d6a56033011350fc>
# <https://medium.com/@riscait/building-and-delivering-a-flavor-enabled-flutter-app-using-codemagic-5225a6070224>
set -ex
# Workaround for issue #21335
/usr/bin/plutil -replace CFBundleIdentifier -string com.worldrize.app ios/Runner/Info.plist
# NOTE: codemagic の UI から以下の環境変数をセットすること
# FIREBASE_ANDROID_CREDENTIAL: @ ./android/app/google-services.json
# FIREBASE_IOS_CREDENTIAL: @ ./ios/Runner/GoogleService-Info.plist
echo "${FIREBASE_ANDROID_CREDENTIAL}" > ./android/app/google-services.json
echo "${FIREBASE_IOS_CREDENTIAL}" > ./ios/Runner/GoogleService-Info.plist
echo "${DOTENV}" > secrets/.env
# read .env
export $(egrep -v '^#' secrets/.env | xargs)
# Download assets
curl gdrive.sh | bash -s ${ASSETS_GDRIVE_ID}
unzip -qq assets.zip
rm -rf ./assets.zip ./__MACOSX
which agvtool
# == work around of failing build with ==
# A problem occurred evaluating root project 'cloud_functions_web'.
# > Failed to apply plugin [id 'com.android.library']
# > Minimum supported Gradle version is 5.4.1. Current version is 4.10.2.
# 4.10.2 -> 5.4.1
ls $FLUTTER_ROOT/.pub-cache
sed -i -e 's/4.10.2/5.4.1/g' $FLUTTER_ROOT/.pub-cache/hosted/pub.dartlang.org/cloud_functions_web-1.1.0/android/gradle/wrapper/gradle-wrapper.properties | true |
5281540db84173685fac62fed329a1e86ae033bf | Shell | baoit09/sc-network.deevo.io | /scripts/generate-orderer-tls.sh | UTF-8 | 3,354 | 3.4375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
#
# Copyright Deevo Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
set -e
source $(dirname "$0")/env.sh
# Wait for setup to complete sucessfully
usage() { echo "Usage: $0 [-g <orgname>] [-n <numberpeer>]" 1>&2; exit 1; }
while getopts ":g:n:" o; do
case "${o}" in
g)
g=${OPTARG}
;;
n)
n=${OPTARG}
;;
*)
usage
;;
esac
done
shift $((OPTIND-1))
if [ -z "${g}" ] || [ -z "${n}" ] ; then
usage
fi
source $(dirname "$0")/env.sh
ORG=${g}
mkdir -p ${DATA}
initOrdererVars $ORG 0
export ORDERER_GENERAL_LOCALMSPDIR=${DATA}/orderer/msp
export ORDERER_GENERAL_GENESISFILE=${DATA}/genesis.block
export ORDERER_GENERAL_LOCALMSPID=${ORG}MSP
export ORDERER_GENERAL_TLS_ROOTCAS=[${DATA}/${ORG}-ca-cert.pem]
export ORDERER_GENERAL_TLS_CLIENTROOTCAS=[${DATA}/${ORG}-ca-cert.pem]
export ORDERER_HOST=orderer${n}.${ORG}.deevo.com
export ORDERER_GENERAL_LISTENADDRESS=0.0.0.0
export ORDERER_GENERAL_TLS_PRIVATEKEY=${DATA}/orderer/tls/server.key
export ORDERER_GENERAL_TLS_CLIENTAUTHREQUIRED=true
export ORDERER_GENERAL_LOGLEVEL=debug
export ORDERER_GENERAL_GENESISMETHOD=file
#export ORDERER_DEBUG_BROADCASTTRACEDIR=/hyperledgerconfig/data/logs
export ORDERER_GENERAL_TLS_CERTIFICATE=${DATA}/orderer/tls/server.crt
export ORDERER_GENERAL_TLS_ENABLED=true
export ORDERER_HOME=${DATA}/orderer
export FABRIC_CFG_PATH=${DATA}/
export ORDERER_FILELEDGER_LOCATION=/var/hyperledger/production/orderer
export FABRIC_CA_CLIENT_HOME=$HOME/orderer
export FABRIC_CA_CLIENT_TLS_CERTFILES=${DATA}/${ORG}-ca-cert.pem
export ENROLLMENT_URL=https://rca-${ORG}-admin:rca-${ORG}-adminpw@rca.${ORG}.deevo.com:7054
export ORDERER_HOME=${DATA}/orderer
export ORDERER_DEBUG_BROADCASTTRACEDIR=$DATA/logs
export ORG=${g}
export ORG_ADMIN_CERT=${DATA}/orgs/org0/msp/admincerts/cert.pem
mkdir -p ${DATA}/orderer
mkdir -p ${DATA}/orderer/tls
rm -rf /var/hyperledger/production/*
mkdir -p data
mkdir -p data/logs
if [ -f ./data/logs/orderer.out ] ; then
rm ./data/logs/orderer.out
fi
mkdir -p /tmp/tls
mkdir -p /tmp/tls/signcerts
mkdir -p /tmp/tls/keystore
if [ -d /tmp/tls/keystore ] ; then
rm -rf /tmp/tls/keystore/*
fi
# Enroll to get orderer's TLS cert (using the "tls" profile)
$GOPATH/src/github.com/hyperledger/fabric-ca/cmd/fabric-ca-client/fabric-ca-client enroll -d --enrollment.profile tls -u $ENROLLMENT_URL -M /tmp/tls --csr.hosts $ORDERER_HOST
# Copy the TLS key and cert to the appropriate place
TLSDIR=$ORDERER_HOME/tls
mkdir -p $TLSDIR
cp /tmp/tls/keystore/* $ORDERER_GENERAL_TLS_PRIVATEKEY
cp /tmp/tls/signcerts/* $ORDERER_GENERAL_TLS_CERTIFICATE
rm -rf /tmp/tls
# Enroll again to get the orderer's enrollment certificate (default profile)
$GOPATH/src/github.com/hyperledger/fabric-ca/cmd/fabric-ca-client/fabric-ca-client enroll -d -u $ENROLLMENT_URL -M $ORDERER_GENERAL_LOCALMSPDIR
# Finish setting up the local MSP for the orderer
finishMSPSetup $ORDERER_GENERAL_LOCALMSPDIR
copyAdminCert $ORDERER_GENERAL_LOCALMSPDIR
mkdir -p $DATA/orderer
env | grep ORDERER
rm -rf /var/hyperledger/production/*
mkdir -p data
mkdir -p data/logs
if [ -f ./data/logs/orderer.out ] ; then
rm ./data/logs/orderer.out
fi
cp -R ${FABRIC_CA_CLIENT_HOME}/* ${DATA}/orderer
#cp -R ${ORDERER_GENERAL_LOCALMSPDIR} ${DATA}/orderer
echo "done see /data/logs/orderer"
| true |
3b5b1cb7acf1337d7930154f0ecf4cb3cde2287c | Shell | ticky/dotfiles | /platform/all/zsh/aliases.zsh | UTF-8 | 2,698 | 3.703125 | 4 | [
"MIT"
] | permissive | # LS Alias definitions
# main ls alias
LS="ls -hF" # `-h` enables "human" units for file sizes, `-F` shows characters to denote directory and file types
LL="${LS}Al" # `-A` shows all files except `.` and `..`, `-l` shows list in a long format
LD="${LS}d" # `-d` shows directories as regular files (allows limiting display to current directory rather than listing contents of subdirectories)
# detect coreutils' type by throwing --color=auto at GNU ls
if ls --color=auto ~ >/dev/null 2>&1; then
# If GNU Coreutils has been detected, configure aliases to use `--color`
LS="${LS} --color"
LL="${LL} --color"
LD="${LD} --color"
else
# BSD ls errors out when passed `--color`, GNU ls doesn't seem to care about unexpected arguments
# Use environment variable rather than alias to enable colours.
export CLICOLOR="yes"
# Configure ls colours
#
# +-Directory (Cyan)
# | +-Symlink (Magenta)
# | | +-Socket (Magenta)
# | | | +-Pipe (Green)
# | | | | +-Executable (Magenta)
# | | | | | +-Block Special (Green on Cyan)
# | | | | | | +-Character Special (Green on Brown)
# | | | | | | | +-Executable with setuid set (Magenta on Red)
# | | | | | | | | +-Executable with setgid set (Magenta on Green)
# | | | | | | | | | +-Directory writable to others, with sticky bit (Black on Green)
# | | | | | | | | | | +-Directory writable to others, without sticky bit (Black on Brown)
# | | | | | | | | | | |
export LSCOLORS="GxfxFxcxFxcgcdfbfgacad"
# See `man ls` on a Mac or FreeBSD system for info on this
# Support for additional GNU Coreutils
if gls > /dev/null 2>&1; then
# If the GNU Coreutils are included as "gls"
# setup the main ls alias
alias gls="g${LS} --color"
# list all files in directory
alias gll="g${LL} --color"
# list dot files in directory
alias gl.="g${LD} --color .*"
fi
fi
# disable shell globbing for "calc(1)"
alias calc="noglob calc"
# setup the main ls alias
alias ls="${LS}"
# list all files in directory
alias ll="${LL}"
# list dot files in directory
alias l.="${LD} .*"
# GNU ls' colours are defined in ~/.dircolors
eval $(dircolors -b ~/.dircolors 2>/dev/null)
alias disk-usage="du -h"
alias disk-free="df -h"
# coloured man pages
alias man="mansi"
alias man-update="ronn --roff --markdown --manual \"Jessica Stokes' Dotfiles\" $DOTFILES/platform/*/man/**/*.ronn"
alias rm!="rm -rf"
# close window
alias x="exit"
alias q="exit"
alias :q="exit"
alias ipls="ifconfig | ipgrep"
alias killall="killall -v"
| true |
84ee1985a579c05f0aafedfdfe3a7c85a54de4c0 | Shell | asanger/dotfiles | /zshrc | UTF-8 | 1,154 | 2.8125 | 3 | [] | no_license | # shortcut to this dotfiles path is $ZSH
export ZSH=$HOME/dotfiles
export VAULT_PATH=/Volumes/shadowfax/Dropbox/.vault
# aliases
alias reload!='. ~/.zshrc'
# all of our zsh files
typeset -U config_files
config_files=($ZSH/**/*.zsh)
# load the path files
for file in ${(M)config_files:#*/path.zsh}
do
source $file
done
# load everything but the path and completion files
for file in ${${config_files:#*/path.zsh}:#*/completion.zsh}
do
source $file
done
antigen bundle zsh-users/zsh-syntax-highlighting
antigen bundle sindresorhus/pure
antigen bundle sharat87/autoenv
antigen apply
# use .localrc for SUPER SECRET CRAP that you don't
# want in your public, versioned repo.
if [[ -a ~/.localrc ]]
then
source ~/.localrc
fi
# initialize autocomplete here, otherwise functions won't be loaded
autoload -U compinit
compinit
# load every completion after autocomplete loads
for file in ${(M)config_files:#*/completion.zsh}
do
source $file
done
# matches case insensitive for lowercase
zstyle ':completion:*' matcher-list 'm:{a-z}={A-Z}'
# pasting with tabs doesn't perform completion
zstyle ':completion:*' insert-tab pending
unset config_files
| true |
024bb426adc385cd2c7731b77dc0ae7453faa000 | Shell | DanNBullock/app-tractclassification | /submit.pbs | UTF-8 | 451 | 2.65625 | 3 | [] | no_license | #!/bin/bash
#PBS -l nodes=1:ppn=16:dc2,walltime=4:00:00
#PBS -N tractclassification
#PBS -V
#PBS -o stdout.$PBS_JOBID.log
#PBS -e stderr.$PBS_JOBID.log
[ $PBS_O_WORKDIR ] && cd $PBS_O_WORKDIR
module load matlab
module load spm
rm -rf tracts
echo "starting matlab"
export MATLABPATH=$MATLABPATH:$SERVICE_DIR
matlab -nodisplay -nosplash -r main
if [ -s output.mat ];
then
echo 0 > finished
else
echo "output missing"
echo 1 > finished
exit 1
fi
| true |
fba613313b54e46a81c2d1964946885c26960248 | Shell | tai271828/upstart_script | /brightness | UTF-8 | 273 | 2.953125 | 3 | [] | no_license | #! /bin/sh
#
# rc
#
# read brightness parameter after reboot
#
# Authors:
# Taihsiang Ho
#
if [ -e /etc/last_brightness ] ; then
cat /etc/last_brightness > /sys/class/backlight/acpi_video0/brightness
else
echo "called" > /etc/last_brightness_read_failed
fi
| true |
ba4d273d20fb7f6c25a2565be70b1a44f34e2676 | Shell | A2-Collaboration/epics | /scripts/cbrem/gluex/scripts/.svn/text-base/applyToCurrentDiamond.sh.svn-base | UTF-8 | 1,350 | 3.09375 | 3 | [
"EPICS"
] | permissive | #!/bin/sh
#Apply calculated settings to current diamond
if [ $# -ne "2" ]; then
echo
echo "Usage:"
echo " $0 <P> <G>"
exit;
fi
#Get the prefixes for goni and cbrem
P=$1;
G=$2;
id=`caget -t "${G}RADIATOR_ID"`
para_mode=`caget -t "${P}REQ_PARA_MODE"`;
perp_mode=`caget -t "${P}REQ_PERP_MODE"`;
phi=`caget -t "${P}REQ_PHI022"`;
para_pitch=`caget -t "${P}REQ_PARA_PITCH"`;
para_yaw=`caget -t "${P}REQ_PARA_YAW"`;
perp_pitch=`caget -t "${P}REQ_PERP_PITCH"`;
perp_yaw=`caget -t "${P}REQ_PERP_YAW"`;
off_pitch=`caget -t "${P}REQ_OFF_PITCH"`;
off_yaw=`caget -t "${P}REQ_OFF_YAW"`;x1
caput "${P}PARA_MODE" "$para_mode";
caput "${P}PERP_MODE" "$perp_mode";
caput "${P}OFF_PITCH" "$off_pitch";
caput "${P}OFF_YAW" "$off_yaw";
caput "${P}PARA_PITCH" "$para_pitch";
caput "${P}PARA_YAW" "$para_yaw";
x1
caput "${P}PERP_PITCH" "$perp_pitch";
caput "${P}PERP_YAW" "$perp_yaw";
caput "${P}DIAM${id}:PHI022" "$phi";
caput "${P}DIAM${id}:PARA_MODE" "$para_mode";
caput "${P}DIAM${id}:PERP_MODE" "$perp_mode";
caput "${P}DIAM${id}:OFF_PITCH" "$off_pitch";
caput "${P}DIAM${id}:OFF_YAW" "$off_yaw";
caput "${P}DIAM${id}:PARA_PITCH" "$para_pitch";
caput "${P}DIAM${id}:PARA_YAW" "$para_yaw";
caput "${P}DIAM${id}:PERP_PITCH" "$perp_pitch";
caput "${P}DIAM${id}:PERP_YAW" "$perp_yaw";
xs
caput "${P}DIAM${id}:PHI022" "$phi";
exit; | true |
3408010e77ab4dfa152c126ebbdce4ac51975331 | Shell | josephcopenhaver/wowaddon-vendor-trash-manager | /scripts/set-version.sh | UTF-8 | 823 | 3.71875 | 4 | [] | no_license | #!/bin/bash
set -exo pipefail
# constants
toc_file="VendorTrashManager.toc"
version="${1}"
# verify project state is consistent before this script runs
if [ -n "$(git diff --name-only | grep -v "${toc_file}")" ] || [ -n "$(git ls-files --others --exclude-standard)" ]; then
git status
echo ""
echo "dirty or untracked files exist"
echo ""
exit 1
fi
prefix="v"
if [[ "${version}" = "${prefix}"* ]]; then
version="${version#"${prefix}"}"
fi
test -n "${version}" || (
echo "no version specified"
exit 1
)
version="v${version}"
sed -i.bak -E 's/^##\s+Version:\s+([^\n]+)$/## Version: '"${version}"'/' "${toc_file}"
rm -f "${toc_file}.bak"
git add "${toc_file}"
git commit -m "App version is now ${version}"
git tag "${version}"
git push origin
git push origin "refs/tags/${version}"
| true |
708c6fb1761a0e2080e83e268096838e846ff9f2 | Shell | yuexiuya/mvr | /scripts/video.sh | UTF-8 | 518 | 2.8125 | 3 | [] | no_license | #!/bin/bash
if [[ $# -lt 1 ]]; then
echo "Usage $0 <path to file> [left] [top] [width] [height]"
exit 0;
fi
vfile=$1
left=0
top=0
w=800
h=480
if [[ $# -eq 5 ]]; then
left=$2
top=$3
w=$4
h=$5
fi
gst-launch filesrc location=${vfile} typefind=true \
! aiurdemux name=demux demux. ! \
queue max-size-buffers=0 max-size-time=0 ! vpudec ! mfw_isink \
axis-left=$left axis-top=$top disp-width=$w disp-height=$h \
#demux. ! queue max-size-buffers=0 max-size-time=0 ! beepdec \
#! audioconvert ! alsasink
| true |
9e0cba1fe058903a0b196594cdc28c03bdcd0394 | Shell | swordhui/baselayout | /resource/init.d/localnet | UTF-8 | 904 | 3.40625 | 3 | [] | no_license | #!/bin/sh
########################################################################
# Begin $rc_base/init.d/localnet
#
# Description : Loopback device
#
# Authors : Gerard Beekmans - gerard@linuxfromscratch.org
#
# Version : 00.00
#
# Notes :
#
########################################################################
. /etc/sysconfig/rc
. ${rc_functions}
. /etc/sysconfig/network
case "${1}" in
start)
boot_mesg "Bringing up the loopback interface..."
ip addr add 127.0.0.1/8 label lo dev lo
ip link set lo up
evaluate_retval
boot_mesg "Setting hostname to ${HOSTNAME}..."
hostname ${HOSTNAME}
evaluate_retval
;;
stop)
boot_mesg "Bringing down the loopback interface..."
ip link set lo down
evaluate_retval
;;
restart)
${0} stop
sleep 1
${0} start
;;
status)
echo "Hostname is: $(hostname)"
ip link show lo
;;
*)
echo "Usage: ${0} {start|stop|restart|status}"
exit 1
;;
esac
# End $rc_base/init.d/localnet
| true |
e3ef1fe0537aa91b1bcea9684b0c812dc823be31 | Shell | universal-editor/backend-app | /vagrant/provision/always-as-vagrant.sh | UTF-8 | 377 | 2.703125 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
#== Bash helpers ==
function info {
echo " "
echo "--> $1"
echo " "
}
#== Provision script ==
info "Install project dependencies"
cd /app
composer --no-progress --prefer-dist install
info "Init project"
./init --env=Development --overwrite=y
info "Apply migrations"
./yii migrate <<< "yes"
info "Apply fixtures"
./yii fixture/load "*" <<< "yes" | true |
9ce56edccc8e45e4d7234e52b07da16779476fda | Shell | techknowfile/dotfiles | /.zshrc | UTF-8 | 6,850 | 2.640625 | 3 | [] | no_license | LC_ALL="en_US.UTF-8"
# fh - repeat history
fh() {
print -z $( ([ -n "$ZSH_NAME" ] && fc -l 1 || history) | fzf +s --tac | sed 's/ *[0-9]* *//')
}
##########################
## Shortcuts
##########################
alias cfi='vim ~/.i3/config'
alias cfz='vim ~/.zshrc'
alias p='cd ~/projects; ls -a'
alias D='cd ~/Downloads; ls -a'
alias d='cd ~/Documents; ls -a'
alias P='cd ~/Pictures; ls -a'
alias r='ranger'
alias t='cd ~/projects/thesis; ls -a'
alias www='cd ~/www/techknowfile.github.io; ls -a'
zstyle ':completion:*' matcher-list 'm:{a-zA-Z}={A-Za-z}'
alias ls="ls --color=auto -F"
export PATH="/usr/local/anaconda3/bin:/home/techknowfile/.vim/plugged/vim-live-latex-preview/bin:/home/techknowfile/spark/bin:$PATH"
alias dotfiles='/usr/bin/git --git-dir=$HOME/.dotfiles --work-tree=$HOME'
# export TERMCMD="x-terminal-emulator"
export EDITOR=vim
set encoding=utf-8
DEFAULT_USER=techknowfile
##############################################################################
# History Configuration
##############################################################################
HISTSIZE=5000 #How many lines of history to keep in memory
HISTFILE=~/.zsh_history #Where to save history to disk
SAVEHIST=5000 #Number of history entries to save to disk
#HISTDUP=erase #Erase duplicates in the history file
setopt appendhistory #Append history to the history file (no overwriting)
setopt incappendhistory #Immediately append to the history file, not just when a term is killed#
# Executes commands at the start of an interactive session.
#
# Authors:
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# Source Prezto.
# if [[ -s "${ZDOTDIR:-$HOME}/.zprezto/init.zsh" ]]; then
# source "${ZDOTDIR:-$HOME}/.zprezto/init.zsh"
# fi
alias tmux='tmux'
setopt shwordsplit
# ZGEN
if [ -e "${HOME}/.zgen/zgen.zsh" ]
then
source "${HOME}/.zgen/zgen.zsh"
if ! zgen saved; then
echo "Creating a zgen save"
zgen oh-my-zsh
# plugins
zgen oh-my-zsh plugins/git
zgen oh-my-zsh plugins/heroku
zgen oh-my-zsh plugins/pip
zgen oh-my-zsh plugins/lein
zgen oh-my-zsh plugins/command-not-found
# bulk load
zgen loadall <<EOPLUGINS
zsh-users/zsh-history-substring-search
EOPLUGINS
# completions
zgen load zsh-users/zsh-syntax-highlighting
zgen load bhilburn/powerlevel9k powerlevel9k
zgen load bckim92/zsh-autoswitch-conda
zgen load changyuheng/zsh-interactive-cd
# save all to init script
zgen save
fi
else
git clone https://github.com/tarjoilija/zgen.git "${HOME}/.zgen"
fi
DISABLE_AUTO_TITLE="true"
# Load the oh-my-zsh's library
# antigen use oh-my-zsh
# # Bundles from the default repo (robbyrussell's oh-my-zsh).
# antigen bundle git
# antigen bundle heroku
# antigen bundle pip
# antigen bundle lein
# antigen bundle command-not-found
# # Syntax highlighting bundle.
# antigen bundle zsh-users/zsh-syntax-highlighting
# # Load the theme.
# antigen theme bhilburn/powerlevel9k powerlevel9k
# # Tell Antigen that you're done.
# antigen apply
# source ~/opt/zsh-interactive-cd/zsh-interactive-cd.plugin.zsh
export levels="ssh 01aabaed9@129.219.253.30 -p 1337"
eval `dircolors ''${HOME}/'.dir_colors/dircolors'`
zstyle ':completion:*' list-colors "${(@s.:.)LS_COLORS}"
autoload -Uz compinit
compinit
export PATH="/home/techknowfile/anaconda3/bin:$PATH"
POWERLEVEL9K_PYTHON_ICON='\ue73c'
#POWERLEVEL9K_MODE='awesome-patched'
POWERLEVEL9K_ANACONDA_BACKGROUND='149'
# POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(time context dir rbenv vcs anaconda)
POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(time dir rbenv vcs anaconda)
POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(vi_mode)
POWERLEVEL9K_VI_INSERT_MODE_STRING=""
POWERLEVEL9K_VI_MODE_NORMAL_FOREGROUND='blue'
POWERLEVEL9K_VI_COMMAND_MODE_STRING="<<<"
POWERLEVEL9K_ANACONDA_FOREGROUND='016'
POWERLEVEL9K_ANACONDA_LEFT_DELIMITER=''
POWERLEVEL9K_ANACONDA_RIGHT_DELIMITER=''
source ~/.shortcutsrc
# ROS/Gazebo
# source /opt/ros/indigo/setup.zsh
# source /opt/ros/indigo/setup.zsh
# source ~/catkin_ws/devel/setup.zsh
if [ -n "$CONDA_DEFAULT_ENV" ]; then
echo "source activate $CONDA_DEFAULT_ENV"
fi
# precmd() { RPROMPT="" }
# function zle-line-init zle-keymap-select {
# # VIM_PROMPT="%{$fg_bold[yellow]%} [% NORMAL]% %{$reset_color%}"
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=("%{$fg_bold[yellow]%} [% NORMAL]% %{$reset_color%}")
# RPS1="${${KEYMAP/vicmd/$VIM_PROMPT}/(main|viins)/} $EPS1"
# zle reset-prompt
# }
zle -N zle-line-init
zle -N zle-keymap-select
bindkey jk vi-cmd-mode
bindkey kj vi-cmd-mode
export KEYTIMEOUT=5
autoload -U colors
colors
export MALMO_XSD_PATH=/usr/local/malmo/Schemas
alias minecraft='/usr/local/malmo/Minecraft/launchClient.sh'
alias missions='cd ~/projects/malmo/missions'
## Fix keybindings
bindkey '\e[1~' beginning-of-line # Linux console
bindkey '\e[H' beginning-of-line # xterm
bindkey '\eOH' beginning-of-line # gnome-terminal
bindkey '\e[2~' overwrite-mode # Linux console, xterm, gnome-terminal
bindkey '\e[3~' delete-char # Linux console, xterm, gnome-terminal
bindkey '\e[4~' end-of-line # Linux console
bindkey '\e[F' end-of-line # xterm
bindkey '\eOF' end-of-line # gnome-terminal]]]]]]'
export XDG_CURRENT_DESKTOP=GNOME
unset PYTHONPATH
export LD_LIBRARY_PATH=/opt/OpenBLAS/lib:/usr/local/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH
stty -ixon
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
function fzf-cd {
cd $(dirname $(fzf))
zle reset-prompt
zle -R
}
zle -N fzf-cd
bindkey ^f fzf-cd
# cf - fuzzy cd from anywhere
# ex: cf word1 word2 ... (even part of a file name)
# zsh autoload function
cf() {
local file
file="$(locate -Ai -0 $@ | grep -z -vE '~$' | fzf --read0 -0 -1)"
if [[ -n $file ]]
then
if [[ -d $file ]]
then
cd -- $file
else
cd -- ${file:h}
fi
fi
}
# cdf - cd into the directory of the selected file
cdf() {
local file
local dir
file=$(fzf +m -q "$1") && dir=$(dirname "$file") && cd "$dir"
}
# cdg - cd into the root of current git directory
cdg() {
cd $(git rev-parse --show-toplevel)
}
# tty console colors
if [ "$TERM" = "linux" ]; then
echo -en "\e]P0232323" #black
echo -en "\e]P82B2B2B" #darkgrey
echo -en "\e]P1D75F5F" #darkred
echo -en "\e]P9E33636" #red
echo -en "\e]P287AF5F" #darkgreen
echo -en "\e]PA98E34D" #green
echo -en "\e]P3D7AF87" #brown
echo -en "\e]PBFFD75F" #yellow
echo -en "\e]P48787AF" #darkblue
echo -en "\e]PC7373C9" #blue
echo -en "\e]P5BD53A5" #darkmagenta
echo -en "\e]PDD633B2" #magenta
echo -en "\e]P65FAFAF" #darkcyan
echo -en "\e]PE44C9C9" #cyan
echo -en "\e]P7E5E5E5" #lightgrey
echo -en "\e]PFFFFFFF" #white
clear #for background artifacting
fi
# fix ruby error for LustyJuggler
export RUBYOPT="-W0"
| true |
4a30af8cd4f7504cb384f8d1006c15bc8f8c9d7e | Shell | Sayam753/semester-1 | /OC/bash pro/ex.bash | UTF-8 | 404 | 3.609375 | 4 | [] | no_license | #!/bin/bash
echo Enter a number
read b
#CHECKING IF PRIME
fun()
{
for ((i=2; i<j; i++))
do
if [ $[$j%$i] -eq 0 ]
then
flag=1
break
else
flag=0
fi
done
}
for ((j=b; j>1; j--))
do
fun
if [ $flag -eq 1 ]
then
continue
#ALL PRIMES ARE IN $J ONE BY ONE`
else
for ((;;))
do
if [ $[b%j] -eq 0 ]
then
echo "*$j"
b=$[b/j]
else
break;
fi
done
fi
done
| true |
b6ad45a165193b0c3506d1376b1e7eaa3a8b0e45 | Shell | doxikus/nettemp | /modules/tools/db_reset | UTF-8 | 1,272 | 2.734375 | 3 | [] | no_license | #! /bin/bash
dir=$( cd "$( dirname "$0" )" && cd ../../ && pwd )
date=`date +%y%m%d-%H%M`
cd $dir && git reset --hard && cd -
rm -rf $dir/dbf/*.db
#rm -rf $dir/db/*.rrd #uncomment will delete all stored data
rm -rf $dir/tmp
mkdir $dir/tmp
for i in snmp hosts kwh nettemp; do
rm -rf $dir/dbf/$i.db
sqlite3 $dir/dbf/$i.db < $dir/modules/tools/$i.schema
chmod 775 $dir/dbf/$i.db
chown root.www-data $dir/dbf/$i.db
done
sqlite3 $dir/dbf/nettemp.db "INSERT OR IGNORE INTO users (login, password, perms ) VALUES ('admin', 'd033e22ae348aeb5660fc2140aec35850c4da997', 'adm')"
#sqlite3 $dir/dbf/nettemp.db "INSERT OR IGNORE INTO users (login, password, perms ) VALUES ('temp', 'd969831eb8a99cff8c02e681f43289e5d3d69664', 'ops')"
sqlite3 $dir/dbf/nettemp.db "INSERT INTO device (usb, onewire, serial, i2c, lmsensors, wireless ) VALUES ('off','off','off','off','off','off')"
sqlite3 $dir/dbf/nettemp.db "INSERT INTO settings (mail, sms, rrd, highcharts, fw, vpn, gpio, authmod ) VALUES ('off','off', 'off', 'on', 'off', 'off', 'on', 'on' )"
sqlite3 $dir/dbf/nettemp.db "INSERT INTO fw (ssh, icmp, ext, openvpn ) VALUES ('off','off', '0.0.0.0/0', 'off' )"
sqlite3 $dir/dbf/nettemp.db "INSERT INTO mail_settings (host, port, user, auth, tls, tlscheck ) VALUES ('smtp.gmail.com', '587', 'yourmail@gmail.com', 'on', 'on', 'off' )"
| true |
16c8d1fc76f5d736f6720ef964965e516e743010 | Shell | jshaw35/bashscripts | /interpolate.sh | UTF-8 | 1,335 | 3.609375 | 4 | [] | no_license | #! /bin/bash -x
# By Jonah Shaw, 03032020
# Interpolating existing reanalysis data to a different resolution.
# Modelled after lines 207-230 in conv_ERA_interim.sh
# TO-DO: build in 'help' feature using getopt
# Example use:
# sh interpolate.sh ERA_f19_tn14/res_file_T.nc ERA_f09f09_32L_days/ ERA_f19_tn14/ 2008
############
# SET INPUT ARGS
############
args=("$@")
res_file=${args[0]} # Path to an output file with appropriate resolution
old_path=${args[1]} # Directory path to reanalysis files to interpolate
new_path=${args[2]} # Directory path to store new reanalysis data
year=${args[3]} # Year to process
echo $res_file $old_path $new_path $year # this works
# Create weights file from res_file:
# 1: Grab T coordinates from res_file (simplest option)
cdo -s selname,T $res_file $new_path/res_file_T.nc
new_res=$new_path/res_file_T.nc
# 2: Create weights file for res_file's resolution
cdo -s genbil,$new_path/res_file_T.nc $old_path/$year-01-01.nc $new_path/weights.nc
new_weights=$new_path/weights.nc
cd $old_path
files_old=$(find *$year-*.nc) # find all files for your year
#echo $files_old
#exit 1
# iterate through and interpolate using the new res and weight files
for file_in in $files_old
do
echo $file_in
cdo -s remap,$new_res,$new_weights $file_in $new_path/$file_in
done
exit 1
| true |
46118c4bcc2361f4897accc8062c25e88cc75020 | Shell | pwalkr/home | /bin/myansible.d/roles/i3_wm/files/i3-helper | UTF-8 | 1,152 | 4.125 | 4 | [] | no_license | #!/bin/sh
usage() {
echo "$0 <command> [options]" 1>&2
}
HELPER_LOCKDIR="/tmp/i3-helper.lckd"
summon_workspace() {
local target="$1"
local swap=
if [ -z "$target" ]; then
echo "No workspace specified" 1>&2
exit 1
fi
current="$(i3-msg -t get_workspaces | jq '.[] | select(.focused==true).name')"
output="$(i3-msg -t get_outputs | jq -r ".[] | select(.current_workspace==$current).name")"
# If target is on the other monitor, swap current to replace it
swap="$(i3-msg -t get_outputs | jq -r ".[] | select(.current_workspace==\"$target\").name")"
if [ "$swap" ]; then
swap="move workspace to output $swap; "
fi
i3-msg "${swap}workspace $target; move workspace to output $output; workspace $target"
}
for util in i3-msg jq; do
if ! which $util; then
echo "Missing utility '$util'"
notify-send "i3-helper: missing utility '$util'"
exit 1
fi
done
for x in `seq 1 20`; do
mkdir "$HELPER_LOCKDIR" &>/dev/null && break;
if [ $x = 20 ]; then
echo "Failed to acquire lock" 1>&2
exit 1
fi
sleep 0.1
done
query="$1"; shift
case "$query" in
"summon")
summon_workspace "$@"
;;
*)
usage
;;
esac
rmdir "$HELPER_LOCKDIR"
| true |
9ba561065d20ab72b3a2c1a0b34d26742cbd39bb | Shell | bioinformaticssg/Working-on-the-HPC | /07_seq_with_variables_and_args.sh | UTF-8 | 490 | 3.828125 | 4 | [] | no_license |
#!/bin/bash
# Command line parameters given when the script is invoked are indicated by $# (ex: $1, $2) based on the order they are given.
# Here we are assigning the command line parameter to a variable so we don't have the change script at the bottom.
NSTART_SEQ=$1
NSTEP_SEQ=$2
NEND_SEQ=$3
for NUMBER in `seq ${NSTART_SEQ} ${NSTEP_SEQ} ${NEND_SEQ}`; do # seq is surrounded by back quotes
echo $NUMBER # prints to stdout
echo $NUMBER >> seq3.txt # prints to file
done
| true |
637d7f961ff9e5651fe981875121d1cc80c2655f | Shell | whichxjy/practice-sh | /simple_top.sh | UTF-8 | 306 | 3.109375 | 3 | [] | no_license | #! /bin/bash
HEADFLAGS="-n 20"
PSFLAGS=aux
SLEEPFLAGS=5
SORTFLAGS="-k3nr -k1,1 -k2n"
HEADER="`ps $PSFLAGS | head -n 1`"
while true
do
clear
uptime
echo "$HEADER"
ps $PSFLAGS |
sed -e "1d" |
sort $SORTFLAGS |
head $HEADFLAGS
sleep $SLEEPFLAGS
done
| true |
625606c083ac7bae8571bc2b63c55c73e31dfe51 | Shell | gregorthebigmac/install_unifi | /unifi_install.sh | UTF-8 | 5,044 | 4.03125 | 4 | [] | no_license | #!/bin/bash
# List of scripts in order:
# d8 = Debian 8
# d9 = Debian 9
# d10 = Debian 10
# u1604 = Ubuntu 16.04
# u1804 = Ubuntu 18.04 / Mint 18
# u1810 = Ubuntu 18.10 / Mint 19
# u1904 = Ubuntu 19.04
d8="https://get.glennr.nl/unifi/5.6.42/D8/unifi-5.6.42.sh"
d9="https://get.glennr.nl/unifi/5.6.42/D9/unifi-5.6.42.sh"
d10="https://get.glennr.nl/unifi/5.6.42/D10/unifi-5.6.42.sh"
u1604="https://get.glennr.nl/unifi/5.6.42/U1604/unifi-5.6.42.sh"
u1804="https://get.glennr.nl/unifi/5.6.42/U1804/unifi-5.6.42.sh"
u1810="https://get.glennr.nl/unifi/5.6.42/U1810/unifi-5.6.42.sh"
u1904="https://get.glennr.nl/unifi/5.6.42/U1904/unifi-5.6.42.sh"
distro=""
version=""
script_number=""
help_wall_of_text="Usage:
unifi_install.sh
unifi_install.sh -d [distro_code] -v [version_number]
List of supported options:
-d [distro_code] Choose your distro. Supported distros and their codes are as follows:
Debian = d
Ubuntu = u
Mint = m
-v [version_number] Choose your distro version. Supported distro versions are as follows:
Ubuntu: 1604, 1804, 1810, 1904
Debian: 8, 9, 10
Mint: 18, 19
Examples:
---------- Debian -----------
unifi_install.sh -d d -v 8
unifi_install.sh -d d -v 9
unifi_install.sh -d d -v 10
---------- Ubuntu -----------
unifi_install.sh -d u -v 1604
unifi_install.sh -d u -v 1804
unifi_install.sh -d d -v 1810
unifi_install.sh -d d -v 1904
---------- Debian -----------
unifi_install.sh -d m -v 18
unifi_install.sh -d d -v 19
"
while getopts ':d:v:h:' OPTION; do
case $OPTION in
d) distro="$OPTARG"
;;
v) version="$OPTARG"
;;
h) echo "$help_wall_of_text"
exit 1
;;
?) echo "$help_wall_of_text"
exit 1
;;
esac
done
if [[ -z "$distro" ]]; then
echo "What distro are you running? Ubuntu, Mint, or Debian? [u/m/d]"
read distro
if [[ "$distro" == "u" ]]; then
distro="Ubuntu"
echo "What version of $distro are you running? [1604/1804/1810/1904]"
read version
elif [[ "$distro" == "m" ]]; then
distro="Mint"
echo "What version of $distro are you running? [18/19]"
read version
elif [[ "$distro" == "d" ]]; then
distro="Debian"
echo "What version of $distro are you running? [8/9/10]"
read version
else
echo "Sorry, but $distro is not supported. Please run this script again with the -h"
echo "option to see the full list of supported distros and their respective versions."
exit 2
fi
elif [[ "$distro" == "u" ]]; then distro="Ubuntu"
elif [[ "$distro" == "m" ]]; then distro="Mint"
elif [[ "$distro" == "d" ]]; then distro="Debian"
fi
if [[ "$distro" == "Ubuntu" ]]; then
if [[ "$version" == "1604" ]]; then
echo "Downloading install script for $distro $version..."
script_number="${distro}_${version}_install.sh"
wget -c $u1604 -o $script_number
elif [[ "$version" == "1804" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $u1804 -o $script_number
elif [[ "$version" == "1810" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $u1810 -o $script_number
elif [[ "$version" == "1904" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $u1904 -o $script_number}
else
echo "Sorry, but $distro version $version wasn't an option. Try running this script"
echo "with -h for a list of supported distros and their versions."
exit 3
fi
elif [[ "$distro" == "Mint" ]]; then
if [[ "$version" == "18" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $u1804 -o $script_number
elif [[ "$version" == "19" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $u1810 -o $script_number
else
echo "Sorry, but $distro version $version wasn't an option. Try running this script"
echo "with -h for a list of supported distros and their versions."
exit 4
fi
fi
if [[ "$distro" == "Debian" ]]; then
if [[ "$version" == "8" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $d8 -o $script_number
elif [[ "$version" == "9" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $d9 -o $script_number
elif [[ "$version" == "10" ]]; then
script_number="${distro}_${version}_install.sh"
wget -c $d10 -o $script_number
else
echo " Sorry, but $distro version $version wasn't an option. Try running this script"
echo "with -h for a list of supported distros and their versions."
exit 5
fi
fi
#rm $script_number
# NOTE(review): the wget calls above used -o (log file), so the download itself
# lands under its remote name; this rename relies on that remote name being
# unifi-5.6.42.sh -- confirm, and consider wget -O to make it explicit.
mv unifi-5.6.42.sh $script_number
# Make the fetched installer executable and run it with elevated privileges.
chmod 755 $script_number
sudo ./$script_number
| true |
b7dd4f763a52a1e06dcb0b804f0a27a7619ddbf8 | Shell | tarmiste/lfspkg | /archcore/svnsnap/community/ps_mem/trunk/PKGBUILD | UTF-8 | 700 | 2.59375 | 3 | [] | no_license | # $Id:
# Maintainer : Martin Wimpress <code@flexion.org>
# Contributor: Kyle Keen <keenerd@gmail.com>
# Contributor: Ali Gündüz <gndz.ali@gmail.com>
pkgname=ps_mem
pkgver=3.9
pkgrel=1
pkgdesc="List processes by memory usage"
# Pure script package: no compiled code, so it is architecture-independent.
arch=('any')
url="http://www.pixelbeat.org/scripts/"
license=('GPL')
# Compatible with both Python 2.x and 3.x
depends=('python')
makedepends=('git')
# Source is pinned to a specific upstream commit; SKIP because the git hash
# itself identifies the content, so no separate checksum applies.
source=("${pkgname}::git+https://github.com/pixelb/${pkgname}.git#commit=f0891def54f1edb7")
md5sums=('SKIP')
# makepkg packaging step: stage the script, README and man page into $pkgdir.
package() {
    cd "${srcdir}/${pkgname}"

    # The executable goes straight onto PATH under the package name.
    install -Dm755 ps_mem.py "${pkgdir}/usr/bin/${pkgname}"
    # Documentation and manual page.
    install -Dm644 README.md "${pkgdir}/usr/share/doc/${pkgname}/README.md"
    install -Dm644 "${pkgname}.1" "${pkgdir}/usr/share/man/man1/${pkgname}.1"
}
| true |
6f05ae570a500d9e7daa3c3b47941a7f558768ab | Shell | ssiyad/dotfiles | /zshrc | UTF-8 | 1,711 | 2.75 | 3 | [] | no_license | autoload -Uz compinit up-line-or-beginning-search down-line-or-beginning-search vcs_info
# Initialize zsh's completion system (compinit was autoloaded above).
compinit
# zsh pre-prompt hook: runs just before each prompt is drawn.
precmd() {
# Populate ${vcs_info_msg_0_}, consumed by PROMPT below.
vcs_info
# Set the terminal title to the current directory (-P prompt-expands %~, -n: no newline).
print -Pn "\e]0;%~\a"
}
# Emit the last command's exit code wrapped in red prompt color codes,
# or nothing when the command succeeded. Used via $(...) in RPROMPT.
function check_last_exit_code() {
    # Capture $? first thing, before any other command clobbers it.
    local code=$?
    if (( code != 0 )); then
        echo "%F{red}${code}%f"
    fi
}
# Completion: interactive selection menu.
zstyle ':completion:*' menu select
# vcs_info format: current git branch in yellow (read via ${vcs_info_msg_0_}).
zstyle ':vcs_info:git*' formats "%F{yellow}%b%f "
# Command history location and sizes.
HISTFILE=~/.zsh_history
HISTSIZE=1000
SAVEHIST=1000
setopt appendhistory
setopt COMPLETE_ALIASES
# PROMPT_SUBST is required so RPROMPT's $(...) is re-evaluated for every prompt.
setopt PROMPT_SUBST
setopt histignoredups
# Left prompt: bold green dashes, two trailing path components, git branch, cyan ».
PROMPT='%B%F{154}---%f %2~%b ${vcs_info_msg_0_}%B%F{cyan}»%f%b '
# Right prompt: last exit code in red when non-zero (function defined above).
RPROMPT='$(check_last_exit_code)'
# Third-party plugins from their system-wide install paths.
source /usr/share/zsh/plugins/zsh-autosuggestions/zsh-autosuggestions.zsh
source /usr/share/zsh/plugins/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh
# Lazy-load the 'wd' (warp directory) helper on first use.
# NOTE(review): sourcing wd.sh presumably redefines/handles the wd command;
# confirm arguments given to this stub are not needed by wd.sh itself.
wd() {
. /usr/share/wd/wd.sh
}
# Emacs-style line editing.
bindkey -e
# Arrow keys: walk through history.
bindkey '\e[A' up-history
bindkey '\e[B' down-history
# Ctrl+Right / Ctrl+Left: move by word (raw escape sequences).
bindkey "^[[1;5C" forward-word
bindkey "^[[1;5D" backward-word
# Same word-motion bindings via the $key array, when populated.
# NOTE(review): $key is not set in this file -- presumably filled by a sourced
# terminfo helper; verify it exists in this setup.
[[ -n "${key[Control-Left]}" ]] && bindkey -- "${key[Control-Left]}" backward-word
[[ -n "${key[Control-Right]}" ]] && bindkey -- "${key[Control-Right]}" forward-word
# Environment.
export ARCHFLAGS="-arch x86_64"
export EDITOR=nvim
# Keep pipenv virtualenvs inside the project directory.
export PIPENV_VENV_IN_PROJECT=1
# Verbose/interactive variants of common commands.
alias mkdir="mkdir -pv"
alias ls="ls -aFGh --color=always"
alias rm="rm -rfv"
alias mv="mv -iv"
alias cp="cp -rv"
# Paste stdin to the ix.io pastebin.
alias ix.io="curl -F 'f:1=<-' ix.io"
alias emacs="emacs -nw"
# VS Code OSS with native Wayland rendering.
alias code="code-oss --enable-features=UseOzonePlatform --ozone-platform=wayland"
alias vim='nvim'
alias v='nvim'
# Node Version Manager: default NVM_DIR when unset, then load nvm, its
# completions, and the nvm-exec helper from the system-wide install.
[ -z "$NVM_DIR" ] && export NVM_DIR="$HOME/.nvm"
source /usr/share/nvm/nvm.sh
source /usr/share/nvm/bash_completion
source /usr/share/nvm/install-nvm-exec
# Auto-start sway when logging in on tty1 with no graphical session running.
# Fixed: $DISPLAY was unquoted, which misbehaves under [ ] if the value ever
# contains whitespace; quoting makes the -z test reliable.
if [ -z "$DISPLAY" ] && [ "$(tty)" = "/dev/tty1" ]; then
    exec sway
fi
| true |
d089b3c587c8b7fd344195654e935534e3706f58 | Shell | cjobenny/remoterepo | /pgm8.sh | UTF-8 | 668 | 3.09375 | 3 | [] | no_license | while [ true ]
do
# Menu-driven calculator iteration: show options, read a choice, then two
# operands, and print the result. Fixed: $opt/$n1/$n2 were unquoted inside
# [ ] with -eq, which spews "unary operator expected" on empty input; option
# matching now uses string comparison so non-numeric input no longer errors.
echo "Choose Your Option:"
echo "1.Add"
echo "2.Substract"
echo "3.Division"
echo "4.Multiply"
echo "5.Exit"
read -r opt
# Option 5 exits the whole script before any numbers are requested.
if [ "$opt" = "5" ]
then
    exit
fi
echo "Enter first Number:"
read -r n1
echo "Enter second Number:"
read -r n2
echo "Result is->"
if [ "$opt" = "1" ]
then
    echo $((n1 + n2))
elif [ "$opt" = "2" ]
then
    echo $((n1 - n2))
elif [ "$opt" = "3" ]
then
    # Guard against division by zero before invoking bc.
    if [ "$n2" -eq 0 ]
    then
        echo "Cannot divisible by zero"
    else
        # scale=2 keeps two decimal places in the quotient.
        echo "$(echo "scale=2; $n1 / $n2" | bc)"
    fi
elif [ "$opt" = "4" ]
then
    echo "$(echo "scale=2; $n1 * $n2" | bc -l)"
fi
done
# Dead code: the while-loop above is infinite and its only exit path is the
# 'exit' builtin, which terminates the whole script, so these echoes never run.
# They look like leftovers from commit/merge experiments.
echo "Changed"
echo "Again Changed"
echo "aswin"
echo "Pulling Commit"
echo "domus"
| true |
b376f2fcf64c879622b950d0c7458cac2e6f1911 | Shell | jawaad-ahmad/common-inf | /roles/console-login-ip-addr/templates/etc/network/if-up.d/show-ip-address.sh.j2 | UTF-8 | 264 | 2.859375 | 3 | [
"MIT"
] | permissive | #!/bin/sh
CAT=/bin/cat

# Nothing to do for the loopback interface.
[ "${METHOD}" = "loopback" ] && exit 0

# Only run from ifup.
[ "${MODE}" != "start" ] && exit 0

# Rebuild /etc/issue: current IP address, a blank line, then the standard banner.
/usr/local/bin/get-ip-address.sh > /etc/issue
echo "" >> /etc/issue
${CAT} /etc/issue-standard >> /etc/issue

exit 0
| true |
422a4e86994bbd7cc409d6be347e977a970695e0 | Shell | acoli-repo/book-gen | /fill_chapterstructure.sh | UTF-8 | 560 | 3.34375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# run with: sh fill_chapterstructure.sh gen/
# Fills already generated chapter structure in gen/chap-struc.html
# with data from get/chapters/*/*
TEXTRANK="textrank/summ_and_keywords.py"
JAR="beta_writer/dist/beta_writer.jar"
PYTHON="/usr/local/bin/python3"
echo "Filling chapter structure..."
# Fill stubs and add intros, conclusions and method and result summaries.
# true: fill with reordered sentences.
# false: fill with NON-reordered sentences.
# Fixed: all expansions are now quoted so an output directory containing
# spaces still works; the output lands in "<outdir>book.html" (the caller
# passes a trailing slash, e.g. gen/).
java -Xmx3g -jar "$JAR" "STUB_FILLER" "$1" "$TEXTRANK" "$PYTHON" > "${1}book.html"
echo "... done." | true |
3fa1cf497e332b3dc61d7a8582130a459990cd3a | Shell | lolnoob/Agents | /picek.sh | UTF-8 | 392 | 3.609375 | 4 | [] | no_license | #!/bin/bash
# Announce start ("Sarting" typo fixed) and report how many .ini configs exist.
echo "Starting..."
# The glob stays unexpanded in the variable (no expansion in assignments);
# it expands at each unquoted use site.
DIR=./configs/*
# Fixed: the count used 'ls -l | grep .ini | wc -l', which both parses ls
# output and treats '.' as "any character" (matching e.g. "mini.txt").
# Count via the same glob/pattern the processing loop below uses.
ini_count=0
for _cfg in $DIR; do
    [[ $_cfg == *.ini ]] && ini_count=$((ini_count + 1))
done
echo "Found $ini_count config files"
echo "processing..."
for file in $DIR; do
# Per-entry body of the configs loop: launch one detached worker per .ini file.
# Fixed: $file is now quoted where it is used as a single argument, so
# config paths containing spaces no longer split.
if [[ $file == *.ini ]] ; then
    echo "Executing program with config: $file"
    # Low priority (nice -19), immune to hangup (nohup), run in background.
    nice -19 nohup python3 main.py "$file" &
    # Stagger startups so each process gets a head start.
    sleep 2
    # Record "config-path PID" for later management.
    echo "$file $!" >> pid.txt
else
    echo "Wrong config file: $file"
fi
done | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.