blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
4734a911819cd64b2f3a4a4ea75d91098da5e0a4 | Shell | jkaye2012/bash-profile | /lib/configuration | UTF-8 | 687 | 3.578125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# -*- mode: sh -*-
## Configuration defaults.
# Variables set here can be overridden in a few ways:
# 1. A file at BASHPROF_ROOT named env.
# 2. Explicitly set environment variables.
## Profile-wide
BASHPROF_VERBOSE="${BASHPROF_VERBOSE:-}"
## Python
BASHPROF_PYENV_ENABLED="${BASHPROF_PYENV_ENABLED:-1}"
BASHPROF_PYENV_GLOBAL_VERSION="${BASHPROF_PYENV_GLOBAL_VERSION:-3.8.0}"
## Powerline
BASHPROF_POWERLINE_VENV="${BASHPROF_POWERLINE_VENV:-powerline}"
env_overrides="$BASHPROF_ROOT/env"
if [ -f "$env_overrides" ]; then
# shellcheck source=/dev/null
. "$env_overrides"
fi
export BASHPROF_VERBOSE
export BASHPROF_USE_PYENV
export BASHPROF_PYENV_GLOBAL_VERSION
| true |
7ceb0b79157858294204dbe18543241629db4c97 | Shell | nmag-project/nmag-dist | /patches/check-deps.sh | UTF-8 | 1,862 | 3.9375 | 4 | [] | no_license | #!/bin/sh
NMAG_WEBSITE_DEPS="http://nmag.soton.ac.uk/nmag/0.2/install/install_a.html"
GREP_Q_I="grep -q -i"
ECHO="echo"
STOP="exit 1"
DEBIAN_LIKE=no
ETC_ISSUE=/etc/issue
DPKG_ARCHITECTURE=dpkg-architecture
# Communicate that we are about to do some tests
$ECHO "Checking dependencies..."
# First, we check for Debian and the Multiarch problem.
PLEASE_CHECK_DEPS="A list of all the packages required in order\
to compile Nmag is available at $NMAG_WEBSITE_DEPS. We cannot\
detect all of them. If you want to continue\
anyway you can try 'make anyway'."
$GREP_Q_I -i Ubuntu $ETC_ISSUE >/dev/null 2>&1 && DEBIAN_LIKE=yes
$GREP_Q_I -q -i Debian $ETC_ISSUE >/dev/null 2>&1 && DEBIAN_LIKE=yes
if test "$DEBIAN_LIKE" = "yes"; then
echo "Debian-like system: checking multiarch settings..."
$DPKG_ARCHITECTURE >/dev/null 2>/dev/null || \
{ $ECHO "Error: cannot find the executable dpkg-architecture, which is" \
"necessary to compile Nmag in Debian or Ubuntu. Please, install the " \
"package dpkg-dev with your package manager if this is a " \
"Debian or Ubuntu system. Otherwise:" \
"$PLEASE_CHECK_DEPS"; $STOP; }
else
$ECHO "It looks as if this is neither Debian nor Ubuntu..."
fi
# Now we check for other requirements
$ECHO "Checking required executables..."
NOT_FOUND="not found, error! $PLEASE_CHECK_DEPS"
cc --version >/dev/null 2>/dev/null || { $ECHO "cc $NOT_FOUND"; $STOP; }
c++ --version >/dev/null 2>/dev/null || { $ECHO "c++ $NOT_FOUND"; $STOP; }
bash --version >/dev/null 2>/dev/null || { $ECHO "bash $NOT_FOUND"; $STOP; }
gawk --version >/dev/null 2>/dev/null || { $ECHO "gawk $NOT_FOUND"; $STOP; }
m4 --version >/dev/null 2>/dev/null || { $ECHO "m4 $NOT_FOUND"; $STOP; }
patch --version >/dev/null 2>/dev/null || { $ECHO "patch $NOT_FOUND"; $STOP; }
$ECHO "Preliminary checks were successfully completed."
| true |
b7cc0bd45cb888fe8a03a4f9db81391de6a9f4a1 | Shell | jedavies-dev/kiss-aarch64 | /extra/uboot/build | UTF-8 | 388 | 2.78125 | 3 | [] | no_license | #!/bin/sh -e
#for patch in *.patch; do
# patch -p1 < "$patch"
#done
export BL31="/boot/bl31.elf"
find . -iname "*pine*"
make pinebook-pro-rk3399_defconfig
make
# Install files
mkdir -p "$1/boot"
mkdir -p "$1/usr/bin"
install -Dm600 idbloader.img "$1/boot/"
install -Dm600 u-boot.itb "$1/boot/"
install -Dm600 boot.txt "$1/boot/"
install -Dm755 tools/mkimage "$1/usr/bin/"
| true |
b44e13b865d0c291a06f963aeb1ed192847cad9b | Shell | tiusjc/dtisjc-docs | /docs/dockerfiles/ubuntu-firefox/dkx.sh | UTF-8 | 324 | 2.921875 | 3 | [] | no_license | export XSOCK=/tmp/.X11-unix
export XAUTH=/tmp/.docker.xauth
if [ ! -f $XAUTH ]; then
touch $XAUTH
xauth nlist $DISPLAY | sed -e "s/^..../ffff/" | xauth -f $XAUTH nmerge -
fi
docker run -i -t -e DISPLAY -e USER -e XAUTHORITY=$XAUTH -v $XSOCK:$XSOCK -v $XAUTH:$XAUTH \
--net=host $@
| true |
a1e68cb6b77fadb70bd1b2aa4e568d0f93c63935 | Shell | coral1412/devops | /install_aliyun_epel.sh | UTF-8 | 725 | 3.28125 | 3 | [] | no_license | #/bin/bash
mv /etc/yum.repos.d/epel.repo /etc/yum.repos.d/epel.repo.backup
mv /etc/yum.repos.d/epel-testing.repo /etc/yum.repos.d/epel-testing.repo.backup
osversion=`uname -r |awk -F - '{print $2}' |awk -F . '{print $2}'`
#osversion=`lsb_release -r |awk '{print $2}' |awk -F . '{print $1}'`
if [ $osversion == 'el5' ];then
wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-5.repo
elif [ $osversion == 'el6' ];then
wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-6.repo
elif [ $osversion == 'el7' ];then
wget -O /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo
else
echo "匹配不到你当前系统版本的EPEL源"
fi
yum clean all && yum makecache | true |
13ed9f71a460050a7c3c56257a355a745ef5d622 | Shell | taroyuyu/MouseDef | /Scripts/release_pipeline/2_archive.sh | UTF-8 | 425 | 2.921875 | 3 | [
"MIT"
] | permissive | #!/bin/zsh
source .env
BUILD_NUMBER=`agvtool what-version | sed -n 2p | xargs`
VERSION_NUMBER=`sed -n '/MARKETING_VERSION/{s/MARKETING_VERSION: //;s/;//;s/^[[:space:]]*//;p;q;}' XcodeGen/MouseDef.yml`
FILENAME="$APP_SCHEME.$VERSION_NUMBER.$BUILD_NUMBER"
echo "🛠 Archiving"
# Build and archive a new version
xcodebuild \
-config Release\
-scheme $APP_SCHEME \
-archivePath ./Build/$FILENAME \
archive \
| xcpretty
| true |
3173db20fc731031d765998537fa9ef52d417c7b | Shell | xingniu/multitask-ft-fsmt | /coling2018/scripts/gen-bilingual-data.sh | UTF-8 | 1,029 | 2.984375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
### Data selection
lang_ds=$lang_base
ds_dir=$bilingual_data_dir
select_data=$ds_dir/pool.select.$select_n.$lang_pos
if [[ $nods == True ]]; then
select_data=$ds_dir/pool.top.$select_n
head -n $select_n $pool.$bilingual_lang_src > $select_data.$bilingual_lang_src
head -n $select_n $pool.$lang_ds > $select_data.$lang_ds
else
. `dirname $0`/data-selection-ced.sh
fi;
### Cleaning selected parallel data
if [ ! -f $select_data.clean.$lang_ds ]; then
echo " * Cleaning selected parallel data $select_data.* ..."
$moses_scripts_path/training/clean-corpus-n.perl \
-ratio 3 \
$select_data \
$bilingual_lang_src $lang_ds \
$select_data.clean \
1 1000
fi;
if [[ $nods == True ]]; then
ln -srf $select_data.clean.$bilingual_lang_src $ds_dir/bilingual.$bilingual_lang_src
ln -srf $select_data.clean.$lang_ds $ds_dir/bilingual.$lang_ds
else
ln -srf $select_data.clean.$bilingual_lang_src $ds_dir/$lang_pos.$bilingual_lang_src
ln -srf $select_data.clean.$lang_ds $ds_dir/$lang_pos.$lang_ds
fi;
| true |
e31bbd07f071e2bbf3f153127b7d1f58b1ae0f0c | Shell | dusharu/my_scripts | /bash/mysql/check_slave/check_slave.sh | UTF-8 | 3,485 | 3.671875 | 4 | [
"MIT"
] | permissive | #!/bin/bash
################################################################################
# #
# Check Status mysql SLAVE #
# #
# mailto:dusharu17@gmail.com #
# 2018.06.19 #
################################################################################
######################### VAR #########################
DB_MASTER_FQDN="<mysql.example.com>"
FILE_SLAVE_PROBLEM='/tmp/slave_monitoring.txt'
############### EXIT CODE
ERROR_CANT_CONNECT_TO_MYSQL=100
ERROR_CANT_READ_MYCNF=103
ERROR_CANT_GET_INTERFACES_HOST=104
ERROR_CANT_GET_IP_HOST=105
ERROR_CANT_GET_IP_DB_MASTER=106
ERROR_SLAVE_BROKEN=107
ERROR_SLAVE_FALLS_BEHIND_MASTER=108
ERROR_RUN_ON_MASTER=0
ERROR_UNKNOWN=255
######################### FUNCTION #########################
function SendEmail {
EMAIL="<admin_email>"
BOT_EMAIL="<bot_email>"
echo -e "$@" | mail -a "From: $BOT_EMAIL" -s "$(hostname):$0" $EMAIL
}
function CheckErrorCode {
# $1 - Error code
# $2 - Text for mail
# $3 - Exit with ERROR CODE
# NEED:
# $ERROR_UNKNOWN, $FILE_SLAVE_PROBLEM
if [[ $1 -ne 0 ]]; then
if ! grep "$2" "$FILE_SLAVE_PROBLEM" > /dev/null 2>&1 ; then
echo "$2" >> "$FILE_SLAVE_PROBLEM"
SendEmail "$2"
fi
if [[ -z $3 ]]; then
exit $ERROR_UNKNOWN
else
exit "$3"
fi
else
if grep "$2" "$FILE_SLAVE_PROBLEM" > /dev/null 2>&1 ; then
DEL_TEMPLATE="$(echo "$2" |\
sed -e 's#"#\\"#g' |\
sed -e 's#\\#\\\\#g' |\
sed -e 's#/#\\/#g')"
sed -i -e "/^$DEL_TEMPLATE/d" "$FILE_SLAVE_PROBLEM"
SendEmail "PROBLEM RESOLVE: $2"
fi
fi
}
######################### MAIN #########################
############### Check
if [[ ! -r /root/.my.cnf ]]; then
CheckErrorCode 1 "Can't read /root/.my.cnf.Exit." $ERROR_CANT_READ_MYCNF
else
CheckErrorCode 0 "Can't read /root/.my.cnf.Exit." $ERROR_CANT_READ_MYCNF
fi
IP_INTERFACES="$(ip ro |grep -m1 default |awk '{print $5}')"
CheckErrorCode $? "Can't get default route inetrfaces from Host" $ERROR_CANT_GET_INTERFACES_HOST
IP_HOST="$(ip -4 add show dev "${IP_INTERFACES}" | grep -m1 inet |awk '{print $2}' |sed -e 's#/.*$##g' )"
CheckErrorCode $? "Can't get IP from Host" $ERROR_CANT_GET_IP_HOST
IP_DB_MASTER=$(host ${DB_MASTER_FQDN} |awk '{print $4}')
CheckErrorCode $? "Can't get IP master DB" $ERROR_CANT_GET_IP_DB_MASTER
if [[ "${IP_HOST}" == "${IP_DB_MASTER}" ]]; then
exit $ERROR_RUN_ON_MASTER
fi
MYSQL_SLAVE_STATUS="$(mysql -e 'SHOW SLAVE STATUS\G')"
CheckErrorCode $? "can't connect to mysql" $ERROR_CANT_CONNECT_TO_MYSQL
CHECK_SLAVE_DOUBLE_YES=$( echo "${MYSQL_SLAVE_STATUS}" |grep -c -e "Slave_IO_Running: Yes" -e "Slave_SQL_Running: Yes")
if [[ $CHECK_SLAVE_DOUBLE_YES -ne 2 ]]; then
CheckErrorCode 1 "$0 - SLAVE broken." $ERROR_SLAVE_BROKEN
else
CheckErrorCode 0 "$0 - SLAVE broken." $ERROR_SLAVE_BROKEN
fi
SECOND_BEHIND_MASTER=$(echo "${MYSQL_SLAVE_STATUS}"| awk '/Seconds_Behind_Master:/ {print $2}')
if [[ $SECOND_BEHIND_MASTER -ge 3600 ]]; then
CheckErrorCode 1 "SLAVE BEHIND MASTER >= 1Hour(3600sec)." $ERROR_SLAVE_FALLS_BEHIND_MASTER
else
CheckErrorCode 0 "SLAVE BEHIND MASTER >= 1Hour(3600sec)." $ERROR_SLAVE_FALLS_BEHIND_MASTER
fi
| true |
7730c2cf99e0345a2c8b9bfa526e8d5b5e3187bb | Shell | ChieftainY2k/media-shrinker | /encode_264.sh | UTF-8 | 3,325 | 3.40625 | 3 | [] | no_license | #!/bin/bash
set -e
#ENCODER=264
INPUT=$1
CRF=$2 # lower values = better quality
ENCODER=$3
PRESET=slower
#set defaults
if [[ "$CRF" == '' ]]; then
CRF=23
fi
if [[ "$ENCODER" == '' ]]; then
ENCODER=264
fi
OUTPUT="/cygdrive/c/tmp/ffmpeg-output/$INPUT.$ENCODER.$CRF.mp4"
DIR=$(dirname "$OUTPUT")
OUTPUT_FILE=$(basename "$OUTPUT")
OUTPUT_TMP=/tmp/${OUTPUT_FILE}
REGEX_PATTERN='[.](264|265)[.][0-9]{2}[.]mp4$'
if [[ "$INPUT" =~ $REGEX_PATTERN ]]
then
echo "---------------------------------------------------------------------------------"
echo "[`date \"+%Y-%m-%d %H:%M:%S\"`] *** Input file $INPUT is already converted, skipping."
exit
fi
if [[ ! -f "$OUTPUT" ]]; then
echo -n cygpath -w \"${INPUT}\" > /tmp/command.sh
chmod +x /tmp/command.sh
INPUT_WIN=`/tmp/command.sh`
#echo INPUT_WIN = $INPUT_WIN
echo -n cygpath -w \"${OUTPUT}\" > /tmp/command.sh
OUTPUT_WIN=`/tmp/command.sh`
#echo OUTPUT_WIN = $OUTPUT_WIN
echo -n cygpath -w \"${OUTPUT_TMP}\" > /tmp/command.sh
OUTPUT_TMP_WIN=`/tmp/command.sh`
#echo OUTPUT_TMP_WIN = $OUTPUT_TMP_WIN
unlink /tmp/command.sh
#exit
echo "---------------------------------------------------------------------------------"
echo "[`date \"+%Y-%m-%d %H:%M:%S\"`] *** Converting $INPUT_WIN to $OUTPUT_WIN"
# x265
#nice -n 20 /cygdrive/c/Program\ Files/ffmpeg/bin/ffmpeg.exe -y -i "$INPUT_WIN" -c:v libx265 -preset $PRESET -x265-params crf=$CRF -c:a aac -strict experimental -b:a 128k -f mp4 "$OUTPUT_TMP_WIN"
# x264
#nice -n 20 /cygdrive/c/Program\ Files/ffmpeg/bin/ffmpeg.exe -y -i "$INPUT_WIN" -c:v libx264 -preset $PRESET -x264-params crf=$CRF -c:a aac -strict experimental -b:a 128k -f mp4 "$OUTPUT_TMP_WIN"
#encode
nice -n 20 /cygdrive/c/Program\ Files/ffmpeg/bin/ffmpeg.exe \
-y -i "$INPUT_WIN" \
-c:v libx$ENCODER \
-preset $PRESET \
-x$ENCODER-params "crf=$CRF" \
-c:a aac \
-strict experimental \
-b:a 128k \
-f mp4 \
-max_muxing_queue_size 1024 \
"$OUTPUT_TMP_WIN"
# #encode and force FPS div by 4, no audio
# nice -n 20 /cygdrive/c/Program\ Files/ffmpeg/bin/ffmpeg.exe \
# -y -i "$INPUT_WIN" \
# -vf "setpts=4*PTS" \
# -an \
# -c:v libx${ENCODER} \
# -preset ${PRESET} \
# -x${ENCODER}-params "crf=$CRF" \
# -strict experimental \
# -f mp4 \
# -max_muxing_queue_size 1024 \
# "$OUTPUT_TMP_WIN"
# -filter_complex "[0:v]setpts=4*PTS[v];[0:a]atempo=0.5,atempo=0.5[a]" -map "[v]" -map "[a]" \
# -c:a aac \
# -b:a 128k \
# -vf "setpts=4*PTS" \
# -an \
#check ffmpeg exit code
EXITCODE=$?
if [[ ${EXITCODE} -ne 0 ]]; then
echo "[`date \"+%Y-%m-%d %H:%M:%S\"`] *** FATAL ERROR: ffmpeg exit code is $EXITCODE, which means an error for file '$INPUT_WIN'"
unlink ${OUTPUT_TMP}
exit 255
fi
mkdir -p "$DIR"
EXITCODE=$?
if [[ ${EXITCODE} -ne 0 ]]; then
echo "[`date \"+%Y-%m-%d %H:%M:%S\"`] *** FATAL ERROR: Cannot create directory $DIR"
exit 255
fi
mv "$OUTPUT_TMP" "$OUTPUT"
EXITCODE=$?
if [[ ${EXITCODE} -ne 0 ]]; then
echo "[`date \"+%Y-%m-%d %H:%M:%S\"`] *** FATAL ERROR: Cannot move '$OUTPUT_TMP' to '$OUTPUT'"
exit 255
fi
echo "[`date \"+%Y-%m-%d %H:%M:%S\"`] *** SUCCESS, conversion completed."
else
echo "---------------------------------------------------------------------------------"
echo "[`date \"+%Y-%m-%d %H:%M:%S\"`] *** Output file exists, skipping $INPUT"
fi
# sleep 3
| true |
634d8da26cbfc79b512ca61f20246ed6e2f21a14 | Shell | conda-forge/umock-c-feedstock | /recipe/build.sh | UTF-8 | 341 | 2.53125 | 3 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | permissive | #!/usr/bin/env bash
set -e
mkdir -p build
cd build
cmake \
-D CMAKE_INSTALL_PREFIX=${PREFIX} \
-D CMAKE_PREFIX_PATH=${PREFIX} \
-D CMAKE_BUILD_TYPE=Release \
-D CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP=True \
-D BUILD_SHARED_LIBS=ON \
-D use_installed_dependencies=ON \
${SRC_DIR}
make -j$CPU_COUNT
make install
| true |
2de94899f96f35ddb93422295f630e53ec8c4829 | Shell | jlangston/sourcegraph | /dev/ci/ci-db-backcompat.sh | UTF-8 | 1,647 | 3.859375 | 4 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | #!/bin/bash
#
# This is wrapper that runs the DB schema backcompat test (db-backcompat.sh) in the CI environment.
#
# It finds the last migration by listing the migration SQL files (alphabetical order corresponds to
# chronological order), then finds the commit in which those SQL files were added. It then uses the
# commit immediately before that commit to run the DB unit tests against the *present* schema.
cd $(dirname "${BASH_SOURCE[0]}")/../..
HEAD=$(git symbolic-ref --short HEAD || git rev-parse HEAD)
if [ -z "$HEAD" ]; then
echo 'Could not set $HEAD to current revision'
exit 1
fi
cat <<EOF
Running ci-db-backcompat.sh with the following parameters:
HEAD: $HEAD
git rev-parse HEAD: $(git rev-parse HEAD)
git rev-parse --abbrev-ref HEAD: $(git rev-parse --abbrev-ref HEAD)
EOF
LAST_MIGRATION=$(ls -1 ./migrations/*.up.sql | cut -d'_' -f 1 | cut -d'/' -f 3 | sort -n | tail -n1)
COMMIT_OF_LAST_MIGRATION=$(git log --pretty=format:"%H" "./migrations/${LAST_MIGRATION}"* | tail -n1)
COMMIT_BEFORE_LAST_MIGRATION=$(git log -n1 --pretty=format:"%H" "${COMMIT_OF_LAST_MIGRATION}"^)
echo "Last migration was ${LAST_MIGRATION}, added in ${COMMIT_OF_LAST_MIGRATION}."
echo "Testing current schema ${LAST_MIGRATION}, with tests at ${COMMIT_BEFORE_LAST_MIGRATION}."
echo ""
git log -n2 --stat "${COMMIT_OF_LAST_MIGRATION}" | sed 's/^/ /'
echo ""
# Recreate the test DB and run TestMigrations once to ensure that the schema version is the latest.
set -ex
go test -count=1 -v ./cmd/frontend/db/ -run=TestMigrations
HEAD="$HEAD" OLD="${COMMIT_BEFORE_LAST_MIGRATION}" ./dev/ci/db-backcompat.sh
set +ex
echo "SUCCESS"
| true |
5bb20503507508b4201021c933daed3e52dbe5fa | Shell | ssanj/babyloncandle | /new-post.sh | UTF-8 | 188 | 3.390625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
if [ $# -eq 0 ]; then
echo "usage: new-post title"
exit 1
fi
POST_NAME=$(echo $@ | tr ' ' '-')
CURRENT_DATE=$(date '+%Y-%m-%d')
touch "posts/$CURRENT_DATE-$POST_NAME.md"
| true |
09f69bf861f879812709ec108dc3351421863061 | Shell | GM-990/enigma2-plugin-systemplugins-imagemanager | /src/bc_ru.sh | UTF-8 | 3,273 | 3.71875 | 4 | [] | no_license | #!/bin/sh
LABEL=$1
DIR_FROM=$2
DIRECTORY=/hdd
DATE=`date +%Y%m%d`
MKFS=/sbin/mkfs.jffs2
BACKUPIMAGE="e2jffs2.img"
BACKUPTAR="$1.tar"
BACKUPTARGZ="$1.tar.gz"
ISAVAILABLE=`mount | grep hdd`
if [ -z "$ISAVAILABLE" ]; then
echo "Try to mount sda1 to /media/hdd"
mount /dev/sda1 /media/hdd
ISAVAILABLE=`mount | grep sda1`
fi
if [ ! -z "$ISAVAILABLE" ]; then
if grep -qs 'spark' /proc/stb/info/model ; then
BOXTYPE=SPARK
OPTIONS="-e 0x20000 -n"
else
echo "Box not found !!!"
exit 0
fi
echo "Обнаружена платформа "$BOXTYPE
echo ""
if [ ! -f $MKFS ] ; then
echo $MKFS" не найден!"
exit 0
fi
rm -rf "$DIRECTORY/tmp/root"
mkdir -p "$DIRECTORY/tmp/root"
if [ ! -e "$DIRECTORY/enigma2-$DATE-$LABEL-$3" ]; then
mkdir -p "$DIRECTORY/enigma2-$DATE-$LABEL-$3"
fi
if [[ "$LABEL" = "NAND" ]] ; then
mount -t jffs2 /dev/mtdblock6 "$DIRECTORY/tmp/root"
else
mount "$DIR_FROM" "$DIRECTORY/tmp/root"
fi
echo "Раздел $LABEL примонтирован"
echo ""
if [ ! -f "$DIRECTORY/tmp/root/boot/uImage" ] ; then
echo "отсутствует uImage!"
echo "для создания корректного архива"
echo "положите uImage в папку /boot"
cd /root
umount "$DIRECTORY/tmp/root"
exit 0
fi
if [[ "$4" = "YES" ]]; then
mv "$DIRECTORY/tmp/root/etc/enigma2/settings" "$DIRECTORY/tmp"
fi
if [[ "$3" = "IMG" ]]; then
BACKUP=$BACKUPIMAGE
echo "Пожалуйста подождите, копируется uImage"
echo ""
cp "$DIRECTORY/tmp/root/boot/uImage" "$DIRECTORY/enigma2-$DATE-$LABEL-$3/uImage"
echo "Пожалуйста подождите, создается "$BACKUP
$MKFS --root="$DIRECTORY/tmp/root" --faketime --output="$DIRECTORY/tmp/$BACKUP" $OPTIONS
elif [[ "$3" = "TAR" -o "$3" = "TARGZ" ]]; then
BACKUP=$BACKUPTAR
echo "Пожалуйста подождите, создается "$BACKUPTAR
cd "$DIRECTORY/tmp/root"
tar -cf "$DIRECTORY/tmp/$BACKUP" *
if [[ "$3" = "TARGZ" ]]; then
echo "Пожалуйста подождите, создается "$BACKUPTARGZ
cd "$DIRECTORY/tmp"
gzip "$BACKUP"
BACKUP=$BACKUPTARGZ
fi
fi
if [[ "$4" = "YES" ]]; then
mv "$DIRECTORY/tmp/settings" "$DIRECTORY/tmp/root/etc/enigma2"
fi
echo ""
echo "Пожалуйста подождите, копируется $BACKUP"
echo ""
mv "$DIRECTORY/tmp/$BACKUP" "$DIRECTORY/enigma2-$DATE-$LABEL-$3/"
if [ -f "$DIRECTORY/enigma2-$DATE-$LABEL-$3/$BACKUP" ] ; then
echo "***********************************************************************"
echo "Ваш архив $BACKUP находится в: $DIRECTORY/enigma2-$DATE-$LABEL-$3"
echo "***********************************************************************"
else
echo "******************************"
echo " Увы, произошла ошибка! "
echo "******************************"
fi
cd /root
sync
umount "$DIRECTORY/tmp/root"
echo "Раздел $LABEL отмонтирован."
echo ""
rm -rf "$DIRECTORY/tmp"
else
echo "USB-Флешка не обнаружена!"
exit 0
fi
exit
| true |
363dc930343a4f8b9d38ff700833c8cbc463bfee | Shell | silky/toysolver | /build_bdist_win32.sh | UTF-8 | 1,340 | 2.90625 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
sudo apt-get update
sudo apt-get install wine wget cabal-install
wget https://www.haskell.org/platform/download/2014.2.0.0/HaskellPlatform-2014.2.0.0-i386-setup.exe
wine HaskellPlatform-2014.2.0.0-i386-setup.exe
# https://plus.google.com/+MasahiroSakai/posts/RTXUt5MkVPt
#wine cabal update
cabal update
cp -a ~/.cabal/packages ~/.wine/drive_c/users/`whoami`/Application\ Data/cabal/
wine cabal sandbox init
wine cabal install --only-dependencies --flag=BuildToyFMF --flag=BuildSamplePrograms --flag=BuildMiscPrograms
wine cabal configure --flag=BuildToyFMF --flag=BuildSamplePrograms --flag=BuildMiscPrograms
wine cabal build
VER=`wine ghc -e ":m + Control.Monad Distribution.Package Distribution.PackageDescription Distribution.PackageDescription.Parse Distribution.Verbosity Data.Version System.IO" -e "hSetBinaryMode stdout True" -e 'putStrLn =<< liftM (showVersion . pkgVersion . package . packageDescription) (readPackageDescription silent "toysolver.cabal")'`
PKG=toysolver-$VER-win32
rm -r $PKG
mkdir $PKG
cp dist/build/htc/htc.exe dist/build/knapsack/knapsack.exe dist/build/lpconvert/lpconvert.exe dist/build/nqueens/nqueens.exe dist/build/pbconvert/pbconvert.exe dist/build/sudoku/sudoku.exe dist/build/toyfmf/toyfmf.exe dist/build/toysat/toysat.exe dist/build/ToySolver/toysolver.exe $PKG/
zip -r $PKG.zip $PKG
| true |
cb3eaacce407c3ddbac83580f3b6dd650f70b8c5 | Shell | indigo-dc/flaat | /examples/life-test.sh | UTF-8 | 2,631 | 2.515625 | 3 | [
"MIT"
] | permissive | # #!/bin/bash
#PORTS="8081 8082 8083"
PORTS="8081"
#
echo -e "---------------------------------------------------------------\n/valid_user"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/valid_user "Authorization: Bearer `oidc-token deep`"
done
#
echo -e "---------------------------------------------------------------\n/valid_user_2"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/valid_user_2 "Authorization: Bearer `oidc-token egi`"
done
#
echo -e "---------------------------------------------------------------\n/group_test_kit"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/group_test_kit "Authorization: Bearer `oidc-token kit`"
echo -e "\nshould fail: "
http http://localhost:$PORT/group_test_kit "Authorization: Bearer `oidc-token egi`"
done
#
echo -e "---------------------------------------------------------------\n/group_test_iam"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/group_test_iam "Authorization: Bearer `oidc-token deep`"
echo -e "\nshould fail: "
http http://localhost:$PORT/group_test_iam "Authorization: Bearer `oidc-token kit`"
done
#
echo -e "---------------------------------------------------------------\n/group_test_hdf"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/group_test_hdf "Authorization: Bearer `oidc-token login`"
echo -e "\nshould fail: "
http http://localhost:$PORT/group_test_hdf "Authorization: Bearer `oidc-token kit`"
done
#
echo -e "---------------------------------------------------------------\n/group_test_hdf2"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/group_test_hdf2 "Authorization: Bearer `oidc-token login`"
echo -e "\nshould fail: "
http http://localhost:$PORT/group_test_hdf2 "Authorization: Bearer `oidc-token egi`"
done
#
echo -e "---------------------------------------------------------------\n/group_test_hdf3"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/group_test_hdf3 "Authorization: Bearer `oidc-token login`"
echo -e "\nshould fail: "
http http://localhost:$PORT/group_test_hdf3 "Authorization: Bearer `oidc-token egi`"
done
echo -e "---------------------------------------------------------------\n/role_test_egi"
for PORT in $PORTS; do
echo -e "\n $PORT"
http http://localhost:$PORT/role_test_egi "Authorization: Bearer `oidc-token egi`"
echo -e "\nshould fail: "
http http://localhost:$PORT/role_test_egi "Authorization: Bearer `oidc-token login`"
done
| true |
e3f2551db1150dc7b06328e5ecb9d07bec9843e4 | Shell | Antergos/antergos-packages | /antergos/b43-firmware/PKGBUILD | UTF-8 | 750 | 2.59375 | 3 | [] | no_license | # Maintainer: Antergos Developers <dev(at)antergos.com>
# Contributor: Xavion <Xavion (dot) 0 (at) Gmail (dot) com>
pkgname=b43-firmware
pkgver=6.30.163.46
pkgrel=8
pkgdesc="Firmware for Broadcom B43 wireless networking chips - latest release"
arch=("any")
url="https://wireless.wiki.kernel.org/en/users/Drivers/b43"
license=("unknown")
depends=("linux>=3.2")
makedepends=("b43-fwcutter>=018")
conflicts=(b43-firmware-classic)
options=(!emptydirs)
source=(http://www.lwfinger.com/${pkgname}/broadcom-wl-${pkgver}.tar.bz2)
sha1sums=('237d29a7701429054f5c82c000ef2d9aa6f2c3db')
package() {
cd "${srcdir}"
# Directories
install -d "${pkgdir}"/usr/lib/firmware/
# Application
b43-fwcutter -w "${pkgdir}"/usr/lib/firmware/ broadcom-wl-${pkgver}.wl_apsta.o
}
| true |
7941e1bba9b1341037a9211ca51f1c4469fc5e1e | Shell | annoyatron255/dotfiles | /.zshrc | UTF-8 | 8,813 | 3.15625 | 3 | [] | no_license | # If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:/usr/local/bin:$PATH
# Path to your oh-my-zsh installation.
export ZSH="/home/jack/.oh-my-zsh"
# Set name of the theme to load --- if set to "random", it will
# load a random theme each time oh-my-zsh is loaded, in which case,
# to know which specific one was loaded, run: echo $RANDOM_THEME
# See https://github.com/robbyrussell/oh-my-zsh/wiki/Themes
ZSH_THEME="robbyrussell"
# Set list of themes to pick from when loading at random
# Setting this variable when ZSH_THEME=random will cause zsh to load
# a theme from this variable instead of looking in ~/.oh-my-zsh/themes/
# If set to an empty array, this variable will have no effect.
# ZSH_THEME_RANDOM_CANDIDATES=( "robbyrussell" "agnoster" )
# Uncomment the following line to use case-sensitive completion.
# CASE_SENSITIVE="true"
# Uncomment the following line to use hyphen-insensitive completion.
# Case-sensitive completion must be off. _ and - will be interchangeable.
# HYPHEN_INSENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to automatically update without prompting.
# DISABLE_UPDATE_PROMPT="true"
# Uncomment the following line to change how often to auto-update (in days).
# export UPDATE_ZSH_DAYS=13
# Uncomment the following line if pasting URLs and other text is messed up.
# DISABLE_MAGIC_FUNCTIONS=true
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to enable command auto-correction.
# ENABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
# COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
# DISABLE_UNTRACKED_FILES_DIRTY="true"
# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# You can set one of the optional three formats:
# "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
# or set a custom format using the strftime function format specifications,
# see 'man strftime' for details.
# HIST_STAMPS="mm/dd/yyyy"
# Would you like to use another custom folder than $ZSH/custom?
# ZSH_CUSTOM=/path/to/new-custom-folder
# Which plugins would you like to load?
# Standard plugins can be found in ~/.oh-my-zsh/plugins/*
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
plugins=(
git
fzf
jump
zsh-syntax-highlighting
)
source $ZSH/oh-my-zsh.sh
if [ -n "$WINDOWID" ]; then
mkdir -p /tmp/urxvtc_ids/
echo $$ > /tmp/urxvtc_ids/$WINDOWID
fi
# User configuration
function yta() {
PLAYLIST=~/.config/mpd/playlists/yt-playlist.m3u
printf "#EXTM3U\n#EXTINF:" > $PLAYLIST
youtube-dl -f bestaudio -j ytsearch:"$*" | jq -cMr '(.duration | tostring) + "," + .title' >> $PLAYLIST
youtube-dl -f bestaudio -g ytsearch:"$*" >> $PLAYLIST
mpc load $(basename -s .m3u $PLAYLIST)
mpc play $(mpc playlist | wc -l)
}
function yt() {
mpv ytdl://ytsearch:"$*"
}
function m() {
if [ $# -eq 0 ]
then
TRACK="$(mpc listall -f "%file%\t%title%\t%artist%\t%album%" | fzf -d '\t' --with-nth=2,3,4 | head -n 1 | sed "s/\t.*//")"
else
TRACK="$(mpc listall -f "%file%\t%title%\t%artist%\t%album%" | fzf -f "$*" | head -n 1 | sed "s/\t.*//")"
fi
if $(mpc playlist -f "%file%" | grep -Fxq "$TRACK")
then
mpc play $(mpc playlist -f "%file%" | grep -nFx "$TRACK" | sed "s/:.*//" | head -n 1)
else
mpc add "$TRACK"
mpc play $(mpc playlist | wc -l)
fi
}
function o() {
if [ $# -eq 0 ]
then
FILE=$(fzf)
else
FILE=$(fzf -f "$*" | head -n 1)
fi
#xdg-open $FILE < /dev/null > /dev/null 2>&1 & disown
setsid xdg-open $FILE 1>&- 2>&- 0<&-
sleep 1
}
function shader() {
SHADER_PATH="$HOME/Code/compton-shaders/"
if [ $# -eq 0 ]
then
SHADER="$(find $SHADER_PATH -type f -iname "*.glsl" | fzf --delimiter / --with-nth -1 | head -n 1)"
else
SHADER="$(find $SHADER_PATH -type f -iname "*.glsl" | fzf -f "$*" | head -n 1)"
fi
if [ -n "$SHADER" ]
then
killall picom
while killall -0 picom
do
sleep 1
done
picom -b --backend glx --force-win-blend --use-damage --glx-fshader-win "$(cat "$SHADER")"
fi
}
function notes() {
if [ $# -eq 0 ]
then
DATE="$(date "+%Y-%m-%d")"
FOLDER="$DATE"
FILENAME="notes.tex"
else
FOLDER="$*"
FILENAME="$*.tex"
fi
index=0
while [ -d "$FOLDER" ]; do
printf -v FOLDER -- '%s_%01d' "$DATE" "$((++index))"
done
mkdir $FOLDER
cd $FOLDER
cp ~/.vim/templates/latexmkrc ~/.vim/templates/preamble.tex ./
cp ~/.vim/templates/notes.tex "./$FILENAME"
sed -i "s/DATE/$(date "+%B %-d, %Y")/g" "./$FILENAME"
SUBJECT=$(basename "$(dirname "$(dirname "$(pwd)")")" | sed -e "s/\([A-Z]\)\([0-9]\)/\1 \2/g")
sed -i "s/SUBJECT/$SUBJECT/g" "./$FILENAME"
TYPE=$(basename "$(dirname "$(pwd)")")
case $TYPE in
hw)
TYPE="Homework $(basename "$(pwd)" | sed "s/[^0-9]//g")"
;;
notes)
TYPE="Notes"
;;
labs)
TYPE="Lab $(basename "$(pwd)" | sed "s/[^0-9]//g")"
;;
quizzes)
TYPE="Quiz $(basename "$(pwd)" | sed "s/[^0-9]//g") "
;;
exams)
TYPE="Exam $(basename "$(pwd)" | sed "s/[^0-9]//g")"
;;
esac
sed -i "s/TYPE/$TYPE/g" "./$FILENAME"
nvim +11 +VimtexCompile "./$FILENAME"
}
function t() {
if [ $# -eq 0 ]
then
zsh
else
$@
fi
}
function manpdf() {
zathura <(man -Tpdf $*) & disown
}
function za() {
zathura $* & disown
}
function xyzzy() {
echo "Nothing happens."
}
function tx() {
LATEX_DIR=/tmp/latex_temp
mkdir -p $LATEX_DIR
if [[ "$*" != *"edit"* ]]
then
echo -e "\\\\begin{align*}\n\t\n\\\\end{align*}" > $LATEX_DIR/latex_input.tex
fi
nvim +2 +"call vimtex#syntax#p#amsmath#load()" $LATEX_DIR/latex_input.tex
echo -E "${$(<$HOME/.vim/templates/shortdoc.tex)//CONTENTS/$(<$LATEX_DIR/latex_input.tex)}" > $LATEX_DIR/latex.tex
( cd $LATEX_DIR ; pdflatex $LATEX_DIR/latex.tex )
pdfcrop --margins 12 $LATEX_DIR/latex.pdf $LATEX_DIR/latex.pdf
pdf2svg $LATEX_DIR/latex.pdf $LATEX_DIR/latex.svg
pdftoppm $LATEX_DIR/latex.pdf $LATEX_DIR/latex -png -f 1 -singlefile -rx 600 -ry 600
if [[ "$*" == *"svg"* ]]
then
nohup xclip -selection clipboard -target image/x-inkscape-svg -i $LATEX_DIR/latex.svg 1>&- 2>&- 0<&-
else
nohup xclip -selection clipboard -target image/png -i $LATEX_DIR/latex.png 1>&- 2>&- 0<&-
fi
}
function vimbuffer() {
# Paste scrollback to print_file. Terminal specific.
xdotool key --window $WINDOWID ctrl+Print
local print_file="/tmp/urxvt_screen"
local written_file="/tmp/urxvt_buffer.sh"
local prompt_string="$(print -P "$PS1" | sed 's/\x1b\[[0-9;]*m//g')"
# Remove trailing newlines
printf '%s\n' "$(cat "$print_file")" > "$written_file"
# Remove last lines of buffer
tail -n $(tac "$written_file" | grep -nm1 "$prompt_string" | cut -d : -f 1) \
"$written_file" | wc -c | xargs -I {} truncate "$written_file" -s -{}
local scrollback_line_length=$(( $(wc -l < "$written_file") + 1 ))
echo "$prompt_string$PREBUFFER$BUFFER" >> "$written_file"
local byte_offset=$(( ${#PREBUFFER//$'\n'/} + ${#LBUFFER//$'\n'/} + \
$(printf "%s" "$prompt_string" | wc -m) ))
nvim "+${scrollback_line_length}" "+normal ${byte_offset} " -- \
"$written_file" </dev/tty
print -Rz - "$(tail -n $(tac "$written_file" | grep -nm1 "$prompt_string" \
| cut -d : -f 1) "$written_file" | tail -c +$(( $(printf "%s" \
"$prompt_string" | wc -c) + 1 )))"
rm "$written_file"
zle send-break
}
# Register vimbuffer as a zle widget and bind it to Ctrl-P.
zle -N vimbuffer
bindkey '^P' vimbuffer
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
# export EDITOR='vim'
# else
# export EDITOR='mvim'
# fi
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
# alias zshconfig="mate ~/.zshrc"
# alias ohmyzsh="mate ~/.oh-my-zsh"
# ls and friends: print file names literally (no quoting of spaces).
export QUOTING_STYLE=literal
# Keep a large shell history.
export SAVEHIST=1000000
#alias za="zathura"
# Make a directory named after today's date, e.g. 2024-01-31.
alias tdir='mkdir $(date "+%Y-%m-%d")'
alias j="jump"
alias feh="feh --scale-down --auto-zoom --auto-rotate --image-bg \"#000100\""
# Bare-repo dotfiles management: git with the work tree rooted at $HOME.
alias dotfiles='/usr/bin/git --git-dir=$HOME/.dotfiles_git/ --work-tree=$HOME'
alias vim="nvim"
| true |
dec0117ece938101436210b871c1efb910ad59a8 | Shell | cwiggins/dot_files | /xinitrc | UTF-8 | 1,154 | 2.5625 | 3 | [] | no_license | #!/bin/sh
#
# ~/.xinitrc
#
# Executed by startx (run your window manager from here)

# Source all the scripts in /etc/X11/xinit/xinitrc.d/
if [ -d /etc/X11/xinit/xinitrc.d ]; then
  for f in /etc/X11/xinit/xinitrc.d/*; do
    [ -x "$f" ] && . "$f"
  done
  unset f
fi

# gnome keyring settings
# FIX: the whole command line was wrapped in single quotes inside $(), which
# made the shell look for a program literally named
# "/usr/bin/gnome-keyring-daemon --start ..." (spaces included) — the daemon
# never started. Also removed the doubled comma in the component list.
eval $(/usr/bin/gnome-keyring-daemon --start --components=gpg,pkcs11,secrets,ssh)
export SSH_AUTH_SOCK
export GPG_AGENT_INFO
export GNOME_KEYRING_CONTROL
# FIX: was "GNONE_KEYRING_PID" (typo), which exported a never-set name.
export GNOME_KEYRING_PID

# urxvtd settings (daemon: quiet, open display, fork)
/usr/bin/urxvtd -q -o -f

# clipboard syncing between PRIMARY and CLIPBOARD selections
/usr/bin/autocutsel -fork &
/usr/bin/autocutsel -selection CLIPBOARD -fork &

# settings for SCIM input method
export XMODIFIERS=SCIM
export GTK_IM_MODULE="scim"
export QT_IM_MODULE="scim"
scim -d &

# set cursor name
xsetroot -cursor_name left_ptr &
# Hide cursor after 5 idle seconds
unclutter -noevents -idle 5 &
# piping conky to dzen2
/home/curtiss/scripts/dzen2-personalized &
# start compositing manager
xcompmgr &
# wallpaper
# nitrogen --restore &
# autostart
#/home/curtiss/scripts/autostart &
#mount maildir
#/home/curtiss/scripts/mount_maildir &
#sync hwclock once with ntp
hwclock -w
# start nm-applet
nm-applet &

# Hand the session over to the window manager (must be last, not backgrounded).
exec dwm
| true |
6114ffd82302105697ddfe300dbf9970ac2c21f9 | Shell | dellelce/mkit | /modules/readline/build.sh | UTF-8 | 497 | 3.25 | 3 | [] | no_license | #
# readline
#
# Build GNU readline via the generic autoconf driver.
# On Alpine Linux the shared-library Makefile does not link against ncurses,
# so patch SHLIB_LIBS in shlib/Makefile.in before configuring.
# Globals (set by the surrounding build system — TODO confirm):
#   srcdir_readline : unpacked readline source directory
#   prefix          : installation prefix (ncurses lives in $prefix/lib)
# Relies on build_gnuconf, defined elsewhere in this module set.
build_readline()
{
 [ -f "/etc/alpine-release" -a -f "$srcdir_readline/shlib/Makefile.in" ] &&
 {
   rlmk="$srcdir_readline/shlib/Makefile.in"
   # Backslash-escaped slash survives because ':' is the sed delimiter below.
   typeset ncurses="-L${prefix}\/lib -lncurses"
   # Append the ncurses link flags to the @SHLIB_LIBS@ substitution.
   sed -i -e "s:SHLIB_LIBS = @SHLIB_LIBS@:SHLIB_LIBS = @SHLIB_LIBS@ ${ncurses}:" $rlmk
   #ls -lt $rlmk
   # commenting until a proper option for debugging is added
   #echo "Debug: lib in install target"
   #ls -lt "$prefix/lib/"
 }
 # Standard configure/make/install flow; propagate its exit status.
 build_gnuconf readline $srcdir_readline
 return $?
}
| true |
b5f9b57831f815719224b2138cb6c67efdfbb8c9 | Shell | thu-cs-lab/tanlabs-speed-tester | /petalinux/project-spec/meta-user/recipes-apps/serial-config/files/prog_artix.sh | UTF-8 | 752 | 2.875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Program an Artix FPGA over GPIO-driven slave-serial: pulse PROG_B low,
# stream the bitstream ($1) with serial-config, then report INIT_B/DONE.
prog_b_pin=960
init_b_pin=$((prog_b_pin + 1))
done_pin=$((prog_b_pin + 2))

# Export the three GPIOs to sysfs if they are not already exported.
[ -d /sys/class/gpio/gpio$prog_b_pin ] || echo $prog_b_pin >/sys/class/gpio/export
[ -d /sys/class/gpio/gpio$init_b_pin ] || echo $init_b_pin >/sys/class/gpio/export
[ -d /sys/class/gpio/gpio$done_pin ] || echo $done_pin >/sys/class/gpio/export

# Pulse PROG_B low to reset the FPGA configuration logic.
echo out >/sys/class/gpio/gpio$prog_b_pin/direction
echo 0 >/sys/class/gpio/gpio$prog_b_pin/value
echo "init_b:" $(cat /sys/class/gpio/gpio$init_b_pin/value)
echo 1 >/sys/class/gpio/gpio$prog_b_pin/value
echo "init_b:" $(cat /sys/class/gpio/gpio$init_b_pin/value)

# FIX: quote "$1" so bitstream paths with spaces survive word-splitting.
[ -f "$1" ] && time serial-config "$1"

# INIT_B should stay high and DONE go high after a successful load.
echo "init_b:" $(cat /sys/class/gpio/gpio$init_b_pin/value)
echo "done:" $(cat /sys/class/gpio/gpio$done_pin/value)
| true |
7a98345bc75dc2b6f921c87fc4df282378471f47 | Shell | dodikk/sumsubstance-xamarin | /scripts/ios-make-fat-binaries.sh | UTF-8 | 2,505 | 2.71875 | 3 | [
"Apache-2.0"
] | permissive | ## ISO8601DateFormatterValueTransformer
## PocketSocket
## RKValueTransformers
## Reachability
## RestKit
## SOCKit
## TransitionKit
#
## TODO: maybe automate copying fat files inside frameworks
## for a prototype this is supposed to be manual
## Note: also assuming that thin frameworks have been built already
## and copied to
## * lib-native/ios/frameworks/Debug-iphonesimulator
## * lib-native/ios/frameworks/Release-iphoneos
## -------------
# Produce universal ("fat") binaries by gluing together the device
# (Release-iphoneos) and simulator (Debug-iphonesimulator) slices of each
# prebuilt framework with lipo.
LAUNCH_DIR=$PWD
cd ..
REPOSITORY_ROOT_DIR=$PWD
cd "$LAUNCH_DIR"

# Abort early if the frameworks tree is missing rather than running lipo
# against the wrong working directory.
cd "$REPOSITORY_ROOT_DIR/lib-native/ios" || exit 1

# FIX: the PocketSocket invocation was duplicated verbatim in the original;
# driving all frameworks through one loop runs each exactly once and removes
# the copy/paste repetition.
for fw in ISO8601DateFormatterValueTransformer PocketSocket RKValueTransformers Reachability RestKit SOCKit TransitionKit; do
  lipo -create \
    -output "frameworks/fat/$fw" \
    "frameworks/Release-iphoneos/$fw/$fw.framework/$fw" \
    "frameworks/Debug-iphonesimulator/$fw/$fw.framework/$fw"
done

cd "$LAUNCH_DIR"
| true |
4276e5a41f0e9174ebac516fcc09900e7caf170d | Shell | straywarrior/leetcode-solutions | /algorithms/cpp/source/update_cmakelists.sh | UTF-8 | 501 | 3.5 | 4 | [
"MIT"
] | permissive | #! /bin/bash
#
# update_cmakelists.sh
# Copyright (C) 2021 StrayWarrior <i@straywarrior.com>
# Append a leetcode_add_case() entry to CMakeLists.txt for every *.cpp source
# that is not yet registered there.
# Cases already declared in CMakeLists.txt (names inside the parentheses).
current_cases=($(grep -E "^leetcode_add_case" CMakeLists.txt | cut -d '(' -f 2 | tr -d '()' | sort))
# Case names implied by the .cpp files on disk (./NAME.cpp -> NAME).
updated_cases=($(find . -name '*.cpp' | cut -d '/' -f 2 | cut -d '.' -f 1 | sort))
# comm -13: lines only in the second (sorted) list, i.e. new cases.
added_cases=($(comm -13 <(printf '%s\n' "${current_cases[@]}") <(printf '%s\n' "${updated_cases[@]}")))
for case_name in "${added_cases[@]}"; do
  echo "Add case: $case_name"
  echo "leetcode_add_case($case_name)" >> CMakeLists.txt
done
| true |
e26d765dcfc9bb41f1f2415096f016bf7eb2f74d | Shell | iu-parfunc/accelerate-multi-benchmarks | /Setup.sh | UTF-8 | 448 | 3.109375 | 3 | [] | no_license | #!/bin/bash
# Compile all the packages
set -xe

# Allow overriding the cabal executable via $CABAL; default to cabal-1.20.
CABAL=${CABAL:-cabal-1.20}

$CABAL sandbox init

# The cuda bindings ship without a configure script; copy ours in once.
if ! [ -e ./cuda/configure ]; then
  cp ./aux/configure ./cuda/
fi

# Local package directories (intentionally word-split below).
PKGS="./Accelerate/ ./Accelerate-cuda/ ./Accelerate-examples/ ./Accelerate-io/ ./accelerate-fft/ ./cuda/ ./gloss/gloss/ ./gloss/gloss-raster/ ./gloss/gloss-rendering/ ./gloss-raster-accelerate/ ./gloss-accelerate/"

# FIX: forward extra script arguments verbatim ("$@") instead of re-splitting
# them with $*, so arguments containing spaces survive intact.
$CABAL install -fMULTI -fcuda $PKGS "$@"
| true |
f5b551b02fab3a4f1c469adb13810ad5be4e6275 | Shell | wasabilti/Git_Script | /git-first-install.sh | UTF-8 | 562 | 3.015625 | 3 | [] | no_license | #!/bin/bash
# Use to init git at first install:
# configure the commit identity, generate an SSH key, and test GitHub access.
# PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin::/Library/Apple/usr/bin:~/bin
export PATH

# -r keeps backslashes in the typed input literal.
read -r -p "Username: " USERNAME
read -r -p "Email: " EMAIL
git config --global user.name "$USERNAME"
git config --global user.email "$EMAIL"

# FIX: the key comment was the literal string "EMAIL"; embed the address the
# user actually entered.
ssh-keygen -t rsa -C "$EMAIL"
eval $(ssh-agent -s)
read -r -p "Private key file (eg: ~/.ssh/id_rsa): " KEYNAME
ssh-add "$KEYNAME"
# FIX: prompt was missing the ": " separator used by every other prompt here.
read -r -p "Public key file (eg: ~/.ssh/id_rsa.pub): " PUBNAME
cat "$PUBNAME"
echo 'Copy the pub key to github.com'
# Pause until the user confirms the key has been uploaded.
read -r tmp_wait
ssh -T git@github.com
| true |
7510998ede3d2b61e01abd31023a6d2933893a0e | Shell | brendanc-dpaw/s2i-moodle | /.s2i/bin/assemble | UTF-8 | 345 | 2.71875 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Include dotfiles in globs so "mv moodle-*/*" also moves hidden files.
shopt -s dotglob

echo "---> Downloading and installing moodle"
# FIX: --fail makes curl exit non-zero on an HTTP error instead of saving the
# error page as the tarball; abort the build when download or unpack fails.
curl --silent --fail -o moodle.tar.gz https://codeload.github.com/moodle/moodle/tar.gz/MOODLE_33_STABLE || exit 1
tar xvf moodle.tar.gz || exit 1
# Flatten the versioned top-level directory into the app root.
rm -v moodle.tar.gz; mv moodle-*/* ./; rm -rf moodle-*
# Bring in the site configuration supplied with the s2i source.
mv -v /tmp/src/config.php ./config.php

# Fix source directory permissions
fix-permissions ./
| true |
6e0585c6ddb5e76fe493e373e3df8b782a4e5bea | Shell | yorevs/homesetup | /bin/hhs-functions/bash/hhs-shell-utils.bash | UTF-8 | 6,452 | 3.859375 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Script: hhs-shell-utils.bash
# Created: Oct 5, 2019
# Author: <B>H</B>ugo <B>S</B>aporetti <B>J</B>unior
# Mailto: homesetup@gmail.com
# Site: https://github.com/yorevs/homesetup
# License: Please refer to <https://opensource.org/licenses/MIT>
#
# Copyright (c) 2023, HomeSetup team
# !NOTICE: Do not change this file. To customize your functions edit the file ~/.functions
# @function: Search for previously issued commands from history using filters.
# @param $1 [Req] : The case-insensitive filter to be used when listing.
# @function: Search for previously issued commands from history using filters.
# @param $1 [Opt] : The case-insensitive filter to be used when listing.
# Prints the de-duplicated history (last occurrence wins, original order kept)
# filtered by the given regex; returns grep's status (1 on no match / help).
function __hhs_history() {
  local filter
  if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    echo "Usage: ${FUNCNAME[0]} [regex_filter]"
    return 1
  fi
  # With no arguments match every numbered history line; otherwise use the
  # caller's filter. (The original duplicated the whole pipeline per branch.)
  if [[ "$#" -eq 0 ]]; then
    filter="^ *[0-9]* "
  else
    filter="$*"
  fi
  # sort by command then by number desc, keep one entry per command (uniq -f 1
  # skips the number field), restore chronological order, then filter.
  history | sort -k2 -k 1,1nr | uniq -f 1 | sort -n | grep -i "${filter}"
  return $?
}
# inspiRED by https://superuser.com/questions/250227/how-do-i-see-what-my-most-used-linux-command-are
# @function: Display statistics about commands in history.
# @param $1 [Opt] : Limit to the top N commands.
# Render a usage chart of the most frequent commands in the bash history.
# $1 [Opt]: number of commands to show (default 10). Returns the status of
# the last echo. Color variables (ORANGE/WHITE/GREEN/CYAN/NC, HHS_HIGHLIGHT_COLOR)
# are defined elsewhere in HomeSetup.
function __hhs_hist_stats() {
  local top_n=${1:-10} i=1 cmd_name cmd_qty cmd_chart
  if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    echo "Usage: ${FUNCNAME[0]} [top_N]"
    return 1
  fi
  # NOTE(review): pad and pad_len are intentionally(?) not declared local and
  # leak into the caller's scope — confirm.
  pad=$(printf '%0.1s' "."{1..60})
  pad_len=30
  echo -e "\n${ORANGE}Top '${top_n}' used commands in bash history ...\n"
  # perl emits one "name count bar" record per command; the bar is one block
  # character per 5 occurrences.
  # NOTE(review): iterating with an unquoted $( ) relies on IFS splitting so
  # that each record survives as a unit — confirm IFS is newline-only here.
  for cmd in $(history | tr -s ' ' | cut -d ' ' -f6 | sort | uniq -c | sort -nr | head -n "${top_n}" |
    perl -lane 'printf "%s %03d %s \n", $F[1], $F[0], "▄" x ($F[0] / 5)'); do
    cmd_name=$(echo "${cmd}" | cut -d ' ' -f1)
    cmd_qty=$(echo "${cmd}" | cut -d ' ' -f2)
    cmd_chart=$(echo "${cmd}" | cut -d ' ' -f3-)
    printf "${WHITE}%3d: ${HHS_HIGHLIGHT_COLOR} " $i
    echo -n "${cmd_name} "
    # Dot-fill between the name and the count column.
    printf '%*.*s' 0 $((pad_len - ${#cmd_name})) "${pad}"
    printf "${GREEN}%s ${CYAN}|%s \n" " ${cmd_qty}" "${cmd_chart}"
    i=$((i+1))
  done
  echo "${NC}"
  return $?
}
# @function: Display all environment variables using filters.
# @param $1 [Opt] : If -e is present, edit the env file, otherwise a case-insensitive filter to be used when listing.
# @function: Display all environment variables using filters.
# @param $1 [Opt] : If -e is present, edit the env file, otherwise a
# case-insensitive filter to be used when listing.
# Returns 1 on help, the editor status for -e, or 0 after listing.
function __hhs_envs() {
  local pad pad_len filters name value columns ret_val=0
  # Make sure the user's env-overrides file exists.
  HHS_ENV_FILE=${HHS_ENV_FILE:-$HHS_DIR/.env}
  touch "${HHS_ENV_FILE}"
  if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    echo "Usage: ${FUNCNAME[0]} [options] [regex_filters]"
    echo ''
    echo ' Options: '
    echo ' -e : Edit current HHS_ENV_FILE.'
    return 1
  else
    if [[ "$1" == '-e' ]]; then
      # Delegate to the HomeSetup editor helper (defined elsewhere).
      __hhs_edit "${HHS_ENV_FILE}"
      ret_val=$?
    else
      # 60 dots used to pad names into an aligned column.
      pad=$(printf '%0.1s' "."{1..60})
      pad_len=40
      # Truncate values to the remaining terminal width.
      columns="$(($(tput cols) - pad_len - 10))"
      # Multiple filter words become a regex alternation; empty means "all".
      filters="$*"
      filters=${filters// /\|}
      [[ -z "${filters}" ]] && filters=".*"
      echo ' '
      echo "${YELLOW}Listing all exported environment variables matching [ ${filters} ]:"
      echo ' '
      # Split env output by lines only, and match names case-insensitively.
      IFS=$'\n'
      shopt -s nocasematch
      for v in $(env | sort); do
        name=${v%%=*}
        value=${v#*=}
        if [[ ${name} =~ ${filters} ]]; then
          echo -en "${HHS_HIGHLIGHT_COLOR}${name}${NC} "
          printf '%*.*s' 0 $((pad_len - ${#name})) "${pad}"
          echo -en " ${GREEN}=> ${NC}"
          echo -n "${value:0:${columns}}"
          # Ellipsis marks values that were truncated above.
          [[ ${#value} -ge ${columns} ]] && echo -n "..."
          echo "${NC}"
        fi
      done
      shopt -u nocasematch
      # RESET_IFS is HomeSetup's saved default IFS (defined elsewhere).
      IFS="$RESET_IFS"
      echo ' '
    fi
  fi
  return ${ret_val}
}
# @function: Display all alias definitions using filters.
# @param $1 [Opt] : If -e is present, edit the .aliasdef file, otherwise a case-insensitive filter to be used when listing.
# @function: Display all alias definitions using filters.
# @param $1 [Opt] : If -e is present, edit the .aliasdef file, otherwise a
# case-insensitive filter to be used when listing.
# Returns 1 on help, the editor status for -e, or 0 after listing.
function __hhs_defs() {
  local pad pad_len filters name value columns ret_val=0
  HHS_ALIASDEF_FILE="${HHS_DIR}/.aliasdef"
  if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    # FIX: document the -e option the function accepts (the sibling
    # __hhs_envs already lists it; this usage text omitted it).
    echo "Usage: ${FUNCNAME[0]} [options] [regex_filters]"
    echo ''
    echo ' Options: '
    echo ' -e : Edit current HHS_ALIASDEF_FILE.'
    return 1
  else
    if [[ "$1" == '-e' ]]; then
      # Delegate to the HomeSetup editor helper (defined elsewhere).
      __hhs_edit "${HHS_ALIASDEF_FILE}"
      ret_val=$?
    else
      # 60 dots used to pad names into an aligned column.
      pad=$(printf '%0.1s' "."{1..60})
      pad_len=51
      # Truncate definitions to the remaining terminal width.
      columns="$(($(tput cols) - pad_len - 10))"
      # Multiple filter words become a regex alternation; empty means "all".
      filters="$*"
      filters=${filters// /\|}
      [[ -z "${filters}" ]] && filters=".*"
      echo ' '
      echo "${YELLOW}Listing all alias definitions matching [ ${filters} ]:"
      echo ' '
      IFS=$'\n'
      shopt -s nocasematch
      # shellcheck disable=SC2013
      for v in $(grep '^ *__hhs_alias' "${HHS_ALIASDEF_FILE}" | sed 's/^ *//g' | sort | uniq); do
        name=${v%%=*}
        name=${name// /}
        value=${v#*=}
        value=${value//\'/}
        if [[ ${name} =~ ${filters} ]]; then
          echo -en "${HHS_HIGHLIGHT_COLOR}${name//__hhs_alias/}${NC} "
          printf '%*.*s' 0 $((pad_len - ${#name})) "${pad}"
          echo -en " ${GREEN}is defined as ${NC}"
          echo -n "${value:0:${columns}}"
          # Ellipsis marks definitions that were truncated above.
          [[ ${#value} -ge ${columns} ]] && echo -n "..."
          echo "${NC}"
        fi
      done
      shopt -u nocasematch
      IFS="$RESET_IFS"
      echo ' '
    fi
  fi
  return ${ret_val}
}
# @function: Select a shell from the existing shell list.
# @function: Select a shell from the existing shell list.
# Offers the shells from /etc/shells (plus Homebrew bash/zsh when present) in
# a menu, then changes the user's login shell with chsh and updates $SHELL.
# Returns 0 when the shell was changed, 1 otherwise.
function __hhs_shell_select() {
  local ret_val=1 sel_shell mselect_file avail_shells=()
  if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    echo "Usage: ${FUNCNAME[0]} "
  else
    # One shell path per line from /etc/shells (comment lines don't match '/').
    IFS=$'\n' read -d '' -r -a avail_shells <<<"$(grep '/.*' '/etc/shells')"
    # Add the brew bash and zsh as options
    [[ -f '/usr/local/bin/bash' ]] && avail_shells+=('/usr/local/bin/bash')
    [[ -f '/usr/local/bin/zsh' ]] && avail_shells+=('/usr/local/bin/zsh')
    # __hhs_mselect (defined elsewhere) writes the chosen entry to this file.
    mselect_file=$(mktemp)
    if __hhs_mselect "${mselect_file}" "Please select your new default shell:" "${avail_shells[@]}"; then
      # grep . extracts the non-empty selection line.
      sel_shell=$(grep . "${mselect_file}")
      if [[ -n "${sel_shell}" && -f "${sel_shell}" ]]; then
        # Leading backslashes bypass any chsh/rm aliases.
        if \chsh -s "${sel_shell}"; then
          ret_val=$?
          clear
          export SHELL="${sel_shell}"
          echo "${GREEN}Your default shell has changed to => '${SHELL}'"
          echo "${ORANGE}Next time you open a terminal window you will use \"${SHELL}\" as your default shell"
          \rm -f "${mselect_file}"
        else
          __hhs_errcho "${FUNCNAME[0]}: Unable to change shell to ${sel_shell}"
          [[ -f "${mselect_file}" ]] && \rm -f "${mselect_file}"
        fi
      fi
    fi
    # RESET_IFS is HomeSetup's saved default IFS (defined elsewhere).
    IFS="$RESET_IFS"
    echo -e "${NC}"
  fi
  return ${ret_val}
}
| true |
6fec0a99aa4c8ead07796b55a0c392083636db78 | Shell | rushikeshbhandare97/shell_programming | /primerange.sh | UTF-8 | 219 | 3.140625 | 3 | [] | no_license | #!/bin/bash +x
# Print every prime from 2 up to and including n, one per line.
# Nothing is printed for non-numeric input or n < 2 (the original crashed
# with an arithmetic error on empty input and always printed "2").
primes_upto() {
  local n=$1 j i
  # Guard: reject empty / non-numeric input.
  [[ "$n" =~ ^[0-9]+$ ]] || return 1
  (( n >= 2 )) && echo 2
  for (( j = 3; j <= n; j++ )); do
    # Trial division only up to sqrt(j): if i*i > j, no divisor exists.
    # (The original scanned all the way to j/2.)
    for (( i = 2; i * i <= j; i++ )); do
      (( j % i == 0 )) && continue 2   # composite: try the next candidate
    done
    echo "$j"
  done
}

read -p "Enter range: " n
primes_upto "$n"
| true |
715170a8677452059254de415197defc44e2c9af | Shell | gonzaloamadio/bash-scripts | /script_arguments/params1.sh | UTF-8 | 1,029 | 3.96875 | 4 | [] | no_license | #!/bin/bash
# When you want getopts to expect an argument for an option, just place a : (colon) after the proper option flag.
# If you want -A to expect an argument (i.e. to become -A SOMETHING) just do:
# getopts fA:x VARNAME
#If the very first character of the option-string is a : (colon), which would normally be nonsense because there's no option letter preceding it,
#getopts switches to "silent error reporting mode".
#In productive scripts, this is usually what you want because it allows you to handle errors yourself without being disturbed by annoying messages.
# Without the leading ':', running -a with no argument would print, in
# addition to our own error message, getopts' own diagnostic, e.g.:
# ./params1.sh: option requires an argument -- a
while getopts ":a:" opt; do
  case $opt in
    # -a with its required argument: report it and exit successfully.
    a)
      echo "-a was triggered, Parameter: $OPTARG" >&2
      exit 0
      ;;
    # Unknown option (silent mode puts the offending letter in OPTARG).
    \?)
      echo "Invalid option: -$OPTARG" >&2
      exit 1
      ;;
    # Option that is missing its required argument.
    :)
      echo "Option -$OPTARG requires an argument." >&2
      exit 1
      ;;
  esac
done
| true |
aa105acb49483433b8ad5d31b1273140896e1e9a | Shell | VB6Hobbyst7/Raspbian_For_Robots | /buster_update/Raspbian_for_Robots_Buster_Flavor.sh | UTF-8 | 3,861 | 3.109375 | 3 | [
"MIT"
] | permissive | #! /bin/bash
#
# Set some details to fit Rasbpian for Robots
# mainly :
# set the password for user Pi to robots1234
# set samba password to robots1234
# set hostname to dex
# by Nicole Parrot // Dexter Industries
#
###################################################
echo "********** FLAVOR ***********"
echo "WARNING WARNING WARNING"
echo "you will need to manually enter the password for samba"
echo "WARNING WARNING WARNING"
# This script sets up the environment to fit with Raspbian for Robots, but none of the actual Dexter Industries software
# 1. User Name is Pi, Password is robots1234
# 2. Hostname is dex
# 3. Installing Samba
DEFAULT_PWD=robots1234
####################################
# Changing Pi password to robots1234
####################################
# chpasswd reads "user:password" pairs from stdin.
echo pi:$DEFAULT_PWD | sudo chpasswd
####################################
# Installing Samba
####################################
echo "********** SAMBA ***********"
sudo apt-get install -y samba samba-common
# Make shares writable and group-friendly (0775 masks).
sudo sed -i 's/read only = yes/read only = no/g' /etc/samba/smb.conf
sudo sed -i 's/create mask = 0700/create mask = 0775/g' /etc/samba/smb.conf
sudo sed -i 's/directory mask = 0700/directory mask = 0775/g' /etc/samba/smb.conf
sudo systemctl restart smbd.service
# set samba password manually
# (smbpasswd -a prompts interactively; this is the manual step warned above)
echo "enter samba password"
sudo smbpasswd -a pi
####################################
# Shell in a box
####################################
sudo apt-get install -y shellinabox
# disable requirement for SSL for shellinabox
# adding after line 41, which is approximately where similar arguments are found.
# it could really be anywhere in the file - NP
sudo sed -i '/SHELLINABOX_ARGS=/d' /etc/init.d/shellinabox
sudo sed -i '41 i\SHELLINABOX_ARGS="--disable-ssl"' /etc/init.d/shellinabox
####################################
# set default hostname to dex
####################################
# Re-write /etc/hosts
echo "Editing hosts file"
sudo sed -i 's/raspberrypi/dex/g' /etc/hosts
sudo hostnamectl set-hostname dex
echo "Hostname change will be effective after a reboot"
####################################
# Install autodetecting
####################################
sudo cp /home/pi/di_update/Raspbian_For_Robots/buster_update/auto_detect_robot.service /etc/systemd/system/
# Re-read unit files so systemd sees the newly copied service.
sudo systemctl daemon-reload
sudo systemctl enable auto_detect_robot.service
####################################
# override GPG3 antenna color
####################################
# Drop-in directory overrides parts of the stock antenna_wifi unit.
sudo mkdir -p /etc/systemd/system/antenna_wifi.service.d/
sudo cp /home/pi/di_update/Raspbian_For_Robots/upd_script/antenna_wifi_override.conf /etc/systemd/system/antenna_wifi.service.d/
sudo systemctl daemon-reload
sudo systemctl restart antenna_wifi.service
####################################
# background image
####################################
sudo cp /home/pi/di_update/Raspbian_For_Robots/dexter_industries_logo_transparent_bg.png /usr/share/rpd-wallpaper/
####################################
# Put some tool icons on desktop
####################################
bash /home/pi/di_update/Raspbian_For_Robots/upd_script/update_desktop.sh
####################################
# DO MANUALLY
####################################
# 1.
# From the Preferences menu, select recommended software
# install Scratch and Mu
# 2.
# open File Manager
# go to Edit / Preferences
# set `Don't ask options on launch executable file`
# 3.
# in ~/.bashrc, add `xhost + &>/dev/null` as the last line
# 4. change background image and background/text color
# right click on background, select Desktop Preferences
# set image to dexter background, with layout as 'centre image on screen'
# text color to #020202
# background color to #F6F3FE
# a. remove current espeak
sudo apt-get remove -y espeak
# b. reinstall espeak and helpers
sudo apt install -y espeak espeak-ng python3-espeak speech-dispatcher-espeak | true
8c773f363240abf608c9bad9ea5001ff3ee82203 | Shell | ansemjo/dotfiles | /bash/aliases.d/colordiff.sh | UTF-8 | 108 | 2.578125 | 3 | [] | no_license | #!/usr/bin/env bash
# Prefer colordiff over plain diff whenever the command is available.
# (iscommand is a helper defined elsewhere in these dotfiles.)
if iscommand colordiff; then alias diff='colordiff'; fi
| true |
b1d80b6df2bcca7d215a2433146325eb787442e0 | Shell | fredzannarbor/pagekicker-community | /scripts/includes/print-cover-v2.sh | UTF-8 | 10,329 | 3.015625 | 3 | [
"Apache-2.0"
] | permissive | #initialize print variables
# this is for case bound only - most of these values would need to be conditional to support other bindings
# All dimensions below are in pixels at 300 DPI.
print_horizontal_trim=2438 # 8.125 inches for 8.5 inch books
print_vertical_trim=3300
print_top_height=$((print_vertical_trim / 4))
print_bottom_height=$((print_vertical_trim / 10))
print_label_text_width=$((print_horizontal_trim - 225))
# Bleed margins (outside/inside edge, top, bottom) and the text safety inset.
outsidebleed=187
insidebleed=204
topbleed=217
bottombleed=225
textsafety=150
userprovidedprintISBN=9781608880416 # dev only
spinepixels=131 #dev only
# create directory for print images
# NOTE(review): uuid, scriptpath and sfb_log are assumed to be set by the
# calling build script — confirm against the includes' caller.
cd images/$uuid ; mkdir print ; echo "created directory images/$uuid/print" ; cd $scriptpath ; echo "changed directory back to " $scriptpath | tee --append $sfb_log
# calculate dimensions
# calculate spine dimensions (we must know the spine before we can do the canvas!)
echo "checking sku is " $sku "and path to pdf is " $mediatargetpath$uuid"/"$sku"print.pdf"
# Spine thickness depends on the interior page count reported by pdftk.
pdfpagecount=`pdftk $mediatargetpath$uuid/$sku"print.pdf" dump_data output | grep -i NumberOfPages | cut -d":" -f2 | sed '/^$/d'`
echo "pdf page count is" $pdfpagecount
# get rid of space and save $spinepixels as variable
# calculate size of canvas
# Full wrap = back page + spine + front page, plus bleeds and a 10px fudge.
canvaswidth=$(( $print_horizontal_trim * 2 + $spinepixels + $outsidebleed + $insidebleed + $insidebleed + $outsidebleed + 10 ))
canvasheight=$(( $topbleed + $print_vertical_trim + $bottombleed + 10 ))
echo "calculated canvaswidth as "$canvaswidth
echo "calculated canvasheight as "$canvasheight
# calculate safe areas on front and back page
safepagewidth=$(( $print_horizontal_trim - $textsafety - $textsafety ))
safepageheight=$(( $print_vertical_trim - $textsafety ))
echo "calculated safepagewidth as" $safepagewidth
echo "calculated safepageheight as" $safepageheight
# calculate spine
# Thin spines get a smaller safety inset so some title width remains.
if [ "$spinepixels" -lt 106 ] ; then
spinesafety=10
else
spinesafety=18
fi
echo "because spine width is less than 105 pixels, spinesafety is " $spinesafety
safespinetitlewidth=$(( $spinepixels - $spinesafety - $spinesafety ))
echo "safespinetitlewidth is" $safespinetitlewidth
safespinetitleheight=$(( $safepageheight / 2 ))
echo "calculated safespinetitleheight as " $safespinetitleheight
spineleftmargin=$(( $outsidebleed + $insidebleed + $print_horizontal_trim + $spinesafety * 2 ))
echo "calculated spineleftmargin as bleed + page width +spinepixels for " $spineleftmargin
# Vertical offset where the spine logotype block begins.
spinelogotypebegin=$(( $print_horizontal_trim - 600 ))
# front page calculations
# Front page starts after back page + spine (reading the wrap left to right).
frontpageflushleftmargin=$(( $outsidebleed + $print_horizontal_trim + $insidebleed + $spinepixels + insidebleed ))
echo "calculated frontpageflushleftmargin as" $frontpageflushleftmargin
# there's always a cushion around top and bottom text t
frontpagetopcushion=150
frontpagebottomcushion=0
echo "frontpagetopcushion is " $frontpagetopcushion
echo "frontpagebottomcushion is " $frontpagebottomcushion
# back page calculations
# ISBN barcode position near the bottom-left corner of the back page.
ISBNylocation=$(( $safepageheight - 300 - 25 ))
ISBNxlocation=$(( $outsidebleed + 125 ))
echo "calculated ISBNxlocation as" $ISBNxlocation
echo "calculated ISBNylocation as" $ISBNylocation
# start by building the full canvas
# (solid background in the cover color at 300 DPI; newcovercolor comes from
# the ebook cover build — TODO confirm)
convert -size "$canvaswidth"x"$canvasheight" xc:$newcovercolor \
-units "PixelsPerInch" -density 300 -resample 300x \
images/$uuid/print/fullcanvas.png
# then create the front cover
# canvas.png is the full front page; top/bottom canvases and labels are the
# bands that will hold the title and the "edited by" line.
convert -size "$print_horizontal_trim"x"$print_vertical_trim" -density 300 -units pixelsperinch xc:$newcovercolor images/$uuid/print/canvas.png
convert -size "$print_horizontal_trim"x"$print_top_height" -density 300 -units pixelsperinch xc:$newcovercolor images/$uuid/print/topcanvas.png
convert -size "$print_horizontal_trim"x"$print_bottom_height" -density 300 -units pixelsperinch xc:$newcovercolor images/$uuid/print/bottomcanvas.png
convert -size "$print_horizontal_trim"x"$print_top_height" -density 300 -units pixelsperinch xc:$newcovercolor images/$uuid/print/toplabel.png
convert -size "$print_horizontal_trim"x"$print_bottom_height" -density 300 -units pixelsperinch xc:$newcovercolor images/$uuid/print/bottomlabel.png
# underlay the Word Cloud cover (which was created during the ebookcover build)
composite -gravity Center images/$uuid/ebookcover/$sku"printcloud.png" images/$uuid/print/canvas.png images/$uuid/print/canvas.png
# build the labels for the front cover
echo "covertitle is" $covertitle
# FIX: the first convert command ended with a stray trailing backslash, which
# made the entire second convert command line part of the first command's
# argument list — the bottom label was never rendered.
# Render the title into the top band, centered.
convert -background $newcovercolor -fill "$coverfontcolor" -gravity center -size "$print_label_text_width"x"$print_top_height" \
-font $newcoverfont caption:"$covertitle" \
-density 300 -units pixelsperinch \
images/$uuid/print/topcanvas.png +swap -gravity center -composite images/$uuid/print/toplabel.png
# Render the "edited by" credit into the bottom band, centered.
convert -background $newcovercolor -fill "$coverfontcolor" -gravity center -size "$print_label_text_width"x"$print_bottom_height" \
-font $newcoverfont caption:"$editedby" \
-density 300 -units pixelsperinch \
images/$uuid/print/bottomcanvas.png +swap -gravity center -composite images/$uuid/print/bottomlabel.png
# lay the labels on top of the front cover
composite -geometry +0+0 images/$uuid/print/toplabel.png images/$uuid/print/canvas.png images/$uuid/print/step1.png
composite -geometry +0+$print_horizontal_trim images/$uuid/print/bottomlabel.png images/$uuid/print/step1.png images/$uuid/print/step2.png
# Stamp the PageKicker logo at the bottom of the front cover.
composite -gravity south -geometry +0+0 assets/PageKicker_cmyk300dpi_300.png images/$uuid/print/step2.png images/$uuid/print/cover.png
# make a working copy of the front cover
cp images/$uuid/print/cover.png images/$uuid/print/$sku"printfrontcover.png"
# make PDF and EPS copies of the front cover
convert images/$uuid/print/$sku"printfrontcover.png" -density 300 images/$uuid/print/$sku"printfrontcover.pdf"
convert -density 300 images/$uuid/print/$sku"printfrontcover.pdf" images/$uuid/print/$sku"printfrontcover.eps"
# replace first page of interior with print cover page
# (B1 = cover PDF page 1, A2-end = interior from page 2 onward)
pdftk A=$mediatargetpath$uuid/$sku"print.pdf" B="images/"$uuid/print/$sku"printfrontcover.pdf" cat B1 A2-end output $mediatargetpath$uuid/$sku"finalprint.pdf"
# build the ISBN
# bookland renders the EAN-13 barcode ("90000" = no suggested retail price).
python $scriptpath"lib/bookland-1.4/bookland-1.4.1b" -o images/$uuid/print/$userprovidedprintISBN.eps -f OCRB -b 1 -q --cmyk 0,0,0,1.0 "$userprovidedprintISBN" 90000
# Rasterize the barcode with a white quiet-zone border, in CMYK for print.
convert -units "PixelsPerInch" -density 300 -resample 300x -border 25x25 -bordercolor white images/$uuid/print/$userprovidedprintISBN.eps -colorspace CMYK images/$uuid/print/$userprovidedprintISBN.png
# build the spine caption
echo "building spine caption"
# FIX: the geometry was written as "$safespinetitleheight$x$safespinetitlewidth",
# where "$x" is an undefined variable, so the two numbers were concatenated
# into one bogus width. The separator must be a literal "x"; brace the
# expansions to keep it out of the variable names. (Height comes first because
# the caption is rendered horizontally and then rotated 90 degrees.)
convert -size "${safespinetitleheight}x${safespinetitlewidth}" -density 300 -units pixelsperinch -background $newcovercolor -fill "$coverfontcolor" -font $coverfont -rotate 90 -gravity West caption:"$covertitle" images/$uuid/print/spinecaption.png
# build the spine logotype
echo "building spine logotype"
convert -size "${safespinetitleheight}x${safespinetitlewidth}" -density 300 -units pixelsperinch -background $newcovercolor -fill "$coverfontcolor" -font $coverfont -rotate 90 -gravity East caption:"PageKicker" images/$uuid/print/spinelogotype.png
# lay the objects on the canvas
# lay the ISBN box at the bottom left corner of the full canvas
# (the left half of the wrap is the back page, so this lands on the back cover)
convert images/$uuid/print/fullcanvas.png \
images/$uuid/print/$userprovidedprintISBN.png -geometry +$ISBNxlocation+$ISBNylocation -composite \
images/$uuid/print/fullcanvas1.png
# lay the front cover on the full canvas
# (offset past back page + spine so it fills the right half of the wrap)
convert images/$uuid/print/fullcanvas1.png \
images/$uuid/print/$sku"printfrontcover.png" -geometry +$frontpageflushleftmargin+$topbleed -composite \
images/$uuid/print/fullcanvas2.png
# assemble and lay down the spine caption and logotype, unless it is too thin
if [ "$pdfpagecount" -lt 48 ]; then
echo "page count too low for spine"
cp images/$uuid/print/fullcanvas2.png images/$uuid/print/finalcanvas.png
else
# lay the spine caption on the full canvas
convert images/$uuid/print/fullcanvas2.png \
images/$uuid/print/spinecaption.png -geometry +$spineleftmargin+375 -composite \
images/$uuid/print/fullcanvas3.png
# resize the purple bird
purplebirdsize=$(( $safespinetitlewidth - 20 ))
# FIX: "$purplebirdsizex$purplebirdsize" was parsed as the (undefined)
# variable "purplebirdsizex" followed by the size; brace the expansions so the
# geometry reads WIDTHxHEIGHT. The quoted ">" keeps its shrink-only meaning.
convert assets/purplebird300.png -resize "${purplebirdsize}x${purplebirdsize}>" images/$uuid/print/purple$safespinetitlewidth.png
# surround the bird with a white box
convert -units "PixelsPerInch" -density 300 -resample 300x -border 5x5 -bordercolor white images/$uuid/print/purple$safespinetitlewidth.png -colorspace CMYK images/$uuid/print/purplebirdwithbox.png
# create spacer box
convert -size "$safespinetitlewidth"x20 xc:none images/$uuid/print/spacer.png
# append spine logotype, spacer, and purplebird box
convert images/$uuid/print/spinelogotype.png images/$uuid/print/spacer.png -background none -gravity west -append images/$uuid/print/logowithspacer.png
convert images/$uuid/print/logowithspacer.png images/$uuid/print/purplebirdwithbox.png -background none -gravity west -append images/$uuid/print/logobar.png
# lay the spine logotype on the full canvas
convert images/$uuid/print/fullcanvas3.png \
images/$uuid/print/logobar.png -geometry +$spineleftmargin+$spinelogotypebegin -composite \
images/$uuid/print/fullcanvas4.png
# FIX: the final copy used fullcanvas2.png, silently discarding the spine
# caption and logotype just composited into fullcanvas4.png.
cp images/$uuid/print/fullcanvas4.png images/$uuid/print/finalcanvas.png
fi
# save the cover and prepare it for production
# save as single large file (png)
# convert RGB to CMYK
# FIX: the output name used $userprovidedISBN, which is never set anywhere in
# this script (the file came out as ".pdf"); the variable defined at the top
# is $userprovidedprintISBN, matching the pstill output below.
convert images/$uuid/print/finalcanvas.png -colorspace CMYK images/$uuid/print/$userprovidedprintISBN.pdf
echo "built print cover as file images/$uuid/print/$sku.cmyk.pdf" | tee --append $sfb_log
# Rebuild the interior PDF with the print front cover and print margins.
xvfb-run --auto-servernum ebook-convert tmp/$uuid/cumulative.html $mediatargetpath$uuid/$sku".pdf" --cover "images/"$uuid"/print/"$sku"printfrontcover.png" --margin-left "54" --margin-right "54" --margin-top "54" --margin-bottom "54" --pdf-default-font-size "11" --pdf-page-numbers --insert-metadata --pdf-serif-family "AvantGarde" --title "$covertitle"
echo "saving interior as PDFx1a"
# -B flag makes it b&w
./lib/pstill_dist/pstill -M defaultall -m XimgAsCMYK -m Xspot -m Xoverprint -d 500 -m XPDFX=INTENTNAME -m XPDFXVERSION=1A -m XICCPROFILE=USWebCoatedSWOP.icc -o $mediatargetpath$uuid/$userprovidedprintISBN.pdf $mediatargetpath$uuid/$sku.pdf
| true |
4680ebaa2a7558c1fb704b25f43c69b6ebfc2583 | Shell | samircaica/scripts | /import-hive-caller.sh | UTF-8 | 1,251 | 3.21875 | 3 | [] | no_license | #!/bin/bash
# Batch-import Hive databases into Atlas.
# $1: path to a file containing one database name per line.
file_path=$1
i=1
if [ -f "$file_path" ]; then
  # FIX: "IFS= read -r" keeps leading/trailing whitespace intact and stops
  # read from interpreting backslashes in database names.
  while IFS= read -r line; do
    echo "================= Starting script execution DB $i =========="
    echo "Getting kerberos ticket"
    echo "============================================================"
    # kinit with the newest atlas keytab found under the CM agent process
    # tree, using the principal listed inside that same keytab.
    NAME=atlas; kinit -kt $(find /run/cloudera-scm-agent/process -name ${NAME}*.keytab -path "*${NAME}*" | sort | tail -n 1) $(klist -kt $(find /run/cloudera-scm-agent/process -name ${NAME}*.keytab -path "*${NAME}*" | sort | tail -n 1) | awk '{ print $4 }' | grep "^${NAME}*" | head -n 1)
    klist
    echo "============================================================"
    echo "================ Starting Import ==========================="
    echo "============================================================"
    echo "Importing --------- $line"
    # FIX: quote the database name so it is passed as a single -d argument.
    /opt/cloudera/parcels/CDH/lib/atlas/hook-bin/import-hive.sh -d "$line"
    echo "============================================================"
    echo "================= Finishing Import ========================="
    echo "============================================================"
    echo "================= Finishing script execution DB $i ========="
    ((i++))
  done < "$file_path"
else
  echo "File not found: $file_path"
fi
77fabecb06d3006e68d43267a10139ad8c0dcf97 | Shell | samuael/redis_outputs | /instantiate_redis_server.sh | UTF-8 | 623 | 2.84375 | 3 | [] | no_license | #!/bin/bash
#
#
# in this instantiation 'samuael' represents my user name in this node
# and 'proxyweb-PGO' represents the 'group'
#
# Build and install redis from the unpacked source tree; abort if the source
# directory is missing so "make install" cannot run somewhere unintended.
cd redis-4.0.6 || exit 1 ;
#sudo make ;
#sudo make test;
sudo make install;
cd ../;
# Data directory owned by the service user/group.
sudo mkdir -p /var/lib/redis;
sudo chown samuael:proxyweb-PGO /var/lib/redis;
sudo chmod a+rwx /var/lib/redis;
# Configuration directory and unit file.
sudo mkdir -p /etc/redis;
sudo chmod a+rwx /etc/redis;
sudo cp redis.conf /etc/redis;
sudo cp redis.service /etc/systemd/system/;
sudo systemctl start redis;
sudo systemctl enable redis;
# Reloading the system whenever a new service is added.
# FIX: was "deamon-reload" (typo), so systemd never re-read the unit files.
sudo systemctl daemon-reload;
redis-server --version;
| true |
d07cbcb1b287f545c6cbab24127e71491c43fc6e | Shell | the-dilo/airy | /pacman/alias/short | UTF-8 | 1,645 | 3.3125 | 3 | [
"MIT"
] | permissive | # vim:ft=sh
# Short pacman/pactree helpers.  Rough convention: lower-case = sync/repo
# operations (-S*), upper-case = queries against the local database (-Q*).
# USE: -r/-a to use repo/aur only
alias pacg='pacman -Sg' # show list of package [g]roup
alias paci='pacman -S' # install
alias pacI='pacman -U' # install from files on disk
alias pacl='\pacman -Ql' # list files (installed by a given package)
# paclr: list only a package's executables (paths containing /bin/)
alias paclr='_a()(command pacman -Ql "$@" | grep -P "/bin/(?!\$)"); _a '
alias pacL='\pacman -Qo' # show pkgs which contain arg path
alias pacLr='_a(){ command pacman -Qo "$(which "$1")"; }; _a '
alias paco='\pacman -Qdt' # list [o]rphans
alias pacq='pacman -Si' # info
alias pacQ='pacman -Qi' # info local -- grep by "Required By"
alias pacr='pactree -c ' # deps direct
alias pacR='pactree -cr ' # deps reverse
alias pacD='pacman -D --asdeps' # change install reason
alias pacs='pacman -Ss' # search
alias pacS='pacman -Qs' # search local
alias pacu='pacman -Su' # install with [u]pdate
alias pacU='sudo pacman --sync --refresh'
alias pacx='sudo \pacman -Rsu' # '[r]emove'
alias pacX='sudo \pacman -Rsc' # 'Rm pkg, its deps and all depending pkgs
alias pacy='pacman -Sy' # refresh only
alias pacY='pacman -Syu' # '[u]pgrade all'
# alias pacU='pacman -U /var/cache/pacman/pkg' # refresh only
# alias pacc='pacman -Scc' # '[c]lean cache' (all not currently installed)
# alias pacexpl='pacman -D --asexp' # 'mark as [expl]icit'
# alias pacimpl='pacman -D --asdep' # 'mark as [impl]icit'
# pac-last: most recent install/update entries from pacman's log, oldest first
alias pac-last='tac /var/log/pacman.log | sed -rn "/\[PACMAN\]/q;/installed|updated/p" | tac'
# Synchronizes the local package database against the repositories then
# upgrades outdated packages.
# alias pacU="sudo pacman --sync --refresh --sysupgrade"
| true |
c01abc1e604e156a443b89104fd30c0e6fbda633 | Shell | optionalg/crimson | /crimson_recon | UTF-8 | 8,756 | 3.265625 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
#
### CREATED BY KARMAZ
#
# Recon pipeline: enumerates subdomains for $1, resolves live hosts, checks
# CORS/takeover, screenshots, spiders URLs and proxies results through Burp.
#
### TO DO:
# 1. Third level domain enumeration.
# 2. Add all features from crimson_notes.
# 3. Add altns mutations after checking for live and brute again.
# 4. Add nuclei
### FUNCTIONS:
#
# 1. SUBDOMAIN ENUMERATION
# 2. CHECKING LIVE DOMAINS
# 3. LOOKING FOR DOMAIN HIJACKING
# 4. MAKING SCREENS
# 5. LOOKING FOR CORS MISCONFIGURATIONS
# 6. RESOLVE IP ADDRESSES OF DOMAINS
# 7. MASS SCANN ALL IP ADDRESSES
# 8. SPIDER URLS
#
### LISTS:
#
# 1. live.txt - LIVE SUBDOMAINS
# 2. ip.txt - ALL IPS
# 3. ports.txt - OPENED PORTS
# 4. subjack.txt - VULNS
# 5. Screens - STATUS + SCREENS
# 6. cors_scan.txt - VULNS
# 7. urls.txt - ALL SPIDERS URLS IN ONE
# 8. status_live.txt - HTTPS / HTTPS SUBDOMAINS STATUS
#
### WORKFLOW
# 0. *Comment out portscan below if u want to scann all ports of all domains.
# 1. Start Burp
#    - Create new project - example.domain.com
#    - Turn off interception
# 2. Start this script.
# 3. Check the output listed above (LISTS).
# 4. Select the target domain and start crimson_target module.
# 5. *Eventually proxy urls.txt and status_live.txt with additional headers and listener ON.
###
echo -e "\033[0;31m
 ██████╗██████╗ ██╗███╗   ███╗███████╗ ██████╗ ███╗   ██╗    ██████╗ ███████╗ ██████╗ ██████╗ ███╗   ██╗
██╔════╝██╔══██╗██║████╗ ████║██╔════╝██╔═══██╗████╗  ██║    ██╔══██╗██╔════╝██╔════╝██╔═══██╗████╗  ██║
██║     ██████╔╝██║██╔████╔██║███████╗██║   ██║██╔██╗ ██║    ██████╔╝█████╗  ██║     ██║   ██║██╔██╗ ██║
██║     ██╔══██╗██║██║╚██╔╝██║╚════██║██║   ██║██║╚██╗██║    ██╔══██╗██╔══╝  ██║     ██║   ██║██║╚██╗██║
╚██████╗██║  ██║██║██║ ╚═╝ ██║███████║╚██████╔╝██║ ╚████║███████╗██║  ██║███████╗╚██████╗╚██████╔╝██║ ╚████║
 ╚═════╝╚═╝  ╚═╝╚═╝╚═╝     ╚═╝╚══════╝ ╚═════╝ ╚═╝  ╚═══╝╚══════╝╚═╝  ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝  ╚═══╝
\033[0m"
if [ -z $1 ]
then
echo "Usage: ./crimson_recon example.domain.com"
exit 1
fi
### REMEMBER ABOUT BURP WITH PROPER SCOPE
echo -e "BURP SCOPE: .*\."$(echo $1 | sed "s/\./\\\./")"$"
read -rsp $'RUN BURP AND ADD SCOPE...\n' -n1 key
export DOMAIN=$1
mkdir $HOME/bounty/$DOMAIN -p
cd $HOME/bounty/$DOMAIN
### SCRAPING DOMAINS && BRUTEFORCING DOMAINS
echo -e "\033[0;31m [+]\033[0m SUBDOMAINS SCRAPING"
curl -s https://crt.sh/?q=$DOMAIN | grep $DOMAIN | grep TD | sed 's/<BR>.*$//g' | sed 's/<\/TD>.*$//' | sed 's/<TD>//g' | sed 's/\*.//g' | sed 's/ //g' | grep -v "TDclass" | sort -u > crt.txt
amass enum --passive -d $DOMAIN -o amass.txt
subfinder -silent -recursive -d $DOMAIN | tee subfinder.txt
echo -e "\033[0;31m [+]\033[0m SUBDOMAINS BRUTEFORCING"
$HOME/tools/puredns/puredns bruteforce $HOME/tools/CRIMSON/words/dns $DOMAIN -w brute_dns.txt
# OLD BEST BRUTE TECHNIQUE
#$HOME/tools/massdns/scripts/subbrute.py $HOME/tools/CRIMSON/words/dns $DOMAIN | $HOME/tools/massdns/bin/massdns -r $HOME/tools/massdns/lists/resolvers.txt -t A -o S -w massdns.txt
#cat massdns.txt | cut -d " " -f 1 | sed 's/\.[^.]*$//' > brute_dns.txt
### ASSETFINDER FROM TOMNOMNOM
echo -e "\033[0;31m [+]\033[0m START WEBSPIDERS"
assetfinder --subs-only $DOMAIN | tee -a assetfinder.txt
### SPIDER 1 = paramspider.txt
python3 $HOME/tools/ParamSpider/paramspider.py -d $DOMAIN --output ./paramspider.txt --level high
cat paramspider.txt | unfurl -u domains > subdomains_paramspider.txt
### MERGE SUBDOMAIN LISTS
sort -u assetfinder.txt subdomains_paramspider.txt amass.txt subfinder.txt brute_dns.txt crt.txt > $HOME/bounty/$DOMAIN/subdomains.txt
### CHECK FOR LIVE - RESOLVE subdomains.txt
echo -e "\033[0;31m [+]\033[0m CHECK FOR LIVE SUBDOMAINS"
cat subdomains.txt | dnsx -o live1.txt
### MAKE MUTATIONS OF BRUTEFORCED DOMAINS AND REMOVE DUPLICATED FROM PREVIOUS ENUMERATION
echo -e "\033[0;31m [+]\033[0m MUTATE LIVE SUBDOMAINS"
altdns -w $HOME/tools/CRIMSON/words/dns-altdns.txt -i live1.txt -o altdns_temp.txt
# FIX: was "$domain" (never defined) — the exported variable is $DOMAIN.
cat $HOME/tools/CRIMSON/words/dns | sed "s/$/\.$DOMAIN/" > ddns.txt
awk 'FNR==NR {a[$0]++; next} !($0 in a)' ddns.txt altdns_temp.txt > not_duplicated_altdns
awk 'FNR==NR {a[$0]++; next} !($0 in a)' subdomains.txt not_duplicated_altdns > not_duplicated_altdns2
awk 'FNR==NR {a[$0]++; next} !($0 in a)' live1.txt not_duplicated_altdns2 > altdns_with_domain_name
cat altdns_with_domain_name | sed "s/\.$DOMAIN//g" > altdns.txt
### BRUTE FORCE MUTATED WORDLIST
echo -e "\033[0;31m [+]\033[0m CHECK MUTATED SUBDOMAINS FOR LIVE SUBDOMAINS"
$HOME/tools/puredns/puredns bruteforce altdns.txt $DOMAIN -w brute2.txt
### CHECK LIVE FROM BRUTEFORCE OF MUTATED WORDLIST
cat brute2.txt | dnsx -o live2.txt
sort -u live1.txt live2.txt > live.txt
# FIX: the two intermediate files were misspelled "not_duplicated_altnds*",
# so they were never actually removed.
rm live1.txt live2.txt altdns_temp.txt not_duplicated_altdns not_duplicated_altdns2 ddns.txt altdns_with_domain_name altdns.txt brute2.txt
### SPIDER 2 = gau.txt
echo -e "\033[0;31m [+]\033[0m GATHER URLS"
cat live.txt | get-all-urls -subs | tee -a gau.txt
cat gau.txt | unfurl -u domains > subs_gau.txt
sort -u subs_gau.txt live.txt > live2.txt
mv live2.txt live.txt
rm subs_gau.txt
### SPIDER 3 = waybackurls.txt
cat live.txt | waybackurls | tee -a waybackurls.txt
cat waybackurls.txt | unfurl -u domains > subs_waybackurls.txt
sort -u subs_waybackurls.txt live.txt > live2.txt
mv live2.txt live.txt
rm subs_waybackurls.txt
### MERGE SPIDERS = urls.txt
cat waybackurls.txt gau.txt paramspider.txt | qsreplace -a > urls.txt
rm waybackurls.txt paramspider.txt gau.txt
### CHECK FOR CORS MISSCONFIGURATION && DOMAIN TAKEOVER
echo -e "\033[0;31m [+]\033[0m CHECK CORS"
cat live.txt | CorsMe -t 100 | tail -n+11 | tee cors_scan.txt
echo -e "\033[0;31m [+]\033[0m CHECK DOMAIN TAKEOVER"
subjack -w live.txt -t 100 -timeout 30 -o subjack.txt -ssl -a
### SCREENSHOOTS FROM DOMAINS
echo -e "\033[0;31m [+]\033[0m MAKE SCREENSHOTS OF ENUMERATED LIVE DOMAINS"
$HOME/tools/EyeWitness/Python/EyeWitness.py -f $HOME/bounty/$DOMAIN/live.txt -d screens
### ADD urls.txt TO BURPSUITE
echo -e "\033[0;31m [+]\033[0m PROXY ulrs.txt FROM SPIDERS"
cat urls.txt | grep $DOMAIN |grep http | sort -u > proxy.txt
wfuzz -L -Z -z file,proxy.txt -z file,$HOME/tools/CRIMSON/words/blank -p 127.0.0.1:8080 FUZZFUZ2Z
### PORT SCANING ALL DOMAINS
# NOTE(review): the original commented line embedded a plaintext sudo
# password here — redacted; never commit credentials.
#echo <sudo-password> | sudo -S -E $HOME/tools/port_scan.sh
### CHECK STATUS CODE OF ALL DOMAINS
echo -e "\033[0;31m [+]\033[0m CHECK STATUS CODE OF ENUMERATED LIVE SUBDOMAINS"
cat live.txt | sed "s/^/http:\/\//" >> temp_live.txt
cat live.txt | sed "s/^/https:\/\//" >> temp_live.txt
wfuzz -f status_live.txt,raw -Z -z file,temp_live.txt -z file,$HOME/tools/CRIMSON/words/blank FUZZFUZ2Z
rm temp_live.txt
### PROXY DOMAINS TO BURP
echo -e "\033[0;31m [+]\033[0m PROXY SUBDOMAINS TO BURP"
cat status_live.txt | grep -v "XXX" | cut -d "\"" -f 2 > proxy.txt
wfuzz -L -Z -z file,proxy.txt -z file,$HOME/tools/CRIMSON/words/blank -p 127.0.0.1:8080 FUZZFUZ2Z
### CHECK ZONETRANSFER
dnsrecon -d $DOMAIN -a
### PRINT RESULTS
echo -e "\033[0;31m [+]\033[0m RESULTS: "
echo "\n\nCheck: \n\t - subjack.txt \n\t - cors_scan.txt \n\t - status_live.txt \n\t - BURP & EyeWitness"
scite subjack.txt cors_scan.txt status_live.txt
### CLEAR THE DIRECTORY
rm crt.txt amass.txt assetfinder.txt brute_dns.txt geckodriver.log massdns.txt subfinder.txt
### CHECK STATUS CODE HERE TOO
### GO TO https://httpstatus.io/ + LIVE.TXT
### -S = take the pass from input -E use exported variable from user terminal
# echo <sudo-password> | sudo -S -E $HOME/tools/port_scan.sh
### GREP 2/3/4 LEVEL DOMAINS
# grep -P '^(?:[a-z0-9]+.){1}[^.]*$' 2nd level domains only
# grep -P '^(?:[a-z0-9]+.){2}[^.]*$' 3rd level domains only
# grep -P '^(?:[a-z0-9]+.){2,}[^.]*$' 3rd level domains or higher
# grep -P '^(?:[a-z0-9]+.){2,3}[^.]*$' 3rd to 4th level domains only
# grep -P '^(?:[a-z0-9]+.){3,}[^.]*$' 4th level domains or higher
###
| true |
515738ee721141b10c9032462c088a430673702f | Shell | DiogenesPolanco/gac | /gac.sh | UTF-8 | 1,626 | 4.0625 | 4 | [
"MIT"
] | permissive | #!/bin/zsh
function gac() {
  # gac — "git add & commit" with an emoji/semantic prefix.
  # Usage: gac <shortcut> <commit message...>
  # With no arguments (or -h/--help) print the shortcut legend and return 1.
  if [ $# -eq 0 ] || [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
    # displays help with
    # gac | gac -h | gac --help
    echo "------"
    echo "Cannot commit without comments. Semantic reminder:"
    echo "🐛 BUG FIX: b"
    echo "📦 CHORE: c"
    echo "📖 DOCS: d"
    echo "✅ FEAT: f"
    echo "🚀 NEW RELEASE: n"
    echo "👌 IMPROVE: i"
    echo "🪚 REFACTOR: r"
    echo "🎨 STYLE: s"
    echo "🧪 TEST: t"
    echo "🛠 WORKING ON: w"
    echo "------"
    return 1
  fi
  SHORTCUT=$1
  shift
  COMMENT="$*"
  # Map the one-letter shortcut to its semantic prefix.  An unknown value
  # is used verbatim as the first word of the message (original behaviour).
  case "$SHORTCUT" in
    b) SHORTCUT="🐛 BUG FIX:" ;;        # Fix a bug
    c) SHORTCUT="📦 CHORE:" ;;          # Chore
    d) SHORTCUT="📖 DOCS:" ;;           # Write or edit documentation
    f) SHORTCUT="✅ FEAT:" ;;           # Add new feature
    n) SHORTCUT="🚀 NEW RELEASE:" ;;    # Deploy in production
    i) SHORTCUT="👌 IMPROVE:" ;;        # Improve your code base
    r) SHORTCUT="🪚 REFACTOR:" ;;       # Refactor your code base
    s) SHORTCUT="🎨 STYLE:" ;;          # Styling actions
    t) SHORTCUT="🧪 TEST:" ;;           # Test your code
    w) SHORTCUT="🛠 WORKING ON:" ;;     # Working on a feature
  esac
  # Stage everything and commit; propagate git's exit status (FIX: the old
  # unconditional `return 1` made even a successful commit look failed).
  git add -A && git commit -m "$SHORTCUT $COMMENT"
}
36e41bb6ef445fd4277a67252c1ca0c38c210637 | Shell | adierkens/dotfiles | /zshrc | UTF-8 | 5,427 | 2.578125 | 3 | [] | no_license | # Set up the prompt
export ZSH=$HOME/.oh-my-zsh
plugins=(git osx git-prompt history-substring-search zsh-syntax-highlighting)
source $ZSH/oh-my-zsh.sh
autoload -Uz promptinit
promptinit
prompt adam1
setopt histignorealldups sharehistory
# Use emacs keybindings even if our EDITOR is set to vi
bindkey -e
# Keep 1000 lines of history within the shell and save it to ~/.zsh_history:
HISTSIZE=1000
SAVEHIST=1000
HISTFILE=~/.zsh_history
# Use modern completion system
autoload -Uz compinit
compinit
zstyle ':completion:*' auto-description 'specify: %d'
zstyle ':completion:*' completer _expand _complete _correct _approximate
zstyle ':completion:*' format 'Completing %d'
zstyle ':completion:*' group-name ''
zstyle ':completion:*' menu select=2
if whence dircolors > /dev/null; then
eval "$(dircolors -b)"
zstyle ':completion:*:default' list-colors ${(s.:.)LS_COLORS}
alias ls='ls --color'
else
export CLICOLOR=1
zstyle ':completion:*' list-colors ''
fi
zstyle ':completion:*' list-prompt %SAt %p: Hit TAB for more, or the character to insert%s
zstyle ':completion:*' matcher-list '' 'm:{a-z}={A-Z}' 'm:{a-zA-Z}={A-Za-z}' 'r:|[._-]=* r:|=* l:|=*'
zstyle ':completion:*' menu select=long
zstyle ':completion:*' select-prompt %SScrolling active: current selection at %p%s
zstyle ':completion:*' use-compctl false
zstyle ':completion:*' verbose true
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([0-9]#)*=0=01;31'
zstyle ':completion:*:kill:*' command 'ps -u $USER -o pid,%cpu,tty,cputime,cmd'
export VIEW=/usr/bin/elinks
function url-encode; {
setopt extendedglob
echo "${${(j: :)@}//(#b)(?)/%$[[##16]##${match[1]}]}"
}
function google; {
$VIEW "http://www.google.com/search?q=`url-encode '${(j: :)@'`"
}
#
setopt AUTO_CD # Don't need to type cd
setopt CORRECT # Spell check
setopt AUTO_PUSHD # cd = pushd
setopt PUSHD_SILENT
setopt PUSHD_TO_HOME # blank pushd goes to home
setopt RM_STAR_WAIT # 10 second delay if you do something that'll delete everything
setopt ZLE
export EDITOR="vim"
setopt IGNORE_EOF
setopt NO_BEEP
bindkey -M viins '\C-i' complete-word
# Faster! (?)
zstyle ':completion::complete:*' use-cache 1
#
# # case insensitive completion
zstyle ':completion:*' matcher-list 'm:{a-z}={A-Z}'
#
zstyle ':completion:*' verbose yes
zstyle ':completion:*:descriptions' format '%B%d%b'
zstyle ':completion:*:messages' format '%d'
zstyle ':completion:*:warnings' format 'No matches for: %d'
zstyle ':completion:*' group-name ''
#
# # generate descriptions with magic.
zstyle ':completion:*' auto-description 'specify: %d'
#
# # Don't prompt for a huge list, page it!
zstyle ':completion:*:default' list-prompt '%S%M matches%s'
#
# # Don't prompt for a huge list, menu it!
zstyle ':completion:*:default' menu 'select=0'
#
# # Have the newer files last so I see them first
zstyle ':completion:*' file-sort modification reverse
#
# # color code completion!!!! Wohoo!
zstyle ':completion:*' list-colors "=(#b) #([0-9]#)*=36=31"
#
unsetopt LIST_AMBIGUOUS
setopt COMPLETE_IN_WORD
#
# # Separate man page sections. Neat.
zstyle ':completion:*:manuals' separate-sections true
#
# # Egomaniac!
zstyle ':completion:*' list-separator 'fREW'
#
# # complete with a menu for xwindow ids
zstyle ':completion:*:windows' menu on=0
zstyle ':completion:*:expand:*' tag-order all-expansions
#
# # more errors allowed for large words and fewer for small words
zstyle ':completion:*:approximate:*' max-errors 'reply=( $(( ($#PREFIX+$#SUFFIX)/3 )) )'
#
# # Errors format
zstyle ':completion:*:corrections' format '%B%d (errors %e)%b'
#
# # Don't complete stuff already on the line
zstyle ':completion::*:(rm|vi):*' ignore-line true
#
# # Don't complete directory we are already in (../here)
#
zstyle ':completion::approximate*:*' prefix-needed false
#
# Prompt glyph: "±" when inside a git working tree, "○" otherwise.
function prompt_char {
	if git branch >/dev/null 2>/dev/null; then
		echo "±"
	else
		echo '○'
	fi
}
# Label for the prompt: contents of ~/.box-name when that file exists and
# is readable; in every other case fall back to the short hostname.
function box_name {
	{ [ -f ~/.box-name ] && cat ~/.box-name; } || hostname -s
}
local ruby_env=''
if which rvm-prompt &> /dev/null; then
ruby_env=' ‹$(rvm-prompt i v g)›%{$reset_color%}'
else
if which rbenv &> /dev/null; then
ruby_env=' ‹$(rbenv version-name)›%{$reset_color%}'
fi
fi
local current_dir='${PWD/#$HOME/~}'
local git_info='$(git_prompt_info)'
local prompt_char='$(prompt_char)'
PROMPT="╭─%{$FG[040]%}%n%{$reset_color%} %{$FG[239]%}at%{$reset_color%} %{$FG[033]%}$(box_name)%{$reset_color%} %{$FG[239]%}in%{$reset_color%} %{$terminfo[bold]$FG[226]%}${current_dir}%{$reset_color%}${git_info} %{$FG[239]%}using%{$FG[243]%}${ruby_env}
╰─${prompt_char} "
ZSH_THEME_GIT_PROMPT_PREFIX=" %{$FG[239]%}on%{$reset_color%} %{$fg[255]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%}"
ZSH_THEME_GIT_PROMPT_DIRTY="%{$FG[202]%}✘✘✘"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$FG[040]%}✔"
alias gs="git status"
alias gc="git commit"
export ANDROID_HOME=~/Developer/.android-sdk-macosx
export PATH=${PATH}:$ANDROID_HOME/bin
export PYTHONPATH="/Library/Python/2.7/site-packages:$PYTHONPATH"
# Setup zsh-autosuggestions
source /Users/Adam/.oh-my-zsh/custom/plugins/zsh-autosuggestions/autosuggestions.zsh
# Enable autosuggestions automatically
# ZLE hook: start zsh-autosuggestions each time the line editor
# initialises a new command line.
zle-line-init() {
  zle autosuggest-start
}
zle -N zle-line-init
# use ctrl+t to toggle autosuggestions(hopefully this wont be needed as
# zsh-autosuggestions is designed to be unobtrusive)
bindkey '^T' autosuggest-toggle
| true |
765fdfc5f1ac28a9a0729735372216b5b260ee23 | Shell | dycw/dotfiles | /mysql-workbench/install.sh | UTF-8 | 329 | 3.25 | 3 | [] | permissive | #!/usr/bin/env bash
# Install MySQL Workbench via Homebrew cask — macOS only, no-op elsewhere.
echo "$(date '+%Y-%m-%d %H:%M:%S'): Running mysql-workbench/install.sh..."
if [[ "$(uname -s)" =~ Darwin* ]]; then
	# shellcheck source=/dev/null
	# Ensure Homebrew itself is installed first (repo-local helper script).
	source "$(git rev-parse --show-toplevel)/brew/install.sh"
	# Only install the cask when `brew list` does not already contain it.
	if ! grep -Fxq mysqlworkbench <<<"$(brew list -1)"; then
		brew install --cask mysqlworkbench
	fi
fi
| true |
9d6b42db50accfda7d980ed2a590b348cd616375 | Shell | sangdongvan/tictactoe | /check_env.sh | UTF-8 | 152 | 2.921875 | 3 | [] | no_license | #!/bin/bash
# Guard: refuse to run unless the TICTACTOE build environment is active
# (presumably exported by public.sh — see the error message below).
[ -n "$TICTACTOE_ENV_SET" ] || {
	echo "Error: TICTACTOE build environment is not set up. Use sh public.sh make ${command}"
	exit 1
}
| true |
51c0ad68eedaaf84ab21862589421b0876a384b5 | Shell | flexhamp/quick-start-kubernetes | /kubernetes/kubernetes.sh | UTF-8 | 3,982 | 3.6875 | 4 | [] | no_license | #!/usr/bin/env bash
#trap ctrl_c INT
# Render the action menu and read a single keypress into the global
# variable $option (consumed by the dispatch loop at the bottom of the file).
function main_menu() {
  clear
  printf 'Select an option:\n\n'
  printf '%s\n' \
    '1 - Install Kubernetes' \
    '2 - Install Dashboard' \
    '3 - Uninstall Kubernetes' \
    '4 - Uninstall Dashboard' \
    '0 - Exit'
  read -n 1 option
}
# Pre-configure the host for Kubernetes (kubeadm requires SELinux and swap off)
function preset_kubernetes() {
  printf "\n"
  echo "Selinux and swap will be disabled"
#  read -n 1 -s -r -p "Press any key to continue"
  # Disable SELinux: immediately (setenforce) and persistently (config file)
  setenforce 0
  sed -i --follow-symlinks 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/sysconfig/selinux
  # Disable swap: drop fstab entries, then switch it off right away
  sed -i '/swap/d' /etc/fstab
  swapoff -a
}
function install_packages_kubernetes() {
  # Register the upstream Kubernetes yum repository, then install a pinned
  # kubelet/kubeadm/kubectl toolchain and start the kubelet service.
  echo "The packages kubelet-1.20.2-0, kubeadm-1.20.2-0, kubectl-1.20.2-0 will be installed"
#  read -n 1 -s -r -p "Press any key to continue"
  cat <<EOF >/etc/yum.repos.d/kubernetes.repo
[kubernetes]
name=Kubernetes
baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg
EOF
  # Pin version 1.20.2-0 — the version this script's configuration targets
  yum install kubelet-1.20.2-0 kubeadm-1.20.2-0 kubectl-1.20.2-0 -y
  systemctl enable kubelet
  systemctl start kubelet
  return 0
}
function kubeadm_init() {
  # Docker must use the systemd cgroup driver to match the kubelet.
  cat <<EOF >/etc/docker/daemon.json
{
  "exec-opts": ["native.cgroupdriver=systemd"]
}
EOF
  systemctl restart docker
  # Bootstrap the control plane; the CIDR matches flannel's default network.
  kubeadm init --pod-network-cidr=10.244.0.0/16
  mkdir -p $HOME/.kube
  cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
  kubectl apply -f https://raw.githubusercontent.com/coreos/flannel/master/Documentation/kube-flannel.yml
  # In the default configuration the master node does not run containers,
  # since it is busy tracking cluster state and redistributing resources.
  # Removing the taint allows running containers on the master — essential
  # when the cluster consists of a single node:
  kubectl taint nodes --all node-role.kubernetes.io/master-
}
function install_dashboard() {
  # Deploy the official dashboard manifests, then install the repo-local
  # helper script (plus admin / read-only service-account manifests) under
  # ~/dashboard and expose it system-wide as the `dashboard` command.
  kubectl apply -f https://raw.githubusercontent.com/kubernetes/dashboard/v2.0.0/aio/deploy/recommended.yaml
  mkdir -p ~/dashboard
  cp "$(pwd)"/kubernetes/dashboard/dashboard-admin.yaml ~/dashboard
  cp "$(pwd)"/kubernetes/dashboard/dashboard-read-only.yaml ~/dashboard
  cp "$(pwd)"/kubernetes/dashboard/dashboard.sh ~/dashboard
  chmod +x ~/dashboard/dashboard.sh
  ln -s ~/dashboard/dashboard.sh /usr/local/bin/dashboard
  dashboard start
}
# Stop/remove the dashboard by delegating to the installed `dashboard`
# helper script (set up by install_dashboard).
delete_dashboard() {
  dashboard stop
}
function setup_kubernetes() {
  # Full single-node install: host prep, Docker (via the companion script,
  # resolved relative to $(pwd) — run from the repository root), pinned
  # kubeadm packages, then cluster bootstrap.
  preset_kubernetes
  sh "$(pwd)"/docker/docker.sh 1
  install_packages_kubernetes
  kubeadm_init
#  read -n 1 -s -r -p "Press any key to continue"
}
function delete_kubernetes() {
  # Destructive teardown: reset kubeadm state, remove the packages and
  # configuration, and wait for a keypress before returning to the menu.
  kubeadm reset -f
  systemctl restart docker
  systemctl stop kubelet
  systemctl disable kubelet
  yum remove kubeadm kubectl kubelet kubernetes-cni kube* -y
  # Informational only — lists anything kube-related still left on disk.
  find / -name "*kube*"
  rm -rf /etc/kubernetes
  rm -rf ~/.kube
#  docker stop "$(docker ps -q)"
#  docker rm "$(docker ps -a -q)"
#  docker rmi "$(docker images -q)" -f
#  systemctl disable docker
#  systemctl disable docker.socket
#  systemctl stop docker
#  systemctl stop docker.socket
#  yum install docker-ce-19.03.9-3.el7 docker-ce-cli-19.03.9-3.el7 containerd.io -y
#  rm -rf /var/lib/docker
  read -n 1 -s -r -p "Press any key to continue"
}
#function ctrl_c() {
# echo -ne "\nTrapped CTRL-C"
#}
# Top-level dispatch: show the menu, run the selected action, repeat until
# the user picks 0 (Exit); clear the screen on the way out.
# (`while true` replaces the accidental `while [ 1 ]`, which only worked
# because "1" is a non-empty string; $option is quoted for safety.)
while true; do
  main_menu
  echo -ne "\n"
  case "$option" in
  1) setup_kubernetes ;;
  2) install_dashboard ;;
  3) delete_kubernetes ;;
  4) delete_dashboard ;;
  0) break ;;
  esac
done
clear
| true |
acb9abd55ae922dff1651d89e1e2e90671154f76 | Shell | google/trillian-examples | /serverless/deploy/github/log/leaf_validator/entrypoint.sh | UTF-8 | 2,153 | 4.09375 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# This is an example leaf validator, it doesn't really do much but the idea is
# that this action would run against PRs which are effectively "queuing" leaves
# and return success/failure depending on whether the leaves present in the PR
# conform to a given set of requirements.
#
# We'll consider a PR good if either:
# - it only touches files outside the log directory, or
# - it only touches files in the log's leaves/pending directory.
set -e
# Validate the files changed by the PR.  Environment:
#   INPUT_LOG_DIR    - log directory, relative to the workspace root
#   GITHUB_WORKSPACE - checkout root provided by the Actions runner
# Fails when the PR touches log state outside leaves/pending, or mixes
# leaf additions with unrelated changes.
function main {
  if [ "${INPUT_LOG_DIR}" == "" ]; then
    echo "Missing log dir input".
    exit 1
  fi

  echo "::debug:Log directory is ${GITHUB_WORKSPACE}/${INPUT_LOG_DIR}"

  # Quoted so a workspace path containing spaces is not word-split
  # (set -e above aborts the script if the cd fails).
  cd "${GITHUB_WORKSPACE}"

  # Figure out where any leaves in the PR should be rooted under
  PENDING_DIR="$(readlink -f -n "${INPUT_LOG_DIR}/leaves/pending")"
  echo "::debug:Pending leaf directory is ${PENDING_DIR}"

  # Now grab a list of all the modified/added/removed files in the PR
  FILES=$(git diff origin/master HEAD --name-only)

  local has_non_log_files=0
  local has_log_pending_files=0
  local has_log_non_pending_files=0
  while IFS= read -r f; do
    LEAF=$(readlink -f -n "${f}")
    if [[ ${LEAF} = ${PENDING_DIR}/* ]]; then
      echo "::debug:Found pending leaf ${LEAF}"
      # Checks on the format/quality of the leaf could be done here, along
      # with signature verification etc.
      has_log_pending_files=1
    elif [[ ${LEAF} = ${INPUT_LOG_DIR}/* ]]; then
      # NOTE(review): LEAF is an absolute path while INPUT_LOG_DIR may be
      # relative — confirm this pattern matches as intended.
      echo "::warning file=${f}::Added/Modified non-pending leaves file in \`${INPUT_LOG_DIR}/\` directory"
      has_log_non_pending_files=1
    else
      echo "Found non-log file ${f}"
      has_non_log_files=1
    fi
  done <<< "${FILES}"

  if [[ ${has_log_non_pending_files} -ne 0 ]]; then
    echo "::error:PR attempts to modify log structure/state"
    exit 1
  fi

  if [[ ${has_log_pending_files} -ne 0 && ${has_non_log_files} -ne 0 ]]; then
    echo "::error:PR mixes log additions and non-log changes, please split them up"
    exit 1
  fi
}

main
| true |
540f1a6411059db09c43c61350ca04db2ac9cecc | Shell | pld-linux/ircd-hybrid | /ircd-hybrid.init | UTF-8 | 1,270 | 4.15625 | 4 | [] | no_license | #!/bin/sh
#
# ircd		This shell script takes care of starting and stopping ircd.
#
# chkconfig:	234 75 30
# description:	Internet Relay Chat Server.
#

# Source function library (provides daemon, killproc, status and the
# msg_* / is_yes helpers used throughout this init script).
. /etc/rc.d/init.d/functions

# Source networking configuration.
. /etc/sysconfig/network

# Get service config
[ -f /etc/sysconfig/ircd ] && . /etc/sysconfig/ircd

# Check that networking is up.  stop/status are still allowed while the
# network subsystem lock is absent; all other actions abort.
if is_yes "${NETWORKING}"; then
	if [ ! -f /var/lock/subsys/network -a "$1" != stop -a "$1" != status ]; then
		msg_network_down "IRCd Server"
		exit 1
	fi
else
	exit 0
fi

# See how we were called.  The subsys lock file doubles as the
# "service is running" marker, per the rc-scripts convention.
case "$1" in
  start)
	# Check if the service is already running?
	if [ ! -f /var/lock/subsys/ircd ]; then
		# Start daemons.
		msg_starting "IRCd Server"
		daemon ircd
		touch /var/lock/subsys/ircd
	else
		msg_already_running "IRCd Server"
	fi
	;;
  stop)
	# Check if the service is already running?
	if [ -f /var/lock/subsys/ircd ]; then
		# Stop daemons.
		msg_stopping "IRCd Server"
		killproc ircd
		rm -f /var/run/ircd.pid /var/lock/subsys/ircd >/dev/null 2>&1
	else
		msg_already_running "IRCd Server"
	fi
	;;
  restart|force-reload)
	$0 stop
	$0 start
	exit $?
	;;
  status)
	status ircd
	exit $?
	;;
  *)
	msg_usage "$0 {start|stop|restart|force-reload|status}"
	exit 3
esac

exit 0
| true |
974a2bd5549299d7a642f3e71b618c18dfa17c4d | Shell | daywatch/ASR-for-Chinese-Pipeline | /code/split.sh | UTF-8 | 307 | 2.609375 | 3 | [] | no_license | #!/bin/bash
# Split every WAV under .../data/wav/<dir>/ into 10-second chunks with sox,
# writing the pieces to .../data/split/<dir>/<basename>/.
cd /mnt/rds/redhen/gallina/Singularity/data/wav || exit 1
echo 'Split Begins'
# Iterate with globs instead of parsing `ls` output, so names containing
# spaces survive; only directories are visited (stray files under wav/
# previously produced broken <file>/<file> paths).
for file in */
do
	file=${file%/}
	for wav in "$file"/*
	do
		wav=${wav##*/}
		mkdir -p "../split/$file/${wav%%.*}"
		# NOTE(review): the trailing \; is passed to sox as a literal
		# argument (looks copied from a find -exec) — confirm sox ignores
		# it, or remove it.
		sox "$file/$wav" "../split/$file/${wav%%.*}/$wav" trim 0 10 : newfile : restart \;
		echo "$file/$wav split done"
	done
done
exit
| true |
c50b93f0e117c70d8159ebceef202a6b6f26d6a5 | Shell | talmobi/dotfiles | /.bashrc | UTF-8 | 6,146 | 3.203125 | 3 | [] | no_license | export LC_ALL=en_US.UTF-8
export BASH_SILENCE_DEPRECATION_WARNING=1
# disbale ctrl-d for closing windows/sessions accidentally
set -o ignoreeof
# fix for scp / rsync ( don't do anything if terminal isn't human basically )
case $- in
*i*) ;;
*) return;;
esac
unameOut="$(uname -s)"
case "${unameOut}" in
Linux*) machine=Linux;;
Darwin*) machine=Mac;;
CYGWIN*) machine=Cygwin;;
MINGW*) machine=MinGw;;
*) machine="UNKNOWN:${unameOut}";;
esac
isWindows=1
alias ls='ls -G'
if [ $machine = Mac ] ; then
alias ls='ls -G'
isWindows=0
elif [ $machine = Linux ] ; then
alias ls='ls --color'
isWindows=0
elif [ $machine = MinGw ] || [ $machine = Cygwin ] ; then
alias ls='ls --color'
isWindows=1
fi
echo "BASH_VERSION: $BASH_VERSION"
echo "Machine: $machine"
# if [ $ncolors -ge 256 ]; then
if [ -t 1 ]; then
if [ $TERM = xterm ]; then
export TERM=xterm-256color
else
export TERM=screen-256color
fi
fi
# fix irssi corrupted scrolling
alias irssi='TERM=screen irssi'
# if on windows
if [ $isWindows -eq 1 ]; then
export TERM=xterm
export FORCE_COLOR=true
fi
echo "TERM: $TERM"
ncolors=$(tput colors)
echo "ncolors: $ncolors"
if [ -x "$(command -v vim)" ]; then
export EDITOR=vim
fi
# Regenerate a ctags index at the root of the current git repository.
# Runs in a subshell, so the caller's working directory is never changed —
# even when ctags fails.  (The old cd/cd-back pair broke on paths with
# spaces and left the shell stranded at the repo root on error; it also
# ran ctags in the wrong place when `git rev-parse` failed outside a repo.)
function git-ctags () {
    (
        project_path=$(git rev-parse --show-toplevel) || return
        cd "$project_path" || return
        ctags -R .
    )
}
alias jap="grep . ~/dotfiles/jap/* | nfzf"
alias tips="grep . ~/dotfiles/scripts/tips.txt | nfzf"
alias sf="rg --files | fzf"
alias saf="find . | fzf"
alias tree="tree -I 'node_modules'"
alias gist="ls -1 ~/dotfiles/gists/* | fzf --exit-0 --bind 'enter:execute(vim --not-a-term -- {})+abort'"
alias gitcheckout="git branch | sed s/*//g | sed s/\ //g | nfzf --normal | xargs git checkout"
alias gitmerge="git branch | sed s/*//g | sed s/\ //g | nfzf --normal | xargs git merge"
alias gitlog="git log --all --graph --decorate --oneline"
alias gitrebase="git branch | sed s/*//g | sed s/\ //g | nfzf --normal | xargs git rebase -i"
alias gitdeletedfiles="git log --diff-filter=D --summary | sed -n -e '/^commit/h' -e '\:/:{' -e G -e 's/\ncommit \(.*\)/ \1/gp' -e }"
# https://www.cyberciti.biz/faq/how-do-i-find-the-largest-filesdirectories-on-a-linuxunixbsd-filesystem/
# get top 10 files/dirs eting disc space
alias ducks="du -cks * | sort -n | head"
alias duke100="du -k * | awk '$1 > 100000' | sort -nr"
# never again...
alias gti=git
alias nulab-sso='aws sso login --profile cacoo-develop'
. ~/dotfiles/z.sh
alias h='history'
# refresh tmux pane in case of tty corruption
tmux-refresh() {
    # Restore sane tty modes, rewrite the tmux window/pane titles via escape
    # sequences (shell basename and hostname), reset the terminal state,
    # then ask tmux to redraw the client.
    stty sane; printf '\033k%s\033\\\033]2;%s\007' "$(basename "$SHELL")" "$(uname -n)"; tput reset; tmux refresh
}
# export PROMPT_COMMAND="history -a; history -n"
# Emit " (branch)" for the current git branch, or nothing outside a repo.
# Consumed inside a PS1 command substitution, so trailing whitespace is
# stripped by the shell either way.
function parse_git_branch {
  git branch 2> /dev/null | sed -n 's/^\* \(.*\)/ (\1)/p'
}
[ -f ~/.fzf.bash ] && source ~/.fzf.bash
# enable bash completion in interactive shells
# if ! shopt -oq posix; then
# if [ -f /usr/share/bash-completion/bash_completion ]; then
# . /usr/share/bash-completion/bash_completion
# elif [ -f /etc/bash_completion ]; then
# . /etc/bash_completion
# fi
# fi
# get current branch in git repo
function parse_git_branch2 {
BRANCH=`git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/\1/'`
if [ ! "${BRANCH}" == "" ]
then
# STAT=`parse_git_dirty`
# echo "[${BRANCH}${STAT}]"
echo "(${BRANCH})"
else
echo ""
fi
}
# get current status of git repo
function parse_git_dirty {
  # Condense `git status` into a compact flag string for the prompt:
  #   >  renamed     *  ahead of upstream    +  new file
  #   ?  untracked   x  deleted              !  modified
  # Each grep's exit status (0 = marker found) is captured via `echo "$?"`;
  # flags are prepended one at a time, giving a fixed display order.
  status=`git status 2>&1 | tee`
  dirty=`echo -n "${status}" 2> /dev/null | grep "modified:" &> /dev/null; echo "$?"`
  untracked=`echo -n "${status}" 2> /dev/null | grep "Untracked files" &> /dev/null; echo "$?"`
  ahead=`echo -n "${status}" 2> /dev/null | grep "Your branch is ahead of" &> /dev/null; echo "$?"`
  newfile=`echo -n "${status}" 2> /dev/null | grep "new file:" &> /dev/null; echo "$?"`
  renamed=`echo -n "${status}" 2> /dev/null | grep "renamed:" &> /dev/null; echo "$?"`
  deleted=`echo -n "${status}" 2> /dev/null | grep "deleted:" &> /dev/null; echo "$?"`
  bits=''
  if [ "${renamed}" == "0" ]; then
    bits=">${bits}"
  fi
  if [ "${ahead}" == "0" ]; then
    bits="*${bits}"
  fi
  if [ "${newfile}" == "0" ]; then
    bits="+${bits}"
  fi
  if [ "${untracked}" == "0" ]; then
    bits="?${bits}"
  fi
  if [ "${deleted}" == "0" ]; then
    bits="x${bits}"
  fi
  if [ "${dirty}" == "0" ]; then
    bits="!${bits}"
  fi
  # Leading space separates the flag string from the branch name in PS1.
  if [ ! "${bits}" == "" ]; then
    echo " ${bits}"
  else
    echo ""
  fi
}
# export PS1="\u\W\`parse_git_branch\` "
if [ $isWindows -eq 0 ]; then
export PS1='\e[0;32m \u \e[m\e[0;33m \w \e[m\e[1;33m$(parse_git_branch2)\e[m\n\$ '
else
export PS1='\e[0;32m \u@\h\e[m\e[0;35m \s\e[m\e[0;33m \w\e[m \e[1;36m`parse_git_branch2`\e[m\n\$ '
# export PS1='\e[0;32m \u \e[m\e[0;33m \w \e[m\e[1;33m$(parse_git_branch)\e[m\n\$ '
fi
# Run a command under x86_64 emulation via the macOS `arch` tool.
# "$@" is quoted (FIX): the old unquoted $@ re-split arguments that
# contained spaces before handing them to the target command.
arm() {
  arch -x86_64 "$@"
}
export HISTIGNORE='history:clear:h:jap:tips'
# don't put duplicate lines or lines starting with space in the history.
# See bash(1) for more options
# also important for tmux @resurrect-save-shell-history to prevent duplicates
HISTCONTROL=ignoreboth
# append to the history file, don't overwrite it
shopt -s histappend
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
HISTSIZE=99999
HISTFILESIZE=99999
# export FZF_DEFAULT_COMMAND='find . | grep --exclude=vim'
export FZF_DEFAULT_COMMAND='rg --files'
export FZF_CTRL_T_COMMAND="$FZF_DEFAULT_COMMAND"
export FZF_DEFAULT_OPTS="--bind ctrl-e:preview-down,ctrl-y:preview-up"
# https://github.com/junegunn/fzf/issues/816
export FZF_CTRL_R_OPTS="--preview 'echo {}' --preview-window down:3:hidden --bind '?:toggle-preview' --bind 'ctrl-y:execute(echo -n {2..} | pbcopy)' --header 'Press CTRL-Y to copy command into clipboard'"
eval "$(direnv hook bash)"
# $(brew --prefix asdf)/etc/bash_completion.d/asdf.bash
# test -d ~/var/log/ && (
# echo "$(date): .bashrc: $0: $$"; pstree -lp $PPID 2>/dev/null
# echo "BASH_SOURCE: ${BASH_SOURCE[*]}"
# echo "FUNCNAME: ${FUNCNAME[*]}"
# echo "BASH_LINENO: ${BASH_LINENO[*]}"
# ) >> ~/var/log/config-scripts.log
| true |
6c65ba222c094708186ad89184b6c024724c3222 | Shell | liuyujie/iOSXocdeBuildShell | /EnterpriseBuildDev.sh | UTF-8 | 1,080 | 2.8125 | 3 | [] | no_license | #!/bin/bash
# Adapted from https://github.com/qindeli/WorksapceShell
# NOTE: this script and the .xcworkspace must live in the same directory.

# Project (Target) name
Project_Name="Project_Name"
# Workspace name
Workspace_Name="Workspace_Name"
# Build configuration: Release or Debug (defaults to Release)
Configuration="Release"
# Directory the exported IPA is written to (timestamped per run)
IPA_Save_Path="/Users/${USER}/Desktop/${Project_Name}_$(date +%H%M%S)"
# Export-options plist for the enterprise (dev) distribution.
# (FIX: removed the pointless self-assignment that followed this line.)
EnterpriseExportOptionsPlist=./EnterprisePlistDev.plist

# clean
xcodebuild clean -configuration "$Configuration" -alltargets
# archive
xcodebuild archive -workspace "$Workspace_Name.xcworkspace" -scheme "$Project_Name" -configuration "$Configuration" -archivePath "build/$Project_Name-build.xcarchive"
# export the IPA
xcodebuild -exportArchive -archivePath "build/$Project_Name-build.xcarchive" -exportOptionsPlist "${EnterpriseExportOptionsPlist}" -exportPath "$IPA_Save_Path"
# upload to fir.im (requires the fir CLI: https://github.com/FIRHQ/fir-cli/blob/master/README.md)
fir publish "${IPA_Save_Path}/${Project_Name}.ipa"
rm -rf build
1fb626ab30cb62bb98ba492dc4ada8439d205ff1 | Shell | sahil-rao/deployment-sahil | /packer/scripts/setup_ubuntu.sh | UTF-8 | 524 | 2.65625 | 3 | [] | no_license | #!/bin/bash
# Provision a fresh Ubuntu EC2 instance: base packages plus pinned Python
# tooling for AWS access and log shipping.
set -euv
# Ubuntu 14.04, m4.xlarge (hvm)
# AWS EC2 instances sometimes have stale APT caches when starting up... so we
# wait for AWS to do its magic and refresh them
sleep 10s
apt-get clean; apt-get update
# Base editors, VCS, log rotation, monitoring and Python runtime.
apt-get -y install \
    emacs \
    git \
    logrotate \
    monit \
    ntp \
    python \
    python-dev \
    python-pip \
    vim
# Pinned versions keep provisioning reproducible.
pip install --upgrade pip
pip install awscli-cwlogs==1.4.0
pip install awscli==1.9.8
pip install boto==2.45.0
pip install boto3==1.4.4
pip install datadog==0.15.0
pip install requests==2.2.1
| true |
d0c1f96b679d95ac1453f51dd2ff98ef7e8c156c | Shell | Privex/azirevpn-builder | /build.sh | UTF-8 | 1,211 | 3.859375 | 4 | [
"X11"
] | permissive | #!/usr/bin/env bash
: ${INSTALL_DEPS=0}
: ${BUILD_PKG=1}
: ${INSTALL_PKG=1}
: ${OUT_DIR="/output"}
# Abort the script when the given exit code is non-zero.
#   $1 - exit code to inspect
#   $2 - (optional) name of the step that produced it
#   $3 - (optional) extra hint printed alongside the error
# Returns 0 when $1 is zero; otherwise prints diagnostics to stderr and
# exits with that code.
errcheck() {
    local rc="$1" where="NOT SPECIFIED" hint=""
    if (( $# > 1 )); then where="$2"; fi
    if (( $# > 2 )); then hint="$3"; fi
    if [ "$rc" -ne 0 ]; then
        >&2 echo -e "\n [!!!] ERROR: Non-zero return code (${rc}) detected at segment: $where"
        if [[ -n "$hint" ]]; then
            >&2 echo -e " [!!!] NOTE: $hint"
        fi
        >&2 echo ""
        exit "$rc"
    fi
    return 0
}
if (( INSTALL_DEPS )); then
echo " >>> Installing dependencies using make install-deps"
make install-deps
errcheck "$?" "make install-deps" "Something went wrong while installing dependencies"
fi
if (( BUILD_PKG )); then
echo " >>> Building DEB package using: make build-deb"
make build-deb
errcheck "$?" "make build-deb" "Something went wrong while building the DEB file."
echo " >>> Copying 'azirevpn_*' to '${OUT_DIR}' ..."
cp -v ../azirevpn_* "${OUT_DIR%/}/"
fi
if (( INSTALL_PKG )); then
echo " >>> Installing DEB package using: make install-deb"
make install-deb
errcheck "$?" "make install-deb" "Something went wrong while installing the package from the DEB file."
fi
| true |
3e572d532245df6ef31663cbf624bcdb6cd54815 | Shell | AdaptiveScale/market | /minio_spark_cluster/bin/ubuntu/14/1_configure_sync_ntp.sh | UTF-8 | 352 | 2.65625 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/sh
# Install NTP and force an immediate one-shot clock sync.
echo "Installing <NTP> mechanism to update clocks !"
sudo apt-get update --fix-missing
sudo apt-get install ntp -y
# ntpd cannot step the clock while the daemon holds the port, so stop it first.
sudo service ntp stop
echo "Updating internal clock and according offset(slipage) !"
# -g allows a large initial correction, -q sets the clock once and exits.
sudo ntpd -gq
sudo service ntp start
echo "The clock has been synchornised via <NTP> !"
echo "============================================"
b4667ad73110c0de2b5b8788fb73c759aecf6385 | Shell | Heroszhen/nodejs_api | /push.sh | UTF-8 | 216 | 2.921875 | 3 | [] | no_license | #! /bin/bash
#push to github
# Stage everything, commit with a stock message and push to master.
git add -A
git commit -m "maj"
git push origin master

# Optional: pass "archive" as the first argument to also zip the project
# (node_modules excluded).  BUGFIX: $1 was tested unquoted, which breaks on
# values containing spaces; the two nested ifs are collapsed into one.
if [ "${1:-}" = "archive" ]
then
    zip ../nodejs-default.zip -r * .[^.]* -x "node_modules/*"
fi | true |
9def740d462e277a9450d8c1afd3ba1aa615c1b2 | Shell | spdqbr/dotfiles | /functions.source | UTF-8 | 14,412 | 4.1875 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# swap directory
# useful for changing from /nmount/server1/Application/logs
# to /nmount/server2/Application/logs
# by use of "sd server1 server2"
# swap directory: replace the path component $1 in $PWD with $2 and cd there.
# Handy for hopping between parallel trees, e.g. `sd server1 server2`.
sd() {
    local target="${PWD/\/$1\//\/$2\/}"
    cd "$target"
}
# frequent directory, used to alias directories
# stores aliases in ~/.fd
fd() {
# Get the fd alias from the front of the string
local fdshort=${1%%/*}
# Find the expansion
local equivalent=$( grep -E "^$fdshort " ~/.fd | sed -e 's/^[^ ]* //g')
if [[ "$equivalent" != "" ]]
then
# cd to the full directory
cd "${1/$fdshort/$equivalent}"
else
# alias not found, print aliases
echo ""
cat ~/.fd
echo ""
fi;
}
# adds the current working directory to your fd alias list
addfd(){ echo "$1" "$(pwd)" >> ~/.fd; }
# autocomplete for fd
_fd_complete() {
# Extract the word fragment we're trying to complete
local word=${COMP_WORDS[COMP_CWORD]}
# Check for path separators
local slashCount=$( grep -o "/" <<< "$word" | wc -l )
if [[ $slashCount -gt 0 ]]
then
# Tell complete to only display the last part of the match
# e.g. if the match is /temp/foo/bar, only display bar
compopt -o filenames
# Extract the fd alias
fdshort=${word%%/*}
# Find its expansion
fd=$( grep -E "^${fdshort} " ~/.fd | sed -e 's/^[^ ]* //g' -e 's/ /\\ /g' )
# if the path is more than one level deep, split out the intermediates
# e.g. if the command line is fd tmp/foo/bar/blah
# "path" becomes foo/bar
local path=
if [[ $slashCount -gt 1 ]]
then
# trim the fd alias from the beginning
path=${word#*/}
# trim the incomplete word from the end
path=${path%/*}
# Combine everything except the incomplete word
fdshort=${fdshort}/${path}
fi
# Get the incomplete word
local partialword=${word##*/}
# Generate a list of directories matching the incomplete word in the expanded path
local list=$( cd ${fd}/${path}; compgen -d )
COMPREPLY=($(compgen -W '${list[@]}' -- "$partialword"))
# add the full fd path to the compreply so it doesn't overwrite the commandline
temp=()
for reply in "${COMPREPLY[@]}"
do
temp+=("${fdshort}/${reply}")
done
COMPREPLY=(${temp[@]})
else
# If there are no slashes, just source the ~/.fd file
local list=$(sed -e 's/ .*//g' ~/.fd)
COMPREPLY=($(compgen -W '${list[@]}' -- "$word"))
fi
# Append the slash if there's only one match
if [[ ${#COMPREPLY[@]} -eq 1 ]];
then
i=${COMPREPLY[0]};
COMPREPLY[0]="${i}/";
fi;
return 0
}
complete -o nospace -F _fd_complete fd
# find file
# searches all subdirectories of . for a file (not directory)
# containing the given substring (case insensitive)
ff() { find . -type f -iname '*'"$*"'*' ; }
# Swap 2 filenames around, if they exist
# Swap two file names around, if both exist.
# BUGFIX: the original used a predictable, unconditional tmp.$$ name in the
# current directory (clobbering any existing file of that name); use mktemp
# and chain the moves so a failure stops the sequence.
swap()
{
    local tmpfile
    [ $# -ne 2 ] && echo "swap: 2 arguments needed" && return 1
    [ ! -e "$1" ] && echo "swap: $1 does not exist" && return 1
    [ ! -e "$2" ] && echo "swap: $2 does not exist" && return 1
    tmpfile=$(mktemp "./swap.XXXXXX") || return 1
    mv -- "$1" "$tmpfile" &&
    mv -- "$2" "$1" &&
    mv -- "$tmpfile" "$2"
}
# more thorough evrsion of which, checks aliases and functions
function which {
(alias; declare -f) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot "$@"
}
# Pretty print xml
formatxml(){
if [[ $# -eq 0 ]]
then
xmlData=$(zenity --entry --text="Paste XML Data")
echo "$xmlData" | xmllint --format -
else
xmllint --format "$@" 2> /dev/null
fi
}
# Pattern-move, move/rename files using regex replacement
# -d for dry-run
pmv(){
if [[ $# -lt 3 ]]
then
echo "Usage: $0 [-d] \"pattern\" \"replacement\" file1 [file2 [file3 [ ... ]]]"
echo "-d for dry run"
return
fi
dryRun=false
pattern=$1
shift
if [[ "$pattern" == "-d" ]]
then
dryRun=true
pattern=$1
shift
fi
replacement=$1
shift
if [[ "$dryRun" == true ]]
then
echo "*****Dry run*****"
fi
echo "$pattern -> $replacement"
while [[ $# -gt 0 ]]
do
newName=$( echo "$1" | sed -e "s/$pattern/$replacement/g" )
if [[ "$newName" != "$1" ]]
then
echo "mv \"$1\" \"$newName\""
if [[ "$dryRun" == false ]]
then
mv "$1" "$newName"
fi
fi
shift
done
}
# Multi-pattern move
# do multiple pattern substitutions on multiple files
# Defaults to dry run. First argument == "!" to actually do the move
# Example:
# Files in dir: "foo bar baz.txt" "bing-bang bong.txt"
# > mpmv ! " " "-" 'o' '0' -- *
# "f00-bar-baz.txt" "bing-bang-b0ng.txt"
mpmv () {
dryRun=0
if [[ "$1" == "!" ]]; then
dryRun=1
shift
fi
ops=()
while [[ ! -z $1 ]]; do
pat=$1; shift
if [[ "$pat" == "--" ]]; then
break
fi
rep=$1; shift
ops+=("$pat")
ops+=("$rep")
done
echo "All: ${ops[@]}"
for file in "$@"; do
echo "file: $file"
new=$file
for (( i=0; i<${#ops[@]}; i+=2)); do
new=$( echo "$new" | sed -e "s/${ops[i]}/${ops[$(( i + 1 ))]}/g" )
done
echo "new: $new"
if [[ "$dryRun" == "1" ]]; then
if [[ "$file" != "$new" ]]; then
mv "$file" "$new"
fi
fi
echo ""
done
}
# Quickly test how bash handles lists of args
argsTest ()
{
echo "$# parameters";
echo Using '$*';
for p in $*;
do
echo "[$p]";
done;
echo Using '"$*"';
for p in "$*";
do
echo "[$p]";
done;
echo Using '$@';
for p in $@;
do
echo "[$p]";
done;
echo Using '"$@"';
for p in "$@";
do
echo "[$p]";
done
}
# Normalize yes/no response
# Ask a yes/no question on stdout and read the reply from stdin.
#   $1 - prompt text
#   $2 - default answer used when the user just presses enter
# Returns 0 for an affirmative reply (y/yes/t/true/uh-huh/yup/affirmative,
# any case), 1 otherwise.
yesno() {
    local prompt=$1 fallback=$2 reply
    echo -n "$prompt"
    read -r reply
    [ -z "$reply" ] && reply=$fallback
    reply=$(printf '%s' "$reply" | tr '[:lower:]' '[:upper:]')
    case "$reply" in
        Y|YES|T|TRUE|UH-HUH|YUP|AFFIRMATIVE) return 0 ;;
        *) return 1 ;;
    esac
}
# Get current screen geometry using xtermcontrol
getGeom() {
current=$( xtermcontrol --get-geometry )
export curW=$(echo "$current" | sed -e 's/x.*//g')
export curH=$(echo "$current" | sed -e 's/.*x\([^+]*\)+.*/\1/g')
export curX=$(echo "$current" | sed -e 's/[^+]*+\(.*\)+.*/\1/g')
export curY=$(echo "$current" | sed -e 's/.*+//g')
}
# Shrink terminal screen to half it's height moving it "half up"
hu() {
getGeom;
xtermcontrol --geometry="${curW}x$(( curH / 2 ))+${curX}+${curY}"
}
# Expand terminal screen to twice its height, moving it "Double down"
dd() {
getGeom;
xtermcontrol --geometry="${curW}x$(( curH * 2))+${curX}+${curY}"
}
# Moves files with a given search string to the given folder
# Example: sortByString Exception Exception_dir *.log
# Would move all log files containing the word "Exception" to the Exception_dir folder
sortByString() {
searchString=$1
shift
dirName=$1
shift
echo "Moving files with string \"$searchString\" to $dirName"
if [[ ! -d $dirName ]]
then
mkdir "$dirName"
fi
for file in $(grep -l "$searchString" "$@")
do
echo "$file"
mv "$file" "$dirName/$file"
done
}
# Look In Zip (and jar) files for a file containing the search string
liz() {
if which jar 1> /dev/null 2> /dev/null
then
unzip_cmd="$(which jar)"
unzip_args="-ft"
elif which unzip 1> /dev/null 2> /dev/null
then
unzip_cmd="$(which unzip)"
unzip_args="-l"
else
echo "Could not find unzip or jar command in path" 1>&2
return 1
fi
find . -type f \( -iname \*.jar -o -iname \*.zip \) -print | \
while read -r filename
do
values=$( "$unzip_cmd" $unzip_args "$filename" | grep -E "$1")
if [[ $? -eq 0 ]]
then
echo "$filename"
for value in $values
do
echo " $value"
done
echo ""
fi
done
}
# Resize terminal to rows x columns
resize(){
printf "\\e[8;%s;%s;t" "$1" "$2"
}
# Find all differences between two folders
# Output a copy-paste friendly diff command for differing files
foldiff() {
diff --recursive --brief "$1" "$2" | sed -e "s/^Files/windiff/g" -e "s/ and / /g" -e "s/ differ$//g"
}
# cd up to a folder higher in the path
# For example, if you're in /var/logs/foo/bar/baz you can run
# cdup logs
# to switch to /var/logs
# cd up to a named ancestor directory of the current path, e.g. from
# /var/logs/foo/bar `cdup logs` lands in /var/logs.
# With no argument, behaves like `cd ..`.
function cdup {
    if [[ -z "$1" ]]; then
        cd .. && return 0
    fi
    # Strip everything after the first "/<name>/" occurrence in $PWD.
    newdir="${PWD/\/$1\/*/}/$1"
    if [[ -d "$newdir" ]]; then
        cd "$newdir"
    else
        echo "\"$newdir\" does not exist"
    fi
}
function _cdup_complete {
local word=${COMP_WORDS[COMP_CWORD]}
local list=$(pwd | cut -c 2- | sed -e 's#/[^/]*$##g' -e 's/\([ ()]\)/\\\\\1/g')
IFS=/
list=$(compgen -W "$list" -- "$word")
IFS=$'\n'
COMPREPLY=($list)
return 0
}
complete -F _cdup_complete cdup
# resolve dependencies for gem
function resolve_gem_dependencies {
tempfile=/tmp/gemlist.txt
gem list > $tempfile
grep '^gem' "$@" | \
sed -e 's/gem "\([^"]*\)", "= \([^"]*\)"/\1:\2/g' | \
while read -r line
do
version=${line/#*:/}
gem=${line/%:*/}
grep -q "$gem (.*$version.*)" $tempfile> /dev/null
if [[ $? -ne 0 ]]
then
command="gem install $gem -v $version"
echo "$command"
$command
fi
done
}
# Auto search the bash man page for a command
function bashman {
man bash | /usr/bin/less -p "^ $1 ";
}
# Backup/restore a file by adding or removing the .bak exension
function bak {
local force=1
local cmd=mv
local OPTIND=0
while getopts :fmc FLAG
do
echo "$FLAG"
case "$FLAG" in
f) force=0
;;
m) cmd="mv"
;;
c) cmd="cp"
;;
*) echo "Unknown flag $FLAG"
return
;;
esac
done
shift $((OPTIND-1))
for var in "$@"
do
if [[ -f "$var" ]]
then
if [[ "$var" == *.bak ]]
then
doMove=0
if [[ $force -eq 1 && -f ${var%.bak} ]]
then
yesno "${var%.bak} exists. Overwrite [yN]? " "n"
doMove=$?
fi
if [[ ${doMove} -eq 0 ]]
then
echo "Restoring $var to ${var%.bak}..."
$cmd "$var" "${var%.bak}"
else
echo "Skipping restore of $var"
fi
else
doMove=0
if [[ $force -eq 1 && -f "${var}.bak" ]]
then
yesno "${var}.bak exists. Overwrite [yN]? " "n"
doMove=$?
fi
if [[ ${doMove} -eq 0 ]]
then
echo "Backing up $var to ${var}.bak..."
$cmd "$var" "${var}.bak"
else
echo "Skipping backup of $var"
fi
fi
else
echo "File not found: $var"
fi
done
}
# Sanitize file names/paths for pasting into terminal
function sanitize {
while read -r arg
do
echo "$arg" | sed -e 's/\([-*? ()]\)/\\\1/g'
done
}
# popup window when command is complete
# If no command is given, re-execute the last command and alert
function alert {
if [[ $# -eq 0 ]]
then
command=$(fc -ln -1)
else
command="$*"
fi
$command
returnCode=$?
if [[ $returnCode -eq 0 ]]
then
zenity --info --text="Command \\n\
$command \\n\
Completeded successfully"
else
zenity --error --text="Command \\n\
$command \\n\
Failed! Exit code $returnCode"
fi
}
# Select columns by name from input
# e.g.
# echo -e "a b c d\n1 2 3 4" | colsel d a c
# will output
# d a c
# 4 1 3
# Select columns by name from whitespace-separated input.
# The first line is the header row; the requested columns are printed
# tab-separated, in the order given, for every line (header included):
#   echo -e "a b c d\n1 2 3 4" | colsel d a c   ->   d a c / 4 1 3
#
# BUGFIX: the original iterated `for header in "${line[@]}"` over a scalar,
# which yields the whole line as one element, so header names never matched;
# it also leaked the `indices` variable and IFS across calls.
function colsel {
    local -a want=("$@") fields idx=()
    local line sel i out first=1
    while IFS= read -r line; do
        read -r -a fields <<< "$line"
        if (( first )); then
            first=0
            # Map each requested column name to its zero-based field index.
            for sel in "${want[@]}"; do
                for i in "${!fields[@]}"; do
                    if [[ "$sel" == "${fields[$i]}" ]]; then
                        idx+=("$i")
                    fi
                done
            done
        fi
        out=""
        for i in "${idx[@]}"; do
            out+="${fields[$i]}"$'\t'
        done
        printf '%s\n' "${out%$'\t'}"
    done
}
# Install packages for Centos X-Forwarding
function prepareX {
sudo yum remove -y gtk2
sudo yum install -y xorg-x11-server-Xvfb gdk-pixbuf2 gtk3
}
# List memory usage with PID and command line
function psmem {
ps aux"$1" | awk '{ s = ""; for (i = 11; i <= NF; i++) s = s $i " "; print $2 "\t" $6/1024 " MB\t\t" s}' | sort -n -k2
}
function pidgrep {
ps -ef | grep -E "^\\w+ +$1 +"
}
function lazygit {
git add --all .
git commit -m "$@"
git push
}
# Update file permissions in git index
gitchmod() {
mode="$1"
shift
git update-index --chmod="${mode}" "$@"
}
gitls() {
git ls-files --stage
}
# URL-encode a single string.
# Trick: ask curl to --data-urlencode the value against an empty URL and
# report the effective URL it would have requested; curl then fails with
# exit code 3 ("URL malformed"), which is the *expected* outcome here —
# any other status is a real error.
urlencode() {
    local data
    if [[ $# != 1 ]]; then
        echo "Usage: $0 string-to-urlencode"
        return 1
    fi
    data="$(curl -s -o /dev/null -w %{url_effective} --get --data-urlencode "$1" "")"
    if [[ $? != 3 ]]; then
        echo "Unexpected error" 1>&2
        return 2
    fi
    # Strip the leading "/?" that curl prepends to the encoded query string.
    echo "${data##/?}"
    return 0
}
vb() {
if [[ -f ./vb.sh ]]; then
./vb.sh
else
vagrant halt
vagrant up
vagrant ssh
fi
}
# Make a directory and cd into it.
# Generalized: -p creates intermediate path components and makes the call
# idempotent when the directory already exists; -- protects odd names.
mcd() {
    mkdir -p -- "$1" && cd -- "$1"
}
function vssh {
/usr/bin/ssh -i ./.vagrant/machines/default/virtualbox/private_key -p 2222 vagrant@127.0.0.1
}
venv() {
if [[ ! -d ./venv ]]; then
python3 -m venv venv
fi
source ./venv/bin/activate
}
# Print the human-readable date for a Unix timestamp.
# BUGFIX/portability: `date -d @SECONDS` is GNU-only; BSD/macOS date needs
# `date -r SECONDS`.  Probe once and pick the right flavour.
epoch() {
    if date -d @0 >/dev/null 2>&1; then
        date -d "@$1"
    else
        date -r "$1"
    fi
}
| true |
68319826a920d38ba38131cffa3cbc49e790c192 | Shell | ekarlso/nim-vm | /bin/nim-vm | UTF-8 | 11,586 | 3.890625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# Nim repo to use
NIM_REPO=http://github.com/Araq/Nim.git
# Where to store nim-versions
NIM_DIR=${NIM_DIR:-~/.nimvm}
NIM_BIN_DIR=${NIM_BIN_DIR:-}
NIMVM_ACTIVATE=0
NIMVM_VERSION_LINK=${NIMVM_VERSION_LINK:-1}
# if version "repo" is used the directory used is $NIM_REPO_LOCATION
platform='unknown'
unamestr=`uname`
if [[ "$unamestr" == 'Linux' ]]; then
platform='linux'
elif [[ "$unamestr" == 'Darwin' ]]; then
platform='darwin'
fi
if [ "$platform" == "darwin" ]; then
KOCH_FLAGS="-d:release -d:useGnuReadline -d:nativeStacktrace -d:avoidTimeMachine"
TIMEOUT_CMD="gtimeout"
elif [ "$platform" == "linux" ]; then
KOCH_FLAGS="-d:release -d:useGnuReadline -d:nativeStacktrace"
TIMEOUT_CMD="timeout"
else
KOCH_FLAGS="-d:release"
TIMEOUT_CMD="timeout"
fi
VERSION="0.3.0"
get_location() {
link=$(readlink ${BASH_SOURCE[0]})
if [ -f "$link" ]; then
path=$(dirname $link)/..
else
path=$DIR/..
fi
echo $path
}
nimvm_version() {
local version=$VERSION
path=$(get_location)
[ -d "$path/.git" ] && {
version=$(cd $path; git log --pretty=format:'%h' -n 1)
}
echo $version
}
function err {
local exitcode=$?
local xtrace=$(set +o | grep xtrace)
set +o xtrace
local msg="[ERROR] ${BASH_SOURCE[2]}:$1 $2"
echo $msg 1>&2;
if [[ -n ${LOGDIR} ]]; then
echo $msg >> "${LOGDIR}/error.log"
fi
$xtrace
exit $exitcode
}
function backtrace {
if [ -z "$DEBUG" ]; then
return
fi
local level=$1
local deep=$((${#BASH_SOURCE[@]} - 1))
echo "[Call Trace]"
while [ $level -le $deep ]; do
echo "${BASH_SOURCE[$deep]}:${BASH_LINENO[$deep-1]}:${FUNCNAME[$deep-1]}"
deep=$((deep - 1))
done
}
# Prints line number and "message" then exits
# die $LINENO "message"
function die {
local exitcode=$?
set +o xtrace
local line=$1; shift
if [ $exitcode == 0 ]; then
exitcode=1
fi
backtrace 2
err $line "$*"
# Give buffers a second to flush
sleep 1
exit $exitcode
}
# Run `git "$@"` under $TIMEOUT_CMD, retrying up to 3 times when the timeout
# fires; any non-timeout failure is fatal.  The per-attempt limit comes from
# the GIT_TIMEOUT env var (0 = no limit).
function git_timed {
    local count=0
    local timeout=0

    if [[ -n "${GIT_TIMEOUT}" ]]; then
        timeout=${GIT_TIMEOUT}
    fi

    until $TIMEOUT_CMD -s SIGINT ${timeout} git "$@"; do
        # 124 is timeout(1)'s special return code when it reached the
        # timeout; otherwise assume fatal failure
        if [[ $? -ne 124 ]]; then
            die $LINENO "git call failed: [git $@]"
        fi

        count=$(($count + 1))
        # NOTE(review): `warn` is not defined anywhere in this script —
        # confirm it is provided by the environment, otherwise the retry
        # path fails on "command not found".
        warn "timeout ${count} for git call: [git $@]"
        if [ $count -eq 3 ]; then
            die $LINENO "Maximum of 3 git retries reached"
        fi
        sleep 5
    done
}
# git update using reference as a branch.
# git_update_branch ref
function git_update_branch {
local git_branch=$1
git checkout -f origin/$git_branch
# a local branch might not exist
git branch -D $git_branch || true
git checkout -b $git_branch
}
# git update using reference as a branch.
# git_update_remote_branch ref
function git_update_remote_branch {
local git_branch=$1
git checkout -b $git_branch -t origin/$git_branch
}
# git update using reference as a tag. Be careful editing source at that repo
# as working copy will be in a detached mode
# git_update_tag ref
function git_update_tag {
local git_tag=$1
git tag -d $git_tag
# fetching given tag only
git_timed fetch origin tag $git_tag
git checkout -f $git_tag
}
# Print the directory that holds the managed `nim` symlinks.
# Honours $NIM_BIN_DIR when set and non-empty; falls back to $NIM_DIR/bin.
function get_bindir() {
    local dir
    if [ -n "$NIM_BIN_DIR" ]; then
        dir=$NIM_BIN_DIR
    else
        dir=$NIM_DIR/bin
    fi
    echo "$dir"
}
# Validate that the requested version is installed under $NIM_DIR/versions,
# dying when it is missing.
# NOTE(review): despite the name this performs no switching — the actual
# activation (symlinking) is done by use_version(); confirm whether this
# helper is vestigial.
function activate() {
    local version=$1
    local version_dir="$NIM_DIR/versions/$version"
    if [ ! -d "$version_dir" ]; then
        die $LINENO "Version not found $version"
    fi
}
function koch_build() {
if [ ! -d "csources" ]; then
case "$1" in
(v0.9.*)
git clone --branch v0.9.4 --depth 1 https://github.com/nim-lang/csources.git
cd "csources"
sh build.sh
cd ".."
./bin/nimrod c koch
;;
(*)
git clone --depth 1 https://github.com/nim-lang/csources.git
cd "csources"
sh build.sh
cd ".."
./bin/nim c koch
;;
esac
fi
./koch boot $KOCH_FLAGS
case "$1" in
(v0.9.*)
if [ ! -f bin/nim ]; then
# make it available as nim
ln -s nimrod bin/nim
fi
;;
esac
}
function get_versiondir() {
local version=$1
if [ "$version" == "repo" ]; then
if [ -z "$NIM_REPO_LOCATION" ]; then
echo "Nim reposiory location (NIM_REPO_LOCATION) not set"
exit 5
fi
echo "$NIM_REPO_LOCATION"
else
echo "$NIM_DIR/versions/$version"
fi
}
function install_version() {
local version=$1
if [ -z "$version" ]; then
die $LINENO "Version not specified."
fi
mkdir -p $NIM_DIR/versions
local version_dir=$(get_versiondir $version)
if [ -d $version_dir ]; then
die $LINENO "Version $version already exists.."
fi
NIM_CACHE=$NIM_DIR/repo_cache
if [ ! -d $NIM_CACHE ]; then
echo "No cacehd repository found, cloning $NIM_REPO to $NIM_CACHE..."
git_timed clone $NIM_REPO $NIM_CACHE
else
echo "Using cached repository $NIM_CACHE"
fi
cp -R $NIM_CACHE $version_dir
cd $version_dir
git remote set-url origin $NIM_REPO
git_timed fetch origin
# handle git_ref accordingly to type (tag, branch)
if [[ -n "`git show-ref refs/tags/$version`" ]]; then
git_update_tag $version
elif [[ -n "`git show-ref refs/heads/$version`" ]]; then
git_update_branch $version
elif [[ -n "`git show-ref refs/remotes/origin/$version`" ]]; then
git_update_remote_branch $version
else
die $LINENO "$version is neither branch nor tag"
fi
koch_build $version
bindir=$(get_bindir)
if [ "$NIMVM_VERSION_LINK" = "1" ]; then
ln -nfs $version_dir/bin/nim $bindir/nim.$version
fi
}
function update_version() {
local version=$1
if [ -z "$version" ]; then
die $LINENO "Version not specified."
fi
local version_dir=$(get_versiondir $version)
if [ "$version" == "repo" ]; then
die $LINENO "Local repo should be handled manually."
fi
if [ ! -d $version_dir ]; then
die $LINENO "Version $version does not exists.."
fi
cd $version_dir
git_timed fetch origin
# handle git_ref accordingly to type (tag, branch)
if [[ -n "`git show-ref refs/tags/$version`" ]]; then
git_update_tag $version
elif [[ -n "`git show-ref refs/heads/$version`" ]]; then
git_update_branch $version
elif [[ -n "`git show-ref refs/remotes/origin/$version`" ]]; then
git_update_remote_branch $version
else
die $LINENO "$version is neither branch nor tag"
fi
koch_build $version
bindir=$(get_bindir)
if [ "$NIMVM_VERSION_LINK" = "1" ]; then
ln -nfs $version_dir/bin/nim $bindir/nim.$version
fi
}
function rebuild_version() {
local version=$1
if [ -z "$version" ]; then
die $LINENO "Version not specified."
fi
local version_dir=$(get_versiondir $version)
if [ ! -d $version_dir ]; then
die $LINENO "Version $version does not exists.."
fi
cd $version_dir
koch_build $version
bindir=$(get_bindir)
if [ "$NIMVM_VERSION_LINK" = "1" ]; then
ln -nfs $version_dir/bin/nim $bindir/nim.$version
fi
}
function remove_version() {
local version=$1
if [ $version == "repo" ]; then
die $LINENO "Cannot remove 'repo' version"
fi
local version_dir=$(get_versiondir $version)
if [ ! -d "$version_dir" ]; then
die $LINENO "Version $version doesn't exist."
fi
rm -rf "$version_dir"
}
function get_active_version() {
local bindir=$(get_bindir)
local active_link=$bindir/nim
path=$(readlink $active_link)
if [[ "$NIM_REPO_LOCATION/bin/nim" == "$path" ]]; then
echo "repo"
elif [ -f "$path" ]; then
echo $(basename $(cd $(dirname $path)/..; pwd))
fi
}
function active_version() {
local active=$(get_active_version)
[ -z "$active" ] && die $LINENO "No active version"
echo $active
}
function get_versions() {
local version_dir="$NIM_DIR/versions"
if [ ! -d "$version_dir" ]; then
return
fi
versions=""
for i in $version_dir/*; do
if [ ! -x $i/bin/nim ]; then
continue
fi
name=$(basename $i)
versions+=" $name"
done
echo $versions
}
function list_versions() {
local active=$(get_active_version)
local versions=$(get_versions)
if [ -z "$versions" ]; then
echo "No versions installed currently"
exit 0
fi
for i in $versions; do
echo -ne "-> $i"
if [ "$active" == $i ]; then
echo -ne " (active)"
fi
echo
done
}
function use_version() {
local version=$1
local version_dir=$(get_versiondir $version)
local bindir=$(get_bindir)
if [ "$version" == "repo" ]; then
if [ -z "$NIM_REPO_LOCATION" ]; then
die $LINENO "Nim reposiory location (NIM_REPO_LOCATION) not set"
fi
local version_dir="$NIM_REPO_LOCATION"
fi
if [ ! -d "$version_dir" ]; then
echo $version_dir
die $LINENO "Version $version doesn't exist. Can't activate."
fi
[ -w "$bindir/nim" ] && rm $bindir/nim
ln -s $version_dir/bin/nim $bindir/nim
echo "Now using $version at $bindir/nim"
}
while getopts ":d:b:a" opt; do
case "${opt}" in
a)
NIMVM_ACTIVATE=1
;;
b)
NIM_BIN_DIR=$OPTARG
;;
d)
NIM_DIR=$OPTARG
;;
esac
done
shift "$((OPTIND-1))" # Shift off the options and optional --.
case $1 in
install)
install_version $2
if [ "$NIMVM_ACTIVATE" = "1" ]; then
use_version $2
fi
;;
uninstall)
remove_version $2
;;
update)
if [ -z "$2" ]; then
update_version $(get_active_version)
else
update_version $2
if [ "$NIMVM_ACTIVATE" = "1" ]; then
use_version $2
fi
fi
;;
rebuild)
if [ -z "$2" ]; then
rebuild_version $(get_active_version)
else
rebuild_version $2
if [ "$NIMVM_ACTIVATE" = "1" ]; then
use_version $2
fi
fi
;;
list)
list_versions
;;
active)
active_version
;;
use)
use_version $2
;;
*)
echo "Usage - Nim version manager $(nimvm_version):"
echo " $0 install <version> install version and use it"
echo " $0 uninstall <version> uninstall version"
echo " $0 update (<version>) update current (or version and use it)"
echo " $0 rebuild (<version>) rebuild current (or version and use it)"
echo " $0 list list versions"
echo " $0 active show active version"
echo " $0 activate|use use a version"
;;
esac
| true |
191c8a5bbfe950ab89171dc7372a70e92549d209 | Shell | hiqsol/dotfiles | /.config/zsh/obsolete.sh | UTF-8 | 551 | 3.171875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env zsh
# Run KeePassXC, downloading the latest x86_64 AppImage to ~/bin on first use.
# BUGFIX: "$file" and "$@" are now quoted so paths and user arguments keep
# their word boundaries.
keepassxc() {
    local file="$HOME/bin/keepassxc"
    if [ ! -x "$file" ]; then
        # Resolve the newest x86_64 AppImage URL via the GitHub releases API.
        local url
        url=$(curl --silent "https://api.github.com/repos/keepassxreboot/keepassxc/releases/latest" | jq -r '.assets[].browser_download_url | select (. | test("-x86_64.AppImage$"))')
        curl -L "$url" -o "$file"
        chmod a+x "$file"
    fi
    "$file" "$@"
}
# Install a pinned ripgrep 0.10.0 .deb (Debian/Ubuntu, amd64 only), removing
# the downloaded package file once dpkg succeeds.
install_ripgrep() {
    curl -LO https://github.com/BurntSushi/ripgrep/releases/download/0.10.0/ripgrep_0.10.0_amd64.deb
    sudo dpkg -i ripgrep_0.10.0_amd64.deb && rm ripgrep_0.10.0_amd64.deb
}
| true |
55adf66c326124fd80e4c42c21f0ec35edfdd8f0 | Shell | dLobatog/foreman_release | /tagging.sh | UTF-8 | 1,967 | 2.8125 | 3 | [] | no_license | VERSION=`cat VERSION`
MAJOR=`cat VERSION | cut -d. -f1`
MINOR=`cat VERSION | cut -d. -f2`
PATCH=`cat VERSION | cut -d. -f3`
RC_VERSION=`cat VERSION | cut -d- -f2`
RELEASEDIR=`pwd`
# Check for 'tx', '~/.transifexrc', 'jq'
# foreman
git clone git@github.com:theforeman/foreman.git
cd foreman
git checkout $MAJOR.$MINOR-stable
echo "gem 'rdoc'" > bundler.d/Gemfile.local.rb
cp config/settings.yaml.example config/settings.yaml
cp config/database.yml.example config/database.yml
bundle install
make -C locale tx-update
script/sync_templates.sh
cp "$RELEASEDIR"/VERSION .
tmp=$(mktemp)
jq ".version = \"$VERSION\"" package.json > "$tmp" && mv "$tmp" package.json
extras/changelog
git add .
git commit -m "Release $VERSION"
git tag -m "Release $VERSION" $VERSION
git push origin $MAJOR.$MINOR-stable
git push origin $VERSION
# foreman-proxy
git clone git@github.com:theforeman/smart-proxy.git
cd smart-proxy
git checkout $MAJOR.$MINOR-stable
cp "$RELEASEDIR"/VERSION .
extra/changelog
git add .
git commit -m "Release $VERSION"
git tag -m "Release $VERSION" $VERSION
git push origin $MAJOR.$MINOR-stable
git push origin $VERSION
# foreman-installer
git clone git@github.com:theforeman/foreman-installer.git
cd foreman-installer
git checkout $MAJOR.$MINOR-stable
cp "$RELEASEDIR"/VERSION .
git add .
git commit -m "Release $VERSION"
git tag -m "Release $VERSION" $VERSION
git push origin $MAJOR.$MINOR-stable
git push origin $VERSION
# foreman-selinux
git clone git@github.com:theforeman/foreman-selinux.git
cd foreman-selinux
git checkout $MAJOR.$MINOR-stable
cp "$RELEASEDIR"/VERSION .
extras/changelog
git add .
git commit -m "Release $VERSION"
git tag -m "Release $VERSION" $VERSION
git push origin $MAJOR.$MINOR-stable
git push origin $VERSION
cd $RELEASEDIR
echo "You have now tagged all repositories! Go ahead and start the pipeline - release_tarballs"
echo "Once the job is finished, download and sign the tarballs using signing.rb"
echo http://ci.theforeman.org/view/Release%20pipeline/
| true |
ae576a525a36040cd00ff743aa84396b2eb3926a | Shell | Tr4sK/mypve-firewall | /install.sh | UTF-8 | 760 | 2.984375 | 3 | [] | no_license | #!/bin/bash
##############################################################
#
# INSTALL SCRIPT FOR PVE-FIREWALL
#
##############################################################
#
#
#
#
CONF_DIR='/etc/pve-firewall'
if [ ! -d $CONF_DIR ]; then
mkdir $CONF_DIR
fi
# Copy default configuration files to CONF_DIR
cp ./common_bridged.rules $CONF_DIR/common_bridged.rules
cp ./bridged.rules $CONF_DIR/bridged.rules
cp ./global.conf $CONF_DIR/global.conf
cp ./hypervisor.rules $CONF_DIR/hypervisor.rules
cp ./routed.rules $CONF_DIR/routed.rules
cp ./macros $CONF_DIR/macros
# Copy script to /etc/init.d/
cp pve-firewall /etc/init.d/pve-firewall
chmod +x /etc/init.d/pve-firewall
# Add script to boot sequence
update-rc.d pve-firewall start 70 S . stop 70 0 1 6
| true |
611d7d893c727332c2fd0bc8fee0d92aa816dbc6 | Shell | bespoke-code/bin | /deb-install | UTF-8 | 467 | 3.53125 | 4 | [] | no_license | #!/bin/bash
# Script: deb-install
# Author: Andrej Georgievski <andrej.zgeorgievski@gmail.com>
# Created: 2 Feb 2017
# Version: 1.1
#
# Install a local .deb package; when the first dpkg run fails (usually on
# unmet dependencies), let apt pull them in and retry once.

APP=$1

# Fail fast on a missing argument instead of running `dpkg -i ""`.
if [ -z "$APP" ]
then
	echo "Usage: deb-install <package.deb>"
	exit 2
fi

sudo dpkg -i "$APP"
status=$?

if [ $status -ne 0 ]
then
	# Most failures here are missing dependencies: let apt fix them, retry.
	sudo apt-get install -f -y
	sudo dpkg -i "$APP"
	status=$?
fi

# Use the captured status: the original relied on the implicit $? surviving
# the intervening `if` statement, which is fragile to edits.
if [ $status -eq 0 ]
then
	echo "-------------------------------------"
	echo "Software $APP installed successfully."
else
	echo "----------------------------------------------------------------"
	echo "Software $APP couldn't be installed! Try installing it manually."
fi
b8b37b241c67d4a0c463172699fd9308ca393641 | Shell | oldratlee/my-zsh-settings | /components/20-sys-langs.zsh | UTF-8 | 3,750 | 3.546875 | 4 | [] | no_license | ###############################################################################
# Go
###############################################################################
export GOPATH="$HOME/.go"
export PATH="$PATH:$GOPATH/bin"
###############################################################################
# Rust
###############################################################################
[ -f "$HOME/.cargo/env" ] && source "$HOME/.cargo/env"
###############################################################################
# CPP
###############################################################################
##################################################
# vcpkg
##################################################
export VCPKG_ROOT="$HOME/.vcpkg"
alias vp='vcpkg'
alias vps='vcpkg search'
alias vpi='vcpkg install'
alias vpl='vcpkg list'
VP_CM_PATH="$VCPKG_ROOT/scripts/buildsystems/vcpkg.cmake"
VP_CM_OPT="-DCMAKE_TOOLCHAIN_FILE=$VP_CM_PATH"
# CMG_OPTS="$VP_CM_OPT"
# Enable VCpkg CMake Generation support
evcmg() {
logAndRun set "CMG_OPTS=$CMG_OPTS"
}
# Disable VCpkg CMake Generation support
dvcmg() {
logAndRun unset CMG_OPTS
}
enable_vcpkg() {
# for compiler
export CPATH="$VCPKG_ROOT/installed/x64-osx/include"
export LIBRARY_PATH="$VCPKG_ROOT/installed/x64-osx/lib"
}
[ -f "$VCPKG_ROOT/scripts/vcpkg_completion.zsh" ] &&
source "$VCPKG_ROOT/scripts/vcpkg_completion.zsh"
###############################################################################
# CMake
###############################################################################
# Selecting a compiler must be done on the first run in an empty directory.
# It's not CMake syntax per se, but you might not be familiar with it. To pick Clang:
# CC=clang CXX=clang++ cmake ..
cmg() {
compdef cmb=cmake
compdef cmg=cmake
compdef cmc=cmake
logAndRun cmake ${CMG_BUILD_TOOL:+-G"$CMG_BUILD_TOOL"} "${CMG_COMPILER_OPTS[@]}" $CMG_OPTS -S . -B "$CMG_BUILD_DIR" "$@"
echo
}
cmb() {
compdef cmb=cmake
compdef cmg=cmake
compdef cmc=cmake
[ -e build ] || cmg || return 1
if (( $# ==0 )); then
logAndRun cmake $CMB_OPTS --build "$CMG_BUILD_DIR" # CMake 3.15+ only
else
logAndRun cmake $CMB_OPTS --build "$CMG_BUILD_DIR" --target "$@"
fi
}
alias cmc='cmb clean'
__swCMakeCompiler() {
local compiler="$1"
case "$compiler" in
mac*)
unset CMG_COMPILER_OPTS
CMG_COMPILER=mac-clang
CMG_BUILD_DIR="build-cmake-$CMG_COMPILER${CMG_BT:+-$CMG_BT}"
;;
*gcc)
CMG_COMPILER_OPTS=(
-DCMAKE_C_COMPILER=$(echo /usr/local/opt/gcc/bin/gcc-[0-9]*)
-DCMAKE_CXX_COMPILER=$(echo /usr/local/opt/gcc/bin/c++-[0-9]*)
)
CMG_COMPILER=brew-gcc
CMG_BUILD_DIR="build-cmake-$CMG_COMPILER${CMG_BT:+-$CMG_BT}"
;;
*clang)
CMG_COMPILER_OPTS=(
-DCMAKE_C_COMPILER=/usr/local/opt/llvm/bin/clang
-DCMAKE_CXX_COMPILER=/usr/local/opt/llvm/bin/clang++
)
CMG_COMPILER=brew-clang
CMG_BUILD_DIR="build-cmake-$CMG_COMPILER${CMG_BT:+-$CMG_BT}"
;;
*)
return 1
esac
}
__swCMakeCompiler "mac clang"
swCMakeCompiler() {
local compiler
select compiler in 'mac default' 'brew gcc' 'brew clang'; do
__swCMakeCompiler "$compiler" && break
done
}
__swCMakeBuildTool() {
local bt="$1"
case "$bt" in
make*)
unset CMG_BUILD_TOOL CMG_BT
CMG_BUILD_DIR="build-cmake-$CMG_COMPILER${CMG_BT:+-$CMG_BT}"
;;
ninja*)
CMG_BUILD_TOOL='Ninja'
CMG_BT='ninja'
CMG_BUILD_DIR="build-cmake-$CMG_COMPILER${CMG_BT:+-$CMG_BT}"
;;
*)
return 1
esac
}
| true |
ab99fc4cb84db9ec49fc5b5a98b97a8b6b953ce8 | Shell | ldmosquera/actual-dotfiles | /.profile.d/mpd.bash | UTF-8 | 284 | 2.984375 | 3 | [] | no_license | export MPD_HOST='nieve'
alias musica='ncmpc -c'
alias m='musica'
# Sleep until the currently playing song ends, then pause mpd playback.
# The wait runs in a background job so the interactive shell is not blocked.
function mpc_pausarDespuesDeEsta() {
  # Parse `mpc` status output split on space/slash/colon: fields 6/7 are the
  # elapsed min:sec and 8/9 the total min:sec, so this is the remaining
  # seconds of the current track. Empty when nothing is playing.
  # NOTE(review): assumes the classic `mpc` status line format — confirm
  # against the installed mpc version.
  remanenteDeCancionActual=`mpc | awk -F"[ /:]" '/playing/ {print 60*($8-$6)+$9-$7}'`
  # Only schedule the pause when a song is actually playing.
  [[ $remanenteDeCancionActual ]] && {
    sleep $remanenteDeCancionActual
    mpc pause
  } &
}
| true |
cfe9182d5d819ab5a97016e7f5dad30824984da3 | Shell | walterpaulo/shell | /backup.sh | UTF-8 | 1,303 | 3.46875 | 3 | [] | no_license | #!/bin/bash
#
#decimo.sh - Nosso decimo Progroma em Shell - Script de Backup Completo.
#
# Homepage:
# Autor: Walter Paulo <walter0paulo@hotmail.com>
# Mantenedor: Walter Paulo
#
#--------------------------------------------------------
#
# Este program sera usado para usado para realizar um backup full de arquivos localizados em um servidor onde
# teremo que copiar a pasta /srv/samba
#
# Exemplo de execução:
#
# $ ./backup.sh
#
# Hitórico de Versões
#
# Versão: 1.0
#
# COPYRIGHT: Este programa é GPL
# BAKDIR - Local onde será armazenado os arquvivos de bakcup.
# FILEDIR - Local onde estão os arquivos de origem, que faremos backup.
BKPDIR="/srv/backup/"
FILEDIR="/srv/samba/"
LOGFILE="/var/log/backup.log"
ERROLOG="/var/log/backup_error.log"
DATE=$(date +%d_%m_%Y)
FILENAME="/srv/backuptar_$DATE.tar.gz"
COMPACT="tar -cvzf $FILENAME $BKPDIR"
ADMIN="walter0paulo@hotmail.com"
echo -e "\n"
echo "Iniciando o Script de Backup"
echo -e "\n"
# Report the outcome of the previous command: on success print "Comando ok"
# in blue; on failure print ERRO and e-mail the error log to the admin.
# NOTE: relies on $? from the command executed immediately before the call,
# so nothing may run between that command and verificar.
verificar(){
if [ $? -eq 0 ]; then
	echo -e "\e[34mComando ok\e[m\e"
else
	echo "ERRO"
	mail_err
fi
}
# Mail the backup run log to the administrator.
# Bug fix: the original passed "-t -m $ADMIN", which made the literal "-m"
# the recipient and the admin address the message body; -t must be followed
# by the recipient address and -m by the message text.
mail() {
sendEmail -f root@unoseg.local -t "$ADMIN" -u "Mensagem de Backup" -m "Mensagem de Backup" -a "$LOGFILE"
}
# Mail the error log to the administrator when a backup step fails
# (called from verificar). Quoted the expansions and added a -u subject
# for consistency with mail().
mail_err() {
sendEmail -f root@unoseg.local -t "$ADMIN" -u "Mensagem de Backup" -m "Mensagem de Backup" -a "$ERROLOG"
}
# Mirror the samba share into the backup directory, logging output/errors.
rsync -avu $FILEDIR $BKPDIR > $LOGFILE 2> $ERROLOG
verificar
# Bug fix: $COMPACT holds the tar *command line*, so the original
# `[ -f $COMPACT ]` tested a multi-word string and could never succeed;
# test the archive file ($FILENAME) and only build it when missing.
[ -f $FILENAME ] || $COMPACT
verificar
# Mail the run log to the administrator and finish successfully.
mail
exit 0
| true |
f8bdf68c7c6fdcce42a47e7b4978cb2e9937b651 | Shell | ath016/bash | /encryption/hopShift.sh | UTF-8 | 722 | 3.53125 | 4 | [] | no_license | #!/bin/bash
alpha=(a b c d e f g h i j k l m n o p q r s t u v w x y z)
# Map a numeric index 0-25 to the matching letter of the global `alpha`
# array; the result is returned in the global variable `c`.
function char () {
	c="${alpha[$1]}"
}
# Reverse lookup: set the global `n` to the index of letter $1 within the
# global `alpha` array (0 for 'a' .. 25 for 'z'); n stays 0 if not found.
function num () {
	local idx
	n=0
	for idx in {0..25}
	do
		if [ "$1" = "${alpha[$idx]}" ]
		then
			n=$idx
		fi
	done
}
# Read a message, strip spaces, and print the hop-shift ciphertext:
# successive letters are Caesar-shifted by 7, 14 and 15 in a repeating
# cycle, with a space printed after every group of three letters
# (matching the original output, including a trailing group space).
echo -n "enter your message: "
read msg
echo -n "your encrypted message: "
msg=${msg//" "/""}
len=${#msg}
# The three rotating shift amounts; replaces three duplicated stanzas.
shifts=(7 14 15)
x=0
while [ $x -lt $len ]
do
	for s in "${shifts[@]}"
	do
		# Guard each position: the final group may be shorter than three.
		if [ $x -lt $len ]
		then
			num ${msg:x:1}
			n=$((($n + $s) % 26))
			char $n
			echo -n "$c"
		fi
		# Arithmetic expansion instead of the original `expr` subprocess.
		x=$((x + 1))
	done
	echo -n " "
done
echo ""
| true |
7dba36b5c4c46f7d9a27d9f627d2ca177917862f | Shell | Frugghi/TweetSpark | /vagrant-cluster/master/deploy/auto-deploy | UTF-8 | 675 | 3.796875 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Taken from https://gist.github.com/mikesmullin/6401258
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
watch_file="$DIR/$1"
args="$@"
# Refresh the fingerprint of the watched path: hash a recursive long
# listing with full timestamps, so any size/mtime change alters $sha.
function update_sha {
  sha=$(ls -lR --time-style=full-iso "$watch_file" | sha1sum)
}
# Deploy: submit the watched script to Spark on YARN with the original
# command-line arguments ($args, set at startup), then resume watching.
function run {
  echo "Running..."
  spark-submit-yarn $args
  echo "Resumed watching..."
}
# One poll step: recompute the checksum and trigger a deployment run when
# it differs from the previously recorded value.
function compare {
  update_sha
  if [[ $sha == $previous_sha ]]; then
    return
  fi
  echo "Change detected..."
  previous_sha=$sha
  run
}
# Ctrl+C forces an immediate run; Ctrl+\ quits the watcher.
trap run SIGINT
trap exit SIGQUIT
# Seed the baseline checksum before entering the poll loop.
update_sha
previous_sha=$sha
echo -e "Press Ctrl+C to force run, Ctrl+\\ to exit"
echo -e "Watching \"$watch_file\""
# Poll for changes once per second, forever.
while true; do
  compare
  sleep 1
  done
| true |
ab72f198dca2bd94bb7180ef36e82849f6bc537b | Shell | jmsilvadev/iaccountapi | /pkg/run-tests.sh | UTF-8 | 636 | 3.0625 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/sh
# Create the directories for coverage artefacts and captured test output.
if [ ! -d tests/coverage/ ]; then
    mkdir tests/coverage/
fi
if [ ! -d tests/outputs/ ]; then
    mkdir tests/outputs/
fi
# Run the whole Go test suite with coverage; verbose output goes to the log.
go test -v ./... -cover -coverprofile=tests/coverage/cover.out > tests/outputs/output_test.log
# Render the raw coverage profile as a browsable HTML report.
go tool cover -html=tests/coverage/cover.out -o tests/coverage/coverage.html
# `go test -v` prints FAIL lines for failing tests/packages; capture them.
CHECKFAIL=$(cat tests/outputs/output_test.log | grep FAIL)
cat tests/outputs/output_test.log
# An empty CHECKFAIL means no FAIL lines were found, i.e. the suite passed.
if [ -z "${CHECKFAIL}" ]; then
    echo "####################"
    echo "# All Tests PASSED #"
    echo "####################"
else
    echo "################"
    echo "# Tests FAILED #"
    echo "################"
    exit 1
fi
267e3d1a522de8f62898b3b79d4390214bda52e5 | Shell | bahmni-msf/bahmni-mart-playbook | /roles/metabase/templates/metabase-restore.sh.j2 | UTF-8 | 791 | 3.65625 | 4 | [] | no_license | #!/usr/bin/env bash
# Ansible/Jinja2 template: connection parameters are substituted at deploy time.
METABASE_DB_HOST={{ metabase_db_server }}
METABASE_DB_USERNAME={{ metabase_db_user }}
METABASE_DB_NAME={{ metabase_db_name }}
POSTGRESQL_PORT={{ metabase_db_port }}
# Take the dump path from $1, or prompt for it interactively.
if [[ -z $1 ]]; then
    read -p "Enter DB file location: " BACKUP_FILE_LOCATION
else
    BACKUP_FILE_LOCATION=$1
fi
if [ -z "$BACKUP_FILE_LOCATION" ]; then
  echo "Please provide DB file location";
  # Bug fix: the original read `exit 1:`; "1:" is not a numeric argument,
  # so bash printed an error and exited with status 255 instead of 1.
  exit 1
fi
echo "Restoring ${METABASE_DB_NAME} with ${BACKUP_FILE_LOCATION}"
# Feed the SQL dump to psql against the configured database.
psql -h ${METABASE_DB_HOST} -p ${POSTGRESQL_PORT} -U ${METABASE_DB_USERNAME} ${METABASE_DB_NAME} < ${BACKUP_FILE_LOCATION}
EXIT_CODE=$?
echo
# Report success/failure and propagate psql's exit status on failure.
if [ ${EXIT_CODE} == 0 ]; then
    echo "${METABASE_DB_NAME} restore is successfully completed";
else
    echo "${METABASE_DB_NAME} restore is failed"
    exit ${EXIT_CODE}
fi
| true |
03333bd3dcf1cc04d00cfb0b1c8ae26e9e1f0425 | Shell | jmac-burgh/transcode_scripts_mythTV | /makeMythLinks | UTF-8 | 20,174 | 3.59375 | 4 | [] | no_license | #!/bin/bash
#==============================================================================
# This script creates hardlinks to mythtv recordings using metadata from the
# mythtv database. The hardlinks are named in a human readable format
# including a pattern that XBMC can use to identify the recordings.
# If hardlink(s) have been created, a command is issued that tells
# xbmc to update its video library. curl must be installed for this to work.
# xbmc must be configured to allow it to be controled remotely.In its current
# form this script must be run on the machine that is running mysql server.
# Tested with mpg files but I guess it could work with other formats. Not
# sure about nupple files though.
#
# The following variables must be set in order to run the script
#
# tvDirName:folder where the script will create links for the tvshows
# movieDirName:folder where links for the movies wil be created
# linkDirName:parent folder of movieDirName and tvDirName
# lockFileDirName:directory to keep lock file
# logFileDirName:directory to keep log file
# staleTime:seconds after which lock file is considered stale
# cleanLibTime:minutes to sleep after issuing the clean library command
# scanLibTime:minutes to wait after issuing the scan library command
# xbmcUserName:xbmc username
# xbmcPassword:xbmc password
# xbmcIpAddress:ip address of the machine that is running xbmc
# xbmcPort the:port that xbmc is listening on
#
# The mysql username and password are automatically extracted from the
# config file.
# mySQLUserName:mysql database user name (automatically detected)
# mySQLPassword:mysql database password (automatically detected)
#
# recordingsLocation is automatically retrieved from the myth database.
# the mythtv user should have write access in this location
# title is created upon detection of a new tv series.
# The paths are created as follows
#
# Hardlink location for tv shows will be created as follows
# recordingsLocation/linkDirName/tvDirName/title/tvDBFileName
#
# Hardlink location for movies will be created as follows
# recordingsLocation/linkDirName/movieDirName/movieFileName
#
# To run the script as "mythtv" for testing login as user mythtv.
# Under ubuntu 12.04 issue the command "sudo su mythtv -" and provide your
# root password. Navigate to where the script is and type
# "bash makeMythLinks"
#
# written with gedit and tested with bash version 4.2.25, ubuntu 12.04 server,
# mythtv .26,.27,HD Homerun prime cableacard tuner.
# Author Pedro Pena
#==============================================================================
#==============================================================================
# Global values go here
#==============================================================================
tvDirName="tvShows" # the folder where the script will create links for the tvshows
movieDirName="movies" # the folder where links for the movies wil be created
linkDirName="links/test" # the parent folder of movieDirName and tvDirName
lockFileDirName="lock" # directory to keep lock file
logFileDirName="log" # directory to keep log file
staleTime="1800" # seconds after which lock file is considered stale
# attempt to automatically get username and password
# if the path and/or filename is wrong on your system then update it below.
# You can also replace the expressions the username and password
# eg.
# mySQLUserName="macleod"
# mySQLPassword="h1land3r"
mySQLUserName=`grep -oP '(?<=<UserName>).*?(?=</UserName>)' /etc/mythtv/config.xml`
mySQLPassword=`grep -oP '(?<=<Password>).*?(?=</Password>)' /etc/mythtv/config.xml`
# recordingsLocation holds the mythtv default path
recordingsLocation=`mysql -u $mySQLUserName --password="$mySQLPassword" -s -N --raw -e\
'SELECT DIRNAME FROM mythconverg.storagegroup WHERE GROUPNAME="Default"';`
cleanLibTime="6" # number of minutes to sleep after issuing the clean library command
scanLibTime="2" # number of minutes to wait after issuing the scan library command
NLCR=$(echo -e "\n\r") # store newline and carriage return characters
TAB=$(echo -e "\t") # store tab character
#==============================================================================
# to allow remote control of xbmc navigate to
# system->settings->services->webserver
# enable "Allow control of XBMC via HTTP" and create a username and password
# system->settings->services->Remote control
# enable "Allow programs on other systems to control XBMC"
#==============================================================================
xbmcUserName="xbmc" # xbmc username
xbmcPassword="apollo13" # xbmc password
xbmcIpAddress="192.168.0.14" # ip address of the machine that is running xbmc
xbmcPort="8080" # the port that xbmc is listening on
#==============================================================================
# Global values end here
#==============================================================================
#==============================================================================
# log appends to specified file prefixing the message with a timestamp.
# Takes two parameters. $1 which is the absolute path to the file and $2
# which is the message.
#==============================================================================
# Append message $2 to the log file $1, prefixed with a YYYYmmddHHMMSS
# timestamp. Fixed: the expansions are now quoted so messages containing
# multiple spaces or glob characters are no longer mangled by echo's
# word-splitting/pathname expansion.
function log {
	dirPath=$1
	message=$2
	echo "$(date +%Y%m%d%H%M%S) $message" >> "$dirPath"
}
#==============================================================================
# getColumnValue extracts the column value from a string splitting the input
# via tab as a delimiter. parameter $1 is the row and parameter %2 is the
# requested field
#==============================================================================
# Extract one tab-separated field from a mythconverg result row.
# $1 - the whole row (tab-delimited); $2 - zero-based field index.
# The field order is fixed by the SELECT that produced the row (see below).
function getColumnValue {
	# column 0 season
	# column 1 episode
	# column 2 filename
	# column 3 title
	# column 4 programid
	# column 5 originalairdate
	# column 6 recgroup
	# column 7 autoexpire
	# column 8 endtime
	# column 9 subtitle
	# the column position values are determined by the order
	# in which they're placed in the sql query
	IFS=$TAB # split on tabs so the row breaks into fields
	arr=($1) #split tab delimited row into fields and place in an array
	IFS=$NLCR # restore the script-wide \n\r delimiter (global side effect the main loop relies on)
	echo ${arr[$2]} # return requested field
}
#==============================================================================
# getType returns the first 2 characters from the programid field.
# These characters seem to be related to the type of recording it is. As far
# as I can tell MV is for movie and EP & SH for tv show and SP is sports.
# takes one parameter $1 which
# is the field that holds the "identifying" prefix
#==============================================================================
# Return the two-character programid prefix (e.g. EP, SH, MV, SP), which
# encodes the kind of recording.
function getType {
	printf '%s\n' "${1:0:2}"
}
#==============================================================================
# createLinks checks if the links exist and creates them if they don't. It
# takes two paramteres $1 is the target file and path $2 is the path and
# filename for the hard link. CreateLinks returns true if a hard link is
# created. returns false if otherwise.
#==============================================================================
# Create a hard link $2 pointing at recording $1 when the target exists and
# the link does not. Echoes "true" if a link was created, "false" otherwise.
# Fixed: quoted all path expansions (the original broke on paths containing
# spaces) and dropped the stray backticks that re-executed ln's output.
function createLinks {
	local target=$1
	local hLink=$2
	local created="false"
	if [ -f "$target" ] && [ ! -f "$hLink" ]; then
		ln "$target" "$hLink"
		created="true"
	fi
	echo "$created"
}
# Echo the hard-link count of file $1, or -1 when the file does not exist.
# Bug fix: the original tested `[ -f %1 ]` (a literal "%1" instead of "$1"),
# so the condition was always false and the function always returned -1.
function getNumOfLinks {
	if [ -f "$1" ]; then
		stat -c %h "$1"
	else
		echo "-1"
	fi
}
#==============================================================================
# createDir creates a directory if it doesn't exist. Takes one parameter. $1
# is the path of the directory to create
#==============================================================================
# Create directory $1 (including missing parents) if it does not exist.
# Fixed: quoted the path, added `--` to guard odd names, and switched to
# `mkdir -p` so a missing parent directory (or a race with another process
# creating it) is no longer a fatal error; -p is a no-op when it exists.
function createDir {
	mkdir -p -- "$1"
}
#==============================================================================
# Here the paths for the hardlinks are created if they don't exist
#==============================================================================
# create the parent directory for the hard links, lock and logs subdirs
#
$(createDir $recordingsLocation''$linkDirName)
#
# create lockfile directory
$(createDir $recordingsLocation''$linkDirName'/'$lockFileDirName)
# create log directory
$(createDir $recordingsLocation''$linkDirName'/'$logFileDirName)
# create hard links
$(createDir $recordingsLocation''$linkDirName'/'$tvDirName)
$(createDir $recordingsLocation''$linkDirName'/'$movieDirName)
#==============================================================================
# Check to see if the script is currently running or if there is a stale lock
# file. If the file exists then the script is already running or the script
# ended without completing.The lock file is created with the epoch time in it
# this time is later compared to the current epcoh time. if it is greater
# than $staleTime then it assumed that the script ended in a bad way and a new
# lock file is created and the script runs. If the file is there and the time
# has not elapsed then it exits
#==============================================================================
lockFile=$recordingsLocation''$linkDirName'/'$lockFileDirName'/lockFile'
logFile=$recordingsLocation''$linkDirName'/'$logFileDirName'/log'
deleteQueue=$recordingsLocation''$linkDirName'/'$logFileDirName'/deletequeue'
fileToDelete=$1
if [ "$fileToDelete" != "" ]; then
echo $fileToDelete >> $deleteQueue
$(log $logFile $fileToDelete" was added to delete queue.")
fi
if [ -f $lockFile ]; then
read -r temp < $lockFile
currentTime=$(echo `date +%s`)
timeElapsed=$((currentTime-temp))
if [ "$timeElapsed" -ge "$staleTime" ]; then
echo `date +%s` > $lockFile
$(log $logFile "lock file is stale,creating new one.")
else
$(log $logFile "script already running,exiting.")
exit 0
fi
else
echo `date +%s` > $lockFile
$(log $logFile "creating lock file.")
fi
#==============================================================================
# records holds the season numbers, episode numbers,filenames,titles,
# programids,air dates and episode names for all of the mythtv
# recordings(I hope). It seems like scheduled recordings always have a
# recordid > 0 and deleted recordings like livetv have a recordid = 0.
# note: SUBTITLE must always be the last column in the query below.
# The reason being that it doesn't always contain a value so it will shift
# the following columns down by one in the result string.
#==============================================================================
records=`mysql -u $mySQLUserName --password="$mySQLPassword" -s -N --raw -e\
'SELECT SEASON,EPISODE,BASENAME,TITLE,PROGRAMID,\
ORIGINALAIRDATE,RECGROUP,AUTOEXPIRE,ENDTIME,SUBTITLE FROM mythconverg.recorded WHERE RECORDID > 0';`
#==============================================================================
# This is the main loop. Each record is delimited by \n\r so the default
# delimiter is changed to \n\r
#==============================================================================
IFS=$NLCR # set default delimiter to newline carriage return
linkDeleted="false" # used to check if any links were removed
hardLinkCreated="false" # used to check if any hard links were created
temp="" # used for temporary stuff
linksRemoved=0 # keep track of number of links removed
linksCreated=0 # keep track of number of links created
for x in $records; do
season=$(getColumnValue $x 0)
episode=$(getColumnValue $x 1)
fileName=$(getColumnValue $x 2)
title=$(getColumnValue $x 3)
oTitle=$title #need this to search other table
programid=$(getColumnValue $x 4)
originalairdate=$(getColumnValue $x 5)
recGroup=$(getColumnValue $x 6)
autoExpire=$(getColumnValue $x 7)
endTime=$(getColumnValue $x 8)
subtitle=$(getColumnValue $x 9)
typeOfRec=$(getType $programid)
# clean up title and subtitle
title=${title//[^a-zA-Z0-9 ]/} # remove non alphanumeric chars except space
title=${title// /_} # replace space with underscore
title=${title//__/_} # replace double underscore with single underscore
title=${title,,} # make lowercase
subtitle=${subtitle//[^a-zA-Z0-9 ]/} # remove non alphanumeric chars except space
subtitle=${subtitle// /_} # replace space with underscore
subtitle=${subtitle//__/_} # replace double underscore with single underscore
subtitle=${subtitle,,} # make lowercase
fileExtension=${fileName##*.} # extracts the extension for the video file
fileNameSansExt=${fileName%.*} #extracts filename without extension
target=$recordingsLocation$fileName # location & filename of the actual myth recording
# converting the show endtime to epoch time for easier comparisons
endTime=`date +%s -ud $endTime`
currentTime=`date +%s`
# if timeDiff is greater than 0 then the show is still recording
timeDiff=$((endTime-currentTime))
if [ "$subtitle" == "" ]; then #need some sort of subtitle or same name links will occur.
#subtitle=$originalairdate
subtitle=$fileNameSansExt # 9.7.14 this does not allow same name links to be created
fi
if ( [ "$typeOfRec" == "EP" ] || [ "$typeOfRec" == "SH" ] || [ "$typeOfRec" == "SP" ] ) && [ "0" -ge "$timeDiff" ]; then # check if it's a tv show/sports
#create tv show directory if it doesn't exist
$(createDir $recordingsLocation''$linkDirName'/'$tvDirName'/'$title)
pattern='s'$season'e'$episode
# if season and episode numbers aren't present use the original air date
if [ "$season" == "0" ] && [ "$episode" == "0" ]; then
pattern=$originalairdate
else
# delete file with original airdate if seasons are now available.
tvDBFileName=$subtitle'.'$originalairdate'.'$fileExtension
hLink=$recordingsLocation''$linkDirName'/'$tvDirName'/'$title'/'$tvDBFileName
if [ -f $hLink ]; then
`rm $hLink`
linkDeleted="true"
linksRemoved=$((linksRemoved+1))
$(log $logFile $hLink" has been removed because season and episode numbers are now available.")
fi
fi
tvDBFileName=$subtitle'.'$pattern'.'$fileExtension
hLink=$recordingsLocation''$linkDirName'/'$tvDirName'/'$title'/'$tvDBFileName
linkNum=$(getNumOfLinks $hLink)
# attemptig to detect a deletion via mythtv.... should probably remove this.
if [ "$recGroup" == "Deleted" ] && [ "$autoExpire" -gt "0" ] && [ -f $hLink ]; then
$(log $logFile "delete requested,removing links for "$title"-"$tvDBFileName)
`rm $hLink`
linkDeleted="true"
linksRemoved=$((linksRemoved+1))
else
if [ "$recGroup" != "Deleted" ] && [ "0" -ge "$timeDiff" ] && [ "$autoExpire" != "9999" ]; then
temp=$(createLinks $target $hLink)
fi
if [ "$temp" == "true" ]; then
$(log $logFile $hLink" has been created.")
hardLinkCreated="true"
linksCreated=$((linksCreated+1))
temp="false"
fi
fi
else
# this is where the links are created for movies
releaseDate=`mysql -u $mySQLUserName --password="$mySQLPassword" -s -N --raw -e\
'SELECT AIRDATE FROM mythconverg.recordedprogram WHERE TITLE="'$oTitle'" LIMIT 1;'`
pattern='('$releaseDate')'
movieFileName=$title''$pattern'.'$fileExtension
hLink=$recordingsLocation''$linkDirName'/'$movieDirName'/'$movieFileName
linkNum=$(getNumOfLinks $hLink)
if [ "$recGroup" == "Deleted" ] && [ "$autoExpire" -gt "0" ] && [ -f $hLink ]; then
$(log $logFile "delete requested,removing links for "$movieFileName)
`rm $hLink`
linkDeleted="true"
linksRemoved=$((linksRemoved+1))
else
if [ "$recGroup" != "Deleted" ] && [ ! "$pattern" == "" ] && [ ! "$pattern" == "(0000)" ] && [ "$autoExpire" != "9999" ]; then
temp=$(createLinks $target $hLink)
fi
if [ "$temp" == "true" ]; then
$(log $logFile $sLink" has been created.")
hardLinkCreated="true"
linksCreated=$((linksCreated+1))
temp="false"
fi
fi
fi
done # end for loop
#
# lines that can be used to automate xbmc video library refresh to automatically add/delete items in video library using json
# add any xbmc clients with a video library in the variable definition section above then uncomment lines below
#
# clean the xbmc video library if any links were removed
#if [ "$linkDeleted" == "true" ]; then
# curl --data-binary '{ "jsonrpc": "2.0", "method": "VideoLibrary.Clean", "id": "mybash"}' -H 'content-type: application/json;' $xbmcUserName:$xbmcPassword@$xbmcIpAddress:$xbmcPort/jsonrpc
# $(log $logFile "library clean request issued to xbmc")
# $(log $logFile "sleeping for 3 minutes while library is cleaned")
# sleep 3m
#fi
# update xbmc video library if any hard links were added.
#if [ "$hardLinkCreated" == "true" ]; then
#if [ "$hardLinkCreated" == "$hardLinkCreated" ]; then
# curl --data-binary '{ "jsonrpc": "2.0", "method": "VideoLibrary.Scan", "id": "mybash"}' -H 'content-type: application/json;' $xbmcUserName:$xbmcPassword@$xbmcIpAddress:$xbmcPort/jsonrpc
# $(log $logFile "library scan request issued to xbmc")
# $(log $logFile "sleeping for 1 minute while library scans")
# sleep 1m
#fi
$(log $logFile $linksCreated" links created "$linksRemoved" links removed")
# remove lock file
$(log $logFile "removing lock file")
`rm $lockFile`
| true |
22dae22193ea5fd3852a105239565b843585997a | Shell | dineshkummarc/QuoJS | /vendor/build.sh | UTF-8 | 1,007 | 3.546875 | 4 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
#define paths
COMPILER=google-compiler/compiler.jar
LUNGO_SOURCES=../src/
LUNGO_NAMESPACE=quo.
BUILDPATH=../release/
# NOTE(review): MINIFIED/PACKED are defined but never used below.
MINIFIED="min"
PACKED="packed"
#script
clear
echo -e "\033[0m"============================ QUOJS COMPILER ============================
## Files to compile
FILES_TO_COMPILE=""
FILES_TO_JOIN=""
#Main
# Source files live under ../src/ with the "quo." namespace prefix.
DIR=$LUNGO_SOURCES$LUNGO_NAMESPACE
echo -e "\033[33m [DIR]: "$LUNGO_SOURCES
# Ordered list of source suffixes; the order matters for concatenation.
FILES=(js core.js element.js environment.js output.js query.js style.js ajax.js events.js events.manager.js events.gestures.js)
# Build both the closure-compiler --js argument list and the plain join list.
for file in "${FILES[@]}"
do
	FILES_TO_COMPILE=$FILES_TO_COMPILE" --js "$DIR$file
	FILES_TO_JOIN=$FILES_TO_JOIN" "$DIR$file
done
#UNCOMPRESED
# Development build: plain concatenation of all sources.
cat $FILES_TO_JOIN > $BUILDPATH/QuoJS-development.js
echo -e "\033[32m [BUILD]: QuoJS-development".js"\033[0m"
#MINIFIED
# Release build: minify with the Google Closure Compiler.
java -jar $COMPILER $FILES_TO_COMPILE --js_output_file $BUILDPATH/QuoJS.js
echo -e "\033[32m [BUILD]: QuoJS".js"\033[0m"
echo ============================ /QUOJS COMPILER ============================
9feda877b2c19caf66c40e8acd338f55f7d6b77f | Shell | petrelharp/isolation_by_coalescence | /sims/run_eems.sbatch | UTF-8 | 552 | 3.015625 | 3 | [] | no_license | #!/bin/bash
#SBATCH -p long
#SBATCH -n 1
#SBATCH -t 48:00:00
#SBATCH --mem-per-cpu=48G
#SBATCH --ntasks-per-core=1
# Move to the directory sbatch was invoked from.
cd $SLURM_SUBMIT_DIR
# Abort with a message if OUTDIR is unset; the extra -z check also rejects
# an empty-but-set OUTDIR, which ${VAR?} alone would accept.
: ${OUTDIR?Must define OUTDIR}
if [ -z "$OUTDIR" ]
then
    echo "Must define OUTDIR (is empty)."
    exit 1
fi
# EEMS reads all of its run parameters from this ini file.
INIFILE="$OUTDIR/eems.ini"
echo "Running:"
echo "   eems/runeems_snps/src/runeems_snps --params $INIFILE &> $OUTDIR/eems_run.log"
# Run EEMS under /usr/bin/time to record wall/CPU time and peak memory.
/usr/bin/time --format='elapsed: %E / kernel: %S / user: %U / mem: %M' \
    eems/runeems_snps/src/runeems_snps --params $INIFILE &> $OUTDIR/eems_run.log
echo "Done!"
| true |
6d86393d4f474637aa02f8b730fc0c02bde0fbc4 | Shell | rcn-ee/repos-wheezy-archive | /debian-7-pysendfile/version.sh | UTF-8 | 536 | 2.59375 | 3 | [] | no_license | #!/bin/bash -e
# Debian mirror used to fetch the upstream source and packaging tarballs.
mirror="http://http.debian.net/debian"
package_name="pysendfile"
debian_pkg_name="${package_name}"
package_version="2.0.0"
# Upstream ("orig") tarball name following Debian packaging convention.
package_source="${package_name}_${package_version}.orig.tar.gz"
src_dir="${package_name}-${package_version}"
# Not built from git: sources are fetched from the Debian archive instead.
git_repo=""
git_sha=""
# Pool layout on the mirror: first letter of the package / package name.
reprepro_dir="p/${package_name}"
dl_path="${mirror}/pool/main/${reprepro_dir}/"
debian_version="${package_version}-6"
debian_untar="${package_name}_${debian_version}.debian.tar.gz"
debian_patch=""
# Version suffixes appended for the distro-specific rebuilds.
jessie_version="~20141124+1"
wheezy_version="~bpo70+20141214+1"
2567db8f408482565cb2838fbc6a526711e9b4d4 | Shell | solomonxie/dotfiles | /benchmark/mping.sh | UTF-8 | 2,835 | 3.28125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
##### 一键Ping测试 #####
##### Author:xiaoz.me #####
##### Update:2019-06-03 #####
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/bin:/sbin
export PATH
#获取服务器公网IP
osip=$(curl https://api.ttt.sh/ip/qqwry/?type=txt)
location=(
'四川 电信'
'天津 电信'
'江苏 电信'
'四川 联通'
'河北 联通'
'浙江 联通'
'安徽 移动'
'山东 移动'
'四川 移动'
'广东 移动'
)
#各地区DNS,来源于http://dns.lisect.com/ and https://www.ip.cn/dns.html
dnsip=(
'61.139.2.69' #四川 电信
'219.150.32.132' #天津 电信
'218.2.2.2' #江苏 电信
'119.6.6.6' #四川 联通
'202.99.160.68' #河北 联通
'221.12.1.227' #浙江 联通
'211.138.180.2' #安徽 移动
'218.201.96.130' #山东 移动
'223.87.238.22' #四川 移动
'211.139.129.222' #广东 移动
)
echo '---------------------------------------------------------------------------'
echo "您的本机IP为:[$osip]"
# Ping each of the ten Chinese ISP DNS servers in the global dnsip[] array,
# $1 packets per host, keeping the raw output in /tmp and printing the
# summary (packet loss / latency) lines plus a legend.
function mping(){
	num=0
	# number of ping packets to send to every host
	pnum=$1
	#echo '---------------------------------------------------------------------------'
	echo "正在进行Ping测试,请稍后..."
	echo '---------------------------------------------------------------------------'
	while(( $num<10 ))
	do
		# keep the full ping output for this server in /tmp/<ip>.txt
		ping ${dnsip[$num]} -c $pnum > /tmp/${dnsip[$num]}.txt
		echo 【${location[$num]}】 - ${dnsip[$num]}
		echo ''
		# the last two lines of ping output carry the loss and rtt summary
		tail -2 /tmp/${dnsip[$num]}.txt
		echo '---------------------------------------------------------------------------'
		let "num++"
	done
	# legend explaining the summary fields (packet loss, min/avg/max/mdev)
	echo "【参数说明】"
	echo "x% packet loss: 丢包率"
	echo "min: 最低延迟"
	echo "avg: 平均延迟"
	echo "max: 最高延迟"
	echo "mdev: 平均偏差"
	echo '---------------------------------------------------------------------------'
}
# Route-trace three representative ISP DNS servers (telecom/unicom/mobile)
# with besttrace, downloading the besttrace binary on first use.
function moretrace(){
	# check whether the besttrace binary is already present
	if [ ! -f "./besttrace" ]
	then
		# download besttrace
		wget -q http://soft.xiaoz.org/linux/besttrace
		# make it executable
		chmod +x ./besttrace
	fi
	# run the route traces (-q 1: one probe per hop)
	echo '---------------------------------------------------------------------------'
	echo '正在进行路由跟踪,请稍后...'
	echo '---------------------------------------------------------------------------'
	echo '【四川电信】 - 61.139.2.69'
	echo ''
	./besttrace -q 1 61.139.2.69
	echo '---------------------------------------------------------------------------'
	echo '【河北 联通】- 202.99.160.68'
	echo ''
	./besttrace -q 1 202.99.160.68
	echo '---------------------------------------------------------------------------'
	echo '【安徽 移动】 - 211.138.180.2'
	echo ''
	./besttrace -q 1 211.138.180.2
	echo '---------------------------------------------------------------------------'
}
mping 10
echo ''
moretrace
echo ''
echo '此结果由mping生成:https://www.xiaoz.me/archives/13044'
echo '' | true |
094344b21e741ff9659903b3c41887f3aa17a33d | Shell | RaoulMa/data_science_bowl_2018 | /sample.sh | UTF-8 | 1,450 | 2.890625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Author: Raoul Malm
# Description: Bash script to train the neural network for
# image segmentation of the 2018 Data Science Bowl.
# options of running main.py:
#--name: argument = name of the model
#--load: load a pretrained model
#--train: train the model
#--predict: make predictions
#--epochs: argument = number of epochs for training
# Training of 5 models with 10-fold cross validation for 50 epochs
#python main.py --name nn0_256_256 \
#--predict
# Training of 5 models with 10-fold cross validation for 50 epochs
#python main.py --name nn0_512_512 nn1_512_512 nn2_512_512 nn3_512_512 nn4_512_512 \
#--train \
#--epochs 50.0
# Training of 5 models with 10-fold cross validation for 50 epochs
#python main.py --name nn5_384_384 nn6_384_384 nn7_384_384 nn8_384_384 nn9_384_384 \
#--train \
#--epochs 50.0
# Prediction of 5 trained models
#python main.py --name nn0_256_256 nn1_256_256 nn2_256_256 nn3_256_256 nn4_256_256 \
#--predict
# Training and prediction for testing purposes on a reduced data set
python main.py --name tmp \
--train_imgs 50 \
--test_imgs 50 \
--train \
--predict \
--epochs 2.0
# Training and prediction on the full data
#python main.py --name tmp \
#--load \
#--train \
#--predict \
#--epochs 1.0
# Continue training and prediction on the full data set where the model is loaded from a file
#python main.py --name nn0_384_384_3 \
#--load \
#--train \
#--predict \
#--epochs 1.0
| true |
2b11097bd2b1a8570c29a170381a0fac46a8017b | Shell | samsucik/prosodic-lid-globalphone | /egs/yomdle_tamil/v1/local/prepare_dict.sh | UTF-8 | 569 | 3.265625 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Copyright 2017 Hossein Hadian
# 2017 Babak Rekabdar
# 2017 Chun Chieh Chang
# 2017 Ashish Arora
# This script prepares the dictionary.
set -e
dir=data/local/dict
# Parse standard Kaldi command-line options (may override $dir etc.).
. ./utils/parse_options.sh || exit 1;
mkdir -p $dir
# Generate the word->phone lexicon.
local/prepare_lexicon.py $dir
# Derive the non-silence phone set: drop the word column, remove SIL,
# split phones one per line, de-duplicate, and drop empty lines.
cut -d' ' -f2- $dir/lexicon.txt | sed 's/SIL//g' | tr ' ' '\n' | sort -u | sed '/^$/d' >$dir/nonsilence_phones.txt || exit 1;
# Add the optional-silence "word" and write the silence phone lists.
echo '<sil> SIL' >> $dir/lexicon.txt
echo SIL > $dir/silence_phones.txt
echo SIL >$dir/optional_silence.txt
# No extra phonetic questions are used in this setup.
echo -n "" >$dir/extra_questions.txt
| true |
cc9042488f7ff5fec9882c7f8a9d92cc53b45f2e | Shell | sandipkhot21/Openstack_0.0001 | /OpenStack/1Compute/Scripts/ComputeInstallScript2.sh~ | UTF-8 | 2,843 | 2.859375 | 3 | [] | no_license | #! /bin/sh
# Interactive OpenStack compute-node setup: network config, nova-compute,
# then the Neutron Open vSwitch agent, with manual checkpoints against the
# controller/network nodes between phases.
MYP=/home/sandip/1Compute;
# Print prompt $1 and read the reply; answering "n" aborts the whole script,
# anything else continues. Replaces six duplicated echo/read/if blocks
# from the original script.
confirm() {
	echo -n "$1";
	read var;
	if [ "$var" = "n" ];
		then exit 0;
	fi;
}
echo "***********************************************Next is OpenStack Network Installation***********************************************";
# Write the static two-interface network configuration and apply it.
echo $'auto lo\niface lo inet loopback\n\nauto eth0\niface eth0 inet static\naddress 192.168.1.31\nnetmask 255.255.255.0\nbroadcast 192.168.1.255\n\nauto eth0:0\niface eth0:0 inet static\naddress 192.168.2.31\nnetmask 255.255.255.0\nbroadcast 192.168.2.255' > /etc/network/interfaces;
service networking stop && service networking start;
route add default gw 192.168.1.1;
service networking stop && service networking start;
sync; sleep 5;
echo "***********************************************Next is Nova Installation***********************************************";
echo "Come from Controller, wait for #1 mssg from compute.....";
confirm "Continue with Nova-Compute Installation?y/n > ";
DEBIAN_FRONTEND=noninteractive apt-get install --force-yes -y nova-compute sysfsutils;
echo "Edit /etc/nova/nova.cnf";
confirm "Continue?y/n > ";
#cp $MYP/Nova/nova1.conf /etc/nova/nova.conf;
sync;
service nova-compute restart;
sleep 5;
echo "********************************************Done with Nova-Compute Installation********************************************";
echo "***********************************************Next is Neutron Installation***********************************************";
echo "2. Go to Controller, wait for #7 mssg from controller....";
echo "Don't Continue Install in neutron debug phase......";
confirm "Neutron-Network & Neutron-Controller Installation Done?y/n > ";
#This step to be done only after Neutron installation on Network node
echo "Edit /etc/sysctl.cnf";
confirm "Continue?y/n > ";
#cp $MYP/Neutron/sysctl.conf /etc/sysctl.conf;
sysctl -p;
DEBIAN_FRONTEND=noninteractive apt-get install --force-yes -y neutron-plugin-openvswitch-agent openvswitch-datapath-dkms;
echo "Edit /etc/neutron/neutron.cnf and /etc/neutron/plugins/ml2/ml2_conf.ini";
confirm "Continue?y/n > ";
#cp $MYP/Neutron/neutron.conf /etc/neutron/neutron.conf;
#cp $MYP/Neutron/ml2_conf.ini /etc/neutron/plugins/ml2/ml2_conf.ini;
sync;
service openvswitch-switch restart;
sleep 5;
echo "Edit /etc/nova/nova.cnf";
confirm "Continue?y/n > ";
#cp $MYP/Neutron/nova.conf /etc/nova/nova.conf;
sync;
service nova-compute restart;
service neutron-plugin-openvswitch-agent restart;
echo "*******************************************Configurations for Compute Node Complete*******************************************";
echo "8. Go to Controller....";
exit 0;
| true |
90706f98ecc0674ae88a814fe8944a6867ad0207 | Shell | joshmt1/UserCode | /DelphesCode/analysis/DelphesSkim.sh | UTF-8 | 475 | 2.90625 | 3 | [] | no_license | #! /bin/bash
if [ ${#1} = 0 ]; then
echo "Requires a directory as an argument"
exit
fi
#don't skim signal
mylist=`ls -1 $1/reduced*.root | grep -v susy | grep -v natural`
#MLL TIGHTHT
#export SKIMOPTION='TIGHTHT MLL'
#export SKIMOPTION='MLL'
export SKIMOPTION='TIGHTHT'
for j in $mylist;
do
echo $j
root -b -l -q "DelphesSkim.C+(\"$j\")"
#ps aux | grep joshmt | grep 'root.exe' | grep -v grep | wc -l
done
#ps aux | grep joshmt | grep -v grep | grep -c 'root\.exe'
| true |
e8f346edd6582b38e28cdf299ae12c1d4e04298d | Shell | bpei1989/MyCookbook | /Shell/3. 文件.sh | UTF-8 | 2,757 | 3.765625 | 4 | [] | no_license | 1. 交集
comm命令
comm t1 t2#两文件的交集
2. mkdir -p
该命令会自动创建不存在的路径
比如mkdir -p /home/ab/cd/e #ab cd 都会自动创建,不会提示错误
3. 文件权限
-rwxr-xr-x
第一个文件类型
第二组用户权限
第三组用户组
最后一个其他用户
chmod命令
粘滞位(+t)是应用于目录的权限类型,通过粘滞位,只有目录的所有者才能删除目录中的文件,即使用户组
和其他用户有权限也不能删除
chmod a+t dirname
4. 创建不可修改文件
chattr将文件设置为不可修改,即使root也不能修改
/etc/shadow文件就是这样的,该文件由当前系统中所有用户的密码组成,用passwd修改密码就是修改的这个文件
chattr +i file
或sudo chattr +i file
chattr -i file#去除该权限
5. touch建空白文件
6. 链接
ln -s target symblinkname
ln -l -s /var/ w#生成w为链接
7. 列举文件类型信息
#!/bin/bash
if [ $# -ne 1 ]; #参数个数
then
echo $0 basepath;
echo
fi
path=$1 #第一个参数
declare -A statarray; #声明关联数组
#while读入的写法,注意反引用的写法,注意关联数组,注意let
while read line;
do
ftype = `file -b "$line"` #file命令用于查看文件具体信息
let statarray["$ftype"]++;
done<<(find $path -type f -print)
#注意关联数组的写法,尤其是!,@用得多,因为返回列表
for ftype in "${!statarry[@]}";
do
echo $ftype : ${statarray["$ftype"]}
done
$./filestat.sh /home/tmp
Bourne-Again shell script
ASCII text executable
8. 环回(loopback)文件与挂载
loopback文件是在文件中而非物理设备中创建的文件系统,可以把这些文件挂载到挂载点上,就像设备一样
步骤:
dd if=/dev/zero of=loopbackfile.img bs=1G count=1#创建1G大小空文件
mkfs.ext4 loopbackfile.img#按照ext4格式化
mkdir /mnt/loopback#挂载点
mount -o loop loopbackfile.img /mnt/loopback #-o loop挂载回环文件系统
卸载用 umount
iso可以回环挂载
mkdir /mnt/iso
mount -o loop linux.iso /mnt/iso #iso是一个只读文件系统
sync #强制写入磁盘,root可执行,因为对挂载设备操作不会立即写入磁盘
9. iso
创建iso文件
dd if=/dev/cdrom of=image.iso
mkisofs -V "Label1" -o image.iso source_dir/ #-o iso文件路径,sourcedir是iso文件内容目录
isohybrid image.iso #生成类似系统盘
dd if=image.iso of=/dev/sdb1 #写入u盘
10. diff
diff file1 file2
-u选项用于一体化输出,容易读
11. head tail
cat a | head
head -n 4 file
cat a | tail
tail -n a
tail -f a #监视动态增长的文件
12. 统计
wc命令
#行数
wc -l file
cat file | wc -l
#单词数
wc -w file
#字符数
wc -c file
13. tree
tree ~/ #打印图形化目录
| true |
3160cac71aa3c679e7747d7978f99074978471b9 | Shell | Winiex/dotfiles.v2 | /bash/alias/os.bash | UTF-8 | 90 | 3.15625 | 3 | [] | no_license | # Get parent pid of a process
# Usage: ppid 21150
ppid() {
ps -p ${1:-$$} -o ppid;
}
| true |
042d2f1bca47f376a903ab95de7539db85b84071 | Shell | asmtal/baremetal-jenkins-management | /scripts/puppet_master.sh | UTF-8 | 833 | 2.515625 | 3 | [] | no_license | #!/usr/bin/env bash
set -eo pipefail
wget https://apt.puppetlabs.com/puppetlabs-release-trusty.deb
sudo dpkg -i puppetlabs-release-trusty.deb
sudo apt-get update
sudo apt-get --yes --force-yes install -f puppetmaster-passenger
cat <<EOF | sudo tee /etc/puppet/puppet.conf
[main]
logdir=/var/log/puppet
vardir=/var/lib/puppet
ssldir=/var/lib/puppet/ssl
rundir=/var/run/puppet
factpath=$vardir/lib/facter
templatedir=$confdir/templates
dns_alt_names = puppet,puppet.goings.space
[master]
ssl_client_header = SSL_CLIENT_S_DN
ssl_client_verify_header = SSL_CLIENT_VERIFY
autosign = true
EOF
sudo service apache2 stop
sudo puppet cert clean --all
cat <<EOF | sudo tee /etc/puppet/autosign.conf
*.goings.space
*ec2.internal
EOF
sudo puppet master --verbose --no-daemonize &
sleep 10
sudo killall puppet || true
sudo puppet master
| true |
45d173291c11139d390d428454e62b1f284a70c2 | Shell | tom-harwood/jburg3 | /test/grammar_driven/burmTest.sh | UTF-8 | 1,587 | 3.359375 | 3 | [
"Apache-2.0"
] | permissive | SCRIPT_DIR=`dirname $0`
JBURG_HOME=$SCRIPT_DIR/../..
usage()
{
echo "Usage: burmTest [-r] [-q] -g <grammar> -t <testcase> [-d <dumpfile>] [-p <templates>]"
}
args=`getopt c:d:g:m:p:t:rqv $*`
if [ $? -ne 0 ]
then
usage;
exit 2
fi
MAINCLASS=TestRunner
set -- $args
for i in $*
do
case "$i"
in
-c)
DUMPCLASS="-classname $2";
shift;
shift;;
-m)
MAINCLASS=$2;
shift;
shift;;
-d)
DUMPFILE="-dump $2";
if [ "$TEMPLATES" == "" ]
then
TEMPLATES="-templates xml.stg"
fi
shift;
shift;;
-g)
GRAMMAR=$2;
shift;
shift;;
-p)
TEMPLATES="-templates $2"
shift;
shift;;
-q)
QUIET="-quiet";
shift;;
-r)
RANDOMIZE="-randomize";
shift;;
-t)
TESTCASE=$2;
shift;
shift;;
-v)
VERBOSE_TRIGGER="-verbose-trigger $2";
shift;
shift;;
--)
break;;
esac
done
#echo java -ea -cp $JBURG_HOME/lib/jburg.jar:$HOME/tools/antlr-4.5.1-complete.jar:classes $MAINCLASS -grammar $GRAMMAR $QUIET $RANDOMIZE $VERBOSE_TRIGGER $DUMPFILE $TEMPLATES $TESTCASE $DUMPCLASS
java -ea -cp $JBURG_HOME/lib/jburg.jar:$HOME/tools/antlr-4.5.1-complete.jar:classes $MAINCLASS -grammar $GRAMMAR $QUIET $RANDOMIZE $VERBOSE_TRIGGER $DUMPFILE $TEMPLATES $TESTCASE $DUMPCLASS
| true |
e282e847a8f1129d31a71e41d1ce17ce7cf58b94 | Shell | fbie/lego-analysis | /scripts/find-next.sh | UTF-8 | 83 | 2.53125 | 3 | [] | no_license | for f in $1/*-$1.csv; do if grep -q "Next step;16" $f; then cp $f clean/; fi; done
| true |
a86c6f554ea2aaa0b9427fc83ca7c151b35896de | Shell | vijay517/DEVICE | /executable_files/setup_device.sh | UTF-8 | 1,695 | 3.859375 | 4 | [] | no_license | #!/bin/bash
# exit when any command fails
set -e
ROOTDIR=~/Desktop/DEVICE
#-----------------------------------------------------------------------------------------------------
# STEP 1: CHECKING IF deviceinfo.txt FILES ARE PRESENT
#-----------------------------------------------------------------------------------------------------
#Checking if the device info text in present
if [ ! -f $ROOTDIR/deviceinfo.txt ]
then
echo "authentication file keys.csv does not exist in the directory:" $(pwd)
exit -1
fi
#--------------------------------------------------------------------------------------------------------
# STEP 2: CREATING THING, CERT AND PRIVATE KEY FOR THE THING. THE POLICY AND CERT IS ATTACHED TO THE THING
#--------------------------------------------------------------------------------------------------------
#Creating a IoT thing in IoT core in aws
DEVICENAME=$(cat ${ROOTDIR}/deviceinfo.txt | grep -m1 -B1 "DEVICENAME" | grep -Po 'DEVICENAME:\K.*')
aws iot create-thing --thing-name $DEVICENAME
#Create certificate and keys. After creating the keys, the certificate arn is stored for further use
certificateArn=$(aws iot create-keys-and-certificate --set-as-active --certificate-pem-outfile certificate.pem.crt --private-key-outfile private.pem.key | grep -B1 certificateArn | grep -Po '"'"certificateArn"'"\s*:\s*"\K([^"]*)')
#Move the private key and certificate to the certificate directory
mv private.pem.key certificate.pem.crt $ROOTDIR/certificates/
#Attach the policy and certificate to the thing
aws iot attach-policy --policy-name labPolicy --target $certificateArn
aws iot attach-thing-principal --thing-name $DEVICENAME --principal $certificateArn
| true |
71d333d60bcc76afab021eda6461d24a3334fa2c | Shell | pdjr-beatrice/log | /bin/log-runtime | UTF-8 | 2,544 | 4.0625 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
#
# NAME
# log-runtime.sh - process a log file and print the total runtime of some
# device.
#
# SYNOPSIS
# log-runtime [-d device] logfilespec...
# log-runtime -h
#
# DESCRIPTION
# log-runtime outputs the total number of operating hours for a particular
# device as recorded in one or more specified log files.
#
# logfilespec is a log file specifier of one of three types:
#
# YYYY selects all the log files for the specified year;
# YYYYMM selects all the log files for the specified month;
# YYYYMMDD selects the log file for the specified day.
#
# The optional -d switch allows log entries for the required device to be
# specified by their log file token. In the absence of the -d option, the
# script will return total engine hours (equivalent to specifying -d ENGINE).
#
# The script operates by processing timestamp values on device State entries
# in each selected log file and so calculates the total device runtime as
# hours:minutes for the selected day.
#
# AUTHOR
# Paul Reeve <preeve@pdjr.eu>
source log.defs
DEVICE=${LOG_HOURS_DEFAULT_DEVICE}
FILESPECS=
while [ "${1}" != "" ] ; do
case ${1} in
-d)
shift
DEVICE=${1}
;;
-h)
${GREP} '^#' ${0}
exit 0
;;
*)
FILESPEC=${1}
while [ ${#FILESPEC} -lt 8 ] ; do FILESPEC=${FILESPEC}'?' ; done
FILESPECS="${FILESPECS} ${LOGDIR}${FILESPEC}"
;;
esac
shift
done
if [ "${FILESPECS}" == "" ] ; then
echo "usage: log-runtime [-d DEVICE] logfilespec..."
echo "usage: log-runtime -h"
exit 0
fi
TOTALRUNTIME=0
for FILENAME in ${FILESPECS} ; do
ENTRIES=$(${GREP} -h "${DEVICE^^} State" ${FILENAME})
START=""
STOP=""
while read time ltime selector event state ; do
if [ "${state}" == "1" ] ; then
START=$(${DATE} --utc -d "${time}" '+%s')
fi
if [ "${state}" == "0" ] ; then
if [ "${START}" != "" ] ; then
STOP=$(${DATE} --utc -d "${time}" '+%s') ;
fi
fi
if [ "${START}" != "" ] && [ "${STOP}" != "" ] ; then
RUNTIME=$(( ${STOP} - ${START} ))
TOTALRUNTIME=$(( ${TOTALRUNTIME} + ${RUNTIME} ))
START=""
STOP=""
fi
done <<< "${ENTRIES}"
done
HOURS=$(( ${TOTALRUNTIME} / 3600 ))
MINUTES=$(( ($TOTALRUNTIME - (${HOURS} * 3600)) / 60 ))
if [ ${#MINUTES} -eq 1 ] ; then MINUTES="0${MINUTES}" ; fi
echo "${HOURS}:${MINUTES}"
| true |
2ab60ca553c2f5b0a91b1fd045472ab26c34fa0b | Shell | StarThinking/parameter_test_controller | /container_utility_sh/list_files_key.sh | UTF-8 | 313 | 3.1875 | 3 | [] | no_license | #!/bin/bash
if [ $# -ne 2 ]; then echo 'wrong: num key '; exit -1; fi
num=$1
key=$2
for i in $(seq 0 $num)
do
docker exec hadoop-$i bash -c "/root/parameter_test_controller/container_utility_sh/list_files_key_stub.sh $num '$key'" &
pids[$i]=$!
done
for i in $(seq 0 $num)
do
wait ${pids[$i]}
done
| true |
15e4340d340f2192febec3805d8e4bad04abda4d | Shell | frog-ny/tech-poc-puppeteer-mocha | /build.sh | UTF-8 | 287 | 2.546875 | 3 | [] | no_license | #!/bin/bash
imageName=web-image
containerName=puppeteer-mocha-poc
docker build --no-cache -t $imageName -f Dockerfile .
echo Delete old container...
docker rm -f $containerName
echo Run new container...
docker run -it -d -p 3000:3000 --name $containerName -v $(pwd):/www/ $imageName
| true |
406c28b0782c60335e27692c24bbc00c13cc2019 | Shell | MarioAndWario/QETools | /qrm/qrm.sh | UTF-8 | 8,123 | 3.75 | 4 | [] | no_license | #!/bin/bash
# default directory is `.`
# -a$DIR/--all=$DIR : rm all output files
# -w$DIR/--wfn=$DIR : rm wfn in *.save directory
# -l$DIR/--log=$DIR : rm log files
# -e$DIR/--eig=$DIR : rm eigenvalue and helper files
# -s$DIR/--save=$DIR : rm prefix.save directory in dos, pdos, ldos calculations
# ======
# read the options
# `getopt -o` followed by short options
# `getopt --long` followed by long options
# ======
# The set command takes any arguments after the options (here "--" signals the end of the options) and assigns them to the positional parameters ($0..$n). The eval command executes its arguments as a bash command.
# set -- "ab bc" will treat `"ab` as $1 and `bc$` as $2, (annoying whitespace problem!!!)
# eval set -- "ab bc" will treat `ab bc` as $1. By passing the set command to eval bash will honor the embedded quotes in the string rather than assume they are part of the word.
# '--' means no more options following (both set and getopt use it), and we can specify the input string, which is usually $@, which is an array of all the input argument in command line.
# ======
# shift n : moving current argument parameter (e.g. $4) to $(4-n)
# ======
# getopt just like a rearrangement of string
TEMP=`getopt -o ha::W::w::l::e::s:: --long --help,all::,wfn::,wfc::,log::,eig::,save:: -n 'Some errors!' -- "$@"`
#echo "${TEMP}"
eval set -- "$TEMP"
DIRname="."
# extract options and their arguments into variables.
while true ; do
DELflag="MW"
case "$1" in
-a|--all)
case "$2" in
"")
DIRname="."
shift 2 ;;
*)
DIRname=$2
shift 2 ;;
esac
if [ -f ${DIRname}/QE.in ]; then
prefix=$(grep "prefix" "${DIRname}/QE.in" | head -n 1 | awk -F"[']" '{print $2}')
elif [ -f ${DIRname}/IN.q ]; then
prefix=$(grep "prefix" "${DIRname}/IN.q" | head -n 1 | awk -F"[']" '{print $2}')
else
echo "--- No input file in ${DIRname}"
exit
fi
echo "+++ Deleting save dir in ${DIRname}"
rm -rf ${DIRname}/JOB.*
rm -rf ${DIRname}/slurm*
rm -rf ${DIRname}/${prefix}.wfc*
rm -rf ${DIRname}/${prefix}.hub*
rm -rf ${DIRname}/${prefix}.save
rm -rf ${DIRname}/${prefix}.xml
rm -rf ${DIRname}/WFN*
rm -rf ${DIRname}/bin*
rm -rf ${DIRname}/hdf5*
rm -rf ${DIRname}/QE*
rm -rf ${DIRname}/eigen*
rm -rf ${DIRname}/helper*
rm -rf ${DIRname}/bands
rm -rf ${DIRname}/temp*
rm -rf ${DIRname}/*.out
rm -rf ${DIRname}/*.dat
;;
-W|--wfn)
case "$2" in
"")
DIRname="."
shift 2 ;;
*)
DIRname=$2
shift 2 ;;
esac
########
#Decide wether or not to carry on based on if there is IN.q in current directory
if [ -f ${DIRname}/QE.in ]; then
echo "+++ Found PW input file in ${DIRname}"
prefix=$(grep "prefix" "${DIRname}/QE.in" | head -n 1 | awk -F"[']" '{print $2}')
elif [ -f ${DIRname}/IN.q ]; then
echo "+++ Found PW input file in ${DIRname}"
prefix=$(grep "prefix" "${DIRname}/IN.q" | head -n 1 | awk -F"[']" '{print $2}')
else
echo "--- No PW (-W) input file in ${DIRname}"
continue
fi
########
# echo "prefix = ${prefix}"
if [ ! -z ${prefix} ]; then
if [ -d "${DIRname}/${prefix}.save" ]; then
echo "+++ Deleting WFNDir K* in ${DIRname}/${prefix}.save"
rm -rf ${DIRname}/${prefix}.save/K*
rm -rf ${DIRname}/${prefix}.save/wfc*
fi
fi
;;
-w|--wfc)
case "$2" in
"")
DIRname="."
shift 2 ;;
*)
DIRname=$2
shift 2 ;;
esac
if [ -f ${DIRname}/QE.in ]; then
echo "+++ Found PW input file in ${DIRname}"
prefix=$(grep "prefix" "${DIRname}/QE.in" | head -n 1 | awk -F"[']" '{print $2}')
elif [ -f ${DIRname}/IN.q ]; then
echo "+++ Found PW input file in ${DIRname}"
prefix=$(grep "prefix" "${DIRname}/IN.q" | head -n 1 | awk -F"[']" '{print $2}')
else
echo "--- No PW (-w) input file in ${DIRname}"
continue
fi
echo "+++ Deleting WFNfile ${prefix}.wfc* and ${prefix}.hub* in ${DIRname}"
rm -rf ${DIRname}/${prefix}.wfc*
rm -rf ${DIRname}/${prefix}.hub*
rm -rf ${DIRname}/${prefix}.mix*
rm -rf ${DIRname}/${prefix}.igk*
;;
-s|--save)
case "$2" in
"")
DIRname="."
shift 2 ;;
*)
DIRname=$2
shift 2 ;;
esac
if [ -f "${DIRname}/dos.in" ]; then
echo "+++ Found DOS input file in ${DIRname}"
prefix=$(grep "prefix" "${DIRname}/dos.in" | head -n 1 | awk -F"[']" '{print $2}')
elif [ -f ${DIRname}/pp.in ]; then
echo "+++ Found LDOS input file in ${DIRname}"
prefix=$(grep "prefix" "${DIRname}/pp.in" | head -n 1 | awk -F"[']" '{print $2}')
elif [ -f ${DIRname}/projwfc.in ]; then
echo "+++ Found PDOS input file in ${DIRname}"
prefix=$(grep "prefix" "${DIRname}/projwfc.in" | head -n 1 | awk -F"[']" '{print $2}')
else
echo "--- No DOS input file in ${DIRname}"
continue
fi
if [ -d ${DIRname}/${prefix}.save ]; then
echo "+++ Deleting ${prefix}.save in ${DIRname} ->"
rm -rf ${DIRname}/${prefix}.save
fi
;;
-l|--log)
case "$2" in
"")
DIRname="."
shift 2 ;;
*)
DIRname=$2
shift 2 ;;
esac
rm -rf ${DIRname}/JOB.*
rm -rf ${DIRname}/slurm-*
;;
-e|--eig)
case "$2" in
"")
DIRname="."
shift 2 ;;
*)
DIRname=$2
shift 2 ;;
esac
rm -rf ${DIRname}/eigenvalue*
rm -rf ${DIRname}/Klength.dat
rm -rf ${DIRname}/helper*
rm -rf ${DIRname}/tempEig.dat
rm -rf ${DIRname}/Eig.*
;;
-h)
shift
echo "Usage: qcp.sh -a\$DIR/-all=\$DIR : rm all output files (default ./)"
echo " -w\$DIR/-wfc=\$DIR : rm all wfns files (default ./)"
echo " -l\$DIR/-log=\$DIR : rm all log files (default ./)"
echo " -s\$DIR/-save=\$DIR : rm prefix.save directory in dos, ldos, pdos calcualtions (default ./)"
echo " -W\$DIR/-wfn=\$DIR : rm prefix.save/K* directories (default ./)"
break
;;
--)
shift
echo "----------------------------------------------"
# echo "Usage: qcp.sh -a\$DIR/-all=\$DIR : rm all output files (default ./)"
# echo " -w\$DIR/-wfc=\$DIR : rm all wfns files (default ./)"
# echo " -l\$DIR/-log=\$DIR : rm all log files (default ./)"
# echo " -s\$DIR/-save=\$DIR : rm prefix.save directory in dos, ldos, pdos calcualtions (default ./)"
# echo " -W\$DIR/-wfn=\$DIR : rm prefix.save/K* directories (default ./)"
exit 0
;;
esac
done
| true |
0d5bf6ea94e90c133ac07ea7390fd9abb492c6dd | Shell | ZMTraobin/ZMTraobin | /villager/csp-doc/01.开发规范/csp_patch.sh | UTF-8 | 560 | 2.5625 | 3 | [] | no_license |
#!/bin/bash
echo "------update csp server begin------"
cd /gitRepertory/csp/
pwd
git pull origin dev
mvn clean package -Psit
cp /gitRepertory/csp/core/target/core.war /var/www/tomcat8hap/webapps/ROOT/
cd /var/www/tomcat8hap/webapps/ROOT/
jar -xf core.war
if [ `ps -ef | grep '/var/www/tomcat8hap/bin' | grep -v 'grep' | awk '{print $2}'` != "" ]; then
kill `ps -ef | grep '/var/www/tomcat8hap/bin' | grep -v 'grep' | awk '{print $2}'`
else
echo "no tomcat8hap process"
fi
cd /var/www/tomcat8hap/bin
./startup.sh
echo "------update csp server end------"
| true |
99661909ccf40d1914a6949a241f83de82fd1868 | Shell | thiziri/MatchZoo | /data/trec/prepare_trec_data.sh | UTF-8 | 1,037 | 2.90625 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
currpath=`pwd`
# In addition to sample.txt, you need to prepare the trec_corpus.txt file that contains all the dataset with trec ids
# that will be used fo train/test/valid
# here is an example line of the trec_corpus.txt file:
# 682 adult immigrant english
# "682" is the query ID followed by its words
# an example is provided in the file: trec_corpus.txt
# data preparation
python3 preparation_for_ranking.py ${currpath}
# download the glove vectors
wget http://nlp.stanford.edu/data/glove.6B.zip
unzip glove.6B.zip
# generate word embedding
python3 gen_w2v.py glove.6B.50d.txt word_dict.txt embed_glove_d50
python3 norm_embed.py embed_glove_d50 embed_glove_d50_norm
# generate data histograms for drmm model
# generate data bin sums for anmm model
# generate idf file
cat word_stats.txt | cut -d ' ' -f 1,4 > embed.idf
python3 histogram_generator.py 30 50 ${currpath} embed_glove_d50_norm
python3 binsum_generator.py 20 ${currpath} 50 embed_glove_d50_norm # the default number of bin is 20
echo "Done."
| true |
457a7e9c338b1ffe7e213d162b38ec9437d7c32d | Shell | medical-projects/doctorai-docker | /scripts/env.init.sh | UTF-8 | 2,351 | 2.9375 | 3 | [] | no_license | #!/usr/bin/env bash
source /etc/profile
source /etc/bashrc
source /scripts/config.sh
# zeppelin
if [[ $INSTALL_ZEPPELIN == "1" ]]; then
(! id -u zeppelin > dev/null 2>&1 ) && adduser zeppelin #
echo "download zeppelin..."
wget http://archive.apache.org/dist/zeppelin/zeppelin-0.7.1/zeppelin-0.7.1-bin-netinst.tgz
echo "extract zeppelin..."
tar -xzf zeppelin-0.7.1-bin-netinst.tgz
echo "rm zeppelin.tgz..."
rm -f zeppelin-0.7.1-bin-netinst.tgz
echo "instgall interpreters"
/zeppelin-0.7.1-bin-netinst/bin/install-interpreter.sh --name md
/zeppelin-0.7.1-bin-netinst/bin/install-interpreter.sh --name python
mv zeppelin-0.7.1-bin-netinst /usr/local/
ln -sf /usr/local/zeppelin-0.7.1-bin-netinst /usr/local/zeppelin
echo 'export PATH=/usr/local/zeppelin/bin:$PATH' >> /etc/bashrc
echo "zeppelin success"
fi
# miniconda
if [[ $INSTALL_CONDA == "1" ]]; then
echo "downloading miniconda"
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh
chmod +x miniconda.sh
./miniconda.sh -b -p /usr/local/mc && rm -rf miniconda.sh
export PATH=/usr/local/mc/bin:$PATH
echo 'export PATH=/usr/local/mc/bin:$PATH' >> /etc/bashrc
if [[ $INSTALL_IPYTHON == "1" ]]; then
conda install --yes ipython
fi
# theano
if [[ $INSTALL_THEANO == "1" ]]; then
conda install --yes numpy theano
fi
# pytorch
if [[ $INSTALL_PYTORCH == "1" ]]; then
# compile
# export CMAKE_PREFIX_PATH=/usr/local/mc
#
# # Install basic dependencies
# conda install --yes numpy pyyaml mkl setuptools cmake gcc cffi
#
# # Add LAPACK support for the GPU
# #conda install -c soumith magma-cuda80 # or magma-cuda75 if CUDA 7.5
#
# wget https://github.com/pytorch/pytorch/archive/v0.2.0.tar.gz -O pytorch.tgz
# tar -xzvf pytorch.tgz
# mv pytorch-0.2.0 /usr/local/src
# (cd /usr/local/src/pytorch-0.2.0 && python setup.py install)
# rm -f pytorch.tgz
#
# # for https://github.com/pytorch/examples/tree/master/imagenet
# pip install torchvision
# install pytorch
conda install pytorch torchvision -c soumith
fi
# tf
if [[ $INSTALL_TENSORFLOW == "1" ]]; then
conda install --yes tensorflow
fi
# bug fix
pip install py4j
echo "miniconda success"
fi
# NOT cleanup self
# rm -f /scripts/env.init.sh
| true |
c9206ee82dfa131f699b71228ab450893a652eb2 | Shell | taext/research_os | /bin/sub | UTF-8 | 401 | 2.796875 | 3 | [] | no_license | #!/bin/bash
# takes YouTube URL, downloads .json (video info) and .vtt (subtitles)
cd /home/dd/Documents/srt_search
youtube-dl $1 -j > "./files/$(youtube-dl $1 -e).json"
#./get_yt_sub.py $1 | xargs -0 -I {} wget {} -O "./files/$(youtube-dl $1 -e).srt"
cd files
youtube-dl --skip-download --write-auto-sub $1 1> /dev/null 2> /dev/null
cd -
#./only_textify_srt.py "$(ls -t ./files/*.vtt | head -n 1)"
| true |
0efc8633d24bf601d30299ba6cfd77c545a46f65 | Shell | habitat-sh/core-plans | /inetutils/plan.sh | UTF-8 | 2,335 | 3.078125 | 3 | [
"Apache-2.0"
] | permissive | pkg_name=inetutils
pkg_origin=core
pkg_version=1.9.4
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_description="\
Inetutils is a collection of common network programs. It includes: an ftp \
client and server, a telnet client and server, an rsh client and server, an \
rlogin client and server, a tftp client and server, and much more...\
"
pkg_upstream_url="http://www.gnu.org/software/inetutils/"
pkg_license=('GPL-3.0-or-later')
pkg_source="http://ftp.gnu.org/gnu/$pkg_name/${pkg_name}-${pkg_version}.tar.xz"
pkg_shasum="849d96f136effdef69548a940e3e0ec0624fc0c81265296987986a0dd36ded37"
pkg_deps=(
core/glibc
core/libcap
core/ncurses
)
pkg_build_deps=(
core/coreutils
core/diffutils
core/patch
core/make
core/gcc
core/sed
core/grep
)
pkg_bin_dirs=(bin)
do_build() {
# Configure flag notes:
#
# * `--disable-logger`: prevents building the `logger`, as the version from
# Util-linux will be used instead
# * `--disable-whois`: prevents building the `whois` tool, which is out of
# date
# * `--disable-r*`: prevents building of obsolete programs such as `rlogin`,
# `rsh`, etc.
# * `--disable-servers`: prevents the building of the server components in
# this codebase, such as `telnetd`, `ftpd`, etc.--a dedicated Plan for
# any of these service components is much preferred
./configure \
--prefix="$pkg_prefix" \
--disable-logger \
--disable-whois \
--disable-rcp \
--disable-rexec \
--disable-rlogin \
--disable-rsh \
--disable-servers
make LDFLAGS="$LDFLAGS -Wl,--copy-dt-needed-entries"
}
do_install() {
do_default_install
# `libexec/` directory is not used
rm -rfv "$pkg_prefix/libexec"
}
# ----------------------------------------------------------------------------
# **NOTICE:** What follows are implementation details required for building a
# first-pass, "stage1" toolchain and environment. It is only used when running
# in a "stage1" Studio and can be safely ignored by almost everyone. Having
# said that, it performs a vital bootstrapping process and cannot be removed or
# significantly altered. Thank you!
# ----------------------------------------------------------------------------
if [[ "$STUDIO_TYPE" = "stage1" ]]; then
pkg_build_deps=(
core/gcc
core/coreutils
core/sed
core/grep
)
fi
| true |
6d3a9070d2f399b92a29c5b76a3213603fdbd8c5 | Shell | MetaMeute/ff-monitor-bw | /monitor-bw-rrd.sh | UTF-8 | 1,023 | 3.265625 | 3 | [
"CC0-1.0"
] | permissive | #!/bin/bash
. ./monitor-bw.conf
rrd_create() {
STEP=$TESTRUN_INTERVAL
HEARTBEAT=$(($STEP*2))
STEPS_PER_HOUR=$((60*60/$STEP))
ROWS_MONTH=$((31*24*60*60/$STEP))
ROWS_YEAR=$((366*24*60*60/($STEP*$STEPS_PER_HOUR)))
# Minimum: 0 MBit/s, Maximum: 100 MBit/s (input in KBit/s)
# 1st RRA: 31 days, $STEP resolution
# 2.-4. RRA: 366 days, hourly resolution
rrdtool create "$1" \
--step $STEP \
DS:vpn-throughput-tx:GAUGE:$HEARTBEAT:0:102400 \
DS:vpn-throughput-rx:GAUGE:$HEARTBEAT:0:102400 \
RRA:LAST:0:1:$ROWS_MONTH \
RRA:AVERAGE:0.5:$STEPS_PER_HOUR:$ROWS_YEAR \
RRA:MIN:0.5:$STEPS_PER_HOUR:$ROWS_YEAR \
RRA:MAX:0.5:$STEPS_PER_HOUR:$ROWS_YEAR
}
rrd_update() {
local input; local valuetx; local valuerx; local valtime
while read input; do
valtime="${input%% *}"
valuetx="${input#* }"
valuerx="${valuetx#* }"
valuetx="${valuetx% *}"
rrdtool update "$1" "$valtime":"$valuetx":"$valuerx"
done
}
trap "pkill -P $$" EXIT
[ ! -f "$1" ] && rrd_create "$1"
./monitor-bw-ns.sh | rrd_update "$1"
| true |
50c2a6e338b55cfac33c6090d03a6989ce9de23b | Shell | KaOSx/main | /perl-uri/PKGBUILD | UTF-8 | 581 | 2.6875 | 3 | [] | no_license |
pkgname=perl-uri
pkgver=5.19
pkgrel=1
pkgdesc="Uniform Resource Identifiers (absolute and relative)"
arch=('x86_64')
url="https://metacpan.org/release/URI"
license=('PerlArtistic')
depends=('perl')
options=('!emptydirs')
source=("https://cpan.metacpan.org/authors/id/S/SI/SIMBABQUE/URI-${pkgver}.tar.gz")
md5sums=('3afa2a27cd215d88c3301087f7b150c1')
build() {
cd URI-$pkgver
perl Makefile.PL INSTALLDIRS=vendor
make
}
check() {
cd URI-${pkgver}
# needs extra check depends for all tests
make test
}
package() {
cd URI-$pkgver
make install DESTDIR=${pkgdir}
}
| true |
19d666187c2b7aa33328fa737a7df4edd640cd51 | Shell | blueprintmrk/provision | /provision.sh | UTF-8 | 4,148 | 3.578125 | 4 | [
"BSD-2-Clause"
] | permissive | #! /bin/bash
#
# This script is designed to be run on a fresh install of Arch Linux. You are
# advised to check through it and comment out the things you don't want. By
# default it does basic setup, then installs and configures nginx, postgres,
# mongo, redis, ruby, and nodejs. As-is, it must be run interactively (although
# it can be run unattended - see comments in source).
#
# ===================
# SECURITY WARNING!
# ===================
#
# This script populates your ~/.ssh/authorized_keys with the authorised keys
# for the Github account $GHUSER. This means that if a computer can push
# commits to github repos owned by $GHUSER, then by the time this script is
# done it will also be able to log into your machine! Be careful! If you enter
# an invalid username, your ~/.ssh/authorized_keys will be filled with garbage.
# See github.org/asayers/provision/ for the auxiliary files
# You should probably change these.
export HOSTNAME="vanaheimr" # Desired hostname
export NAME="Alex Sayers" # Your full name (for git config)
export EMAIL="alex.sayers@gmail.com" # Your email (for ssh and git config)
export GHUSER="" # Your github username (for authorised SSH keys) - MAKE SURE THIS IS YOU!
# Config files will be downloaded from here. This can be left alone, but feel free to host your own configs.
export URL="https://raw.github.com/asayers/provision/master" # config files will be downloaded from here.
echo "Setting hostname to $HOSTNAME"
echo $HOSTNAME > /etc/hostname
echo "Setting locale to en_GB.UTF-8"
echo -e "en_GB.UTF-8 UTF-8\nen_US.UTF-8 UTF-8" > /etc/locale.gen
echo 'LANG="en_GB.UTF-8"' > /etc/locale.conf
echo "KEYMAP=uk" > /etc/vconsole.conf
locale-gen
echo "Setting timezone to London"
ln -s /usr/share/zoneinfo/Europe/London /etc/localtime
hwclock --systohc --utc
echo "Initializing pacman"
haveged -w 1024
pacman-key --init
pkill haveged
pacman-key --populate archlinux
# This last step is impossible unattended. If you must run this script unattended you can skip it by running:
#curl "$URL/pacman.conf" > ~/pacman.conf
#alias pacman="pacman --noconfirm --config ~/pacman.conf"
echo "Installing AUR helper"
bash <(curl aur.sh) -si packer # We're using packer, because it doesn't have AUR dependencies.
echo "Upgrading system"
pacman -Syu --noconfirm
pacman -Syu --noconfirm
pacman -S --noconfirm base-devel sudo mosh tar htop tmux zsh vim git nodejs
echo "Configuring user: root"
passwd
echo "%wheel ALL=(ALL) ALL" >> /etc/sudoers
echo "Configuring user: deployer"
useradd -m -g users -G wheel -s /bin/zsh deployer
passwd deployer
chown -R deployer:users /home/deployer
chmod a+rx /home/deployer
cd /home/deployer
su deployer
echo ":: config..."
# home.tar includes terminfo, bashrc, zshrc, and tmux config.
curl "$URL/home.tar" | tar xv
echo ":: ssh..."
ssh-keygen -t rsa -C "$EMAIL" -f ~/.ssh/id_rsa
curl "https://github.com/"$GHUSER".keys" > .ssh/authorized_keys
echo ":: git..."
git config --global user.name "$NAME"
git config --global user.email "$EMAIL"
echo ":: ruby..."
curl https://raw.github.com/fesplugas/rbenv-installer/master/bin/rbenv-installer | bash
rbenv install 1.9.3-p194
rbenv global 1.9.3-p194
rbenv bootstrap
rbenv rehash
exit
echo "Setting up nginx"
pacman -S --noconfirm nginx
cd /etc/nginx
curl "$URL/nginx.tar" | tar xv # Includes h5bp nginx conf
systemctl enable nginx
systemctl start nginx
echo "Setting up postgres"
pacman -S --noconfirm postgresql
chown -R postgres /var/lib/postgres/
su - postgres -c "initdb --locale en_US.UTF-8 -D '/var/lib/postgres/data'"
su - postgres -c "createuser -s deployer"
mkdir /run/postgresql
chown postgres /run/postgresql/
systemctl enable postgresql
systemctl start postgresql
echo "Maybe add a password for the deployer postgres user?"
echo "Setting up redis"
pacman -S --noconfirm redis
systemctl enable redis
systemctl start redis
echo "Setting up mongo"
pacman -S --noconfirm mongodb
systemctl enable mongodb
systemctl start mongodb
echo "Done!"
echo "Remember to add deployer's key to github:"
cat ~deployer/.ssh/id_rsa.pub
| true |
3598921a681410b1ce60d0636d83468422a311f0 | Shell | brooks-builds/bbggez | /run-examples.sh | UTF-8 | 152 | 2.78125 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env sh
for example in $(ls examples)
do
echo "Running example $example"
cargo run --example "$(echo "$example" | cut -f 1 -d ".")"
done
| true |
6ee90b9caa00fb84769b1cdbc976db75ea20e330 | Shell | sitodav/before-november-2014 | /C/C_Unix_Programming/SHELLSCRIPT/settimo/script.sh | UTF-8 | 359 | 3.4375 | 3 | [] | no_license |
touch DIZIONARIO
i=0
while [ $i -lt 4 ]
do
echo "Inserisci nome"
read nome
echo $nome >> DIZIONARIO
i=$(($i+1))
done
sort DIZIONARIO -r
echo "Chi vuoi cercare ?"
read key
found=`grep -o -c "$key" DIZIONARIO`
if [ $found -eq 1 ]
then
echo "TROVATO" $key "CON SUCCESSO"
else
echo "NON TROVATO"
fi
echo "DIZIONARIO OCCUPA N BYTES: " `wc -c DIZIONARIO`
| true |
4040844a8456340df5b05cafbe1aa469268d8041 | Shell | gesielrios/2pg_cartesian | /scripts/dm/refinement/reune.sh | UTF-8 | 365 | 3.453125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
nodefile=$1
path_to_pdbs=$2
destination=$3
here=$(pwd)
total_rep=5
total_nodes=$(wc -l "$nodefile" | awk '{print $1}')
node=1
while [ $node -le $total_nodes ]; do
node_name=$(head -n $node "$nodefile" | tail -n 1)
echo -n "$node_name"" ... "
scp "$node_name":"$path_to_pdbs""/"* "$destination" >/dev/null
echo "ok"
let node=$node+1
done
| true |
ec39cc0e4badb23ecc1442af9780dae05baa06ec | Shell | elifesciences-publications/MAP-C | /ChIPseq_annotate_genes.sh | UTF-8 | 970 | 2.984375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# ChIPseq_annotate_genes.sh
# Seungsoo Kim
# load modules
module load bedtools/2.26.0
peaksdir='nobackup/ChIPseq/macs'
for tf in 'Leu3' 'Sdd4' 'Rgt1'
do
for c in 'saturated' 'exponential'
do
bedtools closest -D a -fd -t first -a <(bedtools closest -D a -fu -t first -a <(awk '{OFS="\t"; print $1, $2+$10, $2+$10+1, $7}' $peaksdir/${tf}_${c}_merged_peaks.narrowPeak | sort -k1,1 -k2,2n) -b <(sort -k1,1 -k4,4n ../sacCer3_genes_cleaned.gff | cut -f1,4-10)) -b <(sort -k1,1 -k4,4n ../sacCer3_genes_cleaned.gff | cut -f1,4-10) | cut -f1-4,9,11-13,18,20-22 > $peaksdir/${tf}_${c}_merged_peaks_annotated.bed
awk '{OFS="\t"; if ($5=="-" && $9=="-") print $0, $7; else if ($5=="+" && $9=="+") print $0, $11; else if ($5=="-" && $9=="+" && (-$8 < $12)) print $0, $7; else if ($5=="-" && $9=="+" && (-$8 >= $12)) print $0, $11; else print $0, $7 "-" $11}' $peaksdir/${tf}_${c}_merged_peaks_annotated.bed > $peaksdir/${tf}_${c}_merged_peaks_labeled.bed
done
done
| true |
7e4dd4b95d324fd844bf67db095438d68c7d38e2 | Shell | xen0n/dotfiles | /.zshrc | UTF-8 | 7,105 | 2.6875 | 3 | [] | no_license | # Path to your oh-my-zsh installation.
# Root of the oh-my-zsh installation.
export ZSH=$HOME/.oh-my-zsh
# Set name of the theme to load.
# Look in ~/.oh-my-zsh/themes/
# Optionally, if you set this to "random", it'll load a random theme each
# time that oh-my-zsh is loaded.
ZSH_THEME="xen0n"
#ZSH_THEME="robbyrussell"
# Example aliases
# alias zshconfig="mate ~/.zshrc"
# alias ohmyzsh="mate ~/.oh-my-zsh"
# Uncomment the following line to use case-sensitive completion.
# forced to enable this since https://github.com/ohmyzsh/ohmyzsh/issues/10972
# later fixed by https://github.com/ohmyzsh/ohmyzsh/pull/11526
#CASE_SENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to change how often to auto-update (in days).
# export UPDATE_ZSH_DAYS=13
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to disable command auto-correction.
# DISABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
# COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
# DISABLE_UNTRACKED_FILES_DIRTY="true"
# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# The optional three formats: "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
# HIST_STAMPS="mm/dd/yyyy"
# Would you like to use another custom folder than $ZSH/custom?
# ZSH_CUSTOM=/path/to/new-custom-folder
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Must be populated before oh-my-zsh.sh is sourced below.
plugins=(autojump autopep8 bower celery coffee encode64 fabric git gitfast git-flow gradle mvn npm pip python svn-fast-info systemd urltools)
# User configuration
#. /etc/profile
# detect OS
# NOTE(review): `local` at the top level of a zsh rc file (outside any
# function) is unusual; plain assignments would be the conventional form
# here — confirm this behaves as intended on the zsh versions in use.
local is_linux=false
local is_darwin=false
case `uname -s` in
  Darwin) is_darwin=true ;;
  Linux) is_linux=true ;;
esac
# detect environment
# Presence of this marker file switches on the work-machine configuration
# used by $is_work checks further down.
local is_work=false
if [ -e ~/.xen0n-work-env ]; then
  is_work=true
fi
# Optional machine-specific overrides, sourced only if present.
[[ -f ~/.zshrc.flavor ]] && source ~/.zshrc.flavor
# source oh-my-zsh after PATH export for it to pick up commands at custom
# locations
source $ZSH/oh-my-zsh.sh
# macOS mangles PATH with /etc/paths things so clean up the mess
# Linux needs this too because the computation is run only once
# NOTE(review): __xen0n_ORIG_PATH is presumably saved by ~/.zshrc.flavor or
# an earlier startup file — it is not set anywhere in this file; confirm.
if [[ -n "$__xen0n_ORIG_PATH" ]]; then
  export PATH="$__xen0n_ORIG_PATH"
fi
# very big history
export HISTSIZE=1000000
export SAVEHIST=1000000
# set some alias
# this should go after oh-my-zsh initialization to override some of its
# alias choices
# Interactive prompts before clobbering/removing files.
alias cp="cp -i"
alias mv="mv -i"
alias rm="rm -i"
alias ls="ls --color=auto"
alias ll="ls -alF"
#alias grep="grep --color=auto"
#alias egrep="egrep --color=auto"
# Go build shortcuts.
alias goi="go install"
alias goiv="go install -v ./..."
# Push the current branch; git_current_branch is provided by oh-my-zsh.
alias gpfork='git push fork $(git_current_branch)'
alias gporig='git push origin $(git_current_branch)'
alias v=vim
if $is_work; then
  # SSH helpers that hop through the work jump host.
  alias qn="/usr/bin/ssh -o 'UseKeychain yes' jumpbox"
  alias qssh="/usr/bin/ssh -o 'UseKeychain yes' jumpbox -t qssh"
  alias fssh="/usr/bin/ssh -o 'UseKeychain yes' jumpbox -t fssh"
fi
if ! $is_work; then
  # Gentoo dev work
  alias pcs='pkgcheck scan'
  alias pdc='pkgdev commit'
fi
# Quick multi-level "cd up" shortcuts.
alias ....='cd ../../../'
alias .....='cd ../../../../'
alias ......='cd ../../../../../'
alias .......='cd ../../../../../../'
# language...
#LANG=zh_CN.UTF-8
# dir_colors
# Load LS_COLORS from the custom palette file.
eval `dircolors ~/.dir_colors`
# Detect term wrappers
# TODO: broken under macOS due to ps usage differences
if $is_linux; then
  # Identify the program that spawned this shell so TERM can be matched
  # to the terminal emulator in use.
  #WRAPPER_PID="$( ps -o ppid --no-headers | head -1 | tr -d "[:blank:]" )"
  WRAPPER_PID="$PPID"
  if [[ "x${WRAPPER_PID}" != "x" ]]; then
    # Look up the parent's command name (GNU ps flags — Linux-only).
    WRAPPER_PROGRAM="$( ps -p "${WRAPPER_PID}" -o comm --no-headers )" ;
  else
    WRAPPER_PROGRAM="unknown" ;
  fi
  # new version of tmux
  # Newer tmux shows its server process as "tmux: server"; normalize it.
  if [[ "x${WRAPPER_PROGRAM}" == "xtmux: server" ]]; then
    WRAPPER_PROGRAM="tmux";
  fi
  #echo $WRAPPER_PROGRAM
  # Substring-match terminator — presumably its process name varies; confirm.
  echo $WRAPPER_PROGRAM | grep 'terminator' > /dev/null && WRAPPER_PROGRAM="terminator"
  # set TERM variable
  if [[ "x${WRAPPER_PROGRAM}" == "xfbterm" ]]; then
    TERM=fbterm ;
  elif [[ "x${WRAPPER_PROGRAM}" == "xTerminal" ]]; then
    TERM=xterm-256color ;
  elif [[ "x${WRAPPER_PROGRAM}" == "xgnome-terminal" ]]; then
    TERM=xterm-256color ;
  elif [[ "x${WRAPPER_PROGRAM}" == "xterminator" ]]; then
    TERM=xterm-256color ;
  elif [[ "x${WRAPPER_PROGRAM}" == "xtmux" ]]; then
    TERM=screen-256color ;
  else
    # Unknown wrapper: leave TERM alone, just force an English UTF-8 locale.
    LANG="en_US.UTF-8" ;
    export LANG ;
  fi
  export TERM
  unset WRAPPER_PID WRAPPER_PROGRAM
fi
# other Linux-specific settings
if $is_linux; then
  #export _JAVA_OPTIONS='-Dswing.defaultlaf=com.sun.java.swing.plaf.gtk.GTKLookAndFeel'
  # wayland, according to dev-libs/weston::x11
  # Provide a private XDG runtime dir when the session manager did not.
  if [[ "x${XDG_RUNTIME_DIR}" == "x" ]]; then
    export XDG_RUNTIME_DIR=/tmp/.runtime-${USER}
    mkdir -p "${XDG_RUNTIME_DIR}"
    chmod 0700 "${XDG_RUNTIME_DIR}"
  fi
  # Native-Wayland hints for Firefox and Qt apps (Qt falls back to xcb).
  if [[ $XDG_SESSION_TYPE == wayland ]]; then
    export MOZ_ENABLE_WAYLAND=1
    export QT_QPA_PLATFORM='wayland;xcb'
  fi
fi
if ! $is_work; then
  # added by travis gem
  # only source if installed
  if [[ -e ~/.travis/travis.sh ]]; then
    source ~/.travis/travis.sh
  fi
  # local NPM config
  if [[ -e ~/.config-local-npm.sh ]]; then
    source ~/.config-local-npm.sh
  fi
  # AFDKO
  if [[ -e ~/.config-afdko.sh ]]; then
    source ~/.config-afdko.sh
  fi
fi
# macOS-specific settings
if $is_darwin; then
  # Fetch Homebrew bottles from the USTC mirror.
  export HOMEBREW_BOTTLE_DOMAIN=https://mirrors.ustc.edu.cn/homebrew-bottles
  # install iTerm2 shell integration only if running in it
  iterm2_integration_path=~/.iterm2_shell_integration.sh
  if [[ -e $iterm2_integration_path && "x$TERM_PROGRAM" == "xiTerm.app" ]]; then
    source $iterm2_integration_path
  fi
  # homebrew ssh-agent
  export SSH_AUTH_SOCK=~/.ssh-agent.sock
fi
# work-specific settings
# actually let's just enable them if installed, regardless of environment
if [[ -e ~/.gvm/scripts/gvm ]]; then
  # gvm
  source ~/.gvm/scripts/gvm
fi
# direnv
if command -v direnv > /dev/null 2>&1; then
  eval "$(direnv hook zsh)"
fi
# nvm: prefer a user-local install, fall back to the Homebrew location.
if [[ -e ~/.nvm/nvm.sh ]]; then
  export NVM_DIR="$HOME/.nvm"
  . "${NVM_DIR}/nvm.sh"
elif [[ -s "/usr/local/opt/nvm/nvm.sh" ]]; then
  export NVM_DIR="$HOME/.nvm"
  . "/usr/local/opt/nvm/nvm.sh"
fi
# Rust toolchain downloads via the TUNA mirror.
export RUSTUP_DIST_SERVER=https://mirrors.tuna.tsinghua.edu.cn/rustup
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
#   export EDITOR='vim'
# else
#   export EDITOR='mvim'
# fi
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# ssh
# export SSH_KEY_PATH="~/.ssh/dsa_id"
# utils for work
if $is_work; then
  . ~/.worklib.sh
fi
# qshell completion
if command -v qshell > /dev/null 2>&1; then
  eval "$(qshell completion zsh)"
fi
# Completion marker — presumably checked by other scripts; confirm consumers.
export __xen0n_zshrc_done=x
| true |
e1e514f61f2669019517b767c6ee5831ddfe2406 | Shell | s-vde/llvm-pass-development-scripts | /clone_and_install_llvm.sh | UTF-8 | 1,426 | 3.671875 | 4 | [] | no_license | #!/bin/sh
# clone_and_install_llvm.sh — clone (or update) LLVM and its subprojects
# (clang, compiler-rt, libc++, libc++abi) from the llvm.org git mirrors,
# then configure and build everything with CMake.
#
# Usage: clone_and_install_llvm.sh <prefix> <branch>
#   prefix  base directory; sources land in <prefix>/llvm-repo/src,
#           build artifacts in <prefix>/llvm-repo/build
#   branch  git branch to check out in every repository

# Plain assignments replace the original `var=\`echo $N\`` forms, which
# spawned a useless subshell and word-split the argument.
prefix=$1
branch=$2
here=$(pwd)

llvm_base=${prefix}/llvm-repo
llvm_src=${llvm_base}/src
llvm_projects_src=${llvm_src}/projects
llvm_build=${llvm_base}/build

# source locations of subprojects
clang_src=${llvm_src}/tools/clang
compiler_rt_src=${llvm_projects_src}/compiler-rt
libcxx_src=${llvm_projects_src}/libcxx
libcxxabi_src=${llvm_projects_src}/libcxxabi

llvm_git=http://llvm.org/git

echo "===== Building branch ${branch} of LLVM in base dir ${llvm_base} from ${llvm_git}"

test -d "${llvm_base}" || mkdir -p "${llvm_base}"
test -d "${llvm_build}" || mkdir "${llvm_build}"

#
# clone_or_update_repository(repo_name, repo_location)
#   Clone the repository if it is not present yet, then check out and pull
#   the requested branch.
#
clone_or_update_repository() {
	repo_name=$1
	repo_location=$2
	# $'\n' is a bashism that prints literally under /bin/sh (dash);
	# printf produces the intended blank line portably.
	printf '\nCloning or Updating %s\n' "${repo_name}"
	test -d "${repo_location}" || git clone "${llvm_git}/${repo_name}" "${repo_location}"
	# If the clone failed the directory is still missing; bail out instead
	# of running git checkout/pull in whatever directory we happen to be in.
	cd "${repo_location}" || return 1
	git checkout "${branch}"
	git pull origin "${branch}"
	cd "${here}"
}

clone_or_update_repository llvm "${llvm_src}"
clone_or_update_repository clang "${clang_src}"
clone_or_update_repository compiler-rt "${compiler_rt_src}"
clone_or_update_repository libcxx "${libcxx_src}"
clone_or_update_repository libcxxabi "${libcxxabi_src}"

echo "=== Making"
cd "${llvm_build}" || exit 1
cmake ../src -DLLVM_ENABLE_RTTI=ON -DLLVM_ENABLE_ASSERTIONS=ON -DLLVM_ENABLE_EH=ON -DLLVM_TARGETS_TO_BUILD=all
cmake --build .
#cmake --build . --target install
cd "${here}"
| true |
e0811077641ce5b53f1a10c9e237c0861372f733 | Shell | hyphop/miZy-linux-kernel | /kernel_build_packages | UTF-8 | 1,472 | 3.3125 | 3 | [
"MIT"
] | permissive | #!/bin/sh
## hyphop ##
##
## kernel_build_packages — package kernel build outputs (raw Image, uImage
## variants, module archives, kernel configs) under
## /tmp/miZy.linux.kernel.<name>/ and write an md5sum manifest next to them.
##
## Usage: kernel_build_packages [name]   (name defaults to "default";
## ,config may also preset $name and must define $kernel_out)

. ./,config || exit 1

DIR=$(pwd)
# Everything below is packaged relative to the kernel output tree; abort if
# it is missing instead of silently archiving from the wrong directory.
cd "$kernel_out" || exit 1

[ "$name" ] || name="$1"
[ "$name" ] || name="default"

echo "pack as $name"

## meta
out=/tmp/miZy.linux.kernel.$name/miZy.linux.kernel.$name
d=$(dirname "$out")
[ -d "$d" ] || mkdir -p "$d"

m=$out.modules.meta.tar.gz

## remove stale artifacts from a previous run with the same name
for r in "$out".*; do
	[ -f "$r" ] && {
		rm "$r"
		echo "[i] remove $r" 1>&2
	}
done

echo "[i] $m" 1>&2
[ -f "$m" ] && rm "$m"
tar -czf "$m" \
	modules.builtin \
	modules.dep \
	modules.links \
	modules.list \
	modules.order

## raw image
m=$out.Image.gz
echo "[i] $m" 1>&2
[ -f "$m" ] && rm "$m"
gzip -c Image > "$m"

## packed uImage variants (one per compression scheme)
P="gzip lzma lzo none"
for p in $P; do
	f="uImage.$p"
	m="$out.$f"
	echo "[i] $m" 1>&2
	[ -f "$f" ] && {
		case $p in
		none)
			# the uncompressed uImage is shipped gzipped
			gzip -c "$f" > "$m.gz"
			;;
		*)
			cp "$f" "$m"
			;;
		esac
	}
done

## modules all
m=$out.modules.all.tar.gz
echo "[i] $m" 1>&2
[ -f "$m" ] && rm "$m"
tar -czf "$m" modules.all

## modules tree (firmware is excluded from this archive)
m=$out.modules.full.tar.gz
echo "[i] $m" 1>&2
[ -f "$m" ] && rm "$m"
tar --exclude "lib/firmware" -czf "$m" modules

## configs
m=$out.config
echo "[i] $m" 1>&2
cd "$DIR" || exit 1
cat bin/modules/lib/modules/*/source/.config > "$m"

## configs gz
m=$out.config.gz
echo "[i] $m" 1>&2
gzip -c bin/modules/lib/modules/*/source/.config > "$m"

## checksum manifest over everything produced above
d=$(dirname "$out")
b=$(basename "$out")
[ -f "$out.md5sum" ] && rm "$out.md5sum"
cd "$d" || exit 1
md5sum "$b"* > "$out.md5sum"
cd "$DIR" || exit 1
## ok
| true |
d5ef440ac4bdfd301d5e63a8ec0fa457332c9e31 | Shell | blinry/dotfiles | /.bin/audio-status | UTF-8 | 415 | 3.34375 | 3 | [] | no_license | #!/usr/bin/env bash
set -eu -o pipefail

# Status-bar helper (i3blocks-style output): line 1 full text, line 2 short
# text, line 3 color. Color encodes active output and mute state:
#   analog sink:          white (#ffffff) unmuted / grey (#777777) muted
#   otherwise (HDMI etc): blue (#8888ff) unmuted / dark blue (#333388) muted

volume=$(pamixer --get-volume)
echo "$volume%"
echo "$volume%"

# grep without -q on purpose: -q exits on the first match and, combined with
# pipefail, the producer's resulting SIGPIPE could turn a successful match
# into a failed pipeline. Plain grep consumes all input safely.
if pactl list sinks | grep "analog-stereo" > /dev/null; then
    # Analog output is on.
    if amixer get 'Master' | grep '\[on\]' > /dev/null; then
        echo "#ffffff"
    else
        echo "#777777"
    fi
else
    # Probably the HDMI output is on.
    if amixer get 'Master' | grep '\[on\]' > /dev/null; then
        echo "#8888ff"
    else
        echo "#333388"
    fi
fi
| true |
0c7bfa8230f696bae6e708dd5e3e8c307be62d41 | Shell | tomascarvalho/neocompiler-eco | /compilers/docker-compiler-go/compile.sh | UTF-8 | 504 | 2.65625 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# Compile a NEO smart contract (Go source) inside the container and emit a
# single JSON object on stdout:
#   { "output": <b64 compiler log>, "avm": <b64 of hex dump>, "abi": <b64 json> }
# Every embedded value is base64-encoded and newline-stripped so it is safe
# inside the hand-assembled double quotes.

# $COMPILECODE carries the base64-encoded contract source.
echo $COMPILECODE | base64 -d > /tmp/contract.go
echo -n "{ \"output\": \""
./bin/neo-go contract compile -i /tmp/contract.go -o /tmp/contract.avm > /tmp/output.txt 2> /tmp/output.err
# Compiler stdout+stderr together become the "output" field.
cat /tmp/output.txt /tmp/output.err | base64 | tr -d '\n'
echo -n "\", \"avm\": \""
# Bytecode is hex-dumped first, then base64-encoded; field is empty if the
# compile failed and no .avm was produced.
if [ -f /tmp/contract.avm ]; then
cat /tmp/contract.avm | xxd -p | base64 | tr -d '\n'
fi
echo -n "\", \"abi\":\""
if [ -f /tmp/contract.abi.json ]; then
cat /tmp/contract.abi.json | base64 | tr -d '\n'
fi
echo "\"}"
| true |
ab1ef5528c616b435ca05f0e2a74df7859d61e82 | Shell | yuksiy/system_backup_unix | /system_backup_snap_umount.sh | UTF-8 | 3,504 | 3.984375 | 4 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
# ==============================================================================
# Function
#   Get information on, unmount, and remove the LVM snapshots used by a
#   system backup run.
# Syntax
#   system_backup_snap_umount.sh {1|2}
#
# Copyright (c) 2006-2017 Yukio Shiiya
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
# ==============================================================================

######################################################################
# Basic settings
######################################################################
SCRIPT_FULL_NAME="$(realpath "$0")"
SCRIPT_ROOT="$(dirname "${SCRIPT_FULL_NAME}")"
SCRIPT_NAME="$(basename "${SCRIPT_FULL_NAME}")"

######################################################################
# Variable definitions
######################################################################
# User variables
. "/usr/local/etc/system_backup/env.sh"
# BUG FIX: the original ran `if [ $? -ne 0 ];then exit $?;fi`, but the test
# itself resets $?, so the script always exited 0. Capture the status first.
rc=$?
if [ $rc -ne 0 ];then exit $rc;fi

# System-environment dependent variables

# Internal variables
LVM_SNAPSHOT="/usr/local/sbin/lvm_snapshot.sh"

######################################################################
# Function definitions
######################################################################
SNAP_UMOUNT() {
	# Get snapshot information
	echo "-I スナップショットの情報取得の実行中..." | tee -a "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}"
	${LVM_SNAPSHOT} info "${!SNAP_FS_LIST_N}" >> "${SCRIPT_TMP_DIR}/${!SNAP_INFO_AFTER_LOG_N}" 2>&1
	if [ $? -ne 0 ];then return 1;fi

	# Unmount the snapshots
	echo "-I スナップショットのマウント解除の実行中..." | tee -a "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}"
	${LVM_SNAPSHOT} umount "${!SNAP_FS_LIST_N}" >> "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}" 2>&1
	if [ $? -ne 0 ];then return 1;fi

	# Remove the snapshots
	echo "-I スナップショットの削除の実行中..." | tee -a "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}"
	${LVM_SNAPSHOT} remove "${!SNAP_FS_LIST_N}" >> "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}" 2>&1
	if [ $? -ne 0 ];then return 1;fi
}

######################################################################
# Main routine
######################################################################
# Nothing to do unless a backup run is active and its temp dir exists.
if [ "${SYSTEM_BACKUP_RUN}" = "" ];then exit 0;fi
if [ ! -d "${SCRIPT_TMP_DIR}" ];then exit 0;fi

case $1 in
1|2)	N=$1;;
*)	exit 1;;
esac

# Names of the per-run variables, dereferenced below via bash's ${!var}.
SNAP_FS_LIST_N=SNAP_FS_LIST_${N}
SNAP_UMOUNT_LOG_N=SNAP_UMOUNT_LOG_${N}
SNAP_INFO_AFTER_LOG_N=SNAP_INFO_AFTER_LOG_${N}

# Remove the previous SNAP_UMOUNT_LOG_N
if [ -f "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}" ];then rm -f "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}";fi
# Remove the previous SNAP_INFO_AFTER_LOG_N
if [ -f "${SCRIPT_TMP_DIR}/${!SNAP_INFO_AFTER_LOG_N}" ];then rm -f "${SCRIPT_TMP_DIR}/${!SNAP_INFO_AFTER_LOG_N}";fi

# Display the start message
echo "-I スナップショットの情報取得・マウント解除・削除を開始します" | tee -a "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}"

# Get information on / unmount / remove the snapshots
SNAP_UMOUNT
if [ $? -ne 0 ];then
	echo "-E スナップショットの情報取得・マウント解除・削除が異常終了しました" | tee -a "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}"
	exit 1
else
	echo "-I スナップショットの情報取得・マウント解除・削除が正常終了しました" | tee -a "${SCRIPT_TMP_DIR}/${!SNAP_UMOUNT_LOG_N}"
	exit 0
fi
| true |
3b6e3228aa14dc7ee9666b1a592a8ec676c438c4 | Shell | productionwentdown/android-builder | /release | UTF-8 | 1,430 | 3.8125 | 4 | [
"MIT"
] | permissive | #!/bin/sh
set -e

# Build an unsigned release APK with gradle, then zipalign and sign it
# manually with apksigner.
# Required env vars (CI secrets): STORE_FILE (hex-encoded keystore),
# STORE_PASSWORD, KEY_ALIAS.

BUILT_APK=none
ALIGNED_APK=app/build/outputs/apk/release/app-release-unsigned-aligned.apk
SIGNED_APK=app/build/outputs/apk/release/app-release.apk

# Ensure directory exists
mkdir -p app/build/outputs/apk/release/

# Keystore material is decoded into a private temp dir for this run.
TEMP_DIR="$(mktemp -d)"
TEMP_STORE_FILE="$TEMP_DIR/store.jks"

if [ -z "$STORE_FILE" ] || [ -z "$STORE_PASSWORD" ] || [ -z "$KEY_ALIAS" ]; then
	echo "STORE_FILE, STORE_PASSWORD or KEY_ALIAS are not configured secrets. Aborting..."
	exit 1
fi

echo
echo "Reading STORE_FILE from environment"
# STORE_FILE holds the keystore as a hex dump; xxd -r reverses it.
echo "$STORE_FILE" | xxd -ps -r > "$TEMP_STORE_FILE"

# The following file is not required for the following process
# but I'm gonna leave it here anyway
# BUG FIX: this was `echo > keystore.properties << EOF`; echo ignores stdin,
# so the here-document was discarded and the file held only a newline.
# `cat` actually writes the content.
cat > keystore.properties << EOF
storeFile=$TEMP_STORE_FILE
storePassword=$STORE_PASSWORD
keyAlias=$KEY_ALIAS
EOF

echo
echo "I think you're lazy so we will build a unsigned release APK and then sign it manually,"
echo "rather than using the gradle process."
echo
echo "Building unsigned release APK"
set -x
./gradlew assembleRelease
set +x
# NOTE(review): assumes exactly one release APK is produced; if several
# match, $BUILT_APK becomes a multi-line list and zipalign fails — confirm.
BUILT_APK=$(find . -name "*.apk" -path "*release*")

echo
echo "Doing zipalign"
set -x
zipalign -v -p 4 "$BUILT_APK" "$ALIGNED_APK"
set +x

echo
echo "Signing"
# NOTE(review): set -x traces this command line including the keystore
# password — consider apksigner's file-/stdin-based --ks-pass forms.
set -x
apksigner sign \
	--ks "$TEMP_STORE_FILE" \
	--ks-pass pass:"$STORE_PASSWORD" \
	--ks-key-alias "$KEY_ALIAS" \
	--out "$SIGNED_APK" \
	"$ALIGNED_APK"
set +x

echo
echo "Verifying"
set -x
apksigner verify "$SIGNED_APK"
set +x

echo "Done!"
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.