blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
e8c4fa6023f7dfc4b79dab73080d4d59501e934b
|
Shell
|
giangdo/su_dung_xterm
|
/.bashrc
|
UTF-8
| 2,845
| 3.078125
| 3
|
[] |
no_license
|
# Sample .bashrc for SuSE Linux
# Copyright (c) SuSE GmbH Nuernberg
# There are 3 different types of shells in bash: the login shell, normal shell
# and interactive shell. Login shells read ~/.profile and interactive shells
# read ~/.bashrc; in our setup, /etc/profile sources ~/.bashrc - thus all
# settings made here will also take effect in a login shell.
#
# NOTE: It is recommended to make language settings in ~/.profile rather than
# here, since multilingual X sessions would not work properly if LANG is over-
# ridden in every subshell.
# Some applications read the EDITOR variable to determine your favourite text
# editor. So uncomment the line below and enter the editor of your choice :-)
#export EDITOR=/usr/bin/vim
#export EDITOR=/usr/bin/mcedit
# For some news readers it makes sense to specify the NEWSSERVER variable here
#export NEWSSERVER=your.news.server
# If you want to use a Palm device with Linux, uncomment the two lines below.
# For some (older) Palm Pilots, you might need to set a lower baud rate
# e.g. 57600 or 38400; lowest is 9600 (very slow!)
#
#export PILOTPORT=/dev/pilot
#export PILOTRATE=115200
# Load user-defined aliases if ~/.alias exists and is non-empty; '|| true'
# keeps the line from reporting failure when the file is absent.
test -s ~/.alias && . ~/.alias || true
# SSH / SFTP / SSHFS shortcuts for the "mini" VM (VirtualBox host-only address).
alias mini="ssh -X giang@192.168.56.101"
alias miniSftp="sftp giang@192.168.56.101"
alias miniSshfs="sshfs giang@192.168.56.101:/home/giang ~/mini"
alias miniUmount="fusermount -u ~/mini"
alias syncLocal2mini="~/w/script/syncLocal2mini.sh"
# Aliases for grepping through source code (wrap helper scripts).
alias grep_code="~/w/script/grep_code.sh"
alias grepi_code="~/w/script/grepi_code.sh"
# Alias to (re)generate the cscope tags file.
alias cscopeGenerate="~/w/script/cscopeGenerate.sh"
# Aliases wrapping the tar compress/extract helper scripts.
alias tarCompress="~/w/script/tarCompress.sh"
alias tarExtract="~/w/script/tarExtract.sh"
# Prefer vim everywhere; gvim runs vim with its GUI.
export EDITOR=vim
alias vi="vim"
alias gvim="vim -g"
# Shortcut for task warrior.
alias t="task"
# Shortcut for jrnl.
alias j="jrnl"
# alias for cd ..: go up 2 directory -> cd ...; go up 1 directory cd ..; go up 3 directory ....
cd() { if [[ "$1" =~ ^\.\.+$ ]];then local a dir;a=${#1};while [ $a -ne 1 ];do dir=${dir}"../";((a--));done;builtin cd $dir;else builtin cd "$@";fi ;}
# TERMINAL COLORS
# Advertise 256-color capability to applications.
TERM=xterm-256color
# Colors of the bash prompt ("Solarized" palette)
# (http://www.grison.me/2011/11/20/Unix-Prompt)
# The \[ \] wrappers tell readline the escape sequences are zero-width.
Color_Off='\[\e[0m\]' # Text Reset
White='\[\e[0;37m\]' # White
BPurple='\[\e[1;35m\]' # Purple
Green='\[\e[0;32m\]' # Green
Blue='\[\e[0;34m\]' # Blue
Yellow='\[\e[0;33m\]' # Yellow
Purple='\[\e[0;35m\]' # Purple
BWhite='\[\e[1;37m\]' # White
bashName="me"
PS1="$BWhite$bashName:$Yellow\w$Yellow$White\$ $Color_Off"
# Color of ls: load the custom dircolors database only when it is present,
# so this rc file does not print errors on machines without it.
if [ -r "$HOME/.dircolors/dircolors.256dark" ]; then
    eval "$(dircolors "$HOME/.dircolors/dircolors.256dark")"
fi
alias ls="ls --color"
# WORKING WITH TMUX
# Disable XON/XOFF flow control so Ctrl-S does not freeze the pane.
# Only touch stty when stdin is actually a terminal.
[ -t 0 ] && stty -ixon
| true
|
8dbf73d38c41a9199b77e75023e328281d8adfbb
|
Shell
|
heiqiu2017/-Appium-
|
/start.sh
|
UTF-8
| 197
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Run the Appium test suite, mirroring output to test.log.
# Failure screenshots are collected under ./errorScreenShot.

# Remove the previous run's log; -f keeps this quiet on the very first run.
rm -f test.log

if [ -d ./errorScreenShot ]
then
    echo "errorScreenShot directory is exist !"
else
    mkdir errorScreenShot
fi

# Clear screenshots from the previous run; -f tolerates an empty directory
# (without it, an unmatched *.png glob makes rm print an error).
rm -f ./errorScreenShot/*.png

# Run the tests; tee writes the output to both the console and test.log.
python startTest.py | tee test.log
| true
|
1155f657bc57eb9dbc3cfed810d93becf7676f20
|
Shell
|
FengyunPan/istio
|
/broker/bin/linters.sh
|
UTF-8
| 1,223
| 2.875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Run all project linters: buildifier over the Bazel files, gometalinter over
# the Go sources, then the license and workspace checks.
set -ex

# Absolute directory containing this script; resolves a relative $0 and -P
# yields the physical (symlink-free) path.
SCRIPTPATH=$( cd "$(dirname "$0")" ; pwd -P )

"$SCRIPTPATH/install_linters.sh"
"$SCRIPTPATH/init.sh"

# Style-check (do not rewrite) every Bazel build file in the tree.
# NOTE(review): the unquoted $(find ...) relies on word-splitting to pass one
# path per argument; paths containing whitespace would break — assumed absent
# in a Bazel tree.
buildifier -showlog -mode=check $(find . -type f \( -name 'BUILD' -or -name 'WORKSPACE' -or -wholename '.*bazel$' -or -wholename '.*bzl$' \) -print )

# Parallelize gometalinter across all online CPUs.
NUM_CPU=$(getconf _NPROCESSORS_ONLN)

echo "Start running linters"
# Each continuation line starts with a space so the arguments stay separate
# after backslash-newline joining.
gometalinter --concurrency="${NUM_CPU}" --enable-gc --deadline=300s --disable-all \
  --enable=aligncheck \
  --enable=deadcode \
  --enable=errcheck \
  --enable=gas \
  --enable=goconst \
  --enable=gofmt \
  --enable=goimports \
  --enable=golint \
  --min-confidence=0 \
  --enable=gotype \
  --enable=ineffassign \
  --enable=interfacer \
  --enable=lll --line-length=120 \
  --enable=megacheck \
  --enable=misspell \
  --enable=structcheck \
  --enable=unconvert \
  --enable=varcheck \
  --enable=vet \
  --enable=vetshadow \
  --exclude=vendor \
  --exclude=.*.pb.go \
  --exclude=.*.gen.go \
  --exclude=.*_test.go \
  --exclude=mock_.* \
  --exclude=pkg/testing \
  --exclude="should have a package comment" \
  ./...
# Disabled linters:
# --enable=dupl
# --enable=gocyclo
# --cyclo-over=15
echo "Done running linters"

"$SCRIPTPATH/check_license.sh"
"$SCRIPTPATH/check_workspace.sh"
echo "Done checks"
| true
|
1d31773c5327a759190b6f0bd99e5d1a36aba5df
|
Shell
|
NOAA-AOML/GPLOT
|
/shell/get_ATCF.sh
|
UTF-8
| 3,720
| 4.0625
| 4
|
[] |
no_license
|
#!/bin/sh
# This script grabs decks from the NHC ATCF.
# Options include:
#   DECK_TYPE: realtime --> Download all Decks from the real-time directories.
#                           Includes subdirectories "aid_public" for A-Decks
#                           and "btk" for B-Decks.
#              archive  --> Download all Decks from the archive directories.
#                           Must set "DECK_YR" to point to the correct ATCF
#                           subdirectory.
#   ADECK_ON:  0 --> Do NOT download A-Decks.
#              1 --> Download A-Decks. Subdirectory is "adeck/NHC".
#   BDECK_ON:  0 --> Do NOT download B-Decks.
#              1 --> Download B-Decks. Subdirectory is "bdeck".
#   ODIR:      /PATH/ABOVE/DECK/DIRECTORIES
#   DECK_ID:   al012018 --> Optional input to download only specific Decks.
#              Format is BBSSYYYY, where BB is the two-character basin, SS is
#              the two-digit Storm ID, and YYYY is the four-digit year.
#              Can be set to "ALL".
#   DECK_YR:   2018 --> Optional input to download only archived Decks from
#              specified years. Wildcards may be used but it is advised to
#              download only 1 year at a time.
#
# Example invocations:
#   ./get_ATCF.sh realtime 1 1 /lfs1/projects/hur-aoml/Ghassan.Alaka/ al012018
#   ./get_ATCF.sh archive 1 1 /lfs1/projects/hur-aoml/Ghassan.Alaka/ ALL 2018
#
# NOTE: comparisons use POSIX '=' (not the bash-only '==') because this
# script runs under /bin/sh.

# 1. Get input arguments
DECK_TYPE="$1"
ADECK_ON="$2"
BDECK_ON="$3"
ODIR="$4"
DECK_ID="$5"
DECK_YR="$6"

# Validate DECK_TYPE; archives additionally require DECK_YR.
if [ "$DECK_TYPE" != "realtime" ] && [ "$DECK_TYPE" != "archive" ]; then
  echo "ERROR: DECK_TYPE = ${DECK_TYPE}"
  echo "ERROR: DECK_TYPE must be 'realtime' or 'archive'."
  exit 1
elif [ "$DECK_TYPE" = "realtime" ]; then
  echo "MSG: Downloading real-time Decks from NHC ATCF."
elif [ "$DECK_TYPE" = "archive" ]; then
  echo "MSG: Downloading archived Decks from NHC ATCF."
  if [ -z "$DECK_YR" ]; then
    echo "ERROR: DECK_YR must be specified to download archived Decks."
    exit 1
  fi
fi

# "ALL" means no storm filter: match every deck file.
if [ "$DECK_ID" = "ALL" ]; then
  DECK_ID="*"
fi

# 2. Get A-Decks
if [ "$ADECK_ON" = "1" ]; then
  echo "MSG: Downloading A-Decks."
  TMP_DIR="TMP.$(date +%N)"
  mkdir -p "$ODIR/adeck/NHC"
  mkdir -p "$ODIR/adeck/$TMP_DIR"
  # Abort if the deck directory is unreachable, so the globs and the final
  # rm -rf below never run in the wrong working directory.
  cd "$ODIR/adeck" || exit 1
  if [ "$DECK_TYPE" = "realtime" ]; then
    FTP_DIR="ftp.nhc.noaa.gov/atcf/aid_public/"
  elif [ "$DECK_TYPE" = "archive" ]; then
    FTP_DIR="ftp.nhc.noaa.gov/atcf/archive/${DECK_YR}/"
  fi
  # Mirror matching compressed A-Deck files (wget recreates ./$FTP_DIR).
  # The deck path globs below are intentionally unquoted so they expand.
  wget -r -l1 -A "a*${DECK_ID}*dat.gz" -T 5 -N "ftp://${FTP_DIR}" #>/dev/null 2>&1
  cp -p ${FTP_DIR}a*${DECK_ID}*dat.gz "$TMP_DIR"/.
  gunzip -f $TMP_DIR/a*${DECK_ID}*dat.gz #>/dev/null 2>&1
  cp -p -u $TMP_DIR/a*${DECK_ID}*dat NHC/. #>/dev/null 2>&1
  rm -rf "$TMP_DIR"
fi

# 3. Get B-Decks
if [ "$BDECK_ON" = "1" ]; then
  echo "MSG: Downloading B-Decks."
  TMP_DIR="TMP.$(date +%N)"
  mkdir -p "$ODIR/bdeck/$TMP_DIR"
  cd "$ODIR/bdeck" || exit 1
  if [ "$DECK_TYPE" = "realtime" ]; then
    # Real-time B-Decks are served uncompressed.
    FTP_DIR="ftp.nhc.noaa.gov/atcf/btk/"
    wget -r -l1 -A "b*${DECK_ID}*dat" -T 5 -N "ftp://${FTP_DIR}" #>/dev/null 2>&1
    cp -p -u ${FTP_DIR}b*${DECK_ID}*dat . #>/dev/null 2>&1
  elif [ "$DECK_TYPE" = "archive" ]; then
    # Archived B-Decks are gzipped and need an unpack step.
    FTP_DIR="ftp.nhc.noaa.gov/atcf/archive/${DECK_YR}/"
    wget -r -l1 -A "b*${DECK_ID}*dat.gz" -T 5 -N "ftp://${FTP_DIR}" #>/dev/null 2>&1
    cp -p ${FTP_DIR}b*${DECK_ID}*dat.gz "$TMP_DIR"/.
    gunzip -f $TMP_DIR/b*${DECK_ID}*dat.gz #>/dev/null 2>&1
    cp -p -u $TMP_DIR/b*${DECK_ID}*dat . #>/dev/null 2>&1
  fi
  rm -rf "$TMP_DIR"
fi
| true
|
b87bd8e0e6c71bbd1f11a4a76ffe3cc5a98507c5
|
Shell
|
turnopil/DevOps
|
/packer-centos-7/scenario.sh
|
UTF-8
| 3,336
| 3.625
| 4
|
[] |
no_license
|
# Make sure we run with root privileges
if [ "$UID" != 0 ];
then
    # Not root: re-run this script under sudo, forwarding all arguments.
    # "$@" (not $*) preserves arguments that contain whitespace.
    echo "This script needs root privileges, rerunning it now using sudo!"
    sudo "${SHELL}" "$0" "$@"
    exit $?
fi

# Determine the real (invoking) username, even when running under sudo.
if [ "$UID" = 0 ] && [ -n "$SUDO_USER" ];
then
    USER="$SUDO_USER"
else
    USER="$(whoami)"
fi

# Update system
echo "Updating system...Please wait 5-10 minutes. There is some problems with repo"
yum update -y --nogpgcheck >/dev/null 2>&1
# yum install -y maven > /dev/null 2>&1

# MySQL section
echo "Starting mysql-server"
service mysqld start
# Word-splitting of the unquoted substitution collapses the status output
# onto a single line; this is intentional.
echo $(service mysqld status | grep active)
# MySQL 5.7 writes an initial root password into the log; extract it.
DATABASE_PASS=$(grep 'temporary password' /var/log/mysqld.log | cut -d ":" -f 4 | cut -d ' ' -f 2)

# Secure_installation_script automation mysql 5.7
echo "Secure_installation_script automation (for MySQL 5.6 only)"
SqlVersion=5.7
# NOTE(review): the awk -F'.21,' separator looks tied to one specific
# "mysql --version" output format — confirm against the installed build.
SqlVersionCurrent=$(mysql --version | awk '{ print $5 }' | awk -F\.21, '{ print $1 }')
# Set the root password for MySQL.
mysqladmin --user=root --password="$DATABASE_PASS" password "$DATABASE_PASS"
if [ "$SqlVersionCurrent" = "$SqlVersion" ]
then
    mysql -u root -p"$DATABASE_PASS" -e "FLUSH PRIVILEGES"
    echo "MySQL version > 5.6"
else
    # Older servers: drop remote root access, anonymous users and test DBs
    # (the manual steps of mysql_secure_installation).
    mysql -u root -p"$DATABASE_PASS" -e "DELETE FROM mysql.user WHERE User='root' AND Host NOT IN ('localhost', '127.0.0.1', '::1')"
    mysql -u root -p"$DATABASE_PASS" -e "DELETE FROM mysql.user WHERE User=''"
    mysql -u root -p"$DATABASE_PASS" -e "DELETE FROM mysql.db WHERE Db='test' OR Db='test\_%'"
    mysql -u root -p"$DATABASE_PASS" -e "FLUSH PRIVILEGES"
fi

# Create DB
echo "Creating databese: tmw and user: tmw"
mysql -u root -p"$DATABASE_PASS" -e "CREATE DATABASE tmw DEFAULT CHARSET = utf8 COLLATE = utf8_unicode_ci;"
# Create a new user with same name as new DB
mysql -u root -p"$DATABASE_PASS" -e "GRANT ALL ON tmw.* TO 'tmw'@'localhost' IDENTIFIED BY '$DATABASE_PASS';"
mysql -u root -p"$DATABASE_PASS" -e "FLUSH PRIVILEGES"

# GIT section
echo "Git clone application..."
cd /opt || exit 1
git clone https://github.com/if-078/TaskManagmentWizard-NakedSpring-.git > /dev/null 2>&1
# Abort if the clone failed so the SQL imports below never run elsewhere.
cd TaskManagmentWizard-NakedSpring-/src/test/resources || exit 1

# Import settings from application to MySQL database
echo "Set settings to MySQL tmw DATABASE tables"
mysql -u tmw -p"$DATABASE_PASS" tmw <create_db.sql
mysql -u tmw -p"$DATABASE_PASS" tmw <set_dafault_values.sql

# Write database login & passwd to application config file
# Script was stolen from teammate :)
cd /opt/TaskManagmentWizard-NakedSpring- || exit 1
chmod -R 755 .
MCONF=src/main/resources/mysql_connection.properties
sed -i 's/jdbc.username=root/jdbc.username=tmw/g' "$MCONF"
sed -i 's/jdbc.password=root/jdbc.password='"$DATABASE_PASS"'/g' "$MCONF"
echo "Setup complete!"

# Add tcp 8585 port to firewall
echo "Allow 8585 port"
systemctl start firewalld
firewall-cmd --zone=public --add-port=8585/tcp --permanent
firewall-cmd --reload
# Word-splitting intentionally flattens the matching iptables rules.
echo $(iptables -S | grep 8585)

# Run application
echo "Run WAR application"
mvn tomcat7:run-war
| true
|
2e5fc7a92a56708bb96c077a1fc55653cd7ff296
|
Shell
|
fengjixuchui123/SourecCoding
|
/maintenanceToolKit/src/main/resources/bin/collectLog.sh
|
UTF-8
| 847
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash
# Collect the HPBA's logs into $HPBA_HOME/maintenance/log as compressed
# archives, then bundle everything into allLogs.tar.
# logsConfig.sh defines HPBA_HOME and LOGS_INFO, a comma-separated list of
# entries of the form "logPath=tarFile=folderName".
source ./logsConfig.sh

# Create the folder that will hold the collected logs.
# (-d tests directory existence; the original -x tested executability.)
Logs_Path=$HPBA_HOME/maintenance/log
allLog_Path=$HPBA_HOME/maintenance
if [ ! -d "$Logs_Path" ]
then
    mkdir -p "$Logs_Path"
fi

# Collect the logs into "maintenance/log" and compress them.
# To uncompress allLogs.tar later, use tar's -P option (absolute paths).
export Logs_Path
IFS=','
for log_info in $LOGS_INFO
do
    # Split one "logPath=tarFile=folderName" entry into its three fields.
    logPath=$(echo "$log_info" | awk -F'=' '{print $1}')
    tarFile=$(echo "$log_info" | awk -F'=' '{print $2}')
    folderName=$(echo "$log_info" | awk -F'=' '{print $3}')
    mkdir -p "$folderName"
    # $logPath is left unquoted on purpose: the config may contain glob
    # patterns that must expand here — TODO confirm against logsConfig.sh.
    cp -r $logPath "./$folderName"
    tar -czf "$Logs_Path/$tarFile" "$folderName" --remove-files
done
unset IFS

tar -czPf "$allLog_Path/allLogs.tar" "$Logs_Path"
mv "$allLog_Path/allLogs.tar" "$Logs_Path"
| true
|
069b966f2b7517de34a5612b1a41c3bb72aeb165
|
Shell
|
amisr/overspread
|
/src/Madrigal/cgi-bin/looker.cgi
|
UTF-8
| 44,801
| 3
| 3
|
[] |
no_license
|
#!/bin/sh
# The madtclsh path is longer than 32 characters. So, we take advantage
# of the fact that a backslash continues a comment line in tcl \
exec /Users/mnicolls/Documents/Work/Madrigal/bin/madtclsh "$0" ${1+"$@"}
# $Id: looker.cgi,v 1.11 2008/07/29 15:47:04 brideout Exp $
lappend auto_path /Users/mnicolls/Documents/Work/Madrigal/madtcllib
package require cgi
package require mtl
# sendTopForm --
#   Emit the Looker landing page: a single form of radio buttons
#   (option=1..8) that selects which coordinate-computation mode the user
#   will configure on the next page. htmlstyle is the attribute string
#   passed through to cgi_body.
proc sendTopForm { htmlstyle } {
global madroot option
cgi_html {
cgi_head {
cgi_title "Looker"
}
cgi_body $htmlstyle {
cgi_center {
cgi_h1 "Looker"
}
}
cgi_form looker {
cgi_hr
# Options 1-2: az/el/range to a grid of points.
cgi_h2 "Compute azimuth, elevation and range to a specified
array of points. Select one of the following options for
specifying the points."
cgi_radio_button option=1 checked
cgi_put "Geodetic latitude, longitude and altitude of the points"
cgi_br
cgi_radio_button option=2
cgi_put "Apex latitude, longitude and altitude of the points"
cgi_hr
cgi_br
# Options 3-4: geographic/geomagnetic coordinates of a grid of points.
cgi_h2 "Compute geographic and geomagnetic coordinates of a specified
array of points."
cgi_h4 "Select one of the following options for specifying the points:"
cgi_radio_button option=3
cgi_put "Geodetic latitude, longitude and altitude of the points"
cgi_br
cgi_radio_button option=4
cgi_put "Azimuth, elevation and range of the points from a specified instrument (output includes aspect angle)"
cgi_br
cgi_hr
# Options 5-7: points along a geomagnetic field line.
cgi_h2 "Compute azimuth, elevation and range to points
along a geomagnetic field line. Select one of the following
three options for specifying the field line."
cgi_radio_button option=6
cgi_put "Geodetic latitude, longitude, altitude of a point
on the field line"
cgi_br
cgi_radio_button option=5
cgi_put "Azimuth, elevation, range from specified instrument"
cgi_br
cgi_radio_button option=7
cgi_put "Apex latitude, longitude of the field line"
cgi_br
cgi_hr
# Option 8: magnetic-conjugate parameters.
cgi_h2 "Compute point/magnetic conjugate parameters of a specified
array of points."
cgi_radio_button option=8
cgi_put "Geodetic latitude, longitude and altitude of the points"
cgi_br
cgi_hr
cgi_center {
cgi_submit_button "=Specify input parameters"
}
}
}
}
# sendSelectionForm --
#   Emit the second page: dispatch on the global "option" chosen on the
#   landing page and render the matching parameter-entry form
#   (sendForm1..sendForm8). Options 1-7 post back to this "looker" script;
#   option 8 posts to the separate madRecDisplay script via GET.
proc sendSelectionForm { htmlstyle } {
global madroot option
cgi_html {
cgi_head {
cgi_title "Looker"
}
cgi_body $htmlstyle {
if {$option < 8} {
cgi_form looker {
if {$option == 1} {
sendForm1 $htmlstyle
} elseif {$option == 2} {
sendForm2 $htmlstyle
} elseif {$option == 3} {
sendForm3 $htmlstyle
} elseif {$option == 4} {
sendForm4 $htmlstyle
# Points on field line given az, el, range
} elseif {$option == 5} {
sendForm5 $htmlstyle
} elseif {$option == 6} {
sendForm6 $htmlstyle
} elseif {$option == 7} {
sendForm7 $htmlstyle
}
cgi_center {
cgi_submit_button "=List selected data"
}
}
} elseif {$option == 8} {
cgi_form madRecDisplay method=get {
sendForm8 $htmlstyle
cgi_center {
cgi_submit_button "=List selected data"
}
}
} else {
cgi_p "Illegal option number - $option"
}
}
}
}
# sendForm1 --
#   Option 1 form: az/el/range to a geodetic lat/lon/alt grid.
#   Renders the instrument picker, the output grid (fields p1..p9:
#   lat/lon/alt start/end/delta), manual instrument coordinates (s1..s3)
#   and the IGRF epoch field (tm, defaulted to the current fractional year).
proc sendForm1 { htmlstyle } {
global madroot option mne instName instLat instLon instAlt
set option 1
cgi_center {
cgi_h1 "Geographic and Geomagnetic Coordinates vs Azimuth, Elevation, Range"
}
cgi_puts "Select an instrument, either by name or by selecting
\"Enter instrument coordinates\" and then entering the
latitude, longitude and altitude of the instrument. Then
specify the latitude/longitude/altitude grid for which
look direction and range will be calculated.
The epoch for which the IGRF geomagnetic coordinates will be
calculated may also be specified."
cgi_br
cgi_puts [cgi_italic "Note: The geomagnetic field computations sometimes
fail, usually for points near the pole or equator. Usually this
is obvious, but apply common sense in using the results
returned by this program."]
cgi_br
cgi_text option=$option type=hidden
# Top-level 2x2 Table
cgi_table cellpadding=10 align=center {
# First row of top-level table
cgi_table_row {
# Row 1 column 1
cgi_table_data {
# Instrument selection box table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Name"
}
cgi_table_row valign=center {
cgi_table_data {
cgi_select station size=10 {
cgi_option "Enter Instrument Coordinates" value=0
# mne is a flat code/name list; step by 2 over the codes and
# list only instruments with plausible coordinates.
for {set i 0} {$i < [llength $mne]} {set i [expr $i+2]} {
set code [lindex $mne $i]
if {$instLat($code) > -90.0 && \
$instLon($code) > -360.0 && \
$instAlt($code) < 100.0} {
if {$code == 30} {
cgi_option $instName($code) value=$code "selected"
} else {
cgi_option $instName($code) value=$code
}
}
}
}
}
}
}
}
# Row 1 column 2
cgi_table_data {
# Output specification table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Output Table Grid"
}
cgi_table_row valign=center {
cgi_td align=center {}
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_td align=right {latitude}
cgi_table_data align=left {
cgi_text p1=30.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p2=50.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p3=10.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {longitude}
cgi_table_data align=left {
cgi_text p4=260.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p5=280.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p6=10.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {altitude}
cgi_table_data align=left {
cgi_text p7=100.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p8=500.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p9=100.0 size=6 maxlength=10
}
}
}
}
}
# Second row of top-level table
cgi_table_row {
# Row 2 column 1
cgi_table_data valign=top {
# Instrument coordinate specification
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {latitude}
cgi_td align=center {longitude}
cgi_td align=center {altitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text s1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s2=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s3=0.0 size=6 maxlength=10
}
}
}
}
# Row 2 column 2
cgi_table_data valign=top {
# Epoch specification
# Default the epoch to the current UTC time expressed as a
# fractional year (e.g. 2008.55).
set time [clock seconds]
set year [clock format $time -format "%Y" -gmt 1]
set byear [clock scan "January 1, $year GMT"]
set eyear [clock scan "December 31, $year GMT"]
set ep [expr $year + 1.0*($time - $byear)/($eyear - $byear)]
set epoch [format "%7.2f" $ep]
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Geomagnetic field epoch"
}
cgi_table_row valign=center {
cgi_td align=center {year}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text tm=$epoch size=7 maxlength=10
}
}
}
}
}
}
# End of top-level table
}
# sendForm2 --
#   Option 2 form: az/el/range to an apex lat/lon + altitude grid.
#   Identical layout to sendForm1 except the grid rows are apex
#   latitude/apex longitude/altitude (fields p1..p9), plus instrument
#   coordinates (s1..s3) and the IGRF epoch field (tm).
proc sendForm2 { htmlstyle } {
global madroot option mne instName instLat instLon instAlt
set option 2
cgi_center {
cgi_h1 "Geographic and Geomagnetic Coordinates vs Azimuth, Elevation, Range"
}
cgi_puts "Select an instrument, either by name or by selecting
\"Enter instrument coordinates\" and then entering the
latitude, longitude and altitude of the instrument. Then
specify the apex latitude, apex longitude altitude grid for
which look direction and range will be calculated. The
epoch for which the IGRF geomagnetic coordinates will be
calculated may also be specified."
cgi_br
cgi_puts [cgi_italic "Note: The geomagnetic field computations sometimes
fail, usually for points near the pole or equator. Usually this
is obvious, but apply common sense in using the results
returned by this program."]
cgi_br
cgi_text option=$option type=hidden
# Top-level 2x2 Table
cgi_table cellpadding=10 align=center {
# First row of top-level table
cgi_table_row {
# Row 1 column 1
cgi_table_data {
# Instrument selection box table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Name"
}
cgi_table_row valign=center {
cgi_table_data {
cgi_select station size=10 {
cgi_option "Enter Instrument Coordinates" value=0
# Step by 2 over the flat code/name list; skip instruments
# with implausible coordinates.
for {set i 0} {$i < [llength $mne]} {set i [expr $i+2]} {
set code [lindex $mne $i]
if {$instLat($code) > -90.0 && \
$instLon($code) > -360.0 && \
$instAlt($code) < 100.0} {
if {$code == 30} {
cgi_option $instName($code) value=$code "selected"
} else {
cgi_option $instName($code) value=$code
}
}
}
}
}
}
}
}
# Row 1 column 2
cgi_table_data {
# Output specification table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Output Table Grid"
}
cgi_table_row valign=center {
cgi_td align=center {}
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_td align=right {apex latitude}
cgi_table_data align=left {
cgi_text p1=30.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p2=50.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p3=10.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {apex longitude}
cgi_table_data align=left {
cgi_text p4=260.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p5=280.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p6=10.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {altitude}
cgi_table_data align=left {
cgi_text p7=100.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p8=500.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p9=100.0 size=6 maxlength=10
}
}
}
}
}
# Second row of top-level table
cgi_table_row {
# Row 2 column 1
cgi_table_data valign=top {
# Instrument coordinate specification
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {latitude}
cgi_td align=center {longitude}
cgi_td align=center {altitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text s1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s2=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s3=0.0 size=6 maxlength=10
}
}
}
}
# Row 2 column 2
cgi_table_data valign=top {
# Epoch specification
# Default the epoch to the current UTC time as a fractional year.
set time [clock seconds]
set year [clock format $time -format "%Y" -gmt 1]
set byear [clock scan "January 1, $year GMT"]
set eyear [clock scan "December 31, $year GMT"]
set ep [expr $year + 1.0*($time - $byear)/($eyear - $byear)]
set epoch [format "%7.2f" $ep]
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Geomagnetic field epoch"
}
cgi_table_row valign=center {
cgi_td align=center {year}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text tm=$epoch size=7 maxlength=10
}
}
}
}
}
}
# End of top-level table
}
# sendForm3 --
#   Option 3 form: geomagnetic coordinates on a geodetic lat/lon/alt grid.
#   No instrument is involved, so station and s1..s3 are emitted as hidden
#   zero fields; the user only fills the grid (p1..p9) and epoch (tm).
proc sendForm3 { htmlstyle } {
# Examples:
# looker 3 2002.20 0 0 0 -90.0 90.0 45.0 0.0 360.0 90.0 0.0 600.0 200.0
# looker 3 2002.20 0 0 0 40.0 50.0 10.0 260.0 280.0 10.0 0.0 600.0 200.0
global madroot option mne instName instLat instLon instAlt
set option 3
cgi_center {
cgi_h1 "Geomagnetic Coordinates vs Latitude, Longitude, Altitude"
}
cgi_puts "Specify the latitude/longitude/altitude grid on which
geomagnetic coordinates will be calculated.
The epoch for which the IGRF geomagnetic coordinates will be
calculated may also be specified."
cgi_br
cgi_puts [cgi_italic "Note: The geomagnetic field computations sometimes
fail, usually for points near the pole or equator. Usually this
is obvious, but apply common sense in using the results
returned by this program."]
cgi_br
cgi_text option=$option type=hidden
# Placeholder instrument fields so the backend sees a full parameter set.
cgi_text station=0 type=hidden
cgi_text s1=0 type=hidden
cgi_text s2=0 type=hidden
cgi_text s3=0 type=hidden
cgi_table cellpadding=10 align=center {
cgi_table_row {
cgi_table_data {
# Output specification table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Output Table Grid"
}
cgi_table_row valign=center {
cgi_td align=center {}
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_td align=right {latitude}
cgi_table_data align=left {
cgi_text p1=-90.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p2=90.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p3=45.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {longitude}
cgi_table_data align=left {
cgi_text p4=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p5=360.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p6=90.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {altitude}
cgi_table_data align=left {
cgi_text p7=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p8=600.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p9=200.0 size=6 maxlength=10
}
}
}
}
}
cgi_table_row {
cgi_table_data valign=top {
# Epoch specification
# Default the epoch to the current UTC time as a fractional year.
set time [clock seconds]
set year [clock format $time -format "%Y" -gmt 1]
set byear [clock scan "January 1, $year GMT"]
set eyear [clock scan "December 31, $year GMT"]
set ep [expr $year + 1.0*($time - $byear)/($eyear - $byear)]
set epoch [format "%7.2f" $ep]
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Geomagnetic field epoch"
}
cgi_table_row valign=center {
cgi_td align=center {year}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text tm=$epoch size=7 maxlength=10
}
}
}
}
}
}
}
# sendForm4 --
#   Option 4 form: geodetic and geomagnetic coordinates on an
#   azimuth/elevation/range grid from a chosen instrument.
#   Same layout as sendForm1 but the grid rows are az/el/range (p1..p9);
#   also emits instrument coordinates (s1..s3) and the IGRF epoch (tm).
proc sendForm4 { htmlstyle } {
global madroot option mne instName instLat instLon instAlt
set option 4
cgi_center {
cgi_h1 "Geographic and Geomagnetic Coordinates vs Azimuth, Elevation, Range"
}
cgi_puts "Select an instrument, either by name or by selecting
\"Enter instrument coordinates\" and then entering the
latitude, longitude and altitude of the instrument. Then
specify the azimuth/elevation/range grid on which
geodetic and geomagnetic coordinates will be calculated.
The epoch for which the IGRF geomagnetic coordinates will be
calculated may also be specified."
cgi_br
cgi_puts [cgi_italic "Note: The geomagnetic field computations sometimes
fail, usually for points near the pole or equator. Usually this
is obvious, but apply common sense in using the results
returned by this program."]
cgi_br
cgi_text option=$option type=hidden
# Top-level 2x2 Table
cgi_table cellpadding=10 align=center {
# First row of top-level table
cgi_table_row {
# Row 1 column 1
cgi_table_data {
# Instrument selection box table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Name"
}
cgi_table_row valign=center {
cgi_table_data {
cgi_select station size=10 {
cgi_option "Enter Instrument Coordinates" value=0
# Step by 2 over the flat code/name list; skip instruments
# with implausible coordinates.
for {set i 0} {$i < [llength $mne]} {set i [expr $i+2]} {
set code [lindex $mne $i]
if {$instLat($code) > -90.0 && \
$instLon($code) > -360.0 && \
$instAlt($code) < 100.0} {
if {$code == 30} {
cgi_option $instName($code) value=$code "selected"
} else {
cgi_option $instName($code) value=$code
}
}
}
}
}
}
}
}
# Row 1 column 2
cgi_table_data {
# Output specification table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Output Table Grid"
}
cgi_table_row valign=center {
cgi_td align=center {}
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_td align=right {azimuth}
cgi_table_data align=left {
cgi_text p1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p2=360.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p3=45.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {elevation}
cgi_table_data align=left {
cgi_text p4=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p5=90.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p6=30.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {range}
cgi_table_data align=left {
cgi_text p7=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p8=600.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p9=200.0 size=6 maxlength=10
}
}
}
}
}
# Second row of top-level table
cgi_table_row {
# Row 2 column 1
cgi_table_data valign=top {
# Instrument coordinate specification
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {latitude}
cgi_td align=center {longitude}
cgi_td align=center {altitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text s1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s2=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s3=0.0 size=6 maxlength=10
}
}
}
}
# Row 2 column 2
cgi_table_data valign=top {
# Epoch specification
# Default the epoch to the current UTC time as a fractional year.
set time [clock seconds]
set year [clock format $time -format "%Y" -gmt 1]
set byear [clock scan "January 1, $year GMT"]
set eyear [clock scan "December 31, $year GMT"]
set ep [expr $year + 1.0*($time - $byear)/($eyear - $byear)]
set epoch [format "%7.2f" $ep]
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Geomagnetic field epoch"
}
cgi_table_row valign=center {
cgi_td align=center {year}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text tm=$epoch size=7 maxlength=10
}
}
}
}
}
}
# End of top-level table
}
# sendForm5 --
#   Option 5 form: trace a geomagnetic field line identified by an
#   azimuth/elevation/range (p1..p3) from a chosen instrument, sampled at
#   the altitude range p4..p6. p7..p9 are unused for this option and are
#   emitted as hidden zeros; also emits instrument coordinates (s1..s3)
#   and the IGRF epoch (tm).
proc sendForm5 { htmlstyle } {
global madroot option mne instName instLat instLon instAlt
set option 5
cgi_center {
cgi_h1 "Field Line Coordinates Given Azimuth, Elevation, Range"
}
cgi_puts "Select an instrument, either by name or by selecting
\"Enter instrument coordinates\" and then entering the
latitude, longitude and altitude of the instrument. Then
specify an azimuth, elevation and range to a field line on
which geodetic and geomagnetic coordinates will be
calculated. The epoch for which the IGRF geomagnetic
coordinates will be calculated may also be specified."
cgi_br
cgi_puts [cgi_italic "Note: The geomagnetic field computations sometimes
fail, usually for points near the pole or equator. Usually
this is obvious, but apply common sense in using the
results returned by this program."]
cgi_br
cgi_text option=$option type=hidden
# p7-p9 are not used by option 5; send zeros to keep the backend's
# parameter set complete.
cgi_text p7=0 type=hidden
cgi_text p8=0 type=hidden
cgi_text p9=0 type=hidden
# Top-level 2x2 Table
cgi_table cellpadding=10 align=center {
# First row of top-level table
cgi_table_row {
# Row 1 column 1
cgi_table_data {
# Instrument selection box table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Name"
}
cgi_table_row valign=center {
cgi_table_data {
cgi_select station size=10 {
cgi_option "Enter Instrument Coordinates" value=0
# Step by 2 over the flat code/name list; skip instruments
# with implausible coordinates.
for {set i 0} {$i < [llength $mne]} {set i [expr $i+2]} {
set code [lindex $mne $i]
if {$instLat($code) > -90.0 && \
$instLon($code) > -360.0 && \
$instAlt($code) < 100.0} {
if {$code == 30} {
cgi_option $instName($code) value=$code "selected"
} else {
cgi_option $instName($code) value=$code
}
}
}
}
}
}
}
}
# Row 1 column 2
cgi_table_data {
cgi_table {
cgi_table_row {
cgi_table_data {
# Field line identification (az/el/range from the instrument)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Field Line Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {azimuth}
cgi_td align=center {elevation}
cgi_td align=center {range}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text p1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p2=45.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p3=1000.0 size=6 maxlength=10
}
}
}
}
}
cgi_table_row {
cgi_table_data {
# Altitude sampling along the field line (start/end/delta)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Altitudes Along Field Line"
}
cgi_table_row valign=center {
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text p4=100.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p5=1000.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p6=100.0 size=6 maxlength=10
}
}
}
}
}
}
}
}
# Second row of top-level table
cgi_table_row {
# Row 2 column 1
cgi_table_data valign=top {
# Instrument coordinate specification
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {latitude}
cgi_td align=center {longitude}
cgi_td align=center {altitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text s1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s2=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s3=0.0 size=6 maxlength=10
}
}
}
}
# Row 2 column 2
cgi_table_data valign=top {
# Epoch specification
# Default the epoch to the current UTC time as a fractional year.
set time [clock seconds]
set year [clock format $time -format "%Y" -gmt 1]
set byear [clock scan "January 1, $year GMT"]
set eyear [clock scan "December 31, $year GMT"]
set ep [expr $year + 1.0*($time - $byear)/($eyear - $byear)]
set epoch [format "%7.2f" $ep]
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Geomagnetic field epoch"
}
cgi_table_row valign=center {
cgi_td align=center {year}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text tm=$epoch size=7 maxlength=10
}
}
}
}
}
}
# End of top-level table
}
proc sendForm6 { htmlstyle } {
# Emit the input form for looker option 6: compute geodetic/geomagnetic
# coordinates along a field line identified by the latitude, longitude and
# altitude of a point on it.  The form posts back to this same CGI script,
# which dispatches on the hidden "option" field and expects exactly 15
# form variables (option, tm, station, p1-p9, s1-s3).
global madroot option mne instName instLat instLon instAlt
set option 6
cgi_center {
cgi_h1 "Field Line Coordinates Given Latitude, Longitude, and Altitude"
}
cgi_puts "Select an instrument, either by name or by selecting
\"Enter instrument coordinates\" and then entering the
latitude, longitude and altitude of the instrument. Then
specify the latitude, longitude and altitude of a point on
a field line on which geodetic and geomagnetic coordinates
will be calculated. The epoch for which the IGRF
geomagnetic coordinates will be calculated may also be
specified."
cgi_br
cgi_puts [cgi_italic "Note: The geomagnetic field computations sometimes
fail, usually for points near the pole or equator. Usually
this is obvious, but apply common sense in using the
results returned by this program."]
cgi_br
cgi_text option=$option type=hidden
# p7-p9 are unused by this option; send zeros so the dispatcher always
# receives the full 15-parameter set.
cgi_text p7=0 type=hidden
cgi_text p8=0 type=hidden
cgi_text p9=0 type=hidden
# Top-level 2x2 Table
cgi_table cellpadding=10 align=center {
# First row of top-level table
cgi_table_row {
# Row 1 column 1
cgi_table_data {
# Instrument selection box table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Name"
}
cgi_table_row valign=center {
cgi_table_data {
cgi_select station size=10 {
cgi_option "Enter Instrument Coordinates" value=0
# mne is iterated in steps of 2 with each even element used as an
# instrument code (presumably code/mnemonic pairs -- TODO confirm).
# Instruments with placeholder coordinates are skipped; code 30 is
# pre-selected as the default instrument.
for {set i 0} {$i < [llength $mne]} {set i [expr $i+2]} {
set code [lindex $mne $i]
if {$instLat($code) > -90.0 && \
$instLon($code) > -360.0 && \
$instAlt($code) < 100.0} {
if {$code == 30} {
cgi_option $instName($code) value=$code "selected"
} else {
cgi_option $instName($code) value=$code
}
}
}
}
}
}
}
}
# Row 1 column 2
cgi_table_data {
cgi_table {
cgi_table_row {
cgi_table_data {
# Field line point specification (p1-p3)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Field Line Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {latitude}
cgi_td align=center {longitude}
cgi_td align=center {altitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text p1=45.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p2=270.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p3=0.0 size=6 maxlength=10
}
}
}
}
}
cgi_table_row {
cgi_table_data {
# Altitude grid along the field line (p4-p6)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Altitudes Along Field Line"
}
cgi_table_row valign=center {
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text p4=100.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p5=1000.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p6=100.0 size=6 maxlength=10
}
}
}
}
}
}
}
}
# Second row of top-level table
cgi_table_row {
# Row 2 column 1
cgi_table_data valign=top {
# Instrument coordinate specification (s1-s3, used when station == 0)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {latitude}
cgi_td align=center {longitude}
cgi_td align=center {altitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text s1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s2=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s3=0.0 size=6 maxlength=10
}
}
}
}
# Row 2 column 2
cgi_table_data valign=top {
# Epoch specification: default "tm" to the current fractional year
# (e.g. 2003.37), computed from seconds elapsed in the current year.
set time [clock seconds]
set year [clock format $time -format "%Y" -gmt 1]
set byear [clock scan "January 1, $year GMT"]
set eyear [clock scan "December 31, $year GMT"]
set ep [expr $year + 1.0*($time - $byear)/($eyear - $byear)]
set epoch [format "%7.2f" $ep]
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Geomagnetic field epoch"
}
cgi_table_row valign=center {
cgi_td align=center {year}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text tm=$epoch size=7 maxlength=10
}
}
}
}
}
}
# End of top-level table
}
proc sendForm7 { htmlstyle } {
# Emit the input form for looker option 7: compute geodetic/geomagnetic
# coordinates along the field line identified by its apex latitude and
# longitude.  The form posts back to this script, which dispatches on the
# hidden "option" field and expects exactly 15 form variables.
global madroot option mne instName instLat instLon instAlt
set option 7
cgi_center {
# Fix: the heading was copied from the azimuth/elevation/range form
# (option 5); this form actually collects the field line apex
# latitude/longitude, as its own description and inputs show.
cgi_h1 "Field Line Coordinates Given Apex Latitude and Longitude"
}
cgi_puts "Select an instrument, either by name or by selecting
\"Enter instrument coordinates\" and then entering the
latitude, longitude and altitude of the instrument. Then
specify the apex latitude and longitude of a field line
on which geodetic and geomagnetic coordinates will be
calculated. The epoch for which the IGRF geomagnetic
coordinates will be calculated may also be specified."
cgi_br
cgi_puts [cgi_italic "Note: The geomagnetic field computations sometimes
fail, usually for points near the pole or equator. Usually
this is obvious, but apply common sense in using the
results returned by this program."]
cgi_br
cgi_text option=$option type=hidden
# p3 and p7-p9 are unused by this option; send zeros so the dispatcher
# always receives the full 15-parameter set.
cgi_text p3=0 type=hidden
cgi_text p7=0 type=hidden
cgi_text p8=0 type=hidden
cgi_text p9=0 type=hidden
# Top-level 2x2 Table
cgi_table cellpadding=10 align=center {
# First row of top-level table
cgi_table_row {
# Row 1 column 1
cgi_table_data {
# Instrument selection box table
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Name"
}
cgi_table_row valign=center {
cgi_table_data {
cgi_select station size=10 {
cgi_option "Enter Instrument Coordinates" value=0
# Skip instruments with placeholder coordinates; code 30 is the
# pre-selected default instrument.
for {set i 0} {$i < [llength $mne]} {set i [expr $i+2]} {
set code [lindex $mne $i]
if {$instLat($code) > -90.0 && \
$instLon($code) > -360.0 && \
$instAlt($code) < 100.0} {
if {$code == 30} {
cgi_option $instName($code) value=$code "selected"
} else {
cgi_option $instName($code) value=$code
}
}
}
}
}
}
}
}
# Row 1 column 2
cgi_table_data {
cgi_table {
cgi_table_row {
cgi_table_data {
# Field line apex specification (p1, p2)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Field Line Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {apex latitude}
cgi_td align=center {apex longitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text p1=65.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p2=270.0 size=6 maxlength=10
}
}
}
}
}
cgi_table_row {
cgi_table_data {
# Altitude grid along the field line (p4-p6)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Altitudes Along Field Line"
}
cgi_table_row valign=center {
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text p4=100.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p5=1000.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text p6=100.0 size=6 maxlength=10
}
}
}
}
}
}
}
}
# Second row of top-level table
cgi_table_row {
# Row 2 column 1
cgi_table_data valign=top {
# Instrument coordinate specification (s1-s3, used when station == 0)
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Instrument Coordinates"
}
cgi_table_row valign=center {
cgi_td align=center {latitude}
cgi_td align=center {longitude}
cgi_td align=center {altitude}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_text s1=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s2=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text s3=0.0 size=6 maxlength=10
}
}
}
}
# Row 2 column 2
cgi_table_data valign=top {
# Epoch specification: default "tm" to the current fractional year.
set time [clock seconds]
set year [clock format $time -format "%Y" -gmt 1]
set byear [clock scan "January 1, $year GMT"]
set eyear [clock scan "December 31, $year GMT"]
set ep [expr $year + 1.0*($time - $byear)/($eyear - $byear)]
set epoch [format "%7.2f" $ep]
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Geomagnetic field epoch"
}
cgi_table_row valign=center {
cgi_td align=center {year}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text tm=$epoch size=7 maxlength=10
}
}
}
}
}
}
# End of top-level table
}
proc sendForm8 { htmlstyle } {
# Emit the input form for the point / magnetic-conjugate-point table:
# a latitude/longitude/altitude grid, a single UT time, and a set of
# parameter checkboxes (multi-valued "pList" form variable).
# NOTE(review): this proc is named sendForm8 but sets option to 3, and no
# hidden "option" field is written into the form -- the submitted field
# names (start_lat, pList, year, ...) also do not match the 15-parameter
# dispatcher at the bottom of this file.  Confirm which handler this form
# is meant to post to.
global madroot option mne instName instLat instLon instAlt
set option 3
cgi_center {
cgi_h1 "Point/Magnetic Conjugate Point vs Latitude, Longitude, Altitude"
}
cgi_puts "Specify the latitude/longitude/altitude grid on which
point/magnetic conjugate point parameters will be calculated.
This program calculates the following for both the specified
point and its magnetic conjugate: gdlat, glon, solar zenith
angle and shadow height."
cgi_br
cgi_puts "This calculation is done for a single time (UT)."
cgi_br
cgi_table cellpadding=10 align=center {
cgi_table_row {
cgi_table_data {
# Output specification table: start/stop/step for each grid axis
cgi_table cellpadding=5 border="border" {
cgi_caption {
cgi_puts "Output Table Grid"
}
cgi_table_row valign=center {
cgi_td align=center {}
cgi_td align=center {start}
cgi_td align=center {end}
cgi_td align=center {delta}
}
cgi_table_row valign=center {
cgi_td align=right {latitude}
cgi_table_data align=left {
cgi_text start_lat=-90.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text stop_lat=90.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text step_lat=45.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {longitude}
cgi_table_data align=left {
cgi_text start_lon=-180.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text stop_lon=180.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text step_lon=90.0 size=6 maxlength=10
}
}
cgi_table_row valign=center {
cgi_td align=right {altitude}
cgi_table_data align=left {
cgi_text start_alt=0.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text stop_alt=600.0 size=6 maxlength=10
}
cgi_table_data align=left {
cgi_text step_alt=200.0 size=6 maxlength=10
}
}
}
}
}
cgi_table_row {
cgi_table_data valign=top {
# time specification (single UT instant)
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Time (UT)"
}
cgi_table_row valign=center {
cgi_td align=center {year}
cgi_td align=center {month}
cgi_td align=center {day}
cgi_td align=center {hour}
cgi_td align=center {min}
cgi_td align=center {sec}
}
cgi_table_row valign=center {
cgi_table_data align=center {
cgi_text year=2000 size=4 maxlength=4
}
cgi_table_data align=left {
cgi_text month=1 size=4 maxlength=4
}
cgi_table_data align=left {
cgi_text day=1 size=4 maxlength=4
}
cgi_table_data align=left {
cgi_text hour=0 size=4 maxlength=4
}
cgi_table_data align=left {
cgi_text min=0 size=4 maxlength=4
}
cgi_table_data align=left {
cgi_text sec=0 size=4 maxlength=4
}
}
}
}
}
cgi_table_row {
cgi_table_data valign=top {
# parameter selection: every checkbox shares the name "pList", so the
# submission carries one pList value per checked parameter mnemonic;
# all are checked by default.
cgi_table cellpadding=5 border="border" width=100% {
cgi_caption {
cgi_puts "Select parameters"
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_checkbox pList=MAGCONJLAT checked
cgi_put "Magnetic conjugate latitude"
}
cgi_table_data align=left {
cgi_checkbox pList=MAGCONJLON checked
cgi_put "Magnetic conjugate longitude"
}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_checkbox pList=SZEN checked
cgi_put "Solar zenith angle"
}
cgi_table_data align=left {
cgi_checkbox pList=SZENC checked
cgi_put "Magnetic conjugate solar zenith angle"
}
}
cgi_table_row valign=center {
cgi_table_data align=left {
cgi_checkbox pList=SDWHT checked
cgi_put "Shadow height (km)"
}
cgi_table_data align=left {
cgi_checkbox pList=MAGCONJSDWHT checked
cgi_put "Magnetic conjugate shadow height (km)"
}
}
}
}
}
}
}
proc sendResults { htmlstyle option tm slatgd slon saltgd \
p1 p2 p3 p4 p5 p6 p7 p8 p9} {
# Run the looker1 executable and emit its raw output in a preformatted
# HTML page.
#
#   htmlstyle          -- BODY tag attributes for the page
#   option             -- looker computation mode (hidden form field)
#   tm                 -- geomagnetic field epoch (fractional year)
#   slatgd/slon/saltgd -- instrument geodetic latitude, longitude, altitude
#   p1..p9             -- option-specific numeric parameters
global madroot looker
cgi_html {
cgi_head {
cgi_title "Looker"
}
cgi_body $htmlstyle {
cgi_center {
cgi_h1 "Looker Results"
}
cgi_preformatted {
#puts "looker1 $option $tm $slatgd $slon $saltgd $p1 $p2 $p3 $p4 $p5 $p6 $p7 $p8 $p9"
# Consistency fix: use the executable path configured once at the top
# level ("set looker ...") instead of a second hard-coded copy of the
# same path.  Any exec error text is captured and shown verbatim.
catch {exec $looker $option $tm $slatgd $slon $saltgd $p1 $p2 $p3 $p4 $p5 $p6 $p7 $p8 $p9} result
cgi_puts $result
}
}
}
}
########################
# Start Execution Here #
########################
# Path of the looker1 executable.
# NOTE(review): this variable is not referenced below -- sendResults execs
# a hard-coded copy of the same path; confirm which one is authoritative.
set looker /Users/mnicolls/Documents/Work/Madrigal/bin/looker1
# Uncomment following statement to see errors in Web page
#cgi_debug -on
# Create the madrigal site-configuration object and pull out madroot and
# the site's preferred page style.
madrigal madrigal
set madroot [madrigal cget -madroot]
set htmlstyle [madrigal cget -htmlstyle]
# htmlstyle is stored as a full "<BODY ...>" tag; strip the angle brackets
# and the BODY keyword, leaving just the attribute string for cgi_body.
set htmlstyle [string trimleft [set htmlstyle] "<"]
set htmlstyle [string trimright [set htmlstyle] ">"]
regsub -nocase BODY $htmlstyle "" htmlstyle
# Load the instrument tables used to populate the selection lists.
madInstrument instruments
instruments read
instruments getEntries mnemonic instName instLat instLon instAlt \
contactName contactAddress1 \
contactAddress2 contactAddress3 \
contactCity contactState contactPostalCode \
contactCountry contactTelephone contactEmail
set mne [instruments cget -mnemonic]
# Dispatch on the number of submitted CGI variables:
#   0  -> first visit, show the top-level form
#   1  -> an option was chosen, show that option's input form
#   15 -> full submission, run the computation
cgi_eval {
cgi_input
#cgi_input "option=3&station=0&s1=0&s2=0&s3=0&p1=40.0&p2=50.0&p3=10.0&p4=260.0&p5=280.0&p6=10.0&p7=0.0&p8=600.0&p9=200.0&tm=2002.20"
# First call. Display form.
if {[llength [cgi_import_list]] == 0} {
sendTopForm $htmlstyle
# Have the option arguments. Display the appropriate input
# specification form.
} elseif {[llength [cgi_import_list]] == 1} {
catch {cgi_import option}
sendSelectionForm $htmlstyle
} elseif {[llength [cgi_import_list]] == 15} {
catch {cgi_import option}
catch {cgi_import tm}
catch {cgi_import station}
catch {cgi_import p1}
catch {cgi_import p2}
catch {cgi_import p3}
catch {cgi_import p4}
catch {cgi_import p5}
catch {cgi_import p6}
catch {cgi_import p7}
catch {cgi_import p8}
catch {cgi_import p9}
catch {cgi_import s1}
catch {cgi_import s2}
catch {cgi_import s3}
# station == 0 means "Enter Instrument Coordinates": take the location
# from the s1/s2/s3 fields, otherwise look it up by instrument code.
if {$station == 0} {
set slatgd $s1
set slon $s2
set saltgd $s3
} else {
set slatgd $instLat($station)
set slon $instLon($station)
set saltgd $instAlt($station)
}
sendResults $htmlstyle $option $tm $slatgd $slon $saltgd \
$p1 $p2 $p3 $p4 $p5 $p6 $p7 $p8 $p9
} else {
cgi_http_head
cgi_html {
cgi_body $htmlstyle {
cgi_p "Error: Must specify either 0 or 15 parameters"
}
}
}
}
exit
| true
|
3e544d4969e8bb5724af209ad03c4fa549499283
|
Shell
|
war49/ansible-tower
|
/main.sh
|
UTF-8
| 3,056
| 3.625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Bash script to automate Ansible Tower Deployment
# Suwardi - wardilee@icloud.com
#
# Note: the shebang must be the very first line of the file; the original
# placed comment lines above it, so the interpreter line was ignored.

# Define variables
YML_CREATE_ORG_FILE=/var/lib/awx/projects/Test-yml/org_crt.yml
YML_CREATE_INVPRJ_FILE=/var/lib/awx/projects/Test-yml/inventory_proj.yml
YML_PROJ_DIR=/var/lib/awx/projects
ORGANIZATION_NAME=ORG-49
INVENTORY=Invent-49
PROJECT_NAME=Project-49
PROJECT_PATH=Directory49
JOBTEMPLATE=JOBTemp-49

# Flexible ip address of tower host: first 192.168.x.x address on this box.
HOSTIP=$(sudo ip addr | grep 192.168 | awk '{ print $2 }' | cut -d'/' -f1)

# Set Tower IP & test ping.
# Fix: failure paths previously ran 'exit ${1}', i.e. exited with the
# script's first positional argument (usually empty); use an explicit
# non-zero status instead.
if tower-cli config host "$HOSTIP"; then
  echo "Tower IP set to $HOSTIP"
else
  echo "Failed set tower IP" >&2
  exit 1
fi

if ping -c1 "$HOSTIP" > /dev/null 2>&1; then
  echo "ping successful"
else
  echo "Tower unreachable" >&2
  exit 1
fi

# Create organization from playbook with tower api
if [[ -f "$YML_CREATE_ORG_FILE" ]]; then
  echo "Creating tower organization $ORGANIZATION_NAME"
  ansible-playbook --connection=local --extra-vars "host_ip='${HOSTIP}' \
org_name='${ORGANIZATION_NAME}'" "$YML_CREATE_ORG_FILE"
  tower-cli organization list
else
  echo "File $YML_CREATE_ORG_FILE not found" >&2
  exit 1
fi

#sleep 10
# Get Organization ID
ORGID=$(tower-cli organization list | grep "$ORGANIZATION_NAME" | awk '{ print $1 }')

# Create Tower inventory with Tower api
if [[ -f "$YML_CREATE_INVPRJ_FILE" ]]; then
  # Create Tower Inventory
  echo "Creating tower inventory with $YML_CREATE_INVPRJ_FILE"
  ansible-playbook --connection=local --tags "inventory_post" --extra-vars \
    "host_ip='${HOSTIP}' inv_name='${INVENTORY}' org_id='${ORGID}'" "$YML_CREATE_INVPRJ_FILE"
  # Listing inventory
  tower-cli inventory list

  # Create Tower project
  echo "Creating tower Project with $YML_CREATE_INVPRJ_FILE"
  if [[ ! -d "$YML_PROJ_DIR/$PROJECT_PATH" ]]; then
    mkdir "$YML_PROJ_DIR/$PROJECT_PATH"
  fi
  ansible-playbook --connection=local --tags "project_post" --extra-vars \
    "host_ip='${HOSTIP}' proj_name='${PROJECT_NAME}' org_id='${ORGID}' \
proj_path='${PROJECT_PATH}'" "$YML_CREATE_INVPRJ_FILE"
  # Listing project
  tower-cli project list

  # Create Tower job template
  echo "Creating tower Job Template with $YML_CREATE_INVPRJ_FILE"
  INVENTORY_ID=$(tower-cli inventory list | grep "$INVENTORY" | awk '{ print $1 }')
  PROJ_ID=$(tower-cli project list | grep "$PROJECT_NAME" | awk '{ print $1 }')
  #PLAYBOOK_YML="$YML_PROJ_DIR/$PROJECT_PATH/hello.yml"
  PLAYBOOKY="hello.yml"
  # Write the demo playbook used by the job template.
  echo -e "---\n- hosts: localhost\n gather_facts: false\n foo: \"Hello world\"" > "$PLAYBOOKY"
  ansible-playbook -vvv --connection=local --tags "jobtemplate_post" --extra-vars \
    "host_ip='${HOSTIP}' job_templ_name='${JOBTEMPLATE}' proj_id='${PROJ_ID}' \
playbook_path='${PLAYBOOKY}' inv_id='${INVENTORY_ID}'" "$YML_CREATE_INVPRJ_FILE"
  # Listing job template
  tower-cli job_template list
  # Running job with Tower
  tower-cli job launch -J "$JOBTEMPLATE"
else
  echo "File $YML_CREATE_INVPRJ_FILE not found" >&2
  exit 1
fi
| true
|
40b8b8904f2890d6158c98f8d9d9a40a02550eaa
|
Shell
|
zvakanaka/photo-gal
|
/scripts/create_webs.sh
|
UTF-8
| 402
| 3.65625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Create downscaled .webp and .jpg copies of an album's photos in a
# hidden ".web" subdirectory.
# $1 album, $2 photo_dir (defaults to ../photo)
if [ -z "$1" ]; then
  echo "ERROR: Must supply dirname" >&2
  exit 1
fi
if [ -z "$2" ]; then
  photo_dir='../photo'
else
  photo_dir=$2
fi
# Fix: the original logged "$3", which is never documented or passed;
# log the resolved photo directory instead.
echo "$(date) Creating webs for $1 from $photo_dir" >> scripts/log.txt
# Fix: bail out if the album directory is missing instead of running the
# conversion loop in the current directory.
cd "$photo_dir/$1" || exit 1
mkdir .web
for f in *.[jJ]*; do
  # Guard against the glob matching nothing.
  [ -e "$f" ] || continue
  cwebp "$f" -resize 0 1024 -q 70 -short -o ".web/${f%.*}.webp"
  convert "$f" -resize '1024x1024>' -quality 70 ".web/${f%.*}.jpg"
done
| true
|
5fbbec9dff1c30682c15643f8891883e44ffd15b
|
Shell
|
mpascucci/AST-image-processing
|
/tests/benchmark/benchmark.sh
|
UTF-8
| 1,721
| 4.21875
| 4
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Optionally builds, and runs benchmark.py.
# must be run from improc/tests/benchmark directory.
# "usage: sh benchmark.sh [-nb] [args_to_benchmark.py]"
# -nb or --nobuild : Skip build steps and just run benchmark.py"
# LD_LIBRARY_PATH will be updated only within the scope of this script.
# This is to avoid polluting the user's LD_LIBRARY_PATH with old astimplibs.
# To access astimplib python module outside this script,
# run source install_astimp_python_module.sh separately,
# or run source benchmark.sh.
# Sanity check: the =~ pattern matches anywhere inside $(pwd), so any
# working directory whose path contains improc/tests/benchmark passes.
if [[ ! $(pwd) =~ improc/tests/benchmark ]]; then
echo "Please run from improc/tests/benchmark directory."
exit 1
fi
if [[ "$1" == "--nobuild" || "$1" == "-nb" ]]; then # run only
PATH_TO_ASTLIB="$(pwd)/../../build/astimplib"
# Update LD_LIBRARY_PATH only within the scope of this script
# (unquoted RHS with trailing * makes this a prefix test: skip the
# export if the lib dir is already at the front of the path).
if [[ ! $LD_LIBRARY_PATH == $PATH_TO_ASTLIB* ]]
then
# If there are multiple astimplibs, the first will take priority.
export LD_LIBRARY_PATH=$PATH_TO_ASTLIB:$LD_LIBRARY_PATH
fi
# run benchmark python script
# "${@:2}" forwards all args except the first, which was '--nobuild'.
python3 benchmark.py "${@:2}"
else # build and run
# cd to improc/python-module directory
cd ../../python-module
# Install the python module
#* Use the flag -s here if you get `fatal error: 'cstddef' file not found #include <cstddef>`
# source is used so that LD_LIBRARY_PATH is updated within the scope
# of *this* script. "" avoids forwarding args.
# NOTE(review): the install script presumably exports LD_LIBRARY_PATH
# itself in this branch -- confirm in install_astimp_python_module.sh.
source ./install_astimp_python_module.sh ""
# get back to improc/tests/benchmark
cd ../tests/benchmark
# "${@}" forwards all args to benchmark.py.
python3 benchmark.py "$@"
fi
| true
|
fbff408b497b15495e197e06bfedb130b2f9e02d
|
Shell
|
slimm609/checksec.sh
|
/src/functions/aslrcheck.sh
|
UTF-8
| 2,274
| 3.453125
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/usr/bin/env bash
# shellcheck disable=SC2154
# these top lines are moved during build

# check for system-wide ASLR support
aslrcheck() {
  # PaX ASLR support: requires read access to /proc/1/status.
  if ! (grep -q 'Name:' /proc/1/status 2> /dev/null); then
    echo_message '\033[33m insufficient privileges for PaX ASLR checks\033[m\n' '' '' ''
    echo_message ' Fallback to standard Linux ASLR check' '' '' ''
  fi

  if grep -q 'PaX:' /proc/1/status 2> /dev/null; then
    # Fix: the original piped 'grep -q' (which suppresses all output) into a
    # second grep, so the 'R' randomization flag could never be seen and PaX
    # ASLR was always reported as disabled.  Drop -q from the producer.
    if grep 'PaX:' /proc/1/status 2> /dev/null | grep -q 'R'; then
      echo_message '\033[32mPaX ASLR enabled\033[m\n\n' '' '' ''
    else
      echo_message '\033[31mPaX ASLR disabled\033[m\n\n' '' '' ''
    fi
  else
    # standard Linux 'kernel.randomize_va_space' ASLR support
    # (see the kernel file 'Documentation/sysctl/kernel.txt' for a detailed description)
    echo_message " (kernel.randomize_va_space): " '' '' ''
    if sysctl -a 2> /dev/null | grep -q 'kernel\.randomize_va_space = 1'; then
      echo_message '\033[33mPartial (Setting: 1)\033[m\n\n' '' '' ''
      echo_message " Description - Make the addresses of mmap base, stack and VDSO page randomized.\n" '' '' ''
      echo_message " This, among other things, implies that shared libraries will be loaded to \n" '' '' ''
      echo_message " random addresses. Also for PIE-linked binaries, the location of code start\n" '' '' ''
      echo_message " is randomized. Heap addresses are *not* randomized.\n\n" '' '' ''
    elif sysctl -a 2> /dev/null | grep -q 'kernel\.randomize_va_space = 2'; then
      echo_message '\033[32mFull (Setting: 2)\033[m\n\n' '' '' ''
      echo_message " Description - Make the addresses of mmap base, heap, stack and VDSO page randomized.\n" '' '' ''
      echo_message " This, among other things, implies that shared libraries will be loaded to random \n" '' '' ''
      echo_message " addresses. Also for PIE-linked binaries, the location of code start is randomized.\n\n" '' '' ''
    elif sysctl -a 2> /dev/null | grep -q 'kernel\.randomize_va_space = 0'; then
      echo_message '\033[31mNone (Setting: 0)\033[m\n' '' '' ''
    else
      echo_message '\033[31mNot supported\033[m\n' '' '' ''
    fi
    echo_message " See the kernel file 'Documentation/sysctl/kernel.txt' for more details.\n\n" '' '' ''
  fi
}
| true
|
2755242af72fc0ed5caac13d76cd5f7aa3e6fd0d
|
Shell
|
ovieu/docker-rails-app
|
/actionhooks/runtests.sh
|
UTF-8
| 447
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/bash
# Action hook: run the application's test suite and append a framed log
# of the run to $log.
log="/tmp/runtests.log"

source /opt/actionhooks/docker_config.sh .

curdate=$(/bin/date)
echo "" > "$log"
echo "Latest Run Tests Event $curdate" >> "$log"
echo "" >> "$log"

# Custom Actions for the type of application:
pushd /opt/webapp >> "$log"
bundle exec rake test TEST=test/controllers/hello_world_test.rb
popd >> "$log"

curdate=$(/bin/date)
echo "" >> "$log"
echo "END of Run Tests Event $curdate" >> "$log"
echo "" >> "$log"

# NOTE(review): the rake exit status is discarded and the hook always
# reports success -- confirm whether callers should see test failures.
exit 0
| true
|
c56ba6d07b0d0d887190b2c6ef35214b9dc8502f
|
Shell
|
JudyBee/twitter-text-sentiment-analysis
|
/utils/embeddings/word2vec/run_word2vec.sh
|
UTF-8
| 821
| 3.6875
| 4
|
[] |
no_license
|
#!/bin/bash
# Compute word2vec embeddings for the twitter sentiment pipeline.
# Options: -s/--size, -p/--preprocess, -d/--dataDir, -e/--embDir.

SIZE="full"
PREPROCESS="none"
DATA="data"
EMB="data/embeddings"

# Print an error message to stderr and abort.
function fail {
  echo >&2 "$1"
  exit 1
}

while [[ $# -gt 0 ]]
do
  key="$1"

  case "$key" in
    -s|--size)
      SIZE="$2"
      shift
      shift
      ;;
    -p|--preprocess)
      PREPROCESS="$2"
      shift
      shift
      ;;
    -d|--dataDir)
      DATA="$2"
      shift
      shift
      ;;
    -e|--embDir)
      EMB="$2"
      shift
      shift
      ;;
    *) # unknown option
      shift # past argument
      ;;
  esac
done

# compute word embeddings
. utils/embeddings/word2vec/word2vec.config || fail "Word2Vec Config File could not be read."
echo "Word2Vec config file read"

# Fix: quote all expansions so values containing spaces survive, and put a
# space before '||' for readability.
python3 utils/embeddings/word2vec/word2vec.py --"$SIZE" --"$PREPROCESS" --dataDir "$DATA" --embDir "$EMB" || fail "Could not compute Word2Vec Embedding"
echo "Word2Vec Embedding computed"
| true
|
e7119924344d6374810285cb26f8d544adbcdaa6
|
Shell
|
mamachanko/pomodoro
|
/feature-tests.sh
|
UTF-8
| 199
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Fix: the shebang was '#/usr/bin/env bash' (missing '!'), so it was just a
# comment and the script ran under whatever shell happened to invoke it.
set -ex

# Serve the built site in the background for the feature tests.
cd build
python -m SimpleHTTPServer 1234 &> /dev/null &
SERVER_PID=$!
# Fix: under 'set -e' a failing gradlew skipped the final kill and leaked
# the background server; an EXIT trap stops it on every exit path.
trap 'kill "${SERVER_PID}"' EXIT
cd ..

cd feature-tests
TEST_URL="localhost:1234" ./gradlew clean build
cd ..
| true
|
12446f22fb3db69ae4ad37b250f58a27a91d342b
|
Shell
|
radtek/MultiverseClientServer
|
/multiverse server/multiverse-server/multiverse/bin/mvm
|
UTF-8
| 583
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Launch the Multiverse management command-line client.
# Locate the installation relative to this script if MV_HOME is not set.
if [ "X$MV_HOME" = "X" ]; then
  MV_HOME=$(dirname "$0")/..
  export MV_HOME
fi

MVJAR="$MV_HOME/dist/lib/multiverse.jar"
MARSJAR="$MV_HOME/dist/lib/mars.jar"
GETOPT="$MV_HOME/other/java-getopt-1.0.11.jar"

# Cygwin's JVM expects ';' as the classpath separator.
# Fix: '==' inside '[ ]' is a bashism that fails under a POSIX /bin/sh
# (e.g. dash); use '='.  The command substitution is quoted so an empty or
# multi-word result cannot break the test; uname errors are silenced on
# systems without the -o flag.
if [ "$(uname -o 2>/dev/null)" = "Cygwin" ]; then
  MV_CLASSPATH="$MVJAR;$MARSJAR;$GETOPT;$JAVA_HOME/lib/tools.jar"
else
  MV_CLASSPATH="$MVJAR:$MARSJAR:$GETOPT:$JAVA_HOME/lib/tools.jar"
fi

# Prefer the JDK's java when JAVA_HOME is set, else whatever is on PATH.
if [ "X$JAVA_HOME" = "X" ]; then
  java -cp "$MV_CLASSPATH" multiverse.management.CommandMain "$@"
else
  "$JAVA_HOME/bin/java" -cp "$MV_CLASSPATH" multiverse.management.CommandMain "$@"
fi
| true
|
0207d4bc1a2cd2be8197c17c2d6724c998b7a0c4
|
Shell
|
Akash-Patil-12/Shell_programing
|
/Dictionary/dictionary-problem-1.sh
|
UTF-8
| 557
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash -x
# Roll a six-sided die until some face has come up 10 times, then report
# how often the most- and least-frequent rolled faces appeared.
declare -A Dice
range=$((6-1+1))   # number of faces
check=0
while [[ $check -ne 1 ]]
do
  no=$(( RANDOM % range + 1 ))
  # ':-0' guards the first increment of a face that has no entry yet.
  Dice[$no]=$(( ${Dice[$no]:-0} + 1 ))
  if [[ ${Dice[$no]} -eq 10 ]]
  then
    check=1
  fi
done

echo "Keys : " ${!Dice[@]}
echo "Value : " ${Dice[@]}

# Find min/max among faces that were actually rolled.
# Fix: the original seeded both from Dice[1], which is empty when face 1
# never came up, leaving min unset in that case.
max=0
min=
for ((i=1;i<=6;i++))
do
  count=${Dice[$i]:-0}
  if [[ $count -gt 0 ]]
  then
    if [[ $count -ge $max ]]
    then
      max=$count
    fi
    if [[ -z $min || $count -le $min ]]
    then
      min=$count
    fi
  fi
done

echo "Maximum time value : $max"
echo "Minimum time value : $min"
| true
|
4ab7c05ca8d6d7ff954036965b46006df78843e2
|
Shell
|
dalehenrich/GsDevKit_home
|
/bin/utils/cleanSharedPharoDirectory
|
UTF-8
| 946
| 3.46875
| 3
|
[
"MIT"
] |
permissive
|
#! /bin/bash
#=========================================================================
# Copyright (c) 2015 GemTalk Systems, LLC <dhenrich@gemtalksystems.com>.
#=========================================================================

echo "================="
echo " GsDevKit script: $(basename $0) $*"
echo " path: $0"
echo "================="

usage() {
cat <<HELP
USAGE: $(basename $0) [-h]
Remove all files from \$GS_SHARED_PHARO, except those present in a pristine
installation.
OPTIONS
-h
display help
EXAMPLES
$(basename $0) -h
$(basename $0) -d \$GS_SHARED_PHARO_
HELP
}

set -e # exit on error

# Fix: the -h option advertised by usage() was never actually handled.
while getopts "h" OPT ; do
  case "$OPT" in
    h) usage; exit 0 ;;
    *) usage; exit 1 ;;
  esac
done

if [ "${GS_HOME}x" = "x" ] ; then
  echo "the GS_HOME environment variable needs to be defined"; exit 1
fi
source ${GS_HOME}/bin/defGsDevKit.env

# set -e aborts the script if pushd fails, so the rm below can only run
# inside the shared Pharo directory.
pushd "${GS_SHARED_PHARO}" >& /dev/null
rm -rf *.zip github-cache package-cache *.image *.changes pharo pharo-ui pharo-vm *.log
popd >& /dev/null

echo "...finished $(basename $0)"
| true
|
254ef896e1fbe630f802573fe034625346ec565b
|
Shell
|
virtualparadox/BBMap
|
/sh/unicode2ascii.sh
|
UTF-8
| 1,091
| 3.828125
| 4
|
[
"BSD-3-Clause-LBNL"
] |
permissive
|
#!/bin/bash

usage(){
echo "
Written by Brian Bushnell
Last modified October 17, 2017
Description: Replaces unicode and control characters with printable ascii characters.
WARNING - this does not work in many cases, and is not recommended!
It is only retained because there is some situation in which it is needed.
Usage: unicode2ascii.sh in=<file> out=<file>
Please contact Brian Bushnell at bbushnell@lbl.gov if you encounter any problems.
"
}

#This block allows symlinked shellscripts to correctly set classpath.
# Resolve symlinks until DIR is the real script directory, so CP points at
# the jar directory that sits next to the actual file.
pushd . > /dev/null
DIR="${BASH_SOURCE[0]}"
while [ -h "$DIR" ]; do
cd "$(dirname "$DIR")"
DIR="$(readlink "$(basename "$DIR")")"
done
cd "$(dirname "$DIR")"
DIR="$(pwd)/"
popd > /dev/null

#DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/"
CP="$DIR""current/"

# Default JVM heap; may be replaced by parseXmx from the command line.
z="-Xmx200m"
set=0

# With no arguments or an explicit help flag, print usage and quit.
if [ -z "$1" ] || [[ $1 == -h ]] || [[ $1 == --help ]]; then
usage
exit
fi

calcXmx () {
# calcmem.sh provides setEnvironment and parseXmx, which presumably set
# $EA/$EOOM/$z used below -- defined externally, not visible here.
source "$DIR""/calcmem.sh"
setEnvironment
parseXmx "$@"
}
calcXmx "$@"

function unicode2ascii() {
# Build the java command line as a string, echo it for the user, then
# eval it so the pre-expanded arguments are re-split exactly as shown.
local CMD="java $EA $EOOM $z -cp $CP jgi.UnicodeToAscii $@"
echo $CMD >&2
eval $CMD
}

unicode2ascii "$@"
| true
|
b3693d60ee2dce62521c1ad324edfd4c20ff2306
|
Shell
|
travleev/patch-scripts
|
/patch_get.sh
|
UTF-8
| 1,674
| 4.25
| 4
|
[] |
no_license
|
#!/bin/bash
# Create a unified diff (patch) between two sibling folders:
#
#   patch_get.sh ORIG MODIF patch.txt
#
# ORIG and MODIF must be plain subfolder names of the current folder.  An
# existing patch file is backed up with a timestamp suffix; an
# "exclude_patterns" file (one glob per line) filters files out of the
# comparison, and an example one is generated when missing.
if [[ "3" -ne "$#" ]] ; then
    echo ""
    echo " Specify original folder, modified folder "
    echo " and diff file name as command line parameters"
    echo ""
    echo " > patch_get.sh ORIG MODIF patch.txt"
    echo ""
    echo " ORIG and MODIF must be subfolders of the current folder "
    echo " (see general recomendations for avoiding common "
    echo " mistakes in GNU diff)."
    echo ""
    # Bug fix: the backticks around exclude_patterns were inside double
    # quotes, so the shell tried to *execute* `exclude_patterns` as a
    # command substitution; escape them so they print literally.
    echo " If file \`exclude_patterns\` exists, it will be used to exclude files"
    echo " from comparison. If this file does not exist, it will be generated"
    echo " to show example content."
    exit 1
fi;

orig="$1" # original folder
modi="$2" # modified folder
ptch="$3" # diff file

echo "Original folder: $orig"
echo "Modified folder: $modi"
echo "Diff file: $ptch"

# Both folders must exist and be plain names in the current directory.
for f in "$orig" "$modi" ; do
    if [[ ! -d "$f" ]]; then
        echo "Folder does not exist: $f"
        exit 1
    fi;
    if [[ "$f" == *"/"* ]]; then
        echo "$f: folder name contains slashes. Consider to work only with folders in current dir."
        exit 1
    fi;
done;

# Keep a timestamped backup of any previous patch file.
if [[ -f "$ptch" ]]; then
    dat=$(date +%Y_%m_%d__%H_%M_%S)
    bak="$ptch.$dat"
    echo "Patch file $ptch already exists. It will be moved to $bak"
    mv -- "$ptch" "$bak"
fi;

# Seed an example exclude_patterns file on first run.
# NOTE(review): the message says "restart the script", but the script
# still proceeds to run diff below -- confirm that is intended.
if [[ ! -a exclude_patterns ]]; then
    echo ""
    echo "Example exclude_patterns will be generated. Adjust it"
    echo "to control inclusion of files into patch, and restart the script."
    echo '*.o' > exclude_patterns
    echo '*.orig' >> exclude_patterns
    echo '*.log' >> exclude_patterns
fi;

diff -Naur -X exclude_patterns "$orig" "$modi" > "$ptch"
exit 0
| true
|
1e83ca20745bcf4e6685b7fc3471b246920aafcc
|
Shell
|
qupb/easyengine
|
/ci/prepare.sh
|
UTF-8
| 292
| 2.703125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Travis CI hook: on the develop-v4 branch, tag the VERSION file with a
# "-nightly" suffix, then build the easyengine phar archive.
if [[ "$TRAVIS_BRANCH" == "develop-v4" ]]; then
    ver=$(head -n 1 VERSION)
    # Unquoted echo piped through xargs trims surrounding whitespace.
    ver="$(echo $ver | xargs)"
    ver="${ver}-nightly"
    echo $ver > VERSION
fi
php -dphar.readonly=0 ./utils/make-phar.php easyengine.phar --quite > /dev/null
| true
|
dc61b692d2b9120b59d5b1f96bcf8701da53a4f5
|
Shell
|
zeus911/lanmp
|
/include/public.sh
|
UTF-8
| 11,016
| 3.921875
| 4
|
[] |
no_license
|
# Internal: emit message $2 wrapped in ANSI attribute "1;31;$1" (no newline).
__tint(){
    printf '\033[1;31;%sm%b\033[0m' "$1" "$2"
}
# Print $1 in red, without a trailing newline.
_red(){
    __tint 31 "$1"
}
# Print $1 in cyan, followed by a newline.
_info(){
    __tint 36 "$1"
    printf "\n"
}
# Print $1 in green, followed by a newline.
_success(){
    __tint 32 "$1"
    printf "\n"
}
# Print $1 in yellow, followed by a newline.
_warn(){
    __tint 33 "$1"
    printf "\n"
}
# Print $1 in red, then terminate the script with status 1.
_error(){
    __tint 31 "$1"
    printf "\n"
    exit 1
}
# Abort via _error unless the script is running as root (EUID 0).
IsRoot(){
    if [[ ${EUID} -ne 0 ]]; then
        _error "This script must be run as root"
    fi
}
# Download $1 from ${download_root_url}$1 unless a non-empty copy already
# exists; on failure, retry from secondary url $2, then abort via _error.
# (download_root_url is assumed to be set by the including script -- TODO
# confirm.)
DownloadFile(){
    local cur_dir=$(pwd)
    if [ -s "$1" ]; then
        _info "$1 [found]"
    else
        _info "$1 not found, download now..."
        # -c resume, 3 tries, 60 s timeout.
        wget --no-check-certificate -cv -t3 -T60 -O ${1} ${download_root_url}${1}
        if [ $? -eq 0 ]; then
            _success "$1 download completed..."
        else
            # Remove the partial file before retrying from the mirror.
            rm -f ${1}
            _warn "$1 download failed, retrying download from secondary url..."
            wget --no-check-certificate -cv -t3 -T60 -O $1 ${2}
            if [ $? -eq 0 ]; then
                _success "$1 download completed..."
            else
                _error "Failed to download $1, please download it to ${cur_dir} directory manually and try again."
            fi
        fi
    fi
}
# Download $1 from url $2 unless a non-empty copy already exists; abort
# via _error on failure (no secondary mirror here).
DownloadUrl(){
    local cur_dir=$(pwd)
    if [ -s "$1" ]; then
        _info "$1 [found]"
    else
        _info "$1 not found, download now..."
        wget --no-check-certificate -cv -t3 -T60 -O $1 ${2}
        if [ $? -eq 0 ]; then
            _success "$1 download completed..."
        else
            _error "Failed to download $1, please download it to ${cur_dir} directory manually and try again."
        fi
    fi
}
# Detect the system package manager and record it in the global PM
# ("yum" or "apt-get"); abort via _error when neither is available.
# Improvement: probe with `command -v` instead of actually invoking the
# package managers and checking for exit status 127, which printed their
# usage output and was slower.
_get_package_manager(){
    if command -v yum >/dev/null 2>&1; then
        PM="yum"
    elif command -v apt-get >/dev/null 2>&1; then
        PM="apt-get"
    else
        _error "Get Package Manager Failed!"
    fi
}
# Succeed (status 0) on an LP64 userland: C int still 32 bits
# (WORD_BIT=32) while long is 64 bits (LONG_BIT=64).
Is64bit(){
    [ $(getconf WORD_BIT) = '32' ] && [ $(getconf LONG_BIT) = '64' ]
}
# Run package-install command string $1 quietly, logging stderr to
# /tmp/install_package.log, and abort via _error on failure.
# ${command} is expanded unquoted on purpose so the command string is
# word-split; the 4th word is assumed to be the package name (e.g.
# "yum -y install <pkg>") for progress messages.
InstallPack(){
    local command="$1"
    local depend=$(echo "$1" | awk '{print $4}')
    _info "Starting to install package ${depend}"
    ${command} > /dev/null 2>/tmp/install_package.log
    if [ $? -ne 0 ]; then
        _error "
+------------------+
| ERROR DETECTED |
+------------------+
Installation package ${depend} failed.
Error Log is available at /tmp/install_package.log"
    fi
}
# Run command string $1 (word-split on purpose), silencing all output
# when $2 == "noOutput"; abort via _error if the command fails.
CheckError(){
    local command="$1"
    if [[ $2 == "noOutput" ]]; then
        ${command} >/dev/null 2>&1
    else
        ${command}
    fi
    if [ $? -ne 0 ]; then
        _error "
+------------------+
| ERROR DETECTED |
+------------------+
An error occurred,The Full Log is available at /tmp/install.log"
    fi
}
# Poll (up to 35 s, one check per second) for a file to appear or vanish.
#   $1  'created' -> wait until $2 exists; 'removed' -> wait until gone
#   $2  path of the (pid) file to watch
# Prints a progress dot per second to stdout.  On success the *global*
# $try is set to the empty string; after a timeout it holds the loop
# count -- callers test "$try" to tell the outcomes apart, so it must
# stay global.
wait_for_pid () {
    try=0
    while [ "$try" -lt 35 ] ; do
        case "$1" in
            'created')
                if [ -f "$2" ] ; then
                    try=''
                    break
                fi
                ;;
            'removed')
                if [ ! -f "$2" ] ; then
                    try=''
                    break
                fi
                ;;
        esac
        echo -n .
        try=$((try + 1))   # arithmetic expansion instead of forking `expr`
        sleep 1
    done
}
# Permanently disable SELinux in /etc/selinux/config and switch the
# running system to permissive mode (setenforce 0), but only when it is
# currently set to "enforcing".
_disable_selinux(){
    if [ -s /etc/selinux/config ]; then
        selinux=`grep "SELINUX=enforcing" /etc/selinux/config |wc -l`
        if [[ ${selinux} -ne 0 ]];then
            sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
            setenforce 0
        fi
    fi
}
# Install the build toolchain and common utilities using the detected
# package manager ($PM from _get_package_manager), register extra library
# paths with the dynamic linker, and verify every required command exists.
_install_tools(){
    _info "Starting to install development tools..."
    if [ "${PM}" = "yum" ];then
        InstallPack "yum -y install epel-release"
        yum_depends=(
            gcc
            gcc-c++
            make
            perl
            wget
            net-tools
            openssl
            zlib
            automake
            psmisc
            procps
            zip
            unzip
            bzip2
            xz
            tar
            e2fsprogs
        )
        for depend in ${yum_depends[@]}
        do
            InstallPack "yum -y install ${depend}"
        done
        # Best-effort extras: CentOS 8 PowerTools repo and time-sync tools
        # (errors deliberately ignored on systems without dnf/chrony).
        dnf -y install dnf-plugins-core >/dev/null 2>&1 && dnf config-manager --enable PowerTools >/dev/null 2>&1
        dnf -y install chrony >/dev/null 2>&1
        yum -y install ntpdate >/dev/null 2>&1
    elif [ "${PM}" = "apt-get" ];then
        apt_depends=(
            gcc
            g++
            make
            perl
            wget
            net-tools
            openssl
            zlib1g
            automake
            psmisc
            procps
            zip
            unzip
            bzip2
            xz-utils
            tar
            e2fsprogs
        )
        apt-get update > /dev/null 2>&1
        for depend in ${apt_depends[@]}
        do
            InstallPack "apt-get -y install ${depend}"
        done
        apt-get -y install ntpdate >/dev/null 2>&1
    fi
    # Make /usr/local/lib and the multiarch directories known to ld.so.
    if ! grep -qE "^/usr/local/lib" /etc/ld.so.conf.d/*.conf; then
        echo "/usr/local/lib" > /etc/ld.so.conf.d/locallib.conf
    fi
    if Is64bit; then
        if ! grep -qE "^/usr/lib/x86_64-linux-gnu" /etc/ld.so.conf.d/*.conf; then
            echo "/lib/x86_64-linux-gnu" > /etc/ld.so.conf.d/x86_64-linux-gnu.conf
            echo "/usr/lib/x86_64-linux-gnu" >> /etc/ld.so.conf.d/x86_64-linux-gnu.conf
            echo "/usr/local/lib/x86_64-linux-gnu" >> /etc/ld.so.conf.d/x86_64-linux-gnu.conf
        fi
    else
        if ! grep -qE "^/usr/lib/i386-linux-gnu" /etc/ld.so.conf.d/*.conf; then
            echo "/lib/i386-linux-gnu" > /etc/ld.so.conf.d/i386-linux-gnu.conf
            echo "/usr/lib/i386-linux-gnu" >> /etc/ld.so.conf.d/i386-linux-gnu.conf
            echo "/usr/local/lib/i386-linux-gnu" >> /etc/ld.so.conf.d/i386-linux-gnu.conf
        fi
    fi
    ldconfig
    _info "Install development tools completed..."
    # Fail fast (via _error) if any required tool is still missing.
    _check_command_exist "gcc"
    _check_command_exist "g++"
    _check_command_exist "make"
    _check_command_exist "wget"
    _check_command_exist "perl"
    _check_command_exist "netstat"
    _check_command_exist "openssl"
    _check_command_exist "automake"
    _check_command_exist "killall"
    _check_command_exist "pkill"
    _check_command_exist "zip"
    _check_command_exist "unzip"
    _check_command_exist "xz"
    _check_command_exist "tar"
    _check_command_exist "chattr"
    _check_command_exist "lsattr"
}
# Verify that command $1 is available, aborting via _error when it is not.
# Probes with `type` first, then `command -v`, then `which`, so it works
# across shells of varying vintage.  The exit status of the probe is
# stored in the global rt (kept global on purpose -- callers may read it).
_check_command_exist(){
    local prog="$1"
    if eval type type > /dev/null 2>&1; then
        eval type "$prog" > /dev/null 2>&1
    elif command > /dev/null 2>&1; then
        command -v "$prog" > /dev/null 2>&1
    else
        which "$prog" > /dev/null 2>&1
    fi
    rt=$?
    if [ "${rt}" -ne 0 ]; then
        _error "$prog is not installed, please install it and try again."
    fi
}
# Point /etc/localtime at Asia/Shanghai (destructive: replaces the
# existing link/file).
_set_timezone() {
    _info "Starting set to timezone..."
    rm -f /etc/localtime
    ln -s /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
    _success "Set timezone completed..."
}
# Best-effort clock sync (ntpdate, then chrony), then record the start
# time in the globals StartDate / StartDateSecond for later duration
# reporting by the caller.
_sync_time() {
    _info "Starting to sync time..."
    ntpdate -bv cn.pool.ntp.org >/dev/null 2>&1
    chronyc -a makestep >/dev/null 2>&1
    _success "Sync time completed..."
    StartDate=$(date "+%Y-%m-%d %H:%M:%S")
    StartDateSecond=$(date +%s)
    _info "Start time: ${StartDate}"
}
# Run make, one job per CPU core unless ${parallel_compile} is 0.
#   $1  optional make target (empty -> default target)
# cpunum stays a global, as in the original.  ${parallel_compile} is
# expected from the caller's config; default to parallel when unset
# (the original's unquoted test errored out on an unset variable).
parallel_make(){
    local para="$1"
    # Count cores directly with grep -c (no cat | grep | wc pipeline);
    # anchoring on ^processor avoids counting "model name" lines that
    # happen to contain the word "processor".
    cpunum=$(grep -c '^processor' /proc/cpuinfo)
    if [ "${parallel_compile:-1}" -eq 0 ]; then
        cpunum=1
    fi
    # Plain if/else instead of `[ ... ] && make || make "$para"`, which
    # re-ran make with an empty-string target whenever the first make
    # failed.  make -j1 is equivalent to a plain make.
    if [ "${para}" = "" ]; then
        make -j"${cpunum}"
    else
        make -j"${cpunum}" "${para}"
    fi
}
# Gather host facts into globals: cname/cores/freq (CPU), tram/swap (MB),
# up(time), load, opsy (distro string), arch, lbit, host, kern and
# ramsum (RAM + swap, MB).
GetOsInfo(){
    cname=$( awk -F: '/model name/ {name=$2} END {print name}' /proc/cpuinfo | sed 's/^[ \t]*//;s/[ \t]*$//' )
    cores=$( awk -F: '/model name/ {core++} END {print core}' /proc/cpuinfo )
    freq=$( awk -F: '/cpu MHz/ {freq=$2} END {print freq}' /proc/cpuinfo | sed 's/^[ \t]*//;s/[ \t]*$//' )
    tram=$( free -m | awk '/Mem/ {print $2}' )
    swap=$( free -m | awk '/Swap/ {print $2}' )
    up=$( awk '{a=$1/86400;b=($1%86400)/3600;c=($1%3600)/60;d=$1%60} {printf("%ddays, %d:%d:%d\n",a,b,c,d)}' /proc/uptime )
    load=$( w | head -1 | awk -F'load average:' '{print $2}' | sed 's/^[ \t]*//;s/[ \t]*$//' )
    opsy=$( GetRelease )
    arch=$( uname -m )
    lbit=$( getconf LONG_BIT )
    host=$( hostname )
    kern=$( uname -r )
    ramsum=$( expr $tram + $swap )
}
# Abort via _error unless RAM+swap >= 480 MB; on low-memory hosts
# (< 600 MB) set disable_fileinfo so PHP's fileinfo extension is skipped.
_check_ram(){
    GetOsInfo
    if [ ${ramsum} -lt 480 ]; then
        _error "Not enough memory. The installation needs memory: ${tram}MB*RAM + ${swap}MB*SWAP >= 480MB"
    fi
    [ ${ramsum} -lt 600 ] && disable_fileinfo="--disable-fileinfo" || disable_fileinfo=""
}
# Register an installed package rooted at $1 in the build environment
# (PATH, LD_LIBRARY_PATH, CPPFLAGS).  Side effects: changes the current
# directory to $1 and may create a lib -> lib64 symlink there.
AddToEnv(){
    local location="$1"
    # If only lib64 exists, expose it under the conventional name "lib".
    cd ${location} && [ ! -d lib ] && [ -d lib64 ] && ln -s lib64 lib
    [ -d "${location}/bin" ] && export PATH=${location}/bin:${PATH}
    if [ -d "${location}/lib" ]; then
        export LD_LIBRARY_PATH="${location}/lib:${LD_LIBRARY_PATH}"
    fi
    if [ -d "${location}/include" ]; then
        export CPPFLAGS="-I${location}/include $CPPFLAGS"
    fi
}
# Make shared library $1 reachable from /usr/lib (and /usr/lib64 on
# 64-bit systems) by symlinking the first copy found under /usr/lib*.
CreateLibLink(){
    local lib="$1"
    if [ ! -s "/usr/lib64/$lib" ] && [ ! -s "/usr/lib/$lib" ]; then
        # First match wins (NR==1).
        libdir=$(find /usr/lib /usr/lib64 -name "$lib" | awk 'NR==1{print}')
        if [ "$libdir" != "" ]; then
            if Is64bit; then
                [ ! -d /usr/lib64 ] && mkdir /usr/lib64
                ln -s ${libdir} /usr/lib64/${lib}
                ln -s ${libdir} /usr/lib/${lib}
            else
                ln -s ${libdir} /usr/lib/${lib}
            fi
        fi
    fi
}
# On 64-bit systems, mirror $1/lib as $1/lib64 when only lib exists.
# Side effect: changes the current directory to $1.
CreateLib64Dir(){
    local dir="$1"
    if Is64bit; then
        if [ -s "$dir/lib/" ] && [ ! -s "$dir/lib64/" ]; then
            cd ${dir}
            ln -s lib lib64
        fi
    fi
}
# Print a random 16-character lowercase-hex password (no trailing newline).
GenPassWord(){
    # Read a fixed 64 bytes of entropy.  The original piped
    # `cat /dev/urandom | head -1`, which reads an unbounded amount of
    # data until the first newline byte happens to appear.
    head -c 64 /dev/urandom | md5sum | head -c 16
}
# Print the distro name/version by probing the common release files in
# order (redhat-release, os-release, lsb-release); first hit wins.
GetRelease(){
    [ -f /etc/redhat-release ] && awk '{print ($1,$3~/^[0-9]/?$3:$4)}' /etc/redhat-release && return
    [ -f /etc/os-release ] && awk -F'[= "]' '/PRETTY_NAME/{print $3,$4,$5}' /etc/os-release && return
    [ -f /etc/lsb-release ] && awk -F'[="]+' '/DESCRIPTION/{print $2}' /etc/lsb-release && return
}
# Print the host's public IPv4 address: first non-private address on a
# local interface, then a chain of web fallbacks, finally 127.0.0.1.
GetIp(){
    local ipv4=$( ip addr | egrep -o '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | \
        egrep -v "^192\.168|^172\.1[6-9]\.|^172\.2[0-9]\.|^172\.3[0-2]\.|^10\.|^127\.|^255\.|^0\." | head -n 1 )
    [ -z "${ipv4}" ] && ipv4=$( wget -qO- -t1 -T2 ip.hws.com/getip.asp )
    [ -z "${ipv4}" ] && ipv4=$( wget -qO- -t1 -T2 ipv4.icanhazip.com )
    [ -z "${ipv4}" ] && ipv4=$( wget -qO- -t1 -T2 ipinfo.io/ip )
    if [ "${ipv4}" == "" ]; then
        ipv4='127.0.0.1'
    fi
    printf -- "%s" "${ipv4}"
}
# Run installer command string $1 unless target directory $2 already
# exists, in which case just re-register it in the environment.
# ${cmd} is expanded unquoted on purpose so a multi-word command string
# is split into words.
CheckInstalled(){
    local cmd="$1"
    local location="$2"
    if [ -d "${location}" ]; then
        _warn "${location} already exists, skipped the installation."
        AddToEnv "${location}"
    else
        ${cmd}
    fi
}
# One-stop pre-install setup: RAM check, SELinux off, package-manager
# detection, timezone, toolchain install, clock sync -- in this order.
InstallPreSetting(){
    _check_ram
    _disable_selinux
    _get_package_manager
    _set_timezone
    _install_tools
    _sync_time
}
| true
|
4d777e57309fd737cff6336792db6586cc40ef75
|
Shell
|
rfevre/Sujet02_Systeme
|
/oui-non.sh
|
UTF-8
| 170
| 2.8125
| 3
|
[] |
no_license
|
#!/bin/bash
# Ask until the user answers exactly "oui" or "non", then print the answer.
# Bug fix: the original initialised the answer with `echo "" > reponse`,
# which creates a stray *file* named "reponse" instead of setting the
# shell variable.
reponse=""
while [ "$reponse" != "oui" ] && [ "$reponse" != "non" ]
do
    echo 'Répondre oui ou non'
    read -r reponse || exit 1   # stop on EOF instead of looping forever
done
echo reponse = $reponse
| true
|
fa2c195a69916c9087b6b52281f621b861cc54fe
|
Shell
|
qianqiangliu/ldd3
|
/short/short_unload
|
UTF-8
| 146
| 2.671875
| 3
|
[] |
no_license
|
#!/bin/sh
# Unload the "short" sample driver and remove its stale device node.
module="short"
device="short"
# Unload the kernel module; bail out if rmmod fails.
if ! rmmod "$module"; then
    exit 1
fi
# Remove stale nodes
rm -f /dev/short
| true
|
d014446839f68775ac842be1bc483368787e3b4b
|
Shell
|
chaunceyyann/LinuxSetupScript
|
/scripts/mkco
|
UTF-8
| 7,631
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash
#####################################################################################################
# Program : Make Code
# Author : Chauncey Yan
# Functions : Make the sample code for different code(C,Bash,Perl)
# Revision : 0.8
# Mod date : May 23
# Updates : make multiple file at one time
# Issues :
#####################################################################################################
# Init : filename, cur_date
#####################################################################################################
# Generate one skeleton source file per argument; the language is picked
# from the extension (none/.sh = bash, .pl = perl, .c/.cpp = C/C++).
for i in $@; do
    filepath=$i
    filename=$i
    cur_date=`date | awk '{print $2, $3, $6}'`
    # stripped the function to distinguish the path and file name for better output
    slash=`echo $i | grep "/"`
    if [[ -n $slash ]];then
        filename=`echo $i | awk -F'/' '{print $NF}'`
        filedir=`echo $i | awk -F'/' '{for (i=1;i<NF;i++) printf "%s/",$i}'`
        echo fn:$filename fd:$filedir fp:$filepath
    fi
    filename_short=`echo $filename | awk -F'.' '{print $1}'`
    filename_exten=`echo $filename | awk -F'.' '{print $2}'`
    #####################################################################################################
    # Function : make a long line
    # Input : length
    # Output : line in the file
    # Issues :
    #####################################################################################################
    # Append $1 copies of the current comment character (no newline).
    function longline(){
        for k in `seq 1 $1`; do
            echo -n "$commentc" >> $filepath
        done
    }
    #####################################################################################################
    # Function : function header
    # Input : null
    # Output : header in the file
    # Issues :
    #####################################################################################################
    # Emit a commented function-header template plus a stub "foo" function.
    function func(){
        echo -n "$commentb" >> $filepath
        longline 100
        echo >> $filepath
        echo -e "$commentc" Function'\t': >> $filepath
        echo -e "$commentc" Input'\t\t': >> $filepath
        echo -e "$commentc" Output'\t': >> $filepath
        echo -e "$commentc" Issues'\t': >> $filepath
        longline 100
        echo "$commentd" >> $filepath
        echo -e "$ftype"foo" (){\n\n\treturn ;\n}" >> $filepath
        echo >> $filepath
    }
    #####################################################################################################
    # Function : function declaration
    # Input : null
    # Output : function declaration in the file
    # Issues :
    #####################################################################################################
    # Emit a forward declaration for the stub "foo" (perl/C/C++ only).
    function func_decla(){
        echo $commenta Functions declarations >> $filepath
        echo "$ftype"foo" ();" >> $filepath
        echo >> $filepath
    }
    #####################################################################################################
    # Main :
    # Issues :
    #####################################################################################################
    if [[ -n $filepath ]];then
        # Confirm before clobbering an existing file.
        if [[ -e $filepath ]];then
            echo $filepath exists. Overwrite it? y/n
            read input
            if [ $input != 'y' ] && [ $input != "yes" ];then
                exit
            fi
        fi
        # Select per-language comment markers, sigils and type keywords.
        if [[ $filename_exten == '' ]] || [[ $filename_exten == "sh" ]];then
            shebang="#!$SHELL\n"
            vtype=""
            vindi="$"
            vdefiner=""
            ftype=""
            commenta='#'
            commentb='##'
            commentc='#'
            commentd="##"
        elif [[ $filename_exten == "pl" ]];then
            shebang="#!/wv/pevtools/aoi/bin/perl\n"
            vtype="my "
            ftype="sub "
            vindi="$"
            vdefiner="$"
            commenta='#'
            commentb='##'
            commentc='#'
            commentd="##"
        elif [[ $filename_exten == "c" ]] || [[ $filename_exten == "cpp" ]];then
            unset shebang
            vtype="int "
            ftype="int "
            vindi=""
            vdefiner=""
            commenta="//"
            commentb="/*"
            commentc="*"
            commentd="*/"
        else
            echo Supported file extension types:
            echo "mkmyco filename[empty](Bash) filename.c/cpp(C/C++) filename.pl(Perl)"
            exit
        fi
        # Make a new file
        echo -n -e $shebang > $filepath
        #
        # File header banner with program name, author and date fields.
        echo -n "$commentb" >> $filepath
        longline 100
        echo >> $filepath
        echo -e "$commentc" Program'\t': $filename_short >> $filepath
        echo -e "$commentc" Author'\t': Chauncey Yan >> $filepath
        echo -e "$commentc" Functions'\t': >> $filepath
        echo -e "$commentc" Revision'\t': 0.1 >> $filepath
        echo -e "$commentc" Mod date'\t': $cur_date >> $filepath
        echo -e "$commentc" Updates'\t': >> $filepath
        echo -e "$commentc" Issues'\t': >> $filepath
        longline 101
        echo "$commentc" >> $filepath
        # Init
        echo -e "$commentc" Init'\t\t': >> $filepath
        echo -n "$commentc" >> $filepath
        longline 99
        echo "$commentd" >> $filepath
        # Libs goes here
        # Function block for bash shell script(no possible declaration)
        if [[ $filename_exten == '' ]] || [[ $filename_exten == "sh" ]];then
            func
        elif [[ $filename_exten == 'pl' ]];then
            echo "use Getopt::Long;" >> $filepath
            func_decla
            echo "# Getopt long" >> $filepath
            echo "local (@p_R, @p_C);" >> $filepath
            echo "GetOptions('R=s{2,}'=> \@p_R, 'C=s{2,}' => \@p_C);" >> $filepath
            echo >> $filepath
        elif [[ $filename_exten == 'c' ]];then
            echo -e "#include <stdio.h>\n#include <stdlib.h>\n#include <sys/types.h>\n#include <sys/stat.h>">> $filepath
            echo -e "#include <fcntl.h>\n#include <unistd.h>\n#include <string.h>\n#include <time.h>\n#define BUF_SIZE 256\n">> $filepath
            func_decla
        elif [[ $filename_exten == "cpp" ]];then
            echo -e "#include <iostream>\n#include <cstdlib>\n#include <cstring>\n#include <ctime>\n#define BUF_SIZE 256\n">> $filepath
            func_decla
        fi
        # var=val
        echo -e "$vtype$vdefiner"var="$vindi"val";\n" >> $filepath
        # Main
        echo -n "$commentb" >> $filepath
        longline 100
        echo >> $filepath
        echo -e "$commentc" Main'\t\t': >> $filepath
        echo -e "$commentc" Issues'\t': >> $filepath
        longline 100
        echo "$commentd" >> $filepath
        # Different main styles
        if [[ $filename_exten == '' ]] || [[ $filename_exten == "sh" ]];then
            echo -e "if [[ -z \$var ]];then # -n for not empty" >> $filepath
            echo -e "\techo var is empty\nfi" >> $filepath
            echo -e "for i in \$(seq 1 2 20);do\n\techo number \$i\ndone\n\n" >> $filepath
        elif [[ $filename_exten == "pl" ]];then
            echo -e "if (-e \$filename) {\n\tsystem(\"rm \$filename\");\n}\n" >> $filepath
            echo -e "foreach \$i(@ARGV) {\n\tprint \"\$i \";\n}" >> $filepath
        elif [[ $filename_exten == "c" ]] || [[ $filename_exten == "cpp" ]];then
            echo "int main (int argc, char *argv[]){" >> $filepath
            echo -e "\tint fd, num_write;\n\tchar buf[BUF_SIZE];\n\t// open the file for writing" >> $filepath
            echo -e "\tif ((fd = open(\"file_path\",\n\t\t\t\t\tO_RDWR | O_CREAT | O_APPEND," >> $filepath
            echo -e "\t\t\t\t\tS_IRUSR | S_IWUSR)) == -1){\n\t\tperror("open");\n\t\texit(EXIT_FAILURE);\n\t}\n" >> $filepath
            echo -e "\tif (write(fd, buf, strlen(buf)) != strlen(buf)){\n\t\tperror(\"Can't write the whole buf \");" >> $filepath
            echo -e -n "\t\texit(EXIT_FAILURE);\n\t}\n\tmemset(buf, ">> $filepath
            echo -n "'\0', " >> $filepath
            echo -e "BUF_SIZE);\n\n" >> $filepath
            echo -e "\tfor (int i = 1; i < argc; i++){\n\n\t}" >> $filepath
        else
            echo could not generate main
            exit
        fi
        echo >> $filepath
        # Function block for perl and c
        if [[ $filename_exten == 'pl' ]] || [[ $filename_exten == 'c' ]] || [[ $filename_exten == "cpp" ]];then
            func
        fi
    else
        echo "Please provide filenames and file extension types (supported multiple files creation)."
        echo "EXAMPLE: mkmyco filename.sh(Shell's being used) filename.c(C) filename.cpp(C++) filename.pl(Perl)"
        exit
    fi
    # Check if there is a shebang and chmod to excutable accordingly
    if [[ -n $shebang ]];then
        chmod 755 $filepath
    fi
done # for multiple files
if [[ -z $filepath ]];then
    echo "Please provide filenames and file extension types (supported multiple files creation)."
    echo "EXAMPLE: mkmyco filename.sh(Shell's being used) filename.c(C) filename.cpp(C++) filename.pl(Perl)"
    exit
fi
| true
|
73aaa42a2894cd430fa63f0138c4a814b743c884
|
Shell
|
c-manning/dotfiles
|
/scripts/internet_info.sh
|
UTF-8
| 807
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/bash
# tmux status-line helper (macOS): print the wifi RSSI and the public IP,
# colour-coded for tmux via "#[fg=colourNNN]" directives.
IP=$(ipconfig getifaddr en0)
PUB_IP=$(curl https://canihazip.com/s)
INTERNET='📡'
# RSSI magnitude from the private airport utility (sign and spaces stripped).
internet_info=$(/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport -I | grep CtlRSSI | sed -e 's/^.*://g' | sed 's/-//g' | tr -d " ")
# Pick a colour tier: the stronger the signal, the lower the colour number.
if [[ $internet_info -lt 20 ]]; then
    tier=116
elif [[ $internet_info -lt 30 ]]; then
    tier=117
elif [[ $internet_info -lt 40 ]]; then
    tier=118
elif [[ $internet_info -lt 50 ]]; then
    tier=119
else
    tier=120
fi
echo -n "#[fg=colour${tier}]"
echo -n "$INTERNET[$internet_info] #[fg=colour197]$PUB_IP"
| true
|
6f32508a62f29e877c1c00fe9b52de2828a426c8
|
Shell
|
kdlug/vagrant-ubuntu-docker
|
/scripts/bash-theme.sh
|
UTF-8
| 650
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Agnoster theme for bash
#
# Installs the powerline fonts and the agnoster-bash theme, then wires
# the theme into ~/.bashrc (idempotently).
# Fonts
git clone https://github.com/powerline/fonts.git $HOME/fonts
cd fonts && ./install.sh && rm -r $HOME/fonts
# Copy repo
# Re-clone the theme from scratch into ~/.bash/themes/agnoster-bash.
rm -rf $HOME/.bash/themes/agnoster-bash
mkdir -p $HOME/.bash/themes/agnoster-bash
cd $HOME/.bash/themes/agnoster-bash && git clone https://github.com/speedenator/agnoster-bash.git .
# Add to .bashrc
# The quoted 'EOF' delimiter keeps the snippet literal so the variables
# are expanded at shell-startup time, not now.
if grep -q "agnoster.bash" $HOME/.bashrc; then
    echo "Agnosther theme is already installed."
else
    cat << 'EOF' >> $HOME/.bashrc
export THEME=$HOME/.bash/themes/agnoster-bash/agnoster.bash
if [[ -f $THEME ]]; then
export DEFAULT_USER=`whoami`
source $THEME
fi
EOF
fi
| true
|
736493f5e42c02784fe02eb2712bc52afdc307af
|
Shell
|
jishnu7/Snippets
|
/bash/replace_many.sh
|
UTF-8
| 162
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash
# Replace a string with another in many files at once ('i' flag makes the
# sed substitution case-insensitive; GNU sed extension).
# Bug fix: the original used C-style "//" comment markers, which the
# shell tries to execute as a command named "//".
for f in *.php;
do
    [ -e "$f" ] || continue   # no matches: skip the literal "*.php"
    sed 's/old_text/new_text/i' < "$f" > "$f.new";
done
| true
|
9889c2bc6a973c508913c990bc31b212438eae96
|
Shell
|
onap/vnfsdk-dovetail-integration
|
/install.sh
|
UTF-8
| 3,456
| 3.125
| 3
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
#!/bin/bash
##############################################################################
# Copyright 2018 EuropeanSoftwareMarketingLtd.
# ===================================================================
# Licensed under the ApacheLicense, Version2.0 (the"License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under
# the License
##############################################################################
# vnftest comment: this is a modified copy of
# yardstick/install.sh
# fit for arm64
# Prepares an Ubuntu container: pins APT repos per architecture (adding
# the complementary arch), installs build/test tools, then installs
# vnftest and its Python dependencies.
DOCKER_ARCH="$(uname -m)"
UBUNTU_PORTS_URL="http://ports.ubuntu.com/ubuntu-ports/"
UBUNTU_ARCHIVE_URL="http://archive.ubuntu.com/ubuntu/"
source /etc/os-release
source_file=/etc/apt/sources.list
# Tag every "deb" line with the native arch and register the foreign
# arch with dpkg so cross-arch libs (e.g. libc6:arm64) can install.
if [[ "${DOCKER_ARCH}" == "aarch64" ]]; then
    sed -i -e 's/^deb \([^/[]\)/deb [arch=arm64] \1/g' "${source_file}"
    DOCKER_ARCH="arm64"
    DOCKER_REPO="${UBUNTU_PORTS_URL}"
    EXTRA_ARCH="amd64"
    EXTRA_REPO="${UBUNTU_ARCHIVE_URL}"
    dpkg --add-architecture amd64
else
    sed -i -e 's/^deb \([^/[]\)/deb [arch=amd64] \1/g' "${source_file}"
    DOCKER_ARCH="amd64"
    DOCKER_REPO="${UBUNTU_ARCHIVE_URL}"
    EXTRA_ARCH="arm64"
    EXTRA_REPO="${UBUNTU_PORTS_URL}"
    dpkg --add-architecture arm64
fi
# Source repos are not needed; comment them out.
sed -i -e 's/^deb-src /# deb-src /g' "${source_file}"
VERSION_CODENAME=${VERSION_CODENAME:-trusty}
echo "APT::Default-Release \""${VERSION_CODENAME}"\";" > /etc/apt/apt.conf.d/default-distro
sub_source_file=/etc/apt/sources.list.d/vnftest.list
touch "${sub_source_file}"
# first add xenial repo needed for installing qemu_static_user/xenial in the container
# then add complementary architecture repositories in case the cloud image is of different arch
if [[ "${VERSION_CODENAME}" != "xenial" ]]; then
    REPO_UPDATE="deb [arch="${DOCKER_ARCH}"] "${DOCKER_REPO}" xenial-updates universe"
fi
echo -e ""${REPO_UPDATE}"
deb [arch="${EXTRA_ARCH}"] "${EXTRA_REPO}" "${VERSION_CODENAME}" main universe multiverse restricted
deb [arch="${EXTRA_ARCH}"] "${EXTRA_REPO}" "${VERSION_CODENAME}"-updates main universe multiverse restricted
deb [arch="${EXTRA_ARCH}"] "${EXTRA_REPO}" "${VERSION_CODENAME}"-security main universe multiverse restricted
deb [arch="${EXTRA_ARCH}"] "${EXTRA_REPO}" "${VERSION_CODENAME}"-proposed main universe multiverse restricted" > "${sub_source_file}"
echo "vm.mmap_min_addr = 0" > /etc/sysctl.d/mmap_min_addr.conf
# install tools
apt-get update && apt-get install -y \
    qemu-user-static/xenial \
    bonnie++ \
    wget \
    expect \
    curl \
    git \
    sshpass \
    qemu-utils \
    kpartx \
    libffi-dev \
    libssl-dev \
    libzmq-dev \
    python \
    python-dev \
    libxml2-dev \
    libxslt1-dev \
    supervisor \
    python-pip \
    vim \
    libxft-dev \
    libxss-dev \
    sudo \
    iputils-ping
# On amd64 hosts, pull in the arm64 C library for emulated binaries.
if [[ "${DOCKER_ARCH}" != "aarch64" ]]; then
    apt-get install -y libc6:arm64
fi
apt-get -y autoremove && apt-get clean
git config --global http.sslVerify false
mkdir -p /etc/vnftest
cp "${PWD}/etc/vnftest/vnftest.yaml" /etc/vnftest
# install vnftest + dependencies
easy_install -U pip
pip install -r requirements.txt
pip install .
| true
|
f973039dfd8c912e82fd8da16f257cadef82b5ea
|
Shell
|
jzacsh/yabashlib
|
/src/files.sh
|
UTF-8
| 894
| 4.09375
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Succeed when $1 is the sole entry in its (symlink-resolved) parent dir.
isFileAnOnlyChild() {
  local dir entries
  dir="$(dirname "$1")"
  dir="$(readlink -f "$dir")"
  entries="$(find "$dir" | wc -l)"
  # find prints the directory itself plus each child, so exactly two
  # lines means the only child is $1.
  [ "$entries" -eq 2 ]
}
# Succeed when directory $1 contains no entries at all.
isDirectoryEmpty() {
  local listing
  listing="$(find "$1")"
  # An empty directory yields a single line: the path itself.
  [ "$listing" = "$1" ]
}
# Succeed when file $2 lives underneath directory $1, after resolving
# symlinks on both paths.
# Bug fix: the original quoted the right-hand side of =~, which makes
# bash treat it as a literal string -- including the "^" -- so the test
# could never match a real path.  A glob prefix match (with a "/"
# boundary) expresses the intent directly.
isDirectoryParentOfFile() {
  local parent child
  parent="$(readlink -f "$1")"
  child="$(readlink -f "$2")"
  [[ "$child" == "$parent"/* ]]
}
# Print $1 with the leading "$HOME/" removed (first occurrence only);
# paths outside $HOME are printed unchanged.  No trailing newline.
getRelativeFromHome() {
  local trimmed="${1/$HOME\//}"
  printf '%s' "$trimmed"
}
# Print the first path component of $1 relative to $HOME.
# e.g. $HOME/foo/bar -> "foo"
getFirstChildOfHome() {
  local rel head _rest
  rel="$(getRelativeFromHome "$1")"
  IFS=/ read -r head _rest <<< "$rel"
  printf '%s' "$head"
}
| true
|
433180b1e52567ac5aaa9ee9218dd97d4f2343be
|
Shell
|
blancKaty/alignmentFramework_and_classif
|
/src/script_bash/run_align_deep_pca_gctw_test_correct_class.sh
|
UTF-8
| 817
| 3.453125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Queue alignment of every test sample against each class's training
# data, skipping samples whose output already exists.
#   $1 TRAIN_FOLDER    training data, one numbered subfolder per class
#   $2 TEST_FOLDER     test data, same per-class layout
#   $3 OUTPUT_FOLDER   where aligned samples are written
#   $4 ORIGINAL_FOLDER assigned but not referenced below -- TODO confirm
#   $5 L               alignment parameter, passed through
#   $6 CTW_FOLDER      GCTW implementation folder, passed through
TRAIN_FOLDER=$1
TEST_FOLDER=$2
OUTPUT_FOLDER=$3
ORIGINAL_FOLDER=$4
L=$5
CTW_FOLDER=$6
# One class per training subfolder.
CLASS_NUMBER=$(ls $TRAIN_FOLDER | wc -l );
# seq -w zero-pads class ids to match the folder names (01, 02, ...).
for CLASS_FOLDER in $(seq -w $CLASS_NUMBER)
do
    echo "Queueing alignment for class " $CLASS_FOLDER
    echo
    mkdir -p $OUTPUT_FOLDER/$CLASS_FOLDER
    for TEST_SAMPLE in `ls -d $TEST_FOLDER/${CLASS_FOLDER}/*`
    do
        # Basename of the sample (text after the last "/").
        TEST_NAME=$(echo $TEST_SAMPLE | rev | cut -d '/' -f1 | rev );
        if [ ! -f $OUTPUT_FOLDER/$CLASS_FOLDER/${TEST_NAME} ]
        then
            echo "Sample $TEST_NAME will be aligned.."
            # align every class in parallel
            src/script_bash/align_deep_pca_gctw_interface_test.sh $TRAIN_FOLDER/$CLASS_FOLDER $CLASS_FOLDER ${TEST_SAMPLE} $OUTPUT_FOLDER/$CLASS_FOLDER $L $CTW_FOLDER
        else
            echo "Skip : "${TEST_NAME}
        fi
    done
done
| true
|
c596ad5fcb5b34e752c0a1a63ff7dd79ce085e7a
|
Shell
|
sohilkaushal/bash-scripting
|
/extra programs/file_search.sh
|
UTF-8
| 142
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
# Report whether a file named $1 exists in the current directory.
# Bug fix: iterate a glob directly instead of `$( ls )`, which word-splits
# names containing whitespace.
echo Program for searching a file in a Directory
for i in *
do
    [ -e "$i" ] || continue   # empty directory: skip the literal "*"
    if [ "$i" = "$1" ];
    then
        echo "File Found"
    fi
done
| true
|
2037854a68e761ee19061667d57d94e3eb6385a5
|
Shell
|
idavehuwei/wit
|
/public/plugins/chatbot/scripts/install.sh
|
UTF-8
| 2,015
| 3.375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#! /bin/bash
###########################################
# Install Plugin
# Copyright (2019) Beijing Huaxia Chunsong Technology Co., Ltd.
###########################################
# Links this plugin's sources and view templates into the cosinee
# (contact-center) source tree so they are built with the app.
# constants
## for windows platform
export MSYS=winsymlinks:nativestrict
# Resolve plugin root and the cosinee base dir relative to this script.
baseDir=$(cd `dirname "$0"`;pwd)
rootDir=$(cd -P $baseDir/..;pwd)
upperDir=$(cd -P $rootDir/..;pwd)
COSINEE_BASEDIR=$(cd -P $upperDir/../..;pwd)
pluginName=$(basename $rootDir)
# functions
# main
# Do nothing when the file is sourced rather than executed.
[ -z "${BASH_SOURCE[0]}" -o "${BASH_SOURCE[0]}" = "$0" ] || return
cd $rootDir/..
echo "[plugins] path" `pwd`
if [ -d $COSINEE_BASEDIR ]; then
    PLUGINS_DIR=$COSINEE_BASEDIR/contact-center/app/src/main/java/com/chatopera/cc/plugins
    echo "[plugin] link" $rootDir "as" $pluginName "..."
    if [ ! -d $PLUGINS_DIR ]; then
        mkdir -p $PLUGINS_DIR
    fi
    cd $PLUGINS_DIR
    pwd
    # Replace any previous link/dir for this plugin with a fresh symlink.
    if [ -e $pluginName ]; then
        rm -rf $pluginName
    fi
    echo "[plugin] link source codes"
    ln -s $rootDir/classes $pluginName
    # Install channel views
    if [ -d $rootDir/views/channel/$pluginName ]; then
        echo "[plugin] unlink views for channel"
        VIEW_ADMIN_CHANNEL=$COSINEE_BASEDIR/contact-center/app/src/main/resources/templates/admin/channel
        if [ -d $VIEW_ADMIN_CHANNEL/$pluginName ]; then
            rm -rf $VIEW_ADMIN_CHANNEL/$pluginName
        fi
        cd $VIEW_ADMIN_CHANNEL
        ln -s $rootDir/views/channel/$pluginName .
    fi
    # Install apps view
    if [ -d $rootDir/views/apps/$pluginName ]; then
        echo "[plugin] unlink views for apps"
        VIEW_ADMIN_APPS=$COSINEE_BASEDIR/contact-center/app/src/main/resources/templates/apps
        if [ -d $VIEW_ADMIN_APPS/$pluginName ]; then
            rm -rf $VIEW_ADMIN_APPS/$pluginName
        fi
        cd $VIEW_ADMIN_APPS
        ln -s $rootDir/views/apps/$pluginName .
    fi
    echo "[plugin] install done."
else
    echo "[error] not found cosinee dir."
    exit 2
fi
| true
|
592778310c8c34c4db3da5241f68f49db519f15f
|
Shell
|
ican2002/5g-mec-cloud-gaming
|
/verify.sh
|
UTF-8
| 2,106
| 3.28125
| 3
|
[] |
no_license
|
#!/bin/bash
# CI verification for 5g-mec-cloud-gaming: install Go, ginkgo, Docker and
# docker-compose, then build and unit-test the project components.
echo "5g-mec-cloud-gaming verify test"
DIR=$(cd `dirname $0`;pwd)
OS=$(facter operatingsystem)
#install go
if [ -z "${GO_URL}" ]; then
    GO_URL='https://dl.google.com/go/'
fi
if [ -z "${GO_VERSION}" ]; then
    GO_VERSION='go1.13.4.linux-amd64.tar.gz'
fi
set -e -u -x -o pipefail
echo "---> Installing golang from ${GO_URL} with version ${GO_VERSION}"
wget ${GO_URL}/${GO_VERSION}
sudo rm -rf /usr/local/go
sudo tar -C /usr/local -xzf ${GO_VERSION}
ls /usr/local
export PATH=$PATH:/usr/local/go/bin/
export PATH=$PATH:/usr/bin/
export GOPATH=$HOME/go
export PATH=$PATH:$GOPATH/bin
go version
#export GOPROXY=https://goproxy.io
go get github.com/onsi/ginkgo/ginkgo
go get github.com/onsi/gomega/
ginkgo version
#install docker
#curl -sSL https://get.daocloud.io/docker | sh
case "$OS" in
    Ubuntu)
        sudo apt install docker.io
        ;;
    CentOS|RedHat)
        sudo yum install -y yum-utils device-mapper-persistent-data lvm2
        sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo
        sudo yum install -y docker-ce docker-ce-cli containerd.io
        ;;
esac
sudo service docker start
sudo docker version
sudo mkdir -p /etc/docker
# Configure registry mirrors (China) for faster image pulls.
sudo tee /etc/docker/daemon.json <<-'EOF'
{
"registry-mirrors": ["https://registry.docker-cn.com","http://hub-mirror.c.163.com"]
}
EOF
sudo service docker restart
#install docker-compose
sudo curl -L "https://github.com/docker/compose/releases/download/1.24.1/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
sudo ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
docker-compose --version
# Fetch bbolt into the (pre-modules) GOPATH layout, then build and test.
mkdir $GOPATH/src/go.ectd.io
cd $GOPATH/src/go.ectd.io/
git clone https://github.com/etcd-io/bbolt.git
cd $DIR
git submodule update --init --recursive
make -C 5GCEmulator/ngc build
sudo --preserve-env=PATH make -C 5GCEmulator/ngc test-unit-nef
sudo systemctl daemon-reload
sudo systemctl restart docker
sudo --preserve-env=PATH make -C edgenode networkedge
make -C edgecontroller build-dnscli && make -C edgecontroller test-dnscli
|
b6972271d71f42f6147f6df296b7402e3dce96c9
|
Shell
|
ChronSyn/dotfiles
|
/scripts/package/recipes/clojure.sh
|
UTF-8
| 544
| 3.375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# vim: filetype=sh
# Install Clojure: via Homebrew when that is the main package manager,
# otherwise with the official Linux installer downloaded to a temp folder.
set -euo pipefail

source "${DOTFILES}/scripts/package/aux/recipes.sh"

recipe::abort_if_installed clojure

pm="$(recipe::main_package_manager)"
if [[ $pm = "brew" ]]; then
  brew install clojure
fi  # bug fix: this `if` was closed with `esac`, a bash syntax error

CLJ_VERSION="1.10.1.447"
CLJ_FOLDER="$TEMP_FOLDER/clojure"

mkdir -p "$CLJ_FOLDER" || true
# Bug fix: a bare `pushd` (no argument) fails under `set -e` when the
# directory stack is empty; push the work folder directly instead of
# pushd + cd.
pushd "$CLJ_FOLDER"
curl -O "https://download.clojure.org/install/linux-install-${CLJ_VERSION}.sh"
chmod +x "linux-install-${CLJ_VERSION}.sh"
sudo "./linux-install-${CLJ_VERSION}.sh"
popd
# Bug fix: clean up the download folder; the original removed
# "./$CLJ_VERSION", a path that does not exist.
rm -rf "$CLJ_FOLDER"
| true
|
6badc1dcfa62b1064ff0831046eb2b06a29a312d
|
Shell
|
mgbmeei/eqlite
|
/bin/eqlite
|
UTF-8
| 366
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Thin wrapper around `docker stack` for the eqlite stack:
#   start | stop | ps | shell
action="$1"
if [ "$action" = "start" ]; then
    docker stack deploy -c docker-compose.yml eqlite
elif [ "$action" = "stop" ]; then
    docker stack rm eqlite
elif [ "$action" = "ps" ]; then
    docker stack ps eqlite
elif [ "$action" = "shell" ]; then
    # Attach to the running eqlite_lib container.
    docker attach $(docker ps --filter name=eqlite_lib --format "{{.ID}}")
else
    echo "start|stop|shell|ps"
fi
| true
|
6fa24d85b074b535ca49e342d1680c8ac94b39ac
|
Shell
|
themaire/bash_scripts
|
/picturesOperations/timelapse/manu_avconv.sh
|
UTF-8
| 330
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/bash
# Build a timelapse video on the Raspberry Pi 3 from numbered stills
# (presumably pic0001.jpg, pic0002.jpg, ... -- TODO confirm naming).
# Example:
#   manu_avconv.sh /mnt/disquenoir/raspberrypi/raspistill/20170522_tl
# Parameters:
#   $1  directory holding the pictures; tl.mp4 is written there too
# Bug fix: the input pattern was "pic%04.jpg"; avconv's image-sequence
# pattern requires a complete printf-style conversion such as "%04d".
# Runs in the background (&) so the shell returns immediately.
avconv -r 10 -i "$1/pic%04d.jpg" -r 10 -vcodec libx264 -crf 20 -g 15 "$1/tl.mp4" &
| true
|
780f849a1c63b15d0ef59ab1775dbd8e02e6db64
|
Shell
|
garymcwilliams/gary-shell
|
/tools_lagan/buildshell
|
UTF-8
| 295
| 3.046875
| 3
|
[] |
no_license
|
#! /bin/bash
#; buildshell
#; Setup environment for building frontline

# Pull in the user's minimal shell setup when present.
if [ -r ~/.shell/minimalshell ]; then
    . ~/.shell/minimalshell
fi

# use function to ensure that cygwin forces a copy
export COPY=$(type -P cp)
# Shadow `cp` so every copy is forced (-f).
# Bug fix: forward arguments as "$@" (and quote "$COPY") instead of the
# original unquoted `$COPY -f $*`, which word-split paths containing
# spaces -- especially likely on the cygwin systems this targets.
cp ()
{
    "$COPY" -f "$@"
}
#; $Id: buildshell,v 1.2 2003/10/15 08:52:15 gary Exp $
| true
|
ddea7bf91721c50dc7c5e93fb02cd6fedae439a7
|
Shell
|
oglops/bumblebee-update
|
/setup.sh
|
UTF-8
| 1,203
| 3.625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# http://unix.stackexchange.com/questions/122681/how-can-i-tell-whether-a-package-is-installed-via-yum-in-a-bash-script
function isinstalled {
if yum list installed "$@" >/dev/null 2>&1; then
true
else
false
fi
}
PACKAGE=kmod-nvidia
# install specified kmod-nvidia
array=( $(repoquery kmod-nvidia --show-duplicates --disablerepo="*" --enablerepo="elrepo-archives") )
# array=( $(repoquery kmod-nvidia --show-duplicates --disablerepo="*" --enablerepo="elrepo-archives") )
PS3="Please enter your choice: "
select answer in "${array[@]}"; do
if [ ! -z "$answer" ]; then
echo "you selected $answer"
break
fi
done
# uninstall old kmod-nvidia
if isinstalled $PACKAGE; then
yum remove kmod-nvidia -y
fi
# yum install -y --enablerepo="elrepo-archives" $answer
yum install $answer --disablerepo="*" --enablerepo="elrepo-archives"
# reinstall bumblebee or you won't be able to login to x
yum reinstall bumblebee bumblebee-selinux -y
# Xlib: extension "GLX" missing on display ":0.0".
yum reinstall mesa-libGL mesa-dri-drivers -y
# restore conf files
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
$DIR/restore_conf.sh
# you should reboot now
| true
|
b19972503e609e5425a0fd3f8fd136c64d3bc394
|
Shell
|
ynot16/SamplePractice
|
/BuildScript.sh
|
UTF-8
| 1,267
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/sh
# BulidScript.sh
# BROnlineLearning
#
# Created by bori-applepc on 16/8/25.
# Copyright © 2016年 Bori Information Technology Co., Ltd. All rights reserved.
# 工程名
APP_NAME="pratice"
# 证书
CODE_SIGN_DISTRIBUTION="iPhone Distribution: Guangzhou Bori Information Technology Co., Ltd."
# info.plist路径
project_infoplist_path="./${APP_NAME}/Info.plist"
# 取版本号
bundleShortVersion=$(/usr/libexec/PlistBuddy -c "print CFBundleShortVersionString" "${project_infoplist_path}")
# 取bulid值
budleVersion=$(/usr/libexec/PlistBuddy -c "print CFBundleVersion" "${project_infoplist_path}")
DATE="$(date +%Y%m%d)"
IPANAME="${APP_NAME}_V${bundleShortVersion}_${DATE}.ipa"
# 要上传的ipa文件路径
IPA_PATH="$HOME/${IPANAME}"
echo ${IPA_PATH}
echo "${IPA_PATH}">> test.txt
echo "=================clean================="
xcodebuild -workspace "${APP_NAME}.xcworkspace" -scheme "${APP_NAME}" -configuration 'Release' clean
echo "+++++++++++++++++build+++++++++++++++++"
xcodebuild -workspace "${APP_NAME}.xcworkspace" -scheme "${APP_NAME}" -sdk iphoneos -configuration 'Release' CODE_SIGN_IDENTITY="${CODE_SIGN_DISTRIBUTION}" SYMROOT='$(PWD)'
xcrun -sdk iphoneos PackageApplication "./Release-iphoneos/${APP_NAME}.app" -o ~/"${IPANAME}"
| true
|
ded7df513edd8e83e72074d736b95450c00afef5
|
Shell
|
gjianw217/MoonIC2
|
/panel_menu/res/network~
|
UTF-8
| 1,157
| 3.734375
| 4
|
[] |
no_license
|
#!/bin/sh
#
# manage network interfaces and configure some networking options
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
if ! [ -x /sbin/ifup ]; then
exit 0
fi
do_network_up(){
%if DRV0%
if [ ! -f %DRV0% ];
then
echo "%DRV0% no driver!"
return 1
fi
insmod %DRV0%
%endif%
%if DRV1%
if [ ! -f %DRV1% ];
then
echo "%DRV1% no driver!"
return 1
fi
insmod %DRV1%
%endif%
sleep 3
%if mac%
ifconfig %ifname% hw ether %mac%
%endif%
ifconfig %ifname% up
%if wlan%
iwconfig %ifname% essid %essidname% %key%
sleep 2
%endif%
%if dhcp%
udhcpc -b -i %ifname%
%endif%
%if ipaddr%
ifconfig %ifname% %ipaddr%
%endif%
return 0
}
do_network_down(){
ifconfig %ifname% down
%if DRV0%
rmmod %DRV0%
%endif%
%if DRV1%
rmmod %DRV1%
%endif%
%if dhcp%
killall udhcpc
%endif%
}
case "$1" in
start)
do_network_up
;;
stop)
do_network_down
;;
force-reload|restart)
echo -n "Reconfiguring network interfaces... "
do_network_down
sleep 2
do_network_up
echo "done."
;;
*)
echo "Usage: /etc/init.d/network {start|stop|restart|force-reload}"
exit 1
;;
esac
exit 0
| true
|
31af41f55bfca08a91fe282c4d9e3281f749e17b
|
Shell
|
kmh11/dotfiles
|
/.bashrc
|
UTF-8
| 485
| 3.0625
| 3
|
[] |
no_license
|
#
# ~/.bashrc
#
# If not running interactively, don't do anything
[[ $- != *i* ]] && return
alias ls='ls --color=auto'
PS1='[\u@\h \W]\$ '
if [[ $neofetch == true ]]; then
neofetch
fi
if [[ $kmhzonehtop == true ]]; then
while true; do ssh -t ubuntu@kmh.zone htop; done
fi
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion
shopt -s dotglob
| true
|
9204057bdf9f7970f3bce938fde4b6a9a41b7a55
|
Shell
|
clrgit/shellopts
|
/bin/mkdoc
|
UTF-8
| 252
| 2.515625
| 3
|
[] |
no_license
|
#!/usr/bin/bash
set -e
# Generate github-like page
(
cd doc
{
echo '<link rel="stylesheet" type="text/css" href="stylesheet.css">'
pandoc ../README.md
} >index.html
)
# Generate rdoc
rdoc --output=rdoc --force-output lib
| true
|
b84bb99e16de0497e7a635847f461bac7a2071cd
|
Shell
|
newtoncodes/docker-lib-mysql
|
/dev/build.sh
|
UTF-8
| 316
| 3.234375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
dir=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd)
v="$1"
if [ "$v" = "" ] || [ "$v" = "5.7" ]; then
cd ${dir}/../5.7 && docker build -t newtoncodes/mysql .
cd ${dir}/../5.7 && docker build -t newtoncodes/mysql:5.7 .
else
cd ${dir}/../${v} && docker build -t newtoncodes/mysql:${v} .
fi
| true
|
1d2669587eed3b75639a44a5c64d19f5c7168e0a
|
Shell
|
plojyon/tc
|
/tc.sh
|
UTF-8
| 11,822
| 3.875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#?Usage:
#? tc.sh [-h] [clean] [<tests_path>] <main_c_program> [<included_c_programs> ...]
#? [-t | -T <n> | -f <s> | -n <I> ]
#? actions:
#? clean Delete diff and res files
#? -h, --help Help on command usage
#? -t, --timed Display time taken to execute the code
#? -T <n>, --timeout <n> Larges amount of seconds allowed before
#? timeout
#? Default: 1
#? -n <I> Interval range of tests
#? using ~ instead of - selects complement
#? a-b (a, b]
#? a- (a, ...)
#? -b (..., b]
#? ~b (b, ...)
#? a~b (..., a]U(b, ...)
#? Default: '-' (all)
#? -f <s>, --format <s> Format of test data prefix
#? Default: 'test'
#? -e <f>, --entry <f> Default entry function for c file
#? Default: 'main'
TC_PATH="."
TESTS=""
ENTRY_FUNCTION="main"
FILE_PREFIX="test"
TIMEOUT_VAL=1 #in seconds
KILL_AFTER=$((TIMEOUT_VAL+2))
TIMED=0
### CONSTANTS, inspired by FRI Makefile
CC="gcc"
CCFLAGS="-std=c99 -pedantic -Wall"
LIBS="-lm"
DIFF_TIMEOUT=0.5
LOG="/dev/null"
TIMEOUT_SIGNAL=124
SHELL="/bin/bash"
OK_STRING="\033[1;32mOK\033[0;38m"
FAILED_STRING="\033[1;31mfailed\033[0;38m"
TIMEOUT_STRING="\033[1;35mtimeout\033[0;38m"
### CHECKING IF ALL THE PROGRAMS EXIST
REQUIRED_PROGRAMS=("awk" "basename" "bc" "cut" "date" "diff" "find" "grep" "realpath" "sort" "timeout" "$CC")
for PROGRAM in ${REQUIRED_PROGRAMS[@]}; do
if ! command -v $PROGRAM &> /dev/null; then
echo "Error: '$PROGRAM' not found, exiting" >&2
exit 1
fi
done
function print_help
{
echo " tc.sh [-h] [clean] [<tests_path>] <main_c_program> [<included_c_programs> ...]"
echo " [-t | -T <n> | -f <s> | -n <I> ] "
echo
echo " actions:"
echo " clean Delete diff and res files"
echo
echo " -h, --help Help on command usage"
echo " -t, --timed Display time taken to execute the code"
echo " -T <n>, --timeout <n> Larges amount of seconds allowed before"
echo " timeout"
echo " Default: 1"
echo " -n <I> Interval range of tests"
echo " using ~ instead of - selects complement"
echo " a-b (a, b]"
echo " a- (a, ...)"
echo " -b (..., b]"
echo " ~b (b, ...)"
echo " a~b (..., a]U(b, ...)"
echo " Default: '-' (all)"
echo " -f <s>, --format <s> Format of test data prefix"
echo " Default: 'test'"
echo " -e <f>, --entry <f> Default entry function for c file"
echo " Default: 'main'"
}
### ARGUMENT PARSING
POS_PARAMS=""
while (( "$#" )); do
case "$1" in
-h|--help)
print_help
exit 0
;;
-t|--timed)
TIMED=1
shift
;;
-T|--timeout)
if [ -n "$2" ] && [ ${2:0:1} != "-" ]; then
TIMEOUT_VAL=$2
shift 2
else
echo "Error: Missing value for $1" >&2
print_help
exit 1
fi
;;
-e|--entry)
if [ -n "$2" ] && [ ${2:0:1} != "-" ]; then
ENTRY_FUNCTION=$2
shift 2
else
echo "Error: Missing value for $1" >&2
print_help
exit 1
fi
;;
-f|--format)
if [ -n "$2" ] && [ ${2:0:1} != "-" ]; then
FILE_PREFIX=$2
shift 2
else
echo "Error: Missing value for $1" >&2
print_help
exit 1
fi
;;
-n)
if [ -n "$2" ]; then
if [[ $2 =~ ^([1-9][0-9]*)?(\-|\~)([1-9][0-9]*)?$ ]]; then
if [[ ${#2} -le 1 ]]; then # CASE '-n -'' or '-n ~'
TESTS=""
else
TESTS=$2
fi
shift 2
else
echo "Error: Invalid value for $1" >&2
print_help
exit 1
fi
else
echo "Error: Missing value for $1" >&2
print_help
exit 1
fi
;;
-*|--*=) # unsupported flags
echo "Error: Unexpected argument $1" >&2
print_help
exit 1
;;
*) # preserve positional arguments
POS_PARAMS="$POS_PARAMS $1"
shift
;;
esac
done
# set positional arguments in their proper place
eval set -- "$POS_PARAMS"
# Check for action/-s
ACTION="$1"
case "${ACTION^^}" in
CLEAN) #
shift # consume action arg
if [ $# -gt 1 ]; then
echo "Invalid number of arguments" >&2
print_help
exit 1
elif [ $# -eq 1 ]; then
TC_PATH="$1" # CASE tc clean <path>
else
TC_PATH="." # CASE tc clean
fi
;;
*) #default is to test
if [ $# -gt 1 ]; then
TC_PATH="$1" # CASE tc [<path>] <main> [<additional> ...]
shift
fi
# main_file
MAIN_FILE="$1"
shift
INCLUDE_FILES="$@" # Additional files to get compiled
;;
esac
### VALIDATE
## Validate path
if [ ! -d "$TC_PATH" ]; then
echo "Error: '$TC_PATH' is not a directory" >&2
print_help
exit 1
fi
# absolute path for safety
TC_PATH=$(realpath "$TC_PATH")
### HELPER FUNCTIONS
function remove_leading_dotslash { echo "$@" | sed -e "s/^\.\///g"; }
function get_test_num {
echo "$@" | grep -Po "(?<=$FILE_PREFIX)([0-9]+)(?=.c)";
}
function rm_extension { echo "$@" | grep -Po "(.*)(?=\.)"; }
function get_base_name { rm_extension $(basename "$1"); }
### CLEANING
if [[ ${ACTION^^} = "CLEAN" ]]; then
file_matches=$(find $TC_PATH -maxdepth 1 -type f | grep -E $FILE_PREFIX[0-9]+\.\(res\|diff\) | sort) # Search for tests
if [ $(echo "$file_matches" | wc -w) = "0" ]; then
echo "Nothing to remove"
exit 0
fi
echo "$file_matches"
echo "Remove all [y/n]?"
read -p "> " ans
if [ ${ans^^} = "Y" ]; then
#rm "$file_matches"
for f in "$file_matches"; do
rm $(remove_leading_dotslash "$f")
done
fi
exit 0
fi
### OS-specific
function get_os_type
{
case "$OSTYPE" in
#solaris*) echo "SOLARIS" ;;
darwin*) echo "OSX" ;; #
linux*) echo "LINUX" ;; #
#bsd*) echo "BSD" ;;
msys*) echo "WINDOWS" ;;
*) echo "UNSUPPORTED" ;;
esac
}
os_type=$(get_os_type)
if [ "$os_type" = "LINUX" ]; then
get_exe() { r=$(realpath $1); chmod +x $r; echo "$r"; }
elif [ "$os_type" = "OSX" ]; then # TODO: gdate, gdiff, gtimeout?
get_exe() { r=$(realpath $1); chmod +x $r; echo "$r"; }
elif [ "$os_type" = "WINDOWS" ]; then
get_exe() { r=$(realpath $1); echo "$r.exe"; }
else
echo "Unsupported OS" >&2 #
exit 1
fi
### COMPILING FUNCTION
function compile_cc {
abs_target=$(realpath "$1")
base_name=$(rm_extension $abs_target)
base_target=$(basename "$1")
# echo "$CC $CCFLAGS $INCLUDE_FILES $abs_target -o $base_name $LIBS --entry=$ENTRY_FUNCTION"
$CC $CCFLAGS $INCLUDE_FILES $abs_target -o $base_name $LIBS --entry=$ENTRY_FUNCTION
exit_code=$?
if [[ $exit_code -ne 0 ]]; then
echo -e "Compiling file $base_target $FAILED_STRING, exiting" >&2
exit 1
fi
echo "Compiled $base_target"
}
### DETECTING TYPE OF TESTING
test_c_files=$(find $TC_PATH -maxdepth 1 -type f | grep -E $FILE_PREFIX[0-9]+\.c | sort ) # Search for tests
test_in_files=$(find $TC_PATH -maxdepth 1 -type f | grep -E $FILE_PREFIX[0-9]+\.in | sort ) # Search for tests
test_c_n=$(echo "$test_c_files" | wc -w | bc)
test_in_n=$(echo "$test_in_files" | wc -w | bc)
if [ $test_c_n -eq 0 ] && [ $test_in_n -eq 0 ];then
echo "No tests found in $TC_PATH." >&2
exit 1
elif [ $test_in_n -eq 0 ]; then
echo "Using $test_c_n $FILE_PREFIX.c files."
type_testing=2
elif [ $test_c_n -eq 0 ]; then
echo "Using $test_in_n $FILE_PREFIX.in files."
if [ -z $MAIN_FILE ]; then
echo "Missing main c file!" >&2
exit 1
fi
type_testing=1
else
echo "Found differend tests. Select type of testing"
echo "[1] #$test_in_n $FILE_PREFIX.in files"
echo "[2] #$test_c_n $FILE_PREFIX.c files?"
read -p "> " type_testing
if [ "$type_testing" != "1" ] && [ "$type_testing" != "2" ];then
echo "Invalid option: \"$type_testing\", exiting." >&2
exit 1
fi
fi
if [ "$type_testing" == "1" ];then
test_cases="$test_in_files"
elif [ "$type_testing" == "2" ]; then
test_cases="$test_c_files"
fi
### FILTER TEST CASES
if [ ! -z "$TESTS" ]; then
CUT_FLAGS=""
if [[ $TESTS == *"~"* ]];then
CUT_FLAGS="$CUT_FLAGS --complement"
TESTS=${TESTS/"~"/"-"} # Replace compliment
fi
# echo "$CUT_FLAGS $TESTS"
test_cases=$(echo "$test_cases" | cut -d$'\n' -f$TESTS $CUT_FLAGS)
unset CUT_FLAGS
fi
# echo "$test_cases"
### CONDITIONAL COMPILING
echo " == COMPILING =="
if [ "$type_testing" == "1" ];then
compile_cc "$MAIN_FILE"
exe_name=$(get_exe $(rm_extension $MAIN_FILE))
elif [ "$type_testing" == "2" ]; then
INCLUDE_FILES="$MAIN_FILE $INCLUDE_FILES" # Main file is just an include
for file in $test_cases; do
compile_cc "$file"
done
fi
all_tests=0
ok_tests=0
echo
echo " == TESTING =="
for test_case in $test_cases
do
# Get variables for this case
base_name=$(rm_extension "$test_case")
file_name=$(basename $base_name)
#i=$(get_test_num $file_name)
#echo "$i $file_name"
#echo "$base_name $test_case $file_name"
cbase_name=$(realpath "$base_name")
out_file="$base_name.out"
# Check if .out exists
if ! [ -f "$out_file" ]; then
echo "Missing $out_file for $test_case"
continue
else
if [ "$type_testing" == "2" ];then
### TESTING .c .out
exe_name=$(get_exe $cbase_name)
start_time=$(date +%s.%N)
$(timeout -k $KILL_AFTER $TIMEOUT_VAL $exe_name > $cbase_name.res 2>&1) 2> /dev/null
exit_code=$?
end_time=$(date +%s.%N)
else
### TESTING .in, .out
in_file="$cbase_name.in"
if ! [ -f "$in_file" ]; then
echo "Missing $in_file for $test_case"
continue
fi
start_time=$(date +%s.%N)
$(timeout -k $KILL_AFTER $TIMEOUT_VAL $exe_name < $in_file > $cbase_name.res 2>&1) 2> /dev/null
exit_code=$?
end_time=$(date +%s.%N)
fi
if [[ $exit_code == $TIMEOUT_SIGNAL ]]; then
echo -e "${file_name^} -- $TIMEOUT_STRING [> $TIMEOUT_VAL s]"
else
if [ $TIMED -eq 1 ]; then
timeDifference=" [$(echo "scale=2; $end_time - $start_time" | bc | awk '{printf "%.2f\n", $0}') s]"
else
timedDifference=""
fi
timeout -k $DIFF_TIMEOUT $DIFF_TIMEOUT diff --ignore-trailing-space $base_name.out $base_name.res > $base_name.diff
exit_code=$?
if [[ $exit_code == $TIMEOUT_SIGNAL ]]; then
echo -e "${file_name^} -- $FAILED_STRING (diff errored)$timeDifference"
elif [ -s "$base_name.diff" ]; then
echo -e "${file_name^} -- $FAILED_STRING$timeDifference"
else
echo -e "${file_name^} -- $OK_STRING$timeDifference"
((ok_tests+=1))
fi
fi
((all_tests+=1))
fi
done
echo "Result: $ok_tests / $all_tests"
| true
|
8391edd5b5749d43515ec2d31e2e5a06ea38eb4e
|
Shell
|
nickmoran06/Templates_and_scripts
|
/Ubuntu_14.04/holberton/Betty_holbi_C
|
UTF-8
| 1,026
| 3.484375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
cd
git clone https://github.com/holbertonschool/Betty.git
cd Betty
sudo ./install.sh
echo "#!/bin/bash
# Simply a wrapper script to keep you from having to use betty-style
# and betty-doc separately on every item.
# Originally by Tim Britton (@wintermanc3r), multiargument added by
# Larry Madeo (@hillmonkey)
BIN_PATH="/usr/local/bin"
BETTY_STYLE="betty-style"
BETTY_DOC="betty-doc"
if [ "$#" = "0" ]; then
echo "No arguments passed."
exit 1
fi
for argument in "$@" ; do
echo -e "\n========== $argument =========="
${BIN_PATH}/${BETTY_STYLE} "$argument"
${BIN_PATH}/${BETTY_DOC} "$argument"
done" > betty
chmod a+x betty
sudo mv betty /bin/
# configuration of .emacs to adapt of betty style
cd
# Tabs configuration
echo "(setq c-default-style "bsd"
c-basic-offset 8
tab-width 8
indent-tabs-mode t)" >> .emacs
# Extension of lines configuration
echo "(require 'whitespace)
(setq whitespace-style '(face empty lines-tail trailing))
(global-whitespace-mode t)" >> .emacs
| true
|
512fbd8ff2e078b57163c17fbe69fa8849be4495
|
Shell
|
Senior-Design-May1601/config
|
/ansible/Splunk-init
|
UTF-8
| 983
| 3.46875
| 3
|
[] |
no_license
|
#! /usr/bin/env bash
set -o errexit
set -o nounset
set -o pipefail
set -o noclobber
openssl genrsa -out privkey.pem 2048
echo ""
echo "TLS private key written to privkey.pem"
echo ""
echo "Next we'll generate a certificate signing request (CSR)."
echo "You'll be asked a bunch of questions. Most of them don't matter and can be left blank."
echo "The one thing you _MUST_ do is:"
echo ""
echo " Set \"Common Name (e.g. server FQDN or YOUR name) []:\" to localhost"
echo ""
echo "Do not set the challenge."
echo ""
read -n 1 -p "Press any key to continue..."
echo ""
openssl req -new -key privkey.pem -out csr.pem
echo ""
openssl x509 -req -days 365 -in csr.pem -signkey privkey.pem -out cert.pem
echo ""
echo "TLS certificate written to cert.pem"
echo ""
echo "Creating SSH keys. Do not set a password"
echo ""
read -n 1 -p "Press any key to continue..."
echo ""
ssh-keygen -t ed25519 -a 100 -f ssh-dummy-key
echo ""
echo "SSH keys written to ssh-dummy-key and ssh-dummy-key.pub"
| true
|
3ec4358bbb7ae9a726d2dcc7aa788849adaedc77
|
Shell
|
OpenAtWork/gitlab-docker
|
/firstrun.sh
|
UTF-8
| 1,476
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Set these parameters
mysqlRoot=RootPassword
# === Do not modify anything in this section ===
# Regenerate the SSH host key
/bin/rm /etc/ssh/ssh_host_*
dpkg-reconfigure openssh-server
password=$(cat /srv/gitlab/config/database.yml | grep -m 1 password | sed -e 's/ password: "//g' | sed -e 's/"//g')
# ==============================================
# === Delete this section if restoring data from previous build ===
# Precompile assets
cd /home/git/gitlab
su git -c "bundle exec rake assets:precompile RAILS_ENV=production"
rm -R /srv/gitlab/data/mysql
mv /var/lib/mysql-tmp /srv/gitlab/data/mysql
# Start MySQL
mysqld_safe &
sleep 5
# Initialize MySQL
mysqladmin -u root --password=temprootpass password $mysqlRoot
echo "CREATE USER 'git'@'localhost' IDENTIFIED BY '$password';" | \
mysql --user=root --password=$mysqlRoot
echo "CREATE DATABASE IF NOT EXISTS gitlabhq_production DEFAULT CHARACTER SET \
'utf8' COLLATE 'utf8_unicode_ci';" | mysql --user=root --password=$mysqlRoot
echo "GRANT SELECT, LOCK TABLES, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, \
ALTER ON gitlabhq_production.* TO 'git'@'localhost';" | mysql \
--user=root --password=$mysqlRoot
cd /home/git/gitlab
su git -c "bundle exec rake gitlab:setup force=yes RAILS_ENV=production"
sleep 5
su git -c "bundle exec rake db:seed_fu RAILS_ENV=production"
# ================================================================
# Delete firstrun script
rm /srv/gitlab/firstrun.sh
| true
|
e70c5fe0809396682a483646167b6f16d182d376
|
Shell
|
petronny/aur3-mirror
|
/myroundcube/PKGBUILD
|
UTF-8
| 724
| 2.515625
| 3
|
[] |
no_license
|
# $Id$
# Contributor: fila pruda.com
pkgname=myroundcube
pkgver=0.7.2.v1.0
pkgrel=2
pkgdesc="Plugins for Roundcube web-based mail client"
arch=('any')
url="http://myroundcube.googlecode.com"
license=('GPL')
depends=('roundcubemail>=0.7.2')
source=(http://myroundcube.googlecode.com/files/roundcube-0.7.2-bundle-v.1.0.zip)
md5sums=('7fa332ce791ce1899259ce80568597b6')
build() {
_instdir=${pkgdir}/usr/share/webapps/roundcubemail
install -dm755 ${_instdir}
cp -r ${srcdir}/trunk/plugins ${_instdir}
# fix all the 644 perms on files
find ${_instdir} -type f -exec chmod 644 {} \;
# some dirs need be rewritable
chown http:http ${_instdir}/plugins/calendar/temp
chown http:http ${_instdir}/plugins/captcha/temp
}
| true
|
9e63c4796e6e4eb4d873f058c6ae92b6569dbafb
|
Shell
|
ronan22/obs-light
|
/obslight-fakeobs/tools/post_import_operations.sh
|
UTF-8
| 2,257
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
PROJECT=$1
. tools/common.sh
echo_green "Updating 'latest' links in obs-repos..."
cd obs-repos
LATEST=`find . -maxdepth 1 -name "$PROJECT*" -printf "%f\n" | grep -v "$PROJECT:latest" | sort | tail -n 1`
rm -f "$PROJECT:latest"
ln -sf $LATEST "$PROJECT:latest"
cd ..
echo_green "Updating 'latest' links in releases..."
cd releases
RELEASE=`find . -maxdepth 1 -type d -printf "%f\n" | sort | tail -n 1`
rm -f latest
ln -sf $RELEASE latest
cd ..
echo_green "Updating fakeobs project mappings..."
if [ -f mappings.xml ]
then
cp -f mappings.xml mappings.xml.`date +%Y%m%d%H%M%S`
else
touch mappings.xml
fi
bash tools/updatemappings.sh $PROJECT > mappings_new.xml
mv mappings_new.xml mappings.xml
echo
echo_green "Updating packages-git/repos.lst..."
find packages-git/ -mindepth 2 -maxdepth 2 -type d -printf "%p\n" | sort > packages-git/repos.lst
echo_green "Updating packages-git/mappingscache.xml (may be long)..."
python tools/makemappings.py packages-git/repos.lst packages-git/mappingscache.xml
if [ "$?" -ne "0" ]
then
echo_red " Updating mappingscache failed!"
echo_red " This may append if the project contains big files and there is not enough free memory."
echo_red " Please cd to '/srv/fakeobs' and run"
echo_red " python tools/makemappings.py packages-git/repos.lst packages-git/mappingscache.xml"
fi
ONOBSAPPLIANCE=`grep -q "Intel OTC" /var/config_obs 2>/dev/null && echo 1 || echo 0`
if [ $ONOBSAPPLIANCE -eq 1 ]
then
echo_green "This machine seems to be an OBS appliance"
echo_green " trying to automatically create a link to the fakeobs..."
tools/create_fakeobs_link.sh
fi
DISTRUBUTIONSPATH="/srv/www/obs/api/files/distributions.xml"
echo_green "Updating OBS' 'distributions.xml' file..."
if [ -f "$DISTRUBUTIONSPATH" ]
then
cp -f theme/fakeobs.png /srv/www/obs/webui/public/images/distributions/
# here we assume there is only one target
TARGET=`/bin/ls obs-repos/$PROJECT:latest/`
tools/addfakeobsdistrib.py "$DISTRUBUTIONSPATH" "$PROJECT" "$TARGET"
else
echo_yellow "$DISTRUBUTIONSPATH not found."
echo_yellow "You will have to manually update this file on your OBS server."
echo_yellow "See http://en.opensuse.org/openSUSE:Build_Service_private_installation#Add_Repositories_targets"
fi
| true
|
6357f7c3e6340f1862929e3f43ace15ea5f0b08a
|
Shell
|
le0pard/presentations
|
/brug_2015/examples/nbody/run.sh
|
UTF-8
| 861
| 3
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/sh
ITERATIONS=50000000
echo "Cleanup"
mkdir -p ./.bins
rm -fr ./.bins/*
echo "Compiling c..."
gcc -pipe -Wall -O3 -fomit-frame-pointer -march=native -mfpmath=sse -msse3 nbody.c -o nbody -lm
mv nbody ./.bins/nbody_gcc
echo "Compiling rust..."
rustc -C opt-level=3 -C target-cpu=core2 nbody.rs
mv nbody ./.bins/nbody_rust
echo "Compiling go..."
go build nbody.go
mv nbody ./.bins/nbody_go
echo "Compiling crystal..."
crystal build nbody.cr --release
mv nbody ./.bins/nbody_cr
echo "#################### Benchmarking ####################"
echo ""
echo "C time"
time ./.bins/nbody_gcc $ITERATIONS
echo ""
echo "Rust time"
time ./.bins/nbody_rust $ITERATIONS
echo ""
echo "Golang time"
time ./.bins/nbody_go $ITERATIONS
echo ""
echo "Crystal time"
time ./.bins/nbody_cr $ITERATIONS
echo ""
echo "#################### Finished work ####################"
| true
|
ae7c5bfed44dfee0397b15519e3b7b36431f636e
|
Shell
|
stjordanis/mit-d3m-ta2
|
/scripts/run_on_d3m.sh
|
UTF-8
| 314
| 2.953125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
function echodo() {
echo $*
$*
}
docker build --build-arg UID=$UID -t mit-d3m-ta2 .
COMMANDS=${*:-/bin/bash}
DATASETS=/home/pythia/Projects/d3m/datasets/seed_datasets_current/
echodo docker run -i -t --rm -v $DATASETS:/input -v $(pwd):/home/user -w /home/user -u $UID mit-d3m-ta2 $COMMANDS
| true
|
86725412c64055fc1584bdf61d84ba2e24c4cb63
|
Shell
|
ClickHouse/ClickHouse
|
/tests/queries/0_stateless/00097_long_storage_buffer_race_condition_mt.sh
|
UTF-8
| 340
| 2.59375
| 3
|
[
"BSL-1.0",
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Tags: race
CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
export NO_SHELL_CONFIG=1
for _ in {1..4}; do
"$CURDIR"/00097_long_storage_buffer_race_condition.sh > /dev/null 2>&1 &
done
wait
$CLICKHOUSE_CLIENT -q "SELECT 'Still alive'";
| true
|
44843390aae08e51814c922e275328f0416f4164
|
Shell
|
google-code/aos
|
/build
|
UTF-8
| 957
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
# Compile MBR, VBR and kernel
echo " # Compile MBR, VBR and kernel..."
nasm mbr.asm -o mbr.bin
nasm vbr.asm -o vbr.bin
nasm kernel.asm -o kernel.bin
# Compile core
echo " # Compiling core..."
nasm ./core/core.asm -o ./fsgen/rootfs/core
# Recompile FSGEN
echo " # Recompiling FSGEN"
fsgen/build-fsgen
# Generate file system
echo " # Generating file system..."
./fsgen/fsgen ./fsgen/rootfs ./rootfs.hdd 256 2>&1 > fsgen.log
# Generate final hard disk image
echo " # Generating final hard disk image..."
cat mbr.bin vbr.bin kernel.bin rootfs.hdd > image.hdd
# cat mbr.bin vbr.bin 111 rootfs.hdd > hdd
# Clean up
echo " # Cleaning up..."
rm -rf *.bin
# Start QEMU or BOCHS
if [ "$1" = "bochs" ]; then
echo " # All done! Starting Bochs..."
bochs -f aos.bochs-conf -q -log bochs.log
else
echo " # All done! Starting QEmu..."
qemu-system-x86_64 -hda image.hdd -monitor stdio -D qemu.log
fi
# qemu-system-x86_64 -hda hdd
| true
|
923ea91f9b43114de5d40bc4bac67f3aac65603d
|
Shell
|
cristian-sima/Wisply
|
/util/ubuntu/install/src/settings.fun
|
UTF-8
| 491
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
# The name of the database
database="wisply"
# The default MySQL root username
MySQLUsername="root"
# The default MySQL root password
MySQLPassword="Pv1XL_De_zHdhgjWu"
# The name of username for database
databaseUsername="root"
# The password of the username
databasePassword="root"
# It states if the installing directory will be or not deleted after installing
# It can be NO or YES
deleteDirectory="NO"
# The pasth to installing directory
installingDirectory="/install"
| true
|
0258b13602b35bb40e66725c1f16f299c24d5ee9
|
Shell
|
afester/CodeSamples
|
/Shell/bash/returnPipe.sh
|
UTF-8
| 234
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
returnSimple() {
local __resultvar=$1
echo "Sub shell level: $BASH_SUBSHELL"
return 42
}
echo "Sub shell level: $BASH_SUBSHELL"
returnSimple theResult | cat
result=${PIPESTATUS[0]}
echo ${result}
echo Done.
| true
|
b390f503ef9ed5a5ccea55923232a7b140a7f16d
|
Shell
|
1981shaked/test2
|
/bin/IndIbm016/refresh_report
|
UTF-8
| 5,517
| 3.796875
| 4
|
[] |
no_license
|
#!/bin/ksh
#===================================================================================
# Name : refresh_report
# Desrciption : Checking the last refresh log per given version.
# Informs about errors and new refreshed files in the refresh process
# Report sent to given e-mail addresses
#
# Usage : refresh_report <java/Unix/online> <version> [<mail_address>]
# Author : Malka Sulimani
#===================================================================================
############ Checking input ##################
if [ $# != 2 -a $# != 2 ] ; then
echo "ERROR : Usage: refresh_report <version> [<mail_address>]"
echo "Example: refresh_report 311 nbuildreports@amdocs.com"
exit
fi
Mail="N"
if [ $# -eq 2 ] ; then
Mail="Y"
MailList=$2
fi
ver=$1
refresh_dir=~/log.harvest/$ver
if [ ! -d $refresh_dir ]
then
echo " There is no folder ~/log.harvest/$ver"
echo " Please check if you typed the correct version"
exit 1
fi
temp=`find $refresh_dir/. -type f -name "manager*.log"`
if [ "$temp" = "" ]
then
echo "No refresh was performed in $ver "
exit 1
fi
#####################################################
moduleFile="$HOME/product/${CCPROD}/v${ver}/config/${CCPROD}_v${ver}_modbo.dat"
if [[ ! -f $moduleFile ]]
then
echo "\n File $moduleFile not exist.\n Please create this file\n"
exit 0
fi
#count_bb = 0
log_refresh_file=/tmp/log_refresh_sum_$ver
rm -f $log_refresh_file
touch $log_refresh_file
log_deleted_file=/tmp/log_deleted_sum_$ver
rm -f $log_deleted_file
touch $log_deleted_file
log_unknown_file=/tmp/log_unknown_sum_$ver
rm -f $log_unknown_file
touch $log_unknown_file
log_errorlist_file=/tmp/log_errorlist_sum_$ver
rm -f $log_errorlist_file
touch $log_errorlist_file
moduleList=`cat $moduleFile | /usr/bin/awk '{print $2}' | sort -n`
for module in $moduleList
do
last_refresh_file=`ls -tr ~/log.harvest/$ver/$module/manager*.log |tail -1`
temp=`more $last_refresh_file | grep "checked out to"`
if [ "$temp" != "" ]
then
more $last_refresh_file | grep "checked out to" | /usr/bin/awk -F " out to" '{print $2}' >> $log_refresh_file
fi
temp=`more $last_refresh_file | grep ": Deleting"`
if [ "$temp" != "" ]
then
more $last_refresh_file | grep ": Deleting" >> $log_deleted_file
fi
temp=`more $last_refresh_file | grep "Unknown file" | grep -v /bb_profile | grep -v .lis | grep -v _generated/`
if [ "$temp" != "" ]
then
more $last_refresh_file | grep "Unknown file" | grep -v /bb_profile | grep -v .lis | grep -v _generated/ >> $log_unknown_file
fi
temp=`more $last_refresh_file | grep -i Error: | grep -v ". It is deleted."`
if [ "$temp" != "" ]
then
more $last_refresh_file | grep -i Error: | grep -v ". It is deleted." >> $log_errorlist_file
fi
# count_bb=count_bb + `grep "Checking out to" $log_refresh_file | wc -l`
done
########################################################
log_file=/tmp/refresh_sum_$ver.`timestamp`
rm -f $log_file
touch $log_file
echo "=================================" > $log_file
echo " Refresh files list:" >> $log_file
echo "=================================" >> $log_file
if [[ -s "$log_refresh_file" ]]
then
more $log_refresh_file >> $log_file
else
echo "No files were refreshed in the refresh process!" >> $log_file
fi
echo "\n=================================" >> $log_file
echo " Deleted files list:" >> $log_file
echo "=================================" >> $log_file
if [[ -s "$log_deleted_file" ]]
then
more $log_deleted_file >> $log_file
else
echo "No files were deleted in the refresh process!" >> $log_file
fi
echo "\n=================================" >> $log_file
echo " Unknown files list: " >> $log_file
echo "=================================" >> $log_file
if [[ -s "$log_unknown_file" ]]
then
more $log_unknown_file >> $log_file
else
echo "No Unknown files were found in the refresh process!" >> $log_file
fi
echo "\n=================================" >> $log_file
echo " Errors list: " >> $log_file
echo "=================================" >> $log_file
if [[ -s "$log_errorlist_file" ]]
then
more $log_errorlist_file >> $log_file
else
echo "No errors were found in the refresh process!" >> $log_file
fi
count_bb=`more $log_refresh_file | /usr/bin/awk -F "bb" '{print $2}' | /usr/bin/awk -F "/" '{print $2}' | sort -u |wc -l`
echo "\n++++++++++++++++++++++++++++++++++++++++++++++++++++++++" >> $log_file
echo " Total BB which were participated in the refresh => $count_bb" >> $log_file
echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" >> $log_file
#count_file=`grep "checked out to" $log_refresh_file | wc -l`
count_file=`more $log_refresh_file | wc -l`
echo "\n++++++++++++++++++++++++++++++++++++++++++++++++++++++++" >> $log_file
echo " Total refreshed files => $count_file" >> $log_file
echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" >> $log_file
count_file=`grep ": Deleting" $log_deleted_file | wc -l`
echo "\n++++++++++++++++++++++++++++++++++++++++++++++++++++++++" >> $log_file
echo " Total deleted files => $count_file" >> $log_file
echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" >> $log_file
if [ "$Mail" = "N" ] ; then
echo "You can find the log file at: $log_file"
else
mailx -s "Refresh report for ver $ver " "$MailList" < $log_file
fi
| true
|
ac5bbee3d08b20b10281221d434041c49f093d5a
|
Shell
|
jwsong94/ESP_2018_1
|
/HW2/2_20132138.sh
|
UTF-8
| 392
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
# 20132138
# Jungwoo Song
# Github - https://github.com/jwsong94/ESP_2018_1
#
# Blink an LED on the GPIO port given as $1, ten times (1 s on / 1 s off),
# using the legacy sysfs GPIO interface.

PORT=$1
# Robustness: without an argument the script would write an empty string
# into /sys/class/gpio/export and then operate on the wrong directory.
if [ -z "$PORT" ]; then
    echo "Usage: $0 gpio_port" >&2
    exit 1
fi
GPIODIR="gpio$PORT"
echo "gpio : $PORT"

cd /sys/class/gpio/ || exit 1
# Exporting asks the kernel to create the gpio<N> control directory.
echo "$PORT" > ./export
cd "$GPIODIR" || exit 1
echo out > direction

i=1
while [ "$i" -le 10 ]
do
    echo "LED ON"
    echo 1 > value
    sleep 1s
    echo "LED OFF"
    echo 0 > value
    sleep 1s
    i=$((i + 1))
done

cd ..
# Bug fix: the original unexported the hard-coded port 13 instead of the
# port that was actually exported above.
echo "$PORT" > unexport
| true
|
0b3d49396f5e5ab73a592ffc0eed3d4b4048cfa0
|
Shell
|
TristinDavis/masKomodo
|
/morePackage.sh
|
UTF-8
| 964
| 3.171875
| 3
|
[] |
no_license
|
# Build script for the MoreKomodo extension: stages files into
# ./morefolders, packs the chrome/ payload into morekomodo.jar, and wraps
# everything into moreKomodo-1.8.3.xpi in the project root.
echo "Remove morefolders. If it wasn't there than this will error. Left after build to allow you to inspect contents that was wrapped."
rm -r morefolders
echo "Create the morefolders dir and add chrome and defaults inside."
mkdir -p morefolders/chrome/ && cp -r defaults morefolders/
echo "Copy the chrome.manifest and install.rdf into morefolders"
# The glob *.*f* picks up chrome.manifest and install.rdf.
cp -r *.*f* morefolders/
echo "create morekomodo.jar and move it into morefolder/chrome. It's removed if you've already got one left in there."
cd chrome
rm morekomodo.jar
# NOTE(review): zip's first non-option argument is the ARCHIVE name, so this
# creates content.zip containing locale/ and skin/ — the content/ directory
# itself is NOT added. Confirm whether "zip -r morekomodo content locale skin"
# was intended.
zip -r content locale skin
echo "Zip up chrome contents to *.zip, rename to morekomodo.jar, move to morefolders/chrome then get out of this folder."
mv *.zip morekomodo.jar && mv morekomodo.jar ../morefolders/chrome/ && cd ..
echo "morekomodo.jar moved"
echo "create .XPI"
rm moreKomodo-1.8.3.xpi
# NOTE(review): same pattern as above — "chrome" becomes the archive name
# (chrome.zip); the chrome/ directory is not packed into the XPI. Verify
# against a known-good build.
cd morefolders/ && zip -r chrome defaults chrome.manifest install.rdf
mv *.zip moreKomodo-1.8.3.xpi && mv moreKomodo-1.8.3.xpi ../
echo "done"
| true
|
5271a584eaad22758881f2c534acf5f66c7eed85
|
Shell
|
enitink/prj
|
/generic/codes/prData/shellScripts/sumOfDig.sh
|
UTF-8
| 291
| 3.8125
| 4
|
[] |
no_license
|
#This Shell Script computes sum of digits

# sum_of_digits NUM
#   Print the sum of the decimal digits of NUM (a non-negative integer).
#   An empty argument is treated as 0 (the original crashed the test on
#   empty input because 'expr' received no operand).
sum_of_digits()
{
   local n=${1:-0} sum=0 rem
   while [ "$n" -gt 0 ]
   do
      # Arithmetic expansion replaces the three external 'expr' calls.
      rem=$((n % 10))
      sum=$((sum + rem))
      n=$((n / 10))
   done
   echo "$sum"
}

echo -e "\nEnter Any No to find its sum of digits : \c"
read number
echo -e "Sum of Digits of number " $number " = " "$(sum_of_digits "$number")"
| true
|
e389edb6ca5d60b5aa6d6ba63c210f6c2dc36003
|
Shell
|
lucaswannen/source_code_classification_with_CNN
|
/dataset_v2/bash/967780.txt
|
UTF-8
| 117
| 2.625
| 3
|
[] |
no_license
|
#!/bin/sh
# Sleep for $1 minutes, then suspend the machine with pm-suspend.
# Bug fix: 'let' is a bash-ism and is not available in POSIX sh; use
# portable arithmetic expansion instead. A missing argument defaults to 0.
MINS=${1:-0}
SECS=$((MINS * 60))
echo "Sleeping for" "$MINS" "minutes, which is" "$SECS" "seconds."
sleep "$SECS" &&
pm-suspend
echo
| true
|
f0386236af6e0ab6dd36303a1b9299efda6556f7
|
Shell
|
cnidario/js-games
|
/1/img/pan.sh
|
UTF-8
| 216
| 3.140625
| 3
|
[] |
no_license
|
# Center-crop every landscape JPEG in the current directory to a square
# whose side equals the image height (requires ImageMagick).
for f in *.jpg; do
  # Skip the literal pattern when no .jpg files exist (nullglob is off);
  # the original fed "*.jpg" to identify and then ran an invalid [ -gt ].
  [ -e "$f" ] || continue
  # One identify call instead of two per image.
  read -r w h < <(identify -format "%w %h" "$f")
  if [ "$w" -gt "$h" ]; then
    # Horizontal offset that centers the h×h crop window.
    hh=$(( (w - h) / 2 ))
    convert "$f" -crop "${h}x${h}+${hh}+0" "$f"
  fi
done
| true
|
5e756a8b0a3a325959e5d37738ab0d5a2c924f2a
|
Shell
|
reynoldscem/bashDB
|
/create_database.sh
|
UTF-8
| 346
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# Create a database directory, reporting status through the snippets
# (no_param / db_exists / db_created) defined in functions.sh.
# shellcheck source=functions.sh
source "$(dirname "$0")/functions.sh"

# A missing argument is handled by the shared no_param snippet.
[ -z "$1" ] && eval "${no_param}"

database=$1
if [ ! -d "$database" ]; then
    # If another process is trying to create the same database we don't really care.
    mkdir -p "$database" || exit 1
    eval "${db_created}"
else
    eval "${db_exists}"
fi
| true
|
1f03028fd60356b640999093a42f7db93de0f676
|
Shell
|
eic/eic-spack-cvmfs
|
/update.sh
|
UTF-8
| 2,406
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
# Refresh the EIC spack software stack published on CVMFS: discover
# compilers, (re)concretize each environment for this OS whenever its
# spack.yaml or any package recipe changed, install, and release the
# CVMFS working area.

dir=`dirname ${0}`

# Group umask
umask 002

# Load environment
source /cvmfs/eic.opensciencegrid.org/packages/setup-env.sh

# Setup os
spack debug report
os=`spack arch -o`

# Find compilers
# Site scope is only writable on the publisher node; otherwise fall back
# to the per-user configuration scope.
if [ -w /cvmfs/eic.opensciencegrid.org/packages ] ; then
  spack compiler find --scope site
else
  spack compiler find --scope user
fi
spack compiler list

# Find ccache
# NOTE(review): this parses as (load || install) && load — the trailing
# load runs after either branch succeeds.
spack load --first ccache os=$os || spack install ccache os=$os && spack load --first ccache os=$os

# Wait until released
if [ -w /cvmfs/eic.opensciencegrid.org/packages ] ; then
  ${dir}/wait-until-released.sh /cvmfs/eic.opensciencegrid.org/packages
fi

# Create environments
packages=/cvmfs/eic.opensciencegrid.org/packages/spack/eic-spack/packages
environments=/cvmfs/eic.opensciencegrid.org/packages/spack/eic-spack/environments
for envdir in ${environments}/* ; do
  env=`basename ${envdir}`
  envfile=${envdir}/spack.yaml
  if [ ! -d "${envdir}" ] ; then
    continue
  fi

  envosdir=${envdir}/${os}
  if [ ! -w ${envosdir} ] ; then
    continue
  fi
  mkdir -p ${envosdir}

  # First-time setup: create the env from spack.yaml if no lock exists.
  if [ ! -f "${envosdir}/spack.lock" ] ; then
    spack env create --without-view -d ${envosdir} ${envfile}
  fi

  spack env activate --without-view ${envosdir}

  if [ ! -f "${envosdir}/spack.lock" ] ; then
    echo "Concretizing for the first time"
    spack concretize -f
  fi

  # Re-concretize when spack.yaml is newer than spack.lock (>5 s slack).
  yaml_time=$(ls --time-style=+%s -l ${envdir}/spack.yaml | awk '{print($6)}')
  lock_time=$(ls --time-style=+%s -l ${envosdir}/spack.lock | awk '{print($6)}')
  yaml_lock_diff=$((yaml_time-lock_time))
  if [ "${yaml_lock_diff}" -gt 5 ] ; then
    echo "Reconcretizing because of changes to environment"
    cp ${envdir}/spack.yaml ${envosdir}/spack.yaml
    spack concretize -f
  fi

  # Re-concretize when any package recipe is newer than the lock file.
  updated_packages=`find ${packages} -type f -newer ${envosdir}/spack.lock -not -name "*.pyc"`
  if [ ! -z "${updated_packages}" ] ; then
    echo "Reconcretizing because of changes to packages:"
    echo "${updated_packages}"
    spack concretize -f
  fi

  # Parallel install first, then a serial pass that keeps the stage and
  # shows logs so a failure is easy to inspect.
  spack install -j $(($(nproc)/2)) | grep -v '^\[+\]'
  spack install -j 1 --keep-stage --show-log-on-error | grep -v '^\[+\]'

  spack env deactivate
done

# Add cvmfscatalog
if [ -w /cvmfs/eic.opensciencegrid.org/packages ] ; then
  ${dir}/cvmfscatalog-add.sh /cvmfs/eic.opensciencegrid.org/packages
fi

# Release the cvmfs working directory
if [ -w /cvmfs/eic.opensciencegrid.org/packages ] ; then
  ${dir}/release.sh /cvmfs/eic.opensciencegrid.org/packages
fi
| true
|
2e4c8916f432865c0fc4a4b6d7a2fe3ec1877f03
|
Shell
|
aheinric/ShellExtensions
|
/module_1_path.sh
|
UTF-8
| 1,523
| 3.859375
| 4
|
[] |
no_license
|
## Extend path with custom directories.
### You can specify custom directories in 'resources/paths.cfg':
### - Each line should contain one directory.
### - A line can reference environment variables; e.g.: '$HOME/local/bin'
### - A '#' defines a comment; all whitespace before the '#' and the rest of the line after it are ignored.

# Avoid cluttering path if .profile gets sourced more than once.
# TODO: This may have some downsides.
#       If $PATH gets updated, the saved copy won't reflect that.
if [ "$SHELL_EXT_OLD_PATH" == "" ]; then
    # If we don't have a saved "original" path, save it.
    export SHELL_EXT_OLD_PATH=$PATH
else
    # Otherwise, restore the path from the saved copy before editing.
    export PATH=$SHELL_EXT_OLD_PATH
fi

# Define the PATH separator character.
# TODO: uncomment the appropriate line for your plaform.
ps=':' # Linux, MacOS X, and similar.
#ps=';' # Windows, mingw, cygwin, and similar. Not sure about the Windows 10 linux subsystem.

# Extend PATH with entries from a resource file.
# This keeps this file from getting clobbered with user-specific configuration.
path_cfg="$SHELL_EXT_RES/paths.cfg"
if [[ ! -f "$path_cfg" ]]; then
    # Bug fix: the template line was echoed to stdout instead of being
    # written into the (missing) config file, so the read loop below then
    # failed on a nonexistent file.
    echo "# Put extra PATH entires in here, one per line." > "$path_cfg"
fi
while read -r l; do
    # Remove comments
    l=$(echo "$l" | sed -E 's/\s*#.*//')
    if [ "$l" == '' ]; then
        # If the whole line was a comment, ditch it.
        continue
    fi
    # Expand environment variables (expandenv is supplied by the
    # surrounding shell-extension framework — not defined in this file).
    l=$(expandenv "$l")
    export PATH="$l$ps$PATH"
done < "$path_cfg"
| true
|
da72c499334ec8ff7ddeaf86c8ed4aba997921c0
|
Shell
|
netronome-support/IVG
|
/helper_scripts/inventory.sh
|
UTF-8
| 6,609
| 3.875
| 4
|
[] |
no_license
|
#!/bin/bash
# Hardware/software inventory reporter: prints "field value" pairs (or
# CSV rows when INV_PRINT_MODE=CSV) covering OS, BIOS, CPU, memory/DIMMs,
# Netronome Agilio/NFP SmartNIC details, NUMA layout and VM CPU pinning.
# Several probes require root (dmidecode) or Netronome tooling and are
# silently skipped when unavailable.

########################################################################
if [ "$INV_PRINT_MODE" == "CSV" ]; then
    # Useful for including the output into a Spreadsheet
    function show () {
        local field="$1"
        local value="$2"
        printf "\"%s\",\"%s\"\n" "$field" "$value"
    }
else
    function show () {
        local field="$1"
        local value="$2"
        printf "%-14s %s\n" "$field" "$value"
    }
fi

########################################################################
show "Hostname" "$(hostname)"

########################################################################
if [ -f /etc/os-release ]; then
    . /etc/os-release
    show "OS" "$NAME $VERSION"
fi

########################################################################
show "Kernel" "$(uname -r)"

########################################################################
# Server model and BIOS details (dmidecode is usually root-only).
if [ -x "$(which dmidecode 2> /dev/null)" ]; then
    manu=$(dmidecode --type system | sed -rn 's/^\s*Manufacturer: (.*)$/\1/p')
    prod=$(dmidecode --type system | sed -rn 's/^\s*Product Name: (.*)$/\1/p')
    show "Server" "$manu $prod"
    vendor=$(dmidecode --type bios | sed -rn 's/^\s*Vendor: (.*)$/\1/p')
    version=$(dmidecode --type bios | sed -rn 's/^\s*Version: (.*)$/\1/p')
    rel_date=$(dmidecode --type bios | sed -rn 's/^\s*Release Date: (.*)$/\1/p')
    revision=$(dmidecode --type bios | sed -rn 's/^\s*BIOS Revision: (.*)$/\1/p')
    show "BIOS" "$vendor Version $version ($rel_date); Revision $revision"
fi

########################################################################
cpu=$(lscpu | sed -rn 's/^Vendor ID:\s+(\S.*)$/\1/p')
cpu_model=$(lscpu | sed -rn 's/^Model name:\s+(\S.*)$/\1/p')
# Older lscpu has no "Model name"; fall back to dmidecode's CPU version.
if [ "$cpu_model" == "" ] && [ -x "$(which dmidecode 2> /dev/null)" ]; then
    cpu_version=$(dmidecode --type processor \
        | sed -rn 's/^\s*Version: (.*)$/\1/p' \
        | head -1)
    cpu="$cpu $cpu_version"
else
    cpu="$cpu $cpu_model"
fi
cpu_freq=$(lscpu | sed -rn 's/^CPU MHz:\s+(\S.*)$/\1/p')
[ "$cpu_freq" != "" ] && cpu="$cpu ${cpu_freq}MHz"
show "CPU" "$cpu"

########################################################################
cps=$(lscpu | sed -rn 's/^Core.*per socket:\s+(\S.*)$/\1/p')
scn=$(lscpu | sed -rn 's/^Socket.*:\s+(\S.*)$/\1/p')
show "NUMA" "$scn sockets, $cps cores/socket"

########################################################################
mem=$(cat /proc/meminfo | sed -rn 's/^MemTotal:\s+(\S.*)$/\1/p')
show "Memory" "$mem"

########################################################################
# extract VARNAME FIELD: pull "FIELD: value" out of the @@@-joined
# dmidecode record held in the caller's $line and assign it to VARNAME.
function extract () {
    local varname="$1"
    local field="$2"
    local value=$(echo "$line" \
        | sed -rn 's/^.*@@@'"$field"':\s*([^@]+)@@@.*$/\1/p')
    printf -v $varname "%s" "$value"
}

# One "DIMM" line per populated module: flatten each dmidecode
# "Memory Device" record onto a single @@@-separated line, keep only real
# DIMMs with a data width, then pick out the interesting fields.
# Note: the while loop runs in a pipeline subshell, so variables set in it
# do not survive past 'done' (only the 'show' output matters here).
if [ -x "$(which dmidecode 2> /dev/null)" ]; then
    dmidecode --type memory \
        | awk '{printf "%s@@@", $0}' \
        | sed -r 's/@@@@@@/@@@\n@@@/g' \
        | sed -r 's/@@@\s*/@@@/g' \
        | grep -E "^@@@Handle" \
        | grep -E "@@@Memory Device@@@" \
        | grep -E "@@@Form Factor: DIMM@@@" \
        | grep -E "@@@Data Width: [0-9]+ bits@@@" \
        | while read line ;
    do
        #extract "m_width_t"   "Total Width"
        extract "m_width_d" "Data Width"
        extract "m_size" "Size"
        extract "m_locator" "Locator"
        #extract "m_manu"      "Manufacturer"
        extract "m_type" "Type"
        extract "m_speed" "Speed"
        extract "m_rank" "Rank"
        #extract "m_part_n"    "Part Number"
        extract "m_cfg_clock" "Configured Clock Speed"
        # NOTE(review): the variable name says clock but the field queried
        # is "Configured Voltage"; m_max_clock is also unused below — confirm.
        extract "m_max_clock" "Configured Voltage"
        info="$m_width_d, $m_size, $m_locator"
        info="$info, $m_cfg_clock (max $m_speed), rank $m_rank"
        show "DIMM" "$info"
    done
fi

########################################################################
# Netronome Agilio OVS version, if the control tool is installed.
ovsctl="/opt/netronome/bin/ovs-ctl"
if [ -x "$ovsctl" ]; then
    agvers=$($ovsctl version | sed -rn 's/^Netro.*version //p')
    show "Agilio" "$agvers"
fi

########################################################################
# NFP SmartNIC hardware info (model, part number, BSP, core clock).
hwinfo="/opt/netronome/bin/nfp-hwinfo"
nfp_present="unknown"
if [ -x $hwinfo ]; then
    fn="/tmp/nfp-hwinfo.txt"
    $hwinfo > $fn 2> /dev/null
    if [ $? -ne 0 ]; then
        show "NFP" "MISSING!!"
        nfp_present="missing"
    else
        nfp_present="yes"
        model=$( sed -rn 's/^assembly.model=(.*)$/\1/p' $fn)
        partno=$( sed -rn 's/^assembly.partno=(.*)$/\1/p' $fn)
        rev=$( sed -rn 's/^assembly.revision=(.*)$/\1/p' $fn)
        sn=$( sed -rn 's/^assembly.serial=(.*)$/\1/p' $fn)
        bsp=$( sed -rn 's/^board.setup.version=(.*)$/\1/p' $fn)
        freq=$( sed -rn 's/^core.speed=(.*)$/\1/p' $fn)
        show "NFP" \
            "$model ($partno rev=$rev sn=$sn ${freq}MHz)"
        show "BSP" "$bsp"
    fi
fi

########################################################################
# Physical media/PHY mode, collapsed onto one line.
if [ -x /opt/netronome/bin/nfp-media ] && [ "$nfp_present" == "yes" ]; then
    phymode=$(/opt/netronome/bin/nfp-media \
        | tr '\n' ' ' \
        | sed -r 's/\s+\(\S+\)\s*/ /g')
    show "Media" "$phymode"
fi

########################################################################
# Per-NUMA-node CPU lists.
lscpu \
    | sed -rn 's/^NUMA\snode([0-9]).*:\s+(\S+)$/\1:\2/p' \
    | while read line ; do
    cpuidx=${line/:*/}
    vcpulist=${line/*:/}
    show "NUMA $cpuidx" "$vcpulist"
done

########################################################################
# PCI address and NUMA node of the first NFP device bound to the driver.
nfpsys="/sys/bus/pci/drivers/nfp"
nfpnuma="UNKNOWN"
nfpbdf="UNKNOWN"
if [ -d "$nfpsys" ]; then
    nfpbdf=$(find $nfpsys -name '00*' \
        | sed -r 's#^.*/##' \
        | head -1)
    if [ -h "$nfpsys/$nfpbdf" ]; then
        nfpnuma="$(cat $nfpsys/$nfpbdf/numa_node)"
    fi
    show "NFP NUMA" "$nfpnuma"
    show "NFP BDF" "$nfpbdf"
fi

########################################################################
show "Kernel Command Line" ""
show "" "$(cat /proc/cmdline)"

########################################################################
# Command line of the VirtIO relay daemon, if it is running.
viopid=$(pgrep virtiorelayd)
if [ "$viopid" != "" ]; then
    show "VirtIO Relay Daemon Command Line" ""
    show "" "$(cat /proc/$viopid/cmdline | tr '\0' ' ')"
fi

########################################################################
# CPU pinning of each running libvirt VM.
virsh="$(which virsh 2> /dev/null)"
if [ "$virsh" != "" ]; then
    show "VM CPU Usage" ""
    for inst in $(virsh list --name) ; do
        if [ "$inst" != "" ]; then
            vcpulist=$($virsh vcpuinfo $inst \
                | sed -rn 's/^CPU:\s+(\S+)$/\1/p' \
                | tr '\n' ',' \
                | sed -r 's/,$/\n/' )
            show "$inst" "$vcpulist"
        fi
    done
fi

########################################################################
exit 0
| true
|
66103d185efb543e99aa9519bffc0b5f41659dd4
|
Shell
|
bigdotsoftware/ingenicoserver
|
/tests/transaction_preauthorization.sh
|
UTF-8
| 2,703
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
# Interactive integration test for a preauthorization transaction against
# a local ingenico server: check terminal state, start the transaction,
# poll until the terminal is done, close it, then let the tester either
# complete or cancel the preauthorization.

URI="http://127.0.0.1:3020"

out=$(curl -s -XGET "$URI/v1/ingenico_status" -H 'Content-Type: application/json')
echo "###> status: $out"

if [[ "$out" == *"BatchCompleted"* ]]; then
    echo "###> Batch completed, terminal is waiting to read batch data (use script clearbatch.sh)"
    exit;
fi
if [[ "$out" == *"Reconciliation needed"* ]]; then
    echo "###> Reconciliation needed, wykonaj zamkniecie dnia"
    exit;
fi
if [[ "$out" == *"WaitTrEnd"* ]]; then
    echo "###> WaitTrEnd, finish previously opened transaction"
    exit;
fi

echo "###> Starting preauthorization transaction"
curl -XPOST "$URI/v1/ingenico_transaction?fulldebug=true" -H 'Content-Type: application/json' -d '{
    "type": "preauthorization",
    "amount": 1000,
    "sequenceNb" : "-"
}'

# Poll for up to ~150 s until the terminal reports the transaction ended.
COUNTER=0
while [ $COUNTER -lt 150 ]; do
    COUNTER=$((COUNTER + 1))
    out=$(curl -s -XGET "$URI/v1/ingenico_status" -H 'Content-Type: application/json')
    echo "###> status: $out"
    if [[ "$out" == *"WaitTrEnd"* ]]; then
        break;
    fi
    sleep 1
done

echo "###> Closing transaction"
out=$(curl -XGET "$URI/v1/ingenico_transaction_end" -H 'Content-Type: application/json')
echo $out

#echo '{ "ansactionNumber":174,"transactionDetails":{"authorizationCode":"072760","serverMessage":""}}' | jq -r '.transactionDetails.authorizationCode'
authorizationCode=$(echo $out | jq -r '.terminal.transactionDetails.authorizationCode')
echo "###> Transaction closed with '$authorizationCode'"

if [ -z "$authorizationCode" ]; then
    echo "###> No transaction code returned, transaction refused"
else
    echo ">>>>> (AKCJA TESTERA) Wybierz akcje dla preutoryzacji $authorizationCode"
    echo ">>>>> (AKCJA TESTERA) 1) completion"
    echo ">>>>> (AKCJA TESTERA) 2) cancel"
    select opt in 1 2
    do
        case $opt in 1)
            # Bug fix: the JSON payload was single-quoted, so the literal
            # string "$authorizationCode" was sent to the server instead of
            # the real code. Double-quote the body so the variable expands.
            out=$(curl -s -XPOST "$URI/v1/ingenico_transaction?fulldebug=true" -H 'Content-Type: application/json' -d "{
                \"type\": \"completion\",
                \"amount\": 1000,
                \"sequenceNb\" : \"$authorizationCode\"
            }")
            break;
        esac
        case $opt in 2)
            # Bug fix: same quoting problem as the completion payload above.
            out=$(curl -s -XPOST "$URI/v1/ingenico_transaction?fulldebug=true" -H 'Content-Type: application/json' -d "{
                \"type\": \"preauthorization_cancel\",
                \"amount\": 1000,
                \"sequenceNb\" : \"$authorizationCode\"
            }")
            break;
        esac
    done
    echo $out
    out=$(curl -s -XGET "$URI/v1/ingenico_status" -H 'Content-Type: application/json')
    echo "###> status: $out"
    echo "###> Closing preauthorization_cancel or completion"
    out=$(curl -XGET "$URI/v1/ingenico_transaction_end" -H 'Content-Type: application/json')
    echo $out
fi
echo "###> Done"
| true
|
e61ab67732015794d21715f0e08c4d1b42c3fcde
|
Shell
|
geethaka/rubycas-server
|
/int_test.sh
|
UTF-8
| 284
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Smoke test: the rubycas login page must answer HTTP 200.
# added a sleep since the kubernetes deployment can take time to propagate
sleep 60
test_result=$(curl -s -o /dev/null -w "%{http_code}" http://rubycas.k8.bebraven.org/login)
echo "$test_result"
# Bug fix: quote the variable and use the portable string comparison; the
# original unquoted [ $test_result == 200 ] breaks when curl yields an
# empty result and '==' is not POSIX for the [ builtin.
if [ "$test_result" = "200" ]
then
    echo "success"
else
    echo "fail"
    exit 1
fi
| true
|
3a7de84c1054bef28f5daf05d611c094721a92a6
|
Shell
|
ibisek/firmwareLoader
|
/connectAllAvailable.sh
|
UTF-8
| 894
| 4
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Connects all available CUBE units to /dev/rfcomm{$i} where i>=0
#

RED='\033[1;31m'
GREEN='\033[1;32m'
BLUE='\033[1;34m'
PURPLE='\033[1;35m'
CYAN='\033[1;36m'
WHITE='\033[1;37m'
NC='\033[0m' # No Color

echo -e "\n${WHITE}Searching for BT devices..${NC}"
available="$(hcitool scan)"

readarray -t lines <<<"$available"
# Bug fix: the original then ran `lines=$(echo $lines | grep OGN)`, which
# collapsed the array to a grep of its FIRST element only, so at most one
# tracker could ever be bound. The regex below already skips lines that
# are not OGN CUBE entries, so the extra filtering step is dropped.

i=0
macAddr="null"
ognId="null"
regex="([A-F:0-9:]+)\s+OGN CUBE\s([A-F0-9]+)"
for line in "${lines[@]}"
do
    if [[ $line =~ $regex ]]
    then
        macAddr="${BASH_REMATCH[1]}"
        ognId="${BASH_REMATCH[2]}"
    else
        continue
    fi

    echo -e "Found OGN CUBE tracker ID ${GREEN}$ognId${NC} with BT MAC addr ${GREEN}$macAddr${NC}"

    port="/dev/rfcomm$i"
    # NOTE(review): '2>&1 > /dev/null' sends stderr to the terminal and only
    # stdout to /dev/null — kept as-is; swap the redirections to silence both.
    sudo rfcomm unbind $port 2>&1 > /dev/null
    sudo rfcomm bind $port $macAddr
    echo -e " bound $ognId to ${GREEN}$port${NC}"

    ((i++))
done
| true
|
339566073c8efb1c3e86421f00577c5b6c6ab3b9
|
Shell
|
nobiki/4shamo
|
/bootstrap.sh
|
UTF-8
| 492
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
# One-time dependency install (guarded by a lock file), then run the
# Robot Framework suite under a freshly generated build tag.

if [ ! -e /bootstrap.lock ]; then
  pip install selenium==3.13.0 robotframework==3.0.4 \
    robotframework-seleniumlibrary==3.3.1 \
    robotframework-httplibrary
  touch /bootstrap.lock
fi

# Random 10-character tag (pwgen -A -0: presumably no capitals/digits —
# see pwgen(1)); exported so the robot process can read it too.
export BUILDTAG="$(pwgen -A -0 -1 10)"
echo "BUILDTAG: ${BUILDTAG}"

robot -A robot.args -v BUILDTAG:${BUILDTAG} --outputdir results/${BUILDTAG} entry.robot
# robot -A robot.args -v BUILDTAG:${BUILDTAG} -t このテスト -t あのテスト client.robot
| true
|
4fc9999b3916f4ec0717a6805e26b3ebc01b88ba
|
Shell
|
BlaT2512/Facial-Recognition-Door-Lock
|
/src/doorlock.init.sh
|
UTF-8
| 773
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/sh
### BEGIN INIT INFO
# Provides:          doorlock
# Required-Start:    $remote_fs $syslog
# Required-Stop:     $remote_fs $syslog
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: Doorlock function using rpi camera
# Description:       Doolock
### END INIT INFO

# If you want a command to always run, put it here

# Carry out specific functions when asked to by the system
case "$1" in
  start)
    echo "Starting doorlock"
    # run application you want to start
    cd /home/pi/Facial-Recognition-Door-Lock
    sudo python3 ./doorlock.py &
    ;;
  stop)
    echo "Stopping doorlock"
    # NOTE(review): this kills EVERY python3 process on the system, not just
    # doorlock.py — consider a pidfile or 'pkill -f doorlock.py'.
    killall python3
    ;;
  *)
    # Bug fix: the usage message referred to /etc/init.d/noip — a copy-paste
    # leftover from another init script; this service is doorlock.
    echo "Usage: /etc/init.d/doorlock {start|stop}"
    exit 1
    ;;
esac

exit 0
| true
|
8f75c01a9740d111aa3f6cb846f130b1a595c19d
|
Shell
|
ethernetdan/weave-kubernetes-anywhere
|
/examples/google-compute-engine/provision.sh
|
UTF-8
| 4,309
| 3.453125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash -x
# Boot-time provisioning for a Kubernetes-on-Weave demo cluster on GCE:
# install Docker 1.10, Weave Net and Weave Scope, join the Weave network
# by discovering kube-weave-tagged peers, then start the role-appropriate
# containers (etcd / master / node) based on this host's name.
# The script is idempotent and re-runs on every boot.

if ! /usr/local/bin/docker -v 2> /dev/null | grep -q "^Docker\ version\ 1\.10" ; then
  echo "Installing current version of Docker Engine 1.10"
  curl --silent --location https://get.docker.com/builds/Linux/x86_64/docker-1.10.1 --output /usr/local/bin/docker
  chmod +x /usr/local/bin/docker
fi

systemd-run --unit=docker.service /usr/local/bin/docker daemon
/usr/local/bin/docker version

if ! [ -x /usr/local/bin/weave ] ; then
  echo "Installing current version of Weave Net"
  curl --silent --location http://git.io/weave --output /usr/local/bin/weave
  chmod +x /usr/local/bin/weave
  /usr/local/bin/weave setup
fi

/usr/local/bin/weave version

/usr/local/bin/weave launch-router --init-peer-count 7

/usr/local/bin/weave launch-proxy --rewrite-inspect

## In a specific instance groups find nodes with `kube-weave` tag
list_weave_peers_in_group() {
  ## There doesn't seem to be a native way to obtain instances with certain tags, so we use awk
  gcloud compute instance-groups list-instances $1 --uri --quiet \
    | xargs -n1 gcloud compute instances describe \
        --format='value(tags.items[], name, networkInterfaces[0].accessConfigs[0].natIP)' \
    | awk '$1 ~ /(^|\;)kube-weave($|\;).*/ && $2 ~ /^kube-.*$/ { print $2 }'
}

## This is very basic way of doing Weave Net peer discovery, one could potentially implement a pair of
## systemd units that write and watch an environment file and call `weave connect` when needed...
## However, the purpose of this script is only too illustrate the usage of Kubernetes Anywhere in GCE.
/usr/local/bin/weave connect \
  $(list_weave_peers_in_group kube-master-group) \
  $(list_weave_peers_in_group kube-node-group)

/usr/local/bin/weave expose -h $(hostname).weave.local

if ! [ -x /usr/local/bin/scope ] ; then
  echo "Installing current version of Weave Scope"
  curl --silent --location http://git.io/scope --output /usr/local/bin/scope
  chmod +x /usr/local/bin/scope
fi

/usr/local/bin/scope version

/usr/local/bin/scope launch --probe.kubernetes true --probe.kubernetes.api http://kube-apiserver.weave.local:8080

# Route subsequent 'docker run' calls through the Weave proxy.
eval $(/usr/local/bin/weave env)

## We don't set a restart policy here, as this script re-runs on each boot, which is quite handy,
## however we need clear out previous container while saving the logs for future reference
save_last_run_log_and_cleanup() {
  if [[ $(docker inspect --format='{{.State.Status}}' $1) = 'exited' ]]
  then
    docker logs $1 > /var/log/$1_last_run 2>&1
    docker rm $1
  fi
}

# Role dispatch by hostname.
case "$(hostname)" in
  kube-etcd-1)
    save_last_run_log_and_cleanup etcd1
    docker run -d \
      -e ETCD_CLUSTER_SIZE=3 \
      --name=etcd1 \
      weaveworks/kubernetes-anywhere:etcd
    ;;
  kube-etcd-2)
    save_last_run_log_and_cleanup etcd2
    docker run -d \
      -e ETCD_CLUSTER_SIZE=3 \
      --name=etcd2 \
      weaveworks/kubernetes-anywhere:etcd
    ;;
  kube-etcd-3)
    save_last_run_log_and_cleanup etcd3
    docker run -d \
      -e ETCD_CLUSTER_SIZE=3 \
      --name=etcd3 \
      weaveworks/kubernetes-anywhere:etcd
    ;;
  kube-master-0)
    save_last_run_log_and_cleanup kube-apiserver
    save_last_run_log_and_cleanup kube-controller-manager
    save_last_run_log_and_cleanup kube-scheduler
    docker run -d \
      -e ETCD_CLUSTER_SIZE=3 \
      -e CLOUD_PROVIDER=gce \
      --name=kube-apiserver \
      weaveworks/kubernetes-anywhere:apiserver
    docker run -d \
      -e CLOUD_PROVIDER=gce \
      --name=kube-controller-manager \
      weaveworks/kubernetes-anywhere:controller-manager
    docker run -d \
      --name=kube-scheduler \
      weaveworks/kubernetes-anywhere:scheduler
    ;;
  ## kube-[5..N] are the cluster nodes
  kube-node-*)
    save_last_run_log_and_cleanup kubelet
    save_last_run_log_and_cleanup kube-proxy
    docker run \
      --volume="/:/rootfs" \
      --volume="/var/run/docker.sock:/docker.sock" \
      weaveworks/kubernetes-anywhere:tools \
      setup-kubelet-volumes
    docker run -d \
      -e CLOUD_PROVIDER=gce \
      --name=kubelet \
      --privileged=true --net=host --pid=host \
      --volumes-from=kubelet-volumes \
      weaveworks/kubernetes-anywhere:kubelet
    docker run -d \
      --name=kube-proxy \
      --privileged=true --net=host --pid=host \
      weaveworks/kubernetes-anywhere:proxy
    ;;
esac
| true
|
ff6b0fc336513a998df87b1d08976e0b2d8834e8
|
Shell
|
imos/bin
|
/stelnet
|
UTF-8
| 982
| 3.90625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# stelnet is an SSL client.
#
# stelnet is used to communicate with another host using the SSL protocol.
# stelnet requires the openssl command. If port is not specified, 443 (HTTPS)
# is used.
#
# Usage:
#   stelnet host [port]
source "$(dirname "${BASH_SOURCE}")"/imosh || exit 1
DEFINE_bool crlf true 'Use CRLF instead for new lines.'
DEFINE_bool ssl3 true 'Use SSL3.'
DEFINE_bool debug false 'Show debug information.'
eval "${IMOSH_INIT}"

# main HOST PORT: invoke openssl s_client with the flag-derived options.
main() {
  local host="$1"
  local port="$2"

  local options=(-host "${host}" -port "${port}")
  if (( FLAGS_crlf )); then options+=(-crlf); fi
  # Bug fix: the original appended -ssl3 twice (the line was duplicated).
  if (( FLAGS_ssl3 )); then options+=(-ssl3); fi
  if (( FLAGS_debug )); then
    openssl s_client "${options[@]}"
  else
    # Quiet mode; openssl's certificate chatter on stderr is suppressed.
    openssl s_client -quiet "${options[@]}" 2>/dev/null
  fi
}

if [ "${#}" -eq 2 ]; then
  main "$@"
elif [ "${#}" -eq 1 ]; then
  main "$@" 443
else
  LOG FATAL 'stelnet requires one or two arguments.'
fi
| true
|
c0d07bbe764591a762b09900d0b619567f6c04d8
|
Shell
|
chu888chu888/BigData-spark-installer
|
/src/spark
|
UTF-8
| 944
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# Download and install Apache Spark 1.4.1 (Hadoop 2.4 build) into
# /usr/local/spark, create its work/log directories, install the prepared
# config files, and append environment variables to ~/.pam_environment.

CONF_FILES_DIR=./src/config_files
export SPARK_CONF_DIR=/usr/local/spark/conf
export SPARK_HOME=/usr/local/spark
# DOWNLOAD_DIR is expected to be defined in ~/.pam_environment.
source ~/.pam_environment

wget http://d3kbcqa49mib13.cloudfront.net/spark-1.4.1-bin-hadoop2.4.tgz -P $DOWNLOAD_DIR
tar -xzf $DOWNLOAD_DIR/spark-1.4.1-bin-hadoop2.4.tgz -C $DOWNLOAD_DIR
sudo mv $DOWNLOAD_DIR/spark-1.4.1-bin-hadoop2.4 $SPARK_HOME
sudo chown -R ubuntu:ubuntu $SPARK_HOME

sudo mkdir -p /var/lib/spark/{work,rdd,pid}
sudo mkdir -p /var/log/spark
sudo chown -R ubuntu:ubuntu /var/lib/spark
sudo chown -R ubuntu:ubuntu /var/log/spark

cp $CONF_FILES_DIR/spark-env.sh $SPARK_CONF_DIR/
cp $CONF_FILES_DIR/spark-defaults.conf $SPARK_CONF_DIR/
cd $SPARK_CONF_DIR
cp log4j.properties.template log4j.properties

# NOTE(review): the single quotes keep $(hostname) literal in the written
# file, and pam_environment does not expand command substitutions — confirm
# this line is consumed by something that does.
echo 'MASTER=spark://$(hostname):7077' | tee -a ~/.pam_environment
echo "export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin" | tee -a ~/.pam_environment
echo "*********** Spark Done ************"
| true
|
df9bcf7501ea4a9f33879864ebcbef02094eed69
|
Shell
|
jkwascom/general-environment-setup
|
/arch/sync-check.sh
|
UTF-8
| 2,704
| 3.59375
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/zsh
# Compare local dotfiles and package lists against the cloud-synced copies
# in ~/general-environment-setup. Modes (first argument):
#   check       (default) list which files differ
#   show        append colored diffs to the report
#   synchronize open each differing pair in vimdiff
#   update-local install the cloud pacman package list locally

LOCAL_CONFIG_ROOT="$HOME"
LOCAL_PACKAGELIST_ROOT="/tmp"
CLOUD_ENV_ROOT="$LOCAL_CONFIG_ROOT/general-environment-setup"
CLOUD_CONFIG_ROOT="$CLOUD_ENV_ROOT/configs"
CLOUD_PACKAGELIST_ROOT="$CLOUD_ENV_ROOT/packages"
MODE="check"
PACKAGELIST_SUFFIX="pkglist"
PACMAN_LIST="pacman.$PACKAGELIST_SUFFIX"
PIP_LIST="pip.$PACKAGELIST_SUFFIX"
VIM_LIST="vim.$PACKAGELIST_SUFFIX"
SYNC_OUTPUT_FILE="/tmp/sync-check-output.tmp"

# zsh brace-form conditionals are used throughout this script.
if [[ "$1" == "show" ]] {
    MODE="show"
}
if [[ "$1" == "synchronize" ]] {
    MODE="synchronize"
}
if [[ "$1" == "update-local" ]] {
    MODE="update-local"
}

# generate_package_lists DIR: dump the current pacman/pip/vim-plugin
# package lists into DIR. The vim invocation captures NeoBundleList output
# and sorts it; the embedded newline in the -c string is intentional.
generate_package_lists() {
    pacman -Qqe > "$1/$PACMAN_LIST"
    pip list > "$1/$PIP_LIST"
    vim -c "redir @\" | silent NeoBundleList | redir END | set paste | exe \"normal pddggVG:sort
\" | x! $1/$VIM_LIST"
}

# check_up_to_date LOCAL_ROOT CLOUD_ROOT FILE:
#   No-op when the two copies are identical; otherwise open vimdiff
#   (synchronize mode), note the difference (check mode) or append a
#   colored diff (show mode) to $SYNC_OUTPUT_FILE.
check_up_to_date () {
    LOCAL_ROOT="$1"
    CLOUD_ROOT="$2"
    SYNC_FILE_NAME="$3"
    test `diff $LOCAL_ROOT/$SYNC_FILE_NAME $CLOUD_ROOT/$SYNC_FILE_NAME | wc -l` -eq 0 && return 0
    if [[ $MODE == "synchronize" ]] {
        vim -d $LOCAL_ROOT/$SYNC_FILE_NAME $CLOUD_ROOT/$SYNC_FILE_NAME < $TTY > $TTY
        return 0
    }
    if [[ $MODE == "check" ]] {
        echo "$SYNC_FILE_NAME differs" >> $SYNC_OUTPUT_FILE
    }
    if [[ $MODE == "show" ]] {
        show_diff $LOCAL_ROOT/$SYNC_FILE_NAME $CLOUD_ROOT/$SYNC_FILE_NAME >> $SYNC_OUTPUT_FILE
    }
    echo "" >> $SYNC_OUTPUT_FILE
}

# show_diff FILE1 FILE2: colored diff even outside a git repository.
show_diff () {
    git diff --color --no-index $1 $2
}

# update_local_packages LOCAL_ROOT CLOUD_ROOT FILE: install every package
# named in the cloud list via pacman.
update_local_packages() {
    LOCAL_ROOT="$1"
    CLOUD_ROOT="$2"
    SYNC_FILE_NAME="$3"
    cat $CLOUD_ROOT/$SYNC_FILE_NAME | xargs -I% sudo pacman -S --noconfirm %
}

# update_local_config LOCAL_ROOT CLOUD_ROOT FILE: copy the cloud config over
# the local one.
update_local_config() {
    LOCAL_ROOT="$1"
    CLOUD_ROOT="$2"
    SYNC_FILE_NAME="$3"
    cp $2/$3 $1
}

if [[ $MODE == "update-local" ]] {
    update_local_packages $LOCAL_PACKAGELIST_ROOT $CLOUD_PACKAGELIST_ROOT $PACMAN_LIST
    # NOTE(review): 'return' at script top level — presumably relies on zsh
    # accepting it like 'exit'; confirm when the script is run (not sourced).
    return 0
}

echo "Sync Check Results:" > $SYNC_OUTPUT_FILE
#ls -A1 workspace/cygwin/google-docs/cloud-workspace/setup/configs | xargs -I% check_up_to_date $LOCAL_CONFIG_ROOT $CLOUD_CONFIG_ROOT %
check_up_to_date $LOCAL_CONFIG_ROOT $CLOUD_CONFIG_ROOT '.zshrc'
check_up_to_date $LOCAL_CONFIG_ROOT $CLOUD_CONFIG_ROOT '.tmux.conf'
check_up_to_date $LOCAL_CONFIG_ROOT $CLOUD_CONFIG_ROOT '.vimrc'
check_up_to_date $LOCAL_CONFIG_ROOT $CLOUD_CONFIG_ROOT '.screenrc'

generate_package_lists $LOCAL_PACKAGELIST_ROOT
#ls -A1 workspace/cygwin/google-docs/cloud-workspace/setup/packages | xargs -I% check_up_to_date $LOCAL_PACKAGELIST_ROOT $CLOUD_PACKAGELIST_ROOT %
check_up_to_date $LOCAL_PACKAGELIST_ROOT $CLOUD_PACKAGELIST_ROOT $PACMAN_LIST
check_up_to_date $LOCAL_PACKAGELIST_ROOT $CLOUD_PACKAGELIST_ROOT $PIP_LIST
check_up_to_date $LOCAL_PACKAGELIST_ROOT $CLOUD_PACKAGELIST_ROOT $VIM_LIST

less -SRFX $SYNC_OUTPUT_FILE
| true
|
0d098594fd7888596253550d348a2f0ce9482ecc
|
Shell
|
xiy/rvm-openshift
|
/scripts/color
|
UTF-8
| 1,342
| 3.671875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Emit terminal escape sequences for the color names given in $1.
# Multiple selections are comma-separated; order is not important.
# Using bold in one definition requires resetting it in others with offbold;
# using a background in one color requires resetting it with bdefault.
# Example:
#   rvm_error_color=bold,red
#   rvm_notify_color=offbold,green

# Bail out silently on terminals that cannot take color codes.
case "${TERM:-dumb}" in
  (dumb|unknown) exit ;;
esac

# Without tput there is nothing we can emit.
builtin command -v tput >/dev/null || exit

for color in ${1//,/ }
do
  case "${color:-}" in
    # regular foreground colors
    black)    tput setaf 0 ;;
    red)      tput setaf 1 ;;
    green)    tput setaf 2 ;;
    yellow)   tput setaf 3 ;;
    blue)     tput setaf 4 ;;
    magenta)  tput setaf 5 ;;
    cyan)     tput setaf 6 ;;
    white)    tput setaf 7 ;;
    # emphasis (standout) on/off
    bold)     tput smso ;;
    offbold)  tput rmso ;;
    # background colors
    bblack)   tput setab 0 ;;
    bred)     tput setab 1 ;;
    bgreen)   tput setab 2 ;;
    byellow)  tput setab 3 ;;
    bblue)    tput setab 4 ;;
    bmagenta) tput setab 5 ;;
    bcyan)    tput setab 6 ;;
    bwhite)   tput setab 7 ;;
    # defaults
    default)  tput setaf 9 ;;
    bdefault) tput setab 9 ;;
    # anything else resets all attributes
    *)        tput sgr0 ;;
  esac
done
| true
|
3a1dbe102dced4c34b97c62cf5122b98d1aecbd7
|
Shell
|
kranjan94/Timed-Screencapture
|
/screencap.sh
|
UTF-8
| 386
| 3.734375
| 4
|
[] |
no_license
|
#!/bin/bash
# Creates a directory on the desktop called screens and takes
# screenshots of the display at regular intervals and saves the
# results in screens.
#
# Usage: screencap.sh <interval-seconds>

# Robustness fix: without an interval argument the original looped forever
# with a failing 'sleep', busy-capturing as fast as possible.
if [ -z "$1" ]; then
    echo "Usage: $0 interval_seconds" >&2
    exit 1
fi

# -p: do not error if the directory already exists (repeat runs).
mkdir -p ~/Desktop/screens
prefix="$HOME/Desktop/screens/"
suffix=".jpg"
while true; do
    time=$(date +%r)
    filename="$prefix$time$suffix"
    # Bug fix: %r output contains spaces ("10:23:45 AM"), so the unquoted
    # $filename was word-split into several arguments for screencapture.
    screencapture -S -tjpg "$filename"
    echo "Captured screen at $time."
    sleep "$1"
done
exit 0
| true
|
124763fd521e091f6aa0d4f9fc1b62a92200f676
|
Shell
|
dantailby/childrens-games-starter-pack
|
/build-scripts/build.sh
|
UTF-8
| 1,574
| 3.71875
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
set -e

# You should place all your build steps in this script which will be run by our automated build pipeline
# This script is run from the project root - you can cd around as you need to
# Your build steps should build your game to a root-level output folder as demonstrated below, this folder is then uploaded to our servers
# If you require any specific software to be installed please consult your BBC Technical Lead/TPM
# If the build fails you should receive an email with failure information

# When you commit the following code, you can check the pipeline is working by checking the main.js file on the Test environment or by
# viewing the play.test page for your project (get the URLs from the TPM)

# Make a change to this file (the hello world string) and commit it to kick
# off the automated build pipline. The resulting page on TEST shows
# examples of using echo stats and local storage with an external configuration
# file (from src/*).

# 'build' the src files to output directory
# (note: under set -e, 'mkdir output' aborts the build if output/ already exists)
mkdir output
cp src/* output

# edit the string below and commit to see your changes reflected
helloWorldFunc="function appendHelloWorld() { \
var content = document.createElement('div'); \
content.innerHTML = 'Hello World! Edit me in build.sh and commit to Git to see the automated build pipeline in action!'; \
container.appendChild(content); \
}"

# replaces the appendHelloWorld function in main.js with the one above
# NOTE: $helloWorldFunc is interpolated into the sed program, so it must not
# contain '/' or other sed metacharacters.
sed "s/function appendHelloWorld() {}/$helloWorldFunc/g" output/main.js > output/main.tmp && mv output/main.tmp output/main.js
| true
|
d2b118ecb515b36eea4023f050a39da076fa0c3d
|
Shell
|
icanccwhite/tgirt-dna-seq
|
/genome_tgirt_seq/prepare_data/UMI2id.sh
|
UTF-8
| 741
| 2.875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# For each paired-end FASTQ sample, print (not run) a pipeline that clips
# the 13-base index from the reads (clip_fastq.py --idxBase=13) and writes
# de-interleaved gzipped R1/R2 output under $OUTPATH. Pipe this script's
# stdout to a shell or job scheduler to execute the commands.

PROJECT_PATH=$WORK/cdw2854/ecoli_genome/rawData/k12
FASTQ_PATH=${PROJECT_PATH}
# NOTE(review): the assignment above is immediately overridden here, so
# PROJECT_PATH and the first FASTQ_PATH value are effectively unused.
FASTQ_PATH=/stor/work/Lambowitz/Data/NGS/JA17166
OUTPATH=${FASTQ_PATH}/umi2id
PROGRAM_PATH=${HOME}/TGIRT_UMI/preproces_fastq
PYTHON=$(which python)
mkdir -p ${OUTPATH}

# Iterate over R1 files (skipping "cluster" variants); derive R2 and the
# sample name from the R1 filename.
for FQ1 in `ls ${FASTQ_PATH}/K12*R1_001.fastq.gz | grep -v cluster`
do
    FQ2=${FQ1/_R1_/_R2_}
    SAMPLENAME=$(basename ${FQ1%_R1_001.fastq.gz})
    # The '\|' keeps the pipe literal so the whole pipeline is echoed as
    # one command line.
    echo ${PYTHON} ${PROGRAM_PATH}/clip_fastq.py \
        --fastq1=${FQ1} \
        --fastq2=${FQ2} \
        --idxBase=13 \
        --barcodeCutOff=30 \
        --mismatch=2 \
        --outputprefix=- \
        \| ${PYTHON} ${PROGRAM_PATH}/deinterleave_fastq.py \
        - \
        $OUTPATH/${SAMPLENAME}_umi2id_R1_001.fastq.gz \
        $OUTPATH/${SAMPLENAME}_umi2id_R2_001.fastq.gz
done
| true
|
217570c46408f72fa249f7b06ab8d5340777d04d
|
Shell
|
mizuno-as/server-hands-on
|
/cloud-init.sh
|
UTF-8
| 451
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash
# Seed an existing LXD container with a cloud-init user-data document that
# sets the hostname to the container name and creates an SSH-key-only
# admin user (edit YOURNAME / the public key before use).
#
# Usage: cloud-init.sh container_name

if [ -z "$1" ]
then
    echo "$0 container_name"
    exit 1
fi

YML=$(mktemp /tmp/init-XXXXX.yml)

# NOTE(review): YAML indentation below reconstructed to standard
# cloud-config layout (the archived copy had lost leading whitespace).
cat > "$YML" <<EOF
#cloud-config
hostname: $1
ssh_pwauth: false
users:
  - name: YOURNAME
    shell: /bin/bash
    lock_passwd: true
    sudo: ALL=(ALL) NOPASSWD:ALL
    ssh_authorized_keys:
      - "YOUR PUBLIC KEY"
EOF

# Only touch the container if it actually exists; testing the command
# directly replaces the original 'lxc info; [ $? -eq 0 ]' pattern, and
# quoting "$1" keeps names with unusual characters intact.
if lxc info "$1" > /dev/null 2>&1
then
    cat "$YML"
    lxc config set "$1" user.user-data - < "$YML"
fi

rm "$YML"
| true
|
ef575a7c43ef19b81a9b3d674e035b446a663cba
|
Shell
|
Sonikkua/linux-admin-and-bash-scripts
|
/UNIX Shell Programing/Chapter15/cdh
|
UTF-8
| 3,979
| 4
| 4
|
[] |
no_license
|
#!/bin/bash
# cd history program -- final version
# sets PS1 to current directory
# eliminates duplicate entries
# implements "cd -name"
# stores cd history in $CDHISTFILE
#
# The alias routes every `cd` through cdh; the quoted 'cd' calls inside
# the function bypass this alias and reach the real builtin.
alias cd=cdh
# implements ksh "cd old new"
#
# cdh [dir | - | -l | -N | -name | old new]
#   (none)    cd to $HOME
#   -         cd to the previous directory ($OLDPWD, or history slot 1)
#   -l        list the history (index 0 = current directory, printed last)
#   -N        cd to history entry N (0-99)
#   -name     cd to the first history entry containing "name"
#   old new   ksh-style: substitute old->new in $PWD and cd there
function cdh
{
typeset -i cdlen i
typeset t
# no argument: behave like plain `cd` (go home)
if [ $# -eq 0 ]
then
set -- $HOME
fi
# two arguments: ksh "cd old new" -- substitute old->new in $PWD
if [ $# -eq 2 ]
then
set -- $(echo $PWD | sed "s|$1|$2|")
fi
if [ "$CDHISTFILE" -a -r "$CDHISTFILE" ] # if directory history exists
then
typeset CDHIST
i=-1
while read -r t # read directory history file
do
CDHIST[i=i+1]=$t
done <$CDHISTFILE
fi
# keep the current directory at the head of the history
if [ "${CDHIST[0]}" != "$PWD" -a "$PWD" != "" ]
then
_cdins # insert $PWD into cd history
fi
cdlen=${#CDHIST[*]} # number of elements in history
case "$@" in
-) # cd to new dir
# "cd -": fall back to history slot 1 when $OLDPWD is not yet set
if [ "$OLDPWD" = "" ] && ((cdlen>1))
then
echo ${CDHIST[1]}
'cd' ${CDHIST[1]}
else
'cd' $@
fi
;;
-l) # print directory list
typeset -i num
((i=cdlen))
while (((i=i-1)>=0))
do
num=$i
echo "$num ${CDHIST[i]}"
done
return
;;
-[0-9]|-[0-9][0-9]) # cd to dir in list
if (((i=${1#-})<cdlen))
then
echo ${CDHIST[i]}
'cd' ${CDHIST[i]}
else
'cd' $@
fi
;;
-*) # cd to matched dir in list
t=${1#-}
i=1
while ((i<cdlen))
do
case ${CDHIST[i]} in
*$t*)
echo ${CDHIST[i]}
'cd' ${CDHIST[i]}
break
;;
esac
((i=i+1))
done
# no history entry matched: treat the argument as a literal directory
if ((i>=cdlen))
then
'cd' $@
fi
;;
*) # cd to new dir
'cd' $@
;;
esac
_cdins # insert $PWD into cd history
# persist the (possibly updated) history back to $CDHISTFILE
if [ "$CDHISTFILE" ]
then
cdlen=${#CDHIST[*]} # number of elements in history
i=0
while ((i<cdlen))
do
echo ${CDHIST[i]} # update directory history
((i=i+1))
done >$CDHISTFILE
fi
PS1="$PWD: "
}
# Insert $PWD at the head of CDHIST, de-duplicating: find any existing
# occurrence of $PWD (or the end of the list, capped at 40 entries),
# shift everything before it up one slot — overwriting the duplicate —
# then write $PWD into slot 0.
function _cdins # insert $PWD into cd history
{ # meant to be called only by cdh
typeset -i i
((i=0))
while ((i<${#CDHIST[*]})) # see if dir is already in list
do
if [ "${CDHIST[$i]}" = "$PWD" ]
then
break
fi
((i=i+1))
done
if ((i>40)) # limit max size of list
then
i=40
fi
while (((i=i-1)>=0)) # bump old dirs in list
do
CDHIST[i+1]=${CDHIST[i]}
done
CDHIST[0]=$PWD # insert new directory in list
}
| true
|
345e8e5013a555f4a33f8c65c9cbe9bff1e14f8a
|
Shell
|
jaakritso/dotfiles
|
/brew.sh
|
UTF-8
| 1,390
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Provision command-line tooling on macOS via Homebrew.
# (Optionally) Turn off brew's analytics https://docs.brew.sh/Analytics
# brew analytics off

# Refresh formula metadata, then bring existing installs up to date.
brew update
brew upgrade

# Formulae to install, one `brew install` each, in the same order the
# original script ran them.
formulae=(
  coreutils            # GNU core utilities (macOS ships outdated ones)
  moreutils
  findutils            # GNU find/locate/updatedb/xargs, g-prefixed
  gnu-sed              # GNU sed
  # Updated shells — add /usr/local/bin/<EACHSHELL> to /etc/shells before chsh.
  bash
  zsh
  fish
  bash-completion@2
  wget
  # More recent versions of some OS X tools
  vim
  nano
  grep
  openssh
  z                    # hopping around folders
  entr                 # run this script when this file changes guy.
  gh                   # github util
  git-delta            # nicer git diffs
  git
  shellcheck           # linting for .sh files
  # Other useful binaries
  the_silver_searcher  # ack is an alternative, tbh i forget which i like more.
  fzf
  node                 # installs npm too, using the recommended method
)
for formula in "${formulae[@]}"; do
  brew install "$formula"
done

# Remove outdated versions from the cellar
brew cleanup
| true
|
f2f074b5a37975ffd2d0656107591c9454270f3e
|
Shell
|
chrissparksnj/offsec
|
/prog_docs/bashscripts/samba_multi_ms.sh
|
UTF-8
| 238
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch the Metasploit Samba usermap_script exploit against a target host.
# Usage: ./samba_multi_ms.sh <ipaddress>
if [ $# -eq 0 ]; then
	echo "Usage: $0 <ipaddress>   eg: $0 10.11.1.5";
else
	# Bug fix: lhost was misspelled "1ocalhost" (digit one), so msfconsole
	# could not resolve the listener host.
	sudo msfconsole -x "use exploit/multi/samba/usermap_script; set lhost localhost; set rhost $1; run; exit"
fi
| true
|
be192977a4fc22a90e6a1859c537b0410fed2e60
|
Shell
|
gopinathansubu/mojo-standalone
|
/build/install-build-deps.sh
|
UTF-8
| 3,243
| 3.84375
| 4
|
[] |
no_license
|
#!/bin/bash
# Install build prerequisites for the standalone Mojo tree: gsutil, gn,
# ninja, gtest, plus several Python packages vendored into third_party/.
ROOT_DIR="$(dirname $(realpath $(dirname "${BASH_SOURCE[0]}")))"
BUILD_DIR="$ROOT_DIR/build"
BUILDTOOLS_DIR="$ROOT_DIR/buildtools"
THIRD_PARTY_DIR="$ROOT_DIR/third_party/"
GTEST_DIR="$ROOT_DIR/testing/gtest"
# Download a .tar.gz, extract it, and move its inner PACKAGE_DIR directory
# into third_party/PACKAGE_DIR, replacing whatever was there before.
#   $1 - source tarball URL; $2 - package directory name inside the tarball
function install_dep_from_tarfile {
SRC_URL=$1
PACKAGE_DIR=$2
FILENAME="$(basename $SRC_URL)"
# e.g. "ply-3.4/ply": the versioned top-level dir plus the package subdir
DOWNLOAD_DIR="$(basename $FILENAME .tar.gz)/$PACKAGE_DIR"
INSTALL_DIR="$THIRD_PARTY_DIR/$PACKAGE_DIR"
OLD_DIR="$THIRD_PARTY_DIR/$PACKAGE_DIR.old"
mkdir -p "$INSTALL_DIR"
cd "$INSTALL_DIR"
echo "Downloading $SRC_URL"
curl --remote-name "$SRC_URL"
tar xvzf "$FILENAME"
cd "$DOWNLOAD_DIR"
# Replace with new directory
cd "$ROOT_DIR"
mv "$INSTALL_DIR" "$OLD_DIR"
mv "$OLD_DIR/$DOWNLOAD_DIR" "$INSTALL_DIR"
rm -fr "$OLD_DIR"
}
# BODY
# Install gsutil.
cd $THIRD_PARTY_DIR
curl --remote-name https://storage.googleapis.com/pub/gsutil.tar.gz
tar xfz gsutil.tar.gz
rm gsutil.tar.gz
# Install gn.
$THIRD_PARTY_DIR/gsutil/gsutil cp gs://chromium-gn/56e78e1927e12e5c122631b7f5a46768e527f1d2 $BUILDTOOLS_DIR/gn
chmod 700 $BUILDTOOLS_DIR/gn
# Build and install ninja.
cd $THIRD_PARTY_DIR
rm -rf ninja
git clone https://github.com/martine/ninja.git -b v1.5.1
# NOTE(review): bootstrap.py is invoked from third_party/, not from inside
# the ninja checkout — the `cp ./ninja/ninja` below assumes the binary is
# produced inside the source dir; confirm this matches ninja v1.5.1.
./ninja/bootstrap.py
cp ./ninja/ninja $BUILDTOOLS_DIR
chmod 700 $BUILDTOOLS_DIR/ninja
# Install gtest at the correct revision.
rm -rf $GTEST_DIR
mkdir -p $GTEST_DIR
git clone https://chromium.googlesource.com/external/googletest.git $GTEST_DIR
cd $GTEST_DIR
git checkout 4650552ff637bb44ecf7784060091cbed3252211 # from svn revision 692
# Download and extract PLY.
# Homepage:
# http://dabeaz.com/ply
install_dep_from_tarfile "http://dabeaz.com/ply/ply-3.4.tar.gz" "ply"
# Download and extract Jinja2.
# Homepage:
# http://jinja.pocoo.org/
# Installation instructions:
# http://jinja.pocoo.org/docs/intro/#from-the-tarball-release
# Download page:
# https://pypi.python.org/pypi/Jinja2
JINJA2_SRC_URL="https://pypi.python.org/packages/source/"
JINJA2_SRC_URL+="J/Jinja2/Jinja2-2.7.1.tar.gz"
install_dep_from_tarfile $JINJA2_SRC_URL 'jinja2'
# Download and extract MarkupSafe.
# Homepage:
# https://pypi.python.org/pypi/MarkupSafe
MARKUPSAFE_SRC_URL="https://pypi.python.org/packages/source/"
MARKUPSAFE_SRC_URL+="M/MarkupSafe/MarkupSafe-0.23.tar.gz"
install_dep_from_tarfile $MARKUPSAFE_SRC_URL 'markupsafe'
# Download and extract Cython.
# Uses the locally installed cython's version to fetch a matching source
# archive via pip, which may arrive as either a .zip or a .tar.gz.
mkdir -p $THIRD_PARTY_DIR/cython
cd $THIRD_PARTY_DIR/cython
read prefix cython_version <<< `pip show cython | grep "^Version:"`
echo "cython version = $cython_version"
pip_output=`pip install cython -d . --no-binary :all:`
echo "pip saved in $pip_output"
zip_type="zip"
tar_type="tar.gz"
zip_cython_package="Cython-$cython_version.$zip_type"
#echo "zip = $zip_cython_package"
tar_cython_package="Cython-$cython_version.$tar_type"
#echo "gunzip = $tar_cython_package"
if [ -f ./$zip_cython_package ]
then
echo "$zip_cython_package found "
unzip $zip_cython_package
rm -rf $zip_cython_package
elif [ -f $tar_cython_package ]
then
echo "$tar_cython_package found "
gunzip $tar_cython_package
tar -xf "Cython-$cython_version.tar"
else
echo "unknown found: `ls Cython-*` "
fi
mv "Cython-$cython_version" src
# Install the Mojo shell
$BUILD_DIR/download_mojo_shell.py
| true
|
b70c00abe74c0045f22dd329911228ff578e5c9b
|
Shell
|
rahulkumarjhamtani/Linux-Shell-Programming-Lab
|
/pattern4.sh
|
UTF-8
| 440
| 3.21875
| 3
|
[] |
no_license
|
# Print a numeric pyramid read row count from stdin: row i is right-padded
# to centre it and counts up from i to its peak 2i-1, then back down
# (e.g. row 3 of a 3-row pyramid is "34543").

# print_pyramid ROWS — emit the pattern for ROWS rows on stdout.
# Bug fix / simplification: the original had three branches where the
# middle (col == row) and trailing (col > row) branches were identical,
# so they are merged into a single "count back down" case.
print_pyramid() {
	local rows=$1 row col num
	for (( row = 1; row <= rows; row++ )); do
		# leading spaces centre the row under the pyramid apex
		for (( col = row; col <= rows - 1; col++ )); do
			printf ' '
		done
		num=$row
		for (( col = 1; col <= 2 * row - 1; col++ )); do
			printf '%s' "$num"
			if (( col < row )); then
				num=$(( num + 1 ))   # climbing toward the peak
			else
				num=$(( num - 1 ))   # at or past the peak: count down
			fi
		done
		printf '\n'
	done
}

echo "Number of rows"
read -r n
print_pyramid "$n"
| true
|
29037bb924779a721ef822fd21f9991d86f5ccd5
|
Shell
|
mohi048/common-scripts
|
/Preq_Kilo.sh
|
UTF-8
| 2,445
| 3.125
| 3
|
[] |
no_license
|
#!/bin/bash
# Prepare an Ubuntu Trusty node for OpenStack Kilo: enable the cloud
# archive, install Open vSwitch, create br-int/br-ex bridges, wire the
# bridges up at boot via /etc/rc.local, and relax rp_filter sysctls.
apt-get update
apt-get -y upgrade
apt-get -y install python-software-properties
apt-get -y install ubuntu-cloud-keyring
echo "deb http://ubuntu-cloud.archive.canonical.com/ubuntu trusty-updates/kilo main" > /etc/apt/sources.list.d/cloudarchive-kilo.list
apt-get -y update && apt-get -y dist-upgrade
echo "###############################################"
echo "# Installing OVS bridge #"
echo "###############################################"
apt-get -y install openvswitch-switch python-openvswitch
/etc/init.d/openvswitch-switch restart
# Current eth0 IPv4 address and default gateway (pre-systemd ifconfig output)
eth=$(/sbin/ifconfig eth0 | grep 'inet addr:' | cut -d: -f2 | awk '{ print $1}')
gateway=$(ip route show default | awk '/default/ {print $3}')
echo "###############################################"
echo "# Updating rc.local file #"
echo "###############################################"
# Append bridge bring-up commands before the final line of rc.local,
# using ex's "$i" (insert before last line) — but only once.
if grep -q "ifconfig" /etc/rc.local; then
echo "------------- i m skipping addition to rc.local file ....."
else
printf '$i\nsleep 10\n.\nwq\n' | ex - /etc/rc.local
printf '$i\nifconfig eth0 0\n.\nwq\n' | ex - /etc/rc.local
printf '$i\nifconfig br-int up\n.\nwq\n' | ex - /etc/rc.local
printf '$i\nifconfig br-ex '$eth' netmask 255.255.255.0 up\n.\nwq\n' | ex - /etc/rc.local
printf '$i\nroute add default gw '$gateway' dev br-ex metric 100\n.\nwq\n' | ex - /etc/rc.local
fi
echo "###############################################"
echo "# UPDATING sysctl.conf #"
echo "###############################################"
# Uncomment/flip forwarding and rp_filter settings only if still commented.
if grep -q "#net.ipv4.ip_forward" /etc/sysctl.conf; then
sed -i 's/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/g' /etc/sysctl.conf
else
echo "----------- exitting net.ipv4.ip_forward"
fi
if grep -q "#net.ipv4.conf.all.rp_filter" /etc/sysctl.conf; then
sed -i 's/#net.ipv4.conf.all.rp_filter=1/net.ipv4.conf.all.rp_filter=0/g' /etc/sysctl.conf
else
echo "----------- exitting net.ipv4.conf.all.rp_filter"
fi
if grep -q "#net.ipv4.conf.default.rp_filter" /etc/sysctl.conf; then
sed -i 's/#net.ipv4.conf.default.rp_filter=1/net.ipv4.conf.default.rp_filter=0/g' /etc/sysctl.conf
else
echo "---------- exitting net.ipv4.conf.default.rp_filter"
fi
ovs-vsctl add-br br-int
ovs-vsctl add-br br-ex
# shutdown only *schedules* a reboot (+1 minute), so add-port still runs
# before the machine actually goes down.
shutdown -r +1 && ovs-vsctl add-port br-ex eth0
echo "Rebooting the machine , Please standBy !!!!!"
echo "################ SSH IP ADDRESS $eth TO RE-LOGIN ###############"
| true
|
2f11e6a8d1e64f663a1ce22d5bde86b9d1788d62
|
Shell
|
alabuel/hardening.amazon.linux
|
/sections/3.5-firewall-configuration.sh
|
UTF-8
| 3,897
| 2.734375
| 3
|
[] |
no_license
|
# Author: Ariel Abuel
# Benchmark: CIS
# --------------------------------------------
# 3.5 Firewall Configuration
# Applies the IPv4 (3.5.1) and IPv6 (3.5.2) CIS rule sets, the iptables
# package check (3.5.3) and the IPv6 kernel disable (3.6).  Repeated
# per-chain / per-protocol rules are emitted from loops; the commands run
# in exactly the same order as before.  Relies on the harness helpers
# log/execute_command/skip/package_install/grub_option defined elsewhere.
# --------------------------------------------
# 3.5.1.1 Ensure default deny firewall policy (Scored)
log "CIS" "3.5.1.1 Ensure default deny firewall policy (Scored)"
for chain in INPUT OUTPUT FORWARD; do
  execute_command "iptables -P ${chain} DROP"
done
# 3.5.1.2 Ensure loopback traffic is configured (Scored)
log "CIS" "3.5.1.2 Ensure loopback traffic is configured (Scored)"
execute_command "iptables -A INPUT -i lo -j ACCEPT"
execute_command "iptables -A OUTPUT -o lo -j ACCEPT"
execute_command "iptables -A INPUT -s 127.0.0.0/8 -j DROP"
# 3.5.1.3 Ensure outbound and established connections are configured (Not Scored)
log "CIS" "3.5.1.3 Ensure outbound and established connections are configured (Not Scored)"
for proto in tcp udp icmp; do
  execute_command "iptables -A OUTPUT -p ${proto} -m state --state NEW,ESTABLISHED -j ACCEPT"
done
for proto in tcp udp icmp; do
  execute_command "iptables -A INPUT -p ${proto} -m state --state ESTABLISHED -j ACCEPT"
done
# 3.5.1.4 Ensure firewall rules exist for all open ports (Scored)
log "CIS" "3.5.1.4 Ensure firewall rules exist for all open ports (Scored)"
# execute_command "iptables -A INPUT -p <protocol> --dport <port> -m state --state NEW -j ACCEPT"
skip "need protocol and ports for firewall rule"
# --------------------------------------------
# 3.5.2 Configure IPv6 ip6tables
# --------------------------------------------
# 3.5.2.1 Ensure IPv6 default deny firewall policy (Scored)
log "CIS" "3.5.2.1 Ensure IPv6 default deny firewall policy (Scored)"
for chain in INPUT OUTPUT FORWARD; do
  execute_command "ip6tables -P ${chain} DROP"
done
# 3.5.2.2 Ensure IPv6 loopback traffic is configured (Scored)
log "CIS" "3.5.2.2 Ensure IPv6 loopback traffic is configured (Scored)"
execute_command "ip6tables -A INPUT -i lo -j ACCEPT"
execute_command "ip6tables -A OUTPUT -o lo -j ACCEPT"
execute_command "ip6tables -A INPUT -s ::1 -j DROP"
# 3.5.2.3 Ensure IPv6 outbound and established connections are configured (Not Scored)
log "CIS" "3.5.2.3 Ensure IPv6 outbound and established connections are configured (Not Scored)"
for proto in tcp udp icmp; do
  execute_command "ip6tables -A OUTPUT -p ${proto} -m state --state NEW,ESTABLISHED -j ACCEPT"
done
for proto in tcp udp icmp; do
  execute_command "ip6tables -A INPUT -p ${proto} -m state --state ESTABLISHED -j ACCEPT"
done
# 3.5.2.4 Ensure IPv6 firewall rules exist for all open ports (Not Scored)
log "CIS" "3.5.2.4 Ensure IPv6 firewall rules exist for all open ports (Not Scored)"
# execute_command "ip6tables -A INPUT -p <protocol> --dport <port> -m state --state NEW -j ACCEPT"
skip "For each port identified in the audit which does not have a firewall rule establish a proper rule for accepting inbound connections"
# 3.5.3 Ensure iptables is installed (Scored)
log "CIS" "3.5.3 Ensure iptables is installed (Scored)"
package_install "iptables"
# 3.6 Disable IPv6 (Not Scored)
log "CIS" "3.6 Disable IPv6 (Not Scored)"
grub_option "add" "GRUB_CMDLINE_LINUX" "ipv6.disable=1"
#line_replace "/etc/default/grub" "^GRUB_CMDLINE_LINUX=" "GRUB_CMDLINE_LINUX=\"ipv6.disable=1\""
execute_command "grub2-mkconfig -o /boot/grub2/grub.cfg"
| true
|
eb5819d42583d9d4c4523f67b124a3a1e3f2ee41
|
Shell
|
dhh15/multimodal
|
/utilscripts/pdfstopng.sh
|
UTF-8
| 274
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/bash
# pdfs to pngs: convert every single-page PDF in ./singlepagepdfs/ to a
# 300 dpi PNG in ./singelpagepngs/ (directory name kept as-is).
files="./singlepagepdfs/*.pdf"
outdir="./singelpagepngs/"
# run in the dir where the multipage pdfs are
for filename in $files
do
	[ -e "$filename" ] || continue   # glob matched nothing
	# Bug fix: strip the source directory before building the output path.
	# The original `${filename%.pdf}` kept "./singlepagepdfs/" in the name,
	# so the PNG path pointed into a non-existent nested directory.
	base=$(basename "$filename" .pdf)
	echo "$outdir$base.png"
	convert -density 300 "$filename" "$outdir$base.png"
done
| true
|
5f2a36778efdb701c9d5d5a47c0aad8d98d13b1d
|
Shell
|
cloudymation/docker_fundamentals
|
/docker_commit/java.sh
|
UTF-8
| 612
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
# Command transcript (not directly runnable): add JRE/JDK 11 to an Ubuntu
# image via `docker commit` and push it to a registry.  Lines beginning
# with "# " inside the container are run at the container's shell prompt;
# substitute <CONTAINER_ID> and <REGISTRY_USERNAME> before use.
docker run -it ubuntu_bionic_with_git /bin/bash
# apt update && apt upgrade -y && apt install -y default-jre && apt install -y default-jdk
# java --version
# javac --version
# exit
docker container ls -a
# Get last exited container ID
docker logs <CONTAINER_ID>
docker diff <CONTAINER_ID>
docker commit -m "Added JRE & JDK 11." <CONTAINER_ID> ubuntu_bionic_with_git_and_jre-jdk-11
docker login -u <REGISTRY_USERNAME>
docker tag ubuntu_bionic_with_git_and_jre-jdk-11 <REGISTRY_USERNAME>/ubuntu_bionic_with_git_and_jre-jdk-11
docker push <REGISTRY_USERNAME>/ubuntu_bionic_with_git_and_jre-jdk-11
docker logout
| true
|
b1079ccf19139c865b76570e8cb5afd5f116553a
|
Shell
|
rackerlabs/eventlet
|
/build-website.bash
|
UTF-8
| 1,585
| 4.03125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build the eventlet.net static site with Sphinx and stage it onto the
# gh-pages branch for GitHub Pages publication.
set -e
build="$PWD/website-build"
usage="Builds eventlet.net website static pages into ${build}.
Requires sphinx-build, git and Github account."
# Any argument is rejected with the usage text (the `shift` below is
# unreachable dead code kept from an argument-parsing skeleton).
while [ -n "$1" ]; do
# TODO: parse args
echo "$usage" >&2
exit 1
shift
done
if ! which sphinx-build >/dev/null; then
echo "sphinx-build not found. Possible solution: pip install sphinx" >&2
echo "Links: http://sphinx-doc.org/" >&2
exit 1
fi
if ! git status >/dev/null; then
echo "git not found. git and Github account are required to update online documentation." >&2
echo "Links: http://git-scm.com/ https://github.com/" >&2
exit 1
fi
echo "1. clean"
rm -rf "$build"
mkdir -p "$build/doc"
echo "2. build static pages"
cp doc/real_index.html "$build/index.html"
# -b html -- builder, output mode
# -d dir -- path to doctrees cache
# -n -- nit-picky mode (kind of -Wall for gcc)
# -W -- turns warnings into errors
# -q -- quiet, emit only warnings and errors
sphinx-build -b html -d "$build/tmp" -n -q "doc" "$build/doc"
rm -rf "$build/tmp"
rm -f "$build/doc/.buildinfo"
echo "3. Updating git branch gh-pages"
# Remember where we built from so the commit message can reference it.
source_name=`git rev-parse --abbrev-ref HEAD`
source_id=`git rev-parse --short HEAD`
git branch --track gh-pages origin/gh-pages || true
git checkout gh-pages
# Replace the entire branch contents with the freshly built site.
git ls-files -z |xargs -0 rm -f
rm -rf "doc"
mv "$build"/* ./
touch ".nojekyll"
echo "eventlet.net" >"CNAME"
rmdir "$build"
echo "4. Commit"
git add -A
git status
read -p "Carefully read git status output above, press Enter to continue or Ctrl+C to abort"
git commit --edit -m "Website built from $source_name $source_id"
| true
|
7d21da73170330565ea007ba25adcdbf99601cc7
|
Shell
|
yunwilliamyu/secure-distributed-union-cardinality
|
/simulation/run_all_but_mpc.sh
|
UTF-8
| 1,183
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# Run every non-MPC hospital pipeline binary (count / HLL / IDs) over each
# hosp*.npy population file in the current directory: once on the raw file
# and once each against the 10^6 and 10^5 query sets.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
HOSP_DIR="$DIR/../hospital-bin"
for file in hosp*.npy
do
	fbname=$(basename "$file" .npy)
	# All expansions are quoted so paths with spaces cannot split.
	"$HOSP_DIR/count-hospital.py" "$file" "$fbname-count.bin"
	"$HOSP_DIR/hll-hospital.py" "$file" "$fbname-hll.bin"
	"$HOSP_DIR/ids-hospital.py" "$file" "$fbname-ids.bin"
	# 10^6-element query set ("6" prefix); -m variant records HLL frequencies
	"$HOSP_DIR/count-hospital.py" "$DIR/million_test.txt" --hospital-population "$file" "$fbname-6count.bin"
	"$HOSP_DIR/hll-hospital.py" "$DIR/million_test.txt" --hospital-population "$file" "$fbname-6hll.bin"
	"$HOSP_DIR/hll-hospital.py" "$DIR/million_test.txt" --hospital-population "$file" "$fbname-6hllm.bin" -m --hospital-hll-freqs-file "$fbname.freqs"
	"$HOSP_DIR/ids-hospital.py" "$DIR/million_test.txt" --hospital-population "$file" "$fbname-6ids.bin"
	# 10^5-element query set ("5" prefix)
	"$HOSP_DIR/count-hospital.py" "$DIR/10_5.txt" --hospital-population "$file" "$fbname-5count.bin"
	"$HOSP_DIR/hll-hospital.py" "$DIR/10_5.txt" --hospital-population "$file" "$fbname-5hll.bin"
	"$HOSP_DIR/hll-hospital.py" "$DIR/10_5.txt" --hospital-population "$file" "$fbname-5hllm.bin" -m --hospital-hll-freqs-file "$fbname.freqs"
	"$HOSP_DIR/ids-hospital.py" "$DIR/10_5.txt" --hospital-population "$file" "$fbname-5ids.bin"
done
| true
|
faac65d28e80c20feb2af12de138535bbe5d90fe
|
Shell
|
VertxChinaUserGroup/vertx-cn-website
|
/frontend/pre-commit.sh
|
UTF-8
| 727
| 3.609375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Git pre-commit hook: run ESLint (per changed file when node_modules is
# accessible, otherwise via the npm script), then pug-lint and stylelint.
echo "start lint:stage"
# If we can enter node_modules, lint each changed js/jsx/vue file
# individually; otherwise fall back to 'npm run lint:fix'.
if [ -x node_modules ]; then
	# Process substitution (not a pipe) keeps the loop in the current
	# shell so `exit 1` actually fails the hook.  IFS= read -r also
	# tolerates file names with leading spaces or backslashes.
	while IFS= read -r file
	do
		if ! node_modules/.bin/eslint --fix "$file"; then
			echo "ESLint failed on staged file '$file'. Please check your code and try again. You can run ESLint manually via npm run eslint."
			exit 1
		fi
	done < <(git diff HEAD --name-only | grep -E '\.(js|jsx|vue)$')
else
	if ! npm run lint:fix; then
		echo "ESLint failed."
		exit 1
	fi
fi
if ! npm run puglint; then
	echo "puglint failed."
	exit 1
fi
if ! npm run stylelint; then
	echo "stylelint failed."
	exit 1
fi
| true
|
458dd77910b21b58c688bfffc567950ff515de79
|
Shell
|
albertwcheng/albert-bioinformatics-scripts
|
/CDTToMatrix.sh
|
UTF-8
| 313
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Convert a Cluster/TreeView CDT file to a plain expression matrix:
# keep column 1 plus the data columns from 4 onward, and drop the
# AID and EWEIGHT header rows.
if [ $# -lt 1 ]; then
	echo "$0 CDTFile [OutMatrix:default CDTFile/.cdt/.mat]"
	exit 1   # bug fix: a usage error now exits non-zero (bare `exit` returned 0)
fi
CDTFile=$1
if [ $# -ge 2 ]; then
	OutMatrix=$2
else
	# Default output name: strip .cdt/.CDT, append .mat
	OutMatrix=${CDTFile/.cdt/}
	OutMatrix=${OutMatrix/.CDT/}.mat
fi
cuta.py -f1,4-_1 "$CDTFile" | awk '$1!="AID" && $1!="EWEIGHT"' > "$OutMatrix" #'FNR!=2 && FNR!=3'
| true
|
28ccdc6d432c7b2de666a517577030de8b5f6361
|
Shell
|
stcui007/DMOD
|
/scripts/update_package.sh
|
UTF-8
| 8,017
| 4.28125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Rebuild and (re)install DMOD's local-source Python packages into a
# virtual environment.  Relies on helper functions sourced from
# scripts/shared/ (py_dev_* and the default setup).
INFO='Rebuild and update the local-source Python package at given path in the implied or specified Python virtual env.
Defaults to all local packages in the event no single directory is provided.'
SCRIPT_PARENT_DIR="$(cd "$(dirname "${0}")"; pwd)"
# Set SHARED_FUNCS_DIR (as needed by default_script_setup.sh) to the correct path before using it to source its contents
SHARED_FUNCS_DIR="${SCRIPT_PARENT_DIR}/shared"
if [ ! -d "${SHARED_FUNCS_DIR}" ]; then
>&2 echo "Error: could not find shared script functions script at expected location:"
>&2 echo " ${SHARED_FUNCS_DIR}"
exit 1
fi
# Import shared default script startup source
. ${SHARED_FUNCS_DIR}/default_script_setup.sh
# Import shared functions used for python-dev-related scripts
. ${SHARED_FUNCS_DIR}/py_dev_func.sh
# Import bash-only shared functions used for python-dev-related scripts
. ${SHARED_FUNCS_DIR}/py_dev_bash_func.sh
# Print the usage/help text (NAME is set by the sourced setup script).
usage()
{
local _O="${NAME:?}:
${INFO:?}
Usage:
${NAME:?} -h|-help|--help
${NAME:?} [opts] [<directory>]
Options:
--dependencies | -d
Install Python dependency packages defined in the
requirements.txt file in the project root
--dependencies-only | -D
Like the --dependencies option, except only do this
step, without performing any other package building or
updating
--libraries-only | --no-service-packages | -l
Include only library packages when default installing
all local-source packages (ignored if single package
directory is specified)
--venv <dir>
Set the directory of the virtual environment to use.
By default, the following directories will be checked,
with the first apparently valid virtual env being used:
- ./venv/
- ./.venv/
- ${SCRIPT_PARENT_DIR:-?}/venv/
- ${SCRIPT_PARENT_DIR:-?}/.venv/
"
echo "${_O}" 2>&1
}
# Make sure we end up in the same starting directory, and deactivate venv if it was activated
# (registered via `trap` below so it runs on every exit path).
cleanup_before_exit()
{
# Make sure we don't accidentally run this more than once
CLEANUP_DONE=$((${CLEANUP_DONE:=0}+1))
if [ ${CLEANUP_DONE} -gt 1 ]; then
>&2 echo "Warning: cleanup function being run for ${CLEANUP_DONE} time"
fi
# Go back to shell starting dir (STARTING_DIR is set by the sourced setup)
cd "${STARTING_DIR:?}"
# If the flag is set that a virtual environment was activated, then deactivate it
if [ ${VENV_WAS_ACTIVATED:-1} -eq 0 ]; then
>&2 echo "Deactiving active virtual env at ${VIRTUAL_ENV}"
deactivate
fi
}
# Parse command-line options; each flag may be given at most once, and
# conflicting combinations trigger the usage text and a non-zero exit.
while [ ${#} -gt 0 ]; do
case "${1}" in
--dependencies|-d)
[ -n "${DO_DEPS:-}" ] && usage && exit 1
DO_DEPS='true'
;;
--dependencies-only|-D)
[ -n "${DO_DEPS:-}" ] && usage && exit 1
# Also, this shouldn't be set, as it assumes we will be building, which conflicts with this option
[ -n "${NO_SERVICE_PACKAGES:-}" ] && usage && exit 1
DO_DEPS='true'
DEPS_ONLY='true'
;;
-h|--help|-help)
usage
exit
;;
--libraries-only|--no-service-packages|-l)
[ -n "${NO_SERVICE_PACKAGES:-}" ] && usage && exit 1
# Also, make sure we aren't marked for dependencies only
[ -n "${DEPS_ONLY:-}" ] && usage && exit 1
NO_SERVICE_PACKAGES='true'
;;
--venv)
[ -n "${VENV_DIR:-}" ] && usage && exit 1
VENV_DIR="$(py_dev_validate_venv_dir "${2}")"
[ -z "${VENV_DIR:-}" ] && echo "Error: provided arg ${2} is not a valid virtual env directory" && exit 1
shift
;;
*)
# Positional arg: a single package directory to rebuild.
# Checks that PACKAGE_DIRS is, in fact, set (if it is, it'll get replaced with x)
[ ${#PACKAGE_DIRS[@]} -gt 0 ] && usage && exit 1
[ ! -d "${1}" ] && >&2 echo "Error: directory arg '${1}' is not an existing directory" && usage && exit 1
PACKAGE_DIRS[0]="${1}"
;;
esac
shift
done
# Look for a default venv to use if needed
py_dev_detect_default_venv_directory
# Bail here if a valid venv is not set
[ -z "${VENV_DIR:-}" ] && echo "Error: no valid virtual env directory could be determined or was given" && exit 1
# Take appropriate action to activate the virtual environment if needed
py_dev_activate_venv
# Trap to make sure we "clean up" script activity before exiting
trap cleanup_before_exit 0 1 2 3 6 15
# After setting VENV, if set to get dependencies, do that, optionally exiting after if that's all we are set to do
if [ -n "${DO_DEPS:-}" ]; then
_REQ_FILE="${PROJECT_ROOT}/requirements.txt"
if [ ! -f "${_REQ_FILE}" ]; then
>&2 echo "Error: unable to find expected Python requirements file at ${_REQ_FILE}"
exit 1
fi
pip install --upgrade -r "${_REQ_FILE}"
# Also, if set to only get dependencies, exit here
[ -n "${DEPS_ONLY:-}" ] && exit
fi
# If unset, meaning no single package directory was specified, assume all packages should be installed.
if [ -z ${PACKAGE_DIRS+x} ]; then
# First, get all them, in the separate arrays for lib and service packages.
# (py_dev_bash_get_package_directories populates LIB_PACKAGE_DIRS and
# SERVICE_PACKAGE_DIRS.)
py_dev_bash_get_package_directories
spi=0
for i in ${LIB_PACKAGE_DIRS[@]}; do
# Include package directory, as long as there is a setup.py for the package
if [ -e "${i}/setup.py" ]; then
PACKAGE_DIRS[${spi}]="${i}"
spi=$((spi+1))
fi
done
# Though check for option indicating only library packages should be installed.
if [ -z "${NO_SERVICE_PACKAGES:-}" ]; then
for i in ${SERVICE_PACKAGE_DIRS[@]}; do
# Include package directory, as long as there is a setup.py for the package
if [ -e "${i}/setup.py" ]; then
PACKAGE_DIRS[${spi}]="${i}"
spi=$((spi+1))
fi
done
fi
fi
# Dist names and the matching --find-links=.../dist/ pip args, filled in
# by build_package_and_collect_dist_details below.
PACKAGE_DIST_NAMES=()
# The --find-links=.../dist/ arguments needed for the dist/ directories when doing the local pip instal
PACKAGE_DIST_DIR_LINK_ARGS=()
# Build sdist+wheel for one package and record its dist name and
# --find-links arg in the global arrays.
#   $1 - package directory; $2 - directory to return to afterwards
build_package_and_collect_dist_details()
{
if [ ${#} -lt 1 ]; then
>&2 echo "Error: unable to build package without package directory argument"
exit 1
elif [ ${#} -lt 2 ]; then
>&2 echo "Error: unable to build package without starting directory argument"
exit 1
fi
# Go into the package directory, build new dists, and install them
cd "${1}"
# Collect dist names and dist link args as we go.
# Of course, this means we need to figure out the index of the next array values.
# Fortunately, this should just be the current size of the arrays
local _N=${#PACKAGE_DIST_NAMES[@]}
PACKAGE_DIST_NAMES[${_N}]="$(py_dev_extract_package_dist_name_from_setup)"
# Bail if we can't detect the appropriate package dist name
if [ -z "${PACKAGE_DIST_NAMES[${_N}]}" ]; then
>&2 echo "Error: unable to determine package dist name from ${1}/setup.py"
exit 1
fi
# Then add the generated dist directory pip arg value to that array
PACKAGE_DIST_DIR_LINK_ARGS[${_N}]="--find-links=${1}/dist"
# Clean any previous artifacts and build
py_dev_clean_dist && python setup.py sdist bdist_wheel
# Return to starting directory if one was given
cd "${2}"
}
# Build the packages, and build lists/arrays of dist names and '--find-links=' pip arg values as we go
for pd in ${PACKAGE_DIRS[@]}; do
build_package_and_collect_dist_details "${pd}" "${STARTING_DIR:?}"
done
# Uninstall all existing package dists
pip uninstall -y ${PACKAGE_DIST_NAMES[@]}
# Install new dists, using the generated '--find-links=' args so we can find the local copies of build package dists
pip install --upgrade ${PACKAGE_DIST_DIR_LINK_ARGS[@]} ${PACKAGE_DIST_NAMES[@]}
# Finally, clean up all the created build artifacts in the package directories
for pd in ${PACKAGE_DIRS[@]}; do
cd "${pd}"
py_dev_clean_dist
cd "${STARTING_DIR:?}"
done
| true
|
46889bb719b76e5d7a85d1cbd7cf244101ad60ea
|
Shell
|
IDR/idr-utils
|
/scripts/annotate/re-ann.bash
|
UTF-8
| 519
| 2.671875
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
# Delete all map annotations on an OMERO object and re-apply them from the
# bulkmap config (yml) and annotation CSV listed on line $1 of ./input.
export PATH=$PATH:/home/omero/OMERO.server/bin
export IDR=../../
set -e
set -u
omero -q login demo@localhost
# Line $1 of 'input' holds: <relative yml path> <object id>
ARGS=$(sed "$1"'q;d' input)
YML=$IDR/$(echo "$ARGS" | cut -f1 -d" ")
CSV=${YML/bulkmap-config.yml/annotation.csv}   # the CSV sits next to the yml
OBJID=$(echo "$ARGS" | cut -f2 -d" ")
# 1+2: delete existing map annotations (without and with the config);
# 3: re-populate from the CSV; 4: re-apply the bulkmap config.
omero metadata populate --context deletemap "$OBJID"
omero metadata populate --context deletemap --cfg "$YML" "$OBJID"
omero metadata populate --file "$CSV" "$OBJID"
omero metadata populate --context bulkmap --cfg "$YML" "$OBJID"
| true
|
3a5efa97d42df367bbba684c49928f3c10b28af3
|
Shell
|
IMIO/imio.pm.locales
|
/src/imio/pm/locales/locales/sync_pos.sh
|
UTF-8
| 582
| 2.578125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Sync every .po translation file against its .pot template and compile
# the binary .mo catalogues, for all i18n domains and languages below.
# Arrays replace the original whitespace-split strings so quoting is safe.
domains=(PloneMeeting plone imio.actionspanel datagridfield imio.annex imio.history collective.eeafaceted.batchactions collective.eeafaceted.z3ctable eea collective.behavior.talcondition collective.documentgenerator collective.iconifiedcategory collective.contact.core)
languages=(de en es fr nl)
for file in "${domains[@]}"; do
  for language in "${languages[@]}"; do
    touch "$language/LC_MESSAGES/$file.po"   # ensure the .po exists before syncing
    i18ndude sync --pot "$file.pot" "$language/LC_MESSAGES/$file.po"
    msgfmt -o "$language/LC_MESSAGES/$file.mo" "$language/LC_MESSAGES/$file.po"
  done
done
| true
|
46833caeb12da4492ff2717e96056c005feea3cb
|
Shell
|
gikeymarcia/dotfiles
|
/.scripts/setup/apple-webcam-installer.sh
|
UTF-8
| 1,084
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
# Mikey Garcia, @gikeymarcia
# install firmware and drivers for apple macbook webcam in Ubuntu
# dependencies: automatically installed
# adapted from https://askubuntu.com/a/1215628
# dependencies
sudo apt install git curl xz-utils cpio kmod libssl-dev checkinstall -y
# remove existing dir and clone into it
# ($1 - repo URL, $2 - target dir; on a first run the rm prints a
# harmless "no such file" error since the dir does not exist yet)
clean_clone () {
repo=$1; dir=$2
sudo rm -r "$dir"
git clone "$repo" "$dir"
}
# extract firmware
firmware=/tmp/apple-webcam/firmware
clean_clone https://github.com/patjak/facetimehd-firmware.git "$firmware"
cd "$firmware" || exit
make
sudo make install
# install drivers
drivers=/tmp/apple-webcam/drivers
clean_clone https://github.com/patjak/bcwc_pcie.git "$drivers"
cd "$drivers" || exit
make
sudo make install
sudo depmod
# swap the stock bdc_pci module for the facetimehd driver
sudo modprobe -r bdc_pci
sudo modprobe facetimehd
echo "~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-"
# make the driver load at boot (only append once)
if [ "$(grep -c ^facetimehd /etc/modules)" -eq 0 ]; then
echo "adding 'facetimehd' to /etc/modules"
echo "facetimehd" | sudo tee -a /etc/modules
else
echo "'facetimehd' already in /etc/modules"
fi
cat /etc/modules
| true
|
c967ce41fda9eef41ed204300d0ec6aa0df123c7
|
Shell
|
noandrea/aeternity
|
/deployment/deploy_api_docs.sh
|
UTF-8
| 452
| 3.203125
| 3
|
[
"ISC"
] |
permissive
|
#!/bin/bash
# Publish the node's swagger.json to the aeternity/api-docs repository,
# committing and pushing only when the file actually changed.
set -e
git clone git@github.com:aeternity/api-docs.git /tmp/api-docs
cp apps/aehttp/priv/swagger.json /tmp/api-docs/
cd /tmp/api-docs/
git add swagger.json
# $(...) replaces the legacy backticks; empty porcelain output means
# the staged file is identical to what is already committed.
STATUS=$(git status --porcelain)
if [ -z "$STATUS" ]; then
	echo "Nothing to commit, docs did not change"
else
	git config user.name "CircleCI"
	git config user.email "circleci@aeternity.com"
	git commit -a -m "Update Aeternity node API docs to $CIRCLE_TAG"
	git push origin master
fi
| true
|
79dd3e46176d2528dbbc10579eba42141f9365ce
|
Shell
|
swftvsn/package-libvips-darwin
|
/libvips-package-sharp-tar.sh
|
UTF-8
| 3,040
| 3.21875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Package a self-contained libvips (plus dependencies) for darwin-x64:
# collect headers and dylibs via pkg-config, rewrite install names to
# @rpath, record component versions, and emit a compressed tarball.

# Clean working directories
rm -rf lib include
mkdir lib include
# Use pkg-config to automagically find and copy necessary header files
for path in $(pkg-config --cflags --static vips-cpp libcroco-0.6 | tr ' ' '\n' | grep '^-I' | cut -c 3- | sort | uniq); do
  cp -R "${path}/" include;
done;
rm include/vips/vipsc++.h
# Manually copy JPEG and GIF header files
cp /usr/local/opt/jpeg-turbo/include/*.h include
cp /usr/local/opt/giflib/include/*.h include
# Use pkg-config to automagically find and copy necessary dylib files.
# Bug fix: the -name pattern is quoted so the shell cannot glob-expand
# '*.dylib' against the current directory before find sees it.
for path in $(pkg-config --libs --static vips-cpp libcroco-0.6 | tr ' ' '\n' | grep '^-L' | cut -c 3- | sort | uniq); do
  find "${path}" -type f -name '*.dylib' | xargs -I {} cp {} lib;
done;
rm -f lib/libvipsCC.*.dylib
# Manually copy JPEG and GIF dylib files
cp /usr/local/opt/jpeg-turbo/lib/libjpeg.8.dylib lib
cp /usr/local/opt/jpeg/lib/libjpeg.9.dylib lib
cp /usr/local/opt/giflib/lib/libgif.7.dylib lib
# Modify all dylib file dependencies to use relative paths
cd lib
for filename in *.dylib; do
  chmod 644 "$filename";
  install_name_tool -id "@rpath/$filename" "$filename"
  # Point each /usr/local dependency at the @rpath twin shipped alongside.
  for dependency in $(otool -L "$filename" | cut -d' ' -f1 | grep '/usr/local'); do
    install_name_tool -change "$dependency" "@rpath/$(basename "$dependency")" "$filename";
  done;
done;
cd ..
# Fix file permissions
chmod 644 include/*.h
chmod 644 lib/*.dylib
# Generate versions.json
printf "{\n\
\"cairo\": \"$(pkg-config --modversion cairo)\",\n\
\"croco\": \"$(pkg-config --modversion libcroco-0.6)\",\n\
\"exif\": \"$(pkg-config --modversion libexif)\",\n\
\"fontconfig\": \"$(pkg-config --modversion fontconfig)\",\n\
\"freetype\": \"$(pkg-config --modversion freetype2)\",\n\
\"gdkpixbuf\": \"$(pkg-config --modversion gdk-pixbuf-2.0)\",\n\
\"gif\": \"$(grep GIFLIB_ include/gif_lib.h | cut -d' ' -f3 | paste -s -d'.' -)\",\n\
\"glib\": \"$(pkg-config --modversion glib-2.0)\",\n\
\"gsf\": \"$(pkg-config --modversion libgsf-1)\",\n\
\"harfbuzz\": \"$(pkg-config --modversion harfbuzz)\",\n\
\"jpeg\": \"$(grep JPEG_LIB_VERSION_ include/jpeglib.h | cut -d' ' -f4 | paste -s -d'.' -)\",\n\
\"lcms\": \"$(pkg-config --modversion lcms2)\",\n\
\"orc\": \"$(pkg-config --modversion orc-0.4)\",\n\
\"pango\": \"$(pkg-config --modversion pango)\",\n\
\"pdfium\": \"master\",\n\
\"pixman\": \"$(pkg-config --modversion pixman-1)\",\n\
\"png\": \"$(pkg-config --modversion libpng)\",\n\
\"svg\": \"$(pkg-config --modversion librsvg-2.0)\",\n\
\"tiff\": \"$(pkg-config --modversion libtiff-4)\",\n\
\"vips\": \"$(pkg-config --modversion vips-cpp)\",\n\
\"webp\": \"$(pkg-config --modversion libwebp)\",\n\
\"xml\": \"$(pkg-config --modversion libxml-2.0)\"\n\
}" >versions.json
printf "\"darwin-x64\"" >platform.json
# Generate tarball
TARBALL=libvips-$(pkg-config --modversion vips-cpp)-darwin-x64.tar.gz
tar cfz "${TARBALL}" include lib *.json
advdef --recompress --shrink-insane "${TARBALL}"
# Remove working directories
rm -rf lib include *.json
# Display checksum
shasum *.tar.gz
| true
|
7b676d669d96e03b6f3064c415b2f710bbab7ba3
|
Shell
|
zxing2004/docker
|
/18.09/dind/dockerd-entrypoint.sh
|
UTF-8
| 686
| 3.671875
| 4
|
[] |
no_license
|
#!/bin/sh
# Entrypoint for the Docker-in-Docker image: default to launching dockerd
# listening on the unix socket and TCP 2375, but pass through any explicit
# command unchanged.
set -e
# no arguments passed
# or first arg is `-f` or `--some-option`
# ("${1#-}" != "$1" is the POSIX test for "first arg starts with a dash")
if [ "$#" -eq 0 ] || [ "${1#-}" != "$1" ]; then
	# add our default arguments
	set -- dockerd \
		--host=unix:///var/run/docker.sock \
		--host=tcp://0.0.0.0:2375 \
		"$@"
fi
if [ "$1" = 'dockerd' ]; then
	if [ -x '/usr/local/bin/dind' ]; then
		# if we have the (mostly defunct now) Docker-in-Docker wrapper script, use it
		set -- '/usr/local/bin/dind' "$@"
	fi
	# explicitly remove Docker's default PID file to ensure that it can start properly if it was stopped uncleanly (and thus didn't clean up the PID file)
	find /run /var/run -iname 'docker*.pid' -delete
fi
exec "$@"
| true
|
8a1d8890768dad4414f173a15c9bfbc327684dbb
|
Shell
|
Urdip7633/COMP2101
|
/bash/guessinggame.sh
|
UTF-8
| 1,574
| 4.3125
| 4
|
[] |
no_license
|
#!/bin/bash
#
# This script implements a guessing game
# it will pick a secret number from 1 to 10
# it will then repeatedly ask the user to guess the number
# until the user gets it right

# Give the user instructions for the game
cat <<EOF
Let's play a game.
I will pick a secret number from 1 to 10 and you have to guess it.
If you get it right, you get a virtual prize.
Here we go!
EOF

# Pick the secret number; start with a guess that cannot match.
secretnumber=$(( RANDOM % 10 + 1 ))
userguess=0

# Keep asking until the guess matches the secret number.
while [ "$userguess" != "$secretnumber" ]; do
	# TASK 1: reject blank input; also bail out if stdin is closed
	# (bug fix: the original looped forever on end-of-file).
	userguess=
	while [ -z "$userguess" ]; do
		read -p "Give me a number from 1 to 10: " userguess || exit 1
	done
	# TASK 2: accept only whole numbers from 1 to 10 inclusive.
	# Bug fixes: non-numeric input used to crash the numeric tests, and
	# `-gt 1` wrongly rejected a perfectly valid guess of 1.
	case $userguess in
		*[!0-9]*) echo "That is not a number from 1 to 10."; continue ;;
	esac
	if [ "$userguess" -lt 1 ] || [ "$userguess" -gt 10 ]; then
		echo "That number is not between 1 and 10."
		continue
	fi
	# TASK 3: tell the player whether the guess was too high or too low.
	if [ "$userguess" -gt "$secretnumber" ]; then
		echo "Sorry The number you guesed is HIGH"
	elif [ "$userguess" -lt "$secretnumber" ]; then
		echo "Sorry The number you guesed is LOW"
	else
		echo "Myboy.... You are great you guess it right"
		exit
	fi
done
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.