blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
4
115
path
stringlengths
2
970
src_encoding
stringclasses
28 values
length_bytes
int64
31
5.38M
score
float64
2.52
5.28
int_score
int64
3
5
detected_licenses
listlengths
0
161
license_type
stringclasses
2 values
text
stringlengths
31
5.39M
download_success
bool
1 class
bdd4cd147bdde4f07e720335f396108a8e1fd58e
Shell
sshambar/readynas-test
/TEST10/start.sh
UTF-8
854
3.828125
4
[]
no_license
#!/bin/bash
#
# Ensure that symlinks are in place: back up the stock /usr/sbin/mytest and
# point it at the add-on copy, restarting the service when the link changed.
set -e

LOG=/tmp/test-addon.status
SERVICE=TEST10

# Back up $1 to $1.orig unless $1 is already a symlink or a backup exists.
backup_file() {
  local file=$1
  [ ! -h "$file" ] || return 0
  # backup original smbd
  if [ -f "$file" ] && [ ! -f "${file}.orig" ]; then
    cp -a "$file" "${file}.orig"
  fi
}

RESTART=

# Make $2 a symlink to $1 (when $1 exists); flag a restart when it changed.
set_symlink() {
  local src=$1 dest=$2
  # readlink replaces the old, fragile "LANG=… ls -dn | awk '{print $10}'"
  # parse of the link target, which depended on ls column layout and locale.
  if ! [ -h "$dest" ] || [ "$(readlink "$dest" 2>/dev/null)" != "$src" ]; then
    if [ -f "$src" ]; then
      rm -f "$dest"
      ln -s "$src" "$dest"
      RESTART=1
    fi
  fi
  return 0
}

backup_file /usr/sbin/mytest
set_symlink /usr/local/sbin/mytest /usr/sbin/mytest

# "|| true" keeps set -e from aborting when the setting is absent from
# /etc/default/services (grep exits non-zero on no match).
FEATURE1=$(grep "^${SERVICE}_FEATURE1=" /etc/default/services 2>/dev/null \
  | sed "s/^${SERVICE}_FEATURE1=//") || true

# restart if changes
if [ -n "$RESTART" ]; then
  /usr/sbin/mytest
fi

echo "$(date): start.sh: id=$(id -u) RESTART=$RESTART FEATURE1=$FEATURE1" >> "$LOG"
exit 0
true
f9a1ce6546310da5a2d95037595749b524a023b4
Shell
biadelmont/playground
/.bin/new-problem
UTF-8
1,137
3.859375
4
[ "MIT" ]
permissive
#!/bin/bash
#vim:set ft=sh

# Guard against creating a solution folder in the wrong place: refuse when a
# first-level subfolder itself contains folders (we are too high up), or when
# the current folder is non-empty but has no subfolders at all (we are
# probably already inside a problem folder).
function _can_create_folder {
  local not_empty_with_no_folders=1
  local correct_depth=1

  if [ -n "$(ls -F | grep '/')" ]; then
    cd "$(ls -F | grep '/' | head -1)"
    if [ -n "$(ls -F | grep '/')" ]; then
      correct_depth=0
    fi
    cd - > /dev/null
  fi

  if [ -z "$(ls -F | grep '/')" ] && [ -n "$(ls -F | grep -v '/')" ]; then
    not_empty_with_no_folders=0
  fi

  if [ "$correct_depth" == 0 ] || [ "$not_empty_with_no_folders" == 0 ]; then
    echo
    echo -n "You probably are trying to create the solution folder in the "
    echo "wrong place!!!"
    echo
    echo "You are now here $PWD"
    echo
    return 1
  fi
  return 0
}

if ! _can_create_folder; then
  exit 1
fi

# A problem ID ($1) is required; the language ($2) defaults to cpp.
if [ -z "$1" ]; then
  echo
  echo "You should inform a problem ID."
  echo
  exit 1
fi

dir=$1
language=${2:-cpp}

if [ -d "$dir" ]; then
  echo
  echo "Already there is a folder called $dir."
  echo
  exit 1
fi

# Bail out if the folder cannot be created/entered, so the template files
# are never scattered into the current directory (the old behavior).
mkdir "$dir" && cd "$dir" || exit 1
echo "Created $dir."

# Template files for the solution.
touch "$dir.$language"
touch in.txt
touch out.txt
touch tags
touch problem.txt
touch WRONG  # NOTE(review): intent unclear — kept as in the original
true
5fedf17abb261f43b271cb5a9688d2ba370d0c1a
Shell
vlad17/mve
/scripts/lint.sh
UTF-8
385
3.421875
3
[ "Apache-2.0" ]
permissive
#! /usr/bin/env bash
# Lints code:
#
# # Lint mve by default.
# ./scripts/lint.sh
#
# # Lint specific files.
# ./scripts/lint.sh mve/{reporter,plot}.py

set -euo pipefail

# Run pylint with the project's standard disables and PYTHONPATH.
lint() {
  PYTHONPATH=mve pylint \
    --disable=locally-disabled,fixme,too-many-return-statements "$@"
}

# With no arguments, fall back to linting the whole mve package.
main() {
  if (( $# == 0 )); then
    set -- mve
  fi
  lint "$@"
}

main "$@"
true
4c5265158a3f00429c14f0a26cc98b4990c41f3d
Shell
hutchison/.files
/scripts/backup_postgres_db.sh
UTF-8
1,041
4.21875
4
[]
no_license
#!/usr/bin/env bash
# Export the PostgreSQL database $1 as an xz-compressed SQL dump into
# directory $2, using filename prefix $3.
#
# The invoking user needs access to the database, e.g.:
#   sudo -u postgres psql $DBNAME
#   $DBNAME=# GRANT CONNECT ON DATABASE database_name TO username;
#   $DBNAME=# GRANT USAGE ON SCHEMA schema_name TO username;
#   GRANT SELECT ON ALL TABLES IN SCHEMA schema_name TO username;
#   GRANT SELECT ON ALL SEQUENCES IN SCHEMA schema_name TO username;
#
# Restore with "psql $DBNAME < dumpfile" after dropping and recreating
# the old database:
#   $ drop_db $DBNAME
#   $ sudo -u postgres createdb -O $USER $DBNAME

usage() {
  echo "$0 dbname destdir prefix"
  exit 1
}

# Guard clause instead of a big if/else around the whole body.
[[ $# -eq 3 ]] || usage

db_name="$1"
dest_dir="$2"
prefix="$3"

timestamp=$(date +"%Y%m%d_%H%M%S")
dump_file="${prefix}_${db_name}_${timestamp}.sql"

pg_dump "$db_name" > "${dest_dir}/${dump_file}"
xz "${dest_dir}/${dump_file}"
true
e9c99d1d3087fca4536fda4268b0300caa9e499c
Shell
lihsup/co2reader
/step.sh
UTF-8
255
2.734375
3
[]
no_license
#!/bin/bash
# Run forward.sh inside the container named $6, forwarding parameters $1..$5.

#Get parameters
day1=$1
day2=$2
direction=$3
resid=$4
function=$5
cname=$6

#Get current position
TOP_DIR=$(pwd)  # NOTE(review): unused in this script — confirm before removing

#Add all necessary jars
LIBPATH=lib/java-json.jar  # NOTE(review): unused in this script — confirm before removing

# Quote every expansion so arguments containing spaces survive intact.
docker exec -d "$cname" ./forward.sh "$day1" "$day2" "$direction" "$resid" "$function"
true
18b1b4deda7ead833de98d80ac80236a1ebdc4e4
Shell
shirlymadushanka/e16-co502-RV32IM-pipeline-implementation-group1
/cpu/scripts/test-data-cache.sh
UTF-8
361
2.640625
3
[]
no_license
#!/bin/bash
# Compile and run the data-cache test bench with Icarus Verilog.

# if any error happens exit — set BEFORE the cd so a bad path aborts too
# (previously set -e came after the cd and could not catch its failure)
set -e

echo "data-cache test bench run start!"
echo ""

# Navigate to data-cache module directory
cd ../data_cache_module

# clean
rm -rf data_cache.vvp
rm -rf data_cache_wavedata.vcd

# compiling
iverilog -o data_cache_tb.vvp data_cache_tb.v

# run
vvp data_cache_tb.vvp

echo ""
echo "data-cache test bench run stopped!"
true
0e273c2f39b3ac805b13946759e46dd914787330
Shell
mauroxcf/holberton-system_engineering-devops
/0x04-loops_conditions_and_parsing/3-until_holberton_school
UTF-8
142
2.96875
3
[]
no_license
#!/usr/bin/env bash
# script that prints "Holberton School" 10 times with an until loop
i=0
until (( i > 9 )); do
  printf '%s\n' "Holberton School"
  i=$(( i + 1 ))
done
true
2a297866cc8cbaad2dfd47291a84c6336c5eeeba
Shell
christophpickl/ponyo-svn
/artifact/cpp-playground/cmake-shared-playground/trunk/my_cmake_install.sh
UTF-8
282
3
3
[]
no_license
#!/bin/bash
# Configure, build and install via CMake, aborting on the first failure.

# Abort with a diagnostic when the previous command's status ($1) is non-zero.
CHECK() {
  local rc=$1
  [[ $rc -eq 0 ]] && return 0
  echo "FAIL!!! RETURN_CODE was: $rc"
  exit 1
}

cmake -DCMAKE_INSTALL_PREFIX:PATH=/usr/local
#cmake -DCMAKE_INSTALL_PREFIX:PATH=/pxtmp/usr/local
CHECK $?

make
CHECK $?

make install
CHECK $?
true
8d0b7547c67168c71e5f078f63e558e1ca611df8
Shell
yota-code/marccup
/export_path
UTF-8
542
2.703125
3
[]
no_license
#!/usr/bin/env zsh
# Environment setup for the marccup workspace: exports MARCCUP_* paths and
# prepends sibling projects' script/package dirs to PATH / PYTHONPATH.

# ${0:A:h} = absolute, symlink-resolved directory containing this script.
source_dir=${0:A:h}

export MARCCUP_autotest_DIR=${source_dir}/autotest

# Prepend $1/script to PATH and $1/package to PYTHONPATH ($1 is resolved
# to an absolute path via the :A modifier).
function push_pythonpath() {
	pth=$1:A
	echo "+ ${pth}"
	export PATH=${pth}/script:$PATH
	export PYTHONPATH=${pth}/package:$PYTHONPATH
}

push_pythonpath ${source_dir}/../oaktree
push_pythonpath "${source_dir}/../cc-pathlib"
push_pythonpath ${source_dir}

export MARCCUP_static_DIR=${source_dir}/static
export MARCCUP_repo_DIR="/mnt/workbench/source/marccup/test/parser/book"

# Tie the scalar PYTHONPATH to an array "pythonpath" (like PATH/path),
# then mark both arrays unique so duplicate entries are dropped.
typeset -T PYTHONPATH pythonpath
typeset -U pythonpath
typeset -U path
true
18c0bddf985f246a4fb416b9dcae84fe350ef6c4
Shell
SupinfoProjects/transcode
/scripts/clean.sh
UTF-8
334
3.03125
3
[]
no_license
#!/bin/bash
# Stop and remove every Docker container, then delete every image.
# Must be run as root.

if ! [ "$(id -u)" = 0 ]; then
  echo "Please run as root"
  exit
fi

# Collect IDs first: "docker stop"/"docker rm"/"docker rmi" error out when
# given no arguments, so each step is skipped when its list is empty.
# The lists are intentionally unquoted below: one word per ID.
containers=$(docker ps -a -q)
images=$(docker images -q)

echo "Stopping all containers"
[ -n "$containers" ] && docker stop $containers

echo "Deleting all containers"
[ -n "$containers" ] && docker rm $containers

echo "Deleting all images"
[ -n "$images" ] && docker rmi $images

echo "All docker containers and images have been stopped and deleted"
true
b85560027dabc4593db8dff9cdce113e9c86496b
Shell
gearboxworks/iso-maker
/docker-image/build/rootfs/root/.bash_login
UTF-8
312
2.796875
3
[]
no_license
#!/bin/bash # /build/permissions.sh restore # No longer used. #if [ ! -d /tmp/rootfs ] #then # echo "# Creating directory ..." # mkdir -p /tmp/rootfs #fi # #if [ -f /build/rootfs.changes.tar.gz ] #then # echo "# Extracting rootfs to /tmp/rootfs ..." # tar zxf /build/rootfs.changes.tar.gz -C /tmp/rootfs/ #fi
true
51bb4f27a4ca7b02f23a22444c67e5c0a7e18e61
Shell
CAST-projects/QRPortal
/build/server_install.sh
UTF-8
3,305
3.9375
4
[]
no_license
#!/bin/bash
# service name: releasenotemanager.service
#
# Install the technologies server package: unpack it into $SERVERDIR, keep a
# rolling backup of the last 10 packages, then (re)start the systemd service
# and smoke-test it over HTTP on the given port.
#
# usage: server_install.sh -p <package_path> -r <port>

# set -x

display_usage() {
    echo "usage:"
    echo "$0 -p <package_path> -r <port>"
    exit 1
}

unset PACKPATH
unset SRVPORT

# NOTE(review): options l/u/a/t/d are declared but never handled below —
# presumably leftovers from an earlier version; confirm before removing.
while getopts "p:r:l:u:a:t:d:" OPT; do
    case $OPT in
        p) PACKPATH=${OPTARG};;
        r) SRVPORT=${OPTARG};;
        \?) display_usage ; exit 1;;
    esac
done

# display_usage already exits 1, so the trailing "exit 1" is a safety net.
[ -z "$PACKPATH" ] && display_usage && exit 1
[ -z "$SRVPORT" ] && display_usage && exit 1

PACKDIR=`dirname $0`
SERVERDIR="/var/opt/technologies_server"
BACKUPDIR="$PACKDIR/../backups"
TMPFIC=/tmp/technologies_server_install.tmp
SRVHOST=`hostname`

# The service reads its listening port from the environment.
export PORT=$SRVPORT

echo
echo Server installation is: $SERVERDIR
echo Package path is: $PACKPATH

if [ ! -f $PACKPATH ]; then
    echo ERROR
    echo The package file $PACKPATH does not exist.
    exit 1
fi

if [ ! -d $SERVERDIR ]; then
    echo ERROR
    echo Folder $SERVERDIR must exist, and user $USER must have write access to it
    exit 1
fi

# Probe write access before wiping anything.
touch $SERVERDIR/test
if [ $? -ne 0 ]; then
    echo ERROR
    echo user $USER must have write access to folder $SERVERDIR
    exit 1
fi

# Wipe the previous installation. DANGER: "rm -r *" relies on the cd above
# having succeeded; it runs relative to $SERVERDIR.
cd $SERVERDIR
rm -r *
cd -

tar xzf $PACKPATH -C $SERVERDIR >$TMPFIC 2>&1
if [ $? -ne 0 ]; then
    echo ERROR
    echo Untar of the package fails.
    cat $TMPFIC
    exit 1
fi

# Keep a copy of the installed package, pruning the backup dir down to the
# 10 most recent entries (by mtime).
[ -d $BACKUPDIR ] || mkdir $BACKUPDIR
cp $PACKPATH $BACKUPDIR || exit 1
echo
cd $BACKUPDIR
echo "Package has been backuped in $BACKUPDIR"
NBPACK=`find . -name '*.taz' | wc -l`
echo
echo Number of packages in the backup: $NBPACK
if [ "$NBPACK" -gt "10" ];then
    echo Cleaning:
    ls -1t | tail -n $(($NBPACK - 10))
    ls -1t | tail -n $(($NBPACK - 10)) | xargs rm -fdr
fi

cd $SERVERDIR
echo
echo Installing server on port: $SRVPORT
echo
echo "=============================================="
echo "=============================================="
# Point npm's prefix at the bundled NodeJS tree.
mkdir -p $SERVERDIR/NodeJS/etc
echo "prefix=$SERVERDIR">$SERVERDIR/NodeJS/etc/npmrc
echo
echo "=============================================="
echo "=============================================="
echo "Starting the server ...."
sudo systemctl daemon-reload
sudo systemctl stop technologies.service
sudo systemctl enable technologies.service
sudo systemctl start technologies.service

# Verify the unit reports as running; dump the journal otherwise.
sudo systemctl status technologies.service>$TMPFIC 2>&1
grep " active (running)" $TMPFIC
if [ $? -ne 0 ]; then
    echo ERROR
    echo Cannot start the service
    cat $TMPFIC
    echo ================================================
    echo ================================================
    echo Journal:
    echo ================================================
    sudo journalctl -xe
    echo ================================================
    echo ================================================
    exit 1
fi

# Give the server time to come up, then smoke-test it over HTTP.
sleep 15
curl http://localhost:$SRVPORT >$TMPFIC 2>&1
if [ $? -ne 0 ]; then
    cat $TMPFIC
    echo ERROR
    echo The server did not respond....
    echo ================================================
    echo ================================================
    echo Journal:
    echo ================================================
    sudo journalctl -xe
    echo ================================================
    echo ================================================
    exit 1
fi

echo
echo "=============================================="
echo "=============================================="
echo Installation is successful.
exit 0
true
80b23128c18f2fc8f1bb19c5bb0ec362ccad158e
Shell
NWU-MuST/STP-services
/scheduler/speech_services/align.sh
UTF-8
612
2.65625
3
[ "Apache-2.0" ]
permissive
#!/bin/bash
#$ -N ##JOB_NAME##
#$ -e ##ERR_OUT##
#$ -o ##STD_OUT##
#$ -cwd
# Grid Engine job template (## placeholders are filled in by the scheduler):
# look up audio/text/result paths from the instance ticket, rewrite host path
# prefixes to container mount points, then run the Kaldi aligner in Docker.

TICKET=##INSTANCE_TICKET##
WHERE=##SPEECH_SERVICES##
DOCKER_PATH=##DOCKER_PATH##
REAL_PATH=##REAL_PATH##

# $() instead of backticks; every expansion quoted so paths with spaces work.
AUDIO=$(python "$WHERE/json_parse.py" "$TICKET" audiofile | sed "s:${REAL_PATH}:${DOCKER_PATH}:g")
TEXT=$(python "$WHERE/json_parse.py" "$TICKET" textfile | sed "s:${REAL_PATH}:${DOCKER_PATH}:g")
RESULT=$(python "$WHERE/json_parse.py" "$TICKET" resultfile | sed "s:${REAL_PATH}:${DOCKER_PATH}:g")
SYSTEM=$(python "$WHERE/json_parse.py" "$TICKET" subsystem)

docker exec -t services /home/dac/align/align_kaldi/align.sh "$AUDIO" "$TEXT" "$SYSTEM" "$RESULT"
true
313b5e1c1e3455e68e91f6847482bc40117bcea0
Shell
olebole/voclient
/configure
UTF-8
5,623
4.0625
4
[ "MIT" ]
permissive
#!/bin/bash
#
#  CONFIGURE -- A pseudo-autoconf configure script to allow a standard
#  build command sequence to reset the installation directories.  We
#  assume a build/install of this package can be accomplished with the
#  commands:
#
#      % ./configure --prefix=/opt/local
#      % make
#      % make install
#
#  This configure script rewrites the "install_env" file with the specified
#  install paths, that script is then used in the 'make install' change to
#  set the install directories for the binaries, libraries, include files and
#  man pages.  The '--prefix' option sets the default root path, other options
#  allow paths to be set individually for each component.
#
#  The default paths used are:
#
#      prefix=/usr/local/            global root path
#      bindir=${prefix}/bin/         task binary install directory
#      libdir=${prefix}/lib/         library install directory
#      incdir=${prefix}/include/     include file install directory
#      mandir=${prefix}/man/man1/    man page install directory
#
#  Usage:    ./configure [ <opts>=<val> | <opt> <val> ]
#
#  Where     -h -help --help     print a help summary
#            -p -prefix --prefix set global path prefix
#            -b -bindir --bindir task bin directory
#            -i -incdir --incdir include file directory
#            -l -libdir --libdir library directory
#            -m -mandir --mandir man page directory
#
# ----------------------------------------------------------------------------

# Setup the default environment.
unset noclobber

prefix=/usr/local/
bindir=${prefix}/bin/
libdir=${prefix}/lib/
incdir=${prefix}/include/
mandir=${prefix}/man/man1/

# Process any cmdline flags.
while [ $# -gt 0 ]
do
    case "$1" in
    -h | -help | --help)                # print a help summary
        echo ""
        echo " Usage: ./configure [ <opts>=<val> | <opt> <val> ]"
        echo ""
        echo " Where -h -help --help print a help summary"
        echo " -p -prefix --prefix set global path prefix"
        echo " -b -bindir --bindir task bin directory"
        echo " -i -incdir --incdir include file directory"
        echo " -l -libdir --libdir library directory"
        echo " -m -mandir --mandir man page directory"
        echo ""
        echo " Example:"
        echo ""
        echo " The following commands are all equivalent for setting the"
        echo " global install path prefix:"
        echo ""
        echo " % ./configure --p /opt/local"
        echo " % ./configure -prefix /opt/local"
        echo " % ./configure --prefix /opt/local"
        echo " % ./configure -p=/opt/local"
        echo " % ./configure -prefix=/opt/local"
        echo " % ./configure --prefix=/opt/local"
        echo ""
        exit 0
        ;;

    -p | -prefix | --prefix)            # global prefix
        case "$2" in
        "") shift 2 ;;
        *)  prefix=$2/ ; shift 2 ;;
        esac
        bindir=${prefix%/}/bin/
        libdir=${prefix%/}/lib/
        incdir=${prefix%/}/include/
        mandir=${prefix%/}/man/man1/
        ;;
    -p=* | -prefix=* | --prefix=*)
        prefix="${1#*=}"/
        bindir=${prefix%/}/bin/
        libdir=${prefix%/}/lib/
        incdir=${prefix%/}/include/
        mandir=${prefix%/}/man/man1/
        shift       # FIX: shift was missing, so "-p=<path>" looped forever
        ;;

    -b | -bindir | --bindir)            # task bin directory
        case "$2" in
        "") shift 2 ;;
        *)  bindir=$2/ ; shift 2 ;;
        esac
        ;;
    -b=* | -bindir=* | --bindir=*)
        bindir="${1#*=}"/
        shift       # FIX: shift was missing (infinite loop)
        ;;

    -l | -libdir | --libdir)            # library directory
        case "$2" in
        "") shift 2 ;;
        *)  libdir=$2/ ; shift 2 ;;
        esac
        ;;
    -l=* | -libdir=* | --libdir=*)
        libdir="${1#*=}"/
        shift       # FIX: shift was missing (infinite loop)
        ;;

    -i | -incdir | --incdir)            # include directory
        case "$2" in
        "") shift 2 ;;
        *)  incdir=$2/ ; shift 2 ;;
        esac
        ;;
    -i=* | -incdir=* | --incdir=*)
        incdir="${1#*=}"/
        shift       # FIX: shift was missing (infinite loop)
        ;;

    -m | -mandir | --mandir)            # man page directory
        case "$2" in
        "") shift 2 ;;
        *)  mandir=$2/ ; shift 2 ;;
        esac
        ;;
    -m=* | -mandir=* | --mandir=*)
        mandir="${1#*=}"/
        shift       # FIX: shift was missing (infinite loop)
        ;;

    *)
        echo "Unknown argument '$1'"
        shift
    esac
done

# Create the environment file for the "make install" command.  First check
# that, if the file already exists, we have permission to overwrite it.
# FIX: the old "[ ! -w ./install_env ]" also failed when the file simply
# did not exist yet, aborting a perfectly valid first run.
if [ -e ./install_env ] && [ ! -w ./install_env ]; then
    echo "Error: cannot overwrite environment file './install_env'"
    exit 1
fi

# The sed collapses any accidental "//" produced by joining path components.
cat << END_OF_ENVFILE | sed -e 's://:/:g' > ./install_env
#!/bin/bash
prefix=${prefix%/}
bindir=${bindir}
libdir=${libdir}
incdir=${incdir}
mandir=${mandir}

# Process cmdline flags.
for a in "\$@"
do
    case "\$a" in
    -b | -bindir | --bindir)            # task bin directory
        echo \$bindir
        ;;
    -l | -libdir | --libdir)            # library directory
        echo \$libdir
        ;;
    -i | -incdir | --incdir)            # include directory
        echo \$incdir
        ;;
    -m | -mandir | --mandir)            # man page directory
        echo \$mandir
        ;;
    *)
        exit 1
        ;;
    esac
done
END_OF_ENVFILE

# Create the install directories if needed.  This also gives us a chance to
# print errors if we don't have permissions on these dirs.
# (Paths are assumed to contain no whitespace, as in the original.)
dirs=($bindir $libdir $incdir $mandir)
for f in ${dirs[@]}; do
    if [ ! -d $f ]; then
        /bin/mkdir -p $f >> /dev/null 2>&1
        if (( $?==1 )); then
            echo "Warning: Cannot create install directory '$f' as user '$USER'."
        fi
    elif [ ! -w $f ]; then
        echo "Warning: Install directory '$f' is not writable by user '$USER'."
    fi
done

# Set the execute permission and quit.
chmod 755 ./install_env
exit 0
true
0be94034afd16bd8bb171741dcbeb5b64e2ca8af
Shell
matclab/rofi-hamster
/rofi-hamster
UTF-8
3,177
3.625
4
[]
no_license
#!/usr/bin/env bash
# rofi front-end for the hamster time tracker: presents past activities
# (ranked by frece frecency) in rofi, then starts/stops the chosen one.
set -eo pipefail

: "${CACHE_DIR:="$HOME/.cache/frece/"}"
[[ -d $CACHE_DIR ]] || mkdir -p "$CACHE_DIR"
CACHE="$CACHE_DIR"/hamster.db

# To enable mocking in test
: "${_ROFI:=rofi}"
: "${_HAMSTER:=hamster}"
: "${_DUNSTIFY:=dunstify}"
: "${_XSV:=xsv}"
: "${_FRECE:=frece}"
: "${_TESTING:="no"}"

function activities() {
	# Echo a reverse list of activities prefixed by date
	# Activities follow the hamster start command format
	# start=$1
	# end=$2
	if [[ $# -eq 0 ]]
	then
		# default window: the last two years
		start=$(date +%Y-%m-%d -d "2 years ago")
		end=$(date +%Y-%m-%d)
		shift
	else
		start="$1"
		end="$2"
	fi
	LC_ALL=C "$_HAMSTER" export tsv "$start" "$end" |
		"$_XSV" select -d"\t" "activity,category,description,tags" |
		tail -n +2 | # remove header line
		# change
		# gestion projet,c845,planning desc,145
		# gestion projet,c845,planning desc,"145, 246"
		# into hamster start command
		# gestion projet@c845,planning desc #145
		# gestion projet@c845,planning desc #145 #246
		sed --regexp-extended -e '/^[^"]*",?$/d' | #remove buggy lines like 'comment",'
		# separate activity and category with @
		# For line containing multiple tags and thus ending with ", add # before
		# tags remove coma and double quotes
		# For line with single tag add # before the tag
		# Replace last coma with space
		sed --regexp-extended -e 's/,/@/' -e '/"$/s/"/#/' -e '/"$/s/, / #/g' \
			-e 's/"$//' -e '/,[^#]+$/s/,([^,]+)$/,#\1/' -e 's/,*([^,]*)$/ \1/' |
		LC_ALL=C sort -u
}

# Print the frecency-ranked activity list, creating/refreshing the frece db.
# First run: seed from the last month. Later runs: print the cached list
# immediately, then refresh from the last 3 months in the background.
function all_activities() {
	if [[ ! -e $CACHE ]]
	then
		frece init "$CACHE" <(activities "$(date +%Y-%m-%d -d "1 month ago")" "$(date +%Y-%m-%d)")
		frece print "$CACHE"
	else
		frece print "$CACHE"
		frece update "$CACHE" <(activities "$(date +%Y-%m-%d -d "3 month ago")" "$(date +%Y-%m-%d)") --purge-old &
	fi
}

# To enable mocking in test
: "${_all_activities:=all_activities}"

function main() {
	# current activity as reported by "hamster current" (middle fields)
	current=$("$_HAMSTER" current | sed 's|^[^ ]\+ [^ ]\+ \(.*\) [^ ]\+$|\1|' )
	# rofi -format "s|f" returns "<selection>|<typed filter>"
	selectedfilter=$("$_all_activities" \
		| "$_ROFI" -dmenu -i -select "$current" -p "Select task" -format "s|f")
	selected=${selectedfilter/\|*/}
	filter=${selectedfilter/*\|/}
	[ -z "$selected" ] && exit
	# if selected activity matches current we stop it
	if [[ "${selected/,,/} " =~ $current\  ]]; then
		"$_HAMSTER" stop
		"$_DUNSTIFY" -a "hamster" "Stop activity ${selected/,,/}"
	else
		if [[ $filter =~ ^[A-Za-z].*[0-9-]$ ]]
		then
			# Filter ends with time or duration → we keep filter
			action="$filter"
			# strip the trailing ",time" part before feeding frece
			item="$(echo "$action" | sed --regexp-extended -e 's/ *,+[^,]*//')"
			if ! "$_FRECE" increment "$CACHE" "$item"
			then
				# unknown item: add it, then bump it (in the background)
				("$_FRECE" add "$CACHE" "$item"
				"$_FRECE" increment "$CACHE" "$item"
				)&
			fi
		else
			action="$selected"
			if ! "$_FRECE" increment "$CACHE" "$action"
			then
				("$_FRECE" add "$CACHE" "$action"
				"$_FRECE" increment "$CACHE" "$action"
				)&
			fi
		fi
		echo "$action"
		"$_HAMSTER" start "$action"
		"$_DUNSTIFY" -a "hamster" "Start activity ${action/,,/}"
	fi
}

if [[ "$_TESTING" == "no" ]]
then
	main
fi
true
57a3a9d04ffaa2c2bd6e8fa4843d2e96e2aa74b3
Shell
heyanbin/first_project
/get_TsTv_rate.sh
UTF-8
510
3.5
4
[]
no_license
#!/bin/sh
# Ts/Tv in SNPs is Transition/Transversion.
# For every .vcf file in the directory given as $1, count transitions and
# transversions and append "filename ts_count tv_count" to TsTv_result.txt.

Fold_A=$1

# Quote expansions so directories/filenames containing spaces work.
for i in "$Fold_A"/*.vcf; do
	# filename as seen by awk (FILENAME of the first record)
	data1=$(awk '{print FILENAME;exit}' "$i")
	# transitions: purine<->purine (A/G) or pyrimidine<->pyrimidine (C/T)
	data3=$(awk '{if(($4~/[AG]/ && $5~/[AG]/) || ($4~/[CT]/ && $5~/[CT]/)){print $1}}' "$i" | wc -l)
	# transversions: purine <-> pyrimidine
	data4=$(awk '{if(($4~/[AG]/ && $5~/[CT]/) || ($4~/[CT]/ && $5~/[AG]/)){print $1}}' "$i" | wc -l)
	echo -e "$data1 $data3 $data4" >> TsTv_result.txt
done
true
635f0b1292e4bfd7f3036edd026d637a085c257d
Shell
flaccid/rightscripts
/attachments/configure_chrony.sh
UTF-8
859
3.453125
3
[]
no_license
#! /bin/bash
# usage: configure_chrony.sh [nameserver nameserver..]
# configures chrony
# this script should be run as root

# "set -e" in the body instead of a "-e" in the shebang: shebang options are
# lost when the script is invoked as "bash configure_chrony.sh".
set -e

ntp_servers="$@"
[ -z "$ntp_servers" ] && ntp_servers=pool.ntp.org

# convert to array
read -a ntp_servers <<<"$ntp_servers"

# FIX: "[$ntp_servers]" only showed element 0; [*] prints the whole list.
echo "configuring to sync with: [${ntp_servers[*]}]"

# we assume that /etc/chrony.conf exists
# remove any servers already configured
sed -i '/^server /d' /etc/chrony.conf

# remove some undesirable comments
sed -i '/^# Use public servers from/d' /etc/chrony.conf
sed -i '/^# Please consider joining/d' /etc/chrony.conf

# add each server to top of config file
for s in "${ntp_servers[@]}"
do :
  sed -i "1s/^/server $s\n/" /etc/chrony.conf
done

type timedatectl > /dev/null 2>&1 && timedatectl set-ntp yes

echo 'restarting chronyd'
systemctl restart chronyd
systemctl status chronyd
chronyc activity
true
c3dcc5316c16383b72588713dc55907cbcbbe9cc
Shell
giacomo21/Image-analysis
/Graphs/LX-2/molecule_otsu = False/BioImageXD-1.0/bin/fix_install_names.sh
UTF-8
3,050
3.375
3
[]
no_license
#! /bin/bash
# Rewrite Mach-O install names after building VTK/ITK so the Python-wrapped
# libraries resolve their dependencies from the Python framework's lib
# directory instead of the build trees.

VTK_SOURCE_DIR="/Users/kallepahajoki/VTK"
ITK_SOURCE_DIR="/Users/kallepahajoki/ITK/InsightToolkit-3.4.0/bin/"

if [ "$1" != "" ]; then
	VTK_SOURCE_DIR=$1
fi

if [ ! -d $VTK_SOURCE_DIR ]; then
	echo "The VTK source directory specified does not exist"
	# FIX: previously printed $VTK_SOURCE_DIRECTORY — an undefined variable —
	# so the current directory always showed as empty.
	echo "Current VTK directory: $VTK_SOURCE_DIR"
	echo "You can either edit this script to change it permanently, or"
	echo "Give it as an argument to this script."
	echo "Example:"
	echo "# sudo sh fix_install_names.sh ~/VTK"
	exit
fi

VTK_SOURCE_DIR="${VTK_SOURCE_DIR}/bin"

# 1) Plain dylibs: point unprefixed dependencies at the framework lib dir.
cd /Library/Frameworks/Python.framework/Versions/2.5/lib
LIBS="`echo *.dylib`"
for lib in $LIBS
do
	DEPS="`otool -L $lib |grep -v /|cut -d' ' -f1`"
	for dep in $DEPS
	do
		install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/$dep $lib
	done
done

# 2) WrapITK python extensions: fix VTK deps, bare ITK deps, and ITK deps
#    still pointing into the ITK build tree.
cd /Library/Frameworks/Python.framework/Versions/2.5/lib/InsightToolkit/WrapITK/lib
LIBS="`echo _*.so`"
for lib in $LIBS
do
	DEPS="`otool -L $lib |grep vtk |cut -d' ' -f1`"
	for dep in $DEPS
	do
		ndep="`basename $dep`"
		install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/$ndep $lib
	done
	DEPS="`otool -L $lib |grep -i itk |grep -v /|cut -d' ' -f1`"
	for dep in $DEPS
	do
		ndep="`basename $dep`"
		install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/InsightToolkit/$ndep $lib
	done
	DEPS="`otool -L $lib |grep -i itk |grep $ITK_SOURCE_DIR |cut -d' ' -f1`"
	for dep in $DEPS
	do
		ndep="`echo $dep | sed s,$ITK_SOURCE_DIR,,`"
		if [ ! -e "$ndep" ]; then
			install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/InsightToolkit/$ndep $lib
		else
			install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/InsightToolkit/WrapITK/lib/$ndep $lib
		fi
	done
done

# 3) InsightToolkit dylibs: fix their bare ITK references.
cd /Library/Frameworks/Python.framework/Versions/2.5/lib/InsightToolkit/
LIBS="`echo *.dylib`"
for lib in $LIBS
do
	DEPS="`otool -L $lib |grep -i itk |grep -v /|cut -d' ' -f1`"
	for dep in $DEPS
	do
		ndep="`basename $dep`"
		install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/InsightToolkit/$ndep $lib
	done
done

# 4) VTK python extensions (plus libvtkBXDProcessingPython): fix unprefixed
#    deps and deps pointing into the VTK build tree.
LIBS="/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/site-packages/vtkbxd/libvtkBXDProcessingPython.so"
cd /Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/site-packages/
if [ -d vtk ]; then
	cd vtk
elif [ -d VTK-*.egg ]; then
	cd VTK-*.egg/vtk
fi
LIBS="$LIBS `echo *.so`"
for lib in $LIBS
do
	DEPS="`otool -L $lib |grep -v /|cut -d' ' -f1`"
	for dep in $DEPS
	do
		install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/$dep $lib
	done
	DEPS="`otool -L $lib |grep $VTK_SOURCE_DIR|cut -d' ' -f1`"
	for dep in $DEPS
	do
		ndep="`echo $dep | sed s,$VTK_SOURCE_DIR,,`"
		install_name_tool -change $dep /Library/Frameworks/Python.framework/Versions/2.5/lib/$ndep $lib
	done
done
true
1b162830805b0e248c42697cbdf467bfa75ba266
Shell
mitchgu/isotope
/ansible/roles/isotope/templates/iso_cmd.sh.j2
UTF-8
2,086
3.125
3
[]
no_license
#! /bin/bash
# Jinja2 template ({{ app_dir }}, {{ rails_env }} are filled by Ansible).
# Prints a banner on load and defines the "iso" helper command.
echo -e '
Use the \033[31miso\033[0m command!
Usage: iso \033[31mhome\033[0m - takes you to the app directory
iso \033[31mshell\033[0m - start a rails console
iso \033[31mstart\033[0m - starts the server using foreman
iso \033[31mmigrate\033[0m - migrate the db
iso \033[31mprecompile\033[0m - precompiles assets
iso \033[31mrestart\033[0m - restarts the puma server
iso \033[31mstop\033[0m - stop the puma server
iso \033[31mrailslog\033[0m - tails the rails log
iso \033[31mpumalog\033[0m - tails the puma log
iso \033[31mnginxlog\033[0m - tails the nginx log
'
tput sgr0
#dd8733

# Dispatch an "iso" subcommand; anything unknown prints the usage text.
function iso() {
  case "$1" in
    home)       cd {{ app_dir }} ;;
    shell)      bundle exec rails console ;;
    start)      sudo foreman start ;;
    migrate)    bundle exec rails db:migrate ;;
    precompile) bundle exec rails assets:precompile ;;
    restart)    sudo systemctl restart isotope.target ;;
    stop)       sudo systemctl stop isotope.target ;;
    railslog)   tail log/{{ rails_env }}.log ;;
    pumalog)
      tail log/puma.stderr.log
      tail log/puma.stdout.log
      ;;
    nginxlog)
      sudo tail /var/log/nginx/error.log
      sudo tail /var/log/nginx/access.log
      ;;
    *)
      echo -e "Usage: iso \033[31mhome\033[0m - takes you to the app directory
iso \033[31mshell\033[0m - start a rails console
iso \033[31mstart\033[0m - starts the server using foreman
iso \033[31mmigrate\033[0m - migrate the db
iso \033[31mprecompile\033[0m - precompiles assets
iso \033[31mrestart\033[0m - restarts the puma server
iso \033[31mstop\033[0m - stop the puma server
iso \033[31mrailslog\033[0m - tails the rails log
iso \033[31mpumalog\033[0m - tails the puma log
iso \033[31mnginxlog\033[0m - tails the nginx log"
      ;;
  esac
}
true
f1b81ce1a61d6c7c634e7c018e3cc2c31eff511a
Shell
CMCDragonkai/.dotfiles
/binc/uln2wln
UTF-8
674
3.8125
4
[ "MIT" ]
permissive
#!/usr/bin/env sh

# need administrator mode before running this, as it relies on CMD, and
# CMD's mklink does not respect the create symbolic link privilege

# right now relative symlinks gets converted to absolute symlinks
# this is not because of mklink, but due to cygpath

: '
uniln2winln - Convert a Unix symlink to a Windows NTFS symlink
Usage: uniln2winln <path-to-link>
'

# Resolve the link target once, and precompute the Windows-style paths
# (cygpath is a purely textual conversion).
link_target="$(readlink "$1")"
win_link="$(cygpath --windows --absolute "$1")"
win_target="$(cygpath --windows "$link_target")"

if [ -d "$link_target" ]; then
    # directory links need mklink's /D switch
    rm "$1" && cmd /c mklink '/D' "$win_link" "$win_target"
else
    rm "$1" && cmd /c mklink "$win_link" "$win_target"
fi
true
eafb2221027c355613e2caf562e5491f9e7d2e23
Shell
nielsmaerten/autotune-docker-cloud
/gcp-deploy.sh
UTF-8
488
2.640625
3
[]
no_license
#!/bin/bash
# Run this script in GCP Console to build and deploy the docker container to GCP Container Registry
# curl -s https://raw.githubusercontent.com/nielsmaerten/autotune-docker-cloud/master/gcp-deploy.sh | bash

# Start from a fresh checkout of the repository.
rm -rf autotune-docker-cloud/
git clone https://github.com/nielsmaerten/autotune-docker-cloud
cd autotune-docker-cloud/

# Tag the image with the active GCP project and push it to gcr.io.
PROJECT_ID="$(gcloud config get-value project -q)"
export PROJECT_ID
docker build -t "gcr.io/${PROJECT_ID}/autotune" .
docker push "gcr.io/${PROJECT_ID}/autotune"
true
139fb8754d0ada0f96a81347dbe78239b660022a
Shell
barthouse/linuxfromscratch
/scripts/install/install-libcap.sh
UTF-8
629
2.8125
3
[]
no_license
# LFS-style build/install script for libcap 2.24.
# Relies on the sourced helpers: dosetup.sh presumably defines the
# $CONFIGLOG/$CONFIGERR/$MAKELOG/$MAKEERR/$INSTALLLOG/$INSTALLERR log paths
# and dotar.sh unpacks the tarball — neither is visible here; verify there.
PKGNAME=libcap
PKGVER=2.24
TAREXT=xz

DIR="`dirname \"$0\"`"
source $DIR/dosetup.sh
source $DIR/dotar.sh

echo 'CONFIG'
# Prevent the static library from being installed.
sed -i '/install.*STALIBNAME/d' libcap/Makefile \
	1> $CONFIGLOG 2> $CONFIGERR

echo 'MAKE'
make \
	1> $MAKELOG 2> $MAKEERR

echo 'MAKE INSTALL'
make RAISE_SETFCAP=no prefix=/usr install \
	1> $INSTALLLOG 2> $INSTALLERR
chmod -v 755 /usr/lib/libcap.so \
	1>> $INSTALLLOG 2>> $INSTALLERR
# Move the runtime library to /lib and re-point the dev symlink at it.
mv -v /usr/lib/libcap.so.* /lib \
	1>> $INSTALLLOG 2>> $INSTALLERR
ln -sfv ../../lib/$(readlink /usr/lib/libcap.so) /usr/lib/libcap.so \
	1>> $INSTALLLOG 2>> $INSTALLERR

source $DIR/docleanup.sh
true
4d23b997a20779796d71c552e062153daf274799
Shell
zephinzer/version-tagging-scripts
/get-next
UTF-8
2,873
4.25
4
[]
no_license
#!/bin/bash
# get-next: compute the next semantic version from the latest git tag
# (obtained via the sibling ./get-latest script).

# Print the usage/help text.
help () {
	printf "
*********** get-next.sh ***********
gets the next semver versioning of the current folder based on the version in the git tag list.
version should be in format x.y.z where x is the major version, y is the minor version and z is the patch version.
options:
\t-m --merged : retrieve only from tags that have been merged
\t-q --quiet : suppresses debug prints
\t-h --help : prints this text
usage:
\t./get-next.sh <version_type> [...options]
\t<version_type>:
\t\tmajor: {major|maj|m1}
\t\tminor: {minor|min|m2}
\t\tpatch: {patch|ptc|p} (DEFAULT)
examples:
\t./get-next.sh maj : bumps the major version (1.0.0 > 2.0.0)
\t./get-next.sh min : bumps the minor version (1.0.0 > 1.1.0)
\t./get-next.sh patch : bumps the patch version (1.0.0 > 1.0.1)
\t./get-next.sh -q : bumps the patch version (1.0.0 > 1.0.1) w/o output
";
}

# Read the latest semver tag via ./get-latest and split it into the
# LATEST_{MAJOR,MINOR,PATCH}_VERSION globals. Exits 1 when no tag matches.
get_current_version () {
	# NOTE(review): "$* == *-m*" is substring matching — any argument merely
	# containing "-m" enables merged mode; confirm that is acceptable.
	if [[ $* == *-m* ]] || [[ $* == *--merged* ]]; then MERGE=' -m'; else MERGE=''; fi;
	GET_LATEST_COMMAND="$(dirname $0)/get-latest -q${MERGE}";
	CURRENT_VERSION=$($GET_LATEST_COMMAND);
	if [[ $? != 0 ]]; then
		printf "
ERROR: could not find a git tag that resembles a semver version (x.y.z).
\t> To add a 1.0.3 tag, run 'git tag 1.0.3'
\t> Exiting with status code 1.
";
		exit 1;
	fi;
	LATEST_MAJOR_VERSION=$(printf "$CURRENT_VERSION" | cut -d. -f1);
	LATEST_MINOR_VERSION=$(printf "$CURRENT_VERSION" | cut -d. -f2);
	LATEST_PATCH_VERSION=$(printf "$CURRENT_VERSION" | cut -d. -f3);
}

# Bump the component named by $1 (major/minor/patch, with aliases; patch is
# the default) and compose NEXT_VERSION from the NEXT_* globals.
get_next_version () {
	VERSION_TYPE_TO_UPDATE=$1;
	NEXT_MAJOR_VERSION=$LATEST_MAJOR_VERSION;
	NEXT_MINOR_VERSION=$LATEST_MINOR_VERSION;
	NEXT_PATCH_VERSION=$LATEST_PATCH_VERSION;
	case "$VERSION_TYPE_TO_UPDATE" in
		"major"|"maj"|"m1")
			((NEXT_MAJOR_VERSION++));
			NEXT_VERSION="${NEXT_MAJOR_VERSION}.0.0";
			;;
		"minor"|"min"|"m2")
			((NEXT_MINOR_VERSION++));
			NEXT_VERSION="${NEXT_MAJOR_VERSION}.${NEXT_MINOR_VERSION}.0";
			;;
		"patch"|"ptc"|"p"|*)
			((NEXT_PATCH_VERSION++));
			NEXT_VERSION="${NEXT_MAJOR_VERSION}.${NEXT_MINOR_VERSION}.${NEXT_PATCH_VERSION}";
			;;
	esac
}

if [[ $* == *-h* ]] || [[ $* == *--help* ]]; then help; exit 1; fi;

# Quiet mode: save stdout on fd 6 and silence everything until the final
# version line, which is printed after stdout is restored below.
if [[ $* == *-q* ]] || [[ $* == *--quiet* ]]; then exec 6>&1; exec > /dev/null; fi;

get_current_version;
printf "CURRENT_VERSION: ${CURRENT_VERSION}\n";
printf "MAJOR: ${LATEST_MAJOR_VERSION}\n";
printf "MINOR: ${LATEST_MINOR_VERSION}\n";
printf "PATCH: ${LATEST_PATCH_VERSION}\n";
get_next_version $1;
printf "NEXT_VERSION: ${NEXT_VERSION}\n";
printf "MAJOR: ${NEXT_MAJOR_VERSION}\n";
printf "MINOR: ${NEXT_MINOR_VERSION}\n";
printf "PATCH: ${NEXT_PATCH_VERSION}\n";
printf "VERSION UPGRADE: ${CURRENT_VERSION} > ";
# Restore stdout so the bare next version is printed even in quiet mode.
if [[ $* == *-q* ]] || [[ $* == *--quiet* ]]; then exec 1>&6 6>&-; fi;
printf "${NEXT_VERSION}\n";
exit 0;
true
a5d2959aab1c39c1fde4bb97d906b1b7979da76c
Shell
delkyd/alfheim_linux-PKGBUILDS
/python2-pp/PKGBUILD
UTF-8
884
2.75
3
[]
no_license
# Maintainer: jyantis <yantis@yantis.net> # Contributor: Akshay Srinivasan <akshaysrinivasan@gmail.com> pkgname=python2-pp pkgver=1.6.4 pkgrel=1 pkgdesc="Parallel and distributed programming for Python 2" arch=('i686' 'x86_64') depends=('python2>=2.5') url="http://www.parallelpython.com" license=('BSD-like license') source=("http://www.parallelpython.com/downloads/pp/pp-$pkgver.tar.gz") sha256sums=('fa271f17641e069f19e25d352885e3a475797c657b30e433d8227c3d882163fa') build() { cd $srcdir/pp-$pkgver/ # Patch any #!/usr/bin/python to #!/usr/bin/python2 for file in $(find . -name '*.py' -print); do sed -r -i 's_^#!.*/usr/bin/python(\s|$)_#!/usr/bin/python2_' $file sed -r -i 's_^#!.*/usr/bin/env(\s)*python(\s|$)_#!/usr/bin/env python2_' $file done python2 setup.py build } package() { cd $srcdir/pp-$pkgver/ python2 setup.py install --root="${pkgdir}" --optimize=1 } # vim:set ts=2 sw=2 et:
true
a62cb97fbf0bd5c225154848ed6bdab103722d27
Shell
xhaa123/blfs
/mld/frei0r-plugins/01-frei0r-plugins-1.7.0.sh
UTF-8
900
3.15625
3
[]
no_license
#!/bin/bash ${log} `basename "$0"` " started" blfs_all && ${log} `basename "$0"` " download" blfs_all && if test -d /sources/frei0r-plugins-1.7.0 then rm -rf /sources/frei0r-plugins-1.7.0 fi SCRIPT=`realpath $0` SCRIPTPATH=`dirname $SCRIPT` wget https://files.dyne.org/frei0r/releases/frei0r-plugins-1.7.0.tar.gz \ --continue --directory-prefix=/sources && md5sum -c ${SCRIPTPATH}/md5-frei0r-plugins && tar xf /sources/frei0r-plugins-1.7.0.tar.gz -C /sources/ && cd /sources/frei0r-plugins-1.7.0 && mkdir -vp build && cd build && cmake -DCMAKE_INSTALL_PREFIX=/usr \ -DCMAKE_BUILD_TYPE=Release \ -DWITHOUT_OPENCV=TRUE \ -Wno-dev .. && ${log} `basename "$0"` " configured" blfs_all && make && ${log} `basename "$0"` " built" blfs_all && make install && ${log} `basename "$0"` " installed" blfs_all && ${log} `basename "$0"` " finished" blfs_all
true
95bfb7f3a5496952c6296e9e8c39f12bb7e44fb1
Shell
xterm-x11/conf
/dotfiles/.profile
UTF-8
1,931
2.78125
3
[]
no_license
# ~/.profile: executed by the command interpreter for login shells. # This file is not read by bash(1), if ~/.bash_profile or ~/.bash_login # exists. # see /usr/share/doc/bash/examples/startup-files for examples. # the files are located in the bash-doc package. # the default umask is set in /etc/profile; for setting the umask # for ssh logins, install and configure the libpam-umask package. #umask 022 # if running bash if [ -n "$BASH_VERSION" ]; then # include .bashrc if it exists if [ -f "$HOME/.bashrc" ]; then . "$HOME/.bashrc" fi fi ####################################### PATH if [ -d "/usr/bin" ] ; then PATH="/usr/bin:$PATH" fi if [ -d "/usr/local/bin" ] ; then PATH="/usr/local/bin:$PATH" fi if [ -d "/sbin" ] ; then PATH="/sbin:$PATH" fi if [ -d "/usr/sbin" ] ; then PATH="/usr/sbin:$PATH" fi if [ -d "$HOME/usr/bin" ] ; then PATH="$HOME/usr/bin:$PATH" fi if [ -d "/usr/numeca/bin" ] ; then PATH="/usr/numeca/bin:$PATH" # export NI_DRIVER=X11 fi if [ -d "$HOME/work/android-sdk-linux_x86/tools" ] ; then PATH="$HOME/work/android-sdk-linux_x86/tools:$PATH" fi if [ -d "$HOME/work/android-sdk-linux_x86/platform-tools" ] ; then PATH="$HOME/work/android-sdk-linux_x86/platform-tools:$PATH" fi if [ -d "/opt/intel/bin" ] ; then PATH="/opt/intel/bin:$PATH" fi if [ -d "$HOME/scripts" ] ; then PATH="$HOME/scripts:$PATH" fi if [ -d "$HOME/games/bin" ] ; then PATH="$HOME/games/bin:$PATH" fi export PATH ######################################### ## Turn off bell xset b off if [ -f "/usr/share/terminfo/r/rxvt-unicode-256color" ]; then export TERM=rxvt-unicode-256color else export TERM=xterm fi # CDPATH export CDPATH='~:~/school:~/work' # LANGUAGE # export LANGUAGE="fr:en_GB:en" # export LC_MESSAGES="fr_FR.UTF-8" # export LC_CTYPE="fr_FR.UTF-8" # export LC_COLLATE="fr_FR.UTF-8" # export LANG="fr_FR.UTF-8" xrdb -load ~/.Xresources
true
0bc89ca93f799735ca484666ed3893148934bee7
Shell
deads2k/origin-server
/cartridges/openshift-origin-cartridge-php/usr/lib/php_config
UTF-8
2,073
3.734375
4
[ "Apache-2.0" ]
permissive
#!/bin/bash source $OPENSHIFT_CARTRIDGE_SDK_BASH source ${OPENSHIFT_PHP_DIR}usr/lib/php_context function select_php_document_root { for dir in php public public_html web www; do if [ -d "$1/$dir" -o -h "$1/$dir" ]; then export OPENSHIFT_PHP_DOCUMENT_ROOT="$1/$dir" echo "Application directory \"$dir/\" selected as DocumentRoot" return fi done export OPENSHIFT_PHP_DOCUMENT_ROOT="$1" echo "Application directory \"/\" selected as DocumentRoot" } function system_php_dir { local phpscl=$(expr "$(php_context 'echo ${X_SCLS:-}')" : '.*\(php[^ ]\+\).*') [ -n "${phpscl}" ] && echo "/opt/rh/${phpscl}/root/etc/php.d" \ || echo "/etc/php.d" } function enable_modules { local phprc=${OPENSHIFT_PHP_DIR}usr/shared/etc/php.ini.erb local phpd=${OPENSHIFT_PHP_DIR}usr/${OPENSHIFT_PHP_VERSION}/etc/php.d local testcmd= local module= local modvar= # Generate OpenShift php.ini oo-erb ${phprc} > ${PHPRC} # Add test for php modules (ignore duplicates and disabled modules) in order: # 1. OpenShift php.d/*.ini.erb # 2. OpenShift php.d/*.ini # 3. system php.d/*.ini # Note: Module can be disabled using `$OPENSHIFT_PHP_<MODULE>_ENABLED=false' for file in ${phpd}/{*.ini.erb,*.ini} $(system_php_dir)/*.ini; do module=${file##*/} module=${module%.erb} module=${module%.ini} modvar="OPENSHIFT_PHP_${module^^}_ENABLED" if [[ "${!modvar,,}" != "false" && $testcmd != *\'${module}.so\'* ]]; then testcmd+="@dl('${module}.so') and printf(' ${file}');" fi done # Clear openshift.ini echo > ${PHP_INI_SCAN_DIR}/openshift.ini # Run test -> Copy settings of modules whose test passed into openshift.ini for file in $(php_context "php -n -r \"${testcmd}\"" 2>/dev/null); do if [[ $file == *\.erb ]]; then oo-erb $file >> ${PHP_INI_SCAN_DIR}/openshift.ini else cat $file >> ${PHP_INI_SCAN_DIR}/openshift.ini fi done }
true
b7c7b60cf13865fd3d93a320b611b1f475f3a67d
Shell
daptiv/PullQuester
/install-hub.win.sh
UTF-8
1,943
3.8125
4
[]
no_license
#!/bin/bash bash_path() { echo $1 | sed -e 's#\(.\):#/\1#' | sed -e 's#\\#/#g' ; } goto_temp_dir() { if [ ! -d .pullquester-temp ]; then mkdir .pullquester-temp fi pushd .pullquester-temp > /dev/null } cleanup_temp_dir() { popd > /dev/null rm -r .pullquester-temp } install_go() { local GOVERSION=1.4.1 echo Installing Go v$GOVERSION curl -s -o go.zip "https://storage.googleapis.com/golang/go$GOVERSION.windows-amd64.zip" unzip -qq go.zip cp -r ./go/* $GOROOT echo Done! } ensure_GoRoot_dir() { if [ -z "$GOROOT" ]; then export GOROOT=$( bash_path $HOME )/go else export GOROOT=$( bash_path $GOROOT ) fi if [ ! -d $GOROOT ]; then mkdir -p $GOROOT > /dev/null fi } ensure_GoPath_dir() { if [ -z "$GOPATH" ]; then export GOPATH=$GOROOT/packages else export GOPATH=$( bash_path $GOPATH ) fi if [ ! -d $GOPATH ]; then mkdir -p $GOPATH > /dev/null fi } ensure_Go_paths() { ensure_GoRoot_dir ensure_GoPath_dir } install_hub() { echo Installing hub... go get github.com/github/hub go install github.com/github/hub echo Done! } add_env_vars_to_profile() { echo GOROOT=$GOROOT echo GOPATH=$GOPATH echo PATH=\$PATH:\$GOROOT/bin:\$GOPATH/bin if [ -z "$( grep 'GOROOT=' ~/.profile )"]; then echo GOROOT=$GOROOT >> ~/.profile fi if [ -z "$( grep 'GOPATH=' ~/.profile )"]; then echo GOPATH=$GOPATH >> ~/.profile fi if [ -z "$( grep '=\$PATH:\$GOROOT/bin:\$GOPATH/bin' ~/.profile )"]; then echo PATH=\$PATH:\$GOROOT/bin:\$GOPATH/bin >> ~/.profile fi } goto_temp_dir # check for go installation ensure_Go_paths if [ -z "$(which go)" ]; then install_go fi #check for package storage location ensure_GoPath_dir export PATH=$PATH:$GOROOT/bin:$GOPATH/bin install_hub add_env_vars_to_profile cleanup_temp_dir
true
c4f45f872221374da05aa6ea6fd4d1598a962a37
Shell
parvez2014/Jalangi-Berkeley
/scripts/type_coercions_octane.sh
UTF-8
974
3.125
3
[]
no_license
#!/bin/bash dir="type_coercions_results/octane" rm -rf instrumentFF_tmp mkdir type_coercions_results mkdir ${dir} for bm in `ls -1 tests/octane2/index_*.html | grep -v -e "zlib\|typescript\|earley-boyer\|code-load" | xargs` # exclude benchmarks with generated or obfuscated code #for bm in `ls -1 tests/octane2/index_*.html | xargs` # all benchmarks do echo "####################################" echo ${bm} bm_short=`basename ${bm} | sed -e 's/index_//g' | sed -e 's/.html//g'` rm -rf ${dir}/${bm_short} java -cp thirdparty/selenium-server-standalone-2.41.0.jar:/home/m/eclipse/workspace/WebAppEvaluation/bin/ evaluation.OctaneExperimentRunner ${bm_short} mkdir ${dir}/${bm_short} mv /tmp/analysisResults.json ${dir}/${bm_short}/analysisResults.json mkdir ${dir}/${bm_short}/sourcemaps mv instrumentFF_tmp/*_jalangi_sourcemap.json ${dir}/${bm_short}/sourcemaps/ mkdir ${dir}/${bm_short}/src mv instrumentFF_tmp/*.js ${dir}/${bm_short}/src/ done
true
254a758e103e9953a3da5549731defcfb8c5acbe
Shell
rickspencer3/cobol-stacksmith
/download-example-app.sh
UTF-8
657
3.703125
4
[]
no_license
#!/bin/bash # Downloads example application and scripts for building. set -euo pipefail read -p "This will overwrite any files in user-scripts or user-uploads. Continue (y/n)? " -n 1 -r echo if [[ $REPLY =~ ^[Yy]$ ]] then echo "Downloading the example app and scripts..." wget https://raw.githubusercontent.com/bitnami-labs/stacksmith-examples/master/generic/minio/scripts/build.sh \ -O user-scripts/build.sh --quiet wget https://raw.githubusercontent.com/bitnami-labs/stacksmith-examples/master/generic/minio/scripts/run.sh \ -O user-scripts/run.sh --quiet echo "Done. You can now build with 'docker-compose build'." fi
true
439d5a9da4a56ceca87615e55cd13e539b7358c2
Shell
yeochinyi/codetest
/exercism/dl_all.sh
UTF-8
831
3.625
4
[]
no_license
#set -x exer_ws=$(exercism workspace) tmp_file="$HOME/tmp/exercism/exercises-$1.html" wget "https://exercism.io/tracks/$1/exercises" -O $tmp_file --no-check-certificate list=$(rg "/tracks/([\w\-]+)/exercises/([\w\-]+)" $tmp_file -o -r '$2' -N) #echo $list num_skip=0 num_dl_ok=0 num_dl_fail=0 for exer in $list; do exer_dir="$exer_ws/$1/$exer" echo checking $exer if [ ! -d "$exer_dir" ]; then exercism download --exercise=$exer --track=$1 exit_code=$? echo exit_code=$exit_code if [[ $exit_code -eq 0 ]]; then echo downloaded $exer (( num_dl_ok +=1 )) else echo dl fail $exer (( num_dl_fail +=1 )) fi else (( num_skip +=1 )) fi done echo num_skip=$num_skip, num_dl_ok=$num_dl_ok, num_dl_fail=$num_dl_fail
true
5a523c16416c45ca13faf0482a5004b14c164374
Shell
Escapist70/chromiumos
/src/platform/memento_softwareupdate/ping_omaha.sh
UTF-8
5,726
3.25
3
[ "BSD-3-Clause" ]
permissive
#!/bin/bash # Copyright (c) 2009 The Chromium OS Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. source `dirname "$0"`/memento_updater_logging.sh # Example Omaha ping and response (converted to 80 char width w/ backslashes): # <?xml version="1.0" encoding="UTF-8"?> # <o:gupdate xmlns:o="http://www.google.com/update2/request" \ # version="Keystone-1.0.5.0" protocol="2.0" \ # machineid="{177255303f3cc519182a103069489327}" ismachine="0" \ # userid="{706F576A-ACF9-4611-B608-E5528EAC106A}"> # <o:os version="MacOSX" platform="mac" sp="10.5.6_i486"></o:os> # <o:app appid="com.google.GoogleAppEngineLauncher" version="1.2.2.380" \ # lang="en-us" brand="GGLG" board="x86-generic"> # <o:ping active="0"></o:ping> # <o:updatecheck></o:updatecheck> # </o:app> # </o:gupdate> # Response (converted to 80 char width w/ backslashes): # <?xml version="1.0" encoding="UTF-8"?><gupdate \ # xmlns="http://www.google.com/update2/response" protocol="2.0"><app \ # appid="com.google.GoogleAppEngineLauncher" status="ok"><ping \ # status="ok"/><updatecheck status="noupdate"/></app></gupdate> # If you change version= above to "0.0.0.0", you get (again, 80 chars w/ \s): # <?xml version="1.0" encoding="UTF-8"?><gupdate \ # xmlns="http://www.google.com/update2/response" protocol="2.0"><app \ # appid="com.google.GoogleAppEngineLauncher" status="ok"><ping \ # status="ok"/><updatecheck DisplayVersion="1.2.2.0" \ # MoreInfo="http://appenginesdk.appspot.com" Prompt="true" \ # codebase="http://googleappengine.googlecode.com/files/GoogleAppEngine\ # Launcher-1.2.2.dmg" hash="vv8ifTj79KivBMTsCDsgKPpsmOo=" needsadmin="false" \ # size="4018650" status="ok"/></app></gupdate> # Get local version LOCAL_VERSION=$(grep ^CHROMEOS_RELEASE_VERSION \ /mnt/stateful_partition/etc/lsb-release | \ cut -d = -f 2-) if [ "x" = "x$LOCAL_VERSION" ] then # look in the main file LOCAL_VERSION=$(grep ^CHROMEOS_RELEASE_VERSION \ /etc/lsb-release | 
cut -d = -f 2-) fi # Parameters of the update request: OS=Memento PLATFORM=memento APP_ID={87efface-864d-49a5-9bb3-4b050a7c227a} APP_VERSION=${1:-$LOCAL_VERSION} APP_BOARD="$2" OS_VERSION=${APP_VERSION}_$(uname -m) PING_APP_VERSION="$APP_VERSION" if [ ! -z "$1" ]; then PING_APP_VERSION="$1" fi LANG=en-us BRAND=GGLG OMAHA_ID_FILE=/mnt/stateful_partition/etc/omaha_id if [ ! -f "$OMAHA_ID_FILE" ] then # omaha file isn't a regular file if [ -e "$OMAHA_ID_FILE" ] then # but the omaha file does exist. delete it rm -rf "$OMAHA_ID_FILE" fi # Generate Omaha ID: dd if=/dev/urandom bs=16 count=1 status=noxfer | xxd -c 32 -g 1 -u | \ cut -d ' ' -f 2-17 | awk \ '{print "{" $1 $2 $3 $4 "-" $5 $6 "-" $7 $8 "-" $9 $10 "-" \ $11 $12 $13 $14 $15 $16 "}"; }' > "$OMAHA_ID_FILE" chmod 0444 "$OMAHA_ID_FILE" fi MACHINE_ID=$(cat "$OMAHA_ID_FILE") if [ "x" = "x$MACHINE_ID" ] then log missing Omaha ID and unable to generate one exit 1 fi USER_ID=$MACHINE_ID AU_VERSION=MementoSoftwareUpdate-0.1.0.0 for dir in /mnt/stateful_partition /; do value=$(grep ^CHROMEOS_RELEASE_TRACK $dir/etc/lsb-release | \ cut -d = -f 2-) if [ -z "$APP_TRACK" -a -n "$value" ]; then APP_TRACK="$value" fi value=$(grep ^CHROMEOS_RELEASE_BOARD $dir/etc/lsb-release | \ cut -d = -f 2-) if [ -z "$APP_BOARD" -a -n "$value" ]; then APP_BOARD="$value" fi done AUSERVER_URL=$(grep ^CHROMEOS_AUSERVER /etc/lsb-release | cut -d = -f 2-) if [ "x" = "x$AUSERVER_URL" ] then AUSERVER_URL="https://tools.google.com/service/update2" log using default update server fi # for testing. 
Uncomment and use these to reproduce the examples above # OS=MacOSX # PLATFORM=mac # OS_VERSION=10.5.6_i486 # APP_ID=com.google.GoogleAppEngineLauncher # #APP_VERSION=0.0.0.0 # APP_VERSION=1.2.2.380 # #APP_BOARD=arm-generic # APP_BOARD=x86-generic # LANG=en-us # BRAND=GGLG # MACHINE_ID={177255303f3cc519182a103069489327} # USER_ID={706F576A-ACF9-4611-B608-E5528EAC106A} # AU_VERSION=Keystone-1.0.5.0 # post file must be a regular file for wget: POST_FILE=/tmp/memento_au_post_file cat > "/tmp/memento_au_post_file" << EOF <?xml version="1.0" encoding="UTF-8"?> <o:gupdate xmlns:o="http://www.google.com/update2/request" \ version="$AU_VERSION" protocol="2.0" machineid="$MACHINE_ID" \ ismachine="0" userid="$USER_ID"> <o:os version="$OS" platform="$PLATFORM" sp="$OS_VERSION"></o:os> <o:app appid="$APP_ID" version="$PING_APP_VERSION" lang="$LANG" \ brand="$BRAND" track="$APP_TRACK" board="$APP_BOARD"> <o:ping active="0"></o:ping> <o:updatecheck></o:updatecheck> </o:app> </o:gupdate> EOF log sending this request to omaha at $AUSERVER_URL cat "$POST_FILE" >> "$MEMENTO_AU_LOG" RESPONSE=$(wget -q --header='Content-Type: text/xml' \ --post-file="$POST_FILE" -O - $AUSERVER_URL) rm -f "$POST_FILE" log got response: log "$RESPONSE" # parse response CODEBASE=$(expr match "$RESPONSE" '.* codebase="\([^"]*\)"') HASH=$(expr match "$RESPONSE" '.* hash="\([^"]*\)"') SIZE=$(expr match "$RESPONSE" '.* size="\([^"]*\)"') if [ -z "$CODEBASE" ] then log No update exit 0 fi # We need to make sure that we download updates via HTTPS, but we can skip # this check for developer builds. DEVSERVER_URL=$(grep ^CHROMEOS_DEVSERVER /etc/lsb-release | cut -d = -f 2-) if [ "x" = "x$DEVSERVER_URL" ] then HTTPS_CODEBASE=$(expr match "$CODEBASE" '\(https://.*\)') if [ -z "$HTTPS_CODEBASE" ] then log No https url exit 0 fi fi echo URL=$CODEBASE echo HASH=$HASH echo SIZE=$SIZE echo APP_VERSION=$APP_VERSION echo NEW_VERSION=TODO
true
707d85305618f8e2874c18ea112effea9bcc629c
Shell
atphalix/freesbie2
/extra/rootmfs.sh
UTF-8
603
3.328125
3
[ "BSD-2-Clause" ]
permissive
#!/bin/sh # # Copyright (c) 2005 Dario Freni # # See COPYING for licence terms. # # $FreeBSD$ # $Id: rootmfs.sh,v 1.1 2005/10/03 18:37:50 saturnero Exp $ set -e -u if [ -z "${LOGFILE:-}" ]; then echo "This script can't run standalone." echo "Please use launch.sh to execute it." exit 1 fi TMPFILE=$(mktemp -t rootmfs) echo "Adding init script for /root mfs" cp ${LOCALDIR}/extra/rootmfs/rootmfs.rc ${BASEDIR}/etc/rc.d/rootmfs chmod 555 ${BASEDIR}/etc/rc.d/rootmfs echo "Saving mtree structure for /root" mtree -Pcp ${BASEDIR}/root > ${TMPFILE} mv ${TMPFILE} ${BASEDIR}/etc/mtree/root.dist
true
81b03769a9c5d5b19d65c118c040c7cb9a38f474
Shell
Spreadcat/vagrant-lazy
/bootstrap/tools/init.sh
UTF-8
383
3.125
3
[]
no_license
#!/bin/bash YUMPACKAGES='vim screen tcpdump tree telnet bash-completion bind-utils' APTPACKAGES='vim screen tcpdump tree telnet' echo "Installing tools" case "${TDISTRO}" in centos|redhat) yum install -y ${YUMPACKAGES} timedatectl set-timezone Europe/Oslo ;; ubuntu) apt-get install -y ${APTPACKAGES} ;; *) echo 'unknown os. abort.' exit 1 esac
true
8dafdf28904bb05a2e2f8c8a5710912cbde3df32
Shell
stereolabs/zed-ros2-wrapper
/.ci/run_tests.sh
UTF-8
889
2.96875
3
[ "Apache-2.0", "LicenseRef-scancode-generic-cla", "BSD-2-Clause" ]
permissive
#!/bin/bash set -e ttk='--->' WORKDIR=$(pwd) #echo "${ttk} WORKDIR (${WORKDIR})content" #ls -lah ${WORKDIR} echo "${ttk} Check artifact presence" ls -lah ../ros2_ws/install #ls -lah /builds/sl/ros2_ws/install/zed_interfaces/share/zed_interfaces/ #ls -lah /builds/sl/ros2_ws/install/zed_components/share/zed_components/ #ls -lah /builds/sl/ros2_ws/install/zed_wrapper/share/zed_wrapper/ #ls -lah /builds/sl/ros2_ws/install/zed_ros2/share/zed_ros2/ echo "${ttk} Initialize local ROS2 environment" cd ${WORKDIR} source ../ros2_ws/install/local_setup.bash env | grep COLCON env | grep ROS echo "${ttk} Check ROS2 installation" ros2 doctor -r echo "${ttk} Check ZED ROS2 packages presence" ros2 pkg list | grep zed echo "${ttk} USB peripherals" lsusb | grep 2b03 echo "${ttk} Test node running for 10 seconds" timeout --signal=SIGTERM 10 ros2 launch zed_wrapper zed2.launch.py exit 0
true
fc77e2e9255c50efc2c906d5a208257dcf5d22e5
Shell
ppc64le/build-scripts
/a/airline/airline_ubi_8.3.sh
UTF-8
1,243
3.234375
3
[ "Apache-2.0" ]
permissive
# ----------------------------------------------------------------------------- # # Package : airline # Version : 0.6 # Source repo : https://github.com/airlift/airline.git # Tested on : UBI 8.3 # Language : Java # Travis-Check : True # Script License: Apache License, Version 2 or later # Maintainer : Raju.Sah@ibm.com # # Disclaimer: This script has been tested in root mode on given # ========== platform using the mentioned version of the package. # It may not work as expected with newer versions of the # package and/or distribution. In such case, please # contact "Maintainer" of this script. # # ---------------------------------------------------------------------------- set -e PACKAGE_NAME=airline PACKAGE_VERSION=${1:-0.6} PACKAGE_URL=https://github.com/airlift/airline.git yum install -y git maven java-1.8.0-openjdk.ppc64le java-1.8.0-openjdk-devel.ppc64le export JAVA_HOME=/usr/lib/jvm/$(ls /usr/lib/jvm/ | grep -P '^(?=.*java-)(?=.*ppc64le)') echo "JAVA_HOME is $JAVA_HOME" #Update the path env. variable export PATH=$PATH:$JAVA_HOME/bin #Clone the repo. git clone $PACKAGE_URL cd $PACKAGE_NAME/ git checkout $PACKAGE_VERSION #Build and test the package mvn install
true
ff816c043b4bb39ce1a3be96e83c7bce7b8e99c8
Shell
rparrapy/dagster
/.buildkite/scripts/dagster_k8s.sh
UTF-8
2,812
3.703125
4
[ "Apache-2.0" ]
permissive
#! /bin/bash ROOT=$(git rev-parse --show-toplevel) set -eu TOX_PYTHON_VERSION="$1" # Environment vars export DAGSTER_DOCKER_IMAGE="${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/dagster-docker-buildkite:${BUILDKITE_BUILD_ID}-${TOX_PYTHON_VERSION}" export CLUSTER_NAME=kind`echo ${BUILDKITE_JOB_ID} | sed -e 's/-//g'` export KUBECONFIG="/tmp/kubeconfig" # ensure cleanup happens on error or normal exit function cleanup { kind delete cluster --name ${CLUSTER_NAME} } trap cleanup EXIT echo -e "--- \033[32m:k8s: Running kind cluster setup\033[0m" # Need a unique cluster name for this job; can't have hyphens kind create cluster --name ${CLUSTER_NAME} kind get kubeconfig --internal --name ${CLUSTER_NAME} > ${KUBECONFIG} # see https://kind.sigs.k8s.io/docs/user/private-registries/#use-an-access-token aws ecr get-login --no-include-email --region us-west-1 | sh for node in $(kubectl get nodes -oname); do # the -oname format is kind/name (so node/name) we just want name node_name=${node#node/} # copy the config to where kubelet will look docker cp $HOME/.docker/config.json ${node_name}:/var/lib/kubelet/config.json # restart kubelet to pick up the config docker exec ${node_name} systemctl restart kubelet.service done cd $ROOT/python_modules/libraries/dagster-k8s/ echo -e "--- \033[32m:helm: Installing Helm and Helm chart\033[0m" # Install Helm 3 curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash # Install helm chart helm install \ --debug \ --set dagit.image.repository="${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/dagster-docker-buildkite" \ --set dagit.image.tag="${BUILDKITE_BUILD_ID}-${TOX_PYTHON_VERSION}" \ --set job_image.image.repository="${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/dagster-docker-buildkite" \ --set job_image.image.tag="${BUILDKITE_BUILD_ID}-${TOX_PYTHON_VERSION}" \ dagster \ helm/dagster/ echo -e "--- \033[32m:k8s: Waiting for dagster pods\033[0m" # Wait for dagster pods to launch # # NOTE: Adding this wait 
because helm --wait does not seem to work on buildkite, and because the # kubectl wait below will continue if it runs after the cluster is up but before the helm-launched # pods start while kubectl get pods | grep dagster ret=$? ((ret != 0)) do sleep 1 done echo -e "--- \033[32m:k8s: Waiting for pods ready\033[0m" DAGIT_POD=$(kubectl get pods -o jsonpath='{.items[*].metadata.name}' | tr ' ' '\n' | grep dagit) # Wait for pods to be ready while kubectl wait --for=condition=Ready pods --all --all-namespaces --timeout=5s ret=$? ((ret != 0)) do kubectl get pods kubectl describe pod ${DAGIT_POD} done echo -e "--- \033[32m:python: Running tox tests\033[0m" # Finally, run tests tox -e $TOX_PYTHON_VERSION
true
3a0e3f937952d81042fdfec0733727ffd212e9ca
Shell
ggmanuilov/node-disjoint-set
/bash/build.sh
UTF-8
481
2.546875
3
[ "MIT" ]
permissive
#!/usr/bin/env bash BASEDIR=$(dirname "$0") cd ${BASEDIR}/../ PROTO_DEST=./generated mkdir -p ${PROTO_DEST} ## JavaScript code generating protoc --plugin=protoc-gen-ts=node_modules/.bin/protoc-gen-ts --ts_out=./generated --proto_path=./protos ./protos/dsu_service.proto protoc --plugin=protoc-gen-grpc=./node_modules/.bin/grpc_tools_node_protoc_plugin --js_out=import_style=commonjs,binary:./generated --grpc_out=./generated --proto_path=./protos ./protos/dsu_service.proto
true
101d7dcffe5502abf736d7aefb13db13d9a4bc70
Shell
sigma-random/uconnect-swdl
/RA4/usr/share/scripts/app-install/us-app-install.sh
UTF-8
818
3.046875
3
[]
no_license
#!/bin/sh # # ISO_PATH must be an environment variable set upon being called # if [[ "$ISO_PATH" == "" ]]; then exit 1 fi # # USB_PATH must be set as well # if [[ "$USB_PATH" == "" ]]; then exit 2 fi # # ISO_PATH must be defined and point to the root of the mounted update # ISO image. We prefer binaries in the ISO over those in the IFS and # on the target. # PATH=$ISO_PATH/bin:$ISO_PATH/usr/bin:$ISO_PATH/sbin:$ISO_PATH/usr/sbin:/bin:/usr/bin:/sbin:/usr/sbin LD_LIBRARY_PATH=$ISO_PATH/lib:$ISO_PATH/usr/lib:$ISO_PATH/lib/dll:$ISO_PATH/usr/lib/dll:/lib:/usr/lib:/lib/dll:/usr/lib/dll:/usr/lib/lua # Launch the Lua installer export LUA_PATH="./?.lua;./installer/?.lua;/usr/share/lua/?.lua;/usr/share/lua/?/init.lua"; cd $ISO_PATH/usr/share/scripts/app-install lua -s us-app-install.lua $ISO_PATH $USB_PATH
true
3b679e8a67a2c47319d8a0bc463d311378a371d6
Shell
langthom/data-structures
/RedBlackTree/Python/redblack.py
UTF-8
22,454
3.140625
3
[ "BSD-3-Clause" ]
permissive
#!/bin/bash # Copyright (c) 2015, Thomas Lang. All rights reserved. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> from enum import Enum class RedBlackTree(): """ Implementation of a red-black tree.<p> A red-black tree is a binary search tree whose nodes can have colors (either red or black). This tree is self-balancing and guarantees that all important operations (like searching, inserting and removing values) run in {@code O(log n)} where {@code n} denotes the number of elements in the tree. <p> This rebalancing is done by guaranteeing the following rules: <ul> <li> The root is always black. </li> <li> The leaf nodes (what are {@code null} pointers in reality) are black. </li> <li> If a node is red, both children are black. </li> <li> Every path from a node to its children contain the same amount of black nodes. </li> </ul> @author Thomas Lang @version 1.0, 2015-08-25 @see Node """ class Color(Enum): """ Enum representing the two possible colors of a node. """ RED = 0 BLACK = 1 def __str__(self): """Returns a string representation of this enum.""" return str(self.name) class Node(): """ Implementation of a single node of a Red-black tree.<p> This is basically a simple node of any binary search tree but with the additional {@code color} attribute of type {@code Color}, what can be either {@code Color.RED} or {@code Color.BLACK}. 
@author Thomas Lang @version 2015-08-25 @see Color """ def __init__(self, value): """ Creates a new red node encapsulating the passed {@code value}. @param value The value encapsulated in this node. """ self.color = RedBlackTree.Color.RED self.value = value self.left = None self.right = None self.parent = None def __str__(self): """ Returns a string representation of a node including all of its children. @return Returns a string representation of a node. """ __rep = [] if self.left is not None: __rep.append("(") __rep.append(self.left.__str__()) __rep.append(")") __rep.append("[") __rep.append(str(self.color)) __rep.append(": ") __rep.append(str(self.value)) __rep.append("]") if self.right is not None: __rep.append("(") __rep.append(self.right.__str__()) __rep.append(")") return ''.join(__rep) def __init__(self): """Creates a new and empty RedBlackTree.""" self.__size = 0 self.__root = None def isEmpty(self): """ Checks if the tree is empty or not. @return Returns either {@code true} if the tree is empty or {@code false} otherwise. """ return self.__size == 0 def size(self): """ Returns the number of elements stored in the tree. @return Returns the size of the tree. """ return self.__size; def insert(self, value): """ Inserts the passed value into the tree. @param value The value to insert into the tree which must not be {@code null}. @return Returns either {@code true} if the insertion was successful or {@code false} otherwise. """ if value is None: raise TypeError("Null value cannot be inserted.") __node = RedBlackTree.Node(value) __success = True; if self.isEmpty(): __node.color = RedBlackTree.Color.BLACK self.__root = __node else: __success = self._insert(self.__root, __node) if __success: self.__size += 1 return __success; def _insert(self, current, node): """ Recursively traverses through the tree until the position for inserting is found. Then the node is inserted. If necessary, the tree is also rebalanced starting from the new node. 
@param current The current traversal node which must not be {@code null}. @param node The node to insert into the tree which must not be {@code null}. @return Returns either {@code true} if the insertion was successful or {@code false} if the node already exists in the tree. @see #rebalance(Node<T>) """ assert (current is not None), "Null traversing node passed." assert (node is not None), "Null node passed." __curvalue = current.value __newvalue = node.value __success = True if __newvalue < __curvalue: if current.left is None: current.left = node node.parent = current self._rebalance(node) __success = True else: __success = self._insert(current.left, node) elif __newvalue > __curvalue: if current.right is None: current.right = node node.parent = current self._rebalance(node) __success = True else: __success = self._insert(current.right, node) else: __success = False return __success; def _rebalance(self, node): """ Rebalances the tree from {@code node} if necessary. @param node The node to start rebalancing from. """ assert (node is not None), "Null node passed." __parent = node.parent if __parent is None: # Case 1: The new node has no parent. This means that the new node # is the root and the root always must be black. node.color = RedBlackTree.Color.BLACK; return; if __parent.color == RedBlackTree.Color.BLACK: # Case 2: Per default every new node (including this one) are red. # When the color of the parent node is black, then the depth of # black nodes is still the same and we do not have to do anything. return; __grandparent = __parent.parent __uncle = __grandparent.right if __parent == __grandparent.left \ else __grandparent.left if __uncle is not None and __uncle.color == RedBlackTree.Color.RED: # Case 3: Both the uncle and the parent nodes are red. Then we # restore the tree by changing the below colors what makes the # tree be okay locally. But now, the grand parent will be # problematic, so we rebalance it. 
__parent.color = RedBlackTree.Color.BLACK __uncle.color = RedBlackTree.Color.BLACK __grandparent.color = RedBlackTree.Color.RED self._rebalance(__grandparent) else: # Case 4: The parent node and the node itself are red and the # path from the grand parent to the node forms a zig-zag line. # Then we perform a rotation and swap positions what will result # in a constellation useable for the fifth case. # The exact rotation depends on if the node was a left or a right # child. if node == __parent.right and __parent == __grandparent.left: self._rotateLeft(parent) node = node.left elif node == __parent.left and __parent == __grandparent.right: self._rotateRight(__parent) node = node.right # Case 5: From this position we restore the tree by swapping # colors and rotations around the grand parent, depending on if # the node was a left or a right child. __parent = node.parent __grandParent = __parent.parent __parent.color = RedBlackTree.Color.BLACK __grandParent.color = RedBlackTree.Color.RED if node == __parent.left and __parent == __grandParent.left: self._rotateRight(__grandParent) else: self._rotateLeft(__grandParent) def _rotateLeft(self, node): """ Performs a single left rotation around the passed {@code node}. @param node The node to rotate around which must not be {@code null}. """ assert (node is not None), "Null node passed." __parent = node.parent __right = node.right __rightleft = __right.left node.parent = __right node.right = __rightleft __right.parent = __parent __right.left = node if __rightleft is not None: __rightleft.parent = node if __parent is None: self.__root = __right; elif __parent.left == node: __parent.left = __right; else: __parent.right = __right; def _rotateRight(self, node): """ Performs a single right rotation around the passed {@code node}. @param node The node to rotate about which must not be {@code null}. """ assert (node is not None), "Null node passed." 
__parent = node.parent __left = node.left __leftright = __left.right node.parent = __left node.left = __leftright __left.parent = __parent __left.right = node if __leftright is not None: __leftright.parent = node if __parent is None: self.__root = __left elif __parent.right == node: __parent.right = __left else: __parent.left = __left def contains(self, value): """ Checks if the tree contains the passed {@code value}. @param value The value to search for. @return Returns either {@code true} if the tree contains the passed {@code value} or {@code false} otherwise. @see #get(T) """ return self._get(value) is not None def _get(self, value): """ Gets the node containing the passed {@code value}. @param value The value to search for. @return Returns either the node containing the passed {@code value} or {@code null} if the tree is empty or no node could be found. @see #get(Node<T>, T) """ if self.isEmpty(): return None else: return self.__get(self.__root, value) def __get(self, current, value): """ Recursively gets the node containing the passed {@code value}. @param current The current traversal node, which must not be {@code null}. @param value The value to search for. @return Returns either the node containing the passed {@code value} or {@code null} if no such node could be found. """ assert (current is not None), "Null traversing node." __curvalue = current.value if value == __curvalue: return current elif value < __curvalue and current.left is not None: return self.__get(current.left, value) elif value > __curvalue and current.right is not None: return self.__get(current.right, value) else: return None def remove(self, value): """ Removes the passed {@code value} from this tree.<p> If necessary, the tree will be rebalanced after the removal. If the passed {@code value} does not exist in the tree, this function does nothing and returns {@code false}. @param value @return Returns {@code true} if the removal was successful or {@code false} otherwise. 
@see #deleteInternalNode(Node<T>) @see #deleteNode(Node<T>) """ __node = self._get(value) if __node is None: return False __parent = __node.parent if value == self.__root.value: __rln = self.__root.left is None __rrn = self.__root.right is None if __rln and __rrn: self.__root = None return True elif not __rln and __rrn: __col = self.__root.color self.__root = self.__root.left self.__root.color = __col elif __rln and not __rrn: __col = self.__root.color self.__root = self.__root.right self.__root.color = __col else: self._deleteInternalNode(self.__root) elif __node.left is not None and __node.right is not None: self._deleteInternalNode(__node) else: self._deleteNode(__node) self.__size -= 1 return True def _deleteInternalNode(self, node): """ Deletes a node from the tree that has both two children. @param node The node to delete which must not be {@code null}. @see #deleteNode(Node<T>) """ assert (node is not None), "Null node passed." # The node is deleted by exchanging its value with the largest value # from the left sub tree and finally deleting the maximum node of the # left sub tree. __maxofmin = node.left while __maxofmin.right is not None: __maxofmin = __maxofmin.right node.value = __maxofmin.value self._deleteNode(__maxofmin) def _deleteNode(self, node): """ Deletes a node from the tree that does not have two real children. The tree will be rebalanced if necessary. @param node The node to delete which must not be {@code null}. @see #rebalanceAfterDeletion(Node<T>) """ assert (node is not None), "Null node passed." __parent = node.parent __left = node.left __right = node.right __pright = __parent.right __rightchild = __pright is not None and __pright.value == node.value # Please note that we need not to check if 'parent' is null here, # because this can only happen if 'node' is the root, but this special # case is already recognized in the methode 'remove'. 
if node.left is None and node.right is None: if node.color == RedBlackTree.Color.BLACK: self._rebalanceAfterDeletion(node) if __rightchild: __parent.right = None else: __parent.left = None elif node.left is not None: if __rightchild: __parent.right = __left else: __parent.left = __left __left.parent = __parent else: if __rightchild: __parent.right = __right __parent.right.color = node.color else: __parent.left = __right __parent.left.color = node.color __right.parent = __parent node.parent = None node = None def _rebalanceAfterDeletion(self, node): """ Rebalances the tree after a deletion. @param node A child of the deleted node which must not be {@code null}. """ assert (node is not None), "Null node passed." __parent = node.parent if __parent is None: # Case 1: Problematic node is root, no rotations to made.# return __sibling = __parent.right if node == __parent.left else __parent.left if __sibling.color == RedBlackTree.Color.RED: # Case 2: The sibling of the node is red. # Then invert the colors of the parent and the sibling node # following by performing a left / right rotation around the # parent node depending on if the node was a left or a right # child. __parent.color = RedBlackTree.Color.RED __sibling.color = RedBlackTree.Color.BLACK if node == __parent.left: self._rotateLeft(parent) else: self._rotateRight(parent) __pcolor = __parent.color __scolor = __sibling.color __slcolor = RedBlackTree.Color.BLACK if __sibling.left is None \ else __sibling.left.color __srcolor = RedBlackTree.Color.BLACK if __sibling.right is None \ else __sibling.right.color if __pcolor == RedBlackTree.Color.BLACK \ and __scolor == RedBlackTree.Color.BLACK \ and __slcolor == RedBlackTree.Color.BLACK \ and __srcolor == RedBlackTree.Color.BLACK: # Case 3: The parent, the sibling and both children of the sibling # are black. Then the sibling has the wrong color, so change it to # red. 
This may have corrupted any integraty conditions of the # parent node, so we have to rebalance the parent node. __sibling.color = RedBlackTree.Color.RED self._rebalanceAfterDeletion(__parent) elif __pcolor == RedBlackTree.Color.RED \ and __scolor == RedBlackTree.Color.BLACK \ and __slcolor == RedBlackTree.Color.BLACK \ and __srcolor == RedBlackTree.Color.BLACK: # Case 4: The sibling and its both children are black but the # parent is red. Then we can rebalance the tree by simply # inverting the colors of the sibling and parent node. __sibling.color = RedBlackTree.Color.RED __parent.color = RedBlackTree.Color.BLACK else: # Case 5: # (a): Node is the left child and the sibling and the sibling's # right child are black but the siblings left child is red. # Then we change the colors of the sibling and it's left # child and perform a right rotation around the sibling. # Then all paths have the same number of right nodes. # After this, we immediately go to case 6. # (b): The same thing as in (a) but the other way (right child). if node == __parent.left \ and __scolor == RedBlackTree.Color.BLACK \ and __slcolor == RedBlackTree.Color.RED \ and __srcolor == RedBlackTree.Color.BLACK: __sibling.color = RedBlackTree.Color.RED __sibling.left.color = RedBlackTree.Color.BLACK self._rotateRight(sibling) elif node == __parent.right \ and __scolor == RedBlackTree.Color.BLACK \ and __slcolor == RedBlackTree.Color.BLACK \ and __srcolor == RedBlackTree.Color.RED: __sibling.color = RedBlackTree.Color.RED __sibling.right.color = RedBlackTree.Color.BLACK self._rotateLeft(sibling) # Case 6: The sibling is black, the right child of the sibling is # red and the node is the left child of it's parent. # Then we resolve this illegal state by changing the colors as # below. After this, we have to correct the now invalid paths # by rotating, depending on if the node was a left or a right # child. 
__sibling.color = __parent.color __parent.color = RedBlackTree.Color.BLACK if node == __parent.left: __sibling.right.color = RedBlackTree.Color.BLACK; self._rotateLeft(__parent) else: __sibling.left.color = RedBlackTree.Color.BLACK; self._rotateRight(__parent) def clear(self): """Clears the tree.""" self.__root = None self.__size = 0 def __str__(self): """ Returns a string representation of the entire tree.<p> This representation is either {@code (empty tree)} if the tree is empty or the representation of the root node. @return Returns a string representation of the entire tree. """ if self.isEmpty(): return "(empty tree)" else: return self.__root.__str__() def main(): """Main (testing) method.""" print("Creating a new Red-black tree ... ", end=""); test = RedBlackTree(); print("done."); print("Inserting some values ... ", end=""); test.insert(7); test.insert(8); test.insert(6); test.insert(12); test.insert(0); test.insert(9); test.insert(10); test.insert(-7); test.insert(999); print("done."); print("Tree now: " + str(test)); print("Deleting a few values ... ", end=""); test.remove(12); test.remove(0); test.remove(9); test.remove(7); test.remove(8); test.remove(10); test.remove(-7); test.remove(6); print("done."); print("Tree now: " + str(test)); print("Clearing tree now ... ", end=""); test.clear(); print("done."); print("Tree now: " + str(test)); if __name__ == '__main__': main()
true
7414f1c0386fe8345297efcc277f100224a0b86c
Shell
montanpi/caja-scripts
/scripts/links/find hard links
UTF-8
424
3.6875
4
[]
no_license
#!/usr/bin/env bash
# Caja script: list all hard links of the selected file.
# Asks for a folder to search in, then finds every directory entry on the
# same file system (-xdev) that shares the selected file's inode.
# NOTE(review): find's -samefile takes a single file; if Caja passes several
# selected files, only a one-file selection behaves sensibly — TODO confirm.
FILE_SYSTEM=$(zenity --title "Select a folder to search for hard links" --file-selection --directory)
if [ -n "$FILE_SYSTEM" ]; then
  links=$(find "$FILE_SYSTEM" -xdev -samefile "$@")
  if [ -n "$links" ]; then
    # BUG FIX: the original passed "$@" as extra arguments after --title,
    # which zenity does not understand; the file name belongs in the title.
    zenity --info --title "Hard Links of $*" --width=700 --text="$links"
  else
    zenity --info --title "Hard Links of $*" --width=700 --text="No hard links found in '$FILE_SYSTEM'"
  fi
fi
exit
true
a82852504638d3a2aa899d19159ad738bfb8722c
Shell
hardboydu/packet-journey
/tests/libnetlink/script.sh
UTF-8
2,895
3.578125
4
[ "BSD-3-Clause", "BSD-2-Clause" ]
permissive
#!/bin/sh
# Integration test for the netlink listener: start the program under test
# ($1), generate kernel netlink events by configuring a dummy interface, and
# compare the program's output line by line through a fifo.
# Must run as root (creates a dummy link, adds addresses/routes/neighbours).
set -e
set -u

cd "$(dirname "$0")"
TEMPDIR=$(mktemp -d)
process_pid=""

cleanup() {
	rm -rf "$TEMPDIR"
	# The child may already be dead (the test body kills it on success);
	# never let a failing kill inside the EXIT trap mask the real status,
	# and guard against the pid never having been set (early failure).
	if [ -n "$process_pid" ]; then
		kill "$process_pid" 2>/dev/null || true
	fi
	# NOTE(review): the dummy eth0 interface is never deleted; consider
	# 'ip link del eth0' here so the test can be re-run.
}
trap cleanup 0

# read_timeout SECONDS VARNAME
# Read one line from stdin into VARNAME, aborting after SECONDS by having a
# background timer send us ALRM.
read_timeout() {
	set +e
	# Can't use $$, as it's the parent shell pid, not the subshell.
	# Thus incorrect if invoked in a subshell
	current_pid=$(exec sh -c 'echo $PPID')
	saved_traps="$(trap)"
	if [ "x${saved_traps}" = "x" ]; then saved_traps="trap - ALRM"; fi
	trap 'eval "${saved_traps}"; set -e; return' ALRM
	(sleep "$1"; kill -ALRM "$current_pid") &
	timer_pid=$!
	read $2
	kill "$timer_pid" 2>/dev/null
	eval "${saved_traps}"
	set -e
}

# Discard output lines until one whose prefix matches $1 is seen
# (5 second timeout per line).
skipuntil() {
	read_timeout 5 line1
	stripped=$(printf "%.${#1}s" "$line1")
	if [ "$stripped" = "$1" ]; then
		return 0
	fi
	skipuntil "$1"
}

# expectedoutput EXPECTED_LINE DESCRIPTION
# Fail the test if the next output line (2 second timeout) is not exactly
# EXPECTED_LINE.
expectedoutput() {
	read_success=0
	read_timeout 2 line1 || read_success=1
	if [ $read_success -ne 0 ]; then
		echo $2 ", expected" $1 ": got nothing (timeout)" >&2
		exit 1
	fi
	if [ "$line1" != "$1" ]; then
		echo $2 ", expected" $1 ": " $line1 >&2
		exit 1
	fi
}

ip link add eth0 type dummy
ip link set dev eth0 up

mkfifo "$TEMPDIR/fifo"
$1 >"$TEMPDIR/fifo" &
process_pid=$!

(
skipuntil "START"
ip addr add 1.2.3.1/24 dev eth0
expectedoutput "addr4 add 1.2.3.1/24 dev eth0" "We should have read new address through netlink"
ip addr add 1::1/48 dev eth0
expectedoutput "addr6 add 1::1/48 dev eth0" "We should have read new address through netlink"
ip route add 1.2.4.0/24 via 1.2.3.254
expectedoutput "route4 add 1.2.4.0/24 via 1.2.3.254" "We should have read new route through netlink"
ip route del 1.2.4.0/24 via 1.2.3.254
expectedoutput "route4 del 1.2.4.0/24 via 1.2.3.254" "We should have read route deletion through netlink"
ip route add 1::/48 via fe80::1 dev eth0
expectedoutput "route6 add 1::/48 via fe80::1" "We should have read new route through netlink"
ip route del 1::/48 via fe80::1 dev eth0
expectedoutput "route6 del 1::/48 via fe80::1" "We should have read route deletion through netlink"
ip addr del 1::1/48 dev eth0
expectedoutput "addr6 del 1::1/48 dev eth0" "We should have read new address through netlink"
ip addr del 1.2.3.1/24 dev eth0
expectedoutput "addr4 del 1.2.3.1/24 dev eth0" "We should have read a delete address through netlink"
ip neigh add 1.2.3.2 lladdr 11:22:33:44:55:66 dev eth0
expectedoutput "neigh4 add 1.2.3.2 lladdr 11:22:33:44:55:66 nud PERMANENT dev eth0" "We should have read new neigghbour through netlink"
ip neigh add 1::2 lladdr 11:22:33:44:55:67 dev eth0
expectedoutput "neigh6 add 1::2 lladdr 11:22:33:44:55:67 nud PERMANENT dev eth0" "We should have read new neigghbour through netlink"
kill $process_pid
expectedoutput "EOF" "We should have end the test with no further content"
) < "$TEMPDIR/fifo"

# BUG FIX: the original ran 'wait ... || ( echo ...; exit 1 )' followed by an
# unconditional 'exit 0'. The 'exit 1' only left the subshell, so a child
# process dying with an error could never fail the test. Propagate it.
if ! wait "$process_pid"; then
	echo "process returned non-zero return code, check logs" >&2
	exit 1
fi
exit 0
true
1df6a665af10dd4e5f0acfd9c4a73809b3adfd7b
Shell
hyxbiao/imgcv
/examples/fashionAI/train.sh
UTF-8
672
2.59375
3
[]
no_license
#!/bin/bash
# Launch (background) training for one fashionAI attribute model.
# Usage: ./train.sh [attr_key] [data_dir] [pretrain_model_dir] [model_dir]
# Output of the training run is appended to log/<attr_key>/train_<timestamp>.out
attr_key=${1:-skirt_length_labels}
data_dir=${2:-~/data/vision/fashionAI/}
pretrain_model_dir=${3:-models/pretrain_resnet50/}
model_dir=${4:-models/$attr_key/baseline}
now=$(date +%Y%m%d_%H%M)
# Regex passed to the warm-start machinery: every variable NOT containing
# "dense" (i.e. reload the backbone, train the head from scratch).
pretrain_warm_vars='^((?!dense).)*$'

log_dir=log/$attr_key
mkdir -p "$log_dir"

echo "attr_key: $attr_key"
echo "data_dir: $data_dir"
echo "model_dir: $model_dir"
echo "pretrain_model_dir: $pretrain_model_dir"
echo "pretrain_warm_vars: $pretrain_warm_vars"

# Quote every expansion so paths containing spaces survive.
nohup python app.py \
	--data_dir "$data_dir" \
	--model_dir "$model_dir" \
	--attr_key "$attr_key" \
	--pretrain_model_dir "$pretrain_model_dir" \
	--pretrain_warm_vars "$pretrain_warm_vars" \
	>"$log_dir/train_${now}.out" 2>&1 &
true
1f94ee03619fced5e0168ef6b1436afa297cf07f
Shell
odeter/emacs
/.emacs.d/var/backup/!home!christopher!Documents!Work!Derant!it_security_testing!automalscript.sh~
UTF-8
295
2.78125
3
[]
no_license
#!/bin/bash
# Download the ipsum IP blocklist, filter out comments and low-confidence
# entries (score 1-2), write the IP column to ip_list.txt, then run the
# analyzer.
response=$(curl --compressed https://raw.githubusercontent.com/stamparm/ipsum/master/ipsum.txt 2>/dev/null)
if [ $? -eq 0 ]; then
	# BUG FIX: the original ran '${response} | grep ...', which tried to
	# EXECUTE the downloaded text as a command. Feed it to the pipeline
	# as data instead.
	printf '%s\n' "$response" | grep -v "#" | grep -v -E "\s[1-2]$" | cut -f 1 > ip_list.txt
fi
# BUG FIX: the original ran automaltest.py both inside the success branch and
# again unconditionally, executing it twice on a successful download.
# Run it exactly once, whether or not the download succeeded.
python3 automaltest.py
true
88a0ad4ee487025fa56509c9fc82c55d0d45407a
Shell
srl295/icu
/icufacts/c/check.sh
UTF-8
201
2.671875
3
[]
no_license
#!/bin/sh
# Copyright (c) 2010 IBM Corporation and Others. All Rights Reserved.
# Run "make check" in every immediate subdirectory that has a Makefile.
#make -C data
# Glob instead of parsing `ls` so entry names with spaces survive.
for dir in *; do
	if [ -f "${dir}/Makefile" ]; then
		make -C "${dir}" check
	fi
done
true
25f3142827980919af15764b69ec29bab8bc0246
Shell
cjgd/scripting-libs
/sed/diff_d_D
UTF-8
517
3.40625
3
[]
no_license
#! /bin/sh
# Carlos Duarte <cgd@mail.teleweb.pt>, 980722/990607
# demonstrate the difference between sed's d and D commands

# Feed two lines through sed: quit unless on line 1, append the next
# line with N, then apply the command under test ($1).
run_demo() {
	printf '%s\n' "line 1" "line 2" | sed -e '1!q' -e N -e "$1"
}

for cmd in d D; do
	case $cmd in
		d) echo "running with command 'd' (no output)" ;;
		D) echo "running with command 'D' (only 2nd line)" ;;
	esac
	run_demo "$cmd"
done

#
# d: delete pattern space and goto end
# D: delete up to first newline and goto start
#
# i.e. neither d or D produce output, but 'd' will fetch
# a new line, and continue from start, while 'D' branches
# directly to top
#
true
fdcb867f9f533ebb711758c06dfc788324b30acc
Shell
zhooravlik/magento2-infrastructure
/build/core_dev/performance/mpaf/scripts/parsible/test/test-jmeter-batch.sh
UTF-8
297
2.8125
3
[]
no_license
#!/bin/sh
# Run parsible in batch mode over a single JMeter sample log.
# Usage: ./test-jmeter-batch.sh <jmeter-sample-log>
JMETER_SAMPLE=$1
if [ -z "$JMETER_SAMPLE" ]; then
	echo "Usage: $0 <jmeter-sample-log>" >&2
	exit 1
fi
DEBUG="--debug 1"
PYTHON_HOME=/usr
# $DEBUG is deliberately left unquoted so it splits into two arguments.
${PYTHON_HOME}/bin/python ../parsible.py --log-file "$JMETER_SAMPLE" --parser parse_jmeter --batch-mode 1 $DEBUG --pid-file /tmp/parsible.pid --stat-prefix sample-test
# rm -f replaces the racy "[ -f ] && rm" existence check.
rm -f /tmp/parsible.pid
true
67e8387a8aee6c2a454c7d7cff93d322c5cae825
Shell
ramseykarim/helpss
/pipeline/idl_preprocess_wrapper.sh
UTF-8
9,736
4.3125
4
[]
no_license
#!/bin/bash
# bash wrapper for Tracy's IDL preprocessing for HELPSS Herschel data
# Author: Ramsey Karim

# The idea here is to run Tracy's preprocessing code from command line using variables
# This wrapper should be run with certain command line arguments:
# 1: directory containing the "level2_5/" directory (e.g. /the/directory/)
# 2: directory containing the manticore prep IDL procedures Tracy wrote
# 3: name of the object (needs a name, like "Perseus1" or "NGC1333")
# 4: directory to which to write all these images

this_script_name="idl_preprocess_wrapper.sh"

# command line arguments are set up
# default values for parameters are clearly enumerated below
default_obs_dir="$(pwd)/"
default_mprep_dir="/n/sgraraid/filaments/manticore-prep/"
default_object="unassigned_name"
default_working_dir="$(pwd)/"

# set to defaults; will change if argument specifies such
obs_directory=$default_obs_dir
Pobs_directory=""
Sobs_directory=""
mprep_directory=$default_mprep_dir
object_name=$default_object
working_dir=$default_working_dir
pacs70=false
reference_wavelength="500"

# Print the full usage text and exit with status 1.
print_usage_exit() {
	printf "${this_script_name} usage:
./${this_script_name} [valid arguments]
-h help (prints this message and exits)
-x run (even if no arguments are present)
(at least one argument is necessary to run)
-d data directory containing the archival Herschel data
this directory MUST be the \"observation number\" directory
this directory MUST contain the \"level2_5\" OR \"level2\" directory
relative or absolute path are both ok
will be overridden by -P or -S if those are set
default -d <current directory> ($(pwd)/)
-P data directory containing the archival Herschel PACS data
overrides -d
see \"-d\" documentation above
-S data directory containing the archival Herschel SPIRE data
overrides -d
see \"-d\" documentation above
-i IDL preprocessing scripts directory
this directory must contain MakeHerschelImages.pro, RemapImages.pro, and ConvolveHerschelImages.pro
if you are running this on sgra, you may specify this without the '/n' rmpband
default -i ${default_mprep_dir}
-n object name to assign in case the name is missing
default -n ${default_object}
-o output directory to which to write the processed FITS files
the directory MUST already exist
default -o <current directory> ($(pwd)/)
-7 include 70 micron data
-R reference wavelength
must be one of: 70, 160, 250, 350, 500
(70 micron will not be accepted though, reverts to 500)
must also be one of the available wavelengths for this dataset
reverts to 500 if not recognized
"
	exit 1
}

# Report a nonexistent directory (first/only arg) and exit with status 1.
complain_directory() {
	# first/only arg is offending directory
	printf "$BASH_SOURCE: DIRECTORY ${1} DOES NOT EXIST
"
	exit 1
}

# Normalize a directory name (first/only arg): append a trailing '/', make it
# absolute, echo the result, and exit 1 if it does not exist. Always invoked
# inside a command substitution, so the 'exit 1' only leaves that subshell;
# callers detect the failure by checking $? of the assignment.
sanitize_directory() {
	# first/only argument is directory name
	# ensures the name ends in '/'
	# ensures this directory exists, and if not, prints usage and exits
	directory=$1
	if [[ "$directory" != *\/ ]] ; then
		directory="${directory}/"
	fi
	# Resolve relative paths to absolute ones (more or less)
	if [[ "$directory" != \/* ]] ; then
		directory="$(pwd)/${directory}"
	fi
	echo $directory
	if [[ ! -d $directory ]] ; then
		exit 1
	fi
}

# parse arguments
while getopts 'hxd:S:P:i:n:o:7R:' flag ; do
	case "${flag}" in
		h) print_usage_exit ;;
		x) : ;;
		d) obs_directory="$(sanitize_directory ${OPTARG})"
			if [[ $? -eq 1 ]] ; then complain_directory "${obs_directory}" ; fi ;;
		P) Pobs_directory="$(sanitize_directory ${OPTARG})"
			if [[ $? -eq 1 ]] ; then complain_directory "${Pobs_directory}" ; fi ;;
		S) Sobs_directory="$(sanitize_directory ${OPTARG})"
			if [[ $? -eq 1 ]] ; then complain_directory "${Sobs_directory}" ; fi ;;
		i) mprep_directory="$(sanitize_directory ${OPTARG})"
			if [[ $? -eq 1 ]] ; then complain_directory "${mprep_directory}" ; fi ;;
		n) object_name="${OPTARG}" ;;
		o) working_dir="$(sanitize_directory ${OPTARG})"
			if [[ $? -eq 1 ]] ; then complain_directory "${working_dir}" ; fi ;;
		7) pacs70=true ;;
		R) reference_wavelength="${OPTARG}" ;;
		*) print_usage_exit ;;
	esac
done
if [[ -z $1 ]] ; then
	printf "${this_script_name}: need at least one argument (-x to run with all defaults)\n"
	print_usage_exit
fi

# Get PACS+SPIRE observation directories and figure out level 2/2_5
# If Pobs_directory or Sobs_directory are not set, set them
if [[ -z "$Pobs_directory" ]] ; then Pobs_directory="$obs_directory" ; fi
if [[ -z "$Sobs_directory" ]] ; then Sobs_directory="$obs_directory" ; fi
# The obs_directory (something like ../1342190326/) should contain the
# level2_5/ or level2/ directory
# Check PACS directory
if [[ -d "${Pobs_directory}level2_5/" ]] ; then
	Plvl2or25_directory="${Pobs_directory}level2_5/"
elif [[ -d "${Pobs_directory}level2/" ]] ; then
	Plvl2or25_directory="${Pobs_directory}level2/"
else
	printf "${this_script_name}: PACS directory not valid\n"
	exit 1
fi
# Check SPIRE directory
if [[ -d "${Sobs_directory}level2_5/" ]] ; then
	Slvl2or25_directory="${Sobs_directory}level2_5/"
elif [[ -d "${Sobs_directory}level2/" ]] ; then
	Slvl2or25_directory="${Sobs_directory}level2/"
else
	printf "${this_script_name}: SPIRE directory not valid\n"
	exit 1
fi

# The directory structure of the PACS and SPIRE data is fairly standard
# We can assume the name of these subdirectories and that they each contain 1 file
# NOTE(review): each 'find' assumes exactly one matching file per band
# subdirectory; multiple matches would concatenate into one quoted string —
# TODO confirm the archive layout guarantees this.
if [[ "$pacs70" = true ]] ; then
	p70_source=\"$(find "${Plvl2or25_directory}HPPJSMAPB/" -name "*.*")\"
fi
p160_source=\"$(find "${Plvl2or25_directory}HPPJSMAPR/" -name "*.*")\"
s250_source=\"$(find "${Slvl2or25_directory}extdPSW/" -name "*.*")\"
s350_source=\"$(find "${Slvl2or25_directory}extdPMW/" -name "*.*")\"
s500_source=\"$(find "${Slvl2or25_directory}extdPLW/" -name "*.*")\"

# Construct the IDL call (based on the NOTES Tracy made in mprep_directory)
# Make ".run directory" shorthand and create all the import statements
idlrun=".run ${mprep_directory}"
make_herschel_images_import="${idlrun}MakeHerschelImages"
remap_images_import="${idlrun}RemapImages"
convolve_herschel_images_import="${idlrun}ConvolveHerschelImages"

# MakeHerschelImages setup and call
# Handle 70um more gracefully
if [[ "$pacs70" = true ]] ; then
	len_arr="5"
	# Order shouldn't matter
	last_lines="filearr(4)=${p70_source}
"
else
	len_arr="4"
	last_lines=""
fi
# Multi-line string: each line becomes one IDL statement inside the heredoc.
make_herschel_images_setup="filearr=strarr(${len_arr})
filearr(0)=${p160_source}
filearr(1)=${s250_source}
filearr(2)=${s350_source}
filearr(3)=${s500_source}
${last_lines}"
# Note that this will dump outputs to current working directory
make_herschel_images_cmd="MakeHerschelImages, filearr, object=\"${object_name}\""

# Get filenames for these newly created files (standard filenaming scheme)
# Each p*/s* prefix carries an opening double quote; 'fits' carries the
# closing one, so prefix+suffix concatenations form quoted IDL strings.
img="image"
err="error"
if [[ "$pacs70" = true ]] ; then
	p70="\"./PACS70um-"
fi
p160="\"./PACS160um-"
s250="\"./SPIRE250um-"
s350="\"./SPIRE350um-"
s500="\"./SPIRE500um-"
fits=".fits\""

# RemapImages setup and call
# Reference is SPIRE500 (largest pixels, so least number of pixels)
# Need to remap other 3 images+errors (6 total files) to the reference
case "${reference_wavelength}" in
	350) rmp_reference_band="${s350}" ;
		rmpband1=${p160} ; rmpband2=${s250} ; rmpband3=${s500} ;;
	250) rmp_reference_band="${s250}" ;
		rmpband1=${p160} ; rmpband2=${s350} ; rmpband3=${s500} ;;
	160) rmp_reference_band="${p160}" ;
		rmpband1=${s250} ; rmpband2=${s350} ; rmpband3=${s500} ;;
	# Not allowing referecing to 70um. It doesn't make sense
	*) rmp_reference_band="${s500}" ; reference_wavelength="500" ;
		rmpband1=${p160} ; rmpband2=${s250} ; rmpband3=${s350} ;;
esac
# Handle 70 micron more gracefully
if [[ "$pacs70" = true ]] ; then
	len_arr="8"
	# Order shouldn't matter
	last_lines="filearr(6)=${p70}${img}${fits}
filearr(7)=${p70}${err}${fits}
"
else
	len_arr="6"
	last_lines=""
fi
remap_images_setup="reference=${rmp_reference_band}${img}${fits}
filearr=strarr(${len_arr})
filearr(0)=${rmpband1}${img}${fits}
filearr(1)=${rmpband1}${err}${fits}
filearr(2)=${rmpband2}${img}${fits}
filearr(3)=${rmpband2}${err}${fits}
filearr(4)=${rmpband3}${img}${fits}
filearr(5)=${rmpband3}${err}${fits}
${last_lines}"
remap_images_cmd="RemapImages, reference, filearr"

# ConvolveHerschelImages setup and call
# Convolving to reference wavelength of 500um (worst resolution)
# Handle 70um more gracefully
if [[ "$pacs70" = true ]] ; then
	within_wavearr="70, "
	within_otherarr="${p70}\", "
fi
# within_* will be "" if else, by default
rmp="-remapped"
convolve_herschel_images_setup="wavearr=[${within_wavearr}160, 250, 350, 500]
imarr=[${within_otherarr}${p160}\", ${s250}\", ${s350}\", ${s500}\"]+\"${img}${rmp}${fits}
errarr=[${within_otherarr}${p160}\", ${s250}\", ${s350}\", ${s500}\"]+\"${err}${rmp}${fits}
refwave=${reference_wavelength}
"
convolve_herschel_images_cmd="ConvolveHerschelImages, wavearr, imarr, errarr, refwave=refwave"

# Change directory to working directory so all file reads/writes are in there
cd $working_dir
# Make the IDL call using a "here document", which emulates interactive mode
idl <<EOF
${make_herschel_images_import}
${remap_images_import}
${convolve_herschel_images_import}
${make_herschel_images_setup}
${make_herschel_images_cmd}
${remap_images_setup}
${remap_images_cmd}
${convolve_herschel_images_setup}
${convolve_herschel_images_cmd}
EOF
printf "done with IDL preprocessing; written to ${working_dir}
"
true
5247548a9f7b892e72a68f7c8adf4c1da8d16314
Shell
dyomas/pyhrol
/examples/test.sh
UTF-8
2,677
3.03125
3
[ "BSD-2-Clause" ]
permissive
#!/bin/sh
# Copyright (c) 2013, 2014, Pyhrol, pyhrol@rambler.ru
# GEO: N55.703431,E37.623324 .. N48.742359,E44.536997
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 4. Neither the name of the Pyhrol nor the names of its contributors
#    may be used to endorse or promote products derived from this software
#    without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# Build (optional make targets), run an example script, and diff its output
# against the expected <example>.txt next to this script.
# Usage: ./test.sh [make-targets] [libraries] [example]
# (libraries and example default to the first argument)
set -e
readonly LIST_DIR=`dirname $0`
readonly BUILD_DIR=`pwd`
readonly TARGETS="${1}"
readonly LIBRARIES="${2:-${1}}"
readonly EXAMPLE="${3:-${1}}"
RETVAL=0
echo "TARGETS = \"$TARGETS\"
LIBRARIES = \"$LIBRARIES\"
EXAMPLE = \"$EXAMPLE\"
LIST_DIR = \"$LIST_DIR\"
BUILD_DIR = \"$BUILD_DIR\"" >&2
# Build the requested targets from the invocation directory, if any were given.
if [ -n "$TARGETS" ]; then
	make $TARGETS
fi
# The rest of the run happens next to this script, where the examples live.
cd $LIST_DIR
# Symlink each freshly built shared library into the examples directory so
# the python interpreter can import it, unless a link/file already exists.
if [ -n "$LIBRARIES" ]; then
	for LIBRARY in $LIBRARIES; do
		if ! [ -e "$LIBRARY.so" ]; then
			ln -sf $BUILD_DIR/$LIBRARY.so .
		fi
	done
fi
# Scrub all pyhrol debug/format environment variables so the example's
# output is deterministic and comparable to the stored expectation.
unset __PY_SHOW_CALLS
unset __PY_HACK_SIGNATURES
unset PYHROL_TRACE
unset PYHROL_TUPLE_HEADER_FORMAT
unset PYHROL_TUPLE_BODY_FORMAT
unset PYHROL_ERROR_HEADER_FORMAT
unset PYHROL_ERROR_BODY_FORMAT
PYTHON=python
# If stdbuf is available, force line-buffered stdout so interleaving with
# stderr is stable.
# NOTE(review): 'command -v stdbuf' would be the portable existence check.
which stdbuf 2>&1 > /dev/null && {
	PYTHON="stdbuf -oL $PYTHON"
}
PYTHON="$PYTHON -EsS"
readonly TMP_FILE=`mktemp /tmp/__phe_XXXX`
# Run the example; on success diff against the expectation (recording the
# diff status), on failure record the status and dump the captured output.
# The &&/{}/||/{} chaining keeps 'set -e' from aborting before cleanup.
$PYTHON $EXAMPLE.py > $TMP_FILE 2>&1 && {
	diff --label="<$EXAMPLE.txt>" --label="(obtained)" -U0 $EXAMPLE.txt $TMP_FILE || {
		RETVAL=$?
	}
} || {
	RETVAL=$?
	cat $TMP_FILE >&2
}
rm -f $TMP_FILE
exit $RETVAL
true
c850bfb363a799adee05800650e9134df4db4dd4
Shell
jackson910210/CREME
/CREME_windows/CREME_backend_execution/scripts/preprocessing/NetworkPacket/make_subflow_dataset.sh
UTF-8
1,068
3.546875
4
[]
no_license
#!/bin/bash
# Split pcap files into time-window "subflows" with argus, export features
# to CSV with ra, then merge the per-file CSVs.
# Usage: ./make_subflow_dataset.sh time_window(secs) pcap_file_path code_path
if [ $# != 3 ]; then
	echo "Usage: ./make_subflow_dataset.sh time_window(secs) pcap_file_path code_path"
	# 'exit 1' instead of the original non-portable 'exit -1'.
	exit 1
fi

time_window=$1
pcap_file_path=$2
code_path=$3

cd "$pcap_file_path" || exit 1
mkdir -p "pcap_${time_window}secs"

# Glob instead of parsing `ls` so filenames with spaces survive.
for pcapfile in *.pcap; do
	# # split to subflow
	# tshark -r $filename -Y "frame.time_relative >= ${lowerbound} && frame.time_relative <= ${upperbound}" -w "pcap_${time_window}secs/${newName}.pcap"
	filename=$(basename "$pcapfile" .pcap)
	# extract feature
	# BUG FIX: the original used a corrupted '$(unknown)' token here, which
	# would run a nonexistent command; the intended value is the computed
	# (and otherwise unused) ${filename} stem.
	argus -r "$pcapfile" -w "pcap_${time_window}secs/${filename}.argus" -S "$time_window"
	ra -unzr "pcap_${time_window}secs/${filename}.argus" -c , -s rank stime flgs proto saddr sport daddr dport pkts bytes state ltime seq dur mean stddev sum min max spkts dpkts sbytes dbytes rate srate drate > "pcap_${time_window}secs/${filename}.csv"
	# delete subflow argus file to save space
	rm "pcap_${time_window}secs/${filename}.argus"
done

cd "pcap_${time_window}secs"
# merge subflow
python3 "${code_path}/merge_subflow_csv.py" "${time_window}secs"
cd ..
true
c5cf8457f895a7a6e9f6e3c2fe1ea502b6298eee
Shell
koraynilay/linux-custom-scripts
/stty1yn.sh
UTF-8
386
3.234375
3
[]
no_license
#!/bin/sh
# Toggle automatic start of the de/wm launcher in ~/.zprofile by rewriting
# the launcher command in place with sed.
profile="$HOME/.zprofile" # zprofile location
launcher='starti3'        # script/command to substitute (don't use "/")

case "$1" in
	y|yes|on|true)
		# Re-enable: turn the disabled marker line back into the command.
		sed -i "s/echo no start \#$launcher/$launcher/" "$profile"
		;;
	n|no|off|false)
		# Disable: replace the command with an inert marker line.
		sed -i "s/$launcher/echo no start \#$launcher/" "$profile"
		;;
	*)
		printf "Usage:\n$0 [y|yes|on|true]\tto enable autostart de/wm\n$0 [n|no|off|false]\tto disable autostart de/wm\n"
		;;
esac
true
c53209a2ef79fffb9e5737b1e3457ef77ce0d238
Shell
mrquincle/crownstone-bash
/get_events.sh
UTF-8
236
2.59375
3
[]
no_license
#!/bin/bash
# Stream server-sent events from the event server, prefixing each line with
# a timestamp via ts(1). Credentials come from login.sh ($auth_header,
# $event_server).
. login.sh

endpoint=sse

echo "Install moreutils to get timestamp (ts) command"

curl --silent --no-buffer --http2 \
	-H 'Accept:text/event-stream' \
	-H "$auth_header" \
	"$event_server/$endpoint" \
	| ts '[%Y-%m-%d %H:%M:%.S]'
true
49988e578bc979e9d149671910d92072dcb00c91
Shell
KeiyaTomizawa/rc_files
/.zshrc
UTF-8
10,904
2.84375
3
[]
no_license
# Search path for the cd command #if [ -z ${ZLOGIN:=} ]; #then # source ${ZDOTDIR}/.zlogin #fi export ZDOTDIR=/home/${USER}/.zsh export DISPLAY=localhost:0.0 export LANG=ja_JP.UTF-8 cdpath=(.. ~ ) # Use hard limits, except for a smaller stack and no core dumps unlimit #limit stack 8192 limit -s #umask 022 # Set up aliases alias mv='nocorrect mv' # no spelling correction on mv alias cp='nocorrect cp' # no spelling correction on cp alias mkdir='nocorrect mkdir' # no spelling correction on mkdir alias grep='grep --color' #alias po=popd alias ll='ls -l' alias la='ls -a' alias sl='ls' #alias pdflatex='pdflatex test$1 ; pdfcrop -m 5 test$1 test$1' #alias pdflatex='pdflatex test$1 ; pdfcrop -m 5 test$1 test$1' alias gd='dirs -v; echo -n "select number: "; read newdir; cd -"$newdir"' pdf() { if [ $1 = "-c" ]; then pdflatex $2 ; pdfcrop -m 5 `echo $2 | cut -d '.' -f 1`.pdf `echo $2 | cut -d '.' -f 1`.pdf ;convert -density 500 -alpha off -trim -transparent white `echo $2 | cut -d '.' -f 1`.pdf `echo $2 | cut -d '.' -f 1`.png elif [ $1 = "-e" ]; then pdflatex $2 ; pdfcrop -m 5 `echo $2 | cut -d '.' -f 1`.pdf `echo $2 | cut -d '.' -f 1`.pdf ;pdftops `echo $2 | cut -d '.' -f 1`.pdf `echo $2 | cut -d '.' -f 1`.eps elif [ $1 = "-a" ]; then pdflatex $2 ; pdfcrop -m 5 `echo $2 | cut -d '.' -f 1`.pdf `echo $2 | cut -d '.' -f 1`.pdf ;convert -density 500 -alpha off -trim -transparent white `echo $2 | cut -d '.' -f 1`.pdf `echo $2 | cut -d '.' -f 1`.png ;apvlv `echo $2 | cut -d '.' -f 1`.pdf else pdflatex $1 ; pdfcrop -m 5 `echo $1 | cut -d '.' -f 1`.pdf `echo $1 | cut -d '.' -f 1`.pdf ; fi } #screen shared dir #alias sd='selector dirref; [[ $? -eq 0 ]] && echo -n "select number: "; read newdir; cd `selector dirref $newdir`' #alias xs='selector forward_x; [[ $? 
-eq 0 ]] && echo -n "select number: "; read x; export DISPLAY=`selector forward_x $x`' export LSCOLORS=ExGxFxdxBxDxDxBxBxExEx export LS_COLORS='no=00:fi=00:di=01;34:ln=01;36:pi=40;33:so=01;35:bd=40;33;01:or=01;05;37;41:mi=01;05;37;41:ex=01;31:*.tar=01;31:*.tar.gz=01;31:*.pdf=01;32:*.eps=01;33:*.eps2=01;33' if [ -x `where dircolors` ] && [ -e ${ZDOTDIR}/.dircolors ]; then eval `dircolors ${ZDOTDIR}/.dircolors` fi alias lscolor='ls -F --color=always --show-control-char' alias ls='ls -F --color=always --show-control-char' zstyle ':completion:*:default' list-colors ${(s.:.)LS_COLORS} if [ -n ${TERM:=} ]; then case $TERM in rxvt-unicode) TERM=xterm ;; esac fi #### time REPORTTIME=4 TIMEFMT="\ The name of this job. :%J CPU seconds spent in user mode. :%U CPU seconds spent in kernel mode. :%S Elapsed time in seconds. :%E The CPU percentage. :%P The maximum memory useage :%M Kb" # Global aliases -- These do not have to be # at the beginning of the command line. #alias -g M='|more' #alias -g H='|head' #alias -g T='|tail' alias -g RE='| sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"' #manpath=($X11HOME/man /usr/man /usr/lang/man /usr/local/man /usr/share/man) #export MANPATH # # Set prompts setopt prompt_subst PROMPT='%{%(?.%{%}.%{%})%}%m%{%}%# ' # default prompt RPROMPT='%{%}%~%{%} %t' # prompt for right side of screen #RPROMPT='`~/.tools/apm.pl`[%t]' # prompt for right side of screen # Some environment variables #export MAIL=/var/spool/mail/$USERNAME #export LESS=-cex3M export GREPOPTIONS=--color #export HELPDIR=/usr/local/lib/zsh/help # directory for run-help function to find docs #MAILCHECK=300 HISTFILE="${ZDOTDIR}/zhistory" HISTSIZE=10000 SAVEHIST=10000 setopt extended_history # コマンドの開始時刻と経過時間を登録 setopt hist_ignore_dups # 直前のコマンドと同一ならば登録しない setopt hist_ignore_all_dups # 登録済コマンド行は古い方を削除 setopt hist_reduce_blanks # 余分な空白は詰めて登録(空白数違い登録を防ぐ) #setopt append_history # zsh を終了させた順にファイルに記録(デフォルト) #setopt inc_append_history # 同上、ただしコマンドを入力した時点で記録 setopt share_history # 
ヒストリの共有。(append系と異なり再読み込み不要、これを設定すれば append 系は不要) setopt hist_no_store # historyコマンドは登録しない setopt hist_ignore_space # コマンド行先頭が空白の時登録しない(直後ならば呼べる) DIRSTACKSIZE=20 setopt list_packed # 補完候補リストを詰めて表示 setopt print_eight_bit # 補完候補リストの日本語を適正表示 #setopt menu_complete # 1回目のTAB で補完候補を挿入。表示だけの方が好き setopt no_clobber # 上書きリダイレクトの禁止 setopt no_unset # 未定義変数の使用の禁止 setopt no_hup # logout時にバックグラウンドジョブを kill しない setopt no_beep # コマンド入力エラーでBEEPを鳴らさない setopt extended_glob # 拡張グロブ setopt numeric_glob_sort # 数字を数値と解釈して昇順ソートで出力 setopt auto_cd # 第1引数がディレクトリだと cd を補完 setopt correct # スペルミス補完 setopt no_checkjobs # exit 時にバックグラウンドジョブを確認しない setopt ignore_eof # C-dでlogoutしない(C-dを補完で使う人用) setopt pushd_to_home # 引数なしpushdで$HOMEに戻る(直前dirへは cd - で) setopt pushd_ignore_dups # ディレクトリスタックに重複する物は古い方を削除 #setopt pushd_silent # pushd, popd の度にディレクトリスタックの中身を表示しない setopt interactive_comments # コマンド入力中のコメントを認める #setopt rm_star_silent # rm * で本当に良いか聞かずに実行 #setopt rm_star_wait # rm * の時に 10秒間何もしない #setopt chase_links # リンク先のパスに変換してから実行。 # Watch for my friends #watch=( $(<~/.friends) ) # watch for people in .friends file #watch=(notme) # watch for everybody but me #LOGCHECK=300 # check every 5 min for login/logout activity #WATCHFMT='%n %a %l from %m at %t.' 
# Set/unset shell options setopt notify globdots correct pushdtohome cdablevars autolist setopt automenu setopt correctall autocd recexact longlistjobs setopt autoresume histignoredups pushdsilent noclobber setopt autopushd pushdminus extendedglob rcquotes mailwarning unsetopt bgnice autoparamslash ## Autoload zsh modules when they are referenced #zmodload -a zsh/stat stat zmodload -a zsh/zpty zpty zmodload -a zsh/zprof zprof zmodload -a zsh/mapfile mapfile # Some nice key bindings #bindkey '^X^Z' universal-argument ' ' magic-space #bindkey '^X^A' vi-find-prev-char-skip #bindkey '^Xa' _expand_alias #bindkey '^Z' accept-and-hold #bindkey -s '\M-/' \\\\ #bindkey -s '\M-=' \| #bindkey -v # vi key bindings bindkey -e # emacs key bindings bindkey ' ' magic-space # also do history expansion on space bindkey '^I' complete-word # complete on tab, leave expansion to _expand bindkey '^P' history-beginning-search-backward # 先頭マッチのヒストリサーチ bindkey '^N' history-beginning-search-forward # 先頭マッチのヒストリサーチ bindkey '^V' history-incremental-search-forward #function------------------------------------------------------ typeset -U path typeset -U cdpath typeset -U fpath typeset -U manpath typeset -U hosts typeset -U known_hosts #typeset -U path cdpath fpath manpath hosts known_hosts # Setup new style completion system. To see examples of the old style (compctl # based) programmable completion, check Misc/compctl-examples in the zsh # distribution. 
autoload -U compinit compinit -u autoload -U promptinit promptinit; zstyle ':completion::complete:*' use-cache 1 autoload -U colors colors zstyle ':completion:*:processes' command 'ps x' zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([%0-9]#)*=0=01;31' # compdef _tex platex # platex に .tex を # Completion Styles # list of completers to use zstyle ':completion:*::::' completer _expand _complete _ignored _approximate # allow one error for every three characters typed in approximate completer zstyle -e ':completion:*:approximate:*' max-errors \ 'reply=( $(( ($#PREFIX+$#SUFFIX)/3 )) numeric )' # insert all expansions for expand completer zstyle ':completion:*:expand:*' tag-order all-expansions # formatting and messages zstyle ':completion:*' verbose yes zstyle ':completion:*:descriptions' format '%B%d%b' zstyle ':completion:*:messages' format '%d' zstyle ':completion:*:warnings' format 'No matches for: %d' zstyle ':completion:*:corrections' format '%B%d (errors: %e)%b' zstyle ':completion:*' group-name '' # match uppercase from lowercase zstyle ':completion:*' matcher-list 'm:{a-z}={A-Z}' # offer indexes before parameters in subscripts zstyle ':completion:*:*:-subscript-:*' tag-order indexes parameters # command for process lists, the local web server details and host completion #zstyle ':completion:*:processes' command 'ps -o pid,s,nice,stime,args' #zstyle ':completion:*:urls' local 'www' '/var/www/htdocs' 'public_html' zstyle '*' hosts $hosts # Filename suffixes to ignore during completion (except after rm command) zstyle ':completion:*:*:(^rm):*:*files' ignored-patterns '*?.o' '*?.c~' \ '*?.old' '*?.pro' # the same for old style completion #fignore=(.o .c~ .old .pro) zmodload -i zsh/complist #autoload -U zsh/complist zstyle ':completion:*:default' menu select=1 bindkey -M menuselect '^h' vi-backward-char bindkey -M menuselect '^j' vi-down-line-or-history bindkey -M menuselect '^k' vi-up-line-or-history bindkey -M menuselect '^l' vi-forward-char # 
ignore completion functions (until the _ignored completer) zstyle ':completion:*:functions' ignored-patterns '_*' global-alias-space () { zle _expand_alias; zle .self-insert } zle -N global-alias-space bindkey ' ' global-alias-space zstyle :completion:\* regular false #bindkey -s " " '^Xa^V ' export PYTHONHISTORY=~/.pyhistory export PYTHONSTARTUP=~/.pystartup #path=($path /opt/tafsm/bin/) #export PYTHONPATH="/usr/projects/lib64/python2.7/site-packages" #path=($path /opt/tafsm/bin/) #path=($path /cygdrive/c/Program\ Files/MongoDB/Server/3.2/bin ) #export PYTHONPATH="/opt/tafsm/lib64/python2.7/site-packages" #./Xming.exe :0 -clipboard -notrayicon -c -multiwindow -reset -logverbose 0 & export PATH="$HOME/.rbenv/bin:$PATH" eval "$(rbenv init -)" export PATH="$HOME/mongodb-win32-x86_64-enterprise-windows-64-3.2.9/bin:$PATH" #alias docker='/usr/local/bin/console.exe docker'
true
783e9a0497cc03efe6db68e2fa4442e5977810a3
Shell
VDR4Arch/vdr4arch
/plugins/vdr-skinflat/PKGBUILD
UTF-8
1,199
2.71875
3
[]
no_license
# This PKGBUILD is part of the VDR4Arch project [https://github.com/vdr4arch] # Maintainer: Christopher Reimer <mail+vdr4arch[at]c-reimer[dot]de> pkgname=vdr-skinflat pkgver=0.1.1 _vdrapi=2.6.3 pkgrel=30 pkgdesc="Simple and slim skin for VDR" url="https://github.com/vdr-projects/vdr-plugin-skinflat" arch=('x86_64' 'i686' 'arm' 'armv6h' 'armv7h') license=('GPL2') depends=('graphicsmagick' "vdr-api=${_vdrapi}") _plugname=${pkgname//vdr-/} source=("$pkgname-$pkgver.tar.gz::https://github.com/vdr-projects/vdr-plugin-skinflat/archive/refs/tags/$pkgver.tar.gz" "50-$_plugname.conf") backup=("etc/vdr/conf.avail/50-$_plugname.conf") sha256sums=('ef0881dc341c2d2de6a784cf0dc8cbbe88a4ea33ef4921928646d847b8789e1a' 'fddf7ea4e1d84a9f6797fc8a9b4d8df129afd2b49d3f5c23874dde4aa752d8d8') prepare() { cd "${srcdir}/vdr-plugin-$_plugname-$pkgver" sed -i 's/Magick++/GraphicsMagick++/g' Makefile } build() { cd "${srcdir}/vdr-plugin-$_plugname-$pkgver" make } package() { cd "${srcdir}/vdr-plugin-$_plugname-$pkgver" make DESTDIR="$pkgdir" install install -Dm644 "$srcdir/50-$_plugname.conf" "$pkgdir/etc/vdr/conf.avail/50-$_plugname.conf" chown -R 666:666 "$pkgdir/var/lib/vdr" }
true
799d51c0187b521eee3d189152d4999763daaa25
Shell
Offirmo/web-tech-experiments
/tosort/2015/simpli/contrib/offirmo-shell-lib/bin/osl_lib_semver.sh
UTF-8
746
3.71875
4
[ "Unlicense", "LicenseRef-scancode-public-domain" ]
permissive
#! /bin/bash ## Offirmo Shell Library ## https://github.com/Offirmo/offirmo-shell-lib ## ## This file defines : ## version manipulation functions ## ## This file is meant to be sourced : ## source osl_lib_semver.sh ## ## inspired from : ## https://www.npmjs.org/doc/misc/semver.html source osl_lib_version.sh OSL_SEMVER_compute_next_incompatible_version() { local current_version=$1 local return_value=1 ## false until found otherwise ## easy : just increase the first digit. ## unless it's 0... local comp_result=$(OSL_SEMVER_compare $tested_version $reference_version) #echo "comparison result : $comp_result" if [[ $comp_result -lt 0 ]]; then ## fine, it is #echo "le OK" return_value=0 fi return $return_value }
true
d49f5927268873555dddd597ddb20ba5543744c9
Shell
rharriszzz/zos_python
/install_jupyter_and_bokeh
UTF-8
3,570
2.78125
3
[]
no_license
#!/bin/sh # https://www.ibm.com/products/open-enterprise-python-zos/pricing # click on "Try Free Edition" if [[ "$ENVIRONMENT_IS_CLEAN" != "YES" ]]; then keep="HOME=$HOME LOGNAME=$LOGNAME USER=$USER" keep="$keep _BPXK_AUTOCVT=ON ENVIRONMENT_IS_CLEAN=YES PATH=$PATH" exec /bin/env -i $keep "$0" "$@" fi # === edit these lines === PYZ=$HOME/pyz PYZ_DATE=2020-06-12 PYZ_DATE=2020-07-24 PYZ_PAX=$HOME/HAMB380-$PYZ_DATE.runnable.pax.Z OLD_ROCKET_PORTED_TOOLS=/rsusr/ported.old ROCKET_PORTED_TOOLS=/rsusr/ported # we need to use the "make" that was compiled in ebcdic mode, # otherwise we get errors like this ".deps/jmemnobs.Plo:1: *** missing separator. Stop." # to fix this we will have to insert lines like "@am__fastdepCC_TRUE@ $(AM_V_at)chtag -t -c 1047 $(DEPDIR)/$*.Plo" # in all the files "Makefile.in" export ROCKET_PORTED_TOOLS_MAKE=$OLD_ROCKET_PORTED_TOOLS/bin/make clean='yes' # === end of lines to edit === log=install-$PYZ_DATE.log touch $log chtag -t -c 819 $log exec > $log 2>&1 set -x mkdir -p $PYZ cd $PYZ export PATH=/bin export LIBPATH=/usr/lib export PYZ_DIR=usr/lpp/IBM/cyp/v3r8/pyz export PYZ_PREFIX=$PYZ/$PYZ_DIR export PATH=$PYZ_PREFIX/bin:$PATH export LIBPATH=$PYZ_PREFIX/lib:$LIBPATH export _BPXK_AUTOCVT=ON if [[ "$clean" == "yes" && -d usr ]]; then rm -rf usr fi if [[ ! -d usr ]]; then pax -z -r -f $PYZ_PAX find . 
> pyz-$PYZ_DATE.list fi export CC=/bin/xlclang export CXX=/bin/xlclang++ # -W'c,DLL' is not needed with xlclang (or with xlc -q64) CFLAGS=`python3 -c 'import sysconfig; print(sysconfig.get_config_var("CFLAGS"))' | sed -e "s/-W'c,DLL' //"` export CFLAGS CFLAGSFORSHARED=`python3 -c 'import sysconfig; print(sysconfig.get_config_var("CFLAGSFORSHARED"))'` export CFLAGSFORSHARED LDSHARED=`python3 -c 'import sysconfig; print(sysconfig.get_config_var("LDSHARED"))'` export LDSHARED pip3 install --verbose certifi export SSL_CERT_FILE=$PYZ_PREFIX/lib/python3.8/site-packages/certifi/cacert.pem # I can't get /bin/patch to work, so let's use sed if [[ 1 == 1 ]]; then # https://developer.ibm.com/node/2017/04/09/working-with-modules-and-the-node-js-technical-preview-for-zos/ export _C89_CCMODE=1 export _CC_CCMODE=1 export _CXX_CCMODE=1 else unset _C89_CCMODE unset _CC_CCMODE unset _CXX_CCMODE # fix: FSUM3010 Specify a file with the correct suffix (.C, .hh, .i, .c, .i, .s, .o, .x, .p, .I, or .a), or a corresponding data set name, instead of -obuild/lib.os390-27.00-8561-3.8/zmq/libzmq.so. f=$PYZ_DIR/lib/python3.8/distutils/unixccompiler.py if [[ ! -f $f.save ]]; then cp $f $f.save fi sed \ -e '/ld_args = (objects + self.objects +/{' \ -e 'N' \ -e 'c\ \ ld_args = ([opt for opt in lib_opts if opt.startswith("-L")] +\ \ ["-o", output_filename] + objects + self.objects +\ \ [opt for opt in lib_opts if not opt.startswith("-L")])' \ -e '}' \ -e '/ld_args.extend(\["-Wl,dll", new_pythonx\])/{' \ -e 'N' \ -e 'c\ \ ld_args.append(new_pythonx)\ \ ld_args[0:0] = ["-Wl,dll", "-q64"]' \ -e '}' \ $f.save > $f fi rm *.gz . ./build_pkg-config . ./build_libzmq . ./build_zlib . ./build_jpeg export PIP_IGNORE_INSTALLED=0 pip3 install --verbose wheel export PREFIX=$PYZ_PREFIX . ./build_pyzmq # bokeh needs Pillow which needs zlib and jpeg # notebook 6.1 requires argon2-cffi, which we can't build pip3 install notebook==6.0.3 bokeh cffi==1.14.0 numpy==1.18.2 cryptography==2.8
true
25a188a91eef419b4ac98fc1e2097a36021a182b
Shell
shinesolutions/aem-helloworld-custom-stack-provisioner
/post-common.sh
UTF-8
1,196
3.578125
4
[ "Apache-2.0" ]
permissive
#!/usr/bin/env bash
# AEM Hello World Custom Stack Provisioner post-common step:
# applies the Puppet manifest, then verifies the result with InSpec.
set -o nounset

BASE_DIR=$(dirname "$0")

echo "[aem-helloworld] Executing Custom Stack Provisioner post-common step..."

# Translate puppet detailed exit codes to basic convention 0 to indicate success.
# More info on Puppet --detailed-exitcodes https://puppet.com/docs/puppet/5.3/man/agent.html
#
# Arguments:
#   $1 - exit code reported by `puppet apply --detailed-exitcodes`
# Returns:
#   0 when the code means success (0) or success-with-changes (2);
#   otherwise terminates the whole script with that exit code.
translate_puppet_exit_code() {
  # local keeps the variable from leaking into the script's global scope
  # (the previous version clobbered a global `exit_code`).
  local exit_code="$1"
  # 0 (success) and 2 (success with changes) are considered as success.
  # Everything else is considered to be a failure.
  if [ "$exit_code" -ne 0 ] && [ "$exit_code" -ne 2 ]; then
    exit "$exit_code"
  fi
  return 0
}

# Puppet's detailed exit codes are non-zero on success-with-changes, so
# errexit must be off while we capture and translate its status.
set +o errexit
/opt/puppetlabs/bin/puppet apply \
  --detailed-exitcodes \
  --modulepath "${BASE_DIR}"/modules \
  --execute "include aem_helloworld::post_common"
translate_puppet_exit_code "$?"
set -o errexit

# Specify HOME env var explicitly for inspec here due to intermittent failure
# when pry gem attempts to resolve its config https://github.com/pry/pry/blob/v0.12.2/lib/pry/pry_class.rb#L5
# NOTE: their master branch no longer has pry config with home dir tilde expansion
HOME=/root /opt/puppetlabs/puppet/bin/inspec exec "${BASE_DIR}"/test/post_common.rb
true
ecceceec0125f50a9ab5e8922045e7a78da5abcf
Shell
rgabriana/Work
/old_work/svn/gems/tags/2.1.0_alpha_rc1/scripts/support.sh
UTF-8
1,180
3.671875
4
[]
no_license
#!/bin/bash #################################################### #Database dump, logs directory and version file are #archived, compressed in the Enlighted/support #directory which then can be exported to the GUI #################################################### # Database details PGHOST="localhost" PGUSER="postgres" PGDATABASE="ems" TOMCAT_PATH=/var/lib/tomcat6 current_time=`date '+%m%d%Y%H%M%S'` echo "Current time = $current_time" cd $TOMCAT_PATH/Enlighted/support mkdir $current_time cd $current_time #backup the logs directory echo "backing up the logs directory..." tar zcvf logs.tar $TOMCAT_PATH/logs/* #backup the database echo "backing up the database..." /usr/bin/pg_dump -i -U $PGUSER -h $PGHOST -b -f "database_backup.sql" $PGDATABASE #backup version file cp -f $TOMCAT_PATH/webapps/ems/META-INF/EMSMANIFEST.MF . cd .. echo "archiving/compressing the files..." tar zcvf enLighted_support_$current_time.tar $current_time/* gzip enLighted_support_$current_time.tar cd $TOMCAT_PATH/Enlighted/support rm -rf $current_time echo "finished generating troubleshooting file enLighted_support_$current_time.tar.gz"
true
40753ae1fcfdaf5086f17fac0ab6383131d3d2d1
Shell
dimitrsa/opsys2018-assignment1-1865
/script1a.sh
UTF-8
429
3.734375
4
[]
no_license
#!/bin/bash input="file.in" while IFS= read -r line do # check if input line is a comment case "$line" in \#*) continue ;; esac line2="${line//\//\\}" if wget -q "$line" -O index.html > /dev/null; then if [ -f "$line2.html" ]; then cmp --silent "$line2".html index.html || echo "$line" else echo "$line INIT" fi else echo "$line FAIL" fi mv index.html "$line2".html done <"$input"
true
b4debfd5525bda54ca1e083c3a152d85b9e9f732
Shell
mlevin2/useful-things
/scripts/slow
UTF-8
465
2.875
3
[]
no_license
#!/usr/bin/env bash

# makes something run really nicely
# ---------------------------------------------------------------------------
# Runs it a nice level 19 (the lowest priority), ionice class 3 (the nicest),
# and with trickle (throttles network activity). Useful for running something
# non-urgent that you don't want hogging resources
#
# Requires trickle

# Write the command into a throw-away executable script so that
# nice/ionice/trickle can launch it as a single program.
FILE=$(mktemp)

# Clean the temp script up on any exit path, including interrupts
# (the old trailing `rm` was skipped when the command was killed).
trap 'rm -f -- "$FILE"' EXIT

# %q re-quotes each argument individually, so arguments containing spaces or
# shell metacharacters survive the round trip through the script file.
# (The previous `echo "$@"` flattened them into one string and let the shell
# re-split the words.)
printf '%q ' "$@" >"$FILE"
chmod +x "$FILE"

nice -n 19 ionice -c3 trickle -u 5 -d 5 "$FILE"
true
26505294b8f7a37a8889e3641c65cb6460226027
Shell
stevemandl/debian_copyist_brother
/tools/list_printers_files_with_metadata.sh
UTF-8
2,655
3.78125
4
[ "ISC", "GPL-2.0-only" ]
permissive
#! /usr/bin/env bash
# Walks the printer archive list produced by the web_brother.sh tools and
# emits one tab-separated line per interesting file:
#   md5sum  file_name  mtime_epoch  archive_name  license  file_path
# The result is tee'd to lists/printers_files_with_metadata.txt.

error_message="error: pass the directory created by the web_brother.sh tools that contains list and files"

# Workspace: $WEBBROTHER_WORKSPACE if set, else ../material/ next to this script.
if [ "${WEBBROTHER_WORKSPACE}" != '' ]
then
    WORKSPACE="${WEBBROTHER_WORKSPACE}"
else
    WORKSPACE="$(cd "$(dirname $0)"; pwd)/../material/"
fi
! [ -d "${WORKSPACE}" ] && echo "${error_message}" && exit 1

# Input: "<archive_name>\t<license>" lines.
archives_list="${WORKSPACE}/lists/printers_archives_with_licenses.txt"
! [ -f "${archives_list}" ] && echo "${error_message}" && exit 1

# Scratch area for the data.tar.gz members extracted from .deb archives.
tmp_dir="/tmp/list_files.d/"
mkdir -p "${tmp_dir}"

# Echoes the archive/file type inferred from the name's suffix
# (one of: rpm, deb, ppd.gz, tar.gz; otherwise the name passes through).
function _get_archive_type {
    echo "${1}" | sed -e 's/.*\(rpm\|deb\|ppd.gz\|tar.gz\)/\1/'
}

# Iterates over the (deduplicated) archive names and prints the metadata
# line for every file found inside supported archives.
function _create_list {
    cat "${archives_list}" | sed -e 's/\t.*//' | sort | uniq | while read archive_name
    do
        # License is the second tab-separated field of the matching list line.
        archive_license="$(cat "${archives_list}" | sort | uniq | grep "^${archive_name}" | sed -e 's/.*\t//')"
        archive_type="$(_get_archive_type ${archive_name})"
        if [ "${archive_type}" = 'deb' ]
        then
            # A .deb is an ar archive; its payload is the data.* member.
            embedded_data_archive_name="$(ar t "${WORKSPACE}/files/${archive_name}" | grep '^data.')"
            if [ "$(_get_archive_type "${embedded_data_archive_name}")" = 'tar.gz' ]
            then
                (cd "${tmp_dir}"; ar x "${WORKSPACE}/files/${archive_name}" "${embedded_data_archive_name}")
                # List the payload's regular files (directory entries end in /).
                tar -tzf "${tmp_dir}${embedded_data_archive_name}" | grep -v '/$' | while read file_path
                do
                    file_name=$(basename "${file_path}")
                    file_type="$(_get_archive_type "${file_name}")"
                    # bad packagers put previous .deb and .rpm inside the .deb
                    if [ "${file_type}" != 'rpm' -a "${file_type}" != 'deb' -a "${file_name}" != 'changelog.Debian.gz' -a "${file_name}" != 'copyright' ]
                    then
                        # Checksum the member's content and take its mtime from
                        # tar's --full-time listing, converted to epoch seconds.
                        file_sum="$(tar -Oxzf "${tmp_dir}${embedded_data_archive_name}" "${file_path}" | md5sum | sed -e 's/ -$//')"
                        file_date="$(date +%s --date="$(tar --full-time --utc -tzvf "${tmp_dir}${embedded_data_archive_name}" "${file_path}" | sed -e 's/.*\([0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9]\).*/\1/')")"
                        printf '%s\t%s\t%s\t%s\t%s\t%s\n' ${file_sum} ${file_name} ${file_date} ${archive_name} ${archive_license} ${file_path}
                    fi
                done
                rm "${tmp_dir}${embedded_data_archive_name}"
            fi
        elif [ "${archive_type}" = 'ppd.gz' ]
        then
            # Standalone gzipped PPD: checksum the decompressed content;
            # no timestamp is available, so a zero epoch is recorded.
            file_name="$(basename "${archive_name}" | sed -e 's/.gz$//')"
            file_path="./${file_name}"
            file_sum="$(cat "${WORKSPACE}/files/${archive_name}" | gunzip | md5sum | sed -e 's/ -$//')"
            file_date='0000000000'
            printf '%s\t%s\t%s\t%s\t%s\t%s\n' ${file_sum} ${file_name} ${file_date} ${archive_name} ${archive_license} ${file_path}
        fi
    done
}

_create_list | tee "${WORKSPACE}/lists/printers_files_with_metadata.txt"
rmdir "${tmp_dir}"

#EOF
true
2934ca0292395cb30b705af6498a47035d300821
Shell
kennedysgarage/Voronoi-Diacam
/paper/js/paper/build/preprocess.sh
UTF-8
1,332
3.53125
4
[ "MIT", "LicenseRef-scancode-generic-cla" ]
permissive
#!/bin/sh # Paper.js # # This file is part of Paper.js, a JavaScript Vector Graphics Library, # based on Scriptographer.org and designed to be largely API compatible. # http://scriptographer.org/ # # Copyright (c) 2011, Juerg Lehni & Jonathan Puckey # http://lehni.org/ & http://jonathanpuckey.com/ # # Distributed under the MIT license. See LICENSE file for details. # # All rights reserved. # preprocess.sh # # A simple code preprocessing wrapper that uses a combination of cpp, jssrip.py # and sed to preprocess JavaScript files containing C-style preprocess macros # (#include, #ifdef, etc.). Three options offer control over wether comments # are preserved or stripped and whitespaces are compressed. # # Usage: # preprocess.sh MODE SOURCE DESTINATION ARGUMENTS # # ARGUMENTS: # e.g. "-DBROWSER" # # MODE: # commented Preprocessed but still formated and commented # stripped Formated but without comments # compressed Uses UglifyJS to reduce file size VERSION=0.21 DATE=$(git log -1 --pretty=format:%ad) COMMAND="./prepro.js -d '{ \"version\": $VERSION, \"date\": \"$DATE\" }' -d '$4' $2" case $1 in stripped) eval "$COMMAND -c" > $3 ;; commented) eval $COMMAND > $3 ;; compressed) eval $COMMAND > temp.js ../../uglifyjs/bin/uglifyjs temp.js --extra --unsafe --reserved-names "$eval,$sign" > $3 rm temp.js ;; esac
true
614ccdbca512a50855263fdf6c7cc3fe8233c8a5
Shell
ZJULearning/RMI
/crf/crf_eval.sh
UTF-8
2,958
3.25
3
[ "MIT" ]
permissive
#!/bin/bash # python PATH export PYTHONPATH="${PYTHONPATH}:${HOME}/github" # hyperparameter echo -n "input the gpu (seperate by comma (,) ): " read gpus export CUDA_VISIBLE_DEVICES=${gpus} echo "using gpus ${gpus}" # replace comma(,) with empty #gpus=${gpus//,/} # the number of characters #num_gpus=${#gpus} #echo "the number of gpus is ${num_gpus}" # choose the base model echo "" echo "0 -- deeplabv3" echo "1 -- deeplabv3+" echo "2 -- pspnet" echo -n "choose the base model: " read model_choose case ${model_choose} in 0 ) base_model="deeplabv3" ;; 1 ) base_model="deeplabv3+" ;; 2 ) base_model="pspnet" ;; * ) echo "The choice of the segmentation model is illegal!" exit 1 ;; esac # choose the backbone echo "" echo "0 -- resnet_v1_50" echo "1 -- resnet_v1_101" echo "2 -- resnet_v1_152" echo -n "choose the base network: " read base_network #base_network=1 case ${base_network} in 0 ) backbone="resnet50";; 1 ) backbone="resnet101";; 2 ) backbone="resnet152";; * ) echo "The choice of the base network is illegal!" exit 1 ;; esac echo "The backbone is ${backbone}" echo "The base model is ${base_model}" # choose the batch size batch_size=1 # choose the dataset echo "" echo "0 -- PASCAL VOC2012 dataset" echo "1 -- Cityscapes" echo "2 -- CamVid" echo -n "input the dataset: " read dataset if [ ${dataset} = 0 ] then # data dir data_dir="${HOME}/dataset/VOCdevkit/VOC2012" checkpoint_name="deeplab-resnet_ckpt_30406.pth" train_split='val' dataset=pascal elif [ ${dataset} = 1 ] then data_dir="${HOME}/dataset/Cityscapes/" dataset=cityscapes elif [ ${dataset} = 2 ] then data_dir="${HOME}/dataset/CamVid/" checkpoint_name="deeplab-resnet_ckpt_5800.pth" dataset=camvid train_split='test' else echo "The choice of the dataset is illegal!" exit 1 fi echo "The data dir is ${data_dir}, the batch size is ${batch_size}." 
# set the work dir work_dir="${HOME}/github/RMI/crf" # ckpt directory ##################################################### # STE YOUR CHECKPOINT FILE HERE ##################################################### model_name=TBD resume=TBD # model dir and output dir model_dir=TBD output_dir=TBD if [ -d ${output_dir} ] then echo "save outputs into ${output_dir}" else mkdir -p ${output_dir} echo "make the directory ${output_dir}" fi # crf steps # choose the dataset echo "" echo -n "input the iteration step of CRF (1 ~ 10):" read crf_iter_steps # train the model #for crf_iter_steps in 5 #do python ${work_dir}/crf_refine.py --resume ${resume} \ --seg_model ${base_model} \ --backbone ${backbone} \ --model_dir ${model_dir} \ --train_split ${train_split} \ --gpu_ids ${gpus} \ --checkname deeplab-resnet \ --dataset ${dataset} \ --data_dir ${data_dir} \ --crf_iter_steps ${crf_iter_steps} \ --output_dir ${output_dir} #done echo "Test Finished!!!"
true
fab889655cb801de884cbb218cec248eff3498a9
Shell
tiburon-777/OTUS_HighLoad
/cicd/init.sh
UTF-8
2,914
3.03125
3
[]
no_license
#!/bin/bash until docker exec mysql_master sh -c 'export MYSQL_PWD=root; mysql -u root -e ";"' do echo "Waiting for mysql_master database connection..." sleep 4 done priv_stmt='INSTALL PLUGIN rpl_semi_sync_master SONAME "semisync_master.so"; CREATE DATABASE IF NOT EXISTS app CHARACTER SET utf8 COLLATE utf8_general_ci; GRANT ALL ON app.* TO "app"@"%" IDENTIFIED BY "app"; GRANT REPLICATION SLAVE ON *.* TO "mydb_slave_user"@"%" IDENTIFIED BY "mydb_slave_pwd"; FLUSH PRIVILEGES;' docker exec mysql_master sh -c "export MYSQL_PWD=root; mysql -u root -e '$priv_stmt'" until docker exec mysql_slave1 sh -c 'export MYSQL_PWD=root; mysql -u root -e ";"' do echo "Waiting for mysql_slave1 database connection..." sleep 4 done until docker exec mysql_slave2 sh -c 'export MYSQL_PWD=root; mysql -u root -e ";"' do echo "Waiting for mysql_slave2 database connection..." sleep 4 done priv_stmt='INSTALL PLUGIN rpl_semi_sync_slave SONAME "semisync_slave.so"; CREATE DATABASE IF NOT EXISTS app CHARACTER SET utf8 COLLATE utf8_general_ci; GRANT ALL ON app.* TO "app"@"%" IDENTIFIED BY "app"; FLUSH PRIVILEGES;' docker exec mysql_slave1 sh -c "export MYSQL_PWD=root; mysql -u root -e '$priv_stmt'" docker exec mysql_slave2 sh -c "export MYSQL_PWD=root; mysql -u root -e '$priv_stmt'" docker-ip() { docker inspect --format '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$@" } MS_STATUS=`docker exec mysql_master sh -c 'export MYSQL_PWD=root; mysql -u root -e "SHOW MASTER STATUS"' | grep mysq` CURRENT_LOG=`echo $MS_STATUS | awk '{print $1}'` CURRENT_POS=`echo $MS_STATUS | awk '{print $2}'` start_slave_stmt="CHANGE MASTER TO MASTER_HOST='$(docker-ip mysql_master)',MASTER_USER='mydb_slave_user',MASTER_PASSWORD='mydb_slave_pwd',MASTER_LOG_FILE='$CURRENT_LOG',MASTER_LOG_POS=$CURRENT_POS; START SLAVE;" start_slave_cmd='export MYSQL_PWD=root; mysql -u root -e "' start_slave_cmd+="$start_slave_stmt" start_slave_cmd+='"' docker exec mysql_slave1 sh -c "$start_slave_cmd" echo "Checking slave1 status" 
docker exec mysql_slave1 sh -c "export MYSQL_PWD=root; mysql -u root -e 'SHOW SLAVE STATUS \G' | grep Slave_" echo "Checking slave1 GTID mode" sudo docker exec mysql_slave1 sh -c "export MYSQL_PWD=root; mysql -u root -e 'SHOW VARIABLES' | grep gtid" echo "Checking slave1 semisync" sudo docker exec mysql_slave1 sh -c "export MYSQL_PWD=root; mysql -u root -e 'SHOW VARIABLES' | grep semi_sync" docker exec mysql_slave2 sh -c "$start_slave_cmd" echo "Checking slave2 status" docker exec mysql_slave2 sh -c "export MYSQL_PWD=root; mysql -u root -e 'SHOW SLAVE STATUS \G' | grep Slave_" echo "Checking slave2 GTID mode" sudo docker exec mysql_slave2 sh -c "export MYSQL_PWD=root; mysql -u root -e 'SHOW VARIABLES' | grep gtid" echo "Checking slave2 semisync" sudo docker exec mysql_slave2 sh -c "export MYSQL_PWD=root; mysql -u root -e 'SHOW VARIABLES' | grep semi_sync"
true
708e381b7c0039d64d949633bdbdbf654c5ed69a
Shell
hmasmoudi/SyphaxOS
/Default/0003-SyphaxOSGnome3/001_BuildPackagesScripts/0163-gnome-power-manager/PKGBUILD
UTF-8
743
2.6875
3
[]
no_license
# Maintainer: Hatem Masmoudi <hatem.masmoudi@gmail.com> pkgname=gnome-power-manager pkgver=3.26.0 pkgrel=2 pkgdesc="The GNOME Power Manager package contains a tool used to report on power management on the system." arch=('x86_64') url="http://ftp.gnome.org/pub/gnome/sources/gnome-power-manager/3.26" license=('GPL') groups=('desktop') source=("$url/${pkgname}-${pkgver}.tar.xz") sha256sums=('20aee0b0b4015e7cc6fbabc3cbc4344c07c230fe3d195e90c8ae0dc5d55a2d4e') depends=('rootfs') build() { cd "$srcdir/${pkgname}-${pkgver}" sed -i "/'libm'/s/^/libm = cc.find_library('m', required : false)#/" meson.build meson build --prefix=/usr ninja -C build } package() { cd "$srcdir/${pkgname}-${pkgver}" DESTDIR="${pkgdir}" ninja -C build install }
true
56519a5b8d2e9b4301c2d06d6708489a30f61ce2
Shell
marian-nmt/marian-regression-tests
/tests/factors/test_factors_transformer.sh
UTF-8
1,280
2.9375
3
[ "MIT" ]
permissive
#!/bin/bash -x ##################################################################### # SUMMARY: Training a factored model using the transformer model # AUTHOR: pedrodiascoelho # TAGS: factors ##################################################################### # Exit on error set -e # Remove old artifacts and create working directory rm -rf factors_transformer factors_transformer.{log,out,diff} mkdir -p factors_transformer # Run marian command $MRT_MARIAN/marian \ --no-shuffle --seed 1111 --dim-emb 32 --dim-rnn 64 --maxi-batch 1 --maxi-batch-sort none --clip-norm 0 \ --type transformer -m factors_transformer/model.npz -t toy.bpe.fact.{en,de} -v $MRT_MODELS/factors/vocab.{en,de}.fsv \ --disp-freq 5 -e 5 \ --log factors_transformer.log # Check if files exist test -e factors_transformer/model.npz test -e factors_transformer.log #Checks factor usage grep -q "Factored embeddings enabled" factors_transformer.log grep -q "Factored outputs enabled" factors_transformer.log # Compare the current output with the expected output cat factors_transformer.log | $MRT_TOOLS/extract-costs.sh > factors_transformer.out $MRT_TOOLS/diff-nums.py factors_transformer.out factors_transformer.expected -o factors_transformer.diff # Exit with success code exit 0
true
cf5070ce6459b87e04b977e8f617937c854a36da
Shell
JohnnyBeProgramming/deploy-cloud-app
/cloud/local/destroy.sh
UTF-8
679
3.0625
3
[]
no_license
#!/bin/bash
# -----------------------------------------------------------------------------
# Tears down the local deployment. There is no local cluster or image
# repository to delete, so the only action is removing the generated
# deployment config file.
# -----------------------------------------------------------------------------
set -euo pipefail # Stop running the script on first error...
# -----------------------------------------------------------------------------
# Install docker for desktop:
#  - https://www.docker.com/products/docker-desktop
# -----------------------------------------------------------------------------
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# shellcheck source=/dev/null
source "$DIR/../../config/local/cloud.env"

# No cluster and/or image repository to delete

# Remove the deployment config.
# FIX: the previous `[ -f $check_file ] && rm -f $check_file` made the script
# exit with status 1 whenever the file was already absent (the failed test was
# the script's last command); `rm -f` alone succeeds on a missing file.
check_file="$DIR/../../config/local/deploy.ini"
rm -f -- "$check_file"
true
ebdc263a20257db82ae6c914fd46c5734da42685
Shell
tomees/Nginx-vhost-deploy
/deploy/vhost.sh
UTF-8
817
3.921875
4
[]
no_license
#!/bin/bash if [ -z $DOMAIN ]; then echo "No domain name given" exit 1 fi # check the domain is roughly valid! PATTERN="^([[:alnum:]]([[:alnum:]\-]{0,61}[[:alnum:]])?\.)+[[:alpha:]]{2,6}$" if [[ "$DOMAIN" =~ $PATTERN ]]; then DOMAIN=`echo $DOMAIN | tr '[A-Z]' '[a-z]'` echo "Vytvářím vhost pro doménu" $DOMAIN else echo "CHYBA: špatný formát domény" exit 1 fi #Replace dots with underscores SITE_DIR=`echo $DOMAIN | $SED 's/\./_/g'` # Now we need to copy the virtual host template CONFIG=$NGINX_CONFIG/$DOMAIN sudo cp $CURRENT_DIR/deploy/virtual_host.template $CONFIG sudo $SED -i "s/DOMAIN/$DOMAIN/g" $CONFIG sudo $SED -i "s!ROOT!$WEB_DIR/$SITE_DIR!g" $CONFIG sudo chmod 600 $CONFIG echo "Vhost $DOMAIN vytvořen" sudo ln -s $CONFIG $NGINX_SITES_ENABLED/$DOMAIN echo "Vhost $DOMAIN aktivován"
true
5c2fd8d3c8f79cd835124ca86d468bc85d49d38a
Shell
Ankit0506/Programme
/printarray.sh
UTF-8
188
2.546875
3
[]
no_license
#!/bin/bash
# Demo of bash array expansion syntax on a small fixed array.
arr=(prakhar ankit 1 risabh manish abhinav)
# All elements, @ and * forms.
echo ${arr[@]}
echo ${arr[*]}
# Slices from offset 0 (i.e. the whole array).
echo ${arr[@]:0}
echo ${arr[*]:0}
# First element; a bare ${arr} is shorthand for ${arr[0]}.
echo ${arr[0]}
echo ${arr}
# Slice of 5 elements starting at offset 2.
# FIX: was ${arr[@]:2,5} — "2,5" is evaluated with the arithmetic comma
# operator (result 5), which printed only the element at offset 5 instead of
# the intended offset/length slice.
echo ${arr[@]:2:5}
# Pattern substitution on the first element (replace every 'a' / 'r').
echo ${arr[0]//a/A}
echo ${arr[0]//r/R}
true
3a1d55a7612ee9830994db3289726791d6eeae97
Shell
jxjhheric/MIUI-10-China-Debloat
/config.sh
UTF-8
5,271
2.6875
3
[]
no_license
########################################################################################## # # Magisk Module Template Config Script # by topjohnwu # ########################################################################################## ########################################################################################## # # Instructions: # # 1. Place your files into system folder (delete the placeholder file) # 2. Fill in your module's info into module.prop # 3. Configure the settings in this file (config.sh) # 4. If you need boot scripts, add them into common/post-fs-data.sh or common/service.sh # 5. Add your additional or modified system properties into common/system.prop # ########################################################################################## ########################################################################################## # Configs ########################################################################################## # Set to true if you need to enable Magic Mount # Most mods would like it to be enabled AUTOMOUNT=true # Set to true if you need to load system.prop PROPFILE=true # Set to true if you need post-fs-data script POSTFSDATA=true # Set to true if you need late_start service script LATESTARTSERVICE=false ########################################################################################## # Installation Message ########################################################################################## # Set what you want to show when installing your mod print_modname() { ui_print "******************************" ui_print " MIUI 10 China Developer " ui_print " Debloater " ui_print "******************************" } ########################################################################################## # Replace list ########################################################################################## # List all directories you want to directly replace in the system # Check the documentations for more 
info about how Magic Mount works, and why you need this # This is an example REPLACE=" /system/app/Youtube /system/priv-app/SystemUI /system/priv-app/Settings /system/framework " # Construct your own list here, it will override the example above # !DO NOT! remove this if you don't need to replace anything, leave it empty as it is now REPLACE=" /system/app/AnalyticsCore /system/app/BasicDreams /system/app/BugReport /system/app/CloudService /system/app/Email /system/app/GameCenter /system/app/HybridAccessory /system/app/HybridPlatform /system/app/Joyose /system/app/MSA /system/app/Music /system/app/SogouInput /system/app/Userguide /system/app/VirtualSim /system/app/YouDaoEngine /system/app/jjhome /system/app/klobugreport /system/app/mab /system/data-app/BaiduIME /system/data-app/MiFinance /system/data-app/MiMobileNoti /system/data-app/MiShop /system/data-app/O2O /system/data-app/SmartHome /system/data-app/WaliLive /system/data-app/mihome /system/priv-app/MiGameCenterSDKService /system/priv-app/Mipub /system/priv-app/MiuiVideo /system/priv-app/YellowPage " ########################################################################################## # Permissions ########################################################################################## set_permissions() { # Only some special files require specific permissions # The default permissions should be good enough for most cases # Here are some examples for the set_perm functions: # set_perm_recursive <dirname> <owner> <group> <dirpermission> <filepermission> <contexts> (default: u:object_r:system_file:s0) # set_perm_recursive $MODPATH/system/lib 0 0 0755 0644 # set_perm <filename> <owner> <group> <permission> <contexts> (default: u:object_r:system_file:s0) # set_perm $MODPATH/system/bin/app_process32 0 2000 0755 u:object_r:zygote_exec:s0 # set_perm $MODPATH/system/bin/dex2oat 0 2000 0755 u:object_r:dex2oat_exec:s0 # set_perm $MODPATH/system/lib/libart.so 0 0 0644 # The following is default permissions, DO NOT 
remove set_perm_recursive $MODPATH 0 0 0755 0644 } ########################################################################################## # Custom Functions ########################################################################################## # This file (config.sh) will be sourced by the main flash script after util_functions.sh # If you need custom logic, please add them here as functions, and call these functions in # update-binary. Refrain from adding code directly into update-binary, as it will make it # difficult for you to migrate your modules to newer template versions. # Make update-binary as clean as possible, try to only do function calls in it. disable_bloatware() { pm disable com.miui.video pm disable com.miui.player pm disable com.xiangkan.android pm disable com.xiaomi.mimobile.noti pm disable com.xiaomi.mibrain.speech pm disable com.sohu.inputmethod.sogou.xiaomi pm disable com.xiaomi.shop pm disable com.wali.live pm disable com.xiaomi.o2o pm disable com.mi.liveassistant pm disable com.baidu.input_mi }
true
284c58383fabbe404487e8013d0bb3d0f0bebc4b
Shell
garyellis/docker-squid
/entrypoint.sh
UTF-8
1,188
3.453125
3
[]
no_license
#!/bin/bash -e set -x export WHITELIST_FROM_ENV_ENABLED=${WHITELIST_FROM_ENV_ENABLED:-true} export WHITELIST_DOMAIN_NAMES="${WHITELIST_DOMAIN_NAMES:-google.com,github.com}" function makedirs(){ mkdir -p /var/log/squid && chmod 777 /var/log/squid mkdir -p /var/spool/squid && chmod 777 /var/spool/squid } function selfsigned_ca(){ if [ ! "${SELFSIGNED_CA_ENABLED}" == "true" ]; then return fi echo "creating selfsigned squid ca certificate" mkdir -p /etc/squid/ssl openssl req -x509 -new -newkey rsa:4096 -nodes -keyout /etc/squid/ssl/squid.pem -out /etc/squid/ssl/squid.pem -days 365 \ -subj "/C=US/ST=Arizona/L=Scottsdale/O=EWSARCH/CN=squid" } function whitelist_from_env(){ if [ ! "${WHITELIST_FROM_ENV_ENABLED}" == "true" ]; then return fi echo "creating /etc/squid/squid.allowed.sites.txt" cat <<EOF > /etc/squid/squid.allowed.sites.txt $(echo ${WHITELIST_DOMAIN_NAMES}|sed 's/,/\n/g') EOF } function aws_ssm_config(){ if [ ! "${AWS_SSM_CONFIG_ENABLED}" == "true" ]; then return fi echo "squid.conf from ssm not yet implemented" } makedirs selfsigned_ca whitelist_from_env aws_ssm_config exec squid -f /etc/squid/squid.conf -NYCd 1
true
a3689144e89ea733c134d509df10168cba340fa1
Shell
webis-de/wasp
/app/pywb/run.sh
UTF-8
271
3.40625
3
[ "MIT" ]
permissive
#!/bin/bash pywb_port=${PYWB_PORT:=8001} case $1 in start) source env/bin/activate wayback --port $pywb_port --record 1> pywb.log 2>&1 & echo $! > pid.txt ;; stop) pid=$(cat pid.txt) kill $pid ;; *) echo Unknown command: $1 esac
true
175a7611b8326e632749917d7d7321837e6b718d
Shell
sonalsannigrahi/Archers-Final
/workaround.sh
UTF-8
375
3.484375
3
[]
no_license
#!/bin/bash function change_permission { echo "Changing permission of $1" str=$(xattr -p com.apple.quarantine "$1" | sed 's/^0081/00c1/') xattr -w com.apple.quarantine "$str" "$1" } [ $(id -u) -eq 0 ] || { echo "Must be run as root. Try sudo."; exit; } for f in /usr/local/lib/libsfml-* /Library/Frameworks/{freetype.framework,sfml*}; do change_permission $f done
true
241d5aa8effbda4ec7e03c8166bac789fd861a98
Shell
atemaguer/chunky
/run_chunkservers.sh
UTF-8
445
3.28125
3
[]
no_license
#!/bin/bash # How many chunkservers to start N_CHUNKSERVERS=${1:-2} # Launch in detached mode the chunkservers echo "Launching chunkservers" for i in $(seq 1 $N_CHUNKSERVERS); do cs_name="chunkserver_$i" echo "Name: $cs_name" volume_name="vol_$i" docker run -d --rm --mount source=${volume_name},target=/chunk_storage --name $cs_name --network gfs-net chunkserver --self_ip $cs_name --master_ip master --master_port 50051 done
true
eed5b4243bf2b3af79926cf292df4cc7dfc3cede
Shell
mbornoz/veewee
/templates/SLES-11-SP1-DVD-i586-GM/postinstall.sh
UTF-8
1,425
2.984375
3
[ "MIT" ]
permissive
# # postinstall.sh # date > /etc/vagrant_box_build_time # install vagrant key echo -e "\ninstall vagrant key ..." mkdir -m 0700 /home/vagrant/.ssh cd /home/vagrant/.ssh wget --no-check-certificate -O authorized_keys https://raw.github.com/mitchellh/vagrant/master/keys/vagrant.pub chmod 0600 /home/vagrant/.ssh/authorized_keys chown -R vagrant.users /home/vagrant/.ssh # update sudoers echo -e "\nupdate sudoers ..." echo -e "\n# added by veewee/postinstall.sh" >> /etc/sudoers echo -e "vagrant ALL=(ALL) NOPASSWD: ALL\n" >> /etc/sudoers # speed-up remote logins echo -e "\nspeed-up remote logins ..." echo -e "\n# added by veewee/postinstall.sh" >> /etc/ssh/sshd_config echo -e "UseDNS no\n" >> /etc/ssh/sshd_config # install chef and puppet echo -e "\ninstall chef and puppet ..." gem install chef --no-ri --no-rdoc gem install puppet --no-ri --no-rdoc # install the virtualbox guest additions echo -e "\ninstall the virtualbox guest additions ..." zypper --non-interactive remove `rpm -qa virtualbox-guest-*` >/dev/null 2>&1 VBOX_VERSION=$(cat /home/vagrant/.vbox_version) cd /tmp wget http://download.virtualbox.org/virtualbox/$VBOX_VERSION/VBoxGuestAdditions_$VBOX_VERSION.iso #wget http://192.168.178.10/VBoxGuestAdditions_$VBOX_VERSION.iso mount -o loop VBoxGuestAdditions_$VBOX_VERSION.iso /mnt sh /mnt/VBoxLinuxAdditions.run umount /mnt rm -f VBoxGuestAdditions_$VBOX_VERSION.iso echo -e "\nall done.\n" exit
true
0c655c9f000546bf724e2ba53c0bd455325e04a5
Shell
cristivlas/zerobugs
/engine/testsuite/test_no_info.sh
UTF-8
919
2.9375
3
[ "BSL-1.0" ]
permissive
# vim: tabstop=4:softtabstop=4:expandtab:shiftwidth=4 # $Id$ ################################################################ # ################################################################ function test_no_info() { echo ----- ${FUNCNAME}${1:-$debug} ----- >> $logfile cat > foo.cpp << '---end---' // foo.cpp int foo(const char* s) { int x = 0; while (*s) { x += *s++; } return x; } int main(int argc, char* argv[]) { int y = foo(argv[0]); return y; } ---end--- #write test script (processed by the AutoTest plugin) cat > script << '---end---' call ( exec a.out ) call test.cancel call quit ---end--- #compile rm a.out build foo.cpp rm -f $config run_debugger $@ } ################################################################ # Run this test standalone ################################################################ function run() { source common.sh test_no_info } source suffix.sh
true
ed538f9790a762b96916414435840833a491eca6
Shell
shivgupt/ipfs_gallery
/ops/publish.sh
UTF-8
833
3.8125
4
[]
no_license
#!/bin/bash service='ipfs_daemon' category='videos' index="$HOME/ipfs/$category/index.html" touch $index daemon=`docker inspect --format '{{.Status.ContainerStatus.ContainerID}}' $(docker service ps -q $service | head -n1)` echo "Found daemon $daemon" if [[ -z "`ipfs key list | grep $category`" ]]; then ipfs key gen --type=rsa --size=2048 $category; fi for dir in "$@" do hash=`docker exec -it $daemon sh -c "ipfs add -r -q /ipfs/$category/$dir | tail -n 1 | tr -d '\n\r'"` if [[ -z "`cat $index | grep $hash`" ]] then echo '<a href="https://gateway.ipfs.io/ipfs/'"$hash"'">'"$dir"'</a> '"$hash" >> $index echo "Added $dir $hash" fi done echo 'Publishing... ' docker exec -it $daemon sh -c 'ipfs name publish --key='"$category"' /ipfs/`ipfs add -q /ipfs/'"$category"'/index.html | tail -n 1`'
true
7541a8892f270486d719e4c147e6fdca5fee6917
Shell
control-center/serviced
/pkg/reversion/deb/reversion.sh
UTF-8
1,036
3.84375
4
[ "Apache-2.0" ]
permissive
#!/bin/sh if [ "$#" -ne 3 ] then echo "ERROR: incorrect number of arguments" echo "USAGE: $0 sourceRepoClass sourceVersion targetVersion" echo "" echo "Example - reversion unstable 1.0.0-2113~xenial to 1.0.0rc1:" echo "$0 unstable 1.0.0-2113~xenial 1.0.0rc1" echo "" echo "Example - reversion testing 1.0.0rc1 to 1.0.0:" echo "$0 testing 1.0.0rc1 1.0.0" exit 1 fi SOURCE_CLASS="$1" SOURCE_VERSION="$2" TARGET_VERSION="$3" set -e set -x # Add the source repo REPO_URL=http://${SOURCE_CLASS}.zenoss.io/apt/ubuntu sh -c 'echo "deb [ arch=amd64 ] '$REPO_URL' xenial universe" \ > /etc/apt/sources.list.d/zenoss.list' apt-get update cd /tmp apt-get download serviced=${SOURCE_VERSION} echo -e "\nFYI - Here's the metadata for the source package" dpkg -f serviced_${SOURCE_VERSION}_amd64.deb deb-reversion -b -v ${TARGET_VERSION} serviced_${SOURCE_VERSION}_amd64.deb echo -e "\nFYI - Here's the metadata for the target package" dpkg -f serviced_${TARGET_VERSION}_amd64.deb mv serviced_${TARGET_VERSION}_amd64.deb /output
true
d888e5c79ae064bab93b453d5ac31f665febb8a4
Shell
metakirby5/.dots
/base/.config/bash/configs/shims/adb.bash
UTF-8
241
3.203125
3
[ "MIT" ]
permissive
adb() { local cmd="$1"; shift case "$cmd" in file) command adbf "$@" ;; reinstall) command adb install -rd "$@" ;; '') command adb ;; *) command adb "$cmd" "$@" ;; esac }
true
bd12acceac400897ee38734119fe213180db22f6
Shell
yvvyoon/blockchain-project-belief
/2.back/basic-2org/start.sh
UTF-8
2,683
2.625
3
[ "CC-BY-4.0" ]
permissive
#!/bin/bash #basic-network start.sh # # Copyright IBM Corp All Rights Reserved # # SPDX-License-Identifier: Apache-2.0 # # Exit on first error, print all commands. set -ev # don't rewrite paths for Windows Git Bash users export MSYS_NO_PATHCONV=1 docker-compose -f docker-compose.yml down docker-compose -f docker-compose.yml up -d orderer.example.com ca.example.com peer0.org1.example.com couchdb cli peer1.org1.example.com cli2 peer0.org2.example.com cli3 peer1.org2.example.com cli4 docker ps # wait for Hyperledger Fabric to start # incase of errors when running later commands, issue export FABRIC_START_TIMEOUT=<larger number> export FABRIC_START_TIMEOUT=10 #echo ${FABRIC_START_TIMEOUT} sleep ${FABRIC_START_TIMEOUT} # Create the channel #docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" peer0.org1.example.com peer channel create -o orderer.example.com:7050 -c mychannel -f /etc/hyperledger/configtx/channel.tx docker exec cli peer channel create -o orderer.example.com:7050 -c mychannel -f /etc/hyperledger/configtx/channel.tx #--tls --cafile /etc/hyperledger/crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/msp/tlscacerts/tlsca.example.com-cert.pem # Join peer0.org1.example.com to the channel. #docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" peer0.org1.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block docker exec peer0.org1.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block # Join peer1.org1.example.com to the channel. 
#docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" peer1.org1.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block docker exec peer1.org1.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block # Join peer0.org2.example.com to the channel. #docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" peer0.org2.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block docker exec peer0.org2.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block # Join peer1.org2.example.com to the channel. #docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" peer1.org2.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block docker exec peer1.org2.example.com peer channel join -b /etc/hyperledger/configtx/mychannel.block
true
0d6bec2c7b4a7e7b6eebc26babd675aed60fcc65
Shell
dojot/ansible-dojot
/scripts/down
UTF-8
1,239
4.09375
4
[ "Apache-2.0" ]
permissive
#!/bin/bash # Removes all the dojot components that are running in a Kubernetes environment. ## Check the dependencies before running the rest of the script echo "Checking dependencies..." echo "Checking kubectl..." if ! command -v kubectl &> /dev/null then echo "You must have Kubernetes installed!" exit 1 fi echo "OK" ## Retrieve the arguments while getopts r:h flag do case "${flag}" in r) remove=${OPTARG};; h) help="true";; *) exit 1;; esac done ## Print help if [ "${remove}" != "false" ] && [ "${remove}" != "true" ] || [ "$help" == "true" ] then echo "usage: $0 [OPTIONS]" echo " -h: shows the script help (this message)" echo " -r: true if it should remove the volumes (won't erase any data), false if it should not." exit 1 fi # When we run only delete --all all, it can kill the pods first, not the deployments nor # the stateful sets, thus creating new pods, so we first delete them to prevent this. kubectl delete --all sts,deploy -n dojot kubectl delete --all sts,deploy -n dojot-monitoring kubectl delete --all all -n dojot-monitoring kubectl delete --all all -n dojot if [ "${remove}" == "true" ] then echo "Removing volumes" kubectl delete --all pvc --all-namespaces kubectl delete --all pv,sc fi
true
f2cf7064f90599b8f43b3ff58a45136b75bbb09e
Shell
lmdu/pyfastx
/benchmark/benchmark_fasta_build_index.sh
UTF-8
3,044
3.765625
4
[ "MIT" ]
permissive
#!/bin/bash #store benchmark time and memory tempfile=time_mem.tmp #record memory memorys=() #record elapsed time times=() #record index file size sizes=() #number of programs num=-1 #number of repeat tests repeats=$1 #input fasta files gfiles=$@ measure_memory_time(){ /usr/bin/time -f "%e %M" -o $tempfile $1 > /dev/null 2>&1 let num++ if [ ! ${memorys[$num]} > 0 ]; then memorys[$num]=0 times[$num]=0 fi arr=($(cat $tempfile)) #clear temp file if [ -e "$tempfile" ]; then rm "$tempfile" fi times[$num]=$(echo "${arr[0]}+${times[$num]}" | bc) memorys[$num]=$(echo "${arr[1]}+${memorys[$num]}" | bc) } #print header printf "genome\tsize\tcount\tbioperl\t\t\tbiopython\t\t\tpyfaidx\t\t\tpyfasta\t\t\tpysam\t\t\tsamtools\t\t\tseqkit\t\t\tpyfastx\t\t\tpyfastx_gzip\t\t\n" for gfile in ${gfiles[@]:1}; do memorys=() times=() filename=$(basename $gfile) filename="${filename%.*}" for i in $(seq 1 $repeats); do num=-1 #bioperl if [ -e "$gfile.index" ]; then rm "$gfile.index" fi measure_memory_time "perl bioperl_fasta_build_index.pl $gfile" let sizes[$num]=$(stat -c %s $gfile.index) #biopython if [ -e "$gfile.db" ]; then rm "$gfile.db" fi measure_memory_time "python3 biopython_fasta_build_index.py $gfile" let sizes[$num]=$(stat -c %s $gfile.db) #pyfaidx if [ -e "$gfile.fai" ]; then rm "$gfile.fai" fi measure_memory_time "python3 pyfaidx_fasta_build_index.py $gfile" let sizes[$num]=$(stat -c %s $gfile.fai) #pyfasta if [ -e "$gfile.gdx" ]; then rm "$gfile.gdx" rm "$gfile.flat" fi measure_memory_time "python3 pyfasta_fasta_build_index.py $gfile" let sizes[$num]=$(stat -c %s $gfile.gdx) #pysam if [ -e "$gfile.fai" ]; then rm "$gfile.fai" fi measure_memory_time "python3 pysam_fasta_build_index.py $gfile" let sizes[$num]=$(stat -c %s $gfile.fai) #samtools if [ -e "$gfile.fai" ]; then rm "$gfile.fai" fi measure_memory_time "samtools faidx $gfile" let sizes[$num]=$(stat -c %s $gfile.fai) #seqkit if [ -e "$gfile.seqkit.fai" ]; then rm "$gfile.seqkit.fai" fi measure_memory_time "seqkit faidx 
-f $gfile" let sizes[$num]=$(stat -c %s $gfile.seqkit.fai) #pyfastx if [ -e "$gfile.fxi" ]; then rm "$gfile.fxi" fi measure_memory_time "python3 pyfastx_fasta_build_index.py $gfile" let sizes[$num]=$(stat -c %s $gfile.fxi) #pyfastx gzip if [ -e "$gfile.gz.fxi" ]; then rm "$gfile.gz.fxi" fi measure_memory_time "python3 pyfastx_fasta_build_index.py $gfile.gz" let sizes[$num]=$(stat -c %s $gfile.gz.fxi) done #get genome information array=($(python3 get_fasta_info.py $gfile)) #genome size gsize=${array[0]} #sequence counts in genome seqcounts=${array[1]} #print result printf "%s\t%s\t%s" $filename $gsize $seqcounts for((i=0;i<=$num;i++)); do mm=$(echo "${memorys[$i]}/$repeats" | bc) et=$(echo "${times[$i]}/$repeats" | bc) is=$(python3 -c "import math; print(math.ceil(${sizes[$i]}/1024))") printf "\t%d\t%d\t%d" $mm $et $is done printf "\n" done
true
dc9a979770eadb2539f04e95552f666bff917d52
Shell
varesa/libvirt-vlan-config
/apply.sh
UTF-8
1,470
4.03125
4
[]
no_license
set -euo pipefail network=${1:-} ([ -n "$network" ] && virsh net-list --name | head -n-1 | grep -q $network) || (echo "First argument must be name of libvirt network"; exit 1) path=${2:-} ([ -n "$path" ] && [ -d $path/create ]) || (echo "Second argument must be a path that contains directories create/ and remove/"; exit 1) ([ -n "$path" ] && [ -d $path/remove ]) || (echo "Second argument must be a path that contains directories create/ and remove/"; exit 1) current() { virsh net-dumpxml $network } create() { echo "--Creating: $(get_name $1)" virsh net-update $network add portgroup $1 --live --config echo } update() { echo "--Updating: $(get_name $1)" virsh net-update $network modify portgroup $1 --live --config echo } remove() { echo "--Removing: $(get_name $1)" virsh net-update $network delete portgroup $1 --live --config echo } get_name() { cat $1 | grep '<portgroup name' } for portgroup_xml in $path/create/*.xml; do if current | grep -q "$(get_name $portgroup_xml)"; then update $portgroup_xml else create $portgroup_xml fi done if [ ! "$(ls $path/remove | wc -l)" == 0 ]; then for portgroup_xml in $path/remove/*.xml; do if current | grep -q "$(get_name $portgroup_xml)"; then remove $portgroup_xml else echo "--VLAN $portgroup_xml does not exist" echo fi done else echo "No VLANs to remove" fi
true
97a8e4c516f8351decccb79e2e5323225b2dae81
Shell
mikhailadvani/my_utils
/machine-setup/10-terraform.sh
UTF-8
423
2.921875
3
[]
no_license
set -e TERRAFORM_DOWNLOAD_URL="https://releases.hashicorp.com/terraform/0.12.7/terraform_0.12.7_darwin_amd64.zip" TERRAFORM_TMP_DIR="/tmp/terraform" mkdir -p $TERRAFORM_TMP_DIR curl -o $TERRAFORM_TMP_DIR/terraform.zip $TERRAFORM_DOWNLOAD_URL unzip $TERRAFORM_TMP_DIR/terraform.zip -d $TERRAFORM_TMP_DIR sudo mv $TERRAFORM_TMP_DIR/terraform /usr/local/bin/ sudo chmod +x /usr/local/bin/terraform rm -rf $TERRAFORM_TMP_DIR
true
275fb0086a03dda0a41fffe17b786ee373d685e6
Shell
TurpIF/config
/etc/inputrc
UTF-8
1,553
3.484375
3
[ "MIT" ]
permissive
#!/bin/bash # # Configuration for shell input mode. # # vim: set ft=sh # Set vi's context set editing-mode vi # List all matches in case multiple possible completions are possible set show-all-if-ambiguous on # Immediately add a trailing slash when autocompleting symlinks to directories set mark-symlinked-directories on # Use the text that has already been typed as the prefix for searching through # commands (i.e. more intelligent Up/Down behavior) "\e[1~": beginning-of-line "\e[4~": end-of-line "\e[7~": beginning-of-line "\e[8~": end-of-line "\eOH": beginning-of-line "\eOF": end-of-line "\e[H": beginning-of-line "\e[F": end-of-line "\e[B": history-search-forward "\e[A": history-search-backward # Do not autocomplete hidden files unless the pattern explicitly begins with a dot set match-hidden-files off # Show all autocomplete results at once set page-completions off # If there are more than 200 possible completions for a word, ask to show them all set completion-query-items 200 # Show extra file information when completing, like `ls -F` does set visible-stats on # Be more intelligent when autocompleting by also looking at the text after # the cursor. For example, when the current line is "cd ~/src/mozil", and # the cursor is on the "z", pressing Tab will not autocomplete it to "cd # ~/src/mozillail", but to "cd ~/src/mozilla". (This is supported by the # Readline used by Bash 4.) set skip-completed-text on # Allow UTF-8 input and output, instead of showing stuff like $'\0123\0456' set input-meta on set output-meta on
true
4b25c91eddfb674dfee494a56d1fa0186b864e10
Shell
peteletroll/DockRotate
/mkrelease.sh
UTF-8
3,483
3.515625
4
[ "MIT" ]
permissive
#!/bin/bash name=DockRotate ksphome=~/KSP/KSP_linux force=0 zipname='' while getopts fz: opt do case $opt in f) force=1 ;; z) zipname="$OPTARG" ;; *) exit 1 ;; esac done shift `expr $OPTIND - 1` cd `dirname $0` || exit 1 # [assembly: AssemblyVersion ("1.3.1.1")] version=`sed -n 's/.*\<AssemblyVersion\>.*"\([^"]\+\)".*/\1/p' "$name/Properties/AssemblyInfo.cs"` if [ "$version" = "" ] then echo "ABORTING: can't find assembly version number" 1>&2 exit 1 fi fileversion=`sed -n 's/.*\<AssemblyFileVersion\>.*"\([^"]\+\)".*/\1/p' "$name/Properties/AssemblyInfo.cs"` if [ "$fileversion" != "$version" ] then echo "ABORTING: version incoherency: $version != $fileversion" 1>&2 exit 1 fi dll="$name/bin/Release/$name.dll" debugdll="$name/bin/Debug/$name.dll" dllname=`basename $dll` foundbadspacing=0 for f in `find . -name \*.cs` do if grep -Hn ' \| \|[^ ] \|[ ]$\|\<class\>.*[ ]:' $f 1>&2 then foundbadspacing=1 fi done if [ $foundbadspacing -ne 0 ] then echo "ABORTING: found bad spacing, see above" 1>&2 exit 1 fi echo source spacing is ok foundnewer=0 for f in `find . -name \*.cs` do for d in $dll $debugdll do if [ $f -nt $d ] then echo "ABORTING: $f is newer than $d" 1>&2 foundnewer=1 fi done done [ $foundnewer -eq 0 ] || exit 1 zip="/tmp/$name-$version.zip" if [ ! -z "$zipname" ] then zip="$zipname" fi ( status=0 kspversion=`cat "$ksphome/readme.txt" | awk 'NR <= 30 && NF == 2 && $1 == "Version" { print $2 }'` if [ "$kspversion" = "" ] then echo "ABORTING: can't find KSP version number" 1>&2 exit 1 fi if echo '[]' | jq . > /dev/null then true else echo "ABORTING: jq not working" 1>&2 exit 1 fi jsonversion="Resources/$name.version" jqfilter='.VERSION | (.MAJOR|tostring) + "." + (.MINOR|tostring) + "." + (.PATCH|tostring) + "." + (.BUILD|tostring)' jversion=`jq -r "$jqfilter" "$jsonversion"` if [ $? 
-ne 0 ] then echo "ABORTING: JSON syntax error in $jsonversion" 1>&2 exit 1 fi if [ "$version" != "$jversion" ] then echo "ABORTING: DLL version is $version, JSON version is $jversion" 1>&2 status=1 fi jqfilter='.KSP_VERSION | (.MAJOR|tostring) + "." + (.MINOR|tostring) + "." + (.PATCH|tostring)' jversion=`jq -r "$jqfilter" $jsonversion` if [ $? -ne 0 ] then echo "ABORTING: JSON syntax error in $jsonversion" 1>&2 exit 1 fi if [ "$kspversion" != "$jversion" ] then echo "ABORTING: KSP version is $kspversion, JSON version is $jversion" 1>&2 status=1 fi exit $status ) if [ $? -ne 0 ] then if [ $force -ne 0 ] then echo "RESUMING: -f option activated, forcing zip creation" 1>&2 if [ -z "$zipname" ] then zip=${zip%.zip}-forced.zip fi else echo "ABORTING: use -f to force zip creation" 1>&2 exit 1 fi fi tmp=`mktemp -d` || exit 1 trap "rm -rf $tmp" EXIT dir="$tmp/GameData/$name" mkdir -p $dir || exit 1 mmglob="ModuleManager.*.dll" nmmdll=`find "$ksphome/GameData/" -name "$mmglob" -printf . -exec cp {} "$tmp" \; | wc -c` if [ "$nmmdll" -ne 1 ] then echo "ABORTING: there should be one ModuleManager.*.dll, there are $nmmdll:" 1>&2 find "$ksphome/GameData/" -name "$mmglob" 1>&2 exit 1 fi cp "$tmp"/$mmglob $dir/.. || exit 1 cp ModuleManagerLicense.md $dir/.. || exit 1 cp -r $dll README.md LICENSE.md Resources/* $dir || exit 1 cp $debugdll $dir/${dllname%.dll}.debugdll rm -f $zip echo echo generating release $zip ( cd $tmp && zip -vr $zip GameData ) || exit 1 echo generated $zip `du -h $zip | cut -f 1` echo
true
55439d300ddc2e6720aff1cdcfad070c22120540
Shell
MrYing/yunba-smartoffice-1
/tests/tc_control.sh
UTF-8
297
2.984375
3
[ "MIT" ]
permissive
if [ $# -lt 2 ] ; then echo usage: `basename ${0}` '<ALIAS> <on_off|mode|fan|inc|dec>' echo e.g.: `basename ${0}` '"temp_ctrl_0"' '"mode"' exit 1 fi alias="${1}" cmd="${2}" msg="{\\\"cmd\\\":\\\"${cmd}\\\",\\\"devid\\\":\\\"${alias}\\\"}" #echo "${msg}" ./publish.sh "${alias}" "${msg}"
true
95feb353a315381dc62e2a85f05e240e52cce77b
Shell
chrisspen/django-chroniker
/init_virtualenv.sh
UTF-8
426
3.171875
3
[]
no_license
#!/bin/bash set -e cd "$(dirname "$0")" echo "[$(date)] Removing existing virtualenv if it exists." [ -d .env ] && rm -Rf .env echo "[$(date)] Creating virtual environment." python3.8 -m venv .env echo "[$(date)] Activating virtual environment." . .env/bin/activate echo "[$(date)] Upgrading pip." pip install -U pip echo "[$(date)] Installing pip requirements." pip install -r requirements.txt -r requirements-test.txt
true
db46a375be5bd57468290fd967aec99c64100f8c
Shell
luavela/luavela
/3rdparty/udis86/scripts/asmtest.sh
UTF-8
749
2.90625
3
[ "MIT", "CC-PDDC", "BSD-2-Clause", "FSFUL" ]
permissive
#!/usr/bin/env bash objdump="otool -tV" yasm=yasm asmfile="ud_yasmtest.asm" binfile="ud_yasmtest.bin" Sfile="ud_yasmtest.S" objfile="ud_yasmtest.o" echo "[bits $1]" > $asmfile echo $2 >> $asmfile $yasm -f bin -o $binfile $asmfile if [ ! $? -eq 0 ]; then echo "error: failed to assemble" exit 1 fi echo "-- hexdump --------------------------------------" hexdump $binfile echo echo "-- objdump --------------------------------------" hexdump -e '1/1 ".byte 0x%02x\n"' $binfile > $Sfile gcc -c $Sfile -o $objfile $objdump -d $objfile echo echo "-- udcli (intel) ---------------------------------" ../udcli/udcli -$1 $binfile echo echo "-- udcli (at&t) ----------------------------------" ../udcli/udcli -$1 -att $binfile echo exit 0
true
576450b3323475087c247c894867ee40544b6215
Shell
glours/docker-ce-tags
/check/check-tags.sh
UTF-8
289
2.96875
3
[]
no_license
#!/bin/sh COMPONENT=$1 TAG=$2 COMMIT=$3 #echo "Checking docker/${COMPONENT}@${TAG}" (cd docker-ce && git checkout ${TAG} &> /dev/null) (cd "${COMPONENT}-extract" && git checkout ${COMMIT} &> /dev/null) diff -r --exclude=".git" "docker-ce/components/${COMPONENT}" "${COMPONENT}-extract"
true
dbd21ed6d638b6cb0c497d515419a3fe8f90b8d1
Shell
pauldmccarthy/indexed_gzip
/.ci/build_dev_indexed_gzip.sh
UTF-8
270
2.8125
3
[ "Zlib" ]
permissive
#!/usr/bin/env bash # # Build a test version of indexed_gzip. set -e envdir="$1" thisdir=$(cd $(dirname "$0") && pwd) source $thisdir/activate_env.sh "$envdir" # enable line tracing for cython # modules - see setup.py export INDEXED_GZIP_TESTING=1 pip install -e .
true
19f6f7509ba669022ebc6edcc0b1153890ec8ae4
Shell
gidapataki/nng-2016
/final/runner.sh
UTF-8
603
3.671875
4
[]
no_license
#!/usr/bin/env bash DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" PID= cd "${REPOSITORY}" isRebuildNeeded() { git remote update &>/dev/null diffcount=$(git rev-list HEAD...origin/master --count) if [ "$diffcount" -gt "0" ]; then echo "yes" else echo "no" fi } rebuild() { git checkout origin/master &>/dev/null make -j8 -C build &>/dev/null } restartBees() { kill "$PID" watch -n 1 "./build/bees 2>&1" & PID=$! } restartBees while true; do if [ "$(isRebuildNeeded)" == "yes" ]; then echo "Rebuilding" rebuild restartBees fi sleep 1 done
true
cd839896c1bb9445812fac3c7dd490a23933e682
Shell
Mikail-Eliyah/openssl_at_1st_sight
/functions/data_convert.sh
UTF-8
3,962
4.03125
4
[]
no_license
#!/bin/bash function hex_string_file_to_file(){ read -p "file in (e.g. 00.txt): " file_in_name read -p "file out (e.g. 01.txt): " file_out_name cat $file_in_name | xxd -r -p > $file_out_name } function file_to_hex_string_file(){ read -p "file in (e.g. data.txt): " file_in_name read -p "file out (e.g. 00.txt): " file_out_name # file to hex string xxd -p $file_in_name | tr -d '\n' > $file_out_name } # hex-dump-of-file-containing-only-the-hex-characters # xxd -p file # in a single line: # xxd -p data.txt | tr -d '\n' # xxd -p data.txt | xxd -r -p > 00.txt # $ echo "ab AB i m 010 i" | xxd -pu # 61622041422069206d2030313020690a # $ echo "ab AB i m 010 i" | xxd -u | xxd -r # ab AB i m 010 i function ascii_to_hex () { #echo "arg1 is: '$1'" ANS=$(echo -n "$1" | xxd -p) ; # -n echo $ANS; return $?; } function hex_to_ascii () { # echo "arg1 is: '$1'" ans=$(echo "$1" | xxd -p -r) ; # -n echo $ans; #return $?; # return $ANS as string } function ascii_to_base64 () { ans=$(openssl enc -base64 <<< "$1"); echo $ans #return $?; } function base64_to_ascii () { ans=$(openssl enc -base64 -d <<< "$1"); echo $ans #return $?; } function hex_to_base64 () { # echo "arg1 is: '$1'" ans_00=$(hex_to_ascii "$1") ; ans_01=$(ascii_to_base64 "$ans_00") ; #return $ans_01; # return $ANS as string } function base64_to_hex () { # echo "arg1 is: '$1'" #ans_00=$(base64_to_ascii "$1") ; #ans_01=$(ascii_to_hex "$ans_00") ; #return $ans_01; # return $ANS as string if [ "$1" == "" ] # not specified then echo 'example: $ base64_to_hex "QWxhZGRpbjpvcGVuIHNlc2FtZQo="' else base64_str="$1" #ans_01=$(echo -n $base64_str | openssl enc -base64 -d | hexdump -v -e '/1 "%02x" ') ans_01=$(echo -n $base64_str | base64 -d | od -t x8 -An | tr -d ' ') fi; echo $ans_01 # return $ans_01; } function decimal_to_hex(){ if [ "$1" == "" ] # not specified then echo 'example: $ decimal_to_hex "999999"' else dec_str="$1" hex_str=$(printf "%x\n" $dec_str) fi; echo $hex_str } function hex_to_decimal(){ if [ "$1" == "" ] # not 
specified then echo 'example: $ hex_to_decimal "999999"' else hex_str="$1" dec_str=$(printf "%ld\n" '0x'$hex_str) fi; echo $dec_str } function ascii_to_decimal(){ if [ "$1" == "" ] # not specified then echo 'example: $ ascii_to_decimal "abdY20"' else ascii_str="$1" hex_str=$(ascii_to_hex $ascii_str) dec_str=$(hex_to_decimal $hex_str) fi; echo $dec_str } function decimal_to_ascii(){ if [ "$1" == "" ] # not specified then echo 'example: $ decimal_to_ascii "91694388364660"' else dec_str="$1" hex_str=$(decimal_to_hex $dec_str) ascii_str=$(hex_to_ascii $hex_str) fi; echo $ascii_str } function binary_file_to_hex (){ file_binary="$1" cat $file_binary | xxd -p | tr -d "\n" } echo "" : <<'NOTE_BLOCK' # cat file.dat | xxd -r -p | base64 echo "6F0AD0BFEE7D4B478AFED096E03CD80A" | xxd -r -p | base64 $ openssl rand -hex 2 | xxd -r -p | base64 openssl enc -base64 <<< 'Hello, World!' openssl enc -base64 -d <<< SGVsbG8sIFdvcmxkIQo= echo "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" | base64 -D | od -t x8 -An 41 6c 61 64 64 69 6e 3a 6f 70 65 6e 20 73 65 73 61 6d 65 Aladdin:open sesame base64_str=’QWxhZGRpbjpvcGVuIHNlc2FtZQo=’ base64_str=’QWxhZGRpbjpvcGVuIHNlc2FtZQ==’ # base64 to hex echo $base64_str | base64 -D | hexdump -v -e '/1 "%02x" ' ANS: 416c616464696e3a6f70656e20736573616d65 # base64 to ascii echo "QWxhZGRpbjpvcGVuIHNlc2FtZQo=" | base64 -D # base64 --decod echo "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" | base64 -D ANS: Aladdin:open sesame # ascii to base64 openssl enc -base64 <<< 'Aladdin:open sesame' echo "Aladdin:open sesame" | base64 ANS: QWxhZGRpbjpvcGVuIHNlc2FtZQo= # ascii to hex echo "Aladdin:open sesame" | base64 | base64 -D | hexdump -v -e '/1 "%02x" ' ANS: 416c616464696e3a6f70656e20736573616d650a hex_str=”416c616464696e3a6f70656e20736573616d650a” echo $hex_str | xxd -r -p | base64 NOTE_BLOCK echo ""
true
b1db43f7ed7a6d4db21fd5f828645c40134017c2
Shell
dixondj/shell_scripting_exe
/greet_user/greet_user.sh
UTF-8
140
2.828125
3
[]
no_license
# !/bin/bash if [ -z $1 ] then echo "ERROR: No input is provided. Exiting now..." exit 1 else echo "Hello, nice to meet you $1" fi
true
3352ed9ec94a6064ab3b66652e8c45475d86a2bc
Shell
thomasmettler/tompi2
/ip_logger/ip_logger.sh
UTF-8
910
3.390625
3
[]
no_license
#!/bin/bash OUTPUT_FILE=ips.txt # Grab this server's public IP address #PUBLIC_IP=`curl -s https://ipinfo.io/ip` current_date_time="`date "+%Y-%m-%d %H:%M:%S"`"; ouput_ips="`sudo arp-scan --localnet --retry=3 | grep [a-z0-9][a-z0-9]:[a-z0-9][a-z0-9]:[a-z0-9][a-z0-9]:[a-z0-9][a-z0-9]:[a-z0-9][a-z0-9]:[a-z0-9][a-z0-9]`" #echo "Date: ${current_date_time}\nIP: ${ouput_ips}\n" | tr --delete \" echo "Date: ${current_date_time}" echo "${ouput_ips}" | \ tr --delete \" > \ ${OUTPUT_FILE} while read a b c; do echo "IP: "$a" MAC: "$b" System: "$c #echo $b if [ "$b" == "5c:c3:07:ed:bf:2e" ]; then echo "Found you!" curl -i -XPOST 'localhost:8086/write?db=mydb' --data-binary "iplogger,host=\"${a}\",mac=\"${b}\" system="\"${c}\" --silent --output /dev/null fi #echo "MAC: "$b #echo "System: "$c done < ${OUTPUT_FILE} #echo $last
true
a8a1a0716d721b85471e1a17b0278670a8f6d8e0
Shell
xifeiwu/workcode
/bash/exam/opensourcebak.sh
UTF-8
4,550
3.40625
3
[]
no_license
#!/bin/sh
#/etc/init.d/mysqld stop  # (optional) stop MySQL before the dump so no data is written mid-backup
#
# Backup plan: create one folder per day under BK_DIR, named like 2013-04-01.
# Its mysql/ subfolder holds the database dumps (project/ is currently unused).
# The mysql folder is tarred, the project tree under /opt/lampp is tarred and
# moved into the daily folder, and bak.log records progress.  Finished
# tarballs are uploaded to an FTP server; backups older than MAXDAYS days are
# pruned both locally and on the FTP server.

MAXDAYS=15                      # keep only the most recent 15 days of backups
BK_DIR=/root/BackUp/            # local backup root
SUBDIR_NAME=$(date +%Y-%m-%d)   # one subfolder per day
BK_SUBDIR="$BK_DIR$SUBDIR_NAME/"
MYSQL_DIR="${BK_SUBDIR}mysql"
PROJECT_DIR="${BK_SUBDIR}project"
BK_LOG="${BK_SUBDIR}bak.log"    # log file
LINUX_USER=root                 # system user that owns the backup tree
DB_USER=root                    # database user
DB_PASSWORD=ztb2012             # database password (NOTE(review): better kept in a protected option file)
FTP_ADDR=192.168.161.222        # backup FTP server
FTP_USER=xifei                  # FTP user
FTP_PASSWD=iscasztb             # FTP password
FTP_ROOTDIR=/                   # remote directory holding the backups
FTP_TMP=/tmp/tmp.txt            # scratch file for remote directory listings

# Make sure the local directory layout and the log file exist.
if [ ! -e "${BK_DIR}" ]; then
  mkdir -p "${BK_DIR}"
fi
chown -R "$LINUX_USER:$LINUX_USER" "$BK_DIR"   # hand the backup tree to the backup user
if [ ! -e "${BK_SUBDIR}" ]; then
  mkdir -p "${BK_SUBDIR}"
fi
if [ ! -e "${MYSQL_DIR}" ]; then
  mkdir -p "${MYSQL_DIR}"
fi
if [ ! -e "${PROJECT_DIR}" ]; then
  mkdir -p "${PROJECT_DIR}"
fi
if [ ! -e "${BK_LOG}" ]; then
  touch "${BK_LOG}"
fi

# List the remote backup root (used to probe whether a folder exists).
# POSIX function syntax: the "function" keyword is a bashism under #!/bin/sh.
lsftp() {
ftp -n "$FTP_ADDR" <<FTP
user $FTP_USER $FTP_PASSWD
cd $FTP_ROOTDIR
ls
FTP
}

# Create a folder on the FTP server: ftpmkdir <name>
ftpmkdir() {
ftp -n "$FTP_ADDR" <<EOF
user $FTP_USER $FTP_PASSWD
cd $FTP_ROOTDIR
mkdir $1
EOF
}

# Remove a folder on the FTP server: ftprmdir <name>  (currently unused)
ftprmdir() {
ftp -n "$FTP_ADDR" <<EOF
user $FTP_USER $FTP_PASSWD
cd $FTP_ROOTDIR
rmdir $1
EOF
}

# Upload files: upload <remote-subdir> <file-or-glob>
# Fix: quotes inside a here-document are sent literally to ftp, so the old
# `cd $FTP_ROOTDIR"/"$1` told the server to enter `/"/"opensource`.
upload() {
ftp -n "$FTP_ADDR" <<EOF
user $FTP_USER $FTP_PASSWD
binary
prompt
cd $FTP_ROOTDIR/$1
mput $2
quit
EOF
}

# Delete a remote file: ftpdelfile <remote-subdir> <file>
ftpdelfile() {
ftp -n "$FTP_ADDR" <<EOF
user $FTP_USER $FTP_PASSWD
cd $FTP_ROOTDIR/$1
delete $2
quit
EOF
}

date=$(date +%Y%m%d)   # timestamp embedded in the backup file names

# 1. Dump the databases into the daily mysql/ folder.
cd "$MYSQL_DIR" || exit 1
mysqldump -u "$DB_USER" --password="$DB_PASSWORD" rwwb > rwwb.sql
mysqldump -u "$DB_USER" --password="$DB_PASSWORD" community > community.sql
mysqldump -u "$DB_USER" --password="$DB_PASSWORD" phpcms > phpcms.sql
cd ..
echo "=====================1.make tarball for database back up=======================" > "$BK_LOG"
tar -zcvf "opensource-$date.sql.tar.gz" mysql >> "$BK_LOG"

# 2. Archive the project tree.
cd /opt/lampp || exit 1
if [ -e "project.tar.gz" ]; then   # drop any stale tarball from a previous run
  rm project.tar.gz
fi
echo "====================2.make tarball for project back up========================" >> "$BK_LOG"
tar -zcf project.tar.gz web >> "$BK_LOG"
mv project.tar.gz "$BK_SUBDIR/opensource-$date.project.tar.gz"
#/etc/init.d/mysqld start  # (optional) restart MySQL once the backup is done

# 3. Upload to the FTP server; create the "opensource" folder on first run.
if ! lsftp | grep opensource > "$FTP_TMP"; then
  ftpmkdir opensource
  echo "=====================3.opensource dir has been created=======================" >> "$BK_LOG"
fi
cd "$BK_SUBDIR" || exit 1
echo "======================3.upload file to ftp server======================" >> "$BK_LOG"
upload opensource "opensource-$date.sql.tar.gz"
upload opensource "opensource-$date.project.tar.gz"

# 4. Prune backups older than MAXDAYS days, locally and on the FTP server.
cd "$BK_DIR" || exit 1
old_dir=$(date -d -"$MAXDAYS"day +%Y-%m-%d)   # GNU date extension, e.g. 2013-03-17
[ -n "$old_dir" ] && rm -rf "$old_dir"        # guard: never run rm -rf with an empty name
deldate=$(date -d -"$MAXDAYS"day +%Y%m%d)
ftpdelfile opensource "opensource-$deldate.sql.tar.gz"
ftpdelfile opensource "opensource-$deldate.project.tar.gz"
true
a976399f03819db82fc44c87c0dff54f72b62302
Shell
vaibs1997/shell-programming
/Arrays/repeatedTwice.sh
UTF-8
126
3.015625
3
[]
no_license
#!/bin/bash for (( i=1; i<=100; i++ )) do if [ $(($i%11)) == 0 ] then arr[$i]=$i fi done echo ${arr[@]}
true
d3b634976291f70e562d33e97e4e8f9260782926
Shell
jbussdieker/c-http_server
/autogen.sh
UTF-8
473
3.375
3
[]
no_license
#!/bin/sh warn() { echo "WARNING: $@" 1>&2 } case `uname -s` in Darwin) LIBTOOLIZE=glibtoolize ;; FreeBSD) LIBTOOLIZE=libtoolize ;; OpenBSD) LIBTOOLIZE=libtoolize ;; Linux) LIBTOOLIZE=libtoolize ;; SunOS) LIBTOOLIZE=libtoolize ;; *) warn "unrecognized platform:" `uname -s` LIBTOOLIZE=libtoolize ;; esac set -ex $LIBTOOLIZE --copy --force aclocal -I m4 automake --add-missing --copy --foreign autoconf ./configure $@
true
22d809d37f39a76fe74422c82e8dd3e58db060fe
Shell
urjaman/szdev-recipes
/pre-load.sh
UTF-8
231
2.9375
3
[]
no_license
#!/bin/bash # This is a little hack of a script to load the big git repos from outside dchrt # by just abusing makepkg set -e set -x MAKEPKG=makepkg for d in gcc-new binutils; do cd $d $MAKEPKG -A -d -o -p PNDBUILD cd .. done
true
3de58811281c3673b7233f0cff8cecf8c136052e
Shell
gomttang/dotfiles
/bootstrap.sh
UTF-8
1,385
3.0625
3
[]
no_license
# Brew install if test ! $(which brew); then ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" fi brew update brew tap homebrew/bundle # Brewfile 을 읽어서 업데이트 한다. brew bundle --file=$HOME/Dropbox/dotfiles/Brewfile brew cleanup brew cask cleanup # git file update [ ! -f $HOME/.gitconfig ] && ln -nfs $HOME/Dropbox/dotfiles/.gitconfig $HOME/.gitconfig [ ! -f $HOME/.gitignore_global ] && ln -nfs $HOME/Dropbox/dotfiles/.gitignore_global $HOME/.gitignore_global # zsh to default shell & Install Oh-my-zsh chsh -s $(which zsh) sh -c "$(wget https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh -O -)" [ ! -f $HOME/.zshrc ] && ln -nfs $HOME/Dropbox/dotfiles/.zshrc $HOME/.zshrc source $HOME/.zshrc # Mackup 환경 설정 파일(.mackup.cfg)파일을 홈 디렉터리에 링크한다. [ ! -f $HOME/.mackup.cfg ] && ln -nfs $HOME/Dropbox/dotfiles/.mackup.cfg $HOME/.mackup.cfg # osx #source $HOME/Dropbox/dotfiles/.osx # node & npm brew install node brew install python sudo npm install -g yarn sudo pip install awscli sudo npm install -g eslint # .ssh Mkdir ~/.ssh cp $HOME/Dropbox/ssh/* ~/.ssh/ # makeup brew install mackup mackup restore # Keyboard Speed Setting defaults write -g InitialKeyRepeat -int 10 # normal minimum is 15 defaults write -g KeyRepeat -int 1 # normal minimum is 2
true