blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
f17495cc0a2302e1972261703fe363c4bdf9e728
|
Shell
|
ArnaudDroitLab/an_cryptic
|
/scripts/DefineIntergenicRegions.sh
|
UTF-8
| 4,704
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
# Define intergenic regions of the yeast genome and classify them as
# promoter / downstream / intergenic relative to the closest gene, then
# (below) split BAM files by region and strand and build coverage matrices.
#PBS -l nodes=1:ppn=1
#PBS -l walltime=2:00:00
#PBS -A eav-760-aa
#PBS -o Intergenic.sh.stdout
#PBS -e Intergenic.sh.stderr
#PBS -V
#PBS -N Intergenic-split-sense
cd /gs/project/eav-760-aa/RNA-CKII-Spt6
# Start from a clean output directory; previous results are discarded.
rm -rf output/intergenic
mkdir -p output/intergenic
# Load bedtools.
module add mugqic/samtools/1.3
module add mugqic/bedtools/2.22.1
# UCSC-obtained data has "chr" in front of chromosome number. Remove that.
sed -e 's/^chr//' input/UCSC_SGD_Genes.bed > output/intergenic/UCSC_SGD_Genes.nochr.bed
# Define a list of all intergenic regions
bedtools subtract -a input/All.bed -b output/intergenic/UCSC_SGD_Genes.nochr.bed > output/intergenic/intergenic.bed
# Split and annotate all intergenic regions based on their proximity to the closest gene.
# (1000 is the bp window passed to the annotation script.)
perl scripts/annotate_split.pl output/intergenic/UCSC_SGD_Genes.nochr.bed 1000 < output/intergenic/intergenic.bed > output/intergenic/intergenic.split.bed
# Split whole annotation into promoter, downstream and intergenic files.
# NOTE(review): these BED files are written into the current directory, not
# output/intergenic/ — the samtools heredocs below rely on that.
for type in promoter downstream
do
# The trailing +/- in each record is the strand; split records by strand.
grep -e "$type.*+$" output/intergenic/intergenic.split.bed > $type.plus.bed
grep -e "$type.*-$" output/intergenic/intergenic.split.bed > $type.minus.bed
done
grep -e "intergenic" output/intergenic/intergenic.split.bed > intergenic.bed
# Declare a function for submitting jobs using preset parameters.
RAP_ID=eav-760-aa
# Submit the script named in $1 as a Torque/PBS job.
# Arguments:
#   $1 - script to submit; also used as the job name and stdout/stderr prefix
#   $2 - (optional) job id this job depends on (afterok dependency)
# Outputs: echoes the qsub output line containing the new job id.
function submit_job {
    local dependFlag=""
    if [ -n "$2" ]; then
        dependFlag="-W depend=afterok:$2"
    fi
    # $dependFlag is deliberately unquoted: it must split into two words
    # ("-W" and "depend=...") or disappear entirely when empty.
    qsub -m ae -M "$JOB_MAIL" -W umask=0002 -A "$RAP_ID" -d "$(pwd)" \
        -j oe -o "$1".stdout \
        -e "$1".stderr \
        -N "$1" \
        -l walltime=1:00:0 -q metaq -l nodes=1:ppn=8 -l pmem=1700m \
        $dependFlag "$1" | grep "[0-9]"
}
# Declare a named array to keep job IDs to keep track of dependencies.
declare -A job_ids
# Split the bam files
for lib in CK2 WT Spt6-SA CK2-2 WT-2 Spt6-SA-2
do
# Get the bam file's name.
BAMFILE=output/bwa/$lib.sorted.bam
for type in promoter downstream
do
for direction in sense antisense
do
# samtools -f 16 keeps reads with the reverse-strand flag set, -F 16
# excludes them; swapping the two selects the opposite strand
# (presumably sense vs antisense relative to the region — confirm
# against annotate_split.pl's strand convention).
plus_samtools_arg="-f"
minus_samtools_arg="-F"
if [ $direction = "antisense" ]
then
plus_samtools_arg="-F"
minus_samtools_arg="-f"
fi
prefix=output/intergenic/$lib.$type.$direction
# Write a small per-combination job script; the unquoted heredoc expands
# $plus_samtools_arg, $type, $BAMFILE and $prefix locally, right now.
cat > $lib.$type.$direction.generate_bam.sh <<EOF
module add mugqic/samtools/1.3
samtools view -b -h $plus_samtools_arg 16 -L $type.plus.bed $BAMFILE > $prefix.1.bam
samtools view -b -h $minus_samtools_arg 16 -L $type.minus.bed $BAMFILE > $prefix.2.bam
samtools merge $prefix.sorted.bam $prefix.1.bam $prefix.2.bam
samtools index $prefix.sorted.bam
rm $prefix.1.bam $prefix.2.bam
EOF
# Remember the job id so the matrix jobs below can depend on it.
job_ids[$lib.$type.$direction]=$(submit_job $lib.$type.$direction.generate_bam.sh )
done
done
# Intergenic regions are strand-agnostic: one job per library, no -f/-F split.
cat > $lib.intergenic.generate_bam.sh <<EOF
module add mugqic/samtools/1.3
samtools view -b -h -L intergenic.bed $BAMFILE > output/intergenic/$lib.intergenic.sorted.bam
samtools index output/intergenic/$lib.intergenic.sorted.bam
EOF
job_ids[$lib.intergenic]=$(submit_job $lib.intergenic.generate_bam.sh)
done
# Second pass: for every split BAM produced above, build a bigwig coverage
# track and a deepTools matrix, submitted with an afterok dependency on the
# corresponding BAM-generation job.
for lib in CK2 WT Spt6-SA CK2-2 WT-2 Spt6-SA-2
do
for type in promoter downstream
do
# Region file with both strands for this annotation type (written to cwd).
grep -e "$type" output/intergenic/intergenic.split.bed > $type.bed
for direction in sense antisense
do
prefix=output/intergenic/$lib.$type.$direction
BAMFILE=$prefix.sorted.bam
BIGWIG=$prefix.sorted.bigwig
cat > $lib.$type.$direction.generate_matrix.sh <<EOF
module load mugqic/python/2.7.8
./bin/bamCoverage --skipNonCoveredRegions --binSize 10 --numberOfProcessors 8 -b $BAMFILE -o $BIGWIG
./bin/computeMatrix reference-point --nanAfterEnd --numberOfProcessors 8 --regionsFileName $type.bed --scoreFileName $BIGWIG --outFileName $prefix.sorted.matrix.gz
EOF
submit_job $lib.$type.$direction.generate_matrix.sh ${job_ids[$lib.$type.$direction]}
done
done
# And finally, do intergenic.
BAMFILE=output/intergenic/$lib.intergenic.sorted.bam
BIGWIG=output/intergenic/$lib.intergenic.sorted.bigwig
# NOTE(review): $prefix below is stale — it still holds the last
# promoter/downstream value from the inner loop, so the intergenic matrix is
# written under that name instead of an intergenic-specific one. Likely a bug.
cat > $lib.intergenic.generate_matrix.sh <<EOF
module load mugqic/python/2.7.8
./bin/bamCoverage --skipNonCoveredRegions --binSize 10 --numberOfProcessors 8 -b $BAMFILE -o $BIGWIG
./bin/computeMatrix scale-regions --numberOfProcessors 8 --regionsFileName intergenic.bed --scoreFileName $BIGWIG --outFileName $prefix.sorted.matrix.gz
EOF
submit_job $lib.intergenic.generate_matrix.sh ${job_ids[$lib.intergenic]}
done
| true
|
4610b37c1d54eb53c1a62c8ff4cfc92e0d77a260
|
Shell
|
ChickenCoding/BootBBCrafty
|
/bbtemplate.sh
|
UTF-8
| 462
| 2.796875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Scaffold generator: for each name given on the command line, create empty
# Backbone/Crafty source files (view/model/crafty/collection) under
# dev/coffee/. The target directories must already exist.
rep=dev/coffee/
model=model/
view=view/
collection=collection/
crafty=crafty/
for theFile in "$@"
do
    # Quote every expansion so names containing spaces are handled safely.
    touch "${rep}${view}${theFile}.view.js"
    touch "${rep}${model}${theFile}.model.js"
    touch "${rep}${crafty}${theFile}.crafty.js"
    touch "${rep}${collection}${theFile}.collection.js"
done
echo "****************************************"
# Typo fix: "files creates" -> "files created".
echo "files created, writing process now"
echo " *** *** *** *** *** "
| true
|
5c49c1a025c76a30778b884bb12aeb554a278e03
|
Shell
|
zhaogang1993/ProgramOnLinux
|
/PUBILIC/build/buildall.sh
|
GB18030
| 547
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# Build every sub-project: find each file whose path contains "makefile"
# under the source tree and run `make -f` on it from its own directory.
find_ret=$(find /home/zhaogang/share/code_new | grep makefile)
NULL=/dev/null
TARGET_PATH=/home/zhaogang/share/code_new/PUBILIC/target/
LIBPATH=/usr/lib/
# Export so the per-project makefiles can read the output/library locations.
export TARGET_PATH LIBPATH
# NOTE: relies on word splitting, so makefile paths must not contain spaces.
for makefile in $find_ret
do
    # Redirect to /dev/null to silence pushd/popd's directory-stack output.
    pushd "$(dirname "$makefile")" > $NULL
    echo -e "***********************build-begin******************************"
    make -f "$makefile"
    echo -e "***********************build-end********************************\n"
    popd > $NULL
done
| true
|
481a3ba06e0d76846d7e51b0e9bfa149fc0b679f
|
Shell
|
p1ne/irz-firmware
|
/mkfirmware
|
UTF-8
| 326
| 2.5625
| 3
|
[] |
no_license
|
#!/bin/sh
# Assemble the IRZ RUH2b firmware bundle (kernel image, squashfs rootfs and
# md5 list) from a Buildroot output tree into ./fw, then tar it up as
# firmware.bin. No-op when $BUILDROOT is not configured.
[ -z "$BUILDROOT" ] && exit 0
cp "$BUILDROOT/output/build/linux-custom/arch/arm/boot/uImage.irz-ruh2b" ./fw/uImage.RUH2b
# -f: do not fail when no previous image exists.
rm -f ./fw/fsImage.RUH2b
mksquashfs "$BUILDROOT/board/irz/ruh2b/skeleton" ./fw/fsImage.RUH2b -comp xz
# Strip the "./fw/" prefix so update.md5 references bare file names.
md5sum ./fw/*.RUH2b | sed -e 's/.\/fw\///g' > ./fw/update.md5
# Guard the cd so tar cannot run against the wrong directory.
cd fw || exit 1
tar -cvf ../firmware.bin *
cd ..
| true
|
fdf0fee728a8b456745841a44d3009ec76627139
|
Shell
|
franciscomaion/Spectre
|
/Other/Fisher.sh
|
UTF-8
| 1,293
| 2.5625
| 3
|
[] |
no_license
|
#PBS -S /bin/bash
#PBS -N Fisher_run
#PBS -V
#PBS -l nodes=1:ppn=1
#PBS -l walltime=0:20:00
#PBS -p 1023
#PBS -e /home/mjw/HOD_MockRun/W1_Spectro_V7_7/fisher/Fisher_stderr.pbs
#PBS -o /home/mjw/HOD_MockRun/W1_Spectro_V7_7/fisher/Fisher_stdout.pbs
# Export the survey/run parameters consumed by driver_Fisher.c.
# Renamed from `test`: defining a shell function called `test` shadows the
# test builtin (and `[` semantics) for the rest of the script.
set_run_parameters(){
    export outputdir=/home/mjw/HOD_MockRun/W1_Spectro_V7_7.1
    export mask_Qldir=/home/mjw/HOD_MockRun/W1_Spectro_V7_2
    export LOZ=0.6
    export HIZ=0.9
    export FIELDFLAG=4
    export d0=1000
    export KMAX=0.8
    # Clear any stale IO lock left by a previous run.
    rm -r /home/mjw/IO_lock/
}
set_run_parameters
DIR="$HOME/HOD_MockRun/Scripts/"
cd "$DIR"
export BRANCH=$(git symbolic-ref --short HEAD) # current Git branch
export GSL_RNG_SEED=123
export GSL_RNG_TYPE=taus
export OMP_NUM_THREADS=1 # Threads = allocated processors.
cd ..
# -g: gnu debug; -w: no warnings; -o2/-03: optimization level; -DHCUBATURE; Scripts/cubature/hcubature.c;
# -std=gnu99 (for C99 with GNU extensions; https://gcc.gnu.org/onlinedocs/gcc-5.1.0/gcc/Standards.html);
# current default standard is equivalent to -std=gnu90, which is the 1989/1990 standard with GNU-specific
# extensions. gcc 5.1.0 (2015-04-22) changed from gnu90 to gnu11.
gcc -pedantic -std=gnu11 -O2 -o Fisher.o Scripts/driver_Fisher.c -fopenmp -lfftw3_omp -lfftw3 -lm -lgsl -lgslcblas
./Fisher.o $d0 $FIELDFLAG $LOZ $HIZ $KMAX
| true
|
cd66a614707cc93f0cae51b2f68278cde6917f72
|
Shell
|
shmolyneaux/sous-vide-cooker
|
/etc_init.d_SnakeCharmer
|
UTF-8
| 2,242
| 3.75
| 4
|
[] |
no_license
|
#!/bin/sh
### BEGIN INIT INFO
# Provides:          SnakeCharmer
# Required-Start:    $local_fs $remote_fs $network $syslog $storserv
# Required-Stop:     $local_fs $remote_fs $network $syslog $storserv
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: Start/stop SnakeCharmer python web server
### END INIT INFO
#
# SnakeCharmer     This init.d script is used to start SnakeCharmer.
#                  It basically just calls SnakeCharmer.
# Installation paths and the unprivileged user the server runs as.
SNAKECHARMER_PATH=/usr/local/SnakeCharmer/6.0/SnakeCharmer.py
SNAKECHARMER_ERROR_FILE=/usr/local/SnakeCharmer/6.0/err.txt
SNAKECHARMER_LOG_FILE=/usr/local/SnakeCharmer/6.0/log.txt
SNAKECHARMER_USER=stephen
# Prints the path (boot-time visibility) before the executability check.
ls $SNAKECHARMER_PATH
# Exit 0 (not an error for init) when the server script is not installed.
if [ -x $SNAKECHARMER_PATH ] ; then
    HAVE_SNAKECHARMER=1
else
    echo "Can't find SnakeCharmer"
    exit 0
fi
# LSB helpers: log_daemon_msg, log_success_msg, log_failure_msg.
. /lib/lsb/init-functions
# Echo the PIDs (possibly none) of running SnakeCharmer server processes.
# Always returns 0 so callers may use the output unconditionally.
pidof_SnakeCharmer() {
    # pgrep exits non-zero when nothing matches; the `|| true` keeps that
    # from propagating as a failure of this helper.
    PIDS=$(pgrep -f "python3 $SNAKECHARMER_PATH") || true
    echo $PIDS
    return 0
}
# Dispatch on the init action (start/stop/restart/status).
case $1 in
  start)
    log_daemon_msg "Starting web server" "SnakeCharmer"
    echo
    # Make the log files writable by the service user before dropping
    # privileges; output is redirected there by the sh -c wrapper.
    chown $SNAKECHARMER_USER $SNAKECHARMER_LOG_FILE
    chown $SNAKECHARMER_USER $SNAKECHARMER_ERROR_FILE
    sudo -u $SNAKECHARMER_USER -H sh -c "python3 $SNAKECHARMER_PATH > $SNAKECHARMER_LOG_FILE 2> $SNAKECHARMER_ERROR_FILE" &
    ;;
  stop)
    log_daemon_msg "Stopping web server" "SnakeCharmer"
    echo
    # NOTE(review): if no instance is running the PID list is empty and
    # `kill` prints a usage error — harmless, but noisy; confirm intended.
    kill $(pidof_SnakeCharmer)
    ;;
  restart)
    # Stop followed by start, inline rather than re-invoking this script.
    log_daemon_msg "Stopping web server" "SnakeCharmer"
    echo
    kill $(pidof_SnakeCharmer)
    log_daemon_msg "Starting web server" "SnakeCharmer"
    sudo -u $SNAKECHARMER_USER -H sh -c "python3 $SNAKECHARMER_PATH > $SNAKECHARMER_LOG_FILE 2> $SNAKECHARMER_ERROR_FILE" &
    ;;
  status)
    # Exit 0 when running, 1 when not (LSB status convention).
    PID=$(pidof_SnakeCharmer)
    if [ -n "$PID" ]; then
      log_success_msg "SnakeCharmer is running (pid $PID)."
      echo
      exit 0
    else
      log_failure_msg "SnakeCharmer is not running."
      echo
      exit 1
    fi
    ;;
  *)
    log_success_msg "Usage: /etc/init.d/SnakeCharmer {start|stop|restart|status}"
    echo
    exit 1
    ;;
esac
| true
|
1825cb4e71619226089a6619c341427ee25420dc
|
Shell
|
DavidKo3/xla
|
/test/run_tests.sh
|
UTF-8
| 593
| 3.4375
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
set -ex

# Absolute directory containing this script.
CDIR="$(cd "$(dirname "$0")" ; pwd -P)"

# Defaults; overridable via -L (disable log file), -M and -C.
LOGFILE=/tmp/pytorch_py_test.log
MAX_GRAPH_SIZE=1000
GRAPH_CHECK_FREQUENCY=100

while getopts 'LM:C:' OPTION; do
  case $OPTION in
    L) LOGFILE= ;;
    M) MAX_GRAPH_SIZE=$OPTARG ;;
    C) GRAPH_CHECK_FREQUENCY=$OPTARG ;;
  esac
done
shift $((OPTIND - 1))

export TRIM_GRAPH_SIZE=$MAX_GRAPH_SIZE
export TRIM_GRAPH_CHECK_FREQUENCY=$GRAPH_CHECK_FREQUENCY

# Remaining arguments are forwarded to the test module; tee the output
# into the log file unless logging was disabled with -L.
if [ -z "$LOGFILE" ]; then
  python3 "$CDIR/test_operations.py" "$@"
else
  python3 "$CDIR/test_operations.py" "$@" 2>&1 | tee $LOGFILE
fi
| true
|
09b06af93520ceac5f335dea99d389a1ca54010e
|
Shell
|
LineageOS/android_system_extras
|
/cppreopts/cppreopts.sh
|
UTF-8
| 2,575
| 3.859375
| 4
|
[] |
no_license
|
#!/system/bin/sh
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# create files with 644 (global read) permissions.
umask 022

# Copy $1 to $2 atomically: write to $2.tmp, sync, then rename into place.
# Progress and failures are reported through the Android `log` tool; the
# function never aborts the script.
# Fix: quote every expansion so paths containing spaces or glob characters
# are handled (the original left all of them unquoted).
function do_copy() {
  local source_file=$1
  local dest_name=$2
  # Move to a temporary file so we can do a rename and have the preopted file
  # appear atomically in the filesystem.
  local temp_dest_name=${dest_name}.tmp
  if ! cp "${source_file}" "${temp_dest_name}" ; then
    log -p w -t cppreopts "Unable to copy file ${source_file} to ${temp_dest_name}!"
  else
    log -p i -t cppreopts "Copied file from ${source_file} to ${temp_dest_name}"
    # Flush to storage before the rename so a crash cannot publish an
    # incompletely-written file under the final name.
    sync
    if ! mv "${temp_dest_name}" "${dest_name}" ; then
      log -p w -t cppreopts "Unable to rename temporary file from ${temp_dest_name} to ${dest_name}"
      rm "${temp_dest_name}" || log -p w -t cppreopts "Unable to remove temporary file ${temp_dest_name}"
    else
      log -p i -t cppreopts "Renamed temporary file from ${temp_dest_name} to ${dest_name}"
    fi
  fi
}
# Entry point: expects exactly one argument, the mount point of the
# system_other partition that holds the preopted files.
if [ $# -eq 1 ]; then
  # Where the system_b is mounted that contains the preopt'd files
  mountpoint=$1

  # The marker file is only present when the image was built with preopted
  # files; bail out (non-zero) otherwise.
  if ! test -f ${mountpoint}/system-other-odex-marker ; then
    log -p i -t cppreopts "system_other partition does not appear to have been built to contain preopted files."
    exit 1
  fi

  log -p i -t cppreopts "cppreopts from ${mountpoint}"
  # For each odex and vdex file do the copy task
  # NOTE: this implementation will break in any path with spaces to favor
  # background copy tasks
  for file in $(find ${mountpoint} -type f -name "*.odex" -o -type f -name "*.vdex" -o -type f -name "*.art"); do
    # Rewrite the mount-point prefix to /system, then ask preopt2cachename
    # where the file belongs in the cache.
    real_name=${file/${mountpoint}/\/system}
    dest_name=$(preopt2cachename ${real_name})
    if ! test $? -eq 0 ; then
      log -p i -t cppreopts "Unable to figure out destination for ${file}"
      continue
    fi
    # Copy files in background to speed things up
    do_copy ${file} ${dest_name} &
  done
  # Wait for jobs to finish
  wait
  exit 0
else
  log -p e -t cppreopts "Usage: cppreopts <preopts-mount-point>"
  exit 1
fi
| true
|
8a388c6ff01299ff0bcb7e3a5bb3d8f89d148c89
|
Shell
|
GluuFederation/community-edition-package
|
/update/3.0.2/bin/update_ldap.sh
|
UTF-8
| 4,112
| 3.9375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Gluu CE 3.0.2 LDAP updater: backs up and patches the oxAuth dynamic
# configuration stored in OpenDJ (adds CORS filter settings).
UPDATE_VERSION="3.0.2"
BACKUP_TIME=`date +%Y-%m-%d.%H:%M:%S`
BACKUP_FOLDER="/opt/upd/$UPDATE_VERSION/backup_ldap_$BACKUP_TIME"
# Decode the LDAP bind password from ox-ldap.properties and stash it in a
# file for the ldapsearch/ldapmodify -j option. Returns 1 on failure.
init () {
# Prepare required data
echo "Preparing for appling updates..."
# Determine Gluu LDAP password
LDAP_PWD=`cat /etc/gluu/conf/ox-ldap.properties | grep "^bindPassword: " | awk -F": " '{print $2}' | xargs -0 -I {} /opt/gluu/bin/encode.py -d {}`
if [ $? -ne 0 ]; then
echo "Failed to determine Gluu LDAP password"
return 1
fi
LDAP_PWD_FILE=/home/ldap/.pw
echo $LDAP_PWD > $LDAP_PWD_FILE
return 0
}
# Append a configuration option to a JSON file unless its key already exists.
# Arguments:
#   $1 - JSON file to modify
#   $2 - key (grep pattern) whose presence means "already configured"
#   $3 - text to append after stripping the closing "}" line; \n escapes are
#        expanded (echo -e), so the text must re-add the closing brace
# Returns: 0 if the key already existed (no change), 1 if the option was
#          added — the caller counts these 1-returns as pending changes.
add_json_configuration_option() {
    local FILE_NAME=$1
    local CONFIGURATION_KEY=$2
    local CONFIGURATION_OPTION=$3
    # Quote pattern and path (the original left both unquoted) and let grep
    # read the file directly instead of piping through cat.
    if grep -q -- "$CONFIGURATION_KEY" "$FILE_NAME"; then
        # Configuration already has this option
        return 0
    fi
    # Create copy without last "}", then append the new option text (which
    # closes the JSON object again).
    cat "$FILE_NAME" > "$FILE_NAME.tmp"
    grep -v "^}$" "$FILE_NAME.tmp" > "$FILE_NAME"
    rm -f "$FILE_NAME.tmp"
    echo -e "$CONFIGURATION_OPTION" >> "$FILE_NAME"
    return 1
}
# START: Update for oxAuth configuration
# Back up the oxAuth dynamic configuration entry, add the CORS filter block
# if missing, and push the updated JSON back via ldapmodify.
# Returns: 0 on success or nothing-to-do, 1 if the backup failed,
#          2 if the ldapmodify failed.
apply_update1() {
# Check if oxAuth configuration updated already
echo "Checking if oxAuth configuration need to be updated..."
# Determine oxAuth config LDAP DN
OXAUTH_CONFIG_DN=`cat /etc/gluu/conf/ox-ldap.properties | grep "^oxauth_ConfigurationEntryDN" | grep "ou.*" -o`
mkdir -p $BACKUP_FOLDER
# Export entry which we are going to update
echo "Creating backup before updating LDAP"
BACKUP_FILE=$BACKUP_FOLDER/oxauth_config.ldif
/opt/opendj/bin/ldapsearch -h localhost -p 1636 -Z -X -T -D "cn=directory manager,o=gluu" -j $LDAP_PWD_FILE -b "$OXAUTH_CONFIG_DN" 'objectClass=oxAuthConfiguration' > $BACKUP_FILE
if [ $? -ne 0 ]; then
echo "Failed to prepare oxAuth configuration backup before updating LDAP"
return 1
fi
# LDIF may store the JSON either plain ("attr: ") or base64 ("attr:: ");
# pretty-print it into a standalone JSON file either way.
OXAUTH_DYNAMIC_CONFIG_FILE=$BACKUP_FOLDER/oxauth_dynamic.json
BASE_64_ENCODED=`cat $BACKUP_FILE | grep "^oxAuthConfDynamic:: " | wc -l`
if [[ $BASE_64_ENCODED == 0 ]]; then
# Value is not base64 encoded
cat $BACKUP_FILE | grep "^oxAuthConfDynamic: " | awk -F": " '{print $2}' | python -m json.tool > $OXAUTH_DYNAMIC_CONFIG_FILE
else
# Value is base64 encoded
cat $BACKUP_FILE | grep "^oxAuthConfDynamic:: " | awk -F":: " '{print $2}' | base64 --decode | python -m json.tool > $OXAUTH_DYNAMIC_CONFIG_FILE
fi
# The helper returns 1 when it actually added an option; sum those returns
# to learn whether anything changed.
COUNT_CHANGES=0
add_json_configuration_option $OXAUTH_DYNAMIC_CONFIG_FILE '"corsConfigurationFilters"' ', "corsConfigurationFilters": [{"filterName": "CorsFilter", "corsAllowedOrigins": "*", "corsAllowedMethods": "GET,POST,HEAD,OPTIONS", "corsAllowedHeaders": "Origin,Authorization,Accept,X-Requested-With,Content-Type,Access-Control-Request-Method,Access-Control-Request-Headers", "corsExposedHeaders": "", "corsSupportCredentials": true, "corsLoggingEnabled": false, "corsPreflightMaxAge": 1800, "corsRequestDecorate": true}]\n}'
((COUNT_CHANGES+=$?))
if [[ $COUNT_CHANGES == 0 ]]; then
echo "All new configuration options added already"
return 0
fi
# Build a modify LDIF that replaces oxAuthConfDynamic with the new JSON,
# base64-encoded on a single line.
echo "Preparing update operation"
OXAUTH_CONF_DYNAMIC_BASE64=`base64 --wrap 0 < $OXAUTH_DYNAMIC_CONFIG_FILE`
UPDATE_FILE=$BACKUP_FOLDER/oxauth_config_update.ldif
cat $BACKUP_FILE | grep "^dn: " > $UPDATE_FILE
echo "changetype: modify" >> $UPDATE_FILE
echo "replace: oxAuthConfDynamic" >> $UPDATE_FILE
echo "oxAuthConfDynamic:: $OXAUTH_CONF_DYNAMIC_BASE64" >> $UPDATE_FILE
echo "" >> $UPDATE_FILE
echo "Applying update"
/opt/opendj/bin/ldapmodify -h localhost -p 1636 -Z -X -D "cn=directory manager,o=gluu" -j $LDAP_PWD_FILE -f $UPDATE_FILE
if [ $? -ne 0 ]; then
echo "Failed to apply LDAP update"
return 2
fi
echo "Update was applied successfully"
return 0
}
# END: Update for oxAuth configuration
# Remove the temporary LDAP password file created by init(). Always returns 0.
finish() {
    echo "Removing temporary data"
    # The password holder is a regular file, so -f (not -rf) is sufficient;
    # quoting protects against an unset or unusual path.
    rm -f "$LDAP_PWD_FILE"
    return 0
}
# Run the update: prepare credentials, apply the oxAuth change, clean up.
# BUG FIX: the original used `if [ $? -ne 0 ]; then exit $?`, where the inner
# $? is the (successful) status of the `[` test itself — so an init() failure
# exited with status 0. Exit with an explicit non-zero code instead.
if ! init; then
    exit 1
fi
apply_update1
finish
| true
|
ad351cd7bca9e775d035565fe17a8ff7de4a3a34
|
Shell
|
hoto/vagrant-ubuntu-workstation
|
/deployment/install/install-eb-cli.sh
|
UTF-8
| 337
| 2.890625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Install the AWS Elastic Beanstalk CLI: bootstrap pip with get-pip.py, then
# install awsebcli from PyPI. (Fixes: missing shebang, no error handling,
# "Beanstlak" typo in the info message.)
set -e
echo "[INFO] Installing AWS Elastic Beanstalk CLI..."
# Installing PIP, there is no other way to install EB CLI
INSTALL_FOLDER='/home/vagrant/tmp/aws-eb-cli'
mkdir -p "${INSTALL_FOLDER}"
cd "${INSTALL_FOLDER}" && { curl -O https://bootstrap.pypa.io/get-pip.py; cd -; }
sudo python2.7 "${INSTALL_FOLDER}/get-pip.py"
sudo pip install awsebcli
| true
|
4a9763652ef07d047cb64a15f62849822b594b62
|
Shell
|
shadracnicholas/home-automation
|
/tools/orchestrate/systemd/deploy.sh
|
UTF-8
| 1,085
| 3.671875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Deploy a home-automation service to a remote host over SSH.
# Required environment: SERVICE (dotted service name), TARGET_USERNAME,
# DEPLOYMENT_TARGET (host), TARGET_DIRECTORY, LANG (per-language build
# script name), SYSTEMD_SERVICE (full unit-file contents).
# Abort if anything fails
set -e
# systemd unit names use dashes where the service name uses dots.
DASHES=$(echo $SERVICE | tr "." "-")
# The heredoc delimiter is UNQUOTED, so $TARGET_DIRECTORY, $SERVICE, $DASHES,
# $LANG and $SYSTEMD_SERVICE are expanded LOCALLY before the script is sent
# to the remote shell.
ssh -t -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null "$TARGET_USERNAME"@"$DEPLOYMENT_TARGET" << EOF
# Abort if anything fails
set -e
cd $TARGET_DIRECTORY
if [ ! -d "src" ]; then
git clone https://github.com/shadracnicholas/home-automation.git src
fi
cd src
git checkout -- .
git checkout master
git pull
# The or true will suppress the non-zero exit code if the service does not exist
# Note that this will still print an error to the console though
sudo systemctl stop "$DASHES".service || true
cd $SERVICE
bash ../tools/orchestrate/systemd/$LANG.sh
echo "Creating systemd service"
# The quotes are needed around the variable to preserve the new lines
echo "$SYSTEMD_SERVICE" | sudo tee /lib/systemd/system/$DASHES.service
sudo chmod 644 /lib/systemd/system/$DASHES.service
sudo systemctl daemon-reload
sudo systemctl enable "$DASHES".service
sudo systemctl start "$DASHES".service
EOF
| true
|
f9697eeed24309ac3151afac0378f88b1c05610f
|
Shell
|
Vampouille/geocat
|
/web/src/main/webapp/WEB-INF/data/config/codelist/gemet/gemet-to-simpleskos.sh
|
UTF-8
| 1,428
| 3.71875
| 4
|
[] |
no_license
|
#!/bin/sh
# Build a simple-SKOS GEMET thesaurus: download the GEMET RDF files for each
# locale given on the command line, then run the gemet-to-simpleskos.xsl
# transform with Xalan. Usage: ./gemet-to-simpleskos.sh <locale>...
# (Fixes: unquoted `[ $1 ]` test, $* instead of "$@", unquoted expansions,
# "langague" typo.)
if [ -n "${1:-}" ]
then
echo "Downloading GEMET thesaurus:"
echo " * backbone ..."
wget http://www.eionet.europa.eu/gemet/gemet-backbone.rdf
echo " * skoscore ..."
wget http://www.eionet.europa.eu/gemet/gemet-skoscore.rdf
echo " * language files:"
# "$@" keeps each locale a separate word even if it contains odd characters.
for locale in "$@"; do
echo "    loading: $locale ..."
wget --output-document="gemet-definitions-$locale.rdf" "http://www.eionet.europa.eu/gemet/gemet-definitions.rdf?langcode=$locale"
done
# Creating list of locales for XSL processing
export LOCALES="<locales>"
for locale in "$@"; do
export LOCALES=$LOCALES"<locale>"$locale"</locale>"
done
export LOCALES=$LOCALES"</locales>"
echo "$LOCALES" > locales.xml
echo "Creating thesaurus ..."
export GEONETWORK_HOME=../../../..
export CLASSPATH=.:$GEONETWORK_HOME/WEB-INF/lib/xml-apis-1.3.04.jar:$GEONETWORK_HOME/WEB-INF/lib/xercesImpl-2.7.1.jar:$GEONETWORK_HOME/WEB-INF/lib/xalan-2.7.1.jar:$GEONETWORK_HOME/WEB-INF/lib/serializer-2.7.1.jar
java org.apache.xalan.xslt.Process -IN gemet-backbone.rdf -XSL gemet-to-simpleskos.xsl -OUT gemet.rdf
echo "Deploying to catalogue codelist directory:"
# mv gemet.rdf ../external/thesauri/theme/.
# rm locales.xml
# rm *.rdf
echo "Done."
else
echo "Usage: ./gemet-to-simpleskos.sh en fr de";
echo "to create a GEMET thesaurus with english, french and deutsch languages."
fi
| true
|
0fbfe5ae2a18880b721b80aeef53f1987b4353f5
|
Shell
|
nickwallen/metron-environments
|
/run.sh
|
UTF-8
| 700
| 3.453125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
#
# Deploy Metron to the target environment.
#
#

# First argument: path to the Ansible inventory definition (required).
INVENTORY_PATH=$1
[ -n "$INVENTORY_PATH" ] || { echo "error: $0 [path-to-inventory-definition]"; exit 1; }

# ensure METRON_HOME is defined
[ -n "$METRON_HOME" ] || { echo "Error: missing environment var: METRON_HOME"; exit 1; }

# ensure METRON_SSH_KEY is defined
[ -n "$METRON_SSH_KEY" ] || { echo "Error: missing environment var: METRON_SSH_KEY"; exit 1; }

# Everything after the inventory path is forwarded to ansible-playbook.
EXTRA_ARGS="${@:2}"

ansible-playbook \
    $METRON_HOME/metron-deployment/playbooks/metron_full_install.yml \
    --tags="metron-hbase-tables,metron-kafka-topics,enrichment,elasticsearch_templates" \
    -i $INVENTORY_PATH \
    $EXTRA_ARGS
| true
|
82dadb216c6a885aaa96da50b02941db51af95d7
|
Shell
|
HuuuuuwIn/LogMng
|
/yjq-in.sh
|
UTF-8
| 1,441
| 4.125
| 4
|
[] |
no_license
|
#!/bin/bash
## java env
export JAVA_HOME=/usr/java/jdk1.8.0
export JRE_HOME=$JAVA_HOME/jre
# Replace APP_NAME with your own executable jar; nothing else needs changing.
APP_NAME=yjq-in-1.0.0.jar
# Usage message, shown when the action argument is missing or unrecognized.
usage() {
echo "Usage: sh 执行脚本.sh [start|stop|restart|status]"
exit 1
}
# Check whether the application is running.
is_exist(){
pid=`ps -ef|grep $APP_NAME|grep -v grep|awk '{print $2}' `
# Returns 1 if not running, 0 if running ($pid is left set for callers).
if [ -z "${pid}" ]; then
return 1
else
return 0
fi
}
# Start the application (no-op with a message if it is already running).
start(){
is_exist
if [ $? -eq "0" ]; then
echo "${APP_NAME} is already running. pid=${pid} ."
else
nohup java -Xmx256m -Xms256m -jar $APP_NAME --spring.profiles.active=online >/dev/null 2>&1 &
echo ">>> start $APP_NAME successed PID=$! <<<"
fi
}
# Stop the application (SIGKILL; relies on $pid set by is_exist).
stop(){
is_exist
if [ $? -eq "0" ]; then
kill -9 $pid
echo ">>> stop $APP_NAME successed PID=$pid! <<<"
else
echo "${APP_NAME} is not running"
fi
}
# Report whether the application is running.
status(){
is_exist
if [ $? -eq "0" ]; then
echo "${APP_NAME} is running. Pid is ${pid}"
else
echo "${APP_NAME} is NOT running."
fi
}
# Restart: stop followed by start.
restart(){
stop
start
}
# Dispatch on the first argument; fall back to the usage message.
case "$1" in
"start")
start
;;
"stop")
stop
;;
"status")
status
;;
"restart")
restart
;;
*)
usage
;;
esac
| true
|
0660e1293a5f878d5d99a768e9351d2962aca231
|
Shell
|
mangelajo/quickstart-routing
|
/setup.sh
|
UTF-8
| 3,399
| 3.671875
| 4
|
[] |
no_license
|
#!/bin/sh
# Set up routing between the local machine and a TripleO virt host: add an IP
# on the libvirt bridge, configure forwarding, distribute the local SSH
# pubkey to the undercloud and overcloud nodes, and generate ssh/Ansible
# config files (inventory, ansible.cfg, ~/.ssh/ooo_config).
source config
VIRT_IP=$(dig +short $VIRTHOST)
SSHCMD="ssh $SSH_OPTS $TRIPLEO_USER@$VIRT_IP"
# NOTE(review): the `|| exit 1` is unreachable — the preceding echo always
# succeeds — so the script keeps going even when the ssh probe fails.
$SSHCMD hostname >/dev/null|| echo "Can't ssh to $VIRTHOST ($VIRT_IP)" || exit 1
# Interface on the virt host that carries 192.168.1.144 (hardcoded address).
FWD_IFACE=$($SSHCMD "sudo ip a | grep 192.168.1.144 | grep -o \"\w*\$\"")
echo "* The remote interface for $VIRT_IP is $FWD_IFACE"
#
# Configure the VIRTHOST to route traffic back and forth
#
echo "* Adding an IP address on the libvirt bridge on the host ($BRIDGE_ADDR)"
$SSHCMD "sudo ip a add $BRIDGE_ADDR/$BRIDGE_MASK dev $VIRTD_BRIDGE 2>&1 | grep -v exists"
echo "* Configuring forwarding from/to the libvirt bridge $VIRTD_BRIDGE"
# -C checks whether the rule exists before -I inserts it (idempotency).
$SSHCMD "sudo iptables -C FORWARD -i $FWD_IFACE -o $VIRTD_BRIDGE -s $LOCAL_NET -d $BRIDGE_NET -j ACCEPT || \
sudo iptables -I FORWARD -i $FWD_IFACE -o $VIRTD_BRIDGE -s $LOCAL_NET -d $BRIDGE_NET -j ACCEPT &&
sudo iptables -I FORWARD -o $FWD_IFACE -i $VIRTD_BRIDGE -d $LOCAL_NET -s $BRIDGE_NET -j ACCEPT"
#
# Distribute our pubkey to the hosts we want to access
#
cat $YOUR_SSH_PUBKEY | $SSHCMD "$UNDERCLOUD_SSH \"cat >~/your_pubkey\""
cat $YOUR_SSH_PUBKEY | $SSHCMD "$UNDERCLOUD_SSH \"cat >>~/.ssh/authorized_keys\""
#
# Setup routing and pubkey on the cloud nodes
#
$SSHCMD "$UNDERCLOUD_SSH \"echo nameserver 8.8.8.8 | sudo tee /etc/resolv.conf\""
$SSHCMD "$UNDERCLOUD_SSH \"sudo ip r add $LOCAL_NET via $BRIDGE_ADDR\" 2>&1 | grep -v exists"
# Triple-nested ssh: the \\\$ escapes defer expansion to the undercloud shell.
$SSHCMD "$UNDERCLOUD_SSH \" \
source stackrc; \
for ip in \\\$(openstack server list -c Networks -f value | sed 's/ctlplane=//g'); do \
echo setting up route in \\\$ip
ssh heat-admin@\\\$ip sudo ip r add $LOCAL_NET via $BRIDGE_ADDR 2>&1 | grep -v exists ; \
cat ~/your_pubkey | ssh heat-admin@\\\$ip \\\"cat >>~/.ssh/authorized_keys\\\"
done\""
#
# Create SSH and Ansible config files for the hosts
#
cat > ~/.ssh/ooo_config <<EOF
Host undercloud
Hostname $UNDERCLOUD_IP
IdentityFile $YOUR_SSH_PK
IdentitiesOnly yes
User stack
StrictHostKeyChecking no
UserKnownHostsFile=/dev/null
LogLevel ERROR
EOF
echo "[undercloud]" > inventory
echo "undercloud-node ansible_ssh_host=$UNDERCLOUD_IP ansible_ssh_user=stack ansible_become=true ansible_ssh_private_key=$YOUR_SSH_PK ansible_ssh_extra_args=\"-oIdentitiesOnly=yes\"" >> inventory
echo "" >> inventory
echo "[hosts]" >> inventory
# One ssh/inventory entry per overcloud node, parsed from CSV "name,ip" rows.
for data in $($SSHCMD "$UNDERCLOUD_SSH \"source stackrc; openstack server list -c Name -c Networks -f csv | sed 's/ctlplane=//g' | grep -v Networks\"");
do
IFS=',' read -r -a array <<< "$data"
hostname=$(echo ${array[0]} | sed 's/"//g')
ip=$(echo ${array[1]} | sed 's/"//g')
echo $hostname -\> $ip
cat >> ~/.ssh/ooo_config << EOF
Host $hostname
Hostname $ip
User heat-admin
IdentitiesOnly yes
IdentityFile $YOUR_SSH_PK
StrictHostKeyChecking no
UserKnownHostsFile=/dev/null
LogLevel ERROR
EOF
echo $hostname ansible_ssh_host=$ip ansible_ssh_user=heat-admin ansible_become=true ansible_ssh_private_key_file=$YOUR_SSH_PK ansible_ssh_extra_args="-oIdentitiesOnly=yes">> inventory
done
cat > ansible.cfg <<EOF
[defaults]
host_key_checking = False
inventory=inventory
EOF
./setup-local.sh
echo "==========================================="
echo "Config files created:"
echo " * ansible.cfg"
echo " * inventory"
echo " * ~/.ssh/ooo_config"
| true
|
0436347d8afb27e3d15ef7b2e3fa782497e346da
|
Shell
|
greydjin/1C
|
/files/scripts/backup1c
|
UTF-8
| 283
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/bash
# Timestamped PostgreSQL base backup for the 1C database, written as a
# gzipped tar to the backup share.
BACKUP_PROG=/usr/pgsql-9.6/bin/pg_basebackup
BACKUP_USER=postgres
BACKUP_DIR=/mnt/BACKUPS/1c-backup
BACKUP_FILE=backup_$(date +%Y-%m-%d_%H%M).tar.gz
# Write the tar stream straight to the file. The original piped through
# `tee >` — a pointless extra copy that also replaced pg_basebackup's exit
# status with tee's.
"$BACKUP_PROG" -x --format=tar -z -U "$BACKUP_USER" -D - > "$BACKUP_DIR/$BACKUP_FILE"
chmod 666 "$BACKUP_DIR/$BACKUP_FILE"
| true
|
2df04dde863a1ed50931f884ddd217f8e129896f
|
Shell
|
alvarodeleon/httpd-setup
|
/docker-setup-full.sh
|
UTF-8
| 445
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch a privileged CentOS7 nginx/multi-php container with bind mounts.
# Usage: docker-setup-full.sh <container-name> <data-dir>
if [ $# -lt 2 ]; then
    echo "Usage: $0 <container-name> <data-dir>" >&2
    exit 1
fi
# Create the host-side directories backing the bind mounts.
mkdir -p "$2"/{html,mysql,backup,disk,nginx}
docker run -it --privileged --hostname="server" -e "container=docker" --cap-add SYS_ADMIN \
    --tmpfs /run -v /sys/fs/cgroup:/sys/fs/cgroup:ro \
    -p 22:22 -p 80:80 -p 3306:3306 \
    -v "$2/html:/var/www/html" \
    -v "$2/nginx:/etc/nginx/conf.d" \
    -v "$2/mysql/:/var/lib/mysql" \
    -v "$2/backup/:/backup" \
    -v "$2/disk/:/mnt/disk" \
    --name "$1" "alvarodeleon/centos7-nginx-multiphp" "/usr/sbin/init"
| true
|
d916e3b1125af747cdf78ea9246757fccc9064c9
|
Shell
|
allenthomas05/Cpp-programs
|
/cpp/simple programs/gcd.sh
|
UTF-8
| 276
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
# Read two numbers and print their greatest common divisor (trial division).
# BUG FIXES vs. original: `rem1 = $((...))` had spaces around `=`, which runs
# a command named "rem1" instead of assigning; `[ rem1 -eq 0 ]` tested the
# literal string "rem1" instead of $rem1; `gcd = $i` had the same
# assignment-with-spaces bug.
clear
echo "Enter two numbers"
read a
read b
# n = max(a, b); divisors above min(a,b) never divide both, so this just
# bounds the search (kept from the original).
if [ "$a" -gt "$b" ]
then
n=$a
else
n=$b
fi
gcd=1
for((i=1;i<=n;i++))
do
rem1=$((a % i))
rem2=$((b % i))
if [ "$rem1" -eq 0 ] && [ "$rem2" -eq 0 ]
then
gcd=$i
fi
done
echo " The gcd of $a and $b are $gcd"
| true
|
46dd038598c9f4567c58ffd1dc87813cab6c6195
|
Shell
|
Earne/git-tfs-gitextensions-plugin
|
/.nuget/Nuget.sh
|
UTF-8
| 301
| 2.875
| 3
|
[] |
no_license
|
#!/bin/sh
# Restore NuGet packages for every packages.config under the current tree.
# Pass "NoMono" as $1 to invoke NuGet.exe directly (e.g. on Windows/.NET);
# otherwise it is run through the Mono runtime.
if [ ! -d 'packages' ]; then
    mkdir packages
fi
case "$1" in
    NoMono)
        find ./ -name packages.config -exec .nuget/NuGet.exe install {} -o packages \;
        ;;
    *)
        find ./ -name packages.config -exec mono --runtime=v4.0.30319 .nuget/NuGet.exe install {} -o packages \;
        ;;
esac
| true
|
a1c00f9d3044ee5b320ac88ae7ca1b81f94f4fed
|
Shell
|
mobilemadman2/travis-ci
|
/bin/deploy/env.sh
|
UTF-8
| 292
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -e

# Derive GitHub-release metadata for the deploy step from Travis CI env vars.
# "owner/repo" -> "repo".
REPO_NAME="${TRAVIS_REPO_SLUG##*/}"

export RELEASE_FILE="${REPO_NAME}.zip"
export RELEASE_TAG="${TRAVIS_TAG}"
export RELEASE_TITLE="${TRAVIS_TAG}"
export RELEASE_BODY="Auto updated (Travis build: ${TRAVIS_BUILD_WEB_URL})"
export GH_PAGES_DIR="${TRAVIS_BUILD_DIR}/gh-pages"
| true
|
5453dca075e67537fd07c03b0446b599b7914ac8
|
Shell
|
stenpiren/Lexos
|
/install/Linux/Uninstaller.sh
|
UTF-8
| 334
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
declare location=$(pwd)
echo Getting ready to uninstall
if cd /home/$USER; then
rm -rf ~/anaconda2
rm -rf ~/Lexos-master
fi
cd
if [ -e ~/Downloads/Anaconda2-4.0.0-Linux-x86_64.sh ]; then
rm -rf ~/Downloads/Anaconda2-4.0.0-Linux-x86_64.sh
fi
cd $location
echo uninstall completed successfully
$SHELL
| true
|
6cdc20b40771e2a3816fe2db83c0f0572069693c
|
Shell
|
cselab/amrex
|
/.github/workflows/cmake/dependencies_nvcc.sh
|
UTF-8
| 1,050
| 3.265625
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/usr/bin/env bash
#
# Copyright 2020 Axel Huebl
#
# License: BSD-3-Clause-LBNL
# search recursive inside a folder if a file contains tabs
#
# @result 0 if no files are found, else 1
#
set -eu -o pipefail
sudo apt-get install -y --no-install-recommends\
build-essential \
cmake \
g++-5 \
gfortran-5 \
libopenmpi-dev \
openmpi-bin \
nvidia-cuda-dev \
nvidia-cuda-toolkit
# Patch broken GCC 5.5 libs in <algorithm>
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=76731
# https://stackoverflow.com/a/50815334/2719194
for f in avx512fintrin.h avx512pfintrin.h avx512vlintrin.h
do
curl \
-H "User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36" \
-o ${f} "https://gcc.gnu.org/viewcvs/gcc/branches/gcc-5-branch/gcc/config/i386/${f}?view=co&revision=245536&content-type=text%2Fplain&pathrev=245536"
done
sudo mv avx512*intrin.h /usr/lib/gcc/x86_64-linux-gnu/5/include/
| true
|
5eefa3e530faee270123eabf2b1080d494c0047b
|
Shell
|
nvllsvm/dotfiles
|
/scripts/terminal/lower
|
UTF-8
| 242
| 3.671875
| 4
|
[] |
no_license
|
#!/usr/bin/env sh
_lower() {
tr '[:upper:]' '[:lower:]'
}
if [ $# -gt 0 ]; then
if [ $# -eq 1 ]; then
printf '%s' "$1" | _lower
else
echo 'error: unexpected arguments' >&2
exit 1
fi
else
_lower
fi
| true
|
a3604cb03da437b00a8d96b40ec87dfc19d620ef
|
Shell
|
dmwm/PHEDEX
|
/Schema/OracleInitRole.sh
|
UTF-8
| 5,085
| 3.765625
| 4
|
[] |
no_license
|
#!/bin/sh
##H Usage:
##H OracleInitRole.sh DBPARAM:SECTION KEY-DIRECTORY/USERCERT-FILE SITE-NAME
##H
##H Where:
##H DBPARAM is the database parameter file with
##H the contact information
##H SECTION is the name of the section to pick
##H out, plus the name to pick out
##H from KEY-DIRECTORY; "/Admin" is
##H appended automatically
##H KEY-DIRECTORY is the directory where keys are held;
##H it is assumed that Details/SECTION
##H will contain the necessary info;
##H unencrypted passwords are stored
##H in Details/USERCERT-FILE
##H USERCERT-FILE name of the user certificate file in
##H KEY-DIRECTORY, formed as e-mail
##H address
##H SITE-NAME is the name of the site (e.g. "CERN")
#[ $# != 3 ] && { echo "Insufficient parameters." 1>&2; exit 1; }
if [ $# -ne 3 ]; then
grep "^##H" < $0 | sed 's/^\#\#\H\( \|$\)//'
exit 1
fi
dbparam="$(echo $1 | sed 's/:.*//')"
section="$(echo $1 | sed 's/.*://')"
keydir="$(dirname $2)"
usercert="$(basename $2)"
sitename="$3"
[ -z "$dbparam" ] && { echo "Insufficient parameters." 1>&2; exit 1; }
[ -z "$section" ] && { echo "Insufficient parameters." 1>&2; exit 1; }
[ -z "$keydir" ] && { echo "Insufficient parameters." 1>&2; exit 1; }
[ -z "$usercert" ] && { echo "Insufficient parameters." 1>&2; exit 1; }
[ -z "$sitename" ] && { echo "Insufficient parameters." 1>&2; exit 1; }
[ -f "$dbparam" ] ||
{ echo "$dbparam: no such file" 1>&2; exit 1; }
[ -d "$keydir" ] ||
{ echo "$keydir: no such directory" 1>&2; exit 1; }
[ -f "$keydir/$usercert" ] ||
{ echo "$keydir/$usercert: no such file" 1>&2; exit 1; }
case $usercert in *@* ) ;; * )
{ echo "$usercert is not an e-mail address" 1>&2; exit 1; } ;;
esac
case $sitename in *_* )
{ echo "$sitename cannot contain _" 1>&2; exit 1; } ;;
esac
home=$(dirname $0)/..
sitename_uc="$(echo $sitename | tr '[:lower:]' '[:upper:]')"
role_dn="$(openssl x509 -in $keydir/$usercert -noout -subject | sed 's/^subject= //')"
role_email="$usercert"
role_passwd="$($home/Utilities/WordMunger)"
role_section="$(echo $section | sed s/Production/prod/ | sed s/Dev/dev/ | sed s/Debug/debug/)"
role_name="PHEDEX_${sitename_uc}_${role_section}"
role_name_lc="$(echo $role_name | tr '[:upper:]' '[:lower:]')"
ora_master="$($home/Utilities/OracleConnectId -db $dbparam:$section/Admin)"
ora_reader="$($home/Utilities/OracleConnectId -db $dbparam:$section/Reader)"
ora_writer="$($home/Utilities/OracleConnectId -db $dbparam:$section/Writer)"
case $ora_master in */*@* ) ;; * )
echo "$dbparam:$section/Admin: database contact not defined" 1>&2; exit 1;;
esac
case $ora_reader in */*@* ) ;; * )
echo "$dbparam:$section/Reader: database contact not defined" 1>&2; exit 1;;
esac
case $ora_writer in */*@* ) ;; * )
echo "$dbparam:$section/Writer: database contact not defined" 1>&2; exit 1;;
esac
$home/Schema/OracleNewRole.sh "$ora_master" "$role_name" "$role_passwd"
$home/Schema/OraclePrivs.sh "$ora_master" \
"$(echo $ora_reader | sed 's|/.*||')" \
"$(echo $ora_writer | sed 's|/.*||')" \
"$role_name_lc";
mkdir -p Details
(echo "Section $section/$sitename_uc"
echo "Interface Oracle"
echo "Database $(echo $ora_writer | sed 's|.*@||')"
echo "AuthDBUsername $(echo $ora_writer | sed 's|/.*||')"
echo "AuthDBPassword $(echo $ora_writer | sed 's|.*/||; s|@.*||')"
echo "AuthRole $role_name_lc"
echo "AuthRolePassword $role_passwd"
echo "ConnectionLife 600"
echo "LogConnection on"
echo "LogSQL off") \
> Details/$role_name_lc
mkdir -p Output
(echo "Subject: PhEDEx authentication role for $section/$sitename_uc";
echo "From: cms-phedex-admins@cern.ch";
echo "Cc: cms-phedex-admins@cern.ch";
echo "To: $role_email";
echo;
echo "Hello $role_email";
echo "($role_dn),"; echo;
echo "Below is an authentication data for your PhEDEx database connection";
echo "for database $section/$sitename_uc using authentication role $role_name.";
echo;
echo "Please store the information in DBParam file, using Schema/DBParam.Site";
echo "as your example. Please keep this information secure: do not store it";
echo "in CVS or anywhere someone else might be able to read it. Should you";
echo "accidentally make the information public, please contact PhEDEx admins";
echo "as soon as you can at cms-phedex-admins@cern.ch. Thank you.";
echo;
echo "You can copy and paste the section between '====' lines in shell on a";
echo "computer which has access to your private certificate part, typically";
echo "in ~/.globus/userkey.pem."
echo; echo "====";
echo "cat << "\\"END_OF_DATA | /usr/bin/openssl smime -decrypt -in /dev/stdin -recip ~/.globus/usercert.pem -inkey ~/.globus/userkey.pem"
/usr/bin/openssl smime -encrypt -in Details/$role_name_lc $keydir/$usercert
echo "END_OF_DATA";
echo "====";
echo;
echo "Yours truly,";
echo " PhEDEx administrators";
echo " (cms-phedex-admins@cern.ch)") \
> "Output/${role_name_lc}:${role_email}"
| true
|
7a52c61aef785aa46ed6bf28ffd8ca88090e795a
|
Shell
|
d0ugal/hassio-jupyter
|
/scripts/docker-build.sh
|
UTF-8
| 282
| 2.578125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
set -evx
set -x
repo=$1;
docker run \
--privileged \
-v /var/run/docker.sock:/var/run/docker.sock \
-v "$repo":/docker \
hassioaddons/build-env:latest \
--image "d0ugal/hassio-jupyter-{arch}" \
--git \
--target jupyter \
--parallel \
--${ARCH:-all}
| true
|
0253a483e3881bc3ef075ff185ebef88ad269a8e
|
Shell
|
GeeZeeS/hyperglass
|
/.tests/app/setup.sh
|
UTF-8
| 1,104
| 3.28125
| 3
|
[
"BSD-3-Clause-Clear"
] |
permissive
|
#!/usr/bin/env bash
echo "[INFO] Starting Redis..."
redis-server &
cd /tmp/hyperglass
echo "[INFO] Starting setup..."
poetry run hyperglass setup -d
echo "[SUCCESS] Setup completed."
sleep 2
echo "listen_address: 127.0.0.1" >> /root/hyperglass/hyperglass.yaml
echo "[INFO] Starting UI build."
poetry run hyperglass build-ui
if [[ ! $? == 0 ]]; then
echo "[ERROR] Failed to start hyperglass."
exit 1
else
echo "[SUCCESS] UI build completed."
fi
echo "[INFO] Starting hyperglass..."
poetry run hyperglass start &> /var/log/hyperglassci.log &
sleep 180
if [[ ! $? == 0 ]]; then
echo "[ERROR] Failed to start hyperglass."
exit 1
else
echo "[SUCCESS] Started hyperglass."
fi
echo "[INFO] Running HTTP test..."
STATUS=$(curl -s -o /dev/null -w "%{http_code}" http://127.0.0.1:8001)
echo "[INFO] Status code: $STATUS"
if [[ ! $? == 0 ]]; then
echo "[ERROR] HTTP test failed."
exit 1
elif [[ ! "$STATUS" == "200" ]]; then
echo "[ERROR] HTTP test failed. Startup log:"
cat /var/log/hyperglassci.log
exit 1
fi
echo "[SUCCESS] Tests ran successfully."
exit 0
| true
|
5c07fc424e44fb18cee8de2fd45eaafb3f301587
|
Shell
|
hewlock/dotfiles
|
/home/.bashrc.d/aliases
|
UTF-8
| 672
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
alias ls='ls --color=auto'
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
fi
alias ag="ag --hidden"
alias emacs='TERM=xterm-16color emacs --no-window-system'
alias l='ls -CF'
alias la='ls -A'
alias ll='ls -alF'
alias open='gio open 2>/dev/null'
alias ranger='ranger --choosedir=$HOME/.rangerdir; cd `cat $HOME/.rangerdir`; rm $HOME/.rangerdir'
alias trash='gio trash'
alias wttr='clear && curl wttr.in'
| true
|
5907b8f0118aaef040956e6c710a08e4867e5464
|
Shell
|
xiaoy00/skywalking
|
/install/deploy/istio-deploy.sh
|
UTF-8
| 2,985
| 3.5625
| 4
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# Add istio official repo
add_repo(){
VERSION=$1
REPO="https://storage.googleapis.com/istio-release/releases/${VERSION}/charts/"
helm repo add istio $REPO
STATUS_CMD=`echo $?`
CHECK_REPO_CMD=`helm repo list | grep $REPO | wc -l`
echo "$STATUS_CMD"
echo "$CHECK_REPO_CMD"
while [[ $STATUS_CMD != 0 && $CHECK_REPO_CMD -ge 1 ]]
do
sleep 5
helm repo add istio $REPO
STATUS_CMD=`echo $?`
CHECK_REPO_CMD=`helm repo list | grep $REPO | wc -l`
done
}
# Create istio-system namespace
create_namespace() {
NAMESPACE=$1
kubectl create ns ${NAMESPACE}
STATUS_CMD=`echo $?`
while [[ $STATUS_CMD != 0 ]]
do
sleep 5
kubectl create ns ${NAMESPACE}
STATUS_CMD=`echo $?`
done
}
# Create CRD need for istio
create_crd() {
NAMESPACE=$1
helm install istio-init istio/istio-init -n ${NAMESPACE}
CRD_COUNT=`kubectl get crds | grep 'istio.i' | wc -l`
while [[ ${CRD_COUNT} != 23 ]]
do
sleep 5
CRD_COUNT=`kubectl get crds | grep 'istio.io' | wc -l`
done
echo 'Istio crd create successful'
}
# Deploy istio related components
deploy_istio() {
NAMESPACE=$1
VERSION=$2
CHART_DIR="istio-skywalking-ci/install/kubernetes/helm"
git clone -b istio-skywalking-ci https://github.com/SkyAPMTest/istio-skywalking-ci.git
cd $CHART_DIR
helm install istio istio -n ${NAMESPACE} -f istio/values-istio-skywalking.yaml
check() {
kubectl -n ${NAMESPACE} get deploy | grep istio | awk '{print "deployment/"$1}' | while read line ;
do
kubectl rollout status $line -n ${NAMESPACE} --timeout 10m
done
}
check
echo "Istio is deployed successful"
}
add_mixer_template() {
VERSION=$1
kubectl apply -f https://raw.githubusercontent.com/istio/istio/$VERSION/mixer/template/metric/template.yaml
}
main(){
ISTIO_VERSION="1.3.3"
ISTIO_NAMESPACE="istio-system"
add_repo $ISTIO_VERSION
if [[ `kubectl get ns | grep $ISTIO_NAMESPACE | wc -l ` == 0 && `kubectl get ns $ISTIO_NAMESPACE | grep -v NAME | wc -l` == 0 ]] ;then
create_namespace $ISTIO_NAMESPACE
fi
create_crd $ISTIO_NAMESPACE
deploy_istio $ISTIO_NAMESPACE $ISTIO_VERSION
add_mixer_template $ISTIO_VERSION
}
main
| true
|
58b5018e2e57f013afe161f740b82d5bafc1f04f
|
Shell
|
teja624/home
|
/.zsh/modules/aws/lib/sh/api/ec2/iam_instance_profile_associate.sh
|
UTF-8
| 258
| 2.921875
| 3
|
[
"Apache-2.0"
] |
permissive
|
aws_ec2_iam_instance_profile_associate() {
local iam_instance_profile="$1"
local instance_id="$2"
shift 2
cond_log_and_run aws ec2 associate-iam-instance-profile --iam-instance-profile $iam_instance_profile --instance-id $instance_id "$@"
}
| true
|
7116a259489269d99ac6b63e121d15df4883ef1c
|
Shell
|
moozer/BackupScripts
|
/CreateSqfsImg.sh
|
UTF-8
| 1,805
| 4.09375
| 4
|
[] |
no_license
|
#!/bin/sh
DIRTOPROCESS=$1
if [ ! -d "$DIRTOPROCESS" ]
then
echo directory not specified
exit
fi
BASENAME=$(basename $DIRTOPROCESS)
SQDIR="/home/moz/squashfs/"
MNTDIR="/mnt/sqfs/moz/"
SQFILENAME="$BASENAME.squashfs"
MD5SUMFILE="md5sums.txt"
CURDIR=$(pwd)
echo Taking data from $DIRTOPROCESS
echo - mount dir is $MNTDIR
echo - squashfs dir is $SQDIR
echo - current dir is $CURDIR
echo and saving to $SQDIR$SQFILENAME
# check if squashfs file already exists
if [ -e "$SQDIR$SQFILENAME" ]
then
echo output file already exits. aborting
exit
fi
# md5 sum source
cd "$DIRTOPROCESS"
if [ -e "$MD5SUMFILE" ]
then
echo md5 sum file already exists. Will not regenerate
else
echo generating md5sums on source dir
# generate and check return val.
md5deep -lre . > /tmp/$MD5SUMFILE
if [ $? -ne 0 ]
then
echo Error while calculating md5s
exit
fi
cd $CURDIR
mv "/tmp/$MD5SUMFILE" "$DIRTOPROCESS/$MD5SUMFILE"
fi
# generate squashfs image
echo creating squashfs file
cd $CURDIR
mksquashfs "$DIRTOPROCESS" "$SQDIR$SQFILENAME" -noappend
if [ $? -ne 0 ]
then
echo Error while creating squashfs file
exit
fi
# check the md5 values of the mounted image
echo checking files in squashfs
cd "$MNTDIR"
cd "$BASENAME"
if [ $? -ne 0 ]
then
echo failed to access autofs mount at $MNTDIR$/$BASENAME
exit
fi
md5sum -c $MD5SUMFILE --quiet
if [ $? -ne 0 ]
then
echo failed to validate md5 sums
exit
fi
# generate md5sum of squashfs file
echo generating md5 hash for new squashfs file
md5sum "$SQDIR$SQFILENAME" > "$SQDIR$SQFILENAME.md5"
if [ $? -ne 0 ]
then
echo Error while calculating md5s
exit
fi
# delete base data
echo delete original data using
echo rm -R "$DIRTOPROCESS"
| true
|
9f906c756efbf9e4233c89eb878bae75e533aea7
|
Shell
|
c3e5g7i9/install-demonsaw-linux
|
/run-demonsaw_router
|
UTF-8
| 924
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
#
##
####
####
#### This is a script to start demonsaw_router as a child process, monitor it, and restart it after a crash.
####
#### The script works by monitoring the exit code when demonsaw_router closes.
#### If it crashed/exited ungracefully (i.e. anything with a non-zero exit code), it will auto-restart.
#### If it closed as expected, it will remain off. Using "ctrl+c" works nicely.
####
#### 1) Copy/paste (or download) and save as "run-demonsaw_router" and place it in the same directory as demonsaw_router.
#### 2) Run "chmod +x" on both demonsaw_router and the run-demonsaw_router script.
#### 3) Open terminal, change to same directory, and enter "./run-demonsaw_router" to launch demonsaw_router.
####
####
####
####
###
##
#
until ./demonsaw_router; do
echo "'demonsaw_router' crashed with exit code $?. Respawning..." >&2
sleep 1
done
#
##
####
####
####
####
####
###
##
#
| true
|
c12337c6f7df4afe3902ea483e32b9ece7646160
|
Shell
|
Gremk/rs
|
/backup2/1.sh
|
UTF-8
| 1,503
| 3.78125
| 4
|
[] |
no_license
|
#!/bin/bash
#---------------------------------Set variables----------------------------------------------
Date= date +%F
Year= date +%Y
Dago= date --date='90 days ago' +%d
Mago= date --date='90 days ago' +%m
Dateago= date --date='90 days ago' +%F
sourceDir=/backup/
targetDir=/backup2/
LOGFILE=/backup2/backup.log
if [ "`df | grep "/dev/sda1" | awk '{print $5}' | sed 's/\%//'`" -ge 10 ]
then
#-----------------------------------Создание каталогов----------------------------------------
#find "$sourceDir" -type d | sed -e "s?$sourceDir?$targetDir?" | xargs mkdir -pv \; >>$LOGFILE
#-----------------------------------Перенос файлов созданных более 90 дней назад------------------------------------
echo "----------------На диске: $Disk осталось менее 10% свободного места------------------------------------"
echo "---Перемещение файлов созданных более 90 дней назад, до $Dago на резервное хранилище-----------------------------"
find $sourceDir -type f -mtime +10 -exec rsync -avRog --log-file=$LOGFILE --remove-source-files {} $targetDir \;
echo "----------------------------Создание архивов----------------------------------"
for i in /backup2/backup/$Year/*/*/*
do
echo "$i"
cd $i
tar -cvzf 1.tgz $i >>$LOGFILE
done
else
echo "----------------nothing to do---------------------------------"
fi
| true
|
17659bcfd1c2c922aa0fa09ff3b68527af642f65
|
Shell
|
Twisterblack/auto-cleaner
|
/auto-cleaner
|
UTF-8
| 2,075
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
apt-get install espeak wipe bleachbit
clear
(
modprobe pcspkr
cd /root/sessions
echo "10" ;
espeak "Destruction trace files"
echo "# Destruction des traces indésirables" ; sleep 1
wipe -f -r -i *
cd /root/bts
wipe -f -r -i *
cd /root/.armitage
wipe -f -r -i *
cd /root/.msf4
wipe -f -r -i *
cd /root/.sqlmap
wipe -f -r -i *
cd /root/.wireshark
wipe -f -r -i *
cd /root/.xxxterm
wipe -f -r -i *
cd /
echo "20" ;
espeak "Destruction backup files"
echo "# Destruction des fichiers de sauvegarde" ; sleep 1
find . -size -500k -type f -name "*~" -exec wipe -r -f {} \; > rapport-cleaner.txt
find . -size -500k -type f -name "*.bak" -exec wipe -r -f {} \; >> rapport-cleaner.txt
find . -size -500k -type f -name "*.old" -exec wipe -r -f {} \; >> rapport-cleaner.txt
echo "40" ;
espeak "destruction log files"
echo "# Destruction des fichiers journaux" ; sleep 1
find . -size -500k -type f -name "*.log" -exec wipe -r -f {} \; >> rapport-cleaner.txt
find . -type f -name "*.log" -exec rm -f -v {} \; >> rapport-cleaner.txt
find . -type f -name "*.bak" -exec rm -f -v {} \; >> rapport-cleaner.txt
find . -type f -name "*~" -exec rm -f -v {} \; >> rapport-cleaner.txt
find . -type f -name "*.old" -exec rm -f -v {} \; >> rapport-cleaner.txt
echo "70" ;
espeak "cleaning and optimisation system"
echo "# Nettoyage résiduelle & optimisation du système" ; sleep 1
bleachbit --preset -o -c >> rapport-cleaner.txt
echo "80" ;
echo "# Nettoyage terminé !" ; sleep 1
echo "90" ;
espeak "open cleaned rapport"
echo "# Ouverture du rapport de nettoyage" ; sleep 1
zenity --text-info --title="Rapport nettoyage" --filename="rapport-cleaner.txt"
echo "# Destruction du rapport" ; sleep 1
wipe -f rapport-cleaner.txt
echo "100" ;
espeak "finish !"
echo "# Terminé !"; beep -f 1500 -l 120 -n -f 2000 -l 120 -n -f 2500 -l 120 -n -f 3000 -l 300 -n -f 2500 -l 120 -n -f 3000 -l 300
) |
zenity --progress \
--title="Nettotage du système" \
--text="Analyse des fichiers ..." \
--percentage=0
if [ "$?" = -1 ] ; then
zenity --error \
--text="Nettoyage annulée."
fi
| true
|
aa8ac093247d4ab106a21cc13e2d4720eb6f9430
|
Shell
|
whigg/Empirical_LocalPatch
|
/a01-create_localpatch.sh
|
UTF-8
| 3,733
| 2.84375
| 3
|
[] |
no_license
|
#!/bin/sh
#====================
# convert binary files to netCDF4
# all the time steps will be in one file
# dimesnion (time, nx, ny)
# Menaka@IIS
# 2020/05/29
#====================
#*** PBS setting when needed
#PBS -q E40
#PBS -l select=1:ncpus=40:mem=100gb
#PBS -j oe
#PBS -m ea
#PBS -M menaka@rainbow.iis.u-tokyo.ac.jp
#PBS -V
#PBS -N EmpLP
#========
cd $PBS_O_WORKDIR
#================================================
# OpenMP Thread number
export OMP_NUM_THREADS=40
# input settings
syear=`python -c "import params; print (params.starttime()[0])"`
smonth=`python -c "import params; print (params.starttime()[1])"`
sdate=`python -c "import params; print (params.starttime()[2])"`
eyear=`python -c "import params; print (params.endtime()[0])"`
emonth=`python -c "import params; print (params.endtime()[1])"`
edate=`python -c "import params; print (params.endtime()[2])"`
echo $syear" to "$eyear
CAMADIR=`python -c "import params; print (params.CaMa_dir())"`
outdir=`python -c "import params; print (params.out_dir())"`
cpunums=`python -c "import params; print (params.cpu_nums())"`
mapname=`python -c "import params; print (params.map_name())"`
inputname=`python -c "import params; print (params.input_name())"`
N=`python src/calc_days.py $syear $smonth $sdate $eyear $emonth $edate`
#*************************
# runs s01-CaMa_sim.sh before following steps to get the simulated variables
# ./s01-CaMa_sim.sh
#*************************
#=========================
# convert binary to netCDF
#=========================
# water surface elevation
varname="sfcelv"
#=================================================
#./src/bin2nc $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir
# discharge
varname="outflw"
#=================================================
#./src/bin2nc $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir
#=========================
# remove trend lines
#=========================
varname="sfcelv"
#=================================================
./src/remove_trend $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir
#=========================
# remove seasonality
#=========================
varname="rmdtrnd"
#=================================================
./src/remove_season $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir
#=========================
# standardized sfcelve
#=========================
varname="rmdsesn"
#=================================================
./src/standardize $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir
#=========================
# calculate experimental semi-varaince
#=========================
varname="standardized"
#===make directories for semivar
python src/make_semivari.py $CAMADIR $mapname $outdir
#=================================================
./src/semivariance $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir
#=========================
# calculate spatial auto-correlation weightage
#=========================
threshold=`python -c "import params; print (params.threshold())"`
#=================================================
python src/weightage.py $CAMADIR $mapname $outdir $cpunums $threshold
#=========================
# write local patch to text files
#=========================
varname="weightage"
# make dir local patch
mkdir "local_patch"
#=================================================
./src/lpara $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir $threshold
#=========================
# write local patch [main strem] to text files
#=========================
varname="weightage"
# make dir local patch
mkdir "local_patchMS"
#=================================================
#./src/lparaMS $N $syear $eyear $varname $mapname $inputname $CAMADIR $outdir $threshold
| true
|
65c630acab3e1c26e7c8f1ec5dfed75780187b4d
|
Shell
|
hjw/tag_munger
|
/lib/updatesToWWL/update0_fromRepository_to_LocalCopyTagMunger.sh
|
UTF-8
| 468
| 2.765625
| 3
|
[] |
no_license
|
# copy from ~/code/tag_munger to DharaWWL (VMC's silver 2.5 inch portable hard disk), create new folder tag_munger
# to create or update the tag_munger utilities.
rsync -avz --exclude=.git --exclude=.DS_Store --delete-excluded ~/code/tag_munger /Volumes/DharaWWL/
echo 'Now TAG MUNGER UTILTIES should be on DharaWWL silver 1gb portable HD'
echo 'To tag the local "SonosLibrary"'
echo 'If git repostory "tag_munger" was not updated, get the lastest version and re-run'
| true
|
d0530a0c8d0268a53337ea3617726ffd346e3e8e
|
Shell
|
johanwestling/nvx
|
/helpers/output.sh
|
UTF-8
| 3,761
| 3.828125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
function nvx_text_wrap {
local text="${1}"
local line_length=${2:-80}
local wrapped_text=$(echo -e "${text}" | fold -sw $line_length)
while read -r text_line; do
echo -e "${text_line}"
done <<< "${wrapped_text}"
}
function nvx_text_pad {
local line="${1}"
local line_length=${2:-80}
local line_padding=$((${line_length} - ${#line}))
if [ ${line_padding} -gt 0 ]; then
line=$(printf "${line}%${line_padding}s")
echo -e "${line}"
else
echo -e "${line}"
fi
}
function nvx_box_top {
local line_length=74
local line=$(nvx_text_pad "" $line_length)
local border_length=78
local border_color="\033[33m"
local border_color_reset="\033[39m"
local border_line_horizontal="━"
local border_line_vertical="┃"
local border_line_corner_left="┏"
local border_line_corner_right="┓"
local border_line=$(eval printf "${border_line_horizontal}"'%.0s' {1..$border_length})
# Box top line
echo -ne "${border_color}${border_line_corner_left}"
echo -ne "${border_line}"
echo -e "${border_line_corner_right}${border_color_reset}"
# Box padding
echo -ne "${border_color}${border_line_vertical}${border_color_reset} "
echo -ne "${line}"
echo -e " ${border_color}${border_line_vertical}${border_color_reset}"
}
function nvx_box_bottom {
local line_length=74
local line=$(nvx_text_pad "" $line_length)
local border_length=78
local border_color="\033[33m"
local border_color_reset="\033[39m"
local border_line_horizontal="━"
local border_line_vertical="┃"
local border_line_corner_left="┗"
local border_line_corner_right="┛"
local border_line=$(eval printf "${border_line_horizontal}"'%.0s' {1..$border_length})
# Box bottom line
echo -ne "${border_color}${border_line_corner_left}"
echo -ne "${border_line}"
echo -e "${border_line_corner_right}${border_color_reset}"
}
function nvx_box_head {
local line_length=74
local text=$(nvx_text_wrap "${1}" $line_length)
local end_with_new_line="${2}"
local text_style="\033[1m"
local text_style_reset="\033[0m"
local border_color="\033[33m"
local border_color_reset="\033[39m"
local border_line_vertical="┃"
# Head lines
while read -r line; do
line=$(nvx_text_pad "${line}" $line_length)
echo -ne "${border_color}${border_line_vertical}${border_color_reset} "
echo -ne "${text_style}${line}${text_style_reset}"
echo -e " ${border_color}${border_line_vertical}${border_color_reset}"
done <<< "${text}"
if [ -z "${end_with_new_line}" ]; then
# Head padding
line=$(nvx_text_pad "" $line_length)
echo -ne "${border_color}${border_line_vertical}${border_color_reset} "
echo -ne "${line}"
echo -e " ${border_color}${border_line_vertical}${border_color_reset}"
fi
}
function nvx_box_text {
local line_length=74
local text=$(nvx_text_wrap "${1}" $line_length)
local end_with_new_line="${2}"
local border_color="\033[33m"
local border_color_reset="\033[39m"
local border_line_horizontal="━"
local border_line_vertical="┃"
local border_line_top_left="┏"
local border_line_top_right="┓"
local border_line_bottom_left="┗"
local border_line_bottom_right="┛"
while read -r line; do
line=$(nvx_text_pad "${line}" $line_length)
echo -ne "${border_color}${border_line_vertical}${border_color_reset} "
echo -ne "${line}"
echo -e " ${border_color}${border_line_vertical}${border_color_reset}"
done <<< "${text}"
if [ -z "${end_with_new_line}" ]; then
# Body padding
line=$(nvx_text_pad "" $line_length)
echo -ne "${border_color}${border_line_vertical}${border_color_reset} "
echo -ne "${line}"
echo -e " ${border_color}${border_line_vertical}${border_color_reset}"
fi
}
| true
|
1a060e6519f5236e620bff329998c5aa596d3e98
|
Shell
|
sswguo/jboss-app-workflow
|
/fetch.sh
|
UTF-8
| 479
| 2.859375
| 3
|
[] |
no_license
|
#! /bin/sh
if [ $1 ]; then
echo ""
else
echo "Please specify the app_repo."
exit
fi
APP_REPO="$1/jboss-7.1.1.Final-docker"
wget -O db/maitai_docker.sql $APP_REPO/db/maitai_docker.sql
mkdir jboss-as/maitai/etc
wget -O jboss-as/maitai/etc/maitai.conf $APP_REPO/jboss-as/maitai/etc/maitai.conf
wget -O jboss-as/standalone-full.xml $APP_REPO/jboss-as/standalone-full.xml
mkdir jboss-app
wget -O jboss-app/maitai-server-ear.ear $APP_REPO/jboss-app/maitai-server-ear.ear
| true
|
2d3114c8f8f10e0687ffa187f6e5319272c40224
|
Shell
|
asconix-old/asconix-plone-buildout
|
/custom/bin/backup.sh
|
UTF-8
| 1,183
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/sh
# This script backups the ZODB, the buildout files and custom configuration files
# create backup directory if not yet available
backup_dir=/var/plone/backup
backup_zodb_dir=$backup_dir/zodb
backup_blob_dir=$backup_dir/blob
backup_buildout_dir=$backup_dir/buildout
[ -d $backup_dir ] || mkdir $backup_dir
[ -d $backup_zodb_dir ] || mkdir $backup_zodb_dir
[ -d $backup_blob_dir ] || mkdir $backup_blob_dir
[ -d $backup_buildout_dir] || mkdir $backup_buildout_dir
# backup ZODB data
buildout_dir=/var/plone/buildout
python_cmd=/var/plone/bin/python
repozo_cmd=$buildout_dir/bin/repozo
zodb_file=$buildout_dir/var/filestorage/Data.fs
echo "ZODB backup started"
$python_cmd $repozo_cmd -Bvz -r $backup_zodb_dir -f $zodb_file
echo "ZODB backup finished"
# backup BLOB data
blob_dir=$buildout_dir/var/blobstorage
tar cvf $backup_blob_dir/`date +%Y-%m-%02d-%H-%M-%S`.tar $blob_dir
# backup buildout files
buildout_base_file=$buildout_dir/base.cfg
buildout_deployment_file=$buildout_dir/deployment.cfg
cp $buildout_base_file $backup_buildout_dir/base.cfg.`date +%Y-%m-%02d-%H-%M-%S`
cp $buildout_deployment_file $backup_buildout_dir/deployment.cfg.`date +%Y-%m-%02d-%H-%M-%S`
| true
|
5ddf34ac6c903fd81426e40cafb32a686a1cb3bf
|
Shell
|
ragusa/EV
|
/conservation_law/bin/dox
|
UTF-8
| 502
| 3.84375
| 4
|
[] |
no_license
|
#!/bin/bash
# check if current directory is valid
directory=${PWD##*/}
if [ $directory == "conservation_law" -o $directory == "transport" ]; then
# create doxygen output directory if it does not exist
mkdir -p doxygen
# compile html
doxygen Doxyfile
# view html using Google-Chrome
google-chrome doxygen/html/index.html
# switch focus to window with doxygen output
wmctrl -a Google Chrome
else
echo "Not a valid directory: must be in \"conservation_law\" or \"transport\""
fi
| true
|
1f78aa32d5446569c1a41d47f13064399bd8aa86
|
Shell
|
RazorCMS/RazorAnalyzer
|
/scripts_condor/runRazorJob_CaltechT2_InputFromCERNT2_JM.sh
|
UTF-8
| 3,532
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
# Condor worker script: run one RazorAnalyzer job on a Caltech T2 node,
# reading razor ntuples from CERN T2 over xrootd and copying the result
# back to /mnt/hadoop.
#
# Arguments:
#   $1 analysisType     analyzer to run (passed through to RazorRun_T2_JM)
#   $2 inputfilelist    input list path, relative to $CMSSW_BASE
#   $3 isData           1 for data, 0 for simulation
#   $4 option           analyzer option number
#   $5 filePerJob       number of input files consumed per job
#   $6 jobnumber        zero-based job index (selects a slice of the list)
#   $7 outputfile       name of the output ROOT file
#   $8 outputDirectory  destination directory (under /mnt/hadoop)
#   $9 analysisTag      analysis tag label passed via -l=
#
# NOTE: shebang changed from /bin/sh to /bin/bash -- the script relies on
# the bash `source` builtin.
hostname
echo "Job started"
date
analysisType=$1
echo "analysisType "${analysisType}
inputfilelist=$2
echo "inputfilelist "${inputfilelist}
isData=$3
echo "isData "${isData}
option=$4
echo "option "${option}
filePerJob=$5
echo "filePerJob "${filePerJob}
jobnumber=$6
echo "jobnumber "${jobnumber}
outputfile=$7
echo "outputfile "${outputfile}
outputDirectory=$8
echo "outputDirectory "${outputDirectory}
#code_dir_suffix=$9
#echo "code_dir_suffix "${code_dir_suffix}
analysisTag=$9
echo "analysisTag "${analysisTag}
currentDir=`pwd`
homeDir=/data/jmao/CMSSW_9_2_1/src/RazorAnalyzer/
# Per-job scratch directory; $RANDOM keeps concurrent jobs on a node apart.
#runDir=${currentDir}/jmao_${code_dir_suffix}/
#runDir=${currentDir}/jmao_${analysisType}_Job${jobnumber}/
runDir=${currentDir}/jmao_$RANDOM/
rm -rf ${runDir}
mkdir -p ${runDir}
if [ -f /cvmfs/cms.cern.ch/cmsset_default.sh ]
then
	# Set up the CMSSW runtime environment from cvmfs.
	cd ${homeDir}
	workDir=`pwd`
	echo "entering directory: ${workDir}"
	source /cvmfs/cms.cern.ch/cmsset_default.sh
	export SCRAM_ARCH=slc7_amd64_gcc630
	ulimit -c 0   # disable core dumps on the worker node
	eval `scram runtime -sh`
	echo `which root`
	cd ${runDir}
	echo "entering directory: ${runDir}"
	if [ -f $CMSSW_BASE/src/RazorAnalyzer/RazorRun_T2_JM ]
	then
		cp $CMSSW_BASE/src/RazorAnalyzer/RazorRun_T2_JM ./
		# Grid proxy needed for xrootd reads.
		export X509_USER_PROXY=${homeDir}x509_proxy
		# Select this job's slice of the input list: lines
		# (jobnumber*filePerJob, (jobnumber+1)*filePerJob].
		cat ${CMSSW_BASE}${inputfilelist} | awk "NR > (${jobnumber}*${filePerJob}) && NR <= ((${jobnumber}+1)*${filePerJob})" > inputfilelistForThisJob_${jobnumber}.txt
		echo ""
		echo "************************************"
		echo "Running on these input files:"
		cat inputfilelistForThisJob_${jobnumber}.txt
		echo "************************************"
		echo ""
		nRemote=`cat inputfilelistForThisJob_${jobnumber}.txt | wc -l`
		# Stage the remote inputs locally via xrootd.
		echo "copy file to local directory ======"
		for ifile in `cat inputfilelistForThisJob_${jobnumber}.txt`
		do
			xrdcp ${ifile} ./
			echo "copied to"
			# FIX: was `echo ${pwd}` (an unset variable); print the cwd.
			echo `pwd`
		done
		ls razorNtuple_*.root > inputfilelistForThisJob_${jobnumber}_local.txt
		nLocal=`cat inputfilelistForThisJob_${jobnumber}_local.txt |wc -l`
		# Only run when every input file arrived intact.
		if [ "${nRemote}" -eq "${nLocal}" ]
		then
			echo " "; echo "Starting razor run job now"; echo " ";
			# FIX: the debug echo now matches the real command (-l= was missing).
			echo ./RazorRun_T2_JM inputfilelistForThisJob_${jobnumber}_local.txt ${analysisType} -d=${isData} -n=${option} -f=${outputfile} -l=${analysisTag}
			./RazorRun_T2_JM inputfilelistForThisJob_${jobnumber}_local.txt ${analysisType} -d=${isData} -n=${option} -f=${outputfile} -l=${analysisTag}
			echo ${analysisTag}
			echo ${outputfile}
			echo ${outputDirectory}
			mkdir -p /mnt/hadoop/${outputDirectory}
			##^_^##
			echo "RazorRun_T2_JM finished"
			date
			sleep 2
			echo "I slept for 2 second"
			## Job finished; copy the output to T2 storage and verify it landed.
			echo "copying output file to /mnt/hadoop/${outputDirectory}"
			cp ${outputfile} /mnt/hadoop/${outputDirectory}
			if [ -f /mnt/hadoop/${outputDirectory}/${outputfile} ]
			then
				echo "ZZZZAAAA ============ good news, job finished successfully "
			else
				echo "YYYYZZZZ ============ somehow job failed, please consider resubmitting"
			fi
		else
			echo "XXXXYYYY ============= copy file failed (${nRemote} -> ${nLocal}), job abandoned"
		fi
	else
		# FIX: was `echo echo "..."` (printed the literal word echo) and
		# misspelled "anandoned".
		echo "WWWWYYYY ============= failed to access file RazorRun_T2_JM, job abandoned"
	fi
else
	echo "VVVVYYYY ============= failed to access file /cvmfs/cms.cern.ch/cmsset_default.sh, job abandoned"
fi
cd ${currentDir}
rm -rf ${runDir}
echo "Job finished"
date
| true
|
ed156b4fbea822cfa739a276d0ede3ebaf7c5d3f
|
Shell
|
freebsd/freebsd-ports
|
/www/perlbal/files/perlbal.in
|
UTF-8
| 360
| 2.859375
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# FreeBSD rc.d script for the Perlbal load balancer / reverse proxy.
# PROVIDE: perlbal
# REQUIRE: LOGIN
# KEYWORD: shutdown
# FIX: the line above was misspelled "KEYWORK"; rcorder(8) parses only
# "# KEYWORD:" lines, so perlbal was never stopped at system shutdown.
. /etc/rc.subr
name="perlbal"
command="%%PREFIX%%/bin/perlbal"
load_rc_config "$name"
# Defaults, overridable via rc.conf(5).
: ${perlbal_enable="NO"}
: ${perlbal_configuration="%%PREFIX%%/etc/perlbal/perlbal.conf"}
required_files="$perlbal_configuration"
command_args="--config $required_files --daemon"
run_rc_command "$1"
| true
|
986e1db31ef32995d00c5d05ed1ba0ddf2526dd6
|
Shell
|
za3k/noice
|
/media-player-rosemary/remote
|
UTF-8
| 195
| 2.546875
| 3
|
[
"CC0-1.0",
"LicenseRef-scancode-public-domain-disclaimer",
"LicenseRef-scancode-public-domain"
] |
permissive
|
#!/bin/bash
# Play a media item on the remote host "germinate-lan" through its
# magic-player helper, rewriting the local mount prefix /media/germinate
# to the remote path /data/video first.
ARG="$(echo "$1" | sed -e 's/\/media\/germinate/\/data\/video/')"
# NOTE(review): this falls back to the X clipboard when the rewritten path
# EXISTS; that looks inverted (presumably "! -e" was intended: use the
# argument when valid, clipboard otherwise). TODO confirm before changing.
if [ -e "$ARG" ]; then
ARG="$(xclip -o)"
fi
ssh germinate-lan -- /var/local/media-player/bin/magic-player "'$ARG'"
| true
|
b7c0ea3952ab59144b8f428c9584fe5d752b82d8
|
Shell
|
jwebcat/dotfiles
|
/sync-cdargs-mc_hotlist
|
UTF-8
| 518
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
# Synchronize cdargs directory list with mc's hotlist
# (and mirror the same bookmarks for spacefm).
#
# ~/.cdargs lines have the form "<name> <path>"; each entry becomes an mc
# hotlist ENTRY line and a spacefm "file://" bookmark.
#
# FIXES: shebang changed to bash (the script uses the bash-only [[ ]]);
# missing ~/.cdargs now aborts instead of falling through and clobbering
# the hotlist; the spacefm config directory is created before writing.
if [[ ! -e ~/.cdargs ]]; then
    echo "Unable to find file ~/.cdargs." 1>&2
    exit 1
fi
if [[ ! -e ~/.config/mc/hotlist ]]; then
    mkdir -p ~/.config/mc
fi
mkdir -p ~/.config/spacefm
# mc hotlist format: ENTRY "<name>" URL "<path>"
while read n l; do
    if [[ -z "${n}" ]] || [[ -z "${l}" ]]; then
        continue
    fi
    echo "ENTRY \"${n}\" URL \"${l}\""
done < ~/.cdargs > ~/.config/mc/hotlist
# spacefm bookmark format: file://<path> <name>
while read n l; do
    if [[ -z "${n}" ]] || [[ -z "${l}" ]]; then
        continue
    fi
    echo "file://${l} ${n}"
done < ~/.cdargs > ~/.config/spacefm/bookmarks
| true
|
de2e46e206ff37375618f81759e181de65b30a4d
|
Shell
|
choikongji/script
|
/취약점진단/shell_script/script/U-53.sh
|
UTF-8
| 1,812
| 2.859375
| 3
|
[] |
no_license
|
# U-53 audit check: remote access should use SSH rather than cleartext
# Telnet/FTP.  Appends an XML fragment (<U-53>...</U-53>) containing the
# raw evidence, the evaluation criteria and the verdict to ../result.xml.
# (The echoed Korean text is the report's payload; left untranslated.)
echo "<U-53>" >> ../result.xml
echo "<Name> ssh 원격접속 허용 </Name>" >> ../result.xml
echo "<Security_Threat>" >> ../result.xml
echo "원격 접속 시 Telnet, FTP 등은 암호화되지 않은 상태로 데이터를 전송하기" >> ../result.xml
echo "때문에 아이디/패스워드 및 중요 정보가 외부로 유출될 위험성이 있음" >> ../result.xml
echo "</Security_Threat>" >> ../result.xml
# Raw command output is wrapped in CDATA so XML stays well-formed.
echo "<Data> <![CDATA[" >> ../result.xml
echo "#ps -ef | grep -i \"ssh\" | grep -v \"grep\"" >> ../result.xml
ps -ef | grep -i "ssh" | grep -v "grep" >> ../result.xml 2>&1
echo "" >> ../result.xml
echo "#netstat -al | grep -i -E 'telnet|ftp'" >> ../result.xml
netstat -al | grep -i -E 'telnet|ftp' >> ../result.xml 2>&1
echo "" >> ../result.xml
echo "]]> </Data>" >> ../result.xml
echo "<Comment>" >> ../result.xml
echo "[양호] : 원격 접속 시 SSH 프로토콜을 사용하는 경우" >> ../result.xml
echo "[취약] : 원격 접속 시 Telnet, FTP 등 안전하지 않은 프로토콜을 사용하는 경우" >> ../result.xml
echo "※ ssh, telnet이 동시에 설치되어 있는 경우 취약한 것으로 평가됨" >> ../result.xml
echo "</Comment>" >> ../result.xml
echo "<Result>" >> ../result.xml
# NOTE(review): `flag` is assigned but never read in this script.
flag="True"
# Verdict: any telnet/ftp socket => vulnerable; else ssh process => good;
# else not applicable.
if [ `netstat -al | grep -i -E 'telnet|ftp' | wc -l` -ge 1 ]
then
echo "[취약] : 원격 접속 시 Telnet, FTP 등 안전하지 않은 프로토콜을 사용하는 경우" >> ../result.xml
else
if [ `ps -ef | grep -i "ssh" | grep -v "grep" | wc -l` -ge 1 ]
then
echo "[양호] : 원격 접속 시 SSH 프로토콜을 사용하는 경우" >> ../result.xml
else
echo "[N/A] : SSH 서비스를 사용하지 않는 경우" >> ../result.xml
fi
fi
echo "</Result>" >> ../result.xml
echo "</U-53>" >> ../result.xml
echo "" >> ../result.xml
| true
|
876fbad6db2145b5ae51831f636a14236080ee41
|
Shell
|
liujitao/zabbix_template
|
/cloud/zabbix/script/rabbitmq-check.sh
|
UTF-8
| 421
| 3.09375
| 3
|
[] |
no_license
|
#set -o xtrace
# Zabbix item script: print 1 when the RabbitMQ cluster reports at least as
# many running nodes as there are controller nodes in the Fuel/astute
# deployment layout, else 0.
# Running-node count: number of comma-separated entries inside the
# running_nodes,[...] list of `rabbitmqctl cluster_status` (Erlang term
# syntax), extracted by splitting on '[' then ']' then counting ',' fields.
NUM_OF_RUNNING_NODES=$(sudo /usr/sbin/rabbitmqctl cluster_status|grep "running_nodes"|awk -F[ '{print $2}'|awk -F] '{print $1}'|awk -F, '{print NF}')
# Expected count: "role: controller" plus "role: primary-controller"
# entries in astute.yaml.
CONTROLLER_NODE_NUM=`expr $(sudo cat /etc/astute.yaml|grep " role: controller"|wc -l) + $(sudo cat /etc/astute.yaml|grep " role: primary-controller"|wc -l)`
if [[ ${NUM_OF_RUNNING_NODES} -ge $CONTROLLER_NODE_NUM ]]
then
echo 1
else
echo 0
fi
| true
|
7187fab52135447db9a3576887ad7ad1269acb20
|
Shell
|
dalguete/e9ter
|
/usr/share/e9ter/core/options_handling/components/remove_option.sh
|
UTF-8
| 708
| 4.375
| 4
|
[] |
no_license
|
# Functionality for the remove_option function
#
# Drop every occurrence of a stored value from the option array kept in the
# global variable OPTION__<key> (dashes in the key are mapped to underscores).
function remove_option() {
  # Require both a key and a value.
  if [ $# -lt 2 ]; then
    die "Required options key storage to check value existence against"
  fi

  # Normalise the key and locate its backing store.
  local store_key=$(echo "$1" | tr '-' '_')
  local target="$2"
  local store_var="OPTION__$store_key"

  # Rebuild the array, keeping only entries that differ from the target.
  local indirect="$store_var[@]"
  local kept=()
  local entry
  for entry in "${!indirect}"; do
    [[ "$entry" == "$target" ]] || kept+=("$entry")
  done

  # Write the filtered list back into the named store.
  eval "$store_var=(\"\${kept[@]}\")"
}
| true
|
dccbfe47891442b94d9b57a9362a6346bd3e3bc8
|
Shell
|
usamaahmadkhan/hts
|
/hts/build/scripts/install_ltp_centos.sh
|
UTF-8
| 1,377
| 3.28125
| 3
|
[] |
no_license
|
#!/bin/bash
# Install the Linux Test Project (LTP) suite on a CentOS host.
# `tryexec` and `log` come from the sourced helper scripts; `ltp_config`
# supplies $PREFIX (the install prefix used by ./configure).
source /root/hts/hts/build/scripts/bash_helpers.sh
source /root/hts/hts/build/scripts/log_helpers.sh
source /root/hts/hts/scripts/ltp_config
# Export language settings
tryexec export LANG=C
tryexec export LC_ALL=C
# PREREQS on CentOS (package-list is incomplete and may vary for other distros)
log "Installing pre-requisite packages"
tryexec sudo yum install gcc gcc-c++ kernel-devel make -y
tryexec sudo yum install autoconf automake m4 -y
tryexec sudo yum install git -y
tryexec sudo yum install libaio-devel libcap-devel -y
# Working directory
log "Creating LTP working directory"
VERSION=20170116
tryexec sudo rm -rf /opt/ltp
tryexec sudo mkdir -p /opt/ltp
tryexec pushd /opt/ltp &> /dev/null
# Get the LTP source (a release tarball via wget; despite the log message
# below, nothing is cloned with git)
log "Cloning LTP source"
tryexec sudo wget https://github.com/linux-test-project/ltp/releases/download/$VERSION/ltp-full-$VERSION.tar.xz
# Configure LTP (unpack into the current directory, stripping the top-level dir)
log "Configuring LTP source"
tryexec sudo tar --strip-components=1 -Jxf ltp-full-$VERSION.tar.xz
# Start building LTP
# NOTE(review): a build/ dir is created but configure/make run in the source
# dir, so it appears unused -- confirm before removing the mkdir.
log "Building LTP"
tryexec sudo mkdir build
tryexec sudo ./configure --prefix=$PREFIX
# Install LTP (requires superuser privileges)
log "Installing LTP"
tryexec sudo make all
tryexec sudo make SKIP_IDCHECK=1 install
tryexec popd &> /dev/null
#Run LTP Network setup
log "Configuring LTP Network Environment"
tryexec sudo bash /root/hts/hts/scripts/setup_env_centos.sh
| true
|
4a03123f6eb6f62798a5c0d944239196c3f9c5fd
|
Shell
|
kazufusa/dotfiles
|
/zsh/.zshrc
|
UTF-8
| 6,528
| 3.015625
| 3
|
[] |
no_license
|
# Start configuration added by Zim install {{{
#
# User configuration sourced by interactive shells
#
# -----------------
# Zsh configuration
# -----------------
#
# History
#
# Remove older command from the history if a duplicate is to be added.
setopt HIST_IGNORE_ALL_DUPS
#
# Input/output
#
# Set editor default keymap to emacs (`-e`) or vi (`-v`)
bindkey -e
# Prompt for spelling correction of commands.
#setopt CORRECT
# Customize spelling correction prompt.
#SPROMPT='zsh: correct %F{red}%R%f to %F{green}%r%f [nyae]? '
# Remove path separator from WORDCHARS.
WORDCHARS=${WORDCHARS//[\/]}
# -----------------
# Zim configuration
# -----------------
# Use degit instead of git as the default tool to install and update modules.
#zstyle ':zim:zmodule' use 'degit'
# --------------------
# Module configuration
# --------------------
#
# git
#
# Set a custom prefix for the generated aliases. The default prefix is 'G'.
#zstyle ':zim:git' aliases-prefix 'g'
#
# input
#
# Append `../` to your input for each `.` you type after an initial `..`
#zstyle ':zim:input' double-dot-expand yes
#
# termtitle
#
# Set a custom terminal title format using prompt expansion escape sequences.
# See http://zsh.sourceforge.net/Doc/Release/Prompt-Expansion.html#Simple-Prompt-Escapes
# If none is provided, the default '%n@%m: %~' is used.
#zstyle ':zim:termtitle' format '%1~'
#
# zsh-autosuggestions
#
# Disable automatic widget re-binding on each precmd. This can be set when
# zsh-users/zsh-autosuggestions is the last module in your ~/.zimrc.
ZSH_AUTOSUGGEST_MANUAL_REBIND=1
# Customize the style that the suggestions are shown with.
# See https://github.com/zsh-users/zsh-autosuggestions/blob/master/README.md#suggestion-highlight-style
#ZSH_AUTOSUGGEST_HIGHLIGHT_STYLE='fg=242'
#
# zsh-syntax-highlighting
#
# Set what highlighters will be used.
# See https://github.com/zsh-users/zsh-syntax-highlighting/blob/master/docs/highlighters.md
ZSH_HIGHLIGHT_HIGHLIGHTERS=(main brackets)
# Customize the main highlighter styles.
# See https://github.com/zsh-users/zsh-syntax-highlighting/blob/master/docs/highlighters/main.md#how-to-tweak-it
#typeset -A ZSH_HIGHLIGHT_STYLES
#ZSH_HIGHLIGHT_STYLES[comment]='fg=242'
# ------------------
# Initialize modules
# ------------------
ZIM_HOME=${ZDOTDIR:-${HOME}}/.zim
# Download zimfw plugin manager if missing.
if [[ ! -e ${ZIM_HOME}/zimfw.zsh ]]; then
if (( ${+commands[curl]} )); then
curl -fsSL --create-dirs -o ${ZIM_HOME}/zimfw.zsh \
https://github.com/zimfw/zimfw/releases/latest/download/zimfw.zsh
else
mkdir -p ${ZIM_HOME} && wget -nv -O ${ZIM_HOME}/zimfw.zsh \
https://github.com/zimfw/zimfw/releases/latest/download/zimfw.zsh
fi
fi
# Install missing modules, and update ${ZIM_HOME}/init.zsh if missing or outdated.
if [[ ! ${ZIM_HOME}/init.zsh -nt ${ZDOTDIR:-${HOME}}/.zimrc ]]; then
source ${ZIM_HOME}/zimfw.zsh init -q
fi
# Initialize modules.
source ${ZIM_HOME}/init.zsh
# ------------------------------
# Post-init module configuration
# ------------------------------
#
# zsh-history-substring-search
#
zmodload -F zsh/terminfo +p:terminfo
# Bind ^[[A/^[[B manually so up/down works both before and after zle-line-init
for key ('^[[A' '^P' ${terminfo[kcuu1]}) bindkey ${key} history-substring-search-up
for key ('^[[B' '^N' ${terminfo[kcud1]}) bindkey ${key} history-substring-search-down
for key ('k') bindkey -M vicmd ${key} history-substring-search-up
for key ('j') bindkey -M vicmd ${key} history-substring-search-down
unset key
# }}} End configuration added by Zim install
# PATH {{{
PATH=$HOME/dotfiles/bin:$PATH
mkdir -p $HOME/bin
if [ -d $HOME/Dropbox/forpath ]; then
PATH=$HOME/bin:$HOME/Dropbox/forpath:$PATH
else
PATH=$HOME/bin:$PATH
fi
# }}}
# cdrを有効にして設定する {{{
autoload -Uz chpwd_recent_dirs cdr add-zsh-hook
add-zsh-hook chpwd chpwd_recent_dirs
zstyle ':completion:*:*:cdr:*:*' menu selection
zstyle ':chpwd:*' recent-dirs-max 100
zstyle ':chpwd:*' recent-dirs-default true
zstyle ':chpwd:*' recent-dirs-insert true
zstyle ':chpwd:*' recent-dirs-file "$ZDOTDIR"/chpwd-recent-dirs
# AUTO_CDの対象に ~ と上位ディレクトリを加える
cdpath=(~ ..)
# }}}
# ZSH options {{{
setopt no_beep
setopt nolistbeep
setopt correct
setopt list_packed
# }}}
# Environment variables {{{
# SHELL
export SHELL=$(which zsh)
# history
if [ -d ~/Dropbox/dotfiles ]; then
export HISTFILE=~/Dropbox/dotfiles/.histfile
else
export HISTFILE=$ZDOTDIR/.histfile
fi
export HISTSIZE=50000
export SAVEHIST=10000000
# GPG_TTY
export GPG_TTY=$(tty)
export TERM=screen-256color
# Go
export _GOROOT=/usr/local/go
export GOPATH=$HOME/go
export PATH=$GOPATH/bin:$_GOROOT/bin:$PATH
export GO111MODULE=on
# Rust
if [ -f $HOME/.cargo/env ]; then
source "$HOME/.cargo/env"
fi
# }}}
# Aliases {{{
alias ls='exa --classify --icons -h --reverse --git'
alias l=ll
alias la='ll -a'
alias rm='rm -i'
alias cp='cp -i'
alias mv='mv -i'
alias h='sudo shutdown -h'
alias r='sudo shutdown -r'
# repo [query]: fuzzy-select one of the ghq-managed repositories with fzf;
# the preview pane shows the first 80 lines of the repo's README via bat.
function repo(){
ghq list | fzf --reverse +m -q "$1" \
--preview "find $(ghq root)/{} -maxdepth 1 -iname 'readme.*' \
| xargs bat --color=always --style=header,grid --line-range :80"
}
# ce [query]: cd into a ghq repository chosen via repo(); "ce -" returns to
# the previous directory.  Uses zoxide's `z` when it is defined (so the jump
# is recorded in its database), otherwise plain `cd`.
function ce() {
local _cd=`type z >/dev/null 2>&1 && echo z || echo cd`
if [ "$1" = "-" ]; then
$_cd - >/dev/null
else
local _repo="$(repo $1)"
[[ -n "${_repo}" ]] && $_cd "$(ghq root)/${_repo}"
fi
}
# installgo <version> <os> <arch>: download and unpack a Go toolchain to
# /usr/local/go<version>, e.g. `installgo 1.21.0 linux amd64`.
# Skips the download when that directory already exists.
function installgo() {
if [ -d /usr/local/go$1 ]; then
echo /usr/local/go$1 exists
else
# NOTE(review): wget runs under sudo while tar extracts to /tmp as the
# invoking user; assumes the tarball's top-level directory is "go" --
# confirm against the upstream archive layout.
sudo wget https://golang.org/dl/go$1.$2-$3.tar.gz -O - | tar -C /tmp -xzf -
sudo mv /tmp/go /usr/local/go$1
fi
}
if [[ $(uname -a) =~ Linux ]]; then
function open() { cmd.exe /c start $(wslpath -w $1) }
fi
# }}}
# Etc {{{
# in vim, enable to map Ctrl-S to file saving
stty -ixon -ixoff
# zoxide {{{
if [ -x "$(command -v zoxide)" ]; then
eval "$(zoxide init zsh)"
alias cd="z"
fi
# }}}
# fzf for Mac {{{
if [[ $OSTYPE =~ darwin ]]; then
if [ -f ~/.fzf.zsh ]; then
source ~/.fzf.zsh
elif [ -x "$(command -v brew)" ]; then
brew install fzf
$(brew --prefix)/opt/fzf/install --all
source ~/.fzf.zsh
fi
fi
export FZF_DEFAULT_OPTS='--layout=reverse --border --exit-0'
# }}}
# zinit for Linux {{{
if [[ $(uname -a) =~ Linux ]]; then
source ${ZDOTDIR}/.zshrc.linux
fi
# }}}
# homebrew@M1 Mac {{{
if [[ -d '/opt/homebrew' ]]; then
HOMEBREW_HOME='/opt/homebrew'
eval "$($HOMEBREW_HOME/bin/brew shellenv)"
fi
# }}}
# }}}
autoload -Uz compinit && compinit
| true
|
6518fec621b0e6a08b9f4d25f1585a23a8946521
|
Shell
|
hilbix/rinse
|
/scripts/centos-5/post-install.sh
|
UTF-8
| 1,418
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/sh
# rinse post-install hook for CentOS 5: finish bootstrapping the chroot at
# the directory given as $1 (yum bootstrap, authconfig, cleanup).
#
prefix=$1
if [ ! -d "${prefix}" ]; then
echo "Serious error - the named directory doesn't exist."
# NOTE(review): exits with status 0; an explicit non-zero code would let
# the caller detect the failure -- confirm rinse's expectations.
exit
fi
# yum expects /etc/mtab to exist inside the chroot.
touch ${prefix}/etc/mtab
# rpm's can now be removed
rm -f ${prefix}/*.rpm
#
# BUGFIX: move urlgrabber's keepalive module aside so yum works inside the
# chroot.
#
echo "BUGFIX"
mkdir -p ${prefix}/usr/lib/python2.4/site-packages/urlgrabber.skx
for i in ${prefix}/usr/lib/python2.4/site-packages/urlgrabber/keepalive.*; do
mv $i ${prefix}/usr/lib/python2.4/site-packages/urlgrabber.skx/
done
#
# Record arch, if present.
# NOTE(review): $arch is never assigned in this script; presumably exported
# by the rinse driver -- confirm.
#
if [ -d $prefix/etc/rpm ]; then
#
# If i386 then record this
#
if [ "$arch" = "i386" ]; then
echo "i686-centos-linux-gnu" >> $prefix/etc/rpm/platform
fi
fi
#
# Run "yum install yum".
#
echo "  Bootstrapping yum"
chroot ${prefix} /usr/bin/yum -y install yum vim-minimal dhclient 2>/dev/null
#
# make 'passwd' work.
#
echo "  Authfix"
chroot ${prefix} /usr/bin/yum -y install authconfig
chroot ${prefix} /usr/bin/authconfig --enableshadow --update
#
# 5. Clean up
#
echo "  Cleaning up"
chroot ${prefix} /usr/bin/yum clean all
umount ${prefix}/proc
umount ${prefix}/sys
#
# 6. Remove the .rpm files from the prefix root.
#
echo "  Final tidy..."
find ${prefix} -name '*.rpmorig' -delete
find ${prefix} -name '*.rpmnew' -delete
# Install modprobe
if [ -e "${prefix}/etc/modprobe.d/modprobe.conf.dist" ]; then
cp "${prefix}/etc/modprobe.d/modprobe.conf.dist" "${prefix}/etc/modprobe.conf"
fi
| true
|
fd5d6f5f31a1099c1a7229f516097195d11ed560
|
Shell
|
wwwted/MySQL-HOWTOs
|
/scripts/start.sh
|
UTF-8
| 395
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Start the sandbox MySQL server built under $WS_HOME.
# Requires WS_HOME (set via ". ./setenv"); refuses to start while the
# server's socket file already exists.
if [ -z "$WS_HOME" ]; then
    echo "Need to set environment variable WS_HOME, run command: bash> . ./setenv"
    exit 1
fi
if [ -S "$WS_HOME/mysql.sock" ]; then
    # FIX: the old message named /tmp/mysql.sock but the check is on
    # $WS_HOME/mysql.sock; also fixed "runninng"/"exits" typos.
    echo "MySQL already running, socket file ($WS_HOME/mysql.sock) exists"
    exit 1
fi
echo "starting MySQL ..."
"$WS_HOME/mysqlsrc/bin/mysqld_safe" --defaults-file="$WS_HOME/my.cnf" --ledir="$WS_HOME/mysqlsrc/bin" &
# Give mysqld_safe time to bring the server up before the caller proceeds.
sleep 5
| true
|
0e47fdc25bb07aaaae9bf22c9eb6359dbccc16c5
|
Shell
|
bu6hunt3r/awesome_dotfiles
|
/common/zsh/main.zsh
|
UTF-8
| 4,280
| 3.078125
| 3
|
[] |
no_license
|
source ~/.zsh.d/constants.zsh
# load custom own shell_options from ~/.options/shell-options.zsh if present
source ~/.zsh.d/envbootstrap.zsh
# history settings / dir navigation options / glob options / better completion options
source ~/.zsh.d/options.zsh
# ANSI Color codes
source ~/.zsh.d/termcolors.zsh
# load autosuggestions module
source ~/.zsh.d/autosuggestions.zsh
# new bind keys in zsh
source ~/.zsh.d/fixescapecodes.zsh
# megabytes / kilobytes measurement & faster dir creation & bc utility
source ~/.zsh.d/functions.zsh
# load diverse aliases
# source ~/.zsh.d/aliases.zsh
# load git aliases
source ~/.zsh.d/git-aliases.zsh
# load docker aliases
source ~/.zsh.d/docker-aliases.zsh
# load emacs aliases
source ~/.zsh.d/alias-emacs.zsh
# load vim aliases
source ~/.zsh.d/alias-vim.zsh
# load prompt_settings
source ~/.zsh.d/prompt.zsh
# load plugin functionality
source ~/.zsh.d/dotplug.zsh
#load step
load_plugins
# init completion system
source ~/.zsh.d/completions.zsh
# colors for GNU ls (from coreutils)
#eval $(dircolors ~/.lscolors)
# Initialize fuzzy command line finder
source ~/.zsh.d/fzf.zsh
source ~/.zsh.d/syntax-highlighting.zsh
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
source ~/.options/shell-options.zsh
#VERBOSE=1
if [[ $VERBOSE -eq 1 ]]; then
echo -e "\033[1;33m==== Loading aliases ====\033[0m"
fi
# Load lsd aliases only when lsd is actually on PATH.
# BUG FIX: the old `$(which lsd &>/dev/null)` discarded stdout, so the
# variable was always empty and the "installed" branch always ran.
# Capture the resolved path and test for non-emptiness instead.
lsd_installed=$(command -v lsd)
if [[ -n $lsd_installed ]]; then
  lsd_prompt="\033[1;32m- [√] lsd installed...\033[0m"
  source "$HOME/.zsh.d/.config/zsh/.lsd_aliases"
else
  lsd_prompt="\033[1;31m- [ ] lsd not installed...(also check for patched font -> https://github.com/ryanoasis/nerd-fonts/blob/master/readme.md)\033[0m"
fi
if [[ $VERBOSE -eq 1 ]]; then
  echo -e "$lsd_prompt"
fi
# load diverse aliases
source ~/.zsh.d/aliases.zsh
# load tmux aliases
source ~/.config/zsh/.tmux_aliases
if [[ $? -eq 0 ]]; then
if [[ $VERBOSE -eq 1 ]]; then
echo -e "\033[1;32m- [√] tmux aliases loaded...\033[0m"
fi
else
if [[ $VERBOSE -eq 1 ]]; then
echo -e "\033[1;31m- [ ] tmux aliases not loaded...\033[0m"
fi
fi
# load virtualenv aliases
source ~/.config/zsh/.python_functions
if [[ $? -eq 0 ]]; then
if [[ $VERBOSE -eq 1 ]]; then
echo -e "\033[1;32m- [√] virtualenv aliases loaded...\033[0m"
fi
else
if [[ $VERBOSE -eq 1 ]]; then
echo -e "\033[1;31m- [ ] virtualenv aliases not loaded...\033[0m"
fi
fi
case "$(uname -s)" in
Linux)
if [[ -f "$HOME/.local/bin/virtualenvwrapper.sh" ]]; then
virtualenvwrapper_prompt="\033[1;32m- [√] virtualenvwrapper installed...\033[0m"
source "$HOME/.local/bin/virtualenvwrapper.sh"
else
virtualenvwrapper_prompt="\033[1;31m- [ ] virtualenvwrapper not installed...\033[0m"
fi
;;
esac
if [[ $VERBOSE -eq 1 ]]; then
echo -e "$virtualenvwrapper_prompt"
fi
if [[ $VERBOSE -eq 1 ]]; then
echo ""
echo "\033[1;33m==== Loading zsh's config files (stow actions) ====\033[0m"
fi
source "$HOME/.config/zsh/.zsh_path"
if [ $? -eq 0 ]; then
if [[ $VERBOSE -eq 1 ]]; then
echo -e "\033[1;32m- [√] loaded ~/.config/zsh/.zsh_path...(check within ~/.zshrc)\033[0m"
fi
else
if [[ $VERBOSE -eq 1 ]]; then
echo -e "\033[1;31m- [ ] did not load ~/.config/zsh/.zsh_path...something went wrong...(check within ~/.zshrc)\033[0m"
fi
fi
# General checks
if [[ $VERBOSE -eq 1 ]]; then
echo ""
echo -e "\033[1;33m==== General checks ====\033[0m"
fi
# Report whether Homebrew is on PATH.
# BUG FIX: `$(which brew &>/dev/null)` always produced an empty string, so
# the "installed" message was shown unconditionally; capture the resolved
# path and test it instead.
brew_installed=$(command -v brew)
if [[ -n $brew_installed ]]; then
  brew_prompt="\033[1;32m- [√] brew installed...\033[0m"
else
  brew_prompt="\033[1;31m- [ ] brew not installed...\033[0m"
fi
if [[ $VERBOSE -eq 1 ]]; then
  echo -e "$brew_prompt"
fi
# Report whether xscreensaver is on PATH.
# BUG FIX: same inverted always-empty capture as the other tool checks;
# use the resolved path from `command -v`.
screensaver_installed=$(command -v xscreensaver)
if [[ -n $screensaver_installed ]]; then
  screensaver_prompt="\033[1;32m- [√] xscreensaver installed...\033[0m"
else
  screensaver_prompt="\033[1;31m- [ ] xscreensaver not installed...\033[0m"
fi
if [[ $VERBOSE -eq 1 ]]; then
  echo -e "$screensaver_prompt"
fi
# Report whether tig is on PATH.
# BUG FIX: same inverted always-empty capture as the other tool checks;
# use the resolved path from `command -v`.
tig_installed=$(command -v tig)
if [[ -n $tig_installed ]]; then
  tig_prompt="\033[1;32m- [√] tig installed...\033[0m"
else
  tig_prompt="\033[1;31m- [ ] tig not installed...\033[0m"
fi
if [[ $VERBOSE -eq 1 ]]; then
  echo -e "$tig_prompt"
fi
export ZSH_LOADED=1
| true
|
758316e7a24260483a069a84122be656d94efea1
|
Shell
|
delkyd/alfheim_linux-PKGBUILDS
|
/monicelli-git/PKGBUILD
|
UTF-8
| 1,836
| 3.34375
| 3
|
[] |
no_license
|
# This is an example PKGBUILD file. Use this as a start to creating your own,
# and remove these comments. For more information, see 'man PKGBUILD'.
# NOTE: Please fill out the license field for your package! If it is unknown,
# then please put 'unknown'.
# The following guidelines are specific to BZR, GIT, HG and SVN packages.
# Other VCS sources are not natively supported by makepkg yet.
# Contributor: Andrea Zucchelli <zukka77@gmail.com>
pkgname=monicelli-git # '-bzr', '-git', '-hg' or '-svn'
pkgver=r171.f229f3f
pkgrel=1
pkgdesc="An esoterical programming language based on the so-called \"supercazzole\" from the movie Amici Miei, a masterpiece of the Italian comedy."
arch=('i686' 'x86_64')
url="https://github.com/esseks/monicelli"
license=('GPL3')
groups=()
depends=()
makedepends=('git' 'bison>=3' 'flex>=2.5' 'cmake') # 'bzr', 'git', 'mercurial' or 'subversion'
provides=("${pkgname%-git}")
conflicts=("${pkgname%-git}")
replaces=()
backup=()
options=()
install=
source=('monicelli::git+https://github.com/esseks/monicelli.git')
noextract=()
md5sums=('SKIP')
# Please refer to the 'USING VCS SOURCES' section of the PKGBUILD man page for
# a description of each element in the source array.
# pkgver(): derive a VCS version "r<commit-count>.<short-hash>" since the
# upstream repository carries no tags.
pkgver() {
cd "$srcdir/${pkgname%-git}"
# Git, no tags available
printf "r%s.%s" "$(git rev-list --count HEAD)" "$(git rev-parse --short HEAD)"
}
# prepare(): no patching needed; kept as a placeholder.
prepare() {
cd "$srcdir/${pkgname%-git}"
}
# build(): out-of-source CMake build in ./build.
build() {
cd "$srcdir/${pkgname%-git}"
if [ ! -d build ];then
mkdir build
fi
cd build
cmake ..
make
}
# package(): install the mcc compiler binary, the examples and the language
# specification under $pkgdir.
package() {
cd "$srcdir/${pkgname%-git}"
install -D -m755 build/mcc $pkgdir/usr/bin/mcc
install -Dd -m755 examples $pkgdir/usr/share/${pkgname%-git}/examples
install -D -m644 examples/* -t $pkgdir/usr/share/${pkgname%-git}/examples
install -D -m755 Specification.txt $pkgdir/usr/share/${pkgname%-git}/Specification.txt
}
| true
|
955f9f1b97b4429bcb4fa4beab9a0ccdb8ccb6d0
|
Shell
|
lidanyang/shell
|
/linuxcommandlineandshell/13userinput/test20.sh
|
UTF-8
| 752
| 3.5625
| 4
|
[] |
no_license
|
#########################################################################
# File Name: test20.sh
# Author: ma6174
# mail: ma6174@163.com
# Created Time: 2015年12月07日 星期一 13时50分32秒
#########################################################################
#!/bin/bash
# processing options and parameters with getopts
# Walks the option string ":ab:cd" (the leading ':' selects silent error
# handling, putting unknown options in $OPTARG), then shifts past the parsed
# options and echoes the remaining positional parameters.
echo "$@"
while getopts :ab:cd opt
do
	case "$opt" in
	a) echo "Found the -a option";;
	b) echo "Found the -b option,with value $OPTARG";;
	c) echo "Found the -c option";;
	d) echo "Found the -d option";;
	*) echo "Unknow option :$opt";;
	esac
done
echo $OPTIND
# FIX: POSIX $(( ... )) arithmetic replaces the deprecated $[ ... ] form.
shift $(( OPTIND - 1 ))
count=1
for param in "$@"
do
	echo "Parameter $count: $param"
	count=$(( count + 1 ))
done
| true
|
d8f0a3e5e149bee08d2b7fc122dd3f658fc327c1
|
Shell
|
scchess/sbtappendix
|
/star/scripts/run_ribomap.sh
|
UTF-8
| 10,000
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
#=============================
# default parameters
#=============================
adapter=CTGTAGGCACCATCAAT
min_fplen=27
max_fplen=33
nproc=15 # threads #used to be 15
nmismatch=10
offset=12 # P-site offset
force=true #now have to remake index each run
singleflag=0
doubleflag=0
#=============================
# pre-filled parameters
#=============================
src_dir=`dirname $0`
bin_dir=${src_dir}/../bin/
lib_dir=${src_dir}/../lib/
export PATH=${bin_dir}:$PATH
export LD_LIBRARY_PATH=${lib_dir}:$LD_LIBRARY_PATH
export DYLD_LIBRARY_PATH=${lib_dir}:$DYLD_LIBRARY_PATH
work_dir=${src_dir}/../
star_idx_dir=${work_dir}StarIndex/
# star index
rrna_idx=${star_idx_dir}contaminant/
transcript_idx=${star_idx_dir}transcript/
#=============================
# functions
#=============================
# print error message and usage message
# quit program if indicated
# error_msg $error_msg $quit
# Print an error message; when the second argument is the literal string
# "true", also print the usage line and terminate the whole pipeline.
error_msg ()
{
    echo "$1"
    [ "$2" = true ] || return 0
    echo "Usage: ./run_ribomap.sh --rnaseq_fq rnaseq.fq.gz --riboseq_fq riboseq.fq.gz --contaminant_fa contaminant.fa --transcript_fa transcript.fa --cds_range cds_range.txt"
    exit
}
# check whether file generated successfully at each single step
# quit program if file failed to be generated
# check_file $file_name $error_msg
# Abort the pipeline when an expected output file was not produced:
# prints the supplied diagnostic and exits unless $1 is a regular file.
check_file ()
{
    [ -f "$1" ] && return
    echo "$2"
    exit
}
#=============================
# read in command line args
# space separated
#=============================
while [[ $# > 1 ]]
do
key="$1"
shift
case $key in
--rnaseq_fq)
singleflag=1
rnaseq_fq="$1"
echo $rnaseq_fq
shift
;;
--rnaseq_fq2)
doubleflag=1
rnaseq_fq2="$1"
echo $rnaseq_fq2
shift
;;
--riboseq_fq)
riboseq_fq="$1"
shift
;;
--transcript_fa)
transcript_fa="$1"
shift
;;
--contaminant_fa)
contaminant_fa="$1"
shift
;;
--cds_range)
cds_range="$1"
shift
;;
--work_dir)
work_dir="$1"
shift
;;
--adapter)
adapter="$1"
shift
;;
--min_fplen)
min_fplen="$1"
shift
;;
--max_fplen)
max_fplen="$1"
shift
;;
--nproc)
nproc="$1"
shift
;;
--nmismatch)
nmismatch="$1"
shift
;;
--offset)
offset="$1"
shift
;;
--fasta_dir)
fasta_dir="$1"
shift
;;
--star_idx_dir)
star_idx_dir="$1"
rrna_idx=${star_idx_dir}contaminant/
transcript_idx=${star_idx_dir}transcript/
shift
;;
--alignment_dir)
tmp_dir="$1"
shift
;;
--sailfish_dir)
sm_odir="$1"
shift
;;
--output_dir)
output_dir="$1"
shift
;;
--force)
force="$1"
shift
;;
--timing_file)
timing_file="$1"
shift
;;
*)
# unknown option
;;
esac
done
#if [ -z "${riboseq_fq}" ]; then
# error_msg "ribo-seq reads not provided!" true
#elif [ ! -f ${riboseq_fq} ]; then
# error_msg "ribo-seq file not exist! ${riboseq_fq}" true
#elif [ -z "${rnaseq_fq}" ]; then
# error_msg "RNA-seq reads not provided!" true
#elif [ ! -f ${rnaseq_fq} ]; then
# error_msg "RNA-seq file not exist! ${rnaseq_fq}" true
#elif [ -z "${contaminant_fa}" ]; then
# error_msg "contaminant fasta not provided! Filter step skipped." false
#elif [ ! -f ${contaminant_fa} ]; then
# error_msg "contaminant fasta not exist! ${contaminant_fa}" false
#elif [ -z "${cds_range}" ]; then
# error_msg "cds range not provided! assume transcript fasta only contain cds regions." false
#elif [ ! -f ${cds_range} ]; then
# error_msg "cds range file not exist! ${cds_range}" false
#fi
if [ $singleflag -eq 1 ] && [ $doubleflag -eq 1 ]; then
echo "Paired read process."
rna_nrrna_fa="${rnaseq_fq} ${rnaseq_fq2}"
elif [ $singleflag -eq 1 ]; then
echo "Single read process."
rna_nrrna_fa=${rnaseq_fq}
else
echo "ERROR: User didn't provide single or double flag"
fi
#=============================
# make directories
#=============================
# preprocess
fasta_dir=${work_dir}data/fasta/
# star outputs
tmp_dir=${work_dir}alignment/
# salmon
sm_odir=${work_dir}sm_quant
# ribomap
output_dir=${work_dir}outputs
# star params
align_params="--seedSearchLmax 10 --outFilterMultimapScoreRange 0 --outFilterMultimapNmax 255 --outFilterMismatchNmax ${nmismatch} --outFilterIntronMotifs RemoveNoncanonical"
SAM_params="--outSAMtype BAM Unsorted --outSAMmode NoQS" # --outSAMprimaryFlag AllBestScore"
mkdir -p ${fasta_dir}
mkdir -p ${tmp_dir}
mkdir -p ${output_dir}
rna_core=${rnaseq_fq##*/}
rna_core=${rna_core%%.*}
ribo_core=${riboseq_fq##*/}
ribo_core=${ribo_core%%.*}
#=============================
# step 1: preprocess reads
#=============================
#echo "preprocessing reads (quality control + trim adapter + trim first base + collapse duplicate reads + fastq to fasta"
#fastx_pipe="fastx_clipper -Q33 -a ${adapter} -l ${min_fplen} -c -n -v | fastq_to_fasta -v"
#fastx_pipe="fastq_to_fasta -Q33 -v"
#rna_fa=${fasta_dir}${rna_core}.fa
#ribo_fa=${fasta_dir}${ribo_core}.fa
#if [ "${force}" = true ] || [ ! -f ${rna_fa} ]; then
# zcat ${rnaseq_fq} | ${fastx_pipe} -o ${rna_fa}
# check_file ${rna_fa} "pipeline failed at preprocessing rnaseq_fq: ${rnaseq_fq}!"
#fi
#if [ "${force}" = true ] || [ ! -f ${ribo_fa} ]; then
# #zcat ${riboseq_fq} | ${fastx_pipe} | fastx_collapser -v -o ${ribo_fa}
# zcat ${riboseq_fq} | ${fastx_pipe} -o ${ribo_fa}
# check_file ${ribo_fa} "pipeline failed at preprocessing riboseq_fq: ${riboseq_fq}!"
#fi
# ribo_size_fa=${fasta_dir}${ribo_core}-size.fa
# if [ ! -f ${ribo_size_fa} ] || [ "${force}" = true ]; then
# python ${src_dir}/filter_reads_by_size.py ${ribo_fa} ${ribo_size_fa} ${min_fplen} ${max_fplen}
# check_file ${ribo_size_fa} "pipeline failed at filtering riboseq with the wrong size!"
# fi
#=============================
# step 2: filter rrna
#=============================
#ornaprefix=${tmp_dir}${rna_core}_rrna_
#oriboprefix=${tmp_dir}${ribo_core}_rrna_
#rna_nrrna_fa=${ornaprefix}Unmapped.out.mate1
#ribo_nrrna_fa=${oriboprefix}Unmapped.out.mate1
#if [ ! -z "${contaminant_fa}" ] && [ -f ${contaminant_fa} ]; then
# echo "filtering contaminated reads"
# if [ "${force}" = true ] || [ ! -d ${rrna_idx} ]; then
# echo "building contaminant index..."
# mkdir -p ${rrna_idx}
# STAR --runThreadN $nproc --runMode genomeGenerate --genomeDir ${rrna_idx} --genomeFastaFiles ${contaminant_fa} --genomeSAindexNbases 5 --genomeChrBinNbits 11
# fi
# if [ "${force}" = true ] || [ ! -f ${rna_nrrna_fa} ]; then
# echo "filtering contaminants in RNA_seq..."
# STAR --runThreadN $nproc --genomeDir ${rrna_idx} --readFilesIn ${rna_fa} --outFileNamePrefix ${ornaprefix} --outStd SAM --outReadsUnmapped Fastx --outSAMmode NoQS${align_params} > /dev/null
# check_file ${rna_nrrna_fa} "pipeline failed at filtering rrna in RNA_seq!"
# fi
# if [ "${force}" = true ] || [ ! -f ${ribo_nrrna_fa} ]; then
# echo "filtering contaminants in ribo_seq..."
# STAR --runThreadN $nproc --genomeDir ${rrna_idx} --readFilesIn ${ribo_fa} --outFileNamePrefix ${oriboprefix} --outStd SAM --outReadsUnmapped Fastx --outSAMmode NoQS ${align_params} > /dev/null
# check_file ${ribo_nrrna_fa} "pipeline failed at filtering rrna in ribo_seq!"
# fi
#else
# echo "skipped filter read step."
# # TODO change rna_nrna_fa file name here
#fi
#========================================
# step 3: map to transcriptome
#========================================
ornaprefix=${tmp_dir}${rna_core}_transcript_
oriboprefix=${tmp_dir}${ribo_core}_transcript_
rna_bam=${ornaprefix}Aligned.out.bam
ribo_bam=${oriboprefix}Aligned.out.bam
echo "aligning reads to transcriptome"
if [ "${force}" = true ] || [ ! -d ${transcript_idx} ]; then
echo "building transcriptome index at ${transcript_idx}."
mkdir -p ${transcript_idx}
time STAR --runThreadN $nproc --runMode genomeGenerate --genomeDir ${transcript_idx} --genomeFastaFiles ${transcript_fa} --genomeSAindexNbases 6 --genomeChrBinNbits 15
fi
#if [ "${force}" = true ] || [ ! -f ${rna_bam} ]; then
echo "aligning RNA_seq to transcriptome..."
echo "time STAR --runThreadN $nproc --genomeDir ${transcript_idx} --readFilesIn ${rna_nrrna_fa} --readFilesCommand zcat --outFileNamePrefix ${ornaprefix} ${SAM_params} ${align_params}"
(time STAR --runThreadN $nproc --genomeDir ${transcript_idx} --readFilesIn ${rna_nrrna_fa} --readFilesCommand zcat --outFileNamePrefix ${ornaprefix} ${SAM_params} ${align_params} ) 2>> $timing_file
# check_file ${rna_bam} "pipeline failed at mapping RNA_seq to transcriptome!"
#fi
#if [ "${force}" = true ] || [ ! -f ${ribo_bam} ]; then
# echo "aligning ribo_seq to transcriptome..."
# STAR --runThreadN $nproc --genomeDir ${transcript_idx} --readFilesIn ${ribo_nrrna_fa} --outFileNamePrefix ${oriboprefix} ${SAM_params} ${align_params}
# check_file ${ribo_bam} "pipeline failed at mapping ribo_seq to transcriptome!"
#fi
#============================================
# step 4: salmon expression quantification
#============================================
#sm_out=${sm_odir}/quant_bias_corrected.sf
#if [ "${force}" = true ] || [ ! -f ${sm_out} ]; then
# echo "running salmon quant..."
# salmon quant -t ${transcript_fa} -l U -a ${rna_bam} -o ${sm_odir} -p $nproc --bias_correct
# check_file ${sm_out} "pipeline failed at expression quantification!"
#fi
#=============================
# step 5: run ribomap
#=============================
#ribomap_out=${output_dir}/${ribo_core}
#options="--mrnabam ${rna_bam} --ribobam ${ribo_bam} --fasta ${transcript_fa} --sf ${sm_out} --offset ${offset} --out ${ribomap_out}"
#if [ ! -z "${cds_range}" ] && [ -f ${cds_range} ]; then
# options+=" --cds_range ${cds_range}"
#fi
#if [ "${force}" = true ] || [ ! -f ${ribomap_out}.base ]; then
# echo "running riboprof..."
# riboprof ${options}
# check_file ${ribomap_out}.base "pipeline failed at ribosome profile generation!"
#fi
| true
|
44d0d5aa4a75e35aa1aeb948f90c5fb1983051de
|
Shell
|
nashvent/compiladores-ucsp
|
/practica-1/ejercicio-flex/runFlex.sh
|
UTF-8
| 414
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/sh
# Compile the flex grammar test.l, run the generated scanner over
# input.txt, show both input and output, then remove build artifacts.
# compile
if flex test.l; then
    # FIX: 'echo' only interprets '\n' in some /bin/sh implementations
    # (dash: yes, bash: no) -- printf makes the blank lines portable.
    printf 'flex compile success\n\n'
    gcc lex.yy.c -L/lib -lfl
    echo '------------'
    echo 'INPUT'
    echo '------------'
    cat input.txt
    ./a.out < input.txt > output.txt
    printf '\n\n------------\n'
    echo 'OUTPUT'
    echo '------------'
    cat output.txt
    printf '\n\n'
else
    printf 'flex compile fail\n\n'
fi
# remove intermediate files (-f: do not complain if compilation failed
# and no artifacts exist; ./ guards against filenames starting with '-')
rm -f ./*.c
rm -f ./*.out
| true
|
45eefa4c56dfc3e9956d5f4980baa9d91d0afacf
|
Shell
|
xiongqi1/web
|
/db_apps/webif/cgi-scripts/upload_killall.sh
|
UTF-8
| 765
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/sh
# Stop every service daemon before a firmware upload.  Only runs when the
# caller carries a SESSION_ID matching the 'sessionid' CGI parameter.
# FIX: replaced the deprecated '-o' test operator and the bashisms
# '==' and 'let', which are not guaranteed under #!/bin/sh.
if [ -z "${SESSION_ID}" ] || [ "${SESSION_ID}" != "${sessionid}" ]; then
	exit 0
fi

# kill_and_wait NAME -- signal every process called NAME via killall,
# then poll for up to 10 seconds until no such process remains.
kill_and_wait()
{
	timeout=10
	killall -q "$1" 1>/dev/null 2>&1
	while true; do
		# 'grep -v grep' drops our own pipeline from the ps listing.
		alive=$(ps | grep "$1" | grep -v grep)
		if [ "$alive" = "" ]; then
			break
		else
			timeout=$((timeout - 1))
			if [ "$timeout" -eq 0 ]; then
				return
			fi
			sleep 1
		fi
	done
	return
}

# Clear the captive-portal enable flag, then stop the daemons in turn.
rdb_set service.cportal.enable 0
kill_and_wait "wwand"
#kill_and_wait "pppd"
kill_and_wait "wscd"
kill_and_wait "ntpclient"
kill_and_wait "telnetd"
kill_and_wait "cnsmgr"
kill_and_wait "upnpd"
kill_and_wait "udhcpd"
kill_and_wait "dnsmasq"
#kill_and_wait "nvram_daemon"
kill_and_wait "periodicpingd"
#modprobe -r cdcs_DD
#echo 3 > /proc/sys/vm/drop_caches
#killall cnsmgr 1>/dev/null 2>&1
exit 0
| true
|
7d4e5f1547c5815b126c67649c447e7eb321ad0d
|
Shell
|
three6five/kvm-ovs
|
/clone.sh
|
UTF-8
| 1,238
| 3.828125
| 4
|
[] |
no_license
|
#!/usr/bin/bash
# Clone, start or delete a libvirt guest whose disk image lives under
# $vmImagePath.  See usage() for the supported invocations.
# FIX: quoted every expansion (guest names / paths with spaces or globs
# previously word-split) and used POSIX '=' instead of '==' in [ ].
#cloneSource='centos-mininst'
vmImagePath='/vm-images'
channelPath='/var/lib/libvirt/qemu/channel/target'

usage() {
	echo "Usage $0: guest [-d|--delete -s|--start]"
}

# cloneSys GUEST SOURCE -- clone SOURCE into a new guest named GUEST.
cloneSys() {
	local guest="$1"
	local cloneSource="$2"
	echo "Clone $guest from $cloneSource"
	virt-clone --connect qemu:///system \
		--original "$cloneSource" \
		--name "$guest" \
		--file "$vmImagePath/$guest.img"
}

# editChan GUEST -- point the guest-agent channel at the per-domain path.
editChan() {
	local guest="$1"
	echo "Edit $guest channel path.."
	virt-xml "$guest" --edit --channel \
		path="$channelPath/domain-$guest/org.qemu.guest_agent.0"
}

startGuest() {
	local guest="$1"
	echo "Starting $guest"
	virsh start "$guest"
}

# killGuest GUEST -- force-stop, undefine and delete the guest's image.
killGuest() {
	local guest="$1"
	echo "Annihilating $guest"
	virsh destroy "$guest"
	virsh undefine "$guest"
	rm -f -- "$vmImagePath/$guest.img"
}

# handle options
if [ $# -eq 2 ]
then
	# annihilate guest
	if [ "$2" = "-d" ] || [ "$2" = "--delete" ]
	then
		killGuest "$1"
	# start guest
	elif [ "$2" = "-s" ] || [ "$2" = "--start" ]
	then
		startGuest "$1"
	else
		# clone system: second argument is the clone source
		cloneSys "$1" "$2"
		editChan "$1"
	fi
# catch-all
else
	usage
	exit 1
fi
| true
|
84a78fd1858537ddb399d48ea56321f2db186f9b
|
Shell
|
fabiocicerchia/www.fabiocicerchia.it
|
/bin/console/modules/run.sh
|
UTF-8
| 14,171
| 3.453125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# FABIO CICERCHIA - WEBSITE
#
# Copyright 2012 - 2013 Fabio Cicerchia.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Bash Shell
#
# Category: Code
# Package: Console
# Author: Fabio Cicerchia <info@fabiocicerchia.it>
# License: MIT <http://www.opensource.org/licenses/MIT>
# Link: http://www.fabiocicerchia.it
#
################################################################################
# RUN ACTIONS
################################################################################
# {{{ run_check_accessibility() ------------------------------------------------
run_check_accessibility() {
    print_subheader "RUNNING CHECK ACCESSIBILITY"
    # FIX: quote the script path so a SOURCEDIR containing spaces
    # cannot word-split; exit status of the checker is propagated.
    python "$SCRIPT_APP_SOURCEDIR/check_accessibility.py"
    return $?
}
# }}} --------------------------------------------------------------------------
# {{{ run_check_validation() ---------------------------------------------------
run_check_validation() {
    print_subheader "RUNNING CHECK VALIDATION"
    # FIX: quote the script path so a SOURCEDIR containing spaces
    # cannot word-split; exit status of the validator is propagated.
    python "$SCRIPT_APP_SOURCEDIR/check_validation.py"
    return $?
}
# }}} --------------------------------------------------------------------------
# {{{ run_benchmark() ----------------------------------------------------------
run_benchmark() {
    print_subheader "RUNNING BENCHMARK"
    # FIX: quote the script path so a SOURCEDIR containing spaces
    # cannot word-split; exit status of the benchmark is propagated.
    python "$SCRIPT_APP_SOURCEDIR/benchmark.py"
    return $?
}
# }}} --------------------------------------------------------------------------
# {{{ run_todo() ---------------------------------------------------------------
# Scan every file under the directory given as the *second* argument for
# "TODO" markers and write a formatted TODO list to $ROOTDIR/TODO.
# NOTE(review): the directory is read from $2, not $1 -- confirm against
# the dispatcher that invokes run_* functions.
run_todo() {
    print_subheader "GENERATING TODO"
    DIR=$2
    # Start the report: ruler / title / ruler (helpers from this file).
    horizontal_line > $ROOTDIR/TODO
    echo "TODO LIST" >> $ROOTDIR/TODO
    horizontal_line >> $ROOTDIR/TODO
    # NOTE(review): word-splitting find output breaks on filenames with
    # spaces; IFS is only changed *inside* the body, after splitting.
    for FILE in $(find $DIR -name "*.*"); do
        OLDIFS=$IFS
        IFS=$'\n'
        # Grab "line:text" matches, then reduce each to "MESSAGE (line N)".
        RES=$(egrep -rni "TODO" $FILE)
        RES=$(echo "$RES" | sed -r "s/([0-9]+):.*TODO[^0-9a-z]*(.)?/\1: \U\2/i")
        RES=$(echo "$RES" | sed -r "s/([0-9]+):\s*(.+?)\s*/\2 (line \1)/")
        if [ -n "$RES" ]; then
            echo -en "\nFile: $FILE\n" >> $ROOTDIR/TODO
            horizontal_line >> $ROOTDIR/TODO
            # One bullet per match, wrapped at 80 columns with a hanging
            # indent; empty messages become "<No Message>".
            for MATCH in $RES; do
                LINE=$(echo "$MATCH" | sed -r "s/^ /<No Message> /")
                LINE=$(echo -n " - $LINE" | fold -sw 80 | sed -r "s/^/ /")
                LINE=$(echo "$LINE" | sed -r "s/^ {6}- / - /")
                echo -ne "\n$LINE" >> $ROOTDIR/TODO
            done
            echo >> $ROOTDIR/TODO
        fi
        IFS=$OLDIFS
    done
    echo "done"
}
# }}} --------------------------------------------------------------------------
# {{{ run_changelog() ----------------------------------------------------------
run_changelog() {
    print_subheader "GENERATING CHANGELOG"
    # FIX: quote ROOTDIR so paths containing spaces survive; regenerate
    # the CHANGELOG from git history via the vendored git2changelog.
    python "$ROOTDIR/lib/vendor/git2changelog/git2changelog.py" > "$ROOTDIR/CHANGELOG"
    echo "done"
}
# }}} --------------------------------------------------------------------------
# {{{ _ver_cmp_1() -------------------------------------------------------------
# Compare with one element of version components
# _ver_cmp_1 A B -- compare one version component.
# Returns 0 if equal, 1 if A > B, 2 if A < B.
_ver_cmp_1() {
    # FIX: numeric components must compare by value (9 < 10); the
    # original compared lexicographically, so "9" > "10".  Non-numeric
    # components (e.g. "rc1") keep the lexicographic fallback.  The
    # 10# radix prefix guards against "08" being read as octal.
    if [[ $1 =~ ^[0-9]+$ && $2 =~ ^[0-9]+$ ]]; then
        (( 10#$1 == 10#$2 )) && return 0
        (( 10#$1 > 10#$2 )) && return 1
        (( 10#$1 < 10#$2 )) && return 2
    else
        [[ $1 == $2 ]] && return 0
        [[ $1 > $2 ]] && return 1
        [[ $1 < $2 ]] && return 2
    fi
    # This should not be happening
    exit 1
}
# }}} --------------------------------------------------------------------------
# {{{ ver_cmp() ----------------------------------------------------------------
# ver_cmp V1 V2 -- compare dotted version strings component by component.
# Returns 0 if equal, 1 if V1 is newer, 2 if V2 is newer.
ver_cmp() {
    # FIX: the original wrote A=${1//./ } which assigns a plain *string*,
    # not an array, so ${#A[@]} was always 1 and whole version strings
    # were compared lexicographically.  Split into real arrays.
    local -a A=( ${1//./ } )
    local -a B=( ${2//./ } )
    local i=0 result
    while (( i < ${#A[@]} )) && (( i < ${#B[@]} )); do
        _ver_cmp_1 "${A[i]}" "${B[i]}"
        result=$?
        [[ $result =~ [12] ]] && return $result
        i=$(( i + 1 ))
    done
    # All shared components equal: the version with more components is
    # newer.  (FIX: the original compared element *lengths* ${#A[i]}.)
    _ver_cmp_1 "${#A[@]}" "${#B[@]}"
    return $?
}
# {{{ run_dependencies() -------------------------------------------------------
# Probe the machine for every external command, runtime version, Apache
# module, Perl/Python module and helper tool the project needs, printing
# a colourised yes/no (or version mismatch) line per item.
run_dependencies() {
    print_subheader "CALCULATING DEPENDIENCIES"

    declare -a commands=('cat' 'curl' 'cut' 'dirname' 'egrep' 'find' 'fold' \
        'sed' 'seq' 'wc' 'wget' 'xargs');
    for i in "${commands[@]}"; do
        echo -en "Checking '${TXTCYN}$i${TXTRST}'... "
        EXISTS=$(which $i)
        if [ -n "$EXISTS" ]; then
            echo -e "${TXTGRN}yes${TXTRST}"
        else
            echo -e "${TXTRED}no${TXTRST}"
        fi
    done
    # --------------------------------------------------------------------------
    # FIX: the original had a stray comma after '2.12.0' which became part
    # of the stored value ('2.12.0,') -- bash array entries are separated
    # by whitespace, not commas -- breaking cap's version comparison.
    declare -A versions=(['apache2']='2.2.22' ['cap']='2.12.0' \
        ['git']='1.7.9.5' ['git flow']='0.4.1' ['mongo']='2.0.4' \
        ['php']='5.4' ['perl']='5.14.2' ['python']='2.7');
    for i in "${!versions[@]}"; do
        echo -en "Checking '${TXTCYN}$i v${versions[$i]}${TXTRST}'... "
        # $i is deliberately unquoted here: multi-word entries such as
        # 'git flow' must reach whereis as separate arguments.
        EXISTS=$(whereis $i)
        if [ -n "$EXISTS" ]; then
            echo -en "${TXTGRN}yes${TXTRST}"
        else
            echo -en "${TXTRED}no${TXTRST}"
        fi
        # Resolve the actual binary; multi-word tools ('git flow') live
        # next to their parent binary, so rebuild the path from dirname.
        PRG=$(whereis $i | cut -d " " -f 2 | head -n 1)
        PIECES=$(echo $i | tr ' ' '\n' | wc -l)
        if [ $PIECES -gt 1 ]; then
            PRG=$(dirname $PRG)"/$i"
        fi
        # Tools disagree on how to report their version: try --version,
        # then -V, then a bare 'version' subcommand, extracting the first
        # dotted number (optionally with a -suffix like 'rc1').
        VERSION=$($PRG --version 2>&1 | \
            egrep "([0-9]+\.)+([0-9]+\.?)+(-?[a-z]+[0-9]*)?" | \
            sed -r "s/.*[^0-9\.](([0-9]+\.)([0-9]+\.?)+(-?[a-z]+[0-9]*)?).*/\1/")
        if [ -z "$VERSION" ]; then
            VERSION=$($PRG -V 2>&1 | \
                egrep "([0-9]+\.)+([0-9]+\.?)+(-?[a-z]+[0-9]*)?" | \
                sed -r "s/.*[^0-9\.](([0-9]+\.)([0-9]+\.?)+(-?[a-z]+[0-9]*)?).*/\1/")
        fi
        if [ -z "$VERSION" ]; then
            VERSION=$($PRG version 2>&1 | \
                egrep "([0-9]+\.)+([0-9]+\.?)+(-?[a-z]+[0-9]*)?" | \
                sed -r "s/.*[^0-9\.](([0-9]+\.)([0-9]+\.?)+(-?[a-z]+[0-9]*)?).*/\1/")
        fi
        ver_cmp "$VERSION" "${versions[$i]}"
        if [ $? -eq 2 ]; then
            echo -e " ${TXTYLW}but lower version ($VERSION)${TXTRST}"
        else
            echo ""
        fi
    done
    # --------------------------------------------------------------------------
    declare -a modules=('actions' 'cache' 'disk_cache' 'expires' 'headers' \
        'mem_cache' 'php5' 'perl' 'rewrite' 'security2' 'speling');
    APACHE2CTL=$(whereis apache2ctl | cut -d " " -f 2 | head -n 1)
    for i in "${modules[@]}"; do
        echo -en "Checking '${TXTCYN}Apache Module $i${TXTRST}'... "
        EXISTS=$($APACHE2CTL -M 2>&1 | grep "${i}_module")
        if [ -n "$EXISTS" ]; then
            echo -e "${TXTGRN}yes${TXTRST}"
        else
            echo -e "${TXTRED}no${TXTRST}"
        fi
    done
    # --------------------------------------------------------------------------
    # TODO: Check PEAR/PECL.
    echo -en "Checking '${TXTCYN}PHP Depend v1.0.7${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    echo -en "Checking '${TXTCYN}PHP Mess Detector v1.3.3${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    echo -en "Checking '${TXTCYN}PHP Mongo v1.2.10${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    echo -en "Checking '${TXTCYN}PHP XDebug v2.2.0${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    echo -en "Checking '${TXTCYN}PHPUnit v3.6.11${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    echo -en "Checking '${TXTCYN}PHP_CodeBrowser v1.0.2${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    echo -en "Checking '${TXTCYN}PHP_CodeCoverage v1.1.2${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    echo -en "Checking '${TXTCYN}PHP_CodeSniffer v1.3.4${TXTRST}'... "
    echo -e "${TXTYLW}TODO${TXTRST}"
    # --------------------------------------------------------------------------
    declare -a modules2=('Data::Dumper' 'Date::Format' 'Devel::Cover' \
        'Digest::MD5' 'File::Basename' 'File::Spec' 'LWP' 'POSIX' \
        'Perl::Critic' 'Pod::Coverage' 'Template' 'Test::More' 'XML::Simple');
    for i in "${modules2[@]}"; do
        echo -en "Checking '${TXTCYN}Perl Module $i${TXTRST}'... "
        # Loading the module succeeds silently; "Can't locate" => missing.
        EXISTS=$(perl -M$i -e 'print "\$$i::VERSION\n";' 2>&1 | grep "Can't locate")
        if [ -z "$EXISTS" ]; then
            echo -e "${TXTGRN}yes${TXTRST}"
        else
            echo -e "${TXTRED}no${TXTRST}"
        fi
    done
    # --------------------------------------------------------------------------
    declare -a modules3=('future' 'lxml' 're' 'time' 'urllib');
    for i in "${modules3[@]}"; do
        echo -en "Checking '${TXTCYN}Python Module $i${TXTRST}'... "
        EXISTS=$(pip freeze 2>&1 | grep "$i")
        if [ -n "$EXISTS" ]; then
            echo -e "${TXTGRN}yes${TXTRST}"
        else
            echo -e "${TXTRED}no${TXTRST}"
        fi
    done
    # ------------------------------------------------------------------------------
    declare -a commands2=('add-apt-repository' 'apt-get' 'cover' 'cpanminus' \
        'nikto' 'pdepend' 'pear' 'pecl' 'pep8' 'perltidy' 'phpcb' 'phpcov' \
        'phpcpd' 'phpcs' 'phpdoc' 'phploc' 'phpmd' 'phpunit' 'pylint');
    for i in "${commands2[@]}"; do
        echo -en "Checking '${TXTCYN}$i${TXTRST}'... "
        EXISTS=$(which $i)
        if [ -n "$EXISTS" ]; then
            echo -e "${TXTGRN}yes${TXTRST}"
        else
            echo -e "${TXTRED}no${TXTRST}"
        fi
    done
}
# }}} --------------------------------------------------------------------------
# {{{ run_generate_gettext() ---------------------------------------------------
# Emit a gettext .pot template to stdout: a license header, the standard
# PO metadata block, then one empty msgid/msgstr pair for every distinct
# "[% FILTER gettext %]...[% END %]" label found in the view templates.
run_generate_gettext() {
    print_subheader "GENERATING GETTEXT"
    CURR_DATE=$(date +%Y-%m-%d\ %H:%M%z)
    echo "#"
    echo "# FABIO CICERCHIA - WEBSITE"
    echo "#"
    echo "# Copyright 2012 - 2013 Fabio Cicerchia."
    echo "#"
    echo "# Permission is hereby granted, free of charge, to any person obtaining a copy"
    echo "# of this software and associated documentation files (the \"Software\"), to deal"
    echo "# in the Software without restriction, including without limitation the rights"
    echo "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell"
    echo "# copies of the Software, and to permit persons to whom the Software is"
    echo "# furnished to do so, subject to the following conditions:"
    echo "#"
    echo "# The above copyright notice and this permission notice shall be included in all"
    echo "# copies or substantial portions of the Software."
    echo "#"
    echo "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR"
    echo "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,"
    echo "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE"
    echo "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER"
    echo "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,"
    echo "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE"
    echo "# SOFTWARE."
    echo "#"
    echo ""
    echo "msgid \"\""
    echo "msgstr \"\""
    echo "\"Project-Id-Version: 0.2\\n\""
    echo "\"Report-Msgid-Bugs-To: info@fabiocicerchia.it\\n\""
    echo "\"POT-Creation-Date: $CURR_DATE\\n\""
    echo "\"PO-Revision-Date: $CURR_DATE\\n\""
    echo "\"Last-Translator: Fabio Cicerchia <info@fabiocicerchia.it>\\n\""
    echo "\"Language-Team: LANGUAGE <info@fabiocicerchia.it>\\n\""
    echo "\"Language: \\n\""
    echo "\"MIME-Version: 1.0\\n\""
    echo "\"Content-Type: text/plain; charset=UTF-8\\n\""
    echo "\"Content-Transfer-Encoding: 8bit\\n\""
    echo ""
    # Split the match list on newlines only (labels may contain spaces).
    OLDIFS=$IFS
    IFS=$'\n'
    # Each match becomes "LABEL<TAB>file:line"; sorting groups duplicate
    # labels so the loop below can emit each label once.
    LINES=$(egrep -rn "\[% FILTER gettext %\].+\[% END %\]" \
        $SITE_APP_SOURCEDIR/view/ | \
        sed -r "s/(.+):(.+):.*?\[% FILTER gettext %\](.+)\[% END %\].*/\3\t\1:\2/" | sort)
    PREVIOUS_LABEL=''
    for LINE in $LINES; do
        LABEL=$(echo "$LINE" | sed -r "s/\t.+//")
        # Adjacent duplicates (same label, different files) are skipped.
        if [ "$LABEL" != "$PREVIOUS_LABEL" ]; then
            echo "msgid \"$LABEL\""
            echo "msgstr \"\""
            echo ""
        fi
        PREVIOUS_LABEL=$LABEL
    done
    IFS=$OLDIFS
    echo "done"
}
# }}} --------------------------------------------------------------------------
# {{{ run_compile_gettext() ----------------------------------------------------
# Compile every .po translation under locale/ into its binary .mo file.
run_compile_gettext() {
    print_subheader "COMPILING GETTEXT"
    # FIX: the original word-split the find output, breaking on paths
    # with spaces; stream the results line by line instead.
    find "$SITE_APP_SOURCEDIR/locale" -name "*.po" -type f |
    while IFS= read -r FILE_PO; do
        msgfmt -c "$FILE_PO" -o "${FILE_PO%.po}.mo"
    done
    echo "done"
}
# }}} --------------------------------------------------------------------------
# {{{ run_authors() ------------------------------------------------------------
run_authors() {
    print_subheader "GENERATING AUTHORS"
    # FIX: quote ROOTDIR.  Unique "Name <email>" lines from git history.
    git log --pretty="%aN <%aE>" | sort -u > "$ROOTDIR/AUTHORS"
    echo "done"
}
# }}} --------------------------------------------------------------------------
# {{{ run_nikto() --------------------------------------------------------------
# Update nikto's plugin database, then run a full scan against the demo
# site, writing an HTML report under the project's log directory.
run_nikto() {
    print_subheader "RUNNING NIKTO"
    sudo nikto -update
    # FIX: quote the report path so REPORTDIR with spaces cannot split.
    nikto -C all -nocache -evasion 1,2,3,4,5,6 -h http://demo.fabiocicerchia.it \
        -o "$REPORTDIR/site/logs/nikto.html" -Format html -nossl \
        -Tuning 0,1,2,3,4,5,6,7,8,9,a,b,c,x
    echo "done"
}
# }}} --------------------------------------------------------------------------
| true
|
07684b5545fdb94762b9ca90365e50ec63184fa9
|
Shell
|
ryapric/boilerplate
|
/terraform/config-files/user-data.sh
|
UTF-8
| 691
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# AWS EC2 "user data" (startup script). There's a lot here that's official
# copy-paste of AWS' installation & config examples for the CloudWatch Agent.
# FIX: the original first line was '#/usr/bin/env bash' -- missing the
# '!' -- so it was a plain comment, not a shebang.
wget \
  https://s3.amazonaws.com/amazoncloudwatch-agent/ubuntu/amd64/latest/amazon-cloudwatch-agent.deb \
  -O /tmp/amazon-cloudwatch-agent.deb
dpkg -i /tmp/amazon-cloudwatch-agent.deb
apt-get update -y
# Restart the CloudWatch Agent to pick up your own config
/opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl -a stop
/opt/aws/amazon-cloudwatch-agent/bin/amazon-cloudwatch-agent-ctl \
  -a fetch-config \
  -m ec2 \
  -c file:/opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json \
  -s
| true
|
594efc33d6bd442322b87e15364c01fdda5ae463
|
Shell
|
aimadba/manage_container_lxc
|
/bash_lxc/start_container.sh
|
UTF-8
| 1,282
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/bash
# start de container
# Starts one LXC container, or every container matching a '*' joker.
# FIXES: the shebang was the garbled '#¡/bin/bash'; the unquoted
# '[ $exp ]' could glob-expand against files in the CWD; 'IFS=$oldIFS'
# restored IFS from a variable that was never saved (setting it empty).

# Does the argument contain a '*' joker?
exp=$(printf '%s\n' "$1" | grep '\*')
if [ -n "$exp" ]
then
    echo " --------------demarrage des conteneur avec le joker "$exp"------------------------------"
    list_container=$(lxc-ls)
    mot_sans_etoil=$(echo "$1" | tr -d "*")
    # One container name per line, filtered by the joker's stem.
    list_final=$(echo "$list_container" | tr " " "\n" | grep "$mot_sans_etoil")
    echo "list des conteneurs a demarrer ======> "$list_final
    for i in $list_final
    do
        echo "----> Demarrage de :"$i
        lxc-start -n "$i"
        if [ $? -ne 0 ]
        then
            echo " le demarrage a echoue ! "
            exit 1
        fi
        echo "demarrage de :" $i " : ok"
    done
else
    echo "---------------------demarrage du conteneur sans Joker -------------------------"
    lxc-start -n "$1"
    if [ $? -ne 0 ]
    then
        echo " le demarrage a echoue ! "
        exit 1
    fi
    echo " demarrage du container en arg: " $1 " : ok !"
fi
echo "---- -----------------Etat des container------------------ ----"
lxc-ls --fancy
echo "------------------------------------------------------------------"
| true
|
36dda7103757831acd2d45f4c28426dd1cb8ee3e
|
Shell
|
stoft/openmft
|
/agent/bin/uninstall.sh
|
UTF-8
| 430
| 3.96875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Remove all per-agent state (log, etc, var directories) for AGENTID.
# Usage: uninstall.sh AGENTID [BASEDIR]
if [ "$#" -ne 1 ] && [ "$#" -ne 2 ]; then
    echo "Usage: $0 AGENTID [BASEDIR]" >&2
    exit 1
fi
AGENTID=$1
# Get absolute path of agent top directory
AGENTDIR=$(cd "$(dirname "$(dirname "$0")")"; pwd)
BASEDIR=$AGENTDIR
# If supplied as an argument, replace BASEDIR
if [ "$#" -eq 2 ]; then
    BASEDIR=$2
fi
# Delete agent directories.  FIX: quoted, and ${VAR:?} aborts if either
# value is empty, so a blank AGENTID or BASEDIR can never widen the
# deletion to 'rm -rf /log/' or similar.
rm -rf "${BASEDIR:?}/log/${AGENTID:?}"
rm -rf "${BASEDIR:?}/etc/${AGENTID:?}"
rm -rf "${BASEDIR:?}/var/${AGENTID:?}"
| true
|
0c0d629379c810b907ec367530f788562c2d49be
|
Shell
|
beyerly/CCSR
|
/scripts/googleVoice2Text.sh
|
UTF-8
| 1,670
| 3.8125
| 4
|
[] |
no_license
|
#!/bin/bash
# This script continuously checks for the existence of the 'voiceFile'
# Once ccsr has recorded an audio snippet, it will write out this file to disk
# This script will send it to the Google Speech to text API, which returns a json file with text
# ./googleVoice2TextParse.py will parse this json file and write the first/best guess to stdout,
# which gets piped to nlpxCCSR.py. This script interprets the sentence. The voice file will be deleted, and the
# polling for a new voice file resumes

# CCSR writes this file to disk:
voiceFile="/home/root/ccsr/data/voice.wav"
logfile="./googleVoice2Text.log"
# fifo to nlpxCCSR.py
fifo="./text2nlpFifo"
rate=44100

# FIX: a fifo is not a regular file, so the original '-f' test was always
# false and mkfifo was re-run (and failed) on every start; '-p' tests for
# a named pipe.
if [ ! -p "$fifo" ]
then
    mkfifo "$fifo"
fi

# FIX: the original ran 'ps | grep ccsr' once and then looped on
# 'while [ $? -eq 0 ]', where $? was the status of the *last command in
# the loop body* (rm), not the ccsr liveness check.  Re-check every
# iteration; the '[c]csr' pattern stops grep from matching itself.
while ps | grep -q '[c]csr'; do
    echo "Waiting to start google voice to text service..." > "$logfile"
    echo "$voiceFile" > "$logfile"
    echo waiting > "$logfile"
    echo waiting
    while [ ! -f "$voiceFile" ]; do
        # wait untill voice file appears
        sleep 0.1;
    done
    echo posting request to google specht2text > "$logfile"
    curl -X POST \
        --data-binary @"$voiceFile" \
        --header 'Content-Type: audio/l16; rate=8820;' \
        'https://www.google.com/speech-api/v2/recognize?output=json&lang=en-us&key=AIzaSyCRl0iv1MMI-vMafGdFyGlH4A0b7aXUsgI' -k\
        -o text.json_pre
    # apparently google returns 2 json structures, the first one always empty. Delete this first structure:
    tail -n +2 text.json_pre > text.json
    # this script reads text.json and pipes single sentence to nlpxCCSR.py
    ./googleVoice2TextParse.py > "$fifo"
    ./googleVoice2TextParse.py
    rm "$voiceFile";
done
echo "CCSR is not running..." > "$logfile"
| true
|
0db5dc04372aa8bdbe460286b78985be762500da
|
Shell
|
MathSmath/aws-es-setup
|
/es-install.sh
|
UTF-8
| 849
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Install Elasticsearch (RPM) plus the AWS-cloud and Marvel plugins,
# then apply a custom config if one was uploaded to the ec2-user home.
# FIX: quoted every variable expansion.
cd ~
# Config
ES_FILENAME="elasticsearch-1.0.0.noarch.rpm"
AWS_PLUGIN_VERSION="2.0.0.RC1"
# Install ES
echo "Installing Elasticsearch"
wget "https://download.elasticsearch.org/elasticsearch/elasticsearch/$ES_FILENAME"
sudo rpm -ivh "$ES_FILENAME"
echo "Elasticsearch Installed"
# Install plugins (AWS and Marvel)
sudo /usr/share/elasticsearch/bin/plugin -i "elasticsearch/elasticsearch-cloud-aws/$AWS_PLUGIN_VERSION"
sudo /usr/share/elasticsearch/bin/plugin -i elasticsearch/marvel/latest
# If a custom config file exists
if [ -f /home/ec2-user/elasticsearch.yml ]
then
    echo "Removing the default config"
    sudo rm /etc/elasticsearch/elasticsearch.yml
    echo "Adding our custom config"
    sudo cp /home/ec2-user/elasticsearch.yml /etc/elasticsearch/elasticsearch.yml
fi
echo "Cleaning up ES install resources..."
rm "$ES_FILENAME"
| true
|
f97e9c1a48e979d602cbb98f8d34bff498f18e63
|
Shell
|
sergutsan/JDecafCompiler
|
/deploy.sh
|
UTF-8
| 516
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/sh
# Package the compiled JDecaf compiler into /tmp/JDecafCompiler.zip.
TMPFOLDER=/tmp/JDecafTempFolder
# FIX: 'exit -1' / 'exit -2' are not valid POSIX exit statuses (dash
# rejects them with "Illegal number"); use small positive codes.
if [ ! -e bin/JDecafCompiler.class ]; then
    echo "This is not the right folder. Exiting..."
    exit 1
elif [ -e "$TMPFOLDER" ]; then
    echo "$TMPFOLDER exists. Exiting..."
    exit 2
fi
mkdir "${TMPFOLDER}"
jar cfmv "${TMPFOLDER}/JDecafCompiler.jar" manifest.txt -C bin .
cp -rv javadecaf.bat javadecaf.sh lib "${TMPFOLDER}"
# FIX: abort if cd fails instead of zipping the wrong directory.
cd "${TMPFOLDER}" || exit 1
zip -r JDecafCompiler.zip *
mv JDecafCompiler.zip ..
cd ..
rm -rf "$TMPFOLDER"
echo "Done!"
echo
echo "Created /tmp/JDecafCompiler.zip."
| true
|
c16046b93952c2d6801e342525f6d29fb508af77
|
Shell
|
lhnam298/kbr-analysis
|
/env/analysis.sh
|
UTF-8
| 606
| 2.84375
| 3
|
[] |
no_license
|
# Bring up the kbr analysis stack: a MySQL 5.5 container (created once,
# then just restarted) and the web container linked against it.
BASEDIR=$(cd "$(dirname "$0")/../.." && pwd)
CONTAINER_DB="kbr-db"
CONTAINER_WEB="kbr-analysis"
# FIX: replaced the fragile '... | wc -l == "0"' string comparison with
# grep -q, and quoted every expansion.
if ! docker ps -a | grep -q "$CONTAINER_DB"; then
    docker run --name "$CONTAINER_DB" \
        -e MYSQL_ALLOW_EMPTY_PASSWORD=yes \
        -p 3306:3306 \
        -d \
        mysql:5.5
fi
docker start "$CONTAINER_DB"
docker run --name "$CONTAINER_WEB" \
    -p 80:80 \
    -p 443:443 \
    -e APP_MODE=dev \
    --link "$CONTAINER_DB:$CONTAINER_DB" \
    -v "$BASEDIR/analysis:/var/www/htdocs:ro" \
    -dt \
    dongta
| true
|
4cad6134ac8021676b6a81e65ee3e8d7cbdf7069
|
Shell
|
Cedrusco/codesoju
|
/scripts/release.sh
|
UTF-8
| 1,600
| 3.71875
| 4
|
[] |
no_license
|
#!/bin/bash
# Release script: re-vendors the codesoju template repositories into the
# generator's templates folder, bumps the version, tags and pushes.
# NOTE(review): replaceJsonProp is provided by utils.inc, sourced at the
# bottom of this file -- confirm before running standalone.
ARG_DEFS=(
  "[--version=(.*)]"
)
# $1 repo name
# $2 repo url  git@github.com:Cedrusco/codesoju-angular
# $3 targetFolder ../generators/app/templates/angular1
function cloneRepo {
    echo "-- Cloning codesoju-$1..."
    # Shallow clone, then strip the git metadata so only the template
    # files are vendored.
    git clone $2 $1 --depth=2
    rm -rf $1/.git
    replaceJsonProp "$1/package.json" "name" "<%= appName %>"
    replaceJsonProp "$1/bower.json" "name" "<%= appName %>"
    rm -rf $3 # ../generators/app/templates/angular1
    mkdir -p $3
    cp -R $1/ $3
}
function run {
    # Run from within outside Scripts
    cd ../
    rm -rf temp-repo/
    mkdir temp-repo
    cd temp-repo
    # Copy the template repo #
    cloneRepo "angular1" "git@github.com:Cedrusco/codesoju-angular" "../generators/app/templates/angular1"
    # cloneRepo "angular2" "git@github.com:Cedrusco/codesoju-angular2" "../generators/app/templates/angular2"
    cloneRepo "mobile" "git@github.com:Cedrusco/codesoju-mobile" "../generators/app/templates/cordova-mobile"
    # Clear the temp folder
    cd ../
    rm -rf temp-repo
    # echo "-- Committing and tagging..."
    # VERSION comes from the --version flag declared in ARG_DEFS above.
    if [[ ! $VERSION ]];
        then echo "Version does not exists";
        else replaceJsonProp "package.json" "version" "$VERSION"
    fi
    git add -A
    git commit -am "Update: version $VERSION"
    git tag -f v$VERSION
    echo "-- Pushing to codesoju repository"
    git push -q origin master
    git push -q origin v$VERSION
    echo "-- Version $VERSION pushed successfully to Cedrusco/codesoju!"
    echo "-- You can publish to npmjs.org using: npm publish"
    # publish to npm
    # echo "-- Publish to npm"
    # npm publish
    cd ../
}
echo "Directory: $(dirname $0)"
source $(dirname $0)/utils.inc
| true
|
b58d02a403e09c2254e4f6cded53c364cac243a4
|
Shell
|
nakaji-dayo/learn-tiger
|
/2/test.sh
|
UTF-8
| 180
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the tiger lexer (alex + ghc) and run it over every .tig test case.
set -eu
alex tiger.x
ghc tiger.hs
tests=~/Documents/tiger/testcases
# FIX: iterate with a glob instead of parsing 'ls | grep' (breaks on
# unusual file names), and feed each file by redirection instead of cat.
for f in "$tests"/*.tig; do
    [ -e "$f" ] || continue
    # ${f##*/} keeps the original output (bare filename, no path).
    echo "--${f##*/}--"
    ./tiger < "$f"
done
| true
|
5bc38fec494c7146faebcbb38381df49043b43ab
|
Shell
|
osbilln/shell
|
/hermes.sh
|
UTF-8
| 4,558
| 3.5
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Copyright 2004 Colin Crist
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Try and sort out hermes set up - liberally cribbed from Apache Ant.
#
# $Id$
#
# Launcher: locates HERMES_HOME, a JVM, the lib classpath and a config
# file, then starts hermes.browser.HermesBrowser.
#
# Load system-wide hermes configuration
if [ -f "/etc/hermes.conf" ] ; then
. /etc/hermes.conf
fi
#
# load user hermes configuration
if [ -f "$HOME/.hermesrc" ] ; then
. "$HOME/.hermesrc"
fi
#
# OS specific support. $var _must_ be set to either true or false.
cygwin=false;
darwin=false;
case "`uname`" in
CYGWIN*) cygwin=true ;;
Darwin*) darwin=true
# On macOS, default JAVA_HOME and add Dock/menu-bar integration flags.
if [ -z "$JAVA_HOME" ] ; then
JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Home
HERMES_OPTS="-Xdock:name=HermesJMS -Dcom.apple.mrj.application.apple.menu.about.name=HermesJMS -Dcom.apple.mrj.application.growbox.intrudes=false -Dapple.laf.useScreenMenuBar=true $HERMES_OPTS"
fi
;;
Linux*) if [ -z "$HERMES_OPTS" ] ; then
HERMES_OPTS="-Dswing.defaultlaf=javax.swing.plaf.metal.MetalLookAndFeel"
else
HERMES_OPTS="-Dswing.defaultlaf=javax.swing.plaf.metal.MetalLookAndFeel $HERMES_OPTS"
fi ;;
esac
# Locate HERMES_HOME: well-known directories first, then fall back to
# resolving this script's own (possibly symlinked) location.
if [ -z "$HERMES_HOME" -o ! -d "$HERMES_HOME" ] ; then
# try to find HERMES
if [ -d /opt/hermes ] ; then
HERMES_HOME=/opt/hermes
fi
if [ -d "${HOME}/opt/hermes" ] ; then
HERMES_HOME="${HOME}/opt/hermes"
fi
## resolve links - $0 may be a link to hermes's home
PRG="$0"
progname=`basename "$0"`
# need this for relative symlinks
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
HERMES_HOME=`dirname "$PRG"`/..
# make it fully qualified
HERMES_HOME=`cd "$HERMES_HOME" && pwd`
fi
#
# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin ; then
[ -n "$HERMES_HOME" ] &&
HERMES_HOME=`cygpath --unix "$HERMES_HOME"`
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi
#
# Set HERMES_LIB location
HERMES_LIB="${HERMES_HOME}/lib"
#
# Setup the Java VM
if [ -z "$JAVACMD" ] ; then
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
else
JAVACMD=`which java 2> /dev/null `
if [ -z "$JAVACMD" ] ; then
JAVACMD=java
fi
fi
fi
if [ ! -x "$JAVACMD" ] ; then
echo "Error: JAVA_HOME is not defined correctly."
echo " We cannot execute $JAVACMD"
exit 1
fi
#
# Slurp up everything in lib into the LOCALCLASSPATH
# NOTE(review): parsing `ls` breaks on filenames with spaces -- assumed
# safe for jar names under lib/.
for F in `ls $HERMES_LIB`
do
if [ -z "$LOCALCLASSPATH" ] ; then
LOCALCLASSPATH=$HERMES_LIB/$F
else
LOCALCLASSPATH=$LOCALCLASSPATH:$HERMES_LIB/$F
fi
done
#
# See if we can find a config file.
# Precedence: ~/.hermes, then ~/hermes, then the bundled default in cfg/.
#
# $HOME/hermes/hermes-config.xml ?
if [ -z "$HERMES_CFG" ] ; then
if [ -d "$HOME/.hermes" ] ; then
if [ -f "$HOME/.hermes/hermes-config.xml" ] ; then
HERMES_CFG=$HOME/.hermes/hermes-config.xml
fi
fi
fi
if [ -z "$HERMES_CFG" ] ; then
if [ -d "$HOME/hermes" ] ; then
if [ -f "$HOME/hermes/hermes-config.xml" ] ; then
HERMES_CFG=$HOME/hermes/hermes-config.xml
fi
fi
fi
if [ -z "$HERMES_CFG" ] ; then
HERMES_CFG=$HERMES_HOME/cfg/hermes-config.xml
fi
#
# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
HERMES_HOME=`cygpath --windows "$HERMES_HOME"`
HERMES_CFG=`cygpath --windows "$HERMES_CFG"`
HERMES_LIB=`cygpath --windows "$HERMES_LIB"`
JAVA_HOME=`cygpath --windows "$JAVA_HOME"`
LOCALCLASSPATH=`cygpath --path --windows "$LOCALCLASSPATH"`
CYGHOME=`cygpath --windows "$HOME"`
fi
#
# Run main().
"$JAVACMD" -XX:NewSize=256m -Xmx1024m $HERMES_OPTS -Dlog4j.configuration=file:$HERMES_HOME/bin/log4j.props -Dhermes.home=$HERMES_HOME -Dhermes=$HERMES_CFG -Dhermes.libs=$HERMES_LIB -classpath $LOCALCLASSPATH hermes.browser.HermesBrowser
| true
|
dff3e1b6831c799e2d7a2768b0f235aa6c46dba5
|
Shell
|
szaffarano/docker-claws-mail
|
/build.sh
|
UTF-8
| 340
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the claws-mail Docker image, forwarding any proxy configuration
# from the caller's environment into the build.
VERSION=3.17.3
IMAGE="szaffarano/claws-mail"
# FIX: the proxy flags were carried in whitespace-split strings, which
# breaks if a proxy URL contains spaces or glob characters; carry them
# as array elements instead (an empty array expands to zero arguments).
declare -a proxy_args=()
if [ -n "$http_proxy" ]; then
	proxy_args+=(--build-arg "http_proxy=$http_proxy")
fi
if [ -n "$https_proxy" ]; then
	proxy_args+=(--build-arg "https_proxy=$https_proxy")
fi
docker build \
	"${proxy_args[@]}" \
	-t "${IMAGE}:${VERSION}" \
	.
| true
|
3954225f10167a58429474a5f4f9273a0b4c6e12
|
Shell
|
madorskya/wib_sim
|
/soft_debug_p3/axisafety.sh
|
UTF-8
| 2,081
| 3.6875
| 4
|
[] |
no_license
|
#!/bin/bash
# Read or write named bit-fields of memory-mapped registers via devmem.
# The four parallel arrays below describe each field: its register
# address, the field's low bit position (bnum) and its bit mask.
devmem="devmem"
# status
name=(bram_rstn read_fault write_fault bram0 bram1 )
addr=(0xA00C0040 0xA00C00CC 0xA00C00CC 0xA00D0000 0xA00D0004 )
bnum=(0 0 1 0 0 )
mask=(1 1 1 0xffffffff 0xffffffff )
#syntax: devreg reg_name [wr_value]
if [ $1 ]
then
 reg_name=$1 # register name
 if [ $2 ]
 then
# writing
 wr_value=$2 # write value
 i=0
 found=0
 for n in "${name[@]}"
 do
 if [[ "$n" == "$1" ]]
 then
#prepare write and mask values
 # Clip the value to the field width, shift it to the field's bit
 # position, and build an inverted mask to clear the old bits.
 wr_value=$(( $wr_value & ${mask[$i]} ))
 wr_value=$(( $wr_value << ${bnum[$i]} ))
 msk_value=$(( ${mask[$i]} << ${bnum[$i]} ))
 msk_value=$(( 0xffffffff ^ $msk_value ))
 # read the register
 reg_read_value=`$devmem ${addr[$i]} 32`
 # Read-modify-write: clear the field, insert the new bits.
 reg_wr_value=$(( $reg_read_value & $msk_value )) # cut the bits
 reg_wr_value=$(( $reg_wr_value | $wr_value )) # insert bits
 # split field
# printf "%s value: %x mask: %x wr_reg: %x\n" "$n" "$wr_value" "$msk_value" "$reg_wr_value"
 command=$(printf "%s %s 32 0x%x" "$devmem" "${addr[$i]}" "$reg_wr_value")
#`$devmem ${addr[$i]} 32 0x$reg_wr_value`
 # NOTE(review): backticks execute the command *and then* its stdout;
 # this works only because a devmem write prints nothing.
 `$command`
 found=1
 fi
 i=$(( $i + 1 ))
 done
 if [[ $found == 0 ]]
 then
 printf "undefined register: %s\n" "$reg_name"
 fi
 else
# reading
 i=0
 found=0
 for n in "${name[@]}"
 do
 if [[ "$n" == "$1" ]]
 then
 # read the register
 reg_read_value=`$devmem ${addr[$i]} 32`
 # split field
 # Shift the field down to bit 0 and mask off neighbouring bits.
 reg_read_value=$(( $reg_read_value >> ${bnum[$i]} ))
 reg_read_value=$(( $reg_read_value & ${mask[$i]} ))
 printf "%s = 0x%x\n" "$n" "$reg_read_value"
 found=1
 fi
 i=$(( $i + 1 ))
 done
 if [[ $found == 0 ]]
 then
 printf "undefined register: %s\n" "$reg_name"
 fi
 fi
else
# no arguments, print usage
 printf "usage: devreg reg_name [wr_value]\n"
 printf "available registers:\n"
 printf "ADDR\t\tLBIT\tMASK\tNAME\n"
 i=0
 for n in "${name[@]}"
 do
 printf "%s\t%d\t%x\t%s\n" "${addr[$i]}" "${bnum[$i]}" "${mask[$i]}" "$n"
 i=$(( $i + 1 ))
 done
fi
| true
|
cd0ce6c389c956dddb8fd261e9a5bc8d220595aa
|
Shell
|
benbasscom/Generic-Bash-Scripts
|
/Local Items/Local_Items_cleanup.sh
|
UTF-8
| 302
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# Script to remove Local Items folder.
# Strongly suggest restarting after running the tool.
# Ben Bass 2015

sphard="$(system_profiler SPHardwareDataType)"
uuid="$(echo "$sphard" | grep "Hardware UUID:" | awk '{print $3}')"
echo "$HOME"

# Abort if the UUID could not be parsed: with an empty $uuid the mv below
# would collapse to "$HOME/Library/Keychains//" and move the user's entire
# Keychains directory into the Trash.
if [ -z "$uuid" ]; then
	echo "Could not determine Hardware UUID; aborting." >&2
	exit 1
fi

mv "$HOME/Library/Keychains/$uuid/" "$HOME/.Trash"
exit 0
| true
|
03827184ec468ef91e2bbdb02292b9c28759e863
|
Shell
|
hdeiner/CheckWordster
|
/provision_checkwordster_ec2.sh
|
UTF-8
| 703
| 3.125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Provision an Ubuntu EC2 host with Docker and launch the CheckWordster
# container on port 80.
# First, add the GPG key for the official Docker repository to the system
# NOTE(review): apt-key is deprecated on newer Ubuntu releases — consider
# a keyring file under /etc/apt/keyrings when this image is next updated.
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
# Add the Docker repository to APT sources
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
# Next, update the package database with the Docker packages from the newly added repo:
sudo apt-get -qq update
# Install Docker
sudo apt-get -qq install -y docker-ce
echo "Start CheckWordster"
# Dedicated bridge network, then map container port 9002 to host port 80.
sudo docker network create -d bridge mynetwork
sudo docker run -d -p 80:9002 --network=mynetwork --name checkwordster howarddeiner/checkwordster
# A small delay to digest
sleep 5
| true
|
fd013212ae674fb9dfd91289047ec609604b77a8
|
Shell
|
zjpjohn/dianping-swallow
|
/swallow-consumerserver/src/main/resources/start.sh
|
UTF-8
| 2,371
| 3.625
| 4
|
[] |
no_license
|
#!/bin/bash
# Start the swallow consumer server as either a master or a slave node.
#
# Usage:
#   start.sh master              - start as master (advertises the local eth0 IP)
#   start.sh slave [<masterIp>]  - start as slave, optionally pointing at a master
PRGDIR=$(dirname "$0")
LOCAL_IP=$(ifconfig eth0 | grep 'inet addr:' | cut -d: -f2 | awk '{ print $1}')
# Make sure the shared log directory exists before redirecting output there.
if [ ! -d "/data/applogs/swallow" ] ; then
	mkdir -p "/data/applogs/swallow"
fi
usage(){
	echo " Usage:"
	echo "    use '$0 master' to start as master."
	echo "    use '$0 slave [<masterIp>]' to start as slave."
	exit 1
}
MASTER_JMX_PORT=9011
SLAVE_JMX_PORT=9012
# Common JVM options: classpath, heap sizing, async log4j context and an
# unauthenticated local JMX endpoint.
JAVA_OPTS="-cp ${PRGDIR}/.:${PRGDIR}/* -server -Xms512m -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -DLog4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -XX:+PrintGCDetails -XX:+PrintGCTimeStamps"
MASTER_JAVA_OPTS="${JAVA_OPTS} -Dmaster.or.slave=master -Dcom.sun.management.jmxremote.port=${MASTER_JMX_PORT} -Xloggc:/data/applogs/swallow/swallow-consumerserver-master-gc.log"
SLAVE_JAVA_OPTS="${JAVA_OPTS} -Dmaster.or.slave=slave -Dcom.sun.management.jmxremote.port=${SLAVE_JMX_PORT} -Xloggc:/data/applogs/swallow/swallow-consumerserver-slave-gc.log"
MASTER_CLASS="com.dianping.swallow.consumerserver.bootstrap.MasterBootStrap"
SLAVE_CLASS="com.dianping.swallow.consumerserver.bootstrap.SlaveBootStrap"
if [ "$1" == "master" ]; then
	STD_OUT="/data/applogs/swallow/swallow-consumerserver-master-std.out"
	MASTER_JAVA_OPTS="${MASTER_JAVA_OPTS} -DmasterIp=$LOCAL_IP"
	echo "starting as master(masterIp is $LOCAL_IP ) ..."
	echo "output: $STD_OUT"
	exec java $MASTER_JAVA_OPTS $MASTER_CLASS > "$STD_OUT" 2>&1 &
elif [ "$1" == "slave" ]; then
	if [ "$2" != "" ]; then
		echo "masterIp option: $2"
		# Validate that the argument looks like a dotted-quad IPv4 address;
		# grep exits 1 when the pattern does not match.
		echo $2 |grep "^[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}$" > /dev/null
		if [ $? -eq 1 ]; then
			echo "'$2' is an illegal ip address!"
			usage
			exit 1
		fi
		SLAVE_JAVA_OPTS="${SLAVE_JAVA_OPTS} -DmasterIp=$2"
	else
		echo "no masterIp option (e.g. '<masterIp>'), would use the 'masterIp' property in swallow-consumerserver.properties "
	fi
	STD_OUT="/data/applogs/swallow/swallow-consumerserver-slave-std.out"
	echo "starting as slave ..."
	echo "output: $STD_OUT"
	exec java $SLAVE_JAVA_OPTS $SLAVE_CLASS > "$STD_OUT" 2>&1 &
else
	# typo fix: was "not corrent"
	echo "Your input is not correct!"
	usage
fi
| true
|
2ac00eefac3acb5fac5e8ce8a7ae99621c7a840f
|
Shell
|
nephatrine/docker-gitea-web
|
/override/usr/local/bin/gitea-backup
|
UTF-8
| 475
| 2.84375
| 3
|
[
"ISC"
] |
permissive
|
#!/bin/sh
# gitea-backup - run `gitea dump` as a scheduled backup, keeping only the
# two most recent dump archives in $HOME.
export HOME=/mnt/config/home
# gitea dump needs a scratch directory to assemble the archive in.
if [ ! -d /tmp/gitea-backup ]; then
	/bin/mkdir -p /tmp/gitea-backup
fi
# Dumps land in the current directory, so work from $HOME; bail if missing.
cd /mnt/config/home || exit 1
# Rotation: list existing gitea-dump-*.zip newest-first, skip the two most
# recent, and delete the rest (NUL-delimited find/xargs for odd filenames).
/usr/bin/find /mnt/config/home/ -maxdepth 1 -type f -name 'gitea-dump-*.zip' -print0 | /usr/bin/xargs -r0 /bin/ls -t | /usr/bin/tail -n +3 | /usr/bin/tr '\n' '\0' | /usr/bin/xargs -r0 /bin/rm
# Replace this shell with the dump process itself.
GITEA_CUSTOM=/mnt/config/www/gitea USER=guardian exec /usr/bin/gitea dump --config /mnt/config/etc/gitea.ini --tempdir=/tmp/gitea-backup;
| true
|
df600b159e2f4aff3beb722b0838b937676f7d9d
|
Shell
|
ironman820/freshstart
|
/style-login.sh
|
UTF-8
| 300
| 2.703125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Install and configure the LightDM login screen (GTK greeter with the
# Bunsen themes), bootstrapping an AUR helper first if necessary.
set -euo pipefail

mydir=$PWD

# paru is required for the AUR package below; install it if missing.
if [[ ! -x /usr/bin/paru ]]
then
	echo "Installing AUR helper"
	# quoted: the old unquoted $mydir broke when the path contained spaces
	"$mydir/setup-aur-helper.sh"
fi

paru -Sy --noconfirm lightdm lightdm-gtk-greeter bunsen-themes-git

# Deploy the pre-configured greeter settings.
sudo cp "$mydir/.backups/root/etc/lightdm/lightdm-gtk-greeter.conf" /etc/lightdm/
| true
|
f0d7e281b3153e3b72bb7bf61f24452fd45b619b
|
Shell
|
smxi/smxi
|
/sm-lib-2009-fixes
|
UTF-8
| 12,450
| 2.921875
| 3
|
[] |
no_license
|
#!/bin/bash
########################################################################
#### Script Name: sm-lib-2009-fixes
#### version: 1.1.25
#### Date: 2010-09-16
#### Copyright (C) Harald Hope 2009
#### This program is free software; you can redistribute it and/or modify it under
#### the terms of the GNU General Public License as published by the Free Software
#### Foundation; either version 2 of the License, or (at your option) any later version.
#### This program is distributed in the hope that it will be useful, but WITHOUT
#### ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
#### FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#### Get the full text of the GPL here:
#### http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
#### Script Author: Harald Hope
#### This is a library file for smxi and cannot be run independently
#### Script URL: http://smxi.org/sm/sm-lib-2008-fixes
#### Script SVN: http://code.google.com/p/smxi
#### Script Home page: http://techpatterns.com/forums/about736.html
########################################################################
#### $DISTRO_VERSION numbers / distro name:
#### 5 sidux test pre release, kanotix/debian conversions ( debian - d:d )
#### 6 chaos, 7 tartaros, 8 gaia, 9 eros - 2007-01/4
#### 10, 11, 12, 13 - 2008-01/4
#### 14, 15, 16, 17 - 2009-01/4
#### 18, 19, 20, 21 - 2010-01/4
########################################################################
###---------------------------------------------------------------------
### script executor
###---------------------------------------------------------------------
# args: $1 - pre / post
# Dispatcher for the 2009-era one-shot fixes: 'pre' fixes run before the
# dist-upgrade, 'post' fixes after it.  Each group is gated on
# $DISTRO_LEVEL so a fix only fires on installs older than the release
# that made it unnecessary (see the version table in the file header).
run_2009_fixes()
{
	if [ "$1" == 'pre' ];then
		if [ "$DISTRO_LEVEL" -lt 17 ];then
			ed_fix_1
			acroread_fix_1
			openoffice_kde_fix_1
			ooo_fix_2009_5
			kdenlive_fix_1
			libuniconf_fix_1
			xfs_fix_1
			perl_fix_1
			libvdpau1_fix_1
		fi
		if [ "$DISTRO_LEVEL" -lt 16 ];then
			kdm_3_to_4_fix_1
		fi
		if [ "$DISTRO_LEVEL" -lt 15 ];then
			cups_driver_fix_1
		fi
	elif [ "$1" == 'post' ];then
		if [ "$DISTRO_LEVEL" -lt 20 ];then
			# sysvinit_fix_1 # fix added to Sid 2010-05-15 pm
			# ':' no-op keeps the emptied if-body syntactically valid
			:
		fi
		if [ "$DISTRO_LEVEL" -lt 17 ];then
			#xorg_1_6_fix_1
			vbox_qt_fix_1
		fi
	fi
}
###---------------------------------------------------------------------
### specific du fixes
###---------------------------------------------------------------------
# http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=581704
# http://forums.debian.net/viewtopic.php?f=20&t=51996
# One-shot fix: strip any CONCURRENCY= setting from /etc/default/rcS to
# work around the sysvinit concurrency bug (see the Debian bug links
# above).  Skipped when the aptosid repos are enabled, since those ship
# a fixed package.  Records completion via set_sticky_prefs.
sysvinit_fix_1()
{
	local prefId='sysvinit-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	# NOTE(review): this probes 'perl', which looks copy-pasted from
	# perl_fix_1 and is never used below — confirm intent.
	local isInstalled=$( package_tester 'perl' )
	if [ "$smPref" -eq 0 ];then
		if [ "$B_APTOSID_SOURCES" != 'true' ];then # fixed package in sidux repos
			if [ -f /etc/default/rcS ];then
				echo $LINE
				echo "${S}Patching ${C}sysvinit${S} now...${N}"
				sed -i "/^CONCURRENCY\=/d" /etc/default/rcS
			fi
		fi
		set_sticky_prefs $prefId
	fi
}
# One-shot fix for the nvidia libvdpau1 package rename: depending on
# which combination of the legacy nvidia-libvdpau1 package and the
# mplayer/gecko-mediaplayer players is installed, reinstalls the players
# and/or swaps the legacy package for nvidia-libvdpau1-driver.  Only runs
# once the new -driver package is actually available in the repos.
libvdpau1_fix_1()
{
	local prefId='libvdpau1-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	# adding this so that testing/stable won't run this until the right time
	local isAvailable=$( check_package_status 'nvidia-libvdpau1-driver' 'c' )
	# new method, put all tests under initial test
	if [ "$smPref" -eq 0 -a -n "$isAvailable" ];then
		local isInstalled=$( package_tester 'nvidia-libvdpau1' )
		local isInstalledList=$( package_tester '(mplayer|gecko-mediaplayer)' 'wild-full' )
		# case 1, older mplayer install, just update the stuff
		if [ -z "$isInstalled" -a -n "$isInstalledList" ];then
			echo $LINE
			echo "${S}Running ${C}mplayer/libvdpau1${S} fix now...${N}"
			package_installer "$isInstalledList" 'install-always'
		# case 2, midrange install,with legacy package and mplayer
		elif [ -n "$isInstalled" -a -n "$isInstalledList" ];then
			echo $LINE
			echo "${S}Running ${C}mplayer/libvdpau1${S} fix now...${N}"
			package_remover "$isInstalledList" 'group'
			package_remover "nvidia-libvdpau1"
			package_installer "$isInstalledList" 'install-always'
		# case 3: legacy library only, no players installed
		elif [ -n "$isInstalled" -a -z "$isInstalledList" ];then
			echo $LINE
			echo "${S}Running ${C}libvdpau1${S} fix now...${N}"
			package_remover "nvidia-libvdpau1"
			package_installer "nvidia-libvdpau1-driver" 'install-always'
		fi
		set_sticky_prefs $prefId
	fi
}
# One-shot fix: force-reinstall perl with apt's immediate configuration
# disabled, then record the fix as done so it never runs again.
perl_fix_1()
{
	local prefId='perl-fix-1'
	local smPref
	local isInstalled
	smPref=$( sm_pref_tester $prefId )
	isInstalled=$( package_tester 'perl' )
	# nothing to do unless the fix has never run and perl is installed
	if [ "$smPref" -ne 0 ] || [ -z "$isInstalled" ];then
		return 0
	fi
	echo $LINE
	echo "${S}Updating ${C}perl${S} now...${N}"
	package_installer 'perl' 'install-always' ' -o APT::Immediate-Configure=0'
	set_sticky_prefs $prefId
}
# One-shot fix: reinstall xfs (the X font server) with the service
# stopped, so the upgrade does not leave a stale daemon running.
xfs_fix_1()
{
	local prefId='xfs-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	local isInstalled=$( package_tester 'xfs' )
	if [ "$smPref" -eq 0 -a -n "$isInstalled" ];then
		echo $LINE
		echo "${S}Updating ${C}xfs${S} now...${N}"
		# stop / reinstall / restart around the package operation
		/etc/init.d/xfs stop
		package_installer 'xfs' 'force-present'
		/etc/init.d/xfs start
		set_sticky_prefs $prefId
	fi
}
# One-shot migration from libuniconf4.4 to libuniconf4.6.  wvdial depends
# on libuniconf, so when present it is removed first and reinstalled
# after the library swap.
libuniconf_fix_1()
{
	local prefId='libuniconf-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	local isInstalled=$( package_tester 'libuniconf4.4' )
	# probed once here; the old code re-declared and re-probed this same
	# variable inside the if block, which was redundant
	local isInstalled2=$( package_tester 'wvdial' )
	if [ "$smPref" -eq 0 -a -n "$isInstalled" ];then
		echo $LINE
		echo "${S}Updating ${C}libuniconf4.4${S} to ${C}libuniconf4.6${S}...${N}"
		if [ -n "$isInstalled2" ];then
			package_remover 'wvdial'
		fi
		package_remover 'libuniconf4.4' 'purge'
		package_installer 'libuniconf4.6' 'install-missing'
		if [ -n "$isInstalled2" ];then
			package_installer 'wvdial' 'install-missing'
		fi
		set_sticky_prefs $prefId
	fi
}
# One-shot fix: some installs lost the /bin/ed path when ed moved to
# /usr/bin; restore it with a symlink.
ed_fix_1()
{
	local prefId='test-ed-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	local isInstalled=$( package_tester 'ed' )
	if [ "$smPref" -eq 0 -a -n "$isInstalled" ];then
		if [ ! -e /bin/ed ];then
			echo $LINE
			echo "${S}Updating ${C}ed${S}. Linking ${C}/usr/bin/ed${S} to ${C}/bin/ed${S}...${N}"
			ln -s /usr/bin/ed /bin/ed
			# fix: removed the 'mkdir /bin/ed' that followed the symlink —
			# it could never succeed since /bin/ed already exists here
		fi
		set_sticky_prefs $prefId
	fi
}
# One-shot fix for the kdenlive/mlt package transition: purges the old
# kdenlive+mlt pair, then reinstalls kdenlive so apt pulls in the new
# replacement packages.  Only fires once the replacement ('inigo') is
# actually available in the repos.
kdenlive_fix_1()
{
	local prefId='kdenlive-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	local isInstalled=$( package_tester 'kdenlive' )
	if [ "$smPref" -eq 0 -a -n "$isInstalled" ];then
		local isInstalled2=$( package_tester 'mlt' )
		local isAvailable=$( check_package_status 'inigo' 'c' )
		if [ -n "$isInstalled2" -a -n "$isAvailable" ];then
			echo $LINE
			echo "${S}Running ${C}kdenlive${S} update now. Removing ${C}kdenlive/mlt${S}...${N}"
			package_remover 'kdenlive mlt' 'purge'
			echo "${S}Reinstalling ${C}kdenlive${S} now to bring in new packages...${N}"
			package_installer 'kdenlive' 'install-missing'
			set_sticky_prefs $prefId
		fi
	fi
}
# One-shot fix: recreate /var/lib/openoffice/share, which some upgrades
# left missing even though openoffice.org-common expects it to exist.
ooo_fix_2009_5()
{
	local missingOooDir='/var/lib/openoffice/share'
	local isInstalled=$( package_tester 'openoffice.org-common' )
	local prefId='openoffice-missing-dir-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	if [ "$smPref" -eq 0 -a -n "$isInstalled" -a ! -d $missingOooDir ];then
		echo $LINE
		echo "${S}Updating OOo to fix missing directory issue...${N}"
		mkdir -p $missingOooDir
		set_sticky_prefs $prefId
	fi
}
# One-shot fix for the KDE 3 -> 4 transition: openoffice.org-kde blocks
# the OOo/KDE upgrade under KDE 4, so it is purged.  openoffice.org-gtk
# is installed as a cosmetic replacement unless the conflicting
# gtk-qt-engine package is present, in which case the user is told to
# sort it out manually.
openoffice_kde_fix_1()
{
	local prefId='openoffice-kde-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	if [ "$smPref" -eq 0 ];then
		# major version of the installed kdm, 0 when kdm is absent
		local kdmVersionInst=$( check_package_status 'kdm' 'i' | cut -d ':' -f 2 | cut -d '.' -f 1 )
		local isInstalled=$( package_tester 'openoffice.org-kde' )
		if [ -z "$kdmVersionInst" ];then
			kdmVersionInst=0
		fi
		if [ "$kdmVersionInst" -ge 4 -a -n "$isInstalled" ];then
			echo $LINE
			echo "${S}Removing ${C}openoffice.org-kde${S} to let OOo and KDE upgrade properly...${N}"
			package_remover 'openoffice.org-kde' 'purge'
			# ooo-gtk conflicts with this package
			if [ -z "$( package_tester 'gtk-qt-engine' )" ];then
				echo "${S}Installing ${C}openoffice.org-gtk${S} so that OOo looks nice again...${N}"
				package_installer 'openoffice.org-gtk' 'install-missing'
			else
				echo "${S}Please note: without ${C}openoffice.org-kde${S} Openoffice.org will be very ugly."
				echo "Since you are using the package ${C}gtk-qt-engine${S} you cannot install ${C}openoffice.org-gtk${S}"
				echo "Since ${C}$SCRIPT_NAME${S} can't know what you want to do, it's going"
				echo "to leave it up to you to handle manually, but the upgrade can't work with the"
				echo "openoffice.org-kde package installed, so that had to be removed. There's no"
				echo "great solution here, sorry."
			fi
			set_sticky_prefs $prefId
			print_hec
		fi
	fi
}
# One-shot fix: newer virtualbox-ose (2.2+) split its GUI into
# virtualbox-ose-qt; install the qt package when such a version is
# installed but the GUI package is missing.
vbox_qt_fix_1()
{
	local prefId='vbox-qt-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	if [ "$smPref" -eq 0 ];then
		local packageVersion=$( check_package_status 'virtualbox-ose' 'i' )
		# keep only versions 2.2-2.9 or 3.x-5.x
		packageVersion=$( grep -Ei '^(2\.[2-9]|[3-5])' <<< $packageVersion )
		local isInstalled2=$( package_tester 'virtualbox-ose-qt' )
		if [ -n "$packageVersion" -a -z "$isInstalled2" ];then
			package_installer 'virtualbox-ose-qt' 'install-missing'
			set_sticky_prefs $prefId
		fi
	fi
}
# One-shot fix for Xorg >= 1.6: adds the AllowEmptyInput/AutoAddDevices/
# DontZap ServerFlags options to xorg.conf so pre-evdev input keeps
# working, and reinstalls xserver-xorg-input-mouse if aptitude dropped it
# during the upgrade.  (Currently disabled in run_2009_fixes.)
xorg_1_6_fix_1()
{
	local prefId='xorg-1-6-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	local case1='' case2='' case3=''
	local newX='false'
	# detect the running X server version from 'X -version' output
	local xVersion=$( X -version 2>&1 | grep 'X Window System' | egrep -o '(7|1)\.[1-6]' )
	local xorgMouseI='' xorgMouseC=''
	if [ -z "$xVersion" ];then
		# newer servers print a different banner line
		xVersion=$( X -version 2>&1 | grep 'X.Org X Server' | egrep -o '(7|1)\.[1-6]' )
	fi
	case $xVersion in
		1.[6-9]|7.[4-9])
			newX='true'
			;;
	esac
	log_function_data "xVersion: $xVersion\ncase1: $case1\ncase2: $case2\ncase3: $case3"
	# note: this will be false if no X is installed
	if [ "$smPref" -eq 0 -a "$newX" == 'true' ];then
		# check which of the three options are already present
		case1=$( grep -is 'AllowEmptyInput' $X_ORG_PATH )
		case2=$( grep -is 'AutoAddDevices' $X_ORG_PATH )
		case3=$( grep -is 'DontZap' $X_ORG_PATH )
		if [ -z "$case1" -o -z "$case2" -o -z "$case3" ];then
			echo $LINE
			echo "${S}Running ${C}Xorg >= 1.6 xorg.conf${S} tweaks now...${N}"
			if [ -f $X_ORG_PATH ];then
				# each missing option is inserted right after the
				# ServerFlags section header
				if [ -z "$case1" ];then
					echo "$SPACER${S}Adding: ${C}Option \"AllowEmptyInput\" \"0\"${S}...${N}"
					perl -pi -e 's/^([\s]*Section\s*"ServerFlags")/\1\n\tOption "AllowEmptyInput" "0"/' $X_ORG_PATH
				fi
				if [ -z "$case2" ];then
					echo "$SPACER${S}Adding: ${C}Option \"AutoAddDevices\" \"0\"${S}...${N}"
					perl -pi -e 's/^([\s]*Section\s*"ServerFlags")/\1\n\tOption "AutoAddDevices" "0"/' $X_ORG_PATH
				fi
				if [ -z "$case3" ];then
					echo "$SPACER${S}Adding: ${C}Option \"DontZap\" \"Off\"${S}...${N}"
					perl -pi -e 's/^([\s]*Section\s*"ServerFlags")/\1\n\tOption "DontZap" "Off"/' $X_ORG_PATH
				fi
				echo "${S}Done with ${C}Xorg >= 1.6 xorg.conf${S} update, continuing.${N}"
			else
				echo "${M}No ${C}$X_ORG_PATH${M} found. No modification of ${C}xorg.conf${M} data.${N}"
			fi
		fi
		# check the mouse stuff too, this is an aptitude fix mainly, which does the upgrade wrong
		xorgMouseI=$( check_package_status 'xserver-xorg-input-mouse' 'i' )
		xorgMouseC=$( check_package_status 'xserver-xorg-input-mouse' 'c' )
		if [ -z "$xorgMouseI" -a -n "$xorgMouseC" ];then
			echo "${S}Installing missing package ${C}xserver-xorg-input-mouse${S}...${N}"
			package_installer 'xserver-xorg-input-mouse' 'install-missing'
		fi
		set_sticky_prefs $prefId
	fi
}
# One-shot fix for the cupsys -> cups package rename: when both the old
# cupsys-driver-gutenprint and the new cups-driver-gutenprint are
# installed, removes the old one with plain apt-get (aptitude would drag
# the whole cups stack out with it) and force-reinstalls the new one.
cups_driver_fix_1()
{
	local prefId='cups-driver-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	local isInstalled=$( package_tester 'cups-driver-gutenprint' )
	local isInstalled2=$( package_tester 'cupsys-driver-gutenprint' )
	if [ "$smPref" -eq 0 -a -n "$isInstalled" -a -n "$isInstalled2" ];then
		echo $LINE
		echo "${S}Running ${C}cups-driver-gutenprint${S} update fix now...${N}"
		# aptitude pulls all cups stuff out... bad aptitude, bad!
		apt-get remove -y cupsys-driver-gutenprint
		package_installer 'cups-driver-gutenprint' 'force-always'
		set_sticky_prefs $prefId
	fi
}
# One-shot fix: when acroread is installed and the acroread-debian-files
# package has become available, force-install the debian-files package so
# acroread survives the upgrade.
acroread_fix_1()
{
	local prefId='acroread-fix-1'
	local smPref=$( sm_pref_tester $prefId )
	local isInstalled=$( package_tester 'acroread' )
	local isAvailable=$( check_package_status 'acroread-debian-files' 'c' )
	if [ "$smPref" -eq 0 -a -n "$isInstalled" -a -n "$isAvailable" ];then
		echo $LINE
		echo "${S}Running ${C}acroread${S} update fix now...${N}"
		# (stale comment about aptitude/cups removed — it was copy-pasted
		# from cups_driver_fix_1 and does not apply here)
		package_installer 'acroread-debian-files' 'force-always'
		set_sticky_prefs $prefId
	fi
}
###**EOF**###
| true
|
d5e2f268988ef98bdbd120e0caa0bdbf18f169a8
|
Shell
|
mackatozis/uni
|
/OS/Operating Systems/graded_problem_set_1.sh
|
UTF-8
| 3,714
| 4.03125
| 4
|
[] |
no_license
|
#!/bin/bash
#================================================================================#
#                                 The MIT License                                #
#                                                                                #
#                    Copyright 2013 Alexandros Efthymiadis.                      #
#                                                                                #
# Permission is hereby granted, free of charge, to any person obtaining a copy   #
# of this software and associated documentation files (the "Software"), to deal  #
# in the Software without restriction, including without limitation the rights   #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell      #
# copies of the Software, and to permit persons to whom the Software is          #
# furnished to do so, subject to the following conditions:                       #
#                                                                                #
# The above copyright notice and this permission notice shall be included in     #
# all copies or substantial portions of the Software.                            #
#                                                                                #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR     #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,       #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE    #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER         #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,  #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN      #
# THE SOFTWARE.                                                                  #
#================================================================================#
# Interactive menu of small shell exercises: prompts for a task number,
# then for whatever inputs that task needs.
echo '1: descending sort of a directory'
echo '2: copy a directory to an another directory'
echo '3: show type of file and if it is a normal file rename it'
echo "4: show content from the 'n' line of a file"
echo '5: count the number of subdirectories of a directory'
echo '6: show permissions and owner of a file'
echo '7: count number of running processes'
echo '8: change permissions of a file'
echo '9: remove a directory with all its contents'
echo '10: get the size of a file'
echo '11: reverse a string'
echo '12: get the type of number'
echo "13: convert a file's lowercase letters to uppercase"
echo '14: show date as: weekday name, DD MM YY'
read NUM
#set -x
case $NUM in
	1) read -p 'Give a path: ' PTH
	   # NOTE(review): the odd nested quoting appears intended to expand ~
	   # in the user-supplied path via a login shell — verify it works.
	   cd "$(bash -cl \\"echo ${PTH}\\")"
	   # -k5,5 (the size column) replaces the obsolete '+4 -5' key syntax,
	   # which modern GNU sort rejects.
	   ls -l | sort -g -k5,5 > sizes.txt
	   ;;
	2) read -p 'Give the path of the directory you want to copy: ' FPTH
	   read -p 'Give the destination path: ' SFPTH
	   cp -r $FPTH $SFPTH
	   ;;
	3) read -p 'Give a name of a file: ' FL
	   file $FL
	   # only regular files get the rename prompt
	   if [ -f $FL ]; then
	       read -p 'Give a new name for your file: ' NFL
	       mv $FL $NFL
	   fi
	   ;;
	4) read -p 'Give the path of the file: ' PTH
	   read -p 'Give number of line: ' N
	   tail -n +$N $PTH
	   ;;
	5) read -p 'Give the path of the directory: ' PTH
	   cd "$(bash -cl \\"echo ${PTH}\\")"
	   echo "Number of subdirectories: $(ls -l | grep '^d' | wc -l)"
	   ;;
	6) read -p 'Give the path of the file: ' FL
	   cd "$(bash -cl \\"echo ${FL}\\")"
	   ls -l | cut -d ' ' -f 1,3
	   ;;
	7) echo "$USER's running processes: $(ps -ef | grep $USER | wc -l)"
	   ;;
	8) read -p 'Give the path of the file: ' FL
	   read -p 'Give new permissions: ' NUM
	   chmod $NUM $FL
	   ;;
	9) read -p 'Give the path of the directory you want to delete: ' PTH
	   rm -rf $PTH
	   ;;
	10) read -p 'Give the path of the file: ' FL
	    wc --byte $FL
	    ;;
	11) read -p 'Give a string: ' STR
	    echo "$STR" | rev
	    ;;
	12) read -p 'Give a number: ' N
	    # allow an optional leading minus: the old pattern ^[0-9]+$ sent
	    # negative numbers to the "not a number" branch, leaving the
	    # negative case below unreachable
	    if [[ "$N" =~ ^-?[0-9]+$ ]] ; then
	        if [ $N -eq 0 ]; then
	            echo "$N is zero"
	        elif [ $N -lt 0 ]; then
	            echo "$N is negative"
	        else
	            echo "$N is positive"
	        fi
	    else
	        echo "$N is not a number."
	    fi
	    ;;
	13) read -p 'Give the path of the file: ' FL
	    tr '[:lower:]' '[:upper:]' < $FL > output.txt
	    mv output.txt $FL
	    ;;
	14) date +"%A, %d %B %Y"
	    ;;
	*) echo 'Bad argument'
	   ;;
esac
exit
| true
|
779f93a04ab34f3f181bad7d733cd52fe7e7c8aa
|
Shell
|
ranjithkumar007/kgpkubssim3d.bin-
|
/itandroids/kill.sh
|
UTF-8
| 108
| 2.515625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Kill every process whose command line mentions "agent".
# pgrep -f replaces the old 'ps ax | grep agent' pipeline, which also
# matched the grep process itself and then tried to kill an already-gone
# PID ("No such process").
for KILLPID in $(pgrep -f agent); do
	kill -9 "$KILLPID";
done
| true
|
2948f7ec14362b816da18fddd0fa3b288fc09898
|
Shell
|
shashankware/FUNCTON
|
/degtemp.sh
|
UTF-8
| 1,403
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash
# Interactive temperature converter: menu-driven choice between
# Fahrenheit->Celsius and Celsius->Fahrenheit (integer arithmetic only,
# so results are truncated toward zero).
echo "1 : degF to degC  2 : degC to degF "
read -p "Enter choice : " choice
# prompt for a Fahrenheit value and print the (integer) Celsius result
funFtoC ( ) {
read -p "Enter Temp(F):" tempf
tempc=$((($tempf-32)*5/9))
echo "temp(C):" $tempc
}
# prompt for a Celsius value and print the (integer) Fahrenheit result
funCtoF ( ) {
read -p "Enter temp(C):" tempc
tempf=$((($tempc*9/5)+32))
echo "temp(f):" $tempf
}
case $choice in
"1")
funFtoC
;;
"2")
funCtoF
;;
*)
echo "Wrong choice"
;;
esac
#!/bin/bash
# Interactive palindrome check: reads a number, reverses its digits into
# the global 'sum', and reports whether the reversal equals the original.
# NOTE(review): 'sum' is initialised once at file level rather than inside
# the function, so a second call would start from the previous result.
sum=0
pallindrome () {
read -p "Enter number:" N
N1=$N
# peel the last digit off N each pass, building the reversed value in sum
while [ $N -gt 0 ]
do
temp=$(($N%10))
sum=$(($sum*10+$temp))
N=$(($N/10))
done
if [ $N1 -eq $sum ]
then
echo "its pallindrome"
else
echo "not pallindrome"
fi
}
#!/bin/bash
# prime N - trial-division primality check.
# Echoes "prime" or "Not prime" and returns 1 for prime, 0 for not prime
# (inverted from the usual shell convention; callers test for $? -eq 1).
prime () {
N=$1
# fix: flag was previously unset whenever the loop body never ran
# (N <= 2), which made the [ $flag -eq 1 ] test below error out
flag=0
for ((i=2;i<N;i++))
do
	temp=$(($N%$i))
	if [ $temp -eq 0 ]
	then
		# found a divisor: N is composite
		flag=1
		break
	else
		flag=0
	fi
done
if [ $flag -eq 1 ]
then
	echo "Not prime"
	return 0
else
	echo "prime"
	return 1
fi
}
#!/bin/bash
# pallindrome N - report whether the decimal digits of N read the same
# backwards.  Echoes "pallindrome" or "not pallindrome" and returns 1 for
# a palindrome, 0 otherwise (inverted from the usual shell convention).
pallindrome () {
	sum=0
	N=$1
	N1=$1
	# peel the last digit off N each pass, pushing it onto sum, until
	# every digit has been consumed
	until [ "$N" -le 0 ]
	do
		sum=$(( sum * 10 + N % 10 ))
		N=$(( N / 10 ))
	done
	if [ "$N1" -eq "$sum" ]
	then
		echo "pallindrome"
		return 1
	else
		echo "not pallindrome"
		return 0
	fi
}
#!/bin/bash
# Reads a number and reports whether it is both a palindrome and a prime,
# combining the return codes of the pallindrome() and prime() helpers
# defined above (each returns 1 for a positive result).
pallindrome_and_prime () {
read -p "Enter Number : " N
pallindrome $N
ret1=$?
prime $N
ret2=$?
if [ $ret1 -eq 1 ] && [ $ret2 -eq 1 ]
then
echo "its pallindrome and prime"
else
echo "its not pallindrome and prime"
fi
}
| true
|
33e9d9e52df9267cd81cad5f35711e5d737598ca
|
Shell
|
diranetafen/mobileit
|
/Customize Desktop/script/Remote Access/check_lcd.sh
|
UTF-8
| 342
| 2.703125
| 3
|
[] |
no_license
|
# Probe for the LCD's character device and record the result: append a
# timestamped line to the log, and write OK/KO to the state file read by
# the remote-access tooling.
logfile=/home/pi/custum_desktop/logs/check_lcd.log
statefile=/home/pi/custum_desktop/logs/check_lcd.txt
if [ -c /dev/ttyACM0 ] ; then
	lcd_status="up"
	lcd_state="OK"
else
	lcd_status="down"
	lcd_state="KO"
fi
echo "$(date) ---------- check_LCD : LCD $lcd_status" >> "$logfile"
echo "$lcd_state" > "$statefile"
| true
|
a65db2e61288279bcd80606642409549392d6d38
|
Shell
|
49257620/reboot
|
/studysrc/shellstudy/array002.sh
|
UTF-8
| 436
| 2.84375
| 3
|
[] |
no_license
|
# Demonstrate bash array literals, element access, explicit-subscript
# assignment, and the length/expansion operators.
my_array=(A B "C" D)
printf '%s\n' "第一个元素为: ${my_array[0]}"
printf '%s\n' "第二个元素为: ${my_array[1]}"
printf '%s\n' "第三个元素为: ${my_array[2]}"
printf '%s\n' "第四个元素为: ${my_array[3]}"
# Re-populate with explicit subscripts; index 10 makes the array sparse,
# so it holds five elements even though the highest index is 10.
my_array=([0]=A [1]=B [2]=C [3]=D [10]=E)
echo "数组的元素为: ${my_array[*]}"
echo "数组的元素为: ${my_array[@]}"
echo "数组元素个数为: ${#my_array[*]}"
echo "数组元素个数为: ${#my_array[@]}"
| true
|
210b51cf02f204c8ef7cdefc88342c095e44ddf4
|
Shell
|
jeqo/packer-oracle-bpm
|
/scripts/oracle-fmw/create-oracle-user.sh
|
UTF-8
| 467
| 2.78125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh -x
# Provision the OS account and kernel/limits settings required by Oracle
# FMW.  Expects OS_GROUP and OS_USER in the environment.
groupadd $OS_GROUP -g 500
useradd -b /home -g $OS_GROUP -u 501 $OS_USER
# NOTE(review): hard-coded default password for the oracle user — fine for
# throwaway Packer images, but must be changed for anything long-lived.
echo "welcome1" | passwd --stdin $OS_USER
# Raise the open-file limits; re-written idempotently by dropping any
# previously appended block (everything from the EOF marker on).
sed -i '/.*EOF/d' /etc/security/limits.conf
echo "* soft nofile 16384" >> /etc/security/limits.conf
echo "* hard nofile 16384" >> /etc/security/limits.conf
echo "# EOF" >> /etc/security/limits.conf
# Bump network buffer maxima and apply them immediately.
echo "net.core.rmem_max=4192608" > /tmp/.sysctl.conf
echo "net.core.wmem_max=4192608" >> /tmp/.sysctl.conf
sysctl -e -p /tmp/.sysctl.conf
| true
|
3d3aa267ef367a1e5b0f22552b6a95ca4afec477
|
Shell
|
Lijiakuan/sequencing
|
/rins_ccls/blast_check.sh
|
UTF-8
| 1,133
| 4.125
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Sanity-check blast output files: verify each report carries the banner
# and footer text expected for the given blast flavor (the line-level
# checking is delegated to blast_check.gawk).

script=$(basename "$0")

function usage(){
	echo
	echo "checks the contents of the given blast output file"
	echo
	echo "Usage:"
	echo
	echo "$script [--blast COMMAND] blast_output_file(s)"
	echo
	echo "COMMANDS: blastn, tblastx"
	echo
	echo "Example:"
	echo "$script dna/output/fallon_SFPB001A_filtered_20130722/trinity_input_single.fasta.blastn.txt"
	echo
	exit 1
}

# Basically, this is TRUE AND DO ...
[ $# -eq 0 ] && usage

blast='blastn'

# Option parsing: only -b/--blast is recognized; everything after the
# options is treated as the list of files to check.
while [ $# -ne 0 ] ; do
	case $1 in
		-b|--b*)
			shift; blast=$1; shift ;;
		-*)
			echo ; echo "Unexpected args from: ${*}"; usage ;;
		*)
			break;;
	esac
done

# Expected banner (head) and footer (tail) text per blast flavor.
case $blast in
	'blastn' )
		head="BLASTN"
		tail="Gap Penalties"
		;;
	'tblastx' )
		head="TBLASTX"
		tail="Window for multiple hits"
		;;
	*)
		echo "Unrecognized blast command $blast"
		# fix: abort instead of falling through with empty head/tail,
		# which made the gawk check below meaningless
		exit 1
esac

dir=$(dirname "$0")

#
# Why did I need gawk and not just awk?
# I'm guessing its due to the character classes I used for matching.
# In this gawk script, awk finds control characters on EVERY line
# making it really kinda pointless.
#
gawk -v head="$head" -v tail="$tail" -f "$dir/blast_check.gawk" "$@"
| true
|
654269cf8a00eaf8deb29f30bad18d6a8386c219
|
Shell
|
silverbolt2016/unix-book
|
/chap3/4ex/ex4.sh
|
UTF-8
| 250
| 2.765625
| 3
|
[] |
no_license
|
#!/bin/bash
# fix: the shebang was written '#/bin/bash' (missing '!'), making it a
# plain comment instead of an interpreter directive.
: '
Write a script that creates three background processes, waits for them all to
complete, and then displays a simple message
'
# Launch three background sleeps, then block until every child has exited.
sleep 15 &
sleep 15 &
sleep 15 &
wait
echo All 3 background processes completed
| true
|
3d5f97a4a1c1b3d9499ce5c1d535568fe0076c1a
|
Shell
|
naveenmahadevuni/incubator-trafodion
|
/core/sqf/sql/scripts/bats/runmonitorbats.virtual
|
UTF-8
| 1,256
| 2.578125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# @@@ START COPYRIGHT @@@
#
# (C) Copyright 2010-2014 Hewlett-Packard Development Company, L.P.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
# @@@ END COPYRIGHT @@@
#
# runmonitorbats.virtual script - executes monitor development tests
# This script rebuilds SQ environment
# Requires MY_SQROOT to point at the SQ installation root.
echo "***"
echo "*** Stopping SQ Environment"
echo "***"
cd $MY_SQROOT/sql/scripts
echo $PWD
# stop the environment and kill any stragglers
sqstop
ckillall
echo "***"
echo "*** Generating SQ Environment configuration"
echo "***"
sqgen bats/sqconfig.monitor
echo "***"
echo "*** Running monitor tests"
echo "***"
cd $MY_SQROOT/monitor/test
echo $PWD
# rebuild the test harness from scratch before running
make clean;make
$MY_SQROOT/monitor/test/runtest -virtual -nogen
echo "***"
echo "*** Last monitor test stops SQ Environment"
echo "***"
# give the shutdown a moment to settle before checking cluster state
sleep 10
cstat
| true
|
0a081708e5f643a08cf2789f41a571a7f27ac628
|
Shell
|
Le0nX/StudyX
|
/Bash/Ex2.sh
|
UTF-8
| 186
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
# Demonstrate the -f (regular file) and -d (directory) test operators
# against /bin/bash.
if test -f /bin/bash
then
	echo "file /bin/bash exists"
fi
if test -d /bin/bash
then
	echo "/bin/bash is directory"
else
	echo "/bin/bash is NOT a directory"
fi
exit 0
| true
|
7512392a1e9d31b0d22c5045f707177bcc24c3fa
|
Shell
|
Linaro/lava-functional-tests
|
/lava-test-shell/android/scripts/get-adb-serial.sh
|
UTF-8
| 445
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/sh
# Report the serial number of the attached adb device: writes it to
# adb-connection.txt and exits non-zero when adb cannot identify one.
which adb
# start daemon if not yet running.
adb start-server || true
adb wait-for-device
echo
# start adb and stop the daemon start message from appearing in $result
adb get-serialno || true
# tail -n1 keeps only the serial line even if the daemon chatters first
result=`adb get-serialno 2>&1 | tail -n1`
if [ "$result" = "unknown" ]; then
    echo "ERROR: adb get-serialno returned" $result
    exit 1
else
    echo "adb get-serialno returned" $result
    echo $result > adb-connection.txt
    exit 0
fi
| true
|
618aa52ba81fb3901de00effbd5a2f7df4e4f4e5
|
Shell
|
osom8979/opm
|
/etc/setup.d/ubuntu/102-gravatar-face.sh
|
UTF-8
| 1,037
| 3.859375
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Download the gravatar image for the user's git e-mail address and
# install it as ~/.face (used by display managers as the account picture).

# OPM_HOME must be provided by the opm environment; bail out early if not.
if [[ -z $OPM_HOME ]]; then
    echo "Not defined OPM_HOME variable." 1>&2
    exit 1
fi

# git supplies the e-mail address, curl fetches the avatar.
if ! command -v git &> /dev/null; then
    echo "Not found git command" 1>&2
    exit 1
fi

if ! command -v curl &> /dev/null; then
    echo "Not found curl command" 1>&2
    exit 1
fi
# md5_hash VALUE — print the MD5 hex digest of VALUE (no trailing newline
# is hashed).  Tries md5 (macOS), md5sum (Linux coreutils), then openssl,
# and exits the whole script with status 1 if none is available.
function md5_hash
{
    local val=$1
    # printf '%s' instead of echo -n: echo's -n handling is not portable
    # across shells, while printf never interprets the value.
    if command -v md5 &> /dev/null; then
        # Darwin kernel
        printf '%s' "$val" | md5 -r | awk '{print $1}'
    elif command -v md5sum &> /dev/null; then
        printf '%s' "$val" | md5sum | awk '{print $1}'
    elif command -v openssl &> /dev/null; then
        # -r gives "digest *stdin"; awk keeps only the digest.
        printf '%s' "$val" | openssl md5 -r | awk '{print $1}'
    else
        echo "Unable to compute md5 checksum" 1>&2
        exit 1
    fi
}
# Use the git-configured email address, or ask interactively when unset.
if ! EMAIL=$(git config user.email); then
    read -r -p "Enter the your email address: " EMAIL
fi

# Gravatar hashes are computed over the lower-cased address.
LOWER_EMAIL=$(echo -n "$EMAIL" | tr '[:upper:]' '[:lower:]')
HASH=$(md5_hash "$LOWER_EMAIL")

# Request a 512x512 avatar and store it where desktop environments look
# for the account picture (~/.face).
SIZE=512
URL="https://www.gravatar.com/avatar/${HASH}?s=${SIZE}"
DEST=$HOME/.face
curl -o "$DEST" "$URL"
| true
|
d3d982f9e317acadbe0736c293d666ca8b8627d2
|
Shell
|
HumanCompatibleAI/adversarial-policies
|
/scripts/doubleblind.sh
|
UTF-8
| 1,121
| 3.28125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build a double-blinded supplementary.zip: rsync the repository (minus
# excluded files) into a temp dir, verify that no identifying words remain,
# then add the paper PDF and zip everything up.

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
ROOT_DIR="$( dirname "${SCRIPT_DIR}" )"

OPTIONS="-v -z -r -lpt"
EXCLUDES="LICENSE README.md setup.py scripts/doubleblind.sh ci/local_tests.sh .travis.yml experiments/common.sh experiments/planning
src/aprl/configs/ray/ .git supplementary.zip *.pkl requirements*.txt"
# Refuse to compile if we find any of these words in non-excluded sources
BLACKLISTED="Adam Gleave Michael Dennis Cody Neel Kant Sergey Levine Stuart Russell berkeley humancompatibleai humancompatible"

TMPDIR=$(mktemp --tmpdir -d doubleblinded.XXXXXXXX)

# Build the rsync command as an array so each --exclude stays one word
# even if a pattern ever contains whitespace.
SYNC_CMD=(rsync ${OPTIONS} --exclude-from=.gitignore)
for exclude in ${EXCLUDES}; do
    SYNC_CMD+=("--exclude=${exclude}")
done
"${SYNC_CMD[@]}" "${ROOT_DIR}" "${TMPDIR}"

pushd "${TMPDIR}"

GREP_TERMS=()
for pattern in ${BLACKLISTED}; do
    GREP_TERMS+=(-e "${pattern}")
done
# grep exits 1 only when NO blacklisted term was found anywhere.
grep -r . -i -F "${GREP_TERMS[@]}"
if [[ $? -ne 1 ]]; then
    echo "Found blacklisted word. Dying."
    exit 1
fi

cp "$HOME/dev/adversarial-policies-paper/supplementary.pdf" .
# -f: do not fail when a previous archive does not exist yet.
rm -f -- "${ROOT_DIR}/supplementary.zip"
zip -r "${ROOT_DIR}/supplementary.zip" .
popd
| true
|
d3d4061cee18383d618709dac247cce966fea1e1
|
Shell
|
i2like/neatdns
|
/anti-pollution/run
|
UTF-8
| 532
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Anti-DNS-pollution firewall rules: drop suspicious DNS replies coming
# from the configured upstream servers, then sleep forever (container
# entrypoint style).

# var2list PREFIX — expand the numbered variables PREFIX1..PREFIX9 and
# print each non-empty value on its own line.
# NOTE(review): uses [[ ]] and 'local' under #!/bin/sh — fine on bash/ash
# (busybox) but not strictly POSIX; confirm the target shell supports them.
var2list(){
local PREFIX=$1
seq 1 9 |
while read n; do
# Double eval: first resolve $PREFIX$n to its value, then expand that
# value again, so values may themselves contain variable references.
[[ -n $(eval echo -n \$$PREFIX$n) ]] && eval echo $(eval echo \$$PREFIX$n);
done
}

# Funnel packets from every configured upstream DNS server into one chain.
iptables -N UPSTREAM
var2list GLOBAL_DNS | xargs -I {} iptables -A INPUT -s {} -j UPSTREAM

# Drop UDP replies from port 53 matching the u32 byte patterns below —
# presumably known pollution fingerprints in the DNS header; verify the
# offsets against the DNS wire format before changing them.
iptables -A UPSTREAM -p udp -m udp --sport 53 -m u32 --u32 "36=0" -j DROP
iptables -A UPSTREAM -p udp -m udp --sport 53 -m u32 --u32 "28&0x10=0" -j DROP

# Drop TCP RSTs claiming to come from the DNS port (forged resets).
iptables -A UPSTREAM -p tcp -m tcp --sport 53 --tcp-flags RST RST -j DROP

# Keep the process alive so the supervising runtime does not restart it.
sleep infinity
| true
|
5d1ac0608ab188cc4347a2dddad3ff4a77dca4e7
|
Shell
|
agathongroup/ag-disable-grow-counts
|
/make-plugin.sh
|
UTF-8
| 334
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/sh
#
# invoke in the top-level directory of the plugin with ./make-plugin.sh.
#
# it will build a ZIP file named for the current directory. Note that it will only include stuff *committed to master*,
# any local uncommitted changes will be ignored.

# Archive name = basename of the current directory.
name=${PWD##*/}
# Quote the expansions so directory names with spaces still work.
git archive --format=zip --prefix="$name/" master > "$name.zip"
| true
|
50ffbe4448a93d5686de24f892888beb8428131a
|
Shell
|
yomichi/HPCtools
|
/Enaga/pexlock
|
UTF-8
| 8,242
| 3.8125
| 4
|
[
"BSL-1.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
#! /bin/sh
######################################################################
#
# PEXLOCK : A Exclusive Lock Command within POSIX
#
# Usage : pexlock [options] <lockname> [lockname ...]
# -d <lockdir> ...... the directory for lockfiles.
# The lockfile directory will be decided
# as the following rule.
# 1) this option "-d" if specified
# 2) the environment varriable "PLOCKDIR"
# if specified
# 3) if the environment varriable "TMPDIR"
# specified, "$TMPDIR/plock.<username>"
# 4) "/tmp/plock.<username>" (default)
# -w <maxwaiting> ... maximum waiting seconds to succeed locking
# (-1 means waiting infinitely)
# The default value is 10.
# Return : $? ==0 ... one or more locking were success
# !=0 ... all failure
# stdout ....... generated path of the lockfile
#
# Example : lockid=$(pexlock -d /tmp/plock -w 10 foo) || exit 1 # do ex-lock
# :
# (do something requires exclusive lock)
# :
# touch -c "$lockid" # extend lifetime if you want
# : # (but it needs to remain SUFFICIENT LIFETIME)
# punlock "$lockid" # release the lock
#
# Notice : The lockfile is written with rw-rw-rw for sharing.
# If you want not to share it with others,
# you have to give the lockdir rwxrwx--- or rwx------ permisson.
#
# Written by Rich Mikan (richmikan[at]richlab.org) at 2016/01/22
#
# This is a public-domain software. It measns that all of the people
# can use this with no restrictions at all. By the way, I am fed up
# the side effects which are broght about by the major licenses.
#
######################################################################
# ===== FUNCTIONS ====================================================

# --- FUNC: print the usage and exit ---------------------------------
# Writes the usage text to stderr and terminates the script with
# status 1.  (<<- strips leading tabs from the here-document body.)
print_usage_and_exit () {
cat <<-__USAGE 1>&2
Usage : ${0##*/} [options] <lockname> [lockname ...]
-d <lockdir> ...... the directory for lockfiles.
The lockfile directory will be decided
as the following rule.
1) this option "-d" if specified
2) the environment varriable "PLOCKDIR"
if specified
3) if the environment varriable "TMPDIR"
specified, "\$TMPDIR/plock.<username>"
4) "/tmp/plock.<username>" (default)
-w <maxwaiting> ... maximum waiting seconds to succeed locking
(-1 means waiting infinitely)
The default value is 10.
Version : Fri Jan 22 18:59:37 JST 2016
__USAGE
exit 1
}
# ===== PREPARATION ==================================================

# --- initialize -----------------------------------------------------
set -u
PATH='/usr/bin:/bin'
# Build IFS as space/tab/newline via printf (the trailing "_" protects
# the newline from command-substitution stripping, then is removed).
IFS=$(printf ' \t\n_'); IFS=${IFS%_}
export IFS LC_ALL=C LANG=C PATH
umask 0000

# --- parse the arguments --------------------------------------------
dopt=''
max_waiting_secs=10
max_lifetime_secs=''
# Parent PID of this script, recorded into the lockfile later.
ppid=$(ps -Ao pid,ppid | awk '$1=='"$$"'{print $2;exit}')
optmode=''
# Hand-rolled POSIX option parser: optmode remembers which option is
# waiting for its argument in the next word.
while [ $# -gt 0 ]; do
case "$optmode" in
'') case "$1" in
--) shift
break
;;
-[hv]|--help|--version)
print_usage_and_exit
;;
-[dwl]*) ret=$(printf '%s\n' "${1#-}" |
awk '{opt = substr($0,1,1); #
opt_str = (length($0)>1) ? substr($0,2) : ""; #
printf("%s %s", opt, opt_str); }')
ret1=${ret%% *}
ret2=${ret#* }
case "$ret1$ret2" in
d) optmode='d' ;;
d*) dopt=$ret2 ;;
w) optmode='w' ;;
w*) max_waiting_secs=$ret2 ;;
l) optmode='l' ;;
l*) max_lifetime_secs=$ret2 ;;
esac
;;
-*) print_usage_and_exit
;;
*) break
;;
esac
;;
d) dopt=$1
optmode=''
;;
w) max_waiting_secs=$1
optmode=''
;;
l) max_lifetime_secs=$1
optmode=''
;;
esac
shift
done
# At least one lockname is required.
case $# in 0) print_usage_and_exit;; esac
# -w must be an integer (possibly negative for "wait forever").
echo "_$max_waiting_secs" | grep -Eq '^_-?[0-9]+$' || {
printf '%s: Maximum waiting seconds parameter is invalid\n' "${0##*/}" 1>&2
exit 1
}
# -l is accepted by the parser for symmetry with sibling commands but
# has no meaning here; warn and continue.
case "$max_lifetime_secs" in
'') : ;;
*) printf '%s: option "-l" could not use in this command\n' "${0##*/}" 1>&2;;
esac
# Decide the lockfile directory: -d, then $PLOCKDIR, then
# $TMPDIR/plock.<user>, then /tmp/plock.<user> (see the usage text).
Dir_lock=''
while :; do
case "$dopt" in '') :;; *) Dir_lock="${dopt%/}/"
break ;; esac
case "${PLOCKDIR:-}" in '') :;; *) Dir_lock="${PLOCKDIR%/}/"
break ;; esac
case "${TMPDIR:-}" in '') :;; *) Dir_lock="${TMPDIR%/}/plock.$(id -un)/"
mkdir -p "$Dir_lock" 2>/dev/null
break ;; esac
case 'default' in *) Dir_lock="/tmp/plock.$(id -un)/"
mkdir -p "$Dir_lock" 2>/dev/null
break ;; esac
done
case $? in
[!0]*) echo "${0##*/}: Failed to make the lockfile directory" 1>&2;exit 1;;
esac
# An explicitly given -d directory must already exist.
[ \( -z "$dopt" \) -o \( -d "$dopt" \) ] || {
echo "${0##*/}: \"$Dir_lock\": No such directory or permission denied" 1>&2
exit 1
}
# Normalise a relative lock directory to an absolute path.
case "$Dir_lock" in [!/]*) s=$(pwd);Dir_lock="${s%/}/$Dir_lock";; esac
# ===== MAIN =========================================================
count_all=0
count_success=0
for lockname in "$@"; do
count_all=$((count_all+1))
# --- Validate the lockname ---------------------------------------
case "$lockname" in '.border_date_and_time.tmp')
printf '%s: ".border_date_and_time.tmp" is reserved, skipped\n' \
"${0##*/}" 1>&2
continue
;;
esac
echo "_$lockname" | grep -q '[/:]' && {
printf '%s: "%s": Neither "/" nor ":" can be used in lockname, skipped\n' \
"${0##*/}" "$lockname" 1>&2
continue
}
# --- Try to set exclusive-lock ------------------------------------
# 1) trying loop
if [ $max_waiting_secs -ge 0 ]; then
try=$((max_waiting_secs+1))
else
try=1
fi
while [ $try -gt 0 ]; do
# 1-1) Try to create a file
# set -C (noclobber) makes the > redirection fail when the file
# already exists — an atomic create-if-absent, i.e. the lock itself.
(set -C; echo $ppid >"$Dir_lock$lockname") 2>/dev/null || {
[ $max_waiting_secs -ge 0 ] && try=$((try-1)) # retry if already exists
case $try in 0) :;; *) sleep 1;; esac
continue
}
# 1-2) finish the loop successfully
break
done
# 2) $try > 0 : success and print the path of the generated lockfile
# ==0 : now fully locked
# < 0 : abnormally exit
# otherwise, it means some error happened
case $try in
[1-9]*) printf '%s%s\n' "$Dir_lock" "${lockname}"
;;
0) printf '%s: "%s": timeout, try again later\n' \
"${0##*/}" "$lockname" 1>&2
continue
;;
-*) printf '%s: "%s": unexpedter error!\n' "${0##*/}" "$lockname" 1>&2
continue
;;
esac
count_success=$((count_success+1))
done
# ===== FINISH =======================================================
# --- Return 1 if all locking failed ---------------------------------
case "$count_success:$count_all" in
0:[!0]*) exit 1;;
*) exit 0;;
esac
| true
|
7b633d28b015872f76ed6497fdcd623c89c8a157
|
Shell
|
martijnvogten/dock
|
/sequel Pro/sqlpro-tunnel.sh
|
UTF-8
| 533
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/sh
# Open an SSH tunnel to a docker container's MySQL port ($1 = container)
# and launch Sequel Pro against it via a generated .spf connection file
# ($2 = database, $3 = table).  Requires $DOCKER_SSH and $DOCK_HOME.

# Find a free local port, starting at 8306.
LOCAL_PORT=8306
while [ -n "$(lsof -t -i :$LOCAL_PORT)" ] ; do
    # POSIX arithmetic expansion instead of spawning expr.
    LOCAL_PORT=$((LOCAL_PORT + 1))
done

set -e
echo "Opening SSH tunnel on local port $LOCAL_PORT"
# Ask the remote docker daemon which host port maps to MySQL (3306).
REMOTE_PORT=$(ssh "$DOCKER_SSH" docker port "$1" | grep '3306/tcp' | grep -oE '\S+$')
ssh -q -fN -L "$LOCAL_PORT:$REMOTE_PORT" "$DOCKER_SSH" &

# Build a connection file from the template, substituting port/db/table.
SPF_FILE=$TMPDIR$1_tunnel_$LOCAL_PORT.spf
sed "s/8306/$LOCAL_PORT/" "$DOCK_HOME/sequel Pro/tunnel.spf" | \
sed "s/_DATABASE/$2/" | \
sed "s/_TABLE/$3/" > "$SPF_FILE"
# -W: wait until Sequel Pro exits before continuing.
open -n -W "$SPF_FILE"

# Tear the tunnel down once Sequel Pro has closed.
kill $(lsof -t -i :$LOCAL_PORT )
| true
|
b6e82f153a4e6640d73f800fba0fedaf6cbb3299
|
Shell
|
askdoudou/bimp-plugin-MacOS
|
/install
|
UTF-8
| 453
| 2.703125
| 3
|
[] |
no_license
|
#! /bin/bash
# Install the BIMP plug-in into the user's GIMP 2.10 plug-ins folder.

# Work from the directory containing this script.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
cd "${DIR}"

chmod +x bimp

# Replace any previous installation with a fresh copy.
PLUGIN_DIR=~/Library/Application\ Support/GIMP/2.10/plug-ins/bimp
rm -rf "$PLUGIN_DIR"
mkdir -p "$PLUGIN_DIR"
cp bimp "$PLUGIN_DIR/"
cp -r bimp-locale "$PLUGIN_DIR/"

echo "BIMP - Batch Image Manipulation Plugin - is installed. You can close this window now."
| true
|
c9296a494d760e24431bca5fd781cc0fd092ffd2
|
Shell
|
wasiahmad/PrivacyQA
|
/data/fasttext/download.sh
|
UTF-8
| 901
| 2.875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Download the Polisis fastText embedding files from Google Drive,
# skipping any file that is already present in the current directory.

FASTTEXT=./

# download_gdrive FILEID DEST — fetch a (large) file from Google Drive.
# The first curl primes the confirmation cookie Drive requires for big
# files; the second performs the actual download.
download_gdrive() {
    local fileid=$1
    local dest=$2
    curl -c ./cookie -s -L "https://drive.google.com/uc?export=download&id=${fileid}" > /dev/null
    curl -Lb ./cookie "https://drive.google.com/uc?export=download&confirm=$(awk '/download/ {print $NF}' ./cookie)&id=${fileid}" -o "${dest}"
    rm ./cookie
}

echo "Downloading fasttext embeddings"

FILE=polisis-100d-158k-subword.txt
if [[ -f "$FILE" ]]; then
    echo "$FILE exists, skipping download"
else
    download_gdrive "1dqrmc1in81SiqvTQPFxQdnYZdKrbpxtu" "$FILE"
fi

FILE=polisis-300d-137M-subword.txt
if [[ -f "$FILE" ]]; then
    echo "$FILE exists, skipping download"
else
    download_gdrive "1EIwu1ahCmoHkAIpnrG-fmosbu3qsCbda" "$FILE"
fi
| true
|
24609b295f66ccdfa69d91515802cb6ffabf16d6
|
Shell
|
2kw92/PAM
|
/pam.sh
|
UTF-8
| 582
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/bash
# Provisioning for the PAM exercise: create test users, enable password
# SSH authentication, and hook /vagrant/test_login.sh into PAM for sshd.

# Three test accounts, all with the same password.
useradd day && useradd night && useradd friday
echo "Otus2019"|passwd --stdin day && echo "Otus2019" | passwd --stdin night && echo "Otus2019" | passwd --stdin friday

# Allow password logins over SSH and restart the daemon to apply it.
bash -c "sed -i 's/^PasswordAuthentication.*$/PasswordAuthentication yes/' /etc/ssh/sshd_config && systemctl restart sshd.service"

# Replace the stock account check for sshd with our custom PAM exec hook.
sed -i 's|account include password-auth|account required pam_exec.so /vagrant/test_login.sh|' /etc/pam.d/sshd
chmod -R 0770 /vagrant/test_login.sh

# NOTE(review): setenforce switches SELinux to permissive at runtime
# only; this does not persist across reboots.
setenforce 0

# "admin" group membership is presumably what test_login.sh inspects —
# verify against that script.
groupadd admin
usermod -a -G admin day
usermod -a -G admin root
usermod -a -G admin vagrant
| true
|
7709d3a0c22357c6aad4894359ab2fb89de12818
|
Shell
|
chenxi-shi/Slurm_Bash_Automation
|
/scancel_not_bash_jobs.sh
|
UTF-8
| 769
| 3.703125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# List (and, when uncommented below, cancel) every Slurm job of a user
# except interactive "bash" jobs.

USAGE() # use "-h" to get this help
{
cat << EOF
usage: $0 options
./scancel_not_bash_jobs -u <username>
REQUIRED:
-u <username> The account name
EOF
}

# Only -u (required) and -h are meaningful; the previous optstring also
# declared unused p/o/n/i options and never handled the advertised -h.
while getopts u:h args
do
    case $args in
        u)  # get user id
            user_name=$OPTARG
            # %s format keeps an unusual username from being interpreted
            # as printf directives.
            printf 'User: %s\n' "$user_name"
            ;;
        h)  # explicit help request
            USAGE
            exit 0
            ;;
        *)  # input is wrong
            USAGE
            exit 1
            ;;
    esac
done

# A username is required; bail out instead of querying squeue with none.
if [ -z "$user_name" ]; then
    USAGE
    exit 1
fi

# job_id: array of numeric job ids whose job name is not "bash".
job_id=($(squeue -l -u "$user_name" | awk '$1~/^[0-9]+$/ && $3!="bash" {print $1}'))
echo "Job count: ${#job_id[@]}"

# Show id/name of the jobs that would be cancelled (NR==2 keeps the header).
squeue -l -u "$user_name" | awk '$1~/^[0-9]+$/ && $3!="bash" || NR==2 {printf "%-15s %-20s\n", $1, $3}' # output for checking

for job in "${job_id[@]}"
do
    echo "Cancel job $job"
    # scancel $job
done

squeue -l -u "$user_name"
| true
|
fe84e5d961d149f780965301901222353a96b4eb
|
Shell
|
aphecetche/scripts
|
/alice/run3/old/aliceo2-eclipse-generate-makefiles.sh
|
UTF-8
| 711
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# Generate Makefiles for Eclipse for a given configuration (e.g. Debug, Release)
# $1 = configuration (default Debug), $2 = project (default AliceO2).

Configuration=${1:-Debug}
What=${2:-AliceO2}

Extra_CMake_Options=""
InstallDir=$HOME/o2/alfa/inst

# AliRoot builds need ROOTSYS pointed at the local install explicitly.
if [[ $What =~ AliRoot ]];
then
    Extra_CMake_Options="-DROOTSYS=$InstallDir"
fi

echo Configuration="$Configuration" What="$What" Extra_CMake_Options="$Extra_CMake_Options"

export PATH=$PATH:/usr/local/bin
source "$HOME/Scripts/o2-env.sh"

mkdir -p "$HOME/o2/alfa/$What/$Configuration"
# Abort if the cd fails, so cmake never runs in the wrong directory.
cd "$HOME/o2/alfa/$What/$Configuration" || exit 1
# $Extra_CMake_Options is intentionally unquoted: it is empty or a single
# option and must disappear entirely when empty.
/Applications/CMake.app/Contents/bin/cmake -G "Unix Makefiles" ../ -DCMAKE_BUILD_TYPE:STRING="$Configuration" -DUSE_DIFFERENT_COMPILER=TRUE -DCMAKE_INSTALL_PREFIX="$InstallDir" $Extra_CMake_Options
| true
|
1da1d690d6061d2b1488c392b62b98ad9c58a06b
|
Shell
|
mvalenziano/magento2-docker
|
/init
|
UTF-8
| 697
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Bootstrap a magento2-docker project ($1 = project name): optionally
# clone the repo ($2 = "clone"), normalise git settings, pull the image,
# and substitute the project name into the compose files.

if [ "$2" == "clone" ]; then
    git clone https://github.com/mvalenziano/magento2-docker.git "$1" && cd "$1";
fi

git config --global core.autocrlf false
git config --global core.eol LF
git config --global core.fileMode false
git config --global diff.renamelimit 5000

sudo chmod u+x ./*
docker image pull mvalenziano/magento2

if [ "$(uname)" == "Darwin" ]; then
    # git checkout -f mac;
    # (BUG FIX: the original used '//' which is not a shell comment and
    # ran a bogus "//git" command.)
    # BSD sed on macOS requires an explicit empty backup suffix after -i.
    sed -i '' -e "s/<project_name>/$1/g" docker-compose.yml \
    && sed -i '' -e "s/<project_name>/$1/g" docker-compose-dev.yml \
    && sed -i '' -e "s/<project_name>/$1/g" docker-sync.yml
else
    # git checkout -f master;
    # GNU sed must NOT receive the empty '' argument (it would be read
    # as a filename); plain -i edits in place.
    sed -i -e "s/<project_name>/$1/g" docker-compose.yml;
fi

bash start
| true
|
ee77082aa6a334067267c0f4444038fcd9c8afe9
|
Shell
|
timvideos/litex-buildenv
|
/scripts/debian-setup.sh
|
UTF-8
| 1,331
| 3.3125
| 3
|
[
"MIT",
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# Prepare a Debian/Ubuntu machine for litex-buildenv development.
# Must be run as root (via sudo).

# Check for root FIRST — the original ran apt-get before this check, so
# non-root users got an apt permission error instead of this message.
if [ "$(whoami)" != "root" ]
then
    echo "Please use sudo to run this script!"
    exit 1
fi

# Need realpath for finding where we are running from.
apt-get install -y realpath

SETUP_SRC=$(realpath "${BASH_SOURCE[0]}")
SETUP_DIR=$(dirname "$SETUP_SRC")

set -x
set -e

# Need wget to download conda in download-env.sh
apt-get install -y wget

# We are building C code, so need build-essential
apt-get install -y build-essential

# Need gpg to create the encrypted package of Xilinx tools for CI, not needed
# by normal developers.
#apt-get install -y gnupg

# gtkwave is needed for viewing the output of traces
apt-get install -y gtkwave

# FIXME: What needs python-yaml!?
apt-get install -y python-yaml

# aftpd is needed for tftp booting firmware
apt-get install -y atftpd

# These libraries are needed for working with the sim target
apt-get install -y openvpn libsdl1.2-dev

# FIXME: Work out if this stuff below is needed.
apt-get install -y software-properties-common
add-apt-repository -y ppa:timvideos/fpga-support
apt-get update

# Only need the udev rules (the full mode-switch tool is installed locally as
# part of the download-env.sh).
apt-get install -y hdmi2usb-udev || apt-get install -y hdmi2usb-mode-switch-udev

# Get the vizzini module, only needed for the Atlys board
#apt-get install -y vizzini-dkms
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.