blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
19bc66ab2a4b6f05487b78bdd462d97f21091963 | Shell | maximx1/sysscripts | /system management/language install/langstall.sh | UTF-8 | 1,151 | 3.25 | 3 | [] | no_license | #!/bin/bash
#System script that step by step prompts
# installation of a dev-language environment.
# Justin Walrath <walrathjaw@gmail.com>
#
# For each package below, ask the user whether to install it and run
# "sudo apt-get install <package>" when the answer is "y" or "yes".

# prompt_install <package> <description>
# Prints the original prompt text for <description>, reads one answer and
# installs <package> on "y"/"yes".
# BUGFIX: the answer is now quoted — the original's unquoted
# [ $response = "y" ] raised a test syntax error when the user just hit
# Enter (empty reply) or typed more than one word. read -r keeps
# backslashes in the reply literal.
prompt_install() {
package="$1"
description="$2"
echo "Would you like to install ${description}? "
read -r response
if [ "$response" = "y" ] || [ "$response" = "yes" ]; then
sudo apt-get install "$package"
fi
}

prompt_install gcc "gcc"
prompt_install g++ "g++"
prompt_install libncurses5-dev "ncurses for c"
prompt_install golang "golang"
prompt_install mono-complete "mono-complete"
prompt_install python2.7 "python 2.7"
prompt_install python3.2 "python 3.2"
| true |
a10383e8bfacda7c74ef5dbce9e2ded1bfb6921d | Shell | gtristan/aboriginal | /root-filesystem.sh | UTF-8 | 2,902 | 4 | 4 | [] | no_license | #!/bin/bash
# Build a basic busybox+uClibc root filesystem for a given target.
# Requires a cross-compiler (or simple-cross-compiler) in the $PATH or in
# the build directory. In theory you can supply your own as long as the
# prefix- name is correct.
#
# NOTE(review): the helpers used below (load_target, check_prerequisite,
# build_section, dienow, dotprogress, path_search, is_in_list,
# create_stage_tarball) and variables such as $STAGE_DIR, $SOURCES, $WORK,
# $SRCDIR and $CC_PREFIX come from sources/include.sh / load_target —
# they are not defined in this file.
source sources/include.sh || exit 1
load_target "$1"
# Exit 0 (skip quietly) when this base architecture was already handled.
check_for_base_arch || exit 0
check_prerequisite "${CC_PREFIX}cc"
# Source control isn't good at storing empty directories, so create
# directory layout and apply permissions changes.
mkdir -p "$STAGE_DIR"/{tmp,proc,sys,dev,home,mnt,root} &&
chmod a+rwxt "$STAGE_DIR/tmp" || dienow
STAGE_USR="$STAGE_DIR/usr"
# Having lots of repeated locations at / and also under /usr is silly, so
# symlink them together. (The duplication happened back in the 1970's
# when Ken and Dennis ran out of space on their PDP-11's root disk and
# leaked the OS into the disk containing the user home directories. It's
# been mindlessly duplicated ever since.)
for i in bin sbin lib etc
do
mkdir -p "$STAGE_USR/$i" && ln -s "usr/$i" "$STAGE_DIR/$i" || dienow
done
# Copy qemu setup script and so on.
cp -r "$SOURCES/root-filesystem/." "$STAGE_USR/" &&
echo -e "CROSS_TARGET=$CROSS_TARGET\nKARCH=$KARCH" > \
"$STAGE_USR/src/host-info" &&
cp "$SRCDIR"/MANIFEST "$STAGE_USR/src" || dienow
# If user specified different files to put in the root filesystem, add them.
# (This overwrites existing files.)
# The tar pipe copies the overlay tree while preserving permissions/ownership.
if [ ! -z "$MY_ROOT_OVERLAY" ]
then
cd "$TOP"
tar -c -C "$MY_ROOT_OVERLAY" . | tar -x -C "$STAGE_DIR" || dienow
fi
# Build toybox
# NOTE(review): despite the comment, the next two lines build *busybox*
# (redirected into $STAGE_USR) and save its config; toybox follows.
STAGE_DIR="$STAGE_USR" build_section busybox
cp "$WORK"/config-busybox "$STAGE_USR"/src || dienow
build_section toybox
# Put statically and dynamically linked hello world programs on there for
# test purposes.
"${CC_PREFIX}cc" "${SOURCES}/root-filesystem/src/hello.c" -Os $CFLAGS \
-o "$STAGE_USR/bin/hello-dynamic" || dienow
# BUILD_STATIC != none also selects --static for the debug wrapper below.
if [ "$BUILD_STATIC" != none ]
then
"${CC_PREFIX}cc" "${SOURCES}/root-filesystem/src/hello.c" -Os $CFLAGS -static \
-o "$STAGE_USR/bin/hello-static" || dienow
STATIC=--static
else
STATIC=
fi
# Debug wrapper for use with /usr/src/record-commands.sh
"${CC_PREFIX}cc" "$SOURCES/toys/wrappy.c" -Os $CFLAGS $STATIC \
-o "$STAGE_USR/bin/record-commands-wrapper" || dienow
# Do we need shared libraries?
# (Only when toybox or busybox is not in the static-build list.)
if ! is_in_list toybox $BUILD_STATIC || ! is_in_list busybox $BUILD_STATIC
then
echo Copying compiler libraries...
mkdir -p "$STAGE_USR/lib" || dienow
# Ask the cross compiler where its libraries live and copy every *.so*
# into the target's /lib, printing a progress dot per file.
(path_search \
"$("${CC_PREFIX}cc" --print-search-dirs | sed -n 's/^libraries: =*//p')" \
"*.so*" 'cp -H "$DIR/$FILE" "$STAGE_USR/lib/$FILE"' \
|| dienow) | dotprogress
# Strip the copied libraries unless SKIP_STRIP is set.
[ -z "$SKIP_STRIP" ] &&
"${CC_PREFIX}strip" --strip-unneeded "$STAGE_USR"/lib/*.so
fi
# Clean up and package the result
[ -z "$SKIP_STRIP" ] &&
"${CC_PREFIX}strip" "$STAGE_USR"/{bin/*,sbin/*}
create_stage_tarball
# Color back to normal
echo -e "\e[0mBuild complete"
| true |
39e5e7eab88bb4f7363003c7316fbadc3fa79df3 | Shell | rstyczynski/wls-tools | /bin/killtree.sh | UTF-8 | 1,995 | 3.546875 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Kill a process tree: the given pid plus all of its descendants.
#
# Usage: killtree.sh <pid> [<depth> [<kill>]]
#   pid    root of the tree to kill
#   depth  internal recursion depth (callers omit this)
#   kill   internal; when the literal string "NO", pids are only listed,
#          not killed
#
# The script recurses by *sourcing itself* (". $0" below), so all state
# (pid, depth, the children[]/kill[] arrays, allChildren) is shared across
# recursion levels in one shell; the depth-indexed arrays emulate a call
# stack. The collected pids are printed on stdout. Set DEBUG=1 for tracing.
if [ "$DEBUG" = "" ]; then DEBUG=0; fi
if [ -z $1 ]; then
echo Can not continue for pid:$1
else
if [ $DEBUG -eq 1 ]; then
echo killtree started for: $1
fi
pid=$1
# First invocation (no depth argument): start the pid accumulator.
if [ -z "$2" ]; then
depth=0
allChildren=$pid
else
depth=$2
fi
if [ $depth -eq 0 ]; then
#! it returns just $pid !!! :)
# Re-read the root pid from the process table; the anchored grep matches
# the (space-padded) pid column. SunOS ps needs a literal-space pattern,
# GNU grep accepts \s.
case $(uname) in
SunOS)
allChildren=$(ps -eo pid,ppid,pid | grep "^ *$pid " | sed 's/^ *//g' | cut -d' ' -f1)
;;
*)
allChildren=$(ps -eo pid,ppid,pid | grep "^\s*$pid " | sed 's/^\s*//g' | cut -d' ' -f1)
;;
esac
fi
# Remember the kill/no-kill flag for this recursion level.
kill[$depth]=$3
#list processes started by $pid
#grep "\s*$pid" does not work in osx :( space must be used instead, but it does not work in linux
# Children are rows whose middle (ppid) column matches $pid; the second
# grep -v drops $pid's own row.
case $(uname) in
SunOS)
children[$depth]=$(ps -eo pid,ppid,pid | grep " $pid " | grep -v grep | grep -v "^ *$pid" | sed 's/^ *//g' | tr -s ' ' | cut -d' ' -f1)
;;
*)
children[$depth]=$(ps -eo pid,ppid,pid | grep " $pid " | grep -v grep | grep -v "^\s*$pid" | sed 's/^\s*//g' | tr -s ' ' | cut -d' ' -f1)
;;
esac
allChildren="$allChildren ${children[$depth]}"
if [ $DEBUG -eq 1 ]; then
echo pid: $pid
echo depth=$depth
echo children at $depth: ${children[$depth]}
fi
# Recurse into each child by sourcing this script in the current shell,
# restoring depth afterwards.
for child in ${children[$depth]}; do
if [ $DEBUG -eq 1 ]; then
echo child: $child
fi
kill=${kill[$depth]}
depth=$(( $depth + 1 ))
. $0 "$child" "$depth" "$kill" >/dev/null
depth=$(( $depth - 1 ))
done
if [ $DEBUG -eq 1 ]; then
echo exit:
echo depth=$depth
echo children at $depth: ${children[$depth]}
echo kill:${kill[$depth]}
fi
# Back at the root: kill everything collected unless the flag says "NO".
# NOTE(review): "2>&1 >/dev/null" silences stdout only; stderr still
# reaches the terminal. ">/dev/null 2>&1" was probably intended — confirm.
if [ $depth -eq 0 ] && [ "${kill[$depth]}" != "NO" ]; then
kill -9 $allChildren 2>&1 >/dev/null
allChildren=""
fi
echo $allChildren
fi
#rm $1.*
| true |
4154fec13286c62fbca4f0faef521d7baa4565df | Shell | sensu/sensu-go-has-contact-filter | /bin/build.sh | UTF-8 | 714 | 3.421875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Derive the release version: prefer the Travis tag, otherwise the short
# commit hash of HEAD.
if [[ "${TRAVIS_TAG}" ]]; then
VER=$TRAVIS_TAG
else
VER=$(git rev-parse --short HEAD)
fi
RELEASE="sensu-go-has-contact-filter_${VER}"
CHECKSUM_TXT="${RELEASE}_sha512-checksums.txt"
ARCHIVE="${RELEASE}.tar.gz"
# Rebuild the dist directory from scratch.
rm -rf dist
mkdir -p dist
# BUGFIX: the archive is named *.tar.gz, so gzip it (-z); the original
# wrote a plain uncompressed tar stream under a .tar.gz name.
tar -cz {lib,README.md} > "dist/${ARCHIVE}"
cd dist || exit
# Record and display the archive checksum alongside it.
sha512sum "${ARCHIVE}" > "${CHECKSUM_TXT}"
cat "${CHECKSUM_TXT}"
# Upload the artifacts only for tagged Travis builds with credentials set.
if [[ "${TRAVIS_TAG}" ]] && [[ "${TRAVIS_REPO_SLUG}" ]] && [[ "${GITHUB_TOKEN}" ]]; then
for f in "${ARCHIVE}" "${CHECKSUM_TXT}"; do
echo "uploading ${f}"
../bin/github-release-upload.sh github_api_token="${GITHUB_TOKEN}" repo_slug="${TRAVIS_REPO_SLUG}" tag="${TRAVIS_TAG}" filename="${f}"
done
fi | true |
56db898607cbe446079f98fa23b3366e7b5f0c33 | Shell | FauxFaux/debian-control | /s/sdic/sdic-edict_2.1.3-22_all/postinst | UTF-8 | 1,144 | 3.40625 | 3 | [] | no_license | #!/bin/bash -e
# Debian maintainer script: build the sdic dictionary files (and optional
# suffix-array indexes) from edict when the package is configured.
. /usr/share/debconf/confmodule
# Only act on "configure"; every abort-* invocation is a no-op.
case "$1" in
abort-upgrade|abort-remove|abort-deconfigure) exit 0 ;;
configure) ;; # continue below
*) exit 0 ;;
esac
MKARY=/usr/bin/mkary
EDICT=/usr/share/dict/edict.sdic
JDICT=/usr/share/dict/jedict.sdic
SCRIPT=/usr/share/sdic/edict.pl
# make_array <dictionary-file>
# Build a suffix-array index (<file>.ary) for a dictionary, unless the
# index already exists (non-empty) or mkary is not installed.
# NOTE(review): the chmod targets the dictionary itself, not the generated
# $1.ary — confirm intended.
make_array ()
{
test -s $1.ary && return 0
test -x $MKARY || return 0
echo -n "Building suffix array (This will take for a minute) ... "
$MKARY -q -b 10 $1 && chmod 644 $1 && echo done.
}
# generate eiwa dictionary
# db_get places its answer in $RET (debconf convention); the dictionary is
# only built when the user opted in and the file is still missing/empty.
db_get sdic-edict/make_en
if [ "$RET" = "true" ]; then
if [ ! -s $EDICT ]; then
echo -n "Building $EDICT ... " 1>&2
$SCRIPT --reverse /usr/share/edict/edict >$EDICT
chmod 644 $EDICT
echo "done." 1>&2
fi
db_get sdic-edict/en_array
if [ "$RET" = "true" ]; then
make_array $EDICT
fi
fi
# generate waei dictionary
# (Always built when missing; only its suffix array is optional.)
if [ ! -s $JDICT ]; then
echo -n "Building $JDICT ... " 1>&2
$SCRIPT /usr/share/edict/edict >$JDICT
chmod 644 $JDICT
echo "done." 1>&2
fi
db_get sdic-edict/jp_array
if [ "$RET" = "true" ]; then
make_array $JDICT
fi
| true |
87725c1084b0c0b6d41155d8aa21593da0d8dd32 | Shell | tomoyamachi/dotfiles | /shells/reference/tl | UTF-8 | 755 | 3.5 | 4 | [] | no_license | #!/bin/sh
# tl — tail a log file from one of several predefined log directories.
#
# Options:
#   -a  apache logs    -z  zend logs (default)
#   -m  mq logs        -s  sylog logs
#   -n  number of initial lines to show (default 100)
# The first positional argument is a filename prefix hint; the first file
# in the selected directory matching "<hint>*" is tailed. With no hint the
# default zend.log is tailed.
#
# BUGFIX: the original CMDNAME='basename $0' used single quotes, storing
# the literal text "basename $0" instead of running basename.
CMDNAME=$(basename "$0")
LINENUM=100
FOLDERPATH="${SY_LOG}zend/"
FILENAME="zend.log"
while getopts azmsn: OPT
do
case $OPT in
a) FOLDERPATH="${SY_LOG}apache/" ;;
z) ;;
m) FOLDERPATH="${SY_LOG}mq/" ;;
s) FOLDERPATH="${SY_LOG}sylog/" ;;
n) LINENUM=$OPTARG ;;
\?) echo "Usage : $0 [-asmz] [-n 表示行数] ファイル名のヒント" 1>&2
exit 1 ;;
esac
done
# POSIX arithmetic instead of forking expr.
shift $((OPTIND - 1))
# On this particular host the logs live under a fluentd path instead.
# $HOSTNAME is quoted so an unset value no longer breaks the test.
if [ "$HOSTNAME" = 'tools02.bkrs2' ]; then
FOLDERPATH="/home/sy/var/fluentd/logs/bkrs2/gree/links/"
fi
if [ "$1" ]; then
# Tail the first file whose name starts with the given hint
# (the glob below is deliberately unquoted so it expands).
for f in ${FOLDERPATH}${1}*
do
echo "$f"
tail -f -n "${LINENUM}" "$f"
break
done
else
tail -f -n "${LINENUM}" "${FOLDERPATH}${FILENAME}"
fi
| true |
e5d61e3338e0ec98315a6fa9fe31d705636f8771 | Shell | v-dobrev/occa | /tests/run_tests | UTF-8 | 2,020 | 3.375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# The MIT License (MIT)
#
# Copyright (c) 2014-2018 David Medina and Tim Warburton
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
set -e
# Locate test sources and the pre-built test binaries relative to this
# script; bin/ mirrors the src/ tree (src/foo/bar.cpp -> bin/foo/bar).
TEST_DIR=$(dirname "$0")
TEST_SRC_DIR=$(dirname "$0")/src
TEST_BIN_DIR=$(dirname "$0")/bin
# Relax AddressSanitizer options (shadow-gap protection, container-overflow
# detection) — presumably to avoid false positives in this CI setup.
export ASAN_OPTIONS
ASAN_OPTIONS+=':protect_shadow_gap=0'
ASAN_OPTIONS+=':detect_container_overflow=0'
# Width of the decorative header/footer lines.
HEADER_CHARS=80
tests_cpp=$(find "${TEST_SRC_DIR}" -type f -name '*.cpp')
# NOTE(review): relies on word-splitting of $tests_cpp, so test paths must
# not contain whitespace.
for test_cpp in ${tests_cpp}; do
# Map the source path to its binary and strip the .cpp extension.
test="${test_cpp/${TEST_SRC_DIR}/${TEST_BIN_DIR}}"
test="${test/\.cpp/}"
# Test output header
test_name="${test_cpp/${TEST_SRC_DIR}\//}"
chars=$(echo "${test_name}" | wc -c);
linechars=$((${HEADER_CHARS} - ${chars} - 6));
line=$(printf '%*s' ${linechars} | tr ' ' '-');
echo -e "\n---[ ${test_name} ]${line}";
# Make sure not to go over the log size
# (Travis terminates jobs with oversized logs, so cap output at 100 lines.)
if [ -n "${TRAVIS}" ]; then
"${test}" 2>&1 | head -n 100;
else
"${test}"
fi
# Test output footer
printf '%*s\n' ${HEADER_CHARS} | tr ' ' '=';
done
| true |
f5bc3ed744c5432bcb440e2ef528fd023d0c09d9 | Shell | neonopen/aquila_serving_module | /install_instr.sh | UTF-8 | 6,173 | 2.859375 | 3 | [
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | # #
# ARE YOU READY FOR THE BIGGEST ORDEAL OF YOUR LIFE?? HOPE SO! #
# #
# NOTE: This will require more than the default (8gb) amount of space afforded to new instances. Make sure you increase it!
# NOTE(review): this file is a step-by-step installation log meant to be
# executed manually section by section — it contains reboots and
# interactive installers and cannot be run unattended as one script.
# Install various packages
sudo apt-get update
sudo apt-get upgrade -y
sudo apt-get install -y build-essential curl libfreetype6-dev libpng12-dev libzmq3-dev pkg-config python-pip python-dev git python-numpy python-scipy swig software-properties-common python-dev default-jdk zip zlib1g-dev ipython autoconf libtool
# upgrade six & install gRPC systemwide
sudo pip install --upgrade six
# installing grpcio isn't sufficient if you intend on compiling new *_pb2.py files. You need to build from source.
git clone https://github.com/grpc/grpc.git
cd grpc
git submodule update --init
make -j4
make install
# now you can install grpcio
sudo pip install grpcio
# Blacklist Nouveau which has some kind of conflict with the nvidia driver
echo -e "blacklist nouveau\nblacklist lbm-nouveau\noptions nouveau modeset=0\nalias nouveau off\nalias lbm-nouveau off\n" | sudo tee /etc/modprobe.d/blacklist-nouveau.conf
echo options nouveau modeset=0 | sudo tee -a /etc/modprobe.d/nouveau-kms.conf
sudo update-initramfs -u
sudo reboot # Reboot (annoying you have to do this in 2016!)
# Some other annoying thing we have to do
sudo apt-get install -y linux-image-extra-virtual
sudo reboot # Not sure why this is needed
# Install latest Linux headers
sudo apt-get install -y linux-source linux-headers-`uname -r`
# Install CUDA 7.0 (note – don't use any other version)
wget http://developer.download.nvidia.com/compute/cuda/7_0/Prod/local_installers/cuda_7.0.28_linux.run
chmod +x cuda_7.0.28_linux.run
./cuda_7.0.28_linux.run -extract=`pwd`/nvidia_installers
cd nvidia_installers
sudo ./NVIDIA-Linux-x86_64-346.46.run # accept everything it wants to do
sudo modprobe nvidia
sudo ./cuda-linux64-rel-7.0.28-19326674.run # accept the EULA, accept the defaults
cd
# transfer cuDNN over from elsewhere (you can't download it directly)
tar -xzf cudnn-6.5-linux-x64-v2.tgz
sudo cp cudnn-6.5-linux-x64-v2/libcudnn* /usr/local/cuda/lib64
sudo cp cudnn-6.5-linux-x64-v2/cudnn.h /usr/local/cuda/include/
# OPTIONAL
# To increase free space, remove cuda install file & nvidia_installers
cd
rm -v cuda_7.0.28_linux.run
rm -rfv nvidia_installers/
# update to java 8 -- is this the best way to do this?
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java8-set-default
# this helps deal with the very large disk space requirements
# of bazel and tensorflow
# NOTE(review): this cd runs before the directory is created by the mkdir
# below — the order of the next two lines looks reversed; confirm.
cd /mnt/tmp
sudo mkdir /mnt/tmp
sudo chmod 777 /mnt/tmp
# WARNING: destructive — removes the existing /tmp before symlinking it
# to /mnt/tmp.
sudo rm -rf /tmp
sudo ln -s /mnt/tmp /tmp
# ^^^ might not be necessary
# install Bazel
git clone https://github.com/bazelbuild/bazel.git
cd bazel
git checkout tags/0.2.1 # note you can check the tags with git tag -l, you need at least 0.2.0
./compile.sh
sudo cp output/bazel /usr/bin
# more CUDA stuff - edit ~/.bashrc to put this in!
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/cuda/lib64"
export CUDA_HOME=/usr/local/cuda
# install tensorflow / tensorflow serving
cd
git clone --recurse-submodules https://github.com/neon-lab/aquila_serving.git
cd aquila_serving/tensorflow
# configure tensorflow; unofficial settings are necessary given the GRID compute cap of 3.0
TF_UNOFFICIAL_SETTING=1 ./configure # accept the defaults; build with gpu support; set the compute capacity to 3.0
cd ..
# clone aquila
cd ~
git clone https://github.com/neon-lab/aquila.git
# checkout whichever branch you're using
git checkout some_branch
# NOTES:
# this is only necessary if you will be bazel-build'ing new models, since you have to protoc their compilers, too.
# while they install protocol buffers for you, you need protocol buffer compiler > 3.0.0 alpha so let's get that too (blarg)
cd ~
wget https://github.com/google/protobuf/releases/download/v3.0.0-beta-2/protobuf-python-3.0.0-beta-2.tar.gz
tar xvzf protobuf-python-3.0.0-beta-2.tar.gz
cd protobuf-3.0.0-beta-2
./configure
make
sudo make install # sudo appears to be required
# it appears as though the default install location is not in the LD library path, so
# modify your bashrc again with:
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib"
# then source it
source ~/.bashrc
# assemble Aquila's *_pb2.py files
# NOTES:
# You may have to repeat this if you're going to be instantiating new .proto files.
# navigate to the directory which contains the .proto files
protoc -I ./ --python_out=. --grpc_out=. --plugin=protoc-gen-grpc=`which grpc_python_plugin` ./aquila_inference.proto
# Build TF-Serving
bazel build tensorflow_serving/... # build the whole source tree - this will take a bit
# convert tensorflow into a pip repo
cd tensorflow
bazel build -c opt --config=cuda //tensorflow/tools/pip_package:build_pip_package
bazel-bin/tensorflow/tools/pip_package/build_pip_package /tmp/tensorflow_pkg
# install it with pip for some reason
sudo pip install /tmp/tensorflow_pkg/tensorflow-0.7.1-py2-none-linux_x86_64.whl
# to export a model - note, the model directory structure has to be the same as it is when the model was trained!
cd ~/aquila_serving
bazel-bin/tensorflow_serving/aquila/aquila_export --checkpoint_dir=/data/aquila_snaps_lowreg --export_dir=/home/ubuntu/exported_model/test
# to run the server
cd ~
aquila_serving/bazel-bin/tensorflow_serving/aquila/aquila_inference --port=9000 exported_model/test &> aquila_log &
# test the model
time aquila_serving/bazel-bin/tensorflow_serving/aquila/aquila_client --image "lena30.jpg"
# aquila:
# real 0m13.621s
# user 0m0.994s
# sys 0m0.161s
aquila_serving/bazel-bin/tensorflow_serving/example/inception_inference --port=9000 inception-export &> inception_log &
time aquila_serving/bazel-bin/tensorflow_serving/example/inception_client --server=localhost:9000 --image "lena30.jpg"
# inception:
# real 0m9.061s
# user 0m0.936s
# sys 0m0.120s
# also:
# 6.125876 : cloak
# 5.997998 : brassiere, bra, bandeau
# 5.059655 : bonnet, poke bonnet
# 5.021771 : maillot
# 4.814725 : bath towel
| true |
9cfec9f5bb40704ce8ba62282637397fe3aced1e | Shell | devschooner/schooner | /junk/misc/sch-userlog | UTF-8 | 2,531 | 3.75 | 4 | [] | no_license | #!/bin/dash
#=======================================================================
# File: sch-userlog
# Created: 13/07/2015
# Purpose: Interface to schooner user logfile.
# Copyright: © Bill Cane
# Email:
#
# Licence: This program is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or(at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#=======================================================================
# sch-constants presumably provides abort_if_root, exit_stat, $THIS_SCRIPT
# and $USER_LOG used below — confirm.
. /usr/bin/sch-constants
abort_if_root
#=======================================================================
usage() {
cat << EOF
NAME
$THIS_SCRIPT - interface to schooner user logfile
SYNOPSIS
$THIS_SCRIPT [OPTION]... [ARG]...
DESCRIPTION
-e <log-entry> free format text
-h help: display usage.
-h (any other options combined with -h will be ignored)
EXIT STATUS
0: if made log entry
1: if failed to make log entry
USAGE
$THIS_SCRIPT -e 'oops just made an error'
EOF
}
########## initializations #####################################################
# e acts as a flag: 1 until a -e option supplies a log entry.
LogEntry=''; e=1; Reason=''
########## functions ###########################################################
abend_no_log () { # $1=line-no $2=error-code $3=error-message
# can't use the usual 'abend' function because it would call this script
# and endlessly loop. So errors in this script won't be logged.
sch-dlg-error -r "$THIS_SCRIPT" -l "$1" -c "$2" -m "$3"
exit 9
}
########## main ################################################################
while getopts ":e:h" Option
do
case "$Option" in
e ) e=0; LogEntry="$OPTARG" ;;
h ) usage; exit 0 ;;
esac
done
[ "$e" = 0 ] || abend_no_log '??' '86421' 'missing option or argument -e'
# Make sure the log exists, then verify it is a readable/writable file.
# NOTE(review): LogEntry is captured from -e but is never appended to
# $USER_LOG anywhere in this file — either sch-constants handles the write
# elsewhere or the append is missing; confirm.
touch "$USER_LOG"
Reason=$(sch-file-exists -s "$USER_LOG" -rwf)
exit_stat 'sch-file-exists' $? '84621'
# NOTE(review): this $? tests exit_stat's status, not the status of
# sch-file-exists itself — confirm intended.
[ "$?" = 0 ] || abend_no_log '??' '68143' "cannot access log - $USER_LOG is $Reason"
exit 0
| true |
4dacbe2186a3d7c1009d0988df654e3e7e2d3c47 | Shell | ks-nid/ksnid | /UpdateKernelByUSB/backupKernelImages.sh | UTF-8 | 3,391 | 3.859375 | 4 | [] | no_license | #!/bin/bash
# Copyright (c) 2009-2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Script to update the kernel on a live running ChromiumOS instance.
# (This variant repacks the locally built kernel image and modules and
# archives them as a backup tarball; see main() at the bottom.)
SCRIPT_ROOT=$(dirname $(readlink -f "$0"))
# common.sh / remote_access.sh supply the helpers used below
# (restart_in_chroot_if_needed, FLAGS, switch_to_strict_mode,
# info/warn/error) plus shflags variables such as $FLAGS_arch.
. "${SCRIPT_ROOT}/common.sh" || exit 1
. "${SCRIPT_ROOT}/remote_access.sh" || exit 1
# Script must be run inside the chroot.
restart_in_chroot_if_needed "$@"
ORIG_ARGS=("$@")
# Parse command line.
FLAGS "$@" || exit 1
eval set -- "${FLAGS_ARGV}"
# Only now can we die on error. shflags functions leak non-zero error codes,
# so will die prematurely if 'switch_to_strict_mode' is specified before now.
switch_to_strict_mode
# learn_arch
# Populate FLAGS_arch (arm / arm64 / x86, lowercased) when the user did not
# pass it explicitly, by scraping CONFIG_<ARCH>=y out of the board's kernel
# config files; abort when no arch can be determined.
learn_arch() {
[ -n "${FLAGS_arch}" ] && return
FLAGS_arch=$(sed -n -E 's/^CONFIG_(ARM|ARM64|X86)=y/\1/p' \
/build/"${BOARD}"/boot/config-* | \
uniq | awk '{print tolower($0)}')
if [ -z "${FLAGS_arch}" ]; then
error "Arch required"
exit 1
fi
info "Target reports arch is ${FLAGS_arch}"
}
# make_local_kernelimage <dirname>
# Pack and sign the locally built kernel with the public dev keys into
# ./${BOARD}/<dirname>/new_kern.bin. <dirname> is the timestamp chosen by
# main(); the body also reads main's dynamically scoped $now, which holds
# the same value.
make_local_kernelimage() {
local bootloader_path
local kernel_image
local config_path="${SRC_ROOT}/build/images/${BOARD}/latest/config.txt"
# ARM boards use a uImage plus a bootloader stub; everything else uses
# the EFI bootstub and a vmlinuz.
if [[ "${FLAGS_arch}" == "arm" || "${FLAGS_arch}" == "arm64" ]]; then
# NOTE(review): "name" is not declared local, so it leaks globally.
name="bootloader.bin"
bootloader_path="${SRC_ROOT}/build/images/${BOARD}/latest/${name}"
# If there is no local bootloader stub, create a dummy file. This matches
# build_kernel_image.sh. If we wanted to be super paranoid, we could copy
# and extract it from the remote image, if it had one.
if [[ ! -e "${bootloader_path}" ]]; then
warn "Bootloader does not exist; creating a stub"
bootloader_path="${TMP}/${name}"
truncate -s 512 "${bootloader_path}"
fi
kernel_image="/build/${BOARD}/boot/vmlinux.uimg"
else
bootloader_path="/lib64/bootstub/bootstub.efi"
kernel_image="/build/${BOARD}/boot/vmlinuz"
fi
# Refuse to overwrite an existing backup directory.
if [ -d "${BOARD}/$1" ];then
error "Directory ${BOARD}/$1 exists, files no backup succefully"
exit 1
else
mkdir -p ${BOARD}/${now}
fi
vbutil_kernel --pack ./${BOARD}/$1/new_kern.bin \
--keyblock /usr/share/vboot/devkeys/kernel.keyblock \
--signprivate /usr/share/vboot/devkeys/kernel_data_key.vbprivk \
--version 1 \
--config ${config_path} \
--bootloader "${bootloader_path}" \
--vmlinuz "${kernel_image}" \
--arch "${FLAGS_arch}"
info "kernel image backup to ${BOARD}/$1 done"
#exit 1
}
# copy_local_kernelmodules <dirname>
# Copy the board's kernel modules tree into ./${BOARD}/<dirname>/modules;
# warn and skip when the board has no modules directory.
# NOTE(review): the inline comment on basedir below describes an unused
# variable; only $1 (the backup dirname) is actually consumed.
copy_local_kernelmodules() {
local basedir="$1" # rootfs directory (could be in /tmp) or empty string
local modules_dir=/build/"${BOARD}"/lib/modules/
if [ ! -d "${modules_dir}" ]; then
warn "No modules. Skipping."
return
fi
info "Copying modules "
cp -a "${modules_dir}" ${BOARD}/$1
info "kernel modules backup to ${BOARD}/$1 done"
}
# tarFiles
# Bundle the freshly created kernel image and modules into a gzip tarball
# named by main's dynamically scoped $backupFileName; skipped when the
# tarball already exists. Uses a fixed scratch directory (tmp1234) in the
# current working directory.
tarFiles(){
if [ ! -f ${backupFileName} ]; then
rm -rf tmp1234
mkdir tmp1234
cp -a ./${BOARD}/${now}/modules ./${BOARD}/${now}/new_kern.bin tmp1234
tar -zcvf ${backupFileName} tmp1234
rm -rf tmp1234
return
else
warn "file exists! Skipping."
fi
}
# main
# Create a timestamped backup directory under ./${BOARD}, pack/sign the
# kernel, copy the modules, then archive both.
main() {
local now="$(date +'%m%d%H%M')"
# NOTE(review): the dated tarball name is commented out, so the backup
# always lands in ${BOARD}/a.tar.gz and a second run is skipped by
# tarFiles — confirm intended.
#local backupFileName="${BOARD}/${BOARD}${now}.tar.gz"
local backupFileName="${BOARD}/a.tar.gz"
learn_arch
make_local_kernelimage ${now}
copy_local_kernelmodules ${now}
tarFiles
}
main "$@"
| true |
318b07357c5b7b773f2bb47078f01411fb9e9f3f | Shell | t00sh/misc | /crypto/check_dnssec_validation.sh | UTF-8 | 1,284 | 3.984375 | 4 | [] | no_license | #!/bin/sh
# Check the DNSSEC validation for a given DNS resolver
# Author: TOSH
#
# Queries two test zones: one with a valid DNSSEC signature (sigok) and one
# with a deliberately broken signature (sigfail). A validating resolver
# returns NOERROR for the first and SERVFAIL for the second.
#
# Rewritten to be POSIX sh compatible: the original used the bash-only
# [[ ]] construct and backtick-`which` under a #!/bin/sh shebang, which
# fails on dash.
set -e

# check_program <name> -- abort (exit 1) unless <name> is on PATH.
check_program() {
if ! command -v "$1" >/dev/null 2>&1
then
echo "$1 not installed !"
exit 1
fi
}

# get_dns_status <domain> [resolver]
# Prints the DNS rcode (e.g. NOERROR, SERVFAIL) reported by dig, or the
# literal TIMEOUT when the query times out; prints nothing when dig
# produced no status line. The "|| true" keeps set -e from killing the
# script when grep finds no match.
get_dns_status() {
if [ -n "$2" ]
then
STATUS=$(dig "$1" "@$2" +time=2 +tries=2 | grep "status\|timed out") || true
echo "$STATUS" | perl -ne 'print $1 if m/status: (\S+),/; print "TIMEOUT" if m/connection timed out/'
else
# BUGFIX: the original grepped for "time out" in this branch, which
# never matches dig's "connection timed out" message, so timeouts
# against the default resolver were misreported as missing validation.
STATUS=$(dig "$1" +time=2 +tries=2 | grep "status\|timed out") || true
echo "$STATUS" | perl -ne 'print $1 if m/status: (\S+),/; print "TIMEOUT" if m/connection timed out/'
fi
}

check_program perl
check_program dig

if [ "$1" = "-h" ] || [ "$1" = "--help" ]
then
echo "Usages :"
echo "- $0"
echo "- $0 <resolver_ip>"
exit 1
fi

# Known-good and known-broken DNSSEC test zones.
DOMAIN_SIG_OK="sigok.verteiltesysteme.net"
DOMAIN_SIG_KO="sigfail.verteiltesysteme.net"

SERV_STATUS_OK=$(get_dns_status "$DOMAIN_SIG_OK" "$1")
SERV_STATUS_KO=$(get_dns_status "$DOMAIN_SIG_KO" "$1")

if [ "$SERV_STATUS_OK" = "TIMEOUT" ] || [ "$SERV_STATUS_KO" = "TIMEOUT" ]
then
echo "[-] DNS query timed out"
exit 1
fi

if [ "$SERV_STATUS_OK" = "NOERROR" ] && [ "$SERV_STATUS_KO" = "SERVFAIL" ]
then
echo "[+] Your DNS resolver have dnssec validation"
else
echo "[-] Your DNS resolver DO NOT HAVE dnssec validation !!!"
fi
exit 0
| true |
5fad64c7b13905f9c21e430c7858ed4d004bc7da | Shell | openshift/release | /ci-operator/step-registry/gather/proxy/gather-proxy-commands.sh | UTF-8 | 857 | 3.1875 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
set -o nounset
set -o errexit
set -o pipefail
# On TERM (step timeout/abort), forward the signal to any children we
# spawned and wait for them so output is flushed.
trap 'CHILDREN=$(jobs -p); if test -n "${CHILDREN}"; then kill ${CHILDREN} && wait; fi' TERM
# collect logs from the proxy here
# Only runs when a previous step recorded the proxy host's IP.
if [ -f "${SHARED_DIR}/proxyip" ]; then
proxy_ip="$(cat "${SHARED_DIR}/proxyip")"
# Default SSH user; earlier steps may record a bastion-specific user.
ssh_user="core"
if [ -s "${SHARED_DIR}/bastion_ssh_user" ]; then
ssh_user="$(< "${SHARED_DIR}/bastion_ssh_user" )"
fi
# If the current UID has no passwd entry (whoami fails — likely because
# the CI container runs under an arbitrary UID), append one so ssh can
# resolve the current user.
if ! whoami &> /dev/null; then
if [ -w /etc/passwd ]; then
echo "${USER_NAME:-default}:x:$(id -u):0:${USER_NAME:-default} user:${HOME}:/sbin/nologin" >> /etc/passwd
fi
fi
# Load the cluster's private key into an agent and pull the squid journal
# from the proxy host into the artifacts directory.
eval "$(ssh-agent)"
ssh-add "${CLUSTER_PROFILE_DIR}/ssh-privatekey"
ssh -A -o PreferredAuthentications=publickey -o StrictHostKeyChecking=false -o UserKnownHostsFile=/dev/null "${ssh_user}@${proxy_ip}" 'journalctl -u squid' > "${ARTIFACT_DIR}/squid.service"
fi | true |
f403927726f6a7ce38ec956de417bcd25a8c20d9 | Shell | gfoidl/trx2junit | /verify-xml.sh | UTF-8 | 1,325 | 3.984375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
#
## Script for verifying an xml-file against the junit.xsd
#
# Arguments:
# schema the xsd to use for verification
# xml the xml-file to verify
#
# Functions (sorted alphabetically):
# main entry-point
#
# Exit-codes:
# 1 xsd-file does not exist
# 2 xml-file does not exist
# 200 no args given for script, help is displayed and exited
# $? exit-code from xmllint is returned unmodified
#------------------------------------------------------------------------------
set -e
#------------------------------------------------------------------------------
# Print usage information.
help() {
echo "verify script"
echo ""
echo "Arguments:"
echo " schema the xsd to use for verification"
echo " xml the xml-file to verify"
}
#------------------------------------------------------------------------------
# main <schema> <xml>
# Validate <xml> against <schema> with xmllint. Exits 1/2 when either file
# is missing; otherwise xmllint's own exit code propagates via set -e.
main() {
if [[ ! -f "$1" ]]; then
echo "$1 schema does not exist";
exit 1
fi
if [[ ! -f "$2" ]]; then
echo "$2 test-results do not exist"
exit 2
fi
xmllint --noout --schema "$1" "$2"
}
#------------------------------------------------------------------------------
if [[ $# -lt 2 ]]; then
help
exit 200
fi
# BUGFIX: pass "$@" instead of unquoted $* so file names containing
# whitespace reach main() as single arguments.
main "$@"
| true |
98cb84bbacb851bb5ec1c35a0de77701a3d77532 | Shell | ericbottard/riff | /ci/fats.sh | UTF-8 | 2,703 | 3.53125 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# CI driver: install the riff CLI, stand up a riff/Knative system on a test
# cluster via FATS, then run the "uppercase" sample functions against it.
#
# $1 (mode): "full" downloads a pre-built riff CLI for this commit;
# anything else builds it from the working tree with make.
set -o errexit
set -o nounset
set -o pipefail
mode=${1:-full}
version=`cat VERSION`
commit=$(git rev-parse HEAD)
# fetch FATS scripts
fats_dir=`dirname "${BASH_SOURCE[0]}"`/fats
fats_repo="projectriff/fats"
fats_refspec=2234005739491f39fabaa75098b19c6d521af324 # projectriff/fats master as of 2019-04-09
source `dirname "${BASH_SOURCE[0]}"`/fats-fetch.sh $fats_dir $fats_refspec $fats_repo
# .util.sh provides helpers used below (travis_fold, wait_pod_selector_ready,
# fats_image_repo, ...) and presumably sets $machine (OS detection) — confirm.
source $fats_dir/.util.sh
$fats_dir/install.sh kubectl
$fats_dir/install.sh kail
# install riff-cli
travis_fold start install-riff
echo "Installing riff"
if [ "$mode" = "full" ]; then
# Download the CLI built for this exact version+commit; Windows (MinGw)
# gets a zip, everything else a tarball.
if [ "$machine" == "MinGw" ]; then
curl https://storage.googleapis.com/projectriff/riff-cli/releases/builds/v${version}-${commit}/riff-windows-amd64.zip > riff.zip
unzip riff.zip -d /usr/bin/
rm riff.zip
else
curl https://storage.googleapis.com/projectriff/riff-cli/releases/builds/v${version}-${commit}/riff-linux-amd64.tgz | tar xz
chmod +x riff
sudo cp riff /usr/bin/riff
fi
else
# Non-"full" mode: build the CLI from source instead of downloading.
make build
sudo cp riff /usr/bin/riff
fi
travis_fold end install-riff
# start FATS
source $fats_dir/start.sh
# install riff system
travis_fold start system-install
echo "Installing riff system"
riff system install $SYSTEM_INSTALL_FLAGS
# health checks
echo "Checking for ready pods"
wait_pod_selector_ready 'app=controller' 'knative-serving'
wait_pod_selector_ready 'app=webhook' 'knative-serving'
wait_pod_selector_ready 'app=build-controller' 'knative-build'
wait_pod_selector_ready 'app=build-webhook' 'knative-build'
echo "Checking for ready ingress"
wait_for_ingress_ready 'istio-ingressgateway' 'istio-system'
# setup namespace
kubectl create namespace $NAMESPACE
fats_create_push_credentials $NAMESPACE
riff namespace init $NAMESPACE $NAMESPACE_INIT_FLAGS
travis_fold end system-install
# run test functions
source $fats_dir/functions/helpers.sh
# Cluster builds: each sample is built from the FATS repo at the pinned
# refspec and must uppercase "riff" to "RIFF".
for test in java java-boot node npm command; do
path=${fats_dir}/functions/uppercase/${test}
function_name=fats-cluster-uppercase-${test}
image=$(fats_image_repo ${function_name})
create_args="--git-repo https://github.com/${fats_repo}.git --git-revision ${fats_refspec} --sub-path functions/uppercase/${test}"
input_data=riff
expected_data=RIFF
run_function $path $function_name $image "${create_args}" $input_data $expected_data
done
# Local-path builds are skipped on Windows (MinGw).
if [ "$machine" != "MinGw" ]; then
for test in node command; do
path=${fats_dir}/functions/uppercase/${test}
function_name=fats-local-uppercase-${test}
image=$(fats_image_repo ${function_name})
create_args="--local-path ."
input_data=riff
expected_data=RIFF
run_function $path $function_name $image "${create_args}" $input_data $expected_data
done
fi
| true |
d15f31276709b78e22fad18ced6501962d1417eb | Shell | JCSDA/jedi-stack | /buildscripts/libs/build_cgal.sh | UTF-8 | 2,464 | 3.78125 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# © Copyright 2020 UCAR
# This software is licensed under the terms of the Apache Licence Version 2.0 which can be obtained at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# CGAL Library used by Atlas
# https://www.cgal.org
#
# WARNING:
# Dependencies include the gnu gmp and mpfr libraries
# Also, if you are using gnu compilers prior to 9.0, then
# you also need to install the boost.thread libraries.
# These are often availble from package managers such as
# apt, yum, or brew. For example, for debian systems:
#
# sudo apt-get update
# sudo apt-get install libgmp-dev
# sudo apt-get install libmpfr-dev
# sudo apt-get install libboost-thread-dev
#
#
set -ex
name="cgal"
# $1: CGAL version to download and build.
version=$1
# this is only needed if MAKE_CHECK is enabled
# When building inside an environment-modules stack ($MODULES true), load
# the toolchain modules and install under the modules prefix; otherwise
# install under CGAL_ROOT (default /usr/local).
if $MODULES; then
set +x
source $MODULESHOME/init/bash
module load jedi-$JEDI_COMPILER
module try-load cmake
module try-load boost-headers
module try-load zlib
module try-load eigen
module list
set -x
prefix="${PREFIX:-"/opt/modules"}/core/$name/$version"
if [[ -d $prefix ]]; then
# NOTE(review): the "exit 1" below runs in a (subshell), so it does not
# terminate the script by itself; under set -e the resulting non-zero
# status of the || branch aborts the script anyway.
[[ $OVERWRITE =~ [yYtT] ]] && ( echo "WARNING: $prefix EXISTS: OVERWRITING!";$SUDO rm -rf $prefix ) \
|| ( echo "WARNING: $prefix EXISTS, SKIPPING"; exit 1 )
fi
else
prefix=${CGAL_ROOT:-"/usr/local"}
fi
cd $JEDI_STACK_ROOT/${PKGDIR:-"pkg"}
software="CGAL-"$version
url="https://github.com/CGAL/cgal/releases/download/v$version/$software-library.tar.xz"
# Download and unpack only when the source tree is not already present.
[[ -d $software ]] || ( $WGET $url; tar -xf $software-library.tar.xz )
[[ ${DOWNLOAD_ONLY} =~ [yYtT] ]] && exit 0
[[ -d $software ]] && cd $software || ( echo "$software does not exist, ABORT!"; exit 1 )
# Apply a patch to fix CMake intel compiler flags.
# Remove when possible or update as needed.
if [[ $version == "5.0.4" ]]; then
patch --merge -p1 < ${JEDI_STACK_ROOT}/buildscripts/libs/patches/${software}-intel-fpmodel-flag-fix.patch
else
echo "Error: Must generate new patch for unsupported CGal version: $version"
exit 1
fi
# Fresh out-of-source build; Qt5 off, GMP disabled, Eigen from the stack.
[[ -d _build ]] && rm -rf _build
cmake -H. -B_build -DCMAKE_INSTALL_PREFIX=$prefix -DWITH_CGAL_Qt5=0 -DCGAL_DISABLE_GMP=1 -DEIGEN3_INCLUDE_DIR=$EIGEN_ROOT/include -DCMAKE_INSTALL_LIBDIR=lib
cd _build && VERBOSE=$MAKE_VERBOSE $SUDO make install
# generate modulefile from template
$MODULES && update_modules core $name $version \
|| echo $name $version >> ${JEDI_STACK_ROOT}/jedi-stack-contents.log
| true |
ed40476b759650f8f79e86f3b0cefef7637c3a39 | Shell | francoisluus/tensorboard-supervise | /tensorboard/pip_package/pip_smoke_test.sh | UTF-8 | 5,516 | 3.59375 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Smoke test for building, installing and basic usage of tensorboard pip package.
#
# Usage:
# pip_smoke_test.sh [--python3] [--retries <NUM_RETRIES>] [--port <PORT>]
#
# Note:
# * This script requires virtualenv.
set -eu
# Abort the script: emit the given message on stderr, then exit with status 1.
die() {
  printf '%s\n' "$1" >&2
  exit 1
}
PY_VERSION=2
TEST_PORT=6006
NUM_RETRIES=20
while [[ "$#" -gt 0 ]]; do
if [[ "$1" == "--python3" ]]; then
PY_VERSION=3
elif [[ "$1" == "--retries" ]]; then
NUM_RETRIES="$2"
shift
elif [[ "$1" == "--port" ]]; then
TEST_PORT="$2"
shift
else
die "ERROR: Unrecognized argument $1"
fi
shift
done
echo
echo "=== Performing smoke test of tensorboard PIP package ==="
echo "Settings:"
echo " PY_VERSION=${PY_VERSION}"
echo " TEST_PORT=${TEST_PORT}"
echo " NUM_RETRIES=${NUM_RETRIES}"
echo
# Check that virtualenv is installed.
if [[ -z "$(which virtualenv)" ]]; then
die "ERROR: virtualenv is required, but does not appear to be installed."
fi
PIP_TMP_DIR=$(mktemp -d --suffix _tensorboard)
echo
echo "Building tensorboard pip package in directory: ${PIP_TMP_DIR}"
echo
cd "$(git -C "$(dirname "$0")" rev-parse --show-toplevel)"
bazel build tensorboard/pip_package:build_pip_package
# Create virtualenv directory, cleanly (i.e., no --system-site-packages).
VENV_TMP_DIR=$(mktemp -d --suffix _tensorboard_venv)
echo
echo "Creating virtualenv directory at: ${VENV_TMP_DIR}"
echo
if [[ "${PY_VERSION}" == 2 ]]; then
virtualenv -p python "${VENV_TMP_DIR}"
TF_NIGHTLY_URL='https://ci.tensorflow.org/view/Nightly/job/nightly-matrix-cpu/TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON2,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-1.head-cp27-none-linux_x86_64.whl'
elif [[ "${PY_VERSION}" == 3 ]]; then
virtualenv -p python3 "${VENV_TMP_DIR}"
TF_NIGHTLY_URL='https://ci.tensorflow.org/view/Nightly/job/nightly-matrix-cpu/TF_BUILD_IS_OPT=OPT,TF_BUILD_IS_PIP=PIP,TF_BUILD_PYTHON_VERSION=PYTHON3,label=cpu-slave/lastSuccessfulBuild/artifact/pip_test/whl/tensorflow-1.head-cp34-cp34m-linux_x86_64.whl'
fi
echo
echo "Activating virtualenv at ${VENV_TMP_DIR}"
echo
export VIRTUAL_ENV="${VENV_TMP_DIR}"
export PATH="${VENV_TMP_DIR}/bin:${PATH}"
unset PYTHON_HOME
echo
echo "Installing and upgrading pip packages required for wheel building"
echo
pip install --upgrade pip setuptools wheel
echo
echo "Creating tensorboard pip package in directory: ${PIP_TMP_DIR}"
echo
bazel-bin/tensorboard/pip_package/build_pip_package "${PIP_TMP_DIR}"
# Install the dependency, tensorflow, first.
echo
echo "Installing nightly tensorflow pip package."
echo
pip install "${TF_NIGHTLY_URL}"
echo
echo "Installing the just-built tensorboard pip package"
echo
if [[ "${PY_VERSION}" == 2 ]]; then
pip install "${PIP_TMP_DIR}"/tensorflow_tensorboard*-py2-*.whl
elif [[ "${PY_VERSION}" == 3 ]]; then
pip install "${PIP_TMP_DIR}"/tensorflow_tensorboard*-py3-*.whl
fi
# Check tensorboard binary path.
TB_BIN_PATH=$(which tensorboard)
if [[ -z ${TB_BIN_PATH} ]]; then
die "ERROR: Cannot find tensorboard binary path after installing tensorboard pip package."
fi
TMP_LOGDIR=$(mktemp -d --suffix _tensorboard_logdir)
tensorboard --port="${TEST_PORT}" --logdir="${TMP_LOGDIR}" &
TB_PID=$!
echo
echo "tensorboard binary should be running at pid ${TB_PID}"
echo
test_access_url() {
# Attempt to fetch the given URL until an HTTP 200 status is received or
# $NUM_RETRIES attempts have been made.
#
# Retries occur with a 1-second delay.
#
# Global variable(s) used: ${NUM_RETRIES}.
#
# Usage:
# test_access_url <URL>
# E.g.,
# test_access_url http://localhost:6006/
#
# Returns: 0 on a 200 response; 1 (with a message on stderr) after
# exhausting the retries.
local test_url="$1"
echo
echo "Sending test HTTP requests at URL: ${test_url} (${NUM_RETRIES} retries)"
echo
local retry_counter=0
while [[ "${retry_counter}" -lt "${NUM_RETRIES}" ]]; do
# curl -Is issues a HEAD request; field 2 of the first response-header
# line is the HTTP status code.
local status_code="$(curl -Is "${test_url}" | head -1 | cut -d ' ' -f 2)"
if [[ "${status_code}" == 200 ]]; then
echo
echo "Request to ${test_url} succeeded (200)!"
echo
return
else
# ":" keeps set -e from aborting when the post-increment expression
# evaluates to 0 on the first iteration.
: $(( retry_counter++ ))
echo "Request to ${test_url} failed. Will retry in 1 second..."
sleep 1
fi
done
printf >&2 \
"ERROR: Failed to get 200 response status from %s in %d retries.\n" \
"${test_url}" "${NUM_RETRIES}"
return 1
}
TEST_URL_FAILED=0
test_access_url "http://localhost:${TEST_PORT}/data/logdir" || TEST_URL_FAILED=1
test_access_url "http://localhost:${TEST_PORT}" || TEST_URL_FAILED=1
echo
echo "Terminating tensorboard binary at pid ${TB_PID}"
echo
kill -9 "${TB_PID}"
echo
if [[ "${TEST_URL_FAILED}" == 0 ]]; then
# Clean up.
rm -r "${VENV_TMP_DIR}"
rm -r "${PIP_TMP_DIR}"
rm -r "${TMP_LOGDIR}"
echo "=== Smoke test of tensorboard PIP package PASSED ==="
else
die "=== Smoke test of tensorboard PIP package FAILED ==="
fi
| true |
4ee51c287d278d7433ed8cbc5a40aa78e3aa82f7 | Shell | chernic/Cacti | /SrcDownload/cchead.sh | UTF-8 | 1,143 | 3.796875 | 4 | [] | no_license | #!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# Blog: http://blog.linuxeye.com
LABLE3='\n\n### Cc@FS: '
LABLE2='\n## Cc@FS: '
LABLE1='# Cc@FS: '
# Download the tarball named by the global $src_url into the current
# directory (resuming partial downloads, TLS verification disabled) unless a
# non-empty local copy already exists.
# NOTE(review): on failure this runs "kill -9 $$", which also kills an
# interactive shell if this file is sourced -- confirm that is intended
# rather than a plain "exit 1".
Download_src()
{
[ -s "${src_url##*/}" ] && echo "${src_url##*/} found" || wget -c --no-check-certificate $src_url
if [ ! -e "${src_url##*/}" ];then
echo -e "\033[31m${src_url##*/} download failed, Please contact the author! \033[0m"
kill -9 $$
fi
}
# Interactively ask whether to continue, looping until a valid answer is
# given: "y" returns and the caller proceeds, "n" terminates the script
# with status 0, anything else reprints the error and asks again.
# Fixes: deprecated "-a" inside [ ] and the "==" bashism replaced with
# portable tests; read now uses -r so backslashes are taken literally.
Cc_Wait()
{
    while :
    do
        echo
        read -r -p "Wait! And do you want to go ahead now? [y/n]: " step_yn
        if [ "$step_yn" = 'y' ]; then
            echo -e "\033[31mYou choose to go ahead.\033[0m" && break
        elif [ "$step_yn" = 'n' ]; then
            echo -e "\033[31mYou choose to stop the game.\033[0m" && exit 0
        else
            echo -e "\033[31minput error! Please only input 'y' or 'n'\033[0m"
        fi
    done
}
# Derive filename, softwarename and version (globals, read by the caller)
# from the global $src_url, e.g. http://host/nginx-1.24.0.tar.gz ->
# filename=nginx-1.24.0.tar.gz, softwarename=nginx, version=1.24.0.
GetValFromURL()
{
    filename=${src_url##*/}        # strip the directory part of the URL
    softwarename=${filename%-*}    # drop the trailing "-<version>..." chunk
    case $filename in
        *-*)
            # version = text after the last '-', truncated at the first
            # ".<lowercase letter>" (the archive extension) -- same result
            # as the old sed expression, without spawning a process.
            version=${filename##*-}
            version=${version%%.[a-z]*}
            ;;
        *)
            # No dash: the old sed printed nothing, so version stays empty.
            version=
            ;;
    esac
}
| true |
c77ce4bfaa13ac130e197eb74df10cee23abc135 | Shell | josefsalyer/Aid-Watch-Dog | /init.d | UTF-8 | 985 | 3.859375 | 4 | [] | no_license | #!/bin/bash
#
# Init file for hackpov
#
# chkconfig: - 98 02
# description: hackpov
#
# processname: hackpov
# pidfile: /var/run/hackpov.pid
# Short-Description: Hack Pov
# Source function library (provides daemon, killproc, status, ...).
. /etc/init.d/functions
### Default variables
name="hackpov"
prog="/var/www/hackpov/run"
# Check if requirements are met
[ -x "$prog" ] || exit 1
RETVAL=0
# Launch $prog through the distro's daemon helper.
start() {
echo $"Starting $name"
daemon $prog
RETVAL=$?
echo -n $"Started: "
echo
return $RETVAL
}
# Stop the process recorded in /var/run/<name>.pid.
# NOTE(review): killproc is given only the pidfile; some implementations
# expect "killproc -p <pidfile> <program>" -- confirm on the target distro.
stop() {
echo -n $"Shutting down $name: "
killproc -p /var/run/$name.pid
RETVAL=$?
echo
return $RETVAL
}
restart() {
stop
start
}
# NOTE(review): $CONFIG is never assigned in this script, and the message
# names $prog while monit is what actually gets reloaded -- this looks
# copy-pasted; verify intent before relying on "reload".
reload() {
echo -n $"Reloading $prog: "
monit -c "$CONFIG" reload
RETVAL=$?
echo
return $RETVAL
}
case "$1" in
start)
start
;;
stop)
stop
;;
restart)
restart
;;
reload)
reload
;;
condrestart)
# NOTE(review): $prog is a full path, so this tests
# /var/lock/subsys//var/www/hackpov/run -- probably meant $name.
[ -e /var/lock/subsys/$prog ] && restart
RETVAL=$?
;;
status)
status $prog
RETVAL=$?
;;
*)
echo $"Usage: $0 {start|stop|restart|reload|condrestart|status}"
RETVAL=1
esac
exit $RETVAL
| true |
052b3cb55bdbcf2ef0a649fc8e9b819a1dc9c722 | Shell | mkoskar/dotfiles | /bin/status | UTF-8 | 2,775 | 3.59375 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
set -eu -o pipefail
shopt -s inherit_errexit
export LC_ALL=C
prgname=${0##*/}
if [[ $prgname = status-osd ]]; then
out=$(status "$@")
[[ -t 1 ]] && printf '%s\n' "$out"
osd "$out" || true
exit
fi
declare -i x11=0
has-x11 && x11=1
status_audio() {
local pval rval
pval=$(
unset DISPLAY; amixer get Master | gawk '
match($0, /\[([0-9]*%)\].*\[(on|off)\]/, a) {
print a[2] " " a[1]
exit
}
'
)
amixer -c 0 get Headphone,1 | grep -Fq 'Playback [on]' && pval="$pval +dock"
rval=$(
unset DISPLAY; amixer get Capture | gawk '
match($0, /\[([0-9]*%)\].*\[(on|off)\]/, a) {
print a[2] " " a[1]
exit
}
'
)
printf 'audio: %s / %s\n' "$pval" "$rval"
}
status_backlight() {
local val; val=$(backlight)
printf 'backlight: %s%%\n' "$val"
}
status_bluetooth() {
local val; val=$(status_rfkill bluetooth)
printf 'bluetooth: %s\n' "$val"
}
status_dpms() {
local val=-
if (( x11 )); then
xset q | grep -Fq 'DPMS is Enabled' && val=on || val=off
fi
printf 'dpms: %s\n' "$val"
}
status_pa_sink() {
local val=-
if pactl stat &>/dev/null; then
val=$(painfo default-sink-description)
fi
printf 'PA sink: %s\n' "$val"
}
# Map the numeric state of an rfkill class ($1, e.g. "wlan" or "bluetooth"
# as passed by the callers) to a human-readable label:
#   -1 -> mixed (presumably devices disagree), 0 -> off (soft),
#    1 -> on, 2 -> off (hard); anything else keeps the "-" placeholder.
# The state is queried via the external rfkill-state helper; the label is
# printed without a trailing newline.
status_rfkill() {
local val=- state
state=$(rfkill-state -a "$1")
case $state in
-1)
val=mixed
;;
0)
val='off (soft)'
;;
1)
val=on
;;
2)
val='off (hard)'
;;
esac
printf %s "$val"
}
status_touchpad() {
local val=- state
if (( x11 )); then
state=$(touchpad 2>/dev/null)
case $state in 1) val=on ;; 0) val=off ;; esac
fi
printf 'touchpad: %s\n' "$val"
}
status_trackpoint_wheel() {
local val=- state
if (( x11 )); then
state=$(trackpoint-wheel 2>/dev/null)
case $state in 1) val=on ;; 0) val=off ;; esac
fi
printf 'trackpoint wheel: %s\n' "$val"
}
status_wifi() {
local val; val=$(status_rfkill wlan)
printf 'wifi: %s\n' "$val"
}
status_xkb() {
local val=-
if (( x11 )); then
val=$(xkblayout-state print %s)
fi
printf 'xkb: %s\n' "$val"
}
if (( ! $# )); then
status_pa_sink
status_audio
status_backlight
status_bluetooth && true
status_dpms
status_touchpad && true
status_trackpoint_wheel && true
status_wifi
status_xkb
exit
fi
case ${1-} in
audio | backlight | bluetooth | dpms | pa_sink | \
touchpad | trackpoint_wheel | wifi | xkb)
"status_$1"
;;
*)
exit 2
;;
esac
| true |
edc22442e0d6b7f96021fb0254cdb2f3ae24982f | Shell | ruiixu23/advanced-system-lab | /scripts/experiments/replication/main.sh | UTF-8 | 1,605 | 2.703125 | 3 | [] | no_license | #!/usr/bin/env bash
thread_pool_size=16
num_clients=110
num_servers=1
replication_factor_config=(1)
for replication_factor in ${replication_factor_config[@]}
do
log_dir=./logs/replication/server-${num_servers}-replication-${replication_factor}
mkdir -p ${log_dir}
./scripts/experiments/replication/run-repetition.sh ${thread_pool_size} ${num_clients} ${log_dir} ${num_servers} ${replication_factor} | tee -a ${log_dir}/bash.log
done
num_servers=3
replication_factor_config=(1 2 3)
for replication_factor in ${replication_factor_config[@]}
do
log_dir=./logs/replication/server-${num_servers}-replication-${replication_factor}
mkdir -p ${log_dir}
./scripts/experiments/replication/run-repetition.sh ${thread_pool_size} ${num_clients} ${log_dir} ${num_servers} ${replication_factor} | tee -a ${log_dir}/bash.log
done
num_servers=5
replication_factor_config=(1 3 5)
for replication_factor in ${replication_factor_config[@]}
do
log_dir=./logs/replication/server-${num_servers}-replication-${replication_factor}
mkdir -p ${log_dir}
./scripts/experiments/replication/run-repetition.sh ${thread_pool_size} ${num_clients} ${log_dir} ${num_servers} ${replication_factor} | tee -a ${log_dir}/bash.log
done
num_servers=7
replication_factor_config=(1 4 7)
for replication_factor in ${replication_factor_config[@]}
do
log_dir=./logs/replication/server-${num_servers}-replication-${replication_factor}
mkdir -p ${log_dir}
./scripts/experiments/replication/run-repetition.sh ${thread_pool_size} ${num_clients} ${log_dir} ${num_servers} ${replication_factor} | tee -a ${log_dir}/bash.log
done
| true |
692815cce51f54ee4a32e6e8d7698c8853d3bc40 | Shell | wenshan231/test_doc | /shell/script/android_product/make_ew510_new21.sh | UTF-8 | 15,221 | 2.609375 | 3 | [] | no_license | #! /bin/bash
# This script is auto make mmcp incream
# @author: QiaoTing (on 2010/02/10)
# modify @author: xieyue (on 2010/06/01)
prj=$1
dir=$2
plat_list=$3
productname=$4
shellbash=$(pwd)/makecbb.sh
allpath=/home/hudson/project/$prj
workpath=$allpath/$dir
rm $allpath/logs -rf
rm $allpath/output -rf
mkdir $allpath/logs $allpath/output -p
log_file_detail=$allpath/logs/make_$prj\_detail.log
log_file=$allpath/logs/make_$prj.log
log_failedinfo=$allpath/logs/infotxt.log
mdli=0
flag=0
flagall=1
flagmmcpnomake=1
flag3rdnomake=0
mdli=0
flagdebug=0
copyall=1
flag3rdresult=0
lastmmcpverlog=/home/hudson/lastver/lastver\_$prj\_mmcp.log
nowmmcpver=`head -11 $workpath/.svn/entries | tail -1`
lastmmcpver=`cat $lastmmcpverlog`
echo -e "====================== mmcp:lastversion:$lastmmcpver, nowversion:$nowmmcpver =====================" >> $log_file
echo ====================== mmcp:lastversion:$lastmmcpver, nowversion:$nowmmcpver =====================
last3rdver=0
now3rdver=0
### if there is 3rd we need to get the 3rd ver
if [ "$productname" != "" ] && [ "$productname" != "none" ]
then
last3rdver=0
now3rdver=`head -11 $workpath/integration/product/$productname/.svn/entries | tail -1`
echo -e "===================== project version:$now3rdver ==================" >> $log_file
echo ====================== project version:$now3rdver =====================
fi
okmsg="[CC]Result of $prj make success revision $nowmmcpver"
errormsg="[CC]Result of $prj make failed revision $nowmmcpver"
echo -e "<infoxml>" > $log_failedinfo
if [ "$lastmmcpver" -eq 0 ]
then
flagall=0
else
cd $workpath
if [ "$prj" = "guizhou_android_6a801" ]
then
echo ================ current project is guizhou_android_6A801 ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/guizhou_coship_Android6A801 cfg\/ottdvb_Android6A801/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/unionPay thirdparty\/ca\/tfcdcasa/g' | sed 's/porting//g'`
elif [ "$prj" = "guizhou_android_6a801_client" ]
then
echo ================ current project is guizhou_android_6A801_client ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/guizhou_coship_Android6A801_Client cfg\/ottdvb_Android6A801_Client/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/ca\/tfcdcasa/g' | sed 's/jvm//g' | sed 's/guitool//g' | sed 's/graph//g' | sed 's/midp//g' | sed 's/porting//g'`
elif [ "$prj" = "wuhan_android_6a801" ]
then
echo ================ current project is wuhan_android_6A801 ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg//g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/unionPay thirdparty\/ca\/tfcdcasa/g' | sed 's/porting//g'`
elif [ "$prj" = "wuhan_android_6a801_client" ]
then
echo ================ current project is wuhan_android_6A801_client ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg//g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/ca\/tfcdcasa/g' | sed 's/jvm//g' | sed 's/guitool//g' | sed 's/graph//g' | sed 's/midp//g' | sed 's/porting//g'`
elif [ "$prj" = "delivery_android_6a801" ]
then
echo ================ current project is delivery_android_6A801 ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/ottdvb_Android6A801/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/unionPay thirdparty\/ca\/camanager thirdparty\/ca\/caudiadapter/g' | sed 's/porting//g'`
echo $model_list_all | grep "cfg/ottdvb_Android6A801"
if [ $? -eq 0 ]
then
echo cfg exits~~~~~~~~
else
model_list_all="$model_list_all cfg/ottdvb_Android6A801"
fi
elif [ "$prj" = "delivery_android_6a801_client" ]
then
echo ================ current project is delivery_android_6A801_client ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/makefile/kernel/g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/ottdvb_Android6A801_Client/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/ca\/camanager/g' | sed 's/jvm//g' | sed 's/midp//g' | sed 's/porting//g'`
echo $model_list_all | grep "cfg/ottdvb_Android6A801_Client"
if [ $? -eq 0 ]
then
echo cfg exits~~~~~~~~
else
model_list_all="$model_list_all cfg/ottdvb_Android6A801_Client"
fi
elif [ "$prj" = "changsha_android_6a801" ]
then
echo ================ current project is changsha_android_6A801 ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/changsha_coship_Android6A801/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/unionPay/g' | sed 's/porting//g'`
echo $model_list_all | grep "cfg/changsha_coship_Android6A801"
if [ $? -eq 0 ]
then
echo cfg exits~~~~~~~~
else
model_list_all="$model_list_all cfg/changsha_coship_Android6A801"
fi
elif [ "$prj" = "changsha_android_6a801_client" ]
then
echo ================ current project is changsha_android_6A801_client ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/changsha_coship_Android6A801_Client/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis/g' | sed 's/jvm//g' | sed 's/midp//g' | sed 's/porting//g'`
echo $model_list_all | grep "cfg/changsha_coship_Android6A801_Client"
if [ $? -eq 0 ]
then
echo cfg exits~~~~~~~~
else
model_list_all="$model_list_all cfg/changsha_coship_Android6A801_Client"
fi
elif [ "$prj" = "delivery_Android_Hi3716C_V200" ]
then
echo ================ current project is delivery_Android_Hi3716C_V200 ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/ottdvb_Android6A801/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/unionPay thirdparty\/ca\/camanager thirdparty\/ca\/caudiadapter/g' | sed 's/porting//g'`
echo $model_list_all | grep "cfg/taiwan_hi3716C_V200"
if [ $? -eq 0 ]
then
echo cfg exits~~~~~~~~
else
model_list_all="$model_list_all cfg/taiwan_hi3716C_V200"
fi
elif [ "$prj" = "delivery_Android_Hi3716C_V200_Client" ]
then
echo ================ current project is delivery_Android_Hi3716C_V200_Client ================ >> $log_file
model_list_all=`svn diff --summarize --username xieyue --password 04303309 -r $lastmmcpver:$nowmmcpver | awk {'print $2'} | sed 's%/.*$%%' | uniq | grep -v "autoMake" | sed 's/integration//g' | sed 's/makefile/kernel/g' | sed 's/tplib/kernel/g' | sed 's/cfg/cfg\/ottdvb_Android6A801_Client/g' | sed 's/thirdparty/thirdparty\/tts thirdparty\/mis thirdparty\/ca\/camanager/g' | sed 's/jvm//g' | sed 's/midp//g' | sed 's/porting//g'`
echo $model_list_all | grep "cfg/taiwan_hi3716C_V200_client"
if [ $? -eq 0 ]
then
echo cfg exits~~~~~~~~
else
model_list_all="$model_list_all cfg/taiwan_hi3716C_V200_client"
fi
pause
fi
echo -e "==================== modified models compared to last version:"$model_list_all" ==================" >> $log_file
if [ "$model_list_all" = "" ]
then
echo "only noncbb modules change and not build" >> $log_file
flagmmcpnomake=0
else
echo $model_list_all | grep -E "include|bin|build"
if [ $? -eq 0 ]
then
flagall=0
else
echo $model_list_all | grep "dtvmx"
if [ $? -eq 0 ]
then
if [ "$prj" != "guizhou_android_6a801_client" ] && [ "$prj" != "changsha_android_6a801_client" ]
then
model_list=`echo $model_list_all | sed 's/jvm//g' | sed 's/dtvmx/dtvmx jvm/g'`
else
model_list=$model_list_all
fi
echo $model_list
else
model_list=$model_list_all
fi
fi
fi
fi
if [ "$flagmmcpnomake" -eq 0 ]
then
echo "no mmcp cbb need to make ~~~~~~~~~~~~~~~~~~~~~"
else
echo "some mmcp cbb has been modified, need to make ~~~~~~~~~~~~~~~~~~~~~~~~"
model_list_dbg="jvm"
model_list_dbg_author="huhuatao"
if [ $flagall -eq 0 ]
then
rm -fr $workpath/lib
model_list_author="yanghuiyuan huhuatao fushouwei caorui longshirong zhaodemin caozhenliang fushouwei zhuokeqiao zhengfen zhangminrui lianxijian caorui caorui zhuokeqiao fanyong fanyong"
if [ "$prj" = "guizhou_android_6a801" ]
then
echo ================ current project is guizhou_android_6A801 ================ >> $log_file
model_list="dtvmx shell codec dtv graph jsext kernel mediaplayer midp protocol guitool jvm thirdparty/tts thirdparty/mis thirdparty/unionPay thirdparty/ca/tfcdcasa cfg/guizhou_coship_Android6A801 cfg/ottdvb_Android6A801"
elif [ "$prj" = "guizhou_android_6a801_client" ]
then
echo ================ current project is guizhou_android_6A801_client ================ >> $log_file
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx shell thirdparty/tts thirdparty/mis thirdparty/unionPay thirdparty/ca/tfcdcasa cfg/guizhou_coship_Android6A801_Client cfg/ottdvb_Android6A801_Client"
elif [ "$prj" = "delivery_android_6a801" ]
then
echo ================ current project is delivery_android_6A801 ================ >> $log_file
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx jvm shell guitool graph midp thirdparty/tts thirdparty/mis thirdparty/unionPay thirdparty/ca/camanager thirdparty/ca/caudiadapter cfg/ottdvb_Android6A801"
elif [ "$prj" = "delivery_android_6a801_test" ]
then
echo ================ current project is delivery_android_6a801_test ================
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx jvm shell guitool graph midp thirdparty/tts thirdparty/mis thirdparty/unionPay thirdparty/ca/camanager thirdparty/ca/caudiadapter cfg/ottdvb_Android6A801"
elif [ "$prj" = "delivery_android_6a801_client" ]
then
echo ================ current project is delivery_android_6a801_client ================ >> $log_file
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx shell guitool graph thirdparty/tts thirdparty/mis thirdparty/ca/camanager cfg/ottdvb_Android6A801_Client"
elif [ "$prj" = "wuhan_android_6a801" ]
then
echo ================ current project is wuhan_android_6A801 ================ >> $log_file
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx jvm shell guitool graph midp thirdparty/tts thirdparty/mis thirdparty/unionPay "
elif [ "$prj" = "wuhan_android_6a801_client" ]
then
echo ================ current project is wuhan_android_6A801_client ================ >> $log_file
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx shell guitool graph thirdparty/tts thirdparty/mis "
elif [ "$prj" = "changsha_android_6a801" ]
then
echo ================ current project is changsha_android_6A801 ================ >> $log_file
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx jvm shell guitool graph midp thirdparty/tts thirdparty/mis thirdparty/unionPay cfg/changsha_coship_Android6A801"
elif [ "$prj" = "changsha_android_6a801_client" ]
then
echo ================ current project is changsha_android_6A801_client ================ >> $log_file
model_list="dtv kernel protocol codec jsext mediaplayer dtvmx shell guitool graph thirdparty/tts thirdparty/mis cfg/changsha_coship_Android6A801_Client"
elif [ "$prj" = "delivery_Android_Hi3716C_V200_Client" ]
then
echo ================ current project is Android_Hi3716C_V200_Client ================ >> $log_file
model_list="dtv kernel protocol codec midp jsext mediaplayer dtvmx shell guitool graph thirdparty/unionPay cfg/taiwan_hi3716C_V200_client"
pause
elif [ "$prj" = "delivery_Android_Hi3716C_V200" ]
then
echo ================ current project is Android_Hi3716C_V200 ================ >> $log_file
model_list="dtv kernel protocol codec midp jsext mediaplayer dtvmx shell guitool graph thirdparty/unionPay cfg/taiwan_hi3716C_V200"
pause
fi
fi
echo -e "================ all need to make models:"$model_list" ==============" >> $log_file
echo -e "================ all need to make models:"$model_list" =============="
for plat in $plat_list
do
case "$plat" in
Android_6A801)
source $shellbash release $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
source $shellbash debug $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
;;
Android_6A801_Client)
source $shellbash release $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
source $shellbash debug $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
;;
Android_Hi3716C_V200)
#source $shellbash release $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
#source $shellbash debug $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
;;
Android_Hi3716C_V200_Client)
#source $shellbash release $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
#source $shellbash debug $nowmmcpver "$model_list" "$model_list_author" $mdli $plat $prj $dir
;;
esac
done
fi
| true |
454c62dcfeef60521ba33c72144db62b467d6aba | Shell | Hamidou940/Docker_ESI | /entrypoint.sh | UTF-8 | 114 | 2.8125 | 3 | [] | no_license | #!/bin/bash
while [ 1 -eq 1 ];
do
Date=$(date -u)
echo $Date >> 'site/index.html'
sleep 10
done
| true |
8acbc3500ed34ed8d69148bfbf01fb45849eba54 | Shell | y-yagi/app_severs | /bin/performance | UTF-8 | 395 | 3.046875 | 3 | [] | no_license | #!/bin/bash
if [ $# -ne 1 ]; then
echo "specify application server"
exit 1
fi
WRK="wrk -t 10 -c 100 -d 30s"
mkdir -p tmp/result/$1
echo "plaintext"
$WRK http://localhost:3000/hello_world/plaintext >& tmp/result/$1/plaintext.txt
echo "json"
$WRK http://localhost:3000/hello_world/json >& tmp/result/$1/json.txt
echo "db"
$WRK http://localhost:3000/hello_world/db >& tmp/result/$1/db.txt
| true |
91cf09360c66f7e70476e585eb237f40e029aef3 | Shell | er4z0r/polscan | /lib/scanners/security-ipv6-forwarding.sh | UTF-8 | 214 | 2.78125 | 3 | [] | no_license | # group: Security
# name: No IPv6 forwarding
# description: IPv6 forwarding is to be disabled
if /sbin/sysctl net.ipv6 | grep -q "\.forwarding = 1" 2>/dev/null; then
result_failed "IPv6 forwarding is enabled"
fi
| true |
9fb9748666e581d48c8b0b0d44e36a4bbbd7c71a | Shell | rlex/ansible-role-zabbix-agent | /files/scripts/pcp-discovery.sh | UTF-8 | 1,856 | 3.59375 | 4 | [] | no_license | #!/bin/bash
case "$1" in
disk)
disks=($(cat /proc/diskstats | awk '{print $3}' | grep -v 'ram\|loop\|sr\|fd\|dm-\|md' | grep -v '[0-9]'))
counter=${#disks[@]}
echo "{"
echo -e "\t\"data\":[\n"
for (( i=0; ${counter}>i; i++ ))
do
if (( $i == $counter-1 )); then
echo -e "\t{ \"{#DISKDEV}\":\"${disks[$i]}\" }"
else
echo -e "\t{ \"{#DISKDEV}\":\"${disks[$i]}\" },"
fi
done
echo -e "\n\t]\n"
echo "}"
;;
partition)
partitions=($(cat /proc/diskstats | awk '{print $3}' | grep -v 'ram\|loop\|sr\|fd\|dm-\|md' | grep '[0-9]'))
counter=${#partitions[@]}
echo "{"
echo -e "\t\"data\":[\n"
for (( i=0; ${counter}>i; i++ ))
do
if (( $i == $counter-1 )); then
echo -e "\t{ \"{#DISKPART}\":\"${partitions[$i]}\" }"
else
echo -e "\t{ \"{#DISKPART}\":\"${partitions[$i]}\" },"
fi
done
echo -e "\n\t]\n"
echo "}"
;;
mdraid)
mddevices=($(cat /proc/diskstats | awk '{print $3}' | grep 'md'))
counter=${#mddevices[@]}
echo "{"
echo -e "\t\"data\":[\n"
for (( i=0; ${counter}>i; i++ ))
do
if (( $i == $counter-1 )); then
echo -e "\t{ \"{#MDDEV}\":\"${mddevices[$i]}\" }"
else
echo -e "\t{ \"{#MDDEV}\":\"${mddevices[$i]}\" },"
fi
done
echo -e "\n\t]\n"
echo "}"
;;
netif)
interfaces=($(basename -a /sys/class/net/*))
counter=${#interfaces[@]}
echo "{"
echo -e "\t\"data\":[\n"
for (( i=0; i<${counter}; i++ ))
do
if (( $i == $counter-1 )); then
echo -e "\t{ \"{#NETIF}\":\"${interfaces[$i]}\" }"
else
echo -e "\t{ \"{#NETIF}\":\"${interfaces[$i]}\" }",
fi
done
echo -e "\n\t]\n"
echo "}"
;;
ping)
echo 0
;;
*)
echo "wrong argument"
esac
| true |
6a016b52baf99a201eba217aae7db7848b080d88 | Shell | syhan/scripts | /cf/app.sh | UTF-8 | 189 | 3.203125 | 3 | [] | no_license | #!/bin/sh
if [ -z $1 ]; then
echo "Missing application name, e.g. use '$0 OData' to get the OData application name"
exit 2
fi
cf apps | grep -i $1 | grep started | awk '{print $1}'
| true |
bd59bdb91d0d04ed08c25cf26ea9097aad92588b | Shell | p1xelHer0/dotfiles | /bin/_darwin/wal-set | UTF-8 | 744 | 3.171875 | 3 | [] | no_license | #!/bin/bash
# import colors from wal
source "$HOME/.cache/wal/colors.sh"
# Stylesheet used by the Übersicht status-bar widget, and the wal-generated
# palette that gets appended to it.
ubersicht_css="$HOME/dotfiles/conf/_darwin/ubersicht/widgets/nerdbar.widget/colors.css"
wal_css="$HOME/.cache/wal/colors.css"
# update Übersicht with wal colors
reload_bar() {
# if the wal css is present, remove the last 28 lines (length of wal css)
# (ed address "-27,$" covers the last line plus the 27 above it = 28 lines)
if grep -q :root "$ubersicht_css" ;then
ed -s "$ubersicht_css" <<< $'-27,$d\nwq'
fi
cat "$wal_css" >> "$ubersicht_css"
osascript -e 'tell application "Übersicht" to refresh'
}
# Point alacritty's config at the wal-generated color scheme.
reload_alacritty_color() {
ln -fs "$HOME/.cache/wal/alacritty.yml" "$HOME/.config/alacritty/"
}
main() {
reload_alacritty_color
# reload_bar
# we need to force reload wal with alacritty and tmux
wal -R
}
main
| true |
577b58ba7c6b8acd7591b613ced96b05282d141a | Shell | hivesolutions/scudum | /system/etc/rc.d/init.d/localnet | UTF-8 | 1,176 | 3.625 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/sh
### BEGIN INIT INFO
# Provides: localnet
# Required-Start: $local_fs
# Should-Start:
# Required-Stop:
# Should-Stop:
# Default-Start: S
# Default-Stop: 0 6
# Short-Description: Starts the local network.
# Description: Sets the hostname of the machine and starts the
# loopback interface.
### END INIT INFO
# log_info_msg / evaluate_retval come from the LSB helper library;
# evaluate_retval presumably reports the status of the preceding command
# ($?) -- so in "start" only "ip link set lo up" and "hostname" are
# checked, not "ip addr add". TODO confirm against init-functions.
. /lib/lsb/init-functions
# Optional config file providing HOSTNAME.
[ -r /etc/sysconfig/network ] && . /etc/sysconfig/network
case "${1}" in
start)
log_info_msg "Bringing up the loopback interface..."
ip addr add 127.0.0.1/8 label lo dev lo
ip link set lo up
evaluate_retval
log_info_msg "Setting hostname to ${HOSTNAME}..."
hostname ${HOSTNAME}
evaluate_retval
;;
stop)
log_info_msg "Bringing down the loopback interface..."
ip link set lo down
evaluate_retval
;;
restart)
${0} stop
sleep 1
${0} start
;;
status)
echo "Hostname is: $(hostname)"
ip link show lo
;;
*)
echo "Usage: ${0} {start|stop|restart|status}"
exit 1
;;
esac
exit 0
| true |
c585fa1b4811c446a515387e11e75f433754d0bd | Shell | sistematico/majestic | /home/.local/bin/youtube-live | UTF-8 | 2,988 | 3.03125 | 3 | [] | no_license | #!/usr/bin/env bash
# youtube-live: capture the X11 desktop plus PulseAudio and stream it to
# YouTube Live over RTMP with ffmpeg, drawing a text overlay from a file.
# The stream key is read from the YOUTUBE_KEY=... line of ~/.env.
YOUTUBE_KEY=$(grep YOUTUBE_KEY $HOME/.env | xargs)
YOUTUBE_KEY="${YOUTUBE_KEY#*=}"
INRES="1920x1080" # input resolution
OUTRES="1920x1080" # output resolution (NOTE(review): unused by the active command below)
ABR="44100" # audio rate
VBR="2500k" # output video bitrate
FPS="30" # output video FPS
QUAL="ultrafast" # ffmpeg quality preset
YOUTUBE_URL="rtmp://a.rtmp.youtube.com/live2" # YouTube RTMP base URL
TXT_OVERLAY="${HOME}/.local/share/youtube/overlay.txt" #txt overlay file location
FONT="/usr/share/fonts/ubuntu/Ubuntu-B.ttf" #font file location. Probably same for all ubuntu. (or all linux ?)
# Earlier variant kept for reference (mp3 audio, no overlay):
# ffmpeg \
# -f x11grab \
# -s "$INRES" \
# -i $DISPLAY \
# -f pulse -i 0 \
# -deinterlace \
# -vcodec libx264 -pix_fmt yuv420p \
# -s $OUTRES \
# -preset $QUAL \
# -r $FPS -g $(($FPS * 2)) \
# -b:v $VBR \
# -ac 2 -acodec libmp3lame \
# -ar $AUDIO_RATE \
# -threads 6 -qscale 3 \
# -b:a 712000 \
# -bufsize 512k \
# -f flv "$YOUTUBE_URL/$YOUTUBE_KEY"
# -ar $ABR -threads 6 -q:a 3 -b:a 712000 -bufsize 128k \
# Active stream: x11grab video + default Pulse source, x264 zerolatency,
# AAC audio, overlay filter stacks the text file (reloaded each frame).
ffmpeg \
-f x11grab -video_size ${INRES} \
-framerate $FPS \
-i $DISPLAY -deinterlace \
-f pulse \
-ac 2 \
-i default \
-c:v libx264 -pix_fmt yuv420p \
-crf 23 \
-c:a aac -ac 2 -b:a 128k -ar $ABR -threads 6 -q:a 3 -bufsize 128k \
-preset $QUAL -tune zerolatency -g $(($FPS * 2)) -b:v $VBR \
-filter_complex "[0:v][1:v] overlay=(W-w):0 [b]; \
[b] drawtext=fontfile=${FONT}: textfile=${TXT_OVERLAY}: reload=1: \
x=5: y=450: fontsize=25: fontcolor=white@1.0: box=1: boxcolor=black@0.5" \
-f flv "$YOUTUBE_URL/$YOUTUBE_KEY"
# Reference: webcam + logo + overlay variant this script was based on.
# VID_SOURCE="/dev/video0" #for lappy's webcam, it works. for usb cam try changing the location
# IM_SOURCE="/home/rik/Desktop/riktronics_small.bmp" #logo location
# TXT_OVERLAY_SOURCE="/home/rik/Desktop/txtfile.txt" #txt overlay file location
# FONT_SOURCE="/usr/share/fonts/truetype/freefont/FreeMono.ttf" #font file location. Probably same for all ubuntu. (or all linux ?)
# URL="rtmp://a.rtmp.youtube.com/liveStreamName" #Your live stream url
# KEY="hqd5-pu3v-abcd-vxyz" #your stream api
# ffmpeg \
# -f v4l2 -video_size 640x480 -framerate $FPS\
# -i "$VID_SOURCE" -deinterlace \
# -i "$IM_SOURCE" \
# -f lavfi -i anullsrc -c:v copy -c:a aac -strict -2\
# -vcodec libx264 -pix_fmt yuv420p -preset $QUAL -tune zerolatency -g $(($FPS * 2)) -b:v $VBR \
# -ar 44100 -threads 6 -q:a 3 -b:a 712000 -bufsize 128k \
# -filter_complex "[0:v][1:v] overlay=(W-w):0 [b]; \
# [b] drawtext=fontfile=$FONT_SOURCE: textfile=$TXT_OVERLAY_SOURCE: reload=1:\
# x=5: y=450: fontsize=25: fontcolor=white@1.0: box=1: boxcolor=black@0.5"\
# -f flv "$URL/$KEY"
| true |
d1ed8881f4ccdfdce5ad789b637ab385503223fd | Shell | titepweb/dotfiles | /zsh/settings.80%/cdpath.zsh | UTF-8 | 367 | 3.21875 | 3 | [] | no_license | # Remove duplicates from global array $cdpath.
# zsh: declare $cdpath as a global array with duplicates removed (-U),
# then seed it with platform-specific directories. `cd foo` will search
# these roots when foo is not in the current directory.
typeset -gaU cdpath
if [[ "$(platform)" == "windows" ]]; then
# (N) nullglob qualifier: drop the /gw/Kunden/* entry when it matches nothing.
cdpath=(/gw /scratch /lab /lab/.NET /git-demos /gw/Kunden/*(N) $cdpath)
else
cdpath=($HOME /etc /opt/jetbrains $cdpath)
fi
# ZSH sets $CDPATH automatically from $cdpath.
verbose Setting $fg[yellow]\$CDPATH$reset_color to $fg[yellow]$cdpath$reset_color
| true |
a3bff91ccdebeb2f875c2508d950cefdf58c905d | Shell | vysakhvk/dummy-app | /ecs/build.sh | UTF-8 | 666 | 2.96875 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# CodeBuild helper for an ECS deploy: derive the image tag (building and
# pushing metadata for non-production), then substitute placeholders into
# the ECS service manifest in place.
if [ "$DEPLOY_ENVIRONMENT" != "production" ]; then
# Extract a 7-hex-digit short id from the CodeBuild build id (text after ':').
echo -n "$CODEBUILD_BUILD_ID" | sed "s/.*:\([[:xdigit:]]\{7\}\).*/\1/" > build.id
# NOTE(review): "RELEASE_VERSION" below is a literal string, not
# $RELEASE_VERSION -- confirm whether a `$` was intended here.
echo -n "RELEASE_VERSION-$BUILD_SCOPE-$(cat ./build.id)" > docker.tag
docker build -t $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$ECR_NAME:$(cat docker.tag) .
TAG=$(cat docker.tag)
else
TAG=$RELEASE_VERSION
fi
# Replace the TAG / ENVIRONMENT_NAME / DOCKER_IMAGE_URI / BUILD_SCOPE
# placeholders in the service manifest.
sed -i "s@TAG@$TAG@g" ecs/service.yaml
sed -i "s@ENVIRONMENT_NAME@$ENVIRONMENT_NAME@g" ecs/service.yaml
sed -i "s@DOCKER_IMAGE_URI@$AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$ECR_NAME:$TAG@g" ecs/service.yaml
sed -i "s@BUILD_SCOPE@$BUILD_SCOPE@g" ecs/service.yaml
| true |
fec707c619f476b72676685a4c304107f3821ce1 | Shell | runngezhang/MCCCS | /MCCCS/main/example_scripts/example_RGB_segmentation/move_all_to_subdir.sh | UTF-8 | 378 | 3.578125 | 4 | [] | no_license | #!/bin/bash
#
# Modified 2018-04-14 by C. Klukas
#
# For every file in the current directory whose name ends with the given
# postfix ($1): create a directory named like the file minus the postfix,
# then move the file into it (renamed to the postfix-less basename).
if [ "$#" -ne 1 ]; then
echo "Illegal number of parameters (only one is supported, the postfix of the filenames)"
exit 1
fi
export R=$1
# Pass 1: mkdir for each matching file; ${F%$R} strips the postfix.
# NOTE(review): the file name is spliced into the bash -c string via
# xargs -I '{}', so names containing quotes/metacharacters are unsafe.
find *$1 -maxdepth 0 -type f -print0 | xargs -I '{}' -0 bash -c 'F={} && mkdir -p ${F%$R}'
# Pass 2: move each file into its directory (basename -s strips the postfix).
find *$1 -maxdepth 0 -type f -print0 | xargs -I '{}' -0 bash -c 'F={} && mv $F $(basename -s $R $F)'
| true |
06254066f433a907bdda6f8131eaff678b9dc0b9 | Shell | gabe1314/Scripts | /bash/backup/Purge-Recovery-Points-by-vault-time-type.sh | UTF-8 | 348 | 2.796875 | 3 | [] | no_license | #!/bin/bash
# Purge AWS Backup recovery points: delete every EC2 recovery point in
# Acuity_backup_vault created before 2020-12-30.
set -e
# NOTE(review): the `for` line below is truncated in this copy of the
# file (it ends mid --query expression and the loop's `do` keyword is
# missing); restore the full line from the original source before use.
for ARN in $(aws backup list-recovery-points-by-backup-vault --backup-vault-name "Acuity_backup_vault" --by-created-before 2020-12-30 --by-resource-type EC2 --query 'RecoveryPoints[].Recovery$
echo "deleting ${ARN} ..."
aws backup delete-recovery-point --backup-vault-name "Acuity_backup_vault" --recovery-point-arn "${ARN}"
done | true |
a563d0cda02ba3ca7b9edfd436ec38b28e769a27 | Shell | imma/pass | /script/version | UTF-8 | 303 | 3.1875 | 3 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/usr/bin/env bash
# Print the installed `pass` version as a JSON object: {"pass": "<ver>"}.
# An empty string is emitted when `pass` is missing or prints no version.
function version {
# shome = repository root (parent of the directory holding this script).
local shome="$(cd -P -- "${BASH_SOURCE%/*}/.." && pwd -P)"
source "$shome/script/profile"
# Grab the "vX.Y.Z" token from `pass version` and strip the leading "v";
# `|| true` keeps the pipeline from failing when pass is absent.
local ver_pass="$(pass version 2>/dev/null | grep 'v[0-9]' | awk '{print $2}' | sed 's#^v##' || true)"
jq -n --arg pass "$ver_pass" '{pass: $pass}'
}
version "$@"
| true |
a50691afccbc8555f78b8b20b07b749c16e11e8a | Shell | hhadian/kaldi | /egs/cnceleb/v1/local/make_cnceleb1.sh | UTF-8 | 1,827 | 3.84375 | 4 | [
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] | permissive | #!/usr/bin/env bash
# Copyright 2019 Jiawen Kang
# Apache 2.0.
#
# This script prepares the CN-Celeb1 dataset.
# It creates separate directories for train, eval enroll and eval test.
# It also prepares a trials files, in the eval test directory.
# Each output dir gets a wav.scp ("<utt-id> <wav-path>") and an utt2spk
# ("<utt-id> <speaker-id>") file, then is normalized by fix_data_dir.sh.
if [ $# != 2 ]; then
echo "Usage: make_cnceleb1.sh <CN-Celeb1_PATH> <out_dir>"
echo "E.g.: make_cnceleb1.sh /export/corpora/CN-Celeb1 data"
exit 1
fi
in_dir=$1
out_dir=$2
# Prepare the cnceleb1 training data
this_out_dir=${out_dir}/cnceleb1_train
mkdir -p $this_out_dir 2>/dev/null
WAVFILE=$this_out_dir/wav.scp
SPKFILE=$this_out_dir/utt2spk
rm $WAVFILE $SPKFILE 2>/dev/null
this_in_dir=${in_dir}/dev
# Dev speakers are listed in dev.lst; utterance id = "<spkr>-<wav-stem>".
for spkr_id in `cat $this_in_dir/dev.lst`; do
for f in $in_dir/data/$spkr_id/*.wav; do
wav_id=$(basename $f | sed s:.wav$::)
echo "${spkr_id}-${wav_id} $f" >> $WAVFILE
echo "${spkr_id}-${wav_id} ${spkr_id}" >> $SPKFILE
done
done
utils/fix_data_dir.sh $this_out_dir
# Prepare the evaluation data
for mode in enroll test; do
this_out_dir=${out_dir}/eval_${mode}
mkdir -p $this_out_dir 2>/dev/null
WAVFILE=$this_out_dir/wav.scp
SPKFILE=$this_out_dir/utt2spk
rm $WAVFILE $SPKFILE 2>/dev/null
this_in_dir=${in_dir}/eval/${mode}
# Eval wav names already embed the speaker id before the first "-".
for f in $this_in_dir/*.wav; do
wav_id=$(basename $f | sed s:.wav$::)
spkr_id=$(echo ${wav_id} | cut -d "-" -f1)
echo "${wav_id} $f" >> $WAVFILE
echo "${wav_id} ${spkr_id}" >> $SPKFILE
done
utils/fix_data_dir.sh $this_out_dir
done
# Prepare test trials
this_out_dir=$out_dir/eval_test/trials
mkdir -p $out_dir/eval_test/trials
this_in_dir=${in_dir}/eval/lists
# Rewrite trials.lst ids to match the names above, mapping label 1 ->
# "target" and anything else -> "nontarget".
cat $this_in_dir/trials.lst | sed 's@-enroll@@g' | sed 's@test/@@g' | sed 's@.wav@@g' | \
awk '{if ($3 == "1")
{print $1,$2,"target"}
else
{print $1,$2,"nontarget"}
}'> $this_out_dir/trials.lst
c9a96d89661f551f319483ac1340d281ad4de77c | Shell | eclipseo/go-macros | /rpm/goinstall.sh | UTF-8 | 2,722 | 3.859375 | 4 | [] | no_license | #!/bin/sh -x
# goinstall: RPM packaging helper that installs Go sources under
# %{gopath}/src/<import-path> and records every created file/dir in a
# %files list. Resources come from `golist --to-install` plus any extra
# paths given as positional arguments.
prefix=''
gopath=/usr/share/gocode
goipps=''
ignore_dirs=''
ignore_trees=''
ignore_regex=''
file_list='devel.file-list'
exts_list=''
ipprefix=''
# Option parsing; -i/--ipprefix is the Go import path of the package.
while [ $# -gt 0 ] ; do
case $1 in
-h|--help) usage ;;
-p|--prefix) prefix=$(realpath -sm "$2") ; shift;;
--ignore-dirs) ignore_dirs="$2" ; shift;;
-R|--ignore-trees) ignore_trees="$2" ; shift;;
-r|--ignore-regex) ignore_regex="$2" ; shift;;
-f|--file-list) file_list="$2" ; shift;;
-e|--extensions) exts_list="$2" ; shift;;
-i|--ipprefix) ipprefix="$2" ; shift;;
(--) shift; break;;
(-*) echo "$0: error - unrecognized option $1" >&2; exit 3;;
(*) break;;
esac
shift
done
install -m 0755 -vd "${prefix}/${gopath}/src"
# create symlink
# _build/src/<ipprefix> -> checkout dir, so GOPATH=$PWD/_build resolves.
install -m 0755 -vd "$(dirname $PWD/_build/src/${ipprefix})"
ln -fs "$PWD" "$PWD/_build/src/${ipprefix}"
# installfile <path>: install one file (or record one directory) under
# the buildroot, creating any missing parent dirs and appending %files
# entries (with %dir markers) to ${file_list}.
installfile() {
file=${1}
[[ ${file} == $PWD/_build/src/${ipprefix} ]] && continue
# Strip the _build/src prefix to get the package-relative path.
file="${file##$PWD/_build/src/${ipprefix}/}"
[[ -d "${file}" && ! -L "${file}" ]] && srcdir="${file}" || srcdir=$(dirname "${file}")
destdir="${prefix}/${gopath}/src/${ipprefix}/${srcdir}"
destdir="${destdir%/.}"
dir="${destdir}"
dirs=(${prefix}/${gopath}/src/${ipprefix})
# Walk up until an existing dir (or the import-path root) is found,
# collecting the chain of directories that must be created.
while [[ ! -e "${dir}" ]] && [[ "${dir##${prefix}${gopath}/src/}" != "${ipprefix}" ]] ; do
dirs=("$dir" "${dirs[@]}")
dir=$(dirname "${dir}")
done
for dir in "${dirs[@]}" ; do
install -m 0755 -vd "${dir}"
# Preserve the source directory's mtime for reproducible packages.
if $(echo "${dir}" | grep -q "^${prefix}/${gopath}/src/${ipprefix}") ; then
touch -r ".${dir#${prefix}/${gopath}/src/${ipprefix}}" "${dir}"
fi
echo "%%dir \"${dir#${prefix}}\"" >> ${file_list}
done
# Symlinks are recreated (with the original target and timestamp)
# rather than copied.
if [[ -L "$file" ]] ; then
ln -s $(readlink "${file}") "${destdir}/$(basename ${file})"
touch -h -r "${file}" "${destdir}/$(basename ${file})"
fi
[[ -f "$file" && ! -L "$file" ]] && install -m 0644 -vp "${file}" "${destdir}/"
[[ -f "$file" || -L "$file" ]] && echo "${gopath}/src/${ipprefix}/${file}" >> ${file_list} || :
}
# Process automatically detected resources
for file in $(\
GOPATH=$PWD/_build golist \
--to-install \
--package-path ${ipprefix} \
--with-extensions "${exts_list}" \
--ignore-dirs "${ignore_dirs}" \
--ignore-trees "${ignore_trees}" \
--ignore-regex "${ignore_regex}" \
); do
installfile ${file}
done
# Process user specified resources
for file in $@; do
# Directories given explicitly are copied wholesale.
if [[ -d "${file}" ]]; then
echo "${gopath}/src/${ipprefix}/${file}" >> ${file_list}
install -m 0755 -vd ${prefix}/${gopath}/src/${ipprefix}/$file
cp -r $file/* ${prefix}/${gopath}/src/${ipprefix}/$file/.
continue
fi
installfile ${file}
done
# Deduplicate the %files list.
sort -u -o ${file_list} ${file_list}
8614507a3e2d9222a30596b686225016e51f50fe | Shell | bcspragu/Radiotation | /serve.sh | UTF-8 | 280 | 3.046875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Run a yarn command (default: "serve") for the frontend inside the
# node-env Docker image, bind-mounting ./frontend as /project and
# running as the invoking user so generated files are owned correctly.
set -e
# Absolute directory containing this script, regardless of CWD.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
CMD=serve
if [[ $# -eq 1 ]] ; then
CMD=$1
fi
docker run \
-it \
-u $(id -u):$(id -g) \
--net=host \
--mount type=bind,source=$DIR/frontend,destination=/project \
--rm \
node-env yarn $CMD
| true |
eb3bbf7735fa523c7d246fb78de0d1f53475ced8 | Shell | Ragaprabha/ScientificParallelComputing | /Assignment3/OLD_Preconditioner/Preconditioner.pbs.txt | UTF-8 | 975 | 2.96875 | 3 | [] | no_license | #!/bin/bash
# PBS batch job: copy the Preconditioner program to node-local scratch,
# run it with stdout captured to a per-job log, then copy results back.
#PBS -N ragaprabha_assignment3
#PBS -q default
#PBS -W x=QOS:eecs739-qos
#PBS -A eecs739
#PBS -l nodes=1:ppn=1:sled,mem=2000m,walltime=02:00:00
#PBS -m abe
#PBS -d /users/ragaprabha/
#PBS -e ${PBS_JOBNAME}-${PBS_JOBID}.err
#PBS -o ${PBS_JOBNAME}-${PBS_JOBID}.out
# Go to user's working directory
cd ${PBS_O_WORKDIR}
# Save job specific information for troubleshooting
echo "Job ID is ${PBS_JOBID}"
# FIX(review): the original echoed ${hostname}, an (empty) variable;
# the intent was clearly the hostname command's output.
echo "Running on host $(hostname)"
echo "Working directory is ${PBS_O_WORKDIR}"
echo "The following processors are allocated to this job:"
echo $(cat $PBS_NODEFILE)
# copy program/data to a temporary directory on the cluster node
LOCAL_DIR=$(mktemp -d );
cp ${PBS_O_WORKDIR}/* ${LOCAL_DIR}
# Run the program
echo "Start: $(date +%F_%T)"
${LOCAL_DIR}/Preconditioner>${LOCAL_DIR}/${PBS_JOBNAME}-${PBS_JOBID}.log
echo "Stop: $(date +%F_%T)"
# Copy data back to the user's working directory and clean up
cp -rf ${LOCAL_DIR}/* ${PBS_O_WORKDIR}
rm -rf ${LOCAL_DIR} | true |
4e8b4cb6464ba38654e89d915011447e6e784d03 | Shell | ryanjbaxter/spring-native | /spring-native-samples/petclinic-jpa/verify.sh | UTF-8 | 118 | 2.53125 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Smoke test for the running petclinic app: succeed (exit 0) iff the
# home page served on localhost:8080 contains the word "Welcome".
homepage=$(curl -s localhost:8080)
case "$homepage" in
*Welcome*)
exit 0
;;
*)
exit 1
;;
esac
| true |
d301e030f546ac34d0238145c45ce6bed9e6f4ea | Shell | kevenkoppel/skriptimine | /praks8/yl1 | UTF-8 | 267 | 3.546875 | 4 | [] | no_license | #!/bin/bash
#
# Sums every even number in the range 1-10, printing the running total
# after each addition, then a summary line (in Estonian).
#
sum=0
# Brace expansion with step 2 yields exactly the even numbers 2..10,
# so no modulo test is needed inside the loop.
for arv in {2..10..2}
do
sum=$((sum + arv))
echo "$sum"
done
echo "Paarisarvude summa vahemikus 1-10 on: $sum"
#
| true |
86c72581a4bf72c2cda9bb3cb788efae208990fe | Shell | figbux/xfce4-terminal-gadgets | /colo | UTF-8 | 1,258 | 3.90625 | 4 | [] | no_license | #!/bin/bash
# xfce4-terminal colorscheme switcher.
# Usage:
# $ colo whi # all
# $ colo white # are
# $ colo white-on-black # same
#
# If you want different colorschemes for each terminal instance, see launcher
#
# Based on: https://askubuntu.com/a/676452
# if no arg supplied, list available themes
if [ $# -eq 0 ]
then
ls /usr/share/xfce4/terminal/colorschemes/*.theme
exit
fi
# set colorscheme: first installed theme whose name starts with $1
THEME=$(ls /usr/share/xfce4/terminal/colorschemes/$1*.theme | head -n 1)
# check if colorscheme exists
if ! [[ -f "$THEME" ]]
then
echo "No such colorscheme: $1"
exit 1
fi
# if not set, use default config dir
# this is used for setting different color profiles in each terminal (see shell.sh)
if [[ -z "${XDG_CONFIG_HOME}" ]]
then
XDG_CONFIG_HOME=~/.config
fi
# Switch color: keep every non-Color line of terminalrc, append the
# theme's Color lines, and swap the file atomically via a temp file.
# Uncomment Font and Background lines if switching those is desired too.
cd $XDG_CONFIG_HOME/xfce4/terminal
grep -v "Color" terminalrc > .terminalrc.tmp
#grep -v "Font" terminalrc > .terminalrc.tmp
#grep -v "Background" terminalrc > .terminalrc.tmp
grep "Color" $THEME >> .terminalrc.tmp
#grep "Font" $THEME >> .terminalrc.tmp
#grep "Background" $THEME >> .terminalrc.tmp
mv .terminalrc.tmp terminalrc
| true |
356e6bd86d9c6fb952a6a0409a6774e601665a57 | Shell | rykelley/Modern-DevOps-in-Practice | /code 5/build.5.sh | UTF-8 | 151 | 3.4375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Resolve the Docker image name: first CLI argument, or "myimage".
IMAGE=${1:-myimage}
# NOTE(review): the original also guarded `[ -z $IMAGE ]` and exited
# with an error, but that branch was unreachable dead code -- the :-
# default above guarantees $IMAGE is never empty (an explicitly empty
# "" argument is replaced by the default as well). The guard is removed.
docker build -t $IMAGE . | true |
8b87420d2b6ebb6d9a2e1c71c5ee9128703333fd | Shell | AtnNn/old-code | /expddl | UTF-8 | 3,946 | 3.828125 | 4 | [] | no_license | #!/bin/sh
# expddl: export the DDL (and optionally table data) of an Oracle schema
# object as SQL on stdout, using sqlplus.
#
#   expddl <connection> [-f] [-d] <type> <object>
#
# -v verbose logging, -w downgrade "does not exist" errors to warnings
# (used internally for the optional BODY of FULLPACKAGE/FULLTYPE).
#
## TODO
#
# TABLE
# INDEX
# SEQUENCE
# FULLTABLE = TABLE, INDEX, SEQUENCE
# -l : list all objects

# ---- option parsing ------------------------------------------------------
help=false
data=false
data_rec=
full=false
warn=false
verbose=false
verbose_rec=
while [ "`echo $1 | cut -b 1`" = "-" ]; do
    case "$1" in
        -h) help=true; shift;;
        # FIX(review): the original wrote `data_rec=-d shift` (missing
        # semicolon), i.e. a temporary assignment prefixed to the shift
        # builtin; under bash the assignment does not persist, so the
        # -d/-v flags were lost on the recursive invocations below.
        -d) data=true; data_rec=-d; shift;;
        -f) full=true; shift;;
        -w) warn=true; shift;;
        -v) verbose=true; verbose_rec=-v; shift;;
        --) shift; break;;
    esac
done

# log: verbose-only diagnostics on stderr.
log () { if $verbose; then echo "$*" >&2; fi }
# error: diagnostics on stderr; labelled "warning" when -w and -v are set.
error () {
    if $warn && $verbose; then
        echo warning: "$*" >&2;
    else
        echo error: "$*" >&2;
    fi
}

if [ "$3" = "" ] || $help; then
    echo USAGE: $0 '<connection> [-f] [-d] <type> <object>'
    echo EXAMPLE: 'expddl foo:bar@//example.com:1521/oraclexe PACKAGE MY_PACKAGE'
    echo supported types:
    echo '  PACKAGE, PACKAGE BODY, TYPE, TYPE BODY, PROCEDURE, FUNCTION'
    echo '  LIBRARY, TRIGGER, JAVA SOURCE, FULLPACKAGE, FULLTYPE, DATA'
    echo with -f flag or FULL prefix:
    echo '  PACKAGE: both the PACKAGE and PACKAGE BODY'
    echo '  TYPE: both the TYPE and TYPE BODY'
    exit 1;
fi;

db=$1
type="$2"
name="$3"

# "FULLPACKAGE" / "FULLTYPE" are shorthand for -f with the base type.
if [ "`echo "$type" | cut -b 1-4`" = "FULL" ]; then
    type="`echo "$type" | cut -b 5-`"
    full=true
fi

# Full export: recurse once for the spec and once (with -w, since a BODY
# may legitimately be absent) for the body.
if $full; then
    case "$type" in
        PACKAGE)
            "$0" $verbose_rec "$1" 'PACKAGE' "$name"
            "$0" $verbose_rec -w "$1" 'PACKAGE BODY' "$name"
            exit
            ;;
        TYPE)
            "$0" $verbose_rec "$1" 'TYPE' "$name"
            "$0" $verbose_rec -w "$1" 'TYPE BODY' "$name"
            exit
            ;;
    esac
fi

# Types cannot be CREATE OR REPLACE'd while dependencies exist, so emit
# a forced drop first.
case "$type" in
    TYPE)
        echo 'DROP TYPE '$name' FORCE;'
        echo /
        ;;
esac

# sqlplus settings producing clean, unwrapped, heading-less output
# suitable for capturing as script text.
sql_settings='
set termout off
set feedback off
set serveroutput on size 100000
set echo off
set newpage 0
set space 0
set pagesize 0
set feedback off
set long 4000
set longchunksize 4000
set wrap on
set heading off
set trimspool on
set linesize 4000
set timing off
'

# exists: print YES when the requested object exists in the schema.
exists () {
    (echo "$sql_settings" "
select 'YES' from user_objects where object_name='$name' and object_type='$type';
" ) | sqlplus -S $db
}

newline='
'

case "$type" in
    DATA)
        type='TABLE';
        if [ "`exists`" != 'YES' ]; then
            error $type $name does not exist
            exit 1
        fi
        log Dumping DATA from $name
        echo 'REM replacing content of '$name
        echo "delete from $name;"
        echo set scan off
        echo set define off
        # Build, per column, a SQL expression that renders the value as
        # an INSERT-ready literal (quoted/escaped string, to_char number,
        # or to_date expression), NULL-safe via nvl2.
        coltyps="`echo "$sql_settings select column_name || '^' || data_type from user_tab_columns where table_name = '$name';" | sqlplus -S $db`"
        columns=
        format="''"
        for coltyp in $coltyps; do
            col=`echo $coltyp | cut -f 1 -d ^`
            typ=`echo $coltyp | cut -f 2 -d ^`
            columns="$columns $col"
            case $typ in
                NUMBER) fmt="to_char($col)";;
                VARCHAR2|CHAR) fmt="'''' || replace(replace($col,'''',''''''), chr(10), '''||chr(10)||''') || ''''" ;;
                DATE) fmt="'to_date(''' || to_char($col, 'YYYY-MM-DD HH24:MI:SS') || ''',''YYYY-MM-DD HH24:MI:SS'')'";;
                *) echo ERROR: Cannot export type $typ >&2
                   exit 1
            esac
            format="$format || ',' || nvl2($col, $fmt, 'null')$newline"
        done
        # One INSERT statement per selected row.
        echo "$sql_settings select $format from $name;" | sqlplus -S $db | while read -r data; do
            echo "Insert into $name (`echo $columns | sed 's/ /,/g'`) values (`echo "$data" | sed 's/^,//'`);"
        done
        ;;
    # All source-based types: dump user_source verbatim.
    *)  if [[ "`exists`" != 'YES' ]]; then
            error $type $name does not exist
            exit 1
        fi
        log Dumping $type $name
        echo -n 'CREATE OR REPLACE '
        (echo "$sql_settings"; cat <<EOF) | sqlplus -S $db
select text from user_source where name = '$name' and type = '$type' order by line;
EOF
        echo /
        ;;
esac
| true |
43123d8570ecd617e9c87ee4418acb8b07a852ae | Shell | dorfsmay/laptop-setup-ubuntu-18.04 | /dependant/pcloud.bash | UTF-8 | 297 | 3.015625 | 3 | [
"CC0-1.0"
] | permissive | #!/bin/bash -e
# Install a previously-downloaded pCloud client into /usr/local/bin.
# If the binary is not in ~/Downloads yet, print download instructions.
pcloud_bin=~/Downloads/pcloud
if [[ -f "$pcloud_bin" ]] ; then
    # Make it executable and move it into the system-wide bin directory.
    chmod +rx "$pcloud_bin"
    sudo mkdir -p /usr/local/bin
    sudo mv "$pcloud_bin" /usr/local/bin/.
else
    echo -e '\nDownload pcloud executable from https://www.pcloud.com/download-free-online-cloud-file-storage.html\n'
fi
| true |
488c0c43fad9cb9a6f47a125c4791da5c62e29dc | Shell | jo23023/Gatway1_5 | /SourceCode/Gateway_wdt/gateway_wdt.sh | UTF-8 | 235 | 2.6875 | 3 | [] | no_license | #!/bin/sh
# Launcher for the gateway watchdog daemon.
# Clean and set dmesg level
/bin/dmesg -c > /dev/null
/bin/dmesg -n 1
# Run watchdog
./gateway_wdt &
# Give the daemon time to write its pid file before adjusting it.
sleep 3
# Exempt the watchdog from the OOM killer via the legacy /proc oom_adj
# interface (-17 disables OOM killing for the process).
if [ -e /var/run/gateway_wdt.pid ]; then
PID="`cat /var/run/gateway_wdt.pid`"
echo -17 > /proc/$PID/oom_adj
fi
| true |
414392a5b6597f1b5640a2822097f9a5a5560c04 | Shell | lots0logs/antergos-packages | /lightdm-webkit2-greeter/PKGBUILD | UTF-8 | 1,556 | 2.515625 | 3 | [] | no_license | # Maintainer: Gustau Castells <karasu@antergos.com>
# Arch PKGBUILD for the lightdm-webkit2-greeter display-manager greeter,
# built from the karasu GitHub fork (Launchpad origin kept commented).
pkgname=lightdm-webkit2-greeter
pkgver=0.0.1
pkgrel=1
pkgdesc="A lightweight display manager greeter"
arch=('i686' 'x86_64')
#url="https://launchpad.net/lightdm-webkit-greeter"
url="https://github.com/karasu/lightdm-webkit2-greeter"
license=('GPL3' 'LGPL3')
#source=("http://launchpad.net/lightdm-webkit-greeter/trunk/$pkgver/+download/$pkgname-$pkgver.tar.gz")
source=("$pkgname-$pkgver.tar.gz::https://github.com/karasu/lightdm-webkit2-greeter/archive/v${pkgver}.tar.gz")
depends=('lightdm' 'webkitgtk' 'gtk-engines')
options=(!libtool)
makedepends=('gnome-doc-utils' 'gobject-introspection' 'intltool')
sha256sums=('0291397919705173df04b411e1393729ee774214f208f1b1a707cecc8c34a4fc')
build() {
cd $srcdir/$pkgname-$pkgver
# JavaScriptCore must be linked explicitly for autogen's checks to pass.
LIBS+="-ljavascriptcoregtk-1.0" ./autogen.sh --prefix=/usr \
--sysconfdir=/etc --libexecdir=/usr/lib/lightdm
make
# De-Ubuntu-ize the default greeter configuration:
# What is Ambiance? This should be a GTK+ 2.x theme, so we use Clearlooks here.
#sed -i '/^theme-name=/s/Ambiance/Clearlooks/' data/lightdm-webkit2-greeter.conf
# Theme 'default' does not exist...
sed -i '/^webkit-theme=/s/default/webkit/' data/lightdm-webkit2-greeter.conf
# this is Ubuntu branding... Replace it with something useful. ;)
sed -i '/^background=/s|/usr/share/backgrounds/warty-final-ubuntu.png||' data/lightdm-webkit2-greeter.conf
# Replace Ubuntu font with Dejavusans
sed -i '/^font-name=/s|Ubuntu 11|DejaVuSans 11|' data/lightdm-webkit2-greeter.conf
}
package() {
cd $srcdir/$pkgname-$pkgver
make DESTDIR=$pkgdir install
}
| true |
5a670d29ecaa4a31bdf58f703c5142ab6fa15cf7 | Shell | imzjy/snippets | /shell/get-lottery-tickets.sh | UTF-8 | 1,164 | 2.71875 | 3 | [] | no_license | #!/bin/bash
# Scrape the Taobao lottery (SSQ) order listing and build autoclick.html,
# a page whose jQuery script offers to open every collected purchase link.

# Page count: digits scraped from the line above the "page-next" control.
pagenum=$( \
curl 'http://caipiao.taobao.com/lottery/order/united_list.htm?lottery_type=SSQ' | \
grep -B1 'page-next' | grep -o '[[:digit:]]' | tr -d '\r\n' \
)
echo $pagenum
#clear links.txt
echo "" > links.txt
# Collect the <a> links of 0.20-priced rows from every result page.
for (( i = 1; i <= $pagenum; i=i+1 ));
do curl "http://caipiao.taobao.com/lottery/order/united_list.htm?page=$i&lottery_type=SSQ&sort_obj=perfee&sort=desc&change_sort=true&chg_type=0" | \
egrep -i -B 4 '<td class="td6">0.20</td>' | \
grep -Eo '<a .+>.+</a>' >> links.txt;
done
#add link with <br />
cat links.txt | awk '{print $0, "<br />"}' > linksWithBr.txt
# Emit the HTML wrapper with a confirm-then-open-all jQuery handler.
echo "<html xmlns='http://www.w3.org/1999/xhtml'><head><title>caipiao</title><script type='text/javascript' src='http://ajax.aspnetcdn.com/ajax/jQuery/jquery-1.7.js'> </script>" > autoclick.html
echo "<script type='text/javascript'>" >> autoclick.html
echo '$(function(){var anwser=confirm("Do you want purchase all of lottery?");if(anwser){$("a").each(function(){var url=$(this).attr("href");window.open(url)})}});' >> autoclick.html
echo "</script></head><body>" >> autoclick.html
cat linksWithBr.txt >> autoclick.html
# FIX(review): the original wrote the malformed "</body></html" here,
# leaving the closing html tag unterminated.
echo "</body></html>" >> autoclick.html
| true |
feae37cd4815e8a86924c19bcc702d6ff4ba8820 | Shell | Rp70/docker-petalinux | /resources/package_SDK.sh | UTF-8 | 424 | 3.34375 | 3 | [
"BSD-2-Clause"
] | permissive | #!/bin/bash
# Repackage an unpacked Xilinx SDK directory (default ./SDK) as
# Xilinx-SDK-v<version>.tgz, deducing the version from the installer's
# properties file, then remove the unpacked tree.
SDK=${1:-SDK}
VERFILE="$SDK/data/dynamic_language_bundle.properties"
[ ! -f "$VERFILE" ] && echo "Could not locate version file" && exit 1
# Import DEFAULT_DESTINATION_FOLDER_LIN from the properties file.
# NOTE(review): eval executes file content -- only run on trusted SDKs.
eval $(grep ^DEFAULT_DESTINATION_FOLDER_LIN= "$VERFILE")
[ -z "$DEFAULT_DESTINATION_FOLDER_LIN" ] && echo "Could not guess version" && exit 2
# The version is the last path component of the default install folder.
XILVER=$(basename "$DEFAULT_DESTINATION_FOLDER_LIN")
cd "$SDK" && tar czf ../Xilinx-SDK-v${XILVER}.tgz * && cd .. && rm -rf "$SDK"
| true |
f8d44cd2ed6cf4d740587ba119df1ead1846ae8e | Shell | ppc64le/build-scripts | /p/pyzbar/pyzbar_rhel_7.6.sh | UTF-8 | 1,751 | 2.859375 | 3 | [
"Apache-2.0"
] | permissive | # ----------------------------------------------------------------------------
#
# Package : pyzbar
# Version : v0.18
# Source repo : https://github.com/NaturalHistoryMuseum/pyzbar
# Tested on : RHEL 7.6, RHEL 7.7
# Script License: Apache License, Version 2 or later
# Maintainer : Priya Seth <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
#!/bin/bash
export PATH=${PATH}:$HOME/conda/bin
export PYTHON_VERSION=3.6
export LANG=en_US.utf8
export LD_LIBRARY_PATH=/usr/local/lib
# NOTE(review): header above says v0.18 while the tag below is v0.1.8;
# confirm which is intended.
export PYZBAR_VERSION=v0.1.8
WDIR=`pwd`
#Enable EPEL, install required packages
wget https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
yum install -y epel-release-latest-7.noarch.rpm
yum update -y
yum install -y gcc gcc-c++ make autoconf git wget zbar lapack-devel atlas-devel libtool libjpeg-devel
#Install conda
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-ppc64le.sh
sh Miniconda3-latest-Linux-ppc64le.sh -u -b -p $HOME/conda
$HOME/conda/bin/conda update -y -n base conda
conda create -n pyzbar -y python=${PYTHON_VERSION}
conda init bash
eval "$(conda shell.bash hook)"
conda activate pyzbar
conda install -y pytest
# Build and test pyzbar from source inside the conda env.
cd ..
git clone https://github.com/NaturalHistoryMuseum/pyzbar
cd pyzbar
git checkout ${PYZBAR_VERSION}
pip install -r requirements.pip
python setup.py install
python setup.py test
| true |
faf69fcff8baa1142ec4585c1c011f5a353ee641 | Shell | SrinivasaBharath/ceph-1 | /qa/standalone/mon/msgr-v2-transition.sh | UTF-8 | 2,603 | 3.078125 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
#
# Standalone ceph test: verify OSD public/cluster address behaviour when
# monitors transition from msgr v1-only to v2 (nautilus) addressing.
source $CEPH_ROOT/qa/standalone/ceph-helpers.sh
function run() {
    local dir=$1
    shift
    export CEPH_MON_V1="v1:127.0.0.1:7148" # git grep '\<7148\>' : there must be only one
    export CEPH_MON_V2="v2:127.0.0.1:7149" # git grep '\<7149\>' : there must be only one
    export CEPH_ARGS
    CEPH_ARGS+="--fsid=$(uuidgen) --auth-supported=none "
    # Run the named test functions, or every TEST_* function by default.
    local funcs=${@:-$(set | ${SED} -n -e 's/^\(TEST_[0-9a-z_]*\) .*/\1/p')}
    for func in $funcs ; do
        setup $dir || return 1
        $func $dir || return 1
        teardown $dir || return 1
    done
}
# Mon starts v1-only; OSD should get v1 public / v2 cluster addrs after
# nautilus, and v2+v1 on both once the mon gains a v2 address.
function TEST_mon_v1_osd_addrs() {
    local dir=$1
    export CEPH_ARGS="$CEPH_ARGS --mon-host=$CEPH_MON_V1 --mon-debug-no-require-nautilus"
    run_mon $dir a || return 1
    ceph mon dump | grep mon.a | grep $CEPH_MON_V1
    run_osd $dir 0 || return 1
    wait_for_osd up 0 || return 1
    ceph osd dump | grep osd.0 | grep v1: || return 1
    ceph osd dump | grep osd.0 | grep v2: && return 1
    ceph osd require-osd-release nautilus
    ceph osd down 0
    wait_for_osd up 0 || return 1
    # public should be v1, cluster v2
    ceph osd dump | grep osd.0 | grep v1: || return 1
    ceph osd dump -f json | jq '.osds[0].public_addrs.addrvec[0]' | grep v1 || return 1
    ceph osd dump -f json | jq '.osds[0].cluster_addrs.addrvec[0]' | grep v2 || return 1
    # enable v2 port on mon
    ceph mon set-addrs a "[$CEPH_MON_V2,$CEPH_MON_V1]"
    ceph osd down 0
    wait_for_osd up 0 || return 1
    # both public and cluster should be v2+v1
    ceph osd dump | grep osd.0 | grep v1: || return 1
    ceph osd dump -f json | jq '.osds[0].public_addrs.addrvec[0]' | grep v2 || return 1
    ceph osd dump -f json | jq '.osds[0].cluster_addrs.addrvec[0]' | grep v2 || return 1
}
# Mon starts with both v2 and v1; OSD should end up v2+v1 on both
# networks once nautilus is required.
function TEST_mon_v2v1_osd_addrs() {
    local dir=$1
    export CEPH_ARGS="$CEPH_ARGS --mon-host=[$CEPH_MON_V2,$CEPH_MON_V1] --mon-debug-no-require-nautilus"
    run_mon $dir a || return 1
    ceph mon dump | grep mon.a | grep $CEPH_MON_V1
    run_osd $dir 0 || return 1
    wait_for_osd up 0 || return 1
    ceph osd dump | grep osd.0 | grep v1: || return 1
    ceph osd dump | grep osd.0 | grep v2: && return 1
    ceph osd require-osd-release nautilus
    ceph osd down 0
    wait_for_osd up 0 || return 1
    # both public and cluster should be v2+v1
    ceph osd dump | grep osd.0 | grep v1: || return 1
    ceph osd dump -f json | jq '.osds[0].public_addrs.addrvec[0]' | grep v2 || return 1
    ceph osd dump -f json | jq '.osds[0].cluster_addrs.addrvec[0]' | grep v2 || return 1
}
main msgr-v2-transition "$@"
| true |
d1d543bf961fbaf0d7a21f95e4cf420cd3c31a4d | Shell | ETeamSquadraCorse/RemoteComm2015 | /CAR/MWorker.sh | UTF-8 | 313 | 2.65625 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# MWorker.sh
#
# This tool gets the audio data from the stdin stream, applies a
# filtering transformation of some kind (maybe none, maybe compression,
# maybe coding...) and inserts the data into the FIFOVoice FIFO
# pipeline file.
#
# Block until the peer process has created the FIFOVoice pipe,
# polling once per second.
until [ -e FIFOVoice ]
do
sleep 1
done
./MFilter.out | true |
6d7f0fc16d0a59a64b2c217ab11949bd0e9c0e99 | Shell | RobinJ1995/postgresql-backup-to-s3 | /main.sh | UTF-8 | 774 | 3.53125 | 4 | [] | no_license | #!/bin/bash
# Dump a PostgreSQL database, compress it with xz, and upload it to S3.
# Required env: DB_HOST DB_NAME DB_USERNAME DB_PASSWORD S3_BUCKET;
# optional: S3_ENDPOINT for S3-compatible stores.
set -xe
timestamp=`date +%s`
# Fail fast (via -e) if any required variable is missing.
test ! -z "$timestamp"
test ! -z "$DB_HOST"
test ! -z "$DB_NAME"
test ! -z "$DB_USERNAME"
# Tracing is suspended so the password never appears in the xtrace log.
set +x
test ! -z "$DB_PASSWORD"
export PGPASSWORD="$DB_PASSWORD"
set -x
test ! -z "$S3_BUCKET"
output_filename="${DB_HOST}_${DB_NAME}_${timestamp}.sql"
echo '== Backup =='
pg_dump -h $DB_HOST -U $DB_USERNAME $DB_NAME > $output_filename
# Dump must exist and be non-empty before we bother compressing.
test -f $output_filename
test -s $output_filename
echo '== Compress =='
xz -v -9 $output_filename
compressed_output_filename="${output_filename}.xz"
test -f $compressed_output_filename
test -s $compressed_output_filename
echo '== Upload =='
if [ -z $S3_ENDPOINT ]
then
s3_cmd='aws s3'
else
s3_cmd="aws s3 --endpoint=${S3_ENDPOINT}"
fi
$s3_cmd cp $compressed_output_filename "s3://${S3_BUCKET}/"
| true |
6c8fb65c52b302c5696585842842189ad26a3271 | Shell | darron/backup-cookbook | /templates/default/rsync.erb | UTF-8 | 1,112 | 2.71875 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Chef ERB template: rsync key system/data directories to the backup
# server under a per-node folder; <%= %> tags are expanded by Chef.
RSYNC_COMMAND="rsync -avz --delete"
BACKUP_SERVER_FOLDER=<%= node.name %>
$RSYNC_COMMAND /etc <%= node.rsync_username %>@<%= node.rsync_destination %>:/<%= node.rsync_path %>/$BACKUP_SERVER_FOLDER
$RSYNC_COMMAND /usr/local/sbin <%= node.rsync_username %>@<%= node.rsync_destination %>:/<%= node.rsync_path %>/$BACKUP_SERVER_FOLDER
$RSYNC_COMMAND /root <%= node.rsync_username %>@<%= node.rsync_destination %>:/<%= node.rsync_path %>/$BACKUP_SERVER_FOLDER
$RSYNC_COMMAND /home <%= node.rsync_username %>@<%= node.rsync_destination %>:/<%= node.rsync_path %>/$BACKUP_SERVER_FOLDER
# The remaining directories are only synced when present on this node.
if [ -d "/var/www" ]; then
$RSYNC_COMMAND /var/www <%= node.rsync_username %>@<%= node.rsync_destination %>:/<%= node.rsync_path %>/$BACKUP_SERVER_FOLDER
fi
if [ -d "/srv/backup" ]; then
$RSYNC_COMMAND /srv/backup <%= node.rsync_username %>@<%= node.rsync_destination %>:/<%= node.rsync_path %>/$BACKUP_SERVER_FOLDER
fi
# NOTE(review): copying a live /var/lib/mysql can produce an
# inconsistent snapshot unless the server is stopped or locked first.
if [ -d "/var/lib/mysql" ]; then
$RSYNC_COMMAND /var/lib/mysql <%= node.rsync_username %>@<%= node.rsync_destination %>:/<%= node.rsync_path %>/$BACKUP_SERVER_FOLDER
fi
| true |
99c3574b209a3988a1a2f11d5416e63dd977e6b3 | Shell | quabid/mbs | /searchpwds | UTF-8 | 2,654 | 3.703125 | 4 | [] | no_license | #!/bin/bash
clearVars() {
unset ARG ARGS res
}
cleanUp() {
msg="all cleaned up!!"
magenta "\t\t${msg^^}\n\n"
}
gracefulExit() {
clearVars
cleanUp
exit 0
}
exitProg() {
clearVars
exit 121
}
trap "gracefulExit" INT TERM QUIT STOP KILL
while getopts ':?t:ab:c:o:' OPTION; do
case ${OPTION} in
b)
if [[ $# -lt 2 ]];
then
exitProg
else
# printf '\tSearch Term: %s\n' ${2^^}
res=$(cat ~/Documents/information/ChromePasswords.csv | awk "/$2/" | awk -F ',' '{print "URL:",$1," Username:",$3," Password:",$4;}')
if [[ $str != $res ]];
then
printf '\tSearch Term: %s\n' ${2^^}
printf '%s\n' "${res}"
fi
fi
# printf '\Argument: %s\n' $2
;;
a)
cat ~/Documents/information/ChromePasswords.csv | awk "/.*/" | awk -F ',' '{print "URL:", $1, " Domain:", $2, " Username:", $3, " Password:", $4;}'
;;
o)
if [[ $# -lt 2 ]];
then
exitProg
else
TERM=$2
# printf '\tSearch Term: %s\n' ${TERM^^}
res=$(cat ~/Documents/information/chrome_passwords.txt | awk "/${TERM}/" | awk '{gsub(/[ \t]/,",");print}' | awk -F ',' '{print "URL:",$1, " Username:",$3, " Password:", $4;}')
if [[ $str != $res ]];
then
printf '\tSearch Term: %s\n' ${TERM^^}
printf '%s\n' "${res}"
fi
fi
;;
c)
if [[ -f $2 ]] && [[ -r $2 ]] && [[ -e $2 ]];
then
awk "/.*/" $2 | awk -F ',' "/$3/" | awk '{print $0,"\n";}' | sed 's/\,/ /g'
else
exitProg
fi
;;
t)
if [[ $# -lt 2 ]];
then
exitProg
else
TERM=$2
# printf '\tSearch Term: %s\n' ${TERM^^}
res=$(cat ~/Documents/information/ChromePasswords.csv | awk "/${TERM}/" | awk -F ',' '{print "Username:",$3," Password:",$4;}')
if [[ $str != $res ]];
then
printf '\tSearch Term: %s\n' ${TERM^^}
printf '%s\n' "${res}"
fi
fi
;;
:)
exitProg
;;
\?)
gracefulExit
;;
esac
done
shift "$(($OPTIND -1))"
if [ $# -gt 0 ];
then
unset $@
fi
| true |
77b263f4a6a57ee9236184ea80ceacd8703bf2b5 | Shell | thu-ml/tianshou | /examples/mujoco/run_experiments.sh | UTF-8 | 273 | 3.09375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Launch ten seeds (0-9) of a MuJoCo benchmark in parallel.
#
# Usage: run_experiments.sh <task> <algo>
# Each seed's stdout+stderr is captured to <task>_<timestamp>_seed_N.txt.
#
# FIX(review): quoted all expansions (task names with spaces/globs were
# unsafe), replaced deprecated backticks with $(...), and used brace
# expansion instead of the external `seq`.
LOGDIR="results"
TASK=$1
ALGO=$2
echo "Experiments started."
for seed in {0..9}
do
    python "mujoco_${ALGO}.py" --task "$TASK" --epoch 200 --seed "$seed" --logdir "$LOGDIR" \
        > "${TASK}_$(date '+%m-%d-%H-%M-%S')_seed_${seed}.txt" 2>&1 &
done
# NOTE(review): jobs run in the background, so this prints as soon as
# they are launched, not when they finish (add `wait` to block on them).
echo "Experiments ended."
| true |
65fae45a4bc703037a4cf4d6b6d0ae691312e289 | Shell | casualcore/casual | /setup.sh | UTF-8 | 1,178 | 3.71875 | 4 | [
"MIT"
] | permissive | #! /bin/bash
function checkout()
{
directory=$1
branch=$2
if [[ -d "../$directory" ]]
then
echo $directory
pushd ../$directory > /dev/null
git pull
git checkout $branch
git pull
popd > /dev/null
else
echo $directory
url=$( git remote -v | grep fetch | awk '{print $2}' | sed -e s/casual.git/${directory}.git/g )
echo $url
pushd .. > /dev/null
git clone $url
pushd $directory > /dev/null
git checkout $branch
popd > /dev/null
popd > /dev/null
fi
}
MAKE_PATH="casual-make"
MAKE_BRANCH="master"
THIRDPARTY_PATH="casual-thirdparty"
THIRDPARTY_BRANCH="master"
checkout $MAKE_PATH $MAKE_BRANCH
checkout $THIRDPARTY_PATH $THIRDPARTY_BRANCH
TEMPLATE_ENV_FILE="middleware/example/env/casual.env"
ENV_FILE="casual.env"
if [[ ! -f $ENV_FILE ]]
then
echo "copying $TEMPLATE_ENV_FILE to $ENV_FILE"
echo "review and adapt file"
cp $TEMPLATE_ENV_FILE $ENV_FILE
else
echo "$ENV_FILE already exists"
if ! diff $ENV_FILE $TEMPLATE_ENV_FILE > /dev/null
then
echo "consider updating from $TEMPLATE_ENV_FILE"
fi
fi | true |
fb8868cd559a97a8e7dc1397617935f3b42f4f70 | Shell | GunioRobot/LightCube-OS | /packages/ntp/ntpd.init | UTF-8 | 1,230 | 3.625 | 4 | [] | no_license | #!/bin/sh
# Begin /etc/init.d/ntpd
### BEGIN INIT INFO
# Provides: ntpd
# Required-Start: hwclock $network
# Should-Start:
# Required-Stop: $network
# Should-Stop: $remote_fs
# Default-Start: 3 4 5
# Default-Stop: 0 1 2 6
# Short-Description: NTP Network Time Protocal
# Description: NTP Syncronizes time with time servers worldwide
### END INIT INFO
. /lib/lsb/init-functions
MESSAGE="Network Time Protocal Daemon"
BIN_FILE="/usr/sbin/ntpd"
CONFIG_FILE="/etc/ntp.conf"
PIDFILE="/var/run/ntpd.pid"
# Make certain that the binary exists, and that the config file exists
chk_stat
case "$1" in
start)
start_daemon -p "${PIDFILE}" "${BIN_FILE}" -g
evaluate_retval start
;;
stop)
killproc -p "${PIDFILE}" "${BIN_FILE}"
evaluate_retval stop
;;
restart)
# Restart service (if running) or start service
killproc -p "${PIDFILE}" "${BIN_FILE}"
sleep 1
start_daemon -p "${PIDFILE}" "${BIN_FILE}" -g
evaluate_retval restart
;;
status)
statusproc
;;
*)
echo " Usage: ${0}{start|stop|restart|status}"
exit 2
;;
esac
# End /etc/init.d/ntpd
| true |
6790f3898c38739a5b6361855535359195a5be31 | Shell | leodido99/dotfiles | /git/git.sh | UTF-8 | 432 | 3.203125 | 3 | [] | no_license | #!/usr/bin/env bash
# Setup git
# Get script's dir
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd)"
# Setup symlinks
NAME=".gitconfig"
SRC="$DIR/$NAME"
DST="$HOME/$NAME"
$DIR/../tools/sym_link.sh $SRC $DST
NAME=".gitignore"
SRC="$DIR/$NAME"
DST="$HOME/$NAME"
$DIR/../tools/sym_link.sh $SRC $DST
mkdir $HOME/.git/myhooks
NAME="commit-msg"
SRC="$DIR/$NAME"
DST="$HOME/.git/myhooks/$NAME"
$DIR/../tools/sym_link.sh $SRC $DST
| true |
b0638784e466c2c2fa621b4ba957a13fd0f579a0 | Shell | swhite-zhang/shell | /move | UTF-8 | 1,440 | 3.015625 | 3 | [] | no_license | #!/bin/bash
xf=1000
yf=1000
sxf=0
syf=0
xs=4000
ys=4000
sxs=0
sys=0
a=0
upf()
{
syf=`expr $syf + 1`
}
downf()
{
syf=`expr $syf - 1`
}
leftf()
{
sxf=`expr $sxf - 1`
}
rightf()
{
sxf=`expr $sxf + 1`
}
ups()
{
sys=`expr $sys + 1`
}
downs()
{
sys=`expr $sys - 1`
}
lefts()
{
sxs=`expr $sxs - 1`
}
rights()
{
sxs=`expr $sxs + 1`
}
movef()
{
xf=`expr $xf + $sxf`
yf=`expr $yf + $syf`
echo "$xf $yf" > userf.dat
}
moves()
{
xs=`expr $xs + $sxs`
ys=`expr $ys + $sys`
echo "$xs $ys" > users.dat
}
judge()
{
if [[ $xf == 0 || $xf == 5000 ]]; then $sxf =`expr 0 - $sxf`
fi
if [[ $yf == 0 || $yf == 5000 ]]; then $syf =`expr 0 - $syf`
fi
if [[ $xs == 0 || $xs == 5000 ]]; then $sxs =`expr 0 - $sxs`
fi
if [[ $ys == 0 || $ys == 5000 ]]; then $sys =`expr 0 - $sys`
fi
}
echo -e "\033[?25l"
stty -echo
while :
do
read -s -n 1 -t 0.1 a
movef
moves
if [[ $a == "w" ]];then upf
elif [[ $a == "s" ]];then downf
elif [[ $a == "a" ]];then leftf
elif [[ $a == "d" ]];then rightf
elif [[ $a == "i" ]];then ups
elif [[ $a == "k" ]];then downs
elif [[ $a == "j" ]];then lefts
elif [[ $a == "l" ]];then rights
elif [[ $a == "q" ]];then
stty echo
exit
fi
sleep 0.1
gnuplot <<!
set terminal pngcairo size 900,900
set output 'user.png'
unset xtics
unset ytics
unset key
set xrange [0:5000]
set yrange [0:5000]
plot 'userf.dat' with circle lc rgb 'green' fs solid 0.5 noborder,\
'users.dat' with circle lc rgb 'red' fs solid 0.5 noborder
set output
exit
!
done
| true |
99d0070a68535c92883bf716771035770acd5120 | Shell | klantz81/mysql-backup | /mysql-backup.sh | UTF-8 | 579 | 2.828125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
databases=`/usr/bin/mysql -NB --user=root --execute="SHOW DATABASES" | grep -Ev "(information_schema|performance_schema)"`
for db in $databases
do
/usr/bin/mysqldump --lock-tables --user=root --databases $db > "/root/mysql-backups/$db.sql"
# /usr/bin/mysqldump --lock-tables --user=root --databases $db | gzip > "/root/mysql-backups/$db.sql.gz"
# /usr/bin/mysqldump --single-transaction --user=root --databases $db > "/root/mysql-backups/$db.sql"
# /usr/bin/mysqldump --single-transaction --user=root --databases $db | gzip > "/root/mysql-backups/$db.sql.gz"
done
| true |
7ce2f76cacd8375bf4a8c7c2ff1a0792dd439b32 | Shell | rahairston/project4 | /run_kubernetes.sh | UTF-8 | 452 | 2.828125 | 3 | [] | no_license | #!/usr/bin/env bash
# Step 1:
# This is your Docker ID/path
dockerpath="rahairston/ryan-udacity-microservices"
# Step 2
# Run the Docker Hub container with kubernetes
kubectl run ryan-udacity-microservices --image=$dockerpath --generator=run-pod/v1
# Step 3:
# List kubernetes pods
kubectl get pods
# Step 4:
# Forward the container port to a host
kubectl wait --for=condition=ready pod --all
kubectl port-forward ryan-udacity-microservices 8000:80 | true |
45636713cce0d34e1f3ddaaa9f7fb2fa00cd66e7 | Shell | yuxinPenny/Nanopore | /feature_extraction.sh | UTF-8 | 3,265 | 3.109375 | 3 | [] | no_license | #/bin/sh
# Start with raw nanopore sequencing data (.fast5).
# Step 1: Base call with Albacore
# Input directory containing .fast5 files, the script will read files recursively. The output is fastq files.
# Note: single fast5 file represents one single read. The output fastq file may contain multiple base-called reads
read_fast5_basecaller.py --input /home/yuxin/home/yuxin/Nanopore/DATA/fast5/pass \
--recursive --worker_threads 5 \
--flowcell FLO-MIN107 --kit SQK-RNA001 \
--save_path /home/yuxin/home/yuxin/Nanopore/DATA/fastq
# Step 2: Alignment with Minimap2
# you may need to specify the path: e.g., /home/yuxin/minimap2-2.17_x64-linux/minimap2
minimap2 -d ref.mmi /data/kunqidir/hg19/hg19.fa # indexing
minimap2 -ax map-ont ref.mmi /home/yuxin/home/yuxin/Nanopore/DATA/fastq/workspace/pass.tar.gz > \
/home/yuxin/home/yuxin/Nanopore/DATA/HEK_WT_Alignment.sam
# Sam to bam, sort bam, index bam
samtools view -b -S /home/yuxin/home/yuxin/Nanopore/DATA/HEK_WT_Alignment.sam > \/home/yuxin/home/yuxin/Nanopore/DATA/HEK_WT_Alignment.bam
samtools faidx /home/share/yuxin/DATA/hg19.fa
samtools sort /home/share/yuxin/Nanopore/DATA/HEK_WT_Alignment.bam -o /home/share/yuxin/Nanopore/DATA/HEK_WT_Alignment_sorted.bam
samtools index /home/share/yuxin/Nanopore/DATA/HEK_WT_Alignment_sorted.bam /home/share/yuxin/Nanopore/DATA/HEK_WT_Alignment.bam.bai
# Step 3A: Tombo feature extraction
# Resquiggle
tombo resquiggle /home/yuxin/home/yuxin/Nanopore/DATA/fast5/pass /data/kunqidir/hg19/hg19.fa \
--processes 4 --num-most-common-errors 5
# Modification base detection
tombo detect_modifications de_novo --fast5-basedirs /home/yuxin/home/yuxin/Nanopore/DATA/fast5/pass \
--statistics-file-basename /home/yuxin/home/yuxin/Nanopore/DATA/fast5/HEK_WT_de_novo_detection --processes 4
# Output the result
tombo text_output browser_files --fast5-basedirs /home/yuxin/home/yuxin/Nanopore/DATA/fast5/pass \
--statistics-filename /home/yuxin/home/yuxin/Nanopore/DATA/fast5/HEK_WT_de_novo_detection.tombo.stats \
--browser-file-basename /home/yuxin/home/yuxin/Nanopore/DATA/fast5/HEK_WT_de_novo_detect \
--file-types coverage dampened_fraction
# Results are stored in wiggle and bedgraph files, which can be processed as text file
# Step 3B: Base call error extraction with EpiNano
# First, you need to create an sequence dictionary for the reference genome with Picard
java -jar /home/share/yuxin/miniconda3/envs/bioinfo/share/picard-2.23.8-0/picard.jar \
CreateSequenceDictionary -R /home/share/yuxin/DATA/hg19.fa -O /home/share/yuxin/DATA/hg19.dict
# There may be something wrong with the EpiNano software: the result file does not include the seqname (chromosome).
# You may separate the bam file by chromosome, and call the function for splitted file in parallel
# Take chromosome 1 as an example
samtools view -b -h /home/share/yuxin/Nanopore/DATA/HEK_WT_Alignment_sorted.bam chr1 > \
/home/share/yuxin/Nanopore/DATA/HEK_WT_Alignment_sorted_chr1.bam
python3 /home/share/yuxin/EpiNano/Epinano_Variants.py -n 2 \
-R /home/share/yuxin/DATA/hg19.fa -b /home/share/yuxin/Nanopore/DATA/HEK_WT_Alignment_sorted_chr1.bam \
-s /home/share/yuxin/jvarkit/dist/sam2tsv.jar --type g
# Warning: Installation and execuation of those software can very time consuming and troublesome.
| true |
92194c70c1f01121fa784e8c7bb5fe47737a0389 | Shell | jjzhang166/littlekernel-imx8 | /scripts/jstart.sh.template | UTF-8 | 626 | 2.921875 | 3 | [
"MIT"
] | permissive | #! /bin/bash
ROOT_CELL=%TEMPLATE_ROOT_CELL%
LK_CELL=%TEMPLATE_LK_CELL%
DTB_BASENAME=%TEMPLATE_DTB_BASENAME%
DTB=/home/root/${DTB_BASENAME}
LK_BIN=/home/root/lk.bin
LK_ENTRY_ADDRESS=0x80010000
LK_START_ADDRESS=0x80000000
echo 'modprobe jailhouse'
modprobe jailhouse
echo 'Enabling jailhouse root cell'
jailhouse enable /usr/share/jailhouse/cells/${ROOT_CELL}
echo 'Creating jailhouse lk cell'
jailhouse cell create /usr/share/jailhouse/cells/${LK_CELL}
echo 'Loading jailhouse lk cell'
jailhouse cell load lk ${DTB} -a ${LK_START_ADDRESS} ${LK_BIN} -a ${LK_ENTRY_ADDRESS}
echo 'Starting lk cell'
jailhouse cell start lk
| true |
093699bf7f328a4eb1d3fd86ddc756ea89171adf | Shell | yogeek/very-whale | /docker-swarm-mode-workshop-automation/wordpress-on-swarm.sh | UTF-8 | 810 | 2.640625 | 3 | [] | no_license | #!/bin/bash
##############################################################################################
# This script aims to deploy Spark on a Docker swarm-mode cluster
#
# Prerequisites :
# - docker 1.12 (https://docs.docker.com/engine/installation/)
# - docker-machine 0.8.2 (https://docs.docker.com/machine/install-machine/)
# - docker-machine "bash completion scripts"
# (https://docs.docker.com/machine/install-machine/#/installing-bash-completion-scripts)
#
# Configuration must be specified in "spark_env.sh".
#
##############################################################################################
source env_cluster.sh $@
source create_machines.sh $@
source init_swarm.sh $@
#source pull_images.sh $1
source start_viz.sh $@
source deploy_wordpress.sh $@
| true |
6d595875f2a90eaca1c4cfbbae24fcb64bd7daf5 | Shell | apache/netbeans-jackpot30 | /cmdline/tool/test/scripted/test-ant | UTF-8 | 1,753 | 2.84375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
perform_test() {
create_file src/test/Test.java <<EOF
package test;
public class Test {
private void test() {
String s = "foo".intern();
}
}
EOF
create_file build.xml <<"EOF"
<?xml version="1.0" encoding="UTF-8"?>
<project name="test" default="run" basedir=".">
<target name="run">
<fail unless="jackpot.home">${jackpot.home} must be specified</fail>
<taskdef name="jackpot" classname="org.netbeans.modules.jackpot30.cmdline.ant.JackpotTask" classpath="${jackpot.home}/jackpot-ant.jar"/>
<jackpot jackpotHome="${jackpot.home}">
<src>
<pathelement path="src" />
</src>
</jackpot>
</target>
</project>
EOF
create_file src/META-INF/upgrade/test.hint <<"EOF"
$1.intern();;
EOF
run_ant >output
if grep <output 'warning: \[test\] test' >/dev/null 2>/dev/null; then
fail "does not contain required output";
fi;
}
. `dirname $0`/harness
| true |
1debfb8e1fa915c2c52fa0d13ab938cf33b19107 | Shell | ozonelmy/Taitank | /benchmark/build.sh | UTF-8 | 564 | 3.5625 | 4 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #! /bin/bash
clean_dir()
{
if [ -d "$BUILD_DIR" ]; then
rm -fr ${BUILD_DIR}/*
else
mkdir -p ${BUILD_DIR}
fi
}
PROJECT_DIR=`pwd`
BUILD_DIR=${PROJECT_DIR}/build/
SOURCE_DIR=$(dirname "$PWD")/src
echo "PROJECT_DIR:${PROJECT_DIR}"
echo "BUILD_DIR:${BUILD_DIR}"
echo "SOURCE_DIR:${SOURCE_DIR}"
# cd to build directory
clean_dir
cd $BUILD_DIR
# cmake
cmake ${PROJECT_DIR} \
-DSOURCE_DIR=${SOURCE_DIR}
# make
make -j8
# run benchmark
BENCH_MARK="${BUILD_DIR}"/taitank_benchmark
if [ -x "${BENCH_MARK}" ];then
${BENCH_MARK}
fi
| true |
d42d8a19693d56a944129881886d801129d19605 | Shell | AzusaOS/azusa-opensource-recipes | /app-text/enchant/enchant-2.2.7.sh | UTF-8 | 519 | 2.625 | 3 | [] | no_license | #!/bin/sh
source "../../common/init.sh"
get https://github.com/AbiWord/enchant/releases/download/v${PV}/${P}.tar.gz
cd "${T}"
doconf --disable-static
make
make install DESTDIR="${D}"
ln -snfv enchant-2 "${D}/pkg/main/${PKG}.dev.${PVRF}/include/enchant"
ln -snfv libenchant-2.so "${D}/pkg/main/${PKG}.libs.${PVRF}/lib$LIB_SUFFIX/libenchant.so"
ln -snfv enchant-2.pc "${D}/pkg/main/${PKG}.libs.${PVRF}/lib$LIB_SUFFIX/pkgconfig/enchant.pc"
ln -snfv enchant-2 "${D}/pkg/main/${PKG}.core.${PVRF}/bin/enchant"
finalize
| true |
cc8fa5fe82eb0f9ac55f9d759fb1bac57a990c7a | Shell | yoyonel/2017_Project_with_DockerMake | /project/pipeline/pipeline.sh | UTF-8 | 433 | 3.515625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
source bash_tools.sh
PROJECT_NAME=$1
echo_i "Project name: ${RED}$PROJECT_NAME"
./validate_project.sh $1
if [ $? -ge 0 ]; then
# Generation du SHA du projet
./generate_sha256.sh $PROJECT_NAME
# Avec le SHA du projet,
# on peut construire des hosts mountpoints pour les volumes
./create_volumes.sh $PROJECT_NAME
./manage_image.sh $PROJECT_NAME
./build_image.sh $PROJECT_NAME
fi
echo_i "${GREEN}${BOLD}Done" | true |
8c2ffc80cfe553d1016f1c57d9dac32b59db19fc | Shell | huanghl365/StudyNote_201308 | /Linux_C/shell/11.sh | UTF-8 | 128 | 2.9375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
declare -i s=0
declare -i i=0
#while [ "$i" != "101" ]
while [ $i -le 100 ]
do
s=s+i;
i=i+1;
done
echo "sum=$s"
| true |
7e844cdfa8fbd7b819b35e5e878c7c6c86ef1849 | Shell | nicovs/tmux | /tmux/updates_available.sh | UTF-8 | 533 | 3.4375 | 3 | [] | no_license | #!/bin/bash
TMP=/tmp/updates.tmux
# [ -e $TMP ] || /usr/lib/update-notifier/apt-check &>$TMP
if test $(find $TMP -mmin +10 &>/dev/null); then
/usr/lib/update-notifier/apt-check &>$TMP
fi
UPDATES=$(cat $TMP | cut -d';' -f1)
SECUPDS=$(cat $TMP | cut -d';' -f2)
if [ $UPDATES -ne 0 ]; then
printf "%d! " $UPDATES
fi
if [ $SECUPDS -ne 0 ]; then
printf "#[default]#[fg=red]"; printf "%d!! " $SECUPDS; printf "#[default]#[fg=colour136]";
fi
if [[ $UPDATES -ne 0 ]] || [[ $SECUPDS -ne 0 ]]; then
echo -ne "⡇ "
fi
| true |
237c824b6b88b93ad0cb314b580c9c721df2382f | Shell | mokus0/deepbondi | /bin/today | UTF-8 | 241 | 3.53125 | 4 | [] | no_license | #!/bin/sh
#
# <today>
# Output the current date formatted as "YYYY-MM-DD" for use as a filename.
# Optionally also adds an extension (do not include the '.')
#
datestamp=$(date +'%Y-%m-%d')
extension=${1:+.}$1
echo "$datestamp$extension"
| true |
ab19626aabaecb17b9b433add77e5903a3955238 | Shell | flag-porter/CTF-Tools | /Tools/Forensics/firmware-mod-kit/install | UTF-8 | 364 | 3.15625 | 3 | [
"MIT"
] | permissive | #!/bin/bash -ex
git clone --depth 1 https://github.com/mirror/firmware-mod-kit.git
cd firmware-mod-kit/src
./configure
make
cd ../..
mkdir -p bin
for i in firmware-mod-kit/*.sh
do
echo "$(dirname $(readlink -m $0))/$i \"\$@\"" > bin/$(basename $i)
chmod 755 bin/$(basename $i)
done
sed -i -e "s/SUDO=\"sudo\"/SUDO=\"\"/" firmware-mod-kit/*.sh
| true |
03818e8f17d8d64f7a4648ad967edbd31849c0b3 | Shell | harish-kancharla/customs-declare-exports-frontend | /migrations/applied_migrations/SelectRole.sh | UTF-8 | 2,174 | 2.78125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
echo "Applying migration SelectRole"
echo "Adding routes to conf/app.routes"
echo "" >> ../conf/app.routes
echo "GET /selectRole controllers.SelectRoleController.onPageLoad(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "POST /selectRole controllers.SelectRoleController.onSubmit(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "GET /changeSelectRole controllers.SelectRoleController.onPageLoad(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "POST /changeSelectRole controllers.SelectRoleController.onSubmit(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "Adding messages to conf.messages"
echo "" >> ../conf/messages.en
echo "selectRole.title = Select role" >> ../conf/messages.en
echo "selectRole.heading = Select role" >> ../conf/messages.en
echo "selectRole.webLoaderArrivingGoods = Web loader arriving goods" >> ../conf/messages.en
echo "selectRole.webLoaderDepartingGoods = Web loader departing goods" >> ../conf/messages.en
echo "selectRole.checkYourAnswersLabel = Select role" >> ../conf/messages.en
echo "selectRole.error.required = Please give an answer for selectRole" >> ../conf/messages.en
echo "Adding helper line into UserAnswers"
awk '/class/ {\
print;\
print " def selectRole: Option[SelectRole] = cacheMap.getEntry[SelectRole](SelectRoleId.toString)";\
print "";\
next }1' ../app/utils/UserAnswers.scala > tmp && mv tmp ../app/utils/UserAnswers.scala
echo "Adding helper method to CheckYourAnswersHelper"
awk '/class/ {\
print;\
print "";\
print " def selectRole: Option[AnswerRow] = userAnswers.selectRole map {";\
print " x => AnswerRow(\"selectRole.checkYourAnswersLabel\", s\"selectRole.$x\", true, routes.SelectRoleController.onPageLoad(CheckMode).url)";\
print " }";\
next }1' ../app/utils/CheckYourAnswersHelper.scala > tmp && mv tmp ../app/utils/CheckYourAnswersHelper.scala
echo "Moving test files from generated-test/ to test/"
rsync -avm --include='*.scala' -f 'hide,! */' ../generated-test/ ../test/
rm -rf ../generated-test/
echo "Migration SelectRole completed"
| true |
87b01db490ebd7854465974a7b40dac422edd9f2 | Shell | qva5on3/safebox | /safebox | UTF-8 | 3,152 | 4.21875 | 4 | [] | no_license | #!/bin/bash
# Questions:
# Q: If pass by parameter, won't the password be visible in history or by ps?
# A: Yes, it will be? Use 'history -d $((HISTCMD-1))' to remove last command from history.
#
# Q: Will it prevent memory containing unencrypted data from being read?
# A: ???
#
# If you have any other questions or suggestion, please contact me
# at qva5on3@gmail.com
#
# Cheers!
usage()
{
echo "Usage: $0 [OPTION]...[MODE] [secret]"
echo ""
echo "OPTION: [-f file | -n name] [-a algorithm] [-p password]"
echo "MODE: add|view|edit"
echo "secret: a text to encrypt and append in 'add' mode"
}
check_error_status()
{
if test $? != 0
then
exit 1
fi
}
check_error_status_with_message()
{
if test $? != 0
then
echo ""
echo "$1"
exit 1
fi
}
# Application configuration
SAFEBOX_DIR="${HOME}/.safebox"
PASSWORD_SOURCE='stdin'
SAFEBOX_EDITOR='vim'
if [ -z "${EDITOR}" ]
then
export EDITOR ${SAFEBOX_EDITOR}
fi
# Defaults
name=''
file="${SAFEBOX_DIR}/.secret"
algorithm="bf-cbc"
password=""
# Initialization
if [ ! -d ${SAFEBOX_DIR} ]
then
mkdir -m 700 ${SAFEBOX_DIR}
fi
# Arguments handling (getopts is limited only to short names)
while getopts ":a:f:hn:p:" OPTION
do
case $OPTION in
a) algorithm="${OPTARG}";;
f) file="${OPTARG}";;
n) name="${OPTARG}";;
p) password="${OPTARG}";;
h) usage; exit 1;;
esac
done
if [ -n "${name}" ]
then
file="${SAFEBOX_DIR}/.${name}"
fi
shift $((OPTIND-1))
mode=$1
case $mode in
view|VIEW|edit|EDIT)
if [ ! -e ${file} ]
then
echo "File not found: ${file}."
exit 1;
fi;;
esac
if [ -z "${password}" ]
then
echo -n "Enter password and press [ENTER]: "
stty -echo
read password
stty echo
fi
# Common command & its arguments
cmd="openssl enc -base64 -pass ${PASSWORD_SOURCE} -${algorithm}"
# Application modes
case $mode in
add|ADD)
secret=$2
if [ -e ${file} ]
then
content=`${cmd} -d -in ${file} << EOF 2> /dev/null
${password}
EOF`
check_error_status_with_message \
"Cannot add new secret to ${file} (decrypt)."
fi
${cmd} -e -out ${file} 2> /dev/null << EOF
${password}
${content}
${secret}
EOF
check_error_status_with_message \
"Cannot add new secret to ${file} (encrypt)."
;;
view|VIEW)
${cmd} -d -in ${file} << EOF 2> /dev/null
${password}
EOF
check_error_status_with_message \
"Cannot view secrets stored in ${file}."
;;
edit|EDIT)
content=`${cmd} -d -in ${file} << EOF 2> /dev/null
${password}
EOF`
check_error_status_with_message \
"Cannot edit secrets stored in ${file} (decrypt)."
content=`cat << EOF | vipe | cat -
${content}
EOF`
${cmd} -e -out ${file} 2> /dev/null << EOF
${password}
${content}
${secret}
EOF
check_error_status_with_message \
"Cannot edit secret in ${file} (encrypt)."
;;
*)
echo "Unknown mode."
exit;;
esac
exit 0
| true |
827c8580968accdc4d822ac33d9988fcc008cdf4 | Shell | valevo/Thesis | /src/shell_scripts/jackknife_main.sh | UTF-8 | 862 | 2.9375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
#SBATCH -N 1
#SBATCH -t 30:00:00
#SBATCH --mem=70G
module load pre2019
module load Python/3.6.1-intel-2016b
echo "jackknife_main job $PBS_JOBID started at `date`"
rsync -a $HOME/ThesisII "$TMPDIR"/ --exclude data --exclude .git
cd "$TMPDIR"/ThesisII
mkdir data
cp $HOME/ThesisII/data/reader.py "$TMPDIR"/ThesisII/data/
cp $HOME/ThesisII/data/corpus.py "$TMPDIR"/ThesisII/data/
# EO FI ID KO
for lang in NO TR VI; do
echo "language: $lang"
cp -r $HOME/ThesisII/data/"$lang"_pkl "$TMPDIR"/ThesisII/data/
python3 jackknife_main.py --lang=$lang
echo
echo "done with language $lang at `date`"
cp -r $TMPDIR/ThesisII/results/$lang/jackknife $HOME/ThesisII/results/$lang/
echo "and copied"
echo
done
# cp -r $TMPDIR/ThesisII/results/"$lang"/ $HOME/ThesisII/results
echo "Job $PBS_JOBID ended at `date`" | true |
61468e8d76cd2df0b98ff110bbfbd3a630c2b820 | Shell | augusto-flores-mojix-com/vizixAutoCi | /setupAmazon/src/setupScript/CD_runUI.sh | UTF-8 | 17,378 | 3.53125 | 4 | [] | no_license | #!/usr/bin/env bash
#
# @autor:Eynar Pari
# @date : 15/03/18
#
#This method is to run UI test also the owasp if it is needed
# @params
# $1 AUTOMATION_PATH
# $2 CATEGORY_TO_EXECUTE
# $3 PRIVATE_IP
# $4 PUBLIC_IP
# $5 REPORT_SAVED
# $6 IS_OWASP
# $7 VIZIX_KAFKA_REPOSITORY
runUITest(){
echo "********************************************************************************************************"
echo "* RUN UI TEST *"
echo "********************************************************************************************************"
#vars
#######################
AUTOMATION_PATH=$1
CATEGORY_TO_EXECUTE=$(echo $2 | tr '*' ' ')
PRIVATE_IP=$3
PUBLIC_IP=$4
REPORT_SAVED=$5
IS_OWASP=$6
VIZIX_KAFKA_REPOSITORY=$7
BROWSER=CHROME
ZAP_PATH=/home/ZAP_2.7.0/zap.sh
#######################
cd /tmp/
sudo apt-get purge google-chrome-stable -y
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
sudo dpkg -i google-chrome-stable_current_amd64.deb
echo "INFO > Configuring Container UI to use private IP"
cd $VIZIX_KAFKA_REPOSITORY
echo "INFO > current folder"
echo $(pwd)
echo "sed -i "/SERVICES_URL=/c SERVICES_URL=$PRIVATE_IP:80" .env"
sudo sed -i "/SERVICES_URL=/c SERVICES_URL=$PRIVATE_IP:80" .env
cat .env
sudo docker-compose up -d ui
echo "INFO > Configuring Display to execute UI test"
echo "INFO > Xvfb :99 -ac -screen 0 1280x1024x24 &"
echo "INFO > export DISPLAY=:99"
Xvfb :99 -ac -screen 0 1280x1024x24 &
export DISPLAY=:99
echo "INFO > clean report folder"
sudo rm -rf $REPORT_SAVED
sudo mkdir $REPORT_SAVED
echo "INFO > exporting gradle var env on machine"
export GRADLE_HOME=/usr/local/gradle
export PATH=${GRADLE_HOME}/bin:${PATH}
cd $AUTOMATION_PATH
if [ "$IS_OWASP" == "true" ]
then
cd $VIZIX_KAFKA_REPOSITORY
docker-compose -f vizix-automation-ui.yml up -d owasp
sleep 60s
cd $AUTOMATION_PATH
BROWSER=CHROMEPROXY
fi
echo "INFO > gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER"
gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER
cp $AUTOMATION_PATH/build/reports/cucumber/report.json $REPORT_SAVED/ExecutionTest$PUBLIC_IP.json
if [ "$IS_OWASP" == "true" ]
then
echo "INFO > gradle clean automationTest -Pcategory=@ReportOwasp -Pnocategory=~@NotImplemented -PisUsingtoken=False -PisGeneratingReportOwasp=true -PshowLogRealTime=true"
gradle clean automationTest -Pcategory=@ReportOwasp -Pnocategory=~@NotImplemented -PisUsingtoken=False -PisGeneratingReportOwasp=true -PshowLogRealTime=false
cp $AUTOMATION_PATH/build/reports/cucumber/report.json $REPORT_SAVED/ReportOwasp$PUBLIC_IP.json
fi
sleep 12m
echo "INFO > saving reports"
cd $REPORT_SAVED
sleep 10s
cp /tmp/vulnerabilityTest.html $REPORT_SAVED
cp /tmp/summaryvulnerabilityTest.html $REPORT_SAVED
cp /tmp/cleandetailOwasp.html $REPORT_SAVED
cp /tmp/cleansummaryOwasp.html $REPORT_SAVED
echo $(pwd)
tar -zcvf report.tar.gz *.*
cp report.tar.gz $AUTOMATION_PATH/build/reports/cucumber/report.tar.gz
echo "completed" > $AUTOMATION_PATH/build/reports/cucumber/done.json
echo "INFO >-----------------------------Automated UI Test were executed---------------------------------"
}
# @params
# $1 AUTOMATION_PATH
# $2 CATEGORY_TO_EXECUTE
# $3 PRIVATE_IP
# $4 PUBLIC_IP
# $5 REPORT_SAVED
# $6 VIZIX_KAFKA_REPOSITORY
runUIDocker(){
echo "********************************************************************************************************"
echo "* RUN UI TEST ON DOCKER *"
echo "********************************************************************************************************"
#######################
AUTOMATION_PATH=$1
CATEGORY_TO_EXECUTE=$(echo $2 | tr '*' ' ')
PRIVATE_IP=$3
PUBLIC_IP=$4
REPORT_SAVED=$5
VIZIX_KAFKA_REPOSITORY=$6
BROWSER=CHROMEGRID
#######################
echo "INFO > Configuring Container UI to use private IP"
cd $VIZIX_KAFKA_REPOSITORY
echo "INFO > sed -i "/SERVICES_URL=/c SERVICES_URL=$PRIVATE_IP:80" .env"
sudo sed -i "/SERVICES_URL=/c SERVICES_URL=$PRIVATE_IP:80" .env
cat .env && sudo docker-compose up -d ui && sudo docker-compose -f vizix-automation-ui.yml up -d
echo "INFO > clean report folder"
sudo rm -rf $REPORT_SAVED && sudo mkdir $REPORT_SAVED
export GRADLE_HOME=/usr/local/gradle && export PATH=${GRADLE_HOME}/bin:${PATH}
cd $AUTOMATION_PATH
echo "INFO > gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER"
gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER
cp $AUTOMATION_PATH/build/reports/cucumber/report.json $REPORT_SAVED/ExecutionTest$PUBLIC_IP.json
sleep 15 && echo "INFO > saving reports"
cd $REPORT_SAVED && sleep 10s
tar -zcvf report.tar.gz *.*
cp report.tar.gz $AUTOMATION_PATH/build/reports/cucumber/report.tar.gz
echo "completed" > $AUTOMATION_PATH/build/reports/cucumber/done.json
}
# to tun specific suite with specific browser and version
# @params
# $1 AUTOMATION_PATH
# $2 CATEGORY_TO_EXECUTE
# $3 PRIVATE_IP
# $4 PUBLIC_IP
# $5 REPORT_SAVED
# $6 VIZIX_KAFKA_REPOSITORY
runUISeleniumGrid(){
	echo "********************************************************************************************************"
	echo "*                                RUN UI TEST ON SELENIUM GRID                                          *"
	echo "********************************************************************************************************"
	#######################
	# Run the UI cucumber suite once against Selenium Grid.
	# $1 AUTOMATION_PATH, $2 category ('*' encodes spaces), $3 PRIVATE_IP,
	# $4 PUBLIC_IP, $5 REPORT_SAVED, $6 VIZIX_KAFKA_REPOSITORY.
	# CONSISTENCY FIX: scope the working variables with 'local', exactly as
	# the sibling runUISeleniumGridAll already does, so this function no
	# longer leaks its parameters into the global environment.
	local AUTOMATION_PATH=$1
	local CATEGORY_TO_EXECUTE=$(echo $2 | tr '*' ' ')
	local PRIVATE_IP=$3
	local PUBLIC_IP=$4
	local REPORT_SAVED=$5
	local VIZIX_KAFKA_REPOSITORY=$6
	local BROWSER=SELENIUMGRID
	#######################
	# Point the UI container at the public services URL and (re)start it.
	cd $VIZIX_KAFKA_REPOSITORY && echo "path : $(pwd)"
	echo "INFO > sed -i "/SERVICES_URL=/c SERVICES_URL=$PUBLIC_IP:80" .env"
	sudo sed -i "/SERVICES_URL=/c SERVICES_URL=$PUBLIC_IP:80" .env
	cat .env && sudo docker-compose up -d ui
	echo "INFO > clean report folder" && sudo rm -rf $REPORT_SAVED && sudo mkdir $REPORT_SAVED
	export GRADLE_HOME=/usr/local/gradle && export PATH=${GRADLE_HOME}/bin:${PATH}
	# Execute the suite and keep a copy of the cucumber JSON report.
	cd $AUTOMATION_PATH && echo "INFO > gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER -PurlwebUi=http://$PUBLIC_IP"
	gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER -PurlwebUi=http://$PUBLIC_IP
	cp $AUTOMATION_PATH/build/reports/cucumber/report.json $REPORT_SAVED/ExecutionTest$PUBLIC_IP.json
	# pending add browser in the json report before compress.
	sleep 15 && echo "INFO > saving reports" && cd $REPORT_SAVED && sleep 10s && tar -zcvf report.tar.gz *.*
	cp report.tar.gz $AUTOMATION_PATH/build/reports/cucumber/report.tar.gz && echo "completed" > $AUTOMATION_PATH/build/reports/cucumber/done.json
}
# to run a specific suite against all of the latest browsers
# @params
# $1 AUTOMATION_PATH
# $2 CATEGORY_TO_EXECUTE
# $3 PRIVATE_IP
# $4 PUBLIC_IP
# $5 REPORT_SAVED
# $6 VIZIX_KAFKA_REPOSITORY
runUISeleniumGridAll(){
	echo "********************************************************************************************************"
	echo "*                                RUN UI TEST ON SELENIUM GRID                                          *"
	echo "********************************************************************************************************"
	#######################
	# Run the UI suite once per browser/OS combination on Selenium Grid.
	# $1 AUTOMATION_PATH, $2 CATEGORY_TO_EXECUTE, $3 PRIVATE_IP, $4 PUBLIC_IP,
	# $5 REPORT_SAVED, $6 VIZIX_KAFKA_REPOSITORY.
	local AUTOMATION_PATH=$1
	local CATEGORY_TO_EXECUTE=$2
	local PRIVATE_IP=$3
	local PUBLIC_IP=$4
	local REPORT_SAVED=$5
	local VIZIX_KAFKA_REPOSITORY=$6
	local BROWSER=SELENIUMGRID
	#######################
	local USER=auto14
	local KEY=SRZ9DFgTB7T5h2wwLXco
	# DRY FIX: the 21 near-identical TmpParams/run pairs are replaced by one
	# data-driven loop. Each entry is 'browser|version|os|osVersion|label';
	# '*' inside a field stands for a space and is decoded by the callee.
	local combo browser version os osver label rest
	for combo in \
		'Safari|11.1|"OS*X"|"High*Sierra"|Browser_Safari11.1_OSX_HighSierra' \
		'Chrome|70.0|"OS*X"|"High*Sierra"|Browser_Chrome70_OSX_HighSierra' \
		'Firefox|63.0|"OS*X"|"High*Sierra"|Browser_Firefox63_OSX_HighSierra' \
		'Chrome|69.0|Windows|10|Browser_Chrome69_Windows10' \
		'Chrome|70.0|Windows|10|Browser_Chrome70_Windows10' \
		'Firefox|62.0|Windows|10|Browser_FireFox62_Windows10' \
		'Firefox|63.0|Windows|10|Browser_FireFox63_Windows10' \
		'Edge|15.0|Windows|10|Browser_Edge15_Windows10' \
		'Edge|16.0|Windows|10|Browser_Edge16_Windows10' \
		'Edge|17.0|Windows|10|Browser_Edge17_Windows10' \
		'IE|11.0|Windows|10|Browser_IE11_Windows10' \
		'Chrome|69.0|Windows|7|Browser_Chrome69_Windows7' \
		'Chrome|70.0|Windows|7|Browser_Chrome70_Windows7' \
		'Firefox|62.0|Windows|7|Browser_FireFox62_Windows7' \
		'Firefox|63.0|Windows|7|Browser_FireFox63_Windows7' \
		'IE|11.0|Windows|7|Browser_IE11_Windows7' \
		'Chrome|69.0|Windows|8.1|Browser_Chrome69_Windows8.1' \
		'Chrome|70.0|Windows|8.1|Browser_Chrome70_Windows8.1' \
		'Firefox|62.0|Windows|8.1|Browser_FireFox62_Windows8.1' \
		'Firefox|63.0|Windows|8.1|Browser_FireFox63_Windows8.1' \
		'IE|11.0|Windows|8.1|Browser_IE11_Windows8.1' \
	; do
		# Split the '|'-separated fields with parameter expansion (no
		# subshell, no stdin consumption that could confuse gradle).
		rest=$combo
		browser=${rest%%|*}; rest=${rest#*|}
		version=${rest%%|*}; rest=${rest#*|}
		os=${rest%%|*};      rest=${rest#*|}
		osver=${rest%%|*};   label=${rest#*|}
		# TmpParams stays a (deliberately) global variable, as before.
		TmpParams=$CATEGORY_TO_EXECUTE"*-PremoteUserName=$USER*-PremoteKey=$KEY*-PremoteBrowser=$browser*-PremoteBrowserVersion=$version*-PremoteOs=$os*-PremoteOsVersion=$osver"
		runUISeleniumGridTmpForAll $AUTOMATION_PATH $TmpParams $PRIVATE_IP $PUBLIC_IP $REPORT_SAVED $VIZIX_KAFKA_REPOSITORY $label
	done
	sleep 15 && echo "INFO > saving reports" && cd $REPORT_SAVED && sleep 10s && tar -zcvf report.tar.gz *.*
	cp report.tar.gz $AUTOMATION_PATH/build/reports/cucumber/report.tar.gz && echo "completed" > $AUTOMATION_PATH/build/reports/cucumber/done.json
}
# @params
# $1 AUTOMATION_PATH
# $2 CATEGORY_TO_EXECUTE
# $3 PRIVATE_IP
# $4 PUBLIC_IP
# $5 REPORT_SAVED
# $6 VIZIX_KAFKA_REPOSITORY
# $7 TYPE_BROWSER
# Helper for runUISeleniumGridAll: run the UI suite once on Selenium Grid for
# a single remote browser/OS combination and save a labelled JSON report.
runUISeleniumGridTmpForAll(){
echo "********************************************************************************************************"
echo "*                                RUN UI TEST ON SELENIUM GRID                                          *"
echo "********************************************************************************************************"
#######################
local AUTOMATION_PATH=$1
# '*' characters in $2 encode spaces so the value survives word splitting
# when passed as one positional parameter; decode them here.
local CATEGORY_TO_EXECUTE=$(echo $2 | tr '*' ' ')
local PRIVATE_IP=$3
local PUBLIC_IP=$4
local REPORT_SAVED=$5
local VIZIX_KAFKA_REPOSITORY=$6
# NOTE(review): unlike the parameters above, BROWSER and TYPE_BROWSER are not
# declared 'local' — confirm nothing relies on them leaking globally.
BROWSER=SELENIUMGRID
TYPE_BROWSER=$7
echo "AUTOMATION_PATH : $AUTOMATION_PATH"
echo "CATEGORY_TO_EXECUTE : $CATEGORY_TO_EXECUTE"
echo "PRIVATE_IP : $PRIVATE_IP"
echo "PUBLIC_IP : $PUBLIC_IP"
echo "REPORT_SAVED : $REPORT_SAVED"
echo "VIZIX_KAFKA_REPOSITORY : $VIZIX_KAFKA_REPOSITORY"
echo "TYPE_BROWSER : $TYPE_BROWSER"
#######################
cd $VIZIX_KAFKA_REPOSITORY
# The report folder is created but deliberately NOT wiped here, so reports
# from successive browser combinations accumulate in it.
echo "INFO > clean report folder" && sudo mkdir $REPORT_SAVED
export GRADLE_HOME=/usr/local/gradle && export PATH=${GRADLE_HOME}/bin:${PATH}
cd $AUTOMATION_PATH && echo "INFO > gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER -PurlwebUi=http://${PUBLIC_IP}"
gradle clean automationTest -Pcategory=@$CATEGORY_TO_EXECUTE -Pnocategory=~@NotImplemented -PisUsingtoken=False -PbrowserOrMobile=$BROWSER -PurlwebUi=http://${PUBLIC_IP}
# Prefix every scenario name in the cucumber JSON with the browser label so
# the merged report shows which combination produced each result.
cat $AUTOMATION_PATH/build/reports/cucumber/report.json | jq ".[].name = \"$TYPE_BROWSER : \"+(.[].name)" > $REPORT_SAVED/${TYPE_BROWSER}ExecutionTest${PUBLIC_IP}.json
} | true
aa8554a82537a27c7fea68084b3571b6ecfa3af9 | Shell | orelmuseri1/KeyLogerLinux | /testScript.sh | UTF-8 | 134 | 2.984375 | 3 | [] | no_license | #!/bin/bash
# Poll the capture device in a loop and append each non-empty read to the
# local log file; stop at the first empty read.
# NOTE(review): '/dev/nu11' (with digits) looks deliberate — presumably the
# device registered by the keylogger module — but confirm it is not a typo
# for /dev/null, which would make this loop exit immediately.
while true
do
	text=$(sudo cat /dev/nu11)
	if [ "$text" = "" ]
	then
		break
	else
		# BUGFIX: quote the expansion so whitespace and glob characters in
		# the captured text are logged verbatim instead of being word-split
		# and pathname-expanded by the shell.
		echo "$text" >> ./log.txt
	fi
done
| true |
4643bc645175da8aec72706d1009d071f69769a9 | Shell | JRGTH/xigmanas-zrep-extension | /zrep-init | UTF-8 | 20,494 | 3.171875 | 3 | [] | no_license | #!/bin/sh
# zrep-init
# zrep/ksh9x add-on for NAS4Free/XigmaNAS Embedded x64 11.x and later.
# (https://www.xigmanas.com/forums/viewtopic.php?f=71&t=13966)
# Credits: Philip Brown (pbrown) ZREP: http://www.bolthole.com/solaris/zrep/
# License: BSD2CLAUSE (BSD 2-clause Simplified License).
# Debug script
#set -x
# Copyright (c) 2018 José Rivera (JoseMR)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS 'AS IS' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Set environment.
PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin
# Determine full working directory.
CWDIR=$(dirname $(realpath $0))
# Global variables.
# Platform/product facts consumed by the compatibility checks in zrep_init().
PLATFORM=$(uname -m)
PRODUCT=$(uname -i)
# Product version with the dots stripped, e.g. "11.2-RELEASE" -> "112".
PRDVERSION=$(uname -r | cut -d '-' -f1 | tr -d '.')
# "x64-embedded" or "x64-full"; selects the install strategy in platform_check().
PRDPLATFORM=$(cat /etc/platform)
SCRIPTNAME=$(basename $0)
CONFIG="/cf/conf/config.xml"
APPNAME="zrep"
KSH9X="ksh93"
EXTLOGFILE="${CWDIR}/log/zrep_ext.log"
ZREPLOGFILE=${EXTLOGFILE}
FULLAPPNAME="${APPNAME}-addon"
PKGCONF="/etc/pkg/FreeBSD.conf"
PKGCACHE="/var/cache/pkg"
USRLOCAL="/usr/local"
KSH9PATH="${USRLOCAL}/bin"
ZREPPATH="${USRLOCAL}/bin"
LOCALSHAREPATH="${USRLOCAL}/share"
WWWPATH="/usr/local/www"
ZREPCONF="/conf/zrep_config"
ZREPCONFLINK="/var/etc/zrepconf"
INSTALLPATH="${CWDIR}/${FULLAPPNAME}"
BRANCH="master"
# Upstream sources: zrep itself from bolthole, the extension from GitHub.
ZREPURL="https://github.com/bolthole/${APPNAME}/archive/${BRANCH}.zip" # Includes latest bugfixes and portability.
ZREP_VERSION="https://raw.githubusercontent.com/bolthole/${APPNAME}/${BRANCH}/zrep"
GITURL="https://github.com/JRGTH/xigmanas-${APPNAME}-extension/archive/${BRANCH}.zip"
VERFILE="https://raw.githubusercontent.com/JRGTH/xigmanas-${APPNAME}-extension/${BRANCH}/version"
error_notify()
{
	# Record the failure in syslog and on stderr, then abort the script
	# with a non-zero exit status.
	MSG="$*"
	logger -t "${SCRIPTNAME}" "${MSG}"
	echo -e "${MSG}" >&2
	exit 1
}
runtime_config()
{
	# Make sure the add-on's runtime directories exist under ${CWDIR}.
	local dir
	for dir in conf log locale-zrep; do
		if [ ! -d "${CWDIR}/${dir}" ]; then
			mkdir -p ${CWDIR}/${dir}
		fi
	done
}
update_freebsdconf()
{
	# Switch the pkg(8) repository from the quarterly branch to latest so
	# the freshest packages are fetched; no-op when the file is absent or
	# already points at latest.
	if [ -f "${PKGCONF}" ] && grep -q "quarterly" ${PKGCONF}; then
		sed -i '' -E "s/quarterly/latest/g" ${PKGCONF} || \
		error_notify "Error: A problem has occurred while updating FreeBSD.conf file."
	fi
}
ksh9x_native_download()
{
	# Install the latest ksh93 package natively (full platform), unless pkg
	# already lists it or the binary is already in place.
	if pkg info | grep -q ${KSH9X}; then
		return
	fi
	if [ -f "${KSH9PATH}/${KSH9X}" ]; then
		return
	fi
	update_freebsdconf
	pkg install -y ${KSH9X} || error_notify "Error: A problem has occurred while fetching ${KSH9X}."
}
ksh9x_initial_download()
{
	# Fetch the ksh93 package into the local download cache (embedded
	# platform) and extract it — skipped when the bundled copy exists.
	if [ -f "${CWDIR}/${FULLAPPNAME}/${KSH9PATH}/${KSH9X}" ]; then
		return
	fi
	update_freebsdconf
	pkg fetch -y -o ${CWDIR}/download ${KSH9X} || \
	error_notify "Error: A problem has occurred while fetching ${KSH9X}."
	ksh9x_pkg_extract
}
ksh9x_pkg_extract()
{
	# Extract fetched package for Embedded platform.
	# Skipped when the package manifest has already been unpacked.
	if [ ! -f "${CWDIR}/${FULLAPPNAME}/${KSH9X}/+MANIFEST" ]; then
		echo "Extracting ${KSH9X}..."
		# The glob matches the fetched archive regardless of its extension
		# (.txz or .pkg, depending on the pkg(8) version).
		tar -Jxf ${CWDIR}/download/All/${KSH9X}-*.[tp][xk][zg] -C ${CWDIR}/${FULLAPPNAME}/ || \
		error_notify "Error: A problem has occurred while extractig ${KSH9X} files."
		# Move the pkg metadata files (+MANIFEST etc.) into their own folder,
		# then drop the downloaded archive.
		mkdir -p ${CWDIR}/${FULLAPPNAME}/${KSH9X}
		mv ${CWDIR}/${FULLAPPNAME}/+* ${CWDIR}/${FULLAPPNAME}/${KSH9X}
		rm ${CWDIR}/download/All/${KSH9X}-*.[tp][xk][zg]
		echo "Done!"
	fi
}
zrep_initial_download()
{
	# Download the zrep release archive from GitHub and hand it over to
	# zrep_pkg_extract — skipped when the bundled script already exists.
	if [ -f "${CWDIR}/${FULLAPPNAME}/${ZREPPATH}/${APPNAME}" ]; then
		return
	fi
	echo "Fetching ${APPNAME} files..."
	fetch -ao ${CWDIR}/${BRANCH}.zip --no-verify-peer --timeout=30 ${ZREPURL} || \
	error_notify "Error: A problem has occurred while fetching ${APPNAME}."
	zrep_pkg_extract
}
zrep_upgrade()
{
	# Perform an online zrep upgrade.
	# Fetches the upstream zrep script, compares its embedded ZREP_VERSION
	# with the installed one and, when newer, replaces the bundled copy and
	# logs the event to the extension log.
	DATE=$(date +"%a %b %d %T %Y")
	echo "Looking for new ${APPNAME} package!"
	mkdir -p ${CWDIR}/update
	fetch -ao ${CWDIR}/update --no-verify-peer --timeout=30 ${ZREP_VERSION} || \
	error_notify "Error: A problem has occurred while fetching version file."
	# Compare version files and fetch latest package if available.
	# NOTE(review): the comparison strips the dots ("1.7.9" -> 179), which
	# only orders correctly while every version component stays single-digit.
	if [ -f "${CWDIR}/update/${APPNAME}" ]; then
		UPDATEVER=$(cat ${CWDIR}/update/${APPNAME} | grep ZREP_VERSION= | egrep -o "([0-9]{1,}\.)+[0-9]{1,}" | tr -d '.')
		CURRENTVER=$(cat ${ZREPPATH}/${APPNAME} | grep ZREP_VERSION= | egrep -o "([0-9]{1,}\.)+[0-9]{1,}" | tr -d '.')
		if [ "${UPDATEVER}" -gt "${CURRENTVER}" ]; then
			echo "New ${APPNAME} package found, performing upgrade..."
			fetch -ao ${CWDIR}/update --no-verify-peer --timeout=30 ${ZREPURL} || \
			error_notify "Error: A problem has occurred while fetching ${APPNAME} package."
			# Unpack only the zrep script from the release zip and install it
			# over the bundled copy.
			tar -xf ${CWDIR}/update/${BRANCH}.zip --exclude='.git*' -C ${CWDIR}/update --strip-components 1 zrep-master/zrep
			rm -f ${CWDIR}/update/${BRANCH}.zip
			chmod 555 ${CWDIR}/update/${APPNAME}
			cp -Rf ${CWDIR}/update/* ${CWDIR}/${FULLAPPNAME}/${ZREPPATH}/${APPNAME}
			rm -R ${CWDIR}/update
			update_zrep_shebang
			# Logging the update event.
			UPDATEVERSION=$(cat ${ZREPPATH}/${APPNAME} | grep ZREP_VERSION= | cut -d"=" -f2)
			echo "${DATE}: ${APPNAME} upgraded to ${UPDATEVERSION}" >> ${EXTLOGFILE}
			echo "${APPNAME} upgraded to version ${UPDATEVERSION}"
			echo "${APPNAME} package upgrade completed!"
		else
			echo "${APPNAME} is on the latest version!"
			rm -R ${CWDIR}/update
		fi
	fi
}
zrep_pkg_extract()
{
	# Extract zrep script from package.
	# CONSISTENCY FIX: test for the archive under its ${BRANCH}.zip name —
	# the name zrep_initial_download saves it under — instead of the
	# hard-coded "master.zip" (the two only coincided while BRANCH=master).
	# Also fixes the "extractig" typo in the error message.
	if [ -f "${CWDIR}/${BRANCH}.zip" ]; then
		if [ ! -f "${CWDIR}/${FULLAPPNAME}${ZREPPATH}/${APPNAME}" ]; then
			echo "Extracting ${APPNAME}..."
			tar -xf ${CWDIR}/${BRANCH}.zip -C ${CWDIR}/${FULLAPPNAME}${ZREPPATH} --strip-components 1 zrep-master/zrep || \
			error_notify "Error: A problem has occurred while extracting ${APPNAME} files."
			chmod 555 ${CWDIR}/${FULLAPPNAME}${ZREPPATH}/${APPNAME}
			rm -f ${CWDIR}/${BRANCH}.zip
			update_zrep_shebang
			echo "Done!"
		fi
	fi
}
update_zrep_shebang()
{
	# Rewrite the stock '#!/bin/ksh' interpreter line of the bundled zrep
	# script to FreeBSD's ksh93 location so it can execute on XigmaNAS.
	local zrep_script="${CWDIR}/${FULLAPPNAME}${ZREPPATH}/${APPNAME}"
	if [ -f "${zrep_script}" ] && grep -q "#!/bin/ksh" ${zrep_script}; then
		sed -i '' '1s/.*/#!\/usr\/local\/bin\/ksh93\ -p/' ${zrep_script} || \
		error_notify "Error: A problem has occurred while updating zrep shebang."
	fi
}
ext_initial_download()
{
	# Always ensure the version file is present, otherwise update the extension files on startup.
	# Downloads the extension zip from GitHub, unpacks it into a temporary
	# update folder and copies everything over the current install dir.
	if [ ! -f "${CWDIR}/version" ]; then
		echo "Fetching and extracting extension files..."
		mkdir -p ${CWDIR}/update
		fetch -ao ${CWDIR}/update --no-verify-peer --timeout=30 ${GITURL} || \
		error_notify "Error: A problem has occurred while fetching extension package."
		tar -xf ${CWDIR}/update/${BRANCH}.zip --exclude='.git*' --strip-components 1 -C ${CWDIR}/update
		chmod +x ${CWDIR}/update/${SCRIPTNAME}
		cp -Rf ${CWDIR}/update/* ${CWDIR}/
		rm -R ${CWDIR}/update
		rm -f ${CWDIR}/${BRANCH}.zip
		echo "Done!"
	fi
}
extension_upgrade()
{
	# Perform an online extension upgrade.
	# Fetches the upstream version file, compares it to the local one and,
	# when newer, replaces the extension files in place and logs the event.
	DATE=$(date +"%a %b %d %T %Y")
	echo "Looking for new extension package!"
	mkdir -p ${CWDIR}/update
	fetch -ao ${CWDIR}/update --no-verify-peer --timeout=30 ${VERFILE} || \
	error_notify "Error: A problem has occurred while fetching version file."
	# Compare version files and fetch latest package if available.
	# NOTE(review): versions are compared with the dots removed, which only
	# orders correctly while each version component stays single-digit.
	if [ -f "${CWDIR}/update/version" ]; then
		UPDATEVER=$(cat ${CWDIR}/update/version | tr -d .)
		CURRENTVER=$(cat ${CWDIR}/version | tr -d .)
		if [ "${UPDATEVER}" -gt "${CURRENTVER}" ]; then
			echo "New ${FULLAPPNAME} package found, performing upgrade..."
			fetch -ao ${CWDIR}/update --no-verify-peer --timeout=30 ${GITURL} || \
			error_notify "Error: A problem has occurred while fetching extension package."
			tar -xf ${CWDIR}/update/${BRANCH}.zip --exclude='.git*' --strip-components 1 -C ${CWDIR}/update
			chmod +x ${CWDIR}/update/${SCRIPTNAME}
			rm -f ${CWDIR}/update/${BRANCH}.zip
			cp -Rf ${CWDIR}/update/* ${CWDIR}/
			rm -R ${CWDIR}/update
			rm -f ${CWDIR}/${BRANCH}.zip
			# Logging the update event.
			UPDATEVERSION=$(cat ${CWDIR}/version)
			echo "${DATE}: extension upgraded to ${UPDATEVERSION}" >> ${EXTLOGFILE}
			echo "Extension package upgrade completed!"
		else
			echo "Extension package is on the latest version!"
			rm -R ${CWDIR}/update
		fi
	fi
}
product_check()
{
	# Run the post-init command setup and GUI start only on supported
	# NAS4Free/XigmaNAS x64 products.
	case "${PRODUCT}" in
		NAS4FREE-x64|XIGMANAS-x64)
			postinit_cmd
			gui_start
			;;
	esac
}
create_addon_env()
{
	# Create the add-on's working directory tree (one loop instead of six
	# copy-pasted checks) and expose zrep-init on the system PATH.
	local dir
	for dir in "${CWDIR}/conf" "${CWDIR}/locale-zrep" "${CWDIR}/download/All" \
	    "${CWDIR}/log" "${CWDIR}/${FULLAPPNAME}" "${CWDIR}/${FULLAPPNAME}/${ZREPPATH}"; do
		if [ ! -d "${dir}" ]; then
			mkdir -p ${dir}
		fi
	done
	# Link zrep-init to /usr/local/sbin.
	if [ ! -f "${USRLOCAL}/sbin/${SCRIPTNAME}" ]; then
		ln -fs ${CWDIR}/${SCRIPTNAME} ${USRLOCAL}/sbin/${SCRIPTNAME}
	fi
}
platform_check()
{
	# Choose the install strategy for the detected platform flavour:
	# embedded images get a locally extracted ksh93 plus system symlinks,
	# full installs use the native pkg(8) package.
	case "${PRDPLATFORM}" in
		x64-embedded)
			create_addon_env
			ext_initial_download
			ksh9x_initial_download
			zrep_initial_download
			sys_symlinkdir
			;;
		x64-full)
			create_addon_env
			ext_initial_download
			ksh9x_native_download
			zrep_initial_download
			bin_symlinks
			;;
	esac
}
bin_symlinks()
{
	# Main zrep/ksh9x symlinks.
	# Relink every bundled binary from the add-on tree into ${USRLOCAL}/bin.
	# NOTE: 'ln -Ffhs' relies on BSD semantics (-h: act on the link itself,
	# -F: replace an existing directory); this function also leaves the
	# shell cd'ed into the add-on bin directory as a side effect.
	if [ -d "${INSTALLPATH}/${USRLOCAL}/bin" ]; then
		cd ${INSTALLPATH}/${USRLOCAL}/bin
		for file in *
		do
			ln -Ffhs ${INSTALLPATH}/${USRLOCAL}/bin/${file} ${USRLOCAL}/bin/${file}
		done
	fi
}
sys_symlinkdir()
{
	# Check and create/relink required symlinks/dirs for zrep/ksh9x.
	# This environment will be checked each time the script is started for
	# consistency. DRY FIX: the three copy-pasted directory checks and the
	# three copy-pasted relink loops are each collapsed into one loop over
	# the affected subtrees, preserving the original processing order.
	local subdir
	# Required directories for ksh9x.
	for subdir in share/examples man/man1 share/licenses; do
		if [ ! -d "${USRLOCAL}/${subdir}" ]; then
			mkdir -p ${USRLOCAL}/${subdir}
		fi
	done
	bin_symlinks
	# Required symlinks for ksh9x (BSD ln: -h act on the link, -F replace
	# existing directories). Note: cd's into each add-on subtree.
	for subdir in man/man1 share/examples share/licenses; do
		if [ -d "${INSTALLPATH}/${USRLOCAL}/${subdir}" ]; then
			cd ${INSTALLPATH}/${USRLOCAL}/${subdir}
			for file in *
			do
				ln -Ffhs ${INSTALLPATH}/${USRLOCAL}/${subdir}/${file} ${USRLOCAL}/${subdir}/${file}
			done
		fi
	done
}
postinit_cmd()
{
	# Check and generate temporary php script for postinit command.
	# Registers this script as a XigmaNAS post-init command (so the add-on
	# re-installs itself after each reboot of the embedded image) by writing
	# and executing a one-shot PHP snippet against the system config.
	if ! grep -qw ${CWDIR}/${SCRIPTNAME} ${CONFIG}; then
		touch ${CWDIR}/postinit || error_notify "Error: A problem has occurred while creating the postinit file."
		chmod +x ${CWDIR}/postinit
		# Negated -ge: true for pre-11.0 products only.
		if [ ! "${PRDVERSION}" -ge "110" ]; then
			# Generate php script for NAS4Free 10.3 versions.
			# (Unquoted heredoc: ${CWDIR}/${SCRIPTNAME} expand now, the \$
			# escapes keep the PHP variables literal.)
			cat << EOF > ${CWDIR}/postinit
<?php
require_once("config.inc");
require_once("functions.inc");
\$cmd = dirname(__FILE__)."/${SCRIPTNAME}";
\$i =0;
if ( is_array(\$config['rc']['postinit'] ) && is_array( \$config['rc']['postinit']['cmd'] ) ) {
	for (\$i; \$i < count(\$config['rc']['postinit']['cmd']);) {
		if (preg_match('/${SCRIPTNAME}/', \$config['rc']['postinit']['cmd'][\$i])) break; ++\$i; }
}
\$config['rc']['postinit']['cmd'][\$i] = \$config['cmd']."\$cmd";
write_config();
?>
EOF
		else
			# Generate php script for NAS4Free/XigmaNAS 11.x versions.
			cat << EOF > ${CWDIR}/postinit
<?php
require_once("config.inc");
require_once("functions.inc");
\$cmd = dirname(__FILE__)."/${SCRIPTNAME}";
\$name = "Zrep Extension";
\$comment = "Start ${APPNAME}";
\$rc = &array_make_branch(\$config,'rc','param');
if(false === array_search_ex(\$name,\$rc,'name')):
	\$rc_param = [];
	\$rc_param['uuid'] = uuid();
	\$rc_param['name'] = \$name;
	\$rc_param['value'] = \$cmd;
	\$rc_param['comment'] = \$comment;
	\$rc_param['typeid'] = '2';
	\$rc_param['enable'] = true;
	\$rc[] = \$rc_param;
	write_config();
endif;
unset(\$rc);
?>
EOF
		fi
		# Execute temporary php script.
		# Skipped during One-Button-Installer runs to avoid a nested PHP-CGI call.
		if [ "${OBI_INSTALL}" != "ON" ]; then
			echo "Creating postinit command..."
			php-cgi -f ${CWDIR}/postinit && rm ${CWDIR}/postinit || \
			error_notify "Error: A problem has occurred while executing postinit file."
			echo "Done!"
		fi
		sysrc -f ${CWDIR}${ZREPCONF} GUI_ENABLE=YES INSTALL_DIR=${CWDIR} >/dev/null 2>&1
	fi
}
gui_start()
{
	# Initialize the extension gui.
	# Restores a default config when missing, then (if GUI_ENABLE=YES)
	# records the install path, links the conf dir and installs the
	# WebGUI pages.
	if [ -d "${CWDIR}/gui" ]; then
		# Always ensure the config directory/file exist.
		if [ ! -f "${CWDIR}${ZREPCONF}" ]; then
			# Try to restore default configuration.
			runtime_config
			# Set default config.
			sysrc -f ${CWDIR}${ZREPCONF} GUI_ENABLE=YES INSTALL_DIR=${CWDIR} >/dev/null 2>&1
		fi
		GUI_STATUS=$(sysrc -f ${CWDIR}${ZREPCONF} -qn GUI_ENABLE)
		if [ "${GUI_STATUS}" = "YES" ]; then
			# Store the installation path and link conf.
			if ! sysrc -f ${CWDIR}${ZREPCONF} -n INSTALL_DIR | grep -q "${CWDIR}"; then
				sysrc -f ${CWDIR}${ZREPCONF} INSTALL_DIR=${CWDIR} >/dev/null 2>&1
			fi
			mkdir -p ${ZREPCONFLINK}
			ln -Ffhs ${CWDIR}/conf ${ZREPCONFLINK}/conf
			# Copy the gui files.
			cp -R ${CWDIR}/gui/* ${WWWPATH}/ || error_notify "Error: A problem has occurred while copying extension gui files."
		fi
	fi
}
gui_enable()
{
	# Re-enable the WebGUI add-on page: guard clause first, then relink the
	# conf directory, flag the GUI on and reinstall the gui files.
	if [ ! -d "${CWDIR}/gui" ]; then
		error_notify "Error: Extension gui files not found."
	fi
	mkdir -p ${ZREPCONFLINK}
	ln -Ffhs ${CWDIR}/conf ${ZREPCONFLINK}/conf
	sysrc -f ${CWDIR}${ZREPCONF} GUI_ENABLE=YES >/dev/null 2>&1
	cp -R ${CWDIR}/gui/* ${WWWPATH}/ || error_notify "Error: A problem has occurred while copying extension gui files."
	exit 0
}
gui_disable()
{
	# Disable gui if -t option specified: remove the installed WebGUI pages
	# and mark the GUI as disabled in the add-on config.
	if [ ! -d "${CWDIR}/gui" ]; then
		error_notify "Error: Extension gui files not found."
	fi
	rm -f ${WWWPATH}/zrep-gui.php
	rm -f ${WWWPATH}/zrep-info.php
	rm -Rf ${WWWPATH}/ext/zrep-gui
	rm -f ${LOCALSHAREPATH}/locale-zrep
	rm -Rf ${ZREPCONFLINK}
	sysrc -f ${CWDIR}${ZREPCONF} GUI_ENABLE=NO >/dev/null 2>&1 || error_notify "Error: A problem while removing extension gui files."
	# Remove empty ext folder to prevent empty "Extensions" tab.
	# BUGFIX: this cleanup was dead code — both branches of the old if/else
	# ended in exit before reaching it. Run it before the final exit.
	if [ -d "${WWWPATH}/ext" ]; then
		if [ ! "$(ls -A ${WWWPATH}/ext)" ]; then
			rm -R ${WWWPATH}/ext
		fi
	fi
	exit 0
}
pkg_upgrade()
{
	# Upgrade the bundled zrep script in place when it is already
	# installed, otherwise fetch it fresh; then look for extension updates.
	if [ ! -f "${CWDIR}/${FULLAPPNAME}/${ZREPPATH}/zrep" ]; then
		zrep_initial_download
	else
		zrep_upgrade
	fi
	extension_upgrade
}
reset_install()
{
	# Reset the extension environment: drop all generated state, then
	# recreate the bare runtime directory layout.
	local item
	echo "Removing extension files..."
	for item in conf log locale-zrep "${FULLAPPNAME}" download version; do
		if [ -e "${CWDIR}/${item}" ]; then
			rm -rf ${CWDIR}/${item}
		fi
	done
	runtime_config
}
remove_addon()
{
	# Interactively confirm, then remove the add-on's WebGUI pages, the
	# installed zrep/ksh93 pieces and the extension's own files.
	while :
	do
		read -p "Do you wish to proceed with the ${FULLAPPNAME} removal? [y/N]:" yn
		case ${yn} in
		[Yy]) break;;
		[Nn]) exit 0;;
		esac
	done
	echo "Proceeding..."
	# BUGFIX: zrep-gui.php is a regular file, so the old '-d' test was
	# always false and the WebGUI files were never cleaned up here.
	if [ -f "${WWWPATH}/zrep-gui.php" ]; then
		rm -f ${WWWPATH}/zrep-gui.php
		rm -f ${WWWPATH}/zrep-info.php
		rm -Rf ${WWWPATH}/ext/zrep-gui
		rm -f ${LOCALSHAREPATH}/locale-zrep
		rm -Rf ${ZREPCONFLINK}
	fi
	# Check for working platform.
	if [ "${PRDPLATFORM}" = "x64-embedded" ]; then
		if [ -d "${USRLOCAL}/share/examples/${KSH9X}" ]; then
			rm -rf ${USRLOCAL}/share/examples/${KSH9X}
		fi
		if [ -f "${USRLOCAL}/man/man1/${KSH9X}.1.gz" ]; then
			rm -f ${USRLOCAL}/man/man1/${KSH9X}.1.gz
		fi
		if [ -d "${USRLOCAL}/share/licenses" ]; then
			KSH93_LIC=$(find ${USRLOCAL}/share/licenses -name "${KSH9X}-*")
			# BUGFIX: find already returns the full path; the old code
			# prefixed it with ${USRLOCAL}/share/licenses again, producing a
			# non-existent path so the license dir was silently left behind.
			if [ -n "${KSH93_LIC}" ]; then
				rm -rf ${KSH93_LIC}
			fi
		fi
		if [ -f "${USRLOCAL}/bin/${KSH9X}" ]; then
			rm -f ${USRLOCAL}/bin/${KSH9X}
		fi
		if [ -f "${USRLOCAL}/bin/${APPNAME}" ]; then
			rm -f ${USRLOCAL}/bin/${APPNAME}
		fi
	elif [ "${PRDPLATFORM}" = "x64-full" ]; then
		if [ -f "${USRLOCAL}/bin/${APPNAME}" ]; then
			rm -f ${USRLOCAL}/bin/${APPNAME}
		fi
		pkg delete -y ${KSH9X}
	fi
	# Remove addon related files and folders only-
	# to protect any user-created custom files.
	FILES="conf download gui locale-zrep log zrep-addon README.md postinit release_notes version zrep-init"
	for file in ${FILES}; do
		if [ -f "${CWDIR}/${file}" ] || [ -d "${CWDIR}/${file}" ]; then
			rm -rf ${CWDIR}/${file}
		fi
	done
	# BUGFIX: the old '! -f' test was inverted (and -f would also miss the
	# now-dangling symlink after zrep-init was deleted above); test the
	# link itself so it is actually removed.
	if [ -L "${USRLOCAL}/sbin/${SCRIPTNAME}" ]; then
		rm ${USRLOCAL}/sbin/${SCRIPTNAME}
	fi
	echo "Done!"
	echo "Please manually remove the Zrep Extension Command Script from the WebGUI."
	exit 0
}
get_versions()
{
	# Report the ksh93, zrep and extension versions on stdout, then exit 0.
	APPVERSION="version file not found!"
	if [ -f "${CWDIR}/version" ]; then
		APPVERSION=$(cat ${CWDIR}/version)
	fi
	echo -e "\nksh93 version:" && ${USRLOCAL}/bin/ksh93 --version
	echo -e "\nzrep version:" && ${USRLOCAL}/bin/zrep version
	echo -e "\nextension version:"
	echo "${FULLAPPNAME} ${APPVERSION}"
	exit 0
}
exec_status()
{
	# Report the startup result of the previous command chain to syslog:
	# a success message when it returned 0, otherwise a fault notice.
	# ($? must be inspected before any other command runs in this body.)
	if [ $? -eq 0 ]; then
		MSG="script has been started successfully!"
	else
		MSG="script started with faults"
	fi
	logger -t ${SCRIPTNAME} ${MSG}
}
zrep_init()
{
	# Verify system/product compatibility, then bootstrap the add-on.
	if [ "${PLATFORM}" != "amd64" ]; then
		echo "Unsupported platform!"; exit 1
	fi
	if [ "${PRDVERSION}" -lt "110" ]; then
		echo "Unsupported version!"; exit 1
	fi
	echo "Initializing ${APPNAME}..."
	# Platform-specific install, product post-init/GUI setup, status log.
	platform_check
	product_check
	exec_status
}
# Run-time configuration.
runtime_config
# Parse the command-line flags. Management options either exit directly
# (-v/-g/-t/-r, and -h via its usage text) or fall through so the normal
# initialization below still runs (-o/-u/-x).
while getopts ":ouxrvgth" option; do
	case ${option} in
		[h]) echo "Usage: ${SCRIPTNAME} -[option]";
		echo "Options:"
		echo "        -u  Upgrade Zrep/Add-On packages."
		echo "        -v  Display product versions."
		echo "        -g  Enables the addon GUI."
		echo "        -t  Disable the addon GUI."
		echo "        -x  Reset ${FULLAPPNAME}."
		echo "        -r  Remove ${FULLAPPNAME}."
		echo "        -h  Display this help message."; exit 0;;
		[o]) OBI_INSTALL="ON";; # To prevent nested PHP-CGI call for installation with OBI.
		[u]) pkg_upgrade;;
		[x]) reset_install;;
		[r]) remove_addon;;
		[v]) get_versions;;
		[g]) gui_enable; exit 0 ;; # For enable the addon gui.
		[t]) gui_disable; exit 0 ;; # For disable the addon gui.
		[?]) echo "Invalid option, -h for usage."; exit 1;;
	esac
done
# Default action (also reached after -o/-u/-x): initialize the add-on.
zrep_init
| true |
132b27cd97668e9d7975a8e2540b083daf8e4780 | Shell | nx0/watogen | /watogen.sh | UTF-8 | 7,540 | 3.890625 | 4 | [] | no_license | #!/bin/bash
TITLE_TEXT="Web Administration Tool Host Generator"
FOOTER_TXT="bugs, etc: hackgo@gmail.com"
echo "
_ [ $TITLE_TEXT ]
__ ____ _| |_ ___ __ _ ___ _ __
\ \ /\ / / _ | __/ _ \ / _ |/ _ \ '_ \
\ V V / (_| | || (_) | (_| | __/ | | |
\_/\_/ \__,_|\__\___/ \__, |\___|_| |_|
|___/ $FOOTER_TXT
"
######### COLOR SETUP #########
GREEN=`tput setaf 2`
RED=`tput setaf 3`
ENDCOLOR=`tput sgr0`
######### COLOR SETUP #########
function reload {
	# Activate the generated configuration in check_mk.
	# NOTE(review): 'cmk -R' recompiles the config and restarts the
	# monitoring core — confirm the flag for the installed check_mk version.
	cmk -R
}
function scanips {
	# Ping-scan an IP range with nmap, resolve missing names via SNMP, and
	# save "NAME IP" pairs to a file for later WATO config generation.
	# ARG 1: IP range (XXX.XXX.XXX.XXX/XX)
	# ARG 2: "onlyscan" to stop after scanning, or the WATO folder name
	# ARG 3: WATO folder name (handled by filex)
	if [ "${1}" != "" ]; then
		savescan="/tmp/$(echo ${1}| sed 's/\./_/g' | sed 's/\//-/g').txt"
		> $savescan
		echo "[*] Guardando resultados en ${savescan} ... "
		#echo `nmap -sP ${1} -vv| grep -B2 "MAC Address" | grep -E "report|MAC" | grep -Eo "([0-9]{1,3}\.?){4}|\(.*?\)"` | sed 's/(/ /g' | sed 's/)/\n/g' | sed 's/^ //g' | sed "s/'/_/g" | sed '/^$/d' >> ${savescan}
		nmap -sP ${1} | grep "scan report for"| awk '{ print $5 " " $6 }'| sed 's/^ //g' | sed '/^$/d' |
		while read ip; do
			echo "[>>] $ip"
			# Improved IP detection: a bare IP in field 1 means nmap found
			# no reverse DNS name for the host.
			if [ "`echo \"$ip\" | awk '{ print $1 }' | grep -E '([0-9]\.?){4}'`" != "" ]; then
				# nmblookup -T -A IP
				echo -n "[$ip] sin DNS, probando snmp..."
				snmpname=`snmpget -c public -v 2c $ip iso.3.6.1.2.1.1.1.0 2>/dev/null| grep -v "No Response" | awk '{ print $5 }'`
				if [ "${snmpname}" != "" ]; then
					echo " OK (${snmpname})"
					echo "SNMPDNS_$snmpname $(echo $ip| awk '{ print $1 }')" >> $savescan
				else
					echo " ERROR!"
					echo "NO_DNS $(echo $ip| awk '{ print $1 }')" >> $savescan
				fi
			else
				echo $ip | tr -d "(" | tr -d ")" >> $savescan
			fi
		done
		# BUGFIX: the while loop above runs in a pipeline subshell, so a
		# counter updated inside it never reaches this scope — the old code
		# always reported "(0) hosts". Count the saved lines here instead.
		cont=`wc -l < $savescan`
		echo "... Completado con ($cont) hosts."
		case ${2} in
			"onlyscan")
				exit 0
				;;
			*)
				filex ${savescan} ${2}
		esac
	else
		echo "rango de ip no especificado. Especifica XXX.XXX.XXX.XXX/XX"
	fi
}
# PARSER
function filex {
	# ARG 1 => scan-results file (one "NAME IP" pair per line)
	# ARG 2 => WATO folder name to create or update
	# Validates the file, creates the WATO folder if needed (or asks before
	# overwriting an existing one) and calls parse to generate the config.
	#folder=`basename $1`
	ipfile=$1
	folder=$2
	if [ -f "${1}" ]; then
		echo -n "Analizando $1 "
		echo "(`cat $1 | wc -l` hosts)"
		if [ ! -d "/etc/check_mk/conf.d/wato/$folder/" ]; then
			mkdir "/etc/check_mk/conf.d/wato/$folder/"
			parse $folder $ipfile
		else
			echo -n "la carpeta $folder ya existe, quieres actualizar los hosts? [y]/n: "
			read resp
			# BUGFIX: the old case listed '*' before "n", so the catch-all
			# matched every answer and the abort branch was unreachable.
			# Check the negative answer first; anything else (including an
			# empty reply) defaults to "yes", as the [y]/n prompt suggests.
			case $resp in
				[Nn])
					exit 0
					;;
				*)
					echo "# ---- FICHERO GENERADO POR WATOGEN ----" > "/etc/check_mk/conf.d/wato/$folder/hosts.mk"
					parse $folder $ipfile
					;;
			esac
		fi
		chown apache:nagios -R "/etc/check_mk/conf.d/wato/$folder/"
	else
		echo "el fichero ${1} no existe"
	fi
}
# PARSE THE CONFIGURATION
# Emit one Check_MK config line per host from the scan file.
#   ARG 1: output mode — "all" (all_hosts entry), "ip" (ipaddresses entry)
#          or "attrib" (host_attributes entry)
#   ARG 2: file to append the generated lines to
#   ARG 3: scan-result file to read ("NAME IP" per line)
function genconfig {
	filewato="${2}"
	file_to_read="${3}"
	cat ${file_to_read} |sed '/^$/d' | sort | while read line; do
		# Extract the IP anywhere in the line and the name (first field).
		ip="`echo $line| grep -Eo '(([0-9]{1,3}+\.){3}[0-9]{1,3})'`"
		model="`echo $line | awk '{ print $1 }'`"
		case $model in
			"NO_DNS")
				# No name at all: use the IP itself as the host name.
				model="$ip"
				attr_string="'$ip': {'alias': u'NO_DNS', 'inventory_failed': True, 'ipaddress': u'$ip'},"
			;;
			SNMPDNS_*)
				# Name came from SNMP: strip the marker prefix.
				model="`echo $model | awk -F 'SNMPDNS_' '{ print $2 }'`"
				attr_string="'$model': {'alias': u'SNMP_DNS', 'inventory_failed': True, 'ipaddress': u'$ip'},"
			;;
			*)
				attr_string="'$model': {'inventory_failed': True, 'ipaddress': u'$ip'},"
		esac
		if [ "${1}" == "all" ]; then
			echo "\"$model|ping|wato|/\" + FOLDER_PATH + \"/\"," >> $filewato
		elif [ "${1}" == "ip" ]; then
			echo "'$model': u'$ip'," >> $filewato
		elif [ "${1}" == "attrib" ]; then
			echo "$attr_string" >> $filewato
		else
			echo "nope"
		fi
	done
}
# CREATE THE CONFIGURATION
# Append the three WATO sections (all_hosts, ipaddresses, host_attributes)
# to /etc/check_mk/conf.d/wato/<ARG1>/hosts.mk from the host file in ARG2.
function parse {
	file_to_parse=${2}
	hostfile="/etc/check_mk/conf.d/wato/$1/hosts.mk"
	echo "[*] Generando $hostfile"
	echo "all_hosts += [" >> $hostfile
	echo -n "[-] Añadiendo ips ..."
	genconfig all $hostfile $file_to_parse
	echo " OK"
	echo "]
	" >> $hostfile
	echo "ipaddresses.update({" >> $hostfile
	echo -n "[-] Generado IPS ..."
	genconfig ip $hostfile $file_to_parse
	echo " OK"
	echo "})
	" >> $hostfile
	echo "host_attributes.update({" >> $hostfile
	echo -n "[-] Generado atributos ..."
	genconfig attrib $hostfile $file_to_parse
	echo " OK"
	echo "})" >> $hostfile
}
####################################################################################################
# ENTRY POINT
# Dispatch on $1: ""/help (usage), --mode network|file|manual, --list, --manual.
####################################################################################################
case ${1} in
""|"help")
echo "Elige un modo para descubrir equipos:"
echo "	$0 [--mode] [network [--rango RANGO-IP] <onlyscan>|--name [NAME]|file [--location LOCATION.txt]] <reload>"
echo "	* Ejemplo de uso:"
echo "		# $0 --mode network --rango 192.168.1.0/24 --name red_local"
echo "		# $0 --mode network --rango 192.168.1.0/24 onlyscan"
echo "		# $0 --mode file --ipfile /tmp/algo.txt --folder red_local"
echo "		# $0 --mode manual host.doma.in --folder red_local"
echo "	* Modos de uso:"
echo "		- network: Escanea un rango dado para descubrir equipos"
echo "		- file: Lee un fichero de texto para añadir equipos (formato: DNS IP)"
echo "	# $0 --list"
;;
"--mode")
case ${2} in
"network")
# network mode: scan a range with scanips; "onlyscan" stops after the
# scan, "--name NAME" additionally creates/updates the WATO folder.
case ${3} in
"--rango")
rango=${4}
options_for_rango=${5}
name=${6}
case ${options_for_rango} in
"onlyscan")
scanips ${rango} onlyscan
;;
"--name")
if [ "${name}" != "" ]; then
scanips ${rango} ${name}
# optional trailing "reload" argument restarts Check_MK afterwards
if [ "${7}" == "reload" ]; then
reload
fi
else
echo "Faltan argumentos. Opciones disponibles: [--name [NAME]]"
fi
;;
*)
echo "Faltan argumentos!"
echo "Opciones disponibles: [onlyscan|--name [NAME]]"
esac
;;
*)
echo "Error!"
echo "Opciones disponibles: [--rango [RANGO IP]]"
exit 0
esac
;;
"file")
# file mode: import hosts from an existing "NAME IP" text file.
options_for_file="${5}"
ipfile=${4}
folder_name="${6}"
if [ -f "$ipfile" ]; then
case ${options_for_file} in
"--folder")
if [ "$folder_name" != "" ]; then
filex "${ipfile}" "${folder_name}"
if [ "${7}" == "reload" ]; then
reload
fi
else
echo "Especifica un nombre válido para la carpeta"
fi
;;
*)
echo "Faltan argumentos!"
echo "Opciones disponibles: [--folder [FOLDER NAME]]"
esac
else
echo "no existe: $ipfile"
exit 0
fi
;;
"manual")
# manual mode: only checks whether the host already appears in WATO.
echo "buscando $3"
find /etc/check_mk/conf.d/wato/ | xargs grep -i $3
if [ $? -eq 0 ]; then
echo "servidor ya añadido"
fi
;;
*)
echo "Faltan argumentos!"
echo "Opciones disponibles: [network|file]"
esac
;;
"--list")
# List every host currently present in the generated hosts.mk files.
for xx in `find /etc/check_mk/conf.d/wato/ -type d`; do
echo "[ $xx ]"
for dd in $xx; do
find $dd -name "hosts.mk" | xargs sed -n '/\[/,/\]/p' | awk -F '|' '{ print $1 }' | grep -vE 'hosts|\[|\]' | awk -F '"' '{ print $2 }' | sed '/^$/d'
done
echo " "
done
;;
"--manual")
echo "we"
find /etc/check_mk/conf.d/wato/ | xargs grep -i $2
;;
*)
echo "Opción inválida!"
echo "Opciones disponibles: [network|file]"
esac
| true |
f2288d19be87b26d6ee7c8e38be5aaac3824a3c4 | Shell | multicom-toolbox/multicom | /installation/MULTICOM_test_codes/.T17-run-prc-hard.sh.default | UTF-8 | 1,092 | 2.828125 | 3 | [] | no_license | #!/bin/bash
#SBATCH -J prc
#SBATCH -o prc-hard-%j.out
#SBATCH --partition Lewis,hpc5,hpc4
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=1
#SBATCH --mem-per-cpu=10G
#SBATCH --time 2-00:00
# Slurm batch job: run the PRC (hard option) pipeline for target T1006 and
# verify that the expected model prc1.pdb was produced.  SOFTWARE_PATH is a
# placeholder substituted at install time.
# NOTE(review): dtime is set but never used below.
dtime=$(date +%m%d%y)
mkdir -p SOFTWARE_PATH/test_out/T1006_prc_hard/
cd SOFTWARE_PATH/test_out/T1006_prc_hard/
mkdir prc
# marker file signals that the job is in progress
touch SOFTWARE_PATH/test_out/T1006_prc_hard.running
# skip the expensive run if the result already exists
if [[ ! -f "SOFTWARE_PATH/test_out/T1006_prc_hard/prc/prc1.pdb" ]];then
	perl SOFTWARE_PATH/src/meta/prc/script/tm_prc_main_v2.pl SOFTWARE_PATH/src/meta/prc/prc_option_hard SOFTWARE_PATH/examples/T1006.fasta prc 2>&1 | tee SOFTWARE_PATH/test_out/T1006_prc_hard.log
fi
printf "\nFinished.."
printf "\nCheck log file <SOFTWARE_PATH/test_out/T1006_prc_hard.log>\n\n"
# success is judged solely by the presence of the output model
if [[ ! -f "SOFTWARE_PATH/test_out/T1006_prc_hard/prc/prc1.pdb" ]];then
	printf "!!!!! Failed to run prc, check the installation <SOFTWARE_PATH/src/meta/prc/>\n\n"
else
	printf "\nJob successfully completed!"
	printf "\nResults: SOFTWARE_PATH/test_out/T1006_prc_hard/prc/prc1.pdb\n\n"
fi
rm SOFTWARE_PATH/test_out/T1006_prc_hard.running
| true |
d067e5e5e07962cc5d32a41d9ddacb73680d8748 | Shell | gghatano/BKB | /nobel_scrape/get_text.bash | UTF-8 | 307 | 3.0625 | 3 | [] | no_license | #!/bin/bash
# For every URL in ./url_list: download the page into tmp/ (timestamped file
# name) and slice out the section between the "novel_honbun" and "novel_bn"
# markers into <file>.text.
dir=$(dirname $0)
cat url_list | while read line
do
echo $line
# timestamp + PID makes the temp file name unique per download
now_time=$(date +%Y%m%d%H%M%S).$$
curl $line > $dir/tmp/$now_time
num=$(cat $dir/tmp/$now_time | wc -l)
# Using the file's own line count as the -A/-B context width makes the two
# greps act as "everything after the first marker" / "before the second".
cat $dir/tmp/$now_time |
grep -A $num "novel_honbun" |
grep -B $num "novel_bn" > $dir/tmp/$now_time.text
done
| true |
bb21ee60fc88d48511bd610b967edfa1d55d0c4e | Shell | SerenityOS/serenity | /Toolchain/BuildCMake.sh | UTF-8 | 2,264 | 4.0625 | 4 | [
"BSD-2-Clause"
] | permissive | #!/usr/bin/env bash
# This script builds the CMake build system
set -e
# absolute directory this script lives in, independent of the caller's cwd
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# shellcheck source=/dev/null
. "${DIR}/../Meta/shell_include.sh"
exit_if_running_as_root "Do not run BuildCMake.sh as root, parts of your Toolchain directory will become root-owned"
# install prefix, out-of-tree build dir and download cache for the toolchain
PREFIX_DIR="$DIR/Local/cmake"
BUILD_DIR="$DIR/Build/cmake"
TARBALLS_DIR="$DIR/Tarballs"
NPROC=$(get_number_of_processing_units)
# honor a caller-provided MAKEJOBS, defaulting to the CPU count
[ -z "$MAKEJOBS" ] && MAKEJOBS=${NPROC}
check_sha() {
    # Verify that FILE's SHA-256 digest equals EXPECTED_HASH.
    # Returns 0 on match, non-zero on mismatch; reports usage errors via
    # the shared error helper from shell_include.sh.
    if [ $# -ne 2 ]; then
        error "Usage: check_sha FILE EXPECTED_HASH"
        return 1
    fi
    FILE="${1}"
    EXPECTED_HASH="${2}"
    SYSTEM_NAME="$(uname -s)"
    # macOS ships Perl's shasum rather than coreutils' sha256sum.
    case "${SYSTEM_NAME}" in
        Darwin) SEEN_HASH="$(shasum -a 256 "${FILE}" | cut -d " " -f 1)" ;;
        *)      SEEN_HASH="$(sha256sum "${FILE}" | cut -d " " -f 1)" ;;
    esac
    [ "${EXPECTED_HASH}" = "${SEEN_HASH}" ]
}
# Note: Update this alongside the cmake port, and Meta/CMake/cmake-version.cmake if the build requires this version of cmake.
CMAKE_VERSION=3.26.4
CMAKE_ARCHIVE_SHA256=313b6880c291bd4fe31c0aa51d6e62659282a521e695f30d5cc0d25abbd5c208
CMAKE_ARCHIVE=cmake-${CMAKE_VERSION}.tar.gz
CMAKE_ARCHIVE_URL=https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/${CMAKE_ARCHIVE}
mkdir -p "$DIR"/Tarballs
pushd "$DIR"/Tarballs
    # Download only when the archive is not already cached.
    if [ ! -e "${CMAKE_ARCHIVE}" ]; then
        echo "Downloading CMake from ${CMAKE_ARCHIVE_URL}..."
        curl "${CMAKE_ARCHIVE_URL}" -L -o "${CMAKE_ARCHIVE}"
    else
        echo "${CMAKE_ARCHIVE} already exists, not downloading archive"
    fi
    # Remove a corrupt/tampered download so the next run fetches it again.
    if ! check_sha "${CMAKE_ARCHIVE}" "${CMAKE_ARCHIVE_SHA256}"; then
        echo "CMake archive SHA256 sum mismatch, please run script again"
        rm -f "${CMAKE_ARCHIVE}"
        exit 1
    fi
    if [ ! -d "cmake-${CMAKE_VERSION}" ]; then
        echo "Extracting ${CMAKE_ARCHIVE}..."
        tar -xf "${CMAKE_ARCHIVE}"
    else
        echo "cmake-${CMAKE_VERSION} already exists, not extracting archive"
    fi
popd
mkdir -p "${PREFIX_DIR}"
mkdir -p "${BUILD_DIR}"
pushd "${BUILD_DIR}"
    # Bootstrap CMake itself, then build and install it with Ninja.
    "${TARBALLS_DIR}"/cmake-"${CMAKE_VERSION}"/bootstrap --generator="Ninja" --prefix="${PREFIX_DIR}" --parallel="${MAKEJOBS}"
    ninja -j "${MAKEJOBS}"
    ninja install
popd
| true |
e31e73b4f7856190703e57b27a88eb04b6fb6fa0 | Shell | nnkennard/coref_survey | /decision_tree/generate_all.sh | UTF-8 | 290 | 2.875 | 3 | [] | no_license | for spans in gold predicted
do
  for split in train test
  do
    # NOTE(review): "bers" looks like a typo for "bert" -- confirm with
    # the files actually present under outputs/.
    for model in spanbert-base bers
    do
      # Bug fix: the original assignment ended with an unterminated "$split
      # quote, so bash aborted the whole script with "unexpected EOF while
      # looking for matching quote" before anything ran.
      args="outputs/${model}_${spans}_conll-dev.jsonl outputs/conll-dev_dt_${split}.keys ${model} ${spans} conll-dev ${split}"
      # $args is intentionally unquoted: it carries several
      # whitespace-separated command-line arguments.
      python generate_examples.py $args
    done
  done
done
| true |
3d694bfaae895fc7b28e3bc483c8d1470c0591e1 | Shell | TranslationalBioinformaticsUnit/RNASEQ-Pair-end-Pipeline | /RNAseqPE/PipelineRNAseqPE.sh | UTF-8 | 3,953 | 3.265625 | 3 | [] | no_license | #!/bin/bash
# Positional arguments (all directories/files given as absolute paths):
#/example/fileId.txt
SamplesNames_Dir=$1
#/example/fastq -> where are the fastq files
Fastq_Dir=$2
#/example/workdir
Work_Dir=$3
#/example/scripts
Code_Dir=$4
#/example/reference (where is the reference genome and gtf file)
Reference_Dir=$5
#INDEX REFERENCE GENOME (mus_musculus38)
#Parameters(1): ReferenceDir
#qsub -N Index_Genome $Code_Dir/indexReferenceGenome.sh $Reference_Dir
#for each fastq
# Submit the 7-step RNA-seq pipeline (QC -> trim -> QC -> map -> map-QC ->
# counts -> MultiQC) for every sample listed in the sample-ID file; each qsub
# job waits on the previous step via -hold_jid.
# Bug fix: the loop previously read $SamplesNames_dir (lowercase "dir"),
# which is never set -- shell variables are case sensitive -- so `cat` got no
# file argument and blocked reading stdin.  The file path is in
# $SamplesNames_Dir (argument 1).
for FastqID in $(cat $SamplesNames_Dir);
do
 echo "$FastqID"
 ###############
 #1-Quality control of the original fastq
 ###############
 #Create folder fastqQC
 if [ ! -d $Work_Dir/fastqQC ]; then
  mkdir $Work_Dir/fastqQC
  chmod 777 $Work_Dir/fastqQC
 fi
 #Parameters(4): FastqID Option(1) InputDir OutputDir
 ###Option=1 -> fastqc of original fastq
 ###Option=2 -> fastqc of trimmed fastq
 qsub -N QC_${FastqID} $Code_Dir/fastqQC.sh $FastqID 1 $Fastq_Dir $Work_Dir/fastqQC
 ##############
 #2-Trimmed Reads
 ##############
 #Create folder trimmed_reads
 if [ ! -d $Work_Dir/trimmed_reads ]; then
  mkdir $Work_Dir/trimmed_reads
  chmod 777 $Work_Dir/trimmed_reads
 fi
 #Parameters(3): FastqID InputDir OutputDir
 #Don't start this job until fastqc is not finish
 qsub -hold_jid QC_${FastqID} -N trim_${FastqID} $Code_Dir/trimmedReads.sh $FastqID $Fastq_Dir $Work_Dir/trimmed_reads
 ###############
 #3-Quality control of the TRIMMED fastq
 ###############
 #Create folder trimmed_fastqQC
 if [ ! -d $Work_Dir/trimmed_reads/trimmed_fastqQC ]; then
  mkdir $Work_Dir/trimmed_reads/trimmed_fastqQC
  chmod 777 $Work_Dir/trimmed_reads/trimmed_fastqQC
 fi
 #Parameters(4): FastqID Option(2) InputDir OutputDir
 #Don't start this job until trimming is not finish
 qsub -hold_jid trim_${FastqID} -N QC_trim_${FastqID} $Code_Dir/fastqQC.sh $FastqID 2 $Work_Dir/trimmed_reads $Work_Dir/trimmed_reads/trimmed_fastqQC
 ################
 #4-Mapping TopHAT
 ################
 #Create folder bam
 if [ ! -d $Work_Dir/bam ]; then
  mkdir $Work_Dir/bam
  chmod 777 $Work_Dir/bam
 fi
 #Parameters(4): FastqID InputDir OutputDir ReferenceDir
 #Don't start this job until fastqc trimming is not finish
 qsub -hold_jid QC_trim_${FastqID} -N mapping_${FastqID} $Code_Dir/mappingTophat.sh $FastqID $Work_Dir/trimmed_reads $Work_Dir/bam $Reference_Dir
 ###############
 #5-Quality control of the mapping
 ###############
 #Parameters(4): FastqID InputDir OutputDir (the inputdir and oputputdir is the same) ReferenceDir
 #Don't start this job until mapping is not finish
 qsub -hold_jid mapping_${FastqID} -N mappingQC_${FastqID} $Code_Dir/mappingQC.sh $FastqID $Work_Dir/bam $Work_Dir/bam $Reference_Dir
 ################
 #6-HTSEQ Count Tables
 ################
 #Create folder count_tables
 if [ ! -d $Work_Dir/count_tables ]; then
  mkdir $Work_Dir/count_tables
  chmod 777 $Work_Dir/count_tables
 fi
 #Parameters(5): FastqID Mode_option InputDir OutputDir ReferenceDir
 ###Mode_option=1 -> intersection-nonempty(by default)
 ###Mode_option=2 -> union
 ###Mode_option=3 -> intersection_strict
 #Don't start this job until quality control of mapping is not finish
 qsub -hold_jid mappingQC_${FastqID} -N count_${FastqID} $Code_Dir/htseq.sh $FastqID 1 $Work_Dir/bam $Work_Dir/count_tables $Reference_Dir
 ################
 #7-MultiQC Plots
 ################
 if [ ! -d $Work_Dir/multiQCPlots ]; then
  mkdir $Work_Dir/multiQCPlots
  chmod 777 $Work_Dir/multiQCPlots
 fi
 #MultiQC plots of the mapping quality control results
 #Parameters(2): InputDir OutputDir
 #Don't start this job until the generating of count tables is not finish
 qsub -hold_jid count_${FastqID} -N multiQC_${FastqID} $Code_Dir/multiQC.sh $Work_Dir/bam $Work_Dir/multiQCPlots
done | true |
d7cfb3cac48b276eddc1e69f554d0112ec9f6867 | Shell | arcanexil/sif-git | /cp_add_ext_date | UTF-8 | 770 | 3.765625 | 4 | [] | no_license | #!/bin/bash
# cp_add_ext_date / mv_add_ext_date: copy (or move, depending on the name
# this script is invoked under) a file to <file>.AAAAMMJJhhmm, where the
# timestamp is the file's own modification time.
usage="syntaxe $0 nom_de_file"
C="cp -ax" # cp_add_ext_date or mv_add_ext_date
CLASSE=`basename $0`
[ "$CLASSE" = "mv_add_ext_date" ] && C=mv
if [ $# -ne 1 ] ;then
 echo "$C file file.AAAAMMJJhhmm"
 echo 'avec la date de file (et non date_ext)'
 echo $usage
 exit 1
fi
if [ ! -f "$1" ] && [ ! -d "$1" ] && [ ! -h "$1" ] ;then
 echo "$1 inexistant"
 exit 1
fi
# ls --full-time prints "... YYYY-MM-DD hh:mm:ss.nnnnnnnnn ..." — fields 6
# and 7 hold the modification date and time.
YYmmdd=`ls -ld --full-time "$1" | awk '{print $6}'`
hhmmss=`ls -ld --full-time "$1" | awk '{print $7}'`
# -l option kept for compatibility with old Red Hat releases
# Bash substring expansion (0-based) replaces the non-portable GNU
# 'expr substr' (1-based) used previously; same fields are extracted.
ANNEE=${YYmmdd:0:4}
MOIS=${YYmmdd:5:2}
JOUR=${YYmmdd:8:2}
HEURE=${hhmmss:0:2}
MINUTE=${hhmmss:3:2}
# $C is intentionally unquoted: it may hold "cp -ax" (two words).
$C "$1" "$1.$ANNEE$MOIS$JOUR$HEURE$MINUTE"
echo $C "$1" "$1.$ANNEE$MOIS$JOUR$HEURE$MINUTE"
| true |
5a6c602159794d5625e6dd5106f91356d434252e | Shell | eufrankoficial/SHELL | /createAlias.sh | UTF-8 | 491 | 3.375 | 3 | [] | no_license | #!/bin/bash
# Interactively create a shell alias and append it to ~/.bashrc.
# Free to modify and share with the community.
# Author: Rafael Franke
# Email: rafaelfranekan@gmail.com
echo $1
echo "Enter alias name"
read name
echo $name
echo "Enter the action that him will execute: Ex.: cd /var/www && ./myScript.sh"
read action
# 'cd' with no argument switches to $HOME, where .bashrc lives.
cd
# Adjust the path below if your shell rc file lives elsewhere (this was
# written on Linux Mint, where ~/.bashrc is the right target).
echo "alias $name='$action'" >> .bashrc | true |
8872ec7cf5d4b60ceba6b6e9d03a533ec96979af | Shell | Sherlock-Holo/repo | /archlinuxcn/deepin-wine-tim/PKGBUILD | UTF-8 | 2,774 | 2.875 | 3 | [] | no_license | # Maintainer: CountStarlight <countstarlight@gmail.com>
# Maintainer: wszqkzqk <wszqkzqk@gmail.com>
# Maintainer: ssfdust <ssfdust@gmail.com>
# PKGBUILD metadata: Tencent TIM packaged to run under Deepin's Wine build.
pkgname=deepin-wine-tim
pkgver=2.3.1_3
# version of the upstream Deepin .deb whose Wine bottle is reused
deepintimver=2.0.0deepin4
pkgrel=5
pkgdesc="Tencent TIM (com.qq.office) on Deepin Wine For Archlinux"
arch=("x86_64")
url="http://tim.qq.com/"
license=('custom')
depends=('p7zip' 'wine' 'wine-mono' 'wine_gecko' 'xorg-xwininfo' 'xdotool' 'wqy-microhei' 'adobe-source-han-sans-cn-fonts' 'lib32-alsa-lib' 'lib32-alsa-plugins' 'lib32-libpulse' 'lib32-openal' 'lib32-mpg123' 'lib32-gnutls')
conflicts=('wine-tim' 'deepin.com.qq.office' 'deepin-tim-for-arch')
install="deepin-wine-tim.install"
_mirror="https://mirrors.ustc.edu.cn/deepin"
# sources: the Deepin .deb (Wine bottle), the current TIM installer, and
# local helper/registry files
source=("$_mirror/pool/non-free/d/deepin.com.qq.office/deepin.com.qq.office_${deepintimver}_i386.deb"
        "https://qd.myapp.com/myapp/qqteam/tim/down/TIM${pkgver}.exe"
        "run.sh"
        "reg_files.tar.bz2"
        "update.policy")
md5sums=('d5c37cb4f960e13111ce24dbc0dd2d58'
         'ecd2cbce6f497f045e7920e3c7468db2'
         'cd6a7a61ff94739d9f125840df0592bb'
         '44291a46887c0f9107a97c4ddf0c8d63'
         'a66646b473a3fbad243ac1afd64da07a')
build() {
  # Unpack the Deepin .deb, patch its .desktop file, replace the bundled
  # (outdated) TIM with fonts/registry fixes and repack the Wine bottle.
  msg "Extracting DPKG package ..."
  mkdir -p "${srcdir}/dpkgdir"
  tar -xvf data.tar.xz -C "${srcdir}/dpkgdir"
  # add the Network category so the launcher shows up in the right menu
  sed "s/\(Categories.*$\)/\1Network;/" -i "${srcdir}/dpkgdir/usr/share/applications/deepin.com.qq.office.desktop"
  msg "Extracting Deepin Wine TIM archive ..."
  7z x -aoa "${srcdir}/dpkgdir/opt/deepinwine/apps/Deepin-TIM/files.7z" -o"${srcdir}/deepintimdir"
  msg "Removing original outdated TIM directory ..."
  rm -r "${srcdir}/deepintimdir/drive_c/Program Files/Tencent/TIM"
  msg "Adding config files and fonts"
  # pre-baked Wine registry hives shipped alongside the PKGBUILD
  tar -jxvf reg_files.tar.bz2 -C "${srcdir}/"
  cp userdef.reg "${srcdir}/deepintimdir/userdef.reg"
  cp system.reg "${srcdir}/deepintimdir/system.reg"
  cp update.policy "${srcdir}/deepintimdir/update.policy"
  cp user.reg "${srcdir}/deepintimdir/user.reg"
  # symlink system CJK fonts into the Wine prefix instead of bundling them
  ln -sf "/usr/share/fonts/wenquanyi/wqy-microhei/wqy-microhei.ttc" "${srcdir}/deepintimdir/drive_c/windows/Fonts/wqy-microhei.ttc"
  ln -sf "/usr/share/fonts/adobe-source-han-sans/SourceHanSansCN-Medium.otf" "${srcdir}/deepintimdir/drive_c/windows/Fonts/SourceHanSansCN-Medium.otf"
  msg "Repackaging app archive ..."
  7z a -t7z -r "${srcdir}/files.7z" "${srcdir}/deepintimdir/*"
}
package() {
  # Install icons/desktop files plus the repacked Wine bottle, the launcher
  # script and the TIM installer under /opt/deepinwine/apps/Deepin-TIM.
  msg "Preparing icons ..."
  install -d "${pkgdir}/usr/share"
  cp -a ${srcdir}/dpkgdir/usr/share/* "${pkgdir}/usr/share/"
  msg "Copying TIM to /opt/deepinwine/apps/Deepin-TIM ..."
  install -d "${pkgdir}/opt/deepinwine/apps/Deepin-TIM"
  install -m644 "${srcdir}/files.7z" "${pkgdir}/opt/deepinwine/apps/Deepin-TIM/"
  install -m755 "${srcdir}/run.sh" "${pkgdir}/opt/deepinwine/apps/Deepin-TIM/"
  install -m644 "${srcdir}/TIM$pkgver.exe" "${pkgdir}/opt/deepinwine/apps/Deepin-TIM/"
}
| true |
e590727145e925ce72a276d4980c061f1ca9af04 | Shell | petarblazevski/dotfiles | /.bash_aliases | UTF-8 | 1,776 | 2.921875 | 3 | [] | no_license | # Interactive operation...
alias rm='rm -i'
alias cp='cp -i'
alias mv='mv -i'

# Default to human readable figures
alias du='du -khs * | sort -h | less'
alias df='df -kTh'

# Misc :)
alias less='less -r'                          # raw control characters
alias whence='type -a'                        # where, of a sort
alias grep='grep --color'                     # show differences in colour
alias egrep='egrep --color=auto'              # show differences in colour
alias fgrep='fgrep --color=auto'              # show differences in colour

# Some shortcuts for different directory listings
alias ls='ls -hCF --color=tty'                 # classify files in colour
alias dir='ls -d --color=auto --format=vertical */'
alias vdir='ls --color=auto --format=long'
alias l='ls -CF'                              #

#Useful alias for navigating
alias ..='cd ..'
alias ...='cd ../..'
alias ....='cd ../../..'
alias hh='cd ~'
alias cs..='cs ..'
alias cd..='cd ..'
alias ls='ls --color -hLF --group-directories-first --format=horizontal'
alias ll='ls -ahlL --color --group-directories-first'
alias la='ls -a --color --group-directories-first'

# git macros
# Bug fix: the originals appended "$args" inside double quotes; that expanded
# at *definition* time (to an empty string), never at use time, so it was
# dead text — the shell already appends anything typed after an alias.
alias ga="git add --all"
alias gb="git branch"
alias gc="git commit -ev"
alias gck="git checkout"
alias gd="git diff -p --stat"
alias gdc="git diff --cached"
alias gf="git fetch"
alias gl="git log --graph --oneline --decorate"
alias gpl="git pull"
alias gps="git push"
alias gr="git remote -v"
alias gs="git status -b"

## GENERAL ALIASES
alias code="cd /c/Code"

## PROJECTS
## LARAVEL
alias art="php artisan"
alias serve="php artisan serve"
alias migrate="php artisan migrate"
alias tinker="php artisan tinker"

## GLOBAL ALIASES
alias behat="vendor/bin/behat"
| true |
29cb5650feafec56f16302c478abe6b8b7442415 | Shell | delkyd/alfheim_linux-PKGBUILDS | /ocaml-javalib/PKGBUILD | UTF-8 | 1,317 | 2.890625 | 3 | [] | no_license | # This is an example PKGBUILD file. Use this as a start to creating your own,
# PKGBUILD metadata for ocaml-javalib (OCaml library that parses Java
# .class files); see 'man PKGBUILD' for the meaning of each field.
# Maintainer: Your Name <youremail@domain.com>
_oname='javalib'
pkgname="ocaml-$_oname"
pkgver='2.3.3'
pkgrel=3
pkgdesc="Parses Java .class files into OCaml data structures"
arch=('i686' 'x86_64')
url="http://sawja.inria.fr/"
license=('LGPL2')
depends=('ocaml>=4.0.0' 'ocaml-extlib>=1.5.1' 'ocaml-zip>=1.04' 'zlib')
makedepends=('ocaml-findlib')
# !strip: OCaml native objects must not be stripped
options=(!strip)
changelog=
source=("https://gforge.inria.fr/frs/download.php/file/36307/$_oname-$pkgver.tar.bz2")
md5sums=('a4d4b06e8f4860db34c128e760fa8397')
validpgpkeys=()
prepare() {
  cd "$_oname-$pkgver"
  # Point the build's OCAMLPATH at the source tree and redirect findlib's
  # install destination into $pkgdir so 'make install' stays out of /.
  sed -i 's,OCAMLPATH=$(LOCALDEST)'",OCAMLPATH=$srcdir/$_oname-$pkgver,g" Makefile.config.example
  sed -i 's,INSTALL = $(FINDER) install,INSTALL = $(FINDER) install'" -destdir $pkgdir/usr/lib/ocaml," Makefile.config.example
  sed -i "s,^OCAMLPATH=,export OCAMLPATH=$srcdir/$_oname-$pkgver," configure.sh
}
build() {
  cd "$_oname-$pkgver"
  # Two-stage build: the ptrees sub-library first, then the main library
  # (configure.sh is re-run so the second stage sees the installed ptrees).
  DESTDIR=/usr ./configure.sh
  make ptrees
  DESTDIR=/usr ./configure.sh
  make
}
package() {
  cd "$_oname-$pkgver"
  mkdir -p $pkgdir/usr/lib/ocaml
  make installptrees
  make install
}
| true |
df305d452e9eb3378b7166053bb75133aacfd4eb | Shell | fcoambrozio/slackbuilds | /docker/docker-compose/getsrc.sh | UTF-8 | 172 | 2.734375 | 3 | [] | no_license | #!/bin/bash
#
# Fetch the source tarball for the pinned docker-compose release.
#
readonly VERSION="2.20.2"
readonly DOWNLOAD="https://github.com/docker/compose/archive/v${VERSION}/compose-${VERSION}.tar.gz"
# -c resumes a partially downloaded file instead of starting over.
wget -c "${DOWNLOAD}"
| true |
ff2e46fa608086dc7ab1c4794ca3a7519049b4fe | Shell | lauramzarescu/TemaSO | /agenda/agenda.sh | UTF-8 | 3,367 | 3.984375 | 4 | [] | no_license | #!/bin/bash
function alegere_optiune() {
	# Dispatch the main-menu choice in $1 to the matching action and record
	# the action's name in the global $actiune (used by submeniu for the
	# "repeat last action" option).  Anything outside 1..6 is rejected.
	if [[ $1 == "1" ]]; then
		insert
		actiune="inserare"
	elif [[ $1 == "2" ]]; then
		delete
		actiune="stergere"
	elif [[ $1 == "3" ]]; then
		cautare
		actiune="cautare"
	elif [[ $1 == "4" ]]; then
		afisare
		actiune="afisare"
	elif [[ $1 == "5" ]]; then
		empty
		actiune="stergere continut"
	elif [[ $1 == "6" ]]; then
		reset
		actiune="resetare"
	elif [[ $1 -lt "1" || $1 -gt "6" ]]; then
		# Consistency fix: the upper bound was 5 although the menu has six
		# entries (harmless only because 6 is matched above).
		echo "Optiune invalida!"
	fi
}
function update_index() {
	# Renumber agenda.txt so the prefix before the first '.' on each line
	# becomes the 1-based line number (e.g. "7. John 123" -> "3. John 123").
	# Single awk pass instead of one 'sed -i' full-file rewrite per line,
	# turning an accidental O(n^2) into O(n).
	tmp=$(mktemp)
	awk '{ sub(/^[^.]*./, NR"."); print }' agenda.txt > "$tmp"
	# overwrite in place (keeps agenda.txt's inode and permissions)
	cat "$tmp" > agenda.txt
	rm -f "$tmp"
}
function time_log() {
	# Print the current timestamp (no trailing newline); used as a log prefix.
	time_var=`date`
	# '%s' format instead of using the data as the format string: a literal
	# '%' in the text can no longer be misinterpreted by printf.
	printf '%s' "$time_var"
}
function meniu() {
	# Print the main menu.
	# clear
	printf "\n1. Adauga inregistrare\n2. Sterge inregistrare\n3. Cauta\n4. Afisare\n5. Sterge tot continutul\n6. Reset\n"
}
function submeniu() {
	# Print the post-action menu; $log is the last status message and $1 the
	# name of the last action (offered for repetition).
	# clear
	printf "\n $log \n\n1. Revenire la meniul principal\n2. Repetati ultima actiune ($1)\n3. Iesire\nCe vreti sa faceti mai departe? [1 | 2 | 3] "
}
function update_log() {
	# Append "<timestamp> <message...>" to log.txt.
	log_var="$(time_log) $*"
	# Bug fix: 'echo $log_var' (unquoted) word-split the message and expanded
	# glob characters such as '*' against the current directory.
	printf '%s\n' "$log_var" >> log.txt
}
function insert() {
	# Interactively read name, surname and phone, append the record to
	# agenda.txt with the next sequential index, and log the operation.
	clear
	time_log
	printf "\n"
	echo -n "Nume: "
	read nume
	echo -n "Prenume: "
	read prenume
	echo -n "Telefon: "
	read telefon
	count=`wc -l agenda.txt | awk '{ print $1 }'`
	let "count=count+1"
	res="${count}. ${nume} ${prenume} ${telefon}"
	log="Inserare cu succes!"
	update_log $log
	echo $res >> agenda.txt
}
function delete() {
	# Delete every record matching the user's input (index, full name or
	# phone), then renumber and redisplay the agenda.
	clear
	time_log
	printf "\n"
	echo -n "Introduceti datele inregistrarii (index / nume complet / telefon): "
	read sterge
	if grep -Fq "$sterge" agenda.txt; then
		# NOTE(review): the input is used as a sed regex here (unlike the
		# fixed-string grep above), so characters like '/' or '.' change the
		# match — confirm whether that is intended.
		sed -i "/$sterge/d" agenda.txt
		log="Stergere cu succes!"
		update_log $log
		update_index
		afisare
	else
		echo -n "Inregistrarea nu a fost gasita!"
		log="Eroare la stergere!"
	fi
}
function cautare() {
	# Search agenda.txt for the user's input and report how many records match.
	clear
	time_log
	printf "\n"
	echo -n "Introduceti datele inregistrarii (index / nume complet / telefon): "
	read cauta
	if grep -Fq "$cauta" agenda.txt; then
		grep "$cauta" agenda.txt
		count_cautare=`grep "$cauta" agenda.txt | wc -l`
		log="Au fost gasite $count_cautare rezultate!"
		update_log $log
	else
		echo -n "Inregistrarea nu a fost gasita!"
		log="Eroare la cautare!"
		update_log $log
	fi
}
function empty() {
	# Truncate agenda.txt (delete all records) and log the operation.
	clear
	time_log
	printf "\n"
	> agenda.txt
	log="Golire agenda!"
	update_log $log
}
function reset() {
	# Remove both data files and terminate the program.
	clear
	time_log
	printf "\n"
	rm agenda.txt
	rm log.txt
	exit 1
}
function afisare() {
	# Display the whole agenda, or log that it is empty.
	clear
	time_log
	printf "\n"
	count=`wc -l agenda.txt | awk '{ print $1 }'`
	if [[ $count == "0" ]]; then
		log="Nu exista nicio inregistrare"
		update_log $log
	else
		log="Afisare agenda..."
		update_log $log
		cat agenda.txt
	fi
}
FILE=agenda.txt
LOG_FILE=log.txt
# Bug fix: the original wrote '[ test -f "$FILE" ]', which passes the literal
# word "test" as an operand, so the condition always failed and both files
# were re-touched on every start.  '[ -f ... ]' is the correct form.
if [ -f "$FILE" ] && [ -f "$LOG_FILE" ]; then
	count=`wc -l agenda.txt | awk '{ print $1 }'`
else
	touch agenda.txt
	touch log.txt
fi
optiune_submeniu="1"
# Main loop: show the menu, dispatch the chosen action, then offer
# back-to-menu / repeat / quit until the user picks 3 (Iesire).
while [[ $optiune_submeniu != "3" ]]; do
	if [[ $optiune_submeniu == "1" ]]; then
		clear
		meniu
		# prompt range fixed: the menu has six entries, not five
		printf "\nIntroduceti optiunea [1 | 2 | 3 | 4 | 5 | 6]: "
		read optiune
		alegere_optiune "$optiune"
	elif [[ $optiune_submeniu == "2" ]]; then
		alegere_optiune "$optiune"
	fi
	submeniu "$actiune"
	read optiune_submeniu
done
| true |
a0df415889c1d2e4fb0f0fa2dd6ed59edfb5a68f | Shell | 13361997280/mstrunk_new | /output-esdata/bin/env.sh | UTF-8 | 13,958 | 2.796875 | 3 | [] | no_license | #!/bin/bash
. global.sh
# First non-loopback IPv4 address of this machine (ifconfig parsing;
# "tr -d addr:" strips the characters a/d/r/: that some ifconfig versions
# prefix to the address).
LOCAL_IP=$(/sbin/ifconfig -a|grep inet|grep -v 127.0.0.1|grep -v inet6|awk '{print $2}'|tr -d "addr:"|head -1)
# Derive the index name from the deploy directory this bin/ lives in.
#current bin dir
index_name=`pwd`
#delete the last / if exists
index_name=${index_name%/}
#to the build dir
index_name=${index_name%/*}
#get the name of the build dir
index_name=${index_name##/*/}
#delete the version info if exists
index_name=${index_name%%_deploy*}
# capitalized variant (GNU sed \u) and deployment environment type
Index_name=`echo $index_name | sed 's/^[[:lower:]]/\u&/'`
env_type=`cat env.type`
case $index_name in
"vacation")
port_prefix="80"
port_key="9"
#default except online
engine_mem_info="-Xms128m -Xmx1024m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx512m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx512m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.143 192.168.81.142"
rebuild_ip="192.168.81.143"
scraper_ip="192.168.81.143"
job_ip="192.168.81.143"
monitor_ip="192.168.81.143"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.64"
scraper_ip="192.168.81.64"
job_ip="192.168.81.64"
monitor_ip="192.168.81.64"
;;
"uat")
engine_ips="192.168.82.221 192.168.83.125"
rebuild_ip="192.168.81.63"
scraper_ip="192.168.81.63"
job_ip="192.168.81.63"
monitor_ip="192.168.81.63"
;;
"fat")
engine_ips="10.2.6.15 10.2.6.16"
rebuild_ip="10.2.6.21"
scraper_ip="10.2.6.21"
job_ip="10.2.6.21"
monitor_ip="10.2.6.21"
ssh_port=22
;;
"lpt")
engine_ips="10.2.4.35 10.2.4.36"
rebuild_ip="10.2.4.41"
scraper_ip="10.2.4.41"
job_ip="10.2.4.41"
monitor_ip="10.2.4.41"
ssh_port=22
;;
"online")
engine_ips="192.168.86.250 192.168.86.251 192.168.86.252 192.168.86.123 192.168.86.124 192.168.86.166 192.168.86.167 192.168.86.168"
rebuild_ip="192.168.86.253"
scraper_ip="192.168.86.253"
job_ip="192.168.86.253"
monitor_ip="192.168.86.253"
engine_mem_info="-Xms1G -Xmx6G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1G -Xmn200m -Xss128k"
job_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"markland")
port_prefix="80"
port_key="7"
#default except online
engine_mem_info="-Xms128m -Xmx1536m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.143 192.168.81.142"
#engine_ips="192.168.83.100"
rebuild_ip="192.168.81.142"
monitor_ip="192.168.81.142"
engine_mem_info="-Xms128m -Xmx1536m -Xmn64m -Xss512k"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.223"
monitor_ip="192.168.81.223"
;;
"uat")
engine_ips="192.168.82.221 192.168.83.125"
rebuild_ip="192.168.81.65"
monitor_ip="192.168.81.65"
;;
"fat")
engine_ips="10.2.6.15 10.2.6.16"
rebuild_ip="10.2.6.21"
monitor_ip="10.2.6.21"
ssh_port=22
;;
"lpt")
engine_ips="10.2.4.35 10.2.4.36"
rebuild_ip="10.2.4.41"
monitor_ip="10.2.4.41"
ssh_port=22
;;
"online")
engine_ips="192.168.79.36 192.168.79.37 192.168.79.71 192.168.79.72 192.168.79.73 192.168.79.74 192.168.79.75"
rebuild_ip="192.168.86.253"
monitor_ip="192.168.86.253"
engine_mem_info="-Xms1G -Xmx4G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1G -Xmn200m -Xss128k"
job_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"guide")
port_prefix="80"
port_key="6"
#default except online
engine_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.145"
rebuild_ip="192.168.81.142"
#scraper_ip="192.168.81.142"
job_ip="192.168.81.142"
monitor_ip="192.168.81.142"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.83.222"
#scraper_ip="192.168.83.222"
job_ip="192.168.83.222"
monitor_ip="192.168.83.222"
;;
"uat")
engine_ips="192.168.82.221 192.168.83.125"
rebuild_ip="192.168.83.125"
#scraper_ip="192.168.83.125"
job_ip="192.168.83.125"
monitor_ip="192.168.83.125"
;;
"fat")
engine_ips="10.2.6.15 10.2.6.16"
rebuild_ip="10.2.6.21"
#scraper_ip="10.2.6.21"
job_ip="10.2.6.21"
monitor_ip="10.2.6.21"
ssh_port=22
;;
"online")
engine_ips="192.168.79.72 192.168.79.73 192.168.79.74 192.168.79.75"
rebuild_ip="192.168.86.253"
#scraper_ip="192.168.83.222"
job_ip="192.168.86.253"
monitor_ip="192.168.86.253"
engine_mem_info="-Xms1G -Xmx3G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1G -Xmn200m -Xss128k"
job_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"scenicspot")
port_prefix="80"
port_key="5"
#default except online
engine_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.144 192.168.81.145"
rebuild_ip="192.168.81.145"
scraper_ip="192.168.81.145"
monitor_ip="192.168.81.145"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.64"
scraper_ip="192.168.81.64"
monitor_ip="192.168.81.64"
;;
"uat")
engine_ips="192.168.82.15 192.168.81.65"
rebuild_ip="192.168.81.65"
scraper_ip="192.168.81.65"
monitor_ip="192.168.81.65"
;;
"fat")
engine_ips="10.2.6.15 10.2.6.16"
rebuild_ip="10.2.6.21"
scraper_ip="10.2.6.21"
monitor_ip="10.2.6.21"
ssh_port=22
;;
"lpt")
engine_ips="10.2.4.35 10.2.4.36"
rebuild_ip="10.2.4.41"
scraper_ip="10.2.4.41"
monitor_ip="10.2.4.41"
ssh_port=22
;;
"online")
engine_ips="192.168.79.72 192.168.79.73 192.168.79.74 192.168.79.75"
rebuild_ip="192.168.86.253"
scraper_ip="192.168.86.253"
monitor_ip="192.168.86.253"
engine_mem_info="-Xms1G -Xmx3G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1G -Xmn200m -Xss128k"
job_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"shx")
port_prefix="80"
port_key="8"
#default except online
engine_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.144 192.168.81.145"
rebuild_ip="192.168.81.145"
scraper_ip="192.168.81.145"
monitor_ip="192.168.81.145"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.64"
scraper_ip="192.168.81.64"
monitor_ip="192.168.81.64"
;;
"uat")
engine_ips="192.168.82.15 192.168.81.65"
rebuild_ip="192.168.81.65"
scraper_ip="192.168.81.65"
monitor_ip="192.168.81.65"
;;
"fat")
engine_ips="10.2.6.15 10.2.6.16"
rebuild_ip="10.2.6.21"
scraper_ip="10.2.6.21"
monitor_ip="10.2.6.21"
ssh_port=22
;;
"lpt")
engine_ips="10.2.4.35 10.2.4.36"
rebuild_ip="10.2.4.41"
scraper_ip="10.2.4.41"
monitor_ip="10.2.4.41"
ssh_port=22
;;
"online")
engine_ips="10.8.91.56 10.8.91.57 10.8.91.58"
rebuild_ip="192.168.86.253"
scraper_ip="192.168.86.253"
monitor_ip="192.168.86.253"
engine_mem_info="-Xms1G -Xmx3G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1G -Xmn200m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"expansion")
port_prefix="80"
port_key="4"
#default except online
engine_mem_info="-Xms128m -Xmx512m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.143 192.168.81.142"
#engine_ips="192.168.83.100"
rebuild_ip="192.168.81.142"
monitor_ip="192.168.81.142"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.223"
monitor_ip="192.168.81.223"
;;
"online")
engine_ips="10.8.91.113 10.8.91.114"
rebuild_ip="10.8.91.112"
monitor_ip="10.8.91.112"
engine_mem_info="-Xms1G -Xmx4G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1G -Xmn200m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"global")
port_prefix="80"
port_key="2"
#default except online
engine_mem_info="-Xms128m -Xmx2048m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx1024m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.143 192.168.81.142"
rebuild_ip="192.168.81.142"
monitor_ip="192.168.81.142"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.223"
monitor_ip="192.168.81.223"
;;
"online")
engine_ips="10.8.91.113 10.8.91.114 10.8.91.115 10.8.91.116 10.8.91.117"
rebuild_ip="10.8.91.112"
monitor_ip="10.8.91.112"
engine_mem_info="-Xms1G -Xmx4G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1G -Xmn200m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"globalautocomplete")
port_prefix="80"
port_key="1"
#default except online
engine_mem_info="-Xms128m -Xmx1536m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx1024m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.143 192.168.81.142"
rebuild_ip="192.168.81.142"
monitor_ip="192.168.81.142"
engine_mem_info="-Xms128m -Xmx1536m -Xmn64m -Xss512k"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.223"
monitor_ip="192.168.81.223"
;;
"uat")
engine_ips="192.168.82.221 192.168.83.125"
rebuild_ip="192.168.81.65"
monitor_ip="192.168.81.65"
;;
"fat")
engine_ips="10.2.6.15 10.2.6.16"
rebuild_ip="10.2.6.21"
monitor_ip="10.2.6.21"
ssh_port=22
;;
"lpt")
engine_ips="10.2.4.35 10.2.4.36"
rebuild_ip="10.2.4.41"
monitor_ip="10.2.4.41"
ssh_port=22
;;
"online")
engine_ips="10.8.91.113 10.8.91.114 10.8.91.115 10.8.91.116 10.8.91.117"
rebuild_ip="10.8.91.112"
monitor_ip="10.8.91.112"
engine_mem_info="-Xms1G -Xmx4G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1536m -Xmn200m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
"autocomplete")
port_prefix="80"
port_key="3"
#default except online
engine_mem_info="-Xms128m -Xmx1536m -Xmn64m -Xss512k"
scraper_mem_info="-Xms128m -Xmx512m -Xmn64m -Xss128k"
rebuild_mem_info="-Xms128m -Xmx512m -Xmn64m -Xss128k"
job_mem_info="-Xms128m -Xmx256m -Xmn64m -Xss128k"
monitor_mem_info="-Xms128m -Xmx128m -Xmn64m -Xss128k"
case $env_type in
"dev")
engine_ips="192.168.81.143 192.168.81.142"
rebuild_ip="192.168.81.143"
monitor_ip="192.168.81.143"
engine_mem_info="-Xms128m -Xmx1536m -Xmn64m -Xss512k"
;;
"test")
engine_ips="192.168.81.222 192.168.83.222"
rebuild_ip="192.168.81.223"
monitor_ip="192.168.81.223"
;;
"uat")
engine_ips="192.168.82.221 192.168.83.125"
rebuild_ip="192.168.81.65"
monitor_ip="192.168.81.65"
;;
"fat")
engine_ips="10.2.6.15 10.2.6.16"
rebuild_ip="10.2.6.21"
monitor_ip="10.2.6.21"
ssh_port=22
;;
"lpt")
engine_ips="10.2.4.35 10.2.4.36"
rebuild_ip="10.2.4.41"
monitor_ip="10.2.4.41"
ssh_port=22
;;
"online")
engine_ips="10.8.91.113 10.8.91.114"
rebuild_ip="10.8.91.112"
monitor_ip="10.8.91.112"
engine_mem_info="-Xms1G -Xmx4G -Xmn700m -Xss512k"
scraper_mem_info="-Xms500m -Xmx500m -Xmn100m -Xss128k"
rebuild_mem_info="-Xms1G -Xmx1536m -Xmn200m -Xss128k"
monitor_mem_info="-Xms200m -Xmx200m -Xmn100m -Xss128k"
;;
esac
;;
esac
| true |
744037324172418f5decae9a4711f460b85ae81e | Shell | AdrianPardo99/scriptsVM | /eliminaTapRoute.sh | UTF-8 | 335 | 2.640625 | 3 | [] | no_license | #!/usr/bin/env bash
# Interactively remove a user-supplied virtual (tap) network interface:
# first flush its routing-table entries, then delete the interface itself.
echo -n "Ingresa el nombre de la interfaz virtual que deseas eliminar: "
# -r keeps backslashes in the interface name literal.
read -r virtualInt
# Abort early instead of running "ip ... dev" with an empty argument,
# which would operate on the wrong target or just error out.
if [ -z "${virtualInt}" ]; then
    echo "No se ingresó ninguna interfaz; abortando." >&2
    exit 1
fi
echo "Se eliminara la tabla de enrutamiento para ${virtualInt}: "
sudo ip route flush dev "${virtualInt}"
echo "A continuación se eliminara la interfaz de red ${virtualInt}: "
sudo ip l d "${virtualInt}"
echo "Tarea finalizada"
| true |
9fb49eba6ec64de9b4a19d060b718c887c396c97 | Shell | WenhaoChen0907/Shell_demo | /day02/shell11_ifexer.sh | UTF-8 | 374 | 3.9375 | 4 | [] | no_license | #! /bin/bash
# Read a path from the keyboard and report whether it is empty, a
# directory, a regular file, or something else.

# classify PATH
# Prints 为空 (empty), 目录 (directory), 文件 (regular file) or
# 错误信息 (error) for the given argument.
classify() {
    local target=$1
    if [ -z "$target" ]; then
        echo "为空"
    elif [ -d "$target" ]; then
        echo "目录"
    elif [ -f "$target" ]; then
        echo "文件"
    else
        echo "错误信息"
    fi
}

# Prompt the user for a file or directory name (-r keeps backslashes literal).
read -r -p "请输入文件或目录:" content
# Quoting "$content" fixes the word-splitting bug the unquoted tests had
# for names containing spaces.
classify "$content"
| true |
7385393ace3cebab2c4abe49917eea9d9829ea45 | Shell | sparclusive/project-system | /code/regression/getint_io.sh | UTF-8 | 309 | 3.15625 | 3 | [
"MIT-Modern-Variant"
] | permissive | #!/bin/bash
# Regression test: feed a fixed sequence of integers to the "getint"
# user program running under Nachos and verify they are echoed back.
NACHOS_DIR=../build/
# Bail out if the build directory is missing instead of running the
# remaining commands from the wrong directory.
cd "$NACHOS_DIR" || exit 1
# Values piped to the program, and the (BRE-escaped) line we expect back.
TEST_STRING="-5 -4 -3 -2 -1 0 1 2 3 4 5 "
OUTPUT_STRING="\-5 \-4 \-3 \-2 \-1 0 1 2 3 4 5 "
./nachos-final -cp getint run
# grep -q: we only care whether the expected line appears in the output.
if echo "$TEST_STRING" | ./nachos-final -x run | grep -q "$OUTPUT_STRING"; then
    exit 0
else
    # Exit codes must be 0-255; the original "exit -1" is non-portable
    # (bash wraps it to 255, POSIX sh rejects it). Plain 1 signals failure.
    exit 1
fi
| true |
24ac21196425dc69c823a598da3ebd883b7efa0d | Shell | naqushab/teton | /bootstrap/setup-xcode-clt.sh | UTF-8 | 1,252 | 3.734375 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# ANSI colour escape sequences, kept as literal \033 strings and rendered
# later via printf "%b".
NORMAL='\033[0m'
GREEN='\033[0;32m'
RED='\033[0;31m'
BLUE='\033[0;34m'
# Reusable message fragments for status output.
marker="${BLUE}==>${NORMAL}"
status_good="${GREEN}✔${NORMAL}"
status_bad="${RED}✘${NORMAL}"
function bootstrap-xcode-clt () {
  # Install the Xcode Command Line Tools when xcode-select reports no
  # active developer directory; otherwise just report success.
  if [[ -d "$(xcode-select -p)" ]]; then
    printf "%b\n" "$marker Checking for Command Line Tools - $status_good"
    return
  fi
  printf "%b\n" "$marker Checking for Command Line Tools - $status_bad"
  # Marker file that makes `softwareupdate -l` advertise the CLT package.
  touch /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
  # Extract the product label of the first "Command Line ..." catalog entry.
  PROD=$(softwareupdate -l |
    grep "\*.*Command Line" |
    head -n 1 |
    awk -F"*" '{print $2}' |
    sed -e 's/^ *//' |
    tr -d '\n')
  sudo softwareupdate -i "$PROD" --verbose
  rm /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
}
# Unconditionally (re)install the Command Line Tools, with no up-front check.
# NOTE(review): near-duplicate of the install branch in bootstrap-xcode-clt,
# but runs softwareupdate WITHOUT sudo — presumably intended for root shells;
# confirm before deduplicating the two functions.
function install-clt () {
# Marker file that makes `softwareupdate -l` advertise the CLT package.
touch /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
# Product label of the first "Command Line ..." entry in the update catalog.
PROD=$(softwareupdate -l |
grep "\*.*Command Line" |
head -n 1 |
awk -F"*" '{print $2}' |
sed -e 's/^ *//' |
tr -d '\n')
softwareupdate -i "$PROD" --verbose
rm /tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress
}
# Entry point: run the check/installation when this script is executed.
bootstrap-xcode-clt
| true |
ea08e1d12138666f04c73c2884bd634bba5121f0 | Shell | leoj3n/prezto | /runcoms/zprofile | UTF-8 | 4,560 | 3.296875 | 3 | [
"MIT"
] | permissive | #
# Executes commands at login pre-zshrc.
#
# Authors:
# Joel Kuzmarski <leoj3n@gmail.com>
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
###############################################################################
# Setopts
###############################################################################
#
# bash-style handling of spaces and parens.
#
# setopt SH_WORD_SPLIT
#
# By default, Zsh considers many characters part of a word (e.g., _ and -).
# Narrow that down to allow easier skipping through words via M-f and M-b.
#
export WORDCHARS='*?[]~&;!$%^<>'
###############################################################################
# Custom scripting variables
###############################################################################
readonly CASKROOM='/opt/homebrew-cask/Caskroom'
###############################################################################
# Variables
###############################################################################
#
# Browser
#
if [[ "$OSTYPE" == darwin* ]]; then
export BROWSER='open'
fi
#
# Editors
#
export PAGER='less'
export EDITOR='vim'
export VISUAL='gvim -f'
export GIT_EDITOR='vim'
export MANPAGER='less -s -M +Gg'
#
# Language
#
if [[ -z "$LANG" ]]; then
export LANG='en_US.UTF-8'
fi
#
# GTK
#
export GTK_PATH='/usr/local/lib/gtk-2.0'
#
# Go
#
export GOPATH="${HOME}/.go"
#
# Python
#
export PYENV_ROOT='/usr/local/var/pyenv'
#
# nvm
#
export NVM_DIR="${HOME}/.nvm"
#
# Homebrew Cask
#
export HOMEBREW_CASK_OPTS='--appdir=/Applications'
#
# Text-Aid-Too
#
export TEXT_AID_TOO_EDITOR='gvim -f'
###############################################################################
# Paths
###############################################################################
#
# Ensure path arrays do not contain duplicates.
#
typeset -gU cdpath fpath mailpath path
#
# Set the the list of directories that cd searches.
#
# cdpath=(
# $cdpath
# )
#
# Set the list of directories that Zsh searches for programs.
#
path=(
'/opt/local/bin'
'/usr/local/'{bin,sbin}
${path}
)
if [[ ! -z "$(brew ls --versions php54)" ]]; then
path=(
"$(brew --prefix josegonzalez/php/php54)/bin"
${path}
)
fi
path=(
'/usr/local/mysql/bin'
"${HOME}/.composer/bin"
"${GOPATH}/bin"
"${HOME}/bin"
"${HOME}/bin/git-plugins"
${path}
)
#
# Set the list of directories that Zsh searches for functions.
#
fpath=(
"${HOME}/.homesick/repos/homeshick/completions"
${fpath}
)
###############################################################################
# Less
###############################################################################
#
# Set the default Less options.
# Mouse-wheel scrolling has been disabled by -X (disable screen clearing).
# Remove -X and -F (exit if the content fits on one screen) to enable it.
#
export LESS='-F -g -i -M -R -S -w -X -z-4'
#
# Set the Less input preprocessor.
# Try both `lesspipe` and `lesspipe.sh` as either might exist on a system.
#
if (( $#commands[(i)lesspipe(|.sh)] )); then
export LESSOPEN="| /usr/bin/env $commands[(i)lesspipe(|.sh)] %s 2>&-"
fi
###############################################################################
# Temporary Files
###############################################################################
if [[ ! -d "$TMPDIR" ]]; then
export TMPDIR="/tmp/$LOGNAME"
mkdir -p -m 700 "$TMPDIR"
fi
TMPPREFIX="${TMPDIR%/}/zsh"
###############################################################################
# Sources
###############################################################################
sourceif() {
if [[ -s "$1" ]]; then
source "$1"
fi
}
#SANDBOXRC=""
sourceif "${ZDOTDIR:-$HOME}/.zprezto/runcoms/submodules/sandboxd/sandboxd"
#
# iTerm
#
sourceif "${HOME}/.iterm2_shell_integration.zsh"
#
# POWERLEVEL9K
#
POWERLEVEL9K_MODE='awesome-fontconfig'
POWERLEVEL9K_PROMPT_ON_NEWLINE=true
POWERLEVEL9K_RPROMPT_ON_NEWLINE=true
POWERLEVEL9K_COLOR_SCHEME='light'
POWERLEVEL9K_STATUS_OK_BACKGROUND='black'
POWERLEVEL9K_SHORTEN_DELIMITER=''
POWERLEVEL9K_SHORTEN_DIR_LENGTH=4
POWERLEVEL9K_SHORTEN_STRATEGY='truncate_with_package_name'
POWERLEVEL9K_TIME_FORMAT="%D{%H:%M:%S} \uf017" #
POWERLEVEL9K_OK_ICON='\uf00c' #
#POWERLEVEL9K_HOME_SUB_ICON='📂'
#POWERLEVEL9K_APPLE_ICON='🍎'
POWERLEVEL9K_RAM_ELEMENTS=(ram_free)
POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=('status' 'todo' 'dir' 'vcs')
POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=('nvm' 'rvm' 'ram' 'background_jobs' 'time')
alias colorlist='for code ({000..255}) print -P -- "$code: %F{$code}This is how your text would look like%f"'
| true |
2155f51b28f9eb8297e2e474203ba3ec2a5d5c83 | Shell | WZQ1397/config | /jmeter/jmeter.erb | UTF-8 | 1,432 | 3.703125 | 4 | [] | no_license | #!/bin/sh
#
# Startup script for Apache JMeter
# CONTROLLED BY PUPPET DO NOT MODIFY!
#
# chkconfig: - 85 15
# description: Apache JMeter
# pidfile: /var/run/$JMETER.pid
# config:
#set -x
# Source function library.
. /etc/rc.d/init.d/functions
# Source networking configuration.
. /etc/sysconfig/network
JAVA_HOME=/opt/java
PATH=/opt/java/bin:$PATH
JMETER=jmeter
JMETER_HOME=/opt/jmeter
JAVA_PID=`/usr/bin/pgrep -f "jar /opt/jmeter/CMDRunner.jar"`
XMS_MEM='<%= @xms_memory %>'
XMX_MEM='<%= @xmx_memory %>'
case "$1" in
start)
# Start daemon.
[ -n "$JAVA_PID" ] && echo "$JMETER already started" && exit 0
echo -n "Starting $JMETER: "
/opt/java/bin/java -Xms${XMS_MEM}m -Xmx${XMS_MEM}m -jar /opt/jmeter/CMDRunner.jar --tool PerfMonAgent --tcp-port 3450 &
RETVAL=$?
echo "$JMETER started."
[ $RETVAL = 0 ] && touch /var/lock/subsys/$JMETER
;;
stop)
# Stop daemons.
[ -z "$JAVA_PID" ] && echo "$JMETER already stopped" && exit 0
echo -n "Shutting down $JMETER: "
kill $JAVA_PID
RETVAL=$?
echo
[ $RETVAL = 0 ] && rm -f /var/lock/subsys/$JMETER
;;
restart)
$0 stop
$0 start
;;
condrestart)
[ -e /var/lock/subsys/$JMETER ] && $0 restart
;;
status)
if [ -n "$JAVA_PID" ]; then
MODE="RUNNING pid $JAVA_PID"
echo "$JMETER is $MODE"
exit 0
else
MODE="STOPPED"
echo "$JMETER is $MODE"
exit 1
fi
;;
*)
echo "Usage: $0 {start|stop|restart|status}"
exit 1
esac
exit 0
| true |
e93bea3391ad3c64ba7cfb2f24517aba0fff498c | Shell | MinhxLe/DeepSetRNN | /src/scripts/process1000Genome.sh | UTF-8 | 1,613 | 3.34375 | 3 | [] | no_license | #!/bin/sh
####settings####
POP_NAME=$1
CHROM_NUM=$2
BP_NUM=$3
BASE_DIR='/u/home/m/minhle/project-sriram/minh/data'
VCF_DIR=$BASE_DIR'/1000genome/phase1/integrated_call_sets'
ANCESTRY_DIR=$BASE_DIR'/1000genome/phase1/ancestry_deconvolution'
POP_DIR=$ANCESTRY_DIR'/'$POP_NAME
PROCESSED_DIR=$BASE_DIR'/DeepSetProcessed/'$POP_NAME"_CHR"$CHROM_NUM"_BP"$BP_NUM
mkdir -p $PROCESSED_DIR
#getting labels of individuals
POP_LIST_FNAME=$ANCESTRY_DIR'/'$POP_NAME'_individuals.txt'
if [ ! -f $POP_LIST_FNAME ]; then
echo 'generating list of individuals in this population'
ls $POP_DIR/*.bed | sed -e 's/.*\/\(.*\)\.bed$/\1/' > $POP_LIST_FNAME
fi
#filtering with vftool the individuals, chromosome and base
VCF_FNAME=$VCF_DIR'/ALL.chr'$CHROM_NUM$'.integrated_phase1_v3.20101123.snps_indels_svs.genotypes.vcf'
VCF_PROCESSED=$PROCESSED_DIR'/filtered'
PROCESSED_FNAME=$VCF_PROCESSED'.recode.vcf'
if [ ! -f $PROCESSED_FNAME ]; then
echo 'filtering with vcftools'
vcftools --vcf $VCF_FNAME \
--recode \
--out $VCF_PROCESSED \
--chr $CHROM_NUM \
--to-bp $BP_NUM \
--remove-indels \
--remove-filtered-all \
--min-alleles 2 --max-alleles 2 \
--max-missing 1 \
--keep $POP_LIST_FNAME \
--remove-filtered-geno-all
fi
##generating the CSV file from vcf file with python scripy
echo 'running python script to generate csv genomes and population labels'
python2 process1000Genome.py --vcfFileName $PROCESSED_FNAME\
--ancestryDir $POP_DIR --chromosomeNum $CHROM_NUM\
--processedDir $PROCESSED_DIR
##remove the header sections
| true |
a40183c0d0a3f62417c5a195947441cfea5e7dbf | Shell | uptimesoftware/plugin-manager-for-uptime | /src/plugin_manager-upt7.0/load_plugin.sh | UTF-8 | 456 | 3.28125 | 3 | [] | no_license | #!/bin/sh
cd ..
UPTIME_DIR=`pwd`
if [ -e "/usr/local/uptime/apache/bin/php" ]; then
PHPDIR="/usr/local/uptime/apache/bin/"
elif [ -e "/opt/uptime/apache/bin/php" ]; then
PHPDIR="/opt/uptime/apache/bin/"
else
PHPDIR="/usr/local/uptime/apache/bin"
echo "ERROR (load_plugin.sh): Could not confirm apache directory!"
fi
LOADER_DIR="$UPTIME_DIR/plugin_manager/"
cd "$LOADER_DIR"
"$PHPDIR/php" "$LOADER_DIR/load_plugin.php" $1 $2 $3 $4 $5 $6 $7 $8 $9
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.