blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
1dabe771d8644a70a114b3c088e65c3dbb1c485b
|
Shell
|
brieuclambert/airbyte
|
/resources/examples/airflow/up.sh
|
UTF-8
| 594
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Bring up Airbyte (repo root) and the Airflow example stack, then wire the
# user's Airbyte connection ID into Airflow.
# Fixed: guard the first cd — without it, `docker-compose down -v` would run
# against whatever stack lives in the current directory.
cd ../../.. || exit 1
docker-compose down -v
docker-compose up -d
cd resources/examples/airflow || exit
docker-compose down -v
docker-compose up -d
echo "Access Airbyte at http://localhost:8000 and set up a connection."
echo "Enter your Airbyte connection ID: "
# Fixed: -r so backslashes in the pasted ID are not mangled.
read -r connection_id
docker exec -ti airflow_webserver airflow variables set 'AIRBYTE_CONNECTION_ID' "$connection_id"
docker exec -ti airflow_webserver airflow connections add 'airbyte_example' --conn-uri 'airbyte://host.docker.internal:8000'
echo "Access Airflow at http://localhost:8085 to kick off your Airbyte sync DAG."
| true
|
8edbd9e8ad2c8cf20b9f531642aa33997077c1c9
|
Shell
|
kamilpek/systemy
|
/zad3.sh
|
UTF-8
| 110
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
# Prints "Tak" when exactly 3 arguments were given and the first one is "-f",
# otherwise prints "Nie".
liczba=$#       # argument count
parameter=$1    # first argument (fixed typo: was "paramter")
# Fixed: the original `[ $1 -eq -f]` was broken twice — missing space before
# `]` (syntax error at runtime) and `-eq` is an integer comparison while "-f"
# is a string. Use a quoted string equality test instead.
if [ "$liczba" -eq 3 ] && [ "$parameter" = "-f" ]
then
echo "Tak"
else
echo "Nie"
fi
| true
|
02893c3ad178c731edffb93066c70ffdb149bfba
|
Shell
|
heimareed/yutWebsAgent
|
/demo_howto_customize_by_goahead_mk/build.sh
|
UTF-8
| 382
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Copy the demo project makefile into the goahead source tree and build it.
goahead_src_dir="../goahead_src"
goahead_src_projects_dir="../goahead_src/projects"
build_dir="../build"
mk_filename="yut_webs_agentdemo.mk"
# Quoted expansions so paths containing spaces do not break the test.
if [ ! -d "$goahead_src_projects_dir" ]; then
echo "ERROR: $goahead_src_projects_dir does not exist!"
exit 1
fi
sudo cp "$mk_filename" "$goahead_src_projects_dir"
# Fixed: guard the cd — otherwise `sudo make` would run in the wrong
# directory if the source tree is missing or unreadable.
cd "$goahead_src_dir" || exit 1
sudo make -f "projects/$mk_filename" SHOW=1
| true
|
906dfdd88170243cdbc9791ac2cb5730c007ffc8
|
Shell
|
CMSCompOps/MonitoringScripts
|
/metrics/siteReadiness/load_data.sh
|
UTF-8
| 547
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# Daily siteReadiness metric update: advance the date cursor in lastdate.txt
# by one day, run dailyMetric.py for that day, and publish the output to AFS.
# go to the path that contains this script (fixed: quoted + guarded — an
# unquoted/unchecked cd would leave us running in the wrong directory)
cd "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" || exit 1
source ../../init.sh
OUT=$SSTBASE/output/metrics/siteReadiness
if [ ! -d "$OUT" ]; then
mkdir -p "$OUT"
fi
# read the last processed date (one line) from the cursor file
d=$(<lastdate.txt)
echo "Deleting old file"
rm -rf /afs/cern.ch/user/c/cmssst/www/siteReadiness/siteReadiness.txt
echo "Starting $d"
# advance cursor by one day and persist it before running the metric
d=$(date -I --utc -d "$d + 1 day")
echo "$d" > lastdate.txt
echo "$d - $(date)" > results
python dailyMetric.py "$d" "$OUT"
cp "$OUT"/* /afs/cern.ch/user/c/cmssst/www/siteReadiness/
| true
|
30a1c4e60ebc2a0fecd94702b44406244646ac29
|
Shell
|
mba5157/opbb10
|
/hookflash-libs/boost/rim-build/build.sh
|
UTF-8
| 3,542
| 4.125
| 4
|
[
"BSD-2-Clause-Views",
"BSL-1.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
#!/bin/bash
# Build Boost for BlackBerry (QNX) targets via bjam; see usage() below.
# ensure required BBNDK env variables are set
: ${QNX_HOST:?"Error: QNX_HOST environment variable is not set."}
: ${QNX_TARGET:?"Error: QNX_TARGET environment variable is not set."}
# Get absolute path of script
# NOTE(review): this assumes the script was invoked via a relative path; if
# $BASH_SOURCE is already absolute the result contains $PWD twice — confirm.
SCRIPT_PATH=$PWD/$BASH_SOURCE
SCRIPT_DIR=`dirname $SCRIPT_PATH`
# Set to Python source location when building Python library
PYTHON_SRC_DIR=""
pushd $SCRIPT_DIR
# Print a one-line usage summary for this script to stdout.
usage() {
printf '%s\n' "$0 <install|clean> <static|shared>"
}
# Print a highlighted progress line of the form "===> <message>".
echo_action() {
printf '===> %s\n' "$1"
}
# Build (or clean) the Boost libraries for both arm and x86, debug and
# release, staging results under $PREFIX/<cpu>/<variant>.
# Globals read: ACTION, PREFIX, BOOST_DIR, LINK_TYPE, PYTHON_SRC_DIR.
build()
{
# For "clean", wipe the staging prefix first (guarded so an empty or "/"
# PREFIX cannot trigger a catastrophic rm).
if [ "$ACTION" == "clean" ] ; then
if [ "$PREFIX" != "/" ] && [ -n $PREFIX ] ; then
rm -rf $PREFIX
fi
fi
pushd $BOOST_DIR
# bootstrap once to produce the bjam binary
if [ ! -f ./bjam ] ; then
./bootstrap.sh
fi
# Build the libraries and install them to staging dir
for CPU in arm x86 ; do
if [ "$CPU" == "x86" ] ; then
CONFIG=$BOOST_DIR/blackberry-x86-config.jam
CPU_DIR="x86"
elif [ "$CPU" == "arm" ] ; then
CONFIG=$BOOST_DIR/blackberry-armv7le-config.jam
CPU_DIR="armle-v7"
else
echo "Unrecognized CPU ($CPU)."
exit
fi
for VARIANT in debug release ; do
echo_action "Building $CPU $VARIANT"
# BJAM_ARGS is rebuilt each iteration from the action, variant and
# optional Python source dir — order of these appends matters to bjam.
if [ "$ACTION" == "install" ] ; then
# bjam will append the lib directory
BJAM_ARGS="stage --stagedir=$PREFIX/$CPU_DIR/$VARIANT"
elif [ "$ACTION" == "clean" ] ; then
BJAM_ARGS="--clean"
fi
if [ "$VARIANT" == "release" ] ; then
BJAM_ARGS="$BJAM_ARGS optimization=space"
fi
if [ "$PYTHON_SRC_DIR" != "" ] && [ -d $PYTHON_SRC_DIR ] ; then
BJAM_ARGS="$BJAM_ARGS include=$PYTHON_SRC_DIR:$PYTHON_SRC_DIR/Include --with-python"
fi
# Disable builds with ICU because the ICU libraries are linked against Dinkumware libraries.
# This will cause crashes because GNU and Dinkumware libraries cannot be mixed.
./bjam $BJAM_ARGS \
-j 4 \
--disable-icu \
--with-chrono \
--with-date_time \
--with-exception \
--with-filesystem \
--with-graph \
--with-graph_parallel \
--with-iostreams \
--with-locale \
--with-math \
--with-mpi \
--with-program_options \
--with-random \
--with-regex \
--with-serialization \
--with-signals \
--with-system \
--with-test \
--with-thread \
--with-timer \
--with-wave \
--user-config=$CONFIG \
--layout=system toolset=qcc target-os=qnxnto architecture=$CPU \
variant=$VARIANT link=$LINK_TYPE threading=multi runtime-link=shared
done
done
popd
}
#########################################################
# Begin main script
#########################################################
# Validate arguments: $1 must be install|clean, $2 must be static|shared.
# NOTE(review): these `exit` calls return status 0 even on bad usage —
# callers cannot distinguish failure; consider `exit 1` (behavior change).
if [ $# -lt 1 ] ; then
usage
exit
fi
ACTION=$1
if [ "$ACTION" != "install" ] && [ "$ACTION" != "clean" ] ; then
usage
exit
fi
LINK_TYPE=$2
if [ "$LINK_TYPE" != "static" ] && [ "$LINK_TYPE" != "shared" ] ; then
usage
exit
fi
# Boost source tree is the parent of this rim-build directory; staged
# artifacts go under ./boost-stage.
BOOST_DIR="`pwd`/.."
PREFIX=`pwd`/boost-stage
if [ ! -d $PREFIX ] ; then
mkdir -p $PREFIX
fi
echo "build $ACTION $LINK_TYPE"
build
| true
|
99ca6b08a0b55b4eeb27796eb45f0a1b7fac3816
|
Shell
|
nettan20/widget.dart
|
/bin/copy_out.sh
|
UTF-8
| 933
| 3.4375
| 3
|
[
"BSD-2-Clause",
"BSD-2-Clause-Views"
] |
permissive
|
#!/bin/bash -x
# Copy the dart2js build output into the example dir and post-process
# index.html so it references the compiled JS instead of Dart sources.
BUILD_DIR=web/out
WEB_DIR=example
rm -rf "$WEB_DIR"
mkdir -p "$WEB_DIR"
find "$BUILD_DIR" -maxdepth 1 -type f -print0 | xargs -0 -I % cp % "$WEB_DIR"
rm "$WEB_DIR"/*.dart
# Since we're using the JS file directly, remove the dart.js helper
TO_FIND='packages\/browser\/dart.js"'
COMMAND2=/$TO_FIND/d
# Remove imports from output
# DARTBUG, WEB-UI BUG: https://github.com/dart-lang/web-ui/issues/514
TO_FIND='rel="import"'
COMMAND3=/$TO_FIND/d
# fix the type of the script from dart to js
TO_FIND='type="application/dart"'
REPLACE_WITH='type="text/javascript"'
COMMAND4=s*$TO_FIND*$REPLACE_WITH*g
# change reference to dart file to use the corresponding js file
TO_FIND='bootstrap.dart'
REPLACE_WITH='bootstrap.dart.js'
COMMAND5=s*$TO_FIND*$REPLACE_WITH*g
# remove empty lines
COMMAND6='/^[[:blank:]]*$/d'
# Fixed: the sed scripts are now quoted — several contain glob characters
# (`*` delimiters, `[[:blank:]]`) and, unquoted, could undergo pathname
# expansion if a matching file happened to exist in the working directory.
sed -E -e "$COMMAND2" -e "$COMMAND3" -e "$COMMAND4" -e "$COMMAND5" -e "$COMMAND6" -- "$BUILD_DIR/index.html" > "$WEB_DIR/index.html"
| true
|
2135e86ad59a9ca84e642965105ca3543c408c10
|
Shell
|
MedvidekPu/cz-region-boundaries
|
/scripts/refresh-data.sh
|
UTF-8
| 822
| 2.828125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Refresh CZ region boundary data: pull OSM boundaries via Overpass, then
# rebuild the zip-code -> region CSV from the Czech Post XLS export.
curl -X POST 'https://lz4.overpass-api.de/api/interpreter' -d@overpass-query.txt | osmtogeojson > src.geojson
node prepare-gps-data.js
PSCSOURCE='https://www.ceskaposta.cz/documents/10180/3738087/xls_pcobc.zip/50617e56-6e9a-4335-9608-96fec214e6ef'
OKRESFILE="../data/zip/county-region.csv"
ZIPFILE="src.zip"
FILE='xls_pcobc'
PSCFILE="../data/zip/zip-county.csv"
RESULT="../data/zip/zip-region.csv"
wget "$PSCSOURCE" -O src.zip
unzip -n "$ZIPFILE"
# convert the XLS to CSV (infilter 44,34,76: comma, double-quote, encoding)
libreoffice --headless --convert-to csv --infilter=csv:44,34,76 "$FILE.xls" --outdir . > /dev/null
# columns 2,5 = zip + county; drop header, dedupe
cut -d, -f2,5 "$FILE.csv" | tail -n +2 | sort | uniq > "$PSCFILE"
# join zip->county with county->region on the county key to get zip->region
join -t ',' -1 2 -2 1 -o 1.1,2.2 <(sort -t ',' -k2,2 "$PSCFILE") <(sort -t ',' -k1,1 "$OKRESFILE") | sort | uniq > "$RESULT"
rm "$ZIPFILE"
rm -f "$FILE.xlsx"
rm -f "$FILE.xls"
rm -f "$FILE.csv"
| true
|
1a66682c890d2296393f1489204bb7d3c1374ba0
|
Shell
|
bandgeekndb/imageNow
|
/Bash/MyCommonAppSearch.sh
|
UTF-8
| 2,202
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/bash
# Run the CommonAppSearch iScript under an mkdir-based lock, then mail a
# summary of the run. All diagnostics are appended to ${logTo}.
#source /export/$(hostname -s)/inserver6/bin/setenv.sh
#cd /export/$(hostname -s)/inserver6/script
#cd /export/$(hostname -s)/inserver6/script
#echo `Date` >> /export/$(hostname -s)/inserver6/log/running_log-CommonAppSearch
#intool --cmd run-iscript --file /export/$(hostname -s)/inserver6/script/CommonAppSearch.js
logTo=/export/$(hostname -s)/inserver6/log/running_log-CommonAppSearch.log
lockTo=/export/$(hostname -s)/inserver6/script/lock/CommonAppSearch.lock
logName=/export/$(hostname -s)/inserver6/log/CommonAppSearch_$(date +"%Y%m%d").log
cd /export/$(hostname -s)/inserver6/script >> ${logTo} 2>&1
# mkdir is atomic, so it doubles as the lock acquisition; the directory is
# removed at the end of the critical section.
# NOTE(review): the bare `>> ${logTo} 2>&1` after `then` is a redirection-only
# null command — it truncates nothing and redirects nothing persistent;
# presumably a copy-paste artifact. Confirm before cleaning up.
if (mkdir $lockTo); then >> ${logTo} 2>&1
MACH_OS=`uname -s`
IMAGENOWDIR6=/export/$(hostname -s)/inserver6
ODBCINI=$IMAGENOWDIR6/etc/odbc.ini
LD_LIBRARY_PATH=$IMAGENOWDIR6/odbc/lib:$IMAGENOWDIR6/bin:$IMAGENOWDIR6/fulltext/k2/_ilnx21/bin:/usr/local/waspc6.5/lib:/usr/lib
PATH=$PATH:$IMAGENOWDIR6/fulltext/k2/_ilnx21/bin
IMAGE_GEAR_PDF_RESOURCE_PATH=./Resource/PDF/
IMAGE_GEAR_PS_RESOURCE_PATH=./Resource/PS/
IMAGE_GEAR_HOST_FONT_PATH=./Resource/PS/Fonts/
export IMAGENOWDIR6 ODBCINI LD_LIBRARY_PATH PATH IMAGE_GEAR_PDF_RESOURCE_PATH IMAGE_GEAR_PS_RESOURCE_PATH IMAGE_GEAR_HOST_FONT_PATH >> ${logTo} 2>&1
echo `date` >> ${logTo}
# time the iScript run so suspiciously short runs can be flagged
begin=$(date +%s)
/export/$(hostname -s)/inserver6/bin/intool1 --cmd run-iscript --file /export/$(hostname -s)/inserver6/script/CommonAppSearch.js >> ${logTo} 2>&1
end=$(date +%s)
total=$((end - begin))
# under 30s is treated as a probable intool login failure
if [ $total -lt 30 ]; then
message="The matching script finished in $total seconds - that seems kind of quick.
There may have been an error logging in with intool. Please verify that the
script actually ran."
else
numberMatch=`grep 'Successfully reindexed' ${logName} | wc -l`
docLines=`grep 'document id:' ${logName} | wc -l`
# NOTE(review): the -3 offset presumably discounts header lines in the
# iScript log — confirm against CommonAppSearch.js output.
totalDocs=`expr $docLines - 3`
message="The script finished running at $(date). \nIt took $((total/60)) minutes to make ${numberMatch} matches out of ${totalDocs} documents across all Common App queues."
fi
echo -e ${message} | mailx -s "MyCommonAppSearch.sh has finished running" UITS.DI.CORE@umassp.edu >> ${logTo} 2>&1
# release the lock
rm -rf "$lockTo" >> ${logTo} 2>&1
else
echo "`date` - Script already running" >> ${logTo}
fi
| true
|
d9f49c5deba9072c0fbc068cf2db4a35c0ca886b
|
Shell
|
kehanlu/server-docker
|
/init.sh
|
UTF-8
| 990
| 2.765625
| 3
|
[] |
no_license
|
# Container bootstrap: install oh-my-zsh, pyenv, a default Python
# virtualenv, and JupyterLab for the current user.
# Reads env: DEFAULT_PYTHON_VERSION, DEFAULT_ENV_NAME.
echo `id`
# install zsh
printf "Y\n" | sh -c "$(wget https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh -O -)"
# install pyenv
curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash
# Fixed: the original used `echo '\n...'`, which only expands escapes under
# some shells (e.g. dash) — under bash the literal characters `\n` were
# written into .zshrc. printf '%b' expands them in any POSIX shell.
printf '%b' '\nexport PYENV_ROOT="$HOME/.pyenv"\nexport PATH="$PYENV_ROOT/bin:$PATH"\neval "$(pyenv init --path)"\neval "$(pyenv virtualenv-init -)"\n' >> .zshrc
printf '%b' '\nexport PATH="$HOME/.local/bin:$PATH"\n' >> .zshrc
"$HOME/.pyenv/bin/pyenv" install "$DEFAULT_PYTHON_VERSION"
"$HOME/.pyenv/bin/pyenv" virtualenv "$DEFAULT_PYTHON_VERSION" "$DEFAULT_ENV_NAME"
"$HOME/.pyenv/bin/pyenv" global "$DEFAULT_ENV_NAME"
# install jupyter
"$HOME/.pyenv/versions/$DEFAULT_ENV_NAME/bin/python3" -m pip install --upgrade pip
"$HOME/.pyenv/versions/$DEFAULT_ENV_NAME/bin/python3" -m pip install jupyterlab
mkdir -p "$HOME/.jupyter"
# Same fix here: write three real config lines, not one line with "\n".
printf '%b\n' "c.ServerApp.ip = '0.0.0.0'\nc.ServerApp.open_browser = False\nc.ServerApp.notebook_dir = '$HOME'" > "$HOME/.jupyter/jupyter_lab_config.py"
| true
|
66d17688e17ca1bb7bc1ab49ad4699b2bd72d649
|
Shell
|
ttp55/learnshell
|
/chengfa.sh
|
UTF-8
| 107
| 2.875
| 3
|
[] |
no_license
|
#!/bin/bash
# Print the multiplication table: row i contains i*j for j = 1..i,
# products separated by single spaces, one row per line.
for (( i = 1; i <= 9; i++ ))
do
for (( j = 1; j <= i; j++ ))
do
printf '%s ' "$i*$j=$(( i * j ))"
done
printf '\n'
done
| true
|
ee0f52c3877b5acd2d6d4bdf02671c6b9c910152
|
Shell
|
ci2c/code
|
/scripts/renaud/FMRI_PreprocessNIAK.sh
|
UTF-8
| 4,641
| 3.265625
| 3
|
[] |
no_license
|
#! /bin/bash
# FMRI_PreprocessNIAK.sh — wrapper that validates CLI options and hands off
# to the NIAK preprocessing pipeline in Matlab (see bottom of script).
# Require at least 14 argument tokens (7 options, each with a value).
if [ $# -lt 14 ]
then
echo ""
echo "Usage: FMRI_PreprocessNIAK.sh -sd <subj_dir> -subj <name> -TR <value> -N <value> -a <name> -fwhm <value> -o <output> "
echo ""
echo "  -sd   : Path to subjects "
echo "  -subj : subject name (need file: fmri/EPI.nii)"
echo "  -TR   : TR value "
echo "  -N    : Number of slices "
echo "  -a    : type of acquisition (interleaved or ascending) "
echo "  -fwhm : smoothing value "
echo "  -o    : output folder "
echo ""
echo "Usage: FMRI_PreprocessNIAK.sh -sd <subj_dir> -subj <name> -TR <value> -N <value> -a <name> -fwhm <value> -o <output> "
echo ""
echo "Author: Renaud Lopes - CHRU Lille - Jun 01, 2012"
echo ""
exit 1
fi
# Walk the positional parameters by index; `eval arg=\${$index}` is indirect
# expansion of the $index-th positional parameter (pre-${!var} style).
# Each recognized flag consumes the following token as its value.
index=1
while [ $index -le $# ]
do
eval arg=\${$index}
case "$arg" in
-h|-help)
echo ""
echo "Usage: FMRI_PreprocessNIAK.sh -sd <subj_dir> -subj <name> -TR <value> -N <value> -a <name> -fwhm <value> -o <output> "
echo ""
echo "  -sd   : Path to subjects "
echo "  -subj : subject name "
echo "  -TR   : TR value "
echo "  -N    : Number of slices "
echo "  -a    : type of acquisition (interleaved or ascending) "
echo "  -fwhm : smoothing value "
echo "  -o    : output folder "
echo ""
echo "Usage: FMRI_PreprocessNIAK.sh -sd <subj_dir> -subj <name> -TR <value> -N <value> -a <name> -fwhm <value> -o <output> "
echo ""
echo "Author: Renaud Lopes - CHRU Lille - Jun 01, 2012"
echo ""
exit 1
;;
-sd)
# advance to the option's value, then read it indirectly
index=$[$index+1]
eval DIR=\${$index}
echo "subject's path : ${DIR}"
;;
-subj)
index=$[$index+1]
eval subj=\${$index}
echo "subject's name : ${subj}"
;;
-TR)
index=$[$index+1]
eval TR=\${$index}
echo "TR value : ${TR}"
;;
-N)
index=$[$index+1]
eval N=\${$index}
echo "number of slices : ${N}"
;;
-a)
index=$[$index+1]
eval acquis=\${$index}
echo "type of acquisition : ${acquis}"
;;
-fwhm)
index=$[$index+1]
eval fwhm=\${$index}
echo "smoothing value : ${fwhm}"
;;
-o)
index=$[$index+1]
eval output=\${$index}
echo "output folder : ${output}"
;;
-*)
# unknown flag: report it and show full usage
eval infile=\${$index}
echo "${infile} : unknown option"
echo ""
echo "Usage: FMRI_PreprocessNIAK.sh -sd <subj_dir> -subj <name> -TR <value> -N <value> -a <name> -fwhm <value> -o <output> "
echo ""
echo "  -sd   : Path to subjects "
echo "  -subj : subject name "
echo "  -TR   : TR value "
echo "  -N    : Number of slices "
echo "  -a    : type of acquisition (interleaved or ascending) "
echo "  -fwhm : smoothing value "
echo "  -o    : output folder "
echo ""
echo "Usage: FMRI_PreprocessNIAK.sh -sd <subj_dir> -subj <name> -TR <value> -N <value> -a <name> -fwhm <value> -o <output> "
echo ""
echo "Author: Renaud Lopes - CHRU Lille - Jun 01, 2012"
echo ""
exit 1
;;
esac
index=$[$index+1]
done
## Check mandatory arguments
# Fixed: all expansions are now quoted — with the original unquoted
# `[ -z ${DIR} ]`, any value containing whitespace made `test` fail with
# "too many arguments" instead of performing the emptiness check.
if [ -z "${DIR}" ]
then
echo "-sd argument mandatory"
exit 1
fi
if [ -z "${subj}" ]
then
echo "-subj argument mandatory"
exit 1
fi
if [ -z "${TR}" ]
then
echo "-TR argument mandatory"
exit 1
fi
if [ -z "${N}" ]
then
echo "-N argument mandatory"
exit 1
fi
if [ -z "${acquis}" ]
then
echo "-a argument mandatory"
exit 1
fi
if [ -z "${fwhm}" ]
then
echo "-fwhm argument mandatory"
exit 1
fi
if [ -z "${output}" ]
then
echo "-o argument mandatory"
exit 1
fi
## Create a fresh output directory tree, convert the input volumes, and run
## the NIAK preprocessing pipeline in Matlab.
# Fixed: all path expansions are quoted — especially important for the
# `rm -rf` below, where an unquoted path with spaces could delete the
# wrong directories.
outdir="${DIR}/${subj}/fmri/${output}"
if [ -d "${outdir}" ]
then
rm -rf "${outdir}"
fi
mkdir "${outdir}"
mkdir "${outdir}/fmri"
mkdir "${outdir}/anat"
mkdir "${outdir}/output"
# convert functional and anatomical volumes to MINC for NIAK
nii2mnc "${DIR}/${subj}/fmri/EPI.nii" "${outdir}/fmri/epi.mnc"
mri_convert "${DIR}/${subj}/mri/orig.mgz" "${outdir}/anat/orig.nii" --out_orientation RAS
nii2mnc "${outdir}/anat/orig.nii" "${outdir}/anat/orig.mnc"
# The here-document is expanded by the shell, so ${N}, ${TR}, etc. are
# substituted into the Matlab source before Matlab sees it.
matlab -nodisplay <<EOF
% Load Matlab Path
cd /home/renaud/
p = pathdef;
addpath(p);
addpath(genpath('/home/renaud/matlab/niak-0.6.4.1'));
opt.nslices=${N};
opt.tr=${TR};
FMRI_NiakPreprocess('${DIR}/${subj}/fmri/${output}','/home/renaud/matlab/niak-0.6.4.1','${acquis}',${fwhm},opt);
EOF
| true
|
0024e0ff5e49ce2a3a5c9c19813f771b72b36e4e
|
Shell
|
ozgurgul/hcp-demo-env-aws-terraform
|
/etc/postcreate_core.sh_template
|
UTF-8
| 1,076
| 3.328125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Post-create step for the HPE CP demo environment: install the hpecp CLI
# locally, then run the experimental configuration scripts in order.
set -e # abort on error
set -u # abort on undefined variable
source "./scripts/variables.sh"
source "./scripts/functions.sh"
print_header "Installing HPECP CLI to local machine"
export HPECP_CONFIG_FILE=generated/hpecp.conf
export HPECP_LOG_CONFIG_FILE=${PWD}/generated/hpecp_cli_logging.conf
# best-effort uninstall: `|| true` keeps set -e from aborting on first install
pip3 uninstall -y hpecp || true # uninstall if exists
pip3 install --user --upgrade --quiet hpecp
HPECP_VERSION=$(hpecp config get --query 'objects.[bds_global_version]' --output text)
echo "HPECP Version: ${HPECP_VERSION}"
print_header "Configuring Global Active Directory in HPE CP"
./bin/experimental/01_configure_global_active_directory.sh
print_header "Adding a Gateway to HPE CP"
./bin/experimental/02_gateway_add.sh
# INSTALL_WITH_SSL comes from scripts/variables.sh (set -u guarantees it)
if [[ "${INSTALL_WITH_SSL}" == "True" ]]; then
print_header "Setting Gateway SSL"
./bin/experimental/set_gateway_ssl.sh
fi
print_header "Configuring Active Directory in Demo Tenant"
./bin/experimental/setup_demo_tenant_ad.sh
print_header "Enable Virtual Nodes on Controller"
./bin/experimental/epic_enable_virtual_node_assignment.sh
| true
|
54250a014b6921734b97c8f336ead208e0cf46ce
|
Shell
|
navikt/ab-test
|
/actions/build/build-test-branches.sh
|
UTF-8
| 442
| 3.171875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build every remote branch whose name starts with $TEST_BRANCH_PREFIX and
# copy its dist output to tmp/<branch-suffix>/.
# Env: TEST_BRANCH_PREFIX, BUILD_SCRIPT, DIST_DIRECTORY, GITHUB_REF.
# Collect the suffixes (text after the prefix) of all matching remote branches.
git branch -r | grep --line-buffered "$TEST_BRANCH_PREFIX" | awk -v tbp="$TEST_BRANCH_PREFIX" '{n=split($0,a, tbp); print a[n]}' | xargs -L1 >> ab-test-branches.tmp
while read -r l; do
# Fixed: `[ ! -z $l ]` was unquoted — a value with spaces broke the test.
if [ -n "$l" ]; then
git checkout "$TEST_BRANCH_PREFIX$l";
npm install && npm run "$BUILD_SCRIPT";
mkdir tmp/"$l";
cp -r "$DIST_DIRECTORY"/* tmp/"$l"/;
git reset --hard;
fi
done < ab-test-branches.tmp
# return to the ref the workflow started on
git checkout "$GITHUB_REF"
| true
|
1eb729915ee1697c4c96a70749b94a38ec447302
|
Shell
|
maniaabdi/system-config
|
/bin/daily-build-ww
|
UTF-8
| 1,662
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Daily build of Linux WangWang: locked via flock, versions stamped from
# git/svn history, built in cowbuilder chroots and rsynced to bhj-win7.
# fd 9 on the lock file backs the flock below (single-instance guard)
exec 9>~/.logs/$(basename $0).lock
set -e
function die() {
echo Error: "$@"
# NOTE(review): `exit -1` is non-standard; shells map it to 255.
exit -1
}
if ! flock -n 9; then
die "daily-build-ww already running, can not lock ~/.logs/$(basename $0).lock"
fi
cd ~/src/github/lww/
. .gitx
git reset --hard
git pull
# Derive a version string from the latest commit: prefer the git-svn
# revision ("svn.NNN"), else the abbreviated commit hash ("git.XXXXXX").
version=$(
git log -1 |perl -e '
$x = join("", <>);
if ($x =~ m/git-svn-id:.*\@(\d+)/) {
print "svn.$1";
exit;
} elsif ($x =~ m/^commit ([a-f0-9A-F]{6})/) {
print "git.$1";
exit;
} else {
print "unknown"
}'
)
# Stamp the version into the sources before building.
perl -npe "s/%version is read from git-svn-log%/$version/" -i AliWangWangLinux_back/src/gui/main.cpp
perl -npe "s/--nightly--/$version/" -i AliWangWangLinux_back/aliwangwang.spec
export WW_VERSION=$version
me=$(basename $0)
# Skip the build (after reverting the stamps) when there are no commits
# since yesterday.
if git log --since $(today -1) | tee ~/.cache/$me.log.$$ | grep -q .; then
true;
else
git reset --hard
exit
fi
# best-effort chroot refreshes; failures here do not abort the build
mach-bhj || true
mach-bhj -r fedora-development-i386-core || true
set -o pipefail
# Build for 32- and 64-bit in cowbuilder chroots and publish the .debs;
# on any failure, mail the build log to the dev list (unless interactive).
(
set -e
WHICH_COW='precise i386' ~/bin/cowbuilder-bhj cowbuilder-bhj-build-wangwang --save-after-exec
rsync *.deb bhj-win7:/var/www/linux-wangwang/$(today)-$WW_VERSION/
WHICH_COW='precise amd64' ~/bin/cowbuilder-bhj cowbuilder-bhj-build-wangwang --save-after-exec
rsync *.deb bhj-win7:/var/www/linux-wangwang/$(today)-$WW_VERSION/
rsync ~/.cache/$me.log.$$ bhj-win7:/var/www/linux-wangwang/$(today)-$WW_VERSION/git-svn.log
) | tee ~/.logs/$me.build.log 2>&1 || (
if ! is-tty-io; then
mailx '"Linux旺旺开发邮件列表" <linux-wangwang-dev@list.alibaba-inc.com>' -s 'Linux wangwang 日常编译失败' -a ~/.logs/$me.build.log </dev/null
else
echo build failed
fi
)
| true
|
040365532494f6850960630f07eff57420d9d210
|
Shell
|
walker233/updateWordPressUrl
|
/autoUpdateWpName
|
UTF-8
| 1,566
| 3.875
| 4
|
[] |
no_license
|
#!/bin/bash
#Name: autoUpdateWpName
#Desc: This script is intended to be run from root at startup.
#Original command
#php wp-cli.phar search-replace 'http://ec2-52-26-132-226.us-west-2.compute.amazonaws.com' 'http://ec2-52-25-180-240.us-west-2.compute.amazonaws.com' --path=/var/www/html --skip-columns=guid
USER="ec2-user"
UPDATE_WP_DIR="/usr/local/updateWordPressUrl"
WP_INSTALL_DIR="/var/www/html"
#don't quote, as shell expansion will not occur then.
WP_CLI=/home/ec2-user/updateWordPressUrl/wp-cli.phar
CURL=/usr/bin/curl
cd $UPDATE_WP_DIR
#When we do not have wp-cli installed, get it.
# Falls back to a wp-cli.phar in the current directory, downloading it from
# the official build mirror if it is not present either.
if [ ! -e "$WP_CLI" ]
then
WP_CLI="wp-cli.phar"
if [ ! -e "wp-cli.phar" ]
then
curl -O -s https://raw.githubusercontent.com/wp-cli/builds/gh-pages/phar/wp-cli.phar
fi
fi
# Print this script's help text, then terminate the script.
usage() {
printf '%s\n' \
"$0" \
"" \
" Updates wordpress url in all places it exists. Used from user data script" \
" when amazon instance startsup. Good for development environments. Auto" \
" determines the correct URL at startup." \
""
exit
}
# This script takes no arguments; anything else shows help and exits.
if [ $# -ne 0 ]
then
usage
fi
# Ask the EC2 instance metadata service for the instance's public hostname.
# (Backticks replaced with $( ) and expansions quoted.)
PUBLICNAME=$("$CURL" -s http://169.254.169.254/latest/meta-data/public-hostname)
NEW_SITE="http://$PUBLICNAME"
#get old site name by using mysql and grepping through file for information
OLD_SITE=$(sudo -u "$USER" -- php "$WP_CLI" option get siteurl --path="$WP_INSTALL_DIR")
#echo "php $WP_CLI search-replace "$OLD_SITE" "$NEW_SITE" --path=$WP_INSTALL_DIR --skip-columns=guid"
sudo -u "$USER" -- php "$WP_CLI" search-replace "$OLD_SITE" "$NEW_SITE" --path="$WP_INSTALL_DIR" --skip-columns=guid
| true
|
03b553489589bb0ee2225997f95fb4b86842e893
|
Shell
|
conmarti/ubuntu-dev
|
/src/ubuntu/install/postman.sh
|
UTF-8
| 275
| 2.671875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Download the latest Postman build, unpack it under /usr/local/etc and
# expose it on PATH as /usr/local/bin/postman.
### every exit != 0 fails the script
set -e
echo "Install Postman"
wget https://dl.pstmn.io/download/latest/linux64 -O postman.tar.gz
# the tarball's top-level directory is "Postman"
tar -xzf postman.tar.gz -C /usr/local/etc/
rm postman.tar.gz
ln -s /usr/local/etc/Postman/Postman /usr/local/bin/postman
| true
|
1537cd0ea55629f1f64f3713c57dfbfc1b9dee8f
|
Shell
|
JoeMido/networking-midonet-el
|
/package.sh
|
UTF-8
| 1,262
| 2.65625
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the networking-midonet RPM: stage sources/patches/spec into a fresh
# ~/rpmbuild tree, run rpmbuild, and copy the resulting noarch RPMs here.
# Copyright (C) 2015 Midokura SARL.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
set -o errexit
set -o xtrace
# Create the structure of directories to build the rpm
# a new hierarchy will be created in $HOME/rpmbuild
SRC_NAME=networking-midonet
BUILD_DIR=$HOME/rpmbuild
rm -rf $BUILD_DIR
rpmdev-setuptree
# Copy the tarball to the SOURCES directory
cp ./${SRC_NAME}-*.tar.gz $BUILD_DIR/SOURCES
# Copy the patches to the SOURCES directory
# `|| true` keeps errexit from aborting when there are no patches to copy
cp ./patches/* $BUILD_DIR/SOURCES || true
# Copy the spec file to the SPECS directory
cp ./${SRC_NAME}.spec ${BUILD_DIR}/SPECS
rpmbuild -ba ${BUILD_DIR}/SPECS/${SRC_NAME}.spec
# Copy the rpm to the current directory
cp -r $BUILD_DIR/RPMS/noarch/*.rpm .
| true
|
9dfe50a5436dcce757d682030f7c7b3b2f96db5f
|
Shell
|
kaypee0/intern-work
|
/setup/exp-stop.sh
|
UTF-8
| 374
| 3.265625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Stop the explorer
# Print usage help for this script.
# Fixed: the help text was copy-pasted from exp-start.sh — it named the
# wrong script and said "Launches the Explorer" although this script stops it.
function usage {
echo "Usage: ./exp-stop.sh "
echo "Stops the Explorer."
}
# Resolve this script's directory so docker-compose runs from ../devenv.
# NOTE(review): `which` only searches PATH, so this fails when the script is
# invoked by relative path (./exp-stop.sh); kept for compatibility, but
# `dirname "$BASH_SOURCE"` alone would be more robust — confirm callers.
DIR="$( which "$BASH_SOURCE")"
DIR="$(dirname "$DIR")"
# Fixed: guard the cd so docker-compose never runs in the wrong directory.
cd "$DIR/../devenv" || exit 1
echo "===>Stopping explorer....please wait"
docker-compose -f ./compose/docker-compose.explorer.yaml down 2> /dev/null
echo "Done."
| true
|
661d88a12770c87b6fee4651a6213e5ef246ddc1
|
Shell
|
oslet/doc
|
/ops/tools/sh/checkhd.sh
|
UTF-8
| 264
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/bash
# Print physical CPU id lines, CPU model name(s), and total memory in MiB.
# Fixed: the original checked $? after `cat | grep | sort | uniq`, which is
# always uniq's exit status (0), so the "VM" branch was unreachable. Capture
# grep's output and test it instead.
ids=$(grep "physical id" /proc/cpuinfo | sort | uniq)
if [ -n "$ids" ]; then
printf '%s\n' "$ids"
else
# no "physical id" lines usually means a virtualized environment
echo "VM"
fi
grep "model name" /proc/cpuinfo | sort | uniq | awk -F ":" '{print $2}'
# MemTotal is reported in kB; divide by 1024 for MiB
echo $(( $(grep MemTotal /proc/meminfo | awk -F ":" '{print $2}' | awk '{print $1}') / 1024 ))
| true
|
13b3b92dc6d0a25bd33aac90ee092ebcc7b88288
|
Shell
|
omsai/dREG
|
/dREG_paper_analyses/train_svm/erna_regression/makePIQ_starch.bsh
|
UTF-8
| 1,691
| 2.953125
| 3
|
[] |
no_license
|
#!/usr/bin/bash
#Makes STARCH file for PIQ
piq_path=/home/cgd24/storage/home/bin/thashim-piq-single-b73d9ff9853d/output
# makestarch <index> <motif-name>
# Combines the forward and reverse-complement PIQ call files for one motif
# (csv score columns + bed coordinates), collapses duplicate sites keeping
# the per-column maximum, and writes <motif-name>.starch.
function makestarch {
## Get score file ready.
# csv columns 3-7 are the per-call scores; strip the quoted header row
cat $piq_path/$1-$2-calls.all.csv $piq_path/$1-$2.RC-calls.all.csv | grep '^""' -v | awk -F "," 'BEGIN{OFS="\t"} {print $3,$4,$5,$6,$7}' > tmp
cat $piq_path/$1-$2-calls.all.bed $piq_path/$1-$2.RC-calls.all.bed | grep '^tr' -v > tmp2
# pair each bed record with its score columns, sorted for bedmap
paste tmp2 tmp | sort-bed - > tmp.bed
rm tmp tmp2
# bedops --merge --exact tmp.bed > uniqueSites.bed  ## Sometimes they are slightly off-center from one another. Only merge exact, so remove col 4-6 in bed files and UNIQ.
cat $piq_path/$1-$2-calls.all.bed $piq_path/$1-$2.RC-calls.all.bed | grep '^tr' -v | sort-bed - | awk 'BEGIN{OFS="\t"} {print $1,$2,$3}' | uniq > uniqueSites.bed
# cat tmp.bed | bedmap --max uniqueSites.bed - > tmp
# For each score column (bed cols 7..11), map the maximum value onto the
# exact-matching unique site.
cat tmp.bed | awk 'BEGIN{OFS="\t"} {print $1,$2,$3,$4,$7,$6}'  | bedmap --exact --max uniqueSites.bed - > tmp2 ##
cat tmp.bed | awk 'BEGIN{OFS="\t"} {print $1,$2,$3,$4,$8,$6}'  | bedmap --exact --max uniqueSites.bed - > tmp3 ##
cat tmp.bed | awk 'BEGIN{OFS="\t"} {print $1,$2,$3,$4,$9,$6}'  | bedmap --exact --max uniqueSites.bed - > tmp4 ##
cat tmp.bed | awk 'BEGIN{OFS="\t"} {print $1,$2,$3,$4,$10,$6}' | bedmap --exact --max uniqueSites.bed - > tmp5 ##
cat tmp.bed | awk 'BEGIN{OFS="\t"} {print $1,$2,$3,$4,$11,$6}' | bedmap --exact --max uniqueSites.bed - > tmp6 ##
# base bed6 record carrying the motif name; score 0, strand "."
cat uniqueSites.bed | awk 'BEGIN{OFS="\t"} {print $1,$2,$3,"'$2'","0","."}' > tmp0
paste tmp0 tmp2 tmp3 tmp4 tmp5 tmp6 | starch - > $2.starch
rm tmp*
}
makestarch 1 NRF1
makestarch 2 ELF1
makestarch 3 SP1
makestarch 4 MAX
makestarch 5 SP1JASPAR
| true
|
330bdb4c9016c179187b0caca174feb09e396487
|
Shell
|
uc-cdis/cirrus
|
/release_notes.sh
|
UTF-8
| 658
| 2.859375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Travis release step: generate release notes between the previous tag and
# $TRAVIS_TAG, then PATCH them into the matching GitHub release body.
pip install git+https://github.com/uc-cdis/release-helper.git@master#egg=gen3git
# gen3git writes release_notes.md (consumed by the curl PATCH below)
gen3git --repo $TRAVIS_REPO_SLUG --from-tag $(git describe --tags --abbrev=0 --match=[0-9]* --exclude=$TRAVIS_TAG) gen --text --markdown --to-tag $TRAVIS_TAG
# look up the GitHub API URL of the release object for this tag
URL=$(curl -s -H "Authorization:token $GH_TOKEN" https://api.github.com/repos/${TRAVIS_REPO_SLUG}/releases/tags/$TRAVIS_TAG | python -c "import sys, json; print(json.load(sys.stdin)['url'])")
echo $URL
if [[ $URL ]]; then
curl -H "Authorization: token $GH_TOKEN" --request PATCH $URL --data "$(python -c "import sys,json; json.dump(dict(body=open('release_notes.md').read()), sys.stdout)")"
fi
| true
|
7ca0a2a8fcb246a50c20ad712b9dbb60a66e4d60
|
Shell
|
YounilityInc/bootstrap-legacy-sass
|
/test/node_sass_compile_test.sh
|
UTF-8
| 326
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Test compilation with node-sass binary
mkdir -p tmp/node-sass
# Compile both entry points; report and exit non-zero if either fails.
if ! { node-sass assets/stylesheets/_bootstrap-legacy.scss -o tmp/node-sass/bootstrap-legacy.css \
    && node-sass assets/stylesheets/bootstrap-legacy/_theme.scss -o tmp/node-sass/bootstrap-legacy-theme.css; }
then
  echo "node-sass compilation failed"
  exit 1
fi
| true
|
3a2d217cba25a699abd46fdad519b6dd333b5d36
|
Shell
|
nublic/Nublic
|
/build
|
UTF-8
| 9,111
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
# Nublic build driver: builds all .deb packages and assembles an apt repo.
# For .deb packages
export DEBFULLNAME='Nublic Packaging Team'
export DEBEMAIL='packaging@nublic.com'
export DEBKEY=4EEA5986
# timestamped revision for dch; INITIAL_TIME lets create_repo pick out only
# the .debs produced during this run (mtime comparison)
export DEBREVISION=`date +%F-%H-%M`
export INITIAL_TIME=`date +%s`
# Used directories
export INITIAL_DIR=`pwd`
export TARGET_DIR="${INITIAL_DIR}/target"
export REPO_DIR="${TARGET_DIR}/repo"
export MAVEN_REPO_DIR="${TARGET_DIR}/temp/maven"
export MAVEN_CONFIG_FILE="${INITIAL_DIR}/conf/maven/settings.xml"
# NOTE(review): MAVEN embeds escaped quotes but is later expanded unquoted
# (${MAVEN} ...), so the quote characters survive as literal argument text —
# this only works while MAVEN_CONFIG_FILE contains no spaces. Confirm.
export MAVEN="mvn --settings \"${MAVEN_CONFIG_FILE}\" -Dmaven.test.skip"
export MAVEN_SYS="mvn -Dmaven.test.skip"
export DEBUILD="debuild -d -eMAVEN -eMAVEN_REPO_DIR -eMAVEN_OPTS"
export MAVEN_OPTS="-XX:PermSize=1024m -XX:MaxPermSize=1024m"
# Config options
export SHOULD_CLEAN_MAVEN=1 # 1 for running 'maven clean'
# Install a jar into the build's private Maven repo.
# $1 = jar path, $2 = groupId, $3 = artifactId, $4 = version.
install_in_maven_repo() {
${MAVEN} install:install-file -Dfile=$1 -DgroupId=$2 \
         -DartifactId=$3 -Dversion=$4 -Dpackaging=jar
}
# Prepare the build environment: recreate target/repo dirs, remove stale
# build artifacts, seed the private Maven repo with bundled 3rd-party jars,
# and import the packaging GPG keys.
initialize() {
# Make sure you have libdbus-java maven2 and reprepro installed
# You need to install maven3!! It's not in the repository and needs to be manually installed
# sudo apt-get install libdbus-java maven2 reprepro dpkg-sig libterm-readkey-perl cdbs python-all python-setuptools libtag1-dev ffmpeg libboost1.42-dev zlib1g-dev scala dpkg-sig reprepro devscripts openjdk-7-jdk junit junit4 python-sphinx python-support
# sudo update-java-alternatives -s java-1.7.0-\*
rm -rf "${REPO_DIR}"
# Create build folders, if non existant
mkdir -p "${TARGET_DIR}"
mkdir -p "${REPO_DIR}"
mkdir -p "${MAVEN_REPO_DIR}"
# Delete previous builds
rm -rf "${TARGET_DIR}"/*.{deb,build,changes}
# Install libraries in maven repo
install_in_maven_repo /usr/share/java/dbus.jar org.freedesktop dbus 2.8.1
install_in_maven_repo "${INITIAL_DIR}/3rd-party-libs/jEN/json_simple-1.1.jar" jEN json-simple 1.1
install_in_maven_repo "${INITIAL_DIR}/3rd-party-libs/jEN/jEN.jar" jEN jEN 4.x.o
install_in_maven_repo "${INITIAL_DIR}/3rd-party-libs/gwt-dnd/gwt-dnd-3.1.2-2.jar" com.allen_sauer gwt-dnd 3.1.2-2
install_in_maven_repo "${INITIAL_DIR}/3rd-party-libs/gwt-slider/gwt-slider-bar-1.0.jar" gwt-incubator gwt-slider-bar 1.0
install_in_maven_repo "${INITIAL_DIR}/3rd-party-libs/gwt-comet/gwt-comet-1.2.3.jar" gwt-comet gwt-comet 1.2.3
install_in_maven_repo "${INITIAL_DIR}/3rd-party-libs/gwtmobile/gwtmobile-1.1.jar" gwtmobile gwtmobile 1.1
# Install key into user keychain
gpg --import conf/keys/packaging.key
gpg --import conf/keys/packaging.private.key
}
# Like install_in_maven_repo, but installs into the system Maven repo
# (no private settings.xml). $1=jar, $2=groupId, $3=artifactId, $4=version.
install_in_system_repo() {
${MAVEN_SYS} install:install-file -Dfile=$1 -DgroupId=$2 \
         -DartifactId=$3 -Dversion=$4 -Dpackaging=jar
}
# mvn clean + install with the dev-mode profile, against the SYSTEM repo.
# $1 = module directory; always returns to ${INITIAL_DIR}.
package_and_sysinstall_dev_mode() {
echo
echo "PACKAGING AND INSTALLING IN MAVEN " $1
cd "$1"
${MAVEN_SYS} -Pdev-mode clean
${MAVEN_SYS} -Pdev-mode install
cd "${INITIAL_DIR}"
}
# Seed the SYSTEM Maven repo with the same bundled jars as initialize(),
# then build the player module in dev mode against it.
initialize_system() {
install_in_system_repo /usr/share/java/dbus.jar org.freedesktop dbus 2.8.1
install_in_system_repo "${INITIAL_DIR}/3rd-party-libs/jEN/json_simple-1.1.jar" jEN json-simple 1.1
install_in_system_repo "${INITIAL_DIR}/3rd-party-libs/jEN/jEN.jar" jEN jEN 4.x.o
install_in_system_repo "${INITIAL_DIR}/3rd-party-libs/gwt-dnd/gwt-dnd-3.1.2-2.jar" com.allen_sauer gwt-dnd 3.1.2-2
install_in_system_repo "${INITIAL_DIR}/3rd-party-libs/gwt-slider/gwt-slider-bar-1.0.jar" gwt-incubator gwt-slider-bar 1.0
install_in_system_repo "${INITIAL_DIR}/3rd-party-libs/gwt-comet/gwt-comet-1.2.3.jar" gwt-comet gwt-comet 1.2.3
install_in_system_repo "${INITIAL_DIR}/3rd-party-libs/gwtmobile/gwtmobile-1.1.jar" gwtmobile gwtmobile 1.1
package_and_sysinstall_dev_mode player
}
# Copy every .deb from directory $1 into ${TARGET_DIR} and refresh its mtime
# so create_repo (strict mtime > INITIAL_TIME, 1s granularity) treats it as
# produced by this run — hence the sleep before touching.
# Globals: TARGET_DIR (destination), INITIAL_DIR (cwd restored on return).
copy_and_touch() {
echo
echo "COPYING DEBS FROM " "$1"
sleep 1
cd "$1"
for f in *.deb
do
# Fixed: skip the literal '*.deb' when the glob matches nothing, and
# quote "$f" so filenames with spaces survive the cp.
[ -e "$f" ] || continue
cp "$f" "${TARGET_DIR}"
touch "${TARGET_DIR}/$f"
done
cd "${INITIAL_DIR}"
}
# mvn clean + install (private repo settings). $1 = module directory;
# always returns to ${INITIAL_DIR}.
package_and_install() {
echo
echo "PACKAGING AND INSTALLING IN MAVEN " $1
cd "$1"
${MAVEN} clean
${MAVEN} install
cd "${INITIAL_DIR}"
}
# Same as package_and_install, but activates the dev-mode Maven profile.
# $1 = module directory; always returns to ${INITIAL_DIR}.
package_and_install_dev_mode() {
echo
echo "PACKAGING AND INSTALLING IN MAVEN " $1
cd "$1"
${MAVEN} -Pdev-mode clean
${MAVEN} -Pdev-mode install
cd "${INITIAL_DIR}"
}
# Build a Debian package from module directory $1: optionally mvn-clean,
# stamp a changelog entry, debuild; retry once on failure before giving up.
# The dch changelog edit is reverted after each attempt (git checkout).
build_deb_package() { # $1 = directory, $2 = "gather_dependencies" -> try to gather dependencies
echo
echo "BUILDING .DEB FOR " $1
cd "$1"
# Try to run Maven clean
if [ ${SHOULD_CLEAN_MAVEN} -eq 1 ]
then
${MAVEN} clean
fi
# Add changelog entry
dch -p -v ${DEBREVISION} Building on `date`
${DEBUILD} -i -us -uc -b
exit=$?
git checkout debian/changelog
# Single retry: some builds fail transiently; on a second failure abort
# the whole driver (exit 1).
if [ $exit -ne 0 ]
then
dch -p -v ${DEBREVISION} Building on `date`
${DEBUILD} -i -us -uc -b
exit=$?
git checkout debian/changelog
if [ $exit -ne 0 ]
then
echo "Error building package " $1
exit 1
fi
fi
${DEBUILD} clean
# debuild drops artifacts in the parent directory; collect them
cd ..
mv *.deb *.build *.changes "${TARGET_DIR}"
cd "${INITIAL_DIR}"
}
# Build a 3rd-party library living in 3rd-party-libs/$1, using the packaging
# files from 3rd-party-libs/$1-debian, and move the artifacts to TARGET_DIR.
# Aborts the whole driver (exit 1) if debuild fails.
build_submodule_lib() {
echo
echo "BUILDING EXTERNAL LIBRARY " "$1"
cd 3rd-party-libs
# refresh the debian/ dir from the sibling *-debian template
rm -rf "$1"/debian
mkdir "$1"/debian
cp -R "$1-debian"/* "$1"/debian
cd "$1"
${DEBUILD} -i -us -uc -b
exit=$?
# Fixed: use a numeric comparison (-ne), matching build_deb_package; the
# original `[ $exit != 0 ]` was an unquoted string comparison.
if [ "$exit" -ne 0 ]
then
echo "Error building submodule " "$1"
exit 1
fi
${DEBUILD} clean
cd ..
mv *.deb *.build *.changes *.dsc *.tar.gz "${TARGET_DIR}"
cd "${INITIAL_DIR}"
}
# Assemble the apt repository: sign every .deb produced during THIS run
# (mtime strictly newer than INITIAL_TIME) and add it via reprepro.
create_repo() {
echo
echo "CREATING REPOSITORY"
# Create repo structure
mkdir -p "${REPO_DIR}/conf"
mkdir -p "${REPO_DIR}/incoming"
cp conf/repo/conf/distributions "${REPO_DIR}/conf"
cd "${TARGET_DIR}"
# Sign packages and add packages to repo
for pkg in *.deb
do
# only packages newer than the run's start time; copy_and_touch
# refreshes mtimes precisely so pre-built debs pass this filter
DEB_DATE=`date -r ${pkg} +%s`
if [ ${DEB_DATE} -gt ${INITIAL_TIME} ]
then
dpkg-sig -k ${DEBKEY} --verbose --sign builder $pkg
reprepro --ignore=wrongdistribution -Vb repo includedeb precise $pkg
fi
done
cd "${INITIAL_DIR}"
}
# Main build sequence: sync submodules, build third-party libraries, then
# every Nublic module and app, and finally publish the signed repository.
# 'initialize' and 'install_in_maven_repo' are defined earlier in this file
# (outside this excerpt) — presumably they set INITIAL_DIR/TARGET_DIR and
# install jars into the local Maven repo; confirm against the full script.
git submodule update --init --merge
initialize
# Libraries
# echoprint-codegen ships without a Makefile suitable for packaging; drop in
# our own before building it as a submodule.
cp ${INITIAL_DIR}/3rd-party-libs/echoprint-codegen-Makefile ${INITIAL_DIR}/3rd-party-libs/echoprint-codegen/Makefile
build_submodule_lib echoprint-codegen
build_submodule_lib sunburnt
build_submodule_lib pyechonest
# These libraries come prebuilt; just copy their .debs into TARGET_DIR.
copy_and_touch 3rd-party-libs/aria2
copy_and_touch 3rd-party-libs/python-fstab
copy_and_touch 3rd-party-libs/tomcat7
copy_and_touch 3rd-party-libs/lucene-solr
build_deb_package 3rd-party-libs/pykka
# build_deb_package 3rd-party-libs/guava
# bst-player fork
package_and_install_dev_mode player
# Base modules
build_deb_package resource/main/src
build_deb_package resource/java
package_and_install resource/java
build_deb_package notification/main/src
build_deb_package notification/java
package_and_install notification/java
build_deb_package files_and_users/main/src
build_deb_package files_and_users/client/src
build_deb_package files_and_users/java
package_and_install files_and_users/java
build_deb_package filewatcher/main/src
build_deb_package filewatcher/scala
install_in_maven_repo filewatcher/scala/target/*jar-with-dependencies.jar com.nublic filewatcher.scala 0.0.1-SNAPSHOT
# Utilities
build_deb_package python/client/src
build_deb_package python/server/src
package_and_install apps/util/gwt
package_and_install apps/util/ws-json
# Manager App
build_deb_package apps/app.manager/server-python/src
build_deb_package apps/app.manager/web
build_deb_package apps/app.manager/welcome
build_deb_package apps/app.manager/settings
build_deb_package apps/app.manager
# Browser App
build_deb_package apps/app.browser/server
build_deb_package apps/app.browser/web
build_deb_package apps/app.browser
# Music App
build_deb_package apps/app.music/server-python/src
build_deb_package apps/app.music/fs/src
build_deb_package apps/app.music/web
build_deb_package apps/app.music
# Photos App
build_deb_package apps/app.photos/server-python/src
package_and_install apps/app.photos/common
build_deb_package apps/app.photos/web
build_deb_package apps/app.photos/mobile
build_deb_package apps/app.photos
# Downloads App
build_deb_package apps/app.downloads/server
build_deb_package apps/app.downloads/web
build_deb_package apps/app.downloads
# Market App
build_deb_package apps/app.market/daemon/src
build_deb_package apps/app.market/server-python/src
build_deb_package apps/app.market/web
build_deb_package apps/app.market
# Init App
build_deb_package apps/app.init/server-python/src
build_deb_package apps/app.init/web
build_deb_package apps/app.init
# House App
# NOTE(review): build_deb_package ignores its second argument, so
# 'nublic-app-house' has no effect here — confirm this is intentional.
build_deb_package apps/app.house nublic-app-house
# Extra configuration packages
build_deb_package conf/apache
build_deb_package conf/samba
build_deb_package conf/solr
build_deb_package conf/tomcat6
build_deb_package conf/tomcat7
build_deb_package conf/minidlna
build_deb_package conf/upstart
build_deb_package conf/lshell
build_deb_package conf/openssh
build_deb_package conf/dyndnschecker
build_deb_package conf/unattended-upgrades
build_deb_package conf/nublic
create_repo
# create_repo_cd
exit 0
| true
|
f7ce0103584b38864cf33821f67cd60dabf40e0f
|
Shell
|
youtube/cobalt
|
/starboard/evergreen/testing/linux-x64x11/start_cobalt.sh
|
UTF-8
| 1,345
| 3.875
| 4
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
#
# Copyright 2021 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs Cobalt on the desired platform.
#
# Globals:
# CONTENT
# LOG_PATH
# OUT (directory containing the loader_app binary)
#
# Args:
# URL, path for logging, variable to store Cobalt's pid in, extra arguments.
#
# Returns:
# 1 if fewer than three arguments were supplied, otherwise 0.
function start_cobalt() {
if [[ $# -lt 3 ]]; then
log "error" " start_cobalt missing args"
return 1
fi
URL="${1}"
LOG="${2}"
# Nameref: assignments to loader_pid_ref write into the caller's variable
# whose name was passed as $3.
declare -n loader_pid_ref=$3
ARGS="${4}"
# Make sure no stale Cobalt instance is running before launching a new one.
stop_cobalt
log "info" " Starting Cobalt with:"
log "info" " --url=${URL}"
# Intentionally unquoted: ARGS is a whitespace-separated list of flags.
for arg in $ARGS; do
log "info" " ${arg}"
done
log "info" " Logs will be output to '${LOG_PATH}/${LOG}'"
# eval so the flags inside ARGS are split into separate arguments; the
# escaped doubled quotes keep the URL as one word, and tee mirrors all
# loader output into the log file while the process runs in the background.
eval "${OUT}/loader_app --url=\"\"${URL}\"\" ${ARGS} &> >(tee \"${LOG_PATH}/${LOG}\") &"
loader_pid_ref=$!
log "info" " Cobalt process ID is ${loader_pid_ref}"
}
| true
|
c892b0a04117e68da2151f49af8f40a0d889e925
|
Shell
|
jmg1297/thesis
|
/bl6_strg_merge/src/get_novel_exons.sh
|
UTF-8
| 358
| 2.765625
| 3
|
[] |
no_license
|
#!/bin/bash
# For every merge subdirectory under data/split_by_gffcmp_class, extract exon
# records from its novel_non-coding.gtf into a sorted BED file under
# data/novel_exons/<merge>/.
# NOTE: directory names are assumed to contain no whitespace — the for loop
# word-splits find's output.
for d in $(find data/split_by_gffcmp_class -mindepth 1 -type d); do
  MRG=$(echo "$d" | cut -d/ -f3)   # third path component: the merge name
  mkdir -p "data/novel_exons/$MRG"
  # Keep exon lines; emit chrom,start,end,transcript_id,exon_number,score,strand;
  # strip quotes/semicolons; join id and exon number with '.'; prefix 'chr';
  # tab-separate; sort by chrom then start.
  grep -P "\sexon\s" "$d/novel_non-coding.gtf" \
    | awk '{print $1,$4,$5,$12,$14,0,$7}' \
    | sed -r -e 's/[\";]//g' -e 's/ /./4' -e 's/^/chr/g' -e 's/\s+/\t/g' \
    | sort -k1,1 -k2,2n > "data/novel_exons/$MRG/novel_exons.bed"
done
| true
|
7de05a62a52232ae906f47c7a0e68b79628ed0f6
|
Shell
|
stahta01/my-MSYS2-packages
|
/linux-cross-glibc/PKGBUILD
|
UTF-8
| 4,050
| 3.4375
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
# Contributor: Tim Stahlhut <stahta01@gmail.com>
# Fork of: https://www.archlinux.org/packages/community/any/aarch64-linux-gnu-glibc/
# PKGBUILD for a glibc cross-compiled for Linux targets, built under MSYS2.
_targets="aarch64-linux-gnu"
_run_helper_script=yes # Set to no once linux-cross-glibc and linux-cross-gcc packages have been built and installed
_realname=glibc
_mingw_suff=linux-cross
pkgname=("${_mingw_suff}-${_realname}")
pkgver=2.32
pkgrel=1
pkgdesc="GNU C Library for Linux targets"
arch=('i686' 'x86_64')
url='https://gcc.gnu.org/'
license=(GPL LGPL FDL)
depends=("${_mingw_suff}-headers")
# diffutils/texinfo are only needed while the helper script bootstraps the
# cross gcc itself; once prebuilt, the cross-gcc package is required instead.
makedepends=('rsync' 'gcc' 'tar' 'make' 'python' 'bison'
$([[ "$_run_helper_script" != "no" ]] && echo 'diffutils' 'texinfo')
$([[ "$_run_helper_script" == "no" ]] && echo "${_mingw_suff}-gcc") )
groups=("${_mingw_suff}-toolchain" "${_mingw_suff}")
options=(!emptydirs !strip staticlibs !buildflags)
source=(https://ftp.gnu.org/gnu/libc/glibc-$pkgver.tar.xz )
sha256sums=('1627ea54f5a1a8467032563393e0901077626dc66f37f10ee6363bb722222836')
noextract=(glibc-$pkgver.tar.xz )
_pkgsourcedir=glibc-${pkgver}
# Helper macros to help make tasks easier #
# Extract a source tarball unless its output directory already exists
# (the directory name is the tarball name minus ".tar" and any compression
# suffix, e.g. glibc-2.32.tar.xz -> glibc-2.32).
_extract() {
  local tarfile="$1"
  # Parameter expansion instead of echo|sed: no subshell, same result.
  local extracted="${tarfile%%.tar*}"
  if [ ! -d "$extracted" ]; then
    echo "Extracting ${tarfile}"
    tar -xf "$tarfile" --checkpoint=1000
  fi
}
# ======================================= #
# makepkg prepare(): unpack the tarball and patch glibc's build system so it
# builds under MSYS2's filesystem and tooling.
prepare() {
_extract glibc-$pkgver.tar.xz || true
cd ${_pkgsourcedir}
# Replace "oS" with "oZ" to avoid filename clashes
# (case-insensitive filesystems cannot keep the .oS static-PIC object
# suffix apart from .os, so it is renamed throughout the build system).
sed -i 's/.oS)/.oZ)/g; s/.oS$/.oZ/g; s/.oS =/.oZ =/g' Makeconfig
sed -i 's/.oS,/.oZ,/g; s/.oS +=/.oZ +=/g; s/.oS)/.oZ)/g' Makerules
sed -i 's/.oS)/.oZ)/g; s/.oS,/.oZ,/g' extra-lib.mk
sed -i 's/.oS)/.oZ)/g' nptl/Makefile
sed -i 's/.oS +=/.oZ +=/g' csu/Makefile
sed -i 's/.oS +=/.oZ +=/g' sysdeps/i386/i686/Makefile
sed -i 's/.oS,/.oZ,/g' sysdeps/ieee754/ldbl-opt/Makefile
sed -i 's/.oS +=/.oZ +=/g' sysdeps/sparc/sparc32/sparcv9/Makefile
sed -i 's/.oS +=/.oZ +=/g' sysdeps/sparc/sparc64/Makefile
sed -i 's/.oS +=/.oZ +=/g' sysdeps/unix/sysv/linux/mips/Makefile
sed -i 's/.oS +=/.oZ +=/g' sysdeps/x86/Makefile
sed -i 's/,oS}/,oZ}/g' scripts/check-local-headers.sh
# use copy because the rellns-sh has issues under msys2
sed -i 's|$(LN_S) `$(..)scripts/rellns-sh -p $< $@` $@|cp -p $< $@|' Makerules
}
# makepkg build(): configure and build glibc once per cross target, using the
# helper-built cross gcc (bootstrap) or the installed one.
build() {
local _path=$PATH
for _target in ${_targets}; do
# Bootstrap mode: build the cross gcc first, then use its bin directory.
if [ "${_run_helper_script}" = "yes" ]; then
msg "Running build_cross_linux_gcc.sh for ${_target}"
cd ${srcdir}/..
./build_cross_linux_gcc.sh $_target "/opt/local/cross"
_gcc_bin=/opt/local/cross/bin
else
_gcc_bin=/opt/bin
fi
# Restore PATH from the saved copy each iteration, then prepend /opt/bin.
export PATH=/opt/bin:${_path}
msg "Building ${_target} glibc"
# glibc requires an out-of-tree build; start from a clean build directory.
[[ -d "${srcdir}"/build-glibc-${_target} ]] && rm -rf "${srcdir}"/build-glibc-${_target}
mkdir -p "${srcdir}"/build-glibc-${_target} && cd "${srcdir}"/build-glibc-${_target}
# Cross toolchain is selected entirely through environment overrides.
BUILD_CC=${CHOST}-gcc \
CC=${_gcc_bin}/${_target}-gcc \
CXX=${_gcc_bin}/${_target}-g++ \
AR=${_target}-ar \
AS=${_target}-as \
RANLIB=${_target}-ranlib \
../${_pkgsourcedir}/configure \
--prefix=/opt/$_target \
--datarootdir=/opt/$_target/share \
--includedir=/opt/$_target/include \
--target=$_target \
--host=$_target \
--build=$CHOST \
--with-headers=/opt/$_target/include \
--enable-shared --enable-static \
--disable-profile \
--disable-werror
# Only build the libraries, not glibc's utility programs.
echo "build-programs=no" >> configparms
make -j1
done
}
# makepkg package(): install each target's build tree into $pkgdir.
package() {
  for tgt in ${_targets}; do
    msg "Packaging ${tgt} headers"
    cd "${srcdir}/build-glibc-${tgt}"
    make install_root="$pkgdir" install
  done
}
| true
|
cfbf94902b29f617b332659a53ca66d822d4bbf5
|
Shell
|
corretto/amazon-corretto-crypto-provider
|
/tests/ci/run_accp_overkill.sh
|
UTF-8
| 758
| 3.328125
| 3
|
[
"Apache-2.0",
"BSD-3-Clause",
"OpenSSL"
] |
permissive
|
#!/bin/bash
set -exo pipefail
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# Runs the ACCP "overkill" Gradle test targets, optionally in FIPS mode.
# Usage: run_accp_overkill.sh [--fips]
# Testing non-FIPS is the default.
testing_fips=false
while [[ $# -gt 0 ]]; do
case ${1} in
--fips)
testing_fips=true
;;
*)
echo "${1} is not supported."
exit 1
;;
esac
# Check next option -- key/value.
shift
done
echo "Testing ACCP overkill tests."
# dieharder_threads are not supported on ARM for now.
if [[ ("$(uname -p)" == 'aarch64'*) || ("$(uname -p)" == 'arm'*) ]]; then
./gradlew -DFIPS=$testing_fips test_extra_checks test_integration_extra_checks
else
./gradlew -DFIPS=$testing_fips test_extra_checks test_integration_extra_checks dieharder_threads
fi
| true
|
1f5bfd78763f76599f911b18778dab600c819448
|
Shell
|
mmanumos/holberton-system_engineering-devops
|
/0x05-processes_and_signals/5-kill_me_now
|
UTF-8
| 200
| 2.71875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Kill the infinite-loop process started by the previous task, looking it up
# by the full command line (the script's file name).
# NOTE(review): pgrep may return zero or several PIDs; with the quoted
# "$PID", kill then receives an empty argument or all PIDs as one word and
# fails — confirm exactly one matching process is guaranteed.
PID=$(pgrep -f '4-to_infinity_and_beyond')
kill "$PID"
| true
|
a0841a5898290c08a0a6b0eba7efd281e2665873
|
Shell
|
r-tty/radios
|
/Misc/boot/make-floppy.sh
|
UTF-8
| 340
| 2.8125
| 3
|
[] |
no_license
|
#! /bin/sh
# Copy the RadiOS kernel and boot modules onto a DOS-formatted floppy image
# with mtools; the target drive letter must be declared in ~/.mtoolsrc.
BOOTDIR="/boot/RadiOS"
KERNEL="rmk586.rdm.gz"
# MODULES is a space-separated list, expanded unquoted below on purpose.
MODULES="libc.rdm taskman.rdm monitor.rdm console.rdx x-ray.rdx"
DRIVE="x:"
DIR="boot/radios"
MTOOLSRC="$HOME/.mtoolsrc"
# Start here
# Only proceed when the drive is configured; the subshell keeps the 'cd'
# calls from changing the caller's working directory.
grep -q "drive $DRIVE" $MTOOLSRC && (
cd $BOOTDIR
mcopy -o $KERNEL $DRIVE/$DIR
cd modules
mcopy -o $MODULES $DRIVE/$DIR/modules
)
| true
|
47da11b9d3a178e0f2a72282222e8cf899eee9d8
|
Shell
|
aarontp/l2tdevtools
|
/utils/run_linter.sh
|
UTF-8
| 1,553
| 3.890625
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Script that runs the linter on all files.
# Relies on helper functions exported by utils/common.sh
# (linting_is_correct_remote_origin, have_remote_origin, have_remote_upstream,
# linting_is_correct_remote_diffbase).
EXIT_FAILURE=1;
EXIT_SUCCESS=0;
DIFFBASE="upstream/master";
SHOW_HELP=0;
if ! test -f "utils/common.sh";
then
echo "Unable to find common scripts (utils/common.sh).";
echo "This script can only be run from the root of the source directory.";
exit ${EXIT_FAILURE};
fi
# Exports GIT_URL and PROJECT_NAME.
. utils/common.sh
# NOTE(review): the catch-all case below does not shift, so any unrecognized
# argument makes this loop spin forever — confirm before relying on it.
while test $# -gt 0;
do
case $1 in
--diffbase )
shift;
DIFFBASE=$1;
shift;
;;
-h | --help )
SHOW_HELP=1;
shift;
;;
*)
;;
esac
done
if test ${SHOW_HELP} -ne 0;
then
echo "Usage: ./${SCRIPTNAME} [--diffbase DIFFBASE] [--help]";
echo "";
echo " --diffbase: the name of the branch to use as diffbase for the CL.";
echo " The default is upstream/master";
echo "";
echo " --help: shows this help.";
echo "";
exit ${EXIT_SUCCESS};
fi
if ! linting_is_correct_remote_origin;
then
echo "Linting aborted - fix the reported issues.";
exit ${EXIT_FAILURE};
fi
# Determine if we have the master repo as origin.
# The variable holds the *name* of the helper function, which is then
# invoked via ${HAVE_REMOTE_ORIGIN} below.
HAVE_REMOTE_ORIGIN=have_remote_origin;
if ! ${HAVE_REMOTE_ORIGIN};
then
if ! have_remote_upstream;
then
echo "Linting aborted - missing upstream.";
echo "Run: 'git remote add upstream https://github.com/log2timeline/${PROJECT_NAME}.git'";
exit ${EXIT_FAILURE};
fi
git fetch upstream;
if ! linting_is_correct_remote_diffbase ${DIFFBASE};
then
echo "Linting aborted - fix the reported issues.";
exit ${EXIT_FAILURE};
fi
fi
exit ${EXIT_SUCCESS};
| true
|
140d36488b0dc1cf8677fbfec1ebebbc51b1f938
|
Shell
|
zeroknowledgediscovery/cognet
|
/examples/mpi_tmp/mpi_run.sh
|
UTF-8
| 289
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Submit one MPI job per year: stage inputs with mpi_setup.sh, then hand the
# job to the cluster launcher.
YEARS='2016'
# nodes requested
NODES=4
# time requested
T=2
NUM='all'
LAUNCH='../launcher.sh'
for yr in `echo $YEARS`
do
echo $yr
./mpi_setup.sh $yr $NODES $NUM tmp_"$yr"
$LAUNCH -P tmp_"$yr" -F -T $T -N "$NODES" -C 28 -p broadwl -J MPI_TMP_1"$yr" -M 56
done
# NOTE(review): this cleanup is outside the loop, so it only removes the temp
# files of the LAST year in YEARS — confirm that is intended before adding
# more years to the list.
rm tmp_"$yr"*
| true
|
3a737abcc96fe9256e18b1137a4a38b1797a7bd3
|
Shell
|
kant/RuDaS
|
/other/old/run-simple-cwa.sh
|
UTF-8
| 764
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
# Run the AMIE+ rule miner over each simple-cwa dataset; the commented-out
# blocks preserve the equivalent invocations for the Neural-LP and NTP
# systems (which require their own conda environments).
DIR=`dirname $0`
SYSDIR=$DIR/../systems
DATA=$DIR/../data/simple-cwa/
for NAME in NNCWA-XS-1 NNCWA-S-2-1 NNCWA-S-2-2
do
#SYSTEM=Neural-LP
#source activate $SYSTEM
#python $SYSDIR/$SYSTEM/src/main.py --datadir=$DATA/$SYSTEM/$NAME --exps_dir=$DIR/../output/simple-cwa/$SYSTEM --exp_name=$NAME > $DIR/../output/simple-cwa/$SYSTEM/$NAME/log.txt
#SYSTEM=ntp
#source activate $SYSTEM
#export PYTHONPATH=$PYTHONPATH:$SYSDIR/$SYSTEM
#python $SYSDIR/$SYSTEM/ntp/experiments/learn.py $DATA/$SYSTEM/$NAME/run.conf > $DIR/../output/simple-cwa/$SYSTEM/$NAME/log.txt
SYSTEM=amiep
# AMIE+ with min support 3, min initial support 3, min PCA confidence 0.25,
# writing mined rules per dataset.
java -jar $SYSDIR/$SYSTEM/amie_plus.jar -mins 3 -minis 3 -minpca 0.25 -oute $DATA/$SYSTEM/$NAME/train.txt > $DIR/../output/simple-cwa/$SYSTEM/$NAME/results.txt
done
exit 0
| true
|
503f3aed9cb14c0dc70984648c95ba4dba4e519f
|
Shell
|
for2ando/sync-android-apps
|
/saa-get-apps
|
UTF-8
| 9,015
| 3.828125
| 4
|
[] |
no_license
|
#!/bin/bash
# saa-get-apps: fetch (or list) APKs and adb backups from an Android device
# into the current SAADirectory. Helper functions come from the sourced
# scripts below (run.sh, adbwrappers.sh, in_args.sh, trapwrapper.sh).
pname=$(basename "$0")
dname=$(dirname "$0")
# Make dname absolute so sibling scripts can be sourced from any cwd.
case "$dname" in /*);; *) dname="$(pwd)/$dname";; esac
source "$dname/saa-environ.sh"
# NOTE(review): when 'pageless' is absent, this *runs* ${PAGER:-less} inside
# the command substitution rather than storing its name — confirm intended.
xpager=$(which pageless 2>/dev/null || ${PAGER:-less})
usage="$pname [{-t|--target} {apk|ab|apk,ab}] [-l|--list|-q|--quiet|-v|--verbose|-n|--dryrun|--log|--nolog]
$pname {-h|--help}
Features:
Get (or list) programs (apk files) and/or data (adb backups or raw /data
directory images) files from a remote Android device using adb command.
The entity (the programs and the data) are stored to the 'apps' directory,
which is a child of current directory (Such directory is called
SAADirectory).
If the entities are already in the '$appsdir' directory, they are not
getting or listing (The -o|--overwrite disables this behavior.). And if
the name of entities are already listed in the list: '$blacklist' file,
they are also not getting or listing.
Options:
-t|--target {apk|ab|apk,ab}
Target entities to get from a remote Android device. Default is apk,ab.
-o|--overwrite
If the entities are already on the '$appsdir' directory, do putting or
listing.
-l|--list
Instead of getting entities, List entities on a remote Android device.
-q|--quiet
No messages are displayed, except error message.
-v|--verbose
Additional messages are displayed. It has a precedence over -q|--quiet.
-n|--dryrun
Only messages displayed, No SAADirectory is made and no entities is got.
--log
Logging almost all messages to logfiles. It is a default.
--nolog
No log files created. It is a default when -n|--dryrun or -l|--list.
-h|--help
This help message is displayed."
source "$dname/run.sh"
source "$dname/adbwrappers.sh"
source "$dname/in_args.sh"
source "$dname/trapwrapper.sh"
# Reconstruct the effective command line from the parsed option globals and
# print it without a trailing newline (for logging).
# Globals read: pname, SAADIR, targets, listp, quietp, verbosep, dryrunp,
# nologp (the boolean flags hold the commands true/false).
printcommandline() {
  printf '%s' "$pname"
  test -n "$SAADIR" && printf " --saadir '%s'" "$SAADIR"
  printf " --target '%s'" "$targets"
  $listp && printf ' --list'
  $quietp && printf ' --quiet'
  $verbosep && printf ' --verbose'
  $dryrunp && printf ' --dryrun'
  if $nologp; then printf ' --nolog'; else printf ' --log'; fi
}
# Option defaults. Boolean flags store the commands true/false so they can be
# tested by direct execution ($flagp && ...).
targets='apk,ab'
overwritep=false
listp=false
quietp=false
verbosep=false
dryrunp=false
nologp=false
helpp=false
opterrp=false
declare -a opts
# Outer loop: normalize the next argv entry into the opts array; bundled
# short options (-qv) are split into individual flags.
while true; do
case "$1" in
-|--) break;;
--*) opts=("$1"); shift;;
-*) for n in $(seq 1 $((${#1}-1))); do opts[$n]="-${1:$n:1}"; done; shift;;
*) break;;
esac
# Inner loop: apply each normalized option to the flag globals.
for opt in "${opts[@]}"; do
case "$opt" in
-t|--target) targets="$1"; shift;;
-o|--overwrite) overwritep=true;;
-l|--list) listp=true; nologp=true;;
-q|--quiet) quietp=true;;
-v|--verbose) verbosep=true;;
-n|--dryrun) dryrunp=true; verbosep=true; nologp=true;;
--log) nologp=false;;
--nolog) nologp=true;;
-h|--help) helpp=true;;
-*) echo "$opt: unknown option.">&2; opterrp=true;;
esac
done
done
$opterrp && exit 1
# Show usage on -h or when stray positional arguments remain.
$helpp || test $# -ne 0 && { echo "$usage" | $xpager; exit;}
# Normalize the comma/whitespace-separated target list to space-separated.
targets=$(IFS=$' \t\n,'; echo $targets)
#signals='EXIT SIGHUP SIGINT SIGQUIT SIGTERM ERR'
signals='SIGHUP SIGINT SIGQUIT SIGTERM'
# Scratch directory, removed by the trap stack (trap-* from trapwrapper.sh).
workdir=$(mktemp --tmpdir --directory $pname.d.XXXXXXXXXX)
trap-open rmtmp $signals
trap-addcmd rmtmp "rm -rf '$workdir'"
trap-calltrap rmtmp
# Identify the connected device (model name) or bail out.
adev="$(_adb devices -l)"
dev=$(echo "$adev" | adb-parsedev -b -o -r -s -u model) || exit 11
stamp=$(date +%Y%m%d-%H%M%S)
# NOTE(review): logdir appears unused in this file — presumably consumed by
# copyworktolog from a sourced script; confirm.
if $listp; then
logdir="log.d-$stamp-$dev-getlist"
else
logdir="log.d-$stamp-$dev-get"
fi
# Re-register cleanup so logs are copied out before workdir is removed.
trap-removecmd rmtmp "rm -rf '$workdir'"
trap-addcmd rmtmp "$nologp || copyworktolog; rm -rf '$workdir'"
trap-calltrap rmtmp
if $nologp; then
log=/dev/null
else
log="$workdir/$logfilename"
fi
# Work-file paths (the *_stem names come from saa-environ.sh).
pkgondev="$workdir/$pkgondev_stem"
pkgtoget="$workdir/$pkgtoget_stem"
filetoget="$workdir/$filetoget_stem"
$overwritep && { gotten="$workdir/$gotten_stem"; >"$gotten";}
# Main work runs in a subshell so all output can be teed into the log file.
(
$quietp || echo "commandline: $(printcommandline)">&2
$quietp || echo "device: $dev">&2
$quietp || echo "timestamp: $stamp">&2
# Allow being started from inside the apps directory itself.
test "$(basename $(pwd))" = apps && cd ..
test -r "$pkglist" -o -d apps || {
echo "No $pkglist file nor apps directory here.
Please chdir to one of saadir which has '$pkglist' file and 'apps' dir.">&2
exit 3
}
# Keep a "before" snapshot of the package list for later comparison.
! $listp && ! $dryrunp && cp -p "$pkglist" "$workdir/$pkglist-pre"
$quietp || { echo; echo "**** compute supplements based on packages."; }>&2
tPkgtoget_candidate="$workdir"/tPkgtoget_candidate
trap-addcmd rmtmp "rm -f '$tPkgtoget_candidate'"
trap-calltrap rmtmp
$quietp || echo "make $pkgondev_stem: pkgs-list on the device.">&2
dryrunp=false run get-android-apps list -3 >"$pkgondev" ||
{ echo "'get-android-apps list': failed.">&2; rm -f "$pkgondev"; exit 5;}
$quietp || echo "make $pkgtoget_stem: list of pkgs to get (exist on the device but not saadir).">&2
# With --overwrite, every on-device package is a candidate; otherwise only
# those not already recorded in the local package list.
if $overwritep; then
cat "$pkgondev" >"$tPkgtoget_candidate"
else
set-complement "$pkgondev" <(sed '/^[ \t]*#/d' "$pkglist") >"$tPkgtoget_candidate"
fi ||
{ echo "make $pkgtoget: failed.">&2; exit 6;}
if test -f "$blacklist" && test -r "$blacklist"; then
$quietp || echo "apply blacklist: $blacklist to $pkgtoget_stem.">&2
set-complement "$tPkgtoget_candidate" <(sed '/^[ \t]*#/d' "$blacklist") >"$pkgtoget" ||
{ echo "apply $blacklist: failed.">&2; rm -f "$pkgtoget"; exit 7;}
rm -f "$tPkgtoget_candidate"
else
mv "$tPkgtoget_candidate" "$pkgtoget"
fi
trap-removecmd rmtmp "rm -f '$tPkgtoget_candidate'"
trap-calltrap rmtmp
# Either just list what would be fetched, or fetch each requested entity
# kind (apk and/or ab) for the packages in pkgtoget.
if $listp; then
echo -------- Packages to get:
cat "$pkgtoget"
echo -------- end of Packages to get:
rm "$pkgtoget"
else
if test $(cat "$pkgtoget" | wc -l) -eq 0; then
$quietp || echo "No packages to get.">&2
else
cd apps
for obj in $targets; do
$quietp || echo "get $obj files of $pkgtoget_stem pkgs.">&2
$dryrunp && dryrun_opt=' --dryrun' || dryrun_opt=
dryrunp=false run get-android-apps${dryrun_opt}${gotten:+ -g "$gotten"} $obj $(cat "$pkgtoget") ||
{ test $? = 255 && break;}
done
cd ..
fi
$dryrunp && rm "$pkgtoget"
fi
# Second pass (apk targets only): compare individual apk files, catching
# packages whose files changed even though the package itself is known.
in_args apk $targets && {
$quietp || { echo; echo "**** compute supplements based on files."; }>&2
tPkgtochk_candidate="$workdir"/tPkgtochk_candidate
tPkgtochk_last="$workdir"/tPkgtochk_last
trap-addcmd rmtmp "rm -f '$tPkgtochk_last'"
trap-addcmd rmtmp "rm -f '$tPkgtochk_candidate'"
trap-calltrap rmtmp
$quietp || echo "make list of exist pkgs both on the device and the saadir.">&2
set-product "$pkgondev" <(sed '/^[ \t]*#/d' "$pkglist") >"$tPkgtochk_candidate" ||
{ echo "set-product: failed.">&2; exit 16;}
if test -f "$blacklist" && test -r "$blacklist"; then
$quietp || echo "apply blacklist: $blacklist to the list.">&2
set-complement "$tPkgtochk_candidate" <(sed '/^[ \t]*#/d' "$blacklist") >"$tPkgtochk_last" ||
{ echo "set-complement: failed.">&2; rm -f "$tPkgtochk_last"; exit 17;}
rm -f "$tPkgtochk_candidate"
else
mv "$tPkgtochk_candidate" "$tPkgtochk_last"
fi
trap-removecmd rmtmp "rm -f '$tPkgtochk_candidate'"
trap-calltrap rmtmp
tFilelist="$workdir"/tFilelist
tFileorig="$workdir"/tFileorig
trap-addcmd rmtmp "rm -f '$tFilelist'"
trap-addcmd rmtmp "rm -f '$tFileorig'"
trap-calltrap rmtmp
$quietp || echo "make $tFilelist: file-list on the saadir.">&2
(cd apps; ls *.apk 2>/dev/null) >"$tFilelist"
$quietp || echo "make $tFileorig: file-list on the device.">&2
for pkg in $(cat "$tPkgtochk_last"); do
dryrunp=false run get-android-apps listapk "$pkg"
done >"$tFileorig"
$quietp || echo "make $filetoget_stem: list of files to get (exist on the device but not saadir).">&2
if $overwritep; then
set-complement "$tFileorig" "$gotten" >"$filetoget"
else
set-complement "$tFileorig" "$tFilelist" >"$filetoget"
fi ||
{ echo "set-complement: failed.">&2; exit 18;}
if $listp; then
echo -------- Files to get:
cat "$filetoget"
echo -------- end of Files to get:
rm "$filetoget"
else
if test $(cat "$filetoget" | wc -l) -eq 0; then
$quietp || echo "No files to get.">&2
else
cd apps
mapfile -t files <"$filetoget"
$dryrunp && dryrun_opt=' --dryrun' || dryrun_opt=
dryrunp=false run get-android-apps${dryrun_opt}${gotten:+ -g "$gotten"} singleapk "${files[@]}"
cd ..
fi
$dryrunp && rm "$filetoget"
fi
rm -f "$tPkgtochk_last" "$tFilelist" "$tFileorig"
trap-removecmd rmtmp "rm -f '$tFileorig'"
trap-removecmd rmtmp "rm -f '$tFilelist'"
trap-removecmd rmtmp "rm -f '$tPkgtochk_last'"
trap-calltrap rmtmp
}
# Finalize: regenerate the local package list unless we only listed/dry-ran.
if $listp || $dryrunp; then
rm "$pkgondev"
else
cp -p "$pkglist" "$workdir/$pkglist-post"
test -f "$blacklist" && cp -p "$blacklist" "$workdir/$blacklist"
"$dname"/saa-make-list || { echo "saa-make-list: failed.">&2; exit 32;}
fi
) 2>&1 | tee "$log"
$nologp || copyworktolog
rm -rf "$workdir"
trap-close rmtmp
| true
|
2b14541c3ed5f5fd0c89aecb542cce62ee7fd0f8
|
Shell
|
kuiba1949/conky-weather
|
/bin/conky-weather.runonce
|
UTF-8
| 4,601
| 3.515625
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# coding: utf-8
# updated: 2018-3-03, by Careone
# Weather refresh interval change: from once per hour to once every 20 minutes
# filename: conky-weather.runonce
### head ###
APPNAME="conky-weather.runonce"
APPVER="2.1"
USER_CONKYRC="$HOME/.conkyrc"
USER_PROFILE="$HOME/.profile"
AUTOSTART_DIR="$HOME/.config/autostart"
#FEED_DIR="$HOME/.feed"
# External helper commands invoked by the setup steps below.
CMD01="conky-weather-theme.enable"
#CMD02="conky"
CMD03="conky-weather-update"
#CMD04="echo 'conky &' >> $USER_PROFILE"
CMD05="conky-weather-add-crontab"
#declare -a feedfile=( feed_city feed_wd feed_ls feed_xls )
## Pause a few seconds after each step so the user can follow what was done.
declare -i SLEEP=3
# Emit the .desktop entry that autostarts conky at login; the caller writes
# this to ~/.config/autostart/90conky.desktop.
# The delimiter is quoted because the body contains no expansions.
_make_conky_desktop () {
cat <<'EOF'
[Desktop Entry]
Name=Conky
Exec=/usr/bin/conky
Icon=conky
Terminal=false
Type=Application
Categories=GNOME;GTK;System;Utility;
Hidden=false
EOF
}
## _make_weather_update () :
# used to create '86conky-weather-update.desktop' and
# save to ~/.config/autostart/
# to enable conky-weather-update autostart for each login
# (Terminal=yes so the updater's output is visible while it runs.)
_make_weather_update () {
cat<<EOF
[Desktop Entry]
Name=conky-weather-update
Exec=/usr/local/bin/conky-weather-update
Icon=conky-weather-update
Terminal=yes
Type=Application
Categories=GNOME;GTK;System;Utility;
Hidden=false
EOF
}
### ///head ###
### body ###
# Six-step one-time setup: enable the theme, start conky, fetch the weather,
# register autostart entries, and install the crontab refresh job.
echo -e "\n 设置Conky天气 (命令: $APPNAME )...\n 说明:只需要运行一次即可!"
### -------------------
### disabled since version 1.5; should be done by command 'conky-weather-update'
#for a in "$FEED_DIR/${feedfile[@]}"; do
#if [ ! -e "$a" ]; then
# if [ ! -d "$FEED_DIR" ]; then
# mkdir -p "$FEED_DIR"
# fi
# touch "$FEED_DIR/$a"
#fi
#done
#wait
### -------------------
# Step 1: update the user's conky config (~/.conkyrc).
echo -e "\n * 第1步:更新用户的Conky配置文件 $USER_CONKYRC\n (命令: $CMD01 )\n"
#echo -e "\t(命令: $CMD01 )\n"
$CMD01
sleep $SLEEP
# Step 2: start conky in the background.
echo -e "\n * 第2步:启动Conky\n (命令: conky )\n"
#pkill conky; wait
conky &
sleep $SLEEP
# Step 3: fetch the weather data once, right now.
echo -e "\n * 第3步:更新Conky天气\n (命令: $CMD03 )\n"
$CMD03
sleep $SLEEP
# Step 4: make conky start automatically at login (two mechanisms).
echo -e "\n * 第4步:设置开机后, 自动启动Conky\n"
## optional modes to autostart conky for current user:
for a in `seq 1 2`; do
AUTOSTART_MODE=$a
case "$AUTOSTART_MODE" in
1) # For GNOME, add 86conky-weather-update.desktop and
# 90conky.desktop to ~/.config/autostart/
# Autostart ordering: update the weather data first, then start conky:
# 86conky-weather-update.desktop
# 90conky.desktop
CONKY_DESKTOP="90conky.desktop"
OLD_CONKY_DESKTOP="conky.desktop"
## Backward compatibility:
# in ver 1.3 the conky autostart file was named conky.desktop;
# in ver 1.4 it was renamed to 90conky.desktop (numeric prefix 90 added).
# When the old name is detected it is renamed automatically
# (file contents unchanged).
if [ -e "$AUTOSTART_DIR/$OLD_CONKY_DESKTOP" ]; then
mv -v "$AUTOSTART_DIR/$OLD_CONKY_DESKTOP" "$AUTOSTART_DIR/$CONKY_DESKTOP"
fi
if [ ! -e "$AUTOSTART_DIR/$CONKY_DESKTOP" ]; then
if [ ! -d "$AUTOSTART_DIR" ]; then
mkdir -p "$AUTOSTART_DIR"
fi
_make_conky_desktop > "$AUTOSTART_DIR/$CONKY_DESKTOP"
else echo -e " 提示1: 已存在conky开机自启动文件:\n $AUTOSTART_DIR/$CONKY_DESKTOP"
#echo -e "\tOK."
fi
;;
2 | *) # For all, add 'conky &' to ~/.profile
if grep conky "$USER_PROFILE" &>/dev/null; then
echo -e "\n 提示2: 用户 $USER_PROFILE 配置文件中,已存在conky自启动项!"
echo " --------"
grep -n --color=auto conky "$USER_PROFILE"
echo " --------"
else echo -e " 添加conky自启动指令到用户 $USER_PROFILE 配置文件..."
echo -e " (命令: echo 'conky &' >> $USER_PROFILE )"
echo 'conky &' >> "$USER_PROFILE"
fi
;;
esac
done
sleep $SLEEP
# Step 5: install the crontab entry refreshing the weather every 20 minutes.
echo -e "\n * 第5步:添加Conky天气自动更新指令到crontab计划任务"
echo -e " 天气信息更新周期:每20分钟更新一次"
echo -e " (命令: $CMD05 )\n"
$CMD05
sleep $SLEEP
# Step 6: autostart entry so the weather updates right after each login.
echo -e "\n * 第6步:添加 $CMD03 开机自启动文件(开机后会立即更新天气一次)..."
WEATHER_UPDATE_DESKTOP="86conky-weather-update.desktop"
if [ ! -e "$AUTOSTART_DIR/$WEATHER_UPDATE_DESKTOP" ]; then
_make_weather_update > "$AUTOSTART_DIR/$WEATHER_UPDATE_DESKTOP"
else echo -e " 提示: 已存在 $CMD03 自启动文件:\n $AUTOSTART_DIR/$WEATHER_UPDATE_DESKTOP"
#echo -e "\tOK."
fi
echo -e "\n OK.\n"
### ///body ###
exit 0
| true
|
2b046d74d2220a22af5611be4ee7bce6ad1fa3e5
|
Shell
|
yiochen/Pynote
|
/install
|
UTF-8
| 465
| 3.375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Install Pynote: copy sources under /usr/local/src and symlink a launcher
# into /usr/local/bin. Run from the repository root (needs pynote/ and
# pynote.py), typically with root privileges.
install_path="/usr/local/"
# Interpreter path embedded in the launcher's shebang.
python_path=$(command -v python)
# copy source code to /usr/local/src (-p tolerates re-installation)
mkdir -p "${install_path}src/Pynote"
cp -r pynote/ "${install_path}src/Pynote/pynote"
# prepend a shebang to the entry script, and make the file executable
cp pynote.py main
sed -i "1 i #!${python_path}" main
chmod 755 main
# move the launcher into place and symlink it onto the PATH
mv main "${install_path}src/Pynote/"
ln -s "${install_path}src/Pynote/main" "${install_path}bin/pynote"
| true
|
2836e623ce4eb6abb19812b1f8d536c89b99a9ef
|
Shell
|
bdimych/pdb-scripts
|
/pdb-lib.sh
|
UTF-8
| 861
| 3.484375
| 3
|
[
"MIT"
] |
permissive
|
# Timestamp helper: YYYY-MM-DD-HH:MM:SS.
mydate() {
  date +%F-%T
}

# Timestamped log line on stdout.
log() {
  echo -e "$(mydate)": LOG: "$@"
}

# Timestamped error line on stdout, then abort the script with status 1.
error() {
  echo -e "$(mydate)": ERROR: "$@"
  exit 1
}

# Timestamped warning line on stdout.
warning() {
  echo -e "$(mydate)": WARNING: "$@"
}
# Append stdin to the in-progress file $pf; when copy_progress_files_to is
# set, mirror the progress file into that directory as well.
# Globals read: pf (progress-file path), copy_progress_files_to.
function _tee_progress {
if [[ $copy_progress_files_to ]]
then
tee -i -a "$pf" "$copy_progress_files_to/${pf##*/}"
else
tee -i -a "$pf"
fi
}
# tee_progress FILE [yes]
# With the second argument "yes": derive the progress-file name from FILE
# (*.7z -> *-in-progress.txt) and stream stdin through _tee_progress.
# Without it: create/touch the progress file, then redirect the whole
# script's stdout+stderr through tee_progress via process substitution;
# the EXIT trap sleeps briefly so tee can flush before the shell exits.
function tee_progress {
if [[ $2 == yes ]]
then
local pf
case "$1" in
*-in-progress.txt) pf="$1";;
*) pf="${1%.7z}-in-progress.txt";;
esac
_tee_progress
else
echo -n | tee_progress "$1" yes || error tee_progress failure
trap 'sleep 1' EXIT # for tee delay
exec > >(tee_progress "$1" yes) 2>&1 || error tee_progress failure
fi
}
# Perl body (for perl -ne) that renders an archive.org downloads page as
# plain text: keeps the content <div>, strips tags, and starts a new line
# at each identifier.
perl_print_downloads_in_plain_text='if (/<div id="content">/../priority_bottom/) {s/^\s*//; s/\n/ /; /identifier-/ && s/^/\n/; s/<.*?>//g; print} END {print "\n"}'
| true
|
03b0dfa793c523b8f636f0fbecdec1ec68acda72
|
Shell
|
gm-mmurillo/aws-cli-get-session-token
|
/aws_mfa.sh
|
UTF-8
| 2,859
| 3.859375
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
set -e
# INPUT PARAMETERS
# origin_profile (prompt iniailly, store in config)
# mfa_profile (prompt-only)
# mfa_serial (prompt initially, store in config)
# mfa_token_code (prompt-only)
# Dependencies: awscli
test ! $(command -v aws) && echo "awscli is required, please install and ensure the binary is in your \$PATH." && exit 1
# Prompt for the mfa_profile
while [ -z "${TMP_AWS_MFA_PROFILE}" ]
do
echo "Please specify which aws profile you want to configure, or enter a new one [ this will be the profile that uses the temporary mfa credentials ]:"
aws configure list-profiles
read -p "MFA Profile: " TMP_AWS_MFA_PROFILE
done
# read the origin_profile value from the mfa_profile config
# ('aws configure get' exits non-zero when the key is absent, so -e is
# suspended around the lookup)
set +e
read TMP_AWS_ORIGIN_PROFILE < <(aws configure get profile.${TMP_AWS_MFA_PROFILE}.origin_profile)
set -e
# prompt for origin_profile if it did not exist within the mfa_profile
while [ -z "${TMP_AWS_ORIGIN_PROFILE}" ]
do
echo "Please enter the origin profile [ This is the profile used to make the aws sts get-session-token call ]:"
read -p "Origin Profile: " TMP_AWS_ORIGIN_PROFILE
done
# store the origin_profile value in the mfa_profile config
aws configure set profile.${TMP_AWS_MFA_PROFILE}.origin_profile ${TMP_AWS_ORIGIN_PROFILE}
# read the mfa_serial from the mfa_profile config
set +e
read TMP_MFA_SERIAL_ARN < <(aws configure get profile.${TMP_AWS_MFA_PROFILE}.mfa_serial)
set -e
# prompt for the mfa_serial if it did not exist within the mfa_profile
while [ -z "${TMP_MFA_SERIAL_ARN}" ]
do
echo "Please enter your MFA Serial ARN [ example: arn:aws:iam::1234567890:user/my.username ]:"
read -p "MFA Serial ARN: " TMP_MFA_SERIAL_ARN
done
# store the mfa_serial value in the mfa_profile config
aws configure set profile.${TMP_AWS_MFA_PROFILE}.mfa_serial ${TMP_MFA_SERIAL_ARN}
# prompt for the mfa_token_code
while [ -z "${TMP_MFA_TOKEN_CODE}" ]
do
echo "Please enter your MFA Token Code:"
read -p "MFA Token Code: " TMP_MFA_TOKEN_CODE
done
# run get-session-token using the mfa_serial and mfa_token_code using the origin_profile and capture the temporary credentials
# (--query flattens the four credential fields into one tab-separated line)
TMP_RESPONSE=$(aws sts get-session-token --serial-number ${TMP_MFA_SERIAL_ARN} --token ${TMP_MFA_TOKEN_CODE} --output text --profile ${TMP_AWS_ORIGIN_PROFILE} \
--query 'Credentials.[AccessKeyId,SecretAccessKey,SessionToken,Expiration]')
read -r TMP_ACCESS_KEY_ID TMP_SECRET_ACCESS_KEY TMP_SESSION_TOKEN TMP_EXPIRATION < <(echo ${TMP_RESPONSE})
# set the temporary credentials for the mfa_profile
aws configure set profile.${TMP_AWS_MFA_PROFILE}.aws_access_key_id ${TMP_ACCESS_KEY_ID}
aws configure set profile.${TMP_AWS_MFA_PROFILE}.aws_secret_access_key ${TMP_SECRET_ACCESS_KEY}
aws configure set profile.${TMP_AWS_MFA_PROFILE}.aws_session_token ${TMP_SESSION_TOKEN}
aws configure set profile.${TMP_AWS_MFA_PROFILE}.aws_session_expiration ${TMP_EXPIRATION}
| true
|
b830a073d31e9ae706cb4ed56a816d1a08ec12f5
|
Shell
|
Ogaday/Resources
|
/dev/remote-run.sh
|
UTF-8
| 738
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
# Script to remotely start a CFD simulation in a screen on a given host.
# Inputs are eg.
# casepath=/path/to/case/files
# genid=10
# solid=15
# remotehost=blue02
# NOTE(review): the remote user 'wow203' and the OpenFOAM run directory are
# hard-coded below — confirm before reusing on another account/cluster.
echo "args are $@"
args=("$@")
casepath=${args[0]}
genid=${args[1]}
# Solution id is namespaced under its generation: "<genid>.<sol>".
solid=$genid.${args[2]}
remotehost=${args[3]}
echo "casepath is $casepath"
echo "genid is $genid"
echo "solid is $solid"
echo "remotehost is $remotehost"
# Create the remote generation directory, copy the case files into it, then
# launch the run script inside the copied case directory.
ssh wow203@$remotehost "mkdir -p /scratch/wow203/OpenFOAM/wow203-2.1.0/run/generation$genid/"
scp -r $casepath wow203@$remotehost:"/scratch/wow203/OpenFOAM/wow203-2.1.0/run/generation$genid/"
ssh wow203@$remotehost "cd /scratch/wow203/OpenFOAM/wow203-2.1.0/run/generation$genid/$solid && ~/Project/Resources/dev/run.sh $genid ${args[2]}"
| true
|
cb44e85dc3a123753cc178722bc929819025c525
|
Shell
|
Joel-a-dev/jenkins-ci-git-info
|
/generate-init-py.sh
|
UTF-8
| 427
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# script to generate the api_server/___init___.py file
# NOTE(review): the target name uses TRIPLE underscores; Python's package
# marker is '__init__.py' (double) — confirm the triple-underscore name is
# deliberate.
# fetch tags from repo
git fetch --tags
# set variables
GIT_COMMIT=$(git rev-parse HEAD)
GHE_VERSION=$(git describe --tags --abbrev=0)
# NOTE(review): 'date' without -u is local time, yet the format appends the
# UTC designator 'Z' — presumably 'date -u' was intended; confirm.
BUILD_TIMESTAMP=$(date +'%Y-%m-%dT%H:%M:%SZ')
# Create ___init___.py file
echo "
import os
__version__ = '$GHE_VERSION'
commit_hash = '$GIT_COMMIT'
build_timestamp = '$BUILD_TIMESTAMP'
" > api_server/___init___.py
| true
|
3723c889736c85d0f1ff27968d133cac4ece61c4
|
Shell
|
jeanmanguy/prezto
|
/runcoms/zshrc
|
UTF-8
| 1,134
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
#
# Executes commands at the start of an interactive session.
#
# Authors:
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# Source Prezto.
if [[ -s "${ZDOTDIR:-$HOME}/.zprezto/init.zsh" ]]; then
source "${ZDOTDIR:-$HOME}/.zprezto/init.zsh"
fi
# Prezto modules to load, in order. Order matters for some modules
# (e.g. syntax-highlighting must come before history-substring-search).
zstyle ':prezto:load' pmodule \
'environment' \
'terminal' \
'rust' \
'cargo' \
'editor' \
'history' \
'directory' \
'spectrum' \
'utility' \
'completion' \
'prompt' \
'git' \
'history-substring-search' \
'syntax-highlighting' \
'ssh' \
'rsync'
# Customize to your needs...
# export PATH="/home/jean/miniconda3/bin:$PATH" # commented out by conda initialize
# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
__conda_setup="$('/home/jean/miniconda3/bin/conda' 'shell.zsh' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
eval "$__conda_setup"
else
if [ -f "/home/jean/miniconda3/etc/profile.d/conda.sh" ]; then
. "/home/jean/miniconda3/etc/profile.d/conda.sh"
else
export PATH="/home/jean/miniconda3/bin:$PATH"
fi
fi
unset __conda_setup
# <<< conda initialize <<<
# Add cargo-installed binaries to PATH (after conda so conda wins lookups).
export PATH="${PATH}:${HOME}/.cargo/bin"
| true
|
d83abd7e312db02e48b8c5af39741f787b253185
|
Shell
|
dogonthehorizon/lec-notes
|
/User_Interface_Design/build
|
UTF-8
| 269
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/sh
# Compile each chapter of the User Interface Design notes into its own
# PDF, then remove LaTeX by-products from the output directory.
#
# NOTE: the previous cleanup used brace expansion ({*.aux,*.log,...}),
# which is a bashism: under a POSIX /bin/sh (e.g. dash) the braces are
# passed literally and nothing is cleaned. The globs are now explicit.
outdir="../Compiled_Sources/User_Interface_Design"
for z in chap*.tex; do
	y=${z%.tex}
	# Build only the current chapter via \includeonly.
	pdflatex -output-directory="$outdir/" -jobname="$y" "\includeonly{$y}\input{user_interface_design}"
done
rm -f "$outdir"/*.aux "$outdir"/*.log "$outdir"/*.out "$outdir"/*.html
exit 0
| true
|
d188523f31e63d9d53add3249ebd2141e10012ac
|
Shell
|
benf/dotfiles
|
/.config/openbox/scripts/xcompmgr.sh
|
UTF-8
| 3,083
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
# Openbox Pipe Menu for xcompmgr
# Written for CrunchBang Linux <http://crunchbang.org/projects/linux/>
# by Philip Newborough (aka corenominal) <mail@philipnewborough.co.uk>

# Set xcompmgr command options
#EXEC='xcompmgr -c -t-5 -l-5 -r4.2 -o.55' #basic
EXEC='xcompmgr -cCfF -t-5 -l-5 -r4.2 -o.55 -D6' #more effects

# Toggle compositing. Call with "myxcompmgr --startstop"
if [ "$1" = "--startstop" ]; then
    if [ ! "$(pidof xcompmgr)" ]; then
        $EXEC
    else
        killall xcompmgr
    fi
    exit 0
fi

# Output Openbox menu. When the compositor is not running we only offer
# to enable it; otherwise we offer transparency controls plus disable.
if [ ! "$(pidof xcompmgr)" ]; then
cat << _EOF_
<openbox_pipe_menu>
<separator label="Compositing" />
<item label="Enable Compositing">
<action name="Execute">
<execute>~/.config/openbox/scripts/xcompmgr.sh --startstop</execute>
</action>
</item>
</openbox_pipe_menu>
_EOF_
else
cat << _EOF_
<openbox_pipe_menu>
<separator label="Compositing" />
<item label="Remove Transparency from Target Window">
<action name="Execute">
<execute>transset-df 1</execute>
</action>
</item>
_EOF_
# The nine 10%..90% menu items only differ in the percentage label and
# the transset-df opacity (.90 down to .10), so generate them in a loop.
# The emitted XML is byte-identical to the previous hand-written items.
for pct in 10 20 30 40 50 60 70 80 90; do
opacity=$(printf '.%02d' $((100 - pct)))
cat << _EOF_
<item label="Set Target Window Transparency to ${pct}%">
<action name="Execute">
<execute>transset-df ${opacity}</execute>
</action>
</item>
_EOF_
done
cat << _EOF_
<separator/>
<item label="Disable Compositing">
<action name="Execute">
<execute>~/.config/openbox/scripts/xcompmgr.sh --startstop</execute>
</action>
</item>
</openbox_pipe_menu>
_EOF_
fi
exit 0
| true
|
26c4108f43cc5d2bd855f7d9d695ba501ae82411
|
Shell
|
lostoli/dotfiles
|
/zshrc
|
UTF-8
| 5,936
| 3
| 3
|
[] |
no_license
|
# Start configuration added by Zim install {{{
#
# User configuration sourced by interactive shells
#
# -----------------
# Zsh configuration
# -----------------
#
# History
#
# Remove older command from the history if a duplicate is to be added.
setopt HIST_IGNORE_ALL_DUPS
#
# Input/output
#
# Set editor default keymap to emacs (`-e`) or vi (`-v`)
bindkey -v
# Prompt for spelling correction of commands.
#setopt CORRECT
# Customize spelling correction prompt.
#SPROMPT='zsh: correct %F{red}%R%f to %F{green}%r%f [nyae]? '
# Remove path separator from WORDCHARS.
WORDCHARS=${WORDCHARS//[\/]}
# -----------------
# Zim configuration
# -----------------
# Use degit instead of git as the default tool to install and update modules.
#zstyle ':zim:zmodule' use 'degit'
# --------------------
# Module configuration
# --------------------
#
# completion
#
# Set a custom path for the completion dump file.
# If none is provided, the default ${ZDOTDIR:-${HOME}}/.zcompdump is used.
#zstyle ':zim:completion' dumpfile "${ZDOTDIR:-${HOME}}/.zcompdump-${ZSH_VERSION}"
#
# git
#
# Set a custom prefix for the generated aliases. The default prefix is 'G'.
#zstyle ':zim:git' aliases-prefix 'g'
#
# input
#
# Append `../` to your input for each `.` you type after an initial `..`
#zstyle ':zim:input' double-dot-expand yes
#
# termtitle
#
# Set a custom terminal title format using prompt expansion escape sequences.
# See http://zsh.sourceforge.net/Doc/Release/Prompt-Expansion.html#Simple-Prompt-Escapes
# If none is provided, the default '%n@%m: %~' is used.
#zstyle ':zim:termtitle' format '%1~'
#
# zsh-autosuggestions
#
# Customize the style that the suggestions are shown with.
# See https://github.com/zsh-users/zsh-autosuggestions/blob/master/README.md#suggestion-highlight-style
ZSH_AUTOSUGGEST_HIGHLIGHT_STYLE='fg=242'
#
# zsh-syntax-highlighting
#
# Set what highlighters will be used.
# See https://github.com/zsh-users/zsh-syntax-highlighting/blob/master/docs/highlighters.md
ZSH_HIGHLIGHT_HIGHLIGHTERS=(main brackets)
# Customize the main highlighter styles.
# See https://github.com/zsh-users/zsh-syntax-highlighting/blob/master/docs/highlighters/main.md#how-to-tweak-it
#typeset -A ZSH_HIGHLIGHT_STYLES
#ZSH_HIGHLIGHT_STYLES[comment]='fg=242'
# ------------------
# Initialize modules
# ------------------
if [[ ! -e ${ZIM_HOME}/zimfw.zsh ]]; then
# Download zimfw script if missing.
command mkdir -p ${ZIM_HOME}
if (( ${+commands[curl]} )); then
command curl -fsSL -o ${ZIM_HOME}/zimfw.zsh https://github.com/zimfw/zimfw/releases/latest/download/zimfw.zsh
else
command wget -nv -O ${ZIM_HOME}/zimfw.zsh https://github.com/zimfw/zimfw/releases/latest/download/zimfw.zsh
fi
fi
if [[ ! ${ZIM_HOME}/init.zsh -nt ${ZDOTDIR:-${HOME}}/.zimrc ]]; then
# Install missing modules, and update ${ZIM_HOME}/init.zsh if missing or outdated.
source ${ZIM_HOME}/zimfw.zsh init -q
fi
source ${ZIM_HOME}/init.zsh
# ------------------------------
# Post-init module configuration
# ------------------------------
#
# zsh-history-substring-search
#
# Bind ^[[A/^[[B manually so up/down works both before and after zle-line-init
bindkey '^[[A' history-substring-search-up
bindkey '^[[B' history-substring-search-down
# Bind up and down keys
zmodload -F zsh/terminfo +p:terminfo
if [[ -n ${terminfo[kcuu1]} && -n ${terminfo[kcud1]} ]]; then
bindkey ${terminfo[kcuu1]} history-substring-search-up
bindkey ${terminfo[kcud1]} history-substring-search-down
fi
bindkey '^P' history-substring-search-up
bindkey '^N' history-substring-search-down
bindkey -M vicmd 'k' history-substring-search-up
bindkey -M vicmd 'j' history-substring-search-down
# }}} End configuration added by Zim install
#
# Prompt
#
export ON_COLOR="green"
export OFF_COLOR="blue"
# -- Variables
export EDITOR=nvim
export PATH=$HOME/bin:/usr/local/bin:$PATH
export BROWSER="firefox"
export HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND='bg=background,fg=white,bold'
export HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND='bg=background,fg=red,bold'
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
alias ls='ls --group-directories-first --time-style=+"%d.%m.%Y %H:%M" --color=auto -F'
alias ll='ls -l --group-directories-first --time-style=+"%d.%m.%Y %H:%M" --color=auto -F'
alias la='ls -la --group-directories-first --time-style=+"%d.%m.%Y %H:%M" --color=auto -F'
alias grep='grep --color=tty -d skip'
alias cp="cp -i" # confirm before overwriting something
alias df='df -h' # human-readable sizes
alias free='free -m' # show sizes in MB
alias restart='killall -SIGUSR1'
alias update='yay -Syu && sudo paccache -r -k 1 && update.sh'
alias mp='jmtpfs ~/mnt'
alias up='fusermount -u ~/mnt'
alias op='offlineimap -o'
alias colors='sh ~/bin/color.sh'
alias vbrc='$EDITOR ~/.config/bspwm/bspwmrc'
alias vzrc='$EDITOR ~/.zshrc'
alias vprc='$EDITOR ~/.config/polybar/config'
alias vsrc='$EDITOR ~/.config/sxhkd/sxhkdrc'
alias e=$EDITOR
alias m='python -m pmail -m client'
alias p='bpython'
alias bp='bpython'
alias se='sudo $EDITOR'
alias :q='exit'
alias www='sudo systemctl restart wpa_supplicant.service && sudo systemctl restart connman.service && connmanctl'
alias pow='sudo tlp stat'
alias charge='sudo -A tlp fullcharge BAT1 && sudo tlp fullcharge BAT0'
alias blue='sudo -A systemctl restart bluetooth.service && bin/minirig.sh'
source /usr/share/fzf/key-bindings.zsh
source /usr/share/fzf/completion.zsh
# Create a new executable shell-script skeleton in ~/bin and open it in
# $EDITOR.
#   $1 - script name (without the .sh extension)
function mkscript(){
	local target=~/bin/$1.sh
	# -e (exists) replaces the deprecated -a file test.
	if [[ ! -e $target ]]; then
		touch $target && echo "#!/bin/sh" >! $target && chmod +x $target && $EDITOR $target
	else
		# (fixed message typo: "allready" -> "already")
		echo "Script already exists!"
	fi
}
| true
|
037dd7b858ecf4a0dc2afb9ec8d33ae32140fec5
|
Shell
|
lshatok/boilerplate
|
/ansible/roles/wt_users/files/bash_profile
|
UTF-8
| 2,782
| 3.140625
| 3
|
[] |
no_license
|
## Default Profile for WebTelemetry users

## Export Defaults ##
WT_PATH=/WT
WEBTELEMETRY_PATH=${WT_PATH}/grafana
TELEMETRIX_PATH=${WT_PATH}/influxdb
DASHBOARD_PATH=${WT_PATH}/grafana
# NOTE(review): WT_PATH is re-rooted here, so the three paths above sit
# under /WT while everything below sits under /WT/telemetrix, and
# TELEMETRIX_PATH/TELEMETRIX_HOME are overwritten again afterwards.
# Confirm this Ansible template generates what was intended.
WT_PATH=${WT_PATH}/telemetrix
TELEMETRIX_PATH=${WT_PATH}/telemetrix
TELEMETRIX_HOME=${WT_PATH}/telemetrix
APP_ENV=production
JAVA_HOME="{{ java.home }}"
TELEMETRIX_HOME=${WT_PATH}

# Source the user's .bashrc. (The previous version *executed* it as a
# command, which ran it in a child shell and had no effect here.)
[ -f ~/.bashrc ] && . ~/.bashrc
[ -d "${JAVA_HOME}" ] && export JAVA_HOME

# Login banner: export and report each path that actually exists.
echo "=================================="
if [ -d "${WT_PATH}" ]; then
    export WT_PATH && echo "WT_PATH = ${WT_PATH}"
fi
if [ -d "${WEBTELEMETRY_PATH}" ]; then
    export WEBTELEMETRY_PATH && echo "WEBTELEMETRY_PATH = ${WEBTELEMETRY_PATH}"
fi
if [ -d "${DASHBOARD_PATH}" ]; then
    export DASHBOARD_PATH && echo "DASHBOARD_PATH = ${DASHBOARD_PATH}"
fi
if [ -d "${TELEMETRIX_HOME}" ]; then
    export TELEMETRIX_HOME && echo "TELEMETRIX_HOME = ${TELEMETRIX_HOME}"
fi
if [ -d "${TELEMETRIX_PATH}" ]; then
    export TELEMETRIX_PATH && echo "TELEMETRIX_PATH = ${TELEMETRIX_PATH}"
fi

# Advertise the aliases that are defined further down.
if [ -d "${WEBTELEMETRY_PATH}" ]; then
    echo 'alias cddw="cd '${WEBTELEMETRY_PATH}'"'
fi
if [ -d "${TELEMETRIX_PATH}" ]; then
    echo 'alias cdinfluxdb="cd '${TELEMETRIX_PATH}'"'
fi
if [ -d "${DASHBOARD_PATH}" ]; then
    echo 'alias cddso="cd '${DASHBOARD_PATH}'"'
fi
if [ -d "${WT_PATH}" ]; then
    echo 'alias cdwt="cd '${WT_PATH}'"'
fi
if [ -d "${TELEMETRIX_PATH}" ]; then
    echo 'alias cdan="cd '${TELEMETRIX_PATH}'"'
fi
echo 'alias sudoa="sudo su -l wtuser"'
echo "__________________________________"
echo $(uptime)
echo "=================================="

## Aliases ##
alias less="less -nR"
alias cddw="cd ${WEBTELEMETRY_PATH}"
alias cdinfluxdb="cd ${TELEMETRIX_PATH}"
alias cddso="cd ${DASHBOARD_PATH}"
alias cdwt="cd ${WT_PATH}"
alias cdan="cd ${TELEMETRIX_PATH}"
alias sudoa="sudo su -l wtuser"
alias alogs="cd /WT/appserver/logs"
alias jlogs="cd /WT/webapps/snmp/WEB-INF/log"
alias tw="cd /WT/appserver/webapps/snmp/WEB-INF"

### tput colors ###
# - tput doesn't cause wrapping issues - #
_Black=$(tput setaf 0)
_Red=$(tput setaf 1)
_Green=$(tput setaf 2)
_Yellow=$(tput setaf 3)
_Blue=$(tput setaf 4)
_Magenta=$(tput setaf 5)
_Cyan=$(tput setaf 6)
_Reverse=$(tput rev)
_Underline=$(tput smul)
_Underline_Off=$(tput rmul)
_Bold=$(tput bold)
_Off=$(tput sgr0)

## if git is installed, enable git-prompt.sh ##
if [ "$PS1" ] && [ -f /usr/bin/git ] && [ -f /etc/bash_completion.d/git-prompt ]; then
    #PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\[${_Blue}\]$(__git_ps1 "(%s)")\[${_Off}\]\$ '
    PS1='${debian_chroot:+($debian_chroot)}(\A)\[${_Bold}${_Green}\]\u\[${_Off}\]@\[${_Underline}\]\h\[${_Off}\]:\[${_Blue}\]\w\[${_Off}\]$(__git_ps1 "(%s)")\$ '
    export PS1
fi
| true
|
d6b6b4c460bd50256cd8fdd019a6a5e4f5e05dee
|
Shell
|
pct960/iudx-auth-server
|
/setup.postgresql.openbsd
|
UTF-8
| 504
| 2.53125
| 3
|
[
"MPL-2.0",
"ISC"
] |
permissive
|
#!/bin/sh
# Initialise PostgreSQL on OpenBSD: create the cluster, start it,
# localise postgresql.conf, and load the application schema.
cd /var/postgresql || exit 1
initdb -D /var/postgresql/data/ -U postgres --auth=md5 --encoding=UTF-8 --pwfile=admin.db.password
pg_ctl -D /var/postgresql/data/ -l logfile start
export PGPASSWORD=$(cat admin.db.password)
# Switch the timezone America/Los_Angeles -> Asia/Kolkata and raise the
# connection limit. (The previous sed expressions were missing the
# closing '/' delimiter and always failed with "unterminated substitute
# pattern", leaving the config untouched.)
sed -i "s/America/Asia/" /var/postgresql/data/postgresql.conf
sed -i "s/Los_Angeles/Kolkata/" /var/postgresql/data/postgresql.conf
sed -i "s/max_connections = 30/max_connections = 100/" /var/postgresql/data/postgresql.conf
psql -U postgres < schema.sql >/dev/null
# schema.sql is a regular file; -r is not needed.
rm -f schema.sql
| true
|
048667f4d9370f4ae226208942aad3f1aeb0f6e2
|
Shell
|
cmspixelpilot/POSOverlay
|
/jmt/scurve.sh
|
UTF-8
| 357
| 3.046875
| 3
|
[] |
no_license
|
# Run the SCurve analysis for a pixel calibration run and gzip its log.
#   $1 - run number; if omitted, taken from the cwd name ".../xxx_<RUN>".
# Requires POS_LOG_DIR and BUILD_HOME in the environment.
RUN=$1
if [ -z "$RUN" ]; then
  # Everything after the last '_' in the cwd name is the run number.
  RUN=${PWD##*_}
  # (fixed: $re was previously referenced but never defined, so the
  # validation below could never reject a non-numeric directory suffix)
  re='^[0-9]+$'
  if ! [[ $RUN =~ $re ]]; then
    echo "error: need run number" >&2
    exit 1
  fi
fi
LOGFN="${POS_LOG_DIR}/scurve.run${RUN}.log.gz"
# PixelAnalysis.exe reads "q" from stdin to quit when done.
echo q | "${BUILD_HOME}/pixel/PixelAnalysisTools/test/bin/linux/x86_64_slc6/PixelAnalysis.exe" SCurve "$RUN" 2>&1 | gzip > "$LOGFN"
echo "$LOGFN"
| true
|
bf7fa745c6b78ba33849f940f7cb1a1f5d8f897c
|
Shell
|
TurtleEngr/story-xml
|
/src/bin/pub-fop-ps.sh
|
UTF-8
| 1,108
| 3.390625
| 3
|
[] |
no_license
|
#!/bin/sh
# $Header: /repo/local.cvs/app/story-xml/src/publishing/pub-fop-ps.sh,v 1.8 2008/06/23 21:42:05 bruce Exp $
# Render an XML document to PostScript: first produce a PDF with
# pub-fop-pdf.sh (XSL-FO via fop), then convert it with pdf2ps.
# Inputs come either from the command line (IN.xml STYLE.xsl OUT) or,
# when invoked from Serna, from the SERNA_* environment variables.

if [ $# -lt 3 ] && [ -z "$SERNA_XSL_STYLESHEET" ]; then
	cat <<EOF
Usage:
$0 IN.xml STYLE.xsl OUT
or
Env. Var:
\$SERNA_XML_SRCFULLPATH
\$SERNA_XSL_STYLESHEET
\$SERNA_OUTPUT_FILE
$0
EOF
	exit 1
fi

if [ $# -eq 3 ]; then
	pIn=$1
	echo "Input=$pIn"
	pStyle=$2
	echo "Style=$pStyle"
	pOut=$3
	echo "Output=$pOut"
else
	pIn=$SERNA_XML_SRCFULLPATH
	echo "Input=$pIn"
	pStyle=$SERNA_XSL_STYLESHEET
	echo "Style=$pStyle"
	pOut=$SERNA_OUTPUT_FILE
	echo "Output=$pOut"
fi

# Create the output file up front so the writability check is meaningful.
touch "$pOut"
if [ ! -r "$pIn" ]; then
	echo "Error: Can not read $pIn"
	exit 1
fi
if [ ! -r "$pStyle" ]; then
	echo "Error: Can not read $pStyle"
	exit 1
fi
# (fixed: this test previously read "$pOout" - an undefined variable -
# so the output-writability check was a silent no-op)
if [ ! -w "$pOut" ]; then
	echo "Error: Can not write to $pOut"
	exit 1
fi

# Intermediate PDF goes to /tmp/<output-basename-without-extension>.pdf
tBase=${pOut%.*}
tBase=/tmp/${tBase##*/}
tBin=${0%/*}
# ---------------------------------
echo "$tBin/pub-fop-pdf.sh $pIn $pStyle $tBase.pdf"
"$tBin/pub-fop-pdf.sh" "$pIn" "$pStyle" "$tBase.pdf"
echo "pdf2ps -dLanguageLevel=1 -r600 $tBase.pdf $pOut"
pdf2ps -dLanguageLevel=1 -r600 "$tBase.pdf" "$pOut"
| true
|
9bec347a92a46943bfecfbd6f98f234e22750128
|
Shell
|
chao-hu/nginxconfig-manager
|
/deploy/.svn/pristine/9b/9bec347a92a46943bfecfbd6f98f234e22750128.svn-base
|
UTF-8
| 1,055
| 2.75
| 3
|
[] |
no_license
|
#!/usr/bin/expect
# Deploy the nginx-api webapp to a remote jetty host over scp/ssh,
# driving the password prompts with expect.
# Args: WEB_HOST USER PASSWORD DATE VERSION
cd /tmp/loadbalance-manager
set WEB_HOST [lindex $argv 0]
set USER [lindex $argv 1]
set PASSWORD [lindex $argv 2]
set DATE [lindex $argv 3]
set VERSION [lindex $argv 4]
# Disable the timeout: the copy/unzip steps can take a while.
set timeout -1
# Copy the build artifact to the remote host.
spawn scp deploy/target/nginx-api-${VERSION}.zip $USER@${WEB_HOST}:/dianyi/
expect "password:" {
send "${PASSWORD}\r"
}
expect "*]*"
# Log in; answer the host-key confirmation if it appears.
spawn ssh $USER@$WEB_HOST
expect "(yes/no)?" {
send "yes\r"
expect "password:" {
send "$PASSWORD\r"
}
} "password:" {
send "$PASSWORD\r"
}
expect "*]*"
# Stop the app server before swapping the webapp.
send "jetty.sh stop\r"
expect "*]*"
# Back up the current deployment (stamped with DATE), then unpack the
# new build and move it into place.
send "mkdir -p /dianyi/backup_lbs\r"
send "rm /dianyi/backup_lbs/lbs* -rf\r"
send "mv /dianyi/webapps/lbs /dianyi/backup_lbs/lbs.${DATE}\r"
expect "*]*"
send "unzip -q /dianyi/nginx-api-${VERSION}.zip -d /tmp/lbs\r"
expect "*]*"
send "mv /tmp/lbs/ROOT /dianyi/webapps/lbs\r"
expect "*]*"
# Restart the app server and disconnect.
send "jetty.sh restart\r"
expect "*]*"
send "exit\r"
expect eof
exit
| true
|
9cd3033f4043d1d3f683b397c8f07936080048c5
|
Shell
|
sample/docker-java-builder
|
/build.sh
|
UTF-8
| 3,799
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
mkdir -p /build
mkdir -p /target
tar xvf /java/$1 -C /build
JAVA_DIRECTORY=`ls -1 /build`
# Transform jdk1.8.0_60 to 1.8.0.60
JAVA_VERSION=$(echo $JAVA_DIRECTORY | sed -nr 's/([a-zA-Z]+)([0-9])\.([0-9]+)\.([0-9]+)\_([0-9]+).*/\2.\3.\4.\5/p')
JAVA_VARIANT=$(echo $JAVA_DIRECTORY | sed -nr 's/([a-zA-Z]+)[0-9].*/\1/p')
JAVA_MAJOR_VERSION=$(echo $JAVA_DIRECTORY | sed -nr 's/[a-zA-Z]+[0-9]\.([0-9]+)\..*/\1/p')
SDK_VERSION=$JAVA_DIRECTORY
cd /target
cat <<EOF > alternatives.sh
#!/bin/bash
LATEST=1
LATEST=$((`LANG=C update-alternatives --display java | grep ^/ | sed -e 's/.* //g' | sort -n | tail -1`+1))
EOF
cat <<EOF > uninstall.sh
#!/bin/bash
rm -rf /opt/$SDK_VERSION
EOF
for f in /build/$JAVA_DIRECTORY/bin/*; do
name=`basename $f`;
if [ ! -f "/usr/bin/$name" -o -L "/usr/bin/$name" ]; then
# Some files, like jvisualvm might not be links
if [ -f "/build/$JAVA_DIRECTORY/man/man1/$name.1" ]; then
echo $name
echo update-alternatives --install /usr/bin/$name $name /opt/$JAVA_DIRECTORY/bin/$name \$LATEST \
--slave /usr/share/man/man1/$name.1 $name.1 /opt/$JAVA_DIRECTORY/man/man1/$name.1 >> alternatives.sh
#echo "jdk $name /opt/$JAVA_DIRECTORY/bin/$name" >> /usr/lib/jvm/.java-8-oracle.jinfo
fi
fi
done
for f in /build/$JAVA_DIRECTORY/man/man1/*; do
name=`basename $f .1`;
# Some files, like jvisualvm might not be links.
# Further assume this for corresponding man page
if [ ! -f "/usr/bin/$name" -o -L "/usr/bin/$name" ]; then
echo $name
echo update-alternatives --remove $name /opt/$JAVA_DIRECTORY/bin/$name >> uninstall.sh
fi
done
JRE_PROVIDES='java-virtual-machine java-compiler default-jre default-jre-headless
java-runtime java2-runtime java5-runtime java6-runtime java8-runtime
java-runtime-headless java2-runtime-headless java5-runtime-headless java6-runtime-headless java8-runtime-headless
openjdk-6-jre openjdk-6-jre-headless
openjdk-7-jre openjdk-7-jre-headless
openjdk-8-jre openjdk-8-jre-headless
sun-java6-bin sun-java6-jre sun-java6-fonts sun-java6-plugin
oracle-java7-bin oracle-java7-jre oracle-java7-fonts oracle-java7-plugin
oracle-java8-bin oracle-java8-jre oracle-java8-fonts oracle-java8-plugin'
JDK_PROVIDES='java-virtual-machine java-compiler default-jre default-jdk default-jdk-headless
java-runtime java2-runtime java5-runtime java6-runtime java8-runtime
java-runtime-headless java2-runtime-headless java5-runtime-headless java6-runtime-headless java8-runtime-headless
java-jdk java2-jdk java5-jdk java6-jdk java8-jdk
java-sdk java2-sdk java5-sdk java6-sdk java8-sdk
openjdk-6-jre openjdk-6-jre-headless openjdk-6-jdk openjdk-6-jdk-headless openjdk-6-jdk
openjdk-7-jre openjdk-7-jre-headless openjdk-6-jdk openjdk-6-jdk-headless openjdk-6-jdk
openjdk-8-jre openjdk-8-jre-headless openjdk-8-jdk openjdk-8-jdk-headless openjdk-8-jdk
sun-java6-bin sun-java6-jdk sun-java6-jdk sun-java6-fonts sun-java6-plugin
oracle-java8-bin oracle-java8-fonts oracle-java8-plugin'
PROVIDES=""
if [ $JAVA_VARIANT == 'jdk' ]; then
for i in `echo $JDK_PROVIDES`; do PROVIDES+="--provides $i "; done
PKG_NAME=oracle-java8-jdk
else
for i in `echo $JRE_PROVIDES`; do PROVIDES+="--provides $i "; done
PKG_NAME=oracle-java8
fi
fpm -f --verbose -s dir -t deb --after-install ./alternatives.sh --after-remove ./uninstall.sh --name "$PKG_NAME" $PROVIDES -v $JAVA_VERSION --prefix=/opt/ -C /build/ $SDK_VERSION
rm -f alternatives.sh uninstall.sh
| true
|
e89ab804eeede4fed465d3281cb8deabcf64aa36
|
Shell
|
jahhulbert-ccri/cloud-local
|
/conf/cloud-local.conf
|
UTF-8
| 1,365
| 3.40625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Configuration sourced by the cloud-local scripts: package versions,
# mirrors, bind address/port overrides, and service enable flags.
#
# Source for packages (accumulo, hadoop, etc)
# Available options are (local, wget)
#
# Set the variable 'pkg_src_mirror' if you want to specify a mirror
# else it will use https://www.apache.org/dyn/closer.cgi to determine
# the fastest mirror
#
# pkg_src_mirror="http://apache.mirrors.tds.net"
# Specify a maven repository to use
pkg_src_maven="https://repo1.maven.org/maven2"
#
# Versions of popular software
#
pkg_accumulo_ver="1.7.2"
pkg_hbase_ver="1.1.6"
pkg_hadoop_ver="2.6.4"
pkg_zookeeper_ver="3.4.6"
# Note convention is scala.version_kafka.version
pkg_kafka_scala_ver="2.11"
pkg_kafka_ver="0.9.0.0"
pkg_spark_ver="1.6.3"
# The following options can be overriden in the user environment
# bind address and hostname to use for all service bindings
if [[ -z "${CL_HOSTNAME}" ]]; then
CL_HOSTNAME=localhost
#CL_HOSTNAME=$(hostname)
fi
if [[ -z "${CL_BIND_ADDRESS}" ]]; then
CL_BIND_ADDRESS="127.0.0.1"
fi
# Added to every default service port (lets several instances coexist).
if [[ -z "${CL_PORT_OFFSET}" ]]; then
CL_PORT_OFFSET=0
fi
if [[ -z "${CL_VERBOSE}" ]]; then
CL_VERBOSE=0
fi
# Enable accumulo or hbase - probably best not to run both but it might work
# 1 = enabled
# 0 = disabled
acc_enable=1
hbase_enable=0
# accumulo config
cl_acc_inst_name="local"
cl_acc_inst_pass="secret"
# Note, just the major+minor from Hadoop, not patch level
hadoop_base_ver=${pkg_hadoop_ver:0:3}
| true
|
5465be1db108515b676b5005d9fbe5131077178a
|
Shell
|
Liwan31/package
|
/cp_31.sh
|
UTF-8
| 4,205
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/bash
# Just run `sh cp_31.sh <config dir>` in a terminal to package the apps
# into .ipa files.
# Layout of the config directory:
# |- icons   |- xxx
#            |- ...
# |- ExportOptions.plist
# |- archive
# Xcode project name
project_name='LotteryShop'
# Xcode project path
project_path='/Users/gwh/mine/svn/ltshop_trunk/root/LotteryShopWithoutLL'
# Where the icons/launch images for the 31 apps live: one folder per app
# name, each holding the icons, launch images and extra replacement art.
# Note: the project icon set is Images.xcassets/AppIcon.appiconset
# and the launch set is Images.xcassets/LaunchImage.launchimage
resource_path='/Users/gwh/mine/打包/打包素材/图标'
# Output directory for the generated .ipa files
# (create an empty folder named "archive" inside it)
output_path='/Users/gwh/mine/打包/打包素材'
# Temporary location for the .xcarchive
archive_path="$output_path/archive"
# all=0: build only the first app as a trial run; all=1: build them all
all=0
# App display names (CFBundleDisplayName / CFBundleName)
appNames=(白鹭巷体彩店 必胜体彩店)
# Bundle-id prefix
bidPrefix="czlm.LotteryShop."
# Bundle-id suffixes (one per app, same order as appNames)
appIds=(bailuxiang bishen)
# ExportOptions.plist can be generated by archiving once in Xcode
# Icon assets to replace
icons=(AppIcon40x40@2x.png AppIcon40x40@3x.png AppIcon60x60@2x.png AppIcon60x60@3x.png)
launchs=(LaunchImage-700-568h@2x.png LaunchImage-700@2x.png LaunchImage-800-667h@2x.png LaunchImage-800-Portrait-736h@3x.png)
# Extra resource names in each app's asset folder to replace,
# and the matching destination paths inside the project
changeNames=(AppIcon60x60@3x.png dongfanghong_login_back.png)
changeNamesInProj=(pic/熊猫体育/app_icon.png pic/熊猫体育/app_login_back.png)
# Nothing below needs configuring (working variables, set per app)
appName='xx'
appId='xx'
# Archive the Xcode workspace and export a signed .ipa named after the
# current $appId. Reads globals: project_path, project_name,
# archive_path, output_path, appId.
packaging(){
# Build configuration
MWConfiguration=Debug
# Date stamp (available for naming)
MWDate=`date +%Y%m%d_%H%M`
# CocoaPods-related setup
# refresh pod config if needed:
# pod install
# Build the archive
xcodebuild archive \
-workspace "$project_path/$project_name.xcworkspace" \
-scheme "$project_name" \
-configuration "$MWConfiguration" \
-archivePath "$archive_path/$project_name" \
clean \
build \
-derivedDataPath "$MWBuildTempDir"
# Export the .ipa from the archive
xcodebuild -exportArchive -exportOptionsPlist "$output_path/ExportOptions.plist" -archivePath "$archive_path/$project_name.xcarchive" -exportPath $output_path/$appId
######### this part is optional and can be removed #########
# Move and rename the exported ipa to <appId>.ipa
mv /$output_path/$appId/LotteryShop.ipa /$output_path/$appId.ipa
# Remove the per-app export directory
rm -r $output_path/$appId/
######### this part is optional and can be removed #########
}
#---------------------------------------------------------------------------------------------------------------------------------
# Customise the project for the current app: rewrite Info.plist
# (display name, bundle id) and copy this app's icons/launch images and
# extra resources into the project tree. Reads globals: project_path,
# project_name, resource_path, appName, appId, icons, launchs,
# changeNames, changeNamesInProj.
prepare(){
plist_path="${project_path}/${project_name}/Info.plist"
echo $plist_path
# Replace CFBundleDisplayName / CFBundleName / CFBundleIdentifier
# (sed edits the value <string> on the line after each key)
sed -i '' "/CFBundleDisplayName/{n;s/<string>.*<\/string>/<string>$appName<\/string>/;}" $plist_path
sed -i '' "/CFBundleName/{n;s/<string>.*<\/string>/<string>$appName<\/string>/;}" $plist_path
sed -i '' "/CFBundleIdentifier/{n;s/<string>.*<\/string>/<string>czlm.LotteryShop.$appId<\/string>/;}" $plist_path
m=0
while [[ m -lt ${#icons[@]} ]]; do
icon=${icons[m]}
launch=${launchs[m]}
echo "${icon}"
echo "${launch}"
let m++
# Copy this app's icon and launch image into the asset catalogs
cp "${resource_path}/${appName}/${icon}" "${project_path}/${project_name}/Images.xcassets/AppIcon.appiconset/${icon}"
cp "${resource_path}/${appName}/${launch}" "${project_path}/${project_name}/Images.xcassets/LaunchImage.launchimage/${launch}"
done
n=0
while [[ n -lt ${#changeNames[@]} ]]; do
changeName=${changeNames[n]}
changeNameInProj=${changeNamesInProj[n]}
let n++
# Copy the extra in-app images (in-app icon and home-page picture)
cp "${resource_path}/${appName}/${changeName}" "${project_path}/${project_name}/${changeNameInProj}"
done
}
# Build every configured app (or only the first one when all=0):
# for each app, swap in its resources with prepare(), then archive and
# export with packaging(). Finally open the output directory.
group(){
	# Trial mode: keep only the first app in each list.
	if [[ $all -eq 0 ]]; then
		echo "all=$all"
		appNames=("${appNames[0]}")
		appIds=("${appIds[0]}")
	fi
	# Copy the arrays properly. (The previous `appNames_new=appNames`
	# stored the literal string "appNames", so the loop ran once with
	# appId="appIds" and never built the configured apps.)
	appNames_new=("${appNames[@]}")
	appIds_new=("${appIds[@]}")
	i=0
	while [[ i -lt ${#appIds_new[@]} ]]; do
		appName=${appNames_new[i]}
		appId=${appIds_new[i]}
		i=$((i + 1))
		echo "$appName"
		# Swap in this app's resources
		prepare
		# Archive and export the .ipa
		packaging
	done
	open "$output_path"
}
#---------------------------------------------------------------------------------------------------------------------------------
# Build everything
group
| true
|
4a428b9354d4a663a727efaa02ebb2e5697635f7
|
Shell
|
cntrump/PDUninstaller
|
/PDUninstaller.sh
|
UTF-8
| 6,102
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Script to clean up Parallels Desktop and Parallels Desktop Switch to Mac
# installations starting from version 3.
#
# Use it at your own risk.
#
# Copyright (c) 2004-2014 Parallels IP Holdings GmbH.
# All rights reserved.
# http://www.parallels.com
domain='com.parallels'
apps_path='/Applications/Parallels Desktop.app'
lib_path='/Library/Parallels'
svc_path="${lib_path}/Parallels Service.app"
agents_path='/Library/LaunchAgents'
daemons_path='/Library/LaunchDaemons'
incompat_path='/Incompatible Software'
pd5_uninst="${svc_path}/Contents/Resources/Uninstaller.sh"
pd7_uninst="${lib_path}/Uninstaller/Parallels Hypervisor/Uninstaller.sh"
pd9_uninst="${apps_path}/Contents/MacOS/Uninstaller"
pd10_uninst="${apps_path}/Contents/MacOS/Uninstaller"
mode=kind
[ "x${1}" = 'x-p' -o "x${1}" = 'x--purge' ] && mode='purge'
if [ `id -u` -ne 0 ]; then
echo 'Root privileges required.'
exit 1
fi
# Run one product's uninstaller, if present.
#   $1 - human-readable product label (PD5, PD7, ...)
#   $2 - path to the uninstaller script
#   $3 - argument passed through to the uninstaller (left unquoted on
#        purpose, matching the historical call convention)
uninst() {
  local label=$1
  local script=$2
  # Nothing to do unless the uninstaller exists and is executable.
  [ -x "${script}" ] || return
  echo " * Found ${label} uninstaller"
  "${script}" $3
}
uninst PD5 "${pd5_uninst}" desktop
uninst PD7 "${pd7_uninst}" virtualization
uninst PD9 "${pd9_uninst}" remove
uninst PD10 "${pd10_uninst}" remove
users=$(
for u in `ls /Users`; do
[ -d "/Users/$u" ] && id "$u" >/dev/null 2>&1 && echo "$u"
done)
daemons="
${daemons_path}/${domain}.desktop.launchdaemon.plist
${daemons_path}/pvsnatd.plist
"
agents="
${agents_path}/${domain}.desktop.launch.plist
${agents_path}/${domain}.DesktopControlAgent.plist
${agents_path}/${domain}.vm.prl_pcproxy.plist
"
IFS=$'\n'
for plist in $daemons; do
launchctl unload "${plist}"
rm -f "${plist}"
done
for plist in $agents; do
for u in $users; do
sudo -u "$u" launchctl unload "${plist}"
done
rm -f "${plist}"
done
unset IFS
rm -rf '/Library/StartupItems/ParallelsDesktopTransporter'
rm -rf '/Library/StartupItems/ParallelsTransporter'
rm -rf '/Library/StartupItems/Parallels'
bins2kill='
prl_vm_app
prl_client_app
prl_disp_service
Parallels Transporter
Parallels Image Tool
llipd
Parallels Explorer
Parallels Mounter
Parallels
'
IFS=$'\n'
for bin in ${bins2kill}; do
killall -KILL "${bin}"
done
unset IFS
kill -KILL `ps -A -opid,command | \
fgrep "/bin/bash ${apps_path}/Contents/MacOS/watchdog" | awk '{print $1}'`
ps -A -opid,comm | \
grep -E "(${apps_path}/Contents/MacOS|${svc_path}/Contents)" | \
awk '{print $1}' | \
while read pid; do kill -KILL $pid; done
kextunload -b "${domain}.kext.netbridge"
kextunload -b "${domain}.kext.prl_netbridge"
kextstat | fgrep "${domain}." |
fgrep -v "${domain}.virtualsound" | fgrep -v "${domain}.prl_video" | \
awk '{print $6}' | while read i; do kextunload -b $i; done
pd3kexts='
ConnectUSB
Pvsnet
hypervisor
vmmain
'
IFS=$'\n'
for k in ${pd3kexts}; do
kpath="/System/Library/Extensions/${k}.kext"
defaults read "${kpath}/Contents/Info" CFBundleIdentifier | \
fgrep -q "${domain}" && rm -rf "${kpath}"
rm -rf "${incompat_path}/${k}.kext"
done
unset IFS
rm -rf /System/Library/Extensions/prl*
rm -rf "${apps_path}"
rm -rf "/Applications/Parallels"
rm -f "/Applications/._Parallels"
rm -rf "${svc_path}"
rm -rf "${lib_path}/Parallels Mounter.app"
rm -rf "${lib_path}/Parallels Transporter.app"
rm -rf "${lib_path}/Receipts"
rm -rf "${lib_path}/Tools"
rm -rf "${lib_path}/Uninstaller"
rm -rf "${lib_path}/Bioses"
rm -rf "${lib_path}/Help"
rm -rf "${lib_path}/libmspack_prl.dylib"
rm -rf "${lib_path}/.bc_backup"
rm -f "${lib_path}/.dhcp"*
rm -f /var/db/receipts/${domain}.pkg.virtualization.*
rm -f /var/db/receipts/${domain}.pkg.desktop.*
rm -f /var/db/receipts/${domain}.prlufs.core.{bom,plist}
rm -rf '/Library/Receipts/Parallels '*.pkg
rm -rf '/Library/Receipts/Install Parallels Desktop.pkg'
rm -rf "${incompat_path}/Parallels "*.app
rm -rf "${incompat_path}"/${domain}.*.plist
IFS=$'\n'
for u in ${users}; do
home="/Users/${u}"
home_lib="${home}/Library"
rm -rf "${home}/.Trash/Parallels Desktop.app"
rm -rf "${home}/.Trash/Parallels Service.app"
rm -rf "${home}/Desktop/Parallels Desktop.app"
rm -rf "${home}/Applications (Parallels)"
caches="${home_lib}/Caches"
rm -rf "${caches}/Parallels"
rm -rf "${caches}/${domain}.desktop.console"
rm -rf "${caches}/${domain}.winapp."*
rm -rf "${home_lib}/Parallels/Application Menus"
rm -f "${home_lib}/Parallels/"*.pid
home_saved="${home_lib}/Saved Application State"
rm -rf "${home_saved}/${domain}.desktop.console.savedState"
rm -rf "${home_saved}/${domain}.desktop.transporter.savedState"
rm -rf "${home_saved}/${domain}.smartinstall.savedState"
if [ "${mode}" = 'purge' ]; then
rm -rf "${home_lib}/Parallels"
rm -rf "${home_lib}/Logs/Parallels"
rm -rf "${home_lib}/Logs/parallels.log"*
rm -rf "${home_lib}/Preferences/Parallels"
rm -rf "${home_lib}/Preferences/${domain}."*
rm -rf "${home}/Documents/.parallels-vm-directory"
fi
done
cmd_tools='
prl_convert
prl_disk_tool
prl_perf_ctl
prlctl
prlsrvctl
'
for cmd in ${cmd_tools}; do
rm -f "/usr/bin/${cmd}"
rm -f "/usr/share/man/man8/${cmd}.8"
done
unset IFS
find /System/Library/Frameworks/Python.framework /Library/Python \
-name prlsdkapi -exec rm -rf "{}" \;
rm -rf '/System/Library/Filesystems/prlufs.fs'
rm -rf '/Library/Filesystems/prlufs.fs'
rm -rf '/usr/lib/parallels'
rm -f '/usr/local/lib/libprl_sdk.'*
rm -rf '/usr/share/parallels-server'
rm -rf '/usr/include/parallels-server'
rm -rf '/Library/Spotlight/ParallelsMD.mdimporter'
rm -rf '/Library/QuickLook/ParallelsQL.qlgenerator'
rm -rf '/Library/Contextual Menu Items/ParallelsCM.plugin'
rm -rf /var/run/lic_events
rm -f /var/run/prl_*.pid
rm -rf '/tmp/.pd'
rm -f /etc/pam.d/prl_disp_service*
rm -f /tmp/.pd-video-path
if [ "${mode}" = 'purge' ]; then
rm -rf "${lib_path}"
rm -rf '/Library/Preferences/Parallels'
rm -f '/Library/Logs/parallels.log'*
rm -f '/Library/Logs/parallels_mounter.log'
rm -f '/Library/Logs/parallels_migration.log'
rm -f '/var/log/prl_disp_service_server.log'
rm -f "/var/root/Library/Preferences/${domain}.Parallels Desktop.plist"
rm -f "/var/root/Library/Preferences/${domain}.desktop"*.plist
rm -f "/var/root/Library/Preferences/${domain}.desktop"*.plist.lockfile
fi
| true
|
05a6e7882101d263224faac4e1505ac51a182ede
|
Shell
|
nuxlli/azk_bash
|
/test/cli/provision.bats
|
UTF-8
| 7,184
| 3.53125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bats
# Bats integration tests for `azk-provision` (box and app image provisioning).
# External commands (git, azk-dcli, azk-image-generate, exec) are shadowed by
# exported shell functions so the suite runs without network or docker access.

load ../test_helper

# Per-test setup: isolated workspace and HOME, plus a git identity so the
# mocked repositories can commit.
setup() {
  mkdir -p "$AZK_TEST_DIR"
  cd "$AZK_TEST_DIR"
  mkdir -p "$HOME"
  git config --global user.name "Tester"
  git config --global user.email "tester@test.local"
}

# Create an empty commit quietly (exported so subshells spawned by bats see it).
git_commit() {
  command git commit --quiet --allow-empty -m "empty"
}; export -f git_commit

# Shadow `git` with a stub: "clone" copies the test-box fixture and tags it
# v0.0.1; "remote update" commits a second version tagged v0.0.2. Any other
# git invocation falls through to the real binary.
mock_git_clone() {
  export AZK_DATA_PATH="${AZK_TEST_DIR}/data"
  export test_clone_url="https://github.com/azukiapp/test-box"
  export test_clone_path="${AZK_DATA_PATH}/boxes/azukiapp/test-box"
  export test_fixture_path="$(fixtures test-box)"

  git() {
    clone_path="${AZK_DATA_PATH}/boxes/azukiapp/test-box"
    mkdir -p "$(dirname "$clone_path")"
    if [[ "$@" == "clone $test_clone_url $clone_path" ]]; then
      cp -rf $test_fixture_path $clone_path
      cd $clone_path
      git init 1>/dev/null;
      echo "Cloning into '$clone_path'..."
      git add .
      git commit --quiet -m "First version"
      git tag v0.0.1
      return 0;
    fi
    if [[ "$@" == "--git-dir=${clone_path}/.git remote update" ]]; then
      cd $clone_path
      echo "v0.0.2" > version
      git add .
      git commit --quiet -m "Second version"
      git tag v0.0.2
      echo "Fetching origin"
      return 0;
    fi
    command git "$@";
  }; export -f git
}

# Calling azk-provision with no arguments must print usage and fail.
@test "$test_label required parameters" {
  run azk-provision
  assert_failure
  assert_match '^Usage:.*provision' "${lines[0]}"
}

# Provisioning outside a project (no azkfile.json) must fail with the same
# message that `azk azkfile` prints.
@test "$test_label requires azkfile" {
  run azk-provision /bin/bash
  assert_failure "$(azk azkfile 2>&1)"
}

# Outside the agent, provision must re-exec itself through azk-agent-exec;
# the stubbed `exec` records the attempted command instead of running it.
@test "$test_label requires exec in agent" {
  exec() {
    [[ "$@" == "azk-agent-exec provision box" ]] && echo "$@" && return 1;
    command exec $@;
  }
  export -f exec
  cp_fixture full_azkfile "${AZK_TEST_DIR}/project/azkfile.json"
  cd "project"
  run azk-provision box
  assert_failure
  assert_output "azk-agent-exec provision box"
}

# Unknown image types are rejected with an explicit error message.
@test "$test_label unsupported image type" {
  cp_fixture full_azkfile "${AZK_TEST_DIR}/project/azkfile.json"
  cd "project"
  run azk-provision --get-name __unsupported__
  assert_failure "azk: '__unsupported__' unsupported image type"
}

# --get-name box resolves the docker tag from the azkfile's box reference.
@test "$test_label only return a image name for box type" {
  cp_fixture full_azkfile "${AZK_TEST_DIR}/project/azkfile.json"
  cd "project"
  run azk-provision --get-name box
  assert_success "azk/boxes/azukiapp_test-box:v0.0.1"
}

# TODO: Reducing coupling test
# If the docker API (stubbed azk-dcli) already reports the image, the
# provision step is a no-op and says so.
@test "$test_label return ok if the image for this box is already provisioned" {
  export box_name='azk/boxes/azukiapp_test-box:v0.0.1'
  cp_fixture full_azkfile "${AZK_TEST_DIR}/project/azkfile.json"
  cd "project"

  azk-dcli() {
    if [[ "$@" == "--final /images/$box_name/json" ]]; then
      echo '{ "id": "4e220cf3e4156b0b1fd9" }'
      return 0;
    fi
    return 1;
  }; export -f azk-dcli

  run azk-provision --final box
  assert_success
  assert_equal "azk: [image-box] searching: '$box_name'" "${lines[0]}"
  assert_equal "azk: [image-box] already provisioned: '$box_name'" "${lines[1]}"
}

# When the image is missing, provision must attempt `git clone`; the stub
# fails both clone and rev-parse to exercise the error path.
@test "$test_label call git to clone repository of box" {
  export box_name='azk/boxes/azukiapp_test-box:v0.0.1'
  cp_fixture full_azkfile "${AZK_TEST_DIR}/project/azkfile.json"
  cd "project"
  export test_clone_url="https://github.com/azukiapp/test-box"

  azk-dcli() {
    echo '{}'; return 0;
  }; export -f azk-dcli

  git() {
    clone_path="`azk root`/data/boxes/azukiapp/test-box"
    [[ "$@" == "clone $test_clone_url $clone_path" ]] && echo "git-clone" && return 1;
    [[ "$@" == "--git-dir=$clone_path/.git rev-parse" ]] && echo "git-rev-parse" && return 1;
    return 0;
  }; export -f git

  run azk-provision --final box
  assert_failure
  assert_equal "azk: [image-box] not found: '$box_name'" "${lines[1]}"
  assert_equal "azk: [image-box] get box '${test_clone_url}#v0.0.1'..." "${lines[2]}"
  assert_equal "azk: [image-box] could not get or update the box $test_clone_url repository" "${lines[3]}"
}

# After a successful clone the repository must be checked out at the tag
# pinned in the azkfile (detached HEAD at v0.0.1).
@test "$test_label checkout to version" {
  cp_fixture full_azkfile "${AZK_TEST_DIR}/project/azkfile.json"
  cd "project"

  azk-dcli() {
    echo '{}'; return 0;
  }; export -f azk-dcli

  azk-image-generate() {
    return 0;
  }; export -f azk-image-generate

  mock_git_clone

  run azk-provision --final box 2>&1
  assert_success
  assert_equal "azk: [image-box] get box '$test_clone_url#v0.0.1'..." "${lines[2]}"
  assert_equal "azk: [image-box] check for version 'v0.0.1'..." "${lines[3]}"

  run git --git-dir="${test_clone_path}/.git" branch
  assert_success
  # git's detached-HEAD wording differs across versions; accept either form.
  values=( '* (detached from v0.0.1)' '* (no branch)' )
  assert_include values "${lines[0]}"
}

# With the clone already present, bumping the azkfile to v0.0.2 must only
# update and checkout, not re-clone.
@test "$test_label if exist clone, only checkout version" {
  azk_file="${AZK_TEST_DIR}/project/azkfile.json"
  cp_fixture full_azkfile $azk_file
  cd "project"

  azk-dcli() {
    echo '{}'; return 0;
  }; export -f azk-dcli

  azk-image-generate() {
    return 0;
  }; export -f azk-image-generate

  mock_git_clone

  run azk-provision --final box
  assert_success

  # Rewrite the azkfile to pin v0.0.2, then provision again.
  cat $(fixtures full_azkfile.json) | sed 's:test-box#v0.0.1:test-box#v0.0.2:g' > $azk_file
  run azk-provision --final box
  assert_success
  assert_equal "azk: [image-box] check for box updates in '${test_clone_url}#v0.0.2'..." "${lines[2]}"
  assert_equal "azk: [image-box] check for version 'v0.0.2'..." "${lines[3]}"
}

# Finally azk-image-generate must be invoked as (type, clone path, image tag).
@test "$test_label at the end generate image-box" {
  azk_file="${AZK_TEST_DIR}/project/azkfile.json"
  cp_fixture full_azkfile $azk_file
  cd "project"

  azk-dcli() {
    echo '{}'; return 0;
  }; export -f azk-dcli

  azk-image-generate() {
    echo "$@"
  }; export -f azk-image-generate

  mock_git_clone

  run azk-provision --final box
  assert_success
  assert_equal "box $test_clone_path azk/boxes/azukiapp_test-box:v0.0.1" "${lines[4]}"
}

# Helper for the app-image tests: install the fixture azkfile and print the
# app image tag derived from its "id" field.
mock_project() {
  local azkfile="${AZK_TEST_DIR}/project/azkfile.json"
  cp_fixture full_azkfile $azkfile
  local id=$(cat $azkfile | jq -r -c ".id")
  echo "azk/apps/$id"
}

# --get-name app resolves the per-project image tag.
@test "$test_label only return a image name for app type" {
  local image=$(mock_project)
  cd "project"
  run azk-provision --get-name app
  assert_success "$image"
}

# An already-built app image short-circuits provisioning.
@test "$test_label search image-app in docker" {
  export image_tag=$(mock_project)
  cd "project"

  azk-dcli() {
    if [[ "$@" == "--final /images/$image_tag/json" ]]; then
      echo '{ "id": "image-docker-id" }'
      exit 0
    fi
  }; export -f azk-dcli

  run azk-provision --final app
  assert_success
  assert_equal "azk: [image-app] searching: '$image_tag'" "${lines[0]}"
  assert_equal "azk: [image-app] already provisioned: '$image_tag'" "${lines[1]}"
}

# Building an app image first resolves the box image (stubbed as present),
# then generates the app image from the project directory.
@test "$test_label at the end generate image-app" {
  export image_tag=$(mock_project)
  cd "project"
  export image_box=$(azk-provision --get-name box)

  azk-dcli() {
    if [[ "$@" == "--final /images/${image_box}/json" ]]; then
      echo '{ "id": "image-box-id" }'
    else
      echo '{}'
    fi
    return 0
  }; export -f azk-dcli

  azk-image-generate() {
    echo "$@"
  }; export -f azk-image-generate

  mock_git_clone

  run azk-provision --final app
  assert_success
  assert_equal "azk: [image-app] not found: '$image_tag'" "${lines[1]}"
  assert_equal "azk: [image-box] searching: '${image_box}'" "${lines[2]}"
  assert_equal "app `pwd` $image_tag" "${lines[4]}"
}
| true
|
d75a6e7bdbea2df079c9f3bd7dcf13be1988ff66
|
Shell
|
kpk-pl/linux-workspace
|
/shell/path/retry
|
UTF-8
| 216
| 3.515625
| 4
|
[] |
no_license
|
#!/bin/bash
# Retry a command up to COUNT times, exiting 0 on the first success and 1 if
# every attempt fails.
# Usage: retry <count> <command> [args...]

count=$1
shift || true
command_str="$*"

# Fail fast on a bad invocation instead of silently looping zero times
# (the original passed a possibly-empty count straight to `seq`).
if ! [[ "$count" =~ ^[0-9]+$ ]] || [ $# -eq 0 ]; then
    echo "Usage: $0 <count> <command> [args...]" >&2
    exit 2
fi

echo "retry command: $command_str"
for (( i = 1; i <= count; i++ )); do
    echo "Attempt $i"
    # eval keeps backwards compatibility with callers that pass a quoted
    # shell snippet (pipes, &&, redirections) as a single argument.
    if eval "$command_str"; then
        echo "Success"
        exit 0
    fi
done
exit 1
| true
|
6857a436fa75b647a5be0a9187d28836a4da0cbe
|
Shell
|
FauxFaux/debian-control
|
/o/opennebula/opennebula_4.12.3+dfsg-3.1_amd64/postrm
|
UTF-8
| 4,912
| 2.75
| 3
|
[] |
no_license
|
#!/bin/sh
# Debian maintainer script: postrm for the opennebula package.
# Invoked by dpkg as `postrm <action>`; $1 is the action (remove, purge, ...).
# Everything below the hand-written part is generated by debhelper
# (dh_installdeb / dh_systemd_* / dh_installinit) — do not edit by hand.
set -e

# Filesystem locations and the service account created by the package.
ONEHOME=/var/lib/one
ONE_GROUP=oneadmin
ONE_USER=oneadmin

if [ "$1" = "remove" ]; then
	# Remove logs
	rm -rf /var/log/one/*
fi

if [ "$1" = "purge" ]; then
	# purge stats overrides
	# stats overrides could already be removed by root
	dpkg-statoverride --quiet --remove /var/log/one || true
	dpkg-statoverride --quiet --remove $ONEHOME/.one || true
	dpkg-statoverride --quiet --remove $ONEHOME/images || true
	dpkg-statoverride --quiet --remove $ONEHOME/datastores/0 || true
	dpkg-statoverride --quiet --remove $ONEHOME/datastores/1 || true
fi

# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/bash_completion.d/one 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/econe.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
# NOTE(review): the path below lacks the /etc prefix used by every sibling
# entry — possibly a packaging typo; confirm against the package's conffiles.
dpkg-maintscript-helper rm_conffile /one/auth/ldap_auth.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/auth/x509_auth.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/vmm_ec2/vmm_ec2rc 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/vmm_ec2/vmm_ec2.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/occi_templates/small.erb 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/occi_templates/network.erb 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/occi_templates/medium.erb 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/occi_templates/large.erb 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/occi_templates/custom.erb 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/occi_templates/common.erb 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/im_ec2/im_ec2rc 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/im_ec2/im_ec2.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/auth/quota.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
# NOTE(review): occi-server.conf appears twice in the generated sections
# (here and below) — harmless, dpkg-maintscript-helper is idempotent.
dpkg-maintscript-helper rm_conffile /etc/one/occi-server.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/ozones-server.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/occi-server.conf 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_installdeb
dpkg-maintscript-helper rm_conffile /etc/one/group.default 4.12.3+dfsg-1 -- "$@"
# End automatically added section
# Automatically added by dh_systemd_start
if [ -d /run/systemd/system ]; then
	systemctl --system daemon-reload >/dev/null || true
fi
# End automatically added section
# Automatically added by dh_installinit
if [ "$1" = "purge" ] ; then
	update-rc.d opennebula remove >/dev/null
fi
# In case this system is running systemd, we make systemd reload the unit files
# to pick up changes.
if [ -d /run/systemd/system ] ; then
	systemctl --system daemon-reload >/dev/null || true
fi
# End automatically added section
# Automatically added by dh_installinit
if [ "$1" = "purge" ] ; then
	update-rc.d opennebula-scheduler remove >/dev/null
fi
# In case this system is running systemd, we make systemd reload the unit files
# to pick up changes.
if [ -d /run/systemd/system ] ; then
	systemctl --system daemon-reload >/dev/null || true
fi
# End automatically added section
# Automatically added by dh_systemd_enable
if [ "$1" = "remove" ]; then
	if [ -x "/usr/bin/deb-systemd-helper" ]; then
		deb-systemd-helper mask opennebula.service >/dev/null
	fi
fi
if [ "$1" = "purge" ]; then
	if [ -x "/usr/bin/deb-systemd-helper" ]; then
		deb-systemd-helper purge opennebula.service >/dev/null
		deb-systemd-helper unmask opennebula.service >/dev/null
	fi
fi
# End automatically added section
| true
|
f276060348f27735eb7adc441a4c9eec928bc541
|
Shell
|
lululeta2014/mihaib
|
/forge/mi-hai.appspot.com/build.sh
|
UTF-8
| 555
| 3.171875
| 3
|
[] |
no_license
|
#! /usr/bin/env bash
# Format, build and test each sub-package of the App Engine Go app.
# Requires $APPENG_GO to point at the App Engine Go SDK directory.

set -u # exit if using uninitialised variable
set -e # exit if some command in this script fails
trap 'echo "$0 failed because a command in the script failed" >&2' ERR

# Fail early with an explicit message instead of a bare `set -u` abort
# at the first goapp invocation.
: "${APPENG_GO:?APPENG_GO must point at the App Engine Go SDK}"

SCRIPT=$(readlink -f "$0")
DIR=$(dirname "$SCRIPT")

PKG_ROOT='github.com/MihaiB/mihaib/forge/mi-hai.appspot.com/app'
golink --replace "$PKG_ROOT" "$DIR"/app

for X in \
    util \
    root \
    headers \
    ; do
    #"$APPENG_GO"/goapp get -d "$PKG_ROOT"/"$X"
    "$APPENG_GO"/goapp fmt "$PKG_ROOT"/"$X"
    "$APPENG_GO"/goapp build "$PKG_ROOT"/"$X"
    "$APPENG_GO"/goapp test "$PKG_ROOT"/"$X"
done
| true
|
05b5b69c34cb3d8936b39a78e855d939413389bc
|
Shell
|
citizenlee/autoScan
|
/autoScan.sh
|
UTF-8
| 458
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Repeatedly build (and print) a scanimage command line with a zero-padded,
# auto-incrementing output filename (img000.tiff, img001.tiff, ...).
# NOTE(review): the generated command is only echoed, never executed —
# presumably a dry run; add `eval "$fullscancmd"` to actually scan.

# Trailing space in scancmd separates it from the generated filename.
scancmd="scanimage -l 0 -t 0 -x 215 -y 297 --resolution 300 --buffer-size=1M --format tiff > "
filenum=0

while true; do
    date -u                  # timestamp each iteration (was: eval "date -u")
    echo "$filenum"
    # %03d zero-pads to three digits, replacing the manual <10 / <100 / else
    # branches of the original.
    printf -v fullscancmd '%simg%03d.tiff' "$scancmd" "$filenum"
    echo "$fullscancmd"
    filenum=$((filenum + 1))
    sleep 0
done
| true
|
41aa94a27231acc53a1527f6670662c6f6baaf54
|
Shell
|
pentchomakov/SchoolProjects
|
/Python&C/C - Project 1/q3_image_sorter.sh
|
UTF-8
| 262
| 2.953125
| 3
|
[] |
no_license
|
#!/bin/bash
#Pentcho Tchomakov 260632861
# Append every image in the given directory (newest first) into one tall
# JPEG named <dir>.jpg, then open it with ImageMagick's display.

if [ ! -d "$1" ]; then
    echo "The specified directory does not exist" >&2
    # Original used a bare `exit` here, which returned the echo's status (0)
    # and reported success on a missing directory.
    exit 1
fi

filename="$1.jpg"
>"$filename"

# `ls -t` provides the newest-first ordering the assignment relies on; the
# read loop (instead of `for f in $(...)`) keeps paths with spaces intact.
find "$1" -type f -print | xargs ls -t | while IFS= read -r file; do
    convert "$file" "$filename" -append "$filename"
done

display ./ "$filename"
| true
|
79f2cca7dc4883e22589e36beb5425355e16764a
|
Shell
|
orland0m/bashrc
|
/bootstrap.sh
|
UTF-8
| 395
| 3.484375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Bootstrap: back up any existing ~/.bashrc, write a fresh stub, then run
# every rc_install.sh found below the current directory to populate it.

TARGET_BASHRC=~/.bashrc
INSTALLER_FILE=rc_install.sh

if [ -f "$TARGET_BASHRC" ]; then
    # $RANDOM keeps backups unique across repeated runs.
    BACKUP="$TARGET_BASHRC.back.$RANDOM"
    echo "Current $TARGET_BASHRC will be backed up in $BACKUP"
    cp "$TARGET_BASHRC" "$BACKUP"
    echo "Removing $TARGET_BASHRC"
    rm "$TARGET_BASHRC"
fi

printf '#!/usr/bin/env bash\n\n' > "$TARGET_BASHRC"
# Each installer script appends its own fragment to the new bashrc.
find . -name "$INSTALLER_FILE" -exec {} \;
| true
|
1e35c08819b54ee22f243a6aff92031add400192
|
Shell
|
nemuTUI/nemu
|
/sh/ntty
|
UTF-8
| 549
| 2.828125
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# Attach an interactive serial console to a nemu tty endpoint.
# Usage: ntty [id]   (id defaults to 0)
#
# socat is daemonized to bridge /tmp/ntty<id> to a fresh pty exposed as
# /tmp/stty<id>; picocom then attaches to that pty at 115200 baud and logs
# the session. When picocom exits, the socat daemon is stopped again.

tty_id="${1:-0}"

# Well-known paths derived from the tty id.
ntty_dev="/tmp/ntty${tty_id}"        # existing nemu endpoint
ntty_pid="/tmp/ntty${tty_id}.pid"    # pidfile for the socat daemon
ntty_log="/tmp/ntty${tty_id}.log"    # picocom session log
stty_dev="/tmp/stty${tty_id}"        # pty symlink created by socat
stty_pty="pty,raw,link=${stty_dev},echo=0"

set -x

socat="/usr/bin/socat"
socat_args="${ntty_dev} ${stty_pty}"
picocom="/usr/bin/picocom"

# -Sb: start in background; -m: write a pidfile so we can stop it later.
# socat_args is intentionally unquoted: it carries two separate arguments.
start-stop-daemon -Sb -mp "${ntty_pid}" -x "${socat}" -- ${socat_args}

# Wait until socat has created the pty character device before attaching.
while [ ! -c "${stty_dev}" ]; do
	sleep 0.1
done

"${picocom}" -e x -b115200 --logfile "${ntty_log}" "${stty_dev}"

# Tear the socat bridge down once the interactive session ends.
start-stop-daemon -K -p "${ntty_pid}" -x "${socat}" -- ${socat_args}
| true
|
9b6208af62495603adb91bb667b68bccfe64f8ea
|
Shell
|
mahuagaming/scue_kitchen
|
/linkscue-scripts/menu_scripts/menu_advance_bootimg.sh
|
UTF-8
| 1,984
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
# Advanced boot.img menu for the linkscue kitchen. Shows an interactive menu
# (Chinese UI strings) to unpack/repack boot images via the helper scripts
# shipped alongside this one; every choice loops back into a menu script.
# $1 is the working directory, which must contain boot.img.

# Absolute path of this script, used to derive the kitchen root below.
script_self=$(readlink -f $0)

# Kitchen directory layout, derived by stripping this script's own suffix.
TOPDIR=${script_self%/linkscue-scripts/menu_scripts/menu_advance_bootimg.sh}
scripts_dir=$TOPDIR/linkscue-scripts
sub_menu_dir=$scripts_dir/menu_scripts
dir_bootimg=$scripts_dir/bootimg-scripts-advance
bootimg_mtk_dir=$scripts_dir/bootimg-scripts-mtk
bootimg1=$scripts_dir/bootimg-scripts-advance/unpackbootimg
bootimg2=$scripts_dir/bootimg-scripts-advance/unpack-bootimg.pl
bootimg3=$scripts_dir/bootimg-scripts-advance/unpack-hisilicon-scue.sh
mkbootimg=$scripts_dir/bootimg-scripts-common/mkbootimg

wd=$1            # working directory holding boot.img
oldwd=$(pwd)     # restore point after each unpack step

# Render the menu: options 1-3 unpack variants, 4 = MTK repack, b = back.
clear
echo ""
echo "欢迎使用linkscue bootimg定制高级菜单!"
echo ""
echo ""
echo "1) 解压boot.img增强版1;"
echo "2) 解压boot.img增强版2;"
echo "3) 解压海思平台boot.img(华为荣耀2专用);"
echo "4) 打包MTK平台boot.img工作目录为recovery.img;"
echo ""
echo "b) 返回菜单;"
echo ""
read -p "请输入选项:" opt

# Block until boot.img is present in the working directory (the read is a
# "press enter after placing the file" pause).
while [[ ! -f $wd/boot.img ]]; do
	read -p "请把boot.img放置于$wd!"
done

case $opt in
	# Unpack with the unpackbootimg binary, then re-enter this menu.
	1)
		cd $wd
		$bootimg1 $wd/boot.img
		cd $oldwd
		echo ""
		read -p "按任意键返回菜单:"
		$TOPDIR/linkscue-scripts/menu_scripts/menu_advance_bootimg.sh $wd
		;;
	# Unpack with the perl unpacker, then re-enter this menu.
	2)
		cd $wd
		$bootimg2 $wd/boot.img
		cd $oldwd
		echo ""
		read -p "按任意键返回菜单:"
		$TOPDIR/linkscue-scripts/menu_scripts/menu_advance_bootimg.sh $wd
		;;
	# HiSilicon (Huawei Honor 2) specific unpacker.
	3)
		cd $wd
		$bootimg3 $wd $wd/boot.img
		cd $oldwd
		echo ""
		read -p "按任意键返回菜单:"
		$TOPDIR/linkscue-scripts/menu_scripts/menu_advance_bootimg.sh $wd
		;;
	# Repack an MTK working directory into a recovery image.
	4)
		$bootimg_mtk_dir/repack-MT65xx.sh -recovery boot.img-kernel.img boot.img-ramdisk boot_new.img $mkbootimg
		echo ""
		read -p "按任意键返回菜单:"
		$TOPDIR/linkscue-scripts/menu_scripts/menu_advance_bootimg.sh $wd
		;;
	# Back to the parent bootimg menu.
	b)
		$TOPDIR/linkscue-scripts/menu_scripts/menu_bootimgs.sh $wd
		;;
	# Anything else: redraw this menu.
	*)
		$TOPDIR/linkscue-scripts/menu_scripts/menu_advance_bootimg.sh $wd
		;;
esac
| true
|
8533aa4747a6e410929cf0a9aaac49480fe07c74
|
Shell
|
rushikantaria/cmdbasicCommand
|
/filefolder.sh
|
UTF-8
| 392
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Show an overview of the current directory: files, subdirectories,
# per-entry sizes, the contents of react.txt, and the working directory.

# Banner strings are quoted: unquoted, words like ******files******** are
# glob patterns and could expand to matching filenames instead of printing
# literally.
echo '******files********'
echo
ls *
echo
echo '******directory********'
echo
for item in *; do
    # The operand of -d must be quoted or names with spaces break the test.
    if [ -d "$item" ]; then
        echo "$item"
    fi
done
echo
echo '******size*******'
echo
du -sh ./*
echo
echo '******content of text-file******'
echo
cat react.txt
echo
echo
echo
echo '*******current location**********'
echo
echo
pwd
| true
|
265628ab25ca4e45cafb1e2ea11638b1fd5e54b3
|
Shell
|
Menooker/dgl
|
/tests/scripts/task_kg_test.sh
|
UTF-8
| 4,637
| 3.140625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# CI entry point: run the DGL knowledge-graph (apps/kg) test suite.
# Usage: task_kg_test.sh <backend> <device>   (device: cpu | gpu)
. /opt/conda/etc/profile.d/conda.sh

KG_DIR="./apps/kg/"

# Print a failure message and abort the whole script.
function fail {
    echo FAIL: $@
    exit -1
}

function usage {
    echo "Usage: $0 backend device"
}

# check arguments
if [ $# -ne 2 ]; then
    usage
    fail "Error: must specify device and bakend"
fi

# Map the device argument to the convention used below (-1 = CPU; for GPU,
# pin the whole run to device 0).
# NOTE(review): $dev is assigned here but not referenced afterwards.
if [ "$2" == "cpu" ]; then
    dev=-1
elif [ "$2" == "gpu" ]; then
    export CUDA_VISIBLE_DEVICES=0
    dev=0
else
    usage
    fail "Unknown device $2"
fi

# Make the locally-built DGL and the kg app importable for the tests.
export DGLBACKEND=$1
export DGL_LIBRARY_PATH=${PWD}/build
export PYTHONPATH=${PWD}/python:$KG_DIR:$PYTHONPATH
export DGL_DOWNLOAD_DIR=${PWD}

conda activate ${DGLBACKEND}-ci

# test
pushd $KG_DIR> /dev/null
python3 -m pytest tests/test_score.py || fail "run test_score.py on $1"

if [ "$2" == "cpu" ]; then
    # verify CPU training DistMult
    python3 train.py --model DistMult --dataset FB15k --batch_size 128 \
        --neg_sample_size 16 --hidden_dim 100 --gamma 500.0 --lr 0.1 --max_step 100 \
        --batch_size_eval 16 --valid --test -adv --eval_interval 30 --eval_percent 0.01 \
        --save_emb DistMult_FB15k_emb --data_path /data/kg || fail "run DistMult on $2"
    # verify saving training result
    python3 eval.py --model_name DistMult --dataset FB15k --hidden_dim 100 \
        --gamma 500.0 --batch_size 16 --model_path DistMult_FB15k_emb/ \
        --eval_percent 0.01 --data_path /data/kg || fail "eval DistMult on $2"
elif [ "$2" == "gpu" ]; then
    # verify GPU training DistMult
    python3 train.py --model DistMult --dataset FB15k --batch_size 128 \
        --neg_sample_size 16 --hidden_dim 100 --gamma 500.0 --lr 0.1 --max_step 100 \
        --batch_size_eval 16 --gpu 0 --valid --test -adv --eval_interval 30 --eval_percent 0.01 \
        --data_path /data/kg || fail "run DistMult on $2"
    # verify mixed CPU GPU training
    python3 train.py --model DistMult --dataset FB15k --batch_size 128 \
        --neg_sample_size 16 --hidden_dim 100 --gamma 500.0 --lr 0.1 --max_step 100 \
        --batch_size_eval 16 --gpu 0 --valid --test -adv --mix_cpu_gpu --eval_percent 0.01 \
        --save_emb DistMult_FB15k_emb --data_path /data/kg || fail "run mix with async CPU/GPU DistMult"
    # verify saving training result
    python3 eval.py --model_name DistMult --dataset FB15k --hidden_dim 100 \
        --gamma 500.0 --batch_size 16 --gpu 0 --model_path DistMult_FB15k_emb/ \
        --eval_percent 0.01 --data_path /data/kg || fail "eval DistMult on $2"
    # The multiprocess / async variants below are only exercised on pytorch.
    if [ "$1" == "pytorch" ]; then
        # verify mixed CPU GPU training with async_update
        python3 train.py --model DistMult --dataset FB15k --batch_size 128 \
            --neg_sample_size 16 --hidden_dim 100 --gamma 500.0 --lr 0.1 --max_step 100 \
            --batch_size_eval 16 --gpu 0 --valid --test -adv --mix_cpu_gpu --eval_percent 0.01 \
            --async_update --data_path /data/kg || fail "run mix CPU/GPU DistMult"
        # verify mixed CPU GPU training with random partition
        python3 train.py --model DistMult --dataset FB15k --batch_size 128 \
            --neg_sample_size 16 --hidden_dim 100 --gamma 500.0 --lr 0.1 --max_step 100 \
            --batch_size_eval 16 --num_proc 2 --gpu 0 --valid --test -adv --mix_cpu_gpu \
            --eval_percent 0.01 --async_update --force_sync_interval 100 \
            --data_path /data/kg || fail "run multiprocess async CPU/GPU DistMult"
        # verify mixed CPU GPU training with random partition async_update
        python3 train.py --model DistMult --dataset FB15k --batch_size 128 \
            --neg_sample_size 16 --hidden_dim 100 --gamma 500.0 --lr 0.1 --max_step 100 \
            --batch_size_eval 16 --num_proc 2 --gpu 0 --valid --test -adv --mix_cpu_gpu \
            --eval_percent 0.01 --rel_part --async_update --force_sync_interval 100 \
            --data_path /data/kg || fail "run multiprocess async CPU/GPU DistMult"
        # multi process training TransR
        python3 train.py --model TransR --dataset FB15k --batch_size 128 \
            --neg_sample_size 16 --hidden_dim 100 --gamma 500.0 --lr 0.1 --max_step 100 \
            --batch_size_eval 16 --num_proc 2 --gpu 0 --valid --test -adv --eval_interval 30 \
            --eval_percent 0.01 --data_path /data/kg --mix_cpu_gpu --rel_part --async_update \
            --save_emb TransR_FB15k_emb || fail "run multiprocess TransR on $2"
        python3 eval.py --model_name TransR --dataset FB15k --hidden_dim 100 \
            --gamma 500.0 --batch_size 16 --num_proc 2 --gpu 0 --model_path TransR_FB15k_emb/ \
            --eval_percent 0.01 --mix_cpu_gpu --data_path /data/kg || fail "eval multiprocess TransR on $2"
    fi
fi

popd > /dev/null
| true
|
7b3b9eddb7e021d40ca37cc4f133a2d9a16717fc
|
Shell
|
Jintian/linux.env.setting
|
/ssh/sshx
|
UTF-8
| 3,506
| 3.96875
| 4
|
[] |
no_license
|
#!/bin/bash
# sshx: ssh wrapper with a bookmark registry (~/.sshx/sites) and optional
# public-key installation on the remote host.
#
# Registry format: one "user@host y|n" entry per line, where the flag says
# whether our public key has already been pushed to that host.
#
# No argument  -> interactive mode: list bookmarks, pick one by number.
# One argument -> direct mode: connect to it, offering to bookmark/push key.

SSH_HOME=$HOME/.ssh
SSHX_HOME=$HOME/.sshx
SSHX_REGISTRY=$SSHX_HOME/sites

# GNU sed wants -r for extended regexps, BSD/macOS sed wants -E.
unamestr=`uname`
SED_OPT="-r"
if [[ "$unamestr" == 'Darwin' ]]; then
    SED_OPT="-E"
fi

mkdir $SSHX_HOME 2> /dev/null

# Seed the registry from ~/.bash_history: extract "ssh host" invocations,
# mark them "n" (key not pushed), and prefix bare hosts with the local user.
function loadentry {
    echo Loading ssh logging info from history...
    grep -i -E "ssh\s+[a-zA-Z0-9_@]+[a-zA-Z0-9\.]+$" $HOME/.bash_history|awk '{print $2}'|sed $SED_OPT 's/(.*)/\1 n/'|sed $SED_OPT "s/^([a-zA-Z0-9\.]+\s+[yn])$/${USER}@\1/"|sort|uniq &>$SSHX_REGISTRY
}

# Generate a DSA keypair if no public key exists yet.
function genkeys {
    [ -d $SSH_HOME ] || mkdir $SSH_HOME
    local idfile
    idfile=`ls $HOME/.ssh/*.pub 2> /dev/null | head -1`
    [ -z "$idfile" ] && ssh-keygen -q -t dsa -f "${SSH_HOME}/id_dsa" -N ""
}

[ -f "$SSHX_REGISTRY" ] || touch "$SSHX_REGISTRY"

if [ -z "$1" ]; then
    # INTERACTIVE MODE
    #check status for loading history ssh logging info
    ln=`cat $SSHX_REGISTRY | grep -vE "^\s*$" |wc -l`
    if [ $((ln)) == 0 ]; then
        loadentry
    fi

    # Save stdin on fd 3 and read the registry on fd 0 so the numbered
    # listing loop can consume it line by line.
    exec 3<&0
    exec 0<"$SSHX_REGISTRY"
    num=1
    while read -r line
    do
        FLAG=1
        # Print only the user@host part of each entry.
        echo -e "\t$num.) $line" | sed $SED_OPT 's/(\w+@[a-zA-Z0-9_\.]+).*/\1/'
        let num++
    done
    exec 0<&3   # restore the terminal as stdin for the prompts below

    if [ "$FLAG" = "" ]; then
        echo "No registed sites info."
    else
        echo -n "Please select the sites number: "
        read SEL
        # Entry N is the N-th line of the registry.
        registry=`head -$SEL $SSHX_REGISTRY | tail -1`
        pat="^([a-zA-Z0-9_\.]+@[a-zA-Z0-9_\.]+)\s+([yn])$"
        if [[ $registry =~ $pat ]]; then
            h=${BASH_REMATCH[1]}   # user@host
            m=${BASH_REMATCH[2]}   # key-pushed flag
            if [ $m != "y" ]; then
                echo -n "Do you want to store your public key on remote machine? [y|n] "
                read ans
                if [ $ans == "y" ]; then
                    genkeys
                    idfile=`ls ~/.ssh/*.pub 2> /dev/null | head -1`
                    cat $idfile | ssh $h 'mkdir .ssh; touch ~/.ssh/authorized_keys; cat >> ~/.ssh/authorized_keys' 2>/dev/null
                    # Flip the registry flag to "y" for this host.
                    sed -i $SED_OPT "s/($h\s+)n$/\1y/g" $SSHX_REGISTRY
                fi
            fi
            ssh -X $h
        fi
    fi
else
    # DIRECT MODE
    host=$1
    # Prefix the local username when the argument is a bare hostname.
    pat=".+@.+"
    if [[ ! $host =~ $pat ]]; then
        host="${USER}@${host}"
    fi
    line=`grep $host $SSHX_REGISTRY`
    if [ "$line" == "" ]; then
        # Unknown host: optionally bookmark it and push our key.
        echo -n "Do you want to save this host to your bookmark? [y|n] "
        read ans1
        echo -n "Do you want to store your public key on remote machine? [y|n] "
        read ans2
        if [ "$ans2" == "y" ]; then
            genkeys
            idfile=`ls $HOME/.ssh/*.pub 2> /dev/null | head -1`
            cat $idfile | ssh $host 'mkdir .ssh; touch ~/.ssh/authorized_keys; cat >> ~/.ssh/authorized_keys' 2>/dev/null
        fi
        if [ "$ans1" == "y" ]; then
            echo "$host $ans2" >> "$SSHX_REGISTRY"
        fi
    else
        # Known host: push the key first if the registry flag is still "n".
        pat="^(\w+@[a-zA-Z0-9_\.]+)\s+([yn])$"
        if [[ $line =~ $pat ]]; then
            h=${BASH_REMATCH[1]}
            m=${BASH_REMATCH[2]}
            if [ $m != "y" ]; then
                echo -n "Do you want to store your public key on remote machine? [y|n] "
                read ans
                if [ $ans == "y" ]; then
                    genkeys
                    idfile=`ls $HOME/.ssh/*.pub 2> /dev/null | head -1`
                    cat $idfile | ssh $h 'mkdir .ssh; touch ~/.ssh/authorized_keys; cat >> ~/.ssh/authorized_keys' 2>/dev/null
                    # NOTE(review): uses `sed -ri` instead of $SED_OPT here,
                    # so this line would fail on macOS — confirm intent.
                    sed -ri "s/($h\s+)n$/\1y/g" $SSHX_REGISTRY
                fi
            fi
        fi
    fi
    ssh -X $1
fi
| true
|
edf826acc8670aedbf99856b871e5b8ec860c7b7
|
Shell
|
mfuery/edumotion
|
/UnityProject/Assets/Editor/PostprocessBuildPlayer
|
UTF-8
| 420
| 2.75
| 3
|
[] |
no_license
|
#!/bin/sh
# Unity post-build hook: copy the staged player output into the standalone
# UnityPlayer folder and sync assets/resources into the Android project.

echo "Preparing stage for building Unity Android App"

stage="Temp/StagingArea"
player_dir="UnityPlayer"
android_dir="AndroidBGProject"

cp -R "$stage"/ "$player_dir"
cp -R "$stage"/assets "$android_dir"
cp -R "$stage"/res/values "$android_dir"/res/
# drawable* is intentionally unquoted: it must glob drawable, drawable-hdpi, ...
cp -R "$stage"/res/drawable* "$android_dir"/res/

echo "Stage done. Refresh your Android project."
| true
|
e4914a16f6279e567e4a65d167658a67d45c4523
|
Shell
|
EctoPLSM/wpc-unofficial.org
|
/src/bash/countries_code.sh
|
UTF-8
| 286
| 2.9375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# arguments:
#  $1: country code to be created
#  $2: previous country code if exists, "" otherwise
#  $3: next country code if exists, "" otherwise

echo "Creating countries/$1"
mkdir -p "../countries/$1"

# Quoting keeps an explicitly-empty $2/$3 as a positional argument; unquoted
# (as before), an empty code was dropped and the next one shifted into its
# place, breaking the documented "" convention for the sub-scripts.
./countries_code_index.sh "$1" "$2" "$3"
./countries_code_individual.sh "$1" "$2" "$3"
| true
|
b40d06e01e6525d0dce7ec1bfb9375350a5e515b
|
Shell
|
sridharatv/devopsdemo
|
/loop.sh
|
UTF-8
| 245
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
# Print 20 random numbers, one every two seconds, then exit.

echo "this is a loop shell script"
# we need to add a graceful exit
c=0
# `while true` runs the true builtin; the original `while $true` expanded an
# unset variable into an empty command that only happened to succeed.
while true; do
    c=$((c + 1))        # arithmetic expansion instead of forking `expr`
    echo $RANDOM
    sleep 2
    if [ "$c" -eq 20 ]; then
        break
    fi
done
echo "done"
# This change was made after we created a branch
| true
|
cb35eb6718872b6a85b68698599bdd33f1f03e5e
|
Shell
|
obreitwi/vim-sort-folds
|
/test/run.sh
|
UTF-8
| 782
| 3.9375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Run each testcase directory through $EDITOR in batch mode and diff the
# produced output against the expected file. Stops non-zero on first failure.
set -euo pipefail

(( $# >= 1 )) || { echo "Usage: $0 <testcase_directory>..." >&2; exit 1; }

for case_dir in "$@"; do
  # Recomputed per testcase, exactly like the original while/shift loop.
  tag="[$EDITOR/$(python --version)]"
  echo "${tag} Testing ${case_dir}.." >&2

  commands="${case_dir}/commands.vim"
  input="${case_dir}/input.txt"
  output="${case_dir}/output.txt"
  expected="${case_dir}/expected.txt"

  cp -v "${input}" "${output}"
  true > messages.log
  # $EDITOR is intentionally unquoted so it may carry extra arguments.
  ${EDITOR} -V0messages.log -s "${commands}" "${output}" 2>&1

  if diff "${output}" "${expected}"; then
    echo "${tag} SUCCESS" >&2
  else
    echo "${tag} FAILED" >&2
    cat messages.log >&2
    exit 1
  fi
done
| true
|
86451f68d79940d21e348bee6ce5f63e525a082f
|
Shell
|
ligadata-yasser/Kamanja
|
/trunk/SampleApplication/EasyInstall/easyInstallKamanja.sh
|
UTF-8
| 45,698
| 3.609375
| 4
|
[
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/bash
set -e
installPath=$1
srcPath=$2
ivyPath=$3
KafkaRootDir=$4
if [ ! -d "$installPath" ]; then
echo "Not valid install path supplied. It should be a directory that can be written to and whose current content is of no value (will be overwritten) "
echo "$0 <install path> <src tree trunk directory> <ivy directory path for dependencies> <kafka installation path>"
exit 1
fi
if [ ! -d "$srcPath" ]; then
echo "Not valid src path supplied. It should be the trunk directory containing the jars, files, what not that need to be supplied."
echo "$0 <install path> <src tree trunk directory> <ivy directory path for dependencies> <kafka installation path>"
exit 1
fi
if [ ! -d "$ivyPath" ]; then
echo "Not valid ivy path supplied. It should be the ivy path for dependency the jars."
echo "$0 <install path> <src tree trunk directory> <ivy directory path for dependencies> <kafka installation path>"
exit 1
fi
if [ ! -d "$KafkaRootDir" ]; then
echo "Not valid Kafka path supplied."
echo "$0 <install path> <src tree trunk directory> <ivy directory path for dependencies> <kafka installation path>"
exit 1
fi
installPath=$(echo $installPath | sed 's/[\/]*$//')
srcPath=$(echo $srcPath | sed 's/[\/]*$//')
ivyPath=$(echo $ivyPath | sed 's/[\/]*$//')
# *******************************
# Clean out prior installation
# *******************************
rm -Rf $installPath
# *******************************
# Make the directories as needed
# *******************************
mkdir -p $installPath/bin
mkdir -p $installPath/lib
mkdir -p $installPath/lib/system
mkdir -p $installPath/lib/application
mkdir -p $installPath/storage
mkdir -p $installPath/logs
mkdir -p $installPath/config
mkdir -p $installPath/documentation
mkdir -p $installPath/output
mkdir -p $installPath/workingdir
mkdir -p $installPath/template
mkdir -p $installPath/template/config
mkdir -p $installPath/template/script
mkdir -p $installPath/input
#new one
mkdir -p $installPath/input/SampleApplications
mkdir -p $installPath/input/SampleApplications/bin
mkdir -p $installPath/input/SampleApplications/data
mkdir -p $installPath/input/SampleApplications/metadata
mkdir -p $installPath/input/SampleApplications/metadata/config
mkdir -p $installPath/input/SampleApplications/metadata/container
mkdir -p $installPath/input/SampleApplications/metadata/function
mkdir -p $installPath/input/SampleApplications/metadata/message
mkdir -p $installPath/input/SampleApplications/metadata/model
mkdir -p $installPath/input/SampleApplications/metadata/script
mkdir -p $installPath/input/SampleApplications/metadata/type
mkdir -p $installPath/input/SampleApplications/template
#new one
bin=$installPath/bin
systemlib=$installPath/lib/system
applib=$installPath/lib/application
echo $installPath
echo $srcPath
echo $bin
# *******************************
# Build fat-jars
# *******************************
echo "clean, package and assemble $srcPath ..."
# Abort if the source tree is missing — otherwise sbt would run in
# whatever directory we happened to be in.
cd "$srcPath" || exit 1
sbt clean package KamanjaManager/assembly MetadataAPI/assembly KVInit/assembly MethodExtractor/assembly SimpleKafkaProducer/assembly NodeInfoExtract/assembly ExtractData/assembly MetadataAPIService/assembly JdbcDataCollector/assembly FileDataConsumer/assembly SaveContainerDataComponent/assembly CleanUtil/assembly MigrateManager/assembly
# recreate eclipse projects
#echo "refresh the eclipse projects ..."
#cd $srcPath
#sbt eclipse
# Move them into place.  Source patterns are deliberately left unquoted
# so the version-suffix globs (e.g. KVInit*) expand; the destination is
# quoted to survive spaces in the install path.
echo "copy the fat jars to $installPath ..."
cd "$srcPath" || exit 1
cp Utils/KVInit/target/scala-2.11/KVInit* "$bin"
cp MetadataAPI/target/scala-2.11/MetadataAPI* "$bin"
cp KamanjaManager/target/scala-2.11/KamanjaManager* "$bin"
cp Pmml/MethodExtractor/target/scala-2.11/MethodExtractor* "$bin"
cp Utils/SimpleKafkaProducer/target/scala-2.11/SimpleKafkaProducer* "$bin"
cp Utils/ExtractData/target/scala-2.11/ExtractData* "$bin"
cp Utils/JdbcDataCollector/target/scala-2.11/JdbcDataCollector* "$bin"
cp MetadataAPIService/target/scala-2.11/MetadataAPIService* "$bin"
cp FileDataConsumer/target/scala-2.11/FileDataConsumer* "$bin"
cp Utils/CleanUtil/target/scala-2.11/CleanUtil* "$bin"
cp Utils/Migrate/MigrateManager/target/MigrateManager* "$bin"
# *******************************
# Copy jars required (more than required if the fat jars are used)
# *******************************
# Base Types and Functions, InputOutput adapters, and original versions of things
echo "copy all Kamanja jars and the jars upon which they depend to the $systemlib"
# -------------------- generated cp commands --------------------
# The list below is machine-generated.  Paths are read one per line so
# that $srcPath/$ivyPath values containing spaces are handled safely;
# the original copy order is preserved (when the same jar exists in
# both lib_managed and the ivy cache, the later copy still wins).
# A failed cp prints its own error and the loop continues, matching the
# original best-effort behaviour.
while IFS= read -r jar; do
  cp "$jar" "$systemlib"
done <<EOF
$srcPath/lib_managed/bundles/org.apache.directory.api/api-util/api-util-1.0.0-M20.jar
$ivyPath/cache/org.codehaus.jackson/jackson-xc/jars/jackson-xc-1.8.3.jar
$ivyPath/cache/org.apache.kafka/kafka-clients/jars/kafka-clients-0.8.2.2.jar
$ivyPath/cache/org.xerial.snappy/snappy-java/bundles/snappy-java-1.0.4.1.jar
$ivyPath/cache/javax.xml.bind/jaxb-api/jars/jaxb-api-2.2.2.jar
$ivyPath/cache/log4j/log4j/bundles/log4j-1.2.16.jar
$ivyPath/cache/com.sun.jersey/jersey-core/bundles/jersey-core-1.9.jar
$srcPath/lib_managed/bundles/org.codehaus.jettison/jettison/jettison-1.1.jar
$ivyPath/cache/com.google.guava/guava/bundles/guava-16.0.1.jar
$ivyPath/cache/org.jruby.jcodings/jcodings/jars/jcodings-1.0.8.jar
$ivyPath/cache/org.scalatest/scalatest_2.11/bundles/scalatest_2.11-2.2.0.jar
$ivyPath/cache/org.apache.curator/curator-client/bundles/curator-client-2.7.1.jar
$ivyPath/cache/commons-digester/commons-digester/jars/commons-digester-1.8.jar
$ivyPath/cache/org.jruby.joni/joni/jars/joni-2.1.2.jar
$ivyPath/cache/org.apache.directory.api/api-util/bundles/api-util-1.0.0-M20.jar
$ivyPath/cache/org.mortbay.jetty/jetty-util/jars/jetty-util-6.1.26.jar
$srcPath/InputOutputAdapters/KafkaSimpleInputOutputAdapters/target/scala-2.11/kafkasimpleinputoutputadapters_2.11-1.0.jar
$srcPath/lib_managed/jars/org.apache.commons/commons-lang3/commons-lang3-3.1.jar
$ivyPath/cache/org.apache.commons/commons-collections4/jars/commons-collections4-4.0.jar
$srcPath/Storage/SqlServer/target/scala-2.11/sqlserver_2.11-0.1.0.jar
$srcPath/lib_managed/jars/org.apache.commons/commons-compress/commons-compress-1.4.1.jar
$ivyPath/cache/org.apache.logging.log4j/log4j-api/jars/log4j-api-2.4.1.jar
$srcPath/lib_managed/jars/org.apache.hadoop/hadoop-auth/hadoop-auth-2.7.1.jar
$srcPath/lib_managed/jars/commons-lang/commons-lang/commons-lang-2.6.jar
$srcPath/lib_managed/jars/com.google.code.gson/gson/gson-2.2.4.jar
$ivyPath/cache/org.jpmml/pmml-schema/jars/pmml-schema-1.2.9.jar
$ivyPath/cache/org.javassist/javassist/bundles/javassist-3.18.1-GA.jar
$srcPath/lib_managed/jars/org.apache.logging.log4j/log4j-api/log4j-api-2.4.1.jar
$ivyPath/cache/com.typesafe/config/bundles/config-1.2.1.jar
$srcPath/lib_managed/bundles/org.apache.curator/curator-framework/curator-framework-2.7.1.jar
$ivyPath/cache/org.json4s/json4s-jackson_2.11/jars/json4s-jackson_2.11-3.2.9.jar
$ivyPath/cache/commons-net/commons-net/jars/commons-net-3.1.jar
$ivyPath/cache/org.apache.hadoop/hadoop-annotations/jars/hadoop-annotations-2.7.1.jar
$ivyPath/cache/com.101tec/zkclient/jars/zkclient-0.3.jar
$ivyPath/cache/org.apache.commons/commons-math3/jars/commons-math3-3.6.jar
$ivyPath/cache/org.apache.camel/camel-core/bundles/camel-core-2.9.2.jar
$ivyPath/cache/com.google.code.findbugs/jsr305/jars/jsr305-3.0.0.jar
$srcPath/lib_managed/jars/commons-cli/commons-cli/commons-cli-1.2.jar
$srcPath/lib_managed/jars/com.jamesmurty.utils/java-xmlbuilder/java-xmlbuilder-0.4.jar
$srcPath/lib_managed/bundles/com.fasterxml.jackson.core/jackson-annotations/jackson-annotations-2.3.0.jar
$ivyPath/cache/com.pyruby/java-stub-server/jars/java-stub-server-0.12-sources.jar
$ivyPath/cache/com.esotericsoftware.reflectasm/reflectasm/jars/reflectasm-1.07-shaded.jar
$ivyPath/cache/io.spray/spray-client_2.11/bundles/spray-client_2.11-1.3.3.jar
$srcPath/lib_managed/jars/javax.servlet/servlet-api/servlet-api-2.5.jar
$srcPath/Utils/Audit/target/scala-2.11/auditadapters_2.11-1.0.jar
$srcPath/lib_managed/bundles/com.sun.jersey/jersey-server/jersey-server-1.9.jar
$srcPath/lib_managed/jars/org.json4s/json4s-native_2.11/json4s-native_2.11-3.2.9.jar
$ivyPath/cache/org.apache.directory.server/apacheds-i18n/bundles/apacheds-i18n-2.0.0-M15.jar
$srcPath/FactoriesOfModelInstanceFactory/JarFactoryOfModelInstanceFactory/target/scala-2.11/jarfactoryofmodelinstancefactory_2.11-1.0.jar
$ivyPath/cache/org.jpmml/pmml-evaluator/jars/pmml-evaluator-1.2.9.jar
$ivyPath/cache/commons-httpclient/commons-httpclient/jars/commons-httpclient-3.1.jar
$srcPath/TransactionService/target/scala-2.11/transactionservice_2.11-0.1.0.jar
$ivyPath/cache/io.netty/netty-all/jars/netty-all-4.0.23.Final.jar
$ivyPath/cache/com.esotericsoftware.kryo/kryo/bundles/kryo-2.21.jar
$srcPath/lib_managed/jars/org.scalameta/tokens_2.11/tokens_2.11-0.0.3.jar
$ivyPath/cache/com.ning/compress-lzf/bundles/compress-lzf-0.9.1.jar
$ivyPath/cache/org.scala-lang/scala-actors/jars/scala-actors-2.11.7.jar
$srcPath/lib_managed/bundles/org.apache.directory.server/apacheds-i18n/apacheds-i18n-2.0.0-M15.jar
$srcPath/Utils/Controller/target/scala-2.11/controller_2.11-1.0.jar
$ivyPath/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.6.1.jar
$srcPath/lib_managed/bundles/com.fasterxml.jackson.core/jackson-databind/jackson-databind-2.3.1.jar
$srcPath/lib_managed/jars/javax.activation/activation/activation-1.1.jar
$srcPath/Utils/ExtractData/target/scala-2.11/extractdata_2.11-1.0.jar
$srcPath/lib_managed/bundles/org.xerial.snappy/snappy-java/snappy-java-1.0.4.1.jar
$srcPath/lib_managed/jars/joda-time/joda-time/joda-time-2.9.1.jar
$ivyPath/cache/io.spray/spray-io_2.11/bundles/spray-io_2.11-1.3.3.jar
$ivyPath/cache/com.typesafe.akka/akka-actor_2.11/jars/akka-actor_2.11-2.3.2.jar
$ivyPath/cache/com.typesafe.akka/akka-actor_2.11/jars/akka-actor_2.11-2.3.9.jar
$ivyPath/cache/uk.co.bigbeeconsultants/bee-client_2.11/jars/bee-client_2.11-0.28.0.jar
$srcPath/lib_managed/jars/org.scalameta/foundation_2.11/foundation_2.11-0.0.3.jar
$ivyPath/cache/org.apache.hbase/hbase-protocol/jars/hbase-protocol-1.0.2.jar
$ivyPath/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.2.jar
$srcPath/lib_managed/jars/javax.xml.bind/jaxb-api/jaxb-api-2.2.2.jar
$ivyPath/cache/org.jdom/jdom/jars/jdom-1.1.jar
$srcPath/KvBase/target/scala-2.11/kvbase_2.11-0.1.0.jar
$ivyPath/cache/com.googlecode.json-simple/json-simple/jars/json-simple-1.1.jar
$srcPath/lib_managed/jars/org.apache.hbase/hbase-client/hbase-client-1.0.2.jar
$ivyPath/cache/org.apache.hbase/hbase-client/jars/hbase-client-1.0.2.jar
$ivyPath/cache/ch.qos.logback/logback-classic/jars/logback-classic-1.0.13.jar
$srcPath/lib_managed/jars/org.apache.hadoop/hadoop-annotations/hadoop-annotations-2.7.1.jar
$ivyPath/cache/io.spray/spray-util_2.11/bundles/spray-util_2.11-1.3.3.jar
$srcPath/lib_managed/jars/org.apache.commons/commons-collections4/commons-collections4-4.0.jar
$srcPath/lib_managed/jars/org.apache.zookeeper/zookeeper/zookeeper-3.4.6.jar
$ivyPath/cache/com.twitter/chill-java/jars/chill-java-0.5.0.jar
$ivyPath/cache/asm/asm/jars/asm-3.1.jar
$ivyPath/cache/org.scalameta/quasiquotes_2.11/jars/quasiquotes_2.11-0.0.3.jar
$ivyPath/cache/org.apache.curator/curator-recipes/bundles/curator-recipes-2.7.1.jar
$srcPath/MetadataAPI/target/scala-2.11/metadataapi_2.11-1.0.jar
$ivyPath/cache/org.apache.commons/commons-math/jars/commons-math-2.2.jar
$ivyPath/cache/com.sun.xml.bind/jaxb-impl/jars/jaxb-impl-2.2.3-1.jar
$srcPath/lib_managed/jars/org.scalameta/dialects_2.11/dialects_2.11-0.0.3.jar
$srcPath/lib_managed/jars/org.scalameta/parsers_2.11/parsers_2.11-0.0.3.jar
$srcPath/lib_managed/bundles/io.netty/netty/netty-3.9.0.Final.jar
$srcPath/lib_managed/jars/org.apache.commons/commons-math3/commons-math3-3.6.jar
$srcPath/lib_managed/jars/com.github.stephenc.findbugs/findbugs-annotations/findbugs-annotations-1.3.9-1.jar
$ivyPath/cache/com.fasterxml.jackson.core/jackson-databind/bundles/jackson-databind-2.3.1.jar
$ivyPath/cache/com.fasterxml.jackson.core/jackson-annotations/bundles/jackson-annotations-2.3.0.jar
$ivyPath/cache/org.apache.commons/commons-dbcp2/jars/commons-dbcp2-2.1.jar
$srcPath/lib_managed/jars/com.twitter/chill_2.11/chill_2.11-0.5.0.jar
$ivyPath/cache/org.json4s/json4s-native_2.11/jars/json4s-native_2.11-3.2.9.jar
$ivyPath/cache/junit/junit/jars/junit-3.8.1.jar
$srcPath/lib_managed/jars/org.mortbay.jetty/jetty-util/jetty-util-6.1.26.jar
$ivyPath/cache/com.typesafe/config/bundles/config-1.2.0.jar
$srcPath/lib_managed/jars/org.apache.hadoop/hadoop-common/hadoop-common-2.7.1.jar
$srcPath/lib_managed/jars/commons-digester/commons-digester/commons-digester-1.8.1.jar
$ivyPath/cache/commons-dbcp/commons-dbcp/jars/commons-dbcp-1.4.jar
$srcPath/lib_managed/bundles/com.google.guava/guava/guava-16.0.1.jar
$srcPath/lib_managed/jars/org.scala-lang/scala-actors/scala-actors-2.11.7.jar
$srcPath/lib_managed/jars/org.apache.logging.log4j/log4j-core/log4j-core-2.4.1.jar
$ivyPath/cache/org.apache.curator/curator-framework/bundles/curator-framework-2.7.1.jar
$srcPath/lib_managed/bundles/log4j/log4j/log4j-1.2.17.jar
$srcPath/lib_managed/jars/xmlenc/xmlenc/xmlenc-0.52.jar
$ivyPath/cache/org.mortbay.jetty/jetty-embedded/jars/jetty-embedded-6.1.26-sources.jar
$ivyPath/cache/org.apache.httpcomponents/httpclient/jars/httpclient-4.2.5.jar
$srcPath/lib_managed/bundles/org.apache.directory.api/api-asn1-api/api-asn1-api-1.0.0-M20.jar
$ivyPath/cache/net.jpountz.lz4/lz4/jars/lz4-1.2.0.jar
$srcPath/lib_managed/jars/org.scala-lang/scala-reflect/scala-reflect-2.11.7.jar
$srcPath/lib_managed/jars/org.apache.commons/commons-dbcp2/commons-dbcp2-2.1.jar
$ivyPath/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.4.jar
$srcPath/lib_managed/jars/org.jpmml/pmml-agent/pmml-agent-1.2.9.jar
$ivyPath/cache/commons-logging/commons-logging/jars/commons-logging-1.1.1.jar
$ivyPath/cache/net.java.dev.jets3t/jets3t/jars/jets3t-0.9.0.jar
$ivyPath/cache/org.scalatest/scalatest_2.11/bundles/scalatest_2.11-2.2.4.jar
$srcPath/lib_managed/bundles/com.sun.jersey/jersey-json/jersey-json-1.9.jar
$ivyPath/cache/io.spray/spray-can_2.11/bundles/spray-can_2.11-1.3.3.jar
$ivyPath/cache/commons-logging/commons-logging/jars/commons-logging-1.2.jar
$srcPath/Storage/TreeMap/target/scala-2.11/treemap_2.11-0.1.0.jar
$srcPath/Utils/KVInit/target/scala-2.11/kvinit_2.11-1.0.jar
$srcPath/Utils/Serialize/target/scala-2.11/serialize_2.11-1.0.jar
$ivyPath/cache/org.apache.htrace/htrace-core/jars/htrace-core-3.1.0-incubating.jar
$srcPath/Storage/StorageBase/target/scala-2.11/storagebase_2.11-1.0.jar
$ivyPath/cache/io.spray/spray-json_2.11/bundles/spray-json_2.11-1.3.2.jar
$ivyPath/cache/commons-codec/commons-codec/jars/commons-codec-1.9.jar
$srcPath/lib_managed/jars/org.apache.avro/avro/avro-1.7.4.jar
$ivyPath/cache/org.joda/joda-convert/jars/joda-convert-1.7.jar
$srcPath/lib_managed/bundles/com.codahale.metrics/metrics-core/metrics-core-3.0.2.jar
$ivyPath/cache/com.esotericsoftware.minlog/minlog/jars/minlog-1.2.jar
$ivyPath/cache/org.mortbay.jetty/servlet-api/jars/servlet-api-2.5.20110712-sources.jar
$ivyPath/cache/com.google.collections/google-collections/jars/google-collections-1.0.jar
$ivyPath/cache/ch.qos.logback/logback-core/jars/logback-core-1.0.12.jar
$ivyPath/cache/org.scala-lang.modules/scala-parser-combinators_2.11/bundles/scala-parser-combinators_2.11-1.0.1.jar
$ivyPath/cache/io.netty/netty/bundles/netty-3.7.0.Final.jar
$ivyPath/cache/org.scalameta/parsers_2.11/jars/parsers_2.11-0.0.3.jar
$ivyPath/cache/com.jamesmurty.utils/java-xmlbuilder/jars/java-xmlbuilder-0.4.jar
$srcPath/lib_managed/bundles/com.fasterxml.jackson.core/jackson-core/jackson-core-2.3.1.jar
$srcPath/lib_managed/bundles/com.datastax.cassandra/cassandra-driver-core/cassandra-driver-core-2.1.2.jar
$srcPath/MetadataAPIService/target/scala-2.11/metadataapiservice_2.11-1.0.jar
$srcPath/lib_managed/jars/org.scalameta/exceptions_2.11/exceptions_2.11-0.0.3.jar
$srcPath/Utils/ZooKeeper/CuratorClient/target/scala-2.11/zookeeperclient_2.11-1.0.jar
$srcPath/Metadata/target/scala-2.11/metadata_2.11-1.0.jar
$ivyPath/cache/org.scalameta/exceptions_2.11/jars/exceptions_2.11-0.0.3.jar
$ivyPath/cache/commons-pool/commons-pool/jars/commons-pool-1.5.4.jar
$ivyPath/cache/org.parboiled/parboiled-scala_2.11/jars/parboiled-scala_2.11-1.1.7.jar
$ivyPath/cache/org.parboiled/parboiled-core/jars/parboiled-core-1.1.7.jar
$srcPath/lib_managed/jars/io.netty/netty-all/netty-all-4.0.23.Final.jar
$srcPath/lib_managed/jars/org.apache.htrace/htrace-core/htrace-core-3.1.0-incubating.jar
$srcPath/lib_managed/bundles/org.apache.curator/curator-client/curator-client-2.7.1.jar
$ivyPath/cache/org.apache.commons/commons-compress/jars/commons-compress-1.4.1.jar
$srcPath/lib_managed/jars/org.codehaus.jackson/jackson-xc/jackson-xc-1.8.3.jar
$srcPath/lib_managed/jars/jline/jline/jline-0.9.94.jar
$ivyPath/cache/commons-beanutils/commons-beanutils/jars/commons-beanutils-1.7.0.jar
$ivyPath/cache/org.apache.avro/avro/jars/avro-1.7.4.jar
$ivyPath/cache/ch.qos.logback/logback-core/jars/logback-core-1.0.13.jar
$srcPath/KamanjaBase/target/scala-2.11/kamanjabase_2.11-1.0.jar
$ivyPath/cache/com.typesafe.akka/akka-testkit_2.11/jars/akka-testkit_2.11-2.3.9.jar
$ivyPath/cache/commons-configuration/commons-configuration/jars/commons-configuration-1.7.jar
$ivyPath/cache/commons-beanutils/commons-beanutils-core/jars/commons-beanutils-core-1.8.0.jar
$srcPath/Utils/CleanUtil/target/scala-2.11/cleanutil_2.11-1.0.jar
$ivyPath/cache/com.jcraft/jsch/jars/jsch-0.1.42.jar
$srcPath/lib_managed/jars/org.codehaus.jackson/jackson-core-asl/jackson-core-asl-1.9.13.jar
$ivyPath/cache/com.google.code.gson/gson/jars/gson-2.3.1.jar
$ivyPath/cache/io.spray/spray-routing_2.11/bundles/spray-routing_2.11-1.3.3.jar
$ivyPath/cache/org.apache.kafka/kafka_2.11/jars/kafka_2.11-0.8.2.2.jar
$ivyPath/cache/com.chuusai/shapeless_2.11/jars/shapeless_2.11-1.2.4.jar
$ivyPath/cache/org.apache.directory.api/api-asn1-api/bundles/api-asn1-api-1.0.0-M20.jar
$ivyPath/cache/com.google.code.findbugs/jsr305/jars/jsr305-1.3.9.jar
$srcPath/lib_managed/jars/commons-logging/commons-logging/commons-logging-1.2.jar
$srcPath/lib_managed/jars/org.ow2.asm/asm/asm-4.0.jar
$ivyPath/cache/org.apache.thrift/libthrift/jars/libthrift-0.9.2.jar
$ivyPath/cache/com.google.guava/guava/bundles/guava-19.0.jar
$srcPath/KamanjaUtils/target/scala-2.11/kamanjautils_2.11-1.0.jar
$srcPath/lib_managed/jars/com.esotericsoftware.reflectasm/reflectasm/reflectasm-1.07-shaded.jar
$srcPath/Utils/ZooKeeper/CuratorListener/target/scala-2.11/zookeeperlistener_2.11-1.0.jar
$srcPath/InputOutputAdapters/InputOutputAdapterBase/target/scala-2.11/inputoutputadapterbase_2.11-1.0.jar
$ivyPath/cache/org.scala-lang/scala-reflect/jars/scala-reflect-2.11.7.jar
$ivyPath/cache/org.jpmml/pmml-agent/jars/pmml-agent-1.2.9.jar
$ivyPath/cache/org.json4s/json4s-core_2.11/jars/json4s-core_2.11-3.2.9.jar
$srcPath/lib_managed/jars/commons-configuration/commons-configuration/commons-configuration-1.7.jar
$ivyPath/cache/org.codehaus.jackson/jackson-mapper-asl/jars/jackson-mapper-asl-1.9.13.jar
$ivyPath/cache/org.hamcrest/hamcrest-core/jars/hamcrest-core-1.3.jar
$ivyPath/cache/org.joda/joda-convert/jars/joda-convert-1.6.jar
$ivyPath/cache/org.scala-lang/scala-library/jars/scala-library-2.11.7.jar
$ivyPath/cache/commons-collections/commons-collections/jars/commons-collections-3.2.1.jar
$ivyPath/cache/org.scalameta/tokenizers_2.11/jars/tokenizers_2.11-0.0.3.jar
$srcPath/lib_managed/jars/org.slf4j/slf4j-api/slf4j-api-1.7.10.jar
$ivyPath/cache/org.apache.zookeeper/zookeeper/jars/zookeeper-3.4.6.jar
$ivyPath/cache/org.scalameta/foundation_2.11/jars/foundation_2.11-0.0.3.jar
$srcPath/lib_managed/bundles/org.apache.curator/curator-recipes/curator-recipes-2.7.1.jar
$ivyPath/cache/org.objenesis/objenesis/jars/objenesis-1.2.jar
$srcPath/lib_managed/bundles/org.scalatest/scalatest_2.11/scalatest_2.11-2.2.4.jar
$srcPath/lib_managed/jars/commons-net/commons-net/commons-net-3.1.jar
$srcPath/lib_managed/jars/commons-httpclient/commons-httpclient/commons-httpclient-3.1.jar
$srcPath/lib_managed/bundles/org.apache.directory.server/apacheds-kerberos-codec/apacheds-kerberos-codec-2.0.0-M15.jar
$srcPath/lib_managed/bundles/com.esotericsoftware.kryo/kryo/kryo-2.21.jar
$srcPath/lib_managed/jars/org.scala-lang/scala-compiler/scala-compiler-2.11.7.jar
$ivyPath/cache/org.scala-lang.modules/scala-xml_2.11/bundles/scala-xml_2.11-1.0.4.jar
$srcPath/Utils/JsonDataGen/target/scala-2.11/jsondatagen_2.11-0.1.0.jar
$ivyPath/cache/org.apache.httpcomponents/httpclient/jars/httpclient-4.1.2.jar
$srcPath/lib_managed/jars/org.jruby.jcodings/jcodings/jcodings-1.0.8.jar
$ivyPath/cache/ch.qos.logback/logback-classic/jars/logback-classic-1.0.12.jar
$srcPath/FactoriesOfModelInstanceFactory/JpmmlFactoryOfModelInstanceFactory/target/scala-2.11/jpmmlfactoryofmodelinstancefactory_2.11-1.0.jar
$ivyPath/cache/org.apache.directory.server/apacheds-kerberos-codec/bundles/apacheds-kerberos-codec-2.0.0-M15.jar
$ivyPath/cache/io.netty/netty/bundles/netty-3.9.0.Final.jar
$ivyPath/cache/org.scalameta/tokens_2.11/jars/tokens_2.11-0.0.3.jar
$ivyPath/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.10.jar
$srcPath/OutputMsgDef/target/scala-2.11/outputmsgdef_2.11-1.0.jar
$ivyPath/cache/org.jpmml/pmml-model/jars/pmml-model-1.2.9.jar
$ivyPath/cache/org.apache.httpcomponents/httpcore/jars/httpcore-4.2.4.jar
$ivyPath/cache/commons-configuration/commons-configuration/jars/commons-configuration-1.6.jar
$ivyPath/cache/log4j/log4j/bundles/log4j-1.2.17.jar
$ivyPath/cache/org.apache.curator/curator-test/jars/curator-test-2.8.0.jar
$srcPath/Storage/StorageManager/target/scala-2.11/storagemanager_2.11-0.1.0.jar
$srcPath/lib_managed/jars/org.jpmml/pmml-evaluator/pmml-evaluator-1.2.9.jar
$srcPath/lib_managed/jars/com.jcraft/jsch/jsch-0.1.42.jar
$ivyPath/cache/org.apache.curator/curator-recipes/bundles/curator-recipes-2.6.0.jar
$srcPath/lib_managed/bundles/org.scala-lang.modules/scala-parser-combinators_2.11/scala-parser-combinators_2.11-1.0.4.jar
$ivyPath/cache/org.ow2.asm/asm/jars/asm-4.0.jar
$ivyPath/cache/org.apache.hbase/hbase-annotations/jars/hbase-annotations-1.0.2.jar
$ivyPath/cache/com.google.protobuf/protobuf-java/bundles/protobuf-java-2.6.0.jar
$ivyPath/cache/org.tukaani/xz/jars/xz-1.0.jar
$ivyPath/cache/org.codehaus.jackson/jackson-jaxrs/jars/jackson-jaxrs-1.8.3.jar
$srcPath/SampleApplication/InterfacesSamples/target/scala-2.11/interfacessamples_2.11-1.0.jar
$srcPath/lib_managed/jars/junit/junit/junit-4.12.jar
$ivyPath/cache/io.spray/spray-http_2.11/bundles/spray-http_2.11-1.3.3.jar
$ivyPath/cache/org.xerial.snappy/snappy-java/bundles/snappy-java-1.1.1.7.jar
$srcPath/lib_managed/jars/org.apache.httpcomponents/httpcore/httpcore-4.2.4.jar
$ivyPath/cache/org.jvnet.mimepull/mimepull/jars/mimepull-1.9.5.jar
$ivyPath/cache/org.mortbay.jetty/servlet-api/jars/servlet-api-2.5.20110712.jar
$ivyPath/cache/org.apache.logging.log4j/log4j-core/jars/log4j-core-2.4.1.jar
$ivyPath/cache/org.mortbay.jetty/jetty/jars/jetty-6.1.26.jar
$ivyPath/cache/javax.activation/activation/jars/activation-1.1.jar
$ivyPath/cache/com.sdicons.jsontools/jsontools-core/jars/jsontools-core-1.7-sources.jar
$srcPath/HeartBeat/target/scala-2.11/heartbeat_2.11-0.1.0.jar
$srcPath/lib_managed/jars/org.apache.hbase/hbase-protocol/hbase-protocol-1.0.2.jar
$srcPath/lib_managed/jars/commons-beanutils/commons-beanutils/commons-beanutils-1.8.3.jar
$ivyPath/cache/org.scalameta/dialects_2.11/jars/dialects_2.11-0.0.3.jar
$srcPath/lib_managed/jars/com.esotericsoftware.minlog/minlog/minlog-1.2.jar
$ivyPath/cache/antlr/antlr/jars/antlr-2.7.7.jar
$srcPath/MetadataAPIServiceClient/target/scala-2.11/metadataapiserviceclient_2.11-0.1.jar
$ivyPath/cache/org.apache.curator/curator-framework/bundles/curator-framework-2.6.0.jar
$srcPath/lib_managed/jars/org.scala-lang/scala-library/scala-library-2.11.7.jar
$srcPath/Utils/SaveContainerDataComponent/target/scala-2.11/savecontainerdatacomponent_2.11-1.0.jar
$ivyPath/cache/org.apache.curator/curator-client/bundles/curator-client-2.6.0.jar
$srcPath/Storage/HashMap/target/scala-2.11/hashmap_2.11-0.1.0.jar
$srcPath/InputOutputAdapters/FileSimpleInputOutputAdapters/target/scala-2.11/filesimpleinputoutputadapters_2.11-1.0.jar
$ivyPath/cache/javax.servlet.jsp/jsp-api/jars/jsp-api-2.1.jar
$srcPath/lib_managed/jars/org.apache.cassandra/cassandra-thrift/cassandra-thrift-2.0.3.jar
$ivyPath/cache/com.sun.jersey/jersey-json/bundles/jersey-json-1.9.jar
$srcPath/KamanjaManager/target/scala-2.11/kamanjamanager_2.11-1.0.jar
$ivyPath/cache/org.ow2.asm/asm-tree/jars/asm-tree-4.0.jar
$ivyPath/cache/commons-io/commons-io/jars/commons-io-2.4.jar
$srcPath/lib_managed/jars/org.joda/joda-convert/joda-convert-1.6.jar
$ivyPath/cache/net.sf.jopt-simple/jopt-simple/jars/jopt-simple-3.2.jar
$ivyPath/cache/com.github.stephenc.findbugs/findbugs-annotations/jars/findbugs-annotations-1.3.9-1.jar
$srcPath/lib_managed/jars/org.json4s/json4s-ast_2.11/json4s-ast_2.11-3.2.9.jar
$srcPath/lib_managed/jars/org.apache.thrift/libthrift/libthrift-0.9.2.jar
$srcPath/Exceptions/target/scala-2.11/exceptions_2.11-1.0.jar
$srcPath/EnvContexts/SimpleEnvContextImpl/target/scala-2.11/simpleenvcontextimpl_2.11-1.0.jar
$ivyPath/cache/commons-cli/commons-cli/jars/commons-cli-1.2.jar
$srcPath/lib_managed/jars/javax.servlet.jsp/jsp-api/jsp-api-2.1.jar
$ivyPath/cache/com.yammer.metrics/metrics-core/jars/metrics-core-2.2.0.jar
$srcPath/FileDataConsumer/target/scala-2.11/filedataconsumer_2.11-0.1.0.jar
$ivyPath/cache/org.mapdb/mapdb/bundles/mapdb-1.0.6.jar
$srcPath/lib_managed/bundles/com.sun.jersey/jersey-core/jersey-core-1.9.jar
$srcPath/Pmml/MethodExtractor/target/scala-2.11/methodextractor_2.11-1.0.jar
$srcPath/lib_managed/jars/net.java.dev.jets3t/jets3t/jets3t-0.9.0.jar
$srcPath/lib_managed/bundles/com.google.protobuf/protobuf-java/protobuf-java-2.6.0.jar
$ivyPath/cache/net.java.dev.jna/jna/jars/jna-3.2.7.jar
$ivyPath/cache/org.scalameta/prettyprinters_2.11/jars/prettyprinters_2.11-0.0.3.jar
$srcPath/Utils/JdbcDataCollector/target/scala-2.11/jdbcdatacollector_2.11-1.0.jar
$srcPath/lib_managed/bundles/org.apache.shiro/shiro-core/shiro-core-1.2.3.jar
$ivyPath/cache/joda-time/joda-time/jars/joda-time-2.8.2.jar
$srcPath/Utils/SimpleKafkaProducer/target/scala-2.11/simplekafkaproducer_2.11-0.1.0.jar
$ivyPath/cache/voldemort/voldemort/jars/voldemort-0.96.jar
$srcPath/lib_managed/jars/asm/asm/asm-3.1.jar
$srcPath/Utils/UtilsForModels/target/scala-2.11/utilsformodels_2.11-1.0.jar
$ivyPath/cache/com.fasterxml.jackson.core/jackson-core/bundles/jackson-core-2.3.1.jar
$srcPath/lib_managed/jars/commons-pool/commons-pool/commons-pool-1.5.4.jar
$ivyPath/cache/org.json4s/json4s-ast_2.11/jars/json4s-ast_2.11-3.2.9.jar
$srcPath/lib_managed/jars/org.scalameta/prettyprinters_2.11/prettyprinters_2.11-0.0.3.jar
$ivyPath/cache/commons-lang/commons-lang/jars/commons-lang-2.6.jar
$ivyPath/cache/commons-digester/commons-digester/jars/commons-digester-1.8.1.jar
$ivyPath/cache/org.slf4j/slf4j-api/jars/slf4j-api-1.7.10.jar
$srcPath/lib_managed/jars/org.apache.hbase/hbase-annotations/hbase-annotations-1.0.2.jar
$ivyPath/cache/org.apache.hbase/hbase-common/jars/hbase-common-1.0.2.jar
$ivyPath/cache/io.spray/spray-testkit_2.11/jars/spray-testkit_2.11-1.3.3.jar
$ivyPath/cache/net.debasishg/redisclient_2.11/jars/redisclient_2.11-2.13.jar
$ivyPath/cache/org.scalameta/tokenquasiquotes_2.11/jars/tokenquasiquotes_2.11-0.0.3.jar
$srcPath/lib_managed/jars/com.thoughtworks.paranamer/paranamer/paranamer-2.6.jar
$ivyPath/cache/org.scala-lang/scala-compiler/jars/scala-compiler-2.11.0.jar
$ivyPath/cache/org.scala-lang/scala-compiler/jars/scala-compiler-2.11.7.jar
$srcPath/Utils/ZooKeeper/CuratorLeaderLatch/target/scala-2.11/zookeeperleaderlatch_2.11-1.0.jar
$srcPath/lib_managed/jars/org.scalameta/tokenquasiquotes_2.11/tokenquasiquotes_2.11-0.0.3.jar
$ivyPath/cache/com.google.guava/guava/bundles/guava-14.0.1.jar
$ivyPath/cache/com.sun.jersey/jersey-server/bundles/jersey-server-1.9.jar
$srcPath/lib_managed/jars/org.apache.httpcomponents/httpclient/httpclient-4.2.5.jar
$ivyPath/cache/com.twitter/chill_2.11/jars/chill_2.11-0.5.0.jar
$srcPath/lib_managed/bundles/org.mapdb/mapdb/mapdb-1.0.6.jar
$srcPath/Pmml/PmmlRuntime/target/scala-2.11/pmmlruntime_2.11-1.0.jar
$srcPath/lib_managed/jars/org.mortbay.jetty/jetty/jetty-6.1.26.jar
$srcPath/lib_managed/jars/com.sun.xml.bind/jaxb-impl/jaxb-impl-2.2.3-1.jar
$srcPath/lib_managed/jars/com.googlecode.json-simple/json-simple/json-simple-1.1.jar
$srcPath/AuditAdapters/AuditAdapterBase/target/scala-2.11/auditadapterbase_2.11-1.0.jar
$ivyPath/cache/javax.xml.stream/stax-api/jars/stax-api-1.0-2.jar
$ivyPath/cache/commons-pool/commons-pool/jars/commons-pool-1.6.jar
$ivyPath/cache/org.apache.hadoop/hadoop-auth/jars/hadoop-auth-2.7.1.jar
$srcPath/Pmml/PmmlUdfs/target/scala-2.11/pmmludfs_2.11-1.0.jar
$srcPath/lib_managed/jars/org.scala-lang/scalap/scalap-2.11.0.jar
$ivyPath/cache/org.codehaus.jettison/jettison/bundles/jettison-1.1.jar
$srcPath/lib_managed/jars/org.codehaus.jackson/jackson-mapper-asl/jackson-mapper-asl-1.9.13.jar
$srcPath/Utils/NodeInfoExtract/target/scala-2.11/nodeinfoextract_2.11-1.0.jar
$srcPath/Utils/UtilitySerivce/target/scala-2.11/utilityservice_2.11-1.0.jar
$srcPath/lib_managed/jars/org.json4s/json4s-jackson_2.11/json4s-jackson_2.11-3.2.9.jar
$ivyPath/cache/org.apache.commons/commons-lang3/jars/commons-lang3-3.1.jar
$ivyPath/cache/com.datastax.cassandra/cassandra-driver-core/bundles/cassandra-driver-core-2.1.2.jar
$ivyPath/cache/org.apache.shiro/shiro-core/bundles/shiro-core-1.2.3.jar
$ivyPath/cache/commons-beanutils/commons-beanutils/jars/commons-beanutils-1.8.3.jar
$srcPath/lib_managed/jars/commons-collections/commons-collections/commons-collections-3.2.1.jar
$srcPath/lib_managed/bundles/org.scala-lang.modules/scala-xml_2.11/scala-xml_2.11-1.0.4.jar
$srcPath/lib_managed/jars/org.apache.commons/commons-pool2/commons-pool2-2.3.jar
$srcPath/lib_managed/jars/org.tukaani/xz/xz-1.0.jar
$ivyPath/cache/org.mortbay.jetty/jetty-sslengine/jars/jetty-sslengine-6.1.26.jar
$srcPath/MessageDef/target/scala-2.11/messagedef_2.11-1.0.jar
$srcPath/lib_managed/jars/org.codehaus.jackson/jackson-jaxrs/jackson-jaxrs-1.8.3.jar
$ivyPath/cache/org.apache.commons/commons-pool2/jars/commons-pool2-2.3.jar
$ivyPath/cache/com.sleepycat/je/jars/je-4.0.92.jar
$srcPath/lib_managed/jars/org.slf4j/slf4j-log4j12/slf4j-log4j12-1.7.10.jar
$srcPath/lib_managed/jars/commons-io/commons-io/commons-io-2.4.jar
$srcPath/SampleApplication/CustomUdfLib/target/scala-2.11/customudflib_2.11-1.0.jar
$srcPath/lib_managed/jars/org.scalameta/quasiquotes_2.11/quasiquotes_2.11-0.0.3.jar
$srcPath/lib_managed/jars/commons-codec/commons-codec/commons-codec-1.10.jar
$ivyPath/cache/com.thoughtworks.paranamer/paranamer/jars/paranamer-2.6.jar
$ivyPath/cache/javax.servlet/servlet-api/jars/servlet-api-2.5.jar
$srcPath/lib_managed/jars/org.json4s/json4s-core_2.11/json4s-core_2.11-3.2.9.jar
$ivyPath/cache/com.google.code.gson/gson/jars/gson-2.2.4.jar
$srcPath/SecurityAdapters/SecurityAdapterBase/target/scala-2.11/securityadapterbase_2.11-1.0.jar
$ivyPath/cache/org.ow2.asm/asm-commons/jars/asm-commons-4.0.jar
$srcPath/lib_managed/jars/com.twitter/chill-java/chill-java-0.5.0.jar
$srcPath/lib_managed/jars/org.hamcrest/hamcrest-core/hamcrest-core-1.3.jar
$srcPath/lib_managed/jars/org.jpmml/pmml-schema/pmml-schema-1.2.9.jar
$ivyPath/cache/org.scalameta/trees_2.11/jars/trees_2.11-0.0.3.jar
$srcPath/Storage/HBase/target/scala-2.11/hbase_2.11-0.1.0.jar
$ivyPath/cache/joda-time/joda-time/jars/joda-time-2.9.1.jar
$ivyPath/cache/org.apache.cassandra/cassandra-thrift/jars/cassandra-thrift-2.0.3.jar
$srcPath/BaseTypes/target/scala-2.11/basetypes_2.11-0.1.0.jar
$srcPath/lib_managed/jars/com.google.code.findbugs/jsr305/jsr305-3.0.0.jar
$ivyPath/cache/org.apache.hadoop/hadoop-common/jars/hadoop-common-2.7.1.jar
$srcPath/BaseFunctions/target/scala-2.11/basefunctions_2.11-0.1.0.jar
$ivyPath/cache/commons-codec/commons-codec/jars/commons-codec-1.10.jar
$ivyPath/cache/junit/junit/jars/junit-4.12.jar
$srcPath/Pmml/PmmlCompiler/target/scala-2.11/pmmlcompiler_2.11-1.0.jar
$srcPath/lib_managed/jars/org.objenesis/objenesis/objenesis-1.2.jar
$ivyPath/cache/org.codehaus.jackson/jackson-core-asl/jars/jackson-core-asl-1.9.13.jar
$srcPath/lib_managed/jars/org.apache.hbase/hbase-common/hbase-common-1.0.2.jar
$srcPath/DataDelimiters/target/scala-2.11/datadelimiters_2.11-1.0.jar
EOF
cp $ivyPath/cache/jline/jline/jars/jline-0.9.94.jar $systemlib
cp $ivyPath/cache/org.scala-lang/scalap/jars/scalap-2.11.0.jar $systemlib
cp $srcPath/MetadataBootstrap/Bootstrap/target/scala-2.11/bootstrap_2.11-1.0.jar $systemlib
cp $ivyPath/cache/org.apache.commons/commons-math3/jars/commons-math3-3.1.1.jar $systemlib
cp $srcPath/Storage/Cassandra/target/scala-2.11/cassandra_2.11-0.1.0.jar $systemlib
cp $srcPath/Utils/Security/SimpleApacheShiroAdapter/target/scala-2.11/simpleapacheshiroadapter_2.11-1.0.jar $systemlib
cp $srcPath/lib_managed/jars/org.scalameta/tokenizers_2.11/tokenizers_2.11-0.0.3.jar $systemlib
cp $ivyPath/cache/xmlenc/xmlenc/jars/xmlenc-0.52.jar $systemlib
cp $srcPath/lib_managed/jars/org.jpmml/pmml-model/pmml-model-1.2.9.jar $systemlib
cp $ivyPath/cache/org.apache.httpcomponents/httpcore/jars/httpcore-4.1.2.jar $systemlib
cp $srcPath/lib_managed/jars/commons-dbcp/commons-dbcp/commons-dbcp-1.4.jar $systemlib
cp $srcPath/lib_managed/jars/javax.xml.stream/stax-api/stax-api-1.0-2.jar $systemlib
cp $srcPath/lib_managed/jars/org.scalameta/trees_2.11/trees_2.11-0.0.3.jar $systemlib
cp $srcPath/lib_managed/jars/org.jruby.joni/joni/joni-2.1.2.jar $systemlib
cp $ivyPath/cache/com.codahale.metrics/metrics-core/bundles/metrics-core-3.0.2.jar $systemlib
cp $ivyPath/cache/io.spray/spray-httpx_2.11/bundles/spray-httpx_2.11-1.3.3.jar $systemlib
cp $ivyPath/cache/commons-codec/commons-codec/jars/commons-codec-1.4.jar $systemlib
cp $ivyPath/cache/com.101tec/zkclient/jars/zkclient-0.6.jar $systemlib
# -------------------- end of generated cp commands --------------------
cp $ivyPath/cache/io.spray/spray-json_2.11/bundles/spray-json_2.11-1.3.2.jar $systemlib
cp $ivyPath/cache/com.codahale.metrics/metrics-core/bundles/metrics-core-3.0.2.jar $systemlib
cp $ivyPath/cache/org.json4s/json4s-ast_2.11/jars/json4s-ast_2.11-3.2.9.jar $systemlib
cp $ivyPath/cache/io.spray/spray-testkit_2.11/jars/spray-testkit_2.11-1.3.3.jar $systemlib
cp $srcPath/Utils/Migrate/MigrateBase/target/migratebase-1.0.jar $systemlib
cp $srcPath/Utils/Migrate/SourceVersion/MigrateFrom_V_1_1/target/scala-2.10/migratefrom_v_1_1_2.10-1.0.jar $systemlib
cp $srcPath/Utils/Migrate/SourceVersion/MigrateFrom_V_1_2/target/scala-2.10/migratefrom_v_1_2_2.10-1.0.jar $systemlib
cp $srcPath/Utils/Migrate/DestinationVersion/MigrateTo_V_1_3/target/scala-2.11/migrateto_v_1_3_2.11-1.0.jar $systemlib
cp $srcPath/Storage/Cassandra/target/scala-2.11/*.jar $systemlib
cp $srcPath/Storage/HashMap/target/scala-2.11/*.jar $systemlib
cp $srcPath/Storage/HBase/target/scala-2.11/*.jar $systemlib
#cp $srcPath/Storage/Redis/target/scala-2.11/*.jar $systemlib
cp $srcPath/Storage/StorageBase/target/scala-2.11/storagebase_2.11-1.0.jar $systemlib
cp $srcPath/Storage/StorageManager/target/scala-2.11/*.jar $systemlib
cp $srcPath/Storage/TreeMap/target/scala-2.11/*.jar $systemlib
#cp $srcPath/Storage/Voldemort/target/scala-2.11/*.jar $systemlib
cp $srcPath/InputOutputAdapters/InputOutputAdapterBase/target/scala-2.11/*.jar $systemlib
cp $srcPath/KamanjaUtils/target/scala-2.11/kamanjautils_2.11-1.0.jar $systemlib
cp $srcPath/SecurityAdapters/SecurityAdapterBase/target/scala-2.11/*.jar $systemlib
cp $srcPath/Utils/SaveContainerDataComponent/target/scala-2.11/SaveContainerDataComponent* $systemlib
cp $srcPath/Utils/UtilsForModels/target/scala-2.11/utilsformodels*.jar $systemlib
# sample configs
#echo "copy sample configs..."
cp $srcPath/Utils/KVInit/src/main/resources/*cfg $systemlib
# Generate keystore file
#echo "generating keystore..."
#keytool -genkey -keyalg RSA -alias selfsigned -keystore $installPath/config/keystore.jks -storepass password -validity 360 -keysize 2048
#copy kamanja to bin directory
cp $srcPath/Utils/Script/kamanja $bin
#cp $srcPath/Utils/Script/MedicalApp.sh $bin
cp $srcPath/MetadataAPI/target/scala-2.11/classes/HelpMenu.txt $installPath/input
# *******************************
# COPD messages data prep
# *******************************
# Prepare test messages and copy them into place
echo "Prepare test messages and copy them into place..."
# *******************************
# Copy documentation files
# *******************************
cd $srcPath/Documentation
cp -rf * $installPath/documentation
# *******************************
# Copy ClusterInstall
# *******************************
mkdir -p $installPath/ClusterInstall
cp -rf $srcPath/SampleApplication/ClusterInstall/* $installPath/ClusterInstall/
cp $srcPath/Utils/NodeInfoExtract/target/scala-2.11/NodeInfoExtract* $installPath/ClusterInstall/
# *******************************
# copy models, messages, containers, config, scripts, types messages data prep
# *******************************
#HelloWorld
cd $srcPath/SampleApplication/HelloWorld/data
cp * $installPath/input/SampleApplications/data
cd $srcPath/SampleApplication/HelloWorld/message
cp * $installPath/input/SampleApplications/metadata/message
cd $srcPath/SampleApplication/HelloWorld/model
cp * $installPath/input/SampleApplications/metadata/model
cd $srcPath/SampleApplication/HelloWorld/template
cp -rf * $installPath/input/SampleApplications/template
cd $srcPath/SampleApplication/HelloWorld/config
cp -rf * $installPath/config
#HelloWorld
#Medical
cd $srcPath/SampleApplication/Medical/SampleData
cp *.csv $installPath/input/SampleApplications/data
cp *.csv.gz $installPath/input/SampleApplications/data
cd $srcPath/SampleApplication/Medical/MessagesAndContainers/Fixed/Containers
cp * $installPath/input/SampleApplications/metadata/container
cd $srcPath/SampleApplication/Medical/Functions
cp * $installPath/input/SampleApplications/metadata/function
cd $srcPath/SampleApplication/Medical/MessagesAndContainers/Fixed/Messages
cp * $installPath/input/SampleApplications/metadata/message
cd $srcPath/SampleApplication/Medical/Models
cp *.* $installPath/input/SampleApplications/metadata/model
cd $srcPath/SampleApplication/Medical/Types
cp * $installPath/input/SampleApplications/metadata/type
cd $srcPath/SampleApplication/Medical/template
cp -rf * $installPath/input/SampleApplications/template
cd $srcPath/SampleApplication/Medical/Configs
cp -rf * $installPath/config
#Medical
#Telecom
cd $srcPath/SampleApplication/Telecom/data
cp * $installPath/input/SampleApplications/data
cd $srcPath/SampleApplication/Telecom/metadata/container
cp * $installPath/input/SampleApplications/metadata/container
cd $srcPath/SampleApplication/Telecom/metadata/message
cp * $installPath/input/SampleApplications/metadata/message
cd $srcPath/SampleApplication/Telecom/metadata/model
cp *.* $installPath/input/SampleApplications/metadata/model
cd $srcPath/SampleApplication/Telecom/metadata/template
cp -rf * $installPath/input/SampleApplications/template
cd $srcPath/SampleApplication/Telecom/metadata/config
cp -rf * $installPath/config
#Telecom
#Finance
cd $srcPath/SampleApplication/InterfacesSamples/src/main/resources/sample-app/data
cp * $installPath/input/SampleApplications/data
cd $srcPath/SampleApplication/InterfacesSamples/src/main/resources/sample-app/metadata/container
cp * $installPath/input/SampleApplications/metadata/container
cd $srcPath/SampleApplication/InterfacesSamples/src/main/resources/sample-app/metadata/message
cp * $installPath/input/SampleApplications/metadata/message
cd $srcPath/SampleApplication/InterfacesSamples/src/main/resources/sample-app/metadata/model
cp *.* $installPath/input/SampleApplications/metadata/model
cd $srcPath/SampleApplication/InterfacesSamples/src/main/resources/sample-app/metadata/type
cp * $installPath/input/SampleApplications/metadata/type
cd $srcPath/SampleApplication/InterfacesSamples/src/main/resources/sample-app/metadata/template
cp -rf * $installPath/input/SampleApplications/template
cd $srcPath/SampleApplication/InterfacesSamples/src/main/resources/sample-app/metadata/config
cp -rf * $installPath/config
#Finance
cd $srcPath/SampleApplication/EasyInstall/template
cp -rf * $installPath/template
cd $srcPath/SampleApplication/EasyInstall
cp SetPaths.sh $installPath/bin/
bash $installPath/bin/SetPaths.sh $KafkaRootDir
chmod 0700 $installPath/input/SampleApplications/bin/*sh
echo "Kamanja install complete..."
| true
|
b845d143a58082f2f0f8096174342f6b40ea07b6
|
Shell
|
Tommoa/tectonic-fontawesome-action
|
/entrypoint.sh
|
UTF-8
| 134
| 2.578125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Compile a LaTeX document with tectonic, run biber for the bibliography,
# then compile again so citations resolve.
#
# $1 - path to the .tex file to build.
set -e
echo "Compiling $1"
# First pass: -k keeps intermediate files and -r0 disables tectonic's own
# rerun logic so biber can be run by hand in between.
# Quoting "$1" fixes builds of files whose path contains spaces.
tectonic -k -r0 "$1"
# biber exits non-zero when the document has no bibliography; that is not
# fatal, so error checking is suspended around it (original behaviour kept).
set +e
biber "$(dirname "$1")/$(basename "$1" .tex)"
set -e
# Second pass picks up the .bbl produced by biber.
tectonic "$1"
| true
|
e2da107e786c9587cc333fb510639e8ee66666dd
|
Shell
|
c3r/pdf-metadata-script
|
/books.sh
|
UTF-8
| 6,955
| 4.125
| 4
|
[] |
no_license
|
#!/bin/bash
# Batch PDF renamer / metadata re-tagger driven by a CSV file.
# Usage: books.sh <csv>   with records formatted as  file;title;author
# Global preferences
# ------------------
CHECK_RUN=false # You would want to have this set to 'true' at first and make a test run before changing production data
# NOTE: IFS is changed for the whole script; all later word splitting uses ';'.
IFS=';' # Delimiter in the CSV file
DIVIDER='-' # Change whitespace in to this divider
# Enable checking window size for drawing lines
# ---------------------------------------------
shopt -s checkwinsize
# $1 is the CSV file; the main loop reads it on fd 10.
if [ -z "$1" ]; then
echo "No input file"
exit 1
fi
# Print a horizontal rule of box-drawing characters followed by a newline.
# Width is $1 when given, otherwise the terminal width from $COLUMNS
# (kept current by `shopt -s checkwinsize` above).
function draw_line() {
  local width=${1:-$COLUMNS}
  local i
  for ((i = 0; i < width; i++)); do
    printf '\u2500'
  done
  printf '\n'
}
# Main loop: one CSV record per iteration, formatted as
#   <file name>;<new title>;<new author>
# The CSV is read on fd 10 so the interactive `read`/`select` prompts in
# the loop body can keep using stdin.
while IFS=$IFS read -u 10 -ra tuple; do
  draw_line
  omit=false
  old_fn=${tuple[0]}
  if [ ! -f "$old_fn" ]; then
    echo "$old_fn does not exist."
    omit=true
  fi
  if $omit; then
    continue
  fi
  printf '\n'
  printf 'File name: %s\n' "${old_fn}"
  select yn in "Proceed" "Omit"; do
    case $yn in
      Proceed) break;;
      Omit)
        omit=true
        break;;
    esac
  done
  # Bug fix: an early "Omit" used to be ignored until the final summary
  # prompt; honour it immediately and skip the metadata questions.
  if $omit; then
    continue
  fi
  old_title=$(exiftool -s3 -Title "${old_fn}")
  old_auth=$(exiftool -s3 -Author "${old_fn}")
  # Title resolution: prefer the CSV value, fall back to asking the user,
  # then to the existing metadata; when CSV and metadata disagree, let the
  # user pick which one wins.
  new_title=${tuple[1]}
  if [[ -z $new_title ]]; then
    if [[ -n $old_title ]]; then
      echo "The original title is: $old_title"
    fi
    echo "There is no new title defined in CSV file. Do you want to define your own?"
    select yn in "Yes" "No"; do
      case $yn in
        Yes)
          printf "Please type the new title: "
          read -r new_title
          break;;
        No)
          break;;
      esac
    done
  fi
  change_title=true
  if [[ -z "${new_title}" ]] && [[ -n "${old_title}" ]]; then
    new_title=$old_title
  elif [[ -n "${new_title}" ]] && [[ -n "${old_title}" ]]; then
    # Quoted comparison: unquoted, any multi-word title crashed `[`.
    if [ "${old_title}" != "${new_title}" ]; then
      printf "\nBoth new nad old titles are defined and are different. Which title you want to go with?\n"
      select yn in "${old_title} (from metadata)" "${new_title}"; do
        case $yn in
          "${old_title} (from metadata)")
            new_title=${old_title}
            break;;
          "${new_title}") break;;
        esac
      done
    else
      # Typo fix: this previously assigned the literal string 'flase'.
      change_title=false
    fi
  fi
  # Author resolution: same precedence rules as the title.
  new_auth=${tuple[2]}
  if [[ -z $new_auth ]]; then
    if [[ -n $old_auth ]]; then
      echo "The original author is: $old_auth"
    fi
    echo "There is no new author defined in CSV file. Do you want to define your own?"
    select yn in "Yes" "No"; do
      case $yn in
        Yes)
          printf "Please type in the new author: "
          read -r new_auth
          break;;
        No)
          break;;
      esac
    done
  fi
  change_auth=true
  if [[ -z "${new_auth}" ]] && [[ -n "${old_auth}" ]]; then
    new_auth=$old_auth
  elif [[ -n "${new_auth}" ]] && [[ -n "${old_auth}" ]]; then
    if [ "${old_auth}" != "${new_auth}" ]; then
      printf "\nBoth new and original authors are defined and are different. Which one you want to go with?\n"
      select yn in "${old_auth} (from metadata)" "${new_auth}"; do
        case $yn in
          "${old_auth} (from metadata)")
            new_auth=${old_auth}
            break;;
          "${new_auth}") break;;
        esac
      done
    else
      change_auth=false
    fi
  fi
  # Generate a new filename: transliterate the title to ASCII, strip all
  # non-alphanumeric/non-space characters, squeeze space runs, replace
  # spaces with $DIVIDER and lower-case the result.
  # Bug fix: a stray '}' after TRANSLIT made iconv reject the conversion
  # on every file, producing an empty name (".pdf").
  new_fn=$(echo "$new_title" |
    iconv -f utf8 -t ascii//TRANSLIT |
    tr -dc '[:alnum:][:space:]\n\r' |
    tr -s ' ' |
    tr ' ' "$DIVIDER" |
    tr '[:upper:]' '[:lower:]')
  new_fn=${new_fn}.pdf
  printf "\nSummary of changes:\n"
  draw_line 20
  printf 'Original title: %s\n' "${old_title}"
  printf 'Original author: %s\n' "${old_auth}"
  printf 'New file name: %s\n' "${new_fn}"
  printf 'New title: %s\n' "${new_title}"
  printf 'New author: %s\n\n' "${new_auth}"
  select yn in "Proceed" "Omit"; do
    case $yn in
      Proceed) break;;
      Omit)
        omit=true
        break;;
    esac
  done
  if $omit; then
    echo "Omitting file..."
    continue
  fi
  # Rename the file to the newly generated file name
  # ------------------------------------------------
  echo "Renaming file to '${new_fn}'..."
  if [ "$CHECK_RUN" = false ]; then
    mv "${old_fn}" "${new_fn}" &> /dev/null
    retval=$?
    if [ $retval -ne 0 ]; then
      echo "Renaming file failed... The return code was: $retval"
    fi
  fi
  echo "File renamed."
  # Remove all the metadata of the processed file
  # ---------------------------------------------
  echo "Removing all metadata..."
  if [ "$CHECK_RUN" = false ]; then
    exiftool -all= "${new_fn}" &> /dev/null
    retval=$?
    if [ $retval -ne 0 ]; then
      echo "Removing metadata failed... The return code was: $retval"
    fi
  fi
  echo "Metadata removed."
  # Write the title back.  Because `exiftool -all=` above wiped every tag,
  # the title is always re-applied; the change_title flag (which, due to
  # the 'flase' typo, could never be false before) now only selects the
  # message.  This preserves the original effective behaviour while
  # avoiding data loss for unchanged titles.
  if [ "$change_title" = true ]; then
    echo "Setting new title to '${new_title}'..."
  else
    echo "Preserving original title: '${new_title}'"
  fi
  if [ "$CHECK_RUN" = false ]; then
    exiftool -Title="${new_title}" "${new_fn}" &> /dev/null
    retval=$?
    if [ $retval -ne 0 ]; then
      echo "Setting new title failed... The return code was: $retval"
    fi
  fi
  # Same for the author.
  if [ "$change_auth" = true ]; then
    echo "Setting new author to '${new_auth}'..."
  else
    echo "Preserving original author: '${new_auth}'"
  fi
  if [ "$CHECK_RUN" = false ]; then
    exiftool -Author="${new_auth}" "${new_fn}" &> /dev/null
    retval=$?
    if [ $retval -ne 0 ]; then
      echo "Setting new author failed... The return code was: $retval"
    fi
  fi
  echo "All done."
done 10<"$1"
| true
|
fed7a1d4bde49896335559df7a68851650d6bfd6
|
Shell
|
NuTufts/ubresnet
|
/weights/ssnet2018caffe/copy_data.sh
|
UTF-8
| 411
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Fetch the SSNet model weights from the remote Meitner machine.
# Note: prefer the make_link.sh script when at all possible.

# Remote account; override below if it differs from the local user.
remote_user=$(whoami)
#remote_user="override"

# On Meitner
SSNET_WEIGHT_DIR="/media/hdd1/larbys/ssnet_model_weights/"
scp "${remote_user}@130.64.84.151:${SSNET_WEIGHT_DIR}/segmentation_pixelwise_ikey_plane*.caffemodel" .
# On Tufts Cluster
# On UBOONE machines
| true
|
5112f29e0aff70b6c9b60c71a8776efc641dc1b4
|
Shell
|
acdlite/bash-dotfiles
|
/.bashrc
|
UTF-8
| 148
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
# Interactive shells (PS1 set) load the full profile.
if [ -n "$PS1" ]; then
  source ~/.bash_profile
fi

# This loads RVM into a shell session, when it is installed.
if [[ -s "$HOME/.rvm/scripts/rvm" ]]; then
  source "$HOME/.rvm/scripts/rvm"
fi
| true
|
62a666c994ba372ed7d45e262cf75bfb4b1cc060
|
Shell
|
vhernandomartin/ocp4-ipibm-acm-ztp
|
/01_create_spoke_vms_OCP3M.sh
|
UTF-8
| 5,994
| 3.125
| 3
|
[] |
no_license
|
#!/bin/bash
# Provision the libvirt network and the three master VMs for an OCP spoke
# cluster (compact 3-node).  Everything below is site configuration
# consumed by set_vars()/networks()/create_vms()/set_dns_hosts().
NET_TYPE=ipv6 # Set this var accordingly -> ipv4/ipv6
# Cluster identity: VM names, libvirt network name and DNS domains.
SPOKEVMS=(spoke1-master-1 spoke1-master-2 spoke1-master-3)
NETWORK=lab-spoke
CLUSTER_NAME=mgmt-spoke1
DOMAIN=example.com
INSTALLER_VM=ipibm-installer
OCP_PARENT_DOMAIN=lab.example.com
# IPv4 addressing plan: network, VIPs, DHCP range and static reservations.
SPOKE_CIDR_IPV4=192.168.120.1/24
SPOKE_IPV4_IPROUTE=192.168.120.1
SPOKE_IPV4_PREFIX=24
SPOKE_IPV4_INSTALLER_IP=192.168.120.100
SPOKE_IPV4_API_IP=192.168.120.10
SPOKE_IPV4_INGRESS_IP=192.168.120.11
IPV4_RANGE_START=192.168.120.2
IPV4_RANGE_END=192.168.120.254
MASTERS_IPV4=(192.168.120.20 192.168.120.21 192.168.120.22)
MASTERS_MAC_IPV4=(aa:aa:aa:aa:de:01 aa:aa:aa:aa:de:02 aa:aa:aa:aa:de:03)
INSTALLER_MAC_IPV4=aa:aa:aa:aa:de:00
# IPv6 addressing plan; the MAC entries here are DHCPv6 DUIDs (client ids),
# not raw interface MACs.
SPOKE_CIDR_IPV6=2510:49:0:1101::1/64
SPOKE_IPV6_IPROUTE=2510:49:0:1101::1
SPOKE_IPV6_PREFIX=64
SPOKE_IPV6_INSTALLER_IP=2510:49:0:1101::100
SPOKE_IPV6_API_IP=2510:49:0:1101::10
SPOKE_IPV6_INGRESS_IP=2510:49:0:1101::11
IPV6_RANGE_START=2510:49:0:1101::2
IPV6_RANGE_END=2510:49:0:1101::ffff
MASTERS_IPV6=(2510:49:0:1101::20 2510:49:0:1101::21 2510:49:0:1101::22)
MASTERS_MAC_IPV6=(00:03:00:01:aa:aa:aa:aa:de:01 00:03:00:01:aa:aa:aa:aa:de:02 00:03:00:01:aa:aa:aa:aa:de:03)
INSTALLER_MAC_IPV6=00:03:00:01:aa:aa:aa:aa:de:00
function set_vars () {
# Populate all address-family-specific globals consumed by networks(),
# create_vms() and set_dns_hosts().
#
# $1 - address family selector: "ipv4" or "ipv6"
OCP_DOMAIN=${CLUSTER_NAME}.${DOMAIN}
# NOTE(review): this shadows the shell's own $PWD special variable —
# confirm nothing relies on the builtin behaviour.
PWD=$(/usr/bin/pwd)
IP_TYPE=$1
if [ "${IP_TYPE}" = "ipv4" ]; then
echo -e "+ Setting vars for a ipv4 cluster."
echo -e "+ The network range configured is: ${SPOKE_CIDR_IPV4}"
IPV="ip4"
IPFAMILY="ipv4"
IPROUTE=${SPOKE_IPV4_IPROUTE}
IPPREFIX=${SPOKE_IPV4_PREFIX}
INSTALLER_IP=${SPOKE_IPV4_INSTALLER_IP}
API_IP=${SPOKE_IPV4_API_IP}
INGRESS_IP=${SPOKE_IPV4_INGRESS_IP}
# "host mac" vs "host id": the attribute name used in the libvirt
# <dhcp> reservation entries (MAC for v4, DUID for v6).
HOSTIDMAC="host mac"
IP_RANGE_START=${IPV4_RANGE_START}
IP_RANGE_END=${IPV4_RANGE_END}
MASTERS_IP=("${MASTERS_IPV4[@]}")
MASTERS_MAC=("${MASTERS_MAC_IPV4[@]}")
INSTALLER_MAC=${INSTALLER_MAC_IPV4}
elif [ "${IP_TYPE}" = "ipv6" ]; then
echo -e "+ Setting vars for a ipv6 cluster."
echo -e "+ The network range configured is: ${SPOKE_CIDR_IPV6}"
IPV="ip6"
IPFAMILY="ipv6"
IPROUTE=${SPOKE_IPV6_IPROUTE}
IPPREFIX=${SPOKE_IPV6_PREFIX}
INSTALLER_IP=${SPOKE_IPV6_INSTALLER_IP}
API_IP=${SPOKE_IPV6_API_IP}
INGRESS_IP=${SPOKE_IPV6_INGRESS_IP}
HOSTIDMAC="host id"
IP_RANGE_START=${IPV6_RANGE_START}
IP_RANGE_END=${IPV6_RANGE_END}
MASTERS_IP=("${MASTERS_IPV6[@]}")
MASTERS_MAC=("${MASTERS_MAC_IPV6[@]}")
INSTALLER_MAC=${INSTALLER_MAC_IPV6}
else
echo -e "+ A valid network type value should be provided: ipv4/ipv6."
# Bug fix: the script used to fall through here and keep running with
# none of the globals set; abort instead.
exit 1
fi
}
function networks () {
# Render the libvirt network definition for the spoke — NAT forwarding,
# static DHCP reservations for the masters/installer, and DNS records for
# the API and ingress VIPs — then define, autostart and start it.
# All substituted values come from set_vars().
echo -e "\n+ Defining virsh network and applying configuration..."
cat << EOF > ${NETWORK}-network.xml
<network>
<name>${NETWORK}</name>
<forward mode='nat'>
<nat>
<port start='1024' end='65535'/>
</nat>
</forward>
<bridge name='${NETWORK}' stp='on' delay='0'/>
<mac address='52:54:00:ec:9b:dd'/>
<domain name='${NETWORK}'/>
<dns>
<host ip='${API_IP}'>
<hostname>api</hostname>
<hostname>api-int.${OCP_DOMAIN}</hostname>
<hostname>api.${OCP_DOMAIN}</hostname>
</host>
<host ip='${INGRESS_IP}'>
<hostname>apps</hostname>
<hostname>console-openshift-console.apps.${OCP_DOMAIN}</hostname>
<hostname>oauth-openshift.apps.${OCP_DOMAIN}</hostname>
<hostname>prometheus-k8s-openshift-monitoring.apps.${OCP_DOMAIN}</hostname>
<hostname>canary-openshift-ingress-canary.apps.${OCP_DOMAIN}</hostname>
<hostname>assisted-service-open-cluster-management.apps.${OCP_DOMAIN}</hostname>
<hostname>assisted-service-assisted-installer.apps.${OCP_DOMAIN}</hostname>
</host>
</dns>
<ip family='${IPFAMILY}' address='${IPROUTE}' prefix='${IPPREFIX}'>
<dhcp>
<range start='${IP_RANGE_START}' end='${IP_RANGE_END}'/>
<${HOSTIDMAC}='${MASTERS_MAC[0]}' name='${SPOKEVMS[0]}' ip='${MASTERS_IP[0]}'/>
<${HOSTIDMAC}='${MASTERS_MAC[1]}' name='${SPOKEVMS[1]}' ip='${MASTERS_IP[1]}'/>
<${HOSTIDMAC}='${MASTERS_MAC[2]}' name='${SPOKEVMS[2]}' ip='${MASTERS_IP[2]}'/>
<${HOSTIDMAC}='${INSTALLER_MAC}' name='${INSTALLER_VM}' ip='${INSTALLER_IP}'/>
</dhcp>
</ip>
</network>
EOF
# Register the network persistently and bring it up now.
virsh net-define ${NETWORK}-network.xml
virsh net-autostart ${NETWORK}
virsh net-start ${NETWORK}
}
function add_nic_installer_vm () {
# Attach a second NIC on the spoke network to the existing installer VM
# (persistent config only), then hard power-cycle it so the guest picks
# up the new interface.
virsh attach-interface --domain ${INSTALLER_VM} --type network --source ${NETWORK} --mac ${INSTALLER_MAC_IPV4} --alias net2 --config
virsh destroy ${INSTALLER_VM}
virsh start ${INSTALLER_VM}
}
function create_vms () {
# Create one VM per SPOKEVMS entry (8 vCPU / 33 GB RAM / 130 GB disk) and
# immediately power it off; the cluster installer boots them later.
ID=1
for s in ${SPOKEVMS[@]}
do
qemu-img create -f qcow2 /var/lib/libvirt/images/$s.qcow2 130G
# NOTE(review): the MAC is hard-coded as aa:aa:aa:aa:de:0${ID}; it mirrors
# the MASTERS_MAC_IPV4 entries above and only works for single-digit IDs —
# confirm both stay in sync if either list changes.
virt-install --virt-type=kvm --name=$s --ram 33792 --vcpus 8 --hvm --network network=${NETWORK},model=virtio,mac=aa:aa:aa:aa:de:0${ID} --disk /var/lib/libvirt/images/$s.qcow2,device=disk,bus=scsi,format=qcow2 --os-type Linux --os-variant rhel8.0 --graphics none --import --noautoconsole
virsh destroy $s
let ID++
done
}
function set_dns_hosts () {
# Wait until the installer VM obtains a DHCP lease on the spoke network,
# then publish DNS records for the installer and the hub's
# assisted-service routes in the libvirt network (live + persistent).
# NOTE(review): IP is expected to be unset/empty on entry — a value
# inherited from the environment would skip the wait loop entirely.  The
# lease lookup also matches the IPv4 MAC only; confirm the ipv6 path.
while [[ ${IP} = "" ]]
do
IP=$(virsh net-dhcp-leases ${NETWORK} |grep ${INSTALLER_MAC_IPV4}|tail -1|awk '{print $5}'|cut -d "/" -f 1)
echo -e "+ Waiting to grab an IP from DHCP..."
sleep 5
done
echo -e "+ IP already assigned: ${IP}"
virsh net-update ${NETWORK} add dns-host "<host ip='${IP}'> <hostname>ipibm-installer</hostname> <hostname>ipibm-installer.lab-ipibm</hostname> <hostname>ipibm-installer.${OCP_DOMAIN}</hostname> </host>" --live --config
virsh net-update ${NETWORK} add dns-host "<host ip='${INGRESS_IP}'> <hostname>assisted-service-open-cluster-management.apps.${OCP_PARENT_DOMAIN}</hostname> <hostname>assisted-service-assisted-installer.apps.${OCP_PARENT_DOMAIN}</hostname> </host>" --live --config
}
# MAIN
# Order matters: set_vars must run first — every later step reads the
# globals it populates.
set_vars ${NET_TYPE}
networks
add_nic_installer_vm
create_vms
set_dns_hosts
| true
|
ce9817c87e35542b14ad2c0834f2b11c72af5c36
|
Shell
|
mcclurmc/dissertation
|
/bin/recordcount.sh
|
UTF-8
| 330
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Append-friendly progress counter: prints "<date> <words> <pages>" so it
# can be redirected into a log file over time.
#
# $1 - master .tex file (texcount -merge follows \include/\input)
# $2 - rendered PDF used for the page count
DATE=$(date "+%Y-%m-%d")
# Quoting "$1"/"$2" fixes documents whose paths contain spaces; the
# backtick substitutions were also replaced with the nestable $(...) form.
WORDS=$(texcount -sum -merge "$1" 2> /dev/null | grep "Sum count" | awk '{ print $3 }')
PAGES=$(pdfinfo -meta "$2" | grep "Pages" | awk '{ print $2 }')
echo "${DATE} ${WORDS} ${PAGES}"
| true
|
5c10db9f489b17a09903fff99269e5cdb7b96df4
|
Shell
|
awslabs/one-line-scan
|
/regression/compilation/faulty-input/run.sh
|
UTF-8
| 518
| 3.46875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Regression test: run a build that is expected to fail under one-line-scan
# and verify that the faulty input files were captured.

# The CBMC backend needs goto-cc; skip the whole test when it is absent.
if ! command -v goto-cc &> /dev/null; then
  echo "warning: did not find goto-cc, skip test"
  exit 0
fi

# Start from a clean tree; a pre-existing SP directory would make the
# scan fail with "SP exists".
make clean
rm -rf SP

../../../one-line-scan -o SP --cbmc -- gcc fail.c

# Success criterion: at least one faulty input file was produced.
if ls SP/faultyInput/*; then
  echo "success"
  cat SP/faultyInput/*
  exit 0
fi

echo "fail"
exit 1
| true
|
06dc39cc6fb68cf9487ab211fca7944688290625
|
Shell
|
teja624/home
|
/.zsh/modules/aws/lib/sh/api/iam/service_linked_role_create.sh
|
UTF-8
| 184
| 2.8125
| 3
|
[
"Apache-2.0"
] |
permissive
|
aws_iam_service_linked_role_create() {
  # Create an IAM service-linked role for the given AWS service
  # (e.g. elasticbeanstalk.amazonaws.com).  Remaining arguments are passed
  # through to the AWS CLI.
  #
  # $1   - value for --aws-service-name
  # $@   - extra `aws iam create-service-linked-role` arguments
  local aws_service_name="$1"
  shift 1
  # Quoted: an unquoted service name would be word-split / glob-expanded.
  cond_log_and_run aws iam create-service-linked-role --aws-service-name "$aws_service_name" "$@"
}
| true
|
e998283fb98956be227550c7494b0bc4446457b1
|
Shell
|
dals/mongo-conf
|
/etc/init.d/mongod.centos
|
UTF-8
| 1,050
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# init script for mongodb
# chkconfig: 2345 70 11
# description: mongod
# processname: mongod
# pidfile: /var/run/mongodb.pid
# Source function library.
. /etc/rc.d/init.d/functions
RETVAL=0
# Daemon locations.  Note that the config file below is *sourced*, so it
# may override any of these variables (not just mongod settings).
pidfile=/var/run/mongodb.pid
exec="/usr/local/bin/mongod"
prog="mongod"
config="/etc/mongodb.conf"
lockfile="/var/lock/mongod"
[ -e $config ] && . $config
start() {
# Start mongod as a forking daemon, logging to /var/log/mongodb/mongod.log.
# Exits 5 (LSB: program not installed) when the binary is missing.
if [ ! -x "$exec" ]
then
# Bug fix: the message used to print the literal text '$exec' instead of
# the actual path of the missing binary.
echo "$exec not found"
exit 5
fi
echo -n $"Starting $prog: "
daemon $exec --fork --logpath=/var/log/mongodb/mongod.log --logappend -f $config
RETVAL=$?
echo
# Lock file marks the service as running for the init system.
[ $RETVAL = 0 ] && touch ${lockfile}
return $RETVAL
}
stop() {
# Signal mongod to terminate via killproc; clear the lock/pid files on
# success.
echo -n $"Stopping $prog: "
killproc $prog
RETVAL=$?
echo
# && (rather than if/fi) so the function's exit status reflects a failed
# stop.
[ $RETVAL = 0 ] && rm -f $lockfile $pidfile
}
restart() {
# Full stop/start cycle.
# NOTE(review): the 'restart' case in the dispatcher below does not call
# this helper — it inlines stop/sleep/start instead; confirm which is
# canonical.
stop
start
}
# See how we were called.
case "$1" in
start)
start
;;
stop)
stop
;;
restart)
stop
sleep 1
start
;;
status)
# Bug fix: 'status' is advertised in the usage string but previously fell
# through to the usage error; report via the status() helper from the
# sourced init-functions library.
status $prog
RETVAL=$?
;;
*)
echo $"Usage: `basename $0` {start|stop|restart|status}"
exit 2
esac
exit $RETVAL
| true
|
ae53723ef92724154c8a244c70a1d696160def88
|
Shell
|
iatanasov77/vs-devops-server
|
/examples/Packer/scripts/provision.sh
|
UTF-8
| 562
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Provision a Vagrant base box: build tools, VirtualBox guest additions
# and the insecure vagrant public key.  Must run as root.

# Install EPEL, kernel headers, gcc, etc.
yum install -y epel-release kernel-devel gcc make tar bzip2 wget elfutils-libelf-devel

# Install VirtualBox add-ons from the ISO Packer uploaded to the VM.
# Use the shell glob directly instead of parsing `ls` output (fragile with
# unusual filenames); exactly one matching ISO is expected, so stop after
# the first.
for iso in /home/vagrant/VBoxGuestAdditions*.iso; do
  mount "$iso" /mnt
  break
done
/mnt/VBoxLinuxAdditions.run
usermod -aG vboxsf vagrant

# Prepare the insecure key
mkdir -m 0700 -p /home/vagrant/.ssh
wget --no-check-certificate https://raw.github.com/mitchellh/vagrant/master/keys/vagrant.pub -O /home/vagrant/.ssh/authorized_keys
chmod 0600 /home/vagrant/.ssh/authorized_keys
chown vagrant:vagrant -R /home/vagrant/.ssh
| true
|
fe87249a9ba97f33e523c6e878491b8a27ac5280
|
Shell
|
cloudbase/openstack-hyperv-release-tests
|
/reboot-win-host.sh
|
UTF-8
| 210
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
# Reboot a Windows test host via the reboot_win_host helper from utils.sh.
#
# $1 - host name or address of the Windows machine.
set -e
BASEDIR=$(dirname "$0")
# Quoted paths fix invocation from directories containing spaces.
. "$BASEDIR/utils.sh"
host=$1
# Credentials read by the sourced helper functions.
win_user=Administrator
win_password=Passw0rd
if [ -z "$host" ]; then
  echo "Usage: $0 <host>"
  exit 1
fi
reboot_win_host "$host"
| true
|
7c6f2dc3ebba0c5b2fde682a42e653c672e1c550
|
Shell
|
SiteMindOpen/SiteMind
|
/sitemind-admin/sm-user-new.sh
|
UTF-8
| 1,393
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Provision a SiteMind user: htpasswd entry, Apache <Directory> auth block
# and a per-user copy of the application.  Usage: sm-user-new.sh <username>
## F U N C T I O N S
# Create (or update) an htpasswd entry for $USERNAME with a freshly
# generated random password, and print that password for the operator.
create_user(){
PASSWORD=$(openssl rand -base64 20);
htpasswd -bB /etc/apache2/.htpasswd "$USERNAME" "$PASSWORD";
echo -e "Your password is $PASSWORD";
}
backup(){
# Keep a timestamped copy of the live Apache config in /tmp before it is
# rewritten by the steps below.
TIMESTAMP=$(date +'%s')
cp /etc/apache2/sites-available/000-default-le-ssl.conf /tmp/000-default-le-ssl-"$TIMESTAMP".conf
}
header(){
# First 7 lines of the live config — the part kept verbatim when the file
# is rebuilt (see footer() and update_conf()).
head -7 /etc/apache2/sites-available/000-default-le-ssl.conf
}
entry(){
# Emit the Apache <Directory> basic-auth stanza for $USERNAME, padded by
# spacer lines, to stdout.
echo -e " "
echo -e " <Directory \"/var/www/html/sitemind/"$USERNAME"\">"
echo -e " AuthType Basic"
echo -e " AuthName \"Method Media Intelligence - Sitemind Login\""
echo -e " AuthUserFile /etc/apache2/.htpasswd"
echo -e " Require user "$USERNAME""
echo -e " </Directory>"
echo -e " "
}
footer(){
# Everything after the header of the current live config.
# NOTE(review): header() keeps lines 1-7 but this deletes lines 1-8, so
# line 8 of the live config is dropped on every run — confirm line 8 is
# an expendable spacer line.
sed '1,8d' /etc/apache2/sites-available/000-default-le-ssl.conf
}
update_conf(){
# Rebuild the temp config: original header, a fresh auth block for the new
# user, then the remainder of the existing config.
# Bug fix: plain `rm` errored (and with `set -e` would have aborted) when
# the temp file did not exist yet; -f makes the first run clean.
rm -f /tmp/apache_conf_file.temp
header >> /tmp/apache_conf_file.temp
entry >> /tmp/apache_conf_file.temp
footer >> /tmp/apache_conf_file.temp
}
apache_conf(){
# Validate the updated configuration, and only restart Apache when the
# syntax check passes (previously a broken config would still trigger a
# restart and take the site down).
sudo apachectl configtest && sudo service apache2 restart
}
sitemind_package(){
# Install a per-user copy of the SiteMind tree into the web root that the
# new <Directory> block protects.
rsync -avq ~/sitemind-master/ /var/www/html/sitemind/"$USERNAME"
}
## P R O G R A M   S T A R T S
# $1 - the username to provision.
USERNAME=$1
# Robustness fix: without this guard an empty username created a bogus
# htpasswd entry and an empty <Directory> path in the Apache config.
if [ -z "$USERNAME" ]; then
  echo "Usage: $0 <username>" >&2
  exit 1
fi
create_user
backup
update_conf
cp /tmp/apache_conf_file.temp /etc/apache2/sites-available/000-default-le-ssl.conf
apache_conf
sitemind_package
| true
|
a7d5ab1416cc41822f4dd3cd3a31aff62145ab56
|
Shell
|
chunfuchang/settings
|
/bin/findbom
|
UTF-8
| 475
| 3.921875
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Tom M. Yeh
# Copyright (C) Potix Corporation. All Rights Reserved.
#
# findbom: recursively list files whose first bytes contain a UTF-8
# byte-order mark.  Arguments are files and/or directories.
if [ "$1" = "" ] ; then
echo "Use:"
echo " findbom dir1 dir2"
echo " findbom *"
exit 1
fi
# Flag a single file: print its absolute path when the leading bytes
# contain the UTF-8 BOM (EF BB BF; od -x shows that pair as "bbef").
function dofind {
  if [ "$(od -x -N 8 -- "$1" | grep bbef)" != "" ] ; then
    echo "$(pwd)/$1"
  fi
}
# Recurse over the given files and directories, checking every regular
# file for a BOM.  Bug fix: iterating over unquoted $* re-split names, so
# any file or directory containing a space was silently skipped; "$@"
# preserves each argument intact.
function dodir {
  local f
  for f in "$@" ; do
    if [ -f "$f" ] ; then
      dofind "$f"
    elif [ -d "$f" ] ; then
      (
        # If the cd fails, bail out of the subshell rather than scanning
        # the wrong directory.
        cd "$f" || exit
        dodir *
      )
    fi
  done
}
#echo "Search $* for files containing UTF-BOM"
# "$@" (instead of the old unquoted $*) hands each argument through
# intact, so names containing spaces are searched correctly.
dodir "$@"
| true
|
c97ede52d76f80121d0bc64c8f693ec4788d0880
|
Shell
|
shyim/shopware-docker
|
/modules/platform/config-set.sh
|
UTF-8
| 149
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# swdc module command: persist a Shopware config value via yq.
# Relies on names provided by the swdc wrapper that runs this file:
#   checkParameter  - argv validation helper (defined by the framework,
#                     not visible here — TODO confirm its contract)
#   SHOPWARE_FOLDER - absolute path of the active shop
checkParameter
cd "${SHOPWARE_FOLDER}" || exit 1
# Drop the module/command words; $1 then becomes the yq expression.
shift 2
FILE="config/packages/swdc.yml"
# Ensure the target exists so yq can edit it in place.
touch "$FILE"
yq e -i "$1" "$FILE"
| true
|
6a0c038c6e155782c75557ecfa244ac9ab58254c
|
Shell
|
peter-m-shi/ztool
|
/gitz/traverse_tags.sh
|
UTF-8
| 380
| 2.90625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# For every git tag: check the tag out into a clean working tree and
# record the static libraries, frameworks and Obj-C sources it contains
# under $HOME/Downloads/result/<tag>.*.txt
git tag > tag_list.txt
# Create the output directory before the first redirection into it.
mkdir -p "$HOME/Downloads/result"
# read -r keeps backslashes in tag names literal; quoting "$line"
# protects against word splitting.
while read -r line
do
  echo "$line"
  # Drop untracked build products, reset tracked files, move to the tag.
  # (Options now precede the pathspec, the documented git argument order.)
  git clean -df .
  git checkout .
  git checkout "$line"
  # Note: the .a listing truncates (>) while the others append (>>) —
  # the original redirections are preserved.
  find . -name "*.a" > "$HOME/Downloads/result/$line.a.txt"
  find . -name "*.framework" >> "$HOME/Downloads/result/$line.f.txt"
  find . -name "*.m" >> "$HOME/Downloads/result/$line.m.txt"
  find . -name "*.h" >> "$HOME/Downloads/result/$line.h.txt"
done < tag_list.txt
| true
|
504c83a60e6ccbb6a864407460338dd40f28cd57
|
Shell
|
gspu/bitkeeper
|
/src/crank.sh
|
UTF-8
| 2,162
| 3.328125
| 3
|
[
"Apache-2.0",
"GPL-2.0-only",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
#!/bin/bash
# Copyright 2000-2003,2011,2014-2016 BitMover, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# If you edit this, please apply those changes to the master template in
# /home/bk/crankturn/crank.sh
# Auto-export every variable assigned below so the env strings passed to
# the remote shells (see remote() and the dispatch loop) can read them.
set -a
test -d SCCS && {
echo CRANK does not work in repo with SCCS dirs
exit 1
}
# $REMOTE must name the local script that gets copied to each build host.
test "X$REMOTE" = X && {
echo You need to set \$REMOTE before cranking
exit 1
}
test -r "$REMOTE" || {
echo No remote crank shell script found
exit 1
}
# Default the host list, repo URL and repo name when not given.
test "X$HOSTS" = X && HOSTS=`chosts build`
test "X$HOSTS" = X && {
echo No build hosts found.
exit 1
}
test "X$URL" = X && URL=bk://`bk gethost`/`pwd | sed s,/home/bk/,,`
test "X$REPO" = X && REPO=`pwd | sed 's,.*/,,'`
# REPO becomes part of a file name on the build hosts, so no slashes.
case "$REPO" in
*/*)
echo "REPO identifier may not contain a / (slash)"
exit 1
;;
esac
U=`bk getuser`
# Per-user, per-repo log file name used on the remote side.
test "X$LOG" = X && LOG=LOG.${REPO}-$U
remote() {
# Run the staged build script on $host, forwarding the crank environment
# explicitly in the command string (the remote shell does not inherit our
# exported variables).  $RSH and $host are set by the dispatch loop
# below; "$@" are the arguments given to this function.
$RSH $host "env LOG=$LOG BK_USER=$U URL=$URL REPO=$REPO \
/bin/bash /build/.$REPO.$U $@"
}
# Fan out to every build host in parallel; each host runs in its own
# backgrounded subshell, then we barrier on `wait`.
for host in $HOSTS
do
  # Default remote-shell/copy commands; the macOS builders only speak ssh.
  RCP=rcp
  RSH=rsh
  if [ "$host" = "macos106" -o "$host" = "macos1012" ]
  then
    RCP="scp -q"
    RSH="ssh -A"
  fi
  (
    # 'status' and 'clean' are quick queries: report and stop this host's
    # subshell.  Bug fix: 'continue' was used here before, but it has no
    # effect inside a backgrounded subshell; 'exit' is what was intended.
    test "X$@" = Xstatus && {
      printf "%-10s %s\n" $host "`remote status`"
      exit 0
    }
    test "X$@" = Xclean && {
      printf "%-10s %s\n" $host "`remote clean`"
      exit 0
    }
    # Remove the per-host scratch files no matter how the subshell leaves.
    trap "rm -f .[st].$host; exit" 0 1 2 3 15
    $RCP $REMOTE ${host}:/build/.$REPO.$U
    /usr/bin/time -o .t.$host -f "%E" $RSH $host \
      "env LOG=$LOG BK_USER=$U URL=$URL REPO=$REPO \
      /bin/bash /build/.$REPO.$U $@"
    remote status > .s.$host
    printf \
      "%-10s took %s and %s\n" $host `sed 's/\.[0-9][0-9]$//' < .t.$host` "`cat .s.$host`"
    # Bug fix: a stray argument-less 'rm -f' (a no-op) sat here; the trap
    # above already removes the scratch files.
  ) &
done
wait
exit 0
| true
|
c7482f206ad1bf17780bc11a9786fedba73902dc
|
Shell
|
cute-aaa/k8sImageDownload
|
/getK8sImages.sh
|
UTF-8
| 6,560
| 2.953125
| 3
|
[] |
no_license
|
#!/bin/bash
# Download and archive every container image needed for a kubeadm v1.15.1
# cluster (core components, dashboards, and several CNI network add-ons) so
# they can be carried to machines without internet access.
# Environment: https://www.katacoda.com/courses/ubuntu/playground
# The images can also be downloaded directly in a pre-configured environment:
# https://www.katacoda.com/courses/kubernetes/launch-single-node-cluster
# (the versions would need to be adjusted for that, though)
# Package manager to use ("apt" or "yum")
PKG_MANAGER="apt"
# Total number of local images expected once all pulls have finished
imageNum=24
# Core image versions
K8S_VERSION="v1.15.1"
COREDNS_VERSION="1.3.1"
ETCD_VERSION="3.3.10"
PAUSE_VERSION="3.1"
# Dashboard versions
DASHBOARD_VERSION="v1.10.1"
DASHBOARD_BETA_VERSION="v2.0.0-beta2"
# Dependency of dashboard-beta
METRICS_SCRAPER_VERSION="v1.0.1"
# Network add-on image versions
# calico
CALICO_VERSION="v3.8.0"
# cilium
CILIUM_ETCD_OPERATOR_VERSION="v2.0.6"
CILIUM_INIT_VERSION="2019-04-05"
CILIUM_VERSION="v1.5.5"
# flannel
FLANNEL_VERSION="v0.11.0-amd64"
# romana
ROMANA_VERSION="v2.0.2"
# Romana dependency
ETCD_AMD64_VERSION="3.0.17"
# weavenet
WEAVENET_VERSION="2.5.2"
echo "只用于镜像下载,不可用于生产环境"
echo "卸载旧docker"
$PKG_MANAGER -y remove docker \
docker-engine \
docker.io
echo "下载所需工具"
$PKG_MANAGER -y install curl
echo "获取下载脚本"
curl -fsSL get.docker.com -o get-docker.sh
echo "下载docker"
sh get-docker.sh
echo "清理无关镜像"
docker rmi $(docker images -q)
echo "下载k8s"
# Install kubelet/kubeadm/kubectl from the upstream Kubernetes repo for the
# detected package manager.
if [ $PKG_MANAGER == "apt" ]
then
$PKG_MANAGER update && $PKG_MANAGER install -y apt-transport-https curl
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
cat <<EOF >/etc/apt/sources.list.d/kubernetes.list
deb https://apt.kubernetes.io/ kubernetes-xenial main
EOF
$PKG_MANAGER update
$PKG_MANAGER install -y kubelet kubeadm kubectl
apt-mark hold kubelet kubeadm kubectl
elif [ $PKG_MANAGER == "yum" ]
then
cat <<EOF > /etc/yum.repos.d/kubernetes.repo
[kubernetes]
name=Kubernetes
baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg
EOF
# Set SELinux in permissive mode (effectively disabling it)
setenforce 0
sed -i 's/^SELINUX=enforcing$/SELINUX=permissive/' /etc/selinux/config
yum install -y kubelet kubeadm kubectl --disableexcludes=kubernetes
systemctl enable --now kubelet
fi
echo "关闭swap分区"
# kubeadm requires swap to be off.
swapoff -a && sysctl -w vm.swappiness=0
sed -ri '/^[^#]*swap/s@^@#@' /etc/fstab
echo "获取IP地址"
#hostname -I > ip.txt $(cut -d ' ' -f 1 ip.txt)
# First address reported by hostname -I is used as the API server address.
ipaddrs=($(hostname -I))
echo "IP地址:"${ipaddrs[0]}
echo "初始化k8s"
# Initializing a cluster forces kubeadm to pull the core images locally.
kubeadm init --apiserver-advertise-address ${ipaddrs[0]} --pod-network-cidr 10.244.0.0/16
mkdir -p $HOME/.kube
sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
sudo chown $(id -u):$(id -g) $HOME/.kube/config
echo "保存基础镜像"
mkdir ~/images
cd ~/images
docker save k8s.gcr.io/kube-proxy:$K8S_VERSION > k8s.gcr.io#kube-proxy.tar
docker save k8s.gcr.io/kube-apiserver:$K8S_VERSION > k8s.gcr.io#kube-apiserver.tar
docker save k8s.gcr.io/kube-scheduler:$K8S_VERSION > k8s.gcr.io#kube-scheduler.tar
docker save k8s.gcr.io/kube-controller-manager:$K8S_VERSION > k8s.gcr.io#kube-controller-manager.tar
docker save k8s.gcr.io/coredns:$COREDNS_VERSION > k8s.gcr.io#coredns.tar
docker save k8s.gcr.io/etcd:$ETCD_VERSION > k8s.gcr.io#etcd.tar
docker save k8s.gcr.io/pause:$PAUSE_VERSION > k8s.gcr.io#pause.tar
echo "下载附件"
# Applying the add-on manifests makes the node pull the remaining images.
docker pull k8s.gcr.io/kubernetes-dashboard-amd64:$DASHBOARD_VERSION
docker pull kubernetesui/dashboard:$DASHBOARD_BETA_VERSION
docker pull kubernetesui/metrics-scraper:$METRICS_SCRAPER_VERSION
docker pull calico/kube-controllers:$CALICO_VERSION
kubectl apply -f https://docs.projectcalico.org/v3.8/manifests/calico.yaml
#kubectl apply -f https://raw.githubusercontent.com/kubernetes/dashboard/v2.0.0-beta2/aio/deploy/recommended.yaml
kubectl create -f https://raw.githubusercontent.com/cilium/cilium/v1.5/examples/kubernetes/1.14/cilium.yaml
kubectl apply -f https://raw.githubusercontent.com/coreos/flannel/62e44c867a2846fefb68bd5f178daf4da3095ccb/Documentation/kube-flannel.yml
kubectl apply -f https://raw.githubusercontent.com/romana/romana/master/containerize/specs/romana-kubeadm.yml
kubectl apply -f "https://cloud.weave.works/k8s/net?k8s-version=$(kubectl version | base64 | tr -d '\n')"
# Block until every expected image has been pulled locally.
while [ $(docker images -q | wc -l) -lt $imageNum ]
do
echo "当前镜像数:" $(docker images -q | wc -l)
echo "目标镜像数:" $imageNum
echo "等待镜像下载完成"
sleep 5
done
echo "保存dashboard"
mkdir dashboard
cd dashboard/
docker save k8s.gcr.io/kubernetes-dashboard-amd64:$DASHBOARD_VERSION > k8s.gcr.io#kubernetes-dashboard-amd64.tar
docker save kubernetesui/dashboard:$DASHBOARD_BETA_VERSION > kubernetesui#dashboard.tar
docker save kubernetesui/metrics-scraper:$METRICS_SCRAPER_VERSION > kubernetesui#metrics-scraper.tar
cd ..
echo "保存网络附件"
mkdir add-on/
cd add-on/
mkdir calico
cd calico/
docker save calico/node:$CALICO_VERSION > calico#node.tar
docker save calico/cni:$CALICO_VERSION > calico#cni.tar
docker save calico/kube-controllers:$CALICO_VERSION > calico#kube-controllers.tar
docker save calico/pod2daemon-flexvol:$CALICO_VERSION > calico#pod2daemon-flexvol.tar
cd ..
#mkdir canal
#cd canal/
#kubectl apply -f https://docs.projectcalico.org/v3.8/manifests/canal.yaml
# (canal ships no image of its own)
#cd ..
mkdir cilium
cd cilium/
docker save cilium/cilium-etcd-operator:$CILIUM_ETCD_OPERATOR_VERSION > cilium#cilium-etcd-operator.tar
docker save cilium/cilium-init:$CILIUM_INIT_VERSION > cilium#cilium-init.tar
docker save cilium/cilium:$CILIUM_VERSION > cilium#cilium.tar
cd ..
mkdir flannel && cd flannel
docker save quay.io/coreos/flannel:$FLANNEL_VERSION > quay.io#coreos#flannel.tar
cd ..
mkdir romana && cd romana/
docker save quay.io/romana/daemon:$ROMANA_VERSION > quay.io#romana#daemon.tar
docker save quay.io/romana/listener:$ROMANA_VERSION > quay.io#romana#listener.tar
docker save quay.io/romana/agent:$ROMANA_VERSION > quay.io#romana#agent.tar
docker save gcr.io/google_containers/etcd-amd64:$ETCD_AMD64_VERSION > gcr.io#google_containers#etcd-amd64.tar
cd ..
mkdir weavenet && cd weavenet
docker save weaveworks/weave-kube:$WEAVENET_VERSION > weaveworks#weave-kube.tar
docker save weaveworks/weave-npc:$WEAVENET_VERSION > weaveworks#weave-npc.tar
cd ..
cd ~
echo "打包镜像"
tar czvf images.tar images/
echo "完成!"
| true
|
93fe578f537aa0475d48db15598baa9db1e6193d
|
Shell
|
prithachanana12/FHCRC
|
/virScan/generateLib.sh
|
UTF-8
| 929
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
#generate counts from new input/library sample, this will only have to be done once and these input counts can be used for future experiments
#do this from the command line on rhino
# Pipeline: align R1 reads with bowtie against the VirScan "vir2" library,
# sort into an indexed BAM, then emit per-reference read counts as a
# gzipped CSV (id,count) in the results directory.
module load bowtie/1.1.1
# Sample whose <sample>.R1.fastq.gz will be counted.
sampleName=library_July_2019
# Prepend the local samtools and anaconda installs to PATH.
PATH=/home/solexa/apps/samtools/samtools-1.3.1:/home/solexa/apps/anaconda3/bin:$PATH
bowtieIndex=/shared/solexa/solexa/Genomes/genomes/PhIPseq/vir2/vir2
transferDir=/shared/ngs/illumina/tsayers/190814_SN367_1421_BH3G2JBCX3
resultsDir=$transferDir/results
mkdir $resultsDir
# FASTQs live under ./filtered relative to the invocation directory.
cd filtered
# Align, convert to BAM and coordinate-sort in a single stream.
zcat $sampleName.R1.fastq.gz | bowtie -n 3 -l 30 -e 1000 --tryhard --nomaqround --norc --best --sam --quiet $bowtieIndex - | samtools view -u - | samtools sort - > $resultsDir/$sampleName.bam
cd $resultsDir
samtools index $sampleName.bam
# idxstats columns 1,3 = reference name, mapped-read count; drop the
# unmapped ("*") row, prepend a header line, and turn tabs into commas.
samtools idxstats $sampleName.bam | cut -f 1,3 | sed -e '/^\*\t/d' -e "1 i id\t$sampleName" | tr "\\t" "," >$sampleName.count.csv
gzip $sampleName.count.csv
| true
|
dabf2d85d08951e6169bc8ac22d9c46ad29058b1
|
Shell
|
song10/bin
|
/patch_etc_hosts.sh
|
UTF-8
| 238
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/sh
# Append the contents of a data file to /etc/hosts and show the tail of
# /etc/hosts as a quick sanity check.  The data file defaults to
# .patch_etc_hosts.dat next to this script; pass a path as $1 to override.

data_file="$(dirname "$0")/.patch_etc_hosts.dat"
[ -n "$1" ] && { data_file="$1"; shift; }

if [ ! -f "$data_file" ]; then
    echo "'$data_file' not found! Abort."
    exit 1
fi

# Root is needed only for the >> redirection, hence the sudo'd subshell.
sudo sh -c "cat \"$data_file\" >> /etc/hosts"
tail /etc/hosts
| true
|
97bad9ecc9e6b0d77cb5ab10c8a18e64e74ae1d4
|
Shell
|
mikejzx/dotfiles
|
/.xinitrc
|
UTF-8
| 911
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/sh
# X session startup: merge X resources and keymaps (system first, then the
# user's overrides), run the distro's drop-in scripts, start background
# helpers, and finally hand the session over to Fluxbox.
userresources=$HOME/.Xresources
usermodmap=$HOME/.Xmodmap
sysresources=/etc/X11/xinit/.Xresources
sysmodmap=/etc/X11/xinit/.Xmodmap
# Merge in defaults and keymaps
[ -f $sysresources ] && xrdb -merge $sysresources
[ -f $sysmodmap ] && xmodmap $sysmodmap
[ -f "$userresources" ] && xrdb -merge "$userresources"
[ -f "$usermodmap" ] && xmodmap "$usermodmap"
# Start scripts
if [ -d /etc/X11/xinit/xinitrc.d ] ; then
 for f in /etc/X11/xinit/xinitrc.d/?*.sh ; do
  [ -x "$f" ] && . "$f"
 done
 unset f
fi
# Key input rate adjustment.
xset r rate 200 25 &
# Swap caps-lock and escape.
setxkbmap -option caps:swapescape &
# Turn on numlock if it's not on yet
numlockx on &
# Allows us to use some bound multimedia keys
xbindkeys &
# Start the GPU fan controller.
gpu-fancurve &
# Start gpg agent
gpg-agent &
# Try fix Vsync
export __GL_SYNC_TO_VBLANK=1
# Start Fluxbox session
# exec replaces this shell, so the X session lives as long as Fluxbox does.
exec startfluxbox
| true
|
7e620a64d5e9e380cd3c30c228d0a8c394c1860b
|
Shell
|
cyberlooper/OpenFLIXR2.OnlineUpdate
|
/scripts/l_update_12
|
UTF-8
| 3,970
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# OpenFLIXR online update step 12: applies the 2.9.1 update (config refresh,
# helper-script rollout, service migrations), stamps the update as done, and
# reboots the box.
THISUSER=$(whoami)
# Must run as root: the script rewrites system configs and services.
if [ $THISUSER != 'root' ]
then
echo 'You must use sudo to run this script, sorry!'
exit 1
fi
# Mirror all output (stdout and stderr) into the update log.
exec 1> >(tee -a /var/log/openflixrupdate/l_update_12.log) 2>&1
TODAY=$(date)
echo "-----------------------------------------------------"
echo "Date: $TODAY"
echo "-----------------------------------------------------"
## OpenFLIXR Update version 2.9.1
# updates
cp /opt/update/updates/configs/nginx.conf /etc/nginx/nginx.conf
# Point /etc/resolv.conf back at resolvconf's managed file.
rm /etc/resolv.conf
ln -s ../run/resolvconf/resolv.conf /etc/resolv.conf
resolvconf -u
# Re-enable IPv6 by removing the disable lines a previous update added.
sed -i 's/# Disable IPv6//' /etc/sysctl.conf
sed -i 's/net.ipv6.conf.all.disable_ipv6 = 1//' /etc/sysctl.conf
sed -i 's/net.ipv6.conf.default.disable_ipv6 = 1//' /etc/sysctl.conf
sed -i 's/net.ipv6.conf.lo.disable_ipv6 = 1//' /etc/sysctl.conf
sysctl -p
## OpenFLIXR
echo ""
echo "OpenFLIXR updates:"
# Roll out the refreshed helper scripts and make them executable.
cp /opt/update/updates/openflixr/apt-get-queue /opt/openflixr/apt-get-queue
cp /opt/update/updates/openflixr/blocklist.sh /opt/openflixr/blocklist.sh
chmod +x /opt/openflixr/blocklist.sh
cp /opt/update/updates/openflixr/ipaddress /opt/openflixr/ipaddress
chmod +x /opt/openflixr/ipaddress
cp /opt/update/updates/openflixr/checkinet /opt/openflixr/checkinet
chmod +x /opt/openflixr/checkinet
cp /opt/update/updates/openflixr/purgeoldkernels /opt/openflixr/purgeoldkernels
chmod +x /opt/openflixr/purgeoldkernels
cp /opt/update/updates/openflixr/startup.sh /opt/openflixr/startup.sh
chmod +x /opt/openflixr/startup.sh
cp /opt/update/updates/openflixr/updatewkly.sh /opt/openflixr/updatewkly.sh
chmod +x /opt/openflixr/updatewkly.sh
cp /opt/update/updates/openflixr/updateof /opt/openflixr/updateof
chmod +x /opt/openflixr/updateof
cp /opt/update/updates/openflixr/createdirs /opt/openflixr/createdirs
chmod +x /opt/openflixr/createdirs
cp /opt/update/updates/openflixr/logio.sh /opt/openflixr/logio.sh
chmod +x /opt/openflixr/logio.sh
cp /opt/update/updates/openflixr/hotfixes /opt/openflixr/hotfixes
cp /opt/update/updates/monit/* /etc/monit/conf.d/
cp /opt/update/updates/openflixr/createnginxconfig.sh /opt/openflixr/createnginxconfig.sh
chmod +x /opt/openflixr/createnginxconfig.sh
cp /opt/update/updates/openflixr/letsencrypt.sh /opt/openflixr/letsencrypt.sh
chmod +x /opt/openflixr/letsencrypt.sh
cp /opt/update/updates/openflixr/cleanup.sh /opt/openflixr/cleanup.sh
chmod +x /opt/openflixr/cleanup.sh
cp /opt/update/updates/openflixr/fixpermissions.sh /opt/openflixr/fixpermissions.sh
chmod +x /opt/openflixr/fixpermissions.sh
# lazylibrarian
# Repoint LazyLibrarian at its new GitLab home and hard-reset to master.
cd /opt/LazyLibrarian
git remote rm origin
git remote add origin https://gitlab.com/LazyLibrarian/LazyLibrarian.git
git config master.remote origin
git config master.merge refs/heads/master
git stash clear
git pull origin master
git reset --hard origin/master
git branch --set-upstream-to=origin/master master
# ubooquity
# Migrate ubooquity from SysV init to a systemd unit.
update-rc.d -f ubooquity remove
rm /etc/init.d/ubooquity
cp /opt/update/updates/configs/ubooquity.service /etc/systemd/system/ubooquity.service
systemctl enable ubooquity.service
# plexmediaserver
# Switch Plex to the official repo and (re)install non-interactively.
echo deb https://downloads.plex.tv/repo/deb public main | sudo tee /etc/apt/sources.list.d/plexmediaserver.list
curl https://downloads.plex.tv/plex-keys/PlexSign.key | sudo apt-key add -
DEBIAN_FRONTEND=noninteractive APT_LISTCHANGES_FRONTEND=mail apt-get install -y plexmediaserver
# tripple edit mopidy service fix
cp /opt/update/updates/configs/mopidy.service /lib/systemd/system/mopidy.service
systemctl daemon-reload
## update version
# Record the new version everywhere the UI reads it.
sed -i 's/2.*/2.9.1 Takashi Shimura/' /opt/openflixr/version
crudini --set /usr/share/nginx/html/setup/config.ini custom custom1 2.9.1
version=$(crudini --get /usr/share/nginx/html/setup/config.ini custom custom1)
sed -i 's/Version.*/Version '$version'<\/span>/' /usr/share/nginx/html/openflixr/index.html
## let system know update has been installed
touch /opt/update/doneupdate/l_update_12
# update everything else
bash /opt/openflixr/updatewkly.sh
## reboot
reboot now
| true
|
db71d4b02dd5a3f055fd2b715e54bca33f7dd209
|
Shell
|
bgsa/spectrum-engine
|
/vendor/build-opencl-linux.sh
|
UTF-8
| 1,611
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the OpenCL ICD loader as a static library for both 32-bit and
# 64-bit x86 and copy the results into lib/x86/<type> and lib/x86_64/<type>.
# Bug fix: the original began with "set echo off" -- a csh-ism that in bash
# actually SETS the positional parameters to "echo off"; dropped.
export BUILD_DIR=OpenCL-ICD-Loader/build
export OUTPUT_DIR_32=../../lib/x86
export OUTPUT_DIR_64=../../lib/x86_64
export HEADER_DIR=$(pwd)/OpenCL-Headers
export BUILD_TYPE=Release

# Create directory $1 unless it already exists.
create_dir()
{
    if [ ! -d "$1" ]; then
        mkdir -p "$1"
    fi
}

# Remove the build tree if present.
clear_build_dir()
{
    if [ -d "$BUILD_DIR" ]; then
        rm -rf "$BUILD_DIR"
    fi
}

# Start from a fresh, empty build tree.
make_build_dir()
{
    clear_build_dir
    create_dir "$BUILD_DIR"
}

# Configure, build and install one architecture (deduplicates the two
# previously copy-pasted cmake invocations).
#   $1 - compiler flag (-m32 / -m64)
#   $2 - destination lib dir (lib/x86 / lib/x86_64)
build_arch()
{
    make_build_dir
    cd "$BUILD_DIR" || exit 1
    # NOTE(review): $OUTPUT_DIR is never set anywhere (only OUTPUT_DIR_32/_64
    # are defined, unused), so both output-directory options expand empty and
    # CMake falls back to the build tree -- which the cp below relies on.
    # Confirm the intent before "fixing" this to use $OUTPUT_DIR_32/_64.
    cmake .. -G "Unix Makefiles" \
        -DBUILD_SHARED_LIBS:BOOL=OFF \
        -DWIN32:BOOL=OFF \
        -DCMAKE_ENABLE_EXPORTS:BOOL=ON \
        -DOPENCL_ICD_LOADER_HEADERS_DIR="$HEADER_DIR" \
        -DCMAKE_LIBRARY_OUTPUT_DIRECTORY:STRING="$OUTPUT_DIR" \
        -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY:STRING="$OUTPUT_DIR" \
        -DCMAKE_C_FLAGS:STRING="$1"
    cmake --build . --config "$BUILD_TYPE"
    cd ../../
    cp -f OpenCL-ICD-Loader/build/libOpenCL.a "$2/$BUILD_TYPE/"
}

build_arch "-m32" lib/x86
build_arch "-m64" lib/x86_64

clear_build_dir
| true
|
b0a36713fa4d58e7b8dc1d06d4b5a5404f4214a3
|
Shell
|
RishiDesai/BioFabric
|
/releaseTools/buildWindowsExeStep1.sh
|
UTF-8
| 240
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/sh
# Step 1 of building the BioFabric Windows .exe: run launch4j on the
# pre-generated working config file in $EXEHOME.
# Bug fix: the original first line was "# /bin/sh" -- a plain comment, not a
# shebang -- so the file declared no interpreter at all.
EXEHOME=$1
RESHOME=$2
LAUNCH4J_HOME=$3
L4J_WORKING=$EXEHOME/bioFabl4jWorking.xml
VERCOMP=$4
CURRYEAR=$5
SC_HOME=$6
KEY_HOME=$7
# Echo/cat the inputs for build-log traceability.
echo $EXEHOME
echo $L4J_WORKING
cat $L4J_WORKING
java -jar $LAUNCH4J_HOME/launch4j.jar $L4J_WORKING
| true
|
3c365c5bbde7f4c8691b40aa68c9a7d5c2ce7333
|
Shell
|
dergigi/raspiblitz
|
/home.admin/config.scripts/bonus.lndconnect.sh
|
UTF-8
| 7,085
| 3.578125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Pair a mobile lightning wallet with this node's lnd by rendering an
# lndconnect/shango QR code on the RaspiBlitz LCD (or, as fallback, on the
# console), auto-detecting dyndns / ssh-tunnel / TOR reachability.
# command info
if [ $# -eq 0 ] || [ "$1" = "-h" ] || [ "$1" = "-help" ]; then
 echo "# config script to connect mobile apps with lnd connect"
 echo "# will autodetect dyndns, sshtunnel or TOR"
 echo "# bonus.lndconnect.sh [zap-ios|zap-android|zeus-ios|zeus-android|shango-ios|shango-android] [?ip|tor]"
 exit 1
fi
# load raspiblitz config data
source /home/admin/raspiblitz.info
source /mnt/hdd/raspiblitz.conf
#### PARAMETER
# 1. TARGET WALLET
targetWallet=$1
# 1. TOR or IP (optional - default IP)
forceTOR=0
if [ "$2" == "tor" ]; then
 forceTOR=1
fi
#### MAKE SURE LNDCONNECT IS INSTALLED
# check if it is installed
# https://github.com/rootzoll/lndconnect
# using own fork of lndconnet because of this commit to fix for better QR code:
commit=82d7103bb8c8dd3c8ae8de89e3bc061eef82bb8f
isInstalled=$(lndconnect -h 2>/dev/null | grep "nocert" -c)
if [ $isInstalled -eq 0 ] || [ "$1" == "update" ]; then
 echo "# Installing lndconnect.."
 # make sure Go is installed
 /home/admin/config.scripts/bonus.go.sh
 # get Go vars
 source /etc/profile
 # Install latest lndconnect from source:
 go get -d github.com/rootzoll/lndconnect
 cd $GOPATH/src/github.com/rootzoll/lndconnect
 git checkout $commit
 make
else
 echo "# lndconnect is already installed"
fi
#### ADAPT PARAMETERS BASED TARGETWALLET
# defaults
# connector: which QR payload format to build (lndconnect or shango);
# port 10009 = gRPC, port 8080 = REST.
connector=""
host=""
port=""
extraparamter=""
supportsTOR=0
if [ "${targetWallet}" = "zap-ios" ]; then
 connector="lndconnect"
 if [ ${forceTOR} -eq 1 ]; then
  # deactivated until fix: https://github.com/rootzoll/raspiblitz/issues/1001
  echo "error='no tor support'"
  exit 1
  # when ZAP runs on TOR it uses REST
  port="8080"
  extraparamter="--nocert"
 else
  # normal ZAP uses gRPC ports
  port="10009"
 fi
elif [ "${targetWallet}" = "zap-android" ]; then
 connector="lndconnect"
 if [ ${forceTOR} -eq 1 ]; then
  # when ZAP runs on TOR it uses REST
  port="8080"
  extraparamter="--nocert"
 else
  # normal ZAP uses gRPC ports
  port="10009"
 fi
elif [ "${targetWallet}" = "zeus-ios" ]; then
 connector="lndconnect"
 if [ ${forceTOR} -eq 1 ]; then
  echo "error='no tor support'"
  exit 1
 fi
 port="8080"
elif [ "${targetWallet}" = "zeus-android" ]; then
 connector="lndconnect"
 port="8080"
elif [ "${targetWallet}" = "shango-ios" ]; then
 connector="shango"
 if [ ${forceTOR} -eq 1 ]; then
  echo "error='no tor support'"
  exit 1
 fi
 port="10009"
elif [ "${targetWallet}" = "shango-android" ]; then
 connector="shango"
 if [ ${forceTOR} -eq 1 ]; then
  echo "error='no tor support'"
  exit 1
 fi
 port="10009"
else
 echo "error='unknown target wallet'"
 exit 1
fi
#### ADAPT PARAMETERS BASED RASPIBLITZ CONFIG
# get the local IP as default host
host=$(ip addr | grep 'state UP' -A2 | tail -n1 | awk '{print $2}' | cut -f1 -d'/')
# change host to dynDNS if set
if [ ${#dynDomain} -gt 0 ]; then
 host="${dynDomain}"
fi
# tunnel thru TOR if running and supported by the wallet
if [ ${forceTOR} -eq 1 ]; then
 # depending on RPC or REST use different TOR address
 if [ "${port}" == "10009" ]; then
  host=$(sudo cat /mnt/hdd/tor/lndrpc10009/hostname)
  port="10009"
  echo "# using TOR --> host ${host} port ${port}"
 elif [ "${port}" == "8080" ]; then
  host=$(sudo cat /mnt/hdd/tor/lndrest8080/hostname)
  port="8080"
  echo "# using TOR --> host ${host} port ${port}"
 fi
fi
# tunnel thru SSH-Reverse-Tunnel if activated for that port
if [ ${#sshtunnel} -gt 0 ]; then
 isForwarded=$(echo ${sshtunnel} | grep -c "${port}<")
 if [ ${isForwarded} -gt 0 ]; then
  if [ "${port}" == "10009" ]; then
   host=$(echo $sshtunnel | cut -d '@' -f2 | cut -d ' ' -f1 | cut -d ':' -f1)
   port=$(echo $sshtunnel | awk '{split($0,a,"10009<"); print a[2]}' | cut -d ' ' -f1 | sed 's/[^0-9]//g')
   echo "# using ssh-tunnel --> host ${host} port ${port}"
  elif [ "${port}" == "8080" ]; then
   host=$(echo $sshtunnel | cut -d '@' -f2 | cut -d ' ' -f1 | cut -d ':' -f1)
   port=$(echo $sshtunnel | awk '{split($0,a,"8080<"); print a[2]}' | cut -d ' ' -f1 | sed 's/[^0-9]//g')
   echo "# using ssh-tunnel --> host ${host} port ${port}"
  fi
 fi
fi
# special case: for Zeus android over TOR
hostscreen="${host}"
if [ "${targetWallet}" = "zeus-android" ] && [ ${forceTOR} -eq 1 ]; then
 # show TORv2 address on LCD (to make QR code smaller and scannable by Zeus)
 host=$(sudo cat /mnt/hdd/tor/lndrest8080fallback/hostname)
 # show TORv3 address on Screen
 hostscreen=$(sudo cat /mnt/hdd/tor/lndrest8080/hostname)
fi
#### RUN LNDCONNECT
# NOTE(review): imagePath is never assigned after this -- lndconnect writes
# /home/admin/lndconnect-qr.png -- so the shred/rm cleanup at the bottom
# operates on an empty path and removes nothing.  Confirm the intended file.
imagePath=""
datastring=""
if [ "${connector}" == "lndconnect" ]; then
 # get Go vars
 source /etc/profile
 # write qr code data to an image
 cd /home/admin
 lndconnect --host=${host} --port=${port} --image ${extraparamter}
 # display qr code image on LCD
 /home/admin/config.scripts/blitz.lcd.sh image /home/admin/lndconnect-qr.png
elif [ "${connector}" == "shango" ]; then
 # write qr code data to text file
 # shango payload = host:port, hex macaroon, TLS cert fingerprint
 datastring=$(echo -e "${host}:${port},\n$(xxd -p -c2000 /home/admin/.lnd/data/chain/${network}/${chain}net/admin.macaroon),\n$(openssl x509 -sha256 -fingerprint -in /home/admin/.lnd/tls.cert -noout)")
 # display qr code on LCD
 /home/admin/config.scripts/blitz.lcd.sh qr "${datastring}"
else
 echo "error='unkown connector'"
 exit 1
fi
# show pairing info dialog
msg=""
if [ $(echo "${host}" | grep -c '192.168') -gt 0 ]; then
 msg="Make sure you are on the same local network.\n(WLAN same as LAN - use WIFI not cell network on phone).\n\n"
fi
msg="You should now see the pairing QR code on the RaspiBlitz LCD.\n\n${msg}When you start the App choose to connect to your own node.\n(DIY / Remote-Node / lndconnect)\n\nClick on the 'Scan QR' button. Scan the QR on the LCD and <continue> or <show QR code> to see it in this window."
whiptail --backtitle "Connecting Mobile Wallet" \
  --title "Pairing by QR code" \
  --yes-button "continue" \
  --no-button "show QR code" \
  --yesno "${msg}" 18 65
# whiptail returns 1 when the user picks "show QR code".
if [ $? -eq 1 ]; then
 # backup - show QR code on screen (not LCD)
 if [ "${connector}" == "lndconnect" ]; then
  lndconnect --host=${hostscreen} --port=${port} ${extraparamter}
  echo "(To shrink QR code: OSX->CMD- / LINUX-> CTRL-) Press ENTER when finished."
  read key
 elif [ "${connector}" == "shango" ]; then
  /home/admin/config.scripts/blitz.lcd.sh qr-console ${datastring}
 fi
fi
# clean up
/home/admin/config.scripts/blitz.lcd.sh hide
shred ${imagePath} 2> /dev/null
rm -f ${imagePath} 2> /dev/null
echo "------------------------------"
echo "If the connection was not working:"
if [ ${#dynDomain} -gt 0 ]; then
 echo "- Make sure that your router is forwarding port ${port} to the Raspiblitz"
fi
if [ $(echo "${host}" | grep -c '192.168') -gt 0 ]; then
 echo "- Check that your WIFI devices can talk to the LAN devices on your router (deactivate IP isolation or guest mode)."
fi
echo "- try to refresh the TLS & macaroons: Main Menu 'EXPORT > 'RESET'"
echo "- check issues: https://github.com/rootzoll/raspiblitz/issues"
echo ""
| true
|
0533133b50e0c280b48740a66d15d2f965924f2c
|
Shell
|
plantenos/plan10-packages
|
/common/pkginst-src/libexec/pkginst-src-dopatch.sh
|
UTF-8
| 742
| 3.625
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
#
# vim: set ts=4 sw=4 et:
#
# Source the pkginst shell helpers, set up the package environment, and run
# the (optional) "patch" step for a source package, recording completion in
# a per-package stamp file so repeated invocations are no-ops.
#
# Passed arguments:
#	$1 - pkgname [REQUIRED]
#	$2 - cross target [OPTIONAL]
if [ $# -lt 1 -o $# -gt 2 ]; then
    echo "${0##*/}: invalid number of arguments: pkgname [cross-target]"
    exit 1
fi
PKGNAME="$1"
PKGINST_CROSS_BUILD="$2"
for f in $PKGINST_SHUTILSDIR/*.sh; do
    . "$f"
done
setup_pkg "$PKGNAME" $PKGINST_CROSS_BUILD
# Bug fix: the original sourced the patch environment twice (once here and
# again after the stamp check); source it only once.
for f in $PKGINST_COMMONDIR/environment/patch/*.sh; do
    source_file "$f"
done
PKGINST_PATCH_DONE="${PKGINST_STATEDIR}/${sourcepkg}_${PKGINST_CROSS_BUILD}_patch_done"
# Skip entirely if this package was already patched.
if [ -f "$PKGINST_PATCH_DONE" ]; then
    exit 0
fi
run_step patch optional
# Bug fix: "touch -f" is not a standard option (GNU touch merely ignores -f).
touch "$PKGINST_PATCH_DONE"
exit 0
| true
|
6b4d2ef0b4a820e706c31e5c6017145f05874760
|
Shell
|
forkdump/wikitopics
|
/src/batch/serif.sh
|
UTF-8
| 2,599
| 3.5625
| 4
|
[] |
no_license
|
#$ -N serif
#$ -S /bin/bash
#$ -j y
#$ -cwd
#$ -V
#$ -o /home/hltcoe/bahn/log/grid
#$ -l h_vmem=4G
# Run SERIF (English/Arabic/Chinese, chosen from the data-set prefix) over
# dated directories of serifxml sentence files, optionally restricted to a
# START_DATE..END_DATE range.  --dry-run only lists the would-be inputs.
echo serif.sh $* >&2
if [ "$WIKITOPICS" == "" ]; then
    echo "Set the WIKITOPICS environment variable first." >&2
    exit 1
fi
if [ "$1" == "--dry-run" ]; then
    DRYRUN=1
    shift
fi
if [ $# -lt 1 ]; then
    echo "Usage: $0 [--dry-run] LANG START_DATE [END_DATE]" >&2
    exit 1
fi
DATA_SET="$1"
# to avoid using LANG, which is used by Perl
LANG_OPTION=`echo $DATA_SET | sed -e 's/-.\+$//'`
if [ "$2" != "" ]; then
    START_DATE=`date --date "$2" +"%Y-%m-%d"`
    if [ "$3" == "" ]; then
        END_DATE="$START_DATE"
    else
        END_DATE=`date --date "$3" +"%Y-%m-%d"`
    fi
else
    # if DATE is omitted, process all articles
    START_DATE="0000-00-00"
    END_DATE="9999-99-99"
fi
SENTENCE_DIR="$WIKITOPICS/data/serif/input"
SERIF_DIR="$WIKITOPICS/data/serif"
if [ ! -d "$SENTENCE_DIR/$DATA_SET" ]; then
    echo "input directory not found: $SENTENCE_DIR/$DATA_SET" >&2
    exit 1
fi
for DIR in $SENTENCE_DIR/$DATA_SET/*/*; do
    if [ ! -d "$DIR" ]; then # such directory not found
        continue
    fi
    BASEDIR=`basename $DIR`
    echo $BASEDIR | grep "^[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]$" > /dev/null
    if [ $? -ne 0 ]; then # the directory's name is not a date
        continue
    fi
    # String comparison works here because the dates are zero-padded ISO.
    if [ "$START_DATE" \> "$BASEDIR" -o "$END_DATE" \< "$BASEDIR" ]; then # if the date falls out of the range
        continue
    fi
    YEAR=${BASEDIR:0:4}
    OUTPUT_DIR="$SERIF_DIR/$DATA_SET/$YEAR/$BASEDIR"
    mkdir -p "$OUTPUT_DIR"
    ### list the input files
    # Bug fix: quote the -name pattern; the unquoted *.xml would be expanded
    # by the shell whenever .xml files exist in the current directory.
    find "$DIR" -type f -name '*.xml' \
        > "$OUTPUT_DIR/input_list.txt"
    if [ $DRYRUN ]; then
        cat "$OUTPUT_DIR/input_list.txt" >&2
    else
        ### run SERIF -- only the binary and parameter file differ per
        ### language, so select them and share one invocation.
        SERIF_BIN=""
        if [ "$LANG_OPTION" == "en" ]; then
            SERIF_BIN=SerifEnglish
            SERIF_PAR=english.par
        elif [ "$LANG_OPTION" == "ar" ]; then
            SERIF_BIN=SerifArabic
            SERIF_PAR=arabic.par
        elif [ "$LANG_OPTION" == "zh" ]; then
            SERIF_BIN=SerifChinese
            SERIF_PAR=chinese.par
        fi
        if [ -n "$SERIF_BIN" ]; then
            /export/common/tools/serif/bin/$SERIF_BIN \
                /export/common/tools/serif/par/$SERIF_PAR \
                -p start_stage=tokens \
                -p source_format=serifxml \
                -p output_format=serifxml \
                -p batch_file="$OUTPUT_DIR/input_list.txt" \
                -o "$OUTPUT_DIR"
        else
            echo "no serif version for the language $LANG_OPTION" >&2
        fi
    fi
done
| true
|
4e560d4a7dae181d6256e146f1ee71f5cd38d7d3
|
Shell
|
IBM-Cloud/vpc-tutorials
|
/vpc-snapshot/terraform/user_data.sh
|
UTF-8
| 1,469
| 3.953125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Cloud-init user data: find unpartitioned virtio data disks, partition and
# format each one, mount it under /datavolumes/<UUID>, persist the mount in
# /etc/fstab, and drop a version.txt marker on the new filesystem.
set -x
sleep 60; # disks may not be mounted yet.... TODO
# step through the disks in /dev/disk/by-id and find just the data (unformatted) disks
# partition, make a file system, mount, add uuid to fstab, add a version.txt file
cd /dev/disk/by-id
# Skip partition entries (-part) and the cloud-init config disk.
for symlink in $(ls -1 virtio-* |sed -e /-part/d -e /-cloud-init/d); do
disk=$(readlink $symlink)
disk=$(realpath $disk)
mount_parent=/datavolumes
mkdir -p $mount_parent
chmod 755 $mount_parent
# An existing Linux partition means the disk was already initialized.
if fdisk -l $disk | grep Linux; then
echo Disk: $disk is already partitioned
else
echo Partition
# the sed is used for self documentation
# (it strips everything after the first token, so the annotated heredoc
# below is reduced to the bare keystrokes fdisk expects)
sed -e 's/\s*\([\+0-9a-zA-Z]*\).*/\1/' << ____EOF | fdisk $disk
n # new partition
p # primary partition
 # default - partition #
 # default - first sector
 # default - last sector
w # write the partition table
____EOF
echo mkfs
disk_partition=${disk}1
yes | mkfs -t ext4 $disk_partition
# Mount by filesystem UUID so the fstab entry survives device renaming.
uuid=$(blkid -sUUID -ovalue $disk_partition)
mount_point=$mount_parent/$uuid
echo add uuid $uuid to /etc/fstab
echo "UUID=$uuid $mount_point ext4 defaults,relatime 0 0" >> /etc/fstab
echo mount $mount_point
mkdir -p $mount_point
chmod 755 $mount_point
mount $mount_point
# Marker file recording how/where this volume was initialized.
cat > $mount_point/version.txt << ____EOF
version=1
initial_disk_partition=$disk_partition
mount_point=$mount_point
____EOF
echo wrote version to $mount_point/version.txt
cat $mount_point/version.txt
sync;sync
fi
done
| true
|
a59253f9931319499489e389d73bc50e6fd5422f
|
Shell
|
oda-hub/oda-runner-bootstrap
|
/runner.sh
|
UTF-8
| 2,158
| 3.203125
| 3
|
[] |
no_license
|
#!/bin/bash
# Entry point / function library for ODA runner jobs.  Usage:
#   ./runner.sh <function> [args...]
# e.g. "./runner.sh container run self-test" re-executes this script inside
# the singularity image with the data store bind-mounted at /data.
export ODAHUB=https://dqueue.staging-1-3.odahub.io@queue-osa11

# Re-run this script inside the singularity container, forwarding all args.
function container() {
    args=$@
    echo -e "\033[33msingularity image origin:\033[0m ${IMAGE_ORIGIN:="https://www.isdc.unige.ch/~savchenk/singularity/odahub_dda_db22be8-2020-07-18-2b9b737e0fb6.sif"}"
    echo -e "\033[33m local data store:\033[0m ${ODA_LOCAL_DATA:?please specify local data store}"
    export DDA_TOKEN=$(cat $HOME/.dda-token)
    singularity exec \
        -B runner.sh:/runner.sh \
        -B $ODA_LOCAL_DATA:/data \
        $IMAGE_ORIGIN \
        bash runner.sh $args
}

# Install/upgrade the python dependencies into the user site.
function sync-python-modules() {
    echo -e "\033[34m... installing recent dependencies\033[0m"
    pip install --user --upgrade \
        oda-node\>=0.1.21 \
        data-analysis
}

# Sync the semi-permanent INTEGRAL IC tree and scw index into REP_BASE_PROD.
function sync-ic() {
    echo -e "\033[34m... synching semi-permanent data (IC tree) to REP_BASE_PROD=${REP_BASE_PROD:?}\033[0m"
    set -e
    mkdir -pv $REP_BASE_PROD/idx/scw/
    wget https://www.isdc.unige.ch/~savchenk/GNRL-SCWG-GRP-IDX.fits -O $REP_BASE_PROD/idx/scw/GNRL-SCWG-GRP-IDX.fits
    rsync -Lzrtv isdcarc.unige.ch::arc/FTP/arc_distr/ic_tree/prod/ $REP_BASE_PROD/
}

# Run every sync-* step listed in $SYNC (default: python-modules + ic).
function sync-all() {
    echo -e "\033[34mwill sync:\033[0m: ${SYNC:=python-modules ic}"
    for sync in $SYNC; do
        echo -e "\033[34m... sync:\033[0m: $sync"
        sync-$sync
    done
}

# Smoke test: the OSA science analysis binary is present and runnable.
function test-osa() {
    which ibis_science_analysis
    plist ibis_science_analysis
    echo "managed!"
}

# Smoke test: the odahub client is importable.
function test-odahub() {
    oda-node version
}

# Run all smoke tests, reporting PASSED/FAILED and dumping the log on failure.
function self-test() {
    for a_test in test-osa test-odahub; do
        if $a_test > log-$a_test 2>&1; then
            echo -e "$a_test \033[32mPASSED\033[0m"
        else
            echo -e "$a_test \033[31mFAILED\033[0m"
            # Bug fix: the log is written to log-$a_test, but the original
            # dumped "$a_test" (a non-existent file) on failure.
            cat log-$a_test
        fi
    done
}

# Set up the in-container environment and execute the remaining arguments.
function run() {
    args=$@
    export HOME_OVERRRIDE=/tmp/home # if many workers, choose non-overlapping
    source /init.sh
    export PATH=/tmp/home/.local/bin:$PATH
    export REP_BASE_PROD=/data/
    export INTEGRAL_DATA=$REP_BASE_PROD
    export CURRENT_IC=$REP_BASE_PROD
    export INTEGRAL_DDCACHE_ROOT=/data/reduced/ddcache/
    $args
}

# Dispatch: treat the script arguments as a command.  Bug fix: quoted so
# arguments containing spaces survive (the original used bare $@).
"$@"
| true
|
e36befe8969af60e9048493a37a7f08c1e8678f6
|
Shell
|
wilcashe/graph_dump
|
/scripts/dump_ploss.sh
|
UTF-8
| 1,528
| 3.375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Compute packet loss from the host/server capture files produced by
# dump_delay.sh and write a summary to <working_folder>PackLoss.txt.
#   $1 - working folder (with trailing slash)
#   $2 - output folder (accepted for interface compatibility; unused here)
echo "-------------------------------"
echo "dump_ploss.sh: Calculando Packet Loss"
working_folder=$1
output_folder=$2
PLoss_file="${working_folder}PackLoss.txt"
trafico_host_1_ordenado="${working_folder}trafico_host_1_ordenado"
trafico_server_1_ordenado="${working_folder}trafico_server_1_ordenado"
# The "trafico_host_1_ordenado" / "trafico_server_1_ordenado" inputs are
# generated by dump_delay.sh.
# Reading via stdin makes wc print only the count, so the awk stage of the
# original ("wc -l FILE | awk '{print $1}'") is unnecessary.
recv_pack=$(wc -l < "${trafico_host_1_ordenado}")
sent_pack=$(wc -l < "${trafico_server_1_ordenado}")
# Lost packets = sent - received.
pack_loss=$((sent_pack-recv_pack))
echo "dump_ploss.sh: paquetes perdidos= $pack_loss"
# bc provides the fractional percentage (shell arithmetic is integer-only).
ppack_loss=$(echo "scale=4; $pack_loss*100/$sent_pack" | bc)
echo "Paquetes enviados= $sent_pack" > "${PLoss_file}"
echo "Paquetes recibidos= $recv_pack" >> "${PLoss_file}"
echo "Paquetes perdidos= $pack_loss" >> "${PLoss_file}"
echo "Porcentaje de Paquetes perdidos (%)= $ppack_loss" >> "${PLoss_file}"
#grep IP /home/pi/graph_dump/created/trafico | grep id |awk '{print $1,$17}' | tr "," " " > /home/pi/graph_dump/created/traf_1
#sed 's/)$//' /home/pi/graph_dump/created/traf_1 > /home/pi/graph_dump/created/traf_2
#a=$(awk 'NR == 1 {print $1}' /home/pi/graph_dump/created/traf_2)
#awk -v c="${a}" '{print $1-c, $2}' /home/pi/graph_dump/created/traf_2 > /home/pi/graph_dump/created/traf_3
| true
|
f6822ceb81df1033bcccf70946ca008152e80e81
|
Shell
|
alme197112/scripts
|
/addScripts.sh
|
UTF-8
| 838
| 3.765625
| 4
|
[
"WTFPL"
] |
permissive
|
#!/usr/bin/env bash
# Clone or update the user's scripts repo and hook its .profile into the
# shell startup files.  MYSCRPITS (sic, kept for env compatibility) may be
# pre-set to override the checkout location.
set -ex

: ${MYSCRPITS:=${HOME}/scripts}

# Install $1 via apt-get if apt-get exists and $1 is not already on PATH.
ifInstall() {
    if command -v apt-get >/dev/null
    then
        if ! command -v "${1}" >/dev/null
        then
            apt-get -q -y install "${1}"
        fi
    fi
}

ifInstall jq
ifInstall vim
ifInstall git

# git is mandatory for the clone/pull below.
command -v git >/dev/null || { echo You need git installed! ; exit 1; }

if [ -d "${MYSCRPITS}" ]
then
    pushd "${MYSCRPITS}"
    git pull -r origin master
    popd
else
    git clone --recursive https://github.com/hibooboo2/scripts.git "${MYSCRPITS}"
fi

# Bug fix: the original tested the literal paths "~/.bashrc"/"~/.profile"
# (tilde does not expand inside quotes), so these checks always succeeded;
# use $HOME so they test the real files.
if [ ! -f "${HOME}/.bashrc" ]
then
    echo . ${MYSCRPITS}/.profile >> "${HOME}/.bashrc"
elif [ ! -f "${HOME}/.profile" ]
then
    echo . ${MYSCRPITS}/.profile >> "${HOME}/.profile"
else
    # Bug fix: the prompt referenced the undefined ${MYSCRIPTS} (typo).
    echo Where do you want to source ${MYSCRPITS}/.profile ?
    read TOSOURCE
    [[ -f "${TOSOURCE}" ]] && echo . ${MYSCRPITS}/.profile >> "${TOSOURCE}"
fi
| true
|
e9ef724f6984eddb63798e207bcb0bcde6c0120f
|
Shell
|
dmayilyan/configs
|
/.dwm/autostart.sh
|
UTF-8
| 267
| 3.234375
| 3
|
[] |
no_license
|
#! /bin/bash
# Print the free memory from /proc/meminfo as "Free mem: <N> MB".
# (Assigns the global "mem", matching the original's behavior.)
mem(){
    # awk does the line matching and the KiB -> MB conversion in one pass,
    # replacing the original cat | grep | awk pipeline.
    mem=$(awk '/^MemFree:/ {printf "%.0f MB", $2/1024.0}' /proc/meminfo)
    echo "Free mem: $mem"
}
# Emit the current date/time formatted for the status bar,
# e.g. "Mon 01 Jan 13:37".  (Assigns the global "dt", like the original.)
dt(){
    dt="$(date +'%a %d %b %H:%M')"
    printf '%s\n' "$dt"
}
# Restore the wallpaper last set with nitrogen.
nitrogen --restore &
# Status-bar loop: dwm displays the X root window's name, so refresh it
# with free memory + clock once a second; backgrounded so startup continues.
while true; do
xsetroot -name "$(mem) $(dt)"
sleep 1
done &
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.