blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
2e30fce0c19d0b46e11074d9984fa2d1c5c40585
|
Shell
|
SwissDataScienceCenter/renku-storage
|
/.travis/check-code-formatting.sh
|
UTF-8
| 384
| 3.71875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# CI guard: fail the build when `git diff` is non-empty, i.e. when
# scalariform reformatted sources that had been committed unformatted.
RESULTS_FILE=$(mktemp)
# Clean the temp file up on every exit path (success, failure, signal),
# instead of repeating `rm` in each branch.
trap 'rm -f "$RESULTS_FILE"' EXIT
echo "Running git diff on $(pwd) to check if scalariform changed code..."
# --exit-code: status 0 when the tree is clean, 1 when files changed.
# Testing the command directly replaces the original round-trip through
# DIFF=$(git diff ...; echo $?).
if git diff --exit-code > "$RESULTS_FILE"; then
  echo "No diff on source code"
else
  echo "Changes detected, code was probably not formatted before commit:"
  cat "$RESULTS_FILE"
  exit 1
fi
| true
|
a55d5dd839ea3c602563195f6044a4fd4063c72d
|
Shell
|
AshutoshMatal/Bridgelabz
|
/function/Two_No_Pallindrome.sh
|
UTF-8
| 536
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash -x
read -p "Enter number : " number
# Reverse the digits of the argument and compare the result against the
# hard-coded second number (321); report whether the pair matches.
function isPalindrome()
{
local reversed=0
#HERE IS OUR SECOND NUMBER
temp=321
local n=$1
while [[ $n -gt 0 ]]
do
reversed=$(( reversed * 10 + n % 10 ))
n=$(( n / 10 ))
done
if [ $reversed -eq $temp ]
then
echo "$1 is Palindrome"
else
echo "$1 is not Palindrome "
fi
}
result="$( isPalindrome $number)"
echo "$result"
| true
|
a6d11e99642700d12c1249af8b285b074bb489a8
|
Shell
|
yigong/PyBeamDiag
|
/slurm/SLM_add_fName
|
UTF-8
| 870
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/bash -I
# NOTE(review): "-I" is not a standard bash flag — possibly meant "-l";
# confirm against the cluster setup before changing.
#
#SBATCH --job-name=1um_backP_fName
#
#SBATCH --account=ac_etci
#
#SBATCH --partition=lr3
#
#SBATCH --qos=lr_debug
#
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=16
#
#SBATCH --time=00:30:00
#
#SBATCH --mail-type=all
#SBATCH --mail-user=ygzhang@lbl.gov
# Root directories of the analysis (PyBeamDiag) and simulation (G4BeamDiag) trees.
PyBeamDiag=/global/scratch/ygzhang/PyBeamDiag
G4BeamDiag=/global/scratch/ygzhang/G4BeamDiag
# Simulation configuration: step size and detector plane.
step=1um
plane=backP
# Map the detector plane onto a facing direction used downstream.
if [ "$plane" == backP ]; then
    facing=bottom
elif [ "$plane" == pixelP ]; then
    facing=top
else
    echo 'WRONG $plane !!!'
    echo "plane = $plane"
    # Bug fix: previously the script continued with $facing unset.
    exit 1
fi
fitsDir=${G4BeamDiag}/step_size/out_LRC/${step}/fits_${plane}
pickle=${PyBeamDiag}/data/step_size/PSF_${step}_${plane}.p
echo "step = $step"
echo "plane = $plane"
echo "facing = $facing"
echo "fitsDir = $fitsDir"
echo "pickle = $pickle"
echo 'Compute PSF at window'
cd "$PyBeamDiag"
python PSF_window.py "$fitsDir" "$pickle"
| true
|
a833b4bac7af7ba63e3a7c26e1f9ac633802a920
|
Shell
|
MinaProtocol/mina
|
/automation/scripts/gen-keys-ledger.sh
|
UTF-8
| 2,293
| 3.375
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#! /bin/bash
# Generate whale/fish block-producer keypairs and seed libp2p keys for a
# Mina testnet, then assemble them into ./genesis_ledger.json.
# NOTE(review): MINA_DAEMON_IMAGE, RESET and TOTAL_KEYS are set but never
# used below — presumably consumed by sibling scripts; confirm.
MINA_DAEMON_IMAGE="minaprotocol/mina-daemon:1.3.2beta2-release-2.0.0-6f9d956-focal-berkeley"
# Parse --key=value style command-line options.
while [ $# -gt 0 ]; do
case "$1" in
--testnet=*)
TESTNET="${1#*=}"
;;
--reset=*)
RESET="${1#*=}"
;;
--whales=*)
WHALES="${1#*=}"
;;
--fish=*)
FISH="${1#*=}"
;;
--seeds=*)
SEEDS="${1#*=}"
;;
--privkey-pass=*)
PRIVKEY_PASS="${1#*=}"
;;
# --artifact-path=*)
# ARTIFACT_PATH="${1#*=}"
# ;;
esac
shift
done
TOTAL_KEYS=$(( $WHALES + $FISH ))
# Per-account ledger balances (whole-token amounts).
WHALE_AMOUNT=2250000
FISH_AMOUNT=375000
KEYSDIR="./keys"
# Refuse to silently clobber an existing key set: ask before deleting.
if [[ -d "$KEYSDIR" ]]
then
echo "keys directory and genesis ledger already exists"
read -r -p "Are you sure you want to overwrite it? [any response other than 'y' will exit] " RESPONSE
case "$RESPONSE" in
[yY])
rm -rf "$KEYSDIR"
rm ./*.accounts.json
rm ./genesis_ledger.json
echo "keysdir and genesis ledger deleted, continuing"
;;
*)
exit 1
;;
esac
fi
mkdir "$KEYSDIR"
# The mina CLI reads both passwords from the environment.
export MINA_PRIVKEY_PASS="${PRIVKEY_PASS}"
export MINA_LIBP2P_PASS="${PRIVKEY_PASS}"
echo "generating whale keys"
for i in $(seq 1 $WHALES); do
mina advanced generate-keypair --privkey-path "${KEYSDIR}/whale-${i}" 2>/dev/null
done
echo "generating fish keys"
for i in $(seq 1 $FISH); do
mina advanced generate-keypair --privkey-path "${KEYSDIR}/fish-${i}" 2>/dev/null
done
echo "generating seeds' libp2p keys"
mkdir "${KEYSDIR}/libp2p-keys"
for i in $(seq 1 $SEEDS); do
mina libp2p generate-keypair --privkey-path "${KEYSDIR}/libp2p-keys/seed-${i}" 2>/dev/null
done
# One JSON account object per public key; each account self-delegates
# (jq -R treats every input line, i.e. every .pub file content, as a raw string).
echo "creating partial whale and fish json account objects"
cat ${KEYSDIR}/whale-*.pub | jq -R '{"pk": ., "delegate": ., "sk": null, "balance": "'${WHALE_AMOUNT}'.000000000" }' > ./whales.accounts.json
cat ${KEYSDIR}/fish-*.pub | jq -R '{"pk": ., "delegate": ., "sk": null, "balance": "'${FISH_AMOUNT}'.000000000" }' > ./fish.accounts.json
GENESIS_TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
echo "Merging partial ledgers into genesis_ledger..."
# jq -s slurps both partial account files into one array under .ledger.accounts.
jq -s '{ genesis: { genesis_state_timestamp: "'${GENESIS_TIMESTAMP}'" }, ledger: { name: "'${TESTNET}'", accounts: [ .[] ] } }' ./*.accounts.json > "./genesis_ledger.json"
rm ./*.accounts.json
| true
|
ea189d3b1fd448e88404fab8b3b0c8766efd7d6a
|
Shell
|
tantlab/digitazation-of-everyday-life
|
/docker/api/entrypoint.sh
|
UTF-8
| 592
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
# Container entrypoint: install Python dependencies, then launch the API
# with the Flask dev server (MODE=dev) or Gunicorn (anything else).
cd /api

# Print the decorated " ~" banner around a section title.
banner() {
  echo
  echo " ~"
  echo " ~ $1"
  echo " ~"
  echo
}

if [ "$MODE" = "dev" ]; then
  echo "/!\\ Mode is set to DEV /!\\"
else
  echo "/!\\ Mode is set to PRODUCTION /!\\"
fi
echo "(i) Python version is $(python --version)"

banner "Install dependencies"
pip install -r requirements.txt

if [ "$MODE" = "dev" ]; then
  banner "Start flask server"
  export FLASK_APP=api.py
  export FLASK_ENV=development
  python -m flask run --host 0.0.0.0
else
  banner "Start Gunicorn"
  gunicorn -c ./gunicorn.conf.py wsgi:app
fi
| true
|
88241dfa72635bbd75999e7d2ded86e9f7fabc55
|
Shell
|
mrpossoms/exo
|
/bin/exo-cmd-msg.sh
|
UTF-8
| 750
| 4.125
| 4
|
[] |
no_license
|
#!/bin/bash
# Create a blank message header from the .msg.hpp template.
# Must be run from inside a 'msg' directory; relies on helpers
# (get_cfg_val, invoke, prompt) provided by exo-utils.sh.
source exo-utils.sh

function help {
	echo "Creates a blank message. Must be used inside a 'msg' directory"
	exit 0
}

function usage {
	echo "msg [message-name]"
	exit 0
}

MOD_DIR=$(get_cfg_val "$HOME/.exo" template_path)/mod/.mod

name="$1"
# invoke's argument handling is defined in exo-utils.sh; $name is left
# unquoted on purpose so an empty name passes no extra argument.
invoke help $name
invoke usage $name

if [ "$(basename "$(pwd)")" != "msg" ]; then
	# Bug fix: message previously read "withing".
	echo "Command should only be run within a 'msg' directory"
	exit 1
fi

if [ -z "$name" ]; then
	name=$(prompt "Enter a new module name: ")
fi

if [ -d "$name" ]; then
	echo "Error: '$name' already exists"
	exit 2
else
	# Derive the include-guard macro from the upper-cased name, then
	# instantiate the template (MAGIC substitution must run first since
	# "MSG_NAME" is a prefix of "MSG_NAME_MAGIC").
	upper_case_name=$(echo "$name" | awk '{print toupper($0)}')
	sed -e "s/MSG_NAME_MAGIC/${upper_case_name}_MAGIC/" -e "s/MSG_NAME/$name/" .msg.hpp > "$name.hpp"
fi
| true
|
0cd783970ed2ddb0f06f774ff5f858aacb3a98ec
|
Shell
|
Vivek2s/dev-app-frontend
|
/build.sh
|
UTF-8
| 805
| 3.40625
| 3
|
[] |
no_license
|
# Build the Angular app and push the artifacts to the target repo.
# Usage: ./build.sh prod
git pull
git commit -am "Building Home Page"
prod='dev-app-live'
echo "**************************************************** $1 ***********************"
if [ "$1" = "prod" ]
then
ng build --prod --aot --output-hashing=none --build-optimizer --base-href=https://vivek2s.github.io/dev-app-live/
dir=$prod
fi
echo "**************************************************** $dir ***********************"
# Bug fix: with an unknown argument $dir stayed empty, so the cleanup
# below expanded to `rm -rf ../*` and would delete sibling projects.
if [ -z "$dir" ]; then
echo "Unknown target '$1' (expected 'prod'). Aborting."
exit 1
fi
read -p "Are you sure you want to push these changes to $1 server?(y/n): " -n 1 -r
echo # (optional) move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]
then
# ${dir:?} aborts the script rather than expand to an empty path.
rm -rf ../"${dir:?}"/*
mv dist/* ../"$dir"/
cd ../"$dir" || exit 1
git add . && git commit -am "Build DevApp"
git push origin master
else
echo "*************** You Choose No: Deleting dist file ******************";
rm -rf dist/
fi
| true
|
0a36b916c1344ae80864f3508db65ce1397f3c10
|
Shell
|
purna1sai/OS-Shell-Scripting
|
/pr02_evenodd.sh
|
UTF-8
| 117
| 3.515625
| 4
|
[] |
no_license
|
#!/bin/bash
# Report whether the given integer argument is even or odd.
# Usage: pr02_evenodd.sh <integer>

# Print "<n> is an even number" / "<n> is an odd number" for integer $1.
parity() {
	local n=$1
	if [ $(( n % 2 )) -eq 0 ]
	then
		echo "$n is an even number"
	else
		echo "$n is an odd number"
	fi
}

if [ $# -ge 1 ]
then
	parity "$1"
else
	# Bug fix: a missing argument previously caused an arithmetic
	# syntax error in $(($n%2)) followed by a broken `[` test.
	echo "usage: $0 <integer>" >&2
fi
| true
|
0aa15897d4447ff7404af2222884c441e51e8fa4
|
Shell
|
OpenVnmrJ/OpenVnmrJ
|
/src/scripts/restore3x.sh
|
UTF-8
| 2,039
| 3.375
| 3
|
[
"Apache-2.0",
"GPL-3.0-only"
] |
permissive
|
: '@(#)restore3x.sh 22.1 03/24/08 2003-2008 '
#
# Copy NDDS-3x related binaries from the installed /vnmr tree into another
# VnmrJ installation chosen interactively by the operator.
#
# Copyright (C) 2015 University of Oregon
#
# You may distribute under the terms of either the GNU General Public
# License or the Apache License, as specified in the LICENSE file.
#
# For more information, see the LICENSE file.
#
#
# NOTE(review): this shebang is not on line 1, so it has no effect; the
# script runs under whatever shell invokes it.
#! /bin/sh
# This scripts copies files from VnmrJ 2.3A or later
# to another VnmrJ directory, so we can restore that copy of VnmrJ
# to the console, restoreing NDDS 3x
# NOTE(review): ${vnmrsystem} is never set in this script — presumably
# inherited from the VnmrJ environment; confirm.
if [ -f /vnmr/vnmrrev ]
then
echo "This version of VnmrJ is in ${vnmrsystem}:"
head -2 ${vnmrsystem}/vnmrrev
else
echo "Cannot find /vnmr/vnmrrev, cannot proceed"
exit 1
fi
echo ""
read -p "Enter the full path of the VnmrJ version you want to restore: " restore_dir
echo ""
# Sanity-check the chosen directory really is a VnmrJ installation.
if [ -f ${restore_dir}/vnmrrev ]
then
echo "This will restore ${restore_dir}"
head -2 ${restore_dir}/vnmrrev
else
echo "cannot find ${restore_dir}/vnmrrev, cannot proceed"
exit 1
fi
# Probe for write permission by creating and removing a scratch file.
touch ${restore_dir}/remove_me >/dev/null 2>/dev/null
if [ $? -ne 0 ]
then
rm -f ${restore_dir}/remove_me
echo ""
echo "No write permission to ${restore_dir}, cannot proceed"
exit 1
else
rm -f ${restore_dir}/remove_me
fi
abc="555"
# NOTE(review): `[ $abc=1 ]` is a single non-empty word, so this test is
# ALWAYS true — the guard is effectively a no-op (possibly a disabled
# switch left in on purpose); confirm intent before "fixing".
if [ $abc=1 ]
then
cd ${restore_dir}/bin
rm -f lnvsetacq.orig #don't complain if it does not exist
mv lnvsetacq lnvsetacq.orig
cp /vnmr/bin/lnvsetacq2 lnvsetacq
cd ${restore_dir}/acqbin
rm -f consoledownload42x #avoid permission problems
cp /vnmr/acqbin/consoledownload42x .
rm -f testconf42x #avoid permission problems
cp /vnmr/acqbin/testconf42x .
rm -f flashia42x #avoid permission problems
cp /vnmr/acqbin/flashia42x .
rm -f consoledownload3x #avoid permission problems
cp /vnmr/acqbin/consoledownload3x .
rm -f testconf3x #avoid permission problems
cp /vnmr/acqbin/testconf3x .
rm -f flashia3x #avoid permission problems
cp /vnmr/acqbin/flashia3x .
cd ${restore_dir}/acq/download
rm -f nddslib.o #avoid permission problems
cp nvScript nddslib.o
rm -f nddslib.md5 #avoid permission problems
cp nvScript.md5 nddslib.md5
fi
echo "Done"
| true
|
424d68300ac9f4e89855ba55772033e70c04f28c
|
Shell
|
sami1riaz/Comp-421
|
/P2 Deliverable/SQL_and_Bash_Scripts/execute_sql.sh
|
UTF-8
| 267
| 3.109375
| 3
|
[] |
no_license
|
#! /bin/bash
# Run a SQL file against the cs421 database with psql, appending all
# output to a log file.
#
# Arguments:
#   $1 - SQL file to execute
#   $2 - log file to append output to
#   $3 - optional extra psql flag(s), e.g. -a to echo all input
SQL_FILE=$1
LOG_FILE=$2
ECHO_ALL=$3
printf ' executing %s\n' "$SQL_FILE"
printf '\npsql cs421 < %s\n\n' "$SQL_FILE" >> "$LOG_FILE"
# Bug fix: the original tested `[ ECHO_ALL != "" ]` — the literal string
# "ECHO_ALL", which is always true, so the plain branch never ran.
if [ -n "$ECHO_ALL" ]
then
# $ECHO_ALL is deliberately unquoted: it may carry several psql flags.
psql $ECHO_ALL cs421 < "$SQL_FILE" >> "$LOG_FILE" 2>&1
else
psql cs421 < "$SQL_FILE" >> "$LOG_FILE" 2>&1
fi
| true
|
d362b0d771be057111022cc5203775930ed49cd8
|
Shell
|
liujunai/docx
|
/shell脚本/myhadoop.sh
|
UTF-8
| 1,000
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash
# Start or stop the whole Hadoop cluster (HDFS, YARN, history server)
# by ssh-ing into the appropriate hosts.
# Usage: myhadoop.sh <start|stop>
if [ $# -lt 1 ]
then
echo "No Args Input..."
exit ;
fi
case $1 in
"start")
# Bring services up in order: HDFS on liu1, YARN on liu2, then the
# MapReduce job-history server on liu1.
echo " =================== 启动 hadoop 集群 ==================="
echo " --------------- 启动 hdfs ---------------"
ssh liu1 "/opt/module/hadoop-3.2.0/sbin/start-dfs.sh"
echo " --------------- 启动 yarn ---------------"
ssh liu2 "/opt/module/hadoop-3.2.0/sbin/start-yarn.sh"
echo " --------------- 启动 historyserver ---------------"
ssh liu1 "/opt/module/hadoop-3.2.0/bin/mapred --daemon start historyserver"
;;
"stop")
# Tear down in reverse order: history server, YARN, then HDFS.
echo " =================== 关闭 hadoop 集群 ==================="
echo " --------------- 关闭 historyserver ---------------"
ssh liu1 "/opt/module/hadoop-3.2.0/bin/mapred --daemon stop historyserver"
echo " --------------- 关闭 yarn ---------------"
ssh liu2 "/opt/module/hadoop-3.2.0/sbin/stop-yarn.sh"
echo " --------------- 关闭 hdfs ---------------"
ssh liu1 "/opt/module/hadoop-3.2.0/sbin/stop-dfs.sh"
;;
*)
echo "Input Args Error..."
;;
esac
| true
|
6410f46a57d9c4f5399f265d69f3e6a77cc9155d
|
Shell
|
jiangqianghua/workspace
|
/shell/imocc_study/if2.sh
|
UTF-8
| 396
| 2.703125
| 3
|
[] |
no_license
|
#########################################################################
# File Name: if2.sh
# Author: qianghua jiang
# mail: 240437339@qq.com
# Created Time: Thu 03 Sep 2015 08:51:47 PM PDT
#########################################################################
#!/bin/bash
# Warn when the sda1 partition usage reaches 60%.

# Print the warning when the given usage percentage is 60 or more.
# An empty/missing value is treated as 0 (bug fix: previously an empty
# $test1 made `[ "" -ge "60" ]` fail with an error).
report_full() {
	local pct=${1:-0}
	if [ "$pct" -ge 60 ]
	then
		echo "the sda is full"
	fi
}

# Current usage% of sda1 (empty when no sda1 device is mounted).
test1=$(df -h | grep sda1 | awk '{print $5}' | cut -d '%' -f 1)
#echo $test1
report_full "$test1"
| true
|
f0ef3db620f3f81c10ce0abd7d0285c50406ce54
|
Shell
|
bentech/docker-ikev2-vpn
|
/image/scripts/entrypoint
|
UTF-8
| 189
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Container entrypoint for the IKEv2 VPN image.
set -e
# Seed /etc/ipsec.d from the baked-in backup on first run (empty volume).
if [ -z "$(ls -A /etc/ipsec.d)" ]; then
	cp -r /etc/ipsec.d.bk/* /etc/ipsec.d/
fi
if [ $# -eq 0 ]
then
	# Default behaviour: ensure a host certificate exists, then run the VPN.
	create-host-cert
	exec start-vpn
else
	# Bug fix: `exec ${*}` re-split every argument on whitespace;
	# "$@" preserves each argument as one word.
	exec "$@"
fi
| true
|
6ec5f6b543eb14d0a49fe9ed44171f165f8529e6
|
Shell
|
morhekil/dotfiles
|
/zsh/includes/S55_git
|
UTF-8
| 5,565
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/zsh
# Git conveniences for zsh: helper functions, a cached git-prompt
# implementation, and a collection of short git aliases.
autoload colors
colors
#export GIT_AUTHOR_NAME="${FULLNAME}"
#export GIT_AUTHOR_EMAIL="${EMAIL}"
#export GIT_COMMITTER_NAME="${FULLNAME}"
#export GIT_COMMITTER_EMAIL="${EMAIL}"
# ----------------------------------------------------------------------
# various git scripts
gitize() {
  # Turn the current directory into a git repository: init, stage
  # everything, make the "initial commit", then pack the object store.
  git init &&
    git add . &&
    git commit -a -m"initial commit" &&
    git gc
}
# ----------------------------------------------------------------------
# The following implements a caching mechanism for git information.
# The RPROMPT executes get_git_bprompt_info() and include the output...
#
# setopt prompt_subst
# RPROMPT="$(get_git_prompt_info)"
#
# Cache slots for the prompt. A non-empty __ZSH_GIT_VARS_INVALID means
# the cache must be recomputed before use.
export __ZSH_GIT_DIR=
export __ZSH_GIT_BRANCH=
export __ZSH_GIT_STATE=
export __ZSH_GIT_VARS_INVALID=1
# get the name of the branch we are on
# Prints "(branch) -- " for the current branch, or nothing at all when
# not inside a git repository (errors are discarded).
parse_git_branch() {
git branch --no-color 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/(\1) -- /'
}
# http://blog.madism.org/index.php/2008/05/07/173-git-prompt
# Recompute the cached prompt variables for the current directory:
# repository path, branch name, and any in-progress operation
# (rebase / am / merge / bisect). Quietly does nothing outside a repo.
new_parse_git_branch() {
local git_dir branch
psvar=()
git_dir=$(git rev-parse --git-dir 2> /dev/null) || return
# rewritten by Thomas Ritz <thomas(at)galaxy-ritz(dot)de>
# Detect the operation in progress from git's state files/directories.
if test -d "$git_dir/rebase-apply"; then
if test -f "$git_dir/rebase-apply/rebasing"; then
__ZSH_GIT_STATE="rebase"
elif test -f "$git_dir/rebase-apply/applying"; then
__ZSH_GIT_STATE="am"
else
__ZSH_GIT_STATE="am/rebase"
fi
branch="$(git symbolic-ref HEAD 2>/dev/null)"
elif test -f "$git_dir/rebase-merge/interactive"; then
__ZSH_GIT_STATE="rebase -i"
branch="$(cat "$git_dir/rebase-merge/head-name")"
elif test -d "$git_dir/rebase-merge"; then
__ZSH_GIT_STATE="rebase -m"
branch="$(cat "$git_dir/rebase-merge/head-name")"
elif test -f "$git_dir/MERGE_HEAD"; then
__ZSH_GIT_STATE="merge"
branch="$(git symbolic-ref HEAD 2>/dev/null)"
else
test -f "$git_dir/BISECT_LOG" && __ZSH_GIT_STATE="bisect"
# Fallback chain: symbolic ref, exact tag match, then abbreviated SHA.
branch="$(git symbolic-ref HEAD 2>/dev/null)" || \
branch="$(git describe --exact-match HEAD 2>/dev/null)" || \
branch="$(cut -c1-7 "$git_dir/HEAD")..."
fi
# __ZSH_GIT_FULL_DIR=$(readlink -f ${git_dir:h})
# ":h" is zsh's head modifier (like dirname); $HOME is shown as "~".
__ZSH_GIT_FULL_DIR="${git_dir:h}"
__ZSH_GIT_DIR="${__ZSH_GIT_FULL_DIR/$HOME/~}"
__ZSH_GIT_BRANCH="${branch#refs/heads/}"
export __ZSH_GIT_FULL_DIR
export __ZSH_GIT_DIR
export __ZSH_GIT_BRANCH
export __ZSH_GIT_STATE
# Empty marker = cache is now valid.
export __ZSH_GIT_VARS_INVALID=
}
# Mark the cached git prompt variables as stale.
zsh_git_invalidate_vars() {
export __ZSH_GIT_VARS_INVALID=1
}
# Recompute the cached git prompt variables.
zsh_git_compute_vars() {
new_parse_git_branch
}
# on each chdir update the cached git variable(s)
preexec_functions+='zsh_git_preexec_update_vars'
chpwd_functions+='zsh_git_chpwd_update_vars'
#precmd_functions+='zsh_git_precmd_update_vars'
# Invalidate the cache on every directory change.
zsh_git_chpwd_update_vars() {
zsh_git_invalidate_vars
}
# Invalidate the cache before running any command line mentioning "git".
zsh_git_preexec_update_vars() {
case "$(history $HISTCMD)" in
*git*) zsh_git_invalidate_vars ;;
esac
}
# this function returns the current git branch
# it takes as argument a string with printf like arguments:
# '%P' path to top of repository
# '%p' path to top of repository, with s/$HOME/~/
# '%b' current branch
# '%s' state of rebase/merge/bisect/am
# '%%' a '%' character
# providing no arguments is equivalent to "%p %b %s".
get_git_prompt_info() {
# Refresh the cache if stale; emit nothing when not inside a repo.
test -n "$__ZSH_GIT_VARS_INVALID" && zsh_git_compute_vars
test -n "$__ZSH_GIT_DIR" || return
local def fmt res
def='%p %b %s'
fmt=$@
res=${fmt:-$def}
# Each //pattern/repl substitutes every occurrence of the placeholder;
# note each replacement appends a trailing space.
res=${res//\%P/$__ZSH_GIT_FULL_DIR }
res=${res//\%p/$__ZSH_GIT_DIR }
res=${res//\%b/$__ZSH_GIT_BRANCH }
test -n "$__ZSH_GIT_STATE" && \
res=${res//\%s/$__ZSH_GIT_STATE }
res=${res//\%%/%}
echo -n "${res}"
}
#
# Will return the current branch name
# Usage example: git pull origin $(current_branch)
#
function current_branch() {
# NOTE(review): "ref" is deliberately left un-scoped (not local);
# confirm nothing relies on it before tightening.
ref=$(git symbolic-ref HEAD 2> /dev/null) || return
echo ${ref#refs/heads/}
}
# delete tracking branches which are gone on remote
# The --format prints a branch's refname only when its upstream shows
# "[gone]"; sed strips the refs/heads/ prefix and blank lines before the
# surviving names are force-deleted.
function git-delete-gone() {
git branch --list --format "%(if:equals=[gone])%(upstream:track)%(then)%(refname)%(end)" \
| sed 's,^refs/heads/,,;/^$/d' \
| xargs git branch -D
}
# these aliases take advantage of the previous function
# Staging / fetching / stashing shortcuts.
alias gad='git add --all'
alias gf='git fetch --prune && git-delete-gone'
alias gsa='git stash save --include-untracked'
alias gsp='git stash pop'
# Pull/push pinned to the current branch.
alias gpl='git pull --ff-only origin $(current_branch)'
alias gps='git push -u origin $(current_branch)'
alias gpsf='git push -u --force-with-lease origin $(current_branch)'
alias gundo="git reset 'HEAD^1'"
# Everyday porcelain shortcuts.
alias gco="git checkout"
alias gci="git commit"
alias gst="git status"
alias gdi="git diff"
alias gdic="git diff --cached"
# Pretty log helpers come from the dotfiles githelpers script.
source ~/.dotfiles/git/githelpers
alias gl="pretty_git_log" # git log | less"
alias gla="pretty_git_log --all" # git log | less"
alias gr="pretty_git_log -20" # git log | less"
alias gra="pretty_git_log --all -20" # git log | less"
alias gh="pretty_git_log -1"
alias ghp="show_git_head"
# Jump to the repo root; fuzzy branch checkout via percol.
alias grt='cd $(git rev-parse --show-toplevel || echo ".")'
alias gitb='git checkout $(git branch | percol)'
alias gitba='git checkout $(git branch -a | percol)'
| true
|
1aecfebfa9c968394128dfd7b64bdff62c628fad
|
Shell
|
MOVE-II/move-on-helium-sensors
|
/tools/grafana/bin/mqttserial/mqttserial.sh
|
UTF-8
| 102
| 2.546875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
topic=sensors-raw
if [ -n "$1" ]; then
topic=$1
fi
mosquitto_sub -t $topic
| true
|
09788969edbecefb1c343bc7fec58b90a9cc0de1
|
Shell
|
billy-wang/shell_test
|
/TmakeChenyee
|
UTF-8
| 49,122
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
#
# DESCRIPTION: make project
#
# SCRIPT NAME:
#
# Usage:
#
#
# Input: stdin
#
# Output:
#
# AUTHOR: Ling Fen
#
# EMAIL: lingfen@chenyee.com
#
# DATE: 2013-11-49
#
# HISTORY:
# REVISOR DATE MODIFICATION
# LingFen 2013-11-49 create
# Print the (bilingual) help text: options, actions, buildable modules,
# the code-overlay copy order for each build flavour, and examples.
usage(){
local tools_name=$(basename $0)
printf "
Usage : ./${tools_name} [Options] GBW8901A01_A Actions
Options:
-l, -listp : List the available project.
-h, -help : Print this message and exit.
-t, -tee : Print log information on the standard-out.
#-q, -quiet : Don't print log information on the standard-out.
#-j [N], cpu [N]
# : Allow N jobs to compile at once,you must add a space between Options and [N].
#-m [MODULE] : Only compile the [MODULE], you must add a space between Options and [MODULE].
Actions:
-n, --new : Clean and perform a full build.
-r, --remake : retry perform a build.
-a, --android : do not update-api,only build android image.
-c, --copy_target : copy target to release only.
-C, --copy_chenyee : copy chenyee code to alps dir
-s, --sign : sign image
Module:
-i, --image <bootimage|systemimage|userdataimage|update-api|kernel|otapackage>
:Specify image to be build/re-build (bootimage|systemimage|userdataimage|recoveryimage).
:Specify module to be build/re-build (kernel|otapackage).
Project:
-p, --project <packages/app/Email/> use mmma commad, check and build other dependency module
:Specif Project pathdir to be build
-P, --project <packages/app/Email/> use mmm commad, no dependency module
:Specif Project pathdir to be build
编译rom项目,代码copy 顺序:
#编译接口代码存放位置
chenyee/alps_build #只有builder 才可以上传代码到这里
#驱动相关代码存放位置
chenyee/alps_drv #所有项目共用的代码
chenyee/project/\${project} #某个项目共用的代码
chenyee/project/\${product} #某个产品代码
#rom相关代码存放位置
chenyee/alps #所有项目共用的代码
chenyee/alps_oversea #所有海外项目共用的代码
chenyee/project/\${project}_alps #某个项目共用的代码
chenyee/project/\${product}_alps #某个产品代码
编译cta/platform项目,代码copy 顺序:
#编译接口代码存放位置
chenyee/alps_build #只有builder 才可以上传代码到这里
#驱动相关代码存放位置
chenyee/alps_drv #所有项目共用的代码
chenyee/project/\${project} #某个项目共用的代码
chenyee/project/\${product} #某个产品代码
#cta/platform 相关代码存放位置
chenyee/alps_cta or chenyee/alps_platform #所有项目共用的代码
chenyee/project/\${product}_alps #某个产品代码
编译stock原生项目,代码copy 顺序:
#编译接口代码存放位置
chenyee/alps_build #只有builder 才可以上传代码到这里
#驱动相关代码存放位置
chenyee/alps_drv #所有项目共用的代码
chenyee/project/\${project} #某个项目共用的代码
chenyee/project/\${product} #某个产品代码
#stock 相关代码存放位置
chenyee/alps_oversea_stock #所有海外原生项目共用的代码
chenyee/project/\${project}_alps #某个项目共用的代码
chenyee/project/\${product}_alps #某个产品代码
Example:
./${tools_name} -l
: 列出可编译的产品id
./${tools_name} [-v <user|eng|userdebug>] GBW8901A01_A <-n|--new>
: 编译产品 GBW8901A01_A 的user or eng or userdebug 版本
./${tools_name} GBW8901A01_A -i <bootimage|systemimage|bootimage>
: 编译产品GBW8901A01_A 的bootimage | systemimage | bootimage. 还可以是kernel,otapackage等模块
./${tools_name} GBW8901A01_A -p package/app/Email
: 编译产品GBW8901A01_A的一个apk.还可是以framework.jar等project.
"
}
# Abort when the status in $1 is non-zero: print the caller location and
# message in red, dump common compiler-error patterns from the logs, and
# exit with that status. Remaining arguments form the message.
# NOTE(review): LOGPATH is not assigned in the visible code — presumably
# exported by the build environment; confirm.
error(){
local ret="$1";shift
local info="$@"
if [ "$ret" -ne 0 ];then
echo -e "\033[31m[${FUNCNAME[1]}: "`caller 0 | cut -d " " -f 1`"] Error $ret: $info\033[0m" >&2
echo "*********************************************************************************"
echo "************************build error check keywords begin ************************"
grep -rn -E "^ERROR\: |^FATAL\: |^FAILED\: |^Error\:|\: \*\*\* |\: error\: " ${LOGPATH}
echo "************************ build error check keywords end ************************"
echo "*********************************************************************************"
exit $ret
fi
}
# Parse the command line into the globals ACTION / MODULE / PROJECT_PATH /
# COPY_* / TARGET_BUILD_VARIANT etc., then validate that a product id was
# supplied and its cy_project .mk file exists.
get_opts(){
argv="$@"
# Compatibility with server-side builds: accept the server's
# -opt=TARGET_BUILD_VARIANT=<variant> form before normal option parsing.
local build_variant=$(expr match "$argv" '-opt=TARGET_BUILD_VARIANT=\(\w*\)')
if [ "$build_variant" == "user" -o "$build_variant" == "userdebug" -o "$build_variant" == "eng" ];then
echo "Is server built!"
IS_SERVER_BUILD=true
TARGET_BUILD_VARIANT=$build_variant
shift
echo $@
fi
opts=$(getopt -o cChHi:kslnp:P:rqatv:S:f:m: --long history, --long copy_target,copy_chenyee,help,image:,key,sign,listp,new,project:,PROJECT:,remake,remake_billy,tee,target_build_variant:,clear_ccache,sonar:,file_script:,mp_status:, -- "$@")
if [ $? -ne 0 ];then
echo "***$? ***"
usage
exit 1
fi
eval set -- "$opts"
while true
do
case "$1" in
-c|--copy_target)
COPY_TARGET=true
shift
;;
-C|--copy_chenyee)
COPY_CHENYEE=true
shift
;;
-i|--image)
MODULE=$2
shift 2
;;
-n|--new)
ACTION=new
shift
;;
-p|--project)
PROJECT_PATH=$2
shift 2
;;
-P|--PROJECT)
PROJECT_PATH=$2
CMD_MMM=true
shift 2
;;
-r|--remake)
ACTION=remake
shift
;;
-q|--remake_billy)
ACTION=remake_billy
shift
;;
-a|--android)
ACTION=android
shift
;;
-v|--target_build_variant)
TARGET_BUILD_VARIANT=$2
shift 2
;;
-t|--tee)
BUILD_TOOLS+=" --debug"
shift
;;
-l|--listp)
perl cy_project/listP.pl
shift
exit 0
;;
--clear_ccache)
export CY_CLEAN_CCACHE="true"
shift
;;
-H|--history)
show_build_history
exit 0 ;;
-k|--key)
ACTION=key
shift
;;
-s|--sign)
ACTION=sign
shift
;;
-S|--sonar)
ACTION=sonar
export CY_SONAR_CHECK_PATH="$2"
shift 2
;;
-f|--file_script)
FILE_SCRIPT="$2"
shift 2
;;
-m|--mp_status)
MP_STATUS="$2"
shift 2
;;
-h|--help)
usage
exit 0
;;
--)
shift
break
;;
*)
usage
exit 1
;;
esac
done
# At least one action/module/project/copy flag must have been selected.
if [ -z "$ACTION" -a -z "$MODULE" -a -z "$PROJECT_PATH" -a -z "$COPY_TARGET" -a -z "$COPY_CHENYEE" ];then
echo "Error 1: Specify action to be build/re-build"
usage
exit 1
fi
# Whatever remains after option parsing is the product id; the project id
# is its first 7 characters (see prebuild).
CHENYEEPRODUCTID=$@
if [ ! -e "cy_project/${CHENYEEPRODUCTID:0:7}/${CHENYEEPRODUCTID}.mk" ];then
echo "Error 2: *********** product file not exist! ***********"
exit 2
fi
if [ -n "$PROJECT_PATH" -a ! -d "${PROJECT_PATH%%:*}" ];then
echo "Error 3: ***********$PROJECT_PATH not exist ************"
exit 3
fi
}
# Export every simple KEY=VALUE assignment found in the given makefile
# into the environment (':=' and '+=' lines are intentionally skipped).
export_variable_from(){
local file=$1
if [ -e "$file" ];then
#modify for CR01549696 begin
if [ "$TARGET_BUILD_VARIANT" != "user" ];then
export MTK_BUILD_ROOT=yes
else
sed -i '/MTK_BUILD_ROOT\s*=\s*yes/d' "$file"
fi
#modify for CR01549696 end
while read line
do
# Skip blank lines, '#' comments, lines without '=', and lines
# using ':=' or '+=' (only plain '=' assignments are exported).
[ -z "$line" -o "${line:0:1}" == "#" ] && continue
[ -z "$(expr match "$line" '.*\(=\).*')" ] && continue
[ -n "$(expr match "$line" '.*\(:=\).*')" ] && continue
[ -n "$(expr match "$line" '.*\(+=\).*')" ] && continue
local key=$(echo ${line%=*})
local value=$(echo ${line#*=})
export "$key"="$value"
done < $file
else
echo "$file not exist"
fi
}
# Collect CONFIG*/CY_BSP*/CY_Q_BSP* variables from the environment into
# CY_CFLAGS / CY_KBUILD_CFLAGS -D defines and write them (plus the
# CONFIG key list) to the cflags config file when $1 exists.
create_cy_cflags(){
local file="$1"
# CY_CFLAGS_CONFIG_FILE must be an absolute path.
CY_CFLAGS_CONFIG_FILE=$CY_BUILD_ROOT_DIR/cy_project/.cy_cflags_config
export CY_CFLAGS_CONFIG_FILE
CY_CFLAGS="CY_CFLAGS +="
CY_KBUILD_CFLAGS="CY_KBUILD_CFLAGS +="
if [ -f "$1" ];then
mkdir -p $(dirname $CY_CFLAGS_CONFIG_FILE)
rm -rf $CY_CFLAGS_CONFIG_FILE
> $CY_CFLAGS_CONFIG_FILE
while read line
do
[ -z "$line" -o "${line:0:1}" == "#" ] && continue;
# By convention, only macros beginning with CONFIG, CY_BSP or
# CY_Q_BSP are driver configuration macros.
[ "${line:0:6}" != "CONFIG" -a "${line:0:6}" != "CY_BSP" -a "${line:0:8}" != "CY_Q_BSP" ] && continue
key=$(echo ${line%=*})
value=$(echo ${line#*=})
if [ "$value" == "n" -o "$value" == "no" ];then
:
elif [ "$value" == "y" -o "$value" == "yes" ];then
CY_CFLAGS+=" -D$key"
if [ "${line:0:6}" == "CONFIG" ];then
echo "$key=y" >> $CY_CFLAGS_CONFIG_FILE
CY_KBUILD_CFLAGS+=" -D$key"
fi
else
CY_CFLAGS+=" -D$key=$value"
fi
done < <(env)
echo $CY_CFLAGS >> $CY_CFLAGS_CONFIG_FILE
echo $CY_KBUILD_CFLAGS>> $CY_CFLAGS_CONFIG_FILE
fi
}
# Prepare the build environment from the product id: export project ids,
# load the product makefiles, generate kernel Kconfig/cflags, and derive
# the internal/external version numbers.
prebuild(){
cy_project=$CHENYEEPRODUCTID
# The project id is the first 7 characters of the product id.
CHENYEEPROJECTID=${CHENYEEPRODUCTID:0:7}
export cy_project CHENYEEPRODUCTID CHENYEEPROJECTID
#Chenyee <taofapan> <20171110> add for replace img begin
CY_CUSTOM_IMG_RELEASED=release/${CHENYEEPROJECTID}_${TARGET_BUILD_VARIANT}_img_released
export CY_CUSTOM_IMG_RELEASED
#Chenyee <taofapan> <20171110> add for replace img end
#FIXME: not a good way to obtian TARGET_PRODUCT
# Products whose id ends in platform/CTA/CMCC/CT use the "platform" build.
[ "${CHENYEEPRODUCTID:(-8):8}" == "platform" -o "${CHENYEEPRODUCTID:(-3):3}" == "CTA" -o "${CHENYEEPRODUCTID:(-4):4}" == "CMCC" -o "${CHENYEEPRODUCTID:(-2):2}" == "CT" ] && BUILD_OPTIONS="platform"
export BUILD_OPTIONS
#export product.mk begin
export_variable_from "cy_project/${CHENYEEPROJECTID}/${CHENYEEPROJECTID}_ROM_COMMON.mk"
export_variable_from "cy_project/${CHENYEEPROJECTID}/${CHENYEEPRODUCTID}.mk"
TARGET_PRODUCT=`grep -w 'TARGET_PRODUCT' <(env) | sed 's/ //g' | grep '^TARGET_PRODUCT' | awk -F= '{print $2}'`
MTK_PLATFORM=`grep -w 'MTK_PLATFORM' <(env) | sed 's/ //g' | grep '^MTK_PLATFORM' | awk -F= '{print $2}'`
export TARGET_PRODUCT
#export product.mk end
create_cy_cflags "cy_project/${CHENYEEPROJECTID}/${CHENYEEPRODUCTID}.mk"
#Chenyee <CY_Builder> <zhouli> <20170924> modify it for 222653 begin
#Generation kernel Kconfig
echo "*** python cy_project/tools/cy_auto_gen_kconfig.py "cy_project/${CHENYEEPROJECTID}/${CHENYEEPRODUCTID}.mk" $CY_BUILD_ROOT_DIR ***"
python cy_project/tools/cy_auto_gen_kconfig.py "cy_project/${CHENYEEPROJECTID}/${CHENYEEPRODUCTID}.mk" $CY_BUILD_ROOT_DIR
#Chenyee <CY_Builder> <zhouli> <20170924> modify it for 222653 end
#export release version begine
# Local (non-server) builds are tagged with a _LOCAL version suffix.
[ ! -e "cy_project/${CHENYEEPROJECTID}_VerNumber" ] && echo "MINI_VER_NUMBER=T0001;">cy_project/${CHENYEEPROJECTID}_VerNumber
if ! $IS_SERVER_BUILD ;then
local line=$(head -n 1 cy_project/${CHENYEEPROJECTID}_VerNumber)
local number=$(expr match "$line" "MINI_VER_NUMBER=T\([0-9]\+\)")
sed -i "/^MINI_VER_NUMBER/c\MINI_VER_NUMBER=T${number}_LOCAL;" cy_project/${CHENYEEPROJECTID}_VerNumber
fi
. cy_project/${CHENYEEPROJECTID}_VerNumber
CY_ZNVERNUMBER=${CHENYEEPRODUCTID}_${MINI_VER_NUMBER}
export MINI_VER_NUMBER CY_ZNVERNUMBER
#export release version end
#gn ro
if [ "$CY_RO_PRODUCT_MODEL" == "" ];then
CY_RO_PRODUCT_MODEL=${CHENYEEPROJECTID}
fi
CY_INTERNAL_VER_NUMBER=${CHENYEEPRODUCTID}_V${MINI_VER_NUMBER:1:4}
CY_EXTERNAL_VER_NUMBER=${CY_RO_PRODUCT_MODEL}_$(echo ${CY_INTERNAL_VER_NUMBER}|cut -d '_' -f 2,3)
echo "CY_INTERNAL_VER_NUMBER=$CY_INTERNAL_VER_NUMBER"
echo "CY_EXTERNAL_VER_NUMBER=$CY_EXTERNAL_VER_NUMBER"
export CY_INTERNAL_VER_NUMBER CY_EXTERNAL_VER_NUMBER
mkdir -p $LOGPATH
}
# Replace the default signing keys with Chenyee production keys for user
# builds; platform and non-user builds keep the defaults.
# NOTE(review): the "replece"/"singkey" spellings are kept — the function
# name and the $HOME/singkey path are external contracts.
replece_sign_key(){
if [ "${BUILD_OPTIONS}" == "platform" -o "${TARGET_BUILD_VARIANT}" != "user" ];then
echo "BUILD_OPTIONS=${BUILD_OPTIONS}"
echo "TARGET_BUILD_VARIANT=${TARGET_BUILD_VARIANT}"
else
# Default key folder, overridable per product via CY_CUSTOM_SIGN_KEY.
CY_SIGN_KEY_FOLDER="$HOME/singkey/cykey"
if [ "${CY_CUSTOM_SIGN_KEY}" != "" ];then
CY_SIGN_KEY_FOLDER="$HOME/singkey/${CY_CUSTOM_SIGN_KEY}"
fi
if [ -d "${CY_SIGN_KEY_FOLDER}" ];then
cp -f ${CY_SIGN_KEY_FOLDER}/* "build/make/target/product/security"
echo "copy $CY_SIGN_KEY_FOLDER to build/make/target/product/security"
cp -f ${CY_SIGN_KEY_FOLDER}/* "device/mediatek/security"
echo "copy $CY_SIGN_KEY_FOLDER to device/mediatek/security"
else
echo "NO ${CY_SIGN_KEY_FOLDER}"
fi
fi
}
# Overlay the chenyee code tree onto the alps tree ($1 = vendor,
# $2 = version), swap in the signing keys, and stamp MTK_BUILD_VERNO
# with the Chenyee version number in the product's ProjectConfig.mk.
get_chenyee_code(){
local vendor=$1
local version=$2
#copy chenyee dir code to alps
bash cy_project/tools/cy_prebuild.sh $vendor $version
replece_sign_key
#Chenyee sunll 2015-01-21 modify MTK_BUILD_VERNO=${CY_ZNVERNUMBER} begin
CY_TARGET_PRODUCT_DIR=$(cat <(env) | grep "^\s*TARGET_PRODUCT\s*=" | sed 's/.*\s*=\s*full_//g')
CY_PRODUCT_FILE=device/mediateksample/${CY_TARGET_PRODUCT_DIR}/ProjectConfig.mk
sed -i "s/^\s*MTK_BUILD_VERNO\s*=\s*.*/MTK_BUILD_VERNO=${CY_ZNVERNUMBER}/" ${CY_PRODUCT_FILE}
echo 'MTK_BUILD_VERNO='$(cat ${CY_PRODUCT_FILE} | grep "^\s*MTK_BUILD_VERNO" | sed 's/.*\s*=\s*//g')
#Chenyee sunll 2015-01-21 modify MTK_BUILD_VERNO=${CY_ZNVERNUMBER} end
}
# Ensure CONFIG_DEBUG_PINCTRL=y is present in $CY_KERNEL_CFG_FILE,
# appending it only when missing (idempotent).
enable_kernel_debug_config()
{
# Bug fix: the file variable was unquoted, so an unset/empty
# CY_KERNEL_CFG_FILE made grep read from stdin and stall the build.
# grep -q also replaces the `> /dev/null; [ $? -eq 0 ]` round-trip.
if grep -q "CONFIG_DEBUG_PINCTRL=y" "$CY_KERNEL_CFG_FILE"; then
echo "kernel DEBUG已经修改过,不用修改"
else
echo "CONFIG_DEBUG_PINCTRL=y" >> "$CY_KERNEL_CFG_FILE"
fi
}
# Apply every CONFIG* variable from the environment to the kernel config
# file: y/yes sets the option, n/no comments it out ("# KEY is not set"),
# anything else assigns the literal value. Existing entries are edited in
# place; missing ones are appended.
set_kernel_config(){
# Variables not defined in this function come from the exports done in
# export_variable_from.
if [ "$TARGET_BUILD_VARIANT" == "eng" -a -f "$CY_KERNEL_CFG_FILE_ENG" ];then
CY_KERNEL_CFG_FILE=$CY_KERNEL_CFG_FILE_ENG
fi
#Chenyee <BY_BSP_CHG> <zhouli> <20170106> add for CSW1705A-423 begin
if [ "$TARGET_BUILD_VARIANT" != "user" ];then
export CONFIG_I2C_CHARDEV=yes
fi
#Chenyee <BY_BSP_CHG> <zhouli> <20170106> add for CSW1705A-423 end
#echo $CY_KERNEL_CFG_FILE
if [ -f "$CY_KERNEL_CFG_FILE" ];then
echo "*** kernel config file: $CY_KERNEL_CFG_FILE ****"
while read line
do
[ -z "$line" -o "${line:0:1}" == "#" ] && continue
local key=$(echo ${line%=*})
local value=$(echo ${line#*=})
[ -z "$key" -o -z "$value" ] && continue
# Only CONFIG* variables are kernel options.
if ! `echo "$key" | grep -q "^CONFIG"`;then
continue
fi
#echo $line
if [ "$value" == "n" -o "$value" == "no" ];then
if `grep -qw "$key" $CY_KERNEL_CFG_FILE` ;then
sed -i "/\<$key\>/c\# $key is not set" $CY_KERNEL_CFG_FILE
else
echo "# $key is not set" >> $CY_KERNEL_CFG_FILE
fi
elif [ "$value" == "y" -o "$value" == "yes" ];then
if `grep -qw "$key" $CY_KERNEL_CFG_FILE` ;then
sed -i "/\<$key\>/c\\$key=y" $CY_KERNEL_CFG_FILE
else
echo "$key=y" >> $CY_KERNEL_CFG_FILE
fi
else
if `grep -qw "$key" $CY_KERNEL_CFG_FILE` ;then
sed -i "/\<$key\>/c\\$key=$value" $CY_KERNEL_CFG_FILE
else
echo "$key=$value" >> $CY_KERNEL_CFG_FILE
fi
fi
done < <(env)
enable_kernel_debug_config
else
echo "kernel config file: $CY_KERNEL_CFG_FILE not exist"
fi
}
get_release_version(){
# Derive the release output locations (globals) from CY_ZNVERNUMBER:
# the main release dir, the OTA root with its tmp/ota subdirs, and the
# modem image dir.
CY_RELEASE_DIR="release/${CY_ZNVERNUMBER}"
CY_RELEASE_OTA_DIR_ROOT="${CY_RELEASE_DIR}_OTA"
CY_RELEASE_OTA_TMP_DIR="${CY_RELEASE_OTA_DIR_ROOT}/tmp"
CY_RELEASE_OTA_DIR="${CY_RELEASE_OTA_DIR_ROOT}/ota"
CY_RELEASE_MODEM_DIR="${CY_RELEASE_DIR}/bp_image"
}
create_product_for_flash_tool(){
# Emit product info for the flash tool when the helper script exists;
# silently a no-op otherwise.
local helper="./cy_project/tools/cy_output_product_info.sh"
if [ -e "$helper" ];then
bash "$helper" ${CHENYEEPRODUCTID} ${CY_RELEASE_DIR}
fi
}
# Chenyee <CY_BSP_OTA_SECUREBOOT> <goudaqiang> <20170605> add for 151694 begin
copy_signed_img_to_dir(){
# Preserve the secure-boot signed images into ./backup_img/ before the
# release directory is rebuilt.
echo "CHENYEE Copy_signed_img_to_dir"
mkdir -p ./backup_img/
local sig_dir=release/${CY_ZNVERNUMBER}_noota/sec_boot_sig_dir
local img
for img in boot-verified.img recovery-verified.img trustzone.bin; do
cp -f ${sig_dir}/${img} ./backup_img/
done
}
# Chenyee <CY_BSP_OTA_SECUREBOOT> <goudaqiang> <20170605> add for 151694 end
copy_results_to_release(){
    # Collect every build artifact for the current product into
    # $CY_RELEASE_DIR (and OTA packages into the $CY_RELEASE_OTA_* tree),
    # recreating the release directory from scratch on each run.
    # Relies on globals set by get_release_version() and the build env.
    [ -d "$CY_RELEASE_DIR" ] && rm -rf $CY_RELEASE_DIR
    mkdir -p $CY_RELEASE_DIR
    PRODUCT_OUT_ROOT="out/target/product"
    # Strip the "full_" prefix to get the device directory name under out/.
    CY_TARGET_PRODUCT=${TARGET_PRODUCT/full_}
    TARGET_PRODUCT_OUT_ROOT="$PRODUCT_OUT_ROOT/${CY_TARGET_PRODUCT}"
    #lihuafang modify begin
    # Copy flashable images, partition tables and assorted config files.
    if [ -d "${TARGET_PRODUCT_OUT_ROOT}" ];then
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/*.img $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/*.bin $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/*.mbn $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/*.txt $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/EBR1 $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/EBR2 $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/MBR $CY_RELEASE_DIR/;
        #Chenyee <CY_BSP_EFUSE> <chendonghai> <20180507> add for freeme copy config begin
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/freeme_build.cfg $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/system_image_info.txt $CY_RELEASE_DIR/;
        cp -f ${TARGET_PRODUCT_OUT_ROOT}/*_tiny.zip $CY_RELEASE_DIR/;
        #Chenyee <CY_BSP_EFUSE> <chendonghai> <20180507> add for freeme copy config end
        # The preloader must not ship in the release image set.
        rm $CY_RELEASE_DIR/preloader.bin
        rm $CY_RELEASE_DIR/preloader.img
    else
        echo "Wanning: ${TARGET_PRODUCT_OUT_ROOT} does no exist"
    fi
    #lihuafang modify end
    # Chenyee <CY_BSP_OTA_SECUREBOOT> <goudaqiang> <20170605> add for 151694 begin
    # When a secure-boot-signed set exists, prefer those boot/recovery
    # images over the *-verified ones copied above.
    if [ -d "release/${CY_ZNVERNUMBER}_noota" ];then
        cp -f release/${CY_ZNVERNUMBER}_noota/boot.img $CY_RELEASE_DIR/
        cp -f release/${CY_ZNVERNUMBER}_noota/recovery.img $CY_RELEASE_DIR/
        cp -f release/${CY_ZNVERNUMBER}_noota/trustzone.bin $CY_RELEASE_DIR/
        rm $CY_RELEASE_DIR/boot-verified.img
        rm $CY_RELEASE_DIR/recovery-verified.img
        rm -rf ./backup_img/
        echo "Chenyee rm boot-verified.img and recovery-verified.img"
    else
        echo "Chenyee Deafult KEY"
    fi
    # Chenyee <CY_BSP_OTA_SECUREBOOT> <goudaqiang> <20170605> add for 151694 begin
    #chenyee, chuqf, for tee raw imgs, begin
    # TEE (trusted execution environment) padded images plus the
    # secure-DRAM size hint used by the flash tool.
    cp -f ${TARGET_PRODUCT_OUT_ROOT}/trustzone/bin/*pad\.img $CY_RELEASE_DIR/
    if [ -f $CY_RELEASE_DIR/mt6735_microtrust_release_pad.img ];then
        cp $CY_RELEASE_DIR/mt6735_microtrust_release_pad.img $CY_RELEASE_DIR/mt6735_tee_microtrust_release_pad.img
        mkdir $CY_RELEASE_DIR/cy_secure_boot_configs
        echo "0x1900000" > $CY_RELEASE_DIR/cy_secure_boot_configs/SECURE_DRAM_SIZE_CFG.ini
    fi
    if [ -f $CY_RELEASE_DIR/mt6735_microtrust_debug_pad.img ];then
        cp $CY_RELEASE_DIR/mt6735_microtrust_debug_pad.img $CY_RELEASE_DIR/mt6735_tee_microtrust_release_pad.img
        mkdir $CY_RELEASE_DIR/cy_secure_boot_configs
        echo "0x1900000" > $CY_RELEASE_DIR/cy_secure_boot_configs/SECURE_DRAM_SIZE_CFG.ini
    fi
    #chenyee, chuqf, for tee raw imgs, end
    #copy DbgInfo file
    local dbginfo_file=$(find $TARGET_PRODUCT_OUT_ROOT/obj/ETC -type f -name "DbgInfo*" 2>/dev/null )
    if [ -f "$dbginfo_file" ];then
        cp -r  $dbginfo_file  $CY_RELEASE_DIR/
    fi
    #copy BPLGUInfo file
    local bplguinfo_file=$(find $TARGET_PRODUCT_OUT_ROOT/obj/ETC -type f -name "BPLGUInfo*" 2>/dev/null )
    for eachbplgui in $bplguinfo_file
    do
        if [ -f "$eachbplgui" ];then
            cp -r  $eachbplgui  $CY_RELEASE_DIR/
        fi
    done
    #copy MDDB file
    # NOTE(review): this [ -f ] only works when find returns a single path;
    # multiple MDDB_* matches would make the test fail silently — confirm.
    local bpl_file=$(find $TARGET_PRODUCT_OUT_ROOT/obj/ETC -type f -name "MDDB_*" 2>/dev/null )
    if [ -f "$bpl_file" ];then
        cp -r  $bpl_file  $CY_RELEASE_DIR/
    fi
    #copy modem files
    find $TARGET_PRODUCT_OUT_ROOT/obj -type f -name "APDB*" -exec cp \{} $CY_RELEASE_DIR \;
    find $TARGET_PRODUCT_OUT_ROOT/system/etc/mddb -type f -name "BP*" -exec cp \{} $CY_RELEASE_DIR \;
    #copy symbols to cy_backup
    # Preserve unstripped symbols/ELFs/maps for post-mortem debugging.
    if [ -d "$TARGET_PRODUCT_OUT_ROOT/symbols" ];then
        mkdir -p $PRODUCT_OUT_ROOT/chenyee_backup/
        rsync -avq "$TARGET_PRODUCT_OUT_ROOT/symbols" "$PRODUCT_OUT_ROOT/chenyee_backup/"
        find $TARGET_PRODUCT_OUT_ROOT -type f -name "*.elf" |cpio -dup --quiet  $PRODUCT_OUT_ROOT/chenyee_backup
        find $TARGET_PRODUCT_OUT_ROOT/obj/BOOTLOADER_OBJ -type f -name "lk" | cpio -dup --quiet $PRODUCT_OUT_ROOT/chenyee_backup
        find $TARGET_PRODUCT_OUT_ROOT/obj -type f -name "System.map" | cpio -dup --quiet $PRODUCT_OUT_ROOT/chenyee_backup
        #lk
        cp -r $TARGET_PRODUCT_OUT_ROOT/obj/BOOTLOADER_OBJ/build-*/lk "$PRODUCT_OUT_ROOT/chenyee_backup/" 2>/dev/null
    fi
    #copy vmlinux for debug
    if [ -f "$TARGET_PRODUCT_OUT_ROOT/obj/KERNEL_OBJ/vmlinux" ];then
        cp "$TARGET_PRODUCT_OUT_ROOT/obj/KERNEL_OBJ/vmlinux" "$CY_RELEASE_DIR"
    fi
    #copy qcn
    if [ -f "./chenyee/project/${CHENYEEPRODUCTID}/${CHENYEEPRODUCTID}.qcn" ];then
        cp "./chenyee/project/${CHENYEEPRODUCTID}/${CHENYEEPRODUCTID}.qcn" "$CY_RELEASE_DIR"
    fi
    #chenyee, chuqf, for fat.img, begin
    if [[ -e cy_project/tools/gen_fat.sh ]]; then
        chmod +x cy_project/tools/gen_fat.sh
        ./cy_project/tools/gen_fat.sh ${MTK_PLATFORM} ${CHENYEEPRODUCTID} ${CY_TARGET_PRODUCT} ${TARGET_PRODUCT_OUT_ROOT}
    fi
    #chenyee, chuqf, for fat.img, end
    create_product_for_flash_tool
    # Pick the newest OTA zip and its target-files intermediate, then stage
    # them under the OTA release tree with product/version-stamped names.
    local ota_zip=$(find ${TARGET_PRODUCT_OUT_ROOT}/ -maxdepth 1 -type f -name ${TARGET_PRODUCT}-ota-*.zip | sort -r | head -n 1 2>/dev/null)
    local ota_tmp_zip=$(find ${TARGET_PRODUCT_OUT_ROOT}/obj/PACKAGING/target_files_intermediates/ \
        -maxdepth 1 -type f -name ${TARGET_PRODUCT}-target_files-*.zip | sort -r | head -n 1 2>/dev/null)
    if [ -f "$ota_zip" ];then
        rm -rf $CY_RELEASE_OTA_DIR_ROOT
        mkdir -p $CY_RELEASE_OTA_TMP_DIR
        mkdir -p $CY_RELEASE_OTA_DIR
        cp -r $ota_zip $CY_RELEASE_OTA_DIR/${CHENYEEPRODUCTID}_update_${CY_RO_BUILD_DISPLAY_ID}_${MINI_VER_NUMBER}.zip
        local ota_md5=$(md5sum $CY_RELEASE_OTA_DIR/${CHENYEEPRODUCTID}_update_${CY_RO_BUILD_DISPLAY_ID}_${MINI_VER_NUMBER}.zip |awk '{print $1}')
        echo "${CHENYEEPRODUCTID}_update_${CY_RO_BUILD_DISPLAY_ID}_${MINI_VER_NUMBER}.zip=$ota_md5" > $CY_RELEASE_OTA_DIR/${CHENYEEPRODUCTID}_${MINI_VER_NUMBER}.md5
        cp -r $ota_tmp_zip $CY_RELEASE_OTA_TMP_DIR/${CHENYEEPRODUCTID}_update_${CY_RO_BUILD_DISPLAY_ID}_${MINI_VER_NUMBER}_tmp.zip
        #Chenyee <CY_3rd> <lucy> <20180306> for CSW1703MX-9 beign
        # Third-party FOTA vendors get the raw target-files package.
        if [ "${ADUPS_FOTA_SUPPORT}" == "yes" ];then
            if [ "${ADUPS_FOTA_SIGINATURE}" == "yes" ];then
                mkdir -p ${CY_RELEASE_OTA_DIR_ROOT}/3rd
                cp -f ${TARGET_PRODUCT_OUT_ROOT}/target_files-package.zip ${CY_RELEASE_OTA_DIR_ROOT}/3rd/;
            else
                cp -f ${TARGET_PRODUCT_OUT_ROOT}/target_files-package.zip $CY_RELEASE_OTA_DIR/;
            fi
        fi
        #Chenyee <CY_3rd> <lucy> <20180306> for CSW1703MX-9 end
        #Chenyee <CY_REQ> <huangxiaohong> <20180808> for CSW1703IC-92 beign
        if [ "${REDSTONE_FOTA_SUPPORT}" == "yes" ];then
            mkdir -p ${CY_RELEASE_OTA_DIR_ROOT}/3rd
            cp -f ${TARGET_PRODUCT_OUT_ROOT}/rsfota_target_files-package.zip ${CY_RELEASE_OTA_DIR_ROOT}/3rd/;
            #cp -f ${TARGET_PRODUCT_OUT_ROOT}/target_files-package.zip $CY_RELEASE_OTA_DIR/;
        fi
        #Chenyee <CY_REQ> <huangxiaohong> <20180808> for CSW1703IC-92 end
    fi
    # A missing system.img means the build did not complete — abort.
    if [ ! -e "$TARGET_PRODUCT_OUT_ROOT/system.img" ];then
        echo "你的编译结果不完全,请查看是否编译成功。"
        error 1 "copy results to release"
    fi
    if [ -f cy_project/tools/verified_boot_tools/GenEfuseImg.py ];then
        chmod +x cy_project/tools/verified_boot_tools/GenEfuseImg.py
        ./cy_project/tools/verified_boot_tools/GenEfuseImg.py $CY_RELEASE_DIR
    fi
    #Chenyee <taofapan> <20171110> add for replace img begin
    # Drop any previously released per-variant image snapshots.
    if [ "${CY_BUILD_WITH_CHENYEE_RELEASED}" == "yes" ];then
        rm -rf release/${CHENYEEPROJECTID}_user_img_released
        rm -rf release/${CHENYEEPROJECTID}_userdebug_img_released
        rm -rf release/${CHENYEEPROJECTID}_eng_img_released
    fi
    #Chenyee <taofapan> <20171110> add for replace img end
}
build_clean(){
    # Invoke the vendor build wrapper in clean mode for this product.
    local log="$LOGPATH/$TARGET_PRODUCT-clean"
    echo "*** $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --clean_build $TARGET_PRODUCT --log_file $log ***"
    $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --clean_build $TARGET_PRODUCT --log_file $log
    error "$?" "*********** build_clean *************"
}
build_update_api(){
    # Regenerate the public API description (make update-api equivalent).
    local log="$LOGPATH/$TARGET_PRODUCT-update-api"
    echo "*** $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --update-api $TARGET_PRODUCT --log_file $log***"
    $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --update-api $TARGET_PRODUCT --log_file $log
    error "$?" "*********** build_update_api ************"
}
# Chenyee <CY_BSP_OTA_SECUREBOOT> <goudaqiang> <20170605> add for 151694 begin
build_module(){
    # Build a single image or module target.
    #   $1 - "systemimage"/"bootimage"/"userdataimage" are built with
    #        --image; anything else is built with --module.  For
    #        "otapackage" the previously signed boot/recovery images are
    #        restored first (secure-boot OTA flow).
    # Bugfix: the original wrapped grep in backticks and executed its
    # (empty) output as a command, relying on bash's exit-status rule for
    # empty command substitutions; use the pipeline's status directly.
    local image=$1
    set_kernel_config
    if echo "systemimage bootimage userdataimage" | grep -qw "$image"; then
        $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --image $image $TARGET_PRODUCT --log_file $LOGPATH/$TARGET_PRODUCT-$image
    else
        if [ "$image" == "otapackage" ];then
            if [ -d "release/${CY_ZNVERNUMBER}_noota" ];then
                copy_signed_img_to_dir
            else
                echo "Chenyee Deafult OTA KEY"
            fi
        fi
        $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --module $image $TARGET_PRODUCT --log_file $LOGPATH/$TARGET_PRODUCT-$image --debug
    fi
    error "$?" "************ build_$image ************"
}
# Chenyee <CY_BSP_OTA_SECUREBOOT> <goudaqiang> <20170605> add for 151694 end
build_android(){
    # Run the full Android build for the current product/variant.
    local log="$LOGPATH/$TARGET_PRODUCT-android"
    echo "*** $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --android_build $TARGET_PRODUCT --log_file $log"
    $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --android_build $TARGET_PRODUCT --log_file $log
    error "$?" "************ build_android ************"
}
config_verified_boot(){
    # Point the verified-boot configuration tool at this product's makefile.
    local cfg_tool="cy_project/tools/verified_boot_tools/cfg_vb.py"
    local product_mk="cy_project/${CHENYEEPROJECTID}/${CHENYEEPRODUCTID}.mk"
    echo "set verified boot"
    chmod +x "$cfg_tool"
    "./$cfg_tool" "$product_mk"
    echo "$product_mk"
}
download_modem(){
    # Download modem binaries when a modem config file exists for this
    # build; silently a no-op otherwise.
    local cy_modem_config_file="modem_config/${CY_MODEM_CONFIG}"
    [ -f "$cy_modem_config_file" ] || return 0
    echo "*** python cy_project/tools/lf_download_modem.py $cy_modem_config_file ***"
    python cy_project/tools/lf_download_modem.py $cy_modem_config_file
    error "$?" "************ download_modem ************"
}
build_copy_cy_code(){
    # Overlay Chenyee-specific code onto the tree: fetch the overlay,
    # download modem binaries, apply kernel config, then source locale
    # and debug-log setup scripts as configured.
    echo "****** build_copy_cy_code ****"
    get_chenyee_code "mtk" "7.0"
    download_modem
    set_kernel_config
    #chenyee yangcuntao 2015-05-11 add
    . cy_project/tools/cy_oversea_locales.sh
    # FILE_SCRIPT selects an explicit debug script; otherwise user builds
    # in dev stage source the default devlog script.
    if [ "${FILE_SCRIPT}" != "" ];then
        . cy_project/tools/debug/${FILE_SCRIPT}
    else
        if [ "${MP_STATUS}" == "dev" -a  "${TARGET_BUILD_VARIANT}" == "user"  ];then
            . cy_project/tools/debug/devlog
        fi
    fi
}
build_copy_target(){
    # Resolve release directory names, then stage all build artifacts there.
    get_release_version
    copy_results_to_release
    error "$?" "*********** build_copy_target **********"
}
build_remake(){
    # Incremental rebuild: reapply verified-boot config, source patches
    # and debug tweaks, run the Android build, then stage the results.
    echo "*** build_remake ***"
    config_verified_boot
    build_update_api
    fix_build_err
    enable_AudPar
    enable_audio_debug
    change_MMI_packages
    change_logd_size
    fix_aftrack_dump_err
    enable_kernel_log
    build_android
    build_copy_target
}
build_remake_billy(){
    # Variant of build_remake used for local debugging: skips verified
    # boot / update-api and the final copy-to-release step.
    #config_verified_boot
    #build_update_api
    fix_build_err
    enable_AudPar
    enable_audio_debug
    #dump_stack
    change_MMI_packages
    change_logd_size
    fix_aftrack_dump_err
    enable_kernel_log
    build_android
    #build_copy_target
}
create_check_list(){
    # On build servers only: generate source tag/check-list files for the
    # chenyee overlay directory.  $IS_SERVER_BUILD is executed as a
    # command, so it must hold "true" or "false".
    if $IS_SERVER_BUILD ;then
        echo "*** create_check_list $IS_SERVER_BUILD ***"
        echo "python cy_project/tools/cy_create_tags.py -d chenyee/alps/ -P $CHENYEEPRODUCTID -o out "
        python cy_project/tools/cy_create_tags.py -d chenyee/alps/ -P $CHENYEEPRODUCTID -o out
    fi
}
#Chenyee <CY_BSP_SYS> <liuxinhua> <20180504> add for CSW1703KR-63 begin
release_kernel_binary(){
    # Publish prebuilt kernel binaries, but only when the project opts in
    # via CY_BUILD_WITH_CHENYEE_KERNEL_RELEASED=yes.
    [ "${CY_BUILD_WITH_CHENYEE_KERNEL_RELEASED}" == "yes" ] || return 0
    python cy_project/tools/cy_release_kernel_binary.py cy_project/${CHENYEEPROJECTID}/${CHENYEEPRODUCTID}.mk
}
change_logd_size()
{
version_defaults=$CY_BUILD_ROOT_DIR/build/core/version_defaults.mk
tt=`cat $version_defaults | grep PLATFORM_VERSION. | cut -d '=' -f 2 | grep -Eo '[0-9]+'`
PLATFORM_VERSION=`echo $tt | cut -d ' ' -f 4`
if [[ $PLATFORM_VERSION -ge 9 ]];then
android_logger=$CY_BUILD_ROOT_DIR/system/core/liblog/include/private/android_logger.h
LogdSizeSetting=$CY_BUILD_ROOT_DIR/frameworks/base/packages/SettingsLib/src/com/android/settingslib/development/AbstractLogdSizePreferenceController.java
LogdSizeSettingVendor=$CY_BUILD_ROOT_DIR/vendor/mediatek/proprietary/packages/apps/SettingsLib/src/com/android/settingslib/development/AbstractLogdSizePreferenceController.java
else
android_logger=$CY_BUILD_ROOT_DIR/system/core/include/private/android_logger.h
LogdSizeSetting=$CY_BUILD_ROOT_DIR/packages/apps/Settings/src/com/android/settings/development/DevelopmentSettings.java
LogdSizeSettingVendor=$CY_BUILD_ROOT_DIR/vendor/mediatek/proprietary/packages/apps/MtkSettings/src/com/android/settings/development/DevelopmentSettings.java
fi
grep "define LOG_BUFFER_SIZE (256 * 1024 * 1024)" $android_logger > /dev/null
if [ $? -eq 0 ]; then
echo "logdsize is 256M,setting 16M,不用修改"
else
sed -i 's/#define LOG_BUFFER_SIZE (256 \* 1024)/#define LOG_BUFFER_SIZE (256 \* 1024 \* 1024)/g' $android_logger
sed -i 's/SELECT_LOGD_DEFAULT_SIZE_VALUE = \"262144\"/SELECT_LOGD_DEFAULT_SIZE_VALUE = \"16777216\"/g' $LogdSizeSetting
sed -i 's/SELECT_LOGD_DEFAULT_SIZE_VALUE = \"262144\"/SELECT_LOGD_DEFAULT_SIZE_VALUE = \"16777216\"/g' $LogdSizeSettingVendor
fi
#grep -nr LOG_BUFFER_SIZE $android_logger
#grep -nr SELECT_LOGD_DEFAULT_SIZE_VALUE $LogdSizeSetting
#grep -nr SELECT_LOGD_DEFAULT_SIZE_VALUE $LogdSizeSettingVendor
}
fix_aftrack_dump_err()
{
    # Guard AudioTrack's audio_dump mkdir behind an (undefined)
    # CONFIG_FIX_MKDIR macro so the permission-denied mkdir is compiled
    # out on eng builds.  The grep makes the patch idempotent.
    AudioTrack=$CY_BUILD_ROOT_DIR/frameworks/av/media/libaudioclient/AudioTrack.cpp
    grep "defined(CONFIG_FIX_MKDIR)" $AudioTrack > /dev/null
    if [ $? -eq 0 ]; then
        echo "fix AudioTrack: mkdir (audio_dump) error! Permission denied,不用修改"
        echo "转到AudioUtilmtk: AudioDumpThread创建"
    else
        sed -i 's/defined(CONFIG_MT_ENG_BUILD)/defined(CONFIG_MT_ENG_BUILD) \&\& defined(CONFIG_FIX_MKDIR) \/\/billy/g' $AudioTrack
    fi
}
enable_kernel_log()
{
    # Force kernel logging through logd: short-circuit mobile_log_d's
    # low-ram check to always return KERNEL_LOGD, and flip the
    # ro.logd.kernel default property to true.  Idempotent via the grep.
    logging=$CY_BUILD_ROOT_DIR/vendor/mediatek/proprietary/external/mobile_log_d/logging.c
    device=$CY_BUILD_ROOT_DIR/device/mediatek/common/device.mk
    #grep "defined(CONFIG_FIX_MKDIR)" $logging > /dev/null
    grep "ro.logd.kernel=true" $device > /dev/null
    if [ $? -eq 0 ]; then
        echo "已经使能logd kernel log,不用修改"
    else
        sed -i 's/if (\!strcmp(is_lowram/return KERNEL_LOGD; \/\/billy add\n\n\t&/' $logging
        sed -i 's/ro.logd.kernel=false/ro.logd.kernel=true/g' $device
    fi
}
fix_build_err()
{
MK=$CY_BUILD_ROOT_DIR/vendor/mediatek/proprietary/hardware/interfaces/Android.mk
linenum=`sed -n '/touch/=' $MK`
num=`echo ${linenum:0:3}`
#echo $num
s=`sed -n ''"$[num+1]"'p' $MK`
if [ -z "$s" ];then
#输入制表符TAB方法 ctrl+v ctrl+i
sed -e ''"$num"'a\ @mv \$@ \$@_' -i $MK
sed -e ''"$[num+1]"'a\ @sort \$@_ > $@' -i $MK
sed -e ''"$[num+2]"'a\ @rm -f \$@_' -i $MK
else
echo "已经修复,不需要修改"
fi
}
enable_AudPar(){
    # Uncomment FORCE_DEBUG_LEVEL in the audio parameter parser header to
    # force verbose logging.
    AudPar=$CY_BUILD_ROOT_DIR/vendor/mediatek/proprietary/external/AudioParamParser/AudioParamParser.h
    sed -i 's/\/\/\#define FORCE_DEBUG_LEVEL/\#define FORCE_DEBUG_LEVEL/g' $AudPar
    # The PhoneWindowManager debug-flag flips below are intentionally
    # disabled ("if false"); kept for quick re-enabling during debugging.
    if false;then
    PhoneWindowManager=$CY_BUILD_ROOT_DIR/frameworks/base/services/core/java/com/android/server/policy/PhoneWindowManager.java
    sed -i 's/DEBUG = false;/DEBUG = true;/g' $PhoneWindowManager
    sed -i 's/localLOGV = false;/localLOGV = true;/g' $PhoneWindowManager
    sed -i 's/DEBUG_INPUT = false;/DEBUG_INPUT = true;/g' $PhoneWindowManager
    sed -i 's/DEBUG_KEYGUARD = false;/DEBUG_KEYGUARD = true;/g' $PhoneWindowManager
    sed -i 's/DEBUG_LAYOUT = false;/DEBUG_LAYOUT = true;/g' $PhoneWindowManager
    sed -i 's/DEBUG_SPLASH_SCREEN = false;/DEBUG_SPLASH_SCREEN = true;/g' $PhoneWindowManager
    sed -i 's/DEBUG_WAKEUP = false;/DEBUG_WAKEUP = true;/g' $PhoneWindowManager
    fi
}
dump_stack()
{
    # Inject CallStack / stack-trace logging into the audio stack
    # (native AudioALSAStreamManager, AudioPolicyManager, and Java
    # AudioService) for volume/routing debugging.  Each patch is made
    # idempotent by grepping for its include/marker first.
    AudioALSAStreamManager=$CY_BUILD_ROOT_DIR/vendor/mediatek/proprietary/hardware/audio/common/V3/aud_drv/AudioALSAStreamManager.cpp
    grep "#include <utils/CallStack.h>" $AudioALSAStreamManager > /dev/null
    if [ $? -eq 0 ]; then
        echo "已经修改过,不用修改"
    else
        sed -i '2i #include <utils/CallStack.h> //billy' $AudioALSAStreamManager
        # NOTE(review): the address below uses 'm' as the sed delimiter
        # (\m...m) and looks malformed — verify this expression actually
        # matches/patches the mMicMute assignment as intended.
        sed -i '\mMicMute = state;\a/CallStack\(\"Billy\"\);' $AudioALSAStreamManager
        sed -i 's/return pPlaybackHandler;/\tCallStack\(\"billy\"\);\n\t&/' $AudioALSAStreamManager
        sed -i 's/return pAudioALSAStreamOut;/\tCallStack\(\"billy\"\);\n\t&/' $AudioALSAStreamManager
    fi
    AudioPolicyManager=$CY_BUILD_ROOT_DIR/frameworks/av/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
    grep "#include <utils/CallStack.h>" $AudioPolicyManager > /dev/null
    if [ $? -eq 0 ]; then
        echo "已经修改过,不用修改"
    else
        sed -i '47i #include <utils/CallStack.h> //billy' $AudioPolicyManager
        sed -i 's/ALOGV(\"setStreamVolumeIndex()/CallStack(\"billy\");\n\t&/' $AudioPolicyManager
    fi
    AudioService=$CY_BUILD_ROOT_DIR/frameworks/base/services/core/java/com/android/server/audio/AudioService.java
    grep "RuntimeException re = new RuntimeException()" $AudioService > /dev/null
    if [ $? -eq 0 ]; then
        echo "已经修改过,不用修改"
    else
        sed -i 's/ensureValidDirection(direction);/Log.e(TAG, "billy:" + Log.getStackTraceString(new Throwable()));\n\t\t&/' $AudioService
        sed -i 's/ensureValidDirection(direction);/RuntimeException re = new RuntimeException();\n\t\t\t\tre.fillInStackTrace();\n\t\t\t\tLog.d(TAG, "billy", re);\n\t\t&/' $AudioService
    fi
}
scandir()
{
    # Recursively walk directory $1 and, in every regular file found,
    # uncomment the audio verbose-logging macros (LOG_NDEBUG,
    # VERY_VERBOSE_LOGGING, VERY_VERY_VERBOSE_LOGGING), tagging each
    # change with "//billy".
    # Bugfixes vs. original: iterate with a glob instead of parsing `ls`
    # output (names with whitespace no longer break), quote expansions,
    # and skip the literal '*' left by an empty directory.  The unused
    # local `parent_dir` was removed.
    # NOTE: like the original, the top-level call leaves the shell cd'd
    # into $1; recursive calls restore their position with `cd ..`.
    local cur_dir workdir entry
    workdir=$1
    cd "${workdir}"
    if [ "${workdir}" = "/" ]
    then
        cur_dir=""
    else
        cur_dir=$(pwd)
    fi
    for entry in *
    do
        [ -e "$entry" ] || continue   # empty dir: glob stays literal
        if test -d "$entry";then
            cd "$entry"
            scandir "${cur_dir}/${entry}"
            cd ..
        else
            sed -i 's/\/\/#define LOG_NDEBUG 0/#define LOG_NDEBUG 0 \/\/billy/g' "$entry"
            sed -i 's/\/\/#define VERY_VERBOSE_LOGGING/#define VERY_VERBOSE_LOGGING \/\/billy/g' "$entry"
            sed -i 's/\/\/ #define VERY_VERBOSE_LOGGING/#define VERY_VERBOSE_LOGGING \/\/billy/g' "$entry"
            sed -i 's/\/\/#define VERY_VERY_VERBOSE_LOGGING/#define VERY_VERY_VERBOSE_LOGGING \/\/billy/g' "$entry"
        fi
    done
}
enable_audio_debug()
{
    # Enable verbose audio logging tree-wide: run scandir over the audio
    # frameworks/media directories to uncomment LOG_NDEBUG /
    # VERY_VERBOSE_LOGGING, then re-comment a handful of files whose
    # verbose logs are too noisy, and enable the SpeechType debug block.
    # Idempotent: skips everything when AudioMixer.cpp is already tagged.
    local cur_dir
    cur_dir=$(pwd)
    echo "**** curdir $(pwd) *******"
    echo "*********  enable audio debug LOG_NDEBUG 0  **********"
    AudioJniDir=$CY_BUILD_ROOT_DIR/frameworks/base/core/jni
    AudioFlinger=$CY_BUILD_ROOT_DIR/frameworks/av/services/audioflinger
    AudioPolicy=$CY_BUILD_ROOT_DIR/frameworks/av/services/audiopolicy
    MediaExtractor=$CY_BUILD_ROOT_DIR/frameworks/av/services/mediaextractor
    MediaJniDir=$CY_BUILD_ROOT_DIR/frameworks/base/media/jni
    Media=$CY_BUILD_ROOT_DIR/frameworks/av/media
    android_media_MediaDrm=$MediaJniDir/android_media_MediaDrm.cpp
    AudioStreamInternal=$Media/libaaudio/src/client/AudioStreamInternal.cpp
    AudioStreamInternalCapture=$Media/libaaudio/src/client/AudioStreamInternalCapture.cpp
    AudioStreamInternalPlay=$Media/libaaudio/src/client/AudioStreamInternalPlay.cpp
    MonoPipeReader=$Media/libnbaio/MonoPipeReader.cpp
    MonoPipe=$Media/libnbaio/MonoPipe.cpp
    Pipe=$Media/libnbaio/Pipe.cpp
    PipeReader=$Media/libnbaio/PipeReader.cpp
    AudioMixer=$Media/libaudioprocessing/AudioMixer.cpp
    SpeechType=$CY_BUILD_ROOT_DIR/vendor/mediatek/proprietary/hardware/audio/common/include/SpeechType.h
    grep "#define LOG_NDEBUG 0 //billy" $AudioMixer > /dev/null
    if [ $? -eq 0 ]; then
        echo "已经修改过,不用修改"
    else
        # scandir leaves the cwd changed; restored with `cd $cur_dir` below.
        scandir $AudioJniDir
        scandir $AudioFlinger
        scandir $AudioPolicy
        scandir $MediaExtractor
        scandir $MediaJniDir
        scandir $Media
        #sed -i 's/ i);/name\/*billy*\/);/g' $AudioMixer
        # Re-disable verbose logging in files that flood the log.
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $android_media_MediaDrm
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $AudioStreamInternal
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $AudioStreamInternalCapture
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $AudioStreamInternalPlay
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $MonoPipeReader
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $MonoPipe
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $Pipe
        sed -i 's/#define LOG_NDEBUG 0 \/\/billy/\/\/#define LOG_NDEBUG 0 \/\/billy/g' $PipeReader
        sed -i 's/#if 0 \/\/ for/#if 1 \/\/ billy for/g' $SpeechType
        cd $cur_dir
    fi
}
disable_dreams()
{
    # Debug helper: disable the keyguard, screensaver ("dreams") and lock
    # screen, and make PowerManagerService.goToSleep() a no-op so the
    # device never sleeps.  Idempotent via the dreams-config grep.
    enableKeyguardService_config=$CY_BUILD_ROOT_DIR/frameworks/base/packages/SystemUI/res/values/config.xml
    dreams_config=$CY_BUILD_ROOT_DIR/frameworks/base/core/res/res/values/config.xml
    defaults_values=$CY_BUILD_ROOT_DIR/frameworks/base/packages/SettingsProvider/res/values/defaults.xml
    PowerManagerService=$CY_BUILD_ROOT_DIR/frameworks/base/services/core/java/com/android/server/power/PowerManagerService.java
    grep "name=\"config_dreamsEnabledByDefault\">false" $dreams_config > /dev/null
    if [ $? -eq 0 ]; then
        echo "已经修改过,不用修改"
    else
        sed -i 's/name=\"config_enableKeyguardService\">true/name=\"config_enableKeyguardService\">false/g' $enableKeyguardService_config
        sed -i 's/bool name=\"config_dreamsSupported\">true/bool name=\"config_dreamsSupported\">false/g' $dreams_config
        sed -i 's/name=\"config_dreamsEnabledByDefault\">true/name=\"config_dreamsEnabledByDefault\">false/g' $dreams_config
        sed -i 's/name=\"def_lockscreen_disabled\">false/name=\"def_lockscreen_disabled\">true/g' $defaults_values
        # Early-return injected at the top of goToSleep(long ...).
        sed -i '/goToSleep(long/a\\t\t\tif (true){ return; }' $PowerManagerService
    fi
}
change_MMI_packages()
{
    # Swap the in-tree CY_MMI / CY_AutoMMI factory-test apps for local
    # development copies (symlinked from ~/bin/shell_test).  The original
    # directories are kept as *_old; presence of CY_MMI_old marks the
    # swap as already done.
    if [ -d $CY_BUILD_ROOT_DIR/packages/apps/CY_MMI_old ];then
        echo "已经替换过了,不用再替换"
    else
        mv $CY_BUILD_ROOT_DIR/packages/apps/CY_MMI/Android.mk $CY_BUILD_ROOT_DIR/packages/apps/CY_MMI/Android.mk.bak
        mv $CY_BUILD_ROOT_DIR/packages/apps/CY_MMI $CY_BUILD_ROOT_DIR/packages/apps/CY_MMI_old
        ln -s ~/bin/shell_test/package/CY_MMI/OM8.0 $CY_BUILD_ROOT_DIR/packages/apps/CY_MMI
        mv $CY_BUILD_ROOT_DIR/packages/apps/CY_AutoMMI/Android.mk $CY_BUILD_ROOT_DIR/packages/apps/CY_AutoMMI/Android.mk.bak
        mv $CY_BUILD_ROOT_DIR/packages/apps/CY_AutoMMI $CY_BUILD_ROOT_DIR/packages/apps/CY_AutoMMI_old
        ln -s ~/bin/shell_test/package/CY_AutoMMI/OM8.0 $CY_BUILD_ROOT_DIR/packages/apps/CY_AutoMMI
    fi
}
build_new(){
    # Full clean build: clean, overlay Chenyee code, apply all source
    # patches/debug tweaks, rebuild, then produce check lists and release
    # the kernel binaries if configured.
    build_clean
    build_copy_cy_code
    fix_build_err
    enable_AudPar
    enable_audio_debug
    #dump_stack
    change_MMI_packages
    change_logd_size
    fix_aftrack_dump_err
    enable_kernel_log
    build_remake
    create_check_list
    release_kernel_binary
}
#Chenyee <CY_BSP_SYSTEM> <liuxinhua> <20180504> add for CSW1703KR-63 end
build_key(){
    # Generate verified-boot signing keys into release/.
    echo "${MTK_PLATFORM} ${CHENYEEPRODUCTID}"
    ./cy_project/tools/verified_boot_tools/gen_vb_keys.py -o release/
}
build_sign(){
    # Stage build results, then sign the release images with the private
    # keys/configs from the verified-boot tools directory.
    build_copy_target
    ./cy_project/tools/verified_boot_tools/vb_sign.py -k cy_project/tools/verified_boot_tools/prik/ -c cy_project/tools/verified_boot_tools/configs/ -s $CY_RELEASE_DIR -d release/img_signed/
}
build_project(){
    # Build a single project directory (mmm/mmma equivalent).
    #   $1 - path of the project to build
    # CMD_MMM=true selects the non-recursive --PROJECT (mmm) mode.
    speedup_set_state "modified"
    local project_path=$1
    case "$CMD_MMM" in
        true)
            $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --PROJECT $project_path $TARGET_PRODUCT --log_file $LOGPATH/$TARGET_PRODUCT-mmm
            ;;
        *)
            $BUILD_TOOLS --build_variant $TARGET_BUILD_VARIANT --project $project_path $TARGET_PRODUCT --log_file $LOGPATH/$TARGET_PRODUCT-mmma
            ;;
    esac
    error "$?" "*********** build_project ************"
}
build_sonar(){
    # Build the sonar-check project, then run `ant ... sonar` for every
    # prepared apk directory under cy_project/tools/ant, appending all
    # output to a single aggregate log.
    build_project "$CY_SONAR_CHECK_PATH"
    local apk_sonar=$(find cy_project/tools/ant -maxdepth 1 -mindepth 1 -type d)
    # Truncate the aggregate log before the run.
    > $LOGPATH/build_sonar.log
    for dir in $apk_sonar
    do
        if [ -f "$dir/build.xml" ];then
            ant -buildfile $dir/build.xml sonar | tee -a $LOGPATH/build_sonar.log
            # NOTE(review): "$?" here is tee's exit status, not ant's;
            # PIPESTATUS[0] would be needed to detect ant failures.
            error "$?" "*********** sonar check $CY_SONAR_CHECK_PATH ************"
            rm -rf $dir
        fi
    done
}
check_external_iw_version_sh(){
    # Guard against a known build break in external/iw/version.sh:
    # if the file still has its pristine upstream MD5, delete lines 6-20
    # and leave a marker comment; if already patched, continue; if the
    # file changed upstream, stop so the pinned MD5 can be updated.
    modifyFile=external/iw/version.sh
    oldmd5="0ceb0c1cd79b2f7f10d12b15ccd066f4"
    sumvar=`md5sum ${modifyFile}`
    sumvar=${sumvar%% *}
    echo $sumvar
    if [[ ${sumvar} == ${oldmd5} ]];then
        sed -i "6,20d" ${modifyFile}
        sed -i "6i #Chenyee deleted 6 to 20 line for build" ${modifyFile}
        echo "`echo $(basename $0)` modify external/iw/version.sh and go to building"
    else
        if `grep -qw "#Chenyee deleted 6 to 20 line for build" ${modifyFile}` ;then
            echo "`echo $(basename $0)` Check external/iw/version.sh PASS, go to building"
        else
            echo "`echo $(basename $0)` external/iw/version.sh file changed, you have to update the MD5 value \"$oldmd5\" with $sumvar in this function"
            # NOTE(review): exits 0 (success) even though the check failed
            # — confirm this is intended rather than a non-zero abort.
            exit 0
        fi
    fi
}
setup_speedup(){
    # Ensure the speedup bookkeeping directory exists.
    # Bugfix: the original `[ ! -e $SPEEDUP_PATH ] && mkdir -p ...`
    # returned status 1 when the directory already existed; `mkdir -p`
    # alone is idempotent, always succeeds in that case, and the path is
    # now quoted.
    mkdir -p "$SPEEDUP_PATH"
}
is_speedup_support(){
    # Decide whether the incremental-build speedup feature may be used.
    # Prints "true"/"false" on stdout and mirrors that in the return code.
    # Disabled on build servers, when explicitly switched off, when the
    # repo tool lacks the required `state` subcommand, or on branches
    # without the cy_speedup_flag marker file.  The checks short-circuit
    # in the same order as before.
    if $IS_SERVER_BUILD \
        || [ "$CHENYEE_SPEEDUP_FEATURE" == "false" ] \
        || [ ! -f $CY_BUILD_ROOT_DIR/../.repo/repo/subcmds/state.py ] \
        || [ ! -f $CY_BUILD_ROOT_DIR/cy_tools/cy_speedup_flag ]; then
        echo false
        return 1
    fi
    echo true
    return 0
}
speedup_get_state(){
    # Print the current speedup state from $SPEEDUP_STATE.
    #   $1 - "block": take an exclusive flock on fd 7 (bound to the state
    #        file) so the read cannot interleave with a concurrent writer.
    # Prints "false" and returns 1 when the feature is unsupported.
    if [ $(is_speedup_support) == "false" ]; then
        echo false; return 1
    fi
    if [ x"$1" == x"block" ]; then
        { flock -e 7; cat $SPEEDUP_STATE; flock -u 7; } 7<>"$SPEEDUP_STATE"
    else
        cat $SPEEDUP_STATE
    fi
}
speedup_set_state(){
    # Write $1 as the new speedup state, serialized against concurrent
    # readers/writers via an exclusive flock on fd 7 (the state file).
    # Prints "false" and returns 1 when the feature is unsupported.
    if [ $(is_speedup_support) == "false" ]; then
        echo false; return 1
    else
        { flock -e 7; echo $1 > $SPEEDUP_STATE; flock -u 7; } 7<>"$SPEEDUP_STATE"
    fi
}
store_build_history(){
    # Prepend a timestamped record of this invocation to the build
    # history file and remember the current product/variant.
    local dline="==========================="
    local record
    record="$(date +"%Y.%m.%d-%H:%M:%S") $*"
    # Seed the history file with an END marker on first use.
    [ ! -e $SPEEDUP_PATH/.history ] && echo "$dline END $dline" > $SPEEDUP_PATH/.history
    sed -i "1i\\$record" $SPEEDUP_PATH/.history
    echo $CHENYEEPRODUCTID > $SPEEDUP_PATH/.build_product
    echo $TARGET_BUILD_VARIANT > $SPEEDUP_PATH/.build_variant
}
show_build_history(){
    # Print the most recent build command (bold) followed by the full
    # history.  Aborts via error() when no history has been recorded.
    [ ! -e $SPEEDUP_PATH/.history ] && error 1 "NO BUILD HISTORY!"
    local dline="========================="
    printf '\033[1mThe Last Command:\n'
    printf '%s\033[0m\n\n' "$(head -n 1 $SPEEDUP_PATH/.history | cut -b 24-)"
    echo "$dline"" HISTORY ""$dline"
    cat $SPEEDUP_PATH/.history
}
main(){
    # Top-level driver: parse options, record history, verify the tree,
    # then run the requested build action / module / project and finally
    # regenerate the efuse image for the out directory.
    get_opts "$@"
    setup_speedup
    store_build_history "$0 $@"
    #check external/iw/version.sh before build
    check_external_iw_version_sh
    prebuild
    #copy chenyee code
    if [ -n "$COPY_CHENYEE" ];then
        echo "copy chenyee code"
        build_copy_cy_code
    fi
    # ACTION dispatches to one of the build_* functions (new/remake/...).
    if [ -n "$ACTION" ];then
        echo "********* build $ACTION *********"
        build_$ACTION
    fi
    if [ -n "$MODULE" ];then
        echo "********* build $MODULE*********"
        config_verified_boot
        build_module $MODULE
    fi
    # PROJECT_PATH may be "path:extra"; only the path part is checked.
    if [ -d "${PROJECT_PATH%%:*}" ];then
        echo "********* build $PROJECT_PATH *********"
        build_project $PROJECT_PATH
    fi
    if [ -n "$COPY_TARGET" -o "$MODULE" == "otapackage" ];then
        echo "********* build copy target *********"
        build_copy_target
    fi
    #Chenyee <CY_BSP_EFUSE> <chendonghai> modify out dir efuse download to false begin
    PRODUCT_OUT_ROOT="out/target/product"
    CY_TARGET_PRODUCT=${TARGET_PRODUCT/full_}
    TARGET_PRODUCT_OUT_ROOT="$PRODUCT_OUT_ROOT/${CY_TARGET_PRODUCT}"
    if [ -f cy_project/tools/verified_boot_tools/GenEfuseImg.py ];then
        chmod +x cy_project/tools/verified_boot_tools/GenEfuseImg.py
        ./cy_project/tools/verified_boot_tools/GenEfuseImg.py $TARGET_PRODUCT_OUT_ROOT
    fi
    #Chenyee <CY_BSP_EFUSE> <chendonghai> modify out dir efuse download to false end
}
#check external/iw/version.sh before build
check_external_iw_version_sh
# ---- Script entry: global defaults and environment setup ----
OPTS=""
MODULE=""
ACTION=""
IS_SERVER_BUILD=false
SHOWLOG=false
#CY_BUILD_ROOT_DIR=$(cd `dirname $0`; pwd)
CY_BUILD_ROOT_DIR=$(pwd)
SPEEDUP_PATH="$CY_BUILD_ROOT_DIR/.speedup"
SPEEDUP_STATE="$SPEEDUP_PATH/state"
export CY_BUILD_ROOT_DIR
GN_BUILD_ROOT_DIR=$CY_BUILD_ROOT_DIR
export GN_BUILD_ROOT_DIR
TARGET_BUILD_VARIANT="eng"
BUILD_TOOLS="$CY_BUILD_ROOT_DIR/build.sh --setup_ccache=true"
LOGPATH="BUILDING_LOG"
# Extract the Android platform major version from version_defaults.mk.
version_defaults=$CY_BUILD_ROOT_DIR/build/core/version_defaults.mk
tt=`cat $version_defaults | grep PLATFORM_VERSION. | cut -d '=' -f 2 | grep -Eo '[0-9]+'`
PLATFORM_VERSION=`echo $tt | cut -d ' ' -f 4`
echo "Android version $PLATFORM_VERSION"
# NOTE(review): CY_BUILD_ROOT_DIR is exported twice (also above) — harmless.
export CY_BUILD_ROOT_DIR
main "$@"
| true
|
4f429f16e3d3fba120304220f652a03818c96085
|
Shell
|
delight09/gadgets
|
/network/qiniu_logdownload_helper.sh
|
UTF-8
| 1,752
| 3.8125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash --
# Download yesterday's qiniu CDN traffic log for specified domain
# API refer: https://developer.qiniu.com/fusion/api/1226/download-the-log
# NOTICE: Log resource will delay for 8-10 hours, it's a known issue written in API doc.
# It's recommand to trigger this script after 2 AM UTC in order to get a 24h log.
# Dependency on `jq`, `curl`, `gunzip` tool.
# Only accept _ONE_ domain.
# USAGE: qiniu_renew_helper.sh xx.clouddn.com
# Qiniu API credentials (access key / secret key) — placeholders; replace
# with real values before deploying this script.
SECRET_AK=xxxxxx
SECRET_SK=yyyyyy
# Directory where CDN logs are stored, and the per-day filename prefix.
DIR_LOGFILE=/var/log/qiniu_cdn
PREFIX_LOGFILE=qiniucdn_
# Maximum retry attempts used by super_eval.
COUNT_MAX_TRY=5
super_eval() {
    # Retry helper: eval the command string in $1 until it exits 0 or
    # COUNT_MAX_TRY attempts have been made.  The command's stdout passes
    # through, so callers may capture it with $(super_eval "...").
    # NOTE: on retries the output of failed attempts is also emitted.
    # Fixes vs. original: `_exit_code=$(echo $?)` replaced by plain `$?`
    # (the echo subshell was a no-op detour), and the loop variables are
    # now `local` so they no longer leak into the caller's scope.
    local _exit_code=255 # foo value: forces at least one attempt
    local _count=0
    while [[ $_exit_code -ne 0 ]] && [[ $_count -lt $COUNT_MAX_TRY ]]
    do
        eval "$1"
        _exit_code=$?
        _count=$(($_count + 1))
    done
}
# Remove leftover partial downloads from a previous failed run.
rm -f ${DIR_LOGFILE}/*.tmp.gz
domain_name=$1
# Sign the API path with HMAC-SHA1 and URL-safe base64 (QBox token).
tk=$(echo "/v2/tune/log/list" |openssl dgst -binary -hmac $SECRET_SK -sha1 |base64 | tr + - | tr / _)
# Logs are produced per Beijing-time day; fetch yesterday's.
target_date=$(TZ="Asia/Shanghai" date +%Y-%m-%d -d "yesterday")
str_json_res=''
_curl_data="{\"day\":\"$target_date\",\"domains\":\"${domain_name}\"}"
_curl_cmd="curl -sfL -X POST -H '""Authorization: QBox ${SECRET_AK}:${tk}""' http://fusion.qiniuapi.com/v2/tune/log/list -d '"$_curl_data"' -H Content-Type:application/json"
str_json_res=$(super_eval "$_curl_cmd")
_index=0
_url=''
_curl_cmd=''
cd $DIR_LOGFILE # enter working dir
# One "url" occurrence per log segment; download each with jq-extracted URL.
for i in $(echo $str_json_res | grep -o url)
do
    _url=$(echo $str_json_res | jq '.data["'$domain_name'"]['$_index'].url' | tr -d \")
    _curl_cmd="curl -sfL -o $_index.tmp.gz '"$_url"'" # tag .tmp.gz for later concatenate
    super_eval "$_curl_cmd"
    _index=$(($_index + 1))
done
# Decompress all segments and concatenate them into one per-day log file.
gunzip ./*.gz
cat *.tmp >${PREFIX_LOGFILE}${target_date}.log
rm *.tmp
cd - >/dev/null # exit working dir
| true
|
fcaa45dc54631afe9e6cd3e3521ae78d8bc56401
|
Shell
|
mrvon/leetcode
|
/0192/0192_word_frequency.sh
|
UTF-8
| 194
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
# Read from the file words.txt and output the word frequency list to stdout.
# Split each line into words, count occurrences, and print "word count"
# sorted by descending frequency.
# Fixes vs. original: drop the useless `cat | awk` (awk reads the file
# directly) and sort counts numerically (-rn) — the previous plain `sort -r`
# only worked because uniq -c happens to right-pad its counts.
awk '{ for (i = 1; i <= NF; i++) { print $i } }' words.txt \
    | sort | uniq -c | sort -rn | awk '{ print $2, $1 }'
| true
|
e2f134ffd6f4671696d9dda91988eb4a89f49c08
|
Shell
|
trucnguyenlam/mucke
|
/contrib/abcd/src/configure
|
UTF-8
| 652
| 3.828125
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# (C) 1997 - 1998 Armin Biere
# $Id: configure,v 1.2 2000-05-08 11:33:36 biere Exp $
# Wrapper around the real configure scripts: in --manual mode, prefer the
# dialog-based configurator when the `dialog` tool is found on PATH.

AUTOMATIC=true

# Parse command-line options; only --manual is recognized.
while [ $# -ne 0 ]
do
  case $1 in
    --manual) AUTOMATIC=false;;
    *)
      echo "*** unknown command line option $1" 1>&2
      echo "*** usage: configure [--manual]" 1>&2
      exit 1;;
  esac
  shift
done

if [ $AUTOMATIC = false ]
then
  # Search PATH (plus the empty prefix, i.e. "/dialog") for the dialog tool.
  # NOTE(review): the empty $dir makes the first probe check "/dialog",
  # not "./dialog" — confirm whether the current directory was intended.
  found_dialog=no
  for dir in "" `echo $PATH | sed -e 's,:, ,g'`
  do
    if test -f "${dir}/dialog"
    then
      found_dialog=yes
      break
    fi
  done

  if test "$found_dialog" = yes
  then
    . ./configure.dialog
  else
    . ./configure.sh --manual
  fi
else
  . ./configure.sh
fi
| true
|
696fcee8eb2eeaeef9e5429a52244c36c589e964
|
Shell
|
woody/dotfiles
|
/git.sh
|
UTF-8
| 351
| 3.640625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Marker so sourcing scripts can detect that this git helper library is loaded.
export GIT_INCLUDED=true
update_git_repo () {
  # Sync a local git repository with a remote.
  #   $1 - local repository directory (created and `git init`ed if absent)
  #   $2 - remote URL (registered as "origin" on first initialisation)
  # Fixes vs. original: quote the path/URL expansions and abort when `cd`
  # fails — previously a failed cd would run `git pull` in the wrong
  # directory.
  local dir=$1 remote=$2
  # Is it git repo?
  if [ -d "$dir"/.git ]; then
    cd "$dir" || return
  else
    mkdir -p "$dir" && cd "$dir" || return
    git init && git remote add origin "$remote"
  fi
  git pull origin master

  # Back to original
  cd - >>/dev/null
}
update_github_repo () {
  # Convenience wrapper: sync local directory $1 against the GitHub
  # repository named by $2 (given as "owner/name").
  local gh_url="https://github.com/${2}.git"
  update_git_repo $1 "$gh_url"
}
| true
|
1f251dec729e1a04f2b95eb1f1f70f3a341e68d0
|
Shell
|
hypnoglow/homka
|
/bin/homka-deploy
|
UTF-8
| 4,706
| 4.3125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# This is a simple deploy tool.
################################################################################
# Include libraries
. ${_SELF_HOME}/lib/std.sh
. ${_SELF_HOME}/lib/io.sh
. ${_SELF_HOME}/lib/errors.sh
main() {
    # Entry point: parse CLI arguments, load the project config file,
    # validate required settings, then upload (deliver) and activate
    # (deploy) the requested build.
    declare -g build=""
    declare -g force=false

    process_args "$@"
    check_args
    io::read_conf "${config}"
    check_variables_defined
    deliver
    deploy
}
# Arguments handler.
process_args() {
while [[ $# > 0 ]] ; do
case $1 in
-h|--help)
print_usage
exit 0
;;
-n|--build-number)
build="$2"
shift
;;
-f|--force)
force=true
;;
-*)
_error "Unknown argument $param"
print_usage
exit ${_ERROR_UNKNOWN_ARGUMENT}
;;
*)
config=$1
break
;;
esac
shift
done
}
# Ensures that all necessary variables are passed as arguments.
check_args() {
if [ -z "${build}" ] ; then
_error "Build is not specified."
print_usage
exit ${_ERROR_VARIABLE_NOT_DEFINED}
fi
if [ -z "${config}" ] ; then
_error "Config file is not specified!"
print_usage
exit ${_ERROR_VARIABLE_NOT_DEFINED}
fi
}
# Ensures that all necessary variables are defined in config or as arguments.
check_variables_defined() {
local variables=(
builds_store
project
tag_prefix
remote_user
remote_host
)
local variable
local error=false
for variable in ${variables[@]}; do
if [ -z "${!variable}" ]; then
error=true
_error "${variable} is not defined."
fi
done
if $error; then
print_usage
exit ${_ERROR_VARIABLE_NOT_DEFINED}
fi
}
print_usage() {
    # Print CLI usage help to stdout.
    # Bugfix: the usage text advertised "-b" while process_args actually
    # accepts -n/--build-number; the text now matches the parser.
    cat <<EOF
Usage:
    $_CALLER deploy -n <N> <path..>

    -n|--build-number    Build number, e.g. "-n 4"
    <path..>             Path to your project's config.conf
                         See \`template.conf\` for further details.

$_SELF version $_VERSION
EOF
}
deliver() {
    # Upload the build tarball to the remote host and unpack it under
    # ~/<project>/releases/<build>, then point releases/last at it.
    # Requires: build, builds_store, project, tag_prefix, remote_user,
    # remote_host (validated earlier).
    declare -g build_tag="${tag_prefix}${build}"
    declare -g build_file="${project}@${build_tag}.tar.gz"
    local build_file_path="${builds_store}/${project}/${build_file}"

    if [ ! -e "${build_file_path}" ] ; then
        _error "Build file ${build_file_path} not found."
        exit 67 #TODO
    fi

    _info "Prepare host for build package uploading"
    ssh "${remote_user}@${remote_host}" "mkdir -p ~/tmp ~/${project}/releases/${build}"
    _expect "Cannot create ~/tmp & ~/${project}/releases/${build} directories on remote"

    _info "Deliver package to ${remote_host}"
    scp "${builds_store}/${project}/${build_file}" "${remote_user}@${remote_host}:~/tmp/${build_file}"
    _expect "Cannot deliver package to ${remote_host}"

    # Extract archive to releases directory and then remove tar file
    _info "Extracting package tarball"
    ssh "${remote_user}@${remote_host}" "tar -xzf tmp/${build_file} -C ~/${project}/releases/${build} ; rm tmp/${build_file}"
    _expect "Cannot extract package"

    # Change a symlink to this build
    _info "Creating symlink to last build..."
    ssh "${remote_user}@${remote_host}" "ln -sfn ${build} ~/${project}/releases/last"
    _expect "Cannot create symlink to delivered build"
}
deploy() {
    # Activate the delivered build on the remote host: symlink every
    # top-level entry of releases/<build> into ~/<project>/, backing up
    # any pre-existing regular files/dirs as <name>~ first.
    # Prompts for confirmation unless --force was given.
    if ! ${force}; then
        echo -n "Finish deploy? [y/n]: "
        read answer
        if [ "$answer" != "y" ] ; then
            exit 0
        fi
    fi

    _info "Installing main symlinks..."
    # The here-doc below is the script executed remotely; its contents are
    # part of the payload and must not be altered.
    ssh "${remote_user}@${remote_host}" /usr/bin/env bash <<- HERE
	# Make sure hidden files are not expanded in wildcard
	shopt -u dotglob
	for filepath in ~/${project}/releases/${build}/* ; do
	filename=\$( basename \$filepath )
	# If destination file is a normal file or directory - backup it.
	if [ -e ~/${project}/\$filename ] && [ ! -L ~/${project}/\$filename ] ; then
	mv ~/${project}/\$filename ~/${project}/\$filename~
	if [ $? -ne 0 ] ; then
	echo "Cannot backup ~/${project}/\$filename" >&2
	exit 1
	fi
	fi
	ln -sfn ~/${project}/releases/${build}/* ~/${project}/
	if [ $? -ne 0 ] ; then
	echo "Cannot set symlink ~/${project}/\$filename -> \$filepath" >&2
	exit 1
	fi
	done
	HERE
    _expect "Cannot install main symlinks"

    _info "Build ${build} has been deployed!"
}
# Run the deploy tool; exit 0 explicitly on success.
main "$@"
exit 0
| true
|
a43c5b99409e99e5ef05bf48018384ee02d3409e
|
Shell
|
augix/unsup_vvs
|
/unsup_vvs/neural_fit/brainscore_mask/run_behavior.sh
|
UTF-8
| 1,222
| 2.515625
| 3
|
[] |
no_license
|
# The `: "..."` below is a no-op whose quoted argument serves as a
# block comment: it preserves an earlier per-setting benchmark loop that
# is currently disabled.  Its contents must stay byte-identical.
:"
for set_func in \
    cate_settings.cate_seed0 \
    cate_settings.cate_p03 \
    llp_settings.llp_p03 \
    mt_settings.mt_p03 \
    untrained_settings.untrn_seed0 \
    la_settings.old_la_mid \
    la_settings.la_seed0 \
    ir_settings.ir_seed0 \
    color_settings.color_seed0 \
    rp_settings.rp_seed0 \
    depth_settings.depth_seed0 \
    ae_settings.ae_seed0 \
    dc_settings.dc_seed0 \
    la_cmc_settings.cmc_seed0 \
    cpc_settings.cpc_seed0
do
    RESULTCACHING_DISABLE=model_tools.activations python -W ignore \
        brainscore_mask/bs_fit_neural.py \
        --set_func ${set_func} \
        --bench_func objectome_i2n_with_save_layer_param_scores \
        --gpu ${1} --id_suffix save
    #RESULTCACHING_DISABLE=model_tools.activations python -W ignore \
    #    brainscore_mask/bs_fit_neural.py \
    #    --set_func ${set_func} \
    #    --bench_func objectome_i2n_with_save_spearman_layer_param_scores \
    #    --gpu ${1} --id_suffix save
done
"
# Active section: run the single-behavior benchmark for each method/seed
# combination; $1 is the GPU id forwarded to the inner script.
#for method in cate la ir ae untrn simclr depth color rp cpc cmc dc
for method in depth color rp cmc dc
do
    for seed in 0 1 2
    do
        sh brainscore_mask/run_single_behavior.sh $1 ${method}_settings.${method}_seed${seed}
    done
done
| true
|
c5c77af14baa1cfe3f4d37641b3ff77031e7554d
|
Shell
|
logan248/dotfiles
|
/scripts/setup.sh
|
UTF-8
| 1,324
| 3.65625
| 4
|
[] |
no_license
|
#!/usr/bin/env sh
fonts(){
[ ! -d "$HOME/.local/share" ] && \
mkdir -p $HOME/.local/share
echo "Copying ~/.config/fontconfig/ to ~/.config/fontconfig"
[ ! -d "$HOME/.config/fontconfig" ] && \
mkdir -p $HOME/.config/fontconfig
cp -r $HOME/dotfiles/.config/fontconfig \
$HOME/.config/fontconfig && \
echo "Copying fonts to ~/.local/share..." && \
cp -r $HOME/dotfiles/.local/share/fonts \
$HOME/.local/share/ && \
echo "Generating fonts cache..." && \
fc-cache -f -v && \
echo "Verifying if font was cached successfully..." && \
fc-list | grep "Hack" && \
echo "Hack font was successfully cached."
}
bash_prompt(){
cp -r $HOME/dotfiles/bash_prompt/.aliases \
$HOME/dotfiles/bash_prompt/.bash_prompt \
$HOME/dotfiles/bash_prompt/.bash_profile \
$HOME/dotfiles/bash_prompt/.dircolors \
$HOME
}
neovim(){
[ ! -d "$HOME/.config" ] && \
mkdir -p $HOME/.config
cp -r $HOME/dotfiles/.config/nvim \
$HOME/.config
}
# Update pip
# and install some packages
verify_pip(){
which pip &> /dev/null
if [ $? -eq 0 ]; then
pip install pip --upgrade
pip install pipenv
pip install youtube-dl
fi
}
others(){
cp $HOME/dotfiles/.git-completion.bash \
$HOME/dotfiles/.gitconfig \
$HOME
}
bash_prompt
verify_pip
neovim
others
fonts
| true
|
a3ec61679f250b6923efd3e41581bd94b6c92040
|
Shell
|
FedotovDN/made_hpc_hw2
|
/script4
|
UTF-8
| 95
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/bash
if [ ! -f "Linux" ]; then
echo "very easy"
touch "Linux"
else
echo "course"
fi
| true
|
17f0996376941bcb4a429f654474a4951bba4a0b
|
Shell
|
gstackio/harbor-boshrelease
|
/jobs/harbor/templates/bin/post-start.erb.sh
|
UTF-8
| 1,619
| 3.078125
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
#!/usr/bin/env bash
set -e # exit immediately if a simple command exits with a non-zero status
source /var/vcap/packages/common/utils.sh
waitForDBReady() {
set +e
TIMEOUT=12
while [ $TIMEOUT -gt 0 ]; do
$DOCKER_CMD exec harbor-db pg_isready | grep "accepting connections"
if [ $? -eq 0 ]; then
break
fi
TIMEOUT=$((TIMEOUT - 1))
sleep 5
done
if [ $TIMEOUT -eq 0 ]; then
echo "Harbor DB cannot reach within one minute."
clean_db
exit 1
fi
set -e
}
changeUserConfigSetting() {
key=$1
value=$2
$DOCKER_CMD exec harbor-db psql -U postgres -d registry -c "insert into properties (k, v) values ('$key', '$value') on conflict (k) do update set v = '$value';"
}
waitForDBReady
changeUserConfigSetting auth_mode <%= p("auth_mode") %>
<%- if p("auth_mode") == "uaa_auth" %>
<%- if p("uaa.is_saml_backend") == true %>
changeUserConfigSetting auth_mode oidc_auth
changeUserConfigSetting oidc_name uaa
changeUserConfigSetting oidc_endpoint <%= p("uaa.url") %>/oauth/token
changeUserConfigSetting oidc_client_id <%= p("uaa.client_id") %>
changeUserConfigSetting oidc_client_secret <%= p("uaa.client_secret") %>
changeUserConfigSetting oidc_scope openid
changeUserConfigSetting oidc_verify_cert false
<%- else %>
changeUserConfigSetting uaa_endpoint <%= p("uaa.url") %>
changeUserConfigSetting uaa_client_id <%= p("uaa.client_id") %>
changeUserConfigSetting uaa_client_secret <%= p("uaa.client_secret") %>
changeUserConfigSetting uaa_verify_cert <%= p("uaa.verify_cert") %>
<%- end %>
<%- end %>
exit 0
| true
|
d3f1ee8a7e6c4606969fc0e7f68bde950433c031
|
Shell
|
loboris/OdroidC1-BuildLinux
|
/install_lxde_desktop
|
UTF-8
| 7,378
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/bash
# ******************************
# Install minimal lxde desktop *
# ******************************
if [ "$(id -u)" != "0" ]; then
echo "Script must be run as root !"
exit 0
fi
echo ""
date
echo -e "\033[36m======================="
echo -e "Installing LXDE Desktop"
echo -e "=======================\033[37m"
echo ""
# Check if we have enough disk space.
S=`df / | tail -n 1 | awk '{printf $4}' | sed s/[G,M,K]//g`
if [ "$S" -le "500000" ]; then
echo "We need at least 500MB free to perform minimal desktop instalation"
echo "Have you resized your partition (fs_resize)?"
exit 0
fi
# SET USER NAME !!
_user="odroid"
logfile="lxde_install.log"
#logfile=/dev/stdout
#_auto=""
_auto="-y -q"
_DST=`lsb_release -si`
_REL=`lsb_release -sc`
echo "Package update..."
apt-get $_auto update >> $logfile
echo "Package upgrade..."
apt-get $_auto upgrade >> $logfile
echo ""
echo "$_DST - $_REL, Installing LXDE DESKTOP..."
echo "$_DST - $_REL, Installing LXDE DESKTOP..." > $logfile
# === Install base packages =======================================================================================================================================================
echo " installing base packages, please wait..."
if [ "${_REL}" = "wheezy" ]; then
apt-get $_auto install pulseaudio pulseaudio-module-x11 pulseaudio-utils alsa-base alsa-oss alsa-utils alsa-tools libasound2 aml-libs --no-install-recommends >> $logfile
else
apt-get $_auto install pulseaudio pulseaudio-module-x11 pulseaudio-utils alsa-base alsa-oss alsa-utils alsa-tools libasound2-data aml-libs --no-install-recommends >> $logfile
fi
apt-get clean
# === Install desktop =============================================================================================================================================================
echo " installing xserver & lxde desktop, please wait..."
if [ "${_REL}" = "trusty" ]; then
apt-get $_auto install gstreamer1.0-pulseaudio mali-x11 xserver-xorg-video-mali >> $logfile
apt-get clean
apt-get $_auto install mesa-utils mesa-utils-extra glmark2-es2 >> $logfile
apt-get clean
apt-get $_auto install lubuntu-core --no-install-recommends >> $logfile
apt-get clean
apt-get $_auto install synaptic lxappearance lxterminal leafpad pavucontrol dmz-cursor-theme metacity metacity-themes dconf-editor software-properties-gtk lxtask gnome-calculator xarchiver evince geany lxsession-edit update-notifier gnome-themes-standard >> $logfile
apt-get clean
else
apt-get $_auto install xinit xserver-xorg lxde lightdm lightdm-gtk-greeter policykit-1 --no-install-recommends >> $logfile
apt-get clean
if [ "${_DST}" = "Ubuntu" ] ; then
apt-get $_auto install humanity-icon-theme --no-install-recommends >> $logfile
fi
apt-get $_auto install synaptic pavucontrol software-properties-gtk lxtask galculator policykit-1-gnome gksu --no-install-recommends >> $logfile
apt-get clean
fi
# === Install network packages & internet browser =================================================================================================================================
# === you don't have to install internet browser, you can save ~100MB ===
echo " installing network packages, please wait..."
if [ "${_DST}" = "Ubuntu" ] ; then
apt-get $_auto install firefox gvfs-fuse gvfs-backends --no-install-recommends >> $logfile
else
apt-get $_auto install iceweasel gvfs-fuse gvfs-backends --no-install-recommends >> $logfile
fi
apt-get clean
if [ -f /etc/X11/xorg.conf ]; then
cp /etc/X11/xorg.conf /usr/local/bin/xorg.conf
fi
apt-get $_auto remove xserver-xorg --purge >> $logfile
if [ "${_REL}" = "trusty" ]; then
apt-get $_auto install xserver-xorg lubuntu-core >> $logfile
else
apt-get $_auto install xserver-xorg >> $logfile
fi
apt-get clean
if [ -f /usr/local/bin/xorg.conf ]; then
mv /usr/local/bin/xorg.conf /etc/X11/xorg.conf
fi
# === Configuration ===============================================================================================================================================================
echo ""
echo "Configuring desktop..."
if [ -f /etc/X11/Xwrapper.config ]; then
cat /etc/X11/Xwrapper.config | sed s/"allowed_users=console"/"allowed_users=anybody"/g > /tmp/_xwrap
mv /tmp/_xwrap /etc/X11/Xwrapper.config
fi
if [ "${_REL}" = "trusty" ]; then
cat /etc/xdg/lxsession/Lubuntu/desktop.conf | sed s/"windows_manager\/command=openbox"/"windows_manager\/command=metacity"/g > /tmp/_desktop.conf
mv /tmp/_desktop.conf /etc/xdg/lxsession/Lubuntu/desktop.conf
echo " configuring file manager"
cat /etc/xdg/pcmanfm/lubuntu/pcmanfm.conf | sed s/"mount_on_startup=1"/"mount_on_startup=0"/g > /tmp/_pcmanfm.conf
mv /tmp/_pcmanfm.conf /etc/xdg/pcmanfm/lubuntu/pcmanfm.conf
cat /etc/xdg/pcmanfm/lubuntu/pcmanfm.conf | sed s/"mount_removable=1"/"mount_removable=0"/g > /tmp/_pcmanfm.conf
mv /tmp/_pcmanfm.conf /etc/xdg/pcmanfm/lubuntu/pcmanfm.conf
mv /wallpaper.png /usr/share/lubuntu/wallpapers/lubuntu-default-wallpaper.png > /dev/null 2>&1
if [ -f /etc/lightdm/lightdm-gtk-greeter.conf ]; then
cat /etc/lightdm/lightdm-gtk-greeter.conf | sed "/background=\/usr/d" > /tmp/_greet
mv /tmp/_greet /etc/lightdm/lightdm-gtk-greeter.conf
cat /etc/lightdm/lightdm-gtk-greeter.conf | sed '/\[greeter\]/abackground=\/usr\/share\/lubuntu\/wallpapers\/lubuntu-default-wallpaper.png' > /tmp/_greet
mv /tmp/_greet /etc/lightdm/lightdm-gtk-greeter.conf
fi
if [ -f /boot/boot.ini ]; then
cat /boot/boot.ini | sed s/"setenv m_bpp \"16\""/"setenv m_bpp \"32\""/g > /tmp/_bootini
mv /tmp/_bootini /boot/boot.ini
cat /boot/boot.ini | sed s/"setenv vpu \"0\""/"setenv vpu \"1\""/g > /tmp/_bootini
mv /tmp/_bootini /boot/boot.ini
fi
if [ -f /media/boot/boot.ini ]; then
cat /media/boot/boot.ini | sed s/"setenv m_bpp \"16\""/"setenv m_bpp \"32\""/g > /tmp/_bootini
mv /tmp/_bootini /media/boot/boot.ini
cat /media/boot/boot.ini | sed s/"setenv vpu \"0\""/"setenv vpu \"1\""/g > /tmp/_bootini
mv /tmp/_bootini /media/boot/boot.ini
fi
else
if [ -f /etc/lightdm/lightdm-gtk-greeter.conf ]; then
cat /etc/lightdm/lightdm-gtk-greeter.conf | sed "/background=\/usr/d" > /tmp/_greet
mv /tmp/_greet /etc/lightdm/lightdm-gtk-greeter.conf
cat /etc/lightdm/lightdm-gtk-greeter.conf | sed '/\[greeter\]/abackground=\/usr\/share\/lxde\/wallpapers\/lxde_blue.jpg' > /tmp/_greet
mv /tmp/_greet /etc/lightdm/lightdm-gtk-greeter.conf
fi
fi
#*********************
# ** CONFIGURE SOUND
#*********************
cat > /etc/asound.conf << _EOF_
pcm.!default {
type plug
slave {
pcm "hw:0,1"
}
}
ctl.!default {
type hw
card 0
}
_EOF_
if [ -f /etc/pulse/default.pa ]; then
cat /etc/pulse/default.pa | sed s/"#load-module module-alsa-sink"/"load-module module-alsa-sink"/g > /tmp/default.pa
mv /tmp/default.pa /etc/pulse/default.pa
cat /etc/pulse/default.pa | sed s/"#load-module module-alsa-source device=hw:1,0"/"load-module module-alsa-source device=hw:0,1"/g > /tmp/default.pa
mv /tmp/default.pa /etc/pulse/default.pa
fi
usermod -a -G adm,dialout,cdrom,audio,dip,video,plugdev,netdev,fuse $_user
chown -R $_user:$_user /home/$_user
echo ""
echo "**********************************************"
echo "* LXDE DESKTOP INSTALLED, please REBOOT now! *"
echo "**********************************************"
date
echo ""
| true
|
08784cbe1476ab864f423803bef56f235a686e79
|
Shell
|
FBoisson/ISN-live
|
/live-isn/DEBIAN/prerm
|
UTF-8
| 466
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/sh -e
# remove alternatives links
if [ "$1" = "remove" ]; then
grep -v "#backup ISN" /etc/crontab > /tmp/crontab
mv /etc/crontab /etc/crontab.dpkg
mv /tmp/crontab /etc/crontab
mv /etc/initramfs-tools/initramfs.conf /etc/initramfs-tools/initramfs.conf.live
mv /etc/initramfs-tools/initramfs.conf /etc/initramfs-tools/initramfs.conf.old
mv /etc/skel/.bashrc /etc/skel/.bashrc.live
mv /etc/skel/.bashrc /etc/skel/.bashrc.old
fi
exit 0
| true
|
bddf0b08ae16a4156b97df01b606da66260d6862
|
Shell
|
ermaker/bootstrap
|
/vagrant.sh
|
UTF-8
| 268
| 3.15625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
VAGRANT_VERSION="1.7.4"
VAGRANT_DEB=vagrant_${VAGRANT_VERSION}_x86_64.deb
if ! command -v vagrant &>/dev/null; then
\curl -sSL "https://dl.bintray.com/mitchellh/vagrant/${VAGRANT_DEB}" -o $VAGRANT_DEB
sudo dpkg -i $VAGRANT_DEB
rm -f $VAGRANT_DEB
fi
| true
|
414c36c602239f4cba795c31f4c1fc6976999695
|
Shell
|
chenxfeng/kaggle-dogs-vs-cats-caffe
|
/create_submissions_for_all.sh
|
UTF-8
| 379
| 2.625
| 3
|
[] |
no_license
|
BASE_FOLDER="finetuning" #BASE_FOLDER="learning_from_scratch"
echo "Base folder "$BASE_FOLDER
for item in `ls -v $BASE_FOLDER`
do
echo "Create submission for: "$item
time python create_kaggle_submission_probability.py $BASE_FOLDER/$item/deploy.prototxt $BASE_FOLDER/$item/model.caffemodel $BASE_FOLDER/$item/mean.npy /home/myuser/Desktop/CatsVsDogs/kaggle_data/test
done
| true
|
7988c3db0f36676f2514f765e09c3433b3477e30
|
Shell
|
emalm/gitea-boshrelease
|
/jobs/gitea/templates/bpm-pre-start.erb
|
UTF-8
| 1,295
| 3.203125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
gitea_bin=/var/vcap/packages/gitea/gitea
source_config_dir=/var/vcap/jobs/gitea/config
target_config_dir=/var/vcap/store/gitea/config
mkdir -p $target_config_dir
export PATH=/var/vcap/packages/git/bin:$PATH
if [[ -e $target_config_dir/INTERNAL_TOKEN ]]; then
echo "INTERNAL_TOKEN file exists; skipping generation"
else
echo "Generating INTERNAL_TOKEN file"
$gitea_bin generate secret INTERNAL_TOKEN > $target_config_dir/INTERNAL_TOKEN
fi
if [[ -e $target_config_dir/LFS_JWT_SECRET ]]; then
echo "LFS_JWT_SECRET file exists; skipping generation"
else
echo "Generating LFS_JWT_SECRET file"
$gitea_bin generate secret LFS_JWT_SECRET > $target_config_dir/LFS_JWT_SECRET
fi
if [[ -e $target_config_dir/SECRET_KEY ]]; then
echo "SECRET_KEY file exists; skipping generation"
else
echo "Generating SECRET_KEY file"
$gitea_bin generate secret SECRET_KEY > $target_config_dir/SECRET_KEY
fi
cp $source_config_dir/gitea.ini $target_config_dir/gitea.ini
sed \
-i \
-e "s/INTERNAL_TOKEN_PLACEHOLDER/$(cat $target_config_dir/INTERNAL_TOKEN)/" \
-e "s/LFS_JWT_SECRET_PLACEHOLDER/$(cat $target_config_dir/LFS_JWT_SECRET)/" \
-e "s/SECRET_KEY_PLACEHOLDER/$(cat $target_config_dir/SECRET_KEY)/" \
$target_config_dir/gitea.ini
chown -R vcap:vcap $target_config_dir
| true
|
6a1a96148dacd637ca642ff00b82a01d85069d21
|
Shell
|
guluchen/z3
|
/contrib/ci/scripts/install-lib.sh
|
UTF-8
| 586
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
function install_openfst {
wget http://www.openfst.org/twiki/pub/FST/FstDownload/openfst-1.7.1.tar.gz
tar zxvf openfst-1.7.1.tar.gz
cd openfst-1.7.1
./configure
make
sudo make install
cd ../
}
function install_apron {
git clone https://github.com/antoinemine/apron.git
cd apron
./configure
make
sudo make install
cd ../
}
SCRIPT_DIR="$( cd ${BASH_SOURCE[0]%/*} ; echo $PWD )"
. ${SCRIPT_DIR}/run_quiet.sh
set -x
set -e
set -o pipefail
# Install openfst
echo "Installing Openfst..."
install_openfst
# Install Apron
echo "Installing Apron..."
install_apron
| true
|
806aae2c6f5a1a76326d6cdde52a4736fb2975f3
|
Shell
|
rbarreiros/vorwerk-tm5-oss-sources
|
/modified_packages/GPL_LGPL_licensed_packages/imx-bootlets_10.12.01/cst_tools/mk_signed_bootstream.sh
|
UTF-8
| 2,330
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/bash
# expects objcopy, elftosb and sbtool in the path
export PATH=$PATH:../linux:~/build_trunk/ltib/otp_tools:/opt/freescale/usr/local/gcc-4.4.4-glibc-2.11.1-multilib-1.0/arm-fsl-linux-gnueabi/arm-fsl-linux-gnueabi/bin
BOOTLETS_PATH=~/build_trunk/ltib/rpm/BUILD/imx-bootlets-src-10.12.01
OTP_KEY=~/build_trunk/ltib/otp_tools/otp.key_Bunker2
BD_FILE=~/build_trunk/ltib/cst_tools/build/linux_ivt_signed.bd
BOOTSTREAM_FILE=~/build_trunk/ltib/cst_tools/build/imx28_ivt_linux_signed.sb
ZIMAGE_FILE=~/build_trunk/ltib/rootfs/boot/zImage
# build bootlets
spath=`pwd`
cd ..
./mk_bootstream
cd $spath
# Doh! We HAVE to do it from sibling-dir, as required by cst tool
cd ./build
# convert bootlets to binary
cp -f $ZIMAGE_FILE zImage
cp -f $BOOTLETS_PATH/chooser_prep/chooser_prep chooser_prep
cp -f $BOOTLETS_PATH/power_prep/power_prep power_prep
cp -f $BOOTLETS_PATH/boot_prep/boot_prep boot_prep
cp -f $BOOTLETS_PATH/linux_prep/output-target/linux_prep linux_prep
objcopy -I elf32-little -O binary --gap-fill 0xFF $BOOTLETS_PATH/chooser_prep/chooser_prep chooser_prep.bin
objcopy -I elf32-little -O binary --gap-fill 0xFF $BOOTLETS_PATH/power_prep/power_prep power_prep.bin
objcopy -I elf32-little -O binary --gap-fill 0xFF $BOOTLETS_PATH/boot_prep/boot_prep boot_prep.bin
objcopy -I elf32-little -O binary --gap-fill 0xFF $BOOTLETS_PATH/linux_prep/output-target/linux_prep linux_prep.bin
# linux_prep is modified in-place and called twice, so we need to generate 2 signatures,
# modify entry_count 0->1 (hardcoded, offset 0x24 can move!)
./patch_linux_prep.py linux_prep.bin linux_kernel.bin
# generate HAB data
./fill_csf_tmpl.py chooser_prep.csf.tmpl
./fill_csf_tmpl.py boot_prep.csf.tmpl
./fill_csf_tmpl.py power_prep.csf.tmpl
./fill_csf_tmpl.py linux_prep.csf.tmpl
./fill_csf_tmpl.py linux_kernel.csf.tmpl
cst -o chooser_prep_hab_data < chooser_prep.csf
cst -o boot_prep_hab_data < boot_prep.csf
cst -o power_prep_hab_data < power_prep.csf
cst -o linux_prep_hab_data < linux_prep.csf
cst -o linux_kernel_hab_data < linux_kernel.csf
# generate signed bootstream with HAB data (SRK table) and encrypted with OTP key (AES-128)
# (-k, so only 1 key is possible)
elftosb -V -f imx28 -k $OTP_KEY -c $BD_FILE -o $BOOTSTREAM_FILE
# verify bootstream with OTP key only (no zero key)
sbtool -k $OTP_KEY $BOOTSTREAM_FILE
cd ..
| true
|
795938fbbbe2e66c844779fad80563477ac6bee4
|
Shell
|
ximitiejiang/PythonCodingSkill
|
/sync_codes/sync.sh
|
UTF-8
| 2,933
| 3.46875
| 3
|
[] |
no_license
|
#!/bin/sh
# 本脚本用于同步本文件夹下所有git repo
# 同步之前需要评估:是否有大文件产生,如果有,需要先去除大文件才能同步到github
# 执行方式:从命令行进入该脚本目录,然后运行 sh sync.sh
# -------------------------------------------------------
del_ipch()
{
for element in `find $1 -type d -name "ipch"`
do
rm -rf $element
done
}
echo "starting pull all the assigned repo from github...."
cd ./simple_ssd_pytorch
git pull
cd ..
echo "finish pull 1/9..."
cd ./PythonCodingSkill
git pull
cd ..
echo "finish pull 2/9..."
cd ./cv_study
git pull
cd ..
echo "finish pull 3/9..."
cd ./Car_LaneLines_Detector
git pull
cd ..
echo "finish pull 4/9..."
cd ./lessonslearn
git pull
cd ..
echo "finish pull 5/9..."
cd ./CppStudy
git pull
cd ..
echo "finish pull 6/9..."
cd ./machine_learning_algorithm
git pull
cd ..
echo "finish pull 7/9..."
cd ./machine_learning_for_stock
git pull
cd ..
echo "finish pull 8/9..."
# 由于该仓库是从slcv继承过来的,直接pull下来会有巨大的.git文件夹,所以git clone采用--depth=1,
# 但接下来更新的pull就是问题,所以暂时不pull,只push,有空再pull
#cd ./cvpk
#git pull
#cd ..
#echo "finish pull 10/9..."
# -------------------------------------------------------
echo "starting push all the local update to github..."
cd ./simple_ssd_pytorch
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 1/9 in ${path}------------"
cd ./PythonCodingSkill
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 2/9 in ${path}------------"
cd ./cv_study
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 3/9 in ${path}------------"
cd ./Car_LaneLines_Detector
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 4/9 in ${path}------------"
cd ./lessonslearn
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 5/9 in ${path}------------"
cd ./CppStudy
path=$(pwd) # 获得完整路径
echo "start to delete all the ipch document in ${path}" # 删除ipch里边大文件
del_ipch $path # 删除该路径下所有ipch文件夹
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 6/9 in ${path}------------"
cd ./machine_learning_algorithm
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 7/9 in ${path}------------"
cd ./machine_learning_for_stock
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 8/9 in ${path}------------"
cd ./cvpk
path=$(pwd)
git add .
git commit -m 'update'
git push
cd ..
echo "------------finish push 9/9 in ${path}------------"
# -------------------------------------------------------
echo 'synchronize finished!'
| true
|
be70d2c873b97a536e8467fb7228eb81887c98b0
|
Shell
|
purinchu/rabornrecord-docker-devenv
|
/start-dev-env
|
UTF-8
| 1,312
| 3.953125
| 4
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Run this script to start Docker with the proper development environment container
# No options are available.
# Assumes the user has already authenticated separately (for now).
if [ "x$GCP_PROJECT" = "x" ]; then
echo "No default Google cloud project is assigned. To set one, export"
echo "GCP_PROJECT to the project *ID* (not the name) of the GCP project"
echo "to work on by default, and then run this again."
exit 1
fi
AUTH_CONTAINER_NAME="gcloud-config"
GCLOUD_BASE_DOCKER_IMAGE="${GCP_PROJECT}-devenv"
# Check for whether we have setup the container volume holding cached auth yet
docker \
inspect -f '{{ .Mounts }}' \
${AUTH_CONTAINER_NAME} \
| grep -q 'root.\.config'
if [ "$?" != 0 ]; then
# Failure
echo <<EOF
Could not verify that you've already setup authentication into the dev environment
container.
Please run
docker run -it --name ${AUTH_CONTAINER_NAME} ${GCLOUD_BASE_DOCKER_IMAGE} gcloud auth login
(this launches a new container based off of Google Cloud SDK Docker image and saves the container,
including embedded authentication information, with the name ${AUTH_CONTAINER_NAME})
EOF
exit 1
fi
# Success
docker run --rm -it --volumes-from ${AUTH_CONTAINER_NAME} ${GCLOUD_BASE_DOCKER_IMAGE}
exit $?
| true
|
a39a6b17c40d7ebc298fa23d9a819f2eda5d24ee
|
Shell
|
jyothimorampudi/sample
|
/script.sh
|
UTF-8
| 90
| 2.984375
| 3
|
[] |
no_license
|
intA=10
intB=20
if [ $intA == $intB ]
then
echo "Both are equal"
else
echo "Not equal"
fi
| true
|
f0bb6f6f36cfc8d9be7ecbc6b973fd116c34923c
|
Shell
|
kagurazakayashi/pi
|
/U盘模式.sh
|
UTF-8
| 789
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
echo "dtoverlay=dwc2" | sudo tee -a /boot/config.txt
echo "dwc2" | sudo tee -a /etc/modules
echo "g_mass_storage" | sudo tee -a /etc/modules
dd if=/dev/zero of=/home/my_u_disk.bin bs=1024 count=1000 #1G
sudo modprobe g_mass_storage file=/home/my_u_disk.bin removable=1 dVendor=0x0781 idProduct=0x5572 bcdDevice=0x011a iManufacturer="SanDisk" iProduct="Cruzer Switch" iSerialNumber="1234567890"
# 在内存中创建
mkdir /mnt/vram
mount -t ramfs none /mnt/vram -o maxsize=6144m
mount ramfs /mnt/vram -t ramfs -o size=6144m
# 格式化为 exfat
apt install exfat-fuse exfat-utils
mkfs.exfat -n USBRAM /mnt/vram
fsck.exfat -n USBRAM /mnt/vram
# 开机启动脚本
sudo vim /etc/rc.local
# 在exit 0 之前添加代码
# 系统启动时在执行这段代码时是使用root用户权限的
| true
|
251b6ae042bc987a1c6da811a3f9f34ac0377ae9
|
Shell
|
adellam/ansible-playbooks
|
/library/debian-ubuntu/roles/postgresql/templates/postgresql_wal_backup_and_removal.j2
|
UTF-8
| 388
| 2.859375
| 3
|
[] |
no_license
|
#!/bin/bash
BASE_BACKUP_DIR={{ psql_base_backup_dir }}
WAL_ARCHIVES_LOG_DIR={{ psql_wal_archiving_log_dir }}
WAL_LATEST_BACKUP=
# The base backup dir needs to be empty
rm -f $BASE_BACKUP_DIR/*
pg_basebackup -F t -z -D $BASE_BACKUP_DIR
cd $WAL_ARCHIVES_LOG_DIR
WAL_LATEST_BACKUP=$( /bin/ls -1tr *.backup | tail -1 )
pg_archivecleanup $WAL_ARCHIVES_LOG_DIR $WAL_LATEST_BACKUP
exit $?
| true
|
7160968c8ce9c0e480b29eb2b7c84a37017c74c7
|
Shell
|
dmedme/web_path_web
|
/fdbase_windows.sh
|
UTF-8
| 3,373
| 2.875
| 3
|
[] |
no_license
|
# fdbase.sh - Global variables for PATH
# @(#) $Name$ $Id$
# Copyright (c) E2 Systems Limited 1993
#
ulimit -n 1024
PATH_IG=${PATH_IG:-}
PATH_SCENE=${PATH_SCENE:-}
# Establish directory where PATH is run
PATH_HOME=${PATH_HOME:-/c/e2}
# O/S type for establishing executables directory (PATH_SOURCE) and text editor
# (PATH_EDITOR)
PATH_OS=${PATH_OS:-NT4}
PATH_SOURCE=${PATH_SOURCE:-/c/e2/web_path_web}
PATH_EDITOR=${PATH_EDITOR:-vi}
# path and name of ./rules directory
PATH_RULE_BASE=$PATH_HOME/rules
pid=$$
export PATH_OS TERM PATH_RULE_BASE PATH_HOME PATH_EDITOR PATH_IG PATH_SCENE PATH_SOURCE pid
# path and name of directory for saved scripts
#
# The file extension of the script files
#
PATH_USER=${PATH_USER:-}
E2_HOME_PORT=${E2_HOME_PORT:-5000}
E2_HOME_HOST=${E2_HOME_HOST:-127.0.0.1}
PATH_DRIVER=${PATH_DRIVER:-t3drive}
#PATH_DRIVER=${PATH_DRIVER:-webdrive}
PATH_STAGGER=${PATH_STAGGER:-3}
E2_SCENE_LEN=${E2_SCENE_LEN:-3600}
E2_TEST_ID=${E2_TEST_ID:-ESALES}
E2_TEST_LEN=${E2_TEST_LEN:-86400}
E2_WEB_PORTS=${E2_WEB_PORTS:-3128}
#E2_ORA_WEB_PORTS=${E2_ORA_WEB_PORTS:-"9000 9500"}
#E2_T3_WEB_PORTS=${E2_ORA_WEB_PORTS:-"15001"}
# ************************************************************************
# Optional features
# E2_BOTH makes webdump show both in and out
# E2_VERBOSE makes webdump provide a human-readable decode of the
# ORACLE Web traffic
#
E2_BOTH=1
#E2_VERBOSE=1
export E2_BOTH E2_VERBOSE
PATH_THINK=${PATH_THINK:-5}
if [ $PATH_OS = NT4 -o $PATH_OS = LINUX ]
then
PATH_AWK=${PATH_AWK:-gawk}
else
PATH_AWK=${PATH_AWK:-nawk}
fi
# Application Redraw String
export PATH_THINK PATH_OS PATH_DRIVER PATH_AWK PATH_STAGGER E2_SCENE_LEN E2_TEST_ID E2_TEST_LEN PATH_EXT E2_WEB_PORTS E2_ORA_WEB_PORTS
if [ ! "$PATH_OS" = NT4 ]
then
case $PATH in
*$PATH_SOURCE*)
;;
*)
PATH=$PATH_SOURCE:$PATH
export PATH
;;
esac
fi
#
# Pick up portable configuration data
#
. pathenv.sh
export E2_CLIENT_LOCATION
export E2_TEST_LEN
export PATH_EXT
export E2_TEST_ID
export E2_SCENE_LEN
export PATH_STAGGER
export PATH_EXTRA_ARGS0
export PATH_DRIVER
export PATH_REMOTE
export PATH_TIMEOUT
export E2_HOME_HOST
export E2_HOME_PORT
export E2_ORA_TUNNEL_PORTS
export PATH_DEGENERATE
export PATH_SINGLE_THREAD
if [ -n "$E2_DEFAULT_SEP_EXP" ]
then
export E2_DEFAULT_SEP_EXP
fi
if [ -n "$E2_PROXY_PORT" ]
then
export E2_PROXY_PORT
fi
#
# Pick the correct script to control script capture
#
# The choices are Citrix (Windows only), and then between capture via the
# in-built proxy, and capture from the network.
#
if [ "$PATH_EXT" = "rec" ]
then
export FDSCRIPT=fdscript_citrix.sh
elif [ "$PATH_WKB" = tty ]
then
export FDSCRIPT=fdscript_captty.sh
elif [ "$PATH_DRIVER" = dotnetdrive ]
then
export FDSCRIPT=fdscript_capweb.sh
else
export FDSCRIPT=fdscript_proxweb.sh
fi
#E2_ORA_WEB_PORTS="9000 9500"
#E2_T3_WEB_PORTS="15001"
E2_TRAFMUL_EXTRA=
export E2_TRAFMUL_EXTRA
unset PATH_REMOTE
#
# Set up SQL*Plus access to the databasE, eg. as here
#
#ORACLE_HOME=/opt/oracle/product/10.1.0/db_1
#export ORACLE_HOME
#ORACLE_SID=STUAT3
#ORACLE_SID=e2acer
#export ORACLE_SID
#case $PATH in
#*$ORACLE_HOME/bin*)
# ;;
#*)
# PATH=$PATH:$ORACLE_HOME/bin
# LD_LIBRARY_PATH=$ORACLE_HOME/lib
# export PATH LD_LIBRARY_PATH
#esac
export E2_CLIENT_LOCATION E2_HOME_PORT E2_HOME_HOST PATH_DRIVER PATH_STAGGER E2_SCENE_LEN E2_TEST_ID E2_TEST_LEN E2_ORA_WEB_PORTS E2_T3_WEB_PORTS
| true
|
b677afc6e2da5091ad5d955bb3f11d6aa457cf69
|
Shell
|
beebus/CPUfactory4
|
/scripts/install.sh
|
UTF-8
| 272
| 2.6875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/usr/bin/env bash
mkdir -p deps
# install glui ==================================
if [ ! -d "deps/glui" ]; then
git clone https://github.com/libglui/glui deps/glui
fi
cd deps/glui
make
sudo cp lib/libglui.a /usr/local/lib
sudo cp include/GL/glui.h /usr/local/include
| true
|
c3bc30ebca954ea3eb9f3626553122667d88f8d3
|
Shell
|
hakatashi/esolang-box
|
/boxes/swift/script
|
UTF-8
| 175
| 2.515625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
infile=$(realpath "$1")
ln -sf "$infile" /tmp/code.swift
/root/.swiftenv/shims/swiftc -o /tmp/code /tmp/code.swift
cat - | /tmp/code
rm /tmp/code.swift /tmp/code
| true
|
b193615b69e418d85acc693e36903734b4d56e4f
|
Shell
|
vrvenky1407/terraform-aws-ghost
|
/node-and-mariadb-secure-install.sh
|
UTF-8
| 857
| 2.5625
| 3
|
[] |
no_license
|
MYSQL_ROOT_PASSWORD='mysqlroot123'
curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -
sudo apt install -y nodejs
node -v
npm -v
# Install MariaDB
sudo apt update -y
sudo apt install -y mariadb-server mariadb-client
sudo systemctl enable mariadb
sudo systemctl status mariadb
sudo mysql --user=root <<_EOF_
update mysql.user set authentication_string = PASSWORD('$MYSQL_ROOT_PASSWORD') where user='root';
update mysql.user set plugin = 'mysql_native_password' where user = 'root';
delete from mysql.user where user='';
delete from mysql.user where user='root' and Host NOT IN ('localhost', '127.0.0.1', '::1');
drop database if exists test;
delete from mysql.db where Db='test' OR Db='test\\_%';
flush privileges;
create database ghost;
grant all privileges on ghost.* to ghost@localhost identified by 'ghost1234';
flush privileges;
_EOF_
| true
|
ff29535f8f415b1477aadcdbc2a3c2838226c22d
|
Shell
|
chs2019/dotfiles
|
/home/.zsh/quotes.zsh
|
UTF-8
| 2,638
| 2.8125
| 3
|
[] |
no_license
|
# empty line
echo ""
# Mensagem
echo -e "\n\"Agora é o lugar onde as perguntas descansam e as respostas crescem, nos seus próprios tempos…\"\n\n~ Jeff Foster, \"Slow Down, Friend\"\n"
echo -e "\"You either die a hero or you live long enough to see yourself become the villain.\"\n\n~ Harvey Dent\n"
echo -e "\"All those moments will be lost in time... like tears in rain... Time to die.\"\n\n~ Batty - Blade Runner(1982)\n"
echo -e "\"Então, pra que querer ter sempre razão? Não quero ter razão, Quero é ser Feliz!\"\n\n~ Ferreira Gullar\n"
echo -e "\"Most of answers you need already exist in someone else's head; find those people.\"\n\n~ Unknown\n"
echo -e "\"Não exageres o culto da verdade; não há homem que ao fim de um dia não tenha mentido com razão muitas vezes.\"\n\n~ Unknown\n"
echo -e "\"I heard a joke once: Man goes to doctor. Says he's depressed. Says life is harsh and cruel.\n\
Says he feels all alone in a threatening world. Doctor says, \"Treatment is simple. The great clown Pagliacci is in town tonight.\n\
Go see him. That should pick you up.\" Man bursts into tears. Says, \"But doctor... I am Pagliacci.\"\n\
Good joke. Everybody laugh. Roll on snare drum. Curtains. \"\n\n~ Rorschach\n"
echo -e "\"Forge meaning, build identity.\"\n\n~ Andrew Solomon.\n"
echo -e "\"1º Curiosity comes first.\n2º Embrace the mess.\n3º Practice reflection.\"\n\n~Ramsey Musallam\n"
echo -e "\"Everybody is a genius. But if you judge a fish by its ability to climb a tree,\n\
it will live its whole life believing that it is stupid.\"\n\n~Albert Einstein\n"
echo -e "\"You will give the people of Earth an ideal to strive towards.
They will race behind you, they will stumble, they will fall.
But in time, they will join you in the sun, Kal.
In time, you will help them accomplish wonders. \"\n\n ~Jor-El (Man Of Steel)\n"
echo -e "\"Inventei um ditado, que diz, ‘Coleciono vinhos ruins’.
Porque se o vinho é bom, está à disposição e a pessoa está ali, eu abrirei\"\n\n ~Ric Elias\n"
echo -e "\"[Speaking to his son] You love playing with that.
You love playing with all your stuffed animals.
You love your Mommy, your Daddy. You love your pajamas.
You love everything, don't ya? Yea. But you know what, buddy?
As you get older... some of the things you love might not seem so special anymore.
Like your Jack-in-a-Box.
Maybe you'll realize it's just a piece of tin and a stuffed animal.
And the older you get, the fewer things you really love.
And by the time you get to my age, maybe it's only one or two things.
With me, I think it's one.\"\n
~Staff Sergeant William James (Hurt Locker)"
# Fortunes
echo -e "\n"
fortune | cowsay
| true
|
8d4825555a7df5871e7f715b3403daa69ca1736b
|
Shell
|
fysikum/cmbenv
|
/pkgs/module-cp2k.sh
|
UTF-8
| 1,444
| 3.375
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash

# Generate an environment-modules modulefile for CP2K.
# @MODULE_DIR@, @VERSION@ and @AUX_PREFIX@ are build-system placeholders
# substituted before this script runs.

pkg=module-cp2k
log=$(realpath "log_${pkg}")

echo "Building ${pkg}..." >&2

echo "Creating @MODULE_DIR@/@VERSION@..." >&2
mkdir -p "@MODULE_DIR@" || exit 1
# The quoted 'EOF' delimiter writes the modulefile body verbatim with no
# shell expansion — the content below is Tcl (modulefile), not shell.
cat > "@MODULE_DIR@/@VERSION@" <<'EOF'
#%Module###<-magic cookie ####################################################
set version @VERSION@
proc ModulesHelp { } {
puts stderr " "
puts stderr "This module loads CP2K which is a quantum chemistry and "
puts stderr "solid state physics software package."
puts stderr "\nVersion @VERSION@\n"
}
module-whatis "Name: cp2k"
module-whatis "Version: @VERSION@"
module-whatis "Description: quantum chemistry and solid state physics software"
if [ module-info mode load ] {
module load gsl/2.6
module load openblas/0.3.12
module load fftw/3.3.8
module load boost/1.71.0
module load scalapack/2.0.2
}
prepend-path PATH @AUX_PREFIX@/bin
prepend-path LD_LIBRARY_PATH @AUX_PREFIX@/lib64
prepend-path INCLUDE @AUX_PREFIX@/include
prepend-path MANPATH @AUX_PREFIX@/share/man
setenv CP2K_DIR @AUX_PREFIX@
setenv CP2K_BIN @AUX_PREFIX@/bin
setenv CP2K_INC @AUX_PREFIX@/include
setenv CP2K_LIB @AUX_PREFIX@/lib64
setenv CP2K_DATA_DIR @AUX_PREFIX@/data
EOF
# $? here is the exit status of the 'cat' redirection just above.
if [ $? -ne 0 ]; then
echo "Failed to build ${pkg}" >&2
exit 1
fi

echo "Finished building ${pkg}" >&2
# NOTE(review): ${cleanup} is never assigned anywhere in this script, so
# this prints an empty line — possibly a leftover from a template. Confirm.
echo "${cleanup}"
exit 0
| true
|
98db560f75197f87a1e363859734351b27fa26a5
|
Shell
|
pixelhandler/dotfiles
|
/bootstrap.sh
|
UTF-8
| 1,263
| 3.6875
| 4
|
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
#!/usr/bin/env bash
# Bootstrap dotfiles into $HOME.
# Borrowed from https://github.com/mathiasbynens/dotfiles/blob/master/bootstrap.sh
# and https://github.com/toranb/dotfiles/blob/master/symlink.sh
#
# Usage: bootstrap.sh [-f|--force]
#   -f/--force  skip the confirmation prompt before overwriting files.

cd "$(dirname "${BASH_SOURCE}")";
git pull origin master;

# Pull submodules, then rsync the repository contents (minus repo
# metadata) into $HOME and reload the shell profile.
function doIt() {
	source ./bin/ssh.sh
	git submodule init
	git submodule update
	git submodule foreach git submodule init
	git submodule foreach git submodule update
	source ./bin/vim.sh
	rsync --exclude ".git/" \
		--exclude ".gitignore" \
		--exclude ".gitmodules" \
		--exclude ".DS_Store" \
		--exclude "bootstrap.sh" \
		--exclude "README.md" \
		--exclude "LICENSE.txt" \
		-avh --no-perms . ~;
	chmod 700 ~/.ssh
	source ~/.bash_profile;
}

# [[ ... || ... ]] replaces the deprecated, ambiguous `[ ... -o ... ]`
# form; ${1-} keeps the test safe when no argument was given.
if [[ "${1-}" == "--force" || "${1-}" == "-f" ]]; then
	doIt;
else
	read -p "This may overwrite existing files in your home directory. Are you sure? (y/n) " -n 1;
	echo "";
	if [[ $REPLY =~ ^[Yy]$ ]]; then
		doIt;
	fi;
fi;

# Optionally configure git identity.
echo -n "Would you like to configure your git name and email? (y/n) => "; read answer
if [[ $answer = "Y" ]] || [[ $answer = "y" ]]; then
	echo -n "What is your git user name => "; read name
	git config --global user.name "$name"
	echo -n "What is your git email => "; read email
	git config --global user.email "$email"
fi

unset doIt;
| true
|
3d821fdeee9bbedfd2256276f85354f21f50c736
|
Shell
|
gluster/build-jobs
|
/build-gluster-org/scripts/regression-test-burn-in.sh
|
UTF-8
| 1,987
| 3.375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash

# Gluster regression "burn-in" CI job: clean the node of leftovers from
# previous runs, build GlusterFS from the checked-out workspace, run the
# regression suite, archive its log, and clean up again.
# Exit status is the build's (on build failure) or the regression suite's.

MY_ENV=`env | sort`
BURL=${BUILD_URL}consoleFull

# Display all environment variables in the debugging log
echo "Start time $(date)"
echo
echo "Display all environment variables"
echo "*********************************"
echo
echo "$MY_ENV"
echo

# use "7 and not "7" since RHEL use 7.6 while Centos use 7
grep -q 'VERSION_ID="7' /etc/os-release && export PYTHON=/usr/bin/python2.7

# Remove any gluster daemon leftovers from aborted runs
sudo -E bash /opt/qa/cleanup.sh

# Clean up the git repo
sudo rm -rf $WORKSPACE/.gitignore $WORKSPACE/*
sudo chown -R jenkins:jenkins $WORKSPACE
cd $WORKSPACE
git reset --hard HEAD

# Clean up other Gluster dirs
sudo rm -rf /var/lib/glusterd/* /build/install /build/scratch >/dev/null 2>&1

# Remove the many left over socket files in /var/run
# (gluster socket names are 32 hex characters)
sudo rm -f /var/run/????????????????????????????????.socket >/dev/null 2>&1

# Remove GlusterFS log files from previous runs
sudo rm -rf /var/log/glusterfs/* /var/log/glusterfs/.cmd_log_history >/dev/null 2>&1

# Recreate the directories the regression run expects, owned by jenkins.
JDIRS="/var/log/glusterfs /var/lib/glusterd /var/run/gluster /d /d/archived_builds /d/backends /d/build /d/logs /home/jenkins/root"
sudo mkdir -p $JDIRS
sudo chown jenkins:jenkins $JDIRS
chmod 755 $JDIRS

# Build Gluster
echo "Start time $(date)"
echo
echo "Build GlusterFS"
echo "***************"
echo
/opt/qa/build.sh
RET=$?
if [ $RET != 0 ]; then
    # Build failed, so abort early
    exit $RET
fi
echo

# Run the regression test
echo "Start time $(date)"
echo
echo "Run the regression test"
echo "***********************"
echo
sudo -E bash /opt/qa/regression.sh -c
RET=$?

# Archive the regression log into the workspace (best effort — '|| true'
# keeps the job going if the file is missing).
echo "Logs are archived at Build artifacts: https://build.gluster.org/job/${JOB_NAME}/${UNIQUE_ID}"
sudo mv /tmp/gluster_regression.txt $WORKSPACE || true
sudo chown jenkins:jenkins gluster_regression.txt || true

# do clean up after a regression test suite is run
sudo -E bash /opt/qa/cleanup.sh

# make sure that every file/diretory belongs to jenkins
sudo chown -R jenkins:jenkins $WORKSPACE

exit $RET
| true
|
d12b3dbd18310d3308ba0b9fad096bd5ef0e1b4b
|
Shell
|
TaylanUB/arch2parabola
|
/repo-list-diff
|
UTF-8
| 1,418
| 3.75
| 4
|
[] |
no_license
|
#!/bin/bash
# (C) This script is free software! You can do what you want with it, as long as you don't convert it into proprietary software
# and if you redistribute it either vertabim or modified you must do so under the same licence or GPLv3 or later.
#
# Compare Parabola's whitelist/blacklist against the packages visible in
# the currently configured pacman repositories. Results are written into
# the directory the script was started from:
#   "in whitelist and not in repo"  /  "in blacklist and in repo"

dirname=$(pwd)
tempdir=$(mktemp -d)
# Abort if we cannot enter the scratch directory — everything below
# downloads into (and later removes) it.
cd "$tempdir" || exit 1

#Run a sanity check
which pacman sudo wget >/dev/null 2>/dev/null || {
    echo "Cannot find pacman, sudo or wget, exiting";
    exit 1
}

echo ""
echo "Updating pacman database."
echo ""
sudo pacman -Sy --noconfirm >>"$dirname/db-sync" 2>>"$dirname/db-sync"

echo "Downloading the whitelist of free software packages."
echo ""
wget http://www.parabolagnulinux.org/docs/whitelist.txt 2>/dev/null || {
    echo "Download failed, exiting"
    exit 1
}

# First ':'-separated field of each list line is the package name.
a=($(cut -d: -f1 whitelist.txt))

echo "Searching for packages in whitelist and not in repo"
echo ""
for i in "${a[@]}" ; do
    pacman -Si "$i" >/dev/null 2>/dev/null || echo "$i" >> "$dirname/in whitelist and not in repo"
done

echo "Downloading the blacklist of proprietary software packages."
echo ""
wget http://www.parabolagnulinux.org/docs/blacklist.txt 2>/dev/null || {
    echo "Download failed, exiting"
    exit 1
}

a=($(cut -d: -f1 blacklist.txt))

echo "Searching for packages in blacklist and in repo"
echo ""
for i in "${a[@]}" ; do
    pacman -Si "$i" >/dev/null 2>/dev/null && echo "$i" >> "$dirname/in blacklist and in repo"
done

# Quoted removal of the scratch directory (was unquoted).
rm -rf "$tempdir"
exit 0
| true
|
118ecb8c8882cd24cf31aac489c49ededfd64e79
|
Shell
|
mmirko/bondmachine
|
/bondgo/src/regression.sh
|
UTF-8
| 2,259
| 3.390625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Regression driver for the bondgo compiler.
#
# Modes:
#   ./regression.sh reset     regenerate the reference .asm files
#   ./regression.sh diff [+]  interactively vimdiff actual vs reference
#                             ('+' also shows the source in the diff)
#   ./regression.sh           batch-compare; exit 1 on first mismatch
#
# Fix: the pass/fail comparison used "( ... ; exit 1 )" — that exit only
# terminated the subshell, so a failing test never aborted the script.
# The comparison now uses if/else and exits in the current shell.

BASE="test"

if [ "a$1" == "areset" ]
then
    # Regenerate the reference .asm output for every test source.
    for i in "$BASE"/sourc*go
    do
        echo
        echo
        echo "Resetting $i"
        TESTIN="$i"".asm"
        echo -n " Creating $TESTIN"
        go run bondgo_main.go -input-file "$i" -save-assembly "$TESTIN" && echo -e "\033[32m\033[300C\033[10D[ Reset ]\033[0m"
    done
    # Multi-program-mode sources produce several output files per source.
    for i in "$BASE"/mpm_sourc*go
    do
        echo
        echo
        echo "Resetting $i"
        TESTIN="$i"".asm"
        go run bondgo_main.go -input-file "$i" -save-assembly "$TESTIN" -mpm
    done
elif [ "a$1" == "adiff" ]
then
    # Interactive review: vimdiff actual output against the reference.
    for i in "$BASE"/sourc*go
    do
        echo
        echo
        echo "Testing $i"
        TESTIN="$i"".asm"
        TESTOUT="$i"".out"
        go run bondgo_main.go -input-file "$i" -save-assembly "$TESTOUT"
        echo -n " Comparing $TESTIN and $TESTOUT "
        if [ "a$2" == "a+" ]
        then
            vimdiff "$i" "$TESTIN" "$TESTOUT"
        else
            vimdiff "$TESTIN" "$TESTOUT"
        fi
        rm -f "$TESTOUT"
    done
    for i in "$BASE"/mpm_sourc*go
    do
        echo
        echo
        echo "Testing $i"
        TESTIN="$i"".asm"
        TESTOUT="$i"".out"
        go run bondgo_main.go -input-file "$i" -save-assembly "$TESTOUT" -mpm
        for j in "$TESTOUT"*
        do
            # Pair each generated chunk with its reference file by the
            # third '_'-separated field of the generated name.
            CMP1="${TESTIN}_$(echo "$j" | cut -d_ -f 3)"
            CMP2=$j
            echo -n " Comparing $CMP1 and $CMP2"
            if [ "a$2" == "a+" ]
            then
                vimdiff "$i" "$CMP1" "$CMP2"
            else
                vimdiff "$CMP1" "$CMP2"
            fi
            rm -f "$j"
        done
    done
else
    # Batch mode: compare and report; abort on the first mismatch.
    for i in "$BASE"/sourc*go
    do
        echo
        echo
        echo "Testing $i"
        TESTIN="$i"".asm"
        TESTOUT="$i"".out"
        go run bondgo_main.go -input-file "$i" -save-assembly "$TESTOUT"
        echo -n " Comparing $TESTIN and $TESTOUT "
        if cmp "$TESTIN" "$TESTOUT" > /dev/null 2>&1
        then
            echo -e "\033[32m\033[300C\033[11D[ Passed ]\033[0m"
        else
            # exit in the current shell so a failure stops the run
            echo -e "\033[31m\033[300C\033[11D[ Failed ]\033[0m"
            exit 1
        fi
        rm -f "$TESTOUT"
    done
    for i in "$BASE"/mpm_sourc*go
    do
        echo
        echo
        echo "Testing $i"
        TESTIN="$i"".asm"
        TESTOUT="$i"".out"
        go run bondgo_main.go -input-file "$i" -save-assembly "$TESTOUT" -mpm
        for j in "$TESTOUT"*
        do
            CMP1="${TESTIN}_$(echo "$j" | cut -d_ -f 3)"
            CMP2=$j
            echo -n " Comparing $CMP1 and $CMP2"
            if cmp "$CMP1" "$CMP2" > /dev/null 2>&1
            then
                echo -e "\033[32m\033[300C\033[11D[ Passed ]\033[0m"
            else
                echo -e "\033[31m\033[300C\033[11D[ Failed ]\033[0m"
                exit 1
            fi
            rm -f "$j"
        done
    done
fi
| true
|
8681076d7e28619040616a89a3e7fab0317bf34d
|
Shell
|
TAREK-ELOUARET/neural_IP
|
/neural_network_sfixed/neural_network_sfixed.sim/sim_1/behav/simulate.sh
|
UTF-8
| 260
| 2.640625
| 3
|
[] |
no_license
|
#!/bin/bash -f
# Behavioral-simulation launcher (Vivado 2016.4 generated script).
xv_path="/apps/Xilinx/Vivado/2016.4"

# Run the given command; if it fails, abort the whole script with the
# command's own exit status.
ExecStep()
{
    "$@" || exit $?
}

ExecStep $xv_path/bin/xsim neuro_simul_behav -key {Behavioral:sim_1:Functional:neuro_simul} -tclbatch neuro_simul.tcl -log simulate.log
| true
|
89beea70c761c07b1b6fc99fda3ec413d93043ce
|
Shell
|
djamseed/dotfiles
|
/.bin/uninstall-go
|
UTF-8
| 551
| 4.09375
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Remove a /usr/local Go installation. Must run as root, since the files
# it deletes are root-owned.
set -euo pipefail

current_userid=$(id -u)
if [ "$current_userid" -ne 0 ]; then
    echo "$(basename "$0") uninstallation script requires superuser privileges to run" >&2
    exit 1
fi

# Tell the user exactly what will be removed before asking.
echo "This script will delete these files/directories permanently:"
echo "/usr/local/go"
echo "/etc/paths.d/go"
echo ""

read -p "Are you sure you want to proceed? [Y/n] " input

# A single 'y' or 'Y' confirms; anything else aborts.
case "$input" in
    [Yy])
        rm -rf /usr/local/go
        rm /etc/paths.d/go
        echo "Golang was successfully removed from the system."
        ;;
    *)
        echo "Aborting..."
        ;;
esac
| true
|
006a4b9abc02651383d6b456962eb92b00493022
|
Shell
|
ortizjd-jmu/bash-scripts
|
/Jekyll-setup.sh
|
UTF-8
| 684
| 2.6875
| 3
|
[] |
no_license
|
# Jekyll development-environment setup for Debian/Ubuntu: install Ruby,
# configure a per-user gem path, install Jekyll, then build and serve
# the site.
yes | sudo apt-get update && yes | sudo apt-get upgrade

# Install Dependencies:
yes | sudo apt-get install ruby-full build-essential zlib1g-dev

# Add environement variables to ~/.bashrc to configure gem installation path:
echo '# Install Ruby Gems to ~/gems' >> ~/.bashrc
echo 'export GEM_HOME="$HOME/gems"' >> ~/.bashrc
echo 'export PATH="$HOME/gems/bin:$PATH"' >> ~/.bashrc
source ~/.bashrc

# Install Jekyll:
sudo gem install jekyll bundler

# Install the rest of the gems in the project folder:
# NOTE(review): both cd targets are rooted at / — presumably ~/Documents
# and ./jacobortiz.dev were intended; as written both cd calls fail.
# Confirm the intended paths.
cd /Documents
mkdir jacobortiz.dev
cd /jacobortiz.dev
sudo bundle install

# Serve application
sudo bundle exec jekyll serve --host 0.0.0.0

# Open Link
xdg-open http://0.0.0.0:4000/
| true
|
ffd2bc7a61f8da3ac3acb6ac372353984e8fe8d4
|
Shell
|
oweidner/vntg
|
/vntg-core/lib/vntg_build.sh
|
UTF-8
| 12,725
| 3.953125
| 4
|
[] |
no_license
|
#!/bin/bash
# Fix: this file uses bash-only constructs (typeset, [[ ]]), so it must
# run under bash — the original '#!/bin/sh' shebang breaks on systems
# where /bin/sh is dash/ash.

REPO_PATH="/Users/architeuthis/Code/vntg/vntg-formulae"
CONFIG_FILE="/Users/architeuthis/Code/vntg/vntg-formulae/irix64-mips4-cc.cfg"


# do_check_formula(): Parse a formula file
#    o param 1: formula name
#
# Reads key=value lines from ${FORMULAE}/<name>, rewrites '.' in keys to
# '_', and eval-assigns each pair as a global variable (package_name,
# package_version, ...), then prints a summary.
# NOTE(review): eval on formula content executes arbitrary shell if a
# value contains metacharacters — only run trusted formulae.
do_check_formula() {
    typeset _formula_name=$1

    if [ ! -f "${FORMULAE}/${1}" ]
    then
        echo "$file not found."
        exit -1
    else
        echo "$_formula_name found."
        while IFS='=' read -r key value
        do
            # skip empty lines
            [ -z $key ] && continue
            key=$(echo $key | tr '.' '_')
            eval ${key}=\${value}
        done < "${FORMULAE}/${1}"

        echo "\nBuilding package:"
        echo "================="
        echo "Package Name = " ${package_name}
        echo "Package Version = " ${package_version}
        echo "Package Location = " ${package_location}
        echo "Package Deps = " ${package_deps}
    fi
}
# ----------------------------------------------------------------------
# do_check_dep: checks if package exists in the /opt/vntg tree.
#    o param 1 <name>:    package name to check
#    o param 2 <version>: package version to check
#
do_check_dep() {
    typeset _pkg="$1"
    typeset _ver="$2"

    # A package counts as installed when its versioned directory exists;
    # the test's own status (0 = present, 1 = missing) is returned.
    # TODO: Check if packages is linked properly into /opt/vntg.
    # If not, suggest to run 'vntg relink'
    [ -d "/opt/vntg/pkg/${_pkg}/${_ver}" ]
}
# Download a source tarball and unpack it into /opt/vntg/src/<name>/<version>.
#   $1 - source URL, $2 - package name, $3 - package version
# NOTE(review): assumes a gzipped tarball with a single top-level
# directory (stripped); wget/tar failures are not checked — confirm
# callers tolerate that.
do_source_get () {
    source_location=$1
    name=$2
    version=$3

    # The archive filename is the last path component of the URL.
    archive=$(basename $source_location)
    srcdir=/opt/vntg/src/${name}/${version}

    # Start from a clean archive and source tree.
    rm -rf $archive
    rm -rf $srcdir
    mkdir -p $srcdir

    cd /opt/vntg/src/
    wget $source_location
    tar xzf $archive -C /opt/vntg/src/${name}/${version} --strip-components=1
    rm $archive
}
# Run ./configure in the unpacked source tree, installing into
# /opt/vntg/pkg/<name>/<version> with lib64 as the libdir.
#   $1 - package name, $2 - version, $3 - extra configure flags
# NOTE(review): if the caller passes the flags unquoted, $3 only holds
# the first flag — verify against do_install_from_source.
do_source_configure () {
    name=$1
    version=$2
    configure_flags=$3

    cd /opt/vntg/src/${name}/${version}
    configure="./configure --prefix=/opt/vntg/pkg/${name}/${version} --libdir=/opt/vntg/pkg/${name}/${version}/lib64 ${configure_flags}"
    $configure
}
# Build step. The actual make/make install lines are commented out, so
# currently this only records the formula file in the package's .vntg/
# directory.
#   $1 - package name, $2 - version (note: the body actually uses the
#   package_name/package_version globals, not $1/$2)
do_source_build () {
    name=$1
    version=$2

    cd /opt/vntg/src/${package_name}/${package_version}
    # make
    # make install
    #make clean

    # copy build file to .vntg
    # NOTE(review): $file and $FORMULAE are globals expected to be set
    # by the caller (see do_check_formula) — confirm they are set on
    # every path that reaches here.
    mkdir /opt/vntg/pkg/${package_name}/${package_version}/.vntg/
    cp ${FORMULAE}/$file /opt/vntg/pkg/${package_name}/${package_version}/.vntg/
}
# Record the formula file under the package's .vntg/ directory, create
# the distribution tarball, and copy any auxiliary files listed in the
# formula next to the package.
#   $1 - package name, $2 - version
do_package () {
    name=$1
    version=$2

    # copy build file to .vntg
    # NOTE: do_source_build creates this directory too; mkdir without -p
    # prints an error when it already exists but the script carries on.
    mkdir /opt/vntg/pkg/${package_name}/${package_version}/.vntg/
    cp ${FORMULAE}/$file /opt/vntg/pkg/${package_name}/${package_version}/.vntg/

    # create archive (fixed: '${name=}' default-assignment typo -> '${name}')
    cd /opt/vntg/pkg/
    tar cfz vntg-${name}-${version}.tgz ${name}/${version}

    # Fixed typo: the loop read ${pacakge_src_build_copyfiles}, which is
    # always unset, so the copyfiles feature silently did nothing.
    for miscfile in $(echo ${package_src_build_copyfiles} | sed "s/,/ /g")
    do
        # copy the file to distribution directory
        cp /opt/vntg/src/${package_name}/${package_version}/${miscfile} /opt/vntg/pkg/${package_name}/${package_version}/
    done
}
# Fetch, configure and build a package from source, driven by the
# package_* globals parsed from the formula file.
# Fix: arguments are now quoted — unquoted, the configure-flags string
# was word-split and do_source_configure only received its first flag
# in $3.
do_install_from_source () {
    do_source_get "${package_src_location}" "${package_name}" "${package_version}"
    do_source_configure "${package_name}" "${package_version}" "${package_src_build_configure}"
    do_source_build "${package_name}" "${package_version}"
}
# Strip leading and trailing whitespaces
# package_name=$(echo ${package_name} | sed -e 's/^ *//g;s/ *$//g')
# package_deps=$(echo ${package_deps} | sed -e 's/^ *//g;s/ *$//g')
# package_version=$(echo ${package_version} | sed -e 's/^ *//g;s/ *$//g')
# package_src_build_configure=$(echo ${package_src_build_configure} | sed -e 's/^ *//g;s/ *$//g')
# package_src_location=$(echo ${package_src_location} | sed -e 's/^ *//g;s/ *$//g')
# check dependencies
# for each dependecy, do the following:
# 1. check if depdent package is installed
# for dep in $(echo $package_deps | sed "s/,/ /g")
# do
# # split dependency into name and version
# dep_name=$(echo $dep | cut -f1 -d-)
# dep_version=$(echo $dep | cut -f2 -d-)
# if ! do_check_dep $dep_name $dep_version; then
# echo "Dependency ${dep} is missing"
# exit 1
# else
# echo "Dependency ${dep} found"
# fi
# done
# do_check_formula $1
# exit
# do_source_get ${package_src_location} ${package_name} ${package_version}
# #do_source_configure ${package_name} ${package_version} ${package_src_build_configure}
# #do_source_build ${package_name} ${package_version}
# do_package ${package_name} ${package_version}
# ----------------------------------------------------------------------
# Entry point for 'build'
#
# ----------------------------------------------------------------------
# Entry point for 'build'
#
# Parses CLI options, loads the build-environment config, parses the
# formula file, checks dependencies, then unpacks/configures/builds the
# package under /tmp/vntg-build, activates it by hard-linking/symlinking
# into /opt/vntg, and finally creates a distribution tarball.
#
vntg_build () {

    # Parse build options. Adopted from
    # https://stackoverflow.com/questions/4882349/parsing-shell-script-arguments#4882493
    #
    if test $# -gt 0; then shift; fi
    # NOTE(review): once all options are consumed $1 may be unset; this
    # test relies on the shell not running under 'set -u'. Confirm.
    while [[ $1 == -* ]]; do
        case "$1" in
            -h|--help|-\?) vntg_build_help; exit 0;;
            -i|--install) opt_install=1; shift;;
            -d|--dist) opt_dist=1; shift;;
            --no-prune) opt_noprune=1; shift;;
            -v|--verbose) opt_verbose=1; shift;;
            -*) odie "Unknown option $1";;
        esac
    done

    # Parse the build environment config file
    # TODO: Move this to separate function
    if [ ! -f "${CONFIG_FILE}" ]
    then
        echo "Build environment $CONFIG_FILE not found."
        exit -1
    else
        # key=value lines become buildenv_* shell variables ('.' -> '_').
        while IFS='=' read -r key value
        do
            # skip empty lines
            [ -z $key ] && continue
            key=$(echo $key | tr '.' '_')
            eval typeset ${key}=\${value}
        done < "$CONFIG_FILE"

        # Prune whitespaces
        # TODO - find a more elegant solution
        buildenv_name=$(echo ${buildenv_name} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_srcrep=$(echo ${buildenv_srcrep} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_pkgrep=$(echo ${buildenv_pkgrep} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_PATH=$(echo ${buildenv_PATH} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_LD_LIBRARY_PATH=$(echo ${buildenv_LD_LIBRARY_PATH} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_compiler_ABI=$(echo ${buildenv_compiler_ABI} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_compiler_CC=$(echo ${buildenv_compiler_CC} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_compiler_CFLAGS=$(echo ${buildenv_compiler_CFLAGS} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_compiler_CXX=$(echo ${buildenv_compiler_CXX} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_compiler_CXXFLAGS=$(echo ${buildenv_compiler_CXXFLAGS} | sed -e 's/^ *//g;s/ *$//g')
        # NOTE(review): the two lines below duplicate the CXX/CXXFLAGS
        # pruning immediately above — harmless but probably a paste error.
        buildenv_compiler_CXX=$(echo ${buildenv_compiler_CXX} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_compiler_CXXFLAGS=$(echo ${buildenv_compiler_CXXFLAGS} | sed -e 's/^ *//g;s/ *$//g')
        buildenv_linker_LDFLAGS=$(echo ${buildenv_linker_LDFLAGS} | sed -e 's/^ *//g;s/ *$//g')

        echo ""
        echo "${TEXT_B}Using environment ${buildenv_name}:${TEXT_R}"
        echo ""
        echo " o Source repo = "${buildenv_srcrep}
        echo " o Package repo = "${buildenv_pkgrep}
        echo " o PATH = "${buildenv_PATH}
        echo " o LD_LIBRARY_PATH = "${buildenv_LD_LIBRARY_PATH}
        echo " o ABI = "${buildenv_compiler_ABI}
        echo " o CC = "${buildenv_compiler_CC}
        echo " o CFLAGS = "${buildenv_compiler_CFLAGS}
        echo " o CXX = "${buildenv_compiler_CXX}
        echo " o CXXFLAGS = "${buildenv_compiler_CXXFLAGS}
        echo " o LDFLAGS = "${buildenv_linker_LDFLAGS}
    fi

    # TODO - fix the path.
    typeset formula_name=$1
    typeset formula_file="${REPO_PATH}/${formula_name}.vf"

    if [ ! -f "${formula_file}" ]
    then
        echo "Formula $formula_file not found."
        exit -1
    else
        # Same key=value parsing as above, yielding package_* variables.
        while IFS='=' read -r key value
        do
            # skip empty lines
            [ -z $key ] && continue
            key=$(echo $key | tr '.' '_')
            eval typeset ${key}=\${value}
        done < "$formula_file"

        package_name=$(echo ${package_name} | sed -e 's/^ *//g;s/ *$//g')
        package_deps=$(echo ${package_deps} | sed -e 's/^ *//g;s/ *$//g')
        package_version=$(echo ${package_version} | sed -e 's/^ *//g;s/ *$//g')
        package_src_location=$(echo ${package_src_location} | sed -e 's/^ *//g;s/ *$//g')
        package_src_build_deps=$(echo ${package_src_build_deps} | sed -e 's/^ *//g;s/ *$//g')
        package_src_build_configure=$(echo ${package_src_build_configure} | sed -e 's/^ *//g;s/ *$//g')
        package_src_build_configure_script=$(echo ${package_src_build_configure_script} | sed -e 's/^ *//g;s/ *$//g')

        # Use custom configure script if defined
        # NOTE(review): '[ -n ${var} ]' with an unquoted, empty variable
        # collapses to '[ -n ]', which is always true — so the custom
        # branch is taken even when no script is defined. Confirm and
        # quote the operand.
        if [ -n ${package_src_build_configure_script} ];
        then
            configure="${package_src_build_configure_script} ${package_src_build_configure}"
        else
            configure="./configure --prefix=/opt/vntg/pkg/${package_name}/${package_version} --libdir=/opt/vntg/pkg/${package_name}/${package_version}/lib64 ${package_src_build_configure}"
        fi

        echo ""
        echo "${TEXT_B}Building formula ${package_name} (${package_version}):${TEXT_R}"
        echo ""
        echo " o Source location = " ${package_src_location}
        echo " o Dependencies = " ${package_src_build_deps} ${package_deps}
        echo " o Config options = " ${configure}
        echo ""
    fi

    # Check package build dependencies
    echo
    echo "Checking build dependencies..."
    for dep in $(echo $package_deps | sed "s/,/ /g")
    do
        # split dependency into name and version
        dep_name=$(echo $dep | cut -f1 -d-)
        dep_version=$(echo $dep | cut -f2 -d-)
        if ! do_check_dep $dep_name $dep_version; then
            echo " o ${dep} is missing"
            exit 1
        else
            echo " o ${dep} found"
        fi
    done

    # Hard-coded IRIX/MIPSpro n64 build environment.
    # NOTE(review): these exports ignore the buildenv_* values parsed
    # above — confirm whether the config file was meant to drive them.
    export ABI=64
    export CC=c99
    export CXX=CC
    export CFLAGS='-64 -mips4 -c99 -O2 -LANG:anonymous_unions=ON -I/opt/vntg/include:/usr/include -L/opt/vntg/lib64 -L/usr/lib64'
    export CXXFLAGS='-64 -mips4 -c99 -O2 -LANG:anonymous_unions=ON -I/opt/vntg/include:/usr/include -L/opt/vntg/lib64 -L/usr/lib64'
    export CPPFLAGS='-64 -mips4 -c99 -O2 -LANG:anonymous_unions=ON -I/opt/vntg/include -L/opt/vntg/lib64 -L/usr/lib64'
    export LD_LIBRARY_PATH='/opt/vntg/lib64:/usr/lib64'
    export LDFLAGS='-64 -L/opt/vntg/lib64 -L/usr/lib64'
    export PATH=/opt/vntg/bin:$PATH
    export PKG_CONFIG=/opt/vntg/bin/pkg-config

    # prune dead links
    if [ "$opt_noprune" == "1" ]; then
        echo "==> pruning... SKIPPED. "
    else
        echo "==> pruning... "
        cd /opt/vntg/
        find * -type l -exec sh -c '! test -e $0 && unlink $0' {} \;
    fi

    echo "==> unpacking"
    mkdir -p "/tmp/vntg-build/"
    cd /tmp/vntg-build/
    cp ${package_src_location} /tmp/vntg-build/
    tar xf /tmp/vntg-build/${package_name}-${package_version}.tar
    rm /tmp/vntg-build/${package_name}-${package_version}.tar
    cd /tmp/vntg-build/${package_name}-${package_version}/
    ls /tmp/vntg-build/

    # Refuse to clobber an already-installed package version.
    [ -d /opt/vntg/pkg/${package_name}/${package_version}/ ] && echo "exists" && exit 1

    echo "==> configuring"
    ${configure} || exit 1
    make || exit 1
    make install || exit 1

    echo "==> activating"
    # Create directory structure and symlinks in /opt/vntg/
    cd /opt/vntg/pkg/${package_name}/${package_version}/
    find . -type d -depth | cpio -dumpl /opt/vntg/
    find * -type f -depth -exec sh -c 'ln -fs `pwd`/$0 /opt/vntg/$0' {} \;
    find * -type l -depth -exec sh -c 'ln -fs `pwd`/$0 /opt/vntg/$0' {} \;

    echo "==> packaging"
    cd /opt/vntg/pkg/
    tar cf /opt/vntg/rep/vntg-${package_name}-${package_version}-n64.tar ${package_name}/${package_version}
}
# ----------------------------------------------------------------------
# Display help
#
# Prints the usage text for 'vntg build'. TEXT_B/TEXT_R/TEXT_U are the
# terminal-styling globals used throughout the script; the unquoted
# here-doc expands them exactly like the original double-quoted string.
vntg_build_help () {
    cat <<EOF
${TEXT_B}vntg build${TEXT_R} [options] ${TEXT_U}formula${TEXT_R}:
Build ${TEXT_U}formula${TEXT_R} from source.
${TEXT_U}formula${TEXT_R} is the name of the formula to build.
${TEXT_B}--install${TEXT_R} Install ${TEXT_U}formula${TEXT_R} under /opt/vntg after the build was successful.
${TEXT_B}--dist${TEXT_R} Create distribution archive after build was successful
${TEXT_B}--no-prune${TEXT_R} Omitt dead link pruning in /opt/vntg before build.
WARNING: this can lead to unwanted side-effects.
${TEXT_B}--verbose${TEXT_R} Print the output of all build steps to STDOUT.
${TEXT_B}--help${TEXT_R} Show this message.
--------------------------------------------------------------------------------

EOF

    return 0
}
| true
|
a84fd8fe737748c91637ea32cda365e3cbf5a87a
|
Shell
|
solareenlo/ft_server
|
/srcs/autoindex.sh
|
UTF-8
| 277
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
# Toggle nginx's autoindex directive on or off and reload nginx.
# Usage: autoindex.sh on|off

INDEX=$1

case "$INDEX" in
    on|off)
        # Rewrite the autoindex value in the site config in place, then
        # apply the change.
        sed -i -E "/autoindex/ s/on|off/$INDEX/" /etc/nginx/sites-available/default.conf
        service nginx reload
        echo "Autoindex is now set to $INDEX"
        ;;
    *)
        echo "Please set a valid value ('on' or 'off')."
        ;;
esac
| true
|
1a1bc5d10bde1c3031a80e1a23d2e497b2e127a1
|
Shell
|
Mangemannen66/bash_grundkurs
|
/lab5/uppgift2.sh
|
UTF-8
| 850
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
################################################
# Exercise 2, lab 5                            #
# Magnus Danielsson LX13                       #
# A simple script that reads a number that     #
# must lie between 10 and 100. The script      #
# validates the range, then counts down to 1   #
# after <ENTER>.                               #
################################################

i=1

read -p "Välj ett nummer mellan 10 och 100: " count

# Reject anything outside the allowed range.
if [ "$count" -lt 10 ] || [ "$count" -gt 100 ] ;then
    echo "Ett nummer MELLAN 10 och 100!"
else
    while [ "$count" -gt "$i" ]
    do
        echo
        echo "$count"
        # Arithmetic expansion instead of the legacy `expr` + backticks.
        count=$((count - 1))
        sleep 0.2
        clear
    done
fi
exit 0
| true
|
fb685084c69dcb3d0cde5c69d54b2d67354a420a
|
Shell
|
rebelplutonium/nixos-configuration
|
/custom/expressions/initialization-utils/src/pre-push.sh
|
UTF-8
| 220
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/sh
# Git pre-push hook guard: refuse pushes to the remote named "upstream".
# $1 is the remote name git passes to the hook.

REMOTE="${1}" &&
# POSIX test(1) uses '=' for string comparison; the original '==' is a
# bashism that fails under dash/ash when run via /bin/sh.
if [ "${REMOTE}" = "upstream" ]
then
	echo Pushing to the UPSTREAM branch is not allowed. &&
	echo Use the REPORT branch for pushing upstream. &&
	exit 67 &&
	true
fi
| true
|
bcbbb015ac44f7289b3cb54486ccde38e4ff1944
|
Shell
|
andrebriggs/bedrock-agents
|
/setup-variable-group.sh
|
UTF-8
| 2,275
| 2.765625
| 3
|
[] |
no_license
|
# Recreate the Azure DevOps variable group used by the agent build
# pipeline and populate its secret variables via the az CLI.
# NOTE(review): all REPLACE_ME values must be filled in before running.
RESOURCE_GROUP="REPLACE_ME"
ACR_NAME="REPLACE_ME"
AZDO_ORG_URL="https://dev.azure.com/REPLACE_ME"
AZDO_PROJECT_NAME="REPLACE_ME"
AZP_POOL="bedrock-pool" # Must exist in $AZDO_ORG_URL/_settings/agentpools
SUBSCRIPTION_ID="REPLACE_ME"
TENANT_ID="REPLACE_ME"
SP_CLIENT_ID="REPLACE_ME"

# Should come from Azure KeyVault ideally
AZDO_PAT="REPLACE_ME"
SP_CLIENT_PASS="REPLACE_ME"

# Delete and create variable group
vg_name="agent-build-vg"
vg_result=$(az pipelines variable-group list --org $AZDO_ORG_URL -p $AZDO_PROJECT_NAME)
# NOTE(review): vg_exists is computed but never used below — confirm
# whether the delete call was meant to be guarded by it.
vg_exists=$(echo $vg_result | jq -r --arg vg_name "$vg_name" '.[].name | select(. == $vg_name ) != null')
vg_id=$(echo "$vg_result" | jq -r --arg vg_name "$vg_name" '.[] | select(.name == $vg_name) | .id')
echo "variable group to delete is $vg_id"
az pipelines variable-group delete --id "$vg_id" --yes --org $AZDO_ORG_URL -p $AZDO_PROJECT_NAME

# Recreate the group with the non-secret variables.
CREATE_RESULT=$(az pipelines variable-group create --name $vg_name \
    --org $AZDO_ORG_URL \
    -p $AZDO_PROJECT_NAME \
    --variables \
    RESOURCE_GROUP=$RESOURCE_GROUP \
    ACR_NAME=$ACR_NAME \
    AZDO_ORG_URL=$AZDO_ORG_URL \
    AZP_POOL=$AZP_POOL)

GROUP_ID=$(echo $CREATE_RESULT | jq ".id")
echo "The group id is $GROUP_ID"

# Secrets are added one at a time with --secret true so they are stored
# encrypted in the variable group.
az pipelines variable-group variable create \
    --org $AZDO_ORG_URL \
    -p $AZDO_PROJECT_NAME \
    --group-id "$GROUP_ID" \
    --secret true \
    --name "AZDO_PAT" \
    --value $AZDO_PAT

az pipelines variable-group variable create \
    --org $AZDO_ORG_URL \
    -p $AZDO_PROJECT_NAME \
    --group-id "$GROUP_ID" \
    --secret true \
    --name "SUBSCRIPTION_ID" \
    --value $SUBSCRIPTION_ID

az pipelines variable-group variable create \
    --org $AZDO_ORG_URL \
    -p $AZDO_PROJECT_NAME \
    --group-id "$GROUP_ID" \
    --secret true \
    --name "TENANT_ID" \
    --value $TENANT_ID

az pipelines variable-group variable create \
    --org $AZDO_ORG_URL \
    -p $AZDO_PROJECT_NAME \
    --group-id "$GROUP_ID" \
    --secret true \
    --name "SP_CLIENT_ID" \
    --value $SP_CLIENT_ID

az pipelines variable-group variable create \
    --org $AZDO_ORG_URL \
    -p $AZDO_PROJECT_NAME \
    --group-id "$GROUP_ID" \
    --secret true \
    --name "SP_CLIENT_PASS" \
    --value $SP_CLIENT_PASS
| true
|
e7404c78bf68971e3f6e9b3532ce6232d9e409fc
|
Shell
|
lightster/pier-11
|
/bin/vagrant/install-docker-compose.sh
|
UTF-8
| 372
| 3.4375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Install docker-compose 1.16.1 into /usr/local/bin if it is not
# already present.

echo -n "Installing docker-compose... "
if [ ! -f /usr/local/bin/docker-compose ]; then
    # Fix: the original redirected curl's output unconditionally, so an
    # HTTP error or network failure still created the file (possibly an
    # HTML error page) and chmod then marked it executable. -f makes
    # curl fail on HTTP errors, and on failure the partial file is
    # removed instead of installed.
    if curl -sS -f -L "https://github.com/docker/compose/releases/download/1.16.1/docker-compose-$(uname -s)-$(uname -m)" > /usr/local/bin/docker-compose; then
        chmod +x /usr/local/bin/docker-compose
        echo "docker-compose installed!"
    else
        rm -f /usr/local/bin/docker-compose
        echo "docker-compose download failed!" >&2
        exit 1
    fi
else
    echo "docker-compose is already installed!"
fi
| true
|
ee9696832d78baab569f9204b5ae8042e2c71c4c
|
Shell
|
n-daniel/YCSB
|
/runtest.sh
|
UTF-8
| 670
| 3.375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash

# Run every YCSB workload under workloads/ against the database binding
# given as $1; extra positional arguments ($2..$9) are forwarded to ycsb.
#   Env: THREADS     - client thread count (default 1)
#        GRANULARITY - timeseries granularity in ms (default 1000)
# Result files are named <binding>.<threads>.<workload>.<timestamp>
# (load phase gets a .load suffix).

# if not set THREADS use default
if [ -z "$THREADS" ]; then
    THREADS=1
fi

# if not set GRANULARITY use default (1sec)
if [ -z "$GRANULARITY" ]; then
    GRANULARITY=1000
fi

# clean up
# Remove result files from previous runs of this binding/thread count.
CLEANUP=$1.$THREADS.*
for f0 in $CLEANUP
do
    echo "delete $f0"
    rm $f0
done

# execute test
FILES=workloads/*
for f in $FILES
do
    now=$(date +"%Y%m%d_%H_%M_%S")
    # Load phase, then run phase with timeseries measurements; each
    # workload uses its own table named after the workload file.
    bin/ycsb load $1 -P $f $2 $3 $4 $5 $6 $7 $8 $9 -p table=$(basename $f) 1>$1.$THREADS.$(basename $f).$now.load
    bin/ycsb run $1 -P $f $2 $3 $4 $5 $6 $7 $8 $9 -threads $THREADS -p table=$(basename $f) -p measurementtype=timeseries -p timeseries.granularity=$GRANULARITY > $1.$THREADS.$(basename $f).$now
done
| true
|
1d5314b521cc52c350b16099f307583a3893e9f7
|
Shell
|
donald-e-boyce/libf95dplab
|
/Dev/Test/Run.sh
|
UTF-8
| 310
| 3.0625
| 3
|
[] |
no_license
|
#! /bin/bash
#
# Script to run test problem.
#
# Usage: Run.sh <data-file> <num-procs>
# Stages <data-file> as input.txt, runs the solver under MPI, then
# removes the staged copy.
#
if [ $# -lt 2 ]; then
    echo 'need arguments: data-file num_procs'
    exit 1
fi
#
DATAFILE=$1
NUM_PROC=$2
#
MAINDIR=$(pwd)
BINARY=./linear-solver.x
MPIRUN=/opt/openmpi-1.2.7/bin/mpirun
#
# Quote expansions so paths containing spaces work; abort if the input
# file cannot be staged.
cp "$DATAFILE" input.txt || exit 1
"$MPIRUN" -np "$NUM_PROC" "$BINARY"
rm -f input.txt
| true
|
5d8e6fd1fc6c97cb43532fd33f906e872bd3edd8
|
Shell
|
algo-cancer/CAMMiQ
|
/install_CAMMiQ.sh
|
UTF-8
| 796
| 3.328125
| 3
|
[
"MIT"
] |
permissive
|
# Build CAMMiQ: fetch third-party sources if needed, build
# libdivsufsort, then build against either CPLEX or Gurobi depending on
# the flags supplied.
cd src

# Fetch parallel-divsufsort unless it is already checked out.
if [ -d "./parallel-divsufsort" ]; then
	echo "Destination path 'parallel-divsufsort' already exists."
else
	make downloads
fi

# Fetch robin-hood-hashing unless it is already checked out.
if [ -d "./robin-hood-hashing" ]; then
	echo "Destination path 'robin-hood-hashing' already exists."
else
	make downloadr
fi

# Build libdivsufsort only once.
DIVSUFSORTLIB=./parallel-divsufsort/lib/libdivsufsort.a
if [ -f "$DIVSUFSORTLIB" ]; then
	echo "Library 'libdivsufsort.a' already exists."
else
	make divsufsort
fi

# Solver selection:
#   --cplex-dir <dir>
#   --gurobi-dir <dir> --gurobi-version <ver>
case "$1" in
--cplex-dir)
	make CPLEXROOTDIR="$2" cplex
	;;
--gurobi-dir)
	if [ "$3" != "--gurobi-version" ]; then
		echo "Please clarify gurobi version!"
	else
		# Keep only major.minor (e.g. "9.1.2" -> "9.1"), then drop
		# the dot for the library suffix (e.g. "91").
		VERSION="$4"
		VERSION="${VERSION:0:3}"
		make GUROBIROOTDIR="$2" GRB_VERSION="${VERSION//.}" gurobi
	fi
	;;
*)
	echo "Please clarify cplex or gurobi directory!"
	;;
esac
| true
|
69a05df72786066dff12a5f1eb0db5fc0c8e92c7
|
Shell
|
tozd/docker-nginx-mailer
|
/test.sh
|
UTF-8
| 1,344
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/sh

# CI smoke test for the nginx-mailer image: start the image under test
# and a MailHog SMTP sink on a private Docker network, send a mail
# through the image's sendmail, and verify MailHog received it.
set -e

# Cleanup flags: each is flipped to 1 once the matching resource exists,
# so the EXIT trap only tears down what was actually created.
cleanup_mailhog=0
cleanup_docker=0
cleanup_network=0

cleanup() {
  # Best-effort teardown — disable -e so one failure doesn't skip the rest.
  set +e

  if [ "$cleanup_mailhog" -ne 0 ]; then
    echo "Logs mailhog"
    docker logs mailhog
    echo "Stopping mailhog Docker image"
    docker stop mailhog
    docker rm -f mailhog
  fi
  if [ "$cleanup_docker" -ne 0 ]; then
    echo "Logs"
    docker logs test
    echo "Stopping Docker image"
    docker stop test
    docker rm -f test
  fi
  if [ "$cleanup_network" -ne 0 ]; then
    echo "Removing Docker network"
    docker network rm testnet
  fi
}

trap cleanup EXIT

echo "Creating Docker network"
time docker network create testnet
cleanup_network=1

echo "Preparing"
apk add --no-cache jq

echo "Running Docker image"
docker run -d --name test --network testnet -e LOG_TO_STDOUT=1 -e REMOTES=mailhog "${CI_REGISTRY_IMAGE}:${TAG}"
cleanup_docker=1

echo "Running mailhog Docker image"
docker run -d --name mailhog --network testnet -p 8025:8025 mailhog/mailhog:v1.0.1 -hostname mailhog -smtp-bind-addr :25
cleanup_mailhog=1

echo "Sleeping"
sleep 10

echo "Testing"
# We inside test container use sendmail to send an e-mail to mailhog.
/bin/echo -e "Subject: test\nTo: test@mailhog" | docker exec -i test sendmail -t

sleep 10

# Ask MailHog's API for the newest message and check its recipient.
wget -q -O - http://docker:8025/api/v2/messages | jq -r .items[0].Raw.To[0] | grep -F test@mailhog

echo "Success"
|
f666a0fab8e6d2db4eb77a216f756ed8f42c74db
|
Shell
|
ukiroot/Epistola_LFS
|
/step/Chapter_6/chroot/step/Chapter_10/109_step_install_nginx.sh
|
UTF-8
| 4,496
| 3.1875
| 3
|
[] |
no_license
|
#!/bin/bash
#########
#########110 step. Install Nginx.
#########
step_110_install_nginx ()
{
PCRE="pcre-8.35"
PCRE_SRC_FILE="$PCRE.tar.gz"
if [ ! -f /sources/$PCRE_SRC_FILE ]; then
wget -O /sources/$PCRE_SRC_FILE $REPOSITORY/$PCRE_SRC_FILE
fi
cd /sources
tar zxf $PCRE_SRC_FILE
cd $PCRE
./configure --prefix=/usr
make -j$STREAM
make install
cd ..
rm -rf $PCRE
groupadd -g 47 nginx
useradd -c "Nginx user" -d /var/www -u 47 -g nginx -s /bin/false nginx
NGINX="nginx-1.9.0"
NGINX_SRC_FILE="$NGINX.tar.gz"
if [ ! -f /sources/$NGINX_SRC_FILE ]; then
wget -O /sources/$NGINX_SRC_FILE $REPOSITORY/$NGINX_SRC_FILE
fi
cd /sources
tar zxf $NGINX_SRC_FILE
cd $NGINX
./configure --prefix=/usr --pid-path=/run/nginx.pid \
--conf-path=/etc/nginx/nginx.conf \
--user=nginx \
--group=nginx \
--with-http_ssl_module \
--http-client-body-temp-path=/var/lib/nginx/body-temp \
--http-proxy-temp-path=/var/lib/nginx/proxy-temp \
--http-fastcgi-temp-path=/var/lib/nginx/fastcgi-temp \
--http-uwsgi-temp-path=/var/lib/nginx/uwsgi-temp \
--http-scgi-temp-path=/var/lib/nginx/scgi-temp \
--without-mail_pop3_module \
--without-mail_imap_module \
--without-mail_smtp_module \
--without-http_uwsgi_module \
--without-http_scgi_module \
--without-http_memcached_module
make
make install
cd ..
rm -rf $NGINX
##
##Install unit file
##
cp /root_tmp/step/systemd-units/units/nginx.service /lib/systemd/system/
systemctl enable nginx.service
#Create config
mkdir -p /var/www/webmail/public_html/
mkdir -p /etc/nginx/conf.d/
mkdir -p /etc/nginx/sites-enabled/
mkdir -p /etc/nginx/sites-available/
mkdir -p /var/lib/nginx
mkdir -p /etc/nginx/site-conf/
chown -R nginx:nginx /var/www
cat > /etc/nginx/nginx.conf << "EOF"
user nginx;
worker_processes 2;
pid /run/nginx.pid;
events {
worker_connections 768;
}
http {
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 900;
types_hash_max_size 2048;
include /etc/nginx/mime.types;
default_type application/octet-stream;
access_log syslog:server=unix:/run/systemd/journal/dev-log;
error_log syslog:server=unix:/run/systemd/journal/dev-log;
gzip on;
gzip_disable "msie6";
include /etc/nginx/conf.d/*.conf;
include /etc/nginx/sites-enabled/*;
client_header_timeout 3m;
client_body_timeout 3m;
send_timeout 3m;
}
EOF
cat > /etc/nginx/sites-available/webmail << "EOF"
server {
listen 443 ssl;
server_name webmail.epistola.ru www.webmail.epistola.ru;
ssl on;
ssl_protocols SSLv3 TLSv1;
ssl_certificate /etc/ssl/certs/epistola.local.crt;
ssl_certificate_key /etc/ssl/private/epistola.local.key;
include /etc/nginx/site-conf/webmail;
}
server {
listen 80;
server_name webmail.epistola.ru www.webmail.epistola.ru;
include /etc/nginx/site-conf/webmail;
}
EOF
cat > /etc/nginx/site-conf/webmail << "EOF"
root /var/www/webmail/public_html;
index index.php;
location ~ ^/favicon.ico$ {
root /var/www/webmail/public_html/skins/default/images;
log_not_found off;
access_log off;
expires max;
}
location = /robots.txt {
allow all;
log_not_found off;
access_log off;
}
location ~ ^/(README|INSTALL|LICENSE|CHANGELOG|UPGRADING)$ {
deny all;
}
location ~ ^/(bin|SQL)/ {
deny all;
}
# Deny all attempts to access hidden files such as .htaccess, .htpasswd, .DS_Store (Mac).
location ~ /\. {
deny all;
access_log off;
log_not_found off;
}
location ~ \.php$ {
try_files $uri =404;
include /etc/nginx/fastcgi_params;
fastcgi_pass 127.0.0.1:9000;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_index index.php;
}
EOF
ln -s /etc/nginx/sites-available/webmail /etc/nginx/sites-enabled/
rm -rf /etc/nginx/fastcgi.conf.default
rm -rf /etc/nginx/fastcgi_params.default
rm -rf /etc/nginx/koi-win
rm -rf /etc/nginx/mime.types.default
rm -rf /etc/nginx/nginx.conf.default
rm -rf /etc/nginx/scgi_params.default
rm -rf /etc/nginx/uwsgi_params
rm -rf /etc/nginx/uwsgi_params.default
rm -rf /etc/nginx/scgi_params
}
| true
|
5bb780d2b19e8aaed6f2b0c45f664d8af5e4eaa0
|
Shell
|
digcat/puppet-alfresco
|
/config/ootb_output.sh
|
UTF-8
| 2,136
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
function write_output {
domain_name=`get_param domain_name`
initial_admin_pass=`get_param initial_admin_pass`
mail_from_default=`get_param mail_from_default`
alfresco_base_dir=`get_param alfresco_base_dir`
tomcat_home=`get_param tomcat_home`
alfresco_version=`get_param alfresco_version`
download_path=`get_param download_path`
db_root_password=`get_param db_root_password`
db_user=`get_param db_user`
db_pass=`get_param db_pass`
db_name=`get_param db_name`
db_host=`get_param db_host`
db_port=`get_param db_port`
db_type=`get_param db_type`
mem_xmx=`get_param mem_xmx`
mem_xxmaxpermsize=`get_param mem_xxmaxpermsize`
ssl_cert_path=`get_param ssl_cert_path`
echo -e "${GREEN}Writing puppet file ${BLUE}go.pp${WHITE}"
cat > go.pp <<EOF
class { 'alfresco':
domain_name => '${domain_name}',
initial_admin_pass => '${initial_admin_pass}',
mail_from_default => '${mail_from_default}',
alfresco_base_dir => '${alfresco_base_dir}',
tomcat_home => '${tomcat_home}',
alfresco_version => '${alfresco_version}',
download_path => '${download_path}',
db_root_password => '${db_root_password}',
db_user => '${db_user}',
db_pass => '${db_pass}',
db_name => '${db_name}',
db_host => '${db_host}',
db_port => '${db_port}',
db_type => '${db_type}',
mem_xmx => '${mem_xmx}',
mem_xxmaxpermsize => '${mem_xxmaxpermsize}',
ssl_cert_path => '${ssl_cert_path}',
}
EOF
echo -e "${GREEN}Writing puppet file ${BLUE}test.pp${WHITE}"
cat > test.pp <<EOF
class { 'alfresco::tests':
delay_before => 10,
domain_name => '${domain_name}',
initial_admin_pass => '${initial_admin_pass}',
mail_from_default => '${mail_from_default}',
alfresco_base_dir => '${alfresco_base_dir}',
tomcat_home => '${tomcat_home}',
alfresco_version => '${alfresco_version}',
download_path => '${download_path}',
db_root_password => '${db_root_password}',
db_user => '${db_user}',
db_pass => '${db_pass}',
db_name => '${db_name}',
db_host => '${db_host}',
db_port => '${db_port}',
db_type => '${db_type}',
mem_xmx => '${mem_xmx}',
mem_xxmaxpermsize => '${mem_xxmaxpermsize}',
}
EOF
sleep 1
}
write_output
| true
|
f47590b4af6f43dc3a7eee4a639429fb82d5e595
|
Shell
|
lgz282/virtualization-benchmarks
|
/tests/boot/boot_time_vm
|
UTF-8
| 670
| 3.71875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Measure VM boot time: start the libvirt domain, time how long the
# guest takes to answer ping, and repeat ITERATIONS times, logging
# everything to a timestamped file under ./output.

ITERATIONS=10
VM_NAME=vm1
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
OUTPUT_DIR=$DIR/output
DATETIME=$(date +%Y-%m-%dT%H:%M:%S%z)
LOGFILE_NAME=$OUTPUT_DIR/$DATETIME-vm-boot-time.log

# Start the libvirt domain.
launch_vm() {
    virsh start $VM_NAME
}

# Forcefully power the domain off.
destroy_vm() {
    virsh destroy $VM_NAME
}

# Busy-wait until the guest answers a single ping (0.1 s per attempt).
ping_vm() {
    until timeout 0.1 ping -c1 $VM_NAME >/dev/null 2>&1; do :; done
}

# One measurement: launch in the background while timing the ping wait,
# then tear the VM down, with settle delays around the teardown.
run_iteration() {
    launch_vm & time ping_vm
    # Sleep in between just in case
    sleep 5
    destroy_vm
    # Sleep in between just in case
    sleep 5
}

# Repeat the measurement ITERATIONS times.
run_test() {
    local n=1
    while (( n <= ITERATIONS )); do
        run_iteration
        (( n++ ))
    done
}

run_test &> $LOGFILE_NAME
| true
|
e1c60e218a5363dbf8ecc527d931cdd378f69071
|
Shell
|
archived-codacy/homebrew-tap
|
/bin/update.sh
|
UTF-8
| 831
| 4.15625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
#
# Update the Formula version
#
# Usage: ./update.sh <formula> <version>
#
# Requirements:
#   * git
#   * curl
#   * shasum (with 256 algorithm)
#   * sed
#
set -e

# Fail when EITHER argument is missing. The original used `&&`, which
# only triggered when BOTH were empty, so a call with a single argument
# ran with an empty version and fetched/hashed a bogus tarball.
if [ -z "$1" ] || [ -z "$2" ]; then
  echo "usage: $0 <formula> <VERSION>" >&2
  exit 1
fi

formula="$1"
formula_path="Formula/${1}.rb"
version="$2"

# Checksum of the release tarball GitHub publishes for this tag.
sha256=$(curl -sL "https://github.com/codacy/${formula}/archive/v$version.tar.gz" | shasum -a256 | cut -d " " -f 1)

git checkout master
git pull

# Rewrite the tarball URL's version, then the sha256 line, via a temp copy.
sed 's/^\(.*\/v\).*\(.tar.gz"\)$/\1'"$version"'\2/' "$formula_path" > "$formula_path.modified"
sed 's/^\(.*sha256 "\).*\(".*\)$/\1'"$sha256"'\2/' "$formula_path.modified" > "$formula_path"
rm "$formula_path.modified"

git diff

read -p "Do you want to push? [y/N] " choice
if [[ "$choice" =~ ^[Yy]$ ]]; then
  git add "$formula_path"
  git commit -m "Release v$version"
  git show
  git push
fi
| true
|
0f9a34121a06d79d6438af9190ba4aa17b20bb04
|
Shell
|
FreemanX/pocl-android-dependency
|
/build_llvm_aarch64.sh
|
UTF-8
| 1,744
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash
# Scripts for building llvm
INSTALL_DIR=$HOME/data/llvm_arm64-out/
[ ! -d "$INSTALL_DIR" ] && mkdir $INSTALL_DIR
TOOLCHAIN_PATH=your_standalone_toolchain_path
TOOLCHAIN_BIN=$TOOLCHAIN_PATH/bin
SYSROOT=$TOOLCHAIN_PATH/sysroot
ARM_LIB_PATH=$HOME/data/Libs_ARM64
export PATH=$TOOLCHAIN_BIN:$PATH
export LD_LIBRARY_PATH=$SYSROOT/usr/lib:$TOOLCHAIN_PATH/lib64:$LD_LIBRARY_PATH
TARGET_HOST=aarch64-linux-android
export CC=$TOOLCHAIN_BIN/$TARGET_HOST-clang
export CXX=$TOOLCHAIN_BIN/$TARGET_HOST-clang++
export AS=$TOOLCHAIN_BIN/$TARGET_HOST-clang
export AR=$TOOLCHAIN_BIN/$TARGET_HOST-ar
export RANLIB=$TOOLCHAIN_BIN/$TARGET_HOST-ranlib
export LD=$TOOLCHAIN_BIN/$TARGET_HOST-ld
export STRIP=$TOOLCHAIN_BIN/$TARGET_HOST-strip
export CXXFLAGS=" -funwind-tables -O2 -fPIE -fPIC -static-libstdc++ -fuse-ld=gold -I$ARM_LIB_PATH/include -L$ARM_LIB_PATH/lib -Wno-error=unused-command-line-argument "
export CFLAGS=" -O2 -fPIC -fPIE -I$ARM_LIB_PATH/include -L$ARM_LIB_PATH/lib -Wno-error=unused-command-line-argument "
export LDFLAGS=" -pie "
export PYTHONPATH=$HOME/toolchains/android-ndk/python-packages:$PYTHONPATH
PYTHON_EXECUTABLE=$TOOLCHAIN_BIN/python #for android
cmake -G "Unix Makefiles" \
-DCMAKE_CROSSCOMPILING=True \
-DLLVM_TARGET_ARCH=AArch64 \
-DLLVM_ENABLE_PROJECTS="clang;llvm" \
-DCMAKE_INSTALL_PREFIX=$INSTALL_DIR \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_C_COMPILER=$CC \
-DCMAKE_CXX_COMPILER=$CXX \
-DLLVM_TARGETS_TO_BUILD="AArch64" \
-DLLVM_DEFAULT_TARGET_TRIPLE=$TARGET_HOST \
-DLLVM_ENABLE_DIA_SDK=OFF \
-DLLVM_TABLEGEN=path_to_your_host_build_directory/bin/llvm-tblgen \
-DCLANG_TABLEGEN=path_to_your_host_build_directory/bin/clang-tblgen \
-DLLVM_BUILD_LLVM_DYLIB=ON \
../llvm
make -j4
make install
| true
|
15add917b826ce924ce71a4421f66e91c46008b3
|
Shell
|
gunnjo/twofing
|
/debian/makeit.sh
|
UTF-8
| 425
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/sh
# Build a Debian source/binary package for twofing from the sources in
# the parent directory.
PKG=twofing
VERSION=0.7
DEBVER=1.0
pkg_dir="${PKG}_${VERSION}-${DEBVER}"
orig_tarball="${PKG}_${VERSION}.orig.tar.gz"
# Left unquoted at the point of use so the shell expands the globs.
src_globs="../*.c ../*.h ../Makefile ../70-touchscreen-egalax.rules"

# Pack the upstream sources, then unpack them into a fresh package dir.
tar -cvzf ${orig_tarball} ${src_globs}
mkdir ${pkg_dir}
cd ${pkg_dir}
tar -xvzf ../${orig_tarball}

# Bring in the debian/ packaging files; seed mandatory metadata if absent.
cp -ar ../debian .
[ -e debian/changelog ] || dch --create -v ${VERSION} --package ${PKG}
[ -e debian/compat ] || echo 8 > debian/compat

# Build the package unsigned (-us -uc).
debuild -us -uc
| true
|
8ba6ce686096be513ef0677a76397972a6530601
|
Shell
|
AlexAegis/dotfiles
|
/modules/xdg-user-dirs/1.root.sh
|
UTF-8
| 620
| 3.34375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# xdg-user-dirs-update errors out with "No default user directories"
# in the 2.users.sh file because on some systems the config lives at
# /etc/xdg/user-dirs.defaults instead of /etc/user-dirs.defaults.
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=647313
# Once this resolves, this script can be reprivileged to user.
#
# Create /etc/user-dirs.{conf,defaults} as symlinks to the /etc/xdg
# copies, but only when the former are absent and the latter exist.
if [ ! -e /etc/user-dirs.conf ] && [ ! -e /etc/user-dirs.defaults ] \
	&& [ -e /etc/xdg/user-dirs.conf ] && [ -e /etc/xdg/user-dirs.defaults ]
then
	# Subshell keeps the caller's working directory untouched.
	(
		cd /etc || exit 1
		ln -s xdg/user-dirs.conf user-dirs.conf
		ln -s xdg/user-dirs.defaults user-dirs.defaults
		exit 0
	) || exit 1
fi
| true
|
c8a060eaf15893ce6f2c12b0ac5c5579fc990b28
|
Shell
|
jarrettkenny/dillo-bot
|
/cicd/scripts/build-jar.sh
|
UTF-8
| 2,671
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
NORMAL_COLOR='\033[0m'
PREFIX_COLOR='\033[0;36m'
ECHO_PREFIX="[${PREFIX_COLOR}PIPELINE${NORMAL_COLOR}]"
FAIL_COLOR='\033[1;31m'
FAIL_PREFIX="${ECHO_PREFIX} [${FAIL_COLOR}ERROR${NORMAL_COLOR}]"
SUCCESS_COLOR='\033[1;32m'
SUCCESS_PREFIX="${ECHO_PREFIX} [${SUCCESS_COLOR}SUCCESS${NORMAL_COLOR}]"
INFO_COLOR='\033[0;34m'
INFO_PREFIX="${ECHO_PREFIX} [${INFO_COLOR}INFO${NORMAL_COLOR}]"
UI_SUCCESS=0
BUILD_SUCCESS=0
VERSION_SUCCESS=0
pwd
ls -al
echo ""
echo -e "${INFO_PREFIX} integrating ui into dillo-bot..."
mkdir dillo-bot/src/main/resources/static
cp -R build/* dillo-bot/src/main/resources/static/
UI_SUCCESS=$?
if [ $UI_SUCCESS -ne 0 ]; then
echo -e "${FAIL_PREFIX} ui failed"
echo -e "${INFO_PREFIX} the build will continue regardless"
else
echo -e "${SUCCESS_PREFIX} ui successful"
fi
echo ""
cd dillo-bot/
chmod +x mvnw
BUILD_SUCCESS=$?
if [ $BUILD_SUCCESS -ne 0 ]; then
echo -e "${FAIL_PREFIX} failed to change access permissions for ./mvnw"
echo -e "${INFO_PREFIX} attempting to continue..."
echo ""
fi
echo -e "${INFO_PREFIX} building artifact..."
./mvnw install -DskipTests
BUILD_SUCCESS=$?
if [ $BUILD_SUCCESS -ne 0 ]; then
echo -e "${FAIL_PREFIX} build failed"
else
echo -e "${SUCCESS_PREFIX} build successful"
fi
echo ""
cd ..
echo -e "${INFO_PREFIX} getting version..."
VERSION=$(cat version/version)
VERSION_SUCCESS=$?
if [ $VERSION_SUCCESS -ne 0 ]; then
echo -e "${FAIL_PREFIX} unable to find version"
exit 1;
else
echo -e "${SUCCESS_PREFIX} version found: $VERSION"
fi
echo ""
echo -e "${INFO_PREFIX} getting environment..."
LOWERCASED_ENV="$(echo "$ENV" | tr '[A-Z]' '[a-z]')"
ENVIRONMENT_SUCCESS=$?
if [ "$LOWERCASED_ENV" = "" ]; then
ENVIRONMENT_SUCCESS=1
fi
if [ $ENVIRONMENT_SUCCESS -ne 0 ]; then
echo -e "${FAIL_PREFIX} unable to find environment from ENV"
echo -e "${INFO_PREFIX} assuming environment is prod"
else
echo -e "${SUCCESS_PREFIX} environment found: $LOWERCASED_ENV"
fi
if [ $ENVIRONMENT_SUCCESS -ne 0 ] && [ "$LOWERCASED_ENV" = "d" ] || [ "$LOWERCASED_ENV" = "dev" ] || [ "$LOWERCASED_ENV" = "develop" ]; then
VERSION="d-$VERSION"
echo -e "${INFO_PREFIX} updated version: $VERSION"
fi
echo ""
echo -e "${INFO_PREFIX} versioning artifact..."
cp dillo-bot/target/*.jar target/dillo-bot-$VERSION.jar
BUILD_SUCCESS=$?
if [ $BUILD_SUCCESS -ne 0 ]; then
echo -e "${FAIL_PREFIX} failed to version artifact"
exit 1;
else
echo -e "${SUCCESS_PREFIX} created: dillo-bot-$VERSION.jar"
fi
if [ $UI_SUCCESS -ne 0 ]; then
echo -e "${FAIL_PREFIX} ui failed"
echo -e "${INFO_PREFIX} dillo-bot-$VERSION.jar will not have a ui"
fi
exit 0;
| true
|
4cd218a4c426a4f5eb545c5308c9935ff6b94f41
|
Shell
|
ashishforgive/tbs_lamp
|
/tbs_lamp.sh
|
UTF-8
| 1,809
| 3.796875
| 4
|
[] |
no_license
|
#!/bin/bash
# Author : Ashish Kumar
# Copyright (c) TechBrise.com
#
# Interactive LAMP (Linux, Apache2, MySQL, PHP) installer for
# Debian/Ubuntu. Asks for confirmation, installs the stack, enables
# Apache on boot, opens the firewall, and opens test pages in the
# default browser.

# Terminal color escape sequences.
red=`tput setaf 1`
green=`tput setaf 2`
yellow=`tput setaf 3`

echo "${green} +++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
echo "${green}| Hola!! you are installing the LAMP using TechBrise LAMP.|
| L- Linux |
| A- Apache2 |
| M- MYSQL |
| P- PHP |
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
echo
echo "${yellow}Are you sure? you want to install LAMP in your machine if yes please press Y/y else press N/n key for decline."
echo -n "Please press [Y][N]:"; read -r acceptance

# Quote the variable and use `||` instead of the deprecated `-o`: the
# original unquoted `[ $acceptance == Y -o $acceptance == y ]` aborted
# with "unary operator expected" when the user just pressed Enter.
if [ "$acceptance" = "Y" ] || [ "$acceptance" = "y" ]
then
    echo "Hurry!! Your installation is being started. It will take long have a cup of tea and Enjoy!!........."
    # Update Package Index
    sudo apt update
    echo "${green}Update completed!!"
    # Install Apache2, MySQL, PHP
    echo "${green}Installing Apache2, MySQL, PHP....."
    sudo apt install apache2 mysql-server php php-mysql libapache2-mod-php php-cli
    echo "${green}Completed!!."
    # Allow Apache to run on boot up
    echo "${green}Allowing to run Apache on boot up"
    sudo systemctl enable apache2
    # Restart Apache Web Server
    echo "${green}Restarting Apache2..."
    sudo systemctl start apache2
    # Adjust Firewall
    echo "${green}Adjusting Firewall..."
    sudo ufw allow "Apache Full"
    # Allow Read/Write for Owner
    echo "${green}Allowing Read/Write permission..."
    sudo chmod -R 0755 /var/www/html/
    # Create info.php for testing php processing. The original used
    # `sudo echo ... > file`, but the redirection runs as the invoking
    # user, so the write fails on a root-owned docroot; `sudo tee`
    # performs the write with elevated privileges.
    echo "${green}Creating info files for test php..."
    echo "<?php phpinfo(); ?>" | sudo tee /var/www/html/info.php > /dev/null
    # Open localhost in the default browser
    echo "${green}Opening browser for test......."
    xdg-open "http://localhost"
    xdg-open "http://localhost/info.php"
else
    echo "You have terminated the installations"
fi
| true
|
8e19a610a3057e72507fd16dec5b7e644fb70f0b
|
Shell
|
tcider/sh_scripts
|
/4_linux.sh
|
UTF-8
| 98
| 2.5625
| 3
|
[] |
no_license
|
#!/bin/bash
# Install procps so ps(1) is available, then list the child processes
# of a parent pid entered by the user.
apt-get install -y procps
echo "Enter pid of parent proccess"
read parent_pid
ps --ppid $parent_pid
| true
|
2e403eb354f933e819e3dccb1be7c877bb3e8b1d
|
Shell
|
blacknon/dotfiles
|
/sh/functions/iterm2.sh
|
UTF-8
| 5,525
| 3.78125
| 4
|
[] |
no_license
|
#!/bin/bash
# Copyright(c) 2023 Blacknon. All rights reserved.
# Use of this source code is governed by an MIT license
# that can be found in the LICENSE file.
# ____print_osc():
# about:
# tmuxなどの場合にOSCエスケープシーケンスを出力するためのfunction。
____print_osc() {
if [[ $TERM == screen* ]]; then
printf "\033Ptmux;\033\033]"
else
printf "\033]"
fi
}
# ____print_st():
# about:
# More of the tmux workaround described above.
____print_st() {
if [[ $TERM == screen* ]]; then
printf "\a\033\\"
elif [ x"$TERM" = "xscreen" ]; then
printf "\a\033\\"
else
printf "\a"
fi
}
# ____check_dependency():
# about:
# コマンドの有無を確認するfunction。
____check_dependency() {
if ! (builtin command -V "$2" >/dev/null 2>&1); then
echo "$1: missing dependency: can't find $2" 1>&2
exit 1
fi
}
# ____print_image():
# about:
# iTerm2にイメージを出力するimgview用のfunction
____print_image() {
# local変数を宣言
local BASE64ARG
____print_osc
printf '1337;File='
if [[ -n "$1" ]]; then
printf 'name='$(printf "%s" "$1" | base64)";"
fi
VERSION=$(base64 --version 2>&1)
if [[ "$VERSION" =~ fourmilab ]]; then
BASE64ARG=-d
elif [[ "$VERSION" =~ GNU ]]; then
BASE64ARG=-di
else
BASE64ARG=-D
fi
printf "%s" "$3" | base64 $BASE64ARG | wc -c | awk '{printf "size=%d",$1}'
printf ";inline=$2"
printf ":"
printf "%s" "$3"
____print_st
printf '\n'
if [[ -n "$4" ]]; then
echo $1
fi
}
# ____show_error():
# about:
# エラーメッセージを出力するfunction
____show_error() {
echo "ERROR: $*" 1>&2
}
# ____show_imgview_help():
# about:
# imgview用のhelpを出力するためのfunction
____show_imgview_help() {
echo "Usage:"
echo " imgview [-p] filename ..." 1>&2
echo " cat filename | imgview" 1>&2
}
# ____show_imgls_list_file():
# about:
# imglsの結果を出力するfunction
____show_imgls_list_file() {
# local変数の宣言
local fn
local dims
local rc
fn=$1
test -f "$fn" || return 0
dims=$(php -r 'if (!is_file($argv[1])) exit(1); $a = getimagesize($argv[1]); if ($a==FALSE) exit(1); else { echo $a[0] . "x" .$a[1]; exit(0); }' -- "$fn")
rc=$?
if [[ $rc == 0 ]]; then
____print_osc
printf '1337;File=name='$(echo -n "$fn" | base64)";"
wc -c -- "$fn" | awk '{printf "size=%d",$1}'
printf ";inline=1;height=3;width=3;preserveAspectRatio=true"
printf ":"
base64 <"$fn"
____print_st
if [ x"$TERM" == "xscreen" ]; then
printf '\033[4C\033[Bx'
else
printf '\033[A'
fi
echo -n "$dims "
ls -ld -- "$fn"
else
ls -ld -- "$fn"
fi
}
# imgview():
# about:
# iTerm2上で画像ファイルを表示するfunction
# origin:
# https://www.iterm2.com/utilities/imgcat
# TODO(blacknon): show_helpと同様の処理を他の主要functionにも実装できるか検討する
imgview() {
# local変数の宣言
local has_stdin
local print_filename
local encoded_image
# Main
if [ -t 0 ]; then
has_stdin=f
else
has_stdin=t
fi
# Show help if no arguments and no stdin.
if [ $has_stdin = f -a $# -eq 0 ]; then
____show_imgview_help
return
fi
____check_dependency imgview awk
____check_dependency imgview base64
____check_dependency imgview wc
# Look for command line flags.
while [ $# -gt 0 ]; do
case "$1" in
-h | --h | --help)
____show_imgview_help
return
;;
-p | --p | --print)
print_filename=1
;;
-u | --u | --url)
check_dependency curl
encoded_image=$(curl -s "$2" | base64) || (
____show_error "No such file or url $2"
exit 2
)
has_stdin=f
____print_image "$2" 1 "$encoded_image" "$print_filename"
set -- ${@:1:1} "-u" ${@:3}
if [ "$#" -eq 2 ]; then
return
fi
;;
-*)
____show_error "Unknown option flag: $1"
____show_imgview_help
return
;;
*)
if [ -r "$1" ]; then
has_stdin=f
____print_image "$1" 1 "$(base64 <"$1")" "$print_filename"
else
____show_error "imgview: $1: No such file or directory"
return
fi
;;
esac
shift
done
# Read and print stdin
if [ $has_stdin = t ]; then
____print_image "" 1 "$(cat | base64)" ""
fi
}
# TODO(blacknon): 引数でPATHを指定するように書き換える
# TODO(blacknon): オプションを付与する
# imgls():
# about:
# iTerm2上で画像ファイルをls状に表示するfunction
# origin:
# https://www.iterm2.com/utilities/imgls
# require:
# - php
imgls() {
____check_dependency imgls php
____check_dependency imgls base64
____check_dependency imgls wc
if [ $# -eq 0 ]; then
for fn in *; do
____show_imgls_list_file "$fn"
done < <(ls -ls)
else
for fn in "$@"; do
____show_imgls_list_file "$fn"
done
fi
}
| true
|
ab75a85d24d69eb3915b4f86170e42349711eb90
|
Shell
|
jushikj/work
|
/portal/CAE/ANSYS_LSDYNA/ANSYS_LSDYNA.run
|
UTF-8
| 6,555
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
get_portal_input()
{
QTIMESTAMP=`date +%G%m%d_%H%M`
PORTALVAR=/tmp/clusportal_$USER$QTIMESTAMP.var
#touch $PORTALVAR
dos2unix -n $1 $PORTALVAR 2>/dev/null
source $PORTALVAR
}
get_basic_params()
{
source /opt/gridview/gridviewAppTemplate/CAE/ANSYS_LSDYNA/ANSYS_LSDYNA.setting
### runfile setting
#openmpi_ENVFILE intelmpi_ENVFILE mpich2_ENVFILE cr_mpi_ENVFILE mvapich2_ENVFILE debug
##prefix=`basename $1|awk -F. '{print $1}'`
time=`date +%m%d%H%M%S`
username=`whoami`
pbsfile=${GAP_NAME}.pbs
}
parse_portal_params()
{
if [ "null$GAP_MPI_MPI_OPT" == "null" ];then
GAP_MPI_MPI_OPT=" "
fi
if [ "null$GAP_PBS_OPT" == "null" ];then
GAP_MPI_PBS_OPT=" "
fi
if [ "null$GAP_PRE_CMD" == "null" ];then
GAP_MPI_PRE_CMD=" "
fi
if [ "null$GAP_POST_CMD" == "null" ];then
GAP_MPI_POST_CMD=" "
fi
## remote shell option
if [ ${GAP_REMOTE_SHELL} == "ssh" ];then
RSH="-usessh"
else
RSH=""
fi
## MPI Type optios
if [ "${GAP_MPI_TYPE}" == "pcmpi" ];then
MPIINPUT="-mpi PCMPI"
elif [ "${GAP_MPI_MPIRUNTYPE}" == "intelmpi" ];then
MPIINPUT="-mpi INTELMPI"
else
MPIINPUT="-mpi HPMPI"
fi
## PRECISSION
if [ ${GAP_PRECISSION} == "single" ];then
PRECISION=""
else
PRECISION="-dp"
fi
## keyword file
if [ "${GAP_KEYWORD_FILE}" != "" ];then
dos2unix ${GAP_KEYWORD_FILE} >& /dev/null &
INP="i=${GAP_KEYWORD_FILE}"
else
INP=""
fi
## hpc run option
if [ "${GAP_MPI_PARAMODE}" == dmp ];then
PRECMD='hostlist="";for node in `cat $PBS_NODEFILE | uniq`;do num=`cat $PBS_NODEFILE | grep $node | wc -l`;if [ -z $hostlist ];then hostlist=$nodeGridViewPortalColon$num;else hostlist=$hostlistGridViewPortalColon$nodeGridViewPortalColon$num;fi;done'
RUNLIST='-dis -machines $hostlist'
MPP="-lsdynampp"
else
PRECMD='num=`cat $PBS_NODEFILE | wc -l`;'
#RUNLIST='-np $num'
RUNLIST=""
#no mpi
MPIINPUT=""
MPP="-lsdynampp"
fi
## memory
if [ "${GAP_MEMORY}" != "" ];then
MEMORY="memory=${GAP_MEMORY}m"
else
MEMORY=""
fi
## gpu acc option
## vnc option
NCPUS="ncpus=${GAP_PPN}"
cd ${GAP_WORK_DIR}
}
create_lsdyna_script()
{
/opt/gridview/gridviewAppTemplate/createpbs -t serial -o "${GAP_OUTPUT}" -pbs ${pbsfile} -precommand "${PRECMD}" -prog "${GAP_PROGRAM}" -proarg "${MPIINPUT} ${MPP} ${NCPUS} ${INP} ${MEMORY} ${PRECISION} ${RUNLIST} ${RSH} ${GAP_ARGUMENTS}" -envfile "${ANSYS_ENVFILE}" -nnodes "${GAP_NNODES}" -ppn "${GAP_PPN}" -wtime "${GAP_WALL_TIME}" -name "${GAP_NAME}" -wdir "${GAP_WORK_DIR}" -q "${GAP_QUEUE}" -pbsoption "${GAP_PBS_OPT}"
sed -i 's/GridViewPortalColon/:/g' ${pbsfile}
}
create_tclsdyna_script()
{
NPSUM=$[${GAP_NNODES}*${GAP_PPN}]
np="-np ${NPSUM}"
/opt/gridview/gridviewAppTemplate/createpbs -t serial -o "${GAP_OUTPUT}" -pbs ${pbsfile} -precommand "${PRECMD}" -prog "${GAP_PROGRAM}" -proarg "${MPIINPUT} ${np} ${INP} ${MEMORY} 2>&1|tee ./${GAP_OUTPUT} & ${RUNLIST} ${RSH} ${GAP_ARGUMENTS}" -envfile "${ANSYS_ENVFILE}" -nnodes "${GAP_NNODES}" -ppn "${GAP_PPN}" -wtime "${GAP_WALL_TIME}" -name "${GAP_NAME}" -wdir "${GAP_WORK_DIR}" -q "${GAP_QUEUE}" -pbsoption "${GAP_PBS_OPT}"
sed -i 's/GridViewPortalColon/:/g' ${pbsfile}
}
create_pbs_script()
{
if [ "${GAP_ANSYS_VERSION}" == "lsdyna" ];then
create_lsdyna_script
else
create_tclsdyna_script
fi
################################################################################################################################################
#createpbs -t Type [-o logfile] [-pbs pbsfile]
# -prog Program [-proarg Program Argment]
# [-nnodes NodeNum] [-ppn PPN] [-wtime WallTime] [-name JobName] [-wdir WorkDir] [-q queque] [ -pbsoption 'option1:option2: ... :optionN' ]
# [-envfile envfile1 envfile2 ... envfileN]
# [-precommand 'command1:command2: ... commandN'] [ -postcommand 'command1:command2: ... :commandN' )
# [-vnc]
# [-mpirun mpirun command] [-net Network] [-r ssh/rsh] [-bindcore] [-mpiarg MPIRUN Argment]
# [-cri CheckPoint Interval(Minitues)] [-ocpg Old CheckPoint Group name] [-ncpg Old CheckPoint Group name] [-ncp number CheckPoints]
# Type should be serial,openmpi,intelmpi cr_mpi, mvapich2,mpich2
# Network should be tcp,ib,memory,ib_memory,tcp_memory #
##############################################################################################################################################
#/opt/gridview/gridviewAppTemplate/createpbs -t serial -o "${GAP_MPI_OUTPUT}" -pbs ${pbsfile} -precommand "${PRECMD}" -prog "${GAP_MPI_PROGRAM}" -proarg "${MPIINPUT} -lsdynampp ${INP} ${RUNLIST} ${RSH} ${PRECISION} ${GAP_MPI_PROGRAM_ARG}" -envfile "${ANSYS_ENVFILE}" -nnodes "${GAP_MPI_NNODES}" -ppn "${GAP_MPI_PPN}" -wtime "${GAP_MPI_WALL_TIME}" -name "${GAP_MPI_NAME}" -wdir "${GAP_MPI_WORK_DIR}" -q "${GAP_MPI_QUEUE}" -pbsoption "${GAP_MPI_PBS_OPT}"
#sed -i 's/GridViewPortalColon/:/g' ${pbsfile}
#/opt/gridview/gridviewAppTemplate/createpbs -t "${GAP_MPI_MPI_TYPE}" -o "${GAP_MPI_OUTPUT}" -pbs ${pbsfile} \
# -prog "${GAP_MPI_PROGRAM}" -proarg "${GAP_MPI_PROGRAM_ARG}"\
# -envfile "${ANSYS_ENVFILE}" \
# -precommand "${GAP_MPI_PRE_CMD}" -postcommand "${GAP_MPI_POST_CMD}" \
# ${VNC_OPTION} \
# -nnodes "${GAP_MPI_NNODES}" -ppn "${GAP_MPI_PPN}" -wtime "${GAP_MPI_WALL_TIME}" -name "${GAP_MPI_NAME}" -wdir "${GAP_MPI_WORK_DIR}" -q "${GAP_MPI_QUEUE}" \
# -pbsoption "${GAP_MPI_PBS_OPT}" \
# -net "${GAP_MPI_NETWORK}" -r "${GAP_MPI_REMOTE_SHELL}" ${CPU_BINDING_OPTION} -mpiarg "${GAP_MPI_MPI_OPT}" \
# -ncpg '$id.ckps' -cri "${GAP_MPI_INTERVAL}" -ncp 2
#
}
submit_job()
{
jobid=`qsub $pbsfile`
return $?
}
checkpoint_func()
{
if [ "${GAP_CHECK_POINT}" == 1 ];then
id=`echo $jobid|awk -F. '{print $1}' `
echo "Jobid:${id}
Cr_type:mpi_system
User_name:`whoami`
Work_dir:${wdir}
Job_batch:autocr.${id}.pbs
Cr_batch:autocr.${id}.pbs
Cr_interval:${cri}
Num_CP_save:${ncp}
" >/checkpoints/autocr/spoolautocr.${id}.Q
fi
}
clean_tmp_file()
{
if [ "$debug" != "1" ];then
rm -f /tmp/${prefix}.var
rm -f /tmp/${prefix}.runvar
rm -f $PORTALVAR
fi
}
output_result()
{
if [ "$1" == "0" ];then
echo $jobid | grep "^[0-9]*\.\w*" || exit 1
else
echo "Job Submit Error: ${jobid}" && exit 1
fi
}
main()
{
get_portal_input $@
get_basic_params $@
parse_portal_params
create_pbs_script
submit_job
job_status=$?
checkpoint_func
#clean_tmp_file
output_result ${job_status}
}
main "$@"
| true
|
17fb7dd49d9a9717106090c25b0c21da151a2e88
|
Shell
|
starcoinorg/starcoin
|
/scripts/update_version.sh
|
UTF-8
| 1,879
| 4.5
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Default flag value for simulation mode
simulate=true
# Function to display script usage
display_help() {
echo "Usage: ./update_version.sh <old_version> <new_version> [--execute]"
echo " ./update_version.sh -h|--help"
echo ""
echo "Options:"
echo " --execute Perform actual changes in files"
echo " -h, --help Display this help message"
}
# Check if -h or --help flag is provided to display help
if [ "$1" == "-h" ] || [ "$1" == "--help" ]; then
display_help
exit 0
fi
# Check if input parameters are empty
if [ -z "$1" ] || [ -z "$2" ]; then
echo "Error: Please provide the old version number and new version number as parameters"
display_help
exit 1
fi
# Check if --execute flag is provided to perform actual changes
if [ "$3" == "--execute" ]; then
simulate=false
fi
if [[ "$OSTYPE" == "darwin"* ]]; then
if ! command -v gsed >/dev/null; then
echo "gsed not found, installing..."
brew install gnu-sed
fi
SED=gsed
else
SED=sed
fi
# Get input parameters
old_version=$1
new_version=$2
# Get the absolute path of the script directory
script_dir=$(cd "$(dirname "$0")" && pwd)
# Get the parent directory path of the script location
base_dir=$(dirname "$script_dir")
# Find all Cargo.toml files (excluding target directory) and process each file
find "$base_dir" -name "target" -prune -o -name "Cargo.toml" -type f | while read -r cargo_file; do
# Use sed command to find and replace version number in [package] section
if [ "$simulate" = true ]; then
$SED -n "/\[package\]/,/^$/p" "$cargo_file" | $SED "s/version = \"$old_version\"/version = \"$new_version\"/g"
else
$SED -i "/\[package\]/,/^$/ s/version = \"$old_version\"/version = \"$new_version\"/" "$cargo_file"
echo "Version number in $cargo_file has been changed to $new_version"
fi
done
| true
|
8bffd9161770a4d299e1fdd94b1c619339930f17
|
Shell
|
PaaS-TA/PaaS-TA-Core
|
/paasta-controller/src/routing-release/jobs/routing-api/templates/routing-api_ctl.erb
|
UTF-8
| 1,123
| 3.640625
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash -e
RUN_DIR=/var/vcap/sys/run/routing-api
LOG_DIR=/var/vcap/sys/log/routing-api
PIDFILE=$RUN_DIR/routing-api.pid
source /var/vcap/packages/routing_utils/pid_utils.sh
source /var/vcap/packages/routing_utils/syslog_utils.sh
function setup_environment() {
mkdir -p "${RUN_DIR}"
}
case $1 in
start)
setup_environment
pid_guard $PIDFILE "routing-api"
tee_output_to_sys_log "${LOG_DIR}" "routing_api"
set +e
host <%= p("dns_health_check_host") %>
if [[ "0" != "$?" ]]; then
echo "DNS is not up"
exit 1
fi
set -e
echo $$ > $PIDFILE
# Allowed number of open file descriptors
# ulimit -n 100000
exec /var/vcap/packages/routing-api/bin/routing-api \
-config /var/vcap/jobs/routing-api/config/routing-api.yml \
-port <%= p("routing_api.port") %> \
-logLevel=<%= p("routing_api.log_level") %> \
-ip <%= spec.ip %> \
<% if p("routing_api.auth_disabled") == true %> -devMode <% end %> \
;;
stop)
kill_and_wait $PIDFILE
;;
*)
echo "Usage: routing-api_ctl {start|stop}"
;;
esac
| true
|
8be813885b9a28deef65f5c9177eab7cb528b520
|
Shell
|
visavi/rotordock
|
/.cleanup
|
UTF-8
| 121
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Stop and remove every Docker container (running or stopped), if any.
#
# Query the container list once: the original re-ran `docker ps -a -q`
# for the test, the stop, and the rm, so a container created between
# those calls made stop and rm act on different sets.
containers=$(docker ps -a -q)
if [ -n "$containers" ]; then
	docker stop $containers
	docker rm $containers
fi
| true
|
a81c7c2966f0799a23428e85bb8eb34cc5daa4a4
|
Shell
|
coscin/laptop
|
/bin/ovs-nyc-ifup
|
UTF-8
| 323
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/sh
# Bring up a tap interface and attach it to the br-nyc OVS bridge with
# a fixed OpenFlow port number.
#
# Usage: ovs-nyc-ifup <tap-interface>
switch='br-nyc'

# Bring the interface up with no address; OVS carries the traffic.
/sbin/ifconfig "$1" 0.0.0.0 up

# Map each known tap device to its OpenFlow port number.
case "$1" in
  tap1) ofport=1
        ;;
  tap3) ofport=2
        ;;
#  tap6) ofport=3
#        ;;
#  tap7) ofport=4
#        ;;
  *)  echo "Unexpected tap port $1 for NYC" >&2
      # Exit non-zero so the caller sees the failure; the original bare
      # `exit` returned the status of the preceding echo, i.e. 0.
      exit 1
      ;;
esac

ovs-vsctl add-port ${switch} "$1" -- set Interface "$1" ofport_request=${ofport}
| true
|
4a27d81de56ae27bc5ce0cada44d1745fa7ec769
|
Shell
|
lisadlima/riptide-ros
|
/scripts/install-by-deps
|
UTF-8
| 1,294
| 3.5625
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# A Script to install only necessary ROS packages
#
# Installs gazebo6-ros-pkgs
# Resolves package dependencies

# Select the appropriate ROS version for the current OS, honouring a
# pre-set ROS_DISTRO from the environment:
if [ "${ROS_DISTRO:-false}" == "false" ]; then
  if [ $(lsb_release -cs) == "precise" ]; then ROS_DISTRO=hydro
  elif [ $(lsb_release -cs) == "trusty" ]; then ROS_DISTRO=indigo
  elif [ $(lsb_release -cs) == "vivid" ]; then ROS_DISTRO=jade
  else
    # Unsupported release: abort instead of continuing with an empty
    # ROS_DISTRO (the original fell through and broke every later step).
    echo "We've had a MAIN B BUS UNDERVOLT." >&2
    exit 1
  fi
fi

# Add the necessary repositories for ROS and Gazebo:
sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -cs) main" > /etc/apt/sources.list.d/ros-latest.list'
wget -nv http://packages.ros.org/ros.key -O - | sudo apt-key add -
sudo sh -c 'echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-stable $(lsb_release -cs) main" > /etc/apt/sources.list.d/gazebo-stable.list'
wget -nv http://packages.osrfoundation.org/gazebo.key -O - | sudo apt-key add -

# Install rosdep and Gazebo packages:
sudo apt-get -qq update
sudo apt-get -qq -y install python-rosdep ros-"$ROS_DISTRO"-gazebo6-ros-pkgs
sudo rosdep init
rosdep update # Do not sudo!

# Install the current package's dependencies:
rosdep install -iyq --from-paths ./

echo "source /opt/ros/$ROS_DISTRO/setup.bash" >> ~/.bashrc
| true
|
c55f299c4fb9b702a7cab3e06f877947f8ed803a
|
Shell
|
tlbanken/Simple_Auto_Grader
|
/auto_grader
|
UTF-8
| 2,812
| 4.21875
| 4
|
[] |
no_license
|
#!/bin/bash
# Script to automate diffing testcases against user and given executables
# Author: Travis Banken
# MUST give name of the given exec and your exec
EXE_GIVEN=
MY_EXE=
# check for executables
if [ "$EXE_GIVEN" == "" ] || [ "$MY_EXE" == "" ]; then
echo -e "No executables specified. Edit the grader with the names of the executables"
exit 1
fi
if [ ! -f "./$EXE_GIVEN" ]; then
echo "The exec '$EXE_GIVEN' not found, is it in current directory?"
exit 1
fi
if [ ! -f "./$MY_EXE" ]; then
echo -e "The exec '$MY_EXE' not found, is it in current directory?"
exit 1
fi
# check if tests are in specified path
if [ "$1" == "" ]; then
testpath=.
else
testpath="$1"
fi
tests=( $testpath/*.in )
if [ ! -f $tests ]; then
echo "No .in files found for testing :("
exit 1
fi
if [ -f diffout ]; then
rm diffout
fi
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color
echo "================================================="
echo "SUPER GRADING SCRIPT GOOOOOOOOOO!!!"
echo "------------------------------------"
total=0
passed=0
timedout=0
flag1=0
flag2=0
for TESTCASE in $testpath/*.in; do
echo "Testing $TESTCASE . . . "
# run run and store outputs and rc of both executables (10 sec timeout)
# We don't care about stderr so redirect away
timeout 10 ./$EXE_GIVEN < $TESTCASE > exOut 2>/dev/null
ex_rc=$(echo $?)
if [ $ex_rc -eq 124 ]; then
echo -e "${YELLOW}Timeout!${NC}"
echo "$EXE_GIVEN timed out! :("
echo -e "\n"
timedout=$((timedout + 1))
continue
fi
timeout 10 ./$MY_EXE < $TESTCASE > myOut 2>/dev/null
my_rc=$(echo $?)
if [ $my_rc -eq 124 ]; then
echo "$MY_GIVEN timed out! :("
echo -e "\n"
timedout=$((timedout + 1))
continue
fi
output=$(diff exOut myOut)
rm exOut myOut
# check if stdout and rc match on both executables
echo -n "Output: "
if [ "$output" == "" ]; then
echo -e "${GREEN}PASSED${NC}"
flag1=1
else
echo -e "*********************************" >> diffout
echo -e "${TESTCASE} DIFF OUTPUT" >> diffout
echo -e "*********************************" >> diffout
echo -e "${output}\n" >> diffout
echo -e "${RED}FAILED${NC}"
flag1=0
fi
echo -n "Return Code: "
if [ "$ex_rc" == "$my_rc" ]; then
echo -e "${GREEN}PASSED${NC}"
flag2=1
else
echo -e "${RED}FAILED${NC}"
flag2=0
fi
echo -e "\n"
# check if output and rc passed
if [ $((flag1 * flag2)) == 1 ]; then
passed=$((passed + 1))
fi
total=$((total + 1))
flag="0"
done
score=$((passed * 100 / total))
echo "------------------------------------"
echo " RESULTS "
echo "------------------------------------"
echo "Passed: $passed"
echo "Total: $total"
echo -e "\n"
echo -e "\tScore: $score%"
echo -e "\n"
echo "Timeouts: $timedout"
echo "Done."
echo "================================================="
| true
|
54ca5a078dae7c7a96e856b0326ea964783fe3eb
|
Shell
|
lrocca/push_swap
|
/test/test.sh
|
UTF-8
| 2,651
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
# **************************************************************************** #
# #
# ::: :::::::: #
# test.sh :+: :+: :+: #
# +:+ +:+ +:+ #
# By: lrocca <marvin@42.fr> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2021/05/18 21:22:55 by lrocca #+# #+# #
# Updated: 2021/05/18 21:22:55 by lrocca ### ########.fr #
# #
# **************************************************************************** #
C_RESET="\e[0;0m"
C_BOLD="\e[1m"
C_RED="\e[0;31m"
C_GREEN="\e[0;32m"
C_YELLOW="\e[0;33m"
C_GRAY="\e[0;37m"
C_BLUE="\e[0;34m"
C_LGRAY="\e[0;90m"
C_LRED="\e[0;91m"
C_LGREEN="\e[0;92m"
C_LBLUE="\e[0;94m"
C_BGRAY="\e[1;30m"
C_BRED="\e[1;31m"
C_BGREEN="\e[1;32m"
C_BYELLOW="\e[1;33m"
C_BBLUE="\e[1;34m"
CHECKER=./checker
PUSH_SWAP=./push_swap
TMP=./test/test.tmp
print_stat() {
printf " $C_GRAY$1$C_RESET $C_BOLD$2$C_RESET"
}
test() {
printf "${C_BBLUE}$1${C_RESET}\n"
fail_ops=0
ko_ops=0
all_ops=()
for i in $( seq 1 $3 )
do
ko=0
echo "" > $TMP
arg=`ruby -e "puts (1..$1).to_a.shuffle.join(' ')" 2> /dev/null`
($PUSH_SWAP $arg 1> $TMP 2> /dev/null)
ops=`wc -l < $TMP`
checker=`$CHECKER $arg < $TMP 2> /dev/null`
printf "${C_LGRAY}%-5i" ${i}
all_ops[i]=$(($ops))
if [[ $((ops)) -lt $2 ]]; then
status=${C_LGREEN}
elif [[ $((ops)) -eq $2 ]]; then
status=${C_BYELLOW}
else
status=${C_BRED}
fail_ops=$(($fail_ops + 1))
ko=1
fi
printf "${status}%2i$C_RESET " $((ops))
if [[ $checker == "OK" ]]; then
status=${C_GREEN}
else
status=${C_RED}
ko_ops=$(($ko_ops + 1))
ko=1
fi
printf "${status}%5s$C_RESET" $checker
if [[ $ko -eq 1 ]]; then
echo -en "\t${arg}"
fi
echo
done
avg=$(IFS='+'; bc<<<"scale=1;(${all_ops[*]})/${#all_ops[@]}")
if [[ ${avg::1} == "." ]]; then
avg="0"$avg
fi
max=$(IFS=$'\n'; echo "${all_ops[*]}" | sort -nr | head -n1)
max_times=$(echo ${all_ops[*]} | grep -o ${max} | wc -l | tr -d ' ')
printf "$C_YELLOW$1$C_RESET"
print_stat "avg" $avg
print_stat "max" $max
echo -n " ($max_times)"
print_stat "fail" $fail_ops
print_stat "ko" $ko_ops
echo
echo
}
touch $TMP
test 3 4 10
test 5 12 10
test 100 700 20
test 500 5500 20
rm $TMP
| true
|
a31216305b1428d2b4ee86436b912d773b8d3ce4
|
Shell
|
linuxdabbler/debian-dialog-install-script
|
/dialog.sh
|
UTF-8
| 29,048
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
# Borrowed some of the syntax from DasGeek among others...
# Tested on Debian Buster and Testing
# Version 0.1
## Define Temp location - "dis" stands for "debian-install-script"
# BUGFIX: was 'tmd_dir' (typo). Every later reference reads $tmp_dir
# (mkdir $tmp_dir, rm -rf $tmp_dir), which was empty before this fix.
tmp_dir=/tmp/dis
## Shorthand variables for common apt invocations
install='apt install'
# NOTE(review): when expanded unquoted ($update), the ';' is a literal word,
# not a command separator — 'apt upgrade -y' never runs; consider a function.
update='apt update; apt upgrade -y'
user=$USER
#User=$(getent passwd 1000 | awk -F: '{ print $1}')
## Start script
# Keep a pristine copy of the apt sources so "Return to Original" can restore it.
cp /etc/apt/sources.list /etc/apt/sources.list.original
# Root is required: the script writes under /etc and installs packages.
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root type: sudo ./dialog.sh"
exit 1
else
#Update and Upgrade
echo "Updating and Upgrading"
# NOTE(review): $update word-splits to plain arguments, so the ';' inside it
# is literal — only the first apt invocation is meaningful here; confirm intent.
$update
echo "Creating temporary folder"
# Scratch folder used by the installer; removed again by the cleanup option.
mkdir $tmp_dir
# dialog provides the checklist UI built just below
$install dialog
# Checklist widget: title, one selection per line, 22x80 window, 16 visible rows.
cmd=(dialog --title "LD-Installer" --separate-output --checklist "Please Select Software You Want To Install:" 22 80 16)
options=(
#A "<----Category: Repositories---->" on
1_repos " Grant Standard User Root Access" off
2_repos " Contrib and Non-free Repos" off
# 3_repos " Testing Repos" off
# 4_repos " Unstable Repos" off
# 5_repos " Experimental Repos" off
6_repos " Return to Original" off
7_repos " Liquorx-Kernel" off
#B "<----Category: Alternate Installers---->" on
1_installer " Snap Packages" off
2_installer " Flatpak" off
3_installer " Synaptic" off
4_installer " PIP" off
#C "<----Category: Text Editors---->" on
1_editor " Vim" off
2_editor " Nano" off
3_editor " Geany" off
4_editor " emacs" off
5_editor " Gedit" off
#D "<----Category: Phone---->" on
1_phone " android" off
2_phone " iphone" off
#E "<----Category: Terminal Programs---->" on
1_terminal " Compress/Decompress" off
2_terminal " UFW" off
3_terminal " Identify hardware" off
4_terminal " Python" off
5_terminal " Cups" off
6_terminal " Youtube-dl" off
7_terminal " Htop" off
8_terminal " Parted" off
9_terminal " Curl" off
10_terminal " Wget" off
11_terminal " Ranger" off
12_terminal " Dmenu" off
13_terminal " Rofi" off
14_terminal " Build Essential" off
15_terminal " SSH" off
16_terminal " Urxvt" off
17_terminal " Sakura" off
18_terminal " Terminator" off
19_terminal " Tilix" off
20_terminal " Xterm" off
#F "<----Category: Terminal Customization---->" on
1_customize " Neofetch" off
2_customize " Screenfetch" off
3_customize " Figlet" off
4_customize " Lolcat" off
5_customize " Powerline" off
#G "<----Category: Email---->" on
1_email " Thunderbird" off
2_email " Neomutt" off
3_email " Geary" off
#H "<----Category: Web Browsers/Downloaders---->" on
1_web " Chromium" off
2_web " Google Chrome" off
3_web " Vivaldi" off
4_web " ICE-SSB-Application" off
5_web " Transmission" off
#I "<----Category: Networking---->" on
1_network " SAMBA" off
#J "<----Category: Graphics---->" on
1_graphics " Nvidia Driver" off
2_graphics " AMD Driver" off
#K "<----Category: Sound---->" on
1_sound " Pulse Audio" off
2_sound " ALSA" off
#L "<----Category: Fonts---->" on
1_font " Microsoft fonts" off
2_font " Ubuntu fonts" off
#M "<----Category: Icons---->" on
1_icon " Numix icons" off
2_icon " Moka icons" off
3_icon " Mate icons" off
4_icon " Papirus icons" off
5_icon " Deepin-icons" off
#N "<----Category: Photo Viewing/Editing---->" on
1_photo " Feh" off
2_photo " Gimp" off
3_photo " Inkscape" off
4_photo " Digikam" off
5_photo " Darktable" off
6_photo " Shotwell" off
#O "<----Category: Media Viewing/Editing/Converting---->" on
1_media " Handbrake" off
2_media " Kdenlive" off
3_media " VLC" off
4_media " Audacity" off
5_media " Plex Media Server" off
6_media " Simple Screen Recorder" off
7_media " OBS Studio" off
8_media " Optical Drive Software" off
9_media " SM Player" off
10_media " FFmpeg" off
#P "<----Category: Gaming---->" on
1_gaming " Steam" off
2_gaming " Lutris" off
#Q "<----Category: File Explorer---->" on
1_files " Nemo" off
2_files " Thunar" off
3_files " Pcmanfm" off
4_files " Caja" off
5_files " Nautilus" off
6_files " Dolphin" off
#R "<----Category: Desktop Customization---->" on
1_desktop " nitrogen" off
2_desktop " variety" off
3_desktop " lxappearance" off
4_desktop " conky" off
5_desktop " QT matches GTK" off
6_desktop " Vimix Theme" off
7_desktop " Adapta Theme" off
8_desktop " Polybar" off
#S "<----Category: File Systems---->" on
1_filesystem " ZFS" off
2_filesystem " Exfat" off
#T "<----Category: Virtualizaion---->" on
1_virtual " Virtualbox" off
2_virtual " Gnome Boxes" off
#U "<----Category: System---->" on
1_system " Swappiness=10" off
V "Post Install Auto Clean Up & Update" off)
choices=$("${cmd[@]}" "${options[@]}" 2>&1 >/dev/tty)
clear
for choice in $choices
do
case $choice in
# Section A -----------------------Repositories----------------------------
1_repos)
# Find the standard user you created during installation and make it a variable
User=$(getent passwd 1000 | awk -F: '{ print $1}')
# Echo the user into the sudoers file
echo "$User ALL=(ALL:ALL) ALL" >> /etc/sudoers
sleep 1
;;
2_repos)
#Enable Contrib and Non-free Repos
echo "enabling Contrib and Non-free Repos"
cat /etc/apt/sources.list >> /etc/apt/sources.list.bak
sed -e '/Binary/s/^/#/g' -i /etc/apt/sources.list
sed -i 's/main/main contrib non-free/gI' /etc/apt/sources.list
apt update
sleep 1
;;
# 3_repos)
# #Enable Testing Repos
# echo "enabling Bullseye Repos"
# #cat /etc/apt/sources.list >> /etc/apt/sources.list.bak
# #echo "deb http://deb.debian.org/debian testing main contrib non-free" >> /etc/apt/sources.list
# apt update
# sleep 1
# ;;
# 4_repos)
# #Enable Unstable Repos
# echo "enabling Unstable Repos"
# #cat /etc/apt/sources.list >> /etc/apt/sources.list.bak
# #echo "deb http://ftp.us.debian.org/debian unstable main contrib non-free" >> /etc/apt/sources.list
# #echo "deb-src http://ftp.us.debian.org/debain unstable main contrib non-free" >> /etc/apt/sources.list
# apt update
# sleep 1
# ;;
# 5_repos)
# #Enable Experimental Repos
# cat /etc/apt/sources.list >> /etc/apt/sources.list.bak
# #echo "deb http://ftp.us.debian.org/debain experimental main contrib non-free" >> /etc/apt/sources.list
# #echo "deb-src http://ftp.us.debian.org/debian experimental main contrib non-free" >> /etc/apt/sources.list
# apt update
# sleep 1
# ;;
6_repos)
#Return sources.list to original
# BUGFIX: this echo string was missing its closing quote, which swallowed the
# following commands (including the ';;') into the string literal and broke
# the whole case statement.
echo "Returning /etc/apt/sources.list to its Original State"
cat /etc/apt/sources.list.original > /etc/apt/sources.list
apt update
sleep 1
;;
7_repos)
#Install dependencies
echo "installing dependencies"
sudo apt install curl wget apt-transport-https dirmngr
echo "getting custom kernel"
# Detect the release codename from the configured apt sources, then add the
# Liquorix repo + key and refresh the package lists.
codename="$(find /etc/apt -type f -name '*.list' | xargs grep -E '^deb' | awk '{print $3}' | grep -Eo '^[a-z]+' | sort | uniq -c | sort -n | tail -n1 | grep -Eo '[a-z]+$')" && sudo apt-get update && sudo apt-get install apt-transport-https && echo -e "deb http://liquorix.net/debian $codename main\ndeb-src http://liquorix.net/debian $codename main\n\n# Mirrors:\n#\n# Unit193 - France\n# deb http://mirror.unit193.net/liquorix $codename main\n# deb-src http://mirror.unit193.net/liquorix $codename main" | sudo tee /etc/apt/sources.list.d/liquorix.list && curl https://liquorix.net/linux-liquorix.pub | sudo apt-key add - && sudo apt-get update
echo "DONE"
echo "running updates"
sudo apt update
echo "YOU CAN INSTALL THE NEW KERNEL MANUALLY NOW OR WHEN THIS SCRIPT FINISHES"
# BUGFIX: this arm had no ';;' terminator, so the next pattern line was a
# syntax error inside the case statement.
;;
# Section B ---------------------Alternate Installers----------------------------
1_installer)
#Install snap.d
echo "Installing Snap.d"
sudo apt install snapd -yy
sleep 1
;;
2_installer)
#Install flatpak
echo "installing Flatpak"
sudo apt install flatpak -yy
sleep 1
;;
3_installer)
#Install Synaptic
echo "installing Synaptic"
sudo apt install synaptic -yy
sleep 1
;;
4_installer)
#Install PIP
echo "installing PIP -python installer"
sudo apt install python-pip python3-pip -yy
sleep 1
;;
# Section C ------------------------Text Editors------------------------------
1_editor)
#Install Vim
echo "Installing VIM"
sudo apt install vim -yy
sleep 1
;;
2_editor)
#Install Nano
echo "Installing Nano"
sudo apt install nano -yy
sleep 1
;;
3_editor)
#Install Geany
echo "Installing Geany"
sudo apt install geany -yy
sleep 1
;;
4_editor)
#Install Emacs
echo "Installing Emacs"
sudo apt install emacs -yy
sleep 1
;;
5_editor)
#Install Gedit"
echo "Installing Gedit"
sudo apt install gedit -yy
sleep 1
;;
# Section D ---------------------------Phone------------------------------------
1_phone)
#Install Everything for Android Phones
echo "Installing Android SDK, ADB, Fastboot, and Build Tools"
sudo apt install android-sdk adb fastboot android-sdk-build-tools android-sdk-common android-sdk-platform-tools -yy
sleep 1
;;
2_phone)
#Install Everything to do with an iPhone"
echo "Installing All Packages for iPhone"
sudo apt install ideviceinstaller libimobiledevice-utils python-imobiledevice libimobiledevice6 libplist3 libplist-utils python-plist ifuse usbmuxd libusbmuxd-tools gvfs-backends gvfs-bin gvfs-fuse -yy
# BUGFIX: the fuse.conf option is spelled 'user_allow_other' (underscores);
# the previous 'user_allow-other' line was silently ignored by FUSE.
sudo echo "user_allow_other" >> /etc/fuse.conf
# NOTE(review): $User is only set when the "1_repos" option ran first — confirm.
sudo usermod -aG fuse $User
sleep 1
;;
# Section E --------------------------Terminal Programs---------------------------
1_terminal)
#Install Compression Programs
echo "Installing Compression Programs"
sudo apt install p7zip p7zip-full unrar-free unrar unrar-free unzip zip -yy
sleep 1
;;
2_terminal)
#Install Firewall
echo "Installing UFW"
sudo apt install ufw gufw -yy
sleep 1
;;
3_terminal)
#Install Hardware Identifier"
echo "Installing lshw"
sudo apt install lshw lshw-gtk -yy
sleep 1
;;
4_terminal)
#Install Cups
echo "Installing CUPS"
sudo apt install cups cups-pdf -yy
sleep 1
;;
5_terminal)
#Install Youtube-dl
echo "Installing youtube-dl"
sudo apt install wget -yy
sudo wget https://yt-dl.org/latest/youtube-dl -O /usr/local/bin/youtube-dl
sudo chmod a+x /usr/local/bin/youtube-dl
hash -r
sleep 1
;;
6_terminal)
#Install Htop"
echo "Installing Htop"
sudo apt install htop -yy
sleep 1
;;
7_terminal)
#Install Parted
echo "Installing Parted and Gparted"
sudo apt install parted gparted -yy
sleep 1
;;
8_terminal)
#Install Curl
echo "Installing Curl"
sudo apt install curl -yy
sleep 1
;;
9_terminal)
#Install Wget
echo "Installing Wget"
sudo apt install wget -yy
sleep 1
;;
10_terminal)
#Install Ranger
echo "Installing Ranger"
sudo apt install ranger -yy
sleep 1
;;
11_terminal)
#Install Dmenu
echo "Installing Dmenu"
sudo apt install dmenu -yy
sleep 1
;;
12_terminal)
#Install Rofi
echo "Installing Rofi"
sudo apt install rofi -yy
sleep 1
;;
13_terminal)
#Install Build-Essential
echo "Installing Build-Essential"
sudo apt install build-essential cmake -yy
sleep 1
;;
14_terminal)
#Install SSH
echo "Installing SSH"
sudo apt install ssh -yy
sudo systemctl enable ssh
sudo systemctl start ssh
sleep 1
;;
15_terminal)
#Install Urxvt
echo "Installing Urxvt"
sudo apt install rxvt-unicode -yy
sleep 1
;;
16_terminal)
#Install Sakura
echo "Installing Sakura"
sudo apt install sakura -yy
sleep 1
;;
17_terminal)
#Install Terminator
echo "Installing Terminator"
sudo apt install terminator -yy
sleep 1
;;
18_terminal)
#Install Tilix
echo "Installing Tilix"
sudo apt install tilix -yy
sleep 1
;;
19_terminal)
#Install Xterm
echo "Install XTerm"
sudo apt install xterm -yy
sleep 1
;;
# Section F -------------------------Terminal Customization--------------------------
1_customize)
#Install Neofetch
echo "Installing Neofetch"
# BUGFIX: Debian package names are lowercase; 'Neofetch' does not exist.
sudo apt install neofetch -yy
sleep 1
;;
2_customize)
#Install Screenfetch
echo "Installing Screenfetch"
sudo apt install screenfetch -yy
sleep 1
;;
3_customize)
#Install Figlet
echo "Installing Figlet"
sudo apt install figlet -yy
sleep 1
;;
4_customize)
#Install Lolcat
echo "Installing lolcat"
sudo apt install lolcat -yy
sleep 1
;;
5_customize)
#Install Powerline
echo "Installing Powerline"
sudo apt install powerline git -yy
#Make a powerline font folder
sudo mkdir /usr/share/fonts/powerline
# clone powerline fonts from github
git clone https://github.com/powerline/fonts
# change directories into fonts folder created by cloning powerline from github
cd fonts
# run installation script for powerline fonts
./install.sh
# copy powerline fonts into the powerline folder we created earlier
# BUGFIX: the install script puts fonts under ~/.local/share/fonts; the old
# path '.local.share' (dot instead of slash) never matched anything.
sudo cp /home/$USER/.local/share/fonts/*Powerline* /usr/share/fonts/powerline
#backup the bashrc just to be safe
sudo cp .bashrc .bashrc.bak
#enable Powerline Shell
echo "if [ -f /usr/share/powerline/bindings/bash/powerline.sh ]; then
source /usr/share/powerline/bindings/bash/powerline.sh
fi" >> .bashrc
# Restart Bash
. .bashrc
sleep 1
;;
# Section G ----------------------------------Terminal Customization------------------------
1_email)
#Install Thunderbird
echo "Installing Thunderbird"
sudo apt install thunderbird -yy
sleep 1
;;
2_email)
#Install NeoMutt
echo "Install NeoMutt"
sudo apt install neomutt -yy
sleep 1
;;
3_email)
#Install Geary
echo "Installing Geary"
sudo apt install geary -yy
sleep 1
;;
# Section H ----------------------------------Web Browsers/Downloaders-------------------------
1_web)
#Install Chromium
echo "Installing Chromium"
sudo apt install chromium -yy
sleep 1
;;
2_web)
#Install Google Chrome
echo "Installing Gooogle Chrome"
sudo apt install wget -yy
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
sudo dpkg -i google-chome*.deb
sleep 1
;;
3_web)
#Install Vivaldi
echo "Installing Vivaldi"
sudo apt install wget -yy
wget https://downloads.vivaldi.com/stable/vivaldi-stable_2.1.1337.47-1_amd64.deb
sudo dpkg -i vivaldi*.deb
sleep 1
;;
4_web)
#Install ICE-SSB-Application
echo "Installing ICE-SSB-Application"
sudo apt install wget -yy
wget https://launchpad.net/~peppermintos/+archive/ubuntu/ice-dev/+files/ice_6.0.5_all.deb
sudo dpkg -i ice*.deb
sleep 1
;;
5_web)
#Install Transmission
echo "Installing Transmission"
sudo apt install transmission-gtk -yy
sleep 1
;;
# Section I ----------------------------------Networking----------------------------------------------
1_network)
#Install Samba
echo "Installing Samba"
sudo apt install samba samba-common samba-libs cifs-utils libcups2 cups smbclient gvfs-backends net-tools network-manager network-manager-openvpn network-manager-openvpn-gnome
#backup smb.conf
sudo cp /etc/samba/smb.conf /etc/samba/smb.conf.bak
sudo chmod 755 /etc/samba/smb.conf.bak
sudo chmod 755 /etc/samba/smb.conf
sudo grep -v -E "^#|^;" /etc/samba/smb.conf.bak | grep . > /etc/samba/smb.conf
sudo systemctl enable smbd
sudo systemctl start smbd
sudo systemctl enable nmbd
sudo systemctl start nmbd
sleep 1
;;
# Section J -------------------------------Graphics---------------------------------------------------
1_graphics)
#Install Nvidia Driver
echo "Installing Nvidia Driver"
echo "Make sure you have the contrib and non-free repos enabled and updated"
sudo apt install nvidia-driver -yy
sleep 1
;;
# BUGFIX: this arm was a duplicate '1_graphics)' pattern, so the AMD branch
# was unreachable; the checklist defines this option as '2_graphics'.
2_graphics)
#Install AMD Driver
echo "Installing AMD firmware for graphics cards"
sudo apt install firmware-amd-graphics -yy
sleep 1
;;
# Section K --------------------------------------Sound----------------------------------------------
1_sound)
#Install Pulse Audio
echo "Installing Pulse Audio"
sudo apt install pulseaudio pulseaudio-utils pavucontrol pulseaudio-equalizer gstreamer1.0-pulseaudio -yy
sleep 1
;;
2_sound)
#Install ALSA
echo "Installing ALSA"
sudo apt install alsa-utils gstreamser1.0-alsa alsamixergui alsaplayer-gtk alsa-player-daemon alsa-player-common alsa-player-alsa libao-common libao-dbd libao-dev libao4 libasound2 libasound-data libasoundev-libasound-doc libasound-plugins -yy
sleep 1
;;
# Section L -------------------------------------Fonts------------------------------------------------
1_font)
#Install Microsoft fonts
echo "Installing Microsoft fonts"
sudo apt install ttf-mscorefonts-installer -yy
sleep 1
;;
2_font)
#Install Ubuntu fonts
echo "Installing Ubuntu fonts"
# make an ubuntu font folder
sudo mkdir /usr/share/fonts/truetype/ubuntu-fonts
# download ubuntu font family
sudo apt install wget unzip -yy
wget https://assets.ubuntu.com/v1/fad7939b-ubuntu-font-family-0.83.zip
unzip *.zip
# change directories into unzipped ubuntu folder
cd ubuntu-font-family*
# move all ttf fonts into the ubuntu font folder we created eariler
sudo mv *.ttf /usr/share/fonts/truetype/ubuntu-fonts/
# change directories back home
cd ..
# remove all files dending in ".zip"
rm *.zip
# remove all folders beginning with "ubuntu-font-family"
rm -r ubuntu-font-family*
sleep 1
;;
# Section M ---------------------------------Icons---------------------------------------------------
1_icon)
#Install Numix Icons
echo "Installing Numix Icons"
sudo apt install numix-icon-theme -yy
sleep 1
;;
2_icon)
#Install Moka Icons
echo "Installing Moka Icons"
sudo apt install moka-icon-theme -yy
sleep 1
;;
3_icon)
#Install Mate Icons
echo "Installing Mate Icons"
sudo apt install mate-icon-theme mate-icon-theme-faenza -yy
sleep 1
;;
4_icon)
#Install Papirus Icons
echo "Installing Papirus Icons"
sudo apt install papirus-icon-theme -yy
sleep 1
;;
5_icon)
#Install Deepin Icons
echo "Installing Deepin Icons"
sudo apt install deepin-icon-theme -yy
sleep 1
;;
# Section N ---------------------------------Photo Viewing/Editing--------------------------------------
1_photo)
#Install Feh
echo "Installing Feh"
sudo apt install feh -yy
sleep 1
;;
2_photo)
#Install Gimp
echo "Installing Gimp"
sudo apt install gimp -yy
sleep 1
;;
3_photo)
#Install Inkscape
echo "Installing Inkscape"
sudo apt install inkscape -yy
sleep 1
;;
4_photo)
#Install Digikam
echo "Installing Digikam"
sudo apt install digikam -yy
sleep 1
;;
5_photo)
#Install Darktable
echo "Installing Darktable"
sudo apt install darktable -yy
sleep 1
;;
6_photo)
#Install Shotwell
echo "Installing Shotwell"
sudo apt install shotwell shotwell-common -yy
sleep 1
;;
# Section O --------------------------Media Viewing/Editing/Converting---------------------------------
1_media)
#Install Handbrake
echo "Installing Handbrake"
sudo apt install handbrake -yy
sleep 1
;;
2_media)
#Install Kdenlive
echo "Installing Kdenlive"
sudo apt install kdenlive -yy
sleep 1
;;
3_media)
#Install VLC
echo "Installing VLC"
# BUGFIX: Debian package names are lowercase; 'VLC' does not exist.
sudo apt install vlc -yy
sleep 1
;;
4_media)
#Install Audacity
echo "Installaing Audacity"
sudo apt install audacity -yy
sleep 1
;;
5_media)
#Install Plex Media Server
echo "Installing Plex Media Server"
sudo apt install wget -yy
wget -q https://downloads.plex.tv/plex-media-server-new/1.16.2.1321-ad17d5f9e/debian/plexmediaserver_1.16.2.1321-ad17d5f9e_amd64.deb
sudo dpkg -i plex*.deb
sudo systemctl enable plexmediaserver
sudo systemctl start plexmediaserver
sleep 1
;;
6_media)
#Install Simple Screen Recorder
echo "Installing Simple Screen Recorder"
sudo apt install simplescreenrecorder -yy
sleep 1
;;
7_media)
#Install OBS Studio
echo "Installing OBS-Studio"
sudo apt install obs-studio -yy
sleep 1
;;
8_media)
#Install Optical Drive Software
echo "Installing Optical Drive Software"
sudo apt install k3b asunder -yy
sudo chmod 4711 /usr/bin/cdrdao
sudo chmod 4711 /usr/bin/wodim
sleep 1
;;
9_media)
#Install SM Player
echo "Installing SMPlayer"
sudo apt install smplayer smplayer-themes -yy
sleep 1
;;
10_media)
#Install FFmpeg
echo "Install FFmpeg"
sudo apt install ffmpeg -yy
sleep 1
;;
# Section P --------------------------------Gaming-------------------------------------------------
1_gaming)
#Installing Steam
ulimit -Hn > ulimit.txt
# fix permissions for scripting
sudo chown $USER /etc/apt/sources.list.d
# add 32bit architecture
sudo dpkg --add-architecture i386
# update
sudo apt update -yy
# Install vulkan and mesa drivers
sudo apt install mesa-vulkan-drivers mesa-vulkan-drivers:i386 -yy
# Install dxvk
sudo apt install dxvk dxvk-wine32-development dxvk-wine64-development -yy
# Install Steam
sudo apt install steam -yy
# Install game mode
sudo apt install gamemode -yy
sleep 1
;;
2_gaming)
#Install Lutris
echo " Installing Lutris"
# import wine gpg key
sudo chown $User /etc/apt/sources.list
sudo chmod 755 /etc/apt/sources.list
sudo chown $User /etc/apt/sources.list.d/
sudo chmod 755 /etc/apt/sources.list.d/
sudo wget -nc https://dl.winehq.org/wine-builds/winehq.key
# add wine gpg key
sudo apt-key add winehq.key
# add wine repository
sudo touch /etc/apt/sources.list.d/wine.list
sudo echo "deb https://dl.winehq.org/wine-builds/debian buster main" > /etc/apt/sources.list.d/wine.list
# update
sudo apt update -yy
# Install wine staging
sudo apt install --install-recommends winehq-staging -yy
# Install wine-tricks
sudo apt install winetricks -yy
# Install PlayOnLinux
sudo apt install playonlinux -yy
# Import lutris repository key
sudo wget https://download.opensuse.org/repositories/home:/strycore/Debian_9.0/Release.key
# Add key with apt
sudo apt-key add Release.key
# Add Lutris Repository
sudo touch /etc/apt/sources.list.d/lutris.list
sudo echo "deb http://download.opensuse.org/repositories/home:/strycore/Debian_9.0/ ./" > /etc/apt/sources.list.d/lutris.list
$update
sudo apt install lutris -yy
# Change Permissions to Root
sudo chown root:root /etc/apt/sources.list
sudo chmod 600 /etc/apt/sources.list
sudo chown root:root /etc/apt/sources.list.d/
sudo chmod 600 /etc/apt/sources.list.d/
sleep 1
;;
# Section Q -----------------------------------File Explorers-----------------------------------------------
1_files)
#Install Nemo
echo "Installing Nemo"
sudo apt install nemo nemo-python nemo-data nemo-fileroller ffmpegthumbnailer nemo-nextcloud nemo-owncloud -yy
sleep 1
;;
2_files)
#Install Thunar
echo "Installing Thunar"
sudo apt install thunar thunar-data thunar-archive-plugin thunar-media-tags-plugin thunar-vcs-plugin thunar-volman ffmpegthumbnailer -yy
sleep 1
;;
3_files)
#Install Pcmanfm
echo "Installing Pcmanfm"
sudo apt install pcmanfm pcmanfm-qt ffmpegthumbnailer -yy
sleep 1
;;
4_files)
#Install Caja
echo "Installing Caja"
sudo apt install caja caja-common caja-actions caja-actions-common caja-admin caja-extensions-common caja-image-converter caja-open-terminal caja-sendto caja-share caja-wallpaper caja-xattr-tage caja-rename caja-seahorse caja-nextcloud caja-owncloud caja-dropbox ffmpegthumbnailer -yy
sleep 1
;;
5_files)
#Install Nautilus
echo "Installing Nautilus"
sudo apt install nautilus nautilus-data nautilus-admin nautilus-compare nautilus-hide nautilus-scripts-manager nautilus-sendto nautilus-share ffmpegthumbnailer -yy
sleep 1
;;
6_files)
#Install Dolphin
echo "Installing Dolphin"
# BUGFIX: package name was misspelled 'doplhin'.
sudo apt install dolphin dolphin-dev ffmpegthumbnailer -yy
sleep 1
;;
# Section R ----------------------------------Desktop Customization---------------------------------------------
1_desktop)
#Install nitrogen
echo "Installing nitrogen"
sudo apt install nitrogen -yy
sleep 1
;;
2_desktop)
#Install Variety
echo "Installing Variety"
sudo apt install variety -yy
sleep 1
;;
3_desktop)
#Install LX Appearance
echo "Installing LXAppearance"
sudo apt install lxappearance -yy
sleep 1
;;
4_desktop)
#Install conky
echo "Installing Conky"
sudo apt install conky-all
sleep 1
;;
5_desktop)
#Make qt match gtk
echo "Make QT match GTK Themes"
sudo chown $User /etc/environment
sudo chmod 755 /etc/environment
sudo echo "QT_QPA_PLATFORMTHEME=gtk2" >> /etc/environment
sudo chown root:root /etc/environment
sudo chmod 600 /etc/environment
sleep 1
;;
6_desktop)
#Install Vimix Theme
echo "Installing Vimix Theme"
#Install git
sudo apt install git -yy
#Clone the git Repo
echo "Cloning the Git Repo"
git clone https://github.com/vinceliuice/vimix-gtk-themes
cd vimix-gtk-themes
./Install
cd ..
sudo rm -r vimix*
sleep 1
;;
7_desktop)
#Install Adapta Theme
echo "Installing Adapta Themes"
sudo apt install adapta-gtk-theme -yy
sleep 1
;;
8_desktop)
# Install polybar
echo "installing Dependencies"
sudo apt install cmake cmake-data libcairo2-dev libxcb1-dev libxcb-ewmh-dev -yy
sudo apt install libxcb-icccm4-dev libxcb-image0-dev libxcb-randr0-dev libxcb-util0-dev -yy
sudo apt install libxcb-xkb-dev pkg-config python-xcbgen xcb-proto libxcb-xrm-dev -yy
sudo apt install libasound2-dev libmpdclient-dev libiw-dev libcurl4-openssl-dev -yy
sudo apt install libpulse-dev ccache libxcb-composite0 libxcb-composite0-dev -yy
# Download from polybar from github
echo "Downloading Polybar form Github"
git clone https://github.com/jaagr/polybar.git
# Change directories into polybar
cd polybar
echo "Installing Polybar"
./build.sh
;;
# Section S -----------------------------------File Systems-------------------------------------------
1_filesystem)
#Install ZFS
echo " Make sure you have the contrib and non-free repos enabled and updated"
sleep 1
echo "Installing the headers for your kernel"
sudo apt install linux-headers-"$(uname -r)" linux-image-amd64 -yy
echo "Installing the ZFS DKMS and Utilities"
sudo apt install zfs-dkms zfsutils-linux -yy
echo "Installing kernel modules"
sudo modprobe zfs
echo "Enabling ZFS Services"
sudo systemctl enable zfs.target
sudo systemctl enable zfs-import-cache
sudo systemctl enable zfs-mount
sudo systemctl enable zfs-import.target
sudo systemctl enable zfs-import-scan
sudo systemctl enable zfs-share
echo "Starting ZFS Services"
sudo systemctl start zfs.target
sudo systemctl start zfs-import-cache
sudo systemctl start zfs-mount
sudo systemctl start zfs-import.target
sudo systemctl start zfs-import-scan
sudo systemctl start zfs-share
sleep 1
;;
2_filesystem)
#Install Exfat
echo "Installing Exfat Utilities"
sudo apt install exfat-utils -yy
sleep 1
;;
# Section T ------------------------------------Virtualization------------------------------------------
1_virtual)
#Install Virtualbox
echo "wget is needed... installing"
sudo apt install wget -yy
echo "Setting up the Repository"
wget -q https://www.virtualbox.org/download/oracle_vbox_2016.asc -O- | sudo apt-key add -
wget -q https://www.virtualbox.org/download/oracle_vbox.asc -O- | sudo apt-key add -
echo "Adding Repo to Sources.list"
sudo chown $USER /etc/apt/sources.list.d/
sudo chmod 755 /etc/apt/sources.list.d/
sudo echo "deb http://download.virtualbox.org/virtualbox/debian bionic contrib" >> /etc/apt/sources.list.d/virtualbox.list
echo "Running Updates"
sudo apt update -yy
echo "Installing Virtualbox"
sudo apt install virtualbox-6.0 -yy
echo "Downloading Extension Pack"
wget -q https://download.virtualbox.org/virtualbox/6.0.10/Oracle_VM_VirtualBox_Extension_Pack-6.0.10.vbox-extpack
echo "Adding user to the vbox user group"
sudo usermod -aG vboxusers $User
sleep 1
;;
2_virtual)
#Install Gnome Boxes
echo "Installing Gnome Boxes"
sudo apt install gnome-boxes -yy
sleep 1
;;
V)
#Cleanup
echo "Cleaning up"
# Refresh, upgrade, and drop orphaned packages after all selections ran.
sudo apt update -yy
sudo apt upgrade -yy
sudo apt autoremove -yy
# Remove the temporary working folder created at script start.
rm -rf $tmp_dir
;;
esac
done
fi
| true
|
541bee75cd21533b98056e246c33000baf40db00
|
Shell
|
vyxxr/afetch
|
/afetch
|
UTF-8
| 2,242
| 3.71875
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env sh
#
# by mhess
# afetch: tiny system-info banner (distro, kernel, shell, packages, WM, term).
# Distro name: first NAME= line of /etc/os-release, optional quotes stripped.
distro="$(sed -rn '/^NAME/ s/^NAME="?([a-z A-Z]+).?$/\1/p' /etc/os-release)"
kernel="$(uname -r)"
# NOTE(review): $SHELL is unquoted; fine for conventional shell paths.
shell="$(basename $SHELL)"
# PID of this script's parent, read from /proc -- used below to find the
# terminal emulator's process name.
get_ppid="$(sed -rn 's/^PPid:\s+([[:digit:]]+)/\1/p' "/proc/${PPID}/status")"
get_term="$(cat "/proc/${get_ppid}/comm")"
# term: print the terminal emulator's name.
# Reads $get_term (the parent process's command name); when launched from
# a login/init process, falls back to the tty device path instead.
term() {
	case "$get_term" in
		login|Login|init\(init\))
			# Fix: print through '%s' so the tty path is never
			# interpreted as a printf format string.
			printf '%s' "$(tty)"
		;;
		# Fix: same for the process name (a '%' in it would previously
		# have been treated as a conversion specifier).
		*) printf '%s' "${get_term}" ;;
	esac
}
# packages: print the number of installed packages.
# Probes a list of known package-manager binaries with `type`; resolved
# entries contain a path ('/'), and the last path component names the
# manager. Dispatch on that name to the matching count command.
# NOTE(review): cave/gaze are probed but have no case arm, so those
# systems print nothing -- confirm whether that is intentional.
packages() {
	PACK=$(type apt xbps-query emerge pacman nix-env rpm apk cave gaze 2>/dev/null | grep '\/')
	PACK="${PACK##*/}"
	case ${PACK} in
		apt) dpkg -l | wc -l ;;
		xbps-query) xbps-query -l | wc -l ;;
		emerge) ls -d /var/db/pkg/*/* | wc -l ;;
		pacman) pacman -Q | wc -l ;;
		nix-env) ls -d -1 /nix/store/ | wc -l ;;
		rpm) rpm -qa | wc -l ;;
		apk) apk info | wc -l ;;
	esac
}
# wm: print the running window manager's name, or 'none'.
# Strategy: ask the X server via xprop when available; otherwise scan the
# process table for a known window-manager process name.
wm() {
	# No X display -> nothing to detect (guard clause).
	if [ -z "$DISPLAY" ]; then
		printf 'none'
		return
	fi
	detected=
	# Preferred path: query the EWMH supporting-WM window via xprop.
	if type xprop 1>/dev/null 2>&1; then
		win_id="$(xprop -root -notype _NET_SUPPORTING_WM_CHECK)"
		win_id="${win_id##* }"
		detected="$(xprop -id "$win_id" -notype -len 100 -f _NET_WM_NAME 8t)"
		detected="$(printf '%s\n' "$detected" | sed -nr '/_NET_WM_NAME/ s/^.*"([a-zA-Z0-9 ()]+)"/\1/p')"
		printf "$detected"
	fi
	# Fallback: grep the process list for any known WM name.
	known="(i3 bspwm dwm xmonad windowchef openbox fluxbox spectrwm awesome dwm 2bwm herbstluftwm monsterwm fvwm)"
	pattern="$(printf "$known" | sed 's/ /|/g')"
	if [ -z "$detected" ]; then
		ps -e | grep -v 'grep' | grep -m 1 -o -E "$pattern" || printf 'none'
	fi
}
# get random number for color
# awk picks an integer in [1,6], used as the ANSI foreground color index.
rand="$(awk -v min=1 -v max=6 'BEGIN{srand(); print int(min+rand()*(max-min+1))}')"
# f1: random accent color; f2: white; t: reset; b: bold unless -b given.
f1=$(printf '%b' "\33[3${rand}m")
f2=$(printf '%b' "\33[37m")
t=$(printf '%b' "\33[0m")
[ "$1" = "-b" ] && b="" || b=$(printf '%b' "\33[1m")
# start
# The heredoc below expands the color variables and calls the helper
# functions (packages/wm/term) inline; its body must stay literal.
clear
cat << EOF
${b}${f1} .---.
${b}${f1} / \\ DISTRO: ${t}${f2}${distro}
${b}${f1} \ @-@ / KERNEL: ${t}${f2}${kernel}
${b}${f1} /\` \\_/ \`\\ SHELL: ${t}${f2}${shell}
${b}${f1} / / \\ \\ PACKAGES: ${t}${f2}$(packages)
${b}${f1} \ \ / / WM: ${t}${f2}$(wm)
${b}${f1} / \_> <_/ \\ TERM: ${t}${f2}$(term)
${b}${f1} \__/'---'\__/${t}
EOF
| true
|
ec3994881d0fb27c386b677e3127001468ece5cc
|
Shell
|
weilaidb/PythonExample
|
/regularexpress/home/weilaidb/software/git-2.0.5/git-rebase.sh
|
UTF-8
| 8,667
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/sh
#
# Copyright (c) 2005 Junio C Hamano.
#
SUBDIRECTORY_OK=Yes
OPTIONS_KEEPDASHDASH=
OPTIONS_STUCKLONG=t
OPTIONS_SPEC="\
git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] [<upstream>] [<branch>]
git rebase [-i] [options] [--exec <cmd>] [--onto <newbase>] --root [<branch>]
git-rebase --continue | --abort | --skip | --edit-todo
--
Available options are
v,verbose! display a diffstat of what changed upstream
q,quiet! be quiet. implies --no-stat
autostash! automatically stash/stash pop before and after
fork-point use 'merge-base --fork-point' to refine upstream
onto=! rebase onto given branch instead of upstream
p,preserve-merges! try to recreate merges instead of ignoring them
s,strategy=! use the given merge strategy
no-ff! cherry-pick all commits, even if unchanged
m,merge! use merging strategies to rebase
i,interactive! let the user edit the list of commits to rebase
x,exec=! add exec lines after each commit of the editable list
k,keep-empty preserve empty commits during rebase
f,force-rebase! force rebase even if branch is up to date
X,strategy-option=! pass the argument through to the merge strategy
stat! display a diffstat of what changed upstream
n,no-stat! do not show diffstat of what changed upstream
verify allow pre-rebase hook to run
rerere-autoupdate allow rerere to update index with resolved conflicts
root! rebase all reachable commits up to the root(s)
autosquash move commits that begin with squash!/fixup! under -i
committer-date-is-author-date! passed to 'git am'
ignore-date! passed to 'git am'
whitespace=! passed to 'git apply'
ignore-whitespace! passed to 'git apply'
C=! passed to 'git apply'
S,gpg-sign? GPG-sign commits
Actions:
continue! continue
abort! abort and check out the original branch
skip! skip current patch and continue
edit-todo! edit the todo list during an interactive rebase
"
. git-sh-setup
. git-sh-i18n
set_reflog_action rebase
require_work_tree_exists
cd_to_toplevel
LF='
'
ok_to_skip_pre_rebase=
resolvemsg="
$(gettext 'When you have resolved this problem, run "git rebase --continue".
If you prefer to skip this patch, run "git rebase --skip" instead.
To check out the original branch and stop rebasing, run "git rebase --abort".')
"
unset onto
cmd=
strategy=
strategy_opts=
do_merge=
merge_dir="$GIT_DIR"/rebase-merge
apply_dir="$GIT_DIR"/rebase-apply
verbose=
diffstat=
test "$(git config --bool rebase.stat)" = true && diffstat=t
autostash="$(git config --bool rebase.autostash || echo false)"
fork_point=auto
git_am_opt=
rebase_root=
force_rebase=
allow_rerere_autoupdate=
# Non-empty if a rebase was in progress when 'git rebase' was invoked
in_progress=
# One of
type=
# One of
state_dir=
# One of , as parsed from command line
action=
preserve_merges=
autosquash=
keep_empty=
test "$(git config --bool rebase.autosquash)" = "true" && autosquash=t
gpg_sign_opt=
read_basic_state ()
write_basic_state ()
output ()
move_to_original_branch ()
cmd_live_rebase="git rebase (--continue | --abort | --skip)"
cmd_clear_stale_rebase="rm -fr \"$state_dir\""
die "
$(eval_gettext 'It seems that there is already a $state_dir_base directory, and
I wonder if you are in the middle of another rebase. If that is the
case, please try
$cmd_live_rebase
If that is not the case, please
$cmd_clear_stale_rebase
and run me again. I am stopping in case you still have something
valuable there.')"
fi
if test -n "$rebase_root" && test -z "$onto"
then
test -z "$interactive_rebase" && interactive_rebase=implied
fi
if test -n "$interactive_rebase"
then
type=interactive
state_dir="$merge_dir"
elif test -n "$do_merge"
then
type=merge
state_dir="$merge_dir"
else
type=am
state_dir="$apply_dir"
fi
if test -z "$rebase_root"
then
case "$#" in
0)
if ! upstream_name=$(git rev-parse --symbolic-full-name \
--verify -q @ 2>/dev/null)
then
. git-parse-remote
error_on_missing_default_upstream "rebase" "rebase" \
"against" "git rebase <branch>"
fi
test "$fork_point" = auto && fork_point=t
;;
*) upstream_name="$1"
if test "$upstream_name" = "-"
then
upstream_name="@"
fi
shift
;;
esac
upstream=$(peel_committish "$") ||
die "$(eval_gettext "invalid upstream \$upstream_name")"
upstream_arg="$upstream_name"
else
if test -z "$onto"
then
empty_tree=`git hash-object -t tree /dev/null`
onto=`git commit-tree $empty_tree </dev/null`
squash_onto="$onto"
fi
unset upstream_name
unset upstream
test $# -gt 1 && usage
upstream_arg=--root
fi
# Make sure the branch to rebase onto is valid.
onto_name=$
case "$onto_name" in
*...*)
if left=$ right=$ &&
onto=$(git merge-base --all $ $)
then
case "$onto" in
?*"$LF"?*)
die "$(eval_gettext "\$onto_name: there are more than one merge bases")"
;;
'')
die "$(eval_gettext "\$onto_name: there is no merge base")"
;;
esac
else
die "$(eval_gettext "\$onto_name: there is no merge base")"
fi
;;
*)
onto=$(peel_committish "$onto_name") ||
die "$(eval_gettext "Does not point to a valid commit: \$onto_name")"
;;
esac
# If the branch to rebase is given, that is the branch we will rebase
# $branch_name -- branch being rebased, or HEAD (already detached)
# $orig_head -- commit object name of tip of the branch before rebasing
# $head_name -- refs/heads/<that-branch> or "detached HEAD"
switch_to=
case "$#" in
1)
# Is it "rebase other $branchname" or "rebase other $commit"?
branch_name="$1"
switch_to="$1"
if git show-ref --verify --quiet -- "refs/heads/$1" &&
orig_head=$(git rev-parse -q --verify "refs/heads/$1")
then
head_name="refs/heads/$1"
elif orig_head=$(git rev-parse -q --verify "$1")
then
head_name="detached HEAD"
else
die "$(eval_gettext "fatal: no such branch: \$branch_name")"
fi
;;
0)
# Do not need to switch branches, we are already on it.
if branch_name=`git symbolic-ref -q HEAD`
then
head_name=$branch_name
branch_name=`expr "z$branch_name" : 'zrefs/heads/\(.*\)'`
else
head_name="detached HEAD"
branch_name=HEAD ;# detached
fi
orig_head=$(git rev-parse --verify HEAD) || exit
;;
*)
die "BUG: unexpected number of arguments left to parse"
;;
esac
if test "$fork_point" = t
then
new_upstream=$(git merge-base --fork-point "$upstream_name" \
"$")
if test -n "$new_upstream"
then
upstream=$new_upstream
fi
fi
if test "$autostash" = true && ! (require_clean_work_tree) 2>/dev/null
then
stash_sha1=$(git stash create "autostash") ||
die "$(gettext 'Cannot autostash')"
mkdir -p "$state_dir" &&
echo $stash_sha1 >"$state_dir/autostash" &&
stash_abbrev=$(git rev-parse --short $stash_sha1) &&
echo "$(eval_gettext 'Created autostash: $stash_abbrev')" &&
git reset --hard
fi
require_clean_work_tree "rebase" "$(gettext "Please commit or stash them.")"
# Now we are rebasing commits $upstream..$orig_head (or with --root,
# everything leading up to $orig_head) on top of $onto
# Check if we are already based on $onto with linear history,
# but this should be done only when upstream and onto are the same
# and if this is not an interactive rebase.
mb=$(git merge-base "$onto" "$orig_head")
if test "$type" != interactive && test "$upstream" = "$onto" &&
test "$mb" = "$onto" &&
# linear history?
! (git rev-list --parents "$onto".."$orig_head" | sane_grep " .* ") > /dev/null
then
if test -z "$force_rebase"
then
# Lazily switch to the target branch if needed...
test -z "$switch_to" ||
GIT_REFLOG_ACTION="$GIT_REFLOG_ACTION: checkout $switch_to" \
git checkout "$switch_to" --
say "$(eval_gettext "Current branch \$branch_name is up to date.")"
finish_rebase
exit 0
else
say "$(eval_gettext "Current branch \$branch_name is up to date, rebase forced.")"
fi
fi
# If a hook exists, give it a chance to interrupt
run_pre_rebase_hook "$upstream_arg" "$@"
if test -n "$diffstat"
then
if test -n "$verbose"
then
echo "$(eval_gettext "Changes from \$mb to \$onto:")"
fi
# We want color (if set), but no pager
GIT_PAGER='' git diff --stat --summary "$mb" "$onto"
fi
test "$type" = interactive && run_specific_rebase
# Detach HEAD and reset the tree
say "$(gettext "First, rewinding head to replay your work on top of it...")"
GIT_REFLOG_ACTION="$GIT_REFLOG_ACTION: checkout $onto_name" \
git checkout -q "$onto^0" || die "could not detach HEAD"
git update-ref ORIG_HEAD $orig_head
# If the $onto is a proper descendant of the tip of the branch, then
# we just fast-forwarded.
if test "$mb" = "$orig_head"
then
say "$(eval_gettext "Fast-forwarded \$branch_name to \$onto_name.")"
move_to_original_branch
finish_rebase
exit 0
fi
if test -n "$rebase_root"
then
revisions="$onto..$orig_head"
else
revisions="$upstream..$orig_head"
fi
run_specific_rebase
| true
|
a5c4769ef0d0d91021489bf662a63b38d9b418a5
|
Shell
|
hgxl64/mariadb-benchmarks
|
/regression-ES/run_one.sh
|
UTF-8
| 495
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/bash
# Run the benchmark for the first revision in revlist.txt that is not yet
# listed in done.txt, logging start/finish timestamps to log.txt and
# recording the revision in done.txt when finished.
set -e

LOG="log.txt"
DONE="done.txt"
TODO="revlist.txt"

# Read revisions via redirection (no `cat |` subshell) so `break` and any
# state stay in the current shell; `read -r` keeps backslashes literal.
while read -r r
do
    # Skip revisions already processed (fixed-string match; fgrep is
    # deprecated in favour of grep -F).
    grep -qF -- "$r" "$DONE" && continue
    NOW=$(date "+%Y-%m-%d %H:%M:%S")
    echo "[$NOW] start $r" >> "$LOG"
    echo "running $r"
    ./runme.sh "mariadb-10.5-ES-$r"
    NOW=$(date "+%Y-%m-%d %H:%M:%S")
    echo "[$NOW] finish $r" >> "$LOG"
    ./digest.pl -ta 5 > "10.5-ES $NOW".txt
    ./gdigest.pl -ta 5 -ba "10.5-ES $NOW"
    rm "10.5-ES $NOW.data"
    rm "10.5-ES $NOW.gnuplot"
    echo "$r" >> "$DONE"
    # Deliberately process only one revision per invocation.
    break
done < "$TODO"
| true
|
6feaa1c4e54bf3849deae1af44a949e51c674cb3
|
Shell
|
adlibre/adlibre-backup
|
/bin/list-backups.sh
|
UTF-8
| 1,042
| 4.0625
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/usr/bin/env bash
# Adlibre Backup - List backups (snapshots) for one or more hosts.
#
# Usage:
#   list-backups.sh <host> [<host> ...]
#   list-backups.sh --all
#
# Output, one line per snapshot:
#   <host> <snapshot> <expiry> <status> "<annotation>"
CWD="$(dirname "$0")/"
# Source Config
. "${CWD}../etc/backup.conf"
# Source Functions
. "${CWD}functions.sh";
HOSTS_DIR="/${POOL_NAME}/hosts/"
# Snapshot metadata is only readable by root.
if [ "$(whoami)" != "root" ]; then
    echo "Error: Must run as root."
    exit 99
fi
if [ "$1" == '--all' ]; then
    HOSTS=$(ls "${HOSTS_DIR}")
elif
    [ "$1" == '' ]; then
    echo "Please specify host or hostnames name as the arguments, or --all."
    exit 99
else
    HOSTS=$@
fi
for host in $HOSTS; do
    if [ -d "${HOSTS_DIR}${host}/.${POOL_TYPE}/snapshot" ]; then
        SNAPSHOTS=$(find "${HOSTS_DIR}${host}/.${POOL_TYPE}/snapshot" -maxdepth 1 -mindepth 1 | sort)
        for snapshot in $SNAPSHOTS; do
            SNAPSHOT=$(basename "$snapshot")
            # Metadata files may be absent; suppress the error and leave
            # the field empty in that case.
            EXPIRY=$(cat "$snapshot/c/EXPIRY" 2> /dev/null)
            ANNOTATION=$(cat "$snapshot/c/ANNOTATION" 2> /dev/null)
            STATUS=$(cat "$snapshot/l/STATUS" 2> /dev/null)
            echo "$host $SNAPSHOT $EXPIRY $STATUS \"$ANNOTATION\""
        done
    fi
done
exit 0
| true
|
4e9fecd0b14635617643511ec49badb61dc42a7b
|
Shell
|
diegofps/ngd_docker_images
|
/bin/ngd_doctor.sh
|
UTF-8
| 504
| 4.03125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Pre-accept SSH host keys ("ECDSA issues") for a set of nodes so later
# unattended ssh connections do not stall on the authenticity prompt.
#
# Usage: ngd_doctor.sh [nodes_file]
#   Without an argument the node list is derived from local tap
#   interfaces (tapN -> nodeN); otherwise it is read from nodes_file.
if [ "`whoami`" = 'root' ]
then
    echo "You should not run this script as root, aborting"
    # Fix: abort with a failure status (was exit 0, which reported
    # success despite aborting).
    exit 1
fi
NODES_FILEPATH=$1
if [ "$NODES_FILEPATH" = "" ]
then
    NODES=`ifconfig | grep tap | sed 's/tap\([0-9]\+\).\+/node\1/'`
else
    NODES=`cat "$NODES_FILEPATH"`
fi
check_ecdsa()
{
    NODE=$1
    echo "Fixing possible ECDSA issues for node $NODE..."
    # BatchMode avoids password prompts; StrictHostKeyChecking=no
    # accepts and records the host key on first contact.
    ssh "$NODE" -o BatchMode=yes -o StrictHostKeyChecking=no echo > /dev/null
}
# Probe all nodes in parallel, then wait for every probe to finish.
for node in $NODES ; do
    check_ecdsa "$node" &
done
wait
echo "Done!"
| true
|
34e23a83ac4a9e4c564aca3e6901dfa7f9d2c7d0
|
Shell
|
nagyist/jitsi
|
/resources/install/debian/jitsi
|
UTF-8
| 473
| 3.25
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
#!/bin/sh
# Jitsi launcher: start the Jitsi desktop client, showing a splash
# screen unless --splash=no is passed among the arguments.
SHOW_SPLASH=true
SPLASH_ARG=""
# Scan all arguments for the splash opt-out flag.
for arg in "$@"; do
    if [ "$arg" = "--splash=no" ]; then
        SHOW_SPLASH=false
    fi
done
SCDIR=/usr/share/jitsi
if $SHOW_SPLASH; then
    SPLASH_ARG="-splash:${SCDIR}/splash.gif"
fi
# SPLASH_ARG and JITSI_EXTRA_ARGS are intentionally unquoted so an empty
# value disappears and extra args word-split into separate options.
# Fix: continuation backslash added after the main class so "$@" is
# passed to java instead of being executed as a separate command.
java \
    -cp "${SCDIR}/lib/*:${SCDIR}/config/" \
    --add-opens=java.base/jdk.internal.loader=ALL-UNNAMED \
    --add-opens=java.base/java.lang=ALL-UNNAMED \
    ${SPLASH_ARG} \
    ${JITSI_EXTRA_ARGS} \
    net.java.sip.communicator.launcher.Jitsi \
    "$@"
| true
|
6ff1787bcd1e7a35248b84b7564343dd3f0f0ca5
|
Shell
|
JoanClaret/pch-chart
|
/datesToCommits.sh
|
UTF-8
| 141
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/sh
# For each date in dates.txt (one per line), create a git commit
# back-dated to that date by appending a space to a scratch file `f`.
while read -r date
do
    echo "- $date -"
    # `echo -n` is not portable under /bin/sh; printf writes the single
    # space without a trailing newline on every shell.
    printf ' ' >> f
    git add f
    git commit --date="$date" -m "$date"
done < dates.txt
rm f
| true
|
d668c5ccfad29caa1f5915b193e22d84628d87d6
|
Shell
|
arnabkc/simple-spam-filter
|
/spamfilter/git_push.sh
|
UTF-8
| 432
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/bash
# Initialise the repository (idempotent) and push everything to the
# remote. An optional commit message may be passed as $1; it defaults to
# "message" to preserve the original behaviour.
# STEP 1: Initiate git project - first time
git init
# STEP 2: Add local project to git. This will also add new file to local repo
git add .
# STEP 3: Commit adds to local repo (message from $1 when given)
git commit -m "${1:-message}"
# STEP 4: Set up new remote repository - First time. This is my github URL
git remote add origin https://github.com/arnabkc/simple-spam-filter.git
# STEP 5: Push code to remote repository
git push -u origin master
| true
|
354858b885794254bc339ef38982ba458af1c7f7
|
Shell
|
ggainey/pulp_startup
|
/1794647/docopy.bsh
|
UTF-8
| 3,006
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
# Poll a Pulp2 task until it is finished.
# Exits the whole script with status 1 when the task fails or is
# canceled; returns normally once it reaches "finished".
wait_until_task_finished() {
    local task_url=$1
    local response state
    echo "Polling the task until it has reached a final state."
    while :; do
        response=$(curl -k -s -u admin:admin --cert ~/.pulp/user-cert.pem -X GET $task_url)
        state=$(jq -r .state <<< ${response})
        if [ "${state}" = "finished" ]; then
            echo "$task_url complete."
            break
        elif [ "${state}" = "failed" ] || [ "${state}" = "canceled" ]; then
            echo "Task in final state: ${state}"
            exit 1
        else
            echo "Still waiting..."
            sleep 1
        fi
    done
}
# Repositories involved in the copy test.
BASE='rhel8-baseos'
STREAM='rhel8-appstream'
DEST_CONSERVATIVE='destination-perl-FCGI-conservative'
DEST='destination-perl-FCGI-nonconservative'
#COPY_MODULE='inkscape'
COPY_MODULE='perl-FCGI'
# NOTE(review): FIND_CMD is defined but never used -- the identical mongo
# query string is duplicated inline below. Candidate for cleanup.
FIND_CMD="db.units_modulemd.find({name: '$COPY_MODULE'}, {_id:1}).map(function(item){ return item._id; })"
# insure DEST exists (throws error if it's there already)
pulp-admin rpm repo create --repo-id=$DEST
pulp-admin rpm repo create --repo-id=$DEST_CONSERVATIVE
# find all the module-ids for the module we want to test with
ID_LIST=$(mongo pulp_database --eval "db.units_modulemd.find({name: '$COPY_MODULE'}, {_id:1}).map(function(item){ return item._id; })" --quiet)
# transform the output into a comma-separated *quoted* list
ID_LIST=$(printf "\"%s\"" "${ID_LIST//,/\",\"}")
echo $ID_LIST
# do the multi-src copy, recursive and then recursive_conservative
# recursive
TASK_OUTPUT=$(curl -k -u admin:admin --cert ~/.pulp/user-cert.pem \
    -d "{\"source_repo_id\":\"$STREAM\",\"criteria\":{\"type_ids\":[\"modulemd\"],\"filters\":{\"association\":{\"unit_id\":{\"\$in\":[$ID_LIST]}}}},\"override_config\":{\"recursive\":true,\"additional_repos\":{\"$BASE\":\"$DEST\"}}}" \
    -H "Content-Type: application/json" \
    -X POST https://localhost/pulp/api/v2/repositories/$DEST/actions/associate/)
# The associate call returns a spawned task; poll it to completion.
TASK_URL=$(echo $TASK_OUTPUT | jq -r '.spawned_tasks[]._href')
echo "Task url : " $TASK_URL
if [ -z "$TASK_URL" ]; then echo "NO TASK?!?"; exit; fi
wait_until_task_finished https://localhost$TASK_URL
# recursive_conservative
TASK_OUTPUT=$(curl -k -u admin:admin --cert ~/.pulp/user-cert.pem \
    -d "{\"source_repo_id\":\"$STREAM\",\"criteria\":{\"type_ids\":[\"modulemd\"],\"filters\":{\"association\":{\"unit_id\":{\"\$in\":[$ID_LIST]}}}},\"override_config\":{\"recursive_conservative\":true,\"additional_repos\":{\"$BASE\":\"$DEST_CONSERVATIVE\"}}}" \
    -H "Content-Type: application/json" \
    -X POST https://localhost/pulp/api/v2/repositories/$DEST_CONSERVATIVE/actions/associate/)
echo "TASK_OUTPUT : " $TASK_OUTPUT
TASK_URL=$(echo $TASK_OUTPUT | jq -r '.spawned_tasks[]._href')
echo "Task url : " $TASK_URL
if [ -z "$TASK_URL" ]; then echo "NO TASK?!?"; exit; fi
wait_until_task_finished https://localhost$TASK_URL
pulp-admin repo list
| true
|
a6cd4ee21a7cd56e8862d6ee253a0721d32fd8db
|
Shell
|
shuo-wu/longhorn-engine
|
/package/launch-simple-longhorn
|
UTF-8
| 1,377
| 3.5
| 4
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
#!/bin/bash
# Launch a single-replica Longhorn volume: start tgtd, one replica
# process and one controller process via longhorn-instance-manager.
set -x
set -e
# Expose the host's /dev so the frontend block device appears on the host.
mount --rbind /host/dev /dev
volume=$1
size=$2
frontend=$3
if [ -z "$volume" ]
then
    # Fix: usage now documents the <size> positional that $2 actually is.
    echo "Usage: launch-simple-longhorn <volume_name> <size> <frontend> "
    echo "<volume_name>: Required. User defined volume name"
    echo "<size>: Optional. By default '1g'. "
    echo "<frontend>: Optional. By default 'tgt-blockdev'. "
    # Fix: 'exit -1' is not portable; use an explicit failure status.
    exit 1
fi
if [ -z "$size" ]
then
    echo Use default size 1g
    size="1g"
fi
if [ -z "$frontend" ]
then
    echo Use default frontend TGT block device
    frontend="tgt-blockdev"
fi
# Wait for the instance manager gRPC endpoint, then create the replica
# and controller processes.
function start() {
    # Probing loops until ready; failures here must not kill the script.
    set +e
    while true;
    do
        /usr/local/bin/grpc_health_probe -addr localhost:8500
        if [[ $? -eq 0 ]];
        then
            echo longhorn instance manager is ready
            break;
        fi
        sleep 1
    done
    set -e
    tgtd -f 2>&1 | tee /var/log/tgtd.log &
    longhorn-instance-manager process create --name "$volume-r" --binary /usr/local/bin/longhorn --port-count 15 --port-args "--listen,localhost:" -- replica /volume/ "--size" "$size"
    # wait for the replica to be started
    sleep 5
    longhorn-instance-manager process create --name "$volume-e" --binary /usr/local/bin/longhorn --port-count 1 --port-args "--listen,localhost:" -- controller "$volume" --frontend "$frontend" "--size" "$size" "--current-size" "$size" --replica tcp://localhost:10000
}
# Bring the processes up in the background while the daemon takes over
# this process image.
start &
exec longhorn-instance-manager daemon
| true
|
ab4e074b161096400c75e50b91362884446b3226
|
Shell
|
franchuterivera/automlbenchmark
|
/frameworks/autoxgboost/setup.sh
|
UTF-8
| 1,499
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Install R, system dependencies and the autoxgboost package for the
# AutoML benchmark. $1 = version/ref (default "stable"), $2 = GitHub repo.
HERE=$(dirname "$0")
VERSION=${1:-"stable"}
REPO=${2:-"ja-thomas/autoxgboost"}
# currently both stable and latest maps to master branch
if [[ "$VERSION" == "latest" || "$VERSION" == "stable" ]]; then
    VERSION="master"
fi
. ${HERE}/../shared/setup.sh "${HERE}"
# Only set up the CRAN apt repository on Debian/Ubuntu-like systems.
if [[ -x "$(command -v apt-get)" ]]; then
    SUDO apt-get update
    #SUDO apt-get install -y software-properties-common apt-transport-https libxml2-dev
    #SUDO apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 51716619E084DAB9
    #SUDO add-apt-repository 'deb [arch=amd64,i386] https://cran.rstudio.com/bin/linux/ubuntu bionic-cran35/'
    SUDO apt-get install -y software-properties-common dirmngr
    SUDO apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9
    SUDO add-apt-repository "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/"
    SUDO apt-get update
    SUDO apt-get install -y r-base r-base-dev
    SUDO apt-get install -y libgdal-dev libproj-dev
    SUDO apt-get install -y libssl-dev libcurl4-openssl-dev
    SUDO apt-get install -y libcairo2-dev libudunits2-dev
fi
#PIP install --no-cache-dir -r $HERE/requirements.txt
Rscript -e 'options(install.packages.check.source="no"); install.packages(c("remotes", "mlr", "mlrMBO", "mlrCPO", "farff", "GenSA", "rgenoud", "xgboost"), repos="https://cloud.r-project.org/")'
Rscript -e 'remotes::install_github("'"${REPO}"'", ref="'"${VERSION}"'")'
# Record the installed version; the sed strips R's typographic quotes.
Rscript -e 'packageVersion("autoxgboost")' | awk '{print $2}' | sed "s/[‘’]//g" >> "${HERE}/.installed"
| true
|
2dfc5bd2cf3c95293ee39f04612df9a91f39b82b
|
Shell
|
Le0nX/StudyX
|
/Bash/case3.sh
|
UTF-8
| 271
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# Ask whether it is morning and greet accordingly.
# Accepts exactly yes/Yes/YES/y for morning and anything starting with
# n or N for afternoon; any other answer is rejected with status 1.
echo "Is it morning? Enter yes or no."
read answer
if [[ "$answer" == yes || "$answer" == Yes || "$answer" == YES || "$answer" == y ]]; then
    echo "Good morning"
    echo "Bright morning"
elif [[ "$answer" == [nN]* ]]; then
    echo "Good afternoon"
else
    echo "Sorry. Enter yes or no."
    exit 1
fi
exit 0
| true
|
830267086f0c4261ce222d3347b490e20480ee50
|
Shell
|
BenDoan/scripts
|
/backup_to_remote.sh
|
UTF-8
| 502
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Performs an incremental backup to a remote client, and commits
# the changes to a git repository. Depends on commit_directory.sh.
#
# Usage ./backup_to_remote directory_to_backup hostname host_location
# Usage ./backup_to_remote ~/documents ben-vps /storage/documents-backup
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Quote the source path and destination so paths with spaces survive.
rsync -az "$1" "$2:$3"
sync
# Ship the helper script over stdin (no useless cat), run it remotely
# against the destination path, then remove it.
ssh "$2" "cat > /tmp/backup.sh ; chmod 755 /tmp/backup.sh ; /tmp/backup.sh $3 ; rm /tmp/backup.sh" < "$DIR/commit_directory.sh"
| true
|
ec0096627dff03676407b3acf4c8d462c39d3ba8
|
Shell
|
ytlzq0228/Public_Share_Project
|
/PRTG 自定义传感器Customer Senser/ARM开发板无线质量监测/checknetwork.sh
|
UTF-8
| 1,858
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
# Collect wireless-quality samples, hand them to a Python helper for
# formatting, and upload the results to the PRTG server via TFTP.
# (Fix: the original shebang ended in a full-width comma '、', which
# made the interpreter path invalid when the script was run directly.)
PATH=/etc:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
cd /home/pi
# Remove stale result files from a previous run.
if [ -f checkresult.txt ]; then
    rm -f checkresult.txt
fi
if [ -f "$HOSTNAME-WIFI_Check_result.txt" ]; then
    rm -f "$HOSTNAME-WIFI_Check_result.txt"
fi
if [ -f "IP-WIFI-Check-$HOSTNAME.txt" ]; then
    rm -f "IP-WIFI-Check-$HOSTNAME.txt"
fi
# Extract the wlan0 default gateway from a route line such as:
# default 10.0.192.1 0.0.0.0 UG 100 0 0 wlan0
wlangwip=$(route | grep wlan0 | grep default)
wlangwip=${wlangwip##*default}
wlangwip=${wlangwip%%0.0.0.0*}
# $wlangwip is intentionally unquoted: it still carries surrounding
# whitespace, and word-splitting trims it down to the bare IP.
ping -c 200 -i 0.1 $wlangwip >> checkresult.txt
iwconfig wlan0 >> checkresult.txt
echo '<device_temp>' >> checkresult.txt
cat /sys/class/thermal/thermal_zone0/temp >> checkresult.txt
echo '</device_temp>' >> checkresult.txt
echo '<device_eth0ip>' >> checkresult.txt
ifconfig eth0 | grep "inet " | awk '{print $2}' >> checkresult.txt
echo '</device_eth0ip>' >> checkresult.txt
echo '<device_wlanip>' >> checkresult.txt
ifconfig wlan0 | grep "inet " | awk '{print $2}' >> checkresult.txt
echo '</device_wlanip>' >> checkresult.txt
# IP of the interface holding the default route, for the per-host file.
NIC=$(route -n | grep UG | awk '{print $8}'| awk 'NR==1')
ifconfig $NIC | grep "inet " | awk '{print $2}' >> "IP-WIFI-Check-$HOSTNAME.txt"
# Format the raw sample into the PRTG result file.
python checknetwork.py checkresult.txt "$HOSTNAME-WIFI_Check_result.txt" "$HOSTNAME"
tftp 10.0.20.178 << !
put $HOSTNAME-WIFI_Check_result.txt
put IP-WIFI-Check-$HOSTNAME.txt
quit
!
tftp 10.0.20.20 << !
put IP-WIFI-Check-$HOSTNAME.txt
quit
!
rm -f checkresult.txt
rm -f "$HOSTNAME-WIFI_Check_result.txt"
rm -f "IP-WIFI-Check-$HOSTNAME.txt"
| true
|
e87862f61041d1805dbad32ee133f5646b8038b7
|
Shell
|
gtrabanco/dotfiles
|
/scripts/secrets/apply
|
UTF-8
| 965
| 3.640625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Apply (or revert) aliases for stored secret files via dotly helpers.
[[ -z "$DOTLY_PATH" ]] && exit 1
. "$DOTLY_PATH/scripts/core/_main.sh"
. "$DOTFILES_PATH/scripts/secrets/src/secrets_files_helpers.sh"
##? Apply stored secrets aliases
##?
##?
##? Usage:
##? apply [-h | --help]
##? apply [-v | --version]
##? apply revert
##?
##? Options:
##? -h --help Show this help
##? -v --version Show the program version
##? revert Unapply all aliases of stored files. Useful when
##?
##? Author:
##? Gabriel Trabanco Llano <gtrabanco@users.noreply.github.com>
##?
# NOTE(review): docs::parse is a project helper that is expected to set
# $version (and handle -h) from the ##? docopt block above -- verify.
docs::parse "$@"
SCRIPT_NAME="dot secrets load"
SCRIPT_VERSION="1.0.0"
# Print name and version
# NOTE(review): `if ${version:-}` executes the variable's value as a
# command; an *empty* expansion evaluates as success (null command), so
# this relies on docs::parse setting version to true/false -- confirm.
if ${version:-}; then
  output::write "$SCRIPT_NAME v$SCRIPT_VERSION"
  exit
fi
# Dispatch on the first positional argument.
case "$1" in
  "revert")
    output::header "Reverting secrets"
    if secrets::revert; then
      output::empty_line
      output::solution "Reverted"
    else
      output::error "Could not be reverted"
    fi
    ;;
  *)
    output::header "Applying secrets"
    secrets::apply
    ;;
esac
| true
|
fbc9ce69c0328a745a8e1172aa154d3d55e5c948
|
Shell
|
HashDefineElectronics/HDE_Environment
|
/bootstrap.sh
|
UTF-8
| 2,808
| 3.28125
| 3
|
[] |
no_license
|
#!/bin/bash
# Provision a development VM: LAMP stack, phpMyAdmin, desktop, KiCad and
# assorted tooling, then install VirtualBox guest additions and reboot.
# Use single quotes instead of double quotes to make it work with special-character passwords
PASSWORD='zattaz#'
# update / upgrade
sudo apt-get update
sudo apt-get -y upgrade
# install apache 2.5 and php 5.5
sudo apt-get install -y apache2
sudo apt-get install -y php5
# install mysql and give password to installer
sudo debconf-set-selections <<< "mysql-server mysql-server/root_password password $PASSWORD"
sudo debconf-set-selections <<< "mysql-server mysql-server/root_password_again password $PASSWORD"
sudo apt-get -y install mysql-server
# Fix: add -y so this install cannot stall unattended provisioning on an
# interactive prompt (every other install here already passes -y).
sudo apt-get -y install php5-mysql
# install phpmyadmin and give password(s) to installer
# for simplicity I'm using the same password for mysql and phpmyadmin
sudo debconf-set-selections <<< "phpmyadmin phpmyadmin/dbconfig-install boolean true"
sudo debconf-set-selections <<< "phpmyadmin phpmyadmin/app-password-confirm password $PASSWORD"
sudo debconf-set-selections <<< "phpmyadmin phpmyadmin/mysql/admin-pass password $PASSWORD"
sudo debconf-set-selections <<< "phpmyadmin phpmyadmin/mysql/app-pass password $PASSWORD"
sudo debconf-set-selections <<< "phpmyadmin phpmyadmin/reconfigure-webserver multiselect apache2"
sudo apt-get -y install phpmyadmin
# enable mod_rewrite
sudo a2enmod rewrite
# restart apache
service apache2 restart
# install git
sudo apt-get -y install git
#add the vagrant user to this groups
usermod -a -G dialout vagrant
usermod -a -G video vagrant
usermod -a -G plugdev vagrant
sudo userdel ubuntu
sudo rm -r /home/ubuntu
sudo apt-get -y install ubuntu-desktop
sudo apt-get -y install kicad
sudo apt-get -y install linux-image-extra-virtual
sudo apt-get -y install putty
sudo apt-get -y install doxygen
sudo apt-get -y install doxygen-gui
sudo apt-get -y install graphviz
sudo apt-get -y install meld
sudo apt-get -y install curl
sudo apt-get -y install wget
sudo apt-get -y install libftdi-dev
sudo apt-get -y install linux-headers-generic build-essential dkms
# Install guake
sudo apt-get -y install guake
#install the guest addition
wget http://download.virtualbox.org/virtualbox/5.0.10/VBoxGuestAdditions_5.0.10.iso
sudo mkdir /media/VBoxGuestAdditions
sudo mount -o loop,ro VBoxGuestAdditions_5.0.10.iso /media/VBoxGuestAdditions
sudo sh /media/VBoxGuestAdditions/VBoxLinuxAdditions.run
rm VBoxGuestAdditions_5.0.10.iso
sudo umount /media/VBoxGuestAdditions
sudo rmdir /media/VBoxGuestAdditions
# Fix: -p keeps a re-run of this script from failing if the directory
# already exists.
mkdir -p ~/.config/autostart
#Make the guake auto startup
GUAKE_STARTUP_FILE=$(cat <<EOF
[Desktop Entry]
Type=Application
Exec=/usr/bin/guake
Hidden=false
NoDisplay=false
X-GNOME-Autostart-enabled=true
Name[en]=Guake
Name=Guake
Comment[en]=Terminal
Comment=Terminal
EOF
)
echo "${GUAKE_STARTUP_FILE}" > ~/.config/autostart/guake.desktop
#Change the system keyboard to uk gb
setxkbmap -layout gb
sudo reboot
| true
|
09fc849af1ea13bcd7b97ba0f40ab7b7c5595d88
|
Shell
|
mxmlnkn/indexed_bzip2
|
/results/asciinema/show-benchmark.sh
|
UTF-8
| 1,738
| 2.84375
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
# Record a three-pane tmux session comparing gzip vs rapidgzip for an
# asciinema demo; the trailing comments document the post-processing.
# xfce4-terminal --geometry 94x17
# asciinema rec --overwrite rapidgzip-comparison-2.rec -c 'bash show-benchmark.sh'
# Pane layout: htop on top (pre-typing "gzip" into its search), two
# shells below with a minimal prompt for the actual commands.
tmux new-session '
( echo '"'"'^[OSgzip ^M'"'"'; sleep 1h; ) | htop
' \; split-window -t top -l 10 '
export PS1="$ "; bash --norc
' \; split-window -h '
export PS1="$ "; bash --norc
' \; set-option status off \; select-pane -t top \; resize-pane -y 6
# time gzip -d -c silesia-20x.gz | wc -c
# time rapidgzip -d -c silesia-20x.gz | wc -c
# Get palette from ~/.config/xfce4/terminal/terminalrc (for some reason 4 twice as many digits per hex code, so reduce it)
# Append to .rec .jsonl file. Note that the first line may not contain newlines for formatting!
# "theme": {"fg": "#ffffff", "bg": "#000000", "palette": "#000000:#aa0000:#00aa00:#aa5500:#0000aa:#aa00aa:#00aaaa:#aaaaaa:#555555:#ff5555:#55ff55:#ffff55:#5555ff:#ff55ff:#55ffff:#ffffff"}
# The broken font sizes and line heights are necessary to reduce font rendering issues. Else, the tmux lines look bad.
# Edit the asciinema recording:
#  - Remove everything after the first "exit" command
#  - Retime all setup stuff to happen at t = 0.0s instead of various t < 0.4s
# agg --font-family 'DejaVu Sans Mono' --font-size 13 --line-height 1.16 rapidgzip-comparison.{asciinema.jsonl,gif}
# Further post-processing in Gimp:
#  - Remove the top 20 pixels:
#    1. Select all but those
#    2. Image -> Fit Canvas to Selection
#  - Round the edges:
#    1. Go to Background layer (a the bottom)
#    2. Right Mouse Button (RMB) -> Add Alpha Channel
#    3. Ctrl + A
#    4. Menu->Select->Rounded Rectangle->Radius (%): 10
#    5. Ctrl + I
#    6. Press Delete
#  - Export as GIF
#    1. Be sure to uncheck the comment and check the "Save as Animation"
| true
|
f45cd50993871911adef6a3d5ffbcf9bd63e2d09
|
Shell
|
crypdex/blackbox
|
/services/bitcoin/docker/download-release.sh
|
UTF-8
| 898
| 3.59375
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Download, GPG-verify, checksum and unpack the bitcoin-core release
# matching the current platform. $VERSION must be set in the environment.
platform=$(uname -sm)
# Map the platform string to the release's architecture suffix.
case "${platform}" in
    "Linux x86_64")
        arch=x86_64
        archive_name=bitcoin-${VERSION}-${arch}-linux-gnu.tar.gz
        ;;
    "Linux aarch64")
        arch=aarch64
        archive_name=bitcoin-${VERSION}-${arch}-linux-gnu.tar.gz
        ;;
    "Linux armv7l")
        arch=arm
        archive_name=bitcoin-${VERSION}-${arch}-linux-gnueabihf.tar.gz
        ;;
    *)
        echo "Sorry, ${platform} is not supported"
        exit 1
        ;;
esac
archive=https://bitcoin.org/bin/bitcoin-core-${VERSION}/${archive_name}
echo "Downloading ${archive}"
curl -SLO ${archive}
curl -SLO https://bitcoin.org/bin/bitcoin-core-${VERSION}/SHA256SUMS.asc
echo "Verifying checksums"
# Import the release-signing key, verify the signature, then check the
# archive's checksum line only.
curl -SL https://bitcoin.org/laanwj-releases.asc | gpg --batch --import
gpg --verify SHA256SUMS.asc
grep " ${archive_name}\$" SHA256SUMS.asc | sha256sum -c
tar -xzf *.tar.gz -C /opt
rm *.tar.gz *.asc
| true
|
f3415d716ac66ae7d336264ef31c66a57762bdcf
|
Shell
|
puremourning/vim
|
/docker/linux-test/run
|
UTF-8
| 419
| 3.453125
| 3
|
[
"Vim"
] |
permissive
|
#!/usr/bin/env bash
# Run the vim test container, mounting the repository two levels up.
# Usage: run [container] [command...]; defaults: test image, bash.
if [ -z "$1" ]; then
    CONTAINER="puremourning:vim-test-linux"
else
    CONTAINER=$1
    shift
fi
echo "Using container $CONTAINER"
CMD="bash"
# Fix: `[ "$@" ]` errors out with two or more remaining arguments
# ("too many arguments"), silently falling back to bash. Test the
# argument count instead and join the words.
if [ "$#" -gt 0 ]; then
    CMD="$*"
fi
# $CMD is intentionally unquoted so a multi-word command word-splits
# back into separate docker arguments.
docker run --cap-add=SYS_PTRACE \
    --security-opt seccomp=unconfined \
    --mount src="$(pwd)/../..",target=/home/dev/vim,type=bind \
    -p 4567:4567 \
    -it ${CONTAINER} \
    $CMD
| true
|
7cf9b1dc0796b09a8366de0a67955cf8b1f8d888
|
Shell
|
pkubanek/ts_Dockerfiles
|
/develop-env/salobj_4/setup.sh
|
UTF-8
| 536
| 2.8125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Source this file when starting the container to set it up
echo "#"
echo "# Loading LSST Stack"
. /opt/lsst/software/stack/loadLSST.bash
setup lsst_distrib
echo "#"
echo "# Loading sal environment"
. /home/saluser/repos/ts_sal/setup.env
# Work around for setting LSST_DDS_IP working on most systems
# NOTE(review): relies on `ip route get 1` printing the source address
# as field 7 -- verify on the target image's iproute2 version.
export LSST_DDS_IP=`ip route get 1 | awk '{print $7;exit}'`
echo "#"
echo "# Setting up salobj"
setup ts_xml -t current
setup ts_sal -t current
setup ts_salobj -t current
# Finally drop into an interactive shell with the saluser rc file.
/bin/bash --rcfile /home/saluser/.bashrc
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.