blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
bb03dbee29a5e7e5e74b8ff57e02cc589c655fab
|
Shell
|
stuckinaboot/iGenomics
|
/outside_core_app/general_output_validation/bowtie2-2.2.3/scripts/make_h_sapiens_ncbi36.sh
|
UTF-8
| 2,458
| 3.875
| 4
|
[
"MIT",
"GPL-3.0-only"
] |
permissive
|
#!/bin/sh
#
# Downloads sequence for H. sapiens (human) from NCBI. This script was
# used to build the Bowtie index for H. sapiens.
#
# From README_CURRENT_BUILD:
# Organism: Homo sapiens (human)
# NCBI Build Number: 36
# Version: 3
# Release date: 24 March 2008
#
GENOMES_MIRROR=ftp://ftp.ncbi.nih.gov/genomes
FILE_PATH=${GENOMES_MIRROR}/H_sapiens/ARCHIVE/BUILD.36.3/Assembled_chromosomes
# Locate bowtie2-build: prefer ./bowtie2-build, else fall back to PATH.
# command -v is the portable, quiet replacement for `which` (which also
# printed the found path to stdout as a side effect).
BOWTIE_BUILD_EXE=./bowtie2-build
if [ ! -x "$BOWTIE_BUILD_EXE" ] ; then
	if ! command -v bowtie2-build > /dev/null 2>&1 ; then
		echo "Could not find bowtie2-build in current directory or in PATH"
		exit 1
	else
		BOWTIE_BUILD_EXE=$(command -v bowtie2-build)
	fi
fi
# Download and decompress each autosome and sex chromosome.
# Prefer wget; fall back to curl; abort if neither is available.
for c in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 X Y ; do
	if [ ! -f hs_ref_chr$c.fa ] ; then
		if ! command -v wget > /dev/null ; then
			echo wget not found, looking for curl...
			if ! command -v curl > /dev/null ; then
				echo curl not found either, aborting...
				# BUG FIX: the original fell through and only failed on the
				# post-download check; fail fast with the same exit code.
				exit 2
			else
				# BUG FIX: curl writes to stdout by default; -O saves the
				# payload under the remote file name like wget does.
				curl -O ${FILE_PATH}/hs_ref_chr$c.fa.gz
			fi
		else
			# Use wget
			wget ${FILE_PATH}/hs_ref_chr$c.fa.gz
		fi
		gunzip hs_ref_chr$c.fa.gz
	fi
	if [ ! -f hs_ref_chr$c.fa ] ; then
		echo "Could not find hs_ref_chr$c.fa file!"
		exit 2
	fi
done
# Special case: get mitochondrial DNA from its home
if [ ! -f hs_ref_chrMT.fa ] ; then
	if ! command -v wget > /dev/null ; then
		echo wget not found, looking for curl...
		if ! command -v curl > /dev/null ; then
			echo curl not found either, aborting...
			# BUG FIX: abort instead of falling through to a gunzip of a
			# file that was never downloaded.
			exit 2
		fi
		# BUG FIX: -O saves under the remote name instead of dumping the
		# gzip stream to stdout.
		curl -O ${GENOMES_MIRROR}/H_sapiens/CHR_MT/hs_ref_chrMT.fa.gz
	else
		# Use wget
		wget ${GENOMES_MIRROR}/H_sapiens/CHR_MT/hs_ref_chrMT.fa.gz
	fi
	gunzip hs_ref_chrMT.fa.gz
	# Robustness: verify the MT sequence like the other chromosomes
	# (the original only checked chr1-22/X/Y).
	if [ ! -f hs_ref_chrMT.fa ] ; then
		echo "Could not find hs_ref_chrMT.fa file!"
		exit 2
	fi
fi
# Comma-separated FASTA list expected by bowtie2-build.
INPUTS=hs_ref_chr1.fa,hs_ref_chr2.fa,hs_ref_chr3.fa,hs_ref_chr4.fa,hs_ref_chr5.fa,hs_ref_chr6.fa,hs_ref_chr7.fa,hs_ref_chr8.fa,hs_ref_chr9.fa,hs_ref_chr10.fa,hs_ref_chr11.fa,hs_ref_chr12.fa,hs_ref_chr13.fa,hs_ref_chr14.fa,hs_ref_chr15.fa,hs_ref_chr16.fa,hs_ref_chr17.fa,hs_ref_chr18.fa,hs_ref_chr19.fa,hs_ref_chr20.fa,hs_ref_chr21.fa,hs_ref_chr22.fa,hs_ref_chrMT.fa,hs_ref_chrX.fa,hs_ref_chrY.fa
# Forward any extra script arguments to bowtie2-build.
# BUG FIX: "$@" preserves argument boundaries; $* re-splits on whitespace.
echo "Running ${BOWTIE_BUILD_EXE} $* ${INPUTS} h_sapiens_asm"
${BOWTIE_BUILD_EXE} "$@" ${INPUTS} h_sapiens_asm
if [ "$?" = "0" ] ; then
	echo "h_sapiens_asm index built:"
	# NOTE(review): bowtie2-build produces .bt2 files; these .ebwt names
	# look copied from a bowtie1 script -- confirm.
	echo "  h_sapiens_asm.1.ebwt h_sapiens_asm.2.ebwt"
	echo "  h_sapiens_asm.3.ebwt h_sapiens_asm.4.ebwt"
	echo "  h_sapiens_asm.rev.1.ebwt h_sapiens_asm.rev.2.ebwt"
	echo "You may remove hs_ref_chr*.fa"
else
	echo "Index building failed; see error message"
fi
| true
|
8637311e92c57ea10fcd65db679cb5579e048413
|
Shell
|
RajkumarVerma4124/EMPLOYWAGECOMPUTATION-PROGRAM
|
/EmpWageComputation.sh
|
UTF-8
| 6,488
| 3.8125
| 4
|
[] |
no_license
|
echo "Welcome to Employee Wage Computation Program on Master Branch "
#---------------------------------------------------------------------------------------------------------------------------------------------------
# Prompt for the day's working hours, store the answer in the global
# Empworkhour, and echo it back to the caller.
workhour() {
	read -p "Enter the hours you work : " Empworkhour
	echo $Empworkhour
}
#---------------------------------------------------------------------------------------------------------------------------------------------------
# NOTE(review): stray shebang in mid-file; bash reads it as an ordinary
# comment, so the -x tracing flag here has no effect.
#!/bin/bash -x
printf "\n"
echo " checking if an employ is present or not "
printf "\n"
# Coin flip: $RANDOM % 2 + 1 yields 1 (Present) or 2 (Absent).
Attendence=$(( $RANDOM % 2 + 1 ))
if [ $Attendence -eq 1 ]
then
employ="Present"
read -p "Enter the Employ name : " name
echo $employ
else
employ="Absent"
read -p "Enter the Employ name : " name
echo $employ
fi
printf "\n"
echo " Calculating its daily wage if present "
printf "\n"
# Daily wage = 20/hour * hours entered interactively (workhour sets the
# global Empworkhour).
if [ "$employ" == "Present" ]
then
Employwageperhour=20
workhour
dailyemploywage=$(($Employwageperhour * $Empworkhour))
echo "Wage of $name who does full time is : "$dailyemploywage
else
dailyemploywage=0
echo "Employee Name $name was Absent today : "$dailyemploywage
fi
printf "\n"
echo " Calculating its part time wage if present "
printf "\n"
# Part time = half the entered hours (integer division) at the same rate.
# NOTE(review): Employwageperhour/Empworkhour are only assigned in the
# Present branch above, so both ifs rely on taking the same branch.
if [ "$employ" == "Present" ]
then
partime=$(($Empworkhour/2))
parttimeemploywage=$(($Employwageperhour * $partime))
echo "Wage of $name who does part time is : " $parttimeemploywage
else
parttimeemploywage=0
echo "Employee Name $name was Absent today : "$dailyemploywage
fi
#------------------------------------------------------------------------------------------------------------------
printf "\n"
echo "Solving Using Switch case"
printf "\n"
# Interactive menu: loops until option 4 (Exit) is chosen.
while true
do
echo "1.Check if employ is Present or not"
echo "2.Wages of the employ who does full time"
echo "3.Wages of the employ who does part time"
echo "4.Exit"
read -p "Enter your choice from above statements : " choice
case $choice in
"1")
# Re-roll attendance, same coin flip as the top of the script.
Attendence=$(( $RANDOM % 2 + 1 ))
if [ $Attendence -eq 1 ]
then
employ="Present"
read -p "Enter the Employ name : " name
echo $employ
else
employ="Absent"
read -p "Enter the Employ name : " name
echo $employ
fi
;;
"2")
if [ "$employ" == "Present" ]
then
NewEmploywageperhour=20
workhour
Newdailyemploywage=$(($NewEmploywageperhour * $Empworkhour))
echo "Wage of $name for full time is : "$Newdailyemploywage
printf "\n"
printf "\n"
else
echo "The employ $name is : " $employ
printf "\n"
printf "\n"
fi
;;
"3")
# NOTE(review): choosing 3 before 2 uses NewEmploywageperhour and
# Empworkhour before they are ever assigned -- confirm intended.
if [ "$employ" == "Present" ]
then
Newpartworktime=$(($Empworkhour/2))
Newparttimeemploywage=$(($NewEmploywageperhour*$Newpartworktime))
echo "Wage of $name for part time is : "$Newparttimeemploywage
printf "\n"
printf "\n"
else
echo "The employ $name is : "$employ
printf "\n"
printf "\n"
fi
;;
4)
# NOTE(review): the printf lines after 'break' are unreachable.
break
printf "\n"
printf "\n"
;;
*)
echo invalid choice
;;
esac
done
#---------------------------------------------------------------------------------------------------------------------------------------------------------
printf "\n"
echo "Calculating Wages for a Month - Assume 20 Working Day per Month"
printf "\n"
WorkingDayPerMonth=20
workhour
# NOTE(review): Employwageperhour is only set earlier when the employee
# was marked Present; otherwise this multiplies by an empty value and the
# arithmetic expansion errors out -- confirm intended.
dailyemploywage=$(($Empworkhour*$Employwageperhour))
wagesperamonth=$(($dailyemploywage*$WorkingDayPerMonth))
echo "Wages Per Month of $name is : " $wagesperamonth
printf "\n"
#--------------------------------------------------------------------------------------------------------------------------------------------------
printf "\n"
echo " Calculate Wages till a condition of total working hours or days is reached for a month - Assume 100 hours and 20 days"
printf "\n"
read -p "Enter the Total hours you worked : " totaltime
read -p "Enter the total no. of days you worked : " totalday
# Accumulators for the simulation below.
newworktime=0
wages=0
days=0
dailyemploywages=160
parttimeemploywages=80
absentday=0
i=0
# Simulate days until either the hour cap or the day cap is reached.
# Each day is a 3-way random draw: 1 = full day (8h, 160), 2 = part time
# (4h, 80), 3 = absent (0h, 0).
while [ $newworktime -le $totaltime -a $days -lt $totalday ]
do
checkPresent=$(( $RANDOM % 3 + 1 ))
if [ $checkPresent -eq 1 ]
then
newworktime=$(($newworktime+8))
days=$(($days+1))
i=$(($i+1))
wages=$(($wages+$dailyemploywages))
declare -A dayss=( [day]="DAY [$i]" [wages]="FULLDAY WAGE :: $dailyemploywages" [totalwage]="TOTAL WAGE :: $wages" )
echo "WAGE IN : " ${dayss[@]}
fi
if [ $checkPresent -eq 2 ]
then
newworktime=$(($newworktime+4))
days=$(($days+1))
i=$(($i+1))
wages=$(($wages+$parttimeemploywages))
declare -A dayss=( [day]="DAY [$i]" [wages]="PARTTIME WAGE :: $parttimeemploywages" [totalwage]="TOTAL WAGE :: $wages" )
echo "WAGE IN : " ${dayss[@]}
fi
if [ $checkPresent -eq 3 ]
then
days=$(($days+1))
i=$(($i+1))
declare -A dayss=( [day]="DAY [$i]" [wages]="ABSENT DAY WAGE :: $absentday" [totalwage]="TOTAL WAGE :: $wages" )
echo "WAGE IN : " ${dayss[@]}
fi
done
echo " Total wages of $name is : $wages "
#----------------------------------------------------------------------------------------------------------------------------------------------------------------
printf "\n"
echo " Storing the Daily Wage along with the Total Wage"
printf "\n"
# Compute a full-day wage (20/hour * global Empworkhour) into the global
# variable dailyemploywage (shares its name with this function).
dailyemploywage() {
	Employwageperhour=20
	dailyemploywage=$(($Employwageperhour * $Empworkhour))
}
# Compute a part-time wage into the global parttimeemploywage:
# half of Empworkhour (integer division) at 20/hour.
parttimeemploywage() {
	Employwageperhour=20
	partime=$(($Empworkhour/2))
	parttimeemploywage=$(($Employwageperhour * $partime))
}
# Roll a 3-way attendance draw and set the global employ to the day's
# pay: the full-day wage, the part-time wage, or 0 when absent.
checkifAvailable() {
	person=$(($RANDOM%3+1))
	case $person in
		1)
			dailyemploywage
			employ=$dailyemploywage
			;;
		2)
			parttimeemploywage
			employ=$parttimeemploywage
			;;
		3)
			employ=0
			;;
	esac
}
# Record each simulated day's wage in the indexed Day array and keep a
# running total.
totalwage=0
read -p "Enter the number of days you want to work : " days
workhour
echo $days
for((i=1; i<=$days; i++))
do
checkifAvailable
Day[$i]=$employ
totalwage=$(($totalwage+${Day[$i]}))
echo Day $i " = " ${Day[$i]} " : " $totalwage
done
echo "Total wages of $name in $days days of work is : "$totalwage
#----------------------------------------------------------------------------------------------------------------------------------------------------------
printf "\n"
echo "Store the Day and the Daily Wage along with the Total Wage"
printf "\n"
# Same simulation as above, but each day is recorded in the associative
# array dayss (day/wages/totalwage keys) instead of the indexed Day array.
totalwage=0
read -p "Enter the number of days you want to work : " days
workhour
echo $days
for((i=1; i<=$days; i++))
do
checkifAvailable
totalwage=$(($totalwage+$employ))
declare -A dayss=( [day]="DAY [$i]" [wages]="DAILY WAGE :: $employ" [totalwage]="TOTAL WAGE :: $totalwage" )
echo "WAGE IN === " ${dayss[@]}
done
echo "Total wages of $name in $days days of work is : "$totalwage
| true
|
87dd6aca6ee4b28dca0e14c71fd1a42cad3a44c5
|
Shell
|
ginomcevoy/vespa
|
/mgmt/create-vm.sh
|
UTF-8
| 422
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
# Creates a VM from an XML
# Author: Giacomo Mc Evoy - giacomo@lncc.br
# LNCC Brazil 2013
# Validate input
if [ $# -lt 2 ]; then
	echo "Create a VM from an XML"
	echo "Usage: $0 <hostname> <xml>"
	exit 1
fi
# Params
HOST=$1
XML=$2
# Call libvirt; quote expansions so hostnames/paths with spaces survive.
# (the original 'echo -e' had no escapes to interpret, so plain echo
# prints the same line)
echo "virsh -c qemu+ssh://$HOST/system create $XML"
virsh -c "qemu+ssh://$HOST/system" create "$XML"
# Wait before sending another libvirt request, may prevent VM hangup
sleep 3
| true
|
3379a34a33ab9bb1ec75f9d3f2767d397cfb03a3
|
Shell
|
igordot/sns
|
/segments/qc-coverage-gatk.sh
|
UTF-8
| 4,200
| 3.515625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# GATK coverage stats
# script filename
script_path="${BASH_SOURCE[0]}"
script_name=$(basename "$script_path")
# strip the trailing ".sh" to form the segment name used in output files
segment_name=${script_name/%.sh/}
echo -e "\n ========== SEGMENT: $segment_name ========== \n" >&2
# check for correct number of arguments
if [ ! $# == 3 ] ; then
echo -e "\n $script_name ERROR: WRONG NUMBER OF ARGUMENTS SUPPLIED \n" >&2
echo -e "\n USAGE: $script_name project_dir sample_name BAM \n" >&2
if [ $# -gt 0 ] ; then echo -e "\n ARGS: $* \n" >&2 ; fi
exit 1
fi
# arguments
proj_dir=$(readlink -f "$1")
sample=$2
bam=$3
#########################
# settings and files
summary_dir="${proj_dir}/summary"
mkdir -p "$summary_dir"
summary_csv="${summary_dir}/${sample}.${segment_name}.csv"
cov_dir="${proj_dir}/QC-coverage"
mkdir -p "$cov_dir"
# per-sample prefix for all GATK DepthOfCoverage output files
out_prefix="${cov_dir}/${sample}"
gatk_sample_summary="${out_prefix}.sample_summary"
# unload all loaded modulefiles (environment-modules on the cluster)
module purge
module add default-environment
#########################
# exit if output exits already
if [ -s "$gatk_sample_summary" ] ; then
echo -e "\n $script_name SKIP SAMPLE $sample \n" >&2
exit 0
fi
#########################
# check that inputs exist
if [ ! -d "$proj_dir" ] ; then
echo -e "\n $script_name ERROR: PROJ DIR $proj_dir DOES NOT EXIST \n" >&2
exit 1
fi
if [ ! -s "$bam" ] ; then
echo -e "\n $script_name ERROR: BAM $bam DOES NOT EXIST \n" >&2
exit 1
fi
# repo root is two directory levels above this segment script
code_dir=$(dirname $(dirname "$script_path"))
ref_fasta=$(bash "${code_dir}/scripts/get-set-setting.sh" "${proj_dir}/settings.txt" REF-FASTA);
if [ ! -s "$ref_fasta" ] ; then
echo -e "\n $script_name ERROR: FASTA $ref_fasta DOES NOT EXIST \n" >&2
exit 1
fi
ref_dict=$(bash "${code_dir}/scripts/get-set-setting.sh" "${proj_dir}/settings.txt" REF-DICT);
if [ ! -s "$ref_dict" ] ; then
echo -e "\n $script_name ERROR: DICT $ref_dict DOES NOT EXIST \n" >&2
exit 1
fi
# default targets BED: first non-probes .bed found in the project dir
found_bed=$(find "$proj_dir" -maxdepth 1 -type f -iname "*.bed" | grep -v "probes" | sort | head -1)
bed=$(bash "${code_dir}/scripts/get-set-setting.sh" "${proj_dir}/settings.txt" EXP-TARGETS-BED "$found_bed");
if [ ! -s "$bed" ] ; then
echo -e "\n $script_name ERROR: BED $bed DOES NOT EXIST \n" >&2
exit 1
fi
#########################
# GATK settings
# command
# this segment failed for canFam3 WES (1.1M targets) with error "adjust the maximum heap size provided to Java"
gatk_jar="/gpfs/data/igorlab/software/GenomeAnalysisTK/GenomeAnalysisTK-3.8-1/GenomeAnalysisTK.jar"
gatk_cmd="java -Xms32G -Xmx32G -jar ${gatk_jar}"
if [ ! -s "$gatk_jar" ] ; then
echo -e "\n $script_name ERROR: GATK $gatk_jar DOES NOT EXIST \n" >&2
exit 1
fi
# error log (blank for troubleshooting)
gatk_log_level_arg="--logging_level ERROR"
#########################
# on-target coverage
echo
echo " * GATK: $(readlink -f $gatk_jar) "
echo " * GATK version: $($gatk_cmd --version) "
echo " * BAM: $bam "
echo " * BED: $bed "
echo " * output prefix: $out_prefix "
echo " * sample_summary: $gatk_sample_summary "
echo
# using '-nt' with this combination of arguments causes an error
gatk_doc_cmd="
$gatk_cmd -T DepthOfCoverage -dt NONE $gatk_log_level_arg \
-rf BadCigar \
--reference_sequence $ref_fasta \
--intervals $bed \
--omitDepthOutputAtEachBase \
-ct 10 -ct 50 -ct 100 -ct 500 -mbq 20 -mmq 20 --nBins 999 --start 1 --stop 1000 \
--input_file $bam \
--outputFormat csv \
--out $out_prefix
"
echo "CMD: $gatk_doc_cmd"
$gatk_doc_cmd
#########################
# check that output generated
if [ ! -s "$gatk_sample_summary" ] ; then
echo -e "\n $script_name ERROR: sample_summary $gatk_sample_summary NOT GENERATED \n" >&2
exit 1
fi
#########################
# generate summary
# summarize log file
cat "$gatk_sample_summary" \
| head -2 \
| cut -d ',' -f 1,3,5,7-99 \
| sed 's/sample_id,mean,granular_median/#SAMPLE,MEAN COVERAGE,MEDIAN COVERAGE/' \
> "$summary_csv"
sleep 5
# combine all sample summaries
cat ${summary_dir}/*.${segment_name}.csv | LC_ALL=C sort -t ',' -k1,1 | uniq > "${proj_dir}/summary.${segment_name}.csv"
#########################
# delete files that are not needed
rm -fv "${out_prefix}.sample_statistics"
rm -fv "${out_prefix}.sample_interval_statistics"
#########################
# end
| true
|
2f24baeac03066f3943656cfc1933c906be84468
|
Shell
|
sspeng/EasyMesh
|
/test
|
UTF-8
| 623
| 3.296875
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the Easy executable, run every example mesh, then record md5
# checksums of the generated outputs for regression checking.
dir_cur=$PWD
dir_src=$PWD/Src
dir_test=$PWD/Examples
#------------------------------------#
# go to source directory and compile #
#------------------------------------#
cd "$dir_src" || exit 1
make
cd "$dir_test" || exit 1
ln -f -s "$dir_src/Easy" .
#------------------------------#
# browse through all the tests #
#------------------------------#
# BUG FIX: iterate with a glob instead of parsing `ls` output (avoids
# word-splitting on odd file names). The old `$i != "CVS"` guard compared
# a full *.d path against "CVS" and could never trigger, so it is dropped.
for i in "$dir_test"/*.d; do
	[ -e "$i" ] || continue
	./Easy "$i" +eps
done
#-------------------------------#
# find the md5 sum for checking #
#-------------------------------#
md5sum *.e *.n *.s *.eps > md5.now
rm -f *.e *.n *.s *.eps
echo 'done !'
cd "$dir_cur"
| true
|
519836b9732a556477b2679743f2c4ec27adace6
|
Shell
|
824728350/batfish
|
/tools/role_reachability.sh
|
UTF-8
| 10,525
| 3.53125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Wrapper that runs the role-reachability analysis with confirmation
# prompts enabled (BATFISH_CONFIRM is set only for this call).
batfish_confirm_analyze_role_reachability() {
   # BUG FIX: "$@" preserves arguments that contain whitespace; bare $@
   # re-splits them.
   BATFISH_CONFIRM=batfish_confirm batfish_analyze_role_reachability "$@"
}
export -f batfish_confirm_analyze_role_reachability
# Full role-reachability pipeline: parse vendor configs, compute the
# control-plane fixed point, extract z3 reachability relations, solve
# per-(node, role) queries, and inject the resulting concrete packets
# back into the network model.
# Args: $1 test-rig dir (relative or absolute), $2 output-file prefix,
# remaining args: optional machines for distributed execution.
# Each stage is gated on $BATFISH_CONFIRM (defaults to true = no prompt).
batfish_analyze_role_reachability() {
   local TEST_RIG_RELATIVE=$1
   shift
   local PREFIX=$1
   shift
   local MACHINES="$@"
   local NUM_MACHINES="$#"
   if [ -z "$PREFIX" ]; then
      echo "ERROR: Empty prefix" 1>&2
      return 1
   fi
   if [ -z "$BATFISH_CONFIRM" ]; then
      local BATFISH_CONFIRM=true
   fi
   local WORKSPACE=batfish-$USER-$PREFIX
   local OLD_PWD=$PWD
   # resolve the test rig to an absolute path
   if [ "$(echo $TEST_RIG_RELATIVE | head -c1)" = "/" ]; then
      local TEST_RIG=$TEST_RIG_RELATIVE
   else
      local TEST_RIG=$PWD/$TEST_RIG_RELATIVE
   fi
   # derived output paths, all prefixed with $PREFIX
   local REACH_PATH=$OLD_PWD/$PREFIX-reach.smt2
   local NODE_SET_PATH=$OLD_PWD/$PREFIX-node-set
   local QUERY_PATH=$OLD_PWD/$PREFIX-query
   local RR_QUERY_BASE_PATH=$QUERY_PATH/role-reachability-query
   local DUMP_DIR=$OLD_PWD/$PREFIX-dump
   local FLOWS=$OLD_PWD/$PREFIX-flows
   local BGP=$OLD_PWD/$PREFIX-bgp
   local ROUTES=$OLD_PWD/$PREFIX-routes
   local VENDOR_SERIAL_DIR=$OLD_PWD/$PREFIX-vendor
   local INDEP_SERIAL_DIR=$OLD_PWD/$PREFIX-indep
   local DP_DIR=$OLD_PWD/$PREFIX-dp
   local NODE_ROLES_PATH=$OLD_PWD/$PREFIX-node_roles
   local ROLE_SET_PATH=$OLD_PWD/$PREFIX-role_set
   echo "Parse vendor configuration files and serialize vendor structures"
   $BATFISH_CONFIRM && { batfish_serialize_vendor_with_roles $TEST_RIG $VENDOR_SERIAL_DIR $NODE_ROLES_PATH || return 1 ; }
   echo "Parse vendor structures and serialize vendor-independent structures"
   $BATFISH_CONFIRM && { batfish_serialize_independent $VENDOR_SERIAL_DIR $INDEP_SERIAL_DIR || return 1 ; }
   echo "Compute the fixed point of the control plane"
   $BATFISH_CONFIRM && { batfish_compile $WORKSPACE $TEST_RIG $DUMP_DIR $INDEP_SERIAL_DIR || return 1 ; }
   echo "Query routes"
   $BATFISH_CONFIRM && { batfish_query_routes $ROUTES $WORKSPACE || return 1 ; }
   echo "Query bgp"
   $BATFISH_CONFIRM && { batfish_query_bgp $BGP $WORKSPACE || return 1 ; }
   echo "Query data plane predicates"
   $BATFISH_CONFIRM && { batfish_query_data_plane $WORKSPACE $DP_DIR || return 1 ; }
   echo "Extract z3 reachability relations"
   $BATFISH_CONFIRM && { batfish_generate_z3_reachability $DP_DIR $INDEP_SERIAL_DIR $REACH_PATH $NODE_SET_PATH || return 1 ; }
   echo "Find role-reachability packet constraints"
   $BATFISH_CONFIRM && { batfish_find_role_reachability_packet_constraints $REACH_PATH $QUERY_PATH $RR_QUERY_BASE_PATH $NODE_SET_PATH $NODE_ROLES_PATH $ROLE_SET_PATH "$MACHINES" "$NUM_MACHINES" || return 1 ; }
   echo "Generate role-reachability concretizer queries"
   $BATFISH_CONFIRM && { batfish_generate_role_reachability_concretizer_queries $RR_QUERY_BASE_PATH $NODE_ROLES_PATH "$MACHINES" "$NUM_MACHINES" || return 1 ; }
   echo "Inject concrete packets into network model"
   $BATFISH_CONFIRM && { batfish_inject_packets_with_role_flow_duplication $WORKSPACE $QUERY_PATH $DUMP_DIR || return 1 ; }
   echo "Query flow results from LogicBlox"
   $BATFISH_CONFIRM && { batfish_query_flows $FLOWS $WORKSPACE || return 1 ; }
}
export -f batfish_analyze_role_reachability
# Generate one packet-constraint query per (node, role) pair and solve
# them in parallel (optionally fanned out to remote machines via rsync +
# GNU parallel -S).
# Args: reach smt2 path, query dir, query base path, node-set path,
# node-roles path, role-set path, machines list, machine count.
batfish_find_role_reachability_packet_constraints() {
   batfish_date
   echo ": START: Find role-reachability packet constraints"
   batfish_expect_args 8 $# || return 1
   local REACH_PATH=$1
   local QUERY_PATH=$2
   local QUERY_BASE_PATH=$3
   local NODE_SET_PATH=$4
   local NODE_ROLES_PATH=$5
   local ROLE_SET_PATH=$6
   local MACHINES="$7"
   local NUM_MACHINES="$8"
   local NODE_SET_TEXT_PATH=${NODE_SET_PATH}.txt
   local OLD_PWD=$PWD
   local SERVER_OPTS=
   mkdir -p $QUERY_PATH
   cd $QUERY_PATH
   batfish -rr -rrpath $QUERY_BASE_PATH -nodes $NODE_SET_PATH -nrpath $NODE_ROLES_PATH -rspath $ROLE_SET_PATH || return 1
   if [ -n "$NUM_MACHINES" -a "$NUM_MACHINES" -gt 0 ]; then
      for MACHINE in $MACHINES; do
         #set server options for GNU parallel
         local SERVER_OPTS="$SERVER_OPTS -S $MACHINE"
         # copy necessary files to remote machines
         ssh $MACHINE mkdir -p $QUERY_PATH || return 1
         rsync -av -rsh=ssh --stats --progress $REACH_PATH $MACHINE:$REACH_PATH || return 1
         rsync -av -rsh=ssh --stats --progress $QUERY_PATH/. $MACHINE:$QUERY_PATH/. || return 1
      done
   fi
   # emit every NODE:ROLE pair, shuffle for load-balancing, and solve each
   # pair with the helper under GNU parallel.
   # NOTE(review): the 'return 1' below runs inside a piped while loop,
   # i.e. in a subshell -- it aborts the pipeline stage, not this
   # function; confirm intended.
   cat $NODE_SET_TEXT_PATH | while read NODE
   do
      cat $ROLE_SET_PATH | while read ROLE
      do
         echo "${NODE}:${ROLE}"
      done
      if [ "${PIPESTATUS[0]}" -ne 0 -o "${PIPESTATUS[1]}" -ne 0 ]; then
         return 1
      fi
   done | sort -R | $BATFISH_PARALLEL $SERVER_OPTS batfish_find_role_reachability_packet_constraints_helper {} $REACH_PATH $QUERY_BASE_PATH
   if [ "${PIPESTATUS[0]}" -ne 0 -o "${PIPESTATUS[1]}" -ne 0 -o "${PIPESTATUS[2]}" -ne 0 ]; then
      return 1
   fi
   if [ -n "$NUM_MACHINES" -a "$NUM_MACHINES" -gt 0 ]; then
      for MACHINE in $MACHINES; do
         # copy output files from remote machines
         rsync -av -rsh=ssh --stats --progress $MACHINE:$QUERY_PATH/. $QUERY_PATH/. || return 1
      done
   fi
   cd $OLD_PWD
   batfish_date
   echo ": END: Find role-reachability packet constraints"
}
export -f batfish_find_role_reachability_packet_constraints
# Solve the packet constraints for a single "node:role" pair by feeding
# the shared reachability SMT plus the pair's query into z3.
# Args: $1 "node:role", $2 reach smt2 path, $3 query base path.
batfish_find_role_reachability_packet_constraints_helper() {
   batfish_expect_args 3 $# || return 1
   local NODE=$(echo "$1" | cut -d':' -f 1)
   local ROLE=$(echo "$1" | cut -d':' -f 2)
   local REACH_PATH=$2
   local QUERY_BASE_PATH=$3
   batfish_date
   local QUERY_PATH=${QUERY_BASE_PATH}-${NODE}-${ROLE}.smt2
   local QUERY_OUTPUT_PATH=${QUERY_PATH}.out
   echo ": START: Find role-reachability packet constraints from node \"${NODE}\" to role \"${ROLE}\" (\"${QUERY_OUTPUT_PATH}\")"
   # 3>&1 ... 2>&3 swaps stdout/stderr so z3 results land in the .out file
   # while batfish_time's timing output stays on the console
   cat $REACH_PATH $QUERY_PATH | batfish_time $BATFISH_Z3_DATALOG -smt2 -in 3>&1 1> $QUERY_OUTPUT_PATH 2>&3
   if [ "${PIPESTATUS[0]}" -ne 0 -o "${PIPESTATUS[1]}" -ne 0 ]; then
      return 1
   fi
   batfish_date
   echo ": END: Find role-reachability packet constraints from node \"${NODE}\" to role \"${ROLE}\" (\"${QUERY_OUTPUT_PATH}\")"
}
export -f batfish_find_role_reachability_packet_constraints_helper
# Turn the per-(node, role) constraint outputs into concretizer queries,
# one iteration line at a time, in parallel (optionally on remote
# machines).
# Args: query base path, role-nodes path, machines list, machine count.
batfish_generate_role_reachability_concretizer_queries() {
   batfish_date
   echo ": START: Generate role-reachability concretizer queries"
   batfish_expect_args 4 $# || return 1
   local QUERY_BASE_PATH=$1
   local ROLE_NODES_PATH=$2
   local MACHINES="$3"
   local NUM_MACHINES="$4"
   local ITERATIONS_PATH=${ROLE_NODES_PATH}.iterations
   local QUERY_PATH="$(dirname $QUERY_BASE_PATH)"
   local OLD_PWD=$PWD
   cd $QUERY_PATH
   local SERVER_OPTS=
   if [ -n "$NUM_MACHINES" -a "$NUM_MACHINES" -gt 0 ]; then
      for MACHINE in $MACHINES; do
         #set server options for GNU parallel
         local SERVER_OPTS="$SERVER_OPTS -S $MACHINE"
         # copy necessary files to remote machines
         rsync -av -rsh=ssh --stats --progress $QUERY_PATH/. $MACHINE:$QUERY_PATH/. || return 1
      done
   fi
   # shuffle iteration lines for load-balancing across parallel workers
   sort -R $ITERATIONS_PATH | $BATFISH_PARALLEL $SERVER_OPTS batfish_generate_role_reachability_concretizer_queries_helper {} $QUERY_BASE_PATH \;
   if [ "${PIPESTATUS[0]}" -ne 0 -o "${PIPESTATUS[1]}" -ne 0 ]; then
      return 1
   fi
   if [ -n "$NUM_MACHINES" -a "$NUM_MACHINES" -gt 0 ]; then
      for MACHINE in $MACHINES; do
         # copy output files from remote machines
         rsync -av -rsh=ssh --stats --progress $MACHINE:$QUERY_PATH/. $QUERY_PATH/. || return 1
      done
   fi
   cd $OLD_PWD
   batfish_date
   echo ": END: Generate role-reachability concretizer queries"
}
export -f batfish_generate_role_reachability_concretizer_queries
# Process one iteration line of the form
# "transmitting_role:master_node:slave_node:receiving_role": build
# concretizer queries in both master/slave directions and evaluate every
# generated .smt2 with the nested-parallel query runner.
batfish_generate_role_reachability_concretizer_queries_helper() {
   batfish_expect_args 2 $# || return 1
   local ITERATION_LINE=$1
   local QUERY_BASE_PATH=$2
   local TRANSMITTING_ROLE=$(echo $ITERATION_LINE | cut -d':' -f 1)
   local MASTER_NODE=$(echo $ITERATION_LINE | cut -d':' -f 2)
   local SLAVE_NODE=$(echo $ITERATION_LINE | cut -d':' -f 3)
   local RECEIVING_ROLE=$(echo $ITERATION_LINE | cut -d':' -f 4)
   local MASTER_QUERY_OUT=${QUERY_BASE_PATH}-${MASTER_NODE}-${RECEIVING_ROLE}.smt2.out
   local SLAVE_QUERY_OUT=${QUERY_BASE_PATH}-${SLAVE_NODE}-${RECEIVING_ROLE}.smt2.out
   local MASTER_CONCRETIZER_QUERY_BASE_PATH=${QUERY_BASE_PATH}-${MASTER_NODE}-${SLAVE_NODE}-${RECEIVING_ROLE}-concrete
   local SLAVE_CONCRETIZER_QUERY_BASE_PATH=${QUERY_BASE_PATH}-${SLAVE_NODE}-${MASTER_NODE}-${RECEIVING_ROLE}-concrete
   local QUERY_DIR=$(dirname $QUERY_BASE_PATH)
   cd $QUERY_DIR
   batfish_date
   echo ": START: Generate role-reachability concretizer queries for transmitting role \"${TRANSMITTING_ROLE}\", master node \"${MASTER_NODE}\", slave node \"${SLAVE_NODE}\", receiving role \"${RECEIVING_ROLE}\""
   # master positive / slave negated, then the mirror image
   batfish -conc -concin $MASTER_QUERY_OUT -concinneg $SLAVE_QUERY_OUT -concunique -concout $MASTER_CONCRETIZER_QUERY_BASE_PATH || return 1
   batfish -conc -concinneg $MASTER_QUERY_OUT -concin $SLAVE_QUERY_OUT -concunique -concout $SLAVE_CONCRETIZER_QUERY_BASE_PATH || return 1
   $GNU_FIND $PWD -regextype posix-extended -regex "${MASTER_CONCRETIZER_QUERY_BASE_PATH}-[0-9]+.smt2" | \
      $BATFISH_NESTED_PARALLEL batfish_generate_concretizer_query_output {} $MASTER_NODE \;
   if [ "${PIPESTATUS[0]}" -ne 0 -o "${PIPESTATUS[1]}" -ne 0 ]; then
      return 1
   fi
   $GNU_FIND $PWD -regextype posix-extended -regex "${SLAVE_CONCRETIZER_QUERY_BASE_PATH}-[0-9]+.smt2" | \
      $BATFISH_NESTED_PARALLEL batfish_generate_concretizer_query_output {} $SLAVE_NODE \;
   if [ "${PIPESTATUS[0]}" -ne 0 -o "${PIPESTATUS[1]}" -ne 0 ]; then
      return 1
   fi
   batfish_date
   echo ": END: Generate role-reachability concretizer queries for transmitting role \"${TRANSMITTING_ROLE}\", master node \"${MASTER_NODE}\", slave node \"${SLAVE_NODE}\", receiving role \"${RECEIVING_ROLE}\""
   echo
}
export -f batfish_generate_role_reachability_concretizer_queries_helper
# Inject the concretized packets into the LogicBlox network model (with
# role-based flow duplication, -drf) and dump/format the traffic results.
# Args: workspace name, query dir, dump dir.
batfish_inject_packets_with_role_flow_duplication() {
   batfish_date
   echo ": START: Inject concrete packets into network model"
   batfish_expect_args 3 $# || return 1
   local WORKSPACE=$1
   local QUERY_PATH=$2
   local DUMP_DIR=$3
   local OLD_PWD=$PWD
   cd $QUERY_PATH
   batfish -workspace $WORKSPACE -flow -flowpath $QUERY_PATH -drf -dumptraffic -dumpdir $DUMP_DIR || return 1
   batfish_format_flows $DUMP_DIR || return 1
   cd $OLD_PWD
   batfish_date
   echo ": END: Inject concrete packets into network model"
}
export -f batfish_inject_packets_with_role_flow_duplication
| true
|
c0b2d87692b23f094b861afc109648b2089c266e
|
Shell
|
wangmuy/docker-ubuntu-runas
|
/start.sh
|
UTF-8
| 353
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/sh
echo 'start.sh'
# directory containing this script
mydir="$( cd "$(dirname "$0")" && pwd )"
[ -n "$WORKDIR_ROOT" ] && cd "$WORKDIR_ROOT"
# optional hook sourced before the user script
# (paths quoted so a $mydir containing spaces cannot break the -f tests)
[ -f "$mydir/start_private.sh" ] && . "$mydir/start_private.sh"
[ -f "$mydir/start-user.sh" ] && startScript=$mydir/start-user.sh
# NOTE(review): if start-user.sh is missing, startScript stays unset and
# the commands below run with an empty program -- confirm intended.
if [ -n "${USER_ID}" ] && [ "${USER_ID}" != "0" ]; then
	# drop privileges to USER_NAME, preserving the environment;
	# "$@" forwards the original arguments without re-splitting
	exec sudo -E -u "$USER_NAME" "$startScript" "$@"
else
	"$startScript" "$@"
fi
| true
|
6ebdbb723b9f0fdceefee4986ae84a5ea8c89707
|
Shell
|
Gancc123/flame
|
/scripts/mysql.sh
|
UTF-8
| 594
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Bootstrap the flame manager database: create the DB, the service user,
# and grant the user full privileges on the DB.
DBNAME="flame_mgr_db"
PASSWORD="123456"
USER_NAME="flame"
# CONSISTENCY FIX: reuse ${PASSWORD} instead of repeating the '123456'
# literal, so the password is defined in exactly one place.
if_not_exist_create_database="create database if not exists ${DBNAME}"
if_not_exist_create_user="create user if not exists '${USER_NAME}'@'localhost' identified by '${PASSWORD}'"
grant_privileges="grant all privileges on ${DBNAME}.* to '${USER_NAME}'@'localhost' identified by '${PASSWORD}'"
flush_privileges="flush privileges"
# NOTE(review): -p<password> on the command line is visible in `ps`;
# prefer ~/.my.cnf or --defaults-extra-file outside of dev setups. Also,
# GRANT ... IDENTIFIED BY was removed in MySQL 8 -- confirm server version.
mysql -p${PASSWORD} -e "${if_not_exist_create_database}"
mysql -p${PASSWORD} -e "${if_not_exist_create_user}"
mysql -p${PASSWORD} -e "${grant_privileges}"
mysql -p${PASSWORD} -e "${flush_privileges}"
| true
|
d3f2fb85c3081dd7e50f0b1b63fad53ed3c5f313
|
Shell
|
MinbinGong/OpenStack-Ocata
|
/glance_store-0.20.0/glance_store/tests/functional/hooks/gate_hook.sh
|
UTF-8
| 1,004
| 2.53125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Executed inside the gate_hook function in devstack gate.
# NOTE(NiallBunting) The store to test is passed in here from the
# project config (defaults to swift when no argument is given).
GLANCE_STORE_DRIVER=${1:-swift}
ENABLED_SERVICES+=",key,glance"
# The swift backend needs the object-store services enabled in devstack.
if [ "$GLANCE_STORE_DRIVER" = "swift" ]; then
   ENABLED_SERVICES+=",s-proxy,s-account,s-container,s-object,"
fi
export GLANCE_STORE_DRIVER
export ENABLED_SERVICES
$BASE/new/devstack-gate/devstack-vm-gate.sh
| true
|
a26673eeda4228c524d2f04732160a43cd6c16c4
|
Shell
|
terriera/c-swimming-pool
|
/j11/test/test.sh
|
UTF-8
| 7,057
| 3.828125
| 4
|
[] |
no_license
|
#! /bin/sh
# message colors
ERROR_COLOR='1;31' # Errors
HILITE_COLOR='1;33;4' # Matches
INFO_COLOR='1;32' # Information (e.g. current text, globality)
MATCHES_COLOR='37' # Match count
PROMPT_COLOR='1;36' # Prompt
TITLE_COLOR='1;33' # Emphasized messages
# Determine whether we can afford attempting ANSI codes or not.
# (hasTTY is true/false and is consulted by clearConsole and colorMsg)
tty -s <&1 && hasTTY=true || hasTTY=false
##
# Clears the console (requires ANSI terminal) and puts the prompt
# on the top-left corner.
# No-op (and returns non-zero) when stdout is not a terminal.
#
clearConsole()
{
$hasTTY && printf "\033[2J\033[H"
}
##
# ANSI-based colored output (never emits a trailing EOL itself).
#
# Two call conventions are accepted:
#   colorMsg MSG COLOR PADDING   - pad MSG to PADDING columns (0 = none)
#   colorMsg -n COLOR MSG        - legacy form used by the val_* helpers
#
# COLOR is an ANSI SGR code (e.g. "1;32"); codes are emitted only when
# stdout is a terminal (global hasTTY).
#
# BUG FIX: the val_* helpers call the documented "-n color msg" form,
# which previously landed "OK"/"KO" in $padding and made the -eq test
# and the printf width specifier fail; both forms are now handled.
#
colorMsg()
{
	if [ "$1" = "-n" ]; then
		# legacy form: -n COLOR MSG (no padding)
		color="$2"
		msg="$3"
		padding=0
	else
		msg="$1"
		color="$2"
		padding="${3:-0}"
	fi
	$hasTTY && printf "\033[${color}m"
	if [ "$padding" -eq 0 ]; then
		printf "%s" "$msg"
	else
		printf "%-${padding}s" "$msg"
	fi
	$hasTTY && printf "\033[0m"
}
# Valgrind-check configuration: can_val_test gates the valgrind runs,
# bin is the evaluator binary under test.
can_val_test=0
bin="../eval_expr"
# Report a clean valgrind run.
val_ok()
{
	printf '%s' "no error... -> ["
	colorMsg "-n" $INFO_COLOR "OK"
	echo "]"
}
# Report valgrind errors that are not leaks.
val_unknown()
{
	printf '%s' "unknown errors... -> ["
	colorMsg "-n" $ERROR_COLOR "KO"
	echo "]"
}
# Report memory leaks (allocation/free counts differ).
val_leak()
{
	printf '%s' "memory leaks... -> ["
	colorMsg "-n" $ERROR_COLOR "KO"
	echo "]"
}
##
# Runs the given command under valgrind and classifies the outcome via
# val_ok (clean), val_unknown (errors), or val_leak (allocs != frees).
# $1 = binary to run, $2 = file fed to its stdin.
# NOTE(review): the fixed temp path is shared between concurrent runs,
# and the field positions assume valgrind's summary-line layout.
#
test_valgrind()
{
tmp=/tmp/val_fractio.txt
valgrind < $2 $1 2> $tmp > /dev/null
# field 4 of the "ERROR SUMMARY" line = error count
res1=`grep 'ERROR' $tmp | cut -d" " -f 4`
# alloc and free counts from the heap summary line
res2=`grep 'allocs.*frees' $tmp | cut -d\ -f 3`
res3=`grep 'allocs.*frees' $tmp | cut -d\ -f 5`
err=0
[ 0 -ne $res1 ] && err=1
[ $res2 -ne $res3 ] && err=2
[ 0 -eq $err ] && val_ok
[ 1 -eq $err ] && val_unknown;
[ 2 -eq $err ] && val_leak;
rm -f $tmp
}
# Print a 70-column padded success line: "<label> -> [OK]".
test_ok()
{
	colorMsg "$1" $TITLE_COLOR 70
	printf ' -> ['
	colorMsg "OK" $INFO_COLOR 0
	printf ']\n'
}
# Print a 70-column padded failure line: "<label> -> [KO]".
test_ko()
{
	colorMsg "$1" $TITLE_COLOR 70
	printf ' -> ['
	colorMsg "KO" $ERROR_COLOR 0
	printf ']\n'
}
##
# Runs one expression test against the evaluator binary.
# $1 = binary, $2 = expression, $3 = output base (may be empty),
# $4 = expected stdout, $5 = expected exit code.
# Optionally valgrind-checks the run when can_val_test is 1.
#
dotest()
{
	[ $# -eq 5 ] || { echo "dotest: 5 arguments expected..."; exit 1; }
	testbin="$1"
	expression="$2"
	outbase="$3"
	expected_result="$4"
	expected_errcode="$5"
	if [ -z "$outbase" ]; then
		actual_result=$($testbin $expression)
	else
		actual_result=$($testbin $expression $outbase)
	fi
	actual_errcode="$?"
	# BUG FIX: quote both operands -- several tests legitimately expect an
	# empty result, and unquoted empty operands made [ a = b ] a syntax
	# error instead of a comparison.
	if [ "$actual_result" = "$expected_result" ]; then
		test_ok "${expression} = ${actual_result}"
	else
		test_ko "${expression} = ${actual_result}, but ${expected_result} expected"
	fi
	if [ "$actual_errcode" = "$expected_errcode" ]; then
		test_ok "return code = ${actual_errcode}"
	else
		test_ko "return code = ${actual_errcode}, but ${expected_errcode} expected"
	fi
	if [ 1 -eq $can_val_test ]; then
		# NOTE(review): $input is never assigned anywhere in this script,
		# so the valgrind run reads an empty stdin path -- confirm intended.
		test_valgrind "$testbin" "$input"
	fi
}
# Abort early when the binary under test is missing.
[ -e $bin ] || { echo "no \"$bin\" found..."; exit 1; }
# Level 1: plain decimal arithmetic, operator precedence, unary minus,
# and syntax errors (expected exit code 2) / division by zero (code 1).
echo "====================="
echo " TESTS DE NIVEAU 1"
echo "====================="
dotest "$bin" "1" "" "1" "0"
dotest "$bin" "1+2" "" "3" "0"
dotest "$bin" "2-1" "" "1" "0"
dotest "$bin" "2*5" "" "10" "0"
dotest "$bin" "42/6" "" "7" "0"
dotest "$bin" "7/0" "" "" "1"
dotest "$bin" "7/1" "" "7" "0"
dotest "$bin" "21+7*3" "" "42" "0"
dotest "$bin" "3+3*3+4" "" "16" "0"
dotest "$bin" "2*3%5" "" "1" "0"
dotest "$bin" "3/0" "" "" "1"
dotest "$bin" "5-6/4+6*7-7/63+9*7" "" "109" "0"
dotest "$bin" "3*5--3+6" "" "24" "0"
dotest "$bin" "3*5---3+6" "" "" "2"
dotest "$bin" "-3*-5--3+-6" "" "12" "0"
dotest "$bin" "-3*-5--3+-6+" "" "" "2"
dotest "$bin" "3+(2+" "" "" "2"
dotest "$bin" "forty-two" "" "" "2"
dotest "$bin" "3i" "" "" "2"
dotest "$bin" "6/5+3/14*82" "" "1" "0"
dotest "$bin" "53/3%1+111/555" "" "0" "0"
# Level 2: custom digit alphabets written "alphabet:number", optionally
# with an output alphabet passed as the third dotest argument.
echo "====================="
echo " TESTS DE NIVEAU 2"
echo "====================="
dotest "$bin" "01:1" "" "1" "0"
dotest "$bin" "01:100" "" "4" "0"
dotest "$bin" "01234:4" "" "4" "0"
dotest "$bin" "x:xxxx" "" "4" "0"
dotest "$bin" "!:" "0" "" "0"
dotest "$bin" "x:-0" "" "0" "0"
dotest "$bin" "-0:" "" "0" "0"
dotest "$bin" "1:+-1:" "" "0" "0"
dotest "$bin" "ab:bba*azertyuiop:i" "" "42" "0"
dotest "$bin" "0123456789abcdef:15+7*3" "" "42" "0"
dotest "$bin" "ac:cacaca+51/1:111" "" "59" "0"
dotest "$bin" "ac:cacaca+51/1:111" "0123456789abcdef" "3b" "0"
dotest "$bin" "-edf:eeeefde/w:www" "ba" "-aaa" "0"
dotest "$bin" "-0123456789:00000018/0123456789:3" "0123456789" "-6" "0"
# Level 3: parenthesized expressions, combined with custom alphabets.
echo "====================="
echo " TESTS DE NIVEAU 3"
echo "====================="
dotest "$bin" "1+2*4" "" "9" "0"
dotest "$bin" "(1+2)*4" "" "12" "0"
dotest "$bin" "7*(2+4)" "" "42" "0"
dotest "$bin" "(3+3)*(3+4)" "" "42" "0"
dotest "$bin" "2*3%5" "" "1" "0"
dotest "$bin" "5-6/4+6*7-7/63+9*7" "" "109" "0"
dotest "$bin" "(3*5)--(3+6)" "" "24" "0"
dotest "$bin" "-(-3*-5)--(-3+-6)" "" "-24" "0"
dotest "$bin" "-(-3*-5)--(-3+-6+)" "" "" "2"
dotest "$bin" "3+(2+" "" "" "2"
dotest "$bin" "(6/5+3/14)*82" "" "82" "0"
dotest "$bin" "53/3%(1+111/555)" "" "0" "0"
dotest "$bin" "-v.@:vv@.v*(x:/(40+gh:hg))" "AB" "A" "0"
dotest "$bin" "-v.@:vv@.v/(x:/(40+gh:hg))" "" "" "1"
dotest "$bin" "-v.@:vv@.v/(x::/(40+gh:hg))" "" "" "2"
dotest "$bin" "-(-3*-5)--(-3+-6)" "<" "-<<<<<<<<<<<<<<<<<<<<<<<<" "0"
dotest "$bin" "1" "<" "<" "0"
dotest "$bin" "-x:x" "" "-1" "0"
dotest "$bin" "-x:" "" "0" "0"
dotest "$bin" "-1" "<" "-<" "0"
# Level 4: builtin functions fact/fibo/pow/pgcd/ppcm, including nested
# calls and custom input/output alphabets.
echo "====================="
echo " TESTS DE NIVEAU 4"
echo "====================="
dotest "$bin" "fact(0)" "" "1" "0"
dotest "$bin" "fact(1)" "" "1" "0"
dotest "$bin" "fact(2)" "" "2" "0"
dotest "$bin" "fact(3)" "" "6" "0"
dotest "$bin" "fact(4)" "" "24" "0"
dotest "$bin" "fact(5)" "" "120" "0"
dotest "$bin" "fact(6)" "" "720" "0"
dotest "$bin" "fact(7)" "" "5040" "0"
dotest "$bin" "fibo(0)" "" "0" "0"
dotest "$bin" "fibo(1)" "" "1" "0"
dotest "$bin" "fibo(2)" "" "1" "0"
dotest "$bin" "fibo(3)" "" "2" "0"
dotest "$bin" "fibo(4)" "" "3" "0"
dotest "$bin" "fibo(5)" "" "5" "0"
dotest "$bin" "fibo(6)" "" "8" "0"
dotest "$bin" "fibo(7)" "" "13" "0"
dotest "$bin" "fibo(19)" "" "4181" "0"
dotest "$bin" "pow(2,0)" "" "1" "0"
dotest "$bin" "pow(2,1)" "" "2" "0"
dotest "$bin" "pow(2,2)" "" "4" "0"
dotest "$bin" "pow(2,3)" "" "8" "0"
dotest "$bin" "pow(2,4)" "" "16" "0"
dotest "$bin" "pow(2,5)" "" "32" "0"
dotest "$bin" "pow(2,6)" "" "64" "0"
dotest "$bin" "pow(2,7)" "" "128" "0"
dotest "$bin" "pow(2,8)" "" "256" "0"
dotest "$bin" "pow(2,9)" "" "512" "0"
dotest "$bin" "pow(2,10)" "" "1024" "0"
dotest "$bin" "pow(2,11)" "" "2048" "0"
dotest "$bin" "pow(2,12)" "" "4096" "0"
dotest "$bin" "pgcd(26,15)" "" "1" "0"
dotest "$bin" "pgcd(96,36)" "" "12" "0"
dotest "$bin" "pgcd(n123456789:42,n123456789:28)" "n123456789" "14" "0"
dotest "$bin" "ppcm(15,12)" "n123456789abcdef" "3c" "0"
dotest "$bin" "-pow(2,6)" "" "-64" "0"
dotest "$bin" "-pow((5+7)/4,2+10/5)" "" "-81" "0"
dotest "$bin" "fact((2+3)*2-5)" "" "120" "0"
dotest "$bin" "53/fibo(pow(2,2))%(1+111/555)-pgcd(96,36)" "" "-12" "0"
dotest "$bin" "fact(3)/(fact(2)*fact(3-2))" "01" "11" "0"
dotest "$bin" "pow(n1234567:7,p1234:2)+pgcd(10:00111,xy:yxxyx)" "0abcdefghi" "ee" "0"
| true
|
f9cdceb5eeef6748efcc4e03f9de7ff093ae9e0c
|
Shell
|
dfong/api-platform-samples
|
/sample-proxies/apigee-healthcheck/resources/cleanup.sh
|
UTF-8
| 2,617
| 3.890625
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Cleanup for the apigee-healthcheck sample: for each organization given
# with -o, delete the healthcheck app, developer, and API product,
# undeploy and delete the proxy, and remove the org's KVM entry.
# NOTE: this script relies on bash arrays and [[ ]], so the shebang must
# be bash (the old #!/bin/sh breaks under dash/ash).
usage() { echo "Usage: $0 [-o][org1,org2...] [-u] [-p] [-e]" ; exit 1; }
# All four options are mandatory: 4 flags + 4 values = 8 arguments.
if [ "$#" -ne "8" ]; then
    usage
    exit 1
fi
echo "****************** Cleanup script started at `TZ=":US/Pacific" date` by user at `echo $SSH_CLIENT | cut -d ' ' -f 1` ****************** "
MGMT_URL="https://api.e2e.apigee.net"
DEVELOPER="healthcheck@apigee.com"
ORG=""
USERNAME=""
PASSWORD=""
ENV=""
APP="apigee-healthcheck"
PRODUCT="apigee-healthcheck"
# Walk the flag/value pairs.  -gt is a numeric comparison; the original
# "[[ $# > 1 ]]" compared the operands as strings.
while [[ $# -gt 1 ]]
do
    key="$1"
    case $key in
        -o)
            ORG="$2"
            set -f        # disable globbing while word-splitting the list
            IFS=','       # split the -o value on commas
            orgs=($2)
            shift # past argument
            ;;
        -u)
            USERNAME="$2"
            shift # past argument
            ;;
        -p)
            PASSWORD="$2"
            shift # past argument
            ;;
        -e)
            ENV="$2"
            shift # past argument
            ;;
        *)
            echo "Invalid option: $key"
            usage
            exit
            ;;
    esac
    shift # past argument or value
done
#export ORG=$ORG
export USERNAME=$USERNAME
export PASSWORD=$PASSWORD
export URL=$MGMT_URL
export DEVELOPER=$DEVELOPER
echo "Number of orgs: ${#orgs[@]}"
echo "Deleting entities for these Organizations: "
for i in "${orgs[@]}"; do
    echo " ${i}"
done
for ORG in "${orgs[@]}"; do
    echo "Deleting App"
    curl -u "$USERNAME:$PASSWORD" "$URL/v1/o/$ORG/developers/$DEVELOPER/apps/$APP" -X DELETE
    echo "Deleting Developer"
    curl -u "$USERNAME:$PASSWORD" "$URL/v1/o/$ORG/developers/$DEVELOPER" -X DELETE
    echo "Deleting Product"
    curl -u "$USERNAME:$PASSWORD" "$URL/v1/o/$ORG/apiproducts/$PRODUCT" -X DELETE
    echo "Undeploy & Delete apigee-healthcheck proxy"
    # Undeploy revision 1 from the environment first, then delete the proxy.
    echo "UnDeploying apigee-healthcheck proxy from Org:$ORG"
    echo "$URL/v1/o/$ORG/environments/$ENV/apis/apigee-healthcheck/revisions/1/deployments"
    curl -X DELETE -u "$USERNAME:$PASSWORD" "$URL/v1/o/$ORG/environments/$ENV/apis/apigee-healthcheck/revisions/1/deployments"
    echo "Deleting apigee-healthcheck proxy from Org:$ORG"
    echo "$URL/v1/o/$ORG/apis/apigee-healthcheck"
    curl -X DELETE -u "$USERNAME:$PASSWORD" -H "content-type: application/json" "$URL/v1/o/$ORG/apis/apigee-healthcheck"
    # Remove this org's entry from the shared KVM in the apigee-bot org.
    echo "Deleting KVM entry"
    echo "$URL/v1/o/apigee-bot/environments/test/keyvaluemaps/EdgeOrganizations/entries/$ORG"
    curl -X DELETE -u "$USERNAME:$PASSWORD" -H "content-type: application/json" "$URL/v1/o/apigee-bot/environments/test/keyvaluemaps/EdgeOrganizations/entries/$ORG"
done
# bash's echo does not expand \n; printf produces the intended blank lines.
printf '\nCleanup Completed\n\n'
| true
|
f3aaeff2714df80d51220c99f91d2c221d768b02
|
Shell
|
kalyangoud145/Employee_Wage
|
/empwageproblem.sh
|
UTF-8
| 960
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
echo "welcome to employee wage calculation problem"
# Single-day attendance demo: RANDOM%2 yields 0 or 1, compared against
# the "present" code.
isPresent=1;
randomCheck=$((RANDOM%2));
if [ $isPresent -eq $randomCheck ]
then
echo "Employee is present"
else
echo "Employee is absent"
fi
# Attendance codes plus pay-rate and monthly caps for the simulation below.
IS_PART_TIME=1;
IS_FULL_TIME=2;
MAX_HRS_IN_MONTH=100;
EMP_RATE_PER_HR=20;
NUM_WORKING_DAYS=20;
# Running totals mutated by the main loop.
totalEmpHrs=0;
totalWorkingDays=0;
# Associative map: day number -> wage earned that day.
declare -A dailyWages
# Translate an attendance code ($1) into hours worked, stored in the
# global workHrs: $IS_FULL_TIME -> 8, $IS_PART_TIME -> 4, otherwise 0.
function getWorkingHrs() {
    case "$1" in
        $IS_FULL_TIME) workHrs=8 ;;
        $IS_PART_TIME) workHrs=4 ;;
        *)             workHrs=0 ;;
    esac
}
# Print the wage earned for $1 hours at EMP_RATE_PER_HR per hour.
function getEmpWage (){
    printf '%s\n' "$(( $1 * EMP_RATE_PER_HR ))"
}
# Simulate the month: each day draw full/part/absent at random and accrue
# hours until either the monthly hours cap or the working-days cap hits.
while [[ $totalEmpHrs -lt $MAX_HRS_IN_MONTH && $totalWorkingDays -lt $NUM_WORKING_DAYS ]]
do
((totalWorkingDays++))
# Sets the global workHrs for the drawn attendance code (0, 1, or 2).
getWorkingHrs $((RANDOM%3))
totalEmpHrs=$(( $workHrs + $totalEmpHrs ))
# Record that day's wage keyed by day number.
dailyWages["$totalWorkingDays"]=$(( $workHrs * $EMP_RATE_PER_HR ))
done
totalsalary="$( getEmpWage $totalEmpHrs )"
echo "${dailyWages[@]}"
| true
|
17b4a3e5c1a2bfda20bee6776794c2821510955b
|
Shell
|
rajeevpd/python3.7-alpine
|
/entrypoint.sh
|
UTF-8
| 301
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
# Container entrypoint: when LOCAL_USER_ID is supplied at runtime, create
# a matching local user (so bind-mounted files get the host UID) before
# exec'ing the container command.
if [ -n "${LOCAL_USER_ID:-}" ]; then
    # The :-9001 fallback is kept for safety, though the guard above
    # guarantees LOCAL_USER_ID is non-empty here.
    USER_ID=${LOCAL_USER_ID:-9001}
    echo "Starting with UID : $USER_ID"
    adduser -D -h /home/user -s /bin/sh -u "$USER_ID" user
    export HOME=/home/user
fi
# Replace this shell with the command passed to the container.
exec "$@"
| true
|
188e938a540e51dd13324a941c200f707b3a4fa6
|
Shell
|
j23414/adv-unix-workshop
|
/section-4/supplementary/summation-solution.sh
|
UTF-8
| 125
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/bash
# Sum column 7 grouped by the key in column 2 of the input file ($1),
# then align the resulting "key total" pairs with column(1).
# "$1" is quoted so filenames with spaces work; the old unquoted form
# also silently fell back to reading stdin when no argument was given.
awk '
{a[$2] += $7}
END{
for(k in a){
print k, a[k]
}
}
' "$1" | column -t
| true
|
eec7408d8ec7a6e789440b6ba8e12f435d252ee7
|
Shell
|
mxbi/shell_scripts
|
/compile_tf.sh
|
UTF-8
| 1,266
| 3.109375
| 3
|
[
"MIT"
] |
permissive
|
# Build TensorFlow 1.5.0 from source (CUDA 9.0 / cuDNN 7.0.5 / MKL) and
# install the resulting pip package.  Most steps are chained with a
# trailing && so a failure stops the remaining work.
echo "[STATUS] Cloning TF repo"
git clone https://github.com/tensorflow/tensorflow.git &&
echo "[STATUS] Installing Java"
# NOTE(review): the apt/add-apt-repository steps below are NOT part of
# the && chain, so their failures do not abort the script.
sudo apt install -y openjdk-8-jdk
yes | sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update && sudo apt-get -y install oracle-java8-installer
echo "[STATUS] Installing other dependencies"
sudo apt-get install -y libcupti-dev &&
sudo apt-get install -y pkg-config zip g++ zlib1g-dev unzip python &&
echo "[STATUS] Installing Bazel 0.8.1"
wget https://github.com/bazelbuild/bazel/releases/download/0.8.1/bazel_0.8.1-linux-x86_64.deb -O bazel.deb &&
sudo dpkg -i bazel.deb &&
rm bazel.deb &&
cd tensorflow &&
git checkout v1.5.0 &&
echo "[STATUS] !!!!!!!!! TENSORFLOW CONFIGURING:"
# Build with cuda support => y
# Cuda version => 9.0
# CuDNN version => 7.0.5
# All other configure options default (this will vary depending on your system).
./configure &&
echo "[STATUS] Building Tensorflow!"
bazel clean &&
bazel build --config=mkl --config=opt --config=cuda //tensorflow/tools/pip_package:build_pip_package &&
echo "[STATUS] Installing TF Package"
bazel-bin/tensorflow/tools/pip_package/build_pip_package /tmp/tensorflow_pkg &&
pip install /tmp/tensorflow_pkg/*.whl &&
rm /tmp/tensorflow_pkg/*.whl &&
echo "Done! :)"
| true
|
4e430c9a75fbc9f884be4a29a92224685e926fe1
|
Shell
|
kundajelab/Personal_genome_mapping
|
/new_ase_code/bin/recalSample.sh
|
UTF-8
| 1,515
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# GATK indel realignment and base-quality recalibration for one BAM.
# Required environment: GATK (install dir), INFILE (input BAM), OUTDIR
# (output dir), SEQ (reference fasta), TMP (scratch parent dir).
INDELS1="${GATK}/bundle/1.5/hg19/1000G_phase1.indels.hg19.vcf"
INDELS2="${GATK}/bundle/1.5/hg19/Mills_and_1000G_gold_standard.indels.hg19.sites.vcf"
SNPS="${GATK}/bundle/1.5/hg19/dbsnp_135.hg19.vcf"
COV="--standard_covs"
pref=$(basename "$INFILE")
pref=${pref/.bam}
outpref="${OUTDIR}/${pref}"
# Per-run scratch dir; RANDOM keeps concurrent runs from colliding.
tmpdir="${TMP}/tmp_${pref}_${RANDOM}_recal"
if [ -d "$tmpdir" ]; then
    echo "Existing temporary directory! Aborting..." 1>&2; exit 1;
else
    mkdir "$tmpdir"
fi
# Each GATK step is skipped when its output already exists, so the
# script can be re-run after a partial failure.
if [[ ! -f "${outpref}.realign.intervals" ]]; then
    java -Xmx8g -Djava.io.tmpdir="${tmpdir}" -jar "${GATK}/GenomeAnalysisTK.jar" -T RealignerTargetCreator -I "${INFILE}" -R "${SEQ}" -o "${outpref}.realign.intervals" -known "${INDELS1}" -known "${INDELS2}"
fi
if [[ ! -f "${outpref}.realign.bam" ]]; then
    java -Xmx8g -Djava.io.tmpdir="${tmpdir}" -jar "${GATK}/GenomeAnalysisTK.jar" -T IndelRealigner -I "${INFILE}" -R "${SEQ}" -targetIntervals "${outpref}.realign.intervals" -o "${outpref}.realign.bam" -known "${INDELS1}" -known "${INDELS2}"
fi
if [[ ! -f "${outpref}.realign.cov.cvs" ]]; then
    java -Xmx8g -Djava.io.tmpdir="$tmpdir" -jar "${GATK}/GenomeAnalysisTK.jar" -T CountCovariates -I "${outpref}.realign.bam" -R "${SEQ}" -knownSites "$SNPS" -knownSites "$INDELS1" -knownSites "$INDELS2" $COV -recalFile "${outpref}.realign.cov.cvs" -nt 4
fi
if [[ ! -f "${outpref}.recal.bam" ]]; then
    java -Xmx8g -Djava.io.tmpdir="$tmpdir" -jar "${GATK}/GenomeAnalysisTK.jar" -T TableRecalibration -I "${outpref}.realign.bam" -R "${SEQ}" -o "${outpref}.recal.bam" -recalFile "${outpref}.realign.cov.cvs"
fi
# rm without -r cannot remove a directory; -rf actually cleans up the
# scratch dir (the old "rm $tmpdir" always failed).
rm -rf "$tmpdir"
| true
|
bf85fe440cda4479d67b89dde2738439a0ed0e3c
|
Shell
|
jakubpawlowicz/nixos-setup
|
/home/jakub/.local/bin/rename-workspace
|
UTF-8
| 264
| 2.84375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
set -o errexit -o nounset
# New name for the focused sway workspace (required; nounset aborts the
# script with an error when it is missing).
TARGET_WORKSPACE=$1
# Ask sway for the focused workspace name: jq (run via nix-shell) picks
# it out of the JSON, and cut strips the surrounding double quotes.
SOURCE_WORKSPACE=$(nix-shell -p jq --run "swaymsg -t get_workspaces | jq '.[] | select(.focused==true).name' | cut -d '\"' -f2")
swaymsg "rename workspace $SOURCE_WORKSPACE to $TARGET_WORKSPACE"
| true
|
554e1f740d86999b3b9c07ed8a384fb69a443067
|
Shell
|
1814729186/ppopp17-artifact
|
/clean.sh
|
UTF-8
| 540
| 2.953125
| 3
|
[] |
no_license
|
#!/bin/sh
# Removes everything produced by the Groute artifact-evaluation setup so
# the environment can be rebuilt from scratch.
SCRIPT=$(readlink -f "$0")
SCRIPTPATH=$(dirname "$SCRIPT")
AEROOT=$SCRIPTPATH
echo "==== Groute Artifact Evaluation Cleanup ===="
# The root is quoted against spaces in the path; glob patterns stay
# outside the quotes so they still expand.
rm -rf "$AEROOT/setup.complete"
rm -rf "$AEROOT/code/groute/metis"
rm -rf "$AEROOT/code/mgbench/build"
rm -rf "$AEROOT/code/nccl/build"
rm -rf "$AEROOT/code/gunrock"
rm -rf "$AEROOT/code/groute/build"
rm -rf "$AEROOT"/*.exists
rm -rf "$AEROOT"/*.patched
rm -rf "$AEROOT"/figures/*.log
rm -rf "$AEROOT"/*.log
rm -rf "$AEROOT"/output/*
echo "==== Cleanup Complete ===="
| true
|
9bf4d56e4d63baee6fd769d68d175c9473acf51c
|
Shell
|
snowflakedb/libsnowflakeclient
|
/deps/util-linux-2.39.0/tests/ts/lsfd/lsfd-functions.bash
|
UTF-8
| 2,504
| 3.734375
| 4
|
[
"BSD-4-Clause-UC",
"GPL-2.0-only",
"LicenseRef-scancode-public-domain",
"GPL-3.0-or-later",
"BSD-2-Clause",
"GPL-2.0-or-later",
"LGPL-2.1-or-later",
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# Copyright (C) 2021 Masatake YAMATO <yamato@redhat.com>
#
# This file is part of util-linux.
#
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# Exit statuses used by test targets to signal why a run was skipped or
# failed (project-chosen values, not system errno numbers).
readonly ENOSYS=17
readonly EPERM=18
readonly ENOPROTOOPT=19
readonly EPROTONOSUPPORT=20
readonly EACCESS=21
# Poll once per second until process $1 reaches an interruptible-sleep
# ('S') state, i.e. the helper program has paused itself.
function lsfd_wait_for_pausing {
ts_check_prog "sleep"
local PID=$1
# NOTE(review): the regex is unanchored, so any ps stat string
# containing 'S' (e.g. "Ss") matches — confirm that is intended.
until [[ $(ps --no-headers -ostat "${PID}") =~ S.* ]]; do
sleep 1
done
}
# lsfd_compare_dev LSFD FILE EXPR
# Compare the DEV ("major:minor") column lsfd reports for the fd
# selected by EXPR with the device number stat(1) reports for FILE.
# Emits "DEVNUM[STR]: 0" on match, "DEVNUM[STR]: 1" plus diagnostics on
# mismatch.
function lsfd_compare_dev {
local LSFD=$1
local FILE=$2
local EXPR=$3
ts_check_prog "grep"
ts_check_prog "expr"
ts_check_prog "stat"
local DEV=$("${LSFD}" --raw -n -o DEV -Q "${EXPR}")
# NOTE(review): $? here is the status of `local`, not of ${LSFD}, so
# this always prints 0 — split declaration and assignment to fix.
echo 'DEV[RUN]:' $?
local MAJ=${DEV%:*}
local MIN=${DEV#*:}
# NOTE(review): (MAJ << 8) + MIN mirrors the classic dev_t layout and
# assumes minor numbers < 256 — verify against stat's %d on the
# devices this test actually uses.
local DEVNUM=$(( ( MAJ << 8 ) + MIN ))
local STAT_DEVNUM=$(stat -c "%d" "$FILE")
echo 'STAT[RUN]:' $?
if [ "${DEVNUM}" == "${STAT_DEVNUM}" ]; then
echo 'DEVNUM[STR]:' 0
else
echo 'DEVNUM[STR]:' 1
# Print more information for debugging
echo 'DEV:' "${DEV}"
echo 'MAJ:MIN' "${MAJ}:${MIN}"
echo 'DEVNUM:' "${DEVNUM}"
echo 'STAT_DEVNUM:' "${STAT_DEVNUM}"
fi
}
# lsfd renders a unix stream socket's NAME differently depending on
# whether the kernel reports "UNIX-STREAM" or plain "UNIX"; in the
# latter case lsfd appends " type=stream".  Strip that suffix so output
# from both kernel flavours compares equal.
lsfd_strip_type_stream()
{
    sed -e 's/ type=stream//'
}
# Newer kernels report unix dgram sockets made by socketpair(2) (and
# ones already connect(2)'ed) as "connected" in /proc/net/unix, while
# older kernels say "unconnected"; see kernel commit 83301b5367a9.
# Rewrite the old wording so output from old kernels matches new ones.
lsfd_make_state_connected()
{
    sed -e 's/state=unconnected/state=connected/'
}
| true
|
7a3d46f606fa36617427f0cb63327ff41c801869
|
Shell
|
Gardeezy/rtorrent-QPKG
|
/etc/init.d/rtorrent
|
UTF-8
| 7,887
| 3.75
| 4
|
[] |
no_license
|
#!/bin/sh
### BEGIN INIT INFO
# Provides: rtorrent_autostart
# Required-Start: $local_fs $remote_fs $network $syslog $netdaemons
# Required-Stop: $local_fs $remote_fs
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: rtorrent script using screen(1)
# Description: rtorrent script using screen(1) to keep torrents working without the user logging in
### END INIT INFO
#############
###<Notes>###
#############
# This script depends on screen.
# For the stop function to work, you must set an
# explicit session directory using ABSOLUTE paths (no, ~ is not absolute) in your rtorrent.rc.
# If you typically just start rtorrent with just "rtorrent" on the
# command line, all you need to change is the "user" option.
# Attach to the screen session as your user with
# "screen -dr rtorrent". Change "rtorrent" with srnname option.
# Licensed under the GPLv2 by lostnihilist: lostnihilist _at_ gmail _dot_ com
##############
###</Notes>###
##############
#######################
##Start Configuration##
#######################
# You can specify your configuration in a different file
# (so that it is saved with upgrades, saved in your home directory,
# or whateve reason you want to)
# by commenting out/deleting the configuration lines and placing them
# in a text file (say /home/user/.rtorrent.init.conf) exactly as you would
# have written them here (you can leave the comments if you desire
# and then uncommenting the following line correcting the path/filename
# for the one you used. note the space after the ".".
# . /etc/rtorrent.init.conf
#Do not put a space on either side of the equal signs e.g.
# user = user
# will not work
# system user to run as
# System user the screen session and rtorrent run as.
user="admin"
# the system group to run as, not implemented, see d_start for beginning implementation
# group=`id -ng "$user"`
# the full path to the filename where you store your rtorrent configuration
#config="`su -c 'echo $HOME' $user`/.rtorrent.rc"
config="/etc/rtorrent.conf"
# set of options to run with
options=""
# default directory for screen, needs to be an absolute path
base="`su -c 'echo $HOME' $user`/"
#base="/"
# name of screen session
srnname="rtorrent"
# file to log to (makes for easier debugging if something goes wrong)
#logfile="/share/Download/rtorrent/logs/rtorrentInit.log"
# Default the log file only when the caller has not already set it.
: ${QPX_LOG_FILE:="/share/Download/rtorrent/logs/rtorrent.log"}
#######################
###END CONFIGURATION###
#######################
#PATH=/usr/bin:/usr/local/bin:/usr/local/sbin:/sbin:/bin:/usr/sbin
#export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/games:/usr/games:/opt/bin:/opt/sbin"
#export PATH=$Int_PATH:$PATH:$Int_PATH
# Put the QPKG's bundled binaries (screen, su, rtorrent, ...) first.
export PATH=/usr/bin/rtorrent/bin:$PATH
DESC="engine program"
NAME=rtorrent
DAEMON=$NAME
SCRIPTNAME=/etc/init.d/$NAME
# NOTE(review): su_cmd is defined but never used below.
su_cmd="su -c"
# Sanity checks before any action: the rtorrent binary must exist on
# PATH, the config file must be readable, and the session directory
# named in the config must exist.  Exits 3 on any failure.
checkcnfg() {
exists=0
# Walk PATH entries looking for the rtorrent binary.
for i in `echo "$PATH" | tr ':' '\n'` ; do
if [ -f $i/$NAME ] ; then
exists=1
break
fi
done
if [ $exists -eq 0 ] ; then
echo "[ rtorrent::engine ] cannot find rtorrent binary in PATH $PATH" | tee -a "$QPX_LOG_FILE" >&2
exit 3
fi
if ! [ -r "${config}" ] ; then
echo "[ rtorrent::engine ] cannot find readable config ${config}. check that it is there and permissions are appropriate" | tee -a "$QPX_LOG_FILE" >&2
exit 3
fi
# getsession extracts the "session = ..." path from the config file;
# the result is also reused later by d_start/d_stop.
session=`getsession "$config"`
if ! [ -d "${session}" ] ; then
echo "[ rtorrent::engine ] cannot find readable session directory ${session} from config ${config}. check permissions" | tee -a "$QPX_LOG_FILE" >&2
exit 3
fi
}
# Start rtorrent inside a detached screen session owned by $user, then
# record its PID (from the session lock file) in /tmp/rtorrent.lock.
d_start() {
# [ -f "$QPX_LOG_FILE" ] && rm -rf "$QPX_LOG_FILE"
# local _dDIR="/share/Download/rtorrent"
# local _DIRS="${_dDIR} ${_dDIR}/downloads ${_dDIR}/session ${_dDIR}/watch ${_dDIR}/complete ${_dDIR}/unpack ${_dDIR}/temp ${_dDIR}/settings ${_dDIR}/logs"
# for d in ${_DIRS}; do
# [ -d "${d}" ] || {
# echo "[ rtorrent::engine ] !mkdir: ${d}"
# mkdir -p "${d}" ; chmod 777 "${d}"
# } && {
# echo "[ rtorrent::engine ] +777: ${d}"
# chmod 777 "${d}"
# }
# done
# rm -f ${_dDIR}/logs/* ; rm -f ${_dDIR}/temp/*
#chmod 666 /dev/ptmx ; chmod 666 /dev/null
# Ensure the download root exists, then run from $base.
local d="/share/Download/rtorrent/" ; [ ! -d "${d}" ] && mkdir -p "${d}"
[ -d "${base}" ] && cd "${base}"
# Free ^S/^Q so rtorrent's UI is not frozen by terminal flow control.
stty stop undef && stty start undef
# Create the named screen session for $user unless one already exists.
/usr/bin/rtorrent/bin/su -c "screen -ls | /usr/bin/rtorrent/bin/grep -sq "\.${srnname}[[:space:]]" " ${user} || /usr/bin/rtorrent/bin/su -c "screen -dm -S ${srnname} 2>&1 1>/dev/null" ${user} | /usr/bin/rtorrent/bin/tee -a "$QPX_LOG_FILE" >&2
# this works for the screen command, but starting rtorrent below adopts screen session gid
# even if it is not the screen session we started (e.g. running under an undesirable gid
#/usr/bin/rtorrent/bin/su -c "screen -ls | grep -sq "\.${srnname}[[:space:]]" " ${user} || /usr/bin/rtorrent/bin/su -c "sg \"$group\" -c \"screen -fn -dm -S ${srnname} 2>&1 1>/dev/null\"" ${user} | tee -a "$QPX_LOG_FILE" >&2
# Launch rtorrent as a new window inside that screen session.
/usr/bin/rtorrent/bin/su -c "/usr/bin/rtorrent/bin/screen -S "${srnname}" -X screen /usr/bin/rtorrent/bin/rtorrent.exe ${options} 2>&1 1>/dev/null" ${user} | /usr/bin/rtorrent/bin/tee -a "$QPX_LOG_FILE" >&2
# Exit status of the su/screen stage, not of the trailing tee.
RETVAL=${PIPESTATUS[0]}
sleep 1
sync
# rtorrent writes "host:PID" into its session lock file; strip
# everything non-numeric after the colon to get the PID.
if [ -s "${session}/rtorrent.lock" ]; then
pid=`cat ${session}/rtorrent.lock | awk -F: '{print($2)}' | sed "s/[^0-9]//g"`
echo -n "$pid" > /tmp/rtorrent.lock
fi
exit $RETVAL
}
# Stop rtorrent gracefully: read its PID from the session lock file,
# then escalate SIGINT (8s) -> SIGTERM (5s) -> SIGKILL until it exits.
d_stop() {
session=`getsession "$config"`
if ! [ -s ${session}/rtorrent.lock ] ; then
echo "[ rtorrent::engine ] Socket/Lock file: ${session}/rtorrent.lock ${red}NOT FOUND${nc}"
exit 1
fi
# Lock file format is "host:PID"; keep only the digits after the colon.
pid=`cat ${session}/rtorrent.lock | awk -F: '{print($2)}' | sed "s/[^0-9]//g"`
if ps -A | grep -sq ${pid}.*rtorrent ; then # make sure the pid doesn't belong to another process
echo "[ rtorrent::engine ] Socket/Lock file: ${session}/rtorrent.lock"
echo "[ rtorrent::engine ] Stop rtorrent from PID: ${yellow}${pid}${nc}"
echo "[ rtorrent::engine ] -${green}SIGINT${nc}: Sending Term Interrupt signal."
kill -s INT ${pid}
# Give rtorrent up to 8 seconds to shut down on SIGINT.
x=8;
while [ ! -z "`ps -A | grep rtorrent | grep ${pid}`" -a "$x" -gt 0 ]; do
echo "[ rtorrent::engine ] -${green}SIGINT${nc} ... ${bd}$x${nc} seconds left"
x=$(expr $x - 1)
sleep 1
done
# Still alive: escalate to SIGTERM with a 5-second grace period.
if ps -A | grep -sq ${pid}.*rtorrent ; then
echo "[ rtorrent::engine ] -${cyan}SIGTERM${nc}: Sending Term Termination signal."
kill -s TERM ${pid}
x=5;
while [ ! -z "`ps -A | grep rtorrent | grep ${pid}`" -a "$x" -gt 0 ]; do
echo "[ rtorrent::engine ] -${cyan}SIGTERM${nc} ... ${bd}$x${nc} seconds left"
x=$(expr $x - 1)
sleep 1
done
fi
# Last resort: SIGKILL cannot be ignored.
if ps -A | grep -sq ${pid}.*rtorrent ; then
echo "[ rtorrent::engine ] -${red}SIGKILL${nc}: Sending Term Kill signal!"
kill -s KILL ${pid}
echo "[ rtorrent::engine ] -${red}SIGKILL${nc} ... ${bd}0${nc} seconds left"
fi
else
echo "[ rtorrent::engine ] ${red}rtorrent process instance not found!${nc}"
exit 1
fi
}
# Print the session directory configured in the rtorrent config file
# ($1): the value of the first line matching "session = <path>".
getsession() {
    # grep reads the file directly (no useless cat); sed then strips
    # the "session =" prefix, leaving only the path.
    session=$(grep "^[[:space:]]*session[[:space:]]*=" "$1" | sed "s/^[[:space:]]*session[[:space:]]*=[[:space:]]*//")
    # Deliberately unquoted, matching the original: word splitting
    # collapses stray whitespace (paths must not contain spaces).
    echo $session
}
# Validate binary/config/session before doing anything.
checkcnfg
# Standard init-script action dispatch.
case "$1" in
start)
echo "[ rtorrent::engine ] Starting $DESC: $NAME"
d_start
;;
stop)
#echo "[ rtorrent::engine ] Stopping $DESC: $NAME"
d_stop
;;
restart|force-reload)
#echo "[ rtorrent::engine ] Restarting $DESC: $NAME"
d_stop
sync
sleep 1
d_start
;;
*)
echo "[ rtorrent::engine ] Usage: $SCRIPTNAME {start|stop|restart|force-reload}" >&2
exit 1
;;
esac
exit 0
| true
|
0b4e75fc21bf627062de79c80aaf0200d74ae93f
|
Shell
|
mrromadon/theportal
|
/ram-iptables.cgi
|
UTF-8
| 3,090
| 2.6875
| 3
|
[] |
no_license
|
#!/bin/bash
# CGI endpoint: on form submit, append an iptables ACCEPT/REJECT rule
# for the submitted IP or port to the matching rule fragment, regenerate
# the firewall, then render the entry form.
# SECURITY(review): the form value is interpolated into rule files with
# no validation — sanitize it before trusting this endpoint.
# LOGIC #
# proccgi turns CGI form fields into FORM_* shell variables.
eval "`/u2/files/proccgi $*`"
if [ "$FORM_SUBMIT" = "SuBM1t" ]
then
# NOTE(review): FORM_SUBMIT is read with the FORM_ prefix, but
# SERVICE_PORT and ENTRY_VALUE below are not — these look like they
# should be $FORM_SERVICE_PORT / $FORM_ENTRY_VALUE; confirm against
# proccgi's output convention.
case $SERVICE_PORT in
FTP) echo "-A INPUT -p tcp -s $ENTRY_VALUE -m state --state NEW -m tcp --dport 21 -j ACCEPT" >> /u/02.1.SCRIPT-RAW/IPTABLES-GEN/01.FTP;;
SSH) echo "-A INPUT -p tcp -s $ENTRY_VALUE -m state --state NEW -m tcp --dport 22 -j ACCEPT" >> /u/02.1.SCRIPT-RAW/IPTABLES-GEN/02.SSH;;
TELNET) echo "-A INPUT -p tcp -s $ENTRY_VALUE -m state --state NEW -m tcp --dport 23 -j ACCEPT" >> /u/02.1.SCRIPT-RAW/IPTABLES-GEN/03.TELNET;;
OTHERS_IP) echo "-A INPUT -p tcp -s $ENTRY_VALUE -m state --state NEW -m tcp -j ACCEPT" >> /u/02.1.SCRIPT-RAW/IPTABLES-GEN/50.OTHERS;;
OTHERS_PORT) echo "-A INPUT -p tcp -m state --state NEW -m tcp --dport $ENTRY_VALUE -j ACCEPT" >> /u/02.1.SCRIPT-RAW/IPTABLES-GEN/50.OTHERS;;
BLOCK_IP) echo "-A INPUT -p tcp -s $ENTRY_VALUE -m state --state NEW -m tcp -j REJECT" >> /u/02.1.SCRIPT-RAW/IPTABLES-GEN/98.BLOCK;;
esac
# Rebuild and apply the full ruleset from the fragments above.
sudo /u2/IPTABLES-GEN/iptables_gen.sh
fi
## HTML HEADER ##
cat ram-header
## HTML BODY ##
cat ram-app_bar
## HTML CONTAIN ##
cat<<EndCat
<div class="container page-content">
<div class="login-form padding20 block-shadow">
<form METHOD=POST>
<h1 class="text-light">Tambah IP Akses</h1>
<hr class="thin"/>
<h4> IP Dirimu : $REMOTE_ADDR | </h4>
<br />
<h4> Entry Input Value</h4>
<div class="input-control modern text full-size" data-role="input">
<input type="text" name="ENTRY_VALUE">
<span class="label">Nilai Masukan</span>
<span class="informer">Silahkan masukan Nilai Masukan</span>
<span class="placeholder">Nilai Masukan</span>
<button class="button helper-button clear"><span class="mif-cross"></span></button>
</div>
<br />
<br />
<h4> Entry Service Port</h4>
<div class="input-control modern select full-size">
<select name="SERVICE_PORT">
<option value="SSH">SSH</option>
<option value="FTP">FTP</option>
<option value="TELNET">Telnet</option>
<option value="OTHERS_IP">Others by IP</option>
<option value="OTHERS_PORT">Others by Port</option>
<option value="BLOCK_IP">Block IP Address</option>
</select>
</div>
<br />
<br />
<br />
<div class="form-actions">
<button type="submit" class="button primary" VALUE="SuBM1t" NAME="FORM_SUBMIT">Save</button>
</div>
</form>
</div>
</div>
EndCat
## HTML FOOTER ##
cat ram-footer
| true
|
6d2c41b6e95e859f9c803681009b0cde7dd59ba1
|
Shell
|
GoogleCloudPlatform/kafka-pubsub-emulator
|
/go/src/kafka-pubsub-emulator-gateway/build.sh
|
UTF-8
| 2,909
| 3.15625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast: abort on errors, unset variables, and pipeline failures.
set -eu -o pipefail
# Unquoted glob — it expands to the proto files at each "for filename in" site.
PROTO_PATH=proto/*.proto
GOPATH=$(go env GOPATH)
# Wipe generated artifacts under internal/ and cmd/, then recreate the
# swagger output directory used by the protoc swagger plugin.
clean() {
    echo "==== Initiating Clean Process ===="
    rm -rf internal/* cmd/*
    mkdir internal/swagger
    echo "==== Finish Clean Process ===="
}
# Fetch the protoc plugins and Go dependencies, then generate Go/gRPC,
# grpc-gateway, and swagger code for every proto file in PROTO_PATH.
build_proto_files(){
    go get -u -v github.com/spf13/cobra/cobra && \
    go get -u -v google.golang.org/grpc && \
    go get -u -v github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway && \
    go get -u -v github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger && \
    go get -u -v github.com/golang/protobuf/protoc-gen-go
    # Each protoc invocation must receive the current proto file; the
    # file previously contained a corrupted "$(unknown)" placeholder
    # here, which broke all three generation loops.
    for filename in ${PROTO_PATH}; do
        protoc -I/usr/local/include -I. \
            -I${GOPATH}/src \
            -I${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \
            --plugin=protoc-gen-go=${GOPATH}/bin/protoc-gen-go \
            --go_out=plugins=grpc:./internal \
            "$filename"
    done
    for filename in ${PROTO_PATH}; do
        protoc -I/usr/local/include -I. \
            -I${GOPATH}/src/google.golang.org/genproto/googleapis \
            -I${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \
            -I${GOPATH}/src/github.com/golang/protobuf/ptypes \
            -I${GOPATH}/src \
            --plugin=protoc-gen-grpc-gateway=${GOPATH}/bin/protoc-gen-grpc-gateway \
            --grpc-gateway_out=logtostderr=true:./internal \
            "$filename"
    done
    for filename in ${PROTO_PATH}; do
        protoc -I/usr/local/include -I. \
            -I${GOPATH}/src \
            -I${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis \
            --plugin=protoc-gen-swagger=${GOPATH}/bin/protoc-gen-swagger \
            --swagger_out=logtostderr=true:./internal/swagger \
            "$filename"
    done
}
# Generate code, produce the linux binary, and build the docker image.
# NOTE(review): install (called below) runs clean + build_proto_files
# again, so generation happens twice on this path — confirm intended.
build() {
echo "==== Initiating Build Process ===="
build_proto_files
install
docker build -t kafka-pubsub-emulator-gateway:1.0.0.0 .
echo "==== Finish Build Process ===="
}
# Clean, regenerate proto code, and compile a static linux binary into
# cmd/ (CGO disabled for a dependency-free executable).
install() {
echo "==== Initiating Install Process ===="
clean
build_proto_files
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o cmd/kafka-pubsub-emulator-gateway .
echo "==== Finish Install Process ===="
}
# Default target when no recognized argument is given: clean then build.
execute() {
clean
build
}
# Dispatch on the first CLI argument; anything unrecognized (including
# no argument at all) runs the default clean-and-build flow.
case "${1:-}" in
    clean)   clean ;;
    build)   build ;;
    install) install ;;
    *)       execute ;;
esac
| true
|
0823d5ae3821756c41418bae1b4100d78194f102
|
Shell
|
Tedezed/bitnami-docker-mariadb
|
/10.2/rhel-7/rootfs/setup.sh
|
UTF-8
| 464
| 2.875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Image setup for MariaDB: validate env settings, create the daemon user
# (when running as root), initialize the data directory, and run any
# user-provided init scripts.  MariaDB is stopped again on exit.
set -o errexit
set -o nounset
set -o pipefail
# set -o xtrace
# Helper libraries shipped in the image root.
. /libfs.sh
. /libmariadb.sh
. /libos.sh
# ensure MariaDB env var settings are valid
mysql_valid_settings
# ensure MariaDB is stopped when this script ends.
trap "mysql_stop" EXIT
if am_i_root; then
ensure_user_exists "$DB_DAEMON_USER" "$DB_DAEMON_GROUP"
fi
# ensure MariaDB is initialized
mysql_initialize
# allow running custom initialization scripts
# NOTE(review): "msyql_..." looks like a typo for "mysql_...", but it
# must match whatever /libmariadb.sh actually defines — verify there.
msyql_custom_init_scripts
| true
|
5063370037a2de90f68d3e4b25eef2a03c0dd757
|
Shell
|
niko-progress/progress-devops
|
/Lectures/Bash-Intro/Homework/task-1.sh
|
UTF-8
| 208
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
# Report whether each listed package is installed, using dpkg-query.
packages=(python3-pip python3)
# Quoted "${packages[@]}" keeps each name a single word; an explicit
# if/else replaces the old "cmd && echo || echo" chain, which would
# also run the failure branch if the success echo itself failed.
for package in "${packages[@]}"
do
    if dpkg-query --show "$package" &> /dev/null; then
        echo "${package} is installed"
    else
        echo "${package} is nowhere to be found"
    fi
done
| true
|
d3158b2f528441f887e022afc51d5989cbb9b20b
|
Shell
|
mohansantosh/f5-icontrol-gateway
|
/config/etc/service/f5-icontrol-trusted-devices-express/finish
|
UTF-8
| 77
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/sh
# Service "finish" hook: record the PID of the running node process in
# ./log (with a timestamp) and wait for it.
NAME=node
PID=`pgrep $NAME`
# Truncates ./log each run, then appends the timestamp.
echo $PID > log
date >> log
# NOTE(review): the shell builtin `wait` only waits on children of this
# shell; the pgrep'd node process is not a child, so this wait likely
# returns immediately with an error — confirm the intended behaviour.
wait $PID
| true
|
9ea037b473e026ad8f2772a7cf0f3457fab053ef
|
Shell
|
jordiabante/myutils
|
/src/simulate_fastq_pair/simulate_fastq_pair.sh
|
UTF-8
| 1,239
| 4.0625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Generate a synthetic mate (_R2) FASTQ from an input FASTQ(.gz): every
# sequence line is reverse-complemented, all other lines pass through,
# and the result is gzipped into <outdir>/<prefix>_R2.fastq.gz.
shopt -s extglob
abspath_script="$(readlink -f -e "$0")"
script_absdir="$(dirname "$abspath_script")"
script_name="$(basename "$0" .sh)"
# No arguments: print the bundled help text and exit.
if [ $# -eq 0 ]
then
cat "$script_absdir/${script_name}_help.txt"
exit 1
fi
# Parse -h/--help and -d/--outdir with GNU getopt.
TEMP=$(getopt -o hd: -l help,outdir: -n "$script_name.sh" -- "$@")
if [ $? -ne 0 ]
then
echo "Terminating..." >&2
# NOTE(review): "exit -1" is non-portable; it wraps to status 255.
exit -1
fi
eval set -- "$TEMP"
# Defaults
outdir="$PWD"
while true
do
case "$1" in
-h|--help)
cat "$script_absdir"/${script_name}_help.txt
exit
;;
-d|--outdir)
outdir="$2"
shift 2
;;
--)
shift
break
;;
*)
echo "$script_name.sh:Internal error!"
exit -1
;;
esac
done
# Read input file
readFile="$1"
readName="$(basename "$readFile")"
readDir="$(dirname "$readFile")"
# Output prefix
# Everything before the first underscore of the input name.
prefix="${readName%%_*}"
outfile="${outdir}/${prefix}_R2.fastq.gz"
# Outdir
mkdir -p "$outdir"
# FASTQ or FASTA
sequence_position=4
# Run
# j cycles 1..4; starting at 3 makes j hit 4 on lines 2, 6, 10, ... —
# exactly the sequence line of each 4-line FASTQ record.  Those lines
# are piped through the external reverse_complement.sh helper.
j=3
while read line;
do
if [ "$j" -eq "$sequence_position" ]
then
n="$(reverse_complement.sh "$line")"
j=1
else
n="$line"
j="$((j+1))"
fi
echo -e "$n"
done < <(zcat -f -- "$readFile") | gzip > "$outfile"
| true
|
399fe483f210839cc4f34ba0977db3447ee8cb3e
|
Shell
|
higuchi-toshio-intec/example-picam
|
/picam_viewer/picam_viewer.sh
|
UTF-8
| 191
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch the picam viewer, defaulting its environment configuration.
# ${VAR:-} treats empty-but-set the same as unset, matching the old
# [ "$VAR" = "" ] checks without tripping over nounset elsewhere.
if [ -z "${PICAM_DATA:-}" ] ; then
    export PICAM_DATA=/opt/picam_data
fi
if [ -z "${PICAM_PORT:-}" ] ; then
    export PICAM_PORT=8080
fi
# Abort when the app directory is missing rather than running
# picam_viewer.py from the wrong working directory.
cd app || exit 1
python picam_viewer.py
| true
|
5b0378e6ab48a81ba1f182b017af9876a781a43a
|
Shell
|
PeterJCLaw/srcomp-dev
|
/init.sh
|
UTF-8
| 2,537
| 3.671875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Bootstrap a local SRComp development environment: locate a Python 3,
# create a virtualenv, clone every srcomp-* component repository, and
# install / build each one with local-development configuration.

# Clone a GitHub repo (with submodules) unless the directory already
# exists. $1 = repo name, $2 = optional GitHub user (default PeterJCLaw).
function clone_repo {
if [ -d "$1" ]; then
echo 'Skipped: already exists.'
else
git clone --recursive https://github.com/${2:-PeterJCLaw}/$1
fi
}
# Find the newest available Python 3 interpreter on PATH.
for POSSIBLE_PYTHON in python3.9 python3.8 python3.7 python3 python;
do
PYTHON=$(which $POSSIBLE_PYTHON)
$PYTHON --version 2>&1 | grep -E 'Python (3\.)' >/dev/null
if [ $? -eq 0 ]; then
echo "Found Python: $PYTHON"
break
else
PYTHON=
fi
done
if [ -z "$PYTHON" ]; then
echo "No suitable Python installation found."
exit 1
fi
# Ubuntu 14.04's python3-venv is broken; remember that so we can fall
# back to /usr/bin/virtualenv below.
if [ -f /etc/lsb-release ]; then
if grep 'Ubuntu 14\.04' /etc/lsb-release; then
DODGY_UBUNTU=1
fi
fi
# Check that yarn is installed
yarn --version
if [ $? -ne 0 ]; then
npm --version
if [ $? -ne 0 ]; then
echo "npm not installed. Install it through your system package manager."
exit 1
fi
echo "yarn not installed. Please install it:"
echo "$ npm install -g yarn"
exit 1
fi
# Abort on any failure from here on.
set -e
if [ -n "$DODGY_UBUNTU" ]; then
echo "Using /usr/bin/virtualenv due to Ubuntu 14.04's broken Python 3.4"
/usr/bin/virtualenv -p "$PYTHON" venv
else
"$PYTHON" -m venv venv
fi
source venv/bin/activate
# -v: echo each subsequent line as it is read, for progress visibility.
set -v
pip install -U setuptools pip
pip install -r requirements.txt
# Fetch all component repositories (idempotent; see clone_repo).
clone_repo ranker
clone_repo srcomp
clone_repo srcomp-http
clone_repo srcomp-screens
clone_repo dummy-comp
clone_repo srcomp-scorer
clone_repo srcomp-cli
clone_repo srcomp-stream
clone_repo srcomp-kiosk
clone_repo srcomp-puppet
clone_repo livestream-overlay srobo
# Install the Python components in editable (development) mode.
cd ranker
pip install -e .
cd ..
cd srcomp
pip install -e .
cd ..
cd srcomp-http
pip install -e .
cd ..
cd srcomp-scorer
pip install -e .
cd ..
cd srcomp-cli
pip install -e .
cd ..
cd srcomp-screens
yarn install
# Generate config.json from the example, pointing the screens at the
# local API (port 5112) and stream (port 5001).
python -c '
import sys, json
print(json.dumps({
**json.load(sys.stdin),
"apiurl": "http://localhost:5112/comp-api",
"streamurl": "http://localhost:5001/"
}, indent=2))' <config.example.json >config.json
cd ..
cd srcomp-stream
# Point the stream service at the local API.
sed 's_SRCOMP: .*_SRCOMP: "http://localhost:5112/comp-api"_' <config.local.coffee.example >config.local.coffee
npm install
cd ..
cd livestream-overlay
npm install
npm run build
# Point the overlay at the local stream and API endpoints.
sed 's_streamServerURI.*_streamServerURI = "http://localhost:5001/";_;
s_apiURI.*_apiURI = "http://localhost:5112/";_' settings.example.js >settings.js
cd ..
set +v
echo "-- DONE SETUP --"
echo "Usage: "
echo " (1) Activate the virtualenv: source venv/bin/activate"
echo " (2) Run everything with run.py"
| true
|
333d32b9bfd8d34b91d11d2542ee2e10ce2101e6
|
Shell
|
Migacz85/dotfiles
|
/setup-env.sh
|
UTF-8
| 6,979
| 2.78125
| 3
|
[] |
no_license
|
#!/bin/bash # Script for installation and setting fresh environment
# on manjaro
# Run symlinking.sh script first
#
# Personal Manjaro post-install script: refreshes mirrors, upgrades the
# system, symlinks dotfiles, then installs a long list of pacman/AUR
# packages, desktop tooling, vim/emacs setups and a startup service.
# Intended to be run interactively by the repo owner.
#First clone this
#git clone https://github.com/Migacz85/dotfiles.git --recurse-submodules
#Configure mirrors
sudo pacman-mirrors --fasttrack
#Upgrade system
yes | sudo pacman -Syyu
#Git config
git config --global user.email "migacz85@gmail.com"
git config --global user.name "Marcin"
git config --global user.name "Type_Password_before"
#Symlinking dotfiles with dotfiles folder
bash $HOME/dotfiles/.scripts/capsesc.sh
cd dotfiles/.package-list
cp $HOME/dotfiles/.package-list/my-manjaro-packages-Q.txt $HOME/dotfiles/.old-package-list/my-manjaro-packages-Q.txt
bash $HOME/dotfiles/symlinking.sh
read -p "Extend .bashrc with loading a .bash_profile? (run this only once)" yn
# NOTE(review): this case is not wrapped in a loop, so 'break' below is
# outside any loop (bash warns and continues) and the "Please answer"
# arm cannot re-prompt -- verify the intended while-loop was dropped.
case $yn in
[Yy]* ) bash $HOME/dotfiles/fixes.sh; break;;
[Nn]* ) exit;;
* ) echo "Please answer yes or no.";;
esac
#AUR support
yes | sudo pacman -S base-devel
yes | sudo pacman -S yay
#Compositor
# sudo pacman -S picom
yay picom-ibhagwan-git
#Wallpapers
mkdir $HOME/.wallpapers
cd $HOME/.wallpapers
curl --output 1.jpg https://wallroom.io/2560x1440/bg-381e88e/download
curl --output 2.jpg https://forums.tumult.com/uploads/db2156/original/3X/2/6/264395883921d9ac5c74d5f5b841b00c2c7b130e.jpeg
curl --output 3.jpg https://wallroom.io/2560x1440/bg-2efe366/download
#Cursor Installation
# NOTE(review): pacman is invoked here without sudo -- likely needs
# 'sudo pacman -S capitaine-cursors'; confirm.
yes | pacman -S capitaine-cursors
cd ~Downloads/
sudo cp -r /usr/share/icons/capitaine-cursors/ /.icons/
#If you want to install AUR packages from file:
# for i in `cat my-manjaro-AUR-packages-Qm.txt` ; do yay -y install $i; done
# --- Core CLI tooling ---
yes | sudo pacman -S binutils --noconfirm
yes | sudo pacman -S bat --noconfirm
yes | sudo pacman -S i3-gaps --noconfirm
yes | sudo pacman -S i3status --noconfirm
yes | sudo pacman -S rofi --noconfirm
yes | sudo pacman -S pavucontrol --noconfirm
yes | sudo pacman -S volumeicon --noconfirm
yes | sudo pacman -S ranger --noconfirm
yes | sudo pacman -S unp --noconfirm
yes | sudo pacman -S unzip --noconfirm
yes | sudo pacman -S fzf --noconfirm
yes | sudo pacman -S stow --noconfirm
yes | sudo pacman -S vim --noconfirm
yes | sudo pacman -S alacritty --noconfirm
yes | sudo pacman -S feh --noconfirm
yes | sudo pacman -S cmake --noconfirm
yes | sudo pacman -S udiskie --noconfirm
yes | sudo pacman -S xsel --noconfirm
yes | sudo pacman -S ncdu --noconfirm
yes | sudo pacman -S bat --noconfirm
yes | sudo pacman -S python-pip --noconfirm
yes | sudo pacman -S npm --noconfirm
yes | sudo pacman -S ruby --noconfirm
yes | sudo pacman -S w3m --noconfirm
yes | sudo pacman -S mpv --noconfirm
yes | sudo pacman -S network-manager-applet --noconfirm
yes | sudo pacman -S xorg-xwininfo --noconfirm
yes | sudo pacman -S xorg-xbacklight --noconfirm
yes | sudo pacman -S xdotool --noconfirm
#For autorotation on convertible laptops
# sudo pacman -S xorg-xinput
# yay iio-sensor-proxy
#Emoticons
yay emojione-picker-git
# Notifications
yay deadd-notification-center
sudo pip3 install i3ipc --noconfirm
#APPS
yes | sudo pacman -S thunar --noconfirm
yes | sudo pacman -S copyq --noconfirm
yes | sudo pacman -S emacs --noconfirm
yes | sudo pacman -S chromium --noconfirm
yes | sudo pacman -S thunderbird --noconfirm
yes | sudo pacman -S virtualbox --noconfirm
yes | sudo pacman -S calibre --noconfirm
yes | sudo pacman -S epr --noconfirm
yay qbittorrent-dark-git
yay telegram-desktop
# Controlling brightness
yay brillo
# sudo pacman -S spectacle
yay straw-viewer
pip3 install --user youtube-dl
yay imgur-screenshot
#Mail
# https://wiki.archlinux.org/index.php/Msmtp
#
yay msmtp
chmod 600 ~/.msmtprc
#Bluetooth
sudo pacman -S bluetooth pulseaudio-bluetooth bluez-utils pulseaudio-alsa
#Cli torrents
#In console type: t name_of_file
yes | sudo pip3 install pirate-get
mkdir $HOME/Downloads/torrents
#qutebrowser
yes | sudo pacman -S qutebrowser --noconfirm
mkdir ~/Downloads/git/
cd ~/Downloads/git/
git clone https://github.com/alphapapa/solarized-everything-css.git
#Ranger
git clone https://github.com/alexanderjeurissen/ranger_devicons ~/.config/ranger/plugins/ranger_devicons
# NOTE(review): this cd targets the absolute path /.config/... rather
# than ~/.config/... -- presumably a typo; verify.
cd /.config/ranger/plugins/ranger_devicons
make
yay devicons
yay nerd-fonts-noto-sans-mono
mkdir ~/Android
mkdir ~/Trash
mkdir $HOME/log
yes | sudo pacman -S sshfs --noconfirm
yes | sudo pacman -S ssmtp # for mail you need to run fixes.sh aswell
yes | sudo pacman -S sxiv
#Imgur Screenshot
cd $HOME/Downloads/git
git clone https://github.com/jomo/imgur-screenshot.git
yes | sudo pacman -S xclip scrot --noconfirm
#Deepin screenshot
wget https://gist.github.com/msenol86/c0c7daad3de32a7922486e5d669f24c6/archive/82abb9ad54f13c8e53d6272e0d0a999498ffa204.zip
unp 82abb9ad54f13c8e53d6272e0d0a999498ffa204.zip
cd c0c7daad3de32a7922486e5d669f24c6-82abb9ad54f13c8e53d6272e0d0a999498ffa204
makepkg -si PKGBUILD
rm 82abb9ad54f13c8e53d6272e0d0a999498ffa204.zip
rm -r c0c7daad3de32a7922486e5d669f24c6-82abb9ad54f13c8e53d6272e0d0a999498ffa204
#Pywall
echo "Installing pywal"
cd $HOME/dotfiles/submodules/pywal
pip3 install --user pywal
bash $HOME/dotfiles/.scripts/dwall.sh
bash $HOME/dotfiles/.scripts/wall.sh
wal -R
xrdb merge ~/.cache/wal/colors.Xresources
bash /home/$USER/.scripts/dwall.sh
#Translation in shell
yay translate-shell
#Editor
#Vim
echo "Vim installation"
yes | sudo pacman -S vim
yes | sudo pacman -S cmake
cd ~
git clone https://github.com/Migacz85/vim-config.git
mv vim-config .vim
ln -s ~/.vim/.vimrc ~/.vimrc
curl -fLo ~/.vim/autoload/plug.vim --create-dirs \
https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
mkdir ~/.vim/undo
echo "Run :PlugInstall inside vim (in 5sec vim will start)"
sleep 5
vim
# python3 /home/$USER/.vim/plugged/YouCompleteMe/install.py
# python3 /home/$USER/.vim/plugged/YouCompleteMe/install.py --clang-completer
python3 /home/$USER/.vim/plugged/YouCompleteMe/install.py --all
sudo npm install js-beautify -g
#You need to run this command manually after installation
cd ~/.vim/plugged/command-t/ruby/command-t/ext/command-t && make clean; ruby extconf.rb && make
#Doom emacs
sudo npm install -g tern
sudo pacman --needed --noconfirm -S ripgrep
yay prettier mu mbsync-git
git clone https://github.com/hlissner/doom-emacs ~/.emacs.d
~/.emacs.d/bin/doom install
#mail in doom
mkdir -p ~/Maildir/gmail/
mkdir -p ~/Maildir/gmail/INBOX
time mu init --maildir=~/Maildir --my-address='migacz85@gmail.com'
time mbsync -c ~/.config/mu4e/mbsyncrc -a
# Fixes for Legion 5p amd 7 4800h with rtx2060
# NOTE(review): 'sudo su' opens an interactive root shell -- the lines
# after it only run once that shell exits, and NOT as root. Confirm the
# intended behaviour (perhaps 'sudo sh -c ...' was meant).
sudo su
rmmod pcspkr ; echo "blacklist pcspkr" >>/etc/modprobe.d/blacklist.conf
#Create service with script that is running as root
cd /etc/systemd/system
# NOTE(review): the redirection below is performed by the *current*
# shell, not by sudo, and the target path '~/etc/ssmtp/...' looks
# inconsistent with the 'cd /etc/systemd/system' above -- verify.
sudo cat >~/etc/ssmtp/run-at-startup.service <<EOL
[Unit]
Description=Run script at startup after all systemd services are loaded
After=getty.target
[Service]
Type=simple
RemainAfterExit=yes
ExecStart=/home/migacz/.scripts/startup.sh
TimeoutStartSec=0
[Install]
WantedBy=default.target
EOL
systemctl enable run-at-startup.service
| true
|
1c0b2fa005272312124d8608f8c653fc69c7b2b1
|
Shell
|
dinAlt/yatt
|
/shell/yatt-untag
|
UTF-8
| 634
| 4
| 4
|
[] |
no_license
|
#!/bin/sh
# Remove one or more tags from yatt's currently selected task.
#
# Fixed: the helper path was built with ${0%yatt-tag}, a copy-paste
# from the sibling yatt-tag script. That pattern never matches this
# script's name (yatt-untag), so id_cmd pointed at a non-existent
# command; strip this script's actual basename instead.
id_cmd=${0%yatt-untag}"yatt-id -m"
usage () {
echo "\
Untags yatt's selected task.
By default, 'fzf' is used, but you can change it by setting \
YATT_FZF environment variable value.
USAGE:
$0 [TAGS]
ARGUMENTS:
TAGS Comma separated tags list (will be prompted if omitted)
FLAGS:
-h, --help Show this message
"
exit 0
}
# POSIX [ uses '=', not '==' (a bashism under #!/bin/sh). The previous
# duplicate --help/-h checks on $tags are folded into these two.
[ "$1" = "--help" ] && usage
[ "$1" = "-h" ] && usage
tags=$1
# Resolve the selected task id via the yatt-id helper (fzf picker).
id=$($id_cmd task)
# Prompt for tags if none were given on the command line (-r: keep
# backslashes literal).
[ -z "$tags" ] && read -r -p "tags: " tags
[ -z "$tags" ] && echo "no tags provided" && exit 1
# No task selected: nothing to do.
[ -z "$id" ] && exit 0
yatt untag "$id" "$tags"
| true
|
e26d6198097981e0fab07d2edb6e256b6707d84b
|
Shell
|
Charlisim/laptop
|
/.laptop.local
|
UTF-8
| 3,679
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Personal extension of thoughtbot's "laptop" setup: installs macOS GUI
# apps via Homebrew Cask plus a long list of CLI tools and replacements
# for outdated system utilities.
# Install command-line tools using Homebrew
# Usage: `brew bundle Brewfile`
# Make sure we’re using the latest Homebrew
brew update
# Print a message surrounded by blank lines.
fancy_echo() {
printf "\n%b\n" "$1"
}
brew tap caskroom/cask
# Upgrade any already-installed formulae
brew upgrade
brew install brew-cask
# --- GUI applications via Homebrew Cask ---
brew cask install dropbox
brew cask install google-chrome
brew cask install adobe-creative-cloud
brew cask install alfred
brew cask install android-studio-bundle
brew cask install android-file-transfer
brew cask install appcleaner
brew cask install appcode
brew cask install bonjour-browser
brew cask install caffeine
brew cask install calibre
brew cask install ccleaner
brew cask install crashlytics
brew cask install filezilla
brew cask install flux
brew cask install java
brew cask install spotify
brew cask install google-chrome-canary
brew cask install imagealpha
brew cask install imageoptim
brew cask install totalterminal
brew cask install textwrangler
brew cask install virtualbox
brew cask install sublime-text
brew cask install telegram
brew cask install evernote
brew cask install genymotion
brew cask install istat-menus
brew cask install steam
brew cask install origin
brew cask install sourcetree
brew cask install utorrent
brew cask install tunnelblick
brew cask install vlc
brew cask install unrarx
fancy_echo "Installing Postgres, a good open source relational database ..."
# NOTE(review): brew_install_or_upgrade is not defined in this file --
# presumably provided by the parent thoughtbot laptop script that
# sources this one; TODO confirm.
brew_install_or_upgrade 'postgres'
fancy_echo "Installing Redis, a good key-value database ..."
brew_install_or_upgrade 'redis'
# Install GNU core utilities (those that come with OS X are outdated)
# Don’t forget to add `$(brew --prefix coreutils)/libexec/gnubin` to `$PATH`.
brew install coreutils
#sudo ln -s /usr/local/bin/gsha256sum /usr/local/bin/sha256sum
# Install some other useful utilities like `sponge`
brew install moreutils
# Install GNU `find`, `locate`, `updatedb`, and `xargs`, `g`-prefixed
brew install findutils
# Install GNU `sed`, overwriting the built-in `sed`
brew install gnu-sed --default-names
# Install Bash 4
# Note: don’t forget to add `/usr/local/bin/bash` to `/etc/shells` before running `chsh`.
brew install bash
brew install bash-completion
echo "/usr/local/bin/bash" >> /etc/shells
# Install wget with IRI support
brew install wget --enable-iri
# Install RingoJS and Narwhal
# Note that the order in which these are installed is important; see http://git.io/brew-narwhal-ringo.
brew install ringojs
brew install narwhal
# Install more recent versions of some OS X tools
brew install vim --override-system-vi
brew install homebrew/dupes/grep
brew install homebrew/dupes/screen
brew install homebrew/php/php55 --with-gmp
# Install some CTF tools; see https://github.com/ctfs/write-ups
brew install bfg
brew install binutils
brew install binwalk
brew install cifer
brew install dex2jar
brew install dns2tcp
brew install fcrackzip
brew install foremost
brew install hashpump
brew install hydra
brew install john
brew install knock
brew install nmap
brew install pngcheck
brew install sqlmap
brew install tcpflow
brew install tcpreplay
brew install tcptrace
brew install ucspi-tcp # `tcpserver` et al.
brew install xpdf
brew install xz
# Install other useful binaries
brew install ack
#install exiv2
brew install git
brew install imagemagick --with-webp
brew install lynx
brew install node # This installs `npm` too using the recommended installation method
brew install p7zip
brew install pigz
brew install pv
brew install rename
brew install rhino
brew install tree
brew install webkit2png
brew install zopfli
brew install homebrew/versions/lua52
# Remove outdated versions from the cellar
brew cleanup
# Install oh-my-zsh.
curl -L http://install.ohmyz.sh | sh
| true
|
34e924e34f8cd2d51ce470bc9c87faf9a81cb1d1
|
Shell
|
normoes/lnd_client_container
|
/entrypoint.sh
|
UTF-8
| 825
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/bash
# Container entrypoint for an lnd REST client: materialise the TLS
# certificate from the $TLS_CERT environment variable, print curl usage
# examples, then exec the container CMD.
set -euo pipefail
# -p: succeed even if the directory already exists (e.g. a mounted
# volume) -- plain `mkdir` aborted the container on restart.
mkdir -p /data/lnd
echo "$TLS_CERT" > /data/lnd/tls.cert
echo "Reference:"
echo "https://api.lightning.community/#lnd-rest-api-reference"
echo ""
echo "Example - Check channels:"
echo 'curl -w "\n" -sSL -X GET --cacert /data/lnd/tls.cert -H "Grpc-Metadata-macaroon: $READ_MAC" https://$LND_URL:8080/v1/channels | jq'
echo "Example - Unlock wallet:"
echo 'curl -w "\n" -sSL -X POST --cacert /data/lnd/tls.cert -H "Grpc-Metadata-macaroon: $READ_MAC" https://$LND_URL:8080/v1/unlockwallet -d '"'"'{"wallet_password": "base64_passwd"}'"'"' | jq'
echo "Example - Create invoice:"
echo 'curl -w "\n" -sSL -X POST --cacert /data/lnd/tls.cert -H "Grpc-Metadata-macaroon: $ADMIN_MAC" https://$LND_URL:8080/v1/invoices -d '"'"'{"value_msat": "15000000", "memo": "description of payment"}'"'"' | jq'
# Quote "$@" so CMD arguments containing whitespace survive intact
# (bare $@ word-splits and glob-expands every argument).
exec "$@"
| true
|
36711b981ddd2ace6eda5d95f0ec052a9efb7199
|
Shell
|
jayelm/rnn-syn
|
/recipes.sh
|
UTF-8
| 863
| 3.28125
| 3
|
[] |
no_license
|
#!/bin/bash
# Experiment recipes: sweep dataset x model x channel size and launch one
# rnn-syn.py training run per combination.
# Note: these recipes don't report generalization statistics (i.e. accuracy
# on test data). TODO: Implement that, while still enabling saving test/train
# (maybe save 2 versions of messages: 1 -train, 1 -test)

# Cap on the number of messages dumped per run.
SAVE_MAX=1000

set -e

# Datasets: 500_200 is the 3-pick-2 game, 500_400 the 8-pick-4 game.
for dataset in 500_200 500_400; do
for arch in feature end2end; do
for channel in 2 4 64; do
# A 64-wide channel uses continuous codes; narrow channels are discrete.
case "$channel" in
64) comm_kind=continuous ;;
*) comm_kind=discrete ;;
esac
echo $dataset $arch $channel $comm_kind
python3 rnn-syn.py --data "data/$dataset" --model "$arch" --epochs 25 --n_comm "$channel" --comm_type "$comm_kind" --save_max "$SAVE_MAX"
done
done
done
| true
|
3ce0532b6fb153bc52ffd4978a7d2ded970f9b85
|
Shell
|
BxNxM/rpitools
|
/autodeployment/lib/motion_remote_video_stream_over_apache/motion_remote_video_stream_over_apache.bash
|
UTF-8
| 2,257
| 3.765625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Configure apache to reverse-proxy an external motion camera stream,
# based on settings from the rpitools config handler.
MYDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# RPIENV SETUP (BASH)
if [ -e "${MYDIR}/.rpienv" ]
then
source "${MYDIR}/.rpienv" "-s" > /dev/null
# check one var from rpienv - check the path
if [ ! -f "$CONFIGHANDLER" ]
then
echo -e "[ ENV ERROR ] \$CONFIGHANDLER path not exits!"
echo -e "[ ENV ERROR ] \$CONFIGHANDLER path not exits!" >> /var/log/rpienv
exit 1
fi
else
echo -e "[ ENV ERROR ] ${MYDIR}/.rpienv not exists"
sudo bash -c "echo -e '[ ENV ERROR ] ${MYDIR}/.rpienv not exists' >> /var/log/rpienv"
exit 1
fi
source "$TERMINALCOLORS"
# Pull the forwarding settings from the central config handler.
motion_action_stream_forward="$($CONFIGHANDLER -s APACHE_MOTION_STREAM_FORWARDING -o activate)"
motion_stream_hostname="$($CONFIGHANDLER -s APACHE_MOTION_STREAM_FORWARDING -o stream_hostname)"
motion_stream_port="$($CONFIGHANDLER -s APACHE_MOTION_STREAM_FORWARDING -o stream_port)"
motion_proxy_point="$($CONFIGHANDLER -s APACHE_MOTION_STREAM_FORWARDING -o proxy_point)"
apache_html_folder_link_to="$($CONFIGHANDLER -s APACHE -o html_folder_link_to)"
apache_conf="/etc/apache2/apache2.conf"
source "${MYDIR}/../message.bash"
_msg_title="motion stream forwarding SETUP"
# Idempotently append ProxyPass/ProxyPassReverse entries for the motion
# stream to the apache config, then restart apache.
function set_apache_conf_proxy() {
local is_set_proxy_forwarding="$(cat $apache_conf | grep -v grep | grep ${motion_proxy_point})"
local text=""
if [ "$is_set_proxy_forwarding" == "" ]
then
_msg_ "Set $apache_conf for motion stream forwarding"
text="\n# external motion camera stream forwarding under apache\n"
text+="ProxyPass ${motion_proxy_point} http://${motion_stream_hostname}:${motion_stream_port}\n"
text+="ProxyPassReverse ${motion_proxy_point} http://${motion_stream_hostname}:${motion_stream_port}\n"
# Fixed: 'sudo echo ... >> file' performs the redirection in the
# *unprivileged* shell, so appending to the root-owned apache config
# failed with permission denied; route the append through sudo tee -a.
echo -e "$text" | sudo tee -a "$apache_conf" > /dev/null
_msg_ "Restart apache: sudo systemctl restart apache2"
sudo systemctl restart apache2
else
_msg_ "Already set: $apache_conf for motion stream forwarding"
fi
}
if [[ "$motion_action_stream_forward" == "True" ]] || [[ "$motion_action_stream_forward" == "true" ]]
then
_msg_ "Motion remote stream forwarding IS required"
set_apache_conf_proxy
else
_msg_ "Motion remote stream forwarding IS NOT required"
fi
| true
|
efaaac1f6f8649aef039a790d97bccac04d8ad97
|
Shell
|
MostafaEllabaan/GEN2019
|
/GenomeContiminationChecks/checkGenome16SrRNAForContamination.sh
|
UTF-8
| 2,385
| 3.328125
| 3
|
[] |
no_license
|
# Check a genome assembly for contamination by extracting its 16S rRNA
# regions (via BLAST against a precompiled 16S database), clustering
# them, and flagging the genome as clean when all copies fall into a
# single cluster. Results are zipped back into the genome folder.
# Requires $DataSpace to be set and helper scripts
# (changeFastaFormatToTabularFormat..., JoinTwoFiles.sh,
# SequenceClustering.sh) on PATH -- TODO confirm.
genomeFolder=$1; ## the folder where the genome is stored
min16SrRNALength=$2; ## the minimum length to confirm that a 16SrRNA gene is found
minIdentity=$3 ## the minimum identity of the 16SrRNA
## precompiled 16SrRNA
rRNAFolder=$DataSpace/Databases/16SrRNADB/;
rRNAblastdb=$rRNAFolder/ALL16SrRNA/all16SrRNA.tab.trimmedBasedOnPrimers.HaveMostOfPrimersBetween1400-1600pb;
# Work in a RAM-backed scratch directory with a randomised name.
workingDir=/dev/shm/Contimination-$RANDOM-$RANDOM-$RANDOM;
mkdir $workingDir
cp $genomeFolder/*.fna* $workingDir
cd $workingDir;
# Pick the genomic FASTA, skipping RNA/CDS companion files.
file=$(ls *.fna* | grep -v -i "rna\|cds" )
gzip -d $file;
genomeFile=$(echo $file | sed 's/\.gz//g' | awk '{print $1}')
echo $genomeFile
genomeID=$(echo $genomeFile | sed 's/\.fna//g')
changeFastaFormatToTabularFormat.withIndexedWithGIDS.sh $genomeFile $genomeFile.tab
## blast genome against the 16SrRNA
time blastn -query $genomeFile -db $rRNAblastdb -out result.tmp -word_size 20 -num_threads 28 -outfmt 6 -max_target_seqs 100
# Keep hits that pass the identity and length thresholds.
awk -F"\t" -v min16SrRNALength=$min16SrRNALength -v minIdentity=$minIdentity '{
if(FNR>1 && $3>=minIdentity && $4> min16SrRNALength) print $0}' result.tmp > result.tmp.16SrRNA
# Collapse overlapping hit intervals per contig into distinct regions,
# then pull the corresponding subsequences out of the tabular genome.
awk -F"\t" '{print $1"\t"$7"\t"$8}' result.tmp.16SrRNA | sort -u |
sort -k1,1 -k2,2n -k3,3nr | awk -F"\t" '{if(FNR==1) {
print "Contig\tStart\tEnd"; print $0; contig=$1; regionStart=$2; regionEnd=$3;}
else {
if(contig==$1) { x=$2-regionEnd; if(x>0) { print $0; regionStart=$2; regionEnd=$3;}}
else { print $0; contig=$1; regionStart=$2; regionEnd=$3;}}}' |
JoinTwoFiles.sh $genomeFile.tab - | awk -F"\t" '{
print $1"\t"$2"\t"$3"\t"substr($NF,$2,($3-$2))}' > result.tmp.16SrRNA.seq
# Re-emit the extracted regions as FASTA (headers normalised for the
# clustering step: '_' and '.' replaced with '=').
awk -F"\t" '{if( FNR>1) print $0}' result.tmp.16SrRNA.seq | sort -u |
awk -F"\t" -v genomeID=$genomeFile '{
print ">"$1"__"$2"__"$3"=="FNR"\n" $NF
}' | sed 's/[_\.]/=/g' > $genomeID.16SrRNA
# Cluster the 16S copies at 97% identity; a single cluster means no
# evidence of contamination.
mkdir Clustering
cd Clustering
SequenceClustering.sh ../$genomeID.16SrRNA 0.97 clustering.out gene.cluster cluster.representative $(pwd) 0.05 &> tmp.result.tmp
numberOfCluster=$(awk -F"\t" '{if(FNR>1) print $2}' gene.cluster | sort -u | wc -l)
if (( numberOfCluster==1 )); then
echo "single 16S rRNA Clustered genomes"> ../$genomeFile.16SrRNA.proceed
fi
# Archive all intermediate results back into the genome folder and
# clean up the scratch directory.
cd $workingDir
zip -r $genomeID.16SrRNA.zip {result*,*.16SrRNA*,Clustering}
mv $genomeID.16SrRNA.zip $genomeFolder
sleep 1s
touch $genomeFolder
ls $genomeFolder/$genomeID.16SrRNA.zip
rm -rf $workingDir
| true
|
eadc5d0fcdba2b482a7db06ee7ad93ab6d743bbf
|
Shell
|
heilerich/Dash-User-Contributions
|
/mne/build.sh
|
UTF-8
| 1,088
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the MNE Dash docset from the published docs and open a PR branch
# against Kapeli/Dash-User-Contributions with the updated archive.
cd "$(dirname "$0")"
# Extract the pinned version from ../versions.txt (line "mne==X.Y.Z").
MNE_VERSION=$(sed 's/^mne==\(.*\)$/\1/' ../versions.txt)
if [ -d "mne-tools.github.io" ]; then
pushd mne-tools.github.io && git pull && popd
else
git clone --depth 1 https://github.com/mne-tools/mne-tools.github.io
fi
cd mne-tools.github.io/
doc2dash -n MNE -d ../template/ -i ../template/icon@2x.png -j -u https://mne-tools.github.io/stable/ stable
cd ../template/
tar --exclude='.DS_Store' -cvzf MNE.tgz MNE.docset
rm -rf MNE.docset
cat docset.json
# Fill in the empty "version" field. Fixed: the previous pipeline
# (cat docset.json | sed ... | tee docset.json) truncated docset.json
# while cat was still reading it, which could corrupt the file; write
# to a temp file and move it into place instead.
tmpjson=$(mktemp)
sed "s/\(\"version\": \"\)\"/\1$MNE_VERSION\"/g" docset.json > "$tmpjson"
mv "$tmpjson" docset.json
cat docset.json
cd ..
git config --global user.email "code@fehe.eu"
git config --global user.name "heilerich"
git clone https://github.com/Kapeli/Dash-User-Contributions.git
yes | cp -f template/* Dash-User-Contributions/docsets/MNE/
cd Dash-User-Contributions
git checkout -b pr-branch
git add *
git commit -m "Updating to MNE version $MNE_VERSION (Via Travis Build $TRAVIS_BUILD_ID)"
git remote add fork https://heilerich:${GH_TOKEN}@github.com/heilerich/Dash-User-Contributions.git
git push -u fork pr-branch
| true
|
241315d85de8d0e7331c3f6191392d363e08d37f
|
Shell
|
rdodson41/dot-files
|
/.config/yadm/bootstrap
|
UTF-8
| 1,102
| 2.921875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# yadm bootstrap: provision a fresh machine -- Homebrew (macOS), Brewfile
# packages, vim-plug + plugins, fzf keybindings, RVM and nvm.
# Install macOS package manager Homebrew.
if [[ "$(uname)" == "Darwin" ]] && ! which brew > /dev/null; then
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
# Install all dependencies in the global Brewfile.
if which brew > /dev/null; then
brew bundle install --no-upgrade --global
fi
# Install Vim plugin manager 'vim-plug'.
curl -fLo ~/.vim/autoload/plug.vim --create-dirs https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
# Install Vim plugins.
vim -c PlugInstall -c quitall
# Install useful 'fzf' key bindings and fuzzy completion.
if which brew > /dev/null; then
"$(brew --prefix fzf)/install" --key-bindings --completion --no-update-rc --no-zsh --no-fish
fi
# Install RVM GPG Keys.
gpg --keyserver hkp://pool.sks-keyservers.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
# Install Ruby version manager RVM.
# (leading backslash bypasses any curl alias)
\curl -sSL https://get.rvm.io | bash -s stable
# Install Node.js version manager 'nvm'.
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.2/install.sh | bash
| true
|
93185941d77fb80ed2865e9ec6c39f653c77cc11
|
Shell
|
Diegovsky/dotfiles
|
/wayland-common/.local/bin/wayonce.sh
|
UTF-8
| 521
| 2.53125
| 3
|
[] |
no_license
|
#!/usr/bin/env sh
# One-shot Wayland session bootstrap: export the session environment to
# dbus/systemd, start background desktop services, then run any user
# hooks from the wayinit.d directory.
COMPOSITOR=${1:-sway}
echo "$COMPOSITOR" > /tmp/wayinit.log
echo "$DESKTOP_SESSION" >> /tmp/wayinit.log
dbus-update-activation-environment --systemd WAYLAND_DISPLAY SWAYSOCK SSH_AUTH_SOCK DBUS_SESSION_BUS_ADDRESS XAUTHORITY XDG_CURRENT_DESKTOP=$COMPOSITOR &
xsettingsd &
# dex runs XDG autostart entries for this desktop environment.
dex -a -e "$COMPOSITOR" &
# solaar -w hide &
mako &
kanshi &
/lib/polkit-gnome/polkit-gnome-authentication-agent-1 &
WAYINIT_DIR="${XDG_DATA_HOME:-$HOME/.local/share}/wayinit.d/"
export WAYINIT_DIR
# Fixed: `source` is a bashism and fails under POSIX sh (e.g. dash);
# the portable spelling is `.`.
. "$WAYINIT_DIR/session.sh"
| true
|
936c0f2ff923224a7bdfb98490e462c0202a5062
|
Shell
|
TomaNikolov/linux-encryption-benchmark
|
/run-nativescript.sh
|
UTF-8
| 170
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
# Create and build a throwaway NativeScript (tns) Android project inside
# $PRIVATE_DIR, tracing every command (-x) and aborting on error (-e).
set -ex
echo "folder is " $PRIVATE_DIR
# Quote the path so directories containing spaces do not word-split,
# and fail loudly if $PRIVATE_DIR is unset.
cd "${PRIVATE_DIR:?PRIVATE_DIR must be set}"
echo "create {N} project..."
tns create test
cd test
echo "build {N} project..."
tns build android
| true
|
d5e8777770492b1dff0f5bb4e0babcd48588a56d
|
Shell
|
rogba1/example-testiam
|
/setup-new-iam-user.sh
|
UTF-8
| 1,347
| 4.1875
| 4
|
[] |
no_license
|
#!/bin/bash
# Description: A script to call the AWS CLI to create a new iam user.
# This script requires the latest version of the AWS CLI and should be run on a unix environment.
# You'll also need to make the script executable using the following command
# `chmod +x ./setup-new-iam-user.sh`
#
# Usage:
# Run from command line with the following parameters (maintaining this sequence).
# ./setup-new-iam-user.sh --user_name <user_name> --profile <profile_name>
# e.g
# ./setup-new-iam-user.sh --user_name joeblogg --profile default
#
# Variables
user_name=
profile_name=
# Read Input Flags
echo Read Input Flags
# Fixed: loop while ANY arguments remain. The previous '$# -gt 1'
# condition never entered the loop for a lone trailing flag such as
# '-h', so the help text could not be shown.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-h|--help)
echo "Description"
echo ""
echo "Usage:"
echo " setup-new-iam-user.sh"
echo "--profile : The aws named profile that indicates the account set up under"
echo "--user_name : The aws new user account name"
exit 0
;;
--user_name) user_name="$2"; shift ;;
--profile) profile_name="$2"; shift ;;
*) ;; # Undeclared flags
esac
shift # past argument or value
done
# A user name is mandatory; refuse to call AWS without one.
if [[ -z "$user_name" ]]; then
echo "Error: --user_name is required." >&2
exit 1
fi
# Fixed: the parsed --profile value was documented but never used;
# pass it through to the AWS CLI when supplied.
profile_args=()
if [[ -n "$profile_name" ]]; then
profile_args=(--profile "$profile_name")
fi
# AWS CLI command - create account using name passed in
aws iam create-user --user-name "$user_name" "${profile_args[@]}"
aws iam put-user-policy --user-name "$user_name" --policy-name MyUserPolicy --policy-document file://user-policy.json "${profile_args[@]}"
|
dfa72c5f6614b8b2674ad6b2af03bd0c264e1909
|
Shell
|
rutendos/mumerge_explore
|
/scripts/rest/03_merging_full_len_regions.sbatch
|
UTF-8
| 2,503
| 2.75
| 3
|
[] |
no_license
|
#!/bin/sh
# SLURM batch job: merge significant 16S regions from two MCF7 samples
# three ways -- bedtools merge (union), bedtools intersect (overlap),
# and muMerge (probabilistic merge from the TFEA toolkit).
#SBATCH --job-name=merging_significant_regions
#SBATCH -p short
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --mem=150mb
#SBATCH --time=00:05:00
#SBATCH --output /scratch/Users/rusi2317/e_and_o/%x.out
#SBATCH --error /scratch/Users/rusi2317/e_and_o/%x.err
#SBATCH --mail-type=ALL
#SBATCH --mail-user=rutendo.sigauke@cuanschutz.edu
# Load the tool versions this pipeline was validated against.
module purge
module load python/3.6.3
module load python/3.6.3/numpy
module load python/3.6.3/scipy
module load bedtools/2.28.0
###################################################################
################### Merge all samples #############################
echo '--------------Combining bed files with merge----------------'
bed=/scratch/Users/rusi2317/projects/mumerge_test_rest/processed_data/significant_regions_whole_seq
outdir=/scratch/Users/rusi2317/projects/mumerge_test_rest/processed_data/merged_regions_full_len
# Union of both replicates: concatenate, coordinate-sort, merge overlaps.
cat ${bed}/ENCFF001UN* | bedtools sort -i stdin | bedtools merge -i stdin > ${outdir}/MCF7_merged.bed
##################################################################
######################Intersect then Beds #######################
echo '---------Combining bed files with bedtools intersect--------'
# Intersection: keep only regions present in BOTH replicates.
bedtools intersect -a ${bed}/ENCFF001UNH.bed_sigFimo.bed -b ${bed}/ENCFF001UNI.bed_sigFimo.bed > ${outdir}/MCF7_intersect.bed
##################################################################
######################## MuMerge ################################
mumerge_path=/scratch/Users/rusi2317/bin/TFEA/TFEA
data_dir=/scratch/Users/rusi2317/projects/mumerge_test_rest/processed_data
echo '-----------Combining bed files with muMerge---------------'
echo 'MCF7 samples'
# muMerge reads the sample list from the .txt manifest; -v = verbose.
python3 ${mumerge_path}/mumerge.py -i ${data_dir}/01_mcf7_samples.txt -o ${outdir}/mcf7_mumerged.bed -v
|
a458331ddcd60d918eff1d6b8a507c91002db196
|
Shell
|
xiaoping378/elk-kubernetes
|
/deploy.sh
|
UTF-8
| 2,459
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Deploy an ELK stack to Kubernetes: render *.yaml.tmpl templates, apply
# all manifests, then gate on Elasticsearch readiness before patching in
# a readinessProbe (so the probe cannot block initial cluster formation).
CDIR=$(cd `dirname "$0"` && pwd)
cd "$CDIR"
print_red() {
printf '%b' "\033[91m$1\033[0m\n"
}
print_green() {
printf '%b' "\033[92m$1\033[0m\n"
}
# Expand $VARS inside a template file via eval/echo.
# NOTE(review): eval-based templating executes anything in the template;
# only use with trusted template files.
render_template() {
eval "echo \"$(< "$1")\""
}
CONTEXT=""
#CONTEXT="--context=foo"
NAMESPACE="monitoring"
# One data replica per schedulable node.
ES_DATA_REPLICAS=$(kubectl get nodes --no-headers ${CONTEXT} | awk '!/SchedulingDisabled/ {print $1}' | wc -l)
for yaml in *.yaml.tmpl; do
render_template "${yaml}" | kubectl ${CONTEXT} --namespace="${NAMESPACE}" create -f -
done
for yaml in *.yaml; do
kubectl ${CONTEXT} --namespace="${NAMESPACE}" create -f "${yaml}"
done
# create+apply via --dry-run so re-runs update the configmap in place.
kubectl ${CONTEXT} --namespace="${NAMESPACE}" create configmap fluentd-config --from-file=docker/fluentd/td-agent.conf --dry-run -o yaml | kubectl ${CONTEXT} --namespace="${NAMESPACE}" apply -f -
# Set replicas to amount of worker nodes
#kubectl ${CONTEXT} --namespace="${NAMESPACE}" scale deployment es-data --replicas=${ES_DATA_REPLICAS}
#kubectl ${CONTEXT} --namespace="${NAMESPACE}" scale deployment es-master --replicas=${ES_DATA_REPLICAS}
#kubectl ${CONTEXT} --namespace="${NAMESPACE}" scale deployment es-data-master --replicas=${ES_DATA_REPLICAS}
# Wait for Elasticsearch client nodes
echo -n "Waiting for Elasticsearch client pods"
while true; do
echo -n .
kubectl ${CONTEXT} --namespace="${NAMESPACE}" get pods -l role=client,component=elasticsearch -o jsonpath={.items[0].status.phase} | grep -q Running && break || sleep 1
done
echo
# Wait for Elasticsearch cluster readiness, and then apply "readinessProbe" to allow smooth rolling upgrade
echo -n "Waiting for Elasticsearch cluster readiness"
while true; do
echo -n .
kubectl ${CONTEXT} --namespace="${NAMESPACE}" exec $(kubectl ${CONTEXT} --namespace="${NAMESPACE}" get pods -l role=client,component=elasticsearch -o jsonpath={.items[0].metadata.name}) -- sh -c 'curl -so/dev/null http://elasticsearch-logging:9200/_cluster/health?wait_for_status=green' >/dev/null 2>&1 && break || sleep 1
done
echo
# Apply readinessProbe only when our Elasticsearch cluster is up and running
kubectl ${CONTEXT} --namespace="${NAMESPACE}" patch deployment es-data-master -p'{"spec":{"template":{"spec":{"containers":[{"name":"es-data-master","readinessProbe":{"exec":{"command":["curl","-so/dev/null","http://elasticsearch-logging:9200/_cluster/health?wait_for_status=green"]},"timeoutSeconds":30,"successThreshold":3}}]}}}}'
kubectl ${CONTEXT} --namespace="${NAMESPACE}" get pods --watch
| true
|
c632b70450813d83f991fec99bc6f7776d5f7045
|
Shell
|
marcverney/marcv-node
|
/run-as-user.sh
|
UTF-8
| 892
| 4.03125
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Runs a command as the LOCAL_USERID if passed in, otherwise stick with root
#
# Container entrypoint helper: when USERID (and optionally GROUPID) are
# provided, create a matching 'dockeruser' account and exec the CMD as
# that user via gosu; otherwise run the CMD as root.
# Check the UID
# NOTE(review): '-gt 1' assumes USERID is numeric; a non-numeric value
# makes [ error out -- verify callers always pass a UID.
if [ "$USERID" ] \
&& [ "$USERID" != "root" ] \
&& [ "$USERID" -gt 1 ]
then
# Create the user
useradd --create-home --shell /bin/bash --uid "$USERID" dockeruser
# Check the GID
if [ "$GROUPID" ] && [ "$GROUPID" -gt 0 ]
then
# Create the group
# Reuse an existing group with this GID if present (grep -Po is
# GNU grep only), otherwise create 'dockeruser' with that GID.
groupName=$(getent group "$GROUPID" | grep -Po '^.+?(?=:)')
if [ ! "$groupName" ]; then
groupName='dockeruser'
groupadd --gid "$GROUPID" "$groupName"
fi
adduser --quiet dockeruser "$groupName"
fi
# Switch to the user
# NOTE(review): $groupName is empty here when GROUPID was not given.
echo "Running as user dockeruser (uid $USERID) and group $groupName the CMD: $@"
export HOME=/home/dockeruser
# gosu drops privileges without leaving a parent shell process.
exec /usr/local/bin/gosu dockeruser "$@"
else
echo "Running as root the CMD: $@"
exec "$@"
fi
| true
|
60fdda6c22ea2529a8358a6eb86733857fff300f
|
Shell
|
fxha/dotfiles
|
/install.sh
|
UTF-8
| 4,423
| 3.9375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
set -e
DOTFILES_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export DOTFILES_ROOT
export DOTFILES_LOG_FILE="/tmp/dotfiles.log"
DOTFILES_ONLY_DOTFILES=false
DOTFILES_NO_INTERACTION=false
DOTFILES_ACCEPT_MSCOREFONTS_EULA=false
DOTFILES_ARG_ROOT=false
DOTFILES_ARG_HELP=false
export overwrite_all=false
export backup_all=false
export skip_all=false
# source: https://stackoverflow.com/a/14203146
for i in "$@"
do
case $i in
--dotfiles)
DOTFILES_ONLY_DOTFILES=true
;;
--no-interaction)
DOTFILES_NO_INTERACTION=true
export overwrite_all=true
;;
--accept-mscorefonts-eula)
DOTFILES_ACCEPT_MSCOREFONTS_EULA=true
;;
--root)
DOTFILES_ARG_ROOT=true
;;
-h|--help)
DOTFILES_ARG_HELP=true
;;
--log-path=*)
export DOTFILES_LOG_FILE="${i#*=}"
shift # skip =* value
;;
*)
# unknown options
;;
esac
done
export DOTFILES_NO_INTERACTION
export DOTFILES_ACCEPT_MSCOREFONTS_EULA
# Remove the temporary working directory, if present.
cleanup() {
  local tmp_dir="/tmp/dotfiles"
  if [ -d "$tmp_dir" ]; then
    rm -rf "$tmp_dir"
  fi
}
# EXIT-trap handler: report the failure, point at the log file, then
# delete the temporary files via cleanup().
beforeExit() {
  printf '\n=======================================\n'
  printf '%s\n' "[ERROR] One or more commands failed!"
  printf '%s\n' "[ERROR] Log file: $DOTFILES_LOG_FILE"
  printf '%s\n' "Deleting temporary files..."
  cleanup
}
echo " ------------------------------------------------------------"
echo "< Dotfiles - Felix Häusler - https://github.com/fxha/dotfiles >"
echo " ------------------------------------------------------------"
echo ""
echo " __ __ ____ __ "
echo " ___/ /__ / /_/ _(_) /__ ___"
echo " / _ / _ \/ __/ _/ / / -_|_-<"
echo " \_,_/\___/\__/_//_/_/\__/___/"
echo ""
if [[ "$DOTFILES_ARG_HELP" = true ]]; then
cat <<EOF
Usage: $(basename "$0") [options]
Options:
-h, --help Print this help message
--root Install dotfiles for the root user
Please execute with 'sudo' or 'sudo su -'
--dotfiles Only install dotfiles
--no-interaction Automatically full installation without user interaction
--accept-mscorefonts-eula Accept the Microsoft's TrueType core fonts license
--log-path=%file% Set log directory
See the README for documentation.
EOF
exit 0
fi
trap beforeExit EXIT
trap 'echo -e "\nAborting..."; cleanup; trap - EXIT; exit 1' INT
echo -e "dotfile setup started: $(date +'%Y-%m-%d %H:%M:%S')\n" > "$DOTFILES_LOG_FILE"
if [[ "$(id -u)" = 0 ]]; then
if [[ -n "$SUDO_USER" && "$DOTFILES_ARG_ROOT" = false ]]; then
echo "The script need to be run without root permissions." >&2
trap - EXIT
exit 1
else
echo -e "\x1b[33mCurrent installation path is '$HOME'. Please run the setup without root permissions to install to local user if needed.\x1b[0m"
fi
elif [[ "$(id -u)" != 0 && "$DOTFILES_ARG_ROOT" = true ]]; then
echo "The script need to be run with root permissions." >&2
trap - EXIT
exit 1
fi
# Bash on Windows (cygwin or mingw)
if [[ "$(uname)" == "CYGWIN"* || "$(uname)" == "MINGW"* ]]; then
echo -e " windows"
echo ""
if [ "$DOTFILES_NO_INTERACTION" = false ]; then
read -p "Do you want install dotfiles? (y/n) " -n 1 -r
echo ""
else
REPLY="y"
fi
if [[ $REPLY =~ ^[Yy]$ ]]; then
"$DOTFILES_ROOT/scripts/install_dotfiles_windows.sh"
fi
trap - EXIT
exit 0
fi
# ask for root permissions
sudo -v
# keep-alive: update existing sudo time stamp if set, otherwise do nothing.
# source: https://gist.github.com/cowboy/3118588
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
if [ "$DOTFILES_NO_INTERACTION" = false ]; then
read -p "Do you want install dotfiles? (y/n) " _installDotfiles
else
_installDotfiles="y"
fi
if [[ "$_installDotfiles" = "y" ]]; then
"$DOTFILES_ROOT/scripts/install_dotfiles.sh"
fi
# reload bash profile
. ~/.bashrc
if [ "$DOTFILES_NO_INTERACTION" = false ]; then
read -p "Do you want install additional software and/or system configuration? (y/n) " _installApps
elif [ "$DOTFILES_ONLY_DOTFILES" = false ]; then
_installApps="y"
fi
if [[ "$_installApps" = "y" ]]; then
"$DOTFILES_ROOT/scripts/install_packages.sh"
fi
echo "Deleting temporary files..."
cleanup
trap - EXIT
exit 0
| true
|
bc6309210bbe8b2ab02718dc065f2e1952ad9c02
|
Shell
|
arkidev/OpenIDE
|
/deploy.sh
|
UTF-8
| 5,265
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
# Builds the OpenIDE solutions and stages a complete release tree under
# ReleaseBinaries/. Paths are quoted throughout (several contain '#').
ROOT=$(cd "$(dirname "$0")"; pwd)
BINARYDIR="$ROOT/build_output"
DEPLOYDIR="$ROOT/ReleaseBinaries"
LIB="$ROOT/lib"
CSHARP_BIN="$ROOT/Languages/CSharp/lib"

# Start from clean output directories (-f: no error when already empty).
mkdir -p "$BINARYDIR" "$DEPLOYDIR"
rm -rf "$BINARYDIR"/* "$DEPLOYDIR"/*

# Deploy tree layout; mkdir -p creates all intermediate directories, so
# only the leaf directories need to be listed.
for dir in \
	EditorEngine CodeEngine EventListener tests Packaging \
	.OpenIDE/languages/C#-files/bin/AutoTest.Net \
	.OpenIDE/languages/C#-files/bin/ContinuousTests \
	.OpenIDE/languages/go-files/rscripts \
	.OpenIDE/languages/go-files/graphics \
	.OpenIDE/languages/python-files/rscripts \
	.OpenIDE/languages/python-files/graphics \
	.OpenIDE/languages/js-files/lib \
	.OpenIDE/languages/php-files \
	.OpenIDE/scripts/templates \
	.OpenIDE/rscripts/templates \
	.OpenIDE/test/templates
do
	mkdir -p "$DEPLOYDIR/$dir"
done
touch "$DEPLOYDIR/.OpenIDE/oi.config"

chmod +x "$CSHARP_BIN"/ContinuousTests/AutoTest.*.exe
chmod +x "$CSHARP_BIN/ContinuousTests/ContinuousTests.exe"
echo "$BINARYDIR"

# Build all solutions into the shared binary output directory.
xbuild OpenIDE.sln /target:rebuild /property:OutDir="$BINARYDIR"/ /p:Configuration=Release;
xbuild OpenIDE.CodeEngine.sln /target:rebuild /property:OutDir="$BINARYDIR"/ /p:Configuration=Release;
xbuild PackageManager/oipckmngr/oipckmngr.csproj /target:rebuild /property:OutDir="$BINARYDIR"/ /p:Configuration=Release;
#xbuild .OpenIDE/languages/CSharp/CSharp.sln /target:rebuild /property:OutDir=$BINARYDIR/ /p:Configuration=Release;

# Core CLI
cp "$BINARYDIR/oi.exe" "$DEPLOYDIR/"
cp "$BINARYDIR/OpenIDE.dll" "$DEPLOYDIR/"
cp "$BINARYDIR/OpenIDE.Core.dll" "$DEPLOYDIR/"
cp "$BINARYDIR/Newtonsoft.Json.dll" "$DEPLOYDIR/"
cp -r "$LIB"/EditorEngine/* "$DEPLOYDIR/EditorEngine"
cp "$ROOT/oi/oi" "$DEPLOYDIR/oi"

# Code engine (FSWatcher.dll was previously copied here twice; once is enough)
cp "$BINARYDIR/OpenIDE.CodeEngine.exe" "$DEPLOYDIR/CodeEngine/OpenIDE.CodeEngine.exe"
cp "$BINARYDIR/OpenIDE.CodeEngine.Core.dll" "$DEPLOYDIR/CodeEngine/OpenIDE.CodeEngine.Core.dll"
cp "$BINARYDIR/OpenIDE.Core.dll" "$DEPLOYDIR/CodeEngine/"
cp "$BINARYDIR/Newtonsoft.Json.dll" "$DEPLOYDIR/CodeEngine/"
cp "$ROOT/lib/FSWatcher/FSWatcher.dll" "$DEPLOYDIR/CodeEngine/"

# Event listener, tests and packaging tool
cp "$BINARYDIR/OpenIDE.EventListener.exe" "$DEPLOYDIR/EventListener/"
cp -r "$ROOT"/oi/tests/* "$DEPLOYDIR/tests"
cp "$BINARYDIR/oipckmngr.exe" "$DEPLOYDIR/Packaging"
cp "$BINARYDIR/OpenIDE.Core.dll" "$DEPLOYDIR/Packaging"
cp "$BINARYDIR/Newtonsoft.Json.dll" "$DEPLOYDIR/Packaging"
cp "$BINARYDIR/SharpCompress.3.5.dll" "$DEPLOYDIR/Packaging"

# Script/rscript/test templates
cp -r "$ROOT"/oi/script-templates/* "$DEPLOYDIR/.OpenIDE/scripts/templates"
cp -r "$ROOT"/oi/rscript-templates/* "$DEPLOYDIR/.OpenIDE/rscripts/templates"
cp -r "$ROOT"/oi/test-templates/* "$DEPLOYDIR/.OpenIDE/test/templates"
cp -r "$ROOT"/oi/rscripts/* "$DEPLOYDIR/.OpenIDE/rscripts"

# C# language plugin
cp "$ROOT/Languages/CSharp/C#.oilnk" "$DEPLOYDIR/.OpenIDE/languages/C#.oilnk"
cp "$ROOT/Languages/CSharp/language.oicfgoptions" "$DEPLOYDIR/.OpenIDE/languages/C#-files/language.oicfgoptions"
cp "$BINARYDIR/C#.exe" "$DEPLOYDIR/.OpenIDE/languages/C#-files/C#.exe"
cp "$BINARYDIR/OpenIDE.Core.dll" "$DEPLOYDIR/.OpenIDE/languages/C#-files/OpenIDE.Core.dll"
cp "$BINARYDIR/Newtonsoft.Json.dll" "$DEPLOYDIR/.OpenIDE/languages/C#-files/"
cp "$BINARYDIR/ICSharpCode.NRefactory.CSharp.dll" "$DEPLOYDIR/.OpenIDE/languages/C#-files/ICSharpCode.NRefactory.CSharp.dll"
cp "$BINARYDIR/ICSharpCode.NRefactory.dll" "$DEPLOYDIR/.OpenIDE/languages/C#-files/ICSharpCode.NRefactory.dll"
cp "$BINARYDIR/Mono.Cecil.dll" "$DEPLOYDIR/.OpenIDE/languages/C#-files/Mono.Cecil.dll"
cp -r "$ROOT"/Languages/CSharp/templates/* "$DEPLOYDIR/.OpenIDE/languages/C#-files"
cp "$ROOT/Languages/CSharp/initialize.sh" "$DEPLOYDIR/.OpenIDE/languages/C#-files"
cp -r "$CSHARP_BIN"/AutoTest.Net/* "$DEPLOYDIR/.OpenIDE/languages/C#-files/bin/AutoTest.Net"
cp -r "$CSHARP_BIN"/ContinuousTests/* "$DEPLOYDIR/.OpenIDE/languages/C#-files/bin/ContinuousTests"

# go language plugin
cp "$ROOT/Languages/go/bin/go" "$DEPLOYDIR/.OpenIDE/languages/go"
cp "$ROOT/Languages/go/rscripts/go-build.sh" "$DEPLOYDIR/.OpenIDE/languages/go-files/rscripts/go-build.sh"
cp "$ROOT"/Languages/go/graphics/* "$DEPLOYDIR/.OpenIDE/languages/go-files/graphics/"

# python, js and php language plugins
cp "$ROOT/Languages/python/python.py" "$DEPLOYDIR/.OpenIDE/languages/python"
cp "$ROOT/Languages/js/js.js" "$DEPLOYDIR/.OpenIDE/languages/js"
cp "$ROOT/Languages/js/js-files/lib/parse-js.js" "$DEPLOYDIR/.OpenIDE/languages/js-files/lib/parse-js.js"
cp "$ROOT/Languages/js/js-files/lib/parse-js.License" "$DEPLOYDIR/.OpenIDE/languages/js-files/lib/parse-js.License"
cp "$ROOT/Languages/js/js-files/lib/carrier.js" "$DEPLOYDIR/.OpenIDE/languages/js-files/lib/carrier.js"
cp "$ROOT/Languages/js/js-files/lib/carrier.License" "$DEPLOYDIR/.OpenIDE/languages/js-files/lib/carrier.License"
cp -r "$ROOT"/Languages/php/* "$DEPLOYDIR/.OpenIDE/languages"
| true
|
089c6f7f1fc2e1c7a11c59cdfe1e9e5818f9e0b0
|
Shell
|
pedroromanvr/iot
|
/bin/tunnelMqtt.sh
|
UTF-8
| 1,120
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
# This service allows a remote tunneling over http using ssh
# Primary use is to tunnel mqtt traffic over http
# corkscrew is used to make the http clientside tunneling http://www.agroman.net/corkscrew/
# Equivalent hand-written command:
# ssh -i PInstance.pem ec2-user@ec2-54-89-176-127.compute-1.amazonaws.com -L 1883:ec2-54-89-176-127.compute-1.amazonaws.com:1883 -N -o "ProxyCommand /home/pi/corkscrew/corkscrew-2.0/corkscrew www-proxy.us.oracle.com 80 ec2-54-89-176-127.compute-1.amazonaws.com 22"
export https_proxy=http://www-proxy.us.oracle.com:80;
export http_proxy=http://www-proxy.us.oracle.com:80;
SSH_CMD="/usr/bin/ssh"
KEY_LOC="/home/pi/corkscrew/PInstance.pem"
USR="ec2-user"
REM_SERV="ec2-54-89-176-127.compute-1.amazonaws.com"
CORK_LOC="/home/pi/corkscrew/corkscrew-2.0/corkscrew"
PROX="www-proxy.us.oracle.com"
PORT="80"
SSH_PORT="22"
MQTT_PORT="1883"
# Invoke ssh directly: the ProxyCommand value is passed as a single -o
# argument, so the previous `bash -c "$STRING"` re-quoting hack (with
# embedded escaped quotes) is unnecessary and fragile.
"$SSH_CMD" -i "$KEY_LOC" \
  -L "$MQTT_PORT:$REM_SERV:$MQTT_PORT" \
  -N \
  -o "ProxyCommand $CORK_LOC $PROX $PORT $REM_SERV $SSH_PORT" \
  "$USR@$REM_SERV"
| true
|
86ce71731c6d55e2c16b39002fa9f928c8544046
|
Shell
|
vladimir4862/git-secret
|
/tests/test_main.bats
|
UTF-8
| 417
| 3.0625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bats
# Smoke tests for the top-level `git secret` command dispatcher.
load _test_base
function setup {
set_state_git
}
function teardown {
unset_current_state
}
# Without a subcommand the dispatcher exits 126.
@test "run 'git secret' without command" {
run git secret
[ "$status" -eq 126 ]
}
# An unrecognised subcommand also exits 126.
@test "run 'git secret' with bad command" {
run git secret notacommand
[ "$status" -eq 126 ]
}
# --version prints exactly the version string exported by the test base.
@test "run 'git secret --version'" {
run git secret --version
[ "$output" == "$GITSECRET_VERSION" ]
}
| true
|
c7ae3bfa101cca58315a974de296400a04a7e116
|
Shell
|
ciyam/ciyam
|
/src/src_hash
|
UTF-8
| 328
| 2.8125
| 3
|
[
"MIT",
"Zlib",
"BSD-3-Clause",
"OML"
] |
permissive
|
#!/bin/sh
# Copyright (c) 2019-2023 CIYAM Developers
#
# Distributed under the MIT/X11 software license, please refer to the file license.txt
# in the root project directory or http://www.opensource.org/licenses/mit-license.php.
#
# Prints one sha256 digest over the per-file sha256 sums of every tracked
# file. Stream straight into the final hash instead of staging through the
# "~src_hash" temp file (which was left behind if the script was killed).
# The trailing `tr -d " -"` strips sha256sum's " -" stdin marker.
git ls-files | xargs -L 1 sha256sum | sha256sum | tr -d " -"
| true
|
b7a57cb9d1d7137812d0cc9f773d5a6fcfc7470d
|
Shell
|
legit-sqa/legit
|
/legitimize
|
UTF-8
| 1,217
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/sh
# Initialise the orphan 'tracking' branch legit uses to store proposal state.
. legit-setup.sh
orig_head=`git symbolic-ref -q --short HEAD`
stashed=0
# Check that the current branch exists - so we can check if it has
# unstashed changes
if git show-ref --quiet refs/heads/$orig_head; then
# Stash only when the work tree actually differs from HEAD.
# `git diff-index --quiet` exits 0 when there are NO changes, so the test
# must be negated (previously a *clean* tree was stashed instead).
if ! git diff-index --quiet HEAD --; then
git stash > /dev/null
stashed=1
fi
fi
# If we don't have a tracking branch, we must make one
if ! git show-ref --quiet refs/heads/tracking; then
git checkout --orphan tracking > /dev/null
# Git only allows us to clean up when there's something to delete
if git show-ref refs/heads/$orig_head; then
git rm --force --quiet -r . > /dev/null
fi
else
git checkout tracking > /dev/null
fi
# Create the on-branch layout for proposals (mkdir -p covers both levels).
mkdir -p .tracking/proposals
cd .tracking/
# POSIX test has no unary '-a' ("exists"); use the standard '-e' so this
# works under dash and other strict /bin/sh implementations.
if [ ! -e config ]
then
touch config
fi
cd proposals/
if [ ! -e open ]
then
touch open
fi
if [ ! -e pending ]
then
touch pending
fi
cd ../..
# Commit this initialisation to the tracking branch
git add .tracking/ > /dev/null
git commit --quiet -m 'Initialized .tracking branch'
return_to_orig_head
| true
|
d6ee2e1faf7219e656643f4244078d23b6b0c1ae
|
Shell
|
Xilinx/Vitis-AI
|
/src/vai_runtime/vart/dpu-runner/test/pack_for_trd.sh
|
UTF-8
| 2,614
| 2.859375
| 3
|
[
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSD-3-Clause-Open-MPI",
"LicenseRef-scancode-free-unknown",
"Libtool-exception",
"GCC-exception-3.1",
"LicenseRef-scancode-mit-old-style",
"OFL-1.1",
"JSON",
"LGPL-2.1-only",
"LGPL-2.0-or-later",
"ICU",
"LicenseRef-scancode-other-permissive",
"GPL-2.0-or-later",
"GPL-3.0-only",
"LicenseRef-scancode-issl-2018",
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-unicode",
"LGPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-or-later",
"Zlib",
"BSD-Source-Code",
"ClArtistic",
"LicenseRef-scancode-unknown-license-reference",
"ISC",
"NCSA",
"LicenseRef-scancode-proprietary-license",
"GPL-2.0-only",
"CC-BY-4.0",
"FSFULLR",
"Minpack",
"Unlicense",
"BSL-1.0",
"NAIST-2003",
"LicenseRef-scancode-protobuf",
"LicenseRef-scancode-public-domain",
"Libpng",
"Spencer-94",
"BSD-2-Clause",
"Intel",
"GPL-1.0-or-later",
"MPL-2.0"
] |
permissive
|
#!/bin/bash
#
# Copyright 2022-2023 Advanced Micro Devices Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Packages the resnet50 sample (libraries, headers, binary, sources and a
# build helper) into resnet50.tar.gz for the TRD.
# NOTE: the shebang must be the very first line of the file; it previously
# appeared after the license block and was ignored.
set -e
workspace=$(mktemp -d --dry-run --tmpdir=$CMAKE_CURRENT_BINARY_DIR/tmp)
# Register the cleanup trap immediately after choosing the workspace so
# that failures under `set -e` no longer leak the temp directory (the trap
# used to be installed only at the very end of the script).
function on_finish {
rm -fr "$workspace";
}
trap on_finish EXIT
mkdir -p "$workspace"
cd "$workspace"
mkdir -p samples/lib
echo "this is pwd $PWD, echo $CMAKE_INSTALL_PREFIX"
cp -av \
$CMAKE_INSTALL_PREFIX/lib/libvart-util.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-runner.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-runner-assistant.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-xrt-device-handle.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-buffer-object.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-dpu-controller.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-dpu-runner.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-mem-manager.so* \
$CMAKE_INSTALL_PREFIX/lib/libvart-trace.so* \
$CMAKE_INSTALL_PREFIX/lib/libxir.so* \
$CMAKE_INSTALL_PREFIX/lib/libunilog.so* \
samples/lib
cp -av $CMAKE_INSTALL_PREFIX/include \
samples/
mkdir -p samples/bin
cp -av $CMAKE_CURRENT_BINARY_DIR/resnet50 samples/bin
mkdir -p samples/src
cp -av $CMAKE_CURRENT_SOURCE_DIR/resnet50.cpp samples/src/
cp -av ${CMAKE_CURRENT_BINARY_DIR}/word_list.inc samples/src/
# Generated build helper; the \$ escapes defer expansion to build time.
cat <<EOF > samples/build.sh
CXX=\${CXX:-g++}
result=0 && pkg-config --list-all | grep opencv4 && result=1
if [ \$result -eq 1 ]; then
OPENCV_FLAGS=\$(pkg-config --cflags --libs-only-L opencv4)
else
OPENCV_FLAGS=\$(pkg-config --cflags --libs-only-L opencv)
fi
\$CXX -std=c++17 -Llib -Iinclude -Isrc src/resnet50.cpp -lglog -lvart-mem-manager -lxir -lunilog -lvart-buffer-object -lvart-runner -lvart-util -lvart-xrt-device-handle \${OPENCV_FLAGS} -lopencv_core -lopencv_imgproc -lopencv_imgcodecs -lvart-dpu-runner -lvart-dpu-controller -lvart-runner-assistant -lvart-trace
EOF
tar -zcvf $CMAKE_CURRENT_BINARY_DIR/resnet50.tar.gz samples
echo "CONGRATULATION $CMAKE_CURRENT_BINARY_DIR/resnet50.tar.gz is ready"
| true
|
3260242662bc84abc1a7e794ab7d9ac9e0b8f95d
|
Shell
|
NewMai/DLL_TEST
|
/cpp_with_class/compile.sh
|
UTF-8
| 1,064
| 2.515625
| 3
|
[] |
no_license
|
# Must be run from a VS developer console, e.g. "x86 Native Tools Command Prompt for VS 2019".
#################################################################################
# Build the DLL
# For vs2019
# /nologo suppresses the copyright banner
# Method 1:
# Adding /DEBUG at LINK time embeds the debug information
# Compile
cl /c /Od /Zi /EHsc /GS- /MDd /nologo /Fd:get_rand.pdb get_rand.cpp
# Link
link /DLL /DEBUG /INCREMENTAL:NO /NOLOGO /DEF:get_rand.def get_rand.obj /PDB:get_random.pdb /OUT:get_random.dll
# Method 2:
# This form carries a large amount of debug information.
# Single-step compile+link (but this method cannot use a .def file)
cl /LD /EHsc get_rand.cpp
cl /LDd /EHsc get_rand.cpp # Debug build
cl /LDd /EHsc /MDd /Zi /Od /nologo /INCREMENTAL:NO /GS- get_rand.cpp /Fd:get_rand.pdb
#################################################################################
# Build the EXE
cl /c /Od /Zi /EHsc /GS- /MDd /nologo /Fd:main.pdb main.cpp
link /DEBUG /INCREMENTAL:NO /NOLOGO main.obj /PDB:main.pdb /OUT:main.exe
| true
|
f95bd516d1f12ddbdfa67b11b53f629772b2f745
|
Shell
|
aan680/repo_senseshifteval
|
/scripts/test.sh
|
UTF-8
| 2,570
| 3.078125
| 3
|
[] |
no_license
|
#input files with columns [target, POS, t]. Save as targets.csv in the corresponding dataset folder (data/WSE or data/HW). t expresses time of change.
folder_hw="data/HW+"
folder_hw28="data/HW"
folder_wsct="data/WSCT"
folder_logs="data" #for extra output, for reference
hw_input="$folder_hw/sourcedata.csv"
wsct_input="$folder_wsct/sourcedata.csv"
gold_hwplus_synset="$folder_hw/gold_synset_after_iaa.csv"
gold_wsctplus_synset="$folder_wsct/gold_synset_after_iaa.csv"
###################################################################################
###################################################################################
echo "Now running:"
echo "python scripts/get_evaluationset_wsct+_hw+.py $hw_input $folder_hw/evaluationset_synset.csv $folder_hw/evaluationset_wordpair.csv"
echo "python scripts/get_evaluationset_wsct+_hw+.py $wsct_input $folder_hw/evaluationset_synset $folder_wsct/evaluationset_wordpair"
python scripts/get_evaluationset.py $hw_input $folder_hw/evaluationset_synset.csv $folder_hw/evaluationset_wordpair.csv
#python scripts/get_evaluationset_wsct+_hw+.py $hw_input $folder_hw/evaluationset_synset.csv $folder_hw/evaluationset_wordpair.csv
#python scripts/get_evaluationset_wsct+_hw+.py $wsct_input $folder_wsct/evaluationset_synset.csv $folder_wsct/evaluationset_wordpair.csv
###################################################################################
# The inner double quotes around "synset"/"wordpair" are escaped: unescaped
# they terminated the prompt string, splitting it into stray arguments and
# breaking `read -p`.
read -p "The following script transfers the synset-level gold standard onto the word-level file. The name of the output file is based on the input file with sed(\"synset\", \"wordpair\"), e.g. gold_wordpair_after_iaa.csv and is saved to the same folder as the synset-level gold standard. Press enter to run."
echo "Now running: "
echo Rscript scripts/make_wordlevel_gold_from_synsetlevel_gold.R --folder=$folder_hw --file_input_gold_synset=$gold_hwplus_synset --file_input_wordpair="evaluationset_wordpair.csv"
echo Rscript scripts/make_wordlevel_gold_from_synsetlevel_gold.R --folder=$folder_wsct --file_input_gold_synset=$gold_wsctplus_synset --file_input_wordpair="evaluationset_wordpair.csv"
#Rscript scripts/make_wordlevel_gold_from_synsetlevel_gold.R --folder=$folder_hw --file_input_gold_synset=$gold_hwplus_synset --file_input_wordpair=$folder_hw/"evaluationset_wordpair.csv"
#Rscript scripts/make_wordlevel_gold_from_synsetlevel_gold.R --folder=$folder_wsct --file_input_gold_synset=$gold_wsctplus_synset --file_input_wordpair=$folder_wsct/"evaluationset_wordpair.csv"
###################################################################################
| true
|
f8626065b94e0c7d86b8b5bda975d49966aee3f3
|
Shell
|
GeorgeTaveras1231/git-gpg
|
/src/git-gpg-utils/diff.bash
|
UTF-8
| 411
| 3.34375
| 3
|
[] |
no_license
|
# Show the decrypted diff for each hidden (encrypted) file: the version
# anchored at $1 is decrypted and compared against the current raw file.
diff() {
local anchor_version=$1
local file file_relative_path
# _hidden-files emits one path per line; deliberate word-splitting here.
# NOTE(review): assumes paths contain no whitespace — confirm upstream.
for file in $(_hidden-files); do
# Assignment is separated from `local` so the substitution's exit
# status is not masked; expansions are quoted defensively.
file_relative_path=$(printf '%s' "$file" | sed -e "s#$project_root/##")
git show "$anchor_version:$file_relative_path" | \
gpg --decrypt | \
git diff --no-index -- - "$(_raw-file-for "$file")"
done
}
_register-command diff
_diff-signature() {
echo "diff"
}
_diff-brief() {
cat <<-EOF
View diff of changed secrets
EOF
}
| true
|
e1c71cefe13a75b472f0e7861db98cb8ad959058
|
Shell
|
BenLorantfy-Archive/FishBot
|
/deploy-vm.sh
|
UTF-8
| 4,887
| 3.875
| 4
|
[] |
no_license
|
#!/bin/bash
# =====================================================================================
# Script: deploy-vm.sh - benlorantfy.com/fishbot
# Author: Ben Lorantfy (ben@lorantfy.com)
# Last Updated: July 6th 2017
# =====================================================================================
# This script deploys the api and web client to a cloud virtual machine
# The following programs must be already installed for the app to deploy successfully
# - nodejs
# - npm
# - forever
# =====================================================================================
# CONSTANTS
IP=138.197.107.96
# ANSI-C quoting ($'...') stores a real ESC byte in each color variable.
# With the previous plain single quotes, every echo printed the literal
# text "\033[0;35m" instead of switching colors (bash echo does not
# interpret backslash escapes without -e).
RED=$'\033[0;31m'
BLUE=$'\033[0;34m'
GREEN=$'\033[0;32m'
PURPLE=$'\033[0;35m'
NC=$'\033[0m' # No Color
REMOTE_TIME=$(ssh root@${IP} 'date')
KERNAL_INFO=$(ssh root@${IP} 'uname -a')
# STEPS
echo ""
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}=========================================================${NC}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}     Starting Deployment of benlorantfy.com/fishbot      ${NC}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}=========================================================${NC}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE} Remote System Time: ${REMOTE_TIME}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE} Kernal Info: ${KERNAL_INFO}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}=========================================================${NC}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE} Steps:${NC}"
echo "${PURPLE}[deploy.sh]${NC} 1. Stop nginx"
echo "${PURPLE}[deploy.sh]${NC} 2. Copy configuration file"
echo "${PURPLE}[deploy.sh]${NC} 3. Start nginx"
echo "${PURPLE}[deploy.sh]${NC} 4. Reload nginx configuration"
echo "${PURPLE}[deploy.sh]${NC} 5. Stop all current apps"
echo "${PURPLE}[deploy.sh]${NC} 6. Delete old frontend and backend files"
echo "${PURPLE}[deploy.sh]${NC} 7. Copy new frontend and backend files"
echo "${PURPLE}[deploy.sh]${NC} 8. Install dependencies for node api"
echo "${PURPLE}[deploy.sh]${NC} 9. Start node api using forever"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}============================================${NC}"
# Provide the --skip-front-build option to skip the frontend build
SKIP_FRONT_END_BUILD=0
while getopts ":-:" opt; do
case ${opt} in
-)
case ${OPTARG} in
"skip-front-build"*) echo "${PURPLE}[deploy.sh]${NC} Skipping frontend build becuase of --skip-front-build"
SKIP_FRONT_END_BUILD=1
;;
esac
esac
done
# BUILD REACT APP
if [ $SKIP_FRONT_END_BUILD -eq 0 ]
then
echo "${PURPLE}[deploy.sh]${NC} Building react app..."
cd ./WebClient
npm run build
cd ../
fi
# DELETE OLD FILES
echo "${PURPLE}[deploy.sh]${NC} Deleting old files..."
ssh root@${IP} 'rm -r "/root/fishbot"'
echo "${PURPLE}[deploy.sh]${NC} ${GREEN}Deleted${NC} old files"
# COPY NEW FILES
echo "${PURPLE}[deploy.sh]${NC} Transfering new files..."
rsync -av --exclude '.vscode' --exclude 'node_modules' --exclude '.git' --exclude '.github' . root@${IP}:/root/fishbot
echo "${PURPLE}[deploy.sh]${NC} ${GREEN}Transfered${NC} new files"
# STOP ALL APPS
echo "${PURPLE}[deploy.sh]${NC} Stopping forever scripts..."
ssh root@${IP} 'forever stop FishBotApiServer'
ssh root@${IP} 'forever stop FishBotStreamServer'
# INSTALL API DEPENDENCIES
echo "${PURPLE}[deploy.sh]${NC} Installing node dependencies for api server..."
ssh root@${IP} 'cd "/root/fishbot/ApiServer" && npm install --production'
echo "${PURPLE}[deploy.sh]${NC} ${GREEN}Installed${NC} dependencies"
echo "${PURPLE}[deploy.sh]${NC} Pruning node dependencies..."
ssh root@${IP} 'cd "/root/fishbot/ApiServer" && npm prune --production'
echo "${PURPLE}[deploy.sh]${NC} ${GREEN}Pruned${NC} node dependencies"
echo "${PURPLE}[deploy.sh]${NC} Installing node dependencies for stream server..."
ssh root@${IP} 'cd "/root/fishbot/StreamServer" && npm install --production'
echo "${PURPLE}[deploy.sh]${NC} ${GREEN}Installed${NC} dependencies"
echo "${PURPLE}[deploy.sh]${NC} Pruning node dependencies..."
ssh root@${IP} 'cd "/root/fishbot/StreamServer" && npm prune --production'
echo "${PURPLE}[deploy.sh]${NC} ${GREEN}Pruned${NC} node dependencies"
# START ALL APPS
# forever is used to manage and monitor all the apps
# The forever configuration is read from forever-vm.json
echo "${PURPLE}[deploy.sh]${NC} Starting all apps using forever..."
ssh root@${IP} 'forever start /root/fishbot/forever-vm.json'
echo "${PURPLE}[deploy.sh]${NC} Listing all the started apps..."
ssh root@${IP} 'forever list'
# RESTART NGINX
# The master nginx.conf file has to include fishbot.nginx.conf
echo "${PURPLE}[deploy.sh]${NC} Restarting nginx"
ssh root@${IP} 'nginx -s reload'
# DONE
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}=========================${NC}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}     Done Deployment     ${NC}"
echo "${PURPLE}[deploy.sh]${NC} ${BLUE}=========================${NC}"
| true
|
f2a069fc36335c9ce653f8a5f7356d91c40a5d91
|
Shell
|
apluslms/grade-java
|
/bin/move-to-package-dirs
|
UTF-8
| 2,065
| 4.46875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh -e
# Moves or copies scala/java files to package directories.
#
# Usage: move-to-package-dirs [-d DELIM] [-f FILTER] [-v] [--] [FILE|DIR]...
# For each source file, the java/scala `package` declaration determines the
# destination directory; the file is moved there (or copied if read-only)
# and the destination path is printed, DELIM-separated. Directories are
# handled by re-invoking this script on their matching files.
delim=' '
filter='*.java *.scala'
verbose=
while [ "$1" ]; do
case "$1" in
-d) delim=$2 ; shift ;;
-f) filter=$2 ; shift ;;
-v) verbose="-v" ;;
--) shift ; break ;;
-*) echo "ERROR: Invalid option '$1' for $0" >&2 ; exit 64 ;;
*) break ;;
esac
shift
done
# Print (NUL-separated) every file under $1 matching the -iname patterns in
# $filter. The patterns are assembled into find's argument list via `set --`.
find_files() {
path=$1
set -f # disable * expansion for $filter
set --
for f in $filter; do
[ "$1" ] && set -- "$@" '-o'
set -- "$@" '-iname' "$f"
done
find "$path" '(' "$@" ')' -print0
}
# Recurse: re-invoke this script ($0) on every matching file inside $1,
# forwarding the delimiter and verbosity flags.
handle_dir() {
dir=$1
set -- '-d' "$delim"
[ "$verbose" ] && set -- "$@" '-v'
find_files "$dir" | xargs -r0 "$0" "$@" '--'
return $?
}
# No arguments: process the current directory tree.
if [ -z "$*" ]; then
handle_dir .
exit $?
fi
for file; do
if ! [ -e "$file" ]; then
echo "WARNING: '$file' is missing" >&2
continue
elif [ -d "$file" ]; then
handle_dir "$file"
RES=$?
[ $RES -ne 0 ] && echo "WARNING: $0 handle_dir() failed with $RES"
continue
fi
# Extract java/scala package and package object from the source file
# (NUL-aware grep/sed so declarations spanning lines are still matched).
package=$(grep -Ezo 'package\s*[^[:space:];]+' "$file" | head -zn1 | sed -z 's/^package\s*//g')
package_obj=$(grep -Ezo 'package\s*object\s*[^][)(}{[:space:];]+' "$file" | head -zn1 | sed -z 's/^package\s*object\s*//g')
[ "$package_obj" ] && package=$package.$package_obj
# Destination directory and file: dots become path separators.
dir=$(echo "$package" | tr -d '\n' | tr '.' '/' | sed 's,^/*,,g')
[ "$dir" ] || dir='.'
[ -d "$dir" ] || mkdir -p "$dir"
dest=$dir/${file##*/}
if [ "$(realpath "$file")" = "$(realpath "$dest")" ]; then
# No move required
:
elif [ -e "$dest" ]; then
# Destination exists
echo "WARNING: can't copy '$file' to '$dest' as destination already exists." >&2
elif [ -w "$file" ]; then
mv $verbose "$file" "$dir" >&2
else
cp $verbose "$file" "$dir" >&2
fi
# Report the destination on stdout (diagnostics go to stderr above).
printf "$dest$delim"
done
| true
|
95402011c4cf167c939284a3c59c181dfe94d96d
|
Shell
|
clintval/aur-packages
|
/python-shellingham/PKGBUILD
|
UTF-8
| 1,395
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
# Maintainer: Clint Valentine <valentine.clint@gmail.com>
# Split PKGBUILD: builds both the Python 3 and Python 2 shellingham packages
# from the same PyPI source tarball.
_name=shellingham
pkgbase='python-shellingham'
pkgname=('python-shellingham' 'python2-shellingham')
pkgver=1.2.4
pkgrel=1
pkgdesc="Detect what shell the current Python executable is running in."
arch=('any')
url=https://github.com/sarugaku/"${_name}"
license=('ISC')
makedepends=(
'python' 'python-setuptools'
'python2' 'python2-setuptools')
options=(!emptydirs)
source=("${pkgname}"-"${pkgver}".tar.gz::https://pypi.io/packages/source/"${_name:0:1}"/"${_name}"/"${_name}"-"${pkgver}".tar.gz)
sha256sums=('c09c563a2e185ec3d64e43c286dbba3150fc182d96cd29ff5b002f3d3c3f5076')
# Duplicate the extracted source so py2 and py3 builds don't share state.
prepare() {
cp -a "${_name}"-"${pkgver}"{,-py2}
}
# NOTE(review): a generic package() alongside the package_<name>() split
# functions is unusual for split packages — confirm makepkg ignores it here.
package() {
cd "${srcdir}"/"${_name}"-"${pkgver}"
python setup.py install --root="${pkgdir}/" --optimize=1
}
build(){
cd "${srcdir}"/"${_name}"-"${pkgver}"
python setup.py build
cd "${srcdir}"/"${_name}"-"${pkgver}"-py2
python2 setup.py build
}
package_python2-shellingham() {
depends=('python2')
cd "${_name}"-"${pkgver}"-py2
install -Dm644 LICENSE "${pkgdir}"/usr/share/licenses/"${pkgname}"/LICENSE
python2 setup.py install --root="${pkgdir}"/ --optimize=1 --skip-build
}
package_python-shellingham() {
depends=('python')
cd "${_name}"-"${pkgver}"
install -Dm644 LICENSE "${pkgdir}"/usr/share/licenses/"${pkgname}"/LICENSE
python setup.py install --root="${pkgdir}"/ --optimize=1 --skip-build
}
| true
|
c84173fdad11d073729011d16ead2950dec8ae2c
|
Shell
|
joenery/demux
|
/wait4rtacomplete
|
UTF-8
| 882
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Must be run from an Illumina run directory.
#
# Optional $1: base mask forwarded to demux-run via -b.
# Base masks for 10x:
#  1) basemask="Y28N*,I8N*,Y98N*" ;;
#  2) basemask="Y26N*,I8N*,N*,Y98N*" ;;
#  3) basemask="Y28N*,I8N*,N*,Y98N*" ;;
#  4) basemask="Y28N*,I10N*,I10N*,Y98N*" ;;
source "/gale/netapp/home/seq/bin/demux/date-time"
run_path="$(pwd)"
run_id="$(basename "$run_path")"
RTACOMPLETE=./RTAComplete.txt
WAITTIME=1m
QLOG=/gale/netapp/seq2/illumina_runs/.qlog
# Block until the sequencer drops RTAComplete.txt in the run directory.
echo "$(date_time) Waiting for $RTACOMPLETE to be generated..."
until [ -f "$RTACOMPLETE" ]
do
sleep "$WAITTIME"
done
echo "$(date_time) $RTACOMPLETE detected"
# Demultiplex, logging to the shared qlog directory. The base mask is
# quoted: it contains '*' and commas, so the previous unquoted $1 was
# subject to glob expansion and word splitting.
if [[ -z "${1}" ]]; then
demux-run &> "$QLOG/$run_id.txt"
else
demux-run -b "$1" &> "$QLOG/$run_id.txt"
fi
exit
| true
|
85afb92c9d1696aec3d39a16eed6f902af6b5a15
|
Shell
|
Zane-/dotfiles
|
/tmux.sh
|
UTF-8
| 976
| 3.46875
| 3
|
[] |
no_license
|
#!/bin/bash
# Installs tmux (apt or Homebrew), the tpm plugin manager, and links this
# repo's .tmux.conf into place, backing up any existing config.
mkdir -p ~/dotfiles/backups

# Refresh package metadata with whichever package manager exists.
if [ -x "$(command -v apt-get)" ]; then
	sudo apt-get update
elif [ -x "$(command -v brew)" ]; then
	brew update
fi

# Install tmux if it is not already available.
if [ ! -x "$(command -v tmux)" ]; then
	if [ -x "$(command -v apt-get)" ]; then
		sudo apt-get install tmux
	elif [ -x "$(command -v brew)" ]; then
		brew install tmux
	fi
fi

# tpm is a cloned git repository, i.e. a DIRECTORY — test with -d.
# (The old `-f` test was never true, so a re-clone was attempted on every
# run and failed once the directory existed.)
if [ ! -d ~/.tmux/plugins/tpm ]; then
	echo "[+] Installing tpm"
	mkdir -p "$HOME/.tmux/plugins"
	git clone https://github.com/tmux-plugins/tpm "$HOME/.tmux/plugins/tpm"
fi

# Back up any existing config before linking ours.
if [ -f ~/.tmux.conf ]; then
	mv ~/.tmux.conf ~/dotfiles/backups
fi
ln -sf ~/dotfiles/.tmux.conf ~/.tmux.conf
tmux source ~/.tmux.conf
"$HOME/.tmux/plugins/tpm/scripts/install_plugins.sh"
tmux source ~/.tmux.conf
echo "[+] Linked .tmux.conf"
echo "[+] Setup complete. Any existing files have been moved to ~/dotfiles/backups"
echo "[+] Install a nerd font from https://github.com/ryanoasis/nerd-fonts for the tmux-power plugin to display properly."
|
cc5327fc7a5ab49359c86d6bb4e899a74722db96
|
Shell
|
petronny/aur3-mirror
|
/python2-pyclewn/PKGBUILD
|
UTF-8
| 923
| 2.515625
| 3
|
[] |
no_license
|
# Maintainer : Lenin Lee <lenin.lee (at) gmail (dot) com >
# Contributor: LUO, Jian <jian (dot) luo (dot) cn (at) gmail (dot) com >
# Ivan Sichmann Freitas <ivansichfreitas (at) gmail (dot) com>
# Builds the Python 2 flavour of pyclewn (gdb support for vim).
pkgname=python2-pyclewn
pkgver=1.11
pkgrel=1
pkgdesc="A python2 version of gdb support for vim"
license=('GPL2')
arch=(i686 x86_64)
url="http://pyclewn.wiki.sourceforge.net"
depends=('python2' 'gvim>=7.3' 'gdb')
# vimdoc.install regenerates vim help tags on install/remove.
install=vimdoc.install
source=(http://downloads.sourceforge.net/sourceforge/pyclewn/pyclewn-$pkgver/pyclewn-$pkgver.py2.tar.gz)
md5sums=('313a88e7ec2dd6595d0fd609b8f19c13')
build() {
cd $srcdir/pyclewn-$pkgver.py2
# fixing a duplicated marker in pyclewn.txt
#sed -i 's/\*C\*//' runtime/doc/pyclewn.txt
# vimdir points setup.py at the installed vim runtime; the version number
# is scraped from `vim --version` (e.g. "7.3" -> "73").
vimdir=/usr/share/vim/vim`vim --version|head -n 1|cut -f 5 -d ' ' | cut -c 1,3` python2 setup.py install --force --root=$pkgdir
# removing tags to avoid conflict
#rm $pkgdir/usr/share/vim/vim73/doc/tags
}
| true
|
46cf27a0e9519d53689956fba047c58387270851
|
Shell
|
LUGM/bourne_again_2018
|
/day2_bash/3.sh
|
UTF-8
| 160
| 3.046875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Demo: positional arguments. Usage: ./3.sh <source> <dest>
echo "Number of arguments passed: $#"
echo "Script name: $0"
echo "$1 has been copied to $2"
# Quote the paths so names containing spaces or glob characters survive.
cp -R "$1" "$2"
echo "$@"
ls
| true
|
be9ce196487774701b3b2ff6f3a03e6f4aa81add
|
Shell
|
Varying-Vagrant-Vagrants/VVV
|
/config/homebin/switch_php_debugmod
|
UTF-8
| 2,659
| 4.0625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Switch which PHP debug module (xdebug/xhgui/tideways/pcov) is active
# across every installed PHP version. The modules conflict with each
# other, so all of them are disabled first, then the requested one is
# re-enabled and PHP-FPM is restarted.
set -eo pipefail
source /srv/provision/provision-helpers.sh

# Grab the currently active PHP mods, and lowercase it for comparison
enabled_mods=$(php -m)
enabled_mods=${enabled_mods,,}

# These PHPMods all interfere with each other in some way
phpmods=(
  "xdebug"
  "xhgui"
  "tideways_xhprof"
  "pcov"
)

# The mod that the user wants enabled, `none` for vanilla PHP
mod=${1:-none}
mod=${mod,,}

# Disable every conflicting module, for every PHP version reported by
# phpquery, in both the fpm and cli SAPIs.
# Arguments: $1 - name of an array variable, passed as "name[@]".
disable_phpmods() {
  # declare that our first param is an array
  declare -a mods=("${!1}")
  for i in "${mods[@]}"
  do
    for phpv in $(phpquery -V)
    do
      if is_module_enabled_fpm "${phpv}" "${i}"; then
        vvv_info " ✘ Disabling active PHP <b>v${phpv}</b><info> debug mod: </info><b>'${i}'</b>"
        sudo phpdismod -q -s fpm -v "${phpv}" "${i}"
        sudo phpdismod -q -s cli -v "${phpv}" "${i}"
      fi
    done
  done
}

# Enable module $1 (fpm and cli) for every PHP version that actually has
# it installed; versions without the module are skipped with a notice.
enable_phpmod() {
  for phpv in $(phpquery -V)
  do
    if is_module_installed_fpm "${phpv}" "${1}"; then
      vvv_info " * Enabling <b>'${1}'</b><info> for PHP <b>v${phpv}</b>"
      sudo phpenmod -q -v "${phpv}" -s fpm "${1}"
      sudo phpenmod -q -v "${phpv}" -s cli "${1}"
    else
      vvv_info " * Skipped enabling ${1} in PHP <b>v${phpv}</b><info>, module isn't installed for this version"
    fi
  done
}

# Returns 0 when module $2 is enabled for PHP version $1 in the fpm SAPI,
# whether enabled by an admin or by the maintainer scripts.
is_module_enabled_fpm() {
  if [ -f "/var/lib/php/modules/${1}/fpm/enabled_by_admin/${2}" ] || [ -f "/var/lib/php/modules/${1}/fpm/enabled_by_maint/${2}" ]; then
    return 0
  fi
  return 1
}

# Returns 0 when module $2 is installed (has a mods-available ini) for
# PHP version $1.
is_module_installed_fpm() {
  if [ -f "/etc/php/${1}/mods-available/${2}.ini" ]; then
    return 0
  fi
  return 1
}

# Restart every php*-fpm init service so the module change takes effect.
restart_phpfpm() {
  vvv_info " * Restarting PHP FPM services so that the change takes effect"
  find /etc/init.d/ -name "php*-fpm" -exec bash -c 'sudo service "$(basename "$0")" restart' {} \;
}

# Always start from a clean slate: turn every debug mod off first.
disable_phpmods phpmods[@]

if [[ "${mod}" == "none" ]]; then
  restart_phpfpm
  vvv_success " ✔ All PHP Debug mods are now turned off."
  exit 0
fi

if [[ "${mod}" == "pcov" ]]; then
  vvv_info " * pcov supports PHP 7.1 and above, it is not available for 5.6 and 7.0"
fi

# Tideways needs 2 mods enabling
if [[ "${mod}" == "tideways" ]]; then
  enable_phpmod "xhgui"
  enable_phpmod "tideways_xhprof"
  restart_phpfpm
  vvv_success " ✔ PHP Debug mod switch to <b>${mod}</b><success> complete.</success>"
  exit 0
fi

if [[ "${mod}" == "xdebug" ]]; then
  # Ensure the log file for xdebug is group writeable.
  vvv_info " * Making sure log/php/xdebug-remote.log is readable and present"
  sudo touch /var/log/php/xdebug-remote.log
  sudo chmod 664 /var/log/php/xdebug-remote.log
fi

enable_phpmod "${mod}"
restart_phpfpm
vvv_success " ✔ PHP Debug mod switch to <b>${mod}</b><success> on all available PHP versions complete.</success>"
| true
|
51ccde0d7b27eaad6f24be71b16d4bd7848ca789
|
Shell
|
scopely/skyline
|
/bin/webapp.d
|
UTF-8
| 1,290
| 4.15625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# This is used to start/stop webapp

BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/.."
RETVAL=0

# do_action <verb> <past-tense>: for start/restart, clears stale .pyc
# files first (stale bytecode can shadow updated sources), then invokes
# webapp.py with the verb and reports success or failure on stdout.
# Returns webapp.py's exit status.
do_action () {
    local verb=$1 past=$2
    if [[ $verb != stop ]]; then
        rm -f "$BASEDIR"/src/webapp/*.pyc
    fi
    /usr/bin/env python "$BASEDIR/src/webapp/webapp.py" "$verb"
    RETVAL=$?
    if [[ $RETVAL -eq 0 ]]; then
        echo "${past} webapp"
    else
        echo "failed to $verb webapp"
    fi
    return $RETVAL
}

start ()   { do_action start started; }
stop ()    { do_action stop stopped; }
restart () { do_action restart restarted; }

# Run in the foreground (no daemonizing, no status reporting).
run () {
    echo "running webapp"
    /usr/bin/env python "$BASEDIR/src/webapp/webapp.py" run
}

# See how we were called.
case "$1" in
    start)
        start
        ;;
    stop)
        stop
        ;;
    restart)
        restart
        ;;
    run)
        run
        ;;
    *)
        # BUG FIX: the usage line previously omitted the supported
        # 'restart' verb.
        echo $"Usage: $0 {start|stop|restart|run}"
        exit 2
        ;;
esac
| true
|
976b36134d4d2d76c2fbfd275335a66af05029f4
|
Shell
|
eric-s-s/tdd_python
|
/tcr.sh
|
UTF-8
| 170
| 2.578125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# "test && commit || revert": if the test suite passes, format and commit
# everything; otherwise the working tree is thrown away below.
if pytest ; then
    black .
    git add .
    # BUG FIX: "$*" joins all script arguments into one commit message.
    # The original `"$(echo $@)"` re-split the arguments and piped them
    # through echo, collapsing whitespace and misinterpreting any
    # argument that looks like an echo flag (e.g. "-n").
    git commit -m "$*"
fi
# After a successful commit the tree is clean, so this is a no-op;
# after a failure it discards all uncommitted changes (the "revert").
git reset --hard
git status
| true
|
7c359f2f9fab97160bb8c4b13af79d7bb9256639
|
Shell
|
rikkedag/BlogPostAnalysis
|
/sanitizeNew.sh
|
UTF-8
| 1,825
| 3.96875
| 4
|
[] |
no_license
|
# This script expects to be executed in a folder with a subfolder called blogs,
# containing the .xml files that should be sanitized. Results are written to a
# folder called sanblogs1.

# Create output folder
mkdir -p sanblogs1

# Loop over input files (with path prefix)
for pathname in ./blogs/*.xml
do
    # Progress report
    echo "Processing $pathname..."

    # Extract filename from path ( './blogs/post.xml' -> 'post.xml' )
    filename=$(basename "$pathname")

    # Sanitize and save result in new file.
    # Get rid of everything non-ascii (iconv reads the file directly;
    # the original piped it through a useless `cat`).
    iconv -c -f utf8 -t ascii "$pathname" |
    # Convert escape sequences "&xxx;" to ""
    sed -r "s/&[a-z]{1,6};//g" |
    # Remove ' without adding a space
    sed -r "s/'//g" |
    # Change ? and ! to .
    sed -e 's/?/\./g' |
    sed -e 's/\!/\./g' |
    # Change repetitions of . to a single .
    sed -r 's/(\.)+(\.|)?/\./g' |
    sed -r 's/(\. )+(\.| )?/\./g' |
    # Add space after and before .
    sed 's/\./\. /g' |
    sed 's/\./\ ./g' |
    # Remove all lonely "<", ">" and "/" that are not part of tags
    sed -r 's/\/|>|<|(<\/?[a-zA-Z]+>)/\1/g' |
    # Remove all non alphanumeric characters except "<", ">", "." and "/"
    sed -r 's.[^a-zA-Z0-9<>/.]. .g' |
    # Change upper case to lower case: a single `tr` pass replaces the
    # original chain of 26 per-letter sed expressions.
    tr '[:upper:]' '[:lower:]' > "sanblogs1/$filename"
done
| true
|
c82c6b43b52faa8b7e6e575b6467160d8af06627
|
Shell
|
karunmatharu/Android-4.4-Pay-by-Data
|
/external/dnsmasq/contrib/slackware-dnsmasq/dnsmasq.SlackBuild
|
UTF-8
| 1,633
| 3.234375
| 3
|
[
"GPL-3.0-only",
"GPL-2.0-only",
"MIT"
] |
permissive
|
#!/bin/sh
# Slackware build script for dnsmasq: unpacks the source, applies the
# lease-directory patch, installs into a staging root ($PKG) and produces
# a Slackware .tgz package with makepkg. Must run as root.

CWD=`pwd`
PKG=/tmp/package-dnsmasq
VERSION=2.24
ARCH=${ARCH:-i486}
BUILD=${BUILD:-1}

# Per-architecture optimization flags.
if [ "$ARCH" = "i386" ]; then
  SLKCFLAGS="-O2 -march=i386 -mcpu=i686"
elif [ "$ARCH" = "i486" ]; then
  SLKCFLAGS="-O2 -march=i486 -mcpu=i686"
elif [ "$ARCH" = "s390" ]; then
  SLKCFLAGS="-O2"
elif [ "$ARCH" = "x86_64" ]; then
  SLKCFLAGS="-O2"
fi

rm -rf $PKG
mkdir -p $PKG

cd /tmp
rm -rf dnsmasq-$VERSION
tar xzvf $CWD/dnsmasq-$VERSION.tar.gz
cd dnsmasq-$VERSION
zcat $CWD/dnsmasq.leasedir.diff.gz | patch -p1 --verbose --backup --suffix=.orig || exit
chown -R root.root .
make install-i18n PREFIX=/usr DESTDIR=$PKG MANDIR=/usr/man
chmod 755 $PKG/usr/sbin/dnsmasq
chown -R root.bin $PKG/usr/sbin
gzip -9 $PKG/usr/man/man8/dnsmasq.8
# BUG FIX: the original tested/compressed "$$f/man8/dnsmasq.8" — "$$"
# expands to the shell's PID followed by "f", so the localized man pages
# were never compressed by this loop.
for f in $PKG/usr/share/man/*; do
  if [ -f "$f/man8/dnsmasq.8" ]; then
    gzip -9 "$f/man8/dnsmasq.8" ;
  fi
done
gzip -9 $PKG/usr/man/*/man8/dnsmasq.8
mkdir -p $PKG/var/state/dnsmasq
# Strip ELF executables and shared objects to shrink the package.
( cd $PKG
  find . | xargs file | grep "executable" | grep ELF | cut -f 1 -d : | xargs strip --strip-unneeded 2> /dev/null
  find . | xargs file | grep "shared object" | grep ELF | cut -f 1 -d : | xargs strip --strip-unneeded 2> /dev/null
)
mkdir $PKG/etc
cat dnsmasq.conf.example > $PKG/etc/dnsmasq.conf.new
mkdir $PKG/etc/rc.d
zcat $CWD/rc.dnsmasq.gz > $PKG/etc/rc.d/rc.dnsmasq.new
mkdir -p $PKG/usr/doc/dnsmasq-$VERSION
cp -a \
  CHANGELOG COPYING FAQ UPGRADING_to_2.0 doc.html setup.html \
  $PKG/usr/doc/dnsmasq-$VERSION
mkdir -p $PKG/install
cat $CWD/slack-desc > $PKG/install/slack-desc
zcat $CWD/doinst.sh.gz > $PKG/install/doinst.sh
cd $PKG
makepkg -l y -c n ../dnsmasq-$VERSION-$ARCH-$BUILD.tgz
| true
|
74d524259f727a9f64856e83396a68c50d6829ba
|
Shell
|
JinnLynn/MacOSX-Kits
|
/opt/shell/commands/network.sh
|
UTF-8
| 946
| 3.21875
| 3
|
[
"MIT"
] |
permissive
|
# Look up geolocation info (country/region/city/ISP) for an IP address
# via Taobao's IP service. With no argument, the caller's public IP is
# discovered first (via ip.3322.net) and echoed.
# Requires: curl, jsoner (project helper that extracts JSON keys).
# Returns: 1 on fetch failure or a non-zero service code, 0 otherwise.
kits_ip() {
    local ip="$1"
    if [[ -z "$ip" ]]; then
        ip=$(curl -s http://ip.3322.net)
        echo "$ip"
    fi

    # BUG FIX: declare and assign separately. `local res=$(curl …)` makes
    # $? the (always-zero) status of `local`, so the curl failure check
    # below could never trigger.
    local res
    res=$(curl -s "http://ip.taobao.com/service/getIpInfo.php?ip=$ip")
    local ret=$?
    [[ "$ret" -ne 0 ]] && echo "fetch ip info fail. curl retcode: $ret" && return 1

    local code data
    code=$(jsoner -k code -s "$res")
    data=$(jsoner -k data -s "$res")
    # The service reports errors as a non-zero "code" with details in "data".
    [[ "$code" -ne 0 ]] && echo "error: $data" && return 1

    jsoner -k data.country -s "$res"
    jsoner -k data.region -s "$res"
    jsoner -k data.city -s "$res"
    jsoner -k data.isp -s "$res"
}
| true
|
c70bc2e8a39e76f8c067e79b4edfa002383cf0c2
|
Shell
|
frzleaf/begin
|
/bash/awk1.sh
|
UTF-8
| 266
| 2.515625
| 3
|
[] |
no_license
|
# Collection of awk one-liners. These are notes, not an executable
# script: the numbered lines and "$" prompts are for reading only.
1.
# Top-10 most-used commands from shell history, with usage percentage.
$ history | awk '{CMD[$2]++;count++;}END { for (a in CMD)print CMD[a] " " CMD[a]/count*100 "% " a;}' | grep -v "./" | sort -nr | head -n10 | column -c3 -s " " -t
2.
# Print only the first occurrence of each line (de-duplicate, keep order).
$ awk '!a[$0]++' ./logs
3.
# Strip all spaces from every line.
$ awk '{gsub(/ /, "", $0); print } ' ./file_with_heading_space
| true
|
f64598ea3f5f4a739d60565ba5916da373fae641
|
Shell
|
jonohart/voltha
|
/docker/config/app-install.sh
|
UTF-8
| 760
| 3.375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Install every application archive (.oar) found under DOWNLOAD_ROOT:
# each archive is unpacked, registered under APPS_ROOT/<name>, marked
# active, and its bundled m2 artifacts copied into KARAF_M2.
# Required environment: DOWNLOAD_ROOT, APP_INSTALL_ROOT, APPS_ROOT, KARAF_M2.

HERE=$(pwd)
# NOTE: $OARS is deliberately expanded unquoted below to word-split the
# find output — assumes .oar paths contain no whitespace.
OARS=$(find "$DOWNLOAD_ROOT" -name "*.oar")

for oar in $OARS; do
    cd "$HERE"
    echo "Installing application '$oar'"
    # ${VAR:?} aborts if the variable is empty/unset, so a misconfigured
    # environment cannot turn this into an `rm -rf` of the wrong tree.
    rm -rf "${APP_INSTALL_ROOT:?}"
    mkdir -p "$APP_INSTALL_ROOT"
    cd "$APP_INSTALL_ROOT"
    cp "$oar" "$APP_INSTALL_ROOT"
    unzip -oq -d . "$APP_INSTALL_ROOT/$(basename "$oar")"
    # Extract the application name from the <app name="..."> attribute.
    name=$(grep "name=" "$APP_INSTALL_ROOT/app.xml" | sed 's/<app name="//g;s/".*//g')
    mkdir -p "$APPS_ROOT/$name"
    cp "$APP_INSTALL_ROOT/app.xml" "$APPS_ROOT/$name/app.xml"
    # An empty 'active' marker file flags the app for activation.
    touch "$APPS_ROOT/$name/active"
    [ -f "$APP_INSTALL_ROOT/app.png" ] && cp "$APP_INSTALL_ROOT/app.png" "$APPS_ROOT/$name/app.png"
    cp "$APP_INSTALL_ROOT/$(basename "$oar")" "$APPS_ROOT/$name/$name.oar"
    cp -rf "$APP_INSTALL_ROOT"/m2/* "$KARAF_M2"
    rm -rf "$APP_INSTALL_ROOT"
done
| true
|
d8918fef790980883d4ac97656bfbc0d62c1bf48
|
Shell
|
Environmental-Informatics/04-introduction-to-linux-os-MingdaLu270
|
/Lab04/my_backup.sh
|
UTF-8
| 634
| 4.125
| 4
|
[] |
no_license
|
#!/bin/bash
## Back up a single project directory
## Mingda Lu
# BUG FIX: the shebang was "## !/bin/bash" (a plain comment), so the
# kernel never honored it.

# Exactly one argument (the project directory name) is required.
# BUG FIX: the original `[ $# !=1 ]` lacked the spaces [ requires and
# used a string comparison; use the numeric -ne test.
if [ "$#" -ne 1 ]
then
    echo 'input directory'
    exit 1
fi

# check if the directory exists
if [ ! -d ~/folder/"$1" ]
then
    echo 'the given directory does not seem to exist (possible typo?)'
    exit 1
fi

date=$(date +%F)

# do we already have a backup folder for today's date?
if [ -d ~/backup/"$1"_"$date" ]
then
    echo 'This project has already been backed up today, OVERWRITE?'
    read -r answer
    # BUG FIX: `[$answer != 'y']` was missing the spaces around the
    # brackets, so the test was a syntax error on every run.
    if [ "$answer" != 'y' ]
    then
        exit 0
    fi
else
    mkdir -p ~/backup/"$1"_"$date" # create the backup folder
fi

cp -R ~/folder/"$1" ~/backup/"$1"_"$date"
echo "backup of $1 completed"
| true
|
87c7b7b0f1a7fa26fdebc549ab2607a8920a35c0
|
Shell
|
mikezter/KP.tmbundle
|
/mategem.sh
|
UTF-8
| 550
| 3.765625
| 4
|
[] |
no_license
|
#!/bin/sh
# Open a gem's source directory in TextMate. The gem is located first via
# `bundle show` (project Gemfile), then via `gem which` as a fallback.
# BUG FIX: the original used bash-only [[ ]] under #!/bin/sh; this version
# is POSIX-clean, replaces backticks with $(), and quotes expansions.

gemname=$1
gempath=''

# Resolve via Bundler. On failure, print bundler's error output
# (captured in gempath) and return 1.
bundleshow() {
    gempath=$(bundle show "$gemname")
    rc=$?
    if [ "$rc" -ne 0 ] ; then
        echo "$gempath"
        return 1
    fi
    return 0
}

# Resolve via RubyGems: `gem which` prints the gem's main lib file;
# strip the trailing /lib/... component to get the gem's root directory.
gemwhich() {
    gemlib=$(gem which "$gemname")
    rc=$?
    if [ "$rc" -ne 0 ] ; then
        echo "$gemlib"
        return 1
    fi
    gempath=$(echo "$gemlib" | sed 's/\/lib\/.*//')
    return 0
}

if bundleshow ; then
    mate "$gempath"
    echo "$gempath"
    exit 0
fi

if gemwhich ; then
    mate "$gempath"
    echo "$gempath"
    exit 0
fi

echo "Can't find $gemname"
| true
|
45fc3ffc799ca2e2fbbfcf408e9ce23bff107c0a
|
Shell
|
jivkok/dotfiles
|
/sh/finds.sh
|
UTF-8
| 4,071
| 3.671875
| 4
|
[
"MIT"
] |
permissive
|
# Returns whether the given command is executable/aliased.
# BUG FIX: the original `return $(command -v "$1" >/dev/null 2>&1)`
# substitutes the command's (suppressed, hence empty) stdout as the
# return argument — it only worked by accident. Let the function's exit
# status be the status of the test itself.
_has() {
  command -v "$1" >/dev/null 2>&1
}
# fzf + rg/ag
# Configure fzf's default file source, preferring fd > rg > ag.
if _has fzf; then
  if _has fd; then
    export FZF_DEFAULT_COMMAND="fd --type f --type l --follow --hidden --exclude .git"
    export FZF_CTRL_T_COMMAND="$FZF_DEFAULT_COMMAND"
    # fzf calls these hooks for path/dir completion (e.g. `vim **<TAB>`).
    _fzf_compgen_path() {
      fd --follow --hidden --exclude ".git" . "$1"
    }
    _fzf_compgen_dir() {
      fd --type d --follow --hidden --exclude ".git" . "$1"
    }
  elif _has rg; then
    export FZF_DEFAULT_COMMAND="rg --smart-case --files --no-ignore --hidden --follow --glob '!.git'"
    export FZF_CTRL_T_COMMAND="$FZF_DEFAULT_COMMAND"
    _fzf_compgen_path() {
      rg --smart-case --files --no-ignore --hidden --follow --glob '!.git' . "$1"
    }
  elif _has ag; then
    export FZF_DEFAULT_COMMAND='ag --nocolor -g ""'
    export FZF_CTRL_T_COMMAND="$FZF_DEFAULT_COMMAND"
    # export FZF_ALT_C_COMMAND="$FZF_DEFAULT_COMMAND"
    # export FZF_DEFAULT_OPTS='
    #  --color fg:242,bg:236,hl:65,fg+:15,bg+:239,hl+:108
    #  --color info:108,prompt:109,spinner:108,pointer:168,marker:168
    #'
  fi
fi

# bat colorizer
if _has batcat; then # bat is batcat in Debian (when using apt)
  alias cat="batcat"
elif _has bat; then
  alias cat="bat"
fi
if _has bat || _has batcat; then
  export BAT_PAGER="less -R"
  export BAT_STYLE="changes,numbers"
  export BAT_THEME="ansi"
fi

# Enable colored `grep` output
# (probe first: not every grep supports --color=auto).
if echo zzz | grep --color=auto zzz >/dev/null 2>&1; then
  export GREP_COLORS='mt1;31' # matches
fi
# File system

# Pass selected files as arguments to the given command
# Usage: f echo
# Usage: f vim
# IFS is set to newline so that fzf's one-path-per-line output is split
# into array elements even when paths contain spaces, then restored.
f() {
  IFS=$'\n'
  files=($(fd . --type f --type l --follow --hidden --exclude .git "${@:2}" | fzf -0 -1 -m))
  IFS=$' '
  # Only invoke $1 when at least one file was selected.
  [[ -n "$files" ]] && $1 "${files[@]}"
}

# Pass selected directories as arguments to the given command
# Usage: d ws
d() {
  IFS=$'\n'
  dirs=($(fd . --type d --hidden --exclude .git "${@:2}" | fzf -0 -1 -m))
  IFS=$' '
  [[ -n "$dirs" ]] && $1 "${dirs[@]}"
}
# Find Files (or directories) by name pattern, optionally under a
# directory (defaults to the current one). Uses fd when available,
# otherwise falls back to case-insensitive find.
ff() {
  if [ -z "$1" ]; then
    echo "Find Files (or directories)"
    echo "Usage: ff file_pattern [directory]"
    return
  fi
  file_pattern="$1"
  directory="${2:-.}"
  if command -V fd >/dev/null 2>&1; then
    fd --hidden --follow --exclude .git "$file_pattern" "$directory"
  else
    find "$directory" -iname "$file_pattern"
  fi
}
# Find Strings in files: recursive text search, with optional filename
# filter and search root. Uses ripgrep when present, plain grep otherwise.
fs() {
  if [ -z "$1" ]; then
    echo "Find Strings in files"
    echo "Usage: fs string_pattern [file_pattern] [directory]"
    return
  fi
  string_pattern="$1"
  file_pattern="${2:-*}"
  directory="${3:-.}"
  if command -V rg >/dev/null 2>&1; then
    rg --color=always --line-number --no-heading --smart-case --no-ignore --hidden --follow --glob '!{.git,node_modules}/*' --glob "$file_pattern" "$string_pattern" "$directory"
  else
    # find "$directory" -type f -iname "$file_pattern" -exec grep -I -l -i "$string_pattern" {} \; -exec grep -I -n -i "$string_pattern" {} \;
    grep -Hrn "$string_pattern" "$directory" --include "$file_pattern"
  fi
}
# 1. Search for text in files using Ripgrep
# 2. Interactively narrow down the list using fzf
# 3. Open the selected file in $EDITOR at the matching line
fsf() {
  # BUG FIX: `[ "$EDITOR" = *vim* ]` compares against the *literal*
  # string "*vim*" — the [ builtin does no pattern matching — so the vim
  # branch could never fire. [[ == ]] performs the intended glob match.
  if [[ "$EDITOR" == *vim* ]]; then
    cmd="$EDITOR {1} +{2}"
  elif [[ "$EDITOR" == code ]]; then
    cmd="$EDITOR -g {1}:{2}"
  else
    cmd="$EDITOR {1}"
  fi
  rg --no-ignore --hidden --color=always --line-number --no-heading --smart-case "${*:-}" |
    fzf --ansi \
        --color "hl:-1:underline,hl+:-1:underline:reverse" \
        --delimiter : \
        --preview 'bat --color=always {1} --highlight-line {2}' \
        --preview-window 'up,60%,border-bottom,+{2}+3/3,~3' \
        --bind "enter:become($cmd)"
  # --bind 'enter:become(vim {1} +{2})'
}
# find and list processes matching a case-insensitive partial-match string
# Output is "command: pid". The awk match() grabs everything after the
# last "/" (the command basename); the final `grep -v grep` drops this
# pipeline's own grep process from the listing.
fp() {
  ps Ao pid,comm | awk '{match($0,/[^\/]+$/); print substr($0,RSTART,RLENGTH)": "$1}' | grep -i "$1" | grep -v grep
}
| true
|
acf5d30eae55d8e351a958b8451fddb86f27b9de
|
Shell
|
tobor88/Bash
|
/generate_all_uppercase_lowercase_combos_for_a_word.sh
|
UTF-8
| 752
| 3.796875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# This is a bash script I use to convert a word into every possible uppercase and lowercase combination
#
# EXAMPLE USAGE:
# ./generate_all_uppercase_lowercase_combos_for_a_word.sh osbornepro
# osbornepro
# osborneprO
# osbornepRo
# osbornePro
# osbornEpro
# osborNepro
# osboRnepro
# osbOrnepro
# osBornepro......
#
# NOTE(review): despite the name, this emits uppercase-prefix variants
# plus at most one extra capital in the tail, not all 2^n combinations —
# confirm whether that is the intended output.

# Fully uppercased copy of the argument (bash 4 ${var^^}) and its length.
TOUPPER=${1^^}
GETLEN=${#TOUPPER}

# Outer loop: 1-based position of a single letter to capitalize in the
# otherwise-lowercase tail; 0 means "no extra capital".
for ((permutation=0; permutation <= GETLEN; permutation++))
do
    # Inner loop: length of the uppercase prefix taken from TOUPPER.
    for ((i=0; i <= GETLEN; i++))
    do
        lower=${TOUPPER,,}
        if [ $permutation -gt 0 ]
        then
            # Capitalize the letter at index permutation-1: take the tail
            # from that index, uppercase its first char (${nth^}), and
            # splice it back after the untouched prefix.
            nth=${lower:permutation-1}
            lower=$(echo ${lower:0:permutation-1}${nth^})
        fi
        # Emit uppercase prefix of length i followed by the (possibly
        # patched) lowercase remainder; sort -u removes duplicates.
        echo -n ${TOUPPER:0:i}
        echo ${lower:i}
    done
done | sort -u
| true
|
2b762ebbc1edf73cac4dbd7abb98f63e490b8103
|
Shell
|
cha63506/SERVICE
|
/TASK/count.sh
|
UTF-8
| 283
| 3.34375
| 3
|
[] |
no_license
|
# Simple round counter: each invocation increments a persistent count
# stored in /tmp/count and announces the total via xcowsay.
set_env(){
    file_points=/tmp/count
}
run(){
    if [ -f $file_points ];then
        num=`cat $file_points`
        # Increment; if the file held a non-number, `let` fails and the
        # counter is reset to 0.
        let num+=1 || { let num=0; }
    else
        let num=0
    fi
    echo $num > $file_points
    # Fire-and-forget desktop notification.
    xcowsay "$num Rounds !" &
}
#reset1 &
steps(){
    set_env
    run
}
steps
| true
|
ce0269a6ae3f8f84cc849f5e7c22ab755def511d
|
Shell
|
BU-NU-CLOUD-F19/Curating_Log_Files
|
/src/Elastic+Kibana/elasticsearch_automation.sh
|
UTF-8
| 1,185
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Provision Elasticsearch + Kibana 6.x on a yum-based system: install
# Java, register the Elastic repo, install both services with the
# project's config files, and enable them at boot. Must run as root.

yum install -y java-1.8.0-openjdk
rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch

# Register the Elastic 6.x yum repository.
cat >>/etc/yum.repos.d/elk.repo<<EOF
[ELK-6.x]
name=ELK repository for 6.x packages
baseurl=https://artifacts.elastic.co/packages/6.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
autorefresh=1
type=rpm-md
EOF

yum install -y elasticsearch
yum install -y wget

# Replace the stock elasticsearch.yml with the project's version.
rm -f elasticsearch.yml
wget https://raw.githubusercontent.com/BU-NU-CLOUD-F19/Curating_Log_Files/master/src/Elastic%2BKibana/elasticsearch.yml
rm -f /etc/elasticsearch/elasticsearch.yml
sudo cp elasticsearch.yml /etc/elasticsearch/
systemctl daemon-reload
systemctl enable elasticsearch
systemctl start elasticsearch

yum install -y kibana
# Replace the stock kibana.yml with the project's version.
rm -f kibana.yml
wget https://raw.githubusercontent.com/BU-NU-CLOUD-F19/Curating_Log_Files/master/src/Elastic%2BKibana/kibana.yml
rm -f /etc/kibana/kibana.yml
sudo cp kibana.yml /etc/kibana/
systemctl enable kibana
systemctl start kibana
# BUG FIX: the failure message interpolated $ERROR, which was never set
# anywhere; capture the actual exit status of `systemctl start kibana`.
rc=$?
if [ $rc -ne 0 ]; then
    echo "Could not start kibana and set kibana to start automatically when the system boots. (Error Code: $rc)."
else
    echo "Kibana installed successfully."
fi
|
18e5916593141c0aadd688ce1ccba843ce88a018
|
Shell
|
sayali199918/Shell_Programming_Construct
|
/Repetitions_18/While_loop/PowerOfTwo.sh
|
UTF-8
| 198
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash -x
# Read how many doublings to perform and print each power of two,
# stopping early once the value reaches 256.
read -p "enter the power:" count
value=1
step=1
while [ $count -ne 0 ] && [ $value -lt 256 ]
do
    value=$((2 * value))
    echo "2^$step=$value"
    step=$((step + 1))
    count=$((count - 1))
done
| true
|
b44858c4623f9c21304727995cf5f22990ecb62d
|
Shell
|
jordanrinke/linux_configs
|
/latest/bin/wpg-install.sh
|
UTF-8
| 7,685
| 3.890625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
__ScriptVersion="0.1.5";
if [ -n "${XDG_CONFIG_HOME}" ]; then
CONFIG="${XDG_CONFIG_HOME}"
else
CONFIG="${HOME}/.config"
fi
if [ -n "${XDG_DATA_HOME}" ]; then
LOCAL="${XDG_DATA_HOME}"
else
LOCAL="${HOME}/.local/share"
fi
THEMES_DIR="${HOME}/.themes";
TEMPLATE_DIR="${PWD}/wpgtk-templates";
COLOR_OTHER="${CONFIG}/wpg/templates";
#=== FUNCTION ================================================================
# NAME: wpg-install.sh
# DESCRIPTION: Installs various wpgtk themes.
#===============================================================================
usage()
{
echo "Usage : $0 [options] [--]
Options:
-h Display this message
-v Display script version
-o Install openbox templates
-t Install tint2 template
-g Install gtk template
-i Install icon-set
-r Install rofi template
-I Install i3 template
-p Install polybar template
-b Install bspwm template
-d Install dunst template
-H Specify hash of wpgtk-templates repository to use
"
}
#=== FUNCTION ================================================================
#         NAME: checkprogram
#  DESCRIPTION: Exit the script with an error unless command $1 exists.
#      BUG FIX: the original only treated an exit status of exactly 1 as
#               "missing"; `command -v` can fail with other codes, so any
#               non-zero status must count as missing. $1 is also quoted.
#===============================================================================
checkprogram()
{
  if ! command -v "$1" >/dev/null 2>&1; then
    echo "Please install $1 before proceeding";
    exit 1;
  fi
}
getfiles()
{
checkprogram 'git';
checkprogram 'wpg';
mkdir -p "${LOCAL}/themes/color_other";
mkdir -p "${LOCAL}/icons";
git clone https://github.com/deviantfero/wpgtk-templates "$TEMPLATE_DIR";
if [[ $? -eq 0 ]]; then
cd "$TEMPLATE_DIR";
[[ ! -z "$commit" ]] && git checkout $commit;
return 0;
else
exit 1;
fi
}
install_tint2()
{
echo -n "This might override your tint2 config, Continue?[Y/n]: ";
read -r response;
if [[ ! "$response" == "n" ]]; then
echo "Installing tint2 config";
echo ":: backing up current tint2 conf in tint2rc.old.bak";
cp "${CONFIG}/tint2/tint2rc" "${CONFIG}/tint2/tint2rc.old.bak" 2>/dev/null;
cp --remove-destination ./tint2/tint2rc "${CONFIG}/tint2/tint2rc" && \
cp --remove-destination ./tint2/tint2rc.base "${COLOR_OTHER}" && \
ln -sf "${CONFIG}/tint2/tint2rc" "${COLOR_OTHER}/tint2rc" && \
echo ":: tint2 template install done."
return 0;
fi
echo ":: tint2 template not installed";
}
install_rofi()
{
echo -n "This might override your rofi config, Continue?[Y/n]: ";
read -r response;
if [[ ! "$response" == "n" ]]; then
echo "Installing rofi config";
echo ":: backing up current rofi conf in rofi.bak";
cp "${CONFIG}/rofi/config" "${CONFIG}/rofi/config.bak" 2>/dev/null;
cp --remove-destination ./rofi/config "${CONFIG}/rofi/config" && \
cp --remove-destination ./rofi/rofi.base "${COLOR_OTHER}" && \
ln -sf "${CONFIG}/rofi/config" "${COLOR_OTHER}/rofi" && \
echo ":: rofi template install done."
return 0;
fi
echo ":: rofi template not installed";
}
install_i3()
{
echo -n "This might override your i3 config, Continue?[Y/n]: ";
read -r response;
if [[ ! "$response" == "n" ]]; then
echo "Installing i3 config";
echo ":: backing up current i3 conf in config.bak";
cp "${CONFIG}/i3/config" "${CONFIG}/i3/config.bak" 2>/dev/null;
cp --remove-destination ./i3/config "${CONFIG}/i3/config" && \
cp --remove-destination ./i3/i3.base "${COLOR_OTHER}" && \
ln -sf "${CONFIG}/i3/config" "${COLOR_OTHER}/i3" && \
echo ":: i3 template install done."
return 0;
fi
echo ":: i3 template not installed";
}
install_polybar()
{
echo -n "This might override your polybar config, Continue?[Y/n]: ";
read -r response;
if [[ ! "$response" == "n" ]]; then
echo "Installing polybar config";
echo ":: backing up current polybar conf in config.bak";
cp "${CONFIG}/polybar/config" "${CONFIG}/polybar/config.bak" 2>/dev/null;
cp --remove-destination ./polybar/config "${CONFIG}/polybar/config" && \
cp --remove-destination ./polybar/polybar.base "${COLOR_OTHER}" && \
ln -sf "${CONFIG}/polybar/config" "${COLOR_OTHER}/polybar" && \
echo ":: polybar template install done."
return 0;
fi
echo ":: polybar template not installed";
}
install_gtk()
{
echo "Installing gtk themes";
cp -r ./FlatColor "${LOCAL}/themes/" && \
cp --remove-destination ./FlatColor/gtk-2.0/gtkrc.base "${COLOR_OTHER}/gtk2.base" && \
ln -sf "${LOCAL}/themes/FlatColor/gtk-2.0/gtkrc" "${COLOR_OTHER}/gtk2" && \
ln -sf "${LOCAL}/themes/FlatColor" "${THEMES_DIR}/FlatColor" && \
echo ":: gtk2 theme done" "${COLOR_OTHER}/gtk2";
cp --remove-destination ./FlatColor/gtk-3.0/gtk.css.base "${COLOR_OTHER}/gtk3.0.base" && \
ln -sf "${LOCAL}/themes/FlatColor/gtk-3.0/gtk.css" "${COLOR_OTHER}/gtk3.0" && \
echo ":: gtk3.0 theme done"
cp --remove-destination ./FlatColor/gtk-3.20/gtk.css.base "${COLOR_OTHER}/gtk3.20.base" && \
ln -sf "${LOCAL}/themes/FlatColor/gtk-3.20/gtk.css" "${COLOR_OTHER}/gtk3.20" && \
echo ":: gtk3.20 theme done"
echo ":: FlatColor gtk themes install done."
}
install_icons()
{
echo "Installing icon pack";
cp -r flattrcolor "${LOCAL}/icons/" && \
cp -r flattrcolor-dark "${LOCAL}/icons/" && \
echo ":: flattr icons install done."
}
install_openbox()
{
echo "Installing openbox themes";
cp --remove-destination -r ./openbox/colorbamboo/* "${LOCAL}/themes/colorbamboo"
if [[ $? -eq 0 ]]; then
mv "${LOCAL}/themes/colorbamboo/openbox-3/themerc.base" "${COLOR_OTHER}/ob_colorbamboo.base" && \
ln -sf "${LOCAL}/themes/colorbamboo/openbox-3/themerc" "${COLOR_OTHER}/ob_colorbamboo" && \
ln -sf "${LOCAL}/themes/colorbamboo" "${THEMES_DIR}/colorbamboo" && \
echo ":: colorbamboo openbox themes install done.";
fi
}
install_bspwm()
{
echo "Installing bspwm colors";
mv "./bspwm/bspwm_colors.base" "${COLOR_OTHER}/bspwm_colors.base";
mv "./bspwm/bspwm_colors" "${COLOR_OTHER}/bspwm_colors";
ln -sf "${CONFIG}/bspwm/bspwm_colors.sh" "${COLOR_OTHER}/bspwm_colors" && \
printf 'bash %s/bspwm/bspwm_colors.sh &' ${CONFIG} >> "${CONFIG}/bspwm/bspwmrc";
echo ":: bspwm colors install done.";
}
install_dunst()
{
echo "Installing dunst colors";
echo ":: backing up current dunst conf in dunstrc.bak";
cp "${CONFIG}/dunst/dunstrc" "${CONFIG}/dunst/dunstrc.bak" 2>/dev/null;
mv "./dunst/dunstrc.base" "${COLOR_OTHER}/dunstrc.base";
mv "./dunst/dunstrc" "${COLOR_OTHER}/dunstrc";
ln -sf "${CONFIG}/dunst/dunstrc" "${COLOR_OTHER}/dunstrc" && \
echo ":: dunst colors install done.";
}
clean_up()
{
rm -rf "$TEMPLATE_DIR";
}
#-----------------------------------------------------------------------
# Handle command line arguments
#-----------------------------------------------------------------------
getargs()
{
while getopts "H:bhvotgiIprd" opt
do
case $opt in
h)
usage;
exit 0
;;
v)
echo "$0 -- Version $__ScriptVersion";
exit 0;
;;
o) openbox="true" ;;
i) icons="true" ;;
g) gtk="true" ;;
t) tint2="true" ;;
r) rofi="true" ;;
I) i3="true" ;;
p) polybar="true" ;;
b) bspwm="true" ;;
d) dunst="true" ;;
H) commit="${OPTARG}" ;;
*)
echo -e "\n Option does not exist : $OPTARG\n"
usage;
exit 1
;;
esac
done
shift "$((OPTIND - 1))"
}
main()
{
getargs "$@";
getfiles;
[[ "$openbox" == "true" ]] && install_openbox;
[[ "$tint2" == "true" ]] && install_tint2;
[[ "$rofi" == "true" ]] && install_rofi;
[[ "$gtk" == "true" ]] && install_gtk;
[[ "$icons" == "true" ]] && install_icons;
[[ "$polybar" == "true" ]] && install_polybar;
[[ "$i3" == "true" ]] && install_i3;
[[ "$bspwm" == "true" ]] && install_bspwm;
[[ "$dunst" == "true" ]] && install_dunst;
clean_up;
}
main "$@"
| true
|
d3aea69860d393482e9b3e1b400e49118b3994dd
|
Shell
|
GeorgeT94/DevOpsJava
|
/script2.sh
|
UTF-8
| 1,254
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
echo "script running..."
sudo apt-get update
sudo apt-get upgrade
sudo cp /tmp/shared/java.tar.gz /opt
sudo cp /tmp/shared/maven.tar.gz /opt
cd /opt
sudo tar zxvf java.tar.gz
sudo tar zxvf maven.tar.gz
echo "files moved"
update-alternatives --install /usr/bin/java java /opt/jdk1.8.0_45/bin/java 100
update-alternatives --install /usr/bin/javac javac /opt/jdk1.8.0_45/bin/javac 100
update-alternatives --config java
echo "java unpacked"
sudo update-alternatives --install "/usr/bin/mvn" "mvn" "/opt/apache-maven-3.3.9/bin/mvn" 1
sudo update-alternatives --set mvn /opt/apache-maven-3.3.9/bin/mvn
sudo wget https://raw.github.com/dimaj/maven-bash-completion/master/bash_completion.bash
sudo apt-get -y install git
wget -q -O - https://pkg.jenkins.io/debian/jenkins-ci.org.key | sudo apt-key add -
sudo sh -c 'echo deb http://pkg.jenkins.io/debian-stable binary/ > /etc/apt/sources.list.d/jenkins.list'
sudo apt-get update
sudo apt-get install -y jenkins
echo "nexus installing..."
sudo wget http://www.sonatype.org/downloads/nexus-latest-bundle.zip -P /home/vagrant/Downloads
cd /home/vagrant/Downloads
sudo chmod 777 nexus-latest-bundle.zip
sudo unzip nexus-latest-bundle.zip
cd nexus-2.14.8-01/bin
sudo RUN_AS_USER=root ./nexus start
| true
|
f06036944faf30f798d32c0cb72cd219f2219d19
|
Shell
|
h2o/h2o
|
/src/h2olog/misc/autopep8
|
UTF-8
| 357
| 3.046875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Wrapper that runs autopep8 from a vendored pip prefix (.pip3) that
# lives next to this script.

# BUG FIX: the original SCRIPT_DIR="$PWD/$(dirname $0)" is only correct
# when $0 is a relative path; resolve the directory canonically so the
# wrapper also works when invoked via an absolute path.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"

export PYTHONPATH="$SCRIPT_DIR/.pip3/lib/python3.5/site-packages"
export PATH="$SCRIPT_DIR/.pip3/bin:$PATH"

if ! autopep8 --version > /dev/null 2>/dev/null
then
    echo "autopep8 is not found. Please install it by the following command:"
    echo "sudo pip3 install pep8 autopep8"
    exit 1
fi

# Replace this process with autopep8, forwarding all arguments.
exec autopep8 "$@"
| true
|
f7bce5894ce0c0ae039ba6e97785394a8b17fc44
|
Shell
|
rhcad/vgios-demo
|
/build.sh
|
UTF-8
| 1,522
| 3.015625
| 3
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
#!/bin/sh
# Type './build.sh' to make iOS libraries.
# Type './build.sh -arch arm64' to make iOS libraries for iOS 64-bit.
# Type './build.sh clean' to remove object files.
if [ ! -f ../vgcore/ios/build.sh ] ; then
git clone https://github.com/rhcad/vgcore ../vgcore
fi
if [ ! -f ../vgios/build.sh ] ; then
git clone https://github.com/rhcad/vgios ../vgios
fi
if [ ! -f ../DemoCmds/ios/build.sh ] ; then
git clone https://github.com/rhcad/DemoCmds ../DemoCmds
fi
if [ ! -f ../SVGKit/SVGKit.podspec ] ; then
git clone https://github.com/SVGKit/SVGKit ../SVGKit
fi
vgiospath=../vgios
corepath=../vgcore/ios/TouchVGCore
demopath=../DemoCmds/ios/DemoCmds
SVG_path=../SVGKit
xcodebuild -project $vgiospath/TouchVG.xcodeproj $1 $2 -configuration Release -alltargets
xcodebuild -project $demopath/DemoCmds.xcodeproj $1 $2 -configuration Release
xcodebuild -project $corepath/TouchVGCore.xcodeproj $1 $2 -configuration Release
xcodebuild -project $SVG_path/SVGKit-iOS.xcodeproj $1 $2 -configuration Release
mkdir -p output/TouchVG
cp -R $vgiospath/build/Release-universal/*.a output
cp -R $vgiospath/build/Release-universal/include/TouchVG output
mkdir -p output/DemoCmds
cp -R $demopath/build/Release-universal/libDemoCmds.a output
cp -R $demopath/build/Release-universal/include/DemoCmds output
mkdir -p output/TouchVGCore
cp -R $corepath/build/Release-universal/libTouchVGCore.a output
cp -R $corepath/build/Release-universal/include/TouchVGCore output
cp -R $SVG_path/build/Release-universal/*.a output
| true
|
e97beb6fb3ebf0ac14a32ee07219a27c76c56360
|
Shell
|
angeljr196/scripting
|
/script2.sh
|
UTF-8
| 232
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
# Small arithmetic demo: adds two numbers with (( )).
# BUG FIX: the original shebang was "#/bin/bash" (missing '!'), so the
# kernel treated it as a plain comment and never honored it.
sumando1=4
sumando2=6
let resto # a variable can also be declared with let
echo $sumando1 + $sumando2 # prints the values: "4 + 6" (no arithmetic)
echo sumando1 + sumando2   # prints the literal names, unexpanded
((resto=sumando1+sumando2)) # arithmetic evaluation with (())
echo $resto
exit 0 # finished successfully
| true
|
3d2d807357101eed575db6943d9cbc46834fb353
|
Shell
|
psachin/system-config
|
/bin/aof
|
UTF-8
| 1,821
| 3.75
| 4
|
[] |
no_license
|
#!/bin/bash
if echo $SHELLOPTS | grep xtrace; then
export SHELLOPTS
fi
if test -d "$1" -a $# = 1; then
set -- "$(get-newest-file "$1")"
fi
if test $# = 0; then
set -- "$(get-newest-file ~/today/)"
fi
archive_mimes=(
application/x-compressed-tar
application/x-bzip-compressed-tar
application/x-rar
application/x-deb
application/zip
)
type=$(xdg-mime query filetype "$1")
is_archive_type() {
if echo "$1" | grep -q '/x-.*compress'; then
return 0
fi
for x in ${archive_mimes[@]}; do
if test "$1" = "$x"; then
return 0;
fi
done
return 1
}
if test "${1##*.}" = zip; then
gbkunzip "$@"
elif test "${1##*.}" = rar; then
file=$(readlink -f "$1")
if test "$2"; then
outd=$(readlink -m "$2")
else
outd=$PWD/$(basename "$1").unzip.$$
fi
mkdir -p "$outd"
cd "$outd"
if ! unrar x "$file"; then
rm -rf "$outd"
echo unrar "$file" failed\!
exit -1
fi
fix-deep-dirs
elif is_archive_type "$type" || test "$(xdg-mime query default "$type")" = file-roller.desktop; then
if test "$2"; then
dir=$(readlink -m "$2")
else
dir=./"$(basename "$1")".unzip.$$
fi
mkdir -p "$dir"
set -- "$(readlink -f "$1")"
cd "$dir"
file-roller --extract-to="$PWD" "$1"
up "$PWD"
fix-deep-dirs
exit
else
if yes-or-no-p "Not an archive type? Will open it using system program."; then
of "$1";
else
if test "$2"; then
dir=$(readlink -m "$2")
else
dir=./"$(basename "$1")".unzip.$$
fi
mkdir -p "$dir"
set -- "$(readlink -f "$1")"
cd "$dir"
file-roller --extract-to="$PWD" "$1"
up "$PWD"
fix-deep-dirs
exit
fi
fi
| true
|
16c40f8880ffc5742612d7a9df9a0eb3efdb444c
|
Shell
|
utopia-repository/utopia-packages
|
/build
|
UTF-8
| 603
| 3.9375
| 4
|
[] |
no_license
|
#!/bin/bash
# Build every Debian package directory in this repo (or only the ones
# named on the command line) with dpkg-buildpackage, then print a
# summary. The exit status is the number of packages that failed.
trap 'exit 1' INT TERM
cd "$(dirname "${BASH_SOURCE[0]}")"
# With no args or "all", every top-level directory except .git is a
# package. NOTE: $PACKAGES is word-split on purpose in the loop below —
# assumes package directory names contain no whitespace.
if [[ -z "$1" || "$1" == "all" ]]; then
    PACKAGES=$(find . -maxdepth 1 -mindepth 1 -type d ! -name '.git')
else
    PACKAGES="$@"
fi
echo "Packages to build: $PACKAGES"
SUCCESSES=()
FAILURES=()
for pkg in $PACKAGES; do
    pushd "$pkg"
    pkg="$(basename "$pkg")"
    echo
    echo "BUILDING: $pkg"
    echo
    # Unsigned build (-us -uc); record the outcome by package name.
    dpkg-buildpackage -us -uc && SUCCESSES+=("$pkg") || FAILURES+=("$pkg")
    popd
done
echo "${#SUCCESSES[@]} packages built successfully: ${SUCCESSES[@]}"
echo "${#FAILURES[@]} packages failed to build: ${FAILURES[@]}"
exit ${#FAILURES[@]}
| true
|
3149789c45aa56e534ae803a6b9ac5cb955bdbd8
|
Shell
|
bymealpal/docker-parse-server
|
/scripts/import-mongo-data
|
UTF-8
| 696
| 3.1875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Import a Parse export (one JSON file per collection) into a local
# MongoDB, then POST to the local parse-server endpoint per collection.
# Requires: jq, mongoimport, curl.

BACKUP_PATH=${1:-$HOME/Dropbox/MealPass/ENGINEERING/parse/55fe3a5f-eb48-4b6d-968a-054fe60a85f4_1472093260_export}
echo "Importing from: $BACKUP_PATH"

# BUG FIX: SCHEMAS referenced the misspelled $BAKCUP_PATH (always empty),
# and DATA pointed at the literal hidden file ".json"; it should glob all
# exported "*.json" collection files (the glob expands, unquoted, in the
# for-loop below).
SCHEMAS=$BACKUP_PATH/SCHEMA.json
DATA=$BACKUP_PATH/*.json

function do_import {
    for f in $SCHEMAS $DATA; do
        # Skip missing or empty files (including an unmatched glob).
        if [ ! -s "$f" ]; then
            continue
        fi
        collection=$(basename "$f" .json)
        echo "======================"
        echo "Importing: $collection"
        # The export wraps documents in {"results": [...]}; unwrap via jq.
        jq '.results' "$f" | mongoimport -d dev -c "$collection" --jsonArray
        curl -X POST -H "X-Parse-Application-Id: YOUR_APP_ID" -H "X-Parse-Master-Key: YOUR_MASTER_KEY" http://localhost:1337/parse/classes/$collection
    done
}

do_import
| true
|
23679eee55495483628797a44b68ee6c981c628f
|
Shell
|
CodeProBono/polilist
|
/tests/integration/local-yass.sh
|
UTF-8
| 430
| 3.1875
| 3
|
[] |
no_license
|
#!/bin/sh
# Validate the "local yass" CSV feed using the shared generic validator.
PARSER="local yass"
COLUMN_LIST="suburb mobile postcode address phone1 firstname surname email fax phone level state electorate"
MIN_RECORDS=9
# The validator reads its configuration from these environment variables.
export PARSER COLUMN_LIST MIN_RECORDS
# 'which' on a relative path is unreliable; test the file directly instead.
if [ ! -x ./generic-csv-validator.sh ]; then
  echo "$0: generic-csv-validator.sh not found. Are you running this script from the wrong current directory?" >&2
  exit 1
fi
./generic-csv-validator.sh
exit $?
| true
|
9013e3ab63d56f567e71a30d779428ee81380689
|
Shell
|
kitech/triline
|
/myscripts/radioget_xml_to_smplayer_radio_list.sh
|
UTF-8
| 2,582
| 3.15625
| 3
|
[] |
no_license
|
#!/bin/sh
# Download radioget's radiolist.xml and convert it into (a) an smplayer
# m3u8 playlist and (b) a simple HTML index page of the stations.
# format
# #EXTM3U
# #EXTINF:0,vvv,
# mms://fff
# #EXTINF:0,fff,
# mms://ttt
############ html mode
######1 #########2
# http://radioget.googlecode.com/svn/trunk/radiolist.xml
radio_get_xml=$HOME/.config/smplayer/radiolist.xml
smplayer_audio_file=$HOME/.config/smplayer/radio.m3u8
temp_audio_file=/tmp/radio.m3u8
temp_html_file=/tmp/radioget.html
# Keep a backup of the previously downloaded list before re-fetching.
if [ -f $radio_get_xml ] ; then
mv -v $radio_get_xml ${radio_get_xml}.bak
true
fi
wget -O $radio_get_xml http://radioget.googlecode.com/svn/trunk/radiolist.xml
# Start the playlist with the m3u header.
echo "#EXTM3U" > $temp_audio_file
###############to html
# Static HTML page header (two-column list styling).
cat <<EOF > $temp_html_file
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<style type="text/css">
<!--
#list {width: 800px;float:left; list-style:disc outside; color:#000; line-height:30px }
#list li {float:left;}
#list li a{display:block;float:left; width: 380px; }
-->
</style>
</head>
<body>
<p>在线广播/电视:</p>
<ul id="list">
EOF
# Current station category, carried over from the last <radiotag> line seen.
radio_caty=
# Crude line-oriented XML scan: classify each line by grepping for markers
# (declaration, root element, category tag, station url) and react to it.
while read radio
do
# echo $radio
xml_ver_line=`echo $radio|grep "xml version"`
xml_root_line=`echo $radio|grep "RadioGet"`
radio_cat_line=`echo $radio|grep "radiotag"`
radio_addr_line=`echo $radio|grep "url"`
# Skip the <?xml ...?> declaration line.
if [ x"$xml_ver_line" = x"" ] ; then
true;
else
continue;
fi
# Skip the <RadioGet> root element line.
if [ x"$xml_root_line" = x"" ] ; then
true;
else
continue;
fi
# A category line: remember the category name (2nd quoted token).
if [ x"$radio_cat_line" = x"" ] ; then
true;
else
radio_caty=`echo $radio|awk -F\" '{print $2}'`
if [ x"$radio_caty" = x"" ] ; then
# echo $radio_caty
true;
fi
continue;
fi
# A station line: extract name and url from the quoted attribute values.
# NOTE(review): positions 2/4 (or 4/6 when the first attribute is "true")
# assume a fixed attribute order in radiolist.xml -- confirm against feed.
if [ x"$radio_addr_line" = x"" ] ; then
true;
else
radio_addr=`echo $radio|awk -F\" '{print $4}'`
radio_name=`echo $radio|awk -F\" '{print $2}'`
if [ "$radio_name" = "true" ] ; then
radio_addr=`echo $radio|awk -F\" '{print $6}'`
radio_name=`echo $radio|awk -F\" '{print $4}'`
fi
echo $radio_caty "->" $radio_name "->" $radio_addr
echo "#EXTINF:0,${radio_caty}->${radio_name}," >> $temp_audio_file
echo "$radio_addr" >> $temp_audio_file
#####to html
echo "<li><a href=\"${radio_addr}\">${radio_caty}->${radio_name}</a></li>" >> $temp_html_file
fi
done < $radio_get_xml
# Back up the old playlist, install the new one, and close the HTML page.
mv -v $smplayer_audio_file ${smplayer_audio_file}.bak
cp -v $temp_audio_file ${smplayer_audio_file}
####### to html
echo "</ul></body></html>" >> $temp_html_file
| true
|
fc52bd87012be3864696ce34acbcef4108c0cf2b
|
Shell
|
laggardkernel/spacezsh-prompt
|
/scripts/version.sh
|
UTF-8
| 339
| 3.140625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env zsh
# Sync SPACESHIP_VERSION in spacezsh.zsh with the version from package.json
# and stage the updated file for commit.
main() {
  local release target
  release=$(command grep -E '"version": "v?([0-9]+\.){1,}' package.json | cut -d\" -f4 2> /dev/null)
  target="$PWD/spacezsh.zsh"
  # Rewrite via a temp copy, then move it into place atomically.
  sed -e "s/SPACESHIP_VERSION='.*'/SPACESHIP_VERSION='$release'/g" "$target" >"$target.bak"
  mv -- "$target.bak" "$target"
  git add spacezsh.zsh
}
main "$@"
| true
|
b8ce6abfde37bee5fbe69cf163a758a09fce8957
|
Shell
|
twosigma/Cook
|
/travis/show_executor_logs.sh
|
UTF-8
| 345
| 2.984375
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
#!/bin/bash
# Dump every minimesos executor log (stdout/stderr/executor.log) to stdout,
# then chain into the scheduler-log dump.
set -v
echo "Printing out all executor logs..."
# Stream find results directly; the old <<<"$(find …)" form produced one
# empty iteration (and a failing `cat ""`) when nothing matched.
while IFS= read -r path; do
  [ -n "$path" ] || continue
  echo "Contents of ${path}"
  cat "${path}"
  echo "------------------------------------"
done < <(find "${GITHUB_WORKSPACE}/travis/.minimesos" -name 'stdout' -o -name 'stderr' -o -name 'executor.log')
"${GITHUB_WORKSPACE}/travis/show_scheduler_logs.sh"
| true
|
8396bd6d1e5ce3a641bb1a6957376a16b86e5a49
|
Shell
|
ekapujiw2002/Ubidots-Lua-OpenWRT-router-Ubidots-Real-time-data-monitoring
|
/example.bash
|
UTF-8
| 693
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
# Example: post a value to Ubidots and read it back via the ubidots.lua helper.
api_key="your api key here"
deviceID="your deviceID here"
value=10 #post this value
# Fixed quoting throughout: the old `bash -c "... -token "$api_key` form left
# $api_key outside the -c string, so a key containing spaces was split into
# extra positional arguments of bash -c instead of reaching the helper.
#obtain a token
my_token=$(/usr/lib/lua/ubidots.lua -token "$api_key")
echo "Token: $my_token"
#post data to Ubidots
/usr/lib/lua/ubidots.lua -post "$api_key" "$deviceID" "$value"
#retrieve last value from Ubidots
my_data=$(/usr/lib/lua/ubidots.lua -get "$api_key" "$deviceID")
echo "$my_data"
#retrieve all data from Ubidots
my_data=$(/usr/lib/lua/ubidots.lua -get "$api_key" "$deviceID" all)
echo "$my_data"
#retrieve last value from Ubidots & save to router /tmp/data.txt
my_data=$(/usr/lib/lua/ubidots.lua -get "$api_key" "$deviceID" all data.txt)
| true
|
c0ebdda3e2b24529811e10c94999b84f7771be25
|
Shell
|
dspjm/earlymodules
|
/scull/1/1.1/1.1.0/scull_unload
|
UTF-8
| 184
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Remove the scull device nodes and unload the scull kernel module.
device_count=4
device="scull"
# NOTE(review): major is looked up but never used below; presumably kept for
# reference or future mknod-style cleanup -- confirm before removing.
major=$(grep -m 1 "$device" /proc/devices | cut -d ' ' -f 1)
# Delete each device node created at module load; -f tolerates missing nodes.
for (( i = 0; i < device_count; i++ )); do
	rm -f "/dev/${device}${i}"
done
rmmod "${device}.ko"
| true
|
62ee21808c54e0a417d121f6d9e93a1081d0a4bb
|
Shell
|
almosting/omx_il_g
|
/vc8000d/software/linux/vcmd_pcidriver/register/create_swreg_struct.sh
|
UTF-8
| 18,624
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
#Copyright 2019 Verisilicon(Beijing) Co.,Ltd. All Rights Reserved.
#Author: carl.li@verisilicon.com (Carl Li)
#
#Description: Creates code for various purposes from
# given swhw register definition
# Require an existing .csv register description as the first argument.
if [ -z "$1" ] || [ ! -e "$1" ]
then
 echo " This script produces swregisters.h, swregisters_directives.tcl,"
 echo " hwdriver.vhd, signals.vhd, 8170table.h and 8170enum.h files"
 echo " from given .csv register description"
 echo ""
 echo "Usage: ./create_swreg_struct.sh 'filename.csv' "
 exit
fi
fileName=$1
# Extract the "Document Version" value (3rd comma-separated token of the
# matching row); useless `cat |` removed and "$1" quoted.
version=$(grep "Document Version" "$1" | tr "," " " | awk '{ print $3}')
if [ "$version" == "" ]
then
 echo "Document $1 version not found. Exit..."
 exit
else
 echo "Creating swregister struct from $1 version $version"
fi
# Optional second argument "vp9" restricts output to HEVC/VP9-flagged regs.
vp9_output="$2"
printf "%-8s\n" "$vp9_output";
catapultPrefix=SwRegister_
catapultPostfix="" ##empty (if interface label to direct input made correctly
# Register filtering mode: in vp9 mode only registers flagged for HEVC or
# VP9 are emitted; otherwise every register is. The two modes previously
# duplicated the entire ~180-line awk program, differing only in this one
# condition; they are folded into a single pass guarded by $filter_vp9.
if [ "$vp9_output" != "vp9" ]
then
  filter_vp9=0
else
  filter_vp9=1
fi
awk '
  # Emit the fixed headers of every generated file once.
  BEGIN{FS=",";printf "//Copyright 2019 Verisilicon(Beijing) Co.,Ltd. All Rights Reserved.\n"> "swregisters.h";
  printf "//Author: carl.li@verisilicon.com (Carl Li)\n">> "swregisters.h";
  printf "//\n">> "swregisters.h";
  printf "//Description: Common SWHW interface structure definition\n">> "swregisters.h";
  printf "//Based on document version '$version'\n">> "swregisters.h";
  printf "#ifndef SWREGISTERS_H_\n">> "swregisters.h";
  printf "#define SWREGISTERS_H_\n\n">> "swregisters.h";
  printf "#include \"actypes.h\"\n\n">> "swregisters.h";
  printf "struct SwRegisters {\n">> "swregisters.h";
  printf"--signal declaration from Document Version '$version'\n" > "signals.vhd";
  printf"--register to signal map table from Document Version '$version'\n" > "hwdriver.vhd"
  printf"##Common catapult directives from Document Version '$version' \n" > "swregisters_directives.tcl"
  printf "/* Copyright 2019 Verisilicon(Beijing) Co.,Ltd. All Rights Reserved. */\n"> "8170enum.h";
  printf "/* Register interface based on the document version '$version' */\n">> "8170enum.h";
  printf "/* Copyright 2019 Verisilicon(Beijing) Co.,Ltd. All Rights Reserved. */\n"> "8170table.h";
  printf "/* Register interface based on the document version '$version' */\n">> "8170table.h";
  }
  END{printf "}; \n \n#endif /*SWREGISTERS_H_*/ \n">> "swregisters.h" }
  # A "swregNN" row starts a register: remember the number, then read the
  # following header row to locate the columns of interest by name.
  $1 ~ /^swreg/ {
  regNum=$1;
  getline;
  for (i = 1; i <= NF; i++)
  {
  if ($(i) ~ /Width/)
  widthField=i;
  else if ($(i) ~ /Name/)
  nameField=i;
  else if ($(i) ~ /Signed/)
  signedField=i;
  else if ($(i) ~ /Bit/)
  bitField=i;
  else if ($(i) ~ /trace/)
  trace_value_field=i;
  else if ($(i) ~ /HEVC/)
  hevc_field=i;
  else if ($(i) ~ /VP9/)
  vp9_field=i;
  else if($(i) ~ /Function/)
  function_field=i;
  else if($(i) ~ /Read\/Write/)
  RW_field=i;
  }
  lastField=widthField>nameField ? widthField : nameField;
  lastField=bitField>lastField ? bitField : lastField;
  totalFields=NF;
  }
  # Each "sw_*" field row describes one register field; fields may continue
  # over multiple CSV rows until a trailing ";" is seen.
  $nameField ~ /sw_/ {
  # all required fields on current row
  numFields=NF;
  for (i = 0; i < numFields; i++)
  rowArray[i] = $(i+1)
  fieldFinished = $0 ~ /;$/ ? 1 : 0;
  while (numFields < lastField)
  {
  getline;
  i = fieldFinished ? 0 : 1;
  for (; i < NF; i++)
  rowArray[numFields++] = $(i+1);
  fieldFinished = $0 ~ /;$/ ? 1 : 0;
  }
  lsb=rowArray[bitField-1];
  reg=rowArray[nameField-1];
  modelreg=rowArray[nameField-1];
  width=rowArray[widthField-1];
  signed=rowArray[signedField-1];
  trace_value=rowArray[trace_value_field-1];
  function_value=rowArray[function_field-1];
  read_write_value=rowArray[RW_field-1];
  hevc_reg=rowArray[hevc_field-1];
  vp9_only_reg=rowArray[vp9_field-1];
  # certain flags had empty width field
  if (width == "")
  {
  width=1;
  }
  # 15:10 -> 10 etc
  sub(/[0-9]*:/,"",lsb);
  sub(/swreg/,"",regNum);
  sub(/^sw_/,"hwif_",modelreg);
  ##decoder registers only
  msb=lsb+width-1;
  # Build the 32-bit occupancy mask of this field as two 16-bit halves,
  # using decimal arithmetic because classic awk lacks bit operators.
  bit_occupy_high=0;
  bit_occupy_low=0;
  for (k = 1; k <=int(width); k++)
  {
  bit_occupy_high=bit_occupy_high*2;
  if(bit_occupy_low>=32768)
  {
  bit_occupy_high=bit_occupy_high+1;
  }
  bit_occupy_low=bit_occupy_low*2;
  if(bit_occupy_low>=65536)
  {
  bit_occupy_low=bit_occupy_low-65536;
  }
  bit_occupy_low=bit_occupy_low+1;
  }
  # Shift the mask up to the field LSB position.
  for (j = 1; j <= int(lsb); j++)
  {
  bit_occupy_high=bit_occupy_high*2;
  if(bit_occupy_low>=32768)
  {
  bit_occupy_high=bit_occupy_high+1;
  }
  bit_occupy_low=bit_occupy_low*2;
  if(bit_occupy_low>=65536)
  {
  bit_occupy_low=bit_occupy_low-65536;
  }
  }
  if (msb > 31)
  {
  printf "error found in '$1' line \n"
  printf "%s definition over register limits: msb %d, lsb %d, width %d \n",reg,msb,lsb,width;
  }
  regNum=int(regNum)
  # vp9 mode ('$filter_vp9'==1): keep only HEVC/VP9-marked registers;
  # otherwise emit every register.
  if (('$filter_vp9' == 0) || (hevc_reg == "x") || (vp9_only_reg == "x"))
  {
  if (regNum < 512)
  {
  ##structure
  if (signed == "x")
  {
  printf " sai%d %s;\n",
  width, reg >> "swregisters.h";
  } else {
  printf " uai%d %s;\n",
  width, reg >> "swregisters.h";
  }
  ##Directives
  printf "directive set /$block/SwRegister.%s:rsc -MAP_TO_MODULE {[DirectInput]}\n",
  reg >> "swregisters_directives.tcl";
  ##HW stuff
  if (width == 1)
  {
  printf " '$catapultPrefix'%s'$catapultPostfix' <= swreg%d(%d);\n",
  reg, regNum, msb >> "hwdriver.vhd";
  printf " signal '$catapultPrefix'%s'$catapultPostfix' : std_logic;\n",
  reg >> "signals.vhd";
  }
  else
  {
  printf " '$catapultPrefix'%s'$catapultPostfix' <= swreg%d(%d downto %d);\n",
  reg, regNum, msb, lsb >> "hwdriver.vhd";
  printf " signal '$catapultPrefix'%s'$catapultPostfix' : std_logic_vector(%d downto 0);\n",
  reg, width-1 >> "signals.vhd";
  }
  }
  ##System model table
  # change widths of base addresses to 32 bits
  if (width == 30 && lsb == 2 && reg ~ /base$/)
  {
  printf " VCMDREG(%-40s, %-3d,0x%04x%04x, %8d, %8d,%2s,\"%-1s\"),\n",
  toupper(modelreg), regNum*4, bit_occupy_high,bit_occupy_low, 0,trace_value,read_write_value,function_value >> "8170table.h";
  } else {
  printf " VCMDREG(%-40s, %-3d,0x%04x%04x, %8d, %8d,%2s,\"%-1s\"),\n",
  toupper(modelreg), regNum*4,bit_occupy_high,bit_occupy_low, lsb,trace_value,read_write_value,function_value >> "8170table.h";
  }
  ##System model enumerations
  printf " %s,\n", toupper(modelreg) >> "8170enum.h";
  }
  }
  ' "$fileName"
# Install the generated enum/table headers into the driver and system model.
cp 8170enum.h ../vcmdregisterenum.h
cp 8170table.h ../vcmdregistertable.h
cp 8170enum.h ../../../../system/models/vcmd/vcmdregisterenum.h
cp 8170table.h ../../../../system/models/vcmd/vcmdregistertable.h
| true
|
518c47dbd88811856c2c0e361a9f2492668080be
|
Shell
|
spartacus-technologies/Auxilo-box
|
/Devices/Voice/voice.sh
|
UTF-8
| 967
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Record 15s from the microphone and transcribe it with the Google speech
# API. First argument selects the language (US or FI); prints the result.
# arecord -q -t wav -d 5 audio.wav
arecord -q -D hw:1,0 -f s16_le -c 1 -d 15 > audio.wav
sox -q audio.wav -r 16000 -b 16 -c 1 audio.flac
language=$1
# Map the CLI language code to the API locale; the two wget calls were
# duplicates differing only in this value.
case "$language" in
  US) api_lang="en-US" ;;
  FI) api_lang="fi_FI" ;;
  *)
    echo -n "ERROR: Use parameter US or FI"
    rm audio.wav audio.flac
    exit 0
    ;;
esac
output=$(wget -q -U "rate=16000" -O - "http://www.google.com/speech-api/v1/recognize?lang=${api_lang}&client=Mozilla/5.0" --post-file audio.flac --header="Content-Type: audio/x-flac; rate=16000")
# Fixed: a failed wget yields an EMPTY string, never the old "NO_OUTPUT"
# sentinel, so the connection-failure branch was unreachable before.
if [[ -z "$output" ]]; then
  echo -n "ERROR: Connection failed"
  rm audio.wav audio.flac
  exit 0
fi
# 12th quote-delimited token of the JSON response holds the utterance.
result=$(echo $output | awk '{split($0,a,"\""); print a[12]}')
rm audio.wav audio.flac
echo -n "$result"
# > google_output.json
| true
|
cb310bd53ef12590427d421d0b7b79bbc4be3677
|
Shell
|
BIT-Studio-1/project-21s1-project-andromeda
|
/room/roomgenerator.sh
|
UTF-8
| 82
| 2.609375
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Stamp out 100 numbered room files (00.room .. 99.room) from the skeleton.
for room_number in {00..99}; do
  cp room.skeleton "${room_number}.room"
done
exit 0
| true
|
bca1f9cd61a1e74e9b7664c33cb55b110008c494
|
Shell
|
fracz/photo-signatures
|
/fix-file-dates.sh
|
UTF-8
| 994
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash
# Set each file's atime/mtime from the timestamp embedded in its name
# (prefix_YYYYMMDD_HHMMSS...), then copy those dates into EXIF tags.
if (( $# != 1 )); then
  echo -e "Usage: $0 directory"
  exit 1
fi
cd "$1" || exit 1
for name in *
do
  # Extract YYYYMMDDHHMM.SS; if the name doesn't match, sed echoes it back
  # unchanged and the pattern test below rejects it.
  date="$(echo "$name" | sed -r 's/(.+)_([0-9]{8})_([0-9]{4})([0-9]{2}).+/\2\3.\4/')"
  if [[ $date == ????????????.?? ]]
  then
    touch -amt "$date" "$name"
    # Fixed: $name was outside the quotes, so names with spaces word-split.
    echo "Updated $name"
  else
    echo "Skipped $name"
  fi
done
exiftool -overwrite_original -P -progress -ext jpg "-CreateDate<FileModifyDate" "-DateTimeOriginal<FileModifyDate" .
exiftool -overwrite_original -P -progress -ext mp4 "-CreateDate<FileModifyDate" "-ModifyDate<FileModifyDate" "-MediaCreateDate<FileModifyDate" "-MediaModifyDate<FileModifyDate" "-TrackCreateDate<FileModifyDate" "-TrackModifyDate<FileModifyDate" .
#exiftool -overwrite_original -P -progress -ext mp4 -api QuickTimeUTC "-CreateDate<FileModifyDate" "-ModifyDate<FileModifyDate" "-MediaCreateDate<FileModifyDate" "-MediaModifyDate<FileModifyDate" "-TrackCreateDate<FileModifyDate" "-TrackModifyDate<FileModifyDate" .
| true
|
4f83398f100f673af7b4bfffcab8220d3bb257e4
|
Shell
|
angeloko23/wpt.fyi
|
/util/docker-dev/web_server.sh
|
UTF-8
| 1,341
| 3.828125
| 4
|
[
"LicenseRef-scancode-w3c-03-bsd-license",
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Start the Google Cloud web development server in `wptd-dev-instance`
# (started using ./run.sh).
# Helper functions (wptd_exec, info, error, absdir, ...) come from these
# sourced scripts.
DOCKER_DIR=$(dirname $0)
source "${DOCKER_DIR}/../commands.sh"
source "${DOCKER_DIR}/../logging.sh"
source "${DOCKER_DIR}/../path.sh"
WPTD_PATH=${WPTD_PATH:-$(absdir ${DOCKER_DIR}/../..)}
# Bind on all interfaces inside the container so port-forwarding works.
WPTD_CONTAINER_HOST=0.0.0.0
set -e
# Print usage via the shared info logger.
usage() {
USAGE="Usage: web_server.sh [-r]
-r - Allow remote requests (disable host checking)"
info "${USAGE}"
}
HOST_CHECKING=true
# NOTE(review): the h|*) arm makes unknown flags print usage and exit 0;
# confirm a non-zero exit was not intended for invalid flags.
while getopts ':rh' flag; do
case "${flag}" in
r) HOST_CHECKING=false ;;
h|*) usage && exit 0;;
esac
done
info "Pruning node_modules so dev_appserver can handle watching file updates..."
wptd_exec make webapp_node_modules_prod
info "Installing other web server code dependencies"
wptd_exec make dev_appserver_deps
# NOTE(review): with `set -e` above, a failing wptd_exec aborts the script
# immediately, so this status check is likely unreachable -- confirm.
DOCKER_STATUS="${?}"
if [ "${DOCKER_STATUS}" != "0" ]; then
error "Failed to install web server code dependencies"
exit "${DOCKER_STATUS}"
fi
info "Starting web server. Port forwarded from wptd-dev-instance: 8080"
wptd_exec_it dev_appserver.py \
--enable_host_checking $HOST_CHECKING \
--host $WPTD_CONTAINER_HOST \
--port=8080 \
--admin_host=$WPTD_CONTAINER_HOST \
--admin_port=8000 \
--api_host=$WPTD_CONTAINER_HOST \
--api_port=9999 \
-A=wptdashboard \
/home/user/wpt.fyi/webapp/web/app.dev.yaml
| true
|
e0db9be2b1df272acdd096ca5f8f757af5d0b887
|
Shell
|
stevenvandenbrandenstift/libc
|
/ports/rust.sh
|
UTF-8
| 1,108
| 2.890625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Port recipe: build and install the Redox rust toolchain (unstable).
# environ.sh supplies UNSTABLE, DEPENDS, fetch_template, configure_template,
# BUILD, HOST, RUST_HOST, etc.
source environ.sh
UNSTABLE
DEPENDS llvm
GIT=https://github.com/ids1024/rust.git
GIT_BRANCH=compile-redox
DIR=rust
# rust's own build drives the cross toolchain; clear inherited tool vars.
unset AR AS CC CXX LD NM OBJCOPY OBJDUMP RANLIB READELF STRIP
function rustbuild_template {
	case $1 in
		build)
			# Overlay the port's patch files, then run rust's build driver.
			cp "${BUILD}/../ports/rust/"* "${BUILD}/${DIR}"
			pushd "${BUILD}/${DIR}"
			python x.py build
			popd
			;;
		add)
			fetch_template add
			rustbuild_template build
			rustbuild_template install
			;;
		install)
			pushd "${BUILD}/${DIR}"
			binpath="${BUILD}/sysroot/bin"
			libpath="${BUILD}/sysroot/lib/rustlib/${RUST_HOST}/lib"
			mkdir -p "$binpath" "$libpath"
			cp -fv "build/${RUST_HOST}/stage2/bin/rustc" "$binpath"
			${HOST}-strip "$binpath/rustc"
			# Install the stage2 target libraries, excluding rustc internals.
			cp -fv $(find build/${RUST_HOST}/stage2/lib/rustlib/${RUST_HOST}/lib/ -type f | grep -v librustc) "$libpath"
			popd
			;;
		*)
			# Fixed: "$@" preserves argument boundaries; the old $* re-split
			# arguments containing whitespace.
			configure_template "$@"
			;;
	esac
}
rustbuild_template "$@"
| true
|
2acd5e264524c306e36277cbe7f2103561a4e202
|
Shell
|
timmyw/pocket2rm
|
/confs/installconfig.sh
|
UTF-8
| 224
| 3.234375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Symlink the chosen pocket2rm config (default: pocket2rm.yaml) from this
# repo's confs/ directory into ~/.config/pocket2rm.yaml.
ABSPATH=$(readlink -f "$0")
ABSDIR=$(dirname "$ABSPATH")
CONFIG=pocket2rm.yaml
if [ $# -eq 1 ]; then
  CONFIG=$1
fi
# Replace any existing link/file, then point at the selected config.
rm -f "$HOME/.config/pocket2rm.yaml"
ln -s "$ABSDIR/../confs/$CONFIG" "$HOME/.config/pocket2rm.yaml"
| true
|
890349b7d2acfde2cef111fed80ef457e524d94a
|
Shell
|
apekshachauhan21/Shell-Scripting
|
/apeksha_submission_opt_project.bash
|
UTF-8
| 2,682
| 4.3125
| 4
|
[] |
no_license
|
#!/bin/bash
# For every file under a directory, record which of user/group/other can
# execute it. Output lines "name:perms:flags" (YU/YG/YO = executable by
# user/group/other, NN = not) go to executable_files.txt.
#checking if for the command usage
if [[ $# -ne 3 ]]; then
  echo "USAGE: $0 username/UID groupname/GID abs=/directory_path or rel(wrt to pwd)=path"
  exit 1
fi
#checking if an executable file already exits or not
file="executable_files.txt"
if [[ -f "$file" ]]; then
  rm "$file"
fi
#checking for the relative path with respect to the present working directory
main_path="$(pwd)"
user="$1"
#checking if the user is a valid user
getent passwd "$user" > /dev/null
if [[ $? -ne 0 ]]; then
  echo "Not a valid user"
  exit 1
fi
#checking if a group is a valid group
group="$2"
getent group "$group" > /dev/null
if [[ $? -ne 0 ]]; then
  echo "Not a valid group"
  exit 1
fi
#checking is the user belongs to the group
# Fixed: the old `[ $(getent ... | grep ...) ]` test broke when grep printed
# a multi-word line; grep -q -w tests membership without word splitting.
if ! getent group "$group" | grep -qw "$user"; then
  echo "user not part of the group"
  exit 1
fi
path_input="$3"
path=""
#checking for relative or absolute path. Relative path is checked with respect to the present working directory.
#if the path is relative then convertiing that to an absolute path
if [[ "$path_input" = /* ]]; then
  path="$path_input"
else
  path="$main_path"/"$path_input"
fi
#checking if it is a valid directory or not
if ! [[ -d "$path" ]]; then
  echo "not a valid directory"
  exit 1
fi
file_info=""
file_exec="executable_files.txt"
#iterating through the results of the find command and checking for the user , group and other execute permission
# and updating the file "executable_files.txt with the result"
# NOTE(review): parsing `ls -l` columns is fragile for filenames containing
# spaces (awk $9 keeps only the first word) -- confirm acceptable inputs.
while read line
do
  check=0
  file_perm="$(echo "$line"|awk '{print $1}')"
  file_name="$(echo "$line"|awk '{print $9}')"
  u_perm="$(echo "$file_perm"|awk '{print substr($1,4,1)}')" #owner permission
  g_perm="$(echo "$file_perm"|awk '{print substr($1,7,1)}')" # group permission
  o_perm="$(echo "$file_perm"|awk '{print substr($1,10,1)}')" #other permission
  user1="$(echo "$line" |awk '{print $3}')" # owner of the file
  file_info="$file_name:$file_perm"
  if [[ "$user" = "$user1" ]]; then
    if [[ "$u_perm" = "x" ]]; then
      u_ex_status="YU"
    else
      u_ex_status="NN"
    fi
    file_info="$file_info:$u_ex_status"
    (( check++ ))
  fi
  group1="$(echo "$line" |awk '{print $4}')"
  if [[ "$group" = "$group1" ]]; then
    if [[ "$g_perm" = "x" ]]; then
      g_ex_status="YG"
    else
      g_ex_status="NN"
    fi
    file_info="$file_info:$g_ex_status"
    (( check++ ))
  fi
  if [[ "$o_perm" = "x" ]]; then
    o_ex_status="YO"
    (( check++ ))
  else
    o_ex_status="NN"
  fi
  file_info="$file_info:$o_ex_status"
  # Only record files where at least one relevant flag applied.
  if [[ $check -ne 0 ]]; then
    echo "$file_info">>$file_exec
  fi
  unset check
done < <(find "$path" -type f -exec ls -l {} \; ) # find all the files in all the mentioned directory and subdirectories
exit
| true
|
cc157baab2d5c40f1262920b4bfc21469ff08450
|
Shell
|
rflynn/biosensor
|
/util/convert.sh
|
UTF-8
| 429
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
#set -x
# Recompress every JPEG under the given directory (default: vid-tag) whose
# stored quality exceeds 85, reporting disk usage before and after.
dir=${1:-vid-tag}
du -hcs "$dir"
# Stream find output line by line; the old `for f in $(find …)` (even with
# IFS=newline) still glob-expanded filenames containing wildcard characters.
find "$dir" -type f -name '*.jpg' | while IFS= read -r f; do
  # "Quality: NN" field from ImageMagick's metadata dump.
  quality=$(identify -verbose "$f" | grep Quality | cut -c 12-)
  if [[ $quality -gt 85 ]]; then
    echo "$f"
    # Re-encode to a temp name, then replace the original only on success.
    tmp="${f/%.jpg/-q85.jpg}"
    convert -quality 85 "$f" "$tmp" && mv "$tmp" "$f"
  fi
done
du -hcs "$dir"
| true
|
ffa9ab7c16e98b1ce8df282a66da5f2eab6e9253
|
Shell
|
ericdwang/dotfiles
|
/dotfiles/.config/i3blocks/temperature
|
UTF-8
| 460
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/sh
# i3blocks widget: print the average temperature across all thermal zones
# (full text, short text, then a color by threshold).
temperature=0
total=0
# Calculate the average temperature across all thermal zones
for temp in /sys/class/thermal/thermal_zone*/temp; do
    # Guard against an unmatched glob (literal pattern) or unreadable zone.
    [ -r "$temp" ] || continue
    temperature=$((temperature + $(cat "$temp")))
    total=$((total + 1))
done
# Fixed: with no thermal zones the old code divided by zero.
if [ "$total" -eq 0 ]; then
    echo "N/A"
    echo "N/A"
    exit 0
fi
temperature=$((temperature / (total * 1000)))
echo "$temperature°C"
echo "$temperature°C"
if [ "$temperature" -lt 30 ]; then
    echo "#00FF00"
elif [ "$temperature" -lt 50 ]; then
    echo "#FFFF00"
else
    echo "#FF0000"
fi
| true
|
15e880ad6ad1453772cdd8d31612be2300c6d9ef
|
Shell
|
PanosFirmpas/jupyter_on_hpc
|
/notebook_job
|
UTF-8
| 1,554
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/bash
#
#SBATCH --job-name=notebook
#SBATCH --output=/PATH/TO/NOTEBOOK/logs/slurm-notebook-%j.o.out
#SBATCH --error=/PATH/TO/NOTEBOOK/logs/slurm-notebook-%j.e.out
#
#SBATCH --ntasks=1
#
#SBATCH --nodes=1
#SBATCH --cpus-per-task=4
#### SBATCH --exclude=nodo[01-12]
##### #$ -S /bin/bash
##### #$ -N notebook
##### #$ -cwdq
##### #$ -V
##### #$ -q smp.q
##### #$ -pe smp 20
##### #$ -e /home/ska/panos/notebooks/logs
##### #$ -o /home/ska/panos/notebooks/logs
##### #$ -l h=nodo13
# SLURM job: run a Jupyter notebook server on a compute node and forward
# its port to the login node (nodo00) through a controllable ssh tunnel.
# (The ##### lines above are a retired SGE header kept for reference.)
source /PATH/TO/YOUR_VIRTUALENV/bin/activate
source ~/.fix_paths
#set a port (this is also set in the ssh/config of my desktop)
NOTEBOOKPORT=9292
# Per-job scratch workspace; removed at the end of the job.
TMPDIR=/scratch/tmp_private_${SLURM_JOB_ID}
mkdir ${TMPDIR} && cd ${TMPDIR}
mkdir pytemp
socket_name=${TMPDIR}/SSHsocket_${SLURM_JOB_ID}
### This starts an ssh tunnel, somethingsomething master (?) at a specific socket (???) which is an actual file
### and sends it to the background. This will pipe the notebook to nodo00, the login node
### We pipe it this way, because we can later explicitely close this ssh connection
ssh -M -S ${socket_name} -fN -R localhost:${NOTEBOOKPORT}:localhost:${NOTEBOOKPORT} nodo00
# otherwise, there's some permission problem
# XDG_RUNTIME_DIR=/home/ska/panos/.local/share/jupyter/runtime
XDG_RUNTIME_DIR=''
###start the notebook
# Blocks here until the notebook server is shut down.
jupyter notebook --port=${NOTEBOOKPORT} --no-browser
### after the notebook server closes, we use the socket to specifically close that
### ssh tunnel that we sent to the background
ssh -S ${socket_name} -O exit nodo00
rm -r ${TMPDIR}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.