blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
c35fbd65771a013e8b8f27d311ee48fbdebe9509
|
Shell
|
sparkmbox/scripts
|
/xuguilian/test/path1.sh
|
UTF-8
| 366
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
# List the parent directory of every file under the current tree and write
# the de-duplicated, sorted result to ./path.
#
# PERF/BUG FIX: the original loop re-read ~/file.txt on every iteration
# (`wc -l` + `sed -i 1d` per line), i.e. O(n^2) I/O with a full rewrite of
# the file for each line, and its unquoted ${path} broke on paths with
# spaces. Observable results (./path contents, ~/file.txt left drained)
# are unchanged.
find -name "*.*" > ~/file.txt
#cp ~/file.txt ~/file1.txt
# Emit the directory component of each listed path; quoting keeps
# whitespace-containing paths intact.
while IFS= read -r p; do
    dirname "$p"
done < ~/file.txt > path.txt
# The original consumed ~/file.txt line by line; preserve that end state.
: > ~/file.txt
sort path.txt | uniq > path
rm -f path.txt
| true
|
558743d64c0a13b257c0130ee64a8b982f5b72a6
|
Shell
|
jrock2004/dotfiles
|
/files/.config/rofi/scripts/screenshot.sh
|
UTF-8
| 704
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
## Author : Aditya Shakya
## Mail : adi1090x@gmail.com
## Github : @adi1090x
## Twitter : @adi1090x
# Present a rofi menu with three screenshot modes (full screen, selected
# area, focused window); the capture is copied to the clipboard via xclip
# and the temporary file removed.
dir="$HOME/.config/rofi/styles"
rofi_command="rofi -theme $dir/three.rasi"
# Options
# NOTE(review): the three option labels below are empty strings — they were
# presumably icon-font glyphs that did not survive re-encoding. With all
# three empty the case arms below cannot be told apart; confirm against the
# upstream script.
screen=""
area=""
window=""
# Variable passed to rofi
options="$screen\n$area\n$window"
# -selected-row 1 preselects the second entry; the choice comes back on stdout.
chosen="$(echo -e "$options" | $rofi_command -p '' -dmenu -selected-row 1)"
case $chosen in
$screen)
# scrot's -e hook runs with $f bound to the saved file path, so the
# single quotes are deliberate: scrot expands $f, not this shell.
scrot -e 'xclip -selection clipboard -target image/png -i $f && rm $f'
;;
$area)
# -s: interactive region selection.
scrot -s --line mode=edge -e 'xclip -selection clipboard -target image/png -i $f && rm $f'
;;
$window)
# -u: currently focused window.
scrot -u -e 'xclip -selection clipboard -target image/png -i $f && rm $f'
;;
esac
| true
|
782a5d4eeb992129d11dea952ad25729f87fb0e9
|
Shell
|
mtangh/dot-bashrc
|
/files/etc/bash.bashrc.d/99user_bashrc.sh
|
UTF-8
| 808
| 3.171875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
# ${bashrc_dir}/99user_bashrc.sh
# $Id$
# Load the user's personal .bashrc and profile.d scripts from a system-wide
# bashrc fragment. The `: "label" && { ... }` idiom names the block in
# xtrace output while grouping it so the trailing `|| :` swallows failures.
: "user_bashrc" && {
# No Unset Off
set +u
# User local .bashrc file(s)
# NOTE(review): __pf_rc_loader is not defined in this file — presumably
# provided by an earlier fragment of this loader; confirm before reuse.
for dot_bashrc in $( {
__pf_rc_loader -r "${HOME}"/{.bash,}.bashrc
} 2>/dev/null || :; )
do
[ -f "${dot_bashrc}" ] ||
continue
# Skip a file that is already on the source stack, to avoid recursive
# sourcing when this fragment itself was reached via that file.
[[ "${BASH_SOURCE[@]}" \
=~ .*\ ${dot_bashrc}(\ .*|\ *)$ ]] &&
continue
# Source only the first usable bashrc, then stop.
. "${dot_bashrc}" &&
break || :
done
unset dot_bashrc
# Load scripts under the 'bash_profile.d' dir
for dot_prof_scr in $( {
__pf_rc_loader \
"${XDG_CONFIG_HOME:-${HOME}/.config}"/{etc/,}{bash_,}profile.d \
"${HOME}"/.{bash_,}profile.d
} 2>/dev/null || :; )
do
# Unlike the .bashrc loop above, every existing executable script is sourced.
[ -f "${dot_prof_scr}" -a -x "${dot_prof_scr}" ] && {
. "${dot_prof_scr}"
} || :
done
unset dot_prof_scr
# No Unset On
set -u
} || :
# *eof*
| true
|
05977affc505e9ed39448417b4db23eadf095352
|
Shell
|
syspimp/openstack
|
/scripts/openstack-convert-vmdk-to-image.sh
|
UTF-8
| 679
| 3.796875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Convert a VMware flat VMDK in the nova datastore to qcow2 and upload it
# to glance.
# Usage: $0 NAME   (expects ${DATASTORE}/NAME-flat.vmdk to exist)
DATASTORE=/var/lib/nova/instances
if [ ! "$1" ]
then
    echo "need file name ( minus .vmdk ) vmdk in ${DATASTORE} to convert. "
    echo "example PBX if exists ${DATASTORE}/PBX.vmdk"
    # BUG FIX: the script previously fell through here and ran with an
    # empty file name.
    exit 1
fi
FILE="$1"
pushd ${DATASTORE}
if [ -e "${DATASTORE}/${FILE}-flat.vmdk" ]
then
    echo converting $FILE
    # Track the conversion status explicitly. BUG FIX: the original tested
    # $? after the if/else, so it saw the status of the last echo rather
    # than that of qemu-img.
    status=0
    if [ ! -e "./${FILE}.qcow2" ]
    then
        qemu-img convert -O qcow2 ./${FILE}-flat.vmdk ./${FILE}.qcow2
        status=$?
    else
        # Existing qcow2 is reused and still uploaded (status stays 0),
        # matching the original's effective behavior.
        echo "already exists"
    fi
    if [ $status -eq 0 ]
    then
        echo uploading ${FILE} to glance
        glance image-create --name="${FILE}" --is-public=true --container-format=ovf --disk-format=raw < ./${FILE}.qcow2
        echo Done.
    else
        echo "error occurred while converting"
    fi
fi
popd
| true
|
d390d800cd41415f005fae7cf646edf1303a5a46
|
Shell
|
mcavazotti/Ray-Tracing
|
/InOneWeekendCUDA/runTests.sh
|
UTF-8
| 966
| 3.078125
| 3
|
[] |
no_license
|
# Benchmark the CUDA ray tracer over a grid of CUDA block geometries,
# timing both the iterative and the recursive kernel and averaging each
# over several repetitions. Results accumulate in data.csv.
BLOCK_X_SIZES=( 1 2 4 8 16 32 )
BLOCK_Y_SIZES=( 8 32 64 128 256 512 1024 )
REPETITIONS=10

# One timed run: keep only stderr, take the 3rd line from the end and
# strip everything that is not a digit (the millisecond count).
measure_ms() {
    ./rayTracer 1200 800 50 2>&1 >/dev/null | tail -n3 | head -1 | sed -e 's/[^0-9]//g'
}

echo "Block dimention, Iterative Time(ms), Recursive Time(ms)" > data.csv
for bx in "${BLOCK_X_SIZES[@]}"; do
    for by in "${BLOCK_Y_SIZES[@]}"; do
        # Iterative kernel: rebuild, then time REPETITIONS runs.
        iter_total=0
        make clean
        make ARGS="-DREC_MAX_DEPTH=5 -DBLOCK_X=$bx -DBLOCK_Y=$by"
        for rep in $(seq 1 "$REPETITIONS"); do
            iter_total=$(( iter_total + $(measure_ms) ))
        done
        iter_mean=$(echo "scale=3; $iter_total/$REPETITIONS" | bc -l)
        # Recursive kernel: same drill with -DRECURSIVE.
        rec_total=0
        make clean
        make ARGS="-DRECURSIVE -DREC_MAX_DEPTH=5 -DBLOCK_X=$bx -DBLOCK_Y=$by"
        for rep in $(seq 1 "$REPETITIONS"); do
            rec_total=$(( rec_total + $(measure_ms) ))
        done
        rec_mean=$(echo "scale=3; $rec_total/$REPETITIONS" | bc -l)
        echo "${bx}x${by}, ${iter_mean}, ${rec_mean}" >> data.csv
    done
done
| true
|
a0f4e6eedfc837239f29a6c1e078002d24d7a2dd
|
Shell
|
Uetty/shells
|
/certbot/certbot-generate.sh
|
UTF-8
| 1,134
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/bash
# Obtain a Let's Encrypt certificate with certbot's standalone mode,
# stopping the locally-built nginx around the challenge so port 80 is free.
# sudo yum install epel-release
# sudo yum install snapd
# sudo systemctl enable --now snapd.socket
# sudo ln -s /var/lib/snapd/snap /snap
#https://certbot.eff.org/lets-encrypt/centosrhel7-nginx
#sudo snap install core
#sudo snap refresh core
#sudo snap install --classic certbot
#sudo ln -s /snap/bin/certbot /usr/bin/certbot
yum -y install certbot
# With "standalone" domain validation, certbot starts its own server on
# port 80 and Let's Encrypt sends a request to that port to complete the
# ownership check,
# so nginx has to be stopped first to keep the port free.
# Once validation passes, the console output includes the directory where
# the generated certificate files are stored.
PRINT_TIP="\"standalone\" will startup a server which bind port 80 to verify domain, nginx need to temporarily stop\nAfter verified, cert file path will print in console"
echo ""
echo -e "\033[32m""${PRINT_TIP}""\033[0m"
echo ""
# service nginx stop
/usr/local/nginx/nginx -s stop
echo ""
# NOTE(review): email and domain below are hard-coded placeholders —
# parameterize before real use.
certbot certonly --standalone -n --agree-tos --email "11112222@qq.com" --preferred-challenges http -d www.xxx.com
echo ""
# service nginx start
/usr/local/nginx/nginx -c /usr/local/nginx/nginx.conf
| true
|
69d6458a60dff2fd68ad2846888d14b913628f4b
|
Shell
|
HiDeStore/HiDeStore
|
/script/benchmark.sh
|
UTF-8
| 333
| 2.546875
| 3
|
[] |
no_license
|
#!/bin/bash
# Run the destor benchmark over the kernel dataset, then post-process the
# resulting log with the split helper scripts.
dataset="kernel"
kernel_path="/home/lpf/workspace/kernel_data_debug/"
#kernel_path="/home/lpf/workspace/kerneldata/"
path=$kernel_path

./rebuild
# Truncate any previous log before the run.
> log
# Whole-directory run (the retired per-file loop appended one run per file).
./build/destor $path > log

./script/split.sh
./script/dedup_split.sh
./script/filter_split.sh
| true
|
80e3c4e1e96a94e78fcb03eeaadd15acb34f7766
|
Shell
|
adisubagja/custom-openwrt
|
/gas.sh
|
UTF-8
| 6,817
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/bash
#========================================================================================================================
# https://github.com/ophub/amlogic-s9xxx-openwrt
# Description: Automatically Build OpenWrt for Amlogic S9xxx STB
# Function: Diy script (After Update feeds, Modify the default IP, hostname, theme, add/remove software packages, etc.)
# Copyright (C) 2020 https://github.com/P3TERX/Actions-OpenWrt
# Copyright (C) 2020 https://github.com/ophub/amlogic-s9xxx-openwrt
#========================================================================================================================
# Modify default IP(FROM 192.168.1.1 CHANGE TO 192.168.31.4)
# sed -i 's/192.168.1.1/192.168.31.4/g' package/base-files/files/bin/config_generate
# Modify default theme(FROM uci-theme-bootstrap CHANGE TO luci-theme-material)
# sed -i 's/luci-theme-bootstrap/luci-theme-material/g' ./feeds/luci/collections/luci/Makefile
# Add the default password for the 'root' user(Change the empty password to 'password')
# NOTE(review): this crypt hash is committed to a public repo, so the root
# password of every built image is public knowledge — confirm intended.
sed -i 's/root::0:0:99999:7:::/root:$1$V4UetPzk$CYXluq4wUazHjmCDBCqXF.:0:0:99999:7:::/g' package/base-files/files/etc/shadow
# Add branches package from Lienol/openwrt/branches/19.07/package
svn co https://github.com/Lienol/openwrt/branches/19.07/package/{lean,default-settings} package
# Remove duplicate packages
rm -rf package/lean/{luci-app-frpc,luci-app-frps,libtorrent-rasterbar} 2>/dev/null
# Add firewall rules
# Duplicate the first iptables line of zzz-default-settings with udp->tcp,
# then uncomment every '# iptables' rule in that file.
zzz_iptables_row=$(sed -n '/iptables/=' package/default-settings/files/zzz-default-settings | head -n 1)
zzz_iptables_tcp=$(sed -n ${zzz_iptables_row}p package/default-settings/files/zzz-default-settings | sed 's/udp/tcp/g')
sed -i "${zzz_iptables_row}a ${zzz_iptables_tcp}" package/default-settings/files/zzz-default-settings
sed -i 's/# iptables/iptables/g' package/default-settings/files/zzz-default-settings
# Set default language and time zone
sed -i 's/luci.main.lang=zh_cn/luci.main.lang=auto/g' package/default-settings/files/zzz-default-settings
#sed -i 's/zonename=Asia\/Shanghai/zonename=Asia\/Jayapura/g' package/default-settings/files/zzz-default-settings
#sed -i 's/timezone=CST-8/timezone=CST-9/g' package/default-settings/files/zzz-default-settings
# Add autocore support for armvirt
sed -i 's/TARGET_rockchip/TARGET_rockchip\|\|TARGET_armvirt/g' package/lean/autocore/Makefile
# Correct translation for Transmission
sed -i 's/发送/Transmission/g' feeds/luci/applications/luci-app-transmission/po/zh_Hans/transmission.po
# Add luci-app-passwall
svn co https://github.com/xiaorouji/openwrt-passwall/trunk package/openwrt-passwall
rm -rf package/openwrt-passwall/{kcptun,xray-core} 2>/dev/null
# Add luci-app-openclash
svn co https://github.com/vernesong/OpenClash/trunk/luci-app-openclash package/openwrt-openclash
# NOTE(review): if this make fails, && short-circuits and popd never runs,
# leaving the cwd inside the po2lmo tree for the rest of the script.
pushd package/openwrt-openclash/tools/po2lmo && make && sudo make install 2>/dev/null && popd
# Add luci-app-ssr-plus
svn co https://github.com/fw876/helloworld/trunk/{luci-app-ssr-plus,shadowsocksr-libev} package/openwrt-ssrplus
rm -rf package/openwrt-ssrplus/luci-app-ssr-plus/po/zh_Hans 2>/dev/null
# Add luci-app-rclone
svn co https://github.com/ElonH/Rclone-OpenWrt/trunk package/openWrt-rclone
# Add luci-app-diskman
svn co https://github.com/lisaac/luci-app-diskman/trunk/applications/luci-app-diskman package/openwrt-diskman/luci-app-diskman
wget https://raw.githubusercontent.com/lisaac/luci-app-diskman/master/Parted.Makefile -q -P package/openwrt-diskman/parted
pushd package/openwrt-diskman/parted && mv -f Parted.Makefile Makefile 2>/dev/null && popd
# Add luci-app-amlogic
svn co https://github.com/ophub/luci-app-amlogic/trunk/luci-app-amlogic package/luci-app-amlogic
# Fix nginx-util
sed -i 's/\[\[fallthrough\]\]\;/\/\* fallthrough \*\//g' feeds/packages/net/nginx-util/src/nginx-ssl-util.hpp
# Replace the default software source
# sed -i 's#openwrt.proxy.ustclug.org#mirrors.bfsu.edu.cn\\/openwrt#' package/lean/default-settings/files/zzz-default-settings
# Default software package replaced with Lienol related software package
# rm -rf feeds/packages/utils/{containerd,libnetwork,runc,tini} 2>/dev/null
# svn co https://github.com/Lienol/openwrt-packages/trunk/utils/{containerd,libnetwork,runc,tini} feeds/packages/utils
# Apply patch
# git apply ../router-config/patches/{0001*,0002*}.patch --directory=feeds/luci
# Modify some code adaptation
# sed -i 's/LUCI_DEPENDS.*/LUCI_DEPENDS:=\@\(arm\|\|aarch64\)/g' package/lean/luci-app-cpufreq/Makefile
# Add luci-theme
# svn co https://github.com/Lienol/openwrt-package/trunk/lienol/luci-theme-bootstrap-mod package/luci-theme-bootstrap-mod
# Add Xderm
svn co https://github.com/adisubagja/pkg-xderm/trunk/packages package/xderm
# ------------------------------- Start Conversion -------------------------------
# Convert translation files zh-cn to zh_Hans
# [CTCGFW]immortalwrt
# Use it under GPLv3, please.
# Convert translation files zh-cn to zh_Hans
# The script is still in testing, welcome to report bugs.
# convert_files counts every rename/patch performed below.
convert_files=0
# Pass 1: *.zh-cn*.po files — fix the Language: header and rename to zh_Hans.
po_file="$({ find |grep -E "[a-z0-9]+\.zh\-cn.+po"; } 2>"/dev/null")"
for a in ${po_file}
do
[ -n "$(grep "Language: zh_CN" "$a")" ] && sed -i "s/Language: zh_CN/Language: zh_Hans/g" "$a"
po_new_file="$(echo -e "$a"|sed "s/zh-cn/zh_Hans/g")"
mv "$a" "${po_new_file}" 2>"/dev/null"
let convert_files++
done
# Pass 2: .po files inside a zh-cn/ directory — same header fix + rename.
po_file2="$({ find |grep "/zh-cn/" |grep "\.po"; } 2>"/dev/null")"
for b in ${po_file2}
do
[ -n "$(grep "Language: zh_CN" "$b")" ] && sed -i "s/Language: zh_CN/Language: zh_Hans/g" "$b"
po_new_file2="$(echo -e "$b"|sed "s/zh-cn/zh_Hans/g")"
mv "$b" "${po_new_file2}" 2>"/dev/null"
let convert_files++
done
# Passes 3-4: compiled .lmo files are renamed the opposite way (zh_Hans -> zh-cn).
lmo_file="$({ find |grep -E "[a-z0-9]+\.zh_Hans.+lmo"; } 2>"/dev/null")"
for c in ${lmo_file}
do
lmo_new_file="$(echo -e "$c"|sed "s/zh_Hans/zh-cn/g")"
mv "$c" "${lmo_new_file}" 2>"/dev/null"
let convert_files++
done
lmo_file2="$({ find |grep "/zh_Hans/" |grep "\.lmo"; } 2>"/dev/null")"
for d in ${lmo_file2}
do
lmo_new_file2="$(echo -e "$d"|sed "s/zh_Hans/zh-cn/g")"
mv "$d" "${lmo_new_file2}" 2>"/dev/null"
let convert_files++
done
# Pass 5: rename the zh-cn directory entries themselves (non-.po/.lmo matches).
po_dir="$({ find |grep "/zh-cn" |sed "/\.po/d" |sed "/\.lmo/d"; } 2>"/dev/null")"
for e in ${po_dir}
do
po_new_dir="$(echo -e "$e"|sed "s/zh-cn/zh_Hans/g")"
mv "$e" "${po_new_dir}" 2>"/dev/null"
let convert_files++
done
# Pass 6: patch Makefiles that reference the old language codes.
makefile_file="$({ find|grep Makefile |sed "/Makefile./d"; } 2>"/dev/null")"
for f in ${makefile_file}
do
[ -n "$(grep "zh-cn" "$f")" ] && sed -i "s/zh-cn/zh_Hans/g" "$f"
[ -n "$(grep "zh_Hans.lmo" "$f")" ] && sed -i "s/zh_Hans.lmo/zh-cn.lmo/g" "$f"
let convert_files++
done
echo -e "Convert translation files zh-cn to zh_Hans to complete. ${convert_files} in total."
# ------------------------------- End conversion -------------------------------
| true
|
cb6b32f2779e9bd85c6fb9266ad6e5a9adfcbf3f
|
Shell
|
jianlianggao/dhcp-structural-pipeline
|
/scripts/misc/structure-data.sh
|
UTF-8
| 7,120
| 3.328125
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/bash
# Stage one subject/session's structural-pipeline outputs into a BIDS-style
# release tree (sourcedata/ + derivatives/): raw & restored images, masks,
# segmentations, template warps, native surfaces and a workbench .spec file.
# NOTE(review): `run`, `$code_dir` and `$MNI_T1` are not defined in this
# file — presumably exported by the calling pipeline environment; confirm.
usage()
{
base=$(basename "$0")
echo "usage: $base entries.csv results_dir release_dir
This script uploads all the results.
Arguments:
entries.csv Entries for the subjects (subjectID-sessionID) used for the release
release_dir The directory used for the release.
data_dir The directory used to output the files.
"
exit;
}
# NOTE(review): the usage text above does not match the arguments actually
# consumed below (subjectID sessionID subj age releasedir [datadir] [minimal]).
[ $# -ge 5 ] || { usage; }
subjectID=$1
sessionID=$2
subj=$3
age=$4
releasedir=$5
# Working directory defaults to the cwd; an optional 6th argument overrides it.
datadir=`pwd`
if [ $# -ge 6 ];then
datadir=$6;
cd $datadir
datadir=`pwd`
fi
# minimal=1 (default) skips the extended warps/posteriors section below.
minimal=1
if [ $# -ge 7 ];then
minimal=$7
fi
action=cp
Hemi=('left' 'right');
Cortex=('CORTEX_LEFT' 'CORTEX_RIGHT');
subjdir=sub-$subjectID
sessiondir=ses-$sessionID
prefix=${subjdir}_${sessiondir}
anat=$subjdir/$sessiondir/anat
outputRawDir=$releasedir/sourcedata/$anat
outputDerivedDir=$releasedir/derivatives/$anat
outputSurfDir=$outputDerivedDir/Native
outputWarpDir=$outputDerivedDir/xfms
# An existing spec file marks this subject as already released; skip it.
if [ -f $outputDerivedDir/Native/${prefix}_wb.spec ];then exit; fi
mkdir -p $outputSurfDir $outputWarpDir $outputRawDir
# raw images
run cp restore/T2/${subj}_defaced.nii.gz $outputRawDir/${prefix}_T2w.nii.gz
# T1 is optional; when present, deface it by mapping the T2 deface mask
# through the inverse of the T2->T1 rigid transform.
if [ -f T1/$subj.nii.gz ];then
run mirtk transform-image masks/${subj}_mask_defaced.nii.gz masks/${subj}_mask_defaced_T1.nii.gz -target T1/$subj.nii.gz -dofin dofs/$subj-T2-T1-r.dof.gz -invert
run fslmaths T1/${subj}.nii.gz -thr 0 -mul masks/${subj}_mask_defaced_T1.nii.gz $outputRawDir/${prefix}_T1w.nii.gz
rm masks/${subj}_mask_defaced_T1.nii.gz
fi
# derived images
ms=T2
if [ -f T1/$subj.nii.gz ];then ms="T1 T2"; fi
for m in ${ms};do
for restore in "_defaced" "_restore_defaced" "_restore_brain" "_bias";do
# Release naming: drop "_defaced", expand "_bias" to "_biasfield".
nrestore=`echo $restore |sed -e 's:_defaced::g' |sed -e 's:_bias:_biasfield:g'`
run $action restore/$m/${subj}${restore}.nii.gz $outputDerivedDir/${prefix}_${m}w${nrestore}.nii.gz
done
done
# masks
run $action masks/$subj.nii.gz $outputDerivedDir/${prefix}_brainmask_drawem.nii.gz
run $action masks/$subj-bet.nii.gz $outputDerivedDir/${prefix}_brainmask_bet.nii.gz
# segmentations
for seg in all_labels tissue_labels;do
run $action segmentations/${subj}_${seg}.nii.gz $outputDerivedDir/${prefix}_drawem_${seg}.nii.gz
done
# warps
# Besides the subject's own age template, always export the 40-week one.
ages="$age"
if [ $age != 40 ];then ages="$ages 40";fi
for cage in ${ages};do
run mirtk convert-dof dofs/template-$cage-$subj-n.dof.gz $outputWarpDir/${prefix}_anat2std${cage}w.nii.gz -input-format mirtk -output-format fsl -target $code_dir/atlases/non-rigid-v2/T2/template-$cage.nii.gz -source $outputRawDir/${prefix}_T2w.nii.gz
run mirtk convert-dof dofs/$subj-template-$cage-n.dof.gz $outputWarpDir/${prefix}_std${cage}w2anat.nii.gz -input-format mirtk -output-format fsl -source $code_dir/atlases/non-rigid-v2/T2/template-$cage.nii.gz -target $outputRawDir/${prefix}_T2w.nii.gz
done
# surfaces
surfdir=surfaces/$subj/workbench
for f in corrThickness curvature drawem inflated pial roi sphere sulc thickness white MyelinMap; do
sfiles=`ls $surfdir/$subj.*$f*`
for sf in ${sfiles};do
# Rename from the internal convention to the release (BIDS-ish) names.
so=`echo $sf | sed -e "s:$surfdir.::g"|sed -e "s:$subj.:${prefix}_:g"`
# bids:
so=`echo $so | sed -e 's:corrThickness:corr_thickness:g' | sed -e 's:SmoothedMyelinMap:smoothed_myelin_map:g' | sed -e 's:MyelinMap:myelin_map:g'`
so=`echo $so | sed -e 's:.native::g' | sed -e 's:_L.:_left_:g'| sed -e 's:_R.:_right_:g'`
run $action $sf $outputDerivedDir/Native/$so
done
done
# Volumes/metrics/surfaces to register in the spec file; extended when T1 exists.
wbvols="T2w_restore"
wbmetrics="sulc thickness curvature corr_thickness"
wbsurfs="white pial midthickness inflated very_inflated sphere"
if [ -f T1/$subj.nii.gz ];then
wbvols="$wbvols T1w_restore T1wdividedbyT2w T1wdividedbyT2w_ribbon"
wbmetrics="$wbmetrics myelin_map smoothed_myelin_map"
# myelin images etc.
run $action $surfdir/$subj.ribbon.nii.gz $outputDerivedDir/${prefix}_ribbon.nii.gz
run $action $surfdir/$subj.T1wDividedByT2w_defaced.nii.gz $outputDerivedDir/${prefix}_T1wdividedbyT2w.nii.gz
run $action $surfdir/$subj.T1wDividedByT2w_ribbon.nii.gz $outputDerivedDir/${prefix}_T1wdividedbyT2w_ribbon.nii.gz
fi
# Extended (non-minimal) export: label segmentation, rigid warps, posteriors.
if [ ! $minimal -eq 1 ];then
# segmentations
seg=labels
run $action segmentations/${subj}_${seg}.nii.gz $outputDerivedDir/${prefix}_drawem_${seg}.nii.gz
# warps
for cage in ${ages};do
run mirtk convert-dof dofs/template-$cage-$subj-n.dof.gz dofs/template-$cage-$subj-r.dof.gz -input-format mirtk -output-format rigid
run mirtk convert-dof dofs/$subj-template-$cage-n.dof.gz dofs/$subj-template-$cage-r.dof.gz -input-format mirtk -output-format rigid
run mirtk convert-dof dofs/template-$cage-$subj-r.dof.gz $outputWarpDir/${prefix}_anat2std${cage}w.mat -input-format mirtk -output-format fsl -target $code_dir/atlases/non-rigid-v2/T2/template-$cage.nii.gz -source $outputRawDir/${prefix}_T2w.nii.gz
run mirtk convert-dof dofs/$subj-template-$cage-r.dof.gz $outputWarpDir/${prefix}_std${cage}w2anat.mat -input-format mirtk -output-format fsl -source $code_dir/atlases/non-rigid-v2/T2/template-$cage.nii.gz -target $outputRawDir/${prefix}_T2w.nii.gz
run $action dofs/template-$cage-$subj-r.dof.gz $outputWarpDir/${prefix}_anat2std${cage}w-r.dof.gz
run $action dofs/$subj-template-$cage-r.dof.gz $outputWarpDir/${prefix}_std${cage}w2anat-r.dof.gz
run $action dofs/template-$cage-$subj-n.dof.gz $outputWarpDir/${prefix}_anat2std${cage}w-n.dof.gz
run $action dofs/$subj-template-$cage-n.dof.gz $outputWarpDir/${prefix}_std${cage}w2anat-n.dof.gz
done
run mirtk convert-dof dofs/$subj-MNI-n.dof.gz dofs/$subj-MNI-r.dof.gz -input-format mirtk -output-format rigid
run mirtk convert-dof dofs/$subj-MNI-n.dof.gz $outputWarpDir/${prefix}_MNI2anat.nii.gz -input-format mirtk -output-format fsl -target $outputRawDir/${prefix}_T2w.nii.gz -source $MNI_T1
run mirtk convert-dof dofs/$subj-MNI-r.dof.gz $outputWarpDir/${prefix}_MNI2anat.mat -input-format mirtk -output-format fsl -target $outputRawDir/${prefix}_T2w.nii.gz -source $MNI_T1
run $action dofs/$subj-MNI-n.dof.gz $outputWarpDir/${prefix}_MNI2anat-n.dof.gz
run $action dofs/$subj-MNI-r.dof.gz $outputWarpDir/${prefix}_MNI2anat-r.dof.gz
if [ -f T1/$subj.nii.gz ];then
run $action dofs/$subj-T2-T1-r.mat $outputWarpDir/${prefix}_T1w2anat.mat
run $action dofs/$subj-T2-T1-r.dof.gz $outputWarpDir/${prefix}_T1w2anat-r.dof.gz
fi
# copy posteriors
outputPostDir=$outputDerivedDir/posteriors
mkdir -p $outputPostDir
structs=`ls posteriors`
for str in ${structs};do
run $action posteriors/$str/$subj.nii.gz $outputPostDir/${prefix}_drawem_${str}.nii.gz
done
fi
# create spec file
# Per-hemisphere surfaces first, then hemisphere-independent metrics,
# labels and volumes.
cd $outputDerivedDir/Native
spec=${prefix}_wb.spec
rm -f $spec
for hi in {0..1}; do
h=${Hemi[$hi]}
C=${Cortex[$hi]}
for surf in ${wbsurfs};do
run wb_command -add-to-spec-file $spec $C ${prefix}_${h}_$surf.surf.gii
done
done
C=INVALID
for metric in ${wbmetrics};do
run wb_command -add-to-spec-file $spec $C ${prefix}_$metric.dscalar.nii
done
run wb_command -add-to-spec-file $spec $C ${prefix}_drawem.dlabel.nii
for file in ${wbvols};do
run wb_command -add-to-spec-file $spec $C ../${prefix}_$file.nii.gz
done
| true
|
fce46904a2bf4f80bd7d990750184f21d5b41286
|
Shell
|
moneytech/che
|
/test.sh
|
UTF-8
| 360
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
# Compile every example program under prog/ with the che compiler into
# bin/, aborting on the first failure.
if [ ! -d bin ]; then
    mkdir bin || exit 1
fi
for i in prog/*.c; do
    # If prog/ has no .c files the glob stays literal; skip it instead of
    # handing the pattern itself to the compiler.
    [ -e "$i" ] || continue
    name=$(basename "$i")
    echo "$name"
    ./che build "$i" "bin/$name" || exit 1
done
# ./che lexer < prog/lexer.c > lexertest.php.txt
# ./bin/lexer < prog/lexer.c > lexertest.che.txt
# NOTE(review): if re-enabled, the line below needs an if/else — as written,
# `a || b && c` runs `exit 1` whenever the echo succeeds (i.e. always).
# diff --suppress-common-lines lexertest.php.txt lexertest.che.txt || echo "lexer outputs are different" && exit 1
| true
|
09a7059d6e3598ec20ef702ed7d5f30f16ca8fff
|
Shell
|
raejoon/graph-process
|
/experiments/solo-alpha-convergence-experiment/RUNME.sh
|
UTF-8
| 1,123
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/bash
# Solo-alpha convergence experiment: generate small graphs, run the solo2
# simulation for several alpha values over 1000 seeds, compute convergence
# times (plus their CDF), then plot. Run from this directory.
set -e
ROOT_DIR=../../
GRAPH_DIR=./graphs/
LOG_DIR=./logs/
ANALYSIS_DIR=./analysis/
FIG_DIR=./figs/
# Fresh output directories for every run.
rm -rf $GRAPH_DIR
rm -rf $ANALYSIS_DIR
rm -rf $FIG_DIR
# The log dir can hold very many files; syncing an empty dir into it with
# --delete empties it quickly before the final rm -rf.
mkdir empty_dir
rsync -a --delete empty_dir/ $LOG_DIR
rm -rf $LOG_DIR
rmdir empty_dir
mkdir $GRAPH_DIR
mkdir $LOG_DIR
mkdir $ANALYSIS_DIR
mkdir $FIG_DIR
SEED_FILE=./seeds.txt
python $ROOT_DIR/graph-generate/main.py --small --max 6 --outdir $GRAPH_DIR
echo "Generated small graphs."
# One simulation per seed, seeds 0..999.
seq 0 999 > $SEED_FILE
echo "Generated seed file."
# Alpha values are given in percent (50, 75, 87).
for ALPHA in 50 75 87;
do
ALPHA_DIR=./logs/solo-$ALPHA
mkdir -p $ALPHA_DIR
python $ROOT_DIR/graph-simulate/main.py --graph-dir $GRAPH_DIR \
--outdir $ALPHA_DIR --seed-list $SEED_FILE --algo solo2 --alpha $ALPHA
# Convergence-time summary, then its CDF, per alpha.
ANALYSIS_FILE=$ANALYSIS_DIR/solo-$ALPHA
python $ROOT_DIR/graph-simulate/analyze.py --logdir $ALPHA_DIR \
--converge-time --outfile $ANALYSIS_FILE
CDF_FILE=$ANALYSIS_DIR/solo-$ALPHA-cdf
python $ROOT_DIR/graph-simulate/analyze.py --logdir $ALPHA_DIR \
--converge-time --cdf --outfile $CDF_FILE
echo "Analyzed ALPHA=$ALPHA"
done
python plot.py
| true
|
3e1134f2d56c17d37ae42036ce5793d94ddff339
|
Shell
|
HaMster21/dotfiles
|
/zsh/prompt.zsh
|
UTF-8
| 1,428
| 3.390625
| 3
|
[
"MIT"
] |
permissive
|
# zsh prompt segments: precmd() runs before each prompt and rebuilds
# PROMPT / RPROMPT from the helper functions below.
precmd() {
  export RPROMPT="$(git_quickinfo)$(last_exitcode)"
  export PROMPT="$(current_user)@$(current_machine)$(directory_name)$(prompt_symbol) "
}

# User segment: highlighted "root" for uid 0, colored username otherwise.
# NOTE(review): zsh's $fg array has no "brightred" key by default — confirm
# the theme defines it, otherwise the escape expands to nothing.
current_user() {
  if [[ -n $SSH_CONNECTION ]]; then
    # BUG FIX: a branch whose body is only a comment is a zsh parse error
    # ("parse error near `else'"); ':' keeps the placeholder branch valid.
    : # TODO react to running ssh session
  else
    if [[ $(id -u) -eq 0 ]]; then
      echo "%{$fg[brightred]%}root%{$reset_color%}"
    else
      echo "%{$fg[magenta]%}$(whoami)%{$reset_color%}"
    fi
  fi
}

# Host segment; hidden for now inside ssh sessions (placeholder branch).
current_machine() {
  if [[ -n $SSH_CONNECTION ]]; then
    : # BUG FIX: same comment-only-branch parse error as in current_user
  else
    echo "%{$fg[green]%}%m%{$reset_color%}"
  fi
}

# Current working directory, prefixed with ':'.
directory_name() {
  echo "%{$fg[magenta]%}:%~%{$reset_color%}"
}

# Prompt glyph on its own line.
prompt_symbol() {
  echo "\n%{$fg[green]%}ᛄ%{$reset_color%}"
}

# Right-prompt git summary (currently just the branch name).
git_quickinfo() {
  echo "$(git_branch)"
}

# Last command's exit code, shown only when non-zero.
last_exitcode() {
  echo " %(?.. %{%F{brightred}%?↲%{$reset_color%})"
}

# True (status 0) when the cwd is inside a git checkout.
has_git() {
  # BUG FIX: the original used `return false` / `return true` — not valid
  # numeric statuses — and had the branches inverted relative to the name.
  if [[ -n $(git_branch) ]]; then
    return 0
  else
    return 1
  fi
}

# Current branch name, or nothing (status 0) outside a repository.
git_branch() {
  local ref=$(command git symbolic-ref HEAD 2> /dev/null) || return 0
  echo "${ref#refs/heads/}"
}

# Stash depth rendered as "(⛁ N)"; empty when the stash is empty.
git_stashcount() {
  local count
  count=$(git stash list 2>/dev/null | wc -l)
  if [[ $count -gt 0 ]]; then
    echo "(⛁ $count)"
  else
    echo ""
  fi
}

# Raw ahead/behind counts against the upstream branch.
git_remote_status() {
  local rstatus
  rstatus=$(git rev-list --count --left-right --boundary @{u}... 2>/dev/null)
  echo "$rstatus"
  #echo $($rstatus | grep < | wc -l)⇅$($rstatus | grep > | wc -l)
}
| true
|
7c96878785d3db0dfe47177bd27e55a64d87a0d8
|
Shell
|
MerrimanLab/GenomicPred
|
/scripts/riskscore.sh
|
UTF-8
| 4,112
| 2.96875
| 3
|
[] |
no_license
|
# assume a directory structure with software, data directories
# data contains keepfiles for lists of samples labelled keep????.txt and bed, bim, fam files. also contains a residuals.csv
# with ID and columns of residuals
# software contains the GCTA, LDAK and BayesR tools
# run commands from the base directory, script can be kept in scripts
# variable to create and output folder for each run
#
### pass in an argument making a batch/output directory for each run
OUTPUT=output
# -p creates $OUTPUT itself too; the original mkdir failed when it was missing.
mkdir -p $OUTPUT/bayesR $OUTPUT/LDAK $OUTPUT/GCTA
### prune and sort the data
#prune to only those with residuals
# NOTE(review): this reads ./residuals and writes ./keepall.txt, while the
# next command reads data/keepall.txt (and other steps use
# data/residuals.csv) — confirm the intended paths.
cat residuals | awk '{print $1, $1}' > keepall.txt
plink2 --bfile data/data --keep data/keepall.txt --maf 0.001 --geno 0.05 --make-bed --out data/sorteddata
# combine residuals to fam file
# this step need to be repeated for each of the groups at bayesR section
# columns 8-12 are the FIVE residuals
# height, egfr, serumurate, diabetes and gout in that order
paste data/sorteddata.fam data/residuals.csv | awk '{print $1, $2, $3, $4, $5, $8, $9, $10, $11, $12}' > data/test.fam
######################## GCTA
#grm
parallel -j4 "software/gcta64 --bfile data/sorteddata --keep data/keep{}.txt --autosome --maf 0.01 --make-grm --out $OUTPUT/GCTA/{}" ::: east west poly euro
#reml
# BUG FIX: $OUPUT -> $OUTPUT (the typo expanded empty, producing "/GCTA/..."),
# and keep{} -> keep{1}: with two ::: input sources, the bare {} placeholder
# is ambiguous; every other placeholder on this line already uses {1}/{2}.
# NOTE(review): --pheno data/{1}.fam — the fam built above is data/test.fam;
# confirm per-group fam files exist.
parallel -j4 "software/gcta64 --reml --grm $OUTPUT/GCTA/{1} --keep data/keep{1}.txt --pheno data/{1}.fam --mpheno {2} --prevalence 0.15 --out $OUTPUT/GCTA/{1}_{2}" ::: east west poly euro ::: $(seq 1 5)
# One GRM (quantitative traits)
#gcta64 --reml --grm test --pheno test.phen --reml-pred-rand –qcovar test_10PCs.txt --out test
#gcta64 --reml --grm test --pheno test.phen --grm-adj 0 --grm-cutoff 0.05 --out test
#gcta64 --reml --grm test --pheno test.phen --keep test.indi.list --grm-adj 0 --out test
# One GRM (case-control studies)
# gcta64 --reml --grm test --pheno test_cc.phen --prevalence 0.01 --out test_cc
# gcta64 --reml --grm test --pheno test_cc.phen --prevalence 0.01 --qcovar test_10PCs.txt --out test_cc
# BLUP solutions for the SNP effects
gcta64 --bfile test --blup-snp test.indi.blp --out test
# Then use plink --score test.snp.blp 1 2 3
####################### LDAK
### calculate the weights and kinships for LDAK prior to GRM
parallel -j4 "software/LDAK/ldak5.linux --keep data/keep{}.txt --cut-weights $OUTPUT/LDAK/{}_sections --bfile data/sorteddata" ::: east west poly euro
parallel -j4 "software/LDAK/ldak5.linux --keep data/keep{}.txt --calc-weights-all $OUTPUT/LDAK/{}_sections --bfile data/sorteddata" ::: east west poly euro
parallel -j4 "software/LDAK/ldak5.linux --keep data/keep{}.txt --calc-kins-direct $OUTPUT/LDAK/{}_kinships --bfile data/sorteddata --weights $OUTPUT/LDAK/{}_sections/weights.short --power -0.25" ::: east west poly euro
# reml
# LDAK counts the first phenotype as colum 3 of the fam file, so 1-5 is actually probably 4, 5, 6, 7
parallel -j4 "software/LDAK/ldak5.linux --reml $OUTPUT/LDAK/{1}_{2} --keep data/keep{1}.txt --pheno data/test.fam --mpheno {2} --grm $OUTPUT/LDAK/{1}_kinships" ::: east west poly euro ::: $(seq 4 8)
parallel -j4 "software/LDAK/ldak5.linux --calc-blups $OUTPUT/LDAK/{1}_{2} --remlfile $OUTPUT/LDAK/{1}_{2}.reml --grm $OUTPUT/LDAK/{1}_kinships --bfile data/sorteddata --check-root NO" ::: east west poly euro ::: $(seq 4 8)
###################### bayesR
# split data to the various populations
parallel -j4 "plink2 --bfile data/sorteddata --keep data/keep{}.txt --make-bed --out data/data_{}" ::: east west poly euro
# this step need to be repeated for each of the groups at bayesR section
# Re-joins each group fam with the residuals, then swaps it into place;
# the three commands below run as one quoted parallel job per group.
parallel -j18 "join -1 1 -2 1 -o 1.1 2.1 1.3 1.4 1.5 2.2 2.3 2.4 2.5 2.6 -e "NA" <(sort -k 1 data/data_{}.fam) <(sort -k 1 data/residuals.csv) > tmp{}
rm data/data_{}.fam
cp tmp{} data/data_{}.fam
rm tmp{}" ::: east west poly euro
# run bayes
parallel "nice -n 10 bayesR -bfile data/data_{1} -out $OUTPUT/bayesR/{1}_{2} -numit 100000 -burnin 20000 -n {2} -seed 333" ::: east west poly euro ::: $(seq 1 5)
bayesR -bfile data/data_poly -out poly_1 -numit 100 -n 1 -burnin 20 -seed 333
###################### GCTB
###################### GCTB
| true
|
2fbf7f09083663fa62cdcf718cd25899c389fb7e
|
Shell
|
GiovanniCS/PanDelosFragments
|
/main.sh
|
UTF-8
| 2,535
| 4.125
| 4
|
[] |
no_license
|
#!/bin/bash
# PanDelosFragments entry point: reconstruct a genome from assembled
# contigs against a reference (or the best BLAST match from the local
# prokaryotic db), then predict, filter and extract genes.
#
# Usage: main.sh GENOME_FASTA OUTPUT_FOLDER (REFERENCE_FASTA | prokaryotic)
cd `dirname $0`
chmod +x align_to_candidate_genome.sh
chmod +x align_to_reconstructed_genome.sh
chmod +x gene_prediction.sh
chmod +x chromosomer_genome_recontruction.sh

genome=$1
if [[ ! -f $genome ]]
then
    # BUG FIX: message typo ("non found") and missing non-zero exit status.
    echo "ERROR: genome file not found, provide an absolute path to it"
    exit 1
fi
reference_genome=$3
if [[ $reference_genome == p* ]]
then
    # Keyword request: search the local db instead of using a file.
    # BUG FIX: the original required the -f test to pass even for the
    # keyword, so "prokaryotic" always hit the error branch unless a file
    # with that name happened to exist.
    reference_genome="prokaryotic"
elif [[ -z "$reference_genome" || ! -f $reference_genome ]]
then
    echo "ERROR: you must specify the full path to a reference genome file or the keyword 'prokaryotic' for searching in the local db"
    exit 1
fi
output_folder=$2
if [[ ! -d $output_folder ]]
then
    mkdir -p $output_folder
fi
echo "Using reference genome from: ${reference_genome}"
# Fresh working directories for this run.
rm -rf ${output_folder}/results && mkdir ${output_folder}/results
rm -rf ${output_folder}/original_genome && mkdir ${output_folder}/original_genome
rm -rf ${output_folder}/artifacts && mkdir ${output_folder}/artifacts
# Taxon id = second whitespace-separated token of the first FASTA header line.
genome_taxon=`head -n1 ${genome} | cut -d" " -f2 | cut -d">" -f2`
genome_file="${output_folder}/original_genome/${genome_taxon}_reference_genome.fna"
cp $genome $genome_file
genome=$genome_taxon
if [[ $reference_genome == "prokaryotic" ]]
then
    # Using BLAST, find the genome in the ref_prok_rep_genomes db most
    # similar to the assembled contigs.
    python3 find_candidate_genome.py $output_folder $reference_genome $genome_file
else
    cp $reference_genome ${output_folder}/artifacts/candidate_genome.fna
fi
# Using bwa and samtools, align the contigs to the candidate genome.
./align_to_candidate_genome.sh $genome $output_folder $genome_file
# Pull down missing bases in the aligned contigs set from the reference genome.
python3 genome_recontruction.py $genome $output_folder
# Chromosomer variant of the pipeline (disabled).
#./chromosomer_genome_recontruction.sh $genome $output_folder $genome_file
#python3 chromosomer_filter_unused_sequences.py $genome_file $output_folder
# Using bwa and samtools, align the contigs to the reconstructed genome.
./align_to_reconstructed_genome.sh $genome $output_folder
# Predict genes on the reconstructed genome.
./gene_prediction.sh $genome $output_folder
# Filter genes that overlap at least one contig, describing how many bases
# fall inside the contigs and how many outside.
python3 gene_filtering.py $genome $output_folder
python3 extract_gene_sequences.py $genome $output_folder
| true
|
6fb8e605080f934fb7d42ba731f5a1bb495c6d19
|
Shell
|
hassan11196/OSA1
|
/shell_calc.sh
|
UTF-8
| 268
| 3.078125
| 3
|
[] |
no_license
|
# Shell_calc: print sum, difference, product and (integer) quotient of the
# two script arguments.
#
# FIXES: num1/num2 were assigned but never used (the arithmetic re-read
# $1/$2 textually, which is an expansion error when an argument is
# missing), and a zero divisor aborted mid-output with a raw bash error.

# calc A B — run the calculator for one pair of integers (prints to stdout).
calc() {
    local a=$1 b=$2
    echo -e "\nShell_calc Script arguments : $a and $b"
    # Inside $(( )), empty/unset names evaluate as 0, so a missing argument
    # no longer blows up the expansion.
    local sum=$((a + b))
    local sub=$((a - b))
    local mul=$((a * b))
    local div=""
    if [ "${b:-0}" -ne 0 ] 2>/dev/null; then
        div=$((a / b))
    else
        # Report on stderr instead of letting bash abort with "division by 0".
        echo "warning: division by zero, quotient undefined" >&2
    fi
    echo "Sum of $a and $b = $sum"
    echo "Difference of $a and $b = $sub"
    echo "Product of $a and $b = $mul"
    echo "Quotient of $a and $b = $div"
}

calc "$1" "$2"
| true
|
833f5e84df655f897853f7c7335dbf59fd2c14d6
|
Shell
|
SchellDa/cautious-lamp
|
/scripts/shiftHephyByTable.sh
|
UTF-8
| 437
| 3.515625
| 4
|
[] |
no_license
|
#!/bin/bash
# Launch hephyRunShift.sh for every row of a CSV file whose columns are
# MPA-RUN, REF-SHIFT, TEL-SHIFT; every other invocation is started in the
# background so two shifts proceed in parallel.
# NOTE(review): `checkTbEnv.sh && exit 1` aborts when the check SUCCEEDS —
# confirm checkTbEnv.sh returns non-zero inside a valid testbeam env.
checkTbEnv.sh && exit 1
if [ $# -ne 1 ]; then
    echo "Usage: $0 CSV-FILE"
    echo ""
    echo "A CSV file with three columns (MPA-RUN REF-SHIFT TEL-SHIFT)"
    echo "is expected by this script."
    # BUG FIX: the script previously printed usage and then carried on,
    # running `cat` on an empty file name below.
    exit 1
fi
counter=0
csvfile=$1
for line in `cat $csvfile`; do
    # Re-split the row on commas into $1 $2 $3. Note IFS also changes the
    # outer word-splitting from the second loop iteration onward.
    IFS=","
    set -- $line
    echo "Start Run $1"
    # Even-numbered rows run in the background, pairing two runs at a time.
    if [ `expr $counter % 2` -eq 0 ]; then
        hephyRunShift.sh $1 $2 $3 &
    else
        hephyRunShift.sh $1 $2 $3
    fi
    counter=$(($counter+1))
done
| true
|
e3ebad30347c8266aae38a99a2317d3d7f51191f
|
Shell
|
ansemjo/dotfiles
|
/bash/aliases.d/fsnoise.sh
|
UTF-8
| 607
| 3.90625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Generate "filesystem noise" beneath the current directory: for each
# directory name in the first brace-range and each file name in the
# second, roll the dice and create the file with a fresh UUID as content.
fsnoise() {
    D=$1; F=$2; R=${3:-30};
    # Both range arguments are mandatory; print usage and fail otherwise.
    if [[ -z $D ]] || [[ -z $F ]]; then
        printf 'fsnoise a..f 0..9 [rand%%]\n' >&2
        printf ' ^dirs ^files ^percentage\n' >&2
        return 1
    fi
    # The ranges are user-supplied brace expressions, expanded via eval.
    # shellcheck disable=SC1083
    for dir in $(eval echo {"$D"}); do
        # shellcheck disable=SC1083
        for file in $(eval echo {"$F"}); do
            # RANDOM spans 0..32767, so 327*R approximates an R% chance.
            if (( RANDOM < 327 * R )); then
                echo "$dir/$file"
                mkdir -p "$dir"
                uuidgen > "$dir/$file"
            fi
        done
    done
    return 0
}
| true
|
ab56ef06bbefadaa67c8c551457ea3a6d0c0a167
|
Shell
|
ahmadassaf/oh-my-zsh
|
/themes/gaudi/segments/docker.zsh
|
UTF-8
| 2,366
| 3.890625
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env zsh
#
# Docker
#
# Docker automates the repetitive tasks of setting up development environments
# Link: https://www.docker.com
# ------------------------------------------------------------------------------
# Configuration
# ------------------------------------------------------------------------------
# Each setting honours a pre-existing environment override, falling back to
# the gaudi theme defaults.
GAUDI_DOCKER_SHOW="${GAUDI_DOCKER_SHOW=true}"
GAUDI_DOCKER_PREFIX="${GAUDI_DOCKER_PREFIX="$GAUDI_PROMPT_DEFAULT_PREFIX"}"
GAUDI_DOCKER_SUFFIX="${GAUDI_DOCKER_SUFFIX="$GAUDI_PROMPT_DEFAULT_SUFFIX"}"
GAUDI_DOCKER_SYMBOL="${GAUDI_DOCKER_SYMBOL="\\uf308"}"
GAUDI_DOCKER_COLOR="${GAUDI_DOCKER_COLOR="$CYAN"}"
GAUDI_DOCKER_VERBOSE="${GAUDI_DOCKER_VERBOSE=false}"
# ------------------------------------------------------------------------------
# Section
# ------------------------------------------------------------------------------
# Show current Docker version and connected machine
gaudi_docker() {
[[ $GAUDI_DOCKER_SHOW == false ]] && return
# Bail out silently when the docker CLI is not installed.
# NOTE(review): gaudi::exists / gaudi::section are theme helpers defined
# elsewhere in the gaudi theme.
gaudi::exists docker || return
# Better support for docker environment vars: https://docs.docker.com/compose/reference/envvars/
local compose_exists=false
if [[ -n "$COMPOSE_FILE" ]]; then
# Use COMPOSE_PATH_SEPARATOR or colon as default
local separator=${COMPOSE_PATH_SEPARATOR:-":"}
# COMPOSE_FILE may have several filenames separated by colon, test all of them
# zsh expansion flags: p = interpret escapes in the separator, s/…/ =
# split on it, @ = keep elements as separate array entries.
local filenames=("${(@ps/$separator/)COMPOSE_FILE}")
for filename in $filenames; do
if [[ ! -f $filename ]]; then
compose_exists=false
break
fi
compose_exists=true
done
# Must return if COMPOSE_FILE is present but invalid
[[ "$compose_exists" == false ]] && return
fi
# Show Docker status only for Docker-specific folders
[[ "$compose_exists" == true || -f Dockerfile || -f docker-compose.yml ||
-n $(find . -not -path '*/\.*' -maxdepth 1 -name "*.Dockerfile")
]] || return
# if docker daemon isn't running you'll get an error saying it can't connect
# cut strips any "-suffix" (e.g. "-ce") from the reported server version.
local docker_version=$(docker version -f "{{.Server.Version}}" 2>/dev/null | cut -f1 -d"-")
[[ -z $docker_version ]] && return
# Append the docker-machine name when one is active.
if [[ -n $DOCKER_MACHINE_NAME ]]; then
docker_version+=" via ($DOCKER_MACHINE_NAME)"
fi
gaudi::section \
"$GAUDI_DOCKER_COLOR" \
"$GAUDI_DOCKER_PREFIX" \
"$GAUDI_DOCKER_SYMBOL" \
"$docker_version" \
"$GAUDI_DOCKER_SUFFIX"
}
| true
|
8ad3987954497b7cf828a3d2c5e969055ecab331
|
Shell
|
jianxiamage/Proj_TestResults
|
/TestResult_Project_ver0.3/Extended_GetResultsXls/Summary_Excel_Select.sh
|
UTF-8
| 2,056
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash
#set -e
# Purpose:
# Multi-threaded driver that records each test node's per-test-case result
# (marker: initial value -> 1, success -> 0).
# The end goal is to produce the result file used to display test-case
# results in the front end.
if [ $# -ne 2 ];then
echo "usage: $0 TestType Platform"
exit 1
fi
#----------------------------------------------------------------------------------------
TestType="$1"
Platform="$2"
#----------------------------------------------------------------------------------------
#TestType="OS"
#Platform="7A_Integrated"
#----------------------------------------------------------------------------------------
ResultPath='/data'
destPath="${ResultPath}/${TestType}/${Platform}"
# NOTE(review): ${ResultFile} is never set in this script — presumably
# exported by a caller. destPath/destFile also appear unused below; confirm.
destFile="${destPath}/${ResultFile}"
#----------------------------------------------------------------------------------------
outputDir=TestMark_SummaryExcel
outputFile="${outputDir}/TestMark_${TestType}_${Platform}.txt"
ScoreListFile="ScoreCaseList/ScoreCaseList_${TestType}.txt"
#----------------------------------------------------------------------------------------
merge_excelDir='Excel_Merge'
okfile="${merge_excelDir}/ok_file.txt"
errfile="${merge_excelDir}/err_file.txt"
#----------------------------------------------------------------------------------------
# The score-case list must exist and be non-empty before merging.
if [ ! -s $ScoreListFile ]
then
echo "File $ScoreListFile is not Existed!"
exit 1
fi
mkdir $outputDir -p
rm -rf $outputFile
mkdir ${merge_excelDir} -p
rm -rf $okfile
rm -rf $errfile
# Recreate the ok/err tracking files empty.
:> ${okfile}
:> ${errfile}
# NOTE(review): this "finished" banner is printed before the summary step
# below actually runs (and again after it) — confirm which is intended.
echo "[$TestType],[$Platform],Merging(Summary) the the Results Excel file finished."
echo "***************************************************"
rm -rf $outputFile
start_time=`date +%s` # record the script start time
sh summary_Excel_Results_all.sh $TestType $Platform
stop_time=`date +%s` # record the script end time
echo "Exec Time:`expr $stop_time - $start_time`s"
echo "***************************************************"
echo "[$TestType],[$Platform],Merging(Summary) the the Results Excel file finished."
| true
|
792d01afdb1c6bb3ba567d694ba99adaac109e9f
|
Shell
|
AgnitumuS/letv
|
/puppet/modules/ats_cloud/templates/records.config.sh
|
UTF-8
| 2,213
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
# newcdn ats records.config
# Reconcile Apache Traffic Server storage/records configuration with the
# block devices actually present, and restart ATS when layout changed.
tmp_conf="/etc/trafficserver/records.config_tmp"
record_conf="/etc/trafficserver/records.config"
store_conf="/etc/trafficserver/storage.config"
disk_key=0
ssd_key=0
# Root disk = the device hosting the SWAP partition; it is excluded from the
# cache everywhere below.
function get_root_disk()
{
echo `lsblk -nl | grep SWAP | awk '{print $1}' | grep -o "[a-z]*"`
}
root_disk=$(get_root_disk)
# 5.5T spinning disks (minus root disk and letv-labelled devices) = cache disks.
function get_local_disk()
{
echo `lsblk -nl | grep disk | grep 5.5T | grep -v letv |awk '{print "/dev/"$1}'| grep -v $root_disk | sort`
}
# Disks currently listed in storage.config (ignoring comments).
function get_conf_disk()
{
echo `grep -v "^#" $store_conf | grep "/dev" | sort -u`
}
# Every non-5.5T, non-root disk is treated as an SSD for the interim tier.
function get_local_ssd()
{
echo `lsblk -nl | grep disk | grep -v 5.5T | awk '{print "/dev/"$1}' | grep -v $root_disk | sort`
}
# SSDs referenced by proxy.config.cache.interim.storage in the active
# records.config; empty when that file does not exist yet.
function get_conf_ssd()
{
if [ -f $record_conf ];then
ssd=`grep proxy.config.cache.interim.storage $record_conf | grep -v "#" | grep -o "/dev/[a-z]*" | sort -u`
echo $ssd
else
echo ""
fi
}
ssd_disk=$(get_local_ssd)
conf_ssd=$(get_conf_ssd)
# Rebuild records.config from the template; re-add the interim-storage line
# only when SSDs exist. traffic_line -x reloads the config in place; an SSD
# set change instead sets ssd_key so the full restart below handles it.
if [ "$ssd_disk" == "" ];then
cp -f $tmp_conf $record_conf
sed -i '/proxy.config.cache.interim.storage/d' $record_conf
/usr/bin/traffic_line -x
elif [ "$ssd_disk" == "$conf_ssd" ];then
cp -f $tmp_conf $record_conf
sed -i '/proxy.config.cache.interim.storage/d' $record_conf
echo "LOCAL proxy.config.cache.interim.storage STRING $ssd_disk" >> $record_conf
/usr/bin/traffic_line -x
else
cp -f $tmp_conf $record_conf
sed -i '/proxy.config.cache.interim.storage/d' $record_conf
echo "LOCAL proxy.config.cache.interim.storage STRING $ssd_disk" >> $record_conf
ssd_key=1
fi
cache_disk=$(get_local_disk)
conf_disk=$(get_conf_disk)
# Build a udev rule (e.g. KERNEL=="sd[bcd]") so the ats user owns all cache
# and SSD block devices, then re-trigger udev to apply it immediately.
disk_list=`echo $cache_disk$ssd_disk | sed -e 's/ //g' -e 's/\/dev\/sd//g'`
echo "SUBSYSTEM==\"block\", KERNEL==\"sd[$disk_list]\", OWNER=\"ats\",GROUP=\"ats\"" >/etc/udev/rules.d/99-ats.rules
/sbin/udevadm trigger --subsystem-match=block
# Rewrite storage.config when the physical disk set changed.
if [ "$cache_disk" != "$conf_disk" ];then
echo $cache_disk | sed 's/ /\n/g'
echo $cache_disk | sed 's/ /\n/g' > $store_conf
disk_key=1
fi
# Any storage or SSD layout change requires a full ATS restart.
if [ $disk_key -eq 1 -o $ssd_key -eq 1 ];then
/etc/init.d/trafficserver restart
fi
# Ensure this script is re-run at boot.
grep -q /etc/trafficserver/records.config.sh /etc/rc.local
if [ $? -ne 0 ];then
echo /etc/trafficserver/records.config.sh >> /etc/rc.local
fi
| true
|
128cd3d299ed17c8d7eb24ea1ca8fa584c4cb782
|
Shell
|
vdesabou/kafka-docker-playground
|
/connect/connect-datadiode-source-sink/datadiode.sh
|
UTF-8
| 2,299
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Kafka playground scenario: data-diode source/sink connectors. The sink
# reads topic "diode" and pushes encrypted records to port 3456; the source
# listens on that port and republishes under the "dest_" prefix. Payloads
# stay opaque end to end (ByteArrayConverter everywhere).
set -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source ${DIR}/../../scripts/utils.sh
# Bring up the plaintext Kafka environment for this scenario.
${DIR}/../../environment/plaintext/start.sh "${PWD}/docker-compose.plaintext.yml"
log "Creating DataDiode Source connector"
playground connector create-or-update --connector datadiode-source << EOF
{
     "tasks.max": "1",
     "connector.class": "io.confluent.connect.diode.source.DataDiodeSourceConnector",
     "kafka.topic.prefix": "dest_",
     "key.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
     "value.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
     "header.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
     "diode.port": "3456",
     "diode.encryption.password": "supersecretpassword",
     "diode.encryption.salt": "secretsalt",
     "confluent.license": "",
     "confluent.topic.bootstrap.servers": "broker:9092",
     "confluent.topic.replication.factor": "1",
     "errors.tolerance": "all",
     "errors.log.enable": "true",
     "errors.log.include.messages": "true"
}
EOF
log "Creating DataDiode Sink connector"
# Must share password/salt with the source so records decrypt correctly.
playground connector create-or-update --connector datadiode-sink << EOF
{
     "connector.class": "io.confluent.connect.diode.sink.DataDiodeSinkConnector",
     "tasks.max": "1",
     "topics": "diode",
     "key.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
     "value.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
     "header.converter": "org.apache.kafka.connect.converters.ByteArrayConverter",
     "diode.host": "connect",
     "diode.port": "3456",
     "diode.encryption.password": "supersecretpassword",
     "diode.encryption.salt": "secretsalt",
     "confluent.license": "",
     "confluent.topic.bootstrap.servers": "broker:9092",
     "confluent.topic.replication.factor": "1"
}
EOF
# Give the connectors time to start their tasks before producing.
sleep 10
log "Send message to diode topic"
playground topic produce -t diode --nb-messages 10 << 'EOF'
This is a message 1
This is a message 2
This is a message 3
This is a message 4
This is a message 5
This is a message 6
This is a message 7
This is a message 8
This is a message 9
This is a message 10
EOF
sleep 5
# End-to-end check: all 10 records must have crossed the diode.
log "Verifying topic dest_diode"
playground topic consume --topic dest_diode --min-expected-messages 10 --timeout 60
| true
|
def06b3001e5b0e1b2440d809186ef416ee7f9c9
|
Shell
|
rigel-eva/rigelPortfolio
|
/start_server.sh
|
UTF-8
| 607
| 3.140625
| 3
|
[] |
no_license
|
#! /bin/bash
# Boot script: wait for Postgres, run one-time Rails setup, then start the
# server. Required env: DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD.
# A "ready" marker file records that first-run setup already happened.

# Block until the Postgres server accepts connections.
until PGPASSWORD="$DATABASE_PASSWORD" psql -h "$DATABASE_HOST" -U "$DATABASE_USER" -c '\q'; do
  >&2 echo "Postgres is unavailable - sleeping"
  sleep 1
done

>&2 echo "Postgres is up!"

# BUG FIX: original read `[ -f ready]` (missing space before `]`), which
# always failed and therefore re-ran first-time setup on every boot.
if [ -f ready ]; then
  >&2 echo "Subsequent run detected - Skipping setup, starting main program"
else
  >&2 echo "Initial run detected - Starting setup"
  RAILS_ENV=production bundle exec rake db:create db:schema:load db:seed
  # BUG FIX: original `export SECRET_KEY_BASE=bundle exec rake secret`
  # exported the literal string "bundle"; capture the command's output.
  SECRET_KEY_BASE=$(bundle exec rake secret)
  export SECRET_KEY_BASE
  touch ready
  >&2 echo "Setup Finished - Starting main program"
fi

bundle exec rails s -p 3000 -b '0.0.0.0' -e production
| true
|
dc04857425b21f0ccde2e875fb3e88183589ade2
|
Shell
|
longzl2015/long12356
|
/run_service.sh
|
UTF-8
| 351
| 2.671875
| 3
|
[] |
no_license
|
#!/bin/sh
# Rebuild the Hexo site from scratch and serve it locally. Runs from the
# script's own directory so it can be invoked from anywhere.

cd "$(dirname "$0")" || exit

# Base dependencies first, then each plugin the site relies on.
npm install
for plugin in hexo-deployer-git hexo-filter-plantuml hexo-generator-searchdb hexo-server; do
    npm install "$plugin" --save
done
#npm audit fix;
echo "清空旧数据"
hexo clean
echo "生成静态文件"
hexo g
echo "启动服务"
hexo server
| true
|
7555a6ad2ceb5b8c38d5b071f36b92f44b6cf9a5
|
Shell
|
RoyWarwick/CCSE-Blue-Team
|
/security/security-unit-installation.sh
|
UTF-8
| 2,247
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
# This file is for enabling a machine to act as a security unit.
# Installs packages, deploys the unit's binaries/certs under /usr/security,
# configures mosquitto for TLS, and arranges startup via /etc/rc.local.
echo
echo "#### Security Unit ####"
echo
echo "+---------------------+"
echo "| Installation Script |"
echo "+---------------------+"
echo
# Must run as root: we install packages and write under /usr and /etc.
if [[ $EUID != 0 ]]
then
echo "This script must be run as the superuser." >&2
echo "Exit status: 1" >&2
exit 1
fi
LOCAL="192.168.0.4"
# NOTE(review): FOREIGN is never used below — confirm whether it belongs in
# reg.sh invocation or is dead configuration.
FOREIGN="192.168.0.2"
echo "Installation starting..."
# installation
apt update
apt-get install -y openssl strongswan apache2-utils mosquitto mosquitto-clients
START="$(dirname $(readlink -f $0))"
# Place files in correct place
cd /usr
rm -r security 2> /dev/null # Remove any previous installations
mkdir security
cd security
mkdir x509
touch root.sh
chmod 755 root.sh
# Place files in the correct places
cp $START/sensor sensor
cp $START/off off
cp $START/alarm alarm
cp $START/PINs PINs
cp $START/MatrixKeypad MatrixKeypad
cp $START/openssl.cnf openssl.cnf
cp $START/reg.sh reg.sh
cp $START/x509/ca.crt x509/ca.crt
cp $START/x509/sec.key /etc/mosquitto/certs/sec.key
cp $START/x509/sec.crt x509/sec.crt
# Set appropriate file permissions
chmod 755 sensor off alarm MatrixKeypad reg.sh
chmod 777 PINs
# Update mosquitto.conf: strip any previous port/TLS lines, then re-append.
grep -v "^port" /etc/mosquitto/mosquitto.conf | grep -v "^cafile" | grep -v "^keyfile" | grep -v "^certfile" | grep -v "^require_certificate" > /etc/mosquitto/mosquitto.conf
echo "port 8883" >> /etc/mosquitto/mosquitto.conf
echo "cafile /usr/security/x509/ca.crt" >> /etc/mosquitto/mosquitto.conf
echo "keyfile /etc/mosquitto/certs/sec.key" >> /etc/mosquitto/mosquitto.conf
echo "certfile /usr/security/x509/sec.crt" >> /etc/mosquitto/mosquitto.conf
echo "require_certificate true" >> /etc/mosquitto/mosquitto.conf
# Create the root.sh file
# BUG FIX: 'echo "#\!/bin/bash"' wrote a literal backslash into the shebang
# (\! is not a recognized escape inside double quotes, and history expansion
# is off in scripts anyway). Single quotes give a valid '#!/bin/bash' line.
echo '#!/bin/bash' > root.sh
echo "cd /usr/security" >> root.sh
echo "TOPIC=\$(/usr/security/reg.sh $LOCAL)" >> root.sh
echo "/usr/security/MatrixKeypad \"$LOCAL\" \$TOPIC & disown" >> root.sh
echo "/usr/security/sensor \"$LOCAL\" \$TOPIC & disown" >> root.sh
echo "exit 0" >> root.sh
# Ensure files are executed at startup (same shebang fix as above)
echo '#!/bin/bash' > /etc/rc.local
echo "/usr/security/root.sh" >> /etc/rc.local
echo "exit 0" >> /etc/rc.local
echo "Installation complete."
echo "Restarting..."
reboot
exit 0
| true
|
134508a2771f4ab5f23990411410de8a3dfe6927
|
Shell
|
js-scala/play2-on-dotcloud
|
/playframework/builder
|
UTF-8
| 349
| 3.0625
| 3
|
[] |
no_license
|
#!/bin/bash
# dotCloud build hook: copy the application into ~/application, wire its log
# directory to the Supervisor log area, and install the run script.
# Expects $SERVICE_APPROOT to name the app root within the build tree.
set -e

BUILDROOT="$(dirname "$0")"

echo 'Installing application'
rm -rf ~/application
# Quote the expansion so an approot containing spaces or glob characters is
# copied correctly (ShellCheck SC2086).
cp -R "./$SERVICE_APPROOT" ~/application

echo 'Symlinking application logs to Supervisor area...'
rm -rf ~/application/logs
ln -s /var/log/supervisor ~/application/logs

echo 'Installing run script...'
cp "$BUILDROOT/run" ~

echo 'Build complete.'
| true
|
a84703417e8dd38df0952d1f498fe6d56735ed89
|
Shell
|
cloudera/hue
|
/tools/ops/create_userprofile_json.sh
|
UTF-8
| 1,985
| 2.9375
| 3
|
[
"CC-BY-3.0",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference",
"ZPL-2.0",
"Unlicense",
"LGPL-3.0-only",
"CC0-1.0",
"LicenseRef-scancode-other-permissive",
"CNRI-Python",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-2.0-or-later",
"Python-2.0",
"GPL-3.0-only",
"CC-BY-4.0",
"LicenseRef-scancode-jpython-1.1",
"AFL-2.1",
"JSON",
"WTFPL",
"MIT",
"LicenseRef-scancode-generic-exception",
"LicenseRef-scancode-jython",
"GPL-3.0-or-later",
"LicenseRef-scancode-python-cwi",
"BSD-3-Clause",
"LGPL-3.0-or-later",
"Zlib",
"LicenseRef-scancode-free-unknown",
"Classpath-exception-2.0",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"MPL-2.0",
"ISC",
"GPL-2.0-only",
"ZPL-2.1",
"BSL-1.0",
"Apache-2.0",
"LGPL-2.0-only",
"LicenseRef-scancode-public-domain",
"Xnet",
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Reads a dump of auth_user records from $TXT_FILE ("pk"/"username" lines,
# records separated by "--" lines) and emits a Django fixture array of
# useradmin.userprofile objects on stdout.
# NOTE(review): HUE_BIN and JSON_FILE are exported but unused here —
# presumably consumed by a companion script; confirm.

export HUE_BIN=/opt/cloudera/parcels/CDH/share/hue/build/env/bin/hue
export JSON_FILE=/tmp/authuser.json
export TXT_FILE=/tmp/authuser.txt
export METHOD="EXTERNAL"

NEW="false"

echo "["
# On each "--" separator, emit the record parsed since the previous one.
# NOTE(review): if the input starts with "--", the first emitted object uses
# empty ID/USERNAME — confirm the dump never leads with a separator.
while read -r LINE
do
   if [[ ${LINE} =~ '--' ]]
   then
      NEW="true"
      echo "  {"
      echo "    \"pk\": $ID,"
      echo "    \"model\": \"useradmin.userprofile\","
      echo "    \"fields\": {"
      echo "      \"creation_method\": \"${METHOD}\","
      echo "      \"user\": $ID,"
      echo "      \"home_directory\": \"/user/$USERNAME\""
      echo "    }"
      # BUG FIX: the original never closed the object it opened above (only
      # the "fields" dict was closed), producing invalid fixture JSON.
      echo "  },"
   fi
   if [[ ${NEW} =~ "false" ]]
   then
      if [[ ${LINE} =~ "pk" ]]
      then
         ID=`echo ${LINE} | awk -F: '{print $2}' | awk -F, '{print $1}' | awk '{print $1}'`
      fi
      if [[ ${LINE} =~ "username" ]]
      then
         USERNAME=`echo ${LINE} | awk -F: '{print $2}' | awk -F, '{print $1}' | awk -F\" '{print $2}'`
      fi
   fi
   NEW="false"
done < "${TXT_FILE}"
# Emit the final record (no trailing comma) and close the array.
echo "  {"
echo "    \"pk\": $ID,"
echo "    \"model\": \"useradmin.userprofile\","
echo "    \"fields\": {"
echo "      \"creation_method\": \"${METHOD}\","
echo "      \"user\": $ID,"
echo "      \"home_directory\": \"/user/$USERNAME\""
echo "    }"
echo "  }"
echo "]"
| true
|
0b460736070094ec40cce4b910fe49e622ca632a
|
Shell
|
hemna/oh-my-zsh
|
/plugins/installer/installer.plugin.zsh
|
UTF-8
| 718
| 3.578125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/zsh
#
# System agnostic installer aliasing
#
# Assumes if you are on a mac, brew is used
# Suse = zypper
# ubuntu/debian = apt
# Redhat = yum
#
# Defines `in` (install) and `up` (upgrade) aliases for the detected system.
# $OS and $DIST are expected to be set by the sourced core.sh.
source "${0:h}/core.sh"
case $OS in
'mac')
# Homebrew runs unprivileged.
alias in='brew install'
alias up='brew upgrade'
;;
'linux')
# -E preserves the caller's environment (e.g. proxy settings) under sudo.
if [[ "$DIST" == "ubuntu" ]]; then
alias in="sudo -E apt-get install"
alias up="sudo -E apt-get upgrade"
elif [[ "$DIST" == "suse" ]]; then
alias in="sudo -E zypper in"
alias up="sudo -E zypper up"
elif [[ "$DIST" == "redhat" ]]; then
alias in="sudo -E yum install"
# BUG FIX: original aliased `up` to the nonexistent command "yup".
alias up="sudo -E yum upgrade"
fi
;;
*) ;;
esac
| true
|
15a8c34f7b16d4ec70b4bc9e26beff547d81e4e5
|
Shell
|
nhanhoangtran/shared_mem_with_mmap_unrelated_processes_example
|
/run.sh
|
UTF-8
| 491
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Build and run the shared-memory (mmap) demo: "set" publishes a value,
# "get" reads it from an unrelated process; both binaries are removed after.
# Prints 1 on Linux, 0 otherwise (stdout is the return channel).
function is_linux
{
if [[ "$(uname)" == "Linux" ]]; then
echo 1
return
fi
echo 0
}
SET="set"
GET="get"
CFLAGS=""
LDFLAGS=""
# Extra link flag on Linux only — presumably for the POSIX shm/rt functions
# in librt; macOS provides them in libc. TODO confirm against set.c/get.c.
if [[ "$(is_linux)" == "1" ]]; then
LDFLAGS="-lrt"
fi
cc ${CFLAGS} -o ${SET} ${SET}.c ${LDFLAGS}
cc ${CFLAGS} -o ${GET} ${GET}.c ${LDFLAGS}
# Writer runs in the background; /dev/shm listings show the shared-memory
# object appearing (after set) and its state after get.
./${SET} &
if [[ "$(is_linux)" == "1" ]]; then
echo "/dev/shm:"
ls -l /dev/shm
fi
sleep 1
./${GET}
if [[ "$(is_linux)" == "1" ]]; then
echo "/dev/shm:"
ls -l /dev/shm
fi
sleep 1
rm ${SET} ${GET}
| true
|
3d592aead02e6179b5e863e39cc087db228e2f60
|
Shell
|
sbortman/ossim-ubuntu
|
/builder/build-kakadu.sh
|
UTF-8
| 229
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Build the Kakadu SDK for Linux x86-64 with gcc.
# KAKADU_VERSION must be set in the environment by the caller.
cd /work/ossim-private/kakadu/${KAKADU_VERSION}
# Disabled historical patch: replace deprecated pthread_yield() with
# sched_yield() across the tree before building.
# for x in $(grep -l pthread_yield\(\) $(find . -type f)); do
# sed -i 's/pthread_yield();/sched_yield();/g' $x
# done
cd make
make -f Makefile-Linux-x86-64-gcc
| true
|
1acf86e99b724e3e42b20f7565d504cc9ba42104
|
Shell
|
himanshugharat/Shell_Programming_constructs
|
/sequence/ifProblems/placeNumbers.sh
|
UTF-8
| 907
| 3.796875
| 4
|
[] |
no_license
|
#program that takes day and month from command line and prints true if day of month is between
#march 20 and june 20 false otherwise
#!/bin/bash -x
# NOTE(review): the shebang above is not on line 1, so it is inert; the
# script runs under whatever shell invokes it.
read -p "Enter the month : " month
echo "$month"
# BUG FIX: the original read the date into the 'month' variable (clobbering
# it) and echoed the never-set $date; read into 'date' and echo it instead.
read -p "Enter the date : " date
echo "$date"
# March qualifies only after the 20th.
if [ "$month" == "MARCH" -o "$month" == "March" -o "$month" == "march" ]
then
if [ "$date" -gt 20 ]
then
echo "TRUE"
else
echo "FALSE"
fi
# April: the whole month (1..30) is inside the window.
elif [ "$month" == "April" -o "$month" == "APRIL" -o "$month" == "april" ]
then
if [ "$date" -lt 31 -a "$date" -gt 0 ]
then
echo "TRUE"
else
echo "FALSE"
fi
# May: the whole month (1..31) is inside the window.
elif [ "$month" == "May" -o "$month" == "MAY" -o "$month" == "may" ]
then
if [ "$date" -lt 32 -a "$date" -gt 0 ]
then
echo "TRUE"
else
echo "FALSE"
fi
# June qualifies only before the 20th.
elif [ "$month" == "June" -o "$month" == "JUNE" -o "$month" == "june" ]
then
if [ "$date" -lt 20 ]
then
echo "TRUE"
else
echo "FALSE"
fi
else
echo "month should be between march 20 and june 20 false"
fi
| true
|
056ac091ef6bbfa1979901a3f28742288f5c1f81
|
Shell
|
ruebenramirez/dev-elk-stack
|
/2-load-up-some-data.sh
|
UTF-8
| 380
| 3
| 3
|
[] |
no_license
|
#!/bin/bash
# Download the Kibana sample "shakespeare" dataset (once) and bulk-load it
# into a local Elasticsearch instance on the default port.
# does the dir exist
if [ ! -d data ]; then
mkdir data
fi
# have we already downloaded shakespeare?
if [ ! -f data/shakespeare.json ]; then
curl -o data/shakespeare.json http://www.elasticsearch.org/guide/en/kibana/current/snippets/shakespeare.json
fi
# put the data into elasticsearch
# NOTE(review): _bulk with no index in the URL relies on index metadata in
# the action lines of the file itself — confirm the snippet still ships them.
curl -XPUT localhost:9200/_bulk --data-binary @data/shakespeare.json
| true
|
af6001904ced6e2fd417005c553de914924ff4f2
|
Shell
|
marekolsak/fastgrid
|
/tests/bench_web.sh
|
UTF-8
| 1,136
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Benchmark fastgrid vs autogrid over a matrix of grid sizes and molecule
# counts, appending one result line per run to bench_results_web.
# Run one benchmark point: $1 = grid size, $2 = molecule count. The binary
# is chosen via the fg_executable env var, which rungridmol.sh presumably
# reads — confirm against that script.
run_it()
{
echo -n "$fg_executable $1 $2: " >> bench_results_web
./rungridmol.sh $1 $1 $1 $2 2>&1|tail -n 1 >> bench_results_web
tail -n 1 bench_results_web
}
# Benchmark both implementations on the same (grid, molecules) point.
bench_one()
{
export fg_executable=fastgrid4
run_it $1 $2
export fg_executable=autogrid4
run_it $1 $2
export fg_executable=
}
# Sweep molecule counts for one grid size; the small counts are only run for
# grids of at least 360.
bench_molecules()
{
if [ "$1" -ge "360" ]; then
bench_one $1 1000
bench_one $1 2500
bench_one $1 5000
bench_one $1 7500
bench_one $1 10000
bench_one $1 15000
fi
bench_one $1 20000
bench_one $1 25000
}
# Full sweep; most grid sizes are currently disabled to shorten the run.
bench_grids()
{
# bench_molecules 10
# bench_molecules 20
# bench_molecules 30
# bench_molecules 40
# bench_molecules 50
# bench_molecules 60
# bench_molecules 70
# bench_molecules 80
# bench_molecules 90
# bench_molecules 100
# bench_molecules 120
# bench_molecules 140
# bench_molecules 160
# bench_molecules 180
# bench_molecules 200
# bench_molecules 233
# bench_molecules 266
# bench_molecules 300
# bench_molecules 350
bench_molecules 400
bench_molecules 500
}
# Intentionally not clearing previous results (append across sessions).
#rm -f bench_results_web
bench_grids
| true
|
d50119418bf3c440b4b2e3b20d2ec2f5c6aebe0c
|
Shell
|
nategraff-sifive/zephyr-board-template
|
/customize_board.sh
|
UTF-8
| 1,165
| 3.9375
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Instantiate the Zephyr board template for a SiFive Freedom E-series chip:
# derive the board name from the containing directory, prompt for the ROM
# boot address, substitute placeholders in the template files, and rename
# the per-board files accordingly.
echo "Customizing board template for SiFive Freedom E-Series Chip"
BOARD_NAME=${PWD##*/}
# Kconfig-style identifier: '-' -> '_', upper-cased.
BOARD_IDENTIFIER=$(echo $BOARD_NAME | sed -e 's/-/_/g' | tr '[:lower:]' '[:upper:]')
echo "Using the name of the containing folder for the board identifier: $BOARD_NAME"
echo "Using the upcase of that as an identifier: $BOARD_IDENTIFIER"
# Anything other than an answer starting with n/N keeps the default address.
read -p "Do you want to boot from the default boot address 0x2040_0000? (Y/n): " DEFAULT_BASE_ADDR
case $DEFAULT_BASE_ADDR in
[Nn]* ) read -p "Enter ROM boot address in hex (ex. 0x20000000, 0x20400000): " ROM_BASE_ADDR;;
* ) ROM_BASE_ADDR="0x20400000";;
esac
# RENAME_FILES are templated AND renamed; the Kconfig files are only templated.
RENAME_FILES=("board_name.dts" "board_name.yaml" "board_name_defconfig")
TEMPLATE_FILES=("Kconfig.board" "Kconfig.defconfig" "${RENAME_FILES[@]}")
for template_file in "${TEMPLATE_FILES[@]}"
do
cp templates/$template_file ./
# NOTE(review): 'sed -i FILE -e ...' is the GNU form; BSD/macOS sed needs
# 'sed -i "" ...' — confirm the supported host platforms.
sed -i $template_file -e "s/<BOARD_IDENTIFIER>/$BOARD_IDENTIFIER/g" \
-e "s/<BOARD_NAME>/$BOARD_NAME/g" -e "s/<ROM_BASE_ADDR>/$ROM_BASE_ADDR/"
done
for rename_file in "${RENAME_FILES[@]}"
do
mv $rename_file $(echo $rename_file | sed -e "s/board_name/$BOARD_NAME/")
done
echo "Done customizing board files"
| true
|
48f6192f51a4f8bfaad9e0dc8aa8b423d87d1a13
|
Shell
|
kshub44kamble/BootCampProblem_shellscripting
|
/ARRAY_PROBLEM/SecondLarge.sh
|
UTF-8
| 209
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/bash
# Fill an array with 10 random numbers in [0,999], print them (sorted view
# first, then insertion order), and report the second-largest value — the
# script's stated purpose, which the original never computed.
range=10
i=0
while [ "$i" -lt "$range" ]
do
num=$RANDOM
arrName[$i]=$((num % 1000))
# Arithmetic expansion instead of the original `expr` fork per iteration.
i=$((i + 1))
done
# BUG FIX: the original ran this sort inside the loop, dumping the partial
# array on every iteration; sorting once after the array is complete.
sort -n <(printf "%s\n" "${arrName[@]}")
for number in "${arrName[@]}"
do
echo "$number"
done
# Second largest (duplicates count as separate entries).
secondLargest=$(printf "%s\n" "${arrName[@]}" | sort -n | tail -n 2 | head -n 1)
echo "Second largest: $secondLargest"
| true
|
d86d345f08a667f3d6d939ef0062e0ce2fb87198
|
Shell
|
GerritForge/gripmock
|
/entrypoint.sh
|
UTF-8
| 346
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Container entrypoint: generate a self-signed TLS certificate for nginx on
# first run, start nginx, then hand all arguments through to gripmock.
if [ ! -d /etc/nginx/ssl ]
then
  mkdir /etc/nginx/ssl
  CWD=$(pwd)
  cd /etc/nginx/ssl
  # One-year self-signed cert/key pair, no passphrase (-nodes).
  openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -nodes \
    -subj "/C=US/ST=NRW/L=Earth/O=CompanyName/OU=IT/CN=localhost/emailAddress=email@localhost"
  cd "$CWD"
fi
mkdir -p /run/nginx/ && nginx
# BUG FIX: "$@" must be quoted so arguments containing spaces reach gripmock
# intact (ShellCheck SC2068).
gripmock "$@"
| true
|
d362af3999791b808a73b5e020fb33bb97d918a7
|
Shell
|
Ravenspark/CD-Spotwelder
|
/sources/fixSPI
|
UTF-8
| 390
| 2.71875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# The current spi driver seems to have a bug when using 8bit mode and unaligned variables...
# As a quick workaround this file replaces the generated file with a fixed one

# The generated HAL source to swap out for its pre-patched ".fix" sibling.
spi_driver="./Drivers/STM32F0xx_HAL_Driver/Src/stm32f0xx_hal_spi.c"

rm "$spi_driver"
cp "${spi_driver}.fix" "$spi_driver"
echo "replaced faulty spi file"
| true
|
80a83966ead6da0589d939f17e68c1633c9b8048
|
Shell
|
someuser3/bin
|
/lab6
|
UTF-8
| 2,518
| 4.15625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Lab Objective: Create a script called lab6.sh that will check to see if each media file in the medialab directory is in medialab/media.xml
#
# List the filenames in the medialab directory that are not in media.xml
# Display the number of files in the medialab directory that are not in media.xml
# List the media files by name in media.xml that do not exist in the medialab directory
# Display the number of files that are in the media.xml that do not exist in the medialab directory
#
# Desired Output:
#
# Files not in media.xml:
# file1.mpg
# file2.mpg
# file3.mpg

# Files not in medialab directory:
# file4.mpg
# file5.mpg
# file6.mpg

# 3 media files in medialab directory that are NOT listed in media.xml
# 3 media files in media.xml that are NOT in medialab directory

# Changing field separator so filenames with spaces translate properly:
# BUG FIX: the original wrote `OldIFS=#IFS`, saving the literal string
# "#IFS", so the restore below corrupted IFS instead of restoring it.
OldIFS=$IFS
IFS=$'\n'

# Saving all the filenames in the medialab directory to a temp file:
ls -1 medialab > file.tmp

# Saving all the filenames from the XML file to a temp file:
grep filename < medialab/media.xml | awk -F'[<|>]' '{print $3}' | sort -r | uniq > xml.tmp

# Checking if files with filenames from XML file exist and creating a common list;
# Using while loop to read file due to reasons described here: http://mywiki.wooledge.org/DontReadLinesWithFor
# Style credits: http://stackoverflow.com/a/1521498
# (-r added so backslashes in filenames are not mangled by read)
while read -r LINE; do
if [[ -f medialab/$LINE ]]
then
echo "$LINE" >> CommonList.tmp
fi
done < xml.tmp

# Adding the common list to both temp files to prepare for awk processing:
cat CommonList.tmp >> file.tmp
cat CommonList.tmp >> xml.tmp

# Generating the output. Awk eliminates duplicates and outputs only unique values.
# Credits: http://stackoverflow.com/a/23740629
echo "Files not in media.xml:"
awk '{!seen[$0]++};END{for(i in seen) if(seen[i]==1)print i}' file.tmp > ufile.tmp
# ^ Sending the awk output to another temp file to simplify file count below.
cat ufile.tmp
echo
echo "Files not in medialab directory:"
awk '{!seen[$0]++};END{for(i in seen) if(seen[i]==1)print i}' xml.tmp | grep -ve '^$' > uxml.tmp
# ^ The last grep above removes an empty line; credits: http://stackoverflow.com/a/3432574
cat uxml.tmp
echo
echo "$(wc -l < ufile.tmp) media files in medialab directory that are NOT listed in media.xml"
echo "$(wc -l < uxml.tmp) media files in media.xml that are NOT in medialab directory"

# Cleaning up:
IFS=$OldIFS
rm -f file.tmp
rm -f ufile.tmp
rm -f xml.tmp
rm -f uxml.tmp
rm -f CommonList.tmp
echo
| true
|
0bf84212e2fbcef2a4da2f6d06d4bbfd9f0f56ba
|
Shell
|
oracle/dtrace-utils
|
/test/unittest/io/tst.nfs.sh
|
UTF-8
| 1,695
| 3.234375
| 3
|
[
"UPL-1.0"
] |
permissive
|
#!/bin/bash
#
# Oracle Linux DTrace.
# Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at
# http://oss.oracle.com/licenses/upl.
#
# Test the io:::start probe for write and read operations by creating
# a file and reading it back after clearing the caches.
#
# @@xfail: dtv2
dtrace=$1
# 1 MiB test file; each direction's byte counter must reach 90% of it to
# count as "expected" (tolerates short I/O).
filesize=$((1024*1024))
minsize=$((filesize / 10 * 9))
serverpath=`mktemp -u`
clientpath=`mktemp -u`
tempfile=`mktemp -u -p $clientpath`
statname="nfs"
# Tear down the loopback NFS mount, export, and temp paths on any exit.
trap "rm -f $tempfile; umount $clientpath; rmdir $clientpath; exportfs -u 127.0.0.1:$serverpath; rmdir $serverpath" QUIT EXIT
# setup NFS server
service nfs start > /dev/null 2>&1
mkdir $serverpath
exportfs -i -v -o "rw,sync,no_root_squash,insecure,fsid=8434437287" 127.0.0.1:$serverpath > /dev/null
# setup NFS client
mkdir $clientpath
test/triggers/io-mount-nfs.sh $clientpath $serverpath
# D script: sum b_bufsize for write/read io:::start events on the nfs device
# while doio.sh exercises the mount, then report whether each direction
# reached minsize.
$dtrace $dt_flags -c "test/triggers/doio.sh $tempfile $filesize test/triggers/io-mount-nfs.sh $clientpath $serverpath" -qs /dev/stdin <<EODTRACE
BEGIN
{
	byteswr = 0;
	bytesrd = 0;
}
io:::start
/(args[0]->b_flags & B_WRITE) != 0 && args[1]->dev_statname == "$statname"/
{
	byteswr += args[0]->b_bufsize;
}
io:::start
/(args[0]->b_flags & B_WRITE) == 0 && args[1]->dev_statname == "$statname"/
{
	bytesrd += args[0]->b_bufsize;
}
END
/byteswr >= $minsize/
{
	printf("wrote-expected: yes\n");
}
END
/byteswr < $minsize/
{
	printf("wrote-expected: no (%d / %d)\n", byteswr, $minsize);
}
END
/bytesrd >= $minsize/
{
	printf("read-expected: yes\n");
}
END
/bytesrd < $minsize/
{
	printf("read-expected: no (%d / %d)\n", bytesrd, $minsize);
}
EODTRACE
| true
|
25cafd681e2b948f0fa3d7674c7b825827047adb
|
Shell
|
Francesco10681/FunctionalClassificationEpigenome
|
/bedToNormalizedSignal.sh
|
UTF-8
| 7,127
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
###############################################################################################################################
### Preliminary analysis for the Data-Integration (ENCODE/Roadmap Epigenomics) project: #######################################
###############################################################################################################################
### statistical survey on the overlap between different ChIP-seq data and genes from the GenCode database @Francesco Gandolfi
###############################################################################################################################
#NB (Task); Convert processed BED files into Normalized Signal tracks.
# Two passes over (cell line x signal track): LOOP-1 records total informative
# read counts per sample group; LOOP-2 bins reads into 200bp windows and
# normalizes via the companion R scripts.
# 0.1 define root directorties
PROJECT_DIR=/home/fgandolfi/projects/ENCODE_project
DATA_DIR=${PROJECT_DIR}/data;
SCRIPT_DIR=/home/fgandolfi/scripts/ENCODE_project
# 0.2 Analysis directory
ANALYSIS_DIR=${PROJECT_DIR}/genomic_survey;
mkdir ${ANALYSIS_DIR};
# 0.3 Sample-datasheet
DATASHEET_SAMPLES_FILE=${DATA_DIR}/ENCODE_project_IMR90.tsv;
#DATASHEET_SAMPLES_FILE=${DATA_DIR}/ENCODE_project_datasheet_input_9-2015.tsv
# 0.4 Uniqueness mappability track
MAPPABILITY_TRACK=/home/fgandolfi/templates/wgEncodeDukeMapabilityUniqueness35bp.uniqueMapRegions.bedGraph
# 0.5 Chrominfo file
CHROMINFO_FILEPATH=${DATA_DIR}/chromInfo.txt
# 0.5 Be sure that you already have the k-binned genome file (k=bin size)
#/usr/bin/bedtools makewindows -g ${CHROMINFO_FILEPATH} -w 200 > ${ANALYSIS_DIR}/hg19binned.200bp.bed
# 0.6 Genomic bin size
GENOME_BINSIZE=200;
# 0.7 Define Binned hg19 genome path
BINNED_GENOME=${ANALYSIS_DIR}/hg19binned.200bp.bed
##############################
####### LOOP-1 ###############
##############################
# 1. Calculate the total number of informative reads mapped across all the datasets
# Take note of the number of informative reads in that sample to estimate total number of tags over all experiments in a cell line
# Define cell types
CELL_LINES=IMR90;
# Define signal-types
SIGNAL_TRACKS=(H3K27ac H3K9me3 H3K79me1 Pol2);
# NOTE(review): TAG_TOTALS is declared as an array but the `+=$var` form
# below string-appends to element 0, building a space-separated string —
# it works, but confirm that is intentional before refactoring.
TAG_TOTALS=(); # should be the vector of totals across all ChIPseq experiments for each cell line.
#sep=' ';
for CELL_LINE in ${CELL_LINES[@]};do
mkdir ${ANALYSIS_DIR}/${CELL_LINE}
for SIGNAL_TRACK in ${SIGNAL_TRACKS[@]};do
# 2.1 Define the output directory
OUT_DIR=${ANALYSIS_DIR}/${CELL_LINE}/${SIGNAL_TRACK};
mkdir ${OUT_DIR};
# Subset the datasheet to rows matching this cell line + signal track.
awk -F "\t" '$1=="'${CELL_LINE}'" && $3=="'${SIGNAL_TRACK}'" {print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t" $7 "\t" $8 "\t" $9}' ${DATASHEET_SAMPLES_FILE} > ${ANALYSIS_DIR}/ENCODE_project_datasheet_sub.tsv;
TAG_TOTALS=(); # should be the total of informative reads across all ChIP-seq samples in a given sample group (cell line/signal-track)
sep=' ';
while read LINE;do
SAMPLE_ID=$(echo ${LINE} | awk '{split($0,a," ");print a[4]}');
# 1.3.3 Identify and retrieve processed.bed file and its path
PROCESSED_FILEDIR=${DATA_DIR}/${CELL_LINE}/${SIGNAL_TRACK}/${SAMPLE_ID};
cd ${PROCESSED_FILEDIR};
PROCESSED_FILE=*.processed.bed
# One informative read per BED line: line count = informative tag count.
filelines=$(wc -l ${PROCESSED_FILE})
N_INFORMATIVE_TAGS=$(echo ${filelines} | awk '{split($0,a," ");print a[1]}');
echo "Increase the total count of informative reads across all ChIP-seq assay samples in ${CELL_LINE}: $SIGNAL_TRACK";
TAG_TOTALS+=$N_INFORMATIVE_TAGS;
TAG_TOTALS+=$sep;
done < ${ANALYSIS_DIR}/ENCODE_project_datasheet_sub.tsv;
echo ${TAG_TOTALS} > ${OUT_DIR}/${CELL_LINE}_${SIGNAL_TRACK}_allExp_totals.txt; # txt file contains the total read counts for all samples Bi in the group P.
done;
done;
##############################
####### LOOP-2 ###############
##############################
# 2. Once the cell-type and the signal-type are defined, extract all samples from the cell-type X and signal-type Y from the ENCODE_project_datasheet_samples.tsv
# then loop row by row on the subset
for CELL_LINE in ${CELL_LINES[@]};do
for SIGNAL_TRACK in ${SIGNAL_TRACKS[@]};do
# 2.1 Regenerate OUTPUT directory:
OUT_DIR=${ANALYSIS_DIR}/${CELL_LINE}/${SIGNAL_TRACK};
# 2.2 Create an empty the list of files
> ${ANALYSIS_DIR}/input_data_files.txt
# 2.3 Select from the full-datasheet file only raw corresponding to CELL_LINE and SIGNAL_TRACK
awk -F "\t" '$1=="'${CELL_LINE}'" && $3=="'${SIGNAL_TRACK}'" {print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t" $7 "\t" $8 "\t" $9}' ${DATASHEET_SAMPLES_FILE} > ${ANALYSIS_DIR}/ENCODE_project_datasheet_sub.tsv
# Read through the datasheet subset (foreach sample Bi belonging to the group X...)
while read LINE;do
# 2.3.1 get values in single fields;
SAMPLE_ID=$(echo ${LINE} | awk '{split($0,a," ");print a[4]}')
ALIGN_FILENAME=$(echo ${LINE} | awk '{split($0,a," ");print a[5]}')
# 2.3.2 check if the alignment file is available or not
if [ -z "$ALIGN_FILENAME" ]
then
continue
fi
# 2.3.3 Identify and retrieve processed.bed file and its path
PROCESSED_FILEDIR=${DATA_DIR}/${CELL_LINE}/${SIGNAL_TRACK}/${SAMPLE_ID};
cd ${PROCESSED_FILEDIR};
PROCESSED_FILE=*processed.bed
PROCESSED_FILEPATH=${PROCESSED_FILEDIR}/${PROCESSED_FILE};
# 2.3.4 Estimate raw counts in each genomic bin;
echo "intersect genomic bins with processed bed ${PROCESSED_FILEPATH} to estimate rawCounts"
/usr/bin/bedtools intersect -a ${ANALYSIS_DIR}/hg19binned.200bp.bed -b ${PROCESSED_FILEPATH} -c > ${OUT_DIR}/${SAMPLE_ID}.rawCounts.bed
# 2.3.5 Extract only informative bins in 'temp.bed' , and replace 'rawCounts' by this one.
awk -F "\t" '$4 > 0 {print $1 "\t" $2 "\t" $3 "\t" $4}' ${OUT_DIR}/${SAMPLE_ID}.rawCounts.bed > ${OUT_DIR}/temp.bed
mv ${OUT_DIR}/temp.bed ${OUT_DIR}/${SAMPLE_ID}.rawCounts.bed
# 2.3.6 Put the rawCount file name in a list for normalized signal estimation step
echo "${OUT_DIR}/${SAMPLE_ID}.rawCounts.bed" >> ${ANALYSIS_DIR}/input_data_files.txt
done < ${ANALYSIS_DIR}/ENCODE_project_datasheet_sub.tsv;
# 2.4 Process rawCounts; the R script takes the list of rawCount files just generated.
#INPUT: rawCounts files for a given group (cell-lineX/signaltrackY) /// OUTPUT: normalizedSignal files for the group (cell-lineX/signalrackY)
# arguments are: 1) datafile with raw counts
# 2) datasheet_samples_file.tsv
# 3) Mappability track file
# 4) genomic bin sizes
# 5) total read counts per cell type.txt
/usr/bin/Rscript ${SCRIPT_DIR}/binCount2normalizedSignal.R ${ANALYSIS_DIR}/input_data_files.txt ${DATASHEET_SAMPLES_FILE} ${MAPPABILITY_TRACK} ${BINNED_GENOME} ${GENOME_BINSIZE} ${OUT_DIR}/${CELL_LINE}_${SIGNAL_TRACK}_allExp_totals.txt;
# 2.5. Estimate normalizedSignal values from all samples (replicates/experiments from different labs) pooled together
cd ${OUT_DIR}
BEDG_EXPECTED_FILES=*expectedScore.bg
/usr/bin/bedtools unionbedg -i ${BEDG_EXPECTED_FILES[@]} > ${OUT_DIR}/expectedScoreSum.bg;
BEDG_OBSERVED_FILES=*observedScore.bg
/usr/bin/bedtools unionbedg -i ${BEDG_OBSERVED_FILES[@]} > ${OUT_DIR}/observedScoreSum.bg;
/usr/bin/Rscript ${SCRIPT_DIR}/getPooledNormalizedSignal.R ${OUT_DIR}/expectedScoreSum.bg ${OUT_DIR}/observedScoreSum.bg;
# Remove intermediate files:
done
done
### End script ####
| true
|
fdf5e32dc42056ae53ad5664f0d6284d3a03ecd5
|
Shell
|
faucetsdn/daq
|
/resources/runtime_configs/discover/port-01/ping_runtime.sh
|
UTF-8
| 171
| 2.53125
| 3
|
[
"Apache-2.0"
] |
permissive
|
# Count BACnet traffic (UDP port 47808) to/from the target device MAC in the
# monitor capture; the script's exit status is success only if at least one
# matching packet was captured.
bacnet_filter="ether host 9a:02:57:1e:8f:02 and port 47808"
# shellcheck disable=SC2086 -- the filter is intentionally word-split into
# separate pcap-filter tokens for tcpdump.
pkt_count=$(tcpdump -en -r $TEST_ROOT/scans/monitor.pcap $bacnet_filter | wc -l)
echo Found $pkt_count from $bacnet_filter
[ $pkt_count -gt 0 ]
| true
|
fa7fc6d3a7a51e86958aacc56389867c90e95208
|
Shell
|
navarr393/assignment1assembly
|
/run.sh
|
UTF-8
| 553
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Program: Perimeter (hybrid NASM assembly + C build-and-run script)
# Author:  F. Holliday
#
# Assembles perimeter.asm, compiles rectangle.c, links both objects and runs
# the result.  Each build step aborts the script on failure so a stale
# executable from a previous build is never executed.

# Delete build artifacts from previous runs (-f: no error if none exist).
rm -f *.o
rm -f *.out

# Fixed echo text: the script builds perimeter.asm / rectangle.c, not the
# files the original messages claimed.
echo "Assemble perimeter.asm"
nasm -f elf64 -l perimeter.lis -o perimeter.o perimeter.asm || exit 1

echo "Compile rectangle.c using the gcc compiler standard 2011"
gcc -c -Wall -m64 -no-pie -o rectangle.o rectangle.c -std=c11 || exit 1

echo "Link the object files using the gcc linker standard 2011"
gcc -m64 -no-pie -o executable.out rectangle.o perimeter.o -std=c11 || exit 1

echo "Run the program:"
./executable.out

echo "The script file will terminate"
| true
|
0a449421b841eaf465c0c0c646f03ab3e531732a
|
Shell
|
aschrab/dotfiles
|
/bin/cal
|
UTF-8
| 464
| 3.734375
| 4
|
[] |
no_license
|
#!/bin/bash
# Calendar wrapper: prefer gcal when installed, fall back to ncal.
# Early in a month the previous month is still interesting, so it is shown;
# otherwise show the current month plus the upcoming ones.
set -euo pipefail

# "early" = within the first week of the month.
# Note: bash's [ ] builtin parses "08"/"09" as decimal, so the zero-padded
# %d output is safe with single brackets (it would NOT be inside [[ ]],
# whose -lt uses arithmetic evaluation and rejects leading-zero octal).
if [ "$(date +%d)" -lt 8 ]; then
  early=y
else
  early=n
fi

# gcal path: '.' = previous + current + next month, '.+' = current + following.
if gcal --help >/dev/null 2>&1; then
  if [ "$early" = y ]; then
    gcal -sMon .
  else
    gcal -sMon .+
  fi
  exit
fi

# ncal fallback, built up as an argument array.
args=(
  -b   # Old-style (cal-like) output
  -M   # Week starts on Monday
)
if [ "$early" = y ]; then
  args+=(-3)    # current month plus the surrounding ones
else
  args+=(-A2)   # current month plus the two after it
fi
ncal "${args[@]}" "$@"
| true
|
d3e5941105777d0cdedf62a9c0fa32ff67161463
|
Shell
|
k3nno88/forcrawler
|
/parse_ad_sold.sh
|
UTF-8
| 1,486
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/zsh
# This script goes through all link scraped by parselink.sh and scrape its content
# Pipeline: normalize URLs -> build a work table of sold-listing links ->
# renumber its IDs 1..N -> scrape each row via ~/parse.sh -> drop the work table.
# NOTE(review): depends on a configured mysql login path "server", the
# scraping_data database, and the external ~/refresh_vpn.sh / ~/parse.sh scripts.
export PATH=/home/uh/anaconda3/bin:$PATH
export PATH=/home/uh/anaconda3/bin/scrapy:$PATH
source ~/anaconda3/etc/profile.d/conda.sh
#conda init zsh
conda activate base
cd ~/funda-sold
echo "DELETE VERKOCHT IN URL"
# This make all links in database have the same format
mysql --login-path=server -s -N scraping_data -e "UPDATE link_compare SET url = REPLACE(url, 'verkocht/','');"
echo "CREATE NEW TABLE"
# Put links into new table if that link's status found to be available in the database
mysql --login-path=server -s -N scraping_data -e "create table link_compare_sold as select * from link_compare where url in (select url from funda where sold = 'False');" # Need more elegant way
echo "Delete and readd ID"
# Rebuild the ID column so rows are numbered 1..N, letting the loop below
# iterate by consecutive integer ID.
mysql --login-path=server -s -N scraping_data -e "ALTER TABLE link_compare_sold DROP ID; ALTER TABLE link_compare_sold ADD ID INT NOT NULL AUTO_INCREMENT FIRST, ADD PRIMARY KEY (ID), AUTO_INCREMENT=1"
# NOTE(review): if link_compare_sold is empty, min/max return the string
# "NULL" and the numeric [ -le ] test below errors out — TODO confirm/guard.
count=$(mysql --login-path=server -s -N scraping_data -e "SELECT min(ID) FROM link_compare_sold")
max=$(mysql --login-path=server -s -N scraping_data -e "SELECT max(ID) FROM link_compare_sold")
# Presumably rotates the VPN endpoint before scraping to avoid blocking — verify.
~/refresh_vpn.sh
while [ $count -le $max ];
do
~/parse.sh $count funda_sold link_compare_sold # Scraping content
count=$((count+1))
done
echo "Truncate table, scraping finished"
mysql --login-path=server -s -N scraping_data -e "DROP TABLE link_compare_sold; TRUNCATE link_compare;"
| true
|
e1d999a0cf896e8bea55baa54730788e35be73c5
|
Shell
|
NCAR/container-dtc-nwp
|
/components/scripts/common/run_wrf.ksh
|
UTF-8
| 2,848
| 3.984375
| 4
|
[] |
no_license
|
#!/bin/ksh
#
# Simplified script to run WRF in Docker world
#
# Optional arguments (flag/value pairs):
#   -np N      total MPI process count (default 4; 1 selects a serial run)
#   -slots N   processes per host, written to the MPI hosts file (default 1)
#   -hosts H   comma-separated host list (default 127.0.0.1)
#   -iface I   network interface name (default eth0; parsed but unused below)
#
set -x

# Constants: container build tree plus the mounted data/script directories.
WRF_BUILD="/comsoftware/wrf"
INPUT_DIR="/data/case_data"
SCRIPT_DIR="/home/scripts/common"
CASE_DIR="/home/scripts/case"
WRFPRD_DIR="/home/wrfprd"
GSIPRD_DIR="/home/gsiprd"

# Check for the correct container
if [[ ! -e $WRF_BUILD ]]; then
  echo
  echo ERROR: wrf.exe can only be run with the dtc-wps_wrf container.
  echo
  exit 1
fi

# Check for input directory
if [[ ! -e $CASE_DIR ]]; then
  echo
  echo ERROR: The $CASE_DIR directory is not mounted.
  echo
  exit 1
fi

# Check for output directory
if [[ ! -e $WRFPRD_DIR ]]; then
  echo
  echo ERROR: The $WRFPRD_DIR directory is not mounted.
  echo
  exit 1
fi

cd $WRFPRD_DIR

# Include case-specific settings (provides e.g. WRF_VERSION)
. $CASE_DIR/set_env.ksh

# Initalize command line options
num_procs=4
process_per_host=1
iface=eth0
hosts=127.0.0.1

# Read in command line options, consumed in flag/value pairs.
while (( $# > 1 ))
do
  opt="$1"
  case $opt in
    "-np")
      num_procs="$2"
      shift
      ;;
    "-slots")
      process_per_host="$2"
      shift
      ;;
    "-hosts")
      hosts="$2"
      shift
      ;;
    "-iface")
      iface="$2"
      shift
      ;;
    *)
      echo "Usage: Incorrect"
      exit 1
      ;;
  esac
  shift
done

echo "slots = " $process_per_host
echo "iface = " $iface
echo "num_procs = " $num_procs
echo "hosts = " $hosts
# End sample argument list

# start ssh (needed so mpirun can reach the listed hosts)
/usr/sbin/sshd

##################################
# Run the WRF forecast model.    #
##################################

echo Running wrf.exe

# Link the run-time fixed files, then use the case's own namelist.
ln -sf $WRF_BUILD/WRF-${WRF_VERSION}/run/* .
rm -f namelist*
cp $CASE_DIR/namelist.input .

# If wrfinput_d01.orig exists, rename it to wrfinput_d01 to reset the state
if [[ -e wrfinput_d01.orig ]]; then
  mv wrfinput_d01.orig wrfinput_d01
fi

# If GSI was run, update the wrfinput file (keeping the original for resets)
if [[ -e $GSIPRD_DIR/wrf_inout ]]; then
  mv wrfinput_d01 wrfinput_d01.orig
  cp $GSIPRD_DIR/wrf_inout wrfinput_d01
fi

# BUG FIX: the original tested "[ num_procs -eq 1 ]" (literal string, missing
# the $), so the serial branch could never be selected.
if [ "$num_procs" -eq 1 ]; then
  # Run serial wrf
  ./wrf.exe > run_wrf.log 2>&1
else
  # Generate machine list: one "host slots=N" line per entry in $hosts
  IFS=,
  ary=($hosts)
  for key in "${!ary[@]}"; do echo "${ary[$key]} slots=${process_per_host}" >> $WRFPRD_DIR/hosts; done
  # Run wrf using mpi
  time mpirun -np $num_procs ./wrf.exe
fi

# Check success: output files must exist ...
ls -ls $WRFPRD_DIR/wrfo*
OK_wrfout=$?

# ... and WRF must also report success: sometimes there are output files even
# though WRF did not complete succesfully.
if [ $OK_wrfout -eq 0 ]; then
  grep "SUCCESS COMPLETE WRF" rsl.out.0000
  OK_wrfout=$?
fi

if [ $OK_wrfout -eq 0 ]; then
  tail rsl.error.0000
  echo
  echo OK wrf ran fine at `date`
  echo Completed WRF model
  echo
else
  cat rsl.error.0000
  echo
  echo ERROR: wrf.exe did not complete
  echo
  exit 66
fi

echo Done with wrf.exe
| true
|
8e3ebc8fea47d4a1b296f6f5d0666e35be1b1bcd
|
Shell
|
melo0187/codingground
|
/registered mail checksum/.bash_history
|
UTF-8
| 2,640
| 2.53125
| 3
|
[] |
no_license
|
s
ls
clear
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
clear
main
g++ -o main *.cpp
clear
main
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
g++ -o main *.cpp
g++ -o main *.cpp
g++ -o main *.cpp
g++ -o main *.cpp
main
g++ -o main *.cpp
g++ -o main *.cpp
main
lsls
ls
g++ -o main *.cpp
main
main
clear
ls
main
g++ -o main *.cpp
g++ -o main *.cpp
main
main
main
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
main
main
g++ -o main *.cpp
main
main
main
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
g++ -o main *.cpp
main
g++ -o main *.cpp
main
g++ -o main *.cpp
#include <iostream>
#include <stdlib.h>
#include <ctype.h>
using namespace std;
int main()
{ bool result = false; const int multipliers[] = {8, 6, 4, 2, 3, 5, 9, 7}; const int denominator = 11; const int minuend = 11;
cout << "Please enter tracking number for Deutsche Post registered mail: ";
//string input = "RR473124829DE113";
string input;
cin >> input;
const string digits = input.substr (2,8);
if ((digits && digits.size() != 8) ||
digits.find_first_not_of("0123456789") != std::string::npos ||
!isdigit(input[10]))
{
cout << "Char 3 to 11 must be digits for a valid tracking number!" << endl;
result = false;
}
else
{
int digits_array[8];
const int checkdigit = input[10] - '0';
int sum = 0;
int remainder_as_subtrahend;
int difference;
int calculated_checkdigit;
for(std::string::size_type i = 0; i < digits.size(); ++i) {
digits_array[i] = digits[i] - '0';
sum += digits_array[i] * multipliers[i];
}
remainder_as_subtrahend = sum % denominator;
difference = minuend - remainder_as_subtrahend;
switch(difference){
case 10 :
calculated_checkdigit = 0;
break;
case 11 :
calculated_checkdigit = 5;
break;
default :
calculated_checkdigit = difference;
}
result = calculated_checkdigit == checkdigit;
}
cout << (result == true ? "Check passed." : "Check failed!") << endl;
}g++ -o main *.cpp
clear
main
ls
clrea
g++ -o main *.cpp
g++ -o main *.cpp
main
mainmain
main
main
g++ -o main *.cpp
g++ -o main *.cpp
main
main
main
g++ -o main *.cpp
main
main
g++ -o main *.cpp
g++ -o main *.cpp
| true
|
6117b4769d4fe057e589a9dc35806c283be58319
|
Shell
|
kego1992/cli-1
|
/install-from-code.sh
|
UTF-8
| 1,364
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build the Contentstack CLI from source and install it globally via npm.
# Flow: prepare build dir -> install deps -> lerna bootstrap/pack/build ->
# global install of the generated tarball -> smoke-test the csdx command.

# ----------------------------------
# Colors
# ----------------------------------
# NOTE(review): these color variables are currently unused (the messages
# below embed literal '\e[31m' escapes instead); kept in case anything
# sources this script and relies on them.
NOCOLOR='\033[0m'
RED='\033[0;31m'
GREEN='\033[0;32m'
ORANGE='\033[0;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
LIGHTGRAY='\033[0;37m'
DARKGRAY='\033[1;30m'
LIGHTRED='\033[1;31m'
LIGHTGREEN='\033[1;32m'
YELLOW='\033[1;33m'
LIGHTBLUE='\033[1;34m'
LIGHTPURPLE='\033[1;35m'
LIGHTCYAN='\033[1;36m'
WHITE='\033[1;37m'

echo 'This is out dated script we will update it soon.'

mkdir -p build || { echo -e '\e[31mFailed to create build folder.' && exit 1; }
cp -r dependency build
npm install || { echo -e '\e[31mFailed to install root dependencies.' && exit 1; }
npx lerna bootstrap
npx lerna run pack || { echo -e '\e[31mInstallable tarball creation failed.' && exit 1; }
echo 'Building CLI package...'
npx lerna run build || { echo -e '\e[31mBuild generation failed for core module.' ; exit 1; }
echo 'Built CLI package, Done!!!'
# BUG FIX: abort if the packages directory is missing; otherwise the global
# install below would run from the wrong directory after a partial build.
cd build/@contentstack/packages || { echo -e '\e[31mBuild output directory build/@contentstack/packages is missing!!!' ; exit 1; }
echo 'Installing Contentstack CLI globally'
sudo npm install -g ./contentstack-cli.tgz || { echo -e '\e[31mGlobal installation failed for CLI module!!!' ; exit 1; }
echo 'Installtion, Done!!!'
echo 'Testing Contentstack Command...'
csdx || { echo -e '\e[31mSomething went wrong while build generation command not installed properly!!!' ; exit 1; }
echo 'Installtion completed successfully!!!'
| true
|
6a463ece84b1e41028a0c66bd9a7315f86d5561e
|
Shell
|
mhagander/pggit_migrate
|
/repository_fixups
|
UTF-8
| 118,328
| 2.53125
| 3
|
[] |
no_license
|
#!/bin/bash
set -e
# Kill some generated files that somehow had patches committed even after
# they were marked dead.
for r in 2.89 2.90 2.91; do rcs -x,v -sdead:$r /cvsroot/pgsql/src/backend/parser/Attic/gram.c ; done
for r in 1.3 1.4 1.5 1.6; do rcs -x,v -sdead:$r /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/pgc.c ; done
for r in 1.7 1.8 1.9 1.10 1.11 1.12; do rcs -x,v -sdead:$r /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/preproc.c ; done
for r in 1.3 1.4 1.5 1.6 1.7 1.8; do rcs -x,v -sdead:$r /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/y.tab.h ; done
# delete WIN32_DEV versions of generated files
rcs -x,v -o1.11.2.1: /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/preproc.c
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/preproc.c
rcs -x,v -o1.3.2.1: /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/preproc.h
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/preproc.h
rcs -x,v -o1.5.2.1: /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/pgc.c
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/interfaces/ecpg/preproc/Attic/pgc.c
rcs -x,v -o2.90.2.1: /cvsroot/pgsql/src/backend/parser/Attic/gram.c
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/backend/parser/Attic/gram.c
rcs -x,v -o2.20.2.1: /cvsroot/pgsql/src/backend/parser/Attic/parse.h
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/backend/parser/Attic/parse.h
rcs -x,v -o1.38.2.1: /cvsroot/pgsql/src/backend/parser/Attic/scan.c
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/backend/parser/Attic/scan.c
rcs -x,v -o1.1.2.1: /cvsroot/pgsql/src/backend/bootstrap/Attic/bootparse.c
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/backend/bootstrap/Attic/bootparse.c
rcs -x,v -o1.1.2.1: /cvsroot/pgsql/src/backend/bootstrap/Attic/bootscanner.c
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/backend/bootstrap/Attic/bootscanner.c
rcs -x,v -o1.1.2.1: /cvsroot/pgsql/src/backend/bootstrap/Attic/bootstrap_tokens.h
rcs -x,v -nWIN32_DEV /cvsroot/pgsql/src/backend/bootstrap/Attic/bootstrap_tokens.h
# these will have no live versions left at all, so just delete the RCS files
rm -f /cvsroot/pgsql/src/pl/plpgsql/src/Attic/pl.tab.h,v
rm -f /cvsroot/pgsql/src/pl/plpgsql/src/Attic/pl_gram.c,v
rm -f /cvsroot/pgsql/src/pl/plpgsql/src/Attic/pl_scan.c,v
# clean up HISTORY, INSTALL tags messed up by manual tag move
rcs -x,v -nREL7_3_10 /cvsroot/pgsql/Attic/HISTORY
rcs -x,v -nREL7_3_10 /cvsroot/pgsql/Attic/INSTALL
# Not clear how these got tagged ...
rcs -x,v -nREL8_0_23 /cvsroot/pgsql/contrib/xml2/sql/xml2.sql
rcs -x,v -nREL8_0_23 /cvsroot/pgsql/contrib/xml2/expected/xml2.out
rcs -x,v -nREL8_0_23 /cvsroot/pgsql/contrib/xml2/expected/xml2_1.out
# nor how these didn't get tagged ...
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/catalogs.ag
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/catalogs.cgm
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/catalogs.gif
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/catalogs.ps
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/clientserver.ag
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/clientserver.gif
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/connections.ag
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/doc/src/graphics/Attic/connections.gif
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/data/Attic/charset.conf
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/data/Attic/isocz-wincz.tab
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/data/Attic/koi-alt.tab
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/data/Attic/koi-iso.tab
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/data/Attic/koi-koi.tab
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/data/Attic/koi-mac.tab
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/data/Attic/koi-win.tab
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/cli/Attic/example1.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/cli/Attic/example2.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/cli/Attic/sqlcli.h
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/Makefile
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/connect.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/data.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/descriptor.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/error.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/execute.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/extern.h
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/memory.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/misc.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/pg_type.h
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/prepare.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/ecpg/lib/Attic/typename.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/Announce
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/ChangeLog
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/GNUmakefile
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/PyGreSQL.spec
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/README
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/Setup.in.raw
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/pg.py
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/pgdb.py
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/pgmodule.c
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/Attic/setup.py
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/tutorial/Attic/advanced.py
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/tutorial/Attic/basics.py
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/tutorial/Attic/func.py
rcs -x,v -nREL7_3_5:REL7_3_4 /cvsroot/pgsql/src/interfaces/python/tutorial/Attic/syscat.py
# another missed tag
rcs -x,v -nrelease-6-3:2.3 /cvsroot/pgsql/src/backend/parser/Attic/gram.c
# Relabel tag REL7_1 as REL7_1_BETA in various files that were deleted between
# the original placement of that tag and its renaming. The renaming action
# evidently missed files that'd been deleted in between.
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/contrib/linux/Attic/postgres.init.csh
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/contrib/linux/Attic/postgres.init.sh
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/Attic/FAQ_BSDI
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/Attic/README.mb
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/cidr
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/cnfify
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/flock
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/function
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/inherit
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/logging
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/memory
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/outer
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/pglog
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/subquery
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/about.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/environ.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/intro-ag.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/intro-pg.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/keys.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/oper.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/plan.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/populate.sgml
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/backend/port/hpux/Attic/port-protos.h
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/backend/storage/lmgr/Attic/multi.c
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/backend/storage/lmgr/Attic/single.c
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/bin/pgaccess/lib/Attic/qed
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/include/regex/Attic/cdefs.h
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/include/regex/Attic/regexp.h
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/Attic/README_6.3
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/org/postgresql/xa/Attic/Test.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/Connection.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/Driver.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/Field.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/PG_Stream.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/ResultSet.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/errors.properties
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/errors_fr.properties
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/fastpath/Attic/Fastpath.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/fastpath/Attic/FastpathArg.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGbox.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGcircle.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGline.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGlseg.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGpath.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGpoint.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGpolygon.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/CallableStatement.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/Connection.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/DatabaseMetaData.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/PreparedStatement.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/ResultSet.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/ResultSetMetaData.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/Statement.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/CallableStatement.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/Connection.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/DatabaseMetaData.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/PreparedStatement.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/ResultSet.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/ResultSetMetaData.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/Statement.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/largeobject/Attic/LargeObject.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/largeobject/Attic/LargeObjectManager.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PGmoney.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PGobject.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PGtokenizer.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PSQLException.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/Serialize.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/UnixCrypt.java
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/interfaces/libpq++/Attic/dependencies
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/pl/plpgsql/Attic/enable_plpgsql
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/pl/plpgsql/src/Attic/mklang.sql.in
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/test/regress/expected/Attic/abstime-1947-PDT.out
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/test/regress/expected/Attic/horology-1947-PDT.out
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/test/regress/expected/Attic/tinterval-1947-PDT.out
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/tools/mkldexport/Attic/Makefile
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/tools/mkldexport/Attic/README
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/tools/mkldexport/Attic/mkldexport.sh
rcs -x,v -nREL7_1_BETA:REL7_1 /cvsroot/pgsql/src/tools/Attic/release_prep
rcs -x,v -nREL7_1 /cvsroot/pgsql/contrib/linux/Attic/postgres.init.csh
rcs -x,v -nREL7_1 /cvsroot/pgsql/contrib/linux/Attic/postgres.init.sh
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/Attic/FAQ_BSDI
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/Attic/README.mb
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/cidr
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/cnfify
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/flock
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/function
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/inherit
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/logging
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/memory
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/outer
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/pglog
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/TODO.detail/Attic/subquery
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/about.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/environ.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/intro-ag.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/intro-pg.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/keys.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/oper.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/plan.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/doc/src/sgml/Attic/populate.sgml
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/backend/port/hpux/Attic/port-protos.h
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/backend/storage/lmgr/Attic/multi.c
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/backend/storage/lmgr/Attic/single.c
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/bin/pgaccess/lib/Attic/qed
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/include/regex/Attic/cdefs.h
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/include/regex/Attic/regexp.h
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/Attic/README_6.3
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/org/postgresql/xa/Attic/Test.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/Connection.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/Driver.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/Field.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/PG_Stream.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/ResultSet.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/errors.properties
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/Attic/errors_fr.properties
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/fastpath/Attic/Fastpath.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/fastpath/Attic/FastpathArg.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGbox.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGcircle.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGline.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGlseg.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGpath.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGpoint.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/geometric/Attic/PGpolygon.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/CallableStatement.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/Connection.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/DatabaseMetaData.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/PreparedStatement.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/ResultSet.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/ResultSetMetaData.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc1/Attic/Statement.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/CallableStatement.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/Connection.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/DatabaseMetaData.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/PreparedStatement.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/ResultSet.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/ResultSetMetaData.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/jdbc2/Attic/Statement.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/largeobject/Attic/LargeObject.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/largeobject/Attic/LargeObjectManager.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PGmoney.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PGobject.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PGtokenizer.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/PSQLException.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/Serialize.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/jdbc/postgresql/util/Attic/UnixCrypt.java
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/interfaces/libpq++/Attic/dependencies
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/pl/plpgsql/Attic/enable_plpgsql
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/pl/plpgsql/src/Attic/mklang.sql.in
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/test/regress/expected/Attic/abstime-1947-PDT.out
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/test/regress/expected/Attic/horology-1947-PDT.out
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/test/regress/expected/Attic/tinterval-1947-PDT.out
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/tools/mkldexport/Attic/Makefile
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/tools/mkldexport/Attic/README
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/tools/mkldexport/Attic/mkldexport.sh
rcs -x,v -nREL7_1 /cvsroot/pgsql/src/tools/Attic/release_prep
# Add REL2_0 tag and REL2_0B branch to some files that were missing them
# (probably this was an artifact of the old split-repository kluge)
# "rcs -n<tag>:<rev>" attaches symbolic name <tag> to revision <rev>
# inside the given ,v file; -x,v selects the RCS file suffix explicitly.
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/COPYRIGHT
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/Attic/HISTORY
rcs -x,v -nREL2_0:1.9 /cvsroot/pgsql/Attic/INSTALL
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/Attic/MIGRATION_to_1.02.1
rcs -x,v -nREL2_0:1.2 /cvsroot/pgsql/README
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/array/Attic/array_iterator.c
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/array/Attic/array_iterator.doc
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/array/Attic/array_iterator.sql
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/datetime/Attic/datetime_functions.c
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/datetime/Attic/datetime_functions.doc
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/datetime/Attic/datetime_functions.sql
rcs -x,v -nREL2_0:1.3 /cvsroot/pgsql/contrib/pginterface/Attic/Makefile
rcs -x,v -nREL2_0:1.4 /cvsroot/pgsql/contrib/pginterface/Attic/README
rcs -x,v -nREL2_0:1.3 /cvsroot/pgsql/contrib/pginterface/Attic/halt.c
rcs -x,v -nREL2_0:1.3 /cvsroot/pgsql/contrib/pginterface/Attic/halt.h
rcs -x,v -nREL2_0:1.3 /cvsroot/pgsql/contrib/pginterface/Attic/pginsert.c
rcs -x,v -nREL2_0:1.4 /cvsroot/pgsql/contrib/pginterface/Attic/pginterface.c
rcs -x,v -nREL2_0:1.4 /cvsroot/pgsql/contrib/pginterface/Attic/pginterface.h
rcs -x,v -nREL2_0:1.3 /cvsroot/pgsql/contrib/pginterface/Attic/pgnulltest.c
rcs -x,v -nREL2_0:1.3 /cvsroot/pgsql/contrib/pginterface/Attic/pgwordcount.c
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/soundex/Attic/soundex.c
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/soundex/Attic/soundex.sql
rcs -x,v -nREL2_0:1.2 /cvsroot/pgsql/contrib/string/Attic/string_io.c
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/string/Attic/string_io.sql
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/zap_ltv/Attic/README
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/contrib/zap_ltv/Attic/zap_ltv.pl
rcs -x,v -nREL2_0:1.3 /cvsroot/pgsql/doc/Attic/FAQ
rcs -x,v -nREL2_0:1.2 /cvsroot/pgsql/doc/Attic/FAQ-Irix
rcs -x,v -nREL2_0:1.2 /cvsroot/pgsql/doc/Attic/FAQ-Linux
rcs -x,v -nREL2_0:1.1 /cvsroot/pgsql/doc/Attic/MIGRATION_1.0_to_1.01
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/doc/Attic/README.flex
rcs -x,v -nREL2_0:1.1 /cvsroot/pgsql/doc/Attic/README.fsync
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/doc/Attic/README.support
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/doc/Attic/RELEASE.patchlevel
rcs -x,v -nREL2_0:1.2 /cvsroot/pgsql/doc/TODO
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/doc/bug.template
rcs -x,v -nREL2_0:1.2 /cvsroot/pgsql/doc/Attic/libpgtcl.doc
rcs -x,v -nREL2_0:1.1.1.1 /cvsroot/pgsql/doc/Attic/userguide.ps
# Same files again: point the REL2_0B branch tag at the corresponding
# CVS-style magic branch numbers (<rev>.0.<even>), i.e. the branch that
# forks off each revision tagged above.
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/COPYRIGHT
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/Attic/HISTORY
rcs -x,v -nREL2_0B:1.9.0.2 /cvsroot/pgsql/Attic/INSTALL
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/Attic/MIGRATION_to_1.02.1
rcs -x,v -nREL2_0B:1.2.0.2 /cvsroot/pgsql/README
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/array/Attic/array_iterator.c
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/array/Attic/array_iterator.doc
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/array/Attic/array_iterator.sql
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/datetime/Attic/datetime_functions.c
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/datetime/Attic/datetime_functions.doc
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/datetime/Attic/datetime_functions.sql
rcs -x,v -nREL2_0B:1.3.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/Makefile
rcs -x,v -nREL2_0B:1.4.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/README
rcs -x,v -nREL2_0B:1.3.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/halt.c
rcs -x,v -nREL2_0B:1.3.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/halt.h
rcs -x,v -nREL2_0B:1.3.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/pginsert.c
rcs -x,v -nREL2_0B:1.4.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/pginterface.c
rcs -x,v -nREL2_0B:1.4.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/pginterface.h
rcs -x,v -nREL2_0B:1.3.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/pgnulltest.c
rcs -x,v -nREL2_0B:1.3.0.2 /cvsroot/pgsql/contrib/pginterface/Attic/pgwordcount.c
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/soundex/Attic/soundex.c
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/soundex/Attic/soundex.sql
rcs -x,v -nREL2_0B:1.2.0.2 /cvsroot/pgsql/contrib/string/Attic/string_io.c
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/string/Attic/string_io.sql
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/zap_ltv/Attic/README
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/contrib/zap_ltv/Attic/zap_ltv.pl
rcs -x,v -nREL2_0B:1.3.0.2 /cvsroot/pgsql/doc/Attic/FAQ
rcs -x,v -nREL2_0B:1.2.0.2 /cvsroot/pgsql/doc/Attic/FAQ-Irix
rcs -x,v -nREL2_0B:1.2.0.2 /cvsroot/pgsql/doc/Attic/FAQ-Linux
rcs -x,v -nREL2_0B:1.1.0.2 /cvsroot/pgsql/doc/Attic/MIGRATION_1.0_to_1.01
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/doc/Attic/README.flex
# NOTE(review): README.fsync uses branch 1.1.0.8, not .0.2 — presumably
# earlier branches already claimed the lower magic numbers on this file.
rcs -x,v -nREL2_0B:1.1.0.8 /cvsroot/pgsql/doc/Attic/README.fsync
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/doc/Attic/README.support
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/doc/Attic/RELEASE.patchlevel
rcs -x,v -nREL2_0B:1.2.0.2 /cvsroot/pgsql/doc/TODO
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/doc/bug.template
rcs -x,v -nREL2_0B:1.2.0.2 /cvsroot/pgsql/doc/Attic/libpgtcl.doc
rcs -x,v -nREL2_0B:1.1.1.1.0.2 /cvsroot/pgsql/doc/Attic/userguide.ps
# Last step: hand-edit a handful of ,v files where a file was created on
# HEAD and only copied into release branches much later.  CVS makes it look
# as if those files existed on the back branch all along, so a hack is
# needed to record that they didn't.
# NOTE: the log messages for the dead revisions must match a regexp
# inside cvs2git, or it won't do what we want with these.
cd /cvsroot
# The ,v files are stored read-only; make each one writable so the
# patch applied below can modify it.
for rcsfile in \
	pgsql/contrib/cube/expected/cube_1.out,v \
	pgsql/doc/Attic/FAQ_HPUX,v \
	pgsql/doc/Attic/FAQ_czeck,v \
	pgsql/doc/Attic/FAQ_hungarian,v \
	pgsql/doc/Attic/FAQ_turkish,v \
	pgsql/doc/src/FAQ/Attic/FAQ_czech.html,v \
	pgsql/doc/src/FAQ/Attic/FAQ_hungarian.html,v \
	pgsql/doc/src/FAQ/Attic/FAQ_turkish.html,v \
	pgsql/src/backend/utils/cache/typcache.c,v \
	pgsql/src/bin/pg_dump/po/it.po,v \
	pgsql/src/include/utils/typcache.h,v \
	pgsql/src/port/unsetenv.c,v \
	pgsql/src/test/regress/expected/geometry_2.out,v \
	pgsql/src/test/regress/expected/update.out,v \
	pgsql/src/test/regress/sql/update.sql,v \
	pgsql/src/win32/Attic/ipc.patch,v \
	pgsql/README.CVS,v \
	pgsql/contrib/xml2/expected/xml2.out,v \
	pgsql/contrib/xml2/expected/xml2_1.out,v \
	pgsql/contrib/xml2/sql/xml2.sql,v \
	pgsql/doc/Attic/FAQ_brazilian,v \
	pgsql/doc/Attic/FAQ_chinese,v \
	pgsql/doc/Attic/FAQ_chinese_simp,v \
	pgsql/doc/Attic/FAQ_chinese_trad,v \
	pgsql/doc/Attic/FAQ_russian,v \
	pgsql/doc/Attic/README.Charsets,v \
	pgsql/doc/src/FAQ/Attic/FAQ_brazilian.html,v \
	pgsql/doc/src/FAQ/Attic/FAQ_chinese.html,v \
	pgsql/doc/src/FAQ/Attic/FAQ_chinese_simp.html,v \
	pgsql/doc/src/FAQ/Attic/FAQ_chinese_trad.html,v \
	pgsql/doc/src/FAQ/Attic/FAQ_russian.html,v \
	pgsql/doc/src/sgml/generate_history.pl,v \
	pgsql/doc/src/sgml/release-7.4.sgml,v \
	pgsql/doc/src/sgml/release-8.0.sgml,v \
	pgsql/doc/src/sgml/release-8.1.sgml,v \
	pgsql/doc/src/sgml/release-8.2.sgml,v \
	pgsql/doc/src/sgml/release-8.3.sgml,v \
	pgsql/doc/src/sgml/release-old.sgml,v \
	pgsql/src/backend/po/pt_BR.po,v \
	pgsql/src/backend/storage/file/copydir.c,v \
	pgsql/src/bin/pg_controldata/po/Attic/zh_TW.po,v \
	pgsql/src/bin/pg_controldata/po/tr.po,v \
	pgsql/src/bin/pg_resetxlog/po/tr.po,v \
	pgsql/src/bin/pgaccess/Attic/Makefile,v \
	pgsql/src/bin/pgaccess/Attic/Makefile.in,v \
	pgsql/src/bin/pgaccess/Attic/libpgtcl.dll,v \
	pgsql/src/bin/pgaccess/Attic/libpq.dll,v \
	pgsql/src/bin/pgaccess/Attic/pgaccess.sh,v \
	pgsql/src/bin/psql/po/tr.po,v \
	pgsql/src/bin/scripts/po/tr.po,v \
	pgsql/src/data/Attic/isocz-wincz.tab,v \
	pgsql/src/interfaces/jdbc/org/postgresql/Attic/errors_pt_BR.properties,v \
	pgsql/src/interfaces/jdbc/org/postgresql/test/jdbc2/Attic/OID74Test.java,v \
	pgsql/src/interfaces/libpq/po/tr.po,v \
	pgsql/src/interfaces/python/Attic/advanced.py,v \
	pgsql/src/interfaces/python/Attic/basics.py,v \
	pgsql/src/interfaces/python/Attic/func.py,v \
	pgsql/src/interfaces/python/Attic/mkdefines,v \
	pgsql/src/interfaces/python/Attic/pg.py,v \
	pgsql/src/interfaces/python/Attic/pgtools.py,v \
	pgsql/src/interfaces/python/Attic/syscat.py,v \
	pgsql/src/pl/plperl/plperl_opmask.pl,v \
	pgsql/src/pl/plpgsql/Attic/enable_plpgsql,v \
	pgsql/src/pl/plpython/expected/Attic/plpython_error_3.out,v \
	pgsql/src/test/regress/expected/Attic/join_1.out,v \
	pgsql/src/tools/version_stamp.pl,v
do
	chmod u+w "$rcsfile"
done
patch -p1 <<EOFEOF
diff -cr repo/pgsql/contrib/cube/expected/cube_1.out,v repo.patched/pgsql/contrib/cube/expected/cube_1.out,v
*** repo/pgsql/contrib/cube/expected/cube_1.out,v Sat Sep 4 07:18:25 2010
--- repo.patched/pgsql/contrib/cube/expected/cube_1.out,v Sat Sep 11 15:28:45 2010
***************
*** 213,220 ****
1.2
date 2005.06.27.01.19.43; author tgl; state Exp;
branches
! 1.2.2.1
! 1.2.4.1;
next 1.1;
1.1
--- 213,220 ----
1.2
date 2005.06.27.01.19.43; author tgl; state Exp;
branches
! 1.2.2.0
! 1.2.4.0;
next 1.1;
1.1
***************
*** 222,232 ****
--- 222,242 ----
branches;
next ;
+ 1.2.2.0
+ date 2005.06.27.01.19.43; author tgl; state dead;
+ branches;
+ next 1.2.2.1;
+
1.2.2.1
date 2005.07.16.20.11.12; author tgl; state Exp;
branches;
next ;
+ 1.2.4.0
+ date 2005.06.27.01.19.43; author tgl; state dead;
+ branches;
+ next 1.2.4.1;
+
1.2.4.1
date 2005.07.17.17.36.56; author tgl; state Exp;
branches;
***************
*** 1672,1677 ****
--- 1682,1695 ----
@
+ 1.2.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.4.1
log
@Looks like cube_1 variant is also needed in 7.3 branch.
***************
*** 2086,2091 ****
--- 2104,2117 ----
@
+ 1.2.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.2.1
log
@cube_1 variant is needed in 7.4 branch, per results from buildfarm
diff -cr repo/pgsql/doc/Attic/FAQ_HPUX,v repo.patched/pgsql/doc/Attic/FAQ_HPUX,v
*** repo/pgsql/doc/Attic/FAQ_HPUX,v Thu May 13 23:44:40 2010
--- repo.patched/pgsql/doc/Attic/FAQ_HPUX,v Sun Sep 12 12:39:34 2010
***************
*** 283,291 ****
1.1
date 98.11.28.23.10.22; author tgl; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.18.05.25.57; author momjian; state Exp;
branches;
--- 283,296 ----
1.1
date 98.11.28.23.10.22; author tgl; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.11.28.23.10.22; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.18.05.25.57; author momjian; state Exp;
branches;
***************
*** 1314,1319 ****
--- 1319,1332 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Prepare for 6.4.1.
diff -cr repo/pgsql/doc/Attic/FAQ_czeck,v repo.patched/pgsql/doc/Attic/FAQ_czeck,v
*** repo/pgsql/doc/Attic/FAQ_czeck,v Tue Dec 16 04:47:55 2003
--- repo.patched/pgsql/doc/Attic/FAQ_czeck,v Sun Sep 12 15:32:47 2010
***************
*** 14,22 ****
1.1
date 2003.12.13.16.56.00; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.12.15.15.30.17; author momjian; state Exp;
branches;
--- 14,27 ----
1.1
date 2003.12.13.16.56.00; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.12.13.16.56.00; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.12.15.15.30.17; author momjian; state Exp;
branches;
***************
*** 1239,1244 ****
--- 1244,1257 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add Czech FAQ to 7.4.X branch.
diff -cr repo/pgsql/doc/Attic/FAQ_hungarian,v repo.patched/pgsql/doc/Attic/FAQ_hungarian,v
*** repo/pgsql/doc/Attic/FAQ_hungarian,v Thu May 13 23:44:40 2010
--- repo.patched/pgsql/doc/Attic/FAQ_hungarian,v Sat Sep 11 12:39:17 2010
***************
*** 190,196 ****
1.3
date 2003.02.18.17.20.37; author momjian; state Exp;
branches
! 1.3.2.1
1.3.10.1;
next 1.2;
--- 190,196 ----
1.3
date 2003.02.18.17.20.37; author momjian; state Exp;
branches
! 1.3.2.0
1.3.10.1;
next 1.2;
***************
*** 204,209 ****
--- 204,214 ----
branches;
next ;
+ 1.3.2.0
+ date 2003.02.18.17.20.37; author momjian; state dead;
+ branches;
+ next 1.3.2.1;
+
1.3.2.1
date 2003.07.24.00.53.56; author momjian; state Exp;
branches;
***************
*** 3986,3991 ****
--- 3991,4004 ----
@
+ 1.3.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.3.2.1
log
@Add Hungarian/Turkish FAQ's to 7.3.4.
diff -cr repo/pgsql/doc/Attic/FAQ_turkish,v repo.patched/pgsql/doc/Attic/FAQ_turkish,v
*** repo/pgsql/doc/Attic/FAQ_turkish,v Thu May 13 23:44:40 2010
--- repo.patched/pgsql/doc/Attic/FAQ_turkish,v Sun Sep 12 12:39:41 2010
***************
*** 244,252 ****
1.1
date 2003.06.02.18.16.56; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.07.24.00.53.57; author momjian; state Exp;
branches;
--- 244,257 ----
1.1
date 2003.06.02.18.16.56; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.06.02.18.16.56; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.07.24.00.53.57; author momjian; state Exp;
branches;
***************
*** 6086,6091 ****
--- 6091,6104 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add Hungarian/Turkish FAQ's to 7.3.4.
diff -cr repo/pgsql/doc/src/FAQ/Attic/FAQ_czech.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_czech.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_czech.html,v Thu May 13 23:44:41 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_czech.html,v Sun Sep 12 15:31:27 2010
***************
*** 205,213 ****
1.1
date 2003.12.13.16.56.00; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.12.15.15.30.17; author momjian; state Exp;
branches;
--- 205,218 ----
1.1
date 2003.12.13.16.56.00; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.12.13.16.56.00; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.12.15.15.30.17; author momjian; state Exp;
branches;
***************
*** 3457,3462 ****
--- 3462,3475 ----
</body></html>@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add Czech FAQ to 7.4.X branch.
diff -cr repo/pgsql/doc/src/FAQ/Attic/FAQ_hungarian.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_hungarian.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_hungarian.html,v Thu May 13 23:39:10 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_hungarian.html,v Sat Sep 11 12:39:17 2010
***************
*** 111,117 ****
1.3
date 2005.06.06.16.30.42; author momjian; state Exp;
branches
! 1.3.2.1
1.3.10.1;
next 1.2;
--- 111,117 ----
1.3
date 2005.06.06.16.30.42; author momjian; state Exp;
branches
! 1.3.2.0
1.3.10.1;
next 1.2;
***************
*** 125,130 ****
--- 125,135 ----
branches;
next ;
+ 1.3.2.0
+ date 2005.06.06.16.30.42; author momjian; state dead;
+ branches;
+ next 1.3.2.1;
+
1.3.2.1
date 2005.10.04.14.17.44; author momjian; state Exp;
branches;
***************
*** 1850,1855 ****
--- 1855,1868 ----
@@
+ 1.3.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.3.2.1
log
@Add FAQ_hungarian.html to 8.0.X branch.
diff -cr repo/pgsql/doc/src/FAQ/Attic/FAQ_turkish.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_turkish.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_turkish.html,v Thu May 13 23:44:41 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_turkish.html,v Sun Sep 12 12:39:47 2010
***************
*** 254,262 ****
1.1
date 2003.06.02.18.16.56; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.07.24.00.53.57; author momjian; state Exp;
branches;
--- 254,267 ----
1.1
date 2003.06.02.18.16.56; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.06.02.18.16.56; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.07.24.00.53.57; author momjian; state Exp;
branches;
***************
*** 6175,6180 ****
--- 6180,6193 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add Hungarian/Turkish FAQ's to 7.3.4.
diff -cr repo/pgsql/src/backend/utils/cache/typcache.c,v repo.patched/pgsql/src/backend/utils/cache/typcache.c,v
*** repo/pgsql/src/backend/utils/cache/typcache.c,v Sat Sep 4 07:18:40 2010
--- repo.patched/pgsql/src/backend/utils/cache/typcache.c,v Sun Sep 12 10:38:01 2010
***************
*** 346,354 ****
1.1
date 2003.08.17.19.58.06; author tgl; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.09.07.04.36.55; author momjian; state Exp;
branches;
--- 346,359 ----
1.1
date 2003.08.17.19.58.06; author tgl; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.08.17.19.58.06; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.09.07.04.36.55; author momjian; state Exp;
branches;
***************
*** 2465,2470 ****
--- 2470,2483 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update this branch to match CVS head, includes WIN32 improvements.
diff -cr repo/pgsql/src/bin/pg_dump/po/it.po,v repo.patched/pgsql/src/bin/pg_dump/po/it.po,v
*** repo/pgsql/src/bin/pg_dump/po/it.po,v Sat Sep 4 07:18:42 2010
--- repo.patched/pgsql/src/bin/pg_dump/po/it.po,v Sat Sep 11 12:33:07 2010
***************
*** 173,179 ****
1.7
date 2010.02.19.00.40.04; author petere; state Exp;
branches
! 1.7.6.1;
next 1.6;
1.6
--- 173,179 ----
1.7
date 2010.02.19.00.40.04; author petere; state Exp;
branches
! 1.7.6.0;
next 1.6;
1.6
***************
*** 206,211 ****
--- 206,216 ----
branches;
next ;
+ 1.7.6.0
+ date 2010.02.19.00.40.04; author petere; state dead;
+ branches;
+ next 1.7.6.1;
+
1.7.6.1
date 2010.05.13.10.50.03; author petere; state Exp;
branches;
***************
*** 3636,3641 ****
--- 3641,3654 ----
@
+ 1.7.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.7.6.1
log
@Translation update
diff -cr repo/pgsql/src/include/utils/typcache.h,v repo.patched/pgsql/src/include/utils/typcache.h,v
*** repo/pgsql/src/include/utils/typcache.h,v Sat Sep 4 07:18:49 2010
--- repo.patched/pgsql/src/include/utils/typcache.h,v Sun Sep 12 10:59:10 2010
***************
*** 275,283 ****
1.1
date 2003.08.17.19.58.06; author tgl; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.09.07.04.37.09; author momjian; state Exp;
branches;
--- 275,288 ----
1.1
date 2003.08.17.19.58.06; author tgl; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.08.17.19.58.06; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.09.07.04.37.09; author momjian; state Exp;
branches;
***************
*** 914,919 ****
--- 919,932 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update this branch to match CVS head, includes WIN32 improvements.
diff -cr repo/pgsql/src/port/unsetenv.c,v repo.patched/pgsql/src/port/unsetenv.c,v
*** repo/pgsql/src/port/unsetenv.c,v Tue Sep 7 10:10:39 2010
--- repo.patched/pgsql/src/port/unsetenv.c,v Sat Sep 11 12:39:27 2010
***************
*** 203,210 ****
1.6
date 2005.11.22.18.17.34; author momjian; state Exp;
branches
! 1.6.2.1
! 1.6.4.1;
next 1.5;
1.5
--- 203,210 ----
1.6
date 2005.11.22.18.17.34; author momjian; state Exp;
branches
! 1.6.2.0
! 1.6.4.0;
next 1.5;
1.5
***************
*** 238,248 ****
--- 238,258 ----
branches;
next ;
+ 1.6.2.0
+ date 2005.11.22.18.17.34; author momjian; state dead;
+ branches;
+ next 1.6.2.1;
+
1.6.2.1
date 2006.01.05.00.51.25; author tgl; state Exp;
branches;
next ;
+ 1.6.4.0
+ date 2005.11.22.18.17.34; author momjian; state dead;
+ branches;
+ next 1.6.4.1;
+
1.6.4.1
date 2006.01.05.00.51.52; author tgl; state Exp;
branches;
***************
*** 433,438 ****
--- 443,456 ----
@
+ 1.6.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.6.4.1
log
@Add port support for unsetenv() in back branches. Needed for locale
***************
*** 445,450 ****
--- 463,476 ----
@
+ 1.6.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.6.2.1
log
@Add port support for unsetenv() in back branches. Needed for locale
diff -cr repo/pgsql/src/test/regress/expected/geometry_2.out,v repo.patched/pgsql/src/test/regress/expected/geometry_2.out,v
*** repo/pgsql/src/test/regress/expected/geometry_2.out,v Sat Sep 4 07:19:26 2010
--- repo.patched/pgsql/src/test/regress/expected/geometry_2.out,v Sat Sep 11 15:32:56 2010
***************
*** 198,204 ****
date 2004.12.02.01.34.17; author tgl; state Exp;
branches
1.2.4.1
! 1.2.6.1;
next 1.1;
1.1
--- 198,204 ----
date 2004.12.02.01.34.17; author tgl; state Exp;
branches
1.2.4.1
! 1.2.6.0;
next 1.1;
1.1
***************
*** 211,216 ****
--- 211,221 ----
branches;
next ;
+ 1.2.6.0
+ date 2004.12.02.01.34.17; author tgl; state dead;
+ branches;
+ next 1.2.6.1;
+
1.2.6.1
date 2005.07.16.18.39.26; author tgl; state Exp;
branches;
***************
*** 959,964 ****
--- 964,977 ----
@
+ 1.2.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.6.1
log
@The geometry_2 variant appears to be needed in 7.4 branch as well as
diff -cr repo/pgsql/src/test/regress/expected/update.out,v repo.patched/pgsql/src/test/regress/expected/update.out,v
*** repo/pgsql/src/test/regress/expected/update.out,v Sat Sep 4 07:19:26 2010
--- repo.patched/pgsql/src/test/regress/expected/update.out,v Sun Sep 12 10:58:42 2010
***************
*** 205,213 ****
1.1
date 2003.08.26.18.32.23; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.09.07.04.37.13; author momjian; state Exp;
branches;
--- 205,218 ----
1.1
date 2003.08.26.18.32.23; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.08.26.18.32.23; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.09.07.04.37.13; author momjian; state Exp;
branches;
***************
*** 440,445 ****
--- 445,458 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update this branch to match CVS head, includes WIN32 improvements.
diff -cr repo/pgsql/src/test/regress/sql/update.sql,v repo.patched/pgsql/src/test/regress/sql/update.sql,v
*** repo/pgsql/src/test/regress/sql/update.sql,v Sat Sep 4 07:19:26 2010
--- repo.patched/pgsql/src/test/regress/sql/update.sql,v Sun Sep 12 11:00:31 2010
***************
*** 205,213 ****
1.1
date 2003.08.26.18.32.23; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.09.07.04.37.13; author momjian; state Exp;
branches;
--- 205,218 ----
1.1
date 2003.08.26.18.32.23; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.08.26.18.32.23; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.09.07.04.37.13; author momjian; state Exp;
branches;
***************
*** 371,376 ****
--- 376,389 ----
DROP TABLE update_test;@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update this branch to match CVS head, includes WIN32 improvements.
diff -cr repo/pgsql/src/win32/Attic/ipc.patch,v repo.patched/pgsql/src/win32/Attic/ipc.patch,v
*** repo/pgsql/src/win32/Attic/ipc.patch,v Wed Dec 29 05:28:00 1999
--- repo.patched/pgsql/src/win32/Attic/ipc.patch,v Sat Sep 11 18:13:36 2010
***************
*** 24,32 ****
1.1
date 99.09.24.05.58.48; author inoue; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 99.12.29.10.14.46; author momjian; state Exp;
branches;
--- 24,37 ----
1.1
date 99.09.24.05.58.48; author inoue; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 99.09.24.05.58.48; author inoue; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 99.12.29.10.14.46; author momjian; state Exp;
branches;
***************
*** 362,367 ****
--- 367,380 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add NT patch.
diff -cra repo/pgsql/README.CVS,v repo.patched/pgsql/README.CVS,v
*** repo/pgsql/README.CVS,v Sat Sep 4 07:18:23 2010
--- repo.patched/pgsql/README.CVS,v Mon Sep 13 19:25:12 2010
***************
*** 185,193 ****
1.1
date 2004.03.10.00.28.11; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2004.03.10.00.28.27; author momjian; state Exp;
branches;
--- 185,198 ----
1.1
date 2004.03.10.00.28.11; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2004.03.10.00.28.11; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2004.03.10.00.28.27; author momjian; state Exp;
branches;
***************
*** 299,304 ****
--- 304,317 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Remove HISTORY and INSTALL. Have them generated by the tarball scripts.
diff -cra repo/pgsql/contrib/xml2/expected/xml2.out,v repo.patched/pgsql/contrib/xml2/expected/xml2.out,v
*** repo/pgsql/contrib/xml2/expected/xml2.out,v Mon Sep 13 13:39:08 2010
--- repo.patched/pgsql/contrib/xml2/expected/xml2.out,v Mon Sep 13 15:42:55 2010
***************
*** 37,69 ****
1.1
date 2010.02.28.21.31.57; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1;
next ;
1.1.2.1
date 2010.03.01.03.41.04; author tgl; state Exp;
branches;
next ;
1.1.4.1
date 2010.03.01.03.41.11; author tgl; state Exp;
branches;
next ;
1.1.6.1
date 2010.03.01.03.41.17; author tgl; state Exp;
branches;
next ;
1.1.8.1
date 2010.03.01.03.41.22; author tgl; state Exp;
branches;
next ;
1.1.10.1
date 2010.03.01.03.41.29; author tgl; state Exp;
branches;
--- 37,94 ----
1.1
date 2010.02.28.21.31.57; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0;
next ;
+ 1.1.2.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2010.03.01.03.41.04; author tgl; state Exp;
branches;
next ;
+ 1.1.4.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2010.03.01.03.41.11; author tgl; state Exp;
branches;
next ;
+ 1.1.6.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2010.03.01.03.41.17; author tgl; state Exp;
branches;
next ;
+ 1.1.8.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2010.03.01.03.41.22; author tgl; state Exp;
branches;
next ;
+ 1.1.10.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2010.03.01.03.41.29; author tgl; state Exp;
branches;
***************
*** 331,336 ****
--- 356,369 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 437,442 ****
--- 470,483 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 537,542 ****
--- 578,591 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 631,636 ****
--- 680,693 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 689,694 ****
--- 746,759 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Back-patch today's memory management fixups in contrib/xml2.
diff -cra repo/pgsql/contrib/xml2/expected/xml2_1.out,v repo.patched/pgsql/contrib/xml2/expected/xml2_1.out,v
*** repo/pgsql/contrib/xml2/expected/xml2_1.out,v Mon Sep 13 13:39:08 2010
--- repo.patched/pgsql/contrib/xml2/expected/xml2_1.out,v Mon Sep 13 15:39:36 2010
***************
*** 37,69 ****
1.1
date 2010.03.01.18.07.59; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1;
next ;
1.1.2.1
date 2010.03.01.18.08.07; author tgl; state Exp;
branches;
next ;
1.1.4.1
date 2010.03.01.18.08.16; author tgl; state Exp;
branches;
next ;
1.1.6.1
date 2010.03.01.18.08.27; author tgl; state Exp;
branches;
next ;
1.1.8.1
date 2010.03.01.18.08.34; author tgl; state Exp;
branches;
next ;
1.1.10.1
date 2010.03.01.18.08.41; author tgl; state Exp;
branches;
--- 37,94 ----
1.1
date 2010.03.01.18.07.59; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0;
next ;
+ 1.1.2.0
+ date 2010.03.01.18.07.59; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2010.03.01.18.08.07; author tgl; state Exp;
branches;
next ;
+ 1.1.4.0
+ date 2010.03.01.18.07.59; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2010.03.01.18.08.16; author tgl; state Exp;
branches;
next ;
+ 1.1.6.0
+ date 2010.03.01.18.07.59; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2010.03.01.18.08.27; author tgl; state Exp;
branches;
next ;
+ 1.1.8.0
+ date 2010.03.01.18.07.59; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2010.03.01.18.08.34; author tgl; state Exp;
branches;
next ;
+ 1.1.10.0
+ date 2010.03.01.18.07.59; author tgl; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2010.03.01.18.08.41; author tgl; state Exp;
branches;
***************
*** 261,266 ****
--- 286,299 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Fix contrib/xml2 so regression test still works when it's built without libxslt.
***************
*** 323,328 ****
--- 356,369 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Fix contrib/xml2 so regression test still works when it's built without libxslt.
***************
*** 379,384 ****
--- 420,433 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Fix contrib/xml2 so regression test still works when it's built without libxslt.
***************
*** 435,440 ****
--- 484,497 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Fix contrib/xml2 so regression test still works when it's built without libxslt.
***************
*** 457,462 ****
--- 514,527 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Fix contrib/xml2 so regression test still works when it's built without libxslt.
diff -cra repo/pgsql/contrib/xml2/sql/xml2.sql,v repo.patched/pgsql/contrib/xml2/sql/xml2.sql,v
*** repo/pgsql/contrib/xml2/sql/xml2.sql,v Mon Sep 13 13:39:08 2010
--- repo.patched/pgsql/contrib/xml2/sql/xml2.sql,v Mon Sep 13 15:45:53 2010
***************
*** 37,69 ****
1.1
date 2010.02.28.21.31.57; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1;
next ;
1.1.2.1
date 2010.03.01.03.41.04; author tgl; state Exp;
branches;
next ;
1.1.4.1
date 2010.03.01.03.41.11; author tgl; state Exp;
branches;
next ;
1.1.6.1
date 2010.03.01.03.41.17; author tgl; state Exp;
branches;
next ;
1.1.8.1
date 2010.03.01.03.41.22; author tgl; state Exp;
branches;
next ;
1.1.10.1
date 2010.03.01.03.41.29; author tgl; state Exp;
branches;
--- 37,94 ----
1.1
date 2010.02.28.21.31.57; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0;
next ;
+ 1.1.2.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2010.03.01.03.41.04; author tgl; state Exp;
branches;
next ;
+ 1.1.4.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2010.03.01.03.41.11; author tgl; state Exp;
branches;
next ;
+ 1.1.6.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2010.03.01.03.41.17; author tgl; state Exp;
branches;
next ;
+ 1.1.8.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2010.03.01.03.41.22; author tgl; state Exp;
branches;
next ;
+ 1.1.10.0
+ date 2010.02.28.21.31.57; author tgl; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2010.03.01.03.41.29; author tgl; state Exp;
branches;
***************
*** 248,253 ****
--- 273,286 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 309,314 ****
--- 342,355 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 369,374 ****
--- 410,423 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 429,434 ****
--- 478,491 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Back-patch today's memory management fixups in contrib/xml2.
***************
*** 446,451 ****
--- 503,516 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Back-patch today's memory management fixups in contrib/xml2.
diff -cra repo/pgsql/doc/Attic/FAQ_brazilian,v repo.patched/pgsql/doc/Attic/FAQ_brazilian,v
*** repo/pgsql/doc/Attic/FAQ_brazilian,v Thu May 13 23:44:40 2010
--- repo.patched/pgsql/doc/Attic/FAQ_brazilian,v Mon Sep 13 19:24:08 2010
***************
*** 218,226 ****
1.1
date 2004.06.10.03.46.08; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2004.06.10.03.46.40; author momjian; state Exp;
branches;
--- 218,231 ----
1.1
date 2004.06.10.03.46.08; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2004.06.10.03.46.08; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2004.06.10.03.46.40; author momjian; state Exp;
branches;
***************
*** 3027,3032 ****
--- 3032,3045 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add Brazilian version of FAQ.
diff -cra repo/pgsql/doc/Attic/FAQ_chinese,v repo.patched/pgsql/doc/Attic/FAQ_chinese,v
*** repo/pgsql/doc/Attic/FAQ_chinese,v Thu May 13 23:39:10 2010
--- repo.patched/pgsql/doc/Attic/FAQ_chinese,v Mon Sep 13 19:11:27 2010
***************
*** 99,107 ****
1.1
date 2005.05.11.02.11.05; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2005.05.11.02.13.21; author momjian; state Exp;
branches;
--- 99,112 ----
1.1
date 2005.05.11.02.11.05; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2005.05.11.02.11.05; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2005.05.11.02.13.21; author momjian; state Exp;
branches;
***************
*** 4438,4443 ****
--- 4443,4456 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Backpatch new Chinese FAQ to 8.0.X.
diff -cra repo/pgsql/doc/Attic/FAQ_chinese_simp,v repo.patched/pgsql/doc/Attic/FAQ_chinese_simp,v
*** repo/pgsql/doc/Attic/FAQ_chinese_simp,v Thu May 13 23:32:27 2010
--- repo.patched/pgsql/doc/Attic/FAQ_chinese_simp,v Mon Sep 13 19:07:31 2010
***************
*** 53,62 ****
1.1
date 2007.02.21.16.42.35; author momjian; state Exp;
branches
! 1.1.2.1
1.1.6.1;
next ;
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
--- 53,67 ----
1.1
date 2007.02.21.16.42.35; author momjian; state Exp;
branches
! 1.1.2.0
1.1.6.1;
next ;
+ 1.1.2.0
+ date 2007.02.21.16.42.35; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
***************
*** 918,923 ****
--- 923,936 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update Chinese FAQs to have two versions, a traditional Chinese version (Taiwan)
diff -cra repo/pgsql/doc/Attic/FAQ_chinese_trad,v repo.patched/pgsql/doc/Attic/FAQ_chinese_trad,v
*** repo/pgsql/doc/Attic/FAQ_chinese_trad,v Thu May 13 23:32:27 2010
--- repo.patched/pgsql/doc/Attic/FAQ_chinese_trad,v Mon Sep 13 19:07:07 2010
***************
*** 53,62 ****
1.1
date 2007.02.21.16.42.35; author momjian; state Exp;
branches
! 1.1.2.1
1.1.6.1;
next ;
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
--- 53,67 ----
1.1
date 2007.02.21.16.42.35; author momjian; state Exp;
branches
! 1.1.2.0
1.1.6.1;
next ;
+ 1.1.2.0
+ date 2007.02.21.16.42.35; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
***************
*** 910,915 ****
--- 915,928 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update Chinese FAQs to have two versions, a traditional Chinese version (Taiwan)
diff -cra repo/pgsql/doc/Attic/FAQ_russian,v repo.patched/pgsql/doc/Attic/FAQ_russian,v
*** repo/pgsql/doc/Attic/FAQ_russian,v Thu May 13 23:44:40 2010
--- repo.patched/pgsql/doc/Attic/FAQ_russian,v Mon Sep 13 19:32:10 2010
***************
*** 347,355 ****
1.1
date 2002.02.22.12.48.55; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2002.02.22.13.02.58; author momjian; state Exp;
branches;
--- 347,360 ----
1.1
date 2002.02.22.12.48.55; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2002.02.22.12.48.55; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2002.02.22.13.02.58; author momjian; state Exp;
branches;
***************
*** 9517,9522 ****
--- 9522,9535 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@BACKPATCH:
diff -cra repo/pgsql/doc/Attic/README.Charsets,v repo.patched/pgsql/doc/Attic/README.Charsets,v
*** repo/pgsql/doc/Attic/README.Charsets,v Wed Sep 13 22:17:28 2000
--- repo.patched/pgsql/doc/Attic/README.Charsets,v Mon Sep 13 19:36:29 2010
***************
*** 16,24 ****
1.1
date 99.08.16.20.27.17; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 99.08.16.20.29.10; author momjian; state Exp;
branches;
--- 16,29 ----
1.1
date 99.08.16.20.27.17; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 99.08.16.20.27.17; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 99.08.16.20.29.10; author momjian; state Exp;
branches;
***************
*** 163,168 ****
--- 168,181 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@ I've sent 3 mails to pgsql-patches. There are two files, one for doc
diff -cra repo/pgsql/doc/src/FAQ/Attic/FAQ_brazilian.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_brazilian.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_brazilian.html,v Thu May 13 23:44:41 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_brazilian.html,v Mon Sep 13 19:23:12 2010
***************
*** 233,241 ****
1.1
date 2004.06.10.03.46.11; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2004.06.10.03.46.45; author momjian; state Exp;
branches;
--- 233,246 ----
1.1
date 2004.06.10.03.46.11; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2004.06.10.03.46.11; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2004.06.10.03.46.45; author momjian; state Exp;
branches;
***************
*** 2917,2922 ****
--- 2922,2935 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add Brazilian version of FAQ.
diff -cra repo/pgsql/doc/src/FAQ/Attic/FAQ_chinese.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_chinese.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_chinese.html,v Thu May 13 23:39:10 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_chinese.html,v Mon Sep 13 19:13:00 2010
***************
*** 125,133 ****
1.1
date 2005.05.11.02.11.05; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2005.05.11.02.13.21; author momjian; state Exp;
branches;
--- 125,138 ----
1.1
date 2005.05.11.02.11.05; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2005.05.11.02.11.05; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2005.05.11.02.13.21; author momjian; state Exp;
branches;
***************
*** 10306,10311 ****
--- 10311,10324 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Backpatch new Chinese FAQ to 8.0.X.
diff -cra repo/pgsql/doc/src/FAQ/Attic/FAQ_chinese_simp.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_chinese_simp.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_chinese_simp.html,v Thu May 13 23:32:28 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_chinese_simp.html,v Mon Sep 13 19:06:37 2010
***************
*** 53,62 ****
1.1
date 2007.02.21.16.42.36; author momjian; state Exp;
branches
! 1.1.2.1
1.1.6.1;
next ;
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
--- 53,67 ----
1.1
date 2007.02.21.16.42.36; author momjian; state Exp;
branches
! 1.1.2.0
1.1.6.1;
next ;
+ 1.1.2.0
+ date 2007.02.21.16.42.36; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
***************
*** 1127,1132 ****
--- 1132,1145 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update Chinese FAQs to have two versions, a traditional Chinese version (Taiwan)
diff -cra repo/pgsql/doc/src/FAQ/Attic/FAQ_chinese_trad.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_chinese_trad.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_chinese_trad.html,v Thu May 13 23:32:28 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_chinese_trad.html,v Mon Sep 13 19:06:07 2010
***************
*** 53,62 ****
1.1
date 2007.02.21.16.42.36; author momjian; state Exp;
branches
! 1.1.2.1
1.1.6.1;
next ;
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
--- 53,67 ----
1.1
date 2007.02.21.16.42.36; author momjian; state Exp;
branches
! 1.1.2.0
1.1.6.1;
next ;
+ 1.1.2.0
+ date 2007.02.21.16.42.36; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2007.02.21.16.42.43; author momjian; state Exp;
branches;
***************
*** 1119,1124 ****
--- 1124,1137 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Update Chinese FAQs to have two versions, a traditional Chinese version (Taiwan)
diff -cra repo/pgsql/doc/src/FAQ/Attic/FAQ_russian.html,v repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_russian.html,v
*** repo/pgsql/doc/src/FAQ/Attic/FAQ_russian.html,v Thu May 13 23:44:41 2010
--- repo.patched/pgsql/doc/src/FAQ/Attic/FAQ_russian.html,v Mon Sep 13 19:33:05 2010
***************
*** 367,375 ****
1.1
date 2002.02.22.12.48.55; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2002.02.22.13.03.01; author momjian; state Exp;
branches;
--- 367,380 ----
1.1
date 2002.02.22.12.48.55; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2002.02.22.12.48.55; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2002.02.22.13.03.01; author momjian; state Exp;
branches;
***************
*** 8700,8705 ****
--- 8705,8718 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@BACKPATCH:
diff -cra repo/pgsql/doc/src/sgml/generate_history.pl,v repo.patched/pgsql/doc/src/sgml/generate_history.pl,v
*** repo/pgsql/doc/src/sgml/generate_history.pl,v Sat Sep 4 07:18:26 2010
--- repo.patched/pgsql/doc/src/sgml/generate_history.pl,v Mon Sep 13 15:56:11 2010
***************
*** 59,91 ****
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1;
next ;
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
next ;
1.1.4.1
date 2009.05.02.20.17.45; author tgl; state Exp;
branches;
next ;
1.1.6.1
date 2009.05.02.20.17.57; author tgl; state Exp;
branches;
next ;
1.1.8.1
date 2009.05.02.20.18.09; author tgl; state Exp;
branches;
next ;
1.1.10.1
date 2009.05.02.20.18.21; author tgl; state Exp;
branches;
--- 59,116 ----
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0;
next ;
+ 1.1.2.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
next ;
+ 1.1.4.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2009.05.02.20.17.45; author tgl; state Exp;
branches;
next ;
+ 1.1.6.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2009.05.02.20.17.57; author tgl; state Exp;
branches;
next ;
+ 1.1.8.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2009.05.02.20.18.09; author tgl; state Exp;
branches;
next ;
+ 1.1.10.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2009.05.02.20.18.21; author tgl; state Exp;
branches;
***************
*** 171,176 ****
--- 196,209 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 191,196 ****
--- 224,237 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 208,213 ****
--- 249,262 ----
@@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 225,230 ****
--- 274,287 ----
@@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 242,247 ****
--- 299,312 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Split the release notes into a separate file for each (active) major branch,
diff -cra repo/pgsql/doc/src/sgml/release-7.4.sgml,v repo.patched/pgsql/doc/src/sgml/release-7.4.sgml,v
*** repo/pgsql/doc/src/sgml/release-7.4.sgml,v Sat Sep 4 07:18:26 2010
--- repo.patched/pgsql/doc/src/sgml/release-7.4.sgml,v Mon Sep 13 19:04:54 2010
***************
*** 89,102 ****
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1
1.1.12.1;
next ;
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
--- 89,107 ----
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0
1.1.12.1;
next ;
+ 1.1.2.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
***************
*** 132,137 ****
--- 137,147 ----
branches;
next ;
+ 1.1.4.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2009.05.02.20.17.45; author tgl; state Exp;
branches;
***************
*** 167,172 ****
--- 177,187 ----
branches;
next ;
+ 1.1.6.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2009.05.02.20.17.57; author tgl; state Exp;
branches;
***************
*** 202,207 ****
--- 217,227 ----
branches;
next ;
+ 1.1.8.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2009.05.02.20.18.09; author tgl; state Exp;
branches;
***************
*** 237,242 ****
--- 257,267 ----
branches;
next ;
+ 1.1.10.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2009.05.02.20.18.21; author tgl; state Exp;
branches;
***************
*** 5493,5498 ****
--- 5518,5531 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 6107,6112 ****
--- 6140,6153 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 6721,6726 ****
--- 6762,6775 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7335,7340 ****
--- 7384,7397 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7949,7954 ****
--- 8006,8019 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Split the release notes into a separate file for each (active) major branch,
diff -cra repo/pgsql/doc/src/sgml/release-8.0.sgml,v repo.patched/pgsql/doc/src/sgml/release-8.0.sgml,v
*** repo/pgsql/doc/src/sgml/release-8.0.sgml,v Sat Sep 4 07:18:26 2010
--- repo.patched/pgsql/doc/src/sgml/release-8.0.sgml,v Mon Sep 13 19:04:23 2010
***************
*** 84,96 ****
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
1.1.10.1;
next ;
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
--- 84,101 ----
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
1.1.10.1;
next ;
+ 1.1.2.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
***************
*** 126,131 ****
--- 131,141 ----
branches;
next ;
+ 1.1.4.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2009.05.02.20.17.45; author tgl; state Exp;
branches;
***************
*** 161,166 ****
--- 171,181 ----
branches;
next ;
+ 1.1.6.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2009.05.02.20.17.57; author tgl; state Exp;
branches;
***************
*** 196,201 ****
--- 211,221 ----
branches;
next ;
+ 1.1.8.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2009.05.02.20.18.09; author tgl; state Exp;
branches;
***************
*** 6332,6337 ****
--- 6352,6365 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7093,7098 ****
--- 7121,7134 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7854,7859 ****
--- 7890,7903 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 8615,8620 ****
--- 8659,8672 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Split the release notes into a separate file for each (active) major branch,
diff -cra repo/pgsql/doc/src/sgml/release-8.1.sgml,v repo.patched/pgsql/doc/src/sgml/release-8.1.sgml,v
*** repo/pgsql/doc/src/sgml/release-8.1.sgml,v Sat Sep 4 07:18:27 2010
--- repo.patched/pgsql/doc/src/sgml/release-8.1.sgml,v Mon Sep 13 19:03:58 2010
***************
*** 85,96 ****
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
1.1.8.1;
next ;
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
--- 85,101 ----
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
1.1.8.1;
next ;
+ 1.1.2.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
***************
*** 126,131 ****
--- 131,141 ----
branches;
next ;
+ 1.1.4.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2009.05.02.20.17.45; author tgl; state Exp;
branches;
***************
*** 161,166 ****
--- 171,181 ----
branches;
next ;
+ 1.1.6.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2009.05.02.20.17.57; author tgl; state Exp;
branches;
***************
*** 6185,6190 ****
--- 6200,6213 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 6979,6984 ****
--- 7002,7015 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7773,7778 ****
--- 7804,7817 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Split the release notes into a separate file for each (active) major branch,
diff -cra repo/pgsql/doc/src/sgml/release-8.2.sgml,v repo.patched/pgsql/doc/src/sgml/release-8.2.sgml,v
*** repo/pgsql/doc/src/sgml/release-8.2.sgml,v Sat Sep 4 07:18:27 2010
--- repo.patched/pgsql/doc/src/sgml/release-8.2.sgml,v Mon Sep 13 19:03:30 2010
***************
*** 74,84 ****
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
1.1.6.1;
next ;
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
--- 74,89 ----
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
1.1.6.1;
next ;
+ 1.1.2.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
***************
*** 114,119 ****
--- 119,129 ----
branches;
next ;
+ 1.1.4.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2009.05.02.20.17.45; author tgl; state Exp;
branches;
***************
*** 7071,7076 ****
--- 7081,7094 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 8122,8127 ****
--- 8140,8153 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Split the release notes into a separate file for each (active) major branch,
diff -cra repo/pgsql/doc/src/sgml/release-8.3.sgml,v repo.patched/pgsql/doc/src/sgml/release-8.3.sgml,v
*** repo/pgsql/doc/src/sgml/release-8.3.sgml,v Sat Sep 4 07:18:27 2010
--- repo.patched/pgsql/doc/src/sgml/release-8.3.sgml,v Mon Sep 13 19:02:55 2010
***************
*** 69,78 ****
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.1
1.1.4.1;
next ;
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
--- 69,83 ----
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.0
1.1.4.1;
next ;
+ 1.1.2.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
***************
*** 7205,7210 ****
--- 7210,7223 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Split the release notes into a separate file for each (active) major branch,
diff -cra repo/pgsql/doc/src/sgml/release-old.sgml,v repo.patched/pgsql/doc/src/sgml/release-old.sgml,v
*** repo/pgsql/doc/src/sgml/release-old.sgml,v Sat Sep 4 07:18:27 2010
--- repo.patched/pgsql/doc/src/sgml/release-old.sgml,v Mon Sep 13 19:01:33 2010
***************
*** 64,77 ****
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1
1.1.24.1;
next ;
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
--- 64,82 ----
1.1
date 2009.05.02.20.17.19; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0
1.1.24.1;
next ;
+ 1.1.2.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2009.05.02.20.17.33; author tgl; state Exp;
branches;
***************
*** 82,87 ****
--- 87,97 ----
branches;
next ;
+ 1.1.4.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2009.05.02.20.17.45; author tgl; state Exp;
branches;
***************
*** 92,97 ****
--- 102,112 ----
branches;
next ;
+ 1.1.6.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2009.05.02.20.17.57; author tgl; state Exp;
branches;
***************
*** 102,107 ****
--- 117,127 ----
branches;
next ;
+ 1.1.8.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2009.05.02.20.18.09; author tgl; state Exp;
branches;
***************
*** 112,117 ****
--- 132,142 ----
branches;
next ;
+ 1.1.10.0
+ date 2009.05.02.20.17.19; author tgl; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2009.05.02.20.18.21; author tgl; state Exp;
branches;
***************
*** 7143,7148 ****
--- 7168,7181 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7160,7165 ****
--- 7193,7206 ----
@@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7188,7193 ****
--- 7229,7242 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7216,7221 ****
--- 7265,7278 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Split the release notes into a separate file for each (active) major branch,
***************
*** 7244,7249 ****
--- 7301,7314 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Split the release notes into a separate file for each (active) major branch,
diff -cra repo/pgsql/src/backend/po/pt_BR.po,v repo.patched/pgsql/src/backend/po/pt_BR.po,v
*** repo/pgsql/src/backend/po/pt_BR.po,v Sat Sep 4 07:18:36 2010
--- repo.patched/pgsql/src/backend/po/pt_BR.po,v Mon Sep 13 19:29:25 2010
***************
*** 243,251 ****
1.1
date 2003.11.14.23.59.12; author petere; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.11.15.10.20.01; author petere; state Exp;
branches;
--- 243,256 ----
1.1
date 2003.11.14.23.59.12; author petere; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.11.14.23.59.12; author petere; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.11.15.10.20.01; author petere; state Exp;
branches;
***************
*** 232106,232111 ****
--- 232111,232124 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Translation updates
diff -cra repo/pgsql/src/backend/storage/file/copydir.c,v repo.patched/pgsql/src/backend/storage/file/copydir.c,v
*** repo/pgsql/src/backend/storage/file/copydir.c,v Sat Sep 4 07:18:37 2010
--- repo.patched/pgsql/src/backend/storage/file/copydir.c,v Mon Sep 13 15:30:41 2010
***************
*** 19,27 ****
1.1
date 2010.07.02.17.03.30; author rhaas; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2010.07.02.17.03.38; author rhaas; state Exp;
branches;
--- 19,32 ----
1.1
date 2010.07.02.17.03.30; author rhaas; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2010.07.02.17.03.30; author rhaas; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2010.07.02.17.03.38; author rhaas; state Exp;
branches;
***************
*** 357,362 ****
--- 362,375 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Move copydir.c from src/port to src/backend/storage/file
diff -cra repo/pgsql/src/bin/pg_controldata/po/Attic/zh_TW.po,v repo.patched/pgsql/src/bin/pg_controldata/po/Attic/zh_TW.po,v
*** repo/pgsql/src/bin/pg_controldata/po/Attic/zh_TW.po,v Thu May 13 23:44:58 2010
--- repo.patched/pgsql/src/bin/pg_controldata/po/Attic/zh_TW.po,v Mon Sep 13 19:26:43 2010
***************
*** 173,181 ****
1.1
date 2004.01.05.19.15.48; author petere; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2004.01.05.19.15.55; author petere; state Exp;
branches;
--- 173,186 ----
1.1
date 2004.01.05.19.15.48; author petere; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2004.01.05.19.15.48; author petere; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2004.01.05.19.15.55; author petere; state Exp;
branches;
***************
*** 934,939 ****
--- 939,952 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Translation updates
diff -cra repo/pgsql/src/bin/pg_controldata/po/tr.po,v repo.patched/pgsql/src/bin/pg_controldata/po/tr.po,v
*** repo/pgsql/src/bin/pg_controldata/po/tr.po,v Sat Sep 4 07:18:42 2010
--- repo.patched/pgsql/src/bin/pg_controldata/po/tr.po,v Mon Sep 13 19:16:22 2010
***************
*** 190,196 ****
1.2
date 2004.10.18.17.56.45; author petere; state Exp;
branches
! 1.2.2.1
1.2.6.1;
next 1.1;
--- 190,196 ----
1.2
date 2004.10.18.17.56.45; author petere; state Exp;
branches
! 1.2.2.0
1.2.6.1;
next 1.1;
***************
*** 199,204 ****
--- 199,209 ----
branches;
next ;
+ 1.2.2.0
+ date 2004.10.18.17.56.45; author petere; state dead;
+ branches;
+ next 1.2.2.1;
+
1.2.2.1
date 2004.10.30.08.22.08; author petere; state Exp;
branches;
***************
*** 1666,1671 ****
--- 1671,1684 ----
@
+ 1.2.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.2.1
log
@New translations
diff -cra repo/pgsql/src/bin/pg_resetxlog/po/tr.po,v repo.patched/pgsql/src/bin/pg_resetxlog/po/tr.po,v
*** repo/pgsql/src/bin/pg_resetxlog/po/tr.po,v Sat Sep 4 07:18:44 2010
--- repo.patched/pgsql/src/bin/pg_resetxlog/po/tr.po,v Mon Sep 13 19:17:36 2010
***************
*** 206,214 ****
1.1
date 2004.10.12.18.01.20; author petere; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2004.10.30.08.22.11; author petere; state Exp;
branches;
--- 206,219 ----
1.1
date 2004.10.12.18.01.20; author petere; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2004.10.12.18.01.20; author petere; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2004.10.30.08.22.11; author petere; state Exp;
branches;
***************
*** 3923,3928 ****
--- 3928,3941 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@New translations
diff -cra repo/pgsql/src/bin/pgaccess/Attic/Makefile,v repo.patched/pgsql/src/bin/pgaccess/Attic/Makefile,v
*** repo/pgsql/src/bin/pgaccess/Attic/Makefile,v Thu May 5 22:26:53 2005
--- repo.patched/pgsql/src/bin/pgaccess/Attic/Makefile,v Mon Sep 13 19:38:58 2010
***************
*** 124,133 ****
1.1
date 98.12.18.17.54.42; author momjian; state Exp;
branches
! 1.1.2.1
1.1.4.1;
next ;
1.1.2.1
date 98.12.18.18.01.36; author momjian; state Exp;
branches;
--- 124,138 ----
1.1
date 98.12.18.17.54.42; author momjian; state Exp;
branches
! 1.1.2.0
1.1.4.1;
next ;
+ 1.1.2.0
+ date 98.12.18.17.54.42; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.18.18.01.36; author momjian; state Exp;
branches;
***************
*** 657,662 ****
--- 662,675 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Included are patches for doc/README.mb and README.mb.jp.
diff -cra repo/pgsql/src/bin/pgaccess/Attic/Makefile.in,v repo.patched/pgsql/src/bin/pgaccess/Attic/Makefile.in,v
*** repo/pgsql/src/bin/pgaccess/Attic/Makefile.in,v Sun Oct 31 14:56:37 1999
--- repo.patched/pgsql/src/bin/pgaccess/Attic/Makefile.in,v Mon Sep 13 19:34:25 2010
***************
*** 14,20 ****
1.2
date 99.10.31.12.23.41; author momjian; state Exp;
branches
! 1.2.2.1;
next 1.1;
1.1
--- 14,20 ----
1.2
date 99.10.31.12.23.41; author momjian; state Exp;
branches
! 1.2.2.0;
next 1.1;
1.1
***************
*** 22,27 ****
--- 22,32 ----
branches;
next ;
+ 1.2.2.0
+ date 99.10.31.12.23.41; author momjian; state dead;
+ branches;
+ next 1.2.2.1;
+
1.2.2.1
date 99.10.31.12.34.10; author momjian; state Exp;
branches;
***************
*** 91,96 ****
--- 96,109 ----
@
+ 1.2.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.2.1
log
@Update for 0.98 pgaccess
diff -cra repo/pgsql/src/bin/pgaccess/Attic/libpgtcl.dll,v repo.patched/pgsql/src/bin/pgaccess/Attic/libpgtcl.dll,v
*** repo/pgsql/src/bin/pgaccess/Attic/libpgtcl.dll,v Sat Oct 30 12:00:54 1999
--- repo.patched/pgsql/src/bin/pgaccess/Attic/libpgtcl.dll,v Mon Sep 13 21:51:42 2010
***************
*** 16,25 ****
1.1
date 98.12.12.21.15.40; author momjian; state Exp;
branches
! 1.1.2.1
1.1.4.1;
next ;
1.1.2.1
date 98.12.12.21.23.20; author momjian; state Exp;
branches;
--- 16,30 ----
1.1
date 98.12.12.21.15.40; author momjian; state Exp;
branches
! 1.1.2.0
1.1.4.1;
next ;
+ 1.1.2.0
+ date 98.12.12.21.15.40; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.12.21.23.20; author momjian; state Exp;
branches;
***************
*** 201,206 ****
--- 206,219 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@update pgaccess to 0.93.
diff -cra repo/pgsql/src/bin/pgaccess/Attic/libpq.dll,v repo.patched/pgsql/src/bin/pgaccess/Attic/libpq.dll,v
*** repo/pgsql/src/bin/pgaccess/Attic/libpq.dll,v Sat Oct 30 12:00:54 1999
--- repo.patched/pgsql/src/bin/pgaccess/Attic/libpq.dll,v Mon Sep 13 21:50:41 2010
***************
*** 16,25 ****
1.1
date 98.12.12.21.15.40; author momjian; state Exp;
branches
! 1.1.2.1
1.1.4.1;
next ;
1.1.2.1
date 98.12.12.21.23.21; author momjian; state Exp;
branches;
--- 16,30 ----
1.1
date 98.12.12.21.15.40; author momjian; state Exp;
branches
! 1.1.2.0
1.1.4.1;
next ;
+ 1.1.2.0
+ date 98.12.12.21.15.40; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.12.21.23.21; author momjian; state Exp;
branches;
***************
*** 322,327 ****
--- 327,340 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@update pgaccess to 0.93.
diff -cra repo/pgsql/src/bin/pgaccess/Attic/pgaccess.sh,v repo.patched/pgsql/src/bin/pgaccess/Attic/pgaccess.sh,v
*** repo/pgsql/src/bin/pgaccess/Attic/pgaccess.sh,v Thu May 5 22:26:53 2005
--- repo.patched/pgsql/src/bin/pgaccess/Attic/pgaccess.sh,v Mon Sep 13 19:35:17 2010
***************
*** 72,78 ****
1.2
date 99.10.31.12.23.41; author momjian; state Exp;
branches
! 1.2.2.1;
next 1.1;
1.1
--- 72,78 ----
1.2
date 99.10.31.12.23.41; author momjian; state Exp;
branches
! 1.2.2.0;
next 1.1;
1.1
***************
*** 80,85 ****
--- 80,90 ----
branches;
next ;
+ 1.2.2.0
+ date 99.10.31.12.23.41; author momjian; state dead;
+ branches;
+ next 1.2.2.1;
+
1.2.2.1
date 99.10.31.12.34.10; author momjian; state Exp;
branches;
***************
*** 218,223 ****
--- 223,236 ----
@
+ 1.2.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.2.1
log
@Update for 0.98 pgaccess
diff -cra repo/pgsql/src/bin/psql/po/tr.po,v repo.patched/pgsql/src/bin/psql/po/tr.po,v
*** repo/pgsql/src/bin/psql/po/tr.po,v Sat Sep 4 07:18:44 2010
--- repo.patched/pgsql/src/bin/psql/po/tr.po,v Mon Sep 13 19:18:43 2010
***************
*** 237,243 ****
1.2
date 2004.10.28.09.01.06; author petere; state Exp;
branches
! 1.2.2.1;
next 1.1;
1.1
--- 237,243 ----
1.2
date 2004.10.28.09.01.06; author petere; state Exp;
branches
! 1.2.2.0;
next 1.1;
1.1
***************
*** 245,250 ****
--- 245,255 ----
branches;
next ;
+ 1.2.2.0
+ date 2004.10.28.09.01.06; author petere; state dead;
+ branches;
+ next 1.2.2.1;
+
1.2.2.1
date 2004.10.30.08.22.13; author petere; state Exp;
branches;
***************
*** 27211,27216 ****
--- 27216,27229 ----
@
+ 1.2.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.2.1
log
@New translations
diff -cra repo/pgsql/src/bin/scripts/po/tr.po,v repo.patched/pgsql/src/bin/scripts/po/tr.po,v
*** repo/pgsql/src/bin/scripts/po/tr.po,v Sat Sep 4 07:18:45 2010
--- repo.patched/pgsql/src/bin/scripts/po/tr.po,v Mon Sep 13 19:19:47 2010
***************
*** 211,219 ****
1.1
date 2004.10.18.17.58.54; author petere; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2004.10.30.08.22.15; author petere; state Exp;
branches;
--- 211,224 ----
1.1
date 2004.10.18.17.58.54; author petere; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2004.10.18.17.58.54; author petere; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2004.10.30.08.22.15; author petere; state Exp;
branches;
***************
*** 5132,5137 ****
--- 5137,5150 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@New translations
diff -cra repo/pgsql/src/data/Attic/isocz-wincz.tab,v repo.patched/pgsql/src/data/Attic/isocz-wincz.tab,v
*** repo/pgsql/src/data/Attic/isocz-wincz.tab,v Mon Sep 13 13:39:08 2010
--- repo.patched/pgsql/src/data/Attic/isocz-wincz.tab,v Mon Sep 13 19:37:25 2010
***************
*** 57,65 ****
1.1
date 99.08.16.20.27.19; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 99.08.16.20.29.11; author momjian; state Exp;
branches;
--- 57,70 ----
1.1
date 99.08.16.20.27.19; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 99.08.16.20.27.19; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 99.08.16.20.29.11; author momjian; state Exp;
branches;
***************
*** 104,109 ****
--- 109,122 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@ I've sent 3 mails to pgsql-patches. There are two files, one for doc
diff -cra repo/pgsql/src/interfaces/jdbc/org/postgresql/Attic/errors_pt_BR.properties,v repo.patched/pgsql/src/interfaces/jdbc/org/postgresql/Attic/errors_pt_BR.properties,v
*** repo/pgsql/src/interfaces/jdbc/org/postgresql/Attic/errors_pt_BR.properties,v Thu May 13 23:45:02 2010
--- repo.patched/pgsql/src/interfaces/jdbc/org/postgresql/Attic/errors_pt_BR.properties,v Mon Sep 13 19:30:34 2010
***************
*** 44,52 ****
1.1
date 2003.11.14.23.59.12; author petere; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 2003.11.15.10.20.02; author petere; state Exp;
branches;
--- 44,57 ----
1.1
date 2003.11.14.23.59.12; author petere; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 2003.11.14.23.59.12; author petere; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2003.11.15.10.20.02; author petere; state Exp;
branches;
***************
*** 198,203 ****
--- 203,216 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Translation updates
diff -cra repo/pgsql/src/interfaces/jdbc/org/postgresql/test/jdbc2/Attic/OID74Test.java,v repo.patched/pgsql/src/interfaces/jdbc/org/postgresql/test/jdbc2/Attic/OID74Test.java,v
*** repo/pgsql/src/interfaces/jdbc/org/postgresql/test/jdbc2/Attic/OID74Test.java,v Thu May 13 23:45:02 2010
--- repo.patched/pgsql/src/interfaces/jdbc/org/postgresql/test/jdbc2/Attic/OID74Test.java,v Mon Sep 13 19:28:16 2010
***************
*** 48,54 ****
1.2
date 2003.12.17.15.45.05; author davec; state Exp;
branches
! 1.2.2.1;
next 1.1;
1.1
--- 48,54 ----
1.2
date 2003.12.17.15.45.05; author davec; state Exp;
branches
! 1.2.2.0;
next 1.1;
1.1
***************
*** 56,61 ****
--- 56,66 ----
branches;
next ;
+ 1.2.2.0
+ date 2003.12.17.15.45.05; author davec; state dead;
+ branches;
+ next 1.2.2.1;
+
1.2.2.1
date 2003.12.17.15.48.40; author davec; state Exp;
branches;
***************
*** 286,291 ****
--- 291,304 ----
@
+ 1.2.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.2.1
log
@back patching fix for compat 7.1 binary stream issues with the new protocol
diff -cra repo/pgsql/src/interfaces/libpq/po/tr.po,v repo.patched/pgsql/src/interfaces/libpq/po/tr.po,v
*** repo/pgsql/src/interfaces/libpq/po/tr.po,v Sat Sep 4 07:18:51 2010
--- repo.patched/pgsql/src/interfaces/libpq/po/tr.po,v Mon Sep 13 19:21:11 2010
***************
*** 210,216 ****
1.2
date 2004.10.28.09.01.06; author petere; state Exp;
branches
! 1.2.2.1;
next 1.1;
1.1
--- 210,216 ----
1.2
date 2004.10.28.09.01.06; author petere; state Exp;
branches
! 1.2.2.0;
next 1.1;
1.1
***************
*** 218,223 ****
--- 218,228 ----
branches;
next ;
+ 1.2.2.0
+ date 2004.10.28.09.01.06; author petere; state dead;
+ branches;
+ next 1.2.2.1;
+
1.2.2.1
date 2004.10.30.08.22.17; author petere; state Exp;
branches;
***************
*** 5568,5573 ****
--- 5573,5586 ----
@
+ 1.2.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.2.2.1
log
@New translations
diff -cra repo/pgsql/src/interfaces/python/Attic/advanced.py,v repo.patched/pgsql/src/interfaces/python/Attic/advanced.py,v
*** repo/pgsql/src/interfaces/python/Attic/advanced.py,v Mon May 10 12:10:39 1999
--- repo.patched/pgsql/src/interfaces/python/Attic/advanced.py,v Mon Sep 13 21:45:36 2010
***************
*** 14,22 ****
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
--- 14,27 ----
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.17.01.43.03; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
***************
*** 214,219 ****
--- 219,232 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Upgrade to Pygress 2.2.
diff -cra repo/pgsql/src/interfaces/python/Attic/basics.py,v repo.patched/pgsql/src/interfaces/python/Attic/basics.py,v
*** repo/pgsql/src/interfaces/python/Attic/basics.py,v Mon May 10 12:10:40 1999
--- repo.patched/pgsql/src/interfaces/python/Attic/basics.py,v Mon Sep 13 21:46:14 2010
***************
*** 14,22 ****
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
--- 14,27 ----
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.17.01.43.03; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
***************
*** 327,332 ****
--- 332,345 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Upgrade to Pygress 2.2.
diff -cra repo/pgsql/src/interfaces/python/Attic/func.py,v repo.patched/pgsql/src/interfaces/python/Attic/func.py,v
*** repo/pgsql/src/interfaces/python/Attic/func.py,v Mon May 10 12:10:41 1999
--- repo.patched/pgsql/src/interfaces/python/Attic/func.py,v Mon Sep 13 21:47:00 2010
***************
*** 14,22 ****
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
--- 14,27 ----
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.17.01.43.03; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
***************
*** 236,241 ****
--- 241,254 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Upgrade to Pygress 2.2.
diff -cra repo/pgsql/src/interfaces/python/Attic/mkdefines,v repo.patched/pgsql/src/interfaces/python/Attic/mkdefines,v
*** repo/pgsql/src/interfaces/python/Attic/mkdefines,v Fri Jun 22 13:48:39 2001
--- repo.patched/pgsql/src/interfaces/python/Attic/mkdefines,v Mon Sep 13 21:49:15 2010
***************
*** 44,52 ****
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
--- 44,57 ----
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.17.01.43.03; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
***************
*** 125,130 ****
--- 130,143 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Upgrade to Pygress 2.2.
diff -cra repo/pgsql/src/interfaces/python/Attic/pg.py,v repo.patched/pgsql/src/interfaces/python/Attic/pg.py,v
*** repo/pgsql/src/interfaces/python/Attic/pg.py,v Mon Sep 13 13:39:08 2010
--- repo.patched/pgsql/src/interfaces/python/Attic/pg.py,v Mon Sep 13 21:48:31 2010
***************
*** 131,139 ****
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
--- 131,144 ----
1.1
date 98.12.17.01.43.03; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.17.01.43.03; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.17.01.43.38; author momjian; state Exp;
branches;
***************
*** 958,963 ****
--- 963,976 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Upgrade to Pygress 2.2.
diff -cra repo/pgsql/src/interfaces/python/Attic/pgtools.py,v repo.patched/pgsql/src/interfaces/python/Attic/pgtools.py,v
*** repo/pgsql/src/interfaces/python/Attic/pgtools.py,v Mon May 10 12:10:45 1999
--- repo.patched/pgsql/src/interfaces/python/Attic/pgtools.py,v Mon Sep 13 21:47:42 2010
***************
*** 14,22 ****
1.1
date 98.12.17.01.43.04; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.17.01.43.39; author momjian; state Exp;
branches;
--- 14,27 ----
1.1
date 98.12.17.01.43.04; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.17.01.43.04; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.17.01.43.39; author momjian; state Exp;
branches;
***************
*** 91,96 ****
--- 96,109 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Upgrade to Pygress 2.2.
diff -cra repo/pgsql/src/interfaces/python/Attic/syscat.py,v repo.patched/pgsql/src/interfaces/python/Attic/syscat.py,v
*** repo/pgsql/src/interfaces/python/Attic/syscat.py,v Mon May 10 12:10:45 1999
--- repo.patched/pgsql/src/interfaces/python/Attic/syscat.py,v Mon Sep 13 21:44:22 2010
***************
*** 14,22 ****
1.1
date 98.12.17.01.43.04; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.17.01.43.39; author momjian; state Exp;
branches;
--- 14,27 ----
1.1
date 98.12.17.01.43.04; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.17.01.43.04; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.17.01.43.39; author momjian; state Exp;
branches;
***************
*** 176,181 ****
--- 181,194 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Upgrade to Pygress 2.2.
diff -cra repo/pgsql/src/pl/plperl/plperl_opmask.pl,v repo.patched/pgsql/src/pl/plperl/plperl_opmask.pl,v
*** repo/pgsql/src/pl/plperl/plperl_opmask.pl,v Sat Sep 4 07:18:51 2010
--- repo.patched/pgsql/src/pl/plperl/plperl_opmask.pl,v Mon Sep 13 15:35:19 2010
***************
*** 26,64 ****
1.1
date 2010.05.13.16.39.43; author adunstan; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1
! 1.1.12.1;
next ;
1.1.2.1
date 2010.05.13.16.40.36; author adunstan; state Exp;
branches;
next ;
1.1.4.1
date 2010.05.13.16.42.51; author adunstan; state Exp;
branches;
next ;
1.1.6.1
date 2010.05.13.16.43.14; author adunstan; state Exp;
branches;
next ;
1.1.8.1
date 2010.05.13.16.43.40; author adunstan; state Exp;
branches;
next ;
1.1.10.1
date 2010.05.13.16.44.03; author adunstan; state Exp;
branches;
next ;
1.1.12.1
date 2010.05.13.16.44.35; author adunstan; state Exp;
branches;
--- 26,94 ----
1.1
date 2010.05.13.16.39.43; author adunstan; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0
! 1.1.12.0;
next ;
+ 1.1.2.0
+ date 2010.05.13.16.39.43; author adunstan; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2010.05.13.16.40.36; author adunstan; state Exp;
branches;
next ;
+ 1.1.4.0
+ date 2010.05.13.16.39.43; author adunstan; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2010.05.13.16.42.51; author adunstan; state Exp;
branches;
next ;
+ 1.1.6.0
+ date 2010.05.13.16.39.43; author adunstan; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2010.05.13.16.43.14; author adunstan; state Exp;
branches;
next ;
+ 1.1.8.0
+ date 2010.05.13.16.39.43; author adunstan; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2010.05.13.16.43.40; author adunstan; state Exp;
branches;
next ;
+ 1.1.10.0
+ date 2010.05.13.16.39.43; author adunstan; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2010.05.13.16.44.03; author adunstan; state Exp;
branches;
next ;
+ 1.1.12.0
+ date 2010.05.13.16.39.43; author adunstan; state dead;
+ branches;
+ next 1.1.12.1;
+
1.1.12.1
date 2010.05.13.16.44.35; author adunstan; state Exp;
branches;
***************
*** 156,161 ****
--- 186,199 ----
@
+ 1.1.12.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.12.1
log
@Abandon the use of Perl's Safe.pm to enforce restrictions in plperl, as it is
***************
*** 198,203 ****
--- 236,249 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Abandon the use of Perl's Safe.pm to enforce restrictions in plperl, as it is
***************
*** 240,245 ****
--- 286,299 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Abandon the use of Perl's Safe.pm to enforce restrictions in plperl, as it is
***************
*** 282,287 ****
--- 336,349 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Abandon the use of Perl's Safe.pm to enforce restrictions in plperl, as it is
***************
*** 324,329 ****
--- 386,399 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Abandon the use of Perl's Safe.pm to enforce restrictions in plperl, as it is
***************
*** 366,371 ****
--- 436,449 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Abandon the use of Perl's Safe.pm to enforce restrictions in plperl, as it is
diff -cra repo/pgsql/src/pl/plpgsql/Attic/enable_plpgsql,v repo.patched/pgsql/src/pl/plpgsql/Attic/enable_plpgsql,v
*** repo/pgsql/src/pl/plpgsql/Attic/enable_plpgsql,v Mon Sep 13 13:39:09 2010
--- repo.patched/pgsql/src/pl/plpgsql/Attic/enable_plpgsql,v Mon Sep 13 19:40:19 2010
***************
*** 19,27 ****
1.1
date 98.12.13.05.22.58; author momjian; state Exp;
branches
! 1.1.2.1;
next ;
1.1.2.1
date 98.12.13.05.23.14; author momjian; state Exp;
branches;
--- 19,32 ----
1.1
date 98.12.13.05.22.58; author momjian; state Exp;
branches
! 1.1.2.0;
next ;
+ 1.1.2.0
+ date 98.12.13.05.22.58; author momjian; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 98.12.13.05.23.14; author momjian; state Exp;
branches;
***************
*** 117,122 ****
--- 122,135 ----
@@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Add enable_plpgsql from Oliver.
diff -cra repo/pgsql/src/pl/plpython/expected/Attic/plpython_error_3.out,v repo.patched/pgsql/src/pl/plpython/expected/Attic/plpython_error_3.out,v
*** repo/pgsql/src/pl/plpython/expected/Attic/plpython_error_3.out,v Thu May 13 23:36:03 2010
--- repo.patched/pgsql/src/pl/plpython/expected/Attic/plpython_error_3.out,v Mon Sep 13 16:15:12 2010
***************
*** 81,89 ****
1.1
date 2006.11.21.21.51.05; author tgl; state Exp;
branches
! 1.1.6.1;
next ;
1.1.6.1
date 2008.07.28.18.45.05; author tgl; state Exp;
branches;
--- 81,94 ----
1.1
date 2006.11.21.21.51.05; author tgl; state Exp;
branches
! 1.1.6.0;
next ;
+ 1.1.6.0
+ date 2006.11.21.21.51.05; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2008.07.28.18.45.05; author tgl; state Exp;
branches;
***************
*** 244,249 ****
--- 249,262 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Update 8.1 and 8.0 plpython to work with Python 2.5. This backports several
diff -cra repo/pgsql/src/test/regress/expected/Attic/join_1.out,v repo.patched/pgsql/src/test/regress/expected/Attic/join_1.out,v
*** repo/pgsql/src/test/regress/expected/Attic/join_1.out,v Thu May 13 23:39:33 2010
--- repo.patched/pgsql/src/test/regress/expected/Attic/join_1.out,v Mon Sep 13 19:13:39 2010
***************
*** 186,192 ****
1.3
date 2005.03.26.03.38.01; author tgl; state Exp;
branches
! 1.3.2.1;
next 1.2;
1.2
--- 186,192 ----
1.3
date 2005.03.26.03.38.01; author tgl; state Exp;
branches
! 1.3.2.0;
next 1.2;
1.2
***************
*** 199,204 ****
--- 199,209 ----
branches;
next ;
+ 1.3.2.0
+ date 2005.03.26.03.38.01; author tgl; state dead;
+ branches;
+ next 1.3.2.1;
+
1.3.2.1
date 2005.03.26.03.38.11; author tgl; state Exp;
branches;
***************
*** 4282,4287 ****
--- 4287,4300 ----
@
+ 1.3.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.3.2.1
log
@Add Windows-specific variant comparison file.
diff -cra repo/pgsql/src/tools/version_stamp.pl,v repo.patched/pgsql/src/tools/version_stamp.pl,v
*** repo/pgsql/src/tools/version_stamp.pl,v Sat Sep 4 07:19:26 2010
--- repo.patched/pgsql/src/tools/version_stamp.pl,v Mon Sep 13 16:12:29 2010
***************
*** 110,142 ****
1.1
date 2008.06.10.18.08.48; author tgl; state Exp;
branches
! 1.1.2.1
! 1.1.4.1
! 1.1.6.1
! 1.1.8.1
! 1.1.10.1;
next ;
1.1.2.1
date 2008.06.10.18.08.55; author tgl; state Exp;
branches;
next ;
1.1.4.1
date 2008.06.10.18.09.01; author tgl; state Exp;
branches;
next ;
1.1.6.1
date 2008.06.10.18.09.08; author tgl; state Exp;
branches;
next ;
1.1.8.1
date 2008.06.10.18.09.15; author tgl; state Exp;
branches;
next ;
1.1.10.1
date 2008.06.10.18.09.26; author tgl; state Exp;
branches;
--- 110,167 ----
1.1
date 2008.06.10.18.08.48; author tgl; state Exp;
branches
! 1.1.2.0
! 1.1.4.0
! 1.1.6.0
! 1.1.8.0
! 1.1.10.0;
next ;
+ 1.1.2.0
+ date 2008.06.10.18.08.48; author tgl; state dead;
+ branches;
+ next 1.1.2.1;
+
1.1.2.1
date 2008.06.10.18.08.55; author tgl; state Exp;
branches;
next ;
+ 1.1.4.0
+ date 2008.06.10.18.08.48; author tgl; state dead;
+ branches;
+ next 1.1.4.1;
+
1.1.4.1
date 2008.06.10.18.09.01; author tgl; state Exp;
branches;
next ;
+ 1.1.6.0
+ date 2008.06.10.18.08.48; author tgl; state dead;
+ branches;
+ next 1.1.6.1;
+
1.1.6.1
date 2008.06.10.18.09.08; author tgl; state Exp;
branches;
next ;
+ 1.1.8.0
+ date 2008.06.10.18.08.48; author tgl; state dead;
+ branches;
+ next 1.1.8.1;
+
1.1.8.1
date 2008.06.10.18.09.15; author tgl; state Exp;
branches;
next ;
+ 1.1.10.0
+ date 2008.06.10.18.08.48; author tgl; state dead;
+ branches;
+ next 1.1.10.1;
+
1.1.10.1
date 2008.06.10.18.09.26; author tgl; state Exp;
branches;
***************
*** 364,369 ****
--- 389,402 ----
@
+ 1.1.10.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.10.1
log
@Create a script to handle stamping release version numbers into files,
***************
*** 388,393 ****
--- 421,434 ----
@
+ 1.1.8.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.8.1
log
@Create a script to handle stamping release version numbers into files,
***************
*** 405,410 ****
--- 446,459 ----
@
+ 1.1.6.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.6.1
log
@Create a script to handle stamping release version numbers into files,
***************
*** 422,427 ****
--- 471,484 ----
@
+ 1.1.4.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.4.1
log
@Create a script to handle stamping release version numbers into files,
***************
*** 434,439 ****
--- 491,504 ----
@
+ 1.1.2.0
+ log
+ @file was added on branch on 0000-00-00 00:00:00
+ @
+ text
+ @@
+
+
1.1.2.1
log
@Create a script to handle stamping release version numbers into files,
EOFEOF
chmod u-w pgsql/contrib/cube/expected/cube_1.out,v
chmod u-w pgsql/doc/Attic/FAQ_HPUX,v
chmod u-w pgsql/doc/Attic/FAQ_czeck,v
chmod u-w pgsql/doc/Attic/FAQ_hungarian,v
chmod u-w pgsql/doc/Attic/FAQ_turkish,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_czech.html,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_hungarian.html,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_turkish.html,v
chmod u-w pgsql/src/backend/utils/cache/typcache.c,v
chmod u-w pgsql/src/bin/pg_dump/po/it.po,v
chmod u-w pgsql/src/include/utils/typcache.h,v
chmod u-w pgsql/src/port/unsetenv.c,v
chmod u-w pgsql/src/test/regress/expected/geometry_2.out,v
chmod u-w pgsql/src/test/regress/expected/update.out,v
chmod u-w pgsql/src/test/regress/sql/update.sql,v
chmod u-w pgsql/src/win32/Attic/ipc.patch,v
chmod u-w pgsql/README.CVS,v
chmod u-w pgsql/contrib/xml2/expected/xml2.out,v
chmod u-w pgsql/contrib/xml2/expected/xml2_1.out,v
chmod u-w pgsql/contrib/xml2/sql/xml2.sql,v
chmod u-w pgsql/doc/Attic/FAQ_brazilian,v
chmod u-w pgsql/doc/Attic/FAQ_chinese,v
chmod u-w pgsql/doc/Attic/FAQ_chinese_simp,v
chmod u-w pgsql/doc/Attic/FAQ_chinese_trad,v
chmod u-w pgsql/doc/Attic/FAQ_russian,v
chmod u-w pgsql/doc/Attic/README.Charsets,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_brazilian.html,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_chinese.html,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_chinese_simp.html,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_chinese_trad.html,v
chmod u-w pgsql/doc/src/FAQ/Attic/FAQ_russian.html,v
chmod u-w pgsql/doc/src/sgml/generate_history.pl,v
chmod u-w pgsql/doc/src/sgml/release-7.4.sgml,v
chmod u-w pgsql/doc/src/sgml/release-8.0.sgml,v
chmod u-w pgsql/doc/src/sgml/release-8.1.sgml,v
chmod u-w pgsql/doc/src/sgml/release-8.2.sgml,v
chmod u-w pgsql/doc/src/sgml/release-8.3.sgml,v
chmod u-w pgsql/doc/src/sgml/release-old.sgml,v
chmod u-w pgsql/src/backend/po/pt_BR.po,v
chmod u-w pgsql/src/backend/storage/file/copydir.c,v
chmod u-w pgsql/src/bin/pg_controldata/po/Attic/zh_TW.po,v
chmod u-w pgsql/src/bin/pg_controldata/po/tr.po,v
chmod u-w pgsql/src/bin/pg_resetxlog/po/tr.po,v
chmod u-w pgsql/src/bin/pgaccess/Attic/Makefile,v
chmod u-w pgsql/src/bin/pgaccess/Attic/Makefile.in,v
chmod u-w pgsql/src/bin/pgaccess/Attic/libpgtcl.dll,v
chmod u-w pgsql/src/bin/pgaccess/Attic/libpq.dll,v
chmod u-w pgsql/src/bin/pgaccess/Attic/pgaccess.sh,v
chmod u-w pgsql/src/bin/psql/po/tr.po,v
chmod u-w pgsql/src/bin/scripts/po/tr.po,v
chmod u-w pgsql/src/data/Attic/isocz-wincz.tab,v
chmod u-w pgsql/src/interfaces/jdbc/org/postgresql/Attic/errors_pt_BR.properties,v
chmod u-w pgsql/src/interfaces/jdbc/org/postgresql/test/jdbc2/Attic/OID74Test.java,v
chmod u-w pgsql/src/interfaces/libpq/po/tr.po,v
chmod u-w pgsql/src/interfaces/python/Attic/advanced.py,v
chmod u-w pgsql/src/interfaces/python/Attic/basics.py,v
chmod u-w pgsql/src/interfaces/python/Attic/func.py,v
chmod u-w pgsql/src/interfaces/python/Attic/mkdefines,v
chmod u-w pgsql/src/interfaces/python/Attic/pg.py,v
chmod u-w pgsql/src/interfaces/python/Attic/pgtools.py,v
chmod u-w pgsql/src/interfaces/python/Attic/syscat.py,v
chmod u-w pgsql/src/pl/plperl/plperl_opmask.pl,v
chmod u-w pgsql/src/pl/plpgsql/Attic/enable_plpgsql,v
chmod u-w pgsql/src/pl/plpython/expected/Attic/plpython_error_3.out,v
chmod u-w pgsql/src/test/regress/expected/Attic/join_1.out,v
chmod u-w pgsql/src/tools/version_stamp.pl,v
| true
|
29c3dabf0ec0428653e4eaad322313761623f8fa
|
Shell
|
dhungvi/publiy
|
/publiy/misc/bash_bin/extract_pathlength_deliveries
|
UTF-8
| 3,613
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
# Extract per-delivery path-length statistics from a benchmark results dir.
# NOTE(review): the early "exit 0" below disables the whole script — it is
# kept as-is because removing it would change behavior; confirm intent.
exit 0;
# Shared helpers: red/blue/yellow/green loggers and wait_on_stop — TODO
# confirm these are what common.sh provides.
source common.sh
# The working directory must be supplied as the first argument.
if [ -z "$1" ]; then
  red "Missing working dir ($0)";
  exit 1; fi   # was "exit -1" (maps to 255); 1 is the conventional failure code
workingdir="$1";
#######################################
# Stage 1: build the raw delivery/path-length table.
# Reads every publisher (p*.properties) and subscriber (s*.properties)
# config under the results dir to map port -> node name and
# node name -> machine IP, then scans each subscriber's BC-* result file
# for DELIV lines and emits one row per delivery:
#   <sub> <sub-ip>\t<pub> <pub-ip> <pathlength>
# Globals written: all_ports, all_machines, all_subs, all_pubs,
#                  portstr, nodestr (awk snippets built up per node).
# Arguments: $1 results dir, $2 temp dir, $3 plots dir.
# Output file: $plotsdir/delivery_pathlengths
#######################################
process1() {
local resultsdir="$1";
local tempdir="$2";
local plotsdir="$3";
blue "Process 1";
all_ports="";
all_machines="";
all_subs="";
all_pubs="";
# Process publishers: NodeAddress lines look like "...\\ip:...:port=..."
# (FS splits on backslash-pairs, ':' and '='; the quadruple backslash is
# shell-then-awk escaping for a literal backslash).
for pubprop in $resultsdir/p*.properties; do
node=`awk 'BEGIN{FS="="} /NodeName/{printf $NF}' $pubprop`;
machineip=`awk 'BEGIN{FS="\\\\\\\\|:|="} /NodeAddress/ {printf $2}' $pubprop`;
port=`awk 'BEGIN{FS="\\\\\\\\|:|="} /NodeAddress/ {printf $4}' $pubprop`;
# Accumulate awk array-initializer snippets used in the final pass below.
portstr="$portstr port[$port]=\"$node\";";
nodestr="$nodestr node[\"$node\"]=\"$machineip\";";
# NOTE(review): "$machine" is never assigned (the value read above is
# "machineip"), so all_machines only accumulates whitespace — confirm.
all_machines="$machine $all_machines";
all_ports="$port $all_ports";
all_pubs="$node $all_pubs";
done
# Process subscribers (same parsing as for publishers).
for subprop in $resultsdir/s*.properties; do
node=`awk 'BEGIN{FS="="} /NodeName/{printf $NF}' $subprop`;
machineip=`awk 'BEGIN{FS="\\\\\\\\|:|="} /NodeAddress/ {printf $2}' $subprop`;
port=`awk 'BEGIN{FS="\\\\\\\\|:|="} /NodeAddress/ {printf $4}' $subprop`;
portstr="$portstr port[$port]=\"$node\";";
nodestr="$nodestr node[\"$node\"]=\"$machineip\";";
all_machines="$machine $all_machines";
all_ports="$port $all_ports";
all_subs="$node $all_subs";
done
# Final pass: first awk extracts (pathlength, source-port) from DELIV
# lines via gensub (gawk-specific); second awk resolves port -> node
# name -> IP using the snippets built above and prints one row per
# delivery for this subscriber.
for sub in $all_subs; do
subresultsfile="$tempdir/BC-$sub*";
awk "/DELIV/{print gensub(/.*TMulticast_Publication_MP\[ [^ ]* [^ ]* ([^ ]*) .*\] ([[:digit:]]*) [^ ]* Seq.*/,\"\\\\1 \\\\2\",1)}" $subresultsfile | awk "BEGIN{$portstr $nodestr} {print \"$sub\",node[\"$sub\"]\"\t\"port[\$2],node[port[\$2]],\$1}";
done > $plotsdir/delivery_pathlengths;
}
#######################################
# Stage 2: group the per-delivery rows by (subscriber, publisher) pair.
# Input ($plotsdir/delivery_pathlengths) rows: sub sub-ip pub pub-ip len
# — the pair key is fields 2 and 4 (the two IPs).  For each key, all
# observed path lengths are concatenated space-separated on one line,
# sorted by key.
# NOTE(review): asorti() is GNU awk (gawk) only — confirm gawk is the
# awk on the target hosts.
# Arguments: $1 results dir (unused), $2 temp dir (unused), $3 plots dir.
# Output file: $plotsdir/delivery_pathlengths_grouped
#######################################
function process2() {
local resultsdir="$1";
local tempdir="$2";
local plotsdir="$3";
blue "Process 2";
awk 'BEGIN{l=0;} {
i=$2","$4;
if(count[i]==0)
pair[++l]=i;
count[i]++;
pathlengths[i]=pathlengths[i]" "$5;
} END{
l=asorti(pathlengths, sorted_indices);
for(j=1;j<=l;j++){
i=sorted_indices[j]
print i"\t"pathlengths[i];
}
}' $plotsdir/delivery_pathlengths > $plotsdir/delivery_pathlengths_grouped;
}
#######################################
# Stage 3: summarize each grouped line into totals.
# Input ($plotsdir/delivery_pathlengths_grouped) rows: key len len len...
# For each row prints: key  total-length  message-count  average
# (average is -1 when the row has no length fields).
# Arguments: $1 results dir (unused), $2 temp dir (unused), $3 plots dir.
# Output file: $plotsdir/delivery_pathlengths_summary
#######################################
function process3() {
  local resultsdir="$1";
  local tempdir="$2";
  local plotsdir="$3";
  blue "Process 3";
  # Paths are quoted (SC2086): the originals were unquoted and would
  # break on a plots dir containing spaces or glob characters.
  awk '{tot=0;for(i=2;i<=NF;i++)tot+=$i; print $1"\t"tot"\t"(NF-1)"\t"(NF<=1?-1:(tot/(NF-1)))}' \
    "$plotsdir/delivery_pathlengths_grouped" > "$plotsdir/delivery_pathlengths_summary";
}
# Collapse the per-pair summary into one line: total length, total message
# count, and the overall average path length.
function process4() {
local resultsdir="$1";
local tempdir="$2";
local plotsdir="$3";
blue "Process 4";
awk '{tot_len+=$2; tot_msg+=$3;}END{print tot_len" "tot_msg" "(tot_len/tot_msg)}' $plotsdir/delivery_pathlengths_summary > $plotsdir/delivery_pathlengths_final_summary;
}
# ---- main --------------------------------------------------------------
# Usage: <script> <resultsdir>
# Expects <resultsdir>/temp to exist; creates <resultsdir>/plots.
# Relies on red/yellow/green, wait_on_stop and process1..process4 defined
# earlier in this file.
if [ ! -d "$1" ]; then
    red "Results dir is not accessible";
    exit 1;   # was 'exit -1': exit status must be 0-255
fi
resultsdir="$1";
tempdir="$resultsdir/temp";
if [ ! -d "$tempdir" ]; then
    red "No temp dir under results dir";
    exit 1;
fi
plotsdir="$resultsdir/plots";
if [ ! -d "$plotsdir" ]; then
    mkdir -p "$plotsdir";
fi
yellow "$(basename "$resultsdir")";
wait_on_stop;
process1 "$resultsdir" "$tempdir" "$plotsdir";
wait_on_stop;
process2 "$resultsdir" "$tempdir" "$plotsdir";
wait_on_stop;
process3 "$resultsdir" "$tempdir" "$plotsdir";
wait_on_stop;
process4 "$resultsdir" "$tempdir" "$plotsdir";
green "DONE. $(wc -l "$plotsdir/delivery_pathlengths" | awk '{printf $1}')";
| true
|
45423f3e7e578d869f76d0a5918afffa18bf5b98
|
Shell
|
technopreneural/jonawifi
|
/iptables/scripts/inetshare.sh
|
UTF-8
| 717
| 3
| 3
|
[] |
no_license
|
#!/bin/bash
# Share the Internet connection on $LAN with wireless clients on $WLAN
# using iptables forwarding + NAT.
#-----------
# VARIABLES
#-----------
WLAN=wlan0      # wireless access-point interface (clients)
LAN=eth0        # upstream interface (Internet)
WLANRANGE=192.168.253.0/24
LANRANGE=192.168.69.0/24   # NOTE(review): defined but never used below
PORTAL=192.168.253.1       # NOTE(review): defined but never used below
#------------------
# Internet Sharing
#------------------
# 1) Forward existing connections
iptables -t filter -A FORWARD \
    -i "$LAN" -o "$WLAN" -d "$WLANRANGE" \
    -m conntrack --ctstate ESTABLISHED,RELATED \
    -j ACCEPT
# 2) Forward new wireless access point connections to the Internet
iptables -t filter -A FORWARD \
    -i "$WLAN" -o "$LAN" -s "$WLANRANGE" \
    -j ACCEPT
# 3) Drop everything else
iptables -t filter -A FORWARD \
    -j DROP
# 4) Enable NAT for wireless access point connections to the Internet
iptables -t nat -A POSTROUTING \
    -o "$LAN" -s "$WLANRANGE" \
    -j MASQUERADE
| true
|
cca3a72751f1fa641970574bae3217264d9ffe41
|
Shell
|
Llinjing/bigdata
|
/nearline-feedback/ad-nearline-feedback-native/shell/monitor.sh
|
UTF-8
| 410
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Watchdog: for each configured application, check that a YARN application
# with the expected name is running; otherwise alert via SMS and restart
# the application's launch script in the background.
source ../conf/project.conf
source ../conf/warning.conf
for ((i = 0; i < APP_NUM; ++i)); do
    app_id=$(yarn application -list | grep "${APP_NAME[$i]}" | awk '{print $1}')
    if [ -z "${app_id}" ]; then
        ${WARNING_BIN} "${PRODUCT_LINE}: ${APP_NAME[$i]} is not running, service will restart !" "${SMS_USER[$FATAL]}"
        # Relaunch in the background.  The original wrapped this line in
        # backticks (a useless command substitution); removed.
        nohup sh -x "${APP_SCRIPT[$i]}.sh" 1> "../log/${APP_SCRIPT[$i]}.log" 2>&1 &
    fi
done
| true
|
54fc357b3a8f755678e34682e9f26fab0dcb306e
|
Shell
|
halkon/ng-resource-demo
|
/test/jenkins.example.sh
|
UTF-8
| 674
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/bash -e
# CI build script: lint + unit-test the app, then start a stubbed server
# and run protractor end-to-end tests against it.
# NOTE(review): '-e' in the shebang is lost when invoked as 'bash script';
# consider 'set -e' in the body instead.
# If using a ubuntu box for builds you need to install python3 python3.3-dev
# Kill any left over grunt tasks, avoid throwing error if not orphaned process
killall grunt || true
npm install --loglevel warn
bower install --quiet
# prep for unit tests
grunt ngTemplateCache
grunt plato
grunt jshint
grunt jscs:tests
grunt test:unit
# Compile and run server
echo "Building..."
grunt server:stubbed:watch 2>&1 &
# wait until server is up
# NOTE(review): no timeout -- this loops forever if the server never starts.
while ! curl --silent http://localhost:9000 > /dev/null 2>&1; do sleep 5; done
echo "Running tests..."
cp ~/secrets.js ./test/secrets.js
protractor test/protractor.conf.js
killall grunt || true
killall firefox || true
| true
|
6f383da17ca2486417d9bdf79cbd2dca9f72b73c
|
Shell
|
souvik-sarkhel/assignments_for_z
|
/assignment2/crun
|
UTF-8
| 1,059
| 4.40625
| 4
|
[] |
no_license
|
#!/bin/bash
# crun - read one command from stdin and run it over ssh on each host in a
# comma-separated list given as $1.
# The below conditions is for showing the usage of crun command
if [ "$1" = "-h" ] || [ "$1" = "--help" ]; then
    echo 'Runs command on list of hosts specified as host1,host2,host3,....,hostn'
    echo 'Usage: crun [list_of_hostnames_comma_seperated]'
    exit 0
fi
# If the number of arguments passed to the command is less than 1 then it
# prints the correct usage of the command
if [ $# -lt 1 ]; then
    echo 'Usage: crun [list_of_hostnames_comma_seperated]'
    exit
fi
# It prints the line and waits for the user to input a command which will
# be run on all the hosts provided in the comma separated list
echo "Enter command to run"
read -r cmd
option=''
# This condition adds -t option i.e- force pseudo-tty allocation needed to
# run sudo commands if sudo keyword is present in the command.
# (BUG FIX: this explanatory line previously lacked its leading '#', so the
# shell tried to execute it as a command on every run.)
if [ "$(echo "$cmd" | head -c 4)" = "sudo" ]; then
    option='-t'
fi
# This loop is to iterate through the list of hosts given and run the command
for ip in $(echo "$1" | sed "s/,/ /g"); do
    echo "$ip ---"
    # $option is intentionally unquoted so it vanishes when empty; $cmd is
    # intentionally unquoted so the remote command keeps its word boundaries.
    ssh -C $option "$ip" $cmd
    echo
done
| true
|
c017ede9c5c6d34b7e82a9af0e90a8680af759f7
|
Shell
|
shdunning/dev_env
|
/.cdstack.sh
|
UTF-8
| 3,346
| 3.8125
| 4
|
[] |
no_license
|
# maximum size of directory stack
: ${CDSTACKSIZE:=36}
# restore stack elements from saved file, if any
if [ -r ~/.cdstack ] ; then
read CDSTACK < ~/.cdstack
fi
# newer bashes appear to do 'cd -P' on login, but if part
# of your $HOME is symlinked (ie, your $HOME is /home/david
# but /home is a symlink to /usr/home) this symlink will
# mess up initial cdstack printing of $HOME as '~', but
# cd'ing to your symlinked $HOME seems to clear this up
#builtin cd
alias cd=cdstack
cdstack()
{
local dir new sep
local cnt indx total=0
local IFS=: PS3= HOME=${HOME%/}
# count all elements in the stack
for dir in $CDSTACK ; do
total=$(( $total + 1 ))
done
# typing 'cd .' means print the stack
# since stack elements are stored with $HOME expanded
# let's normalize $HOME into shorter tilde ~ notation
if [ "$1" = "." ] ; then
if [ $total -eq 0 ] ; then
echo "Stack empty" >&2
return 1
fi
new= sep=
for dir in $CDSTACK ; do
case "$dir" in "$HOME"/*)
# normalize into ~ notation
dir="~${dir#$HOME}"
esac
new="$new$sep$dir"
sep="$IFS"
done
# use 'select' for nice multi-column numbered output
select dir in $new ; do
:
done < /dev/null 2>&1
return 0
fi
# typing 'cd -n' means chdir to nth element in stack
# note how we assume '-n' is the first positional argument
# eg, on bash 2.0 and above, 'cd [-L|-P] -n' won't work
# see 'man bash' for explanation of other cd options
case "$1" in -[1-9]*)
if [ $total -eq 0 ] ; then
echo "Stack empty" >&2
return 1
fi
indx=${1#-}
if [ $indx -gt $total ] ; then
echo "Stack element out of range" >&2
return 1
fi
cnt=0 new=
for dir in $CDSTACK ; do
cnt=$(( $cnt + 1 ))
if [ $cnt -eq $indx ] ; then
# found nth element
new="$dir"
break
fi
done
# install nth element as positional argument
set -- "$new"
esac
# change to new directory as requested
builtin cd "$@" || return $?
# build temporary stack, popping old cwd
# also remove duplicates and other clutter
new= sep=
for dir in $CDSTACK ; do
[ "$dir" = "" ] && continue
[ "$dir" = "." ] && continue
[ "$dir" = "$PWD" ] && continue
[ "$dir" = "$HOME" ] && continue
[ "$dir" = "$OLDPWD" ] && continue
case :"$dir": in *:"$new":*)
# found duplicate
continue
esac
new="$new$sep$dir"
sep="$IFS"
done
# now push old cwd onto top of stack
# but never push home or cwd, those are clutter
if [ "$OLDPWD" != "$HOME" -a "$OLDPWD" != "$PWD" ] ; then
new="$OLDPWD$sep$new"
fi
# copy temporary stack to $CDSTACK variable
# trimming stack to first $CDSTACKSIZE elements
CDSTACK= cnt=0 sep=
for dir in $new ; do
cnt=$(( $cnt + 1 ))
if [ $cnt -le $CDSTACKSIZE ] ; then
CDSTACK="$CDSTACK$sep$dir"
sep="$IFS"
fi
done
return 0
}
# vim: ft=sh ai et ts=4 sts=4 sw=4
| true
|
74735ec7dc983e815627d8658b4bb80b04939b72
|
Shell
|
rdicosmo/docked-aspcud
|
/aspcud
|
UTF-8
| 515
| 3.390625
| 3
|
[] |
no_license
|
#!/bin/sh
#
# Simple wrapper script to call a dockerised aspcud.
# Usage: aspcud <cudf-in> <solution-out> <criteria>
#
cudfpath=$1
cudf=$(basename "$cudfpath")
solutionpath=$2
solution=$(basename "$solutionpath")
criteria=$3
# Use an unpredictable temp dir: the original used /tmp/<name>.$$, which is
# guessable and race-prone.
volume=$(mktemp -d /tmp/aspcudwrapper.XXXXXX) || exit 1
cp "$cudfpath" "$volume/$cudf"
sudo docker run -v "$volume":/mnt/cudf -i rdicosmo/aspcud:v1 aspcud "/mnt/cudf/$cudf" "/mnt/cudf/$solution" "$criteria"
cp "$volume/$solution" "$solutionpath"
# cleanup
rm -f "$volume/$solution" "$volume/$cudf"
rmdir "$volume"
| true
|
a78b53a50c5d3e988526358a298d0130b8de9acd
|
Shell
|
gavin2lee/incubator
|
/docs/sourcecodes/OpenBridge-passos-ui/ob-paasos-web/install/module/install_agent.sh
|
UTF-8
| 1,634
| 3.84375
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# 安装openbridge-agent
# wangxinxiang@yihecloud.com
# 2016-08-08
OPTS=$(getopt -o s: --long registry: -- "$@")
if [ $? != 0 ]; then
echo "[ERROR] 参数错误!"
usage;
exit 1
fi
eval set -- "$OPTS"
registry="docker.yihecloud.com:443"
version="1.9"
monitor_ip=
while true; do
case "$1" in
-s) monitor_ip=$2; shift 2;;
--registry) registry=$2; shift 2;;
--) shift; break;;
esac
done
function check_opt() {
arg="\$$1"
if [ "$(eval echo $arg)" = "" ]; then
echo "[ERROR] <$1> 参数缺失!"
usage;
exit 1
fi
}
function usage() {
echo "
Usage: $0
-s <monitor server ip> , eg: x.x.x.x
--registry <docker registry>, default: docker.yihecloud.com:443
"
}
# check options
check_opt "registry"
check_opt "monitor_ip"
# 获取本机IP地址,支持动态和静态IP
IP=
host_ips=(`ip addr show | grep inet | grep -v inet6 | grep brd | awk '{print $2}' | cut -f1 -d '/'`)
if [ "${host_ips[0]}" == "" ]; then
echo "[ERROR] get ip address error"
exit 1
else
IP=${host_ips[0]}
echo "[INFO] use host ip address: $IP"
fi
# 清理
docker rm -f agent
docker rmi -f $registry/agent:1.9
# run docker image
docker run -d --restart=always \
-e HOSTNAME="\"$IP\"" \
-e TRANSFER_ADDR="[\"$monitor_ip:8433\",\"$monitor_ip:8433\"]" \
-e TRANSFER_INTERVAL="60" \
-e HEARTBEAT_ENABLED="true" \
-e HEARTBEAT_ADDR="\"$monitor_ip:6030\"" \
-v /:/rootfs:ro \
-v /var/run:/var/run:rw \
-v /sys:/sys:ro \
-v /var/lib/docker/:/var/lib/docker:ro \
-p 1988:1988 \
--name agent \
$registry/openbridge/agent:$version
# show status
docker ps |grep "agent"
sleep 3;
docker logs -f agent
| true
|
0bfe5f88fa4e24e7224a88bc16334a4e28a189f9
|
Shell
|
jtw10/pokemon_trainer_manager
|
/Module Installation Scripts/startup.sh
|
UTF-8
| 149
| 2.921875
| 3
|
[] |
no_license
|
#!/bin/bash
# Install Python module dependencies after the user confirms with a key press.
read -n1 -r -p "Press space to continue..." key
# NOTE(review): a single space read into $key is stripped by the default IFS,
# so $key is empty for space (and for Enter); any other key aborts.  Confirm
# this dual acceptance is intended.
if [ "$key" = '' ]; then
pip install pygame
pip install pillow
else
exit
fi
| true
|
18b90c32af339d9f6829c0061dcbf393e88d1fc2
|
Shell
|
joy13975/vpnshuttle
|
/scripts/run/config_ssh.sh
|
UTF-8
| 181
| 2.703125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash -e
# Write a minimal sshd_config allowing password/root login and tunnels,
# restricting logins to appuser connecting from the host's IP.
host_ip=$(./scripts/run/get_host_ip.sh)
# NOTE: this OVERWRITES (not appends to) the existing sshd_config.
echo "
PasswordAuthentication yes
PermitRootLogin yes
PermitTunnel yes
AllowUsers appuser@${host_ip}" > /etc/ssh/sshd_config
| true
|
8fe38af1b859702007a8f3a1d9d19bce7a1c17a4
|
Shell
|
montymxb/closure-compiler-bash-wrapper
|
/closure-compiler.sh
|
UTF-8
| 619
| 3.859375
| 4
|
[
"MIT"
] |
permissive
|
## Closure compiler path, please update this as needed for your own system
CC_PATH="/closure-compiler/compiler.jar"
# Minify each JavaScript file given on the command line to <name>-min.js.
if [ $# -eq 0 ]; then
    echo "Please include the file path(s) for the file(s) that you would like to compress." 1>&2
    exit 1
fi
for file in "$@"; do
    if [ -f "$file" ]; then
        # BUG FIX: use ${file%.*} (strip only the last extension) instead of
        # ${file%%.*}, which stripped everything after the FIRST dot and
        # mangled names like "a.min.js" or "./dir/app.js".
        java -jar "$CC_PATH" --js "$file" --js_output_file "${file%.*}-min.js"
        if (( $? )); then
            echo "$file was not able to be minified"
            exit 1
        else
            echo "$file was minified to ${file%.*}-min.js"
        fi
    else
        echo "Unable to find the javascript file '$file'."
    fi
done
exit 0
| true
|
33f06b75b44c39e664039380a8b26a0081d6b8e9
|
Shell
|
mad01/deadman
|
/hacks/verify-gofmt
|
UTF-8
| 277
| 3.46875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Fail the build when any top-level .go file is not gofmt-clean.
set -o errexit
set -o nounset
set -o pipefail
GOFMT="gofmt -s -w"
# NOTE(review): because of '-w' this REWRITES the offending files in place
# while listing them via '-l'; confirm that is intended for a verify script.
bad_files=$($GOFMT -l *.go)
if [[ -n "${bad_files}" ]]; then
echo "FAIL: '$GOFMT' needs to be run on the following files: "
echo "${bad_files}"
echo "FAIL: please execute make gofmt"
exit 1
fi
| true
|
1d4f1bb51164299425f89fc69e0c4a85384c03ba
|
Shell
|
SametSisartenep/dotfiles
|
/.kshrc
|
UTF-8
| 1,234
| 3.09375
| 3
|
[
"ISC"
] |
permissive
|
# $Antares: .kshrc,v 1.0 2016/10/17 05:17:26 samet Exp $
#
# Interactive Korn Shell resources.
#
# Copyright (C) 2016 Rodrigo González López.
# All rights reserved.
#
# Prompt
case $LOGNAME in
root) PS1="\# "
;;
*) PS1="\% "
;;
esac
# Functions
# Copy stdin (or a file, if one is given) to the X clipboard.
clipcopy() {
    file=$1
    if [[ -z $file ]]; then
        xsel --clipboard --input
    else
        # Redirect directly instead of the previous useless "cat | xsel".
        xsel --clipboard --input < "$file"
    fi
}
# Paste the X clipboard to stdout.
clippaste() {
    xsel --clipboard --output
}
# Aliases
alias ls='ls --color=tty'
alias jump='clear && ls -Filas'
alias ..='cd ..'
alias ...='cd ../..'
alias ....='cd ../../..'
alias rmhard='rm -rfv'
alias rmwarn='rm -rfvi'
alias rmsec='bleachbit -s'
alias rmsec2='shred -uv'
alias as='as --warn --statistics'
alias memcheck='valgrind --leak-check=full -v'
alias hd='od -Ax -tx1z -v' # Thanks Matt!
alias jobs='jobs -l'
alias get_window_geometry="xwininfo -id $(xprop -root 2> /dev/null | awk '/_NET_ACTIVE_WINDOW\(WINDOW\)/{print $NF}')"
# GnuPG
GPG_TTY=$(tty)
# Docker
## Remove exited containers
alias dockrec='docker ps -a -f status=exited -q | xargs -r docker rm -v'
# Keyboard config
if [ ! -z $DISPLAY ];then
if [[ ! "$(setxkbmap -query | grep layout | awk '{print $2}')" == "es" ]];then
setxkbmap es
fi
fi
# Emacs Mode
set -o emacs
| true
|
6c8b99b7900cfddcca3d5883c3c104f36f74e3d4
|
Shell
|
vieirinhasantana/aws-lambda-golang
|
/scripts/checkcoverage.sh
|
UTF-8
| 504
| 3.265625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Run the Go test suite with coverage and fail when total coverage is below
# the threshold percentage given as $1 (e.g. "80").
THRESHOLD=${1:?usage: checkcoverage.sh <threshold-percent>}
go test -timeout=2m -covermode=atomic -coverprofile coverage.out ./...
# Exclude generated mocks from the report (GNU sed -i syntax).
sed -i '/mock.go/d' coverage.out
COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}')
COVERAGE=${COVERAGE%\%}   # strip the trailing '%'
# bc handles the possibly fractional comparison.
if (( $(echo "${COVERAGE} >= ${THRESHOLD}" | bc -l) )); then
    echo "coverage above threshold"
    echo "coverage: ${COVERAGE} - threshold: ${THRESHOLD}"
    exit 0
fi
echo "coverage below threshold"
echo "coverage: ${COVERAGE} - threshold: ${THRESHOLD}"
exit 1
| true
|
afd2292e9ea930a004d08dd78dcb7f4673482b17
|
Shell
|
noda50/RubyItk
|
/GIS/gml2wfsInsert
|
UTF-8
| 3,113
| 2.953125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#! /bin/tcsh
## -*- Mode: shell-script -*-
##======================================================================
## help doc section
cat << __END_HELP__ > /dev/null
__HELP__
Usage: % __ThisProgram__ <SrcFile> [<DstFile>]
__END_HELP__
##======================================================================
set script = /tmp/$0.$$
if ($#argv < 1) goto __help__
set srcfile = $1
onintr __final__
cat <<__END__ > $script
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:gml="http://www.opengis.net/gml">
<xsl:output method="xml" />
<!-- ============================================================
- nake "collection" tag and insert "Transaction/Insert" tag
- ============================================================ -->
<xsl:template match="/collection">
<Transaction>
<Insert>
<xsl:for-each select="gml:featureMember">
<xsl:apply-templates select="@*|node()" mode="e"/>
</xsl:for-each>
</Insert>
</Transaction>
</xsl:template>
<!-- ============================================================
- copy every
- ============================================================ -->
<xsl:template match="@*|node()" mode="e">
<xsl:copy>
<xsl:apply-templates select="@*|node()" mode="e"/>
</xsl:copy>
</xsl:template>
</xsl:stylesheet>
__END__
#xt $srcfile $script
saxon $srcfile $script
__final__:
rm -f $script
exit
##======================================================================
__help__:
sed -n '/^__HELP__/,/^__END_HELP__/ p' $0 |\
sed '/^__.*$/ d' |\
sed "s/__ThisProgram__/$0/g"
exit
##======================================================================
## version 0.0
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:gml="http://www.opengis.net/gml">
<xsl:output method="xml" />
<!-- ============================================================
- nake "collection" tag and insert "Transaction/Insert" tag
- ============================================================ -->
<xsl:template match="/collection">
<Transaction>
<Insert>
<xsl:apply-templates />
</Insert>
</Transaction>
</xsl:template>
<!-- ============================================================
- ignore gml:boundedBy tag
- ============================================================ -->
<xsl:template match="gml:boundedBy">
</xsl:template>
<!-- ============================================================
- nake "gml:featureMember"
- ============================================================ -->
<xsl:template match="gml:featureMember">
<xsl:apply-templates select="@*|node()"/>
</xsl:template>
<!-- ============================================================
- nake "gml:featureMember"
- ============================================================ -->
<xsl:template match="@*|node()">
<xsl:copy>
<xsl:apply-templates select="@*|node()"/>
</xsl:copy>
</xsl:template>
</xsl:stylesheet>
| true
|
3a167a94c02055fb48a35d962f2754c62e0ae6cb
|
Shell
|
locuslab/stable_dynamics
|
/train_vae_simple
|
UTF-8
| 704
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Train the simple VAE for the dataset named in $1, then render the results.
DATASET="$1"
LR="0.0005"
W1="-0.25"
INNER="PSICNN"
if [ -z "$DATASET" ]; then
    # BUG FIX: report on stderr and exit non-zero (the original exited with
    # status 0, so callers could not detect the failure).
    echo "NO DATASET" >&2
    exit 1
fi
OUTDIR="experiments/vae_simple/${DATASET}"
mkdir -p "$OUTDIR"
MODEL="vae_trajectory[w=$W1,projfn=$INNER]"
date >> "$OUTDIR/progress.txt"
./.colorize ./train.py \
    --log-to "runs/$OUTDIR" \
    --batch-size 300 \
    --learning-rate "$LR" \
    --epochs 150 \
    imagepairs[files=youtube/$DATASET/*] \
    "$MODEL" \
    "$OUTDIR/checkpoint_{epoch:0>5}.pth" | tee -a "$OUTDIR/progress.txt"
./render_vae "$DATASET" "$MODEL" "$OUTDIR"
| true
|
6105e14de261c914e325884cb0c44bdb91c9df53
|
Shell
|
rms1000watt/vagrant
|
/scripts/15-stern.sh
|
UTF-8
| 240
| 3.15625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Install stern via asdf unless it is already available on the PATH.
set -e

stern_version=1.19.0

if command -v stern &> /dev/null; then
  echo "Skipping stern installation"
else
  asdf plugin add stern
  asdf install stern ${stern_version}
  asdf global stern ${stern_version}
fi
| true
|
19093cc68ec6b1297d7e03bb9ffdbe9a5b4862a0
|
Shell
|
openhpc/ohpc
|
/misc/get_source.sh
|
UTF-8
| 1,192
| 4.21875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
if [ $# -ne 1 ]; then
echo "${0} requires the name of the spec file as parameter."
exit 1
fi
# If running on Fedora special defines are needed
DISTRO=$(rpm --eval '0%{?fedora}')
if [ "${DISTRO}" != "0" ]; then
FLAGS=(--undefine fedora --define "rhel 8")
fi
PATTERN=${1}
IFS=$'\n'
find . -name "${PATTERN}" -print0 | while IFS= read -r -d '' file
do
if [ ! -f "${file}" ]; then
echo "${file} is not a file. Skipping."
continue
fi
echo "${file}"
DIR=$(dirname "${file}")
pushd "${DIR}" > /dev/null || exit 1
BASE=$(basename "${file}")
SOURCES=$(rpmspec --parse --define '_sourcedir ../../..' "${FLAGS[@]}" "${BASE}" | grep Source)
for u in ${SOURCES}; do
echo "${u}"
if [[ "${u}" != *"http"* ]]; then
continue
fi
u=$(awk '{ print $2 }' <<< "${u}")
echo "Trying to get ${u}"
# Try to download only if newer
WGET=$(wget -N -nv -P ../SOURCES "${u}" 2>&1)
# Handling for github URLs with #/ or #$/
if grep -E "#[$]?/" <<< "${u}"; then
MV_SOURCE=$(echo "${WGET}" | tail -1 | cut -d\ -f6 | sed -e 's/^"//' -e 's/"$//')
MV_DEST=../SOURCES/$(basename "${u}")
mv "${MV_SOURCE}" "${MV_DEST}"
fi
done
popd > /dev/null || exit 1
done
| true
|
f42cb64568af588674a311003a7c3d110a36db42
|
Shell
|
mohamdmido/iotencoder
|
/build/build.sh
|
UTF-8
| 680
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Script used when compiling binary during build phase
set -o errexit
set -o nounset
if set -o | grep -q "pipefail"; then
set -o pipefail
fi
# export build flags
export CGO_ENABLED="${CGO_ENABLED:-0}"
export GOARCH="${ARCH}"
# generate bindata assets
go generate -x "${PKG}/pkg/migrations/"
go generate -x "${PKG}/pkg/lua/"
go generate -x "${PKG}/pkg/smartcitizen/"
# compile our binary using install, the mounted volume ensures we can see it
# outside the build container
go install \
-v \
-ldflags "-X ${PKG}/pkg/version.Version=${VERSION} -X \"${PKG}/pkg/version.BuildDate=${BUILD_DATE}\" -X ${PKG}/pkg/version.BinaryName=${BINARY_NAME}" \
./...
| true
|
249edece899e38344c640beae311f2b3129364f0
|
Shell
|
gfontenot/dotfiles
|
/local/bin/git-create-branch
|
UTF-8
| 393
| 3.875
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
#
# Create a branch locally and push it to origin, prompting for a name when
# none is supplied.  Branches on repositories other than gfontenot's own
# are prefixed with "gfontenot/".
set -e

branch="$1"
if [ -z "$branch" ]; then
  echo "Branch name required"
  printf "> "
  read -r branch
fi

remote_url=$(git remote get-url origin)
case "$remote_url" in
  *github.com:gfontenot*) full_name="$branch" ;;
  *) full_name="gfontenot/$branch" ;;
esac

git checkout -b "$full_name"
git push origin "$full_name" --set-upstream
| true
|
f036b4c9148512a86a588f673b4d084d9c8931ce
|
Shell
|
mike79-cdev/hello-world
|
/test.sh
|
UTF-8
| 139
| 3.265625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Add the two numeric arguments and print the result; complain otherwise.
if (($# == 2)); then
    echo "$1 $2"
    # Arithmetic expansion instead of the legacy 'let' builtin.
    RESULT=$(($1 + $2))
    echo "Ergebnis: $RESULT"
else
    echo "Falsche Anzahl !"
fi
echo "$0"
| true
|
90aecac82723fb84f5394518f11db0ba69404346
|
Shell
|
Sanj25/CSI6203
|
/portfolio/week6/GuesingGame.sh
|
UTF-8
| 738
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash
# Guessing game: keep prompting for a number in [1,100] until 42 is guessed.

printError() # Print Error Function.
{
    echo -e "\033[31mERROR:\033[0m $1"
}

getNumber() # Prompt with $1 until $REPLY falls within [$2, $3].
{
    read -p "$1:"
    while (( $REPLY < $2 || $REPLY > $3 )); do
        printError " Input must between $2 and $3"
        read -p "$1:"
    done
}

checknum() # Tell the player whether $REPLY is above or below 42.
{
    [ $REPLY -gt 42 ] && echo "TOO HIGH!"
    [ $REPLY -lt 42 ] && echo "TOO LOW!"
}

getNumber " Enter a number between 1 and 100:" 1 100 # Function getNumber called.
[ $REPLY -eq 42 ] && echo "Right!" # Check if input is equal to 42.
while [ $REPLY -ne 42 ] # Check until number not equal to 42.
do
    checknum
    getNumber " Enter number between 1 and 100:" 1 100
    # BUG FIX: 'echo' was missing here, so bash tried to execute "Correct!"
    # as a command when the player finally guessed right.
    [ $REPLY -eq 42 ] && echo "Correct!"
done
| true
|
a586fab70f916751f212780050eacb03d81baa6a
|
Shell
|
Natthaphong/speke-reference-server
|
/local_build.sh
|
UTF-8
| 1,180
| 3.03125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
ORIGIN=`pwd`
BUILD=$ORIGIN/build
# date stamp for this deploy
STAMP=`date +%s`
echo build stamp is $STAMP
# clear the build folder
rm -f $BUILD/*
# create the reference server zip with a unique name
SERVZIP=speke-reference-lambda-$STAMP.zip
cd $ORIGIN/src
# using zappa for help in packaging
zappa package --output $BUILD/$SERVZIP
# create the custom resource zip with a unique name
RESZIP=cloudformation-resources-$STAMP.zip
cd $ORIGIN/cloudformation
zip -r $BUILD/$RESZIP mediapackage_endpoint_common.py mediapackage_speke_endpoint.py resource_tools.py
# update templates with the new zip filenames
sed -e "s/DEV_0_0_0/$STAMP/g" speke_reference.json >$BUILD/speke_reference.json
sed -e "s/DEV_0_0_0/$STAMP/g" mediapackage_speke_endpoint.json >$BUILD/mediapackage_speke_endpoint.json
cd $BUILD
| true
|
271994bad9db5782d859cb30430154dec6b3f81f
|
Shell
|
tykowale/vimconfig
|
/.bash_profile
|
UTF-8
| 2,998
| 3.375
| 3
|
[] |
no_license
|
# echo is like puts for bash (bash is the program running in your terminal)
# echo "Loading ~/.bash_profile a shell script that runs in every new terminal you open"
# $VARIABLE will render before the rest of the command is executed
echo "Logged in as $USER at $(hostname)"
if [ -f ~/.git-completion.bash ]; then
. ~/.git-completion.bash
fi
# load Node Version Manager
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh
nvm use 0.12
# rbenv version
eval "$(rbenv init -)"
# Path changes are made non-destructive with PATH=new_path;$PATH This is like A=A+B so we preserve the old path
# Path order matters, putting /usr/local/bin: before $PATH
# ensures brew programs will be seen and used before another program
# of the same name is called
# Tomcat settings
export CATALINA_HOME=/usr/local/Cellar/tomcat/8.0.30/libexec
# Path for brew
test -d /usr/local/bin && export PATH=/usr/local/bin:/usr/local/sbin:~/bin:/Users/tkowalewski/.rbenv/shims:/Users/tkowalewski/.nvm/versions/node/v0.12.7/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/git/bin:/Library/Tomcat/bin
# Load git completions
git_completion_script=/usr/local/etc/bash_completion.d/git-completion.bash
test -s $git_completion_script && source $git_completion_script
# A more colorful git prompt
# \[\e[0m\] resets the color to default color
c_reset='\[\e[0m\]'
# \e[0;31m\ sets the color to red
c_path='\[\e[0;31m\]'
# \e[0;32m\ sets the color to green
c_git_clean='\[\e[0;32m\]'
# \e[0;31m\ sets the color to red
c_git_dirty='\[\e[0;31m\]'
# PS1 is the variable for the prompt you see everytime you hit enter
PROMPT_COMMAND='PS1="${c_path}\W${c_reset}$(git_prompt) :> "'
export PS1='\n\[\033[0;31m\]\W\[\033[0m\]$(git_prompt)\[\033[0m\]:> '
# determines if the git branch you are on is clean or dirty
# Emit " [<branch>]" (colorized) for the prompt, or nothing outside a repo.
# Reads the c_git_clean / c_git_dirty / c_reset colors defined above.
git_prompt ()
{
if ! git rev-parse --git-dir > /dev/null 2>&1; then
return 0
fi
# Grab working branch name
# BUG FIX: was 'Git branch' (capital G), which only resolved on
# case-insensitive filesystems.
git_branch=$(git branch 2>/dev/null | sed -n '/^\*/s/^\* //p')
# Clean or dirty branch
if git diff --quiet 2>/dev/null >&2; then
git_color="${c_git_clean}"
else
git_color=${c_git_dirty}
fi
echo " [$git_color$git_branch${c_reset}]"
}
# Colors ls should use for folders, files, symlinks etc, see `man ls` and
# search for LSCOLORS
export LSCOLORS=ExGxFxdxCxDxDxaccxaeex
# Force ls to use colors (G) and use humanized file sizes (h)
alias ls='ls -Gh'
# Useful aliases
alias dl="git pull origin develop"
alias home="git co develop"
alias e="wstorm ."
alias j="ij ."
alias yolo="rm -rf node_modules/ && rm -rf bower_components/ && npm install && bower install"
alias core="rm -rf bower_components/up-ui-core/src && rm -rf node_modules/up-ui-core && mkdir bower_components node_modules && cp -r ~/Uptake/frontend/up-ui-core/src bower_components/up-ui-core/ && cp -r ~/Uptake/frontend/up-ui-core node_modules/up-ui-core/"
alias report="open .stats/coverage/coverage/index.html"
alias pr='git push origin $1 2>&1 | tee >(grep -e https | sed "s/remote: //g" | pbcopy)'
| true
|
dfb8c55149b2d5ab6fdfcb19a2acfeb32bcecff3
|
Shell
|
cboettig/dockerfiles
|
/bce-extensions/rstudio-recipe.sh
|
UTF-8
| 1,115
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
set -e
## Download and install RStudio server & dependencies
## Attempts to get detect latest version, otherwise falls back to version given in $VER
## Symlink pandoc, pandoc-citeproc so they are available system-wide
rm -rf /var/lib/apt/lists/ \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
ca-certificates \
file \
git \
libcurl4-openssl-dev \
psmisc \
supervisor \
sudo \
&& wget -q http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl0.9.8_0.9.8o-4squeeze14_amd64.deb \
&& dpkg -i libssl0.9.8_0.9.8o-4squeeze14_amd64.deb && rm libssl0.9.8_0.9.8o-4squeeze14_amd64.deb \
&& VER=$(wget --no-check-certificate -qO- https://s3.amazonaws.com/rstudio-server/current.ver) \
&& wget -q http://download2.rstudio.org/rstudio-server-${VER}-amd64.deb \
&& dpkg -i rstudio-server-${VER}-amd64.deb \
&& rm rstudio-server-*-amd64.deb \
&& ln -s /usr/lib/rstudio-server/bin/pandoc/pandoc /usr/local/bin \
&& ln -s /usr/lib/rstudio-server/bin/pandoc/pandoc-citeproc /usr/local/bin \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/
| true
|
defdfd58172270d60b93904102be84d77eb0df5c
|
Shell
|
telatin/telatin.github.io
|
/scripts/new.sh
|
UTF-8
| 713
| 4.03125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Create a new dated Jekyll post skeleton in ../_posts from the title in $1
# (spaces/underscores become dashes) and print the editor command to run.
set -euo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
OUT_DIR="$SCRIPT_DIR/../_posts"
DATE=$(date +%Y-%m-%d)
if [ -z "${1+x}" ]; then
    echo "Provide post title (eg. with dashes instead of spaces)";
    exit 2;
else
    # Normalise the slug: both spaces and underscores become dashes.
    TITLE=$(echo "$1" | sed 's/[ _]/-/g');
    NICE_TITLE=$(echo "$TITLE" | sed 's/-/ /g');
    POST_FILE="$OUT_DIR/$DATE-$TITLE.md";
    #echo "Post title: $NICE_TITLE";
    echo "nano \"$POST_FILE\"";
fi
if [[ ! -d "$OUT_DIR" ]]; then
    echo "Output directory expected at: $OUT_DIR. Not found"
    exit 1
fi
if [[ -e "$POST_FILE" ]]; then
    echo "File exists!";
else
    # Quote the target so titles with globs/odd characters cannot break
    # the redirection (the original used an unquoted $POST_FILE).
    echo "---
layout: post
title: $NICE_TITLE
---" >> "$POST_FILE"
fi
| true
|
5d74c0828f9c27442dc942fcfd929af1427d2217
|
Shell
|
CS262aCalvinFitness/server
|
/src/manual/start-java.sh
|
UTF-8
| 465
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
# Compile and run the CS262 fitness data service, recording this shell's
# PID so a supervisor script can manage it.
# Environment variables
NAME=CalvinFitness
DIR=/var/cs262/$NAME/src/
PID=$DIR/$USER-$NAME.pid
# Write our PID file
# NOTE(review): $PID is computed above but the same path is rebuilt inline.
echo $$ > $DIR/$USER-$NAME.pid
# Change to our working directory
cd $DIR
# Run this script to compile/start the cs262 data service.
javac -cp "../lib/*" edu/calvin/cs262/User.java edu/calvin/cs262/Workout.java edu/calvin/cs262/Exercise.java edu/calvin/cs262/FitnessResource.java
java -cp ".:../lib/*" edu.calvin.cs262.FitnessResource
| true
|
00b3a7636914f419c12c9d83175a9681ba4c882e
|
Shell
|
dsyer/docker-services
|
/bootable/build.sh
|
UTF-8
| 892
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash -x
docker rm bootable
docker build -t dsyer/bootable .
docker create --name=bootable dsyer/bootable
rm disk.*
qemu-img create -f raw disk.img 1G
sfdisk disk.img <<EOF
label: dos
label-id: 0x5d8b75fc
device: disk.img
unit: sectors
disk.img1 : start=2048, size=2095104, type=83, bootable
EOF
OFFSET=$(expr 512 \* 2048)
sudo losetup -D
sudo losetup -o ${OFFSET} /dev/loop1 disk.img
sudo mkfs.ext3 /dev/loop1
sudo mount -t auto /dev/loop1 /mnt/
docker export bootable | sudo tar x -C /mnt
sudo extlinux --install /mnt/boot
cat <<EOF | sudo tee /mnt/boot/syslinux.cfg
DEFAULT linux
SAY Now booting the kernel from SYSLINUX...
LABEL linux
KERNEL /boot/vmlinuz-virt
APPEND ro root=/dev/sda1 initrd=/boot/initramfs-virt
EOF
sudo umount /mnt
dd if=/usr/lib/syslinux/mbr/mbr.bin of=disk.img bs=440 count=1 conv=notrunc
qemu-img convert -f raw -O qcow2 disk.img disk.qcow
| true
|
d538953fe3b8262b01fb6e9475e451d3866821a6
|
Shell
|
RemyKaloustian/EZConcurrency
|
/rendu_1/execution.sh
|
UTF-8
| 1,497
| 3.015625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash

#################################
#                               #
#   Execution of the project    #
#                               #
#################################

# Run the benchmark executable.  Modes: "bench", "bench_plot", or a default
# single run.
EXE="./bin/executable.exe"

# BUG FIX: the mode tests compared against "$0" (the script's own name,
# never "bench") and invoked $(EXE) -- command substitution of a
# non-existent command -- instead of expanding "$EXE".
if [ "${1:-}" = "bench" ]; then
    echo > ./test/data.txt
    for i in {0..9}; do
        for j in {0..2}; do
            "$EXE" -t "$j" -p "$i" -m
        done
    done
elif [ "${1:-}" = "bench_plot" ]; then
    echo > ./test/data.txt
    for i in {0..9}; do
        for j in {0..2}; do
            mkdir -p ./output   # -p: do not fail after the first iteration
            echo > ./output/data.txt
            "$EXE" -t "$j" -p "$i" -m >> ./output/data.txt
            python ./test/graph_analyse.py ./output/data.txt
        done
    done
else
    echo "$EXE" -t 1 -p 4 -m
    "$EXE" -t 1 -p 4 -m
fi
#./executable.exe -p 0 -t 0 -m
#./executable.exe -p 1 -t 0 -m
#./executable.exe -p 2 -t 0 -m
#./executable.exe -p 3 -t 0 -m
#./executable.exe -p 4 -t 0 -m
#./executable.exe -p 5 -t 0 -m
#./executable.exe -p 6 -t 0 -m
#./executable.exe -p 7 -t 0 -m
#./executable.exe -p 8 -t 0 -m
#./executable.exe -p 9 -t 0 -m
#
#
#
#./executable.exe -p 0 -t 1 -m > version1.txt
#./executable.exe -p 1 -t 1 -m >> version1.txt
#./executable.exe -p 2 -t 1 -m >> version1.txt
#./executable.exe -p 3 -t 1 -m >> version1.txt
#./executable.exe -p 4 -t 1 -m >> version1.txt
#./executable.exe -p 5 -t 1 -m >> version1.txt
#./executable.exe -p 6 -t 1 -m >> version1.txt
#./executable.exe -p 7 -t 1 -m >> version1.txt
#./executable.exe -p 8 -t 1 -m >> version1.txt
#./executable.exe -p 9 -t 1 -m >> version1.txt
| true
|
1eab5e7c04d20300e49cd5e1fa540499c188c16a
|
Shell
|
alexisDubus/ppeGSB-LAB
|
/mysqlrestore.sh
|
UTF-8
| 407
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Interactively restore a SQL dump from ./BackupMYSQL into a (possibly new)
# MySQL database; mysql prompts for the root password on each invocation.
cd ./BackupMYSQL/
ls -al
echo "saisir le nom de la base de données à restaurer"
read BDDNAME
echo "saisir le nom complet du fichier SQL à restaurer"
read SQLFILE
echo "Saisir le mot de passe de securité pour se connecter à mysql"
# Create the target database if it does not already exist.
echo "CREATE DATABASE IF NOT EXISTS $BDDNAME" | mysql -uroot -p
echo "Saisir le mot de passe pour importer le fichier sql"
mysql -uroot -p $BDDNAME < $SQLFILE
| true
|
f447b6fbe6395055652d79d959c122431fc255cd
|
Shell
|
bubaley/django-clean-template
|
/install.sh
|
UTF-8
| 2,069
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
base_python=""
default_python="/home/www/.python/bin/python3.8"
project_domain=""
project_path=`pwd`
basedir=`basename $(pwd)`
git_url=""
read -p "Python interpreter (default=$default_python): " base_python
if [ "$base_python" = "" ]
then
base_python=$default_python
fi
read -p "Your domain without protocol (for example, google.com): " project_domain
read -p "Git url to your django-project: " git_url
$base_python -m venv env
source env/bin/activate
pip install -U pip
pip install gunicorn
git clone $git_url src
pip install -r src/requirements.txt
cp nginx/site.conf nginx/$basedir.conf
cp systemd/gunicorn.service systemd/gu-$basedir.service
sed -i "s~template_domain~$project_domain~g" nginx/$basedir.conf
sed -i "s~template_path~$project_path~g" nginx/$basedir.conf systemd/gu-$basedir.service
if [ -e "/etc/nginx/sites-enabled/$basedir.conf" ]
then
echo "Ссылка для nginx уже создана"
else
sudo ln -s $project_path/nginx/$basedir.conf /etc/nginx/sites-enabled/
fi
if [ -e "/etc/systemd/system/gu-$basedir.service" ]
then
echo "Ссылка на сервис уже существует"
else
sudo ln -s $project_path/systemd/gu-$basedir.service /etc/systemd/system/
fi
random_str=`sudo head /dev/urandom | tr -dc "A-Za-z0-9!#$%&()*+,-./:;<=>?@[\]^_{|}~" | fold -w 50 | head -n 1`
env_path="$project_path/src/core/settings/.env"
echo "SECRET_KEY=$random_str" >> $env_path
echo "ALLOWED_HOST=$project_domain" >> $env_path
echo "DEBUG=False" >> $env_path
sudo service nginx restart
setup_ssl=""
read -p "Do you want to install ssl? (y/n): " setup_ssl
if [ "$setup_ssl" = "y" ]
then
I=`dpkg -s certbot | grep "Status" `
if [ -n "$I" ]
then
echo "sertbot already installed"
else
sudo apt-get update
sudo apt-get install software-properties-common
sudo add-apt-repository universe
sudo add-apt-repository ppa:certbot/certbot
sudo apt-get update
sudo apt-get install certbot python-certbot-nginx
fi
sudo certbot --nginx
fi
sudo systemctl daemon-reload
sudo systemctl start gu-$basedir
sudo systemctl enable gu-$basedir
| true
|
ccbf1d8769eec38e9ce7d4c0fc3ce92178e31eea
|
Shell
|
hdng/scripts
|
/.bashrc
|
UTF-8
| 1,392
| 3.328125
| 3
|
[] |
no_license
|
# .bashrc
# Per-user interactive bash setup: pulls in the system-wide rc file, the
# site-specific gapp settings and personal aliases, then configures the
# prompt and ls/grep coloring.
# source global definitions
if [ -f /etc/bashrc ]; then # redhat
. /etc/bashrc
elif [ -f /etc/bash.bashrc ]; then # debian
. /etc/bash.bashrc
fi
# source the gapp bashrc
if [ -f /gapp/noarch/share/login/gapp.bashrc ]; then
. /gapp/noarch/share/login/gapp.bashrc
fi
# source alias file
if [ -f ~/.bash_alias ]; then
. ~/.bash_alias
fi
# coloring prompt
#PS1='\e[33;1m\u@\h: \e[31m\W\e[0m\$ '
#PS1="\[\033[1;34m\][\$(date +%H%M)][\u@\h:\w]$\[\033[0m\] "
#PS1="\e[0;36m[\u@\h \W]\$ \e[m "
### Set color prompt
# Always-on toggle; set to anything else to fall back to the plain prompt.
color_prompt=yes
if [ "$color_prompt" = yes ]; then
#PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;35m\]\w\[\033[00m\]\$ '
# Green user@host, magenta cwd basename (\W), optional chroot tag.
PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;35m\]\W\[\033[00m\]\$ '
else
PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ '
fi
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
alias ls='ls --color=auto'
#alias dir='dir --color=auto'
#alias vdir='vdir --color=auto'
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
fi
# Render directories in magenta in ls listings.
export LS_COLORS=$LS_COLORS:'di=0;35:'
# some more ls aliases
alias ll='ls -alFh'
alias la='ls -A'
alias l='ls -CF'
| true
|
78d0c4f56adf2a44696524cd4897bbadb70971f1
|
Shell
|
megalithic/dotfiles
|
/config/sketchybar/plugins/yabai_spaces.sh
|
UTF-8
| 1,259
| 3.15625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Build one sketchybar item per yabai space: clone the space_template item,
# label it (with a trailing '*' when the space holds a window), and highlight
# the currently visible space on each display.
# TODO: Remove spaces that do not exist anymore
icons=("" "" "" "" "" "" "" "" "" "" "" "" "" "" "") #0 to 14
highlight_colors=("" "0xff9dd274" "0xfff69c5e" "0xff72cce8" "0xffeacb64" "0xff9dd274" "0xfff69c5e" "0xff72cce8" "0xffeacb64" "0xff9dd274" "0xfff69c5e" "0xff72cce8" "0xffeacb64" "0xff9dd274" "0xfff69c5e")
args=()
names=()
# One row per space: index, first window id, label, display, visibility.
QUERY="$(yabai -m query --spaces | jq -r '.[] | [.index, .windows[0], .label, .display, .["is-visible"]] | @sh')"
while read -r index window yabai_name display visible
do
  # @sh wraps strings in single quotes; strip them.
  NAME="$(echo "${yabai_name}" | tr -d "'")"
  # Spaces holding at least one window get a trailing '*'.
  if [ "${window}" = "null" ]; then
    label="$NAME"
  else
    label="$NAME*"
  fi
  # Unlabelled spaces fall back to their numeric index.
  if [ "$NAME" = "" ] || [ "$NAME" = " " ]; then
    NAME="${index}"
  fi
  names+=("$NAME")
  args+=(--clone "$NAME" space_template after \
        --set "$NAME" label="${label}" \
        icon="${icons[${index}]}" \
        icon.highlight_color="${highlight_colors[${index}]}" \
        associated_display="${display}" \
        icon.highlight="${visible}" \
        drawing=on)
done <<< "$QUERY"
# Fix: names were previously joined into a single string and expanded
# unquoted, which split labels containing spaces; use an array instead and
# quote every expansion so each label stays a single argument.
args+=(--reorder "${names[@]}")
sketchybar -m "${args[@]}" &> /dev/null
| true
|
f4577ac7898f90c36aa5ff4ec9656f1addad7d26
|
Shell
|
mirzaelahi/quest
|
/version
|
UTF-8
| 2,108
| 4.125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# FileName: version
# Description: Updates or prints the current version number from the cmake config.
#
# Copyright (C) 2014 K M Masum Habib <masum.habib@gmail.com>
#
# Created: 18 June 2014.

# Print usage and exit successfully.
function usage(){
    echo "Usage: version [x.yy.z]"
    echo "  When no option is given, it return version string."
    echo "  options: "
    echo "      x.yy.z - new version numbers."
    exit 0
}

# Files outside CMakeLists.txt that embed the full version string.
other_files="tests/test_dirac_band.py tests/test_dirac_transport.py"

major_keyword="set(QMICAD_MAJOR"
minor_keyword="set(QMICAD_MINOR"
patch_keyword="set(QMICAD_PATCH"
cmake_file="CMakeLists.txt"

# Extract the current version components from CMakeLists.txt.
version_major=$(grep "$major_keyword [0-9])" "$cmake_file" | sed 's/[^0-9]//g')
version_minor=$(grep "$minor_keyword [0-9][0-9])" "$cmake_file" | sed 's/[^0-9]//g')
version_patch=$(grep "$patch_keyword [0-9])" "$cmake_file" | sed 's/[^0-9]//g')

# Components not supplied on the command line keep their current values.
new_patch=$version_patch
new_minor=$version_minor
new_major=$version_major

if [[ $# -eq 0 ]]; then
    # No argument: just report the current version.
    echo "v$version_major.$version_minor.$version_patch"
elif [[ $# -eq 1 ]]; then
    # Split the requested version on '.': z / yy.z / x.yy.z are all accepted.
    IFS='.' read -ra vers <<< "$1"
    nvers=${#vers[@]}
    if [[ $nvers -eq 1 ]];then
        new_patch=${vers[0]}
    elif [[ $nvers -eq 2 ]];then
        new_patch=${vers[1]}
        new_minor=${vers[0]}
    elif [[ $nvers -eq 3 ]];then
        new_patch=${vers[2]}
        new_minor=${vers[1]}
        new_major=${vers[0]}
    else
        usage
    fi

    # Update CMakeLists.txt
    echo -n "Updating $cmake_file ... "
    cp "$cmake_file" "$cmake_file.IN"
    sed -e "s/$patch_keyword $version_patch)/$patch_keyword $new_patch)/g" -e "s/$minor_keyword $version_minor)/$minor_keyword $new_minor)/g" -e "s/$major_keyword $version_major)/$major_keyword $new_major)/g" < "$cmake_file.IN" > "$cmake_file"
    rm "$cmake_file.IN"
    echo "done."

    # update test scripts
    for file in $other_files; do
        echo -n "Updating $file ... "
        cp "$file" "$file.IN"
        sed -e "s/$version_major.$version_minor.$version_patch/$new_major.$new_minor.$new_patch/g" < "$file.IN" > "$file"
        rm "$file.IN"
        echo "done."
    done
    echo "Updated to v$new_major.$new_minor.$new_patch"
else
    # Fix: this branch previously called 'uasge' (typo), which failed with
    # "command not found" instead of printing the usage text.
    usage
fi
| true
|
1b380c49e889cadcf9eeed5c95acd033a041e68e
|
Shell
|
gauravv7/SoftwareEngineerChallenge
|
/run-project.sh
|
UTF-8
| 692
| 3.640625
| 4
|
[] |
no_license
|
# Resolve the java launcher to use: PATH first, then $JAVA_HOME/bin.
# Prints a diagnostic for each outcome and exits non-zero when no JVM exists.
locate_java() {
    if type -p java; then
        echo found java executable in PATH
        _java=java
        return 0
    fi
    if [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
        echo found java executable in JAVA_HOME
        _java="$JAVA_HOME/bin/java"
        return 0
    fi
    echo "no java found, please install to run this project"
    exit 1
}

# Extract the quoted version token from `java -version` stderr output.
java_version() {
    "$_java" -version 2>&1 | awk -F '"' '/version/ {print $2}'
}

locate_java

if [[ "$_java" ]]; then
    version=$(java_version)
    echo version "$version"
    # Lexicographic string comparison, adequate for 1.8 / 9 / 10 / 11 strings.
    # shellcheck disable=SC2072
    if [[ "$version" > "1.7" ]]; then
        echo version is more than 1.7
        echo Running project...
        cd paybaymax;
        mvn clean test;
    else
        echo version is less than 1.5, expecting atleast 1.7
    fi
fi
| true
|
f2ad49f1e64fb876357252123a85c320da3eea18
|
Shell
|
oarthursilva/stocks-market-job
|
/db/postgres/db-init.sh
|
UTF-8
| 1,538
| 3.25
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Idempotent Postgres bootstrap: make sure the $POSTGRES_DB role and database
# exist, then apply /home/db_startup.sql. Relies on the standard container
# variables POSTGRES_USER / POSTGRES_PASSWORD / POSTGRES_DB.
echo "Verifying DB $POSTGRES_DB presence ..."
# 't' on stdout means the database row exists; xargs trims the whitespace
# that psql's tuples-only (-t) mode emits.
result=`psql -v ON_ERROR_STOP=on -U "$POSTGRES_USER" -d postgres -t -c "SELECT true FROM pg_database WHERE datname='$POSTGRES_DB';" | xargs`
if [[ $result == "t" ]]; then
echo "$POSTGRES_DB DB already exists"
else
echo "$POSTGRES_DB DB does not exist, creating it ..."
echo "Verifying role $POSTGRES_DB presence ..."
result=`psql -v ON_ERROR_STOP=on -U "$POSTGRES_USER" -d postgres -t -c "SELECT 1 FROM pg_roles WHERE rolname='$POSTGRES_DB';" | xargs`
if [[ $result == "1" ]]; then
echo "$POSTGRES_DB role already exists"
else
echo "$POSTGRES_DB role does not exist, creating it ..."
psql -v ON_ERROR_STOP=on -U "$POSTGRES_USER" <<-EOSQL
CREATE ROLE $POSTGRES_DB WITH LOGIN ENCRYPTED PASSWORD '${POSTGRES_PASSWORD}';
EOSQL
echo "$POSTGRES_DB role successfully created"
fi
# Role and database share the same name; the role owns the database.
psql -v ON_ERROR_STOP=on -U "$POSTGRES_USER" <<-EOSQL
CREATE DATABASE $POSTGRES_DB WITH OWNER $POSTGRES_DB TEMPLATE template0 ENCODING 'UTF8';
GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DB TO $POSTGRES_DB;
EOSQL
result=$?
if [[ $result == "0" ]]; then
echo "$POSTGRES_DB DB successfully created"
else
echo "$POSTGRES_DB DB could not be created"
fi
fi
echo "not end"
# Apply schema/table definitions; the empty here-doc merely supplies stdin.
psql -v ON_ERROR_STOP=on -d postgres -U "$POSTGRES_USER" -f /home/db_startup.sql <<-EOSQL
EOSQL
result=$?
if [[ $result == "0" ]]; then
echo "$POSTGRES_DB tables and schemas successfully created"
else
echo "$POSTGRES_DB tables and schemas could not be created"
fi
| true
|
fbaa4aa8b2d8d9db1f284f8daeaa2839c6f4b18f
|
Shell
|
eugenepaniot/initscripts-vxlan
|
/ifdown-vxlan
|
UTF-8
| 348
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/bash
# ifdown helper for VXLAN interfaces: tears down the device named in the
# given ifcfg file, then runs the standard ethernet ifdown hooks.
. /etc/init.d/functions
cd /etc/sysconfig/network-scripts
. ./network-functions
# Pull in the global network settings when present.
[ -f ../network ] && . ../network
# $1 is the interface configuration name (ifcfg-<name>).
CONFIG=$1
# need_config/source_config come from network-functions: they locate the
# ifcfg file and export its variables (e.g. $DEVICE).
need_config "$CONFIG"
source_config
/sbin/ip link set dev "$DEVICE" down
/sbin/ip link del "$DEVICE"
/etc/sysconfig/network-scripts/ifdown-eth "$CONFIG"
/etc/sysconfig/network-scripts/ifdown-post "$CONFIG"
| true
|
6f9fa3c92b54b48afc1ac1bfde07bb185af383cc
|
Shell
|
kiranhegde/MeshPreprocessorUG3
|
/RunUg3Pre
|
UTF-8
| 692
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# Launcher for the UG3 mesh preprocessor: validates arguments, rebuilds the
# ug3preF90 binary from scratch and runs it under mpirun.
argu=$#
# Require the partition count ($1) and the optional-write keyword ($2).
if [ $argu -lt 2 ]
then
echo "Input arguments missing..."
echo
echo 'Input argument format'
echo
echo 'ug3pre No_of_partition optional_write '
echo
echo 'No_of_partition : no. of cpu cores or partions for parallel run > 0'
echo 'optional_write : write_test_vtk'
echo ' write_test_msh'
echo ' write_part_msh'
echo ' write_sparse_matrix'
echo
echo 'Eg. mpirun -np 5 ./ug3preF90 -c 6 -w write_test_vtk'
echo
exit
else
# Full rebuild before every run.
make clean; make -f makefile ;
clear
# NOTE(review): '-np 4' is hard-coded while $1 is the requested partition
# count -- confirm whether the MPI rank count should follow $1.
mpirun -np 4 ./ug3preF90 -c $1 -w $2
fi
echo $1 $2
| true
|
2c377ca896f9f938173cc1b6d10e4636bcf87102
|
Shell
|
masteringkubernetes/hub-spoke-appdev-terraform
|
/app/k8s/test-internal-lb.sh
|
UTF-8
| 3,474
| 3.8125
| 4
|
[] |
no_license
|
#!/bin/bash -x
# Deploy an internal-load-balancer NGINX ingress controller plus a demo
# "hello" app for whichever of the blue/green AKS node pools currently has
# schedulable nodes. Requires kubectl, helm and cluster credentials.
BLUE="false"
GREEN="false"
# Static private IPs for the internal load balancers, one per colour.
BLUE_IP=192.168.4.4
GREEN_IP=192.168.4.5
# Function that writes out Yaml for sample app
# NOTE(review): inside the unquoted here-doc below the shell expands '$1'
# in the rewrite-target annotation to this function's first argument (the
# colour); if a literal nginx capture-group '$1'/'$2' was intended it needs
# escaping (\$1) -- confirm against the ingress behaviour.
function writeYaml() {
COLOR=$1
cat <<EOF > nginx-$COLOR.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: nginx-$COLOR-dep
spec:
replicas: 1
selector:
matchLabels:
app: nginx-$COLOR
template:
metadata:
labels:
app: nginx-$COLOR
spec:
nodeSelector:
nodepoolcolor: $COLOR
nodepoolmode: user
containers:
- image: nginxdemos/hello
name: nginx-$COLOR
ports:
- containerPort: 80
resources:
requests:
memory: "64Mi"
cpu: "250m"
limits:
memory: "128Mi"
cpu: "350m"
---
apiVersion: v1
kind: Service
metadata:
name: nginx-$COLOR-svc
spec:
ports:
- port: 80
protocol: TCP
targetPort: 80
selector:
app: nginx-$COLOR
type: ClusterIP
---
apiVersion: networking.k8s.io/v1beta1
kind: Ingress
metadata:
name: nginx-$COLOR-ing
annotations:
kubernetes.io/ingress.class: $COLOR
nginx.ingress.kubernetes.io/ingress.class: $COLOR
nginx.ingress.kubernetes.io/use-regex: "true"
nginx.ingress.kubernetes.io/rewrite-target: /$1
nginx.ingress.kubernetes.io/ssl-redirect: "false"
spec:
rules:
- http:
paths:
- backend:
serviceName: nginx-$COLOR-svc
servicePort: 80
path: /(/|$)(.*)
- backend:
serviceName: nginx-$COLOR-svc
servicePort: 80
path: /nginx(/|$)(.*)
EOF
}
#Function for install nginx for each color nodepool
#This will be called based on whether there a any nodes
#that can be scheduled on for that color
installNginx() {
COLOR=$1
IP_ADDRESS=$2
# Use Helm to deploy an NGINX ingress controller
# The here-doc supplies extra values via '-f -': annotations that make Azure
# create an *internal* load balancer on the clusteringressservices subnet.
helm install ingress-$COLOR ingress-nginx/ingress-nginx --wait -f - \
--namespace nginx \
--set controller.ingressClass=$COLOR \
--set controller.replicaCount=2 \
--set controller.nodeSelector."beta\.kubernetes\.io/os"=linux \
--set defaultBackend.nodeSelector."beta\.kubernetes\.io/os"=linux \
--set controller.service.loadBalancerIP=$IP_ADDRESS \
--set controller.nodeSelector.nodepoolcolor=$COLOR << EOF
controller:
service:
annotations:
service.beta.kubernetes.io/azure-load-balancer-internal: "true"
service.beta.kubernetes.io/azure-load-balancer-internal-subnet: "clusteringressservices"
EOF
# Poll until the controller service reports the expected internal IP.
sleep 5; while echo && kubectl get service -n nginx --no-headers | grep $COLOR | grep -v -E "($IP_ADDRESS|<none>)"; do sleep 5; done
}
#MAIN PROGRAM
##############
# These are the blue nodes that can be scheduled
# grep's exit status (0 = at least one schedulable node) drives the flag.
kubectl get nodes -l nodepoolcolor=blue --no-headers | grep -v SchedulingDisabled
if [ $? == 0 ]; then
BLUE="true"
fi
# These are the green nodes that can be scheduled
kubectl get nodes -l nodepoolcolor=green --no-headers | grep -v SchedulingDisabled
if [ $? == 0 ]; then
GREEN="true"
fi
echo "GREEN POOL is $GREEN"
echo "BLUE POOL is $BLUE"
# Create a namespace for your ingress resources
kubectl create namespace nginx
# Add the ingress-nginx repository
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
if [ $BLUE = "true" ]; then
installNginx blue $BLUE_IP
writeYaml blue
kubectl apply -f nginx-blue.yaml
fi
if [ $GREEN = "true" ]; then
installNginx green $GREEN_IP
writeYaml green
kubectl apply -f nginx-green.yaml
fi
| true
|
e808d26caf1e652bd14c73e15239a8a48bf80b3f
|
Shell
|
lqshow/dotfiles
|
/.functions
|
UTF-8
| 871
| 3.125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Small collection of interactive shell helpers (directories, Docker, UUIDs,
# networking). Intended to be sourced from the login shell.

# Make the given directory path (parents included) and cd into it.
mkd() {
    mkdir -p "$@" && cd "$_";
}

# Docker helpers.
export DOCKER_IP=127.0.0.1

# Print the network IP address of the named container(s).
docker-ip() {
    docker inspect --format '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$@"
}

# Print "name - IP" for every container, running or stopped.
docker-ips() {
    docker inspect --format='{{.Name}} - {{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $(docker ps -aq)
}

# Lower-case UUID, dashes kept.
uuid-dash() {
    uuidgen | tr '[:upper:]' '[:lower:]'
}

# Lower-case UUID, dashes removed.
uuid() {
    uuidgen | tr -d '-' | tr '[:upper:]' '[:lower:]'
}

# Primary IPv4 address of the first ethernet interface (macOS).
ip() {
    ipconfig getifaddr en0
}

# port forwarding
fwd() {
sudo -S kubefwd svc -n enigma2 \
-l "app in (
enigma2-awsx-awsd, \
enigma2-accountx, \
enigma2-project-api, \
enigma2-fuwu-api, \
enigma2-datasetx, \
enigma2-accountx, \
enigma2-workflow-api, \
enigma2-report-reportx \
)" \
-c=$KUBECONFIG_PATH/cluster-184-context
}
| true
|
69c821a55311ab56d64d5a4c49519db5b37950ec
|
Shell
|
ychenracing/ourvaast
|
/vaast/VST/VST.sh
|
UTF-8
| 1,005
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
# Run VST over every per-concentration case cohort and the control cohort,
# generating and executing one VST command script per cohort, each producing
# a union ('U(0..n)') CDR file.
cd ../case
concentrations=$(ls)
for concentration in $concentrations
do
    cd $concentration
    # Number of samples in this cohort = files in the directory.
    number=$(ls|wc -l)
    prefix="VST -o 'U(0..$number)' -b hg19 "
    vatGvfNames=$(ls *.vat.gvf)
    space=" "
    # Append every .vat.gvf file name to the command line.
    for nameItem in $vatGvfNames
    do
        prefix="${prefix}""${nameItem}""${space}"
    done
    suffix="> case_"${concentration}"_output.cdr"
    prefix="${prefix}""${suffix}"
    # Materialise the command as a runnable script, then execute it.
    echo "#!/bin/sh" > VST_command_case_${concentration}.sh
    echo $prefix >> VST_command_case_${concentration}.sh
    chmod 777 VST_command_case_${concentration}.sh
    sh VST_command_case_${concentration}.sh
    cd ..
done
cd ../control
# Fix: this previously read 'ls|sc -l' -- 'sc' is not a command, so the
# count was always empty and the 'U(0..)' range was malformed.
controlNumber=$(ls|wc -l)
prefix="VST -o 'U(0..$controlNumber)' -b hg19 "
vatGvfNames=$(ls *.vat.gvf)
space=" "
for nameItem in $vatGvfNames
do
    prefix="${prefix}""${nameItem}""${space}"
done
suffix="> control_output.cdr"
prefix="${prefix}""${suffix}"
echo "#!/bin/sh" > VST_command_control.sh
echo $prefix >> VST_command_control.sh
chmod 777 VST_command_control.sh
sh VST_command_control.sh
cd ..
| true
|
f6052a75c969285db9bfbb636ed746109a444501
|
Shell
|
danielemidi/SecureAuditLog
|
/gen_keys.sh
|
UTF-8
| 670
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/bash
# Regenerate the RSA key pairs and self-signed certificates for the
# untrusted (U) and trusted (T) parties.

# Create a 2048-bit RSA key plus a one-year self-signed certificate / public
# key for one party.
#   $1 - key suffix (U or T)
#   $2 - certificate common name
make_keypair() {
    openssl genrsa -out "key$1.pem" 2048
    openssl req \
        -outform PEM -new -x509 -nodes -days 365 \
        -subj "/C=US/ST=Indiana/L=West Lafayette/CN=$2" \
        -key "key$1.pem" -pubkey -out "pub-key$1.pem"
}

echo Removing previous keys and certificates...
rm -f *.pem
echo Generating keys and certificates...
make_keypair U untrusted.purdue.com
make_keypair T trusted.purdue.com
echo Complete.
| true
|
2d618b10f0a0f7fd9a7c402f0c55fe8cbb82940f
|
Shell
|
guimauveb/tinyDLM
|
/install.sh
|
UTF-8
| 5,217
| 4.3125
| 4
|
[] |
no_license
|
#!/bin/bash
# Install script that will look for the required dependencies in the envionrment and will try to install them
# if not found. Once all the required dependencies are installed the script will build tinyDLM from source and
# launch the program.
# I only provide cURLpp source since it does not seem to be found on some Linux distributions using apt.
# macOS
# libcurl and libncurses will be installed via homebrew if homebrew is installed. Otherwise the installation
# will stop.
# Linux
# lbcurl and libncurses will be installed via apt on Linux Debian distributions. Otherwise the installation
# will stop.

# --- Platform / package-manager detection -------------------------------
# set some bools
linux=0
macos=0
# set some bools
apt=0
brew=0
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
    linux=1
    echo Using Linux...
    # temporary
    apt=1
    # check apt presence
elif [[ "$OSTYPE" == "darwin"* ]]; then
    macos=1
    echo Using macOS...
    echo checking if brew is installed...
    # 'brew' with no args prints its usage text; its presence in the captured
    # output is used as the installed check.
    brewout=$( { brew > tmp; } 2>&1 )
    if [[ $brewout == *"brew install FORMULA"* ]]; then
        brew=1
        echo brew is installed.
    else
        echo brew is not installed.
    fi
else
    echo tinyDLM can only be installed on Linux and macOS for now.
fi

mkdir -p app/downloads
mkdir build
cd build

# --- Compiler detection -------------------------------------------------
# comp will either be "gcc" or "clang"
comp="0"
# set some bools
gcc=0
clang=0
# check for gcc or clang presence
gccout=$( { gcc > tmp; } 2>&1 )
if [[ $gccout == *"no input files"* ]]; then
    echo "gcc is installed"
    comp="gcc"
else
    echo "gcc is not installed"
    echo "checking if clang is installed..."
fi
if [[ "${comp}" == "0" ]]
then
    clout=$( { clang -v > tmp; } 2>&1 )
    if [[ $clout == *"Apple clang"* ]]; then
        echo "clang is installed"
        comp="clang"
    else
        echo "clang is not installed"
    fi
fi

# --- Library detection / installation -----------------------------------
# Each library is probed by asking the compiler to link it; the "main"
# undefined-reference error means the library itself was found.
# set some bools
curl=0
curlpp=0
ncurses=0
if [ "${comp}" != "0" ]; then
    curlout=$( { ${comp} -lcurl > tmp; } 2>&1 )
    if [[ $curlout != *"main"* ]]; then
        echo "curl dev libraries are not installed"
        if [ "$brew" -eq 1 ]; then
            read -p "Do you want to install libcurl-dev (using homebrew)? [Y/n] " answer
            if [ "$answer" == "Y" ] || [ "$answer" == "y" ]; then
                brew install curl --with-openssl
                curl=1
                echo curl dev libraries are installed
            else
                echo Please install curl dev libraries
            fi
        elif [ "$apt" -eq 1 ]; then
            read -p "Do you want to install curl dev libraries (using apt)? [Y/n] " answer
            if [ "$answer" == "Y" ] || [ "$answer" == "y" ]; then
                sudo apt-get install libcurl4-openssl-dev
                curl=1
                echo curl dev libraries are installed.
            fi
        else
            echo Please install curl dev libraries.
        fi
    else
        curl=1
        echo "curl dev libraries are installed."
    fi
    # Build curlpp from source
    curlppout=$( { ${comp} -lcurlpp > tmp; } 2>&1 )
    if [[ $curlppout != *"main"* ]]; then
        echo "curlpp is not installed"
        read -p "Do you want to install curlpp ? Y/n " answer
        if [ "$answer" == "Y" ] || [ "$answer" == "y" ]; then
            cd ../dependencies/curlpp-0.8.1
            mkdir build
            cd build
            sudo cmake ../
            sudo make install
            cd ../../../build
            curlpp=1
            echo curlpp is installed
        fi
    else
        curlpp=1
        echo "curlpp is installed."
    fi
    ncursesout=$( { ${comp} -lncurses > tmp; } 2>&1 )
    # Fix: this previously tested $curlppout (copy-paste), so the ncurses
    # probe result was ignored and the install prompt could never trigger.
    if [[ $ncursesout != *"main"* ]]; then
        echo "ncurses dev libraries are not installed"
        if [ "$brew" -eq 1 ]; then
            read -p "Do you want to install ncurses dev libraries (using homebrew)? [Y/n] " answer
            if [ "$answer" == "Y" ] || [ "$answer" == "y" ]; then
                brew install ncurses
                ncurses=1
            else
                echo Please install ncurses.
            fi
        elif [ "$apt" -eq 1 ]; then
            read -p "Do you want to install ncurses dev libraries (using apt)? [Y/n] " answer
            if [ "$answer" == "Y" ] || [ "$answer" == "y" ]; then
                sudo apt-get install libncurses-dev
                ncurses=1
                echo ncurses dev libraries are installed
            fi
        else
            echo Please install ncurses dev libraries.
        fi
    else
        ncurses=1
        echo "ncurses dev libraries are installed."
    fi
else
    echo Please install all the required dependencies.
fi

# rm tmp files
rm tmp
rm ../tmp

# --- Build --------------------------------------------------------------
# if everything required is installed
if [ "${comp}" != "0" ] && [ "${curl}" -eq 1 ] && [ "${curlpp}" -eq 1 ] && [ "${ncurses}" -eq 1 ]; then
    # run cmake from build
    cmake ../
    # run make
    make
    if [ ! -f tinyDLM ]; then
        echo "Couldn't locate tinyDLM. Build certainly failed. Check cmake error logs."
    elif [ -f tinyDLM ]; then
        mv tinyDLM ../app/tinyDLM
        echo tinyDLM was successfully built in app/.
    fi
else
    echo Installation failed. Please install all required dependencies.
fi
rm -r ../build
| true
|
98fb9d20e5bc4c0f2a068cf06659862e403f4a01
|
Shell
|
data-intuitive/Portash
|
/tests/test_exit.sh
|
UTF-8
| 490
| 3.25
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Smoke-test porta.sh exit behaviour: run it once with the default config and
# once with a config that triggers errors, then report colour-coded OK/NOK.
# $? after each assignment is the status of the *last* command in the
# pipeline (yq), not porta.sh itself.
exit0=$(cat defaults.yaml | ../porta.sh 2> /dev/null| yq r - "output.result[1]")
exit0_ret=$?
exit1=$(cat errors.yaml | ../porta.sh 2> /dev/null| yq r - "output.result[1]")
exit1_ret=$?
# echo "$actual"
# echo "$expected"
echo -n "Exit code test... "
# Green OK / red NOK via tput; sgr0 resets the colour at the end.
if [[ $exit0_ret -eq 0 ]]; then
echo -n "$(tput setaf 2)OK "
else
echo -n "$(tput setaf 1)NOK "
fi
# NOTE(review): both branches test '-eq 0'; if errors.yaml is expected to
# make the pipeline fail, this second check likely wants '-ne 0' -- confirm
# the intended semantics.
if [[ $exit1_ret -eq 0 ]]; then
echo -n "$(tput setaf 2)OK"
else
echo -n "$(tput setaf 1)NOK"
fi
echo "$(tput sgr0)"
| true
|
ff02b7bf3d87cff41195d6f08ecb0e839331cbc3
|
Shell
|
nosseb/Ehwaz
|
/Scripts/Ubuntu/ManualSetup.sh
|
UTF-8
| 781
| 3.375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Provided under MIT Licence
# https://github.com/nosseb/Ehwaz
# One-shot EC2 game-server setup: installs steamcmd, mounts the persistent
# EBS volume via PersistentSetup.sh and links the steam config directory.
# Script version (informational only; not used below).
version=2.0
# $1 is forwarded to PersistentSetup.sh (volume identifier); refuse to
# continue without it.
if [ -z "$1" ]
then
printf "No argument supplied" >&2
exit 128
fi
# REQUIREMENTS
printf "\n\n\nInstalling requirements\n=======================\n\n"
sudo apt-get install steamcmd
printf "\n\nPath export\n===========\n"
# steamcmd lives in /usr/games; make it reachable from the steam user's shell.
echo "export PATH=\$PATH:/usr/games" | sudo tee -a /home/steam/.bashrc
# EBS PERSISTENT STORAGE
# mount
printf "\n\nMounting persistent storage\n===========================\n\n"
printf "#sudo nvme list\n"
sudo nvme list
# shellcheck disable=SC1091
source /home/ubuntu/PersistentSetup.sh "$1"
# links
printf "\n\nCreate Link\n"
sudo -u steam ln -s /home/steam/backup/config /home/steam/config
printf "#ls -lha /home/steam\n"
ls -lha /home/steam/
| true
|
25cb7ca60a19f34c805585f132eb56d1f2a513ce
|
Shell
|
bmustiata/dotfiles
|
/bin/keys.sh
|
UTF-8
| 818
| 3.5
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Switch the X keyboard layout. When yad is installed, pops up a chooser
# dialog; otherwise the layout code is taken from $1 (co|ro|de, anything
# else falls back to plain US).

# 'command -v' instead of 'which': POSIX, builtin, no extra process.
yad=$(command -v yad)

# Apply the layout named by $1 and report what was set.
setkeyboard() {
    if [[ "$1" == "co" ]]; then
        setxkbmap us -variant colemak -print | xkbcomp - "$DISPLAY"
        echo "Reset the keys as colemak"
        exit 0
    fi # [[ "$1" == "co" ]]

    if [[ "$1" == "ro" ]]; then
        setxkbmap ro -variant std -print | xkbcomp - "$DISPLAY"
        echo "Reset the keys as Romanian"
        exit 0
    fi # [[ "$1" == "ro" ]]

    if [[ "$1" == "de" ]]; then
        setxkbmap de -print | xkbcomp - "$DISPLAY"
        echo "Reset the keys as German"
        exit 0
    fi # [[ "$1" == "de" ]]

    # Fallback: plain US layout.
    setxkbmap us -print | xkbcomp - "$DISPLAY"
    echo "Reset the keys as US standard"
}

if [[ -n "$yad" ]]; then
    # Interactive path. Quote the selection so a cancelled/empty dialog
    # still calls setkeyboard with exactly one (empty) argument instead of
    # none -- previously the unquoted expansion dropped it entirely.
    KEYBOARD=$($yad --entry co ro de us)
    setkeyboard "$KEYBOARD"
    exit 0
fi # [[ "$yad" != "" ]]

setkeyboard "$1"
| true
|
31f28de9712f9e2a64d0f340eeb8a5670ce9c55b
|
Shell
|
murer/sandbox
|
/proj/pyrnet/net.sh
|
UTF-8
| 1,561
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash -xe
# PYRNET access-point management: create a NetworkManager wifi hotspot and
# manage iptables NAT redirects for captive-portal style interception.
# Usage: net.sh <wifi|remove|redirect>
pyrnet_name="PYRNET"
# First wifi device reported by NetworkManager.
pyrnet_device="$(nmcli -g TYPE,DEVICE d | grep '^wifi:' | cut -d':' -f2)"
# (Re)create the PYRNET access point: delete any previous connection with
# the same name, then add a shared WPA-PSK AP on 192.168.240.1/24.
cmd_wifi() {
nmcli -g NAME,UUID c | grep ^"$pyrnet_name": | cut -d':' -f2 | xargs nmcli con delete || true
sleep 1
nmcli con add type wifi ifname "$pyrnet_device" con-name "$pyrnet_name" autoconnect yes ssid "$pyrnet_name" -- \
802-11-wireless.mode ap \
802-11-wireless.band bg \
ipv4.method shared \
ipv4.addresses 192.168.240.1/24 \
ipv6.method ignore \
wifi-sec.key-mgmt wpa-psk \
wifi-sec.psk 'PYRNET78'
sleep 1
nmcli con up "$pyrnet_name"
}
# Drop every PREROUTING NAT rule targeting port 8080 (deleted in reverse
# rule-number order so earlier deletions don't shift later indices).
cmd_remove() {
iptables -t nat -v -L PREROUTING -n --line-number
sudo iptables -t nat -v -L PREROUTING -n --line-number | grep '8080$' | \
cut -d' ' -f1 | tac | while read k; do \
sudo iptables -t nat -D PREROUTING "$k";
done
}
# Enable forwarding and redirect HTTP/HTTPS from the AP device to the
# proxy at 172.17.0.2:8080 (a docker container address).
cmd_redirect() {
cmd_remove
sysctl -w net.ipv4.ip_forward=1
#iptables -t nat -A PREROUTING -i "$pyrnet_device" -p tcp --dport 80 -j REDIRECT --to-port 8080
#iptables -t nat -A PREROUTING -i "$pyrnet_device" -p tcp --dport 443 -j REDIRECT --to-port 8080
#ip6tables -t nat -A PREROUTING -i "$pyrnet_device" -p tcp --dport 80 -j REDIRECT --to-port 8080
#ip6tables -t nat -A PREROUTING -i "$pyrnet_device" -p tcp --dport 443 -j REDIRECT --to-port 8080
iptables -t nat -A PREROUTING -i "$pyrnet_device" -p tcp --dport 80 -j REDIRECT --to 172.17.0.2:8080
iptables -t nat -A PREROUTING -i "$pyrnet_device" -p tcp --dport 443 -j REDIRECT --to 172.17.0.2:8080
}
# Dispatch: first CLI argument selects the cmd_* function to run.
cd "$(dirname "$0")"; _cmd="${1?"cmd is required"}"; shift; "cmd_${_cmd}" "$@"
| true
|
6fcf5b025e6efcf73245a5bf673ae2cb312e6c65
|
Shell
|
imatharv/Shell-Programming-Constructs
|
/for-loop/power-of-two.sh
|
UTF-8
| 149
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
# Read n from the user and print "i 2^i" for i = 0..n, one pair per line.
read -p "Enter a number " n
value=1
i=0
while (( i <= $n )); do
    echo "$i $value"
    value=$(( 2 * value ))
    i=$(( i + 1 ))
done
| true
|
b2ae0921ec9feacafff44a5307879cf096bdac1e
|
Shell
|
liyang85/scripts-during-mage-linux-training
|
/insert_shell_script_header.sh
|
UTF-8
| 503
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
# insert_shell_script_header.sh: create script description automatically
#
# Usage: insert_shell_script_header.sh <script-name>
# Creates "<script-name>_by_liyang.sh" with a pre-filled header block, makes
# it executable, and opens it in vim ready for typing.

# Fail early instead of silently creating "_by_liyang.sh" when no name given.
fullFileName="${1:?usage: $0 <script-name>}_by_liyang.sh"

cat << _EOF_ > "${fullFileName}"
#!/bin/bash
#
#===== ===== ===== ===== ===== ===== ===== ===== ===== ===== ===== =====
# Filename: ${fullFileName}
# Description:
# Date: $(date +%F)
# Author: Li Yang
# Website: https://liyang85.com
#===== ===== ===== ===== ===== ===== ===== ===== ===== ===== ===== =====
_EOF_

chmod +x "${fullFileName}"
# Open the new script with the cursor at the end, already in insert mode.
vim + -c 'startinsert' "${fullFileName}"
| true
|
313ae0566c264f61fdc0d3374f436bd58dbaca0f
|
Shell
|
treblalee/hackbop2016Feb
|
/regress.sh
|
UTF-8
| 805
| 2.671875
| 3
|
[] |
no_license
|
#! /bin/bash
# Exercise the similarity-service endpoints and print each raw response.

#HOST='http://ec2-52-23-125-147.compute-1.amazonaws.com'
HOST='localhost'
PORT='8000'

HOME_ENDPOINT="$HOST:$PORT"
SIM_ENDPOINT="$HOME_ENDPOINT/similar?image=https://s3.amazonaws.com/treblalee.images/watches7.jpg"
SIM_PATH_ENDPOINT="$HOME_ENDPOINT/similarbypath?image=localS3Images/watches7.jpg"
BAD_INPUT_ENDPOINT="$HOME_ENDPOINT/similar?image=https://s3.amazonaws.com/treblalee.images/watches7.jpgblah"

# Announce a request, perform it, then leave two blank separator lines.
#   $1 - banner text   $2 - endpoint URL
probe() {
    echo "$1"
    curl -XGET "$2"
    echo
    echo
}

echo
probe "Result of http get request for $HOME_ENDPOINT" "$HOME_ENDPOINT"
probe "Result of http get request for $SIM_ENDPOINT" "$SIM_ENDPOINT"
probe "Result of http get request for $SIM_PATH_ENDPOINT" "$SIM_PATH_ENDPOINT"
probe "Result of bad input http get request for $BAD_INPUT_ENDPOINT" "$BAD_INPUT_ENDPOINT"
| true
|
a2191a13a7c8849bf17c9fbe2f3c1501ab761ece
|
Shell
|
kokkytos/DMSP-OLS-Forecast
|
/source/srtm.sh
|
UTF-8
| 2,201
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#NASA Shuttle Radar Topography Mission Global 3 arc second V003
#https://search.earthdata.nasa.gov/
# Pipeline: unzip SRTM .hgt tiles, mosaic them, reproject to Greek Grid
# (EPSG:2100) over the ROI from config.sh, derive hillshade and normalised
# slope rasters, export GIFs into output/ and clean up intermediates.
#read settings
source ../config.sh
cd ../data/SRTM
# unzip
unzip -o '*.zip'
# convert hgt to geotiff
for i in *.hgt
do
gdal_translate -r bilinear $i "${i}.hgt.tif"
done
# merge tiles
gdalbuildvrt mosaic.vrt *.tif
gdal_translate -co COMPRESS=LZW -co PREDICTOR=2 -co TILED=YES -r bilinear mosaic.vrt srtm.tif
# reproject
# RESOLUTION and the Xmin/Ymin/Xmax/Ymax extent come from config.sh.
gdalwarp -s_srs EPSG:4326 \
-t_srs EPSG:2100 \
-tr $RESOLUTION $RESOLUTION \
-te $Xmin $Ymin $Xmax $Ymax \
-r bilinear \
-overwrite \
-of GTiff srtm.tif srtm_2100.tif
# hillshade
gdaldem hillshade -az 45 -z 1.3 srtm_2100.tif srtm_hillshade_2100.tif
gdaldem slope -p srtm_2100.tif srtm_2100_slope.tif
#normalize slope
# Actual (computed, not metadata) min/max of the slope raster.
zMin=`gdalinfo -mm ./srtm_2100_slope.tif | sed -ne 's/.*Computed Min\/Max=//p'| tr -d ' ' | cut -d "," -f 1`
zMax=`gdalinfo -mm ./srtm_2100_slope.tif | sed -ne 's/.*Computed Min\/Max=//p'| tr -d ' ' | cut -d "," -f 2`
#zMin=`gdalinfo -stats ./srtm_2100_slope.tif | sed -ne 's/.*STATISTICS_MINIMUM=//p'`
#zMax=`gdalinfo -stats ./srtm_2100_slope.tif | sed -ne 's/.*STATISTICS_MAXIMUM=//p'`
echo Min:$zMin
echo Max:$zMax
# Scale slope to 0..100 relative to the maximum.
#gdal_calc.py -A srtm_2100_slope.tif --outfile=srtm_2100_slope_norm.tif --calc="100*((A-$zMin)/($zMax-$zMin))"
gdal_calc.py -A srtm_2100_slope.tif --outfile=srtm_2100_slope_norm.tif --calc="(100*A)/${zMax}" --NoDataValue=-32768
gdal_translate -of VRT srtm_hillshade_2100.tif srtm_hillshade_2100.vrt -a_nodata none # convert NODATA VALUE to NaN
gdal_calc.py -A srtm_hillshade_2100.vrt --outfile=srtm_hillshade_2100_tmp.tif --calc="nan_to_num(A)" #Replace NaN with zero, https://docs.scipy.org/doc/numpy/reference/generated/numpy.nan_to_num.html
# Remap zero-valued (former nodata) cells to 181 so they render distinctly.
gdal_calc.py -A srtm_hillshade_2100_tmp.tif --overwrite --outfile=srtm_hillshade_2100_tmp2.tif --calc="181*equal(A,0)+A*not_equal(A,0)"
# export to gif
gdal_translate -of GIF srtm_2100_slope_norm.tif roi.slope.gif
gdal_translate -of GIF srtm_hillshade_2100_tmp2.tif roi.hillshade.gif
cp *.gif ../../output
cp *.xml ../../output
rm *.gif
rm *.xml
rm *.vrt
# clean up
rm -f *.hgt.tif mosaic.vrt srtm.tif srtm_2100.tif srtm_hillshade_2100.tif *.tif *.hgt
| true
|
f4f21906cd1ebf0f95733b20458d3d96c084e10e
|
Shell
|
chef/bento
|
/packer_templates/scripts/suse/repositories_suse.sh
|
UTF-8
| 595
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh -eux
# Replace the install-media openSUSE repo with the canonical Leap oss/non-oss
# distribution and update repositories for the detected release, then refresh.
# e.g. VERSION="15.2 ..." in /etc/os-release -> "15.2" (quotes stripped,
# first whitespace-separated word).
version=$(grep VERSION= /etc/os-release | cut -f2 -d\" | cut -f1 -d\ )
# Drop the leftover installation-source repository.
zypper removerepo "openSUSE-${version}-0"
zypper ar http://download.opensuse.org/distribution/leap/"${version}"/repo/oss/ openSUSE-Leap-"${version}"-Oss
zypper ar http://download.opensuse.org/distribution/leap/"${version}"/repo/non-oss/ openSUSE-Leap-"${version}"-Non-Oss
zypper ar http://download.opensuse.org/update/leap/"${version}"/oss/ openSUSE-Leap-"${version}"-Update
zypper ar http://download.opensuse.org/update/leap/"${version}"/non-oss/ openSUSE-Leap-"${version}"-Update-Non-Oss
zypper refresh
| true
|
692d4edc69da72729173e5571a95770634d4f1f5
|
Shell
|
insight-decentralized-consensus-lab/ICON-P-Rep-Node
|
/modules/ec2/data/attach-data-volume.sh
|
UTF-8
| 1,123
| 3.375
| 3
|
[
"Apache-2.0"
] |
permissive
|
# Mount EBS
# Source: https://github.com/hashicorp/terraform/issues/2740#issuecomment-375144680
# NOTE(review): this is a Terraform user-data template fragment -- the
# '$${...}' sequences are Terraform escapes that render as '${...}' in the
# final script; do not "fix" them here.
# Instance identity from the EC2 metadata service.
EC2_INSTANCE_ID=$(wget -q -O - http://169.254.169.254/latest/meta-data/instance-id || die \"wget instance-id has failed: $?\")
EC2_AVAIL_ZONE=$(wget -q -O - http://169.254.169.254/latest/meta-data/placement/availability-zone || die \"wget availability-zone has failed: $?\")
# Region = availability zone with the trailing letter stripped (us-east-1a -> us-east-1).
EC2_REGION="`echo \"$EC2_AVAIL_ZONE\" | sed -e 's:\([0-9][0-9]*\)[a-z]*\$:\\1:'`"
#############
# EBS VOLUME
#
# note: /dev/sdh => /dev/xvdh
# see: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html
#############
# wait for EBS volume to attach
# Poll the EC2 API until the volume mapped to /dev/sdh reports "attached".
DATA_STATE="unknown"
until [ $DATA_STATE == "attached" ]; do
DATA_STATE=$(aws ec2 describe-volumes \
--region $${EC2_REGION} \
--filters \
Name=attachment.instance-id,Values=$${EC2_INSTANCE_ID} \
Name=attachment.device,Values=/dev/sdh \
--query Volumes[].Attachments[].State \
--output text)
echo 'waiting for volume...'
sleep 5
done
# On nitro instances the attached EBS volume surfaces as an NVMe device.
sudo file -s /dev/nvme1n1
sudo mkdir /opt/data
sudo mount /dev/nvme1n1 /opt/data
echo 'EBS volume attached!'
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.