content stringlengths 4 1.04M | lang stringclasses 358
values | score int64 0 5 | repo_name stringlengths 5 114 | repo_path stringlengths 4 229 | repo_licenses listlengths 1 8 |
|---|---|---|---|---|---|
#!/usr/bin/env bash
# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script generates mock files using mockgen.
# Usage: `hack/update-mocks.sh`.
# Fail fast: abort on any error, on use of an unset variable, and on a
# failure anywhere in a pipeline.
set -o errexit
set -o nounset
set -o pipefail
# Repository root, resolved relative to this script's own location.
KUBE_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"
# Explicitly opt into go modules, even though we're inside a GOPATH directory
export GO111MODULE=on
# Scratch directory for per-file build-tag snippets; removed by the EXIT trap.
_tmp="${KUBE_ROOT}/_tmp_build_tag_files"
mkdir -p "${_tmp}"
# Remove the build-tag scratch directory and the intermediate tempfile.
# Registered as an EXIT trap so cleanup happens even on early failure.
cleanup() {
  rm -rf "${_tmp}"
  rm -f tempfile
}
# Clean up scratch files no matter how the script exits.
trap cleanup EXIT
kube::golang::verify_go_version
echo 'installing mockgen'
# Install mockgen from the hack/tools directory so the version comes from that
# module's dependency pins rather than whatever happens to be on PATH.
pushd "${KUBE_ROOT}/hack/tools" >/dev/null
go install github.com/golang/mock/mockgen
popd >/dev/null
# find_files prints every *.go file under the repo root, pruning build output,
# VCS metadata, vendored/third-party code, and generated bindata files.
find_files() {
find . -not \( \
\( \
-wholename './output' \
-o -wholename './.git' \
-o -wholename './_output' \
-o -wholename './_gopath' \
-o -wholename './release' \
-o -wholename './target' \
-o -wholename '*/third_party/*' \
-o -wholename '*/vendor/*' \
-o -wholename './staging/src/k8s.io/client-go/*vendor/*' \
-o -wholename '*/bindata.go' \
\) -prune \
\) -name '*.go'
}
# Run `go generate` on every Go file that carries a mockgen generate directive.
cd "${KUBE_ROOT}"
echo 'executing go generate command on below files'
for IFILE in $(find_files | xargs grep --files-with-matches -e '//go:generate mockgen'); do
# Scratch file used to hand the interface file's build tag to the generator.
temp_file_name=$(mktemp --tmpdir="${_tmp}")
# search for a `// +build` tag used in the file (empty when none is present)
build_tag_string=$(grep -o '+build.*$' "$IFILE") || true
# if the file DOES have a build tag string
if [ -n "$build_tag_string" ]
then
# write the build tag in the temp file
echo -n "$build_tag_string" > "$temp_file_name"
# a +build tag is defined in the interface file; expose it via BUILD_TAG_FILE
BUILD_TAG_FILE=$temp_file_name go generate -v "$IFILE"
else
# no +build tag is defined in the interface file
go generate -v "$IFILE"
fi
done
# Re-attach build tags and the boilerplate copyright header to every mock file
# that `go generate` just (re)wrote, since mockgen drops them.
# get the changed mock files
files=$(git diff --name-only)
for file in $files; do
# never rewrite this script itself
if [ "$file" == "hack/update-mocks.sh" ]; then
continue
fi
# search for build tags used in file, e.g.:
# //go:build !providerless
# // +build !providerless
go_build_tag_string=$(grep -o 'go:build.*$' "$file") || true
build_tag_string=$(grep -o '+build.*$' "$file") || true
new_header=''
# if the file has both headers
if [ -n "$build_tag_string" ] && [ -n "$go_build_tag_string" ]
then
# create a new header with the build string and the copyright text
new_header=$(echo -e "//""$go_build_tag_string""\n""//" "$build_tag_string""\n" | cat - hack/boilerplate/boilerplate.generatego.txt)
# skip the first two lines (both build tag lines) of the file
tail -n +3 "$file" > tempfile
fi
# if the file has only // +build !providerless header
if [ -n "$build_tag_string" ] && [ -z "$go_build_tag_string" ]
then
# create a new header with the build string and the copyright text
new_header=$(echo -e "//" "$build_tag_string""\n" | cat - hack/boilerplate/boilerplate.generatego.txt)
# skip the first line (build tag) of the file
tail -n +2 "$file" > tempfile
fi
# if the file has only //go:build !providerless header
if [ -z "$build_tag_string" ] && [ -n "$go_build_tag_string" ]
then
# create a new header with the build string and the copyright text
new_header=$(echo -e "//""$go_build_tag_string""\n" | cat - hack/boilerplate/boilerplate.generatego.txt)
# skip the first line (build tag) of the file
tail -n +2 "$file" > tempfile
fi
# if a new header was generated
if [ -n "$new_header" ]
then
# write the newly generated header plus the remaining body back to the file
echo -e "$new_header" | cat - tempfile > "$file"
else
# no build tag: simply insert the boilerplate at the top
cat hack/boilerplate/boilerplate.generatego.txt "$file" > tempfile && \
mv tempfile "$file"
fi
done
| Shell | 5 | 767829413/kubernetes | hack/update-mocks.sh | [
"Apache-2.0"
] |
=== Description ===
Some commands for Sublime Text 3 (NOT 2!) to list shortcut keys / preferences
etc in the QuickPanel and navigate to edit location on selection.
=== 2000 Words ===
{{http://ndudfield.com/zencoding/old/editprefs-settings.gif}}
{{http://ndudfield.com/zencoding/old/editprefs.gif}}
=== How to insert a binding repr in the Quick Panel ===
Copy from the Default.sublime-keymap.template the last line that contains a
multitude of bindings with `insert_binding_repr` and place it in your User
keymap.
So how would you insert alt+q? You can think of it like this:
* PRESS `alt` and hold it down
* PRESS `=` while thinking PLUS then lift all fingers
* PRESS `q`
Note that on a standard US keyboard `=` is on the same key as `+` (plus). You
can't bind to just a modifier like alt, so plus seems a reasonable key/mnemonic.
In sublime `{"keys": [...]}` terms the previous example would be:
* PRESS `["alt+="]`
* PRESS `["q"]`
The following bindings show how it works.
Note the `expecting_binding_repr_mode` key.
{{{
{"args": {"val": "alt"},
"command": "insert_binding_repr",
"context": [{"key": "overlay_visible", "operand": true, "operator": "equal"},
{"key": "setting.expecting_binding_repr_mode",
"operand": false,
"operator": "equal"}],
"keys": ["alt+="]}
}}}
{{{
{"args": {"val": "q"},
"command": "insert_binding_repr",
"context": [{"key": "overlay_visible", "operand": true, "operator": "equal"},
{"key": "setting.expecting_binding_repr_mode",
"operand": true,
"operator": "equal"}],
"keys": ["q"]}
}}}
So how would you insert ctrl+alt+q? (In sublime terms)
* PRESS `ctrl+alt+=`
* PRESS `q`
=== Help? You can't insert_binding_repr for down|enter|up ? ===
Unfortunately, some bindings don't work as the second key due to the quickpanel
swallowing them:
* <enter>
* <up>
* <down>
* and others ...
The workaround is to type the first letter of the key you desire, eg:
* type `e` for <enter> to insert `alt+e` then type `nter`
* type `u` for <up> to insert `alt+u` then type `p`
* type `d` for <down> to insert `alt+d` then type `own`
=== Command Palette ===
{{{
[
{ "caption": "Edit Preference: List Settings", "command": "list_settings"},
{ "caption": "Edit Preference: List Plugins Commands", "command": "list_commands" },
{ "caption": "Edit Preference: List Shortcut Keys", "command": "list_shortcut_keys"},
{ "caption": "Edit Preference: List Menu Bindings", "command": "list_menu_bindings"},
{ "caption": "Edit Preference: Theme",
"command": "edit_package_files",
"args": {"pref_type": "sublime-theme"}},
{ "caption": "Edit Preference: sublime-completions",
"command": "edit_package_files",
"args": {"pref_type": "sublime-completions"}},
{ "caption": "Edit Preference: sublime-build",
"command": "edit_package_files",
"args": {"pref_type": "sublime-build"}},
{ "caption": "Edit Preference: sublime-mousemap",
"command": "edit_package_files",
"args": {"pref_type": "sublime-mousemap"}},
{ "caption": "Edit Preference: sublime-menu",
"command": "edit_package_files",
"args": {"pref_type": "sublime-menu"}},
{ "caption": "Edit Preference: tmTheme|colorscheme",
"command": "edit_package_files",
"args": {"pref_type": ".*\\.(tmTheme|stTheme)$"}},
{ "caption": "Edit Preference: tmLanguage|syntax|grammar",
"command": "edit_package_files",
"args": {"pref_type": ".*\\.((tm|st)Language)$"}},
{ "caption": "Edit Preference: sublime-commands",
"command": "edit_package_files",
"args": {"pref_type": "sublime-commands"}}
]
}}}
=== TODO ===
Set cyclic tab key for auto complete
| Creole | 3 | sublimator/EditPreferences | README.creole | [
"MIT"
] |
/*
* Copyright 2010-2021 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.incremental.storage
import org.jetbrains.kotlin.incremental.dumpCollection
import java.io.File
// File-backed map from a Kotlin source file to the JS output files produced from it,
// used by incremental compilation bookkeeping. Paths are stored as strings via
// [pathConverter] so the on-disk form is independent of the local filesystem layout.
class SourceToJsOutputMap(storageFile: File, private val pathConverter: FileToPathConverter) : BasicStringMap<Collection<String>>(storageFile, StringCollectionExternalizer) {
// Human-readable rendering of a stored value, used for debug dumps.
override fun dumpValue(value: Collection<String>): String = value.dumpCollection()
// Appends a single output file to the collection recorded for [key].
@Synchronized
fun add(key: File, value: File) {
storage.append(pathConverter.toPath(key), listOf(pathConverter.toPath(value)))
}
// Returns the outputs recorded for [sourceFile], or an empty collection when absent.
// NOTE(review): unlike the mutators, this read is not @Synchronized -- confirm intended.
operator fun get(sourceFile: File): Collection<File> =
storage[pathConverter.toPath(sourceFile)]?.map { pathConverter.toFile(it) } ?: setOf()
// Replaces the recorded outputs for [key]; an empty collection removes the entry.
@Synchronized
operator fun set(key: File, values: Collection<File>) {
if (values.isEmpty()) {
remove(key)
return
}
storage[pathConverter.toPath(key)] = values.map { pathConverter.toPath(it) }
}
// Drops the entry for [key] entirely.
@Synchronized
fun remove(key: File) {
storage.remove(pathConverter.toPath(key))
}
// Removes the given output files from [key]'s entry; delegates to set(), which
// drops the entry when the remaining collection is empty.
@Synchronized
fun removeValues(key: File, removed: Set<File>) {
val notRemoved = this[key].filter { it !in removed }
this[key] = notRemoved
}
} | Kotlin | 4 | Mu-L/kotlin | build-common/src/org/jetbrains/kotlin/incremental/storage/SourceToJsOutputMap.kt | [
"ECL-2.0",
"Apache-2.0"
] |
#!/bin/csh
#This script can be used to generate a web page to compare histograms from
#two input root files produced using the EDAnalyzers in RecoEgamma/Examples,
#by running one of:
#
#
#
# "Validation/RecoEgamma/test/PhotonValidator_cfg.py
#
# The default list of histograms (configurable) is based on version VXX-XX-XX
# of Validation/RecoEgamma
#
#Two files are created by this script: validation.C and validation.html.
#validation.C should be run inside root to greate a set of gif images
#which can then be viewed in a web browser using validation.html.
#=============BEGIN CONFIGURATION=================
setenv ANALYZERNAME1 PhotonValidator
setenv ANALYZERNAME2 pfPhotonValidator
#setenv TYPE GEDPhotons
setenv TYPE GEDPhotons
setenv COMPAREWITH Photons
setenv CMSSWver1 7_0_0
setenv RELEASE 7_0_0
setenv PRERELEASE pre11
setenv LHCENERGY 13
setenv UPGRADE True
setenv PU True
setenv PUlevel 25ns
#setenv FULLGLOBALTAG POSTLS162_V4_OldEG-v1
setenv FULLGLOBALTAG PU${PUlevel}_POSTLS162_V4-v1
setenv RELEASE ${RELEASE}_${PRERELEASE}
#setenv RELEASE ${RELEASE}
#setenv WorkDir1 /afs/cern.ch/user/n/nancy/scratch0/CMSSW/test/CMSSW_${CMSSWver1}/src/Validation/RecoEgamma/test
setenv WorkDir1 /afs/cern.ch/user/n/nancy/scratch0/CMSSW/test/CMSSW_${CMSSWver1}_${PRERELEASE}/src/Validation/RecoEgamma/test
#setenv WorkDir1 /afs/cern.ch/user/n/nancy/scratch0/CMSSW/test/CMSSW_${CMSSWver1}/src/Validation/RecoEgamma/test
#Name of sample (affects output directory name and htmldescription only)
setenv HISTOPATHNAME1_Efficiencies DQMData/Run\ 1/EgammaV/Run\ summary/${ANALYZERNAME1}/Efficiencies
setenv HISTOPATHNAME1_Photons DQMData/Run\ 1/EgammaV/Run\ summary/${ANALYZERNAME1}/Photons
setenv HISTOPATHNAME1_Conversions DQMData/Run\ 1/EgammaV/Run\ summary/${ANALYZERNAME1}/ConversionInfo
setenv HISTOPATHNAME2_Efficiencies DQMData/Run\ 1/EgammaV/Run\ summary/${ANALYZERNAME2}/Efficiencies
setenv HISTOPATHNAME2_Photons DQMData/Run\ 1/EgammaV/Run\ summary/${ANALYZERNAME2}/Photons
setenv HISTOPATHNAME2_Conversions DQMData/Run\ 1/EgammaV/Run\ summary/${ANALYZERNAME2}/ConversionInfo
#setenv SAMPLE SingleGammaPt10
#setenv SAMPLE SingleGammaPt35
setenv SAMPLE H130GGgluonfusion
#setenv SAMPLE PhotonJets_Pt_10
#setenv SAMPLE QCD_Pt_80_120
#==============END BASIC CONFIGURATION==================
#Input root trees for the two cases to be compared
if ($SAMPLE == SingleGammaPt10) then
setenv ROOTFILE ${WorkDir1}/DQM_V0001_R000000001__RelValSingleGammaPt10_UP15__CMSSW_${RELEASE}-${FULLGLOBALTAG}__DQM.root
else if ($SAMPLE == SingleGammaPt35) then
setenv ROOTFILE ${WorkDir1}/DQM_V0001_R000000001__RelValSingleGammaPt35__CMSSW_${RELEASE}-${FULLGLOBALTAG}__DQM.root
else if ($SAMPLE == H130GGgluonfusion) then
setenv ROOTFILE ${WorkDir1}/DQM_V0001_R000000001__RelValH130GGgluonfusion_${LHCENERGY}__CMSSW_${RELEASE}-${FULLGLOBALTAG}__DQM.root
#setenv ROOTFILE ${WorkDir1}/DQM_V0001_R000000001__RelValH130GGgluonfusion__CMSSW_${RELEASE}-${FULLGLOBALTAG}__DQM.root
#setenv ROOTFILE ${WorkDir1}/DQM_V0001_R000000001__Global__CMSSW_X_Y_Z__RECO_2K.root
else if ($SAMPLE == PhotonJets_Pt_10) then
setenv ROOTFILE ${WorkDir1}/DQM_V0001_R000000001__PhotonJets_Pt_10__CMSSW_${RELEASE}-${FULLGLOBALTAG}__DQM.root
else if ($SAMPLE == QCD_Pt_80_120) then
endif
#Location of output. The default will put your output in:
#http://cmsdoc.cern.ch/Physics/egamma/www/validation/
setenv CURRENTDIR $PWD
setenv OUTPATH /afs/cern.ch/cms/Physics/egamma/www/validation
cd $OUTPATH
#setenv RELEASE {$RELEASE}_OldEG
#setenv RELEASE {$RELEASE}_OldTrk
if (! -d $RELEASE) then
mkdir $RELEASE
endif
setenv OUTPATH $OUTPATH/$RELEASE
cd $OUTPATH
if (! -d ${TYPE}) then
mkdir ${TYPE}
endif
setenv OUTPATH $OUTPATH/${TYPE}
cd $OUTPATH
if (! -d vs${COMPAREWITH} ) then
mkdir vs${COMPAREWITH}
endif
setenv OUTPATH $OUTPATH/vs${COMPAREWITH}
if ( $UPGRADE == True && $PU == False ) then
setenv OUTDIR $OUTPATH/${SAMPLE}_${LHCENERGY}TeV
else if ( $UPGRADE == True && $PU == True ) then
setenv OUTDIR $OUTPATH/${SAMPLE}_${LHCENERGY}TeV_PU${PUlevel}
else
setenv OUTDIR $OUTPATH/${SAMPLE}
endif
if (! -d $OUTDIR) then
cd $OUTPATH
mkdir $OUTDIR
cd $OUTDIR
mkdir gifs
endif
cd $OUTDIR
#The list of histograms to be compared for each TYPE can be configured below:
if ( $TYPE == oldpfPhotons || $TYPE == GEDPhotons || $TYPE == fullGEDPhotons ) then
cat > efficiencyForPhotons <<EOF
recoEffVsEta
recoEffVsPhi
recoEffVsEt
deadChVsEta
deadChVsPhi
deadChVsEt
EOF
# gamgamMassAll
# gamgamMassBarrel
# gamgamMassEndcap
# gamgamMassNoConvAll
# gamgamMassNoConvBarrel
# gamgamMassNoConvEndcap
# gamgamMassConvAll
# gamgamMassConvBarrel
# gamgamMassConvEndcap
cat > scaledhistosForPhotons <<EOF
nOfflineVtx
scEta
scPhi
scEAll
scEtAll
phoEta
phoPhi
phoDEta
phoDPhi
phoEAll
phoEtAll
eResAll
eResBarrel
eResEndcap
eResunconvAll
eResunconvBarrel
eResunconvEndcap
eResconvAll
eResconvBarrel
eResconvEndcap
isoTrkSolidConeDR04All
isoTrkSolidConeDR04Barrel
isoTrkSolidConeDR04Endcap
nTrkSolidConeDR04All
nTrkSolidConeDR04Barrel
nTrkSolidConeDR04Endcap
r9Barrel
r9Endcap
r1Barrel
r1Endcap
r2Barrel
r2Endcap
sigmaIetaIetaBarrel
sigmaIetaIetaEndcap
hOverEAll
hOverEBarrel
hOverEEndcap
newhOverEAll
newhOverEBarrel
newhOverEEndcap
hcalTowerSumEtConeDR04Barrel
hcalTowerSumEtConeDR04Endcap
hcalTowerBcSumEtConeDR04Barrel
hcalTowerBcSumEtConeDR04Endcap
ecalRecHitSumEtConeDR04Barrel
ecalRecHitSumEtConeDR04Endcap
EOF
cat > scaledhistosForPhotonsLogScale <<EOF
hOverEAll
hOverEBarrel
hOverEEndcap
newhOverEAll
newhOverEBarrel
newhOverEEndcap
hcalTowerSumEtConeDR04Barrel
hcalTowerSumEtConeDR04Endcap
hcalTowerBcSumEtConeDR04Barrel
hcalTowerBcSumEtConeDR04Endcap
ecalRecHitSumEtConeDR04Barrel
ecalRecHitSumEtConeDR04Endcap
r9Barrel
r9Endcap
r1Barrel
r1Endcap
r2Barrel
r2Endcap
sigmaIetaIetaAll
sigmaIetaIetaBarrel
sigmaIetaIetaEndcap
EOF
cat > unscaledhistosForPhotons <<EOF
pEResVsR9Barrel
pEResVsR9Endcap
scpEResVsR9Barrel
scpEResVsR9Endcap
pEResVsEtAll
pEResVsEtBarrel
pEResVsEtEndcap
pEResVsEtaAll
pEResVsEtaUnconv
pEResVsEtaConv
pEcalRecHitSumEtConeDR04VsEtaAll
pEcalRecHitSumEtConeDR04VsEtBarrel
pEcalRecHitSumEtConeDR04VsEtEndcap
pHcalTowerSumEtConeDR04VsEtaAll
pHcalTowerSumEtConeDR04VsEtBarrel
pHcalTowerSumEtConeDR04VsEtEndcap
pHcalTowerBcSumEtConeDR04VsEtaAll
pHcalTowerBcSumEtConeDR04VsEtBarrel
pHcalTowerBcSumEtConeDR04VsEtEndcap
pHoverEVsEtaAll
pHoverEVsEtAll
pnewHoverEVsEtaAll
pnewHoverEVsEtAll
EOF
cat > efficiencyForConvertedPhotons <<EOF
convEffVsEtaTwoTracks
convEffVsPhiTwoTracks
convEffVsRTwoTracks
convEffVsZTwoTracks
convEffVsEtTwoTracks
convEffVsEtaTwoTracksAndVtxProbGT0
convEffVsRTwoTracksAndVtxProbGT0
EOF
cat > scaledhistosForConvertedPhotons <<EOF
convEta2
convPhi
convEResAll
convEResBarrel
convEResEndcap
PoverEtracksAll
PoverEtracksBarrel
PoverEtracksEndcap
convPtResAll
convPtResBarrel
convPtResEndcap
convVtxdR
convVtxdR_barrel
convVtxdR_endcap
convVtxdZ
convVtxdZ_barrel
convVtxdZ_endcap
convVtxdX
convVtxdX_barrel
convVtxdX_endcap
convVtxdY
convVtxdY_barrel
convVtxdY_endcap
mvaOutAll
mvaOutBarrel
mvaOutEndcap
EOF
cat > scaledhistosForConvertedPhotonsLogScale <<EOF
EoverPtracksAll
EoverPtracksBarrel
EoverPtracksEndcap
vtxChi2ProbAll
vtxChi2ProbBarrel
vtxChi2ProbEndcap
EOF
cat > unscaledhistosForConvertedPhotons <<EOF
pEoverEtrueVsEtaAll
pEoverPVsEtaAll
pEoverPVsRAll
pConvVtxdRVsR
pConvVtxdRVsEta
pConvVtxdXVsX
pConvVtxdYVsY
pConvVtxdZVsZ
EOF
cat > 2dhistosForConvertedPhotons <<EOF
convVtxRvsZAll
EOF
cat > projectionsForConvertedPhotons <<EOF
convVtxRvsZBarrel
convVtxRvsZEndcap
EOF
cat > fakeRateForConvertedPhotons <<EOF
convFakeRateVsEtaTwoTracks
convFakeRateVsPhiTwoTracks
convFakeRateVsRTwoTracks
convFakeRateVsZTwoTracks
convFakeRateVsEtTwoTracks
EOF
cat > scaledhistosForTracks <<EOF
tkChi2AllTracks
hTkPtPullAll
hTkPtPullBarrel
hTkPtPullEndcap
hDPhiTracksAtVtxAll
hDCotTracksAll
zPVFromTracksAll
zPVFromTracksBarrel
zPVFromTracksEndcap
dzPVFromTracksAll
dzPVFromTracksBarrel
dzPVFromTracksEndcap
EOF
cat > unscaledhistosForTracks <<EOF
h_nHitsVsEtaAllTracks
h_nHitsVsRAllTracks
pChi2VsEtaAll
pChi2VsRAll
pDCotTracksVsEtaAll
pDCotTracksVsRAll
pdzPVVsR
EOF
endif
#=================END CONFIGURATION=====================
# Start a fresh validation.C ROOT macro: open an unnamed-macro block and load
# the input DQM file; N counts the temp snippet files appended below.
if (-e validation.C) rm validation.C
touch validation.C
cat > begin.C <<EOF
{
TFile *file = TFile::Open("$ROOTFILE");
EOF
cat begin.C >>& validation.C
rm begin.C
setenv N 1
foreach i (`cat efficiencyForPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
c$i->Divide(1,2);
c$i->cd(1);
file->cd("$HISTOPATHNAME1_Efficiencies");
$i->SetStats(0);
int nBins = $i->GetNbinsX();
float xMin=$i->GetBinLowEdge(1);
float xMax=$i->GetBinLowEdge(nBins)+$i->GetBinWidth(nBins);
TH1F* hold=new TH1F("hold"," ",nBins,xMin,xMax);
hold=$i;
if ( $i==deadChVsEta || $i==deadChVsPhi || $i==deadChVsEt ) {
$i->SetMinimum(0.);
$i->SetMaximum(0.2);
} else if ( $i==recoEffVsEt ) {
$i->GetXaxis()->SetRangeUser(0.,200.);
} else {
$i->SetMinimum(0.);
$i->SetMaximum(1.1);
}
$i->SetLineColor(kPink+8);
$i->SetMarkerColor(kPink+8);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw();
file->cd("$HISTOPATHNAME2_Efficiencies");
$i->SetStats(0);
$i->SetMinimum(0.);
$i->SetMaximum(1.1);
TH1F* hnew=new TH1F("hnew"," ",nBins,xMin,xMax);
hnew=$i;
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw("same");
c$i->cd(2);
TH1F* ratio=new TH1F("ratio"," ",nBins,xMin,xMax);
ratio->Divide(hnew,hold);
ratio->SetStats(0);
for ( int i=1; i<=ratio->GetNbinsX(); i++ ) {
float num=hnew->GetBinContent(i);
float den=hold->GetBinContent(i);
float dNum=hnew->GetBinError(i);
float dDen=hold->GetBinError(i);
float erro=0;
if ( num!=0 && den!=0) {
erro= ((1./den)*(1./den)*dNum*dNum) + ((num*num)/(den*den*den*den) * (dDen*dDen));
erro=sqrt(erro);
}
ratio->SetBinError(i, erro);
}
ratio->SetLineColor(1);
ratio->SetLineWidth(2);
ratio->SetMinimum(0.);
ratio->SetMaximum(2.);
ratio->Draw("e");
TLine *l = new TLine(xMin,1.,xMax,1.);
l->Draw();
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat scaledhistosForPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
c$i->Divide(1,2);
c$i->cd(1);
file->cd("$HISTOPATHNAME2_Photons");
int nBins = $i->GetNbinsX();
float xMin=$i->GetBinLowEdge(1);
float xMax=$i->GetBinLowEdge(nBins)+$i->GetBinWidth(nBins);
Double_t mnew=$i->GetMaximum();
Double_t nnew=$i->GetEntries();
file->cd("$HISTOPATHNAME1_Photons");
TH1F* hold=new TH1F("hold"," ",nBins,xMin,xMax);
hold=$i;
Double_t mold=$i->GetMaximum();
Double_t nold=$i->GetEntries();
if ( $i==scEAll || $i==phoEAll ) {
$i->GetYaxis()->SetRangeUser(0.,2000.);
}
$i->SetStats(0);
$i->SetMinimum(0.);
if ( mnew > mold+sqrt(mold) ) {
$i->SetMaximum(mnew+2*sqrt(mnew));
} else {
$i->SetMaximum(mold+2*sqrt(mold));
}
$i->SetLineColor(kPink+8);
$i->SetFillColor(kPink+8);
//$i->SetLineWidth(3);
$i->Draw();
file->cd("$HISTOPATHNAME2_Photons");
Double_t nnew=$i->GetEntries();
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(0.8);
//$i->SetLineWidth(1);
$i->Scale(nold/nnew);
TH1F* hnew=new TH1F("hnew"," ",nBins,xMin,xMax);
hnew=$i;
$i->Draw("e1same");
c$i->cd(2);
TH1F* ratio=new TH1F("ratio"," ",nBins,xMin,xMax);
ratio->Divide(hnew,hold);
for ( int i=1; i<=ratio->GetNbinsX(); i++ ) {
float num=hnew->GetBinContent(i);
float den=hold->GetBinContent(i);
float dNum=hnew->GetBinError(i);
float dDen=hold->GetBinError(i);
float erro=0;
if ( num!=0 && den!=0) {
erro= ((1./den)*(1./den)*dNum*dNum) + ((num*num)/(den*den*den*den) * (dDen*dDen));
erro=sqrt(erro);
}
ratio->SetBinError(i, erro);
}
ratio->SetStats(0);
ratio->SetLineColor(1);
ratio->SetLineWidth(2);
ratio->SetMinimum(0.);
ratio->SetMaximum(4.);
ratio->Draw("e");
TLine *l = new TLine(xMin,1.,xMax,1.);
l->Draw();
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat scaledhistosForPhotonsLogScale`)
cat > temp$N.C <<EOF
TCanvas *cc$i = new TCanvas("cc$i");
cc$i->cd();
cc$i->SetFillColor(10);
cc$i->SetLogy();
file->cd("$HISTOPATHNAME2_Photons");
Double_t nnew=$i->GetEntries();
file->cd("$HISTOPATHNAME1_Photons");
if ( $i==hcalTowerSumEtConeDR04Barrel || $i==hcalTowerSumEtConeDR04Endcap ) {
$i->GetXaxis()->SetRangeUser(0.,10.);
}
Double_t nold=$i->GetEntries();
$i->SetStats(0);
$i->SetMinimum(1);
$i->SetLineColor(kPink+8);
$i->SetFillColor(kPink+8);
$i->Draw();
file->cd("$HISTOPATHNAME2_Photons");
Double_t nnew=$i->GetEntries();
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->Draw("e1same");
cc$i->SaveAs("gifs/log$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat unscaledhistosForPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME1_Photons");
$i->SetStats(0);
if ( $i==pEcalRecHitSumEtConeDR04VsEtaAll ) {
$i->GetYaxis()->SetRangeUser(0.,5.);
} else if ( $i==pEcalRecHitSumEtConeDR04VsEtBarrel )
{ $i->GetYaxis()->SetRangeUser(0.,20.);
} else if ( $i==pEcalRecHitSumEtConeDR04VsEtEndcap )
{ $i->GetYaxis()->SetRangeUser(0.,20.);
} else if ( $i==pHcalTowerSumEtConeDR04VsEtaAll)
{ $i->GetYaxis()->SetRangeUser(0.,0.5);
} else if ( $i==pHcalTowerBcSumEtConeDR04VsEtaAll )
{ $i->GetYaxis()->SetRangeUser(0.,1.);
} else if ( $i==pHcalTowerSumEtConeDR04VsEtBarrel || $i==pHcalTowerBcSumEtConeDR04VsEtBarrel)
{ $i->GetYaxis()->SetRangeUser(0.,5.);
} else if ( $i==pHcalTowerSumEtConeDR04VsEtEndcap || $i==pHcalTowerBcSumEtConeDR04VsEtEndcap )
{ $i->GetYaxis()->SetRangeUser(0.,5.);
} else if ( $i==pHoverEVsEtaAll || $i==pnewHoverEVsEtaAll )
{ $i->GetYaxis()->SetRangeUser(-0.05,0.05);
} else if ( $i==pHoverEVsEtAll || $i==pnewHoverEVsEtAll )
{ $i->GetYaxis()->SetRangeUser(-0.05,0.05);
} else {
$i->SetMinimum(0.8);
$i->SetMaximum(1.1);
}
$i->SetLineColor(kPink+8);
$i->SetMarkerColor(kPink+8);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw();
file->cd("$HISTOPATHNAME2_Photons");
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw("e1same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat efficiencyForConvertedPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
c$i->Divide(1,2);
c$i->cd(1);
file->cd("$HISTOPATHNAME1_Efficiencies");
$i->SetStats(0);
int nBins = $i->GetNbinsX();
float xMin=$i->GetBinLowEdge(1);
float xMax=$i->GetBinLowEdge(nBins)+$i->GetBinWidth(nBins);
TH1F* hold=new TH1F("hold"," ",nBins,xMin,xMax);
hold=$i;
$i->SetMinimum(0.);
$i->SetMaximum(1.);
$i->SetLineColor(kPink+8);
$i->SetMarkerColor(kPink+8);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw();
file->cd("$HISTOPATHNAME2_Efficiencies");
TH1F* hnew=new TH1F("hnew"," ",nBins,xMin,xMax);
hnew=$i;
$i->SetStats(0);
$i->SetMinimum(0.);
$i->SetMaximum(1.);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw("same");
TH1F* ratio=new TH1F("ratio"," ",nBins,xMin,xMax);
ratio->Divide(hnew,hold);
for ( int i=1; i<=ratio->GetNbinsX(); i++ ) {
float num=hnew->GetBinContent(i);
float den=hold->GetBinContent(i);
float dNum=hnew->GetBinError(i);
float dDen=hold->GetBinError(i);
float erro=0;
if ( num!=0 && den!=0) {
erro= ((1./den)*(1./den)*dNum*dNum) + ((num*num)/(den*den*den*den) * (dDen*dDen));
erro=sqrt(erro);
}
ratio->SetBinError(i, erro);
}
ratio->SetStats(0);
ratio->SetLineColor(1);
ratio->SetLineWidth(2);
ratio->SetMinimum(0.);
ratio->SetMaximum(2.);
c$i->cd(2);
ratio->Draw("e");
TLine *l = new TLine(xMin,1.,xMax,1.);
l->Draw();
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat scaledhistosForConvertedPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME2_Conversions");
Double_t mnew=$i->GetMaximum();
file->cd("$HISTOPATHNAME1_Conversions");
Double_t mold=$i->GetMaximum();
$i->SetStats(0);
$i->SetMinimum(0.);
if ( mnew > mold)
$i->SetMaximum(mnew+mnew*0.1);
else
$i->SetMaximum(mold+mold*0.1);
$i->SetLineColor(kPink+8);
$i->SetFillColor(kPink+8);
$i->SetLineWidth(3);
$i->Draw();
Double_t nold=$i->GetEntries();
file->cd("$HISTOPATHNAME2_Conversions");
Double_t nnew=$i->GetEntries();
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Scale(nold/nnew);
$i->Draw("e1same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat scaledhistosForConvertedPhotonsLogScale`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
c$i->SetLogy(1);
file->cd("$HISTOPATHNAME2_Conversions");
Double_t mnew=$i->GetMaximum();
file->cd("$HISTOPATHNAME1_Conversions");
Double_t mold=$i->GetMaximum();
$i->SetStats(0);
$i->SetLineColor(kPink+8);
$i->SetFillColor(kPink+8);
$i->SetLineWidth(3);
$i->Draw();
Double_t nold=$i->GetEntries();
file->cd("$HISTOPATHNAME2_Conversions");
Double_t nnew=$i->GetEntries();
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Scale(nold/nnew);
$i->Draw("e1same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
# Overlay the unscaled converted-photon profile histograms from the two
# analyzers (reference in pink, new in black) without rescaling.
foreach i (`cat unscaledhistosForConvertedPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME1_Conversions");
$i->SetStats(0);
// Default y-range for the E/Etrue and E/p profiles; overridden below for the
// vertex-residual profiles. Bug fix: the original emitted the malformed
// literal "0.6." (SetRangeUser(0.6.,2)), which does not parse.
$i->GetYaxis()->SetRangeUser(0.6,2.);
if ( $i == pConvVtxdRVsR || $i == pConvVtxdRVsEta || $i == pConvVtxdXVsX || $i == pConvVtxdYVsY ) {
$i->GetYaxis()->SetRangeUser(-10.,10);
} else if ( $i == pConvVtxdZVsZ ) {
$i->GetYaxis()->SetRangeUser(-10.,10);
}
$i->SetLineColor(kPink+8);
$i->SetMarkerColor(kPink+8);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw();
file->cd("$HISTOPATHNAME2_Conversions");
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw("e1same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat fakeRateForConvertedPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME1_Efficiencies");
$i->SetStats(0);
$i->SetMinimum(0.);
$i->SetMaximum(1.);
$i->SetLineColor(kPink+8);
$i->SetMarkerColor(kPink+8);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw();
file->cd("$HISTOPATHNAME2_Efficiencies");
$i->SetStats(0);
$i->SetMinimum(0.);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw("same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat 2dhistosForConvertedPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME1_Conversions");
$i->SetStats(0);
$i->SetMinimum(0.);
$i->SetMarkerColor(kPink+8);
$i->Draw();
file->cd("$HISTOPATHNAME2_Conversions");
$i->SetStats(0);
$i->SetMarkerColor(kBlack);
$i->Draw("same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat projectionsForConvertedPhotons`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME1_Conversions");
if ($i==convVtxRvsZBarrel)
TH1D *tmp1$i= $i->ProjectionY();
else if ($i==convVtxRvsZEndcap)
TH1D *tmp1$i= $i->ProjectionX();
Double_t nold=tmp1$i->GetEntries();
Double_t mold=tmp1$i->GetMaximum();
file->cd("$HISTOPATHNAME2_Conversions");
//TH1D *tmp2$i= $i->ProjectionY();
if ($i==convVtxRvsZBarrel)
TH1D *tmp2$i= $i->ProjectionY();
else if ($i==convVtxRvsZEndcap)
TH1D *tmp2$i= $i->ProjectionX();
Double_t nnew=tmp2$i->GetEntries();
Double_t mnew=tmp2$i->GetMaximum();
tmp1$i->SetStats(0);
tmp1$i->SetMinimum(0.);
if ( mnew > mold)
tmp1$i->SetMaximum(mnew+mnew*0.2);
else
tmp1$i->SetMaximum(mold+mold*0.2);
tmp1$i->SetLineColor(kPink+8);
tmp1$i->SetFillColor(kPink+8);
tmp1$i->SetLineWidth(3);
tmp1$i->Draw();
tmp2$i->SetStats(0);
tmp2$i->SetLineColor(kBlack);
tmp2$i->SetLineWidth(3);
tmp2$i->Scale(nold/nnew);
tmp2$i->Draw("same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat scaledhistosForTracks`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME2_Conversions");
Double_t mnew=$i->GetMaximum();
file->cd("$HISTOPATHNAME1_Conversions");
Double_t mold=$i->GetMaximum();
$i->SetStats(0);
$i->SetMinimum(0.);
if ( mnew > mold)
$i->SetMaximum(mnew+mnew*0.1);
else
$i->SetMaximum(mold+mold*0.1);
$i->SetLineColor(kPink+8);
$i->SetFillColor(kPink+8);
$i->SetLineWidth(3);
$i->Draw();
Double_t nold=$i->GetEntries();
file->cd("$HISTOPATHNAME2_Conversions");
Double_t nnew=$i->GetEntries();
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Scale(nold/nnew);
$i->Draw("e1same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
foreach i (`cat unscaledhistosForTracks`)
cat > temp$N.C <<EOF
TCanvas *c$i = new TCanvas("c$i");
c$i->SetFillColor(10);
file->cd("$HISTOPATHNAME2_Conversions");
Double_t mnew=$i->GetMaximum();
file->cd("$HISTOPATHNAME1_Conversions");
Double_t mold=$i->GetMaximum();
$i->SetStats(0);
if ($i==pDCotTracksVsEtaAll || $i==pDCotTracksVsRAll ) {
$i->SetMinimum(-0.05);
$i->SetMaximum(0.05);
} else if ( $i==pdzPVVsR ) {
$i->GetYaxis()->SetRangeUser(-3.,3.);
} else {
$i->SetMinimum(0.);
if ( mnew > mold)
$i->SetMaximum(mnew+mnew*0.4);
else
$i->SetMaximum(mold+mold*0.4);
}
$i->SetLineColor(kPink+8);
$i->SetMarkerColor(kPink+8);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
$i->Draw();
Double_t nold=$i->GetEntries();
file->cd("$HISTOPATHNAME2_Conversions");
Double_t nnew=$i->GetEntries();
$i->SetStats(0);
$i->SetLineColor(kBlack);
$i->SetMarkerColor(kBlack);
$i->SetMarkerStyle(20);
$i->SetMarkerSize(1);
$i->SetLineWidth(1);
//$i->Scale(nold/nnew);
$i->Draw("e1same");
c$i->SaveAs("gifs/$i.gif");
EOF
setenv N `expr $N + 1`
end
setenv NTOT `expr $N - 1`
setenv N 1
while ( $N <= $NTOT )
cat temp$N.C >>& validation.C
rm temp$N.C
setenv N `expr $N + 1`
end
cat > end.C <<EOF
}
EOF
cat end.C >>& validation.C
rm end.C
#if ( $TYPE == PixelMatchGsfElectron ) then
# setenv ANALYZER PixelMatchGsfElectronAnalyzer
# setenv CFG read_gsfElectrons
#else if ( $TYPE == oldpfPhotons || $TYPE == pfPhotons ) then
# setenv ANALYZER PhotonValidator
# setenv CFG PhotonValidator_cfg
#endif
if (-e validation.html) rm validation.html
if (-e validationPlotsTemplate.html) rm validationPlotsTemplate.html
cp ${CURRENTDIR}/validationPlotsTemplate.html validationPlotsTemplate.html
touch validation.html
cat > begin.html <<EOF
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<title>$RELEASE : $TYPE vs ${COMPAREWITH} validation</title>
</head>
<h1>$RELEASE : $TYPE vs ${COMPAREWITH} validation
<br>
$SAMPLE
</h1>
In all plots below, ${COMPAREWITH} is in purple, ${TYPE} in black.<br>
<br>
Click on the plots to see them enlarged.
<br>
Responsible: N. Marinelli
<br>
<br>
EOF
cat begin.html >>& validation.html
rm begin.html
cat validationPlotsTemplate.html >>& validation.html
rm validationPlotsTemplate.html
rm scaledhistosForPhotons
rm unscaledhistosForPhotons
rm efficiencyForPhotons
rm scaledhistosForPhotonsLogScale
rm efficiencyForConvertedPhotons
rm fakeRateForConvertedPhotons
rm 2dhistosForConvertedPhotons
rm projectionsForConvertedPhotons
rm scaledhistosForTracks
rm unscaledhistosForTracks
rm scaledhistosForPhotonsLogScale
rm scaledhistosForConvertedPhotons
rm scaledhistosForConvertedPhotonsLogScale
rm unscaledhistosForConvertedPhotons
#echo "Now paste the following into your terminal window:"
#echo ""
echo "cd $OUTDIR"
#echo " root -b"
#echo ".x validation.C"
#echo ".q"
#echo "cd $CURRENTDIR"
#echo ""
root -b -l -q validation.C
cd $CURRENTDIR
echo "Then you can view your valdation plots here:"
echo "http://cmsdoc.cern.ch/Physics/egamma/www/$OUTPATH/validation.html"
| Tcsh | 4 | ckamtsikis/cmssw | Validation/RecoEgamma/test/pfValidation.csh | [
"Apache-2.0"
] |
******************************************
The Power of two
Copyright (C) 2002 Roland Illig
1illig%40informatik%2Euni%2Dhamburg%2Ede
This program is free software; you can
redistribute it and/or modify it under the
terms of the GNU General Public License as
published by the Free Software Foundation;
either version 2 of the License or (at
your option) any later version;
This program is distributed in the hope
that it will be useful but WITHOUT ANY
WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE; See the GNU General
Public License for more details;
You should have received a copy of the GNU
General Public License along with this
program; if not write to the Free Software
Foundation Inc 59 Temple Place
Suite 330; Boston MA; 02111/1307; USA
******************************************
2^++++++ =
>+<[
>[
>++<-
]>[
<+>-
]<<-
]
| Brainfuck | 0 | RubenNL/brainheck | examples/math/power.bf | [
"Apache-2.0"
] |
(import
itertools [repeat cycle islice]
pytest)
;;;; some simple helpers
(defn assert-true [x]
  "Assert that `x` is exactly the boolean True (not merely truthy)."
  (assert (= True x)))

(defn assert-false [x]
  "Assert that `x` is exactly the boolean False (not merely falsy)."
  (assert (= False x)))

(defn assert-equal [x y]
  "Assert that `x` equals `y` under `=`."
  (assert (= x y)))

(defn assert-none [x]
  "Assert that `x` is the None singleton (identity check, not equality)."
  (assert (is x None)))

(defn assert-requires-num [f]
  "Assert that `f` raises TypeError for each non-numeric sample input
  (a string, a list, and None); any other outcome fails the assertion."
  (for [x ["foo" [] None]]
    (try (f x)
         (except [TypeError] True)
         (else (assert False)))))
(defn test-setv []
(setv x 1)
(setv y 1)
(assert-equal x y)
(setv y 12)
(setv x y)
(assert-equal x 12)
(assert-equal y 12)
(setv y (fn [x] 9))
(setv x y)
(assert-equal (x y) 9)
(assert-equal (y x) 9)
(try (do (setv a.b 1) (assert False))
(except [e [NameError]] (assert (in "name 'a' is not defined" (str e)))))
(try (do (setv b.a (fn [x] x)) (assert False))
(except [e [NameError]] (assert (in "name 'b' is not defined" (str e)))))
(import itertools)
(setv foopermutations (fn [x] (itertools.permutations x)))
(setv p (set [(, 1 3 2) (, 3 2 1) (, 2 1 3) (, 3 1 2) (, 1 2 3) (, 2 3 1)]))
(assert-equal (set (itertools.permutations [1 2 3])) p)
(assert-equal (set (foopermutations [3 1 2])) p)
(setv permutations- itertools.permutations)
(setv itertools.permutations (fn [x] 9))
(assert-equal (itertools.permutations p) 9)
(assert-equal (foopermutations foopermutations) 9)
(setv itertools.permutations permutations-)
(assert-equal (set (itertools.permutations [2 1 3])) p)
(assert-equal (set (foopermutations [2 3 1])) p))
(setv globalvar 1)
(defn test-exec []
(setv localvar 1)
(setv code "
result['localvar in locals'] = 'localvar' in locals()
result['localvar in globals'] = 'localvar' in globals()
result['globalvar in locals'] = 'globalvar' in locals()
result['globalvar in globals'] = 'globalvar' in globals()
result['x in locals'] = 'x' in locals()
result['x in globals'] = 'x' in globals()
result['y in locals'] = 'y' in locals()
result['y in globals'] = 'y' in globals()")
(setv result {})
(exec code)
(assert-true (get result "localvar in locals"))
(assert-false (get result "localvar in globals"))
(assert-false (get result "globalvar in locals"))
(assert-true (get result "globalvar in globals"))
(assert-false (or
(get result "x in locals") (get result "x in globals")
(get result "y in locals") (get result "y in globals")))
(setv result {})
(exec code {"x" 1 "result" result})
(assert-false (or
(get result "localvar in locals") (get result "localvar in globals")
(get result "globalvar in locals") (get result "globalvar in globals")))
(assert-true (and
(get result "x in locals") (get result "x in globals")))
(assert-false (or
(get result "y in locals") (get result "y in globals")))
(setv result {})
(exec code {"x" 1 "result" result} {"y" 1})
(assert-false (or
(get result "localvar in locals") (get result "localvar in globals")
(get result "globalvar in locals") (get result "globalvar in globals")))
(assert-false (get result "x in locals"))
(assert-true (get result "x in globals"))
(assert-true (get result "y in locals"))
(assert-false (get result "y in globals")))
(defn test-filter []
(setv res (list (filter (fn [x] (> x 0)) [ 1 2 3 -4 5])))
(assert-equal res [ 1 2 3 5 ])
;; test with iter
(setv res (list (filter (fn [x] (> x 0)) (iter [ 1 2 3 -4 5 -6]))))
(assert-equal res [ 1 2 3 5])
(setv res (list (filter (fn [x] (< x 0)) [ -1 -4 5 3 4])))
(assert-false (= res [1 2]))
;; test with empty list
(setv res (list (filter (fn [x] (< x 0)) [])))
(assert-equal res [])
;; test with None in the list
(setv res (list
(filter (fn [x] (not (% x 2)))
(filter (fn [x] (isinstance x int))
[1 2 None 3 4 None 4 6]))))
(assert-equal res [2 4 4 6])
(setv res (list (filter (fn [x] (is x None)) [1 2 None 3 4 None 4 6])))
(assert-equal res [None None]))
(defn test-gensym []
(setv s1 (hy.gensym))
(assert (isinstance s1 hy.models.Symbol))
(assert (= 0 (.find s1 "_G\uffff")))
(setv s2 (hy.gensym "xx"))
(setv s3 (hy.gensym "xx"))
(assert (= 0 (.find s2 "_xx\uffff")))
(assert (not (= s2 s3)))
(assert (not (= (str s2) (str s3)))))
(defn test-import-init-hy []
(import tests.resources.bin)
(assert (in "_null_fn_for_import_test" (dir tests.resources.bin))))
(defn test-doc [capsys]
;; https://github.com/hylang/hy/issues/1970
;; Let's first make sure we can doc the builtin macros
;; before we create the user macros.
(doc doc)
(setv [out err] (.readouterr capsys))
(assert (in "Gets help for a macro function" out))
(doc "#@")
(setv [out err] (.readouterr capsys))
(assert (in "with-decorator tag macro" out))
(defmacro <-mangle-> []
"a fancy docstring"
'(+ 2 2))
(doc <-mangle->)
(setv [out err] (.readouterr capsys))
;; https://github.com/hylang/hy/issues/1946
(assert (.startswith (.strip out)
f"Help on function {(hy.mangle '<-mangle->)} in module "))
(assert (in "a fancy docstring" out))
(assert (not err))
(defmacro "#pillgrums" [x]
"Look at the quality of that picture!"
x)
(doc "#pillgrums")
(setv [out err] (.readouterr capsys))
(assert (in "Look at the quality of that picture!" out))
(assert (not err))
;; make sure doc raises an error instead of
;; presenting a default value help screen
(with [(pytest.raises NameError)]
(doc does-not-exist)))
| Hy | 4 | lafrenierejm/hy | tests/native_tests/core.hy | [
"MIT"
] |
<!-- Image card -->
<style>
.demo-card-image.mdl-card {
width: 256px;
height: 256px;
background: url('../assets/demos/image_card.jpg') center / cover;
}
.demo-card-image > .mdl-card__actions {
height: 52px;
padding: 16px;
background: rgba(0, 0, 0, 0.2);
}
.demo-card-image__filename {
color: #fff;
font-size: 14px;
font-weight: 500;
}
</style>
<div class="demo-card-image mdl-card mdl-shadow--2dp">
<div class="mdl-card__title mdl-card--expand"></div>
<div class="mdl-card__actions">
<span class="demo-card-image__filename">Image.jpg</span>
</div>
</div>
| HTML | 3 | greatwqs/staffjoy | frontend/third_party/node/material_design_lite/card/snippets/image.html | [
"MIT"
] |
(function() {
var check = function () {
}
var checkNot = function () {
}
MUnit.test(a);
})(); | TypeScript | 1 | nilamjadhav/TypeScript | tests/cases/conformance/parser/ecmascript5/VariableDeclarations/parserVariableDeclaration2.ts | [
"Apache-2.0"
] |
{-
Alertmanager API
API of the Prometheus Alertmanager (https://github.com/prometheus/alertmanager)
OpenAPI spec version: 0.0.1
NOTE: This file is auto generated by the openapi-generator.
https://github.com/openapitools/openapi-generator.git
Do not edit this file manually.
-}
module Data.AlertmanagerStatus exposing (AlertmanagerStatus, decoder, encoder)
import Data.AlertmanagerConfig as AlertmanagerConfig exposing (AlertmanagerConfig)
import Data.ClusterStatus as ClusterStatus exposing (ClusterStatus)
import Data.VersionInfo as VersionInfo exposing (VersionInfo)
import DateTime exposing (DateTime)
import Dict exposing (Dict)
import Json.Decode as Decode exposing (Decoder)
import Json.Decode.Pipeline exposing (optional, required)
import Json.Encode as Encode
{-| Response of the Alertmanager status endpoint: cluster membership,
build/version information, and the active configuration.
NOTE(review): `uptime` is decoded as a DateTime — presumably the process
start timestamp rather than a duration; confirm against the API spec.
-}
type alias AlertmanagerStatus =
    { cluster : ClusterStatus
    , versionInfo : VersionInfo
    , config : AlertmanagerConfig
    , uptime : DateTime
    }
decoder : Decoder AlertmanagerStatus
decoder =
    -- Read the four required fields and feed them to the record
    -- constructor; equivalent to the Json.Decode.Pipeline formulation
    -- (`required k d` is `custom (field k d)`).
    Decode.map4 AlertmanagerStatus
        (Decode.field "cluster" ClusterStatus.decoder)
        (Decode.field "versionInfo" VersionInfo.decoder)
        (Decode.field "config" AlertmanagerConfig.decoder)
        (Decode.field "uptime" DateTime.decoder)
{-| Serialise an AlertmanagerStatus back to JSON; the field names mirror
the ones consumed by `decoder`.
-}
encoder : AlertmanagerStatus -> Encode.Value
encoder model =
    Encode.object
        [ ( "cluster", ClusterStatus.encoder model.cluster )
        , ( "versionInfo", VersionInfo.encoder model.versionInfo )
        , ( "config", AlertmanagerConfig.encoder model.config )
        , ( "uptime", DateTime.encoder model.uptime )
        ]
| Elm | 4 | jtlisi/alertmanager | ui/app/src/Data/AlertmanagerStatus.elm | [
"ECL-2.0",
"Apache-2.0"
] |
#include "script_component.hpp"
/*
Name: TFAR_fnc_setSwVolume
Author: NKey
Sets the volume for the SW radio
Arguments:
0: Radio <STRING>
1: Volume Range (0,10) <NUMBER>
Return Value:
None
Example:
[call TFAR_fnc_activeSWRadio, 10] call TFAR_fnc_setSwVolume;
Public: Yes
*/
params ["_radio_id", "_value"];

// Read the radio's settings array, overwrite the volume slot in place, and
// write the whole array back so the change persists on that radio.
private _settings = _radio_id call TFAR_fnc_getSwSettings;
_settings set [VOLUME_OFFSET, _value];
[_radio_id, _settings] call TFAR_fnc_setSwSettings;

// Notify listeners. Event payload: Unit, radio ID, volume
["OnSWvolumeSet", [TFAR_currentUnit, _radio_id, _value]] call TFAR_fnc_fireEventHandlers;
| SQF | 4 | MrDj200/task-force-arma-3-radio | addons/core/functions/fnc_setSwVolume.sqf | [
"RSA-MD"
] |
\require "assert"
\require "b:.."
#(assert (not (defined? 'b:test))) | LilyPond | 2 | HolgerPeters/lyp | spec/package_setups/testing/b/test/b_fail_test.ly | [
"MIT"
] |
# Version constraints for this module: require a Terraform 1.0.x core and
# declare the registry source address of every provider used, so
# `terraform init` can resolve them all from the hashicorp namespace.
# NOTE(review): provider versions are unpinned here — presumably pinned by
# the dependency lock file; confirm.
terraform {
  required_version = "~> 1.0.0"

  required_providers {
    google = {
      source = "hashicorp/google"
    }
    google-beta = {
      source = "hashicorp/google-beta"
    }
    helm = {
      source = "hashicorp/helm"
    }
    kubernetes = {
      source = "hashicorp/kubernetes"
    }
    local = {
      source = "hashicorp/local"
    }
    random = {
      source = "hashicorp/random"
    }
    template = {
      source = "hashicorp/template"
    }
    time = {
      source = "hashicorp/time"
    }
    tls = {
      source = "hashicorp/tls"
    }
  }
}
| HCL | 3 | PragmaTwice/diem | terraform/validator/gcp/versions.tf | [
"Apache-2.0"
] |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.thrift;
class Limits {

    // Haxe limits are not fixed values, they depend on the target platform.
    // For example, neko limits an int to 31 bits instead of 32. So we detect
    // the values once during initialization in order to
    // (a) get the right values for the current platform, and
    // (b) prevent us from dependencies to a bunch of defines

    // Largest positive Int on the current target, computed once at class-init
    // time. The loop turns on one more low-order bit per iteration
    // (1, 3, 7, ...); when setting the next bit makes the value negative, the
    // sign bit has been reached, and the last non-negative value is the max.
    public static var I32_MAX = {
        var last : Int = 0;
        var next : Int = 0;
        for(bit in 0 ... 32) {
            last = next;
            next = last | (1 << bit);
            if(next < 0) {  // sign bit set -> overflowed past the max
                break;
            }
        }
        last; // final value (result of this block expression)
    }

    // add whatever you need
}
"Apache-2.0"
] |
{%- assign __return = site.data.variables.default.lang -%}
{%- if page.lang -%}
{%- assign __return = page.lang -%}
{%- elsif site.lang -%}
{%- assign __return = site.lang -%}
{%- endif -%} | HTML | 3 | matt783/matt783.github.io | _includes/snippets/get-lang.html | [
"MIT"
] |
type Signs = { kind: 'a'; a: 3; } | { kind: 'b'; b: 2; } | { kind: 'c'; c: 1; };
interface Opts<T> {
low?: number;
sign?: T
}
interface Wrapper<T> {
}
declare function sepsis<T extends Signs>(opts: Opts<T>): Wrapper<T>;
declare function unwrap<T>(w: Wrapper<T>): T;
export const y = sepsis({ low: 1, sign: { kind: 'a', a: 3 }});
// $ExpectType { kind: "a"; a: 3; }
export const yun = unwrap(y);
// $ExpectType { kind: "a"; a: 3; }
export const yone = unwrap(sepsis({ low: 1, sign: { kind: 'a', a: 3 }})); | TypeScript | 4 | monciego/TypeScript | tests/cases/compiler/returnTypeInferenceNotTooBroad.ts | [
"Apache-2.0"
] |
module Krb5 =
autoload xfm
let comment = Inifile.comment IniFile.comment_re "#"
let empty = Inifile.empty
let eol = Inifile.eol
let dels = Util.del_str
let indent = del /[ \t]*/ ""
let comma_or_space_sep = del /[ \t,]{1,}/ " "
let eq = del /[ \t]*=[ \t]*/ " = "
let eq_openbr = del /[ \t]*=[ \t\n]*\{[ \t]*\n/ " = {\n"
let closebr = del /[ \t]*\}/ "}"
(* These two regexps for realms and apps are not entirely true
- strictly speaking, there's no requirement that a realm is all upper case
and an application only uses lowercase. But it's what's used in practice.
Without that distinction we couldn't distinguish between applications
and realms in the [appdefaults] section.
*)
let realm_re = /[A-Z0-9][.a-zA-Z0-9-]*/
let realm_anycase_re = /[A-Za-z0-9][.a-zA-Z0-9-]*/
let app_re = /[a-z][a-zA-Z0-9_]*/
let name_re = /[.a-zA-Z0-9_-]+/
let value_br = store /[^;# \t\r\n{}]+/
let value = store /[^;# \t\r\n]+/
let entry (kw:regexp) (sep:lens) (value:lens) (comment:lens)
= [ indent . key kw . sep . value . (comment|eol) ] | comment
let subsec_entry (kw:regexp) (sep:lens) (comment:lens)
= ( entry kw sep value_br comment ) | empty
let simple_section (n:string) (k:regexp) =
let title = Inifile.indented_title n in
let entry = entry k eq value comment in
Inifile.record title entry
let record (t:string) (e:lens) =
let title = Inifile.indented_title t in
Inifile.record title e
let v4_name_convert (subsec:lens) = [ indent . key "v4_name_convert" .
eq_openbr . subsec* . closebr . eol ]
(*
For the enctypes this appears to be a list of the valid entries:
c4-hmac arcfour-hmac aes128-cts rc4-hmac
arcfour-hmac-md5 des3-cbc-sha1 des-cbc-md5 des-cbc-crc
*)
let enctype_re = /[a-zA-Z0-9-]{3,}/
let enctypes = /permitted_enctypes|default_tgs_enctypes|default_tkt_enctypes/i
(* An #eol label prevents ambiguity between "k = v1 v2" and "k = v1\n k = v2" *)
let enctype_list (nr:regexp) (ns:string) =
indent . del nr ns . eq
. Build.opt_list [ label ns . store enctype_re ] comma_or_space_sep
. (comment|eol) . [ label "#eol" ]
let libdefaults =
let option = entry (name_re - ("v4_name_convert" |enctypes)) eq value comment in
let enctype_lists = enctype_list /permitted_enctypes/i "permitted_enctypes"
| enctype_list /default_tgs_enctypes/i "default_tgs_enctypes"
| enctype_list /default_tkt_enctypes/i "default_tkt_enctypes" in
let subsec = [ indent . key /host|plain/ . eq_openbr .
(subsec_entry name_re eq comment)* . closebr . eol ] in
record "libdefaults" (option|enctype_lists|v4_name_convert subsec)
let login =
let keys = /krb[45]_get_tickets|krb4_convert|krb_run_aklog/
|/aklog_path|accept_passwd/ in
simple_section "login" keys
let appdefaults =
let option = entry (name_re - ("realm" | "application")) eq value_br comment in
let realm = [ indent . label "realm" . store realm_re .
eq_openbr . (option|empty)* . closebr . eol ] in
let app = [ indent . label "application" . store app_re .
eq_openbr . (realm|option|empty)* . closebr . eol] in
record "appdefaults" (option|realm|app)
let realms =
let simple_option = /kdc|admin_server|database_module|default_domain/
|/v4_realm|auth_to_local(_names)?|master_kdc|kpasswd_server/
|/admin_server|ticket_lifetime|pkinit_anchors|krb524_server/ in
let subsec_option = /v4_instance_convert/ in
let option = subsec_entry simple_option eq comment in
let subsec = [ indent . key subsec_option . eq_openbr .
(subsec_entry name_re eq comment)* . closebr . eol ] in
let v4subsec = [ indent . key /host|plain/ . eq_openbr .
(subsec_entry name_re eq comment)* . closebr . eol ] in
let realm = [ indent . label "realm" . store realm_anycase_re .
eq_openbr . (option|subsec|(v4_name_convert v4subsec))* .
closebr . eol ] in
record "realms" (realm|comment)
let domain_realm =
simple_section "domain_realm" name_re
let logging =
let keys = /kdc|admin_server|default/ in
let xchg (m:regexp) (d:string) (l:string) =
del m d . label l in
let xchgs (m:string) (l:string) = xchg m m l in
let dest =
[ xchg /FILE[=:]/ "FILE=" "file" . value ]
|[ xchgs "STDERR" "stderr" ]
|[ xchgs "CONSOLE" "console" ]
|[ xchgs "DEVICE=" "device" . value ]
|[ xchgs "SYSLOG" "syslog" .
([ xchgs ":" "severity" . store /[A-Za-z0-9]+/ ].
[ xchgs ":" "facility" . store /[A-Za-z0-9]+/ ]?)? ] in
let entry = [ indent . key keys . eq . dest . (comment|eol) ] | comment in
record "logging" entry
let capaths =
let realm = [ indent . key realm_re .
eq_openbr .
(entry realm_re eq value_br comment)* . closebr . eol ] in
record "capaths" (realm|comment)
let dbdefaults =
let keys = /database_module|ldap_kerberos_container_dn|ldap_kdc_dn/
|/ldap_kadmind_dn|ldap_service_password_file|ldap_servers/
|/ldap_conns_per_server/ in
simple_section "dbdefaults" keys
let dbmodules =
let keys = /db_library|ldap_kerberos_container_dn|ldap_kdc_dn/
|/ldap_kadmind_dn|ldap_service_password_file|ldap_servers/
|/ldap_conns_per_server/ in
simple_section "dbmodules" keys
(* This section is not documented in the krb5.conf manpage,
but the Fermi example uses it. *)
let instance_mapping =
let value = dels "\"" . store /[^;# \t\r\n{}]*/ . dels "\"" in
let map_node = label "mapping" . store /[a-zA-Z0-9\/*]+/ in
let mapping = [ indent . map_node . eq .
[ label "value" . value ] . (comment|eol) ] in
let instance = [ indent . key name_re .
eq_openbr . (mapping|comment)* . closebr . eol ] in
record "instancemapping" instance
let kdc =
simple_section "kdc" /profile/
let pam =
simple_section "pam" name_re
let includes = Build.key_value_line /include(dir)?/ Sep.space (store Rx.fspath)
let lns = (comment|empty|includes)* .
(libdefaults|login|appdefaults|realms|domain_realm
|logging|capaths|dbdefaults|dbmodules|instance_mapping|kdc|pam)*
let filter = (incl "/etc/krb5.conf.d/*.conf")
. (incl "/etc/krb5.conf")
let xfm = transform lns filter
| Augeas | 4 | greglarkin/pltraining-classroom | files/krb5.aug | [
"Apache-2.0"
] |
package com.baeldung.concurrent.sleepwait;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/***
* Example of wait() and sleep() methods
*/
// Demonstrates the difference between Thread.sleep() and Object.wait():
// sleep() pauses the current thread without touching any lock, while wait()
// must be called on a monitor the thread currently holds and releases that
// monitor while waiting.
public class WaitSleepExample {

    private static final Logger LOG = LoggerFactory.getLogger(WaitSleepExample.class);

    // Monitor object for the wait() demonstration; a thread must hold this
    // object's intrinsic lock (via synchronized) before calling LOCK.wait().
    private static final Object LOCK = new Object();

    public static void main(String... args) throws InterruptedException {
        sleepWaitInSynchronizedBlocks();
    }

    private static void sleepWaitInSynchronizedBlocks() throws InterruptedException {
        // Static method affecting the current thread; no lock involved.
        Thread.sleep(1000); // called on the thread
        LOG.debug("Thread '" + Thread.currentThread().getName() + "' is woken after sleeping for 1 second");
        synchronized (LOCK) {
            // wait(timeout) releases LOCK's monitor until notified or the
            // timeout elapses. NOTE(review): production code should call
            // wait() inside a condition loop to guard against spurious
            // wakeups; presumably omitted here for demo brevity — confirm.
            LOCK.wait(1000); // called on the object, synchronization required
            LOG.debug("Object '" + LOCK + "' is woken after waiting for 1 second");
        }
    }
}
| Java | 5 | zeesh49/tutorials | core-java-concurrency/src/main/java/com/baeldung/concurrent/sleepwait/WaitSleepExample.java | [
"MIT"
] |
# dynamic ugen loading
# corresponding c code is in the file called gain.c
# load the plugin into ftable "test"
"test" "./gain.so" fl
# create a unit-amplitude sine wave
# 440 1 sine
#
# # execute the ugen
# 0.1 "test" fe
# close the library only call this once at the end!
"test" fc
| SourcePawn | 4 | aleatoricforest/Sporth | examples/f.sp | [
"MIT"
] |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* Contains some contributions under the Thrift Software License.
* Please see doc/old-thrift-license.txt in the Thrift distribution for
* details.
*/
namespace rs transit
include "CityServices.thrift"
include "Vehicles.thrift"
// Default capacity for 4-wheel vehicles (per the constant's name).
const Vehicles.Capacity DEFAULT4WHEELCAPACITY = 30

// Propulsion system of a bus.
enum Powertrain {
  DIESEL = 0
  BIO_DIESEL = 1
  COMPRESSED_NATURAL_GAS = 2
  TROLLEY = 3
  HYBRID = 4
  BATTERY = 5
}

// One bus in the transit fleet.
struct Bus {
  1: Vehicles.VehicleIdentifier identifier  // vehicle identifier
  2: Vehicles.Capacity capacity             // passenger capacity
  3: Powertrain powertrain                  // propulsion type
  4: list<Vehicles.Material> materials      // construction materials
}

// A transit route and the city-service improvements associated with it.
struct Route {
  1: string routeId
  2: list<CityServices.TransitImprovements> improvements
}

// Fleet maintenance service.
service Garage {
  // Presumably returns the bus after applying upgrades — confirm with the
  // service implementation.
  Bus upgradeBus(1: Bus bus)
  // Improvements for the given route.
  list<CityServices.TransitImprovements> improvementsForRoute(1: Route route)
}
"Apache-2.0"
] |
' *******************************************************************
' Source: Mini Timer
' Version: 1.00
' Author: Rob Hutchinson 2004
' Email: rob@proteanide.co.uk
' WWW: http://www.proteanide.co.uk/
' -------------------------------------------------------------------
' This include provides a class for a timer object. The class works
' in milliseconds. First of all the object can be enabled and
' disabled at will by setting the Enabled field
' to true or false. The Interval field can be set
' to mark a milliseconds interval that, when reached, IntervalReached
' will become true. You can use the Reset() method to reset the timer
' and MiillisecondsElapsed() will tell you the number of milliseconds
' that have passed since you called Reset. Enabled field only has
' any effect on the IntervalReached function of the timer. If false
' then the method will always return false.
' Ported directly from my .NET Framework game library: Lita.
' -------------------------------------------------------------------
' Required:
' - Nothing.
' *******************************************************************
Type MiniTimer

	'#Region Declarations
	Field TimeStarted:Int		' MilliSecs() value captured by Reset(); 0 means "never reset"
	Field Interval:Int			' interval in milliseconds tested by IntervalReached()
	Field Enabled:Int = True	' gates IntervalReached() only; Reset/elapsed are unaffected
	'#End Region

	'#Region Method: Reset
	'''-----------------------------------------------------------------------------
	''' <summary>
	''' Resets the timer by recording the current MilliSecs() tick.
	''' </summary>
	'''-----------------------------------------------------------------------------
	Method Reset()
		Self.TimeStarted = MilliSecs()
	End Method
	'#End Region

	'#Region Method: MiillisecondsElapsed
	'''-----------------------------------------------------------------------------
	''' <summary>
	''' Gets the number of milliseconds that have passed since a call to Reset.
	''' Returns 0 if Reset has never been called (TimeStarted is still 0).
	''' NOTE(review): the method name is misspelled ("Miilliseconds"); it is left
	''' as-is because renaming would break existing callers.
	''' </summary>
	'''-----------------------------------------------------------------------------
	Method MiillisecondsElapsed:Int()
		If Self.TimeStarted = 0 Then Return 0
		Local TimeNow:Int = MilliSecs()
		Return TimeNow - Self.TimeStarted
	End Method
	'#End Region

	'#Region Method: IntervalReached
	'''-----------------------------------------------------------------------------
	''' <summary>
	''' Returns true if the given interval has been reached. When Enabled is
	''' false the If is skipped, so the method yields the default value
	''' (false) — presumably intentional; confirm this is the desired contract.
	''' </summary>
	'''-----------------------------------------------------------------------------
	Method IntervalReached:Int()
		If Self.Enabled Then Return (Self.MiillisecondsElapsed() > Self.Interval)
	End Method
	'#End Region

End Type
| BlitzMax | 4 | jabdoa2/blitzmax | samples/digesteroids/minitimer.bmx | [
"Zlib"
] |
let s = "hello world"
let n : int = 87
let id x = x
let add x y = x + y
let add' x y =
let left = x in
let right = y in
x + y
let add'' : int -> int -> int = fun x -> fun y -> x + y
let unwrap_option default opt =
match opt with
| None -> default
| Some v -> v
let string_of_bool = function true -> "true" | false -> "false"
let is_a c =
if c = 'a' then true
else false
let _ = Printf.printf "%s" "hello"
let () = Printf.printf "%s\n" "world"
let x = ref 0
let _ = x := 1
type my_bool = True | False
type shape = Circle of float | Square of float | Rectangle of (float * float)
type user = {
login : string;
password : string;
}
type 'a my_ref = { mutable ref_value : 'a }
let (:=) r v = r.ref_value <- v
let (+) 2 2 = 5
exception Bad_value of string
let bad_value_error () = raise (Bad_value "your value is bad and you should feel bad")
let () =
try bad_value_error ()
with Bad_value _ -> ()
let () =
try bad_value_error ()
with
| Bad_value _ -> ()
| Not_found -> ()
module type FOO = sig
val foo : 'a -> 'a
end
module Foo : FOO = struct
let foo x = x
end
let greeter = object
val greeting = "Hello"
method greet name = Printf.sprintf "%s, %s!" greeting name
end
let greeting = greeter#greet "world"
class greeter_factory greeting_text = object (self)
val greeting = greeting_text
method greet name = Printf.sprintf "%s, %s!" greeting name
initializer Printf.printf "Objects will greet the user with \"%s\"\n" greeting
end
let g = new greeter_factory "Hi"
| OCaml | 4 | JesseVermeulen123/bat | tests/syntax-tests/source/OCaml/syntax-test.ml | [
"Apache-2.0",
"MIT"
] |
data {
  // dimensions
  int N; // number of polls
  int S; // number of groups; delta carries S + 1 entries

  // index: group membership of each poll.
  // NOTE(review): the declared lower bound of 0 would make delta[s[n]] an
  // out-of-range (Stan is 1-based) index; presumably s >= 1 in practice —
  // confirm with the data-preparation code.
  int<lower = 0, upper = S + 1> s[N];

  // data
  int n_respondents[N];  // total respondents per poll
  int n_two_parties[N];  // respondents counted for the two parties
}
parameters {
  vector[S + 1] delta_raw;      // non-centered group effects (std normal)
  real delta_mu;                // shared mean on the logit scale
  real<lower = 0> delta_sigma;  // group-level scale
}
transformed parameters {
  // Non-centered parameterisation: delta ~ normal(delta_mu, delta_sigma).
  vector[S + 1] delta;
  delta = delta_mu + delta_raw * delta_sigma;
}
model {
  delta_raw ~ std_normal();
  delta_mu ~ normal(0, 1);
  delta_sigma ~ normal(0, 0.2);
  // Binomial likelihood on the logit scale. Fixed: the original wrote
  // `~ binomial_logit_lpmf(...)`, but sampling statements must use the bare
  // distribution name without the _lpmf suffix (stanc rejects the suffixed
  // form in a ~-statement).
  for (n in 1:N) n_two_parties[n] ~ binomial_logit(n_respondents[n], delta[s[n]]);
}
generated quantities {
  // Per-poll two-party share on the probability scale, and the implied
  // per-respondent Bernoulli standard deviation sqrt(p .* (1 - p)).
  vector[N] two_parties_mu;
  vector[N] two_parties_sd;
  two_parties_mu = inv_logit(delta[s]);
  two_parties_sd = sqrt(two_parties_mu .* (1.0 - two_parties_mu));
}
| Stan | 4 | jamesmyatt/us-potus-model | scripts/deprecated/Stan/Refactored/poll_model_1st_stage_v2.stan | [
"MIT"
] |
20 5 0 0
0 1 4 4 3 2 1 [0] [0] [0] [0]
1 1 1 8 [1]
2 1 2 18 5 [19] [9]
3 1 1 6 [1]
4 1 1 15 [3]
5 1 3 15 11 16 [-1] [-91] [0]
6 1 2 3 19 [-96] [0]
7 1 1 15 [17]
8 1 1 10 [0]
9 1 5 13 15 5 7 14 [-3] [-2] [21] [-4] [0]
10 1 1 12 [10]
11 1 4 16 6 13 7 [6] [-58] [10] [14]
12 1 4 6 20 1 9 [5] [4] [-24] [6]
13 1 3 5 11 15 [-4] [-54] [11]
14 1 3 13 11 12 [0] [-5] [-23]
15 1 3 16 21 20 [-101] [9] [6]
16 1 1 15 [29]
17 1 2 6 7 [-72] [-3]
18 1 2 12 2 [0] [-20]
19 1 5 11 20 5 6 17 [5] [4] [9] [-96] [-5]
20 1 3 6 21 4 [-119] [2] [-65]
21 1 0
0 1 0 0 0 0 0 0
1 1 4 1 0 9 0 5
2 1 8 0 2 2 0 8
3 1 5 9 9 7 6 1
4 1 5 0 8 0 8 1
5 1 2 3 4 7 2 8
6 1 10 0 0 0 1 4
7 1 10 0 0 6 0 8
8 1 4 1 1 8 1 0
9 1 8 6 4 5 0 8
10 1 8 10 0 0 4 6
11 1 5 6 6 9 6 0
12 1 2 4 8 6 1 9
13 1 5 5 6 1 2 3
14 1 6 3 4 7 2 4
15 1 9 10 6 0 9 9
16 1 10 0 10 0 5 9
17 1 8 1 5 7 0 1
18 1 1 0 0 4 0 2
19 1 10 6 7 1 6 8
20 1 2 7 0 6 9 1
21 1 0 0 0 0 0 0
10 10 10 10 10
| Eagle | 1 | klorel/or-tools | examples/data/rcpsp/single_mode_max_delay/ubo_20/psp65.sch | [
"Apache-2.0"
] |
#N canvas 392 23 706 812 12;
#X obj 251 612 *~;
#X obj 230 769 outlet~;
#X obj 94 529 makefilename sample%d;
#X msg 94 552 set \$1;
#X obj 94 578 tabread4~ sample1;
#X obj 329 555 dbtorms;
#X obj 329 578 sqrt;
#X obj 329 601 sqrt;
#X obj 329 624 line~;
#X obj 311 695 *~;
#X obj 329 647 *~;
#X obj 329 670 *~;
#X msg 92 179 bang;
#X obj 104 206 delay 5;
#X obj 405 275 f;
#X obj 370 275 f;
#X obj 296 275 f;
#X obj 257 275 f;
#X obj 226 275 f;
#X obj 127 265 f;
#X obj 127 288 mtof;
#X obj 127 311 / 261.62;
#X obj 127 334 * 4.41e+08;
#X obj 127 365 +;
#X obj 104 229 t b b b;
#X obj 296 321 + 1;
#X obj 296 298 * 44.1;
#X msg 92 477 0 5;
#X msg 269 477 1 5;
#X msg 329 477 0 \, \$1 \$2;
#X msg 128 477 \$3 \, \$4 1e+07;
#X msg 227 477 \$5;
#X msg 405 477 0 \$1;
#X obj 254 116 inlet;
#X obj 230 698 inlet~;
#X obj 230 734 +~;
#X text 474 232 pitch in halftones;
#X text 475 209 amplitude (dB);
#X text 475 257 sample number;
#X text 475 281 start location (msec);
#X text 475 305 rise time (msec);
#X text 475 329 decay time (msec);
#X text 474 184 ARGUMENTS FOR NOTE ON:;
#X text 436 368 (Zero amplitude means note off \;;
#X text 457 389 other parameters are ignored.);
#X obj 254 141 route 0;
#X text 37 15 This is an abstraction used by the polyphonic sampler
\, version 2 \, which takes separate note-on and note-off messages.
Unlike "sampvoice" (the first version) \, there is no "duration" field
\, and the amplitude and pitch fields are reversed to make it easy
to separate note-on from note-off messages (which have amplitude zero.)
, f 71;
#X text 323 152 note-on;
#X text 187 152 note-off;
#X obj 34 532 vline~;
#X obj 269 529 vline~;
#X msg 254 165 bang;
#X obj 300 174 unpack f f f f f f;
#X obj 227 442 pack f f f f f;
#X obj 329 529 unpack f f;
#X connect 0 0 9 0;
#X connect 2 0 3 0;
#X connect 3 0 4 0;
#X connect 4 0 0 0;
#X connect 5 0 6 0;
#X connect 6 0 7 0;
#X connect 7 0 8 0;
#X connect 8 0 10 0;
#X connect 8 0 10 1;
#X connect 9 0 35 1;
#X connect 10 0 11 0;
#X connect 10 0 11 1;
#X connect 11 0 9 1;
#X connect 12 0 13 0;
#X connect 12 0 27 0;
#X connect 13 0 24 0;
#X connect 14 0 32 0;
#X connect 15 0 53 1;
#X connect 16 0 26 0;
#X connect 17 0 53 4;
#X connect 18 0 53 0;
#X connect 19 0 20 0;
#X connect 20 0 21 0;
#X connect 21 0 22 0;
#X connect 22 0 23 0;
#X connect 23 0 53 3;
#X connect 24 0 18 0;
#X connect 24 1 19 0;
#X connect 24 2 15 0;
#X connect 24 2 16 0;
#X connect 24 2 17 0;
#X connect 25 0 23 1;
#X connect 25 0 53 2;
#X connect 26 0 25 0;
#X connect 27 0 50 0;
#X connect 28 0 50 0;
#X connect 29 0 54 0;
#X connect 30 0 49 0;
#X connect 31 0 2 0;
#X connect 32 0 54 0;
#X connect 33 0 45 0;
#X connect 34 0 35 0;
#X connect 35 0 1 0;
#X connect 45 0 51 0;
#X connect 45 1 52 0;
#X connect 49 0 4 0;
#X connect 50 0 0 1;
#X connect 51 0 14 0;
#X connect 52 0 12 0;
#X connect 52 0 18 1;
#X connect 52 1 19 1;
#X connect 52 2 17 1;
#X connect 52 3 16 1;
#X connect 52 4 15 1;
#X connect 52 5 14 1;
#X connect 53 0 28 0;
#X connect 53 0 29 0;
#X connect 53 0 30 0;
#X connect 53 0 31 0;
#X connect 54 0 5 0;
#X connect 54 1 8 1;
| Pure Data | 4 | mcclure/pure-data | doc/3.audio.examples/sampvoice2.pd | [
"TCL"
] |
-- This migration enables metadata separation
-- Drops all tables which stores hasura metadata
-- NOTE: the DROP order below is dependency-sensitive: views are dropped before
-- the tables they are defined over, and dependent tables/constraints before
-- the tables they reference, so each statement succeeds without CASCADE.
DROP VIEW hdb_catalog.hdb_role;
DROP TABLE hdb_catalog.hdb_custom_types;
DROP TABLE hdb_catalog.hdb_action_permission;
DROP TABLE hdb_catalog.hdb_action;
DROP VIEW hdb_catalog.hdb_computed_field_function;
DROP TABLE hdb_catalog.hdb_computed_field;
DROP TABLE hdb_catalog.hdb_allowlist;
DROP TABLE hdb_catalog.hdb_query_collection;
DROP VIEW hdb_catalog.hdb_function_info_agg;
DROP VIEW hdb_catalog.hdb_table_info_agg;
-- The schema-update notification machinery (trigger + function + table) is
-- removed as a unit; see the linked PR for the replacement mechanism.
DROP TRIGGER hdb_schema_update_event_notifier ON hdb_catalog.hdb_schema_update_event;
DROP FUNCTION hdb_catalog.hdb_schema_update_event_notifier();
DROP TABLE hdb_catalog.hdb_schema_update_event; -- https://github.com/hasura/graphql-engine/pull/6173
DROP TABLE hdb_catalog.remote_schemas;
DROP VIEW hdb_catalog.hdb_function_agg;
DROP TABLE hdb_catalog.hdb_function;
DROP TABLE hdb_catalog.event_triggers;
DROP FUNCTION hdb_catalog.inject_table_defaults(text, text, text, text);
DROP VIEW hdb_catalog.hdb_primary_key;
DROP VIEW hdb_catalog.hdb_unique_constraint;
DROP VIEW hdb_catalog.hdb_check_constraint;
DROP VIEW hdb_catalog.hdb_foreign_key_constraint;
DROP VIEW hdb_catalog.hdb_permission_agg;
DROP TABLE hdb_catalog.hdb_permission;
DROP TABLE hdb_catalog.hdb_remote_relationship;
DROP TABLE hdb_catalog.hdb_relationship;
DROP TRIGGER event_trigger_table_name_update_trigger ON hdb_catalog.hdb_table;
DROP FUNCTION hdb_catalog.event_trigger_table_name_update();
DROP TABLE hdb_catalog.hdb_table;
DROP FUNCTION hdb_catalog.check_violation(text);
-- Remove foreign key constraint to hdb_cron_triggers in hdb_cron_events
ALTER TABLE hdb_catalog.hdb_cron_events DROP CONSTRAINT hdb_cron_events_trigger_name_fkey;
-- Now drop hdb_cron_triggers
DROP VIEW hdb_catalog.hdb_cron_events_stats;
DROP TABLE hdb_catalog.hdb_cron_triggers;
-- Create table which stores metadata JSON blob
-- The "IF NOT EXISTS" is added due to the introduction of maintenance mode
-- in which migrations are not applied on startup but the 'hdb_catalog.hdb_table'
-- is expected to exist and contain the metadata of the graphql-engine. Now, when
-- the graphql-engine is run in normal mode (with maintenance mode disabled) this
-- migration file will be run and since this table already exists, we should add
-- the "IF NOT EXISTS" clause to avoid a migration error
CREATE TABLE IF NOT EXISTS hdb_catalog.hdb_metadata
(
  id INTEGER PRIMARY KEY,
  metadata JSON NOT NULL
);
-- DROP hdb_views schema (https://github.com/hasura/graphql-engine/pull/6135)
DROP SCHEMA IF EXISTS hdb_views CASCADE;
-- Note [Migration of schema related to table event triggers log]
-- Table event triggers log related schema is
-- - TABLE hdb_catalog.event_log
-- - TABLE hdb_catalog.event_invocation_logs
-- - PROCEDURE hdb_catalog.insert_event_log
-- We define this schema in any pg source to support table event triggers.
-- There's a possibility of using metadata storage database as a source
-- (more likely if server is started with only --database-url option).
-- In this case, dropping the schema in this up (42 to 43) migration and re-creating the
-- same while defining as a pg source causes loss of event trigger logs.
-- To avoid this we won't drop the schema in this migration. While defining
-- a pg source we will define this schema only if this doesn't exist. This also
-- raises a question, "What happens if old database is only used as metadata storage?".
-- Then, definitely, this schema will be of no use. But, this helps a lot in down
-- migration (opposite to this migration, 43 to 42) as we create this schema only if this
-- doesn't exist.
| SQL | 4 | gh-oss-contributor/graphql-engine-1 | server/src-rsr/migrations/42_to_43.sql | [
"Apache-2.0",
"MIT"
] |
#!/usr/bin/sage
# Test-case generator: emits t cases, each built from 26 distinct primes
# (one per uppercase letter) and a random letter sequence; consecutive
# letter pairs are encoded as the product of their two primes.
import random
import string
import sys
t=100
print(t)
for _ in range(t):
    # Build a set of exactly 26 distinct primes: 3 plus 25 random ~100-digit
    # primes (next_prime is a Sage builtin). Sorted so index i maps to the
    # i-th letter of the alphabet.
    p=set([3])
    while(len(p)!=26):
        p.add(next_prime(random.randint(10**99,10**100)))
    p=sorted(list(p))
    # One to four copies of the alphabet, shuffled, form the secret message.
    # BUG FIX: the original called the `random` module itself
    # (`random(1,4)`), which raises TypeError; random.randint is intended.
    m=[x for x in string.ascii_uppercase*random.randint(1,4)]
    random.shuffle(m)
    # Each consecutive letter pair (m[i], m[i+1]) becomes the product of the
    # primes assigned to those letters.
    x=[]
    for i in range(len(m)-1):
        a,b=ord(m[i])-ord('A'),ord(m[i+1])-ord('A')
        x.append(p[a]*p[b])
    # Output: the largest prime used, the pair count, then the products.
    print('{} {}'.format(max(p),len(m)-1))
    print(' '.join(map(str,x)))
| Sage | 3 | Ashindustry007/competitive-programming | codejam/2019-qualification/c-gen.sage | [
"WTFPL"
] |
AudioBufferSource abs { url assets/wav/pinknoise.wav, loop true} [ panner ]
StereoPanner panner { pan
[ setValueAtTime -1.0 t + 0.1,
linearRampToValueAtTime 1.0 t +10
] } [ gain ]
Gain gain { gain 0.5 } [ output ]
End | Augeas | 3 | newlandsvalley/purescript-audiograph | audiograph-editor/dist/augsamples/stereoPanner.aug | [
"MIT"
] |
# Depends-system package definition for libmultiprocess.
package=libmultiprocess
# Mirror all version/download metadata from the native_libmultiprocess
# package so the host build and the native (build-machine) build can
# never drift out of sync.
$(package)_version=$(native_$(package)_version)
$(package)_download_path=$(native_$(package)_download_path)
$(package)_file_name=$(native_$(package)_file_name)
$(package)_sha256_hash=$(native_$(package)_sha256_hash)
# Depends on its own native variant (for build-time tooling) plus boost and capnp.
$(package)_dependencies=native_$(package) boost capnp

# Configure using the shared cmake invocation provided by the depends framework.
define $(package)_config_cmds
  $($(package)_cmake)
endef

define $(package)_build_cmds
  $(MAKE)
endef

# Install into the staging directory so the depends system can package the result.
define $(package)_stage_cmds
  $(MAKE) DESTDIR=$($(package)_staging_dir) install
endef
| Makefile | 3 | crptec/sinovate | depends/packages/libmultiprocess.mk | [
"MIT"
] |
#Updating records in a binary file
import pickle

def update():
    """Interactively update one student's name in class.dat.

    The file holds a single pickled list of mutable [roll_no, name]
    records. On a match the name is replaced and the whole list is
    re-pickled from the start of the file; otherwise a message is printed.
    """
    # 'with' guarantees the file is closed on every path -- the original
    # only closed (and only rewrote) the file when a record was found.
    with open("class.dat", 'rb+') as F:
        S = pickle.load(F)
        found = 0
        rno = int(input("enter the roll number you want to update"))
        for i in S:
            if rno == i[0]:
                # (typo fix: "currrent" -> "current")
                print("the current name is", i[1])
                i[1] = input("enter the new name")
                found = 1
                break
        if found == 0:
            print("Record not found")
        else:
            F.seek(0)      # rewind so the new pickle overwrites the old one
            pickle.dump(S, F)
            F.truncate()   # drop stale trailing bytes if the new pickle is shorter

update()

# Show the file contents after the update.
with open("class.dat", "rb") as F:
    val = pickle.load(F)
    print(val)
| Python | 3 | sulphatet/Python | 1 File handle/File handle binary/Update a binary file.py | [
"MIT"
] |
;;; Point-free functions
;;;
;;; Utilities for building functions without naming their arguments:
;;; `partial' for partial application and `comp' for composition.
;;; Both return lambda *forms* (lists of symbols), which this dialect
;;; treats as callable functions.

(defun partial (f &rest pargs)
  "Create partial application of function."
  ;; Builds (lambda (&rest args) (apply f (append 'pargs args))):
  ;; the pre-supplied pargs are spliced in front of the call-time args.
  (list 'lambda '(&rest args)
	(list 'apply f (list 'append (list 'quote pargs) 'args))))

(defun comp-build (f &rest fs)
  "Helper function for comp function composition."
  ;; Recursively builds the nested call form (f1 (f2 ... (apply fN args)))
  ;; so the last function receives the argument list and the first is
  ;; applied last.
  (if (nullp fs)
      (list 'apply f 'args)
    (list f (apply comp-build fs))))

(defun comp (&rest fs)
  "Compose a number of functions."
  (list 'lambda '(&rest args)
	(apply comp-build fs)))

(provide 'point-free)
| wisp | 5 | skeeto/wisp | wisplib/point-free.wisp | [
"Unlicense"
] |
Version 2 of Exit Descriptions SP by Matthew Fletcher begins here.

"Añade una lista de direcciones de salida válidas así como los nombres de las localidades previamente visitadas tras la descripción de una localidad.[6L02]"

[Rooms default to masculine grammar; mark a room "female" so Spanish listings use feminine articles, e.g. 'la Biblioteca'.]
A room can be female.[spanish need: when listing things such as 'la Biblioteca']
A room can be plural-named or singular-named. [spanish need: when listing things such as 'las Escaleras']

[Counters used while printing the exit list: "amount" counts down the remaining adjacent rooms so the right separator (",", " y", ".") is chosen; "num" counts exits printed so far so the "Salidas:" header appears only once.]
The amount is a number variable.
The amount is 0.
The num is a number variable.
The num is 0.

[After each room description, list every direction with a route to an adjacent room, naming the destination only if the player has already visited it.]
After looking:
	Now the amount is the number of adjacent rooms;
	REPEAT with destination running through adjacent rooms begin;
	if the num is 0, say "Salidas:";
	let the way be the best route from the location to the destination,using even locked doors;
	if the way is a direction, say " [way]";
	if the destination is visited, say " hacia [the destination]";
	Decrease the amount by 1;
	Increase the num by 1;
	if the amount is 0, say ".";
	if the amount is 1, say " y";
	if the amount is greater than 1, say ",";
	END repeat;
	Now the amount is 0;
	Now the num is 0.

Exit Descriptions SP ends here.
---- DOCUMENTATION ----
At the moment you just include this extension and it will add a sentence which lists the exits to the end of a room description, including the names of any previously visited rooms.
In future versions I will make it so that various settings can be changed, to display doors, not name visited rooms or to name all rooms, turning the option on and off in or out of play, etc.
SPANISH:
Esta extensión agrega en cada turno una lista con las posibles salidas de la localidad actual. Imprime el nombre de las salidas ya visitadas.
Al definir localidades con nombre femenino, indicarlo con 'It is female.'
Para usarla, solo incluye la extension al principio del código de tu juego.
Traducida al español por Sarganar.
Mejoras de código por Mel Hython.
| Inform 7 | 5 | brtl-fhc/I7-Spanish | EXTENSIONES/Exit Descriptions SP.i7x | [
"Artistic-2.0"
] |
#!/usr/bin/gnuplot -persist
# Plot Boehm-GC pause-time measurements (optimized build): one impulse per
# GC interval, comparing full-collection mode against incremental/generational
# mode. Reads boehm_full_opt.txt and boehm_incr_opt.txt, writes GC_bench_opt.pdf.
set title "Boehm-GC: Full vs. Generational Mode (optimized)"
set xlabel "Interval #"
set ylabel "Pause Time [ms]"
set terminal pdfcairo transparent enhanced fontscale 0.5 size 5.00in, 3.00in
set output "GC_bench_opt.pdf"
# "w i" = with impulses: draws each pause as a vertical spike.
plot "boehm_full_opt.txt" title "full GC" w i, "boehm_incr_opt.txt" title "incr/generational GC" w i
set output
# EOF
| Gnuplot | 3 | gamemaker1/v | vlib/v/tests/bench/gcboehm/GC_bench_opt.plt | [
"MIT"
] |
// Pico-engine ruleset wrapping Hyperledger Indy-SDK ledger and anoncreds
// operations: NYM transactions, schema/cred-def anchoring, credential
// issuance/storage, and proof-related credential search.
ruleset G2S.indy_sdk.ledger {
  meta {
    shares __testing, getNym,anchorSchema,blocks,getSchema,anchorCredDef,
    credDefs,createLinkedSecret,issuerCreateCredentialOffer,
    proverCreateCredentialReq,nym,createCred,storeCred,searchCredWithReq,searchCredWithReqForProof
    provides getNym,anchorSchema,blocks,getSchema,anchorCredDef,
    credDefs,createLinkedSecret,issuerCreateCredentialOffer,
    proverCreateCredentialReq,nym,createCred,storeCred,searchCredWithReq,searchCredWithReqForProof
  }
  global {
    // Test harness description consumed by the pico-engine developer UI.
    __testing = { "queries":
      [ { "name": "getNym","args":["poolHandle","submitterDid","targetDid"] },
        { "name": "blocks","args":["pool_handle","submitter_did", "ledger_type"] }
      //, { "name": "entry", "args": [ "key" ] }
      ] , "events":
      [ //{ "domain": "d1", "type": "t1" }
        { "domain": "ledger", "type": "nym", "attrs": [ "poolHandle",
                                                        "signing_did",
                                                        "anchoring_did",
                                                        "anchoring_did_verkey",
                                                        "alias",
                                                        "role",
                                                        "walletHandle"] }
      ]
    }
    // Raw ledger transactions of the given ledger type (e.g. DOMAIN/POOL/CONFIG).
    blocks = function(pool_handle,submitter_did, ledger_type){
      ledger:transactions(pool_handle,submitter_did, ledger_type)
    }
    // Look up the NYM (DID record) for targetDid on the ledger.
    getNym = function(poolHandle,submitterDid,targetDid){
      request = ledger:buildGetNymRequest(submitterDid,targetDid);
      ledger:submitRequest(poolHandle,request)
    }
    // Write a NYM transaction anchoring a new DID (with verkey/alias/role),
    // signed by signing_did's key from the given wallet.
    nym = defaction(pool_handle,signing_did,anchoring_did,anchoring_did_verkey,alias,role,wallet_handle){
      request = ledger:buildNymRequest(signing_did,
                                       anchoring_did,
                                       anchoring_did_verkey,
                                       alias,
                                       role).klog("nymrequest")
      response = ledger:signAndSubmitRequest(pool_handle,
                                             wallet_handle,
                                             signing_did,
                                             request).klog("ledger submit nym transaction")
      send_directive("nym transaction");
      returns response
    }
    // Create a credential schema and anchor it on the ledger.
    anchorSchema = function(pool_handle,wallet_handle,submitter_did,issuerDid,name,version,attrNames){
      schema_id_schema = anoncred:issuerCreateSchema(issuerDid,name,version,attrNames).klog("issuercreateschema"); // returns [schema_id,schema]
      request = ledger:buildSchemaRequest(submitter_did,schema_id_schema[1]).klog("buildSchemaRequest");
      ledger:signAndSubmitRequest(pool_handle,wallet_handle,submitter_did,request).klog("singSubmit in anchorSchema")
    }
    // Fetch and parse a schema by id from the ledger.
    getSchema = function(pool_handle,submitter_did,schema_id){
      request = ledger:buildGetSchemaRequest(submitter_did,schema_id);
      reponse = ledger:submitRequest(pool_handle,request);
      ledger:parseGetSchemaResponse(reponse)
    }
    /*getCredentialDefinition = function(submitter_did,data){
      request = ledger:buildCredDefRequest(submitter_did,data);
      response = ledger:submitRequest(request.decode()); // request needs to be a json object??...
      ledger:parseGetCredDefResponse(response)
    }*/
    // Create a credential definition in the wallet and anchor it on the ledger.
    anchorCredDef = defaction(pool_handle,wallet_handle, issuer_did,schema,tag, signature_type, cred_def_config){
      every{
        anoncred:issuerCreateAndStoreCredentialDef( wallet_handle, issuer_did, schema, tag, signature_type, cred_def_config ) setting(credDefId_credDef)
      }
      returns ledger:signAndSubmitRequest(pool_handle,wallet_handle,issuer_did,ledger:buildCredDefRequest(issuer_did,credDefId_credDef[1]))
    }
    // Fetch and parse a credential definition by id from the ledger.
    credDefs = function(pool_handle,submitterDid, id){
      req = ledger:buildGetCredDefRequest(submitterDid, id );
      response = ledger:submitRequest(pool_handle,req);
      ledger:parseGetCredDefResponse(response)
    }
    issuerCreateCredentialOffer = function(wallet_handle,cred_def_id){
      anoncred:issuerCreateCredentialOffer(wallet_handle,cred_def_id)
    }
    issuerCreateCredentialReq = null; // (not defined; see proverCreateCredentialReq below)
    proverCreateCredentialReq = function(wallet_handle ,prover_did,cred_offer,cred_def,secret_id){
      anoncred:proverCreateCredentialReq(wallet_handle ,prover_did,cred_offer,cred_def,secret_id)
    }
    // Create a master ("link") secret in the prover's wallet; returns its id.
    createLinkedSecret = defaction(wallet_handle, link_secret_id){
      anoncred:proverCreateMasterSecret(wallet_handle, link_secret_id) setting(id)
      returns id
    }
    createCred = function(wh, credOffer, credReq, credValues, revRegId, blobStorageReaderHandle){
      anoncred:issuerCreateCredential(wh, credOffer, credReq, credValues, revRegId, blobStorageReaderHandle)
    }
    // Store an issued credential in the prover's wallet.
    storeCred = defaction(wh, credId, credReqMetadata, cred, credDef, revRegDef){
      anoncred:proverStoreCredential(wh, credId, credReqMetadata, cred, credDef, revRegDef)setting(results)
      returns results
    }
    // Search wallet credentials matching a proof request and resolve the
    // referenced schemas/cred-defs from the ledger.
    searchCredWithReqForProof = function(poolHandle,wh, query, attr_count,pred_count, schemaSubmitterDid, schemaId , credDefSubmitterDid, credDefId ){
      identifiers_credsForProof = anoncred:proverSearchCredsForProof(wh, query, attr_count,pred_count);
      anoncred:proverGetEntitiesFromLedger(identifiers_credsForProof[0],poolHandle,schemaSubmitterDid,schemaId,credDefSubmitterDid,credDefId).append(identifiers_credsForProof[1]);
    }
    /*searchCredWithReq = function(wh,query){// TODO: support revocation and more querys
      search_for_proof_request_handle = anoncred:proverSearchCredentials(wh,query,count);
      result = query{"requested_attributes"}.map(function(value,key){
        anoncred:proverFetchCredentialsForProofReq ( search_for_proof_request_handle, key , count )[0]{"cred_info"}});
      _result = result.put(query{"requested_predicates"}.map(function(value,key){
        anoncred:proverFetchCredentialsForProofReq ( search_for_proof_request_handle, key , count )[0]{"cred_info"}}));
      anoncred:proverCloseCredentialsSearchForProofReq ( search_for_proof_request_handle );
      __result = _result.map(function(value,key){ {}.put(value{"referent"},value) }).values().reduce(function(a,b){a.put(b)});
      //anoncrd:proverGetCredentials( wh, __result.klog("searchCredwithReq") )
      __result.klog("searchCredwithReq")
    }*/
    proverCreateProof = function(wh, proofReq, requestedCredentials, masterSecretName, schemas, credentialDefs, revStates){
      anoncred:proverCreateProof(wh, proofReq, requestedCredentials, masterSecretName, schemas, credentialDefs, revStates)
    }
  }
  // Fires on ledger:nym events and writes the NYM transaction described by
  // the event attributes.
  rule nym {
    select when ledger nym
    nym(event:attr("poolHandle"),
        event:attr("signing_did"),
        event:attr("anchoring_did"),
        event:attr("anchoring_did_verkey"),
        event:attr("alias"),
        event:attr("role"),
        event:attr("walletHandle"))
  }
}
| KRL | 5 | Picolab/G2S | krl/g2s.indy_sdk.ledger.krl | [
"MIT"
] |
/**
Copyright 2015 Acacia Team
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.acacia.backend;
import java.io.File;
import java.net.Socket;
import java.net.ServerSocket;
import java.net.UnknownHostException;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.PrintWriter;
import java.io.InputStreamReader;
import java.io.IOException;
import org.acacia.centralstore.AcaciaHashMapCentralStore;
import org.acacia.log.java.Logger_Java;
import org.acacia.util.java.Conts_Java;
import org.acacia.util.java.Utils_Java;
import x10.core.Thread;
import x10.util.HashMap;
import x10.interop.Java;
import x10.util.ArrayList;
import x10.util.StringBuilder;
import x10.util.HashSet;
import org.acacia.localstore.AcaciaLocalStore;
import org.acacia.localstore.AcaciaLocalStoreFactory;
import org.acacia.server.java.AcaciaInstanceProtocol;
public class AcaciaBackEndServiceSession extends java.lang.Thread {
//public class AcaciaFrontEndServiceSession extends java.lang.Thread{
var sessionSkt:Socket = null;
    /**
     * Creates a session bound to an already-accepted backend client socket.
     * The socket is kept for the lifetime of this session thread and is
     * read/written (and eventually closed) by run().
     */
    public def this(val socket:Socket){
        sessionSkt = socket;
    }
    /**
     * Main session loop. Reads one protocol command per line from the backend
     * socket and serves graph-analytics requests (degree distributions,
     * authority-flow scores, incoming-edge lists, intersecting triangle
     * counts) until EXIT is received or the stream closes. Large result maps
     * are streamed back in batches of WINDOW_SIZE entries to avoid
     * overflowing the socket buffer. Anything not handled here is delegated
     * to process().
     */
    public def run(){
        try{
            val buff:BufferedReader = new BufferedReader(new InputStreamReader(sessionSkt.getInputStream()));
            val out:PrintWriter = new PrintWriter(sessionSkt.getOutputStream());
            var msg:String = null;
            // One protocol command per line; each branch runs its own
            // request/response sub-dialogue on the same socket.
            while((msg = buff.readLine())!= null){
                if(msg.equals(AcaciaBackEndProtocol.EXIT)){
                    // Orderly shutdown requested by the peer.
                    out.println(AcaciaBackEndProtocol.EXIT_ACK);
                    out.flush();
                    sessionSkt.close();
                    break;
                }else if(msg.equals(AcaciaBackEndProtocol.HANDSHAKE)){
                    out.println(AcaciaBackEndProtocol.HANDSHAKE_OK);
                    out.flush();
                    try{
                        //Here we should get the host name of the worker.
                        msg = buff.readLine();
                        //Logger_Java.info("Host name of the worker : " + msg);
                    }catch(e:IOException){
                        Logger_Java.error("WWWW Error : " + e.getMessage());
                    }
                }else if(msg.equals(AcaciaBackEndProtocol.OUT_DEGREE_DISTRIBUTION_FOR_PARTITION)){
                    // Sub-dialogue: OK -> graphID, OK -> partitionID, then stream
                    // "vertex,degree;" pairs in batches, terminated by DONE.
                    try{
                        out.println(AcaciaBackEndProtocol.OK);
                        out.flush();
                        val graphID:String = buff.readLine();
                        out.println(AcaciaBackEndProtocol.OK);
                        out.flush();
                        val partitionID:String = buff.readLine();
                        val hmp:HashMap[Long,Long] = getOutDegreeDistribution(graphID, partitionID);
                        //System.out.println("Size of HMAP is : "+hmp.size());
                        val itr:Iterator[x10.util.Map.Entry[Long,Long]] = hmp.entries().iterator();
                        var sb:StringBuilder = new StringBuilder();
                        var ctr:int = 0n;
                        val WINDOW_SIZE:int = 1000n; //This measure is taken to avoid the memory error thrown by Java sockets.
                        while(itr.hasNext()){
                            val pairs:x10.util.Map.Entry[Long,Long] = itr.next();
                            sb.add(pairs.getKey() + "," + pairs.getValue() + ";");
                            if(ctr > WINDOW_SIZE){
                                //System.out.println("Sending : " + sb.toString());
                                out.println(sb.toString());
                                out.flush();
                                sb = new StringBuilder();
                                ctr = 0n;
                            }
                            ctr++;
                        }
                        //We need to send the remaining set of values through the socket
                        if(ctr > 0){
                            out.println(sb.toString());
                            out.flush();
                        }
                        //System.out.println("Sending : " + sb.toString());
                        // out.println(sb.toString());
                        // out.flush();
                        out.println(AcaciaBackEndProtocol.DONE);
                        out.flush();
                        // NOTE(review): the extra println of sb after DONE looks
                        // redundant (sb's remainder was already flushed above) —
                        // confirm against the client-side reader before removing.
                        out.println(sb.toString());
                        out.flush();
                        // try{
                        //     out.close();
                        //     sessionSkt.close();
                        // }catch(IOException e){
                        //     Logger_Java.error("Exitting from this session...");
                        //     break;
                        // }
                    }catch(e:IOException){
                        Logger_Java.error("PPP Error : " + e.getMessage());
                    }
                }else if(msg.equals(AcaciaBackEndProtocol.IN_DEGREE_DISTRIBUTION_FOR_PARTITION)){
                    // Same request shape as out-degree, but the whole result is
                    // sent as a single line (no batching, no DONE marker).
                    try{
                        out.println(AcaciaBackEndProtocol.OK);
                        out.flush();
                        val graphID:String = buff.readLine();
                        out.println(AcaciaBackEndProtocol.OK);
                        out.flush();
                        val partitionID:String = buff.readLine();
                        val hmp:HashMap[Long,Long] = getInDegreeDistribution(graphID, partitionID);
                        val itr:Iterator[x10.util.Map.Entry[Long,Long]] = hmp.entries().iterator();
                        val sb:StringBuilder = new StringBuilder();
                        while(itr.hasNext()){
                            val pairs:x10.util.Map.Entry[Long,Long] = itr.next();
                            sb.add(pairs.getKey() + "," + pairs.getValue() + ";");
                        }
                        out.println(sb.toString());
                        out.flush();
                        // try{
                        //     out.close();
                        //     sessionSkt.close();
                        // }catch(IOException e){
                        //     Logger_Java.error("Exitting from this session...");
                        //     break;
                        // }
                    }catch(e:IOException){
                        Logger_Java.error("PPP Error : " + e.getMessage());
                    }
                }else if(msg.equals(AcaciaBackEndProtocol.WORLD_ONLY_AUTHFLOW_FOR_PARTITION)){
                    // Single scalar response: world-only authority flow value.
                    try{
                        out.println(AcaciaBackEndProtocol.OK);
                        out.flush();
                        val graphID:String = buff.readLine();
                        out.println(AcaciaBackEndProtocol.OK);
                        out.flush();
                        val partitionID:String = buff.readLine();
                        val af:double = getWorldOnlyAuthorityFlow(graphID, partitionID);
                        out.println(af);
                        out.flush();
                        // try{
                        //     out.close();
                        //     sessionSkt.close();
                        // }catch(IOException e){
                        //     Logger_Java.error("Exitting from this session...");
                        //     break;
                        // }
                    }catch(e:IOException){
                        Logger_Java.error("PPP Error : " + e.getMessage());
                    }
                }else if(msg.equals(AcaciaBackEndProtocol.LOCAL_TO_WORLD_AUTHFLOW_FOR_PARTITION)){
                    // Streams "vertex,score;" pairs (Long -> Float) in batches,
                    // terminated by DONE.
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val graphID:String = buff.readLine();
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val partitionID:String = buff.readLine();
                    val hmp:HashMap[Long, Float] = getAuthorityScoresLocalToWorld(graphID, partitionID);
                    //System.out.println("Size of HMAP is : "+hmp.size());
                    val itr:Iterator[x10.util.Map.Entry[Long,Float]] = hmp.entries().iterator();
                    var sb:StringBuilder = new StringBuilder();
                    var ctr:int = 0n;
                    val WINDOW_SIZE:int = 1000n; //This measure is taken to avoid the memory error thrown by Java sockets.
                    while(itr.hasNext()){
                        val pairs:x10.util.Map.Entry[Long, Float] = itr.next();
                        sb.add(pairs.getKey() + "," + pairs.getValue() + ";");
                        if(ctr > WINDOW_SIZE){
                            //System.out.println("Sending : " + sb.toString());
                            out.println(sb.toString());
                            out.flush();
                            sb = new StringBuilder();
                            ctr = 0n;
                        }
                        ctr++;
                    }
                    //We need to send the remaining set of values through the socket
                    if(ctr > 0){
                        out.println(sb.toString());
                        out.flush();
                    }
                    out.println(AcaciaBackEndProtocol.DONE);
                    out.flush();
                }else if(msg.equals(AcaciaBackEndProtocol.WORLD_TO_LOCAL_FLOW_FROMIDS)){
                    // Streams incoming edges grouped by destination:
                    // "<toID>,<fromid1>,<fromid2>,...;" per entry, then DONE.
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val graphID:String = buff.readLine();
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val partitionID:String = buff.readLine();
                    val hmp:HashMap[Long, ArrayList[Long]] = getIncommingEdges(graphID, partitionID);
                    //System.out.println("Size of HMAP is : "+hmp.size());
                    val itr:Iterator[x10.util.Map.Entry[Long,ArrayList[Long]]] = hmp.entries().iterator();
                    var sb:StringBuilder = new StringBuilder();
                    var ctr:int = 0n;
                    var WINDOW_SIZE:int = 1000n; //This measure is taken to avoid the memory error thrown by Java sockets.
                    while(itr.hasNext()){
                        val pairs:x10.util.Map.Entry[Long, ArrayList[Long]] = itr.next();
                        sb.add(pairs.getKey());//First send the key
                        val lst:ArrayList[Long] = pairs.getValue();
                        val itr2:Iterator[Long] = lst.iterator();
                        while(itr2.hasNext()){
                            sb.add(",");
                            sb.add(itr2.next());
                        }
                        sb.add(";");
                        //Now the sb is like <toID>,<fromid1>,<fromid2>,...;
                        if(ctr > WINDOW_SIZE){
                            //System.out.println("Sending : " + sb.toString());
                            out.println(sb.toString());
                            out.flush();
                            sb = new StringBuilder();
                            ctr = 0n;
                        }
                        ctr++;
                    }
                    //We need to send the remaining set of values through the socket
                    if(ctr > 0){
                        out.println(sb.toString());
                        out.flush();
                    }
                    out.println(AcaciaBackEndProtocol.DONE);
                    out.flush();
                }else if(msg.equals(AcaciaBackEndProtocol.WORLD_TO_LOCAL_AUTHFLOW_FOR_PARTITION)){
                    // Streams "vertex,score;" pairs for world-to-local authority
                    // flow, same batching scheme, terminated by DONE.
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val graphID:String = buff.readLine();
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val partitionID:String = buff.readLine();
                    val hmp:HashMap[Long,Float] = getAuthorityScoresWorldToLocal(graphID, partitionID);
                    //System.out.println("Size of HMAP is : "+hmp.size());
                    val itr:Iterator[x10.util.Map.Entry[Long,Float]] = hmp.entries().iterator();
                    var sb:StringBuilder = new StringBuilder();
                    var ctr:int = 0n;
                    val WINDOW_SIZE:int = 1000n; //This measure is taken to avoid the memory error thrown by Java sockets.
                    while(itr.hasNext()){
                        val pairs:x10.util.Map.Entry[Long,Float] = itr.next();
                        sb.add(pairs.getKey() + "," + pairs.getValue() + ";");
                        if(ctr > WINDOW_SIZE){
                            //System.out.println("Sending : " + sb.toString());
                            out.println(sb.toString());
                            out.flush();
                            sb = new StringBuilder();
                            ctr = 0n;
                        }
                        ctr++;
                    }
                    //We need to send the remaining set of values through the socket
                    if(ctr > 0){
                        out.println(sb.toString());
                        out.flush();
                    }
                    out.println(AcaciaBackEndProtocol.DONE);
                    out.flush();
                }else if (msg.equals(AcaciaBackEndProtocol.INTERSECTING_TRIANGLE_COUNT)){
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val graphID:String = buff.readLine();
                    out.println(AcaciaBackEndProtocol.OK);
                    out.flush();
                    val partitionID:String = buff.readLine();
                    val partRes:long = getIntersectingTraingles(graphID, partitionID);
                    //System.out.println("AAAAAAAAAAAAAAAAAAAAA234:" + partRes);
                    if(partRes == -1){
                        // -1 means the partition-local computation could not
                        // answer alone: stream the full central-store edge list
                        // ("from,to;" pairs) to the worker instead.
                        // System.out.println("Have to send the global list to the worker");
                        out.println("-1");
                        out.flush();
                        val centralPartionCount:int = Int.parseInt(org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).getObjectArray()(0n) as String);
                        var fromID:long = -1;
                        var toID:long = -1;
                        val hmp:HashMap[Long, HashSet[Long]] = new HashMap[Long, HashSet[Long]]();
                        val WINDOW_SIZE:int = 1000n; //This measure is taken to avoid the memory error thrown by Java sockets.
                        //
                        for(var i:int = 0n; i < centralPartionCount; i++){
                            //Here we should first bring the central store to the working directory and then construct the central store object with the
                            //local copy of the file. This seems to be the only solution we could find at the moment. A much more intelligent technique
                            //will not need to bring down the entire central store partition back to the local disk. Even bringing down the central store
                            //partitions can be done in an intelligent manner such that we do not exceed the disk quota available on the local disk.
                            //AcaciaManager.downloadCentralStore(Integer.parseInt(graphID), i);
                            // AcaciaHashMapCentralStore store = new AcaciaHashMapCentralStore(Integer.parseInt(graphID), i);
                            // store.loadGraph();
                            //The code for brining down the central store to worker's runtime data folder location should be coded here...
                            //Once we have the central store on our local directory, then we load it to the memory and extract its edge list.
                            //We have to do this for all the central store partitions because we need to have access to all of them's edge lists.
                            val centralStoreBaseDir:String = Utils_Java.getAcaciaProperty("org.acacia.server.runtime.location");
                            //AcaciaHashMapNativeStore store = new AcaciaHashMapNativeStore(Integer.parseInt(graphID), i, centralStoreBaseDir, true);
                            val store:AcaciaHashMapCentralStore = new AcaciaHashMapCentralStore(Int.parseInt(graphID), i);
                            store.loadGraph();
                            val edgeList:HashMap[Long, HashSet[Long]] = store.getUnderlyingHashMap();
                            // Iterator<Map.Entry<Long, HashSet<Long>>> itr = edgeList.entrySet().iterator();
                            // long firstVertex = 0l;
                            //
                            // while(itr.hasNext()){
                            //     Map.Entry<Long, HashSet<Long>> entr = itr.next();
                            //     firstVertex = entr.getKey();
                            //     HashSet<Long> hs = (HashSet<Long>)hmp.get(firstVertex);
                            //
                            //     if(hs == null){
                            //         hs = new HashSet<Long>();
                            //     }
                            //
                            //     HashSet<Long> hs2 = entr.getValue();
                            //
                            //     for(long secondVertex: hs2){
                            //         hs.add(secondVertex);
                            //     }
                            //
                            //     hmp.put(firstVertex, hs);
                            // }
                            //Where we need to have some batched technique to send the edgelist. Because the edgelist size is going to be very large.
                            val itr:Iterator[x10.util.Map.Entry[Long,HashSet[Long]]] = edgeList.entries().iterator();
                            var ctr:int = 0n;
                            var key:long = 0;
                            while(itr.hasNext()){
                                val pairs:x10.util.Map.Entry[Long, HashSet[Long]] = itr.next();
                                key = pairs.getKey();
                                val lst2:HashSet[Long] = pairs.getValue();
                                /*
                                The following method seems much more efficient. But it does not deliver all the edges. This is strange. we get lesser triangle count.
                                //First we send the key
                                out.println("k-" + pairs.getKey());
                                out.flush();
                                //Next, we send all the neighbours of the key
                                StringBuilder sb = new StringBuilder();
                                //sb.append("v-");
                                for(Long im: lst2){
                                    sb.append(im + ",");
                                    if(ctr > WINDOW_SIZE){
                                        //System.out.println("Sending : " + sb.toString());
                                        out.println(sb.toString());
                                        out.flush();
                                        sb = new StringBuilder();
                                        ctr = 0;
                                    }
                                    ctr++;
                                }
                                */
                                //Next, we send all the neighbours of the key
                                var sb:StringBuilder = new StringBuilder();
                                //sb.append("v-");
                                for(im:Long in lst2){
                                    sb.add(key + "," + im + ";");
                                    if(ctr > WINDOW_SIZE){
                                        //System.out.println("Sending : |" + sb.toString()+"|");
                                        out.println(sb.toString());
                                        out.flush();
                                        sb = new StringBuilder();
                                        ctr = 0n;
                                    }
                                    ctr++;
                                }
                                //We need to send the remaining set of values through the socket
                                if(ctr > 0){
                                    //System.out.println("Sending : |" + sb.toString() + "| ctr:" + ctr);
                                    out.println(sb.toString());
                                    out.flush();
                                    ctr = 0n;
                                }
                            }
                        }
                        out.println(AcaciaBackEndProtocol.DONE);
                        out.flush();
                    }else{
                        //This part of the code still need to be implemented. But we just send -2.
                        out.println("-2");
                        out.flush();
                    }
                }else{
                    // Unrecognized command: delegate to the generic handler.
                    process(msg, buff, out);
                }
            }
        }catch(e:IOException){
            Logger_Java.error("QQQQ Error : " + e.getMessage());
        }
    }
    /**
     * Computes, for every local vertex with outgoing edges recorded in the
     * central store, the value 1/out-degree (as a float), keyed by vertex id.
     * The out-degrees are accumulated across all central-store partitions of
     * the graph via HSQLDB queries.
     *
     * NOTE(review): the commented-out code and trailing comment mention
     * normalizing by the world-only vertex count (N-n), but the value
     * actually stored is just the reciprocal of the out-degree — presumably
     * the (N-n) adjustment happens at the caller; confirm before changing.
     */
    private def getAuthorityScoresLocalToWorld(graphID:String, partitionID:String):HashMap[Long, Float] {
        var fromID:long = -1;
        var toID:long = -1;   // NOTE(review): unused in the active code path
        // System.out.println("--------------------- SSSSSSSSS 1 --------------------------");
        val centralPartionCount:int = Int.parse((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value as Rail[String])(0n));
        //int partionVertxCount = Integer.parseInt(((String[])org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select VERTEXCOUNT from ACACIA_META.PARTITION where GRAPH_IDGRAPH=" + graphID + " and IDPARTITION=" + partitionID).value)[(int)0L]);
        //int vcnt = Integer.parseInt(((String[])org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select VERTEXCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value)[(int)0L]);
        //int worldOnlyVertexCount = vcnt - partionVertxCount; //This is the (N-n) term in APproxRank algo.
        // System.out.println("--------------------- SSSSSSSSS 2 --------------------------");
        //HashMap<Long, ArrayList<Long>> worldConnectToLocal = new HashMap<Long, ArrayList<Long>>();
        // Accumulates fromID -> total outgoing-edge count across all
        // central-store partitions.
        val fromDegreeDistribution:HashMap[Long, Long] = new HashMap[Long, Long]();
        //int centralPartionCount = Integer.parseInt(((String[])org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value)[(int)0L]);
        var fromIDDegree:long = 0;
        var vval:long = 0;
        for(var i:int = 0n; i < centralPartionCount; i++){
            val c:java.sql.Connection = org.acacia.centralstore.java.HSQLDBInterface.getConnectionReadOnly(graphID, ""+i);
            try{
                //Here we get all the edges that are comming from the world to the local sub graph.
                val stmt:java.sql.Statement = c.createStatement();
                //Note: Here we do not use partTo because we need to find all the out going degrees of fromIDs
                val rs:java.sql.ResultSet = stmt.executeQuery("SELECT idfrom,count(idto) FROM acacia_central.edgemap where idgraph=" + graphID + " and idpartfrom=" + partitionID + " GROUP BY idfrom;" );
                if(rs != null){
                    while(rs.next()){
                        fromID = rs.getLong(1n);
                        fromIDDegree = rs.getLong(2n);
                        //we need to do this kind of check because we are traversing across multiple partitions of the
                        //central store's edge list
                        if(fromDegreeDistribution.containsKey(fromID)){
                            vval = fromDegreeDistribution.get(fromID);
                            fromDegreeDistribution.put(fromID, vval + fromIDDegree); //We update the existing out degree value.
                        }else{
                            fromDegreeDistribution.put(fromID, fromIDDegree);
                        }
                    }
                }
                c.close();
            }catch(e:java.sql.SQLException){
                // NOTE(review): on SQLException the connection is not closed
                // (c.close() is inside the try) — possible connection leak.
                e.printStackTrace();
            }
        }
        // System.out.println("--------------------- SSSSSSSSS 5 --------------------------");
        //Note that next we need to divide these authority scores by the number of world only vertices (i.e., N-n).
        // Convert each accumulated degree into its reciprocal.
        val resultTemp:HashMap[Long, Float] = new HashMap[Long, Float]();
        val it3:Iterator[x10.util.Map.Entry[Long, Long]] = fromDegreeDistribution.entries().iterator();
        var fID:long = 0;
        var degree:long = 0;  // NOTE(review): declared but never assigned/used
        var f:float = 0;
        while(it3.hasNext()){
            val et:x10.util.Map.Entry[Long, Long] = it3.next();
            fID = et.getKey();
            f = et.getValue();
            resultTemp.put(fID, ((1/f) as float));
        }
        // System.out.println("--------------------- SSSSSSSSS 6 --------------------------");
        return resultTemp; //This is the final adjusted result
    }
/**
* This method processes the query requests to AcaciaForntEnd. This is the main function that answers the queries.
*/
public def process(msg:String, buff:BufferedReader, out:PrintWriter){
var response:String ="";
var str:String = null;
if(msg.equals(AcaciaBackEndProtocol.RUOK)){
out.println(AcaciaBackEndProtocol.IMOK);
out.flush();
}else{
//This is the default response
out.println(AcaciaBackEndProtocol.SEND);
out.flush();
}
}
/**
* This method gets the out degree distribution of a specific graph partition.
*/
    /**
     * Intended to return the out-degree distribution (vertex id -> outgoing
     * edge count) for one partition of a graph.
     *
     * NOTE(review): the HSQLDB-backed implementation is entirely commented
     * out below, so the active code path always returns an EMPTY map; the
     * run() loop's OUT_DEGREE_DISTRIBUTION_FOR_PARTITION branch therefore
     * streams no pairs. Confirm whether this was disabled intentionally.
     */
    private static def getOutDegreeDistribution(graphID:String, partitionID:String):HashMap[Long,Long]{
        //System.out.println("getting the out degree distribution for graph : " + graphID + " partitionID : " + partitionID);
        //
        // return -1;
        val result:HashMap[Long,Long] = new HashMap[Long,Long]();
        var fromID:long = -1;      // only used by the disabled code below
        var fromDegree:long = -1;  // only used by the disabled code below
        /*
        val centralPartionCount:int = Int.parse(org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).getObjectArray()(0n) as String);
        //System.out.println("centralPartionCount : " + centralPartionCount);
        for(var i:int = 0n; i < centralPartionCount; i++){
            val c:java.sql.Connection = org.acacia.centralstore.java.HSQLDBInterface.getConnectionReadOnly(graphID, ""+i);
            //System.out.println("running for partionin" + i);
            try{
                //c.setAutoCommit(false);
                val stmt:java.sql.Statement = c.createStatement();
                //System.out.println("SELECT idfrom,COUNT(idto) FROM acacia_central.edgemap where idgraph=" + graphID + " and idpartfrom=" + partitionID + " GROUP BY idfrom;");
                val rs:java.sql.ResultSet = stmt.executeQuery("SELECT idfrom,COUNT(idto) FROM acacia_central.edgemap where idgraph=" + graphID + " and idpartfrom=" + partitionID + " GROUP BY idfrom;" );
                //java.sql.ResultSet rs = stmt.executeQuery("SELECT idfrom,COUNT(idto) FROM acacia_central.edgemap where idgraph=" + graphID + " and (idpartfrom=" + partitionID + " or idpartto=" + partitionID + ") GROUP BY idfrom;" );
                //java.sql.ResultSet rs = stmt.executeQuery("SELECT idfrom,COUNT(idto) FROM acacia_central.edgemap where idgraph=" + graphID + " GROUP BY idfrom;" );
                var v:int = 0n;
                if(rs != null){
                    while(rs.next()){
                        fromID = rs.getLong(1n);
                        fromDegree = rs.getLong(2n);
                        result.put(fromID, fromDegree);
                        v++;
                    }
                }else{
                    Logger_Java.info("result is null");
                }
                c.close();
            }catch(e:java.sql.SQLException){
                e.printStackTrace();
            }
        }
        * */
        //System.out.println("Result is : " + result);
        return result;
    }
/**
*
* @param graphID
* @param partitionID
* @return the results are indexed by the toID
*/
private static def getIncommingEdges(graphID:String, partitionID:String):HashMap[Long, ArrayList[Long]]{
val result:HashMap[Long, ArrayList[Long]] = new HashMap[Long, ArrayList[Long]]();
var fromID:long = -1;
var toID:long = -1;
val centralPartionCount:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value as Rail[String])(0));
val partionVertxCount:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select VERTEXCOUNT from ACACIA_META.PARTITION where GRAPH_IDGRAPH=" + graphID + " and IDPARTITION=" + partitionID).value as Rail[String])(0));
val vcnt:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select VERTEXCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value as Rail[String])(0));
val worldOnlyVertexCount:int = vcnt - partionVertxCount; //This is the (N-n) term in APproxRank algo.
for(var i:int = 0n; i < centralPartionCount; i++){
val c:java.sql.Connection = org.acacia.centralstore.java.HSQLDBInterface.getConnectionReadOnly(graphID, ""+i);
try{
//Here we get all the edges that are comming from the world to the local sub graph.
val stmt:java.sql.Statement = c.createStatement();
val rs:java.sql.ResultSet = stmt.executeQuery("SELECT idfrom,idto FROM acacia_central.edgemap where idgraph=" + graphID + " and idpartto=" + partitionID + ";" );
if(rs != null){
while(rs.next()){
fromID = rs.getLong(1n);
toID = rs.getLong(2n);
//Here the key should be toID. The edge is from the external graph to the loacl graph. So the fromID is in the external graph.
//but toID is on the local graph. We are interested of collecting all the fromIDs that connect with each toID.
//Then we can calculate the authority flow from world to local by summing the inverse of out degrees of fromIDs
//and dividing that value by the number of vertices that are on the external graph.
if(!result.containsKey(toID)){
val temp:ArrayList[Long] = new ArrayList[Long]();
temp.add(fromID);
result.put(toID, temp);
}else{
val temp:ArrayList[Long] = result.get(toID);
temp.add(fromID);
result.put(toID, temp);
}
}
}
c.close();
}catch(e:java.sql.SQLException){
e.printStackTrace();
}
}
return result;
}
/**
* This method gets the in degree distribution of a specific graph partition from the external world.
*/
private static def getInDegreeDistribution(graphID:String, partitionID:String):HashMap[Long, Long]{
// System.out.println("getting the out degree distribution for graph : " + graphID + " partitionID : " + partitionID);
//
// return -1;
val result:HashMap[Long, Long] = new HashMap[Long, Long]();
var fromID:long = -1;
var fromDegree:long = -1;
val centralPartionCount:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value as Rail[String])(0));
for(var i:int = 0n; i < centralPartionCount; i++){
val c:java.sql.Connection = org.acacia.centralstore.java.HSQLDBInterface.getConnectionReadOnly(graphID, ""+i);
try{
//c.setAutoCommit(false);
val stmt:java.sql.Statement = c.createStatement();
val rs:java.sql.ResultSet = stmt.executeQuery("SELECT idto,COUNT(idfrom) FROM acacia_central.edgemap where idgraph=" + graphID + " and idpartto=" + partitionID + " GROUP BY idto;" );
if(rs != null){
while(rs.next()){
fromID = rs.getLong(1n);
fromDegree = rs.getLong(2n);
result.put(fromID, fromDegree);
}
}
c.close();
}catch(e:java.sql.SQLException){
e.printStackTrace();
}
}
return result;
}
/**
* This method calculates the authority scores which flow from world to local graph.
* @param graphID
* @param partitionID - This is the partition ID corresponding to the subgraph
* @return
*/
private def getAuthorityScoresWorldToLocal(graphID:String, partitionID:String):HashMap[Long, Float]{
val result:HashMap[Long, Float] = new HashMap[Long, Float]();
var fromID:long = -1;
var toID:long = -1;
val worldConnectToLocal:HashMap[Long, ArrayList[Long]] = new HashMap[Long, ArrayList[Long]]();
// System.out.println("--------------------- SSSSSSSSS 1 --------------------------");
val centralPartionCount:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value as Rail[String])(0));
val partionVertxCount:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select VERTEXCOUNT from ACACIA_META.PARTITION where GRAPH_IDGRAPH=" + graphID + " and IDPARTITION=" + partitionID).value as Rail[String])(0));
val vcnt:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select VERTEXCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value as Rail[String])(0));
val worldOnlyVertexCount:int = vcnt - partionVertxCount; //This is the (N-n) term in APproxRank algo.
// System.out.println("--------------------- SSSSSSSSS 2 --------------------------");
for(var i:int = 0n; i < centralPartionCount; i++){
val c:java.sql.Connection = org.acacia.centralstore.java.HSQLDBInterface.getConnectionReadOnly(graphID, ""+i);
//
try{
//Here we get all the edges that are comming from the world to the local sub graph.
val stmt:java.sql.Statement = c.createStatement();
val rs:java.sql.ResultSet = stmt.executeQuery("SELECT idfrom,idto FROM acacia_central.edgemap where idgraph=" + graphID + " and idpartto=" + partitionID + ";" );
if(rs != null){
while(rs.next()){
fromID = rs.getLong(1n);
toID = rs.getLong(2n);
//result.put(fromID, fromDegree);
/*
if(!worldConnectToLocal.containsKey(fromID)){
ArrayList temp = new ArrayList();
temp.add(toID);
worldConnectToLocal.put(fromID, temp);
}else{
ArrayList temp = worldConnectToLocal.get(fromID);
temp.add(toID);
worldConnectToLocal.put(fromID, temp);
}*/
//Here the key should be toID. The edge is from the external graph to the loacl graph. So the fromID is in the external graph.
//but toID is on the local graph. We are interested of collecting all the fromIDs that connect with each toID.
//Then we can calculate the authority flow from world to local by summing the inverse of out degrees of fromIDs
//and dividing that value by the number of vertices that are on the external graph.
if(!worldConnectToLocal.containsKey(toID)){
val temp:ArrayList[Long] = new ArrayList[Long]();
temp.add(fromID);
worldConnectToLocal.put(toID, temp);
}else{
val temp:ArrayList[Long] = worldConnectToLocal.get(toID);
temp.add(fromID);
worldConnectToLocal.put(toID, temp);
}
}
}
c.close();
}catch(e:java.sql.SQLException){
e.printStackTrace();
}
}
// System.out.println("--------------------- SSSSSSSSS 3 --------------------------");
//Also we need to in degree distribution from world to local.
//This we get by calling the method getInDegreeDistribution()
//HashMap<Long, ArrayList<Long>> worldConnectToLocal = new HashMap<Long, ArrayList<Long>>();
val fromDegreeDistribution:HashMap[Long, Long] = new HashMap[Long, Long]();
//int centralPartionCount = Integer.parseInt(((String[])org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value)[(int)0L]);
var fromIDDegree:long = 0;
var vval:long = 0;
for(var i:int = 0n; i < centralPartionCount; i++){
val c:java.sql.Connection = org.acacia.centralstore.java.HSQLDBInterface.getConnectionReadOnly(graphID, ""+i);
try{
//Here we get all the edges that are comming from the world to the local sub graph.
val stmt:java.sql.Statement = c.createStatement();
//Note: Here we do not use partTo because we need to find all the out going degrees of fromIDs
val rs:java.sql.ResultSet = stmt.executeQuery("SELECT idfrom, count(idfrom) FROM (select idfrom from acacia_central.edgemap where idgraph=" + graphID + ") group by idfrom;" );
if(rs != null){
while(rs.next()){
fromID = rs.getLong(1n);
fromIDDegree = rs.getLong(2n);
//we need to do this kind of check because we are traversing across multiple partitions of the
//central store's edge list
if(fromDegreeDistribution.containsKey(fromID)){
vval = fromDegreeDistribution.get(fromID);
fromDegreeDistribution.put(fromID, vval + fromIDDegree); //We update the existing out degree value.
}else{
fromDegreeDistribution.put(fromID, fromIDDegree);
}
}
}
c.close();
}catch(e:java.sql.SQLException){
e.printStackTrace();
}
}
// System.out.println("--------------------- SSSSSSSSS 4--------------------------");
//+ This should be the total out degree distribution for world only vertices
//HashMap<Long, Long> inDegreeDistributionHashMap = getInDegreeDistribution(graphID, partitionID);
//HashMap<Long, Long> inDegreeDistributionHashMap = fromDegreeDistribution;
//Next we calculate the authority scores world to local
val it:Iterator[x10.util.Map.Entry[Long, ArrayList[Long]]] = worldConnectToLocal.entries().iterator();
// System.out.println("Size : " + worldConnectToLocal.size());
var tID:long = 0;
var fID:long = 0;
var degree:long = 0;
var f:float = 0;
while(it.hasNext()){
val p:x10.util.Map.Entry[Long, ArrayList[Long]] = it.next();
val arr:ArrayList[Long] = p.getValue(); //The value contains the list of fromIDs that connect with this toID
val itr:Iterator[Long] = arr.iterator();
tID = p.getKey();
f = 0;
// System.out.println("--------------------- SSSSSSSSS 4--------------------------");
//Next we iterate through all the fromIDs
while(itr.hasNext()){
fID = itr.next();
// System.out.println("--------------------- SSSSSSSSS 4AAA--------------------------");
degree = fromDegreeDistribution.get(fID);//This is the entire out degree of fromID
// System.out.println("--------------------- SSSSSSSSS 4BBB--------------------------");
if(result.containsKey(tID)){
// System.out.println("--------------------- SSSSSSSSS 4CCC--------------------------");
f = result.get(tID);
// System.out.println("--------------------- SSSSSSSSS 4DDD--------------------------");
result.put(tID, (f + ((1.0/degree) as float))); //This is the summation of inverse of each out degree of fromIDs
}else{
result.put(tID, ((1.0/degree) as float));
}
}
}
// System.out.println("--------------------- SSSSSSSSS 5 --------------------------");
//Note that next we need to divide these authority scores by the number of world only vertices (i.e., N-n).
val resultTemp:HashMap[Long, Float] = new HashMap[Long, Float]();
val it3:Iterator[x10.util.Map.Entry[Long, Float]] = result.entries().iterator();
while(it3.hasNext()){
val et:x10.util.Map.Entry[Long, Float] = it3.next();
tID = et.getKey();
f = et.getValue();
resultTemp.put(tID, ((f/worldOnlyVertexCount) as float));
}
// System.out.println("--------------------- SSSSSSSSS 6 --------------------------");
return resultTemp; //This is the final adjusted result
}
/**
* This is just a single value. Because there is only one logical vertex in the world.
*/
private def getWorldOnlyAuthorityFlow(graphID:String, partitionID:String):double{
var result:long=-1;
//-----------------------------------------------------------------------------------
//Note August 24 2014 : The following code fragment is inefficient because for each method call it constructs
//the out degree distribution for the entire graph. However, at this moment we follow this design.
//Next at each peer we need to construct the out degree distribution.
val sb:StringBuilder = new StringBuilder();
var line:String = null;
val lst:ArrayList[String] = new ArrayList[String]();
try{
val reader:BufferedReader = new BufferedReader(new FileReader("machines.txt"));
while((line = reader.readLine()) != null){
//Here we just read first line and then break. But this may not be the best option. Better iterate through all the
//lines and accumulate those to a HashMap in future.
lst.add(line.trim());
}
}catch(ec:IOException){
ec.printStackTrace();
}
for(host:String in lst){
//Here we get the number of vertices located on the local graph on each host
try{
val socket:Socket = new Socket(host, Conts_Java.ACACIA_INSTANCE_PORT);
val out:PrintWriter = new PrintWriter(socket.getOutputStream());
val reader:BufferedReader = new BufferedReader(new InputStreamReader(socket.getInputStream()));
var response:String = "";
out.println(AcaciaInstanceProtocol.OUT_DEGREE_DIST);
out.flush();
response = reader.readLine();
if((response != null)&&(response.equals(AcaciaInstanceProtocol.OK))){
//System.out.println("host : " + host + " graphID : " + graphID);
out.println(graphID);
out.flush();
}else{
result = -1;
}
response = reader.readLine();
if((response != null)&&(!response.equals("-1"))){
sb.add(response);
result = 0;
}else{
result = -1;
break;
}
out.close();
}catch(e:UnknownHostException){
Logger_Java.error("Connecting to host (10) " + host + " got error message : " + e.getMessage());
}catch(ec:IOException){
Logger_Java.error("Connecting to host (10) " + host + " got error message : " + ec.getMessage());
}
if(result == -1){
Logger_Java.info("There is an error in getting out degree from host : " + host);
return result;
}
}
Logger_Java.info("======AAAAAAAAAA===========");
val outDegreeEntireGraph:HashMap[Long,Long] = new HashMap[Long,Long]();
val res1:Rail[String] = sb.toString().split(";");
for(item:String in res1){
if(!item.trim().equals("")){
val res2:Rail[String] = item.split(":");
if(res2.size >= 2){
//outDegreeDistribution[Integer.parseInt(res2[0])] = Integer.parseInt(res2[1]);
outDegreeEntireGraph.put(Long.parseLong(res2(0)), Long.parseLong(res2(1)));
}
}
}
//System.out.println(outDegreeEntireGraph.toString());
val worldOnlyOutDegreeDistribution:HashMap[Long,Long] = getWorldOnlyOutDegreeDistribution(graphID, partitionID);
var authFlow:double = 0;
val itr:Iterator[x10.util.Map.Entry[Long,Long]] = worldOnlyOutDegreeDistribution.entries().iterator();
while(itr.hasNext()){
val pairs:x10.util.Map.Entry[Long,Long] = itr.next();
val vertex:long = pairs.getKey();
val vertexWorldOnlyOutDeg:long = pairs.getValue();
val vertexEntireGraphOutDeg:Long = outDegreeEntireGraph.get(vertex);
if(vertexEntireGraphOutDeg == -1){
//System.out.println("Out degree null for vertex : " + vertex);
}else{
authFlow += (vertexWorldOnlyOutDeg)/vertexEntireGraphOutDeg;
}
}
Logger_Java.info("Authority flow for sub graph : " + partitionID + " -> " + authFlow);
result = authFlow as Long;
Logger_Java.info("======BBBBBBBBBB===========");
//-----------------------------------------------------------------------------------
return result;
}
/**
* Out degree distribution only for the external world.
*/
private static def getWorldOnlyOutDegreeDistribution(graphID:String, partitionID:String):HashMap[Long,Long]{
// System.out.println("getting the out degree distribution for graph : " + graphID + " partitionID : " + partitionID);
//
// return -1;
val result:HashMap[Long,Long] = new HashMap[Long,Long]();
var fromID:long = -1;
var fromDegree:long = -1;
//java.sql.Connection c = org.acacia.centralstore.java.HSQLDBInterface.getConnection(graphID, partitionID);
//"select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID);
val centralPartionCount:int = Int.parseInt((org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).value as Rail[String])(0));
for(var i:int = 0n; i < centralPartionCount; i++){
val c:java.sql.Connection = org.acacia.centralstore.java.HSQLDBInterface.getConnectionReadOnly(graphID, ""+i);
try{
//c.setAutoCommit(false);
val stmt:java.sql.Statement = c.createStatement();
val rs:java.sql.ResultSet = stmt.executeQuery("SELECT idfrom,COUNT(idto) FROM acacia_central.edgemap where idgraph=" + graphID + " and idpartfrom <> " + partitionID + " and idpartto <> " + partitionID + " GROUP BY idfrom;" );
if(rs != null){
while(rs.next()){
fromID = rs.getLong(1n);
fromDegree = rs.getLong(2n);
result.put(fromID, fromDegree);
}
}
c.close();
}catch(e:java.sql.SQLException){
e.printStackTrace();
}
}
//System.out.println("WOnly out deg dist : " + result.toString());
return result;
}
/**
*
* @param graphID
* @param partitionID
* @return -1 if the global graph size is smaller than the local graph. This means we need to send the intersecting global graph to the worker.
* If this value is not -1, that means we do the calculation at the local graph. But then we need to ask the worker to send the
* edge list back to the master. This kind of scenarios will be less likely with highly partitioned graphs.
*/
private def getIntersectingTraingles(graphID:String, partitionID:String):long{
var result:long = -1;
var fromID:long = -1;
var toID:long = -1;
val centralPartionCount:int = Int.parseInt(org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select CENTRALPARTITIONCOUNT from ACACIA_META.GRAPH where IDGRAPH=" + graphID).getObjectArray()(0n) as String);
val edgeList:HashMap[Long, Long] = new HashMap[Long, Long]();
var globalSize:long = 0;
for(var i:int = 0n; i < centralPartionCount; i++){
val centralStoreBaseDir:String = Utils_Java.getAcaciaProperty("org.acacia.server.runtime.location");
//AcaciaHashMapNativeStore store = new AcaciaHashMapNativeStore(Integer.parseInt(graphID), i, centralStoreBaseDir, true);
//AcaciaHashMapCentralStore store = new AcaciaHashMapCentralStore(Integer.parseInt(graphID), i);
//store.loadGraph();
val store:AcaciaHashMapCentralStore = new AcaciaHashMapCentralStore(Int.parseInt(graphID), i);
store.loadGraph();
globalSize += store.getEdgeCount();
}
val localSize:long = Int.parseInt(org.acacia.metadata.db.java.MetaDataDBInterface.runSelect("select EDGECOUNT from ACACIA_META.PARTITION where GRAPH_IDGRAPH=" + graphID + " and IDPARTITION=" + partitionID).getObjectArray()(0n) as String);
if(localSize > globalSize){
result = -1;
}
return result;
}
} | X10 | 3 | mdherath/Acacia | src/org/acacia/backend/AcaciaBackEndServiceSession.x10 | [
"Apache-2.0"
] |
<?xml version="1.0" encoding="UTF-8"?>
<faces-config>
<faces-config-extension>
<namespace-uri>http://www.ibm.com/xsp/custom</namespace-uri>
<default-prefix>xc</default-prefix>
</faces-config-extension>
<composite-component>
<component-type>OneUILayout</component-type>
<composite-name>OneUILayout</composite-name>
<composite-file>/OneUILayout.xsp</composite-file>
<composite-extension>
<designer-extension>
<in-palette>true</in-palette>
<render-markup><?xml version="1.0" encoding="UTF-8"?>
<xp:view xmlns:xp="http://www.ibm.com/xsp/core"
xmlns:xc="http://www.ibm.com/xsp/custom">
<xp:div style="background-color:#CEE1FC; padding:5px">
<table width="98%"><tr><td>
{logo} &#160; {bannerApplicationLinks}
</td><td align="right">{bannerUtilityLinks}</td></tr></table>
<xp:table style="width: 98%; background-color:#FFFFFF">
<xp:tr>
<xp:td colspan="3" style="background-color:#4586D3">
<table width="100%"><tr><td>
<table><tr>
<td style="background-color:#4372A9;color:#FFFFFF">
{titleBarTabs}</td>
<td style="background-color:#E4E8EF">
selected</td>
</tr></table>
</td><td>
<div style="float:right;background:#FFFFFF">
{searchBar}</div>
</td></tr></table>
</xp:td>
</xp:tr>
<xp:tr>
<xp:td colspan="3" style="background-color:#E4E8EF">
<table width="100%"><tr><td><h2>{placeBarName}</h2></td>
<td> 
<div style="float:right;border:thin solid #C0C7CD">
{placeBarActions}</div></td>
</tr></table>
</xp:td>
</xp:tr>
<xp:tr>
<xp:td style="width:123px" valign="top">
<xp:callback id="LeftColumn" facetName="LeftColumn"/>
</xp:td>
<xp:td valign="top">
<xp:callback id="callback1"/>
<xp:br/><xp:br/><xp:br/>
</xp:td>
<xp:td style="width:123px" valign="top">
<xp:callback id="RightColumn" facetName="RightColumn" />
</xp:td>
</xp:tr>
</xp:table>
<xp:table style="width: 98%; background-color:#FFFFFF; margin-top:5px">
<xp:tr><xp:td> {footerLinks}</xp:td></xp:tr>
</xp:table>
{legalText}
</xp:div>
</xp:view>
</render-markup>
</designer-extension>
</composite-extension>
<property>
<property-name>navigationPath</property-name>
<property-class>string</property-class>
</property>
<property>
<property-name>defaultNavigationPath</property-name>
<property-class>string</property-class>
</property>
</composite-component>
</faces-config>
| XPages | 3 | OpenNTF/DomSQL | domsql/nsf/ondisk-DomSQL/CustomControls/OneUILayout.xsp-config | [
"Apache-2.0"
] |
(* Content-type: application/mathematica *)
(*** Wolfram Notebook File ***)
(* http://www.wolfram.com/nb *)
(* CreatedBy='Mathematica 7.0' *)
(*CacheID: 234*)
(* Internal cache information:
NotebookFileLineBreakTest
NotebookFileLineBreakTest
NotebookDataPosition[ 145, 7]
NotebookDataLength[ 51606, 1517]
NotebookOptionsPosition[ 50273, 1475]
NotebookOutlinePosition[ 50610, 1490]
CellTagsIndexPosition[ 50567, 1487]
WindowFrame->Normal*)
(* Beginning of Notebook Content *)
Notebook[{
Cell[BoxData[
RowBox[{"a", ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
RowBox[{"IdentityMatrix", "[", "8", "]"}], ",",
RowBox[{"Reverse", "[",
RowBox[{"IdentityMatrix", "[", "8", "]"}], "]"}]}], "}"}], ",",
RowBox[{"{",
RowBox[{
RowBox[{"Reverse", "[",
RowBox[{"IdentityMatrix", "[", "8", "]"}], "]"}], ",",
RowBox[{
RowBox[{"-", "1"}], "*",
RowBox[{"IdentityMatrix", "[", "8", "]"}]}]}], "}"}]}], "}"}],
"]"}]}]], "Input",
CellChangeTimes->{{3.545829278882098*^9, 3.545829319074397*^9}}],
Cell[BoxData[
RowBox[{"v", ":=",
RowBox[{
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "q0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "q1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "q2", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "q3", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "q4", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "q5", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"q6"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "p6", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "p5", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "p4", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "p3", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "p2", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "p1", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"p0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"s0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "s1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "s2", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "s3", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "s4", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "s5", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "s6", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"s7"}], "}"}]}], "}"}]}]}]], "Input",
CellChangeTimes->{{3.5458293327981825`*^9, 3.5458293359303617`*^9}, {
3.5458294146008615`*^9, 3.5458296302641964`*^9}, {3.545829660343917*^9,
3.545829713685968*^9}, {3.5458297688811245`*^9, 3.545829986622579*^9}, {
3.545830471926337*^9, 3.545830493855591*^9}}],
Cell[BoxData[
RowBox[{"b", ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
RowBox[{"IdentityMatrix", "[", "8", "]"}], ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "v"}], "}"}]}], "}"}], "]"}]}]], "Input",
CellChangeTimes->{{3.545830273509988*^9, 3.545830273802005*^9}, {
3.5458310447871027`*^9, 3.5458310471452374`*^9}, {3.5458310910837507`*^9,
3.545831121041464*^9}}],
Cell[BoxData[
RowBox[{"p", ":=",
RowBox[{
RowBox[{"1", "/", "2"}], "*",
RowBox[{"a", ".", "b", ".", "a"}]}]}]], "Input",
CellChangeTimes->{{3.545831029669238*^9, 3.5458310431910114`*^9}, {
3.5713513889224052`*^9, 3.5713513894743233`*^9}}],
Cell[BoxData[
RowBox[{"vi", ":=",
RowBox[{
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
RowBox[{"1", "/", "s0"}], ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0", ",", "0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",",
RowBox[{"1", "/", "s1"}], ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",",
RowBox[{"1", "/", "s2"}], ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",",
RowBox[{"1", "/", "s3"}], ",", "0", ",", "0", ",", "0", ",", "0"}],
"}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",",
RowBox[{"1", "/", "s4"}], ",", "0", ",", "0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
RowBox[{"1", "/", "s5"}], ",", "0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
RowBox[{"1", "/", "s6"}], ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
RowBox[{"1", "/", "s7"}]}], "}"}]}], "}"}], ".", "\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
RowBox[{"-", "p0"}], ",", "1", ",", "0", ",", "0", ",", "0", ",", "0",
",", "0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".", "\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",",
RowBox[{"-", "p1"}], ",", "1", ",", "0", ",", "0", ",", "0", ",", "0",
",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".", "\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",",
RowBox[{"-", "p2"}], ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".", "\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",",
RowBox[{"-", "p3"}], ",", "1", ",", "0", ",", "0", ",", "0"}], "}"}],
",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".", "\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",",
RowBox[{"-", "p4"}], ",", "1", ",", "0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".", "\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
RowBox[{"-", "p5"}], ",", "1", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".", "\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
RowBox[{"-", "p6"}], ",", "1"}], "}"}]}], "}"}], ".",
"\[IndentingNewLine]",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
RowBox[{"-", "q6"}]}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
RowBox[{"-", "q5"}], ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
RowBox[{"-", "q4"}], ",", "0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "0", ",", "1", ",",
RowBox[{"-", "q3"}], ",", "0", ",", "0", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "0", ",", "1", ",",
RowBox[{"-", "q2"}], ",", "0", ",", "0", ",", "0", ",", "0"}], "}"}],
",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "1", ",",
RowBox[{"-", "q1"}], ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}], ".",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{"1", ",",
RowBox[{"-", "q0"}], ",", "0", ",", "0", ",", "0", ",", "0", ",", "0",
",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",",
"0"}], "}"}], ",",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"1"}], "}"}]}], "}"}]}]}]], "Input",
CellChangeTimes->{{3.5458303069829025`*^9, 3.545830307743946*^9},
3.5458303484782763`*^9, {3.54583039506394*^9, 3.5458304538903055`*^9}, {
3.545830517654952*^9, 3.545830670162675*^9}}],
Cell[BoxData[
RowBox[{"c", ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
RowBox[{"IdentityMatrix", "[", "8", "]"}], ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "vi"}], "}"}]}], "}"}], "]"}]}]], "Input",
CellChangeTimes->{{3.5457683897333775`*^9, 3.5457683916024837`*^9}, {
3.545830784950241*^9, 3.5458307852102556`*^9}}],
Cell[BoxData[
RowBox[{"pi", ":=",
RowBox[{
RowBox[{"1", "/", "2"}], "*",
RowBox[{"a", ".", "c", ".", "a"}]}]}]], "Input",
CellChangeTimes->{{3.5713513967514887`*^9, 3.57135139718403*^9}}],
Cell[BoxData[
RowBox[{
RowBox[{"dct", "[", "k_", "]"}], ":=",
RowBox[{"Table", "[",
RowBox[{
RowBox[{
RowBox[{"Sqrt", "[",
RowBox[{"2", "/", "k"}], "]"}], "*",
RowBox[{"Cos", "[",
RowBox[{
RowBox[{"(",
RowBox[{"j", "+",
RowBox[{"1", "/", "2"}]}], ")"}], "*", "i", "*",
RowBox[{"\[Pi]", "/", "k"}]}], "]"}], "*",
RowBox[{"If", "[",
RowBox[{
RowBox[{"i", "\[Equal]", "0"}], ",",
RowBox[{"Sqrt", "[",
RowBox[{"1", "/", "2"}], "]"}], ",", "1"}], "]"}]}], ",",
RowBox[{"{",
RowBox[{"i", ",", "0", ",",
RowBox[{"k", "-", "1"}]}], "}"}], ",",
RowBox[{"{",
RowBox[{"j", ",", "0", ",",
RowBox[{"k", "-", "1"}]}], "}"}]}], "]"}]}]], "Input",
CellChangeTimes->{{3.545830925593285*^9, 3.5458309314066176`*^9}}],
Cell[BoxData[
RowBox[{"d", ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"{",
RowBox[{"{",
RowBox[{
"0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",",
"0", ",",
RowBox[{"dct", "[", "16", "]"}], ",", "0", ",", "0", ",", "0", ",", "0",
",", "0", ",", "0", ",", "0", ",", "0"}], "}"}], "}"}],
"]"}]}]], "Input",
CellChangeTimes->{{3.5713514036194563`*^9, 3.571351403734996*^9}}],
Cell[BoxData[
RowBox[{"e", ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{"p", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "p"}], "}"}]}], "}"}], "]"}]}]], "Input",
CellChangeTimes->{{3.5457274768888016`*^9, 3.545727477021809*^9}, {
3.545764597801342*^9, 3.5457645985633855`*^9}, {3.545830810389696*^9,
3.545830853981189*^9}, {3.545830919428932*^9, 3.5458309561620336`*^9}}],
Cell[BoxData[
RowBox[{"g", ":=",
RowBox[{"d", ".", "e"}]}]], "Input"],
Cell[BoxData[
RowBox[{"g", ":=", "d"}]], "Input",
CellChangeTimes->{{3.571351408452491*^9, 3.571351410637926*^9}}],
Cell[BoxData[
RowBox[{"f", ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{"pi", ",", "0"}], "}"}], ",",
RowBox[{"{",
RowBox[{"0", ",", "pi"}], "}"}]}], "}"}], "]"}]}]], "Input"],
Cell[BoxData[
RowBox[{"h", ":=",
RowBox[{"f", ".",
RowBox[{"Transpose", "[", "d", "]"}]}]}]], "Input"],
Cell[BoxData[
RowBox[{"h", ":=",
RowBox[{"Transpose", "[", "d", "]"}]}]], "Input",
CellChangeTimes->{{3.57135141550872*^9, 3.571351418102202*^9}}],
Cell[BoxData[
RowBox[{
RowBox[{"ar", "[",
RowBox[{"k_", ",", "l_", ",", "r_"}], "]"}], ":=",
RowBox[{"Table", "[",
RowBox[{
RowBox[{"r", "^",
RowBox[{"(",
RowBox[{
RowBox[{"Abs", "[",
RowBox[{"i", "-", "j"}], "]"}], "+", "l"}], ")"}]}], ",",
RowBox[{"{",
RowBox[{"i", ",", "k"}], "}"}], ",",
RowBox[{"{",
RowBox[{"j", ",", "k"}], "}"}]}], "]"}]}]], "Input",
CellChangeTimes->{{3.54583097742725*^9, 3.5458309780892878`*^9}, {
3.571351431671892*^9, 3.5713514383357077`*^9}}],
Cell[BoxData[
RowBox[{"obj", ":=",
RowBox[{"GeometricMean", "[",
RowBox[{
RowBox[{"Diagonal", "[",
RowBox[{"g", ".",
RowBox[{"ar", "[",
RowBox[{"32", ",", "0", ",", "0.95"}], "]"}], ".",
RowBox[{"Transpose", "[", "g", "]"}]}], "]"}], "*",
RowBox[{"Diagonal", "[",
RowBox[{
RowBox[{"Transpose", "[", "h", "]"}], ".", "h"}], "]"}]}],
"]"}]}]], "Input",
CellChangeTimes->{{3.5458309882078667`*^9, 3.5458309885658865`*^9},
3.5466157425283413`*^9, 3.5466160069624662`*^9, {3.546616296586032*^9,
3.546616299230183*^9}, {3.5466163309889994`*^9, 3.54661634025953*^9}, {
3.546616817198809*^9, 3.546616821172036*^9}, {3.571351466766985*^9,
3.571351467025209*^9}}],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"cg", "=",
RowBox[{
RowBox[{"10", "*",
RowBox[{"Log", "[",
RowBox[{"10", ",",
RowBox[{"1", "/", "obj"}]}], "]"}]}], "//.",
RowBox[{"{",
RowBox[{
RowBox[{"s0", "\[Rule]",
RowBox[{"90.0", "/", "64"}]}], ",",
RowBox[{"s1", "\[Rule]",
RowBox[{"74.0", "/", "64"}]}], ",",
RowBox[{"s2", "\[Rule]",
RowBox[{"73.0", "/", "64"}]}], ",",
RowBox[{"s3", "\[Rule]",
RowBox[{"71.0", "/", "64"}]}], ",",
RowBox[{"s4", "\[Rule]",
RowBox[{"67.0", "/", "64"}]}], ",",
RowBox[{"s5", "\[Rule]",
RowBox[{"67.0", "/", "64"}]}], ",",
RowBox[{"s6", "\[Rule]",
RowBox[{"67.0", "/", "64"}]}], ",",
RowBox[{"s7", "\[Rule]",
RowBox[{"72.0", "/", "64"}]}], ",",
RowBox[{"p0", "\[Rule]",
RowBox[{
RowBox[{"-", "24.0"}], "/", "64"}]}], ",",
RowBox[{"p1", "\[Rule]",
RowBox[{
RowBox[{"-", "23.0"}], "/", "64"}]}], ",",
RowBox[{"p2", "\[Rule]",
RowBox[{
RowBox[{"-", "17.0"}], "/", "64"}]}], ",",
RowBox[{"p3", "\[Rule]",
RowBox[{
RowBox[{"-", "12.0"}], "/", "64"}]}], ",",
RowBox[{"p4", "\[Rule]",
RowBox[{
RowBox[{"-", "14.0"}], "/", "64"}]}], ",",
RowBox[{"p5", "\[Rule]",
RowBox[{
RowBox[{"-", "13.0"}], "/", "64"}]}], ",",
RowBox[{"p6", "\[Rule]",
RowBox[{
RowBox[{"-", "7.0"}], "/", "64"}]}], ",",
RowBox[{"q0", "\[Rule]",
RowBox[{"50.0", "/", "64"}]}], ",",
RowBox[{"q1", "\[Rule]",
RowBox[{"40.0", "/", "64"}]}], ",",
RowBox[{"q2", "\[Rule]",
RowBox[{"31.0", "/", "64"}]}], ",",
RowBox[{"q3", "\[Rule]",
RowBox[{"22.0", "/", "64"}]}], ",",
RowBox[{"q4", "\[Rule]",
RowBox[{"18.0", "/", "64"}]}], ",",
RowBox[{"q5", "\[Rule]",
RowBox[{"16.0", "/", "64"}]}], ",",
RowBox[{"q6", "\[Rule]",
RowBox[{"11.0", "/", "64"}]}]}], "}"}]}]}]], "Input",
CellChangeTimes->{{3.546615331788849*^9, 3.546615552381466*^9}, {
3.546615615810094*^9, 3.5466157264434214`*^9}, 3.5466160329119506`*^9,
3.546616834253785*^9, {3.570877445159778*^9, 3.570877606901031*^9},
3.571351456738694*^9}],
Cell[BoxData["9.89338176232053`"], "Output",
CellChangeTimes->{
3.5466157337758408`*^9, 3.546616038879292*^9, 3.5466168404231377`*^9,
3.570876126258851*^9, 3.5708774092622013`*^9, 3.570877609912533*^9, {
3.5713514531814833`*^9, 3.5713515034340487`*^9}, 3.579069516179351*^9}]
}, Open ]],
Cell[BoxData[
RowBox[{
RowBox[{"ar2d", "[",
RowBox[{"k_", ",", "r_"}], "]"}], ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"Table", "[",
RowBox[{
RowBox[{"ar", "[",
RowBox[{"k", ",",
RowBox[{"Abs", "[",
RowBox[{"i", "-", "j"}], "]"}], ",", "r"}], "]"}], ",",
RowBox[{"{",
RowBox[{"i", ",", "k"}], "}"}], ",",
RowBox[{"{",
RowBox[{"j", ",", "k"}], "}"}]}], "]"}], "]"}]}]], "Input",
CellChangeTimes->{{3.571215344262658*^9, 3.571215350241576*^9},
3.571215796960052*^9, 3.5712164912715063`*^9, {3.571217517951624*^9,
3.57121752072649*^9}, 3.571217741460986*^9}],
Cell[BoxData[
RowBox[{
RowBox[{"g2d", "[", "k_", "]"}], ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"Table", "[",
RowBox[{
RowBox[{"If", "[",
RowBox[{
RowBox[{"i", "\[Equal]", "j"}], ",", "g", ",", "0"}], "]"}], ",",
RowBox[{"{",
RowBox[{"i", ",", "k"}], "}"}], ",",
RowBox[{"{",
RowBox[{"j", ",", "k"}], "}"}]}], "]"}], "]"}]}]], "Input",
CellChangeTimes->{{3.571217377033023*^9, 3.5712174099145327`*^9}, {
3.5712175508007708`*^9, 3.571217558924345*^9}}],
Cell[BoxData[
RowBox[{
RowBox[{"h2d", "[", "k_", "]"}], ":=",
RowBox[{"ArrayFlatten", "[",
RowBox[{"Table", "[",
RowBox[{
RowBox[{"If", "[",
RowBox[{
RowBox[{"i", "\[Equal]", "j"}], ",", "h", ",", "0"}], "]"}], ",",
RowBox[{"{",
RowBox[{"i", ",", "k"}], "}"}], ",",
RowBox[{"{",
RowBox[{"j", ",", "k"}], "}"}]}], "]"}], "]"}]}]], "Input",
CellChangeTimes->{{3.57121768285324*^9, 3.571217702211115*^9}}],
Cell[BoxData[
RowBox[{
RowBox[{"t2d", "[", "k_", "]"}], ":=",
RowBox[{
RowBox[{"IdentityMatrix", "[",
RowBox[{"k", "*",
RowBox[{"k", "/", "2"}]}], "]"}], "[",
RowBox[{"[",
RowBox[{"Flatten", "[",
RowBox[{"Transpose", "[",
RowBox[{"Partition", "[",
RowBox[{
RowBox[{"Range", "[",
RowBox[{"k", "*",
RowBox[{"k", "/", "2"}]}], "]"}], ",",
RowBox[{"k", "/", "2"}]}], "]"}], "]"}], "]"}], "]"}],
"]"}]}]], "Input",
CellChangeTimes->{{3.571216514685655*^9, 3.571216535449212*^9}, {
3.571217837719702*^9, 3.571217852699953*^9}}],
Cell[BoxData[
RowBox[{"obj2d", ":=",
RowBox[{"GeometricMean", "[",
RowBox[{
RowBox[{"Diagonal", "[",
RowBox[{
RowBox[{"g2d", "[", "16", "]"}], ".",
RowBox[{"t2d", "[", "32", "]"}], ".",
RowBox[{"g2d", "[", "32", "]"}], ".",
RowBox[{"ar2d", "[",
RowBox[{"32", ",", "0.95"}], "]"}], ".",
RowBox[{"Transpose", "[",
RowBox[{
RowBox[{"g2d", "[", "16", "]"}], ".",
RowBox[{"t2d", "[", "32", "]"}], ".",
RowBox[{"g2d", "[", "32", "]"}]}], "]"}]}], "]"}], "*",
RowBox[{"Diagonal", "[",
RowBox[{
RowBox[{"Transpose", "[",
RowBox[{
RowBox[{"h2d", "[", "32", "]"}], ".",
RowBox[{"t2d", "[", "32", "]"}], ".",
RowBox[{"h2d", "[", "16", "]"}]}], "]"}], ".",
RowBox[{"h2d", "[", "32", "]"}], ".",
RowBox[{"t2d", "[", "32", "]"}], ".",
RowBox[{"h2d", "[", "16", "]"}]}], "]"}]}], "]"}]}]], "Input",
CellChangeTimes->{{3.571217495222146*^9, 3.57121749695989*^9}, {
3.571217529685446*^9, 3.5712175379810658`*^9}, {3.571217568436331*^9,
3.571217574381446*^9}, {3.571217660893468*^9, 3.5712176794285927`*^9}, {
3.5712178183248453`*^9, 3.5712178224715137`*^9}, {3.5712178578153143`*^9,
3.571217899033683*^9}, {3.5712180481933937`*^9, 3.571218065866927*^9}, {
3.5712189138225317`*^9, 3.571218932628921*^9}, {3.571220575284101*^9,
3.571220575519759*^9}, {3.571262758598935*^9, 3.571262768217821*^9}, {
3.5713515506821203`*^9, 3.571351607969413*^9}}],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"cg2d", "=",
RowBox[{
RowBox[{"10", "*",
RowBox[{"Log", "[",
RowBox[{"10", ",",
RowBox[{"1", "/", "obj2d"}]}], "]"}]}], "//.",
RowBox[{"{",
RowBox[{
RowBox[{"s0", "\[Rule]",
RowBox[{"90.0", "/", "64"}]}], ",",
RowBox[{"s1", "\[Rule]",
RowBox[{"74.0", "/", "64"}]}], ",",
RowBox[{"s2", "\[Rule]",
RowBox[{"73.0", "/", "64"}]}], ",",
RowBox[{"s3", "\[Rule]",
RowBox[{"71.0", "/", "64"}]}], ",",
RowBox[{"s4", "\[Rule]",
RowBox[{"67.0", "/", "64"}]}], ",",
RowBox[{"s5", "\[Rule]",
RowBox[{"67.0", "/", "64"}]}], ",",
RowBox[{"s6", "\[Rule]",
RowBox[{"67.0", "/", "64"}]}], ",",
RowBox[{"s7", "\[Rule]",
RowBox[{"72.0", "/", "64"}]}], ",",
RowBox[{"p0", "\[Rule]",
RowBox[{
RowBox[{"-", "24.0"}], "/", "64"}]}], ",",
RowBox[{"p1", "\[Rule]",
RowBox[{
RowBox[{"-", "23.0"}], "/", "64"}]}], ",",
RowBox[{"p2", "\[Rule]",
RowBox[{
RowBox[{"-", "17.0"}], "/", "64"}]}], ",",
RowBox[{"p3", "\[Rule]",
RowBox[{
RowBox[{"-", "12.0"}], "/", "64"}]}], ",",
RowBox[{"p4", "\[Rule]",
RowBox[{
RowBox[{"-", "14.0"}], "/", "64"}]}], ",",
RowBox[{"p5", "\[Rule]",
RowBox[{
RowBox[{"-", "13.0"}], "/", "64"}]}], ",",
RowBox[{"p6", "\[Rule]",
RowBox[{
RowBox[{"-", "7.0"}], "/", "64"}]}], ",",
RowBox[{"q0", "\[Rule]",
RowBox[{"50.0", "/", "64"}]}], ",",
RowBox[{"q1", "\[Rule]",
RowBox[{"40.0", "/", "64"}]}], ",",
RowBox[{"q2", "\[Rule]",
RowBox[{"31.0", "/", "64"}]}], ",",
RowBox[{"q3", "\[Rule]",
RowBox[{"22.0", "/", "64"}]}], ",",
RowBox[{"q4", "\[Rule]",
RowBox[{"18.0", "/", "64"}]}], ",",
RowBox[{"q5", "\[Rule]",
RowBox[{"16.0", "/", "64"}]}], ",",
RowBox[{"q6", "\[Rule]",
RowBox[{"11.0", "/", "64"}]}]}], "}"}]}]}]], "Input",
CellChangeTimes->{{3.571218095000925*^9, 3.5712180969589567`*^9},
3.571358649990808*^9, {3.5713587744026117`*^9, 3.5713587751039753`*^9}}],
Cell[BoxData["$Aborted"], "Output",
CellChangeTimes->{3.579069718407742*^9}]
}, Open ]]
},
WindowSize->{1424, 775},
WindowMargins->{{0, Automatic}, {Automatic, 33}},
FrontEndVersion->"7.0 for Linux x86 (64-bit) (February 25, 2009)",
StyleDefinitions->"Default.nb"
]
(* End of Notebook Content *)
(* Internal cache information *)
(*CellTagsOutline
CellTagsIndex->{}
*)
(*CellTagsIndex
CellTagsIndex->{}
*)
(*NotebookFileOutline
Notebook[{
Cell[545, 20, 631, 18, 32, "Input"],
Cell[1179, 40, 17489, 516, 517, "Input"],
Cell[18671, 558, 460, 12, 32, "Input"],
Cell[19134, 572, 252, 6, 32, "Input"],
Cell[19389, 580, 17915, 507, 605, "Input"],
Cell[37307, 1089, 412, 11, 32, "Input"],
Cell[37722, 1102, 199, 5, 32, "Input"],
Cell[37924, 1109, 845, 25, 32, "Input"],
Cell[38772, 1136, 441, 11, 32, "Input"],
Cell[39216, 1149, 460, 11, 32, "Input"],
Cell[39679, 1162, 73, 2, 32, "Input"],
Cell[39755, 1166, 116, 2, 32, "Input"],
Cell[39874, 1170, 246, 8, 32, "Input"],
Cell[40123, 1180, 110, 3, 32, "Input"],
Cell[40236, 1185, 151, 3, 32, "Input"],
Cell[40390, 1190, 544, 16, 32, "Input"],
Cell[40937, 1208, 730, 17, 32, "Input"],
Cell[CellGroupData[{
Cell[41692, 1229, 2245, 63, 77, "Input"],
Cell[43940, 1294, 285, 4, 31, "Output"]
}, Open ]],
Cell[44240, 1301, 640, 17, 32, "Input"],
Cell[44883, 1320, 517, 14, 32, "Input"],
Cell[45403, 1336, 463, 13, 32, "Input"],
Cell[45869, 1351, 616, 18, 32, "Input"],
Cell[46488, 1371, 1511, 33, 55, "Input"],
Cell[CellGroupData[{
Cell[48024, 1408, 2153, 61, 77, "Input"],
Cell[50180, 1471, 77, 1, 31, "Output"]
}, Open ]]
}
]
*)
(* End of internal cache information *)
| Mathematica | 4 | junaidnaseer/daala | doc/16x32.nb | [
"BSD-2-Clause"
] |
2E,5A
61,7A
| Component Pascal | 1 | janosch-x/character_set | lib/character_set/predefined_sets/crypt.cps | [
"MIT"
] |
"""Fixtures for MJPEG IP Camera integration tests."""
from __future__ import annotations
from collections.abc import Generator
from unittest.mock import AsyncMock, patch
import pytest
from requests_mock import Mocker
from homeassistant.components.mjpeg.const import (
CONF_MJPEG_URL,
CONF_STILL_IMAGE_URL,
DOMAIN,
)
from homeassistant.const import (
CONF_AUTHENTICATION,
CONF_PASSWORD,
CONF_USERNAME,
CONF_VERIFY_SSL,
HTTP_BASIC_AUTHENTICATION,
)
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
    """Return the default mocked config entry."""
    # Build the option map first so the entry construction reads cleanly.
    options = {
        CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
        CONF_MJPEG_URL: "https://example.com/mjpeg",
        CONF_PASSWORD: "supersecret",
        CONF_STILL_IMAGE_URL: "http://example.com/still",
        CONF_USERNAME: "frenck",
        CONF_VERIFY_SSL: True,
    }
    return MockConfigEntry(
        title="My MJPEG Camera",
        domain=DOMAIN,
        data={},
        options=options,
    )
@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock, None, None]:
    """Mock setting up a config entry."""
    with patch(
        "homeassistant.components.mjpeg.async_setup_entry", return_value=True
    ) as mock_setup:
        # Yield (not return) so the patch stays active for the whole test
        # that requested this fixture.
        yield mock_setup
@pytest.fixture
def mock_reload_entry() -> Generator[AsyncMock, None, None]:
    """Mock reloading a config entry."""
    # The mock is yielded so tests can assert how often a reload was triggered.
    with patch("homeassistant.components.mjpeg.async_reload_entry") as mock_reload:
        yield mock_reload
@pytest.fixture
def mock_mjpeg_requests(requests_mock: Mocker) -> Generator[Mocker, None, None]:
    """Fixture to provide a requests mocker."""
    # Pre-register successful responses for the camera endpoints so that
    # connection checks during setup succeed.
    # NOTE(review): mock_config_entry uses "http://example.com/still" while
    # "https://example.com/still" is mocked here -- confirm this mismatch
    # is intentional.
    requests_mock.get("https://example.com/mjpeg", text="resp")
    requests_mock.get("https://example.com/still", text="resp")
    yield requests_mock
@pytest.fixture
async def init_integration(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_mjpeg_requests: Mocker
) -> MockConfigEntry:
    """Set up the MJPEG IP Camera integration for testing."""
    # mock_mjpeg_requests is requested (although unused directly) so the
    # integration's HTTP calls during setup hit mocked endpoints.
    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()
    return mock_config_entry
| Python | 5 | MrDelik/core | tests/components/mjpeg/conftest.py | [
"Apache-2.0"
] |
pre: <%= @pre %>
<%= render "inner_live.html", assigns %>
post: <%= @post %> | HTML+EEX | 1 | basbz/phoenix_live_view | test/fixtures/templates/dead_with_live.html.eex | [
"MIT"
] |
/*
* Copyright 2010-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.kotlin.js.translate.context;
import org.jetbrains.kotlin.js.backend.ast.JsExpression;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor;
import org.jetbrains.kotlin.psi.KtExpression;
import java.util.Collections;
import java.util.Map;
/**
 * An immutable, chained mapping from IR entities (descriptors and source
 * expressions) to the JavaScript expressions that should stand in for them
 * during translation.
 *
 * Contexts form a parent chain: a lookup first consults this context's own
 * maps and then falls back to the parent. Every successful lookup returns a
 * deep copy of the stored expression, so a cached JS AST node is never shared
 * between two usage sites.
 */
public class AliasingContext {
    /** Aliases registered directly on this context, keyed by descriptor; may be absent. */
    @Nullable
    private final Map<DeclarationDescriptor, JsExpression> descriptorAliases;

    /** Aliases registered directly on this context, keyed by source expression; may be absent. */
    @Nullable
    private final Map<KtExpression, JsExpression> expressionAliases;

    /** Enclosing context consulted when a lookup misses here; null for the root. */
    @Nullable
    private final AliasingContext parentContext;

    /** Returns a root context that contains no aliases at all. */
    @NotNull
    public static AliasingContext getCleanContext() {
        return new AliasingContext(null, null, null);
    }

    private AliasingContext(
            @Nullable AliasingContext parent,
            @Nullable Map<DeclarationDescriptor, JsExpression> descriptorAliases,
            @Nullable Map<KtExpression, JsExpression> expressionAliases
    ) {
        this.parentContext = parent;
        this.descriptorAliases = descriptorAliases;
        this.expressionAliases = expressionAliases;
    }

    /** Child context that introduces no aliases of its own. */
    @NotNull
    public AliasingContext inner() {
        return new AliasingContext(this, null, null);
    }

    /** Child context that aliases exactly one descriptor. */
    @NotNull
    public AliasingContext inner(@NotNull DeclarationDescriptor descriptor, @NotNull JsExpression alias) {
        return new AliasingContext(this, Collections.singletonMap(descriptor, alias), null);
    }

    /** Child context that adds the given expression aliases. */
    @NotNull
    public AliasingContext withExpressionsAliased(@NotNull Map<KtExpression, JsExpression> aliasesForExpressions) {
        return new AliasingContext(this, null, aliasesForExpressions);
    }

    /** Child context that adds the given descriptor aliases. */
    @NotNull
    public AliasingContext withDescriptorsAliased(@NotNull Map<DeclarationDescriptor, JsExpression> aliases) {
        return new AliasingContext(this, aliases, null);
    }

    /**
     * Looks up the alias for a descriptor, walking up the parent chain on a
     * miss. Returns a fresh deep copy, or null when no context has an alias.
     */
    @Nullable
    public JsExpression getAliasForDescriptor(@NotNull DeclarationDescriptor descriptor) {
        // These aliases cannot be shared, so a copy is handed out per lookup.
        JsExpression alias = null;
        if (descriptorAliases != null) {
            alias = descriptorAliases.get(descriptor.getOriginal());
        }
        if (alias == null && parentContext != null) {
            alias = parentContext.getAliasForDescriptor(descriptor);
        }
        return alias != null ? alias.deepCopy() : null;
    }

    /**
     * Looks up the alias for a source expression, walking up the parent chain
     * on a miss. Returns a fresh deep copy, or null when nothing matches.
     */
    @Nullable
    public JsExpression getAliasForExpression(@NotNull KtExpression element) {
        JsExpression alias = null;
        if (expressionAliases != null) {
            alias = expressionAliases.get(element);
        }
        if (alias == null && parentContext != null) {
            alias = parentContext.getAliasForExpression(element);
        }
        return alias != null ? alias.deepCopy() : null;
    }
}
| Java | 4 | AndrewReitz/kotlin | js/js.translator/src/org/jetbrains/kotlin/js/translate/context/AliasingContext.java | [
"ECL-2.0",
"Apache-2.0"
] |
#!/usr/bin/env bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Verify that TensorFlow was built with oneDNN (MKL) support and that Horovod
# imports cleanly alongside it. Newer TF exposes the check through
# _pywrap_util_port, older TF through pywrap_tensorflow, so both are probed.

# Failure helper: print the message and abort with a non-zero status.
# (Previously `die` was undefined, so the FAIL branch exited with
# "command not found" and the message was never shown.)
die() {
  echo "$@" >&2
  exit 1
}

python -c 'from tensorflow.python import _pywrap_util_port; print(_pywrap_util_port.IsMklEnabled()); import horovod.tensorflow as hvd'
new_mkl_horovod_enabled=$?

python -c 'from tensorflow.python import pywrap_tensorflow; print(pywrap_tensorflow.IsMklEnabled()); import horovod.tensorflow as hvd'
old_mkl_horovod_enabled=$?

if [[ $new_mkl_horovod_enabled -eq 0 ]]; then
  echo "PASS: Horovod with MKL is enabled"
elif [[ $old_mkl_horovod_enabled -eq 0 ]]; then
  # BUG FIX: the original read "0]]" (no space before "]]"), which is a bash
  # syntax error that aborted the script before any result was reported.
  echo "PASS: Horovod with Old MKL is detected"
else
  die "FAIL: Horovod with MKL is not enabled"
fi
| Shell | 3 | yage99/tensorflow | tensorflow/tools/dockerfiles/tests/import-onednn-horovod.sh | [
"Apache-2.0"
] |
(* Content-type: application/vnd.wolfram.mathematica *)
(*** Wolfram Notebook File ***)
(* http://www.wolfram.com/nb *)
(* CreatedBy='Mathematica 8.0' *)
(*CacheID: 234*)
(* Internal cache information:
NotebookFileLineBreakTest
NotebookFileLineBreakTest
NotebookDataPosition[ 157, 7]
NotebookDataLength[ 11408, 362]
NotebookOptionsPosition[ 10747, 334]
NotebookOutlinePosition[ 11102, 350]
CellTagsIndexPosition[ 11059, 347]
WindowFrame->Normal*)
(* Beginning of Notebook Content *)
Notebook[{
Cell["\<\
The derivative of CalibratedCamera.project:\
\>", "Text",
CellChangeTimes->{{3.5349648356912127`*^9, 3.534964846480523*^9}, {
3.534983100980744*^9, 3.534983101484686*^9}}],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"H1", "=",
RowBox[{
RowBox[{
RowBox[{
FractionBox["1", "z"],
RowBox[{
RowBox[{"(", GridBox[{
{"1", "0",
RowBox[{
RowBox[{"-", "x"}], "/", "z"}]},
{"0", "1",
RowBox[{
RowBox[{"-", "y"}], "/", "z"}]}
}], ")"}], ".",
RowBox[{"(", GridBox[{
{"0",
RowBox[{"-", "z"}], "y",
RowBox[{"-", "1"}], "0", "0", "R00", "R10", "R20"},
{"z", "0",
RowBox[{"-", "x"}], "0",
RowBox[{"-", "1"}], "0", "R01", "R11", "R21"},
{
RowBox[{"-", "y"}], "x", "0", "0", "0",
RowBox[{"-", "1"}], "R02", "R12", "R22"}
}], ")"}]}]}], "//", "FullSimplify"}], "//",
"MatrixForm"}]}]], "Input",
CellChangeTimes->{{3.5349631885090446`*^9, 3.5349632390951033`*^9}, {
3.53496344228561*^9, 3.534963552777855*^9}, {3.53496365487344*^9,
3.53496369917972*^9}, 3.534963801932398*^9, {3.5349638327702837`*^9,
3.534963833842349*^9}, {3.5349638684957743`*^9, 3.534963869306225*^9}, {
3.5349640018609343`*^9, 3.534964073635954*^9}, {3.534964139661661*^9,
3.534964141597094*^9}, {3.534964228457883*^9, 3.534964230497267*^9}, {
3.534964522693956*^9, 3.534964554934105*^9}, 3.534979649216166*^9, {
3.534979703414953*^9, 3.5349797245266523`*^9}, 3.534979754875834*^9, {
3.5349801268943996`*^9, 3.534980128188809*^9}, {3.534981330976294*^9,
3.5349813327600317`*^9}, 3.534981567474765*^9, {3.534982081101321*^9,
3.534982107106247*^9}, {3.534982164393888*^9, 3.534982164510919*^9}, {
3.534982374683118*^9, 3.534982376464725*^9}}],
Cell[BoxData[
TagBox[
RowBox[{"(", "\[NoBreak]", GridBox[{
{
FractionBox[
RowBox[{"x", " ", "y"}],
SuperscriptBox["z", "2"]],
RowBox[{
RowBox[{"-", "1"}], "-",
FractionBox[
SuperscriptBox["x", "2"],
SuperscriptBox["z", "2"]]}],
FractionBox["y", "z"],
RowBox[{"-",
FractionBox["1", "z"]}], "0",
FractionBox["x",
SuperscriptBox["z", "2"]],
FractionBox[
RowBox[{
RowBox[{
RowBox[{"-", "R02"}], " ", "x"}], "+",
RowBox[{"R00", " ", "z"}]}],
SuperscriptBox["z", "2"]],
FractionBox[
RowBox[{
RowBox[{
RowBox[{"-", "R12"}], " ", "x"}], "+",
RowBox[{"R10", " ", "z"}]}],
SuperscriptBox["z", "2"]],
FractionBox[
RowBox[{
RowBox[{
RowBox[{"-", "R22"}], " ", "x"}], "+",
RowBox[{"R20", " ", "z"}]}],
SuperscriptBox["z", "2"]]},
{
RowBox[{"1", "+",
FractionBox[
SuperscriptBox["y", "2"],
SuperscriptBox["z", "2"]]}],
RowBox[{"-",
FractionBox[
RowBox[{"x", " ", "y"}],
SuperscriptBox["z", "2"]]}],
RowBox[{"-",
FractionBox["x", "z"]}], "0",
RowBox[{"-",
FractionBox["1", "z"]}],
FractionBox["y",
SuperscriptBox["z", "2"]],
FractionBox[
RowBox[{
RowBox[{
RowBox[{"-", "R02"}], " ", "y"}], "+",
RowBox[{"R01", " ", "z"}]}],
SuperscriptBox["z", "2"]],
FractionBox[
RowBox[{
RowBox[{
RowBox[{"-", "R12"}], " ", "y"}], "+",
RowBox[{"R11", " ", "z"}]}],
SuperscriptBox["z", "2"]],
FractionBox[
RowBox[{
RowBox[{
RowBox[{"-", "R22"}], " ", "y"}], "+",
RowBox[{"R21", " ", "z"}]}],
SuperscriptBox["z", "2"]]}
},
GridBoxAlignment->{
"Columns" -> {{Center}}, "ColumnsIndexed" -> {}, "Rows" -> {{Baseline}},
"RowsIndexed" -> {}},
GridBoxSpacings->{"Columns" -> {
Offset[0.27999999999999997`], {
Offset[0.7]},
Offset[0.27999999999999997`]}, "ColumnsIndexed" -> {}, "Rows" -> {
Offset[0.2], {
Offset[0.4]},
Offset[0.2]}, "RowsIndexed" -> {}}], "\[NoBreak]", ")"}],
Function[BoxForm`e$,
MatrixForm[BoxForm`e$]]]], "Output",
CellChangeTimes->{
3.534979650069955*^9, {3.534979704654621*^9, 3.5349797249719963`*^9},
3.534979755407372*^9, 3.534980128803479*^9, 3.534981333789564*^9,
3.5349815682066193`*^9, 3.534982108180704*^9, 3.534982166031692*^9,
3.534982377250555*^9}]
}, Open ]],
Cell["\<\
To get rid of (1/z) factors, work in terms of u=x/z and v=x/z.\
\>", "Text",
CellChangeTimes->{{3.534981922219325*^9, 3.534981963212936*^9}, {
3.534983115196439*^9, 3.534983121940318*^9}}],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"H2", "=",
RowBox[{
RowBox[{
RowBox[{
RowBox[{
FractionBox["1", "z"],
RowBox[{
RowBox[{"(", GridBox[{
{"1", "0",
RowBox[{
RowBox[{"-", "x"}], "/", "z"}]},
{"0", "1",
RowBox[{
RowBox[{"-", "y"}], "/", "z"}]}
}], ")"}], ".",
RowBox[{"(", GridBox[{
{"0",
RowBox[{"-", "z"}], "y",
RowBox[{"-", "1"}], "0", "0", "R00", "R10", "R20"},
{"z", "0",
RowBox[{"-", "x"}], "0",
RowBox[{"-", "1"}], "0", "R01", "R11", "R21"},
{
RowBox[{"-", "y"}], "x", "0", "0", "0",
RowBox[{"-", "1"}], "R02", "R12", "R22"}
}], ")"}]}]}], "/.",
RowBox[{"{",
RowBox[{
RowBox[{"x", "\[Rule]",
RowBox[{"z", " ", "u"}]}], ",",
RowBox[{"y", "\[Rule]",
RowBox[{"z", " ", "v"}]}]}], "}"}]}], "//", "FullSimplify"}], "//",
"MatrixForm", " "}]}]], "Input",
CellChangeTimes->{{3.534981575562448*^9, 3.534981660163035*^9}, {
3.534981699433219*^9, 3.534981715884362*^9}, {3.534981811953741*^9,
3.534981812758938*^9}, {3.534981967549185*^9, 3.534981970084591*^9}, {
3.5349821168936243`*^9, 3.5349821229726763`*^9}, {3.5349821691614027`*^9,
3.534982169838853*^9}, {3.5349823818506517`*^9, 3.5349823882411737`*^9}, {
3.534982418562274*^9, 3.534982445450313*^9}, {3.5349824987688103`*^9,
3.5349825453911533`*^9}}],
Cell[BoxData[
TagBox[
RowBox[{"(", "\[NoBreak]", GridBox[{
{
RowBox[{"u", " ", "v"}],
RowBox[{
RowBox[{"-", "1"}], "-",
SuperscriptBox["u", "2"]}], "v",
RowBox[{"-",
FractionBox["1", "z"]}], "0",
FractionBox["u", "z"],
FractionBox[
RowBox[{"R00", "-",
RowBox[{"R02", " ", "u"}]}], "z"],
FractionBox[
RowBox[{"R10", "-",
RowBox[{"R12", " ", "u"}]}], "z"],
FractionBox[
RowBox[{"R20", "-",
RowBox[{"R22", " ", "u"}]}], "z"]},
{
RowBox[{"1", "+",
SuperscriptBox["v", "2"]}],
RowBox[{
RowBox[{"-", "u"}], " ", "v"}],
RowBox[{"-", "u"}], "0",
RowBox[{"-",
FractionBox["1", "z"]}],
FractionBox["v", "z"],
FractionBox[
RowBox[{"R01", "-",
RowBox[{"R02", " ", "v"}]}], "z"],
FractionBox[
RowBox[{"R11", "-",
RowBox[{"R12", " ", "v"}]}], "z"],
FractionBox[
RowBox[{"R21", "-",
RowBox[{"R22", " ", "v"}]}], "z"]}
},
GridBoxAlignment->{
"Columns" -> {{Center}}, "ColumnsIndexed" -> {}, "Rows" -> {{Baseline}},
"RowsIndexed" -> {}},
GridBoxSpacings->{"Columns" -> {
Offset[0.27999999999999997`], {
Offset[0.7]},
Offset[0.27999999999999997`]}, "ColumnsIndexed" -> {}, "Rows" -> {
Offset[0.2], {
Offset[0.4]},
Offset[0.2]}, "RowsIndexed" -> {}}], "\[NoBreak]", ")"}],
Function[BoxForm`e$,
MatrixForm[BoxForm`e$]]]], "Output",
CellChangeTimes->{
3.534981661750978*^9, {3.534981700471121*^9, 3.5349817167574883`*^9},
3.534981813153152*^9, 3.5349819705942793`*^9, {3.534982117522484*^9,
3.534982124251252*^9}, 3.534982170293386*^9, 3.534982388956635*^9,
3.534982446240321*^9, {3.534982502305121*^9, 3.534982546180998*^9}}]
}, Open ]],
Cell["Finally, avoid divides.", "Text",
CellChangeTimes->{{3.534983093980698*^9, 3.5349831123126383`*^9}}],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"H2", "/.",
RowBox[{"{",
RowBox[{
RowBox[{"1", "/", "z"}], "\[Rule]", "d"}], "}"}]}]], "Input",
CellChangeTimes->{{3.534982548021124*^9, 3.534982548331835*^9}}],
Cell[BoxData[
TagBox[
RowBox[{"(", "\[NoBreak]", GridBox[{
{
RowBox[{"u", " ", "v"}],
RowBox[{
RowBox[{"-", "1"}], "-",
SuperscriptBox["u", "2"]}], "v",
RowBox[{"-", "d"}], "0",
RowBox[{"d", " ", "u"}],
RowBox[{"d", " ",
RowBox[{"(",
RowBox[{"R00", "-",
RowBox[{"R02", " ", "u"}]}], ")"}]}],
RowBox[{"d", " ",
RowBox[{"(",
RowBox[{"R10", "-",
RowBox[{"R12", " ", "u"}]}], ")"}]}],
RowBox[{"d", " ",
RowBox[{"(",
RowBox[{"R20", "-",
RowBox[{"R22", " ", "u"}]}], ")"}]}]},
{
RowBox[{"1", "+",
SuperscriptBox["v", "2"]}],
RowBox[{
RowBox[{"-", "u"}], " ", "v"}],
RowBox[{"-", "u"}], "0",
RowBox[{"-", "d"}],
RowBox[{"d", " ", "v"}],
RowBox[{"d", " ",
RowBox[{"(",
RowBox[{"R01", "-",
RowBox[{"R02", " ", "v"}]}], ")"}]}],
RowBox[{"d", " ",
RowBox[{"(",
RowBox[{"R11", "-",
RowBox[{"R12", " ", "v"}]}], ")"}]}],
RowBox[{"d", " ",
RowBox[{"(",
RowBox[{"R21", "-",
RowBox[{"R22", " ", "v"}]}], ")"}]}]}
},
GridBoxAlignment->{
"Columns" -> {{Center}}, "ColumnsIndexed" -> {}, "Rows" -> {{Baseline}},
"RowsIndexed" -> {}},
GridBoxSpacings->{"Columns" -> {
Offset[0.27999999999999997`], {
Offset[0.7]},
Offset[0.27999999999999997`]}, "ColumnsIndexed" -> {}, "Rows" -> {
Offset[0.2], {
Offset[0.4]},
Offset[0.2]}, "RowsIndexed" -> {}}], "\[NoBreak]", ")"}],
Function[BoxForm`e$,
MatrixForm[BoxForm`e$]]]], "Output",
CellChangeTimes->{3.534982550241898*^9}]
}, Open ]]
},
WindowSize->{740, 752},
WindowMargins->{{44, Automatic}, {-4, Automatic}},
FrontEndVersion->"8.0 for Mac OS X x86 (32-bit, 64-bit Kernel) (October 5, \
2011)",
StyleDefinitions->"Default.nb"
]
(* End of Notebook Content *)
(* Internal cache information *)
(*CellTagsOutline
CellTagsIndex->{}
*)
(*CellTagsIndex
CellTagsIndex->{}
*)
(*NotebookFileOutline
Notebook[{
Cell[557, 20, 184, 4, 26, "Text"],
Cell[CellGroupData[{
Cell[766, 28, 1644, 38, 61, "Input"],
Cell[2413, 68, 2645, 85, 85, "Output"]
}, Open ]],
Cell[5073, 156, 201, 4, 26, "Text"],
Cell[CellGroupData[{
Cell[5299, 164, 1487, 40, 80, "Input"],
Cell[6789, 206, 1860, 55, 75, "Output"]
}, Open ]],
Cell[8664, 264, 107, 1, 26, "Text"],
Cell[CellGroupData[{
Cell[8796, 269, 195, 5, 27, "Input"],
Cell[8994, 276, 1737, 55, 63, "Output"]
}, Open ]]
}
]
*)
(* End of internal cache information *)
| Mathematica | 4 | kvmanohar22/gtsam | doc/Mathematica/CalibratedCamera.nb | [
"BSD-3-Clause"
] |
{"format":"HTML", "children":
[
<%= render(partial: "first_layer", formats: :xml).chomp.html_safe %>,
<%= render(partial: "second_layer").chomp.html_safe %>
]}
| HTML+ERB | 3 | Jiwoong/rails | actionview/test/fixtures/test/mixing_formats_deep.html.erb | [
"MIT"
] |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.MySQL.Instances.Execute where
import Data.Aeson as J
import Data.HashMap.Strict.InsOrd qualified as OMap
import Data.Text qualified as T
import Data.Tree
import Hasura.Backends.MySQL.DataLoader.Execute (OutputValue (..), RecordSet (..))
import Hasura.Backends.MySQL.DataLoader.Execute qualified as DataLoader
import Hasura.Backends.MySQL.DataLoader.Plan qualified as DataLoader
import Hasura.Backends.MySQL.Plan
import Hasura.Base.Error
import Hasura.EncJSON
import Hasura.GraphQL.Execute.Backend
import Hasura.GraphQL.Parser
import Hasura.Prelude
import Hasura.RQL.IR
import Hasura.RQL.Types
import Hasura.Session
-- | Hooks the MySQL backend into the GraphQL execution machinery.
-- Only read-only query execution is implemented so far; mutations,
-- subscriptions, explain and remote relationships all fail loudly.
instance BackendExecute 'MySQL where
  -- Compiled queries are carried around as human-readable text.
  type PreparedQuery 'MySQL = Text
  -- No multiplexed (live query) representation exists yet.
  type MultiplexedQuery 'MySQL = Void
  type ExecutionMonad 'MySQL = ExceptT QErr IO
  mkDBQueryPlan = mysqlDBQueryPlan
  mkDBMutationPlan = error "mkDBMutationPlan: MySQL backend does not support this operation yet."
  mkDBSubscriptionPlan _ _ _ _ = error "mkDBSubscriptionPlan: MySQL backend does not support this operation yet."
  mkDBQueryExplain = error "mkDBQueryExplain: MySQL backend does not support this operation yet."
  mkLiveQueryExplain _ = error "mkLiveQueryExplain: MySQL backend does not support this operation yet."
  mkDBRemoteRelationshipPlan = error "mkDBRemoteRelationshipPlan: MySQL does not support this operation yet."
-- | Build a 'DBStepInfo' for a read-only query against MySQL.
--
-- The IR query is first compiled into a forest of DataLoader actions (plus
-- a head-and-tail pointer into that forest). The returned step renders the
-- forest as its "prepared query" text for introspection and, when executed,
-- runs the forest and re-encodes the resulting 'RecordSet' as GraphQL JSON.
mysqlDBQueryPlan ::
  forall m.
  ( MonadError QErr m
  ) =>
  UserInfo ->
  SourceName ->
  SourceConfig 'MySQL ->
  QueryDB 'MySQL (Const Void) (UnpreparedValue 'MySQL) ->
  m (DBStepInfo 'MySQL)
mysqlDBQueryPlan userInfo sourceName sourceConfig qrf = do
  (headAndTail, actionsForest) <- queryToActionForest userInfo qrf
  pure
    ( DBStepInfo
        @'MySQL
        sourceName
        sourceConfig
        -- The drawn forest doubles as the human-readable "prepared query".
        (Just (T.pack (drawForest (fmap (fmap show) actionsForest))))
        ( do
            result <-
              DataLoader.runExecute
                sourceConfig
                headAndTail
                (DataLoader.execute actionsForest)
            -- Surface DataLoader failures as a 500 with details; encode
            -- successful results for the GraphQL response.
            either
              (throw500WithDetail "MySQL DataLoader Error" . toJSON . show)
              (pure . encJFromRecordSet)
              result
        )
    )
--------------------------------------------------------------------------------
-- Encoding for Hasura's GraphQL JSON representation
-- | Encode a DataLoader 'RecordSet' as a JSON array of objects, one object
-- per row, preserving the insertion order of each row's fields.
encJFromRecordSet :: RecordSet -> EncJSON
encJFromRecordSet RecordSet {rows} =
  encJFromList
    ( map
        ( encJFromAssocList
            . map (first coerce . second encJFromOutputValue)
            . OMap.toList
        )
        (toList rows)
    )
-- | Encode a single DataLoader output value, recursing into arrays and
-- records; 'NullOutputValue' becomes JSON @null@.
encJFromOutputValue :: DataLoader.OutputValue -> EncJSON
encJFromOutputValue =
  \case
    ArrayOutputValue array -> encJFromList (map encJFromOutputValue (toList array))
    RecordOutputValue m ->
      encJFromAssocList
        . map (first coerce . second encJFromOutputValue)
        . OMap.toList
        $ m
    ScalarOutputValue value -> encJFromJValue value
    NullOutputValue {} -> encJFromJValue J.Null
| Haskell | 4 | aloks98/graphql-engine | server/src-lib/Hasura/Backends/MySQL/Instances/Execute.hs | [
"Apache-2.0",
"MIT"
] |
package com.baeldung.unmappedproperties.mapper;
import com.baeldung.unmappedproperties.dto.DocumentDTO;
import com.baeldung.unmappedproperties.entity.Document;
import org.mapstruct.Mapper;
import org.mapstruct.ReportingPolicy;
import org.mapstruct.factory.Mappers;
/**
 * Maps between {@link Document} entities and {@link DocumentDTO}s.
 * <p>
 * {@code unmappedTargetPolicy = ReportingPolicy.IGNORE} silences the
 * build-time warnings MapStruct would otherwise emit for target properties
 * that have no matching source property.
 */
@Mapper(unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface DocumentMapperUnmappedPolicy {
    // Singleton mapper instance backed by the MapStruct-generated implementation.
    DocumentMapperUnmappedPolicy INSTANCE = Mappers.getMapper(DocumentMapperUnmappedPolicy.class);
    DocumentDTO documentToDocumentDTO(Document entity);
    Document documentDTOToDocument(DocumentDTO dto);
}
"MIT"
] |
module audiostreamerscrobbler.players.heos.HeosSlaveMonitor
import audiostreamerscrobbler.maintypes.SongType.types.Song
import audiostreamerscrobbler.players.heos.HeosConnectionSingleton
import audiostreamerscrobbler.threads.PlayerMonitorThreadTypes.types.MonitorThreadTypes
import audiostreamerscrobbler.utils.ThreadUtils
import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference}
let DEBUG = false
let IDLE_PLAYER_INTERVAL = 60
# Builds a DynamicObject that tracks one slave player of a HEOS group.
# HEOS events are routed to the handler methods defined below; whenever
# enough state is known, the `cb` callback receives a MonitorThreadTypes
# value (PlayingSong or Monitoring).
# NOTE(review): the `heosConnection` parameter is unused -- the singleton
# from getHeosConnectionInstance() is used instead; confirm whether the
# parameter can be removed.
function createHeosSlaveMonitor = |heosConnection, player, cb| {
	let heosConnectionReference = AtomicReference(getHeosConnectionInstance())
	# Mutable state shared with the HEOS event handlers below.
	let isPlaying = AtomicBoolean(false)
	let song = AtomicReference(null)
	let duration = AtomicReference(null)
	let slaveMonitor = DynamicObject("HeosSlaveMonitor"):
		define("_cb", |this| -> cb):
		define("_song", song):
		define("_duration", duration):
		define("_heosConnection", |this| -> heosConnectionReference):
		define("_isPlaying", isPlaying):
		define("pid", player: heosImpl(): pid(): toString()):
		define("isPlaying", |this| -> this: _isPlaying(): get()):
		define("send", |this, cmd| -> sendPlayerCommand(this, cmd)):
		define("playerStateChange", |this, message| -> handlePlayerStateChange(this, message)):
		define("nowPlayingProgress", |this, message| -> handlePlayerNowPlayingProgress(this, message)):
		define("getPlayingNow", |this, payload| -> handleGetPlayingNow(this, payload))
	return slaveMonitor
}
# Sends a HEOS CLI command scoped to this monitor's player id, e.g. a cmd
# of "player/get_play_state" becomes "heos://player/get_play_state?pid=<pid>".
local function sendPlayerCommand = |slaveMonitor, cmd| {
	let heosConnection = slaveMonitor: _heosConnection(): get()
	let pid = slaveMonitor: pid()
	heosConnection: sendCommand("heos://" + cmd + "?pid=" + pid)
}
# Handles both player/get_play_state responses and player_state_changed
# events: records whether the player is playing and notifies the monitor.
local function handlePlayerStateChange = |slaveMonitor, message| {
	# Player state is changed.
	# {"heos": {"command": "player/get_play_state",
	#           "result": "success",
	#           "message": "pid=XXX&state=play"}}
	# or
	# {"heos": {"command": "event/player_state_changed",
	#           "message": "pid=XXX&state=play"}}
	let isPlaying = slaveMonitor: _isPlaying()
	isPlaying: set(message: get("state") == "play")
	# Inform MonitorThread about status
	_updateMonitorThread(slaveMonitor)
}
# Handles player_now_playing_progress events: stores the raw progress
# message (cur_pos/duration in milliseconds) and notifies the monitor.
local function handlePlayerNowPlayingProgress = |slaveMonitor, message| {
	# Progress changed.
	# {"heos": {"command": "event/player_now_playing_progress",
	#           "message": "pid=-1465850739&cur_pos=189000&duration=235000"}}
	let duration = slaveMonitor: _duration()
	if (message: get("cur_pos") != "0") {
		# Dirty hack, HEOS 2 seems to send duration event before song change event
		duration: set(message)
		# Inform MonitorThread about status
		_updateMonitorThread(slaveMonitor)
	}
}
# Handles player/get_now_playing_media responses: stores the song payload,
# clears the stale progress data and notifies the monitor.
local function handleGetPlayingNow = |slaveMonitor, payload| {
	# Retrieving info song currently playing
	# {"heos": {"command": "player/get_now_playing_media",
	#           "result": "success",
	#           "message": "pid=XXX"},
	#  "payload": {"type": "song",
	#              "song": "SONG TITLE",
	#              "album": "ALBUM",
	#              "artist": "ARTIST",
	#              "image_url": "xxx",
	#              "album_id": "yyy",
	#              "mid": "zzz",
	#              "qid": aa,
	#              "sid": bb},
	#  "options": []}
	let song = slaveMonitor: _song()
	song: set(payload)
	# Reset duration
	let duration = slaveMonitor: _duration()
	duration: set(null)
	_updateMonitorThread(slaveMonitor)
}
# Pushes the current state to the monitor-thread callback: a PlayingSong
# message when the player is playing and both song metadata and duration
# are known, otherwise a plain Monitoring heartbeat.
local function _updateMonitorThread = |slaveMonitor| {
	let cb = slaveMonitor: _cb()
	let songPayload = slaveMonitor: _song(): get()
	let duration = slaveMonitor: _duration(): get()
	let isPlaying = slaveMonitor: _isPlaying(): get()
	# Only payloads of type "song" carry usable track metadata.
	let isSongKnown = (songPayload != null and songPayload: get("type") == "song")
	let isDurationKnown = (duration != null)
	var song = null
	if (isPlaying and isSongKnown and isDurationKnown) {
		# We should be able to construct song
		song = _convertToSong(songPayload, duration)
	}
	if (song != null) {
		cb(MonitorThreadTypes.PlayingSong(song))
	} else {
		cb(MonitorThreadTypes.Monitoring())
	}
}
# Converts a HEOS "now playing" payload plus a progress message into a
# Song value. Positions and durations arrive in milliseconds and are
# stored as whole seconds. Returns null when a required field is missing.
local function _convertToSong = |songPayload, durationMessage| {
	let title = songPayload: get("song")
	let artist = songPayload: get("artist")
	let position = durationMessage: get("cur_pos")
	let duration = durationMessage: get("duration")
	if (title is null or artist is null or position is null or duration is null) {
		return null
	}
	let song = Song(
		title,
		artist,
		songPayload: get("album"),
		position: toInt() / 1000,
		duration: toInt() / 1000)
	return song
}
| Golo | 5 | vvdleun/audiostreamerscrobbler | src/main/golo/include/players/heos/HeosSlaveMonitor.golo | [
"MIT"
] |
DAFETF NAIF DAF ENCODED TRANSFER FILE
'DAF/CK '
'2'
'6'
' < DAFCAT: CK CONCATENATION > '
BEGIN_ARRAY 1 99
'Partly corrected data: -1.414s; 98-11-29'
'1D956C1AD^9'
'1D956EA05^9'
'-16F30'
'1'
'3'
'1'
99
'84B90D871F54D8^0'
'-25100A513E235A^0'
'-D04ADA0ED06DB8^0'
'-3839AB9839EBE8^0'
'-97513BE4D06F08^-4'
'-3E5B58EC61176C^-2'
'-249C7B7EB50B86^-2'
'84D96AF490FE78^0'
'-2518604B9C87C4^0'
'-D0364B3DDBC888^0'
'-3833E15E72974E^0'
'-9B711108B0905^-4'
'-3E6A69988A7198^-2'
'-249A8B0AB7C118^-2'
'854D44FB261A6^0'
'-25386E30CAAE76^0'
'-CFEBEE81DE31E^0'
'-381FA856DAAF78^0'
'-78E11491D5BFE^-4'
'-3E0F8E934C067A^-2'
'-246866B58971^-2'
'85C2825C3F77E8^0'
'-25587FBCED256^0'
'-CFA0AD2FB56618^0'
'-3809FFD5D42A8A^0'
'-7B49782E59EC5^-4'
'-3E04987E5FF724^-2'
'-245B7829015B8^-2'
'86350495A0515^0'
'-2578148E459B6A^0'
'-CF56A8B3656AB^0'
'-37F52C346974F^0'
'-AE5781B859EED^-4'
'-3D0BBE446B31F4^-2'
'-24187E0B6D3068^-2'
'86A84D939B39E8^0'
'-2595629109AB5C^0'
'-CF0BE215B26828^0'
'-37E16AA03DC706^0'
'-9F8D4E6B9FFA5^-4'
'-3D2D25129A8594^-2'
'-241F0F6418B59E^-2'
'871A7E9F6C5E7^0'
'-25B4E0B58BB7E6^0'
'-CEC15E24309968^0'
'-37CC7497F48D2C^0'
'-8BFB399E127AF^-4'
'-3D35528B677E7A^-2'
'-24291D706693F6^-2'
'878D8817FA2C2^0'
'-25D4B4613F59B6^0'
'-CE75BFDCB19DC^0'
'-37B7E4CACAC73^0'
'-98B066C759711^-4'
'-3CE6AF25021256^-2'
'-23C1FF80639FF^-2'
'880040A9168E18^0'
'-25F272C5FE33B8^0'
'-CE2A61A77E5358^0'
'-37A32AA94F9A26^0'
'-C81380186130B^-4'
'-3D364EF4F5F096^-2'
'-23C01310AB1992^-2'
'8871066C9B6AF^0'
'-2610D9B9AF56EA^0'
'-CDDFD5D19FBC38^0'
'-378E4FBB138DDE^0'
'-81980F4F53C11^-4'
'-3CD871C936227A^-2'
'-23AB3983C1B468^-2'
'88E1931E8F8C68^0'
'-262FC96791E354^0'
'-CD9530EF3A8F1^0'
'-37789E64AAE1A2^0'
'-81E6D25A3C246^-4'
'-3C71C490EF17DA^-2'
'-23C3C77719A19A^-2'
'893E671FE72B28^0'
'-2647D8C5A727F^0'
'-CD5760017204A^0'
'-37679F1E2E83CA^0'
'-A42413ABEA34B8^-4'
'-3CEBC1714CA468^-2'
'-240783A603330C^-2'
'1D956C1AD^9'
'1D956C2C5^9'
'1D956C6C5^9'
'1D956CAC5^9'
'1D956CEC5^9'
'1D956D2C5^9'
'1D956D6C5^9'
'1D956DAC5^9'
'1D956DEC5^9'
'1D956E2C5^9'
'1D956E6C5^9'
'1D956EA05^9'
'1D956C1AD^9'
'1^1'
'C^1'
END_ARRAY 1 99
TOTAL_ARRAYS 1
~NAIF/SPC BEGIN COMMENTS~
This CK is for testing with the image: /home/acpaquette/lroc_nac/ab103105.spice.cub
This CK was generated using the following command: {}
~NAIF/SPC END COMMENTS~
| XC | 2 | ladoramkershner/ale | tests/pytests/data/JNCR_2016240_01M06152_V01/mgs_sc_ab1_1_sliced_-94000.xc | [
"Unlicense"
] |
fileFormatVersion: 2
guid: 802031397194d49dc9b2b8a534477d68
folderAsset: yes
timeCreated: 1508969008
licenseType: Free
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:
| Unity3D Asset | 0 | woshihuo12/Entitas-CSharp | Tests/Unity/VisualDebugging/Assets/Tests/Manual Tests/EntityLink.meta | [
"MIT"
] |
(***********************************************************
PROJECTOR INTERFACE
This file contains the code to interface with a fictitious
projector controlled via RS232. The projector needs to
receive 'PON' and 'POFF' strings to turn its power on
and off.
************************************************************)
PROGRAM_NAME='projector'
(***********************************************************)
(***********************************************************)
(* System Type : NetLinx *)
(***********************************************************)
(* REV HISTORY: *)
(***********************************************************)
(*
History:
*)
(***********************************************************)
(* INCLUDES GO BELOW *)
(***********************************************************)
(***********************************************************)
(* DEVICE NUMBER DEFINITIONS GO BELOW *)
(***********************************************************)
DEFINE_DEVICE
/*
* The projector is mapped to a virtual device to keep its
* code portable and testable. The virtual device allows the
* events to be captured and asserted in the test environment.
* It can be device-combined to the physical projector's port
* in the production system.
*/
vdvProjector = 33000:1:0;
(***********************************************************)
(* CONSTANT DEFINITIONS GO BELOW *)
(***********************************************************)
DEFINE_CONSTANT
(***********************************************************)
(* DATA TYPE DEFINITIONS GO BELOW *)
(***********************************************************)
DEFINE_TYPE
(***********************************************************)
(* VARIABLE DEFINITIONS GO BELOW *)
(***********************************************************)
DEFINE_VARIABLE
(***********************************************************)
(* LATCHING DEFINITIONS GO BELOW *)
(***********************************************************)
DEFINE_LATCHING
(***********************************************************)
(* MUTUALLY EXCLUSIVE DEFINITIONS GO BELOW *)
(***********************************************************)
DEFINE_MUTUALLY_EXCLUSIVE
(***********************************************************)
(* SUBROUTINE/FUNCTION DEFINITIONS GO BELOW *)
(***********************************************************)
(* EXAMPLE: DEFINE_FUNCTION <RETURN_TYPE> <NAME> (<PARAMETERS>) *)
(* EXAMPLE: DEFINE_CALL '<NAME>' (<PARAMETERS>) *)
// Powers the projector on by sending its 'PON' control string to the
// (virtual) projector device.
define_function projectorOn()
{
    send_string vdvProjector, "'PON'";
}
// Powers the projector off by sending its 'POFF' control string to the
// (virtual) projector device.
define_function projectorOff()
{
    send_string vdvProjector, "'POFF'";
}
(***********************************************************)
(* STARTUP CODE GOES BELOW *)
(***********************************************************)
DEFINE_START
(***********************************************************)
(* THE EVENTS GO BELOW *)
(***********************************************************)
DEFINE_EVENT
(***********************************************************)
(* THE MAINLINE GOES BELOW *)
(***********************************************************)
DEFINE_PROGRAM
(***********************************************************)
(* END OF PROGRAM *)
(* DO NOT PUT ANY CODE BELOW THIS COMMENT *)
(***********************************************************)
| NetLinx | 4 | RBSystems/amx-test-suite | examples/(2) events/projector.axi | [
"Apache-2.0"
] |
import createSvgIcon from './utils/createSvgIcon';
import { jsx as _jsx } from "react/jsx-runtime";
export default createSvgIcon( /*#__PURE__*/_jsx("path", {
d: "M12 7c2.76 0 5 2.24 5 5 0 .64-.13 1.25-.35 1.82l1.5 1.5c.54-.99.85-2.12.85-3.32 0-2.22-1.03-4.19-2.64-5.47L15 2H9l-.96 3.21 2.14 2.14C10.75 7.13 11.36 7 12 7zM2.81 2.81 1.39 4.22l4.46 4.46C5.31 9.67 5 10.8 5 12c0 2.22 1.03 4.19 2.64 5.47L9 22h6l.96-3.21 3.82 3.82 1.41-1.41L2.81 2.81zM12 17c-2.76 0-5-2.24-5-5 0-.64.13-1.25.35-1.82l6.47 6.47c-.57.22-1.18.35-1.82.35z"
}), 'WatchOff'); | JavaScript | 4 | dany-freeman/material-ui | packages/mui-icons-material/lib/esm/WatchOff.js | [
"MIT"
] |
source "../tests/includes/init-tests.tcl"
source "../tests/includes/job-utils.tcl"
# A NACK must put the job back into the queue right away.
test "NACK immediately re-enqueue the job" {
    set qname [randomQueue]
    set id [D 0 addjob $qname myjob 5000 replicate 3 retry 3]
    set job [D 0 show $id]
    assert {$id ne {}}
    # After GETJOB the job is active on the consumer, so no queued copies.
    D 0 getjob from $qname
    assert {[count_job_copies $job queued] == 0}
    D 0 NACK $id
    # BUG FIX: the original asserted ">= 0", which is vacuously true for
    # any count; requiring at least one queued copy makes the check match
    # the test's stated intent.
    assert {[count_job_copies $job queued] >= 1}
}
test "GET WITHCOUNTERS can retrieve NACKs count" {
    set qname [randomQueue]
    set id [D 0 addjob $qname myjob 5000 replicate 3 retry 3]
    set job [D 0 show $id]
    assert {$id ne {}}
    set myjob [lindex [D 0 getjob from $qname] 0]
    D 0 NACK $id
    # Fetch the job again WITHCOUNTERS: element 4 of the reply carries the
    # NACK count, which must be non-zero after the explicit NACK above.
    set myjob [lindex [D 0 getjob withcounters from $qname] 0]
    assert {[lindex $myjob 4] > 0}
}
test "GET WITHCOUNTERS can retrieve additional deliveries count" {
    set qname [randomQueue]
    set id [D 0 addjob $qname myjob 5000 replicate 3 retry 3]
    set job [D 0 show $id]
    assert {$id ne {}}
    set myjob [lindex [D 0 getjob from $qname] 0]
    # With retry=3 the un-acknowledged job is re-queued automatically;
    # wait for that instead of NACK-ing explicitly.
    wait_for_condition {
        [count_job_copies $job queued] >= 1
    } else {
        fail "Job never rescheduled while it should"
    }
    # Element 6 of the WITHCOUNTERS reply is the additional-deliveries count.
    set myjob [lindex [D 0 getjob withcounters from $qname] 0]
    assert {[lindex $myjob 6] > 0}
}
| Tcl | 3 | justincase/disque | tests/cluster/tests/12-nack-and-counters.tcl | [
"BSD-3-Clause"
] |
module m1 {
export class C1_public {
private f1() {
}
}
class C2_private {
}
export class C3_public {
constructor (m1_c3_c1: C1_public);
constructor (m1_c3_c2: C2_private); //error
constructor (m1_c3_c1_2: any) {
}
private f1_private(m1_c3_f1_arg: C1_public) {
}
public f2_public(m1_c3_f2_arg: C1_public) {
}
private f3_private(m1_c3_f3_arg: C2_private) {
}
public f4_public(m1_c3_f4_arg: C2_private) { // error
}
private f5_private() {
return new C1_public();
}
public f6_public() {
return new C1_public();
}
private f7_private() {
return new C2_private();
}
public f8_public() {
return new C2_private(); // error
}
private f9_private(): C1_public {
return new C1_public();
}
public f10_public(): C1_public {
return new C1_public();
}
private f11_private(): C2_private {
return new C2_private();
}
public f12_public(): C2_private { // error
return new C2_private(); //error
}
}
class C4_private {
constructor (m1_c4_c1: C1_public);
constructor (m1_c4_c2: C2_private);
constructor (m1_c4_c1_2: any) {
}
private f1_private(m1_c4_f1_arg: C1_public) {
}
public f2_public(m1_c4_f2_arg: C1_public) {
}
private f3_private(m1_c4_f3_arg: C2_private) {
}
public f4_public(m1_c4_f4_arg: C2_private) {
}
private f5_private() {
return new C1_public();
}
public f6_public() {
return new C1_public();
}
private f7_private() {
return new C2_private();
}
public f8_public() {
return new C2_private();
}
private f9_private(): C1_public {
return new C1_public();
}
public f10_public(): C1_public {
return new C1_public();
}
private f11_private(): C2_private {
return new C2_private();
}
public f12_public(): C2_private {
return new C2_private();
}
}
export class C5_public {
constructor (m1_c5_c: C1_public) {
}
}
class C6_private {
constructor (m1_c6_c: C1_public) {
}
}
export class C7_public {
constructor (m1_c7_c: C2_private) { // error
}
}
class C8_private {
constructor (m1_c8_c: C2_private) {
}
}
function f1_public(m1_f1_arg: C1_public) {
}
export function f2_public(m1_f2_arg: C1_public) {
}
function f3_public(m1_f3_arg: C2_private) {
}
export function f4_public(m1_f4_arg: C2_private) { // error
}
function f5_public() {
return new C1_public();
}
export function f6_public() {
return new C1_public();
}
function f7_public() {
return new C2_private();
}
export function f8_public() {
return new C2_private(); // error
}
function f9_private(): C1_public {
return new C1_public();
}
export function f10_public(): C1_public {
return new C1_public();
}
function f11_private(): C2_private {
return new C2_private();
}
export function f12_public(): C2_private { // error
return new C2_private(); //error
}
}
class C6_public {
}
class C7_public {
constructor (c7_c2: C6_public);
constructor (c7_c1_2: any) {
}
private f1_private(c7_f1_arg: C6_public) {
}
public f2_public(c7_f2_arg: C6_public) {
}
private f5_private() {
return new C6_public();
}
public f6_public() {
return new C6_public();
}
private f9_private(): C6_public {
return new C6_public();
}
public f10_public(): C6_public {
return new C6_public();
}
}
class C9_public {
constructor (c9_c: C6_public) {
}
}
function f4_public(f4_arg: C6_public) {
}
function f6_public() {
return new C6_public();
}
function f10_public(): C6_public {
return new C6_public();
}
| TypeScript | 3 | nilamjadhav/TypeScript | tests/cases/compiler/privacyFunc.ts | [
"Apache-2.0"
] |
import ReactGA from 'react-ga';
import {
devAnalyticsId,
prodAnalyticsId
} from '../../../config/analytics-settings';
import envData from '../../../config/env.json';
const { deploymentEnv } = envData;
const analyticsId =
deploymentEnv === 'staging' ? devAnalyticsId : prodAnalyticsId;
ReactGA.initialize(analyticsId);
export default ReactGA;
| TypeScript | 4 | fcastillo-serempre/freeCodeCamp | client/src/analytics/index.tsx | [
"BSD-3-Clause"
] |
mongodb = require 'mongodb'
assert = require 'assert'
open mongodb connection to (name, address: '127.0.0.1', port: 27017) =
db = new (mongodb.Db (name, new (mongodb.Server (address, port)), safe: false))
with mongodb! (name, block, options) =
db = open mongodb connection to (name, options)
db.open!()
block!(db)
db.close!()
with mongodb! 'test' @(test db)
collection = test db.collection 'test_insert'
collection.remove!()
console.log "inserting" {a 2}
docs = collection.insert! {a 2}
count = collection.count!()
console.log "document count: #(count)"
console.log "searching"
results = collection.find ().to array!()
console.log "found #(results.length) documents"
for each @(doc) in (results)
console.log "document:" (doc)
| PogoScript | 3 | featurist/pogoscript | examples/mongodb.pogo | [
"BSD-2-Clause"
] |
Import rockout
' TODO: Separate list for indestructable blocks?
Class Block Extends Sprite
Field parent:List <Block>
Field hittime:Int
Field hitcount:Int
Field hitby:Shot ' Shot which hit this block; gets speed from here
Field hitx:Float ' x-position when hit
Field hity:Float ' y-position when hit
Field strength:Int = 1
' Update parent via go-between function?
Method Fall (forcexs:Float = 0.0, forceys:Float = 0.0)
GameSession.CurrentLevel.FallingBlocks.AddLast Self
Self.parent = GameSession.CurrentLevel.FallingBlocks
GameSession.CurrentLevel.Blocks.RemoveEach Self
If Self.hitby
Self.xs = Self.hitby.xs * 0.5
Self.ys = -Self.hitby.ys * 0.5
Else
Self.xs = forcexs
Self.ys = forceys
Endif
End
Method Delete ()
parent.RemoveEach Self
End
Method New (img:Image, x:Float, y:Float, xs:Float, ys:Float, xscale:Float, yscale:Float)
Self.image = img
Self.x = x
Self.y = y
Self.xscale = xscale
Self.yscale = yscale
' This is the pixel width of the image after scaling...
Self.width = img.Width * xscale
Self.height = img.Height * yscale
GameSession.CurrentLevel.Blocks.AddLast Self
Self.parent = GameSession.CurrentLevel.Blocks
End
Function UpdateAll ()
Local b:Block
For b = Eachin GameSession.CurrentLevel.FallingBlocks
b.x = b.x + FrameScale (b.xs)
b.ys = b.ys + FrameScale (GameSession.CurrentLevel.Gravity) * 2.0
b.y = b.y + FrameScale (b.ys)
If b.y > VDeviceHeight + b.height Then b.Delete
Next
End
Function Render ()
Local b:Block
For b = Eachin GameSession.CurrentLevel.Blocks
b.Draw
Next
For b = Eachin GameSession.CurrentLevel.FallingBlocks
b.Draw
Next
End
End
| Monkey | 4 | blitz-research/monkey | bananas/hitoro/rockout/imports/block.monkey | [
"Zlib"
] |
' Debug Print Queue...
' Copy and paste the DebugQ type and the PrintQ/UpdateQ functions. Use
' PrintQ to add a debug message to your game, and UpdateQ in your main
' loop to display/update messages...
Type DebugQ
Global DebugQList:TList
Field message$
Field alpha# = 1
Function Print (message$)
If DebugQList = Null Then DebugQList= New TList
p:DebugQ = New DebugQ
p.message = message$
DebugQList.AddLast p
End Function
Function Update (alphacut# = 0.01)
If DebugQList = Null Then Return
y = 0
For p:DebugQ = EachIn DebugQList
SetBlend ALPHABLEND
SetAlpha p.alpha
DrawText p.message$, 0, y
y = y + TextHeight("")
p.alpha = p.alpha - alphacut; If p.alpha < 0 Then DebugQList.Remove p
Next
SetBlend SOLID ' Need to get old values!
SetAlpha 1 ' Need to get old values!
End Function
End Type
' Functional interfaces for non-OO'ers...
Function PrintQ (message$)
DebugQ.Print message$
End Function
Function UpdateQ ()
DebugQ.Update
End Function
' D E M O . . .
Graphics 640, 480
Repeat
Cls
x = MouseX ()
y = MouseY ()
DrawRect x, y, 32, 32
' Add items to debug print queue...
If MouseHit (1) Then PrintQ "Left mouse button hit!"
If MouseHit (2) Then PrintQ "Right mouse button hit!"
' Print/remove all debug items...
UpdateQ
DrawText "Click mouse...", 0, GraphicsHeight () - 20
Flip
Until KeyHit (KEY_ESCAPE)
End
| BlitzMax | 5 | jabdoa2/blitzmax | samples/hitoro/debugprintq.bmx | [
"Zlib"
] |
(ns hacker-scripts.smack
(:import
(com.twilio Twilio)
(com.twilio.rest.api.v2010.account Message)
(com.twilio.type PhoneNumber)))
(def acc-sid "my twilio account SID")
(def acc-tkn "my twilio secret token")
(def my-num (PhoneNumber. "+10001112222"))
(def her-num (PhoneNumber. "+19998887777"))
(def reasons ["Working hard"
"Gotta ship this feature"
"Someone fucked the system again"])
(defn twilio-init []
(Twilio/init acc-sid acc-tkn))
(defn send-sms [to-num from-num message]
(.. Message (creator to-num from-num message) create))
(def send-sms-girlfriend (partial send-sms her-num my-num))
(defn smack []
(twilio-init)
(let [message (rand-nth reasons)]
(send-sms-girlfriend message)))
| Clojure | 4 | johndemlon/hacker-scripts | clojure/smack.clj | [
"WTFPL"
] |
server.port=8080
debug=false
wrong.three=invalid
| INI | 0 | Martin-real/spring-boot-2.1.0.RELEASE | spring-boot-project/spring-boot-properties-migrator/src/test/resources/config/config-error-no-replacement.properties | [
"Apache-2.0"
] |
%!PS-Adobe-3.0 Resource-Encoding
%%Title: VIM-iso-8859-5
%%Version: 1.0 0
%%EndComments
/VIM-iso-8859-5[
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/space /exclam /quotedbl /numbersign /dollar /percent /ampersand /quotesingle
/parenleft /parenright /asterisk /plus /comma /minus /period /slash
/zero /one /two /three /four /five /six /seven
/eight /nine /colon /semicolon /less /equal /greater /question
/at /A /B /C /D /E /F /G
/H /I /J /K /L /M /N /O
/P /Q /R /S /T /U /V /W
/X /Y /Z /bracketleft /backslash /bracketright /asciicircum /underscore
/grave /a /b /c /d /e /f /g
/h /i /j /k /l /m /n /o
/p /q /r /s /t /u /v /w
/x /y /z /braceleft /bar /braceright /asciitilde /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/space /afii10023 /afii10051 /afii10052 /afii10053 /afii10054 /afii10055 /afii10056
/afii10057 /afii10058 /afii10059 /afii10060 /afii10061 /.notdef /afii10062 /afii10145
/afii10017 /afii10018 /afii10019 /afii10020 /afii10021 /afii10022 /afii10024 /afii10025
/afii10026 /afii10027 /afii10028 /afii10029 /afii10030 /afii10031 /afii10032 /afii10033
/afii10034 /afii10035 /afii10036 /afii10037 /afii10038 /afii10039 /afii10040 /afii10041
/afii10042 /afii10043 /afii10044 /afii10045 /afii10046 /afii10047 /afii10048 /afii10049
/afii10065 /afii10066 /afii10067 /afii10068 /afii10069 /afii10070 /afii10072 /afii10073
/afii10074 /afii10075 /afii10076 /afii10077 /afii10078 /afii10079 /afii10080 /afii10081
/afii10082 /afii10083 /afii10084 /afii10085 /afii10086 /afii10087 /afii10088 /afii10089
/afii10090 /afii10091 /afii10092 /afii10093 /afii10094 /afii10095 /afii10096 /afii10097
/afii61352 /afii10071 /afii10099 /afii10100 /afii10101 /afii10102 /afii10103 /afii10104
/afii10105 /afii10106 /afii10107 /afii10108 /afii10109 /section /afii10110 /afii10193]
/Encoding defineresource pop
% vim:ff=unix:
%%EOF
| PostScript | 1 | uga-rosa/neovim | runtime/print/iso-8859-5.ps | [
"Vim"
] |
(ns lt.objs.connector
"Provide client-selector object for UI and behaviors to choosing a client"
(:require [lt.object :as object]
[lt.objs.canvas :as canvas]
[lt.objs.popup :as popup]
[lt.objs.eval :as eval])
(:require-macros [lt.macros :refer [behavior defui]]))
(behavior ::on-selected
:triggers #{:selected}
:reaction (fn [this client]
(when-let [cb (:cb @this)]
(cb client))
(object/raise this :close!)))
(behavior ::on-close!
:triggers #{:close!}
:reaction (fn [this]
(object/raise (:popup @this) :close!)
(object/destroy! this)
))
(defui client-button [obj client]
[:li.button (:name @client)]
:click (fn []
(object/raise obj :selected client)
))
(object/object* ::client-selector
:tags #{:client.selector}
:init (fn [this clients cb]
(object/merge! this {:cb cb
:popup
(popup/popup!
{:header "Which client?"
:body (list [:p "There are multiple clients that could potentially handle this.
Which one do you want us to use for this file?"]
[:ul
(map (partial client-button this) clients)])})})
nil
))
(behavior ::select-client
:triggers #{:select-client}
:reaction (fn [obj potentials cb]
(object/create ::client-selector potentials cb)))
(object/add-behavior! eval/evaler ::select-client)
| Clojure | 5 | sam-aldis/LightTable | src/lt/objs/connector.cljs | [
"MIT"
] |
only forth also hidden also definitions
decimal
headerless
variable sift-vocabulary
variable sift-string
headers
\ Leave a "hook" for showing the name of the vocabulary
\ only once, the first time a matching name is found.
\ Showing the name of a device can be plugged in here also...
defer .voc ' noop is .voc
: .in ( -- ) ??cr tabstops @ spaces ." In " ;
headerless
: .vocab ( -- )
.in ['] vocabulary .name space
sift-vocabulary @ .name cr
['] noop is .voc
;
\ Show the "sifted" name, preceded by its cfa in parentheses.
\ Show the name of the vocabulary only the first time.
\ Control the display with exit?
: .sift? ( xt -- exit? )
.voc
exit? tuck if drop exit then ( exit? xt )
?cr ( exit? xt )
dup ." (" (u.) type ." ) " ( exit? xt )
.name 2 spaces ( exit? )
;
\ Sift through the given vocabulary, using the sift-string given.
\ Control the display with exit?
: vsift? ( adr len voc-xt -- adr len exit? )
dup sift-vocabulary ! follow 2>r ( r: test$ )
begin another? while ( xt )
dup >name$ ( xt this$ r: test$ )
2r@ search nip nip if ( xt r: test$ )
.sift? if 2r> true exit then ( xt r: test$ )
else ( xt r: test$ )
drop ( r: test$ )
then ( r: test$ )
repeat ( r: test$ )
2r> false
;
forth definitions
: $sift ( adr len -- )
voc-link begin another-link? while ( adr len voc-xt )
['] .vocab is .voc ( adr len voc-xt )
>r r@ vsift? if r> 3drop exit then ( adr len r: voc-xt )
r> >voc-link ( adr len )
repeat ( adr len )
2drop ( )
;
: sifting \ name ( -- )
safe-parse-word $sift
;
only forth also definitions
| Forth | 4 | andrewtholt/cforth-ath | src/cforth/sift.fth | [
"MIT"
] |
var('m')
# n= 129482838368566183932513985441085009497256178329157894268028880326190764173460678124609300536070044580431613060822516831816458170564365043575686770254191899725924667480108873667724655372386495538635971688223364262970073469144676440221627950186354010843274170556535606754054997765209301882273297337063711638001
# z1= 8424250840151611710956733283933835555790880358865384509282994665412717296960704520259442566809522167469432156373187459662466436529883388362170699187756170177843926747584608330434410285645188809403881644702066840043817526556971983221039741812026047932441765396266314490852277245256348522177007232165004727636
# z2= 108558984980396842508755420431729622294812918648500237917905266957134098595337838936866607276568682903903507097507822245988819212277071058352139969194319804154699294345013467238103150245084121501509318335968060815558056317706537806946946176738965199158647136238437262269212686339389988488863137348154017220299
# z3= 105733706825436258573684677211090928835979202775474570298183685697156172584296468284617963952704672959164742917851393233820229400781680048397608270405072784213186705574078196381808457283632817709970529693469335256556459591875590782097873026967121589199662185874493253056830112721062696219002130302624597766428
# e = 3
n = 116411973312833767249983501614250518823915221869887859463215387026693192701752522029206038790307038882010601416275376599984652025192440737975937248861914115288472774443041337716361103196014846135352394859351662673132964677102701253420078525606236715736140368939112915631884026674639153639959278462059142641279
z = [[18242235260412703467109867831558028784085717312433669793845789340745241332962143239461533361941620198939147613644457160535912419961643114957349118767475632989699956297950175515803265602731994512070968181569410524513663571971764458358112331781739138157077905042848450509811111046322477632612741243232775273795],
[69009077263788749866070646215106313567650962097657992624325022768395324611228575872284234472906262573276985390719594385339335259370063136915155903108254726896386641651667370808526072139963572802754918660237379091447985322598123227189491869194634754646890652806833111297383823993328499341963203907393453463499],
[54989991814188782294359972953913246643609231956075769654700142198950570014855382410780941521902371363935480059397144759406944333289733090098480205879234444774192620798213716014194741736913904930670855581886257474600422977150538252496761669844380504268810099734617192289546473844860391218147068088153183203211]]
var('k1','k2','k3','m')
solve_mod([z[0][0] == k1 + m,z[1][0] == k2 + m,z[2][0] == k3 + m, k1^3 == m, k2^3 + 2020 ==m, k3^3 + 4040 == m, m < (1<<128)], n)
n = 93374489802738958304746096948386269484428657871530143700859716422363715438165981175454398945750083579171978262026405009558322493513605972367998552018248741053971937598506661754921960464551657384570598873984090045847981484284282628140272570957437734035721995924024437737917456526346869897218292025656863751233
e = 4097
z1 = list(map(s2n,chunk(bytes.fromhex('377940084bb834d3e5aa6bbbe7df825807bf852ed7da4396ec2f99e098deab6abd77f5a368bb1c4902060e7e46b6289bcb076d143e936bc66896cdcac359fa4c856ee3dbc57637c731decd77de2a43a85351a6492f12ad899ea54cd7c9a43aef029f6f67bc8405f576ad2c04604a050ced7fef9f3678b2268ecca0a8214a2a4006acde065c5476b4c87fd319cafafd80b1506b728abc40ecfbfc95bdb89d3b299696a06921247c1ae8fa39360c7d5c994296716b5473e6ee791329b1f3575e64dfd8db5fd3aa679fb6365f869deac87cef28136e15884e00da6a15f07cd9d577863e15fc40da57e59800af58662798ed33751dcddd0e55f12e382e99e10573d8'),128)))
z2 = list(map(s2n,chunk(bytes.fromhex('0b4f8475dae0f123f6e99f95411c47931367f4790936159e96d161bc35536376e5f8ef473c99b0568ffee43e22ca90fa71a91006acafb5cfbbee0a21be9f50629cd45c77d69dbac9548d680bd0b2eb036e92386a5321b4c383c10480d74a3b002d1028c67246987040b1f0363fa2f56e86549daa1be4577751df04f2e2feb07c82f310098db4e46bf2d991eb3d27fc05d064dfeb080eb2b3940f883d37ce75cc02dfc53a203928054c15cc0e81202c8869eb94768cdcf3fb7bfb4b429efdb7d2ea4d1f83962705baeaa93decd6b887ea32538cd4608d2c7eb7a4f14068966f3ea1103a44b8783e5d8cc7fe33dde8bde0768c5d14c33299c1a2ae027ede31d407'),128)))
z3 = list(map(s2n,chunk(bytes.fromhex('3a38df45c3ce7bb35a6af6fb1b725dde4afbfdc2b07fb6ee9073b8aba338a6fb31adee635c33569e2dfd548d8427ac6952b33019f0299a4ace09e57723860ddaf1e21c02acea1d4c62cf03c1d40f46dd0aa2009fd54a6336372eb6fd48e3beb14b64eea79d4255231adf8cc1d4f2865886188fe7a0d6bea757cd292d6b830f10158ec6d01726b929c10e4da8d7ebbad48f2b9d5f98c5103a00fea86cde242777cf999f48278760b458041490becd0282d5b3375b299ed79419150d736c57f074f3f7db29bb989a4e00738d539d4e09cd21d7e909b6fd84bb3bce627aedf1a41076306bbea59458d24a2e09d9458e780d0332081afea00cd05d3edb2de65e44e4'),128)))
z2[0]
z3[0] - z1[0] | Sage | 3 | bearbear1100/Cryptools | cytro/sage/test.sage | [
"MIT"
] |
# Image metadata and config.
# Ideally, the Node.js version should match what we use on CI.
# See `executors > default-executor` in `.circleci/config.yml`.
FROM cimg/node:14-browsers
LABEL name="Angular dev environment" \
description="This image can be used to create a dev environment for building Angular." \
vendor="angular" \
version="1.0"
EXPOSE 4000 4200 4433 5000 8080 9876
# Switch to `root` (CircleCI images use `circleci` as the user).
USER root
# Configure `Node.js`/`npm`.
RUN npm config --global set user root
# Go! (And keep going.)
CMD ["tail", "--follow", "/dev/null"]
| Dockerfile | 4 | John-Cassidy/angular | .devcontainer/recommended-Dockerfile | [
"MIT"
] |
#ifndef GOOGLEMOCK_INCLUDE_GMOCK_INTERNAL_CUSTOM_GMOCK_GENERATED_ACTIONS_H_
#define GOOGLEMOCK_INCLUDE_GMOCK_INTERNAL_CUSTOM_GMOCK_GENERATED_ACTIONS_H_
#endif // GOOGLEMOCK_INCLUDE_GMOCK_INTERNAL_CUSTOM_GMOCK_GENERATED_ACTIONS_H_
| C | 0 | aweisi/googletest | googlemock/include/gmock/internal/custom/gmock-generated-actions.h | [
"BSD-3-Clause"
] |
Rebol [
Title: "Rebol3 ChaCha20 test script"
Author: "Oldes, Peter W A Wood"
File: %chacha20-test.r3
Tabs: 4
Needs: [%../quick-test-module.r3]
]
~~~start-file~~~ "ChaCha20"
===start-group=== "ChaCha20 test vectors"
foreach [test-id key nonce counter plain cipher] [
1
#{00000000000000000000000000000000}
#{0000000000000000} 0
#{00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000}
#{89670952608364FD00B2F90936F031C8E756E15DBA04B8493D00429259B20F46CC04F111246B6C2CE066BE3BFB32D9AA0FDDFBC12123D4B9E44F34DCA05A103F}
;@@ http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04#appendix-A.2
2
#{0000000000000000000000000000000000000000000000000000000000000000}
#{0000000000000000} 0
#{00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000}
#{76B8E0ADA0F13D90405D6AE55386BD28BDD219B8A08DED1AA836EFCC8B770DC7DA41597C5157488D7724E03FB8D84A376A43B8F41518A11CC387B669B2EE6586}
3
#{0000000000000000000000000000000000000000000000000000000000000001}
#{0000000000000002} 1
#{416e79207375626d697373696f6e20746f20746865204945544620696e74656e6465642062792074686520436f6e7472696275746f7220666f72207075626c69636174696f6e20617320616c6c206f722070617274206f6620616e204945544620496e7465726e65742d4472616674206f722052464320616e6420616e792073746174656d656e74206d6164652077697468696e2074686520636f6e74657874206f6620616e204945544620616374697669747920697320636f6e7369646572656420616e20224945544620436f6e747269627574696f6e222e20537563682073746174656d656e747320696e636c756465206f72616c2073746174656d656e747320696e20494554462073657373696f6e732c2061732077656c6c206173207772697474656e20616e6420656c656374726f6e696320636f6d6d756e69636174696f6e73206d61646520617420616e792074696d65206f7220706c6163652c207768696368206172652061646472657373656420746f}
#{a3fbf07df3fa2fde4f376ca23e82737041605d9f4f4f57bd8cff2c1d4b7955ec2a97948bd3722915c8f3d337f7d370050e9e96d647b7c39f56e031ca5eb6250d4042e02785ececfa4b4bb5e8ead0440e20b6e8db09d881a7c6132f420e52795042bdfa7773d8a9051447b3291ce1411c680465552aa6c405b7764d5e87bea85ad00f8449ed8f72d0d662ab052691ca66424bc86d2df80ea41f43abf937d3259dc4b2d0dfb48a6c9139ddd7f76966e928e635553ba76c5c879d7b35d49eb2e62b0871cdac638939e25e8a1e0ef9d5280fa8ca328b351c3c765989cbcf3daa8b6ccc3aaf9f3979c92b3720fc88dc95ed84a1be059c6499b9fda236e7e818b04b0bc39c1e876b193bfe5569753f88128cc08aaa9b63d1a16f80ef2554d7189c411f5869ca52c5b83fa36ff216b9c1d30062bebcfd2dc5bce0911934fda79a86f6e698ced759c3ff9b6477338f3da4f9cd8514ea9982ccafb341b2384dd902f3d1ab7ac61dd29c6f21ba5b862f3730e37cfdc4fd806c22f221}
4
#{1c9240a5eb55d38af333888604f6b5f0473917c1402b80099dca5cbc207075c0}
#{0000000000000002} 42
#{2754776173206272696c6c69672c20616e642074686520736c6974687920746f7665730a446964206779726520616e642067696d626c6520696e2074686520776162653a0a416c6c206d696d737920776572652074686520626f726f676f7665732c0a416e6420746865206d6f6d65207261746873206f757467726162652e}
#{62e6347f95ed87a45ffae7426f27a1df5fb69110044c0d73118effa95b01e5cf166d3df2d721caf9b21e5fb14c616871fd84c54f9d65b283196c7fe4f60553ebf39c6402c42234e32a356b3e764312a61a5532055716ead6962568f87d3f3f7704c6a8d1bcd1bf4d50d6154b6da731b187b58dfd728afa36757a797ac188d1}
5
#{000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f}
#{000000000000004a00000000} 1
#{4C616469657320616E642047656E746C656D656E206F662074686520636C617373206F66202739393A204966204920636F756C64206F6666657220796F75206F6E6C79206F6E652074697020666F7220746865206675747572652C2073756E73637265656E20776F756C642062652069742E}
#{6e2e359a2568f98041ba0728dd0d6981e97e7aec1d4360c20a27afccfd9fae0bf91b65c5524733ab8f593dabcd62b3571639d624e65152ab8f530c359f0861d807ca0dbf500d6a6156a38e088a22b65e52bc514d16ccf806818ce91ab77937365af90bbf74a35be6b40b8eedf2785e42874d}
][
--test-- join "ChaCha20 test (A) " test-id
; version with multiple steps
--assert handle? k1: chacha20 key
--assert handle? chacha20/init k1 nonce counter
--assert cipher = chacha20/stream k1 plain
--assert handle? k2: chacha20 key
--assert handle? chacha20/init k2 nonce counter
--assert plain = chacha20/stream k2 cipher
--test-- join "ChaCha20 test (B) " test-id
--assert handle? k1: chacha20/init key nonce counter
--assert cipher = chacha20/stream k1 plain
--assert handle? k2: chacha20/init key nonce counter
--assert plain = chacha20/stream k2 cipher
--test-- join "ChaCha20 test (C) " test-id
--assert cipher = chacha20/init/stream key nonce counter plain
--assert handle? k2: chacha20/init key nonce counter
--assert plain = chacha20/init/stream key nonce counter cipher
]
===end-group===
===start-group=== "ChaCha20Poly1305"
--test-- "TLS with ChaCha20Poly1305 use-case simulation"
;- server and client exchange it's keys and initialization vectors
server-key: #{AE8A57A15387FD92E9DAA50FECD6CA31044A7EEC9459EC9C6ED6A93EE4F6CC42}
client-key: #{438D7027FD611C1A5CD532D1151665EA3BB925CF1F37453C109790B604E7A0C4}
server-IV: #{F01A5EF18B11C15FB97AE808}
client-IV: #{9F45E14C213A3719186DDF50}
;- client initialize ChaCha20Poly1305 context
client-ctx: chacha20poly1305/init none client-key client-IV server-key server-IV
--assert handle? client-ctx
;- server initialize ChaCha20Poly1305 context with switched values
server-ctx: chacha20poly1305/init none server-key server-IV client-key client-IV
--assert handle? server-ctx
;- client encrypts data for server with AAD
data: #{1400000C89F6A49D54518857D140BE74}
aad: #{0000000000000000 16 0303 0010}
; AAD structure used in TLS protocol:
; 8 bytes - sequence ID (starting from 0)
; 1 byte - sequence type
; 2 bytes - TLS version
; 2 bytes - length of data
; AAD is internally padded to 16 bytes with zeros
expect: #{AE84B0499E0B7837027C6FD712A68894 3604F4477DCA0C6856559D1DD2EEC03C}
result: chacha20poly1305/encrypt client-ctx data aad
--assert expect = result
; result ends with 16 bytes of MAC used as message authentication
; the MAC is internally computed and authenticated, result of decrypt is NONE on fail.
;- server receives encrypted data and decrypts them using same AAD
--assert data = chacha20poly1305/decrypt server-ctx result aad
;- server responds with encrypted data (also with 16 bytes of MAC at tail):
data: #{291EC39A1BAD9E855CA8EB042014C4AFBDE4C13241E44B5B926435BB79EB89AF}
; it is a little bit more optimal if AAD is already 16 bytes
aad: #{00000000000000001603030010000000} ; as it is first server's message, the seqence is = 0
expect: #{1400000C107581DB64B051DA4C250603}
result: chacha20poly1305/decrypt client-ctx data aad
--assert expect = result
;- and other data...
data: #{567E44EDD0CD6C88EEC4187CE3A7323016561788BE45D5246005025F4691B1C415A6B902F8ABD95A6C57A0168E9FAC5FDC6B606477DE4072AE7B5A78C5B5513217CB213F2DBCBFE9D774A916FABCD4690BD8CDE45847A250FF34F28861553BC7514A0EC51205CCC56D9C294033B015BD}
aad: #{00000000000000011703030060000000} ; notice that seqence is now incremented to 1
expect: #{485454502F312E3120323030204F4B0D0A436F6E74656E742D6C656E6774683A2033310D0A436F6E74656E742D747970653A20746578742F706C61696E0D0A0D0A48656C6C6F20776F726C642066726F6D20544C53652028544C5320312E3229}
result: chacha20poly1305/decrypt client-ctx data aad
--assert expect = result
===end-group===
~~~end-file~~~ | Rebol | 4 | 0branch/r3 | src/tests/units/chacha20-test.r3 | [
"Apache-2.0"
] |
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
// LEC dofile for script lec_sv2v.sh. A similar script is used in
// OpenTitan, any updates or fixes here may need to be reflected in the
// OpenTitan script as well:
// https://github.com/lowRISC/opentitan/blob/master/hw/formal/lec_sv2v.do
//-------------------------------------------------------------------------
// read in golden (SystemVerilog) and revised (Verilog)
//-------------------------------------------------------------------------
// map all multi-dimensional ports (including structs) onto 1-dim. ports
set naming rule -mdportflatten
read design -golden -sv09 -f flist_gold -rootonly -root $LEC_TOP
read design -revised -sys -f flist_rev -rootonly -root $LEC_TOP
// TODO: instead of using switch -sys (for old SystemVerilog,
// older than sv2009) we should use -ve (for Verilog). But
// this currently doesn't work because sv2v doesn't translate
// .* port connections. Is that an sv2v bug?
//-------------------------------------------------------------------------
// pre-LEC reports
//-------------------------------------------------------------------------
report rule check -verbose
report design data
report black box
report module
//-------------------------------------------------------------------------
// compare
//-------------------------------------------------------------------------
set system mode lec
set parallel option -threads 8
// map unreachable points
set mapping method -nets -mem -unreach
map key points
report unmapped points
add compare point -all
compare -threads 8 -noneq_stop 1
analyze abort -compare
//-------------------------------------------------------------------------
// reports
//-------------------------------------------------------------------------
report compare data -class nonequivalent -class abort -class notcompared
report verification -verbose
report statistics
usage
exit -force
| Stata | 3 | parzival3/Surelog | third_party/tests/Ibex/syn/lec_sv2v.do | [
"Apache-2.0"
] |
L NURON e_nuron_err.h e_nuron_err.c
| eC | 1 | jiangzhu1212/oooii | Ouroboros/External/OpenSSL/openssl-1.0.0e/engines/e_nuron.ec | [
"MIT"
] |
window.__test_defer_executions = (window.__test_defer_executions || 0) + 1
| JavaScript | 1 | blomqma/next.js | test/integration/client-navigation/public/test-defer.js | [
"MIT"
] |
/*
* Program type: Embedded Static SQL
*
* Description:
* This program updates a blob data type.
* Project descriptions are added for a set of projects.
* The contents of this file are subject to the Interbase Public
* License Version 1.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy
* of the License at http://www.Inprise.com/IPL.html
*
* Software distributed under the License is distributed on an
* "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express
* or implied. See the License for the specific language governing
* rights and limitations under the License.
*
* The Original Code was created by Inprise Corporation
* and its predecessors. Portions created by Inprise Corporation are
* Copyright (C) Inprise Corporation.
*
* All Rights Reserved.
* Contributor(s): ______________________________________.
*/
#include "example.h"
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
char *get_line (void);
static char *Proj_data[] =
{
"VBASE",
"Design a video data base management system for ",
"controlling on-demand video distribution.",
0,
"DGPII",
"Develop second generation digital pizza maker ",
"with flash-bake heating element and ",
"digital ingredient measuring system.",
0,
"GUIDE",
"Develop a prototype for the automobile version of ",
"the hand-held map browsing device.",
0,
"MAPDB",
"Port the map browsing database software to run ",
"on the automobile model.",
0,
"HWRII",
"Integrate the hand-writing recognition module into the ",
"universal language translator.",
0,
0
};
int Inp_ptr = 0;
EXEC SQL
BEGIN DECLARE SECTION;
EXEC SQL
END DECLARE SECTION;
int main (void)
{
BASED_ON project.proj_id proj_id;
ISC_QUAD blob_id;
int len;
char * line;
int rec_cnt = 0;
EXEC SQL
WHENEVER SQLERROR GO TO Error;
/* Declare a blob insert cursor. */
EXEC SQL
DECLARE bc CURSOR FOR
INSERT BLOB proj_desc INTO project;
/*
* Get the next project id and update the project description.
*/
line = get_line();
while (line)
{
/* Open the blob cursor. */
EXEC SQL
OPEN bc INTO :blob_id;
strcpy(proj_id, line);
printf("\nUpdating description for project: %s\n\n", proj_id);
/* Get a project description segment. */
line = get_line();
while (line)
{
printf(" Inserting segment: %s\n", line);
/* Calculate the length of the segment. */
len = strlen(line);
/* Write the segment. */
EXEC SQL INSERT CURSOR bc VALUES (:line INDICATOR :len);
line = get_line();
}
/* Close the blob cursor. */
EXEC SQL
CLOSE bc;
/* Save the blob id in the project record. */
EXEC SQL
UPDATE project
SET proj_desc = :blob_id
WHERE proj_id = :proj_id;
if (SQLCODE == 0L)
rec_cnt++;
else
printf("Input error -- no project record with key: %s\n", proj_id);
line = get_line();
}
EXEC SQL
COMMIT RELEASE;
printf("\n\nAdded %d project descriptions.\n", rec_cnt);
return 0;
Error:
isc_print_sqlerror((short) SQLCODE, isc_status);
return 1;
}
/*
* Get the next input line, which is either a project id
* or a project description segment.
*/
/* Hand out the next canned input line. Entries are 0-terminated, so the
 * caller sees NULL once a group (or the whole table) is exhausted. */
char *get_line (void)
{
    char *next = Proj_data[Inp_ptr];
    Inp_ptr++;
    return next;
}
| Eiffel | 4 | jiemurat/delphimvcframework | unittests/general/Several/bin/firebird/examples/stat/stat8.e | [
"Apache-2.0"
] |
#tag Class
Protected Class FTPSessionWFS
Inherits InternetSessionWFS
#tag Method, Flags = &h0
Sub Connect(url as String, username as String, password as String)
// Opens an FTP session to the given server, authenticating with
// username/password and honoring the Port and Passive properties.
// Fires an exception if the connection cannot be established.
#if TargetWin32
Soft Declare Function InternetConnectA Lib "WinInet" ( handle as Integer, server as CString, _
port as Integer, username as CString, password as CString, servic as Integer, flags as Integer, _
context as Integer ) as Integer
Soft Declare Function InternetConnectW Lib "WinInet" ( handle as Integer, server as WString, _
port as Integer, username as WString, password as WString, servic as Integer, flags as Integer, _
context as Integer ) as Integer
// Setup our connection flags
dim flags as Integer
Const INTERNET_FLAG_PASSIVE = &h8000000
if Passive then flags = INTERNET_FLAG_PASSIVE
Const INTERNET_SERVICE_FTP = 1
// Try to do the connection; prefer the Unicode entry point when available
if System.IsFunctionAvailable( "InternetConnectW", "WinInet" ) then
mFTPHandle = InternetConnectW( mInetHandle, url, Port, username, password, _
INTERNET_SERVICE_FTP, flags, 0 )
else
mFTPHandle = InternetConnectA( mInetHandle, url, Port, username, password, _
INTERNET_SERVICE_FTP, flags, 0 )
end if
// Now check to make sure we were able to open the connection
if mFTPHandle = 0 then
FireException( "Could not open the FTP connection" )
return
end if
#else
#pragma unused url
#pragma unused username
#pragma unused password
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub Constructor()
// Creates the underlying WinInet session handle via the superclass,
// passing an empty user-agent string; the FTP connection itself is
// only established later in Connect.
// Make sure we get our inet handle
super.Constructor( "" )
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub CreateDirectory(name as String)
// Creates a directory named `name` on the server, relative to the
// current remote directory. Fires an exception on failure.
// Sanity checks
if mFTPHandle = 0 then
FireException( "Trying to create a directory while not connected" )
return
end if
#if TargetWin32
Soft Declare Function FtpCreateDirectoryW Lib "WinInet" ( handle as Integer, name as WString ) as Boolean
Soft Declare Function FtpCreateDirectoryA Lib "WinInet" ( handle as Integer, name as CString ) as Boolean
// Prefer the Unicode entry point, fall back to ANSI on older systems
dim success as Boolean
if System.IsFunctionAvailable( "FtpCreateDirectoryW", "WinInet" ) then
success = FtpCreateDirectoryW( mFTPHandle, name )
else
success = FtpCreateDirectoryA( mFTPHandle, name )
end if
if not success then
FireException( "Could not create the directory" )
return
end if
#else
#pragma unused name
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Function CurrentDirectory() As String
// Returns the current working directory on the remote FTP server,
// or "" after firing an exception when not connected or on failure.
// Sanity checks
if mFTPHandle = 0 then
FireException( "Trying to get the current directory while not connected" )
return ""
end if
#if TargetWin32
Soft Declare Function FtpGetCurrentDirectoryW Lib "WinInet" ( handle as Integer, buf as Ptr, ByRef size as Integer ) as Boolean
Soft Declare Function FtpGetCurrentDirectoryA Lib "WinInet" ( handle as Integer, buf as Ptr, ByRef size as Integer ) as Boolean
dim success as Boolean
// 260 (MAX_PATH) characters, doubled so the buffer also fits UTF-16
dim buf as new MemoryBlock( 260 * 2 )
dim size as Integer = buf.Size
if System.IsFunctionAvailable( "FtpGetCurrentDirectoryW", "WinInet" ) then
success = FtpGetCurrentDirectoryW( mFTPHandle, buf, size )
if success then return buf.WString( 0 )
else
// BUG FIX: this branch previously called FtpGetCurrentDirectoryW as
// well, so the ANSI fallback never worked. Use the ANSI variant.
success = FtpGetCurrentDirectoryA( mFTPHandle, buf, size )
if success then return buf.CString( 0 )
end if
if not success then
FireException( "Could not get the current remote directory" )
return ""
end if
#endif
End Function
#tag EndMethod
#tag Method, Flags = &h0
Sub CurrentDirectory(assigns name as String)
// Changes the current working directory on the remote FTP server to
// `name`. Fires an exception on failure.
// Sanity checks
if mFTPHandle = 0 then
FireException( "Trying to set the current directory while not connected" )
return
end if
#if TargetWin32
Soft Declare Function FtpSetCurrentDirectoryW Lib "WinInet" ( handle as Integer, dir as WString ) as Boolean
Soft Declare Function FtpSetCurrentDirectoryA Lib "WinInet" ( handle as Integer, dir as CString ) as Boolean
// Prefer the Unicode entry point, fall back to ANSI on older systems
dim success as Boolean
if System.IsFunctionAvailable( "FtpSetCurrentDirectoryW", "WinInet" ) then
success = FtpSetCurrentDirectoryW( mFTPHandle, name )
else
success = FtpSetCurrentDirectoryA( mFTPHandle, name )
end if
if not success then
FireException( "Could not set the current remote directory" )
return
end if
#else
#pragma unused name
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub DeleteDirectory(name as String)
// Removes the directory named `name` on the server, relative to the
// current remote directory. Fires an exception on failure.
// Sanity checks
if mFTPHandle = 0 then
FireException( "Trying to delete a directory while not connected" )
return
end if
#if TargetWin32
Soft Declare Function FtpRemoveDirectoryW Lib "WinInet" ( handle as Integer, name as WString ) as Boolean
Soft Declare Function FtpRemoveDirectoryA Lib "WinInet" ( handle as Integer, name as CString ) as Boolean
// Prefer the Unicode entry point, fall back to ANSI on older systems
dim success as Boolean
if System.IsFunctionAvailable( "FtpRemoveDirectoryW", "WinInet" ) then
success = FtpRemoveDirectoryW( mFTPHandle, name )
else
success = FtpRemoveDirectoryA( mFTPHandle, name )
end if
if not success then
FireException( "Could not delete the directory" )
return
end if
#else
#pragma unused name
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub DeleteFile(name as String)
// Deletes the remote file named `name`, relative to the current remote
// directory. Fires an exception on failure.
// Sanity checks
if mFTPHandle = 0 then
FireException( "Trying to delete a file while not connected" )
return
end if
#if TargetWin32
Soft Declare Function FtpDeleteFileW Lib "WinInet" ( handle as Integer, name as WString ) as Boolean
Soft Declare Function FtpDeleteFileA Lib "WinInet" ( handle as Integer, name as CString ) as Boolean
// Prefer the Unicode entry point, fall back to ANSI on older systems
dim success as Boolean
if System.IsFunctionAvailable( "FtpDeleteFileW", "WinInet" ) then
success = FtpDeleteFileW( mFTPHandle, name )
else
success = FtpDeleteFileA( mFTPHandle, name )
end if
if not success then
FireException( "Could not delete the file" )
return
end if
#else
#pragma unused name
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub Destructor()
// Release the FTP connection handle when the session goes away.
// NOTE(review): assumes CloseHandle tolerates a 0 handle when Connect
// was never called or failed — confirm against the superclass helper.
CloseHandle( mFTPHandle )
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub FindFinish()
// Stops an in-progress find operation early, releasing the WinInet
// find handle so that a new FindFirstFile can be started.
if mInternalFindHandle <> 0 then
CloseHandle( mInternalFindHandle )
// BUG FIX: reset the cached handle; without this, FindFirstFile keeps
// failing with "Can only do one find at a time" forever afterwards.
mInternalFindHandle = 0
end if
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Function FindFirstFile(search as String = "") As FindFileWFS
// Starts a remote directory listing matching `search` (empty = everything)
// and returns the first entry, or nil if the directory is empty or on
// error. Only one find may be active at a time; continue the listing
// with FindNextFile (see the superclass) and end it with FindFinish.
// Sanity checks
if mFTPHandle = 0 then
FireException( "Trying to find the first file while not connected" )
return nil
end if
// If we're already in the process of doing a find, then bail out
if mInternalFindHandle <> 0 then
FireException( "Can only do one find at a time." )
return nil
end if
#if TargetWin32
Soft Declare Function FtpFindFirstFileW Lib "WinInet" ( handle as Integer, search as WString, data as Ptr, _
flags as Integer, context as Integer ) as Integer
Soft Declare Function FtpFindFirstFileA Lib "WinInet" ( handle as Integer, search as CString, data as Ptr, _
flags as Integer, context as Integer ) as Integer
// Force a fresh listing from the server rather than a cached one
Const INTERNET_FLAG_RELOAD = &h80000000
// The buffer receives the find data; its size depends on whether the
// Unicode or ANSI structure layout is used.
dim mb as MemoryBlock
if System.IsFunctionAvailable( "FtpFindFirstFileW", "WinInet" ) then
mb = new MemoryBlock( FindFileWFS.kUnicodeSize )
mInternalFindHandle = FtpFindFirstFileW( mFTPHandle, search, mb, INTERNET_FLAG_RELOAD, 0 )
else
mb = new MemoryBlock( FindFileWFS.kANSISize )
mInternalFindHandle = FtpFindFirstFileA( mFTPHandle, search, mb, INTERNET_FLAG_RELOAD, 0 )
end if
Const ERROR_NO_MORE_FILES = 18
if mInternalFindHandle = 0 then
// It could be that there just aren't any files in the directory
if GetLastError = ERROR_NO_MORE_FILES then return nil
FireException( "Could not find the first file" )
return nil
end if
return new FindFileWFS( mb )
#else
#pragma unused search
#endif
End Function
#tag EndMethod
#tag Method, Flags = &h0
Sub GetFile(remoteName as String, local as FolderItem, bFailIfExists as Boolean = false)
// Downloads `remoteName` from the server into `local`. If `local` is a
// directory, the file is created inside it under the remote name. When
// bFailIfExists is true, an existing local file aborts the transfer.
// Fires an exception on failure.
// Sanity checks
if mFTPHandle = 0 then
// BUG FIX: the old message was copy-pasted from CurrentDirectory and
// talked about setting the current directory.
FireException( "Trying to get a file while not connected" )
return
end if
// If the local item is a directory, turn it into
// a file
if local.Directory then
local = local.Child( remoteName )
end if
#if TargetWin32
Soft Declare Function FtpGetFileW Lib "WinInet" ( handle as Integer, remote as WString, local as WString, _
fail as Boolean, attribs as Integer, flags as Integer, context as Integer ) as Boolean
// BUG FIX: the ANSI variant takes CString parameters, not WString.
Soft Declare Function FtpGetFileA Lib "WinInet" ( handle as Integer, remote as CString, local as CString, _
fail as Boolean, attribs as Integer, flags as Integer, context as Integer ) as Boolean
dim flags as Integer = TransferType
Const FILE_ATTRIBUTE_NORMAL = &h80
dim success as Boolean
if System.IsFunctionAvailable( "FtpGetFileW", "WinInet" ) then
success = FtpGetFileW( mFTPHandle, remoteName, local.AbsolutePath, bFailIfExists, _
FILE_ATTRIBUTE_NORMAL, flags, 0 )
else
// BUG FIX: this branch previously called FtpGetFileW as well, so the
// ANSI fallback never worked. Use the ANSI variant.
success = FtpGetFileA( mFTPHandle, remoteName, local.AbsolutePath, bFailIfExists, _
FILE_ATTRIBUTE_NORMAL, flags, 0 )
end if
if not success then
FireException( "Could not get the remote file" )
return
end if
#else
#pragma unused bFailIfExists
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub PutFile(f as FolderItem, remoteName as String = "")
// Uploads the local file `f` into the current remote directory, storing
// it under `remoteName` (defaults to the file's own name). Changes the
// process-wide local working directory to f's parent as a side effect.
// Fires an exception on failure.
#if TargetWin32
// Sanity checks
if f.Directory then
FireException( "Trying to put a directory by calling PutFile" )
return
end if
if mFTPHandle = 0 then
FireException( "Trying to put a file while not connected" )
return
end if
// The first thing we need to do is make
// sure that our local directory is correct.
SetLocalDirectory( f.Parent )
// Setup our support code
if remoteName = "" then remoteName = f.Name
dim flags as Integer = TransferType
Soft Declare Function FtpPutFileA Lib "WinInet" ( handle as Integer, localFile as CString, remoteFile as CString, _
flags as Integer, context as Integer ) as Boolean
Soft Declare Function FtpPutFileW Lib "WinInet" ( handle as Integer, localFile as WString, remoteFile as WString, _
flags as Integer, context as Integer ) as Boolean
// Now that we've done that, we can upload the file
// (f.Name is resolved relative to the local directory set above)
dim success as Boolean
if System.IsFunctionAvailable( "FtpPutFileW", "WinInet" ) then
success = FtpPutFileW( mFTPHandle, f.Name, remoteName, flags, 0 )
else
success = FtpPutFileA( mFTPHandle, f.Name, remoteName, flags, 0 )
end if
if not success then
FireException( "Could not upload the file" )
return
end if
#else
#pragma unused f
#pragma unused remoteName
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub PutFolder(f as FolderItem)
// Recursively uploads a file or a whole folder hierarchy into the
// current remote directory: folders are created remotely, descended
// into, filled, and then left again; plain files are uploaded in place.
if f = nil then return
if not f.Directory then
// Just put the file into the current folder
PutFile( f )
return
end if
// Mirror the folder on the server and step into it
CreateDirectory( f.Name )
CurrentDirectory = f.Name
// Upload every child, files and subfolders alike
dim childCount as Integer = f.Count
for i as Integer = 1 to childCount
PutFolder( f.TrueItem( i ) )
next i
// Step back out to where we started
CurrentDirectory = ".."
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub RenameFile(existing as String, newName as String)
// Renames the remote file `existing` to `newName`, relative to the
// current remote directory. Fires an exception on failure.
// Sanity checks
if mFTPHandle = 0 then
// BUG FIX: message previously read "Trying to get the rename a file".
FireException( "Trying to rename a file while not connected" )
return
end if
#if TargetWin32
Soft Declare Function FtpRenameFileW Lib "WinInet" ( handle as Integer, old as WString, newName as WString ) as Boolean
Soft Declare Function FtpRenameFileA Lib "WinInet" ( handle as Integer, old as CString, newName as CString ) as Boolean
// Prefer the Unicode entry point, fall back to ANSI on older systems
dim success as Boolean
if System.IsFunctionAvailable( "FtpRenameFileW", "WinInet" ) then
success = FtpRenameFileW( mFTPHandle, existing, newName )
else
success = FtpRenameFileA( mFTPHandle, existing, newName )
end if
if not success then
FireException( "Could not rename the file" )
return
end if
#else
#pragma unused existing
#pragma unused newName
#endif
End Sub
#tag EndMethod
#tag Method, Flags = &h0
Sub SetLocalDirectory(dir as FolderItem)
// Sets the current working directory of the local process; WinInet's
// FTP file routines (see PutFile) resolve relative local paths against
// it. Fires an exception if `dir` is not a directory or the change fails.
#if TargetWin32
Soft Declare Function SetCurrentDirectoryA Lib "Kernel32" ( dir as CString ) as Boolean
Soft Declare Function SetCurrentDirectoryW Lib "Kernel32" ( dir as WString ) as Boolean
// Sanity check
if not dir.Directory then
FireException( "Trying to set the local directory to a non-directory FolderItem" )
return
end if
// Set the directory
dim success as Boolean
if System.IsFunctionAvailable( "SetCurrentDirectoryW", "Kernel32" ) then
success = SetCurrentDirectoryW( dir.AbsolutePath )
else
success = SetCurrentDirectoryA( dir.AbsolutePath )
end if
// BUG FIX: the result was previously ignored; report failures the same
// way every other method in this class does.
if not success then
FireException( "Could not set the local directory" )
return
end if
#else
#pragma unused dir
#endif
End Sub
#tag EndMethod
#tag Note, Name = About this class
This class derives from the InternetSession class so that
we don't have to reinvent the wheel. So be certain
to look at that class for some functionality as well (such as
FindNextFile).
This class should work on all versions of Windows.
Note that the current implementation is synchronous in
nature and does all error handling via Exceptions. A future
implementation could be implemented asynchronously (with
modifications to the InternetSession class) as well.
#tag EndNote
#tag Property, Flags = &h1
Protected mFTPHandle As Integer
#tag EndProperty
#tag Property, Flags = &h0
Passive As Boolean = true
#tag EndProperty
#tag Property, Flags = &h0
Port As Integer = 21
#tag EndProperty
#tag Property, Flags = &h0
TransferType As Integer = &h2
#tag EndProperty
#tag Constant, Name = kTransferTypeASCII, Type = Double, Dynamic = False, Default = \"&h1", Scope = Public
#tag EndConstant
#tag Constant, Name = kTransferTypeBinary, Type = Double, Dynamic = False, Default = \"&h2", Scope = Public
#tag EndConstant
#tag ViewBehavior
#tag ViewProperty
Name="Index"
Visible=true
Group="ID"
InitialValue="-2147483648"
InheritedFrom="InternetSession"
#tag EndViewProperty
#tag ViewProperty
Name="Left"
Visible=true
Group="Position"
InitialValue="0"
InheritedFrom="InternetSession"
#tag EndViewProperty
#tag ViewProperty
Name="Name"
Visible=true
Group="ID"
InheritedFrom="InternetSession"
#tag EndViewProperty
#tag ViewProperty
Name="Passive"
Group="Behavior"
InitialValue="true"
Type="Boolean"
#tag EndViewProperty
#tag ViewProperty
Name="Port"
Group="Behavior"
InitialValue="21"
Type="Integer"
#tag EndViewProperty
#tag ViewProperty
Name="Super"
Visible=true
Group="ID"
InheritedFrom="InternetSession"
#tag EndViewProperty
#tag ViewProperty
Name="Top"
Visible=true
Group="Position"
InitialValue="0"
InheritedFrom="InternetSession"
#tag EndViewProperty
#tag ViewProperty
Name="TransferType"
Group="Behavior"
InitialValue="kTransferTypeBinary"
Type="Integer"
#tag EndViewProperty
#tag EndViewBehavior
End Class
#tag EndClass
| REALbasic | 5 | bskrtich/WFS | Windows Functionality Suite/Networking/Classes/FTPSessionWFS.rbbas | [
"MIT"
] |
-- There are 2 dimensions we want to test
-- 1. run with broadcast hash join, sort merge join or shuffle hash join.
-- 2. run with whole-stage-codegen, operator codegen or no codegen.
--CONFIG_DIM1 spark.sql.autoBroadcastJoinThreshold=10485760
--CONFIG_DIM1 spark.sql.autoBroadcastJoinThreshold=-1,spark.sql.join.preferSortMergeJoin=true
--CONFIG_DIM1 spark.sql.autoBroadcastJoinThreshold=-1,spark.sql.join.forceApplyShuffledHashJoin=true
--CONFIG_DIM2 spark.sql.codegen.wholeStage=true
--CONFIG_DIM2 spark.sql.codegen.wholeStage=false,spark.sql.codegen.factoryMode=CODEGEN_ONLY
--CONFIG_DIM2 spark.sql.codegen.wholeStage=false,spark.sql.codegen.factoryMode=NO_CODEGEN
-- t1/t2 hold a single row (1); t3/t4 duplicate that row so joins over
-- them produce one-to-many fan-out.
CREATE TEMPORARY VIEW t1 AS SELECT * FROM VALUES (1) AS GROUPING(a);
CREATE TEMPORARY VIEW t2 AS SELECT * FROM VALUES (1) AS GROUPING(a);
CREATE TEMPORARY VIEW t3 AS SELECT * FROM VALUES (1), (1) AS GROUPING(a);
CREATE TEMPORARY VIEW t4 AS SELECT * FROM VALUES (1), (1) AS GROUPING(a);
-- ta/tb tag each branch with a constant alias column, so the join below
-- mixes a real column (a) with a foldable constant column (tag).
CREATE TEMPORARY VIEW ta AS
SELECT a, 'a' AS tag FROM t1
UNION ALL
SELECT a, 'b' AS tag FROM t2;
CREATE TEMPORARY VIEW tb AS
SELECT a, 'a' AS tag FROM t3
UNION ALL
SELECT a, 'b' AS tag FROM t4;
-- SPARK-19766 Constant alias columns in INNER JOIN should not be folded by FoldablePropagation rule
SELECT tb.* FROM ta INNER JOIN tb ON ta.a = tb.a AND ta.tag = tb.tag;
| SQL | 4 | akhalymon-cv/spark | sql/core/src/test/resources/sql-tests/inputs/inner-join.sql | [
"Apache-2.0"
] |
.line[data-v-64c3fae5] {
  text-align: center;
}
"Apache-2.0"
] |
MODULE = Agar::UCombo PACKAGE = Agar::UCombo PREFIX = AG_
PROTOTYPES: ENABLE
VERSIONCHECK: DISABLE
Agar::UCombo
new(package, parent, ...)
const char * package
Agar::Widget parent
PREINIT:
Uint flags = 0, wflags = 0;
CODE:
/* Optional third argument: a hashref mapping UCombo/Widget flag names. */
if ((items == 3 && SvTYPE(SvRV(ST(2))) != SVt_PVHV) || items > 3) {
Perl_croak(aTHX_ "Usage: Agar::UCombo->new(parent,[{opts}])");
}
if (items == 3) {
AP_MapHashToFlags(SvRV(ST(2)), apUcomboFlagNames, &flags);
AP_MapHashToFlags(SvRV(ST(2)), apWidgetFlagNames, &wflags);
}
RETVAL = AG_UComboNew(parent, flags);
/* Widget-level flags are applied to the embedded AG_Widget. */
if (RETVAL) { AGWIDGET(RETVAL)->flags |= wflags; }
OUTPUT:
RETVAL
void
sizeHint(self, text, numItems)
Agar::UCombo self
const char * text
int numItems
CODE:
/* Size the widget to fit `text` and numItems rows in the drop-down. */
AG_UComboSizeHint(self, text, numItems);
void
sizeHintPixels(self, w, h)
Agar::UCombo self
int w
int h
CODE:
/* Explicit pixel-based size hint. */
AG_UComboSizeHintPixels(self, w, h);
Agar::Tlist
list(self)
Agar::UCombo self
CODE:
/* Accessor for the drop-down's Tlist child widget. */
RETVAL = self->list;
OUTPUT:
RETVAL
Agar::Button
button(self)
Agar::UCombo self
CODE:
/* Accessor for the combo's expander Button child widget. */
RETVAL = self->button;
OUTPUT:
RETVAL
void
setFlag(self, name)
Agar::UCombo self
const char * name
CODE:
/* Try the UCombo flag table first; on a miss (nonzero return), fall
 * back to the generic Widget flag table. NOTE(review): relies on
 * AP_SetNamedFlag returning nonzero when the name is absent — confirm
 * in the AP flag-glue helpers. */
if (AP_SetNamedFlag(name, apUcomboFlagNames, &(self->flags))) {
AP_SetNamedFlag(name, apWidgetFlagNames, &(AGWIDGET(self)->flags));
}
void
unsetFlag(self, name)
Agar::UCombo self
const char * name
CODE:
/* Same two-level lookup as setFlag, clearing instead of setting. */
if (AP_UnsetNamedFlag(name, apUcomboFlagNames, &(self->flags))) {
AP_UnsetNamedFlag(name, apWidgetFlagNames, &(AGWIDGET(self)->flags));
}
Uint
getFlag(self, name)
Agar::UCombo self
const char * name
CODE:
/* Returns the flag's value, or undef when the name matches neither the
 * UCombo nor the generic Widget flag tables. */
if (AP_GetNamedFlag(name, apUcomboFlagNames, self->flags, &RETVAL)) {
if (AP_GetNamedFlag(name, apWidgetFlagNames, AGWIDGET(self)->flags,
&RETVAL)) { XSRETURN_UNDEF; }
}
OUTPUT:
RETVAL
| XS | 4 | auzkok/libagar | p5-Agar/Agar/UCombo.xs | [
"BSD-2-Clause"
] |
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract KeeperBase {
error OnlySimulatedBackend();
/**
* @notice method that allows execution only when simulated via eth_call,
* by checking that the transaction origin is the zero address (a real
* transaction can never originate from address(0); the comment in the
* original referred to the "sender" but the check is on tx.origin).
*/
function preventExecution() internal view {
if (tx.origin != address(0)) {
revert OnlySimulatedBackend();
}
}
/**
* @notice modifier wrapper around preventExecution: the guarded function
* body only runs in a simulated eth_call context.
*/
modifier cannotExecute() {
preventExecution();
_;
}
}
| Solidity | 4 | solidity-external-tests/chainlink | contracts/src/v0.8/KeeperBase.sol | [
"MIT"
] |
// Copyright 2010-2012 RethinkDB, all rights reserved.
#ifndef CONCURRENCY_QUEUE_UNLIMITED_FIFO_HPP_
#define CONCURRENCY_QUEUE_UNLIMITED_FIFO_HPP_
#include <list>
#include "concurrency/queue/passive_producer.hpp"
#include "perfmon/perfmon.hpp"
/* `unlimited_fifo_queue_t` is one of the simplest possible implementations of
`passive_producer_t`. It's a first-in, first-out queue. It's called "unlimited"
to emphasize that it can grow to an arbitrary size, which could be dangerous.
It's templated on an underlying data structure so that you can use an
`intrusive_list_t` or something like that if you prefer. */
namespace unlimited_fifo_queue {
/* Overloads that fetch (without removing) the front element of the two
 * supported underlying containers: std::list yields the value itself... */
template <class T>
T get_front_of_list(const std::list<T>& list) {
return list.front();
}
/* ...while intrusive_list_t hands out a pointer to its head node. */
template <class T>
T *get_front_of_list(const intrusive_list_t<T>& list) {
return list.head();
}
}  // namespace unlimited_fifo_queue
template<class value_t, class queue_t = std::list<value_t> >
struct unlimited_fifo_queue_t : public passive_producer_t<value_t> {
unlimited_fifo_queue_t()
: passive_producer_t<value_t>(&available_control),
counter(nullptr)
{ }
/* `_counter` is incremented on push and decremented on pop so perfmon
 * can report the queue's current depth. */
explicit unlimited_fifo_queue_t(perfmon_counter_t *_counter)
: passive_producer_t<value_t>(&available_control),
counter(_counter)
{ }
/* Enqueue `value` and tell the passive producer that data is available. */
void push(const value_t& value) {
if (counter) {
++(*counter);
}
queue.push_back(value);
available_control.set_available(!queue.empty());
}
/* NOTE(review): narrows queue_t::size() (typically size_t) to int. */
int size() {
return queue.size();
}
private:
availability_control_t available_control;
/* Called by the passive_producer_t machinery: pop and return the front
 * element, updating availability for the next consumer. */
value_t produce_next_value() {
if (counter) {
--(*counter);
}
value_t v = unlimited_fifo_queue::get_front_of_list(queue);
queue.pop_front();
available_control.set_available(!queue.empty());
return v;
}
queue_t queue;
perfmon_counter_t *counter;
DISABLE_COPYING(unlimited_fifo_queue_t);
};
#endif /* CONCURRENCY_QUEUE_UNLIMITED_FIFO_HPP_ */
| C++ | 5 | zadcha/rethinkdb | src/concurrency/queue/unlimited_fifo.hpp | [
"Apache-2.0"
] |
"""
# Strings package
The Strings package provides utilities for working with sequences of strings.
"""
| Pony | 0 | rtpax/ponyc | packages/strings/strings.pony | [
"BSD-2-Clause"
] |
/*
* Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <AK/CharacterTypes.h>
#include <LibWeb/UIEvents/KeyboardEvent.h>
namespace Web::UIEvents {
// https://www.w3.org/TR/uievents/#determine-keydown-keyup-keyCode
// Maps a platform key (plus the code point it would insert without
// modifiers) onto the legacy KeyboardEvent.keyCode value, following the
// UI Events determination algorithm step by step.
static unsigned long determine_key_code(KeyCode platform_key, u32 code_point)
{
// If input key when pressed without modifiers would insert a numerical character (0-9), return the ASCII code of that numerical character.
if (is_ascii_digit(code_point))
return code_point;
// If input key when pressed without modifiers would insert a lower case character in the a-z alphabetical range, return the ASCII code of the upper case equivalent.
if (is_ascii_lower_alpha(code_point))
return to_ascii_uppercase(code_point);
// If the key’s function, as determined in an implementation-specific way, corresponds to one of the keys in the §8.3.3 Fixed virtual key codes table, return the corresponding key code.
// https://www.w3.org/TR/uievents/#fixed-virtual-key-codes
switch (platform_key) {
case KeyCode::Key_Backspace:
return 8;
case KeyCode::Key_Tab:
return 9;
case KeyCode::Key_Return:
return 13;
case KeyCode::Key_Shift:
return 16;
case KeyCode::Key_Control:
return 17;
case KeyCode::Key_Alt:
return 18;
case KeyCode::Key_CapsLock:
return 20;
case KeyCode::Key_Escape:
return 27;
case KeyCode::Key_Space:
return 32;
case KeyCode::Key_PageUp:
return 33;
case KeyCode::Key_PageDown:
return 34;
case KeyCode::Key_End:
return 35;
case KeyCode::Key_Home:
return 36;
case KeyCode::Key_Left:
return 37;
case KeyCode::Key_Up:
return 38;
case KeyCode::Key_Right:
return 39;
case KeyCode::Key_Down:
return 40;
default:
break;
}
// Return the virtual key code from the operating system.
return platform_key;
}
// Builds a DOM KeyboardEvent (keydown/keyup/...) from a platform key
// event: key code, modifier flags, and the code point the key inserts.
NonnullRefPtr<KeyboardEvent> KeyboardEvent::create_from_platform_event(FlyString const& event_name, KeyCode platform_key, unsigned modifiers, u32 code_point)
{
// FIXME: Figure out what these should actually contain.
String event_key = key_code_to_string(platform_key);
String event_code = "FIXME";
// Legacy keyCode field, derived via the UI Events algorithm above.
auto key_code = determine_key_code(platform_key, code_point);
KeyboardEventInit event_init {};
event_init.key = move(event_key);
event_init.code = move(event_code);
event_init.location = 0;
event_init.ctrl_key = modifiers & Mod_Ctrl;
event_init.shift_key = modifiers & Mod_Shift;
event_init.alt_key = modifiers & Mod_Alt;
// NOTE(review): meta/repeat/is_composing are hard-coded to false here —
// presumably not yet plumbed through from the platform event; confirm.
event_init.meta_key = false;
event_init.repeat = false;
event_init.is_composing = false;
event_init.key_code = key_code;
event_init.char_code = code_point;
event_init.bubbles = true;
event_init.cancelable = true;
event_init.composed = true;
return KeyboardEvent::create(event_name, event_init);
}
// Reports whether the named modifier was active for this event.
// Unrecognized names (including modifiers we do not track) report false.
bool KeyboardEvent::get_modifier_state(String const& key_arg)
{
    if (key_arg == "Shift")
        return m_shift_key;
    if (key_arg == "Control")
        return m_ctrl_key;
    if (key_arg == "Alt")
        return m_alt_key;
    if (key_arg == "Meta")
        return m_meta_key;
    return false;
}
}
| C++ | 5 | r00ster91/serenity | Userland/Libraries/LibWeb/UIEvents/KeyboardEvent.cpp | [
"BSD-2-Clause"
] |
h1. About Edlo
!https://github.com/ehamiter/Edlo/raw/master/edlo.png!
h3. Version 1.00
Edlo is a modified version of Stephen G. Hartke's "Aurulent Sans":http://www.dafont.com/aurulent-sans-mono.font, which I thought was beautiful, but hazardous for programming because the similarities between zeroes and ohs and ones and els were too close for comfort.
Edlo is the marriage of these edits and Mr. Hartke's original vision. The zero now has a dot in the middle of it, and the lowercase el has its left leg chopped off, making all of these characters unique.
h3. Installation
If you have "homebrew":http://brew.sh/ and "cask":http://caskroom.io/ installed, you can install it simply using
@brew tap caskroom/fonts # If you've already tapped this cask, no need to do it again@
@brew cask install font-edlo@
If you would like to do it manually, click "Downloads" at the top right, then download the zipped source (.tar.gz or .zip). Unarchive and install "edlo.ttf" on your local machine.
h3. Sublime Text 2 Specific
I have these font options set up in my User File Preferences:
bc. {
[...]
"font_face": "Edlo",
"font_options":
[
"gray_antialias"
],
"font_size": 14,
[...]
}
h3. Copyright and Trademark Information
Aurulent Sans: Copyright © 2007 Stephen G. Hartke
Edlo: Copyright © 2011 by Eric Hamiter
All other brands and product names not specifically listed are trademarks or registered trademarks of their respective owners.
h3. License
To my understanding, Mr. Hartke's license allows modification as long as the license continues and the font is renamed. This font is free to use and free to distribute, but the original license below cannot be changed.
SIL Open Font License 1.1
This Font Software is licensed under the SIL Open Font License, Version 1.1. This license is copied below, and is also available with a FAQ at: "http://scripts.sil.org/OFL":http://scripts.sil.org/OFL
—————————————————————————————-
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
—————————————————————————————-
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide development of collaborative font projects, to support the font creation efforts of academic and linguistic communities, and to provide a free and open framework in which fonts may be shared and improved in partnership with others.
The OFL allows the licensed fonts to be used, studied, modified and redistributed freely as long as they are not sold by themselves. The fonts, including any derivative works, can be bundled, embedded, redistributed and/or sold with any software provided that any reserved names are not used by derivative works. The fonts and derivatives, however, cannot be released under any other type of license. The requirement for fonts to remain under this license does not apply to any document created using the fonts or their derivatives.
DEFINITIONS
“Font Software” refers to the set of files released by the Copyright Holder(s) under this license and clearly marked as such. This may include source files, build scripts and documentation.
“Reserved Font Name” refers to any names specified as such after the copyright statement(s).
“Original Version” refers to the collection of Font Software components as distributed by the Copyright Holder(s).
“Modified Version” refers to any derivative made by adding to, deleting, or substituting—in part or in whole—any of the components of the
Original Version, by changing formats or by porting the Font Software to a new environment.
“Author” refers to any designer, engineer, programmer, technical writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining a copy of the Font Software, to use, study, copy, merge, embed, modify, redistribute, and sell modified and unmodified copies of the Font Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components, in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled, redistributed and/or sold with any software, provided that each copy contains the above copyright notice and this license. These can be included either as stand-alone text files, human-readable headers or in the appropriate machine-readable metadata fields within text or binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font Name(s) unless explicit written permission is granted by the corresponding Copyright Holder. This restriction only applies to the primary font name as presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font Software shall not be used to promote, endorse or advertise any Modified Version, except to acknowledge the contribution(s) of the Copyright Holder(s) and the Author(s) or with their explicit written permission.
5) The Font Software, modified or unmodified, in part or in whole, must be distributed entirely under this license, and must not be distributed under any other license. The requirement for fonts to remain under this license does not apply to any document created using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
| Textile | 1 | riddopic/programmingfonts | fonts/resources/edlo/README.textile | [
"MIT"
] |
// Clear-color constants. All four cbuffers share register b0; the pixel
// shaders below are alternatives, so presumably only one binding is live
// per pipeline — confirm against the backend's clear setup.
cbuffer ClearColorF32 : register(b0) { float4 ClearF32; };
cbuffer ClearColorU32 : register(b0) { uint4 ClearU32; };
cbuffer ClearColorI32 : register(b0) { int4 ClearI32; };
cbuffer ClearColorDepth : register(b0) { float ClearDepth; };
// fullscreen triangle
// Emits (-1,-1), (-1,3), (3,-1) for ids 0..2: an oversized triangle that
// the rasterizer clips down to exactly cover the viewport.
float4 vs_partial_clear(uint id : SV_VertexID) : SV_Position
{
return float4(
float(id / 2) * 4.0 - 1.0,
float(id % 2) * 4.0 - 1.0,
0.0,
1.0
);
}
// TODO: send constants through VS as flat attributes
float4 ps_partial_clear_float() : SV_Target0 { return ClearF32; }
uint4 ps_partial_clear_uint() : SV_Target0 { return ClearU32; }
int4 ps_partial_clear_int() : SV_Target0 { return ClearI32; }
float ps_partial_clear_depth() : SV_Depth { return ClearDepth; }
// Stencil-only clear: no color/depth output; the bound stencil state
// does the work.
void ps_partial_clear_stencil() { }
| HLSL | 4 | Moxinilian/gfx | src/backend/dx11/shaders/clear.hlsl | [
"Apache-2.0",
"MIT"
] |
--TEST--
CURLOPT_SSL* basic client auth tests
--EXTENSIONS--
curl
--SKIPIF--
<?php
if (!function_exists("proc_open")) die("skip no proc_open");
exec('openssl version', $out, $code);
if ($code > 0) die("skip couldn't locate openssl binary");
if (PHP_OS_FAMILY === 'Windows') die('skip not for Windows');
$curl_version = curl_version();
if ($curl_version['version_number'] < 0x074700) {
die("skip: blob options not supported for curl < 7.71.0");
}
?>
--FILE--
<?php
function check_error(CurlHandle $ch) {
    // Print the cURL error number when the last transfer on $ch failed.
    $errno = curl_errno($ch);
    if ($errno !== 0) {
        printf("CURL ERROR: %d\n", $errno);
    }
}
function check_response($response, $clientCertSubject) {
    // Report whether the expected client-certificate subject line appears
    // in the body returned by `openssl s_server -www`. Uses PHP 8's
    // str_contains (the file already requires PHP 8 via CurlHandle).
    if (str_contains((string) $response, $clientCertSubject)) {
        echo "client cert subject in response\n";
    } else {
        echo "client cert subject not in response\n";
    }
}
$clientCertSubject = "Subject: C=US, ST=TX, L=Clientlocation, O=Clientcompany, CN=clientname/emailAddress=test@example.com";
// load server cert
$serverCertPath = __DIR__ . DIRECTORY_SEPARATOR . 'curl_setopt_ssl_servercert.pem';
$serverCert = file_get_contents($serverCertPath);
// load server key
$serverKeyPath = __DIR__ . DIRECTORY_SEPARATOR . 'curl_setopt_ssl_serverkey.pem';
$serverKey = file_get_contents($serverKeyPath);
// load client cert
$clientCertPath = __DIR__ . DIRECTORY_SEPARATOR . 'curl_setopt_ssl_clientcert.pem';
$clientCert = file_get_contents($clientCertPath);
// load client key
$clientKeyPath = __DIR__ . DIRECTORY_SEPARATOR . 'curl_setopt_ssl_clientkey.pem';
$clientKey = file_get_contents($clientKeyPath);
if ($serverCert === false
|| $serverKey === false
|| $clientCert === false
|| $clientKey === false
) {
die('failed to load test certs and keys for files');
}
$port = 14430;
// set up local server
$cmd = "openssl s_server -key $serverKeyPath -cert $serverCertPath -accept $port -www -CAfile $clientCertPath -verify_return_error -Verify 1";
$process = proc_open($cmd, [["pipe", "r"], ["pipe", "w"], ["pipe", "w"]], $pipes);
if ($process === false) {
die('failed to start server');
}
try {
// Give the server time to start
sleep(1);
echo "case 1: client cert and key from string\n";
$ch = curl_init("https://127.0.0.1:$port/");
var_dump(curl_setopt($ch, CURLOPT_SSLCERT_BLOB, $clientCert));
var_dump(curl_setopt($ch, CURLOPT_SSLKEY_BLOB, $clientKey));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$response = curl_exec($ch);
check_response($response, $clientCertSubject);
check_error($ch);
curl_close($ch);
echo "\n";
echo "case 2: empty client cert and key from string\n";
$ch = curl_init("https://127.0.0.1:$port/");
var_dump(curl_setopt($ch, CURLOPT_SSLCERT_BLOB, ''));
var_dump(curl_setopt($ch, CURLOPT_SSLKEY_BLOB, $clientKey));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$response = curl_exec($ch);
check_response($response, $clientCertSubject);
check_error($ch);
curl_close($ch);
echo "\n";
echo "case 3: client cert and empty key from string\n";
$ch = curl_init("https://127.0.0.1:$port/");
var_dump(curl_setopt($ch, CURLOPT_SSLCERT_BLOB, $clientCert));
var_dump(curl_setopt($ch, CURLOPT_SSLKEY_BLOB, ''));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$response = curl_exec($ch);
check_response($response, $clientCertSubject);
check_error($ch);
curl_close($ch);
echo "\n";
echo "case 4: client cert and key from file\n";
$ch = curl_init("https://127.0.0.1:$port/");
var_dump(curl_setopt($ch, CURLOPT_SSLCERT, $clientCertPath));
var_dump(curl_setopt($ch, CURLOPT_SSLKEY, $clientKeyPath));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$response = curl_exec($ch);
check_response($response, $clientCertSubject);
check_error($ch);
curl_close($ch);
echo "\n";
echo "case 5: issuer cert from file\n";
$ch = curl_init("https://127.0.0.1:$port/");
var_dump(curl_setopt($ch, CURLOPT_CAINFO, $serverCertPath));
var_dump(curl_setopt($ch, CURLOPT_ISSUERCERT, $serverCertPath));
var_dump(curl_setopt($ch, CURLOPT_SSLCERT, $clientCertPath));
var_dump(curl_setopt($ch, CURLOPT_SSLKEY, $clientKeyPath));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, true));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$response = curl_exec($ch);
check_response($response, $clientCertSubject);
check_error($ch);
curl_close($ch);
echo "\n";
echo "case 6: issuer cert from string\n";
$ch = curl_init("https://127.0.0.1:$port/");
var_dump(curl_setopt($ch, CURLOPT_CAINFO, $serverCertPath));
var_dump(curl_setopt($ch, CURLOPT_ISSUERCERT_BLOB, $serverCert));
var_dump(curl_setopt($ch, CURLOPT_SSLCERT, $clientCertPath));
var_dump(curl_setopt($ch, CURLOPT_SSLKEY, $clientKeyPath));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, true));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$response = curl_exec($ch);
check_response($response, $clientCertSubject);
check_error($ch);
curl_close($ch);
echo "\n";
echo "case 7: empty issuer cert from string\n";
$ch = curl_init("https://127.0.0.1:$port/");
var_dump(curl_setopt($ch, CURLOPT_CAINFO, $serverCertPath));
var_dump(curl_setopt($ch, CURLOPT_ISSUERCERT_BLOB, ''));
var_dump(curl_setopt($ch, CURLOPT_SSLCERT, $clientCertPath));
var_dump(curl_setopt($ch, CURLOPT_SSLKEY, $clientKeyPath));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, true));
var_dump(curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$response = curl_exec($ch);
check_response($response, $clientCertSubject);
check_error($ch);
curl_close($ch);
} finally {
// clean up server process
proc_terminate($process);
proc_close($process);
}
?>
--EXPECT--
case 1: client cert and key from string
bool(true)
bool(true)
bool(true)
bool(true)
client cert subject in response
case 2: empty client cert and key from string
bool(true)
bool(true)
bool(true)
bool(true)
client cert subject not in response
CURL ERROR: 58
case 3: client cert and empty key from string
bool(true)
bool(true)
bool(true)
bool(true)
client cert subject not in response
CURL ERROR: 58
case 4: client cert and key from file
bool(true)
bool(true)
bool(true)
bool(true)
client cert subject in response
case 5: issuer cert from file
bool(true)
bool(true)
bool(true)
bool(true)
bool(true)
bool(true)
client cert subject in response
case 6: issuer cert from string
bool(true)
bool(true)
bool(true)
bool(true)
bool(true)
bool(true)
client cert subject in response
case 7: empty issuer cert from string
bool(true)
bool(true)
bool(true)
bool(true)
bool(true)
bool(true)
client cert subject not in response
CURL ERROR: 83 | PHP | 4 | NathanFreeman/php-src | ext/curl/tests/curl_setopt_ssl.phpt | [
"PHP-3.01"
] |
TDObjectGatewayLeafNode{#name:'Metacello',#contents:'| repoSpec gitCheckout |
gitCheckout := GsFile _expandEnvVariable: \'GS_SHARED_GIT_CHECKOUT_METACELLO\' isClient: false.
repoSpec := GsFile _expandEnvVariable: \'GS_SHARED_REPO_METACELLO\' isClient: false.
^ TDProjectSpecEntryDefinition new
baseline: \'Metacello\'
repository: repoSpec
loads: #(\'default\');
gitCheckout: gitCheckout;
status: #(#\'inactive\');
locked: true;
yourself'}
| STON | 3 | ahdach/GsDevKit_home | sys/default/server/projects/Metacello.ston | [
"MIT"
] |
\application\APD\demo_dll.exe > \program files\demo_dll.exe
\application\APD\APD Print demo.lnk > \windows\start menu\APD Print demo.lnk
\application\APD\prtlib.dll > \windows\prtlib.dll
| COBOL | 0 | mensfeld/rhodes | neon/Helium/HeliumForWindows/AdditionalLibraries/APD Kit v2-19-06/demo_app/APD-demo.cpy | [
"MIT"
] |
impl<T, const SIZE: usize> IntoNormalized for Vector<T, { SIZE }>
where
Vector<T, { SIZE }>: Div<Vector<T, { SIZE }>>,
for<'a> &'a Vector<T, { SIZE }>: IntoLength<Output = T>,
{
type Output = Vector<T, { SIZE }>;
fn into_normalized(self) -> Self::Output {
}
}
| Rust | 3 | mbc-git/rust | src/tools/rustfmt/tests/source/issue-3740.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
import io.vertx.ceylon.platform {
Verticle,
Container
}
import io.vertx.ceylon.core {
Vertx
}
import io.vertx.ceylon.core.net {
NetSocket
}
import org.vertx.java.core.buffer {
Buffer
}
shared class FanoutServer() extends Verticle() {
shared actual void start(Vertx vertx, Container container) {
value connections = vertx.sharedData.getSet<String>("conns");
vertx.createNetServer().connectHandler(void(NetSocket sock) {
connections.add(sock.writeHandlerID);
sock.readStream.dataHandler(void(Buffer buffer) {
for (actorID in connections) {
vertx.eventBus.publish(actorID, buffer);
}
});
sock.closeHandler().onComplete((Anything anything) => connections.remove(sock.writeHandlerID));
}).listen(1234);
}
}
| Ceylon | 4 | vietj/vertx-examples | src/raw/ceylon/fanout/FanoutServer.ceylon | [
"Apache-2.0"
] |
v
>0010p20p130p25*2*6+40p&v
> 1- v <
>10p30g20p30p^ :
^ g02%g04++g03g02g01_10g.@
| Befunge | 0 | SuprDewd/BefungeSimulator | befunge_code/codeforces_130/e.befunge | [
"MIT"
] |
(set-info :smt-lib-version 2.6)
(set-logic QF_UF)
(set-info :source |
Generated by: Aman Goel (amangoel@umich.edu), Karem A. Sakallah (karem@umich.edu)
Generated on: 2018-04-06
Generated by the tool Averroes 2 (successor of [1]) which implements safety property
verification on hardware systems.
This SMT problem belongs to a set of SMT problems generated by applying Averroes 2
to benchmarks derived from [2-5].
A total of 412 systems (345 from [2], 19 from [3], 26 from [4], 22 from [5]) were
syntactically converted from their original formats (using [6, 7]), and given to
Averroes 2 to perform property checking with abstraction (wide bit-vectors -> terms,
wide operators -> UF) using SMT solvers [8, 9].
[1] Lee S., Sakallah K.A. (2014) Unbounded Scalable Verification Based on Approximate
Property-Directed Reachability and Datapath Abstraction. In: Biere A., Bloem R. (eds)
Computer Aided Verification. CAV 2014. Lecture Notes in Computer Science, vol 8559.
Springer, Cham
[2] http://fmv.jku.at/aiger/index.html#beem
[3] http://www.cs.cmu.edu/~modelcheck/vcegar
[4] http://www.cprover.org/hardware/v2c
[5] http://github.com/aman-goel/verilogbench
[6] http://www.clifford.at/yosys
[7] http://github.com/chengyinwu/V3
[8] http://github.com/Z3Prover/z3
[9] http://github.com/SRI-CSL/yices2
id: sw_loop_v
query-maker: "Yices 2"
query-time: 0.001000 ms
query-class: abstract
query-category: oneshot
query-type: regular
status: unsat
|)
(set-info :license "https://creativecommons.org/licenses/by/4.0/")
(set-info :category "industrial")
;
(set-info :status unsat)
(declare-sort utt$6 0)
(declare-fun y$10 () Bool)
(declare-fun y$12 () Bool)
(declare-fun y$16 () Bool)
(declare-fun y$167 () Bool)
(declare-fun y$174 () Bool)
(declare-fun y$178 () Bool)
(declare-fun y$2 () Bool)
(declare-fun y$4 () Bool)
(declare-fun y$6 () Bool)
(declare-fun y$8 () Bool)
(declare-fun y$L0 () Bool)
(declare-fun y$L1 () Bool)
(declare-fun y$L2 () Bool)
(declare-fun y$L3 () Bool)
(declare-fun y$L4 () Bool)
(declare-fun y$L5 () Bool)
(declare-fun y$L6 () Bool)
(declare-fun y$LoneHot () Bool)
(declare-fun y$X () utt$6)
(declare-fun y$n17s6 () utt$6)
(declare-fun y$n19s6 () utt$6)
(declare-fun y$n1s6 () utt$6)
(declare-fun y$n3s6 () utt$6)
(declare-fun y$prop () Bool)
(assert (distinct y$n1s6 y$n19s6 y$n3s6 y$n17s6))
(assert (= y$L1 (not y$2)))
(assert (= y$L2 (not y$4)))
(assert (= y$L3 (not y$6)))
(assert (= y$L4 (not y$8)))
(assert (= y$L5 (not y$10)))
(assert (= y$L6 (not y$12)))
(assert (= y$16 (= y$n1s6 y$X)))
(assert (= y$prop (not y$174)))
(assert (= y$167 (= y$12 y$prop)))
(assert (= y$178 (and y$L0 y$2 y$4 y$6 y$8 y$10 y$12 y$LoneHot y$16 y$174 y$167)))
(assert y$178)
(check-sat)
(exit)
| SMT | 3 | livinlife6751/infer | sledge/test/smt/QF_UF/2018-Goel-hwbench/QF_UF_sw_loop_v_ab_reg_max.smt2 | [
"MIT"
] |
scriptname _DE_CampEnchantingObject extends ObjectReference
message property _DE_EnchantingSelect auto
message property _DE_FurniturePickUpError auto
MiscObject property _DE_EnchantingMISC auto
Sound property ITMGenericUp auto
GlobalVariable property _DE_CampsitePlacementOn auto
Event OnInit()
while !self.Is3DLoaded()
endWhile
self.BlockActivation()
endEvent
Event OnActivate(ObjectReference akActionRef)
GoToState("ActivationLock")
if !((akActionRef as Actor) == Game.GetPlayer())
GoToState("")
return
endif
if _DE_CampsitePlacementOn.GetValueInt() == 2
;Don't allow activation during placement.
GoToState("")
return
endif
int i = _DE_EnchantingSelect.Show()
if i == 0 ;Use
self.Activate(akActionRef, true)
elseif i == 1
;Pack up enchanter's circle
if self.IsFurnitureInUse()
_DE_FurniturePickUpError.Show()
else
akActionRef.AddItem(_DE_EnchantingMISC, abSilent = true)
utility.wait(0.2)
ITMGenericUp.Play(self)
self.Disable()
self.Delete()
endif
else
;Do nothing
endif
GoToState("")
endEvent
State ActivationLock
Event OnActivate(ObjectReference akActionRef)
;debug.trace("[FROSTFALL] Duplicate activation ignored...")
endEvent
endState | Papyrus | 5 | chesko256/Campfire | Scripts/Source/_DE_CampEnchantingObject.psc | [
"MIT"
] |
--TEST--
$http_reponse_header (no redirect)
--SKIPIF--
<?php require 'server.inc'; http_server_skipif(); ?>
--INI--
allow_url_fopen=1
--FILE--
<?php
require 'server.inc';
$responses = array(
"data://text/plain,HTTP/1.0 200 Ok\r\nSome: Header\r\nSome: Header\r\n\r\nBody",
);
['pid' => $pid, 'uri' => $uri] = http_server($responses, $output);
$f = file_get_contents($uri);
var_dump($f);
var_dump($http_response_header);
http_server_kill($pid);
--EXPECT--
string(4) "Body"
array(3) {
[0]=>
string(15) "HTTP/1.0 200 Ok"
[1]=>
string(12) "Some: Header"
[2]=>
string(12) "Some: Header"
}
| PHP | 3 | NathanFreeman/php-src | ext/standard/tests/http/http_response_header_01.phpt | [
"PHP-3.01"
] |
module Search.Negation
import Data.List.Quantifiers
import Data.Nat
%default total
||| It is much easier to look for positive evidence than it is to look
||| for negative evidence. So instead of looking for `Not q`, we may
||| want to look for `p` instead
public export
interface Negates p q | q where
toNegation : p -> Not q
public export
({0 x : a} -> Negates (p x) (q x)) => Negates (All p xs) (Any q xs) where
toNegation all = allNegAny (mapProperty toNegation all)
public export
({0 x : a} -> Negates (p x) (q x)) => Negates (Any p xs) (All q xs) where
toNegation any = anyNegAll (mapProperty toNegation any)
public export
Negates (m `LT` n) (m `GTE` n) where
toNegation = LTImpliesNotGTE
public export
Negates (m `LTE` n) (m `GT` n) where
toNegation = LTEImpliesNotGT
| Idris | 4 | ska80/idris-jvm | libs/contrib/Search/Negation.idr | [
"BSD-3-Clause"
] |
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "M20.69 4.05C18.66 4.73 15.86 5.5 12 5.5c-3.89 0-6.95-.84-8.69-1.43-.64-.22-1.31.26-1.31.95V19c0 .68.66 1.17 1.31.95C5.36 19.26 8.1 18.5 12 18.5c3.87 0 6.66.76 8.69 1.45.65.21 1.31-.27 1.31-.95V5c0-.68-.66-1.16-1.31-.95zm-3.41 11.21C15.62 15.1 13.84 15 12 15c-1.87 0-3.63.1-5.28.27-.45.04-.72-.48-.43-.82l2.5-3c.2-.24.57-.24.77 0l1.62 1.94 2.44-2.93c.2-.24.57-.24.77 0l3.32 3.99c.28.34.01.86-.43.81z"
}), 'VrpanoRounded');
exports.default = _default; | JavaScript | 2 | good-gym/material-ui | packages/material-ui-icons/lib/VrpanoRounded.js | [
"MIT"
] |
.. _NEP21:
==================================================
NEP 21 — Simplified and explicit advanced indexing
==================================================
:Author: Sebastian Berg
:Author: Stephan Hoyer <shoyer@google.com>
:Status: Draft
:Type: Standards Track
:Created: 2015-08-27
Abstract
--------
NumPy's "advanced" indexing support for indexing array with other arrays is
one of its most powerful and popular features. Unfortunately, the existing
rules for advanced indexing with multiple array indices are typically confusing
to both new, and in many cases even old, users of NumPy. Here we propose an
overhaul and simplification of advanced indexing, including two new "indexer"
attributes ``oindex`` and ``vindex`` to facilitate explicit indexing.
Background
----------
Existing indexing operations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NumPy arrays currently support a flexible range of indexing operations:
- "Basic" indexing involving only slices, integers, ``np.newaxis`` and ellipsis
(``...``), e.g., ``x[0, :3, np.newaxis]`` for selecting the first element
from the 0th axis, the first three elements from the 1st axis and inserting a
new axis of size 1 at the end. Basic indexing always return a view of the
indexed array's data.
- "Advanced" indexing, also called "fancy" indexing, includes all cases where
arrays are indexed by other arrays. Advanced indexing always makes a copy:
- "Boolean" indexing by boolean arrays, e.g., ``x[x > 0]`` for
selecting positive elements.
- "Vectorized" indexing by one or more integer arrays, e.g., ``x[[0, 1]]``
for selecting the first two elements along the first axis. With multiple
arrays, vectorized indexing uses broadcasting rules to combine indices along
multiple dimensions. This allows for producing a result of arbitrary shape
with arbitrary elements from the original arrays.
- "Mixed" indexing involving any combinations of the other advancing types.
This is no more powerful than vectorized indexing, but is sometimes more
convenient.
For clarity, we will refer to these existing rules as "legacy indexing".
This is only a high-level summary; for more details, see NumPy's documentation
and `Examples` below.
Outer indexing
~~~~~~~~~~~~~~
One broadly useful class of indexing operations is not supported:
- "Outer" or orthogonal indexing treats one-dimensional arrays equivalently to
slices for determining output shapes. The rule for outer indexing is that the
result should be equivalent to independently indexing along each dimension
with integer or boolean arrays as if both the indexed and indexing arrays
were one-dimensional. This form of indexing is familiar to many users of other
programming languages such as MATLAB, Fortran and R.
The reason why NumPy omits support for outer indexing is that the rules for
outer and vectorized conflict. Consider indexing a 2D array by two 1D integer
arrays, e.g., ``x[[0, 1], [0, 1]]``:
- Outer indexing is equivalent to combining multiple integer indices with
``itertools.product()``. The result in this case is another 2D array with
all combinations of indexed elements, e.g.,
``np.array([[x[0, 0], x[0, 1]], [x[1, 0], x[1, 1]]])``
- Vectorized indexing is equivalent to combining multiple integer indices with
``zip()``. The result in this case is a 1D array containing the diagonal
elements, e.g., ``np.array([x[0, 0], x[1, 1]])``.
This difference is a frequent stumbling block for new NumPy users. The outer
indexing model is easier to understand, and is a natural generalization of
slicing rules. But NumPy instead chose to support vectorized indexing, because
it is strictly more powerful.
It is always possible to emulate outer indexing by vectorized indexing with
the right indices. To make this easier, NumPy includes utility objects and
functions such as ``np.ogrid`` and ``np.ix_``, e.g.,
``x[np.ix_([0, 1], [0, 1])]``. However, there are no utilities for emulating
fully general/mixed outer indexing, which could unambiguously allow for slices,
integers, and 1D boolean and integer arrays.
Mixed indexing
~~~~~~~~~~~~~~
NumPy's existing rules for combining multiple types of indexing in the same
operation are quite complex, involving a number of edge cases.
One reason why mixed indexing is particularly confusing is that at first glance
the result works deceptively like outer indexing. Returning to our example of a
2D array, both ``x[:2, [0, 1]]`` and ``x[[0, 1], :2]`` return 2D arrays with
axes in the same order as the original array.
However, as soon as two or more non-slice objects (including integers) are
introduced, vectorized indexing rules apply. The axes introduced by the array
indices are at the front, unless all array indices are consecutive, in which
case NumPy deduces where the user "expects" them to be. Consider indexing a 3D
array ``arr`` with shape ``(X, Y, Z)``:
1. ``arr[:, [0, 1], 0]`` has shape ``(X, 2)``.
2. ``arr[[0, 1], 0, :]`` has shape ``(2, Z)``.
3. ``arr[0, :, [0, 1]]`` has shape ``(2, Y)``, not ``(Y, 2)``!
These first two cases are intuitive and consistent with outer indexing, but
this last case is quite surprising, even to many highly experienced NumPy users.
Mixed cases involving multiple array indices are also surprising, and only
less problematic because the current behavior is so useless that it is rarely
encountered in practice. When a boolean array index is mixed with another boolean or
integer array, boolean array is converted to integer array indices (equivalent
to ``np.nonzero()``) and then broadcast. For example, indexing a 2D array of
size ``(2, 2)`` like ``x[[True, False], [True, False]]`` produces a 1D vector
with shape ``(1,)``, not a 2D sub-matrix with shape ``(1, 1)``.
Mixed indexing seems so tricky that it is tempting to say that it never should
be used. However, it is not easy to avoid, because NumPy implicitly adds full
slices if there are fewer indices than the full dimensionality of the indexed
array. This means that indexing a 2D array like `x[[0, 1]]`` is equivalent to
``x[[0, 1], :]``. These cases are not surprising, but they constrain the
behavior of mixed indexing.
Indexing in other Python array libraries
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Indexing is a useful and widely recognized mechanism for accessing
multi-dimensional array data, so it is no surprise that many other libraries in
the scientific Python ecosystem also support array indexing.
Unfortunately, the full complexity of NumPy's indexing rules mean that it is
both challenging and undesirable for other libraries to copy its behavior in all
of its nuance. The only full implementation of NumPy-style indexing is NumPy
itself. This includes projects like dask.array and h5py, which support *most*
types of array indexing in some form, and otherwise attempt to copy NumPy's API
exactly.
Vectorized indexing in particular can be challenging to implement with array
storage backends not based on NumPy. In contrast, indexing by 1D arrays along
at least one dimension in the style of outer indexing is much more acheivable.
This has led many libraries (including dask and h5py) to attempt to define a
safe subset of NumPy-style indexing that is equivalent to outer indexing, e.g.,
by only allowing indexing with an array along at most one dimension. However,
this is quite challenging to do correctly in a general enough way to be useful.
For example, the current versions of dask and h5py both handle mixed indexing
in case 3 above inconsistently with NumPy. This is quite likely to lead to
bugs.
These inconsistencies, in addition to the broader challenge of implementing
every type of indexing logic, make it challenging to write high-level array
libraries like xarray or dask.array that can interchangeably index many types of
array storage. In contrast, explicit APIs for outer and vectorized indexing in
NumPy would provide a model that external libraries could reliably emulate, even
if they don't support every type of indexing.
High level changes
------------------
Inspired by multiple "indexer" attributes for controlling different types
of indexing behavior in pandas, we propose to:
1. Introduce ``arr.oindex[indices]`` which allows array indices, but
uses outer indexing logic.
2. Introduce ``arr.vindex[indices]`` which use the current
"vectorized"/broadcasted logic but with two differences from
legacy indexing:
* Boolean indices are not supported. All indices must be integers,
integer arrays or slices.
* The integer index result dimensions are always the first axes
of the result array. No transpose is done, even for a single
integer array index.
3. Plain indexing on arrays will start to give warnings and eventually
errors in cases where one of the explicit indexers should be preferred:
* First, in all cases where legacy and outer indexing would give
different results.
* Later, potentially in all cases involving an integer array.
These constraints are sufficient for making indexing generally consistent
with expectations and providing a less surprising learning curve with
``oindex``.
Note that all things mentioned here apply both for assignment as well as
subscription.
Understanding these details is *not* easy. The `Examples` section in the
discussion gives code examples.
And the hopefully easier `Motivational Example` provides some
motivational use-cases for the general ideas and is likely a good start for
anyone not intimately familiar with advanced indexing.
Detailed Description
--------------------
Proposed rules
~~~~~~~~~~~~~~
From the three problems noted above some expectations for NumPy can
be deduced:
1. There should be a prominent outer/orthogonal indexing method such as
``arr.oindex[indices]``.
2. Considering how confusing vectorized/fancy indexing can be, it should
be possible to be made more explicitly (e.g. ``arr.vindex[indices]``).
3. A new ``arr.vindex[indices]`` method, would not be tied to the
confusing transpose rules of fancy indexing, which is for example
needed for the simple case of a single advanced index. Thus,
no transposing should be done. The axes created by the integer array
indices are always inserted at the front, even for a single index.
4. Boolean indexing is conceptionally outer indexing. Broadcasting
together with other advanced indices in the manner of legacy
indexing is generally not helpful or well defined.
A user who wishes the "``nonzero``" plus broadcast behaviour can thus
be expected to do this manually. Thus, ``vindex`` does not need to
support boolean index arrays.
5. An ``arr.legacy_index`` attribute should be implemented to support
legacy indexing. This gives a simple way to update existing codebases
using legacy indexing, which will make the deprecation of plain indexing
behavior easier. The longer name ``legacy_index`` is intentionally chosen
to be explicit and discourage its use in new code.
6. Plain indexing ``arr[...]`` should return an error for ambiguous cases.
For the beginning, this probably means cases where ``arr[ind]`` and
``arr.oindex[ind]`` return different results give deprecation warnings.
This includes every use of vectorized indexing with multiple integer arrays.
Due to the transposing behaviour, this means that ``arr[0, :, index_arr]``
will be deprecated, but ``arr[:, 0, index_arr]`` will not for the time being.
7. To ensure that existing subclasses of `ndarray` that override indexing
do not inadvertently revert to default behavior for indexing attributes,
these attributes should have explicit checks that disable them if
``__getitem__`` or ``__setitem__`` has been overridden.
Unlike plain indexing, the new indexing attributes are explicitly aimed
at higher dimensional indexing, several additional changes should be implemented:
* The indexing attributes will enforce exact dimension and indexing match.
This means that no implicit ellipsis (``...``) will be added. Unless
an ellipsis is present the indexing expression will thus only work for
an array with a specific number of dimensions.
This makes the expression more explicit and safeguards against wrong
dimensionality of arrays.
There should be no implications for "duck typing" compatibility with
builtin Python sequences, because Python sequences only support a limited
form of "basic indexing" with integers and slices.
* The current plain indexing allows for the use of non-tuples for
multi-dimensional indexing such as ``arr[[slice(None), 2]]``.
This creates some inconsistencies and thus the indexing attributes
should only allow plain python tuples for this purpose.
(Whether or not this should be the case for plain indexing is a
different issue.)
* The new attributes should not use getitem to implement setitem,
since it is a kludge and not useful for vectorized
indexing. (not implemented yet)
Open Questions
~~~~~~~~~~~~~~
* The names ``oindex``, ``vindex`` and ``legacy_index`` are just suggestions at
the time of writing this, another name NumPy has used for something like
``oindex`` is ``np.ix_``. See also below.
* ``oindex`` and ``vindex`` could always return copies, even when no array
operation occurs. One argument for allowing a view return is that this way
``oindex`` can be used as a general index replacement.
However, there is one argument for returning copies. It is possible for
``arr.vindex[array_scalar, ...]``, where ``array_scalar`` should be
a 0-D array but is not, since 0-D arrays tend to be converted.
Copying always "fixes" this possible inconsistency.
* The final state into which plain indexing will morph is not fixed in this NEP.
It is for example possible that ``arr[index]`` will be equivalent to
``arr.oindex`` at some point in the future.
Since such a change will take years, it seems unnecessary to make
specific decisions at this time.
* The proposed changes to plain indexing could be postponed indefinitely or
not taken in order to not break or force major fixes to existing code bases.
Alternative Names
~~~~~~~~~~~~~~~~~
Possible names suggested (more suggestions will be added).
============== ============ ========
**Orthogonal** oindex oix
**Vectorized** vindex vix
**Legacy** legacy_index l/findex
============== ============ ========
Subclasses
~~~~~~~~~~
Subclasses are a bit problematic in the light of these changes. There are
some possible solutions for this. For most subclasses (those which do not
provide ``__getitem__`` or ``__setitem__``) the special attributes should
just work. Subclasses that *do* provide it must be updated accordingly
and should preferably not subclass ``oindex`` and ``vindex``.
All subclasses will inherit the attributes, however, the implementation
of ``__getitem__`` on these attributes should test
``subclass.__getitem__ is ndarray.__getitem__``. If not, the
subclass has special handling for indexing and ``NotImplementedError``
should be raised, requiring that the indexing attributes are also explicitly
overwritten. Likewise, implementations of ``__setitem__`` should check to see
if ``__setitem__`` is overridden.
A further question is how to facilitate implementing the special attributes.
Also there is the weird functionality where ``__setitem__`` calls
``__getitem__`` for non-advanced indices. It might be good to avoid it for
the new attributes, but on the other hand, that may make it even more
confusing.
To facilitate implementations we could provide functions similar to
``operator.itemgetter`` and ``operator.setitem`` for the attributes.
Possibly a mixin could be provided to help implementation. These improvements
are not essential to the initial implementation, so they are saved for
future work.
Implementation
--------------
Implementation would start with writing special indexing objects available
through ``arr.oindex``, ``arr.vindex``, and ``arr.legacy_index`` to allow these
indexing operations. Also, we would need to start to deprecate those plain index
operations which are not ambiguous.
Furthermore, the NumPy code base will need to use the new attributes and
tests will have to be adapted.
Backward compatibility
----------------------
As a new feature, no backward compatibility issues with the new ``vindex``
and ``oindex`` attributes would arise.
To facilitate backwards compatibility as much as possible, we expect a long
deprecation cycle for legacy indexing behavior and propose the new
``legacy_index`` attribute.
Some forward compatibility issues with subclasses that do not specifically
implement the new methods may arise.
Alternatives
------------
NumPy may not choose to offer these different type of indexing methods, or
choose to only offer them through specific functions instead of the proposed
notation above.
We don't think that new functions are a good alternative, because indexing
notation ``[]`` offer some syntactic advantages in Python (i.e., direct
creation of slice objects) compared to functions.
A more reasonable alternative would be to write new wrapper objects for alternative
indexing with functions rather than methods (e.g., ``np.oindex(arr)[indices]``
instead of ``arr.oindex[indices]``). Functionally, this would be equivalent,
but indexing is such a common operation that we think it is important to
minimize syntax and worth implementing it directly on `ndarray` objects
themselves. Indexing attributes also define a clear interface that is easier
for alternative array implementations to copy, notwithstanding ongoing
efforts to make it easier to override NumPy functions [2]_.
Discussion
----------
The original discussion about vectorized vs outer/orthogonal indexing arose
on the NumPy mailing list:
* https://mail.python.org/pipermail/numpy-discussion/2015-April/072550.html
Some discussion can be found on the original pull request for this NEP:
* https://github.com/numpy/numpy/pull/6256
Python implementations of the indexing operations can be found at:
* https://github.com/numpy/numpy/pull/5749
* https://gist.github.com/shoyer/c700193625347eb68fee4d1f0dc8c0c8
Examples
~~~~~~~~
Since the various kinds of indexing is hard to grasp in many cases, these
examples hopefully give some more insights. Note that they are all in terms
of shape.
In the examples, all original dimensions have 5 or more elements,
advanced indexing inserts smaller dimensions.
These examples may be hard to grasp without working knowledge of advanced
indexing as of NumPy 1.9.
Example array::
>>> arr = np.ones((5, 6, 7, 8))
Legacy fancy indexing
---------------------
Note that the same result can be achieved with ``arr.legacy_index``, but the
"future error" will still work in this case.
Single index is transposed (this is the same for all indexing types)::
>>> arr[[0], ...].shape
(1, 6, 7, 8)
>>> arr[:, [0], ...].shape
(5, 1, 7, 8)
Multiple indices are transposed *if* consecutive::
>>> arr[:, [0], [0], :].shape # future error
(5, 1, 8)
>>> arr[:, [0], :, [0]].shape # future error
(1, 5, 7)
It is important to note that a scalar *is* integer array index in this sense
(and gets broadcasted with the other advanced index)::
>>> arr[:, [0], 0, :].shape
(5, 1, 8)
>>> arr[:, [0], :, 0].shape # future error (scalar is "fancy")
(1, 5, 7)
Single boolean index can act on multiple dimensions (especially the whole
array). It has to match (as of 1.10. a deprecation warning) the dimensions.
The boolean index is otherwise identical to (multiple consecutive) integer
array indices::
>>> # Create boolean index with one True value for the last two dimensions:
>>> bindx = np.zeros((7, 8), dtype=np.bool_)
>>> bindx[0, 0] = True
>>> arr[:, 0, bindx].shape
(5, 1)
>>> arr[0, :, bindx].shape
(1, 6)
The combination with anything that is not a scalar is confusing, e.g.::
>>> arr[[0], :, bindx].shape # bindx result broadcasts with [0]
(1, 6)
>>> arr[:, [0, 1], bindx].shape # IndexError
Outer indexing
--------------
Multiple indices are "orthogonal" and their result axes are inserted
at the same place (they are not broadcasted)::
>>> arr.oindex[:, [0], [0, 1], :].shape
(5, 1, 2, 8)
>>> arr.oindex[:, [0], :, [0, 1]].shape
(5, 1, 7, 2)
>>> arr.oindex[:, [0], 0, :].shape
(5, 1, 8)
>>> arr.oindex[:, [0], :, 0].shape
(5, 1, 7)
Boolean indices results are always inserted where the index is::
>>> # Create boolean index with one True value for the last two dimensions:
>>> bindx = np.zeros((7, 8), dtype=np.bool_)
>>> bindx[0, 0] = True
>>> arr.oindex[:, 0, bindx].shape
(5, 1)
>>> arr.oindex[0, :, bindx].shape
(6, 1)
Nothing changed in the presence of other advanced indices since::
>>> arr.oindex[[0], :, bindx].shape
(1, 6, 1)
>>> arr.oindex[:, [0, 1], bindx].shape
(5, 2, 1)
Vectorized/inner indexing
-------------------------
Multiple indices are broadcasted and iterated as one like fancy indexing,
but the new axes are always inserted at the front::
>>> arr.vindex[:, [0], [0, 1], :].shape
(2, 5, 8)
>>> arr.vindex[:, [0], :, [0, 1]].shape
(2, 5, 7)
>>> arr.vindex[:, [0], 0, :].shape
(1, 5, 8)
>>> arr.vindex[:, [0], :, 0].shape
(1, 5, 7)
Boolean indices results are always inserted where the index is, exactly
as in ``oindex`` given how specific they are to the axes they operate on::
>>> # Create boolean index with one True value for the last two dimensions:
>>> bindx = np.zeros((7, 8), dtype=np.bool_)
>>> bindx[0, 0] = True
>>> arr.vindex[:, 0, bindx].shape
(5, 1)
>>> arr.vindex[0, :, bindx].shape
(6, 1)
But other advanced indices are again transposed to the front::
>>> arr.vindex[[0], :, bindx].shape
(1, 6, 1)
>>> arr.vindex[:, [0, 1], bindx].shape
(2, 5, 1)
Motivational Example
~~~~~~~~~~~~~~~~~~~~
Imagine having data acquisition software storing ``D`` channels and
``N`` datapoints along the time axis. It stores this into an ``(N, D)`` shaped
array. During data analysis, we need to fetch a pool of channels, for example
to calculate a mean over them.
This data can be faked using::
>>> arr = np.random.random((100, 10))
Now one may remember indexing with an integer array and find the correct code::
>>> group = arr[:, [2, 5]]
>>> mean_value = group.mean()
However, assume that there were some specific time points (first dimension
of the data) that need to be specially considered. These time points are
already known and given by::
>>> interesting_times = np.array([1, 5, 8, 10], dtype=np.intp)
Now to fetch them, we may try to modify the previous code::
>>> group_at_it = arr[interesting_times, [2, 5]]
IndexError: Ambiguous index, use `.oindex` or `.vindex`
An error such as this will point to read up the indexing documentation.
This should make it clear, that ``oindex`` behaves more like slicing.
So, out of the different methods it is the obvious choice
(for now, this is a shape mismatch, but that could possibly also mention
``oindex``)::
>>> group_at_it = arr.oindex[interesting_times, [2, 5]]
Now of course one could also have used ``vindex``, but it is much less
obvious how to achieve the right thing!::
>>> reshaped_times = interesting_times[:, np.newaxis]
>>> group_at_it = arr.vindex[reshaped_times, [2, 5]]
One may find, that for example our data is corrupt in some places.
So, we need to replace these values by zero (or anything else) for these
times. The first column may for example give the necessary information,
so that changing the values becomes easy remembering boolean indexing::
>>> bad_data = arr[:, 0] > 0.5
>>> arr[bad_data, :] = 0 # (corrupts further examples)
Again, however, the columns may need to be handled more individually (but in
groups), and the ``oindex`` attribute works well::
>>> arr.oindex[bad_data, [2, 5]] = 0
Note that it would be very hard to do this using legacy fancy indexing.
The only way would be to create an integer array first::
>>> bad_data_indx = np.nonzero(bad_data)[0]
>>> bad_data_indx_reshaped = bad_data_indx[:, np.newaxis]
>>> arr[bad_data_indx_reshaped, [2, 5]]
In any case we can use only ``oindex`` to do all of this without getting
into any trouble or confused by the whole complexity of advanced indexing.
But, some new features are added to the data acquisition. Different sensors
have to be used depending on the times. Let us assume we already have
created an array of indices::
>>> correct_sensors = np.random.randint(10, size=(100, 2))
Which lists for each time the two correct sensors in an ``(N, 2)`` array.
A first try to achieve this may be ``arr[:, correct_sensors]`` and this does
not work. It should be clear quickly that slicing cannot achieve the desired
thing. But hopefully users will remember that there is ``vindex`` as a more
powerful and flexible approach to advanced indexing.
One may, if trying ``vindex`` randomly, be confused about::
>>> new_arr = arr.vindex[:, correct_sensors]
which is neither the same, nor the correct result (see transposing rules)!
This is because slicing works still the same in ``vindex``. However, reading
the documentation and examples, one can hopefully quickly find the desired
solution::
>>> rows = np.arange(len(arr))
>>> rows = rows[:, np.newaxis] # make shape fit with correct_sensors
>>> new_arr = arr.vindex[rows, correct_sensors]
At this point we have left the straight forward world of ``oindex`` but can
do random picking of any element from the array. Note that in the last example
a method such as mentioned in the ``Related Questions`` section could be more
straight forward. But this approach is even more flexible, since ``rows``
does not have to be a simple ``arange``, but could be ``interesting_times``::
>>> interesting_times = np.array([0, 4, 8, 9, 10])
>>> correct_sensors_at_it = correct_sensors[interesting_times, :]
>>> interesting_times_reshaped = interesting_times[:, np.newaxis]
>>> new_arr_it = arr[interesting_times_reshaped, correct_sensors_at_it]
Truly complex situation would arise now if you would for example pool ``L``
experiments into an array shaped ``(L, N, D)``. But for ``oindex`` this should
not result into surprises. ``vindex``, being more powerful, will quite
certainly create some confusion in this case but also cover pretty much all
eventualities.
Copyright
---------
This document is placed under the CC0 1.0 Universal (CC0 1.0) Public Domain Dedication [1]_.
References and Footnotes
------------------------
.. [1] To the extent possible under law, the person who associated CC0
with this work has waived all copyright and related or neighboring
rights to this work. The CC0 license may be found at
https://creativecommons.org/publicdomain/zero/1.0/
.. [2] e.g., see NEP 18,
http://www.numpy.org/neps/nep-0018-array-function-protocol.html
| reStructuredText | 5 | iam-abbas/numpy | doc/neps/nep-0021-advanced-indexing.rst | [
"BSD-3-Clause"
] |
<template>
  <!-- Custom error page: for the /squared route the backend response body is
       shown, otherwise the generic error message. -->
  <div>
    <h1>Error</h1>
    <p>Details: {{ $route.path === '/squared' ? error.response.data : error.message }}</p>
    <nuxt-link to="/">
      back
    </nuxt-link>
  </div>
</template>
<script>
// Test-fixture error page rendered inside the dedicated 'error-layout' layout.
export default {
  layout: 'error-layout',
  // `error` is supplied by Nuxt when this component is used as the error page.
  // eslint-disable-next-line vue/require-prop-types
  props: ['error']
}
</script>
| Vue | 4 | ardyno/nuxt.js | test/fixtures/split-layouts-error/layouts/error.vue | [
"MIT"
] |
# Tests for combinatorics / number-theory functions.
# `T expr` asserts that expr evaluates to True.
T BinarySearch(Range(10,20),11) == [2]
T IntegerPartitions(4) == [[4],[3,1],[2,2],[2,1,1],[1,1,1,1]]
T IntegerPartitions(4,3) == [[4],[3,1],[2,2],[2,1,1]]
T IntegerPartitions(5,[3]) == [[3,1,1],[2,2,1]]
T IntegerPartitions(6,[3,4]) == [[4,1,1],[3,2,1],[2,2,2],[3,1,1,1],[2,2,1,1]]
T Permutations([a,b,c]) == [[a,b,c],[a,c,b],[b,a,c],[b,c,a],[c,a,b],[c,b,a]]
T PrimeList(20) == [2,3,5,7,11,13,17,19]
T Prime(1000) == 7919
T PrimePi(1000) == 168
T FactorInteger(10) == [[2,1],[5,1]]
T NumberOfPartitions(3000) == 496025142797537184410324879054927095334462742231683423624
T Fibbonaci(10) == 55
# FIXME: v0.6 breaks most of the following because 'method too new'
#T Fibbonaci(5,x) == 1 + 3*x^2 + x^4
T LucasL(5) == 11
# Reset symbols used by the (currently disabled) polynomial tests below.
ClearAll(x,p)
# p = LucasL(5,x)
# T p == 5x + 5*x^3 + x^5
# x = 1
# T p == 11
# ClearAll(x)
# T Head(p) == Plus
ClearAll(x,p)
| Objective-J | 4 | UnofficialJuliaMirrorSnapshots/Symata.jl-a906b1d5-d016-55c4-aab3-8a20cba0db2a | symata_test/numcomb_test.sj | [
"MIT"
] |
(fun say (args..)
(for (args a)
(print stdout a \n))
(flush stdout))
(fun dump (args..)
(for (args a)
(print stderr a \n))
(flush stderr)) | Grammatical Framework | 4 | daota2/fffff | v1/lib/io.gf | [
"MIT"
] |
INSERT INTO `decimal` VALUES
(1, 84.1471),
(2, 90.9297),
(3, 14.112),
(4, -75.6802),
(5, -95.8924),
(6, -27.9415),
(7, 65.6987),
(8, 98.9358),
(9, 41.2118),
(10, -54.4021),
(11, -99.999),
(12, -53.6573),
(13, 42.0167),
(14, 99.0607),
(15, 65.0288),
(16, -28.7903),
(17, -96.1397),
(18, -75.0987),
(19, 14.9877),
(20, 91.2945),
(21, 83.6656),
(22, -0.8851),
(23, -84.622),
(24, -90.5578),
(25, -13.2352),
(26, 76.2558),
(27, 95.6376),
(28, 27.0906),
(29, -66.3634),
(30, -98.8032),
(31, -40.4038),
(32, 55.1427),
(33, 99.9912),
(34, 52.9083),
(35, -42.8183),
(36, -99.1779),
(37, -64.3538),
(38, 29.6369),
(39, 96.3795),
(40, 74.5113),
(41, -15.8623),
(42, -91.6522),
(43, -83.1775),
(44, 1.7702),
(45, 85.0904),
(46, 90.1788),
(47, 12.3573),
(48, -76.8255),
(49, -95.3753),
(50, -26.2375);
| SQL | 2 | WizardXiao/tidb | br/tests/lightning_various_types/data/vt.decimal.sql | [
"Apache-2.0"
] |
// Minimal page component for route /b: renders a static marker <div> that the
// custom-server navigation tests look for.
export default function B() {
  return <div>b</div>
}
| TypeScript | 3 | blomqma/next.js | examples/custom-server-typescript/pages/b.tsx | [
"MIT"
] |
// run-pass
#![allow(dead_code)]
// Test taken from #45641 (https://github.com/rust-lang/rust/issues/45641)
// Regression test: assignment to a `static mut` inside an `unsafe fn` must
// pass the borrow checker (it used to be rejected, see the issue above).
static mut Y: u32 = 0;
// `unsafe` is required because writing a mutable static is a data race hazard.
unsafe fn should_ok() {
    Y = 1;
}
// Compile-and-run test only: nothing is executed at runtime.
fn main() {}
| Rust | 4 | Eric-Arellano/rust | src/test/ui/borrowck/borrowck-assignment-to-static-mut.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
require 'pl'
local cjson = require 'cjson'
-- Build a list of {y, x} file pairs by scanning every file in `y_dir` and
-- looking for a file with the same basename in each directory of `x_dirs`.
-- When several x candidates exist, one is chosen uniformly at random
-- (torch's RNG); basenames with no candidate are dropped.
local function pairwise_from_entries(y_dir, x_dirs)
   local result = {}
   for _, y_file in ipairs(dir.getfiles(y_dir, "*")) do
      local name = path.basename(y_file)
      local candidates = {}
      for j = 1, #x_dirs do
	 local candidate = path.join(x_dirs[j], name)
	 if path.exists(candidate) then
	    candidates[#candidates + 1] = candidate
	 end
      end
      if #candidates == 1 then
	 result[#result + 1] = {y = y_file, x = candidates[1]}
      elseif #candidates > 1 then
	 -- several candidate x files: pick one at random
	 local pick = torch.random(1, #candidates)
	 result[#result + 1] = {y = y_file, x = candidates[pick]}
      end
   end
   return result
end
-- Like pairwise_from_entries, but restricted to the basenames listed (one per
-- line) in `basename_file`; a pair is emitted only when the y file exists.
-- When several x candidates exist, one is chosen uniformly at random.
local function pairwise_from_list(y_dir, x_dirs, basename_file)
   local list = {}
   local basenames = utils.split(file.read(basename_file), "\n")
   for i, basename in ipairs(basenames) do
      -- strip any directory component the list file may contain
      local basename = path.basename(basename)
      local y_file = path.join(y_dir, basename)
      if path.exists(y_file) then
         local x_files = {}
         for i = 1, #x_dirs do
            local x_file = path.join(x_dirs[i], basename)
            if path.exists(x_file) then
               table.insert(x_files, x_file)
            end
         end
         if #x_files == 1 then
            table.insert(list, {y = y_file, x = x_files[1]})
         elseif #x_files > 1 then
            -- several candidate x files: pick one at random
            local r = torch.random(1, #x_files)
            table.insert(list, {y = y_file, x = x_files[r]})
         end
      end
   end
   return list
end
-- Write up to rate * #list randomly chosen pairs to stdout as CSV rows:
--   "<y path>","<JSON {x = ..., filters = ...}>"
-- Embedded double quotes are escaped CSV-style (doubled).
local function output(list, filters, rate)
   local n = math.floor(#list * rate)
   -- always emit at least one sample when the list is non-empty
   if #list > 0 and n == 0 then
      n = 1
   end
   local perm = torch.randperm(#list)
   -- an empty filter list is dropped so cjson omits the field entirely
   if #filters == 0 then
      filters = nil
   end
   for i = 1, n do
      local v = list[perm[i]]
      io.stdout:write('"' .. v.y:gsub('"', '""') .. '"' .. "," .. '"' .. cjson.encode({x = v.x, filters = filters}):gsub('"', '""') .. '"' .. "\n")
   end
end
-- Collect the values of all options whose name starts with "x_dir"
-- (-x_dir, -x_dir1 .. -x_dir9).
-- Empty values are skipped: every x_dir* option defaults to "", so without
-- this filter the list always contained the blank defaults and the caller's
-- `#x_dirs == 0` usage check could never trigger.
local function get_xdirs(opt)
   local x_dirs = {}
   for k, v in pairs(opt) do
      local s, e = k:find("x_dir")
      -- the key must *start* with "x_dir" and carry a non-empty value
      if s == 1 and v:len() > 0 then
	 table.insert(x_dirs, v)
      end
   end
   return x_dirs
end
-- Command-line interface ---------------------------------------------------
local cmd = torch.CmdLine()
cmd:text("waifu2x make_pairwise_list")
cmd:option("-x_dir", "", 'Specify the directory for x(input)')
cmd:option("-y_dir", "", 'Specify the directory for y(groundtruth). The filenames should be same as x_dir')
cmd:option("-rate", 1, 'sampling rate')
cmd:option("-file_list", "", 'Specify the basename list (optional)')
cmd:option("-filters", "", 'Specify the downsampling filters')
cmd:option("-x_dir1", "", 'x for random choice')
cmd:option("-x_dir2", "", 'x for random choice')
cmd:option("-x_dir3", "", 'x for random choice')
cmd:option("-x_dir4", "", 'x for random choice')
cmd:option("-x_dir5", "", 'x for random choice')
cmd:option("-x_dir6", "", 'x for random choice')
cmd:option("-x_dir7", "", 'x for random choice')
cmd:option("-x_dir8", "", 'x for random choice')
cmd:option("-x_dir9", "", 'x for random choice')
-- Fixed seed: the random x-candidate and sampling choices are reproducible.
torch.manualSeed(71)
local opt = cmd:parse(arg)
local x_dirs = get_xdirs(opt)
-- A ground-truth directory and at least one input directory are required.
if opt.y_dir:len() == 0 or #x_dirs == 0 then
   cmd:help()
   os.exit(1)
end
-- Use the explicit basename list when supplied, otherwise scan y_dir.
local list
if opt.file_list:len() > 0 then
   list = pairwise_from_list(opt.y_dir, x_dirs, opt.file_list)
else
   list = pairwise_from_entries(opt.y_dir, x_dirs)
end
output(list, utils.split(opt.filters, ","), opt.rate)
| Lua | 4 | Nyanarchy/waifu2x | tools/make_pairwise_list.lua | [
"MIT"
] |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.v1
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.internal.SQLConf
/**
* This base suite contains unified tests for the `ALTER TABLE .. ADD PARTITION` command that
* check V1 table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterTableAddPartitionSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterTableAddPartitionSuite`
*/
trait AlterTableAddPartitionSuiteBase extends command.AlterTableAddPartitionSuiteBase {
  // An empty string is not a legal partition value: the command must fail
  // analysis rather than create a partition with an empty spec.
  test("empty string as partition value") {
    withNamespaceAndTable("ns", "tbl") { t =>
      sql(s"CREATE TABLE $t (col1 INT, p1 STRING) $defaultUsing PARTITIONED BY (p1)")
      val errMsg = intercept[AnalysisException] {
        sql(s"ALTER TABLE $t ADD PARTITION (p1 = '')")
      }.getMessage
      assert(errMsg.contains("Partition spec is invalid. " +
        "The spec ([p1=]) contains an empty partition column value"))
    }
  }
  // Adding a partition must refresh the table cache so that the cached
  // relation reflects the newly added data.
  test("SPARK-34055: refresh cache in partition adding") {
    withTable("t") {
      sql(s"CREATE TABLE t (id int, part int) $defaultUsing PARTITIONED BY (part)")
      sql("INSERT INTO t PARTITION (part=0) SELECT 0")
      assert(!spark.catalog.isCached("t"))
      sql("CACHE TABLE t")
      assert(spark.catalog.isCached("t"))
      checkAnswer(sql("SELECT * FROM t"), Seq(Row(0, 0)))
      // Create new partition (part = 1) in the filesystem
      val part1Loc = copyPartition("t", "part=0", "part=1")
      sql(s"ALTER TABLE t ADD PARTITION (part=1) LOCATION '$part1Loc'")
      // The table must remain cached and expose the new partition's rows.
      assert(spark.catalog.isCached("t"))
      checkAnswer(sql("SELECT * FROM t"), Seq(Row(0, 0), Row(0, 1)))
    }
  }
  // Table statistics must be updated automatically on partition adding when
  // spark.sql.statistics.size.autoUpdate.enabled is on (and only then).
  test("SPARK-34084: auto update table stats") {
    withNamespaceAndTable("ns", "tbl") { t =>
      withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> "false") {
        sql(s"CREATE TABLE $t (col0 int, part int) $defaultUsing PARTITIONED BY (part)")
        sql(s"INSERT INTO $t PARTITION (part=0) SELECT 0")
        // With auto-update off, no stats have been written for the table yet.
        val errMsg = intercept[IllegalArgumentException] {
          getTableSize(t)
        }.getMessage
        assert(errMsg.contains(s"The table $t does not have stats"))
      }
      withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> "true") {
        sql(s"ALTER TABLE $t ADD PARTITION (part=1)")
        assert(getTableSize(t) > 0)
      }
    }
  }
  // Adding a partition to a cached table must both keep the cache valid and
  // grow the recorded table size.
  test("SPARK-34060, SPARK-34071: update stats of cached table") {
    withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> "true") {
      withNamespaceAndTable("ns", "tbl") { t =>
        sql(s"CREATE TABLE $t (id int, part int) $defaultUsing PARTITIONED BY (part)")
        sql(s"INSERT INTO $t PARTITION (part=0) SELECT 0")
        assert(!spark.catalog.isCached(t))
        sql(s"CACHE TABLE $t")
        assert(spark.catalog.isCached(t))
        checkAnswer(sql(s"SELECT * FROM $t"), Seq(Row(0, 0)))
        val onePartSize = getTableSize(t)
        assert(onePartSize > 0)
        // Create new partition (part = 1) in the filesystem
        val part1Loc = copyPartition(t, "part=0", "part=1")
        sql(s"ALTER TABLE $t ADD PARTITION (part=1) LOCATION '$part1Loc'")
        assert(spark.catalog.isCached(t))
        // The size must strictly grow after the second partition is added.
        val twoPartSize = getTableSize(t)
        assert(onePartSize < twoPartSize)
        checkAnswer(sql(s"SELECT * FROM $t"), Seq(Row(0, 0), Row(0, 1)))
      }
    }
  }
  // Views of all three kinds (persistent, temp, global temp) built on top of
  // the table must stay cached and see new partitions after ADD PARTITION.
  test("SPARK-34138: keep dependents cached after table altering") {
    withNamespaceAndTable("ns", "tbl") { t =>
      sql(s"CREATE TABLE $t (id int, part int) $defaultUsing PARTITIONED BY (part)")
      sql(s"INSERT INTO $t PARTITION (part=0) SELECT 0")
      cacheRelation(t)
      checkCachedRelation(t, Seq(Row(0, 0)))
      // Persistent view.
      withView("v0") {
        sql(s"CREATE VIEW v0 AS SELECT * FROM $t")
        cacheRelation("v0")
        val part1Loc = copyPartition(t, "part=0", "part=1")
        sql(s"ALTER TABLE $t ADD PARTITION (part=1) LOCATION '$part1Loc'")
        checkCachedRelation("v0", Seq(Row(0, 0), Row(0, 1)))
      }
      // Temporary view.
      withTempView("v1") {
        sql(s"CREATE TEMP VIEW v1 AS SELECT * FROM $t")
        cacheRelation("v1")
        val part2Loc = copyPartition(t, "part=0", "part=2")
        sql(s"ALTER TABLE $t ADD PARTITION (part=2) LOCATION '$part2Loc'")
        checkCachedRelation("v1", Seq(Row(0, 0), Row(0, 1), Row(0, 2)))
      }
      // Global temporary view (qualified by the global temp database name).
      val v2 = s"${spark.sharedState.globalTempViewManager.database}.v2"
      withGlobalTempView("v2") {
        sql(s"CREATE GLOBAL TEMP VIEW v2 AS SELECT * FROM $t")
        cacheRelation(v2)
        val part3Loc = copyPartition(t, "part=0", "part=3")
        sql(s"ALTER TABLE $t ADD PARTITION (part=3) LOCATION '$part3Loc'")
        checkCachedRelation(v2, Seq(Row(0, 0), Row(0, 1), Row(0, 2), Row(0, 3)))
      }
    }
  }
}
/**
 * The class contains tests for the `ALTER TABLE .. ADD PARTITION` command to check
 * V1 In-Memory table catalog. All tests are inherited from
 * [[AlterTableAddPartitionSuiteBase]]; `CommandSuiteBase` supplies the
 * in-memory catalog configuration.
 */
class AlterTableAddPartitionSuite extends AlterTableAddPartitionSuiteBase with CommandSuiteBase
| Scala | 4 | kesavanvt/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableAddPartitionSuite.scala | [
"BSD-2-Clause",
"Apache-2.0",
"CC0-1.0",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] |
/-
Copyright (c) 2015 Microsoft Corporation. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Author: Jeremy Avigad
Extensional equality for functions, and a proof of function extensionality from quotients.
-/
prelude
import init.data.quot init.logic
universes u v

namespace function
variables {α : Sort u} {β : α → Sort v}

-- Pointwise (extensional) equality of dependent functions.
protected def equiv (f₁ f₂ : Π x : α, β x) : Prop := ∀ x, f₁ x = f₂ x

local infix `~` := function.equiv

-- `~` is reflexive, symmetric and transitive, hence an equivalence relation.
protected theorem equiv.refl (f : Π x : α, β x) : f ~ f := assume x, rfl

protected theorem equiv.symm {f₁ f₂ : Π x: α, β x} : f₁ ~ f₂ → f₂ ~ f₁ :=
λ h x, eq.symm (h x)

protected theorem equiv.trans {f₁ f₂ f₃ : Π x: α, β x} : f₁ ~ f₂ → f₂ ~ f₃ → f₁ ~ f₃ :=
λ h₁ h₂ x, eq.trans (h₁ x) (h₂ x)

protected theorem equiv.is_equivalence (α : Sort u) (β : α → Sort v) : equivalence (@function.equiv α β) :=
mk_equivalence (@function.equiv α β) (@equiv.refl α β) (@equiv.symm α β) (@equiv.trans α β)
end function

section
open quotient
variables {α : Sort u} {β : α → Sort v}

-- Setoid of functions under pointwise equality; used to build the quotient
-- from which function extensionality is derived.
@[instance]
private def fun_setoid (α : Sort u) (β : α → Sort v) : setoid (Π x : α, β x) :=
setoid.mk (@function.equiv α β) (function.equiv.is_equivalence α β)

-- "Extensional functions": functions quotiented by pointwise equality.
private def extfun (α : Sort u) (β : α → Sort v) : Sort (imax u v) :=
quotient (fun_setoid α β)

private def fun_to_extfun (f : Π x : α, β x) : extfun α β :=
⟦f⟧

-- Application for extensional functions: well defined because pointwise
-- equal functions agree at every argument.
private def extfun_app (f : extfun α β) : Π x : α, β x :=
assume x,
quot.lift_on f
  (λ f : Π x : α, β x, f x)
  (λ f₁ f₂ h, h x)

-- Function extensionality, proved from quotient soundness: if f₁ and f₂
-- agree pointwise then ⟦f₁⟧ = ⟦f₂⟧, and applying extfun_app recovers f₁ = f₂.
theorem funext {f₁ f₂ : Π x : α, β x} (h : ∀ x, f₁ x = f₂ x) : f₁ = f₂ :=
show extfun_app ⟦f₁⟧ = extfun_app ⟦f₂⟧, from
congr_arg extfun_app (sound h)
end

attribute [intro!] funext

local infix `~` := function.equiv

-- A Pi type over subsingleton codomains is itself a subsingleton.
instance pi.subsingleton {α : Sort u} {β : α → Sort v} [∀ a, subsingleton (β a)] : subsingleton (Π a, β a) :=
⟨λ f₁ f₂, funext (λ a, subsingleton.elim (f₁ a) (f₂ a))⟩
| Lean | 5 | JLimperg/lean | library/init/funext.lean | [
"Apache-2.0"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.