text stringlengths 1 1.05M |
|---|
// Print a singly linked list in reverse order.
// Recurses to the tail first, then emits each node's data while the
// call stack unwinds, each value followed by a single space.
void print_backward_recursion(node *p){
    if(p != NULL){
        print_backward_recursion(p->next); // reach the end of the list first
        cout<<p->data<<" ";                // print on the way back up
    }
}
/* http://keith-wood.name/timeEntry.html
Turkish initialisation for the jQuery time entry extension
Written by <NAME> */
(function($) {
	// Register the Turkish ('tr') locale for the time entry plugin:
	// 24-hour clock with ':' separator, no AM/PM affixes (English names
	// kept only as fallbacks), and localized spinner tooltips
	// (now / previous field / next field / increase / decrease).
	$.timeEntry.regional['tr'] = {show24Hours: true, separator: ':',
		ampmPrefix: '', ampmNames: ['AM', 'PM'],
		spinnerTexts: ['şu an', 'önceki alan', 'sonraki alan', 'arttır', 'azalt']};
	// Make Turkish the default for all time entry fields on the page.
	$.timeEntry.setDefaults($.timeEntry.regional['tr']);
})(jQuery);
|
import { fromJS } from 'immutable';
import { apiPath } from 'config';
const RECS_LOAD = 'wr/recordings/RECS_LOAD';
const RECS_LOAD_SUCCESS = 'wr/recordings/RECS_LOAD_SUCCESS';
const RECS_LOAD_FAIL = 'wr/recordings/RECS_LOAD_FAIL';
const REC_LOAD = 'wr/recordings/REC_LOAD';
const REC_LOAD_SUCCESS = 'wr/recordings/REC_LOAD_SUCCESS';
const REC_LOAD_FAIL = 'wr/recordings/REC_LOAD_FAIL';
const REC_EDIT = 'wr/rec/REC_EDIT';
const REC_EDIT_SUCCESS = 'wr/recordings/REC_EDIT_SUCCESS';
const REC_EDIT_FAIL = 'wr/recordings/REC_EDIT_FAIL';
const REC_EDITED_RESET = 'wr/recordings/REC_EDITED_RESET';
const REC_BK = 'wr/rec/REC_BK';
const REC_BK_SUCCESS = 'wr/recordings/REC_BK_SUCCESS';
const REC_BK_FAIL = 'wr/recordings/REC_BK_FAIL';
const REC_DELETE = 'wr/recordings/REC_DELETE';
const REC_DELETE_SUCCESS = 'wr/recordings/REC_DELETE_SUCCESS';
const REC_DELETE_FAIL = 'wr/recordings/REC_DELETE_FAIL';
const initialState = fromJS({
deleting: false,
deleted: false,
edited: false,
loaded: false,
loadingRecBK: false,
loadedRecBK: false,
recordingBookmarks: null
});
/**
 * Reducer for the recordings slice (an Immutable.js Map).
 * Tracks loading of the recording list, a single recording, edits,
 * per-recording bookmarks, and deletion.
 *
 * @param {Immutable.Map} state - current slice state
 * @param {Object} action - dispatched action; `type` selects the transition
 * @returns {Immutable.Map} the next state
 */
export default function recordings(state = initialState, action = {}) {
  switch (action.type) {
    case REC_BK:
      return state.merge({
        loadingRecBK: true,
        loadedRecBK: false,
        recordingBookmarks: null
      });
    case REC_BK_SUCCESS:
      return state.merge({
        loadingRecBK: false,
        loadedRecBK: true,
        recordingBookmarks: action.result.page_bookmarks
      });
    // NOTE(review): there is no REC_BK_FAIL case, so a failed bookmark
    // fetch leaves loadingRecBK stuck at true — confirm this is intended.
    case REC_DELETE:
      return state.merge({
        deleting: true,
        deleted: false
      });
    case REC_DELETE_SUCCESS:
      return state.merge({
        deleting: false,
        deleted: true
      });
    case REC_DELETE_FAIL:
      return state.merge({
        deleting: false,
        deleted: false,
        error: action.result.error
      });
    case REC_EDIT_SUCCESS:
      // Optimistically merge the edited fields carried on the action.
      return state.merge({
        edited: true,
        ...action.data
      });
    case REC_EDITED_RESET:
      return state.set('edited', false);
    case REC_LOAD:
      return state.merge({
        loading: true
      });
    case REC_LOAD_SUCCESS:
      return state.merge({
        loading: false,
        loaded: true,
        recording: action.result.recording
      });
    case REC_LOAD_FAIL:
      return state.merge({
        loading: false,
        loaded: false,
        error: action.error
      });
    case RECS_LOAD:
      // Fixed: was `state.merge('loading': true)`, which is a syntax error —
      // Immutable's merge takes an object (or collection), not a labeled arg.
      return state.merge({
        loading: true
      });
    case RECS_LOAD_SUCCESS:
      return state.merge({
        loading: false,
        loaded: true,
        recordings: action.result.recordings
      });
    case RECS_LOAD_FAIL:
      return state.merge({
        loading: false,
        loaded: false,
        error: action.error
      });
    default:
      return state;
  }
}
/**
 * Action creator: fetch every recording in a collection.
 * The API middleware dispatches RECS_LOAD, then the SUCCESS/FAIL type.
 */
export function collRecordings(user, coll) {
  return {
    types: [RECS_LOAD, RECS_LOAD_SUCCESS, RECS_LOAD_FAIL],
    promise: (client) => {
      const params = { user, coll: decodeURIComponent(coll) };
      return client.get(`${apiPath}/recordings`, { params });
    }
  };
}
/**
 * Action creator: fetch a single recording by id.
 */
export function loadRecording(user, coll, rec) {
  return {
    types: [REC_LOAD, REC_LOAD_SUCCESS, REC_LOAD_FAIL],
    promise: (client) => {
      const params = { user, coll: decodeURIComponent(coll) };
      return client.get(`${apiPath}/recording/${rec}`, { params });
    }
  };
}
/**
 * Action creator: update a recording's fields.
 * `data` rides on the action too, so REC_EDIT_SUCCESS can merge it into state.
 */
export function edit(user, coll, rec, data) {
  return {
    types: [REC_EDIT, REC_EDIT_SUCCESS, REC_EDIT_FAIL],
    promise: (client) => {
      const params = { user, coll: decodeURIComponent(coll) };
      return client.post(`${apiPath}/recording/${rec}`, { params, data });
    },
    data
  };
}
/**
 * Action creator: fetch the page bookmarks for one recording in a collection.
 */
export function getRecordingBookmarks(user, coll, rec) {
  return {
    types: [REC_BK, REC_BK_SUCCESS, REC_BK_FAIL],
    promise: (client) => {
      const params = { user, rec };
      return client.get(`${apiPath}/collection/${coll}/page_bookmarks`, { params });
    }
  };
}
/**
 * Action creator: clear the `edited` flag set by a successful edit.
 */
export function resetEditState() {
  const resetAction = { type: REC_EDITED_RESET };
  return resetAction;
}
/**
 * Action creator: delete a recording by id.
 */
export function deleteRecording(user, coll, rec) {
  return {
    types: [REC_DELETE, REC_DELETE_SUCCESS, REC_DELETE_FAIL],
    promise: (client) => {
      const params = { user, coll: decodeURIComponent(coll) };
      return client.del(`${apiPath}/recording/${rec}`, { params });
    }
  };
}
|
<reponame>krzys9876/z80_sim_scala<gh_stars>0
package org.kr.scala.z80.test
import org.kr.scala.z80.system.{Debugger, DummyDebugger, OutputFile}
import org.scalatest.funsuite.AnyFunSuite
class OutputFileTest extends AnyFunSuite {

  test("check empty output file") {
    // A blank output file reports 0 for any (port, index) lookup.
    val blank = OutputFile.blank
    assert(blank(0, 0) == 0)
    assert(blank(1, 0) == 0)
    assert(blank(100, 5) == 0)
    assert(blank(300, 20) == 0)
  }

  test("add elements to output file") {
    implicit val debugger: Debugger = DummyDebugger
    // Writes accumulate per port in order; slots never written stay 0.
    val written = OutputFile.blank
      .write(10, 0x41)
      .write(10, 0x42)
      .write(10, 0x43)
      .write(20, 0x44)
    assert(written(10, 0) == 0x41)
    assert(written(10, 1) == 0x42)
    assert(written(10, 2) == 0x43)
    assert(written(10, 3) == 0)
    assert(written(20, 0) == 0x44)
    assert(written(20, 1) == 0)
  }
}
|
# Load the shared test helpers and the Apache httpd utilities.
source /tmp/lib.sh
source /tmp/lib-apache-httpd.sh

# Pass when no mod_proxy* modules are loaded into httpd; fail otherwise.
proxy_module_count=$(httpd -M 2> /dev/null | grep -c 'proxy_')
if [ "$proxy_module_count" -eq 0 ]; then
    exit $PASS
fi
exit $FAIL
|
package hackerrank.icecream_parlor;
import java.util.HashMap;
/**
 * https://www.hackerrank.com/challenges/icecream-parlor
 *
 * Finds two distinct flavors whose prices sum exactly to the money available.
 */
public final class SolutionCore {

    private SolutionCore() {
    }

    /**
     * Returns the 1-based indices of two distinct entries of {@code c} that
     * sum to {@code m}, or {@code {0, 0}} when no such pair exists.
     *
     * @param m target sum (money available)
     * @param c flavor prices
     * @return 1-based index pair, or {0, 0} if none
     */
    protected static int[] which(final int m, final int[] c) {
        final int n = c.length;
        // Map each price to its LAST index: for duplicate prices this
        // guarantees the lookup below finds a second, distinct position.
        HashMap<Integer, Integer> map = new HashMap<>();
        for (int i = 0; i < n; i++) {
            map.put(c[i], i);
        }
        for (int i = 0; i < n; i++) {
            // Single lookup instead of containsKey() followed by get().
            final Integer second = map.get(m - c[i]);
            if (second != null && second != i) {
                return new int[] { i + 1, second + 1 };
            }
        }
        return new int[] { 0, 0 };
    }
}
|
/**
 * Draw a horizontal "dot/range" chart (D3 v3) into the pre-rendered frame
 * whose elements are id-prefixed by `media` (e.g. "print", "web").
 *
 * @param {Array}  data        rows, each with a `name` plus one column per series
 * @param {Object} stylename   style sheet object; `linecolours` supplies the palette
 * @param {string} media       id prefix selecting the target frame/plot
 * @param {string} xMin,xMax   year strings clamping the x domain
 * @param {number} numTicksx   (unused — kept for signature compatibility)
 * @param {boolean} showRect   draw range rectangles between consecutive series
 * @param {boolean} showLine   draw a connecting line across each row
 * @param {boolean} markers    draw a circle per series value
 * @param {Array}  plotpadding per-media margins ({name, margin:[{top,right,bottom,left}]})
 * @param {*}      legAlign,yAlign (unused — kept for signature compatibility)
 */
function makeChart(data,stylename,media,xMin,xMax,numTicksx,showRect,showLine,markers,plotpadding,legAlign,yAlign){
  var titleYoffset = d3.select("#"+media+"Title").node().getBBox().height
  var subtitleYoffset=d3.select("#"+media+"Subtitle").node().getBBox().height;
  //Select the plot space in the frame from which to take measurements
  var frame=d3.select("#"+media+"chart")
  var plot=d3.select("#"+media+"plot")
  var yOffset=d3.select("#"+media+"Subtitle").style("font-size");
  yOffset=Number(yOffset.replace(/[^\d.-]/g, ''));
  //Get the width,height and the margins unique to this chart
  var w=plot.node().getBBox().width;
  var h=plot.node().getBBox().height;
  var margin=plotpadding.filter(function(d){
    return (d.name === media);
  });
  margin=margin[0].margin[0]
  var colours=stylename.linecolours;
  var plotWidth = w-(margin.left+margin.right);
  var plotHeight = h-(margin.top+margin.bottom);
  var parseDate = d3.time.format("%Y").parse;
  // return the series names from the first row of the spreadsheet
  var seriesNames = Object.keys(data[0]).filter(function(d){if (d!='name'){return d}});
  //you now have a chart area, inner margin data and colour palette - with titles pre-rendered
  //sort the data into date order of first column
  data.sort(function(a, b){
    return a[seriesNames[0]]-b[seriesNames[0]];
  });
  //identify date range of data
  //initialise dates to first date value
  xMin=parseDate(xMin)
  xMax=parseDate(xMax)
  var minDate = data[0][seriesNames[0]]
  var maxDate = data[0][seriesNames[0]]
  //iterate through dates and compare min/max
  seriesNames.forEach(function(d,i){
    data.forEach(function(e,j){
      minDate = Math.min(xMin,d3.min([minDate,e[d]]))
      maxDate = Math.max(xMax,d3.max([maxDate,e[d]]))
    })
  })
  //y scale for country
  var yScale = d3.scale.ordinal()
    .domain(data.map(function(d){
      return d.name;
    }))
    .rangeRoundBands([0,plotHeight],0.5);
  var yAxis = d3.svg.axis()
    .scale(yScale)
    .orient("left")
    .tickSize(0);
  var yLabel=plot.append("g")
    .attr("class", media+"yAxis")
    .call(yAxis)
  //calculate what the ticksize should be now that the text for the labels has been drawn
  var yLabelOffset=yLabel.node().getBBox().width
  yLabel
    .attr("transform",function(){
      return "translate("+(margin.left+yLabelOffset)+","+margin.top+")"
    })
  //AXES
  //x scale for dates
  var xScale = d3.time.scale()
    .domain([minDate,maxDate])
    .range([yLabelOffset,plotWidth])
  var xAxis = d3.svg.axis()
    .scale(xScale)
    .orient("bottom")
    .tickSize(plotHeight)
    .ticks(4)
  //secondary axis - more ticks but no labels
  var xAxis2 = d3.svg.axis()
    .scale(xScale)
    .tickSize(plotHeight)
    .orient("bottom")
    .tickFormat(d3.time.format(""))
    .ticks(20)
  //call axes
  plot.append("g")
    .attr("class",media+"xAxis")
    .attr("transform","translate("+margin.left+","+(margin.top)+")")
    .call(xAxis)
  plot.append("g")
    .attr("class",media+"xAxis")
    .attr("transform","translate("+margin.left+","+(margin.top)+")")
    .call(xAxis2)
  //create chart geometry
  var chart = plot.append("g")
    .attr("id","geometry")
    .attr("transform","translate("+margin.left+","+margin.top+")")
  //work in rows of geometry
  var rowGroups = chart.append("g")
    .attr("id","chart_rows")
    .selectAll("g")
    .data(data)
    .enter()
    .append("g")
  rowGroups.each(function(d,i){
    //rectangles between consecutive series values
    if (showRect){
      // Fixed: `k` was an implicit global (no `var`), which would clash with
      // any other chart rendered on the same page.
      for (var k=0;k<seriesNames.length-1;k++){
        d3.select(rowGroups[0][i]).append("rect")
          .attr("x",function(d){
            return xScale(d[seriesNames[k]])
          })
          .attr("y",function(d){
            return yScale(d.name);
          })
          .attr("width",function(d){
            return xScale(d[seriesNames[k+1]])-xScale(d[seriesNames[k]])
          })
          .attr("height",yScale.rangeBand)
          .attr("fill",colours[k])
          .attr("fill-opacity",0.8)
      }
    }
    //connecting lines across the full row
    if (showLine){
      d3.select(rowGroups[0][i]).append("line")
        .attr("x1",function(d){
          return xScale(d[seriesNames[0]])
        })
        .attr("x2",function(d){
          return xScale(d[seriesNames[seriesNames.length-1]])
        })
        .attr("y1",function(d){
          return yScale(d.name)+(yScale.rangeBand()/2);
        })
        .attr("y2",function(d){
          return yScale(d.name)+(yScale.rangeBand()/2);
        })
        .attr("stroke","#777")
        .attr("class",media+"stalk")
    }
    //marker dots, one per series
    if (markers){
      seriesNames.forEach(function(e,j){
        d3.select(rowGroups[0][i]).append("circle")
          .attr("cx",function(d){
            return xScale(d[seriesNames[j]])
          })
          .attr("cy",function(d){
            return yScale(d.name)+(yScale.rangeBand()/2);
          })
          .attr("r",yOffset/2.4)
          .attr("fill",colours[j])
      })
    }
  })
  if (markers||showRect){
    //key: series labels positioned over the first data row
    chart.append("g")
      .attr("id","key")
      .selectAll("text")
      .data(seriesNames)
      .enter()
      .append("text")
      .text(function(d){
        return d;
      })
      .attr("y",yScale.rangeBand()/2)
      .attr("x",function(d){
        return xScale(data[0][d])
      })
      .attr("text-anchor","middle")
      .attr("fill",function(d,i){
        return colours[i]
      })
      .attr('class',media+'labels' )
    chart.append("g")
      .attr("id","keylines")
      .selectAll("line")
      .data(seriesNames)
      .enter()
      .append("line")
      .attr("y1",yScale.rangeBand()*.5)
      .attr("y2",yScale.rangeBand())
      .attr("x1",function(d){
        return xScale(data[0][d])
      })
      .attr("x2",function(d){
        return xScale(data[0][d])
      })
      .attr("stroke","#777")
      .attr("stroke-width","1px")//should use class
  }
  //Add labels so that the preflight script in illustrator will work
  d3.selectAll(".printxAxis text")
    .attr("id","xAxisLabel")
  d3.selectAll(".printyAxis text")
    .attr("id","yAxisLabel")
  d3.selectAll(".printyAxis line")
    .attr("id","yAxisTick")
  d3.selectAll(".printxAxis line")
    .attr("id","xAxisTick")
  d3.selectAll(".printminorAxis line")
    .attr("id","minorTick")
  d3.selectAll(".domain").remove()
}
<gh_stars>10-100
// Get user activity counters across all modules
//
// Params:
// - data.user_id (ObjectId)
// - data.current_user_id (Object), same as env.user_info
//
// Returns:
// - data.count (Number)
//
// Used in:
// - member profile
// - usercard
// - admin interface (for multiple users)
//
'use strict';
module.exports = function (N, apiPath) {
N.wire.before(apiPath, { priority: -100 }, function activity_get_setup(data) {
data.count = Array.isArray(data.user_id) ? Array(data.user_id.length).fill(0) : 0;
});
};
|
/// Axis along which a segment runs.
enum Orientation {
    case horizontal
    case vertical
}

/// A straight edge with a length and an axis.
struct Segment {
    let length: Double
    let orientation: Orientation
}

/// A four-sided shape assembled from its edge segments.
struct Shape {
    let topSegment: Segment
    let rightSegment: Segment
    let bottomSegment: Segment
    let leftSegment: Segment

    init(topSegment: Segment, rightSegment: Segment, bottomSegment: Segment, leftSegment: Segment) {
        // Top and right are stored exactly as supplied by the caller.
        self.topSegment = topSegment
        self.rightSegment = rightSegment
        // NOTE(review): bottom and left are normalized to .vertical regardless
        // of the caller's orientation (only their lengths are kept). Forcing
        // the *bottom* edge to vertical looks suspicious geometrically —
        // confirm with the original author before changing.
        self.bottomSegment = Segment(length: bottomSegment.length, orientation: .vertical)
        self.leftSegment = Segment(length: leftSegment.length, orientation: .vertical)
    }
}
<gh_stars>0
import FinalClean from "./CommomFunctions/FinalClean";
const findTheValuesRegex = /(?<=\:).+[^\s]/gm;
/**
 * Extract the value portion after each ':' on every line of a raw form dump.
 *
 * @param {string} FormData - raw multi-line "key: value" text
 * @returns {Array<string>} cleaned values (empty array when no match)
 */
function getTheValuesOfTheCamps(FormData) {
  const dirtyValues = FormData.match(findTheValuesRegex);
  // String.prototype.match returns null when nothing matches; without this
  // guard, .map below would throw on an empty/malformed form.
  if (dirtyValues === null) {
    return [];
  }
  return dirtyValues.map((current) => {
    // Defensive: strip a stray colon the lookbehind may have let through.
    const withoutTwoPoints = current.replace(":", "");
    return FinalClean(withoutTwoPoints);
  });
}
export default getTheValuesOfTheCamps; |
<gh_stars>0
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.problem;
import java.io.File;
import org.moeaframework.core.NondominatedPopulation;
import org.moeaframework.core.PopulationIO;
import org.moeaframework.core.Problem;
import org.moeaframework.core.Settings;
import org.moeaframework.core.spi.ProblemProvider;
import org.moeaframework.core.spi.ProviderNotFoundException;
/**
 * Problem provider for problems enumerated in {@code global.properties}.
 * The problems are identified by name as listed in the {@code
 * org.moeaframework.problem.problems} property, with the class and optional
 * reference set defined by the {@code org.moeaframework.problem.NAME.class}
 * and {@code org.moeaframework.problem.NAME.referenceSet} properties.
 * Problems instantiated this way must provide an empty constructor.
 */
public class PropertiesProblems extends ProblemProvider {

	/**
	 * Constructs the problem provider for problems enumerated in {@code
	 * global.properties}.
	 */
	public PropertiesProblems() {
		super();
	}

	/**
	 * Returns the case-sensitive version of the problem name. If the problem
	 * name was not specifically listed in the
	 * {@code org.moeaframework.problem.problems} property, {@code name} is
	 * returned unchanged.
	 *
	 * @param name the case-insensitive name
	 * @return the case-sensitive name
	 */
	protected String getCaseSensitiveProblemName(String name) {
		for (String problem : Settings.getProblems()) {
			if (problem.equalsIgnoreCase(name)) {
				return problem;
			}
		}
		return name;
	}

	@Override
	public Problem getProblem(String name) {
		name = getCaseSensitiveProblemName(name);
		if (name != null) {
			String className = Settings.getProblemClass(name);
			if (className != null) {
				try {
					// Class.newInstance() is deprecated (it rethrows checked
					// exceptions unwrapped); use the no-arg constructor
					// explicitly instead.
					return (Problem)Class.forName(className)
							.getDeclaredConstructor().newInstance();
				} catch (Exception e) {
					throw new ProviderNotFoundException(name, e);
				}
			}
		}
		return null;
	}

	@Override
	public NondominatedPopulation getReferenceSet(String name) {
		name = getCaseSensitiveProblemName(name);
		if (name != null) {
			String fileName = Settings.getProblemReferenceSet(name);
			if (fileName != null) {
				try {
					return new NondominatedPopulation(
							PopulationIO.readObjectives(new File(fileName)));
				} catch (Exception e) {
					// Per the provider contract, a missing/unreadable
					// reference set is reported as "none available".
					return null;
				}
			}
		}
		return null;
	}
}
|
#pragma once

#include "ruletype.hpp"

#include <cstddef>
#include <limits>
#include <set>
/**
 * @brief Stores a collection of unique conversion rules and tracks the
 * range of input sizes they jointly support.
 */
class RuleBook
{
public:
    /**
     * @brief add a conversion rule to the collection
     *
     * @param rule the conversion rule type to add
     * @return true if the rule was newly added
     * @return false if the rule was already present
     */
    bool add(RuleType rule);
    /**
     * @brief gets the minimum supported input size for conversion
     *
     * @return size_t minimum supported input size for conversion
     */
    size_t getMinInputSize() const noexcept;
    /**
     * @brief gets the maximum supported input size for conversion
     *
     * @return size_t maximum supported input size for conversion
     */
    size_t getMaxInputSize() const noexcept; // TODO change redundant descriptions to only return desc?
    /**
     * @brief returns an iterator to the first rule
     *
     * @return std::set<RuleType>::const_iterator
     */
    std::set<RuleType>::const_iterator begin() const noexcept;
    /**
     * @brief returns an iterator to one past the last rule
     *
     * @return std::set<RuleType>::const_iterator
     */
    std::set<RuleType>::const_iterator end() const noexcept;
    /**
     * @brief returns the number of added rules
     *
     * @return size_t the number of rules
     */
    size_t size() const noexcept;
    /**
     * @brief returns the rule at the specified index
     *
     * @param index the index to return (bounds are not checked)
     * @return RuleType the rule at index
     */
    RuleType operator[](size_t index) const;
private:
    std::set<RuleType> rules;
    // Sentinel initial values: min starts at the largest possible size_t and
    // max at the smallest, so the first add() establishes the real range.
    size_t minInputSize = std::numeric_limits<size_t>::max();
    size_t maxInputSize = std::numeric_limits<size_t>::min();
};
|
class Shape:
    """Abstract base for geometric shapes.

    Concrete subclasses are expected to override both methods; the base
    implementations intentionally do nothing (and so return ``None``).
    """

    def get_area(self):
        """Return the shape's area; no-op placeholder in the base class."""
        pass

    def get_perimeter(self):
        """Return the shape's perimeter; no-op placeholder in the base class."""
        pass
import React from 'react';
import ReactDOM from 'react-dom';
import { shallow, mount } from 'enzyme';
import Checkbox, { PureCheckbox, BEM } from '../Checkbox';
// Minimal stand-in component used to exercise the overrideButton prop.
function BarButton() {
  return <div>bar</div>;
}
describe('rowComp(Checkbox)', () => {
  it('renders without crashing', () => {
    // Smoke test: mounting the rowComp-wrapped Checkbox into a detached
    // DOM node should not throw.
    const div = document.createElement('div');
    const element = <Checkbox basic="Basic text" />;
    ReactDOM.render(element, div);
  });
});
describe('Pure <Checkbox>', () => {
  let originalGetInputRef;
  beforeAll(() => {
    // Stub getInputRef for the whole suite: shallow rendering never attaches
    // a real <input>, so the component's ref-based indeterminate handling
    // needs a fake node to read from.
    originalGetInputRef = PureCheckbox.prototype.getInputRef;
    PureCheckbox.prototype.getInputRef = jest.fn(() => ({ indeterminate: false }));
  });
  afterAll(() => {
    PureCheckbox.prototype.getInputRef = originalGetInputRef;
  });
  it('renders <input type=checkbox> along with rowComp parts inside <RowCompBody>', () => {
    const wrapper = shallow(
      <PureCheckbox>Foo children</PureCheckbox>
    );
    expect(wrapper.containsMatchingElement(<input type="checkbox" />)).toBeTruthy();
  });
  it('renders <input> in icon wrapper before rowComp parts', () => {
    const wrapper = shallow(
      <PureCheckbox>Foo children</PureCheckbox>
    );
    // The icon wrapper must be the first child so the checkbox visually
    // precedes the rowComp content.
    expect(wrapper.childAt(0).hasClass('gyp-checkbox__icon-wrapper')).toBeTruthy();
    expect(wrapper.childAt(0).find('input').exists()).toBeTruthy();
  });
  it('passes whitelisted props to <input>', () => {
    // Only these four props are forwarded directly from the component's
    // own props onto the native <input>.
    const handleChange = jest.fn();
    const wrapper = shallow(
      <PureCheckbox checked defaultChecked disabled onChange={handleChange}>
        Foo children
      </PureCheckbox>
    );
    const inputWrapper = wrapper.find('input');
    expect(inputWrapper.prop('checked')).toBeTruthy();
    expect(inputWrapper.prop('defaultChecked')).toBeTruthy();
    expect(inputWrapper.prop('disabled')).toBeTruthy();
    expect(inputWrapper.prop('onChange')).toBe(handleChange);
  });
  it('passes every props to <input> from the input prop', () => {
    // The `input` prop is an escape hatch: its entire contents are spread
    // onto the native <input>. (Note: `readonly` is asserted verbatim here
    // because it is pure passthrough — React's canonical name is `readOnly`.)
    const wrapper = shallow(
      <PureCheckbox input={{ readonly: true, id: 'foo-checkbox' }}>
        Foo children
      </PureCheckbox>
    );
    const inputWrapper = wrapper.find('input');
    expect(inputWrapper.prop('readonly')).toBeTruthy();
    expect(inputWrapper.prop('id')).toBe('foo-checkbox');
  });
  it('can override checkbox button via overrideButton prop', () => {
    const wrapper = shallow(
      <PureCheckbox overrideButton={<BarButton />}>
        Foo children
      </PureCheckbox>
    );
    // The override replaces the default button but stays inside the
    // icon wrapper.
    expect(wrapper.find(BarButton).exists()).toBeTruthy();
    expect(wrapper.find(BarButton).parent().hasClass(BEM.iconWrapper.toString())).toBeTruthy();
  });
});
describe('DOM Node operation', () => {
  // `indeterminate` has no HTML attribute, so the component must set it
  // imperatively on the DOM node; these tests use mount() to get real nodes.
  it('updates indeterminate prop on <input type=checkbox>', () => {
    const wrapper = mount(
      <PureCheckbox>Foo children</PureCheckbox>
    );
    expect(wrapper.find('input').instance().indeterminate).toBeFalsy();
    wrapper.setProps({ indeterminate: true });
    expect(wrapper.find('input').instance().indeterminate).toBeTruthy();
  });
  it('should not touch input.indeterminate when prop not changed', () => {
    const wrapper = mount(
      <PureCheckbox indeterminate>Foo children</PureCheckbox>
    );
    expect(wrapper.find('input').instance().indeterminate).toBeTruthy();
    // Updating an unrelated prop must not reset the DOM flag.
    wrapper.setProps({ disabled: true });
    expect(wrapper.find('input').instance().indeterminate).toBeTruthy();
  });
});
|
<gh_stars>0
import { ActivatedRoute, Router } from '@angular/router';
import { ProdutosService } from '../services/produtos.service';
import { Component, OnInit, OnDestroy } from '@angular/core';
import { FormBuilder, Validators } from '@angular/forms';
import { Produtos } from '../models/produtos.model';
import { MatSnackBar } from '@angular/material/snack-bar';
import { BaseFormComponent } from 'src/app/shared/base-form/base-form.component';
import { Subject } from 'rxjs';
import { takeUntil } from 'rxjs/operators';
@Component({
  selector: 'app-produtos-form',
  templateUrl: './produtos-form.component.html',
  styleUrls: ['./produtos-form.component.css']
})
export class ProdutosFormComponent extends BaseFormComponent implements OnInit, OnDestroy {
  produto: Produtos;
  // Emits on destroy so in-flight HTTP subscriptions are torn down via takeUntil.
  private sub = new Subject<void>();
  pageTitle: string;

  constructor(
    private produtosService: ProdutosService,
    private router: Router,
    private route: ActivatedRoute,
    protected snackBar: MatSnackBar,
    private fb: FormBuilder,
  ) {
    super(snackBar);
  }

  ngOnInit(): void {
    const produtos = this.route.snapshot.data.produto;
    // id === 0 means the route resolver produced an empty product => "new" mode.
    produtos.id === 0 ? this.pageTitle = 'Novo Produto' :
      this.pageTitle = 'Editar Produto';
    this.form = this.fb.group({
      nome: [produtos.nome, [Validators.required, Validators.minLength(3)]],
      preco: [produtos.preco, [Validators.required, Validators.pattern(/^[0-9]+$/)]],
      quantidade: [produtos.quantidade, [Validators.required, Validators.pattern(/^[0-9]+$/)]],
      categoria: [produtos.categoria, Validators.required]
    });
  }

  onSubmit(): void {
    this.formSaved = true;
    if (this.form.dirty && this.form.valid) {
      const produto = Object.assign({}, this.produto, this.form.value);
      if (this.route.snapshot.data.produto.id === 0) {
        // Fixed: takeUntil(this.sub) was piped twice; once is sufficient.
        this.produtosService.save(produto)
          .pipe(takeUntil(this.sub)).subscribe(() => {
            this.openSnackBar('Produto criado com sucesso!', 'Fechar');
            this.router.navigate(['/produtos']);
          },
          (error) => {
            this.openSnackBar('Houve um erro ao criar um novo produto', 'Fechar');
          });
      }
      else {
        this.updateProduct(produto);
      }
    }
  }

  updateProduct(produto: Produtos) {
    this.produtosService.update(produto, this.route.snapshot.data.produto.id)
      .pipe(takeUntil(this.sub)).subscribe(() => {
        this.openSnackBar('Produto atualizado com sucesso!', 'Fechar');
        this.router.navigate(['/produtos']);
      },
      (error) => {
        console.log(error);
        this.openSnackBar('Houve um erro ao atualizar um produto', 'Fechar');
      });
  }

  ngOnDestroy(): void {
    this.sub.next();
    // Also complete the subject so no late subscriber can keep it alive.
    this.sub.complete();
  }
}
|
#!/bin/bash
# Run the dotproduct trace benchmark ten times per input size, appending
# pycket's combined stdout/stderr to dot<size>.
for bench in 10000 25000; do
    echo "$bench"
    for i in {1..10}; do
        ./pycket-c trace-benches/dotproduct.rkt "$bench" &>> "dot${bench}"
    done
done
exit 0
|
# Regenerate help.txt from the currently installed smart_open package
# and commit the result.
script_dir="$(dirname "${BASH_SOURCE[0]}")"
#
# Using the current environment, which has smart_open installed.
#
# Abort if we cannot reach the repository root — otherwise the commands
# below would write/commit in whatever directory we happen to be in.
cd "$script_dir/.." || exit 1
python -c 'help("smart_open")' > help.txt
git commit help.txt -m "updated help.txt"
|
#!/bin/sh
# Watchdog: if anything appears in the USB "detect" drop folder, lock the
# screen (and, for regular files, first run the compromise handler), then
# wipe the folder's contents.
DETECT='/media/sageadmin/95B3-978B/Work/Dropbox/ResearchUMN/Unsorted/detect'

# Fixed: the original used `[ -d path/* ]` / `[ -f path/* ]`, which breaks
# ("too many arguments") as soon as the glob expands to more than one entry.
# Iterate over the glob instead and act on the first match of each kind.
for entry in "$DETECT"/*; do
    [ -e "$entry" ] || continue   # glob matched nothing
    if [ -d "$entry" ]; then
        gnome-screensaver-command -l
        rm -rf "$DETECT"/*
        break
    fi
done

for entry in "$DETECT"/*; do
    [ -e "$entry" ] || continue
    if [ -f "$entry" ]; then
        /home/sageadmin/Documents/passCompro.sh
        gnome-screensaver-command -l
        rm -rf "$DETECT"/*
        break
    fi
done
|
python3 train_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n1/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n1.txt" --eeg_eval_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/eval_list_n1.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n1.txt" --eog_train_data "" --eog_eval_data "" --eog_test_data "" --emg_train_data "" --emg_eval_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n1/' --dropout_keep_prob 0.8 --num_filter 1000
python3 test_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n1/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n1.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n1.txt" --eog_train_data "" --eog_test_data "" --emg_train_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n1/' --dropout_keep_prob 0.8 --num_filter 1000
python3 train_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n2/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n2.txt" --eeg_eval_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/eval_list_n2.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n2.txt" --eog_train_data "" --eog_eval_data "" --eog_test_data "" --emg_train_data "" --emg_eval_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n2/' --dropout_keep_prob 0.8 --num_filter 1000
python3 test_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n2/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n2.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n2.txt" --eog_train_data "" --eog_test_data "" --emg_train_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n2/' --dropout_keep_prob 0.8 --num_filter 1000
python3 train_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n3/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n3.txt" --eeg_eval_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/eval_list_n3.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n3.txt" --eog_train_data "" --eog_eval_data "" --eog_test_data "" --emg_train_data "" --emg_eval_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n3/' --dropout_keep_prob 0.8 --num_filter 1000
python3 test_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n3/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n3.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n3.txt" --eog_train_data "" --eog_test_data "" --emg_train_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n3/' --dropout_keep_prob 0.8 --num_filter 1000
python3 train_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n4/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n4.txt" --eeg_eval_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/eval_list_n4.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n4.txt" --eog_train_data "" --eog_eval_data "" --eog_test_data "" --emg_train_data "" --emg_eval_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n4/' --dropout_keep_prob 0.8 --num_filter 1000
python3 test_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n4/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n4.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n4.txt" --eog_train_data "" --eog_test_data "" --emg_train_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n4/' --dropout_keep_prob 0.8 --num_filter 1000
python3 train_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n5/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n5.txt" --eeg_eval_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/eval_list_n5.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n5.txt" --eog_train_data "" --eog_eval_data "" --eog_test_data "" --emg_train_data "" --emg_eval_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n5/' --dropout_keep_prob 0.8 --num_filter 1000
# Fold n5: the matching training run happened earlier in this script; run its test pass.
python3 test_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg/n5/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/train_list_n5.txt" --eeg_test_data "../../data_processing/tf_data/cnn_filterbank_eval_eeg/test_list_n5.txt" --eog_train_data "" --eog_test_data "" --emg_train_data "" --emg_test_data "" --out_dir './cnn1d_sleep_357_1000_(08)_eval_1chan/n5/' --dropout_keep_prob 0.8 --num_filter 1000

# Folds n6..n20 ran identical train+test command pairs differing only in the fold
# id; parameterize by fold instead of repeating each 600-character command line.
fb_root="../dnn-filterbank/dnn_filterbank_sleep_20_512_256_512(1)_eeg"
data_root="../../data_processing/tf_data/cnn_filterbank_eval_eeg"
out_root="./cnn1d_sleep_357_1000_(08)_eval_1chan"
for i in $(seq 6 20); do
    fold="n$i"
    # Train with pretrained EEG filterbank, then evaluate on the held-out test list.
    python3 train_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "$fb_root/$fold/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "$data_root/train_list_$fold.txt" --eeg_eval_data "$data_root/eval_list_$fold.txt" --eeg_test_data "$data_root/test_list_$fold.txt" --eog_train_data "" --eog_eval_data "" --eog_test_data "" --emg_train_data "" --emg_eval_data "" --emg_test_data "" --out_dir "$out_root/$fold/" --dropout_keep_prob 0.8 --num_filter 1000
    python3 test_cnn1d_eval_gpu0.py --eeg_pretrainedfb_path "$fb_root/$fold/filterbank.mat" --eog_pretrainedfb_path "" --emg_pretrainedfb_path "" --eeg_train_data "$data_root/train_list_$fold.txt" --eeg_test_data "$data_root/test_list_$fold.txt" --eog_train_data "" --eog_test_data "" --emg_train_data "" --emg_test_data "" --out_dir "$out_root/$fold/" --dropout_keep_prob 0.8 --num_filter 1000
done
|
#!/bin/bash
# Acquire one page from the default scanner and pipe the raw PNM stream
# straight into a JPEG encoder (output goes to stdout).
# Example of viewing a previously saved result: cat ~/Pictures/image.jpg
resolution=600
scanimage --resolution="$resolution" | pnmtojpeg
|
#!/bin/bash
# Download and unpack the SST and Jigsaw toxic-comment datasets; the IMDB
# section is kept but disabled, matching the original script.
mkdir -p sst_data
mkdir -p toxic_data
mkdir -p imdb_data

echo "process sst"
############## prepare the sst
# source: https://nlp.stanford.edu/sentiment/
data=trainDevTestTrees_PTB.zip
# FIX: the stray -O flag was removed. --output already names the destination
# file; combining -o and -O makes curl expect a second URL for the extra
# output option.
curl --output sst_data/$data https://nlp.stanford.edu/sentiment/$data
unzip -o sst_data/$data -d sst_data/
rm -f sst_data/$data

echo "process toxic"
############## prepare the toxic
# source: https://www.kaggle.com/c/jigsaw-toxic-comment-classification-challenge
data=jigsaw-toxic-comment-classification-challenge.zip
mkdir -p toxic_data/data/
curl --output toxic_data/data/$data "https://raw.githubusercontent.com/alm0st907/ursotoxic/master/jigsaw-toxic-comment-classification-challenge.zip"
unzip -o toxic_data/data/$data -d toxic_data/data/
#unzip -o toxic_data/test.csv.zip -d toxic_data/
#unzip -o toxic_data/train.csv.zip -d toxic_data/
rm -fr toxic_data/data/$data

echo "process imdb"
############## prepare the imdb
# source: https://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz
#data=aclImdb_v1.tar.gz
#curl --output imdb_data/$data https://ai.stanford.edu/~amaas/data/sentiment/$data
#tar -xf imdb_data/$data -C imdb_data/
#rm -f imdb_data/$data
|
package com.krrrr38.mackerel4s.serializer
import com.krrrr38.mackerel4s.model._
import org.json4s.JsonAST.JString
import org.json4s.jackson.{ JsonMethods, Serialization }
import org.scalatest.{ FunSpec, Matchers }
/** Round-trip serialization tests for every [[GraphUnitType]] variant. */
class GraphUnitTypeSerializerSpec extends FunSpec with Matchers {
  implicit val formats = MackerelSerializer.FORMATS

  /**
   * Serialize the wrapped unit type, assert the emitted JSON string literal,
   * then parse it back and assert the round-trip yields the same value.
   */
  private def assertRoundTrip(unitType: GraphUnitType, expected: String): Unit = {
    val jsonStr = Serialization.write(GraphUnitTypeWrapper(unitType))
    val json = JsonMethods.parse(jsonStr)
    (json \ "typ") shouldBe JString(expected)
    json.extract[GraphUnitTypeWrapper].typ shouldBe unitType
  }

  describe("GraphUnitType") {
    // One (value, wire-format string) pair per variant; a test is generated for
    // each, replacing six previously copy-pasted test bodies.
    val cases = Seq(
      GraphUnitTypeFloat -> "float",
      GraphUnitTypeInteger -> "integer",
      GraphUnitTypePercentage -> "percentage",
      GraphUnitTypeBytes -> "bytes",
      GraphUnitTypeBytesSec -> "bytes/sec",
      GraphUnitTypeIOPS -> "iops"
    )
    cases.foreach { case (unitType, expected) =>
      it(s"success to serialize/deserialize `$expected`") {
        assertRoundTrip(unitType, expected)
      }
    }
  }

  case class GraphUnitTypeWrapper(typ: GraphUnitType)
}
|
import Radium from 'radium';

import Banner from './component';

// Wrap the presentational Banner component with Radium so its inline styles
// gain support for :hover/media-query style objects.
const StyledBanner = Radium(Banner);

export default StyledBanner;
|
# collaide/repository-manager
# Factory producing Group records whose names are unique per build,
# generated from a monotonically increasing sequence.
FactoryBot.define do
  factory :group do
    # Yields "Group 1", "Group 2", ... so parallel builds never collide on name.
    sequence(:name) { |n| "Group #{ n }" }
  end
end
|
package eu.itdc.internetprovider.service.dto;
/**
 * Role identifiers carried in the service DTO layer.
 *
 * <p>The {@code ROLE_} prefix suggests these map to Spring Security granted
 * authorities — TODO confirm against the security configuration.
 */
public enum RoleTypeEnum {
    ROLE_ADMIN, ROLE_MODERATOR, ROLE_CUSTOMER
}
|
/*
* Copyright 2020 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.jbcsrc;
import com.google.template.soy.error.ErrorReporter;
import com.google.template.soy.exprtree.FunctionNode;
import com.google.template.soy.exprtree.MethodCallNode;
import com.google.template.soy.jbcsrc.restricted.Expression;
import com.google.template.soy.jbcsrc.restricted.JbcSrcPluginContext;
import com.google.template.soy.jbcsrc.restricted.SoyExpression;
import com.google.template.soy.plugin.internal.JavaPluginExecContext;
import com.google.template.soy.plugin.java.restricted.SoyJavaSourceFunction;
import com.google.template.soy.shared.restricted.SoySourceFunctionMethod;
import com.google.template.soy.types.SoyTypeRegistry;
import java.util.List;
import javax.annotation.Nullable;
/** Compiles method and function calls. */
/**
 * Compiles method and function calls backed by {@link SoyJavaSourceFunction}
 * plugins into {@link SoyExpression}s, delegating the actual bytecode
 * generation to {@code JbcSrcValueFactory}.
 */
final class JavaSourceFunctionCompiler {
  private final SoyTypeRegistry typeRegistry;
  private final ErrorReporter errorReporter;

  JavaSourceFunctionCompiler(SoyTypeRegistry typeRegistry, ErrorReporter errorReporter) {
    this.typeRegistry = typeRegistry;
    this.errorReporter = errorReporter;
  }

  /**
   * Compile the given method call to a {@link SoyExpression}
   *
   * @param node The AST node being compiled
   * @param method The method object to invoke
   * @param args The compiled arguments, position 0 is the {@code receiver} of the method
   * @param parameters The parameters for accessing plugin instances and the {@link
   *     JbcSrcPluginContext}, if {@code null} then we are in constant context.
   * @param detacher Used to handle detach/reattach points for not-yet-resolved values.
   * @return a SoyExpression with the result of the method call.
   */
  SoyExpression compile(
      MethodCallNode node,
      SoySourceFunctionMethod method,
      List<SoyExpression> args,
      @Nullable TemplateParameterLookup parameters,
      ExpressionDetacher detacher) {
    return compile(
        JavaPluginExecContext.forMethodCallNode(node, method), args, parameters, detacher);
  }

  /**
   * Compile the given function call to a {@link SoyExpression}
   *
   * @param node The AST node being compiled
   * @param function The function object to invoke
   * @param args The compiled arguments to the function call (unlike the method overload,
   *     there is no receiver)
   * @param parameters The parameters for accessing plugin instances and the {@link
   *     JbcSrcPluginContext}, if {@code null} then we are in constant context.
   * @param detacher Used to handle detach/reattach points for not-yet-resolved values.
   * @return a SoyExpression with the result of the function call.
   */
  SoyExpression compile(
      FunctionNode node,
      SoyJavaSourceFunction function,
      List<SoyExpression> args,
      @Nullable TemplateParameterLookup parameters,
      ExpressionDetacher detacher) {
    return compile(
        JavaPluginExecContext.forFunctionNode(node, function), args, parameters, detacher);
  }

  /** Shared implementation for both the method-call and function-call entry points. */
  private SoyExpression compile(
      JavaPluginExecContext context,
      List<SoyExpression> args,
      @Nullable TemplateParameterLookup parameters,
      ExpressionDetacher detacher) {
    return new JbcSrcValueFactory(
            context,
            // parameters is null when we are in a constant context.
            // In that case, supply a stub context whose accessors all throw:
            // a pure/constant expression must never read contextual data.
            parameters == null
                ? new JbcSrcPluginContext() {
                  private Expression error() {
                    throw new UnsupportedOperationException(
                        "Cannot access contextual data from a pure context");
                  }

                  @Override
                  public Expression getBidiGlobalDir() {
                    return error();
                  }

                  @Override
                  public Expression getAllRequiredCssNamespaces(SoyExpression template) {
                    return error();
                  }

                  @Override
                  public Expression getAllRequiredCssPaths(SoyExpression template) {
                    return error();
                  }

                  @Override
                  public Expression getULocale() {
                    return error();
                  }
                }
                : parameters.getPluginContext(),
            // Plugin-instance lookup; likewise unavailable in constant context.
            pluginName -> {
              if (parameters == null) {
                throw new UnsupportedOperationException("Pure functions cannot have instances");
              }
              return parameters.getRenderContext().getPluginInstance(pluginName);
            },
            errorReporter,
            typeRegistry,
            detacher)
        .computeForJavaSource(args);
  }
}
|
#!/usr/bin/env bash
# Windows Server SCSI compliance test: exposes SPDK vhost-scsi targets to a
# Windows VM and runs the compliance PowerShell suite inside it over ssh.
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $testdir/../../..)
source $rootdir/test/common/autotest_common.sh
source $rootdir/test/vhost/common.sh
# Tested with windows vm with OS Name: Microsoft Windows Server 2012 R2 Datacenter
# and OS Version: 6.3.9600 N/A Build 9600
# In order to run this test with windows vm
# windows virtio scsi driver must be installed
WINDOWS_IMG="/home/sys_sgsw/windows_scsi_compliance/windows_vm_image.qcow2"
aio_file="$testdir/aio_disk"
ssh_pass=""
vm_num=1
keep_results_dir=false
rpc_py="$rootdir/scripts/rpc.py -s $(get_vhost_dir 0)/rpc.sock"

function usage() {
	[[ -n $2 ]] && (
		echo "$2"
		echo ""
	)
	echo "Windows Server scsi compliance test"
	echo "Usage: $(basename $1) [OPTIONS]"
	echo "  --vm-ssh-pass=PASSWORD    Text password for the VM"
	echo "  --vm-image-path           Path of windows image"
	echo "  --keep_results            Do not delete dir with results"
	exit 0
}

while getopts 'h-:' optchar; do
	case "$optchar" in
		-)
			case "$OPTARG" in
				help) usage $0 ;;
				vm-ssh-pass=*) ssh_pass="${OPTARG#*=}" ;;
				vm-image-path=*) WINDOWS_IMG="${OPTARG#*=}" ;;
				keep_results*) keep_results_dir=true ;;
			esac
			;;
		h) usage $0 ;;
		*) usage $0 "Invalid argument '$OPTARG'" ;;
	esac
done

# Clean up scratch disk and partial results on any failure.
trap 'rm -f $aio_file; rm -rf $testdir/results; error_exit' SIGINT SIGTERM ERR

VM_PASSWORD="$ssh_pass"
mkdir -p $testdir/results
# 512 MiB backing file for the AIO bdev.
dd if=/dev/zero of=$aio_file bs=1M count=512

timing_enter vhost_run
vhost_run 0
$rpc_py bdev_nvme_set_hotplug -e
$rpc_py bdev_malloc_create 256 4096 -b Malloc0
$rpc_py bdev_aio_create $aio_file Aio0 512
$rpc_py bdev_get_bdevs
$rpc_py vhost_create_scsi_controller naa.vhost.1
$rpc_py vhost_scsi_controller_add_target naa.vhost.1 0 Nvme0n1
$rpc_py vhost_scsi_controller_add_target naa.vhost.1 1 Malloc0
# TODO: Currently there is bug for aio device. Disable this test
# $rpc_py vhost_scsi_controller_add_target naa.vhost.1 2 Aio0
timing_exit vhost_run

timing_enter start_vm
vm_setup --force=1 --disk-type=spdk_vhost_scsi --os=$WINDOWS_IMG --disks=vhost --memory=4096
# FIX: was a hard-coded vm_run "1"; use $vm_num consistently with the rest of
# the script so changing vm_num updates every step.
vm_run "$vm_num"
# Wait until VM goes up
vm_wait_for_boot "300" "$vm_num"
timing_exit start_vm

vm_scp "$vm_num" $testdir/windows_scsi_compliance.ps1 127.0.0.1:/cygdrive/c/SCSI/
vm_sshpass "$vm_num" "$ssh_pass" "cd /cygdrive/c/SCSI; powershell.exe -file windows_scsi_compliance.ps1"
vm_scp "$vm_num" 127.0.0.1:/cygdrive/c/SCSI/WIN_SCSI_* $testdir/results/
dos2unix $testdir/results/WIN_SCSI_*.log

notice "Kill vm 1"
vm_kill "$vm_num"
notice "Kill spdk"
vhost_kill 0
notice "Remove $aio_file"
rm -f $aio_file

python3 $testdir/windows_scsi_compliance.py
if ! $keep_results_dir; then
	rm -rf $testdir/results
fi
|
-- Migration: remove the description column from crates.
-- NOTE(review): dropping a column discards its data irreversibly — confirm the
-- column is no longer read before applying (presumably superseded elsewhere).
ALTER TABLE crates DROP COLUMN description;
// controllers/GameControllers/changeGameStatus.js (QuentinQuero/among_us_irl-back)
'use strict';

const gameService = require('../../services/GameServices');

/**
 * Express handler that updates the status of the game identified by
 * `req.body.gameId` via the game service, then reports the outcome as JSON.
 *
 * Response body: `{ status: 'success'|'error', message: string }`.
 * The `next` parameter is kept for Express middleware signature compatibility.
 *
 * @param {import('express').Request} req - expects `body.gameId`
 * @param {import('express').Response} res
 * @param {Function} next
 */
const changeGameStatus = function (req, res, next) {
    gameService.updateGameStatus(req.body.gameId)
        .then(() => {
            res.json({
                status: 'success',
                message: 'game status updated'
            });
        })
        .catch((error) => {
            // FIX: Error instances have no enumerable own properties, so
            // `message: error` serialized to `{}` and hid the failure reason.
            // Send the human-readable message text instead.
            res.json({
                status: 'error',
                message: error instanceof Error ? error.message : error
            });
        });
};

module.exports = changeGameStatus;
|
#!/bin/bash
# Rebuild the static site with Hugo after wiping the previous build output.
if command -v hugo >/dev/null 2>&1; then
	rm -rf public
	echo ".................."
	echo ". Public deleted ."
	echo ".................."
	# Capture the current git hash before building.
	bash githash.sh
	hugo --cleanDestinationDir --forceSyncStatic --gc --ignoreCache --minify --enableGitInfo
	echo ".............."
	echo ". Hugo built ."
	echo ".............."
else
	echo 'Hugo not installed.';
fi
|
# source/static/methods.py
from io import BytesIO
from os import path, mkdir
import requests
from core import config
from base64 import encodebytes, decodebytes
from uuid import uuid4
def webp_to_png(img: BytesIO) -> str:
    """Convert an image via the remote w2j conversion service and save it locally.

    The payload is sent base64-encoded to ``config['w2j_url']`` and the decoded
    response is written under ``tmp/``.

    Note: despite the function name, the output file is written with a
    ``.jpeg`` extension and the service key is ``w2j`` (webp-to-jpeg?) —
    TODO confirm the intended output format.

    Args:
        img: Binary stream containing the source image.

    Returns:
        Relative path of the converted file inside the local ``tmp`` directory.

    Raises:
        requests.HTTPError: if the conversion service responds with an error
            status (previously the error body was silently written to disk).
    """
    if not path.exists('tmp'):
        mkdir('tmp')
    # The service expects the image as base64 text inside the JSON payload.
    # Use a separate name instead of shadowing the `img` parameter.
    encoded = encodebytes(img.read()).decode('UTF-8')
    r = requests.post(config['w2j_url'], json={
        'key': config['w2j_key'],
        'image': encoded
    })
    r.raise_for_status()
    filename = f'temp{uuid4().hex}.jpeg'  # I will optimize it later. Probably.
    out_path = path.join('tmp', filename)
    # Context manager guarantees the handle is closed (the original
    # open(...).write(...) leaked the file object until GC).
    with open(out_path, 'wb') as f:
        f.write(decodebytes(r.text.encode()))
    return out_path
|
# frozen_string_literal: true
require "minitest/autorun"
require "vvdc/lexer"
# Unit tests for Vvdc::Lexer: symbol, number, identifier, string and keyword
# tokenisation, with and without separating whitespace.
class LexerTest < Minitest::Test
  # Tokenise +source+ using a freshly constructed lexer.
  def scan(source)
    Vvdc::Lexer.new.scan(source)
  end

  def test_simple_tokens
    source = "! + - * ; ( ) { } = < >"
    assert_equal source.delete(" ").chars, scan(source).map(&:literal)
  end

  def test_simple_tokens_no_spaces
    source = "!+-*;(){}=<>"
    assert_equal source.chars, scan(source).map(&:literal)
  end

  def test_combined_tokens
    source = "== != <= >="
    assert_equal source.split(" "), scan(source).map(&:literal)
  end

  def test_combined_tokens_no_spaces
    # "===" must split into "==" then "=", not three "=".
    expected = ["==", "=", "!=", "<=", ">="]
    assert_equal expected, scan("===!=<=>=").map(&:literal)
  end

  def test_numbers_identifiers_and_strings
    tokens = scan('1337 identifier otheridentifier "string with spaces"')
    assert_equal ["1337", "identifier", "otheridentifier", "string with spaces"],
                 tokens.map(&:literal)
    assert_equal %i[number identifier identifier string], tokens.map(&:type)
  end

  def test_numbers_identifiers_mixed_with_symbols
    tokens = scan("42<16==banana5 32")
    assert_equal %w[42 < 16 == banana5 32], tokens.map(&:literal)
    assert_equal %i[number symbol number symbol identifier number], tokens.map(&:type)
  end

  def test_keywords
    source = "if print while return let fn notakeyword"
    tokens = scan(source)
    assert_equal source.split(" "), tokens.map(&:literal)
    assert_equal %i[keyword_if keyword_print keyword_while keyword_return
                    keyword_let keyword_fn identifier], tokens.map(&:type)
  end

  def test_newlines
    tokens = scan("if 42\nvariable\n\n\"mystring\"")
    assert_equal %w[if 42 variable mystring], tokens.map(&:literal)
    assert_equal %i[keyword_if number identifier string], tokens.map(&:type)
  end
end
|
#!/bin/bash
# Build the locust-tasks image with Cloud Build, then confirm it is present
# in the project's container registry.
project_id="$(gcloud config get-value project)"
image="gcr.io/${project_id}/locust-tasks:latest"

gcloud builds submit --tag "$image" docker-image

# Sanity check: the freshly pushed image should appear in the listing.
gcloud container images list | grep locust-tasks
#!/bin/bash
# Release script for lenslockedbr.com: packs the local source tree, uploads
# it to the production host, fetches Go deps, builds remotely, refreshes
# assets/views/Caddyfile, and restarts the app and Caddy services.

# Change to the directory with our code that we plan to work from
cd "$GOPATH/src/lenslockedbr.com"
echo "===== Releasing lenslockedbr.com ====="
echo " Deleting the local binary if it exists (so it isn't uploaded)..."
# -f so the script does not complain when no .exe files are present
rm -f *.exe
echo " Done!"
sleep 2
echo " Packing the code"
cd "$GOPATH/src/"
rm -f lenslockedbr.com.tar.gz
# FIX: the original line ended with a stray backslash, which continued the
# command onto the next line and appended "sleep 2" to the tar arguments
# (so tar tried to archive files named "sleep" and "2", and the sleep never ran).
tar -cvzf lenslockedbr.com.tar.gz --exclude='lenslockedbr.com/.git/*' --exclude='lenslockedbr.com/images/*' --exclude='lenslockedbr.com/*.exe' lenslockedbr.com
sleep 2
echo " Deleting existing code..."
ssh root@leandr0.net -p 2233 " rm -rf /root/go/src/lenslockedbr.com"
echo " Code deleted successfully!"
sleep 2
echo " Uploading and extract code..."
cd "$GOPATH/src/"
scp -P 2233 lenslockedbr.com.tar.gz root@leandr0.net:/root/go/src/
ssh root@leandr0.net -p 2233 " tar -zxvf /root/go/src/lenslockedbr.com.tar.gz -C /root/go/src/"
#echo " Code uploaded successfully!"
sleep 2
echo " Go getting deps..."
# Install each third-party dependency on the remote host (pre-modules workflow).
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get golang.org/x/crypto/bcrypt"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get github.com/gorilla/mux"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get github.com/gorilla/schema"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get github.com/gorilla/csrf"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get github.com/lib/pq"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get github.com/jinzhu/gorm"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get gopkg.in/mailgun/mailgun-go.v1"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get github.com/dropbox/dropbox-sdk-go-unofficial/dropbox"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get github.com/dropbox/dropbox-sdk-go-unofficial/dropbox/files"
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; /usr/local/go/bin/go get golang.org/x/oauth2"
sleep 2
echo " Building the code on remote server..."
ssh root@leandr0.net -p 2233 "export GOPATH=/root/go; cd /root/app; /usr/local/go/bin/go build -o ./server /root/go/src/lenslockedbr.com/*.go"
echo " Code built successfully!..."
sleep 2
echo " Moving assets..."
ssh root@leandr0.net -p 2233 " cd /root/app; cp -R /root/go/src/lenslockedbr.com/assets ."
echo " Assets moved successfully!..."
echo " Moving views..."
ssh root@leandr0.net -p 2233 " cd /root/app; cp -R /root/go/src/lenslockedbr.com/views ."
echo " Views moved successfully!..."
echo " Moving Caddyfile..."
ssh root@leandr0.net -p 2233 " cd /root/app; cp /root/go/src/lenslockedbr.com/Caddyfile ."
echo " Caddyfile moved successfully!..."
sleep 2
echo " Restarting the server..."
ssh root@leandr0.net -p 2233 " service leandr0.net restart"
echo " Server restarted successfully!..."
sleep 2
echo " Restarting Caddy server..."
ssh root@leandr0.net -p 2233 " service caddy restart"
echo " Caddy restarted successfully!..."
echo "===== Done releasing lenslockedbr.com ====="
|
// Bundles the abbreviated string helpers from ./fns into a single exported
// lookup object. The names are presumably abbreviations of common string
// operations (cat, idx, len, ...) — confirm against ./fns.
// NOTE: the property order below is kept as-is because object key order is
// observable via Object.keys()/enumeration.
import { cat, idx, len, low, pl, pr, rpl, rpt, sbs, slc, trim, up } from './fns';
export const stringFns = {
cat,
idx,
len,
low,
pl,
pr,
rpt,
rpl,
sbs,
slc,
trim,
up,
};
|
#!/bin/bash
# Extract proprietary blobs for the htc a11 into vendor/htc/a11/proprietary,
# either from a connected device (via adb pull) or from an expanded ROM tree.
set -e
export DEVICE=a11
export VENDOR=htc
# $1 (optional): path to an expanded ROM; default is pulling from the device.
if [ $# -eq 0 ]; then
SRC=adb
else
if [ $# -eq 1 ]; then
SRC=$1
else
echo "$0: bad number of arguments"
echo ""
echo "usage: $0 [PATH_TO_EXPANDED_ROM]"
echo ""
echo "If PATH_TO_EXPANDED_ROM is not specified, blobs will be extracted from"
echo "the device using adb pull."
exit 1
fi
fi
BASE=../../../vendor/$VENDOR/$DEVICE/proprietary
# Start from a clean output tree.
rm -rf $BASE/*
# Each non-comment, non-blank line of proprietary-files.txt names one blob;
# a leading "-" is stripped before the path is used.
if [ -f ../$DEVICE/proprietary-files.txt ]; then
for FILE in `egrep -v '(^#|^$)' ../$DEVICE/proprietary-files.txt`; do
FILE=`echo ${FILE[0]} | sed -e "s/^-//g"`
echo "Extracting /system/$FILE ..."
DIR=`dirname $FILE`
if [ ! -d $BASE/$DIR ]; then
mkdir -p $BASE/$DIR
fi
if [ "$SRC" = "adb" ]; then
adb pull /system/$FILE $BASE/$FILE
else
cp $SRC/system/$FILE $BASE/$FILE
fi
done
fi
# Regenerate the vendor makefiles for the freshly extracted blobs.
../$DEVICE/setup-makefiles.sh
|
package com.infamous.framework.converter;

/**
 * Generic value converter that delegates the actual work to a
 * {@link ConvertProcessor}.
 *
 * @param <U> source type
 * @param <T> target type
 */
public class Converter<U, T> {

    // Processor used by the single-argument overload. Made final: it is only
    // ever assigned in the constructor.
    private final ConvertProcessor<U, T> m_convertProcessor;

    public Converter(ConvertProcessor<U, T> convertProcessor) {
        m_convertProcessor = convertProcessor;
    }

    /** Converts {@code value} using the processor supplied at construction time. */
    public T converter(U value) {
        return m_convertProcessor.convert(value);
    }

    /**
     * Converts {@code value} using the given processor; the instance's default
     * processor is deliberately ignored by this overload.
     */
    public T converter(U value, ConvertProcessor<U, T> convertProcessor) {
        return convertProcessor.convert(value);
    }
}
|
#!/bin/bash
# Create a TLS certificate/key pair for a Consul client agent.
consul tls cert create -client
|
<gh_stars>10-100
/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */
/*
VampyHost
Use Vamp audio analysis plugins in Python
<NAME> and <NAME>
Centre for Digital Music, Queen Mary, University of London
Copyright 2008-2015 Queen Mary, University of London
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the names of the Centre for
Digital Music; Queen Mary, University of London; and the authors
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in this Software without prior written
authorization.
*/
/*
StringConversion: A couple of type safe conversion utilities between
Python types and C++ strings.
*/
#ifndef VAMPYHOST_STRING_CONVERSION_H
#define VAMPYHOST_STRING_CONVERSION_H
#include <Python.h>
#include <string>
class StringConversion
{
public:
    StringConversion() {}
    ~StringConversion() {}

    // Convert a C++ string to a Python string object.
    // Returns a NEW reference; the caller owns it.
    PyObject *string2py(const std::string &s) {
#if PY_MAJOR_VERSION < 3
        return PyString_FromString(s.c_str());
#else
        return PyUnicode_FromString(s.c_str());
#endif
    }

    // Convert a Python string object to a C++ string.
    // Returns an empty string if the conversion fails.
    std::string py2string(PyObject *obj) {
#if PY_MAJOR_VERSION < 3
        char *cstr = PyString_AsString(obj);
        if (!cstr) return std::string();
        else return cstr;
#else
        // PyUnicode_AsUTF8String returns a NEW bytes object; the original
        // code never released it, leaking one object per call.
        PyObject *uobj = PyUnicode_AsUTF8String(obj);
        if (!uobj) return std::string();
        char *cstr = PyBytes_AsString(uobj);
        if (!cstr) {
            Py_DECREF(uobj);
            return std::string();
        }
        // Copy the buffer before dropping our reference: cstr points into
        // uobj's internal storage.
        std::string result(cstr);
        Py_DECREF(uobj);
        return result;
#endif
    }
};
#endif
|
<gh_stars>10-100
package org.multibit.hd.ui.views.wizards.exit;
/**
* <p>Enum to provide the following to exit wizard model:</p>
* <ul>
* <li>State identification</li>
* </ul>
*
* @since 0.0.1
*
*/
public enum ExitState {
// Wizard panel states; the names describe the screen shown at each step
// (presumably: choose a reset option, confirm exit, switch wallet — confirm
// against the exit wizard views).
SELECT_RESET_OPTION,
CONFIRM_EXIT,
SWITCH_WALLET,
// End of enum
;
}
|
#!/bin/bash
while getopts ":c:t:z:f:h" opt; do
case $opt in
h)
echo "-c Specify country" >&2
echo >&2
echo "-t Specify the tag to filter. Default: place" >&2
echo >&2
echo "-z Specify the maximum zoom for tippecanoe" >&2
echo >&2
echo "-f Specify the output folder" >&2
echo >&2
echo "-h Prints this help file" >&2
exit 0
;;
\?)
echo "Invalid option: -$OPTARG" >&2
exit 1
;;
:)
echo "Option -$OPTARG requires an argument." >&2
exit 1
;;
c)
COUNTRY=$OPTARG
;;
t)
TAG=$OPTARG
;;
z)
MAX_ZOOM=$OPTARG
;;
f)
FOLDER=$OPTARG
;;
esac
done
if [ -z ${FOLDER+x} ]; then FOLDER="/root/micro-data-service/data"; fi
if [ -z ${TAG+x} ]; then TAG="place"; fi
if [ -z ${MAX_ZOOM+x} ]; then MAX_ZOOM=18; fi
# Download QA-Tile
# ================
if [ ! -f $COUNTRY.mbtiles ]; then
BASE=https://s3.amazonaws.com/mapbox/osm-qa-tiles/latest
if [ "$COUNTRY" = "planet" ]; then
URL=$BASE.planet.mbtiles.gz
else
URL=$BASE.country/$COUNTRY.mbtiles.gz
fi
wget $URL -O $COUNTRY.mbtiles.gz
gzip --force --decompress $COUNTRY.mbtiles.gz
fi
# OSM-Tag-Stats
# =============
FILTER="[
\"all\",
[\"has\", \"$TAG\"],
[\"==\", \"@type\", \"node\"]
]"
echo $FILTER > $TAG.json
if [ ! -f $TAG-$COUNTRY.geojson ]; then
rm -r -f tmp-osm-tag-stats/
osm-tag-stats \
--geojson=$TAG-$COUNTRY.geojson \
--mbtiles=$COUNTRY.mbtiles \
--filter=$TAG.json
fi
# Remove SQL Dumps
# ================
rm -f $TAG-$COUNTRY.dump
# Tippecanoe
# ==========
for ZOOM in $(seq 0 $MAX_ZOOM); do
tippecanoe \
--output=$TAG-$COUNTRY-z$ZOOM.mbtiles \
--force \
--minimum-zoom $ZOOM \
--maximum-zoom $ZOOM \
--full-detail $((16 - ZOOM + 16)) \
--no-line-simplification \
--no-feature-limit \
--no-tile-size-limit \
--no-polygon-splitting \
--no-clipping \
--no-duplication \
$TAG-$COUNTRY.geojson
# SQL Dump
# ========
sqlite3 $TAG-$COUNTRY-z$ZOOM.mbtiles '.dump' >> $TAG-$COUNTRY.dump
rm $TAG-$COUNTRY-z$ZOOM.mbtiles
done
# Merge MBTiles
# =============
rm -f $TAG-$COUNTRY.mbtiles
sqlite3 $TAG-$COUNTRY.mbtiles < $TAG-$COUNTRY.dump
# Clean Files
# ===========
rm -f -r tmp-osm-tag-stats/
rm -f $TAG-$COUNTRY.dump
rm -f $COUNTRY.mbtiles.gz
# Upload to AWS
# =============
aws s3 cp $TAG-$COUNTRY.mbtiles s3://data.osmcanada.ca/$TAG-$COUNTRY.mbtiles
aws s3 cp $TAG-$COUNTRY.geojson s3://data.osmcanada.ca/$TAG-$COUNTRY.geojson
# Copy to Folder
# ==============
# The merged mbtiles file is created in the current working directory above,
# so copy it from there. The old hard-coded /tmp/ prefix only worked when the
# script happened to be run from inside /tmp.
cp $TAG-$COUNTRY.mbtiles $FOLDER/$TAG-$COUNTRY.mbtiles
|
class Debug:
    """Fallback handlers that report when a real debug hook is missing."""

    @staticmethod
    def if_not_func(expression: bool) -> bool:
        """Warn that no IF-NOT handler is installed, then negate ``expression``."""
        Debug.perror("IF NOT function not defined")
        inverted = not expression
        return inverted

    @staticmethod
    def gen_else_func(left: str = '', op: str = '', right: str = '') -> str:
        """Warn that no ELSE handler is installed, then join the three parts."""
        Debug.perror("ELSE function not defined")
        return left + op + right

    @staticmethod
    def perror(message: str):
        """Emit a formatted error line on stdout."""
        print(f"Error: {message}")
# Quick smoke test / example usage of the debugging utility: each call first
# prints its "function not defined" notice, then print() shows the result.
print(Debug.if_not_func(True)) # prints the error notice, then False
print(Debug.gen_else_func('Hello', ' ', 'World')) # prints the error notice, then Hello World
<gh_stars>0
package com.tracy.competition.domain.entity
import scala.beans.BeanProperty
/**
* @author Tracy
* @date 2021/2/9 1:13
*/
class File extends Serializable {

  @BeanProperty var fileId: String = _
  @BeanProperty var filePath: String = _
  @BeanProperty var fileName: String = _
  @BeanProperty var competition: Competition = _
  @BeanProperty var notification: Notification = _

  /** Human-readable dump of every field, identical to the original
    * concatenation-based format. */
  override def toString: String =
    s"File{fileId='$fileId', filePath='$filePath', fileName='$fileName', competition=$competition, notification=$notification}"
}
|
#!/bin/bash -eux
# prepare binary_name/release_tag/release_asset_name

# Binary name defaults to the repository name; INPUT_BINARY_NAME overrides it.
# (The "x${VAR} != x" idiom tests for a non-empty value.)
BINARY_NAME=$(basename ${GITHUB_REPOSITORY})
if [ x${INPUT_BINARY_NAME} != x ]; then
BINARY_NAME=${INPUT_BINARY_NAME}
fi
# Release tag defaults to the last path component of GITHUB_REF (e.g. v1.2.3).
RELEASE_TAG=$(basename ${GITHUB_REF})
if [ ! -z "${INPUT_RELEASE_TAG}" ]; then
RELEASE_TAG=${INPUT_RELEASE_TAG}
fi
# Asset name defaults to <binary>-<tag>-<goos>-<goarch>; INPUT_ASSET_NAME overrides.
RELEASE_ASSET_NAME=${BINARY_NAME}-${RELEASE_TAG}-${INPUT_GOOS}-${INPUT_GOARCH}
if [ ! -z "${INPUT_ASSET_NAME}" ]; then
RELEASE_ASSET_NAME=${INPUT_ASSET_NAME}
fi
# prompt error if non-supported event
# (same accepted events and same messages as before, expressed as a case)
case "${GITHUB_EVENT_NAME}" in
release|push|workflow_dispatch)
echo "Event: ${GITHUB_EVENT_NAME}"
;;
*)
echo "Unsupport event: ${GITHUB_EVENT_NAME}!"
exit 1
;;
esac
# execute pre-command if exist, e.g. `go get -v ./...`
if [ ! -z "${INPUT_PRE_COMMAND}" ]; then
eval ${INPUT_PRE_COMMAND}
fi
# binary suffix
EXT=''
if [ ${INPUT_GOOS} == 'windows' ]; then
EXT='.exe'
fi
# prefix for ldflags
LDFLAGS_PREFIX=''
if [ ! -z "${INPUT_LDFLAGS}" ]; then
LDFLAGS_PREFIX="-ldflags"
fi
# build
BUILD_ARTIFACTS_FOLDER=build-artifacts-$(date +%s)
mkdir -p ${INPUT_PROJECT_PATH}/${BUILD_ARTIFACTS_FOLDER}
cd ${INPUT_PROJECT_PATH}
if [[ "${INPUT_BUILD_COMMAND}" =~ ^make.* ]]; then
# start with make, assumes using make to build golang binaries, execute it directly
GOOS=${INPUT_GOOS} GOARCH=${INPUT_GOARCH} eval ${INPUT_BUILD_COMMAND}
if [ -f "${BINARY_NAME}${EXT}" ]; then
# assumes the binary will be generated in current dir, copy it for later processes
cp ${BINARY_NAME}${EXT} ${BUILD_ARTIFACTS_FOLDER}/
fi
else
GOOS=${INPUT_GOOS} GOARCH=${INPUT_GOARCH} ${INPUT_BUILD_COMMAND} -o ${BUILD_ARTIFACTS_FOLDER}/${BINARY_NAME}${EXT} ${INPUT_BUILD_FLAGS} ${LDFLAGS_PREFIX} "${INPUT_LDFLAGS}"
fi
# executable compression
if [ ! -z "${INPUT_EXECUTABLE_COMPRESSION}" ]; then
if [[ "${INPUT_EXECUTABLE_COMPRESSION}" =~ ^upx.* ]]; then
# start with upx, use upx to compress the executable binary
eval ${INPUT_EXECUTABLE_COMPRESSION} ${BUILD_ARTIFACTS_FOLDER}/${BINARY_NAME}${EXT}
else
echo "Unsupport executable compression: ${INPUT_EXECUTABLE_COMPRESSION}!"
exit 1
fi
fi
# prepare extra files
if [ ! -z "${INPUT_EXTRA_FILES}" ]; then
cd ${GITHUB_WORKSPACE}
cp -r ${INPUT_EXTRA_FILES} ${INPUT_PROJECT_PATH}/${BUILD_ARTIFACTS_FOLDER}/
cd ${INPUT_PROJECT_PATH}
fi
cd ${BUILD_ARTIFACTS_FOLDER}
ls -lha
# compress and package binary, then calculate checksum
if [ ${INPUT_DISABLE_PACKAGING^^} == 'TRUE' ]; then
MEDIA_TYPE='application/octet-stream'
RELEASE_ASSET_EXT=${EXT}
mv ${BINARY_NAME}${EXT} ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT}
else
RELEASE_ASSET_EXT='.tar.gz'
MEDIA_TYPE='application/gzip'
if [ ${INPUT_GOOS} == 'windows' ]; then
RELEASE_ASSET_EXT='.zip'
MEDIA_TYPE='application/zip'
( shopt -s dotglob; zip -vr ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT} * )
else
( shopt -s dotglob; tar cvfz ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT} * )
fi
fi
MD5_SUM=$(md5sum ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT} | cut -d ' ' -f 1)
SHA256_SUM=$(sha256sum ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT} | cut -d ' ' -f 1)
# prefix upload extra params
GITHUB_ASSETS_UPLOADR_EXTRA_OPTIONS=''
if [ ${INPUT_OVERWRITE^^} == 'TRUE' ]; then
GITHUB_ASSETS_UPLOADR_EXTRA_OPTIONS="-overwrite"
fi
# update binary and checksum
github-assets-uploader -f ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT} -mediatype ${MEDIA_TYPE} ${GITHUB_ASSETS_UPLOADR_EXTRA_OPTIONS} -repo ${GITHUB_REPOSITORY} -token ${INPUT_GITHUB_TOKEN} -tag ${RELEASE_TAG}
if [ ${INPUT_MD5SUM^^} == 'TRUE' ]; then
MD5_EXT='.md5'
MD5_MEDIA_TYPE='text/plain'
echo ${MD5_SUM} >${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT}${MD5_EXT}
github-assets-uploader -f ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT}${MD5_EXT} -mediatype ${MD5_MEDIA_TYPE} ${GITHUB_ASSETS_UPLOADR_EXTRA_OPTIONS} -repo ${GITHUB_REPOSITORY} -token ${INPUT_GITHUB_TOKEN} -tag ${RELEASE_TAG}
fi
if [ ${INPUT_SHA256SUM^^} == 'TRUE' ]; then
SHA256_EXT='.sha256'
SHA256_MEDIA_TYPE='text/plain'
echo ${SHA256_SUM} >${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT}${SHA256_EXT}
github-assets-uploader -f ${RELEASE_ASSET_NAME}${RELEASE_ASSET_EXT}${SHA256_EXT} -mediatype ${SHA256_MEDIA_TYPE} ${GITHUB_ASSETS_UPLOADR_EXTRA_OPTIONS} -repo ${GITHUB_REPOSITORY} -token ${INPUT_GITHUB_TOKEN} -tag ${RELEASE_TAG}
fi
|
#!/bin/sh
# Git pre-commit hook: run ktlint and Android lint; block the commit on failure.
echo "Running pre-commit checks..."
JAVA_HOME=$(/usr/libexec/java_home -v 1.8)
export JAVA_HOME
OUTPUT="/tmp/res"
# Capture stderr as well, so compiler/lint errors appear in the saved log.
./gradlew ktlint lintDebug --daemon > ${OUTPUT} 2>&1
EXIT_CODE=$?
if [ ${EXIT_CODE} -ne 0 ]; then
cat ${OUTPUT}
rm ${OUTPUT}
echo "Pre Commit Checks Failed. Please fix the above issues before committing"
exit ${EXIT_CODE}
else
rm ${OUTPUT}
echo "Pre Commit Checks Passed -- no problems found"
fi
#!/bin/bash
# ---------------------------
# Shell script to run cellSNP
# ---------------------------
# run cellSNP to genotype cells
# notes:
# - running cellSNP in mode 1
# - using .vcf file from best-performing option for genotyping step (matched bulk
# RNA-seq samples using bcftools)
# - requires merged BAM file and merged cell barcodes file from previous steps
# (doublets simulation scenario), and .vcf file from genotyping step
# for more details:
# - https://vireosnp.readthedocs.io/en/latest/genotype.html
# - https://github.com/single-cell-genetics/cellsnp-lite
# runtime: ~2 hours (with 10 cores)
# qsub -V -cwd -pe local 10 -l mem_free=5G,h_vmem=10G,h_fsize=100G run_cellSNP.sh
# arguments:
# $1: directory for runtimes
# $2: directory for timestamp files
# $3: number of threads
# $4: output directory
# $5: genotype directory
# $6: dataset name for simulation scenario
# $7: percentage of doublets for simulation scenario (formatted as e.g. "20pc")
# -----------------------------------
# start runtime
start=`date +%s`
# -----------------------------------
# note: vcf file needs to be uncompressed
# if still in "vcf.bgz" or "vcf.gz" format then uncompress first
# (for .bgz format, can rename to .gz then gunzip)
#
# flags: -s merged doublets BAM, -b merged cell barcodes, -O output directory,
# -R candidate SNP .vcf from the genotyping step, -p number of threads ($3),
# --minMAF/--minCOUNT filter candidate SNPs, --gzip compresses the output
cellsnp-lite \
-s $4/$6/doublets_sims/$7/bam_merged_doublets_$6_$7.bam \
-b $4/$6/doublets_sims/$7/barcodes_merged_$6_$7.tsv \
-O $4/$6/doublets_sims/$7/cellSNP \
-R $5/bcftools/bcftools_HGSOC_rehead.vcf \
-p $3 \
--minMAF=0.1 \
--minCOUNT=20 \
--gzip
# -----------------------------------
# end runtime
end=`date +%s`
runtime=`expr $end - $start`
# save runtime
mkdir -p $1/$6/doublets_sims/$7/cellSNP
echo runtime: $runtime seconds > $1/$6/doublets_sims/$7/cellSNP/runtime_cellSNP_$6_$7.txt
# -----------------------------------
# -----------------------------------
# save timestamp file (for Snakemake)
mkdir -p $2/$6/doublets_sims/$7/cellSNP
date > $2/$6/doublets_sims/$7/cellSNP/timestamp_cellSNP_$6_$7.txt
# -----------------------------------
|
package dummy
import (
"context"
"sort"
"squirreldb/types"
"sync"
)
// DiscardTSDB will write metrics to /dev/null.
type DiscardTSDB struct{}

// emptyResult is a MetricDataSet over zero metrics.
type emptyResult struct{}

// ReadIter return an empty result.
func (d DiscardTSDB) ReadIter(ctx context.Context, request types.MetricRequest) (types.MetricDataSet, error) {
return emptyResult{}, nil
}

// Write discard metrics.
func (d DiscardTSDB) Write(ctx context.Context, metrics []types.MetricData) error {
return nil
}

// Run signals readiness immediately, then blocks until the context is cancelled.
func (d DiscardTSDB) Run(ctx context.Context, readiness chan error) {
readiness <- nil
<-ctx.Done()
}

// Flush is a no-op.
func (d DiscardTSDB) Flush() {
}

// Next always reports false: there is nothing to iterate.
func (r emptyResult) Next() bool {
return false
}

// At must never be called, since Next never returns true.
func (r emptyResult) At() types.MetricData {
panic("At() shouldn't be called on emptyResult")
}

// Err always reports success.
func (r emptyResult) Err() error {
return nil
}
// MemoryTSDB store all value in memory. Only useful in unittest.
type MemoryTSDB struct {
mutex sync.Mutex // guards all fields below
Data map[types.MetricID]types.MetricData // stored metrics keyed by ID; lazily allocated on first Write
LogRequest bool // when true, record incoming requests in Reads/Writes
Reads []types.MetricRequest // requests seen by ReadIter (only when LogRequest is set)
Writes [][]types.MetricData // batches seen by Write (only when LogRequest is set)
}

// readIter walks the IDs of a single MetricRequest against a MemoryTSDB.
type readIter struct {
db *MemoryTSDB
current types.MetricData // value returned by At()
request types.MetricRequest
offset int // index of the next ID in request.IDs
}
// DumpData returns every metric currently stored, sorted by ascending MetricID.
func (db *MemoryTSDB) DumpData() []types.MetricData {
	db.mutex.Lock()
	defer db.mutex.Unlock()

	dump := make([]types.MetricData, 0, len(db.Data))
	for _, metric := range db.Data {
		dump = append(dump, metric)
	}
	sort.Slice(dump, func(a, b int) bool { return dump[a].ID < dump[b].ID })

	return dump
}
// ReadIter returns an iterator over the IDs in the request. When LogRequest
// is set, the request is also recorded in db.Reads for later inspection.
// (The previous comment, "return an empty result", was copied from DiscardTSDB.)
func (db *MemoryTSDB) ReadIter(ctx context.Context, request types.MetricRequest) (types.MetricDataSet, error) {
db.mutex.Lock()
defer db.mutex.Unlock()
if db.LogRequest {
db.Reads = append(db.Reads, request)
}
return &readIter{
request: request,
db: db,
}, nil
}

// Write store in memory. Points for an already-known metric ID are appended
// after the previously stored points; the other fields are overwritten by the
// incoming value. When LogRequest is set the batch is recorded in db.Writes.
func (db *MemoryTSDB) Write(ctx context.Context, metrics []types.MetricData) error {
db.mutex.Lock()
defer db.mutex.Unlock()
if db.LogRequest {
db.Writes = append(db.Writes, metrics)
}
if db.Data == nil {
db.Data = make(map[types.MetricID]types.MetricData)
}
for _, m := range metrics {
m.Points = append(db.Data[m.ID].Points, m.Points...)
db.Data[m.ID] = m
}
return nil
}
// Next advances to the next requested ID. A request for an ID that was never
// written yields the map's zero-value MetricData rather than an error.
func (r *readIter) Next() bool {
if r.offset >= len(r.request.IDs) {
return false
}
id := r.request.IDs[r.offset]
r.offset++
r.db.mutex.Lock()
defer r.db.mutex.Unlock()
r.current = r.db.Data[id]
return true
}

// At returns the value loaded by the last successful Next.
func (r *readIter) At() types.MetricData {
return r.current
}

// Err always reports success.
func (r *readIter) Err() error {
return nil
}
|
task :sample_weather_bootstrap => :environment do
Template.delete_all
Entity.delete_all
Asset.delete_all
# Top-level transcription template that the entities below are attached to.
template = Template.create( :name => "My Transcription Template",
:description => "A template for transcribing weather records", # fixed typo: was "recordds"
:project => "My great project",
:display_width => 600,
:default_zoom => 1.5)
weather_entity = Entity.create( :name => "Weather Observation",
:description => "",
:help => "Please fill in all of the values",
:resizeable => false,
:width => 450,
:height => 80)
wind_field = Field.new( :name => "Wind",
:field_key => "wind_direction",
:kind => "select",
:initial_value => "--",
:options => { :select => ['North', 'South', 'East', 'West'] })
force_field = Field.new(:name => "Force",
:field_key => "wind_force",
:kind => "text",
:initial_value => "--",
:options => { :text => { :max_length => 2, :min_length => 0 } })
air_temperature = Field.new(:name => "Air",
:field_key => "air_temperature",
:kind => "text",
:initial_value => "--",
:options => { :text => { :max_length => 3, :min_length => 0 } })
# Sea-temperature field. The :name was "Air" — a copy-paste of the
# air-temperature field above — although :field_key is "sea_temperature".
sea_temperature = Field.new(:name => "Sea",
:field_key => "sea_temperature",
:kind => "text",
:initial_value => "--",
:options => { :text => { :max_length => 3, :min_length => 0 } })
weather_entity.fields << wind_field
weather_entity.fields << force_field
weather_entity.fields << air_temperature
weather_entity.fields << sea_temperature
weather_entity.save
date_entity = Entity.create(:name => "Date",
:description => "",
:help => "Please fill in the day, month and year",
:resizeable => true,
:width => 450,
:height => 80)
date_field = Field.new( :name => "Date",
:field_key => "date",
:kind => "date",
:initial_value => "",
:options => {})
date_entity.fields << date_field
date_entity.save
location_entity = Entity.create(:name => "Location",
:description => "",
:help => "Please fill in the latitude and longitude or the port name",
:resizeable => true,
:width => 450,
:height => 80)
latitude_field = Field.new( :name => "Latitude",
:field_key => "latitude",
:kind => "text",
:initial_value => "--",
:options => {})
longitude_field = Field.new(:name => "Longitude",
:field_key => "longitude",
:kind => "text",
:initial_value => "--",
:options => {})
location_entity.fields << latitude_field
location_entity.fields << longitude_field
location_entity.save
template.entities << date_entity
template.entities << location_entity
template.entities << weather_entity
template.save
#generate a single asset and a single user for testing just now
voyage = AssetCollection.create(:title => "HMS Attack", :author => "", :extern_ref => "http://en.wikipedia.org/wiki/HMS_Attack_(1911)")
Asset.create(:location => "/images/1.jpeg", :display_width => 800, :height => 2126, :width => 1388, :template => template, :asset_collection => voyage)
Asset.create(:location => "/images/2.jpeg", :display_width => 800, :height => 2107, :width => 1380, :template => template, :asset_collection => voyage)
ZooniverseUser.create()
end |
import { INTEGER, STRING } from 'sequelize';
import sequelize from '../sequelize/sequelize';
// Sequelize model for the "async_game" table.
// NOTE(review): the model is named async_game but the columns (username,
// email, password, elo) look like an account/user schema — confirm the model
// name and the fields actually belong together.
const AsyncGame = sequelize.define('async_game', {
  // Auto-incrementing surrogate primary key.
  gameId: {
    type: INTEGER,
    autoIncrement: true,
    primaryKey: true
  },
  username: {
    type: STRING,
    unique: true,
    allowNull: false
  },
  email: {
    type: STRING,
    unique: true,
    allowNull: false,
    validate: {
      isEmail: true
    }
  },
  // Length restricted to 6-12 characters by the validator.
  password: {
    type: STRING,
    allowNull: false,
    validate: {
      len: [6, 12]
    }
  },
  // Rating, defaulting to 800 and never negative.
  elo: {
    type: INTEGER,
    defaultValue: 800,
    validate: {
      min: 0
    }
  }
}, {
  freezeTableName: true // Model tableName will be the same as the model name
});

// Create the table if it does not already exist. The original chained an
// empty .then() with no rejection handler, so a failed sync surfaced as an
// unhandled promise rejection; log the failure explicitly instead.
AsyncGame.sync().then(function () {
}).catch(function (err) {
  console.error('async_game sync failed:', err);
});

export default AsyncGame;
#!/bin/bash
# Configure environment variables for Bazel build and test.
set -e
export PPROF_PATH=/thirdparty_build/bin/pprof
[ -z "${NUM_CPUS}" ] && NUM_CPUS=`grep -c ^processor /proc/cpuinfo`
[ -z "${ENVOY_SRCDIR}" ] && export ENVOY_SRCDIR=/source
echo "ENVOY_SRCDIR=${ENVOY_SRCDIR}"
# Configure a gcc toolchain for bazel builds. Rejects any ENVOY_STDLIB other
# than libstdc++. Local builds export CC/CXX/BAZEL_COMPILER directly; RBE
# builds prepend the remote-gcc bazel config instead.
function setup_gcc_toolchain() {
if [[ ! -z "${ENVOY_STDLIB}" && "${ENVOY_STDLIB}" != "libstdc++" ]]; then
echo "gcc toolchain doesn't support ${ENVOY_STDLIB}."
exit 1
fi
if [[ -z "${ENVOY_RBE}" ]]; then
export CC=gcc
export CXX=g++
export BAZEL_COMPILER=gcc
echo "$CC/$CXX toolchain configured"
else
export BAZEL_BUILD_OPTIONS="--config=remote-gcc ${BAZEL_BUILD_OPTIONS}"
fi
}
# Configure a clang toolchain for bazel builds. ENVOY_STDLIB selects the C++
# standard library (default: libc++). Local builds prepend the libc++/clang
# bazel configs; RBE builds prepend the remote-clang variants.
function setup_clang_toolchain() {
ENVOY_STDLIB="${ENVOY_STDLIB:-libc++}"
if [[ -z "${ENVOY_RBE}" ]]; then
if [[ "${ENVOY_STDLIB}" == "libc++" ]]; then
export BAZEL_BUILD_OPTIONS="--config=libc++ ${BAZEL_BUILD_OPTIONS}"
else
export BAZEL_BUILD_OPTIONS="--config=clang ${BAZEL_BUILD_OPTIONS}"
fi
else
if [[ "${ENVOY_STDLIB}" == "libc++" ]]; then
export BAZEL_BUILD_OPTIONS="--config=remote-clang-libc++ ${BAZEL_BUILD_OPTIONS}"
else
export BAZEL_BUILD_OPTIONS="--config=remote-clang ${BAZEL_BUILD_OPTIONS}"
fi
fi
echo "clang toolchain with ${ENVOY_STDLIB} configured"
}
# Create a fake home. Python site libs tries to do getpwuid(3) if we don't and the CI
# Docker image gets confused as it has no passwd entry when running non-root
# unless we do this.
FAKE_HOME=/tmp/fake_home
mkdir -p "${FAKE_HOME}"
export HOME="${FAKE_HOME}"
export PYTHONUSERBASE="${FAKE_HOME}"
export BUILD_DIR=${BUILD_DIR:-/build}
if [[ ! -d "${BUILD_DIR}" ]]
then
echo "${BUILD_DIR} mount missing - did you forget -v <something>:${BUILD_DIR}? Creating."
mkdir -p "${BUILD_DIR}"
fi
export ENVOY_FILTER_EXAMPLE_SRCDIR="${BUILD_DIR}/envoy-filter-example"
# Environment setup.
export USER=bazel
export TEST_TMPDIR=${BUILD_DIR}/tmp
export BAZEL="bazel"
export PATH=/opt/llvm/bin:$PATH
export CLANG_FORMAT=clang-format
if [[ -f "/etc/redhat-release" ]]; then
export BAZEL_BUILD_EXTRA_OPTIONS+="--copt=-DENVOY_IGNORE_GLIBCXX_USE_CXX11_ABI_ERROR=1"
fi
# Remove bazel convenience symlinks and the generated clang.bazelrc from the
# source tree; this function is also registered as an EXIT trap further below.
function cleanup() {
# Remove build artifacts. This doesn't mess with incremental builds as these
# are just symlinks.
rm -rf "${ENVOY_SRCDIR}"/bazel-* clang.bazelrc
}
cleanup
trap cleanup EXIT
export LLVM_ROOT=/opt/llvm
bazel/setup_clang.sh "${LLVM_ROOT}"
[[ "${BUILD_REASON}" != "PullRequest" ]] && BAZEL_EXTRA_TEST_OPTIONS+=" --nocache_test_results --test_output=all"
export BAZEL_QUERY_OPTIONS="${BAZEL_OPTIONS}"
export BAZEL_BUILD_OPTIONS="--verbose_failures ${BAZEL_OPTIONS} --action_env=HOME --action_env=PYTHONUSERBASE \
--local_cpu_resources=${NUM_CPUS} --show_task_finish --experimental_generate_json_trace_profile \
--test_env=HOME --test_env=PYTHONUSERBASE --test_output=errors \
--repository_cache=${BUILD_DIR}/repository_cache --experimental_repository_cache_hardlinks \
${BAZEL_BUILD_EXTRA_OPTIONS} ${BAZEL_EXTRA_TEST_OPTIONS}"
[[ "${BAZEL_EXPUNGE}" == "1" ]] && "${BAZEL}" clean --expunge
if [ "$1" != "-nofetch" ]; then
# Setup Envoy consuming project.
if [[ ! -d "${ENVOY_FILTER_EXAMPLE_SRCDIR}/.git" ]]; then
rm -rf "${ENVOY_FILTER_EXAMPLE_SRCDIR}"
git clone https://github.com/envoyproxy/envoy-filter-example.git "${ENVOY_FILTER_EXAMPLE_SRCDIR}"
fi
# This is the hash on https://github.com/envoyproxy/envoy-filter-example.git we pin to.
(cd "${ENVOY_FILTER_EXAMPLE_SRCDIR}" && git fetch origin && git checkout -f 03b45933284b332fd1df42cfb3270751fe543842)
sed -e "s|{ENVOY_SRCDIR}|${ENVOY_SRCDIR}|" "${ENVOY_SRCDIR}"/ci/WORKSPACE.filter.example > "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/WORKSPACE
cp -f "${ENVOY_SRCDIR}"/.bazelversion "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/.bazelversion
fi
# Also setup some space for building Envoy standalone.
export ENVOY_BUILD_DIR="${BUILD_DIR}"/envoy
mkdir -p "${ENVOY_BUILD_DIR}"
# This is where we copy build deliverables to.
export ENVOY_DELIVERY_DIR="${ENVOY_BUILD_DIR}"/source/exe
mkdir -p "${ENVOY_DELIVERY_DIR}"
# This is where we copy the coverage report to.
export ENVOY_COVERAGE_DIR="${ENVOY_BUILD_DIR}"/generated/coverage
mkdir -p "${ENVOY_COVERAGE_DIR}"
# This is where we dump failed test logs for CI collection.
export ENVOY_FAILED_TEST_LOGS="${ENVOY_BUILD_DIR}"/generated/failed-testlogs
mkdir -p "${ENVOY_FAILED_TEST_LOGS}"
# This is where we copy the build profile to.
export ENVOY_BUILD_PROFILE="${ENVOY_BUILD_DIR}"/generated/build-profile
mkdir -p "${ENVOY_BUILD_PROFILE}"
mkdir -p "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/bazel
ln -sf "${ENVOY_SRCDIR}"/bazel/get_workspace_status "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/bazel/
cp -f "${ENVOY_SRCDIR}"/.bazelrc "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/
cp -f "${ENVOY_SRCDIR}"/*.bazelrc "${ENVOY_FILTER_EXAMPLE_SRCDIR}"/
export BUILDIFIER_BIN="/usr/local/bin/buildifier"
export BUILDOZER_BIN="/usr/local/bin/buildozer"
|
import networkx as nx
import matplotlib.pyplot as plt
class GraphVisualizationTool:
    """Render an adjacency-list graph using networkx and matplotlib."""

    def __init__(self):
        # One figure/axes pair, reused by every showgraph() call.
        self.graph_fig = plt.figure()
        self.ax = self.graph_fig.add_subplot(111)

    def showgraph(self, graph):
        """Draw ``graph``, a dict mapping node -> iterable of neighbours.

        Fix: nodes with an empty neighbour list are now added explicitly;
        the original only called add_edge(), silently dropping isolated nodes.
        """
        G = nx.Graph()
        for node, neighbors in graph.items():
            G.add_node(node)  # keep the node even when it has no neighbours
            for neighbor in neighbors:
                G.add_edge(node, neighbor)
        pos = nx.spring_layout(G)  # Positions for all nodes
        nx.draw(G, pos, with_labels=True, node_color='lightblue', node_size=1500, ax=self.ax)
        plt.show()
#!/bin/bash
# script to download datasets
curl=curl
cwd=$(cd -P -- "$(dirname -- "$0")" && pwd -P)
target=$cwd/../attic
# set -x
test ! -d $target && mkdir -p $target
## get filename
function file () {
echo $1 | sed "s/^.*\/\([^\/]*\)$/\1/"
}
## VSOP87 data
function vsop () {
local url=ftp://cdsarc.u-strasbg.fr/pub/cats/VI/81
local file="VSOP87B"
local exts="mer ven ear mar jup sat ura nep"
for ext in $exts; do
$curl $url/$file.$ext > $target/$file.$ext
done
}
## convert VSOP87 data
function vsop_conv () {
node $cwd/vsop87convert.js
}
## DeltaT data
# primary: ftp://maia.usno.navy.mil
# secondary: ftp://toshi.nofs.navy.mil
# iers: ftp://ftp.iers.org/products/eop/rapid/standard
# As of https://www.usno.navy.mil/USNO server maia.usno.navy.mil is being modernized till summer 2020
# Download each configured earth-orientation/Delta-T data file into $target.
function deltat () {
local server="http://maia.usno.navy.mil/ser7"
local server2="ftp://ftp.iers.org/products/eop/rapid/standard"
local urls=(
$server2/finals2000A.data
# $server/deltat.preds
# $server/deltat.data
# $server/historic_deltat.data
# $server/finals2000A.data
# $server/tai-utc.dat
)
# FIX: a bare $urls expands to only the first array element, which would
# silently skip every other entry once more URLs are re-enabled above.
for url in "${urls[@]}"; do
f=$(file $url)
echo downloading $url
$curl $url > $target/$f
done
}
## convert DeltaT data
function deltat_conv () {
node $cwd/deltat.js > $target/deltat.txt
}
function help () {
cat << EOS
download dataset for VSOP87 data and/or delta-T
Usage:
-t, --deltat download delta T data
-v, --vsop download VSOP87 data
-h, --help this help
EOS
}
case $1 in
-t|--deltat)
deltat
deltat_conv
;;
-v|--vsop)
vsop
vsop_conv
;;
-a|--all)
deltat
deltat_conv
vsop
vsop_conv
;;
-h|--help)
help
;;
*)
help
;;
esac
|
<gh_stars>0
package com.lambo.robot.kits;
import com.lambo.los.kits.io.IOKit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
/**
* 播放器.
* Created by lambo on 2017/7/23.
*/
public class BeepPlayer extends AudioPlayer {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    // Lazily-loaded WAV payload for the high beep; cached after first use.
    private byte[] beep_hi;

    /** Play the high beep, loading the sample from the classpath on first use. */
    public void beepHi() {
        if (null == beep_hi) {
            try {
                InputStream inputStream = IOKit.getInputStream("classpath:/beep_hi.wav");
                beep_hi = IOKit.readToByteBuffer(inputStream);
                IOKit.closeIo(inputStream);
            } catch (IOException e) {
                logger.error("load beep_hi failed", e);
            }
        }
        try {
            playWAV(beep_hi);
        } catch (Exception e) {
            logger.error("beepHi failed", e);
        }
    }

    // Lazily-loaded WAV payload for the low beep; cached after first use.
    private byte[] beep_lo;

    /** Play the low beep, loading the sample from the classpath on first use. */
    public void beepLo() {
        if (null == beep_lo) {
            try {
                InputStream inputStream = IOKit.getInputStream("classpath:/beep_lo.wav");
                beep_lo = IOKit.readToByteBuffer(inputStream);
                IOKit.closeIo(inputStream);
            } catch (IOException e) {
                logger.error("load beep_lo failed", e);
            }
        }
        try {
            playWAV(beep_lo);
        } catch (Exception e) {
            // FIX: previously logged "beepHi failed" (copy-paste error).
            logger.error("beepLo failed", e);
        }
    }
}
|
<filename>packages/jest-diff/src/__tests__/joinAlignedDiffs.test.ts
/**
* Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, Diff} from '../cleanupSemantic';
import {
joinAlignedDiffsExpand,
joinAlignedDiffsNoExpand,
} from '../joinAlignedDiffs';
import {normalizeDiffOptions} from '../normalizeDiffOptions';
// To align columns so people can review snapshots confidently:
// 1. Use options to omit line colors.
const identity = (string: string) => string;
const changeColor = (string: string) => '<i>' + string + '</i>';
const optionsNoColor = {
aColor: identity,
bColor: identity,
changeColor,
commonColor: identity,
patchColor: identity,
};
// 2. Add string serializer to omit double quote marks.
expect.addSnapshotSerializer({
serialize: (val: string) => val,
test: (val: unknown) => typeof val === 'string',
});
// Fixture with three changed regions (A, B, C) separated by runs of common
// lines, plus empty common lines at the start and near the end.
const diffsCommonStartEnd = [
  new Diff(DIFF_EQUAL, ''),
  new Diff(DIFF_EQUAL, 'common 2 preceding A'),
  new Diff(DIFF_EQUAL, 'common 1 preceding A'),
  new Diff(DIFF_DELETE, 'delete line'),
  new Diff(DIFF_DELETE, `change ${changeColor('expect')}ed A`),
  new Diff(DIFF_INSERT, `change ${changeColor('receiv')}ed A`),
  new Diff(DIFF_EQUAL, 'common 1 following A'),
  new Diff(DIFF_EQUAL, 'common 2 following A'),
  new Diff(DIFF_EQUAL, 'common 3 following A'),
  new Diff(DIFF_EQUAL, 'common 4 following A'),
  new Diff(DIFF_EQUAL, 'common 4 preceding B'),
  new Diff(DIFF_EQUAL, 'common 3 preceding B'),
  new Diff(DIFF_EQUAL, 'common 2 preceding B'),
  new Diff(DIFF_EQUAL, 'common 1 preceding B'),
  new Diff(DIFF_DELETE, `change ${changeColor('expect')}ed B`),
  new Diff(DIFF_INSERT, `change ${changeColor('receiv')}ed B`),
  new Diff(DIFF_INSERT, 'insert line'),
  new Diff(DIFF_EQUAL, 'common 1 following B'),
  new Diff(DIFF_EQUAL, 'common 2 following B'),
  new Diff(DIFF_EQUAL, 'common 3 between B and C'),
  new Diff(DIFF_EQUAL, 'common 2 preceding C'),
  new Diff(DIFF_EQUAL, 'common 1 preceding C'),
  new Diff(DIFF_DELETE, `change ${changeColor('expect')}ed C`),
  new Diff(DIFF_INSERT, `change ${changeColor('receiv')}ed C`),
  new Diff(DIFF_EQUAL, 'common 1 following C'),
  new Diff(DIFF_EQUAL, 'common 2 following C'),
  new Diff(DIFF_EQUAL, 'common 3 following C'),
  new Diff(DIFF_EQUAL, ''),
  new Diff(DIFF_EQUAL, 'common 5 following C'),
];
// Minimal fixture whose changed lines sit at the very start and very end,
// exercising patches with no leading/trailing common context.
const diffsChangeStartEnd = [
  new Diff(DIFF_DELETE, 'delete'),
  new Diff(DIFF_EQUAL, 'common following delete'),
  new Diff(DIFF_EQUAL, 'common preceding insert'),
  new Diff(DIFF_INSERT, 'insert'),
];
describe('joinAlignedDiffsExpand', () => {
  // Expanded output shows every line, so one fixture covers it.
  test('first line is empty common', () => {
    const options = normalizeDiffOptions(optionsNoColor);
    const joined = joinAlignedDiffsExpand(diffsCommonStartEnd, options);
    expect(joined).toMatchSnapshot();
  });
});
describe('joinAlignedDiffsNoExpand', () => {
test('patch 0 with context 1 and change at start and end', () => {
const options = normalizeDiffOptions({
...optionsNoColor,
contextLines: 1,
expand: false,
});
expect(
joinAlignedDiffsNoExpand(diffsChangeStartEnd, options),
).toMatchSnapshot();
});
test('patch 0 with context 5 and first line is empty common', () => {
const options = normalizeDiffOptions({...optionsNoColor, expand: false});
expect(
joinAlignedDiffsNoExpand(diffsCommonStartEnd, options),
).toMatchSnapshot();
});
test('patch 1 with context 4 and last line is empty common', () => {
const options = normalizeDiffOptions({
...optionsNoColor,
contextLines: 4,
expand: false,
});
expect(
joinAlignedDiffsNoExpand(diffsCommonStartEnd, options),
).toMatchSnapshot();
});
test('patch 2 with context 3', () => {
const options = normalizeDiffOptions({
...optionsNoColor,
contextLines: 3,
expand: false,
});
expect(
joinAlignedDiffsNoExpand(diffsCommonStartEnd, options),
).toMatchSnapshot();
});
test('patch 3 with context 2 and omit excess common at start', () => {
const options = normalizeDiffOptions({
...optionsNoColor,
contextLines: 2,
expand: false,
});
expect(
joinAlignedDiffsNoExpand(diffsCommonStartEnd, options),
).toMatchSnapshot();
});
});
|
package com.datahub.authorization;
import com.datahub.authentication.Authentication;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.linkedin.common.AuditStamp;
import com.linkedin.common.Owner;
import com.linkedin.common.OwnerArray;
import com.linkedin.common.Ownership;
import com.linkedin.common.OwnershipType;
import com.linkedin.common.UrnArray;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.StringArray;
import com.linkedin.entity.Aspect;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.EnvelopedAspect;
import com.linkedin.entity.EnvelopedAspectMap;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.identity.CorpUserInfo;
import com.linkedin.identity.GroupMembership;
import com.linkedin.policy.DataHubActorFilter;
import com.linkedin.policy.DataHubPolicyInfo;
import com.linkedin.policy.DataHubResourceFilter;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.mockito.Mockito;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
import static com.linkedin.metadata.Constants.CORP_USER_INFO_ASPECT_NAME;
import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME;
import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME;
import static com.linkedin.metadata.authorization.PoliciesConfig.ACTIVE_POLICY_STATE;
import static com.linkedin.metadata.authorization.PoliciesConfig.INACTIVE_POLICY_STATE;
import static com.linkedin.metadata.authorization.PoliciesConfig.METADATA_POLICY_TYPE;
import static com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_POLICY_TYPE;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public class PolicyEngineTest {
  // Principal expected to match policies (owner / group member in the mocks).
  private static final String AUTHORIZED_PRINCIPAL = "urn:li:corpuser:datahub";
  // Principal expected NOT to match actor filters.
  private static final String UNAUTHORIZED_PRINCIPAL = "urn:li:corpuser:unauthorized";
  // Group the authorized principal belongs to (see setup mocks).
  private static final String AUTHORIZED_GROUP = "urn:li:corpGroup:authorizedGroup";
  // Dataset resource targeted by most resource filters below.
  private static final String RESOURCE_URN = "urn:li:dataset:test";
  // Domain used by the domain-based resource-filter tests.
  private static final String DOMAIN_URN = "urn:li:domain:domain1";
  private EntityClient _entityClient;
  private PolicyEngine _policyEngine;
  private Urn authorizedUserUrn;
  private Urn unauthorizedUserUrn;
  private Urn resourceUrn;
  // Wires a mocked EntityClient so the engine sees: an authorized user, an
  // unauthorized user, and a resource whose ownership aspect is stubbed.
  // NOTE(review): createAuthorizedEntityResponse/createUnauthorizedEntityResponse
  // are defined outside this excerpt; presumably they differ in group
  // membership/ownership — confirm against the rest of the class.
  @BeforeMethod
  public void setupTest() throws Exception {
    _entityClient = Mockito.mock(EntityClient.class);
    _policyEngine = new PolicyEngine(Mockito.mock(Authentication.class), _entityClient);
    // Init mocks.
    // batchGetV2 for the authorized user returns its stubbed response.
    EntityResponse authorizedEntityResponse = createAuthorizedEntityResponse();
    authorizedUserUrn = Urn.createFromString(AUTHORIZED_PRINCIPAL);
    authorizedEntityResponse.setUrn(authorizedUserUrn);
    Map<Urn, EntityResponse> authorizedEntityResponseMap =
        Collections.singletonMap(authorizedUserUrn, authorizedEntityResponse);
    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), eq(null),
        any())).thenReturn(authorizedEntityResponseMap);
    // Same stubbing for the unauthorized user.
    EntityResponse unauthorizedEntityResponse = createUnauthorizedEntityResponse();
    unauthorizedUserUrn = Urn.createFromString(UNAUTHORIZED_PRINCIPAL);
    unauthorizedEntityResponse.setUrn(unauthorizedUserUrn);
    Map<Urn, EntityResponse> unauthorizedEntityResponseMap =
        Collections.singletonMap(unauthorizedUserUrn, unauthorizedEntityResponse);
    when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(unauthorizedUserUrn)), eq(null),
        any())).thenReturn(unauthorizedEntityResponseMap);
    // The test resource carries an Ownership aspect (user + group owners).
    EntityResponse entityResponse = new EntityResponse();
    EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap();
    envelopedAspectMap.put(OWNERSHIP_ASPECT_NAME,
        new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(true, true).data())));
    entityResponse.setAspects(envelopedAspectMap);
    resourceUrn = Urn.createFromString(RESOURCE_URN);
    // A mocked Map lets the lookup return the response only for resourceUrn.
    Map<Urn, EntityResponse> mockMap = mock(Map.class);
    when(_entityClient.batchGetV2(any(), eq(Collections.singleton(resourceUrn)),
        eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), any())).thenReturn(mockMap);
    when(mockMap.get(eq(resourceUrn))).thenReturn(entityResponse);
  }
@Test
public void testEvaluatePolicyInactivePolicyState() {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(INACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setGroups(new UrnArray());
actorFilter.setUsers(new UrnArray());
actorFilter.setResourceOwners(false);
actorFilter.setAllUsers(true);
actorFilter.setAllGroups(true);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
PolicyEngine.PolicyEvaluationResult result =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertFalse(result.isGranted());
}
@Test
public void testEvaluatePolicyPrivilegeFilterNoMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(true);
actorFilter.setAllUsers(false);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
PolicyEngine.PolicyEvaluationResult result =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_OWNERS",
Optional.of(resourceSpec));
assertFalse(result.isGranted());
// Verify no network calls
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePlatformPolicyPrivilegeFilterMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(PLATFORM_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("MANAGE_POLICIES"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(false);
actorFilter.setAllUsers(true);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
PolicyEngine.PolicyEvaluationResult result =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "MANAGE_POLICIES", Optional.empty());
assertTrue(result.isGranted());
// Verify no network calls
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyActorFilterUserMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
final UrnArray usersUrnArray = new UrnArray();
usersUrnArray.add(Urn.createFromString(AUTHORIZED_PRINCIPAL));
actorFilter.setUsers(usersUrnArray);
actorFilter.setResourceOwners(false);
actorFilter.setAllUsers(false);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
// Assert Authorized user can edit entity tags.
PolicyEngine.PolicyEvaluationResult result1 =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertTrue(result1.isGranted());
// Verify we are not making any network calls for these predicates.
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyActorFilterUserNoMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
final UrnArray usersUrnArray = new UrnArray();
usersUrnArray.add(Urn.createFromString(AUTHORIZED_PRINCIPAL));
actorFilter.setUsers(usersUrnArray);
actorFilter.setResourceOwners(false);
actorFilter.setAllUsers(false);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
// Assert unauthorized user cannot edit entity tags.
PolicyEngine.PolicyEvaluationResult result2 =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, "urn:li:corpuser:test", "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertFalse(result2.isGranted());
// Verify we are not making any network calls for these predicates.
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyActorFilterGroupMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
final UrnArray groupsUrnArray = new UrnArray();
groupsUrnArray.add(Urn.createFromString("urn:li:corpGroup:authorizedGroup"));
actorFilter.setGroups(groupsUrnArray);
actorFilter.setResourceOwners(false);
actorFilter.setAllUsers(false);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
// Assert authorized user can edit entity tags, because of group membership.
PolicyEngine.PolicyEvaluationResult result1 =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertTrue(result1.isGranted());
// Verify we are only calling for group during these requests.
verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)),
eq(null), any());
}
@Test
public void testEvaluatePolicyActorFilterGroupNoMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
final UrnArray groupsUrnArray = new UrnArray();
groupsUrnArray.add(Urn.createFromString("urn:li:corpGroup:authorizedGroup"));
actorFilter.setGroups(groupsUrnArray);
actorFilter.setResourceOwners(false);
actorFilter.setAllUsers(false);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
// Assert unauthorized user cannot edit entity tags.
PolicyEngine.PolicyEvaluationResult result2 =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, UNAUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertFalse(result2.isGranted());
// Verify we are only calling for group during these requests.
verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME),
eq(Collections.singleton(unauthorizedUserUrn)), eq(null), any());
}
  // allUsers=true: every principal matches the actor filter, authorized or not.
  @Test
  public void testEvaluatePolicyActorFilterAllUsersMatch() throws Exception {
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(false);
    actorFilter.setAllUsers(true);
    actorFilter.setAllGroups(false);
    dataHubPolicyInfo.setActors(actorFilter);
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setAllResources(true);
    resourceFilter.setType("dataset");
    dataHubPolicyInfo.setResources(resourceFilter);
    ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
    // Assert authorized user can edit entity tags, because the policy applies to all users.
    PolicyEngine.PolicyEvaluationResult result1 =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    assertTrue(result1.isGranted());
    // Assert the unauthorized user is ALSO granted: allUsers matches every principal.
    PolicyEngine.PolicyEvaluationResult result2 =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, UNAUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    assertTrue(result2.isGranted());
    // Verify no network calls
    verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
  }
  // allGroups=true: any principal whose resolved group set matches is granted;
  // both test principals are granted here (group membership comes from the
  // mocked entity responses in setupTest).
  @Test
  public void testEvaluatePolicyActorFilterAllGroupsMatch() throws Exception {
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(false);
    actorFilter.setAllUsers(false);
    actorFilter.setAllGroups(true);
    dataHubPolicyInfo.setActors(actorFilter);
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setAllResources(true);
    resourceFilter.setType("dataset");
    dataHubPolicyInfo.setResources(resourceFilter);
    ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
    // Assert authorized user can edit entity tags, because of group membership.
    PolicyEngine.PolicyEvaluationResult result1 =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    assertTrue(result1.isGranted());
    // Assert the unauthorized user is ALSO granted (allGroups matches via its group membership).
    PolicyEngine.PolicyEvaluationResult result2 =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, UNAUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    assertTrue(result2.isGranted());
    // Verify we are only calling for group during these requests.
    verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)),
        eq(null), any());
    verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME),
        eq(Collections.singleton(unauthorizedUserUrn)), eq(null), any());
  }
@Test
public void testEvaluatePolicyActorFilterUserResourceOwnersMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(true);
actorFilter.setAllUsers(false);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec =
buildResourceResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet());
// Assert authorized user can edit entity tags, because he is a user owner.
PolicyEngine.PolicyEvaluationResult result1 =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertTrue(result1.isGranted());
// Ensure no calls for group membership.
verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)),
eq(null), any());
}
  // resourceOwners=true with a GROUP listed as owner: the principal matches
  // through membership in the owning group.
  @Test
  public void testEvaluatePolicyActorFilterGroupResourceOwnersMatch() throws Exception {
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(true);
    actorFilter.setAllUsers(false);
    actorFilter.setAllGroups(false);
    dataHubPolicyInfo.setActors(actorFilter);
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setAllResources(true);
    resourceFilter.setType("dataset");
    dataHubPolicyInfo.setResources(resourceFilter);
    // AUTHORIZED_GROUP (not the user) is the resource owner here.
    ResolvedResourceSpec resourceSpec =
        buildResourceResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_GROUP), Collections.emptySet());
    // Assert authorized user can edit entity tags, because a group they belong to owns the resource.
    PolicyEngine.PolicyEvaluationResult result1 =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    assertTrue(result1.isGranted());
    // Ensure that caching of groups is working with 1 call to entity client for each principal.
    verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)),
        eq(null), any());
  }
@Test
public void testEvaluatePolicyActorFilterGroupResourceOwnersNoMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(true);
actorFilter.setAllUsers(false);
actorFilter.setAllGroups(false);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
// Assert unauthorized user cannot edit entity tags.
PolicyEngine.PolicyEvaluationResult result2 =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, UNAUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertFalse(result2.isGranted());
// Ensure that caching of groups is working with 1 call to entity client for each principal.
verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME),
eq(Collections.singleton(unauthorizedUserUrn)), eq(null), any());
}
@Test
public void testEvaluatePolicyResourceFilterAllResourcesMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(true);
actorFilter.setAllUsers(true);
actorFilter.setAllGroups(true);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec =
buildResourceResolvers("dataset", "urn:li:dataset:random"); // A dataset Authorized principal _does not own_.
PolicyEngine.PolicyEvaluationResult result =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertTrue(result.isGranted());
// Verify no network calls
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyResourceFilterAllResourcesNoMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(true);
actorFilter.setAllUsers(true);
actorFilter.setAllGroups(true);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(true);
resourceFilter.setType("dataset");
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("chart", RESOURCE_URN); // Notice: Not a dataset.
PolicyEngine.PolicyEvaluationResult result =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertFalse(result.isGranted());
// Verify no network calls
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyResourceFilterSpecificResourceMatchLegacy() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(true);
actorFilter.setAllUsers(true);
actorFilter.setAllGroups(true);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setAllResources(false);
resourceFilter.setType("dataset");
StringArray resourceUrns = new StringArray();
resourceUrns.add(RESOURCE_URN); // Filter applies to specific resource.
resourceFilter.setResources(resourceUrns);
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
PolicyEngine.PolicyEvaluationResult result =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertTrue(result.isGranted());
// Verify no network calls
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyResourceFilterSpecificResourceMatch() throws Exception {
final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
dataHubPolicyInfo.setDisplayName("My Test Display");
dataHubPolicyInfo.setDescription("My test display!");
dataHubPolicyInfo.setEditable(true);
final DataHubActorFilter actorFilter = new DataHubActorFilter();
actorFilter.setResourceOwners(true);
actorFilter.setAllUsers(true);
actorFilter.setAllGroups(true);
dataHubPolicyInfo.setActors(actorFilter);
final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
resourceFilter.setFilter(FilterUtils.newFilter(
ImmutableMap.of(ResourceFieldType.RESOURCE_TYPE, Collections.singletonList("dataset"), ResourceFieldType.RESOURCE_URN,
Collections.singletonList(RESOURCE_URN))));
dataHubPolicyInfo.setResources(resourceFilter);
ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN);
PolicyEngine.PolicyEvaluationResult result =
_policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
Optional.of(resourceSpec));
assertTrue(result.isGranted());
// Verify no network calls
verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyResourceFilterSpecificResourceNoMatch() throws Exception {
    // Policy grants EDIT_ENTITY_TAGS only on one specific dataset URN.
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    // Actor filter matches everyone: all users, all groups, and resource owners.
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(true);
    actorFilter.setAllUsers(true);
    actorFilter.setAllGroups(true);
    dataHubPolicyInfo.setActors(actorFilter);
    // Resource filter: type == "dataset" AND urn == RESOURCE_URN.
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setFilter(FilterUtils.newFilter(
        ImmutableMap.of(ResourceFieldType.RESOURCE_TYPE, Collections.singletonList("dataset"), ResourceFieldType.RESOURCE_URN,
            Collections.singletonList(RESOURCE_URN))));
    dataHubPolicyInfo.setResources(resourceFilter);
    ResolvedResourceSpec resourceSpec =
        buildResourceResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy.
    PolicyEngine.PolicyEvaluationResult result =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    // The requested urn differs from the policy's urn, so access is denied.
    assertFalse(result.isGranted());
    // Verify no network calls
    verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyResourceFilterSpecificResourceMatchDomain() throws Exception {
    // Policy grants EDIT_ENTITY_TAGS on datasets that belong to DOMAIN_URN.
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    // Actor filter matches everyone: all users, all groups, and resource owners.
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(true);
    actorFilter.setAllUsers(true);
    actorFilter.setAllGroups(true);
    dataHubPolicyInfo.setActors(actorFilter);
    // Resource filter: type == "dataset" AND domain == DOMAIN_URN.
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setFilter(FilterUtils.newFilter(
        ImmutableMap.of(ResourceFieldType.RESOURCE_TYPE, Collections.singletonList("dataset"), ResourceFieldType.DOMAIN,
            Collections.singletonList(DOMAIN_URN))));
    dataHubPolicyInfo.setResources(resourceFilter);
    // Resource spec carries the matching domain, so the policy applies.
    ResolvedResourceSpec resourceSpec =
        buildResourceResolvers("dataset", RESOURCE_URN, Collections.emptySet(), Collections.singleton(DOMAIN_URN));
    PolicyEngine.PolicyEvaluationResult result =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    assertTrue(result.isGranted());
    // Verify no network calls
    verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testEvaluatePolicyResourceFilterSpecificResourceNoMatchDomain() throws Exception {
    // Policy grants EDIT_ENTITY_TAGS on datasets in DOMAIN_URN; the request
    // comes from a different domain, so it must be denied.
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    // Actor filter matches everyone: all users, all groups, and resource owners.
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(true);
    actorFilter.setAllUsers(true);
    actorFilter.setAllGroups(true);
    dataHubPolicyInfo.setActors(actorFilter);
    // Resource filter: type == "dataset" AND domain == DOMAIN_URN.
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setFilter(FilterUtils.newFilter(
        ImmutableMap.of(ResourceFieldType.RESOURCE_TYPE, Collections.singletonList("dataset"), ResourceFieldType.DOMAIN,
            Collections.singletonList(DOMAIN_URN))));
    dataHubPolicyInfo.setResources(resourceFilter);
    ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN, Collections.emptySet(),
        Collections.singleton("urn:li:domain:domain2")); // Domain doesn't match
    PolicyEngine.PolicyEvaluationResult result =
        _policyEngine.evaluatePolicy(dataHubPolicyInfo, AUTHORIZED_PRINCIPAL, "EDIT_ENTITY_TAGS",
            Optional.of(resourceSpec));
    assertFalse(result.isGranted());
    // Verify no network calls
    verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
@Test
public void testGetGrantedPrivileges() throws Exception {
    // Exercises getGrantedPrivileges() against three policies with different
    // resource filters, across resource specs that match various subsets.
    // Policy 1, match dataset type and domain
    final DataHubPolicyInfo dataHubPolicyInfo1 = new DataHubPolicyInfo();
    dataHubPolicyInfo1.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo1.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo1.setPrivileges(new StringArray("PRIVILEGE_1"));
    dataHubPolicyInfo1.setDisplayName("My Test Display");
    dataHubPolicyInfo1.setDescription("My test display!");
    dataHubPolicyInfo1.setEditable(true);
    final DataHubActorFilter actorFilter1 = new DataHubActorFilter();
    actorFilter1.setResourceOwners(true);
    actorFilter1.setAllUsers(true);
    actorFilter1.setAllGroups(true);
    dataHubPolicyInfo1.setActors(actorFilter1);
    final DataHubResourceFilter resourceFilter1 = new DataHubResourceFilter();
    resourceFilter1.setFilter(FilterUtils.newFilter(
        ImmutableMap.of(ResourceFieldType.RESOURCE_TYPE, Collections.singletonList("dataset"), ResourceFieldType.DOMAIN,
            Collections.singletonList(DOMAIN_URN))));
    dataHubPolicyInfo1.setResources(resourceFilter1);
    // Policy 2, match dataset type and resource
    final DataHubPolicyInfo dataHubPolicyInfo2 = new DataHubPolicyInfo();
    dataHubPolicyInfo2.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo2.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo2.setPrivileges(new StringArray("PRIVILEGE_2_1", "PRIVILEGE_2_2"));
    dataHubPolicyInfo2.setDisplayName("My Test Display");
    dataHubPolicyInfo2.setDescription("My test display!");
    dataHubPolicyInfo2.setEditable(true);
    final DataHubActorFilter actorFilter2 = new DataHubActorFilter();
    actorFilter2.setResourceOwners(true);
    actorFilter2.setAllUsers(true);
    actorFilter2.setAllGroups(true);
    dataHubPolicyInfo2.setActors(actorFilter2);
    final DataHubResourceFilter resourceFilter2 = new DataHubResourceFilter();
    resourceFilter2.setFilter(FilterUtils.newFilter(
        ImmutableMap.of(ResourceFieldType.RESOURCE_TYPE, Collections.singletonList("dataset"), ResourceFieldType.RESOURCE_URN,
            Collections.singletonList(RESOURCE_URN))));
    dataHubPolicyInfo2.setResources(resourceFilter2);
    // Policy 3, match dataset type and owner (legacy resource filter)
    final DataHubPolicyInfo dataHubPolicyInfo3 = new DataHubPolicyInfo();
    dataHubPolicyInfo3.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo3.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo3.setPrivileges(new StringArray("PRIVILEGE_3"));
    dataHubPolicyInfo3.setDisplayName("My Test Display");
    dataHubPolicyInfo3.setDescription("My test display!");
    dataHubPolicyInfo3.setEditable(true);
    // Policy 3 only applies to owners of the resource.
    final DataHubActorFilter actorFilter3 = new DataHubActorFilter();
    actorFilter3.setResourceOwners(true);
    actorFilter3.setAllUsers(false);
    actorFilter3.setAllGroups(false);
    dataHubPolicyInfo3.setActors(actorFilter3);
    final DataHubResourceFilter resourceFilter3 = new DataHubResourceFilter();
    resourceFilter3.setAllResources(true);
    resourceFilter3.setType("dataset");
    dataHubPolicyInfo3.setResources(resourceFilter3);
    final List<DataHubPolicyInfo> policies =
        ImmutableList.of(dataHubPolicyInfo1, dataHubPolicyInfo2, dataHubPolicyInfo3);
    // Without a resource spec, no resource-scoped privilege is granted.
    assertEquals(_policyEngine.getGrantedPrivileges(policies, UrnUtils.getUrn(AUTHORIZED_PRINCIPAL), Optional.empty()),
        Collections.emptyList());
    ResolvedResourceSpec resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN, Collections.emptySet(),
        Collections.singleton(DOMAIN_URN)); // Everything matches
    assertEquals(
        _policyEngine.getGrantedPrivileges(policies, UrnUtils.getUrn(AUTHORIZED_PRINCIPAL), Optional.of(resourceSpec)),
        ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2"));
    resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN, Collections.emptySet(),
        Collections.singleton("urn:li:domain:domain2")); // Domain doesn't match
    assertEquals(
        _policyEngine.getGrantedPrivileges(policies, UrnUtils.getUrn(AUTHORIZED_PRINCIPAL), Optional.of(resourceSpec)),
        ImmutableList.of("PRIVILEGE_2_1", "PRIVILEGE_2_2"));
    resourceSpec = buildResourceResolvers("dataset", "urn:li:dataset:random", Collections.emptySet(),
        Collections.singleton(DOMAIN_URN)); // Resource doesn't match
    assertEquals(
        _policyEngine.getGrantedPrivileges(policies, UrnUtils.getUrn(AUTHORIZED_PRINCIPAL), Optional.of(resourceSpec)),
        ImmutableList.of("PRIVILEGE_1"));
    resourceSpec = buildResourceResolvers("dataset", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL),
        Collections.singleton(DOMAIN_URN)); // Is owner
    assertEquals(
        _policyEngine.getGrantedPrivileges(policies, UrnUtils.getUrn(AUTHORIZED_PRINCIPAL), Optional.of(resourceSpec)),
        ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2", "PRIVILEGE_3"));
    resourceSpec = buildResourceResolvers("chart", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL),
        Collections.singleton(DOMAIN_URN)); // Resource type doesn't match
    assertEquals(
        _policyEngine.getGrantedPrivileges(policies, UrnUtils.getUrn(AUTHORIZED_PRINCIPAL), Optional.of(resourceSpec)),
        Collections.emptyList());
}
@Test
public void testGetMatchingActorsResourceMatch() throws Exception {
    // When the policy's resource filter matches the target resource, the
    // matching actors include the configured users/groups plus resource owners.
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(true);
    actorFilter.setAllUsers(true);
    actorFilter.setAllGroups(true);
    actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"),
        Urn.createFromString("urn:li:corpuser:user2"))));
    actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"),
        Urn.createFromString("urn:li:corpGroup:group2"))));
    dataHubPolicyInfo.setActors(actorFilter);
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setAllResources(false);
    resourceFilter.setType("dataset");
    StringArray resourceUrns = new StringArray();
    resourceUrns.add(RESOURCE_URN); // Filter applies to specific resource.
    resourceFilter.setResources(resourceUrns);
    dataHubPolicyInfo.setResources(resourceFilter);
    // The resource spec reports both an owning user and an owning group.
    ResolvedResourceSpec resourceSpec =
        buildResourceResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP),
            Collections.emptySet());
    PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec));
    assertTrue(actors.allUsers());
    assertTrue(actors.allGroups());
    assertEquals(actors.getUsers(),
        ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2"),
            Urn.createFromString(AUTHORIZED_PRINCIPAL) // Resource Owner
        ));
    assertEquals(actors.getGroups(), ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"),
        Urn.createFromString("urn:li:corpGroup:group2"), Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner
    ));
    // Verify the entity client was never called (ownership already resolved on the spec).
    verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)),
        eq(null), any());
}
@Test
public void testGetMatchingActorsNoResourceMatch() throws Exception {
    // When the policy's resource filter does NOT match the target resource,
    // no actors should match at all, regardless of the actor filter.
    final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo();
    dataHubPolicyInfo.setType(METADATA_POLICY_TYPE);
    dataHubPolicyInfo.setState(ACTIVE_POLICY_STATE);
    dataHubPolicyInfo.setPrivileges(new StringArray("EDIT_ENTITY_TAGS"));
    dataHubPolicyInfo.setDisplayName("My Test Display");
    dataHubPolicyInfo.setDescription("My test display!");
    dataHubPolicyInfo.setEditable(true);
    final DataHubActorFilter actorFilter = new DataHubActorFilter();
    actorFilter.setResourceOwners(true);
    actorFilter.setAllUsers(true);
    actorFilter.setAllGroups(true);
    actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"),
        Urn.createFromString("urn:li:corpuser:user2"))));
    actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"),
        Urn.createFromString("urn:li:corpGroup:group2"))));
    dataHubPolicyInfo.setActors(actorFilter);
    final DataHubResourceFilter resourceFilter = new DataHubResourceFilter();
    resourceFilter.setAllResources(false);
    resourceFilter.setType("dataset");
    StringArray resourceUrns = new StringArray();
    resourceUrns.add(RESOURCE_URN);
    resourceFilter.setResources(resourceUrns);
    dataHubPolicyInfo.setResources(resourceFilter);
    ResolvedResourceSpec resourceSpec =
        buildResourceResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy.
    PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec));
    assertFalse(actors.allUsers());
    assertFalse(actors.allGroups());
    assertEquals(actors.getUsers(), Collections.emptyList());
    assertEquals(actors.getGroups(), Collections.emptyList());
    // Verify no network calls
    verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any());
}
/**
 * Builds an Ownership aspect for tests.
 *
 * @param addUserOwner  when true, adds AUTHORIZED_PRINCIPAL as a DATAOWNER
 * @param addGroupOwner when true, adds AUTHORIZED_GROUP as a DATAOWNER
 * @return the populated Ownership aspect (owners may be empty)
 * @throws Exception if a urn string fails to parse
 */
private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) throws Exception {
    final Ownership ownershipAspect = new Ownership();
    final OwnerArray owners = new OwnerArray();
    if (addUserOwner) {
        final Owner userOwner = new Owner();
        userOwner.setOwner(Urn.createFromString(AUTHORIZED_PRINCIPAL));
        userOwner.setType(OwnershipType.DATAOWNER);
        owners.add(userOwner);
    }
    if (addGroupOwner) {
        final Owner groupOwner = new Owner();
        groupOwner.setOwner(Urn.createFromString(AUTHORIZED_GROUP));
        groupOwner.setType(OwnershipType.DATAOWNER);
        owners.add(groupOwner);
    }
    ownershipAspect.setOwners(owners);
    // Required field on the aspect; the actual timestamp/actor is irrelevant to the tests.
    ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo")));
    return ownershipAspect;
}
/**
 * Builds an EntityResponse for the authorized user: an active CorpUserInfo
 * aspect plus membership in the "authorizedGroup" corp group.
 *
 * @return the stubbed entity response used to mock the entity client
 * @throws URISyntaxException if a urn string fails to parse
 */
private EntityResponse createAuthorizedEntityResponse() throws URISyntaxException {
    final EntityResponse entityResponse = new EntityResponse();
    final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
    final CorpUserInfo userInfo = new CorpUserInfo();
    userInfo.setActive(true);
    userInfo.setFullName("<NAME>");
    userInfo.setFirstName("Data");
    userInfo.setLastName("Hub");
    userInfo.setEmail("<EMAIL>");
    userInfo.setTitle("Admin");
    aspectMap.put(CORP_USER_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(userInfo.data())));
    // Place the user in the group that policies treat as authorized.
    final GroupMembership groupsAspect = new GroupMembership();
    final UrnArray groups = new UrnArray();
    groups.add(Urn.createFromString("urn:li:corpGroup:authorizedGroup"));
    groupsAspect.setGroups(groups);
    aspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupsAspect.data())));
    entityResponse.setAspects(aspectMap);
    return entityResponse;
}
/**
 * Builds an EntityResponse for the unauthorized user: an active CorpUserInfo
 * aspect plus membership in the "unauthorizedGroup" corp group.
 *
 * @return the stubbed entity response used to mock the entity client
 * @throws URISyntaxException if a urn string fails to parse
 */
private EntityResponse createUnauthorizedEntityResponse() throws URISyntaxException {
    final EntityResponse entityResponse = new EntityResponse();
    final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
    final CorpUserInfo userInfo = new CorpUserInfo();
    userInfo.setActive(true);
    userInfo.setFullName("Unauthorized User");
    userInfo.setFirstName("Unauthorized");
    userInfo.setLastName("User");
    userInfo.setEmail("Unauth");
    userInfo.setTitle("Engineer");
    aspectMap.put(CORP_USER_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(userInfo.data())));
    // Place the user in a group that no policy authorizes.
    final GroupMembership groupsAspect = new GroupMembership();
    final UrnArray groups = new UrnArray();
    groups.add(Urn.createFromString("urn:li:corpGroup:unauthorizedGroup"));
    groupsAspect.setGroups(groups);
    aspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupsAspect.data())));
    entityResponse.setAspects(aspectMap);
    return entityResponse;
}
/** Convenience overload: builds a resolved spec with no owners and no domains. */
public static ResolvedResourceSpec buildResourceResolvers(String entityType, String entityUrn) {
    return buildResourceResolvers(entityType, entityUrn, Collections.emptySet(), Collections.emptySet());
}
/**
 * Builds a ResolvedResourceSpec whose field resolvers return fixed values,
 * so tests can control type/urn/owner/domain matching without network calls.
 *
 * @param entityType resource type reported by the RESOURCE_TYPE resolver
 * @param entityUrn  urn reported by the RESOURCE_URN resolver
 * @param owners     owner urns reported by the OWNER resolver
 * @param domains    domain urns reported by the DOMAIN resolver
 */
public static ResolvedResourceSpec buildResourceResolvers(String entityType, String entityUrn, Set<String> owners,
    Set<String> domains) {
    return new ResolvedResourceSpec(new ResourceSpec(entityType, entityUrn),
        ImmutableMap.of(ResourceFieldType.RESOURCE_TYPE, FieldResolver.getResolverFromValues(Collections.singleton(entityType)),
            ResourceFieldType.RESOURCE_URN, FieldResolver.getResolverFromValues(Collections.singleton(entityUrn)),
            ResourceFieldType.OWNER, FieldResolver.getResolverFromValues(owners), ResourceFieldType.DOMAIN,
            FieldResolver.getResolverFromValues(domains)));
}
}
|
const path = require("path")
const joop = require("../index.js")

// Resolve the async fixture class directory relative to this test file,
// then wrap it with joop to obtain an async factory.
const testClassPath = path.resolve(__dirname, "..", "async-test-class")
const testClass = joop(testClassPath)

describe("Async API", () => {
  test("should wait for object to initialise", async () => {
    // Awaiting the factory must yield a fully-initialised instance.
    const instance = await testClass()
    expect(instance).toHaveProperty("done")
    expect(instance.done).toEqual("initialised")
  })
})
|
#!/usr/bin/env bash
# Install PHP dependencies via Composer, including dev dependencies
# (plain `composer install` includes require-dev by default).
composer install
#!/bin/bash
# Configure the firewall's network interfaces with a fixed address.
# Give the container a moment for the interfaces to appear.
sleep 1
# IP setup (we need to try different names in different scenarios, but never eth0 which is the docker if)
declare -a PORTS=("l4fw-net-0" "l4fw0-0" "l4fw0-1")
for p in "${PORTS[@]}"
do
    # Quote "$p" so interface names survive word splitting / globbing.
    ifconfig "$p" down
    ifconfig "$p" 20.0.0.2 netmask 255.255.255.0
    ifconfig "$p" up
done
# Snapshot the final interface state for post-mortem debugging.
ifconfig > /ifconfig.debug
var moment = require('moment');
const crypto = require('crypto');
module.exports = {
addPerson: (req, res) => {
let message = '';
let first_name = req.body.first_name;
let last_name = req.body.last_name;
let username = req.body.username;
let age = req.body.age;
let street = req.body.street;
let apt = req.body.apartment;
let city = req.body.city;
let state = req.body.state;
let zip = req.body.zip;
let salt = crypto.randomBytes(16).toString('base64');
let hash = crypto.createHmac('sha512', salt).update(req.body.password).digest("base64");
let password = <PASSWORD>;
let usernameQuery = "SELECT * FROM `persons` WHERE username = '" + username + "'";
db.query(usernameQuery, (err, result) => {
if (err) {
return res.status(500).send(err);
}
if (result.length > 0) {
message = 'Username already exists';
return res.status(500).send(message);
}
else {
// send the person's details to the database
let query = "INSERT INTO `persons` (first_name, last_name, username, age, dob, password, image, street, apartment, city, state, zip, country, creation_time) VALUES ('" +
first_name + "', '" + last_name + "', '" + username + "', '" + age + "', '1990-01-01', '" + password + "', 'test.jpeg', '" +
street + "', '" +
apt + "', '" +
city + "', '" + state + "', '" +
zip + "', 'USA', '" +
moment().format("YYYY-MM-DD hh:mm:ss") + "')";
db.query(query, (err, result) => {
if (err) {
return res.status(500).send(err);
}
res.status(200).send('Person successfully added!');
});
}
});
},
editPerson: (req, res) => {
let personId = req.params.id;
let first_name = req.body.first_name;
let last_name = req.body.last_name;
let age = req.body.age;
let street = req.body.street;
let apt = req.body.apartment;
let city = req.body.city;
let state = req.body.state;
let zip = req.body.zip;
let query = "UPDATE `persons` SET `first_name` = '" + first_name + "', `last_name` = '" + last_name +
"', `age` = '" + age + "', `street` = '" + street +
"', `apartment` = '" + apt + "', `city` = '" + city + "', `state` = '" + state + "', `zip` = '" + zip +
"' WHERE `persons`.`id` = '" + personId + "'";
db.query(query, (err, result) => {
if (err) {
return res.status(500).send(err);
}
res.status(200).send('Person successfully edited!');
});
},
deletePerson: (req, res) => {
let personId = req.params.id;
let deleteUserQuery = 'DELETE FROM persons WHERE id = "' + personId + '"';
db.query(deleteUserQuery, (err, result) => {
if (err) {
return res.status(500).send(err);
}
res.status(200).send('Person successfully deleted!');
});
},
searchPerson: (req, res) => {
let first_name = req.body.first_name;
let last_name = req.body.last_name;
let lower = req.body.age_lower;
let upper = req.body.age_upper;
if (!first_name && !last_name && !lower && !upper) {
message = 'Please provide a person\'s name or age to perfom search.';
return res.status(500).send(message);
}
let where = "";
if (first_name) {
where += "first_name = '" + first_name + "'";
}
if (last_name) {
if (where != "") {
where += " AND ";
}
where += "last_name = '" + last_name + "'";
}
if (lower) {
if (where != "") {
where += " AND ";
}
where += "age >= '" + lower + "'";
}
if (upper) {
if (where != "") {
where += " AND ";
}
where += "age <= '" + upper + "'";
}
let query = "SELECT id, first_name, last_name, username, age, street, apartment, city, state, zip, country, creation_time\n\
FROM `persons` WHERE " + where; // query database to get all the players
// console.log(query);
// execute query
db.query(query, (err, result) => {
if (err) {
return res.status(500).send('Something went with the query! Please check your parameters.');
}
res.status(200).send(result);
});
},
listPersons: (req, res) => {
    // Return every person record, ordered by ascending id.
    let query = "SELECT id, first_name, last_name, username, age, dob, street, apartment, city, state, zip, country, creation_time\n\
FROM `persons` ORDER BY id ASC"; // query database to get all the players
    // execute query
    db.query(query, (err, result) => {
        if (err) {
            return res.status(500).send('Something went with the query! Please check your parameters.');
        }
        res.status(200).send(result);
    });
}
}; |
import torch
import torch.nn.functional as F
class MoCoV2(object):
    """InfoNCE-style contrastive loss for MoCo v2.

    Holds L2-normalized query/key embeddings and a detached negative queue,
    and computes the contrastive loss over them.
    """

    def __init__(self, outputs_q, outputs_k, queue, t=0.07):
        """
        Args:
            outputs_q: query encoder outputs, shape (batch, dim).
            outputs_k: key (momentum) encoder outputs, shape (batch, dim).
            queue: negative embeddings, shape (num_negatives, dim).
            t: softmax temperature.
        """
        super().__init__()
        self.outputs_q = F.normalize(outputs_q, dim=1)
        self.outputs_k = F.normalize(outputs_k, dim=1)
        # Detach the queue so no gradient flows into stored negatives.
        self.queue = queue.clone().detach()
        # BUG FIX: the temperature was accepted but never stored, so
        # contrastive_loss() raised AttributeError on self.t.
        self.t = t

    def contrastive_loss(self):
        """Return the scalar contrastive loss.

        The positive logit for sample i is sim(q_i, k_i); the denominator sums
        exp-similarities to all keys in the batch plus all queue negatives.
        """
        # Temperature-scaled similarity between queries and keys.
        sim_qk = torch.matmul(self.outputs_q, self.outputs_k.t()) / self.t
        # Temperature-scaled similarity between queries and queued negatives.
        sim_q_queue = torch.matmul(self.outputs_q, self.queue.t()) / self.t
        # -log softmax of the positive (diagonal) entries, averaged over the batch.
        contrastive_loss = -torch.log(
            torch.exp(sim_qk)
            / (torch.exp(sim_qk).sum(dim=1, keepdim=True)
               + torch.exp(sim_q_queue).sum(dim=1, keepdim=True))
        ).diag().mean()
        return contrastive_loss
def calculate_median_and_mode(data):
    """Return ``(median, mode)`` for a list of numbers.

    ``mode`` is the list of all values that share the maximum frequency,
    in ascending order (so with all-unique data it contains every value,
    matching the original behavior). For empty input, returns ``(0, 0)``
    instead of crashing as the original did.

    Fixes vs. the original:
    - no longer mutates the caller's list (``data.sort()`` sorted in place);
    - mode counting is O(n) via ``Counter`` instead of a nested O(n^2) scan.
    """
    if not data:
        return (0, 0)
    # Work on a sorted copy so the caller's list is untouched.
    ordered = sorted(data)
    n = len(ordered)
    mid = n // 2
    # Median: average of the two middle values for even n, middle value for odd n.
    if n % 2 == 0:
        median = (ordered[mid] + ordered[mid - 1]) / 2
    else:
        median = ordered[mid]
    # Mode: every value tied for the highest frequency, in ascending order
    # (Counter preserves first-seen order of the sorted data).
    counts = Counter(ordered)
    max_freq = max(counts.values())
    mode = [value for value, freq in counts.items() if freq == max_freq]
    return (median, mode)
if __name__ == '__main__':
    # Demo run on a small sample data set.
    sample = [1, 2, 3, 4, 5, 6]
    result = calculate_median_and_mode(sample)
    print("The median of given data set is :", result[0])
    print("The mode of given data set is :", result[1])
import * as t from 'io-ts';
import APIWrapper from '@core/APIWrapper';
import { HttpMethod } from "@core/HttpMethod";
// Runtime (io-ts) shape of a single accounts-payable payment row.
export const paymentValidator = t.type({
	amountCents: t.number,
	expectedDate: t.string,
	orderId: t.number,
	paid: t.boolean,
	staggerId: t.number,
	failedCron: t.boolean,
});

// The endpoint returns an array of payment rows.
export const validator = t.array(paymentValidator);

const path = "/member/open-order-details-ap"

// GET wrapper for fetching open AP order details, validated with `validator`.
export const getWrapper = new APIWrapper<typeof validator, {}, {}>({
	path: path,
	type: HttpMethod.GET,
	resultValidator: validator
})
|
<reponame>mauriciocc/tcc<gh_stars>0
/*
* Copyright (c) 2013 <NAME>. All rights reserved.
* Licensed under https://github.com/chriswhocodes/JFXRay/blob/master/LICENSE-BSD
* http://www.chrisnewland.com/
*/
package com.simple.app.raytracer.domain;
import java.util.concurrent.ThreadLocalRandom;
/*
* Standing on the shoulders of giants.
* I did not invent this raytracer, I merely converted it from C to Java
* with the help of this web page by <NAME>:
* http://fabiensanglard.net/rayTracing_back_of_business_card/index.php
* The original code is by <NAME>
*/
/**
 * Multi-threaded recursive raytracer. Renders spheres laid out from an ASCII
 * pattern over a checkered floor, with soft shadows and depth-of-field blur
 * via stochastic sampling. Output is a packed RGB byte buffer.
 */
class JFXRay
{
    // Packed RGB output, 3 bytes per pixel.
    private byte[] imageData;
    // Sphere layout decoded from the input lines ('*' == sphere present).
    private boolean[][] data;
    private int rows;
    private int cols;
    private Vector3f floorColourOdd;
    private Vector3f floorColourEven;
    private Vector3f skyColour;
    // FIX: removed a stray duplicate semicolon after this declaration.
    private float sphereReflectivity;
    private long renderStart = 0;
    private long renderTime = 0;

    // Decode the ASCII art into the sphere grid. Rows and columns are both
    // flipped so the picture renders the same way it reads.
    private void init(String[] lines)
    {
        cols = lines[0].length();
        rows = lines.length;
        data = new boolean[rows][cols];
        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < cols; c++)
            {
                char ch = lines[r].charAt(c);
                data[rows - 1 - r][cols - 1 - c] = ch == '*';
            }
        }
    }

    // The intersection test for line [o,v].
    // Return 2 if a hit was found (and also return distance t and bouncing ray
    // n).
    // Return 0 if no hit was found but ray goes upward
    // Return 1 if no hit was found but ray goes downward
    // Returns object[] 0 = int (m), 1 = float (t), 2 = Vector3f n
    Object[] test(Vector3f o, Vector3f d, Vector3f n)
    {
        float t = 1e9f;
        int m = 0;
        float p2 = -o.getZ() / d.getZ();
        if (.01 < p2)
        {
            t = p2;
            n = new Vector3f(0, 0, 1);
            m = 1;
        }
        for (int col = 0; col < cols; col++)
        {
            for (int row = 0; row < rows; row++)
            {
                // For this row and column is there a sphere?
                if (data[row][col])
                {
                    // There is a sphere but does the ray hit it ?
                    Vector3f p = o.add(new Vector3f(-col, 0, -row - 4));
                    float b = p.dot(d);
                    float c = p.dot(p) - 1;
                    float q = b * b - c;
                    // Does the ray hit the sphere ?
                    if (q > 0)
                    {
                        float s = -b - (float) Math.sqrt(q);
                        if (s < t && s > .01)
                        { // So far this is the minimum distance, save
                          // it. And // also // compute the bouncing ray
                          // vector into 'n'
                            t = s;
                            n = (p.add(d.scale(t))).normalise();
                            m = 2;
                        }
                    }
                }
            }
        }
        return new Object[] { m, t, n };
    }

    // sample the world and return the pixel color for
    // a ray passing by point o (Origin) and d (Direction)
    // NOTE: recurses on reflective sphere hits with no explicit depth cap;
    // attenuation by sphereReflectivity bounds the contribution in practice.
    Vector3f sample(Vector3f origin, Vector3f direction)
    {
        Vector3f n = new Vector3f(0, 0, 0);
        // Search for an intersection ray Vs World.
        Object[] result = test(origin, direction, n);
        int m = (int) result[0];
        float t = (float) result[1];
        n = (Vector3f) result[2];
        if (m == 0)
        {
            // No sphere found and the ray goes upward: Generate a sky color
            return skyColour.scale((float) Math.pow(1 - direction.getZ(), 4));
        }
        // A sphere was maybe hit.
        // h = intersection coordinate
        Vector3f h = origin.add(direction.scale(t));
        // 'l' = direction to light (with random delta for soft-shadows).
        Vector3f l = new Vector3f(9 + getRandomFloat(), 9 + getRandomFloat(), 16);
        l = l.add(h.scale(-1));
        l = l.normalise();
        // r = The half-vector
        Vector3f r = direction.add(n.scale(n.dot(direction.scale(-2f))));
        // Calculated the lambertian factor
        float b = l.dot(n);
        // Calculate illumination factor (lambertian coefficient > 0 or in
        // shadow)?
        if (b < 0)
        {
            b = 0;
        }
        else
        {
            result = test(h, l, n);
            int res = (int) result[0];
            t = (float) result[1];
            n = (Vector3f) result[2];
            if (res > 0)
            {
                b = 0;
            }
        }
        // Calculate the color 'p' with diffuse and specular component
        Vector3f rdash = r.scale(b > 0 ? 1 : 0);
        float p = (float) Math.pow(l.dot(rdash), 64);
        if (m == 1)
        {
            // No sphere was hit and the ray was going downward:
            h = h.invertScale(4);
            // Generate a floor color
            int ceil = (int) (Math.ceil(h.getX()) + Math.ceil(h.getY()));
            if ((ceil & 1) == 1)
            {
                return floorColourOdd.scale(b / 4 + .1f);
            }
            else
            {
                return floorColourEven.scale(b / 4 + .1f);
            }
        }
        // m == 2 A sphere was hit.
        // Cast an ray bouncing from the sphere surface.
        // Attenuate color since it is bouncing
        return new Vector3f(p, p, p).add(sample(h, r).scale(sphereReflectivity));
    }

    /**
     * Returns the packed RGB buffer.
     * NOTE(review): this getter also updates renderTime as a side effect,
     * which render() already does on completion — confirm whether callers
     * rely on reading a partial render's elapsed time here.
     */
    public byte[] getImageData()
    {
        renderTime = System.currentTimeMillis() - renderStart;
        return imageData;
    }

    public JFXRay()
    {
    }

    /**
     * Renders the scene described by {@code config} into imageData, splitting
     * the image into horizontal bands, one worker thread per band, and
     * blocking until all workers finish.
     */
    public void render(final RenderConfig config)
    {
        renderStart = System.currentTimeMillis();
        this.floorColourOdd = config.getOddColour();
        this.floorColourEven = config.getEvenColour();
        this.skyColour = config.getSkyColour();
        this.sphereReflectivity = config.getSphereReflectivity();
        init(config.getLines());
        imageData = new byte[config.getImageWidth() * config.getImageHeight() * 3];
        // Camera direction
        final Vector3f g = config.getCamDirection().normalise();
        // Camera up vector (the scene's Z axis points up).
        final Vector3f a = new Vector3f(0, 0, 1).cross(g).normalise().scale(.003f);
        // The right vector, obtained via traditional cross-product
        final Vector3f b = g.cross(a).normalise().scale(.003f);
        // Offset to the image plane origin. See https://news.ycombinator.com/item?id=6425965 for more.
        final Vector3f c = a.add(b).scale(-256).add(g);
        final int linesPerThread = config.getImageHeight() / config.getThreads();
        // System.out.println("LinesPerThread: " + linesPerThread);
        Thread[] workers = new Thread[config.getThreads()];
        final Vector3f defaultPixelColour = new Vector3f(16, 16, 16);
        for (int i = 0; i < config.getThreads(); i++)
        {
            final int startingLine = config.getImageHeight() - 1 - (i * linesPerThread);
            final int pixelBufferOffset = i * linesPerThread;
            // System.out.println("Thread " + i + " plotting " + startingLine);
            Thread worker = new Thread(new Runnable()
            {
                @Override
                public void run()
                {
                    int pixel = config.getImageWidth() * pixelBufferOffset * 3;
                    // For each line
                    for (int y = startingLine; y > startingLine - linesPerThread; y--)
                    {
                        // For each pixel in a line
                        for (int x = config.getImageWidth() - 1; x >= 0; x--)
                        {
                            // Reuse the vector class to store not XYZ but an
                            // RGB
                            // pixel color
                            // Default pixel color is almost pitch black
                            Vector3f p = defaultPixelColour;
                            // Cast rays per pixel (For blur (stochastic
                            // sampling) and
                            // soft-shadows.
                            for (int r = config.getRays() - 1; r >= 0; r--)
                            {
                                // The delta to apply to the origin of the view
                                // (For Depth of View blur).
                                // A little bit of delta up/down and left/right
                                Vector3f t = a.scale(getRandomFloat() - 0.5f);
                                t = t.scale(64);
                                Vector3f t2 = b.scale(getRandomFloat() - 0.5f);
                                t2 = t2.scale(64);
                                t = t.add(t2);
                                // Set the camera focal point and
                                // Cast the ray
                                // Accumulate the color returned in the p
                                // variable
                                // Ray Direction with random deltas for
                                // stochastic sampling
                                Vector3f dirA = a.scale(getRandomFloat() + x);
                                Vector3f dirB = b.scale(getRandomFloat() + y);
                                Vector3f dirC = dirA.add(dirB).add(c);
                                Vector3f dir = t.scale(-1).add(dirC.scale(16)).normalise();
                                // Ray Origin +p for color accumulation
                                p = sample(config.getRayOrigin().add(t), dir).scale(config.getBrightness()).add(p);
                            }
                            imageData[pixel++] = (byte) p.getX();
                            imageData[pixel++] = (byte) p.getY();
                            imageData[pixel++] = (byte) p.getZ();
                        }
                    }
                }
            });
            worker.start();
            workers[i] = worker;
        }
        // Block until every band has been rendered.
        for (int i = 0; i < config.getThreads(); i++)
        {
            try
            {
                workers[i].join();
            }
            catch (InterruptedException ie)
            {
                ie.printStackTrace();
            }
        }
        renderTime = System.currentTimeMillis() - renderStart;
    }

    // Uniform random float in [0, 1), thread-local so workers don't contend.
    private float getRandomFloat()
    {
        return ThreadLocalRandom.current().nextFloat();
    }

    /** Elapsed wall-clock milliseconds of the last render. */
    public long getRenderTime()
    {
        return renderTime;
    }
}
|
<reponame>ValentinMolina500/wsutc-trio-app
/**
 * Observer half of the observer pattern: implementors receive notifications
 * via `update` whenever the observed subject changes.
 */
export interface Observer {
  // Called with the subject's notification payload (shape is caller-defined).
  update(data: any): void;
}
<reponame>connorbanderson/CoinREXX
import React, { Component } from 'react'
import {bindActionCreators} from 'redux'
import {connect} from 'react-redux'
import { updateCoins } from '../../../../reducers/crypto'
import { updatePortfolio } from '../../../../reducers/accountinfo'
import { updatePortfolioCoins } from '../../../../reducers/portfolios'
import 'public/PumpJack.svg'
import 'public/MeasurementPointBetter.svg'
import 'public/PJSmall.svg'
import 'public/MP-Small.svg'
import 'public/MP-Small-V3.svg'
import './Dashboard.css'
import { portfolioGenerator } from './helper'
class DashboardPortfolios extends Component {
// Standard React constructor; state is currently an empty placeholder.
constructor( props ){
super(props)
this.state = {
}
}
round(value, decimals) {
return Number(Math.round(value+'e'+decimals)+'e-'+decimals);
}
render () {
{console.log('this is props', this.props)}
let portfolio1 = this.round(
(
this.props.crypto['BTC'].price*17.2 +
this.props.crypto['ETH'].price*99.9 +
this.props.crypto['BCH'].price*17.2 +
this.props.crypto['XRP'].price*5750.1 +
this.props.crypto['XEM'].price*5496.8 +
this.props.crypto['ETC'].price*59.9 +
this.props.crypto['ZEC'].price*1.9 +
this.props.crypto['GNT'].price*1317.9 +
this.props.crypto['LTC'].price*58.56 +
this.props.crypto['GNO'].price*1.3 +
this.props.crypto['SC'].price*25545.73 +
this.props.crypto['MAID'].price*586.4 +
this.props.crypto['XLM'].price*9954.8 +
this.props.crypto['XMR'].price*4.9+
this.props.crypto['NEO'].price*26.2 +
this.props.crypto['REP'].price*10 +
this.props.crypto['DOGE'].price*109953.7 +
this.props.crypto['SNT'].price*3016.377 +
this.props.crypto['GAME'].price*90 +
this.props.crypto['WAVES'].price*61.5 +
this.props.crypto['BTS'].price*1288.1 +
this.props.crypto['DASH'].price*1.043 +
this.props.crypto['FCT'].price*9.7 +
this.props.crypto['GBYTE'].price*0.4 +
this.props.crypto['BAT'].price*1435.8 +
this.props.crypto['STRAT'].price*38.2 +
this.props.crypto['ICN'].price*56 +
this.props.crypto['EOS'].price*116 +
this.props.crypto['STEEM'].price*140.9 +
this.props.crypto['LSK'].price*86.7 +
this.props.crypto['NMC'].price*70 +
this.props.crypto['QTUM'].price*14.9 +
this.props.crypto['DCR'].price*3.8 +
this.props.crypto['DGB'].price*4979.1 +
this.props.crypto['POT'].price*877.4 +
this.props.crypto['AMP'].price*213.5
), 0
)
let portfolio2 = this.round(
(
this.props.crypto['BTC'].price*0.65244715 +
this.props.crypto['ETH'].price*10.9 +
this.props.crypto['BCH'].price*0.65244715 +
this.props.crypto['SNGLS'].price*535.8 +
this.props.crypto['BAT'].price*467.0 +
this.props.crypto['WAVES'].price*19.7 +
this.props.crypto['BTS'].price*412.4 +
this.props.crypto['SC'].price*6398.3 +
this.props.crypto['DGB'].price*3218.1 +
this.props.crypto['SNT'].price*945.1 +
this.props.crypto['LSK'].price*27.98081642 +
this.props.crypto['AMP'].price*233.6 +
this.props.crypto['REP'].price*3.02660141 +
this.props.crypto['FCT'].price*2.95104344 +
this.props.crypto['POT'].price*546.5 +
this.props.crypto['QTUM'].price*8.93334814 +
this.props.crypto['XLM'].price*2935.1 +
this.props.crypto['GNT'].price*216.9 +
this.props.crypto['NEO'].price*7.60791627 +
this.props.crypto['GBYTE'].price*0.11548333 +
this.props.crypto['STEEM'].price*41.63180283 +
this.props.crypto['GAME'].price*25.84439777 +
this.props.crypto['STRAT'].price*10.45813369 +
this.props.crypto['ZEC'].price*0.29516474 +
this.props.crypto['GNO'].price*0.27296667 +
this.props.crypto['ETC'].price*3.81198054 +
this.props.crypto['DOGE'].price*30549.534 +
this.props.crypto['MAID'].price*160.56087996 +
this.props.crypto['DCR'].price*2.14797111 +
this.props.crypto['LTC'].price*1.31731249 +
this.props.crypto['DASH'].price*0.281705 +
this.props.crypto['XRP'].price*316.69103918 +
this.props.crypto['XMR'].price*1.19576613
), 0
)
let portfolio3 = this.round(
(
this.props.crypto['BTC'].price*0.60540599 +
this.props.crypto['ETH'].price*8.3 +
this.props.crypto['BCH'].price*0.60540599
), 0
)
return (
<div className='noselect flexx' >
<div className='portfolioDiv'>
<h1>Main ${portfolio1}</h1>
</div>
<h1>Connor ${portfolio2}</h1>
<h1>Suji ${portfolio3}</h1>
<h1>Total ${portfolio1 + portfolio2 + portfolio3}</h1>
<div className='fakeCard'>
<div className='header'>
<img className='pumpJack' src='/PJSmall.svg'></img>
</div>
<div className='body'>
</div>
</div>
<div className='fakeCard'>
<div className='header'>
<img className='mp' src='/MP-Small-V3.svg'></img>
</div>
<div className='body'>
</div>
</div>
<div className='fakeCard'>
<div className='header'>
<img className='mp' src='/nodeSmall.svg'></img>
</div>
<div className='body'>
</div>
</div>
</div>
)
}
}
/**
 * Select the Redux store slices this component reads and expose them as props.
 * @param {Object} state - full Redux store state
 * @param {Object} ownProps - props passed directly to the component (unused)
 * @returns {{crypto: Object, account: Object, portfolios: Object}}
 */
function mapStateToProps(state, ownProps) {
  const { crypto, portfolios } = state;
  const account = state.accountinfo['ACT-1'];
  return { crypto, account, portfolios };
}
/**
 * Bind the portfolio-related action creators to the store's dispatch so the
 * component can invoke them directly as props.
 * NOTE(review): `bindActionCreators` and `updateCoins` are not imported in
 * this chunk — presumably imported earlier in the file; verify.
 */
function matchDispatchToProps(dispatch){
return bindActionCreators({
updateCoins,
updatePortfolio,
updatePortfolioCoins
}, dispatch);
}
export default connect(mapStateToProps, matchDispatchToProps)(DashboardPortfolios)
/*
{portfolioGenerator(this.props)}
*/
|
#!/bin/bash
# Build every Haxe target described by a .hxml file in the current directory.
for target in *.hxml; do
    # Without nullglob, an empty directory leaves the literal pattern "*.hxml";
    # skip it instead of invoking the compiler on a nonexistent file.
    [ -e "$target" ] || continue
    echo "--------------------------"
    echo "Building ${target} ..."
    # Invoke the Haxe compiler on the target's .hxml file (quoted so filenames
    # with spaces survive word splitting).
    # NOTE(review): "-f" is not a documented haxe flag; the conventional
    # invocation is plain `haxe file.hxml` — confirm against the local toolchain.
    haxe -f "${target}"
done
<reponame>Professorvennie/Bronze-Age
package com.professorvennie.bronzeage.blocks;
import com.professorvennie.bronzeage.tileentitys.TileEntitySteamReceiver;
import net.minecraft.block.ITileEntityProvider;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
/**
* Created by ProfessorVennie on 1/31/2015 at 9:04 PM.
*/
/**
 * Block backed by a {@link TileEntitySteamReceiver} tile entity.
 * Registered as an iron-material block named "steamReceiver".
 */
public class BlockSteamReceiver extends BlockBase implements ITileEntityProvider {

    protected BlockSteamReceiver() {
        // Iron material; "steamReceiver" is the name passed to BlockBase
        // (presumably the registry/unlocalized name — defined outside this file).
        super(Material.iron, "steamReceiver");
    }

    @Override
    public void registerBlockIcons(IIconRegister iconRegister) {
        // Intentionally empty: no icons registered here.
        // NOTE(review): likely rendered via the tile entity or a custom
        // renderer — confirm; otherwise the block shows the missing texture.
    }

    @Override
    public TileEntity createNewTileEntity(World p_149915_1_, int p_149915_2_) {
        // One receiver tile entity per placed block.
        return new TileEntitySteamReceiver();
    }
}
|
-- Application users; authenticated by email + password.
CREATE TABLE users (
    user_id INT PRIMARY KEY AUTO_INCREMENT,
    email VARCHAR(100) UNIQUE,       -- NOTE(review): nullable + UNIQUE — MySQL permits multiple NULLs; intended?
    password VARCHAR(100) NOT NULL   -- NOTE(review): should store a password hash, never plaintext.
);

-- Role catalogue (e.g. admin, user).
CREATE TABLE roles (
    role_id INT PRIMARY KEY AUTO_INCREMENT,
    role_name VARCHAR(100) NOT NULL
);

-- Many-to-many join table linking users to their roles.
CREATE TABLE users_roles (
    user_id INT,
    role_id INT,
    FOREIGN KEY (user_id) REFERENCES users(user_id),
    FOREIGN KEY (role_id) REFERENCES roles(role_id),
    PRIMARY KEY (user_id, role_id)
);
<filename>comp_debug/src/main/java/cmu/xprize/comp_debug/CD_CONST.java<gh_stars>10-100
package cmu.xprize.comp_debug;
/**
 * Shared constants for the debug component: custom drawable state sets used
 * to style skill/tutor indicator views, plus string identifiers for skill
 * states and tutor-selection categories.
 */
public class CD_CONST {

    // Drawable state sets for skill indicators (one custom attr per state).
    static final int[] SKILLS_NORMAL = {R.attr.state_skillnormal};
    static final int[] SKILLS_CURRENT = {R.attr.state_skillcurrent};
    static final int[] SKILLS_NEXT = {R.attr.state_skillnext};
    static final int[] SKILLS_HARDER = {R.attr.state_skillharder};
    static final int[] SKILLS_EASIER = {R.attr.state_skilleasier};
    static final int[] SKILLS_ERROR = {R.attr.state_skillerror};
    static final int[] SKILLS_NULL = {R.attr.state_skillnull};

    // Drawable state sets identifying each tutor type.
    // BOJACK why is it like this??? Why so many steps?
    static final int[] TUTOR_AKIRA = {R.attr.tutor_akira};
    static final int[] TUTOR_BPOP_LTR = {R.attr.tutor_bpop_ltr};
    static final int[] TUTOR_BPOP_NUM = {R.attr.tutor_bpop_num};
    static final int[] TUTOR_COMPARE = {R.attr.tutor_compare};
    static final int[] TUTOR_MISSINGNO = {R.attr.tutor_missingno};
    static final int[] TUTOR_COUNTINGX_1 = {R.attr.tutor_countingx_1};
    static final int[] TUTOR_COUNTINGX_10 = {R.attr.tutor_countingx_10};
    static final int[] TUTOR_COUNTINGX_100 = {R.attr.tutor_countingx_100};
    static final int[] TUTOR_MATH = {R.attr.tutor_math};
    static final int[] TUTOR_NUMBERSCALE = {R.attr.tutor_numberscale};
    static final int[] TUTOR_STORY_1 = {R.attr.tutor_story_1};
    static final int[] TUTOR_STORY_2 = {R.attr.tutor_story_2};
    static final int[] TUTOR_STORY_3 = {R.attr.tutor_story_3};
    static final int[] TUTOR_STORY_4 = {R.attr.tutor_story_4};
    static final int[] TUTOR_STORY_5 = {R.attr.tutor_story_5};
    static final int[] TUTOR_STORY_NONSTORY = {R.attr.tutor_story_nonstory};
    static final int[] TUTOR_SONG = {R.attr.tutor_song};
    static final int[] TUTOR_NUMCOMPARE = {R.attr.tutor_numcompare};
    static final int[] TUTOR_PICMATCH = {R.attr.tutor_picmatch};
    static final int[] TUTOR_PLACEVALUE = {R.attr.tutor_placevalue};
    static final int[] TUTOR_BIGMATH = {R.attr.tutor_bigmath};
    static final int[] TUTOR_SPELLING = {R.attr.tutor_spelling};
    static final int[] TUTOR_WRITE = {R.attr.tutor_write};
    static final int[] TUTOR_LETTERS = {R.attr.tutor_letters};
    static final int[] TUTOR_NUMBERS = {R.attr.tutor_numbers};
    static final int[] TUTOR_STORIES = {R.attr.tutor_stories};

    // String identifiers mirroring the skill state sets above.
    static final String STATE_NORMAL = "STATE_NORMAL";
    static final String STATE_CURRENT = "STATE_CURRENT";
    static final String STATE_NEXT = "STATE_NEXT";
    static final String STATE_HARDER = "STATE_HARDER";
    static final String STATE_EASIER = "STATE_EASIER";
    static final String STATE_ERROR = "STATE_ERROR";
    static final String STATE_NULL = "STATE_NULL";

    // Tutor-selection category identifiers (public: used outside this component).
    public static final String SELECT_WRITING = "SELECT_WRITING";
    public static final String SELECT_STORIES = "SELECT_STORIES";
    public static final String SELECT_MATH = "SELECT_MATH";
    public static final String SELECT_SHAPES = "SELECT_SHAPES";
}
|
-- Rename every customer older than 18 to the placeholder 'John Doe'.
-- NOTE(review): this overwrites real names for all matching rows — confirm
-- this is an intentional anonymization step and not test scaffolding.
UPDATE Customer
SET name = 'John Doe'
WHERE age > 18;
# Run model evaluation (test mode) on GPU 1 with the saved checkpoint.
# The original ended the line-continuation with "\" followed by comment
# lines, so the command's termination depended on comment parsing and any
# reordering of the commented flags silently changed the invocation.
python3 main.py \
    -mode test \
    -cuda 1 \
    -bs 8 \
    -dataset path \
    -support 1 \
    -neighbor 20 \
    -load model.pkl
# Optional flags — append above (before -load's line ending) when needed:
#   -normal
#   -random
#   -rotate 180
#   -scale 2
#   -shift 10.0
/** Shape of a single blog post as returned by the posts API. */
export interface BlogInterface {
  id: number;      // post id
  userId: number;  // id of the authoring user
  title: string;
  body: string;
}

/** Redux/async state wrapper for a list of blog posts. */
export interface StateInterface {
  data: BlogInterface[];   // fetched posts (empty until loaded)
  loading: boolean;        // true while a fetch is in flight
  error: string | null;    // error message, or null when no error
}
|
class Rectangle:
    """Axis-aligned rectangle defined by two opposite corner points.

    Corners are ``(x, y)`` tuples. Width and height are taken as absolute
    differences, so area and perimeter are well-defined even when the
    corners are supplied in a non-canonical order.
    """

    def __init__(self, bottom_left, top_right):
        self.bottom_left = bottom_left  # (x, y) corner
        self.top_right = top_right      # opposite (x, y) corner

    def _dimensions(self):
        # Absolute width and height; shared by area and perimeter (was
        # duplicated in both methods).
        width = abs(self.top_right[0] - self.bottom_left[0])
        height = abs(self.top_right[1] - self.bottom_left[1])
        return width, height

    def calculate_area(self):
        """Return the rectangle's area (always non-negative)."""
        width, height = self._dimensions()
        return width * height

    def calculate_perimeter(self):
        """Return the rectangle's perimeter (always non-negative)."""
        width, height = self._dimensions()
        return 2 * (width + height)

    def is_point_inside(self, x, y):
        """Return True if (x, y) lies inside or on the rectangle's border.

        Uses min/max of the corner coordinates so the containment test
        agrees with calculate_area/calculate_perimeter for swapped corners
        (the original silently returned False in that case).
        """
        x_lo, x_hi = sorted((self.bottom_left[0], self.top_right[0]))
        y_lo, y_hi = sorted((self.bottom_left[1], self.top_right[1]))
        return x_lo <= x <= x_hi and y_lo <= y <= y_hi
<reponame>kreako/soklaki
import {
dateFromString,
dateJsObj,
dateDiffInDays,
today,
dateToNiceString,
} from "./date";
// Truncates a full ISO timestamp to its YYYY-MM-DD date part.
test("dateFromString", () => {
  expect(dateFromString("2021-03-30T17:59:16.637699+00:00")).toBe("2021-03-30");
});

// Parses the date part into a JS Date (note: Date months are 0-indexed).
test("dateJsObj", () => {
  const d = dateJsObj("2021-03-30T17:59:16.637699+00:00");
  expect(d.getFullYear()).toBe(2021);
  expect(d.getMonth()).toBe(3 - 1);
  expect(d.getDate()).toBe(30);
});

// Malformed or out-of-range inputs must throw rather than produce a bogus Date.
test("dateJsObj error cases", () => {
  // separator 1
  expect(() => dateJsObj("2021/03-30T17:59:16.637699+00:00")).toThrow();
  // separator 2
  expect(() => dateJsObj("2021-03/30T17:59:16.637699+00:00")).toThrow();
  // year not a number
  expect(() => dateJsObj("abcd-03-30T17:59:16.637699+00:00")).toThrow();
  // year too small
  expect(() => dateJsObj("1874-03-30T17:59:16.637699+00:00")).toThrow();
  // year too big
  expect(() => dateJsObj("2342-03-30T17:59:16.637699+00:00")).toThrow();
  // month not a number
  expect(() => dateJsObj("2021-ab-30T17:59:16.637699+00:00")).toThrow();
  // month too small
  expect(() => dateJsObj("2021-00-30T17:59:16.637699+00:00")).toThrow();
  // month too big
  expect(() => dateJsObj("2021-13-30T17:59:16.637699+00:00")).toThrow();
  // day not a number
  expect(() => dateJsObj("2021-03-a0T17:59:16.637699+00:00")).toThrow();
  // day too small
  expect(() => dateJsObj("2021-03-00T17:59:16.637699+00:00")).toThrow();
  // day too big
  expect(() => dateJsObj("2021-03-32T17:59:16.637699+00:00")).toThrow();
  // Stupid date (Feb 31 does not exist)
  expect(() => dateJsObj("2021-02-31T17:59:16.637699+00:00")).toThrow();
});

// Absolute difference in whole days, independent of argument order.
test("dateDiffInDays", () => {
  // 1 day
  expect(dateDiffInDays(new Date(2021, 3, 14), new Date(2021, 3, 15))).toBe(1);
  // 1 day even in the other direction
  expect(dateDiffInDays(new Date(2021, 3, 15), new Date(2021, 3, 14))).toBe(1);
  // 10 days
  expect(dateDiffInDays(new Date(2021, 3, 15), new Date(2021, 3, 25))).toBe(10);
  // 365 days (2021 is not a leap year)
  expect(dateDiffInDays(new Date(2021, 0, 1), new Date(2022, 0, 1))).toBe(365);
});

test("today", () => {
  // Compare field-by-field rather than whole timestamps. This can race if the
  // test runs exactly across midnight (t1 taken just before 00:00, t2 just
  // after), in which case the date fields legitimately differ — accepted here.
  const t1 = new Date();
  const t2 = dateJsObj(today());
  expect(t1.getFullYear()).toBe(t2.getFullYear());
  expect(t1.getMonth()).toBe(t2.getMonth());
  expect(t1.getDate()).toBe(t2.getDate());
});

// Human-readable French-style rendering; null passes through as null.
test("dateToNiceString", () => {
  expect(dateToNiceString("2021-03-30T17:59:16.637699+00:00")).toBe(
    "le 2021-03-30 à 17:59"
  );
  expect(dateToNiceString(null)).toBeNull();
});
|
/* Copyright 2010 Smartmobili SARL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CGPDFARRAY_H_
#define CGPDFARRAY_H_

/* Opaque handle to a PDF array object. */
typedef struct CGPDFArray *CGPDFArrayRef;

#include <CoreGraphics/CGPDFDictionary.h>
#include <CoreGraphics/CGPDFObject.h>
#include <CoreGraphics/CGPDFStream.h>
#include <CoreGraphics/CGPDFString.h>

/* Number of elements in the array. */
CG_EXTERN size_t CGPDFArrayGetCount(CGPDFArrayRef array);

/* Typed accessors mirroring Apple's CGPDFArray API: each returns true and
 * writes the element at `index` through `value` on success, false otherwise
 * (wrong type or index out of range — exact semantics defined by the
 * implementation, not visible here). */
CG_EXTERN bool CGPDFArrayGetObject(CGPDFArrayRef array, size_t index,
    CGPDFArrayRef *value);
CG_EXTERN bool CGPDFArrayGetNull(CGPDFArrayRef array, size_t index);
CG_EXTERN bool CGPDFArrayGetBoolean(CGPDFArrayRef array, size_t index,
    CGPDFBoolean *value);
CG_EXTERN bool CGPDFArrayGetInteger(CGPDFArrayRef array, size_t index,
    CGPDFInteger *value);
CG_EXTERN bool CGPDFArrayGetNumber(CGPDFArrayRef array, size_t index,
    CGPDFReal *value);
CG_EXTERN bool CGPDFArrayGetName(CGPDFArrayRef array, size_t index,
    const char **value);
CG_EXTERN bool CGPDFArrayGetString(CGPDFArrayRef array, size_t index,
    CGPDFStringRef *value);
CG_EXTERN bool CGPDFArrayGetArray(CGPDFArrayRef array, size_t index,
    CGPDFArrayRef *value);
CG_EXTERN bool CGPDFArrayGetDictionary(CGPDFArrayRef array, size_t index,
    CGPDFDictionaryRef *value);
CG_EXTERN bool CGPDFArrayGetStream(CGPDFArrayRef array, size_t index,
    CGPDFStreamRef *value);

#endif /* CGPDFARRAY_H_ */
|
class Product {
    /** @var int|string Unique product identifier. */
    public $id;
    /** @var string Owning application identifier. */
    public $app_id;
    /** @var string Vendor name. */
    public $vendor;
    /** @var string Product category/type. */
    public $product_type;
    /** @var string Lifecycle status, e.g. 'Active'. */
    public $status;
    /** @var string|string[] Tag(s); may be a plain string or an array. */
    public $tags;
    /** @var string URL-friendly handle/slug. */
    public $handle;

    /**
     * One-line, human-readable summary of all product fields.
     *
     * @return string
     */
    public function getFormattedInfo() {
        // Tags may be an array; interpolating an array directly yields the
        // literal string "Array" (plus a notice), so join it explicitly.
        $tags = is_array($this->tags) ? implode(', ', $this->tags) : $this->tags;
        return "Product ID: {$this->id}, App ID: {$this->app_id}, Vendor: {$this->vendor}, Type: {$this->product_type}, Status: {$this->status}, Tags: {$tags}, Handle: {$this->handle}";
    }
}

// Usage
$product = new Product();
$product->id = 12345;
$product->app_id = 'APP-001';
$product->vendor = 'Example Vendor';
$product->product_type = 'Electronics';
$product->status = 'Active';
$product->tags = ['electronics', 'gadgets'];
$product->handle = 'example-product';
echo $product->getFormattedInfo(); // Output: Product ID: 12345, App ID: APP-001, Vendor: Example Vendor, Type: Electronics, Status: Active, Tags: electronics, gadgets, Handle: example-product
/**
Copyright (c) 2017 <NAME>
You may use, distribute, or modify this code under the terms of the MIT license.
*/
#ifndef POLY_SYMMETRY_UTIL_H
#define POLY_SYMMETRY_UTIL_H
#include <algorithm>
#include <vector>
using namespace std;
vector<int> intersection(vector<int> &a, vector<int> &b);
bool contains(vector<int> &items, int &value);
#endif |
import numpy as np
import cv2
import SimpleITK as sitk
import torch
def spatial_transformations_Agumentation(IMG):
    '''
    Create a list representing a random (uniform) sampling of the 3D similarity transformation parameter space.
    As theSimpleITK rotation parametrization uses the vector portion of a versor we don't have an intuitive way of specifying rotations.
    We therefor use the ZYX Euler angle parametrization and convert to versor.
    :param IMG: sitk img
    :return: sitk img
    '''
    # NOTE(review): the body consists solely of this docstring, so the function
    # currently returns None despite the ":return: sitk img" contract — the
    # augmentation logic was never implemented. Implement or remove.
def PADDing():
    """Placeholder for the (unimplemented) padding step.

    The original line read ``def PADDing()`` — missing the colon and body —
    which is a SyntaxError that prevented this module from importing at all.
    Kept as an explicit no-op so the module loads.
    TODO: implement the intended padding logic or delete this stub.
    """
    pass
def predict_dataloader(image_path,batch_size):
    # Load a 3D volume from disk and split it into overlapping 64x64x64
    # patches (stride 62 => 2-voxel overlap along each axis) for patch-wise
    # prediction.
    # NOTE(review): `batch_size` is unused here — confirm whether batching was
    # intended. The two patch prints look like leftover debugging.
    IMG = sitk.ReadImage(image_path)
    IMG = sitk.GetArrayFromImage(IMG)
    IMG = torch.tensor(IMG)
    IMG_sub_patches = IMG.unfold(0,64,62).unfold(1,64,62).unfold(2,64,62)
    print(IMG_sub_patches.shape)
    print(IMG_sub_patches[3,4,4,:,:,:])
    print(IMG_sub_patches[4,4,4,:,:,:])
if __name__ == '__main__':
    # Ad-hoc driver: patch a local test volume (batch_size argument ignored).
    predict_dataloader('D:\\Renyi\\super_resolution\\data\\test_1.tif',0)
    # The commented block below generated the synthetic test_1.tif gradient
    # volume; kept for reference.
    # img = np.arange(0,350).reshape((-1,1))
    # row = np.ones_like(img).reshape(1,-1)
    # fig = np.multiply(img,row)+np.multiply(img.T,row.T)
    # print(fig.shape)
    # row = row[np.newaxis,:].reshape((-1,1,1))
    # print(row.shape)
    # fig = np.multiply(row,fig)+np.multiply(row,fig).T
    #
    # fig = sitk.GetImageFromArray(fig.astype('int16'))
    # sitk.WriteImage(fig,'D:\\Renyi\\super_resolution\\data\\test_1.tif')
|
<reponame>ch1huizong/learning<gh_stars>0
import re

# an IP is: 4 strings, each of 1-3 digits, joined by periods
ip_specs = r'\.'.join([r'\d{1,3}']*4)
re_ip = re.compile(ip_specs)

# Tally how many lines begin with each IP address.
# NOTE(review): `contents` (iterable of log lines) and `ipHitListing` (dict of
# ip -> count) are not defined in this chunk — presumably set up earlier in
# the file; verify before running in isolation.
for line in contents:
    match = re_ip.match(line)  # match() anchors at line start (log-format dependent)
    if match:
        # Increase by 1 if IP exists; else set hit count = 1
        ip = match.group()
        ipHitListing[ip] = ipHitListing.get(ip, 0) + 1
|
class HTTPServer:
    """Tiny in-memory route table with CRUD-style method handling."""

    def __init__(self):
        # Route path -> plain-text body served for that path.
        self.routes = {
            "/": "Welcome to the server!",
            "/about": "This is a simple web server.",
            "/contact": "Contact us at example@email.com"
        }

    def handle_request(self, method, path):
        """Return a ``(status line, body)`` tuple for the given request.

        GET reads the route, POST/PUT overwrite it with a fixed marker body,
        DELETE removes it. Unknown paths yield 404; known paths with an
        unsupported method yield 405.
        """
        if path not in self.routes:
            return "404 Not Found", "Route not found"
        if method == "GET":
            return "200 OK", self.routes[path]
        if method == "POST":
            self.routes[path] = "New content created"
            return "200 OK", "Content created successfully"
        if method == "PUT":
            self.routes[path] = "Content updated"
            return "200 OK", "Content updated successfully"
        if method == "DELETE":
            del self.routes[path]
            return "200 OK", "Content deleted successfully"
        return "405 Method Not Allowed", "Method not allowed for this route"
if __name__ == "__main__":
    server = HTTPServer()

    def app(environ, start_response):
        # Minimal WSGI adapter: map the WSGI environ onto HTTPServer's
        # (method, path) contract and emit the plain-text response body.
        method = environ.get('REQUEST_METHOD')
        path = environ.get('PATH_INFO')
        response_status, response_content = server.handle_request(method, path)
        start_response(response_status, [('Content-Type', 'text/plain')])
        return [response_content.encode()]

    from wsgiref.simple_server import make_server
    # Listen on all interfaces, port 8080; serve_forever() blocks indefinitely.
    httpd = make_server('0.0.0.0', 8080, app)
    print("Serving on port 8080...")
    httpd.serve_forever()
"use strict";

// Auto-generated (transpiled) icon descriptor for a "settings" gear SVG:
// a serialized element tree consumed by an icon-rendering component.
// Do not edit the path data by hand.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.settings = void 0;
var settings = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "path",
      "attribs": {
        "d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
        },
        "children": []
      }]
    }, {
      "name": "g",
      "attribs": {},
      "children": [{
        "name": "g",
        "attribs": {},
        "children": [{
          "name": "path",
          "attribs": {
            "d": "M261.078,275.656l18.578-18.594l-24.719-24.734l-18.594,18.594c-2.953-1.547-6.031-2.812-9.25-3.828v-26.297h-34.984\r\n\t\t\tv26.297c-3.219,1.016-6.297,2.281-9.266,3.828l-18.594-18.578l-24.719,24.719l18.594,18.594c-1.562,2.953-2.812,6.031-3.844,9.25\r\n\t\t\tH128v34.984h26.281c1.031,3.219,2.281,6.297,3.844,9.25l-18.594,18.594l24.734,24.734l18.578-18.594\r\n\t\t\tc2.969,1.547,6.031,2.797,9.266,3.828V384h34.984v-26.281c3.219-1.031,6.297-2.297,9.25-3.844l18.594,18.594l24.734-24.734\r\n\t\t\tl-18.594-18.578c1.547-2.969,2.812-6.031,3.828-9.266h26.297v-34.984h-26.297C263.891,281.688,262.625,278.609,261.078,275.656z\r\n\t\t\t M209.594,337.375c-19.312,0-34.969-15.656-34.969-34.969c0-19.328,15.656-34.969,34.969-34.969\r\n\t\t\tc19.328,0,34.969,15.641,34.969,34.969C244.562,321.719,228.922,337.375,209.594,337.375z"
          },
          "children": [{
            "name": "path",
            "attribs": {
              "d": "M261.078,275.656l18.578-18.594l-24.719-24.734l-18.594,18.594c-2.953-1.547-6.031-2.812-9.25-3.828v-26.297h-34.984\r\n\t\t\tv26.297c-3.219,1.016-6.297,2.281-9.266,3.828l-18.594-18.578l-24.719,24.719l18.594,18.594c-1.562,2.953-2.812,6.031-3.844,9.25\r\n\t\t\tH128v34.984h26.281c1.031,3.219,2.281,6.297,3.844,9.25l-18.594,18.594l24.734,24.734l18.578-18.594\r\n\t\t\tc2.969,1.547,6.031,2.797,9.266,3.828V384h34.984v-26.281c3.219-1.031,6.297-2.297,9.25-3.844l18.594,18.594l24.734-24.734\r\n\t\t\tl-18.594-18.578c1.547-2.969,2.812-6.031,3.828-9.266h26.297v-34.984h-26.297C263.891,281.688,262.625,278.609,261.078,275.656z\r\n\t\t\t M209.594,337.375c-19.312,0-34.969-15.656-34.969-34.969c0-19.328,15.656-34.969,34.969-34.969\r\n\t\t\tc19.328,0,34.969,15.641,34.969,34.969C244.562,321.719,228.922,337.375,209.594,337.375z"
            },
            "children": []
          }]
        }, {
          "name": "path",
          "attribs": {
            "d": "M384,197.281v-24.453h-18.391c-0.703-2.258-1.594-4.406-2.672-6.469l12.969-13.008l-17.281-17.289l-13,13\r\n\t\t\tc-2.047-1.086-4.219-1.969-6.469-2.68V128h-24.438v18.383c-2.25,0.711-4.422,1.602-6.484,2.68l-13-12.992l-17.297,17.289l13,13\r\n\t\t\tc-1.078,2.062-1.969,4.211-2.672,6.469h-18.391v24.453h18.391c0.703,2.258,1.594,4.406,2.672,6.469l-13,13l17.312,17.297\r\n\t\t\tl12.984-13c2.062,1.078,4.219,1.953,6.484,2.672v18.391h24.438v-18.375c2.25-0.719,4.422-1.609,6.469-2.688l13,13l17.312-17.297\r\n\t\t\tl-13-12.984c1.078-2.078,1.969-4.219,2.672-6.484H384z M326.938,209.5c-13.5,0-24.453-10.938-24.453-24.438\r\n\t\t\tc0-13.516,10.953-24.461,24.453-24.461c13.516,0,24.453,10.945,24.453,24.461C351.391,198.562,340.453,209.5,326.938,209.5z"
          },
          "children": [{
            "name": "path",
            "attribs": {
              "d": "M384,197.281v-24.453h-18.391c-0.703-2.258-1.594-4.406-2.672-6.469l12.969-13.008l-17.281-17.289l-13,13\r\n\t\t\tc-2.047-1.086-4.219-1.969-6.469-2.68V128h-24.438v18.383c-2.25,0.711-4.422,1.602-6.484,2.68l-13-12.992l-17.297,17.289l13,13\r\n\t\t\tc-1.078,2.062-1.969,4.211-2.672,6.469h-18.391v24.453h18.391c0.703,2.258,1.594,4.406,2.672,6.469l-13,13l17.312,17.297\r\n\t\t\tl12.984-13c2.062,1.078,4.219,1.953,6.484,2.672v18.391h24.438v-18.375c2.25-0.719,4.422-1.609,6.469-2.688l13,13l17.312-17.297\r\n\t\t\tl-13-12.984c1.078-2.078,1.969-4.219,2.672-6.484H384z M326.938,209.5c-13.5,0-24.453-10.938-24.453-24.438\r\n\t\t\tc0-13.516,10.953-24.461,24.453-24.461c13.516,0,24.453,10.945,24.453,24.461C351.391,198.562,340.453,209.5,326.938,209.5z"
            },
            "children": []
          }]
        }]
      }]
    }]
  }]
};
exports.settings = settings;
#!/bin/bash
#PBS -l nodes=1:ppn=8
#PBS -l pmem=800mb
#PBS -l walltime=02:00:00
#PBS -V

# This script runs Prokka on a list of contigs (one FASTA per genome).
# The output is one Prokka directory per input contig file.
module load prokka
module load barrnap
module load rnammer

## Catching the variables exported by the parallelizer script ##
# bin_dir / o_bin_dir / temp_dir / splitted_file are expected in the
# environment (set by the submitting script — not visible in this file).
contigs_dir=$bin_dir
o_dir=$o_bin_dir
contigs_file="${temp_dir}divisions/${splitted_file}"
echo "$contigs_dir"
echo "$o_dir"
echo "$contigs_file"

###### required options ##############
#contigs_dir='/scratch/lfs/ebecerra/examples/contigs_examples'
#####################################

#Make the output dir
#And the list of contigs file
#o_dir="${PWD}/o_dir" #o_dir received by parallelizer
#mkdir -p $o_dir
#contigs_file="${o_dir}/contigs_list.txt" #contigs_dir received by parallelizer
#ls $contigs_dir > $contigs_file
############################################

old_headers="${o_dir}/old_headers/"
mkdir -p "${o_dir}/old_headers/" #dir for backup of the original FASTA headers
trim_ext_file="${o_dir}/trim_ext.txt" #Trimming the extension for better printing options
sed 's/\.fasta//' ${contigs_file} > ${trim_ext_file}
echo -e \\n'All working files have been created'\\n
echo "The INPUT directory is ${contigs_dir}"
echo "The OUTPUT directory is ${o_dir}"
echo -e \\n'Starting PROKKA execution'\\n
echo -e 'On the following genomes'\\n
#cat $contigs_file
echo

####### PROKKA execution ######
# CONTIG = filename with .fasta extension, TCONTIG = same name without it
# (used as the Prokka locus tag and output dir suffix).
i='1'
paste ${contigs_file} ${trim_ext_file} | while read -r CONTIG TCONTIG
do
grep '>' ${contigs_dir}/${CONTIG} > ${old_headers}/headers_${TCONTIG}.txt
# NOTE(review): this invokes "trim_contig_header.sed" as a command; unless it
# is an executable sed script on PATH (with a shebang), this was probably
# meant to be `sed -i -f trim_contig_header.sed ...` — confirm.
trim_contig_header.sed -i ${contigs_dir}/${CONTIG}
time prokka --locustag "${TCONTIG}" --cpus 8 ${contigs_dir}/${CONTIG} --outdir "${o_dir}/prokka_${TCONTIG}"
echo -n '.'
echo -n "${i}"
let i="$i + 1"
done
####### PROKKA end ######

echo -e \\n\\n'Finishig Execution'\\n
|
import logging
from kairon import cli

if __name__ == "__main__":
    # Verbose logging for CLI runs; kairon's cli() parses sys.argv itself.
    logging.basicConfig(level=logging.DEBUG)
    cli()
// Auto-generated Doxygen navigation data for floor_test_impl.hpp
// (entry: [display name, anchor URL, children]). Do not edit by hand.
var _floor_test_impl_8hpp =
[
    [ "SimpleFloorTest", "_floor_test_impl_8hpp.xhtml#a6f22e64ef1f5e5d8e5b0392fcc65f0a4", null ]
];
package com.pharmacySystem.DTOs;
import java.util.Set;
/**
 * DTO carrying the outcome of a successfully completed appointment:
 * the appointment's id, the practitioner's written report, and the set
 * of prescribed therapies.
 */
public class AppointmentSuccessfullyDoneDTO {

    private long appointmentId;                   // id of the completed appointment
    private String report;                        // free-text report written during the appointment
    private Set<AppointmentTherapyDTO> therapies; // therapies prescribed at the appointment

    /** No-arg constructor required by serialization frameworks. */
    public AppointmentSuccessfullyDoneDTO() {
    }

    public AppointmentSuccessfullyDoneDTO(long appointmentId, String report, Set<AppointmentTherapyDTO> therapies) {
        super();
        this.appointmentId = appointmentId;
        this.report = report;
        this.therapies = therapies;
    }

    public long getAppointmentId() {
        return appointmentId;
    }

    public void setAppointmentId(long appointmentId) {
        this.appointmentId = appointmentId;
    }

    public String getReport() {
        return report;
    }

    public void setReport(String report) {
        this.report = report;
    }

    public Set<AppointmentTherapyDTO> getTherapies() {
        return therapies;
    }

    public void setTherapies(Set<AppointmentTherapyDTO> therapies) {
        this.therapies = therapies;
    }
}
#!/bin/bash
# Deploy the todoapp Kubernetes manifests in this directory to an AKS cluster,
# creating the resource group and cluster if needed, then assign DNS names to
# the exposed services' public IPs.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

echo "Check Azure CLI login..."
# `az group list` is used purely as a cheap authenticated call.
if ! az group list >/dev/null 2>&1; then
    echo "Login Azure CLI required" >&2
    exit 1
fi

resource_group=java-on-azure
location=westus
aks_name=wikiBookCluster
dns_name_suffix=wikiBookDns
# AKS places node/IP resources in this auto-created companion resource group.
companion_rg="MC_${resource_group}_${aks_name}_${location}"

echo "Checking resource group $resource_group..."
if [[ "$(az group exists --name "$resource_group")" == "false" ]]; then
    echo "Create resource group $resource_group"
    az group create -n "$resource_group" -l "$location"
fi

echo "Checking AKS $aks_name..."
if ! az aks show -g "$resource_group" -n "$aks_name" >/dev/null 2>&1; then
    echo "Create AKS $aks_name"
    az aks create -g "$resource_group" -n "$aks_name" --node-count 2
fi

# Use a throwaway kubeconfig so the user's default config is untouched.
kubeconfig="$(mktemp)"
echo "Fetch AKS credentials to $kubeconfig"
az aks get-credentials -g "$resource_group" -n "$aks_name" --admin --file "$kubeconfig"

# Temporarily split only on newlines so manifest paths with spaces survive.
SAVEIFS="$IFS"
IFS=$(echo -en "\n\b")
for config in "$DIR"/*.yml; do
    echo "Apply $config"
    kubectl apply -f "$config" --kubeconfig "$kubeconfig"
done
IFS="$SAVEIFS"

# Poll until the LoadBalancer service gets an external IP, then attach a DNS
# label to the matching public-IP resource in the companion resource group.
function assign_dns {
    service="$1"
    dns_name="$2"
    IP=
    while true; do
        echo "Waiting external IP for $service..."
        # "<" filters out kubectl's "<pending>" placeholder.
        IP="$(kubectl get service "$service" --kubeconfig "$kubeconfig" | tail -n +2 | awk '{print $4}' | grep -v '<')"
        # NOTE(review): $? here is the exit status of the assignment (i.e. the
        # pipeline inside the command substitution) — intentional but fragile;
        # the -n "$IP" check alone would suffice.
        if [[ "$?" == 0 && -n "$IP" ]]; then
            echo "Service $service public IP: $IP"
            break
        fi
        sleep 10
    done
    public_ip="$(az network public-ip list -g "$companion_rg" --query "[?ipAddress==\`$IP\`] | [0].id" -o tsv)"
    if [[ -z "$public_ip" ]]; then
        echo "Cannot find public IP resource ID for '$service' in companion resource group '$companion_rg'" >&2
        exit 1
    fi
    echo "Assign DNS name '$dns_name' for '$service'"
    az network public-ip update --dns-name "$dns_name" --ids "$public_ip"
    # exit inside a function terminates the whole script on failure.
    [[ $? != 0 ]] && exit 1
}

assign_dns todoapp-service "aks-todoapp$dns_name_suffix"
assign_dns todoapp-test-blue "aks-todoapp-blue$dns_name_suffix"
assign_dns todoapp-test-green "aks-todoapp-green$dns_name_suffix"

rm -f "$kubeconfig"
|
/*!
\brief Implementation for a number of functions defined in the HelperVk header file.
\file PVRUtils/Vulkan/HelperVk.cpp
\author PowerVR by Imagination, Developer Technology Team
\copyright Copyright (c) Imagination Technologies Limited.
*/
//!\cond NO_DOXYGEN
#include "HelperVk.h"
#include "PVRCore/texture/PVRTDecompress.h"
#include "PVRCore/textureio/TGAWriter.h"
#include "PVRVk/ImageVk.h"
#include "PVRVk/CommandPoolVk.h"
#include "PVRVk/QueueVk.h"
#include "PVRVk/HeadersVk.h"
#include "PVRVk/SwapchainVk.h"
#include "PVRVk/MemoryBarrierVk.h"
#include "PVRUtils/Vulkan/MemoryAllocator.h"
#include "PVRVk/MemoryBarrierVk.h"
#include "PVRVk/DisplayVk.h"
#include "PVRVk/DisplayModeVk.h"
#include "pvr_openlib.h"
namespace pvr {
namespace utils {
/// Create a buffer and, unless requiredMemoryFlags is e_NONE, allocate and bind
/// device memory for it — either through the supplied VMA allocator (when
/// non-null and valid) or directly from the device. Returns the created buffer.
pvrvk::Buffer createBuffer(pvrvk::Device device, const pvrvk::BufferCreateInfo& createInfo, pvrvk::MemoryPropertyFlags requiredMemoryFlags,
	pvrvk::MemoryPropertyFlags optimalMemoryFlags, vma::Allocator* bufferAllocator, vma::AllocationCreateFlags vmaAllocationCreateFlags)
{
	// create the PVRVk Buffer
	pvrvk::Buffer buffer = device->createBuffer(createInfo);
	// if the required memory flags is pvrvk::MemoryPropertyFlags::e_NONE then no backing will be provided for the buffer
	if (requiredMemoryFlags != pvrvk::MemoryPropertyFlags::e_NONE)
	{
		// use the allocator
		if (bufferAllocator && *bufferAllocator)
		{
			vma::AllocationCreateInfo allocationInfo;
			allocationInfo.usage = vma::MemoryUsage::e_UNKNOWN;
			allocationInfo.requiredFlags = requiredMemoryFlags;
			// preferred flags are a superset of the required ones
			allocationInfo.preferredFlags = optimalMemoryFlags | requiredMemoryFlags;
			allocationInfo.flags = vmaAllocationCreateFlags;
			allocationInfo.memoryTypeBits = buffer->getMemoryRequirement().getMemoryTypeBits();
			vma::Allocation allocation;
			allocation = (*bufferAllocator)->allocateMemoryForBuffer(buffer, allocationInfo);
			// bind at the allocation's offset within the VMA-owned memory block
			buffer->bindMemory(pvrvk::DeviceMemory(allocation), allocation->getOffset());
		}
		else
		{
			// get the buffer memory requirements, memory type index and memory property flags required for backing the PVRVk buffer
			const pvrvk::MemoryRequirements& memoryRequirements = buffer->getMemoryRequirement();
			uint32_t memoryTypeIndex;
			pvrvk::MemoryPropertyFlags memoryPropertyFlags;
			getMemoryTypeIndex(device->getPhysicalDevice(), memoryRequirements.getMemoryTypeBits(), requiredMemoryFlags, optimalMemoryFlags, memoryTypeIndex, memoryPropertyFlags);
			// allocate the buffer memory using the retrieved memory type index and memory property flags
			// (dedicated allocation, so binding at offset 0 satisfies alignment)
			pvrvk::DeviceMemory deviceMemory = device->allocateMemory(pvrvk::MemoryAllocationInfo(buffer->getMemoryRequirement().getSize(), memoryTypeIndex));
			// attach the memory to the buffer
			buffer->bindMemory(deviceMemory, 0);
		}
	}
	return buffer;
}
/// Create an image and, unless requiredMemoryFlags is e_NONE or the image is
/// sparse, allocate and bind device memory for it — via the supplied VMA
/// allocator when available, otherwise directly from the device.
pvrvk::Image createImage(pvrvk::Device device, const pvrvk::ImageCreateInfo& createInfo, pvrvk::MemoryPropertyFlags requiredMemoryFlags,
	pvrvk::MemoryPropertyFlags optimalMemoryFlags, vma::Allocator* imageAllocator, vma::AllocationCreateFlags vmaAllocationCreateFlags)
{
	// create the PVRVk Image
	pvrvk::Image image = device->createImage(createInfo);
	// if the required memory flags is pvrvk::MemoryPropertyFlags::e_NONE then no backing will be provided for the image
	if (requiredMemoryFlags != pvrvk::MemoryPropertyFlags::e_NONE)
	{
		// if no flags are provided for the optimal flags then just reuse the required set of memory property flags to optimise the getMemoryTypeIndex
		if (optimalMemoryFlags == pvrvk::MemoryPropertyFlags::e_NONE) { optimalMemoryFlags = requiredMemoryFlags; }
		// Create a memory block if it is non sparse and a valid memory propery flag.
		if ((createInfo.getFlags() &
				(pvrvk::ImageCreateFlags::e_SPARSE_ALIASED_BIT | pvrvk::ImageCreateFlags::e_SPARSE_BINDING_BIT | pvrvk::ImageCreateFlags::e_SPARSE_RESIDENCY_BIT)) == 0 &&
			(requiredMemoryFlags != pvrvk::MemoryPropertyFlags(0)))
		// If it's not sparse, create memory backing
		{
			if (imageAllocator && *imageAllocator)
			{
				vma::AllocationCreateInfo allocInfo = {};
				allocInfo.memoryTypeBits = image->getMemoryRequirement().getMemoryTypeBits();
				allocInfo.requiredFlags = requiredMemoryFlags;
				// preferred flags are a superset of the required ones
				allocInfo.preferredFlags = requiredMemoryFlags | optimalMemoryFlags;
				allocInfo.flags = vmaAllocationCreateFlags;
				vma::Allocation allocation = (*imageAllocator)->allocateMemoryForImage(image, allocInfo);
				// bind at the allocation's offset within the VMA-owned memory block
				image->bindMemoryNonSparse(allocation, allocation->getOffset());
			}
			else
			{
				// get the image memory requirements, memory type index and memory property flags required for backing the PVRVk image
				const pvrvk::MemoryRequirements& memoryRequirements = image->getMemoryRequirement();
				uint32_t memoryTypeIndex;
				pvrvk::MemoryPropertyFlags memoryPropertyFlags;
				getMemoryTypeIndex(device->getPhysicalDevice(), memoryRequirements.getMemoryTypeBits(), requiredMemoryFlags, optimalMemoryFlags, memoryTypeIndex, memoryPropertyFlags);
				// allocate the image memory using the retrieved memory type index and memory property flags
				pvrvk::DeviceMemory memBlock = device->allocateMemory(pvrvk::MemoryAllocationInfo(memoryRequirements.getSize(), memoryTypeIndex));
				// attach the memory to the image
				image->bindMemoryNonSparse(memBlock);
			}
		}
	}
	return image;
}
/// Derive the image aspect implied by a pvrvk::Format: combined depth/stencil
/// formats map to both aspects, pure depth or stencil formats to their single
/// aspect, and every other format is treated as colour.
pvrvk::ImageAspectFlags inferAspectFromFormat(pvrvk::Format format)
{
	// Enumerate the seven depth/stencil formats explicitly (the original
	// indexed a reversed lookup table over the contiguous enum range
	// e_D16_UNORM .. e_D32_SFLOAT_S8_UINT).
	switch (format)
	{
	case pvrvk::Format::e_D32_SFLOAT_S8_UINT:
	case pvrvk::Format::e_D24_UNORM_S8_UINT:
	case pvrvk::Format::e_D16_UNORM_S8_UINT: return pvrvk::ImageAspectFlags::e_DEPTH_BIT | pvrvk::ImageAspectFlags::e_STENCIL_BIT;
	case pvrvk::Format::e_S8_UINT: return pvrvk::ImageAspectFlags::e_STENCIL_BIT;
	case pvrvk::Format::e_D32_SFLOAT:
	case pvrvk::Format::e_X8_D24_UNORM_PACK32:
	case pvrvk::Format::e_D16_UNORM: return pvrvk::ImageAspectFlags::e_DEPTH_BIT;
	default: return pvrvk::ImageAspectFlags::e_COLOR_BIT;
	}
}
/// Retrieve the per-channel bit depths (R/G/B/A) for a supported colour format.
/// Asserts on any format this helper does not recognise.
void getColorBits(pvrvk::Format format, uint32_t& redBits, uint32_t& greenBits, uint32_t& blueBits, uint32_t& alphaBits)
{
	switch (format)
	{
	// 8 bits per channel, including alpha (RGBA8 / BGRA8 variants).
	case pvrvk::Format::e_R8G8B8A8_SRGB:
	case pvrvk::Format::e_R8G8B8A8_UNORM:
	case pvrvk::Format::e_R8G8B8A8_SNORM:
	case pvrvk::Format::e_B8G8R8A8_UNORM:
	case pvrvk::Format::e_B8G8R8A8_SRGB:
		redBits = greenBits = blueBits = alphaBits = 8;
		break;
	// 8 bits per channel, no alpha (RGB8 / BGR8 variants).
	case pvrvk::Format::e_B8G8R8_SRGB:
	case pvrvk::Format::e_B8G8R8_UNORM:
	case pvrvk::Format::e_B8G8R8_SNORM:
	case pvrvk::Format::e_R8G8B8_SRGB:
	case pvrvk::Format::e_R8G8B8_UNORM:
	case pvrvk::Format::e_R8G8B8_SNORM:
		redBits = greenBits = blueBits = 8;
		alphaBits = 0;
		break;
	// Packed 16-bit 5-6-5 format.
	case pvrvk::Format::e_R5G6B5_UNORM_PACK16:
		redBits = 5;
		greenBits = 6;
		blueBits = 5;
		alphaBits = 0;
		break;
	default: assertion(0, "UnSupported pvrvk::Format");
	}
}
/// Retrieve the depth and stencil bit depths for a supported depth/stencil format.
/// Asserts on any format this helper does not recognise.
void getDepthStencilBits(pvrvk::Format format, uint32_t& depthBits, uint32_t& stencilBits)
{
	switch (format)
	{
	// Depth-only formats.
	case pvrvk::Format::e_D16_UNORM: depthBits = 16; stencilBits = 0; break;
	case pvrvk::Format::e_X8_D24_UNORM_PACK32: depthBits = 24; stencilBits = 0; break;
	case pvrvk::Format::e_D32_SFLOAT: depthBits = 32; stencilBits = 0; break;
	// Combined depth/stencil formats.
	case pvrvk::Format::e_D16_UNORM_S8_UINT: depthBits = 16; stencilBits = 8; break;
	case pvrvk::Format::e_D24_UNORM_S8_UINT: depthBits = 24; stencilBits = 8; break;
	case pvrvk::Format::e_D32_SFLOAT_S8_UINT: depthBits = 32; stencilBits = 8; break;
	// Stencil-only format.
	case pvrvk::Format::e_S8_UINT: depthBits = 0; stencilBits = 8; break;
	default: assertion(0, "UnSupported pvrvk::Format");
	}
}
/// Upload a texture to a device-local image, create a view for it, and synchronously
/// submit the upload on the given queue, blocking until the transfer has completed.
pvrvk::ImageView uploadImageAndViewSubmit(pvrvk::Device& device, const Texture& texture, bool allowDecompress, pvrvk::CommandPool& cmdPool, pvrvk::Queue& queue,
	pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator,
	vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	// Record the upload into a one-shot command buffer.
	pvrvk::CommandBuffer uploadCmd = cmdPool->allocateCommandBuffer();
	uploadCmd->begin();
	pvr::utils::beginCommandBufferDebugLabel(uploadCmd, pvrvk::DebugUtilsLabel("PVRUtilsVk::uploadImageAndSubmit"));
	pvrvk::ImageView view =
		uploadImageAndView(device, texture, allowDecompress, uploadCmd, usageFlags, finalLayout, stagingBufferAllocator, imageAllocator, imageAllocationCreateFlags);
	pvr::utils::endCommandBufferDebugLabel(uploadCmd);
	uploadCmd->end();
	// Submit and block on a fence so the staging resources are safe to release on return.
	pvrvk::SubmitInfo submit;
	submit.commandBuffers = &uploadCmd;
	submit.numCommandBuffers = 1;
	pvrvk::Fence uploadDone = device->createFence();
	queue->submit(&submit, 1, uploadDone);
	uploadDone->wait();
	return view;
}
namespace {
/// Software-decompress a PVRTC texture into an RGBA8888 texture, surface by surface.
void decompressPvrtc(const Texture& texture, Texture& cDecompressedTexture)
{
	// Build a header matching the source but with an 8-bit-per-channel RGBA pixel format.
	TextureHeader cDecompressedHeader(texture);
	// robin: not sure what should happen here. The PVRTGENPIXELID4 macro is used in the old SDK.
	cDecompressedHeader.setPixelFormat(GeneratePixelType4<'r', 'g', 'b', 'a', 8, 8, 8, 8>::ID);
	cDecompressedHeader.setChannelType(VariableType::UnsignedByteNorm);
	cDecompressedTexture = Texture(cDecompressedHeader);
	// Decompress every (mip, array, face) surface independently.
	for (uint32_t mip = 0; mip < texture.getNumMipMapLevels(); ++mip)
	{
		for (uint32_t arrayMember = 0; arrayMember < texture.getNumArrayMembers(); ++arrayMember)
		{
			for (uint32_t face = 0; face < texture.getNumFaces(); ++face)
			{
				// Second argument selects the 2bpp (1) vs 4bpp (0) PVRTC variant.
				PVRTDecompressPVRTC(texture.getDataPointer(mip, arrayMember, face), (texture.getBitsPerPixel() == 2 ? 1 : 0), texture.getWidth(mip),
					texture.getHeight(mip), cDecompressedTexture.getDataPointer(mip, arrayMember, face));
			}
		}
	}
}
/// Map the depth/stencil bit depths requested in DisplayAttributes to a concrete
/// pvrvk::Format. Asserts if the requested combination is unsupported.
inline pvrvk::Format getDepthStencilFormat(const DisplayAttributes& displayAttribs)
{
	const uint32_t depthBpp = displayAttribs.depthBPP;
	const uint32_t stencilBpp = displayAttribs.stencilBPP;
	pvrvk::Format dsFormat = pvrvk::Format::e_UNDEFINED;
	if (stencilBpp)
	{
		// A stencil component was requested: pick a combined (or stencil-only) format.
		switch (depthBpp)
		{
		case 0: dsFormat = pvrvk::Format::e_S8_UINT; break;
		case 16: dsFormat = pvrvk::Format::e_D16_UNORM_S8_UINT; break;
		case 24: dsFormat = pvrvk::Format::e_D24_UNORM_S8_UINT; break;
		case 32: dsFormat = pvrvk::Format::e_D32_SFLOAT_S8_UINT; break;
		default: assertion(false, "Unsupported Depth Stencil pvrvk::Format");
		}
	}
	else
	{
		// Depth-only request.
		switch (depthBpp)
		{
		case 16: dsFormat = pvrvk::Format::e_D16_UNORM; break;
		case 24: dsFormat = pvrvk::Format::e_X8_D24_UNORM_PACK32; break;
		case 32: dsFormat = pvrvk::Format::e_D32_SFLOAT; break;
		default: assertion(false, "Unsupported Depth Stencil pvrvk::Format");
		}
	}
	return dsFormat;
}
// Check a list of formats against the display attributes. Will return the first item that matches. If no items match, will return false and outFormat will be unmodified.
// If both matchColorspace and matchBpp are false, will return the first item of the list. Of course if the list is empty will always return false.
bool checkFormatListAgainstUserPreferences(
const std::vector<pvrvk::SurfaceFormatKHR>& list, const pvr::DisplayAttributes& displayAttributes, bool matchColorspace, bool matchBpp, pvrvk::SurfaceFormatKHR& outFormat)
{
for (auto&& sfmt : list)
{
pvrvk::Format format = sfmt.getFormat();
if (matchColorspace)
{
if (displayAttributes.frameBufferSrgb != isSrgb(format)) { continue; }
}
if (matchBpp)
{
uint32_t currentRedBpp, currentGreenBpp, currentBlueBpp, currentAlphaBpp = 0;
getColorBits(format, currentRedBpp, currentGreenBpp, currentBlueBpp, currentAlphaBpp);
if (currentRedBpp != displayAttributes.redBits || displayAttributes.greenBits != currentGreenBpp || displayAttributes.blueBits != currentBlueBpp ||
displayAttributes.alphaBits != currentAlphaBpp)
{ continue; }
}
outFormat = sfmt;
return true; // This loop will exit as soon as any item passes all of the enabled tests (matching colorspace and or matching bpp).
}
return false;
}
/// Select a swapchain surface format matching the user's DisplayAttributes as closely as possible.
/// Walks a prioritised sequence of checks (see the numbered list below) and throws if nothing at
/// all is supported. May relax the colorspace and/or bit-depth requirements if necessary.
pvrvk::SurfaceFormatKHR findSwapchainFormat(
	const std::vector<pvrvk::SurfaceFormatKHR>& supportedFormats, pvr::DisplayAttributes& displayAttributes, const std::vector<pvrvk::Format>& preferredColorFormats)
{
	Log(LogLevel::Information, "Supported Swapchain surface device formats:");
	for (auto&& format : supportedFormats)
	{ Log(LogLevel::Information, "\tFormat: %-30s Colorspace: %s", to_string(format.getFormat()).c_str(), to_string(format.getColorSpace()).c_str()); }
	pvrvk::SurfaceFormatKHR swapchainFormat;
	// Split the caller's preferred formats (or, if none were given, the framework defaults)
	// into a linear list and an sRGB list.
	std::vector<pvrvk::Format> preferredLinearFormats;
	std::vector<pvrvk::Format> preferredSrgbFormats;
	if (preferredColorFormats.size())
	{
		for (uint32_t i = 0; i < static_cast<uint32_t>(preferredColorFormats.size()); ++i)
		{
			if (pvrvk::isSrgb(preferredColorFormats[i])) { preferredSrgbFormats.emplace_back(preferredColorFormats[i]); }
			else
			{
				preferredLinearFormats.emplace_back(preferredColorFormats[i]);
			}
		}
	}
	else
	{
		pvrvk::Format frameworkPreferredLinearFormats[] = { pvrvk::Format::e_R8G8B8A8_UNORM, pvrvk::Format::e_B8G8R8A8_UNORM, pvrvk::Format::e_R5G6B5_UNORM_PACK16,
			pvrvk::Format::e_UNDEFINED };
		pvrvk::Format frameworkPreferredSrgbFmts[] = { pvrvk::Format::e_R8G8B8A8_SRGB, pvrvk::Format::e_B8G8R8A8_SRGB, pvrvk::Format::e_A8B8G8R8_SRGB_PACK32, pvrvk::Format::e_UNDEFINED };
		preferredLinearFormats.insert(
			preferredLinearFormats.begin(), &frameworkPreferredLinearFormats[0], &frameworkPreferredLinearFormats[ARRAY_SIZE(frameworkPreferredLinearFormats)]);
		preferredSrgbFormats.insert(preferredSrgbFormats.begin(), &frameworkPreferredSrgbFmts[0], &frameworkPreferredSrgbFmts[ARRAY_SIZE(frameworkPreferredSrgbFmts)]);
	}
	std::vector<pvrvk::SurfaceFormatKHR> supportedPreferredLinearFmts;
	std::vector<pvrvk::SurfaceFormatKHR> supportedPreferredSrgbFmts;
	// Our "preferred" formats are typical formats that are widely supported and optimal. Keep two lists: One for Linear formats...
	for (auto&& pfmt : preferredLinearFormats)
	{
		for (auto&& sfmt : supportedFormats)
		{
			if (sfmt.getFormat() == pfmt) { supportedPreferredLinearFmts.emplace_back(sfmt); }
		}
	}
	// ... and one for SRGB formats.
	for (auto&& pfmt : preferredSrgbFormats)
	{
		for (auto&& sfmt : supportedFormats)
		{
			if (sfmt.getFormat() == pfmt) { supportedPreferredSrgbFmts.emplace_back(sfmt); }
		}
	}
	// Order of checks for Device support:
	// 1. Preferred format that matches the user's preferred Colorspace(CS) and Bits per Pixel(BPP)
	// 2. Any format that matches the user's preferred CS and BPP
	// 3. Preferred format that matches the user's preferred CS
	// 4. Any format that matches the user's preferred CS
	// 5. Preferred format that matches the user's BPP
	// 6. Any format that matches the user's preferred BPP
	// 7. Any Preferred format
	// 8. Any format
	// 9. Fail (no supported device formats)
	auto& preferredFormats = displayAttributes.frameBufferSrgb ? supportedPreferredSrgbFmts : supportedPreferredLinearFmts;
	// 1. Preferred format that matches the user's preferred Colorspace(CS) and Bits per Pixel(BPP)
	bool found = checkFormatListAgainstUserPreferences(preferredFormats, displayAttributes, true, true, swapchainFormat);
	if (!found)
	{
		// 2. Any format that matches the user's preferred CS and BPP
		Log(LogLevel::Information, "Requested swapchain format did not match any of the default preferred formats(RGBA8/BGRA8). This is not an error.");
		found = checkFormatListAgainstUserPreferences(supportedFormats, displayAttributes, true, true, swapchainFormat);
	}
	if (!found)
	{
		Log(LogLevel::Warning,
			"Requested swapchain format could not be found with the requested parameters: [R:%d G:%d B:%d A:%d %s colorspace]. Will attempt to find "
			"another supported frambebuffer format.",
			displayAttributes.redBits, displayAttributes.greenBits, displayAttributes.blueBits, displayAttributes.alphaBits, displayAttributes.frameBufferSrgb ? "SRGB" : "Linear");
		if (displayAttributes.forceColorBPP)
		{
			Log(LogLevel::Information,
				"Color Bits per pixel has been forced in user preferences. Will only attempt to find color formats exactly matching the provided color bits configuration.");
		}
		else
		{
			// 3. Preferred format that matches the user's preferred CS
			found = checkFormatListAgainstUserPreferences(preferredFormats, displayAttributes, true, false, swapchainFormat);
			if (!found)
			{
				// 4. Any format that matches the user's preferred CS
				found = checkFormatListAgainstUserPreferences(supportedFormats, displayAttributes, true, false, swapchainFormat);
			}
		}
	}
	// This case will only be hit in the unusual case where the user's platform does not support any format with the requested colorspace type.
	// The only feasible scenario is the user requesting an SRGB framebuffer and the platform does not support any srgb framebuffer.
	if (!found)
	{
		Log(LogLevel::Warning, "Could not find any %s framebuffer format. Will attempt to provide a %s framebuffer matching the requested color bits.",
			displayAttributes.frameBufferSrgb ? "SRGB" : "Linear", displayAttributes.frameBufferSrgb ? "Linear" : "SRGB");
		// 5. Preferred format that matches the user's BPP
		found = checkFormatListAgainstUserPreferences(supportedPreferredLinearFmts, displayAttributes, false, true, swapchainFormat);
		if (!found)
		{
			// Still 5. Preferred format that matches the user's BPP
			found = checkFormatListAgainstUserPreferences(supportedPreferredSrgbFmts, displayAttributes, false, true, swapchainFormat);
		}
		if (!found)
		{
			// 6. Any format that matches the user's preferred BPP
			found = checkFormatListAgainstUserPreferences(supportedFormats, displayAttributes, false, true, swapchainFormat);
		}
	}
	// This case will, finally, be hit if both the requested colorspace could not be matched, AND their requested BPP could not be matched. At this point, we
	// will ignore all user's preferences and just try to give him ANY framebuffer.
	if (!found && !displayAttributes.forceColorBPP)
	{
		Log(LogLevel::Warning, "Could not find any formats matching either the requested colorspace, or the requested bits per pixel. Will attemt to provide ANY supported framebuffer.");
		// 7. Any Preferred format
		// NOTE: each fallback below only runs if the previous one failed; unconditionally
		// reassigning 'found' here would discard an earlier successful match and clobber
		// swapchainFormat with a lower-priority choice.
		found = checkFormatListAgainstUserPreferences(supportedPreferredSrgbFmts, displayAttributes, false, false, swapchainFormat);
		if (!found) { found = checkFormatListAgainstUserPreferences(supportedPreferredLinearFmts, displayAttributes, false, false, swapchainFormat); }
		// 8. Any format
		if (!found) { found = checkFormatListAgainstUserPreferences(supportedFormats, displayAttributes, false, false, swapchainFormat); }
	}
	if (!found)
	{
		// 9. Fail (no supported device formats)
		if (displayAttributes.forceColorBPP)
		{
			throw InvalidOperationError("Could not find any supported framebuffer with the requested bit depth of R:" + std::to_string(displayAttributes.redBits) +
				" G:" + std::to_string(displayAttributes.greenBits) + " B:" + std::to_string(displayAttributes.blueBits) + " A:" + std::to_string(displayAttributes.alphaBits));
		}
		else
		{
			throw InvalidOperationError("Could not find any supported framebuffers. Check that Vulkan implementation and drivers are correctly installed.");
		}
	}
	Log(LogLevel::Information, "Successfully accepted format: %s Colorspace: %s", pvrvk::to_string(swapchainFormat.getFormat()).c_str(),
		pvrvk::to_string(swapchainFormat.getColorSpace()).c_str());
	return swapchainFormat;
}
/// Create a Vulkan swapchain configured from (and writing back to) the provided DisplayAttributes:
/// chooses extent, surface format, present mode, composite alpha and swapchain length, clamping each
/// to the surface's reported capabilities.
pvrvk::Swapchain createSwapchainHelper(pvrvk::Device& device, const pvrvk::Surface& surface, pvr::DisplayAttributes& displayAttributes,
	const pvrvk::ImageUsageFlags& swapchainImageUsageFlags, const std::vector<pvrvk::Format>& preferredColorFormats = std::vector<pvrvk::Format>())
{
	Log(LogLevel::Information, "Creating Vulkan Swapchain using pvr::DisplayAttributes");
	pvrvk::SurfaceCapabilitiesKHR surfaceCapabilities = device->getPhysicalDevice()->getSurfaceCapabilities(surface);
	Log(LogLevel::Information, "Queried Surface Capabilities:");
	Log(LogLevel::Information, "\tMinimum Image count: %u", surfaceCapabilities.getMinImageCount());
	Log(LogLevel::Information, "\tMaximum Image count: %u", surfaceCapabilities.getMaxImageCount());
	Log(LogLevel::Information, "\tMaximum Image Array Layers: %u", surfaceCapabilities.getMaxImageArrayLayers());
	Log(LogLevel::Information, "\tImage size (now): %ux%u", surfaceCapabilities.getCurrentExtent().getWidth(), surfaceCapabilities.getCurrentExtent().getHeight());
	Log(LogLevel::Information, "\tMinimum Image extent: %dx%d", surfaceCapabilities.getMinImageExtent().getWidth(), surfaceCapabilities.getMinImageExtent().getHeight());
	Log(LogLevel::Information, "\tMaximum Image extent: %dx%d", surfaceCapabilities.getMaxImageExtent().getWidth(), surfaceCapabilities.getMaxImageExtent().getHeight());
	Log(LogLevel::Information, "\tSupported Usage Flags: %s", pvrvk::to_string(surfaceCapabilities.getSupportedUsageFlags()).c_str());
	Log(LogLevel::Information, "\tCurrent transform: %s", pvrvk::to_string(surfaceCapabilities.getCurrentTransform()).c_str());
	Log(LogLevel::Information, "\tSupported transforms: %s", pvrvk::to_string(surfaceCapabilities.getSupportedTransforms()).c_str());
	Log(LogLevel::Information, "\tComposite Alpha Flags: %s", pvrvk::to_string(surfaceCapabilities.getSupportedCompositeAlpha()).c_str());
	uint32_t usedWidth = surfaceCapabilities.getCurrentExtent().getWidth();
	uint32_t usedHeight = surfaceCapabilities.getCurrentExtent().getHeight();
#if !defined(ANDROID)
	// On non-Android platforms, honour the requested window size, clamped to the surface's limits.
	usedWidth = std::max<uint32_t>(surfaceCapabilities.getMinImageExtent().getWidth(), std::min<uint32_t>(displayAttributes.width, surfaceCapabilities.getMaxImageExtent().getWidth()));
	usedHeight =
		std::max<uint32_t>(surfaceCapabilities.getMinImageExtent().getHeight(), std::min<uint32_t>(displayAttributes.height, surfaceCapabilities.getMaxImageExtent().getHeight()));
#endif
	// Log modifications made to the surface properties set via DisplayAttributes
	Log(LogLevel::Information, "Modified Surface Properties after inspecting DisplayAttributes:");
	displayAttributes.width = usedWidth;
	displayAttributes.height = usedHeight;
	Log(LogLevel::Information, "\tImage size to be used: %dx%d", displayAttributes.width, displayAttributes.height);
	// Query the supported surface formats once and reuse the result (previously this was queried twice).
	std::vector<pvrvk::SurfaceFormatKHR> surfaceFormats = device->getPhysicalDevice()->getSurfaceFormats(surface);
	pvrvk::SurfaceFormatKHR imageFormat = findSwapchainFormat(surfaceFormats, displayAttributes, preferredColorFormats);
	// update the display attributes
	displayAttributes.frameBufferSrgb = pvrvk::isSrgb(imageFormat.getFormat());
	std::vector<pvrvk::PresentModeKHR> surfacePresentationModes = device->getPhysicalDevice()->getSurfacePresentModes(surface);
	// With VK_PRESENT_MODE_FIFO_KHR the presentation engine will wait for the next vblank (vertical blanking period) to update the current image. When using FIFO tearing
	// cannot occur. VK_PRESENT_MODE_FIFO_KHR is required to be supported.
	pvrvk::PresentModeKHR swapchainPresentMode = pvrvk::PresentModeKHR::e_FIFO_KHR;
	pvrvk::PresentModeKHR desiredSwapMode = pvrvk::PresentModeKHR::e_FIFO_KHR;
	// We make use of PVRShell for handling command line arguments for configuring vsync modes using the -vsync command line argument.
	switch (displayAttributes.vsyncMode)
	{
	case VsyncMode::Off:
		Log(LogLevel::Information, "Requested presentation mode: Immediate (VsyncMode::Off)");
		desiredSwapMode = pvrvk::PresentModeKHR::e_IMMEDIATE_KHR;
		break;
	case VsyncMode::Mailbox:
		Log(LogLevel::Information, "Requested presentation mode: Mailbox (VsyncMode::Mailbox)");
		desiredSwapMode = pvrvk::PresentModeKHR::e_MAILBOX_KHR;
		break;
	case VsyncMode::Relaxed:
		Log(LogLevel::Information, "Requested presentation mode: Relaxed (VsyncMode::Relaxed)");
		desiredSwapMode = pvrvk::PresentModeKHR::e_FIFO_RELAXED_KHR;
		break;
	// Default vsync mode
	case pvr::VsyncMode::On: Log(LogLevel::Information, "Requested presentation mode: Fifo (VsyncMode::On)"); break;
	case pvr::VsyncMode::Half: Log(LogLevel::Information, "Unsupported presentation mode requested: Half. Defaulting to PresentModeKHR::e_FIFO_KHR");
	}
	std::string supported = "Supported presentation modes: ";
	for (size_t i = 0; i < surfacePresentationModes.size(); i++) { supported += (to_string(surfacePresentationModes[i]) + " "); }
	Log(LogLevel::Information, supported.c_str());
	for (size_t i = 0; i < surfacePresentationModes.size(); i++)
	{
		pvrvk::PresentModeKHR currentPresentMode = surfacePresentationModes[i];
		// Primary matches : Check for a precise match between the desired presentation mode and the presentation modes supported.
		if (currentPresentMode == desiredSwapMode)
		{
			swapchainPresentMode = desiredSwapMode;
			break;
		}
		// Secondary matches : Immediate and Mailbox are better fits for each other than FIFO, so set them as secondaries
		// If the user asked for Mailbox, and we found Immediate, set it (in case Mailbox is not found) and keep looking
		if ((desiredSwapMode == pvrvk::PresentModeKHR::e_MAILBOX_KHR) && (currentPresentMode == pvrvk::PresentModeKHR::e_IMMEDIATE_KHR))
		{ swapchainPresentMode = pvrvk::PresentModeKHR::e_IMMEDIATE_KHR; }
		// ... And vice versa: If the user asked for Immediate, and we found Mailbox, set it (in case Immediate is not found) and keep looking
		if ((desiredSwapMode == pvrvk::PresentModeKHR::e_IMMEDIATE_KHR) && (currentPresentMode == pvrvk::PresentModeKHR::e_MAILBOX_KHR))
		{ swapchainPresentMode = pvrvk::PresentModeKHR::e_MAILBOX_KHR; }
	}
	switch (swapchainPresentMode)
	{
	case pvrvk::PresentModeKHR::e_IMMEDIATE_KHR: Log(LogLevel::Information, "Presentation mode: Immediate (Vsync OFF)"); break;
	case pvrvk::PresentModeKHR::e_MAILBOX_KHR: Log(LogLevel::Information, "Presentation mode: Mailbox (Triple-buffering)"); break;
	case pvrvk::PresentModeKHR::e_FIFO_KHR: Log(LogLevel::Information, "Presentation mode: FIFO (Vsync ON)"); break;
	case pvrvk::PresentModeKHR::e_FIFO_RELAXED_KHR: Log(LogLevel::Information, "Presentation mode: Relaxed FIFO (Relaxed Vsync)"); break;
	default: assertion(false, "Unrecognised presentation mode"); break;
	}
	// Set the swapchain length if it has not already been set.
	if (!displayAttributes.swapLength) { displayAttributes.swapLength = 3; }
	// Check for a supported composite alpha value in a predefined order
	pvrvk::CompositeAlphaFlagsKHR supportedCompositeAlphaFlags = pvrvk::CompositeAlphaFlagsKHR::e_NONE;
	if ((surfaceCapabilities.getSupportedCompositeAlpha() & pvrvk::CompositeAlphaFlagsKHR::e_OPAQUE_BIT_KHR) != 0)
	{ supportedCompositeAlphaFlags = pvrvk::CompositeAlphaFlagsKHR::e_OPAQUE_BIT_KHR; }
	else if ((surfaceCapabilities.getSupportedCompositeAlpha() & pvrvk::CompositeAlphaFlagsKHR::e_INHERIT_BIT_KHR) != 0)
	{
		supportedCompositeAlphaFlags = pvrvk::CompositeAlphaFlagsKHR::e_INHERIT_BIT_KHR;
	}
	pvrvk::SwapchainCreateInfo createInfo;
	createInfo.clipped = true;
	createInfo.compositeAlpha = supportedCompositeAlphaFlags;
	createInfo.surface = surface;
	// Clamp the swapchain length into the surface's [min, max] image-count range and the framework cap.
	displayAttributes.swapLength = std::max<uint32_t>(static_cast<uint32_t>(displayAttributes.swapLength), surfaceCapabilities.getMinImageCount());
	if (surfaceCapabilities.getMaxImageCount()) { displayAttributes.swapLength = std::min<uint32_t>(displayAttributes.swapLength, surfaceCapabilities.getMaxImageCount()); }
	displayAttributes.swapLength = std::min<uint32_t>(displayAttributes.swapLength, pvrvk::FrameworkCaps::MaxSwapChains);
	createInfo.minImageCount = displayAttributes.swapLength;
	createInfo.imageFormat = imageFormat.getFormat();
	createInfo.imageArrayLayers = 1;
	createInfo.imageColorSpace = imageFormat.getColorSpace();
	createInfo.imageExtent.setWidth(displayAttributes.width);
	createInfo.imageExtent.setHeight(displayAttributes.height);
	createInfo.imageUsage = swapchainImageUsageFlags;
	createInfo.preTransform = pvrvk::SurfaceTransformFlagsKHR::e_IDENTITY_BIT_KHR;
	if ((surfaceCapabilities.getSupportedTransforms() & pvrvk::SurfaceTransformFlagsKHR::e_IDENTITY_BIT_KHR) == 0)
	{ throw InvalidOperationError("Surface does not support VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR transformation"); }
	createInfo.imageSharingMode = pvrvk::SharingMode::e_EXCLUSIVE;
	createInfo.presentMode = swapchainPresentMode;
	createInfo.numQueueFamilyIndex = 1;
	uint32_t queueFamily = 0;
	createInfo.queueFamilyIndices = &queueFamily;
	pvrvk::Swapchain swapchain;
	swapchain = device->createSwapchain(createInfo, surface);
	// The implementation may create more images than requested; report the actual length back.
	displayAttributes.swapLength = swapchain->getSwapchainLength();
	Log(LogLevel::Information, "Swapchain length: %i", displayAttributes.swapLength);
	return swapchain;
}
/// Returns true if the two queue family indices are equal, or if either is the
/// "ignored" sentinel (uint32_t(-1)). Both must be valid, or both ignored.
inline static bool areQueueFamiliesSameOrInvalid(uint32_t lhs, uint32_t rhs)
{
	const uint32_t ignored = static_cast<uint32_t>(-1);
	debug_assertion((lhs != static_cast<uint32_t>(-1) && rhs != static_cast<uint32_t>(-1)) || (lhs == rhs),
		"ImageUtilsVK(areQueueFamiliesSameOrInvalid): Only one queue family was valid. "
		"Either both must be valid, or both must be ignored (-1)"); // Don't pass one non-null only...
	return (lhs == rhs) || (lhs == ignored) || (rhs == ignored);
}
inline static bool isMultiQueue(uint32_t queueFamilySrc, uint32_t queueFamilyDst) { return !areQueueFamiliesSameOrInvalid(queueFamilySrc, queueFamilyDst); }
/// Map an image layout to the access flags typically associated with it,
/// for use when building image memory barriers. Unknown layouts map to no access.
inline pvrvk::AccessFlags getAccesFlagsFromLayout(pvrvk::ImageLayout layout)
{
	switch (layout)
	{
	// Transfer layouts.
	case pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL: return pvrvk::AccessFlags::e_TRANSFER_WRITE_BIT;
	case pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL: return pvrvk::AccessFlags::e_TRANSFER_READ_BIT;
	// Shader / attachment layouts.
	case pvrvk::ImageLayout::e_SHADER_READ_ONLY_OPTIMAL: return pvrvk::AccessFlags::e_SHADER_READ_BIT;
	case pvrvk::ImageLayout::e_COLOR_ATTACHMENT_OPTIMAL: return pvrvk::AccessFlags::e_COLOR_ATTACHMENT_READ_BIT | pvrvk::AccessFlags::e_COLOR_ATTACHMENT_WRITE_BIT;
	case pvrvk::ImageLayout::e_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
		return pvrvk::AccessFlags::e_DEPTH_STENCIL_ATTACHMENT_READ_BIT | pvrvk::AccessFlags::e_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
	// General layout can be used for almost anything, so cover shader and colour-attachment access.
	case pvrvk::ImageLayout::e_GENERAL:
		return pvrvk::AccessFlags::e_SHADER_READ_BIT | pvrvk::AccessFlags::e_SHADER_WRITE_BIT | pvrvk::AccessFlags::e_COLOR_ATTACHMENT_READ_BIT |
			pvrvk::AccessFlags::e_COLOR_ATTACHMENT_WRITE_BIT;
	// Presentation / host layouts.
	case pvrvk::ImageLayout::e_PRESENT_SRC_KHR: return pvrvk::AccessFlags::e_MEMORY_READ_BIT;
	case pvrvk::ImageLayout::e_PREINITIALIZED: return pvrvk::AccessFlags::e_HOST_WRITE_BIT;
	default: return (pvrvk::AccessFlags)0;
	}
}
} // namespace
namespace impl {
/// A format is considered supported here if optimal tiling advertises sampled-image usage.
inline bool isSupportedFormat(const pvrvk::PhysicalDevice& pdev, pvrvk::Format fmt)
{
	const pvrvk::FormatProperties props = pdev->getFormatProperties(fmt);
	const pvrvk::FormatFeatureFlags optimalFeatures = props.getOptimalTilingFeatures();
	return (optimalFeatures & pvrvk::FormatFeatureFlags::e_SAMPLED_IMAGE_BIT) != 0;
}
/// Return a texture whose format the device supports: either the input itself, or (for PVRTC
/// on devices without PVRTC support, when allowDecompress is set) a software-decompressed copy
/// written into decompressedTexture. Throws if the format is unsupported and cannot be handled.
const Texture* decompressIfRequired(
	const Texture& texture, Texture& decompressedTexture, const pvrvk::PhysicalDevice& pdev, bool allowDecompress, pvrvk::Format& outFormat, bool& isDecompressed)
{
	const char* cszUnsupportedFormat = "Texture format is not supported in this implementation.\n";
	const char* cszUnsupportedFormatDecompressionAvailable = "Texture format is not supported in this implementation."
															 " Allowing software decompression (allowDecompress=true) will enable you to use this format.\n";
	outFormat = convertToPVRVkPixelFormat(texture.getPixelFormat(), texture.getColorSpace(), texture.getChannelType(), isDecompressed);
	// Fast path: the device supports the texture's native format directly.
	if (isSupportedFormat(pdev, outFormat))
	{
		isDecompressed = false;
		return &texture;
	}
	// Unsupported format: only PVRTC I variants can be decompressed in software.
	const uint64_t pixelTypeId = texture.getPixelFormat().getPixelTypeId();
	const bool isPvrtc1 = pixelTypeId >= uint64_t(CompressedPixelFormat::PVRTCI_2bpp_RGB) && pixelTypeId <= uint64_t(CompressedPixelFormat::PVRTCI_4bpp_RGBA);
	if (!isPvrtc1) { throw TextureDecompressionError(cszUnsupportedFormat, to_string(texture.getPixelFormat())); }
	if (!allowDecompress) { throw TextureDecompressionError(cszUnsupportedFormatDecompressionAvailable, "PVRTC"); }
	Log(LogLevel::Information,
		"PVRTC texture format support not detected. Decompressing PVRTC to"
		" corresponding format (RGBA32 or RGB24)");
	decompressPvrtc(texture, decompressedTexture);
	isDecompressed = true;
	outFormat = convertToPVRVkPixelFormat(decompressedTexture.getPixelFormat(), decompressedTexture.getColorSpace(), decompressedTexture.getChannelType(), isDecompressed);
	return &decompressedTexture;
}
} // namespace impl
/// Create a device-local image sized/shaped for the given texture (1D, 2D, cube or 3D as
/// appropriate), record copy commands for every (mip, array slice, face) surface into the
/// provided command buffer, and return the image. Decompresses PVRTC in software if the
/// device lacks support and allowDecompress is set.
pvrvk::Image uploadImageHelper(pvrvk::Device& device, const Texture& texture, bool allowDecompress, pvrvk::CommandBufferBase commandBuffer, pvrvk::ImageUsageFlags usageFlags,
	pvrvk::ImageLayout finalLayout, vma::Allocator* bufferAllocator = nullptr, vma::Allocator* imageAllocator = nullptr,
	vma::AllocationCreateFlags imageAllocationCreateFlags = vma::AllocationCreateFlags::e_NONE)
{
	// Check that the texture is valid.
	if (!texture.getDataSize()) { throw pvrvk::ErrorValidationFailedEXT("TextureUtils.h:textureUpload:: Invalid texture supplied, please verify inputs."); }
	pvr::utils::beginCommandBufferDebugLabel(commandBuffer, pvrvk::DebugUtilsLabel("PVRUtilsVk::uploadImage"));
	bool isDecompressed;
	pvrvk::Format format = pvrvk::Format::e_UNDEFINED;
	// Texture to use if we decompress in software.
	Texture decompressedTexture;
	// Texture pointer which points at the texture we should use for the function.
	// Allows switching to, for example, a decompressed version of the texture.
	const Texture* textureToUse = impl::decompressIfRequired(texture, decompressedTexture, device->getPhysicalDevice(), allowDecompress, format, isDecompressed);
	// BUGFIX: the exception was previously constructed but never thrown, making this check a no-op.
	if (format == pvrvk::Format::e_UNDEFINED) { throw pvrvk::ErrorUnknown("TextureUtils.h:textureUpload:: Texture's pixel type is not supported by this API."); }
	uint32_t texWidth = static_cast<uint32_t>(textureToUse->getWidth());
	uint32_t texHeight = static_cast<uint32_t>(textureToUse->getHeight());
	uint32_t texDepth = static_cast<uint32_t>(textureToUse->getDepth());
	uint32_t dataWidth = static_cast<uint32_t>(textureToUse->getWidth());
	uint32_t dataHeight = static_cast<uint32_t>(textureToUse->getHeight());
	uint16_t texMipLevels = static_cast<uint16_t>(textureToUse->getNumMipMapLevels());
	uint16_t texArraySlices = static_cast<uint16_t>(textureToUse->getNumArrayMembers());
	uint16_t texFaces = static_cast<uint16_t>(textureToUse->getNumFaces());
	pvrvk::Image image;
	// The image must be a transfer destination regardless of the caller's requested usage.
	usageFlags |= pvrvk::ImageUsageFlags::e_TRANSFER_DST_BIT;
	if (texDepth > 1)
	{
		// Volume texture -> 3D image.
		image = createImage(device,
			pvrvk::ImageCreateInfo(pvrvk::ImageType::e_3D, format, pvrvk::Extent3D(texWidth, texHeight, texDepth), usageFlags, static_cast<uint8_t>(texMipLevels), texArraySlices,
				pvrvk::SampleCountFlags::e_1_BIT),
			pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, imageAllocator, imageAllocationCreateFlags);
	}
	else if (texHeight > 1)
	{
		// 2D (or cube / array) image: cube faces count as 6 array slices, and the
		// CUBE_COMPATIBLE / 2D_ARRAY_COMPATIBLE flags are set via flag-times-bool multiplication.
		image = createImage(device,
			pvrvk::ImageCreateInfo(pvrvk::ImageType::e_2D, format, pvrvk::Extent3D(texWidth, texHeight, 1u), usageFlags, static_cast<uint8_t>(texMipLevels),
				texArraySlices * (texture.getNumFaces() > 1 ? 6 : 1), pvrvk::SampleCountFlags::e_1_BIT,
				pvrvk::ImageCreateFlags::e_CUBE_COMPATIBLE_BIT * (texture.getNumFaces() > 1) |
					pvrvk::ImageCreateFlags::e_2D_ARRAY_COMPATIBLE_BIT_KHR * static_cast<uint32_t>(texArraySlices > 1)),
			pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, imageAllocator, imageAllocationCreateFlags);
	}
	else
	{
		// Height of 1 -> 1D image.
		image = createImage(device,
			pvrvk::ImageCreateInfo(pvrvk::ImageType::e_1D, format, pvrvk::Extent3D(texWidth, 1u, 1u), usageFlags, static_cast<uint8_t>(texMipLevels), texArraySlices),
			pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, imageAllocator, imageAllocationCreateFlags);
	}
	// POPULATE, TRANSITION ETC
	{
		// Create a bunch of buffers that will be used as copy destinations - each will be one mip level, one array slice / one face
		// Faces are considered array elements, so each Framework array slice in a cube array will be 6 vulkan array slices.
		// Edit the info to be the small, linear images that we are using.
		std::vector<ImageUpdateInfo> imageUpdates(texMipLevels * texArraySlices * texFaces);
		uint32_t imageUpdateIndex = 0;
		for (uint32_t mipLevel = 0; mipLevel < texMipLevels; ++mipLevel)
		{
			uint32_t minWidth, minHeight, minDepth;
			textureToUse->getMinDimensionsForFormat(minWidth, minHeight, minDepth);
			// Compressed formats have minimum block dimensions; the data extent never drops below them.
			dataWidth = static_cast<uint32_t>(std::max(textureToUse->getWidth(mipLevel), minWidth));
			dataHeight = static_cast<uint32_t>(std::max(textureToUse->getHeight(mipLevel), minHeight));
			texWidth = textureToUse->getWidth(mipLevel);
			texHeight = textureToUse->getHeight(mipLevel);
			texDepth = textureToUse->getDepth(mipLevel);
			for (uint32_t arraySlice = 0; arraySlice < texArraySlices; ++arraySlice)
			{
				for (uint32_t face = 0; face < texFaces; ++face)
				{
					ImageUpdateInfo& update = imageUpdates[imageUpdateIndex];
					update.imageWidth = texWidth;
					update.imageHeight = texHeight;
					update.dataWidth = dataWidth;
					update.dataHeight = dataHeight;
					update.depth = texDepth;
					update.arrayIndex = arraySlice;
					update.cubeFace = face;
					update.mipLevel = mipLevel;
					update.data = textureToUse->getDataPointer(mipLevel, arraySlice, face);
					update.dataSize = textureToUse->getDataSize(mipLevel, false, false);
					++imageUpdateIndex;
				} // next face
			} // next arrayslice
		} // next miplevel
		updateImage(device, commandBuffer, imageUpdates.data(), static_cast<uint32_t>(imageUpdates.size()), format, finalLayout, texFaces > 1, image, bufferAllocator);
	}
	pvr::utils::endCommandBufferDebugLabel(commandBuffer);
	return image;
}
/// Upload a texture and create an image view for it, applying component swizzles for
/// luminance ('l'/'la') and alpha-only ('a') source formats. On MoltenVK, temporarily enables
/// fullImageViewSwizzle while the swizzled view is created, restoring the prior setting after.
pvrvk::ImageView uploadImageAndViewHelper(pvrvk::Device& device, const Texture& texture, bool allowDecompress, pvrvk::CommandBufferBase commandBuffer,
	pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, vma::Allocator* bufferAllocator = nullptr, vma::Allocator* imageAllocator = nullptr,
	vma::AllocationCreateFlags imageAllocationCreateFlags = vma::AllocationCreateFlags::e_NONE)
{
#ifdef VK_USE_PLATFORM_MACOS_MVK
	// Initialize the objects needed to configure MoltenVK.
	VkInstance instance = device->getPhysicalDevice()->getInstance()->getVkHandle();
	MVKConfiguration mvkConfig;
	size_t sizeOfMVK = sizeof(MVKConfiguration);
	bool isFullImageViewSwizzle = false, isSwizzled = false;
#endif
	pvrvk::ComponentMapping components = {
		pvrvk::ComponentSwizzle::e_IDENTITY,
		pvrvk::ComponentSwizzle::e_IDENTITY,
		pvrvk::ComponentSwizzle::e_IDENTITY,
		pvrvk::ComponentSwizzle::e_IDENTITY,
	};
	if (texture.getPixelFormat().getChannelContent(0) == 'l')
	{
		if (texture.getPixelFormat().getChannelContent(1) == 'a')
		{
			// Luminance-alpha: replicate R into RGB, map the second channel to alpha.
			components.setR(pvrvk::ComponentSwizzle::e_R);
			components.setG(pvrvk::ComponentSwizzle::e_R);
			components.setB(pvrvk::ComponentSwizzle::e_R);
			components.setA(pvrvk::ComponentSwizzle::e_G);
		}
		else
		{
			// Luminance only: replicate R into RGB, force alpha to 1.
			components.setR(pvrvk::ComponentSwizzle::e_R);
			components.setG(pvrvk::ComponentSwizzle::e_R);
			components.setB(pvrvk::ComponentSwizzle::e_R);
			components.setA(pvrvk::ComponentSwizzle::e_ONE);
		}
#ifdef VK_USE_PLATFORM_MACOS_MVK
		// Get the MoltenVKConfiguration pointer from the instance and set fullImageViewSwizzle to true.
		pvrvk::getVkBindings().vkGetMoltenVKConfigurationMVK(instance, &mvkConfig, &sizeOfMVK);
		isFullImageViewSwizzle = mvkConfig.fullImageViewSwizzle;
		// Check if the swizzle was set to false. if it is not, don't do anything.
		if (!isFullImageViewSwizzle)
		{
			mvkConfig.fullImageViewSwizzle = true;
			pvrvk::getVkBindings().vkSetMoltenVKConfigurationMVK(instance, &mvkConfig, &sizeOfMVK);
		}
		isSwizzled = true;
#endif
	}
	else if (texture.getPixelFormat().getChannelContent(0) == 'a')
	{
		// Alpha-only: RGB read as zero, alpha sourced from R.
		components.setR(pvrvk::ComponentSwizzle::e_ZERO);
		components.setG(pvrvk::ComponentSwizzle::e_ZERO);
		components.setB(pvrvk::ComponentSwizzle::e_ZERO);
		components.setA(pvrvk::ComponentSwizzle::e_R);
	}
	// BUGFIX: capture the view in a local instead of returning immediately - the MoltenVK
	// configuration restore below was previously placed after the return and never executed.
	pvrvk::ImageView imageView = device->createImageView(pvrvk::ImageViewCreateInfo(
		uploadImageHelper(device, texture, allowDecompress, commandBuffer, usageFlags, finalLayout, bufferAllocator, imageAllocator, imageAllocationCreateFlags), components));
#ifdef VK_USE_PLATFORM_MACOS_MVK
	// Set fullImageViewSwizzle back to false if it was originally false.
	if (isSwizzled && !isFullImageViewSwizzle)
	{
		mvkConfig.fullImageViewSwizzle = isFullImageViewSwizzle;
		pvrvk::getVkBindings().vkSetMoltenVKConfigurationMVK(instance, &mvkConfig, &sizeOfMVK);
	}
#endif
	return imageView;
}
/// Loads a texture asset by filename from the asset provider, uploads it to the device, and
/// returns an image view named after the source file. If outAssetTexture is non-null the
/// decoded CPU-side texture is also handed back to the caller.
inline pvrvk::ImageView loadAndUploadImageAndViewHelper(pvrvk::Device& device, const char* fileName, bool allowDecompress, pvrvk::CommandBufferBase commandBuffer,
	IAssetProvider& assetProvider, pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, Texture* outAssetTexture = nullptr, vma::Allocator* imageAllocator = nullptr,
	vma::Allocator* bufferAllocator = nullptr, vma::AllocationCreateFlags imageAllocationCreateFlags = vma::AllocationCreateFlags::e_NONE)
{
	// Decode into the caller-provided texture if given, otherwise into a local temporary
	// that only lives for the duration of the upload.
	Texture localTexture;
	Texture* targetTexture = outAssetTexture ? outAssetTexture : &localTexture;
	auto assetStream = assetProvider.getAssetStream(fileName);
	*targetTexture = pvr::textureLoad(*assetStream, pvr::getTextureFormatFromFilename(fileName));
	// Upload and wrap in a view; tag the view with the file name for debugging.
	pvrvk::ImageView view =
		uploadImageAndViewHelper(device, *targetTexture, allowDecompress, commandBuffer, usageFlags, finalLayout, bufferAllocator, imageAllocator, imageAllocationCreateFlags);
	view->setObjectName(fileName);
	return view;
}
/// Loads a texture asset by filename from the asset provider, uploads it to the device, and
/// returns the image (no view) named after the source file. If outAssetTexture is non-null
/// the decoded CPU-side texture is also handed back to the caller.
inline pvrvk::Image loadAndUploadImageHelper(pvrvk::Device& device, const char* fileName, bool allowDecompress, pvrvk::CommandBufferBase commandBuffer, IAssetProvider& assetProvider,
	pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, Texture* outAssetTexture = nullptr, vma::Allocator* stagingBufferAllocator = nullptr,
	vma::Allocator* imageAllocator = nullptr, vma::AllocationCreateFlags imageAllocationCreateFlags = vma::AllocationCreateFlags::e_NONE)
{
	// Decode into the caller-provided texture if given, otherwise into a local temporary.
	Texture localTexture;
	Texture* targetTexture = outAssetTexture ? outAssetTexture : &localTexture;
	auto assetStream = assetProvider.getAssetStream(fileName);
	*targetTexture = pvr::textureLoad(*assetStream, pvr::getTextureFormatFromFilename(fileName));
	// Upload the decoded texture and tag the resulting image with the file name for debugging.
	pvrvk::Image uploadedImage =
		uploadImageHelper(device, *targetTexture, allowDecompress, commandBuffer, usageFlags, finalLayout, stagingBufferAllocator, imageAllocator, imageAllocationCreateFlags);
	uploadedImage->setObjectName(fileName);
	return uploadedImage;
}
/// Public overload taking a primary command buffer; forwards to the shared helper.
/// Note the helper's allocator parameter order is (imageAllocator, bufferAllocator).
pvrvk::ImageView loadAndUploadImageAndView(pvrvk::Device& device, const char* fileName, bool allowDecompress, pvrvk::CommandBuffer& commandBuffer, IAssetProvider& assetProvider,
	pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, Texture* outAssetTexture, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator,
	vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	pvrvk::CommandBufferBase baseCommandBuffer(commandBuffer);
	return loadAndUploadImageAndViewHelper(device, fileName, allowDecompress, baseCommandBuffer, assetProvider, usageFlags, finalLayout, outAssetTexture, imageAllocator,
		stagingBufferAllocator, imageAllocationCreateFlags);
}
/// Public overload taking a secondary command buffer; forwards to the shared helper.
/// Note the helper's allocator parameter order is (imageAllocator, bufferAllocator).
pvrvk::ImageView loadAndUploadImageAndView(pvrvk::Device& device, const char* fileName, bool allowDecompress, pvrvk::SecondaryCommandBuffer& commandBuffer,
	IAssetProvider& assetProvider, pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, Texture* outAssetTexture, vma::Allocator* stagingBufferAllocator,
	vma::Allocator* imageAllocator, vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	pvrvk::CommandBufferBase baseCommandBuffer(commandBuffer);
	return loadAndUploadImageAndViewHelper(device, fileName, allowDecompress, baseCommandBuffer, assetProvider, usageFlags, finalLayout, outAssetTexture, imageAllocator,
		stagingBufferAllocator, imageAllocationCreateFlags);
}
/// Public overload (C-string filename, primary command buffer); forwards to the shared helper.
pvrvk::Image loadAndUploadImage(pvrvk::Device& device, const char* fileName, bool allowDecompress, pvrvk::CommandBuffer& commandBuffer, IAssetProvider& assetProvider,
	pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, Texture* outAssetTexture, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator,
	vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	pvrvk::CommandBufferBase baseCommandBuffer(commandBuffer);
	return loadAndUploadImageHelper(device, fileName, allowDecompress, baseCommandBuffer, assetProvider, usageFlags, finalLayout, outAssetTexture, stagingBufferAllocator,
		imageAllocator, imageAllocationCreateFlags);
}
/// Public overload (std::string filename, primary command buffer); forwards to the shared helper.
pvrvk::Image loadAndUploadImage(pvrvk::Device& device, const std::string& fileName, bool allowDecompress, pvrvk::CommandBuffer& commandBuffer, IAssetProvider& assetProvider,
	pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, Texture* outAssetTexture, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator,
	vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	pvrvk::CommandBufferBase baseCommandBuffer(commandBuffer);
	return loadAndUploadImageHelper(device, fileName.c_str(), allowDecompress, baseCommandBuffer, assetProvider, usageFlags, finalLayout, outAssetTexture, stagingBufferAllocator,
		imageAllocator, imageAllocationCreateFlags);
}
/// Public overload (C-string filename, secondary command buffer); forwards to the shared helper.
pvrvk::Image loadAndUploadImage(pvrvk::Device& device, const char* fileName, bool allowDecompress, pvrvk::SecondaryCommandBuffer& commandBuffer, IAssetProvider& assetProvider,
	pvrvk::ImageUsageFlags usageFlags, pvrvk::ImageLayout finalLayout, Texture* outAssetTexture, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator,
	vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	pvrvk::CommandBufferBase baseCommandBuffer(commandBuffer);
	return loadAndUploadImageHelper(device, fileName, allowDecompress, baseCommandBuffer, assetProvider, usageFlags, finalLayout, outAssetTexture, stagingBufferAllocator,
		imageAllocator, imageAllocationCreateFlags);
}
/// Uploads an already-decoded texture and returns an image view (secondary command buffer overload).
pvrvk::ImageView uploadImageAndView(pvrvk::Device& device, const Texture& texture, bool allowDecompress, pvrvk::SecondaryCommandBuffer& commandBuffer, pvrvk::ImageUsageFlags usageFlags,
	pvrvk::ImageLayout finalLayout, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator, vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	pvrvk::CommandBufferBase baseCommandBuffer(commandBuffer);
	return uploadImageAndViewHelper(device, texture, allowDecompress, baseCommandBuffer, usageFlags, finalLayout, stagingBufferAllocator, imageAllocator, imageAllocationCreateFlags);
}
/// Uploads an already-decoded texture and returns an image view (primary command buffer overload).
pvrvk::ImageView uploadImageAndView(pvrvk::Device& device, const Texture& texture, bool allowDecompress, pvrvk::CommandBuffer& commandBuffer, pvrvk::ImageUsageFlags usageFlags,
	pvrvk::ImageLayout finalLayout, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator, vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	pvrvk::CommandBufferBase baseCommandBuffer(commandBuffer);
	return uploadImageAndViewHelper(device, texture, allowDecompress, baseCommandBuffer, usageFlags, finalLayout, stagingBufferAllocator, imageAllocator, imageAllocationCreateFlags);
}
/// Packs numImages input images into a single RGBA8 atlas image using a binary-tree bin packer,
/// blitting each input into its allotted slot (with a 1-pixel border) and returning normalized
/// UV rectangles for each input via outUVs (indexed by the original input order).
/// The atlas is transitioned to finalLayout before the function returns; outDescriptor (optional)
/// receives the atlas dimensions and format metadata.
void generateTextureAtlas(pvrvk::Device& device, const pvrvk::Image* inputImages, pvrvk::Rect2Df* outUVs, uint32_t numImages, pvrvk::ImageLayout inputImageLayout,
	pvrvk::ImageView* outImageView, TextureHeader* outDescriptor, pvrvk::CommandBufferBase cmdBuffer, pvrvk::ImageLayout finalLayout, vma::Allocator* imageAllocator,
	vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	// Book-keeping record for each input while it is being placed.
	struct SortedImage
	{
		uint32_t id; // index into the caller's inputImages/outUVs arrays
		pvrvk::Image image;
		uint16_t width;
		uint16_t height;
		uint16_t srcX;
		uint16_t srcY;
		bool hasAlpha;
	};
	std::vector<SortedImage> sortedImage(numImages);
	// Sort descending by pixel area so the packer places the largest images first.
	struct SortCompare
	{
		bool operator()(const SortedImage& a, const SortedImage& b)
		{
			uint32_t aSize = a.width * a.height;
			uint32_t bSize = b.width * b.height;
			return (aSize > bSize);
		}
	};
	// Node of the binary bin-packing tree. Each insert either fills a node exactly or splits
	// the node's free space into two children and recurses into the left one.
	struct Area
	{
		int32_t x;
		int32_t y;
		int32_t w;
		int32_t h;
		int32_t size;
		bool isFilled;
		Area* right;
		Area* left;
	private:
		void setSize(int32_t width, int32_t height)
		{
			w = width;
			h = height;
			size = width * height;
		}
	public:
		Area(int32_t width, int32_t height) : x(0), y(0), isFilled(false), right(NULL), left(NULL) { setSize(width, height); }
		Area() : x(0), y(0), isFilled(false), right(NULL), left(NULL) { setSize(0, 0); }
		// Finds (and marks) a free rectangle of exactly width x height; returns NULL if none fits.
		Area* insert(int32_t width, int32_t height)
		{
			// If this area has branches below it (i.e. is not a leaf) then traverse those.
			// Check the left branch first.
			if (left)
			{
				Area* tempPtr = NULL;
				tempPtr = left->insert(width, height);
				if (tempPtr != NULL) { return tempPtr; }
			}
			// Now check right
			if (right) { return right->insert(width, height); }
			// Already filled!
			if (isFilled) { return NULL; }
			// Too small
			if (size < width * height || w < width || h < height) { return NULL; }
			// Just right!
			if (size == width * height && w == width && h == height)
			{
				isFilled = true;
				return this;
			}
			// Too big. Split up.
			if (size > width * height && w >= width && h >= height)
			{
				// Initializes the children, and sets the left child's coordinates as these don't change.
				left = new Area;
				right = new Area;
				left->x = x;
				left->y = y;
				// --- Splits the current area depending on the size and position of the placed texture.
				// Splits vertically if larger free distance across the texture.
				if ((w - width) > (h - height))
				{
					left->w = width;
					left->h = h;
					right->x = x + width;
					right->y = y;
					right->w = w - width;
					right->h = h;
				}
				// Splits horizontally if larger or equal free distance downwards.
				else
				{
					left->w = w;
					left->h = height;
					right->x = x;
					right->y = y + height;
					right->w = w;
					right->h = h - height;
				}
				// Initializes the child members' size attributes.
				left->size = left->h * left->w;
				right->size = right->h * right->w;
				// Inserts the texture into the left child member.
				return left->insert(width, height);
			}
			// Catch all error return.
			return NULL;
		}
		// Recursively frees all descendant nodes (the node itself is freed by its parent/owner).
		bool deleteArea()
		{
			if (left != NULL)
			{
				if (left->left != NULL)
				{
					if (!left->deleteArea()) { return false; }
					if (!right->deleteArea()) { return false; }
				}
			}
			if (right != NULL)
			{
				if (right->left != NULL)
				{
					if (!left->deleteArea()) { return false; }
					if (!right->deleteArea()) { return false; }
				}
			}
			delete right;
			right = NULL;
			delete left;
			left = NULL;
			return true;
		}
	};
	// Gather the dimensions of each input image.
	for (uint32_t i = 0; i < numImages; ++i)
	{
		sortedImage[i].image = inputImages[i];
		sortedImage[i].id = i;
		sortedImage[i].width = static_cast<uint16_t>(inputImages[i]->getWidth());
		sortedImage[i].height = static_cast<uint16_t>(inputImages[i]->getHeight());
	}
	// Sort the sprites, largest area first.
	std::sort(sortedImage.begin(), sortedImage.end(), SortCompare());
	// find the best width and height
	int32_t width = 0, height = 0, area = 0;
	uint32_t preferredDim[] = { 8, 16, 32, 64, 128, 256, 512, 1024 };
	const uint32_t atlasPixelBorder = 1;
	const uint32_t totalBorder = atlasPixelBorder * 2;
	uint32_t sortedImagesIterator = 0;
	// Total area required, including the per-image border on every side.
	for (; sortedImagesIterator < sortedImage.size(); ++sortedImagesIterator)
	{ area += (sortedImage[sortedImagesIterator].width + totalBorder) * (sortedImage[sortedImagesIterator].height + totalBorder); }
	sortedImagesIterator = 0;
	const uint32_t numPreferredDims = static_cast<uint32_t>(sizeof(preferredDim) / sizeof(preferredDim[0]));
	// Pick the smallest preferred dimension whose square can hold the total area.
	// BUG FIX: the bounds check must come BEFORE the indexing; the original condition read
	// preferredDim[sortedImagesIterator] first, overrunning the array when no size fits.
	while (sortedImagesIterator < numPreferredDims &&
		(static_cast<int32_t>(preferredDim[sortedImagesIterator]) * static_cast<int32_t>(preferredDim[sortedImagesIterator])) < area)
	{ ++sortedImagesIterator; }
	if (sortedImagesIterator >= numPreferredDims) { throw pvrvk::ErrorValidationFailedEXT("Cannot find a best size for the texture atlas"); }
	pvr::utils::beginCommandBufferDebugLabel(cmdBuffer, pvrvk::DebugUtilsLabel("PVRUtilsVk::generateTextureAtlas"));
	width = height = preferredDim[sortedImagesIterator];
	float oneOverWidth = 1.f / width;
	float oneOverHeight = 1.f / height;
	Area* head = new Area(width, height);
	Area* pRtrn = nullptr;
	pvrvk::Offset3D dstOffsets[2];
	// Create the atlas image (transfer destination now, sampled later).
	pvrvk::Format outFmt = pvrvk::Format::e_R8G8B8A8_UNORM;
	pvrvk::Image outTexStore = createImage(device,
		pvrvk::ImageCreateInfo(pvrvk::ImageType::e_2D, outFmt, pvrvk::Extent3D(width, height, 1u), pvrvk::ImageUsageFlags::e_SAMPLED_BIT | pvrvk::ImageUsageFlags::e_TRANSFER_DST_BIT),
		pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, imageAllocator, imageAllocationCreateFlags);
	utils::setImageLayout(outTexStore, pvrvk::ImageLayout::e_UNDEFINED, pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL, cmdBuffer);
	pvrvk::ImageView view = device->createImageView(pvrvk::ImageViewCreateInfo(outTexStore));
	// Clear the whole atlas so the borders between packed images are transparent black.
	cmdBuffer->clearColorImage(view, pvrvk::ClearColorValue(0.0f, 0.f, 0.f, 0.f), pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL);
	for (uint32_t i = 0; i < numImages; ++i)
	{
		const SortedImage& image = sortedImage[i];
		// Reserve a slot including the border on all sides.
		pRtrn = head->insert(static_cast<int32_t>(sortedImage[i].width) + totalBorder, static_cast<int32_t>(sortedImage[i].height) + totalBorder);
		if (!pRtrn)
		{
			// Free the packing tree before propagating the failure.
			head->deleteArea();
			delete head;
			throw pvrvk::ErrorUnknown("Cannot find a best size for the texture atlas");
		}
		dstOffsets[0].setX(static_cast<uint16_t>(pRtrn->x + atlasPixelBorder));
		dstOffsets[0].setY(static_cast<uint16_t>(pRtrn->y + atlasPixelBorder));
		dstOffsets[0].setZ(0);
		dstOffsets[1].setX(static_cast<uint16_t>(dstOffsets[0].getX() + sortedImage[i].width));
		dstOffsets[1].setY(static_cast<uint16_t>(dstOffsets[0].getY() + sortedImage[i].height));
		dstOffsets[1].setZ(1);
		// Record the normalized UV rectangle for this input (indexed by its original position).
		pvrvk::Offset2Df offset(dstOffsets[0].getX() * oneOverWidth, dstOffsets[0].getY() * oneOverHeight);
		pvrvk::Extent2Df extent(sortedImage[i].width * oneOverWidth, sortedImage[i].height * oneOverHeight);
		outUVs[image.id].setOffset(offset);
		outUVs[image.id].setExtent(extent);
		pvrvk::Offset3D srcOffsets[2] = { pvrvk::Offset3D(0, 0, 0), pvrvk::Offset3D(image.width, image.height, 1) };
		pvrvk::ImageBlit blit(pvrvk::ImageSubresourceLayers(), srcOffsets, pvrvk::ImageSubresourceLayers(), dstOffsets);
		cmdBuffer->blitImage(sortedImage[i].image, outTexStore, &blit, 1, pvrvk::Filter::e_NEAREST, inputImageLayout, pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL);
	}
	if (outDescriptor)
	{
		outDescriptor->setWidth(width);
		outDescriptor->setHeight(height);
		outDescriptor->setChannelType(VariableType::UnsignedByteNorm);
		outDescriptor->setColorSpace(ColorSpace::lRGB);
		outDescriptor->setDepth(1);
		outDescriptor->setPixelFormat(PixelFormat::RGBA_8888());
	}
	*outImageView = device->createImageView(pvrvk::ImageViewCreateInfo(outTexStore));
	// Transition the atlas from transfer-destination to its final layout for shader reads.
	const uint32_t queueFamilyId = cmdBuffer->getCommandPool()->getQueueFamilyIndex();
	pvrvk::MemoryBarrierSet barrier;
	barrier.addBarrier(pvrvk::ImageMemoryBarrier(pvrvk::AccessFlags::e_TRANSFER_WRITE_BIT, pvrvk::AccessFlags::e_SHADER_READ_BIT, outTexStore,
		pvrvk::ImageSubresourceRange(pvrvk::ImageAspectFlags::e_COLOR_BIT), pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL, finalLayout, queueFamilyId, queueFamilyId));
	cmdBuffer->pipelineBarrier(pvrvk::PipelineStageFlags::e_TRANSFER_BIT, pvrvk::PipelineStageFlags::e_FRAGMENT_SHADER_BIT | pvrvk::PipelineStageFlags::e_COMPUTE_SHADER_BIT, barrier);
	head->deleteArea();
	delete head;
	pvr::utils::endCommandBufferDebugLabel(cmdBuffer);
}
/// Creates a logical device and its queues. For each requested queue configuration, finds a
/// queue family supporting the requested flags (and, if a surface is given, presentation),
/// fills outAccessInfo[i] with the chosen family/queue indices, enables the supported subset
/// of the requested device extensions, and returns the created device with queues retrieved.
pvrvk::Device createDeviceAndQueues(pvrvk::PhysicalDevice physicalDevice, const QueuePopulateInfo* queueCreateInfos, uint32_t numQueueCreateInfos, QueueAccessInfo* outAccessInfo,
	const DeviceExtensions& deviceExtensions)
{
	std::vector<pvrvk::DeviceQueueCreateInfo> queueCreateInfo;
	const std::vector<pvrvk::QueueFamilyProperties>& queueFamilyProperties = physicalDevice->getQueueFamilyProperties();
	const char* graphics = "GRAPHICS ";
	const char* compute = "COMPUTE ";
	const char* present = "PRESENT ";
	const char* transfer = "TRANSFER ";
	const char* sparse = "SPARSE_BINDING ";
	const char* nothing = "";
	// Log the supported queue families
	Log(LogLevel::Information, "Supported Queue Families:");
	for (uint32_t i = 0; i < static_cast<uint32_t>(queueFamilyProperties.size()); ++i)
	{
		Log(LogLevel::Information, "\tqueue family %d (#queues %d) FLAGS: %d ( %s%s%s%s%s)", i, queueFamilyProperties[i].getQueueCount(), queueFamilyProperties[i].getQueueFlags(),
			((queueFamilyProperties[i].getQueueFlags() & pvrvk::QueueFlags::e_GRAPHICS_BIT) != 0) ? graphics : nothing,
			((queueFamilyProperties[i].getQueueFlags() & pvrvk::QueueFlags::e_COMPUTE_BIT) != 0) ? compute : nothing,
			((queueFamilyProperties[i].getQueueFlags() & pvrvk::QueueFlags::e_TRANSFER_BIT) != 0) ? transfer : nothing,
			((queueFamilyProperties[i].getQueueFlags() & pvrvk::QueueFlags::e_SPARSE_BINDING_BIT) != 0) ? sparse : nothing, nothing, nothing);
	}
	// queueIndices[f] == highest queue index handed out so far in family f (-1: family unused).
	std::vector<int32_t> queueIndices(queueFamilyProperties.size(), -1);
	std::vector<float> queuePriorities;
	for (uint32_t i = 0; i < numQueueCreateInfos; ++i)
	{
		for (uint32_t j = 0; j < queueFamilyProperties.size(); ++j)
		{
			// if requested, look for presentation support
			if (!queueCreateInfos[i].surface || physicalDevice->getSurfaceSupport(j, queueCreateInfos[i].surface))
			{
				uint32_t supportedFlags = static_cast<uint32_t>(queueFamilyProperties[j].getQueueFlags());
				uint32_t requestedFlags = static_cast<uint32_t>(queueCreateInfos[i].queueFlags);
				// The family must support every requested flag.
				if ((supportedFlags & requestedFlags) == requestedFlags)
				{
					// Hand out a fresh queue if the family has one spare; otherwise share the last one.
					if (static_cast<uint32_t>(queueIndices[j] + 1) < queueFamilyProperties[j].getQueueCount()) { ++queueIndices[j]; }
					outAccessInfo[i].familyId = j;
					outAccessInfo[i].queueId = static_cast<uint32_t>(queueIndices[j]);
					queuePriorities.emplace_back(queueCreateInfos[i].priority);
					break;
				}
			}
		}
	}
	uint32_t priorityIndex = 0;
	// populate the queue create info
	for (uint32_t i = 0; i < queueIndices.size(); ++i)
	{
		if (queueIndices[i] != -1)
		{
			queueCreateInfo.emplace_back(pvrvk::DeviceQueueCreateInfo());
			pvrvk::DeviceQueueCreateInfo& createInfo = queueCreateInfo.back();
			createInfo.setQueueFamilyIndex(i);
			for (uint32_t j = 0; j < static_cast<uint32_t>(queueIndices[i] + 1); ++j)
			{
				createInfo.addQueue(queuePriorities[priorityIndex]);
				priorityIndex++;
			}
		}
	}
	// create the device
	pvrvk::DeviceCreateInfo deviceInfo;
	pvrvk::PhysicalDeviceFeatures features = physicalDevice->getFeatures();
	// Ensure that robustBufferAccess is disabled
	features.setRobustBufferAccess(false);
	deviceInfo.setEnabledFeatures(&features);
	deviceInfo.setDeviceQueueCreateInfos(queueCreateInfo);
	// Print out the supported device extensions
	const std::vector<pvrvk::ExtensionProperties>& extensionProperties = physicalDevice->getDeviceExtensionsProperties();
	Log(LogLevel::Information, "Supported Device Extensions:");
	for (uint32_t i = 0; i < static_cast<uint32_t>(extensionProperties.size()); ++i)
	{ Log(LogLevel::Information, "\t%s : version [%u]", extensionProperties[i].getExtensionName(), extensionProperties[i].getSpecVersion()); }
	// Filter the given set of extensions so only the set of device extensions which are supported by the device remain
	if (deviceExtensions.getNumExtensions())
	{
		pvrvk::VulkanExtensionList supportedRequestedExtensions = pvrvk::Extensions::filterExtensions(extensionProperties, deviceExtensions);
		// Determine whether VK_EXT_debug_utils is supported and enabled by the instance
		bool debugUtilsSupported = physicalDevice->getInstance()->getEnabledExtensionTable().extDebugUtilsEnabled;
		if (debugUtilsSupported)
		{
			// Determine whether VK_EXT_debug_marker is supported
			bool debugReportSupported = supportedRequestedExtensions.containsExtension(VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
			// If VK_EXT_debug_utils is supported then remove VK_EXT_debug_marker from the list of extensions to enable, thereby prioritising VK_EXT_debug_utils
			if (debugUtilsSupported && debugReportSupported)
			{
				Log(LogLevel::Information, "VK_EXT_debug_utils and VK_EXT_debug_maker are both supported. We will be using VK_EXT_debug_utils.");
				supportedRequestedExtensions.removeExtension(VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
			}
		}
		deviceInfo.setExtensionList(supportedRequestedExtensions);
		Log(LogLevel::Information, "Supported Device Extensions to be Enabled:");
		for (uint32_t i = 0; i < static_cast<uint32_t>(deviceInfo.getExtensionList().getNumExtensions()); ++i)
		{
			Log(LogLevel::Information, "\t%s : version [%u]", deviceInfo.getExtensionList().getExtension(i).getName().c_str(),
				deviceInfo.getExtensionList().getExtension(i).getSpecVersion());
		}
		if (deviceInfo.getExtensionList().getNumExtensions() != deviceExtensions.getNumExtensions())
		{ Log(LogLevel::Warning, "Note that not all requested Logical device extensions are supported"); }
	}
	pvrvk::Device outDevice = physicalDevice->createDevice(deviceInfo);
	outDevice->retrieveQueues();
	// Log the retrieved queues
	Log(LogLevel::Information, "Queues Created:");
	for (uint32_t i = 0; i < queueCreateInfo.size(); ++i)
	{
		const uint32_t familyIndex = queueCreateInfo[i].getQueueFamilyIndex();
		// BUG FIX: the original passed the loop index 'i' as the queue family index to
		// getSurfaceSupport and indexed queueCreateInfos[i] without bounding against
		// numQueueCreateInfos; use the created queue's actual family index and guard the bound.
		bool supportsWsi = (i < numQueueCreateInfos) && queueCreateInfos[i].surface && physicalDevice->getSurfaceSupport(familyIndex, queueCreateInfos[i].surface);
		Log(LogLevel::Information, "\t queue Family: %d ( %s%s%s%s%s) \tqueue count: %d", familyIndex,
			((queueFamilyProperties[familyIndex].getQueueFlags() & pvrvk::QueueFlags::e_GRAPHICS_BIT) != 0) ? graphics : nothing,
			((queueFamilyProperties[familyIndex].getQueueFlags() & pvrvk::QueueFlags::e_COMPUTE_BIT) != 0) ? compute : nothing,
			((queueFamilyProperties[familyIndex].getQueueFlags() & pvrvk::QueueFlags::e_TRANSFER_BIT) != 0) ? transfer : nothing,
			((queueFamilyProperties[familyIndex].getQueueFlags() & pvrvk::QueueFlags::e_SPARSE_BINDING_BIT) != 0) ? sparse : nothing,
			(supportsWsi ? present : nothing), queueCreateInfo[i].getNumQueues());
	}
	return outDevice;
}
/// Returns true when the given format can be used as a depth/stencil attachment with
/// optimal tiling on this device.
bool isSupportedDepthStencilFormat(const pvrvk::Device& device, pvrvk::Format format)
{
	const pvrvk::FormatProperties formatProperties = device->getPhysicalDevice()->getFormatProperties(format);
	return (formatProperties.getOptimalTilingFeatures() & pvrvk::FormatFeatureFlags::e_DEPTH_STENCIL_ATTACHMENT_BIT) != 0;
}
/// Chooses a supported depth/stencil format: first tries the format implied by the display
/// attributes, then falls back to the first supported entry of preferredDepthFormats (a
/// built-in preference list is used when the caller passes an empty vector). Updates
/// displayAttributes.depthBPP/stencilBPP to reflect the chosen format.
pvrvk::Format getSupportedDepthStencilFormat(const pvrvk::Device& device, pvr::DisplayAttributes& displayAttributes, std::vector<pvrvk::Format> preferredDepthFormats)
{
	// Default preference order when the caller does not supply one.
	if (preferredDepthFormats.empty())
	{
		preferredDepthFormats = {
			pvrvk::Format::e_D32_SFLOAT_S8_UINT,
			pvrvk::Format::e_D24_UNORM_S8_UINT,
			pvrvk::Format::e_D16_UNORM_S8_UINT,
			pvrvk::Format::e_D32_SFLOAT,
			pvrvk::Format::e_D16_UNORM,
			pvrvk::Format::e_X8_D24_UNORM_PACK32,
		};
	}
	const pvrvk::Format requestedFormat = getDepthStencilFormat(displayAttributes);
	pvrvk::Format chosenFormat = pvrvk::Format::e_UNDEFINED;
	// Prefer exactly what was requested when the device supports it.
	if (isSupportedDepthStencilFormat(device, requestedFormat)) { chosenFormat = requestedFormat; }
	else
	{
		// Otherwise take the first supported format from the preference list.
		for (const pvrvk::Format candidate : preferredDepthFormats)
		{
			if (isSupportedDepthStencilFormat(device, candidate))
			{
				chosenFormat = candidate;
				break;
			}
		}
		Log(LogLevel::Information, "Requested DepthStencil VkFormat %s is not supported. Falling back to %s", to_string(requestedFormat).c_str(),
			to_string(chosenFormat).c_str());
	}
	// Reflect the chosen format's bit depths back into the display attributes.
	getDepthStencilBits(chosenFormat, displayAttributes.depthBPP, displayAttributes.stencilBPP);
	Log(LogLevel::Information, "DepthStencil VkFormat: %s", to_string(chosenFormat).c_str());
	return chosenFormat;
}
/// Fills outDepthStencilImages[0..imageCount) with image views over freshly created 2D
/// depth/stencil images of the given format, extent, usage and sample count. Transient
/// attachments prefer lazily-allocated memory; all images require device-local memory.
template<typename ImageContainer>
inline static void createDepthStencilImageAndViewsHelper(pvrvk::Device& device, int32_t imageCount, pvrvk::Format depthFormat, const pvrvk::Extent2D& imageExtent,
	const pvrvk::ImageUsageFlags& imageUsageFlags, pvrvk::SampleCountFlags sampleCount, vma::Allocator* dsImageAllocator, vma::AllocationCreateFlags dsImageAllocationCreateFlags,
	ImageContainer& outDepthStencilImages)
{
	// Device-local memory is always required for the attachment.
	const pvrvk::MemoryPropertyFlags requiredFlags = pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT;
	// Transient attachments can live in lazily-allocated memory on tiler GPUs.
	const bool isTransient = (imageUsageFlags & pvrvk::ImageUsageFlags::e_TRANSIENT_ATTACHMENT_BIT) != 0;
	const pvrvk::MemoryPropertyFlags optimalFlags = isTransient ? pvrvk::MemoryPropertyFlags::e_LAZILY_ALLOCATED_BIT : pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT;
	for (int32_t imageIndex = 0; imageIndex < imageCount; ++imageIndex)
	{
		pvrvk::Image dsImage = createImage(device,
			pvrvk::ImageCreateInfo(pvrvk::ImageType::e_2D, depthFormat, pvrvk::Extent3D(imageExtent.getWidth(), imageExtent.getHeight(), 1u), imageUsageFlags, 1, 1, sampleCount),
			requiredFlags, optimalFlags, dsImageAllocator, dsImageAllocationCreateFlags);
		dsImage->setObjectName(std::string("PVRUtilsVk::Depth Stencil Image [") + std::to_string(imageIndex) + std::string("]"));
		outDepthStencilImages[imageIndex] = device->createImageView(pvrvk::ImageViewCreateInfo(dsImage));
		outDepthStencilImages[imageIndex]->setObjectName(std::string("PVRUtilsVk::Depth Stencil Image View [") + std::to_string(imageIndex) + std::string("]"));
	}
}
/// Convenience wrapper returning the created depth/stencil image views as a std::vector.
std::vector<pvrvk::ImageView> createDepthStencilImageAndViews(pvrvk::Device& device, int32_t imageCount, pvrvk::Format depthFormat, const pvrvk::Extent2D& imageExtent,
	const pvrvk::ImageUsageFlags& imageUsageFlags, pvrvk::SampleCountFlags sampleCount, vma::Allocator* dsImageAllocator, vma::AllocationCreateFlags dsImageAllocationCreateFlags)
{
	std::vector<pvrvk::ImageView> dsImageViews(imageCount);
	createDepthStencilImageAndViewsHelper(device, imageCount, depthFormat, imageExtent, imageUsageFlags, sampleCount, dsImageAllocator, dsImageAllocationCreateFlags, dsImageViews);
	return dsImageViews;
}
/// Creates the swapchain for the surface, then a matching set of depth/stencil image views
/// (one per swapchain image) sized to the swapchain's dimensions.
void createSwapchainAndDepthStencilImageAndViews(pvrvk::Device& device, const pvrvk::Surface& surface, DisplayAttributes& displayAttributes, pvrvk::Swapchain& outSwapchain,
	Multi<pvrvk::ImageView>& outDepthStencilImages, const pvrvk::ImageUsageFlags& swapchainImageUsageFlags, const pvrvk::ImageUsageFlags& dsImageUsageFlags,
	vma::Allocator* dsImageAllocator, vma::AllocationCreateFlags dsImageAllocationCreateFlags)
{
	// The swapchain must exist first so its dimensions can size the depth/stencil images.
	outSwapchain = createSwapchain(device, surface, displayAttributes, swapchainImageUsageFlags);
	const pvrvk::Format depthStencilFormat = getSupportedDepthStencilFormat(device, displayAttributes);
	std::vector<pvrvk::ImageView> dsImageViews = createDepthStencilImageAndViews(device, displayAttributes.swapLength, depthStencilFormat, outSwapchain->getDimension(),
		dsImageUsageFlags, pvrvk::SampleCountFlags::e_1_BIT, dsImageAllocator, dsImageAllocationCreateFlags);
	// Copy the views into the fixed-capacity output container, one per swap image.
	outDepthStencilImages.resize(displayAttributes.swapLength);
	std::copy_n(dsImageViews.begin(), outDepthStencilImages.size(), &outDepthStencilImages[0]);
}
/// Overload honouring caller-supplied preferred colour and depth/stencil format lists when
/// creating the swapchain and the per-swap-image depth/stencil views.
void createSwapchainAndDepthStencilImageAndViews(pvrvk::Device& device, const pvrvk::Surface& surface, DisplayAttributes& displayAttributes, pvrvk::Swapchain& outSwapchain,
	Multi<pvrvk::ImageView>& outDepthStencilImages, const std::vector<pvrvk::Format>& preferredColorFormats, const std::vector<pvrvk::Format>& preferredDepthStencilFormats,
	const pvrvk::ImageUsageFlags& swapchainImageUsageFlags, const pvrvk::ImageUsageFlags& dsImageUsageFlags, vma::Allocator* dsImageAllocator,
	vma::AllocationCreateFlags dsImageAllocationCreateFlags)
{
	// Swapchain first: its dimensions size the depth/stencil images.
	outSwapchain = createSwapchain(device, surface, displayAttributes, preferredColorFormats, swapchainImageUsageFlags);
	const pvrvk::Format depthStencilFormat = getSupportedDepthStencilFormat(device, displayAttributes, preferredDepthStencilFormats);
	// Size the output container, then let the helper fill it in place.
	outDepthStencilImages.resize(displayAttributes.swapLength);
	createDepthStencilImageAndViewsHelper(device, displayAttributes.swapLength, depthStencilFormat, outSwapchain->getDimension(), dsImageUsageFlags,
		pvrvk::SampleCountFlags::e_1_BIT, dsImageAllocator, dsImageAllocationCreateFlags, outDepthStencilImages);
}
// Creates a swapchain for the surface, choosing the surface format from the caller's
// preferred colour format list. Thin forwarder to createSwapchainHelper.
pvrvk::Swapchain createSwapchain(pvrvk::Device& device, const pvrvk::Surface& surface, pvr::DisplayAttributes& displayAttributes,
const std::vector<pvrvk::Format>& preferredColorFormats, pvrvk::ImageUsageFlags swapchainImageUsageFlags)
{
return createSwapchainHelper(device, surface, displayAttributes, swapchainImageUsageFlags, preferredColorFormats);
}
// Creates a swapchain for the surface using the helper's default colour-format preferences.
// Thin forwarder to createSwapchainHelper.
pvrvk::Swapchain createSwapchain(pvrvk::Device& device, const pvrvk::Surface& surface, pvr::DisplayAttributes& displayAttributes, pvrvk::ImageUsageFlags swapchainImageUsageFlags)
{
return createSwapchainHelper(device, surface, displayAttributes, swapchainImageUsageFlags);
}
std::vector<unsigned char> captureImageRegion(pvrvk::Queue& queue, pvrvk::CommandPool& cmdPool, pvrvk::Image& image, pvrvk::Offset3D srcOffset, pvrvk::Extent3D srcExtent,
pvrvk::Format destinationImageFormat, pvrvk::ImageLayout imageInitialLayout, pvrvk::ImageLayout imageFinalLayout, vma::Allocator* bufferAllocator, vma::Allocator* imageAllocator)
{
pvrvk::Device device = image->getDevice();
pvrvk::CommandBuffer cmdBuffer = cmdPool->allocateCommandBuffer();
// create the destination texture which does the format conversion
const pvrvk::FormatProperties& formatProps = device->getPhysicalDevice()->getFormatProperties(destinationImageFormat);
if ((formatProps.getOptimalTilingFeatures() & pvrvk::FormatFeatureFlags::e_BLIT_DST_BIT) == 0)
{ throw pvrvk::ErrorValidationFailedEXT("Screen Capture requested Image format is not supported"); }
pvrvk::Extent3D copyRegion = pvrvk::Extent3D(srcExtent.getWidth() - srcOffset.getX(), srcExtent.getHeight() - srcOffset.getY(), srcExtent.getDepth() - srcOffset.getZ());
// Create the intermediate image which will be used as the format conversion
// when copying from source image and then copied into the buffer
pvrvk::Image dstImage = createImage(device,
pvrvk::ImageCreateInfo(pvrvk::ImageType::e_2D, destinationImageFormat, copyRegion, pvrvk::ImageUsageFlags::e_TRANSFER_DST_BIT | pvrvk::ImageUsageFlags::e_TRANSFER_SRC_BIT),
pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, imageAllocator);
const pvrvk::Offset3D srcOffsets[2] = { srcOffset, pvrvk::Offset3D(srcExtent.getWidth(), srcExtent.getHeight(), srcExtent.getDepth()) };
const pvrvk::Offset3D dstOffsets[2] = { pvrvk::Offset3D(srcOffset.getX(), srcExtent.getHeight(), 0),
pvrvk::Offset3D(copyRegion.getWidth(), srcOffset.getY(), copyRegion.getDepth()) };
std::vector<unsigned char> outData;
outData.resize(static_cast<const unsigned int>(dstImage->getMemoryRequirement().getSize()));
// create the final destination buffer for reading
pvrvk::Buffer buffer = createBuffer(device, pvrvk::BufferCreateInfo(dstImage->getMemoryRequirement().getSize(), pvrvk::BufferUsageFlags::e_TRANSFER_DST_BIT),
pvrvk::MemoryPropertyFlags::e_HOST_VISIBLE_BIT, pvrvk::MemoryPropertyFlags::e_HOST_VISIBLE_BIT | pvrvk::MemoryPropertyFlags::e_DEVICE_LOCAL_BIT, bufferAllocator,
pvr::utils::vma::AllocationCreateFlags::e_MAPPED_BIT);
buffer->setObjectName("PVRUtilsVk::screenCaptureRegion::Temporary Screen Capture Buffer");
cmdBuffer->begin(pvrvk::CommandBufferUsageFlags::e_ONE_TIME_SUBMIT_BIT);
pvr::utils::beginCommandBufferDebugLabel(cmdBuffer, pvrvk::DebugUtilsLabel("PVRUtilsVk::screenCaptureRegion"));
pvrvk::ImageBlit copyRange(pvrvk::ImageSubresourceLayers(), srcOffsets, pvrvk::ImageSubresourceLayers(), dstOffsets);
// transform the layout from the color attachment to transfer src
if (imageInitialLayout != pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL) { setImageLayout(image, imageInitialLayout, pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL, cmdBuffer); }
setImageLayout(dstImage, pvrvk::ImageLayout::e_UNDEFINED, pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL, cmdBuffer);
cmdBuffer->blitImage(image, dstImage, ©Range, 1, pvrvk::Filter::e_NEAREST, pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL, pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL);
pvrvk::ImageSubresourceLayers subResource;
subResource.setAspectMask(pvrvk::ImageAspectFlags::e_COLOR_BIT);
pvrvk::BufferImageCopy region(0, 0, 0, subResource, pvrvk::Offset3D(0, 0, 0), copyRegion);
if (imageInitialLayout != pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL) { setImageLayout(image, pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL, imageFinalLayout, cmdBuffer); }
setImageLayout(dstImage, pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL, pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL, cmdBuffer);
cmdBuffer->copyImageToBuffer(dstImage, pvrvk::ImageLayout::e_TRANSFER_SRC_OPTIMAL, buffer, ®ion, 1);
pvr::utils::endCommandBufferDebugLabel(cmdBuffer);
cmdBuffer->end();
// create a fence for wait.
pvrvk::Fence fenceWait = device->createFence(pvrvk::FenceCreateFlags(0));
pvrvk::SubmitInfo submitInfo;
submitInfo.commandBuffers = &cmdBuffer;
submitInfo.numCommandBuffers = 1;
queue->submit(&submitInfo, 1, fenceWait);
fenceWait->wait(); // wait for the submit to finish so that the command buffer get destroyed properly
// map the buffer and copy the data
void* memory = 0;
unsigned char* data = nullptr;
bool unmap = false;
if (!buffer->getDeviceMemory()->isMapped())
{
memory = buffer->getDeviceMemory()->map(0, dstImage->getMemoryRequirement().getSize());
unmap = true;
}
else
{
memory = buffer->getDeviceMemory()->getMappedData();
}
data = static_cast<unsigned char*>(memory);
memcpy(outData.data(), data, static_cast<size_t>(dstImage->getMemoryRequirement().getSize()));
if (static_cast<uint32_t>(buffer->getDeviceMemory()->getMemoryFlags() & pvrvk::MemoryPropertyFlags::e_HOST_COHERENT_BIT) == 0)
{ buffer->getDeviceMemory()->invalidateRange(0, dstImage->getMemoryRequirement().getSize()); }
if (unmap) { buffer->getDeviceMemory()->unmap(); }
return outData;
}
/// Takes a screenshot of the given swapchain image and writes it to a TGA file.
/// Returns false (after logging a warning) when the swapchain images do not support
/// being used as a transfer source; returns true once the image has been saved.
bool takeScreenshot(pvrvk::Queue& queue, pvrvk::CommandPool& cmdPool, pvrvk::Swapchain& swapchain, const uint32_t swapIndex, const std::string& screenshotFileName,
	vma::Allocator* bufferAllocator, vma::Allocator* imageAllocator, const uint32_t screenshotScale)
{
	pvr::utils::beginQueueDebugLabel(queue, pvrvk::DebugUtilsLabel("PVRUtilsVk::takeScreenshot"));
	if (!swapchain->supportsUsage(pvrvk::ImageUsageFlags::e_TRANSFER_SRC_BIT))
	{
		Log(LogLevel::Warning, "Could not take screenshot as the swapchain does not support TRANSFER_SRC_BIT");
		// BUGFIX: previously this early return left the queue debug label region open,
		// unbalancing the begin/end pair.
		pvr::utils::endQueueDebugLabel(queue);
		return false;
	}
	// force the queue to wait idle prior to taking a copy of the swap chain image
	queue->waitIdle();
	saveImage(queue, cmdPool, swapchain->getImage(swapIndex), pvrvk::ImageLayout::e_PRESENT_SRC_KHR, pvrvk::ImageLayout::e_PRESENT_SRC_KHR, screenshotFileName, bufferAllocator,
		imageAllocator, screenshotScale);
	pvr::utils::endQueueDebugLabel(queue);
	return true;
}
/// Reads back the entire contents of the given image via captureImageRegion and
/// writes the pixels out to a TGA file at 'filename', optionally scaled.
void saveImage(pvrvk::Queue& queue, pvrvk::CommandPool& cmdPool, pvrvk::Image& image, const pvrvk::ImageLayout imageInitialLayout, const pvrvk::ImageLayout imageFinalLayout,
	const std::string& filename, vma::Allocator* bufferAllocator, vma::Allocator* imageAllocator, const uint32_t screenshotScale)
{
	// sRGB sources are captured into an SRGB destination; linear sources into UNORM.
	const pvrvk::Format destinationImageFormat = pvrvk::isSrgb(image->getFormat()) ? pvrvk::Format::e_B8G8R8A8_SRGB : pvrvk::Format::e_B8G8R8A8_UNORM;
	const pvrvk::Extent3D wholeImage(image->getExtent().getWidth(), image->getExtent().getHeight(), image->getExtent().getDepth());
	std::vector<unsigned char> imageData = captureImageRegion(
		queue, cmdPool, image, pvrvk::Offset3D(0, 0, 0), wholeImage, destinationImageFormat, imageInitialLayout, imageFinalLayout, bufferAllocator, imageAllocator);
	Log(LogLevel::Information, "Writing TGA screenshot, filename %s.", filename.c_str());
	writeTGA(filename.c_str(), image->getExtent().getWidth(), image->getExtent().getHeight(), imageData.data(), 4, screenshotScale);
}
// Records commands into 'cbuffTransfer' (which must be valid and in the recording state)
// that upload 'numUpdateInfos' regions of host data into 'image', using one host-visible
// staging buffer per region.
// Each region is transitioned UNDEFINED -> TRANSFER_DST_OPTIMAL, filled via
// copyBufferToImage, then transitioned TRANSFER_DST_OPTIMAL -> 'layout'.
// NOTE(review): 'stagingBuffers' goes out of scope when this function returns —
// presumably the recorded command buffer keeps the underlying resources alive until
// execution completes; verify against pvrvk handle semantics.
void updateImage(pvrvk::Device& device, pvrvk::CommandBufferBase cbuffTransfer, ImageUpdateInfo* updateInfos, uint32_t numUpdateInfos, pvrvk::Format format,
pvrvk::ImageLayout layout, bool isCubeMap, pvrvk::Image& image, vma::Allocator* bufferAllocator)
{
using namespace vma;
if (!(cbuffTransfer && cbuffTransfer->isRecording())) { throw pvrvk::ErrorValidationFailedEXT("updateImage - Commandbuffer must be valid and in recording state"); }
// Cube maps store each face as a separate array slice.
uint32_t numFace = (isCubeMap ? 6 : 1);
uint32_t hwSlice;
std::vector<pvrvk::Buffer> stagingBuffers;
{
pvr::utils::beginCommandBufferDebugLabel(cbuffTransfer, pvrvk::DebugUtilsLabel("PVRUtilsVk::updateImage"));
stagingBuffers.resize(numUpdateInfos);
pvrvk::BufferImageCopy imgcp = {};
for (uint32_t i = 0; i < numUpdateInfos; ++i)
{
const ImageUpdateInfo& mipLevelUpdate = updateInfos[i];
assertion(mipLevelUpdate.data && mipLevelUpdate.dataSize, "Data and Data size must be valid");
// Flatten (arrayIndex, cubeFace) into the hardware array-slice index.
hwSlice = mipLevelUpdate.arrayIndex * numFace + mipLevelUpdate.cubeFace;
// Will write the switch layout commands from the universal queue to the transfer queue to both the
// transfer command buffer and the universal command buffer.
// Queue family indices of -1 mean no ownership transfer is performed here.
setImageLayoutAndQueueFamilyOwnership(pvrvk::CommandBufferBase(), cbuffTransfer, static_cast<uint32_t>(-1), static_cast<uint32_t>(-1), pvrvk::ImageLayout::e_UNDEFINED,
pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL, image, mipLevelUpdate.mipLevel, 1, hwSlice, 1, inferAspectFromFormat(format));
// Create a staging buffer to use as the source of a copyBufferToImage
stagingBuffers[i] = createBuffer(device, pvrvk::BufferCreateInfo(mipLevelUpdate.dataSize, pvrvk::BufferUsageFlags::e_TRANSFER_SRC_BIT),
pvrvk::MemoryPropertyFlags::e_HOST_VISIBLE_BIT, pvrvk::MemoryPropertyFlags::e_HOST_VISIBLE_BIT, bufferAllocator, vma::AllocationCreateFlags::e_MAPPED_BIT);
stagingBuffers[i]->setObjectName("PVRUtilsVk::updateImage::Temporary Image Upload Buffer");
imgcp.setImageOffset(pvrvk::Offset3D(mipLevelUpdate.offsetX, mipLevelUpdate.offsetY, mipLevelUpdate.offsetZ));
imgcp.setImageExtent(pvrvk::Extent3D(mipLevelUpdate.imageWidth, mipLevelUpdate.imageHeight, 1));
imgcp.setImageSubresource(pvrvk::ImageSubresourceLayers(inferAspectFromFormat(format), updateInfos[i].mipLevel, hwSlice, 1));
// Buffer row length / image height describe the packing of the source data,
// which may be padded relative to the region being written.
imgcp.setBufferRowLength(mipLevelUpdate.dataWidth);
imgcp.setBufferImageHeight(mipLevelUpdate.dataHeight);
const uint8_t* srcData;
uint32_t srcDataSize;
srcData = static_cast<const uint8_t*>(mipLevelUpdate.data);
srcDataSize = mipLevelUpdate.dataSize;
// Copy the host data into the (persistently mapped) staging buffer.
updateHostVisibleBuffer(stagingBuffers[i], srcData, 0, srcDataSize, true);
cbuffTransfer->copyBufferToImage(stagingBuffers[i], image, pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL, 1, &imgcp);
// CAUTION: We swapped src and dst queue families as, if there was no ownership transfer, no problem - queue families
// will be ignored.
// Will write the switch layout commands from the transfer queue to the universal queue to both the
// transfer command buffer and the universal command buffer
setImageLayoutAndQueueFamilyOwnership(cbuffTransfer, pvrvk::CommandBufferBase(), static_cast<uint32_t>(-1), static_cast<uint32_t>(-1),
pvrvk::ImageLayout::e_TRANSFER_DST_OPTIMAL, layout, image, mipLevelUpdate.mipLevel, 1, hwSlice, 1, inferAspectFromFormat(format));
}
pvr::utils::endCommandBufferDebugLabel(cbuffTransfer);
}
}
/// Generates a quad mesh lying in the XZ plane (normals facing +Y), centred on the
/// origin, 'width' along X and 'depth' along Z, as two 32-bit-indexed triangles.
/// Vertex layout is interleaved: POSITION (3 floats), then optionally NORMAL
/// (3 floats) and UV0 (2 floats) depending on the generate* flags.
void create3dPlaneMesh(uint32_t width, uint32_t depth, bool generateTexCoords, bool generateNormalCoords, assets::Mesh& outMesh)
{
	const float halfWidth = width * .5f;
	const float halfDepth = depth * .5f;
	glm::vec3 normal[4] = { glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, 1.0f, 0.0f) };
	glm::vec2 texCoord[4] = {
		glm::vec2(0.0f, 1.0f),
		glm::vec2(0.0f, 0.0f),
		glm::vec2(1.0f, 0.0f),
		glm::vec2(1.0f, 1.0f),
	};
	glm::vec3 pos[4] = { glm::vec3(-halfWidth, 0.0f, -halfDepth), glm::vec3(-halfWidth, 0.0f, halfDepth), glm::vec3(halfWidth, 0.0f, halfDepth), glm::vec3(halfWidth, 0.0f, -halfDepth) };
	uint32_t indexData[] = { 0, 1, 2, 0, 2, 3 };
	float vertData[32]; // worst case: 4 vertices * (pos 3 + normal 3 + uv 2) floats
	uint32_t offset = 0; // running float count while interleaving
	for (uint32_t i = 0; i < 4; ++i)
	{
		memcpy(&vertData[offset], &pos[i], sizeof(pos[i]));
		offset += 3;
		if (generateNormalCoords)
		{
			memcpy(&vertData[offset], &normal[i], sizeof(normal[i]));
			offset += 3;
		}
		if (generateTexCoords)
		{
			memcpy(&vertData[offset], &texCoord[i], sizeof(texCoord[i]));
			offset += 2;
		}
	}
	uint32_t stride = sizeof(glm::vec3) + (generateNormalCoords ? sizeof(glm::vec3) : 0) + (generateTexCoords ? sizeof(glm::vec2) : 0);
	// BUGFIX: only upload the bytes actually written ('offset' floats = 4 * stride);
	// previously the whole worst-case array (sizeof(vertData)) was uploaded, including
	// uninitialized floats whenever normals or texcoords were disabled.
	outMesh.addData(reinterpret_cast<const uint8_t*>(vertData), offset * static_cast<uint32_t>(sizeof(float)), stride, 0);
	outMesh.addFaces(reinterpret_cast<const uint8_t*>(indexData), sizeof(indexData), IndexType::IndexType32Bit);
	offset = 0; // reused as the byte offset of each attribute within the interleaved vertex
	outMesh.addVertexAttribute("POSITION", DataType::Float32, 3, offset, 0);
	offset += sizeof(float) * 3;
	if (generateNormalCoords)
	{
		outMesh.addVertexAttribute("NORMAL", DataType::Float32, 3, offset, 0);
		// BUGFIX: NORMAL occupies 3 floats; the offset previously advanced by only
		// sizeof(float) * 2, which made UV0 overlap the last normal component.
		offset += sizeof(float) * 3;
	}
	if (generateTexCoords) { outMesh.addVertexAttribute("UV0", DataType::Float32, 2, offset, 0); }
	outMesh.setPrimitiveType(PrimitiveTopology::TriangleList);
	outMesh.setStride(0, stride);
	outMesh.setNumFaces(ARRAY_SIZE(indexData) / 3);
	outMesh.setNumVertices(ARRAY_SIZE(pos));
}
/// Records an image layout transition and, when srcQueueFamily/dstQueueFamily differ,
/// a queue family ownership transfer for the given subresource range.
/// For an ownership transfer the same barrier must be recorded on BOTH command buffers
/// (release on src, acquire on dst), so both must be valid; otherwise the barrier is a
/// plain layout transition recorded on whichever command buffer(s) are non-null.
void setImageLayoutAndQueueFamilyOwnership(pvrvk::CommandBufferBase srccmd, pvrvk::CommandBufferBase dstcmd, uint32_t srcQueueFamily, uint32_t dstQueueFamily,
	pvrvk::ImageLayout oldLayout, pvrvk::ImageLayout newLayout, pvrvk::Image& image, uint32_t baseMipLevel, uint32_t numMipLevels, uint32_t baseArrayLayer, uint32_t numArrayLayers,
	pvrvk::ImageAspectFlags aspect)
{
	bool multiQueue = isMultiQueue(srcQueueFamily, dstQueueFamily);
	// No operation required: We don't have a layout transition, and we don't have a queue family change.
	if (newLayout == oldLayout && !multiQueue) { return; } // No transition required
	if (multiQueue)
	{
		assertion(srccmd && dstcmd,
			"Vulkan Utils setImageLayoutAndQueueOwnership: An ownership change was required, "
			"but at least one null command buffer was passed as parameter");
	}
	else
	{
		// BUGFIX: the previous message described the opposite situation. This assertion
		// fires when BOTH command buffers are null, i.e. there is nowhere to record the barrier.
		assertion(srccmd || dstcmd,
			"Vulkan Utils setImageLayoutAndQueueOwnership: No ownership change was required, "
			"but both command buffers passed as parameters were null");
	}
	pvrvk::MemoryBarrierSet barriers;
	pvrvk::ImageMemoryBarrier imageMemBarrier;
	imageMemBarrier.setOldLayout(oldLayout);
	imageMemBarrier.setNewLayout(newLayout);
	imageMemBarrier.setImage(image);
	imageMemBarrier.setSubresourceRange(pvrvk::ImageSubresourceRange(aspect, baseMipLevel, numMipLevels, baseArrayLayer, numArrayLayers));
	// Default to VK_QUEUE_FAMILY_IGNORED (-1) unless an actual ownership transfer is requested.
	imageMemBarrier.setSrcQueueFamilyIndex(static_cast<uint32_t>(-1));
	imageMemBarrier.setDstQueueFamilyIndex(static_cast<uint32_t>(-1));
	imageMemBarrier.setSrcAccessMask(getAccesFlagsFromLayout(oldLayout));
	imageMemBarrier.setDstAccessMask(getAccesFlagsFromLayout(newLayout));
	if (multiQueue)
	{
		imageMemBarrier.setSrcQueueFamilyIndex(srcQueueFamily);
		imageMemBarrier.setDstQueueFamilyIndex(dstQueueFamily);
	}
	// BUGFIX: add the barrier to the set exactly once. Previously it was added again in the
	// dstcmd branch, so when both command buffers were valid the second pipelineBarrier
	// recorded the same barrier twice.
	barriers.clearAllBarriers();
	barriers.addBarrier(imageMemBarrier);
	// Support any one of the command buffers being NOT null - either first or second is fine.
	if (srccmd) { srccmd->pipelineBarrier(pvrvk::PipelineStageFlags::e_ALL_COMMANDS_BIT, pvrvk::PipelineStageFlags::e_ALL_COMMANDS_BIT, barriers, true); }
	if (dstcmd) { dstcmd->pipelineBarrier(pvrvk::PipelineStageFlags::e_ALL_COMMANDS_BIT, pvrvk::PipelineStageFlags::e_ALL_COMMANDS_BIT, barriers, true); }
}
namespace {
// Builds a single human-readable string from a debug utils messenger callback payload:
// a severity/type header line, followed by any associated Vulkan objects, command
// buffer labels and queue labels reported alongside the message.
std::string debugUtilsMessengerCallbackToString(
	VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT msgTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData)
{
	const std::string severityStr = pvrvk::to_string(static_cast<pvrvk::DebugUtilsMessageSeverityFlagsEXT>(messageSeverity));
	const std::string typeStr = pvrvk::to_string(static_cast<pvrvk::DebugUtilsMessageTypeFlagsEXT>(msgTypes));
	std::string message = pvr::strings::createFormatted("%s (%s) - ID: %i, Name: \"%s\":\n\tMESSAGE: %s", severityStr.c_str(), typeStr.c_str(),
		pCallbackData->messageIdNumber, pCallbackData->pMessageIdName, pCallbackData->pMessage);
	// Associated Vulkan objects, if any were provided with the message.
	if (pCallbackData->objectCount > 0)
	{
		message += "\n";
		message += pvr::strings::createFormatted("\tAssociated Objects - (%u)\n", pCallbackData->objectCount);
		for (uint32_t objectIndex = 0; objectIndex < pCallbackData->objectCount; ++objectIndex)
		{
			const std::string objectTypeStr = pvrvk::to_string(static_cast<pvrvk::ObjectType>(pCallbackData->pObjects[objectIndex].objectType));
			message += pvr::strings::createFormatted("\t\tObject[%u] - Type %s, Value %p, Name \"%s\"\n", objectIndex, objectTypeStr.c_str(),
				(void*)(pCallbackData->pObjects[objectIndex].objectHandle), pCallbackData->pObjects[objectIndex].pObjectName);
		}
	}
	// Command buffer debug labels active when the message was generated.
	if (pCallbackData->cmdBufLabelCount > 0)
	{
		message += "\n";
		message += pvr::strings::createFormatted("\tAssociated Command Buffer Labels - (%u)\n", pCallbackData->cmdBufLabelCount);
		for (uint32_t labelIndex = 0; labelIndex < pCallbackData->cmdBufLabelCount; ++labelIndex)
		{
			const auto& label = pCallbackData->pCmdBufLabels[labelIndex];
			message += pvr::strings::createFormatted(
				"\t\tCommand Buffer Label[%u] - %s, Color: {%f, %f, %f, %f}\n", labelIndex, label.pLabelName, label.color[0], label.color[1], label.color[2], label.color[3]);
		}
	}
	// Queue debug labels active when the message was generated.
	if (pCallbackData->queueLabelCount > 0)
	{
		message += "\n";
		message += pvr::strings::createFormatted("\tAssociated Queue Labels - (%u)\n", pCallbackData->queueLabelCount);
		for (uint32_t labelIndex = 0; labelIndex < pCallbackData->queueLabelCount; ++labelIndex)
		{
			const auto& label = pCallbackData->pQueueLabels[labelIndex];
			message += pvr::strings::createFormatted(
				"\t\tQueue Label[%u] - %s, Color: {%f, %f, %f, %f}\n", labelIndex, label.pLabelName, label.color[0], label.color[1], label.color[2], label.color[3]);
		}
	}
	return message;
}
} // namespace
// An application defined callback used as pfnCallback in the VkDebugUtilsMessengerCreateInfoEXT
// passed to vkCreateDebugUtilsMessengerEXT. Throws pvrvk::ErrorValidationFailedEXT (carrying a
// fully formatted description of the message) whenever the severity contains the ERROR bit;
// all other messages are ignored.
VKAPI_ATTR VkBool32 VKAPI_CALL throwOnErrorDebugUtilsMessengerCallback(
	VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT msgTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData)
{
	(void)pUserData;
	const pvrvk::DebugUtilsMessageSeverityFlagsEXT severityFlags = static_cast<pvrvk::DebugUtilsMessageSeverityFlagsEXT>(messageSeverity);
	const bool isError = (severityFlags & (pvrvk::DebugUtilsMessageSeverityFlagsEXT::e_ERROR_BIT_EXT)) != pvrvk::DebugUtilsMessageSeverityFlagsEXT::e_NONE;
	if (isError) { throw pvrvk::ErrorValidationFailedEXT(debugUtilsMessengerCallbackToString(messageSeverity, msgTypes, pCallbackData)); }
	return VK_FALSE;
}
// The application defined callback used as pfnCallback in the VkDebugUtilsMessengerCreateInfoEXT
// passed to vkCreateDebugUtilsMessengerEXT. Logs every message, at a log level mapped from the
// Vulkan message severity.
VKAPI_ATTR VkBool32 VKAPI_CALL logMessageDebugUtilsMessengerCallback(
	VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT msgTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData)
{
	(void)pUserData;
	// BUGFIX: pass the callback text as a "%s" argument rather than as the format string itself.
	// Validation messages can legitimately contain '%' characters, which were previously
	// interpreted as printf-style conversion specifiers (cf. the explicit "%s" usage in
	// logMessageDebugReportCallback).
	Log(mapDebugUtilsMessageSeverityFlagsToLogLevel(static_cast<pvrvk::DebugUtilsMessageSeverityFlagsEXT>(messageSeverity)), "%s",
		debugUtilsMessengerCallbackToString(messageSeverity, msgTypes, pCallbackData).c_str());
	return VK_FALSE;
}
// The application defined callback used as pfnCallback in the VkDebugReportCallbackCreateInfoEXT
// passed to vkCreateDebugReportCallbackEXT. Throws pvrvk::ErrorValidationFailedEXT whenever the
// report flags contain the ERROR bit; all other reports are ignored.
VKAPI_ATTR VkBool32 VKAPI_CALL throwOnErrorDebugReportCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location,
	int32_t messageCode, const char* pLayerPrefix, const char* pMessage, void* pUserData)
{
	(void)object;
	(void)location;
	(void)messageCode;
	(void)pLayerPrefix;
	(void)pUserData;
	const pvrvk::DebugReportFlagsEXT reportFlags = static_cast<pvrvk::DebugReportFlagsEXT>(flags);
	// Only ERROR-flagged reports are escalated to an exception.
	if ((reportFlags & (pvrvk::DebugReportFlagsEXT::e_ERROR_BIT_EXT)) != pvrvk::DebugReportFlagsEXT::e_NONE)
	{
		const std::string objectTypeStr = pvrvk::to_string(static_cast<pvrvk::DebugReportObjectTypeEXT>(objectType));
		throw pvrvk::ErrorValidationFailedEXT(std::string(objectTypeStr + std::string(". VULKAN_LAYER_VALIDATION: ") + pMessage));
	}
	return VK_FALSE;
}
// The application defined callback used as pfnCallback in the VkDebugReportCallbackCreateInfoEXT
// passed to vkCreateDebugReportCallbackEXT. Logs the lower layer's message, prefixed with the
// stringified object type, at a log level mapped from the report flags.
VKAPI_ATTR VkBool32 VKAPI_CALL logMessageDebugReportCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location,
	int32_t messageCode, const char* pLayerPrefix, const char* pMessage, void* pUserData)
{
	(void)object;
	(void)location;
	(void)messageCode;
	(void)pLayerPrefix;
	(void)pUserData;
	// Map the VkDebugReportFlagsEXT to a suitable log level, build the format string with the
	// stringified VkDebugReportObjectTypeEXT, and hand the message off to the logger.
	const auto logLevel = mapDebugReportFlagsToLogLevel(static_cast<pvrvk::DebugReportFlagsEXT>(flags));
	const std::string formatString = std::string(pvrvk::to_string(static_cast<pvrvk::DebugReportObjectTypeEXT>(objectType)) + std::string(". VULKAN_LAYER_VALIDATION: %s"));
	Log(logLevel, formatString.c_str(), pMessage);
	return VK_FALSE;
}
/// Creates and returns a Vulkan instance.
/// - Queries the highest supported instance API version (falling back to 1.0 when
///   vkEnumerateInstanceVersion is unavailable).
/// - Filters the requested layers against the supported ones; when
///   VK_LAYER_LUNARG_standard_validation is requested but unsupported (and
///   VK_LAYER_KHRONOS_validation is also unsupported), expands it into its component
///   layers where those are individually available.
/// - Filters the requested extensions, preferring VK_EXT_debug_utils over
///   VK_EXT_debug_report for debug callbacks, and enabling GPU-assisted validation
///   when VK_EXT_validation_features is supported.
/// - Logs the supported/enabled layers and extensions and the available physical devices.
pvrvk::Instance createInstance(const std::string& applicationName, VulkanVersion version, const InstanceExtensions& instanceExtensions, const InstanceLayers& instanceLayers)
{
	pvrvk::InstanceCreateInfo instanceInfo;
	pvrvk::ApplicationInfo appInfo;
	appInfo.setApplicationName(applicationName);
	appInfo.setApplicationVersion(1);
	appInfo.setEngineName("PVRVk");
	appInfo.setEngineVersion(0);
	// Retrieve the vulkan bindings
	uint32_t major = -1;
	uint32_t minor = -1;
	uint32_t patch = -1;
	// If a valid function pointer for vkEnumerateInstanceVersion cannot be retrieved then Vulkan only 1.0 is supported by the implementation otherwise we can use
	// vkEnumerateInstanceVersion to determine the api version supported.
	if (pvrvk::getVkBindings().vkEnumerateInstanceVersion)
	{
		uint32_t supportedApiVersion;
		pvrvk::getVkBindings().vkEnumerateInstanceVersion(&supportedApiVersion);
		major = VK_VERSION_MAJOR(supportedApiVersion);
		minor = VK_VERSION_MINOR(supportedApiVersion);
		patch = VK_VERSION_PATCH(supportedApiVersion);
		Log(LogLevel::Information, "The function pointer for 'vkEnumerateInstanceVersion' was valid. Supported instance version: ([%d].[%d].[%d]).", major, minor, patch);
	}
	else
	{
		major = 1;
		minor = 0;
		patch = 0;
		Log(LogLevel::Information, "Could not find a function pointer for 'vkEnumerateInstanceVersion'. Setting instance version to: ([%d].[%d].[%d]).", major, minor, patch);
	}
	// Enumerate the supported instance extensions and layers up front for filtering and logging.
	std::vector<pvrvk::ExtensionProperties> extensionProperties;
	pvrvk::Extensions::enumerateInstanceExtensions(extensionProperties);
	std::vector<pvrvk::LayerProperties> layerProperties;
	pvrvk::Layers::enumerateInstanceLayers(layerProperties);
	if (instanceLayers.getNumLayers())
	{
		pvrvk::VulkanLayerList supportedLayers = pvrvk::Layers::filterLayers(layerProperties, instanceLayers);
		std::string standardValidationLayerString = "VK_LAYER_LUNARG_standard_validation";
		bool requestedStandardValidation = instanceLayers.containsLayer(standardValidationLayerString);
		bool supportsStandardValidation = supportedLayers.containsLayer(standardValidationLayerString);
		bool supportsKhronosValidation = supportedLayers.containsLayer("VK_LAYER_KHRONOS_validation");
		uint32_t standardValidationRequiredIndex = -1;
		// This code is to cover cases where VK_LAYER_LUNARG_standard_validation is requested but is not supported, where on some platforms the
		// component layers enabled via VK_LAYER_LUNARG_standard_validation may still be supported even though VK_LAYER_LUNARG_standard_validation is not.
		// Only perform the expansion if VK_LAYER_LUNARG_standard_validation is requested and not supported and the newer equivalent layer VK_LAYER_KHRONOS_validation is also not supported
		if (requestedStandardValidation && !supportsStandardValidation && !supportsKhronosValidation)
		{
			for (auto it = layerProperties.begin(); !supportsStandardValidation && it != layerProperties.end(); ++it)
			{
				supportsStandardValidation = !strcmp(it->getLayerName(), standardValidationLayerString.c_str());
			}
			if (!supportsStandardValidation)
			{
				// Find the position of VK_LAYER_LUNARG_standard_validation within the REQUESTED layer list.
				// BUGFIX: the loop bound previously used layerProperties.size() while indexing
				// instanceLayers.getLayer(i), risking an out-of-range access when more layers are
				// supported than were requested.
				for (uint32_t i = 0; standardValidationRequiredIndex == static_cast<uint32_t>(-1) && i < instanceLayers.getNumLayers(); ++i)
				{
					if (!strcmp(instanceLayers.getLayer(i).getName().c_str(), standardValidationLayerString.c_str())) { standardValidationRequiredIndex = i; }
				}
				for (uint32_t j = 0; j < instanceLayers.getNumLayers(); ++j)
				{
					if (standardValidationRequiredIndex == j && !supportsStandardValidation)
					{
						// The individual layers that VK_LAYER_LUNARG_standard_validation is composed of.
						const char* stdValComponents[] = { "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker",
							"VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects" };
						for (uint32_t k = 0; k < sizeof(stdValComponents) / sizeof(stdValComponents[0]); ++k)
						{
							for (uint32_t i = 0; i < layerProperties.size(); ++i)
							{
								if (!strcmp(stdValComponents[k], layerProperties[i].getLayerName()))
								{
									supportedLayers.addLayer(pvrvk::VulkanLayer(std::string(stdValComponents[k])));
									break;
								}
							}
						}
					}
				}
				// filter the layers again checking for support for the component layers enabled via VK_LAYER_LUNARG_standard_validation
				supportedLayers = pvrvk::Layers::filterLayers(layerProperties, supportedLayers);
			}
		}
		instanceInfo.setLayerList(supportedLayers);
		// For each layer retrieve each of the extensions it provides implementations for and add it into the main list
		for (uint32_t i = 0; i < instanceInfo.getLayerList().getNumLayers(); ++i)
		{
			std::vector<pvrvk::ExtensionProperties> perLayerExtensionProperties;
			pvrvk::Extensions::enumerateInstanceExtensions(perLayerExtensionProperties, instanceInfo.getLayerList().getLayer(i).getName());
			extensionProperties.insert(extensionProperties.end(), perLayerExtensionProperties.begin(), perLayerExtensionProperties.end());
		}
	}
	if (instanceExtensions.getNumExtensions())
	{
		pvrvk::VulkanExtensionList supportedRequestedExtensions = pvrvk::Extensions::filterExtensions(extensionProperties, instanceExtensions);
		// Determine whether VK_EXT_debug_utils is supported
		bool debugUtilsSupported = supportedRequestedExtensions.containsExtension(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
		if (debugUtilsSupported)
		{
			pvrvk::DebugUtilsMessengerCreateInfo debugUtilsMessengerCreateInfo(
				pvrvk::DebugUtilsMessageSeverityFlagsEXT::e_ALL_BITS, pvrvk::DebugUtilsMessageTypeFlagsEXT::e_ALL_BITS, pvr::utils::logMessageDebugUtilsMessengerCallback);
			instanceInfo.setDebugUtilsMessengerCreateInfo(debugUtilsMessengerCreateInfo);
		}
		// Determine whether VK_EXT_debug_report is supported
		bool debugReportSupported = supportedRequestedExtensions.containsExtension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
		// If VK_EXT_debug_utils is supported then remove VK_EXT_debug_report from the list of extension to enable therefore we are prioritising use of VK_EXT_debug_utils
		if (debugUtilsSupported && debugReportSupported)
		{
			Log(LogLevel::Information, "VK_EXT_debug_utils and VK_EXT_debug_report are both supported. We will be using VK_EXT_debug_utils.");
			supportedRequestedExtensions.removeExtension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
		}
		// Determine whether VK_EXT_validation_features is supported
		bool validationFeaturesSupported = supportedRequestedExtensions.containsExtension(VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME);
		if (validationFeaturesSupported)
		{
			pvrvk::ValidationFeatures validationFeatures;
			validationFeatures.addEnabledValidationFeature(pvrvk::ValidationFeatureEnableEXT::e_GPU_ASSISTED_EXT);
			validationFeatures.addEnabledValidationFeature(pvrvk::ValidationFeatureEnableEXT::e_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT);
			// sets a list of validation features to enable when creating the instance
			instanceInfo.setValidationFeatures(validationFeatures);
		}
		instanceInfo.setExtensionList(supportedRequestedExtensions);
	}
	Log(LogLevel::Information, "Supported Instance Extensions:");
	for (uint32_t i = 0; i < static_cast<uint32_t>(extensionProperties.size()); ++i)
	{
		Log(LogLevel::Information, "\t%s : version [%u]", extensionProperties[i].getExtensionName(), extensionProperties[i].getSpecVersion());
	}
	if (instanceExtensions.getNumExtensions())
	{
		Log(LogLevel::Information, "Supported Instance Extensions to be Enabled:");
		for (uint32_t i = 0; i < static_cast<uint32_t>(instanceInfo.getExtensionList().getNumExtensions()); ++i)
		{
			Log(LogLevel::Information, "\t%s : version [%u]", instanceInfo.getExtensionList().getExtension(i).getName().c_str(),
				instanceInfo.getExtensionList().getExtension(i).getSpecVersion());
		}
	}
	Log(LogLevel::Information, "Supported Instance Layers:");
	for (uint32_t i = 0; i < static_cast<uint32_t>(layerProperties.size()); ++i)
	{
		Log(LogLevel::Information, "\t%s : Spec version [%u], Implementation version [%u]", layerProperties[i].getLayerName(), layerProperties[i].getSpecVersion(),
			layerProperties[i].getImplementationVersion());
	}
	if (instanceLayers.getNumLayers())
	{
		Log(LogLevel::Information, "Supported Instance Layers to be Enabled:");
		for (uint32_t i = 0; i < instanceInfo.getLayerList().getNumLayers(); ++i)
		{
			// BUGFIX: the second value is the implementation version; the label previously read "Spec version" twice.
			Log(LogLevel::Information, "\t%s : Spec version [%u], Implementation version [%u]", instanceInfo.getLayerList().getLayer(i).getName().c_str(),
				instanceInfo.getLayerList().getLayer(i).getSpecVersion(), instanceInfo.getLayerList().getLayer(i).getImplementationVersion());
		}
	}
	// NOTE(review): the 'version' parameter is overwritten with the detected instance version,
	// so any caller-requested version is ignored - confirm this is intended.
	version = VulkanVersion(major, minor, patch);
	appInfo.setApiVersion(version.toVulkanVersion());
	instanceInfo.setApplicationInfo(appInfo);
	pvrvk::Instance outInstance = pvrvk::createInstance(instanceInfo);
	outInstance->retrievePhysicalDevices();
	const pvrvk::ApplicationInfo& instanceAppInfo = outInstance->getCreateInfo().getApplicationInfo();
	Log(LogLevel::Information, "Created Vulkan Instance:");
	Log(LogLevel::Information, "	Application Name: %s.", instanceAppInfo.getApplicationName().c_str());
	Log(LogLevel::Information, "	Application Version: %d.", instanceAppInfo.getApplicationVersion());
	Log(LogLevel::Information, "	Engine Name: %s.", instanceAppInfo.getEngineName().c_str());
	Log(LogLevel::Information, "	Engine Version: %d.", instanceAppInfo.getEngineVersion());
	Log(LogLevel::Information, "	Version: %d / ([%d].[%d].[%d]).", instanceAppInfo.getApiVersion(), major, minor, patch);
	const std::vector<pvrvk::PhysicalDevice>& physicalDevices = outInstance->getPhysicalDevices();
	Log(LogLevel::Information, "Supported Vulkan Physical devices:");
	for (uint32_t i = 0; i < physicalDevices.size(); ++i)
	{
		pvrvk::PhysicalDeviceProperties physicalDeviceProperties = physicalDevices[i]->getProperties();
		uint32_t deviceMajor = VK_VERSION_MAJOR(physicalDeviceProperties.getApiVersion());
		uint32_t deviceMinor = VK_VERSION_MINOR(physicalDeviceProperties.getApiVersion());
		uint32_t devicePatch = VK_VERSION_PATCH(physicalDeviceProperties.getApiVersion());
		Log(LogLevel::Information, "	Device Name: %s.", physicalDeviceProperties.getDeviceName());
		Log(LogLevel::Information, "	Device ID: 0x%X.", physicalDeviceProperties.getDeviceID());
		Log(LogLevel::Information, "	Api Version Supported: %d / ([%d].[%d].[%d]).", physicalDeviceProperties.getApiVersion(), deviceMajor, deviceMinor, devicePatch);
		Log(LogLevel::Information, "	Device Type: %s.", pvrvk::to_string(physicalDeviceProperties.getDeviceType()).c_str());
		Log(LogLevel::Information, "	Driver version: 0x%X.", physicalDeviceProperties.getDriverVersion());
		Log(LogLevel::Information, "	Vendor ID: %d.", physicalDeviceProperties.getVendorID());
		Log(LogLevel::Information, "	Memory Configuration:");
		auto memprop = physicalDevices[i]->getMemoryProperties();
		for (uint32_t heapIdx = 0; heapIdx < memprop.getMemoryHeapCount(); ++heapIdx)
		{
			auto heap = memprop.getMemoryHeaps()[heapIdx];
			std::string s = to_string(heap.getFlags());
			// BUGFIX: the heap size in MB requires dividing by (1024 * 1024); the previous
			// expression "size / 1024ull * 1024ull" divided then multiplied by 1024.
			Log(LogLevel::Information, "		Heap:[%d] Size:[%dMB] Flags: [%d (%s) ]", heapIdx, static_cast<uint32_t>(heap.getSize() / (1024ull * 1024ull)),
				static_cast<uint32_t>(heap.getFlags()), s.c_str());
			for (uint32_t typeIdx = 0; typeIdx < memprop.getMemoryTypeCount(); ++typeIdx)
			{
				auto type = memprop.getMemoryTypes()[typeIdx];
				if (type.getHeapIndex() == heapIdx)
					Log(LogLevel::Information, "			Memory Type: [%d] Flags: [%d (%s) ] ", typeIdx, type.getPropertyFlags(), to_string(type.getPropertyFlags()).c_str());
			}
		}
	}
	return outInstance;
}
pvrvk::Surface createSurface(pvrvk::Instance& instance, pvrvk::PhysicalDevice& physicalDevice, void* window, void* display, void* connection)
{
(void)physicalDevice; // hide warning
(void)connection;
(void)display;
#if defined(VK_USE_PLATFORM_ANDROID_KHR)
return pvrvk::Surface(instance->createAndroidSurface(reinterpret_cast<ANativeWindow*>(window)));
#elif defined VK_USE_PLATFORM_WIN32_KHR
return pvrvk::Surface(instance->createWin32Surface(GetModuleHandle(NULL), static_cast<HWND>(window)));
#elif defined(VK_USE_PLATFORM_XCB_KHR)
if (instance->getEnabledExtensionTable().khrXcbSurfaceEnabled)
{ return pvrvk::Surface(instance->createXcbSurface(static_cast<xcb_connection_t*>(connection), *((xcb_window_t*)(&window)))); }
#elif defined(VK_USE_PLATFORM_XLIB_KHR)
if (instance->getEnabledExtensionTable().khrXlibSurfaceEnabled)
{ return pvrvk::Surface(instance->createXlibSurface(static_cast<Display*>(display), reinterpret_cast<Window>(window))); }
#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
if (instance->getEnabledExtensionTable().khrWaylandSurfaceEnabled)
{ return pvrvk::Surface(instance->createWaylandSurface(reinterpret_cast<wl_display*>(display), reinterpret_cast<wl_surface*>(window))); }
#elif defined(VK_USE_PLATFORM_MACOS_MVK)
(void)display;
if (instance->getEnabledExtensionTable().mmacosSurfaceEnabled) { return pvrvk::Surface(instance->createMacOSSurface(window)); }
#else // NullWS
Log("%u Displays supported by the physical device", physicalDevice->getNumDisplays());
Log("Display properties:");
for (uint32_t i = 0; i < physicalDevice->getNumDisplays(); ++i)
{
const pvrvk::Display& display = physicalDevice->getDisplay(i);
Log("Properties for Display [%u]:", i);
Log(" Display Name: '%s':", display->getDisplayName());
Log(" Supports Persistent Content: %u", display->getPersistentContent());
Log(" Physical Dimensions: (%u, %u)", display->getPhysicalDimensions().getWidth(), display->getPhysicalDimensions().getHeight());
Log(" Physical Resolution: (%u, %u)", display->getPhysicalResolution().getWidth(), display->getPhysicalResolution().getHeight());
Log(" Supported Transforms: %s", pvrvk::to_string(display->getSupportedTransforms()).c_str());
Log(" Supports Plane Reorder: %u", display->getPlaneReorderPossible());
Log(" Display supports [%u] display modes:", display->getNumDisplayModes());
for (uint32_t j = 0; j < display->getNumDisplayModes(); ++j)
{
Log(" Properties for Display Mode [%u]:", j);
const pvrvk::DisplayMode& displayMode = display->getDisplayMode(j);
Log(" Refresh Rate: %f", displayMode->getParameters().getRefreshRate());
Log(" Visible Region: (%u, %u)", displayMode->getParameters().getVisibleRegion().getWidth(), displayMode->getParameters().getVisibleRegion().getHeight());
}
}
if (physicalDevice->getNumDisplays() == 0) { throw pvrvk::ErrorInitializationFailed("Could not find a suitable Vulkan Display."); }
// We simply loop through the display planes and find a supported display and display mode
for (uint32_t i = 0; i < physicalDevice->getNumDisplayPlanes(); ++i)
{
uint32_t currentStackIndex = -1;
pvrvk::Display display = physicalDevice->getDisplayPlaneProperties(i, currentStackIndex);
std::vector<pvrvk::Display> supportedDisplaysForPlane = physicalDevice->getDisplayPlaneSupportedDisplays(i);
pvrvk::DisplayMode displayMode;
// if a valid display can be found and its supported then make use of it
if (display && std::find(supportedDisplaysForPlane.begin(), supportedDisplaysForPlane.end(), display) != supportedDisplaysForPlane.end())
{ displayMode = display->getDisplayMode(0); } // else find the first supported display and grab its first display mode
else if (supportedDisplaysForPlane.size())
{
pvrvk::Display& currentDisplay = supportedDisplaysForPlane[0];
displayMode = currentDisplay->getDisplayMode(0);
}
if (displayMode)
{
pvrvk::DisplayPlaneCapabilitiesKHR capabilities = physicalDevice->getDisplayPlaneCapabilities(displayMode, i);
Log("Capabilities for the chosen display mode for Display Plane [%u]:", i);
Log(" Supported Alpha Flags: %s", pvrvk::to_string(capabilities.getSupportedAlpha()).c_str());
Log(" Supported Min Src Position: (%u, %u)", capabilities.getMinSrcPosition().getX(), capabilities.getMinSrcPosition().getY());
Log(" Supported Max Src Position: (%u, %u)", capabilities.getMaxSrcPosition().getX(), capabilities.getMaxSrcPosition().getY());
Log(" Supported Min Src Extent: (%u, %u)", capabilities.getMinSrcExtent().getWidth(), capabilities.getMinSrcExtent().getHeight());
Log(" Supported Max Src Extent: (%u, %u)", capabilities.getMaxSrcExtent().getWidth(), capabilities.getMaxSrcExtent().getHeight());
Log(" Supported Min Dst Position: (%u, %u)", capabilities.getMinDstPosition().getX(), capabilities.getMinDstPosition().getY());
Log(" Supported Max Dst Position: (%u, %u)", capabilities.getMaxDstPosition().getX(), capabilities.getMaxDstPosition().getY());
Log(" Supported Min Dst Extent: (%u, %u)", capabilities.getMinDstExtent().getWidth(), capabilities.getMinDstExtent().getHeight());
Log(" Supported Max Dst Extent: (%u, %u)", capabilities.getMaxDstExtent().getWidth(), capabilities.getMaxDstExtent().getHeight());
return pvrvk::Surface(
instance->createDisplayPlaneSurface(displayMode, displayMode->getParameters().getVisibleRegion(), pvrvk::DisplaySurfaceCreateFlagsKHR::e_NONE, i, currentStackIndex));
}
}
#endif
throw pvrvk::ErrorInitializationFailed("We were unable to create a suitable Surface for the given physical device.");
}
// Returns the number of set bits (population count) in a 32-bit value.
// Result is in the range [0, 32].
uint32_t numberOfSetBits(uint32_t bits)
{
	// Kernighan's technique: "bits & (bits - 1)" clears the least-significant
	// set bit, so the loop body runs exactly once per set bit.
	uint32_t count = 0;
	while (bits != 0)
	{
		bits &= (bits - 1);
		++count;
	}
	return count;
}
// Selects a memory type index for an allocation, from the types permitted by
// allowedMemoryTypeBits, preferring types that match as many of the optimal
// property flags as possible while always satisfying the required flags.
//
// physicalDevice           The physical device whose memory types are searched.
// allowedMemoryTypeBits    Bitmask of candidate memory type indices (bit i set
//                          means memory type i is acceptable), typically taken
//                          from a VkMemoryRequirements::memoryTypeBits value.
// requiredMemoryProperties Flags that the chosen type MUST support.
// optimalMemoryProperties  Flags that the chosen type SHOULD support; treated
//                          as a superset of the required flags (see below).
// outMemoryTypeIndex       Receives the chosen memory type index.
// outMemoryPropertyFlags   Receives the property flags of the chosen type.
//
// NOTE(review): if no memory type satisfies both allowedMemoryTypeBits and
// requiredMemoryProperties, the out-parameters are left unmodified — callers
// appear responsible for pre-validating; confirm at call sites.
void getMemoryTypeIndex(const pvrvk::PhysicalDevice& physicalDevice, const uint32_t allowedMemoryTypeBits, const pvrvk::MemoryPropertyFlags requiredMemoryProperties,
	const pvrvk::MemoryPropertyFlags optimalMemoryProperties, uint32_t& outMemoryTypeIndex, pvrvk::MemoryPropertyFlags& outMemoryPropertyFlags)
{
	// attempt to find a memory type index which supports the optimal set of memory property flags
	pvrvk::MemoryPropertyFlags memoryPropertyFlags = optimalMemoryProperties;
	// ensure that the optimal set of memory property flags is a superset of the required set of memory property flags.
	// This also handles cases where the optimal set of memory property flags hasn't been set but the required set has
	memoryPropertyFlags |= requiredMemoryProperties;
	// Lower cost == closer to the optimal flag set; start at "worst possible".
	uint32_t minCost = std::numeric_limits<uint32_t>::max();
	// iterate through each memory type supported by the physical device and attempt to find the best possible memory type supporting as many of the optimal bits as possible
	for (uint32_t memoryIndex = 0u; memoryIndex < physicalDevice->getMemoryProperties().getMemoryTypeCount(); ++memoryIndex)
	{
		const uint32_t memoryTypeBits = (1u << memoryIndex);
		// ensure the memory type is compatible with the require memory for the given allocation
		const bool isRequiredMemoryType = static_cast<uint32_t>(allowedMemoryTypeBits & memoryTypeBits) != 0;
		if (isRequiredMemoryType)
		{
			const pvrvk::MemoryPropertyFlags currentMemoryPropertyFlags = physicalDevice->getMemoryProperties().getMemoryTypes()[memoryIndex].getPropertyFlags();
			// ensure the memory property flags for the current memory type supports the required set of memory property flags
			const bool hasRequiredProperties = static_cast<uint32_t>(currentMemoryPropertyFlags & requiredMemoryProperties) == requiredMemoryProperties;
			if (hasRequiredProperties)
			{
				// calculate a cost value based on the number of bits from the optimal set of bits which are not present in the current memory type
				uint32_t currentCost = numberOfSetBits(static_cast<uint32_t>(memoryPropertyFlags & ~currentMemoryPropertyFlags));
				// update the return values if the current cost is less than the current maximum cost value
				if (currentCost < minCost)
				{
					outMemoryTypeIndex = static_cast<uint32_t>(memoryIndex);
					outMemoryPropertyFlags = currentMemoryPropertyFlags;
					// early return if we have a perfect match
					if (currentCost == 0) { return; }
					// keep track of the current minimum cost
					minCost = currentCost;
				}
			}
		}
	}
}
// Populates the default list of device extensions the framework will attempt
// to enable. Each extension is only requested when the corresponding symbol is
// defined by the Vulkan headers in use at compile time.
// The version argument (uint32_t)-1 is a sentinel — presumably "any version";
// TODO confirm against pvrvk::VulkanExtension's documentation.
DeviceExtensions::DeviceExtensions() : VulkanExtensionList()
{
#ifdef VK_KHR_swapchain
	// enable the swap chain extension
	addExtension(pvrvk::VulkanExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, (uint32_t)-1));
#endif
#ifdef VK_IMG_format_pvrtc
	// attempt to enable pvrtc extension
	addExtension(pvrvk::VulkanExtension(VK_IMG_FORMAT_PVRTC_EXTENSION_NAME, (uint32_t)-1));
#endif
#ifdef VK_IMG_filter_cubic
	// attempt to enable IMG cubic filtering
	addExtension(pvrvk::VulkanExtension(VK_IMG_FILTER_CUBIC_EXTENSION_NAME, (uint32_t)-1));
#endif
#ifdef VK_KHR_get_memory_requirements2
	// attempt to enable VK_KHR_get_memory_requirements2 extension
	addExtension(pvrvk::VulkanExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, (uint32_t)-1));
#endif
#ifdef VK_KHR_dedicated_allocation
	// attempt to enable VK_KHR_dedicated_allocation extension
	addExtension(pvrvk::VulkanExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, (uint32_t)-1));
#endif
#ifdef DEBUG
#ifdef VK_EXT_debug_marker
	// if the build is Debug then enable the DEBUG_MARKER extension to aid with debugging
	addExtension(pvrvk::VulkanExtension(VK_EXT_DEBUG_MARKER_EXTENSION_NAME, (uint32_t)-1));
#endif
#endif
}
// Populates the list of instance layers to request. No layers are added unless
// forceLayers is set; layer version (uint32_t)-1 matches the convention used by
// the extension lists in this file.
InstanceLayers::InstanceLayers(bool forceLayers)
{
	if (!forceLayers) { return; }
	// Enable both VK_LAYER_KHRONOS_validation and the deprecated VK_LAYER_LUNARG_standard_validation as the Loader will handle removing duplicate layers
	const char* const layerNames[] = {
		"VK_LAYER_KHRONOS_validation",
		"VK_LAYER_LUNARG_standard_validation",
		"VK_LAYER_LUNARG_assistant_layer",
		"VK_LAYER_LUNARG_monitor",
	};
	for (const char* const layerName : layerNames) { addLayer(pvrvk::VulkanLayer(layerName, static_cast<uint32_t>(-1))); }
}
// Populates the default list of instance extensions to request: the base
// surface extension, exactly one platform-specific surface extension chosen by
// the VK_USE_PLATFORM_* macro chain (falling back to VK_KHR_display for
// NullWS/headless builds), and — in Debug builds — the debugging extensions.
// Each extension is only requested when its symbol exists in the Vulkan headers.
// The version argument (uint32_t)-1 is a sentinel — presumably "any version";
// TODO confirm against pvrvk::VulkanExtension's documentation.
InstanceExtensions::InstanceExtensions()
{
#ifdef VK_KHR_surface
	addExtension(pvrvk::VulkanExtension(VK_KHR_SURFACE_EXTENSION_NAME, (uint32_t)-1));
#endif
#if defined(VK_USE_PLATFORM_ANDROID_KHR)
	addExtension(pvrvk::VulkanExtension(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, (uint32_t)-1));
#elif defined VK_USE_PLATFORM_WIN32_KHR
	addExtension(pvrvk::VulkanExtension(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, (uint32_t)-1));
#elif defined(VK_USE_PLATFORM_XCB_KHR)
	addExtension(pvrvk::VulkanExtension(VK_KHR_XCB_SURFACE_EXTENSION_NAME, (uint32_t)-1));
#elif defined(VK_USE_PLATFORM_XLIB_KHR)
	addExtension(pvrvk::VulkanExtension(VK_KHR_XLIB_SURFACE_EXTENSION_NAME, (uint32_t)-1));
#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
	addExtension(pvrvk::VulkanExtension(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, (uint32_t)-1));
#elif defined(VK_USE_PLATFORM_MACOS_MVK)
	addExtension(pvrvk::VulkanExtension(VK_MVK_MACOS_SURFACE_EXTENSION_NAME, (uint32_t)-1));
#elif defined(VK_KHR_display) // NullWS
	addExtension(pvrvk::VulkanExtension(VK_KHR_DISPLAY_EXTENSION_NAME, (uint32_t)-1));
#endif
#ifdef VK_KHR_get_physical_device_properties2
	addExtension(pvrvk::VulkanExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, (uint32_t)-1));
#endif
#ifdef DEBUG
	// if the build is Debug then attempt to enable the VK_EXT_debug_report extension to aid with debugging
#if defined(VK_EXT_debug_report) && !defined(VK_USE_PLATFORM_MACOS_MVK)
	addExtension(pvrvk::VulkanExtension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, (uint32_t)-1));
#endif
	// if the build is Debug then attempt to enable the VK_EXT_debug_utils extension to aid with debugging
#if defined(VK_EXT_debug_utils) && !defined(VK_USE_PLATFORM_MACOS_MVK)
	addExtension(pvrvk::VulkanExtension(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, (uint32_t)-1));
#endif
	// if the build is Debug then attempt to enable the VK_EXT_validation_features extension to aid with debugging
#ifdef VK_EXT_validation_features
	addExtension(pvrvk::VulkanExtension(VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME, (uint32_t)-1));
#endif
#endif
}
// Uploads a pvr::Texture into a pvrvk::Image, recording the required transfer
// work into the supplied command buffer. Thin forwarding wrapper: all work is
// delegated to uploadImageHelper with the arguments passed through unchanged —
// see that helper for the actual upload behavior.
//
// device                       Device used to create the image.
// texture                      Source texture data to upload.
// allowDecompress              Whether the helper may decompress the texture
//                              (e.g. for unsupported compressed formats —
//                              TODO confirm against uploadImageHelper).
// commandBuffer                Command buffer that records the upload commands;
//                              the caller is responsible for submitting it.
// usageFlags / finalLayout     Usage and layout for the resulting image.
// stagingBufferAllocator       Optional VMA allocator for the staging buffer.
// imageAllocator               Optional VMA allocator for the image itself.
// imageAllocationCreateFlags   VMA allocation flags for the image memory.
pvrvk::Image uploadImage(pvrvk::Device& device, const Texture& texture, bool allowDecompress, pvrvk::CommandBuffer& commandBuffer, pvrvk::ImageUsageFlags usageFlags,
	pvrvk::ImageLayout finalLayout, vma::Allocator* stagingBufferAllocator, vma::Allocator* imageAllocator, vma::AllocationCreateFlags imageAllocationCreateFlags)
{
	return uploadImageHelper(device, texture, allowDecompress, commandBuffer, usageFlags, finalLayout, stagingBufferAllocator, imageAllocator, imageAllocationCreateFlags);
}
// Installs debug callbacks on the instance, preferring VK_EXT_debug_utils and
// falling back to VK_EXT_debug_report when only that extension is enabled.
// In both cases two callbacks are registered:
//   index [1] — throws an exception on ERROR-severity events;
//   index [0] — logs WARNING- and ERROR-severity events.
// If neither extension is enabled the returned struct is left empty.
// The returned DebugUtilsCallbacks owns the handles; they are unregistered
// when it is destroyed — TODO confirm ownership semantics against the
// DebugUtilsCallbacks declaration.
DebugUtilsCallbacks createDebugUtilsCallbacks(pvrvk::Instance& instance)
{
	DebugUtilsCallbacks debugUtilsCallbacks;
	if (instance->getEnabledExtensionTable().extDebugUtilsEnabled)
	{
		{
			// Create a second Debug Utils Messenger for throwing exceptions for Error events.
			pvrvk::DebugUtilsMessengerCreateInfo createInfo = pvrvk::DebugUtilsMessengerCreateInfo(
				pvrvk::DebugUtilsMessageSeverityFlagsEXT::e_ERROR_BIT_EXT, pvrvk::DebugUtilsMessageTypeFlagsEXT::e_ALL_BITS, pvr::utils::throwOnErrorDebugUtilsMessengerCallback);
			debugUtilsCallbacks.debugUtilsMessengers[1] = instance->createDebugUtilsMessenger(createInfo);
		}
		// Create Debug Utils Messengers
		{
			// Create a Debug Utils Messenger which will trigger our callback for logging messages for events of warning and error types of all severities
			pvrvk::DebugUtilsMessengerCreateInfo createInfo =
				pvrvk::DebugUtilsMessengerCreateInfo(pvrvk::DebugUtilsMessageSeverityFlagsEXT::e_ERROR_BIT_EXT | pvrvk::DebugUtilsMessageSeverityFlagsEXT::e_WARNING_BIT_EXT,
					pvrvk::DebugUtilsMessageTypeFlagsEXT::e_ALL_BITS, logMessageDebugUtilsMessengerCallback);
			debugUtilsCallbacks.debugUtilsMessengers[0] = instance->createDebugUtilsMessenger(createInfo);
		}
	}
	else if (instance->getEnabledExtensionTable().extDebugReportEnabled)
	{
		{
			// Create a second Debug Report Callback for throwing exceptions for Error events.
			pvrvk::DebugReportCallbackCreateInfo createInfo =
				pvrvk::DebugReportCallbackCreateInfo(pvrvk::DebugReportFlagsEXT::e_ERROR_BIT_EXT, pvr::utils::throwOnErrorDebugReportCallback);
			debugUtilsCallbacks.debugCallbacks[1] = instance->createDebugReportCallback(createInfo);
		}
		// Create Debug Report Callbacks
		{
			// Create a Debug Report Callback for logging messages for events of error, performance or warning types.
			pvrvk::DebugReportCallbackCreateInfo createInfo = pvrvk::DebugReportCallbackCreateInfo(
				pvrvk::DebugReportFlagsEXT::e_ERROR_BIT_EXT | pvrvk::DebugReportFlagsEXT::e_PERFORMANCE_WARNING_BIT_EXT | pvrvk::DebugReportFlagsEXT::e_WARNING_BIT_EXT,
				logMessageDebugReportCallback);
			debugUtilsCallbacks.debugCallbacks[0] = instance->createDebugReportCallback(createInfo);
		}
	}
	return debugUtilsCallbacks;
}
} // namespace utils
} // namespace pvr
//!\endcond
|
<filename>template/page/inc/scripts.js
'use strict';

// Template fragment emitting the <script> tags appended to every page.
// The (_, view) signature presumably matches the (data, view) contract shared
// by the sibling templates in this directory; both arguments are unused here —
// TODO confirm against the template renderer.
module.exports = (_, view) => `
<script src="/js/browser-check.js"></script>
<script src="/lib/jquery/jquery.min.js"></script>
`
|
/**
 * Checks whether {@code num} is an Armstrong (narcissistic) number: a number
 * equal to the sum of its digits each raised to the power of the digit count
 * (e.g. 153 = 1^3 + 5^3 + 3^3).
 *
 * @param num the number to test (non-negative)
 * @return {@code true} if {@code num} is an Armstrong number
 */
public boolean isArmstrongNumber(int num) {
    int len = String.valueOf(num).length();
    int sum = 0;
    int temp = num;
    while (temp != 0) {
        int digit = temp % 10;
        // Bug fix: the original used "digit ^ len", which is bitwise XOR in
        // Java, not exponentiation — it returned false for 153. Use integer
        // multiplication to raise the digit to the power of the digit count
        // (exact, unlike Math.pow's floating-point result).
        int power = 1;
        for (int i = 0; i < len; i++) {
            power *= digit;
        }
        sum += power;
        temp /= 10;
    }
    return sum == num;
}
// Example usage: 153 = 1^3 + 5^3 + 3^3, so it is an Armstrong number.
int num = 153;
System.out.println(isArmstrongNumber(num)); // true
#!/bin/bash
# Demo: runs a Solace broker + Kafka, produces test messages, and installs the
# Confluent Solace sink connector to deliver them to a Solace queue.
set -e

# Resolve the directory containing this script so relative paths work
# regardless of the caller's working directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source ${DIR}/../../scripts/utils.sh

# Fetch the Solace JMS client jar next to this script if not already present.
if [ ! -f ${DIR}/sol-jms-10.6.4.jar ]
then
     log "Downloading sol-jms-10.6.4.jar"
     # Bug fix: download explicitly into ${DIR}. A plain wget writes to the
     # current working directory, so the existence check above never found the
     # jar and it was re-downloaded (to the wrong place) on every run.
     wget -O ${DIR}/sol-jms-10.6.4.jar https://repo1.maven.org/maven2/com/solacesystems/sol-jms/10.6.4/sol-jms-10.6.4.jar
fi

# Bring up the docker-compose environment (Kafka, Connect, Solace, ...).
${DIR}/../../environment/plaintext/start.sh "${PWD}/docker-compose.plaintext.yml"

log "Wait 60 seconds for Solace to be up and running"
sleep 60

log "Solace UI is accessible at http://127.0.0.1:8080 (admin/admin)"

# Produce 10 test messages (the numbers 1..10) into the Kafka source topic.
log "Sending messages to topic sink-messages"
seq 10 | docker exec -i broker kafka-console-producer --broker-list broker:9092 --topic sink-messages

# Register (or update) the sink connector via the Connect REST API.
log "Creating Solace sink connector"
curl -X PUT \
     -H "Content-Type: application/json" \
     --data '{
               "connector.class": "io.confluent.connect.jms.SolaceSinkConnector",
               "tasks.max": "1",
               "topics": "sink-messages",
               "solace.host": "smf://solace:55555",
               "solace.username": "admin",
               "solace.password": "admin",
               "solace.dynamic.durables": "true",
               "jms.destination.type": "queue",
               "jms.destination.name": "connector-quickstart",
               "key.converter": "org.apache.kafka.connect.storage.StringConverter",
               "value.converter": "org.apache.kafka.connect.storage.StringConverter",
               "confluent.topic.bootstrap.servers": "broker:9092",
               "confluent.topic.replication.factor": "1"
          }' \
     http://localhost:8083/connectors/SolaceSinkConnector/config | jq .

# Give the connector time to deliver the messages before verifying.
sleep 30

log "Confirm the messages were delivered to the connector-quickstart queue in the default Message VPN using CLI"
docker exec solace bash -c "/usr/sw/loads/currentload/bin/cli -A -s cliscripts/show_queue_cmd"
|
package com.qht.biz;
import org.springframework.stereotype.Service;
import com.github.wxiaoqi.security.common.biz.BaseBiz;
import com.qht.entity.Auth;
import com.qht.mapper.AuthMapper;
/**
 * Permissions business service.
 *
 * Empty subclass: all CRUD behavior is inherited from
 * {@link BaseBiz} parameterized with {@link AuthMapper} and {@link Auth}.
 *
 * @author yangtonggan
 * @email <EMAIL>
 * @date 2018-11-05 18:55:41
 */
@Service
public class AuthBiz extends BaseBiz<AuthMapper,Auth> {
}
#!/bin/bash
# Configures ETM trace strobing by writing the window and period values (in
# hex, as the sysfs nodes expect) to every CoreSight ETM device.
WINDOW=$1
PERIOD=$2

# Both arguments are mandatory.
if [[ -z $WINDOW ]] || [[ -z $PERIOD ]]; then
    echo "Window or Period not specified!"
    echo "Example usage: ./set_strobing.sh <WINDOW VALUE> <PERIOD VALUE>"
    echo "Example usage: ./set_strobing.sh 5000 10000"
    # Use a valid exit status; "exit -1" is out of range and wraps to 255.
    exit 1
fi

# Writing to /sys/bus/coresight requires root.
if [[ $EUID != 0 ]]; then
    echo "Please run as root"
    exit 1
fi

for e in /sys/bus/coresight/devices/etm*/; do
    # The strobe_* sysfs attributes take hexadecimal values.
    printf "%x" "$WINDOW" | tee "$e/strobe_window" > /dev/null
    printf "%x" "$PERIOD" | tee "$e/strobe_period" > /dev/null
    # Bug fix: the values read back from sysfs are hex, but $((...)) parses a
    # bare number as decimal, so the confirmation printed the wrong value
    # (e.g. 5000 was echoed back as 1388). Force base-16 with 16#.
    echo "Strobing period for $e set to $((16#$(cat "$e/strobe_period")))"
    echo "Strobing window for $e set to $((16#$(cat "$e/strobe_window")))"
done

## Shows the user a simple usage example
echo ">> Done! <<"
echo "You can now run perf to trace your application, for example:"
echo "perf record -e cs_etm/@tmc_etr0/u -- <your app>"
|
/* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* <p/>
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.rzo.yajsw.tray;
/**
 * Formats byte counts into human-readable strings with a binary-unit suffix
 * (e.g. {@code 1536} becomes {@code "1.5 KB"}).
 *
 * taken from
 * http://groups.google.com/group/comp.lang.java.help/browse_thread/thread
 * /0db818517ca9de79/b0a55aa19f911204 thanks to Piotr Kobzda Formatter for Bytes
 */
public class ByteFormat
{
	/**
	 * The Enum StorageUnit. Each constant pairs a display symbol with its
	 * size in bytes (powers of 1024, i.e. binary units).
	 */
	public enum StorageUnit
	{
		/** The BYTE. */
		BYTE("B", 1L),
		/** The KILOBYTE. */
		KILOBYTE("KB", 1L << 10),
		/** The MEGABYTE. */
		MEGABYTE("MB", 1L << 20),
		/** The GIGABYTE. */
		GIGABYTE("GB", 1L << 30),
		/** The TERABYTE. */
		TERABYTE("TB", 1L << 40),
		/** The PETABYTE. */
		PETABYTE("PB", 1L << 50),
		/** The EXABYTE. */
		EXABYTE("EB", 1L << 60);
		/** The Constant BASE. */
		public static final StorageUnit BASE = BYTE;
		private final String symbol;
		private final long divider; // divider of BASE unit
		/**
		 * Instantiates a new storage unit.
		 *
		 * @param name
		 *            the name
		 * @param divider
		 *            the divider
		 */
		StorageUnit(String name, long divider)
		{
			this.symbol = name;
			this.divider = divider;
		}
		/**
		 * Of. Selects the largest unit whose divider does not exceed the
		 * magnitude of {@code number}.
		 *
		 * Comparisons are done on the negated value so that
		 * {@code Long.MIN_VALUE} (whose absolute value cannot be represented
		 * as a positive long) is still handled correctly.
		 *
		 * @param number
		 *            the number
		 *
		 * @return the storage unit
		 */
		public static StorageUnit of(final long number)
		{
			// Normalize to a non-positive value to avoid overflow on negation.
			final long n = number > 0 ? -number : number;
			if (n > -(1L << 10))
			{
				return BYTE;
			}
			else if (n > -(1L << 20))
			{
				return KILOBYTE;
			}
			else if (n > -(1L << 30))
			{
				return MEGABYTE;
			}
			else if (n > -(1L << 40))
			{
				return GIGABYTE;
			}
			else if (n > -(1L << 50))
			{
				return TERABYTE;
			}
			else if (n > -(1L << 60))
			{
				return PETABYTE;
			}
			else
			{ // n >= Long.MIN_VALUE
				return EXABYTE;
			}
		}
	}
	/**
	 * Format. Renders {@code number} in the unit chosen by
	 * {@link StorageUnit#of(long)}, with at most one fraction digit.
	 *
	 * NOTE(review): the shared static NumberFormat below is mutable state;
	 * java.text.NumberFormat is documented as not thread-safe, so concurrent
	 * calls to this method may interfere — confirm this is only called from
	 * one (UI) thread.
	 *
	 * @param number
	 *            the number of bytes
	 *
	 * @return the formatted string
	 */
	public String format(long number)
	{
		StorageUnit st = StorageUnit.of(number);
		return nf.format((double) number / st.divider) + " " + st.symbol;
	}
	// Shared formatter: no digit grouping, 0-1 fraction digits, default locale.
	private static java.text.NumberFormat nf = java.text.NumberFormat.getInstance();
	static
	{
		nf.setGroupingUsed(false);
		nf.setMinimumFractionDigits(0);
		nf.setMaximumFractionDigits(1);
	}
}
|
#!/bin/bash
# Publishes the generated site (build/site/) to a remote host over rsync/ssh,
# using an SSH key and known_hosts entry supplied as arguments (e.g. from CI
# secrets). The key is written to a temporary path and removed afterwards.
HOST="$1"
HOST_PATH="$2"
SSH_PRIVATE_KEY="$3"
SSH_KNOWN_HOST="$4"

# Validate the argument count before doing any work.
if [ "$#" -ne 4 ]; then
    # Fix: the usage string previously named $SSH_KNOWN_HOSTS, which does not
    # match the actual fourth argument ($SSH_KNOWN_HOST).
    echo -e "not enough arguments USAGE:\n\n$0 \$HOST \$HOST_PATH \$SSH_PRIVATE_KEY \$SSH_KNOWN_HOST \n\n" >&2
    exit 1
fi

# Use a non-default path to avoid overriding when testing locally
SSH_PRIVATE_KEY_PATH=~/.ssh/github-actions-docs
# install -D creates ~/.ssh (if missing) and the key file with 0600 permissions.
install -m 600 -D /dev/null "$SSH_PRIVATE_KEY_PATH"
echo "$SSH_PRIVATE_KEY" > "$SSH_PRIVATE_KEY_PATH"
# Fix: append rather than truncate — overwriting known_hosts would destroy a
# developer's existing entries when the script is run locally, contradicting
# the "avoid overriding when testing locally" intent above.
echo "$SSH_KNOWN_HOST" >> ~/.ssh/known_hosts

rsync -avze "ssh -i $SSH_PRIVATE_KEY_PATH" build/site/ "$HOST:$HOST_PATH"

# Remove the temporary key regardless of rsync's outcome.
rm -f "$SSH_PRIVATE_KEY_PATH"
|
/**
 * Skeleton calendar component.
 *
 * Renders the supplied header inside a wrapper styled with
 * `headerComponentStyle`, followed by one element per entry of `hourRange`.
 *
 * @param {object} props
 * @param {*} props.headerComponent - Content rendered above the hour list.
 * @param {Array} props.hourRange - Hour labels; each entry is rendered in its
 *   own element and also used as the React key, so entries are assumed to be
 *   unique — TODO confirm with callers.
 * @param {object} props.headerComponentStyle - Inline style object applied to
 *   the header wrapper.
 */
export default function PrestoCalendar({
  headerComponent,
  hourRange,
  headerComponentStyle,
  // Add your implementation here
}) {
  return (
    <div>
      {/* Header area, styled by the caller. */}
      <div style={headerComponentStyle}>{headerComponent}</div>
      <div>
        {hourRange.map((hour) => (
          <div key={hour}>{hour}</div>
        ))}
      </div>
      {/* Add calendar rendering logic here */}
    </div>
  );
}
<gh_stars>0
/**
* @file Implementation - romanToInt
* @module romanToInt
*/
/**
 * Converts a Roman numeral into an integer.
 *
 * See: {@link https://leetcode.com/problems/roman-to-integer}
 *
 * Uses the standard single-pass subtractive rule: a symbol worth less than the
 * symbol to its right is subtracted (the I in IV), otherwise it is added. This
 * replaces the original greedy loop, which re-scanned a 13-entry token table
 * for every match; the result is identical for well-formed numerals.
 *
 * @example romanToInt('III') => 3
 * @example romanToInt('IV') => 4
 * @example romanToInt('IX') => 9
 * @example romanToInt('LVIII') => 58
 * @example romanToInt('MCMXCIV') => 1994
 * @example romanToInt('MMMCMXCIX') => 3999
 *
 * @param {string} s - Roman numeral to convert (assumed well-formed)
 * @return {number} Roman numeral as integer
 */
function romanToInt(s: string): number {
  // Value of each single Roman symbol
  const values: Record<string, number> = {
    I: 1,
    V: 5,
    X: 10,
    L: 50,
    C: 100,
    D: 500,
    M: 1000
  }

  // Trim Roman Numeral
  const numeral = s.trim()

  // Accumulate the integer value in a single left-to-right pass
  let num = 0
  for (let i = 0; i < numeral.length; i++) {
    const current = values[numeral[i]]
    const next = values[numeral[i + 1]]

    // Subtractive notation: a smaller symbol before a larger one is negative
    if (next !== undefined && current < next) {
      num -= current
    } else {
      num += current
    }
  }

  return num
}

export default romanToInt
|
<reponame>ooooo-youwillsee/leetcode
//
// Created by ooooo on 2020/2/1.
//
#ifndef CPP_0893__SOLUTION1_H_
#define CPP_0893__SOLUTION1_H_
#include <algorithm>
#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>
using namespace std;
/**
*
*/
class Solution {
public:
int numSpecialEquivGroups(vector<string> &A) {
unordered_set<string> set;
for (auto &s:A) {
string odd = "", even = "";
for (int i = 0; i < s.size(); ++i) {
if (i % 2 == 0) even.push_back(s[i]);
else odd.push_back(s[i]);
}
sort(odd.begin(), odd.end());
sort(even.begin(), even.end());
set.insert(odd + even);
}
return set.size();
}
};
#endif //CPP_0893__SOLUTION1_H_
|
class FifoManager:
  """Helpers for locating and clearing the on-device native-test FIFO."""

  def __init__(self, package_info):
    # Package metadata used to derive the on-device FIFO path.
    self._package_info = package_info

  def _GetFifo(self):
    # The test.fifo path is determined by:
    # testing/android/native_test/java/src/org/chromium/native_test/
    # NativeTestActivity.java and
    # testing/android/native_test_launcher.cc
    return '/data/data/%s/files/test.fifo' % self._package_info.package

  def _ClearFifo(self, device):
    # Remove any stale FIFO; -f keeps the command quiet when it is absent.
    device.RunShellCommand('rm -f ' + self._GetFifo())
<filename>compiler/generate/visitors/index.js
import Comment from './Comment.js';
import EachBlock from './EachBlock.js';
import Element from './Element.js';
import IfBlock from './IfBlock.js';
import MustacheTag from './MustacheTag.js';
import Text from './Text.js';
import YieldTag from './YieldTag.js';
// Lookup table of visitor implementations, keyed by node type name.
// Presumably consumed by the code generator to dispatch on each parsed node's
// type — TODO confirm against the generate/ callers.
export default {
	Comment,
	EachBlock,
	Element,
	IfBlock,
	MustacheTag,
	Text,
	YieldTag
};
|
#!/bin/sh
#
# Copyright (C) 2004, 2007, 2009, 2012 Internet Systems Consortium, Inc. ("ISC")
# Copyright (C) 2001 Internet Software Consortium.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# $Id: setup.sh,v 1.7 2009/03/02 23:47:43 tbox Exp $

# Generate 100 units of pseudo-random data into random.data via the bundled
# genrandom tool — presumably used to seed key generation for this test;
# confirm against ns1/setup.sh.
RANDFILE=random.data
../../../tools/genrandom 100 $RANDFILE

# Delegate the remainder of the test setup to the ns1 zone's own script.
cd ns1 && sh setup.sh
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.