repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
vtfk/vtfk-minelev-react
|
src/containers/YffCurriculumModal/kompetansemal-selector-form.js
|
<filename>src/containers/YffCurriculumModal/kompetansemal-selector-form.js
/* eslint-env browser */
import { useEffect, useState } from 'react'
import logError from '../../lib/log-error'
import { SelectMultiple, TextField, Icon } from '@vtfk/components'
import serializeForm from '../../lib/serialize-form'
import { API } from '../../config/app'
function Beskrivelse (props) {
const [text, setText] = useState('')
const { tittel, kode } = props
return (
<div className='input-element'>
<TextField
value={text}
onChange={event => setText(event.target.value)}
name={kode}
placeholder={tittel.nb}
/>
</div>
)
}
function Arbeidsoppgaver (props) {
const { maal } = props
if (!maal) return null
return (
<>
<h2 className='subheader'>Beskriv arbeidsoppgaver</h2>
{maal.map(item => <Beskrivelse key={item.kode} {...item} />)}
</>
)
}
function KompetansemalVelger (props) {
const [selectedMaal, setSelectedMaal] = useState([])
const {
utplassering,
skole,
programomraade,
kompetansemaal,
apiPost,
selectedStudentId,
setRefreshLaereplan,
triggerSaveMaal,
onMaalChange,
showError,
setSaving,
setSaveState
} = props
useEffect(() => {
if (triggerSaveMaal === true && selectedMaal.length > 0) {
sendForm()
}
}, [triggerSaveMaal])
if (!kompetansemaal || (Array.isArray(kompetansemaal) && kompetansemaal.length === 0)) {
return null
}
const items = kompetansemaal.map(item => {
return { value: item.kode, label: item.tittel.nb, ...item }
})
const updateMaal = item => {
let copySelectedMaal = [...selectedMaal]
const ids = copySelectedMaal.map(maal => maal.kode)
if (ids.includes(item.kode)) {
copySelectedMaal = copySelectedMaal.filter(maal => maal.kode !== item.kode)
} else {
copySelectedMaal.push(item)
}
setSelectedMaal(copySelectedMaal)
if (onMaalChange) onMaalChange(copySelectedMaal)
}
const generateMaal = (grep, arbeidsoppgaver) => {
const referanseID = (utplassering.value === 'skole' ? skole.value : undefined) || utplassering.value
const referanseTittel = (utplassering.value === 'skole' ? skole.label : undefined) || utplassering.label
return {
studentUserName: selectedStudentId,
referanseID,
referanseTittel,
programomraade,
grep,
arbeidsoppgaver
}
}
async function sendForm () {
const form = document.getElementById('kompetansemaal-form')
const data = new FormData(form)
const json = serializeForm(data)
const keys = Object.keys(json)
const copyOfMaal = [...kompetansemaal]
const selectedMaal = copyOfMaal.filter(maal => keys.includes(maal.kode)).reduce((list, maal) => {
const arbeidsoppgaver = json[maal.kode]
const kompetansemaal = generateMaal(maal, arbeidsoppgaver)
list.push(kompetansemaal)
return list
}, [])
const url = `${API.URL}/yff/${selectedStudentId}/maal`
setSaving(true)
try {
await Promise.all(selectedMaal.map(maal => apiPost(url, maal)))
setSaving(false)
setSaveState('success')
if (!triggerSaveMaal) {
setRefreshLaereplan(true)
}
// nullstiller maal
setSelectedMaal([])
} catch (error) {
logError(error)
setSaving(false)
setSaveState('success')
}
}
const SaveButton = () => {
return (
<button className='check-button button-left-icon button-primary' onClick={sendForm}>
<div className='button-left-icon-icon'>
<Icon name='check' size='small' />
</div>
<div className='button-left-icon-text'>
Lagre kompetansemål i lokal læreplan
</div>
</button>
)
}
return (
<>
<div className='input-element'>
<SelectMultiple
placeholder='Velg kompetansemål'
items={items}
selectedItems={selectedMaal}
onChange={(item) => updateMaal(item)}
error={showError}
/>
</div>
<form id='kompetansemaal-form'>
{selectedMaal.length > 0 && <Arbeidsoppgaver maal={selectedMaal} />}
</form>
{selectedMaal.length > 0 && <SaveButton />}
</>
)
}
export default KompetansemalVelger
|
horaklukas/cookery-book
|
test/spec/components/CookeryBookTest.js
|
<filename>test/spec/components/CookeryBookTest.js
'use strict';
describe('CookeryBook', function() {
let React = require('react');
let {Map, List, fromJS} = require('immutable');
let TestUtils = require('react-shallow-testutils');
let renderer = new TestUtils.Renderer();
let rewire = require('rewire');
var CookeryBook, BookCover, Recipes, BrowseButton;
beforeAll(function() {
this.mockActions = {
setFirstPage: jasmine.createSpy(), setLastPage: jasmine.createSpy(),
setPreviousPage: jasmine.createSpy(), setNextPage: jasmine.createSpy()
};
CookeryBook = rewire('components/CookeryBook.js');
CookeryBook.__set__('CookeryBookActions', this.mockActions);
BookCover = require('components/BookCover');
Recipes = require('components/Recipes');
BrowseButton = require('components/BrowseButton');
});
beforeEach(function() {
this.mockActions.setFirstPage.calls.reset();
this.mockActions.setPreviousPage.calls.reset();
this.mockActions.setNextPage.calls.reset();
this.mockActions.setLastPage.calls.reset();
this.recipes = fromJS([
{id: 'r1', title: 'recipe1'}, {id: 'r2', title: 'recipe2'},
{id: 'r3', title: 'recipe3'}, {id: 'r4', title: 'recipe4'}
]);
this.book = Map({actualRecipe: 'r2'});
let props = {book: this.book, recipes: this.recipes};
this.component = renderer.render(() => <CookeryBook {...props} />, props);
});
it('should set book cover as a actual when recipe id is not defined', function() {
let props = {book: Map({actualRecipe: null}), recipes: this.recipes};
let component = renderer.render(() => <CookeryBook {...props} />, props);
let cover = TestUtils.findWithType(component, BookCover);
expect(cover.props.actual).toEqual(true);
});
it('should not set book cover as a actual when recipe id is not defined', function() {
let cover = TestUtils.findWithType(this.component, BookCover);
expect(cover.props.actual).toEqual(false);
});
it('should create just forward buttons when recipe id is not defined', function() {
let props = {book: Map({actualRecipe: null}), recipes: this.recipes};
let component = renderer.render(() => <CookeryBook {...props} />, props);
let browseButtons = TestUtils.findAllWithType(component, BrowseButton);
expect(browseButtons.length).toEqual(2);
expect(browseButtons[0].props.type).toEqual('forward');
expect(browseButtons[1].props.type).toEqual('fast-forward');
});
it('should create both browse buttons when show recipe in middle of book', function() {
let browseButtons = TestUtils.findAllWithType(this.component, BrowseButton);
expect(browseButtons.length).toEqual(4);
expect(browseButtons[0].props.type).toEqual('backward');
expect(browseButtons[1].props.type).toEqual('fast-backward');
expect(browseButtons[2].props.type).toEqual('forward');
expect(browseButtons[3].props.type).toEqual('fast-forward');
});
it('should create no button when there are no recipes', function() {
let props = {book: Map({actualRecipe: null}), recipes: List([])};
let component = renderer.render(() => <CookeryBook {...props} />, props);
let browseButtons = TestUtils.findAllWithType(component, BrowseButton);
expect(browseButtons.length).toEqual(0);
});
it('should create just backward buttons when recipe id is id of last', function() {
let props = {book: Map({actualRecipe: 'r4'}), recipes: this.recipes};
let component = renderer.render(() => <CookeryBook {...props} />, props);
let browseButtons = TestUtils.findAllWithType(component, BrowseButton);
expect(browseButtons.length).toEqual(2);
expect(browseButtons[0].props.type).toEqual('backward');
expect(browseButtons[1].props.type).toEqual('fast-backward');
});
it('should call appropriete action when clicked button', function() {
let browseButtons = TestUtils.findAllWithType(this.component, BrowseButton);
browseButtons[0].props.onClick();
expect(this.mockActions.setPreviousPage.calls.count(), 'backward').toEqual(1);
browseButtons[1].props.onClick();
expect(this.mockActions.setFirstPage.calls.count(), 'fast-backward').toEqual(1);
browseButtons[2].props.onClick();
expect(this.mockActions.setNextPage.calls.count(), 'forward').toEqual(1);
browseButtons[3].props.onClick();
expect(this.mockActions.setLastPage.calls.count(), 'fast-forward').toEqual(1);
});
it('should create recipes list', function() {
let recipes = TestUtils.findWithType(this.component, Recipes);
expect(recipes.props).toEqual({book: this.book, recipes: this.recipes});
});
});
|
e-foto/e-foto
|
c/photogrammetry/DetectorFiducialMark.h
|
<gh_stars>1-10
#ifndef DETECTORFIDUCIALMARK_H
#define DETECTORFIDUCIALMARK_H
/*Copyright 2002-2021 e-foto team (UERJ)
This file is part of e-foto.
e-foto is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
e-foto is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with e-foto. If not, see <http://www.gnu.org/licenses/>.
*/
#include "AbstractDetectorCoordinate.h"
namespace br {
namespace uerj {
namespace eng {
namespace efoto {
/**
 * \file DetectorFiducialMark.h
 * \class DetectorFiducialMark
 * \brief Analog fiducial mark class. A fiducial mark is a reference of the metric sensor whose position is determined in a calibration laboratory.
 * \copyright E-Foto group
 * \authors <NAME> & <NAME>
 */
class DetectorFiducialMark : public AbstractDetectorCoordinate {
private:
    int id;        // identifier of this analog fiducial mark
    int sensorId;  // identifier of the sensor this mark belongs to
public:
    /**
     * \brief Empty constructor.
     */
    DetectorFiducialMark();
    /**
     * \brief Constructor that creates an analog fiducial mark with its identifiers.
     * \param myId Identifier of the analog fiducial mark.
     * \param mySensorId Identifier of the sensor the mark belongs to.
     */
    explicit DetectorFiducialMark(int myId, int mySensorId);
    /**
     * \brief Constructor that creates an analog fiducial mark with all of its values.
     * \param myId Identifier of the analog fiducial mark.
     * \param mySensorId Identifier of the sensor the mark belongs to.
     * \param myUnit Text describing the unit of measurement used for the mark.
     * \param myXi Xi value.
     * \param myEta Eta value.
     * \param mySigmaXi Error value along the Xi direction.
     * \param mySigmaEta Error value along the Eta direction.
     * \param mySigmaXiEta Joint error value for the Xi and Eta directions.
     */
    DetectorFiducialMark(int myId, int mySensorId, std::string myUnit, double myXi, double myEta, double mySigmaXi = 1, double mySigmaEta = 1, double mySigmaXiEta = 0);
    /**
     * \brief Constructor that creates an analog fiducial mark with no known error, using matrices.
     * \param myId Identifier of the analog fiducial mark.
     * \param mySensorId Identifier of the sensor the mark belongs to.
     * \param myPosition Column matrix with the position values of the mark.
     */
    DetectorFiducialMark(int myId, int mySensorId, const PositionMatrix& myPosition);
    /**
     * \brief Constructor that creates an analog fiducial mark with all of its values, using matrices.
     * \param myId Identifier of the analog fiducial mark.
     * \param mySensorId Identifier of the sensor the mark belongs to.
     * \param myPosition Column matrix with the position values of the mark.
     * \param myPositionSigmas Sigma matrix with the error values for the mark's position.
     */
    DetectorFiducialMark(int myId, int mySensorId, const PositionMatrix& myPosition, const Matrix& myPositionSigmas);
    /**
     * \brief Default destructor.
     */
    ~DetectorFiducialMark();
    /**
     * \brief Accessor that returns the id value of the analog fiducial mark.
     * \return int Identifier of the analog fiducial mark.
     */
    int getId();
    /**
     * \brief Method that emits the class name.
     * \return std::string Returns the class name of the object.
     */
    std::string objectType(void);
    /**
     * \brief Method that emits the associations of an instance.
     * \return std::string Returns an empty string for this class.
     * \deprecated This method is unused or its use should be avoided, as it will be removed in future versions.
     */
    std::string objectAssociations(void);
    /**
     * \brief Test method for the name/type of an instance.
     * \param s Text with the expected class name.
     * \return bool Returns true if the given name is DetectorFiducialMark; false otherwise.
     */
    bool is(std::string s);
    /**
     * \brief Method that sets the attribute values of an instance from its xml description.
     * \param xml String containing the xml with all attribute values appropriate for a DetectorFiducialMark instance.
     */
    void xmlSetData(std::string xml);
    /**
     * \brief Method that extracts the equivalent xml data of an instance.
     * \return std::string Returns the string containing the xml for a DetectorFiducialMark instance.
     */
    std::string xmlGetData();
};
} // namespace efoto
} // namespace eng
} // namespace uerj
} // namespace br
#endif // DETECTORFIDUCIALMARK_H
|
RobertBiehl/VoxelPlugin
|
Source/VoxelEditor/Private/Details/RangeAnalysisDebuggerDetails.cpp
|
// Copyright 2020 Phyronnaz
#include "RangeAnalysisDebuggerDetails.h"
#include "VoxelEditorDetailsUtilities.h"
#include "VoxelNodes/VoxelOptimizationNodes.h"
#include "Widgets/Input/SButton.h"
#include "Widgets/Text/STextBlock.h"
#include "DetailLayoutBuilder.h"
#include "DetailCategoryBuilder.h"
#include "DetailWidgetRow.h"
// Factory used when registering the detail customization: each registration
// receives its own instance, handed to the details panel as a shared ref.
TSharedRef<IDetailCustomization> FRangeAnalysisDebuggerDetails::MakeInstance()
{
	TSharedRef<FRangeAnalysisDebuggerDetails> Instance = MakeShareable(new FRangeAnalysisDebuggerDetails());
	return Instance;
}
// Default constructor — all state (Node, the two buttons) is populated
// lazily in CustomizeDetails().
FRangeAnalysisDebuggerDetails::FRangeAnalysisDebuggerDetails()
{
}
// Builds the custom "Bounds" and "Graph" detail rows for a single selected
// UVoxelNode_RangeAnalysisDebuggerFloat: a Reset button (enabled only once
// min/max have been initialized) and an Update Graph button.
void FRangeAnalysisDebuggerDetails::CustomizeDetails(IDetailLayoutBuilder& DetailLayout)
{
	TArray<TWeakObjectPtr<UObject>> Objects;
	DetailLayout.GetObjectsBeingCustomized(Objects);
	// Only customize single selection; multi-select falls back to defaults.
	if (Objects.Num() != 1)
	{
		return;
	}
	Node = CastChecked<UVoxelNode_RangeAnalysisDebuggerFloat>(Objects[0].Get());
	Node->UpdateFromBin();
	// NOTE(review): the [=] lambdas capture `this` to read the Node member;
	// this presumably relies on the customization outliving its widgets — confirm.
	SAssignNew(ResetButton, SButton)
	.ContentPadding(2)
	.VAlign(VAlign_Center)
	.HAlign(HAlign_Center)
	.OnClicked_Lambda([=]() { if (Node.IsValid()) { Node->Reset(); } return FReply::Handled(); })
	.IsEnabled_Lambda([=]() { return Node.IsValid() && Node->Bins->bMinMaxInit; })
	[
		SNew(STextBlock)
		.Font(IDetailLayoutBuilder::GetDetailFont())
		.Text(VOXEL_LOCTEXT("Reset"))
	];
	SAssignNew(UpdateButton, SButton)
	.ContentPadding(2)
	.VAlign(VAlign_Center)
	.HAlign(HAlign_Center)
	.OnClicked_Lambda([=]() { if (Node.IsValid()) { Node->UpdateGraph(); } return FReply::Handled(); })
	[
		SNew(STextBlock)
		.Font(IDetailLayoutBuilder::GetDetailFont())
		.Text(VOXEL_LOCTEXT("Update"))
	];
	// Place the buttons into their categories as name/value rows.
	DetailLayout.EditCategory("Bounds")
	.AddCustomRow(VOXEL_LOCTEXT("Reset"))
	.NameContent()
	[
		SNew(STextBlock)
		.Font(IDetailLayoutBuilder::GetDetailFont())
		.Text(VOXEL_LOCTEXT("Reset bounds"))
	]
	.ValueContent()
	[
		ResetButton.ToSharedRef()
	];
	DetailLayout.EditCategory("Graph")
	.AddCustomRow(VOXEL_LOCTEXT("Update"))
	.NameContent()
	[
		SNew(STextBlock)
		.Font(IDetailLayoutBuilder::GetDetailFont())
		.Text(VOXEL_LOCTEXT("Update Graph"))
	]
	.ValueContent()
	[
		UpdateButton.ToSharedRef()
	];
}
|
Akirix/akirix-suite
|
api/controllers/user_settings.js
|
<reponame>Akirix/akirix-suite
var _ = require( 'lodash' );
var bcrypt = require( 'bcryptjs' );
var moment = require( 'moment-timezone' );
var db = require( '../models' );
var User = db.User;
var UserSetting = db.UserSetting;
var logger = require( '../lib/akx.logger.js' );
var notifier = require( '../lib/akx.notifier.js' );
var util = require( '../lib/akx.util.js' );
var _this = this;
// Route all controller errors through the shared util handler,
// tagged with the 'user' context for logging.
exports.handleError = function( err, req, res ){
    util.handleError( 'user', err, req, res );
};
exports.index = function( req, res, next ){
UserSetting.findAll( {
where: {
user_id: req.user.id
}
} )
.done( function( err, userSettings ){
if( !!err ){
_this.handleError( err, req, res );
return next();
}
else if( !userSettings ){
res.send( 404, { errors: [ 'User setting not found' ] } );
return next();
}
else{
res.send( 200, { userSettings: userSettings } );
return next();
}
} );
};
exports.view = function( req, res, next ){
req.assert( 'user_setting_id', 'isString' );
if( _.isEmpty( req.validationErrors ) ){
UserSetting.find( {
where: {
id: req.params.user_setting_id,
user_id: req.user.id
}
} )
.done( function( err, userSetting ){
if( !!err ){
_this.handleError( err, req, res );
return next();
}
else if( !userSetting ){
res.send( 404 );
return next();
}
else{
res.send( 200, { userSetting: userSetting } );
return next();
}
} );
}
else{
util.handleValidationErrors( req, res );
return next();
}
};
exports.update = function( req, res, next ){
req.assert( 'user_setting_id', 'isString' );
req.assert( 'userSetting', 'isObject' );
if( _.isEmpty( req.validationErrors ) ){
UserSetting.find( {
where: {
id: req.params.user_setting_id,
user_id: req.user.id
}
} )
.done( function( err, userSetting ){
if( !!err ){
_this.handleError( err, req, res );
return next();
}
else if( !userSetting ){
res.send( 404 );
return next();
}
else{
for( var key in req.body.userSetting ){
if( key !== 'id' && key !== 'user_id' ){
userSetting[ key ] = req.body.userSetting[ key ]
}
}
userSetting.save().done( function( err ){
if( !!err ){
_this.handleError( err, req, res );
return next();
}
else{
logger.log( 'user-setting', 'user: ' + req.user.id + ' updated their user settings', {
req: req,
model: 'user-setting',
model_id: userSetting.id
} );
res.send( 200, { userSetting: userSetting } );
return next();
}
} );
}
} );
}
else{
util.handleValidationErrors( req, res );
return next();
}
};
|
longgangfan/underworld2
|
underworld/libUnderworld/Underworld/SysTest/AnalyticPlugins/Velic_solKz/solKz.c
|
<filename>underworld/libUnderworld/Underworld/SysTest/AnalyticPlugins/Velic_solKz/solKz.c<gh_stars>100-1000
/*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*
** **
** This file forms part of the Underworld geophysics modelling application. **
** **
** For full license and copyright information, please refer to the LICENSE.md file **
** located at the project root, or contact the authors. **
** **
**~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*/
#include "solKz.h"
/* Disabled stand-alone driver: samples the solKz solution on a 101x101
   grid over the unit square and prints every field. Kept for reference;
   enable by changing `#if 0` to `#if 1` and compiling this file alone. */
#if 0
int main( int argc, char **argv )
{
    int i,j;
    double pos[2], vel[2], pressure, total_stress[3], strain_rate[3];
    double x,z;
    for (i=0;i<101;i++){
        for(j=0;j<101;j++){
            x = i/100.0;
            z = j/100.0;
            pos[0] = x;
            pos[1] = z;
            /* sigma=1, km=pi, n=1, B=2.5 — a representative parameter set */
            _Velic_solKz(
                pos,
                1.0,
                (double)M_PI, 1,
                2.5,
                vel, &pressure, total_stress, strain_rate );
            printf("%0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f \n",
                pos[0],pos[1],
                vel[0],vel[1], pressure,
                total_stress[0], total_stress[1], total_stress[2],
                strain_rate[0], strain_rate[1], strain_rate[2] );
        }
        printf("\n");
    }
    return 0;
}
#endif
/*
 * Analytic solKz benchmark solution: Stokes flow driven by the density
 * perturbation rho = -sigma*sin(km*z)*cos(kn*x) under a depth-dependent
 * viscosity Z = exp(2*B*z). Evaluates velocity, pressure, total stress
 * and strain rate at `pos` for the single mode (km, n).
 *
 * The long t1..t171 expressions below are machine-generated closed-form
 * algebra (Maple-style CSE output) — do not hand-edit them.
 *
 * NOTE(review): the parameter name `presssure` is misspelled (harmless in C,
 * arguments are positional); it receives the pressure output.
 */
void _Velic_solKz(
    const double pos[],
    double _sigma, /* density */
    double _km, int _n, /* wavelength in z, wavenumber in x */
    double _B, /* viscosity parameter */
    double vel[], double* presssure,
    double total_stress[], double strain_rate[] )
{
    double Z;
    double u1,u2,u3,u4,u5,u6,SS;
    double sum1,sum2,sum3,sum4,sum5,sum6,sum7,x,z;
    double sigma;
    int n;
    double kn;
    double _C1,_C2,_C3,_C4;
    double B, Rp, UU, VV;
    double rho,a,b,r,_aa,_bb,AA,BB,Rm,km;
    /* Maple-generated temporaries; the sparse numbering (gaps such as t30,
       t32, ...) is an artifact of common-subexpression elimination. */
    double t1,t2,t3,t4,t5,t6,t7,t8,t9,t10;
    double t11,t12,t13,t14,t15,t16,t17,t18,t19,t20;
    double t21,t22,t23,t24,t25,t26,t27,t28,t29,t31;
    double t33,t34,t35,t37,t38,t40,t41,t42,t43,t45;
    double t47,t51,t52,t53,t54,t55,t56,t57,t58,t59;
    double t60,t61,t62,t64,t65,t66,t67,t68,t69,t70;
    double t71,t72,t73,t74,t75,t76,t77,t78,t79,t80;
    double t81,t82,t83,t84,t85,t86,t89,t90,t92,t94;
    double t96,t97,t98,t99,t100,t101,t103,t104,t105,t106;
    double t107,t108,t109,t110,t111,t112,t113,t114,t115,t116;
    double t117,t118,t119,t120,t121,t122,t123,t124,t125,t126;
    double t127,t130,t131,t132,t134,t135,t141,t144,t147,t148;
    double t150,t151,t152,t161,t171;
    /*************************************************************************/
    /*************************************************************************/
    /* rho = -sigma*sin(km*z)*cos(kn*x) */
    /* viscosity Z= exp(2*B*z) */
    B = _B; /* viscosity parameter must be non-zero*/
    km = _km; /* solution valid for km not zero -- should get trivial solution if km=0 */
    n = _n; /* solution valid for n not zero */
    sigma = _sigma;
    /*************************************************************************/
    /*************************************************************************/
    kn = (double) _n*M_PI;
    /* Characteristic roots of the 4th-order ODE in z: Rp +/- i*Rm */
    a = B*B + kn*kn;
    b = 2.0*kn*B;
    r = sqrt(a*a + b*b);
    Rp = sqrt( (r+a)/2.0 );
    Rm = sqrt( (r-a)/2.0 );
    UU = Rp - B;
    VV = Rp + B;
    x = pos[0];
    z = pos[1];
    /* Single-mode evaluation: the sums accumulate just this (km, n) term. */
    sum1=0.0;
    sum2=0.0;
    sum3=0.0;
    sum4=0.0;
    sum5=0.0;
    sum6=0.0;
    sum7=0.0;
    /*******************************************/
    /* calculate the constants */
    /*******************************************/
    /* Particular-solution amplitudes AA, BB and the four homogeneous
       integration constants _C1.._C4 fixed by the boundary conditions. */
    t3 = kn * kn;
    t4 = km * km;
    t6 = B * B;
    t8 = 0.4e1 * t3 * t6;
    t10 = 0.4e1 * t4 * t6;
    t13 = 0.8e1 * kn * t6 * km;
    t14 = t4 * t4;
    t16 = 0.2e1 * t3 * t4;
    t17 = t3 * t3;
    _aa = -0.4e1 * B * km * kn * (t3 + t4) / (t8 + t10 + t13 + t14 + t16 + t17) / (-t13 + t8 + t10 + t14 + t16 + t17);
    t1 = kn * kn;
    t2 = t1 * t1;
    t3 = B * B;
    t5 = 0.4e1 * t1 * t3;
    t6 = km * km;
    t7 = t6 * t6;
    t9 = 0.2e1 * t1 * t6;
    t11 = 0.4e1 * t3 * t6;
    t16 = 0.8e1 * kn * t3 * km;
    _bb = kn * (t2 + t5 + t7 + t9 - t11) / (t5 + t11 + t16 + t7 + t9 + t2) / (-t16 + t5 + t11 + t7 + t9 + t2);
    AA = _aa;
    BB = _bb;
    t1 = B * B;
    t2 = t1 * Rp;
    t4 = Rm * Rm;
    t5 = t4 * Rp;
    t7 = t4 * B;
    t8 = km * km;
    t12 = Rp * Rp;
    t13 = B * t12;
    t21 = 0.8e1 * t1 * km * BB * Rp;
    t23 = 0.2e1 * Rm;
    t24 = cos(t23);
    t26 = Rm * Rp;
    t38 = sin(t23);
    t51 = exp(-0.2e1 * Rp);
    t53 = B + Rp;
    t54 = Rm * t53;
    t55 = Rm * B;
    t57 = 0.2e1 * B * km;
    t58 = t55 + t57 - t26;
    t62 = 0.3e1 * t1;
    t64 = 0.2e1 * Rp * B;
    t65 = t62 + t64 + t4 - t8 - t12;
    t67 = t54 * t65 * BB;
    t69 = Rm - km;
    t70 = cos(t69);
    t72 = -t57 + t55 - t26;
    t77 = Rm + km;
    t78 = cos(t77);
    t81 = t54 * t65 * AA;
    t86 = sin(t77);
    t92 = sin(t69);
    t96 = exp(-t53);
    t98 = B - Rp;
    t99 = Rm * t98;
    t100 = t55 + t57 + t26;
    t104 = t62 - t64 + t4 - t8 - t12;
    t106 = t99 * t104 * BB;
    t109 = -t57 + t55 + t26;
    t116 = t99 * t104 * AA;
    t130 = exp(-0.3e1 * Rp - B);
    t135 = exp(-0.4e1 * Rp);
    t144 = t4 * t1;
    t150 = t4 * t12;
    _C1 = (((0.2e1 * Rp * (0.2e1 * t2 + 0.2e1 * t5 + t7 + B * t8 - 0.3e1 * t1 * B + t13) * AA + t21) * t24 + (-0.2e1 * t26 * (t4 - t8 - t12 + 0.5e1 * t1) * AA + 0.8e1 * B * BB * km * Rm * Rp) * t38 - 0.2e1 * B * (0.2e1 * t13 + t12 * Rp - 0.3e1 * t2 + t5 + 0.2e1 * t7 + t8 * Rp) * AA - t21) * t51 + ((0.2e1 * t54 * t58 * AA + t67) * t70 + (0.2e1 * t54 * t72 * AA - t67) * t78 + (t81 + 0.2e1 * t54 * t72 * BB) * t86 + (t81 - 0.2e1 * t54 * t58 * BB) * t92) * t96 + ((-0.2e1 * t99 * t100 * AA - t106) * t70 + (-0.2e1 * t99 * t109 * AA + t106) * t78 + (-t116 - 0.2e1 * t99 * t109 * BB) * t86 + (-t116 + 0.2e1 * t99 * t100 * BB) * t92) * t130 + 0.4e1 * t4 * t98 * t53 * AA * t135) / (((-0.8e1 * t4 - 0.8e1 * t1) * t12 * t24 + 0.8e1 * t144 + 0.8e1 * t12 * t1) * t51 + (0.4e1 * t150 - 0.4e1 * t144) * t135 + 0.4e1 * t150 - 0.4e1 * t144);
    t1 = Rm * Rp;
    t2 = Rm * Rm;
    t3 = km * km;
    t4 = Rp * Rp;
    t5 = B * B;
    t12 = km * Rm;
    t17 = 0.2e1 * Rm;
    t18 = cos(t17);
    t22 = t2 * Rp;
    t25 = B * t3;
    t26 = t5 * B;
    t33 = t5 * km;
    t38 = sin(t17);
    t40 = Rm * B;
    t41 = 0.3e1 * t5;
    t51 = exp(-0.2e1 * Rp);
    t53 = B + Rp;
    t54 = Rm * t53;
    t57 = t41 + 0.2e1 * Rp * B + t2 - t3 - t4;
    t59 = t54 * t57 * AA;
    t60 = B * km;
    t61 = 0.2e1 * t60;
    t62 = t40 + t61 - t1;
    t67 = Rm - km;
    t68 = cos(t67);
    t70 = -t61 + t40 - t1;
    t75 = Rm + km;
    t76 = cos(t75);
    t82 = t54 * t57 * BB;
    t84 = sin(t75);
    t90 = sin(t67);
    t94 = exp(-t53);
    t97 = 0.3e1 * Rm * t26;
    t98 = t2 * Rm;
    t99 = t98 * B;
    t100 = t3 * Rm;
    t101 = t100 * Rp;
    t103 = Rm * t4 * B;
    t104 = t4 * Rp;
    t105 = Rm * t104;
    t107 = 0.8e1 * t33 * Rp;
    t109 = 0.5e1 * t1 * t5;
    t110 = t98 * Rp;
    t111 = t100 * B;
    t112 = t97 + t99 - t101 + t103 - t105 + t107 + t109 + t110 - t111;
    t114 = t2 * t4;
    t116 = 0.2e1 * t60 * t1;
    t117 = t2 * t5;
    t119 = 0.3e1 * t26 * Rp;
    t120 = t104 * B;
    t121 = t4 * t5;
    t122 = 0.2e1 * t121;
    t123 = t22 * B;
    t125 = 0.2e1 * t33 * Rm;
    t126 = t25 * Rp;
    t127 = t114 + t116 + t117 - t119 + t120 + t122 + t123 + t125 + t126;
    t132 = -t107 + t103 - t105 - t101 + t97 - t111 + t110 + t109 + t99;
    t134 = t120 - t125 + t123 - t116 + t122 + t117 + t114 + t126 - t119;
    t152 = exp(-0.3e1 * Rp - B);
    t161 = exp(-0.4e1 * Rp);
    _C2 = (((0.2e1 * t1 * (t2 - t3 - t4 + 0.5e1 * t5) * AA - 0.8e1 * B * BB * t12 * Rp) * t18 + (0.2e1 * Rp * (0.2e1 * t5 * Rp + 0.2e1 * t22 + t2 * B + t25 - 0.3e1 * t26 + B * t4) * AA + 0.8e1 * t33 * BB * Rp) * t38 + 0.2e1 * t40 * (t41 + t4 + t2 - t3) * AA - 0.8e1 * t5 * BB * t12) * t51 + ((-t59 + 0.2e1 * t54 * t62 * BB) * t68 + (-t59 - 0.2e1 * t54 * t70 * BB) * t76 + (0.2e1 * t54 * t70 * AA - t82) * t84 + (0.2e1 * t54 * t62 * AA + t82) * t90) * t94 + ((t112 * AA - 0.2e1 * t127 * BB) * t68 + (t132 * AA + 0.2e1 * t134 * BB) * t76 + (-0.2e1 * t134 * AA + t132 * BB) * t84 + (-0.2e1 * t127 * AA - t112 * BB) * t90) * t152 + (-0.2e1 * t59 + 0.8e1 * t40 * km * t53 * BB) * t161) / (((-0.8e1 * t2 - 0.8e1 * t5) * t4 * t18 + 0.8e1 * t117 + 0.8e1 * t121) * t51 + (0.4e1 * t114 - 0.4e1 * t117) * t161 + 0.4e1 * t114 - 0.4e1 * t117);
    t1 = B * B;
    t2 = t1 * Rp;
    t4 = Rm * Rm;
    t5 = t4 * Rp;
    t7 = Rp * Rp;
    t8 = B * t7;
    t11 = km * km;
    t13 = t4 * B;
    t21 = 0.8e1 * t1 * km * BB * Rp;
    t23 = 0.2e1 * Rm;
    t24 = cos(t23);
    t26 = Rm * Rp;
    t38 = sin(t23);
    t51 = exp(-0.2e1 * Rp);
    t53 = B + Rp;
    t54 = Rm * t53;
    t55 = Rm * B;
    t57 = 0.2e1 * B * km;
    t58 = t55 + t57 - t26;
    t62 = 0.3e1 * t1;
    t64 = 0.2e1 * Rp * B;
    t65 = t62 + t64 + t4 - t11 - t7;
    t67 = t54 * t65 * BB;
    t69 = Rm - km;
    t70 = cos(t69);
    t72 = -t57 + t55 - t26;
    t77 = Rm + km;
    t78 = cos(t77);
    t81 = t54 * t65 * AA;
    t86 = sin(t77);
    t92 = sin(t69);
    t96 = exp(-t53);
    t98 = B - Rp;
    t99 = Rm * t98;
    t100 = t55 + t57 + t26;
    t104 = t62 - t64 + t4 - t11 - t7;
    t106 = t99 * t104 * BB;
    t109 = -t57 + t55 + t26;
    t116 = t99 * t104 * AA;
    t130 = exp(-0.3e1 * Rp - B);
    t141 = t4 * t1;
    t147 = t4 * t7;
    t151 = exp(-0.4e1 * Rp);
    _C3 = (((-0.2e1 * Rp * (-0.2e1 * t2 - 0.2e1 * t5 + t8 - 0.3e1 * t1 * B + B * t11 + t13) * AA - t21) * t24 + (0.2e1 * t26 * (t4 - t11 - t7 + 0.5e1 * t1) * AA - 0.8e1 * B * BB * km * Rm * Rp) * t38 - 0.2e1 * B * (0.2e1 * t8 + 0.2e1 * t13 + 0.3e1 * t2 - t7 * Rp - t5 - t11 * Rp) * AA + t21) * t51 + ((-0.2e1 * t54 * t58 * AA - t67) * t70 + (-0.2e1 * t54 * t72 * AA + t67) * t78 + (-t81 - 0.2e1 * t54 * t72 * BB) * t86 + (-t81 + 0.2e1 * t54 * t58 * BB) * t92) * t96 + ((0.2e1 * t99 * t100 * AA + t106) * t70 + (0.2e1 * t99 * t109 * AA - t106) * t78 + (t116 + 0.2e1 * t99 * t109 * BB) * t86 + (t116 - 0.2e1 * t99 * t100 * BB) * t92) * t130 + 0.4e1 * t4 * t98 * t53 * AA) / (((-0.8e1 * t4 - 0.8e1 * t1) * t7 * t24 + 0.8e1 * t141 + 0.8e1 * t7 * t1) * t51 + (0.4e1 * t147 - 0.4e1 * t141) * t151 + 0.4e1 * t147 - 0.4e1 * t141);
    t1 = Rm * Rp;
    t2 = Rm * Rm;
    t3 = km * km;
    t4 = Rp * Rp;
    t5 = B * B;
    t12 = km * Rm;
    t17 = 0.2e1 * Rm;
    t18 = cos(t17);
    t22 = t2 * Rp;
    t25 = t5 * B;
    t27 = B * t3;
    t33 = t5 * km;
    t38 = sin(t17);
    t40 = Rm * B;
    t41 = 0.3e1 * t5;
    t51 = exp(-0.2e1 * Rp);
    t53 = t2 * Rm;
    t54 = t53 * B;
    t56 = 0.5e1 * t1 * t5;
    t58 = Rm * t4 * B;
    t59 = t3 * Rm;
    t60 = t59 * Rp;
    t62 = 0.8e1 * t33 * Rp;
    t64 = 0.3e1 * Rm * t25;
    t65 = t53 * Rp;
    t66 = t59 * B;
    t67 = t4 * Rp;
    t68 = Rm * t67;
    t69 = t54 - t56 + t58 + t60 - t62 + t64 - t65 - t66 + t68;
    t71 = t2 * t4;
    t73 = 0.3e1 * t25 * Rp;
    t74 = t2 * t5;
    t75 = t27 * Rp;
    t76 = B * km;
    t78 = 0.2e1 * t76 * t1;
    t80 = 0.2e1 * t33 * Rm;
    t81 = t22 * B;
    t82 = t4 * t5;
    t83 = 0.2e1 * t82;
    t84 = t67 * B;
    t85 = t71 + t73 + t74 - t75 - t78 + t80 - t81 + t83 - t84;
    t89 = Rm - km;
    t90 = cos(t89);
    t92 = t60 - t66 - t65 + t58 + t54 - t56 + t62 + t68 + t64;
    t94 = t73 + t78 - t81 + t74 - t80 - t84 - t75 + t83 + t71;
    t98 = Rm + km;
    t99 = cos(t98);
    t105 = sin(t98);
    t111 = sin(t89);
    t115 = exp(-Rp - B);
    t117 = B - Rp;
    t118 = Rm * t117;
    t121 = t41 - 0.2e1 * Rp * B + t2 - t3 - t4;
    t123 = t118 * t121 * AA;
    t124 = 0.2e1 * t76;
    t125 = t40 + t124 + t1;
    t131 = -t124 + t40 + t1;
    t141 = t118 * t121 * BB;
    t152 = exp(-0.3e1 * Rp - B);
    t171 = exp(-0.4e1 * Rp);
    _C4 = (((-0.2e1 * t1 * (t2 - t3 - t4 + 0.5e1 * t5) * AA + 0.8e1 * B * BB * t12 * Rp) * t18 + (-0.2e1 * Rp * (-0.2e1 * t5 * Rp - 0.2e1 * t22 + t4 * B - 0.3e1 * t25 + t27 + t2 * B) * AA - 0.8e1 * t33 * BB * Rp) * t38 + 0.2e1 * t40 * (t41 + t4 + t2 - t3) * AA - 0.8e1 * t5 * BB * t12) * t51 + ((t69 * AA - 0.2e1 * t85 * BB) * t90 + (t92 * AA + 0.2e1 * t94 * BB) * t99 + (-0.2e1 * t94 * AA + t92 * BB) * t105 + (-0.2e1 * t85 * AA - t69 * BB) * t111) * t115 + ((-t123 + 0.2e1 * t118 * t125 * BB) * t90 + (-t123 - 0.2e1 * t118 * t131 * BB) * t99 + (0.2e1 * t118 * t131 * AA - t141) * t105 + (0.2e1 * t118 * t125 * AA + t141) * t111) * t152 - 0.2e1 * t123 + 0.8e1 * t40 * km * t117 * BB) / (((-0.8e1 * t2 - 0.8e1 * t5) * t4 * t18 + 0.8e1 * t74 + 0.8e1 * t82) * t51 + (0.4e1 * t71 - 0.4e1 * t74) * t171 + 0.4e1 * t71 - 0.4e1 * t74);
    /******************************************************************/
    /******************************************************************/
    /*******************************************/
    /* calculate the velocities etc */
    /*******************************************/
    /* u1..u6: z-profile factors of the solution fields; the x dependence
       (cos/sin of n*pi*x) is applied when accumulating the sums below. */
    t2 = exp(UU * z);
    t3 = Rm * z;
    t4 = cos(t3);
    t6 = sin(t3);
    t11 = exp(-VV * z);
    t18 = exp(-0.2e1 * z * B);
    t19 = km * z;
    t20 = cos(t19);
    t22 = sin(t19);
    u1 = kn * (t2 * (_C1 * t4 + _C2 * t6) + t11 * (_C3 * t4 + _C4 * t6) + t18 * (AA * t20 + BB * t22));
    t1 = Rm * z;
    t2 = cos(t1);
    t4 = sin(t1);
    t14 = exp(UU * z);
    t26 = exp(-VV * z);
    t28 = km * z;
    t29 = cos(t28);
    t31 = sin(t28);
    t43 = exp(-0.2e1 * z * B);
    u2 = (-UU * (_C1 * t2 + _C2 * t4) + _C1 * t4 * Rm - _C2 * t2 * Rm) * t14 + (VV * (_C3 * t2 + _C4 * t4) + _C3 * t4 * Rm - _C4 * t2 * Rm) * t26 + (0.2e1 * B * (AA * t29 + BB * t31) + AA * t31 * km - BB * t29 * km) * t43;
    t2 = 0.2e1 * z * B;
    t3 = exp(t2);
    t4 = t3 * kn;
    t5 = Rm * z;
    t6 = cos(t5);
    t8 = sin(t5);
    t18 = exp(UU * z);
    t31 = exp(-VV * z);
    t34 = km * z;
    t35 = cos(t34);
    t37 = sin(t34);
    t47 = exp(-t2);
    u3 = 0.2e1 * t4 * (UU * (_C1 * t6 + _C2 * t8) - _C1 * t8 * Rm + _C2 * t6 * Rm) * t18 + 0.2e1 * t4 * (-VV * (_C3 * t6 + _C4 * t8) - _C3 * t8 * Rm + _C4 * t6 * Rm) * t31 + 0.2e1 * t4 * (-0.2e1 * B * (AA * t35 + BB * t37) - AA * t37 * km + BB * t35 * km) * t47;
    t1 = Rm * Rm;
    t3 = UU * UU;
    t8 = kn * kn;
    t11 = Rm * z;
    t12 = sin(t11);
    t14 = cos(t11);
    t20 = t14 * Rm;
    t27 = 0.2e1 * z * B;
    t28 = exp(t27);
    t31 = exp(UU * z);
    t38 = VV * VV;
    t54 = exp(-VV * z);
    t56 = km * km;
    t59 = B * B;
    t66 = km * z;
    t67 = sin(t66);
    t69 = cos(t66);
    t83 = exp(-t27);
    u4 = ((_C2 * t1 - t3 * _C2 + 0.2e1 * UU * _C1 * Rm - _C2 * t8) * t12 + _C1 * t14 * t1 - t3 * _C1 * t14 - 0.2e1 * UU * _C2 * t20 - t8 * _C1 * t14) * t28 * t31 + ((-0.2e1 * VV * _C3 * Rm + _C4 * t1 - _C4 * t8 - t38 * _C4) * t12 + 0.2e1 * VV * _C4 * t20 + _C3 * t14 * t1 - t8 * _C3 * t14 - t38 * _C3 * t14) * t28 * t54 + ((BB * t56 - t8 * BB - 0.4e1 * t59 * BB - 0.4e1 * B * AA * km) * t67 + AA * t69 * t56 - t8 * AA * t69 - 0.4e1 * t59 * AA * t69 + 0.4e1 * B * BB * t69 * km) * t28 * t83;
    t1 = Rm * z;
    t2 = sin(t1);
    t3 = Rm * Rm;
    t4 = t3 * Rm;
    t5 = t2 * t4;
    t6 = UU * UU;
    t7 = t6 * UU;
    t8 = cos(t1);
    t15 = 0.2e1 * B * t8 * t3;
    t19 = B * UU;
    t20 = t2 * Rm;
    t23 = kn * kn;
    t24 = B * t23;
    t26 = 0.2e1 * t24 * t8;
    t27 = t23 * UU;
    t29 = B * t6;
    t33 = t23 * t2 * Rm;
    t35 = 0.1e1 / kn;
    t42 = 0.2e1 * B * t2 * t3;
    t43 = t8 * t4;
    t45 = 0.2e1 * t24 * t2;
    t52 = t23 * t8 * Rm;
    t53 = t8 * Rm;
    t64 = 0.2e1 * z * B;
    t65 = exp(t64);
    t68 = exp(UU * z);
    t70 = B * VV;
    t76 = t23 * VV;
    t78 = VV * VV;
    t79 = t78 * VV;
    t84 = B * t78;
    t108 = exp(-VV * z);
    t111 = km * z;
    t112 = sin(t111);
    t113 = km * km;
    t118 = cos(t111);
    t119 = t118 * km;
    t121 = B * B;
    t123 = t112 * km;
    t130 = t113 * km;
    t148 = exp(-t64);
    u5 = (-(-t5 - t7 * t8 + 0.3e1 * UU * t8 * t3 + t15 + 0.3e1 * t6 * t2 * Rm + 0.4e1 * t19 * t20 - t26 + t27 * t8 - 0.2e1 * t29 * t8 - t33) * t35 * _C1 - (-t7 * t2 + t27 * t2 + t42 + t43 - t45 + 0.3e1 * UU * t2 * t3 - 0.2e1 * t29 * t2 + t52 - 0.4e1 * t19 * t53 - 0.3e1 * t6 * t8 * Rm) * t35 * _C2) * t65 * t68 + (-(t15 - 0.4e1 * t70 * t20 - t33 - 0.3e1 * VV * t8 * t3 - t76 * t8 + t79 * t8 + 0.3e1 * t78 * t2 * Rm - 0.2e1 * t84 * t8 - t26 - t5) * t35 * _C3 - (t52 - 0.3e1 * VV * t2 * t3 + t79 * t2 + 0.4e1 * t70 * t53 - 0.3e1 * t78 * t8 * Rm - 0.2e1 * t84 * t2 + t43 - t76 * t2 + t42 - t45) * t35 * _C4) * t65 * t108 - t65 * (-0.4e1 * B * BB * t112 * t113 + t23 * BB * t119 + 0.4e1 * t121 * AA * t123 - 0.4e1 * t121 * BB * t119 + BB * t118 * t130 - AA * t112 * t130 - 0.4e1 * B * AA * t118 * t113 - t23 * AA * t123 - 0.4e1 * t24 * AA * t118 - 0.4e1 * t24 * BB * t112) * t35 * t148;
    t2 = 0.2e1 * z * B;
    t3 = exp(t2);
    t4 = t3 * kn;
    t5 = Rm * z;
    t6 = cos(t5);
    t8 = sin(t5);
    t18 = exp(UU * z);
    t31 = exp(-VV * z);
    t34 = km * z;
    t35 = cos(t34);
    t37 = sin(t34);
    t47 = exp(-t2);
    u6 = -0.2e1 * t4 * (UU * (_C1 * t6 + _C2 * t8) - _C1 * t8 * Rm + _C2 * t6 * Rm) * t18 - 0.2e1 * t4 * (-VV * (_C3 * t6 + _C4 * t8) - _C3 * t8 * Rm + _C4 * t6 * Rm) * t31 - 0.2e1 * t4 * (-0.2e1 * B * (AA * t35 + BB * t37) - AA * t37 * km + BB * t35 * km) * t47;
    /******************************************************************/
    /******************************************************************/
    sum5 += u5*cos(n*M_PI*x); /* pressure */
    u6 -= u5; /* get total stress */
    sum6 += u6*cos(n*M_PI*x); /* xx stress */
    u1 *= cos(n*M_PI*x); /* z velocity */
    sum1 += u1;
    u2 *= sin(n*M_PI*x); /* x velocity */
    sum2 += u2;
    u3 -= u5; /* get total stress */
    u3 *= cos(n*M_PI*x); /* zz stress */
    sum3 += u3;
    u4 *= sin(n*M_PI*x); /* zx stress */
    sum4 += u4;
    rho = -sigma*sin(km*z)*cos(kn*x); /* density */
    sum7 += rho;
    SS = exp(UU*z)*(_C1*cos(Rm*z)+_C2*sin(Rm*z)) +exp(-VV*z)*(_C3*cos(Rm*z)+_C4*sin(Rm*z)) + exp(-2*z*B)*(AA*cos(km*z)+BB*sin(km*z));
    SS *= sin(kn*x); /* stream function */
    //mag=sqrt(u1*u1+u2*u2);
    /*printf("%0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f %0.7f\n",x,z,sum1,sum2,sum3,sum4,sum5,sum6,mag,sum7,SS);*/
    /* Output */
    /* All output pointers are optional; NULL skips that field. */
    if( vel != NULL ) {
        vel[0] = sum2;
        vel[1] = sum1;
    }
    if( presssure != NULL ) {
        (*presssure) = sum5;
    }
    if( total_stress != NULL ) {
        total_stress[0] = sum6;
        total_stress[1] = sum3;
        total_stress[2] = sum4;
    }
    if( strain_rate != NULL ) {
        /* sigma = tau - p, tau = sigma + p, tau[] = 2*eta*strain_rate[] */
        Z = exp( 2.0 * B * z );
        strain_rate[0] = (sum6+sum5)/(2.0*Z);
        strain_rate[1] = (sum3+sum5)/(2.0*Z);
        strain_rate[2] = (sum4)/(2.0*Z);
    }
    /* Value checks, could be cleaned up if needed. <NAME> 9-Oct-2006*/
    // if( fabs( sum5 - ( -0.5*(sum6+sum3) ) ) > 1e-5 ) {
    //     assert(0);
    // }
}
|
SobolSigizmund/httl
|
httl/src/main/java/httl/spi/loggers/MultiLogger.java
|
<gh_stars>100-1000
/*
* Copyright 2011-2013 HTTL Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package httl.spi.loggers;
import httl.spi.Logger;
import httl.util.Version;
/**
 * MultiLogger. (SPI, Singleton, ThreadSafe)
 *
 * <p>Fans every log call out to a configurable list of delegate loggers.
 * Every delegate failure is deliberately swallowed (empty catch blocks):
 * logging must never break the host application.</p>
 *
 * @author <NAME> (<EMAIL>)
 * @see httl.spi.engines.DefaultEngine#setLogger(Logger)
 */
public class MultiLogger implements Logger {

    /** Delegate loggers; every log call is forwarded to each one in order. */
    private Logger[] loggers;

    /**
     * httl.properties: loggers=httl.spi.loggers.Log4jLogger
     */
    public void setLoggers(Logger[] loggers) {
        this.loggers = loggers;
    }

    /**
     * Auto-detects a logging backend when none was configured.
     * Probe order: slf4j, jcl, log4j, jdk, then the built-in SimpleLogger.
     * Instantiating a missing backend throws (e.g. NoClassDefFoundError),
     * which falls through to the next candidate.
     */
    public void init() {
        if (loggers == null) {
            try {
                setLoggers(new Logger[]{new Slf4jLogger()});
                info("Using slf4j logger for httl.");
            } catch (Throwable e1) {
                try {
                    setLoggers(new Logger[]{new JclLogger()});
                    info("Using jcl logger for httl.");
                } catch (Throwable e2) {
                    try {
                        setLoggers(new Logger[]{new Log4jLogger()});
                        info("Using log4j logger for httl.");
                    } catch (Throwable e3) {
                        try {
                            setLoggers(new Logger[]{new JdkLogger()});
                            info("Using jdk logger for httl.");
                        } catch (Throwable e4) {
                            // Last resort: always available, so no probe message.
                            setLoggers(new Logger[]{new SimpleLogger()});
                        }
                    }
                }
            }
        }
    }

    /** Builds the log text from a throwable's message (empty if null). */
    private String appendEnvInfo(Throwable e) {
        return appendEnvInfo(e == null ? "" : e.getMessage());
    }

    /** Appends httl version, JVM version and OS details to the message. */
    private String appendEnvInfo(String msg) {
        return msg + ", httl: " + Version.getVersion() + ", jvm: " + System.getProperty("java.version")
        + ", os: " + System.getProperty("os.name") + " " + System.getProperty("os.version") + " " + System.getProperty("os.arch");
    }

    public void trace(String msg, Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.trace(appendEnvInfo(msg), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed: logging must never throw
        }
    }

    public void trace(Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.trace(appendEnvInfo(e), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void trace(String msg) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.trace(appendEnvInfo(msg));
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void debug(String msg, Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.debug(appendEnvInfo(msg), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void debug(Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.debug(appendEnvInfo(e), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void debug(String msg) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.debug(appendEnvInfo(msg));
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void info(String msg, Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.info(appendEnvInfo(msg), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void info(Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.info(appendEnvInfo(e), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void info(String msg) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.info(appendEnvInfo(msg));
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void warn(String msg, Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.warn(appendEnvInfo(msg), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void warn(Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.warn(appendEnvInfo(e), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void warn(String msg) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.warn(appendEnvInfo(msg));
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void error(String msg, Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.error(appendEnvInfo(msg), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void error(Throwable e) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.error(appendEnvInfo(e), e);
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    public void error(String msg) {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    logger.error(appendEnvInfo(msg));
                }
            }
        } catch (Throwable t) { // intentionally swallowed
        }
    }

    /** True if any delegate has trace enabled; false on any failure. */
    public boolean isTraceEnabled() {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    if (logger.isTraceEnabled()) {
                        return true;
                    }
                }
            }
            return false;
        } catch (Throwable t) {
            return false;
        }
    }

    /** True if any delegate has debug enabled; false on any failure. */
    public boolean isDebugEnabled() {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    if (logger.isDebugEnabled()) {
                        return true;
                    }
                }
            }
            return false;
        } catch (Throwable t) {
            return false;
        }
    }

    /** True if any delegate has info enabled; false on any failure. */
    public boolean isInfoEnabled() {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    if (logger.isInfoEnabled()) {
                        return true;
                    }
                }
            }
            return false;
        } catch (Throwable t) {
            return false;
        }
    }

    /** True if any delegate has warn enabled; false on any failure. */
    public boolean isWarnEnabled() {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    if (logger.isWarnEnabled()) {
                        return true;
                    }
                }
            }
            return false;
        } catch (Throwable t) {
            return false;
        }
    }

    /** True if any delegate has error enabled; false on any failure. */
    public boolean isErrorEnabled() {
        try {
            if (loggers != null) {
                for (Logger logger : loggers) {
                    if (logger.isErrorEnabled()) {
                        return true;
                    }
                }
            }
            return false;
        } catch (Throwable t) {
            return false;
        }
    }
}
|
Killarexe/Negative-N-Forge-Version
|
src/main/java/net/killarexe/negativen/world/CaveGameGameRule.java
|
package net.killarexe.negativen.world;
import net.minecraftforge.fml.common.ObfuscationReflectionHelper;
import net.minecraft.world.GameRules;
import net.killarexe.negativen.NegativeNModElements;
import java.lang.reflect.Method;
@NegativeNModElements.ModElement.Tag
public class CaveGameGameRule extends NegativeNModElements.ModElement {
    /** Boolean "caveGame" game rule in the UPDATES category, defaulting to false. */
    public static final GameRules.RuleKey<GameRules.BooleanValue> gamerule = GameRules.register("caveGame", GameRules.Category.UPDATES,
            create(false));

    public CaveGameGameRule(NegativeNModElements instance) {
        // NOTE(review): 960 appears to be this mod element's ordering index — confirm.
        super(instance, 960);
    }

    /**
     * Creates a boolean rule type via reflection, since the vanilla factory
     * method is not public. "func_223568_b" is presumably the obfuscated
     * (SRG) name of GameRules.BooleanValue's create(boolean) — TODO confirm
     * against the mappings for this Minecraft version.
     *
     * @param defaultValue initial value of the game rule
     * @return the rule type, or null if reflection fails (stack trace is printed)
     */
    public static GameRules.RuleType<GameRules.BooleanValue> create(boolean defaultValue) {
        try {
            Method createGameruleMethod = ObfuscationReflectionHelper.findMethod(GameRules.BooleanValue.class, "func_223568_b", boolean.class);
            createGameruleMethod.setAccessible(true);
            return (GameRules.RuleType<GameRules.BooleanValue>) createGameruleMethod.invoke(null, defaultValue);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
vovkos/axl
|
src/axl_sec/axl_sec_Error.cpp
|
<reponame>vovkos/axl
//..............................................................................
//
// This file is part of the AXL library.
//
// AXL is distributed under the MIT license.
// For details see accompanying license.txt file,
// the public copy of which is also available at:
// http://tibbo.com/downloads/archive/axl/license.txt
//
//..............................................................................
#include "pch.h"
#include "axl_sec_Error.h"
#include "axl_cf_String.h"
namespace axl {
namespace sec {
//..............................................................................
// Returns a human-readable description for a macOS Security framework status
// code, falling back to "OSStatus(<code>)" when no message is available.
sl::StringRef
ErrorProvider::getErrorDescription(OSStatus status) {
	// SecCopyErrorMessageString follows the CF "Copy" rule, so we own the
	// returned CFStringRef; the `true` flag presumably transfers that
	// ownership to cf::String — confirm against the cf::String constructor.
	CFStringRef description = ::SecCopyErrorMessageString(status, NULL);
	sl::String string = cf::String(description, true).getString();
	return !string.isEmpty() ? string : sl::formatString("OSStatus(%d)", status);
}
//..............................................................................
} // namespace sec
} // namespace axl
|
zhangaozhi/orange-admin
|
orange-demo-single/orange-demo-single-service/common/common-core/src/main/java/com/orangeforms/common/core/annotation/RelationConstDict.java
|
package com.orangeforms.common.core.annotation;
import java.lang.annotation.*;
/**
 * Marks the association between a Model field and a constant dictionary.
 *
 * @author Jerry
 * @date 2020-09-24
 */
@Target({ElementType.FIELD, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface RelationConstDict {
    /**
     * Name of the associated id field on the current object.
     *
     * @return name of the associated id field on the current object.
     */
    String masterIdField();

    /**
     * Class object of the associated constant dictionary.
     *
     * @return Class object of the associated constant dictionary.
     */
    Class<?> constantDictClass();
}
|
cincodenada/ember-template-lint
|
test/helpers/run.js
|
const execa = require('execa');
module.exports = function run(args, options = {}) {
options.reject = false;
options.cwd = options.cwd || process.cwd();
return execa(
process.execPath,
[require.resolve('../../bin/ember-template-lint.js'), ...args],
options
);
};
|
eivinwi/designsystem
|
packages/ffe-account-selector-react/src/components/account-selector/index.js
|
<reponame>eivinwi/designsystem<gh_stars>100-1000
import AccountSelector from './AccountSelector';
export default AccountSelector;
|
ngocdaothanh/openkh
|
modules/core/tag/app/controllers/tags_controller.rb
|
<gh_stars>1-10
# CRUD controller for tags. Only +show+ is public; all other actions are
# restricted to admins by the before_filter.
class TagsController < ApplicationController
  before_filter :check_admin, :except => ['show']

  def show
    if mod[:category].nil? and params[:tag] != '*'
      redirect_to(root_path)
    else
      render(:text => '', :layout => 'application')
    end
  end

  # Admin ----------------------------------------------------------------------

  def index
  end

  def new
    @tag = Tag.new
  end

  def create
    @tag = Tag.new(params[:tag])
    if @tag.save
      redirect_to(tags_path)
    else
      render(:action => 'new')
    end
  end

  def edit
    @tag = Tag.find(params[:id])
  end

  def update
    @tag = Tag.find(params[:id])
    # Fix: update_attributes already saves and returns true/false; the old
    # code ignored its result and called @tag.save again, saving twice on
    # success.
    if @tag.update_attributes(params[:tag])
      redirect_to(tags_path)
    else
      render(:action => 'edit')
    end
  end

  def destroy
    Tag.destroy(params[:id])
    redirect_to(tags_path)
  end
end
|
jazzl0ver/jmxeval
|
src/main/java/com/adahas/tools/jmxeval/model/impl/Check.java
|
<reponame>jazzl0ver/jmxeval<gh_stars>10-100
package com.adahas.tools.jmxeval.model.impl;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.w3c.dom.Node;
import com.adahas.tools.jmxeval.Context;
import com.adahas.tools.jmxeval.exception.JMXEvalException;
import com.adahas.tools.jmxeval.model.Element;
import com.adahas.tools.jmxeval.model.PerfDataSupport;
import com.adahas.tools.jmxeval.response.EvalResult;
import com.adahas.tools.jmxeval.response.Status;
import com.adahas.tools.jmxeval.util.NagiosRange;
/**
 * Element to perform Nagios style checks on variables.
 *
 * Reads a variable from the execution context and grades it as
 * OK / WARNING / CRITICAL / UNKNOWN, either by Nagios range semantics
 * (default mode) or by regular-expression matching (regex mode).
 */
public class Check extends Element implements PerfDataSupport {

  /**
   * Supported modes for check
   */
  enum Mode {
    DEFAULT,
    REGEX
  }

  /**
   * Eval name
   */
  private final Field eval;

  /**
   * Variable name
   */
  private final Field var;

  /**
   * Critical value/level
   */
  private final Field critical;

  /**
   * Warning value/level
   */
  private final Field warning;

  /**
   * Output message template
   */
  private final Field message;

  /**
   * Critical/Warning level match mode
   */
  private final Field mode;

  /**
   * Constructs the element
   *
   * @param context Execution context
   * @param node Related XML configuration node
   * @param parentElement Parent element (must be an Eval; its name is reused)
   */
  public Check(final Context context, final Node node, final Element parentElement) {
    super(context);

    this.eval = ((Eval) parentElement).getName();
    this.var = getNodeAttr(node, "useVar");
    this.critical = getNodeAttr(node, "critical");
    this.warning = getNodeAttr(node, "warning");
    this.message = getNodeAttr(node, "message");
    this.mode = getNodeAttr(node, "mode", "default"); // defaults to DEFAULT mode
  }

  /**
   * @see Element#process()
   */
  @Override
  public void process() throws JMXEvalException {
    Status status;
    final Object valueToCheck = context.getVar(var.get());
    // a missing variable cannot be graded, so report UNKNOWN
    if (valueToCheck == null) {
      status = Status.UNKNOWN;
    } else {
      status = getStatus(valueToCheck, critical.get(), warning.get(), Mode.valueOf(mode.get().toUpperCase(Locale.ENGLISH)));
    }

    // set results to context
    context.getResponse().addEvalResult(new EvalResult(eval.get(), status, message.get()));

    // process child elements
    super.process();
  }

  /**
   * Get the status given a check result value
   *
   * @param value Value to check
   * @param criticalLevel Critical value level
   * @param warningLevel Warning value level
   * @param mode Check mode
   * @return Status of the check
   * @throws JMXEvalException if evaluation fails
   */
  protected Status getStatus(final Object value, final String criticalLevel, final String warningLevel, final Mode mode) throws JMXEvalException {
    Status resultStatus;
    if (mode.equals(Mode.REGEX)) {
      resultStatus = getStatusInRegExMode(value, criticalLevel, warningLevel);
    } else {
      resultStatus = getStatusInDefaultMode(value, criticalLevel, warningLevel);
    }
    return resultStatus;
  }

  /**
   * Get the status given a check result value based on regex mode
   *
   * @param value Value to check
   * @param criticalLevel Critical value level (regex)
   * @param warningLevel Warning value level (regex)
   * @return Status of the check
   */
  protected Status getStatusInRegExMode(final Object value, final String criticalLevel, final String warningLevel) {
    Status resultStatus = null;
    if (criticalLevel != null) {
      // critical level is checked first and takes precedence
      final Pattern pattern = Pattern.compile(criticalLevel);
      final Matcher matcher = pattern.matcher(value.toString());
      if (matcher.matches()) {
        resultStatus = Status.CRITICAL;
      }
    }
    if (warningLevel != null && resultStatus == null) {
      // warning level (only if critical status was not already set)
      final Pattern pattern = Pattern.compile(warningLevel);
      final Matcher matcher = pattern.matcher(value.toString());
      if (matcher.matches()) {
        resultStatus = Status.WARNING;
      }
    }
    // return OK if nothing matched
    if (resultStatus == null) {
      resultStatus = Status.OK;
    }
    return resultStatus;
  }

  /**
   * Get the status given a check result value based on default mode
   *
   * @param value Value to check
   * @param criticalLevel Critical value level
   * @param warningLevel Warning value level
   * @return Status of the check
   * @throws JMXEvalException if evaluation fails
   */
  protected Status getStatusInDefaultMode(final Object value, final String criticalLevel, final String warningLevel) throws JMXEvalException {
    Status resultStatus = Status.OK;
    // Use exact string comparison when either level is missing or the value
    // is not numeric; otherwise fall through to Nagios range checking.
    if (criticalLevel == null || warningLevel == null || !(value instanceof Number)) {
      // give critical level higher priority
      if (criticalLevel != null && criticalLevel.equals(value.toString())) {
        resultStatus = Status.CRITICAL;
      } else if (warningLevel != null && warningLevel.equals(value.toString())) {
        resultStatus = Status.WARNING;
      }
    } else {
      resultStatus = getStatusByRangeCheck(value, criticalLevel, warningLevel);
    }
    // returns OK if nothing matched
    return resultStatus;
  }

  /**
   * Get the status given a check result value within ranges (only for numerical values)
   *
   * @param value Value to check
   * @param criticalLevel Critical value level (Nagios range expression)
   * @param warningLevel Warning value level (Nagios range expression)
   * @return Status of the check
   * @throws JMXEvalException if evaluation fails
   */
  protected Status getStatusByRangeCheck(final Object value, final String criticalLevel, final String warningLevel) throws JMXEvalException {
    Status resultStatus = Status.OK;
    // range check for numerics; critical takes precedence over warning
    final Double doubleValue = ((Number) value).doubleValue();
    final NagiosRange criticalRange = new NagiosRange(criticalLevel);
    final NagiosRange warningRange = new NagiosRange(warningLevel);
    if (!criticalRange.isInRange(doubleValue)) {
      resultStatus = Status.CRITICAL;
    } else if (!warningRange.isInRange(doubleValue)) {
      resultStatus = Status.WARNING;
    }
    return resultStatus;
  }

  /**
   * @see PerfDataSupport#getVar()
   */
  @Override
  public Field getVar() {
    return var;
  }

  /**
   * @see PerfDataSupport#getCritical()
   */
  @Override
  public Field getCritical() {
    return critical;
  }

  /**
   * @see PerfDataSupport#getWarning()
   */
  @Override
  public Field getWarning() {
    return warning;
  }
}
|
axonepro/sdk-ooti
|
tests/test_email.py
|
<gh_stars>1-10
import unittest
from requests.models import Response
from test_helper import HelperTest
from factories.factories import TeamFactory
import random
import string
import time
import os
import sys
from dotenv import load_dotenv
PACKAGE_PARENT = '..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
from resources import ooti # noqa E402
# Loading environment variables (stored in .env file)
load_dotenv()
OOTI_AUTH = os.getenv("OOTI_AUTH")
OOTI_PASSWORD = os.getenv("OOTI_PASSWORD")
my_account = ooti.OotiAPI(OOTI_AUTH, OOTI_PASSWORD)
my_account.connect()
team_pk = TeamFactory()
currency_pk = my_account.Currencies.get_currencies_list()['data'][0]['pk']
project_pk = my_account.Projects.get_projects_list()['data'][0]['id']
class TestEmails(unittest.TestCase):
    """Integration tests for the OOTI Emails API (templates and SMTP configs).

    NOTE(review): setUp/tearDown are declared as classmethods although
    unittest runs them once per test; each test therefore gets fresh
    email/smtp fixtures, and tearDown deletes them again even when the
    test already deleted them — presumably the API tolerates deleting a
    missing resource. Confirm and consider plain instance methods.
    """

    @classmethod
    def setUp(cls):
        # Create one email template and one SMTP config per test.
        testHelper = HelperTest(my_account)
        cls.email_pk = testHelper._create_email_return_pk()
        cls.smtp_pk = testHelper._create_email_smtp_return_pk()

    ### Classic ###

    def test_get_emails_list(self):
        """ Test that 200 is returned """
        res = my_account.Emails.get_emails_list()
        self.assertEqual(res['status'], 200)

    def test_create_email(self):
        """ Test that 201 is returned """
        email = {
            "name": "UNITTEST",
            "email_subject": "UNITTEST",
            "email_body": "UNITTEST",
            "email_to": "<EMAIL>",
            "email_from": "<EMAIL>",
            "name_from": "<NAME>"
        }
        res_creation = my_account.Emails.create_email(email)
        # Clean up the extra email created by this test before asserting.
        my_account.Emails.delete_email(res_creation['data']['id'])
        self.assertEqual(res_creation['status'], 201)

    def test_get_emails_details(self):
        """ Test that 200 is returned """
        res = my_account.Emails.get_email_details(self.email_pk)
        my_account.Emails.delete_email(self.email_pk)
        self.assertEqual(res['status'], 200)

    def test_update_email(self):
        """ Test that 200 is returned """
        data = {'name': 'UNITTEST - update'}
        res = my_account.Emails.update_email(self.email_pk, data)
        my_account.Emails.delete_email(self.email_pk)
        self.assertEqual(res['status'], 200)

    def test_delete_email(self):
        """ Test that 204 is returned """
        res = my_account.Emails.delete_email(self.email_pk)
        self.assertEqual(res['status'], 204)

    def test_send_test_email(self):
        """ Test that 200 is returned """
        res = my_account.Emails.send_test_email(self.email_pk)
        my_account.Emails.delete_email(self.email_pk)
        self.assertEqual(res['status'], 200)

    def test_apply_email(self):
        """ Test that 200 is returned """
        res = my_account.Emails.apply_email(self.email_pk)
        self.assertEqual(res['status'], 200)

    ### smtp ###

    def test_get_emails_smtp(self):
        """ Test that 200 is returned """
        res = my_account.Emails.get_emails_smtp_list()
        self.assertEqual(res['status'], 200)

    def test_create_email_smtp(self):
        """ Test that 201 is returned """
        data = {
            "from_name": "UNITTEST",
            "from_email": "UNITTEST",
            "username": "UNITTEST",
            "password": "<PASSWORD>",
            "protocol": "TLS",
            "host": "UNITTEST",
            "port": 0
        }
        res = my_account.Emails.create_email_smtp(data)
        my_account.Emails.delete_email_smtp(res['data']['id'])
        self.assertEqual(res['status'], 201)

    def test_get_email_smtp_details(self):
        """ Test that 200 is returned """
        res = my_account.Emails.get_email_smtp_details(self.smtp_pk)
        my_account.Emails.delete_email_smtp(self.smtp_pk)
        self.assertEqual(res['status'], 200)

    def test_update_email_smtp(self):
        """ Test that 200 is returned """
        data = {"from_name": "UNITTEST - Update"}
        res = my_account.Emails.update_email_smtp(self.smtp_pk, data)
        self.assertEqual(res['status'], 200)

    def test_delete_email_smtp(self):
        """ Test that 204 is returned """
        res = my_account.Emails.delete_email_smtp(self.smtp_pk)
        self.assertEqual(res['status'], 204)

    def test_send_test_email_smtp(self):
        """ Test that 200 is returned """
        res = my_account.Emails.send_test_email_smtp(self.smtp_pk)
        self.assertEqual(res['status'], 200)

    @classmethod
    def tearDown(cls):
        # Best-effort cleanup; may re-delete resources a test already removed.
        my_account.Emails.delete_email(cls.email_pk)
        my_account.Emails.delete_email_smtp(cls.smtp_pk)
if __name__ == '__main__':
unittest.main()
|
xana43/CodeLyokoMod-1.12.2
|
OriginalMappings/com/Ultra_Nerd/CodeLyokoLegacy/world/WorldGen/Common/CustomGenSettings.java
|
<filename>OriginalMappings/com/Ultra_Nerd/CodeLyokoLegacy/world/WorldGen/Common/CustomGenSettings.java
package com.Ultra_Nerd.CodeLyokoLegacy.world.WorldGen.Common;

/**
 * Immutable world-generation settings: a base terrain height plus vertical
 * and horizontal variance factors used by the custom chunk generator.
 */
public record CustomGenSettings(int baseHeight, float verticalVariance, float horizontalVariance) {
}
|
gurisxie/weex-test
|
src/h5-render/src/components/embed.js
|
<filename>src/h5-render/src/components/embed.js
'use strict'
var Component = require('./component')
var utils = require('../utils')
var ID_PREFIX = 'weex_embed_'
// Build a unique DOM id for an embedded weex instance.
function _generateId () {
  var suffix = utils.getRandom(10)
  return ID_PREFIX + suffix
}
// Embed component: hosts a nested weex instance inside the page.
// Reads the bundle source and loader type from the element's attributes.
function Embed (data, nodeType) {
  var attr = data.attr
  if (attr) {
    this.source = attr.src
    // NOTE(review): `attr.loade` looks like a typo for `attr.loader` — with
    // the current spelling the loader always falls back to 'xhr'. Confirm
    // against callers before changing.
    this.loader = attr.loade || 'xhr'
  }
  Component.call(this, data, nodeType)
  this.initWeex()
}
Embed.prototype = Object.create(Component.prototype)

// Build the host element; overflow:scroll lets the embedded content scroll.
Embed.prototype.create = function () {
  var node = document.createElement('div')
  node.id = this.id
  node.style.overflow = 'scroll'
  // node.classList.add('weex-container')
  return node
}
// Boot a fresh weex instance rooted at this component's node. A new id is
// generated each time so reloads never collide with the previous instance.
Embed.prototype.initWeex = function () {
  this.id = _generateId()
  this.node.id = this.id
  var config = {
    appId: this.id,
    source: this.source,
    bundleUrl: this.source,
    loader: this.loader,
    width: this.node.getBoundingClientRect().width,
    rootId: this.id
  }
  window.weex.init(config)
}
// Tear down the embedded weex instance and clear its DOM subtree.
Embed.prototype.destroyWeex = function () {
  this.id && window.destroyInstance(this.id)
  // TODO: unbind events and clear doms.
  this.node.innerHTML = ''
}
// Restart the embedded instance: full teardown followed by a fresh boot.
Embed.prototype.reloadWeex = function () {
  this.destroyWeex()
  this.initWeex()
}

// src is not updatable temporarily
// Embed.prototype.attr = {
//   src: function (value) {
//     this.src = value
//     this.reloadWeex()
//   }
// }

module.exports = Embed
|
dalaoque/rc-bmap
|
src/constants/StatusCode.js
|
<filename>src/constants/StatusCode.js
// Geolocation status-code constants, presumably mirroring the Baidu Maps
// (BMap) StatusCode values — confirm against the BMap JS API docs.
export default {
  PERMISSION_DENIED: 'BMAP_STATUS_PERMISSION_DENIED',
  SERVICE_UNAVAILABLE: 'BMAP_STATUS_SERVICE_UNAVAILABLE',
  TIMEOUT: 'BMAP_STATUS_TIMEOUT',
};
|
pmarceaujr/CodeCheatSheet
|
controllers/api/snippetsRoutes.js
|
<filename>controllers/api/snippetsRoutes.js
const router = require('express').Router();
const withAuth = require('../../utils/auth');
const { Snippets } = require('../../models');

// Create a new snippet under the topic stored in the session, then return
// to that topic's page.
router.post('/newSnippet', withAuth, async (req, res) => {
  try {
    await Snippets.create({
      topic_id: req.session.topic_id,
      snippet_text: req.body.snippet,
      user_id: req.session.user_id,
    });
    res.redirect('/api/topics/topics/' + req.session.topic_id);
  } catch (err) {
    console.log(err);
    res.status(500).json(err);
  }
});

// Delete a snippet by id, then return to the current topic page.
// Fixes: the handler previously had no error handling (an async failure
// became an unhandled rejection with no response sent), passed a
// meaningless `order` option to destroy(), and left the result unused.
router.get('/delete/:id', withAuth, async (req, res) => {
  try {
    await Snippets.destroy({
      where: {
        id: req.params.id,
      },
    });
    res.redirect('/api/topics/topics/' + req.session.topic_id);
  } catch (err) {
    console.log(err);
    res.status(500).json(err);
  }
});

module.exports = router;
|
bcgov/EDUC-GRAD-TRAX-API
|
api/src/main/java/ca/bc/gov/educ/api/trax/model/dto/GradCountry.java
|
<reponame>bcgov/EDUC-GRAD-TRAX-API<filename>api/src/main/java/ca/bc/gov/educ/api/trax/model/dto/GradCountry.java<gh_stars>0
package ca.bc.gov.educ.api.trax.model.dto;
import lombok.Data;
import org.springframework.stereotype.Component;

/**
 * DTO for a country record: its code, display name, and SRB country code.
 * Note: the explicit toString() below takes precedence over the one Lombok's
 * {@code @Data} would otherwise generate.
 */
@Data
@Component
public class GradCountry {

    private String countryCode;
    private String countryName;
    private String srbCountryCode;

    @Override
    public String toString() {
        return "GradCountry [countryCode=" + countryCode + ", countryName=" + countryName + ", srbCountryCode="
                + srbCountryCode + "]";
    }
}
|
betagouv/ecosante
|
ecosante/pages/blueprint.py
|
<reponame>betagouv/ecosante
from flask.globals import current_app
from ecosante.tasks.inscriptions_patients import inscription_patients_task
from flask import (
redirect,
render_template,
request
)
from dataclasses import asdict
from ecosante.utils import Blueprint
from ecosante.utils.decorators import admin_capability_url, webhook_capability_url
from datetime import date, timedelta
from ecosante.newsletter.models import NewsletterDB, Recommandation
from sentry_sdk import capture_event
from indice_pollution import forecast, episodes, raep, availability
from indice_pollution.history.models import PotentielRadon
bp = Blueprint("pages", __name__, url_prefix='/')
@bp.route('/')
def redirection_index():
    """Permanently redirect the root URL to the public Recosanté site."""
    return redirect("https://recosante.beta.gouv.fr/", code=301)
@bp.route('/admin/<secret_slug>')
@bp.route('/admin/')
@admin_capability_url
def admin():
    """Render the admin dashboard with yesterday's and today's review counts.

    NOTE(review): the first route supplies a ``secret_slug`` argument that this
    function does not accept — presumably the ``admin_capability_url``
    decorator consumes it before dispatch; confirm, otherwise that route
    raises a TypeError.
    """
    # Newsletters that received feedback ("avis") yesterday.
    count_avis_hier = NewsletterDB.query\
        .filter(
            NewsletterDB.avis.isnot(None),
            NewsletterDB.date==date.today() - timedelta(days=1))\
        .count()
    # Newsletters that received feedback today.
    count_avis_aujourdhui = NewsletterDB.query\
        .filter(
            NewsletterDB.avis.isnot(None),
            NewsletterDB.date==date.today())\
        .count()
    return render_template("admin.html", count_avis_hier=count_avis_hier, count_avis_aujourdhui=count_avis_aujourdhui)
@bp.route('<secret_slug>/sib_error', methods=['POST'])
@webhook_capability_url
def sib_error(secret_slug):
    """Webhook endpoint: forward Sendinblue error payloads to Sentry."""
    capture_event(request.json)
    return {"body": "ok"}
@bp.route('/inscription-patients', methods=['POST'])
def inscription_patients():
    """Queue the asynchronous signup of a doctor's patients (fire-and-forget)."""
    inscription_patients_task.delay(
        request.json['nom_medecin'],
        request.json['mails']
    )
    return '"ok"'
@bp.route('/city-availability')
def city_availability():
    """Tell whether pollution data is available for the given INSEE city code.

    Returns ``{"availability": false}`` with HTTP 404 when the ``insee``
    query parameter is missing.
    """
    insee = request.args.get('insee')
    if not insee:
        # Bug fix: this response tuple was previously built as a bare
        # expression and never returned, so the missing-parameter case fell
        # through to availability(None).
        return {"availability": False}, 404
    return {"availability": availability(insee)}
@bp.route('/data')
def data():
    """Aggregate today's air-quality data for one city (``insee`` query param).

    Combines forecast, pollution episodes, pollen risk (RAEP), radon
    potential and the first relevant recommendation into one JSON payload.
    """
    d = date.today()
    insee = request.args.get('insee')
    f = forecast(insee, d)
    ep = episodes(insee, d)
    r = raep(insee)
    # Map pollutant codes to names for today's active episodes only.
    polluants = [
        {
            '1': 'dioxyde_soufre',
            '5': 'particules_fines',
            '7': 'ozone',
            '8': 'dioxyde_azote',
        }.get(str(e['code_pol']), f'erreur: {e["code_pol"]}')
        for e in ep['data']
        if e['etat'] != 'PAS DE DEPASSEMENT'\
        and 'date' in e\
        and e['date'] == str(d)
    ]
    # Keep only recommendations relevant to today's indice and pollutants.
    reco = [
        v
        for v in Recommandation.published_query().all()
        if v.is_relevant(None, f['data'][0]['indice'], polluants, 0, d)
    ] if f['data'] else []
    return {
        "forecast": f['data'][0] if f['data'] else [],
        "episode": ep['data'][0] if ep['data'] else [],
        "recommandation": {k: v for k, v in (asdict(reco[0]) if reco else {}).items() if k in ["precisions", "recommandation"]},
        "raep": r.get('data'),
        # NOTE(review): two-argument getattr raises AttributeError if
        # PotentielRadon.get(insee) returns None — confirm get() never does.
        "potentiel_radon": getattr(PotentielRadon.get(insee), 'classe_potentiel'),
        "metadata": f['metadata']
    }
@bp.route('/recommandation-episodes-pollution')
def recommandation_episode_pollution():
    """Render pollution-episode advice for a population and pollutant list."""
    # Human-readable French labels for pollutant codes; unknown codes pass through.
    nom_polluants = {
        "o3": "à l’Ozone (O3)",
        "pm10": "aux particules fines (PM10)",
        "no2": "au dioxyde d’azote (NO2)",
        "so2": "au dioxyde de soufre (SO2)"
    }
    polluants = [nom_polluants.get(p.lower(), p) for p in request.args.getlist('polluants')]
    return render_template(
        "recommandation-episodes-pollution.html",
        population=request.args.get('population'),
        polluants=polluants
    )
@bp.route('/_application_server_key')
def vapid_public_key():
    """Expose the VAPID public key used for Web Push subscriptions."""
    return {"application_server_key": current_app.config['APPLICATION_SERVER_KEY']}
|
onap/oom-platform-cert-service
|
certServicePostProcessor/src/main/java/org/onap/oom/certservice/postprocessor/CertificatePostProcessor.java
|
<filename>certServicePostProcessor/src/main/java/org/onap/oom/certservice/postprocessor/CertificatePostProcessor.java
/*============LICENSE_START=======================================================
* oom-truststore-merger
* ================================================================================
* Copyright (C) 2020 Nokia. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
*/
package org.onap.oom.certservice.postprocessor;
import org.onap.oom.certservice.postprocessor.common.FileTools;
import org.onap.oom.certservice.postprocessor.configuration.AppConfigurationLoader;
import org.onap.oom.certservice.postprocessor.configuration.model.AppConfiguration;
import org.onap.oom.certservice.postprocessor.copier.KeystoreCopier;
import org.onap.oom.certservice.postprocessor.merger.TruststoreMerger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Post-processing pipeline run after certificate provisioning: loads the
 * app configuration, merges truststores, then copies keystores.
 */
class CertificatePostProcessor implements Runnable {

    private static final Logger LOGGER = LoggerFactory.getLogger(CertificatePostProcessor.class);

    private AppConfigurationLoader config = new AppConfigurationLoader();
    private TruststoreMerger merger = new TruststoreMerger();
    private KeystoreCopier copier = new KeystoreCopier(new FileTools());

    // Steps run in a fixed order; any failure propagates to the caller.
    public void run() {
        LOGGER.debug("Loading configuration...");
        AppConfiguration configuration = config.loadConfiguration();
        LOGGER.debug("Starting TruststoreMerger...");
        merger.mergeTruststores(configuration);
        LOGGER.debug("Starting KeystoreCopier...");
        copier.copyKeystores(configuration);
        LOGGER.debug("Certificate post processing finished successfully.");
    }
}
|
shunliz/test
|
python/flask/application/__init__.py
|
from flask import Flask
from application.simple_page import simple_page
from application.user_page import user_page
app = Flask(__name__)
app.register_blueprint(simple_page)
app.register_blueprint(user_page)
from application import views
from application.db import db_session
@app.teardown_request
def shutdown_session(exception=None):
    """Remove the scoped SQLAlchemy session at the end of every request."""
    db_session.remove()
|
icahoon/vocal
|
install/interim_deploy/install_provdata.cxx
|
#include "deploy.h"
// Package the local provisioning data as a tar archive and deploy it to
// every provisioning-server host in installConfig, then push each host's
// vocal.conf startup file. Returns 1 on success, 0 only when the initial
// local tar fails; per-host copy/untar failures are reported but do not
// abort the remaining hosts.
//
// SECURITY/ROBUSTNESS NOTE(review): commands are built with sprintf into a
// fixed-size buffer and run through the shell; host names and directory
// paths are interpolated unescaped. Presumably these come from trusted
// install configuration — confirm, and consider snprintf.
int install_provdata(
    const vector < InstallUnit_T >& installConfig,
    const vector < string > &hostNames
)
{
    char sysCommand[maxFileNameSize];
    vector < InstallUnit_T > ::const_iterator iv;
    vector < string > ::const_iterator aName;

    // tar up the directory and ship it to it's destinations
    // cd /tmp; tar czf prov.tar.gz provisioning_data
    // use system so the user can see the status of the tar
    cout << "create tar of provisioning data" << nl;
    memset(sysCommand, 0, sizeof(sysCommand));
    // Solaris tar has no 'z' flag, so the archive is left uncompressed there.
    if (solarisOS)
    {
        sprintf(sysCommand, "cd %s; cd ../; tar cvf /usr/local/vocal/tmp/pd.tar provisioning_data", LocalProvDataDir );
    }
    else
    {
        sprintf(sysCommand, "cd %s; cd ../; tar cvzf /usr/local/vocal/tmp/pd.tar.gz provisioning_data", LocalProvDataDir );
    }
    if (mySystem(sysCommand) != 0)
    {
        cout << "can not make local tar of dir " << LocalProvDataDir << " System not deployed";
        return 0;
    }

    // scp /tmp/etc.tar.gz host:
    // ssh hostname '(cd /usr/local/vocal; tar xzf etc.tar.gz)'
    for (iv = installConfig.begin(); iv < installConfig.end(); iv++)
    {
        // Only provisioning-server units receive the provisioning data.
        if (iv->funcUnit == PS_PS)
        {
            cout << "downloading provisioning configuration to " << iv->hostName.c_str() << nl;
            memset(sysCommand, 0, sizeof(sysCommand));
            if (solarisOS)
            {
                sprintf(sysCommand, "%s /usr/local/vocal/tmp/pd.tar %s:/tmp", rcp, iv->hostName.c_str());
            }
            else
            {
                sprintf(sysCommand, "%s /usr/local/vocal/tmp/pd.tar.gz %s:/tmp", rcp, iv->hostName.c_str());
            }
            if (mySystem(sysCommand, true) != 0)
            {
                cout << "ERROR: can not copy provisioning data to " << iv->hostName.c_str() << "System partially deployed" << nl;
            }
            else
            {
                // Copy succeeded: unpack the archive on the remote host.
                memset(sysCommand, 0, sizeof(sysCommand));
                if (solarisOS)
                {
                    sprintf(sysCommand, "%s %s '(cd %s; tar xf /tmp/pd.tar)'",
                            rlp, iv->hostName.c_str(), RemoteProvDataDir);
                }
                else
                {
                    sprintf(sysCommand, "%s %s '(cd %s; tar xzf /tmp/pd.tar.gz)'",
                            rlp, iv->hostName.c_str(), RemoteProvDataDir);
                }
                if (mySystem(sysCommand) != 0)
                {
                    cout << "ERROR: can not untar /tmp/pd.tar.gz on remote machine" << iv->hostName.c_str()
                    << " System partially deployed" << nl;
                }
            }
        }
    }

    // Push each host's per-host vocal.conf startup configuration.
    for (aName = hostNames.begin(); aName < hostNames.end(); aName++)
    {
        cout << "Downloading etc startup configuration to " << aName->c_str() << nl;
        sprintf(sysCommand, "%s %s/vocal.conf.%s %s:%s/vocal.conf",
                rcp, LocalVocalEtcDir, aName->c_str(), aName->c_str(), RemoteVocalEtcDir );
        if (mySystem(sysCommand) != 0)
        {
            cout << "ERROR: can not copy vocal.conf onto remote machine" << aName->c_str()
            << " System partially deployed" << nl;
        }
    }
    return 1;
}
|
Invarato/Jarroba
|
generadores/ejemplo_8_generator_comprehesion.py
|
<reponame>Invarato/Jarroba
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Example: a generator expression (parenthesized comprehension) produces
# values lazily, one at a time, so iterating a billion elements never
# materializes them all in memory.
if __name__ == "__main__":
    generador = (num * 10 for num in range(1000000000))
    for numero in generador:
        print(numero)
|
vita-us/ViTA
|
src/test/java/de/unistuttgart/vis/vita/analysis/modules/LuceneModuleTest.java
|
<reponame>vita-us/ViTA
package de.unistuttgart.vis.vita.analysis.modules;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.search.IndexSearcher;
import org.junit.Before;
import org.junit.Test;
import de.unistuttgart.vis.vita.analysis.ModuleResultProvider;
import de.unistuttgart.vis.vita.analysis.ProgressListener;
import de.unistuttgart.vis.vita.analysis.results.DocumentPersistenceContext;
import de.unistuttgart.vis.vita.analysis.results.ImportResult;
import de.unistuttgart.vis.vita.analysis.results.LuceneResult;
import de.unistuttgart.vis.vita.model.Model;
import de.unistuttgart.vis.vita.model.TextRepository;
import de.unistuttgart.vis.vita.model.document.Chapter;
import de.unistuttgart.vis.vita.model.document.DocumentPart;
/**
* JUnit test on LuceneModuleTest
*/
public class LuceneModuleTest {
// Plain chapter texts used to populate the test document (one per chapter).
private final static String[] CHAPTERS_TEXTS = {"This is the text of chapter one.",
"This is the text of chapter two.",
"This is the text of chapter three.",
"This is the text of chapter four.",};
private LuceneModule luceneModule;
private ModuleResultProvider moduleResultProvider;
private ProgressListener progressListener;
private List<DocumentPart> documentParts = new ArrayList<DocumentPart>();
private String documentId = "document3";
private List<Chapter> chapters = new ArrayList<Chapter>();
// Mocked collaborators wired together in setUp().
private TextRepository providedTextRepository = mock(TextRepository.class);
private IndexSearcher providedIndexSearcher = mock(IndexSearcher.class);
/**
 * Wires the mock result provider so that the module under test sees an
 * ImportResult containing the test parts, a Model returning the mocked
 * TextRepository, and a persistence context returning {@code documentId}.
 */
@Before
public void setUp() throws IOException {
luceneModule = new LuceneModule();
moduleResultProvider = mock(ModuleResultProvider.class);
ImportResult importResult = mock(ImportResult.class);
Model model = mock(Model.class);
DocumentPersistenceContext documentPersistenceContext = mock(DocumentPersistenceContext.class);
when(importResult.getParts()).thenReturn(documentParts);
when(model.getTextRepository()).thenReturn(providedTextRepository);
when(providedTextRepository.getIndexSearcherForDocument(documentId)).thenReturn(providedIndexSearcher);
when(documentPersistenceContext.getDocumentId()).thenReturn(documentId);
when(moduleResultProvider.getResultFor(ImportResult.class)).thenReturn(importResult);
when(moduleResultProvider.getResultFor(Model.class)).thenReturn(model);
when(moduleResultProvider.getResultFor(DocumentPersistenceContext.class)).thenReturn(
documentPersistenceContext);
progressListener = mock(ProgressListener.class, withSettings().verboseLogging());
fillText();
}
/**
 * Fills the chapter texts with CHAPTERS_TEXTS: one DocumentPart
 * containing four chapters.
 */
private void fillText() {
DocumentPart documentPart = new DocumentPart();
documentParts.add(documentPart);
for (int i = 0; i < 4; i++) {
Chapter chapter = new Chapter();
chapter.setText(CHAPTERS_TEXTS[i]);
documentPart.getChapters().add(chapter);
chapters.add(chapter);
}
}
/**
 * Tests that the module has stored all the supplied chapters
 */
@Test
public void testStoresChapters() throws Exception {
luceneModule.execute(moduleResultProvider, progressListener);
verify(providedTextRepository).storeChaptersTexts(chapters, documentId);
}
/**
 * Tests that the result exposes the reader of the provided IndexSearcher.
 * NOTE(review): getIndexReader() is never stubbed on the mock, so both
 * sides of this assertEquals are the mock's default return — confirm this
 * asserts what was intended.
 */
@Test
public void testReturnsIndexSearcher() throws Exception {
LuceneResult luceneResult = luceneModule.execute(moduleResultProvider, progressListener);
assertEquals(providedIndexSearcher.getIndexReader(), luceneResult.getIndexReader());
}
}
|
agenciaativa/Ingaflex
|
painelAdm/assets/js/states/states.js
|
<gh_stars>0
// AngularJS (1.x) ui-router state configuration for the admin panel.
// Declares three states (home, manutencao, configuracoes), each mapping a
// URL to a partial template and its controller. Unknown URLs fall back to
// '/home'.
angular.module('IngaflexApp.states', ['ui.router'])
.config(function($stateProvider, $urlRouterProvider) {
// Default route when no state matches the current URL.
$urlRouterProvider.otherwise('/home');
$stateProvider
.state('home', {
url: '/home',
templateUrl: 'partials/home.html',
controller: 'homeController'
})
.state('manutencao', {
url: '/manutencao',
templateUrl: 'partials/manutencao.html',
controller: 'manutencaoController'
})
.state('configuracoes', {
url: '/configuracoes',
templateUrl: 'partials/configuracoes.html',
controller: 'configuracoesController'
})
});
|
hoyeungw/baro
|
test/util/timer.test.js
|
<reponame>hoyeungw/baro<filename>test/util/timer.test.js
import { says } from '@spare/logger'
import { timeout } from '@valjoux/timeout'
import { time } from '@valjoux/timestamp'
import { range } from '@vect/vector-init'
import { State } from '../../src/State'
import { Escape } from '../../util/Escape'
// Manual smoke test for Escape: a callback is looped every 500 ms while the
// state value is bumped by 10 every 300 ms, ten times; then the loop is
// stopped and the final result logged.
// NOTE: this file uses the (nonstandard, stage-2) pipeline operator `|>`
// and therefore requires a Babel transform to run.
const test = async () => {
const layout = {}
const state = State.build({ value: 0, total: 100 })
// `this` is bound to `layout` by Escape.build({ ctx: layout, ... }).
// The inner `layout` deliberately shadows the outer one.
const fn = function (state) {
const layout = this;
`${time()} [value] ${state.value} [layout] ${this}` |> says['escape']
}
const escape = Escape.build({ fn, ctx: layout, arg: state })
escape.loop(500)
for (let i of range(1, 10)) {
state.value += 10
await timeout(300)
}
// await timeout(500)
const result = escape.stop()
result |> says['result']
}
// NOTE(review): floating promise — rejections are unhandled; consider
// test().catch(console.error).
test().then()
|
Kristian-ZH/gardener-extension-provider-vsphere
|
vendor/github.com/vmware/go-vmware-nsxt/loadbalancer/lb_persistence_cookie_time.go
|
/*
* NSX API
*
* VMware NSX REST API
*
* API version: 1.0.0
* Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git)
*/
package loadbalancer
// LbPersistenceCookieTime models the persistence-cookie timing settings of an
// NSX load balancer (Swagger-generated; do not hand-edit beyond comments).
type LbPersistenceCookieTime struct {
// Both session cookie and persistence cookie are supported, Use LbSessionCookieTime for session cookie time setting, Use LbPersistenceCookieTime for persistence cookie time setting
Type_ string `json:"type"`
// HTTP cookie max-age to expire cookie, only available for insert mode.
CookieMaxIdle int64 `json:"cookie_max_idle"`
}
|
tauanybueno/java
|
Projeto - Chatbot/RespostaContador.java
|
<filename>Projeto - Chatbot/RespostaContador.java
import java.util.Locale;
/**
 * Chatbot response that reacts to messages containing "conta" and replies
 * with a running counter of how many times it has been triggered.
 */
public class RespostaContador extends Resposta {
    /** Number of times produz() has been called. */
    private int contador;
    private String saida4;

    public RespostaContador(String entrada) {
        super(entrada);
    }

    /**
     * Returns true when the input mentions "conta", case-insensitively.
     * Bug fix: the original lower-cased the literal "conta" (a no-op, it is
     * already lowercase) instead of the user input, so "Conta"/"CONTA"
     * never matched. Also guards against a null input.
     */
    @Override
    public boolean verifica(String entrada) {
        return entrada != null && entrada.toLowerCase(Locale.ROOT).contains("conta");
    }

    /**
     * Increments the counter and returns the formatted status message.
     */
    @Override
    public String produz() {
        contador++;
        saida4 = "Ok! A conta atualmente está em: " + contador;
        return saida4;
    }
}
|
acidicMercury8/xray-1.0
|
sdk/stlport/stl/_streambuf.h
|
/*
* Copyright (c) 1999
* Silicon Graphics Computer Systems, Inc.
*
* Copyright (c) 1999
* <NAME>
*
* This material is provided "as is", with absolutely no warranty expressed
* or implied. Any use is at your own risk.
*
* Permission to use or copy this software for any purpose is hereby granted
* without fee, provided the above notices are retained on all copies.
* Permission to modify the code and to distribute modified code is granted,
* provided the above notices are retained, and a notice that the code was
* modified is included with the above copyright notice.
*
*/
#ifndef _STLP_INTERNAL_STREAMBUF
#define _STLP_INTERNAL_STREAMBUF
#ifndef _STLP_IOS_BASE_H
# include <stl/_ios_base.h> // Needed for ios_base bitfield members.
#endif // <ios_base> includes <iosfwd>.
_STLP_BEGIN_NAMESPACE
//----------------------------------------------------------------------
// Class basic_streambuf<>, the base class of the streambuf hierarchy.
// A basic_streambuf<> manages an input (get) area and an output (put)
// area. Each is described by three pointers: a beginning, an end, and a
// current position. basic_streambuf<> contains some very simple member
// functions that manipulate those six pointers, but almost all of the real
// functionality gets delegated to protected virtual member functions.
// All of the public member functions are inline, and most of the protected
// member functions are virtual.
// Although basic_streambuf<> is not abstract, it is useful only as a base
// class. Its virtual member functions have default definitions such that
// reading from a basic_streambuf<> will always yield EOF, and writing to a
// basic_streambuf<> will always fail.
// The second template parameter, _Traits, defaults to char_traits<_CharT>.
// The default is declared in header <iosfwd>, and it isn't declared here
// because C++ language rules do not allow it to be declared twice.
template <class _CharT, class _Traits>
class basic_streambuf {
friend class basic_istream<_CharT, _Traits>;
friend class basic_ostream<_CharT, _Traits>;
public: // Typedefs.
typedef _CharT char_type;
typedef typename _Traits::int_type int_type;
typedef typename _Traits::pos_type pos_type;
typedef typename _Traits::off_type off_type;
typedef _Traits traits_type;
private: // Data members.
char_type* _M_gbegin; // Beginning of get area
char_type* _M_gnext; // Current position within the get area
char_type* _M_gend; // End of get area
char_type* _M_pbegin; // Beginning of put area
char_type* _M_pnext; // Current position within the put area
char_type* _M_pend; // End of put area
locale _M_locale; // The streambuf's locale object
//public: // Extension: locking, for thread safety.
// _STLP_mutex _M_lock;
public: // Destructor.
virtual ~basic_streambuf();
protected: // The default constructor.
// Defined inline only for old MSVC static-lib builds (to avoid an
// unresolved symbol); everywhere else it is defined out of line.
basic_streambuf()
#if defined (_STLP_MSVC) && (_STLP_MSVC < 1300) && defined (_STLP_USE_STATIC_LIB)
//We make it inline to avoid unresolved symbol.
: _M_gbegin(0), _M_gnext(0), _M_gend(0),
_M_pbegin(0), _M_pnext(0), _M_pend(0),
_M_locale()
{}
#else
;
#endif
protected: // Protected interface to the get area.
char_type* eback() const { return _M_gbegin; } // Beginning
char_type* gptr() const { return _M_gnext; } // Current position
char_type* egptr() const { return _M_gend; } // End
// Advance the get pointer by __n (may be negative); no bounds checking.
void gbump(int __n) { _M_gnext += __n; }
void setg(char_type* __gbegin, char_type* __gnext, char_type* __gend) {
_M_gbegin = __gbegin;
_M_gnext = __gnext;
_M_gend = __gend;
}
public:
// An alternate public interface to the above functions
// which allows us to avoid using templated friends which
// are not supported on some compilers.
char_type* _M_eback() const { return eback(); }
char_type* _M_gptr() const { return gptr(); }
char_type* _M_egptr() const { return egptr(); }
void _M_gbump(int __n) { gbump(__n); }
void _M_setg(char_type* __gbegin, char_type* __gnext, char_type* __gend)
{ this->setg(__gbegin, __gnext, __gend); }
protected: // Protected interface to the put area
char_type* pbase() const { return _M_pbegin; } // Beginning
char_type* pptr() const { return _M_pnext; } // Current position
char_type* epptr() const { return _M_pend; } // End
// Advance the put pointer by __n (may be negative); no bounds checking.
void pbump(int __n) { _M_pnext += __n; }
void setp(char_type* __pbegin, char_type* __pend) {
_M_pbegin = __pbegin;
_M_pnext = __pbegin;
_M_pend = __pend;
}
protected: // Virtual buffer management functions.
virtual basic_streambuf<_CharT, _Traits>* setbuf(char_type*, streamsize);
// Alters the stream position, using an integer offset. In this
// class seekoff does nothing; subclasses are expected to override it.
virtual pos_type seekoff(off_type, ios_base::seekdir,
ios_base::openmode = ios_base::in | ios_base::out);
// Alters the stream position, using a previously obtained streampos. In
// this class seekpos does nothing; subclasses are expected to override it.
virtual pos_type
seekpos(pos_type, ios_base::openmode = ios_base::in | ios_base::out);
// Synchronizes (i.e. flushes) the buffer. All subclasses are expected to
// override this virtual member function.
virtual int sync();
public: // Buffer management.
// Public non-virtual wrappers that forward to the protected virtuals above.
basic_streambuf<_CharT, _Traits>* pubsetbuf(char_type* __s, streamsize __n)
{ return this->setbuf(__s, __n); }
pos_type pubseekoff(off_type __offset, ios_base::seekdir __way,
ios_base::openmode __mod = ios_base::in | ios_base::out)
{ return this->seekoff(__offset, __way, __mod); }
pos_type pubseekpos(pos_type __sp,
ios_base::openmode __mod = ios_base::in | ios_base::out)
{ return this->seekpos(__sp, __mod); }
int pubsync() { return this->sync(); }
protected: // Virtual get area functions, as defined in
// 17.5.2.4.3 and 17.5.2.4.4 of the standard.
// Returns a lower bound on the number of characters that we can read,
// with underflow, before reaching end of file. (-1 is a special value:
// it means that underflow will fail.) Most subclasses should probably
// override this virtual member function.
virtual streamsize showmanyc();
// Reads up to __n characters. Return value is the number of
// characters read.
virtual streamsize xsgetn(char_type* __s, streamsize __n);
// Called when there is no read position, i.e. when gptr() is null
// or when gptr() >= egptr(). Subclasses are expected to override
// this virtual member function.
virtual int_type underflow();
// Similar to underflow(), but used for unbuffered input. Most
// subclasses should probably override this virtual member function.
virtual int_type uflow();
// Called when there is no putback position, i.e. when gptr() is null
// or when gptr() == eback(). All subclasses are expected to override
// this virtual member function.
virtual int_type pbackfail(int_type = traits_type::eof());
protected: // Virtual put area functions, as defined in
// 27.5.2.4.5 of the standard.
// Writes up to __n characters. Return value is the number of characters
// written.
virtual streamsize xsputn(const char_type* __s, streamsize __n);
// Extension: writes up to __n copies of __c. Return value is the number
// of characters written.
virtual streamsize _M_xsputnc(char_type __c, streamsize __n);
// Called when there is no write position. All subclasses are expected to
// override this virtual member function.
virtual int_type overflow(int_type = traits_type::eof());
public: // Public members for writing characters.
// Write a single character. Fast path writes directly into the put area;
// overflow() is only called when the put area is full.
int_type sputc(char_type __c) {
return ((_M_pnext < _M_pend) ? _Traits::to_int_type(*_M_pnext++ = __c)
: this->overflow(_Traits::to_int_type(__c)));
}
// Write __n characters.
streamsize sputn(const char_type* __s, streamsize __n)
{ return this->xsputn(__s, __n); }
// Extension: write __n copies of __c.
streamsize _M_sputnc(char_type __c, streamsize __n)
{ return this->_M_xsputnc(__c, __n); }
private: // Helper functions.
// Out-of-line slow path used by snextc() when fewer than two characters
// remain in the get area.
int_type _M_snextc_aux();
public: // Public members for reading characters.
streamsize in_avail() {
return (_M_gnext < _M_gend) ? (_M_gend - _M_gnext) : this->showmanyc();
}
// Advance to the next character and return it.
int_type snextc() {
return ( _M_gend - _M_gnext > 1 ?
_Traits::to_int_type(*++_M_gnext) :
this->_M_snextc_aux());
}
// Return the current character and advance to the next.
int_type sbumpc() {
return _M_gnext < _M_gend ? _Traits::to_int_type(*_M_gnext++)
: this->uflow();
}
// Return the current character without advancing to the next.
int_type sgetc() {
return _M_gnext < _M_gend ? _Traits::to_int_type(*_M_gnext)
: this->underflow();
}
streamsize sgetn(char_type* __s, streamsize __n)
{ return this->xsgetn(__s, __n); }
// Put back __c; only succeeds in place when __c equals the character just
// read and there is putback room, otherwise defers to pbackfail().
int_type sputbackc(char_type __c) {
return ((_M_gbegin < _M_gnext) && _Traits::eq(__c, *(_M_gnext - 1)))
? _Traits::to_int_type(*--_M_gnext)
: this->pbackfail(_Traits::to_int_type(__c));
}
int_type sungetc() {
return (_M_gbegin < _M_gnext)
? _Traits::to_int_type(*--_M_gnext)
: this->pbackfail();
}
protected: // Virtual locale functions.
// This is a hook, called by pubimbue() just before pubimbue()
// sets the streambuf's locale to __loc. Note that imbue should
// not (and cannot, since it has no access to streambuf's private
// members) set the streambuf's locale itself.
virtual void imbue(const locale&);
public: // Locale-related functions.
locale pubimbue(const locale&);
locale getloc() const { return _M_locale; }
#if !defined (_STLP_NO_ANACHRONISMS)
void stossc() { this->sbumpc(); }
#endif
#if defined (__MVS__) || defined (__OS400__)
// NOTE(review): the data members are re-declared here for MVS/OS400 —
// apparently a compiler-specific layout workaround; confirm before touching.
private: // Data members.
char_type* _M_gbegin; // Beginning of get area
char_type* _M_gnext; // Current position within the get area
char_type* _M_gend; // End of get area
char_type* _M_pbegin; // Beginning of put area
char_type* _M_pnext; // Current position within the put area
char_type* _M_pend; // End of put area
#endif
};
#if defined (_STLP_USE_TEMPLATE_EXPORT)
_STLP_EXPORT_TEMPLATE_CLASS basic_streambuf<char, char_traits<char> >;
# if !defined (_STLP_NO_WCHAR_T)
_STLP_EXPORT_TEMPLATE_CLASS basic_streambuf<wchar_t, char_traits<wchar_t> >;
# endif // _STLP_NO_WCHAR_T
#endif // _STLP_USE_TEMPLATE_EXPORT
_STLP_END_NAMESPACE
#if defined (_STLP_EXPOSE_STREAM_IMPLEMENTATION) && !defined (_STLP_LINK_TIME_INSTANTIATION)
# include <stl/_streambuf.c>
#endif
#endif
// Local Variables:
// mode:C++
// End:
|
Nebukam/nkm-ecosystem
|
lib/data/models/model-mockup.js
|
'use strict';
const { U } = require(`@nkm/utils`);
const { POOL, DisposableObjectEx } = require(`@nkm/common`);
const ECOSYSTEM_CONSTANTS = require(`../../ecosystem-constants`);
const Model = require(`../model`);
/**
 * A ModelMockup captures the expected structure of a Model — its id, base
 * model, model class, NFO metadata and a set of field definitions — without
 * being a Model itself. It can expand missing fields onto an existing model
 * (Expand/ExpandTo), create & register a model in an ecosystem
 * (Register/RegisterTo), and compare a model against the expected shape
 * (Equals/Fits). Instances are pooled: obtain them via ModelMockup.Mock.
 */
class ModelMockup extends DisposableObjectEx {

    /**
     * Turns a JS object into a ModelMockup.
     * Any property beyond the ones below is treated as a new field to be created (see .newField).
     * @param {object} p_object
     * @param {string} p_object.id
     * @param {Model} p_object.base
     * @param {Class} p_object.modelClass
     * @param {object} p_object.NFO
     * @param {object} p_object.newField { cl: fieldClass, settings: fieldSettings }
     */
    static Mock(p_object) {
        let mockup = POOL.Rent(ModelMockup);
        try {
            mockup.mockup = p_object;
        } catch (err) {
            // Return the pooled instance before propagating the validation error.
            mockup.Release();
            throw err;
        }
        return mockup;
    }

    /**
     * Turn a JS object into a ModelMockup, with handling of extra mockup properties.
     * Any property beyond the ones below is treated as a new field to be created (see .newField).
     * @param {object} p_object
     * @param {string} p_object.id
     * @param {Model} p_object.base
     * @param {Class} p_object.modelClass
     * @param {object} p_object.NFO
     * @param {object} p_object.mockup field definitions, see Mock()
     */
    static CompleteMock(p_object) {
        let mockup = ModelMockup.Mock(p_object.mockup);
        mockup.id = p_object.id;
        mockup.base = p_object.base;
        mockup.modelClass = (p_object.modelClass || Model);
        mockup.NFO = p_object.NFO;
        return mockup;
    }

    /**
     * Expands a mockup into an existing model by creating
     * missing fields.
     * Throws an error if a field with a mockup'd ID already exists
     * with a mismatching type.
     * @param {Model} p_model
     * @param {ModelMockup} p_mockup
     * @returns {Model} p_model, after expansion
     */
    static Expand(p_model, p_mockup) {
        let p_base = p_mockup.base;
        if (p_base) {
            if (p_model.base && p_model.base.Inherit(p_base)) {
                // Do nothing: base model already extends mocked base at some point.
            } else {
                p_model.base = p_base;
            }
        }
        // Only set NFO if the model does not already carry one.
        if (!p_model.NFO) {
            p_model.NFO = p_mockup.NFO;
        }
        let mockup = p_mockup.mockup;
        for (let member in mockup) {
            let existingField = p_model.Get(member),
                mockField = mockup[member];
            if (!existingField) {
                Model.CreateField(
                    p_model,
                    mockField.cl,
                    member,
                    {
                        settings: mockField.settings
                    });
            } else {
                if (U.isInstanceOf(existingField.fieldClass, mockField.cl)) {
                    // Field exists and its type matches: push settings, if any.
                    if (mockField.settings) {
                        existingField.Unpack(mockField.settings);
                    }
                    continue;
                } else {
                    // Field exists but its type mismatches the mockup.
                    throw new Error(`Field '${member}' already exists with a mismatching type.`);
                }
            }
        }
        return p_model;
    }

    /**
     * Creates and register a model in a given ecosystem, based on
     * a given mockup.
     * @param {Ecosystem} p_ecosystem
     * @param {ModelMockup} p_mockup
     * @param {string} p_id optional id overriding the mockup's own id
     * @returns {Model} the newly registered model
     */
    static Register(p_ecosystem, p_mockup, p_id = null) {
        let model = p_ecosystem.models.CreateTemp(null, p_mockup.modelClass);
        p_ecosystem.models.Register(
            ModelMockup.Expand(model, p_mockup), (p_id || p_mockup.id));
        return model;
    }

    constructor() { super(); }

    // Pooled-object initialization hook: reset to a pristine state.
    _Init() {
        super._Init();
        this._id = ``;
        this._base = null;
        this._NFO = null;
        this._modelClass = Model;
        this._mockup = {};
        this._fieldCount = 0;
    }

    get NFO() { return this._NFO; }
    set NFO(p_value) {
        this._NFO = p_value;
        if (p_value) {
            let cPath = U.Get(p_value, `catalogPath`, null);
            if (!cPath || cPath === ECOSYSTEM_CONSTANTS.DEFAULT) {
                // Derive a default catalog path from the model class & id.
                p_value.catalogPath = `${this._modelClass.name}s/${this._id}/`;
            }
        }
    }

    get base() { return this._base; }
    set base(p_value) { this._base = p_value; }

    get id() { return this._id; }
    set id(p_value) { this._id = p_value; }

    get modelClass() { return this._modelClass; }
    set modelClass(p_value) { this._modelClass = p_value; }

    get mockup() { return this._mockup; }
    set mockup(p_value) {
        this._mockup = p_value;
        // Bug fix: reset the counter before recounting — assigning a mockup
        // twice previously accumulated stale field counts.
        this._fieldCount = 0;
        if (p_value) {
            for (let member in p_value) {
                if (!p_value[member].hasOwnProperty(`cl`)) {
                    throw new Error(`Field ${member} is missing a constructor definition.`);
                }
                this._fieldCount += 1;
            }
        }
    }

    /**
     * Checks whether a field exists with a given ID
     * @param {string} p_fieldId
     */
    Has(p_fieldId) { return this._mockup.hasOwnProperty(p_fieldId); }

    /**
     * Adds a field definition. Throws if the ID is empty, the class is
     * missing, or a field with that ID already exists.
     * @param {string} p_fieldId
     * @param {Class} p_fieldClass
     * @param {object} p_fieldSettings
     */
    Add(p_fieldId, p_fieldClass, p_fieldSettings = null) {
        if (U.isEmpty(p_fieldId)) { throw new Error(`Cannot add a field with an empty ID.`); }
        if (!p_fieldClass) { throw new Error(`Cannot add a field with an empty type.`); }
        let mockup = this._mockup;
        if (mockup.hasOwnProperty(p_fieldId)) {
            throw new Error(`Mockup already have a field named ${p_fieldId}`);
        }
        mockup[p_fieldId] = { cl: p_fieldClass, settings: p_fieldSettings };
        this._fieldCount += 1;
    }

    /**
     * Removes a field definition; no-op if absent.
     * Bug fix: the guard was inverted — it returned early when the field DID
     * exist, so Remove() never removed anything (and decremented the count
     * for absent fields).
     * @param {string} p_fieldId
     */
    Remove(p_fieldId) {
        let mockup = this._mockup;
        if (!mockup.hasOwnProperty(p_fieldId)) { return; }
        delete mockup[p_fieldId];
        this._fieldCount -= 1;
    }

    /**
     * Checks whether a given model is an exact match of this
     * mockup.
     * @param {Model} p_model
     * @param {Boolean} p_inspectSettings should the settings match too ?
     */
    Equals(p_model, p_inspectSettings = false) {
        if (this._fieldCount != p_model.FieldCount(true)) { return false; } //Field count mismatch.
        let mockup = this._mockup;
        for (let member in mockup) {
            let existingField = p_model.Get(member);
            if (!existingField) { return false; } //Field missing
            let mockField = mockup[member];
            if (!U.isInstanceOf(existingField.fieldClass, mockField.cl)) { return false; } //Field type mismatch
            if (!p_inspectSettings) { continue; }
            throw new Error(`settings inspection not implemented yet`);
        }
        return true;
    }

    /**
     * Checks whether a given model fits the mockup.
     * This is a loose alternative to Equals(), and only
     * checks whether fields are present with matching types.
     * @param {Model} p_model
     */
    Fits(p_model) {
        let mockup = this._mockup;
        for (let member in mockup) {
            let existingField = p_model.Get(member);
            if (!existingField) { return false; } //Field missing
            if (!U.isInstanceOf(existingField.fieldClass, mockup[member].cl)) { return false; } //Field type mismatch
        }
        return true;
    }

    /**
     * Registers a model built from this mockup into an ecosystem.
     * @param {Ecosystem} p_ecosystem
     * @param {string} p_id
     */
    RegisterTo(p_ecosystem, p_id = null) {
        return ModelMockup.Register(p_ecosystem, this, p_id);
    }

    /**
     * Expands this mockup onto an existing model.
     * @param {Model} p_model
     */
    ExpandTo(p_model) {
        ModelMockup.Expand(p_model, this);
    }

    // Pooled-object teardown hook: drop all references before release.
    _CleanUp() {
        this._id = ``;
        this._base = null;
        this._modelClass = Model;
        this._mockup = {};
        this._NFO = null;
        this._fieldCount = 0;
        super._CleanUp();
    }
}
module.exports = ModelMockup;
|
CSCfi/fairdata-etsin-qvain
|
etsin_finder/frontend/__tests__/stores/view/qvain.embargoExpDate.test.js
|
<filename>etsin_finder/frontend/__tests__/stores/view/qvain.embargoExpDate.test.js<gh_stars>1-10
import 'chai/register-expect'
import EmbargoExpDate, {
embargoExpDateSchema,
} from '../../../js/stores/view/qvain/qvain.embargoExpDate'
import { makeObservable } from 'mobx'
// Replace the SingleValueField base class with a spy that records its
// constructor arguments, so the tests can assert what EmbargoExpDate's
// super() call received. (jest.mock calls are hoisted above the imports.)
jest.mock('../../../js/stores/view/qvain/qvain.singleValueField', () => {
class mockSingleValueField {
constructor(...args) {
this.constructorFunc(...args)
}
constructorFunc = jest.fn()
}
return mockSingleValueField
})
// Keep the real mobx module but stub makeObservable so we can assert it
// was called without mobx doing any actual instrumentation.
jest.mock('mobx', () => {
return {
...jest.requireActual('mobx'),
makeObservable: jest.fn(),
}
})
describe('given Parent object', () => {
const Parent = {
some: 'data',
}
describe('EmbargoExpDate', () => {
let embargoExpDate
beforeEach(() => {
embargoExpDate = new EmbargoExpDate(Parent)
})
describe('when constructor is called', () => {
// NOTE(review): `to.have.beenCalledWith` relies on a chai/jest assertion
// bridge being registered in the test setup — confirm it is.
test('should call super.constructor with Parent and schema', () => {
expect(embargoExpDate.constructorFunc).to.have.beenCalledWith(Parent, embargoExpDateSchema)
})
test('should call makeObservable', () => {
expect(makeObservable).to.have.beenCalledWith(embargoExpDate)
})
})
describe('when fromBackend is called with access_rights_available: true', () => {
const dataset = {
access_rights: {
available: true,
},
}
beforeEach(() => {
embargoExpDate.fromBackend(dataset)
})
test('should set value to true', () => {
embargoExpDate.value.should.be.true
})
})
describe('when fromBackend is called with access_rights_available: false', () => {
const dataset = {
access_rights: {
available: false,
},
}
beforeEach(() => {
embargoExpDate.fromBackend(dataset)
})
test('should set value to undefined', () => {
expect(embargoExpDate.value).to.be.undefined
})
})
})
})
|
IllusionElements/wdmanager
|
packages/parser/lib/dragon.js
|
// Curried property accessor: pick(key) returns a function that reads `key`
// from whatever object it is given.
const pick = (key) => (obj) => obj[key];

/**
 * Lazily loads the dragon result data set.
 * @returns {Promise<object>} the JSON module's default export
 */
export default async function loadDragonResults() {
  const mod = await import("./data/results/dragon.json");
  return pick("default")(mod);
}
|
ValtoGameEngines/Fish-Engine
|
Engine/Source/FishGame/GameApp.cpp
|
#include <FishGame/GameApp.hpp>
#include <string>
#include <chrono>
#include <FishEngine/GLEnvironment.hpp>
#include <GLFW/glfw3.h>
#include <FishEngine/Debug.hpp>
#include <FishEngine/Resources.hpp>
#include <FishEngine/Input.hpp>
#include <FishEngine/Screen.hpp>
#include <FishEngine/RenderSystem.hpp>
#include <FishEngine/Scene.hpp>
#include <FishEngine/Camera.hpp>
#include <FishEngine/PhysicsSystem.hpp>
#include <FishEngine/RenderTarget.hpp>
#include <FishEngine/Pipeline.hpp>
#include <FishEngine/Material.hpp>
#include <FishEngine/Graphics.hpp>
#include <FishEngine/Shader.hpp>
#include <FishEngine/ShaderCompiler.hpp>
#include <FishEngine/Mesh.hpp>
using namespace std;
using namespace FishEngine;
//using namespace FishGame;
GLFWwindow* GameApp::m_window = nullptr;
int GameApp::m_windowWidth = 640;
int GameApp::m_windowHeight = 480;
// Main entry point of the game application: creates the GLFW window and GL
// context, initializes engine subsystems (shaders, meshes, input, rendering,
// scene, physics), then runs the frame loop until the window is closed.
// The window title is refreshed with an FPS figure every `report_frames`
// frames. Returns 0 on normal shutdown.
int GameApp::Run()
{
Debug::Init();
Debug::setColorMode(true);
glfwInit();
// Set all the required options for GLFW
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 1);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
//glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
// Create a GLFWwindow object that we can use for GLFW's functions
m_window = glfwCreateWindow(m_windowWidth, m_windowHeight, "FishEngine", nullptr, nullptr);
glfwMakeContextCurrent(m_window);
glCheckError();
glfwSetKeyCallback(m_window, GameApp::KeyCallBack);
//glfwSetCursorPosCallback(m_window, GameApp::MouseCallback);
glfwSetScrollCallback(m_window, GameApp::MouseScrollCallback);
//glfwSetCharCallback(m_window, GameApp::CharacterCallback);
glfwSetWindowSizeCallback(m_window, GameApp::WindowSizeCallback);
glfwSetMouseButtonCallback(m_window, GameApp::MouseButtonCallback);
glfwGetWindowSize(m_window, &m_windowWidth, &m_windowHeight);
// Disable vsync so the FPS counter reflects raw render throughput.
glfwSwapInterval(0);
#if FISHENGINE_PLATFORM_WINDOWS
glewExperimental = GL_TRUE;
// Initialize GLEW to setup the OpenGL Function pointers
auto err = glewInit();
if (err != GLEW_OK)
{
//LogError(glewGetErrorString(err));
LogError("GLEW not initialized");
}
else
{
LogInfo("GlEW initialized");
}
#endif
// Framebuffer size can differ from window size on HiDPI displays; the
// ratio is stored as pixelsPerPoint.
int w, h;
glfwGetFramebufferSize(m_window, &w, &h);
Screen::m_width = w;
Screen::m_height = h;
Screen::m_pixelsPerPoint = static_cast<float>(w) / m_windowWidth;
// NOTE(review): shader/model asset paths are hard-coded per developer
// machine — should come from configuration.
#if FISHENGINE_PLATFORM_WINDOWS
auto shaderRoot = FishEngine::Path(R"(D:\program\github\FishEngine\Engine\Shaders)");
#else
auto shaderRoot = FishEngine::Path("/Users/yushroom/program/FishEngine/Engine/Shaders");
#endif
auto shaderIncludeDir = shaderRoot / "include";
ShaderCompiler::setShaderIncludeDir(shaderIncludeDir.string());
Shader::Init(shaderRoot.string());
#if FISHENGINE_PLATFORM_WINDOWS
Mesh::Init(R"(D:\program\github\FishEngine\assets\Models)");
#else
Mesh::Init("/Users/yushroom/program/FishEngine/assets/Models");
#endif
//Resources::Init();
Input::Init();
RenderSystem::Init();
//WindowSizeCallback(m_window, m_windowWidth, m_windowHeight);
Init();
Scene::Init();
PhysicsSystem::Init();
Scene::Start();
//PhysicsSystem::Start();
// NOTE(review): Init() is also called a few lines above (before
// Scene::Init) — this second call looks unintentional; confirm.
Init();
constexpr int report_frames = 1000;
int frames = 0;
int fps = 30;
//float time_stamp = static_cast<float>(glfwGetTime());
auto time_stamp = std::chrono::high_resolution_clock::now();
/* Loop until the user closes the window */
while (!glfwWindowShouldClose(m_window))
{
/* Poll for and process events */
Input::Update();
glfwPollEvents();
// Mouse position is normalized to [0,1] with the Y axis flipped
// (origin at the bottom-left).
double xpos, ypos;
glfwGetCursorPos(m_window, &xpos, &ypos);
float x = static_cast<float>(xpos);
float y = static_cast<float>(ypos);
Input::UpdateMousePosition(x / m_windowWidth, 1.0f - y / m_windowHeight);
Scene::Update();
PhysicsSystem::FixedUpdate();
glViewport(0, 0, Screen::width(), Screen::height());
RenderSystem::Render();
frames++;
// Every report_frames frames, compute the average FPS over the interval
// and show it in the window title.
if (frames >= report_frames)
{
auto new_time_stamp = std::chrono::high_resolution_clock::now();
//float new_time_stamp = static_cast<float>(glfwGetTime());
auto elapse = new_time_stamp - time_stamp;
auto seconds = std::chrono::duration_cast<std::chrono::milliseconds>(elapse).count() / 1000.0f;
fps = static_cast<int>(report_frames / seconds);
string title = "FishEngine FPS: " + to_string(fps);
glfwSetWindowTitle(m_window, title.c_str());
time_stamp = new_time_stamp;
frames = 0;
}
/* Swap front and back buffers */
glfwSwapBuffers(m_window);
}
glfwTerminate();
return 0;
}
// Maps a GLFW key constant to the engine's KeyCode value.
// Letters, digits and F1-F15 are translated by a constant offset between
// the two enumerations; all other keys are passed through unchanged.
int KeyCodeFromGLFWKey(int key)
{
    if (key >= GLFW_KEY_A && key <= GLFW_KEY_Z)
    {
        constexpr int offset = static_cast<int>(KeyCode::A) - GLFW_KEY_A;
        return key + offset;
    }
    if (key >= GLFW_KEY_0 && key <= GLFW_KEY_9)
    {
        constexpr int offset = static_cast<int>(KeyCode::Alpha0) - GLFW_KEY_0;
        return key + offset;
    }
    if (key >= GLFW_KEY_F1 && key <= GLFW_KEY_F15)
    {
        constexpr int offset = static_cast<int>(KeyCode::F1) - GLFW_KEY_F1;
        return key + offset;
    }
    // Bug fix: the function previously fell off the end for every unmapped
    // key (ESC, space, arrows, ...), which is undefined behavior for a
    // non-void function. Pass the raw GLFW code through instead.
    // TODO(review): confirm Input::UpdateKeyState tolerates raw GLFW codes,
    // or add explicit mappings for the remaining keys.
    return key;
}
// GLFW keyboard callback: forwards key presses/releases to the engine's
// Input system. ESC requests window close. GLFW_REPEAT is treated as Down
// (only GLFW_RELEASE maps to Up). Modifier keys are reported via the `mods`
// bitmask; since left/right cannot be distinguished from it, both variants
// of each modifier are updated together.
void GameApp::KeyCallBack(GLFWwindow* window, int key, int scancode, int action, int mods)
{
if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GL_TRUE);
//ImGui_ImplGlfwGL3_KeyCallback(window, key, scancode, action, mods);
KeyState state = action == GLFW_RELEASE ? KeyState::Up : KeyState::Down;
Input::UpdateKeyState(KeyCodeFromGLFWKey(key), state);
// NOTE(review): when a modifier bit is absent from `mods`, no Up event is
// sent for that modifier here — confirm Input resets key states per frame.
if (mods & GLFW_MOD_ALT)
{
Input::UpdateKeyState(KeyCode::LeftAlt, state);
Input::UpdateKeyState(KeyCode::RightAlt, state);
}
if (mods & GLFW_MOD_CONTROL)
{
Input::UpdateKeyState(KeyCode::LeftControl, state);
Input::UpdateKeyState(KeyCode::RightControl, state);
}
if (mods & GLFW_MOD_SUPER)
{
Input::UpdateKeyState(KeyCode::LeftCommand, state);
Input::UpdateKeyState(KeyCode::RightCommand, state);
}
if (mods & GLFW_MOD_SHIFT)
{
Input::UpdateKeyState(KeyCode::LeftShift, state);
Input::UpdateKeyState(KeyCode::RightShift, state);
}
}
// GLFW scroll callback: feeds the vertical scroll amount into the engine's
// axis input. The horizontal offset (xoffset) is currently ignored.
void FishEngine::GameApp::MouseScrollCallback(GLFWwindow* window, double xoffset, double yoffset)
{
    //ImGui_ImplGlfwGL3_ScrollCallback(window, xoffset, yoffset);
    Input::UpdateAxis(Axis::MouseScrollWheel, (float)yoffset);
    // Disabled editor-only variant that filtered scroll by scene-view hover:
    //if (Input::m_mainSceneViewEditor->isMouseHovered())
    //{
    //Input::UpdateAxis(Axis::MouseScrollWheel, (float)yoffset);
    //}
}
// GLFW mouse-button callback: translates press/release into the engine's
// MouseButtonState and forwards it to the Input system.
void GameApp::MouseButtonCallback(GLFWwindow* window, int button, int action, int mods)
{
    //ImGui_ImplGlfwGL3_MouseButtonCallback(window, button, action, mods);
    MouseButtonState buttonState;
    if (action == GLFW_PRESS)
        buttonState = MouseButtonState::Down;
    else
        buttonState = MouseButtonState::Up;
    Input::UpdateMouseButtonState(button, buttonState);
}
// GLFW window-size callback. `width`/`height` are in screen coordinates;
// the framebuffer size is queried separately because on HiDPI displays it
// differs from the window size by the pixels-per-point scale factor.
void GameApp::WindowSizeCallback(GLFWwindow* window, int width, int height)
{
    //Debug::Log("window size changed");
    m_windowWidth = width;
    m_windowHeight = height;
    int w, h;
    glfwGetFramebufferSize(window, &w, &h);
    // Fix: the old code divided by `width` unconditionally; a minimized
    // window reports width == 0, turning the scale factor into inf/NaN.
    if (width != 0)
        Screen::m_pixelsPerPoint = static_cast<float>(w) / width;
    // Skip resize work entirely while minimized (zero-sized framebuffer).
    if (w != 0 && h != 0)
    {
        Screen::m_width = w;
        Screen::m_height = h;
        RenderSystem::ResizeBufferSize(w, h);
        Camera::OnWindowSizeChanged(w, h);
        //EditorRenderSystem::OnWindowSizeChanged(w, h);
        //EditorGUI::OnWindowSizeChanged(w, h);
    }
}
//void GameApp::MouseCallback(GLFWwindow* window, double xpos, double ypos)
//{
//}
//void GameApp::CharacterCallback(GLFWwindow* window, unsigned int codepoint)
//{
// ImGui_ImplGlfwGL3_CharCallback(window, codepoint);
//}
|
immune-gmbh/agent
|
pkg/firmware/pci/cfgspace.go
|
<reponame>immune-gmbh/agent
package pci
import (
"github.com/immune-gmbh/agent/v3/pkg/api"
"github.com/immune-gmbh/agent/v3/pkg/firmware/common"
"github.com/sirupsen/logrus"
)
// reportConfigSpace reads the 4 KiB PCI configuration space of the device
// addressed by the request and stores it in request.Value. On failure the
// mapped API error is recorded on the request and the raw error returned.
func reportConfigSpace(request *api.PCIConfigSpace) error {
	data, err := readConfigSpace(uint32(request.Bus), uint32(request.Device), uint32(request.Function), 0, 4096)
	if err == nil {
		request.Value = data
		return nil
	}
	logrus.Debugf("pci.ReportConfigSpace(): %s", err.Error())
	request.Error = common.ServeApiError(common.MapFSErrors(err))
	return err
}
// ReportConfigSpaces fills in every request's config-space dump in place.
// Individual failures are recorded on their request; an error (the last
// one seen) is only returned when *every* request failed.
func ReportConfigSpaces(requests []api.PCIConfigSpace) error {
	logrus.Traceln("ReportConfigSpaces()")
	failures := 0
	var lastErr error
	for i := range requests {
		if err := reportConfigSpace(&requests[i]); err != nil {
			failures++
			lastErr = err
		}
	}
	if len(requests) > 0 && failures == len(requests) {
		logrus.Warnf("Failed to read PCI configuration space")
		return lastErr
	}
	return nil
}
|
momo-i/Mekanism
|
src/main/java/mekanism/client/gui/qio/GuiQIOFilerSelect.java
|
<reponame>momo-i/Mekanism
package mekanism.client.gui.qio;
import javax.annotation.Nonnull;
import mekanism.client.gui.IGuiWrapper;
import mekanism.client.gui.element.window.filter.GuiFilterSelect;
import mekanism.client.gui.element.window.filter.qio.GuiQIOItemStackFilter;
import mekanism.client.gui.element.window.filter.qio.GuiQIOModIDFilter;
import mekanism.client.gui.element.window.filter.qio.GuiQIOTagFilter;
import mekanism.common.tile.qio.TileEntityQIOFilterHandler;
/**
 * Filter-type selection window for QIO filter handlers: supplies the
 * QIO-specific creator for each of the three supported filter kinds
 * (item stack, tag, mod ID).
 *
 * NOTE(review): "Filer" in the class name looks like a typo for "Filter",
 * but renaming a public class would break external references — confirm
 * and fix repo-wide if desired.
 */
public class GuiQIOFilerSelect extends GuiFilterSelect<TileEntityQIOFilterHandler> {

    public GuiQIOFilerSelect(IGuiWrapper gui, TileEntityQIOFilterHandler tile) {
        // 3 = number of filter choices displayed by the select window.
        super(gui, tile, 3);
    }

    @Nonnull
    @Override
    protected GuiFilterCreator<TileEntityQIOFilterHandler> getItemStackFilterCreator() {
        return GuiQIOItemStackFilter::create;
    }

    @Nonnull
    @Override
    protected GuiFilterCreator<TileEntityQIOFilterHandler> getTagFilterCreator() {
        return GuiQIOTagFilter::create;
    }

    @Nonnull
    @Override
    protected GuiFilterCreator<TileEntityQIOFilterHandler> getModIDFilterCreator() {
        return GuiQIOModIDFilter::create;
    }
}
|
CalebLogin/JavaDemo
|
algorithm/src/com/caleb/algorithm/leetcode/SingleNumber136.java
|
package com.caleb.algorithm.leetcode;
/**
 * LeetCode 136 – Single Number.
 *
 * Given a non-empty array of integers in which every element appears
 * exactly twice except for one, find that single element. Runs in linear
 * time using constant extra space.
 */
public class SingleNumber136 {

    /**
     * XOR-folds the array: pairs cancel out (a ^ a == 0) and XOR with 0 is
     * the identity, so the accumulator ends up holding the unpaired value
     * (e.g. 5 ^ 6 ^ 5 == 6).
     *
     * @param nums non-empty array where exactly one value appears once
     * @return the element that appears exactly once
     */
    public int singleNumber(int[] nums) {
        int acc = 0;
        for (int value : nums) {
            acc ^= value;
        }
        return acc;
    }

    public static void main(String[] args) {
        SingleNumber136 singleNumber136 = new SingleNumber136();
        System.out.println(singleNumber136.singleNumber(new int[]{4, 1, 2, 1, 2}));
    }
}
|
aws/aws-cdk-go
|
awscdk/awstimestream/awstimestream.go
|
package awstimestream
import (
_init_ "github.com/aws/aws-cdk-go/awscdk/jsii"
_jsii_ "github.com/aws/jsii-runtime-go/runtime"
"github.com/aws/aws-cdk-go/awscdk"
"github.com/aws/aws-cdk-go/awscdk/awstimestream/internal"
"github.com/aws/constructs-go/constructs/v3"
)
// A CloudFormation `AWS::Timestream::Database`.
//
// Creates a new Timestream database. If the AWS KMS key is not specified, the database will be encrypted with a Timestream managed AWS KMS key located in your account. Refer to [AWS managed AWS KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk) for more info. [Service quotas apply](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html) . See [code sample](https://docs.aws.amazon.com/timestream/latest/developerguide/code-samples.create-db.html) for details.
//
// TODO: EXAMPLE
//
type CfnDatabase interface {
awscdk.CfnResource
awscdk.IInspectable
AttrArn() *string
CfnOptions() awscdk.ICfnResourceOptions
CfnProperties() *map[string]interface{}
CfnResourceType() *string
CreationStack() *[]*string
DatabaseName() *string
SetDatabaseName(val *string)
KmsKeyId() *string
SetKmsKeyId(val *string)
LogicalId() *string
Node() awscdk.ConstructNode
Ref() *string
Stack() awscdk.Stack
Tags() awscdk.TagManager
UpdatedProperites() *map[string]interface{}
AddDeletionOverride(path *string)
AddDependsOn(target awscdk.CfnResource)
AddMetadata(key *string, value interface{})
AddOverride(path *string, value interface{})
AddPropertyDeletionOverride(propertyPath *string)
AddPropertyOverride(propertyPath *string, value interface{})
ApplyRemovalPolicy(policy awscdk.RemovalPolicy, options *awscdk.RemovalPolicyOptions)
GetAtt(attributeName *string) awscdk.Reference
GetMetadata(key *string) interface{}
Inspect(inspector awscdk.TreeInspector)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
OverrideLogicalId(newLogicalId *string)
Prepare()
RenderProperties(props *map[string]interface{}) *map[string]interface{}
ShouldSynthesize() *bool
Synthesize(session awscdk.ISynthesisSession)
ToString() *string
Validate() *[]*string
ValidateProperties(_properties interface{})
}
// The jsii proxy struct for CfnDatabase
type jsiiProxy_CfnDatabase struct {
internal.Type__awscdkCfnResource
internal.Type__awscdkIInspectable
}
func (j *jsiiProxy_CfnDatabase) AttrArn() *string {
var returns *string
_jsii_.Get(
j,
"attrArn",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) CfnOptions() awscdk.ICfnResourceOptions {
var returns awscdk.ICfnResourceOptions
_jsii_.Get(
j,
"cfnOptions",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) CfnProperties() *map[string]interface{} {
var returns *map[string]interface{}
_jsii_.Get(
j,
"cfnProperties",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) CfnResourceType() *string {
var returns *string
_jsii_.Get(
j,
"cfnResourceType",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) CreationStack() *[]*string {
var returns *[]*string
_jsii_.Get(
j,
"creationStack",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) DatabaseName() *string {
var returns *string
_jsii_.Get(
j,
"databaseName",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) KmsKeyId() *string {
var returns *string
_jsii_.Get(
j,
"kmsKeyId",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) LogicalId() *string {
var returns *string
_jsii_.Get(
j,
"logicalId",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) Node() awscdk.ConstructNode {
var returns awscdk.ConstructNode
_jsii_.Get(
j,
"node",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) Ref() *string {
var returns *string
_jsii_.Get(
j,
"ref",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) Stack() awscdk.Stack {
var returns awscdk.Stack
_jsii_.Get(
j,
"stack",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnDatabase) Tags() awscdk.TagManager {
var returns awscdk.TagManager
_jsii_.Get(
j,
"tags",
&returns,
)
return returns
}
// UpdatedProperites returns the jsii "updatedProperites" property.
//
// NOTE(review): "Properites" is misspelled, but this file is generated
// jsii binding code — the method name must match the CfnDatabase
// interface and the string key below must match the upstream jsii
// property, so neither can be corrected here; fix belongs upstream in
// the code generator, if anywhere.
func (j *jsiiProxy_CfnDatabase) UpdatedProperites() *map[string]interface{} {
	var returns *map[string]interface{}
	_jsii_.Get(
		j,
		"updatedProperites",
		&returns,
	)
	return returns
}
// Create a new `AWS::Timestream::Database`.
func NewCfnDatabase(scope awscdk.Construct, id *string, props *CfnDatabaseProps) CfnDatabase {
_init_.Initialize()
j := jsiiProxy_CfnDatabase{}
_jsii_.Create(
"monocdk.aws_timestream.CfnDatabase",
[]interface{}{scope, id, props},
&j,
)
return &j
}
// Create a new `AWS::Timestream::Database`.
func NewCfnDatabase_Override(c CfnDatabase, scope awscdk.Construct, id *string, props *CfnDatabaseProps) {
_init_.Initialize()
_jsii_.Create(
"monocdk.aws_timestream.CfnDatabase",
[]interface{}{scope, id, props},
c,
)
}
func (j *jsiiProxy_CfnDatabase) SetDatabaseName(val *string) {
_jsii_.Set(
j,
"databaseName",
val,
)
}
func (j *jsiiProxy_CfnDatabase) SetKmsKeyId(val *string) {
_jsii_.Set(
j,
"kmsKeyId",
val,
)
}
// Returns `true` if a construct is a stack element (i.e. part of the synthesized cloudformation template).
//
// Uses duck-typing instead of `instanceof` to allow stack elements from different
// versions of this library to be included in the same stack.
//
// Returns: The construct as a stack element or undefined if it is not a stack element.
// Experimental.
func CfnDatabase_IsCfnElement(x interface{}) *bool {
_init_.Initialize()
var returns *bool
_jsii_.StaticInvoke(
"monocdk.aws_timestream.CfnDatabase",
"isCfnElement",
[]interface{}{x},
&returns,
)
return returns
}
// Check whether the given construct is a CfnResource.
// Experimental.
func CfnDatabase_IsCfnResource(construct constructs.IConstruct) *bool {
_init_.Initialize()
var returns *bool
_jsii_.StaticInvoke(
"monocdk.aws_timestream.CfnDatabase",
"isCfnResource",
[]interface{}{construct},
&returns,
)
return returns
}
// Return whether the given object is a Construct.
// Experimental.
func CfnDatabase_IsConstruct(x interface{}) *bool {
_init_.Initialize()
var returns *bool
_jsii_.StaticInvoke(
"monocdk.aws_timestream.CfnDatabase",
"isConstruct",
[]interface{}{x},
&returns,
)
return returns
}
func CfnDatabase_CFN_RESOURCE_TYPE_NAME() *string {
_init_.Initialize()
var returns *string
_jsii_.StaticGet(
"monocdk.aws_timestream.CfnDatabase",
"CFN_RESOURCE_TYPE_NAME",
&returns,
)
return returns
}
// Syntactic sugar for `addOverride(path, undefined)`.
// Experimental.
func (c *jsiiProxy_CfnDatabase) AddDeletionOverride(path *string) {
_jsii_.InvokeVoid(
c,
"addDeletionOverride",
[]interface{}{path},
)
}
// Indicates that this resource depends on another resource and cannot be provisioned unless the other resource has been successfully provisioned.
//
// This can be used for resources across stacks (or nested stack) boundaries
// and the dependency will automatically be transferred to the relevant scope.
// Experimental.
func (c *jsiiProxy_CfnDatabase) AddDependsOn(target awscdk.CfnResource) {
_jsii_.InvokeVoid(
c,
"addDependsOn",
[]interface{}{target},
)
}
// Add a value to the CloudFormation Resource Metadata.
// See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/metadata-section-structure.html
//
// Note that this is a different set of metadata from CDK node metadata; this
// metadata ends up in the stack template under the resource, whereas CDK
// node metadata ends up in the Cloud Assembly.
//
// Experimental.
func (c *jsiiProxy_CfnDatabase) AddMetadata(key *string, value interface{}) {
_jsii_.InvokeVoid(
c,
"addMetadata",
[]interface{}{key, value},
)
}
// Adds an override to the synthesized CloudFormation resource.
//
// To add a
// property override, either use `addPropertyOverride` or prefix `path` with
// "Properties." (i.e. `Properties.TopicName`).
//
// If the override is nested, separate each nested level using a dot (.) in the path parameter.
// If there is an array as part of the nesting, specify the index in the path.
//
// To include a literal `.` in the property name, prefix with a `\`. In most
// programming languages you will need to write this as `"\\."` because the
// `\` itself will need to be escaped.
//
// For example,
// ```typescript
// cfnResource.addOverride('Properties.GlobalSecondaryIndexes.0.Projection.NonKeyAttributes', ['myattribute']);
// cfnResource.addOverride('Properties.GlobalSecondaryIndexes.1.ProjectionType', 'INCLUDE');
// ```
// would add the overrides
// ```json
// "Properties": {
// "GlobalSecondaryIndexes": [
// {
// "Projection": {
// "NonKeyAttributes": [ "myattribute" ]
// ...
// }
// ...
// },
// {
// "ProjectionType": "INCLUDE"
// ...
// },
// ]
// ...
// }
// ```
// Experimental.
func (c *jsiiProxy_CfnDatabase) AddOverride(path *string, value interface{}) {
_jsii_.InvokeVoid(
c,
"addOverride",
[]interface{}{path, value},
)
}
// Adds an override that deletes the value of a property from the resource definition.
// Experimental.
func (c *jsiiProxy_CfnDatabase) AddPropertyDeletionOverride(propertyPath *string) {
_jsii_.InvokeVoid(
c,
"addPropertyDeletionOverride",
[]interface{}{propertyPath},
)
}
// Adds an override to a resource property.
//
// Syntactic sugar for `addOverride("Properties.<...>", value)`.
// Experimental.
func (c *jsiiProxy_CfnDatabase) AddPropertyOverride(propertyPath *string, value interface{}) {
_jsii_.InvokeVoid(
c,
"addPropertyOverride",
[]interface{}{propertyPath, value},
)
}
// Sets the deletion policy of the resource based on the removal policy specified.
//
// The Removal Policy controls what happens to this resource when it stops
// being managed by CloudFormation, either because you've removed it from the
// CDK application or because you've made a change that requires the resource
// to be replaced.
//
// The resource can be deleted (`RemovalPolicy.DESTROY`), or left in your AWS
// account for data recovery and cleanup later (`RemovalPolicy.RETAIN`).
// Experimental.
func (c *jsiiProxy_CfnDatabase) ApplyRemovalPolicy(policy awscdk.RemovalPolicy, options *awscdk.RemovalPolicyOptions) {
_jsii_.InvokeVoid(
c,
"applyRemovalPolicy",
[]interface{}{policy, options},
)
}
// Returns a token for an runtime attribute of this resource.
//
// Ideally, use generated attribute accessors (e.g. `resource.arn`), but this can be used for future compatibility
// in case there is no generated attribute.
// Experimental.
func (c *jsiiProxy_CfnDatabase) GetAtt(attributeName *string) awscdk.Reference {
var returns awscdk.Reference
_jsii_.Invoke(
c,
"getAtt",
[]interface{}{attributeName},
&returns,
)
return returns
}
// Retrieve a value value from the CloudFormation Resource Metadata.
// See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/metadata-section-structure.html
//
// Note that this is a different set of metadata from CDK node metadata; this
// metadata ends up in the stack template under the resource, whereas CDK
// node metadata ends up in the Cloud Assembly.
//
// Experimental.
func (c *jsiiProxy_CfnDatabase) GetMetadata(key *string) interface{} {
var returns interface{}
_jsii_.Invoke(
c,
"getMetadata",
[]interface{}{key},
&returns,
)
return returns
}
// Examines the CloudFormation resource and discloses attributes.
func (c *jsiiProxy_CfnDatabase) Inspect(inspector awscdk.TreeInspector) {
_jsii_.InvokeVoid(
c,
"inspect",
[]interface{}{inspector},
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
// Experimental.
func (c *jsiiProxy_CfnDatabase) OnPrepare() {
_jsii_.InvokeVoid(
c,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
// Experimental.
func (c *jsiiProxy_CfnDatabase) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
c,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Experimental.
func (c *jsiiProxy_CfnDatabase) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
c,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Overrides the auto-generated logical ID with a specific ID.
// Experimental.
func (c *jsiiProxy_CfnDatabase) OverrideLogicalId(newLogicalId *string) {
_jsii_.InvokeVoid(
c,
"overrideLogicalId",
[]interface{}{newLogicalId},
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
// Experimental.
func (c *jsiiProxy_CfnDatabase) Prepare() {
_jsii_.InvokeVoid(
c,
"prepare",
nil, // no parameters
)
}
func (c *jsiiProxy_CfnDatabase) RenderProperties(props *map[string]interface{}) *map[string]interface{} {
var returns *map[string]interface{}
_jsii_.Invoke(
c,
"renderProperties",
[]interface{}{props},
&returns,
)
return returns
}
// Can be overridden by subclasses to determine if this resource will be rendered into the cloudformation template.
//
// Returns: `true` if the resource should be included or `false` is the resource
// should be omitted.
// Experimental.
func (c *jsiiProxy_CfnDatabase) ShouldSynthesize() *bool {
var returns *bool
_jsii_.Invoke(
c,
"shouldSynthesize",
nil, // no parameters
&returns,
)
return returns
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
// Experimental.
func (c *jsiiProxy_CfnDatabase) Synthesize(session awscdk.ISynthesisSession) {
_jsii_.InvokeVoid(
c,
"synthesize",
[]interface{}{session},
)
}
// Returns a string representation of this construct.
//
// Returns: a string representation of this resource
// Experimental.
func (c *jsiiProxy_CfnDatabase) ToString() *string {
var returns *string
_jsii_.Invoke(
c,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Experimental.
func (c *jsiiProxy_CfnDatabase) Validate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
c,
"validate",
nil, // no parameters
&returns,
)
return returns
}
// Experimental.
func (c *jsiiProxy_CfnDatabase) ValidateProperties(_properties interface{}) {
_jsii_.InvokeVoid(
c,
"validateProperties",
[]interface{}{_properties},
)
}
// Properties for defining a `CfnDatabase`.
//
// TODO: EXAMPLE
//
type CfnDatabaseProps struct {
// The name of the Timestream database.
//
// *Length Constraints* : Minimum length of 3 bytes. Maximum length of 256 bytes.
DatabaseName *string `json:"databaseName"`
// The identifier of the AWS KMS key used to encrypt the data stored in the database.
KmsKeyId *string `json:"kmsKeyId"`
// The tags to add to the database.
Tags *[]*awscdk.CfnTag `json:"tags"`
}
// A CloudFormation `AWS::Timestream::ScheduledQuery`.
//
// Create a scheduled query that will be run on your behalf at the configured schedule. Timestream assumes the execution role provided as part of the `ScheduledQueryExecutionRoleArn` parameter to run the query. You can use the `NotificationConfiguration` parameter to configure notification for your scheduled query operations.
//
// TODO: EXAMPLE
//
type CfnScheduledQuery interface {
awscdk.CfnResource
awscdk.IInspectable
AttrArn() *string
AttrSqErrorReportConfiguration() *string
AttrSqKmsKeyId() *string
AttrSqName() *string
AttrSqNotificationConfiguration() *string
AttrSqQueryString() *string
AttrSqScheduleConfiguration() *string
AttrSqScheduledQueryExecutionRoleArn() *string
AttrSqTargetConfiguration() *string
CfnOptions() awscdk.ICfnResourceOptions
CfnProperties() *map[string]interface{}
CfnResourceType() *string
ClientToken() *string
SetClientToken(val *string)
CreationStack() *[]*string
ErrorReportConfiguration() interface{}
SetErrorReportConfiguration(val interface{})
KmsKeyId() *string
SetKmsKeyId(val *string)
LogicalId() *string
Node() awscdk.ConstructNode
NotificationConfiguration() interface{}
SetNotificationConfiguration(val interface{})
QueryString() *string
SetQueryString(val *string)
Ref() *string
ScheduleConfiguration() interface{}
SetScheduleConfiguration(val interface{})
ScheduledQueryExecutionRoleArn() *string
SetScheduledQueryExecutionRoleArn(val *string)
ScheduledQueryName() *string
SetScheduledQueryName(val *string)
Stack() awscdk.Stack
Tags() awscdk.TagManager
TargetConfiguration() interface{}
SetTargetConfiguration(val interface{})
UpdatedProperites() *map[string]interface{}
AddDeletionOverride(path *string)
AddDependsOn(target awscdk.CfnResource)
AddMetadata(key *string, value interface{})
AddOverride(path *string, value interface{})
AddPropertyDeletionOverride(propertyPath *string)
AddPropertyOverride(propertyPath *string, value interface{})
ApplyRemovalPolicy(policy awscdk.RemovalPolicy, options *awscdk.RemovalPolicyOptions)
GetAtt(attributeName *string) awscdk.Reference
GetMetadata(key *string) interface{}
Inspect(inspector awscdk.TreeInspector)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
OverrideLogicalId(newLogicalId *string)
Prepare()
RenderProperties(props *map[string]interface{}) *map[string]interface{}
ShouldSynthesize() *bool
Synthesize(session awscdk.ISynthesisSession)
ToString() *string
Validate() *[]*string
ValidateProperties(_properties interface{})
}
// The jsii proxy struct for CfnScheduledQuery
type jsiiProxy_CfnScheduledQuery struct {
internal.Type__awscdkCfnResource
internal.Type__awscdkIInspectable
}
func (j *jsiiProxy_CfnScheduledQuery) AttrArn() *string {
var returns *string
_jsii_.Get(
j,
"attrArn",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqErrorReportConfiguration() *string {
var returns *string
_jsii_.Get(
j,
"attrSqErrorReportConfiguration",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqKmsKeyId() *string {
var returns *string
_jsii_.Get(
j,
"attrSqKmsKeyId",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqName() *string {
var returns *string
_jsii_.Get(
j,
"attrSqName",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqNotificationConfiguration() *string {
var returns *string
_jsii_.Get(
j,
"attrSqNotificationConfiguration",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqQueryString() *string {
var returns *string
_jsii_.Get(
j,
"attrSqQueryString",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqScheduleConfiguration() *string {
var returns *string
_jsii_.Get(
j,
"attrSqScheduleConfiguration",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqScheduledQueryExecutionRoleArn() *string {
var returns *string
_jsii_.Get(
j,
"attrSqScheduledQueryExecutionRoleArn",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) AttrSqTargetConfiguration() *string {
var returns *string
_jsii_.Get(
j,
"attrSqTargetConfiguration",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) CfnOptions() awscdk.ICfnResourceOptions {
var returns awscdk.ICfnResourceOptions
_jsii_.Get(
j,
"cfnOptions",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) CfnProperties() *map[string]interface{} {
var returns *map[string]interface{}
_jsii_.Get(
j,
"cfnProperties",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) CfnResourceType() *string {
var returns *string
_jsii_.Get(
j,
"cfnResourceType",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) ClientToken() *string {
var returns *string
_jsii_.Get(
j,
"clientToken",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) CreationStack() *[]*string {
var returns *[]*string
_jsii_.Get(
j,
"creationStack",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) ErrorReportConfiguration() interface{} {
var returns interface{}
_jsii_.Get(
j,
"errorReportConfiguration",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) KmsKeyId() *string {
var returns *string
_jsii_.Get(
j,
"kmsKeyId",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) LogicalId() *string {
var returns *string
_jsii_.Get(
j,
"logicalId",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) Node() awscdk.ConstructNode {
var returns awscdk.ConstructNode
_jsii_.Get(
j,
"node",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) NotificationConfiguration() interface{} {
var returns interface{}
_jsii_.Get(
j,
"notificationConfiguration",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) QueryString() *string {
var returns *string
_jsii_.Get(
j,
"queryString",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) Ref() *string {
var returns *string
_jsii_.Get(
j,
"ref",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) ScheduleConfiguration() interface{} {
var returns interface{}
_jsii_.Get(
j,
"scheduleConfiguration",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) ScheduledQueryExecutionRoleArn() *string {
var returns *string
_jsii_.Get(
j,
"scheduledQueryExecutionRoleArn",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) ScheduledQueryName() *string {
var returns *string
_jsii_.Get(
j,
"scheduledQueryName",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) Stack() awscdk.Stack {
var returns awscdk.Stack
_jsii_.Get(
j,
"stack",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) Tags() awscdk.TagManager {
var returns awscdk.TagManager
_jsii_.Get(
j,
"tags",
&returns,
)
return returns
}
func (j *jsiiProxy_CfnScheduledQuery) TargetConfiguration() interface{} {
var returns interface{}
_jsii_.Get(
j,
"targetConfiguration",
&returns,
)
return returns
}
// UpdatedProperites returns the jsii "updatedProperites" property.
//
// NOTE(review): same propagated "Properites" misspelling as elsewhere in
// this generated binding; the name and property key mirror the upstream
// jsii API and must not be changed by hand.
func (j *jsiiProxy_CfnScheduledQuery) UpdatedProperites() *map[string]interface{} {
	var returns *map[string]interface{}
	_jsii_.Get(
		j,
		"updatedProperites",
		&returns,
	)
	return returns
}
// Create a new `AWS::Timestream::ScheduledQuery`.
func NewCfnScheduledQuery(scope awscdk.Construct, id *string, props *CfnScheduledQueryProps) CfnScheduledQuery {
_init_.Initialize()
j := jsiiProxy_CfnScheduledQuery{}
_jsii_.Create(
"monocdk.aws_timestream.CfnScheduledQuery",
[]interface{}{scope, id, props},
&j,
)
return &j
}
// Create a new `AWS::Timestream::ScheduledQuery`.
func NewCfnScheduledQuery_Override(c CfnScheduledQuery, scope awscdk.Construct, id *string, props *CfnScheduledQueryProps) {
_init_.Initialize()
_jsii_.Create(
"monocdk.aws_timestream.CfnScheduledQuery",
[]interface{}{scope, id, props},
c,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetClientToken(val *string) {
_jsii_.Set(
j,
"clientToken",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetErrorReportConfiguration(val interface{}) {
_jsii_.Set(
j,
"errorReportConfiguration",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetKmsKeyId(val *string) {
_jsii_.Set(
j,
"kmsKeyId",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetNotificationConfiguration(val interface{}) {
_jsii_.Set(
j,
"notificationConfiguration",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetQueryString(val *string) {
_jsii_.Set(
j,
"queryString",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetScheduleConfiguration(val interface{}) {
_jsii_.Set(
j,
"scheduleConfiguration",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetScheduledQueryExecutionRoleArn(val *string) {
_jsii_.Set(
j,
"scheduledQueryExecutionRoleArn",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetScheduledQueryName(val *string) {
_jsii_.Set(
j,
"scheduledQueryName",
val,
)
}
func (j *jsiiProxy_CfnScheduledQuery) SetTargetConfiguration(val interface{}) {
_jsii_.Set(
j,
"targetConfiguration",
val,
)
}
// Returns `true` if a construct is a stack element (i.e. part of the synthesized cloudformation template).
//
// Uses duck-typing instead of `instanceof` to allow stack elements from different
// versions of this library to be included in the same stack.
//
// Returns: The construct as a stack element or undefined if it is not a stack element.
// Experimental.
func CfnScheduledQuery_IsCfnElement(x interface{}) *bool {
_init_.Initialize()
var returns *bool
_jsii_.StaticInvoke(
"monocdk.aws_timestream.CfnScheduledQuery",
"isCfnElement",
[]interface{}{x},
&returns,
)
return returns
}
// Check whether the given construct is a CfnResource.
// Experimental.
func CfnScheduledQuery_IsCfnResource(construct constructs.IConstruct) *bool {
_init_.Initialize()
var returns *bool
_jsii_.StaticInvoke(
"monocdk.aws_timestream.CfnScheduledQuery",
"isCfnResource",
[]interface{}{construct},
&returns,
)
return returns
}
// Return whether the given object is a Construct.
// Experimental.
func CfnScheduledQuery_IsConstruct(x interface{}) *bool {
_init_.Initialize()
var returns *bool
_jsii_.StaticInvoke(
"monocdk.aws_timestream.CfnScheduledQuery",
"isConstruct",
[]interface{}{x},
&returns,
)
return returns
}
func CfnScheduledQuery_CFN_RESOURCE_TYPE_NAME() *string {
_init_.Initialize()
var returns *string
_jsii_.StaticGet(
"monocdk.aws_timestream.CfnScheduledQuery",
"CFN_RESOURCE_TYPE_NAME",
&returns,
)
return returns
}
// Syntactic sugar for `addOverride(path, undefined)`.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) AddDeletionOverride(path *string) {
	_jsii_.InvokeVoid(q, "addDeletionOverride", []interface{}{path})
}

// Indicates that this resource depends on another resource and cannot be provisioned unless the other resource has been successfully provisioned.
//
// This can be used for resources across stacks (or nested stack) boundaries
// and the dependency will automatically be transferred to the relevant scope.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) AddDependsOn(target awscdk.CfnResource) {
	_jsii_.InvokeVoid(q, "addDependsOn", []interface{}{target})
}

// Add a value to the CloudFormation Resource Metadata.
//
// This metadata ends up in the stack template under the resource; it is a
// different set of metadata from CDK node metadata, which ends up in the
// Cloud Assembly.
// See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/metadata-section-structure.html
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) AddMetadata(key *string, value interface{}) {
	_jsii_.InvokeVoid(q, "addMetadata", []interface{}{key, value})
}

// Adds an override to the synthesized CloudFormation resource.
//
// To add a property override, either use `AddPropertyOverride` or prefix
// `path` with "Properties." (i.e. `Properties.TopicName`). Nested levels are
// separated with a dot (.) in the path parameter; if there is an array as
// part of the nesting, specify the index in the path. To include a literal
// `.` in a property name, escape it as `\.` (written `"\\."` in most
// languages).
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) AddOverride(path *string, value interface{}) {
	_jsii_.InvokeVoid(q, "addOverride", []interface{}{path, value})
}

// Adds an override that deletes the value of a property from the resource definition.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) AddPropertyDeletionOverride(propertyPath *string) {
	_jsii_.InvokeVoid(q, "addPropertyDeletionOverride", []interface{}{propertyPath})
}

// Adds an override to a resource property.
//
// Syntactic sugar for `addOverride("Properties.<...>", value)`.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) AddPropertyOverride(propertyPath *string, value interface{}) {
	_jsii_.InvokeVoid(q, "addPropertyOverride", []interface{}{propertyPath, value})
}

// Sets the deletion policy of the resource based on the removal policy specified.
//
// The Removal Policy controls what happens to this resource when it stops
// being managed by CloudFormation. The resource can be deleted
// (`RemovalPolicy.DESTROY`), or left in your AWS account for data recovery
// and cleanup later (`RemovalPolicy.RETAIN`).
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) ApplyRemovalPolicy(policy awscdk.RemovalPolicy, options *awscdk.RemovalPolicyOptions) {
	_jsii_.InvokeVoid(q, "applyRemovalPolicy", []interface{}{policy, options})
}
// Returns a token for a runtime attribute of this resource.
//
// Ideally, use generated attribute accessors (e.g. `resource.arn`), but this
// can be used for future compatibility in case there is no generated attribute.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) GetAtt(attributeName *string) awscdk.Reference {
	var ref awscdk.Reference
	_jsii_.Invoke(q, "getAtt", []interface{}{attributeName}, &ref)
	return ref
}

// Retrieve a value from the CloudFormation Resource Metadata.
//
// This is the stack-template metadata, not CDK node metadata (which ends up
// in the Cloud Assembly).
// See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/metadata-section-structure.html
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) GetMetadata(key *string) interface{} {
	var value interface{}
	_jsii_.Invoke(q, "getMetadata", []interface{}{key}, &value)
	return value
}

// Examines the CloudFormation resource and discloses attributes.
func (q *jsiiProxy_CfnScheduledQuery) Inspect(inspector awscdk.TreeInspector) {
	_jsii_.InvokeVoid(q, "inspect", []interface{}{inspector})
}

// Perform final modifications before synthesis.
//
// May be implemented by derived constructs; called after child constructs
// have been prepared. Advanced framework feature — only use this if you
// understand the implications.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) OnPrepare() {
	_jsii_.InvokeVoid(q, "onPrepare", nil)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// Usually implemented by framework-level constructs such as `Stack` and
// `Asset`, as they participate in synthesizing the cloud assembly.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) OnSynthesize(session constructs.ISynthesisSession) {
	_jsii_.InvokeVoid(q, "onSynthesize", []interface{}{session})
}

// Validate the current construct.
//
// May be implemented by derived constructs to perform validation logic; it is
// called on all constructs before synthesis.
//
// Returns: an array of validation error messages, or an empty array if the
// construct is valid.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) OnValidate() *[]*string {
	var messages *[]*string
	_jsii_.Invoke(q, "onValidate", nil, &messages)
	return messages
}

// Overrides the auto-generated logical ID with a specific ID.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) OverrideLogicalId(newLogicalId *string) {
	_jsii_.InvokeVoid(q, "overrideLogicalId", []interface{}{newLogicalId})
}

// Perform final modifications before synthesis.
//
// May be implemented by derived constructs; called after child constructs
// have been prepared. Advanced framework feature — only use this if you
// understand the implications.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) Prepare() {
	_jsii_.InvokeVoid(q, "prepare", nil)
}
// RenderProperties delegates to the underlying jsii "renderProperties" method.
func (q *jsiiProxy_CfnScheduledQuery) RenderProperties(props *map[string]interface{}) *map[string]interface{} {
	var rendered *map[string]interface{}
	_jsii_.Invoke(q, "renderProperties", []interface{}{props}, &rendered)
	return rendered
}

// Can be overridden by subclasses to determine if this resource will be rendered into the cloudformation template.
//
// Returns: `true` if the resource should be included, `false` if it should be
// omitted.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) ShouldSynthesize() *bool {
	var include *bool
	_jsii_.Invoke(q, "shouldSynthesize", nil, &include)
	return include
}

// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// Usually implemented by framework-level constructs such as `Stack` and
// `Asset`, as they participate in synthesizing the cloud assembly.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) Synthesize(session awscdk.ISynthesisSession) {
	_jsii_.InvokeVoid(q, "synthesize", []interface{}{session})
}

// Returns a string representation of this construct.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) ToString() *string {
	var repr *string
	_jsii_.Invoke(q, "toString", nil, &repr)
	return repr
}

// Validate the current construct.
//
// May be implemented by derived constructs to perform validation logic; it is
// called on all constructs before synthesis.
//
// Returns: an array of validation error messages, or an empty array if the
// construct is valid.
// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) Validate() *[]*string {
	var messages *[]*string
	_jsii_.Invoke(q, "validate", nil, &messages)
	return messages
}

// Experimental.
func (q *jsiiProxy_CfnScheduledQuery) ValidateProperties(_properties interface{}) {
	_jsii_.InvokeVoid(q, "validateProperties", []interface{}{_properties})
}
// DimensionMappingProperty maps column(s) from the query result to a dimension in the destination table.
type CfnScheduledQuery_DimensionMappingProperty struct {
	// Type for the dimension.
	DimensionValueType *string `json:"dimensionValueType"`
	// Column name from query result.
	Name *string `json:"name"`
}

// ErrorReportConfigurationProperty holds the configuration required for error reporting.
type CfnScheduledQuery_ErrorReportConfigurationProperty struct {
	// The S3 configuration for the error reports.
	S3Configuration interface{} `json:"s3Configuration"`
}

// MixedMeasureMappingProperty describes mappings that can be used to ingest data
// into a mixture of narrow and multi measures in the derived table.
type CfnScheduledQuery_MixedMeasureMappingProperty struct {
	// Type of the value that is to be read from sourceColumn.
	//
	// If the mapping is for MULTI, use MeasureValueType.MULTI.
	MeasureValueType *string `json:"measureValueType"`
	// Refers to the value of measure_name in a result row.
	//
	// This field is required if MeasureNameColumn is provided.
	MeasureName *string `json:"measureName"`
	// Required when measureValueType is MULTI.
	//
	// Attribute mappings for MULTI value measures.
	MultiMeasureAttributeMappings interface{} `json:"multiMeasureAttributeMappings"`
	// This field refers to the source column from which measure-value is to be read for result materialization.
	SourceColumn *string `json:"sourceColumn"`
	// Target measure name to be used.
	//
	// If not provided, the target measure name by default would be measure-name if provided, or sourceColumn otherwise.
	TargetMeasureName *string `json:"targetMeasureName"`
}

// MultiMeasureAttributeMappingProperty is an attribute mapping for MULTI value measures.
type CfnScheduledQuery_MultiMeasureAttributeMappingProperty struct {
	// Type of the attribute to be read from the source column.
	MeasureValueType *string `json:"measureValueType"`
	// Source column from where the attribute value is to be read.
	SourceColumn *string `json:"sourceColumn"`
	// Custom name to be used for attribute name in derived table.
	//
	// If not provided, source column name would be used.
	TargetMultiMeasureAttributeName *string `json:"targetMultiMeasureAttributeName"`
}

// MultiMeasureMappingsProperty can be used to ingest data as multi measures in the derived table.
//
// Only one of MixedMeasureMappings or MultiMeasureMappings is to be provided.
type CfnScheduledQuery_MultiMeasureMappingsProperty struct {
	// Required.
	//
	// Attribute mappings to be used for mapping query results to ingest data for multi-measure attributes.
	MultiMeasureAttributeMappings interface{} `json:"multiMeasureAttributeMappings"`
	// The name of the target multi-measure name in the derived table.
	//
	// This input is required when measureNameColumn is not provided. If MeasureNameColumn is provided, then value from that column will be used as multi-measure name.
	TargetMultiMeasureName *string `json:"targetMultiMeasureName"`
}

// NotificationConfigurationProperty is the notification configuration for a scheduled query.
//
// A notification is sent by Timestream when a scheduled query is created, its
// state is updated or when it is deleted.
type CfnScheduledQuery_NotificationConfigurationProperty struct {
	// Details on SNS configuration.
	SnsConfiguration interface{} `json:"snsConfiguration"`
}

// S3ConfigurationProperty holds details on the S3 location for error reports that result from running a query.
type CfnScheduledQuery_S3ConfigurationProperty struct {
	// Name of the S3 bucket under which error reports will be created.
	BucketName *string `json:"bucketName"`
	// Encryption at rest options for the error reports.
	//
	// If no encryption option is specified, Timestream will choose SSE_S3 as default.
	EncryptionOption *string `json:"encryptionOption"`
	// Prefix for the error report key.
	//
	// Timestream by default adds the following prefix to the error report path.
	ObjectKeyPrefix *string `json:"objectKeyPrefix"`
}

// ScheduleConfigurationProperty is the configuration of the schedule of the query.
type CfnScheduledQuery_ScheduleConfigurationProperty struct {
	// An expression that denotes when to trigger the scheduled query run.
	//
	// This can be a cron expression or a rate expression.
	ScheduleExpression *string `json:"scheduleExpression"`
}

// SnsConfigurationProperty holds details on SNS that are required to send the notification.
type CfnScheduledQuery_SnsConfigurationProperty struct {
	// SNS topic ARN that the scheduled query status notifications will be sent to.
	TopicArn *string `json:"topicArn"`
}

// TargetConfigurationProperty is the configuration used for writing the output of a query.
type CfnScheduledQuery_TargetConfigurationProperty struct {
	// Configuration needed to write data into the Timestream database and table.
	TimestreamConfiguration interface{} `json:"timestreamConfiguration"`
}

// TimestreamConfigurationProperty is the configuration to write data into a Timestream database and table.
//
// This configuration allows the user to map the query result select columns
// into the destination table columns.
type CfnScheduledQuery_TimestreamConfigurationProperty struct {
	// Name of Timestream database to which the query result will be written.
	DatabaseName *string `json:"databaseName"`
	// This is to allow mapping column(s) from the query result to the dimension in the destination table.
	DimensionMappings interface{} `json:"dimensionMappings"`
	// Name of Timestream table that the query result will be written to.
	//
	// The table should be within the same database that is provided in Timestream configuration.
	TableName *string `json:"tableName"`
	// Column from query result that should be used as the time column in destination table.
	//
	// Column type for this should be TIMESTAMP.
	TimeColumn *string `json:"timeColumn"`
	// Name of the measure column.
	MeasureNameColumn *string `json:"measureNameColumn"`
	// Specifies how to map measures to multi-measure records.
	MixedMeasureMappings interface{} `json:"mixedMeasureMappings"`
	// Multi-measure mappings.
	MultiMeasureMappings interface{} `json:"multiMeasureMappings"`
}
// CfnScheduledQueryProps contains the properties for defining a `CfnScheduledQuery`.
type CfnScheduledQueryProps struct {
	// Configuration for error reporting.
	//
	// Error reports will be generated when a problem is encountered when writing the query results.
	ErrorReportConfiguration interface{} `json:"errorReportConfiguration"`
	// Notification configuration for the scheduled query.
	//
	// A notification is sent by Timestream when a query run finishes, when the state is updated or when you delete it.
	NotificationConfiguration interface{} `json:"notificationConfiguration"`
	// The query string to run.
	//
	// Parameter names can be specified in the query string `@` character followed by an identifier. The named Parameter `@scheduled_runtime` is reserved and can be used in the query to get the time at which the query is scheduled to run.
	//
	// The timestamp calculated according to the ScheduleConfiguration parameter, will be the value of `@scheduled_runtime` paramater for each query run. For example, consider an instance of a scheduled query executing on 2021-12-01 00:00:00. For this instance, the `@scheduled_runtime` parameter is initialized to the timestamp 2021-12-01 00:00:00 when invoking the query.
	QueryString *string `json:"queryString"`
	// Schedule configuration.
	ScheduleConfiguration interface{} `json:"scheduleConfiguration"`
	// The ARN for the IAM role that Timestream will assume when running the scheduled query.
	ScheduledQueryExecutionRoleArn *string `json:"scheduledQueryExecutionRoleArn"`
	// Using a ClientToken makes the call to CreateScheduledQuery idempotent, in other words, making the same request repeatedly will produce the same result.
	//
	// Making multiple identical CreateScheduledQuery requests has the same effect as making a single request.
	//
	// - If CreateScheduledQuery is called without a `ClientToken` , the Query SDK generates a `ClientToken` on your behalf.
	// - After 8 hours, any request with the same `ClientToken` is treated as a new request.
	ClientToken *string `json:"clientToken"`
	// The Amazon KMS key used to encrypt the scheduled query resource, at-rest.
	//
	// If the Amazon KMS key is not specified, the scheduled query resource will be encrypted with a Timestream owned Amazon KMS key. To specify a KMS key, use the key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix the name with *alias/*
	//
	// If ErrorReportConfiguration uses `SSE_KMS` as encryption type, the same KmsKeyId is used to encrypt the error report at rest.
	KmsKeyId *string `json:"kmsKeyId"`
	// A name for the query.
	//
	// Scheduled query names must be unique within each Region.
	ScheduledQueryName *string `json:"scheduledQueryName"`
	// A list of key-value pairs to label the scheduled query.
	Tags *[]*awscdk.CfnTag `json:"tags"`
	// Scheduled query target store configuration.
	TargetConfiguration interface{} `json:"targetConfiguration"`
}
// CfnTable represents a CloudFormation `AWS::Timestream::Table`.
//
// The CreateTable operation adds a new table to an existing database in your account. In an AWS account, table names must be at least unique within each Region if they are in the same database. You may have identical table names in the same Region if the tables are in separate databases. While creating the table, you must specify the table name, database name, and the retention properties. [Service quotas apply](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html) . See [code sample](https://docs.aws.amazon.com/timestream/latest/developerguide/code-samples.create-table.html) for details.
type CfnTable interface {
	awscdk.CfnResource
	awscdk.IInspectable
	// Attribute/state accessors.
	AttrArn() *string
	AttrName() *string
	CfnOptions() awscdk.ICfnResourceOptions
	CfnProperties() *map[string]interface{}
	CfnResourceType() *string
	CreationStack() *[]*string
	DatabaseName() *string
	SetDatabaseName(val *string)
	LogicalId() *string
	Node() awscdk.ConstructNode
	Ref() *string
	RetentionProperties() interface{}
	SetRetentionProperties(val interface{})
	Stack() awscdk.Stack
	TableName() *string
	SetTableName(val *string)
	Tags() awscdk.TagManager
	// NOTE(review): the "Properites" misspelling is part of the upstream jsii
	// API surface ("updatedProperites") and must be preserved for compatibility.
	UpdatedProperites() *map[string]interface{}
	// Override / lifecycle methods (see the method implementations below for docs).
	AddDeletionOverride(path *string)
	AddDependsOn(target awscdk.CfnResource)
	AddMetadata(key *string, value interface{})
	AddOverride(path *string, value interface{})
	AddPropertyDeletionOverride(propertyPath *string)
	AddPropertyOverride(propertyPath *string, value interface{})
	ApplyRemovalPolicy(policy awscdk.RemovalPolicy, options *awscdk.RemovalPolicyOptions)
	GetAtt(attributeName *string) awscdk.Reference
	GetMetadata(key *string) interface{}
	Inspect(inspector awscdk.TreeInspector)
	OnPrepare()
	OnSynthesize(session constructs.ISynthesisSession)
	OnValidate() *[]*string
	OverrideLogicalId(newLogicalId *string)
	Prepare()
	RenderProperties(props *map[string]interface{}) *map[string]interface{}
	ShouldSynthesize() *bool
	Synthesize(session awscdk.ISynthesisSession)
	ToString() *string
	Validate() *[]*string
	ValidateProperties(_properties interface{})
}

// The jsii proxy struct for CfnTable.
type jsiiProxy_CfnTable struct {
	internal.Type__awscdkCfnResource
	internal.Type__awscdkIInspectable
}
// AttrArn reads the `attrArn` jsii property.
func (t *jsiiProxy_CfnTable) AttrArn() *string {
	var val *string
	_jsii_.Get(t, "attrArn", &val)
	return val
}

// AttrName reads the `attrName` jsii property.
func (t *jsiiProxy_CfnTable) AttrName() *string {
	var val *string
	_jsii_.Get(t, "attrName", &val)
	return val
}

// CfnOptions reads the `cfnOptions` jsii property.
func (t *jsiiProxy_CfnTable) CfnOptions() awscdk.ICfnResourceOptions {
	var val awscdk.ICfnResourceOptions
	_jsii_.Get(t, "cfnOptions", &val)
	return val
}

// CfnProperties reads the `cfnProperties` jsii property.
func (t *jsiiProxy_CfnTable) CfnProperties() *map[string]interface{} {
	var val *map[string]interface{}
	_jsii_.Get(t, "cfnProperties", &val)
	return val
}

// CfnResourceType reads the `cfnResourceType` jsii property.
func (t *jsiiProxy_CfnTable) CfnResourceType() *string {
	var val *string
	_jsii_.Get(t, "cfnResourceType", &val)
	return val
}

// CreationStack reads the `creationStack` jsii property.
func (t *jsiiProxy_CfnTable) CreationStack() *[]*string {
	var val *[]*string
	_jsii_.Get(t, "creationStack", &val)
	return val
}

// DatabaseName reads the `databaseName` jsii property.
func (t *jsiiProxy_CfnTable) DatabaseName() *string {
	var val *string
	_jsii_.Get(t, "databaseName", &val)
	return val
}

// LogicalId reads the `logicalId` jsii property.
func (t *jsiiProxy_CfnTable) LogicalId() *string {
	var val *string
	_jsii_.Get(t, "logicalId", &val)
	return val
}

// Node reads the `node` jsii property.
func (t *jsiiProxy_CfnTable) Node() awscdk.ConstructNode {
	var val awscdk.ConstructNode
	_jsii_.Get(t, "node", &val)
	return val
}

// Ref reads the `ref` jsii property.
func (t *jsiiProxy_CfnTable) Ref() *string {
	var val *string
	_jsii_.Get(t, "ref", &val)
	return val
}

// RetentionProperties reads the `retentionProperties` jsii property.
func (t *jsiiProxy_CfnTable) RetentionProperties() interface{} {
	var val interface{}
	_jsii_.Get(t, "retentionProperties", &val)
	return val
}

// Stack reads the `stack` jsii property.
func (t *jsiiProxy_CfnTable) Stack() awscdk.Stack {
	var val awscdk.Stack
	_jsii_.Get(t, "stack", &val)
	return val
}

// TableName reads the `tableName` jsii property.
func (t *jsiiProxy_CfnTable) TableName() *string {
	var val *string
	_jsii_.Get(t, "tableName", &val)
	return val
}

// Tags reads the `tags` jsii property.
func (t *jsiiProxy_CfnTable) Tags() awscdk.TagManager {
	var val awscdk.TagManager
	_jsii_.Get(t, "tags", &val)
	return val
}

// UpdatedProperites reads the `updatedProperites` jsii property.
// NOTE(review): the misspelling is part of the upstream jsii API and must be preserved.
func (t *jsiiProxy_CfnTable) UpdatedProperites() *map[string]interface{} {
	var val *map[string]interface{}
	_jsii_.Get(t, "updatedProperites", &val)
	return val
}
// Create a new `AWS::Timestream::Table`.
func NewCfnTable(scope awscdk.Construct, id *string, props *CfnTableProps) CfnTable {
	_init_.Initialize()
	proxy := jsiiProxy_CfnTable{}
	_jsii_.Create("monocdk.aws_timestream.CfnTable", []interface{}{scope, id, props}, &proxy)
	return &proxy
}

// Create a new `AWS::Timestream::Table`, initializing the supplied instance in place.
func NewCfnTable_Override(c CfnTable, scope awscdk.Construct, id *string, props *CfnTableProps) {
	_init_.Initialize()
	_jsii_.Create("monocdk.aws_timestream.CfnTable", []interface{}{scope, id, props}, c)
}

// SetDatabaseName writes the `databaseName` jsii property.
func (t *jsiiProxy_CfnTable) SetDatabaseName(val *string) {
	_jsii_.Set(t, "databaseName", val)
}

// SetRetentionProperties writes the `retentionProperties` jsii property.
func (t *jsiiProxy_CfnTable) SetRetentionProperties(val interface{}) {
	_jsii_.Set(t, "retentionProperties", val)
}

// SetTableName writes the `tableName` jsii property.
func (t *jsiiProxy_CfnTable) SetTableName(val *string) {
	_jsii_.Set(t, "tableName", val)
}
// Returns `true` if a construct is a stack element (i.e. part of the synthesized cloudformation template).
//
// Uses duck-typing instead of `instanceof` to allow stack elements from different
// versions of this library to be included in the same stack.
//
// Returns: The construct as a stack element or undefined if it is not a stack element.
// Experimental.
func CfnTable_IsCfnElement(x interface{}) *bool {
	_init_.Initialize()
	var result *bool
	_jsii_.StaticInvoke("monocdk.aws_timestream.CfnTable", "isCfnElement", []interface{}{x}, &result)
	return result
}

// Check whether the given construct is a CfnResource.
// Experimental.
func CfnTable_IsCfnResource(construct constructs.IConstruct) *bool {
	_init_.Initialize()
	var result *bool
	_jsii_.StaticInvoke("monocdk.aws_timestream.CfnTable", "isCfnResource", []interface{}{construct}, &result)
	return result
}

// Return whether the given object is a Construct.
// Experimental.
func CfnTable_IsConstruct(x interface{}) *bool {
	_init_.Initialize()
	var result *bool
	_jsii_.StaticInvoke("monocdk.aws_timestream.CfnTable", "isConstruct", []interface{}{x}, &result)
	return result
}

// CfnTable_CFN_RESOURCE_TYPE_NAME returns the value of the static
// CFN_RESOURCE_TYPE_NAME property of the underlying jsii class.
func CfnTable_CFN_RESOURCE_TYPE_NAME() *string {
	_init_.Initialize()
	var result *string
	_jsii_.StaticGet("monocdk.aws_timestream.CfnTable", "CFN_RESOURCE_TYPE_NAME", &result)
	return result
}
// Syntactic sugar for `addOverride(path, undefined)`.
// Experimental.
func (t *jsiiProxy_CfnTable) AddDeletionOverride(path *string) {
	_jsii_.InvokeVoid(t, "addDeletionOverride", []interface{}{path})
}

// Indicates that this resource depends on another resource and cannot be provisioned unless the other resource has been successfully provisioned.
//
// This can be used for resources across stacks (or nested stack) boundaries
// and the dependency will automatically be transferred to the relevant scope.
// Experimental.
func (t *jsiiProxy_CfnTable) AddDependsOn(target awscdk.CfnResource) {
	_jsii_.InvokeVoid(t, "addDependsOn", []interface{}{target})
}

// Add a value to the CloudFormation Resource Metadata.
//
// This metadata ends up in the stack template under the resource; it is a
// different set of metadata from CDK node metadata, which ends up in the
// Cloud Assembly.
// See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/metadata-section-structure.html
// Experimental.
func (t *jsiiProxy_CfnTable) AddMetadata(key *string, value interface{}) {
	_jsii_.InvokeVoid(t, "addMetadata", []interface{}{key, value})
}

// Adds an override to the synthesized CloudFormation resource.
//
// To add a property override, either use `AddPropertyOverride` or prefix
// `path` with "Properties." (i.e. `Properties.TopicName`). Nested levels are
// separated with a dot (.) in the path parameter; if there is an array as
// part of the nesting, specify the index in the path. To include a literal
// `.` in a property name, escape it as `\.` (written `"\\."` in most
// languages).
// Experimental.
func (t *jsiiProxy_CfnTable) AddOverride(path *string, value interface{}) {
	_jsii_.InvokeVoid(t, "addOverride", []interface{}{path, value})
}

// Adds an override that deletes the value of a property from the resource definition.
// Experimental.
func (t *jsiiProxy_CfnTable) AddPropertyDeletionOverride(propertyPath *string) {
	_jsii_.InvokeVoid(t, "addPropertyDeletionOverride", []interface{}{propertyPath})
}

// Adds an override to a resource property.
//
// Syntactic sugar for `addOverride("Properties.<...>", value)`.
// Experimental.
func (t *jsiiProxy_CfnTable) AddPropertyOverride(propertyPath *string, value interface{}) {
	_jsii_.InvokeVoid(t, "addPropertyOverride", []interface{}{propertyPath, value})
}

// Sets the deletion policy of the resource based on the removal policy specified.
//
// The Removal Policy controls what happens to this resource when it stops
// being managed by CloudFormation. The resource can be deleted
// (`RemovalPolicy.DESTROY`), or left in your AWS account for data recovery
// and cleanup later (`RemovalPolicy.RETAIN`).
// Experimental.
func (t *jsiiProxy_CfnTable) ApplyRemovalPolicy(policy awscdk.RemovalPolicy, options *awscdk.RemovalPolicyOptions) {
	_jsii_.InvokeVoid(t, "applyRemovalPolicy", []interface{}{policy, options})
}
// Returns a token for a runtime attribute of this resource.
//
// Ideally, use generated attribute accessors (e.g. `resource.arn`), but this
// can be used for future compatibility in case there is no generated attribute.
// Experimental.
func (t *jsiiProxy_CfnTable) GetAtt(attributeName *string) awscdk.Reference {
	var ref awscdk.Reference
	_jsii_.Invoke(t, "getAtt", []interface{}{attributeName}, &ref)
	return ref
}

// Retrieve a value from the CloudFormation Resource Metadata.
//
// This is the stack-template metadata, not CDK node metadata (which ends up
// in the Cloud Assembly).
// See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/metadata-section-structure.html
// Experimental.
func (t *jsiiProxy_CfnTable) GetMetadata(key *string) interface{} {
	var value interface{}
	_jsii_.Invoke(t, "getMetadata", []interface{}{key}, &value)
	return value
}

// Examines the CloudFormation resource and discloses attributes.
func (t *jsiiProxy_CfnTable) Inspect(inspector awscdk.TreeInspector) {
	_jsii_.InvokeVoid(t, "inspect", []interface{}{inspector})
}

// Perform final modifications before synthesis.
//
// May be implemented by derived constructs; called after child constructs
// have been prepared. Advanced framework feature — only use this if you
// understand the implications.
// Experimental.
func (t *jsiiProxy_CfnTable) OnPrepare() {
	_jsii_.InvokeVoid(t, "onPrepare", nil)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// Usually implemented by framework-level constructs such as `Stack` and
// `Asset`, as they participate in synthesizing the cloud assembly.
// Experimental.
func (t *jsiiProxy_CfnTable) OnSynthesize(session constructs.ISynthesisSession) {
	_jsii_.InvokeVoid(t, "onSynthesize", []interface{}{session})
}

// Validate the current construct.
//
// May be implemented by derived constructs to perform validation logic; it is
// called on all constructs before synthesis.
//
// Returns: an array of validation error messages, or an empty array if the
// construct is valid.
// Experimental.
func (t *jsiiProxy_CfnTable) OnValidate() *[]*string {
	var messages *[]*string
	_jsii_.Invoke(t, "onValidate", nil, &messages)
	return messages
}

// Overrides the auto-generated logical ID with a specific ID.
// Experimental.
func (t *jsiiProxy_CfnTable) OverrideLogicalId(newLogicalId *string) {
	_jsii_.InvokeVoid(t, "overrideLogicalId", []interface{}{newLogicalId})
}

// Perform final modifications before synthesis.
//
// May be implemented by derived constructs; called after child constructs
// have been prepared. Advanced framework feature — only use this if you
// understand the implications.
// Experimental.
func (t *jsiiProxy_CfnTable) Prepare() {
	_jsii_.InvokeVoid(t, "prepare", nil)
}
// RenderProperties delegates to the underlying jsii "renderProperties" method.
func (t *jsiiProxy_CfnTable) RenderProperties(props *map[string]interface{}) *map[string]interface{} {
	var rendered *map[string]interface{}
	_jsii_.Invoke(t, "renderProperties", []interface{}{props}, &rendered)
	return rendered
}

// Can be overridden by subclasses to determine if this resource will be rendered into the cloudformation template.
//
// Returns: `true` if the resource should be included, `false` if it should be
// omitted.
// Experimental.
func (t *jsiiProxy_CfnTable) ShouldSynthesize() *bool {
	var include *bool
	_jsii_.Invoke(t, "shouldSynthesize", nil, &include)
	return include
}

// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// Usually implemented by framework-level constructs such as `Stack` and
// `Asset`, as they participate in synthesizing the cloud assembly.
// Experimental.
func (t *jsiiProxy_CfnTable) Synthesize(session awscdk.ISynthesisSession) {
	_jsii_.InvokeVoid(t, "synthesize", []interface{}{session})
}

// Returns a string representation of this construct.
// Experimental.
func (t *jsiiProxy_CfnTable) ToString() *string {
	var repr *string
	_jsii_.Invoke(t, "toString", nil, &repr)
	return repr
}

// Validate the current construct.
//
// May be implemented by derived constructs to perform validation logic; it is
// called on all constructs before synthesis.
//
// Returns: an array of validation error messages, or an empty array if the
// construct is valid.
// Experimental.
func (t *jsiiProxy_CfnTable) Validate() *[]*string {
	var messages *[]*string
	_jsii_.Invoke(t, "validate", nil, &messages)
	return messages
}

// Experimental.
func (t *jsiiProxy_CfnTable) ValidateProperties(_properties interface{}) {
	_jsii_.InvokeVoid(t, "validateProperties", []interface{}{_properties})
}
// Properties for defining a `CfnTable`.
//
// TODO: EXAMPLE
//
type CfnTableProps struct {
	// The name of the Timestream database that contains this table.
	//
	// *Length Constraints* : Minimum length of 3 bytes. Maximum length of 256 bytes.
	DatabaseName *string `json:"databaseName"`
	// The retention duration for the memory store and magnetic store. This object has the following attributes:.
	//
	// - *MemoryStoreRetentionPeriodInHours* : Retention duration for memory store, in hours.
	// - *MagneticStoreRetentionPeriodInDays* : Retention duration for magnetic store, in days.
	//
	// Both attributes are of type `string` . Both attributes are *required* when `RetentionProperties` is specified.
	//
	// See the following examples:
	//
	// *JSON*
	//
	// `{ "Type" : "AWS::Timestream::Table", "Properties" : { "DatabaseName" : "TestDatabase", "TableName" : "TestTable", "RetentionProperties" : { "MemoryStoreRetentionPeriodInHours": "24", "MagneticStoreRetentionPeriodInDays": "7" } } }`
	//
	// *YAML*
	//
	// ```
	// Type: AWS::Timestream::Table
	// DependsOn: TestDatabase
	// Properties: TableName: "TestTable" DatabaseName: "TestDatabase" RetentionProperties: MemoryStoreRetentionPeriodInHours: "24" MagneticStoreRetentionPeriodInDays: "7"
	// ```
	RetentionProperties interface{} `json:"retentionProperties"`
	// The name of the Timestream table.
	//
	// *Length Constraints* : Minimum length of 3 bytes. Maximum length of 256 bytes.
	TableName *string `json:"tableName"`
	// The tags to add to the table.
	Tags *[]*awscdk.CfnTag `json:"tags"`
}
|
trespasserw/MPS
|
plugins/mps-testing/languages/lang.test/source_gen/jetbrains/mps/lang/test/behavior/NodeTypeCheckingAction.java
|
<gh_stars>0
package jetbrains.mps.lang.test.behavior;
/*Generated by MPS */
import jetbrains.mps.typesystem.inference.ITypechecking;
import org.jetbrains.mps.openapi.model.SNode;
import jetbrains.mps.typesystem.inference.TypeCheckingContext;
/**
 * Type-checking action bound to a single node: ensures the node has been
 * type-checked before running the concrete check operation.
 * (Generated by MPS.)
 */
public abstract class NodeTypeCheckingAction implements ITypechecking.Action {
  // The node whose type is (re)checked before checkOperation runs.
  private final SNode myNodeToCheck;
  public NodeTypeCheckingAction(final SNode nodeToCheck) {
    this.myNodeToCheck = nodeToCheck;
  }
  /** Concrete check to execute once the node's type is available. */
  public abstract void checkOperation(TypeCheckingContext context);
  public void run(TypeCheckingContext context) {
    // Force a type-check of the target node first (true = report errors),
    // then run the subclass-provided operation.
    context.checkIfNotChecked(myNodeToCheck, true);
    checkOperation(context);
  }
}
|
trucnguyenlam/mucke
|
src/booleman.cc
|
<gh_stars>0
#include "booleman.h"
#include "io.h"
#include "init.h"
#ifdef DEBUG
#include "except.h"
// Debug-only sanity check: verify that `bm` is this manager.
// Raises an internal error (via the `internal` stream) otherwise.
void BooleManager::bmchk(BooleManager* bm)
{
  if(bm!=this)
    internal << "wrong BooleManager in BooleManager::bmchk"
             << Internal();
}
// Debug-only sanity check: verify that `bmm` is non-null and is owned by
// this manager.
void BooleManager::bmmchk(BooleManagerManaged* bmm)
{
  if(!bmm)
    internal << "empty BooleManagerManaged in BooleManager::bmmchk"
             << Internal();
  bmchk(bmm->BM);
}
#endif
// Heap-allocated iterator over the intrusive linked list of
// BooleManagerManaged objects owned by a BooleManager.
class BManManagedIterator :
  public Iterator_on_the_Heap<BooleManagerManaged*>
{
  BooleManagerManaged * head;  // first list element, kept so first() can rewind
  BooleManagerManaged * pos;   // current element; 0 once the list is exhausted
public:
  BManManagedIterator(BooleManagerManaged * f) : head(f), pos(f) { }
  void first() { pos = head; }
  bool isDone() { return pos == 0; }
  void next() { pos = pos->next(); }
  BooleManagerManaged * get() { return pos; }
};
// Conversion operator: expose the manager's list of managed objects as a
// freshly heap-allocated iterator. NOTE(review): the caller appears to own
// the returned iterator — confirm who deletes it.
BooleManager::operator Iterator_on_the_Heap<BooleManagerManaged*> * ()
{
  return new BManManagedIterator(_first);
}
#if 0
void BooleManager::mark()
{
Iterator<BooleManagerManaged*> it(*this);
for(it.first(); !it.isDone(); it.next())
it.get() -> mark();
}
#endif
// Reset every representation managed by this BooleManager.
void BooleManager::reset_all_reprs()
{
  Iterator<BooleManagerManaged*> it(*this);
  for(it.first(); !it.isDone(); it.next())
    it.get() -> reset();
}
// Prepend `bmm` to the doubly-linked list of managed objects.
// The list head is _first; links are intrusive (_prev/_next on bmm).
void BooleManager::_store(BooleManagerManaged* bmm)
{
  if(_first)
    _first->_prev = bmm;
  bmm->_next = _first;
  bmm->_prev = 0;
  _first = bmm;
  num_reprs++;
}
// Unlink `bmm` from the managed-object list (inverse of _store).
// Assumes bmm is currently in the list.
void BooleManager::_dequeue(BooleManagerManaged* bmm)
{
  if(bmm==_first)
  {
    // Removing the head: it must have no predecessor.
    ASSERT(bmm->_prev == 0);
    _first = bmm->_next;
    if(_first)
      _first->_prev=0; // just in case
  }
  else
  {
    // Interior/tail removal: splice neighbours around bmm.
    ASSERT(bmm->_prev != 0);
    bmm->_prev->_next = bmm->_next;
    if(bmm->_next)
      bmm->_next->_prev = bmm->_prev;
  }
  num_reprs--;
}
// Public wrappers: validate (in DEBUG builds) that all arguments belong to
// this manager, then delegate to the backend-specific _substitute/_exists/
// _forall implementations.
BooleRepr* BooleManager::substitute(BooleRepr* br, BooleSubsData* bsd)
{
  bmmchk(br);
  bmmchk(bsd);
  return _substitute(br,bsd);
}
// Existential quantification over the variables described by `bqd`.
BooleRepr* BooleManager::exists(BooleRepr* br, BooleQuantData* bqd)
{
  bmmchk(br);
  bmmchk(bqd);
  return _exists(br,bqd);
}
// Universal quantification over the variables described by `bqd`.
BooleRepr* BooleManager::forall(BooleRepr* br, BooleQuantData* bqd)
{
  bmmchk(br);
  bmmchk(bqd);
  return _forall(br,bqd);
}
// Relational product with an extra substitution step: apply `s` to `bbr`
// first, then compute the three-argument relational product. The temporary
// substituted representation is freed before returning.
BooleRepr * BooleManager::_relprod(
  BooleRepr* abr, BooleQuantData* bqd, BooleRepr * bbr, BooleSubsData * s)
{
  ASSERT(s);
  BooleRepr * tmp = _substitute(bbr, s);
  BooleRepr * res = _relprod(abr, bqd, tmp);
  delete tmp;
  return res;
}
// Checked entry point for the relational product; `s` is optional and, when
// present, is applied to `bbr` before the product is taken.
BooleRepr* BooleManager::relprod(
  BooleRepr* abr, BooleQuantData* bqd, BooleRepr * bbr, BooleSubsData * s)
{
  bmmchk(abr);
  bmmchk(bqd);
  bmmchk(bbr);
  if(s)
  {
    bmmchk(s);
    return _relprod(abr, bqd, bbr, s);
  }
  else return _relprod(abr, bqd, bbr);
}
// Checked entry point for "forall (abr -> bbr)" with optional substitution.
BooleRepr* BooleManager::forallImplies(
  BooleRepr* abr, BooleQuantData* bqd, BooleRepr * bbr, BooleSubsData * s)
{
  bmmchk(abr);
  bmmchk(bqd);
  bmmchk(bbr);
  if(s)
  {
    bmmchk(s);
    return _forallImplies(abr, bqd, bbr, s);
  }
  else return _forallImplies(abr, bqd, bbr);
}
// Substitute-then-delegate helper, same temporary-management pattern as
// _relprod above.
BooleRepr * BooleManager::_forallImplies(
  BooleRepr* abr, BooleQuantData* bqd, BooleRepr * bbr, BooleSubsData * s)
{
  ASSERT(s);
  BooleRepr * tmp = _substitute(bbr, s);
  BooleRepr * res = _forallImplies(abr, bqd, tmp);
  delete tmp;
  return res;
}
// Checked entry point for "forall (abr | bbr)" with optional substitution.
BooleRepr* BooleManager::forallOr(
  BooleRepr* abr, BooleQuantData* bqd, BooleRepr * bbr, BooleSubsData * s)
{
  bmmchk(abr);
  bmmchk(bqd);
  bmmchk(bbr);
  if(s)
  {
    bmmchk(s);
    return _forallOr(abr, bqd, bbr, s);
  }
  else return _forallOr(abr, bqd, bbr);
}
// Substitute-then-delegate helper for forallOr.
BooleRepr * BooleManager::_forallOr(
  BooleRepr* abr, BooleQuantData* bqd, BooleRepr * bbr, BooleSubsData * s)
{
  ASSERT(s);
  BooleRepr * tmp = _substitute(bbr, s);
  BooleRepr * res = _forallOr(abr, bqd, tmp);
  delete tmp;
  return res;
}
// just make sure that someone else does the initialization
// of Boole::manager
//%%NSPI%% booleManPreInitializer Boole::manager PreInitializer
// NOTE(review): INITCLASS is a project macro; the body appears to be polled
// until it stops returning false. Confirm the macro's expected return
// semantics — the success path here falls off the end of the block.
INITCLASS(
  BooleManInitializer,
  booleManPreInitializer,
  "Check if Boole::manager is initialized",
  {
    if(Boole::manager==0 || !IOStream::initialized()) return false;
    verbose << "Boole::manager is initialized!\n";
  }
)
// True once the BooleManInitializer singleton exists and reports success.
bool BooleManager::initialized()
{
  BooleManInitializer * bmaninit = BooleManInitializer::instance();
  return bmaninit ? bmaninit -> initialized() : false;
}
|
yaoguangju/monolith
|
monolith-mp/src/test/java/com/mochen/mp/LuaTests.java
|
<filename>monolith-mp/src/test/java/com/mochen/mp/LuaTests.java
package com.mochen.mp;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.script.DefaultRedisScript;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
@SpringBootTest
public class LuaTests {

    /** Releases a lock only if the caller still owns it (value matches). */
    private static final String RELEASE_LOCK_LUA_SCRIPT = "if redis.call('get', KEYS[1]) == ARGV[1] then return redis.call('del', KEYS[1]) else return 0 end";

    @Resource
    private RedisTemplate<String, String> redisTemplate;

    /** Acquire a lock with SET NX, then release it through the Lua script. */
    @Test
    public void testLua(){
        String lockKey = "12334";
        String lockToken = cn.hutool.core.lang.UUID.fastUUID().toString();
        // setIfAbsent returns a boxed Boolean that can be null (e.g. inside a
        // pipeline/transaction); avoid auto-unboxing into a primitive boolean.
        Boolean success = redisTemplate.opsForValue().setIfAbsent(lockKey, lockToken, 3, TimeUnit.MINUTES);
        if (!Boolean.TRUE.equals(success)){
            System.out.println("锁已存在");
        }
        // Bind the Lua script and declare its Long return type.
        DefaultRedisScript<Long> redisScript = new DefaultRedisScript<>(RELEASE_LOCK_LUA_SCRIPT, Long.class);
        // execute(script, keys, args...): KEYS[1] = lockKey, ARGV[1] = token.
        Long result = redisTemplate.execute(redisScript, Collections.singletonList(lockKey), lockToken);
        System.out.println(result);
    }

    /** Bump the "poetry" counter packed into the high digits of a ZSET score. */
    @Test
    public void testLua1(){
        final String RELEASE_LOCK_LUA_SCRIPT = "local score = tonumber(redis.call('ZSCORE', KEYS[1], KEYS[2])); local poetry = math.floor(score / 10^13); local updateScore = (poetry + 1) * 10^13 + tonumber(ARGV[1]);redis.call('ZADD', KEYS[1],updateScore , KEYS[2]); return 0;";
        // Bind the Lua script and declare its Long return type.
        DefaultRedisScript<Long> redisScript = new DefaultRedisScript<>(RELEASE_LOCK_LUA_SCRIPT, Long.class);
        List<String> keyList = new ArrayList<>();
        keyList.add("123");
        keyList.add("2L");
        // args are passed as strings; Lua's tonumber() converts ARGV[1].
        Long result = redisTemplate.execute(redisScript, keyList, "1649304543407");
        System.out.println(result);
    }

    /** Same as testLua1 but relies on Lua's implicit string-to-number coercion. */
    @Test
    public void testLua2(){
        final String RELEASE_LOCK_LUA_SCRIPT = "local score = tonumber(redis.call('ZSCORE', KEYS[1], KEYS[2]));local poetry = math.floor(score / 10^13);local updateScore = (poetry + 1) * 10^13 + ARGV[1];redis.call('ZADD', KEYS[1],updateScore , KEYS[2]); return 0;";
        // Bind the Lua script and declare its Long return type.
        DefaultRedisScript<Long> redisScript = new DefaultRedisScript<>(RELEASE_LOCK_LUA_SCRIPT, Long.class);
        List<String> keyList = new ArrayList<>();
        keyList.add("123");
        keyList.add("2L");
        // The template is RedisTemplate<String, String>: script arguments go
        // through the String serializer, so a raw long would fail to
        // serialize. Pass the timestamp as a String; Lua coerces it in "+".
        Long result = redisTemplate.execute(redisScript, keyList, String.valueOf(System.currentTimeMillis()));
        System.out.println(result);
    }
}
|
sevenfang/etl-Lealone
|
lealone-sql/src/main/java/org/lealone/sql/ddl/DefineStatement.java
|
<gh_stars>0
/*
* Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
* Version 1.0, and under the Eclipse Public License, Version 1.0
* (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.lealone.sql.ddl;
import org.lealone.db.ServerSession;
import org.lealone.db.result.Result;
import org.lealone.sql.StatementBase;
/**
* This class represents a non-transaction statement, for example a CREATE or DROP.
*/
/**
 * This class represents a non-transaction statement, for example a CREATE or DROP.
 */
public abstract class DefineStatement extends StatementBase {

    /**
     * Create a new command for the given session.
     *
     * @param session the session
     */
    protected DefineStatement(ServerSession session) {
        super(session);
    }

    /** DDL statements produce no result-set metadata. */
    @Override
    public Result getMetaData() {
        return null;
    }

    /** DDL statements always run at the lowest scheduling priority. */
    @Override
    public int getPriority() {
        priority = MIN_PRIORITY;
        return priority;
    }

    /** Marks every subclass as a DDL statement. */
    @Override
    public boolean isDDL() {
        return true;
    }
}
|
dk00/old-stuff
|
usaco/C5/554betsy.cpp
|
<reponame>dk00/old-stuff
/*
ID: s0000151
PROG: betsy
LANG: C++
*/
#include<stdio.h>
/* Search state (board is small; 9x9 arrays leave room for a sentinel border):
   n        board size read from input
   path     number of complete tours found; pt = search-tree nodes visited
   map      1 = square occupied (border squares are pre-set to 1 by main)
   nb       per-square count of occupied orthogonal neighbours
   row/col  number of occupied squares in each row / column
   dx/dy    the four orthogonal step directions */
int n,path,pt,map[9][9],nb[9][9],row[9],col[9],
dx[]={1,0,-1,0},dy[]={0,1,0,-1};
/* Depth-first enumeration of Betsy's tours from (x,y) with `left` squares
   still unvisited.  A tour is counted when it reaches (n,1) with the whole
   board covered.  NOTE(review): the early-return tests below are
   problem-specific reachability prunings (walls already sealed, a row or
   column fully used); kept as-is and only lightly annotated. */
void dfs(int x,int y,int left){
pt++;
/* reached the target corner: valid tour iff nothing is left unvisited */
if(x==n && y==1){
if(!left)path++;
return;
}
/* prune: moves that would strand an unreachable region along an edge */
if(y>=n && !map[x-1][n])return;
if(x>=n && !map[n][y+1])return;
if(y==1 && !map[x-1][1])return;
if(x==1 && !map[1][y-1])return;
/* prune: a neighbouring row/column is already saturated */
if(row[y-1]>=n || col[x+1]>=n)return;
/* special-case endgame near (2,2): exactly three squares remain, so the
   remaining path is forced by which neighbour is already occupied */
if(x==2 && y==2 && n*n==left+3){
if(map[x-1][y]){
map[x][y-1]=1;
row[y-1]++;
col[x]++;
dfs(x,y-1,left-1);
map[x][y-1]=0;
row[y-1]--;
col[x]--;
}
else{
row[y]++;
col[x-1]++;
map[x-1][y]=1;
dfs(x-1,y,left-1);
map[x-1][y]=0;
row[y]--;
col[x-1]--;
}
return;
}
/* Count free neighbours that would be left with >=3 occupied neighbours;
   more than one such "forced" square means a dead end, exactly one means
   the next move is forced to it (v). */
int i,v=-1;
for(i=0;i<4;i++)
if((++nb[x+dx[i]][y+dy[i]])*(!map[x+dx[i]][y+dy[i]])>=3){
if(v>=0){
for(;i>=0;i--)--nb[x+dx[i]][y+dy[i]];
return;
}
else v=i;
}
/* no forced square: try every free neighbour */
for(i=0;i<4 && v<0;i++)
if(!map[x+dx[i]][y+dy[i]]){
map[x+dx[i]][y+dy[i]]=1;
col[x+dx[i]]++;
row[y+dy[i]]++;
dfs(x+dx[i],y+dy[i],left-1);
map[x+dx[i]][y+dy[i]]=0;
col[x+dx[i]]--;
row[y+dy[i]]--;
}
/* exactly one forced square: take it */
if(v>=0){
col[x+dx[v]]++;
row[y+dy[v]]++;
map[x+dx[v]][y+dy[v]]=1;
dfs(x+dx[v],y+dy[v],left-1);
map[x+dx[v]][y+dy[v]]=0;
col[x+dx[v]]--;
row[y+dy[v]]--;
}
/* undo the neighbour-count increments done in the first loop */
for(i=0;i<4;i++)--nb[x+dx[i]][y+dy[i]];
}
/*
 * Driver: reads board sizes n until EOF and, for each, prints
 * "<tours>/<nodes>" where `path` counts complete tours found by dfs()
 * and `pt` counts visited search-tree nodes.
 *
 * Fix: the original declared `main()` with an implicit int return type
 * (removed from the language in C99) and never returned a value.
 */
int main(void){
	//freopen("betsy.in","r",stdin);   /* USACO grading harness */
	//freopen("betsy.out","w",stdout);
	int i,j;
	while(scanf("%d",&n)!=EOF){
		/* clear the board and neighbour counters */
		for(i=1;i<=n;i++)
			for(j=1;j<=n;j++)nb[i][j]=map[i][j]=0;
		/* occupied sentinel border; reset per-row/column tallies */
		for(i=0;i<=n+1;i++){
			map[0][i]=map[n+1][i]=map[i][0]=map[i][n+1]=1;
			col[i]=row[i]=0;
		}
		path=pt=0;
		/* start at the top-left corner (1,1), already occupied */
		col[1]=row[1]=map[1][1]=1;
		nb[1][2]=nb[2][1]=1;
		dfs(1,1,n*n-1);
		printf("%d/%d\n",path,pt);
	}
	return 0;
}
//88418 1770 86 8 2 1 1
|
wuchen-huawei/huaweicloud-sdk-java
|
core/src/main/java/com/huawei/openstack4j/openstack/bss/v1/domain/periodOrder/ResultForOrderList.java
|
<reponame>wuchen-huawei/huaweicloud-sdk-java<gh_stars>10-100
/*******************************************************************************
* Copyright 2019 Huawei Technologies Co.,Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*******************************************************************************/
package com.huawei.openstack4j.openstack.bss.v1.domain.periodOrder;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.huawei.openstack4j.model.ModelEntity;
import lombok.*;
import java.util.List;
@Getter
@Setter
@ToString
@Builder(toBuilder = true)
@NoArgsConstructor
@AllArgsConstructor
// Paged result wrapper for the BSS "list period orders" API; accessors,
// builder and constructors are generated by Lombok.
public class ResultForOrderList implements ModelEntity
{
    // Serialization id for the ModelEntity contract.
    private static final long serialVersionUID = -6881267813327224789L;
    /**
     * Number of records per page.
     */
    @JsonProperty("pageSize")
    private Integer pageSize;
    /**
     * Current page number.
     */
    @JsonProperty("pageIndex")
    private Integer pageIndex;
    /**
     * Number of records that match the query conditions.
     */
    @JsonProperty("totalSize")
    private Integer totalSize;
    /**
     * Order details.
     */
    @JsonProperty("orderInfos")
    private List<CustomerOrderForOrderList> orderInfos;
}
|
alphagov/performanceplatform-collector
|
performanceplatform/collector/main.py
|
import os
import logging
import importlib
from collections import OrderedDict
from performanceplatform.collector import arguments
from performanceplatform.collector.logging_setup import (
set_up_logging, close_down_logging)
from performanceplatform.utils.collector import get_config
def _get_data_group(query):
return query['data-set']['data-group']
def _get_data_type(query):
return query['data-set']['data-type']
def _get_data_group_data_type(query):
return '{}/{}'.format(_get_data_group(query), _get_data_type(query))
def _get_query_params(query):
"""
>>> _get_query_params({'query': {'a': 1, 'c': 3, 'b': 5}})
'a=1 b=5 c=3'
"""
query_params = OrderedDict(sorted(query['query'].items()))
return ' '.join(['{}={}'.format(k, v) for k, v in query_params.items()])
def _get_path_to_json_file(query):
return query['path_to_json_file']
def make_extra_json_fields(args):
    """
    Build the dictionary of extra fields inserted into JSON log records
    (logstash_formatter module) from the parsed command-line arguments.
    """
    query = args.query
    fields = {
        'data_group': _get_data_group(query),
        'data_type': _get_data_type(query),
        'data_group_data_type': _get_data_group_data_type(query),
        'query': _get_query_params(query),
    }
    # Only queries loaded from disk carry their source path.
    if 'path_to_json_file' in query:
        fields['path_to_query'] = _get_path_to_json_file(query)
    return fields
def logging_for_entrypoint(
        entrypoint, json_fields, logfile_path, logfile_name):
    """Configure file logging for a collector run.

    Defaults the log directory to <repo>/log when no path is given, reads
    the log level from the LOGLEVEL environment variable (default INFO),
    and delegates handler setup to set_up_logging.
    """
    if logfile_path is None:
        # Two directories up from this module: the repository's log/ folder.
        logfile_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', '..', 'log')
    loglevel = getattr(logging, os.environ.get('LOGLEVEL', 'INFO').upper())
    set_up_logging(
        entrypoint, loglevel, logfile_path, logfile_name, json_fields)
def _log_collector_instead_of_running(entrypoint, args):
    """Log that the collector was skipped (DISABLE_COLLECTORS) instead of run."""
    summary = {
        'start_at': args.start_at,
        'end_at': args.end_at,
        'query': {key: args.query[key] for key in ('data-set', 'query', 'options')}
    }
    logging.info(
        'Collector {} NOT run with the following {}'.format(entrypoint,
                                                            summary))
def merge_performanceplatform_config(
        performanceplatform, data_set, token, dry_run=False):
    """Combine platform, data-set and token config into one collector config.

    The resulting 'url' is <backdrop_url>/<data-group>/<data-type>.
    """
    data_group = data_set['data-group']
    data_type = data_set['data-type']
    backdrop_url = performanceplatform['backdrop_url']
    return {
        'url': '/'.join([backdrop_url, data_group, data_type]),
        'token': token['token'],
        'data-group': data_group,
        'data-type': data_type,
        'dry_run': dry_run,
    }
def _run_collector(entrypoint, args, logfile_path=None, logfile_name=None):
    """Set up logging, then import and run the collector entrypoint module.

    Honours the DISABLE_COLLECTORS environment variable: when set to 'true'
    the run is only logged, not executed. File logging is torn down at the
    end unless console logging was requested.
    """
    if args.console_logging:
        logging.basicConfig(level=logging.INFO)
    else:
        logging_for_entrypoint(
            entrypoint,
            make_extra_json_fields(args),
            logfile_path,
            logfile_name
        )
    if os.environ.get('DISABLE_COLLECTORS', 'false') == 'true':
        _log_collector_instead_of_running(entrypoint, args)
    else:
        # The entrypoint is a dotted module path whose main() drives the run.
        entrypoint_module = importlib.import_module(entrypoint)
        logging.info('Running collection into {}/{}'.format(
            args.query.get('data-set')['data-group'],
            args.query.get('data-set')['data-type']))
        entrypoint_module.main(
            args.credentials,
            merge_performanceplatform_config(
                args.performanceplatform,
                args.query['data-set'],
                args.token,
                args.dry_run
            ),
            args.query['query'],
            args.query['options'],
            args.start_at,
            args.end_at
        )
    if not args.console_logging:
        close_down_logging()
def main():
    """CLI entry point: parse arguments, resolve the query, run the collector."""
    args = arguments.parse_args('Performance Platform Collector')
    # A collector slug means the query config is fetched remotely rather than
    # supplied on the command line.
    if args.collector_slug:
        args.query = get_config(args.collector_slug, args.performanceplatform)
    _run_collector(args.query['entrypoint'], args)
|
Manu343726/raytracer
|
src/lib/canvas.cpp
|
#include <fmt/format.h>
#include <fmt/ostream.h>
#include <spdlog/spdlog.h>
#include <fstream>
#include <raytracer/canvas.hpp>
#include <raytracer/jobs/engine.hpp>
#include <raytracer/math.hpp>
using namespace rt;
// Create a width x height canvas with every pixel initialised to black.
// NOTE: _canvas is sized via pixel_count(), which reads _width/_height;
// this is safe because those members are declared (and thus initialised)
// before _canvas.
canvas::canvas(const std::size_t width, const std::size_t height)
    : _width{width},
      _height{height},
      _canvas{pixel_count(), color::rgb(0.0f, 0.0f, 0.0f)}
{
}
// Mutable access to the pixel at (row, column); storage is row-major.
// No bounds checking is performed.
color& canvas::pixel(const std::size_t row, const std::size_t column)
{
    return _canvas[row * _width + column];
}
// Read-only access to the pixel at (row, column); storage is row-major.
const color&
canvas::pixel(const std::size_t row, const std::size_t column) const
{
    return _canvas[row * _width + column];
}
// Write the canvas to `filename` as an ASCII PPM (P3) image, converting
// each linear color channel to 8-bit sRGB.
void canvas::dump_to_file(const std::string& filename) const
{
    std::ofstream os{filename, std::ios_base::out};
    // PPM header: magic, dimensions, maximum channel value.
    fmt::print(
        os,
        "P3\n"
        "{} {}\n"
        "255\n",
        _width,
        _height);
    for(std::size_t row = 0; row < _height; ++row)
    {
        for(std::size_t column = 0; column < _width; ++column)
        {
            const auto& pixel = this->pixel(row, column);
            // Right-align each channel in 3 columns for readability.
            fmt::print(
                os,
                "{:>3} {:>3} {:>3} ",
                rt::linearToSRGB(pixel.r),
                rt::linearToSRGB(pixel.g),
                rt::linearToSRGB(pixel.b));
        }
        fmt::print(os, "\n");
    }
}
// Canvas width in pixels.
std::size_t canvas::width() const
{
    return _width;
}
// Canvas height in pixels.
std::size_t canvas::height() const
{
    return _height;
}
// Total number of pixels (width * height).
std::size_t canvas::pixel_count() const
{
    return _width * _height;
}
// Run `function` once per sample for every pixel, in parallel on the job
// engine, then report throughput. One child job is created per pixel; all
// jobs hang off a root job that the calling worker waits on.
// The Zone*/Tracy* macros are profiler instrumentation only.
void canvas::foreach(
    canvas::pixel_function function,
    const rt::kernel_constants& constants,
    const std::size_t threads,
    const std::vector<std::size_t>& jobsPerThread)
{
    ZoneScoped;
    TracyCZoneN(EngineInit, "Job engine initialization", true);
    rt::jobs::Engine engine{threads, jobsPerThread, pixel_count()};
    TracyCZoneEnd(EngineInit);
    const auto start = std::chrono::high_resolution_clock::now();
    // Precompute pixel-to-unit-square scale factors.
    const float x_ratio = 1.0f / _width;
    const float y_ratio = 1.0f / _height;
    auto* worker = engine.threadWorker();
    // Empty root job: parent for all pixel jobs so we can wait on the batch.
    auto* root = worker->pool().createJob([](rt::jobs::Job&) {});
    for(std::size_t row = 0; row < _height; ++row)
    {
        for(std::size_t column = 0; column < _width; ++column)
        {
            ZoneNamedN(PixelJobAlloc, "pixel job allocation", true);
            auto& pixel = this->pixel(row, column);
            auto* pixelJob = worker->pool().createClosureJobAsChild(
                [function, row, column, x_ratio, y_ratio, &pixel, &constants](
                    rt::jobs::Job& job) {
                    ZoneNamedN(PixelJob, "pixel job", true);
                    // Accumulate jittered samples, then average them.
                    for(std::size_t i = 0; i < constants.samples_per_pixel; ++i)
                    {
                        ZoneNamedN(Sample, "sample", true);
                        // Jitter within the pixel; y is flipped so row 0 is
                        // the top of the image.
                        const float x = (column + rt::random()) * x_ratio;
                        const float y = 1.0f - (row + rt::random()) * y_ratio;
                        color local_pixel = color::rgb(0.0f, 0.0f, 0.0f);
                        function(x, y, constants, local_pixel);
                        pixel += local_pixel;
                    }
                    pixel /= constants.samples_per_pixel;
                },
                root);
            worker->submit(pixelJob);
        }
    }
    worker->submit(root);
    worker->wait(root);
    const auto elapsed = std::chrono::high_resolution_clock::now() - start;
    const auto elapsed_us =
        std::chrono::duration_cast<std::chrono::microseconds>(elapsed);
    // Integer arithmetic: (pixels * samples / us) * 1e6 ~= rays per second.
    const auto rays_s = pixel_count() * constants.samples_per_pixel /
                        elapsed_us.count() * 1000 * 1000;
    spdlog::info(
        "elapsed time: {} ms ({} rays/s)",
        std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count(),
        rays_s);
}
|
loriab/qccddb
|
qcdb/tests/test_dfmp2_4_psi4.py
|
import os
import pytest
import qcdb
from .utils import *
#! conventional and density-fitting mp2 test of mp2 itself and setting scs-mp2
def check_mp2(return_value, is_df, is_5050=False):
    """Assert all MP2 energy components against stored reference values.

    is_df selects the density-fitted (True) or conventional (False)
    reference set; is_5050 additionally checks a user-defined 50/50
    same-spin/opposite-spin SCS combination.
    """
    if is_df:
        # Density-fitted reference energies (hartree).
        ref = -76.0167614256151865
        mp2ss = -0.0527406422061238
        mp2os = -0.1562926850310142
        mp2corl = -0.2090333272371381
        mp2tot = -76.2257947528523232
        scscorl = -0.2051314361059251
        scstot = -76.2218928617211162
        a5050corl = 0.5 * (mp2ss + mp2os)
        a5050tot = a5050corl + ref
    else:
        # Conventional (non-DF) reference energies (hartree).
        ref = -76.01678947133706
        mp2ss = -0.05268120425816
        mp2os = -0.15637564436589
        mp2corl = -0.20905684862405
        mp2tot = -76.22584631996111
        scscorl = -0.20521117465845
        scstot = -76.22200064599551
        a5050corl = 0.5 * (mp2ss + mp2os)
        a5050tot = a5050corl + ref
    # All comparisons to 5 decimal places.
    assert compare_values(ref, qcdb.variable("SCF TOTAL ENERGY"), 5, "mp2 ref")
    assert compare_values(mp2ss, qcdb.variable("MP2 SAME-SPIN CORRELATION ENERGY"), 5, "mp2 ss")
    assert compare_values(mp2os, qcdb.variable("MP2 OPPOSITE-SPIN CORRELATION ENERGY"), 5, "mp2 os")
    assert compare_values(mp2corl, qcdb.variable("MP2 CORRELATION ENERGY"), 5, "mp2 corl")
    assert compare_values(mp2tot, qcdb.variable("MP2 TOTAL ENERGY"), 5, "mp2 tot")
    assert compare_values(scscorl, qcdb.variable("SCS-MP2 CORRELATION ENERGY"), 5, "mp2 scscorl")
    assert compare_values(scstot, qcdb.variable("SCS-MP2 TOTAL ENERGY"), 5, "mp2 scstot")
    if is_5050:
        assert compare_values(a5050corl, qcdb.variable("custom SCS-MP2 CORRELATION ENERGY"), 5, "mp2 scscorl")
        assert compare_values(a5050tot, qcdb.variable("custom SCS-MP2 TOTAL ENERGY"), 5, "mp2 scstot")
    assert compare_values(ref, qcdb.variable("CURRENT REFERENCE ENERGY"), 5, "mp2 ref")
    assert compare_values(mp2corl, qcdb.variable("CURRENT CORRELATION ENERGY"), 5, "mp2 corl")
    assert compare_values(mp2tot, qcdb.variable("CURRENT ENERGY"), 5, "mp2 tot")
    assert compare_values(mp2tot, return_value, 5, "mp2 return")
@pytest.fixture
def h2o():
    """Z-matrix for a bent water molecule (bond 1.0 A, angle 90 deg)."""
    return """
    O
    H 1 1.0
    H 1 1.0 2 90.0
    """

# Default mp2_type: density-fitted.
@using("psi4")
def test_1_df_mp2(h2o):
    qcdb.set_molecule(h2o)
    qcdb.set_options({"basis": "cc-pvdz"})
    print("        Testing mp2 (df) ...")
    val = qcdb.energy("mp2")
    check_mp2(val, is_df=True)

# Explicitly request conventional MP2.
@using("psi4")
def test_2_conv_mp2(h2o):
    qcdb.set_molecule(h2o)
    qcdb.set_options({"basis": "cc-pvdz", "psi4_mp2_type": "conv"})
    print("        Testing mp2 (conv) ...")
    val = qcdb.energy("mp2")
    check_mp2(val, is_df=False)

# Canonical SCS scale factors (os 6/5, ss 1/3) spelled out explicitly.
@using("psi4")
def test_3_df_scs_mp2(h2o):
    qcdb.set_molecule(h2o)
    qcdb.set_options(
        {
            "basis": "cc-pvdz",
            "psi4_mp2_os_scale": 1.2,
            "psi4_mp2_ss_scale": 0.33333333333333333,
            "psi4_mp2_type": "df",
        }
    )
    # set mp2_type df
    print("        Testing explicit scs mp2 (df) ...")
    val = qcdb.energy("mp2")
    check_mp2(val, is_df=True)

@using("psi4")
def test_4_conv_scs_mp2(h2o):
    qcdb.set_molecule(h2o)
    qcdb.set_options(
        {
            "basis": "cc-pvdz",
            "psi4_mp2_os_scale": 1.2,
            "psi4_mp2_ss_scale": 0.33333333333333333,
            "psi4_mp2_type": "conv",
        }
    )
    print("        Testing explicit scs mp2 (conv) ...")
    val = qcdb.energy("mp2")
    check_mp2(val, is_df=False)

# User-defined 50/50 spin-component mix; verified via is_5050.
@using("psi4")
def test_5_df_custom_scs_mp2(h2o):
    qcdb.set_molecule(h2o)
    qcdb.set_options(
        {
            "basis": "cc-pvdz",
            "psi4_mp2_os_scale": 0.5,
            "psi4_mp2_ss_scale": 0.5,
        }
    )
    # set mp2_type df
    print("        Testing user-def scs mp2 (df) ...")
    val = qcdb.energy("mp2")
    check_mp2(val, is_df=True, is_5050=True)

# Conventional custom SCS is not yet implemented upstream.
@pytest.mark.xfail(reason="custom scs NYI")
@using("psi4")
def test_6_conv_custom_scs_mp2(h2o):
    qcdb.set_molecule(h2o)
    qcdb.set_options(
        {
            "basis": "cc-pvdz",
            "psi4_mp2_os_scale": 0.5,
            "psi4_mp2_ss_scale": 0.5,
            "psi4_mp2_type": "conv",
        }
    )
    #
    # set mp2_type conv
    print("        Testing user-def scs mp2 (conv) ...")
    val = qcdb.energy("mp2")
    check_mp2(val, is_df=False, is_5050=True)
|
leofidus/electron-forge
|
src/installers/linux/rpm.js
|
<reponame>leofidus/electron-forge
import { sudo } from '../../util/linux-installer';

// Install an RPM package via dnf with privilege escalation.
// NOTE(review): all dnf options are passed as one argument string; confirm
// that `sudo` tokenizes it before spawning the process.
export default async (filePath) => {
  await sudo('RPM', 'dnf', `--assumeyes --nogpgcheck install ${filePath}`);
};
|
khelle/bounce-ball
|
test/Loop/LoopTest.js
|
<gh_stars>1-10
import chai, { expect } from 'chai';
import spies from 'chai-spies';
import { Loop } from '../../src/Loop/Loop';
chai.use(spies);

// Unit tests for the Loop game-loop class.
// NOTE(review): the start/stop tests rely on real 30ms timers and may be
// flaky on heavily loaded CI machines.
describe('Loop', () => {
  describe('start()', () => {
    it('should start the loop', (done) => {
      var loop = new Loop();
      var spy = chai.spy.on(loop, 'tick');
      loop.start();
      // Give the loop time to tick at least once.
      setTimeout(() => {
        expect(spy).to.have.been.called.min(1);
        done();
      }, 30);
    });
  });
  describe('stop()', () => {
    it('should stop the loop', (done) => {
      var loop = new Loop();
      var spy = chai.spy.on(loop, 'tick');
      // Stopping immediately after starting: no tick should ever fire.
      loop.start();
      loop.stop();
      setTimeout(() => {
        expect(spy).to.have.been.called.exactly(0);
        done();
      }, 30);
    });
  });
  describe('tick()', () => {
    it('should execute each callback once per tick', (done) => {
      var loop = new Loop();
      var spy = chai.spy();
      loop.onTick(spy);
      loop.tick();
      loop.tick();
      expect(spy).to.have.been.called.exactly(2);
      done();
    });
  });
  describe('onTick()', () => {
    it('should add function to be executed on tick', (done) => {
      var loop = new Loop();
      var spy = chai.spy();
      loop.onTick(spy);
      loop.tick();
      expect(spy).to.have.been.called.exactly(1);
      done();
    });
  });
});
|
codenplay/CodeCoopersLms
|
Projects/Vta/Vta.WebClient/app/account/signin.controller.js
|
// Compiled TypeScript output (see sourceMappingURL below) — prefer editing
// the .ts source. AngularJS sign-in controller: redirects authenticated
// users home and performs credential sign-in.
var Vta;
(function (Vta) {
    "use strict";
    var SigninController = (function () {
        function SigninController(authService, $state, $rootScope) {
            this.authService = authService;
            this.stateService = $state;
            this.rootScopeService = $rootScope;
            // Already signed in? Skip the form and go straight home.
            var acc = this.authService.accountInfo;
            if (acc && acc.isAuth) {
                this.stateService.go("home");
            }
        }
        // Submit the sign-in form: on success navigate home and broadcast
        // "signedIn" so other components can refresh.
        // NOTE(review): the promise chain has no rejection handler — failed
        // sign-ins are silently dropped; consider adding a .catch in the
        // TypeScript source.
        SigninController.prototype.signin = function () {
            var self = this;
            var signinSuccess = function (response) {
                self.stateService.go("home");
                console.log(response);
                self.rootScopeService.$broadcast("signedIn");
                return response;
            };
            self.authService.signin(new Vta.SigninRequest(self.user.email, self.user.password)).then(signinSuccess);
        };
        // AngularJS DI annotations (minification-safe).
        SigninController.$inject = ["authService", "$state", "$rootScope"];
        return SigninController;
    }());
    Vta.SigninController = SigninController;
    angular.module("vta").controller("SigninController", SigninController);
})(Vta || (Vta = {}));
//# sourceMappingURL=signin.controller.js.map
|
licehammer/perun
|
perun-notification/src/main/java/cz/metacentrum/perun/notif/mail/PerunNotifHTMLMessage.java
|
<filename>perun-notification/src/main/java/cz/metacentrum/perun/notif/mail/PerunNotifHTMLMessage.java<gh_stars>10-100
package cz.metacentrum.perun.notif.mail;
/**
 * HTML-bodied email message: delegates all preparation to
 * {@link MessagePreparator} with the {@code EmailType.HTML} content type.
 */
public class PerunNotifHTMLMessage extends MessagePreparator implements EmailMessage {

	public PerunNotifHTMLMessage(String from, String fromText, String subject, String messageContent) {
		super(from, fromText, subject, messageContent, EmailType.HTML);
	}
}
|
mitodl/mit-xpro
|
static/js/containers/pages/admin/CreateCouponPage.js
|
<reponame>mitodl/mit-xpro
// @flow
/* global SETTINGS: false */
import React from "react"
import DocumentTitle from "react-document-title"
import { CREATE_COUPON_PAGE_TITLE } from "../../../constants"
import { mergeAll } from "ramda"
import { connectRequest, mutateAsync } from "redux-query"
import { compose } from "redux"
import { connect } from "react-redux"
import { Link } from "react-router-dom"
import { CouponForm } from "../../../components/forms/CouponForm"
import queries from "../../../lib/queries"
import { routes } from "../../../lib/urls"
import type { Response } from "redux-query"
import type {
Company,
CouponPaymentVersion,
Product
} from "../../../flow/ecommerceTypes"
import { createStructuredSelector } from "reselect"
import { COUPON_TYPE_SINGLE_USE } from "../../../constants"
// Local component state: id of the most recently created coupon
// (null until a creation succeeds).
type State = {
  couponId: ?string
}
// Props supplied by redux-query selectors.
type StateProps = {|
  products: Array<Product>,
  companies: Array<Company>,
  coupons: Map<string, CouponPaymentVersion>
|}
// Props supplied by mapDispatchToProps.
type DispatchProps = {|
  createCoupon: (coupon: Object) => Promise<Response<CouponPaymentVersion>>
|}
type Props = {|
  ...StateProps,
  ...DispatchProps
|}
export class CreateCouponPage extends React.Component<Props, State> {
constructor(props: Props) {
super(props)
this.state = {
couponId: null
}
}
onSubmit = async (
couponData: Object,
{ setSubmitting, setErrors }: Object
) => {
const { createCoupon } = this.props
couponData.product_ids = couponData.products.map(product => product.id)
if (couponData.coupon_type === COUPON_TYPE_SINGLE_USE) {
couponData.max_redemptions = 1
} else {
couponData.num_coupon_codes = 1
}
couponData.amount = couponData.discount / 100
try {
const result = await createCoupon(couponData)
if (result.body && result.body.id) {
this.setState({ couponId: result.body.id })
} else if (result.body && result.body.errors) {
setErrors(mergeAll(result.body.errors))
}
} finally {
setSubmitting(false)
}
}
clearSuccess = async () => {
await this.setState({ couponId: null })
}
render() {
const { couponId } = this.state
const { coupons, companies, products } = this.props
// $FlowFixMe: flow doesn't like coupons[couponId] but it works fine
const newCoupon = coupons && couponId ? coupons[couponId] : null
return (
<DocumentTitle
title={`${SETTINGS.site_name} | ${CREATE_COUPON_PAGE_TITLE}`}
>
<div className="ecommerce-admin-body">
<p>
<Link to={routes.ecommerceAdmin.index}>
Back to Ecommerce Admin
</Link>
</p>
<h3>Create a Coupon</h3>
{newCoupon ? (
<div className="coupon-success-div">
{newCoupon.coupon_type === "promo" ? (
<span>{`Coupon "${
newCoupon.payment.name
}" successfully created.`}</span>
) : (
// $FlowFixMe: couponId will never be null here
<a href={`/couponcodes/${couponId}`}>
{`Download coupon codes for "${newCoupon.payment.name}"`}
</a>
)}
<div>
<input
type="button"
value="Generate another coupon"
onClick={this.clearSuccess}
/>
</div>
</div>
) : (
<CouponForm
onSubmit={this.onSubmit}
products={products}
companies={companies}
/>
)}
</div>
</DocumentTitle>
)
}
}
// redux-query mutation wrapper for coupon creation.
const createCoupon = (coupon: Object) =>
  mutateAsync(queries.ecommerce.couponsMutation(coupon))
// Queries fetched when the page mounts: product and company lists.
const mapPropsToConfig = () => [
  queries.ecommerce.productsQuery(),
  queries.ecommerce.companiesQuery()
]
const mapStateToProps = createStructuredSelector({
  products: queries.ecommerce.productsSelector,
  companies: queries.ecommerce.companiesSelector,
  coupons: queries.ecommerce.couponsSelector
})
const mapDispatchToProps = {
  createCoupon: createCoupon
}
export default compose(
  connect<Props, _, _, DispatchProps, _, _>(
    mapStateToProps,
    mapDispatchToProps
  ),
  connectRequest(mapPropsToConfig)
)(CreateCouponPage)
|
MusaddikAccuKnox/spire
|
pkg/common/catalog/constraints.go
|
package catalog
import (
"fmt"
)
// ExactlyOne requires exactly one plugin of the type.
func ExactlyOne() Constraints {
	return Constraints{Min: 1, Max: 1}
}

// MaybeOne allows zero or one plugin of the type.
func MaybeOne() Constraints {
	return Constraints{Min: 0, Max: 1}
}

// AtLeastOne requires one or more plugins of the type (no upper bound).
func AtLeastOne() Constraints {
	return Constraints{Min: 1, Max: 0}
}

// ZeroOrMore places no bounds on the number of plugins of the type.
func ZeroOrMore() Constraints {
	return Constraints{Min: 0, Max: 0}
}
type Constraints struct {
// Min is the minimum number of plugins required of a specific type. If
// zero, there is no lower bound (i.e. the plugin type is optional).
Min int
// Max is the the maximum number of plugins required of a specific type. If
// zero, there is no upper bound.
Max int
}
// Check verifies that count satisfies the constraints, returning a
// descriptive error when it does not and nil otherwise.
func (c Constraints) Check(count int) error {
	// An exact requirement (Min == Max, both set) is reported with its own message.
	if c.Max > 0 && c.Min == c.Max && count != c.Min {
		return fmt.Errorf("expected exactly %d but got %d", c.Min, count)
	}
	if c.Min > 0 && count < c.Min {
		return fmt.Errorf("expected at least %d but got %d", c.Min, count)
	}
	if c.Max > 0 && count > c.Max {
		return fmt.Errorf("expected at most %d but got %d", c.Max, count)
	}
	return nil
}
|
specter01wj/LAB-Lynda
|
Misc/Become_a_Python_Developer/2_Programming Fundamentals in the Real World/Ex_Files_Programming_Realworld/Exercise Files/Ch05/05_02/end_05_02_parking_lots.py
|
<filename>Misc/Become_a_Python_Developer/2_Programming Fundamentals in the Real World/Ex_Files_Programming_Realworld/Exercise Files/Ch05/05_02/end_05_02_parking_lots.py
""" A 3-Dimensional Valet Service """
# 2D list of lists
# - index cars by row, spot
lot_2d = [['Toyota','Audi','BMW'], # 0th row
['Lexus','Jeep'], # 1st row
['Honda','Kia','Mazda']] # 2nd row
# 3D list of lists of lists
# - index cars by floor, row, spot
lot_3d = [[['Telsa','Fiat','BMW'], # 0th floor
['Honda','Jeep'],
['Saab','Kia','Ford']],
[['Subaru','Nissan'], # 1st floor
['Volvo']],
[['Mazda','Chevy'], # 2nd floor
[],
['Volkswagen']]]
# indexing 2D lists
print(lot_2d) # 2D list of parking lot
print(lot_2d[2]) # 1D list of cars in row 2
print(lot_2d[2][1]) # car parking in row 2, spot 1
# indexing 3D lists
print(lot_3d) # 3D list of multi-story garage
print(lot_3d[0]) # 2D list of cars on floor 0
print(lot_3d[0][2]) # 1D list of cars on floor 0, row 2
print(lot_3d[0][2][1]) # car parked on floor 0, row 2, spot 1
# accessing all cars in the multi-story garage
for floor in lot_3d: # cycle through each floor in the multi-story garage
for row in floor: # cycle through each row in the floor
for car in row: # cycle through each car in the row
print(car)
|
Questandachievement7Developer/JitsiwithCalla_Stable
|
ServerConfiguration/Calla/Calla-Lib/game/src/avatars/BaseAvatar.js
|
import { Canvas } from "../html/tags.js";
/**
 * A base class for different types of avatars, each backed by an
 * offscreen 128x128 canvas.
 **/
export class BaseAvatar {
    /**
     * Encapsulates a resource to use as an avatar.
     * @param {boolean} canSwim - whether the avatar supports a swimming state
     */
    constructor(canSwim) {
        this.canSwim = canSwim;
        this.element = Canvas(128, 128);
        this.g = this.element.getContext("2d");
    }

    /**
     * Render the avatar at a certain size, letterboxed so the backing
     * canvas' aspect ratio is preserved and the image is centered.
     * @param {CanvasRenderingContext2D} g - the context to render to
     * @param {number} width - the width the avatar should be rendered at
     * @param {number} height - the height the avatar should be rendered at
     * @param {boolean} isMe - whether the avatar is the local user (unused
     *   here; available for subclass overrides)
     */
    draw(g, width, height, isMe) {
        const ratio = this.element.width / this.element.height;
        let drawWidth = width;
        let drawHeight = height;
        if (ratio > 1) {
            // Wider than tall: fill the width, shrink the height.
            drawHeight = width / ratio;
        } else {
            // Taller than wide (or square): fill the height, shrink the width.
            drawWidth = ratio * height;
        }
        const offsetX = (width - drawWidth) / 2;
        const offsetY = (height - drawHeight) / 2;
        g.drawImage(
            this.element,
            offsetX, offsetY,
            drawWidth, drawHeight);
    }
}
|
jgornick/async-p
|
src/parallelLimit.js
|
<filename>src/parallelLimit.js
import throat from 'throat';
import tryFn from './tryFn';
import promised from './promised';
/**
 * Runs `tasks` with at most `limit` tasks executing concurrently, passing
 * `args` to each task.
 *
 * @param {Array<Function>} tasks - task functions invoked with `...args`
 * @param {number} limit - maximum number of concurrently running tasks (> 0)
 * @param {...*} args - arguments forwarded to every task
 * @returns {Promise<Array>} results in the same order as `tasks`; rejects on
 *   the first task failure or when `limit` is not greater than 0
 */
export default promised(function parallelLimit(tasks, limit, ...args) {
    // FIX: `! limit > 0` parses as `(!limit) > 0`, which only rejected falsy
    // limits (0, undefined, NaN) and silently accepted negative values.
    // Grouping the comparison rejects every non-positive/non-numeric limit.
    if (!(limit > 0)) {
        return Promise.reject(new Error('Limit must be a number greater than 0.'));
    }
    return Promise.all(tasks.map(throat(limit, (task) => tryFn(task, ...args))));
});
|
gwt-unit/gwt-unit
|
gwt-unit-core/src/test/java/com/googlecode/gwt/test/utils/GwtDomUtilsTest.java
|
package com.googlecode.gwt.test.utils;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.googlecode.gwt.test.GwtTestTest;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for the {@code GwtDomUtils} client-height/width setters.
 */
public class GwtDomUtilsTest extends GwtTestTest {

   @Test
   public void setClientHeight() {
      // Given
      Element anchor = Document.get().createAnchorElement();
      // When
      GwtDomUtils.setClientHeight(anchor, 4);
      // Then
      assertThat(anchor.getClientHeight()).isEqualTo(4);
   }

   @Test
   public void setClientWidth() {
      // Given
      Element anchor = Document.get().createAnchorElement();
      // When
      GwtDomUtils.setClientWidth(anchor, 4);
      // Then
      assertThat(anchor.getClientWidth()).isEqualTo(4);
   }
}
|
ScalablyTyped/SlinkyTyped
|
w/wonder_dot_js/src/main/scala/typingsSlinky/wonderJs/mod/Program.scala
|
package typingsSlinky.wonderJs.mod
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation._
/** Scala.js facade for the `Program` class exported by wonder.js. */
@JSImport("wonder.js/dist/es2015", "Program")
@js.native
class Program ()
  extends typingsSlinky.wonderJs.programMod.Program
/* static members */
/** Companion facade exposing the static members of the JS `Program` class. */
@JSImport("wonder.js/dist/es2015", "Program")
@js.native
object Program extends js.Object {
  /** Creates a new `Program` via the underlying static factory method. */
  def create(): typingsSlinky.wonderJs.programMod.Program = js.native
}
|
UbuntuEvangelist/OG-Platform
|
projects/OG-Integration/src/main/java/com/opengamma/integration/tool/config/CurveNodeValidator.java
|
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.integration.tool.config;
import java.util.Map;
import org.threeten.bp.LocalDate;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.core.DateSet;
import com.opengamma.core.config.ConfigSource;
import com.opengamma.core.convention.Convention;
import com.opengamma.core.security.Security;
import com.opengamma.core.security.SecuritySource;
import com.opengamma.financial.analytics.curve.CurveNodeIdMapper;
import com.opengamma.financial.analytics.ircurve.CurveInstrumentProvider;
import com.opengamma.financial.analytics.ircurve.StaticCurvePointsInstrumentProvider;
import com.opengamma.financial.analytics.ircurve.strips.BillNode;
import com.opengamma.financial.analytics.ircurve.strips.BondNode;
import com.opengamma.financial.analytics.ircurve.strips.CalendarSwapNode;
import com.opengamma.financial.analytics.ircurve.strips.CashNode;
import com.opengamma.financial.analytics.ircurve.strips.ContinuouslyCompoundedRateNode;
import com.opengamma.financial.analytics.ircurve.strips.CreditSpreadNode;
import com.opengamma.financial.analytics.ircurve.strips.CurveNode;
import com.opengamma.financial.analytics.ircurve.strips.CurveNodeVisitor;
import com.opengamma.financial.analytics.ircurve.strips.DeliverableSwapFutureNode;
import com.opengamma.financial.analytics.ircurve.strips.DiscountFactorNode;
import com.opengamma.financial.analytics.ircurve.strips.FRANode;
import com.opengamma.financial.analytics.ircurve.strips.FXForwardNode;
import com.opengamma.financial.analytics.ircurve.strips.FXSwapNode;
import com.opengamma.financial.analytics.ircurve.strips.PeriodicallyCompoundedRateNode;
import com.opengamma.financial.analytics.ircurve.strips.RateFutureNode;
import com.opengamma.financial.analytics.ircurve.strips.RollDateFRANode;
import com.opengamma.financial.analytics.ircurve.strips.RollDateSwapNode;
import com.opengamma.financial.analytics.ircurve.strips.SwapNode;
import com.opengamma.financial.analytics.ircurve.strips.ThreeLegBasisSwapNode;
import com.opengamma.financial.analytics.ircurve.strips.ZeroCouponInflationNode;
import com.opengamma.id.ExternalId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.convention.ManageableConvention;
import com.opengamma.util.time.Tenor;
/**
 * Visitor that validates each type of curve node against the curve node id mapper,
 * security source, convention utilities and config source supplied at construction.
 * Each visit method records its findings as child {@link ValidationNode}s of the
 * parent validation node and returns {@code null}.
 */
public final class CurveNodeValidator implements CurveNodeVisitor<Void> {

  /** Parent node that collects the validation results. */
  private final ValidationNode _validationNode;
  /** Mapper used to resolve curve nodes to market data identifiers. */
  private final CurveNodeIdMapper _curveNodeIdMapper;
  /** Utilities for looking up conventions. */
  private final ConfigValidationUtils _configValidationUtils;
  /** Source of securities, used by bond and bill nodes. */
  private final SecuritySource _securitySource;
  /** Source of configuration items, used by calendar swap nodes. */
  private final ConfigSource _configSource;
  /** Date for which curve node identifiers are resolved. */
  private final LocalDate _curveDate;

  /**
   * @param curveDate the date for which curve node ids are resolved
   * @param configValidationUtils utilities for convention lookup
   * @param securitySource source of securities
   * @param validationNode parent node to which validation results are attached
   * @param curveNodeIdMapper mapper from curve nodes to external ids
   * @param configSource source of configuration items
   */
  public CurveNodeValidator(LocalDate curveDate, ConfigValidationUtils configValidationUtils, SecuritySource securitySource, ValidationNode validationNode, CurveNodeIdMapper curveNodeIdMapper, ConfigSource configSource) {
    _curveDate = curveDate;
    _configValidationUtils = configValidationUtils;
    _securitySource = securitySource;
    _validationNode = validationNode;
    _curveNodeIdMapper = curveNodeIdMapper;
    _configSource = configSource;
  }

  /**
   * Creates a validation node for a tenor/node-type pair and attaches it to
   * {@code parentNode}. Despite the name, this is also used to record valid
   * nodes: pass a null {@code message} and the node carries no error.
   *
   * @param tenor the tenor the node represents, used as the node name
   * @param curveNodeType the curve node class being validated
   * @param parentNode the node the result is attached to
   * @param message error message, or null if the node is valid
   * @return the newly created (and already attached) validation node
   */
  ValidationNode createInvalidCurveNodeValidationNode(Tenor tenor, Class<? extends CurveNode> curveNodeType, ValidationNode parentNode, String message) {
    ValidationNode validationNode = new ValidationNode();
    validationNode.setName(tenor.toFormattedString());
    validationNode.setType(curveNodeType);
    if (message != null) {
      validationNode.getErrors().add(message);
      validationNode.setError(true);
    }
    parentNode.getSubNodes().add(validationNode);
    return validationNode;
  }

  /**
   * Builds a validation node for a convention reference, flagging an error when
   * the convention cannot be found. The returned node is NOT attached to any
   * parent; the caller adds it where appropriate.
   *
   * @param conventionId the id of the convention to look up
   * @param description phrase used in the error message, e.g. "swap convention"
   * @return the populated validation node
   */
  private ValidationNode validateConvention(ExternalId conventionId, String description) {
    ValidationNode validationNode = new ValidationNode();
    validationNode.setName(conventionId.getValue());
    if (_configValidationUtils.conventionExists(conventionId)) {
      ManageableConvention convention = _configValidationUtils.getConvention(conventionId);
      validationNode.setType(convention.getClass());
    } else {
      validationNode.setType(Convention.class);
      validationNode.getErrors().add("Can't find " + description + " using ID " + conventionId);
      validationNode.setError(true);
    }
    return validationNode;
  }

  @Override
  public Void visitBondNode(BondNode node) {
    ExternalId bondNodeId;
    try {
      bondNodeId = _curveNodeIdMapper.getBondNodeId(_curveDate, node.getMaturityTenor());
    } catch (OpenGammaRuntimeException ogre) {
      bondNodeId = null; // no id mapper entry for this tenor
    }
    if (bondNodeId == null) {
      createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BondNode.class, _validationNode, "Entry missing for this tenor in CurveNodeIdMapper");
      return null;
    }
    try {
      Security bond = _securitySource.getSingle(bondNodeId.toBundle());
      if (bond == null) {
        createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BondNode.class, _validationNode, "Bond " + bondNodeId + " not found in security master");
      } else {
        createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BondNode.class, _validationNode, null);
      }
    } catch (IllegalArgumentException iae) {
      createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BondNode.class, _validationNode,
          "Bond " + bondNodeId + " error thrown by security master when resolving, probably invalid ID format");
    }
    return null;
  }

  @Override
  public Void visitCalendarSwapNode(CalendarSwapNode node) {
    ExternalId calendarNodeId;
    try {
      calendarNodeId = _curveNodeIdMapper.getCalendarSwapNodeId(_curveDate, node.getStartTenor(), node.getStartDateNumber(), node.getEndDateNumber());
    } catch (OpenGammaRuntimeException ogre) {
      calendarNodeId = null;
    }
    ValidationNode calendarSwapValidationNode;
    if (calendarNodeId == null) {
      calendarSwapValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), CalendarSwapNode.class, _validationNode, "Entry missing for this tenor in CurveNodeIdMapper");
    } else {
      calendarSwapValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), CalendarSwapNode.class, _validationNode, null);
    }
    // FIX: the swap convention check was previously built but never attached
    // to the result tree, so convention errors were silently lost.
    calendarSwapValidationNode.getSubNodes().add(validateConvention(node.getSwapConvention(), "swap convention"));
    ValidationNode dateSetValidationNode = new ValidationNode();
    // FIX: name the node after the DateSet being checked; it was previously
    // (mis)named after the swap convention.
    dateSetValidationNode.setName(node.getDateSetName());
    dateSetValidationNode.setType(DateSet.class);
    if (_configSource.get(DateSet.class, node.getDateSetName(), VersionCorrection.LATEST) == null) {
      dateSetValidationNode.getErrors().add("Can't find calendar (DateSet) named " + node.getDateSetName());
      dateSetValidationNode.setError(true);
    }
    calendarSwapValidationNode.getSubNodes().add(dateSetValidationNode);
    return null;
  }

  @Override
  public Void visitCashNode(CashNode node) {
    ExternalId cashNodeId;
    try {
      cashNodeId = _curveNodeIdMapper.getCashNodeId(_curveDate, node.getMaturityTenor());
    } catch (OpenGammaRuntimeException ogre) {
      cashNodeId = null;
    }
    ValidationNode cashNodeValidationNode;
    if (cashNodeId == null) {
      cashNodeValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), CashNode.class, _validationNode, "No curve node id mapper entry for " + node.getResolvedMaturity());
    } else {
      cashNodeValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), CashNode.class, _validationNode, null);
    }
    cashNodeValidationNode.getSubNodes().add(validateConvention(node.getConvention(), "convention"));
    return null;
  }

  @Override
  public Void visitContinuouslyCompoundedRateNode(ContinuouslyCompoundedRateNode node) {
    ExternalId continuouslyCompoundedRateNodeId;
    try {
      continuouslyCompoundedRateNodeId = _curveNodeIdMapper.getContinuouslyCompoundedRateNodeId(_curveDate, node.getTenor());
    } catch (OpenGammaRuntimeException ogre) {
      continuouslyCompoundedRateNodeId = null;
    }
    // The node gets attached to the parent inside this call.
    // FIX: report the node with its own type; it was previously reported as
    // CashNode.class (copy-paste error).
    if (continuouslyCompoundedRateNodeId == null) {
      createInvalidCurveNodeValidationNode(node.getResolvedMaturity(), ContinuouslyCompoundedRateNode.class, _validationNode, "No curve node id mapper entry for " + node.getResolvedMaturity());
    } else {
      createInvalidCurveNodeValidationNode(node.getResolvedMaturity(), ContinuouslyCompoundedRateNode.class, _validationNode, null);
    }
    return null;
  }

  @Override
  public Void visitPeriodicallyCompoundedRateNode(PeriodicallyCompoundedRateNode node) {
    ExternalId id;
    try {
      id = _curveNodeIdMapper.getPeriodicallyCompoundedRateNodeId(_curveDate, node.getTenor());
    } catch (OpenGammaRuntimeException ogre) {
      id = null;
    }
    // The node gets attached to the parent inside this call.
    // FIX: report the node with its own type; it was previously reported as
    // CashNode.class (copy-paste error).
    if (id == null) {
      createInvalidCurveNodeValidationNode(node.getResolvedMaturity(), PeriodicallyCompoundedRateNode.class, _validationNode, "No curve node id mapper entry for " + node.getResolvedMaturity());
    } else {
      createInvalidCurveNodeValidationNode(node.getResolvedMaturity(), PeriodicallyCompoundedRateNode.class, _validationNode, null);
    }
    return null;
  }

  @Override
  public Void visitCreditSpreadNode(CreditSpreadNode node) {
    ExternalId creditSpreadNodeId;
    try {
      creditSpreadNodeId = _curveNodeIdMapper.getCreditSpreadNodeId(_curveDate, node.getTenor());
    } catch (OpenGammaRuntimeException ogre) {
      creditSpreadNodeId = null;
    }
    // The node gets attached to the parent inside this call.
    if (creditSpreadNodeId == null) {
      createInvalidCurveNodeValidationNode(node.getResolvedMaturity(), CreditSpreadNode.class, _validationNode, "No curve node id mapper entry for " + node.getResolvedMaturity());
    } else {
      createInvalidCurveNodeValidationNode(node.getResolvedMaturity(), CreditSpreadNode.class, _validationNode, null);
    }
    return null;
  }

  @Override
  public Void visitDeliverableSwapFutureNode(DeliverableSwapFutureNode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getDeliverableSwapFutureNodeId(_curveDate, node.getStartTenor(), node.getFutureTenor(), node.getFutureNumber());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode dsValidationNode;
    if (nodeId == null) {
      dsValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), DeliverableSwapFutureNode.class, _validationNode, "No curve node id mapper entry for " + node.getStartTenor());
    } else {
      dsValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), DeliverableSwapFutureNode.class, _validationNode, null);
    }
    dsValidationNode.getSubNodes().add(validateConvention(node.getFutureConvention(), "future convention"));
    dsValidationNode.getSubNodes().add(validateConvention(node.getSwapConvention(), "swap convention"));
    return null;
  }

  @Override
  public Void visitDiscountFactorNode(DiscountFactorNode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getDiscountFactorNodeId(_curveDate, node.getTenor());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    // The node gets attached to the parent inside this call.
    if (nodeId == null) {
      createInvalidCurveNodeValidationNode(node.getTenor(), DiscountFactorNode.class, _validationNode, "No curve node id mapper entry for " + node.getTenor());
    } else {
      createInvalidCurveNodeValidationNode(node.getTenor(), DiscountFactorNode.class, _validationNode, null);
    }
    return null;
  }

  @Override
  public Void visitFRANode(FRANode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getFRANodeId(_curveDate, node.getFixingEnd());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode fraValidationNode;
    if (nodeId == null) {
      fraValidationNode = createInvalidCurveNodeValidationNode(node.getFixingEnd(), FRANode.class, _validationNode, "No curve node id mapper entry for " + node.getFixingEnd());
    } else {
      fraValidationNode = createInvalidCurveNodeValidationNode(node.getFixingEnd(), FRANode.class, _validationNode, null);
    }
    fraValidationNode.getSubNodes().add(validateConvention(node.getConvention(), "convention"));
    return null;
  }

  @Override
  public Void visitFXForwardNode(FXForwardNode node) {
    Map<Tenor, CurveInstrumentProvider> fxForwardNodeIds = _curveNodeIdMapper.getFXForwardNodeIds();
    CurveInstrumentProvider curveInstrumentProvider = fxForwardNodeIds.get(node.getMaturityTenor());
    // Static point providers resolve directly; anything else goes through the mapper.
    ExternalId nodeId;
    if (curveInstrumentProvider instanceof StaticCurvePointsInstrumentProvider) {
      nodeId = curveInstrumentProvider.getInstrument(_curveDate, node.getMaturityTenor());
    } else {
      nodeId = _curveNodeIdMapper.getFXForwardNodeId(_curveDate, node.getMaturityTenor());
    }
    ValidationNode fxValidationNode;
    if (nodeId == null) {
      fxValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), FXForwardNode.class, _validationNode, "No curve node id mapper entry for " + node.getMaturityTenor());
    } else {
      fxValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), FXForwardNode.class, _validationNode, null);
    }
    fxValidationNode.getSubNodes().add(validateConvention(node.getFxForwardConvention(), "convention"));
    return null;
  }

  @Override
  public Void visitFXSwapNode(FXSwapNode node) {
    Map<Tenor, CurveInstrumentProvider> fxSwapNodeIds = _curveNodeIdMapper.getFXSwapNodeIds();
    CurveInstrumentProvider curveInstrumentProvider = fxSwapNodeIds.get(node.getMaturityTenor());
    // Static point providers resolve directly; anything else goes through the mapper.
    ExternalId nodeId = curveInstrumentProvider instanceof StaticCurvePointsInstrumentProvider ?
        curveInstrumentProvider.getInstrument(_curveDate, node.getMaturityTenor()) :
        _curveNodeIdMapper.getFXSwapNodeId(_curveDate, node.getMaturityTenor());
    ValidationNode fxValidationNode;
    if (nodeId == null) {
      fxValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), FXSwapNode.class, _validationNode, "No curve node id mapper entry for " + node.getMaturityTenor());
    } else {
      fxValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), FXSwapNode.class, _validationNode, null);
    }
    fxValidationNode.getSubNodes().add(validateConvention(node.getFxSwapConvention(), "convention"));
    return null;
  }

  @Override
  public Void visitRollDateFRANode(RollDateFRANode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getIMMFRANodeId(_curveDate, node.getStartTenor(), node.getRollDateStartNumber(), node.getRollDateEndNumber());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode fraValidationNode;
    if (nodeId == null) {
      fraValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), RollDateFRANode.class, _validationNode, "No curve node id mapper entry for " + node.getStartTenor());
    } else {
      fraValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), RollDateFRANode.class, _validationNode, null);
    }
    fraValidationNode.getSubNodes().add(validateConvention(node.getRollDateFRAConvention(), "convention"));
    return null;
  }

  @Override
  public Void visitRollDateSwapNode(RollDateSwapNode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getIMMSwapNodeId(_curveDate, node.getStartTenor(), node.getRollDateStartNumber(), node.getRollDateEndNumber());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode swapValidationNode;
    if (nodeId == null) {
      swapValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), RollDateSwapNode.class, _validationNode, "No curve node id mapper entry for " + node.getStartTenor());
    } else {
      swapValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), RollDateSwapNode.class, _validationNode, null);
    }
    swapValidationNode.getSubNodes().add(validateConvention(node.getRollDateSwapConvention(), "convention"));
    return null;
  }

  @Override
  public Void visitRateFutureNode(RateFutureNode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getRateFutureNodeId(_curveDate, node.getStartTenor(), node.getFutureTenor(), node.getFutureNumber());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode futureValidationNode;
    if (nodeId == null) {
      futureValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), RateFutureNode.class, _validationNode, "No curve node id mapper entry for " + node.getStartTenor());
    } else {
      futureValidationNode = createInvalidCurveNodeValidationNode(node.getStartTenor(), RateFutureNode.class, _validationNode, null);
    }
    futureValidationNode.getSubNodes().add(validateConvention(node.getFutureConvention(), "convention"));
    return null;
  }

  @Override
  public Void visitSwapNode(SwapNode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getSwapNodeId(_curveDate, node.getMaturityTenor());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode swapValidationNode;
    if (nodeId == null) {
      swapValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), SwapNode.class, _validationNode, "No curve node id mapper entry for " + node.getMaturityTenor());
    } else {
      swapValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), SwapNode.class, _validationNode, null);
    }
    swapValidationNode.getSubNodes().add(validateConvention(node.getPayLegConvention(), "pay leg convention"));
    swapValidationNode.getSubNodes().add(validateConvention(node.getReceiveLegConvention(), "receive leg convention"));
    return null;
  }

  @Override
  public Void visitThreeLegBasisSwapNode(ThreeLegBasisSwapNode node) {
    ExternalId nodeId;
    try {
      nodeId = _curveNodeIdMapper.getThreeLegBasisSwapNodeId(_curveDate, node.getMaturityTenor());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode swapValidationNode;
    if (nodeId == null) {
      swapValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), ThreeLegBasisSwapNode.class, _validationNode, "No curve node id mapper entry for " + node.getMaturityTenor());
    } else {
      swapValidationNode = createInvalidCurveNodeValidationNode(node.getMaturityTenor(), ThreeLegBasisSwapNode.class, _validationNode, null);
    }
    swapValidationNode.getSubNodes().add(validateConvention(node.getPayLegConvention(), "pay leg convention"));
    swapValidationNode.getSubNodes().add(validateConvention(node.getReceiveLegConvention(), "receive leg convention"));
    // FIX: the spread-leg error message previously reported the receive leg id.
    swapValidationNode.getSubNodes().add(validateConvention(node.getSpreadLegConvention(), "spread leg convention"));
    return null;
  }

  @Override
  public Void visitZeroCouponInflationNode(ZeroCouponInflationNode node) {
    ExternalId nodeId;
    try {
      // NOTE(review): uses getSwapNodeId — the mapper may not expose a
      // zero-coupon-specific lookup; confirm this is intentional.
      nodeId = _curveNodeIdMapper.getSwapNodeId(_curveDate, node.getTenor());
    } catch (OpenGammaRuntimeException ogre) {
      nodeId = null;
    }
    ValidationNode zcValidationNode;
    if (nodeId == null) {
      zcValidationNode = createInvalidCurveNodeValidationNode(node.getTenor(), ZeroCouponInflationNode.class, _validationNode, "No curve node id mapper entry for " + node.getTenor());
    } else {
      zcValidationNode = createInvalidCurveNodeValidationNode(node.getTenor(), ZeroCouponInflationNode.class, _validationNode, null);
    }
    zcValidationNode.getSubNodes().add(validateConvention(node.getFixedLegConvention(), "fixed leg convention"));
    zcValidationNode.getSubNodes().add(validateConvention(node.getInflationLegConvention(), "inflation leg convention"));
    return null;
  }

  @Override
  public Void visitBillNode(BillNode node) {
    ExternalId billNodeId;
    try {
      billNodeId = _curveNodeIdMapper.getBillNodeId(_curveDate, node.getMaturityTenor());
    } catch (OpenGammaRuntimeException ogre) {
      billNodeId = null; // no id mapper entry for this tenor
    }
    if (billNodeId == null) {
      createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BillNode.class, _validationNode, "Entry missing for this tenor in CurveNodeIdMapper");
      return null;
    }
    try {
      Security bill = _securitySource.getSingle(billNodeId.toBundle());
      if (bill == null) {
        createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BillNode.class, _validationNode, "Bill " + billNodeId + " not found in security master");
      } else {
        createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BillNode.class, _validationNode, null);
      }
    } catch (IllegalArgumentException iae) {
      // FIX: message previously said "Bond" for bill nodes (copy-paste error).
      createInvalidCurveNodeValidationNode(node.getMaturityTenor(), BillNode.class, _validationNode,
          "Bill " + billNodeId + " error thrown by security master when resolving, probably invalid ID format");
    }
    return null;
  }
}
|
fujaba/fulibScenarios
|
test/src/gen/java/org/example/CarModel.java
|
<filename>test/src/gen/java/org/example/CarModel.java
package org.example;
import org.fulib.builder.ClassModelDecorator;
import org.fulib.builder.ClassModelManager;
import org.fulib.classmodel.Clazz;
/**
 * Declares the class model for the car example: a {@code Car} base class
 * and a {@code SuperCar} subclass inheriting from it.
 */
public class CarModel implements ClassModelDecorator
{
  @Override
  public void decorate(ClassModelManager m)
  {
    // Make sure both classes exist in the model, then wire up the
    // inheritance relation: class SuperCar extends Car.
    Clazz carClazz = m.haveClass("Car");
    Clazz superCarClazz = m.haveClass("SuperCar");
    superCarClazz.setSuperClass(carClazz);
  }
}
|
jokade/angulate2
|
bindings/src/main/scala/angulate2/core/NgModule.scala
|
<gh_stars>10-100
// Project: angulate2
// Description: Angular2 @NgModule macro annotation
// Copyright (c) 2016 Johannes.Kastner <<EMAIL>>
// Distributed under the MIT License (see included LICENSE file)
package angulate2.core
import angulate2.internal.ClassDecorator
import scala.annotation.{StaticAnnotation, compileTimeOnly}
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
import scala.scalajs.js
import scala.scalajs.js.annotation.JSImport
/** Scala.js facade for the Angular `NgModule` decorator function imported
  * from `@angular/core`; invoked (with or without an options object) by the
  * code generated from the `@NgModule` macro annotation below.
  */
@js.native
@JSImport("@angular/core","NgModule")
object NgModuleFacade extends js.Object {
  def apply() : js.Object = js.native
  def apply(options: js.Object) : js.Object = js.native
}
// NOTE: keep the constructor parameter list and Component.Macro.annotationParamNames in sync!
/** Macro annotation marking a class as an Angular `NgModule`.
  *
  * All parameters mirror the options of Angular's `NgModule` decorator and
  * default to `null` (i.e. omitted). The macro expansion is performed by
  * [[NgModule.Macro]]; this class itself only exists at compile time.
  */
@compileTimeOnly("enable macro paradise to expand macro annotations")
class NgModule(providers: js.Array[js.Any] = null,
               declarations: js.Array[js.Any] = null,
               imports: js.Array[js.Any] = null,
               exports: js.Array[js.Any] = null,
               entryComponents: js.Array[js.Any] = null,
               bootstrap: js.Array[js.Any] = null,
               schemas: js.Array[js.Any] = null,
               id: String = null) extends StaticAnnotation {
  def macroTransform(annottees: Any*): Any = macro NgModule.Macro.impl
}
object NgModule {
  /** Macro bundle implementing the `@NgModule` expansion via the shared
    * [[angulate2.internal.ClassDecorator]] machinery.
    */
  private[angulate2] class Macro(val c: whitebox.Context) extends ClassDecorator {
    import c.universe._

    // Names of the annotation parameters, in declaration order; must match
    // the constructor parameter list of the NgModule class above.
    val annotationParamNames = Seq(
      "providers",
      "declarations",
      "imports",
      "exports",
      "entryComponents",
      "bootstrap",
      "schemas",
      "id"
    )

    override val annotationName: String = "NgModule"

    // The JS decorator object applied to the annotated class.
    override def mainAnnotationObject = q"angulate2.core.NgModuleFacade"
  }
}
|
Krzyciu/A3CS
|
addons/editor/attributes/mission_garbageCollection.hpp
|
<reponame>Krzyciu/A3CS<filename>addons/editor/attributes/mission_garbageCollection.hpp
// Eden editor mission-attribute category for garbage-collection settings.
// The per-attribute `control` entries point at hidden controls, so the
// corresponding vanilla attributes are not shown in the editor UI.
class GarbageCollection {
	collapsed = 0;	// 0 = category starts expanded
	class Attributes {
		// Vanilla corpse-manager mode, rendered with a hidden combo box.
		class CorpseManagerMode {
			control = QGVAR(hiddenCombo);
		};
		// Vanilla wreck-manager mode, rendered with a hidden combo box.
		class WreckManagerMode {
			control = QGVAR(hiddenCombo);
		};
		// Internal flag persisted on the mission; tracks whether garbage
		// collection has been set up (hidden checkbox, defaults to false).
		class GVAR(garbageCollectionSetuped) {
			property = QGVAR(garbageCollectionSetuped);
			control = QGVAR(hiddenCheckbox);
			defaultValue = "false";
		};
	};
};
// Dynamic-simulation mission-attribute category: the vanilla enable flag is
// rendered with a hidden checkbox so it does not appear in the editor UI.
class DynamicSimulation {
	class Attributes {
		class DynSimEnabled {
			control = QGVAR(hiddenCheckbox);
		};
	};
};
|
Largo/Lurnby
|
app/__init__.py
|
from datetime import datetime
import logging
from logging.handlers import SMTPHandler, RotatingFileHandler
import os
from config import Config
from flask import Flask, session
from flask_cors import CORS
from flask_login import LoginManager, current_user
from flask_mail import Mail
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from flask_talisman import Talisman
from flask_wtf.csrf import CSRFProtect
from redis import Redis
import rq
from sqlalchemy import MetaData
import boto3
from botocore.client import Config as AZConfig
# Naming convention for constraints and indexes so that Alembic migrations
# get stable, predictable names across database backends.
convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}
metadata = MetaData(naming_convention=convention)

# Shared extension singletons; each is bound to the app in create_app().
db = SQLAlchemy(metadata=metadata)
migrate = Migrate()
login = LoginManager()
login.login_view = 'auth.login'
login.login_message = None  # suppress the default "please log in" flash
mail = Mail()
cors = CORS()
csrf = CSRFProtect()
talisman = Talisman()

# S3 client (signature v4 is required by newer AWS regions such as us-east-2).
my_config = AZConfig(
    region_name='us-east-2',
    signature_version='s3v4',
)
s3 = boto3.client(
    's3',
    aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID'),
    aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY'),
    config=my_config
)
bucket = os.environ.get('AWS_BUCKET')

# Content-Security-Policy whitelist for front-end assets.
# NOTE(review): this policy is currently unused — create_app() initializes
# Talisman with content_security_policy=None; pass `csp` there to enforce it.
csp = {
    'default-src': [
        '\'self\'',
        '\'unsafe-inline\'',
        '*.getbootstrap.com/*',  # fixed typo: was "*.getbootsrap.com/*"
        '*.bootstrapcdn.com/*',
        '*.jquery.com/*',
        '*.cloudflare.com/ajax/libs/*'
    ]
}
def create_app(config_class=Config):
    """Application factory: build and configure the Lurnby Flask app.

    Sets up the Redis-backed RQ task queue, binds the shared extension
    singletons, registers all blueprints, and (outside debug/testing)
    configures error e-mails and file/stdout logging.

    :param config_class: configuration object to load (defaults to Config).
    :return: the fully configured :class:`~flask.Flask` application.
    """
    app = Flask(__name__)
    app.config.from_object(config_class)

    # Background task queue (RQ) backed by Redis.
    app.redis = Redis.from_url(app.config['REDIS_URL'])
    app.task_queue = rq.Queue('lurnby-tasks', connection=app.redis)

    @app.before_request
    def before_request_func():
        # Track each authenticated user's last-activity timestamp.
        if current_user.is_authenticated:
            current_user.last_active = datetime.utcnow()
            db.session.commit()

    # Template context available in all routes and blueprints: `now` keeps
    # the copyright notice at the bottom current; `staging` flags the DEV
    # environment.
    @app.context_processor
    def inject():
        if os.environ.get('DEV'):
            staging = True
        else:
            staging = False
        return {'now': datetime.utcnow(), 'staging': staging}

    # Bind the module-level extension singletons to this app instance.
    cors.init_app(app, resources={r"/app/api/*": {"origins": "*"}})
    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    mail.init_app(app)
    # NOTE(review): the module-level `csp` policy is not applied here; pass
    # content_security_policy=csp to enforce it.
    talisman.init_app(app, content_security_policy=None)
    csrf.init_app(app)

    # Blueprints. Imported lazily to avoid circular imports with `app`.
    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp, url_prefix='/app')
    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix='/app/auth')
    from app.main import bp as main_bp
    app.register_blueprint(main_bp, url_prefix='/app')
    from app.settings import bp as settings_bp
    app.register_blueprint(settings_bp, url_prefix='/app')
    from app.api import bp as api_bp
    app.register_blueprint(api_bp, url_prefix='/api')
    # The JSON API is consumed cross-origin; CSRF tokens do not apply.
    csrf.exempt(api_bp)
    from app.experiments import bp as experiments_bp
    app.register_blueprint(experiments_bp, url_prefix='/app')
    from app.content import bp as content_bp
    app.register_blueprint(content_bp, url_prefix='/app')
    from app.dotcom import bp as dotcom_bp
    app.register_blueprint(dotcom_bp)

    # (Removed dead code: an `if __name__ == "__main__": app.run(...)` block
    # lived here. Inside a package's __init__ factory it could never be the
    # intended entry point and would have started a blocking dev server in
    # the middle of initialization.)

    if not app.debug and not app.testing:
        # E-mail error reports to the admins when a mail server is configured.
        if app.config['MAIL_SERVER']:
            auth = None
            if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
                auth = (app.config['MAIL_USERNAME'],
                        app.config['MAIL_PASSWORD'])
            secure = None
            if app.config['MAIL_USE_TLS']:
                secure = ()
            mail_handler = SMTPHandler(
                mailhost=(app.config['MAIL_SERVER'],
                          app.config['MAIL_PORT']),
                fromaddr='<EMAIL>',
                toaddrs=app.config['ADMINS'], subject='Lurnby Failure',
                credentials=auth, secure=secure
            )
            mail_handler.setLevel(logging.ERROR)
            app.logger.addHandler(mail_handler)

        # INFO-level logging: stdout (e.g. Heroku) or a rotating log file.
        if app.config['LOG_TO_STDOUT']:
            stream_handler = logging.StreamHandler()
            stream_handler.setLevel(logging.INFO)
            app.logger.addHandler(stream_handler)
        else:
            if not os.path.exists('logs'):
                os.mkdir('logs')
            file_handler = RotatingFileHandler('logs/lurnby.log',
                                               maxBytes=10240, backupCount=10)
            file_handler.setLevel(logging.INFO)
            app.logger.addHandler(file_handler)

        app.logger.setLevel(logging.INFO)
        app.logger.info('Lurnby')

    return app
from app import models # noqa : E402, F401
|
chengxj2017/atlas
|
sql/mysql/sqlspec_test.go
|
package mysql
import (
"fmt"
"log"
"testing"
"ariga.io/atlas/schema/schemaspec"
"ariga.io/atlas/schema/schemaspec/schemahcl"
"ariga.io/atlas/sql/internal/specutil"
"ariga.io/atlas/sql/schema"
"ariga.io/atlas/sql/sqlspec"
"github.com/hashicorp/hcl/v2/hclwrite"
"github.com/stretchr/testify/require"
)
// hclState is the shared HCL (un)marshaler configured with the MySQL type
// registry; used by the type-roundtrip tests below.
var hclState = schemahcl.New(schemahcl.WithTypes(TypeRegistry.Specs()))
// TestSQLSpec verifies that a complete HCL document — a schema with two
// tables, typed columns, primary keys, a unique composite index and a
// cross-table foreign key — unmarshals into the expected schema.Schema graph.
func TestSQLSpec(t *testing.T) {
	f := `
schema "schema" {
}
table "table" {
column "col" {
type = "int"
}
column "age" {
type = "int"
}
column "account_name" {
type = "string"
size = 32
}
primary_key {
columns = [table.table.column.col]
}
index "index" {
unique = true
columns = [
table.table.column.col,
table.table.column.age,
]
}
foreign_key "accounts" {
columns = [
table.table.column.account_name,
]
ref_columns = [
table.accounts.column.name,
]
on_delete = "SET NULL"
}
}
table "accounts" {
column "name" {
type = "string"
size = 32
}
primary_key {
columns = [table.accounts.column.name]
}
}
`
	var s schema.Schema
	err := UnmarshalSpec([]byte(f), schemahcl.Unmarshal, &s)
	require.NoError(t, err)
	// Build the expected schema graph by hand and compare structurally.
	exp := &schema.Schema{
		Name: "schema",
	}
	exp.Tables = []*schema.Table{
		{
			Name:   "table",
			Schema: exp,
			Columns: []*schema.Column{
				{
					Name: "col",
					Type: &schema.ColumnType{
						Type: &schema.IntegerType{
							T: tInt,
						},
					},
				},
				{
					Name: "age",
					Type: &schema.ColumnType{
						Type: &schema.IntegerType{
							T: tInt,
						},
					},
				},
				{
					Name: "account_name",
					Type: &schema.ColumnType{
						Type: &schema.StringType{
							T:    tVarchar,
							Size: 32,
						},
					},
				},
			},
		},
		{
			Name:   "accounts",
			Schema: exp,
			Columns: []*schema.Column{
				{
					Name: "name",
					Type: &schema.ColumnType{
						Type: &schema.StringType{
							T:    tVarchar,
							Size: 32,
						},
					},
				},
			},
		},
	}
	// Keys, indexes and foreign keys reference the column objects above, so
	// they are wired up after the tables exist.
	exp.Tables[0].PrimaryKey = &schema.Index{
		Table: exp.Tables[0],
		Parts: []*schema.IndexPart{
			{SeqNo: 0, C: exp.Tables[0].Columns[0]},
		},
	}
	exp.Tables[0].Indexes = []*schema.Index{
		{
			Name:   "index",
			Table:  exp.Tables[0],
			Unique: true,
			Parts: []*schema.IndexPart{
				{SeqNo: 0, C: exp.Tables[0].Columns[0]},
				{SeqNo: 1, C: exp.Tables[0].Columns[1]},
			},
		},
	}
	exp.Tables[0].ForeignKeys = []*schema.ForeignKey{
		{
			Symbol:     "accounts",
			Table:      exp.Tables[0],
			Columns:    []*schema.Column{exp.Tables[0].Columns[2]},
			RefTable:   exp.Tables[1],
			RefColumns: []*schema.Column{exp.Tables[1].Columns[0]},
			OnDelete:   schema.SetNull,
		},
	}
	exp.Tables[1].PrimaryKey = &schema.Index{
		Table: exp.Tables[1],
		Parts: []*schema.IndexPart{
			{SeqNo: 0, C: exp.Tables[1].Columns[0]},
		},
	}
	require.EqualValues(t, exp, &s)
}
// TestMarshalSpec_Charset checks charset/collation inheritance on marshal:
// attributes identical to the parent element (schema for tables, table for
// columns) are omitted from the HCL output, and the document round-trips
// back into the same attribute values.
func TestMarshalSpec_Charset(t *testing.T) {
	s := &schema.Schema{
		Name: "test",
		Attrs: []schema.Attr{
			&schema.Charset{V: "utf8mb4"},
			&schema.Collation{V: "utf8mb4_0900_ai_ci"},
		},
		Tables: []*schema.Table{
			{
				Name: "users",
				Attrs: []schema.Attr{
					&schema.Charset{V: "utf8mb4"},
					&schema.Collation{V: "utf8mb4_0900_ai_ci"},
				},
				Columns: []*schema.Column{
					{
						Name: "a",
						Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}},
						Attrs: []schema.Attr{
							&schema.Charset{V: "latin1"},
							&schema.Collation{V: "latin1_swedish_ci"},
						},
					},
					{
						Name: "b",
						Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}},
						Attrs: []schema.Attr{
							&schema.Charset{V: "utf8mb4"},
							&schema.Collation{V: "utf8mb4_0900_ai_ci"},
						},
					},
				},
			},
			{
				Name: "posts",
				Attrs: []schema.Attr{
					&schema.Charset{V: "latin1"},
					&schema.Collation{V: "latin1_swedish_ci"},
				},
				Columns: []*schema.Column{
					{
						Name: "a",
						Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}},
						Attrs: []schema.Attr{
							&schema.Charset{V: "latin1"},
							&schema.Collation{V: "latin1_swedish_ci"},
						},
					},
					{
						Name: "b",
						Type: &schema.ColumnType{Type: &schema.StringType{T: "text"}},
						Attrs: []schema.Attr{
							&schema.Charset{V: "utf8mb4"},
							&schema.Collation{V: "utf8mb4_0900_ai_ci"},
						},
					},
				},
			},
		},
	}
	s.Tables[0].Schema = s
	s.Tables[1].Schema = s
	buf, err := MarshalSpec(s, schemahcl.Marshal)
	require.NoError(t, err)
	// Charset and collate that are identical to their parent elements
	// should not be printed as they are inherited by default from it.
	const expected = `table "users" {
schema = schema.test
column "a" {
null = false
type = "text"
charset = "latin1"
collation = "latin1_swedish_ci"
}
column "b" {
null = false
type = "text"
}
}
table "posts" {
schema = schema.test
charset = "latin1"
collation = "latin1_swedish_ci"
column "a" {
null = false
type = "text"
}
column "b" {
null = false
type = "text"
charset = "utf8mb4"
collation = "utf8mb4_0900_ai_ci"
}
}
schema "test" {
charset = "utf8mb4"
collation = "utf8mb4_0900_ai_ci"
}
`
	require.EqualValues(t, expected, buf)

	// Round-trip: unmarshal the generated document and confirm the omitted
	// attributes are re-inherited from the parent elements.
	var (
		s2    schema.Schema
		latin = []schema.Attr{
			&schema.Charset{V: "latin1"},
			&schema.Collation{V: "latin1_swedish_ci"},
		}
		utf8mb4 = []schema.Attr{
			&schema.Charset{V: "utf8mb4"},
			&schema.Collation{V: "utf8mb4_0900_ai_ci"},
		}
	)
	require.NoError(t, UnmarshalSpec(buf, schemahcl.Unmarshal, &s2))
	require.Equal(t, utf8mb4, s2.Attrs)
	posts, ok := s2.Table("posts")
	require.True(t, ok)
	require.Equal(t, latin, posts.Attrs)
	users, ok := s2.Table("users")
	require.True(t, ok)
	require.Empty(t, users.Attrs)
	a, ok := users.Column("a")
	require.True(t, ok)
	require.Equal(t, latin, a.Attrs)
	b, ok := posts.Column("b")
	require.True(t, ok)
	require.Equal(t, utf8mb4, b.Attrs)
}
// TestUnmarshalSpecColumnTypes is a table-driven check that each column
// spec (generic names like "string"/"int" as well as raw MySQL type
// expressions) unmarshals into the expected concrete schema.Type, including
// size-based promotion (e.g. large "string" sizes becoming mediumtext/longtext).
func TestUnmarshalSpecColumnTypes(t *testing.T) {
	for _, tt := range []struct {
		spec     *sqlspec.Column
		expected schema.Type
	}{
		{
			spec: specutil.NewCol("int", "int"),
			expected: &schema.IntegerType{
				T:        tInt,
				Unsigned: false,
			},
		},
		{
			spec: specutil.NewCol("uint", "uint"),
			expected: &schema.IntegerType{
				T:        tInt,
				Unsigned: true,
			},
		},
		{
			spec: specutil.NewCol("int8", "int8"),
			expected: &schema.IntegerType{
				T:        tTinyInt,
				Unsigned: false,
			},
		},
		{
			spec: specutil.NewCol("int64", "int64"),
			expected: &schema.IntegerType{
				T:        tBigInt,
				Unsigned: false,
			},
		},
		{
			spec: specutil.NewCol("uint64", "uint64"),
			expected: &schema.IntegerType{
				T:        tBigInt,
				Unsigned: true,
			},
		},
		{
			spec: specutil.NewCol("string_varchar", "string", specutil.LitAttr("size", "255")),
			expected: &schema.StringType{
				T:    tVarchar,
				Size: 255,
			},
		},
		{
			spec: specutil.NewCol("string_mediumtext", "string", specutil.LitAttr("size", "100000")),
			expected: &schema.StringType{
				T:    tMediumText,
				Size: 100_000,
			},
		},
		{
			spec: specutil.NewCol("string_longtext", "string", specutil.LitAttr("size", "17000000")),
			expected: &schema.StringType{
				T:    tLongText,
				Size: 17_000_000,
			},
		},
		{
			spec: specutil.NewCol("varchar(255)", "varchar(255)"),
			expected: &schema.StringType{
				T:    tVarchar,
				Size: 255,
			},
		},
		{
			spec: specutil.NewCol("decimal(10, 2) unsigned", "decimal(10, 2) unsigned"),
			expected: &schema.DecimalType{
				T:         tDecimal,
				Scale:     2,
				Precision: 10,
			},
		},
		{
			spec: specutil.NewCol("blob", "binary"),
			expected: &schema.BinaryType{
				T: tBlob,
			},
		},
		{
			spec: specutil.NewCol("tinyblob", "binary", specutil.LitAttr("size", "16")),
			expected: &schema.BinaryType{
				T:    tTinyBlob,
				Size: 16,
			},
		},
		{
			spec: specutil.NewCol("mediumblob", "binary", specutil.LitAttr("size", "100000")),
			expected: &schema.BinaryType{
				T:    tMediumBlob,
				Size: 100_000,
			},
		},
		{
			spec: specutil.NewCol("longblob", "binary", specutil.LitAttr("size", "20000000")),
			expected: &schema.BinaryType{
				T:    tLongBlob,
				Size: 20_000_000,
			},
		},
		{
			spec:     specutil.NewCol("enum", "enum", specutil.ListAttr("values", `"a"`, `"b"`, `"c"`)),
			expected: &schema.EnumType{Values: []string{"a", "b", "c"}},
		},
		{
			spec:     specutil.NewCol("bool", "boolean"),
			expected: &schema.BoolType{T: "boolean"},
		},
		{
			spec:     specutil.NewCol("decimal", "decimal", specutil.LitAttr("precision", "10"), specutil.LitAttr("scale", "2")),
			expected: &schema.DecimalType{T: "decimal", Precision: 10, Scale: 2},
		},
		{
			spec:     specutil.NewCol("float", "float", specutil.LitAttr("precision", "10")),
			expected: &schema.FloatType{T: "float", Precision: 10},
		},
		{
			// Precision above 24 promotes float to double.
			spec:     specutil.NewCol("float", "float", specutil.LitAttr("precision", "25")),
			expected: &schema.FloatType{T: "double", Precision: 25},
		},
	} {
		t.Run(tt.spec.Name, func(t *testing.T) {
			var s schema.Schema
			err := UnmarshalSpec(hcl(tt.spec), schemahcl.Unmarshal, &s)
			require.NoError(t, err)
			tbl, ok := s.Table("table")
			require.True(t, ok)
			col, ok := tbl.Column(tt.spec.Name)
			require.True(t, ok)
			require.EqualValues(t, tt.expected, col.Type.Type)
		})
	}
}
// hcl returns an Atlas HCL document containing the column spec, embedded in
// a minimal schema/table wrapper so it can be unmarshaled on its own.
func hcl(c *sqlspec.Column) []byte {
	marshaled, err := schemahcl.Marshal(c)
	if err != nil {
		log.Fatalln(err)
	}
	tmpl := `
schema "default" {
}
table "table" {
schema = schema.default
%s
}
`
	return []byte(fmt.Sprintf(tmpl, string(marshaled)))
}
// TestMarshalSpecColumnType is the marshal-direction counterpart of
// TestUnmarshalSpecColumnTypes: each concrete schema.Type is placed in a
// one-column table, marshaled to HCL, unmarshaled again, and the resulting
// column spec (type string plus extra attributes) is compared to the
// expected spec.
func TestMarshalSpecColumnType(t *testing.T) {
	for _, tt := range []struct {
		schem    schema.Type
		expected *sqlspec.Column
	}{
		{
			schem: &schema.IntegerType{
				T:        tInt,
				Unsigned: false,
			},
			expected: specutil.NewCol("column", "int"),
		},
		{
			schem: &schema.IntegerType{
				T:        tInt,
				Unsigned: true,
			},
			expected: specutil.NewCol("column", "uint"),
		},
		{
			schem: &schema.IntegerType{
				T:        tTinyInt,
				Unsigned: false,
			},
			expected: specutil.NewCol("column", "int8"),
		},
		{
			schem: &schema.IntegerType{
				T:        tMediumInt,
				Unsigned: false,
			},
			expected: specutil.NewCol("column", tMediumInt),
		},
		{
			schem: &schema.IntegerType{
				T:        tSmallInt,
				Unsigned: false,
			},
			expected: specutil.NewCol("column", tSmallInt),
		},
		{
			schem: &schema.IntegerType{
				T:        tBigInt,
				Unsigned: false,
			},
			expected: specutil.NewCol("column", "int64"),
		},
		{
			schem: &schema.IntegerType{
				T:        tBigInt,
				Unsigned: true,
			},
			expected: specutil.NewCol("column", "uint64"),
		},
		{
			schem: &schema.StringType{
				T:    tVarchar,
				Size: 255,
			},
			expected: specutil.NewCol("column", "string", specutil.LitAttr("size", "255")),
		},
		{
			schem: &schema.StringType{
				T:    tTinyText,
				Size: 255,
			},
			expected: specutil.NewCol("column", "string", specutil.LitAttr("size", "255")),
		},
		{
			schem: &schema.StringType{
				T:    tText,
				Size: 255,
			},
			expected: specutil.NewCol("column", "string", specutil.LitAttr("size", "255")),
		},
		{
			schem: &schema.StringType{
				T:    tChar,
				Size: 255,
			},
			expected: specutil.NewCol("column", "string", specutil.LitAttr("size", "255")),
		},
		{
			schem: &schema.StringType{
				T:    tMediumText,
				Size: 100_000,
			},
			expected: specutil.NewCol("column", "string", specutil.LitAttr("size", "100000")),
		},
		{
			schem: &schema.StringType{
				T:    tLongText,
				Size: 17_000_000,
			},
			expected: specutil.NewCol("column", "string", specutil.LitAttr("size", "17000000")),
		},
		{
			schem:    &schema.DecimalType{T: "decimal", Precision: 10, Scale: 2},
			expected: specutil.NewCol("column", "decimal", specutil.LitAttr("precision", "10"), specutil.LitAttr("scale", "2")),
		},
		{
			schem: &schema.BinaryType{
				T: tBlob,
			},
			expected: specutil.NewCol("column", "binary"),
		},
		{
			schem: &schema.BinaryType{
				T:    tTinyBlob,
				Size: 16,
			},
			expected: specutil.NewCol("column", "binary", specutil.LitAttr("size", "16")),
		},
		{
			schem: &schema.BinaryType{
				T:    tMediumBlob,
				Size: 100_000,
			},
			expected: specutil.NewCol("column", "binary", specutil.LitAttr("size", "100000")),
		},
		{
			schem: &schema.BinaryType{
				T:    tLongBlob,
				Size: 20_000_000,
			},
			expected: specutil.NewCol("column", "binary", specutil.LitAttr("size", "20000000")),
		},
		{
			schem:    &schema.EnumType{Values: []string{"a", "b", "c"}},
			expected: specutil.NewCol("column", "enum", specutil.ListAttr("values", `"a"`, `"b"`, `"c"`)),
		},
		{
			schem:    &schema.BoolType{T: "boolean"},
			expected: specutil.NewCol("column", "boolean"),
		},
		{
			schem:    &schema.FloatType{T: "float", Precision: 10},
			expected: specutil.NewCol("column", "float", specutil.LitAttr("precision", "10")),
		},
		{
			schem:    &schema.FloatType{T: "double", Precision: 25},
			expected: specutil.NewCol("column", "float", specutil.LitAttr("precision", "25")),
		},
		{
			schem:    &schema.TimeType{T: "date"},
			expected: specutil.NewCol("column", "date"),
		},
		{
			schem:    &schema.TimeType{T: "datetime"},
			expected: specutil.NewCol("column", "datetime"),
		},
		{
			schem:    &schema.TimeType{T: "time"},
			expected: specutil.NewCol("column", "time"),
		},
		{
			schem:    &schema.TimeType{T: "timestamp"},
			expected: specutil.NewCol("column", "timestamp"),
		},
		{
			schem:    &schema.TimeType{T: "year"},
			expected: specutil.NewCol("column", "year"),
		},
		{
			schem:    &schema.TimeType{T: "year(4)"},
			expected: specutil.NewCol("column", "year(4)"),
		},
	} {
		t.Run(tt.expected.Type, func(t *testing.T) {
			// Wrap the type in a minimal single-column table and round-trip
			// it through MarshalSpec / schemahcl.Unmarshal.
			s := schema.Schema{
				Tables: []*schema.Table{
					{
						Name: "table",
						Columns: []*schema.Column{
							{
								Name: "column",
								Type: &schema.ColumnType{Type: tt.schem},
							},
						},
					},
				},
			}
			s.Tables[0].Schema = &s
			ddl, err := MarshalSpec(&s, schemahcl.Marshal)
			require.NoError(t, err)
			var test struct {
				Table *sqlspec.Table `spec:"table"`
			}
			err = schemahcl.Unmarshal(ddl, &test)
			require.NoError(t, err)
			require.EqualValues(t, tt.expected.Type, test.Table.Columns[0].Type)
			require.ElementsMatch(t, tt.expected.Extra.Attrs, test.Table.Columns[0].Extra.Attrs)
		})
	}
}
// TestTypes round-trips raw MySQL type expressions (optionally with an extra
// attribute such as `unsigned=true`) through the type registry: the HCL
// column is unmarshaled, converted to a concrete schema.Type, compared to
// the expectation, then marshaled back and checked for document equality.
func TestTypes(t *testing.T) {
	for _, tt := range []struct {
		typeExpr  string
		extraAttr string
		expected  schema.Type
	}{
		{
			typeExpr: "varchar(255)",
			expected: &schema.StringType{T: tVarchar, Size: 255},
		},
		{
			typeExpr: "char(255)",
			expected: &schema.StringType{T: tChar, Size: 255},
		},
		{
			typeExpr: "binary(255)",
			expected: &schema.BinaryType{T: tBinary, Size: 255},
		},
		{
			typeExpr: "varbinary(255)",
			expected: &schema.BinaryType{T: tVarBinary, Size: 255},
		},
		{
			typeExpr: "int",
			expected: &schema.IntegerType{T: tInt},
		},
		{
			typeExpr:  "int",
			extraAttr: "unsigned=true",
			expected:  &schema.IntegerType{T: tInt, Unsigned: true},
		},
		{
			typeExpr: "bigint",
			expected: &schema.IntegerType{T: tBigInt},
		},
		{
			typeExpr:  "bigint",
			extraAttr: "unsigned=true",
			expected:  &schema.IntegerType{T: tBigInt, Unsigned: true},
		},
		{
			typeExpr: "tinyint",
			expected: &schema.IntegerType{T: tTinyInt},
		},
		{
			typeExpr:  "tinyint",
			extraAttr: "unsigned=true",
			expected:  &schema.IntegerType{T: tTinyInt, Unsigned: true},
		},
		{
			typeExpr: "smallint",
			expected: &schema.IntegerType{T: tSmallInt},
		},
		{
			typeExpr:  "smallint",
			extraAttr: "unsigned=true",
			expected:  &schema.IntegerType{T: tSmallInt, Unsigned: true},
		},
		{
			typeExpr: "mediumint",
			expected: &schema.IntegerType{T: tMediumInt},
		},
		{
			typeExpr:  "mediumint",
			extraAttr: "unsigned=true",
			expected:  &schema.IntegerType{T: tMediumInt, Unsigned: true},
		},
		{
			typeExpr: "tinytext",
			expected: &schema.StringType{T: tTinyText},
		},
		{
			typeExpr: "mediumtext",
			expected: &schema.StringType{T: tMediumText},
		},
		{
			typeExpr: "longtext",
			expected: &schema.StringType{T: tLongText},
		},
		{
			typeExpr: "text",
			expected: &schema.StringType{T: tText},
		},
		{
			typeExpr: `enum("on","off")`,
			expected: &schema.EnumType{Values: []string{"on", "off"}},
		},
		{
			typeExpr: "bit(10)",
			expected: &BitType{T: tBit},
		},
		{
			// Display widths on integer types are ignored.
			typeExpr: "int(10)",
			expected: &schema.IntegerType{T: tInt},
		},
		{
			typeExpr: "tinyint(10)",
			expected: &schema.IntegerType{T: tTinyInt},
		},
		{
			typeExpr: "smallint(10)",
			expected: &schema.IntegerType{T: tSmallInt},
		},
		{
			typeExpr: "mediumint(10)",
			expected: &schema.IntegerType{T: tMediumInt},
		},
		{
			typeExpr: "bigint(10)",
			expected: &schema.IntegerType{T: tBigInt},
		},
		{
			typeExpr: "decimal",
			expected: &schema.DecimalType{T: tDecimal},
		},
		{
			typeExpr: "numeric",
			expected: &schema.DecimalType{T: tNumeric},
		},
		{
			typeExpr: "float(10,0)",
			expected: &schema.FloatType{T: tFloat, Precision: 10},
		},
		{
			typeExpr: "double(10,0)",
			expected: &schema.FloatType{T: tDouble, Precision: 10},
		},
		{
			typeExpr: "real",
			expected: &schema.FloatType{T: tReal},
		},
		{
			typeExpr: "timestamp",
			expected: &schema.TimeType{T: tTimestamp},
		},
		{
			typeExpr: "date",
			expected: &schema.TimeType{T: tDate},
		},
		{
			typeExpr: "time",
			expected: &schema.TimeType{T: tTime},
		},
		{
			typeExpr: "datetime",
			expected: &schema.TimeType{T: tDateTime},
		},
		{
			typeExpr: "year",
			expected: &schema.TimeType{T: tYear},
		},
		{
			typeExpr: "varchar(10)",
			expected: &schema.StringType{T: tVarchar, Size: 10},
		},
		{
			typeExpr: "char(25)",
			expected: &schema.StringType{T: tChar, Size: 25},
		},
		{
			typeExpr: "varbinary(30)",
			expected: &schema.BinaryType{T: tVarBinary, Size: 30},
		},
		{
			typeExpr: "binary(5)",
			expected: &schema.BinaryType{T: tBinary, Size: 5},
		},
		{
			typeExpr: "blob(5)",
			expected: &schema.StringType{T: tBlob},
		},
		{
			typeExpr: "tinyblob",
			expected: &schema.StringType{T: tTinyBlob},
		},
		{
			typeExpr: "mediumblob",
			expected: &schema.StringType{T: tMediumBlob},
		},
		{
			typeExpr: "longblob",
			expected: &schema.StringType{T: tLongBlob},
		},
		{
			typeExpr: "text(13)",
			expected: &schema.StringType{T: tText},
		},
		{
			typeExpr: "tinytext",
			expected: &schema.StringType{T: tTinyText},
		},
		{
			typeExpr: "mediumtext",
			expected: &schema.StringType{T: tMediumText},
		},
		{
			typeExpr: "longtext",
			expected: &schema.StringType{T: tLongText},
		},
		{
			typeExpr: `enum("a","b")`,
			expected: &schema.EnumType{Values: []string{"a", "b"}},
		},
		{
			typeExpr: `set("a","b")`,
			expected: &SetType{Values: []string{"a", "b"}},
		},
		{
			typeExpr: "geometry",
			expected: &schema.SpatialType{T: tGeometry},
		},
		{
			typeExpr: "point",
			expected: &schema.SpatialType{T: tPoint},
		},
		{
			typeExpr: "multipoint",
			expected: &schema.SpatialType{T: tMultiPoint},
		},
		{
			typeExpr: "linestring",
			expected: &schema.SpatialType{T: tLineString},
		},
		{
			typeExpr: "multilinestring",
			expected: &schema.SpatialType{T: tMultiLineString},
		},
		{
			typeExpr: "polygon",
			expected: &schema.SpatialType{T: tPolygon},
		},
		{
			typeExpr: "multipolygon",
			expected: &schema.SpatialType{T: tMultiPolygon},
		},
		{
			typeExpr: "geometrycollection",
			expected: &schema.SpatialType{T: tGeometryCollection},
		},
	} {
		t.Run(tt.typeExpr, func(t *testing.T) {
			// simulates sqlspec.Column until we change its Type field.
			type col struct {
				Type *schemaspec.Type `spec:"type"`
				schemaspec.DefaultExtension
			}
			var test struct {
				Columns []*col `spec:"column"`
			}
			doc := fmt.Sprintf(`column {
type = %s%s
}
`, tt.typeExpr, lineIfSet(tt.extraAttr))
			err := hclState.UnmarshalSpec([]byte(doc), &test)
			require.NoError(t, err)
			column := test.Columns[0]
			typ, err := TypeRegistry.Type(column.Type, column.Extra.Attrs, parseRawType)
			require.NoError(t, err)
			require.EqualValues(t, tt.expected, typ)
			// Marshal back and verify the generated HCL matches the input
			// (after canonical formatting — see hclEqual).
			spec, err := hclState.MarshalSpec(&test)
			require.NoError(t, err)
			hclEqual(t, []byte(doc), spec)
		})
	}
}
// hclEqual asserts that two HCL documents are identical after canonical
// hclwrite formatting, so incidental whitespace differences are ignored.
func hclEqual(t *testing.T, expected, actual []byte) {
	want := string(hclwrite.Format(expected))
	got := string(hclwrite.Format(actual))
	require.EqualValues(t, want, got)
}
// lineIfSet prefixes s with a newline when it is non-empty, so an optional
// attribute can be appended to an HCL document on its own line.
func lineIfSet(s string) string {
	if s == "" {
		return s
	}
	return "\n" + s
}
|
jastination/software-engineering-excercise-repository
|
seer_java/src/common/datastructure/LRU.java
|
package common.datastructure;
/**
 * Minimal contract for a least-recently-used cache.
 *
 * @param <K> key type
 * @param <V> value type
 */
public interface LRU<K, V> {
	/**
	 * Associates {@code val} with {@code key}.
	 *
	 * @return a boolean status flag (semantics are implementation-defined —
	 *         e.g. success or whether an eviction occurred; confirm with the
	 *         concrete implementation)
	 */
	boolean put(K key, V val);

	/** Returns the value associated with {@code key}. */
	V get(K key);
}
|
rohe/fedservice
|
src/fedservice/utils.py
|
import json
import logging
import ssl
import sys
from oidcrp.exception import ResponseError
logger = logging.getLogger(__name__)
def load_json(file_name):
    """Read *file_name* and return its parsed JSON content.

    :param file_name: path to a JSON-encoded text file.
    :return: the deserialized Python object.
    :raises OSError: if the file cannot be opened.
    :raises json.JSONDecodeError: if the content is not valid JSON.
    """
    # JSON interchange is UTF-8 (RFC 8259); don't depend on the platform's
    # default text encoding.
    with open(file_name, encoding="utf-8") as fp:
        return json.load(fp)
def fed_parse_response(instance, info, sformat="", state="", **kwargs):
    """Parse *info* using *instance*, dispatching on the serialization format.

    JOSE-family formats ('jose', 'jws', 'jwe') are handled by the instance's
    ``post_parse_response``; any other format is delegated to the regular
    ``parse_response``.

    :raises ResponseError: when a JOSE-format response is missing or faulty.
    """
    if sformat not in ('jose', 'jws', 'jwe'):
        return instance.parse_response(info, sformat, state, **kwargs)

    response = instance.post_parse_response(info, state=state)
    if response:
        return response

    logger.error('Missing or faulty response')
    raise ResponseError("Missing or faulty response")
def compact(qsdict):
    """Collapse single-item sequence values of *qsdict* to their sole element.

    Integer values are kept as-is; any other value with exactly one element
    is unwrapped, and everything else is passed through unchanged.
    """
    result = {}
    for key, value in qsdict.items():
        if not isinstance(value, int) and len(value) == 1:
            result[key] = value[0]
        else:
            result[key] = value
    return result
|
oladimillion/react-form
|
example/Sample/Form/index.js
|
<reponame>oladimillion/react-form<filename>example/Sample/Form/index.js
import React from 'react'
import { Form as BaseForm, Field, SubmitButton, FieldArray } from '@oladimillion/react-form'
import { countries } from '../consts'
// Declarative validation rules for the sample form.
// Keys are field names; FieldArray entries use the `name.*.field` wildcard
// syntax. Each rule lists validators (array or pipe-separated string form)
// plus optional per-validator error messages and an optional `depend`
// predicate that enables the rule conditionally.
const validationRules = {
  text: {
    validation: ['required'], // array form
    message: {
      required: 'This field is required'
    }
  },
  password: {
    validation: 'required|min:3', // pipe-separated string form
  },
  confirm_password: {
    // must match the `password` field
    validation: 'same:password',
  },
  url: {
    validation: 'url',
  },
  file_multiple: {
    validation: 'required',
  },
  email: {
    validation: 'email',
  },
  'fieldArray.*.textarea': {
    validation: 'required',
    message: {
      required: 'This field is required'
    }
  },
  'fieldArray.*.number': {
    validation: 'required|numeric',
    message: {
      required: 'This field is number',
      numeric: 'This field expect a number value'
    }
  },
  'fieldArray.*.myemail': {
    validation: 'email',
    message: {
      required: 'This field is required',
      email: 'Invalid email provided',
    },
    // Only validate this entry's email when its sibling `number` is '2'.
    depend: ({ fieldArray }, name, index) => {
      const { number } = fieldArray[index] || {}
      return number === '2'
    }
  },
  date: {
    validation: 'required'
  },
}
export const Form = ({ readOnly }) => {
// const onSubmit = async () => { }
const onSubmit = async (args) => {
console.log(args)
}
const initialValues = {
email: '<EMAIL>',
number: '123349823983928',
password: '<PASSWORD>',
select: 'af',
file_multiple: ['https://google.com', 'https://wikipedia.com'],
radio: 'yes',
switch: true,
}
return (
<BaseForm
onSubmit={onSubmit}
validationRules={validationRules}
initialValues={initialValues}
readOnly={readOnly}
>
<Field type='test' label='Unsupported' name='test' />
<Field type='text' label='Text Field' name='text' placeholder='Text field' />
<Field type='password' label='Password Field' name='password' />
<Field type='password' label='Confirm Password Field' name='confirm_password' />
<Field type='url' label='Url Field' name='url' />
<Field type='email' label='Email Field' name='email' />
<Field useFileLink type='file' label='File Field' name='file' />
<Field useFileLink type='file' label='Multi-File Field' name='file_multiple' multiple />
<FieldArray name='fieldArray' label='Field Array'>
{({ values, add, remove }) => (
<FieldArray.Item mb={2}>
{values.map((_, index) => (
<FieldArray.Item key={index}>
<FieldArray.RemoveButton onClick={() => remove(index)} />
<Field type='textarea' label='TextArea Field' name={`fieldArray.${index}.textarea`} />
<Field type='number' label='Number Field' name={`fieldArray.${index}.number`} />
<Field type='email' label='Email Field' name={`fieldArray.${index}.myemail`} />
<Field
label='Radio Field'
options={[{ text: 'Yes', value: 'yes' }, {text: 'No', value: 'no' }]}
type='radio'
name={`fieldArray.${index}.radio`}
/>
<Field
label='Switch Field'
type='switch'
name={`fieldArray.${index}.switch`}
/>
<Field
label='Checkbox Field'
type='checkbox'
name={`fieldArray.${index}.checkbox`}
/>
<Field
label='Select Field'
options={countries}
type='select'
name={`fieldArray.${index}.select`}
placeholder='Select your country'
/>
<FieldArray.Divider />
</FieldArray.Item>
))}
<FieldArray.AddButton onClick={add} />
</FieldArray.Item>
)}
</FieldArray>
<Field type='number' label='Number Field' name='number' />
<Field
label='Select Field'
options={countries}
type='select'
name='select'
placeholder='Select your country'
/>
<Field
label='Select Field'
options={countries}
type='select'
name='select2'
placeholder='Select your country'
/>
<Field
label='Radio Field'
options={[{ text: 'Yes', value: 'yes' }, {text: 'No', value: 'no' }]}
type='radio'
name='radio'
/>
<Field
label='Switch Field'
type='switch'
name='switch'
/>
<Field
label='Checkbox Field'
type='checkbox'
name='checkbox'
/>
<Field
label='Date Field'
type='date'
name='date'
/>
<SubmitButton>Save</SubmitButton>
</BaseForm>
)
}
|
Kirishikesan/haiku
|
src/add-ons/kernel/file_systems/cdda/cdda.cpp
|
/*
* Copyright 2007-2010, <NAME>, <EMAIL>.
* Distributed under the terms of the MIT License.
*/
#include "cdda.h"
#include <KernelExport.h>
#include <device/scsi.h>
#include <algorithm>
#include <ctype.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
// One CD-Text pack as delivered by the drive: header bytes, a 12-byte text
// payload (strings may continue into the following pack), and a trailing CRC.
struct cdtext_pack_data {
	uint8	id;							// pack type ID (see enum below)
	uint8	track;						// track number this pack refers to
	uint8	number;						// sequence number of the pack
	uint8	character_position : 4;
	uint8	block_number : 3;
	uint8	double_byte : 1;			// set for double-byte character data
	char	text[12];					// payload; not necessarily terminated
	uint8	crc[2];						// checksum bytes (not verified here)
} _PACKED;

// CD-Text pack type IDs handled by this code.
enum {
	kTrackID = 0x80,
	kArtistID = 0x81,
	kMessageID = 0x85,
};

// Buffer sizes used for device transfers.
static const uint32 kBufferSize = 16384;
static const uint32 kSenseSize = 1024;
// #pragma mark - string functions
// Returns a heap-allocated duplicate of \a string, or NULL when the input
// is NULL or empty — so callers can treat "no text" uniformly.
static char *
copy_string(const char *string)
{
	if (string == NULL)
		return NULL;
	if (string[0] == '\0')
		return NULL;

	return strdup(string);
}
/*!	Converts \a string to a newly malloc'd UTF-8 string, treating the input
	as Windows CP-1252 (a superset of ISO-8859-1). Characters that don't map
	are dropped; output is capped at 255 bytes. Returns NULL on allocation
	failure.
*/
static char *
to_utf8(const char* string)
{
	char buffer[256];
	size_t out = 0;

	// TODO: assume CP1252 or ISO-8859-1 character set for now
	while (uint32 c = (uint8)string[0]) {
		if (c < 0x80) {
			if (out >= sizeof(buffer) - 1)
				break;
			// ASCII character: no change needed
			buffer[out++] = c;
		} else {
			if (c < 0xA0) {
				// Windows CP-1252 - Use a lookup table
				// (maps 0x80-0x9F to Unicode code points; 0 = unmapped)
				static const uint32 lookup[] = {
					0x20AC, 0, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021,
					0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0, 0x017D, 0,
					0, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014,
					0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0, 0x017E, 0x0178
				};
				c = lookup[c - 0x80];
			}
			// Convert to 2 or 3-byte representation
			if (c == 0) {
				// invalid character, ignore
			} else if (c < 0x800) {
				if (out >= sizeof(buffer) - 2)
					break;
				buffer[out++] = 0xc0 | (c >> 6);
				buffer[out++] = 0x80 | (c & 0x3f);
			} else {
				if (out >= sizeof(buffer) - 3)
					break;
				buffer[out++] = 0xe0 | (c >> 12);
				buffer[out++] = 0x80 | ((c >> 6) & 0x3f);
				buffer[out++] = 0x80 | (c & 0x3f);
			}
		}
		string++;
	}
	buffer[out++] = '\0';

	// Return a heap copy trimmed to the actual length.
	char *copy = (char *)malloc(out);
	if (copy == NULL)
		return NULL;
	memcpy(copy, buffer, out);
	return copy;
}
// A character counts as garbage when it is whitespace or one of the
// separator characters commonly found around CD-Text fields.
static bool
is_garbage(char c)
{
	switch (c) {
		case '-':
		case '/':
		case '\\':
			return true;
		default:
			return isspace(c) != 0;
	}
}
/*!	Strips leading garbage characters (see is_garbage()) and trailing
	whitespace from \a string in place. When the result is empty, the string
	is freed and the reference is set to NULL.
*/
static void
sanitize_string(char *&string)
{
	if (string == NULL)
		return;

	// strip garbage at the start
	uint32 length = strlen(string);
	uint32 garbage = 0;
	while (is_garbage(string[garbage])) {
		garbage++;
	}

	length -= garbage;
	if (garbage)
		memmove(string, string + garbage, length + 1);

	// strip garbage from the end
	while (length > 1 && isspace(string[length - 1])) {
		string[--length] = '\0';
	}

	if (!string[0]) {
		// free string if it's empty
		free(string);
		string = NULL;
	}
}
//!	Finds the first occurrence of \a find in \a string, ignoring case.
//	Returns a pointer into \a string, or NULL if not found.
static char*
find_string(const char *string, const char *find)
{
	if (string == NULL || find == NULL)
		return NULL;

	char first = tolower(find[0]);
	if (first == '\0')
		return (char *)string;

	// Compare the first character separately as a cheap filter before
	// running the full case-insensitive comparison on the remainder.
	const char *rest = find + 1;
	size_t restLength = strlen(rest);

	while (string[0] != '\0') {
		if (tolower(string[0]) == first
			&& strncasecmp(string + 1, rest, restLength) == 0)
			return (char *)string;
		string++;
	}

	return NULL;
}
static void
cut_string(char *string, const char *cut)
{
	// Removes the first case-insensitive occurrence of \a cut from
	// \a string, in place. No-op when either argument is NULL or the
	// substring is absent.
	if (string == NULL || cut == NULL)
		return;

	char *found = find_string(string, cut);
	if (found == NULL)
		return;

	size_t cutLength = strlen(cut);
	// Shift the tail (including the terminator) over the removed part.
	memmove(found, found + cutLength, strlen(found) - cutLength + 1);
}
// Cleans up the album string: removes a redundant artist prefix and
// well-known junk, and tries to split "artist album" pairs when no
// artist was supplied at all.
static void
sanitize_album(cdtext &text)
{
	cut_string(text.album, text.artist);
	sanitize_string(text.album);

	if (text.album != NULL && !strcasecmp(text.album, "My CD")) {
		// don't laugh, people really do that!
		free(text.album);
		text.album = NULL;
	}

	if ((text.artist == NULL || text.artist[0] == '\0') && text.album != NULL) {
		// try to extract artist from album
		char *space = strstr(text.album, " ");
		if (space != NULL) {
			space[0] = '\0';
			text.artist = text.album;
			// NOTE(review): this skips *two* characters past the split point;
			// with a single-space separator it would drop the album's first
			// character. Presumably the intended separator is two characters
			// wide — confirm against real CD-Text data.
			text.album = copy_string(space + 2);

			sanitize_string(text.artist);
			sanitize_string(text.album);
		}
	}
}
// Cleans up per-track titles and artists, removing redundant artist
// entries and resolving the CD-Text "repeat previous title" convention.
static void
sanitize_titles(cdtext &text)
{
	for (uint8 i = 0; i < text.track_count; i++) {
		cut_string(text.titles[i], "(Album Version)");
		sanitize_string(text.titles[i]);
		sanitize_string(text.artists[i]);

		if (text.artists[i] != NULL && text.artist != NULL
			&& !strcasecmp(text.artists[i], text.artist)) {
			// if the title artist is the same as the main artist, remove it
			free(text.artists[i]);
			text.artists[i] = NULL;
		}

		// A leading TAB means "same as the previous track's title".
		// NOTE(review): the old titles[i] pointer is overwritten without
		// being freed — looks like a small leak; confirm ownership.
		if (text.titles[i] != NULL && text.titles[i][0] == '\t' && i > 0)
			text.titles[i] = copy_string(text.titles[i - 1]);
	}
}
// Returns whether \a string uses a single letter case throughout.
// \a upper receives the case of the first letter seen; \a first must be
// initialized to true by the caller and is shared across calls so that a
// whole set of strings can be checked against the same case.
// NULL strings (and strings without letters) are trivially single-case.
static bool
single_case(const char *string, bool &upper, bool &first)
{
	if (string == NULL)
		return true;

	while (string[0]) {
		// Skip non-alphabetic characters, but stop at the terminator.
		// BUG FIX: the previous version did not check for '\0' here and
		// could scan past the end of the string when it ended in
		// non-alphabetic characters.
		while (string[0] != '\0' && !isalpha(string[0]))
			string++;
		if (string[0] == '\0')
			break;

		if (first) {
			// Remember the case of the very first letter encountered.
			upper = isupper(string[0]) != 0;
			first = false;
		} else if ((isupper(string[0]) != 0) ^ upper) {
			// Case differs from the first letter: not single-case.
			return false;
		}

		string++;
	}
	return true;
}
static void
capitalize_string(char *string)
{
	// Rewrites \a string in place so that each word starts with an
	// upper-case letter and continues in lower case. An apostrophe does
	// not start a new word ("it's" -> "It's").
	if (string == NULL)
		return;

	// A string starting mid-word (e.g. after stripped punctuation) keeps
	// its first letter from being treated as a word start.
	bool startOfWord = isalpha(string[0]) || isspace(string[0]);

	for (char *c = string; c[0] != '\0'; c++) {
		if (isalpha(c[0])) {
			c[0] = startOfWord ? toupper(c[0]) : tolower(c[0]);
			startOfWord = false;
		} else if (c[0] != '\'')
			startOfWord = true;
	}
}
// If *every* string on the disc is written in a single case (all caps or
// all lower), assume the author didn't bother with proper casing and
// capitalize each word instead. Mixed-case input is left untouched.
static void
correct_case(cdtext &text)
{
	// check if all titles share a single case
	bool first = true;
	bool upper;
		// "upper" is only read by single_case() after "first" has been
		// cleared, so it is never used uninitialized for non-empty text
	if (!single_case(text.album, upper, first)
		|| !single_case(text.artist, upper, first))
		return;

	for (int32 i = 0; i < text.track_count; i++) {
		if (!single_case(text.titles[i], upper, first)
			|| !single_case(text.artists[i], upper, first))
			return;
	}

	// If we get here, everything has a single case; we fix that
	// and capitalize each word
	capitalize_string(text.album);
	capitalize_string(text.artist);
	for (int32 i = 0; i < text.track_count; i++) {
		capitalize_string(text.titles[i]);
		capitalize_string(text.artists[i]);
	}
}
// #pragma mark - CD-Text
// Initializes an empty CD-Text record; all strings start out unset and
// are filled in lazily by read_cdtext().
cdtext::cdtext()
	:
	artist(NULL),
	album(NULL),
	genre(NULL),
	track_count(0)
{
	// Clear all per-track string pointers so the destructor can safely
	// free() whatever subset was actually populated.
	memset(titles, 0, sizeof(titles));
	memset(artists, 0, sizeof(artists));
}
// Releases all owned strings. Every string is malloc()ed (via strdup()
// or to_utf8()), and free(NULL) is a no-op, so no NULL checks are needed.
cdtext::~cdtext()
{
	free(album);
	free(artist);
	free(genre);

	for (uint8 i = 0; i < track_count; i++) {
		free(titles[i]);
		free(artists[i]);
	}
}
// Pack types kTrackID (0x80) through kMessageID (0x85) all carry textual
// payloads and can be handled by the same string-parsing logic.
static bool
is_string_id(uint8 id)
{
	return id >= kTrackID && id <= kMessageID;
}
/*!	Parses a \a pack data into the provided text buffer; the corresponding
	track number will be left in \a track, and the type of the data in \a id.
	The pack data is explained in SCSI MMC-3.

	\a id, \a track, and \a state must stay constant between calls to this
	function. \a state must be initialized to zero for the first call.

	Advances \a pack / \a packLeft past the packs consumed; returns false
	when no more (consistent) pack data is available.
*/
static bool
parse_pack_data(cdtext_pack_data *&pack, uint32 &packLeft,
	cdtext_pack_data *&lastPack, uint8 &id, uint8 &track, uint8 &state,
	char *buffer, size_t &length)
{
	if (packLeft < sizeof(cdtext_pack_data))
		return false;

	uint8 number = pack->number;
	size_t size = length;

	if (state != 0) {
		// we had a terminated string and a missing track
		track++;

		// Re-emit the leftover text of the previous pack for the track
		// that had no pack of its own.
		memcpy(buffer, lastPack->text + state, 12 - state);
		if (pack->track - track == 1)
			state = 0;
		else
			state += strnlen(buffer, 12 - state);
		return true;
	}

	id = pack->id;
	track = pack->track;

	buffer[0] = '\0';
	length = 0;

	// A non-zero character position means the string started in the
	// previous pack; copy that prefix first.
	size_t position = pack->character_position;
	if (position > 0 && lastPack != NULL) {
		memcpy(buffer, &lastPack->text[12 - position], position);
		length = position;
	}

	// Collect the 12-byte payloads of all consecutive packs belonging to
	// the same (id, track) pair.
	while (id == pack->id && track == pack->track) {
#if 0
		dprintf("%u.%u.%u, %u.%u.%u, ", pack->id, pack->track, pack->number,
			pack->double_byte, pack->block_number, pack->character_position);
		for (int32 i = 0; i < 12; i++) {
			if (isprint(pack->text[i]))
				dprintf("%c", pack->text[i]);
			else
				dprintf("-");
		}
		dprintf("\n");
#endif
		if (is_string_id(id)) {
			// TODO: support double byte characters
			if (length + 12 < size) {
				memcpy(buffer + length, pack->text, 12);
				length += 12;
			}
		}

		packLeft -= sizeof(cdtext_pack_data);
		if (packLeft < sizeof(cdtext_pack_data))
			return false;

		lastPack = pack;
		number++;
		pack++;

		// Packs must be strictly sequential; bail out on inconsistency.
		if (pack->number != number)
			return false;
	}

	if (id == pack->id) {
		// The next string of the same type already started in the last
		// pack; trim it off and terminate our string.
		length -= pack->character_position;
		if (length >= size)
			length = size - 1;
		buffer[length] = '\0';

		if (pack->track > lastPack->track + 1) {
			// there is a missing track
			for (int32 i = 0; i < 12; i++) {
				if (lastPack->text[i] == '\0') {
					state = i + (lastPack->double_byte ? 2 : 1);
					break;
				}
			}
		}
	}

	return true;
}
// Debug helper: prints the collected CD-Text data to the kernel log.
static void
dump_cdtext(cdtext &text)
{
	if (text.album)
		dprintf("Album: \"%s\"\n", text.album);
	if (text.artist)
		dprintf("Artist: \"%s\"\n", text.artist);
	for (uint8 i = 0; i < text.track_count; i++) {
		// Per-track artist is optional and printed in parentheses.
		dprintf("Track %02u: \"%s\"%s%s%s\n", i + 1, text.titles[i],
			text.artists[i] ? " (" : "", text.artists[i] ? text.artists[i] : "",
			text.artists[i] ? ")" : "");
	}
}
// Debug helper: prints each track's start position and length (in
// minute:second.frame notation) to the kernel log.
static void
dump_toc(scsi_toc_toc *toc)
{
	int32 numTracks = toc->last_track + 1 - toc->first_track;

	for (int32 i = 0; i < numTracks; i++) {
		scsi_toc_track& track = toc->tracks[i];
		scsi_cd_msf& next = toc->tracks[i + 1].start.time;
			// the last track is always lead-out
		scsi_cd_msf& start = toc->tracks[i].start.time;
		scsi_cd_msf length;

		// Track length = next track's start minus this track's start,
		// computed in frames and converted back to MSF.
		uint64 diff = next.minute * kFramesPerMinute
			+ next.second * kFramesPerSecond + next.frame
			- start.minute * kFramesPerMinute
			- start.second * kFramesPerSecond - start.frame;
		length.minute = diff / kFramesPerMinute;
		length.second = (diff % kFramesPerMinute) / kFramesPerSecond;
		length.frame = diff % kFramesPerSecond;

		dprintf("%02u. %02u:%02u.%02u (length %02u:%02u.%02u)\n",
			track.track_number, start.minute, start.second, start.frame,
			length.minute, length.second, length.frame);
	}
}
// Reads \a count audio frames starting at \a firstFrame into \a buffer
// via B_SCSI_READ_CD. On a read failure the transfer size is reduced
// (eventually to single frames) and retried; returns B_OK on success or
// the errno of a failed single-frame read.
static status_t
read_frames(int fd, off_t firstFrame, uint8 *buffer, size_t count)
{
	size_t framesLeft = count;

	while (framesLeft > 0) {
		// If the initial count was >= 32, and not a multiple of 8, and the
		// ioctl fails, we switch to reading 8 frames at a time. However the
		// last read can read between 1 and 7 frames only, to not overflow
		// the buffer.
		count = std::min(count, framesLeft);

		// Addresses and lengths are expressed in MSF (minute/second/frame).
		scsi_read_cd read;
		read.start_m = firstFrame / kFramesPerMinute;
		read.start_s = (firstFrame / kFramesPerSecond) % 60;
		read.start_f = firstFrame % kFramesPerSecond;

		read.length_m = count / kFramesPerMinute;
		read.length_s = (count / kFramesPerSecond) % 60;
		read.length_f = count % kFramesPerSecond;

		read.buffer_length = count * kFrameSize;
		read.buffer = (char *)buffer;
		read.play = false;

		if (ioctl(fd, B_SCSI_READ_CD, &read, sizeof(scsi_read_cd)) < 0) {
			// drive couldn't read data - try again to read with a smaller block size
			if (count == 1)
				return errno;

			if (count >= 32)
				count = 8;
			else
				count = 1;
			continue;
		}

		buffer += count * kFrameSize;
		framesLeft -= count;
		firstFrame += count;
	}

	return B_OK;
}
// Issues a raw SCSI READ TOC/PMA/ATIP command for the given \a track and
// response \a format, filling \a buffer with up to \a bufferSize bytes.
// Returns B_OK on success, B_NO_MEMORY if the sense buffer could not be
// allocated, or B_ERROR on any command failure.
static status_t
read_table_of_contents(int fd, uint32 track, uint8 format, uint8 *buffer,
	size_t bufferSize)
{
	raw_device_command raw;
	uint8 *senseData = (uint8 *)malloc(kSenseSize);
	if (senseData == NULL)
		return B_NO_MEMORY;

	memset(&raw, 0, sizeof(raw_device_command));
	memset(senseData, 0, kSenseSize);
	memset(buffer, 0, bufferSize);

	// Build the 10-byte READ TOC command in place (SCSI MMC).
	scsi_cmd_read_toc &toc = *(scsi_cmd_read_toc*)&raw.command;
	toc.opcode = SCSI_OP_READ_TOC;
	toc.time = 1;
	toc.format = format;
	toc.track = track;
	toc.allocation_length = B_HOST_TO_BENDIAN_INT16(bufferSize);

	raw.command_length = 10;
	raw.flags = B_RAW_DEVICE_DATA_IN | B_RAW_DEVICE_REPORT_RESIDUAL
		| B_RAW_DEVICE_SHORT_READ_VALID;
	raw.scsi_status = 0;
	raw.cam_status = 0;
	raw.data = buffer;
	raw.data_length = bufferSize;
	raw.timeout = 10000000LL;	// 10 secs
	raw.sense_data = senseData;
	// BUG FIX: this previously passed sizeof(kSenseSize) — the size of the
	// uint32 constant itself (4 bytes) — instead of the actual size of the
	// allocated sense buffer.
	raw.sense_data_length = kSenseSize;

	status_t status = B_ERROR;
	if (ioctl(fd, B_RAW_DEVICE_COMMAND, &raw, sizeof(raw)) == 0
		&& raw.scsi_status == 0 && raw.cam_status == 1) {
		// NOTE(review): cam_status == 1 presumably means CAM_REQ_CMP
		// (request completed) — confirm against the CAM headers.
		status = B_OK;
	}

	free(senseData);
	return status;
}
// #pragma mark - exported functions
// Reads and parses the disc's CD-Text data into \a cdtext.
// Returns B_OK when at least an artist or album (and a title for every
// counted track) could be extracted, B_NO_MEMORY on allocation failure,
// or B_ERROR when no usable CD-Text is present.
status_t
read_cdtext(int fd, struct cdtext &cdtext)
{
	uint8 *buffer = (uint8 *)malloc(kBufferSize);
	if (buffer == NULL)
		return B_NO_MEMORY;

	// do it twice, just in case...
	// (at least my CD-ROM sometimes returned broken data on first try)
	read_table_of_contents(fd, 1, SCSI_TOC_FORMAT_CD_TEXT, buffer,
		kBufferSize);
	if (read_table_of_contents(fd, 1, SCSI_TOC_FORMAT_CD_TEXT, buffer,
			kBufferSize) != B_OK) {
		free(buffer);
		return B_ERROR;
	}

	scsi_toc_general *header = (scsi_toc_general *)buffer;

	// data_length is big-endian and includes the 2 bytes after itself.
	uint32 packLength = B_BENDIAN_TO_HOST_INT16(header->data_length) - 2;
	cdtext_pack_data *pack = (cdtext_pack_data *)(header + 1);
	cdtext_pack_data *lastPack = NULL;
	uint8 state = 0;
	uint8 track = 0;
	uint8 id = 0;
	char text[256];

	// TODO: determine encoding!

	// Walk all packs; parse_pack_data() keeps its own position state in
	// pack/packLength/lastPack/state between calls.
	while (true) {
		size_t length = sizeof(text);

		if (!parse_pack_data(pack, packLength, lastPack, id, track,
				state, text, length))
			break;

		switch (id) {
			case kTrackID:
				// track 0 carries the album title, tracks 1..n the titles
				if (track == 0) {
					if (cdtext.album == NULL)
						cdtext.album = to_utf8(text);
				} else if (track <= kMaxTracks) {
					if (cdtext.titles[track - 1] == NULL)
						cdtext.titles[track - 1] = to_utf8(text);
					if (track > cdtext.track_count)
						cdtext.track_count = track;
				}
				break;

			case kArtistID:
				if (track == 0) {
					if (cdtext.artist == NULL)
						cdtext.artist = to_utf8(text);
				} else if (track <= kMaxTracks) {
					if (cdtext.artists[track - 1] == NULL)
						cdtext.artists[track - 1] = to_utf8(text);
				}
				break;

			default:
				if (is_string_id(id))
					dprintf("UNKNOWN %u: \"%s\"\n", id, text);
				break;
		}
	}

	free(buffer);

	// Reject the data unless we got at least an artist or album, and a
	// title for every track counted.
	if (cdtext.artist == NULL && cdtext.album == NULL)
		return B_ERROR;

	for (int i = 0; i < cdtext.track_count; i++) {
		if (cdtext.titles[i] == NULL)
			return B_ERROR;
	}

	sanitize_string(cdtext.artist);
	sanitize_album(cdtext);
	sanitize_titles(cdtext);
	correct_case(cdtext);

	dump_cdtext(cdtext);
	return B_OK;
}
// Reads the disc's table of contents into \a toc (at most \a length
// bytes) and validates/clamps its fields so later code can trust them.
status_t
read_table_of_contents(int fd, scsi_toc_toc *toc, size_t length)
{
	status_t status = read_table_of_contents(fd, 1, SCSI_TOC_FORMAT_TOC,
		(uint8*)toc, length);
	if (status < B_OK)
		return status;

	// make sure the values in the TOC make sense

	int32 lastTrack = toc->last_track + 1 - toc->first_track;
	// data_length excludes the 2 bytes of the field itself.
	size_t dataLength = B_BENDIAN_TO_HOST_INT16(toc->data_length) + 2;
	if (dataLength < sizeof(scsi_toc_toc) || lastTrack <= 0)
		return B_BAD_DATA;

	if (length > dataLength)
		length = dataLength;

	length -= sizeof(scsi_toc_general);

	// Clamp last_track if the device reported more tracks than the
	// buffer actually holds track descriptors for.
	if (lastTrack * sizeof(scsi_toc_track) > length)
		toc->last_track = length / sizeof(scsi_toc_track) + toc->first_track;

	dump_toc(toc);
	return B_OK;
}
// Satisfies a read of \a length bytes at byte \a offset of the CD-DA
// audio data, copying to the (possibly userland) \a data pointer.
// \a buffer of \a bufferSize bytes holds previously-read data starting at
// byte \a bufferOffset (pass a negative bufferOffset for "no cached
// data"); it is reused as the scratch buffer for new device reads, and
// reads never go past \a endFrame.
status_t
read_cdda_data(int fd, off_t endFrame, off_t offset, void *data, size_t length,
	off_t bufferOffset, void *buffer, size_t bufferSize)
{
	// First, serve whatever part of the request the cached buffer
	// already covers.
	if (bufferOffset >= 0 && bufferOffset <= offset + (off_t)length
		&& bufferOffset + (off_t)bufferSize > offset) {
		if (offset >= bufferOffset) {
			// buffer reaches into the beginning of the request
			off_t dataOffset = offset - bufferOffset;
			size_t bytes = min_c(bufferSize - dataOffset, length);
			if (user_memcpy(data, (uint8 *)buffer + dataOffset, bytes) < B_OK)
				return B_BAD_ADDRESS;

			data = (void *)((uint8 *)data + bytes);
			length -= bytes;
			offset += bytes;
		} else if (offset < bufferOffset
			&& offset + length < bufferOffset + bufferSize) {
			// buffer overlaps at the end of the request
			off_t dataOffset = bufferOffset - offset;
			size_t bytes = length - dataOffset;
			if (user_memcpy((uint8 *)data + dataOffset, buffer, bytes) < B_OK)
				return B_BAD_ADDRESS;

			length -= bytes;
		}
		// we don't handle the case where we would need to split the request
	}

	// Read the remaining range from the device, one buffer-full at a
	// time, clamped to the end of the audio data.
	while (length > 0) {
		off_t frame = offset / kFrameSize;
		uint32 count = bufferSize / kFrameSize;
		if (frame + count > endFrame)
			count = endFrame - frame;

		status_t status = read_frames(fd, frame, (uint8 *)buffer, count);
		if (status < B_OK)
			return status;

		// The requested offset may start mid-frame.
		off_t dataOffset = offset % kFrameSize;
		size_t bytes = bufferSize - dataOffset;
		if (bytes > length)
			bytes = length;

		if (user_memcpy(data, (uint8 *)buffer + dataOffset, bytes) < B_OK)
			return B_BAD_ADDRESS;

		data = (void *)((uint8 *)data + bytes);
		length -= bytes;
		offset += bytes;
	}

	return B_OK;
}
|
jjbrosnan/deephaven-core
|
Plot/src/main/java/io/deephaven/plot/datasets/category/CategoryDataSeriesPartitionedTable.java
|
/*
* Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending
*/
package io.deephaven.plot.datasets.category;
import io.deephaven.base.verify.RequirementFailure;
import io.deephaven.plot.AxesImpl;
import io.deephaven.plot.TableSnapshotSeries;
import io.deephaven.plot.util.ArgumentValidations;
import io.deephaven.plot.util.tables.TableHandle;
import io.deephaven.engine.table.Table;
import java.util.Collection;
/**
* A dataset for table-based categorical data.
*/
public class CategoryDataSeriesPartitionedTable extends AbstractTableBasedCategoryDataSeries
        implements CategoryTableDataSeriesInternal, TableSnapshotSeries {
    private static final long serialVersionUID = 2L;

    // Handle to the underlying table holding the category and value columns.
    private final TableHandle tableHandle;
    // NOTE(review): isInit is never read or written in this class — it may be
    // used via reflection/serialization elsewhere; confirm before removing.
    private transient boolean isInit = false;

    // Column names in the underlying table.
    private final String categoryCol;
    private final String valueCol;

    // Snapshot kernel backing size()/categories()/getValue(); transient, so
    // it is only rebuilt by the constructors.
    // NOTE(review): after Java deserialization this field would be null unless
    // a readObject/init hook elsewhere recreates it — confirm.
    private transient CategoryDataSeriesKernel kernel;

    /**
     * Creates a new CategoryDataSeriesPartitionedTable instance.
     *
     * @param axes {@link AxesImpl} on which this dataset is being plotted
     * @param id data series id
     * @param name series name
     * @param tableHandle holds the underlying table
     * @param categories column in the underlying table containing the categorical data
     * @param values column in the underlying table containing the numerical data
     * @param <T> type of the categorical data
     * @throws RequirementFailure {@code chart}, {@code tableHandle}, {@code categories}, and {@code values} must not be
     *         null
     * @throws RuntimeException {@code categories} column must be either time, char/{@link Character},
     *         {@link Comparable}, or numeric {@code values} column must be numeric
     */
    public <T extends Comparable> CategoryDataSeriesPartitionedTable(
            final AxesImpl axes,
            final int id,
            final Comparable<?> name,
            final TableHandle tableHandle,
            final String categories,
            final String values) {
        super(axes, id, name);
        ArgumentValidations.assertNotNull(axes, "axes", getPlotInfo());
        ArgumentValidations.assertNotNull(tableHandle, "table", getPlotInfo());
        ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(tableHandle.getFinalTableDefinition(),
                categories, "Invalid data type in category column: column=" + categories, getPlotInfo());
        ArgumentValidations.assertIsNumericOrTime(tableHandle.getFinalTableDefinition(), values,
                "Invalid data type in data column: column=" + values, getPlotInfo());

        this.tableHandle = tableHandle;
        this.categoryCol = categories;
        this.valueCol = values;
        // "values" equals this.valueCol here; the copy constructor below uses
        // valueCol directly for the same call.
        this.kernel = new CategoryDataSeriesKernel(categoryCol, values, getPlotInfo());
    }

    /**
     * Creates a copy of a series using a different Axes.
     *
     * @param series series to copy.
     * @param axes new axes to use.
     */
    private CategoryDataSeriesPartitionedTable(final CategoryDataSeriesPartitionedTable series, final AxesImpl axes) {
        super(series, axes);
        this.tableHandle = series.tableHandle;
        this.categoryCol = series.categoryCol;
        this.valueCol = series.valueCol;
        this.kernel = new CategoryDataSeriesKernel(categoryCol, valueCol, getPlotInfo());
    }

    @Override
    public CategoryDataSeriesPartitionedTable copy(AxesImpl axes) {
        return new CategoryDataSeriesPartitionedTable(this, axes);
    }

    // Number of categories currently held by the kernel.
    @Override
    public int size() {
        return kernel.size();
    }

    @Override
    public Collection<Comparable> categories() {
        return kernel.categories();
    }

    // Value plotted for the given category, as tracked by the kernel.
    @Override
    public Number getValue(final Comparable category) {
        return kernel.getValue(category);
    }

    @Override
    public long getCategoryLocation(final Comparable category) {
        return kernel.getCategoryKey(category);
    }

    public TableHandle getTableHandle() {
        return tableHandle;
    }

    @Override
    public String getCategoryCol() {
        return categoryCol;
    }

    @Override
    public String getValueCol() {
        return valueCol;
    }

    @Override
    protected Table getTable() {
        return tableHandle.getTable();
    }
}
|
tomquist/iDroidLayout
|
iDroidLayout/Resources/StateList/IDLResourceStateList.h
|
<reponame>tomquist/iDroidLayout<gh_stars>10-100
//
// IDLResourceStateList.h
// iDroidLayout
//
// Created by <NAME> on 07.12.12.
// Copyright (c) 2012 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
// Represents a parsed Android-style state list resource (selector XML).
@interface IDLResourceStateList : NSObject

// The parsed state items.
// NOTE(review): declared weak — this object does not retain the array;
// confirm that an owner keeps it alive for the lifetime of this object.
@property (weak, nonatomic, readonly) NSArray *items;

// Parses a state list from raw XML data; returns nil-equivalent behavior
// is defined by the implementation (not visible here).
+ (instancetype)createFromXMLData:(NSData *)data;
// Convenience variant that loads the XML from a URL first.
+ (instancetype)createFromXMLURL:(NSURL *)url;

@end
|
jixianu/TheoremJS
|
src/includes/functions/math/generators/sieve.js
|
/**
 * Infinite generator that yields the prime numbers in ascending order,
 * starting from 2. Primality is delegated to `this.isPrime(n)`, so this
 * must be invoked as a method of the library object. Consume lazily
 * (e.g. take the first k values) — iterating it fully never terminates.
 */
* sieve() {
	let n = 2;
	while (true) {
		if (this.isPrime(n)) yield n;
		n++;
	}
}
|
mongonta0716/Core2-for-AWS-IoT-EduKit
|
Factory-Firmware/main/crypto.c
|
/*
* AWS IoT EduKit - Core2 for AWS IoT EduKit
* Factory Firmware v2.2.0
* crypto.c
*
* Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include <stdio.h>
#include <stdlib.h>
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "freertos/semphr.h"
#include "esp_log.h"
#include "core2forAWS.h"
#include "crypto.h"
static const char* TAG = CRYPTO_TAB_NAME;
/*
 * Builds the "ATECC608 Crypto-Auth" tab of the factory-firmware tab view:
 * a title, a short description, and (when the secure element responds)
 * the device's serial number. All LVGL calls are guarded by the GUI mutex.
 */
void display_crypto_tab(lv_obj_t* tv){
    xSemaphoreTake(xGuiSemaphore, portMAX_DELAY); // Takes (blocks) the xGuiSemaphore mutex from being read/written by another task.

    lv_obj_t* crypto_tab = lv_tabview_add_tab(tv, CRYPTO_TAB_NAME);   // Create a tab

    /* Create the main body object and set background within the tab*/
    static lv_style_t bg_style;
    lv_obj_t* crypto_bg = lv_obj_create(crypto_tab, NULL);
    lv_obj_align(crypto_bg, NULL, LV_ALIGN_IN_TOP_LEFT, 16, 36);
    lv_obj_set_size(crypto_bg, 290, 190);
    lv_obj_set_click(crypto_bg, false);
    lv_style_init(&bg_style);
    lv_style_set_bg_color(&bg_style, LV_STATE_DEFAULT, lv_color_make(4, 151, 150));
    lv_obj_add_style(crypto_bg, LV_OBJ_PART_MAIN, &bg_style);

    /* Create the title within the main body object */
    static lv_style_t title_style;
    lv_style_init(&title_style);
    lv_style_set_text_font(&title_style, LV_STATE_DEFAULT, LV_THEME_DEFAULT_FONT_TITLE);
    lv_style_set_text_color(&title_style, LV_STATE_DEFAULT, LV_COLOR_BLACK);
    lv_obj_t* tab_title_label = lv_label_create(crypto_bg, NULL);
    lv_obj_add_style(tab_title_label, LV_OBJ_PART_MAIN, &title_style);
    lv_label_set_static_text(tab_title_label, "ATECC608 Crypto-Auth");
    lv_obj_align(tab_title_label, crypto_bg, LV_ALIGN_IN_TOP_MID, 0, 10);

    /* Create the sensor information label object */
    lv_obj_t* body_label = lv_label_create(crypto_bg, NULL);
    lv_label_set_long_mode(body_label, LV_LABEL_LONG_BREAK);
    lv_label_set_static_text(body_label, "The ATECC608 comes with pre-provisioned static certificates, along with Elliptic Curve Digital Signature Algorithm (ECDSA) sign/verify capability.");
    lv_obj_set_width(body_label, 252);
    lv_obj_align(body_label, crypto_bg, LV_ALIGN_IN_TOP_LEFT, 20, 40);

    static lv_style_t body_style;
    lv_style_init(&body_style);
    lv_style_set_text_color(&body_style, LV_STATE_DEFAULT, LV_COLOR_BLACK);
    lv_obj_add_style(body_label, LV_OBJ_PART_MAIN, &body_style);

    xSemaphoreGive(xGuiSemaphore);

    static const uint16_t device_serial_len = ATCA_SERIAL_NUM_SIZE * 2 + 1;
    char* device_serial = heap_caps_malloc(device_serial_len, MALLOC_CAP_DEFAULT | MALLOC_CAP_SPIRAM); // Dynamically allocate enough memory to store the serial number string. ATCA_SERIAL_NUM_SIZE is the size of the hexadecimal serial number, which has two bytes per value and a string needs a trailing null terminator at the end.
    // BUG FIX: guard against allocation failure before handing the buffer
    // to the secure element driver.
    if (device_serial == NULL) {
        ESP_LOGE(TAG, "Failed to allocate memory for the device serial string");
        return;
    }

    ATCA_STATUS ret = Atecc608_GetSerialString(device_serial); // Gets the serial number. If successful, it will return ATCA_SUCCESS, which has a value of 0.
    if (ret == ATCA_SUCCESS){
        char sn_label_text[device_serial_len + 9];
        snprintf(sn_label_text, device_serial_len + 9, "Serial # %s", device_serial);

        xSemaphoreTake(xGuiSemaphore, portMAX_DELAY);
        lv_obj_t* serial_label = lv_label_create(crypto_bg, NULL);
        lv_label_set_text(serial_label, sn_label_text);
        lv_label_set_align(serial_label, LV_LABEL_ALIGN_CENTER);
        lv_obj_align(serial_label, crypto_bg, LV_ALIGN_IN_BOTTOM_MID, 0, -14);
        lv_obj_add_style(serial_label, LV_OBJ_PART_MAIN, &body_style);
        xSemaphoreGive(xGuiSemaphore);
    } else{
        ESP_LOGE(TAG, "Secure element failure. Error code: %d", ret);
    }

    // BUG FIX: the buffer was previously freed only on the success path,
    // leaking it whenever the secure element read failed.
    heap_caps_free(device_serial);
}
|
janstol/advent-of-code-2020
|
test/day14/solution_test.rb
|
<filename>test/day14/solution_test.rb
# frozen_string_literal: true
require 'test_helper'
module Day14
  # Tests for the Day 14 solution ("Docking Data", Advent of Code 2020).
  # The example cases use small fixture inputs; the "solution" cases pin
  # the answers computed from the real puzzle input.
  class SolutionTest < Minitest::Test
    def setup
      # Default solution reads the real puzzle input.
      @solution = Solution.new
    end

    def test_day14_part01_example
      @solution = Solution.new 'test/day14/input.txt'
      assert_equal 165, @solution.solve_part01
    end

    def test_day14_part01_solution
      assert_equal 6631883285184, @solution.solve_part01
    end

    def test_day14_part02_example
      # Part 2 uses a separate example input.
      @solution = Solution.new 'test/day14/input2.txt'
      assert_equal 208, @solution.solve_part02
    end

    def test_day14_part02_solution
      assert_equal 3161838538691, @solution.solve_part02
    end
  end
end
|
jbzdarkid/TwitchLink
|
Download/Downloader/FFmpeg/Config.py
|
from Core.Config import Config as CoreConfig, _P
class Config:
    """Configuration constants for the FFmpeg download backend."""

    # Absolute path to the bundled ffmpeg executable inside the
    # application's dependencies directory.
    PATH = _P(CoreConfig.DEPENDENCIES_ROOT, "ffmpeg.exe")

    # Seconds to wait for ffmpeg to exit before force-killing the process.
    KILL_TIMEOUT = 10
|
danielogen/msc_research
|
selected projects/desktop/SweetHome3D-5.6-src/src/com/eteks/sweethome3d/io/DefaultUserPreferences.java
|
/*
* DefaultUserPreferences.java 15 mai 2006
*
* Sweet Home 3D, Copyright (c) 2006 <NAME> / eTeks <<EMAIL>>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.eteks.sweethome3d.io;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import com.eteks.sweethome3d.model.Content;
import com.eteks.sweethome3d.model.FurnitureCatalog;
import com.eteks.sweethome3d.model.HomeDescriptor;
import com.eteks.sweethome3d.model.LengthUnit;
import com.eteks.sweethome3d.model.Library;
import com.eteks.sweethome3d.model.PatternsCatalog;
import com.eteks.sweethome3d.model.RecorderException;
import com.eteks.sweethome3d.model.TextureImage;
import com.eteks.sweethome3d.model.TexturesCatalog;
import com.eteks.sweethome3d.model.UserPreferences;
import com.eteks.sweethome3d.tools.ResourceURLContent;
import com.eteks.sweethome3d.tools.URLContent;
/**
* Default user preferences.
* @author <NAME>
*/
public class DefaultUserPreferences extends UserPreferences {
  /**
   * Creates default user preferences read from resource files in the default language.
   */
  public DefaultUserPreferences() {
    this(true, null);
  }

  /**
   * Creates default user preferences read from resource files.
   * @param readCatalogs if <code>false</code> furniture and texture catalog won't be read
   * @param localizedPreferences preferences used to read localized resource files
   */
  DefaultUserPreferences(boolean readCatalogs,
                         UserPreferences localizedPreferences) {
    if (localizedPreferences == null) {
      localizedPreferences = this;
    } else {
      setLanguage(localizedPreferences.getLanguage());
    }
    // Read default furniture catalog
    setFurnitureCatalog(readCatalogs
        ? new DefaultFurnitureCatalog(localizedPreferences, (File)null)
        : new FurnitureCatalog());
    // Read default textures catalog
    setTexturesCatalog(readCatalogs
        ? new DefaultTexturesCatalog(localizedPreferences, (File)null)
        : new TexturesCatalog());
    // Build default patterns catalog
    List<TextureImage> patterns = new ArrayList<TextureImage>();
    patterns.add(new DefaultPatternTexture("foreground"));
    patterns.add(new DefaultPatternTexture("reversedHatchUp"));
    patterns.add(new DefaultPatternTexture("reversedHatchDown"));
    patterns.add(new DefaultPatternTexture("reversedCrossHatch"));
    patterns.add(new DefaultPatternTexture("background"));
    patterns.add(new DefaultPatternTexture("hatchUp"));
    patterns.add(new DefaultPatternTexture("hatchDown"));
    patterns.add(new DefaultPatternTexture("crossHatch"));
    PatternsCatalog patternsCatalog = new PatternsCatalog(patterns);
    setPatternsCatalog(patternsCatalog);
    // Read other preferences from resource bundle
    setFurnitureCatalogViewedInTree(Boolean.parseBoolean(
        localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "furnitureCatalogViewedInTree")));
    setNavigationPanelVisible(Boolean.parseBoolean(localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "navigationPanelVisible")));
    setAerialViewCenteredOnSelectionEnabled(Boolean.parseBoolean(getOptionalLocalizedString(localizedPreferences, "aerialViewCenteredOnSelectionEnabled", "false")));
    setObserverCameraSelectedAtChange(Boolean.parseBoolean(getOptionalLocalizedString(localizedPreferences, "observerCameraSelectedAtChange", "true")));
    setUnit(LengthUnit.valueOf(localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "unit").toUpperCase(Locale.ENGLISH)));
    setRulersVisible(Boolean.parseBoolean(localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "rulersVisible")));
    setGridVisible(Boolean.parseBoolean(localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "gridVisible")));
    // Allow furnitureViewedFromTop and roomFloorColoredOrTextured to be different according to the running OS
    String osName = System.getProperty("os.name");
    setFurnitureViewedFromTop(Boolean.parseBoolean(getOptionalLocalizedString(localizedPreferences, "furnitureViewedFromTop." + osName,
        localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "furnitureViewedFromTop"))));
    setFurnitureModelIconSize(Integer.parseInt(getOptionalLocalizedString(localizedPreferences, "furnitureModelIconSize", "128")));
    setFloorColoredOrTextured(Boolean.parseBoolean(getOptionalLocalizedString(localizedPreferences, "roomFloorColoredOrTextured." + osName,
        localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "roomFloorColoredOrTextured"))));
    setWallPattern(patternsCatalog.getPattern(localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "wallPattern")));
    // NOTE(review): this reads the "wallPattern" key a second time rather than
    // a "newWallPattern" key, and getLocalizedString throws instead of
    // returning null, which makes the null check below look dead — confirm
    // the intended resource key.
    String newWallPattern = localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "wallPattern");
    if (newWallPattern != null) {
      setNewWallPattern(patternsCatalog.getPattern(newWallPattern));
    }
    setNewWallThickness(Float.parseFloat(localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "newWallThickness")));
    setNewWallHeight(Float.parseFloat(localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "newHomeWallHeight")));
    setNewWallBaseboardThickness(Float.parseFloat(getOptionalLocalizedString(localizedPreferences, "newWallBaseboardThickness", "1")));
    setNewWallBaseboardHeight(Float.parseFloat(getOptionalLocalizedString(localizedPreferences, "newWallBaseboardlHeight", "7")));
    setNewFloorThickness(Float.parseFloat(getOptionalLocalizedString(localizedPreferences, "newFloorThickness", "12")));
    setCheckUpdatesEnabled(Boolean.parseBoolean(getOptionalLocalizedString(localizedPreferences, "checkUpdatesEnabled", "false")));
    setAutoSaveDelayForRecovery(Integer.parseInt(getOptionalLocalizedString(localizedPreferences, "autoSaveDelayForRecovery", "0")));
    setCurrency(getOptionalLocalizedString(localizedPreferences, "currency", null));
    // Load comma-separated auto-completion lists for each editable property.
    for (String property : new String [] {"LevelName", "HomePieceOfFurnitureName", "RoomName", "LabelText"}) {
      String autoCompletionStringsList = getOptionalLocalizedString(localizedPreferences, "autoCompletionStrings#" + property, null);
      if (autoCompletionStringsList != null) {
        String [] autoCompletionStrings = autoCompletionStringsList.trim().split(",");
        if (autoCompletionStrings.length > 0) {
          for (int i = 0; i < autoCompletionStrings.length; i++) {
            autoCompletionStrings [i] = autoCompletionStrings [i].trim();
          }
          setAutoCompletionStrings(property, Arrays.asList(autoCompletionStrings));
        }
      }
    }
    // Collect numbered home examples (homeExampleName#1, #2, ...) until the
    // first missing key; getLocalizedString throwing is the loop's exit.
    List<HomeDescriptor> homeExamples = new ArrayList<HomeDescriptor>();
    int i = 0;
    while (true) {
      try {
        String homeExampleName = localizedPreferences.getLocalizedString(DefaultUserPreferences.class, "homeExampleName#" + ++i);
        homeExamples.add(new HomeDescriptor(homeExampleName,
            getContent(localizedPreferences, "homeExampleContent#" + i, false),
            getContent(localizedPreferences, "homeExampleIcon#" + i, true)));
      } catch (IllegalArgumentException ex) {
        break;
      }
    }
    setHomeExamples(homeExamples);
  }

  /**
   * Returns the content of matching the value of the given content key.
   */
  private Content getContent(UserPreferences localizedPreferences,
                             String contentKey,
                             boolean optional) {
    String contentFile = optional
        ? getOptionalLocalizedString(localizedPreferences, contentKey, null)
        : localizedPreferences.getLocalizedString(DefaultUserPreferences.class, contentKey);
    if (optional && contentFile == null) {
      return null;
    }
    try {
      // Try first to interpret contentFile as an absolute URL
      return new URLContent(new URL(contentFile));
    } catch (MalformedURLException ex) {
      // Otherwise find if it's a resource
      return new ResourceURLContent(DefaultFurnitureCatalog.class, contentFile);
    }
  }

  /**
   * Returns the localized string for the given key, or
   * <code>defaultValue</code> when the key is missing from the bundle.
   */
  private String getOptionalLocalizedString(UserPreferences localizedPreferences,
                                            String resourceKey,
                                            String defaultValue) {
    try {
      return localizedPreferences.getLocalizedString(DefaultUserPreferences.class, resourceKey);
    } catch (IllegalArgumentException ex) {
      return defaultValue;
    }
  }

  /**
   * Throws an exception because default user preferences can't be written
   * with this class.
   */
  @Override
  public void write() throws RecorderException {
    throw new UnsupportedOperationException("Default user preferences can't be written");
  }

  /**
   * Throws an exception because default user preferences can't manage language libraries.
   */
  @Override
  public boolean languageLibraryExists(String name) throws RecorderException {
    throw new UnsupportedOperationException("Default user preferences can't manage language libraries");
  }

  /**
   * Throws an exception because default user preferences can't manage additional language libraries.
   */
  @Override
  public void addLanguageLibrary(String name) throws RecorderException {
    throw new UnsupportedOperationException("Default user preferences can't manage language libraries");
  }

  /**
   * Throws an exception because default user preferences can't manage additional furniture libraries.
   */
  @Override
  public boolean furnitureLibraryExists(String name) throws RecorderException {
    throw new UnsupportedOperationException("Default user preferences can't manage furniture libraries");
  }

  /**
   * Throws an exception because default user preferences can't manage additional furniture libraries.
   */
  @Override
  public void addFurnitureLibrary(String name) throws RecorderException {
    throw new UnsupportedOperationException("Default user preferences can't manage furniture libraries");
  }

  /**
   * Throws an exception because default user preferences can't manage textures libraries.
   */
  @Override
  public boolean texturesLibraryExists(String name) throws RecorderException {
    throw new UnsupportedOperationException("Default user preferences can't manage textures libraries");
  }

  /**
   * Throws an exception because default user preferences can't manage additional textures libraries.
   */
  @Override
  public void addTexturesLibrary(String name) throws RecorderException {
    throw new UnsupportedOperationException("Default user preferences can't manage textures libraries");
  }

  /**
   * Throws an exception because default user preferences don't support libraries.
   * @since 4.0
   */
  @Override
  public List<Library> getLibraries() {
    throw new UnsupportedOperationException();
  }
}
|
beeradb/ddev
|
pkg/plugins/platform/router.go
|
package platform
import (
"bytes"
"fmt"
"html/template"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/drud/ddev/pkg/dockerutil"
"github.com/drud/ddev/pkg/util"
"github.com/drud/ddev/pkg/version"
"github.com/fatih/color"
)
const routerProjectName = "ddev-router"
// RouterComposeYAMLPath returns the full filepath to the routers docker-compose yaml file.
// RouterComposeYAMLPath returns the full filepath to the router's
// docker-compose yaml file inside the global ddev directory.
func RouterComposeYAMLPath() string {
	return path.Join(util.GetGlobalDdevDir(), "router-compose.yaml")
}
// StopRouter stops the local router if there are no ddev containers running.
// StopRouter stops the local router if there are no ddev containers running.
func StopRouter() error {
	containersRunning, err := ddevContainersRunning()
	if err != nil {
		return err
	}
	// Leave the router up while any ddev project containers still depend on it.
	if containersRunning {
		return nil
	}
	return dockerutil.ComposeCmd([]string{RouterComposeYAMLPath()}, "-p", routerProjectName, "down", "-v")
}
// StartDdevRouter ensures the router is running.
func StartDdevRouter() error {
exposedPorts := determineRouterPorts()
dest := RouterComposeYAMLPath()
routerdir := filepath.Dir(dest)
err := os.MkdirAll(routerdir, 0755)
if err != nil {
return fmt.Errorf("unable to create directory for ddev router: %s", err)
}
var doc bytes.Buffer
f, ferr := os.Create(dest)
if ferr != nil {
return ferr
}
defer util.CheckClose(f)
templ := template.New("compose template")
templ, err = templ.Parse(DdevRouterTemplate)
if err != nil {
return err
}
templateVars := map[string]interface{}{
"router_image": version.RouterImage,
"router_tag": version.RouterTag,
"ports": exposedPorts,
}
err = templ.Execute(&doc, templateVars)
util.CheckErr(err)
_, err = f.WriteString(doc.String())
util.CheckErr(err)
// run docker-compose up -d in the newly created directory
err = dockerutil.ComposeCmd([]string{dest}, "-p", routerProjectName, "up", "-d")
if err != nil {
return fmt.Errorf("failed to start ddev-router: %v", err)
}
fmt.Println("Starting service health checks...")
// ensure we have a happy router
label := map[string]string{"com.docker.compose.service": "ddev-router"}
err = dockerutil.ContainerWait(containerWaitTimeout, label)
if err != nil {
return fmt.Errorf("ddev-router failed to become ready: %v", err)
}
return nil
}
// PrintRouterStatus outputs router status and warning if not
// running or healthy, as applicable.
// PrintRouterStatus outputs router status and warning if not
// running or healthy, as applicable.
func PrintRouterStatus() string {
	badRouter := "\nThe router is not currently running. Your sites are likely inaccessible at this time.\nTry running 'ddev start' on a site to recreate the router."

	routerLabel := map[string]string{"com.docker.compose.service": "ddev-router"}
	var status string
	if container, err := dockerutil.FindContainerByLabels(routerLabel); err != nil {
		status = color.RedString(SiteNotFound) + badRouter
	} else {
		status = dockerutil.GetContainerHealth(container)
	}

	// Map the raw docker health string onto a user-facing status line.
	switch status {
	case "healthy":
		status = color.CyanString(SiteRunning)
	case "exited":
		status = color.RedString(SiteStopped) + badRouter
	default:
		status = color.RedString(status) + badRouter
	}

	return fmt.Sprintf("\nDDEV ROUTER STATUS: %v", status)
}
// determineRouterPorts returns a list of port mappings retrieved from running site
// containers defining VIRTUAL_PORT env var
func determineRouterPorts() []string {
var routerPorts []string
containers, err := dockerutil.GetDockerContainers(false)
if err != nil {
log.Fatal("failed to retreive containers for determining port mappings", err)
}
// loop through all containers with site-name label
for _, container := range containers {
if _, ok := container.Labels["com.ddev.site-name"]; ok {
var exposePorts []string
httpPorts := dockerutil.GetContainerEnv("HTTP_EXPOSE", container)
if httpPorts != "" {
ports := strings.Split(httpPorts, ",")
exposePorts = append(exposePorts, ports...)
}
for _, exposePort := range exposePorts {
// ports defined as hostPort:containerPort allow for router to configure upstreams
// for containerPort, with server listening on hostPort. exposed ports for router
// should be hostPort:hostPort so router can determine what port a request came from
// and route the request to the correct upstream
if strings.Contains(exposePort, ":") {
ports := strings.Split(exposePort, ":")
exposePort = ports[0]
}
var match bool
for _, routerPort := range routerPorts {
if exposePort == routerPort {
match = true
}
}
// if no match, we are adding a new port mapping
if !match {
routerPorts = append(routerPorts, exposePort)
}
}
}
}
return routerPorts
}
|
ThomasMarches/my_rpg
|
src/options/components/sound_indicator/create_sound_indicator_extend_from_conf.c
|
/*
** EPITECH PROJECT, 2020
** MUL_my_rpg_2019
** File description:
** create_sound_indicator_extend_from_conf
*/
#include "options_menu/sound_indicator.h"
#include <stdlib.h>
/* Reads an int value for `key` from the JSON config; -1 if absent or not INT. */
static int get_int_from_conf(json_object_t *js, const char *key)
{
    json_value_t *value = json_get_element_by_key(js, key);

    if (value == NULL || value->value_type != INT)
        return (-1);
    return (*((int *) value->value));
}

/*
** Builds the sound-indicator extend struct from the JSON configuration and
** attaches it to `object`. Returns false on allocation failure or when a
** required config value ("volume_unit" / "bar_width") is missing or invalid.
*/
bool rpg_options_sound_indicator_create_extend_from_conf(game_object_t *object,
    json_object_t *js)
{
    rpg_options_sound_indicator_t *indicator =
        malloc(sizeof(rpg_options_sound_indicator_t));

    if (indicator == NULL)
        return (false);
    indicator->volume_unit = get_int_from_conf(js, "volume_unit");
    indicator->bar_width = get_int_from_conf(js, "bar_width");
    indicator->default_x = object->pos.x;
    if (indicator->volume_unit < 0 || indicator->bar_width < 0) {
        free(indicator);
        return (false);
    }
    object->extend = indicator;
    object->free_extend = &free;
    return (true);
}
|
wastevensv/flipper
|
carbon/atsam4s/spi.c
|
#include <flipper/spi.h>
/* Initializes the ATSAM4S SPI peripheral as bus master for the user chip
select. The register writes below are order-critical: clock enable, pin
handoff to peripheral A, double reset (errata), interrupt/mode setup, PDC
disable, then final enable. Returns lf_success. */
int spi_configure() {
/* Enable the SPI clock. */
PMC->PMC_PCER0 = (1 << ID_SPI);
/* Create a pinmask for the peripheral pins. */
const unsigned int SPI_PIN_MASK = (PIO_PA14A_SPCK | PIO_PA13A_MOSI | PIO_PA12A_MISO | PIO_PA31A_NPCS1 | PIO_PA11A_NPCS0);
/* Disable PIOA interrupts on the peripheral pins. */
PIOA->PIO_IDR = SPI_PIN_MASK;
/* Disable the peripheral pins from use by the PIOA. */
PIOA->PIO_PDR = SPI_PIN_MASK;
/* Hand control of the peripheral pins to peripheral A (ABCDSR[1:0] = 00). */
PIOA->PIO_ABCDSR[0] &= ~SPI_PIN_MASK;
PIOA->PIO_ABCDSR[1] &= ~SPI_PIN_MASK;
/* Reset the SPI. */
SPI->SPI_CR = SPI_CR_SWRST;
/* Reset the SPI again. Eratta. */
SPI->SPI_CR = SPI_CR_SWRST;
/* Enable the mode fault interrupt. */
SPI->SPI_IER = SPI_IER_MODF;
/* Enter master mode, no mode fault detection, activate user SPI peripheral. */
SPI->SPI_MR = SPI_MR_PCS(USER_PCS) | SPI_MR_MSTR | SPI_MR_MODFDIS;
/* Configure the user SPI peripheral. 8 bits per transfer. SPI mode 3. SCK = MCK / 8. */
SPI->SPI_CSR[USER_PCS] = SPI_CSR_SCBR(8) | SPI_CSR_DLYBCT(1) | SPI_CSR_BITS_8_BIT | SPI_CSR_CPOL | SPI_CSR_CSAAT;
/* Disable the PDC channels. */
SPI->SPI_PTCR = SPI_PTCR_TXTDIS | SPI_PTCR_RXTDIS;
/* Clear the secondary PDC channel. */
SPI->SPI_TNCR = 0;
SPI->SPI_TNPR = (uintptr_t)(NULL);
/* Enable the SPI interrupt. */
NVIC_EnableIRQ(SPI_IRQn);
/* Enable the SPI. */
SPI->SPI_CR = SPI_CR_SPIEN;
return lf_success;
}
/* Enables the SPI peripheral. */
void spi_enable(void) {
SPI->SPI_CR = SPI_CR_SPIEN;
}
/* Disables the SPI peripheral. */
void spi_disable(void) {
SPI->SPI_CR = SPI_CR_SPIDIS;
}
/* Returns nonzero (the raw TXEMPTY flag, not necessarily 1) when the
transmit path is fully drained. */
uint8_t spi_ready(void) {
return (SPI->SPI_SR & SPI_SR_TXEMPTY);
}
/* Deasserts chip select after the current transfer (LASTXFER). */
void spi_end(void) {
SPI->SPI_CR |= SPI_CR_LASTXFER;
}
/* Transmits one byte, blocking until the data register is free again.
NOTE(review): TDR is written before TDRE is checked; assumes the register
was empty on entry — confirm against usage pattern. */
void spi_put(uint8_t byte) {
/* Transmit the byte. */
SPI->SPI_TDR = byte;
/* Wait until data has been transmitted. */
while (!(SPI->SPI_SR & SPI_SR_TDRE));
}
/* Receives one byte by clocking out a dummy 0xff and blocking until RDRF. */
uint8_t spi_get(void) {
/* Write a dummy byte. */
spi_put(0xff);
/* Wait until data has been received. */
while (!(SPI->SPI_SR & SPI_SR_RDRF));
/* Return the received byte. */
return SPI->SPI_RDR;
}
/* Transmits `length` bytes from `source` via the PDC DMA channel, blocking
until the transfer completes. Returns lf_success. */
int spi_push(void *source, uint32_t length) {
/* Set the transmission length and destination pointer. */
SPI->SPI_TCR = length;
SPI->SPI_TPR = (uintptr_t)(source);
/* Enable the PDC transmitter to start the transmission. */
SPI->SPI_PTCR = SPI_PTCR_TXTEN;
/* Wait until the transfer has finished. */
while (!(SPI->SPI_SR & SPI_SR_ENDTX));
/* Disable the PDC transmitter. */
SPI->SPI_PTCR = SPI_PTCR_TXTDIS;
return lf_success;
}
/* Receives `length` bytes into `destination` via the PDC DMA channel. The
CPU keeps feeding dummy bytes into TDR so the master generates clocks for
the slave to respond to. Blocking; returns lf_success. */
int spi_pull(void *destination, uint32_t length) {
/* Set the transmission length and destination pointer. */
SPI->SPI_RCR = length;
SPI->SPI_RPR = (uintptr_t)(destination);
/* Enable the receiver. */
SPI->SPI_PTCR = SPI_PTCR_RXTEN;
/* If defined, usart_pull will not use interrupts. */
/* Wait until the transfer has finished, clocking dummy bytes out meanwhile. */
while (!(SPI->SPI_SR & SPI_SR_ENDRX)) {
SPI->SPI_TDR = 0x00;
while (!(SPI->SPI_SR & SPI_SR_TDRE));
}
SPI->SPI_CR |= SPI_CR_LASTXFER;
/* Disable the PDC receiver. */
SPI->SPI_PTCR = SPI_PTCR_RXTDIS;
return lf_success;
}
/* Interrupt handler for this peripheral. */
void spi_isr(void) {
/* Falls through if a mode fault has occurred. This fires when the masters drive the slave out of sync. */
if (SPI->SPI_SR & SPI_SR_MODF) {
/* Re-enable the SPI bus. */
SPI->SPI_CR = SPI_CR_SPIEN;
}
}
|
Picolab/ManifoldRewrite
|
src/components/Cards/DropTargetCard.js
|
<filename>src/components/Cards/DropTargetCard.js
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { DropTarget } from 'react-dnd';
import DragTypes from '../DragTypes';
import Card from './Card';
import './cardStyles.css';
/**
 * react-dnd drop-target spec. `drop` fires when a dragged card is released
 * over this card and delegates the pairing to the `handleDrop` prop.
 * Fix: removed a leftover debug `console.log` from the production drop path.
 */
const dropCardSpec = {
  drop(props, monitor, component) {
    // A nested drop target already handled this drop; don't handle it twice.
    if (monitor.didDrop()) {
      return;
    }
    const draggedCard = monitor.getItem();
    const thisCard = {
      cardType: props.cardType
      // NOTE(review): a spread of props.object was commented out upstream —
      // confirm whether more card fields should be forwarded here.
    };
    props.handleDrop(thisCard, draggedCard);
  }
};
// Injects the drop-target connector function and hover state as props.
const collect = (connect, monitor) => ({
  connectDropTarget: connect.dropTarget(),
  isOver: monitor.isOver()
});
class DropTargetCard extends Component {
render() {
const { isOver, connectDropTarget } = this.props
return connectDropTarget(
<div className={"dropTargetCard " + (isOver ? "hoveredTarget" : "nonHoveredTarget")}>
<Card
cardType={this.props.cardType}
picoID={this.props.picoID}
overlay={{ isActive: isOver}}/>
</div>
)
}
}
// Runtime prop validation: all three props are required by the drop spec/render.
DropTargetCard.propTypes = {
picoID: PropTypes.string.isRequired,
cardType: PropTypes.string.isRequired,
handleDrop: PropTypes.func.isRequired
}
// Wrap the component in the react-dnd drop-target HOC for Card drag items.
export default DropTarget(DragTypes.Card, dropCardSpec, collect)(DropTargetCard);
|
hua-lun/main
|
src/main/java/dukecooks/model/util/DiarySampleDataUtil.java
|
package dukecooks.model.util;
import dukecooks.model.diary.DiaryRecords;
import dukecooks.model.diary.ReadOnlyDiary;
import dukecooks.model.diary.components.Diary;
import dukecooks.model.diary.components.DiaryName;
/**
* Contains utility methods for populating {@code DiaryRecords} with sample data.
*/
public class DiarySampleDataUtil {

    /** Names used to seed the sample diaries, in display order. */
    private static final String[] SAMPLE_DIARY_NAMES = {
        "Asian Cuisines", "Healthy Living", "Meat Lovers",
        "Vegan Diet", "One Week Slimming", "Core Exercises",
    };

    /**
     * Returns the fixed set of sample diaries, one per sample name.
     */
    public static Diary[] getSampleDiaries() {
        Diary[] diaries = new Diary[SAMPLE_DIARY_NAMES.length];
        for (int i = 0; i < SAMPLE_DIARY_NAMES.length; i++) {
            diaries[i] = new Diary(new DiaryName(SAMPLE_DIARY_NAMES[i]));
        }
        return diaries;
    }

    /**
     * Returns a {@code DiaryRecords} instance populated with every sample diary.
     */
    public static ReadOnlyDiary getSampleDiaryRecords() {
        DiaryRecords records = new DiaryRecords();
        for (Diary diary : getSampleDiaries()) {
            records.addDiary(diary);
        }
        return records;
    }
}
|
bitcaster-io/bitcaster
|
src/bitcaster/cli/commands/check.py
|
<filename>src/bitcaster/cli/commands/check.py
import os
import sys
import time
import click
# from django.core.checks import Warning
from django.db import OperationalError
from bitcaster.cli import global_options
from bitcaster.cli.utils import ErrorLeveParamType, wait_for_service
from bitcaster.exceptions import ImproperlyConfigured
def checkdb(wait=True, timeout=60, debug=False, connection='default'):
    """Verify that the named Django database connection is reachable.

    Retries once per second (printing a progress dot) while ``wait`` is true
    and fewer than ``timeout`` seconds have elapsed.

    Fixes: the connection object is no longer shadowed by its own cursor,
    the cursor is closed after the probe, the 'connnection' typo in the user
    message is corrected, and progress output writes one dot per attempt
    instead of a growing run of ``elapsed`` dots.

    :param wait: retry until ``timeout`` instead of failing immediately
    :param timeout: maximum seconds to keep retrying
    :param debug: also print the underlying OperationalError
    :param connection: Django connection alias to probe
    :return: 0 on success, 1 when the database never became available
    """
    from django.db import connections
    elapsed = 0
    retcode = 0
    try:
        click.echo(f'Checking db connection {connection}...')
        conn = connections[connection]
        while True:
            try:
                cursor = conn.cursor()
            except OperationalError as e:
                if wait and elapsed < timeout:
                    sys.stdout.write('.')
                    sys.stdout.flush()
                    time.sleep(1)
                    elapsed += 1
                else:
                    sys.stderr.write(f"\nDatabase on {conn.settings_dict['HOST']}:{conn.settings_dict['PORT']} "
                                     f'is not available after {elapsed} secs')
                    if debug:
                        sys.stderr.write(f'Error is: {e}')
                    retcode = 1
                    break
            else:
                cursor.close()
                sys.stdout.write(f'Connection {connection} successful\n')
                break
    except KeyboardInterrupt:  # pragma: no-cover
        sys.stdout.write('Interrupted')
    return retcode
@click.command()  # noqa
@global_options
@click.option('--debug', '-d', default=False, is_flag=True,
              help='debug mode')
@click.option('--list-tags', default=False, is_flag=True,
              help='List available tags.')
@click.option('--fail-level', default='ERROR',
              type=ErrorLeveParamType(),
              help='Message level that will cause the command '
                   'to exit with a non-zero status. Default is ERROR.')
@click.option('--tag', '-t', 'tags', multiple=True,
              help='Run only checks labeled with given tag.')
@click.option('--deploy', default=False, is_flag=True,
              help='Check deployment settings.')
@click.option('--wait-services', default=False, is_flag=True,
              help='wait until required services are ready')
@click.option('--timeout', default=60, type=int,
              help='Timeout for waiting for services')
@click.option('--sleep', default=5, type=int,
              help='Sleep time waiting for services')
@click.pass_context
def check(ctx, debug, deploy, tags, list_tags, fail_level, wait_services,
          timeout, sleep, verbose, **kwargs):
    """Run Django system checks, optionally waiting for backing services first.

    Fixes: forwards '--list-tags' with a hyphen (Django's argparse rejects
    '--list_tags'), and forwards the collected ``--tag`` options, which were
    previously accepted but silently ignored.
    """
    if debug:
        os.environ['BITCASTER_DEBUG'] = 'True'
        os.environ['BITCASTER_PLUGINS_AUTOLOAD'] = 'False'
        # os.environ['BITCASTER_CONF'] = ctx.obj['config']

    from bitcaster.config.environ import env
    # env.load_config()
    if deploy:
        wait_services = True
    if wait_services:
        for service, name in [('DATABASE_URL', 'database'),
                              ('CELERY_BROKER_URL', 'celery broker'),
                              ('REDIS_CACHE_URL', 'cache server'),
                              ('REDIS_LOCK_URL', 'lock server'),
                              ('REDIS_TSDB_URL', 'tsdb server'),
                              ]:
            try:
                sys.stdout.write(f'Check {name} {env(service)}')
                sys.stdout.flush()
                time.sleep(5)
                wait_for_service(env(service),
                                 caption='.',
                                 sleep=sleep,
                                 timeout=timeout,
                                 stdout=sys.stdout if verbose > 0 else None)
                sys.stdout.flush()
            except TimeoutError:
                click.echo(f"Timeout checking {name}: '{env(service)}'")
                sys.exit(1)
            except ImproperlyConfigured as e:
                click.echo(f"Error checking {name}: '{env(service)}'")
                click.echo(e)
                sys.exit(1)
        checkdb(wait=True, timeout=timeout)

    extra = ['--fail-level', fail_level, ]
    if deploy:
        # NOTE(review): historically --deploy replaced the --fail-level option
        # entirely; behavior kept as-is pending confirmation of intent.
        extra = ['--deploy']
    if list_tags:
        extra = ['--list-tags']
    for tag in tags:
        extra += ['--tag', tag]
    try:
        from django.core.management import execute_from_command_line
        import django
        django.setup()
        execute_from_command_line(argv=['manage'] + ['check'] + extra)
    except Exception as e:
        click.echo(str(e))
        ctx.abort()
#
# if deploy:
# try:
# click.echo(f"Checking cryptography keys")
# checkfernet()
# except ImproperlyConfigured:
# click.echo(f"Error in cryptography keys")
# ctx.abort()
# except Exception as e:
# click.echo(str(e))
# ctx.abort()
|
psihodelik/alpakka
|
mqtt/src/main/scala/akka/stream/alpakka/mqtt/javadsl/MqttCommittableMessage.scala
|
/*
* Copyright (C) 2016-2017 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.stream.alpakka.mqtt.javadsl
import java.util.concurrent.CompletionStage
import akka.Done
import akka.stream.alpakka.mqtt.MqttMessage
/**
 * An MQTT message paired with an acknowledgement hook: after processing
 * [[message]], call [[messageArrivedComplete]] to signal (commit) that the
 * message arrival has been handled.
 */
trait MqttCommittableMessage {
// The received MQTT message.
val message: MqttMessage
// Completes once the message arrival has been acknowledged.
def messageArrivedComplete(): CompletionStage[Done]
}
|
hmcts/sscs-dwp
|
src/main/java/uk/gov/hmcts/reform/sscs/callback/handlers/ManualCaseCreatedHandler.java
|
<filename>src/main/java/uk/gov/hmcts/reform/sscs/callback/handlers/ManualCaseCreatedHandler.java
package uk.gov.hmcts.reform.sscs.callback.handlers;
import static java.util.Collections.singletonMap;
import static java.util.Objects.requireNonNull;
import static uk.gov.hmcts.reform.sscs.callback.handlers.helper.CaseAccessManagementFieldsHelper.setCaseAccessManagementFields;
import static uk.gov.hmcts.reform.sscs.ccd.callback.CallbackType.SUBMITTED;
import static uk.gov.hmcts.reform.sscs.ccd.callback.DispatchPriority.LATEST;
import static uk.gov.hmcts.reform.sscs.ccd.domain.EventType.INCOMPLETE_APPLICATION_RECEIVED;
import static uk.gov.hmcts.reform.sscs.ccd.domain.EventType.NON_COMPLIANT;
import static uk.gov.hmcts.reform.sscs.ccd.domain.EventType.VALID_APPEAL_CREATED;
import java.util.HashMap;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import uk.gov.hmcts.reform.sscs.callback.CallbackHandler;
import uk.gov.hmcts.reform.sscs.ccd.callback.Callback;
import uk.gov.hmcts.reform.sscs.ccd.callback.CallbackType;
import uk.gov.hmcts.reform.sscs.ccd.callback.DispatchPriority;
import uk.gov.hmcts.reform.sscs.ccd.domain.SscsCaseData;
import uk.gov.hmcts.reform.sscs.ccd.service.CcdService;
import uk.gov.hmcts.reform.sscs.idam.IdamService;
import uk.gov.hmcts.reform.sscs.idam.IdamTokens;
@Slf4j
@Service
@RequiredArgsConstructor
public class ManualCaseCreatedHandler implements CallbackHandler<SscsCaseData> {
private final CcdService ccdService;
private final IdamService idamService;
@Value("${feature.case-access-management.enabled}")
private boolean caseAccessManagementFeature;
@Override
public boolean canHandle(CallbackType callbackType, Callback<SscsCaseData> callback) {
requireNonNull(callback, "callback must not be null");
requireNonNull(callbackType, "callbackType must not be null");
return callbackType.equals(SUBMITTED)
&& (callback.getEvent() == NON_COMPLIANT
|| callback.getEvent() == INCOMPLETE_APPLICATION_RECEIVED
|| callback.getEvent() == VALID_APPEAL_CREATED);
}
@Override
public void handle(CallbackType callbackType, Callback<SscsCaseData> callback) {
if (!canHandle(callbackType, callback)) {
throw new IllegalStateException("Cannot handle callback");
}
Long caseId = callback.getCaseDetails().getId();
log.info("Manually created case handler for case id {}", caseId);
IdamTokens idamTokens = idamService.getIdamTokens();
try {
log.info("Setting supplementary data for: {}", caseId);
setSupplementaryData(caseId, idamTokens);
if (caseAccessManagementFeature) {
log.info("Setting CAM data for: {}", caseId);
setCaseAccessManagementFields(callback
.getCaseDetails()
.getCaseData());
log.info("Set CAM fields for {} as follows: {}",
caseId, callback.getCaseDetails().getCaseData().getCaseAccessManagementFields().toString());
ccdService.updateCase(
callback.getCaseDetails().getCaseData(),
callback.getCaseDetails().getId(),
callback.getEvent().getCcdType(),
"Case Update - Manual Case Created",
"Case was updated in SSCS-Evidence-Share",
idamService.getIdamTokens()
);
}
} catch (Exception e) {
log.error("Error sending supplementary for caseId {}", caseId, e);
}
}
@Override
public DispatchPriority getPriority() {
return LATEST;
}
private void setSupplementaryData(Long caseId, IdamTokens idamTokens) {
Map<String, Map<String, Map<String, Object>>> supplementaryDataUpdates = new HashMap<>();
supplementaryDataUpdates.put("supplementary_data_updates", singletonMap("$set", singletonMap("HMCTSServiceId", "BBA3")));
ccdService.setSupplementaryData(idamTokens, caseId, supplementaryDataUpdates);
}
}
|
oirad21/react_oirad
|
node_modules/@fortawesome/pro-regular-svg-icons/faEuroSign.js
|
<gh_stars>0
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var prefix = 'far';
var iconName = 'euro-sign';
var width = 320;
var height = 512;
var ligatures = [];
var unicode = 'f153';
var svgPathData = 'M315.595 458.559l-6.504-29.443c-1.446-6.544-7.962-10.628-14.485-9.087-10.306 2.435-26.461 5.352-44.699 5.352-65.469 0-117.018-39.532-138.208-97.382h129.515a12 12 0 0 0 11.749-9.558l4.989-24c1.549-7.45-4.139-14.442-11.749-14.442H98.159c-1.495-16.139-2.068-32.264-.591-48H260.06a12 12 0 0 0 11.737-9.499l5.114-24c1.591-7.466-4.103-14.501-11.737-14.501H108.057c21.02-58.359 72.527-97.995 140.009-97.995 14.663 0 28.909 2.084 38.245 3.823 6.167 1.149 12.175-2.635 13.796-8.695l7.907-29.567c1.809-6.766-2.528-13.633-9.416-14.902C287.192 34.562 269.204 32 249.294 32 149.268 32 69.61 96.076 43.43 184H12c-6.627 0-12 5.373-12 12v24c0 6.627 5.373 12 12 12h23.129c-1.159 15.771-1.031 35.474.383 48H12c-6.627 0-12 5.373-12 12v24c0 6.627 5.373 12 12 12h32.248c25.98 88.704 103.428 152 205.045 152 24.402 0 45.381-4.155 57.538-7.221 6.288-1.585 10.163-7.889 8.764-14.22z';
exports.definition = {
prefix: prefix,
iconName: iconName,
icon: [
width,
height,
ligatures,
unicode,
svgPathData
]};
exports.faEuroSign = exports.definition;
exports.prefix = prefix;
exports.iconName = iconName;
exports.width = width;
exports.height = height;
exports.ligatures = ligatures;
exports.unicode = unicode;
exports.svgPathData = svgPathData;
|
tusharchoudhary0003/Custom-Football-Game
|
sources/p005cm/aptoide/p006pt/store/view/C4977Y.java
|
package p005cm.aptoide.p006pt.store.view;
import android.content.res.Resources;
import android.view.View;
import android.view.View.OnClickListener;
import p005cm.aptoide.p006pt.navigator.FragmentNavigator;
/* renamed from: cm.aptoide.pt.store.view.Y */
/* compiled from: lambda */
/* Decompiler-generated synthetic class materializing a method-reference
lambda as an OnClickListener; captured values are stored as fields and
forwarded to GridStoreMetaWidget#mo16535a on click. Do not edit by hand. */
public final /* synthetic */ class C4977Y implements OnClickListener {
/* renamed from: a */
private final /* synthetic */ GridStoreMetaWidget f8630a;
/* renamed from: b */
private final /* synthetic */ GridStoreMetaDisplayable f8631b;
/* renamed from: c */
private final /* synthetic */ FragmentNavigator f8632c;
/* renamed from: d */
private final /* synthetic */ Resources f8633d;
/* Captures the lambda's enclosing values. */
public /* synthetic */ C4977Y(GridStoreMetaWidget gridStoreMetaWidget, GridStoreMetaDisplayable gridStoreMetaDisplayable, FragmentNavigator fragmentNavigator, Resources resources) {
this.f8630a = gridStoreMetaWidget;
this.f8631b = gridStoreMetaDisplayable;
this.f8632c = fragmentNavigator;
this.f8633d = resources;
}
/* Delegates the click to the widget with the captured arguments. */
public final void onClick(View view) {
this.f8630a.mo16535a(this.f8631b, this.f8632c, this.f8633d, view);
}
}
|
suncht/pentaho-kettle-serial
|
kettle5.0.1-src-eclipse/engine/org/pentaho/di/trans/steps/delete/DeleteMeta.java
|
<filename>kettle5.0.1-src-eclipse/engine/org/pentaho/di/trans/steps/delete/DeleteMeta.java
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.delete;
import java.util.List;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This class takes care of deleting values in a table using a certain condition and values for input.
*
* @author Tom, Matt
* @since 28-March-2006
*/
public class DeleteMeta extends BaseStepMeta implements StepMetaInterface
{
private static Class<?> PKG = DeleteMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
/** The target schema name */
private String schemaName;
/** The lookup table name */
private String tableName;
/** database connection */
private DatabaseMeta databaseMeta;
/** which field in input stream to compare with? */
private String keyStream[];
/** field in table */
private String keyLookup[];
/** Comparator: =, <>, BETWEEN, ... */
private String keyCondition[];
/** Extra field for between... */
private String keyStream2[];
/** Commit size for inserts/updates */
private int commitSize;
public DeleteMeta()
{
super(); // allocate BaseStepMeta
}
/**
* @return Returns the commitSize (number of rows deleted per commit batch).
*/
public int getCommitSize()
{
return commitSize;
}
/**
* @param commitSize The commitSize to set.
*/
public void setCommitSize(int commitSize)
{
this.commitSize = commitSize;
}
/**
* @return Returns the database connection metadata.
*/
public DatabaseMeta getDatabaseMeta()
{
return databaseMeta;
}
/**
* @param database The database to set.
*/
public void setDatabaseMeta(DatabaseMeta database)
{
this.databaseMeta = database;
}
/**
* @return Returns the keyCondition (comparators: =, &lt;&gt;, BETWEEN, ...), parallel to keyStream.
*/
public String[] getKeyCondition()
{
return keyCondition;
}
/**
* @param keyCondition The keyCondition to set.
*/
public void setKeyCondition(String[] keyCondition)
{
this.keyCondition = keyCondition;
}
/**
* @return Returns the keyLookup (table field names), parallel to keyStream.
*/
public String[] getKeyLookup()
{
return keyLookup;
}
/**
* @param keyLookup The keyLookup to set.
*/
public void setKeyLookup(String[] keyLookup)
{
this.keyLookup = keyLookup;
}
/**
* @return Returns the keyStream (input stream field names to compare with).
*/
public String[] getKeyStream()
{
return keyStream;
}
/**
* @param keyStream The keyStream to set.
*/
public void setKeyStream(String[] keyStream)
{
this.keyStream = keyStream;
}
/**
* @return Returns the keyStream2 (second stream field, used for BETWEEN conditions).
*/
public String[] getKeyStream2()
{
return keyStream2;
}
/**
* @param keyStream2 The keyStream2 to set.
*/
public void setKeyStream2(String[] keyStream2)
{
this.keyStream2 = keyStream2;
}
/**
* @return Returns the tableName (target/lookup table for the delete).
*/
public String getTableName()
{
return tableName;
}
/**
* @param tableName The tableName to set.
*/
public void setTableName(String tableName)
{
this.tableName = tableName;
}
/**
* Loads this step's settings from the given transformation XML node.
*/
public void loadXML(Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore) throws KettleXMLException {
readData(stepnode, databases);
}
/**
* (Re)allocates the four parallel key arrays to hold nrkeys entries.
*/
public void allocate(int nrkeys)
{
keyStream = new String[nrkeys];
keyLookup = new String[nrkeys];
keyCondition = new String[nrkeys];
keyStream2 = new String[nrkeys];
}
/**
 * Returns a deep copy of this step metadata: the parallel key arrays are
 * reallocated and copied so the clone owns its own storage.
 */
public Object clone()
{
    DeleteMeta retval = (DeleteMeta) super.clone();
    int nrkeys = keyStream.length;
    retval.allocate(nrkeys);

    // Bulk-copy each parallel array instead of an element-by-element loop.
    System.arraycopy(keyStream, 0, retval.keyStream, 0, nrkeys);
    System.arraycopy(keyLookup, 0, retval.keyLookup, 0, nrkeys);
    System.arraycopy(keyCondition, 0, retval.keyCondition, 0, nrkeys);
    System.arraycopy(keyStream2, 0, retval.keyStream2, 0, nrkeys);

    return retval;
}
/**
* Parses this step's settings from a transformation XML node: connection
* name, commit size, target schema/table, and the list of key comparisons.
* Missing conditions default to "=". Any parse failure is wrapped in a
* KettleXMLException.
*/
private void readData(Node stepnode,List<? extends SharedObjectInterface> databases)
throws KettleXMLException
{
try
{
String csize;
int nrkeys;
String con = XMLHandler.getTagValue(stepnode, "connection");
databaseMeta = DatabaseMeta.findDatabase(databases, con);
csize = XMLHandler.getTagValue(stepnode, "commit");
commitSize = Const.toInt(csize, 0);
schemaName = XMLHandler.getTagValue(stepnode, "lookup", "schema");
tableName = XMLHandler.getTagValue(stepnode, "lookup", "table");
Node lookup = XMLHandler.getSubNode(stepnode, "lookup");
nrkeys = XMLHandler.countNodes(lookup, "key");
allocate(nrkeys);
// One <key> node per comparison: stream field, table field, condition, second stream field.
for (int i=0;i<nrkeys;i++)
{
Node knode = XMLHandler.getSubNodeByNr(lookup, "key", i);
keyStream [i] = XMLHandler.getTagValue(knode, "name");
keyLookup [i] = XMLHandler.getTagValue(knode, "field");
keyCondition[i] = XMLHandler.getTagValue(knode, "condition");
// Default comparator when the XML omits the condition tag.
if (keyCondition[i]==null) keyCondition[i]="=";
keyStream2 [i] = XMLHandler.getTagValue(knode, "name2");
}
}
catch(Exception e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "DeleteMeta.Exception.UnableToReadStepInfoFromXML"), e);
}
}
/**
 * Resets this step to its defaults: no connection, no key comparisons,
 * a localized default table name and a commit size of 100 rows.
 */
public void setDefault()
{
    databaseMeta = null;
    keyStream = null;
    commitSize = 100;
    schemaName = "";
    tableName = BaseMessages.getString(PKG, "DeleteMeta.DefaultTableName.Label");
    // Zero-length key arrays (allocate replaces the null keyStream above).
    allocate(0);
}
/**
 * Serializes this step's settings (connection, commit size, target
 * schema/table and key comparisons) to the transformation XML fragment.
 *
 * Uses StringBuilder instead of the legacy StringBuffer: the buffer is
 * method-local, so synchronization is pure overhead.
 */
public String getXML()
{
    StringBuilder retval = new StringBuilder(500);

    retval.append(" ").append(XMLHandler.addTagValue("connection", databaseMeta==null?"":databaseMeta.getName())); //$NON-NLS-3$
    retval.append(" ").append(XMLHandler.addTagValue("commit", commitSize));
    retval.append(" <lookup>").append(Const.CR);
    retval.append(" ").append(XMLHandler.addTagValue("schema", schemaName));
    retval.append(" ").append(XMLHandler.addTagValue("table", tableName));

    // One <key> element per configured comparison.
    for (int i=0;i<keyStream.length;i++)
    {
        retval.append(" <key>").append(Const.CR);
        retval.append(" ").append(XMLHandler.addTagValue("name", keyStream[i]));
        retval.append(" ").append(XMLHandler.addTagValue("field", keyLookup[i]));
        retval.append(" ").append(XMLHandler.addTagValue("condition", keyCondition[i]));
        retval.append(" ").append(XMLHandler.addTagValue("name2", keyStream2[i]));
        retval.append(" </key>").append(Const.CR);
    }

    retval.append(" </lookup>").append(Const.CR);

    return retval.toString();
}
public void readRep(Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases) throws KettleException
{
try
{
databaseMeta = rep.loadDatabaseMetaFromStepAttribute(id_step, "id_connection", databases);
commitSize = (int)rep.getStepAttributeInteger(id_step, "commit");
schemaName = rep.getStepAttributeString(id_step, "schema");
tableName = rep.getStepAttributeString(id_step, "table");
int nrkeys = rep.countNrStepAttributes(id_step, "key_name");
allocate(nrkeys);
for (int i=0;i<nrkeys;i++)
{
keyStream[i] = rep.getStepAttributeString(id_step, i, "key_name");
keyLookup[i] = rep.getStepAttributeString(id_step, i, "key_field");
keyCondition[i] = rep.getStepAttributeString(id_step, i, "key_condition");
keyStream2[i] = rep.getStepAttributeString(id_step, i, "key_name2");
}
}
catch(Exception e)
{
throw new KettleException(BaseMessages.getString(PKG, "DeleteMeta.Exception.UnexpectedErrorInReadingStepInfo"), e);
}
}
/**
 * Saves this step's settings to a Kettle repository.
 * The attribute names written here must stay in sync with those read by readRep().
 *
 * @throws KettleException wrapping any unexpected repository error
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step)
    throws KettleException
{
    try
    {
        rep.saveDatabaseMetaStepAttribute(id_transformation, id_step, "id_connection", databaseMeta);
        rep.saveStepAttribute(id_transformation, id_step, "commit", commitSize);
        rep.saveStepAttribute(id_transformation, id_step, "schema", schemaName);
        rep.saveStepAttribute(id_transformation, id_step, "table", tableName);

        // One repository row per key entry, indexed by i.
        for (int i=0;i<keyStream.length;i++)
        {
            rep.saveStepAttribute(id_transformation, id_step, i, "key_name", keyStream[i]);
            rep.saveStepAttribute(id_transformation, id_step, i, "key_field", keyLookup[i]);
            rep.saveStepAttribute(id_transformation, id_step, i, "key_condition", keyCondition[i]);
            rep.saveStepAttribute(id_transformation, id_step, i, "key_name2", keyStream2[i]);
        }

        // Also, save the step-database relationship!
        if (databaseMeta!=null) rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getObjectId());
    }
    catch(Exception e)
    {
        throw new KettleException(BaseMessages.getString(PKG, "DeleteMeta.Exception.UnableToSaveStepInfo")+id_step, e);
    }
}
/**
 * A Delete step neither adds nor removes stream fields, so the row layout
 * passes through unchanged.
 */
public void getFields(RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException
{
    // Default: nothing changes to rowMeta
}
/**
 * Validates this step's configuration and appends the results to {@code remarks}:
 * <ul>
 *   <li>a database connection is defined and reachable,</li>
 *   <li>the target table exists and contains every lookup column ({@code keyLookup}),</li>
 *   <li>the incoming stream provides every comparison field ({@code keyStream} /
 *       the optional {@code keyStream2} used by two-operand conditions),</li>
 *   <li>at least one input hop is connected to this step.</li>
 * </ul>
 */
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore)
{
    CheckResult cr;
    String error_message = "";

    if (databaseMeta!=null)
    {
        Database db = new Database(loggingObject, databaseMeta);
        db.shareVariablesWith(transMeta);
        try
        {
            db.connect();

            if (!Const.isEmpty(tableName))
            {
                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DeleteMeta.CheckResult.TablenameOK"), stepMeta);
                remarks.add(cr);

                boolean first=true;
                boolean error_found=false;
                error_message = "";

                // Check that every lookup key column exists in the target table.
                String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
                RowMetaInterface r = db.getTableFields(schemaTable);
                if (r!=null)
                {
                    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DeleteMeta.CheckResult.VisitTableSuccessfully"), stepMeta);
                    remarks.add(cr);

                    for (int i=0;i<keyLookup.length;i++)
                    {
                        String lufield = keyLookup[i];
                        ValueMetaInterface v = r.searchValueMeta(lufield);
                        if (v==null)
                        {
                            if (first)
                            {
                                // Emit the header line only once, before the first missing column.
                                first=false;
                                error_message+=BaseMessages.getString(PKG, "DeleteMeta.CheckResult.MissingCompareFieldsInTargetTable")+Const.CR;
                            }
                            error_found=true;
                            error_message+="\t\t"+lufield+Const.CR;
                        }
                    }
                    if (error_found)
                    {
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                    }
                    else
                    {
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DeleteMeta.CheckResult.FoundLookupFields"), stepMeta);
                    }
                    remarks.add(cr);
                }
                else
                {
                    error_message=BaseMessages.getString(PKG, "DeleteMeta.CheckResult.CouldNotReadTableInfo");
                    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                    remarks.add(cr);
                }
            }

            // Look up fields in the input stream <prev>
            if (prev!=null && prev.size()>0)
            {
                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DeleteMeta.CheckResult.ConnectedStepSuccessfully",String.valueOf(prev.size())), stepMeta);
                remarks.add(cr);

                boolean first=true;
                error_message = "";
                boolean error_found = false;

                for (int i=0;i<keyStream.length;i++)
                {
                    ValueMetaInterface v = prev.searchValueMeta(keyStream[i]);
                    if (v==null)
                    {
                        if (first)
                        {
                            first=false;
                            error_message+=BaseMessages.getString(PKG, "DeleteMeta.CheckResult.MissingFields")+Const.CR;
                        }
                        error_found=true;
                        error_message+="\t\t"+keyStream[i]+Const.CR;
                    }
                }
                // keyStream2 entries are optional (second operand of e.g. BETWEEN conditions);
                // empty/null entries are skipped.
                for (int i=0;i<keyStream2.length;i++)
                {
                    if (keyStream2[i]!=null && keyStream2[i].length()>0)
                    {
                        ValueMetaInterface v = prev.searchValueMeta(keyStream2[i]);
                        if (v==null)
                        {
                            if (first)
                            {
                                first=false;
                                error_message+=BaseMessages.getString(PKG, "DeleteMeta.CheckResult.MissingFields2")+Const.CR;
                            }
                            error_found=true;
                            // BUGFIX: report the missing keyStream2 field; this previously
                            // printed keyStream[i], naming the wrong field in the remark.
                            error_message+="\t\t"+keyStream2[i]+Const.CR;
                        }
                    }
                }
                if (error_found)
                {
                    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                }
                else
                {
                    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DeleteMeta.CheckResult.AllFieldsFound"), stepMeta);
                }
                remarks.add(cr);
                // (Dead resets of first/error_found/error_message that followed here were
                // removed: the variables were never read again.)
            }
            else
            {
                error_message=BaseMessages.getString(PKG, "DeleteMeta.CheckResult.MissingFields3")+Const.CR;
                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                remarks.add(cr);
            }
        }
        catch(KettleException e)
        {
            error_message = BaseMessages.getString(PKG, "DeleteMeta.CheckResult.DatabaseError")+e.getMessage();
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
            remarks.add(cr);
        }
        finally
        {
            db.disconnect();
        }
    }
    else
    {
        error_message = BaseMessages.getString(PKG, "DeleteMeta.CheckResult.InvalidConnection");
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
        remarks.add(cr);
    }

    // See if we have input streams leading to this step!
    if (input.length>0)
    {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DeleteMeta.CheckResult.StepReceivingInfo"), stepMeta);
        remarks.add(cr);
    }
    else
    {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "DeleteMeta.CheckResult.NoInputReceived"), stepMeta);
        remarks.add(cr);
    }
}
/**
 * Generates the DDL (CREATE/ALTER TABLE plus a lookup index on the key columns)
 * needed before this step can run against the configured connection.
 *
 * @return a SQLStatement whose SQL is null when nothing needs to be done, or
 *         whose error is set when generation is impossible
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore)
{
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do!

    if (databaseMeta!=null)
    {
        if (prev!=null && prev.size()>0)
        {
            if (!Const.isEmpty(tableName))
            {
                Database db = new Database(loggingObject, databaseMeta);
                db.shareVariablesWith(transMeta);
                try
                {
                    db.connect();

                    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
                    // DDL to create/alter the target table so it can hold the incoming rows.
                    String cr_table = db.getDDL(schemaTable,
                                                prev,
                                                null,
                                                false,
                                                null,
                                                true
                                                );

                    String cr_index = "";
                    String idx_fields[] = null;

                    if (keyLookup!=null && keyLookup.length>0)
                    {
                        // clone() replaces the original element-by-element copy loop.
                        idx_fields = keyLookup.clone();
                    }
                    else
                    {
                        retval.setError(BaseMessages.getString(PKG, "DeleteMeta.CheckResult.KeyFieldsRequired"));
                    }

                    // Key lookup dimensions: add an index on the key columns if missing.
                    if (idx_fields!=null && idx_fields.length>0 &&
                        !db.checkIndexExists(schemaTable, idx_fields)
                       )
                    {
                        String indexname = "idx_"+tableName+"_lookup";
                        cr_index = db.getCreateIndexStatement(schemaName, tableName, indexname, idx_fields, false, false, false, true);
                    }

                    String sql = cr_table+cr_index;
                    if (sql.length()==0) retval.setSQL(null); else retval.setSQL(sql);
                }
                catch(KettleException e)
                {
                    retval.setError(BaseMessages.getString(PKG, "DeleteMeta.Returnvalue.ErrorOccurred")+e.getMessage());
                }
                finally
                {
                    // BUGFIX: the connection was previously never closed, leaking it on
                    // every invocation (check() already disconnected; this method didn't).
                    db.disconnect();
                }
            }
            else
            {
                retval.setError(BaseMessages.getString(PKG, "DeleteMeta.Returnvalue.NoTableDefinedOnConnection"));
            }
        }
        else
        {
            retval.setError(BaseMessages.getString(PKG, "DeleteMeta.Returnvalue.NoReceivingAnyFields"));
        }
    }
    else
    {
        retval.setError(BaseMessages.getString(PKG, "DeleteMeta.Returnvalue.NoConnectionDefined"));
    }

    return retval;
}
/**
 * Registers the database impact of this step: a DELETE on the target table,
 * keyed by each configured lookup column and its corresponding stream field.
 */
public void analyseImpact(List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input,
    String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore)
    throws KettleStepException
{
    if (prev != null)
    {
        // Lookup: we do a lookup on the natural keys
        for (int i = 0; i < keyLookup.length; i++)
        {
            ValueMetaInterface v = prev.searchValueMeta(keyStream[i]);
            // BUGFIX: guard the "Type = ..." remark as well. Previously only
            // getOrigin() was null-checked, so v.toStringMeta() threw a
            // NullPointerException whenever the stream field was not found.
            DatabaseImpact ii = new DatabaseImpact(DatabaseImpact.TYPE_IMPACT_DELETE, transMeta.getName(), stepMeta.getName(),
                databaseMeta.getDatabaseName(), tableName, keyLookup[i], keyStream[i],
                v != null ? v.getOrigin() : "?", "",
                v != null ? "Type = " + v.toStringMeta() : "Type = ?");
            impact.add(ii);
        }
    }
}
/** Creates the runtime step instance that performs the actual deletes. */
public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans)
{
    return new Delete(stepMeta, stepDataInterface, cnr, tr, trans);
}
/** Creates the per-execution data holder used by the runtime step. */
public StepDataInterface getStepData()
{
    return new DeleteData();
}
/**
 * Reports the database connections used by this step so the transformation
 * can track them; defers to the superclass when no connection is configured.
 */
public DatabaseMeta[] getUsedDatabaseConnections()
{
    if (databaseMeta == null)
    {
        return super.getUsedDatabaseConnections();
    }
    return new DatabaseMeta[] { databaseMeta };
}
/**
 * @return the schemaName of the target table (may be null or empty for the default schema)
 */
public String getSchemaName()
{
    return schemaName;
}
/**
 * @param schemaName the schemaName to set for the target table
 */
public void setSchemaName(String schemaName)
{
    this.schemaName = schemaName;
}
/** This step supports step error handling (error hops can be attached). */
public boolean supportsErrorHandling()
{
    return true;
}
}
|
jiangmuhua/saluki
|
saluki-gateway/src/main/java/com/quancheng/saluki/gateway/oauth2/entity/RefreshTokenEntity.java
|
package com.quancheng.saluki.gateway.oauth2.entity;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Convert;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Lob;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import org.springframework.security.oauth2.common.OAuth2RefreshToken;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
import com.quancheng.saluki.gateway.oauth2.util.OAuth2RefreshTokenPersistenceConverters;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.ToString;
/**
 * JPA entity persisting an OAuth2 refresh token together with the
 * authentication it was issued for. Equality and toString are based
 * solely on {@code tokenId} (see the Lombok annotations).
 */
@Data
@EqualsAndHashCode(of = "tokenId", callSuper = false)
@ToString(of = "tokenId", callSuper = false)
@NoArgsConstructor
@AllArgsConstructor
@Builder
@Entity
@Table(name = "refresh_token")
public class RefreshTokenEntity extends AbstractAuditable<Long> {

    // Token identifier; unique, 36 characters (UUID-sized).
    @NonNull
    @NotNull
    @Column(name = "token_id", nullable = false, unique = true, length = 36)
    private String tokenId;

    // Token object persisted through a JPA attribute converter.
    @NonNull
    @NotNull
    @Convert(converter = OAuth2RefreshTokenPersistenceConverters.class)
    @Column(name = "serialized_token", nullable = false)
    private OAuth2RefreshToken token;

    // NOTE(review): stored as a @Lob with no explicit converter — presumably
    // relies on Java serialization of OAuth2Authentication; confirm.
    @NonNull
    @NotNull
    @Lob
    @Column(name = "serialized_authentication", nullable = false)
    private OAuth2Authentication authentication;

    // Access tokens that were issued against this refresh token.
    @OneToMany(mappedBy = "refreshToken", fetch = FetchType.LAZY)
    private Set<AccessTokenEntity> accessTokens;
}
|
wumo/vkg
|
src/vkg/render/model/aabb.hpp
|
#pragma once
#include "vkg/math/glm_common.hpp"
#include "transform.hpp"
#include <ostream>
namespace vkg {
// Axis-aligned bounding box. Starts "empty" (min at +inf, max at -inf),
// so the first merge() call establishes the box.
struct AABB {
  glm::vec3 min{std::numeric_limits<float>::infinity()};
  glm::vec3 max{-std::numeric_limits<float>::infinity()};

  // Return a copy of this box transformed by the given matrix / transform
  // (implemented out of line).
  auto transform(glm::mat4 m) const -> AABB;
  auto transform(Transform transform) const -> AABB;

  // Variadic convenience overload: grow the box to contain each given point.
  template<typename... Args>
  auto merge(glm::vec3 p, Args &&... points) -> void {
    merge(p);
    merge(points...);
  }
  // Grow the box to contain a point / another box (implemented out of line).
  auto merge(glm::vec3 p) -> void;
  auto merge(AABB other) -> void;

  auto center() const -> glm::vec3;    // midpoint between min and max
  auto halfRange() const -> glm::vec3; // half extents
  auto range() const -> glm::vec3;     // full extents

  friend auto operator<<(std::ostream &os, const AABB &aabb) -> std::ostream &;
};
}
|
ashinzekene/algorithms
|
symmetric-tree/index.go
|
package algorithms
import "github.com/ashinzekene/algorithms/utils/trees"
// isSymmetric reports whether the binary tree rooted at root is a mirror
// image of itself around its center. A nil tree counts as symmetric.
func isSymmetric(root *trees.TreeNode) bool {
	return root == nil || isMirror(root.Left, root.Right)
}
// isMirror reports whether two subtrees are mirror images of each other:
// equal root values, with outer and inner children mirrored recursively.
func isMirror(left, right *trees.TreeNode) bool {
	switch {
	case left == nil && right == nil:
		return true
	case left == nil || right == nil:
		return false
	case left.Val != right.Val:
		return false
	default:
		return isMirror(left.Left, right.Right) && isMirror(right.Left, left.Right)
	}
}
// isSymmetric2 is an iterative variant of isSymmetric: it walks the tree
// breadth-first with a queue, dequeuing nodes in mirrored pairs.
func isSymmetric2(root *trees.TreeNode) bool {
	if root == nil {
		return true
	}
	pending := []*trees.TreeNode{root.Left, root.Right}
	for len(pending) > 0 {
		first, second := pending[0], pending[1]
		pending = pending[2:]
		switch {
		case first == nil && second == nil:
			continue
		case first == nil || second == nil:
			return false
		case first.Val != second.Val:
			return false
		}
		// Enqueue children in mirrored order: outer pair, then inner pair.
		pending = append(pending, first.Left, second.Right, second.Left, first.Right)
	}
	return true
}
|
ogii-test/ddp-study-server
|
pepper-apis/src/main/java/org/broadinstitute/ddp/model/activity/definition/i18n/SummaryTranslation.java
|
<filename>pepper-apis/src/main/java/org/broadinstitute/ddp/model/activity/definition/i18n/SummaryTranslation.java
package org.broadinstitute.ddp.model.activity.definition.i18n;
import java.util.Objects;
import javax.validation.constraints.NotNull;
import com.google.gson.annotations.SerializedName;
import org.broadinstitute.ddp.model.activity.types.InstanceStatusType;
import org.broadinstitute.ddp.util.MiscUtil;
import org.jdbi.v3.core.mapper.reflect.ColumnName;
import org.jdbi.v3.core.mapper.reflect.JdbiConstructor;
/**
 * Translation of an activity's summary text for a particular instance status.
 * Extends the base Translation with the status type and the owning activity id.
 */
public class SummaryTranslation extends Translation {

    @NotNull
    @SerializedName("statusCode")
    private InstanceStatusType statusType;

    // transient: excluded from Gson serialization; set when loading from the database.
    private transient long activityId;

    /** Row-mapping constructor used by JDBI when loading from the database. */
    @JdbiConstructor
    public SummaryTranslation(
            @ColumnName("i18n_study_activity_summary_trans_id") long id,
            @ColumnName("study_activity_id") long activityId,
            @ColumnName("instance_status_type") InstanceStatusType statusType,
            @ColumnName("iso_language_code") String languageCode,
            @ColumnName("translation_text") String text) {
        super(id, languageCode, text, null);
        this.activityId = activityId;
        this.statusType = statusType;
    }

    /** Constructor for translations created in code, prior to persistence. */
    public SummaryTranslation(
            String languageCode,
            String text,
            InstanceStatusType statusType
    ) {
        super(languageCode, text);
        this.statusType = MiscUtil.checkNonNull(statusType, "statusType");
    }

    public long getActivityId() {
        return activityId;
    }

    public InstanceStatusType getStatusType() {
        return statusType;
    }

    // Equality covers id, owning activity, status and the translated text.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SummaryTranslation that = (SummaryTranslation) o;
        return Objects.equals(id, that.id)
                && activityId == that.activityId
                && statusType == that.statusType
                && Objects.equals(languageCode, that.languageCode)
                && Objects.equals(text, that.text);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, activityId, statusType, languageCode, text);
    }
}
|
chinmaymhatre91/beacon
|
net.beaconcontroller.packet/src/test/java/net/beaconcontroller/packet/ICMPTest.java
|
<filename>net.beaconcontroller.packet/src/test/java/net/beaconcontroller/packet/ICMPTest.java
/**
* Copyright 2010-2013, Stanford University. This file is licensed under the
* BSD license as described in the included LICENSE.txt.
*/
package net.beaconcontroller.packet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import org.junit.Test;
/**
* @author <NAME> (<EMAIL>)
*
*/
public class ICMPTest {

    // Raw bytes of a captured Ethernet frame carrying an ICMP echo request
    // (type 8, code 0, 60-byte payload).
    public byte[] icmpRequest = new byte[] { (byte) 0x00, (byte) 0x50,
            (byte) 0x56, (byte) 0xfc, (byte) 0x8a, (byte) 0xb7, (byte) 0x00,
            (byte) 0x0c, (byte) 0x29, (byte) 0xea, (byte) 0x51, (byte) 0x0c,
            (byte) 0x08, (byte) 0x00, (byte) 0x45, (byte) 0x00, (byte) 0x00,
            (byte) 0x54, (byte) 0x00, (byte) 0x00, (byte) 0x40, (byte) 0x00,
            (byte) 0x40, (byte) 0x01, (byte) 0x57, (byte) 0xf6, (byte) 0xc0,
            (byte) 0xa8, (byte) 0xce, (byte) 0x03, (byte) 0x4a, (byte) 0x35,
            (byte) 0x09, (byte) 0xd2, (byte) 0x08, (byte) 0x00, (byte) 0x77,
            (byte) 0xb3, (byte) 0xf6, (byte) 0x1c, (byte) 0x00, (byte) 0x03,
            (byte) 0xcc, (byte) 0xa8, (byte) 0x75, (byte) 0x50, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x7c, (byte) 0x60,
            (byte) 0x0d, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x10, (byte) 0x11, (byte) 0x12, (byte) 0x13,
            (byte) 0x14, (byte) 0x15, (byte) 0x16, (byte) 0x17, (byte) 0x18,
            (byte) 0x19, (byte) 0x1a, (byte) 0x1b, (byte) 0x1c, (byte) 0x1d,
            (byte) 0x1e, (byte) 0x1f, (byte) 0x20, (byte) 0x21, (byte) 0x22,
            (byte) 0x23, (byte) 0x24, (byte) 0x25, (byte) 0x26, (byte) 0x27,
            (byte) 0x28, (byte) 0x29, (byte) 0x2a, (byte) 0x2b, (byte) 0x2c,
            (byte) 0x2d, (byte) 0x2e, (byte) 0x2f, (byte) 0x30, (byte) 0x31,
            (byte) 0x32, (byte) 0x33, (byte) 0x34, (byte) 0x35, (byte) 0x36,
            (byte) 0x37 };

    // Raw bytes of the matching ICMP echo reply frame (type 0, code 0).
    public byte[] icmpReply = new byte[] { 0x00, 0x0c, 0x29, (byte) 0xea, 0x51,
            0x0c, 0x00, 0x50, 0x56, (byte) 0xfc, (byte) 0x8a, (byte) 0xb7,
            0x08, 0x00, 0x45, 0x00, 0x00, 0x54, 0x00, 0x5d, 0x00, 0x00,
            (byte) 0x80, 0x01, 0x57, (byte) 0x99, 0x4a, 0x35, 0x09,
            (byte) 0xd2, (byte) 0xc0, (byte) 0xa8, (byte) 0xce, 0x03, 0x00,
            0x00, 0x7f, (byte) 0xb3, (byte) 0xf6, 0x1c, 0x00, 0x03,
            (byte) 0xcc, (byte) 0xa8, 0x75, 0x50, 0x00, 0x00, 0x00, 0x00, 0x7c,
            0x60, 0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x11, 0x12, 0x13,
            0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e,
            0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29,
            0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34,
            0x35, 0x36, 0x37 };

    /**
     * Round-trips an ICMP echo request: deserialize the captured frame, check
     * the decoded ICMP header fields, re-serialize, then build the same packet
     * with the fluent API and compare both forms.
     *
     * NOTE(review): assertEquals is called as (actual, expected) throughout —
     * swapped relative to JUnit convention, so failure messages will be misleading.
     */
    @Test
    public void testICMPRequest() {
        Ethernet eth = new Ethernet();
        eth.deserialize(icmpRequest, 0, icmpRequest.length);
        assertTrue(eth.getPayload() instanceof IPv4);
        IPv4 ip = (IPv4) eth.getPayload();
        assertTrue(ip.getPayload() instanceof ICMP);
        ICMP icmp = (ICMP) ip.getPayload();
        assertEquals(icmp.getType(), 8);
        assertEquals(icmp.getCode(), 0);
        assertEquals(icmp.getChecksum(), 0x77b3);
        assertEquals(icmp.getData().length, 60);
        byte[] serialized = eth.serialize();
        // Debug aid: print the index of every mismatching byte before asserting.
        for (int i = 0; i < serialized.length; ++i) {
            if (serialized[i] != icmpRequest[i])
                System.out.println(i);
        }
        assertTrue(Arrays.equals(icmpRequest, serialized));
        // Rebuild the same request via the fluent builder API.
        IPacket packet = new Ethernet()
            .setSourceMACAddress("00:0c:29:ea:51:0c")
            .setDestinationMACAddress("00:50:56:fc:8a:b7")
            .setPayload(new IPv4()
                .setDestinationAddress("192.168.3.11")
                .setFlags((byte) 0x2)
                .setSourceAddress("192.168.206.3")
                .setTtl((byte) 64)
                .setPayload(new ICMP()
                    .setType((byte) 8)
                    .setCode((byte) 0)
                    .setData(new byte [] {(byte) 0xf6, (byte) 0x1c, (byte) 0x00, (byte) 0x03,
                        (byte) 0xcc, (byte) 0xa8, (byte) 0x75, (byte) 0x50, (byte) 0x00,
                        (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x7c, (byte) 0x60,
                        (byte) 0x0d, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
                        (byte) 0x00, (byte) 0x10, (byte) 0x11, (byte) 0x12, (byte) 0x13,
                        (byte) 0x14, (byte) 0x15, (byte) 0x16, (byte) 0x17, (byte) 0x18,
                        (byte) 0x19, (byte) 0x1a, (byte) 0x1b, (byte) 0x1c, (byte) 0x1d,
                        (byte) 0x1e, (byte) 0x1f, (byte) 0x20, (byte) 0x21, (byte) 0x22,
                        (byte) 0x23, (byte) 0x24, (byte) 0x25, (byte) 0x26, (byte) 0x27,
                        (byte) 0x28, (byte) 0x29, (byte) 0x2a, (byte) 0x2b, (byte) 0x2c,
                        (byte) 0x2d, (byte) 0x2e, (byte) 0x2f, (byte) 0x30, (byte) 0x31,
                        (byte) 0x32, (byte) 0x33, (byte) 0x34, (byte) 0x35, (byte) 0x36,
                        (byte) 0x37})
                )
            );
        serialized = packet.serialize();
        assertEquals(eth, packet);
        assertTrue(Arrays.equals(icmpRequest, serialized));
    }

    /**
     * Same round-trip for the ICMP echo reply frame (type 0).
     */
    @Test
    public void testICMPReply() {
        Ethernet eth = new Ethernet();
        eth.deserialize(icmpReply, 0, icmpReply.length);
        assertTrue(eth.getPayload() instanceof IPv4);
        IPv4 ip = (IPv4) eth.getPayload();
        assertTrue(ip.getPayload() instanceof ICMP);
        ICMP icmp = (ICMP) ip.getPayload();
        assertEquals(icmp.getType(), 0);
        assertEquals(icmp.getCode(), 0);
        assertEquals(icmp.getChecksum(), 0x7fb3);
        assertEquals(icmp.getData().length, 60);
        byte[] serialized = eth.serialize();
        assertTrue(Arrays.equals(icmpReply, serialized));
        // Rebuild the same reply via the fluent builder API.
        IPacket packet = new Ethernet()
            .setSourceMACAddress("00:50:56:fc:8a:b7")
            .setDestinationMACAddress("00:0c:29:ea:51:0c")
            .setPayload(new IPv4()
                .setDestinationAddress("192.168.206.3")
                .setIdentification((short) 0x5d)
                .setSourceAddress("192.168.3.11")
                .setTtl((byte) 128)
                .setPayload(new ICMP()
                    .setType((byte) 0)
                    .setCode((byte) 0)
                    .setData(new byte [] { (byte) 0xf6, 0x1c, 0x00, 0x03,
                        (byte) 0xcc, (byte) 0xa8, 0x75, 0x50, 0x00, 0x00,
                        0x00, 0x00, 0x7c, 0x60, 0x0d, 0x00, 0x00, 0x00,
                        0x00, 0x00, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15,
                        0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
                        0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25,
                        0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d,
                        0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35,
                        0x36, 0x37 })
                )
            );
        serialized = packet.serialize();
        assertEquals(eth, packet);
        assertTrue(Arrays.equals(icmpReply, serialized));
    }
}
|
GFTN/gftn-services
|
quotes-service/utility/authservice/client_test.go
|
// © Copyright IBM Corporation 2020. All rights reserved.
// SPDX-License-Identifier: Apache2.0
//
package authservice
import (
"net/http"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
"github.com/GFTN/gftn-services/gftn-models/model"
)
// TestRequestSigning sets up the auth-service client fixtures. The actual
// signing assertions are currently disabled (see the commented So(...) calls),
// so for now the test only verifies that the fixtures can be constructed.
func TestRequestSigning(t *testing.T) {
	part := model.Participant{}
	URL := "http://localhost:8888"
	part.URLCallback = &URL

	csc := Client{
		HTTP: &http.Client{Timeout: time.Second * 10},
	}
	// BUGFIX: csc was declared but never referenced, which is a compile error
	// in Go ("declared and not used"). Keep it referenced until the assertions
	// below are re-enabled.
	_ = csc

	Convey("Successful get caller identity", t, func() {
		// So(err, ShouldBeNil)
		// So(signedXdr, ShouldNotBeNil)
	})
}
|
eneufeld/emfcloud-modelserver
|
bundles/org.eclipse.emfcloud.modelserver.emf/src/org/eclipse/emfcloud/modelserver/emf/common/EMFFacetConstraints.java
|
<gh_stars>0
/********************************************************************************
* Copyright (c) 2020 EclipseSource and others.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v. 2.0 which is available at
* https://www.eclipse.org/legal/epl-2.0, or the MIT License which is
* available at https://opensource.org/licenses/MIT.
*
* SPDX-License-Identifier: EPL-2.0 OR MIT
********************************************************************************/
package org.eclipse.emfcloud.modelserver.emf.common;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.util.ExtendedMetaData;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
/**
 * Bean describing the XSD-style facet constraints (whitespace handling,
 * enumeration, pattern, digit counts, length and value bounds) that EMF's
 * ExtendedMetaData exposes for a data type. Serialized to JSON via Jackson;
 * each property is emitted only when it differs from its default value
 * (see the per-field CUSTOM include filters).
 */
@JsonInclude(Include.NON_NULL)
public class EMFFacetConstraints {

   // Keys under which ExtendedMetaData reports each facet in the facet map.
   public static final String WHITESPACE = "whiteSpace";
   public static final String ENUMERATION = "enumeration";
   public static final String PATTERN = "pattern";
   public static final String TOTALDIGITS = "totalDigits";
   public static final String FRACTIONDIGITS = "fractionDigits";
   public static final String LENGTH = "length";
   public static final String MINLENGTH = "minLength";
   public static final String MAXLENGTH = "maxLength";
   public static final String MINEXCLUSIVE = "minExclusive";
   public static final String MAXEXCLUSIVE = "maxExclusive";
   public static final String MININCLUSIVE = "minInclusive";
   public static final String MAXINCLUSIVE = "maxInclusive";

   // Default facet values, sampled from EBoolean — a type without facet
   // restrictions — so each represents the "no constraint" value of its facet.
   // Used both by hasConstraints() and by the serialization filters below.
   public static final int WHITESPACE_DEFAULT = ExtendedMetaData.INSTANCE
      .getWhiteSpaceFacet(EcorePackage.Literals.EBOOLEAN);
   public static final List<String> ENUMERATION_DEFAULT = ExtendedMetaData.INSTANCE
      .getEnumerationFacet(EcorePackage.Literals.EBOOLEAN);
   public static final List<String> PATTERN_DEFAULT = ExtendedMetaData.INSTANCE
      .getPatternFacet(EcorePackage.Literals.EBOOLEAN);
   public static final int TOTALDIGITS_DEFAULT = ExtendedMetaData.INSTANCE
      .getTotalDigitsFacet(EcorePackage.Literals.EBOOLEAN);
   public static final int FRACTIONDIGITS_DEFAULT = ExtendedMetaData.INSTANCE
      .getFractionDigitsFacet(EcorePackage.Literals.EBOOLEAN);
   public static final int LENGTH_DEFAULT = ExtendedMetaData.INSTANCE
      .getLengthFacet(EcorePackage.Literals.EBOOLEAN);
   public static final int MINLENGTH_DEFAULT = ExtendedMetaData.INSTANCE
      .getMinLengthFacet(EcorePackage.Literals.EBOOLEAN);
   public static final int MAXLENGTH_DEFAULT = ExtendedMetaData.INSTANCE
      .getMaxLengthFacet(EcorePackage.Literals.EBOOLEAN);
   public static final String MINEXCLUSIVE_DEFAULT = ExtendedMetaData.INSTANCE
      .getMinExclusiveFacet(EcorePackage.Literals.EBOOLEAN);
   public static final String MAXEXCLUSIVE_DEFAULT = ExtendedMetaData.INSTANCE
      .getMaxExclusiveFacet(EcorePackage.Literals.EBOOLEAN);
   public static final String MININCLUSIVE_DEFAULT = ExtendedMetaData.INSTANCE
      .getMinInclusiveFacet(EcorePackage.Literals.EBOOLEAN);
   public static final String MAXINCLUSIVE_DEFAULT = ExtendedMetaData.INSTANCE
      .getMaxInclusiveFacet(EcorePackage.Literals.EBOOLEAN);

   // Each field is serialized only when it differs from its default; the
   // valueFilter class supplies that default (Include.CUSTOM semantics).
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = WhiteSpaceFilter.class)
   private Integer whiteSpace;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = EnumerationFilter.class)
   private List<String> enumeration;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = PatternFilter.class)
   private List<String> pattern;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = TotalDigitsFilter.class)
   private Integer totalDigits;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = FractionDigitsFilter.class)
   private Integer fractionDigits;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = LengthFilter.class)
   private Integer length;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = MinLengthFilter.class)
   private Integer minLength;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = MaxLengthFilter.class)
   private Integer maxLength;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = MinExclusiveFilter.class)
   private String minExclusive;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = MaxExclusiveFilter.class)
   private String maxExclusive;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = MinInclusiveFilter.class)
   private String minInclusive;
   @JsonInclude(value = JsonInclude.Include.CUSTOM, valueFilter = MaxInclusiveFilter.class)
   private String maxInclusive;

   public Integer getWhiteSpace() { return whiteSpace; }
   public void setWhiteSpace(final Integer whiteSpace) { this.whiteSpace = whiteSpace; }
   public List<String> getEnumeration() { return enumeration; }
   public void setEnumeration(final List<String> enumeration) { this.enumeration = enumeration; }
   public List<String> getPattern() { return pattern; }
   public void setPattern(final List<String> pattern) { this.pattern = pattern; }
   public Integer getTotalDigits() { return totalDigits; }
   public void setTotalDigits(final Integer totalDigits) { this.totalDigits = totalDigits; }
   public Integer getFractionDigits() { return fractionDigits; }
   public void setFractionDigits(final Integer fractionDigits) { this.fractionDigits = fractionDigits; }
   public Integer getLength() { return length; }
   public void setLength(final Integer length) { this.length = length; }
   public Integer getMinLength() { return minLength; }
   public void setMinLength(final Integer minLength) { this.minLength = minLength; }
   public Integer getMaxLength() { return maxLength; }
   public void setMaxLength(final Integer maxLength) { this.maxLength = maxLength; }
   public String getMinExclusive() { return minExclusive; }
   public void setMinExclusive(final String minExclusive) { this.minExclusive = minExclusive; }
   public String getMaxExclusive() { return maxExclusive; }
   public void setMaxExclusive(final String maxExclusive) { this.maxExclusive = maxExclusive; }
   public String getMinInclusive() { return minInclusive; }
   public void setMinInclusive(final String minInclusive) { this.minInclusive = minInclusive; }
   public String getMaxInclusive() { return maxInclusive; }
   public void setMaxInclusive(final String maxInclusive) { this.maxInclusive = maxInclusive; }

   /**
    * Builds the bean from an ExtendedMetaData-style facet map; missing keys
    * leave the field null. (getOrDefault(key, null) is equivalent to get(key).)
    */
   @SuppressWarnings("unchecked")
   public EMFFacetConstraints(final Map<String, Object> facetMap) {
      this.whiteSpace = (Integer) facetMap.getOrDefault(WHITESPACE, null);
      this.enumeration = (List<String>) facetMap.getOrDefault(ENUMERATION, null);
      this.pattern = (List<String>) facetMap.getOrDefault(PATTERN, null);
      this.totalDigits = (Integer) facetMap.getOrDefault(TOTALDIGITS, null);
      this.fractionDigits = (Integer) facetMap.getOrDefault(FRACTIONDIGITS, null);
      this.length = (Integer) facetMap.getOrDefault(LENGTH, null);
      this.minLength = (Integer) facetMap.getOrDefault(MINLENGTH, null);
      this.maxLength = (Integer) facetMap.getOrDefault(MAXLENGTH, null);
      this.minExclusive = (String) facetMap.getOrDefault(MINEXCLUSIVE, null);
      this.maxExclusive = (String) facetMap.getOrDefault(MAXEXCLUSIVE, null);
      this.minInclusive = (String) facetMap.getOrDefault(MININCLUSIVE, null);
      this.maxInclusive = (String) facetMap.getOrDefault(MAXINCLUSIVE, null);
   }

   public EMFFacetConstraints() {}

   /** @return true when at least one facet differs from its "no constraint" default. */
   @SuppressWarnings({ "CyclomaticComplexity", "BooleanExpressionComplexity" })
   public boolean hasConstraints() {
      return !(Objects.equals(this.whiteSpace, WHITESPACE_DEFAULT)
         && Objects.equals(this.enumeration, ENUMERATION_DEFAULT)
         && Objects.equals(this.pattern, PATTERN_DEFAULT)
         && Objects.equals(this.totalDigits, TOTALDIGITS_DEFAULT)
         && Objects.equals(this.fractionDigits, FRACTIONDIGITS_DEFAULT)
         && Objects.equals(this.length, LENGTH_DEFAULT)
         && Objects.equals(this.minLength, MINLENGTH_DEFAULT)
         && Objects.equals(this.maxLength, MAXLENGTH_DEFAULT)
         && Objects.equals(this.minExclusive, MINEXCLUSIVE_DEFAULT)
         && Objects.equals(this.maxExclusive, MAXEXCLUSIVE_DEFAULT)
         && Objects.equals(this.minInclusive, MININCLUSIVE_DEFAULT)
         && Objects.equals(this.maxInclusive, MAXINCLUSIVE_DEFAULT));
   }
}
/**
 * Base class for the Jackson value filters used with Include.CUSTOM above.
 * Jackson instantiates the filter and calls equals(fieldValue); a true result
 * suppresses the property. The asymmetric equals() is therefore intentional:
 * it compares the serialized field value against the facet's default, it is
 * NOT a general-purpose equality between filter instances.
 */
class DefaultValueFilter {

   protected Object defaultValue;

   DefaultValueFilter(final Object defaultValue) {
      this.defaultValue = defaultValue;
   }

   @Override
   public boolean equals(final Object obj) {
      // true ("omit from JSON") when the field value equals the default.
      return Objects.equals(defaultValue, obj);
   }

   @Override
   public int hashCode() {
      return super.hashCode();
   }
}
// Jackson value filter: omit "whiteSpace" when it equals the EMF default.
class WhiteSpaceFilter extends DefaultValueFilter {
   WhiteSpaceFilter() {
      super(EMFFacetConstraints.WHITESPACE_DEFAULT);
   }
}
// Jackson value filter: omit "enumeration" when it equals the EMF default.
class EnumerationFilter extends DefaultValueFilter {
   EnumerationFilter() {
      super(EMFFacetConstraints.ENUMERATION_DEFAULT);
   }
}
// Jackson value filter: omit "pattern" when it equals the EMF default.
class PatternFilter extends DefaultValueFilter {
   PatternFilter() {
      super(EMFFacetConstraints.PATTERN_DEFAULT);
   }
}
// Jackson value filter: omit "totalDigits" when it equals the EMF default.
class TotalDigitsFilter extends DefaultValueFilter {
   TotalDigitsFilter() {
      super(EMFFacetConstraints.TOTALDIGITS_DEFAULT);
   }
}
// Jackson value filter: omit "fractionDigits" when it equals the EMF default.
class FractionDigitsFilter extends DefaultValueFilter {
   FractionDigitsFilter() {
      super(EMFFacetConstraints.FRACTIONDIGITS_DEFAULT);
   }
}
// Jackson value filter: omit "length" when it equals the EMF default.
class LengthFilter extends DefaultValueFilter {
   LengthFilter() {
      super(EMFFacetConstraints.LENGTH_DEFAULT);
   }
}
// Jackson value filter: omit "minLength" when it equals the EMF default.
class MinLengthFilter extends DefaultValueFilter {
   MinLengthFilter() {
      super(EMFFacetConstraints.MINLENGTH_DEFAULT);
   }
}
// Jackson value filter: omit "maxLength" when it equals the EMF default.
class MaxLengthFilter extends DefaultValueFilter {
   MaxLengthFilter() {
      super(EMFFacetConstraints.MAXLENGTH_DEFAULT);
   }
}
// Jackson value filter: omit "minExclusive" when it equals the EMF default.
class MinExclusiveFilter extends DefaultValueFilter {
   MinExclusiveFilter() {
      super(EMFFacetConstraints.MINEXCLUSIVE_DEFAULT);
   }
}
// Jackson value filter: omit "maxExclusive" when it equals the EMF default.
class MaxExclusiveFilter extends DefaultValueFilter {
   MaxExclusiveFilter() {
      super(EMFFacetConstraints.MAXEXCLUSIVE_DEFAULT);
   }
}
// Jackson value filter: omit "minInclusive" when it equals the EMF default.
class MinInclusiveFilter extends DefaultValueFilter {
   MinInclusiveFilter() {
      super(EMFFacetConstraints.MININCLUSIVE_DEFAULT);
   }
}
// Jackson value filter: omit "maxInclusive" when it equals the EMF default.
class MaxInclusiveFilter extends DefaultValueFilter {
   MaxInclusiveFilter() {
      super(EMFFacetConstraints.MAXINCLUSIVE_DEFAULT);
   }
}
|
thechief389/homebrew-core
|
Formula/a52dec.rb
|
# Homebrew formula for liba52 / a52dec, an ATSC A/52 ("AC-3") audio decoder.
class A52dec < Formula
  desc "Library for decoding ATSC A/52 streams (AKA 'AC-3')"
  homepage "https://liba52.sourceforge.io/"
  url "https://liba52.sourceforge.io/files/a52dec-0.7.4.tar.gz"
  sha256 "a21d724ab3b3933330194353687df82c475b5dfb997513eef4c25de6c865ec33"

  # NOTE(review): legacy bottle DSL (`cellar :any`, `sha256 "..." => :mojave`);
  # current Homebrew uses `sha256 cellar: :any, mojave: "..."`. Confirm before
  # updating for a newer brew.
  bottle do
    cellar :any
    sha256 "5186add25fb7aae66c80139b56ea81fb3aff1f87e74354c2cebe022374532286" => :mojave
    sha256 "c7485fae127d02a6628186b821d72fc40bc82019ae30ddee0abfacf2b63701dc" => :high_sierra
    sha256 "150123fdf33421b85f4625d73c015a660300db698a82fd0165475fe8e7081d0b" => :sierra
    sha256 "2feac8f578fd1aa43385ce8714b059c90f4aaf3a0401d8ba825939e8e8ab7b1b" => :el_capitan
    sha256 "cfe0ddcf275b03d9ad0c626174962601ae18d3f77fa35266228425b818ce4a7f" => :yosemite
    sha256 "cb40eed81300fa32069bac07244cf7f56363cc04af853ddb75bada292e8d9912" => :mavericks
  end

  # Standard autotools build; shared library explicitly enabled.
  def install
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--enable-shared",
                          "--mandir=#{man}"
    system "make", "install"
  end

  # Smoke test: decode an empty file to the null output; exercises only the
  # CLI wiring, not actual A/52 decoding.
  test do
    touch testpath/"test"
    system "#{bin}/a52dec", "-o", "null", "test"
  end
end
|
chalk-xx/kernel
|
bundles/captcha/src/main/java/org/sakaiproject/nakamura/captcha/ReCaptchaRequestTrustValidator.java
|
<gh_stars>0
/*
* Licensed to the Sakai Foundation (SF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.sakaiproject.nakamura.captcha;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.sakaiproject.nakamura.api.auth.trusted.RequestTrustValidator;
import org.sakaiproject.nakamura.api.captcha.CaptchaService;
import javax.servlet.http.HttpServletRequest;
@Service
@Component
@Properties(value = {
@Property(name = "service.vendor", value = "The Sakai Foundation"),
@Property(name = "service.description", value = "Validates a request with the reCAPTCHA.net service."),
@Property(name = RequestTrustValidator.VALIDATOR_NAME, value = "reCAPTCHA.net") })
public class ReCaptchaRequestTrustValidator implements RequestTrustValidator {
@Reference
protected transient CaptchaService captchaService;
/**
* {@inheritDoc}
*
* @see org.sakaiproject.nakamura.api.auth.trusted.RequestTrustValidator#getLevel()
*/
public int getLevel() {
return RequestTrustValidator.CREATE_USER;
}
/**
* {@inheritDoc}
*
* @see org.sakaiproject.nakamura.api.auth.trusted.RequestTrustValidator#isTrusted(javax.servlet.http.HttpServletRequest)
*/
public boolean isTrusted(HttpServletRequest request) {
return captchaService.checkRequest(request);
}
}
|
akshitdewan/cs61a-apps
|
examtool/examtool/cli/login.py
|
import click
from examtool.api.auth import refresh_token
@click.command()
@click.option(
    "--browser/--no-browser",
    default=True,
    help="Choose between browser-based and browserless authentication",
)
def login(browser):
    """
    Login to OKPy.
    """
    # The flag is expressed positively on the CLI but the API expects the
    # negated form.
    use_browserless = not browser
    token = refresh_token(no_browser=use_browserless)
    print(f"Token = {token}")
    print("Token automatically saved")


if __name__ == "__main__":
    login()
|
Rayato159/paper-plane-backend
|
internals/accounts/usecases/usecase.go
|
<filename>internals/accounts/usecases/usecase.go
package usecases
import (
"github.com/paper-plane/internals/accounts"
"github.com/paper-plane/internals/models"
)
// accountsUsecase implements accounts.Usecase by delegating persistence
// to an accounts.Repository.
type accountsUsecase struct {
accountsRepo accounts.Repository
}
// NewAccountsUsecase wires the given repository into a new accounts usecase.
func NewAccountsUsecase(accountsRepo accounts.Repository) accounts.Usecase {
return &accountsUsecase{accountsRepo: accountsRepo}
}
// GetAccountInfo returns the account information for accountId, scoped to
// userId, straight from the repository.
func (u *accountsUsecase) GetAccountInfo(accountId string, userId string) (*models.AccountInfo, error) {
	// The repository call already has the exact (result, error) shape we
	// return, so forward it directly instead of unpacking and re-packing.
	return u.accountsRepo.GetAccountInfo(accountId, userId)
}
// UpdateBalance persists the new balance for accountId (scoped to userId)
// and returns the account's refreshed information.
func (u *accountsUsecase) UpdateBalance(accountId string, balance float64, userId string) (*models.AccountInfo, error) {
	if err := u.accountsRepo.UpdateBalance(accountId, balance, userId); err != nil {
		return nil, err
	}
	// Re-read after the write so the caller sees the post-update state;
	// the fetch already matches our return shape, so forward it directly.
	return u.accountsRepo.GetAccountInfo(accountId, userId)
}
|
alexica3000/tekwill-homework
|
src/com/tekwill/learning/hw/LongestCommonPrefix.java
|
<reponame>alexica3000/tekwill-homework
package com.tekwill.learning.hw;
import java.util.Scanner;
/**
 * Reads two strings from standard input and reports their longest common
 * prefix (or that they share none).
 */
public class LongestCommonPrefix {

    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        System.out.print("Enter the first string: ");
        String string1 = scanner.nextLine();
        System.out.print("Enter the second string: ");
        String string2 = scanner.nextLine();

        String common = commonPrefix(string1, string2);
        // An empty prefix means the strings diverge at the very first character.
        if (common.isEmpty()) {
            System.out.println(string1 + " and " + string2 + " have no common prefix");
        } else {
            System.out.println("The common prefix is " + common);
        }
    }

    /**
     * Returns the longest prefix shared by both strings; the empty string
     * when they have no character in common at position 0.
     */
    public static String commonPrefix(String string1, String string2) {
        int limit = Math.min(string1.length(), string2.length());
        int matched = 0;
        // Advance while characters agree; stop at the first mismatch or at
        // the end of the shorter string.
        while (matched < limit && string1.charAt(matched) == string2.charAt(matched)) {
            matched++;
        }
        return string1.substring(0, matched);
    }
}
|
Ahleroy/deeplodocus
|
setup.py
|
import os
from setuptools import find_packages, setup
from deeplodocus import __version__
# Take the distribution version directly from deeplodocus.__version__
# (imported above). The alternative below would derive it via a helper
# instead; kept for reference.
#version = __import__('deeplodocus').get_version()
version = __version__
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    The file is decoded explicitly as UTF-8 so the long_description is read
    identically on every platform, instead of depending on the locale's
    default encoding.
    """
    with open(os.path.join(os.path.dirname(__file__), fname), encoding='utf-8') as f:
        return f.read()
# Packages that could be excluded from the distribution (currently unused;
# find_packages() below is called without the exclude filter).
EXCLUDE_FROM_PACKAGES = ['deeplodocus.bin']

setup(
    name='Deeplodocus',
    version=version,
    python_requires='>=3.5.3',
    url='https://www.deeplodocus.org/',
    author='<NAME> and <NAME>',
    author_email='<EMAIL>',
    description=('The Deep Learning framework keeping your head above water'),
    long_description=read('README.rst'),
    license='MIT',
    #packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
    packages=find_packages(),
    include_package_data=True,
    scripts=['deeplodocus/bin/deeplodocus-admin.py'],
    entry_points={'console_scripts': [
        'deeplodocus = deeplodocus.core.management:execute_from_command_line',
    ]},
    install_requires=['numpy>=1.15.1',
                      'pyyaml>=3.13',
                      'pandas>=0.23.1',
                      'matplotlib>=2.2.2',
                      'aiohttp>=3.4.0',
                      'aiohttp_jinja2>=1.1.0',
                      # Fixed: '5-4.8' is not a valid PEP 440 version
                      # specifier and would make pip reject the requirement;
                      # the intended minimum is psutil 5.4.8.
                      'psutil>=5.4.8',
                      'graphviz',
                      'pydot',
                      "opencv-python >= 3.4.1"],
    extras_require={
    },
    zip_safe=False,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3 :: Only',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    project_urls={
        'Documentation': 'https://www.deeplodocus.org/',
        'Source': 'https://github.com/Deeplodocus/deeplodocus/',
    },
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.