prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>calculate_size.js<|end_file_name|><|fim▁begin|>"use strict"
var writeIEEE754 = require('../float_parser').writeIEEE754
, readIEEE754 = require('../float_parser').readIEEE754
, Long = require('../long').Long
, Double = require('../double').Double
, Timestamp = require('../timestamp').Timestamp
, ObjectID = require('../objectid').ObjectID
, Symbol = require('../symbol').Symbol
, BSONRegExp = require('../regexp').BSONRegExp
, Code = require('../code').Code
, Decimal128 = require('../decimal128')
, MinKey = require('../min_key').MinKey
, MaxKey = require('../max_key').MaxKey
, DBRef = require('../db_ref').DBRef
, Binary = require('../binary').Binary;
// To ensure that 0.4 of node works correctly
// Detect Date values on very old node (0.4), where `instanceof Date`
// can fail across contexts; the Object.prototype.toString tag is reliable.
var isDate = function isDate(d) {
  if (typeof d !== 'object') return false;
  return Object.prototype.toString.call(d) === '[object Date]';
}
/**
 * Compute the total serialized BSON size, in bytes, of a document or array.
 *
 * @param {Object|Array} object document (or array) to measure
 * @param {Boolean} serializeFunctions whether functions are serialized as code
 * @param {Boolean} ignoreUndefined whether undefined values are skipped
 * @returns {Number} the byte length of the serialized form
 */
var calculateObjectSize = function calculateObjectSize(object, serializeFunctions, ignoreUndefined) {
  // 4 bytes for the int32 length header plus 1 byte for the trailing null.
  var size = 4 + 1;

  if (Array.isArray(object)) {
    // Array elements are serialized using their index as the element name.
    for (var idx = 0; idx < object.length; idx++) {
      size += calculateElement(idx.toString(), object[idx], serializeFunctions, true, ignoreUndefined);
    }
    return size;
  }

  // If the object defines toBSON, measure the converted value instead.
  if (object.toBSON) {
    object = object.toBSON();
  }

  for (var key in object) {
    size += calculateElement(key, object[key], serializeFunctions, false, ignoreUndefined);
  }

  return size;
}
/**
* @ignore
* @api private
*/
<|fim▁hole|> }
switch(typeof value) {
case 'string':
return 1 + Buffer.byteLength(name, 'utf8') + 1 + 4 + Buffer.byteLength(value, 'utf8') + 1;
case 'number':
if(Math.floor(value) === value && value >= BSON.JS_INT_MIN && value <= BSON.JS_INT_MAX) {
if(value >= BSON.BSON_INT32_MIN && value <= BSON.BSON_INT32_MAX) { // 32 bit
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (4 + 1);
} else {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (8 + 1);
}
} else { // 64 bit
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (8 + 1);
}
case 'undefined':
if(isArray || !ignoreUndefined) return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (1);
return 0;
case 'boolean':
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (1 + 1);
case 'object':
if(value == null || value instanceof MinKey || value instanceof MaxKey || value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (1);
} else if(value instanceof ObjectID || value['_bsontype'] == 'ObjectID') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (12 + 1);
} else if(value instanceof Date || isDate(value)) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (8 + 1);
} else if(typeof Buffer !== 'undefined' && Buffer.isBuffer(value)) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (1 + 4 + 1) + value.length;
} else if(value instanceof Long || value instanceof Double || value instanceof Timestamp
|| value['_bsontype'] == 'Long' || value['_bsontype'] == 'Double' || value['_bsontype'] == 'Timestamp') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (8 + 1);
} else if(value instanceof Decimal128 || value['_bsontype'] == 'Decimal128') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (16 + 1);
} else if(value instanceof Code || value['_bsontype'] == 'Code') {
// Calculate size depending on the availability of a scope
if(value.scope != null && Object.keys(value.scope).length > 0) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + 4 + Buffer.byteLength(value.code.toString(), 'utf8') + 1 + calculateObjectSize(value.scope, serializeFunctions, ignoreUndefined);
} else {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + Buffer.byteLength(value.code.toString(), 'utf8') + 1;
}
} else if(value instanceof Binary || value['_bsontype'] == 'Binary') {
// Check what kind of subtype we have
if(value.sub_type == Binary.SUBTYPE_BYTE_ARRAY) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (value.position + 1 + 4 + 1 + 4);
} else {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (value.position + 1 + 4 + 1);
}
} else if(value instanceof Symbol || value['_bsontype'] == 'Symbol') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + Buffer.byteLength(value.value, 'utf8') + 4 + 1 + 1;
} else if(value instanceof DBRef || value['_bsontype'] == 'DBRef') {
// Set up correct object for serialization
var ordered_values = {
'$ref': value.namespace
, '$id' : value.oid
};
// Add db reference if it exists
if(null != value.db) {
ordered_values['$db'] = value.db;
}
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + calculateObjectSize(ordered_values, serializeFunctions, ignoreUndefined);
} else if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + Buffer.byteLength(value.source, 'utf8') + 1
+ (value.global ? 1 : 0) + (value.ignoreCase ? 1 : 0) + (value.multiline ? 1 : 0) + 1
} else if(value instanceof BSONRegExp || value['_bsontype'] == 'BSONRegExp') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + Buffer.byteLength(value.pattern, 'utf8') + 1
+ Buffer.byteLength(value.options, 'utf8') + 1
} else {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + calculateObjectSize(value, serializeFunctions, ignoreUndefined) + 1;
}
case 'function':
// WTF for 0.4.X where typeof /someregexp/ === 'function'
if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]' || String.call(value) == '[object RegExp]') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + Buffer.byteLength(value.source, 'utf8') + 1
+ (value.global ? 1 : 0) + (value.ignoreCase ? 1 : 0) + (value.multiline ? 1 : 0) + 1
} else {
if(serializeFunctions && value.scope != null && Object.keys(value.scope).length > 0) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + 4 + Buffer.byteLength(value.toString(), 'utf8') + 1 + calculateObjectSize(value.scope, serializeFunctions, ignoreUndefined);
} else if(serializeFunctions) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + Buffer.byteLength(value.toString(), 'utf8') + 1;
}
}
}
return 0;
}
// Namespace holding the numeric limits used when sizing integer values.
var BSON = {};
// BSON MAX VALUES (signed 32 bit range of the BSON int32 type)
BSON.BSON_INT32_MAX = 0x7FFFFFFF;
BSON.BSON_INT32_MIN = -0x80000000;
// JS MAX PRECISE VALUES (outside this range a double loses integer precision)
BSON.JS_INT_MAX = 0x20000000000000; // Any integer up to 2^53 can be precisely represented by a double.
BSON.JS_INT_MIN = -0x20000000000000; // Any integer down to -2^53 can be precisely represented by a double.
module.exports = calculateObjectSize;<|fim▁end|> | function calculateElement(name, value, serializeFunctions, isArray, ignoreUndefined) {
// If we have toBSON defined, override the current object
if(value && value.toBSON){
value = value.toBSON();
|
<|file_name|>auth.guard.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { ActivatedRouteSnapshot, CanActivate, Router, RouterStateSnapshot } from '@angular/router';<|fim▁hole|>
@Injectable()
export class AuthGuard implements CanActivate {
constructor(protected _router: Router,
private _appStore: AppState) {}
/**
 * Checks that the user is connected and if so, permits the activation of the wanted state.
 * If the user is not authenticated, he is redirected toward the sign-in page.
 *
 * @param route current route
 * @param state wanted state
 * @returns {boolean} true if the user is authenticated
 */
canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): boolean {
  // A user counts as authenticated as soon as an access token exists in the store.
  const canGo = this._appStore.tokens.accessToken !== undefined;
  if (!canGo) {
    this._router.navigate([ '/sign-in' ]);
  }
  return canGo;
}
}<|fim▁end|> | import { AppState } from '@genesis/$core/store/app.state'; |
<|file_name|>store_test.go<|end_file_name|><|fim▁begin|>package gomdi
import (
"fmt"
"testing"
)
// Modify which store we're using since we want to test with a fast datastore
func init() {
// Implicitly create the tables.
Store = NewTestStore(&testModel{})
}
// Even more basic than the memory implementation
type testStore map[string]map[string]interface{}
// NewTestStore builds an in-memory store with one empty table per model.
func NewTestStore(models ...Model) testStore {
	store := testStore{}
	for _, model := range models {
		store[model.Table()] = map[string]interface{}{}
	}
	return store
}
func (m testStore) CreateTable(model Model) {}
// Save stores the model under a fixed id ("1"), mimicking an
// auto-assigned primary key. It never returns an error itself.
func (m testStore) Save(model Model) error {
model.SetId("1")
m[model.Table()][model.Id()] = interface{}(model)
return nil
}
// Get returns the stored value for id in the model's table (nil when
// absent). The model argument is only used to resolve the table name.
func (m testStore) Get(id string, model Model) (interface{}, error) {
return m[model.Table()][id], nil
}
func (m testStore) Filter(field string, value interface{}, model Model) ([]interface{}, error) {
return []interface{}{}, nil
}
func (m testStore) Exists(model Model) bool {
if _, ok := m[model.Table()][model.Id()]; ok {
return true
}
return false
}<|fim▁hole|>// Make a fake Model
// testModel is a minimal Model implementation used as a test fixture.
type testModel struct {
// Pk is the primary key, assigned by the store on Save.
Pk string
// Data is an arbitrary payload used for equality checks in tests.
Data string
}
// newTestModel returns a testModel carrying the given payload and no id yet.
func newTestModel(data string) *testModel {
return &testModel{
Data: data,
}
}
// Id returns the assigned primary key ("" until saved).
func (t *testModel) Id() string { return t.Pk }
// SetId records the primary key assigned by the store.
func (t *testModel) SetId(s string) { t.Pk = s }
// Convert copies a stored value back into this instance.
func (t *testModel) Convert(i interface{}) { *t = *i.(*testModel) }
// Table names the (virtual) table this model lives in.
func (t *testModel) Table() string { return "testModels" }
// Validate fails only for the "ALWAYS FAIL" sentinel, letting tests
// exercise the error path of Save.
func (t *testModel) Validate() error {
if t.Data == "ALWAYS FAIL" {
return fmt.Errorf("Failed.")
}
return nil
}
// Equal compares models by payload only, ignoring ids.
func (t *testModel) Equal(i interface{}) bool {
test := i.(*testModel)
return test.Data == t.Data
}
// The Save function is responsible for setting the Id on a model
// TestSave verifies that Save assigns an id on success and that
// validation failures surface as errors.
func TestSave(t *testing.T) {
model := newTestModel("data")
err := Save(model)
if model.Id() == "" {
t.Errorf("Id is not set correctly")
}
if err != nil {
t.Errorf("Should not be an error saving")
}
// The sentinel payload makes testModel.Validate return an error.
model = newTestModel("ALWAYS FAIL")
err = Save(model)
if err == nil {
t.Errorf("We should have had an error here")
}
}
// TestGet verifies that a saved model can be read back by id.
func TestGet(t *testing.T) {
model := newTestModel("cookie")
Save(model)
newModel := &testModel{}
// NOTE(review): the package-level Get presumably copies the stored value
// into newModel (likely via Convert) — its definition is not in this file.
Get(model.Id(), newModel)
if newModel.Data != "cookie" {
t.Errorf("Get is not working properly")
}
}
// TestFilter exercises the package-level Filter helper against the test store.
func TestFilter(t *testing.T) {
model := newTestModel("banana")
Save(model)
models, err := Filter("Data", "banana", &testModel{})
if err != nil {
t.Errorf("Generic filter function is broken")
}
// NOTE(review): testStore.Filter always returns an empty, non-nil slice,
// so this nil check never fires against the test store.
if models == nil {
t.Errorf("Generic filter did not filter properly")
}
}
func TestExists(t *testing.T) {
model := newTestModel("banana")
Save(model)
if !Exists(model) {
t.Errorf("Existance check failure")
}
}<|fim▁end|> | func (m testStore) Clear() {}
func (m testStore) Len() int { return 0 }
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>jQuery(document).ready(function($){
var is_firefox = navigator.userAgent.indexOf('Firefox') > -1;<|fim▁hole|> $('#cd-team').find('ul a').on('click', function(event){
event.preventDefault();
var selected_member = $(this).data('type');
$('.cd-member-bio.'+selected_member+'').addClass('slide-in');
$('#mainNav').hide();
$('.cd-member-bio-close').addClass('is-visible');
// firefox transitions break when parent overflow is changed, so we need to wait for the end of the trasition to give the body an overflow hidden
if( is_firefox ) {
$('main').addClass('slide-out').one('webkitTransitionEnd otransitionend oTransitionEnd msTransitionEnd transitionend', function(){
$('body').addClass('overflow-hidden');
});
} else {
$('main').addClass('slide-out');
$('body').addClass('overflow-hidden');
}
});
//close team-member bio
$(document).on('click', '.cd-overlay, .cd-member-bio-close', function(event){
event.preventDefault();
$('.cd-member-bio').removeClass('slide-in');
$('#mainNav').show();
$('.cd-member-bio-close').removeClass('is-visible');
if( is_firefox ) {
$('main').removeClass('slide-out').one('webkitTransitionEnd otransitionend oTransitionEnd msTransitionEnd transitionend', function(){
$('body').removeClass('overflow-hidden');
});
} else {
$('main').removeClass('slide-out');
$('body').removeClass('overflow-hidden');
}
});
});<|fim▁end|> |
//open team-member bio |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Peerz - P2P python library using ZeroMQ sockets and gevent
# Copyright (C) 2014-2015 Steve Henderson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
from transitions import Machine
class MessageState(object):<|fim▁hole|> {'trigger': 'query', 'source': 'initialised', 'dest': 'waiting response', 'before': '_update', 'after': '_send_query'},
{'trigger': 'response', 'source': 'waiting response', 'dest': 'complete', 'before': '_update', 'after': '_completed'},
{'trigger': 'timeout', 'source': '*', 'dest': 'timedout', 'before': '_update', 'after': '_completed', },
]
def __init__(self, engine, txid, msg, callback=None, max_duration=5000, max_concurrency=3):
"""Set up the state machine and immediately fire the initial query.

:param engine: Owning engine, kept for subclasses to send messages with.
:param txid: Transaction id correlating this request and its response.
:param msg: Raw message frames; consumed by parse_message.
:param callback: Optional completion callback for subclasses to invoke.
:param max_duration: Overall timeout budget in milliseconds.
:param max_concurrency: Max parallel requests a subclass may issue.
"""
self.engine = engine
self.callback = callback
self.machine = Machine(model=self,
states=self.states,
transitions=self.transitions,
initial='initialised')
# All timestamps in this class are milliseconds since the epoch.
self.start = self.last_change = time.time() * 1000
self.max_duration = max_duration
self.max_concurrency = max_concurrency
self.txid = txid
self.times = {}
self.parse_message(msg)
self.query()
def query(self):
"""Trigger hook for issuing the outgoing request; no-op by default."""
pass
def parse_message(self, msg):
"""Extract fields from the raw frames. NOTE: pops from (mutates) msg."""
self.val = msg.pop(0)
def is_complete(self):
    """Return True once the exchange has finished (responded or timed out)."""
    return self.state in ('complete', 'timedout')
def pack_request(self):
"""Serialize this request into frames; default stub returns None
(concrete message types presumably override — confirm)."""
return None
@staticmethod
def unpack_response(content):
"""Deserialize response frames; default stub returns None."""
return None
@staticmethod
def pack_response(content):
"""Serialize response values into frames; default stub returns None."""
return None
def _update(self):
"""Charge the elapsed wall-clock time (ms) to the state being exited."""
now = time.time() * 1000
# First visit to this state starts its accumulator at zero.
self.times.setdefault(self.state, 0.0)
self.times[self.state] += (now - self.last_change)
self.last_change = now
def duration(self):
    """Wall-clock milliseconds elapsed since this exchange was created."""
    now_ms = time.time() * 1000
    return now_ms - self.start
def latency(self):
"""Milliseconds spent in 'waiting response' so far (0.0 if never entered).

Note: setdefault also seeds the key in self.times as a side effect.
"""
return self.times.setdefault('waiting response', 0.0)
def _send_query(self):
pass
def _completed(self):
pass<|fim▁end|> | states = ['initialised', 'waiting response', 'complete', 'timedout']
transitions = [ |
<|file_name|>trait-safety-inherent-impl.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that inherent impls cannot be unsafe.
struct SomeStruct;
unsafe impl SomeStruct { //~ ERROR inherent impls cannot be declared as unsafe
fn foo(self) { }
}<|fim▁hole|><|fim▁end|> |
fn main() { } |
<|file_name|>sqlalchemy.py<|end_file_name|><|fim▁begin|>""" SQLAlchemy support. """
from __future__ import absolute_import
import datetime
from types import GeneratorType
import decimal
from sqlalchemy import func
# from sqlalchemy.orm.interfaces import MANYTOONE
from sqlalchemy.orm.collections import InstrumentedList
from sqlalchemy.sql.type_api import TypeDecorator
try:
from sqlalchemy.orm.relationships import RelationshipProperty
except ImportError:
from sqlalchemy.orm.properties import RelationshipProperty
from sqlalchemy.types import (
BIGINT, BOOLEAN, BigInteger, Boolean, CHAR, DATE, DATETIME, DECIMAL, Date,
DateTime, FLOAT, Float, INT, INTEGER, Integer, NCHAR, NVARCHAR, NUMERIC,
Numeric, SMALLINT, SmallInteger, String, TEXT, TIME, Text, Time, Unicode,
UnicodeText, VARCHAR, Enum)
from .. import mix_types as t
from ..main import (
SKIP_VALUE, LOGGER, TypeMixer as BaseTypeMixer, GenFactory as BaseFactory,
Mixer as BaseMixer, partial, faker)
class GenFactory(BaseFactory):
""" Map a sqlalchemy classes to simple types. """
types = {
(String, VARCHAR, Unicode, NVARCHAR, NCHAR, CHAR): str,
(Text, UnicodeText, TEXT): t.Text,
(Boolean, BOOLEAN): bool,
(Date, DATE): datetime.date,
(DateTime, DATETIME): datetime.datetime,
(Time, TIME): datetime.time,
(DECIMAL, Numeric, NUMERIC): decimal.Decimal,
(Float, FLOAT): float,
(Integer, INTEGER, INT): int,
(BigInteger, BIGINT): t.BigInteger,
(SmallInteger, SMALLINT): t.SmallInteger,
}
class TypeMixer(BaseTypeMixer):
""" TypeMixer for SQLAlchemy. """
factory = GenFactory
def __init__(self, cls, **params):
""" Init TypeMixer and save the mapper. """
super(TypeMixer, self).__init__(cls, **params)
self.mapper = self.__scheme._sa_class_manager.mapper
def postprocess(self, target, postprocess_values):
""" Fill deferred (post-generation) values onto the target instance.

:param target: The generated model instance.
:param postprocess_values: Iterable of (attribute name, deferred value).
:return: The target, possibly passed through the mixer's postprocess.
"""
mixed = []
for name, deffered in postprocess_values:
value = deffered.value
# Generator values are advanced one step to get a concrete value.
if isinstance(value, GeneratorType):
value = next(value)
# Mix expressions need the finished target, so resolve them last.
if isinstance(value, t.Mix):
mixed.append((name, value))
continue
# Collection relationships expect a list, even for a single item.
if isinstance(getattr(target, name), InstrumentedList) and not isinstance(value, list):
value = [value]
setattr(target, name, value)
for name, mix in mixed:
setattr(target, name, mix & target)
if self.__mixer:
target = self.__mixer.postprocess(target)
return target
@staticmethod
def get_default(field):
""" Get default value from field.

For relationship fields the default is read from the local column of
the first local/remote pair.

:return value: The column default (calling it if callable) or SKIP_VALUE
"""
column = field.scheme
# Relationships carry their default on the underlying local column.
if isinstance(column, RelationshipProperty):
column = column.local_remote_pairs[0][0]
if not column.default:
return SKIP_VALUE
# Callable defaults are invoked with None — presumably standing in for
# SQLAlchemy's execution context; confirm against callers.
if column.default.is_callable:
return column.default.arg(None)
return getattr(column.default, 'arg', SKIP_VALUE)
def gen_select(self, field_name, select):
""" Select exists value from database.
:param field_name: Name of field for generation.
:return : None or (name, value) for later use
"""
if not self.__mixer or not self.__mixer.params.get('session'):
return field_name, SKIP_VALUE
relation = self.mapper.get_property(field_name)
session = self.__mixer.params.get('session')
value = session.query(
relation.mapper.class_
).filter(*select.choices).order_by(func.random()).first()
return self.get_value(field_name, value)
@staticmethod
def is_unique(field):
    """Return True if the field's value must be unique.

    :return bool:
    """
    column = field.scheme
    # Relationships: inspect the local column of the first join pair.
    if isinstance(column, RelationshipProperty):
        column = column.local_remote_pairs[0][0]
    return column.unique
@staticmethod
def is_required(field):
""" Return True if the field's value must be generated.

:return bool:
"""
column = field.scheme
if isinstance(column, RelationshipProperty):
column = column.local_remote_pairs[0][0]
# Explicit user-supplied params always force generation.
if field.params:
return True
# According to the SQLAlchemy docs, autoincrement "only has an effect for columns which are
# Integer derived (i.e. INT, SMALLINT, BIGINT) [and] Part of the primary key [...]".
return not column.nullable and not (column.autoincrement and column.primary_key and
isinstance(column.type, Integer))
def get_value(self, field_name, field_value):
""" Get `value` as `field_name`.
:return : None or (name, value) for later use
"""
field = self.__fields.get(field_name)
if field and isinstance(field.scheme, RelationshipProperty):
return field_name, t._Deffered(field_value, field.scheme)
return super(TypeMixer, self).get_value(field_name, field_value)
def make_fabric(self, column, field_name=None, fake=False, kwargs=None): # noqa
""" Make values fabric for column.
:param column: SqlAlchemy column
:param field_name: Field name
:param fake: Force fake data
:return function:
"""
kwargs = {} if kwargs is None else kwargs
if isinstance(column, RelationshipProperty):
return partial(type(self)(
column.mapper.class_, mixer=self.__mixer, fake=self.__fake, factory=self.__factory
).blend, **kwargs)
ftype = type(column.type)
# augmented types created with TypeDecorator
# don't directly inherit from the base types
if TypeDecorator in ftype.__bases__:
ftype = ftype.impl
stype = self.__factory.cls_to_simple(ftype)
if stype is str:
fab = super(TypeMixer, self).make_fabric(
stype, field_name=field_name, fake=fake, kwargs=kwargs)
return lambda: fab()[:column.type.length]
if ftype is Enum:
return partial(faker.random_element, column.type.enums)
return super(TypeMixer, self).make_fabric(
stype, field_name=field_name, fake=fake, kwargs=kwargs)
def guard(self, *args, **kwargs):
""" Look up existing objects in the database.

:returns: A single matching object, a list when several match,
    or False when nothing matches.
:raises ValueError: If no session is configured on the mixer.
"""
try:
session = self.__mixer.params.get('session')
assert session
except (AttributeError, AssertionError):
raise ValueError('Cannot make request to DB.')
# NOTE(review): Query.filter accepts positional criteria only; passing
# **kwargs here looks like it would raise — confirm filter_by wasn't meant.
qs = session.query(self.mapper).filter(*args, **kwargs)
count = qs.count()
if count == 1:
return qs.first()
if count:
return qs.all()
return False
def reload(self, obj):
""" Reload object from database.

:raises ValueError: When no mixer session is available.
"""
try:
# A missing mixer or session surfaces here as AttributeError.
session = self.__mixer.params.get('session')
session.expire(obj)
session.refresh(obj)
return obj
except (AttributeError, AssertionError):
raise ValueError('Cannot make request to DB.')
def __load_fields(self):
""" Prepare SQLALchemyTypeMixer.

Select columns and relations for data generation.

Yields (name, Field) pairs: one per relationship first, then one per
column not already covered by a relationship's local columns.
"""
mapper = self.__scheme._sa_class_manager.mapper
relations = set()
if hasattr(mapper, 'relationships'):
for rel in mapper.relationships:
# Track FK columns so they are not generated a second time below.
relations |= rel.local_columns
yield rel.key, t.Field(rel, rel.key)
for key, column in mapper.columns.items():
if column not in relations:
yield key, t.Field(column, key)
class Mixer(BaseMixer):
""" Integration with SQLAlchemy. """
type_mixer_cls = TypeMixer<|fim▁hole|>
def __init__(self, session=None, commit=True, **params):
"""Initialize the SQLAlchemy Mixer.
:param fake: (True) Generate fake data instead of random data.
:param session: SQLAlchemy session. Using for commits.
:param commit: (True) Commit instance to session after creation.
"""
super(Mixer, self).__init__(**params)
self.params['session'] = session
self.params['commit'] = bool(session) and commit
def postprocess(self, target):
""" Save objects in db.
:return value: A generated value
"""
if self.params.get('commit'):
session = self.params.get('session')
if not session:
LOGGER.warn("'commit' set true but session not initialized.")
else:
session.add(target)
session.commit()
return target
# Default mixer
mixer = Mixer()
# pylama:ignore=E1120,E0611<|fim▁end|> | |
<|file_name|>TransformableModel.ts<|end_file_name|><|fim▁begin|>import { TransformOptions } from "./TransformOptions";<|fim▁hole|><|fim▁end|> | import { BaseModel, MongoBound } from "../models/base";
type TransformProperty<T> = {_apiTransform: (model: T | MongoBound<T>, options: TransformOptions) => any};
export type TransformableModel<T> = BaseModel<T> & TransformProperty<T>; |
<|file_name|>test_multinic.py<|end_file_name|><|fim▁begin|># Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo.serialization import jsonutils
import webob
from nova import compute
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
UUID = '70f6db34-de8d-4fbd-aafb-4065bdfa6114'
last_add_fixed_ip = (None, None)
last_remove_fixed_ip = (None, None)
def compute_api_add_fixed_ip(self, context, instance, network_id):
"""Stub for compute.api.API.add_fixed_ip: records the call arguments."""
global last_add_fixed_ip
last_add_fixed_ip = (instance['uuid'], network_id)
def compute_api_remove_fixed_ip(self, context, instance, address):
"""Stub for compute.api.API.remove_fixed_ip: records the call arguments."""
global last_remove_fixed_ip
last_remove_fixed_ip = (instance['uuid'], address)
def compute_api_get(self, context, instance_id, want_objects=False,
expected_attrs=None):
"""Stub for compute.api.API.get: returns a minimal fake Instance."""
instance = objects.Instance()
instance.uuid = instance_id
instance.id = 1
instance.vm_state = 'fake'
instance.task_state = 'fake'
# Presumably marks the fields above as clean in the object's change
# tracker — confirm against the objects.Instance implementation.
instance.obj_reset_changes()
return instance
class FixedIpTestV21(test.NoDBTestCase):
def setUp(self):
"""Stub networking, rate limiting and the compute API so the multinic
extension can be exercised without a real compute service."""
super(FixedIpTestV21, self).setUp()
fakes.stub_out_networking(self.stubs)
fakes.stub_out_rate_limiting(self.stubs)
self.stubs.Set(compute.api.API, "add_fixed_ip",
compute_api_add_fixed_ip)
self.stubs.Set(compute.api.API, "remove_fixed_ip",
compute_api_remove_fixed_ip)
self.stubs.Set(compute.api.API, 'get', compute_api_get)
self.app = self._get_app()
def _get_app(self):
return fakes.wsgi_app_v21(init_only=('servers', 'os-multinic'))
<|fim▁hole|> def _get_url(self):
return '/v2/fake'
def test_add_fixed_ip(self):
global last_add_fixed_ip
last_add_fixed_ip = (None, None)
body = dict(addFixedIp=dict(networkId='test_net'))
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 202)
self.assertEqual(last_add_fixed_ip, (UUID, 'test_net'))
def _test_add_fixed_ip_bad_request(self, body):
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(400, resp.status_int)
def test_add_fixed_ip_empty_network_id(self):
body = {'addFixedIp': {'network_id': ''}}
self._test_add_fixed_ip_bad_request(body)
def test_add_fixed_ip_network_id_bigger_than_36(self):
body = {'addFixedIp': {'network_id': 'a' * 37}}
self._test_add_fixed_ip_bad_request(body)
def test_add_fixed_ip_no_network(self):
global last_add_fixed_ip
last_add_fixed_ip = (None, None)
body = dict(addFixedIp=dict())
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual(last_add_fixed_ip, (None, None))
@mock.patch.object(compute.api.API, 'add_fixed_ip')
def test_add_fixed_ip_no_more_ips_available(self, mock_add_fixed_ip):
mock_add_fixed_ip.side_effect = exception.NoMoreFixedIps(net='netid')
body = dict(addFixedIp=dict(networkId='test_net'))
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
def test_remove_fixed_ip(self):
global last_remove_fixed_ip
last_remove_fixed_ip = (None, None)
body = dict(removeFixedIp=dict(address='10.10.10.1'))
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 202)
self.assertEqual(last_remove_fixed_ip, (UUID, '10.10.10.1'))
def test_remove_fixed_ip_no_address(self):
global last_remove_fixed_ip
last_remove_fixed_ip = (None, None)
body = dict(removeFixedIp=dict())
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual(last_remove_fixed_ip, (None, None))
def test_remove_fixed_ip_invalid_address(self):
body = {'remove_fixed_ip': {'address': ''}}
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(400, resp.status_int)
@mock.patch.object(compute.api.API, 'remove_fixed_ip',
side_effect=exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=UUID, ip='10.10.10.1'))
def test_remove_fixed_ip_not_found(self, _remove_fixed_ip):
body = {'remove_fixed_ip': {'address': '10.10.10.1'}}
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(400, resp.status_int)
class FixedIpTestV2(FixedIpTestV21):
def setUp(self):
super(FixedIpTestV2, self).setUp()
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Multinic'])
def _get_app(self):
return fakes.wsgi_app(init_only=('servers',))
def test_remove_fixed_ip_invalid_address(self):
# NOTE(cyeoh): This test is disabled for the V2 API because it is
# has poorer input validation.
pass<|fim▁end|> | |
<|file_name|>prune.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-<|fim▁hole|>import asyncio
import yaml
from pathlib import Path
from logzero import logger
from rizza import entity_tester
from rizza import genetic_tester
def genetic_prune(conf, entity='All'):
    """Check all saved genetic_tester tests for an entity, prune failures.

    For ``entity == 'All'`` every known entity is pruned in turn. Otherwise
    the entity's saved test file is loaded, every 'positive' test is re-run,
    failing tests are removed, the file is rewritten, and near-empty files
    are deleted.

    :param conf: Configuration object exposing ``base_dir``.
    :param entity: Entity name, or 'All' for every entity.
    """
    if entity == 'All':
        for target in list(entity_tester.EntityTester.pull_entities()):
            genetic_prune(conf, target)
        return
    test_file = conf.base_dir.joinpath(
        'data/genetic_tests/{}.yaml'.format(entity))
    logger.debug('Current target file: {}'.format(test_file))
    to_remove = []
    # Files of ~10 bytes or less hold no real tests; skip straight to cleanup.
    if test_file.exists() and test_file.stat().st_size > 10:
        logger.debug('Beginning tests for {}'.format(entity))
        # safe_load avoids arbitrary object construction from the YAML file
        # (yaml.load without an explicit Loader is deprecated/removed in
        # recent PyYAML); the context manager closes the handle.
        with test_file.open('r') as handle:
            tests = yaml.safe_load(handle) or {}
        for test in tests:
            ent, method, mode = test.split(' ')
            if mode == 'positive':
                logger.debug('Running test {}'.format(method))
                result = genetic_tester.GeneticEntityTester(
                    conf, entity, method
                ).run_best()
                if result == -1:
                    logger.debug('{} failed.'.format(test))
                    to_remove.append(test)
                else:
                    logger.debug('{} passed.'.format(test))
        for test in to_remove:
            logger.warning('Removing {} from {}'.format(test, test_file))
            del tests[test]
        logger.debug('Deleting file {}'.format(test_file))
        test_file.unlink()
        logger.debug('Writing tests to {}'.format(test_file))
        with test_file.open('w+') as handle:
            yaml.dump(tests, handle, default_flow_style=False)
    logger.info('Done pruning {}'.format(entity))
    if test_file.exists() and test_file.stat().st_size < 10:
        logger.warning('Deleting empty file {}'.format(test_file))
        test_file.unlink()
async def _async_prune(conf, entity, loop, sem):
"""Prune one entity in the default executor, gated by the semaphore.

genetic_prune is blocking, so it is handed to run_in_executor to keep
the event loop responsive; sem bounds how many prunes run at once.
"""
async with sem:
await loop.run_in_executor(
None, # use default executor
genetic_prune, conf, entity # function and args
)
async def _async_prune_all(conf, loop, sem):
"""Construct one prune task per known entity, and await them all."""
tasks = [
asyncio.ensure_future(_async_prune(conf, entity, loop, sem))
for entity in list(entity_tester.EntityTester.pull_entities())
]
await asyncio.wait(tasks)
def async_genetic_prune(conf, entity='All', async_limit=100):
    """Prune genetic tests, concurrently when all entities are requested.

    A single named entity is pruned synchronously; 'All' fans out one
    executor-backed task per known entity, capped at async_limit
    concurrent tasks.
    """
    if entity == 'All':
        sem = asyncio.Semaphore(async_limit)
        # NOTE(review): get_event_loop() is deprecated on newer Pythons;
        # confirm the supported interpreter range before changing it.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(_async_prune_all(conf, loop, sem))
        loop.close()
    else:
        genetic_prune(conf, entity)
<|file_name|>version.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.<|fim▁hole|>
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package version
import (
"fmt"
"os"
"runtime"
)
var (
	// Version shows the version of kube batch.
	Version = "Not provided."
	// GitSHA shows the git commit id of kube batch.
	GitSHA = "Not provided."
	// Built shows the built time of the binary.
	Built = "Not provided."
)
// PrintVersionAndExit prints versions from the array returned by Info() and exit
func PrintVersionAndExit(apiVersion string) {
	// One line of output per version entry, then terminate successfully.
	lines := Info(apiVersion)
	for _, line := range lines {
		fmt.Printf("%v\n", line)
	}
	os.Exit(0)
}
// Info returns an array of various service versions
func Info(apiVersion string) []string {
return []string{
fmt.Sprintf("API Version: %s", apiVersion),
fmt.Sprintf("Version: %s", Version),
fmt.Sprintf("Git SHA: %s", GitSHA),
fmt.Sprintf("Built At: %s", Built),
fmt.Sprintf("Go Version: %s", runtime.Version()),
fmt.Sprintf("Go OS/Arch: %s/%s", runtime.GOOS, runtime.GOARCH),
}
}<|fim▁end|> | You may obtain a copy of the License at |
<|file_name|>libusb0.py<|end_file_name|><|fim▁begin|># Copyright (C) 2009-2013 Wander Lairson Costa
#
# The following terms apply to all files associated
# with the software unless explicitly disclaimed in individual files.
#
# The authors hereby grant permission to use, copy, modify, distribute,
# and license this software and its documentation for any purpose, provided
# that existing copyright notices are retained in all copies and that this
# notice is included verbatim in any distributions. No written agreement,
# license, or royalty fee is required for any of the authorized uses.
# Modifications to this software may be copyrighted by their authors
# and need not follow the licensing terms described here, provided that
# the new terms are clearly indicated on the first page of each file where
# they apply.
#
# IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
# FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
# ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
# DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE
# IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
# NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
# MODIFICATIONS.
from ctypes import *
import ctypes.util
import os
import usb.backend
import usb.util
import sys
from usb.core import USBError
from usb._debug import methodtrace
import usb._interop as _interop
import logging
__author__ = 'Wander Lairson Costa'
__all__ = ['get_backend']
_logger = logging.getLogger('usb.backend.libusb0')
# usb.h
_PC_PATH_MAX = 4
if sys.platform.find('bsd') != -1 or sys.platform.find('mac') != -1 or \
sys.platform.find('darwin') != -1:
_PATH_MAX = 1024
elif sys.platform == 'win32' or sys.platform == 'cygwin':
_PATH_MAX = 511
else:
_PATH_MAX = os.pathconf('.', _PC_PATH_MAX)
# libusb-win32 makes all structures packed, while
# default libusb only does for some structures
# _PackPolicy defines the structure packing according
# to the platform.
class _PackPolicy(object):
pass
if sys.platform == 'win32' or sys.platform == 'cygwin':
_PackPolicy._pack_ = 1
# Data structures
class _usb_descriptor_header(Structure):
_pack_ = 1
_fields_ = [('blength', c_uint8),
('bDescriptorType', c_uint8)]
class _usb_string_descriptor(Structure):
_pack_ = 1
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('wData', c_uint16)]
class _usb_endpoint_descriptor(Structure, _PackPolicy):
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('bEndpointAddress', c_uint8),
('bmAttributes', c_uint8),
('wMaxPacketSize', c_uint16),
('bInterval', c_uint8),
('bRefresh', c_uint8),
('bSynchAddress', c_uint8),
('extra', POINTER(c_uint8)),
('extralen', c_int)]
class _usb_interface_descriptor(Structure, _PackPolicy):
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('bInterfaceNumber', c_uint8),
('bAlternateSetting', c_uint8),
('bNumEndpoints', c_uint8),
('bInterfaceClass', c_uint8),
('bInterfaceSubClass', c_uint8),
('bInterfaceProtocol', c_uint8),
('iInterface', c_uint8),
('endpoint', POINTER(_usb_endpoint_descriptor)),
('extra', POINTER(c_uint8)),
('extralen', c_int)]
class _usb_interface(Structure, _PackPolicy):
_fields_ = [('altsetting', POINTER(_usb_interface_descriptor)),
('num_altsetting', c_int)]
class _usb_config_descriptor(Structure, _PackPolicy):
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('wTotalLength', c_uint16),
('bNumInterfaces', c_uint8),
('bConfigurationValue', c_uint8),
('iConfiguration', c_uint8),
('bmAttributes', c_uint8),
('bMaxPower', c_uint8),
('interface', POINTER(_usb_interface)),
('extra', POINTER(c_uint8)),
('extralen', c_int)]
class _usb_device_descriptor(Structure, _PackPolicy):
_pack_ = 1
_fields_ = [('bLength', c_uint8),
('bDescriptorType', c_uint8),
('bcdUSB', c_uint16),
('bDeviceClass', c_uint8),
('bDeviceSubClass', c_uint8),
('bDeviceProtocol', c_uint8),
('bMaxPacketSize0', c_uint8),
('idVendor', c_uint16),
('idProduct', c_uint16),
('bcdDevice', c_uint16),
('iManufacturer', c_uint8),
('iProduct', c_uint8),
('iSerialNumber', c_uint8),
('bNumConfigurations', c_uint8)]
class _usb_device(Structure, _PackPolicy):
pass
class _usb_bus(Structure, _PackPolicy):
pass
_usb_device._fields_ = [('next', POINTER(_usb_device)),
('prev', POINTER(_usb_device)),
('filename', c_int8 * (_PATH_MAX + 1)),
('bus', POINTER(_usb_bus)),
('descriptor', _usb_device_descriptor),
('config', POINTER(_usb_config_descriptor)),
('dev', c_void_p),
('devnum', c_uint8),
('num_children', c_ubyte),
('children', POINTER(POINTER(_usb_device)))]
_usb_bus._fields_ = [('next', POINTER(_usb_bus)),
('prev', POINTER(_usb_bus)),
('dirname', c_char * (_PATH_MAX + 1)),
('devices', POINTER(_usb_device)),
('location', c_uint32),
('root_dev', POINTER(_usb_device))]
_usb_dev_handle = c_void_p
class _DeviceDescriptor:
    """Plain-Python snapshot of a libusb 0.1 usb_device_descriptor.

    Copies every standard device-descriptor field off the ctypes struct so
    the rest of the backend never touches ctypes memory directly.
    """

    # All descriptor fields are copied verbatim from the ctypes struct.
    _FIELDS = (
        'bLength', 'bDescriptorType', 'bcdUSB', 'bDeviceClass',
        'bDeviceSubClass', 'bDeviceProtocol', 'bMaxPacketSize0',
        'idVendor', 'idProduct', 'bcdDevice', 'iManufacturer',
        'iProduct', 'iSerialNumber', 'bNumConfigurations',
    )

    def __init__(self, dev):
        descriptor = dev.descriptor
        for field in self._FIELDS:
            setattr(self, field, getattr(descriptor, field))
        self.address = dev.devnum
        self.bus = dev.bus[0].location
        # libusb 0.1 does not expose the physical port number.
        self.port_number = None
_lib = None
def _load_library():
    """Locate and load the libusb 0.1 shared library.

    Returns a ctypes CDLL handle.
    Raises OSError when no usable library can be found.
    """
    if sys.platform != 'cygwin':
        candidates = ('usb-0.1', 'usb', 'libusb0')
        for candidate in candidates:
            # Workaround for CPython 3.3 issue#16283 / pyusb #14
            if sys.platform == 'win32':
                candidate = candidate + '.dll'
            libname = ctypes.util.find_library(candidate)
            if libname is not None:
                return CDLL(libname)
        # Bug fix: previously fell through with libname = None, which made
        # CDLL(None) load the main process image instead of failing clearly.
        raise OSError('USB library could not be found')
    else:
        # corner cases
        # cygwin predefines library names with 'cyg' instead of 'lib'
        try:
            return CDLL('cygusb0.dll')
        except:
            _logger.error('Libusb 0 could not be loaded in cygwin', exc_info=True)
        raise OSError('USB library could not be found')
def _setup_prototypes(lib):
# usb_dev_handle *usb_open(struct usb_device *dev);
lib.usb_open.argtypes = [POINTER(_usb_device)]
lib.usb_open.restype = _usb_dev_handle
# int usb_close(usb_dev_handle *dev);
lib.usb_close.argtypes = [_usb_dev_handle]
# int usb_get_string(usb_dev_handle *dev,
# int index,
# int langid,
# char *buf,
# size_t buflen);
lib.usb_get_string.argtypes = [
_usb_dev_handle,
c_int,
c_int,
c_char_p,
c_size_t
]
# int usb_get_string_simple(usb_dev_handle *dev,
# int index,
# char *buf,
# size_t buflen);
lib.usb_get_string_simple.argtypes = [
_usb_dev_handle,
c_int,
c_char_p,
c_size_t
]
# int usb_get_descriptor_by_endpoint(usb_dev_handle *udev,
# int ep,
# unsigned char type,
# unsigned char index,
# void *buf,
# int size);
lib.usb_get_descriptor_by_endpoint.argtypes = [
_usb_dev_handle,
c_int,
c_ubyte,
c_ubyte,
c_void_p,
c_int
]
# int usb_get_descriptor(usb_dev_handle *udev,
# unsigned char type,
# unsigned char index,
# void *buf,
# int size);
lib.usb_get_descriptor.argtypes = [
_usb_dev_handle,
c_ubyte,
c_ubyte,
c_void_p,
c_int
]
# int usb_bulk_write(usb_dev_handle *dev,
# int ep,
# const char *bytes,
# int size,
# int timeout);
lib.usb_bulk_write.argtypes = [
_usb_dev_handle,
c_int,
c_char_p,
c_int,
c_int
]
# int usb_bulk_read(usb_dev_handle *dev,
# int ep,
# char *bytes,
# int size,
# int timeout);
lib.usb_bulk_read.argtypes = [
_usb_dev_handle,
c_int,
c_char_p,
c_int,
c_int
]
# int usb_interrupt_write(usb_dev_handle *dev,
# int ep,
# const char *bytes,
# int size,
# int timeout);
lib.usb_interrupt_write.argtypes = [
_usb_dev_handle,
c_int,
c_char_p,
c_int,
c_int
]
# int usb_interrupt_read(usb_dev_handle *dev,
# int ep,
# char *bytes,
# int size,
# int timeout);
lib.usb_interrupt_read.argtypes = [
_usb_dev_handle,
c_int,
c_char_p,
c_int,
c_int
]
# int usb_control_msg(usb_dev_handle *dev,
# int requesttype,
# int request,
# int value,
# int index,
# char *bytes,
# int size,
# int timeout);
lib.usb_control_msg.argtypes = [
_usb_dev_handle,
c_int,
c_int,
c_int,
c_int,
c_char_p,
c_int,
c_int
]
# int usb_set_configuration(usb_dev_handle *dev, int configuration);
lib.usb_set_configuration.argtypes = [_usb_dev_handle, c_int]
# int usb_claim_interface(usb_dev_handle *dev, int interface);
lib.usb_claim_interface.argtypes = [_usb_dev_handle, c_int]
# int usb_release_interface(usb_dev_handle *dev, int interface);
lib.usb_release_interface.argtypes = [_usb_dev_handle, c_int]
# int usb_set_altinterface(usb_dev_handle *dev, int alternate);
lib.usb_set_altinterface.argtypes = [_usb_dev_handle, c_int]
# int usb_resetep(usb_dev_handle *dev, unsigned int ep);
lib.usb_resetep.argtypes = [_usb_dev_handle, c_int]
# int usb_clear_halt(usb_dev_handle *dev, unsigned int ep);
lib.usb_clear_halt.argtypes = [_usb_dev_handle, c_int]
# int usb_reset(usb_dev_handle *dev);
lib.usb_reset.argtypes = [_usb_dev_handle]
# char *usb_strerror(void);
lib.usb_strerror.argtypes = []
lib.usb_strerror.restype = c_char_p
# void usb_set_debug(int level);
lib.usb_set_debug.argtypes = [c_int]
# struct usb_device *usb_device(usb_dev_handle *dev);
lib.usb_device.argtypes = [_usb_dev_handle]
lib.usb_device.restype = POINTER(_usb_device)
# struct usb_bus *usb_get_busses(void);
lib.usb_get_busses.restype = POINTER(_usb_bus)
def _check(retval):
    """Validate a libusb 0.1 return value.

    Returns the non-negative integer result unchanged.
    Raises USBError for a NULL pointer (None) or a negative return code.
    """
    if retval is None:
        # NULL pointer return (e.g. usb_open failure).
        # Bug fix: `ret` was previously unbound here, so the raise below
        # crashed with NameError instead of the intended USBError.
        ret = None
        errmsg = _lib.usb_strerror()
    else:
        ret = int(retval)
        if ret < 0:
            errmsg = _lib.usb_strerror()
            # 'No error' means the real cause is encoded in the negative
            # return code, so translate it through errno.
            # Thanks to Nicholas Wheeler to point out the problem...
            # Also see issue #2860940
            if errmsg.lower() == 'no error':
                errmsg = os.strerror(-ret)
        else:
            return ret
    raise USBError(errmsg, ret)
# implementation of libusb 0.1.x backend
class _LibUSB(usb.backend.IBackend):
@methodtrace(_logger)
def enumerate_devices(self):
_check(_lib.usb_find_busses())
_check(_lib.usb_find_devices())
bus = _lib.usb_get_busses()
while bool(bus):
dev = bus[0].devices
while bool(dev):
yield dev[0]
dev = dev[0].next
bus = bus[0].next
@methodtrace(_logger)
def get_device_descriptor(self, dev):
return _DeviceDescriptor(dev)
@methodtrace(_logger)
def get_configuration_descriptor(self, dev, config):
if config >= dev.descriptor.bNumConfigurations:
raise IndexError('Invalid configuration index ' + str(config))
return dev.config[config]
@methodtrace(_logger)
def get_interface_descriptor(self, dev, intf, alt, config):
cfgdesc = self.get_configuration_descriptor(dev, config)
if intf >= cfgdesc.bNumInterfaces:
raise IndexError('Invalid interface index ' + str(interface))
interface = cfgdesc.interface[intf]
if alt >= interface.num_altsetting:
raise IndexError('Invalid alternate setting index ' + str(alt))
return interface.altsetting[alt]
@methodtrace(_logger)<|fim▁hole|> return interface.endpoint[ep]
@methodtrace(_logger)
def open_device(self, dev):
return _check(_lib.usb_open(dev))
@methodtrace(_logger)
def close_device(self, dev_handle):
_check(_lib.usb_close(dev_handle))
@methodtrace(_logger)
def set_configuration(self, dev_handle, config_value):
_check(_lib.usb_set_configuration(dev_handle, config_value))
@methodtrace(_logger)
def set_interface_altsetting(self, dev_handle, intf, altsetting):
_check(_lib.usb_set_altinterface(dev_handle, altsetting))
@methodtrace(_logger)
def get_configuration(self, dev_handle):
bmRequestType = usb.util.build_request_type(
usb.util.CTRL_IN,
usb.util.CTRL_TYPE_STANDARD,
usb.util.CTRL_RECIPIENT_DEVICE
)
return self.ctrl_transfer(dev_handle,
bmRequestType,
0x08,
0,
0,
1,
100
)[0]
@methodtrace(_logger)
def claim_interface(self, dev_handle, intf):
_check(_lib.usb_claim_interface(dev_handle, intf))
@methodtrace(_logger)
def release_interface(self, dev_handle, intf):
_check(_lib.usb_release_interface(dev_handle, intf))
@methodtrace(_logger)
def bulk_write(self, dev_handle, ep, intf, data, timeout):
return self.__write(_lib.usb_bulk_write,
dev_handle,
ep,
intf,
data, timeout)
@methodtrace(_logger)
def bulk_read(self, dev_handle, ep, intf, size, timeout):
return self.__read(_lib.usb_bulk_read,
dev_handle,
ep,
intf,
size,
timeout)
@methodtrace(_logger)
def intr_write(self, dev_handle, ep, intf, data, timeout):
return self.__write(_lib.usb_interrupt_write,
dev_handle,
ep,
intf,
data,
timeout)
@methodtrace(_logger)
def intr_read(self, dev_handle, ep, intf, size, timeout):
return self.__read(_lib.usb_interrupt_read,
dev_handle,
ep,
intf,
size,
timeout)
@methodtrace(_logger)
def ctrl_transfer(self,
dev_handle,
bmRequestType,
bRequest,
wValue,
wIndex,
data_or_wLength,
timeout):
if usb.util.ctrl_direction(bmRequestType) == usb.util.CTRL_OUT:
address, length = data_or_wLength.buffer_info()
length *= data_or_wLength.itemsize
return _check(_lib.usb_control_msg(
dev_handle,
bmRequestType,
bRequest,
wValue,
wIndex,
cast(address, c_char_p),
length,
timeout
))
else:
data = _interop.as_array((0,) * data_or_wLength)
read = int(_check(_lib.usb_control_msg(
dev_handle,
bmRequestType,
bRequest,
wValue,
wIndex,
cast(data.buffer_info()[0],
c_char_p),
data_or_wLength,
timeout
)))
return data[:read]
@methodtrace(_logger)
def reset_device(self, dev_handle):
_check(_lib.usb_reset(dev_handle))
@methodtrace(_logger)
def detach_kernel_driver(self, dev_handle, intf):
_check(_lib.usb_detach_kernel_driver_np(dev_handle, intf))
def __write(self, fn, dev_handle, ep, intf, data, timeout):
address, length = data.buffer_info()
length *= data.itemsize
return int(_check(fn(
dev_handle,
ep,
cast(address, c_char_p),
length,
timeout
)))
def __read(self, fn, dev_handle, ep, intf, size, timeout):
data = _interop.as_array('\x00' * size)
address, length = data.buffer_info()
length *= data.itemsize
ret = int(_check(fn(
dev_handle,
ep,
cast(address, c_char_p),
length,
timeout
)))
return data[:ret]
def get_backend():
global _lib
try:
if _lib is None:
_lib = _load_library()
_setup_prototypes(_lib)
_lib.usb_init()
return _LibUSB()
except Exception:
_logger.error('Error loading libusb 0.1 backend', exc_info=True)
return None<|fim▁end|> | def get_endpoint_descriptor(self, dev, ep, intf, alt, config):
interface = self.get_interface_descriptor(dev, intf, alt, config)
if ep >= interface.bNumEndpoints:
raise IndexError('Invalid endpoint index ' + str(ep)) |
<|file_name|>ihandlers.py<|end_file_name|><|fim▁begin|>#********************************************************************************
#* Dionaea
#* - catches bugs -
#*
#*
#*
#* Copyright (C) 2010 Markus Koetter & Tan Kean Siong
#* Copyright (C) 2009 Paul Baecher & Markus Koetter & Mark Schloesser
#*
#* This program is free software; you can redistribute it and/or
#* modify it under the terms of the GNU General Public License
#* as published by the Free Software Foundation; either version 2
#* of the License, or (at your option) any later version.
#*
#* This program is distributed in the hope that it will be useful,
#* but WITHOUT ANY WARRANTY; without even the implied warranty of
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#* GNU General Public License for more details.
#*
#* You should have received a copy of the GNU General Public License
#* along with this program; if not, write to the Free Software
#* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#*
#*
#* contact nepenthesdev@gmail.com
#*
#*******************************************************************************/
import logging
import os
import imp
from dionaea.core import g_dionaea
# service imports
import dionaea.tftp
import dionaea.cmd
import dionaea.emu
import dionaea.store
import dionaea.test
import dionaea.ftp
logger = logging.getLogger('ihandlers')
logger.setLevel(logging.DEBUG)
# reload service imports
#imp.reload(dionaea.tftp)
#imp.reload(dionaea.ftp)
#imp.reload(dionaea.cmd)<|fim▁hole|>#imp.reload(dionaea.store)
# global handler list
# keeps a ref on our handlers
# allows restarting
global g_handlers
def start():
    """Start every registered ihandler that exposes a start() method."""
    # logger.warn is a deprecated alias; use warning().
    logger.warning("START THE IHANDLERS")
    for handler in g_handlers:
        # Not all handlers implement start(); only call it when present.
        method = getattr(handler, "start", None)
        if method is not None:
            method()
def new():
    """Build the global handler list from the dionaea configuration.

    Each enabled entry in modules.python.ihandlers.handlers gets one
    handler instance appended to g_handlers; imports are done lazily so
    disabled handlers never pull in their (possibly heavy) dependencies.
    """
    global g_handlers
    g_handlers = []
    # hpfeeds needs both the handler to be enabled and a client config section.
    if "hpfeeds" in g_dionaea.config()['modules']['python']['ihandlers']['handlers'] and 'hpfeeds' in g_dionaea.config()['modules']['python']:
        import dionaea.hpfeeds
        for client in g_dionaea.config()['modules']['python']['hpfeeds']:
            conf = g_dionaea.config()['modules']['python']['hpfeeds'][client]
            x = dionaea.hpfeeds.hpfeedihandler(conf)
            g_handlers.append(x)
    # Download/offer handlers subscribe to specific dionaea event topics.
    if "ftpdownload" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.ftp
        g_handlers.append(dionaea.ftp.ftpdownloadhandler('dionaea.download.offer'))
    if "tftpdownload" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        g_handlers.append(dionaea.tftp.tftpdownloadhandler('dionaea.download.offer'))
    if "emuprofile" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        g_handlers.append(dionaea.emu.emuprofilehandler('dionaea.module.emu.profile'))
    if "cmdshell" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        g_handlers.append(dionaea.cmd.cmdshellhandler('dionaea.service.shell.*'))
    if "store" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        g_handlers.append(dionaea.store.storehandler('dionaea.download.complete'))
    if "uniquedownload" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        g_handlers.append(dionaea.test.uniquedownloadihandler('dionaea.download.complete.unique'))
    # '*' subscribes a handler to every event topic.
    if "surfids" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.surfids
        g_handlers.append(dionaea.surfids.surfidshandler('*'))
    if "logsql" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.logsql
        g_handlers.append(dionaea.logsql.logsqlhandler("*"))
    if "p0f" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.p0f
        g_handlers.append(dionaea.p0f.p0fhandler(g_dionaea.config()['modules']['python']['p0f']['path']))
    if "logxmpp" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.logxmpp
        from random import choice
        import string
        for client in g_dionaea.config()['modules']['python']['logxmpp']:
            conf = g_dionaea.config()['modules']['python']['logxmpp'][client]
            # Use the configured XMPP resource, or make up a random one.
            if 'resource' in conf:
                resource = conf['resource']
            else:
                resource = ''.join([choice(string.ascii_letters) for i in range(8)])
            print("client %s \n\tserver %s:%s username %s password %s resource %s muc %s\n\t%s" % (client, conf['server'], conf['port'], conf['username'], conf['password'], resource, conf['muc'], conf['config']))
            x = dionaea.logxmpp.logxmpp(conf['server'], int(conf['port']), conf['username'], conf['password'], resource, conf['muc'], conf['config'])
            g_handlers.append(x)
    if "nfq" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.nfq
        g_handlers.append(dionaea.nfq.nfqhandler())
    if "virustotal" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.virustotal
        g_handlers.append(dionaea.virustotal.virustotalhandler('*'))
    if "mwserv" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.mwserv
        g_handlers.append(dionaea.mwserv.mwservhandler('*'))
    if "submit_http" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.submit_http
        g_handlers.append(dionaea.submit_http.handler('*'))
    if "fail2ban" in g_dionaea.config()['modules']['python']['ihandlers']['handlers']:
        import dionaea.fail2ban
        g_handlers.append(dionaea.fail2ban.fail2banhandler())
def stop():
    """Stop every registered ihandler and drop the global handler list."""
    global g_handlers
    for handler in g_handlers:
        # Lazy %-formatting: the message is only built if debug is enabled.
        logger.debug("deleting %s", handler)
        handler.stop()
        # (removed pointless `del` of the loop variable; the for statement
        # rebinds it on every iteration anyway)
    del g_handlers
<|file_name|>import_localities.py<|end_file_name|><|fim▁begin|>"""
Usage:
import_localities < Localities.csv
"""
from django.contrib.gis.geos import GEOSGeometry
from django.utils.text import slugify
from ..import_from_csv import ImportFromCSVCommand
from ...utils import parse_nptg_datetime
from ...models import Locality
class Command(ImportFromCSVCommand):
"""
Imports localities from the NPTG
"""
def handle_rows(self, rows):
existing_localities = Locality.objects.defer('search_vector', 'latlong').in_bulk()
slugs = {
locality.slug: locality for locality in existing_localities.values()
}
to_update = []
to_create = []
for row in rows:
modified_at = parse_nptg_datetime(row["ModificationDateTime"])
locality_code = row['NptgLocalityCode']
if locality_code in existing_localities:
locality = existing_localities[locality_code]
if modified_at and modified_at == locality.modified_at:
continue
else:
locality = Locality()
created_at = parse_nptg_datetime(row["CreationDateTime"])
locality.modified_at = modified_at
locality.created_at = created_at
locality.name = row['LocalityName'].replace('\'', '\u2019')
locality.short_name = row['ShortName']
if locality.name == locality.short_name:
locality.short_name = ''
locality.qualifier_name = row['QualifierName']
locality.admin_area_id = row['AdministrativeAreaCode']
locality.latlong = GEOSGeometry(f"SRID=27700;POINT({row['Easting']} {row['Northing']})")
if row['NptgDistrictCode'] == '310': # bogus code seemingly used for localities with no district
locality.district_id = None
else:
locality.district_id = row['NptgDistrictCode']
if locality.id:<|fim▁hole|> to_update.append(locality)
else:
locality.id = locality_code
slug = slugify(locality.get_qualified_name())
locality.slug = slug
i = 0
while locality.slug in slugs:
i += 1
locality.slug = f"{slug}-{i}"
slugs[locality.slug] = locality
to_create.append(locality)
Locality.objects.bulk_update(to_update, fields=[
'name', 'qualifier_name', 'short_name', 'admin_area', 'latlong', 'modified_at', 'created_at', 'district'
], batch_size=100)
Locality.objects.bulk_create(to_create)<|fim▁end|> | |
<|file_name|>issue-1895.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
    let x = 1;
    // Regression test for issue #1895: a boxed closure (pre-1.0 `@fn`
    // syntax) must be able to capture a local by value and return it.
    let y: @fn() -> int = || x;
    let z = y();
}
// http://rust-lang.org/COPYRIGHT. |
<|file_name|>manager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#coding: utf-8
"""
This module simply sends request to the Digital Ocean API,
and returns their response as a dict.
"""
import requests
API_ENDPOINT = 'https://api.digitalocean.com'
class DoError(RuntimeError):
pass
class DoManager(object):
def __init__(self, client_id, api_key):
self.client_id = client_id
self.api_key = api_key
def all_active_droplets(self):
json = self.request('/droplets/')
return json['droplets']
def new_droplet(self, name, size_id, image_id, region_id,
ssh_key_ids=None, virtio=False, private_networking=False,
backups_enabled=False):
params = {
'name': name,
'size_id': size_id,
'image_id': image_id,
'region_id': region_id,
'virtio': virtio,
'private_networking': private_networking,
'backups_enabled': backups_enabled,
}
if ssh_key_ids:
params['ssh_key_ids'] = ssh_key_ids
json = self.request('/droplets/new', params=params)
return json['droplet']
def show_droplet(self, id):
json = self.request('/droplets/%s' % id)
return json['droplet']
def reboot_droplet(self, id):
json = self.request('/droplets/%s/reboot/' % id)
json.pop('status', None)
return json
def power_cycle_droplet(self, id):
json = self.request('/droplets/%s/power_cycle/' % id)
json.pop('status', None)<|fim▁hole|>
def shutdown_droplet(self, id):
json = self.request('/droplets/%s/shutdown/' % id)
json.pop('status', None)
return json
def power_off_droplet(self, id):
json = self.request('/droplets/%s/power_off/' % id)
json.pop('status', None)
return json
def power_on_droplet(self, id):
json = self.request('/droplets/%s/power_on/' % id)
json.pop('status', None)
return json
def password_reset_droplet(self, id):
json = self.request('/droplets/%s/password_reset/' % id)
json.pop('status', None)
return json
def resize_droplet(self, id, size_id):
params = {'size_id': size_id}
json = self.request('/droplets/%s/resize/' % id, params)
json.pop('status', None)
return json
def snapshot_droplet(self, id, name):
params = {'name': name}
json = self.request('/droplets/%s/snapshot/' % id, params)
json.pop('status', None)
return json
def restore_droplet(self, id, image_id):
params = {'image_id': image_id}
json = self.request('/droplets/%s/restore/' % id, params)
json.pop('status', None)
return json
def rebuild_droplet(self, id, image_id):
params = {'image_id': image_id}
json = self.request('/droplets/%s/rebuild/' % id, params)
json.pop('status', None)
return json
def enable_backups_droplet(self, id):
json = self.request('/droplets/%s/enable_backups/' % id)
json.pop('status', None)
return json
def disable_backups_droplet(self, id):
json = self.request('/droplets/%s/disable_backups/' % id)
json.pop('status', None)
return json
def rename_droplet(self, id, name):
params = {'name': name}
json = self.request('/droplets/%s/rename/' % id, params)
json.pop('status', None)
return json
def destroy_droplet(self, id, scrub_data=True):
params = {'scrub_data': '1' if scrub_data else '0'}
json = self.request('/droplets/%s/destroy/' % id, params)
json.pop('status', None)
return json
#regions==========================================
def all_regions(self):
json = self.request('/regions/')
return json['regions']
#images==========================================
def all_images(self, filter='global'):
params = {'filter': filter}
json = self.request('/images/', params)
return json['images']
def show_image(self, image_id):
params= {'image_id': image_id}
json = self.request('/images/%s/' % image_id, params)
return json['image']
def destroy_image(self, image_id):
self.request('/images/%s/destroy' % image_id)
return True
def transfer_image(self, image_id, region_id):
params = {'region_id': region_id}
json = self.request('/images/%s/transfer/' % image_id, params)
json.pop('status', None)
return json
#ssh_keys=========================================
def all_ssh_keys(self):
json = self.request('/ssh_keys/')
return json['ssh_keys']
def new_ssh_key(self, name, pub_key):
params = {'name': name, 'ssh_pub_key': pub_key}
json = self.request('/ssh_keys/new/', params)
return json['ssh_key']
def show_ssh_key(self, key_id):
json = self.request('/ssh_keys/%s/' % key_id)
return json['ssh_key']
def edit_ssh_key(self, key_id, name, pub_key):
params = {'name': name, 'ssh_pub_key': pub_key} # the doc needs to be improved
json = self.request('/ssh_keys/%s/edit/' % key_id, params)
return json['ssh_key']
def destroy_ssh_key(self, key_id):
self.request('/ssh_keys/%s/destroy/' % key_id)
return True
#sizes============================================
def sizes(self):
json = self.request('/sizes/')
return json['sizes']
#domains==========================================
def all_domains(self):
json = self.request('/domains/')
return json['domains']
def new_domain(self, name, ip):
params = {
'name': name,
'ip_address': ip
}
json = self.request('/domains/new/', params)
return json['domain']
def show_domain(self, domain_id):
json = self.request('/domains/%s/' % domain_id)
return json['domain']
def destroy_domain(self, domain_id):
self.request('/domains/%s/destroy/' % domain_id)
return True
def all_domain_records(self, domain_id):
json = self.request('/domains/%s/records/' % domain_id)
return json['records']
def new_domain_record(self, domain_id, record_type, data, name=None, priority=None, port=None, weight=None):
params = {
'record_type': record_type,
'data': data,
}
if name: params['name'] = name
if priority: params['priority'] = priority
if port: params['port'] = port
if weight: params['weight'] = port
json = self.request('/domains/%s/records/new/' % domain_id, params)
return json['domain_record'] if 'domain_record' in json else json['record'] # DO API docs say 'domain_record', but actually it 'record'
def show_domain_record(self, domain_id, record_id):
json = self.request('/domains/%s/records/%s' % (domain_id, record_id))
return json['record']
def edit_domain_record(self, domain_id, record_id, record_type, data, name=None, priority=None, port=None, weight=None):
params = {
'record_type': record_type,
'data': data,
}
if name: params['name'] = name
if priority: params['priority'] = priority
if port: params['port'] = port
if weight: params['weight'] = port
json = self.request('/domains/%s/records/%s/edit/' % (domain_id, record_id), params)
return json['domain_record'] if 'domain_record' in json else json['record'] # DO API docs say 'domain_record' for /new/ but 'record' for /edit/.
def destroy_domain_record(self, domain_id, record_id):
return self.request('/domains/%s/records/%s/destroy/' % (domain_id, record_id))
return True
#events===========================================
def show_event(self, event_id):
json = self.request('/events/%s' % event_id)
return json['event']
#low_level========================================
def request(self, path, params={}, method='GET'):
params['client_id'] = self.client_id
params['api_key'] = self.api_key
if not path.startswith('/'):
path = '/'+path
url = API_ENDPOINT+path
try:
resp = requests.get(url, params=params, timeout=60)
json = resp.json()
except ValueError: # requests.models.json.JSONDecodeError
raise ValueError("The API server doesn't respond with a valid json")
except requests.RequestException as e: # errors from requests
raise RuntimeError(e)
if resp.status_code != requests.codes.ok:
if json:
if 'error_message' in json:
raise DoError(json['error_message'])
elif 'message' in json:
raise DoError(json['message'])
# The JSON reponse is bad, so raise an exception with the HTTP status
resp.raise_for_status()
if json.get('status') != 'OK':
raise DoError(json['error_message'])
return json
if __name__=='__main__':
import os
client_id = os.environ['DO_CLIENT_ID']
api_key = os.environ['DO_API_KEY']
do = DoManager(client_id, api_key)
import sys
fname = sys.argv[1]
import pprint
# size_id: 66, image_id: 1601, region_id: 1
pprint.pprint(getattr(do, fname)(*sys.argv[2:]))<|fim▁end|> | return json |
<|file_name|>ConfiguredBuckOutIntegrationTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.io;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import com.facebook.buck.io.file.MorePaths;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Splitter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
public class ConfiguredBuckOutIntegrationTest {
private ProjectWorkspace workspace;
@Rule public TemporaryPaths tmp = new TemporaryPaths();
@Before
public void setUp() throws IOException {
workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "configured_buck_out", tmp);
workspace.setUp();
}
@Test
public void outputPathsUseConfiguredBuckOut() throws IOException {
String buckOut = "new-buck-out";
Path output = workspace.buildAndReturnOutput("-c", "project.buck_out=" + buckOut, "//:dummy");
assertTrue(Files.exists(output));
assertThat(workspace.getDestPath().relativize(output).toString(), Matchers.startsWith(buckOut));
}
@Test
public void configuredBuckOutAffectsRuleKey() throws IOException {
String out =
workspace
.runBuckCommand("targets", "--show-rulekey", "//:dummy")
.assertSuccess()
.getStdout();
String ruleKey = Splitter.on(' ').splitToList(out).get(1);
String configuredOut =
workspace
.runBuckCommand(
"targets", "--show-rulekey", "-c", "project.buck_out=something", "//:dummy")
.assertSuccess()
.getStdout();
String configuredRuleKey = Splitter.on(' ').splitToList(configuredOut).get(1);
assertThat(ruleKey, Matchers.not(Matchers.equalTo(configuredRuleKey)));
}
@Test
public void buckOutCompatSymlink() throws IOException {
assumeThat(Platform.detect(), Matchers.not(Matchers.is(Platform.WINDOWS)));
ProcessResult result =
workspace.runBuckBuild(
"-c",
"project.buck_out=something",
"-c",
"project.buck_out_compat_link=true",
"//:dummy");
result.assertSuccess();
assertThat(
Files.readSymbolicLink(workspace.resolve("buck-out/gen")),
Matchers.equalTo(workspace.getDestPath().getFileSystem().getPath("../something/gen")));
}
@Test
public void verifyTogglingConfiguredBuckOut() throws IOException {
assumeThat(Platform.detect(), Matchers.not(Matchers.is(Platform.WINDOWS)));<|fim▁hole|> "project.buck_out=something",
"-c",
"project.buck_out_compat_link=true",
"//:dummy")
.assertSuccess();
workspace.runBuckBuild("//:dummy").assertSuccess();
workspace
.runBuckBuild(
"-c",
"project.buck_out=something",
"-c",
"project.buck_out_compat_link=true",
"//:dummy")
.assertSuccess();
}
@Test
public void verifyNoopBuildWithCompatSymlink() throws IOException {
assumeThat(Platform.detect(), Matchers.not(Matchers.is(Platform.WINDOWS)));
// Do an initial build.
workspace
.runBuckBuild(
"-c",
"project.buck_out=something",
"-c",
"project.buck_out_compat_link=true",
"//:dummy")
.assertSuccess();
workspace.getBuildLog().assertTargetBuiltLocally("//:dummy");
// Run another build immediately after and verify everything was up to date.
workspace
.runBuckBuild(
"-c",
"project.buck_out=something",
"-c",
"project.buck_out_compat_link=true",
"//:dummy")
.assertSuccess();
workspace.getBuildLog().assertTargetHadMatchingRuleKey("//:dummy");
}
@Test
public void targetsShowOutput() throws IOException {
String output =
workspace
.runBuckCommand(
"targets", "--show-output", "-c", "project.buck_out=something", "//:dummy")
.assertSuccess()
.getStdout()
.trim();
output = Splitter.on(' ').splitToList(output).get(1);
assertThat(MorePaths.pathWithUnixSeparators(output), Matchers.startsWith("something/"));
}
@Test
public void targetsShowOutputCompatSymlink() throws IOException {
assumeThat(Platform.detect(), Matchers.not(Matchers.is(Platform.WINDOWS)));
String output =
workspace
.runBuckCommand(
"targets",
"--show-output",
"-c",
"project.buck_out=something",
"-c",
"project.buck_out_compat_link=true",
"//:dummy")
.assertSuccess()
.getStdout()
.trim();
output = Splitter.on(' ').splitToList(output).get(1);
assertThat(MorePaths.pathWithUnixSeparators(output), Matchers.startsWith("buck-out/gen/"));
}
@Test
public void buildShowOutput() throws IOException {
Path output = workspace.buildAndReturnOutput("-c", "project.buck_out=something", "//:dummy");
assertThat(
MorePaths.pathWithUnixSeparators(workspace.getDestPath().relativize(output)),
Matchers.startsWith("something/"));
}
@Test
public void buildShowOutputCompatSymlink() throws IOException {
assumeThat(Platform.detect(), Matchers.not(Matchers.is(Platform.WINDOWS)));
Path output =
workspace.buildAndReturnOutput(
"-c",
"project.buck_out=something",
"-c",
"project.buck_out_compat_link=true",
"//:dummy");
assertThat(
MorePaths.pathWithUnixSeparators(workspace.getDestPath().relativize(output)),
Matchers.startsWith("buck-out/gen/"));
}
}<|fim▁end|> | workspace
.runBuckBuild(
"-c", |
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# OpenCraft -- tools to aid developing and hosting free software projects
# Copyright (C) 2015 OpenCraft <xavier@opencraft.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Worker tasks for instance hosting & management
"""
# Imports #####################################################################
from huey.djhuey import crontab, periodic_task, task
from django.conf import settings
from django.template.defaultfilters import truncatewords
from instance.github import get_username_list_from_team, get_pr_list_from_username
from instance.models.instance import OpenEdXInstance
<|fim▁hole|>logger = logging.getLogger(__name__)
# Tasks #######################################################################
@task()
def provision_instance(instance_pk):
"""
Run provisioning on an existing instance
"""
logger.info('Retreiving instance: pk=%s', instance_pk)
instance = OpenEdXInstance.objects.get(pk=instance_pk)
logger.info('Running provisioning on %s', instance)
instance.provision()
@periodic_task(crontab(minute='*/1'))
def watch_pr():
"""
Automatically create/update sandboxes for PRs opened by members of the watched
organization on the watched repository
"""
team_username_list = get_username_list_from_team(settings.WATCH_ORGANIZATION)
for username in team_username_list:
for pr in get_pr_list_from_username(username, settings.WATCH_FORK):
pr_sub_domain = 'pr{number}.sandbox'.format(number=pr.number)
instance, created = OpenEdXInstance.objects.get_or_create(
sub_domain=pr_sub_domain,
fork_name=pr.fork_name,
branch_name=pr.branch_name,
)
truncated_title = truncatewords(pr.title, 4)
instance.name = 'PR#{pr.number}: {truncated_title} ({pr.username}) - {i.reference_name}'\
.format(pr=pr, i=instance, truncated_title=truncated_title)
instance.github_pr_number = pr.number
instance.ansible_extra_settings = pr.extra_settings
instance.save()
if created:
logger.info('New PR found, creating sandbox: %s', pr)
provision_instance(instance.pk)<|fim▁end|> | # Logging #####################################################################
import logging |
<|file_name|>Title.react.js<|end_file_name|><|fim▁begin|>import React, {PropTypes} from 'react';
import Anchor from './Anchor';
import getIdFromTitle from '../util/getIdFromTitle';
const Title = ({children}) => (
<h3>
<Anchor id={getIdFromTitle(children)}>
{children}
</Anchor>
</h3>
);
Title.propTypes = {
children: PropTypes.string.isRequired,<|fim▁hole|>};
export default Title;<|fim▁end|> | |
<|file_name|>Remove.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2006-2008 Kazuyuki Shudo.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dhtaccess.tools;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import dhtaccess.core.DHTAccessor;
public class Remove {
private static final String COMMAND = "rm";
private static final String ENCODE = "UTF-8";
private static final String DEFAULT_GATEWAY = "http://opendht.nyuld.net:5851/";
private static void usage(String command) {
System.out.println("usage: " + command
+ " [-h] [-g <gateway>] [-t <ttl (sec)>] <key> <value> <secret>");
}
public static void main(String[] args) {
int ttl = 3600;
// parse properties
Properties prop = System.getProperties();
String gateway = prop.getProperty("dhtaccess.gateway");
if (gateway == null || gateway.length() <= 0) {
gateway = DEFAULT_GATEWAY;
}
// parse options
Options options = new Options();
options.addOption("h", "help", false, "print help");
options.addOption("g", "gateway", true, "gateway URI, list at http://opendht.org/servers.txt");
options.addOption("t", "ttl", true, "how long (in seconds) to store the value");
CommandLineParser parser = new PosixParser();<|fim▁hole|> System.out.println("There is an invalid option.");
e.printStackTrace();
System.exit(1);
}
String optVal;
if (cmd.hasOption('h')) {
usage(COMMAND);
System.exit(1);
}
optVal = cmd.getOptionValue('g');
if (optVal != null) {
gateway = optVal;
}
optVal = cmd.getOptionValue('t');
if (optVal != null) {
ttl = Integer.parseInt(optVal);
}
args = cmd.getArgs();
// parse arguments
if (args.length < 3) {
usage(COMMAND);
System.exit(1);
}
byte[] key = null, value = null, secret = null;
try {
key = args[0].getBytes(ENCODE);
value = args[1].getBytes(ENCODE);
secret = args[2].getBytes(ENCODE);
} catch (UnsupportedEncodingException e1) {
// NOTREACHED
}
// prepare for RPC
DHTAccessor accessor = null;
try {
accessor = new DHTAccessor(gateway);
} catch (MalformedURLException e) {
e.printStackTrace();
System.exit(1);
}
// RPC
int res = accessor.remove(key, value, ttl, secret);
String resultString;
switch (res) {
case 0:
resultString = "Success";
break;
case 1:
resultString = "Capacity";
break;
case 2:
resultString = "Again";
break;
default:
resultString = "???";
}
System.out.println(resultString);
}
}<|fim▁end|> | CommandLine cmd = null;
try {
cmd = parser.parse(options, args);
} catch (ParseException e) { |
<|file_name|>DeletePermissionMutation.js<|end_file_name|><|fim▁begin|>/* eslint-disable class-methods-use-this */
import Relay from 'react-relay';
class DeletePermissionMutation extends Relay.Mutation {
getMutation() {
return Relay.QL`
mutation {
deletePermission(input: $input)
}
`;
}
getVariables() {
return {
projectId: this.props.project.id,
userId: this.props.userId,
};
}
getFatQuery() {
return Relay.QL`
fragment on DeletePermissionPayload {
deletedPermissionId
viewer {
id
project(name: "${this.props.project.name}") {
users(first: 10) {
edges {
node {
id
fullname
}<|fim▁hole|> id
fullname
}
}
}
`;
}
getConfigs() {
return [{
type: 'NODE_DELETE',
parentName: 'viewer',
parentID: this.props.viewer.id,
connectionName: 'users',
deletedIDFieldName: 'deletedPermissionId',
}];
}
}
export default DeletePermissionMutation;<|fim▁end|> | }
}
}
allUsers { |
<|file_name|>CustomizedHandlerExceptionResolver.java<|end_file_name|><|fim▁begin|>package com.zk.web.interceptor;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.Ordered;
import org.springframework.http.HttpStatus;<|fim▁hole|>
public class CustomizedHandlerExceptionResolver implements HandlerExceptionResolver, Ordered {
private static final Logger LOGGER = LoggerFactory.getLogger(CustomizedHandlerExceptionResolver.class);
public int getOrder() {
return Integer.MIN_VALUE;
}
public ModelAndView resolveException(HttpServletRequest aReq, HttpServletResponse aRes, Object aHandler,
Exception exception) {
if (aHandler instanceof HandlerMethod) {
if (exception instanceof BindException) {
return null;
}
}
LOGGER.error(StringUtils.EMPTY, exception);
ModelAndView mav = new ModelAndView("common/error");
String errorMsg = exception.getMessage();
aRes.setStatus(HttpStatus.INTERNAL_SERVER_ERROR.value());
if ("XMLHttpRequest".equals(aReq.getHeader("X-Requested-With"))) {
try {
aRes.setContentType("application/text; charset=utf-8");
PrintWriter writer = aRes.getWriter();
aRes.setStatus(HttpServletResponse.SC_FORBIDDEN);
writer.print(errorMsg);
writer.flush();
writer.close();
return null;
} catch (IOException e) {
LOGGER.error(e.getMessage(), e);
}
}
mav.addObject("errorMsg", errorMsg);
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw, true);
exception.printStackTrace(pw);
mav.addObject("stackTrace", sw.getBuffer().toString());
mav.addObject("exception", exception);
return mav;
}
}<|fim▁end|> | import org.springframework.validation.BindException;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.HandlerExceptionResolver;
import org.springframework.web.servlet.ModelAndView; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | # 主要是为了使用中文显示 app 于 admin 界面
default_app_config = 'bespeak_meal.apps.Bespeak_meal_config' |
<|file_name|>lib.js<|end_file_name|><|fim▁begin|>/**
* @author Ultimo <von.ultimo@gmail.com>
* @license http://www.opensource.org/licenses/mit-license.html MIT License
* @version 0.1 (25-06-2017)
*
* Hier schreiben wir die JavaScript Funktionen.
* */<|fim▁hole|>$(document).ready(function(){
$("td:contains('-')").filter(":contains('€')").addClass('neg');
$(".betrag").filter(":contains('-')").addClass('neg');
});
function goBack() {
window.history.back();
}<|fim▁end|> |
src="jquery-3.2.1.min";
|
<|file_name|>issue-888-enum-var-decl-jump.rs<|end_file_name|><|fim▁begin|>#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
pub mod root {
#[allow(unused_imports)]
use self::super::root;
pub mod Halide {
#[allow(unused_imports)]
use self::super::super::root;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct Type {
pub _address: u8,
}<|fim▁hole|> #[link_name = "\u{1}_ZN6Halide4Type1bE"]
pub static mut Type_b: root::a;
}
#[test]
fn bindgen_test_layout_Type() {
assert_eq!(
::std::mem::size_of::<Type>(),
1usize,
concat!("Size of: ", stringify!(Type))
);
assert_eq!(
::std::mem::align_of::<Type>(),
1usize,
concat!("Alignment of ", stringify!(Type))
);
}
}
#[repr(u32)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub enum a {
__bindgen_cannot_repr_c_on_empty_enum = 0,
}
}<|fim▁end|> | extern "C" { |
<|file_name|>sandglass.go<|end_file_name|><|fim▁begin|>package sandglass
import (
"fmt"
<|fim▁hole|> "google.golang.org/grpc"
)
type Node struct {
ID string
Name string
IP string
GRPCAddr string
RAFTAddr string
HTTPAddr string
Status serf.MemberStatus
conn *grpc.ClientConn
sgproto.BrokerServiceClient
sgproto.InternalServiceClient
}
func (n *Node) Dial() (err error) {
n.conn, err = grpc.Dial(n.GRPCAddr, grpc.WithInsecure())
if err != nil {
return err
}
n.BrokerServiceClient = sgproto.NewBrokerServiceClient(n.conn)
n.InternalServiceClient = sgproto.NewInternalServiceClient(n.conn)
return nil
}
func (n *Node) Close() error {
if n.conn == nil {
return nil
}
if err := n.conn.Close(); err != nil {
return err
}
n.conn = nil
return nil
}
func (n *Node) String() string {
return fmt.Sprintf("%s(%s)", n.Name, n.GRPCAddr)
}
func (n *Node) IsAlive() bool {
return n.conn != nil
}<|fim▁end|> | "github.com/hashicorp/serf/serf"
"github.com/sandglass/sandglass-grpc/go/sgproto" |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import taggit
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django.utils.translation import ugettext as _
from geonode.base.models import ContactRole
from captcha.fields import ReCaptchaField
# Ported in from django-registration
attrs_dict = {'class': 'required'}
class AllauthReCaptchaSignupForm(forms.Form):
captcha = ReCaptchaField()
def signup(self, request, user):
""" Required, or else it thorws deprecation warnings """
pass
class ProfileCreationForm(UserCreationForm):
class Meta:
model = get_user_model()
fields = ("username",)
def clean_username(self):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
username = self.cleaned_data["username"]
try:
get_user_model().objects.get(username=username)
except get_user_model().DoesNotExist:
return username
raise forms.ValidationError(
self.error_messages['duplicate_username'],
code='duplicate_username',
)
class ProfileChangeForm(UserChangeForm):
class Meta:
model = get_user_model()
fields = '__all__'
class ForgotUsernameForm(forms.Form):
email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict,
maxlength=75)),
label=_('Email Address'))
class RoleForm(forms.ModelForm):
class Meta:
model = ContactRole
exclude = ('contact', 'layer')
class PocForm(forms.Form):
contact = forms.ModelChoiceField(label="New point of contact",
queryset=get_user_model().objects.all())<|fim▁hole|> keywords = taggit.forms.TagField(
label=_("Keywords"),
required=False,
help_text=_("A space or comma-separated list of keywords"))
class Meta:
model = get_user_model()
exclude = (
'user',
'password',
'last_login',
'groups',
'user_permissions',
'username',
'is_staff',
'is_superuser',
'is_active',
'date_joined'
)<|fim▁end|> |
class ProfileForm(forms.ModelForm): |
<|file_name|>block_test.go<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package common_test
import (
jc "github.com/juju/testing/checkers"
gc "gopkg.in/check.v1"
"github.com/juju/errors"
"github.com/juju/juju/apiserver/common"
"github.com/juju/juju/environs/config"
"github.com/juju/juju/testing"
)
type blocksSuite struct {
testing.FakeJujuHomeSuite
destroy, remove, change bool
cfg *config.Config
}
var _ = gc.Suite(&blocksSuite{})
func (s *blocksSuite) TearDownTest(c *gc.C) {
s.destroy, s.remove, s.change = false, false, false
}
func (s *blocksSuite) SetUpTest(c *gc.C) {
s.FakeJujuHomeSuite.SetUpTest(c)
cfg, err := config.New(
config.UseDefaults,
map[string]interface{}{
"name": "block-env",
"type": "any-type",
},
)
c.Assert(err, jc.ErrorIsNil)
s.cfg = cfg
}
func (s *blocksSuite) TestBlockOperationErrorDestroy(c *gc.C) {
// prevent destroy-environment
s.blockDestroys(c)
s.assertDestroyOperationBlocked(c, true)
// prevent remove-object
s.blockRemoves(c)
s.assertDestroyOperationBlocked(c, true)
// prevent all-changes
s.blockAllChanges(c)
s.assertDestroyOperationBlocked(c, true)
}
func (s *blocksSuite) TestBlockOperationErrorRemove(c *gc.C) {
// prevent destroy-environment
s.blockDestroys(c)
s.assertRemoveOperationBlocked(c, false)
// prevent remove-object
s.blockRemoves(c)
s.assertRemoveOperationBlocked(c, true)
// prevent all-changes
s.blockAllChanges(c)
s.assertRemoveOperationBlocked(c, true)
}
func (s *blocksSuite) TestBlockOperationErrorChange(c *gc.C) {
// prevent destroy-environment
s.blockDestroys(c)
s.assertChangeOperationBlocked(c, false)
// prevent remove-object
s.blockRemoves(c)
s.assertChangeOperationBlocked(c, false)
// prevent all-changes
s.blockAllChanges(c)
s.assertChangeOperationBlocked(c, true)
}
func (s *blocksSuite) blockDestroys(c *gc.C) {
s.destroy, s.remove, s.change = true, false, false
}
func (s *blocksSuite) blockRemoves(c *gc.C) {
s.remove, s.destroy, s.change = true, false, false
}
func (s *blocksSuite) blockAllChanges(c *gc.C) {
s.change, s.destroy, s.remove = true, false, false
}
func (s *blocksSuite) assertDestroyOperationBlocked(c *gc.C, value bool) {
s.assertOperationBlocked(c, common.DestroyOperation, value)
}
func (s *blocksSuite) assertRemoveOperationBlocked(c *gc.C, value bool) {
s.assertOperationBlocked(c, common.RemoveOperation, value)
}
func (s *blocksSuite) assertChangeOperationBlocked(c *gc.C, value bool) {
s.assertOperationBlocked(c, common.ChangeOperation, value)
}
func (s *blocksSuite) assertOperationBlocked(c *gc.C, operation common.Operation, value bool) {
c.Assert(common.IsOperationBlocked(operation, s.getCurrentConfig(c)), gc.Equals, value)
}
func (s *blocksSuite) getCurrentConfig(c *gc.C) *config.Config {
cfg, err := s.cfg.Apply(map[string]interface{}{
"block-destroy-environment": s.destroy,
"block-remove-object": s.remove,
"block-all-changes": s.change,
})
c.Assert(err, jc.ErrorIsNil)
return cfg
}
type blockCheckerSuite struct {
blocksSuite
getter *mockGetter
blockchecker *common.BlockChecker
}
var _ = gc.Suite(&blockCheckerSuite{})
func (s *blockCheckerSuite) SetUpTest(c *gc.C) {
s.blocksSuite.SetUpTest(c)
s.getter = &mockGetter{
suite: s,
c: c,
}
s.blockchecker = common.NewBlockChecker(s.getter)
}
type mockGetter struct {
suite *blockCheckerSuite
c *gc.C
}
func (mock *mockGetter) EnvironConfig() (*config.Config, error) {
return mock.suite.getCurrentConfig(mock.c), nil
}
func (s *blockCheckerSuite) TestDestroyBlockChecker(c *gc.C) {
s.blockDestroys(c)
s.assertDestroyBlocked(c)
s.blockRemoves(c)<|fim▁hole|>}
func (s *blockCheckerSuite) TestRemoveBlockChecker(c *gc.C) {
s.blockDestroys(c)
s.assertRemoveBlocked(c, false)
s.blockRemoves(c)
s.assertRemoveBlocked(c, true)
s.blockAllChanges(c)
s.assertRemoveBlocked(c, true)
}
func (s *blockCheckerSuite) TestChangeBlockChecker(c *gc.C) {
s.blockDestroys(c)
s.assertChangeBlocked(c, false)
s.blockRemoves(c)
s.assertChangeBlocked(c, false)
s.blockAllChanges(c)
s.assertChangeBlocked(c, true)
}
func (s *blockCheckerSuite) assertDestroyBlocked(c *gc.C) {
c.Assert(errors.Cause(s.blockchecker.DestroyAllowed()), gc.Equals, common.ErrOperationBlocked)
}
func (s *blockCheckerSuite) assertRemoveBlocked(c *gc.C, blocked bool) {
if blocked {
c.Assert(errors.Cause(s.blockchecker.RemoveAllowed()), gc.Equals, common.ErrOperationBlocked)
} else {
c.Assert(errors.Cause(s.blockchecker.RemoveAllowed()), jc.ErrorIsNil)
}
}
func (s *blockCheckerSuite) assertChangeBlocked(c *gc.C, blocked bool) {
if blocked {
c.Assert(errors.Cause(s.blockchecker.ChangeAllowed()), gc.Equals, common.ErrOperationBlocked)
} else {
c.Assert(errors.Cause(s.blockchecker.ChangeAllowed()), jc.ErrorIsNil)
}
}<|fim▁end|> | s.assertDestroyBlocked(c)
s.blockAllChanges(c)
s.assertDestroyBlocked(c) |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>"""
A misc set of utilities useful in the data-import domain.
"""
import logging
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from open_municipio.people.models import Person, municipality
from open_municipio.data_import.models import LookupInstitutionCharge, LookupCompanyCharge, LookupAdministrationCharge, LookupPerson
import socket
# configure xml namespaces
NS = {
'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
'om': 'http://www.openmunicipio.it',
'xlink': 'http://www.w3.org/1999/xlink'
}
XLINK_NAMESPACE = NS['xlink']
XLINK = "{%s}" % XLINK_NAMESPACE
class ChargeSeekerMixin:
"""
Codice di Guglielmo. Per recuperare la carica si basa su un file XML che
riporta la corrispondenza delle persone (People) e dalle date si
risale all'incarico desiderato
"""
logger = logging.getLogger('import')
def lookupCharge(self, people_tree, ds_charge_id, institution=None, moment=None):
"""
look for the correct open municipio charge, or return None
starting from an internal, domain-specific, charge id
using the mapping in people_tree (lxml.etree)
if the moment parameter is not passed, then current charges are looked up
"""
try:
people_charges = people_tree.xpath(
'//om:Person[@id="%s"]' % ds_charge_id,
namespaces=NS
)
if len(people_charges):
om_id = people_charges[0].get('om_id')
if om_id is None:
self.logger.warning("charge with id %s has no om_id (past charge?). Skipping." % ds_charge_id)
return None
if institution is None:
charge_type = people_charges[0].get('charge')
if charge_type is None:
self.logger.warning("charge with id %s has no charge attribute. Skipping." % ds_charge_id)
return None
# institution is grabbed from charge attribute, in acts import
# since mayor and deputies may sign acts, not only counselor
if charge_type == 'counselor':
institution = municipality.council.as_institution
elif charge_type == 'deputy' or charge_type == 'firstdeputy':
institution = municipality.gov.as_institution
elif charge_type == 'mayor':
institution = municipality.mayor.as_institution
else:
self.logger.error("Warning: charge with id %s has wrong charge attribute %s. Skipping." %
(ds_charge_id, charge_type))
return None
try:
person = Person.objects.get(pk=int(om_id))
charge = person.get_current_charge_in_institution(institution, moment=moment)
self.logger.debug("id %s (%s) mapped to %s (%s)" %
(ds_charge_id, institution, person, charge))
return charge
except ObjectDoesNotExist:
self.logger.warning("could not find person or charge for id = %s (om_id=%s) (%s) in OM DB. Skipping." % (ds_charge_id, om_id, institution))
return None
except MultipleObjectsReturned:
self.logger.error("found more than one person or charge for id %s (om_id=%s) (%s) in OM DB. Skipping." % (ds_charge_id, om_id, institution))
return None
else:
self.logger.warning("could not find person for id %s in people XML file. Skipping." % ds_charge_id)
return None
except ObjectDoesNotExist:
self.logger.warning("could not find charge for %s in Open Municipio DB. Skipping." % ds_charge_id)
return None
def netcat(hostname, port, content):
"""
netcat (nc) implementation in python<|fim▁hole|> s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((hostname, port))
s.sendall(content)
s.shutdown(socket.SHUT_WR)
res = ''
while 1:
data = s.recv(1024)
if data == "":
break
res += data
s.close()
return res
def get_row_dicts(cursor, query, params=()):
"""
Convert a sequence of row (record) tuples -- as returned by a call to Python DBAPI's ``cursor.connect()``
method -- to a list of row dicts keyed by column names.
Take the following arguments:
* ``cursor``: a DBAPI cursor object
* ``query``: a SQL statement string (possibily including parameter markers)
* ``params``: a sequence of parameters for the SQL query string to be interpolated with
"""
cursor.execute(query, params)
colnames = [desc[0] for desc in cursor.description]
row_dicts = [dict(zip(colnames, row)) for row in cursor.fetchall()]
return row_dicts
def create_table_schema(table_name, table_schema):
"""
Generate the SQL statement to execute in order to create a DB table.
Takes the following parameters:
* ``table_name``: a string to be used as the table name
* ``table_schema``: a dict mapping column names to column types (as strings)
Note that supported column types may vary depending on the RDBMS of choice.
"""
sql = "CREATE TABLE %s \n" % table_name
sql += "(\n"
for (col_name, col_type) in table_schema.items():
sql += " %(col_name)s\t%(col_type)s,\n" % {'col_name': col_name, 'col_type': col_type}
# remove last comma (otherwise RBDMS may complain)
sql = sql[:-2] + '\n'
sql += ");\n"
return sql
class PersonSeekerMixin:
logger = logging.getLogger("import")
def lookup_person(self, external, provider):
return LookupPerson.lookup(external, provider)
class ChargeMapCache:
cache = {}
@staticmethod
def lookup(external, provider, as_of):
if provider in ChargeMapCache.cache:
if external in ChargeMapCache.cache[provider]:
if as_of in ChargeMapCache.cache[provider][external]:
return ChargeMapCache.cache[provider][external][as_of]
return None
@staticmethod
def update(external, provider, as_of, value):
if not provider in ChargeMapCache.cache:
ChargeMapCache.cache[provider] = {}
if not external in ChargeMapCache.cache[provider]:
ChargeMapCache.cache[provider][external] = {}
ChargeMapCache.cache[provider][external][as_of] = value
class ChargeSeekerFromMapMixin:
"""
Associates external identifiers with internal ones. Use a dictionary as
cache in order to reduce the number of executed queries. This assume that the
Lookup* objects are **ALL** created before invoking the lookup_charge
method
"""
logger = logging.getLogger("import")
def lookup_charge(self, external, provider, as_of=None):
# if already mapped, return the result from the cache
found_internal = ChargeMapCache.lookup(external, provider, as_of)
if not found_internal:
try:
found_internal = LookupInstitutionCharge.lookup(external,provider,as_of)
except ObjectDoesNotExist:
pass
if not found_internal:
try:
found_internal = LookupCompanyCharge.lookup(external,provider, as_of)
except ObjectDoesNotExist:
pass
if not found_internal:
try:
found_internal = LookupAdministrationCharge.lookup(external, provider, as_of)
except ObjectDoesNotExist:
pass
# store in the cache for future use
#self.charge_map_cache[provider][external][as_of] = found_internal
ChargeMapCache.update(external, provider, as_of, found_internal)
return found_internal
class OMChargeSeekerMixin:
logger = logging.getLogger("import")
def lookup_charge(self, person, institution, as_of=None):
"""
lookup for the charge of the person at a specific moment in time. the
person is an instance of class Person. institution is an instance of
class Institution. as_of is a string of format "YYYY-MM-DD"
"""
if person == None:
raise Exception("Can't search a charge for no person")
if institution is None:
raise Exception("Can't search a charge without an institution")
try:
charge = person.get_current_charge_in_institution(institution,
moment=as_of)
return charge
except ObjectDoesNotExist:
self.logger.warning("Can't find charge for person %s" % person)
return None
except MultipleObjectsReturned:
self.logger.warning("Found more than one person or charge for id %s (institution %s) in OM. Skipping." % (person, institution))
return None<|fim▁end|> | """ |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from open_municipio.locations.models import Location<|fim▁hole|>
class LocationAdmin(admin.ModelAdmin):
    # Columns shown in the admin changelist for Location records.
    list_display = ('name', 'count')

# Expose Location in the Django admin site with the customized options above.
admin.site.register(Location, LocationAdmin)
<|file_name|>constant_op_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ConstantOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np<|fim▁hole|>
from tensorflow.python.ops import array_ops
class ConstantTest(tf.test.TestCase):
def _testCpu(self, x):
np_ans = np.array(x)
with self.test_session(use_gpu=False):
tf_ans = tf.convert_to_tensor(x).eval()
if np_ans.dtype in [np.float32, np.float64, np.complex64, np.complex128]:
self.assertAllClose(np_ans, tf_ans)
else:
self.assertAllEqual(np_ans, tf_ans)
def _testGpu(self, x):
np_ans = np.array(x)
with self.test_session(use_gpu=True):
tf_ans = tf.convert_to_tensor(x).eval()
if np_ans.dtype in [np.float32, np.float64, np.complex64, np.complex128]:
self.assertAllClose(np_ans, tf_ans)
else:
self.assertAllEqual(np_ans, tf_ans)
def _testAll(self, x):
self._testCpu(x)
self._testGpu(x)
def testFloat(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
self._testAll(
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float32))
self._testAll(np.empty((2, 0, 5)).astype(np.float32))
def testDouble(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float64))
self._testAll(
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float64))
self._testAll(np.empty((2, 0, 5)).astype(np.float64))
def testInt32(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int32))
self._testAll(
(100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(np.int32))
self._testAll(np.empty((2, 0, 5)).astype(np.int32))
def testInt64(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int64))
self._testAll(
(100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(np.int64))
self._testAll(np.empty((2, 0, 5)).astype(np.int64))
def testComplex64(self):
self._testAll(
np.complex(1, 2) * np.arange(-15, 15).reshape([2, 3, 5]).astype(
np.complex64))
self._testAll(np.complex(
1, 2) * np.random.normal(size=30).reshape([2, 3, 5]).astype(
np.complex64))
self._testAll(np.empty((2, 0, 5)).astype(np.complex64))
def testComplex128(self):
self._testAll(
np.complex(1, 2) * np.arange(-15, 15).reshape([2, 3, 5]).astype(
np.complex128))
self._testAll(np.complex(
1, 2) * np.random.normal(size=30).reshape([2, 3, 5]).astype(
np.complex128))
self._testAll(np.empty((2, 0, 5)).astype(np.complex128))
def testString(self):
self._testCpu(np.array([tf.compat.as_bytes(str(x))
for x in np.arange(-15, 15)]).reshape([2, 3, 5]))
self._testCpu(np.empty((2, 0, 5)).astype(np.str_))
def testStringWithNulls(self):
with self.test_session():
val = tf.convert_to_tensor(b"\0\0\0\0").eval()
self.assertEqual(len(val), 4)
self.assertEqual(val, b"\0\0\0\0")
with self.test_session():
val = tf.convert_to_tensor(b"xx\0xx").eval()
self.assertEqual(len(val), 5)
self.assertAllEqual(val, b"xx\0xx")
nested = [[b"\0\0\0\0", b"xx\0xx"], [b"\0_\0_\0_\0", b"\0"]]
with self.test_session():
val = tf.convert_to_tensor(nested).eval()
# NOTE(mrry): Do not use assertAllEqual, because it converts nested to a
# numpy array, which loses the null terminators.
self.assertEqual(val.tolist(), nested)
def testExplicitShapeNumPy(self):
with tf.Graph().as_default():
c = tf.constant(
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32),
shape=[2, 3, 5])
self.assertEqual(c.get_shape(), [2, 3, 5])
def testImplicitShapeNumPy(self):
with tf.Graph().as_default():
c = tf.constant(
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
self.assertEqual(c.get_shape(), [2, 3, 5])
def testExplicitShapeList(self):
with tf.Graph().as_default():
c = tf.constant([1, 2, 3, 4, 5, 6, 7], shape=[7])
self.assertEqual(c.get_shape(), [7])
def testImplicitShapeList(self):
with tf.Graph().as_default():
c = tf.constant([1, 2, 3, 4, 5, 6, 7])
self.assertEqual(c.get_shape(), [7])
def testExplicitShapeNumber(self):
with tf.Graph().as_default():
c = tf.constant(1, shape=[1])
self.assertEqual(c.get_shape(), [1])
def testImplicitShapeNumber(self):
with tf.Graph().as_default():
c = tf.constant(1)
self.assertEqual(c.get_shape(), [])
def testShapeInconsistent(self):
with tf.Graph().as_default():
c = tf.constant([1, 2, 3, 4, 5, 6, 7], shape=[10])
self.assertEqual(c.get_shape(), [10])
# pylint: disable=g-long-lambda
def testShapeWrong(self):
with tf.Graph().as_default():
with self.assertRaisesWithPredicateMatch(
ValueError,
lambda e: ("Too many elements provided. Needed at most 5, "
"but received 7" == str(e))):
tf.constant([1, 2, 3, 4, 5, 6, 7], shape=[5])
# pylint: enable=g-long-lambda
def testTooLargeConstant(self):
with tf.Graph().as_default():
large_array = np.zeros((512, 1024, 1024), dtype=np.float32)
with self.assertRaisesRegexp(
ValueError,
"Cannot create a tensor proto whose content is larger than 2GB."):
c = tf.constant(large_array)
def testTooLargeGraph(self):
with tf.Graph().as_default() as g:
large_array = np.zeros((256, 1024, 1024), dtype=np.float32)
c = tf.constant(large_array)
d = tf.constant(large_array)
with self.assertRaisesRegexp(
ValueError, "GraphDef cannot be larger than 2GB."):
g.as_graph_def()
def testSparseValuesRaiseErrors(self):
with self.assertRaisesRegexp(ValueError,
"setting an array element with a sequence"):
c = tf.constant([[1, 2], [3]], dtype=tf.int32)
with self.assertRaisesRegexp(ValueError, "must be a dense"):
c = tf.constant([[1, 2], [3]])
with self.assertRaisesRegexp(ValueError, "must be a dense"):
c = tf.constant([[1, 2], [3], [4, 5]])
class AsTensorTest(tf.test.TestCase):
def testAsTensorForTensorInput(self):
with tf.Graph().as_default():
t = tf.constant(10.0)
x = tf.convert_to_tensor(t)
self.assertIs(t, x)
def testAsTensorForNonTensorInput(self):
with tf.Graph().as_default():
x = tf.convert_to_tensor(10.0)
self.assertTrue(isinstance(x, tf.Tensor))
def testAsTensorForShapeInput(self):
with self.test_session():
x = tf.convert_to_tensor(tf.TensorShape([]))
self.assertEqual(tf.int32, x.dtype)
self.assertAllEqual([], x.eval())
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3]))
self.assertEqual(tf.int32, x.dtype)
self.assertAllEqual([1, 2, 3], x.eval())
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3]), dtype=tf.int64)
self.assertEqual(tf.int64, x.dtype)
self.assertAllEqual([1, 2, 3], x.eval())
x = tf.reshape(tf.zeros([6]), tf.TensorShape([2, 3]))
self.assertAllEqual([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], x.eval())
with self.assertRaisesRegexp(ValueError, "partially known"):
tf.convert_to_tensor(tf.TensorShape(None))
with self.assertRaisesRegexp(ValueError, "partially known"):
tf.convert_to_tensor(tf.TensorShape([1, None, 64]))
with self.assertRaises(TypeError):
tf.convert_to_tensor(tf.TensorShape([1, 2, 3]), dtype=tf.float32)
def testAsTensorForDimensionInput(self):
with self.test_session():
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3])[1])
self.assertEqual(tf.int32, x.dtype)
self.assertAllEqual(2, x.eval())
x = tf.convert_to_tensor(tf.TensorShape([1, 2, 3])[1], dtype=tf.int64)
self.assertEqual(tf.int64, x.dtype)
self.assertAllEqual(2, x.eval())
with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
tf.convert_to_tensor(tf.TensorShape(None)[1])
with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
tf.convert_to_tensor(tf.TensorShape([1, None, 64])[1])
with self.assertRaises(TypeError):
tf.convert_to_tensor(tf.TensorShape([1, 2, 3])[1], dtype=tf.float32)
class IdentityOpTest(tf.test.TestCase):
  """Tests for the tf.identity op."""

  def testIdTensor(self):
    # tf.identity should forward its single input unchanged: the op's
    # first input is the original tensor, and the resulting node is an
    # 'Identity' node carrying the input's dtype (float here).
    with tf.Graph().as_default():
      x = tf.constant(2.0, shape=[6], name="input")
      id_op = tf.identity(x, name="id")
      self.assertTrue(isinstance(id_op.op.inputs[0], tf.Tensor))
      self.assertProtoEquals(
          "name: 'id' op: 'Identity' input: 'input' "
          "attr { key: 'T' value { type: DT_FLOAT } }", id_op.op.node_def)
class ZerosTest(tf.test.TestCase):
def _Zeros(self, shape):
with self.test_session():
ret = tf.zeros(shape)
self.assertEqual(shape, ret.get_shape())
return ret.eval()
def testConst(self):
self.assertTrue(np.array_equal(self._Zeros([2, 3]), np.array([[0] * 3] *
2)))
def testScalar(self):
self.assertEqual(0, self._Zeros([]))
self.assertEqual(0, self._Zeros(()))
with self.test_session():
scalar = tf.zeros(tf.constant([], dtype=tf.int32))
self.assertEqual(0, scalar.eval())
def testDynamicSizes(self):
np_ans = np.array([[0] * 3] * 2)
with self.test_session():
# Creates a tensor of 2 x 3.
d = tf.fill([2, 3], 12., name="fill")
# Constructs a tensor of zeros of the same dimensions as "d".
z = tf.zeros(tf.shape(d))
out = z.eval()
self.assertAllEqual(np_ans, out)
self.assertShapeEqual(np_ans, d)
self.assertShapeEqual(np_ans, z)
def testDtype(self):
with self.test_session():
d = tf.fill([2, 3], 12., name="fill")
self.assertEqual(d.get_shape(), [2, 3])
# Test default type for both constant size and dynamic size
z = tf.zeros([2, 3])
self.assertEqual(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
z = tf.zeros(tf.shape(d))
self.assertEqual(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
# Test explicit type control
for dtype in [tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.complex128, tf.int64, tf.bool]:
z = tf.zeros([2, 3], dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
z = tf.zeros(tf.shape(d), dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
class ZerosLikeTest(tf.test.TestCase):
def _compareZeros(self, dtype, use_gpu):
with self.test_session(use_gpu=False):
# Creates a tensor of non-zero values with shape 2 x 3.
numpy_dtype = dtype.as_numpy_dtype
d = tf.constant(np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
# Constructs a tensor of zeros of the same dimensions and type as "d".
z_var = tf.zeros_like(d)
# Test that the type is correct
self.assertEqual(z_var.dtype, dtype)
z_value = z_var.eval()
# Test that the value is correct
self.assertTrue(np.array_equal(z_value, np.array([[0] * 3] * 2)))
self.assertEqual([2, 3], z_var.get_shape())
def testZerosLikeCPU(self):
for dtype in [tf.float32, tf.float64, tf.int32, tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.complex128, tf.int64]:
self._compareZeros(dtype, False)
def testZerosLikeGPU(self):
for dtype in [tf.float32, tf.float64, tf.int32]:
self._compareZeros(dtype, True)
def testZerosLikePartialShape(self):
d = tf.placeholder(tf.float32, shape=[None, 4, None])
z = tf.zeros_like(d)
self.assertEqual(d.get_shape().as_list(), z.get_shape().as_list())
def testZerosLikeDtype(self):
# Make sure zeros_like works even for dtypes that cannot be cast between
with self.test_session():
shape = (3, 5)
dtypes = np.float32, np.complex64
for in_type in dtypes:
x = np.arange(15).astype(in_type).reshape(*shape)
for out_type in dtypes:
y = tf.zeros_like(x, dtype=out_type).eval()
self.assertEqual(y.dtype, out_type)
self.assertEqual(y.shape, shape)
self.assertAllEqual(y, np.zeros(shape, dtype=out_type))
class OnesTest(tf.test.TestCase):
def _Ones(self, shape):
with self.test_session():
ret = tf.ones(shape)
self.assertEqual(shape, ret.get_shape())
return ret.eval()
def testConst(self):
self.assertTrue(np.array_equal(self._Ones([2, 3]), np.array([[1] * 3] * 2)))
def testScalar(self):
self.assertEqual(1, self._Ones([]))
self.assertEqual(1, self._Ones(()))
with self.test_session():
scalar = tf.ones(tf.constant([], dtype=tf.int32))
self.assertEqual(1, scalar.eval())
def testDynamicSizes(self):
np_ans = np.array([[1] * 3] * 2)
with self.test_session():
# Creates a tensor of 2 x 3.
d = tf.fill([2, 3], 12., name="fill")
# Constructs a tensor of ones of the same dimensions as "d".
z = tf.ones(tf.shape(d))
out = z.eval()
self.assertAllEqual(np_ans, out)
self.assertShapeEqual(np_ans, d)
self.assertShapeEqual(np_ans, z)
def testAutoPack(self):
with self.test_session():
h = tf.placeholder(tf.int32, shape=[])
w = tf.placeholder(tf.int32, shape=[])
z = tf.ones([h, w])
out = z.eval(feed_dict={h: 4, w: 16})
self.assertAllEqual(out, np.array([[1] * 16] * 4))
def testDtype(self):
with self.test_session():
d = tf.fill([2, 3], 12., name="fill")
self.assertEqual(d.get_shape(), [2, 3])
# Test default type for both constant size and dynamic size
z = tf.ones([2, 3])
self.assertEqual(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
z = tf.ones(tf.shape(d))
self.assertEqual(z.dtype, tf.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
# Test explicit type control
for dtype in (tf.float32, tf.float64, tf.int32,
tf.uint8, tf.int16, tf.int8,
tf.complex64, tf.complex128, tf.int64, tf.bool):
z = tf.ones([2, 3], dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
z = tf.ones(tf.shape(d), dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
class OnesLikeTest(tf.test.TestCase):
  """Tests for tf.ones_like."""

  def testOnesLike(self):
    # ones_like must preserve both the dtype and the static shape of its
    # input across all supported numeric dtypes.
    for dtype in [tf.float32, tf.float64, tf.int32,
                  tf.uint8, tf.int16, tf.int8,
                  tf.complex64, tf.complex128, tf.int64]:
      numpy_dtype = dtype.as_numpy_dtype
      with self.test_session():
        # Creates a tensor of non-zero values with shape 2 x 3.
        d = tf.constant(np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
        # Constructs a tensor of ones of the same dimensions and type as "d".
        z_var = tf.ones_like(d)
        # Test that the type is correct
        self.assertEqual(z_var.dtype, dtype)
        z_value = z_var.eval()
        # Test that the value is correct
        self.assertTrue(np.array_equal(z_value, np.array([[1] * 3] * 2)))
        self.assertEqual([2, 3], z_var.get_shape())
  def testOnesLikePartialShape(self):
    # Partially-known shapes (None dims) must survive ones_like unchanged.
    d = tf.placeholder(tf.float32, shape=[None, 4, None])
    z = tf.ones_like(d)
    self.assertEqual(d.get_shape().as_list(), z.get_shape().as_list())
class FillTest(tf.test.TestCase):
def _compare(self, dims, val, np_ans, use_gpu):
with self.test_session(use_gpu=use_gpu):
tf_ans = tf.fill(dims, val, name="fill")
out = tf_ans.eval()
self.assertAllClose(np_ans, out)
# Fill does not set the shape.
# self.assertShapeEqual(np_ans, tf_ans)
def _compareAll(self, dims, val, np_ans):
self._compare(dims, val, np_ans, False)
self._compare(dims, val, np_ans, True)
def testFillFloat(self):
np_ans = np.array([[3.1415] * 3] * 2).astype(np.float32)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillDouble(self):
np_ans = np.array([[3.1415] * 3] * 2).astype(np.float64)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillInt32(self):
np_ans = np.array([[42] * 3] * 2).astype(np.int32)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillInt64(self):
np_ans = np.array([[-42] * 3] * 2).astype(np.int64)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillComplex64(self):
np_ans = np.array([[0.15] * 3] * 2).astype(np.complex64)
self._compare([2, 3], np_ans[0][0], np_ans, use_gpu=False)
def testFillComplex128(self):
np_ans = np.array([[0.15] * 3] * 2).astype(np.complex128)
self._compare([2, 3], np_ans[0][0], np_ans, use_gpu=False)
def testFillString(self):
np_ans = np.array([[b"yolo"] * 3] * 2)
with self.test_session(use_gpu=False):
tf_ans = tf.fill([2, 3], np_ans[0][0], name="fill").eval()
self.assertAllEqual(np_ans, tf_ans)
def testFillNegative(self):
with self.test_session():
for shape in (-1,), (2, -1), (-1, 2):
with self.assertRaises(ValueError):
tf.fill(shape, 7)
# Using a placeholder so this won't be caught in Python.
dims = tf.placeholder(tf.int32)
fill_t = tf.fill(dims, 3.0)
for shape in (-1,), (2, -1), (-1, 2):
with self.assertRaises(tf.errors.InvalidArgumentError):
fill_t.eval({dims: shape})
def testShapeFunctionEdgeCases(self):
# Non-vector dimensions.
with self.assertRaises(ValueError):
tf.fill([[0, 1], [2, 3]], 1.0)
# Non-scalar value.
with self.assertRaises(ValueError):
tf.fill([3, 2], [1.0, 2.0])
# Partial dimension information.
f = tf.fill(
tf.placeholder(tf.int32, shape=(4,)), 3.0)
self.assertEqual([None, None, None, None], f.get_shape().as_list())
f = tf.fill([tf.placeholder(tf.int32, shape=()), 17], 1.0)
self.assertEqual([None, 17], f.get_shape().as_list())
def testGradient(self):
with self.test_session():
in_v = tf.constant(5.0)
out_shape = [3, 2]
out_filled = tf.fill(out_shape, in_v)
err = tf.test.compute_gradient_error(in_v, [],
out_filled, out_shape)
self.assertLess(err, 1e-3)
class PlaceholderTest(tf.test.TestCase):
def testDtype(self):
with self.test_session():
p = tf.placeholder(tf.float32, name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(p_identity.eval(feed_dict={p: feed_array}),
feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float"):
p_identity.eval()
def testShape(self):
with self.test_session():
p = tf.placeholder(tf.float32, shape=(10, 10), name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(p_identity.eval(feed_dict={p: feed_array}),
feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float and "
r"shape \[10,10\]"):
p_identity.eval()
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :5]})
def testPartialShape(self):
with self.test_session():
p = tf.placeholder(tf.float32, shape=[None, 3], name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 3)
self.assertAllClose(p_identity.eval(feed_dict={p: feed_array}),
feed_array)
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :2]})
def testControlDependency(self):
with self.test_session():
p = tf.placeholder(tf.int32, shape=[], name="p")
with tf.control_dependencies([p]):
c = tf.constant(5, tf.int32)
d = tf.mul(p, c)
self.assertEqual(10, d.eval(feed_dict={p: 2}))
def testBadShape(self):
with self.assertRaises(ValueError):
tf.placeholder(tf.float32, shape=(-1, 10))
def testTensorStr(self):
a = tf.placeholder(tf.float32, name="a")
self.assertEqual("<tf.Tensor 'a:0' shape=<unknown> dtype=float32>", repr(a))
b = tf.placeholder(tf.int32, shape=(32, 40), name="b")
self.assertEqual(
"<tf.Tensor 'b:0' shape=(32, 40) dtype=int32>",
repr(b))
c = tf.placeholder(tf.qint32, shape=(32, None, 2), name="c")
self.assertEqual(
"<tf.Tensor 'c:0' shape=(32, ?, 2) dtype=qint32>",
repr(c))
class PlaceholderV2Test(tf.test.TestCase):
def testDtype(self):
with self.test_session():
p = array_ops.placeholder_v2(tf.float32, shape=None, name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(
p_identity.eval(feed_dict={
p: feed_array
}), feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float"):
p_identity.eval()
def testShape(self):
with self.test_session():
p = array_ops.placeholder_v2(tf.float32, shape=(10, 10), name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(
p_identity.eval(feed_dict={
p: feed_array
}), feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float and "
r"shape \[10,10\]"):
p_identity.eval()
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :5]})
def testUnknownShape(self):
with self.test_session():
p = array_ops.placeholder_v2(tf.float32, shape=None, name="p")
p_identity = tf.identity(p)
# can feed anything
feed_array = np.random.rand(10, 3)
self.assertAllClose(
p_identity.eval(feed_dict={
p: feed_array
}), feed_array)
feed_array = np.random.rand(4, 2, 5)
self.assertAllClose(
p_identity.eval(feed_dict={
p: feed_array
}), feed_array)
def testScalarShape(self):
with self.test_session():
p = array_ops.placeholder_v2(tf.float32, shape=[], name="p")
p_identity = tf.identity(p)
self.assertAllClose(p_identity.eval(feed_dict={p: 5}), 5)
def testPartialShape(self):
with self.test_session():
p = array_ops.placeholder_v2(tf.float32, shape=[None, 3], name="p")
p_identity = tf.identity(p)
feed_array = np.random.rand(10, 3)
self.assertAllClose(
p_identity.eval(feed_dict={
p: feed_array
}), feed_array)
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :2]})
def testControlDependency(self):
with self.test_session():
p = array_ops.placeholder_v2(tf.int32, shape=[], name="p")
with tf.control_dependencies([p]):
c = tf.constant(5, tf.int32)
d = tf.mul(p, c)
val = np.array(2).astype(np.int)
self.assertEqual(10, d.eval(feed_dict={p: val}))
def testBadShape(self):
with self.assertRaises(ValueError):
array_ops.placeholder_v2(tf.float32, shape=(-1, 10))
def testTensorStr(self):
a = array_ops.placeholder_v2(tf.float32, shape=None, name="a")
self.assertEqual("<tf.Tensor 'a:0' shape=<unknown> dtype=float32>", repr(a))
b = array_ops.placeholder_v2(tf.int32, shape=(32, 40), name="b")
self.assertEqual("<tf.Tensor 'b:0' shape=(32, 40) dtype=int32>", repr(b))
c = array_ops.placeholder_v2(tf.qint32, shape=(32, None, 2), name="c")
self.assertEqual("<tf.Tensor 'c:0' shape=(32, ?, 2) dtype=qint32>", repr(c))
class PlaceholderWithDefaultTest(tf.test.TestCase):
  """Tests for tf.placeholder_with_default.

  A placeholder_with_default evaluates to its default value when no feed
  is supplied, accepts feeds that match its declared shape, and rejects
  feeds that do not.
  """

  def testFullShape(self):
    with self.test_session():
      p = tf.placeholder_with_default([[2, 2], [2, 2]], shape=[2, 2])
      a = tf.identity(p)
      # Unfed: evaluates to the default; fed: evaluates to the feed.
      self.assertAllEqual([[2, 2], [2, 2]], a.eval())
      self.assertAllEqual([[3, 3], [3, 3]],
                          a.eval(feed_dict={p: [[3, 3], [3, 3]]}))
      # A feed whose shape violates the fully-specified [2, 2] must fail.
      with self.assertRaises(ValueError):
        a.eval(feed_dict={p: [[6, 6, 6], [6, 6, 6]]})
  def testPartialShape(self):
    with self.test_session():
      p = tf.placeholder_with_default([1, 2, 3], shape=[None])
      a = tf.identity(p)
      self.assertAllEqual([1, 2, 3], a.eval())
      # Any rank-1 feed is accepted under shape=[None]...
      self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))
      # ...but a rank-2 feed is not.
      with self.assertRaises(ValueError):
        a.eval(feed_dict={p: [[2, 2], [2, 2]]})
  def testNoShape(self):
    with self.test_session():
      p = tf.placeholder_with_default([17], shape=None)
      a = tf.identity(p)
      self.assertAllEqual([17], a.eval())
      # shape=None places no constraint: feeds of any rank are accepted.
      self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))
      self.assertAllEqual([[3, 3], [3, 3]],
                          a.eval(feed_dict={p: [[3, 3], [3, 3]]}))
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | import tensorflow as tf |
<|file_name|>limit.d.ts<|end_file_name|><|fim▁begin|>declare type Params = {
min: number;
max: number;
};
export declare type Limit = {
min: number;
max: number;
loop: (n: number) => number;
constrain: (n: number) => number;
reachedAny: (n: number) => boolean;
reachedMax: (n: number) => boolean;<|fim▁hole|>};
export declare function Limit(params: Params): Limit;
export {};<|fim▁end|> | reachedMin: (n: number) => boolean; |
<|file_name|>_labs_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_subscription_request(
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists all labs in a subscription.

    :param subscription_id: The ID of the target subscription.
    :keyword filter: Optional OData filter expression applied server-side.
    :return: An HttpRequest ready to be sent by the pipeline.
    """
    api_version = "2021-11-15-preview"
    accept = "application/json"

    # Expand the subscription ID into the route template.
    template = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.LabServices/labs')
    url = _format_url_section(
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
    )

    # Query string: API version plus the optional $filter.
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    if filter is not None:
        params['$filter'] = _SERIALIZER.query("filter", filter, 'str')

    # Only JSON responses are accepted.
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_list_by_resource_group_request(
subscription_id: str,
resource_group_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2021-11-15-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_request(
    subscription_id: str,
    resource_group_name: str,
    lab_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single lab resource.

    :param subscription_id: The ID of the target subscription.
    :param resource_group_name: Name of the resource group (1-90 chars).
    :param lab_name: Name of the lab (1-100 chars).
    :return: An HttpRequest ready to be sent by the pipeline.
    """
    api_version = "2021-11-15-preview"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}')
    path_format_arguments = {
        # Each path segment is serialized with its length constraints enforced.
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
        "labName": _SERIALIZER.url("lab_name", lab_name, 'str', max_length=100, min_length=1),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_create_or_update_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2021-11-15-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"labName": _SERIALIZER.url("lab_name", lab_name, 'str', max_length=100, min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_update_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2021-11-15-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"labName": _SERIALIZER.url("lab_name", lab_name, 'str', max_length=100, min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PATCH",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_delete_request_initial(
    subscription_id: str,
    resource_group_name: str,
    lab_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the initial DELETE request for a lab (long-running operation).

    :param subscription_id: The ID of the target subscription.
    :param resource_group_name: Name of the resource group (1-90 chars).
    :param lab_name: Name of the lab to delete (1-100 chars).
    :return: An HttpRequest ready to be sent by the pipeline.
    """
    api_version = "2021-11-15-preview"
    accept = "application/json"

    # Route template with each segment serialized under its constraints.
    template = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}')
    url = _format_url_section(
        template,
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
        labName=_SERIALIZER.url("lab_name", lab_name, 'str', max_length=100, min_length=1),
    )

    # Query string carries only the API version.
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Only JSON responses are accepted.
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_publish_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2021-11-15-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}/publish')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"labName": _SERIALIZER.url("lab_name", lab_name, 'str', max_length=100, min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_sync_group_request_initial(
subscription_id: str,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2021-11-15-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}/syncGroup')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"labName": _SERIALIZER.url("lab_name", lab_name, 'str', max_length=100, min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",<|fim▁hole|> url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class LabsOperations(object):
"""LabsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.labservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list_by_subscription(
self,
filter: Optional[str] = None,
**kwargs: Any
) -> Iterable["_models.PagedLabs"]:
"""Get all labs for a subscription.
Returns a list of all labs for a subscription.
:param filter: The filter to apply to the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PagedLabs or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.labservices.models.PagedLabs]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PagedLabs"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_by_subscription.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PagedLabs", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.LabServices/labs'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> Iterable["_models.PagedLabs"]:
"""Get all labs for a subscription and resource group.
Returns a list of all labs in a resource group.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PagedLabs or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.labservices.models.PagedLabs]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PagedLabs"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PagedLabs", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> "_models.Lab":
"""Get a lab resource.
Returns the properties of a lab resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param lab_name: The name of the lab that uniquely identifies it within containing lab account.
Used in resource URIs.
:type lab_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Lab, or the result of cls(response)
:rtype: ~azure.mgmt.labservices.models.Lab
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Lab"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
lab_name=lab_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Lab', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
lab_name: str,
body: "_models.Lab",
**kwargs: Any
) -> "_models.Lab":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Lab"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(body, 'Lab')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
lab_name=lab_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Lab', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Lab', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('Lab', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}'} # type: ignore
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
lab_name: str,
body: "_models.Lab",
**kwargs: Any
) -> LROPoller["_models.Lab"]:
"""Create or update a lab resource.
Operation to create or update a lab resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param lab_name: The name of the lab that uniquely identifies it within containing lab account.
Used in resource URIs.
:type lab_name: str
:param body: The request body.
:type body: ~azure.mgmt.labservices.models.Lab
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either Lab or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.labservices.models.Lab]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Lab"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
lab_name=lab_name,
body=body,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Lab', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}'} # type: ignore
def _update_initial(
self,
resource_group_name: str,
lab_name: str,
body: "_models.LabUpdate",
**kwargs: Any
) -> "_models.Lab":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Lab"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(body, 'LabUpdate')
request = build_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
lab_name=lab_name,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Lab', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('Lab', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}'} # type: ignore
@distributed_trace
def begin_update(
self,
resource_group_name: str,
lab_name: str,
body: "_models.LabUpdate",
**kwargs: Any
) -> LROPoller["_models.Lab"]:
"""Update a lab resource.
Operation to update a lab resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param lab_name: The name of the lab that uniquely identifies it within containing lab account.
Used in resource URIs.
:type lab_name: str
:param body: The request body.
:type body: ~azure.mgmt.labservices.models.LabUpdate
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either Lab or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.labservices.models.Lab]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Lab"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_initial(
resource_group_name=resource_group_name,
lab_name=lab_name,
body=body,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Lab', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}'} # type: ignore
def _delete_initial(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
lab_name=lab_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}'} # type: ignore
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> LROPoller[None]:
"""Deletes a lab resource.
Operation to delete a lab resource.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param lab_name: The name of the lab that uniquely identifies it within containing lab account.
Used in resource URIs.
:type lab_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
lab_name=lab_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}'} # type: ignore
def _publish_initial(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_publish_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
lab_name=lab_name,
template_url=self._publish_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_publish_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}/publish'} # type: ignore
@distributed_trace
def begin_publish(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> LROPoller[None]:
"""Publish or re-publish a lab.
Publish or re-publish a lab. This will create or update all lab resources, such as virtual
machines.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param lab_name: The name of the lab that uniquely identifies it within containing lab account.
Used in resource URIs.
:type lab_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._publish_initial(
resource_group_name=resource_group_name,
lab_name=lab_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_publish.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}/publish'} # type: ignore
def _sync_group_initial(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_sync_group_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
lab_name=lab_name,
template_url=self._sync_group_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_sync_group_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}/syncGroup'} # type: ignore
@distributed_trace
def begin_sync_group(
self,
resource_group_name: str,
lab_name: str,
**kwargs: Any
) -> LROPoller[None]:
"""Manually sync the lab group.
Action used to manually kick off an AAD group sync job.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param lab_name: The name of the lab that uniquely identifies it within containing lab account.
Used in resource URIs.
:type lab_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._sync_group_initial(
resource_group_name=resource_group_name,
lab_name=lab_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_sync_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LabServices/labs/{labName}/syncGroup'} # type: ignore<|fim▁end|> | |
<|file_name|>cors_cache.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! An implementation of the [CORS preflight cache](https://fetch.spec.whatwg.org/#cors-preflight-cache)
//! For now this library is XHR-specific.
//! For stuff involving `<img>`, `<iframe>`, `<form>`, etc please check what
//! the request mode should be and compare with the fetch spec
//! This library will eventually become the core of the Fetch crate
//! with CORSRequest being expanded into FetchRequest (etc)
use hyper::method::Method;
use net_traits::request::{CredentialsMode, Origin, Request};
use servo_url::ServoUrl;
use std::ascii::AsciiExt;
use time::{self, Timespec};
/// Union type for CORS cache entries
///
/// Each entry might pertain to a header or method
#[derive(Clone, Debug)]
pub enum HeaderOrMethod {
HeaderData(String),
MethodData(Method)
}
impl HeaderOrMethod {
fn match_header(&self, header_name: &str) -> bool {
match *self {
HeaderOrMethod::HeaderData(ref s) => (&**s).eq_ignore_ascii_case(header_name),
_ => false
}
}
fn match_method(&self, method: &Method) -> bool {
match *self {
HeaderOrMethod::MethodData(ref m) => m == method,
_ => false
}
}
}
/// An entry in the CORS cache
#[derive(Clone, Debug)]
pub struct CorsCacheEntry {
pub origin: Origin,
pub url: ServoUrl,
pub max_age: u32,
pub credentials: bool,
pub header_or_method: HeaderOrMethod,
created: Timespec
}
impl CorsCacheEntry {
fn new(origin: Origin, url: ServoUrl, max_age: u32, credentials: bool,
header_or_method: HeaderOrMethod) -> CorsCacheEntry {
CorsCacheEntry {
origin: origin,
url: url,
max_age: max_age,
credentials: credentials,
header_or_method: header_or_method,
created: time::now().to_timespec()
}
}
}
fn match_headers(cors_cache: &CorsCacheEntry, cors_req: &Request) -> bool {
cors_cache.origin == cors_req.origin &&
cors_cache.url == cors_req.current_url() &&
(cors_cache.credentials || cors_req.credentials_mode != CredentialsMode::Include)
}
/// A simple, vector-based CORS Cache
#[derive(Clone)]
pub struct CorsCache(Vec<CorsCacheEntry>);
impl CorsCache {
pub fn new() -> CorsCache {
CorsCache(vec![])
}
fn find_entry_by_header<'a>(&'a mut self, request: &Request,
header_name: &str) -> Option<&'a mut CorsCacheEntry> {
self.cleanup();
self.0.iter_mut().find(|e| match_headers(e, request) && e.header_or_method.match_header(header_name))
}
fn find_entry_by_method<'a>(&'a mut self, request: &Request,
method: Method) -> Option<&'a mut CorsCacheEntry> {
// we can take the method from CorSRequest itself
self.cleanup();
self.0.iter_mut().find(|e| match_headers(e, request) && e.header_or_method.match_method(&method))
}
/// [Clear the cache](https://fetch.spec.whatwg.org/#concept-cache-clear)
pub fn clear(&mut self, request: &Request) {
let CorsCache(buf) = self.clone();
let new_buf: Vec<CorsCacheEntry> =
buf.into_iter().filter(|e| e.origin == request.origin &&
request.current_url() == e.url).collect();
*self = CorsCache(new_buf);
}
/// Remove old entries
pub fn cleanup(&mut self) {
let CorsCache(buf) = self.clone();
let now = time::now().to_timespec();
let new_buf: Vec<CorsCacheEntry> = buf.into_iter()
.filter(|e| now.sec < e.created.sec + e.max_age as i64)
.collect();
*self = CorsCache(new_buf);
}
/// Returns true if an entry with a
/// [matching header](https://fetch.spec.whatwg.org/#concept-cache-match-header) is found
pub fn match_header(&mut self, request: &Request, header_name: &str) -> bool {
self.find_entry_by_header(&request, header_name).is_some()
}
/// Updates max age if an entry for a
/// [matching header](https://fetch.spec.whatwg.org/#concept-cache-match-header) is found.
///
/// If not, it will insert an equivalent entry
pub fn match_header_and_update(&mut self, request: &Request,
header_name: &str, new_max_age: u32) -> bool {
match self.find_entry_by_header(&request, header_name).map(|e| e.max_age = new_max_age) {
Some(_) => true,
None => {
self.insert(CorsCacheEntry::new(request.origin.clone(), request.current_url(), new_max_age,
request.credentials_mode == CredentialsMode::Include,
HeaderOrMethod::HeaderData(header_name.to_owned())));
false
}
}
}
/// Returns true if an entry with a
/// [matching method](https://fetch.spec.whatwg.org/#concept-cache-match-method) is found
pub fn match_method(&mut self, request: &Request, method: Method) -> bool {
self.find_entry_by_method(&request, method).is_some()
}
/// Updates max age if an entry for
/// [a matching method](https://fetch.spec.whatwg.org/#concept-cache-match-method) is found.
///
/// If not, it will insert an equivalent entry
pub fn match_method_and_update(&mut self, request: &Request, method: Method, new_max_age: u32) -> bool {
match self.find_entry_by_method(&request, method.clone()).map(|e| e.max_age = new_max_age) {
Some(_) => true,
None => {
self.insert(CorsCacheEntry::new(request.origin.clone(), request.current_url(), new_max_age,
request.credentials_mode == CredentialsMode::Include,
HeaderOrMethod::MethodData(method)));
false
}
}
}<|fim▁hole|> self.0.push(entry);
}
}<|fim▁end|> |
/// Insert an entry
pub fn insert(&mut self, entry: CorsCacheEntry) {
self.cleanup(); |
<|file_name|>storySettings.js<|end_file_name|><|fim▁begin|>import { Category } from '../../../stories/storiesHierarchy';<|fim▁hole|> storyName: 'ColorPicker',
dataHook: 'storybook-colorpicker',
};<|fim▁end|> |
export const storySettings = {
category: Category.COMPONENTS, |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>""" Fantasm: A taskqueue-based Finite State Machine for App Engine Python
Docs and examples: http://code.google.com/p/fantasm/
Copyright 2010 VendAsta Technologies Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from fantasm import constants
from google.appengine.api.taskqueue.taskqueue import Queue
class NoOpQueue( Queue ):
""" A Queue instance that does not Queue """
def add(self, task, transactional=False):
""" see taskqueue.Queue.add """
pass
def knuthHash(number):
"""A decent hash function for integers."""
return (number * 2654435761) % 2**32
def boolConverter(boolStr):
""" A converter that maps some common bool string to True """
return {'1': True, 'True': True, 'true': True}.get(boolStr, False)
def outputAction(action):
""" Outputs the name of the action
@param action: an FSMAction instance
"""
if action:
return str(action.__class__.__name__).split('.')[-1]
def outputTransitionConfig(transitionConfig):
""" Outputs a GraphViz directed graph node
@param transitionConfig: a config._TransitionConfig instance
@return: a string
"""
label = transitionConfig.event
if transitionConfig.action:
label += '/ ' + outputAction(transitionConfig.action)
return '"%(fromState)s" -> "%(toState)s" [label="%(label)s"];' % \
{'fromState': transitionConfig.fromState.name,
'toState': transitionConfig.toState.name,
'label': label}
def outputStateConfig(stateConfig, colorMap=None):
""" Outputs a GraphViz directed graph node
@param stateConfig: a config._StateConfig instance
@return: a string
"""
colorMap = colorMap or {}
actions = []
if stateConfig.entry:
actions.append('entry/ %(entry)s' % {'entry': outputAction(stateConfig.entry)})
if stateConfig.action:
actions.append('do/ %(do)s' % {'do': outputAction(stateConfig.action)})
if stateConfig.exit:
actions.append('exit/ %(exit)s' % {'exit': outputAction(stateConfig.exit)})
label = '%(stateName)s|%(actions)s' % {'stateName': stateConfig.name, 'actions': '\\l'.join(actions)}
if stateConfig.continuation:
label += '|continuation = True'
if stateConfig.fanInPeriod != constants.NO_FAN_IN:
label += '|fan in period = %(fanin)ds' % {'fanin': stateConfig.fanInPeriod}<|fim▁hole|> 'fillcolor': colorMap.get(stateConfig.name, 'white'),
'shape': shape,
'label': label}
else:
return '"%(stateName)s" [shape=%(shape)s,label="{%(label)s}"];' % \
{'stateName': stateConfig.name,
'shape': shape,
'label': label}
def outputMachineConfig(machineConfig, colorMap=None, skipStateNames=None):
""" Outputs a GraphViz directed graph of the state machine
@param machineConfig: a config._MachineConfig instance
@return: a string
"""
skipStateNames = skipStateNames or ()
lines = []
lines.append('digraph G {')
lines.append('label="%(machineName)s"' % {'machineName': machineConfig.name})
lines.append('labelloc="t"')
lines.append('"__start__" [label="start",shape=circle,style=filled,fillcolor=black,fontcolor=white,fontsize=9];')
lines.append('"__end__" [label="end",shape=doublecircle,style=filled,fillcolor=black,fontcolor=white,fontsize=9];')
for stateConfig in machineConfig.states.values():
if stateConfig.name in skipStateNames:
continue
lines.append(outputStateConfig(stateConfig, colorMap=colorMap))
if stateConfig.initial:
lines.append('"__start__" -> "%(stateName)s"' % {'stateName': stateConfig.name})
if stateConfig.final:
lines.append('"%(stateName)s" -> "__end__"' % {'stateName': stateConfig.name})
for transitionConfig in machineConfig.transitions.values():
if transitionConfig.fromState.name in skipStateNames or \
transitionConfig.toState.name in skipStateNames:
continue
lines.append(outputTransitionConfig(transitionConfig))
lines.append('}')
return '\n'.join(lines)<|fim▁end|> | shape = 'Mrecord'
if colorMap.get(stateConfig.name):
return '"%(stateName)s" [style=filled,fillcolor="%(fillcolor)s",shape=%(shape)s,label="{%(label)s}"];' % \
{'stateName': stateConfig.name, |
<|file_name|>test_docx.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.6
'''
Test docx module
'''
import os
import lxml
from docx import *
TEST_FILE = 'ShortTest.docx'
IMAGE1_FILE = 'image1.png'
# --- Setup & Support Functions ---
def setup_module():
'''Set up test fixtures'''
import shutil
if IMAGE1_FILE not in os.listdir('.'):
shutil.copyfile(os.path.join(os.path.pardir,IMAGE1_FILE), IMAGE1_FILE)
testsavedocument()
def teardown_module():
'''Tear down test fixtures'''<|fim▁hole|> if TEST_FILE in os.listdir('.'):
os.remove(TEST_FILE)
def simpledoc():
'''Make a docx (document, relationships) for use in other docx tests'''
doc = newdocx('Python docx testnewdocument','A short example of making docx from Python','Alan Brooks',['python','Office Open XML','Word'])
document = getdocument(doc)
relationships = getrelationshiplist(doc)
docbody = document.xpath('/w:document/w:body', namespaces=nsprefixes)[0]
docbody.append(heading('Heading 1',1) )
docbody.append(heading('Heading 2',2))
docbody.append(paragraph('Paragraph 1'))
for point in ['List Item 1','List Item 2','List Item 3']:
docbody.append(paragraph(point,style='ListNumber'))
docbody.append(pagebreak(type='page'))
docbody.append(paragraph('Paragraph 2'))
docbody.append(table([['A1','A2','A3'],['B1','B2','B3'],['C1','C2','C3']]))
docbody.append(pagebreak(type='section', orient='portrait'))
relationships,picpara = picture(relationships,IMAGE1_FILE,'This is a test description')
docbody.append(picpara)
docbody.append(pagebreak(type='section', orient='landscape'))
docbody.append(paragraph('Paragraph 3'))
doc['word/document.xml'] = document
doc['word/_rels/document.xml.rels'] = wordrelationships(relationships)
return doc
# --- Test Functions ---
def testsearchandreplace():
'''Ensure search and replace functions work'''
doc = simpledoc()
document = getdocument(doc)
docbody = getdocbody(document)
assert search(docbody, 'ing 1')
assert search(docbody, 'ing 2')
assert search(docbody, 'graph 3')
assert search(docbody, 'ist Item')
assert search(docbody, 'A1')
if search(docbody, 'Paragraph 2'):
docbody = replace(docbody,'Paragraph 2','Whacko 55')
assert search(docbody, 'Whacko 55')
def testtextextraction():
'''Ensure text can be pulled out of a document'''
document = opendocx(TEST_FILE)
paratextlist = getdocumenttext(document)
assert len(paratextlist) > 0
def testunsupportedpagebreak():
'''Ensure unsupported page break types are trapped'''
document = newdocument()
docbody = document.xpath('/w:document/w:body', namespaces=nsprefixes)[0]
try:
docbody.append(pagebreak(type='unsup'))
except ValueError:
return # passed
assert False # failed
def testsavedocument():
'''Tests a new document can be saved'''
document = simpledoc()
savedocx(document, TEST_FILE)
def testgetdocument():
'''Ensure an etree element is returned'''
doc = opendocx(TEST_FILE)
document = getdocument(doc)
if isinstance(document,lxml.etree._Element):
pass
else:
assert False
def testmakeelement():
'''Ensure custom elements get created'''
testelement = makeelement('testname',attributes={'testattribute':'testvalue'},tagtext='testtagtext')
assert testelement.tag == '{http://schemas.openxmlformats.org/wordprocessingml/2006/main}testname'
assert testelement.attrib == {'{http://schemas.openxmlformats.org/wordprocessingml/2006/main}testattribute': 'testvalue'}
assert testelement.text == 'testtagtext'
def testparagraph():
'''Ensure paragraph creates p elements'''
testpara = paragraph('paratext',style='BodyText')
assert testpara.tag == '{http://schemas.openxmlformats.org/wordprocessingml/2006/main}p'
pass
def testtable():
'''Ensure tables make sense'''
testtable = table([['A1','A2'],['B1','B2'],['C1','C2']])
ns = '{http://schemas.openxmlformats.org/wordprocessingml/2006/main}'
assert testtable.xpath('/ns0:tbl/ns0:tr[2]/ns0:tc[2]/ns0:p/ns0:r/ns0:t',namespaces={'ns0':'http://schemas.openxmlformats.org/wordprocessingml/2006/main'})[0].text == 'B2'
if __name__=='__main__':
import nose
nose.main()<|fim▁end|> | |
<|file_name|>Crowbar.cpp<|end_file_name|><|fim▁begin|>#include <zmq.h>
#include <zlib.h>
#include <czmq.h>
#include <zframe.h>
#include "Crowbar.h"
#include "boost/thread.hpp"
#include "g2log.hpp"
#include "Death.h"
/**
* Construct a crowbar for beating things at the binding location
*
* @param binding
* A std::string description of a ZMQ socket
*/
Crowbar::Crowbar(const std::string& binding) : mContext(NULL),
mBinding(binding), mTip(NULL), mOwnsContext(true) {
}
/**
* Construct a crowbar for beating the specific headcrab
*
* @param target
* A living(initialized) headcrab
*/
Crowbar::Crowbar(const Headcrab& target) : mContext(target.GetContext()),
mBinding(target.GetBinding()), mTip(NULL), mOwnsContext(false) {
if (mContext == NULL) {
mOwnsContext = true;
}
}
/**
* Construct a crowbar for beating things at binding with the given context
* @param binding
* The binding of the bound socket for the given context
* @param context
* A working context
*/
Crowbar::Crowbar(const std::string& binding, zctx_t* context) : mContext(context),
mBinding(binding), mTip(NULL), mOwnsContext(false) {
}
/**
* Default deconstructor
*/
Crowbar::~Crowbar() {
if (mOwnsContext && mContext != NULL) {
zctx_destroy(&mContext);
}
}
/**
* Get the high water mark for socket sends
*
* @return
* the high water mark
*/
int Crowbar::GetHighWater() {
return 1024;
}
/**
* Get the "tip" socket used to hit things
*
* @return
* A pointer to a zmq socket (or NULL in a failure)
*/
void* Crowbar::GetTip() {
void* tip = zsocket_new(mContext, ZMQ_REQ);
if (!tip) {
return NULL;
}
zsocket_set_sndhwm(tip, GetHighWater());
zsocket_set_rcvhwm(tip, GetHighWater());
zsocket_set_linger(tip, 0);
int connectRetries = 100;
while (zsocket_connect(tip, mBinding.c_str()) != 0 && connectRetries-- > 0 && !zctx_interrupted) {
boost::this_thread::interruption_point();
int err = zmq_errno();
if (err == ETERM) {
zsocket_destroy(mContext, tip);
return NULL;
}
std::string error(zmq_strerror(err));
LOG(WARNING) << "Could not connect to " << mBinding << ":" << error;
zclock_sleep(100);
}
Death::Instance().RegisterDeathEvent(&Death::DeleteIpcFiles, mBinding);
if (zctx_interrupted) {
LOG(INFO) << "Caught Interrupt Signal";
}
if (connectRetries <= 0) {
zsocket_destroy(mContext, tip);
return NULL;
}
return tip;
}
bool Crowbar::Wield() {
if (!mContext) {
mContext = zctx_new();
zctx_set_linger(mContext, 0); // linger for a millisecond on close
zctx_set_sndhwm(mContext, GetHighWater());
zctx_set_rcvhwm(mContext, GetHighWater()); // HWM on internal thread communicaiton
zctx_set_iothreads(mContext, 1);
}
if (!mTip) {
mTip = GetTip();
if (!mTip && mOwnsContext) {
zctx_destroy(&mContext);
mContext = NULL;
}
}
return ((mContext != NULL) && (mTip != NULL));
}
bool Crowbar::Swing(const std::string& hit) {
//std::cout << "sending " << hit << std::endl;
std::vector<std::string> hits;
hits.push_back(hit);
return Flurry(hits);
}
/**
* Poll to see if the other side of the socket is ready
* @return
*/
bool Crowbar::PollForReady() {
zmq_pollitem_t item;
if (!mTip) {
return false;
}
item.socket = mTip;
item.events = ZMQ_POLLOUT;
int returnVal = zmq_poll(&item, 1, 0);
if (returnVal < 0) {
LOG(WARNING) << "Socket error: " << zmq_strerror(zmq_errno());<|fim▁hole|> return (returnVal >= 1);
}
/**
* Send a bunch of strings to a socket
* @param hits
* @return
*/
bool Crowbar::Flurry(std::vector<std::string>& hits) {
if (!mTip) {
LOG(WARNING) << "Cannot send, not Wielded";
return false;
}
if (!PollForReady()) {
LOG(WARNING) << "Cannot send, no listener ready";
return false;
}
zmsg_t* message = zmsg_new();
for (auto it = hits.begin();
it != hits.end(); it++) {
zmsg_addmem(message, &((*it)[0]), it->size());
}
bool success = true;
//std::cout << "Sending message with " << zmsg_size(message) << " " << hits.size() << std::endl;
if (zmsg_send(&message, mTip) != 0) {
LOG(WARNING) << "zmsg_send returned non-zero exit " << zmq_strerror(zmq_errno());
success = false;
}
if (message) {
zmsg_destroy(&message);
}
return success;
}
bool Crowbar::BlockForKill(std::string& guts) {
std::vector<std::string> allReplies;
if (BlockForKill(allReplies) && !allReplies.empty()) {
guts = allReplies[0];
return true;
}
return false;
}
bool Crowbar::BlockForKill(std::vector<std::string>& guts) {
if (!mTip) {
return false;
}
zmsg_t* message = zmsg_recv(mTip);
if (!message) {
return false;
}
guts.clear();
int msgSize = zmsg_size(message);
for (int i = 0; i < msgSize; i++) {
zframe_t* frame = zmsg_pop(message);
std::string aString;
aString.insert(0, reinterpret_cast<const char*> (zframe_data(frame)), zframe_size(frame));
guts.push_back(aString);
zframe_destroy(&frame);
//std::cout << guts[0] << " found " << aString << std::endl;
}
zmsg_destroy(&message);
return true;
}
bool Crowbar::WaitForKill(std::string& guts, const int timeout) {
std::vector<std::string> allReplies;
if (WaitForKill(allReplies, timeout) && !allReplies.empty()) {
guts = allReplies[0];
return true;
}
return false;
}
bool Crowbar::WaitForKill(std::vector<std::string>& guts, const int timeout) {
if (!mTip) {
return false;
}
if (zsocket_poll(mTip, timeout)) {
return BlockForKill(guts);
}
return false;
}
zctx_t* Crowbar::GetContext() {
return mContext;
}<|fim▁end|> | }
|
<|file_name|>resource_compute_route_generated_test.go<|end_file_name|><|fim▁begin|>// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** Type: MMv1 ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------
package google
import (
"fmt"
"strings"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
func TestAccComputeRoute_routeBasicExample(t *testing.T) {
t.Parallel()
context := map[string]interface{}{
"random_suffix": randString(t, 10),
}
vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckComputeRouteDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccComputeRoute_routeBasicExample(context),
},
{
ResourceName: "google_compute_route.default",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"network", "next_hop_instance", "next_hop_vpn_tunnel"},
},
},
})
}
func testAccComputeRoute_routeBasicExample(context map[string]interface{}) string {
return Nprintf(`
resource "google_compute_route" "default" {
name = "tf-test-network-route%{random_suffix}"
dest_range = "15.0.0.0/24"
network = google_compute_network.default.name
next_hop_ip = "10.132.1.5"
priority = 100
}
resource "google_compute_network" "default" {
name = "tf-test-compute-network%{random_suffix}"
}
`, context)
}
func TestAccComputeRoute_routeIlbExample(t *testing.T) {
t.Parallel()
context := map[string]interface{}{
"random_suffix": randString(t, 10),
}
vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckComputeRouteDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccComputeRoute_routeIlbExample(context),
},
{
ResourceName: "google_compute_route.route-ilb",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"network", "next_hop_instance", "next_hop_vpn_tunnel"},
},
},
})
}
func testAccComputeRoute_routeIlbExample(context map[string]interface{}) string {
return Nprintf(`
resource "google_compute_network" "default" {
name = "tf-test-compute-network%{random_suffix}"
auto_create_subnetworks = false
}
resource "google_compute_subnetwork" "default" {
name = "tf-test-compute-subnet%{random_suffix}"
ip_cidr_range = "10.0.1.0/24"
region = "us-central1"
network = google_compute_network.default.id
}
resource "google_compute_health_check" "hc" {
name = "tf-test-proxy-health-check%{random_suffix}"
check_interval_sec = 1
timeout_sec = 1
tcp_health_check {
port = "80"
}
}
resource "google_compute_region_backend_service" "backend" {
name = "tf-test-compute-backend%{random_suffix}"
region = "us-central1"
health_checks = [google_compute_health_check.hc.id]
}
resource "google_compute_forwarding_rule" "default" {
name = "tf-test-compute-forwarding-rule%{random_suffix}"
region = "us-central1"
load_balancing_scheme = "INTERNAL"
backend_service = google_compute_region_backend_service.backend.id
all_ports = true
network = google_compute_network.default.name
subnetwork = google_compute_subnetwork.default.name
}
resource "google_compute_route" "route-ilb" {
name = "tf-test-route-ilb%{random_suffix}"
dest_range = "0.0.0.0/0"
network = google_compute_network.default.name
next_hop_ilb = google_compute_forwarding_rule.default.id
priority = 2000
}
`, context)
}
func TestAccComputeRoute_routeIlbVipExample(t *testing.T) {
t.Parallel()
context := map[string]interface{}{
"random_suffix": randString(t, 10),
}
vcrTest(t, resource.TestCase{<|fim▁hole|> Steps: []resource.TestStep{
{
Config: testAccComputeRoute_routeIlbVipExample(context),
},
{
ResourceName: "google_compute_route.route-ilb",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"network", "next_hop_instance", "next_hop_vpn_tunnel"},
},
},
})
}
func testAccComputeRoute_routeIlbVipExample(context map[string]interface{}) string {
return Nprintf(`
resource "google_compute_network" "producer" {
provider = google-beta
name = "producer%{random_suffix}-vpc"
auto_create_subnetworks = false
}
resource "google_compute_subnetwork" "producer" {
provider = google-beta
name = "producer%{random_suffix}-subnet"
ip_cidr_range = "10.0.1.0/24"
region = "us-central1"
network = google_compute_network.producer.id
}
resource "google_compute_network" "consumer" {
provider = google-beta
name = "consumer%{random_suffix}-vpc"
auto_create_subnetworks = false
}
resource "google_compute_subnetwork" "consumer" {
provider = google-beta
name = "consumer%{random_suffix}-subnet"
ip_cidr_range = "10.0.2.0/24"
region = "us-central1"
network = google_compute_network.consumer.id
}
resource "google_compute_network_peering" "peering1" {
provider = google-beta
name = "peering-producer%{random_suffix}-to-consumer%{random_suffix}"
network = google_compute_network.consumer.id
peer_network = google_compute_network.producer.id
}
resource "google_compute_network_peering" "peering2" {
provider = google-beta
name = "peering-consumer%{random_suffix}-to-producer%{random_suffix}"
network = google_compute_network.producer.id
peer_network = google_compute_network.consumer.id
}
resource "google_compute_health_check" "hc" {
provider = google-beta
name = "tf-test-proxy-health-check%{random_suffix}"
check_interval_sec = 1
timeout_sec = 1
tcp_health_check {
port = "80"
}
}
resource "google_compute_region_backend_service" "backend" {
provider = google-beta
name = "tf-test-compute-backend%{random_suffix}"
region = "us-central1"
health_checks = [google_compute_health_check.hc.id]
}
resource "google_compute_forwarding_rule" "default" {
provider = google-beta
name = "tf-test-compute-forwarding-rule%{random_suffix}"
region = "us-central1"
load_balancing_scheme = "INTERNAL"
backend_service = google_compute_region_backend_service.backend.id
all_ports = true
network = google_compute_network.producer.name
subnetwork = google_compute_subnetwork.producer.name
}
resource "google_compute_route" "route-ilb" {
provider = google-beta
name = "tf-test-route-ilb%{random_suffix}"
dest_range = "0.0.0.0/0"
network = google_compute_network.consumer.name
next_hop_ilb = google_compute_forwarding_rule.default.ip_address
priority = 2000
tags = ["tag1", "tag2"]
depends_on = [
google_compute_network_peering.peering1,
google_compute_network_peering.peering2
]
}
`, context)
}
func testAccCheckComputeRouteDestroyProducer(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
for name, rs := range s.RootModule().Resources {
if rs.Type != "google_compute_route" {
continue
}
if strings.HasPrefix(name, "data.") {
continue
}
config := googleProviderConfig(t)
url, err := replaceVarsForTest(config, rs, "{{ComputeBasePath}}projects/{{project}}/global/routes/{{name}}")
if err != nil {
return err
}
billingProject := ""
if config.BillingProject != "" {
billingProject = config.BillingProject
}
_, err = sendRequest(config, "GET", billingProject, url, config.userAgent, nil, isPeeringOperationInProgress)
if err == nil {
return fmt.Errorf("ComputeRoute still exists at %s", url)
}
}
return nil
}
}<|fim▁end|> | PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProvidersOiCS,
CheckDestroy: testAccCheckComputeRouteDestroyProducer(t), |
<|file_name|>0004_auto_20171023_2354.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-23 23:54
from __future__ import unicode_literals
<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('goods', '0002_auto_20171017_2017'),
('trade', '0003_auto_20171022_1507'),
]
operations = [
migrations.AlterField(
model_name='ordergoods',
name='order',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goods', to='trade.OrderInfo', verbose_name='订单信息'),
),
migrations.AlterField(
model_name='orderinfo',
name='order_sn',
field=models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单号'),
),
migrations.AlterField(
model_name='orderinfo',
name='pay_status',
field=models.CharField(choices=[('TRADE_SUCCESS', '成功'), ('TRADE_CLOSE', '超时关闭'), ('WAIT_BUYER_PAY', '交易创建,等待付款'), ('TRADE_FINISHED', '交易结束')], default='paying', max_length=30, verbose_name='订单状态'),
),
migrations.AlterUniqueTogether(
name='shoppingcart',
unique_together=set([('user', 'goods')]),
),
]<|fim▁end|> |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
|
<|file_name|>en-gb.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'basicstyles', 'en-gb', {<|fim▁hole|> superscript: 'Superscript',
underline: 'Underline'
} );<|fim▁end|> | bold: 'Bold',
italic: 'Italic',
strike: 'Strike Through',
subscript: 'Subscript', |
<|file_name|>x1tool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding:utf-8
import hashlib
import logging
from x1category import X1Category
TOOL_PREFIX = 'X1Tool'
<|fim▁hole|> def __init__(self, metadata=None):
if metadata is None:
metadata = self.DEFAULT_METADATA
self.__metadata = metadata
@classmethod
def appid(cls):
"""per tool GUID"""
try:
return hashlib.sha224(cls.__name__).hexdigest()
except Exception, e:
logging.error('Fail to get appid: %s' % e)
return "0000000000"
def run(self, args):
return args
def metadata(self, attr_key=None, attr_value=None):
try:
if attr_key is None:
return self.__metadata
if attr_value is not None:
self.__metadata[attr_key] = attr_value
return self.__metadata[attr_key]
except Exception, e:
logging.error('Fail to set attr: %s(%s)' % (attr_key, e))
if __name__ == '__main__':
app = X1Tool()
print app.appid()
print app.metadata()<|fim▁end|> | class X1Tool(object):
'appid:6376477c731a89e3280657eb88422645f2d1e2a684541222e21371f3110110d2'
DEFAULT_METADATA = {'name': "X1Tool", 'author': "admin", 'comments': "default", 'template': "default/index.html", 'category': X1Category.DEFAULT}
|
<|file_name|>colorschemes.py<|end_file_name|><|fim▁begin|># These color schemes come from d3: http://d3js.org/
#
# They are licensed under the following license:
#
# Copyright (c) 2010-2015, Michael Bostock
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * The name Michael Bostock may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,<|fim▁hole|># EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#: 10 colors that work well together as data category colors
CATEGORY10 = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd', '#8c564b',
'#e377c2', '#7f7f7f', '#bcbd22', '#17becf']
#: 20 colors that work well together as data category colors
CATEGORY20 = ['#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c', '#98df8a',
'#d62728', '#ff9896', '#9467bd', '#c5b0d5', '#8c564b', '#c49c94',
'#e377c2', '#f7b6d2', '#7f7f7f', '#c7c7c7', '#bcbd22', '#dbdb8d',
'#17becf', '#9edae5']
#: 20 colors that work well together as data category colors
CATEGORY20b = ['#393b79', '#5254a3', '#6b6ecf', '#9c9ede', '#637939',
'#8ca252', '#b5cf6b', '#cedb9c', '#8c6d31', '#bd9e39',
'#e7ba52', '#e7cb94', '#843c39', '#ad494a', '#d6616b',
'#e7969c', '#7b4173', '#a55194', '#ce6dbd', '#de9ed6']
#: 20 colors that work well together as data category colors
CATEGORY20c = ['#3182bd', '#6baed6', '#9ecae1', '#c6dbef', '#e6550d',
'#fd8d3c', '#fdae6b', '#fdd0a2', '#31a354', '#74c476',
'#a1d99b', '#c7e9c0', '#756bb1', '#9e9ac8', '#bcbddc',
'#dadaeb', '#636363', '#969696', '#bdbdbd', '#d9d9d9']<|fim▁end|> | # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, |
<|file_name|>catch.hpp<|end_file_name|><|fim▁begin|>/*
* Catch v1.7.2
* Generated: 2017-02-13 15:57:33.350226
* ----------------------------------------------------------
* This file has been merged from multiple headers. Please don't edit it directly
* Copyright (c) 2012 Two Blue Cubes Ltd. All rights reserved.
*
* Distributed under the Boost Software License, Version 1.0. (See accompanying
* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef TWOBLUECUBES_SINGLE_INCLUDE_CATCH_HPP_INCLUDED
#define TWOBLUECUBES_SINGLE_INCLUDE_CATCH_HPP_INCLUDED
#define TWOBLUECUBES_CATCH_HPP_INCLUDED
#ifdef __clang__
# pragma clang system_header
#elif defined __GNUC__
# pragma GCC system_header
#endif
// #included from: internal/catch_suppress_warnings.h
#ifdef __clang__
# ifdef __ICC // icpc defines the __clang__ macro
# pragma warning(push)
# pragma warning(disable: 161 1682)
# else // __ICC
# pragma clang diagnostic ignored "-Wglobal-constructors"
# pragma clang diagnostic ignored "-Wvariadic-macros"
# pragma clang diagnostic ignored "-Wc99-extensions"
# pragma clang diagnostic ignored "-Wunused-variable"
# pragma clang diagnostic push
# pragma clang diagnostic ignored "-Wpadded"
# pragma clang diagnostic ignored "-Wc++98-compat"
# pragma clang diagnostic ignored "-Wc++98-compat-pedantic"
# pragma clang diagnostic ignored "-Wswitch-enum"
# pragma clang diagnostic ignored "-Wcovered-switch-default"
# endif
#elif defined __GNUC__
# pragma GCC diagnostic ignored "-Wvariadic-macros"
# pragma GCC diagnostic ignored "-Wunused-variable"
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wpadded"
#endif
#if defined(CATCH_CONFIG_MAIN) || defined(CATCH_CONFIG_RUNNER)
# define CATCH_IMPL
#endif
#ifdef CATCH_IMPL
# ifndef CLARA_CONFIG_MAIN
# define CLARA_CONFIG_MAIN_NOT_DEFINED
# define CLARA_CONFIG_MAIN
# endif
#endif
// #included from: internal/catch_notimplemented_exception.h
#define TWOBLUECUBES_CATCH_NOTIMPLEMENTED_EXCEPTION_H_INCLUDED
// #included from: catch_common.h
#define TWOBLUECUBES_CATCH_COMMON_H_INCLUDED
// #included from: catch_compiler_capabilities.h
#define TWOBLUECUBES_CATCH_COMPILER_CAPABILITIES_HPP_INCLUDED
// Detect a number of compiler features - mostly C++11/14 conformance - by compiler
// The following features are defined:
//
// CATCH_CONFIG_CPP11_NULLPTR : is nullptr supported?
// CATCH_CONFIG_CPP11_NOEXCEPT : is noexcept supported?
// CATCH_CONFIG_CPP11_GENERATED_METHODS : The delete and default keywords for compiler generated methods
// CATCH_CONFIG_CPP11_IS_ENUM : std::is_enum is supported?
// CATCH_CONFIG_CPP11_TUPLE : std::tuple is supported
// CATCH_CONFIG_CPP11_LONG_LONG : is long long supported?
// CATCH_CONFIG_CPP11_OVERRIDE : is override supported?
// CATCH_CONFIG_CPP11_UNIQUE_PTR : is unique_ptr supported (otherwise use auto_ptr)
// CATCH_CONFIG_CPP11_SHUFFLE : is std::shuffle supported?
// CATCH_CONFIG_CPP11_TYPE_TRAITS : are type_traits and enable_if supported?
// CATCH_CONFIG_CPP11_OR_GREATER : Is C++11 supported?
// CATCH_CONFIG_VARIADIC_MACROS : are variadic macros supported?
// CATCH_CONFIG_COUNTER : is the __COUNTER__ macro supported?
// CATCH_CONFIG_WINDOWS_SEH : is Windows SEH supported?
// ****************
// Note to maintainers: if new toggles are added please document them
// in configuration.md, too
// ****************
// In general each macro has a _NO_<feature name> form
// (e.g. CATCH_CONFIG_CPP11_NO_NULLPTR) which disables the feature.
// Many features, at point of detection, define an _INTERNAL_ macro, so they
// can be combined, en-mass, with the _NO_ forms later.
// All the C++11 features can be disabled with CATCH_CONFIG_NO_CPP11
#ifdef __cplusplus
# if __cplusplus >= 201103L
# define CATCH_CPP11_OR_GREATER
# endif
# if __cplusplus >= 201402L
# define CATCH_CPP14_OR_GREATER
# endif
#endif
#ifdef __clang__
# if __has_feature(cxx_nullptr)
# define CATCH_INTERNAL_CONFIG_CPP11_NULLPTR
# endif
# if __has_feature(cxx_noexcept)
# define CATCH_INTERNAL_CONFIG_CPP11_NOEXCEPT
# endif
# if defined(CATCH_CPP11_OR_GREATER)
# define CATCH_INTERNAL_SUPPRESS_PARENTHESES_WARNINGS _Pragma( "clang diagnostic ignored \"-Wparentheses\"" )
# endif
#endif // __clang__
////////////////////////////////////////////////////////////////////////////////
// Borland
#ifdef __BORLANDC__
#endif // __BORLANDC__
////////////////////////////////////////////////////////////////////////////////
// EDG
#ifdef __EDG_VERSION__
#endif // __EDG_VERSION__
////////////////////////////////////////////////////////////////////////////////
// Digital Mars
#ifdef __DMC__
#endif // __DMC__
////////////////////////////////////////////////////////////////////////////////
// GCC
#ifdef __GNUC__
# if __GNUC__ == 4 && __GNUC_MINOR__ >= 6 && defined(__GXX_EXPERIMENTAL_CXX0X__)
# define CATCH_INTERNAL_CONFIG_CPP11_NULLPTR
# endif
# if !defined(CATCH_INTERNAL_SUPPRESS_PARENTHESES_WARNINGS) && defined(CATCH_CPP11_OR_GREATER)
# define CATCH_INTERNAL_SUPPRESS_PARENTHESES_WARNINGS _Pragma( "GCC diagnostic ignored \"-Wparentheses\"" )
# endif
// - otherwise more recent versions define __cplusplus >= 201103L
// and will get picked up below
#endif // __GNUC__
////////////////////////////////////////////////////////////////////////////////
// Visual C++
#ifdef _MSC_VER
#define CATCH_INTERNAL_CONFIG_WINDOWS_SEH
#if (_MSC_VER >= 1600)
# define CATCH_INTERNAL_CONFIG_CPP11_NULLPTR
# define CATCH_INTERNAL_CONFIG_CPP11_UNIQUE_PTR
#endif
#if (_MSC_VER >= 1900 ) // (VC++ 13 (VS2015))
#define CATCH_INTERNAL_CONFIG_CPP11_NOEXCEPT
#define CATCH_INTERNAL_CONFIG_CPP11_GENERATED_METHODS
#define CATCH_INTERNAL_CONFIG_CPP11_SHUFFLE
#define CATCH_INTERNAL_CONFIG_CPP11_TYPE_TRAITS
#endif
#endif // _MSC_VER
////////////////////////////////////////////////////////////////////////////////
// Use variadic macros if the compiler supports them
#if ( defined _MSC_VER && _MSC_VER > 1400 && !defined __EDGE__) || \
( defined __WAVE__ && __WAVE_HAS_VARIADICS ) || \
( defined __GNUC__ && __GNUC__ >= 3 ) || \
( !defined __cplusplus && __STDC_VERSION__ >= 199901L || __cplusplus >= 201103L )
#define CATCH_INTERNAL_CONFIG_VARIADIC_MACROS
#endif
// Use __COUNTER__ if the compiler supports it
#if ( defined _MSC_VER && _MSC_VER >= 1300 ) || \
( defined __GNUC__ && __GNUC__ >= 4 && __GNUC_MINOR__ >= 3 ) || \
( defined __clang__ && __clang_major__ >= 3 )
#define CATCH_INTERNAL_CONFIG_COUNTER
#endif
////////////////////////////////////////////////////////////////////////////////
// C++ language feature support
// catch all support for C++11
#if defined(CATCH_CPP11_OR_GREATER)
# if !defined(CATCH_INTERNAL_CONFIG_CPP11_NULLPTR)
# define CATCH_INTERNAL_CONFIG_CPP11_NULLPTR
# endif
# ifndef CATCH_INTERNAL_CONFIG_CPP11_NOEXCEPT
# define CATCH_INTERNAL_CONFIG_CPP11_NOEXCEPT
# endif
# ifndef CATCH_INTERNAL_CONFIG_CPP11_GENERATED_METHODS
# define CATCH_INTERNAL_CONFIG_CPP11_GENERATED_METHODS
# endif
# ifndef CATCH_INTERNAL_CONFIG_CPP11_IS_ENUM
# define CATCH_INTERNAL_CONFIG_CPP11_IS_ENUM
# endif
# ifndef CATCH_INTERNAL_CONFIG_CPP11_TUPLE
# define CATCH_INTERNAL_CONFIG_CPP11_TUPLE
# endif
# ifndef CATCH_INTERNAL_CONFIG_VARIADIC_MACROS
# define CATCH_INTERNAL_CONFIG_VARIADIC_MACROS
# endif
# if !defined(CATCH_INTERNAL_CONFIG_CPP11_LONG_LONG)
# define CATCH_INTERNAL_CONFIG_CPP11_LONG_LONG
# endif
# if !defined(CATCH_INTERNAL_CONFIG_CPP11_OVERRIDE)
# define CATCH_INTERNAL_CONFIG_CPP11_OVERRIDE
# endif
# if !defined(CATCH_INTERNAL_CONFIG_CPP11_UNIQUE_PTR)
# define CATCH_INTERNAL_CONFIG_CPP11_UNIQUE_PTR
# endif
# if !defined(CATCH_INTERNAL_CONFIG_CPP11_SHUFFLE)
# define CATCH_INTERNAL_CONFIG_CPP11_SHUFFLE
# endif
# if !defined(CATCH_INTERNAL_CONFIG_CPP11_TYPE_TRAITS)
# define CATCH_INTERNAL_CONFIG_CPP11_TYPE_TRAITS
# endif
#endif // __cplusplus >= 201103L
// Now set the actual defines based on the above + anything the user has configured
#if defined(CATCH_INTERNAL_CONFIG_CPP11_NULLPTR) && !defined(CATCH_CONFIG_CPP11_NO_NULLPTR) && !defined(CATCH_CONFIG_CPP11_NULLPTR) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_NULLPTR
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_NOEXCEPT) && !defined(CATCH_CONFIG_CPP11_NO_NOEXCEPT) && !defined(CATCH_CONFIG_CPP11_NOEXCEPT) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_NOEXCEPT
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_GENERATED_METHODS) && !defined(CATCH_CONFIG_CPP11_NO_GENERATED_METHODS) && !defined(CATCH_CONFIG_CPP11_GENERATED_METHODS) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_GENERATED_METHODS
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_IS_ENUM) && !defined(CATCH_CONFIG_CPP11_NO_IS_ENUM) && !defined(CATCH_CONFIG_CPP11_IS_ENUM) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_IS_ENUM
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_TUPLE) && !defined(CATCH_CONFIG_CPP11_NO_TUPLE) && !defined(CATCH_CONFIG_CPP11_TUPLE) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_TUPLE
#endif
#if defined(CATCH_INTERNAL_CONFIG_VARIADIC_MACROS) && !defined(CATCH_CONFIG_NO_VARIADIC_MACROS) && !defined(CATCH_CONFIG_VARIADIC_MACROS)
# define CATCH_CONFIG_VARIADIC_MACROS
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_LONG_LONG) && !defined(CATCH_CONFIG_CPP11_NO_LONG_LONG) && !defined(CATCH_CONFIG_CPP11_LONG_LONG) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_LONG_LONG
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_OVERRIDE) && !defined(CATCH_CONFIG_CPP11_NO_OVERRIDE) && !defined(CATCH_CONFIG_CPP11_OVERRIDE) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_OVERRIDE
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_UNIQUE_PTR) && !defined(CATCH_CONFIG_CPP11_NO_UNIQUE_PTR) && !defined(CATCH_CONFIG_CPP11_UNIQUE_PTR) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_UNIQUE_PTR
#endif
// Use of __COUNTER__ is suppressed if __JETBRAINS_IDE__ is #defined (meaning we're being parsed by a JetBrains IDE for
// analytics) because, at time of writing, __COUNTER__ is not properly handled by it.
// This does not affect compilation
#if defined(CATCH_INTERNAL_CONFIG_COUNTER) && !defined(CATCH_CONFIG_NO_COUNTER) && !defined(CATCH_CONFIG_COUNTER) && !defined(__JETBRAINS_IDE__)
# define CATCH_CONFIG_COUNTER
#endif
#if defined(CATCH_INTERNAL_CONFIG_CPP11_SHUFFLE) && !defined(CATCH_CONFIG_CPP11_NO_SHUFFLE) && !defined(CATCH_CONFIG_CPP11_SHUFFLE) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_SHUFFLE
#endif
# if defined(CATCH_INTERNAL_CONFIG_CPP11_TYPE_TRAITS) && !defined(CATCH_CONFIG_CPP11_NO_TYPE_TRAITS) && !defined(CATCH_CONFIG_CPP11_TYPE_TRAITS) && !defined(CATCH_CONFIG_NO_CPP11)
# define CATCH_CONFIG_CPP11_TYPE_TRAITS
# endif
#if defined(CATCH_INTERNAL_CONFIG_WINDOWS_SEH) && !defined(CATCH_CONFIG_NO_WINDOWS_SEH) && !defined(CATCH_CONFIG_WINDOWS_SEH)
# define CATCH_CONFIG_WINDOWS_SEH
#endif
#if !defined(CATCH_INTERNAL_SUPPRESS_PARENTHESES_WARNINGS)
# define CATCH_INTERNAL_SUPPRESS_PARENTHESES_WARNINGS
#endif
// noexcept support:
#if defined(CATCH_CONFIG_CPP11_NOEXCEPT) && !defined(CATCH_NOEXCEPT)
# define CATCH_NOEXCEPT noexcept
# define CATCH_NOEXCEPT_IS(x) noexcept(x)
#else
# define CATCH_NOEXCEPT throw()
# define CATCH_NOEXCEPT_IS(x)
#endif
// nullptr support
#ifdef CATCH_CONFIG_CPP11_NULLPTR
# define CATCH_NULL nullptr
#else
# define CATCH_NULL NULL
#endif
// override support
#ifdef CATCH_CONFIG_CPP11_OVERRIDE
# define CATCH_OVERRIDE override
#else
# define CATCH_OVERRIDE
#endif
// unique_ptr support
#ifdef CATCH_CONFIG_CPP11_UNIQUE_PTR
# define CATCH_AUTO_PTR( T ) std::unique_ptr<T>
#else
# define CATCH_AUTO_PTR( T ) std::auto_ptr<T>
#endif
#define INTERNAL_CATCH_UNIQUE_NAME_LINE2( name, line ) name##line
#define INTERNAL_CATCH_UNIQUE_NAME_LINE( name, line ) INTERNAL_CATCH_UNIQUE_NAME_LINE2( name, line )
#ifdef CATCH_CONFIG_COUNTER
# define INTERNAL_CATCH_UNIQUE_NAME( name ) INTERNAL_CATCH_UNIQUE_NAME_LINE( name, __COUNTER__ )
#else
# define INTERNAL_CATCH_UNIQUE_NAME( name ) INTERNAL_CATCH_UNIQUE_NAME_LINE( name, __LINE__ )
#endif
#define INTERNAL_CATCH_STRINGIFY2( expr ) #expr
#define INTERNAL_CATCH_STRINGIFY( expr ) INTERNAL_CATCH_STRINGIFY2( expr )
#include <sstream>
#include <algorithm>
namespace Catch {
struct IConfig;
// Case-sensitivity selector used by the string matchers. Wrapped in a
// struct so the values are scoped (CaseSensitive::Yes / CaseSensitive::No)
// without requiring C++11 `enum class`.
struct CaseSensitive { enum Choice {
Yes,
No
}; };
// Base class that makes derived types non-copyable (and non-movable where
// the compiler supports C++11 generated-method control).
class NonCopyable {
#ifdef CATCH_CONFIG_CPP11_GENERATED_METHODS
// C++11 path: explicitly delete all copy/move operations.
NonCopyable( NonCopyable const& ) = delete;
NonCopyable( NonCopyable && ) = delete;
NonCopyable& operator = ( NonCopyable const& ) = delete;
NonCopyable& operator = ( NonCopyable && ) = delete;
#else
// Pre-C++11 path: declare (but never define) copy operations so any
// attempted copy fails to link.
NonCopyable( NonCopyable const& info );
NonCopyable& operator = ( NonCopyable const& );
#endif
protected:
NonCopyable() {}
// Virtual so deleting through a NonCopyable* is safe; defined out of line.
virtual ~NonCopyable();
};
// Implements the C++98 "safe bool" idiom: a pointer-to-member-function
// converts to bool in boolean contexts but, unlike a plain operator bool(),
// does not silently participate in arithmetic or cross-type comparisons.
class SafeBool {
public:
    // Opaque boolean-like type handed back to callers.
    typedef void (SafeBool::*type)() const;

    // Map a plain bool onto the safe-bool representation:
    // a non-null member pointer for true, a null one for false.
    static type makeSafe( bool value ) {
        if( value )
            return &SafeBool::trueValue;
        return 0;
    }
private:
    // Dummy member whose address stands in for "true".
    void trueValue() const {}
};
// Delete every raw pointer held in a container of pointers.
// The container itself is not modified (it still holds the now-dangling
// pointers); callers are expected to discard or clear it afterwards.
template<typename ContainerT>
inline void deleteAll( ContainerT& container ) {
    typename ContainerT::const_iterator pos = container.begin();
    typename ContainerT::const_iterator end = container.end();
    while( pos != end ) {
        delete *pos;
        ++pos;
    }
}
// Delete every mapped value (the ->second of each entry) in an associative
// container of key -> raw-pointer. Keys and the container structure are
// left untouched; callers discard the container afterwards.
template<typename AssociativeContainerT>
inline void deleteAllValues( AssociativeContainerT& container ) {
    typename AssociativeContainerT::const_iterator pos = container.begin();
    typename AssociativeContainerT::const_iterator end = container.end();
    while( pos != end ) {
        delete pos->second;
        ++pos;
    }
}
// Small string utilities (defined in the implementation section).
bool startsWith( std::string const& s, std::string const& prefix );
bool startsWith( std::string const& s, char prefix );
bool endsWith( std::string const& s, std::string const& suffix );
bool endsWith( std::string const& s, char suffix );
bool contains( std::string const& s, std::string const& infix );
void toLowerInPlace( std::string& s );
std::string toLower( std::string const& s );
// Strip leading/trailing whitespace.
std::string trim( std::string const& str );
// Replace every occurrence of replaceThis with withThis; returns true if
// at least one replacement was made.
bool replaceInPlace( std::string& str, std::string const& replaceThis, std::string const& withThis );
// Streams a count followed by a label, pluralising the label when
// count != 1 (e.g. "1 test case" / "2 test cases").
struct pluralise {
pluralise( std::size_t count, std::string const& label );
friend std::ostream& operator << ( std::ostream& os, pluralise const& pluraliser );
std::size_t m_count;
std::string m_label;
};
// Value type carrying a __FILE__/__LINE__ pair; used to report where an
// assertion or test case lives in the source.
struct SourceLineInfo {
SourceLineInfo();
SourceLineInfo( char const* _file, std::size_t _line );
# ifdef CATCH_CONFIG_CPP11_GENERATED_METHODS
SourceLineInfo(SourceLineInfo const& other) = default;
SourceLineInfo( SourceLineInfo && )                  = default;
SourceLineInfo& operator = ( SourceLineInfo const& ) = default;
SourceLineInfo& operator = ( SourceLineInfo && )     = default;
# endif
// True when no file/line has been recorded.
bool empty() const;
bool operator == ( SourceLineInfo const& other ) const;
// Ordering (by file then line) so line infos can be used as map keys.
bool operator < ( SourceLineInfo const& other ) const;
char const* file;
std::size_t line;
};
std::ostream& operator << ( std::ostream& os, SourceLineInfo const& info );
// This is just here to avoid compiler warnings with macro constants and boolean literals
inline bool isTrue( bool value ){ return value; }
inline bool alwaysTrue() { return true; }
inline bool alwaysFalse() { return false; }
// Throws a std::logic_error-style failure annotated with the source location.
void throwLogicError( std::string const& message, SourceLineInfo const& locationInfo );
// Seed the RNG used for test-order shuffling from the config.
void seedRng( IConfig const& config );
unsigned int rngSeed();
// Use this in variadic streaming macros to allow
//    >> +StreamEndStop
// as well as
//    >> stuff +StreamEndStop
struct StreamEndStop {
// Unary + on a bare StreamEndStop yields an empty string.
std::string operator+() {
return std::string();
}
};
// When something precedes the StreamEndStop, pass that value through unchanged.
template<typename T>
T const& operator + ( T const& value, StreamEndStop ) {
return value;
}
}
#define CATCH_INTERNAL_LINEINFO ::Catch::SourceLineInfo( __FILE__, static_cast<std::size_t>( __LINE__ ) )
#define CATCH_INTERNAL_ERROR( msg ) ::Catch::throwLogicError( msg, CATCH_INTERNAL_LINEINFO );
namespace Catch {
// Exception thrown by the CATCH_NOT_IMPLEMENTED macro; carries the source
// location of the unimplemented code in its what() message.
class NotImplementedException : public std::exception
{
public:
NotImplementedException( SourceLineInfo const& lineInfo );
NotImplementedException( NotImplementedException const& ) {}
virtual ~NotImplementedException() CATCH_NOEXCEPT {}
virtual const char* what() const CATCH_NOEXCEPT;
private:
std::string m_what;
SourceLineInfo m_lineInfo;
};
} // end namespace Catch
///////////////////////////////////////////////////////////////////////////////
#define CATCH_NOT_IMPLEMENTED throw Catch::NotImplementedException( CATCH_INTERNAL_LINEINFO )
// #included from: internal/catch_context.h
#define TWOBLUECUBES_CATCH_CONTEXT_H_INCLUDED
// #included from: catch_interfaces_generators.h
#define TWOBLUECUBES_CATCH_INTERFACES_GENERATORS_H_INCLUDED
#include <string>
namespace Catch {
// One data generator: advances through its values and reports its position.
struct IGeneratorInfo {
virtual ~IGeneratorInfo();
// Advance to the next value; returns false once exhausted (wrapping).
virtual bool moveNext() = 0;
virtual std::size_t getCurrentIndex() const = 0;
};
// The set of generators active within a single test case.
struct IGeneratorsForTest {
virtual ~IGeneratorsForTest();
// Look up (or create) the generator keyed by source location, with `size` values.
virtual IGeneratorInfo& getGeneratorInfo( std::string const& fileInfo, std::size_t size ) = 0;
virtual bool moveNext() = 0;
};
IGeneratorsForTest* createGeneratorsForTest();
} // end namespace Catch
// #included from: catch_ptr.hpp
#define TWOBLUECUBES_CATCH_PTR_HPP_INCLUDED
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpadded"
#endif
namespace Catch {
// An intrusive reference counting smart pointer.
// T must implement addRef() and release() methods
// typically implementing the IShared interface
// An intrusive reference-counting smart pointer.
// T must provide addRef() and release() methods - typically by
// implementing the IShared interface.
template<typename T>
class Ptr {
public:
    Ptr() : m_ptr( CATCH_NULL ){}
    Ptr( T* rawPtr ) : m_ptr( rawPtr ){
        if( m_ptr )
            m_ptr->addRef();
    }
    Ptr( Ptr const& other ) : m_ptr( other.m_ptr ){
        if( m_ptr )
            m_ptr->addRef();
    }
    ~Ptr(){
        if( m_ptr )
            m_ptr->release();
    }
    // Drop the current reference (if any) and become null.
    void reset() {
        if( m_ptr )
            m_ptr->release();
        m_ptr = CATCH_NULL;
    }
    // Copy-and-swap assignment: naturally handles self-assignment and
    // releases the old referent via the temporary's destructor.
    Ptr& operator = ( T* rawPtr ){
        Ptr copy( rawPtr );
        swap( copy );
        return *this;
    }
    Ptr& operator = ( Ptr const& other ){
        Ptr copy( other );
        swap( copy );
        return *this;
    }
    void swap( Ptr& other ) { std::swap( m_ptr, other.m_ptr ); }
    T* get() const{ return m_ptr; }
    T& operator*() const { return *m_ptr; }
    T* operator->() const { return m_ptr; }
    bool operator !() const { return m_ptr == CATCH_NULL; }
    // Safe-bool conversion so a Ptr can be tested in boolean contexts.
    operator SafeBool::type() const { return SafeBool::makeSafe( m_ptr != CATCH_NULL ); }
private:
    T* m_ptr;
};
// Interface for intrusively reference-counted objects managed via Ptr<T>.
struct IShared : NonCopyable {
virtual ~IShared();
virtual void addRef() const = 0;
virtual void release() const = 0;
};
// Default reference-counting mixin implementing IShared (or any compatible
// interface T). release() self-deletes when the count reaches zero, so
// instances must be heap-allocated.
template<typename T = IShared>
struct SharedImpl : T {
// Count starts at 0; the first owning Ptr bumps it to 1.
SharedImpl() : m_rc( 0 ){}
virtual void addRef() const {
++m_rc;
}
virtual void release() const {
if( --m_rc == 0 )
delete this;
}
// mutable so const objects can still be shared/ref-counted.
mutable unsigned int m_rc;
};
} // end namespace Catch
#ifdef __clang__
#pragma clang diagnostic pop
#endif
namespace Catch {
class TestCase;
class Stream;
struct IResultCapture;
struct IRunner;
struct IGeneratorsForTest;
struct IConfig;
// Read-only view of the global per-run context (result capture, runner,
// generators, configuration).
struct IContext
{
virtual ~IContext();
virtual IResultCapture* getResultCapture() = 0;
virtual IRunner* getRunner() = 0;
virtual size_t getGeneratorIndex( std::string const& fileInfo, size_t totalSize ) = 0;
virtual bool advanceGeneratorsForCurrentTest() = 0;
virtual Ptr<IConfig const> getConfig() const = 0;
};
// Mutable extension used by the framework itself while wiring up a run.
struct IMutableContext : IContext
{
virtual ~IMutableContext();
virtual void setResultCapture( IResultCapture* resultCapture ) = 0;
virtual void setRunner( IRunner* runner ) = 0;
virtual void setConfig( Ptr<IConfig const> const& config ) = 0;
};
// Accessors for the process-wide singleton context.
IContext& getCurrentContext();
IMutableContext& getCurrentMutableContext();
void cleanUpContext();
Stream createStream( std::string const& streamName );
}
// #included from: internal/catch_test_registry.hpp
#define TWOBLUECUBES_CATCH_TEST_REGISTRY_HPP_INCLUDED
// #included from: catch_interfaces_testcase.h
#define TWOBLUECUBES_CATCH_INTERFACES_TESTCASE_H_INCLUDED
#include <vector>
namespace Catch {
class TestSpec;
// A single invokable test; ref-counted because registrations share ownership.
struct ITestCase : IShared {
virtual void invoke () const = 0;
protected:
virtual ~ITestCase();
};
class TestCase;
struct IConfig;
// Registry of every test case registered via the TEST_CASE macros.
struct ITestCaseRegistry {
virtual ~ITestCaseRegistry();
virtual std::vector<TestCase> const& getAllTests() const = 0;
// Tests ordered per the config's run-order setting (declared/lexical/random).
virtual std::vector<TestCase> const& getAllTestsSorted( IConfig const& config ) const = 0;
};
// Helpers for filtering registered tests against a user-supplied test spec.
bool matchTest( TestCase const& testCase, TestSpec const& testSpec, IConfig const& config );
std::vector<TestCase> filterTests( std::vector<TestCase> const& testCases, TestSpec const& testSpec, IConfig const& config );
std::vector<TestCase> const& getAllTestCasesSorted( IConfig const& config );
}
namespace Catch {
// Adapts a member function of fixture class C into an ITestCase: each
// invocation default-constructs a fresh C and calls the method on it.
template<typename C>
class MethodTestCase : public SharedImpl<ITestCase> {
public:
MethodTestCase( void (C::*method)() ) : m_method( method ) {}
virtual void invoke() const {
C obj;
(obj.*m_method)();
}
private:
virtual ~MethodTestCase() {}
void (C::*m_method)();
};
typedef void(*TestFunction)();
// Name/description pair supplied to the TEST_CASE macros.
struct NameAndDesc {
NameAndDesc( const char* _name = "", const char* _description= "" )
: name( _name ), description( _description )
{}
const char* name;
const char* description;
};
void registerTestCase
(   ITestCase* testCase,
char const* className,
NameAndDesc const& nameAndDesc,
SourceLineInfo const& lineInfo );
// RAII registrar: constructing a (namespace-scope) AutoReg instance at
// static-initialisation time registers the test with the global registry.
struct AutoReg {
AutoReg
(   TestFunction function,
SourceLineInfo const& lineInfo,
NameAndDesc const& nameAndDesc );
// Overload for fixture-based tests (method + class name).
template<typename C>
AutoReg
(   void (C::*method)(),
char const* className,
NameAndDesc const& nameAndDesc,
SourceLineInfo const& lineInfo ) {
registerTestCase
(   new MethodTestCase<C>( method ),
className,
nameAndDesc,
lineInfo );
}
~AutoReg();
private:
// Non-copyable: declared but not defined.
AutoReg( AutoReg const& );
void operator= ( AutoReg const& );
};
void registerTestCaseFunction
(   TestFunction function,
SourceLineInfo const& lineInfo,
NameAndDesc const& nameAndDesc );
} // end namespace Catch
#ifdef CATCH_CONFIG_VARIADIC_MACROS
///////////////////////////////////////////////////////////////////////////////
// Declares the test function, registers it via a namespace-scope AutoReg,
// then opens the function definition (the test body follows the macro).
#define INTERNAL_CATCH_TESTCASE2( TestName, ... ) \
static void TestName(); \
namespace{ Catch::AutoReg INTERNAL_CATCH_UNIQUE_NAME( autoRegistrar )( &TestName, CATCH_INTERNAL_LINEINFO, Catch::NameAndDesc( __VA_ARGS__ ) ); }\
static void TestName()
// Generates a unique function name so multiple TEST_CASEs can coexist.
#define INTERNAL_CATCH_TESTCASE( ... ) \
INTERNAL_CATCH_TESTCASE2( INTERNAL_CATCH_UNIQUE_NAME( ____C_A_T_C_H____T_E_S_T____ ), __VA_ARGS__ )
///////////////////////////////////////////////////////////////////////////////
// Registers an existing member function as a test case.
#define INTERNAL_CATCH_METHOD_AS_TEST_CASE( QualifiedMethod, ... ) \
namespace{ Catch::AutoReg INTERNAL_CATCH_UNIQUE_NAME( autoRegistrar )( &QualifiedMethod, "&" #QualifiedMethod, Catch::NameAndDesc( __VA_ARGS__ ), CATCH_INTERNAL_LINEINFO ); }
///////////////////////////////////////////////////////////////////////////////
// Derives a struct from the fixture class and registers its test() method;
// the body after the macro becomes test()'s definition.
#define INTERNAL_CATCH_TEST_CASE_METHOD2( TestName, ClassName, ... )\
namespace{ \
struct TestName : ClassName{ \
void test(); \
}; \
Catch::AutoReg INTERNAL_CATCH_UNIQUE_NAME( autoRegistrar ) ( &TestName::test, #ClassName, Catch::NameAndDesc( __VA_ARGS__ ), CATCH_INTERNAL_LINEINFO ); \
} \
void TestName::test()
#define INTERNAL_CATCH_TEST_CASE_METHOD( ClassName, ... ) \
INTERNAL_CATCH_TEST_CASE_METHOD2( INTERNAL_CATCH_UNIQUE_NAME( ____C_A_T_C_H____T_E_S_T____ ), ClassName, __VA_ARGS__ )
///////////////////////////////////////////////////////////////////////////////
// Registers a free function as a test case at the point of expansion.
#define INTERNAL_CATCH_REGISTER_TESTCASE( Function, ... ) \
Catch::AutoReg( Function, CATCH_INTERNAL_LINEINFO, Catch::NameAndDesc( __VA_ARGS__ ) );
#else
// Non-variadic fallbacks: identical structure, but name and description are
// two explicit parameters instead of __VA_ARGS__.
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_TESTCASE2( TestName, Name, Desc ) \
static void TestName(); \
namespace{ Catch::AutoReg INTERNAL_CATCH_UNIQUE_NAME( autoRegistrar )( &TestName, CATCH_INTERNAL_LINEINFO, Catch::NameAndDesc( Name, Desc ) ); }\
static void TestName()
#define INTERNAL_CATCH_TESTCASE( Name, Desc ) \
INTERNAL_CATCH_TESTCASE2( INTERNAL_CATCH_UNIQUE_NAME( ____C_A_T_C_H____T_E_S_T____ ), Name, Desc )
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_METHOD_AS_TEST_CASE( QualifiedMethod, Name, Desc ) \
namespace{ Catch::AutoReg INTERNAL_CATCH_UNIQUE_NAME( autoRegistrar )( &QualifiedMethod, "&" #QualifiedMethod, Catch::NameAndDesc( Name, Desc ), CATCH_INTERNAL_LINEINFO ); }
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_TEST_CASE_METHOD2( TestCaseName, ClassName, TestName, Desc )\
namespace{ \
struct TestCaseName : ClassName{ \
void test(); \
}; \
Catch::AutoReg INTERNAL_CATCH_UNIQUE_NAME( autoRegistrar ) ( &TestCaseName::test, #ClassName, Catch::NameAndDesc( TestName, Desc ), CATCH_INTERNAL_LINEINFO ); \
} \
void TestCaseName::test()
#define INTERNAL_CATCH_TEST_CASE_METHOD( ClassName, TestName, Desc )\
INTERNAL_CATCH_TEST_CASE_METHOD2( INTERNAL_CATCH_UNIQUE_NAME( ____C_A_T_C_H____T_E_S_T____ ), ClassName, TestName, Desc )
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_REGISTER_TESTCASE( Function, Name, Desc ) \
Catch::AutoReg( Function, CATCH_INTERNAL_LINEINFO, Catch::NameAndDesc( Name, Desc ) );
#endif
// #included from: internal/catch_capture.hpp
#define TWOBLUECUBES_CATCH_CAPTURE_HPP_INCLUDED
// #included from: catch_result_builder.h
#define TWOBLUECUBES_CATCH_RESULT_BUILDER_H_INCLUDED
// #included from: catch_result_type.h
#define TWOBLUECUBES_CATCH_RESULT_TYPE_H_INCLUDED
namespace Catch {
// ResultWas::OfType enum - classification of an assertion's outcome.
// FailureBit (and the Exception group, which includes it) marks failures,
// so isOk() can test a single bit.
struct ResultWas { enum OfType {
Unknown = -1,
Ok = 0,
Info = 1,
Warning = 2,
FailureBit = 0x10,
ExpressionFailed = FailureBit | 1,
ExplicitFailure = FailureBit | 2,
Exception = 0x100 | FailureBit,
ThrewException = Exception | 1,
DidntThrowException = Exception | 2,
FatalErrorCondition = 0x200 | FailureBit
}; };
inline bool isOk( ResultWas::OfType resultType ) {
return ( resultType & ResultWas::FailureBit ) == 0;
}
inline bool isJustInfo( int flags ) {
return flags == ResultWas::Info;
}
// ResultDisposition::Flags enum - how a failing assertion affects the run.
struct ResultDisposition { enum Flags {
Normal = 0x01,
ContinueOnFailure = 0x02,   // Failures fail test, but execution continues
FalseTest = 0x04,           // Prefix expression with !
SuppressFail = 0x08         // Failures are reported but do not fail the test
}; };
inline ResultDisposition::Flags operator | ( ResultDisposition::Flags lhs, ResultDisposition::Flags rhs ) {
return static_cast<ResultDisposition::Flags>( static_cast<int>( lhs ) | static_cast<int>( rhs ) );
}
inline bool shouldContinueOnFailure( int flags )    { return ( flags & ResultDisposition::ContinueOnFailure ) != 0; }
inline bool isFalseTest( int flags )                { return ( flags & ResultDisposition::FalseTest ) != 0; }
inline bool shouldSuppressFailure( int flags )      { return ( flags & ResultDisposition::SuppressFail ) != 0; }
} // end namespace Catch
// #included from: catch_assertionresult.h
#define TWOBLUECUBES_CATCH_ASSERTIONRESULT_H_INCLUDED
#include <string>
namespace Catch {
struct STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison;
// Interface for a captured assertion expression that can rebuild its own
// source text for reporting.
struct DecomposedExpression
{
virtual ~DecomposedExpression() {}
virtual bool isBinaryExpression() const {
return false;
}
virtual void reconstructExpression( std::string& dest ) const = 0;
// Only simple binary comparisons can be decomposed.
// If more complex check is required then wrap sub-expressions in parentheses.
// These operator templates exist solely to turn chained operators in an
// assertion (e.g. a + b == c) into a compile error naming the struct below.
template<typename T> STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison& operator + ( T const& );
template<typename T> STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison& operator - ( T const& );
template<typename T> STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison& operator * ( T const& );
template<typename T> STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison& operator / ( T const& );
template<typename T> STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison& operator % ( T const& );
template<typename T> STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison& operator && ( T const& );
template<typename T> STATIC_ASSERT_Expression_Too_Complex_Please_Rewrite_As_Binary_Comparison& operator || ( T const& );
};
// Static facts about an assertion site: which macro, where, the captured
// expression text, and how a failure should be treated.
struct AssertionInfo
{
AssertionInfo() {}
AssertionInfo(  std::string const& _macroName,
SourceLineInfo const& _lineInfo,
std::string const& _capturedExpression,
ResultDisposition::Flags _resultDisposition );
std::string macroName;
SourceLineInfo lineInfo;
std::string capturedExpression;
ResultDisposition::Flags resultDisposition;
};
// Mutable outcome data for one assertion. The expanded expression text is
// reconstructed lazily (and cached) from decomposedExpression on first use.
struct AssertionResultData
{
AssertionResultData() : decomposedExpression( CATCH_NULL )
,   resultType( ResultWas::Unknown )
,   negated( false )
,   parenthesized( false ) {}
// Record a logical negation of the expression (CHECK_FALSE etc.);
// flips the pass/fail result type accordingly.
void negate( bool parenthesize ) {
negated = !negated;
parenthesized = parenthesize;
if( resultType == ResultWas::Ok )
resultType = ResultWas::ExpressionFailed;
else if( resultType == ResultWas::ExpressionFailed )
resultType = ResultWas::Ok;
}
// Lazily build the expanded expression string, wrapping in parentheses
// and/or prefixing '!' as recorded; caches the result and then drops the
// (non-owning) pointer so reconstruction happens at most once.
std::string const& reconstructExpression() const {
if( decomposedExpression != CATCH_NULL ) {
decomposedExpression->reconstructExpression( reconstructedExpression );
if( parenthesized ) {
reconstructedExpression.insert( 0, 1, '(' );
reconstructedExpression.append( 1, ')' );
}
if( negated ) {
reconstructedExpression.insert( 0, 1, '!' );
}
decomposedExpression = CATCH_NULL;
}
return reconstructedExpression;
}
// mutable: reconstruction is a caching operation on a logically-const object.
mutable DecomposedExpression const* decomposedExpression;
mutable std::string reconstructedExpression;
std::string message;
ResultWas::OfType resultType;
bool negated;
bool parenthesized;
};
// Immutable pairing of an assertion's site info and its outcome, as handed
// to reporters.
class AssertionResult {
public:
AssertionResult();
AssertionResult( AssertionInfo const& info, AssertionResultData const& data );
~AssertionResult();
#  ifdef CATCH_CONFIG_CPP11_GENERATED_METHODS
AssertionResult( AssertionResult const& )              = default;
AssertionResult( AssertionResult && )                  = default;
AssertionResult& operator = ( AssertionResult const& ) = default;
AssertionResult& operator = ( AssertionResult && )     = default;
#  endif
bool isOk() const;
bool succeeded() const;
ResultWas::OfType getResultType() const;
bool hasExpression() const;
bool hasMessage() const;
std::string getExpression() const;
// Expression as written inside its macro, e.g. CHECK( a == b ).
std::string getExpressionInMacro() const;
bool hasExpandedExpression() const;
// Expression with argument values substituted in.
std::string getExpandedExpression() const;
std::string getMessage() const;
SourceLineInfo getSourceInfo() const;
std::string getTestMacroName() const;
void discardDecomposedExpression() const;
void expandDecomposedExpression() const;
protected:
AssertionInfo m_info;
AssertionResultData m_resultData;
};
} // end namespace Catch
// #included from: catch_matchers.hpp
#define TWOBLUECUBES_CATCH_MATCHERS_HPP_INCLUDED
namespace Catch {
namespace Matchers {
namespace Impl {
namespace Generic {
template<typename ExpressionT> class AllOf;
template<typename ExpressionT> class AnyOf;
template<typename ExpressionT> class Not;
}
// Base interface for all matchers over values of type ExpressionT.
// Ref-counted (via SharedImpl) so composite matchers can share clones.
template<typename ExpressionT>
struct Matcher : SharedImpl<IShared>
{
typedef ExpressionT ExpressionType;
virtual ~Matcher() {}
virtual Ptr<Matcher> clone() const = 0;
virtual bool match( ExpressionT const& expr ) const = 0;
virtual std::string toString() const = 0;
// Combinators: build composite matchers (definitions follow the
// Generic::* class definitions below).
Generic::AllOf<ExpressionT> operator && ( Matcher<ExpressionT> const& other ) const;
Generic::AnyOf<ExpressionT> operator || ( Matcher<ExpressionT> const& other ) const;
Generic::Not<ExpressionT> operator ! () const;
};
// CRTP helper that supplies clone() by copy-constructing the derived type,
// so concrete matchers need not implement it themselves.
template<typename DerivedT, typename ExpressionT>
struct MatcherImpl : Matcher<ExpressionT> {
virtual Ptr<Matcher<ExpressionT> > clone() const {
return Ptr<Matcher<ExpressionT> >( new DerivedT( static_cast<DerivedT const&>( *this ) ) );
}
};
namespace Generic {
// Matcher that inverts the result of a wrapped matcher.
template<typename ExpressionT>
class Not : public MatcherImpl<Not<ExpressionT>, ExpressionT> {
public:
// Clones the wrapped matcher so Not owns an independent copy.
explicit Not( Matcher<ExpressionT> const& matcher ) : m_matcher(matcher.clone()) {}
Not( Not const& other ) : m_matcher( other.m_matcher ) {}
virtual bool match( ExpressionT const& expr ) const CATCH_OVERRIDE {
return !m_matcher->match( expr );
}
virtual std::string toString() const CATCH_OVERRIDE {
return "not " + m_matcher->toString();
}
private:
Ptr< Matcher<ExpressionT> > m_matcher;
};
// Composite matcher that succeeds only when every contained matcher
// accepts the expression; renders as "( a and b ... )".
template<typename ExpressionT>
class AllOf : public MatcherImpl<AllOf<ExpressionT>, ExpressionT> {
    typedef std::vector<Ptr<Matcher<ExpressionT> > > Matchers;
public:
    AllOf() {}
    AllOf( AllOf const& other ) : m_parts( other.m_parts ) {}
    // Clone and store another matcher; returns *this so adds can be chained.
    AllOf& add( Matcher<ExpressionT> const& matcher ) {
        m_parts.push_back( matcher.clone() );
        return *this;
    }
    virtual bool match( ExpressionT const& expr ) const
    {
        for( typename Matchers::const_iterator it = m_parts.begin(), itEnd = m_parts.end(); it != itEnd; ++it )
            if( !(*it)->match( expr ) )
                return false;
        return true;
    }
    virtual std::string toString() const {
        std::ostringstream oss;
        oss << "( ";
        bool first = true;
        for( typename Matchers::const_iterator it = m_parts.begin(), itEnd = m_parts.end(); it != itEnd; ++it ) {
            if( !first )
                oss << " and ";
            first = false;
            oss << (*it)->toString();
        }
        oss << " )";
        return oss.str();
    }
    // Extends a copy of this composite with one more matcher.
    AllOf operator && ( Matcher<ExpressionT> const& other ) const {
        AllOf combined( *this );
        combined.add( other );
        return combined;
    }
private:
    Matchers m_parts;
};
// Composite matcher that succeeds when at least one contained matcher
// accepts the expression; renders as "( a or b ... )".
template<typename ExpressionT>
class AnyOf : public MatcherImpl<AnyOf<ExpressionT>, ExpressionT> {
    typedef std::vector<Ptr<Matcher<ExpressionT> > > Matchers;
public:
    AnyOf() {}
    AnyOf( AnyOf const& other ) : m_parts( other.m_parts ) {}
    // Clone and store another matcher; returns *this so adds can be chained.
    AnyOf& add( Matcher<ExpressionT> const& matcher ) {
        m_parts.push_back( matcher.clone() );
        return *this;
    }
    virtual bool match( ExpressionT const& expr ) const
    {
        for( typename Matchers::const_iterator it = m_parts.begin(), itEnd = m_parts.end(); it != itEnd; ++it )
            if( (*it)->match( expr ) )
                return true;
        return false;
    }
    virtual std::string toString() const {
        std::ostringstream oss;
        oss << "( ";
        bool first = true;
        for( typename Matchers::const_iterator it = m_parts.begin(), itEnd = m_parts.end(); it != itEnd; ++it ) {
            if( !first )
                oss << " or ";
            first = false;
            oss << (*it)->toString();
        }
        oss << " )";
        return oss.str();
    }
    // Extends a copy of this composite with one more matcher.
    AnyOf operator || ( Matcher<ExpressionT> const& other ) const {
        AnyOf combined( *this );
        combined.add( other );
        return combined;
    }
private:
    Matchers m_parts;
};
} // namespace Generic
// Out-of-class definitions of the Matcher combinators declared above;
// each seeds a fresh composite with *this and the other operand.
template<typename ExpressionT>
Generic::AllOf<ExpressionT> Matcher<ExpressionT>::operator && ( Matcher<ExpressionT> const& other ) const {
Generic::AllOf<ExpressionT> allOfExpr;
allOfExpr.add( *this );
allOfExpr.add( other );
return allOfExpr;
}
template<typename ExpressionT>
Generic::AnyOf<ExpressionT> Matcher<ExpressionT>::operator || ( Matcher<ExpressionT> const& other ) const {
Generic::AnyOf<ExpressionT> anyOfExpr;
anyOfExpr.add( *this );
anyOfExpr.add( other );
return anyOfExpr;
}
template<typename ExpressionT>
Generic::Not<ExpressionT> Matcher<ExpressionT>::operator ! () const {
return Generic::Not<ExpressionT>( *this );
}
namespace StdString {
// Normalise the two common string representations to std::string.
// A null char pointer is mapped to the empty string rather than invoking
// undefined behaviour in std::string's constructor.
inline std::string makeString( std::string const& str ) {
    return str;
}
inline std::string makeString( const char* str ) {
    if( str )
        return std::string( str );
    return std::string();
}
// Holds a comparison string together with its case-sensitivity mode; the
// stored string is pre-lowered when comparisons are case-insensitive.
struct CasedString
{
CasedString( std::string const& str, CaseSensitive::Choice caseSensitivity )
:   m_caseSensitivity( caseSensitivity ),
m_str( adjustString( str ) )
{}
// Lower-cases the argument when matching case-insensitively; identity otherwise.
std::string adjustString( std::string const& str ) const {
return m_caseSensitivity == CaseSensitive::No
? toLower( str )
: str;
}
// Suffix appended to matcher descriptions, e.g. " (case insensitive)".
std::string toStringSuffix() const
{
return m_caseSensitivity == CaseSensitive::No
? " (case insensitive)"
: std::string();
}
CaseSensitive::Choice m_caseSensitivity;
std::string m_str;
};
struct Equals : MatcherImpl<Equals, std::string> {
Equals( std::string const& str, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes )
: m_data( str, caseSensitivity )
{}
Equals( Equals const& other ) : m_data( other.m_data ){}
virtual ~Equals();
virtual bool match( std::string const& expr ) const {
return m_data.m_str == m_data.adjustString( expr );;
}
virtual std::string toString() const {
return "equals: \"" + m_data.m_str + '"' + m_data.toStringSuffix();
}
CasedString m_data;
};
        // Matches when the stored substring occurs anywhere in the expression,
        // honouring the case-sensitivity policy.
        struct Contains : MatcherImpl<Contains, std::string> {
            Contains( std::string const& substr, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes )
            : m_data( substr, caseSensitivity ){}
            Contains( Contains const& other ) : m_data( other.m_data ){}
            virtual ~Contains();
            virtual bool match( std::string const& expr ) const {
                return m_data.adjustString( expr ).find( m_data.m_str ) != std::string::npos;
            }
            virtual std::string toString() const {
                return "contains: \"" + m_data.m_str + '"' + m_data.toStringSuffix();
            }
            CasedString m_data;
        };
        // Matches when the expression begins with the stored prefix
        // (startsWith is a string helper defined elsewhere in this header).
        struct StartsWith : MatcherImpl<StartsWith, std::string> {
            StartsWith( std::string const& substr, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes )
            : m_data( substr, caseSensitivity ){}
            StartsWith( StartsWith const& other ) : m_data( other.m_data ){}
            virtual ~StartsWith();
            virtual bool match( std::string const& expr ) const {
                return startsWith( m_data.adjustString( expr ), m_data.m_str );
            }
            virtual std::string toString() const {
                return "starts with: \"" + m_data.m_str + '"' + m_data.toStringSuffix();
            }
            CasedString m_data;
        };
        // Matches when the expression ends with the stored suffix
        // (endsWith is a string helper defined elsewhere in this header).
        struct EndsWith : MatcherImpl<EndsWith, std::string> {
            EndsWith( std::string const& substr, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes )
            : m_data( substr, caseSensitivity ){}
            EndsWith( EndsWith const& other ) : m_data( other.m_data ){}
            virtual ~EndsWith();
            virtual bool match( std::string const& expr ) const {
                return endsWith( m_data.adjustString( expr ), m_data.m_str );
            }
            virtual std::string toString() const {
                return "ends with: \"" + m_data.m_str + '"' + m_data.toStringSuffix();
            }
            CasedString m_data;
        };
} // namespace StdString
} // namespace Impl
// The following functions create the actual matcher objects.
// This allows the types to be inferred
    // Free-function factories for the combinator matchers; returning by value
    // lets ExpressionT be deduced at the call site.
    template<typename ExpressionT>
    inline Impl::Generic::Not<ExpressionT> Not( Impl::Matcher<ExpressionT> const& m ) {
        return Impl::Generic::Not<ExpressionT>( m );
    }
    template<typename ExpressionT>
    inline Impl::Generic::AllOf<ExpressionT> AllOf( Impl::Matcher<ExpressionT> const& m1,
                                                    Impl::Matcher<ExpressionT> const& m2 ) {
        return Impl::Generic::AllOf<ExpressionT>().add( m1 ).add( m2 );
    }
    template<typename ExpressionT>
    inline Impl::Generic::AllOf<ExpressionT> AllOf( Impl::Matcher<ExpressionT> const& m1,
                                                    Impl::Matcher<ExpressionT> const& m2,
                                                    Impl::Matcher<ExpressionT> const& m3 ) {
        return Impl::Generic::AllOf<ExpressionT>().add( m1 ).add( m2 ).add( m3 );
    }
    template<typename ExpressionT>
    inline Impl::Generic::AnyOf<ExpressionT> AnyOf( Impl::Matcher<ExpressionT> const& m1,
                                                    Impl::Matcher<ExpressionT> const& m2 ) {
        return Impl::Generic::AnyOf<ExpressionT>().add( m1 ).add( m2 );
    }
    template<typename ExpressionT>
    inline Impl::Generic::AnyOf<ExpressionT> AnyOf( Impl::Matcher<ExpressionT> const& m1,
                                                    Impl::Matcher<ExpressionT> const& m2,
                                                    Impl::Matcher<ExpressionT> const& m3 ) {
        return Impl::Generic::AnyOf<ExpressionT>().add( m1 ).add( m2 ).add( m3 );
    }
    // Factories for the string matchers; the char* overloads go through
    // makeString, which maps a null pointer to the empty string.
    inline Impl::StdString::Equals Equals( std::string const& str, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes ) {
        return Impl::StdString::Equals( str, caseSensitivity );
    }
    inline Impl::StdString::Equals Equals( const char* str, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes ) {
        return Impl::StdString::Equals( Impl::StdString::makeString( str ), caseSensitivity );
    }
    inline Impl::StdString::Contains Contains( std::string const& substr, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes ) {
        return Impl::StdString::Contains( substr, caseSensitivity );
    }
    inline Impl::StdString::Contains Contains( const char* substr, CaseSensitive::Choice caseSensitivity = CaseSensitive::Yes ) {
        return Impl::StdString::Contains( Impl::StdString::makeString( substr ), caseSensitivity );
    }
inline Impl::StdString::StartsWith StartsWith( std::string const& substr ) {
return Impl::StdString::StartsWith( substr );
}
inline Impl::StdString::StartsWith StartsWith( const char* substr ) {
return Impl::StdString::StartsWith( Impl::StdString::makeString( substr ) );
}
inline Impl::StdString::EndsWith EndsWith( std::string const& substr ) {
return Impl::StdString::EndsWith( substr );
}
inline Impl::StdString::EndsWith EndsWith( const char* substr ) {
return Impl::StdString::EndsWith( Impl::StdString::makeString( substr ) );
}
} // namespace Matchers
using namespace Matchers;
} // namespace Catch
namespace Catch {
struct TestFailureException{};
template<typename T> class ExpressionLhs;
struct CopyableStream {
CopyableStream() {}
CopyableStream( CopyableStream const& other ) {
oss << other.oss.str();
}
CopyableStream& operator=( CopyableStream const& other ) {
oss.str(std::string());
oss << other.oss.str();
return *this;
}
std::ostringstream oss;
};
    // Accumulates everything needed to report one assertion - macro name,
    // source location, captured expression text, streamed user messages and
    // the decomposed result - then builds an AssertionResult for the reporter.
    class ResultBuilder : public DecomposedExpression {
    public:
        ResultBuilder( char const* macroName,
                       SourceLineInfo const& lineInfo,
                       char const* capturedExpression,
                       ResultDisposition::Flags resultDisposition,
                       char const* secondArg = "" );
        // operator<= captures the LHS of the asserted expression; it is chosen
        // for its low precedence so everything to its right binds first.
        template<typename T>
        ExpressionLhs<T const&> operator <= ( T const& operand );
        ExpressionLhs<bool> operator <= ( bool value );
        // Streams additional user message text into the result.
        template<typename T>
        ResultBuilder& operator << ( T const& value ) {
            m_stream.oss << value;
            return *this;
        }
        ResultBuilder& setResultType( ResultWas::OfType result );
        ResultBuilder& setResultType( bool result );
        void endExpression( DecomposedExpression const& expr );
        virtual void reconstructExpression( std::string& dest ) const CATCH_OVERRIDE;
        AssertionResult build() const;
        AssertionResult build( DecomposedExpression const& expr ) const;
        // Records the currently active C++ exception as this assertion's result.
        void useActiveException( ResultDisposition::Flags resultDisposition = ResultDisposition::Normal );
        void captureResult( ResultWas::OfType resultType );
        void captureExpression();
        void captureExpectedException( std::string const& expectedMessage );
        void captureExpectedException( Matchers::Impl::Matcher<std::string> const& matcher );
        void handleResult( AssertionResult const& result );
        void react();
        bool shouldDebugBreak() const;
        bool allowThrows() const;
        template<typename ArgT, typename MatcherT>
        void captureMatch( ArgT const& arg, MatcherT const& matcher, char const* matcherString );
    private:
        AssertionInfo m_assertionInfo;
        AssertionResultData m_data;
        CopyableStream m_stream;
        bool m_shouldDebugBreak;
        bool m_shouldThrow;
    };
} // namespace Catch
// Include after due to circular dependency:
// #included from: catch_expression_lhs.hpp
#define TWOBLUECUBES_CATCH_EXPRESSION_LHS_HPP_INCLUDED
// #included from: catch_evaluate.hpp
#define TWOBLUECUBES_CATCH_EVALUATE_HPP_INCLUDED
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4389) // '==' : signed/unsigned mismatch
#endif
#include <cstddef>
namespace Catch {
namespace Internal {
enum Operator {
IsEqualTo,
IsNotEqualTo,
IsLessThan,
IsGreaterThan,
IsLessThanOrEqualTo,
IsGreaterThanOrEqualTo
};
template<Operator Op> struct OperatorTraits { static const char* getName(){ return "*error*"; } };
template<> struct OperatorTraits<IsEqualTo> { static const char* getName(){ return "=="; } };
template<> struct OperatorTraits<IsNotEqualTo> { static const char* getName(){ return "!="; } };
template<> struct OperatorTraits<IsLessThan> { static const char* getName(){ return "<"; } };
template<> struct OperatorTraits<IsGreaterThan> { static const char* getName(){ return ">"; } };
template<> struct OperatorTraits<IsLessThanOrEqualTo> { static const char* getName(){ return "<="; } };
template<> struct OperatorTraits<IsGreaterThanOrEqualTo>{ static const char* getName(){ return ">="; } };
template<typename T>
inline T& opCast(T const& t) { return const_cast<T&>(t); }
// nullptr_t support based on pull request #154 from Konstantin Baumann
#ifdef CATCH_CONFIG_CPP11_NULLPTR
inline std::nullptr_t opCast(std::nullptr_t) { return nullptr; }
#endif // CATCH_CONFIG_CPP11_NULLPTR
// So the compare overloads can be operator agnostic we convey the operator as a template
// enum, which is used to specialise an Evaluator for doing the comparison.
template<typename T1, typename T2, Operator Op>
class Evaluator{};
template<typename T1, typename T2>
struct Evaluator<T1, T2, IsEqualTo> {
static bool evaluate( T1 const& lhs, T2 const& rhs) {
return bool( opCast( lhs ) == opCast( rhs ) );
}
};
template<typename T1, typename T2>
struct Evaluator<T1, T2, IsNotEqualTo> {
static bool evaluate( T1 const& lhs, T2 const& rhs ) {
return bool( opCast( lhs ) != opCast( rhs ) );
}
};
template<typename T1, typename T2>
struct Evaluator<T1, T2, IsLessThan> {
static bool evaluate( T1 const& lhs, T2 const& rhs ) {
return bool( opCast( lhs ) < opCast( rhs ) );
}
};
template<typename T1, typename T2>
struct Evaluator<T1, T2, IsGreaterThan> {
static bool evaluate( T1 const& lhs, T2 const& rhs ) {
return bool( opCast( lhs ) > opCast( rhs ) );
}
};
template<typename T1, typename T2>
struct Evaluator<T1, T2, IsGreaterThanOrEqualTo> {
static bool evaluate( T1 const& lhs, T2 const& rhs ) {
return bool( opCast( lhs ) >= opCast( rhs ) );
}
};
template<typename T1, typename T2>
struct Evaluator<T1, T2, IsLessThanOrEqualTo> {
static bool evaluate( T1 const& lhs, T2 const& rhs ) {
return bool( opCast( lhs ) <= opCast( rhs ) );
}
};
template<Operator Op, typename T1, typename T2>
bool applyEvaluator( T1 const& lhs, T2 const& rhs ) {
return Evaluator<T1, T2, Op>::evaluate( lhs, rhs );
}
    // This level of indirection allows us to specialise for integer types
    // to avoid signed/ unsigned warnings
    // "base" overload
    template<Operator Op, typename T1, typename T2>
    bool compare( T1 const& lhs, T2 const& rhs ) {
        return Evaluator<T1, T2, Op>::evaluate( lhs, rhs );
    }
    // The mixed-signedness overloads below cast one side so the comparison is
    // done on a common (unsigned) type without compiler warnings.
    // NOTE(review): the chosen cast widths mirror upstream Catch; some can
    // change the value of negative or very large operands - documented here
    // rather than altered, as assertions in existing suites depend on them.
    // unsigned X to int
    template<Operator Op> bool compare( unsigned int lhs, int rhs ) {
        return applyEvaluator<Op>( lhs, static_cast<unsigned int>( rhs ) );
    }
    template<Operator Op> bool compare( unsigned long lhs, int rhs ) {
        return applyEvaluator<Op>( lhs, static_cast<unsigned int>( rhs ) );
    }
    template<Operator Op> bool compare( unsigned char lhs, int rhs ) {
        return applyEvaluator<Op>( lhs, static_cast<unsigned int>( rhs ) );
    }
    // unsigned X to long
    template<Operator Op> bool compare( unsigned int lhs, long rhs ) {
        return applyEvaluator<Op>( lhs, static_cast<unsigned long>( rhs ) );
    }
    template<Operator Op> bool compare( unsigned long lhs, long rhs ) {
        return applyEvaluator<Op>( lhs, static_cast<unsigned long>( rhs ) );
    }
    template<Operator Op> bool compare( unsigned char lhs, long rhs ) {
        return applyEvaluator<Op>( lhs, static_cast<unsigned long>( rhs ) );
    }
    // int to unsigned X
    template<Operator Op> bool compare( int lhs, unsigned int rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned int>( lhs ), rhs );
    }
    template<Operator Op> bool compare( int lhs, unsigned long rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned int>( lhs ), rhs );
    }
    template<Operator Op> bool compare( int lhs, unsigned char rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned int>( lhs ), rhs );
    }
    // long to unsigned X
    template<Operator Op> bool compare( long lhs, unsigned int rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( long lhs, unsigned long rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( long lhs, unsigned char rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned long>( lhs ), rhs );
    }
    // pointer to long (when comparing against NULL)
    template<Operator Op, typename T> bool compare( long lhs, T* rhs ) {
        return Evaluator<T*, T*, Op>::evaluate( reinterpret_cast<T*>( lhs ), rhs );
    }
    template<Operator Op, typename T> bool compare( T* lhs, long rhs ) {
        return Evaluator<T*, T*, Op>::evaluate( lhs, reinterpret_cast<T*>( rhs ) );
    }
    // pointer to int (when comparing against NULL)
    template<Operator Op, typename T> bool compare( int lhs, T* rhs ) {
        return Evaluator<T*, T*, Op>::evaluate( reinterpret_cast<T*>( lhs ), rhs );
    }
    template<Operator Op, typename T> bool compare( T* lhs, int rhs ) {
        return Evaluator<T*, T*, Op>::evaluate( lhs, reinterpret_cast<T*>( rhs ) );
    }
#ifdef CATCH_CONFIG_CPP11_LONG_LONG
    // long long to unsigned X
    // NOTE(review): casting (unsigned) long long through unsigned long / long
    // can truncate on platforms where long is 32-bit - upstream behaviour.
    template<Operator Op> bool compare( long long lhs, unsigned int rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( long long lhs, unsigned long rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( long long lhs, unsigned long long rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( long long lhs, unsigned char rhs ) {
        return applyEvaluator<Op>( static_cast<unsigned long>( lhs ), rhs );
    }
    // unsigned long long to X
    template<Operator Op> bool compare( unsigned long long lhs, int rhs ) {
        return applyEvaluator<Op>( static_cast<long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( unsigned long long lhs, long rhs ) {
        return applyEvaluator<Op>( static_cast<long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( unsigned long long lhs, long long rhs ) {
        return applyEvaluator<Op>( static_cast<long>( lhs ), rhs );
    }
    template<Operator Op> bool compare( unsigned long long lhs, char rhs ) {
        return applyEvaluator<Op>( static_cast<long>( lhs ), rhs );
    }
    // pointer to long long (when comparing against NULL)
    template<Operator Op, typename T> bool compare( long long lhs, T* rhs ) {
        return Evaluator<T*, T*, Op>::evaluate( reinterpret_cast<T*>( lhs ), rhs );
    }
    template<Operator Op, typename T> bool compare( T* lhs, long long rhs ) {
        return Evaluator<T*, T*, Op>::evaluate( lhs, reinterpret_cast<T*>( rhs ) );
    }
#endif // CATCH_CONFIG_CPP11_LONG_LONG
#ifdef CATCH_CONFIG_CPP11_NULLPTR
    // pointer to nullptr_t (when comparing against nullptr)
    template<Operator Op, typename T> bool compare( std::nullptr_t, T* rhs ) {
        return Evaluator<T*, T*, Op>::evaluate( nullptr, rhs );
    }
    template<Operator Op, typename T> bool compare( T* lhs, std::nullptr_t ) {
        return Evaluator<T*, T*, Op>::evaluate( lhs, nullptr );
    }
#endif // CATCH_CONFIG_CPP11_NULLPTR
} // end of namespace Internal
} // end of namespace Catch
#ifdef _MSC_VER
#pragma warning(pop)
#endif
// #included from: catch_tostring.h
#define TWOBLUECUBES_CATCH_TOSTRING_H_INCLUDED
#include <sstream>
#include <iomanip>
#include <limits>
#include <vector>
#include <cstddef>
#ifdef __OBJC__
// #included from: catch_objc_arc.hpp
#define TWOBLUECUBES_CATCH_OBJC_ARC_HPP_INCLUDED
#import <Foundation/Foundation.h>
#ifdef __has_feature
#define CATCH_ARC_ENABLED __has_feature(objc_arc)
#else
#define CATCH_ARC_ENABLED 0
#endif
// Memory-management shims so the same code works under both manual
// retain/release and ARC Objective-C builds.
void arcSafeRelease( NSObject* obj );
id performOptionalSelector( id obj, SEL sel );
#if !CATCH_ARC_ENABLED
// Manual retain/release build: explicitly release the object.
inline void arcSafeRelease( NSObject* obj ) {
    [obj release];
}
// Invokes sel on obj only if it responds to it; returns nil otherwise.
inline id performOptionalSelector( id obj, SEL sel ) {
    if( [obj respondsToSelector: sel] )
        return [obj performSelector: sel];
    return nil;
}
#define CATCH_UNSAFE_UNRETAINED
#define CATCH_ARC_STRONG
#else
// ARC build: the compiler manages releases, so this is a no-op.
inline void arcSafeRelease( NSObject* ){}
inline id performOptionalSelector( id obj, SEL sel ) {
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
#endif
    if( [obj respondsToSelector: sel] )
        return [obj performSelector: sel];
#ifdef __clang__
#pragma clang diagnostic pop
#endif
    return nil;
}
#define CATCH_UNSAFE_UNRETAINED __unsafe_unretained
#define CATCH_ARC_STRONG __strong
#endif
#endif
#ifdef CATCH_CONFIG_CPP11_TUPLE
#include <tuple>
#endif
#ifdef CATCH_CONFIG_CPP11_IS_ENUM
#include <type_traits>
#endif
namespace Catch {
// Why we're here.
    // Universal toString - declared here, defined further down once
    // StringMaker is available.
    template<typename T>
    std::string toString( T const& value );
    // Built in overloads
    std::string toString( std::string const& value );
    std::string toString( std::wstring const& value );
    std::string toString( const char* const value );
    std::string toString( char* const value );
    std::string toString( const wchar_t* const value );
    std::string toString( wchar_t* const value );
    std::string toString( int value );
    std::string toString( unsigned long value );
    std::string toString( unsigned int value );
    std::string toString( const double value );
    std::string toString( const float value );
    std::string toString( bool value );
    std::string toString( char value );
    std::string toString( signed char value );
    std::string toString( unsigned char value );
#ifdef CATCH_CONFIG_CPP11_LONG_LONG
    std::string toString( long long value );
    std::string toString( unsigned long long value );
#endif
#ifdef CATCH_CONFIG_CPP11_NULLPTR
    std::string toString( std::nullptr_t );
#endif
#ifdef __OBJC__
    std::string toString( NSString const * const& nsstring );
    std::string toString( NSString * CATCH_ARC_STRONG const& nsstring );
    std::string toString( NSObject* const& nsObject );
#endif
    namespace Detail {
        // Placeholder rendered for values with no usable string conversion.
        extern const std::string unprintableString;
        // Implicitly convertible from anything - the worst-case parameter for
        // the catch-all operator<< below, so it is only chosen when no real
        // stream insertion operator exists.
        struct BorgType {
            template<typename T> BorgType( T const& );
        };
        // Distinguishable-by-size return types for the sizeof-based detection.
        struct TrueType { char sizer[1]; };
        struct FalseType { char sizer[2]; };
        TrueType& testStreamable( std::ostream& );
        FalseType testStreamable( FalseType );
        FalseType operator<<( std::ostream const&, BorgType const& );
        // Compile-time detection of whether `ostream << T` is well formed:
        // if a real operator<< exists, (s << t) is an ostream& and the
        // TrueType overload of testStreamable is selected.
        template<typename T>
        struct IsStreamInsertable {
            static std::ostream &s;
            static T const&t;
            enum { value = sizeof( testStreamable(s << t) ) == sizeof( TrueType ) };
        };
#if defined(CATCH_CONFIG_CPP11_IS_ENUM)
        // Renders enums via their underlying integral type; non-enums fall
        // back to the unprintable placeholder.
        template<typename T,
                 bool IsEnum = std::is_enum<T>::value
                 >
        struct EnumStringMaker
        {
            static std::string convert( T const& ) { return unprintableString; }
        };
        template<typename T>
        struct EnumStringMaker<T,true>
        {
            static std::string convert( T const& v )
            {
                return ::Catch::toString(
                    static_cast<typename std::underlying_type<T>::type>(v)
                );
            }
        };
#endif
        // Base for StringMaker, selected on stream-insertability:
        // false -> placeholder (or enum handling), true -> use operator<<.
        template<bool C>
        struct StringMakerBase {
#if defined(CATCH_CONFIG_CPP11_IS_ENUM)
            template<typename T>
            static std::string convert( T const& v )
            {
                return EnumStringMaker<T>::convert( v );
            }
#else
            template<typename T>
            static std::string convert( T const& ) { return unprintableString; }
#endif
        };
        template<>
        struct StringMakerBase<true> {
            template<typename T>
            static std::string convert( T const& _value ) {
                std::ostringstream oss;
                oss << _value;
                return oss.str();
            }
        };
        // Hex dump of an object's bytes - last-resort representation.
        std::string rawMemoryToString( const void *object, std::size_t size );
        template<typename T>
        inline std::string rawMemoryToString( const T& object ) {
            return rawMemoryToString( &object, sizeof(object) );
        }
    } // end namespace Detail
    // Primary customisation point for stringification; picks the streaming or
    // placeholder base depending on whether `ostream << T` exists.
    template<typename T>
    struct StringMaker :
        Detail::StringMakerBase<Detail::IsStreamInsertable<T>::value> {};
    // Pointers render as "NULL" or a raw memory dump of the pointer value.
    template<typename T>
    struct StringMaker<T*> {
        template<typename U>
        static std::string convert( U* p ) {
            if( !p )
                return "NULL";
            else
                return Detail::rawMemoryToString( p );
        }
    };
    // Pointers-to-member likewise.
    template<typename R, typename C>
    struct StringMaker<R C::*> {
        static std::string convert( R C::* p ) {
            if( !p )
                return "NULL";
            else
                return Detail::rawMemoryToString( p );
        }
    };
    namespace Detail {
        template<typename InputIterator>
        std::string rangeToString( InputIterator first, InputIterator last );
    }
    //template<typename T, typename Allocator>
    //struct StringMaker<std::vector<T, Allocator> > {
    //    static std::string convert( std::vector<T,Allocator> const& v ) {
    //        return Detail::rangeToString( v.begin(), v.end() );
    //    }
    //};
    // Vectors render as "{ a, b, c }" via rangeToString.
    template<typename T, typename Allocator>
    std::string toString( std::vector<T,Allocator> const& v ) {
        return Detail::rangeToString( v.begin(), v.end() );
    }
#ifdef CATCH_CONFIG_CPP11_TUPLE
    // toString for tuples
    namespace TupleDetail {
      // Recursively prints tuple elements; the bool parameter terminates the
      // recursion once N reaches the tuple size.
      template<
          typename Tuple,
          std::size_t N = 0,
          bool = (N < std::tuple_size<Tuple>::value)
          >
      struct ElementPrinter {
          static void print( const Tuple& tuple, std::ostream& os )
          {
              os << ( N ? ", " : " " )
                 << Catch::toString(std::get<N>(tuple));
              ElementPrinter<Tuple,N+1>::print(tuple,os);
          }
      };
      template<
          typename Tuple,
          std::size_t N
          >
      struct ElementPrinter<Tuple,N,false> {
          static void print( const Tuple&, std::ostream& ) {}
      };
    }
    // Tuples render as "{ a, b, c }".
    template<typename ...Types>
    struct StringMaker<std::tuple<Types...>> {
        static std::string convert( const std::tuple<Types...>& tuple )
        {
            std::ostringstream os;
            os << '{';
            TupleDetail::ElementPrinter<std::tuple<Types...>>::print( tuple, os );
            os << " }";
            return os.str();
        }
    };
#endif // CATCH_CONFIG_CPP11_TUPLE
    namespace Detail {
        // Thin forwarding helper kept for internal callers.
        template<typename T>
        std::string makeString( T const& value ) {
            return StringMaker<T>::convert( value );
        }
    } // end namespace Detail
    /// \brief converts any type to a string
    ///
    /// The default template forwards on to ostringstream - except when an
    /// ostringstream overload does not exist - in which case it attempts to detect
    /// that and writes {?}.
    /// Overload (not specialise) this template for custom typs that you don't want
    /// to provide an ostream overload for.
    template<typename T>
    std::string toString( T const& value ) {
        return StringMaker<T>::convert( value );
    }
namespace Detail {
template<typename InputIterator>
std::string rangeToString( InputIterator first, InputIterator last ) {
std::ostringstream oss;
oss << "{ ";
if( first != last ) {
oss << Catch::toString( *first );
for( ++first ; first != last ; ++first )
oss << ", " << Catch::toString( *first );
}
oss << " }";
return oss.str();
}
}
} // end namespace Catch
namespace Catch {
    template<typename LhsT, Internal::Operator Op, typename RhsT>
    class BinaryExpression;
    template<typename ArgT, typename MatcherT>
    class MatchExpression;
    // Wraps the LHS of an expression and overloads comparison operators
    // for also capturing those and RHS (if any)
    template<typename T>
    class ExpressionLhs : public DecomposedExpression {
    public:
        ExpressionLhs( ResultBuilder& rb, T lhs ) : m_rb( rb ), m_lhs( lhs ), m_truthy(false) {}
        // Each comparison operator captures its RHS and yields a
        // BinaryExpression that can evaluate and reconstruct the full text.
        template<typename RhsT>
        BinaryExpression<T, Internal::IsEqualTo, RhsT const&>
        operator == ( RhsT const& rhs ) {
            return captureExpression<Internal::IsEqualTo>( rhs );
        }
        template<typename RhsT>
        BinaryExpression<T, Internal::IsNotEqualTo, RhsT const&>
        operator != ( RhsT const& rhs ) {
            return captureExpression<Internal::IsNotEqualTo>( rhs );
        }
        template<typename RhsT>
        BinaryExpression<T, Internal::IsLessThan, RhsT const&>
        operator < ( RhsT const& rhs ) {
            return captureExpression<Internal::IsLessThan>( rhs );
        }
        template<typename RhsT>
        BinaryExpression<T, Internal::IsGreaterThan, RhsT const&>
        operator > ( RhsT const& rhs ) {
            return captureExpression<Internal::IsGreaterThan>( rhs );
        }
        template<typename RhsT>
        BinaryExpression<T, Internal::IsLessThanOrEqualTo, RhsT const&>
        operator <= ( RhsT const& rhs ) {
            return captureExpression<Internal::IsLessThanOrEqualTo>( rhs );
        }
        template<typename RhsT>
        BinaryExpression<T, Internal::IsGreaterThanOrEqualTo, RhsT const&>
        operator >= ( RhsT const& rhs ) {
            return captureExpression<Internal::IsGreaterThanOrEqualTo>( rhs );
        }
        BinaryExpression<T, Internal::IsEqualTo, bool> operator == ( bool rhs ) {
            return captureExpression<Internal::IsEqualTo>( rhs );
        }
        BinaryExpression<T, Internal::IsNotEqualTo, bool> operator != ( bool rhs ) {
            return captureExpression<Internal::IsNotEqualTo>( rhs );
        }
        // Called when there was no RHS: the LHS alone is evaluated for truth.
        void endExpression() {
            m_truthy = m_lhs ? true : false;
            m_rb
                .setResultType( m_truthy )
                .endExpression( *this );
        }
        virtual void reconstructExpression( std::string& dest ) const CATCH_OVERRIDE {
            dest = Catch::toString( m_truthy );
        }
    private:
        template<Internal::Operator Op, typename RhsT>
        BinaryExpression<T, Op, RhsT&> captureExpression( RhsT& rhs ) const {
            return BinaryExpression<T, Op, RhsT&>( m_rb, m_lhs, rhs );
        }
        // bool RHS is captured by value so temporaries stay valid.
        template<Internal::Operator Op>
        BinaryExpression<T, Op, bool> captureExpression( bool rhs ) const {
            return BinaryExpression<T, Op, bool>( m_rb, m_lhs, rhs );
        }
    private:
        ResultBuilder& m_rb;
        T m_lhs;
        bool m_truthy;
    };
    // Holds both sides of a decomposed comparison; evaluates via
    // Internal::compare and can rebuild the expression text for reports.
    template<typename LhsT, Internal::Operator Op, typename RhsT>
    class BinaryExpression : public DecomposedExpression {
    public:
        BinaryExpression( ResultBuilder& rb, LhsT lhs, RhsT rhs )
            : m_rb( rb ), m_lhs( lhs ), m_rhs( rhs ) {}
        void endExpression() const {
            m_rb
                .setResultType( Internal::compare<Op>( m_lhs, m_rhs ) )
                .endExpression( *this );
        }
        virtual bool isBinaryExpression() const CATCH_OVERRIDE {
            return true;
        }
        virtual void reconstructExpression( std::string& dest ) const CATCH_OVERRIDE {
            std::string lhs = Catch::toString( m_lhs );
            std::string rhs = Catch::toString( m_rhs );
            // Short, single-line operands go on one line; otherwise each part
            // is placed on its own line for readability.
            char delim = lhs.size() + rhs.size() < 40 &&
                         lhs.find('\n') == std::string::npos &&
                         rhs.find('\n') == std::string::npos ? ' ' : '\n';
            dest.reserve( 7 + lhs.size() + rhs.size() );
                       // 2 for spaces around operator
                       // 2 for operator
                       // 2 for parentheses (conditionally added later)
                       // 1 for negation (conditionally added later)
            dest = lhs;
            dest += delim;
            dest += Internal::OperatorTraits<Op>::getName();
            dest += delim;
            dest += rhs;
        }
    private:
        ResultBuilder& m_rb;
        LhsT m_lhs;
        RhsT m_rhs;
    };
    // Pairs a value with a matcher (for CHECK_THAT-style assertions) and
    // reconstructs "<arg> <matcher description>" for reports.
    template<typename ArgT, typename MatcherT>
    class MatchExpression : public DecomposedExpression {
    public:
        MatchExpression( ArgT arg, MatcherT matcher, char const* matcherString )
            : m_arg( arg ), m_matcher( matcher ), m_matcherString( matcherString ) {}
        virtual bool isBinaryExpression() const CATCH_OVERRIDE {
            return true;
        }
        virtual void reconstructExpression( std::string& dest ) const CATCH_OVERRIDE {
            std::string matcherAsString = m_matcher.toString();
            dest = Catch::toString( m_arg );
            dest += ' ';
            // Fall back to the captured source text if the matcher has no
            // printable description.
            if( matcherAsString == Detail::unprintableString )
                dest += m_matcherString;
            else
                dest += matcherAsString;
        }
    private:
        ArgT m_arg;
        MatcherT m_matcher;
        char const* m_matcherString;
    };
} // end namespace Catch
namespace Catch {
    // Defined after ExpressionLhs/MatchExpression due to the circular
    // dependency between ResultBuilder and the expression templates.
    template<typename T>
    inline ExpressionLhs<T const&> ResultBuilder::operator <= ( T const& operand ) {
        return ExpressionLhs<T const&>( *this, operand );
    }
    inline ExpressionLhs<bool> ResultBuilder::operator <= ( bool value ) {
        return ExpressionLhs<bool>( *this, value );
    }
    // Runs the matcher against the argument and records the outcome.
    template<typename ArgT, typename MatcherT>
    inline void ResultBuilder::captureMatch( ArgT const& arg, MatcherT const& matcher,
                                             char const* matcherString ) {
        MatchExpression<ArgT const&, MatcherT const&> expr( arg, matcher, matcherString );
        setResultType( matcher.match( arg ) );
        endExpression( expr );
    }
} // namespace Catch
// #included from: catch_message.h
#define TWOBLUECUBES_CATCH_MESSAGE_H_INCLUDED
#include <string>
namespace Catch {
    // One INFO/WARN-style message: where it came from and what it says.
    // Messages are ordered and deduplicated by a monotonically increasing
    // sequence number.
    struct MessageInfo {
        MessageInfo(    std::string const& _macroName,
                        SourceLineInfo const& _lineInfo,
                        ResultWas::OfType _type );
        std::string macroName;
        SourceLineInfo lineInfo;
        ResultWas::OfType type;
        std::string message;
        unsigned int sequence;
        bool operator == ( MessageInfo const& other ) const {
            return sequence == other.sequence;
        }
        bool operator < ( MessageInfo const& other ) const {
            return sequence < other.sequence;
        }
    private:
        // Source of the per-message sequence numbers.
        static unsigned int globalCount;
    };
    // Builder used by the message macros: collects streamed values into
    // m_stream alongside the message metadata.
    struct MessageBuilder {
        MessageBuilder( std::string const& macroName,
                        SourceLineInfo const& lineInfo,
                        ResultWas::OfType type )
        : m_info( macroName, lineInfo, type )
        {}
        template<typename T>
        MessageBuilder& operator << ( T const& value ) {
            m_stream << value;
            return *this;
        }
        MessageInfo m_info;
        std::ostringstream m_stream;
    };
    // RAII holder that registers the message with the result capture on
    // construction and removes it on destruction (scope exit).
    class ScopedMessage {
    public:
        ScopedMessage( MessageBuilder const& builder );
        ScopedMessage( ScopedMessage const& other );
        ~ScopedMessage();
        MessageInfo m_info;
    };
} // end namespace Catch
// #included from: catch_interfaces_capture.h
#define TWOBLUECUBES_CATCH_INTERFACES_CAPTURE_H_INCLUDED
#include <string>
namespace Catch {
    class TestCase;
    class AssertionResult;
    struct AssertionInfo;
    struct SectionInfo;
    struct SectionEndInfo;
    struct MessageInfo;
    class ScopedMessageBuilder;
    struct Counts;
    // Interface through which assertion results, sections and scoped messages
    // are reported to the currently running test context.
    struct IResultCapture {
        virtual ~IResultCapture();
        virtual void assertionEnded( AssertionResult const& result ) = 0;
        virtual bool sectionStarted(    SectionInfo const& sectionInfo,
                                        Counts& assertions ) = 0;
        virtual void sectionEnded( SectionEndInfo const& endInfo ) = 0;
        virtual void sectionEndedEarly( SectionEndInfo const& endInfo ) = 0;
        virtual void pushScopedMessage( MessageInfo const& message ) = 0;
        virtual void popScopedMessage( MessageInfo const& message ) = 0;
        virtual std::string getCurrentTestName() const = 0;
        virtual const AssertionResult* getLastResult() const = 0;
        virtual void handleFatalErrorCondition( std::string const& message ) = 0;
    };
    // Accessor for the active capture context.
    IResultCapture& getResultCapture();
}
// #included from: catch_debugger.h
#define TWOBLUECUBES_CATCH_DEBUGGER_H_INCLUDED
// #included from: catch_platform.h
#define TWOBLUECUBES_CATCH_PLATFORM_H_INCLUDED
#if defined(__MAC_OS_X_VERSION_MIN_REQUIRED)
# define CATCH_PLATFORM_MAC
#elif defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
# define CATCH_PLATFORM_IPHONE
#elif defined(linux) || defined(__linux) || defined(__linux__)
# define CATCH_PLATFORM_LINUX
#elif defined(WIN32) || defined(__WIN32__) || defined(_WIN32) || defined(_MSC_VER)
# define CATCH_PLATFORM_WINDOWS
# if !defined(NOMINMAX) && !defined(CATCH_CONFIG_NO_NOMINMAX)
# define CATCH_DEFINES_NOMINMAX
# endif
# if !defined(WIN32_LEAN_AND_MEAN) && !defined(CATCH_CONFIG_NO_WIN32_LEAN_AND_MEAN)
# define CATCH_DEFINES_WIN32_LEAN_AND_MEAN
# endif
#endif
#include <string>
namespace Catch{
bool isDebuggerActive();
void writeToDebugConsole( std::string const& text );
}
#ifdef CATCH_PLATFORM_MAC
// The following code snippet based on:
// http://cocoawithlove.com/2008/03/break-into-debugger.html
#if defined(__ppc64__) || defined(__ppc__)
#define CATCH_TRAP() \
__asm__("li r0, 20\nsc\nnop\nli r0, 37\nli r4, 2\nsc\nnop\n" \
: : : "memory","r0","r3","r4" )
#else
#define CATCH_TRAP() __asm__("int $3\n" : : )
#endif
#elif defined(CATCH_PLATFORM_LINUX)
// If we can use inline assembler, do it because this allows us to break
// directly at the location of the failing check instead of breaking inside
// raise() called from it, i.e. one stack frame below.
#if defined(__GNUC__) && (defined(__i386) || defined(__x86_64))
#define CATCH_TRAP() asm volatile ("int $3")
#else // Fall back to the generic way.
#include <signal.h>
#define CATCH_TRAP() raise(SIGTRAP)
#endif
#elif defined(_MSC_VER)
#define CATCH_TRAP() __debugbreak()
#elif defined(__MINGW32__)
extern "C" __declspec(dllimport) void __stdcall DebugBreak();
#define CATCH_TRAP() DebugBreak()
#endif
#ifdef CATCH_TRAP
#define CATCH_BREAK_INTO_DEBUGGER() if( Catch::isDebuggerActive() ) { CATCH_TRAP(); }
#else
#define CATCH_BREAK_INTO_DEBUGGER() Catch::alwaysTrue();
#endif
// #included from: catch_interfaces_runner.h
#define TWOBLUECUBES_CATCH_INTERFACES_RUNNER_H_INCLUDED
namespace Catch {
    class TestCase;
    // Minimal interface the running context exposes to assertion handling;
    // aborting() reports whether the run should stop (e.g. after -x limit).
    struct IRunner {
        virtual ~IRunner();
        virtual bool aborting() const = 0;
    };
}
// #included from: catch_type_traits.hpp
#define TWOBLUECUBES_CATCH_TYPE_TRAITS_HPP_INCLUDED
#if defined(CATCH_CONFIG_CPP11_TYPE_TRAITS)
#include <type_traits>
#endif
namespace Catch {
// Minimal add_const / add_lvalue_reference shims for pre-C++11 compilers;
// simple aliases into <type_traits> when it is available.
#if defined(CATCH_CONFIG_CPP11_TYPE_TRAITS)
    template <typename T>
    using add_lvalue_reference = std::add_lvalue_reference<T>;
    template <typename T>
    using add_const = std::add_const<T>;
#else
    template <typename T>
    struct add_const {
        typedef const T type;
    };
    template <typename T>
    struct add_lvalue_reference {
        typedef T& type;
    };
    template <typename T>
    struct add_lvalue_reference<T&> {
        typedef T& type;
    };
    // No && overload, because that is C++11, in which case we have
    // proper type_traits implementation from the standard library
#endif
}
///////////////////////////////////////////////////////////////////////////////
// In the event of a failure works out if the debugger needs to be invoked
// and/or an exception thrown and takes appropriate action.
// This needs to be done as a macro so the debugger will stop in the user
// source code rather than in Catch library code
#define INTERNAL_CATCH_REACT( resultBuilder ) \
if( resultBuilder.shouldDebugBreak() ) CATCH_BREAK_INTO_DEBUGGER(); \
resultBuilder.react();
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_TEST( expr, resultDisposition, macroName ) \
do { \
Catch::ResultBuilder __catchResult( macroName, CATCH_INTERNAL_LINEINFO, #expr, resultDisposition ); \
try { \
CATCH_INTERNAL_SUPPRESS_PARENTHESES_WARNINGS \
( __catchResult <= expr ).endExpression(); \
} \
catch( ... ) { \
__catchResult.useActiveException( resultDisposition ); \
} \
INTERNAL_CATCH_REACT( __catchResult ) \
} while( Catch::isTrue( false && static_cast<bool>( !!(expr) ) ) ) // expr here is never evaluated at runtime but it forces the compiler to give it a look
// The double negation silences MSVC's C4800 warning, the static_cast forces short-circuit evaluation if the type has overloaded &&.
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_IF( expr, resultDisposition, macroName ) \
INTERNAL_CATCH_TEST( expr, resultDisposition, macroName ); \
if( Catch::getResultCapture().getLastResult()->succeeded() )
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_ELSE( expr, resultDisposition, macroName ) \
INTERNAL_CATCH_TEST( expr, resultDisposition, macroName ); \
if( !Catch::getResultCapture().getLastResult()->succeeded() )
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_NO_THROW( expr, resultDisposition, macroName ) \
do { \
Catch::ResultBuilder __catchResult( macroName, CATCH_INTERNAL_LINEINFO, #expr, resultDisposition ); \
try { \
static_cast<void>(expr); \
__catchResult.captureResult( Catch::ResultWas::Ok ); \
} \
catch( ... ) { \
__catchResult.useActiveException( resultDisposition ); \
} \
INTERNAL_CATCH_REACT( __catchResult ) \
} while( Catch::alwaysFalse() )
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_THROWS( expr, resultDisposition, matcher, macroName ) \
do { \
Catch::ResultBuilder __catchResult( macroName, CATCH_INTERNAL_LINEINFO, #expr, resultDisposition, #matcher ); \
if( __catchResult.allowThrows() ) \
try { \
static_cast<void>(expr); \
__catchResult.captureResult( Catch::ResultWas::DidntThrowException ); \
} \
catch( ... ) { \
__catchResult.captureExpectedException( matcher ); \
} \
else \
__catchResult.captureResult( Catch::ResultWas::Ok ); \
INTERNAL_CATCH_REACT( __catchResult ) \
} while( Catch::alwaysFalse() )
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_THROWS_AS( expr, exceptionType, resultDisposition, macroName ) \
do { \
Catch::ResultBuilder __catchResult( macroName, CATCH_INTERNAL_LINEINFO, #expr ", " #exceptionType, resultDisposition ); \
if( __catchResult.allowThrows() ) \
try { \
static_cast<void>(expr); \
__catchResult.captureResult( Catch::ResultWas::DidntThrowException ); \
} \
catch( Catch::add_const<Catch::add_lvalue_reference<exceptionType>::type>::type ) { \
__catchResult.captureResult( Catch::ResultWas::Ok ); \
} \
catch( ... ) { \
__catchResult.useActiveException( resultDisposition ); \
} \
else \
__catchResult.captureResult( Catch::ResultWas::Ok ); \
INTERNAL_CATCH_REACT( __catchResult ) \
} while( Catch::alwaysFalse() )
///////////////////////////////////////////////////////////////////////////////
#ifdef CATCH_CONFIG_VARIADIC_MACROS
#define INTERNAL_CATCH_MSG( messageType, resultDisposition, macroName, ... ) \
do { \
Catch::ResultBuilder __catchResult( macroName, CATCH_INTERNAL_LINEINFO, "", resultDisposition ); \
__catchResult << __VA_ARGS__ + ::Catch::StreamEndStop(); \
__catchResult.captureResult( messageType ); \
INTERNAL_CATCH_REACT( __catchResult ) \
} while( Catch::alwaysFalse() )
#else
#define INTERNAL_CATCH_MSG( messageType, resultDisposition, macroName, log ) \
do { \
Catch::ResultBuilder __catchResult( macroName, CATCH_INTERNAL_LINEINFO, "", resultDisposition ); \
__catchResult << log + ::Catch::StreamEndStop(); \
__catchResult.captureResult( messageType ); \
INTERNAL_CATCH_REACT( __catchResult ) \
} while( Catch::alwaysFalse() )
#endif
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_INFO( log, macroName ) \
Catch::ScopedMessage INTERNAL_CATCH_UNIQUE_NAME( scopedMessage ) = Catch::MessageBuilder( macroName, CATCH_INTERNAL_LINEINFO, Catch::ResultWas::Info ) << log;
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CHECK_THAT( arg, matcher, resultDisposition, macroName ) \
do { \
Catch::ResultBuilder __catchResult( macroName, CATCH_INTERNAL_LINEINFO, #arg ", " #matcher, resultDisposition ); \
try { \
__catchResult.captureMatch( arg, matcher, #matcher ); \
} catch( ... ) { \
__catchResult.useActiveException( resultDisposition | Catch::ResultDisposition::ContinueOnFailure ); \
} \
INTERNAL_CATCH_REACT( __catchResult ) \
} while( Catch::alwaysFalse() )
// #included from: internal/catch_section.h
#define TWOBLUECUBES_CATCH_SECTION_H_INCLUDED
// #included from: catch_section_info.h
#define TWOBLUECUBES_CATCH_SECTION_INFO_H_INCLUDED
// #included from: catch_totals.hpp
#define TWOBLUECUBES_CATCH_TOTALS_HPP_INCLUDED
#include <cstddef>
namespace Catch {
// Tally of assertion (or test-case) outcomes: passed, hard-failed, and
// "failed but ok" (failures allowed by [!mayfail]/[!shouldfail] tags).
struct Counts {
    Counts() : passed( 0 ), failed( 0 ), failedButOk( 0 ) {}
    // Component-wise difference of two tallies.
    Counts operator - ( Counts const& other ) const {
        Counts result;
        result.failedButOk = failedButOk - other.failedButOk;
        result.failed = failed - other.failed;
        result.passed = passed - other.passed;
        return result;
    }
    // Accumulate another tally into this one.
    Counts& operator += ( Counts const& other ) {
        failedButOk += other.failedButOk;
        failed += other.failed;
        passed += other.passed;
        return *this;
    }
    // Total number of recorded outcomes.
    std::size_t total() const {
        return failedButOk + failed + passed;
    }
    // True when nothing failed at all, not even tolerably.
    bool allPassed() const {
        return failedButOk == 0 && failed == 0;
    }
    // True when there were no hard failures ("failed but ok" still counts as ok).
    bool allOk() const {
        return failed == 0;
    }
    std::size_t passed;
    std::size_t failed;
    std::size_t failedButOk;
};
// Aggregates two Counts tallies: one for individual assertions and one
// for whole test cases.
struct Totals {
    // Component-wise difference: isolates what accumulated between two
    // points in a run.
    Totals operator - ( Totals const& other ) const {
        Totals result;
        result.testCases = testCases - other.testCases;
        result.assertions = assertions - other.assertions;
        return result;
    }
    // Difference from prevTotals, with the enclosing test case classified
    // by the worst assertion outcome seen since then (failed beats
    // failedButOk beats passed).
    Totals delta( Totals const& prevTotals ) const {
        Totals result = *this - prevTotals;
        if( result.assertions.failed > 0 )
            ++result.testCases.failed;
        else if( result.assertions.failedButOk > 0 )
            ++result.testCases.failedButOk;
        else
            ++result.testCases.passed;
        return result;
    }
    // Accumulate another set of totals into this one.
    Totals& operator += ( Totals const& other ) {
        testCases += other.testCases;
        assertions += other.assertions;
        return *this;
    }
    Counts assertions;
    Counts testCases;
};
}
#include <string>
namespace Catch {
struct SectionInfo {
SectionInfo
( SourceLineInfo const& _lineInfo,
std::string const& _name,
std::string const& _description = std::string() );
std::string name;
std::string description;
SourceLineInfo lineInfo;
};
struct SectionEndInfo {
SectionEndInfo( SectionInfo const& _sectionInfo, Counts const& _prevAssertions, double _durationInSeconds )
: sectionInfo( _sectionInfo ), prevAssertions( _prevAssertions ), durationInSeconds( _durationInSeconds )
{}
SectionInfo sectionInfo;
Counts prevAssertions;
double durationInSeconds;
};
} // end namespace Catch
// #included from: catch_timer.h
#define TWOBLUECUBES_CATCH_TIMER_H_INCLUDED
#ifdef CATCH_PLATFORM_WINDOWS
typedef unsigned long long uint64_t;
#else
#include <stdint.h>
#endif
namespace Catch {
class Timer {
public:
Timer() : m_ticks( 0 ) {}
void start();
unsigned int getElapsedMicroseconds() const;
unsigned int getElapsedMilliseconds() const;
double getElapsedSeconds() const;
private:
uint64_t m_ticks;
};
} // namespace Catch
#include <string>
namespace Catch {
class Section : NonCopyable {
public:
Section( SectionInfo const& info );
~Section();
// This indicates whether the section should be executed or not
operator bool() const;
private:
SectionInfo m_info;
std::string m_name;
Counts m_assertions;
bool m_sectionIncluded;
Timer m_timer;
};
} // end namespace Catch
#ifdef CATCH_CONFIG_VARIADIC_MACROS
#define INTERNAL_CATCH_SECTION( ... ) \
if( Catch::Section const& INTERNAL_CATCH_UNIQUE_NAME( catch_internal_Section ) = Catch::SectionInfo( CATCH_INTERNAL_LINEINFO, __VA_ARGS__ ) )
#else
#define INTERNAL_CATCH_SECTION( name, desc ) \
if( Catch::Section const& INTERNAL_CATCH_UNIQUE_NAME( catch_internal_Section ) = Catch::SectionInfo( CATCH_INTERNAL_LINEINFO, name, desc ) )
#endif
// #included from: internal/catch_generators.hpp
#define TWOBLUECUBES_CATCH_GENERATORS_HPP_INCLUDED
#include <iterator>
#include <vector>
#include <string>
#include <stdlib.h>
namespace Catch {
// Abstract interface for a value generator: a fixed-size, indexable
// sequence of T. Concrete generators are owned (and deleted) by the
// CompositeGenerator they are added to.
template<typename T>
struct IGenerator {
    virtual ~IGenerator() {}
    // Returns the element at the given position; index must be < size().
    virtual T getValue( std::size_t index ) const = 0;
    // Number of values this generator yields.
    virtual std::size_t size () const = 0;
};
// Generates the inclusive range [from, to].
template<typename T>
class BetweenGenerator : public IGenerator<T> {
public:
    BetweenGenerator( T from, T to ) : m_from( from ), m_to( to ){}
    virtual T getValue( std::size_t index ) const {
        // NOTE(review): index is narrowed to int before the addition —
        // assumes ranges never exceed INT_MAX elements; confirm.
        return m_from+static_cast<int>( index );
    }
    virtual std::size_t size() const {
        // +1 because both endpoints are included.
        return static_cast<std::size_t>( 1+m_to-m_from );
    }
private:
    T m_from;
    T m_to;
};
// Generates an explicit list of values, in insertion order.
template<typename T>
class ValuesGenerator : public IGenerator<T> {
public:
    ValuesGenerator(){}
    void add( T value ) {
        m_values.push_back( value );
    }
    virtual T getValue( std::size_t index ) const {
        return m_values[index];
    }
    virtual std::size_t size() const {
        return m_values.size();
    }
private:
    std::vector<T> m_values;
};
// Owns a list of IGenerator<T>s and presents them as one flat indexed
// range. Conversion to T fetches the current index for this generator
// site from the test context and returns the corresponding value.
template<typename T>
class CompositeGenerator {
public:
    CompositeGenerator() : m_totalSize( 0 ) {}
    // *** Move semantics, similar to auto_ptr ***
    // The "copy" constructor actually transfers ownership of the composed
    // generators out of other, leaving other empty (pre-C++11 move).
    CompositeGenerator( CompositeGenerator& other )
    :   m_fileInfo( other.m_fileInfo ),
        m_totalSize( 0 )
    {
        move( other );
    }
    // Records the file/line tag identifying this generator site; used as
    // the key when asking the context for the current index.
    CompositeGenerator& setFileInfo( const char* fileInfo ) {
        m_fileInfo = fileInfo;
        return *this;
    }
    // Owns the composed generators: delete them all on destruction.
    ~CompositeGenerator() {
        deleteAll( m_composed );
    }
    // Resolve the current overall index to the sub-generator containing it
    // and return that generator's value.
    operator T () const {
        size_t overallIndex = getCurrentContext().getGeneratorIndex( m_fileInfo, m_totalSize );
        typename std::vector<const IGenerator<T>*>::const_iterator it = m_composed.begin();
        typename std::vector<const IGenerator<T>*>::const_iterator itEnd = m_composed.end();
        // index tracks the running start offset of each sub-generator.
        for( size_t index = 0; it != itEnd; ++it )
        {
            const IGenerator<T>* generator = *it;
            if( overallIndex >= index && overallIndex < index + generator->size() )
            {
                return generator->getValue( overallIndex-index );
            }
            index += generator->size();
        }
        CATCH_INTERNAL_ERROR( "Indexed past end of generated range" );
        return T(); // Suppress spurious "not all control paths return a value" warning in Visual Studio - if you know how to fix this please do so
    }
    // Takes ownership of generator (deleted in our destructor).
    void add( const IGenerator<T>* generator ) {
        m_totalSize += generator->size();
        m_composed.push_back( generator );
    }
    // Append (and take ownership of) everything in other.
    CompositeGenerator& then( CompositeGenerator& other ) {
        move( other );
        return *this;
    }
    // Append a single extra value to the sequence.
    CompositeGenerator& then( T value ) {
        ValuesGenerator<T>* valuesGen = new ValuesGenerator<T>();
        valuesGen->add( value );
        add( valuesGen );
        return *this;
    }
private:
    // Steal other's generator pointers and clear its vector so it will not
    // double-delete them.
    // NOTE(review): other.m_totalSize is not reset here; appears harmless
    // because the source object is discarded after a transfer — confirm.
    void move( CompositeGenerator& other ) {
        std::copy( other.m_composed.begin(), other.m_composed.end(), std::back_inserter( m_composed ) );
        m_totalSize += other.m_totalSize;
        other.m_composed.clear();
    }
    std::vector<const IGenerator<T>*> m_composed;
    std::string m_fileInfo;
    size_t m_totalSize;
};
namespace Generators
{
template<typename T>
CompositeGenerator<T> between( T from, T to ) {
CompositeGenerator<T> generators;
generators.add( new BetweenGenerator<T>( from, to ) );
return generators;
}
template<typename T>
CompositeGenerator<T> values( T val1, T val2 ) {
CompositeGenerator<T> generators;
ValuesGenerator<T>* valuesGen = new ValuesGenerator<T>();
valuesGen->add( val1 );
valuesGen->add( val2 );
generators.add( valuesGen );
return generators;
}
template<typename T>
CompositeGenerator<T> values( T val1, T val2, T val3 ){
CompositeGenerator<T> generators;
ValuesGenerator<T>* valuesGen = new ValuesGenerator<T>();
valuesGen->add( val1 );
valuesGen->add( val2 );
valuesGen->add( val3 );
generators.add( valuesGen );
return generators;
}
template<typename T>
CompositeGenerator<T> values( T val1, T val2, T val3, T val4 ) {
CompositeGenerator<T> generators;
ValuesGenerator<T>* valuesGen = new ValuesGenerator<T>();
valuesGen->add( val1 );
valuesGen->add( val2 );
valuesGen->add( val3 );
valuesGen->add( val4 );
generators.add( valuesGen );
return generators;
}
} // end namespace Generators
using namespace Generators;
} // end namespace Catch
#define INTERNAL_CATCH_LINESTR2( line ) #line
#define INTERNAL_CATCH_LINESTR( line ) INTERNAL_CATCH_LINESTR2( line )
#define INTERNAL_CATCH_GENERATE( expr ) expr.setFileInfo( __FILE__ "(" INTERNAL_CATCH_LINESTR( __LINE__ ) ")" )
// #included from: internal/catch_interfaces_exception.h
#define TWOBLUECUBES_CATCH_INTERFACES_EXCEPTION_H_INCLUDED
#include <string>
#include <vector>
// #included from: catch_interfaces_registry_hub.h
#define TWOBLUECUBES_CATCH_INTERFACES_REGISTRY_HUB_H_INCLUDED
#include <string>
namespace Catch {
class TestCase;
struct ITestCaseRegistry;
struct IExceptionTranslatorRegistry;
struct IExceptionTranslator;
struct IReporterRegistry;
struct IReporterFactory;
struct IRegistryHub {
virtual ~IRegistryHub();
virtual IReporterRegistry const& getReporterRegistry() const = 0;
virtual ITestCaseRegistry const& getTestCaseRegistry() const = 0;
virtual IExceptionTranslatorRegistry& getExceptionTranslatorRegistry() = 0;
};
struct IMutableRegistryHub {
virtual ~IMutableRegistryHub();
virtual void registerReporter( std::string const& name, Ptr<IReporterFactory> const& factory ) = 0;
virtual void registerListener( Ptr<IReporterFactory> const& factory ) = 0;
virtual void registerTest( TestCase const& testInfo ) = 0;
virtual void registerTranslator( const IExceptionTranslator* translator ) = 0;
};
IRegistryHub& getRegistryHub();
IMutableRegistryHub& getMutableRegistryHub();
void cleanUp();
std::string translateActiveException();
}
namespace Catch {
typedef std::string(*exceptionTranslateFunction)();
struct IExceptionTranslator;
typedef std::vector<const IExceptionTranslator*> ExceptionTranslators;
struct IExceptionTranslator {
virtual ~IExceptionTranslator();
virtual std::string translate( ExceptionTranslators::const_iterator it, ExceptionTranslators::const_iterator itEnd ) const = 0;
};
struct IExceptionTranslatorRegistry {
virtual ~IExceptionTranslatorRegistry();
virtual std::string translateActiveException() const = 0;
};
class ExceptionTranslatorRegistrar {
template<typename T>
class ExceptionTranslator : public IExceptionTranslator {
public:
ExceptionTranslator( std::string(*translateFunction)( T& ) )
: m_translateFunction( translateFunction )
{}
virtual std::string translate( ExceptionTranslators::const_iterator it, ExceptionTranslators::const_iterator itEnd ) const CATCH_OVERRIDE {
try {
if( it == itEnd )
throw;
else
return (*it)->translate( it+1, itEnd );
}
catch( T& ex ) {
return m_translateFunction( ex );
}
}
protected:
std::string(*m_translateFunction)( T& );
};
public:
template<typename T>
ExceptionTranslatorRegistrar( std::string(*translateFunction)( T& ) ) {
getMutableRegistryHub().registerTranslator
( new ExceptionTranslator<T>( translateFunction ) );
}
};
}
///////////////////////////////////////////////////////////////////////////////
#define INTERNAL_CATCH_TRANSLATE_EXCEPTION2( translatorName, signature ) \
static std::string translatorName( signature ); \
namespace{ Catch::ExceptionTranslatorRegistrar INTERNAL_CATCH_UNIQUE_NAME( catch_internal_ExceptionRegistrar )( &translatorName ); }\
static std::string translatorName( signature )
#define INTERNAL_CATCH_TRANSLATE_EXCEPTION( signature ) INTERNAL_CATCH_TRANSLATE_EXCEPTION2( INTERNAL_CATCH_UNIQUE_NAME( catch_internal_ExceptionTranslator ), signature )
// #included from: internal/catch_approx.hpp
#define TWOBLUECUBES_CATCH_APPROX_HPP_INCLUDED
#include <cmath>
#include <limits>
#if defined(CATCH_CONFIG_CPP11_TYPE_TRAITS)
#include <type_traits>
#endif
namespace Catch {
namespace Detail {
class Approx {
public:
explicit Approx ( double value )
: m_epsilon( std::numeric_limits<float>::epsilon()*100 ),
m_scale( 1.0 ),
m_value( value )
{}
Approx( Approx const& other )
: m_epsilon( other.m_epsilon ),
m_scale( other.m_scale ),
m_value( other.m_value )
{}
static Approx custom() {
return Approx( 0 );
}
Approx operator()( double value ) {
Approx approx( value );
approx.epsilon( m_epsilon );
approx.scale( m_scale );
return approx;
}
#if defined(CATCH_CONFIG_CPP11_TYPE_TRAITS)
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator == ( const T& lhs, Approx const& rhs ) {
// Thanks to Richard Harris for his help refining this formula
auto lhs_v = double(lhs);
return std::fabs( lhs_v - rhs.m_value ) < rhs.m_epsilon * (rhs.m_scale + (std::max)( std::fabs(lhs_v), std::fabs(rhs.m_value) ) );
}
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator == ( Approx const& lhs, const T& rhs ) {
return operator==( rhs, lhs );
}
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator != ( T lhs, Approx const& rhs ) {
return !operator==( lhs, rhs );
}
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator != ( Approx const& lhs, T rhs ) {
return !operator==( rhs, lhs );
}
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator <= ( T lhs, Approx const& rhs )
{
return double(lhs) < rhs.m_value || lhs == rhs;
}
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator <= ( Approx const& lhs, T rhs )
{
return lhs.m_value < double(rhs) || lhs == rhs;
}
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator >= ( T lhs, Approx const& rhs )
{
return double(lhs) > rhs.m_value || lhs == rhs;
}
template <typename T, typename = typename std::enable_if<std::is_constructible<double, T>::value>::type>
friend bool operator >= ( Approx const& lhs, T rhs )
{
return lhs.m_value > double(rhs) || lhs == rhs;
}
#else
friend bool operator == ( double lhs, Approx const& rhs ) {
// Thanks to Richard Harris for his help refining this formula
return std::fabs( lhs - rhs.m_value ) < rhs.m_epsilon * (rhs.m_scale + (std::max)( std::fabs(lhs), std::fabs(rhs.m_value) ) );
}
friend bool operator == ( Approx const& lhs, double rhs ) {
return operator==( rhs, lhs );
}
friend bool operator != ( double lhs, Approx const& rhs ) {
return !operator==( lhs, rhs );
}
friend bool operator != ( Approx const& lhs, double rhs ) {
return !operator==( rhs, lhs );
}
friend bool operator <= ( double lhs, Approx const& rhs )
{
return lhs < rhs.m_value || lhs == rhs;
}
friend bool operator <= ( Approx const& lhs, double rhs )
{
return lhs.m_value < rhs || lhs == rhs;
}
friend bool operator >= ( double lhs, Approx const& rhs )
{
return lhs > rhs.m_value || lhs == rhs;
}
friend bool operator >= ( Approx const& lhs, double rhs )
{
return lhs.m_value > rhs || lhs == rhs;
}
#endif
Approx& epsilon( double newEpsilon ) {
m_epsilon = newEpsilon;
return *this;
}
Approx& scale( double newScale ) {
m_scale = newScale;
return *this;
}
std::string toString() const {
std::ostringstream oss;
oss << "Approx( " << Catch::toString( m_value ) << " )";
return oss.str();
}
private:
double m_epsilon;
double m_scale;
double m_value;
};
}
template<>
inline std::string toString<Detail::Approx>( Detail::Approx const& value ) {
return value.toString();
}
} // end namespace Catch
// #included from: internal/catch_interfaces_tag_alias_registry.h
#define TWOBLUECUBES_CATCH_INTERFACES_TAG_ALIAS_REGISTRY_H_INCLUDED
// #included from: catch_tag_alias.h
#define TWOBLUECUBES_CATCH_TAG_ALIAS_H_INCLUDED
#include <string>
namespace Catch {
struct TagAlias {
TagAlias( std::string _tag, SourceLineInfo _lineInfo ) : tag( _tag ), lineInfo( _lineInfo ) {}
std::string tag;
SourceLineInfo lineInfo;
};
struct RegistrarForTagAliases {
RegistrarForTagAliases( char const* alias, char const* tag, SourceLineInfo const& lineInfo );
};
} // end namespace Catch
#define CATCH_REGISTER_TAG_ALIAS( alias, spec ) namespace{ Catch::RegistrarForTagAliases INTERNAL_CATCH_UNIQUE_NAME( AutoRegisterTagAlias )( alias, spec, CATCH_INTERNAL_LINEINFO ); }
// #included from: catch_option.hpp
#define TWOBLUECUBES_CATCH_OPTION_HPP_INCLUDED
namespace Catch {
// An optional type
// Optional wrapper: holds either no value or one T, constructed in-place
// in an internal char buffer via placement new (no heap allocation).
// nullableValue points into storage when engaged, is CATCH_NULL otherwise.
// NOTE(review): storage is a plain char array with no alignment control —
// assumes T has no stricter alignment requirement; confirm for
// over-aligned types (pre-C++11 code, no alignas available).
template<typename T>
class Option {
public:
    Option() : nullableValue( CATCH_NULL ) {}
    Option( T const& _value )
    : nullableValue( new( storage ) T( _value ) )
    {}
    Option( Option const& _other )
    : nullableValue( _other ? new( storage ) T( *_other ) : CATCH_NULL )
    {}
    ~Option() {
        reset();
    }
    Option& operator= ( Option const& _other ) {
        // Self-assignment guard: reset() would destroy the value we are
        // about to copy from.
        if( &_other != this ) {
            reset();
            if( _other )
                nullableValue = new( storage ) T( *_other );
        }
        return *this;
    }
    Option& operator = ( T const& _value ) {
        reset();
        nullableValue = new( storage ) T( _value );
        return *this;
    }
    // Destroy the held value (explicit destructor call — it was placement
    // new'd) and mark the Option as empty.
    void reset() {
        if( nullableValue )
            nullableValue->~T();
        nullableValue = CATCH_NULL;
    }
    // Unchecked accessors: behaviour is undefined when the Option is empty.
    T& operator*() { return *nullableValue; }
    T const& operator*() const { return *nullableValue; }
    T* operator->() { return nullableValue; }
    const T* operator->() const { return nullableValue; }
    // Returns the held value, or defaultValue when empty.
    T valueOr( T const& defaultValue ) const {
        return nullableValue ? *nullableValue : defaultValue;
    }
    bool some() const { return nullableValue != CATCH_NULL; }
    bool none() const { return nullableValue == CATCH_NULL; }
    bool operator !() const { return nullableValue == CATCH_NULL; }
    // Safe-bool idiom (pre-C++11 explicit operator bool substitute).
    operator SafeBool::type() const {
        return SafeBool::makeSafe( some() );
    }
private:
    T* nullableValue;
    char storage[sizeof(T)];
};
} // end namespace Catch
namespace Catch {
struct ITagAliasRegistry {
virtual ~ITagAliasRegistry();
virtual Option<TagAlias> find( std::string const& alias ) const = 0;
virtual std::string expandAliases( std::string const& unexpandedTestSpec ) const = 0;
static ITagAliasRegistry const& get();
};
} // end namespace Catch
// These files are included here so the single_include script doesn't put them
// in the conditionally compiled sections
// #included from: internal/catch_test_case_info.h
#define TWOBLUECUBES_CATCH_TEST_CASE_INFO_H_INCLUDED
#include <string>
#include <set>
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpadded"
#endif
namespace Catch {
struct ITestCase;
struct TestCaseInfo {
enum SpecialProperties{
None = 0,
IsHidden = 1 << 1,
ShouldFail = 1 << 2,
MayFail = 1 << 3,
Throws = 1 << 4,
NonPortable = 1 << 5
};
TestCaseInfo( std::string const& _name,
std::string const& _className,
std::string const& _description,
std::set<std::string> const& _tags,
SourceLineInfo const& _lineInfo );
TestCaseInfo( TestCaseInfo const& other );
friend void setTags( TestCaseInfo& testCaseInfo, std::set<std::string> const& tags );
bool isHidden() const;
bool throws() const;
bool okToFail() const;
bool expectedToFail() const;
std::string name;
std::string className;
std::string description;
std::set<std::string> tags;
std::set<std::string> lcaseTags;
std::string tagsAsString;
SourceLineInfo lineInfo;
SpecialProperties properties;
};
class TestCase : public TestCaseInfo {
public:
TestCase( ITestCase* testCase, TestCaseInfo const& info );
TestCase( TestCase const& other );
TestCase withName( std::string const& _newName ) const;
void invoke() const;
TestCaseInfo const& getTestCaseInfo() const;
void swap( TestCase& other );
bool operator == ( TestCase const& other ) const;
bool operator < ( TestCase const& other ) const;
TestCase& operator = ( TestCase const& other );
private:
Ptr<ITestCase> test;
};
TestCase makeTestCase( ITestCase* testCase,
std::string const& className,
std::string const& name,
std::string const& description,
SourceLineInfo const& lineInfo );
}
#ifdef __clang__
#pragma clang diagnostic pop
#endif
#ifdef __OBJC__
// #included from: internal/catch_objc.hpp
#define TWOBLUECUBES_CATCH_OBJC_HPP_INCLUDED
#import <objc/runtime.h>
#include <string>
// NB. Any general catch headers included here must be included
// in catch.hpp first to make sure they are included by the single
// header for non obj-usage
///////////////////////////////////////////////////////////////////////////////
// This protocol is really only here for (self) documenting purposes, since
// all its methods are optional.
@protocol OcFixture
@optional
-(void) setUp;
-(void) tearDown;
@end
namespace Catch {
class OcMethod : public SharedImpl<ITestCase> {
public:
OcMethod( Class cls, SEL sel ) : m_cls( cls ), m_sel( sel ) {}
virtual void invoke() const {
id obj = [[m_cls alloc] init];
performOptionalSelector( obj, @selector(setUp) );
performOptionalSelector( obj, m_sel );
performOptionalSelector( obj, @selector(tearDown) );
arcSafeRelease( obj );
}
private:
virtual ~OcMethod() {}
Class m_cls;
SEL m_sel;
};
namespace Detail{
inline std::string getAnnotation( Class cls,
std::string const& annotationName,
std::string const& testCaseName ) {
NSString* selStr = [[NSString alloc] initWithFormat:@"Catch_%s_%s", annotationName.c_str(), testCaseName.c_str()];
SEL sel = NSSelectorFromString( selStr );
arcSafeRelease( selStr );
id value = performOptionalSelector( cls, sel );
if( value )
return [(NSString*)value UTF8String];
return "";
}
}
inline size_t registerTestMethods() {
size_t noTestMethods = 0;
int noClasses = objc_getClassList( CATCH_NULL, 0 );
Class* classes = (CATCH_UNSAFE_UNRETAINED Class *)malloc( sizeof(Class) * noClasses);
objc_getClassList( classes, noClasses );
for( int c = 0; c < noClasses; c++ ) {
Class cls = classes[c];
{
u_int count;
Method* methods = class_copyMethodList( cls, &count );
for( u_int m = 0; m < count ; m++ ) {
SEL selector = method_getName(methods[m]);
std::string methodName = sel_getName(selector);
if( startsWith( methodName, "Catch_TestCase_" ) ) {
std::string testCaseName = methodName.substr( 15 );
std::string name = Detail::getAnnotation( cls, "Name", testCaseName );
std::string desc = Detail::getAnnotation( cls, "Description", testCaseName );
const char* className = class_getName( cls );
getMutableRegistryHub().registerTest( makeTestCase( new OcMethod( cls, selector ), className, name.c_str(), desc.c_str(), SourceLineInfo() ) );
noTestMethods++;
}
}
free(methods);
}
}
return noTestMethods;
}
namespace Matchers {
namespace Impl {
namespace NSStringMatchers {
// Base for the NSString matchers: retains its own copy of the substring
// being matched against and releases it when the matcher is destroyed.
template<typename MatcherT>
struct StringHolder : MatcherImpl<MatcherT, NSString*>{
    StringHolder( NSString* substr ) : m_substr( [substr copy] ){}
    StringHolder( StringHolder const& other ) : m_substr( [other.m_substr copy] ){}
    // Fixed: this was declared as a default *constructor*, which released
    // the uninitialised m_substr pointer. It must be the destructor, which
    // balances the [substr copy] performed in the constructors above.
    ~StringHolder() {
        arcSafeRelease( m_substr );
    }
    NSString* m_substr;
};
struct Equals : StringHolder<Equals> {
Equals( NSString* substr ) : StringHolder( substr ){}
virtual bool match( ExpressionType const& str ) const {
return (str != nil || m_substr == nil ) &&
[str isEqualToString:m_substr];
}
virtual std::string toString() const {
return "equals string: " + Catch::toString( m_substr );
}
};
struct Contains : StringHolder<Contains> {
Contains( NSString* substr ) : StringHolder( substr ){}
virtual bool match( ExpressionType const& str ) const {
return (str != nil || m_substr == nil ) &&
[str rangeOfString:m_substr].location != NSNotFound;
}
virtual std::string toString() const {
return "contains string: " + Catch::toString( m_substr );
}
};
struct StartsWith : StringHolder<StartsWith> {
StartsWith( NSString* substr ) : StringHolder( substr ){}
virtual bool match( ExpressionType const& str ) const {
return (str != nil || m_substr == nil ) &&
[str rangeOfString:m_substr].location == 0;
}
virtual std::string toString() const {
return "starts with: " + Catch::toString( m_substr );
}
};
struct EndsWith : StringHolder<EndsWith> {
EndsWith( NSString* substr ) : StringHolder( substr ){}
virtual bool match( ExpressionType const& str ) const {
return (str != nil || m_substr == nil ) &&
[str rangeOfString:m_substr].location == [str length] - [m_substr length];
}
virtual std::string toString() const {
return "ends with: " + Catch::toString( m_substr );
}
};
} // namespace NSStringMatchers
} // namespace Impl
inline Impl::NSStringMatchers::Equals
Equals( NSString* substr ){ return Impl::NSStringMatchers::Equals( substr ); }
inline Impl::NSStringMatchers::Contains
Contains( NSString* substr ){ return Impl::NSStringMatchers::Contains( substr ); }
inline Impl::NSStringMatchers::StartsWith
StartsWith( NSString* substr ){ return Impl::NSStringMatchers::StartsWith( substr ); }
inline Impl::NSStringMatchers::EndsWith
EndsWith( NSString* substr ){ return Impl::NSStringMatchers::EndsWith( substr ); }
} // namespace Matchers
using namespace Matchers;
} // namespace Catch
///////////////////////////////////////////////////////////////////////////////
#define OC_TEST_CASE( name, desc )\
+(NSString*) INTERNAL_CATCH_UNIQUE_NAME( Catch_Name_test ) \
{\
return @ name; \
}\
+(NSString*) INTERNAL_CATCH_UNIQUE_NAME( Catch_Description_test ) \
{ \
return @ desc; \
} \
-(void) INTERNAL_CATCH_UNIQUE_NAME( Catch_TestCase_test )
#endif
#ifdef CATCH_IMPL
// #included from: internal/catch_impl.hpp
#define TWOBLUECUBES_CATCH_IMPL_HPP_INCLUDED
// Collect all the implementation files together here
// These are the equivalent of what would usually be cpp files
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wweak-vtables"
#endif
// #included from: ../catch_session.hpp
#define TWOBLUECUBES_CATCH_RUNNER_HPP_INCLUDED
// #included from: internal/catch_commandline.hpp
#define TWOBLUECUBES_CATCH_COMMANDLINE_HPP_INCLUDED
// #included from: catch_config.hpp
#define TWOBLUECUBES_CATCH_CONFIG_HPP_INCLUDED
// #included from: catch_test_spec_parser.hpp
#define TWOBLUECUBES_CATCH_TEST_SPEC_PARSER_HPP_INCLUDED
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpadded"
#endif
// #included from: catch_test_spec.hpp
#define TWOBLUECUBES_CATCH_TEST_SPEC_HPP_INCLUDED
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpadded"
#endif
// #included from: catch_wildcard_pattern.hpp
#define TWOBLUECUBES_CATCH_WILDCARD_PATTERN_HPP_INCLUDED
#include <stdexcept>
namespace Catch
{
// Matches strings against a pattern that may carry a '*' wildcard at the
// start and/or the end (no mid-pattern wildcards). Comparison is
// optionally case-insensitive.
class WildcardPattern {
    enum WildcardPosition {
        NoWildcard = 0,
        WildcardAtStart = 1,
        WildcardAtEnd = 2,
        WildcardAtBothEnds = WildcardAtStart | WildcardAtEnd
    };
public:
    // Strips any leading/trailing '*' from the pattern and records which
    // ends carried one; the stored pattern is pre-lowercased when the
    // comparison is case-insensitive.
    WildcardPattern( std::string const& pattern, CaseSensitive::Choice caseSensitivity )
    :   m_caseSensitivity( caseSensitivity ),
        m_wildcard( NoWildcard ),
        m_pattern( adjustCase( pattern ) )
    {
        if( startsWith( m_pattern, '*' ) ) {
            m_pattern = m_pattern.substr( 1 );
            m_wildcard = WildcardAtStart;
        }
        if( endsWith( m_pattern, '*' ) ) {
            m_pattern = m_pattern.substr( 0, m_pattern.size()-1 );
            // OR rather than assign: both wildcards may be present.
            m_wildcard = static_cast<WildcardPosition>( m_wildcard | WildcardAtEnd );
        }
    }
    virtual ~WildcardPattern();
    // Dispatch on wildcard placement: exact match, suffix match, prefix
    // match, or substring match respectively.
    virtual bool matches( std::string const& str ) const {
        switch( m_wildcard ) {
            case NoWildcard:
                return m_pattern == adjustCase( str );
            case WildcardAtStart:
                return endsWith( adjustCase( str ), m_pattern );
            case WildcardAtEnd:
                return startsWith( adjustCase( str ), m_pattern );
            case WildcardAtBothEnds:
                return contains( adjustCase( str ), m_pattern );
        }
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunreachable-code"
#endif
        // Unreachable with a well-formed enum value; kept so every control
        // path returns or throws.
        throw std::logic_error( "Unknown enum" );
#ifdef __clang__
#pragma clang diagnostic pop
#endif
    }
private:
    // Lowercases str when matching case-insensitively; identity otherwise.
    std::string adjustCase( std::string const& str ) const {
        return m_caseSensitivity == CaseSensitive::No ? toLower( str ) : str;
    }
    CaseSensitive::Choice m_caseSensitivity;
    WildcardPosition m_wildcard;
    std::string m_pattern;
};
}
#include <string>
#include <vector>
namespace Catch {
// A parsed test-selection specification: a disjunction (OR) of Filters,
// where each Filter is a conjunction (AND) of Patterns matching test
// names or tags (optionally negated).
class TestSpec {
    // Base for the match predicates below; ref-counted via SharedImpl.
    struct Pattern : SharedImpl<> {
        virtual ~Pattern();
        virtual bool matches( TestCaseInfo const& testCase ) const = 0;
    };
    // Matches the test case name against a (possibly wildcarded) pattern,
    // case-insensitively.
    class NamePattern : public Pattern {
    public:
        NamePattern( std::string const& name )
        : m_wildcardPattern( toLower( name ), CaseSensitive::No )
        {}
        virtual ~NamePattern();
        virtual bool matches( TestCaseInfo const& testCase ) const {
            return m_wildcardPattern.matches( toLower( testCase.name ) );
        }
    private:
        WildcardPattern m_wildcardPattern;
    };
    // Matches when the test case carries the given tag (compared against
    // the pre-lowercased tag set).
    class TagPattern : public Pattern {
    public:
        TagPattern( std::string const& tag ) : m_tag( toLower( tag ) ) {}
        virtual ~TagPattern();
        virtual bool matches( TestCaseInfo const& testCase ) const {
            return testCase.lcaseTags.find( m_tag ) != testCase.lcaseTags.end();
        }
    private:
        std::string m_tag;
    };
    // Decorator that negates another pattern (the ~ / exclude syntax).
    class ExcludedPattern : public Pattern {
    public:
        ExcludedPattern( Ptr<Pattern> const& underlyingPattern ) : m_underlyingPattern( underlyingPattern ) {}
        virtual ~ExcludedPattern();
        virtual bool matches( TestCaseInfo const& testCase ) const { return !m_underlyingPattern->matches( testCase ); }
    private:
        Ptr<Pattern> m_underlyingPattern;
    };
    struct Filter {
        std::vector<Ptr<Pattern> > m_patterns;
        bool matches( TestCaseInfo const& testCase ) const {
            // All patterns in a filter must match for the filter to be a match
            for( std::vector<Ptr<Pattern> >::const_iterator it = m_patterns.begin(), itEnd = m_patterns.end(); it != itEnd; ++it ) {
                if( !(*it)->matches( testCase ) )
                    return false;
            }
            return true;
        }
    };
public:
    // True when any selection was specified; an empty spec selects nothing
    // explicitly (callers decide the default behaviour).
    bool hasFilters() const {
        return !m_filters.empty();
    }
    bool matches( TestCaseInfo const& testCase ) const {
        // A TestSpec matches if any filter matches
        for( std::vector<Filter>::const_iterator it = m_filters.begin(), itEnd = m_filters.end(); it != itEnd; ++it )
            if( it->matches( testCase ) )
                return true;
        return false;
    }
private:
    std::vector<Filter> m_filters;
    // TestSpecParser populates m_filters directly while parsing.
    friend class TestSpecParser;
};
}
#ifdef __clang__
#pragma clang diagnostic pop
#endif
namespace Catch {
    // Parses command-line test specs (e.g. ~"some name"[tag1],[tag2]) into a
    // TestSpec. Character-driven state machine; one instance may parse several
    // arguments - each parse() call appends to the accumulated spec.
    class TestSpecParser {
        enum Mode{ None, Name, QuotedName, Tag, EscapedName };
        Mode m_mode;
        bool m_exclusion;       // next pattern is negated (preceded by ~ or "exclude:")
        std::size_t m_start, m_pos;     // current token start / current scan position
        std::string m_arg;              // argument being parsed, after alias expansion
        std::vector<std::size_t> m_escapeChars;     // positions of backslashes to strip from the token
        TestSpec::Filter m_currentFilter;
        TestSpec m_testSpec;
        ITagAliasRegistry const* m_tagAliases;
    public:
        TestSpecParser( ITagAliasRegistry const& tagAliases ) : m_tagAliases( &tagAliases ) {}
        TestSpecParser& parse( std::string const& arg ) {
            m_mode = None;
            m_exclusion = false;
            m_start = std::string::npos;
            m_arg = m_tagAliases->expandAliases( arg );
            m_escapeChars.clear();
            for( m_pos = 0; m_pos < m_arg.size(); ++m_pos )
                visitChar( m_arg[m_pos] );
            // Flush a trailing, unterminated name token.
            if( m_mode == Name )
                addPattern<TestSpec::NamePattern>();
            return *this;
        }
        TestSpec testSpec() {
            addFilter();
            return m_testSpec;
        }
    private:
        void visitChar( char c ) {
            if( m_mode == None ) {
                switch( c ) {
                case ' ': return;
                case '~': m_exclusion = true; return;
                case '[': return startNewMode( Tag, ++m_pos );
                case '"': return startNewMode( QuotedName, ++m_pos );
                case '\\': return escape();
                default: startNewMode( Name, m_pos ); break;
                }
            }
            if( m_mode == Name ) {
                if( c == ',' ) {
                    // ',' separates filters (OR semantics between filters)
                    addPattern<TestSpec::NamePattern>();
                    addFilter();
                }
                else if( c == '[' ) {
                    // '[' starts a tag; a literal "exclude:" just before it
                    // negates the upcoming tag rather than naming a test.
                    if( subString() == "exclude:" )
                        m_exclusion = true;
                    else
                        addPattern<TestSpec::NamePattern>();
                    startNewMode( Tag, ++m_pos );
                }
                else if( c == '\\' )
                    escape();
            }
            else if( m_mode == EscapedName )
                m_mode = Name;
            else if( m_mode == QuotedName && c == '"' )
                addPattern<TestSpec::NamePattern>();
            else if( m_mode == Tag && c == ']' )
                addPattern<TestSpec::TagPattern>();
        }
        void startNewMode( Mode mode, std::size_t start ) {
            m_mode = mode;
            m_start = start;
        }
        void escape() {
            if( m_mode == None )
                m_start = m_pos;
            m_mode = EscapedName;
            m_escapeChars.push_back( m_pos );
        }
        // Text of the token currently being scanned: [m_start, m_pos).
        std::string subString() const { return m_arg.substr( m_start, m_pos - m_start ); }
        template<typename T>
        void addPattern() {
            std::string token = subString();
            // Strip the recorded backslashes; the '-i' offsets compensate for
            // the i characters already removed at earlier positions.
            for( size_t i = 0; i < m_escapeChars.size(); ++i )
                token = token.substr( 0, m_escapeChars[i]-i ) + token.substr( m_escapeChars[i]+1-i );
            m_escapeChars.clear();
            if( startsWith( token, "exclude:" ) ) {
                m_exclusion = true;
                token = token.substr( 8 );
            }
            if( !token.empty() ) {
                Ptr<TestSpec::Pattern> pattern = new T( token );
                if( m_exclusion )
                    pattern = new TestSpec::ExcludedPattern( pattern );
                m_currentFilter.m_patterns.push_back( pattern );
            }
            m_exclusion = false;
            m_mode = None;
        }
        // Commit the current filter (if non-empty) to the spec and reset it.
        void addFilter() {
            if( !m_currentFilter.m_patterns.empty() ) {
                m_testSpec.m_filters.push_back( m_currentFilter );
                m_currentFilter = TestSpec::Filter();
            }
        }
    };
inline TestSpec parseTestSpec( std::string const& arg ) {
return TestSpecParser( ITagAliasRegistry::get() ).parse( arg ).testSpec();
}
} // namespace Catch
#ifdef __clang__
#pragma clang diagnostic pop
#endif
// #included from: catch_interfaces_config.h
#define TWOBLUECUBES_CATCH_INTERFACES_CONFIG_H_INCLUDED
#include <iosfwd>
#include <string>
#include <vector>
namespace Catch {
struct Verbosity { enum Level {
NoOutput = 0,
Quiet,
Normal
}; };
struct WarnAbout { enum What {
Nothing = 0x00,
NoAssertions = 0x01
}; };
struct ShowDurations { enum OrNot {
DefaultForReporter,
Always,
Never
}; };
struct RunTests { enum InWhatOrder {
InDeclarationOrder,
InLexicographicalOrder,
InRandomOrder
}; };
struct UseColour { enum YesOrNo {
Auto,
Yes,
No
}; };
    // Forward declaration - the full definition appears earlier in this header.
    class TestSpec;
    // Read-only view of the effective configuration, ref-counted via IShared
    // and consumed by the runner and reporters. Implemented by Config below.
    struct IConfig : IShared {
        virtual ~IConfig();
        virtual bool allowThrows() const = 0;
        virtual std::ostream& stream() const = 0;
        virtual std::string name() const = 0;
        virtual bool includeSuccessfulResults() const = 0;
        virtual bool shouldDebugBreak() const = 0;
        virtual bool warnAboutMissingAssertions() const = 0;
        virtual int abortAfter() const = 0;
        virtual bool showInvisibles() const = 0;
        virtual ShowDurations::OrNot showDurations() const = 0;
        virtual TestSpec const& testSpec() const = 0;
        virtual RunTests::InWhatOrder runOrder() const = 0;
        virtual unsigned int rngSeed() const = 0;
        virtual UseColour::YesOrNo useColour() const = 0;
        virtual std::vector<std::string> const& getSectionsToRun() const = 0;
    };
}
// #included from: catch_stream.h
#define TWOBLUECUBES_CATCH_STREAM_H_INCLUDED
// #included from: catch_streambuf.h
#define TWOBLUECUBES_CATCH_STREAMBUF_H_INCLUDED
#include <streambuf>
namespace Catch {
    // Base for custom stream buffers so they can be owned and deleted
    // polymorphically through a base-class pointer.
    class StreamBufBase : public std::streambuf {
    public:
        virtual ~StreamBufBase() CATCH_NOEXCEPT;
    };
}
#include <streambuf>
#include <ostream>
#include <fstream>
#include <memory>
namespace Catch {
    // Accessors for the standard streams (definitions not visible here -
    // presumably indirections so output can be redirected; confirm at the
    // definition site).
    std::ostream& cout();
    std::ostream& cerr();
    // Abstraction over an output destination owned by the Config.
    struct IStream {
        virtual ~IStream() CATCH_NOEXCEPT;
        virtual std::ostream& stream() const = 0;
    };
    // Writes output to a named file (opened by the constructor).
    class FileStream : public IStream {
        mutable std::ofstream m_ofs;
    public:
        FileStream( std::string const& filename );
        virtual ~FileStream() CATCH_NOEXCEPT;
    public: // IStream
        virtual std::ostream& stream() const CATCH_OVERRIDE;
    };
    // Writes to standard output. m_os is mutable so stream() can stay const.
    class CoutStream : public IStream {
        mutable std::ostream m_os;
    public:
        CoutStream();
        virtual ~CoutStream() CATCH_NOEXCEPT;
    public: // IStream
        virtual std::ostream& stream() const CATCH_OVERRIDE;
    };
    // Writes through a custom stream buffer - selected via the "%debug" output
    // name in Config::openStream(); presumably routes to the debugger output
    // on supporting platforms (definition not visible here).
    class DebugOutStream : public IStream {
        CATCH_AUTO_PTR( StreamBufBase ) m_streamBuf;
        mutable std::ostream m_os;
    public:
        DebugOutStream();
        virtual ~DebugOutStream() CATCH_NOEXCEPT;
    public: // IStream
        virtual std::ostream& stream() const CATCH_OVERRIDE;
    };
}
#include <memory>
#include <vector>
#include <string>
#include <stdexcept>
#ifndef CATCH_CONFIG_CONSOLE_WIDTH
#define CATCH_CONFIG_CONSOLE_WIDTH 80
#endif
namespace Catch {
    // Plain aggregate of every command-line-configurable setting, with the
    // defaults established in the constructor. Filled in by the CLI parser and
    // then handed to Config. NOTE: initialiser order matches declaration order.
    struct ConfigData {
        ConfigData()
        :   listTests( false ),
            listTags( false ),
            listReporters( false ),
            listTestNamesOnly( false ),
            showSuccessfulTests( false ),
            shouldDebugBreak( false ),
            noThrow( false ),
            showHelp( false ),
            showInvisibles( false ),
            filenamesAsTags( false ),
            abortAfter( -1 ),
            rngSeed( 0 ),
            verbosity( Verbosity::Normal ),
            warnings( WarnAbout::Nothing ),
            showDurations( ShowDurations::DefaultForReporter ),
            runOrder( RunTests::InDeclarationOrder ),
            useColour( UseColour::Auto )
        {}
        // Listing modes - report information instead of running tests.
        bool listTests;
        bool listTags;
        bool listReporters;
        bool listTestNamesOnly;
        // Run behaviour toggles.
        bool showSuccessfulTests;
        bool shouldDebugBreak;
        bool noThrow;
        bool showHelp;
        bool showInvisibles;
        bool filenamesAsTags;
        int abortAfter;             // -1 means "never abort early"
        unsigned int rngSeed;       // seed for InRandomOrder; 0 by default
        Verbosity::Level verbosity;
        WarnAbout::What warnings;
        ShowDurations::OrNot showDurations;
        RunTests::InWhatOrder runOrder;
        UseColour::YesOrNo useColour;
        std::string outputFilename; // empty = stdout; "%debug" = debug stream
        std::string name;
        std::string processName;
        std::vector<std::string> reporterNames;
        std::vector<std::string> testsOrTags;   // raw spec strings to parse
        std::vector<std::string> sectionsToRun;
    };
    // Concrete IConfig built from a ConfigData: parses the test specs, opens
    // the output stream, and exposes everything through the IConfig getters.
    class Config : public SharedImpl<IConfig> {
    private:
        // Non-copyable: owns the output stream.
        Config( Config const& other );
        Config& operator = ( Config const& other );
        // Out-of-line anchor - presumably forces the vtable into one TU; confirm
        // at its definition.
        virtual void dummy();
    public:
        // NOTE(review): a default-constructed Config never calls openStream(),
        // so m_stream is null and stream() would dereference it - presumably
        // only the ConfigData overload is used at runtime; confirm.
        Config()
        {}
        Config( ConfigData const& data )
        :   m_data( data ),
            m_stream( openStream() )
        {
            // Each spec string contributes filters to a single TestSpec.
            if( !data.testsOrTags.empty() ) {
                TestSpecParser parser( ITagAliasRegistry::get() );
                for( std::size_t i = 0; i < data.testsOrTags.size(); ++i )
                    parser.parse( data.testsOrTags[i] );
                m_testSpec = parser.testSpec();
            }
        }
        virtual ~Config() {}
        std::string const& getFilename() const {
            return m_data.outputFilename ;
        }
        bool listTests() const { return m_data.listTests; }
        bool listTestNamesOnly() const { return m_data.listTestNamesOnly; }
        bool listTags() const { return m_data.listTags; }
        bool listReporters() const { return m_data.listReporters; }
        std::string getProcessName() const { return m_data.processName; }
        std::vector<std::string> const& getReporterNames() const { return m_data.reporterNames; }
        std::vector<std::string> const& getSectionsToRun() const CATCH_OVERRIDE { return m_data.sectionsToRun; }
        virtual TestSpec const& testSpec() const CATCH_OVERRIDE { return m_testSpec; }
        bool showHelp() const { return m_data.showHelp; }
        // IConfig interface
        virtual bool allowThrows() const CATCH_OVERRIDE { return !m_data.noThrow; }
        virtual std::ostream& stream() const CATCH_OVERRIDE { return m_stream->stream(); }
        virtual std::string name() const CATCH_OVERRIDE { return m_data.name.empty() ? m_data.processName : m_data.name; }
        virtual bool includeSuccessfulResults() const CATCH_OVERRIDE { return m_data.showSuccessfulTests; }
        virtual bool warnAboutMissingAssertions() const CATCH_OVERRIDE { return m_data.warnings & WarnAbout::NoAssertions; }
        virtual ShowDurations::OrNot showDurations() const CATCH_OVERRIDE { return m_data.showDurations; }
        virtual RunTests::InWhatOrder runOrder() const CATCH_OVERRIDE { return m_data.runOrder; }
        virtual unsigned int rngSeed() const CATCH_OVERRIDE { return m_data.rngSeed; }
        virtual UseColour::YesOrNo useColour() const CATCH_OVERRIDE { return m_data.useColour; }
        virtual bool shouldDebugBreak() const CATCH_OVERRIDE { return m_data.shouldDebugBreak; }
        virtual int abortAfter() const CATCH_OVERRIDE { return m_data.abortAfter; }
        virtual bool showInvisibles() const CATCH_OVERRIDE { return m_data.showInvisibles; }
    private:
        // Choose the output destination from outputFilename: empty -> stdout,
        // "%debug" -> debug stream, any other "%..." -> error, else a file.
        IStream const* openStream() {
            if( m_data.outputFilename.empty() )
                return new CoutStream();
            else if( m_data.outputFilename[0] == '%' ) {
                if( m_data.outputFilename == "%debug" )
                    return new DebugOutStream();
                else
                    throw std::domain_error( "Unrecognised stream: " + m_data.outputFilename );
            }
            else
                return new FileStream( m_data.outputFilename );
        }
        ConfigData m_data;
        CATCH_AUTO_PTR( IStream const ) m_stream;
        TestSpec m_testSpec;
    };
} // end namespace Catch
// #included from: catch_clara.h
#define TWOBLUECUBES_CATCH_CLARA_H_INCLUDED
// Use Catch's value for console width (store Clara's off to the side, if present)
#ifdef CLARA_CONFIG_CONSOLE_WIDTH
#define CATCH_TEMP_CLARA_CONFIG_CONSOLE_WIDTH CLARA_CONFIG_CONSOLE_WIDTH
#undef CLARA_CONFIG_CONSOLE_WIDTH
#endif
#define CLARA_CONFIG_CONSOLE_WIDTH CATCH_CONFIG_CONSOLE_WIDTH
// Declare Clara inside the Catch namespace
#define STITCH_CLARA_OPEN_NAMESPACE namespace Catch {
// #included from: ../external/clara.h
// Version 0.0.2.4
// Only use header guard if we are not using an outer namespace
#if !defined(TWOBLUECUBES_CLARA_H_INCLUDED) || defined(STITCH_CLARA_OPEN_NAMESPACE)
#ifndef STITCH_CLARA_OPEN_NAMESPACE
#define TWOBLUECUBES_CLARA_H_INCLUDED
#define STITCH_CLARA_OPEN_NAMESPACE
#define STITCH_CLARA_CLOSE_NAMESPACE
#else
#define STITCH_CLARA_CLOSE_NAMESPACE }
#endif
#define STITCH_TBC_TEXT_FORMAT_OPEN_NAMESPACE STITCH_CLARA_OPEN_NAMESPACE
// ----------- #included from tbc_text_format.h -----------
// Only use header guard if we are not using an outer namespace
#if !defined(TBC_TEXT_FORMAT_H_INCLUDED) || defined(STITCH_TBC_TEXT_FORMAT_OUTER_NAMESPACE)
#ifndef STITCH_TBC_TEXT_FORMAT_OUTER_NAMESPACE
#define TBC_TEXT_FORMAT_H_INCLUDED
#endif
#include <string>
#include <vector>
#include <sstream>
#include <algorithm>
// Use optional outer namespace
#ifdef STITCH_TBC_TEXT_FORMAT_OUTER_NAMESPACE
namespace STITCH_TBC_TEXT_FORMAT_OUTER_NAMESPACE {
#endif
namespace Tbc {
#ifdef TBC_TEXT_FORMAT_CONSOLE_WIDTH
const unsigned int consoleWidth = TBC_TEXT_FORMAT_CONSOLE_WIDTH;
#else
const unsigned int consoleWidth = 80;
#endif
    // Formatting parameters for the Text word-wrapper, with fluent setters so
    // callers can chain e.g. TextAttributes().setIndent( 2 ).setWidth( 70 ).
    struct TextAttributes {
        TextAttributes()
        :   initialIndent( std::string::npos ),
            indent( 0 ),
            width( consoleWidth-1 ),
            tabChar( '\t' )
        {}
        TextAttributes& setInitialIndent( std::size_t _value ) { initialIndent = _value; return *this; }
        TextAttributes& setIndent( std::size_t _value ) { indent = _value; return *this; }
        TextAttributes& setWidth( std::size_t _value ) { width = _value; return *this; }
        TextAttributes& setTabChar( char _value ) { tabChar = _value; return *this; }
        std::size_t initialIndent;  // indent of first line, or npos
        std::size_t indent;         // indent of subsequent lines, or all if initialIndent is npos
        std::size_t width;          // maximum width of text, including indent. Longer text will wrap
        char tabChar;               // If this char is seen the indent is changed to current pos
    };
    // Greedy word-wrapper: splits _str into indented lines no wider than
    // attr.width, preferring to break at wrappable characters, honouring
    // embedded newlines and a tab character that adjusts subsequent indent.
    class Text {
    public:
        Text( std::string const& _str, TextAttributes const& _attr = TextAttributes() )
        : attr( _attr )
        {
            std::string wrappableChars = " [({.,/|\\-";
            // First line may use a distinct initial indent.
            std::size_t indent = _attr.initialIndent != std::string::npos
                ? _attr.initialIndent
                : _attr.indent;
            std::string remainder = _str;
            while( !remainder.empty() ) {
                // Safety valve against pathological/huge inputs.
                if( lines.size() >= 1000 ) {
                    lines.push_back( "... message truncated due to excessive size" );
                    return;
                }
                std::size_t tabPos = std::string::npos;
                // Parenthesised (std::min) dodges Windows' min() macro.
                // NOTE(review): if indent ever exceeds _attr.width this
                // subtraction wraps around (size_t underflow) - presumably
                // callers keep indent < width; confirm.
                std::size_t width = (std::min)( remainder.size(), _attr.width - indent );
                std::size_t pos = remainder.find_first_of( '\n' );
                if( pos <= width ) {
                    width = pos;
                }
                // A tab marks where subsequent lines' indent should grow to;
                // the tab char itself is removed from the text.
                pos = remainder.find_last_of( _attr.tabChar, width );
                if( pos != std::string::npos ) {
                    tabPos = pos;
                    if( remainder[width] == '\n' )
                        width--;
                    remainder = remainder.substr( 0, tabPos ) + remainder.substr( tabPos+1 );
                }
                if( width == remainder.size() ) {
                    // Everything left fits on one line.
                    spliceLine( indent, remainder, width );
                }
                else if( remainder[width] == '\n' ) {
                    // Hard line break in the source text.
                    spliceLine( indent, remainder, width );
                    if( width <= 1 || remainder.size() != 1 )
                        remainder = remainder.substr( 1 );
                    indent = _attr.indent;
                }
                else {
                    // Soft wrap: break at the last wrappable char if any,
                    // otherwise hyphenate mid-word.
                    pos = remainder.find_last_of( wrappableChars, width );
                    if( pos != std::string::npos && pos > 0 ) {
                        spliceLine( indent, remainder, pos );
                        if( remainder[0] == ' ' )
                            remainder = remainder.substr( 1 );
                    }
                    else {
                        spliceLine( indent, remainder, width-1 );
                        lines.back() += "-";
                    }
                    if( lines.size() == 1 )
                        indent = _attr.indent;
                    if( tabPos != std::string::npos )
                        indent += tabPos;
                }
            }
        }
        // Move the first _pos chars of _remainder into a new indented line.
        void spliceLine( std::size_t _indent, std::string& _remainder, std::size_t _pos ) {
            lines.push_back( std::string( _indent, ' ' ) + _remainder.substr( 0, _pos ) );
            _remainder = _remainder.substr( _pos );
        }
        typedef std::vector<std::string>::const_iterator const_iterator;
        const_iterator begin() const { return lines.begin(); }
        const_iterator end() const { return lines.end(); }
        std::string const& last() const { return lines.back(); }
        std::size_t size() const { return lines.size(); }
        std::string const& operator[]( std::size_t _index ) const { return lines[_index]; }
        std::string toString() const {
            std::ostringstream oss;
            oss << *this;
            return oss.str();
        }
        // Streams the wrapped lines joined by '\n' (no trailing newline).
        inline friend std::ostream& operator << ( std::ostream& _stream, Text const& _text ) {
            for( Text::const_iterator it = _text.begin(), itEnd = _text.end();
                it != itEnd; ++it ) {
                if( it != _text.begin() )
                    _stream << "\n";
                _stream << *it;
            }
            return _stream;
        }
    private:
        // NOTE(review): str is never written by the visible code - appears unused.
        std::string str;
        TextAttributes attr;
        std::vector<std::string> lines;
    };
} // end namespace Tbc
#ifdef STITCH_TBC_TEXT_FORMAT_OUTER_NAMESPACE
} // end outer namespace
#endif
#endif // TBC_TEXT_FORMAT_H_INCLUDED
// ----------- end of #include from tbc_text_format.h -----------
// ........... back in clara.h
#undef STITCH_TBC_TEXT_FORMAT_OPEN_NAMESPACE
// ----------- #included from clara_compilers.h -----------
#ifndef TWOBLUECUBES_CLARA_COMPILERS_H_INCLUDED
#define TWOBLUECUBES_CLARA_COMPILERS_H_INCLUDED
// Detect a number of compiler features - mostly C++11/14 conformance - by compiler
// The following features are defined:
//
// CLARA_CONFIG_CPP11_NULLPTR : is nullptr supported?
// CLARA_CONFIG_CPP11_NOEXCEPT : is noexcept supported?
// CLARA_CONFIG_CPP11_GENERATED_METHODS : The delete and default keywords for compiler generated methods
// CLARA_CONFIG_CPP11_OVERRIDE : is override supported?
// CLARA_CONFIG_CPP11_UNIQUE_PTR : is unique_ptr supported (otherwise use auto_ptr)
// CLARA_CONFIG_CPP11_OR_GREATER : Is C++11 supported?
// CLARA_CONFIG_VARIADIC_MACROS : are variadic macros supported?
// In general each macro has a _NO_<feature name> form
// (e.g. CLARA_CONFIG_CPP11_NO_NULLPTR) which disables the feature.
// Many features, at point of detection, define an _INTERNAL_ macro, so they
// can be combined, en-mass, with the _NO_ forms later.
// All the C++11 features can be disabled with CLARA_CONFIG_NO_CPP11
#ifdef __clang__
#if __has_feature(cxx_nullptr)
#define CLARA_INTERNAL_CONFIG_CPP11_NULLPTR
#endif
#if __has_feature(cxx_noexcept)
#define CLARA_INTERNAL_CONFIG_CPP11_NOEXCEPT
#endif
#endif // __clang__
////////////////////////////////////////////////////////////////////////////////
// GCC
#ifdef __GNUC__
#if __GNUC__ == 4 && __GNUC_MINOR__ >= 6 && defined(__GXX_EXPERIMENTAL_CXX0X__)
#define CLARA_INTERNAL_CONFIG_CPP11_NULLPTR
#endif
// - otherwise more recent versions define __cplusplus >= 201103L
// and will get picked up below
#endif // __GNUC__
////////////////////////////////////////////////////////////////////////////////
// Visual C++
#ifdef _MSC_VER
#if (_MSC_VER >= 1600)
#define CLARA_INTERNAL_CONFIG_CPP11_NULLPTR
#define CLARA_INTERNAL_CONFIG_CPP11_UNIQUE_PTR
#endif
#if (_MSC_VER >= 1900 ) // (VC++ 13 (VS2015))
#define CLARA_INTERNAL_CONFIG_CPP11_NOEXCEPT
#define CLARA_INTERNAL_CONFIG_CPP11_GENERATED_METHODS
#endif
#endif // _MSC_VER
////////////////////////////////////////////////////////////////////////////////
// C++ language feature support
// catch all support for C++11
#if defined(__cplusplus) && __cplusplus >= 201103L
#define CLARA_CPP11_OR_GREATER
#if !defined(CLARA_INTERNAL_CONFIG_CPP11_NULLPTR)
#define CLARA_INTERNAL_CONFIG_CPP11_NULLPTR
#endif
#ifndef CLARA_INTERNAL_CONFIG_CPP11_NOEXCEPT
#define CLARA_INTERNAL_CONFIG_CPP11_NOEXCEPT
#endif
#ifndef CLARA_INTERNAL_CONFIG_CPP11_GENERATED_METHODS
#define CLARA_INTERNAL_CONFIG_CPP11_GENERATED_METHODS
#endif
#if !defined(CLARA_INTERNAL_CONFIG_CPP11_OVERRIDE)
#define CLARA_INTERNAL_CONFIG_CPP11_OVERRIDE
#endif
#if !defined(CLARA_INTERNAL_CONFIG_CPP11_UNIQUE_PTR)
#define CLARA_INTERNAL_CONFIG_CPP11_UNIQUE_PTR
#endif
#endif // __cplusplus >= 201103L
// Now set the actual defines based on the above + anything the user has configured
#if defined(CLARA_INTERNAL_CONFIG_CPP11_NULLPTR) && !defined(CLARA_CONFIG_CPP11_NO_NULLPTR) && !defined(CLARA_CONFIG_CPP11_NULLPTR) && !defined(CLARA_CONFIG_NO_CPP11)
#define CLARA_CONFIG_CPP11_NULLPTR
#endif
#if defined(CLARA_INTERNAL_CONFIG_CPP11_NOEXCEPT) && !defined(CLARA_CONFIG_CPP11_NO_NOEXCEPT) && !defined(CLARA_CONFIG_CPP11_NOEXCEPT) && !defined(CLARA_CONFIG_NO_CPP11)
#define CLARA_CONFIG_CPP11_NOEXCEPT
#endif
#if defined(CLARA_INTERNAL_CONFIG_CPP11_GENERATED_METHODS) && !defined(CLARA_CONFIG_CPP11_NO_GENERATED_METHODS) && !defined(CLARA_CONFIG_CPP11_GENERATED_METHODS) && !defined(CLARA_CONFIG_NO_CPP11)
#define CLARA_CONFIG_CPP11_GENERATED_METHODS
#endif
#if defined(CLARA_INTERNAL_CONFIG_CPP11_OVERRIDE) && !defined(CLARA_CONFIG_NO_OVERRIDE) && !defined(CLARA_CONFIG_CPP11_OVERRIDE) && !defined(CLARA_CONFIG_NO_CPP11)
#define CLARA_CONFIG_CPP11_OVERRIDE
#endif
#if defined(CLARA_INTERNAL_CONFIG_CPP11_UNIQUE_PTR) && !defined(CLARA_CONFIG_NO_UNIQUE_PTR) && !defined(CLARA_CONFIG_CPP11_UNIQUE_PTR) && !defined(CLARA_CONFIG_NO_CPP11)
#define CLARA_CONFIG_CPP11_UNIQUE_PTR
#endif
// noexcept support:
#if defined(CLARA_CONFIG_CPP11_NOEXCEPT) && !defined(CLARA_NOEXCEPT)
#define CLARA_NOEXCEPT noexcept
# define CLARA_NOEXCEPT_IS(x) noexcept(x)
#else
#define CLARA_NOEXCEPT throw()
# define CLARA_NOEXCEPT_IS(x)
#endif
// nullptr support
#ifdef CLARA_CONFIG_CPP11_NULLPTR
#define CLARA_NULL nullptr
#else
#define CLARA_NULL NULL
#endif
// override support
#ifdef CLARA_CONFIG_CPP11_OVERRIDE
#define CLARA_OVERRIDE override
#else
#define CLARA_OVERRIDE
#endif
// unique_ptr support
#ifdef CLARA_CONFIG_CPP11_UNIQUE_PTR
# define CLARA_AUTO_PTR( T ) std::unique_ptr<T>
#else
# define CLARA_AUTO_PTR( T ) std::auto_ptr<T>
#endif
#endif // TWOBLUECUBES_CLARA_COMPILERS_H_INCLUDED
// ----------- end of #include from clara_compilers.h -----------
// ........... back in clara.h
#include <map>
#include <stdexcept>
#include <memory>
#if defined(WIN32) || defined(__WIN32__) || defined(_WIN32) || defined(_MSC_VER)
#define CLARA_PLATFORM_WINDOWS
#endif
// Use optional outer namespace
#ifdef STITCH_CLARA_OPEN_NAMESPACE
STITCH_CLARA_OPEN_NAMESPACE
#endif
namespace Clara {
struct UnpositionalTag {};
extern UnpositionalTag _;
#ifdef CLARA_CONFIG_MAIN
UnpositionalTag _;
#endif
namespace Detail {
// Console width used when formatting usage text.
// Fix: the original tested CLARA_CONSOLE_WIDTH - a macro that is never defined
// anywhere in this header - so a user-supplied CLARA_CONFIG_CONSOLE_WIDTH was
// silently ignored and the width was always 80. Test the macro that is
// actually defined (see the CLARA_CONFIG_CONSOLE_WIDTH #define above, which
// maps it to CATCH_CONFIG_CONSOLE_WIDTH).
#ifdef CLARA_CONFIG_CONSOLE_WIDTH
    const unsigned int consoleWidth = CLARA_CONFIG_CONSOLE_WIDTH;
#else
    const unsigned int consoleWidth = 80;
#endif
using namespace Tbc;
inline bool startsWith( std::string const& str, std::string const& prefix ) {
return str.size() >= prefix.size() && str.substr( 0, prefix.size() ) == prefix;
}
template<typename T> struct RemoveConstRef{ typedef T type; };
template<typename T> struct RemoveConstRef<T&>{ typedef T type; };
template<typename T> struct RemoveConstRef<T const&>{ typedef T type; };
template<typename T> struct RemoveConstRef<T const>{ typedef T type; };
template<typename T> struct IsBool { static const bool value = false; };
template<> struct IsBool<bool> { static const bool value = true; };
template<typename T>
void convertInto( std::string const& _source, T& _dest ) {
std::stringstream ss;
ss << _source;
ss >> _dest;
if( ss.fail() )
throw std::runtime_error( "Unable to convert " + _source + " to destination type" );
}
inline void convertInto( std::string const& _source, std::string& _dest ) {
_dest = _source;
}
char toLowerCh(char c) {
return static_cast<char>( ::tolower( c ) );
}
inline void convertInto( std::string const& _source, bool& _dest ) {
std::string sourceLC = _source;
std::transform( sourceLC.begin(), sourceLC.end(), sourceLC.begin(), toLowerCh );
if( sourceLC == "y" || sourceLC == "1" || sourceLC == "true" || sourceLC == "yes" || sourceLC == "on" )
_dest = true;
else if( sourceLC == "n" || sourceLC == "0" || sourceLC == "false" || sourceLC == "no" || sourceLC == "off" )
_dest = false;
else
throw std::runtime_error( "Expected a boolean value but did not recognise:\n '" + _source + "'" );
}
        // Interface for "apply this option's value to the config object".
        // Concrete binders below wrap data members, methods and free functions.
        template<typename ConfigT>
        struct IArgFunction {
            virtual ~IArgFunction() {}
#ifdef CLARA_CONFIG_CPP11_GENERATED_METHODS
            IArgFunction() = default;
            IArgFunction( IArgFunction const& ) = default;
#endif
            // Applies the (string) option value to config.
            virtual void set( ConfigT& config, std::string const& value ) const = 0;
            // False for bool-like bindings, which are set by mere presence.
            virtual bool takesArg() const = 0;
            // Polymorphic copy, used by BoundArgFunction's value semantics.
            virtual IArgFunction* clone() const = 0;
        };
        // Value-semantics wrapper owning a heap-allocated IArgFunction via a
        // raw pointer; copies go through clone().
        template<typename ConfigT>
        class BoundArgFunction {
        public:
            BoundArgFunction() : functionObj( CLARA_NULL ) {}
            // Takes ownership of _functionObj.
            BoundArgFunction( IArgFunction<ConfigT>* _functionObj ) : functionObj( _functionObj ) {}
            BoundArgFunction( BoundArgFunction const& other ) : functionObj( other.functionObj ? other.functionObj->clone() : CLARA_NULL ) {}
            BoundArgFunction& operator = ( BoundArgFunction const& other ) {
                // Clone BEFORE deleting: this ordering makes self-assignment safe.
                IArgFunction<ConfigT>* newFunctionObj = other.functionObj ? other.functionObj->clone() : CLARA_NULL;
                delete functionObj;
                functionObj = newFunctionObj;
                return *this;
            }
            ~BoundArgFunction() { delete functionObj; }
            // Precondition: isSet() - dereferences functionObj unchecked.
            void set( ConfigT& config, std::string const& value ) const {
                functionObj->set( config, value );
            }
            bool takesArg() const { return functionObj->takesArg(); }
            bool isSet() const {
                return functionObj != CLARA_NULL;
            }
        private:
            IArgFunction<ConfigT>* functionObj;
        };
        // Placeholder binder: accepts an argument but does nothing with it.
        template<typename C>
        struct NullBinder : IArgFunction<C>{
            virtual void set( C&, std::string const& ) const {}
            virtual bool takesArg() const { return true; }
            virtual IArgFunction<C>* clone() const { return new NullBinder( *this ); }
        };
        // Binds a data member: converts the string and assigns it directly.
        template<typename C, typename M>
        struct BoundDataMember : IArgFunction<C>{
            BoundDataMember( M C::* _member ) : member( _member ) {}
            virtual void set( C& p, std::string const& stringValue ) const {
                convertInto( stringValue, p.*member );
            }
            virtual bool takesArg() const { return !IsBool<M>::value; }
            virtual IArgFunction<C>* clone() const { return new BoundDataMember( *this ); }
            M C::* member;
        };
        // Binds a one-argument method: converts the string, then calls it.
        template<typename C, typename M>
        struct BoundUnaryMethod : IArgFunction<C>{
            BoundUnaryMethod( void (C::*_member)( M ) ) : member( _member ) {}
            virtual void set( C& p, std::string const& stringValue ) const {
                typename RemoveConstRef<M>::type value;
                convertInto( stringValue, value );
                (p.*member)( value );
            }
            virtual bool takesArg() const { return !IsBool<M>::value; }
            virtual IArgFunction<C>* clone() const { return new BoundUnaryMethod( *this ); }
            void (C::*member)( M );
        };
        // Binds a no-argument method: invoked when the value converts to true.
        template<typename C>
        struct BoundNullaryMethod : IArgFunction<C>{
            BoundNullaryMethod( void (C::*_member)() ) : member( _member ) {}
            virtual void set( C& p, std::string const& stringValue ) const {
                bool value;
                convertInto( stringValue, value );
                if( value )
                    (p.*member)();
            }
            virtual bool takesArg() const { return false; }
            virtual IArgFunction<C>* clone() const { return new BoundNullaryMethod( *this ); }
            void (C::*member)();
        };
        // Binds a free function taking only the config object; invoked when the
        // value converts to true.
        template<typename C>
        struct BoundUnaryFunction : IArgFunction<C>{
            BoundUnaryFunction( void (*_function)( C& ) ) : function( _function ) {}
            virtual void set( C& obj, std::string const& stringValue ) const {
                bool value;
                convertInto( stringValue, value );
                if( value )
                    function( obj );
            }
            virtual bool takesArg() const { return false; }
            virtual IArgFunction<C>* clone() const { return new BoundUnaryFunction( *this ); }
            void (*function)( C& );
        };
        // Binds a free function taking the config object plus a converted value.
        template<typename C, typename T>
        struct BoundBinaryFunction : IArgFunction<C>{
            BoundBinaryFunction( void (*_function)( C&, T ) ) : function( _function ) {}
            virtual void set( C& obj, std::string const& stringValue ) const {
                typename RemoveConstRef<T>::type value;
                convertInto( stringValue, value );
                function( obj, value );
            }
            virtual bool takesArg() const { return !IsBool<T>::value; }
            virtual IArgFunction<C>* clone() const { return new BoundBinaryFunction( *this ); }
            void (*function)( C&, T );
        };
} // namespace Detail
inline std::vector<std::string> argsToVector( int argc, char const* const* const argv ) {
std::vector<std::string> args( static_cast<std::size_t>( argc ) );
for( std::size_t i = 0; i < static_cast<std::size_t>( argc ); ++i )
args[i] = argv[i];
return args;
}
    // Tokeniser: splits raw command-line arguments into Positional, ShortOpt
    // and LongOpt tokens. Character-driven state machine; '"' toggles quoting,
    // which forces the content to be treated as positional.
    class Parser {
        enum Mode { None, MaybeShortOpt, SlashOpt, ShortOpt, LongOpt, Positional };
        Mode mode;
        std::size_t from;   // start index of the token being accumulated
        bool inQuotes;
    public:
        struct Token {
            enum Type { Positional, ShortOpt, LongOpt };
            Token( Type _type, std::string const& _data ) : type( _type ), data( _data ) {}
            Type type;
            std::string data;
        };
        Parser() : mode( None ), from( 0 ), inQuotes( false ){}
        // Tokenises each arg, skipping args[0] (the program name) and stopping
        // at a bare "--".
        void parseIntoTokens( std::vector<std::string> const& args, std::vector<Token>& tokens ) {
            const std::string doubleDash = "--";
            for( std::size_t i = 1; i < args.size() && args[i] != doubleDash; ++i )
                parseIntoTokens( args[i], tokens);
        }
        void parseIntoTokens( std::string const& arg, std::vector<Token>& tokens ) {
            // Loop runs to i == arg.size() on purpose: arg[arg.size()] is the
            // '\0' terminator (well-defined for const std::string access) and
            // acts as the end-of-argument sentinel that flushes the last token.
            for( std::size_t i = 0; i <= arg.size(); ++i ) {
                char c = arg[i];
                if( c == '"' )
                    inQuotes = !inQuotes;
                mode = handleMode( i, c, arg, tokens );
            }
        }
        Mode handleMode( std::size_t i, char c, std::string const& arg, std::vector<Token>& tokens ) {
            switch( mode ) {
                case None: return handleNone( i, c );
                case MaybeShortOpt: return handleMaybeShortOpt( i, c );
                case ShortOpt:
                case LongOpt:
                case SlashOpt: return handleOpt( i, c, arg, tokens );
                case Positional: return handlePositional( i, c, arg, tokens );
                default: throw std::logic_error( "Unknown mode" );
            }
        }
        Mode handleNone( std::size_t i, char c ) {
            // Inside quotes everything is positional data.
            if( inQuotes ) {
                from = i;
                return Positional;
            }
            switch( c ) {
                case '-': return MaybeShortOpt;
#ifdef CLARA_PLATFORM_WINDOWS
                case '/': from = i+1; return SlashOpt;
#endif
                default: from = i; return Positional;
            }
        }
        Mode handleMaybeShortOpt( std::size_t i, char c ) {
            // A second '-' makes it a long option; anything else a short one.
            switch( c ) {
                case '-': from = i+1; return LongOpt;
                default: from = i; return ShortOpt;
            }
        }
        Mode handleOpt( std::size_t i, char c, std::string const& arg, std::vector<Token>& tokens ) {
            // Option names end at ':', '=' or the '\0' sentinel (the string is
            // constructed with an explicit length of 3 to include the NUL).
            if( std::string( ":=\0", 3 ).find( c ) == std::string::npos )
                return mode;
            std::string optName = arg.substr( from, i-from );
            if( mode == ShortOpt )
                // A run of short options (-abc) expands to one token per char.
                for( std::size_t j = 0; j < optName.size(); ++j )
                    tokens.push_back( Token( Token::ShortOpt, optName.substr( j, 1 ) ) );
            else if( mode == SlashOpt && optName.size() == 1 )
                tokens.push_back( Token( Token::ShortOpt, optName ) );
            else
                tokens.push_back( Token( Token::LongOpt, optName ) );
            return None;
        }
        Mode handlePositional( std::size_t i, char c, std::string const& arg, std::vector<Token>& tokens ) {
            // Positional data ends only at the '\0' sentinel (never in quotes).
            if( inQuotes || std::string( "\0", 1 ).find( c ) == std::string::npos )
                return mode;
            std::string data = arg.substr( from, i-from );
            tokens.push_back( Token( Token::Positional, data ) );
            return None;
        }
    };
    // Properties shared by all argument kinds: the binding plus help text.
    template<typename ConfigT>
    struct CommonArgProperties {
        CommonArgProperties() {}
        CommonArgProperties( Detail::BoundArgFunction<ConfigT> const& _boundField ) : boundField( _boundField ) {}
        Detail::BoundArgFunction<ConfigT> boundField;
        std::string description;
        std::string detail;
        std::string placeholder; // Only valid if boundField takes an arg
        // Non-empty placeholder implies the option expects a value.
        bool takesArg() const {
            return !placeholder.empty();
        }
        // An arg must have been bound to something before it can be used.
        void validate() const {
            if( !boundField.isSet() )
                throw std::logic_error( "option not bound" );
        }
    };
struct OptionArgProperties {
std::vector<std::string> shortNames;
std::string longName;
bool hasShortName( std::string const& shortName ) const {
return std::find( shortNames.begin(), shortNames.end(), shortName ) != shortNames.end();
}
bool hasLongName( std::string const& _longName ) const {
return _longName == longName;
}
};
struct PositionalArgProperties {
PositionalArgProperties() : position( -1 ) {}
int position; // -1 means non-positional (floating)
bool isFixedPositional() const {
return position != -1;
}
};
    // Clara command-line builder/parser, templated on the user's config type.
    // Options are registered fluently ( cli["-x"]["--xxx"].describe(...).bind(...) ),
    // then parse()/parseInto() apply argv-style strings to a ConfigT instance.
    // Also renders usage/synopsis text for help output.
    template<typename ConfigT>
    class CommandLine {
        // One registered argument: value binding (CommonArgProperties) plus
        // option names (OptionArgProperties) plus position (PositionalArgProperties).
        struct Arg : CommonArgProperties<ConfigT>, OptionArgProperties, PositionalArgProperties {
            Arg() {}
            Arg( Detail::BoundArgFunction<ConfigT> const& _boundField ) : CommonArgProperties<ConfigT>( _boundField ) {}
            using CommonArgProperties<ConfigT>::placeholder; // !TBD
            // Most descriptive name available, for diagnostics.
            std::string dbgName() const {
                if( !longName.empty() )
                    return "--" + longName;
                if( !shortNames.empty() )
                    return "-" + shortNames[0];
                return "positional args";
            }
            // Usage-column rendering, e.g. "-o, --out <filename>".
            std::string commands() const {
                std::ostringstream oss;
                bool first = true;
                std::vector<std::string>::const_iterator it = shortNames.begin(), itEnd = shortNames.end();
                for(; it != itEnd; ++it ) {
                    if( first )
                        first = false;
                    else
                        oss << ", ";
                    oss << "-" << *it;
                }
                if( !longName.empty() ) {
                    if( !first )
                        oss << ", ";
                    oss << "--" << longName;
                }
                if( !placeholder.empty() )
                    oss << " <" << placeholder << ">";
                return oss.str();
            }
        };
        typedef CLARA_AUTO_PTR( Arg ) ArgAutoPtr;
        // Registers "-x" / "--xxx" on arg, stripping the dashes.  At most one
        // long name per option; any number of short names.
        friend void addOptName( Arg& arg, std::string const& optName )
        {
            if( optName.empty() )
                return;
            if( Detail::startsWith( optName, "--" ) ) {
                if( !arg.longName.empty() )
                    throw std::logic_error( "Only one long opt may be specified. '"
                        + arg.longName
                        + "' already specified, now attempting to add '"
                        + optName + "'" );
                arg.longName = optName.substr( 2 );
            }
            else if( Detail::startsWith( optName, "-" ) )
                arg.shortNames.push_back( optName.substr( 1 ) );
            else
                throw std::logic_error( "option must begin with - or --. Option was: '" + optName + "'" );
        }
        friend void setPositionalArg( Arg& arg, int position )
        {
            arg.position = position;
        }
        // Fluent builder returned when an argument is registered; the bind()
        // overloads choose how a parsed value is applied to the config object.
        class ArgBuilder {
        public:
            ArgBuilder( Arg* arg ) : m_arg( arg ) {}
            // Bind a non-boolean data member (requires placeholder string)
            template<typename C, typename M>
            void bind( M C::* field, std::string const& placeholder ) {
                m_arg->boundField = new Detail::BoundDataMember<C,M>( field );
                m_arg->placeholder = placeholder;
            }
            // Bind a boolean data member (no placeholder required)
            template<typename C>
            void bind( bool C::* field ) {
                m_arg->boundField = new Detail::BoundDataMember<C,bool>( field );
            }
            // Bind a method taking a single, non-boolean argument (requires a placeholder string)
            template<typename C, typename M>
            void bind( void (C::* unaryMethod)( M ), std::string const& placeholder ) {
                m_arg->boundField = new Detail::BoundUnaryMethod<C,M>( unaryMethod );
                m_arg->placeholder = placeholder;
            }
            // Bind a method taking a single, boolean argument (no placeholder string required)
            template<typename C>
            void bind( void (C::* unaryMethod)( bool ) ) {
                m_arg->boundField = new Detail::BoundUnaryMethod<C,bool>( unaryMethod );
            }
            // Bind a method that takes no arguments (will be called if opt is present)
            template<typename C>
            void bind( void (C::* nullaryMethod)() ) {
                m_arg->boundField = new Detail::BoundNullaryMethod<C>( nullaryMethod );
            }
            // Bind a free function taking a single argument - the object to operate on (no placeholder string required)
            template<typename C>
            void bind( void (* unaryFunction)( C& ) ) {
                m_arg->boundField = new Detail::BoundUnaryFunction<C>( unaryFunction );
            }
            // Bind a free function taking a single argument - the object to operate on (requires a placeholder string)
            template<typename C, typename T>
            void bind( void (* binaryFunction)( C&, T ), std::string const& placeholder ) {
                m_arg->boundField = new Detail::BoundBinaryFunction<C, T>( binaryFunction );
                m_arg->placeholder = placeholder;
            }
            // Short help text shown in the options table.
            ArgBuilder& describe( std::string const& description ) {
                m_arg->description = description;
                return *this;
            }
            // Extended detail text.
            ArgBuilder& detail( std::string const& detail ) {
                m_arg->detail = detail;
                return *this;
            }
        protected:
            Arg* m_arg;
        };
        // ArgBuilder for named options: operator[] chains additional names
        // onto the same option, e.g. cli["-h"]["--help"].
        class OptBuilder : public ArgBuilder {
        public:
            OptBuilder( Arg* arg ) : ArgBuilder( arg ) {}
            OptBuilder( OptBuilder& other ) : ArgBuilder( other ) {}
            OptBuilder& operator[]( std::string const& optName ) {
                addOptName( *ArgBuilder::m_arg, optName );
                return *this;
            }
        };
    public:
        CommandLine()
        : m_boundProcessName( new Detail::NullBinder<ConfigT>() ),
        m_highestSpecifiedArgPosition( 0 ),
        m_throwOnUnrecognisedTokens( false )
        {}
        // Copy ctor deep-copies the floating arg (held by auto/unique ptr).
        CommandLine( CommandLine const& other )
        : m_boundProcessName( other.m_boundProcessName ),
        m_options ( other.m_options ),
        m_positionalArgs( other.m_positionalArgs ),
        m_highestSpecifiedArgPosition( other.m_highestSpecifiedArgPosition ),
        m_throwOnUnrecognisedTokens( other.m_throwOnUnrecognisedTokens )
        {
            if( other.m_floatingArg.get() )
                m_floatingArg.reset( new Arg( *other.m_floatingArg ) );
        }
        // When enabled, unknown option tokens make populateOptions() report an
        // error instead of passing the token through unused.
        CommandLine& setThrowOnUnrecognisedTokens( bool shouldThrow = true ) {
            m_throwOnUnrecognisedTokens = shouldThrow;
            return *this;
        }
        // Register a named option.
        OptBuilder operator[]( std::string const& optName ) {
            m_options.push_back( Arg() );
            addOptName( m_options.back(), optName );
            OptBuilder builder( &m_options.back() );
            return builder;
        }
        // Register a fixed positional argument (1-based position).
        ArgBuilder operator[]( int position ) {
            m_positionalArgs.insert( std::make_pair( position, Arg() ) );
            if( position > m_highestSpecifiedArgPosition )
                m_highestSpecifiedArgPosition = position;
            setPositionalArg( m_positionalArgs[position], position );
            ArgBuilder builder( &m_positionalArgs[position] );
            return builder;
        }
        // Invoke this with the _ instance
        // (registers the single floating/unpositional argument).
        ArgBuilder operator[]( UnpositionalTag ) {
            if( m_floatingArg.get() )
                throw std::logic_error( "Only one unpositional argument can be added" );
            m_floatingArg.reset( new Arg() );
            ArgBuilder builder( m_floatingArg.get() );
            return builder;
        }
        // Bind where the basename of argv[0] is stored on the config.
        template<typename C, typename M>
        void bindProcessName( M C::* field ) {
            m_boundProcessName = new Detail::BoundDataMember<C,M>( field );
        }
        template<typename C, typename M>
        void bindProcessName( void (C::*_unaryMethod)( M ) ) {
            m_boundProcessName = new Detail::BoundUnaryMethod<C,M>( _unaryMethod );
        }
        // Writes the two-column (commands / description) options table, with
        // both columns word-wrapped via Detail::Text.
        void optUsage( std::ostream& os, std::size_t indent = 0, std::size_t width = Detail::consoleWidth ) const {
            typename std::vector<Arg>::const_iterator itBegin = m_options.begin(), itEnd = m_options.end(), it;
            std::size_t maxWidth = 0;
            for( it = itBegin; it != itEnd; ++it )
                maxWidth = (std::max)( maxWidth, it->commands().size() );
            for( it = itBegin; it != itEnd; ++it ) {
                Detail::Text usage( it->commands(), Detail::TextAttributes()
                    .setWidth( maxWidth+indent )
                    .setIndent( indent ) );
                Detail::Text desc( it->description, Detail::TextAttributes()
                    .setWidth( width - maxWidth - 3 ) );
                for( std::size_t i = 0; i < (std::max)( usage.size(), desc.size() ); ++i ) {
                    std::string usageCol = i < usage.size() ? usage[i] : "";
                    os << usageCol;
                    if( i < desc.size() && !desc[i].empty() )
                        os << std::string( indent + 2 + maxWidth - usageCol.size(), ' ' )
                            << desc[i];
                    os << "\n";
                }
            }
        }
        std::string optUsage() const {
            std::ostringstream oss;
            optUsage( oss );
            return oss.str();
        }
        // Writes "<arg1> <arg2> [<floating> ...]" style synopsis; throws if a
        // positional gap exists and no floating arg can fill it.
        void argSynopsis( std::ostream& os ) const {
            for( int i = 1; i <= m_highestSpecifiedArgPosition; ++i ) {
                if( i > 1 )
                    os << " ";
                typename std::map<int, Arg>::const_iterator it = m_positionalArgs.find( i );
                if( it != m_positionalArgs.end() )
                    os << "<" << it->second.placeholder << ">";
                else if( m_floatingArg.get() )
                    os << "<" << m_floatingArg->placeholder << ">";
                else
                    throw std::logic_error( "non consecutive positional arguments with no floating args" );
            }
            // !TBD No indication of mandatory args
            if( m_floatingArg.get() ) {
                if( m_highestSpecifiedArgPosition > 1 )
                    os << " ";
                os << "[<" << m_floatingArg->placeholder << "> ...]";
            }
        }
        std::string argSynopsis() const {
            std::ostringstream oss;
            argSynopsis( oss );
            return oss.str();
        }
        // Full usage text: synopsis plus options table.
        void usage( std::ostream& os, std::string const& procName ) const {
            validate();
            os << "usage:\n " << procName << " ";
            argSynopsis( os );
            if( !m_options.empty() ) {
                os << " [options]\n\nwhere options are: \n";
                optUsage( os, 2 );
            }
            os << "\n";
        }
        std::string usage( std::string const& procName ) const {
            std::ostringstream oss;
            usage( oss, procName );
            return oss.str();
        }
        // Parse into a default-constructed config and return it by value.
        ConfigT parse( std::vector<std::string> const& args ) const {
            ConfigT config;
            parseInto( args, config );
            return config;
        }
        // Parse args (args[0] is the process path) into config; returns the
        // tokens that matched nothing.
        std::vector<Parser::Token> parseInto( std::vector<std::string> const& args, ConfigT& config ) const {
            std::string processName = args[0];
            // Strip any directory component from argv[0].
            std::size_t lastSlash = processName.find_last_of( "/\\" );
            if( lastSlash != std::string::npos )
                processName = processName.substr( lastSlash+1 );
            m_boundProcessName.set( config, processName );
            std::vector<Parser::Token> tokens;
            Parser parser;
            parser.parseIntoTokens( args, tokens );
            return populate( tokens, config );
        }
        // Apply tokens in three passes: named options first, then fixed
        // positionals, then the floating arg; returns leftovers.
        std::vector<Parser::Token> populate( std::vector<Parser::Token> const& tokens, ConfigT& config ) const {
            validate();
            std::vector<Parser::Token> unusedTokens = populateOptions( tokens, config );
            unusedTokens = populateFixedArgs( unusedTokens, config );
            unusedTokens = populateFloatingArgs( unusedTokens, config );
            return unusedTokens;
        }
        // Matches option tokens against the registered options; collects all
        // errors and reports them in one std::runtime_error at the end.
        std::vector<Parser::Token> populateOptions( std::vector<Parser::Token> const& tokens, ConfigT& config ) const {
            std::vector<Parser::Token> unusedTokens;
            std::vector<std::string> errors;
            for( std::size_t i = 0; i < tokens.size(); ++i ) {
                Parser::Token const& token = tokens[i];
                typename std::vector<Arg>::const_iterator it = m_options.begin(), itEnd = m_options.end();
                for(; it != itEnd; ++it ) {
                    Arg const& arg = *it;
                    try {
                        if( ( token.type == Parser::Token::ShortOpt && arg.hasShortName( token.data ) ) ||
                            ( token.type == Parser::Token::LongOpt && arg.hasLongName( token.data ) ) ) {
                            if( arg.takesArg() ) {
                                // Value options consume the following positional token.
                                if( i == tokens.size()-1 || tokens[i+1].type != Parser::Token::Positional )
                                    errors.push_back( "Expected argument to option: " + token.data );
                                else
                                    arg.boundField.set( config, tokens[++i].data );
                            }
                            else {
                                // Flag options are applied as the string "true".
                                arg.boundField.set( config, "true" );
                            }
                            break;
                        }
                    }
                    catch( std::exception& ex ) {
                        errors.push_back( std::string( ex.what() ) + "\n- while parsing: (" + arg.commands() + ")" );
                    }
                }
                if( it == itEnd ) {
                    // Token matched no option: pass through, unless strict mode.
                    if( token.type == Parser::Token::Positional || !m_throwOnUnrecognisedTokens )
                        unusedTokens.push_back( token );
                    else if( errors.empty() && m_throwOnUnrecognisedTokens )
                        errors.push_back( "unrecognised option: " + token.data );
                }
            }
            if( !errors.empty() ) {
                std::ostringstream oss;
                for( std::vector<std::string>::const_iterator it = errors.begin(), itEnd = errors.end();
                        it != itEnd;
                        ++it ) {
                    if( it != errors.begin() )
                        oss << "\n";
                    oss << *it;
                }
                throw std::runtime_error( oss.str() );
            }
            return unusedTokens;
        }
        // Applies tokens to fixed positional args; only positional tokens
        // advance the position counter.
        std::vector<Parser::Token> populateFixedArgs( std::vector<Parser::Token> const& tokens, ConfigT& config ) const {
            std::vector<Parser::Token> unusedTokens;
            int position = 1;
            for( std::size_t i = 0; i < tokens.size(); ++i ) {
                Parser::Token const& token = tokens[i];
                typename std::map<int, Arg>::const_iterator it = m_positionalArgs.find( position );
                if( it != m_positionalArgs.end() )
                    it->second.boundField.set( config, token.data );
                else
                    unusedTokens.push_back( token );
                if( token.type == Parser::Token::Positional )
                    position++;
            }
            return unusedTokens;
        }
        // Every remaining positional token is fed to the floating arg.
        std::vector<Parser::Token> populateFloatingArgs( std::vector<Parser::Token> const& tokens, ConfigT& config ) const {
            if( !m_floatingArg.get() )
                return tokens;
            std::vector<Parser::Token> unusedTokens;
            for( std::size_t i = 0; i < tokens.size(); ++i ) {
                Parser::Token const& token = tokens[i];
                if( token.type == Parser::Token::Positional )
                    m_floatingArg->boundField.set( config, token.data );
                else
                    unusedTokens.push_back( token );
            }
            return unusedTokens;
        }
        // Throws if nothing is registered, and delegates per-option validation.
        void validate() const
        {
            if( m_options.empty() && m_positionalArgs.empty() && !m_floatingArg.get() )
                throw std::logic_error( "No options or arguments specified" );
            for( typename std::vector<Arg>::const_iterator it = m_options.begin(),
                itEnd = m_options.end();
                it != itEnd; ++it )
                it->validate();
        }
    private:
        Detail::BoundArgFunction<ConfigT> m_boundProcessName;
        std::vector<Arg> m_options;
        std::map<int, Arg> m_positionalArgs;
        ArgAutoPtr m_floatingArg;
        int m_highestSpecifiedArgPosition;
        bool m_throwOnUnrecognisedTokens;
    };
} // end namespace Clara
STITCH_CLARA_CLOSE_NAMESPACE
#undef STITCH_CLARA_OPEN_NAMESPACE
#undef STITCH_CLARA_CLOSE_NAMESPACE
#endif // TWOBLUECUBES_CLARA_H_INCLUDED
#undef STITCH_CLARA_OPEN_NAMESPACE
// Restore Clara's value for console width, if present
#ifdef CATCH_TEMP_CLARA_CONFIG_CONSOLE_WIDTH
#define CLARA_CONFIG_CONSOLE_WIDTH CATCH_TEMP_CLARA_CONFIG_CONSOLE_WIDTH
#undef CATCH_TEMP_CLARA_CONFIG_CONSOLE_WIDTH
#endif
#include <fstream>
#include <ctime>
// Option-binding helpers (each applies one CLI value to ConfigData) and the
// construction of Catch's full command-line definition.
namespace Catch {
    inline void abortAfterFirst( ConfigData& config ) { config.abortAfter = 1; }
    inline void abortAfterX( ConfigData& config, int x ) {
        if( x < 1 )
            throw std::runtime_error( "Value after -x or --abortAfter must be greater than zero" );
        config.abortAfter = x;
    }
    inline void addTestOrTags( ConfigData& config, std::string const& _testSpec ) { config.testsOrTags.push_back( _testSpec ); }
    inline void addSectionToRun( ConfigData& config, std::string const& sectionName ) { config.sectionsToRun.push_back( sectionName ); }
    inline void addReporterName( ConfigData& config, std::string const& _reporterName ) { config.reporterNames.push_back( _reporterName ); }
    // Enables a named warning; only "NoAssertions" is recognised here.
    inline void addWarning( ConfigData& config, std::string const& _warning ) {
        if( _warning == "NoAssertions" )
            config.warnings = static_cast<WarnAbout::What>( config.warnings | WarnAbout::NoAssertions );
        else
            throw std::runtime_error( "Unrecognised warning: '" + _warning + '\'' );
    }
    // Sets the test-run ordering.  Note the reversed startsWith arguments:
    // the user's value may be any prefix of the full keyword (e.g. "decl"
    // matches "declared"), consistent with the "decl|lex|rand" usage text.
    inline void setOrder( ConfigData& config, std::string const& order ) {
        if( startsWith( "declared", order ) )
            config.runOrder = RunTests::InDeclarationOrder;
        else if( startsWith( "lexical", order ) )
            config.runOrder = RunTests::InLexicographicalOrder;
        else if( startsWith( "random", order ) )
            config.runOrder = RunTests::InRandomOrder;
        else
            throw std::runtime_error( "Unrecognised ordering: '" + order + '\'' );
    }
    // Seed is either the literal word "time" (current clock) or a number.
    inline void setRngSeed( ConfigData& config, std::string const& seed ) {
        if( seed == "time" ) {
            config.rngSeed = static_cast<unsigned int>( std::time(0) );
        }
        else {
            std::stringstream ss;
            ss << seed;
            ss >> config.rngSeed;
            if( ss.fail() )
                throw std::runtime_error( "Argument to --rng-seed should be the word 'time' or a number" );
        }
    }
    inline void setVerbosity( ConfigData& config, int level ) {
        // !TBD: accept strings?
        config.verbosity = static_cast<Verbosity::Level>( level );
    }
    inline void setShowDurations( ConfigData& config, bool _showDurations ) {
        config.showDurations = _showDurations
            ? ShowDurations::Always
            : ShowDurations::Never;
    }
    // Accepts (case-insensitively) "yes", "no" or "auto".
    inline void setUseColour( ConfigData& config, std::string const& value ) {
        std::string mode = toLower( value );
        if( mode == "yes" )
            config.useColour = UseColour::Yes;
        else if( mode == "no" )
            config.useColour = UseColour::No;
        else if( mode == "auto" )
            config.useColour = UseColour::Auto;
        else
            throw std::runtime_error( "colour mode must be one of: auto, yes or no" );
    }
    inline void forceColour( ConfigData& config ) {
        config.useColour = UseColour::Yes;
    }
    // Reads test specs from a file, one per line.  '#'-prefixed lines are
    // comments; unquoted names are wrapped in quotes, and a trailing comma is
    // appended so the specs concatenate into one spec string.
    inline void loadTestNamesFromFile( ConfigData& config, std::string const& _filename ) {
        std::ifstream f( _filename.c_str() );
        if( !f.is_open() )
            throw std::domain_error( "Unable to load input file: " + _filename );
        std::string line;
        while( std::getline( f, line ) ) {
            line = trim(line);
            if( !line.empty() && !startsWith( line, '#' ) ) {
                if( !startsWith( line, '"' ) )
                    line = '"' + line + '"';
                addTestOrTags( config, line + ',' );
            }
        }
    }
    // Builds the complete Catch CLI definition, binding each option either to
    // a ConfigData member directly or to one of the helper functions above.
    inline Clara::CommandLine<ConfigData> makeCommandLineParser() {
        using namespace Clara;
        CommandLine<ConfigData> cli;
        cli.bindProcessName( &ConfigData::processName );
        cli["-?"]["-h"]["--help"]
            .describe( "display usage information" )
            .bind( &ConfigData::showHelp );
        cli["-l"]["--list-tests"]
            .describe( "list all/matching test cases" )
            .bind( &ConfigData::listTests );
        cli["-t"]["--list-tags"]
            .describe( "list all/matching tags" )
            .bind( &ConfigData::listTags );
        cli["-s"]["--success"]
            .describe( "include successful tests in output" )
            .bind( &ConfigData::showSuccessfulTests );
        cli["-b"]["--break"]
            .describe( "break into debugger on failure" )
            .bind( &ConfigData::shouldDebugBreak );
        cli["-e"]["--nothrow"]
            .describe( "skip exception tests" )
            .bind( &ConfigData::noThrow );
        cli["-i"]["--invisibles"]
            .describe( "show invisibles (tabs, newlines)" )
            .bind( &ConfigData::showInvisibles );
        cli["-o"]["--out"]
            .describe( "output filename" )
            .bind( &ConfigData::outputFilename, "filename" );
        cli["-r"]["--reporter"]
//            .placeholder( "name[:filename]" )
            .describe( "reporter to use (defaults to console)" )
            .bind( &addReporterName, "name" );
        cli["-n"]["--name"]
            .describe( "suite name" )
            .bind( &ConfigData::name, "name" );
        cli["-a"]["--abort"]
            .describe( "abort at first failure" )
            .bind( &abortAfterFirst );
        cli["-x"]["--abortx"]
            .describe( "abort after x failures" )
            .bind( &abortAfterX, "no. failures" );
        cli["-w"]["--warn"]
            .describe( "enable warnings" )
            .bind( &addWarning, "warning name" );
// - needs updating if reinstated
//        cli.into( &setVerbosity )
//            .describe( "level of verbosity (0=no output)" )
//            .shortOpt( "v")
//            .longOpt( "verbosity" )
//            .placeholder( "level" );
        cli[_]
            .describe( "which test or tests to use" )
            .bind( &addTestOrTags, "test name, pattern or tags" );
        cli["-d"]["--durations"]
            .describe( "show test durations" )
            .bind( &setShowDurations, "yes|no" );
        cli["-f"]["--input-file"]
            .describe( "load test names to run from a file" )
            .bind( &loadTestNamesFromFile, "filename" );
        cli["-#"]["--filenames-as-tags"]
            .describe( "adds a tag for the filename" )
            .bind( &ConfigData::filenamesAsTags );
        cli["-c"]["--section"]
            .describe( "specify section to run" )
            .bind( &addSectionToRun, "section name" );
        // Less common commands which don't have a short form
        cli["--list-test-names-only"]
            .describe( "list all/matching test cases names only" )
            .bind( &ConfigData::listTestNamesOnly );
        cli["--list-reporters"]
            .describe( "list all reporters" )
            .bind( &ConfigData::listReporters );
        cli["--order"]
            .describe( "test case order (defaults to decl)" )
            .bind( &setOrder, "decl|lex|rand" );
        cli["--rng-seed"]
            .describe( "set a specific seed for random numbers" )
            .bind( &setRngSeed, "'time'|number" );
        cli["--force-colour"]
            .describe( "force colourised output (deprecated)" )
            .bind( &forceColour );
        cli["--use-colour"]
            .describe( "should output be colourised" )
            .bind( &setUseColour, "yes|no" );
        return cli;
    }
} // end namespace Catch
// #included from: internal/catch_list.hpp
#define TWOBLUECUBES_CATCH_LIST_HPP_INCLUDED
// #included from: catch_text.h
#define TWOBLUECUBES_CATCH_TEXT_H_INCLUDED
#define TBC_TEXT_FORMAT_CONSOLE_WIDTH CATCH_CONFIG_CONSOLE_WIDTH
#define CLICHE_TBC_TEXT_FORMAT_OUTER_NAMESPACE Catch
// #included from: ../external/tbc_text_format.h
// Only use header guard if we are not using an outer namespace
#ifndef CLICHE_TBC_TEXT_FORMAT_OUTER_NAMESPACE
# ifdef TWOBLUECUBES_TEXT_FORMAT_H_INCLUDED
# ifndef TWOBLUECUBES_TEXT_FORMAT_H_ALREADY_INCLUDED
# define TWOBLUECUBES_TEXT_FORMAT_H_ALREADY_INCLUDED
# endif
# else
# define TWOBLUECUBES_TEXT_FORMAT_H_INCLUDED
# endif
#endif
#ifndef TWOBLUECUBES_TEXT_FORMAT_H_ALREADY_INCLUDED
#include <string>
#include <vector>
#include <sstream>
// Use optional outer namespace
#ifdef CLICHE_TBC_TEXT_FORMAT_OUTER_NAMESPACE
namespace CLICHE_TBC_TEXT_FORMAT_OUTER_NAMESPACE {
#endif
namespace Tbc {
#ifdef TBC_TEXT_FORMAT_CONSOLE_WIDTH
    const unsigned int consoleWidth = TBC_TEXT_FORMAT_CONSOLE_WIDTH;
#else
    const unsigned int consoleWidth = 80;
#endif
    // Wrapping parameters for Text: first-line indent, subsequent-line indent
    // and total line width (indent included).
    struct TextAttributes {
        TextAttributes()
        : initialIndent( std::string::npos ),
        indent( 0 ),
        width( consoleWidth-1 )
        {}
        TextAttributes& setInitialIndent( std::size_t _value ) { initialIndent = _value; return *this; }
        TextAttributes& setIndent( std::size_t _value ) { indent = _value; return *this; }
        TextAttributes& setWidth( std::size_t _value ) { width = _value; return *this; }
        std::size_t initialIndent; // indent of first line, or npos
        std::size_t indent; // indent of subsequent lines, or all if initialIndent is npos
        std::size_t width; // maximum width of text, including indent. Longer text will wrap
    };
    // Splits a string into word-wrapped, indented lines per TextAttributes.
    class Text {
    public:
        Text( std::string const& _str, TextAttributes const& _attr = TextAttributes() )
        : attr( _attr )
        {
            // Wrap preferences: break *before* any of the first set, *after*
            // any of the second, and drop characters from the third entirely.
            const std::string wrappableBeforeChars = "[({<\t";
            const std::string wrappableAfterChars = "])}>-,./|\\";
            const std::string wrappableInsteadOfChars = " \n\r";
            std::string indent = _attr.initialIndent != std::string::npos
                ? std::string( _attr.initialIndent, ' ' )
                : std::string( _attr.indent, ' ' );
            typedef std::string::const_iterator iterator;
            iterator it = _str.begin();
            const iterator strEnd = _str.end();
            while( it != strEnd ) {
                // Safety valve: cap output at 1000 lines.
                if( lines.size() >= 1000 ) {
                    lines.push_back( "... message truncated due to excessive size" );
                    return;
                }
                std::string suffix;
                std::size_t width = (std::min)( static_cast<size_t>( strEnd-it ), _attr.width-static_cast<size_t>( indent.size() ) );
                iterator itEnd = it+width;
                iterator itNext = _str.end();
                // An embedded newline always forces a break at that point.
                iterator itNewLine = std::find( it, itEnd, '\n' );
                if( itNewLine != itEnd )
                    itEnd = itNewLine;
                if( itEnd != strEnd ) {
                    // Scan backwards from the width limit for the best wrap point.
                    bool foundWrapPoint = false;
                    iterator findIt = itEnd;
                    do {
                        if( wrappableAfterChars.find( *findIt ) != std::string::npos && findIt != itEnd ) {
                            itEnd = findIt+1;
                            itNext = findIt+1;
                            foundWrapPoint = true;
                        }
                        else if( findIt > it && wrappableBeforeChars.find( *findIt ) != std::string::npos ) {
                            itEnd = findIt;
                            itNext = findIt;
                            foundWrapPoint = true;
                        }
                        else if( wrappableInsteadOfChars.find( *findIt ) != std::string::npos ) {
                            itNext = findIt+1;
                            itEnd = findIt;
                            foundWrapPoint = true;
                        }
                        if( findIt == it )
                            break;
                        else
                            --findIt;
                    }
                    while( !foundWrapPoint );
                    if( !foundWrapPoint ) {
                        // No good wrap char, so we'll break mid word and add a hyphen
                        --itEnd;
                        itNext = itEnd;
                        suffix = "-";
                    }
                    else {
                        // Drop trailing wrappable whitespace from the line.
                        while( itEnd > it && wrappableInsteadOfChars.find( *(itEnd-1) ) != std::string::npos )
                            --itEnd;
                    }
                }
                lines.push_back( indent + std::string( it, itEnd ) + suffix );
                // After the first line, switch from initialIndent to indent.
                if( indent.size() != _attr.indent )
                    indent = std::string( _attr.indent, ' ' );
                it = itNext;
            }
        }
        typedef std::vector<std::string>::const_iterator const_iterator;
        const_iterator begin() const { return lines.begin(); }
        const_iterator end() const { return lines.end(); }
        std::string const& last() const { return lines.back(); }
        std::size_t size() const { return lines.size(); }
        std::string const& operator[]( std::size_t _index ) const { return lines[_index]; }
        std::string toString() const {
            std::ostringstream oss;
            oss << *this;
            return oss.str();
        }
        // Streams the lines joined with '\n' (no trailing newline).
        inline friend std::ostream& operator << ( std::ostream& _stream, Text const& _text ) {
            for( Text::const_iterator it = _text.begin(), itEnd = _text.end();
                it != itEnd; ++it ) {
                if( it != _text.begin() )
                    _stream << "\n";
                _stream << *it;
            }
            return _stream;
        }
    private:
        std::string str; // note: not referenced by this implementation
        TextAttributes attr;
        std::vector<std::string> lines;
    };
} // end namespace Tbc
#ifdef CLICHE_TBC_TEXT_FORMAT_OUTER_NAMESPACE
} // end outer namespace
#endif
#endif // TWOBLUECUBES_TEXT_FORMAT_H_ALREADY_INCLUDED
#undef CLICHE_TBC_TEXT_FORMAT_OUTER_NAMESPACE
namespace Catch {
using Tbc::Text;
using Tbc::TextAttributes;
}
// #included from: catch_console_colour.hpp
#define TWOBLUECUBES_CATCH_CONSOLE_COLOUR_HPP_INCLUDED
namespace Catch {
    // RAII console-colour guard: constructing applies the colour, destruction
    // restores it.  Member functions are declared here and defined elsewhere
    // (platform-specific implementations).
    struct Colour {
        enum Code {
            None = 0,
            White,
            Red,
            Green,
            Blue,
            Cyan,
            Yellow,
            Grey,
            Bright = 0x10, // flag bit, OR'd with a base colour below
            BrightRed = Bright | Red,
            BrightGreen = Bright | Green,
            LightGrey = Bright | Grey,
            BrightWhite = Bright | White,
            // By intention - semantic aliases used by the reporters:
            FileName = LightGrey,
            Warning = Yellow,
            ResultError = BrightRed,
            ResultSuccess = BrightGreen,
            ResultExpectedFailure = Warning,
            Error = BrightRed,
            Success = Green,
            OriginalExpression = Cyan,
            ReconstructedExpression = Yellow,
            SecondaryText = LightGrey,
            Headers = White
        };
        // Use constructed object for RAII guard
        Colour( Code _colourCode );
        Colour( Colour const& other );
        ~Colour();
        // Use static method for one-shot changes
        static void use( Code _colourCode );
    private:
        bool m_moved; // presumably tracks guard ownership across copies - impl elsewhere
    };
    // A Colour streams as nothing; only the terminal state is affected.
    inline std::ostream& operator << ( std::ostream& os, Colour const& ) { return os; }
} // end namespace Catch
// #included from: catch_interfaces_reporter.h
#define TWOBLUECUBES_CATCH_INTERFACES_REPORTER_H_INCLUDED
#include <string>
#include <ostream>
#include <map>
// Reporter interfaces plus the *Stats/*Info value types passed through the
// reporter event callbacks.
namespace Catch
{
    // Bundles the output stream and full configuration handed to a reporter.
    struct ReporterConfig {
        explicit ReporterConfig( Ptr<IConfig const> const& _fullConfig )
        : m_stream( &_fullConfig->stream() ), m_fullConfig( _fullConfig ) {}
        ReporterConfig( Ptr<IConfig const> const& _fullConfig, std::ostream& _stream )
        : m_stream( &_stream ), m_fullConfig( _fullConfig ) {}
        std::ostream& stream() const { return *m_stream; }
        Ptr<IConfig const> fullConfig() const { return m_fullConfig; }
    private:
        std::ostream* m_stream;
        Ptr<IConfig const> m_fullConfig;
    };
    struct ReporterPreferences {
        ReporterPreferences()
        : shouldRedirectStdOut( false )
        {}
        // When true the framework captures stdout for the reporter.
        bool shouldRedirectStdOut;
    };
    // An Option that additionally tracks whether its current value has been
    // consumed ('used') since the last assignment/reset.
    template<typename T>
    struct LazyStat : Option<T> {
        LazyStat() : used( false ) {}
        LazyStat& operator=( T const& _value ) {
            Option<T>::operator=( _value );
            used = false;
            return *this;
        }
        void reset() {
            Option<T>::reset();
            used = false;
        }
        bool used;
    };
    struct TestRunInfo {
        TestRunInfo( std::string const& _name ) : name( _name ) {}
        std::string name;
    };
    // Identifies one test group within a run (index out of a total count).
    struct GroupInfo {
        GroupInfo( std::string const& _name,
                   std::size_t _groupIndex,
                   std::size_t _groupsCount )
        : name( _name ),
          groupIndex( _groupIndex ),
          groupsCounts( _groupsCount )
        {}
        std::string name;
        std::size_t groupIndex;
        std::size_t groupsCounts;
    };
    // Payload for assertionEnded(): the result plus any accumulated messages.
    struct AssertionStats {
        AssertionStats( AssertionResult const& _assertionResult,
                        std::vector<MessageInfo> const& _infoMessages,
                        Totals const& _totals )
        : assertionResult( _assertionResult ),
          infoMessages( _infoMessages ),
          totals( _totals )
        {
            if( assertionResult.hasMessage() ) {
                // Copy message into messages list.
                // !TBD This should have been done earlier, somewhere
                MessageBuilder builder( assertionResult.getTestMacroName(), assertionResult.getSourceInfo(), assertionResult.getResultType() );
                builder << assertionResult.getMessage();
                builder.m_info.message = builder.m_stream.str();
                infoMessages.push_back( builder.m_info );
            }
        }
        virtual ~AssertionStats();
#  ifdef CATCH_CONFIG_CPP11_GENERATED_METHODS
        AssertionStats( AssertionStats const& )              = default;
        AssertionStats( AssertionStats && )                  = default;
        AssertionStats& operator = ( AssertionStats const& ) = default;
        AssertionStats& operator = ( AssertionStats && )     = default;
#  endif
        AssertionResult assertionResult;
        std::vector<MessageInfo> infoMessages;
        Totals totals;
    };
    // Payload for sectionEnded().
    struct SectionStats {
        SectionStats(   SectionInfo const& _sectionInfo,
                        Counts const& _assertions,
                        double _durationInSeconds,
                        bool _missingAssertions )
        :   sectionInfo( _sectionInfo ),
            assertions( _assertions ),
            durationInSeconds( _durationInSeconds ),
            missingAssertions( _missingAssertions )
        {}
        virtual ~SectionStats();
#  ifdef CATCH_CONFIG_CPP11_GENERATED_METHODS
        SectionStats( SectionStats const& )              = default;
        SectionStats( SectionStats && )                  = default;
        SectionStats& operator = ( SectionStats const& ) = default;
        SectionStats& operator = ( SectionStats && )     = default;
#  endif
        SectionInfo sectionInfo;
        Counts assertions;
        double durationInSeconds;
        bool missingAssertions;
    };
    // Payload for testCaseEnded(), including captured stdout/stderr.
    struct TestCaseStats {
        TestCaseStats(  TestCaseInfo const& _testInfo,
                        Totals const& _totals,
                        std::string const& _stdOut,
                        std::string const& _stdErr,
                        bool _aborting )
        : testInfo( _testInfo ),
            totals( _totals ),
            stdOut( _stdOut ),
            stdErr( _stdErr ),
            aborting( _aborting )
        {}
        virtual ~TestCaseStats();
#  ifdef CATCH_CONFIG_CPP11_GENERATED_METHODS
        TestCaseStats( TestCaseStats const& )              = default;
        TestCaseStats( TestCaseStats && )                  = default;
        TestCaseStats& operator = ( TestCaseStats const& ) = default;
        TestCaseStats& operator = ( TestCaseStats && )     = default;
#  endif
        TestCaseInfo testInfo;
        Totals totals;
        std::string stdOut;
        std::string stdErr;
        bool aborting;
    };
    // Payload for testGroupEnded(); the single-argument form is used at
    // group start (totals not yet known).
    struct TestGroupStats {
        TestGroupStats( GroupInfo const& _groupInfo,
                        Totals const& _totals,
                        bool _aborting )
        : groupInfo( _groupInfo ),
            totals( _totals ),
            aborting( _aborting )
        {}
        TestGroupStats( GroupInfo const& _groupInfo )
        : groupInfo( _groupInfo ),
            aborting( false )
        {}
        virtual ~TestGroupStats();
#  ifdef CATCH_CONFIG_CPP11_GENERATED_METHODS
        TestGroupStats( TestGroupStats const& )              = default;
        TestGroupStats( TestGroupStats && )                  = default;
        TestGroupStats& operator = ( TestGroupStats const& ) = default;
        TestGroupStats& operator = ( TestGroupStats && )     = default;
#  endif
        GroupInfo groupInfo;
        Totals totals;
        bool aborting;
    };
    // Payload for testRunEnded().
    struct TestRunStats {
        TestRunStats(   TestRunInfo const& _runInfo,
                        Totals const& _totals,
                        bool _aborting )
        : runInfo( _runInfo ),
            totals( _totals ),
            aborting( _aborting )
        {}
        virtual ~TestRunStats();
#  ifndef CATCH_CONFIG_CPP11_GENERATED_METHODS
        // Manual copy ctor for compilers without defaulted special members.
        TestRunStats( TestRunStats const& _other )
        : runInfo( _other.runInfo ),
            totals( _other.totals ),
            aborting( _other.aborting )
        {}
#  else
        TestRunStats( TestRunStats const& )              = default;
        TestRunStats( TestRunStats && )                  = default;
        TestRunStats& operator = ( TestRunStats const& ) = default;
        TestRunStats& operator = ( TestRunStats && )     = default;
#  endif
        TestRunInfo runInfo;
        Totals totals;
        bool aborting;
    };
    class MultipleReporters;
    // Event-driven reporter interface: callbacks fire as a run progresses
    // (run -> group -> test case -> section -> assertion, then unwinding).
    struct IStreamingReporter : IShared {
        virtual ~IStreamingReporter();
        // Implementing class must also provide the following static method:
        // static std::string getDescription();
        virtual ReporterPreferences getPreferences() const = 0;
        virtual void noMatchingTestCases( std::string const& spec ) = 0;
        virtual void testRunStarting( TestRunInfo const& testRunInfo ) = 0;
        virtual void testGroupStarting( GroupInfo const& groupInfo ) = 0;
        virtual void testCaseStarting( TestCaseInfo const& testInfo ) = 0;
        virtual void sectionStarting( SectionInfo const& sectionInfo ) = 0;
        virtual void assertionStarting( AssertionInfo const& assertionInfo ) = 0;
        // The return value indicates if the messages buffer should be cleared:
        virtual bool assertionEnded( AssertionStats const& assertionStats ) = 0;
        virtual void sectionEnded( SectionStats const& sectionStats ) = 0;
        virtual void testCaseEnded( TestCaseStats const& testCaseStats ) = 0;
        virtual void testGroupEnded( TestGroupStats const& testGroupStats ) = 0;
        virtual void testRunEnded( TestRunStats const& testRunStats ) = 0;
        virtual void skipTest( TestCaseInfo const& testInfo ) = 0;
        virtual MultipleReporters* tryAsMulti() { return CATCH_NULL; }
    };
    // Creates reporter instances by name.
    struct IReporterFactory : IShared {
        virtual ~IReporterFactory();
        virtual IStreamingReporter* create( ReporterConfig const& config ) const = 0;
        virtual std::string getDescription() const = 0;
    };
    // Registry of named reporter factories plus always-on listeners.
    struct IReporterRegistry {
        typedef std::map<std::string, Ptr<IReporterFactory> > FactoryMap;
        typedef std::vector<Ptr<IReporterFactory> > Listeners;
        virtual ~IReporterRegistry();
        virtual IStreamingReporter* create( std::string const& name, Ptr<IConfig const> const& config ) const = 0;
        virtual FactoryMap const& getFactories() const = 0;
        virtual Listeners const& getListeners() const = 0;
    };
    // Chains an additional reporter onto an existing one; defined elsewhere.
    Ptr<IStreamingReporter> addReporter( Ptr<IStreamingReporter> const& existingReporter, Ptr<IStreamingReporter> const& additionalReporter );
}
#include <limits>
#include <algorithm>
namespace Catch {
    // Prints every test case matching the configured spec (or all tests when
    // no filters were given) with wrapped names and tags, and returns the
    // number of matches.
    inline std::size_t listTests( Config const& config ) {
        TestSpec testSpec = config.testSpec();
        if( config.testSpec().hasFilters() )
            Catch::cout() << "Matching test cases:\n";
        else {
            Catch::cout() << "All available test cases:\n";
            // No filters supplied: widen the spec to match everything.
            testSpec = TestSpecParser( ITagAliasRegistry::get() ).parse( "*" ).testSpec();
        }
        std::size_t matchedTests = 0;
        TextAttributes nameAttr, tagsAttr;
        nameAttr.setInitialIndent( 2 ).setIndent( 4 );
        tagsAttr.setIndent( 6 );
        std::vector<TestCase> matchedTestCases = filterTests( getAllTestCasesSorted( config ), testSpec, config );
        for( std::vector<TestCase>::const_iterator it = matchedTestCases.begin(), itEnd = matchedTestCases.end();
                it != itEnd;
                ++it ) {
            matchedTests++;
            TestCaseInfo const& testCaseInfo = it->getTestCaseInfo();
            // Hidden test cases are printed in the dimmed secondary colour.
            Colour::Code colour = testCaseInfo.isHidden()
                ? Colour::SecondaryText
                : Colour::None;
            Colour colourGuard( colour );
            Catch::cout() << Text( testCaseInfo.name, nameAttr ) << std::endl;
            if( !testCaseInfo.tags.empty() )
                Catch::cout() << Text( testCaseInfo.tagsAsString, tagsAttr ) << std::endl;
        }
        if( !config.testSpec().hasFilters() )
            Catch::cout() << pluralise( matchedTests, "test case" ) << '\n' << std::endl;
        else
            Catch::cout() << pluralise( matchedTests, "matching test case" ) << '\n' << std::endl;
        return matchedTests;
    }
    // Prints only the names of matching test cases (machine-friendly form)
    // and returns the number printed.
    inline std::size_t listTestsNamesOnly( Config const& config ) {
        TestSpec testSpec = config.testSpec();
        if( !config.testSpec().hasFilters() )
            testSpec = TestSpecParser( ITagAliasRegistry::get() ).parse( "*" ).testSpec();
        std::size_t matchedTests = 0;
        std::vector<TestCase> matchedTestCases = filterTests( getAllTestCasesSorted( config ), testSpec, config );
        for( std::vector<TestCase>::const_iterator it = matchedTestCases.begin(), itEnd = matchedTestCases.end();
                it != itEnd;
                ++it ) {
            matchedTests++;
            TestCaseInfo const& testCaseInfo = it->getTestCaseInfo();
            // Names starting with '#' are quoted so they are not taken as comments.
            if( startsWith( testCaseInfo.name, '#' ) )
               Catch::cout() << '"' << testCaseInfo.name << '"' << std::endl;
            else
               Catch::cout() << testCaseInfo.name << std::endl;
        }
        return matchedTests;
    }
// Aggregated information about one tag: the number of test cases that use
// it and every distinct spelling (case variant) that was encountered.
struct TagInfo {
    TagInfo() : count ( 0 ) {}
    // Record one more usage of the tag, remembering this exact spelling.
    void add( std::string const& spelling ) {
        ++count;
        spellings.insert( spelling );
    }
    // Render all recorded spellings as a single "[a][b]..." string.
    std::string all() const {
        std::string rendered;
        std::set<std::string>::const_iterator spellIt = spellings.begin();
        std::set<std::string>::const_iterator spellEnd = spellings.end();
        for( ; spellIt != spellEnd; ++spellIt )
            rendered += "[" + *spellIt + "]";
        return rendered;
    }
    std::set<std::string> spellings;
    std::size_t count;
};
// Prints every tag used by the matching test cases (all tests when no
// filter was supplied) together with the number of test cases using each.
// Tags are grouped case-insensitively but every encountered spelling is
// shown. Returns the number of distinct tags printed.
inline std::size_t listTags( Config const& config ) {
TestSpec testSpec = config.testSpec();
if( config.testSpec().hasFilters() )
Catch::cout() << "Tags for matching test cases:\n";
else {
Catch::cout() << "All available tags:\n";
testSpec = TestSpecParser( ITagAliasRegistry::get() ).parse( "*" ).testSpec();
}
// Aggregate tag usage keyed by the lower-cased tag name so differently
// cased spellings are counted together.
std::map<std::string, TagInfo> tagCounts;
std::vector<TestCase> matchedTestCases = filterTests( getAllTestCasesSorted( config ), testSpec, config );
for( std::vector<TestCase>::const_iterator it = matchedTestCases.begin(), itEnd = matchedTestCases.end();
it != itEnd;
++it ) {
for( std::set<std::string>::const_iterator tagIt = it->getTestCaseInfo().tags.begin(),
tagItEnd = it->getTestCaseInfo().tags.end();
tagIt != tagItEnd;
++tagIt ) {
std::string tagName = *tagIt;
std::string lcaseTagName = toLower( tagName );
std::map<std::string, TagInfo>::iterator countIt = tagCounts.find( lcaseTagName );
if( countIt == tagCounts.end() )
countIt = tagCounts.insert( std::make_pair( lcaseTagName, TagInfo() ) ).first;
countIt->second.add( tagName );
}
}
// Print one line per tag: the usage count, then all spellings, wrapping
// long spelling lists under the tag column.
for( std::map<std::string, TagInfo>::const_iterator countIt = tagCounts.begin(),
countItEnd = tagCounts.end();
countIt != countItEnd;
++countIt ) {
std::ostringstream oss;
oss << " " << std::setw(2) << countIt->second.count << " ";
Text wrapper( countIt->second.all(), TextAttributes()
.setInitialIndent( 0 )
.setIndent( oss.str().size() )
.setWidth( CATCH_CONFIG_CONSOLE_WIDTH-10 ) );
Catch::cout() << oss.str() << wrapper << '\n';
}
Catch::cout() << pluralise( tagCounts.size(), "tag" ) << '\n' << std::endl;
return tagCounts.size();
}
// Prints every registered reporter with its (wrapped) description.
// Returns the number of reporters listed.
inline std::size_t listReporters( Config const& /*config*/ ) {
    Catch::cout() << "Available reporters:\n";
    IReporterRegistry::FactoryMap const& factories = getRegistryHub().getReporterRegistry().getFactories();
    IReporterRegistry::FactoryMap::const_iterator itFirst = factories.begin(), itLast = factories.end(), it;
    // First pass: find the widest reporter name so descriptions line up.
    std::size_t widestName = 0;
    for( it = itFirst; it != itLast; ++it )
        widestName = (std::max)( widestName, it->first.size() );
    // Second pass: print "name:  description", wrapping long descriptions
    // so continuation lines align under the description column.
    for( it = itFirst; it != itLast; ++it ) {
        Text descriptionText( it->second->getDescription(), TextAttributes()
                                                        .setInitialIndent( 0 )
                                                        .setIndent( 7+widestName )
                                                        .setWidth( CATCH_CONFIG_CONSOLE_WIDTH - widestName-8 ) );
        Catch::cout() << " "
                << it->first
                << ':'
                << std::string( widestName - it->first.size() + 2, ' ' )
                << descriptionText << '\n';
    }
    Catch::cout() << std::endl;
    return factories.size();
}
// Performs whichever listings (tests, names only, tags, reporters) were
// requested on the command line. Returns the combined number of items
// listed, or an empty Option when no listing was requested at all (so the
// caller knows to fall through to actually running tests).
inline Option<std::size_t> list( Config const& config ) {
    Option<std::size_t> totalListed;
    if( config.listTests() )
        totalListed = totalListed.valueOr(0) + listTests( config );
    if( config.listTestNamesOnly() )
        totalListed = totalListed.valueOr(0) + listTestsNamesOnly( config );
    if( config.listTags() )
        totalListed = totalListed.valueOr(0) + listTags( config );
    if( config.listReporters() )
        totalListed = totalListed.valueOr(0) + listReporters( config );
    return totalListed;
}
} // end namespace Catch
// #included from: internal/catch_run_context.hpp
#define TWOBLUECUBES_CATCH_RUNNER_IMPL_HPP_INCLUDED
// #included from: catch_test_case_tracker.hpp
#define TWOBLUECUBES_CATCH_TEST_CASE_TRACKER_HPP_INCLUDED
#include <map>
#include <string>
#include <assert.h>
#include <vector>
#include <iterator>
#include <stdexcept>
namespace Catch {
namespace TestCaseTracking {
// Identifies a tracker node by its user-visible name plus the source
// location it was declared at, so identically named siblings declared on
// different lines remain distinct.
struct NameAndLocation {
std::string name;
SourceLineInfo location;
NameAndLocation( std::string const& _name, SourceLineInfo const& _location )
:   name( _name ),
location( _location )
{}
};
// Interface for one node in the test-case tracking tree. Trackers record
// which sections / generator indices have already run, so repeated passes
// over a test case can visit each leaf exactly once.
struct ITracker : SharedImpl<> {
virtual ~ITracker();
// static queries
virtual NameAndLocation const& nameAndLocation() const = 0;
// dynamic queries
virtual bool isComplete() const = 0; // Successfully completed or failed
virtual bool isSuccessfullyCompleted() const = 0;
virtual bool isOpen() const = 0; // Started but not complete
virtual bool hasChildren() const = 0;
// Only valid for non-root nodes (see TrackerBase::parent's assert)
virtual ITracker& parent() = 0;
// actions
virtual void close() = 0; // Successfully complete
virtual void fail() = 0;
virtual void markAsNeedingAnotherRun() = 0;
virtual void addChild( Ptr<ITracker> const& child ) = 0;
// Returns the child matching name+location, or null if there is none
virtual ITracker* findChild( NameAndLocation const& nameAndLocation ) = 0;
virtual void openChild() = 0;
// Debug/ checking
virtual bool isSectionTracker() const = 0;
virtual bool isIndexTracker() const = 0;
};
// Owns the tracker tree for the currently executing test case and records
// which tracker is "current" as execution descends into sections. A test
// case run consists of one or more cycles; each cycle re-walks the tree
// (startCycle) until every tracker reports completion.
class TrackerContext {
enum RunState {
NotStarted,
Executing,
CompletedCycle
};
Ptr<ITracker> m_rootTracker;
ITracker* m_currentTracker;
RunState m_runState;
public:
// Singleton accessor.
static TrackerContext& instance() {
static TrackerContext s_instance;
return s_instance;
}
TrackerContext()
:   m_currentTracker( CATCH_NULL ),
m_runState( NotStarted )
{}
// Creates a fresh root tracker; defined below, after SectionTracker.
ITracker& startRun();
// Discards the whole tree and returns to the initial state.
void endRun() {
m_rootTracker.reset();
m_currentTracker = CATCH_NULL;
m_runState = NotStarted;
}
// Begins another pass over the tree, starting again from the root.
void startCycle() {
m_currentTracker = m_rootTracker.get();
m_runState = Executing;
}
void completeCycle() {
m_runState = CompletedCycle;
}
bool completedCycle() const {
return m_runState == CompletedCycle;
}
// Precondition: a run has been started (m_currentTracker is non-null).
ITracker& currentTracker() {
return *m_currentTracker;
}
void setCurrentTracker( ITracker* tracker ) {
m_currentTracker = tracker;
}
};
// Common base for concrete trackers. Stores the node's identity, its place
// in the tracker tree, and a per-cycle state machine (CycleState), and
// implements the open/close/fail transitions shared by section and index
// trackers.
class TrackerBase : public ITracker {
protected:
    enum CycleState {
        NotStarted,
        Executing,
        ExecutingChildren,
        NeedsAnotherRun,
        CompletedSuccessfully,
        Failed
    };
    // Unary predicate: does a child tracker match a given name + location?
    class TrackerHasName {
        NameAndLocation m_nameAndLocation;
    public:
        TrackerHasName( NameAndLocation const& nameAndLocation ) : m_nameAndLocation( nameAndLocation ) {}
        bool operator ()( Ptr<ITracker> const& tracker ) {
            return
                tracker->nameAndLocation().name == m_nameAndLocation.name &&
                tracker->nameAndLocation().location == m_nameAndLocation.location;
        }
    };
    typedef std::vector<Ptr<ITracker> > Children;
    NameAndLocation m_nameAndLocation;
    TrackerContext& m_ctx;
    ITracker* m_parent; // null only for the root tracker
    Children m_children;
    CycleState m_runState;
public:
    TrackerBase( NameAndLocation const& nameAndLocation, TrackerContext& ctx, ITracker* parent )
    :   m_nameAndLocation( nameAndLocation ),
        m_ctx( ctx ),
        m_parent( parent ),
        m_runState( NotStarted )
    {}
    virtual ~TrackerBase();
    virtual NameAndLocation const& nameAndLocation() const CATCH_OVERRIDE {
        return m_nameAndLocation;
    }
    virtual bool isComplete() const CATCH_OVERRIDE {
        return m_runState == CompletedSuccessfully || m_runState == Failed;
    }
    virtual bool isSuccessfullyCompleted() const CATCH_OVERRIDE {
        return m_runState == CompletedSuccessfully;
    }
    virtual bool isOpen() const CATCH_OVERRIDE {
        return m_runState != NotStarted && !isComplete();
    }
    virtual bool hasChildren() const CATCH_OVERRIDE {
        return !m_children.empty();
    }
    virtual void addChild( Ptr<ITracker> const& child ) CATCH_OVERRIDE {
        m_children.push_back( child );
    }
    virtual ITracker* findChild( NameAndLocation const& nameAndLocation ) CATCH_OVERRIDE {
        Children::const_iterator it = std::find_if( m_children.begin(), m_children.end(), TrackerHasName( nameAndLocation ) );
        return( it != m_children.end() )
            ? it->get()
            : CATCH_NULL;
    }
    virtual ITracker& parent() CATCH_OVERRIDE {
        assert( m_parent ); // Should always be non-null except for root
        return *m_parent;
    }
    // Propagates "a child is executing" up the ancestor chain.
    virtual void openChild() CATCH_OVERRIDE {
        if( m_runState != ExecutingChildren ) {
            m_runState = ExecutingChildren;
            if( m_parent )
                m_parent->openChild();
        }
    }
    virtual bool isSectionTracker() const CATCH_OVERRIDE { return false; }
    virtual bool isIndexTracker() const CATCH_OVERRIDE { return false; }
    // Marks this node as executing and makes it the context's current tracker.
    void open() {
        m_runState = Executing;
        moveToThis();
        if( m_parent )
            m_parent->openChild();
    }
    virtual void close() CATCH_OVERRIDE {
        // Close any still open children (e.g. generators)
        while( &m_ctx.currentTracker() != this )
            m_ctx.currentTracker().close();
        switch( m_runState ) {
            case NotStarted:
            case CompletedSuccessfully:
            case Failed:
                throw std::logic_error( "Illogical state" );
            case NeedsAnotherRun:
                break;
            case Executing:
                m_runState = CompletedSuccessfully;
                break;
            case ExecutingChildren:
                if( m_children.empty() || m_children.back()->isComplete() )
                    m_runState = CompletedSuccessfully;
                break;
            default:
                throw std::logic_error( "Unexpected state" );
        }
        moveToParent();
        m_ctx.completeCycle();
    }
    virtual void fail() CATCH_OVERRIDE {
        m_runState = Failed;
        if( m_parent )
            m_parent->markAsNeedingAnotherRun();
        moveToParent();
        m_ctx.completeCycle();
    }
    virtual void markAsNeedingAnotherRun() CATCH_OVERRIDE {
        m_runState = NeedsAnotherRun;
    }
private:
    void moveToParent() {
        assert( m_parent );
        m_ctx.setCurrentTracker( m_parent );
    }
    void moveToThis() {
        m_ctx.setCurrentTracker( this );
    }
};
// Tracker for SECTIONs. Also carries the remaining section-filter path
// (from Config::getSectionsToRun) that decides whether a section may open
// this cycle.
class SectionTracker : public TrackerBase {
    std::vector<std::string> m_filters;
public:
    SectionTracker( NameAndLocation const& nameAndLocation, TrackerContext& ctx, ITracker* parent )
    :   TrackerBase( nameAndLocation, ctx, parent )
    {
        if( parent ) {
            // Inherit the remaining filter path from the closest enclosing
            // section tracker
            while( !parent->isSectionTracker() )
                parent = &parent->parent();
            SectionTracker& parentSection = static_cast<SectionTracker&>( *parent );
            addNextFilters( parentSection.m_filters );
        }
    }
    virtual ~SectionTracker();
    virtual bool isSectionTracker() const CATCH_OVERRIDE { return true; }
    // Finds (or creates) the child section tracker for nameAndLocation
    // under the context's current tracker and tries to open it for this
    // cycle.
    static SectionTracker& acquire( TrackerContext& ctx, NameAndLocation const& nameAndLocation ) {
        SectionTracker* section = CATCH_NULL;
        ITracker& currentTracker = ctx.currentTracker();
        if( ITracker* childTracker = currentTracker.findChild( nameAndLocation ) ) {
            assert( childTracker );
            assert( childTracker->isSectionTracker() );
            section = static_cast<SectionTracker*>( childTracker );
        }
        else {
            section = new SectionTracker( nameAndLocation, ctx, &currentTracker );
            currentTracker.addChild( section );
        }
        if( !ctx.completedCycle() )
            section->tryOpen();
        return *section;
    }
    // Opens the section unless it is already complete or filtered out.
    void tryOpen() {
        if( !isComplete() && (m_filters.empty() || m_filters[0].empty() ||  m_filters[0] == m_nameAndLocation.name ) )
            open();
    }
    // Installs the user-supplied filter path on the root tracker.
    void addInitialFilters( std::vector<std::string> const& filters ) {
        if( !filters.empty() ) {
            m_filters.push_back(""); // Root - should never be consulted
            m_filters.push_back(""); // Test Case - not a section filter
            std::copy( filters.begin(), filters.end(), std::back_inserter( m_filters ) );
        }
    }
    // Passes the tail of the parent's filter path down to this section.
    void addNextFilters( std::vector<std::string> const& filters ) {
        if( filters.size() > 1 )
            std::copy( filters.begin()+1, filters.end(), std::back_inserter( m_filters ) );
    }
};
// Tracker for generator-style loops: tracks an index that advances by one
// at the start of each fresh iteration until it reaches the given size.
class IndexTracker : public TrackerBase {
    int m_size;
    int m_index;
public:
    IndexTracker( NameAndLocation const& nameAndLocation, TrackerContext& ctx, ITracker* parent, int size )
    :   TrackerBase( nameAndLocation, ctx, parent ),
        m_size( size ),
        m_index( -1 )
    {}
    virtual ~IndexTracker();
    virtual bool isIndexTracker() const CATCH_OVERRIDE { return true; }
    // Finds (or creates) the index tracker for nameAndLocation under the
    // context's current tracker and opens it, advancing the index when a
    // fresh iteration is starting (rather than resuming children).
    static IndexTracker& acquire( TrackerContext& ctx, NameAndLocation const& nameAndLocation, int size ) {
        IndexTracker* tracker = CATCH_NULL;
        ITracker& currentTracker = ctx.currentTracker();
        if( ITracker* childTracker = currentTracker.findChild( nameAndLocation ) ) {
            assert( childTracker );
            assert( childTracker->isIndexTracker() );
            tracker = static_cast<IndexTracker*>( childTracker );
        }
        else {
            tracker = new IndexTracker( nameAndLocation, ctx, &currentTracker, size );
            currentTracker.addChild( tracker );
        }
        if( !ctx.completedCycle() && !tracker->isComplete() ) {
            if( tracker->m_runState != ExecutingChildren && tracker->m_runState != NeedsAnotherRun )
                tracker->moveNext();
            tracker->open();
        }
        return *tracker;
    }
    int index() const { return m_index; }
    // Advances to the next index value, discarding children from the
    // previous iteration.
    void moveNext() {
        m_index++;
        m_children.clear();
    }
    virtual void close() CATCH_OVERRIDE {
        TrackerBase::close();
        // Re-arm for the next iteration while indices remain
        if( m_runState == CompletedSuccessfully && m_index < m_size-1 )
            m_runState = Executing;
    }
};
// Creates a fresh root tracker for a new test-case run. The current
// tracker is cleared; startCycle() must be called before walking the tree.
inline ITracker& TrackerContext::startRun() {
m_rootTracker = new SectionTracker( NameAndLocation( "{root}", CATCH_INTERNAL_LINEINFO ), *this, CATCH_NULL );
m_currentTracker = CATCH_NULL;
m_runState = Executing;
return *m_rootTracker;
}
} // namespace TestCaseTracking
using TestCaseTracking::ITracker;
using TestCaseTracking::TrackerContext;
using TestCaseTracking::SectionTracker;
using TestCaseTracking::IndexTracker;
} // namespace Catch
// #included from: catch_fatal_condition.hpp
#define TWOBLUECUBES_CATCH_FATAL_CONDITION_H_INCLUDED
namespace Catch {
// Report the error condition
// Forwards a fatal condition (signal / SEH exception) to the active result
// capture so it can be reported before the process terminates.
inline void reportFatal( std::string const& message ) {
    Catch::getCurrentContext().getResultCapture()->handleFatalErrorCondition( message );
}
} // namespace Catch
#if defined ( CATCH_PLATFORM_WINDOWS ) /////////////////////////////////////////
// #included from: catch_windows_h_proxy.h
#define TWOBLUECUBES_CATCH_WINDOWS_H_PROXY_H_INCLUDED
#ifdef CATCH_DEFINES_NOMINMAX
# define NOMINMAX
#endif
#ifdef CATCH_DEFINES_WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN
#endif
#ifdef __AFXDLL
#include <AfxWin.h>
#else
#include <windows.h>
#endif
#ifdef CATCH_DEFINES_NOMINMAX
# undef NOMINMAX
#endif
#ifdef CATCH_DEFINES_WIN32_LEAN_AND_MEAN
# undef WIN32_LEAN_AND_MEAN
#endif
# if !defined ( CATCH_CONFIG_WINDOWS_SEH )
namespace Catch {
// No-op stand-in used when CATCH_CONFIG_WINDOWS_SEH is disabled: no
// handlers are installed, so there is nothing to reset.
struct FatalConditionHandler {
void reset() {}
};
}
# else // CATCH_CONFIG_WINDOWS_SEH is defined
namespace Catch {
// Maps a Windows structured-exception code to a POSIX-style signal name
// used in the fatal-error report.
struct SignalDefs { DWORD id; const char* name; };
extern SignalDefs signalDefs[];
// There is no 1-1 mapping between signals and windows exceptions.
// Windows can easily distinguish between SO and SigSegV,
// but SigInt, SigTerm, etc are handled differently.
SignalDefs signalDefs[] = {
{ EXCEPTION_ILLEGAL_INSTRUCTION, "SIGILL - Illegal instruction signal" },
{ EXCEPTION_STACK_OVERFLOW, "SIGSEGV - Stack overflow" },
{ EXCEPTION_ACCESS_VIOLATION, "SIGSEGV - Segmentation violation signal" },
{ EXCEPTION_INT_DIVIDE_BY_ZERO, "Divide by zero error" },
};
// Installs a vectored exception handler for the duration of a test case so
// fatal conditions can be reported through Catch before the process dies.
// All state is static because the handler callback carries no user data.
struct FatalConditionHandler {
// Reports recognised exception codes via reportFatal; anything else is
// passed along the handler chain untouched.
static LONG CALLBACK handleVectoredException(PEXCEPTION_POINTERS ExceptionInfo) {
for (int i = 0; i < sizeof(signalDefs) / sizeof(SignalDefs); ++i) {
if (ExceptionInfo->ExceptionRecord->ExceptionCode == signalDefs[i].id) {
reset();
reportFatal(signalDefs[i].name);
}
}
// If its not an exception we care about, pass it along.
// This stops us from eating debugger breaks etc.
return EXCEPTION_CONTINUE_SEARCH;
}
FatalConditionHandler() {
isSet = true;
// 32k seems enough for Catch to handle stack overflow,
// but the value was found experimentally, so there is no strong guarantee
guaranteeSize = 32 * 1024;
exceptionHandlerHandle = CATCH_NULL;
// Register as first handler in current chain
exceptionHandlerHandle = AddVectoredExceptionHandler(1, handleVectoredException);
// Pass in guarantee size to be filled
SetThreadStackGuarantee(&guaranteeSize);
}
// Idempotent teardown: safe to call from both the handler and ~FatalConditionHandler.
static void reset() {
if (isSet) {
// Unregister handler and restore the old guarantee
RemoveVectoredExceptionHandler(exceptionHandlerHandle);
SetThreadStackGuarantee(&guaranteeSize);
exceptionHandlerHandle = CATCH_NULL;
isSet = false;
}
}
~FatalConditionHandler() {
reset();
}
private:
static bool isSet;
static ULONG guaranteeSize;
static PVOID exceptionHandlerHandle;
};
bool FatalConditionHandler::isSet = false;
ULONG FatalConditionHandler::guaranteeSize = 0;
PVOID FatalConditionHandler::exceptionHandlerHandle = CATCH_NULL;
} // namespace Catch
# endif // CATCH_CONFIG_WINDOWS_SEH
#else // Not Windows - assumed to be POSIX compatible //////////////////////////
#include <signal.h>
namespace Catch {
// Signal number / human-readable name pair used for fatal-signal reporting.
struct SignalDefs {
int id;
const char* name;
};
extern SignalDefs signalDefs[];
// The signals Catch intercepts while a test case is running; each is
// reported as a fatal error with the message below.
SignalDefs signalDefs[] = {
{ SIGINT, "SIGINT - Terminal interrupt signal" },
{ SIGILL, "SIGILL - Illegal instruction signal" },
{ SIGFPE, "SIGFPE - Floating point error signal" },
{ SIGSEGV, "SIGSEGV - Segmentation violation signal" },
{ SIGTERM, "SIGTERM - Termination request signal" },
{ SIGABRT, "SIGABRT - Abort (abnormal termination) signal" }
};
// Installs handlers for the signals above for the duration of a test case
// and restores the previous handlers afterwards. Handlers run on a private
// alternate stack (SA_ONSTACK) so a stack-overflow SIGSEGV can still be
// handled. State is static because signal handlers carry no user data.
struct FatalConditionHandler {
static bool isSet;
static struct sigaction oldSigActions [sizeof(signalDefs)/sizeof(SignalDefs)];
static stack_t oldSigStack;
static char altStackMem[SIGSTKSZ];
// Restores the previous handlers, reports the signal through Catch, then
// re-raises it so the default disposition still applies.
static void handleSignal( int sig ) {
std::string name = "<unknown signal>";
for (std::size_t i = 0; i < sizeof(signalDefs) / sizeof(SignalDefs); ++i) {
SignalDefs &def = signalDefs[i];
if (sig == def.id) {
name = def.name;
break;
}
}
reset();
reportFatal(name);
raise( sig );
}
FatalConditionHandler() {
isSet = true;
stack_t sigStack;
sigStack.ss_sp = altStackMem;
sigStack.ss_size = SIGSTKSZ;
sigStack.ss_flags = 0;
sigaltstack(&sigStack, &oldSigStack);
struct sigaction sa = { 0 };
sa.sa_handler = handleSignal;
sa.sa_flags = SA_ONSTACK;
for (std::size_t i = 0; i < sizeof(signalDefs)/sizeof(SignalDefs); ++i) {
sigaction(signalDefs[i].id, &sa, &oldSigActions[i]);
}
}
~FatalConditionHandler() {
reset();
}
// Idempotent teardown: puts back the handlers and signal stack that were
// active before this object was constructed.
static void reset() {
if( isSet ) {
// Set signals back to previous values -- hopefully nobody overwrote them in the meantime
for( std::size_t i = 0; i < sizeof(signalDefs)/sizeof(SignalDefs); ++i ) {
sigaction(signalDefs[i].id, &oldSigActions[i], CATCH_NULL);
}
// Return the old stack
sigaltstack(&oldSigStack, CATCH_NULL);
isSet = false;
}
}
};
bool FatalConditionHandler::isSet = false;
struct sigaction FatalConditionHandler::oldSigActions[sizeof(signalDefs)/sizeof(SignalDefs)] = {};
stack_t FatalConditionHandler::oldSigStack = {};
char FatalConditionHandler::altStackMem[SIGSTKSZ] = {};
} // namespace Catch
#endif // not Windows
#include <set>
#include <string>
namespace Catch {
// RAII helper that captures everything written to a stream. While alive,
// output to the stream is diverted into a private buffer; on destruction
// the captured text is appended to the target string and the stream's
// original buffer is restored.
class StreamRedirect {
public:
    StreamRedirect( std::ostream& stream, std::string& targetString )
    :   m_redirectedStream( stream ),
        m_savedBuf( stream.rdbuf() ),
        m_dest( targetString )
    {
        // Divert all subsequent writes into our private string buffer
        stream.rdbuf( m_capture.rdbuf() );
    }
    ~StreamRedirect() {
        m_dest += m_capture.str();
        m_redirectedStream.rdbuf( m_savedBuf );
    }
private:
    std::ostream& m_redirectedStream;
    std::streambuf* m_savedBuf;
    std::ostringstream m_capture;
    std::string& m_dest;
};
///////////////////////////////////////////////////////////////////////////
class RunContext : public IResultCapture, public IRunner {
RunContext( RunContext const& );
void operator =( RunContext const& );
public:
explicit RunContext( Ptr<IConfig const> const& _config, Ptr<IStreamingReporter> const& reporter )
: m_runInfo( _config->name() ),
m_context( getCurrentMutableContext() ),
m_activeTestCase( CATCH_NULL ),
m_config( _config ),
m_reporter( reporter )
{
m_context.setRunner( this );
m_context.setConfig( m_config );
m_context.setResultCapture( this );
m_reporter->testRunStarting( m_runInfo );
}
virtual ~RunContext() {
m_reporter->testRunEnded( TestRunStats( m_runInfo, m_totals, aborting() ) );
}
void testGroupStarting( std::string const& testSpec, std::size_t groupIndex, std::size_t groupsCount ) {
m_reporter->testGroupStarting( GroupInfo( testSpec, groupIndex, groupsCount ) );
}
void testGroupEnded( std::string const& testSpec, Totals const& totals, std::size_t groupIndex, std::size_t groupsCount ) {
m_reporter->testGroupEnded( TestGroupStats( GroupInfo( testSpec, groupIndex, groupsCount ), totals, aborting() ) );
}
Totals runTest( TestCase const& testCase ) {
Totals prevTotals = m_totals;
std::string redirectedCout;
std::string redirectedCerr;
TestCaseInfo testInfo = testCase.getTestCaseInfo();
m_reporter->testCaseStarting( testInfo );
m_activeTestCase = &testCase;
do {
ITracker& rootTracker = m_trackerContext.startRun();
assert( rootTracker.isSectionTracker() );
static_cast<SectionTracker&>( rootTracker ).addInitialFilters( m_config->getSectionsToRun() );
do {
m_trackerContext.startCycle();
m_testCaseTracker = &SectionTracker::acquire( m_trackerContext, TestCaseTracking::NameAndLocation( testInfo.name, testInfo.lineInfo ) );
runCurrentTest( redirectedCout, redirectedCerr );
}
while( !m_testCaseTracker->isSuccessfullyCompleted() && !aborting() );
}
// !TBD: deprecated - this will be replaced by indexed trackers
while( getCurrentContext().advanceGeneratorsForCurrentTest() && !aborting() );
Totals deltaTotals = m_totals.delta( prevTotals );
if( testInfo.expectedToFail() && deltaTotals.testCases.passed > 0 ) {
deltaTotals.assertions.failed++;
deltaTotals.testCases.passed--;
deltaTotals.testCases.failed++;
}
m_totals.testCases += deltaTotals.testCases;
m_reporter->testCaseEnded( TestCaseStats( testInfo,
deltaTotals,
redirectedCout,
redirectedCerr,
aborting() ) );
m_activeTestCase = CATCH_NULL;
m_testCaseTracker = CATCH_NULL;
return deltaTotals;
}
Ptr<IConfig const> config() const {
return m_config;
}
private: // IResultCapture
virtual void assertionEnded( AssertionResult const& result ) {
if( result.getResultType() == ResultWas::Ok ) {
m_totals.assertions.passed++;
}
else if( !result.isOk() ) {
m_totals.assertions.failed++;
}
if( m_reporter->assertionEnded( AssertionStats( result, m_messages, m_totals ) ) )
m_messages.clear();
// Reset working state
m_lastAssertionInfo = AssertionInfo( std::string(), m_lastAssertionInfo.lineInfo, "{Unknown expression after the reported line}" , m_lastAssertionInfo.resultDisposition );
m_lastResult = result;
}
virtual bool sectionStarted (
SectionInfo const& sectionInfo,
Counts& assertions
)
{
ITracker& sectionTracker = SectionTracker::acquire( m_trackerContext, TestCaseTracking::NameAndLocation( sectionInfo.name, sectionInfo.lineInfo ) );
if( !sectionTracker.isOpen() )
return false;
m_activeSections.push_back( §ionTracker );
m_lastAssertionInfo.lineInfo = sectionInfo.lineInfo;
m_reporter->sectionStarting( sectionInfo );
assertions = m_totals.assertions;
return true;
}
bool testForMissingAssertions( Counts& assertions ) {
if( assertions.total() != 0 )
return false;
if( !m_config->warnAboutMissingAssertions() )
return false;
if( m_trackerContext.currentTracker().hasChildren() )
return false;
m_totals.assertions.failed++;
assertions.failed++;
return true;
}
virtual void sectionEnded( SectionEndInfo const& endInfo ) {
Counts assertions = m_totals.assertions - endInfo.prevAssertions;
bool missingAssertions = testForMissingAssertions( assertions );
if( !m_activeSections.empty() ) {
m_activeSections.back()->close();
m_activeSections.pop_back();
}
m_reporter->sectionEnded( SectionStats( endInfo.sectionInfo, assertions, endInfo.durationInSeconds, missingAssertions ) );
m_messages.clear();
}
virtual void sectionEndedEarly( SectionEndInfo const& endInfo ) {
if( m_unfinishedSections.empty() )
m_activeSections.back()->fail();
else
m_activeSections.back()->close();
m_activeSections.pop_back();
m_unfinishedSections.push_back( endInfo );
}
virtual void pushScopedMessage( MessageInfo const& message ) {
m_messages.push_back( message );
}
virtual void popScopedMessage( MessageInfo const& message ) {
m_messages.erase( std::remove( m_messages.begin(), m_messages.end(), message ), m_messages.end() );
}
virtual std::string getCurrentTestName() const {
return m_activeTestCase
? m_activeTestCase->getTestCaseInfo().name
: std::string();
}
virtual const AssertionResult* getLastResult() const {
return &m_lastResult;
}
virtual void handleFatalErrorCondition( std::string const& message ) {
ResultBuilder resultBuilder = makeUnexpectedResultBuilder();
resultBuilder.setResultType( ResultWas::FatalErrorCondition );
resultBuilder << message;
resultBuilder.captureExpression();
handleUnfinishedSections();
// Recreate section for test case (as we will lose the one that was in scope)
TestCaseInfo const& testCaseInfo = m_activeTestCase->getTestCaseInfo();
SectionInfo testCaseSection( testCaseInfo.lineInfo, testCaseInfo.name, testCaseInfo.description );
Counts assertions;
assertions.failed = 1;
SectionStats testCaseSectionStats( testCaseSection, assertions, 0, false );
m_reporter->sectionEnded( testCaseSectionStats );
TestCaseInfo testInfo = m_activeTestCase->getTestCaseInfo();
Totals deltaTotals;
deltaTotals.testCases.failed = 1;
m_reporter->testCaseEnded( TestCaseStats( testInfo,
deltaTotals,
std::string(),
std::string(),
false ) );
m_totals.testCases.failed++;
testGroupEnded( std::string(), m_totals, 1, 1 );
m_reporter->testRunEnded( TestRunStats( m_runInfo, m_totals, false ) );
}
public:
// !TBD We need to do this another way!
bool aborting() const {
return m_totals.assertions.failed == static_cast<std::size_t>( m_config->abortAfter() );
}
private:
void runCurrentTest( std::string& redirectedCout, std::string& redirectedCerr ) {
TestCaseInfo const& testCaseInfo = m_activeTestCase->getTestCaseInfo();
SectionInfo testCaseSection( testCaseInfo.lineInfo, testCaseInfo.name, testCaseInfo.description );
m_reporter->sectionStarting( testCaseSection );
Counts prevAssertions = m_totals.assertions;
double duration = 0;
try {
m_lastAssertionInfo = AssertionInfo( "TEST_CASE", testCaseInfo.lineInfo, std::string(), ResultDisposition::Normal );
seedRng( *m_config );
Timer timer;
timer.start();
if( m_reporter->getPreferences().shouldRedirectStdOut ) {
StreamRedirect coutRedir( Catch::cout(), redirectedCout );
StreamRedirect cerrRedir( Catch::cerr(), redirectedCerr );
invokeActiveTestCase();
}
else {
invokeActiveTestCase();
}
duration = timer.getElapsedSeconds();
}
catch( TestFailureException& ) {
// This just means the test was aborted due to failure
}
catch(...) {
makeUnexpectedResultBuilder().useActiveException();
}
m_testCaseTracker->close();
handleUnfinishedSections();
m_messages.clear();
Counts assertions = m_totals.assertions - prevAssertions;
bool missingAssertions = testForMissingAssertions( assertions );
if( testCaseInfo.okToFail() ) {
std::swap( assertions.failedButOk, assertions.failed );
m_totals.assertions.failed -= assertions.failedButOk;
m_totals.assertions.failedButOk += assertions.failedButOk;
}
SectionStats testCaseSectionStats( testCaseSection, assertions, duration, missingAssertions );
m_reporter->sectionEnded( testCaseSectionStats );
}
void invokeActiveTestCase() {
FatalConditionHandler fatalConditionHandler; // Handle signals
m_activeTestCase->invoke();
fatalConditionHandler.reset();
}
private:
ResultBuilder makeUnexpectedResultBuilder() const {
return ResultBuilder( m_lastAssertionInfo.macroName.c_str(),
m_lastAssertionInfo.lineInfo,
m_lastAssertionInfo.capturedExpression.c_str(),
m_lastAssertionInfo.resultDisposition );
}
void handleUnfinishedSections() {
// If sections ended prematurely due to an exception we stored their
// infos here so we can tear them down outside the unwind process.
for( std::vector<SectionEndInfo>::const_reverse_iterator it = m_unfinishedSections.rbegin(),
itEnd = m_unfinishedSections.rend();
it != itEnd;
++it )
sectionEnded( *it );
m_unfinishedSections.clear();
}
TestRunInfo m_runInfo;
IMutableContext& m_context;
TestCase const* m_activeTestCase;
ITracker* m_testCaseTracker;
ITracker* m_currentSectionTracker;
AssertionResult m_lastResult;
Ptr<IConfig const> m_config;
Totals m_totals;
Ptr<IStreamingReporter> m_reporter;
std::vector<MessageInfo> m_messages;
AssertionInfo m_lastAssertionInfo;
std::vector<SectionEndInfo> m_unfinishedSections;
std::vector<ITracker*> m_activeSections;
TrackerContext m_trackerContext;
};
// Returns the currently registered result capture; throws if none exists
// (i.e. no test run is in progress).
IResultCapture& getResultCapture() {
    IResultCapture* capture = getCurrentContext().getResultCapture();
    if( !capture )
        throw std::logic_error( "No result capture instance" );
    return *capture;
}
} // end namespace Catch
// #included from: internal/catch_version.h
#define TWOBLUECUBES_CATCH_VERSION_H_INCLUDED
namespace Catch {
// Versioning information
// Describes the Catch library version (printed by the version banner via
// the stream operator declared below).
struct Version {
Version( unsigned int _majorVersion,
unsigned int _minorVersion,
unsigned int _patchNumber,
std::string const& _branchName,
unsigned int _buildNumber );
unsigned int const majorVersion;
unsigned int const minorVersion;
unsigned int const patchNumber;
// buildNumber is only used if branchName is not null
std::string const branchName;
unsigned int const buildNumber;
friend std::ostream& operator << ( std::ostream& os, Version const& version );
private:
// Assignment is private and unimplemented: members are const
void operator=( Version const& );
};
extern Version libraryVersion;
}
#include <fstream>
#include <stdlib.h>
#include <limits>
namespace Catch {
// Instantiates the named reporter from the registry.
// Throws std::domain_error when no reporter with that name is registered.
Ptr<IStreamingReporter> createReporter( std::string const& reporterName, Ptr<Config> const& config ) {
    Ptr<IStreamingReporter> reporter = getRegistryHub().getReporterRegistry().create( reporterName, config.get() );
    if( reporter )
        return reporter;
    std::ostringstream errMsg;
    errMsg << "No reporter registered with name: '" << reporterName << "'";
    throw std::domain_error( errMsg.str() );
}
// Builds the reporter chain requested in the config, defaulting to the
// "console" reporter when none was specified.
Ptr<IStreamingReporter> makeReporter( Ptr<Config> const& config ) {
    std::vector<std::string> reporterNames = config->getReporterNames();
    if( reporterNames.empty() )
        reporterNames.push_back( "console" );
    Ptr<IStreamingReporter> chain;
    for( std::vector<std::string>::const_iterator nameIt = reporterNames.begin(), nameEnd = reporterNames.end(); nameIt != nameEnd; ++nameIt )
        chain = addReporter( chain, createReporter( *nameIt, config ) );
    return chain;
}
// Adds every globally registered listener to the given reporter chain and
// returns the extended chain.
Ptr<IStreamingReporter> addListeners( Ptr<IConfig const> const& config, Ptr<IStreamingReporter> reporters ) {
    IReporterRegistry::Listeners listeners = getRegistryHub().getReporterRegistry().getListeners();
    IReporterRegistry::Listeners::const_iterator listenerIt = listeners.begin();
    IReporterRegistry::Listeners::const_iterator listenerEnd = listeners.end();
    for( ; listenerIt != listenerEnd; ++listenerIt )
        reporters = addReporter( reporters, (*listenerIt)->create( ReporterConfig( config ) ) );
    return reporters;
}
// Runs every test case matching the configured test spec (all non-hidden
// tests when no filter was given) and returns the accumulated totals.
// Tests that don't match, or that come after an abort, are reported via
// skipTest().
Totals runTests( Ptr<Config> const& config ) {
Ptr<IConfig const> iconfig = config.get();
Ptr<IStreamingReporter> reporter = makeReporter( config );
reporter = addListeners( iconfig, reporter );
RunContext context( iconfig, reporter );
Totals totals;
context.testGroupStarting( config->name(), 1, 1 );
TestSpec testSpec = config->testSpec();
if( !testSpec.hasFilters() )
testSpec = TestSpecParser( ITagAliasRegistry::get() ).parse( "~[.]" ).testSpec(); // All not hidden tests
std::vector<TestCase> const& allTestCases = getAllTestCasesSorted( *iconfig );
for( std::vector<TestCase>::const_iterator it = allTestCases.begin(), itEnd = allTestCases.end();
it != itEnd;
++it ) {
if( !context.aborting() && matchTest( *it, testSpec, *iconfig ) )
totals += context.runTest( *it );
else
reporter->skipTest( *it );
}
context.testGroupEnded( iconfig->name(), totals, 1, 1 );
return totals;
}
// Tags every registered test case with "#<source-filename>" (directory and
// extension stripped) so tests can be selected by the file they live in.
void applyFilenamesAsTags( IConfig const& config ) {
    std::vector<TestCase> const& tests = getAllTestCasesSorted( config );
    for( std::size_t i = 0; i < tests.size(); ++i ) {
        // The registry hands out const refs; tags are rewritten in place
        TestCase& test = const_cast<TestCase&>( tests[i] );
        std::set<std::string> tags = test.tags;
        std::string stem = test.lineInfo.file;
        // Strip any leading directory components (either separator style)
        std::string::size_type slashPos = stem.find_last_of( "\\/" );
        if( slashPos != std::string::npos )
            stem = stem.substr( slashPos+1 );
        // Strip the file extension
        std::string::size_type dotPos = stem.find_last_of( "." );
        if( dotPos != std::string::npos )
            stem = stem.substr( 0, dotPos );
        tags.insert( "#" + stem );
        setTags( test, tags );
    }
}
// The public entry point for applications embedding Catch: parses the
// command line into a Config and runs listing and/or the selected tests.
// Only a single instance may be created over the lifetime of the process.
class Session : NonCopyable {
static bool alreadyInstantiated;
public:
struct OnUnusedOptions { enum DoWhat { Ignore, Fail }; };
Session()
: m_cli( makeCommandLineParser() ) {
if( alreadyInstantiated ) {
std::string msg = "Only one instance of Catch::Session can ever be used";
Catch::cerr() << msg << std::endl;
throw std::logic_error( msg );
}
alreadyInstantiated = true;
}
~Session() {
Catch::cleanUp();
}
// Prints the version banner and command-line usage to stdout.
void showHelp( std::string const& processName ) {
Catch::cout() << "\nCatch v" << libraryVersion << "\n";
m_cli.usage( Catch::cout(), processName );
Catch::cout() << "For more detail usage please see the project docs\n" << std::endl;
}
// Parses argv into the held ConfigData. Returns 0 on success; on a parse
// error it prints the error plus usage and returns INT_MAX.
int applyCommandLine( int argc, char const* const* const argv, OnUnusedOptions::DoWhat unusedOptionBehaviour = OnUnusedOptions::Fail ) {
try {
m_cli.setThrowOnUnrecognisedTokens( unusedOptionBehaviour == OnUnusedOptions::Fail );
m_unusedTokens = m_cli.parseInto( Clara::argsToVector( argc, argv ), m_configData );
if( m_configData.showHelp )
showHelp( m_configData.processName );
m_config.reset(); // invalidate any previously built Config
}
catch( std::exception& ex ) {
{
Colour colourGuard( Colour::Red );
Catch::cerr()
<< "\nError(s) in input:\n"
<< Text( ex.what(), TextAttributes().setIndent(2) )
<< "\n\n";
}
m_cli.usage( Catch::cout(), m_configData.processName );
return (std::numeric_limits<int>::max)();
}
return 0;
}
// Replaces the current configuration wholesale, discarding any cached Config.
void useConfigData( ConfigData const& _configData ) {
m_configData = _configData;
m_config.reset();
}
// Convenience overload: applyCommandLine() followed by run().
int run( int argc, char const* const* const argv ) {
int returnCode = applyCommandLine( argc, argv );
if( returnCode == 0 )
returnCode = run();
return returnCode;
}
// Runs listing or tests per the current config. Returns the listed-item
// count for listing requests, the number of failed assertions for a test
// run, or INT_MAX if an exception escaped.
int run() {
if( m_configData.showHelp )
return 0;
try
{
config(); // Force config to be constructed
seedRng( *m_config );
if( m_configData.filenamesAsTags )
applyFilenamesAsTags( *m_config );
// Handle list request
if( Option<std::size_t> listed = list( config() ) )
return static_cast<int>( *listed );
return static_cast<int>( runTests( m_config ).assertions.failed );
}
catch( std::exception& ex ) {
Catch::cerr() << ex.what() << std::endl;
return (std::numeric_limits<int>::max)();
}
}
Clara::CommandLine<ConfigData> const& cli() const {
return m_cli;
}
std::vector<Clara::Parser::Token> const& unusedTokens() const {
return m_unusedTokens;
}
ConfigData& configData() {
return m_configData;
}
// Lazily builds (and caches) the Config from the current ConfigData.
Config& config() {
if( !m_config )
m_config = new Config( m_configData );
return *m_config;
}
private:
Clara::CommandLine<ConfigData> m_cli;
std::vector<Clara::Parser::Token> m_unusedTokens;
ConfigData m_configData;
Ptr<Config> m_config;
};
bool Session::alreadyInstantiated = false;
} // end namespace Catch
// #included from: catch_registry_hub.hpp
#define TWOBLUECUBES_CATCH_REGISTRY_HUB_HPP_INCLUDED
// #included from: catch_test_case_registry_impl.hpp
#define TWOBLUECUBES_CATCH_TEST_CASE_REGISTRY_IMPL_HPP_INCLUDED
#include <vector>
#include <set>
#include <sstream>
#include <algorithm>
namespace Catch {
struct RandomNumberGenerator {
typedef std::ptrdiff_t result_type;
result_type operator()( result_type n ) const { return std::rand() % n; }
#ifdef CATCH_CONFIG_CPP11_SHUFFLE
static constexpr result_type min() { return 0; }
static constexpr result_type max() { return 1000000; }
result_type operator()() const { return std::rand() % max(); }
#endif
template<typename V>
static void shuffle( V& vector ) {
RandomNumberGenerator rng;
#ifdef CATCH_CONFIG_CPP11_SHUFFLE
std::shuffle( vector.begin(), vector.end(), rng );
#else
std::random_shuffle( vector.begin(), vector.end(), rng );
#endif
}
};
    // Returns a copy of the test cases ordered per the configured run order:
    // lexicographic sort, seeded random shuffle, or declaration order (no-op).
    inline std::vector<TestCase> sortTests( IConfig const& config, std::vector<TestCase> const& unsortedTestCases ) {
        std::vector<TestCase> sorted = unsortedTestCases;
        switch( config.runOrder() ) {
            case RunTests::InLexicographicalOrder:
                std::sort( sorted.begin(), sorted.end() );
                break;
            case RunTests::InRandomOrder:
                {
                    // Seed first so the shuffle is reproducible for a given --rng-seed.
                    seedRng( config );
                    RandomNumberGenerator::shuffle( sorted );
                }
                break;
            case RunTests::InDeclarationOrder:
                // already in declaration order
                break;
        }
        return sorted;
    }
bool matchTest( TestCase const& testCase, TestSpec const& testSpec, IConfig const& config ) {
return testSpec.matches( testCase ) && ( config.allowThrows() || !testCase.throws() );
}
    // Throws std::runtime_error (with a red message citing both source
    // locations) if two registered test cases compare equal.
    void enforceNoDuplicateTestCases( std::vector<TestCase> const& functions ) {
        std::set<TestCase> seenFunctions;
        for( std::vector<TestCase>::const_iterator it = functions.begin(), itEnd = functions.end();
            it != itEnd;
            ++it ) {
            // insert() reports a duplicate via the bool of the returned pair.
            std::pair<std::set<TestCase>::const_iterator, bool> prev = seenFunctions.insert( *it );
            if( !prev.second ) {
                std::ostringstream ss;
                ss << Colour( Colour::Red )
                << "error: TEST_CASE( \"" << it->name << "\" ) already defined.\n"
                << "\tFirst seen at " << prev.first->getTestCaseInfo().lineInfo << '\n'
                << "\tRedefined at " << it->getTestCaseInfo().lineInfo << std::endl;
                throw std::runtime_error(ss.str());
            }
        }
    }
std::vector<TestCase> filterTests( std::vector<TestCase> const& testCases, TestSpec const& testSpec, IConfig const& config ) {
std::vector<TestCase> filtered;
filtered.reserve( testCases.size() );
for( std::vector<TestCase>::const_iterator it = testCases.begin(), itEnd = testCases.end();
it != itEnd;
++it )
if( matchTest( *it, testSpec, config ) )
filtered.push_back( *it );
return filtered;
}
std::vector<TestCase> const& getAllTestCasesSorted( IConfig const& config ) {
return getRegistryHub().getTestCaseRegistry().getAllTestsSorted( config );
}
    // Default ITestCaseRegistry: stores test cases as they are registered and
    // serves a lazily (re)sorted, duplicate-checked view of them.
    class TestRegistry : public ITestCaseRegistry {
    public:
        TestRegistry()
        : m_currentSortOrder( RunTests::InDeclarationOrder ),
            m_unnamedCount( 0 )
        {}
        virtual ~TestRegistry();
        virtual void registerTest( TestCase const& testCase ) {
            std::string name = testCase.getTestCaseInfo().name;
            if( name.empty() ) {
                // Synthesise a unique name for anonymous test cases, then
                // re-register under that name.
                std::ostringstream oss;
                oss << "Anonymous test case " << ++m_unnamedCount;
                return registerTest( testCase.withName( oss.str() ) );
            }
            m_functions.push_back( testCase );
        }
        virtual std::vector<TestCase> const& getAllTests() const {
            return m_functions;
        }
        // Duplicate check runs on the first call; the sorted cache is rebuilt
        // whenever the requested run order changes (members are mutable so
        // this const accessor can cache).
        virtual std::vector<TestCase> const& getAllTestsSorted( IConfig const& config ) const {
            if( m_sortedFunctions.empty() )
                enforceNoDuplicateTestCases( m_functions );
            if( m_currentSortOrder != config.runOrder() || m_sortedFunctions.empty() ) {
                m_sortedFunctions = sortTests( config, m_functions );
                m_currentSortOrder = config.runOrder();
            }
            return m_sortedFunctions;
        }
    private:
        std::vector<TestCase> m_functions;
        mutable RunTests::InWhatOrder m_currentSortOrder;
        mutable std::vector<TestCase> m_sortedFunctions;
        size_t m_unnamedCount;
        std::ios_base::Init m_ostreamInit; // Forces cout/ cerr to be initialised
    };
///////////////////////////////////////////////////////////////////////////
    // ITestCase wrapper around a plain function pointer (TEST_CASE bodies).
    class FreeFunctionTestCase : public SharedImpl<ITestCase> {
    public:
        FreeFunctionTestCase( TestFunction fun ) : m_fun( fun ) {}
        virtual void invoke() const {
            m_fun();
        }
    private:
        virtual ~FreeFunctionTestCase();
        TestFunction m_fun;
    };
inline std::string extractClassName( std::string const& classOrQualifiedMethodName ) {
std::string className = classOrQualifiedMethodName;
if( startsWith( className, '&' ) )
{
std::size_t lastColons = className.rfind( "::" );
std::size_t penultimateColons = className.rfind( "::", lastColons-1 );
if( penultimateColons == std::string::npos )
penultimateColons = 1;
className = className.substr( penultimateColons, lastColons-penultimateColons );
}
return className;
}
    // Registers a (possibly class-based) test case with the global registry,
    // deriving the class name from the qualified method name.
    void registerTestCase
        ( ITestCase* testCase,
        char const* classOrQualifiedMethodName,
        NameAndDesc const& nameAndDesc,
        SourceLineInfo const& lineInfo ) {
        getMutableRegistryHub().registerTest
            ( makeTestCase
                ( testCase,
                extractClassName( classOrQualifiedMethodName ),
                nameAndDesc.name,
                nameAndDesc.description,
                lineInfo ) );
    }
    // Registers a free-function test case (no associated class).
    void registerTestCaseFunction
        ( TestFunction function,
        SourceLineInfo const& lineInfo,
        NameAndDesc const& nameAndDesc ) {
        registerTestCase( new FreeFunctionTestCase( function ), "", nameAndDesc, lineInfo );
    }
///////////////////////////////////////////////////////////////////////////
    // AutoReg: constructing one (as the TEST_CASE macros do at namespace
    // scope) registers the given function as a test case.
    AutoReg::AutoReg
        ( TestFunction function,
        SourceLineInfo const& lineInfo,
        NameAndDesc const& nameAndDesc ) {
        registerTestCaseFunction( function, lineInfo, nameAndDesc );
    }
    AutoReg::~AutoReg() {}
} // end namespace Catch
// #included from: catch_reporter_registry.hpp
#define TWOBLUECUBES_CATCH_REPORTER_REGISTRY_HPP_INCLUDED
#include <map>
namespace Catch {
    // Maps reporter names to factories and keeps the list of event listeners.
    class ReporterRegistry : public IReporterRegistry {
    public:
        virtual ~ReporterRegistry() CATCH_OVERRIDE {}
        // Returns a new reporter for the given name, or null if unknown.
        virtual IStreamingReporter* create( std::string const& name, Ptr<IConfig const> const& config ) const CATCH_OVERRIDE {
            FactoryMap::const_iterator it = m_factories.find( name );
            if( it == m_factories.end() )
                return CATCH_NULL;
            return it->second->create( ReporterConfig( config ) );
        }
        void registerReporter( std::string const& name, Ptr<IReporterFactory> const& factory ) {
            m_factories.insert( std::make_pair( name, factory ) );
        }
        void registerListener( Ptr<IReporterFactory> const& factory ) {
            m_listeners.push_back( factory );
        }
        virtual FactoryMap const& getFactories() const CATCH_OVERRIDE {
            return m_factories;
        }
        virtual Listeners const& getListeners() const CATCH_OVERRIDE {
            return m_listeners;
        }
    private:
        FactoryMap m_factories;
        Listeners m_listeners;
    };
}
// #included from: catch_exception_translator_registry.hpp
#define TWOBLUECUBES_CATCH_EXCEPTION_TRANSLATOR_REGISTRY_HPP_INCLUDED
#ifdef __OBJC__
#import "Foundation/Foundation.h"
#endif
namespace Catch {
    // Owns the registered exception translators and turns the currently
    // active exception into a message string.
    class ExceptionTranslatorRegistry : public IExceptionTranslatorRegistry {
    public:
        ~ExceptionTranslatorRegistry() {
            // Translators are registered as raw pointers; this registry owns
            // and deletes them.
            deleteAll( m_translators );
        }
        virtual void registerTranslator( const IExceptionTranslator* translator ) {
            m_translators.push_back( translator );
        }
        // Must be called from within a catch handler (an exception must be
        // active); falls back through progressively more generic handlers.
        virtual std::string translateActiveException() const {
            try {
#ifdef __OBJC__
                // In Objective-C try objective-c exceptions first
                @try {
                    return tryTranslators();
                }
                @catch (NSException *exception) {
                    return Catch::toString( [exception description] );
                }
#else
                return tryTranslators();
#endif
            }
            catch( TestFailureException& ) {
                // Deliberate test failures propagate untouched.
                throw;
            }
            catch( std::exception& ex ) {
                return ex.what();
            }
            catch( std::string& msg ) {
                return msg;
            }
            catch( const char* msg ) {
                return msg;
            }
            catch(...) {
                return "Unknown exception";
            }
        }
        std::string tryTranslators() const {
            if( m_translators.empty() )
                // Rethrow the active exception so the catch chain above
                // classifies it.
                throw;
            else
                return m_translators[0]->translate( m_translators.begin()+1, m_translators.end() );
        }
    private:
        std::vector<const IExceptionTranslator*> m_translators;
    };
}
namespace Catch {
    namespace {
        // Aggregates the three registries behind the IRegistryHub /
        // IMutableRegistryHub interfaces. Copying is disabled.
        class RegistryHub : public IRegistryHub, public IMutableRegistryHub {
            RegistryHub( RegistryHub const& );
            void operator=( RegistryHub const& );
        public: // IRegistryHub
            RegistryHub() {
            }
            virtual IReporterRegistry const& getReporterRegistry() const CATCH_OVERRIDE {
                return m_reporterRegistry;
            }
            virtual ITestCaseRegistry const& getTestCaseRegistry() const CATCH_OVERRIDE {
                return m_testCaseRegistry;
            }
            virtual IExceptionTranslatorRegistry& getExceptionTranslatorRegistry() CATCH_OVERRIDE {
                return m_exceptionTranslatorRegistry;
            }
        public: // IMutableRegistryHub
            virtual void registerReporter( std::string const& name, Ptr<IReporterFactory> const& factory ) CATCH_OVERRIDE {
                m_reporterRegistry.registerReporter( name, factory );
            }
            virtual void registerListener( Ptr<IReporterFactory> const& factory ) CATCH_OVERRIDE {
                m_reporterRegistry.registerListener( factory );
            }
            virtual void registerTest( TestCase const& testInfo ) CATCH_OVERRIDE {
                m_testCaseRegistry.registerTest( testInfo );
            }
            virtual void registerTranslator( const IExceptionTranslator* translator ) CATCH_OVERRIDE {
                m_exceptionTranslatorRegistry.registerTranslator( translator );
            }
        private:
            TestRegistry m_testCaseRegistry;
            ReporterRegistry m_reporterRegistry;
            ExceptionTranslatorRegistry m_exceptionTranslatorRegistry;
        };
        // Single, global, instance
        // Heap-allocated singleton; freed and nulled by cleanUp() below.
        inline RegistryHub*& getTheRegistryHub() {
            static RegistryHub* theRegistryHub = CATCH_NULL;
            if( !theRegistryHub )
                theRegistryHub = new RegistryHub();
            return theRegistryHub;
        }
    }
    IRegistryHub& getRegistryHub() {
        return *getTheRegistryHub();
    }
    IMutableRegistryHub& getMutableRegistryHub() {
        return *getTheRegistryHub();
    }
    // Tears down the global registry hub and the current context.
    void cleanUp() {
        delete getTheRegistryHub();
        getTheRegistryHub() = CATCH_NULL;
        cleanUpContext();
    }
    std::string translateActiveException() {
        return getRegistryHub().getExceptionTranslatorRegistry().translateActiveException();
    }
} // end namespace Catch
// #included from: catch_notimplemented_exception.hpp
#define TWOBLUECUBES_CATCH_NOTIMPLEMENTED_EXCEPTION_HPP_INCLUDED
#include <sstream>
namespace Catch {
    // Builds the what() message from the source location at construction.
    NotImplementedException::NotImplementedException( SourceLineInfo const& lineInfo )
    :   m_lineInfo( lineInfo ) {
        std::ostringstream oss;
        oss << lineInfo << ": function ";
        oss << "not implemented";
        m_what = oss.str();
    }
    const char* NotImplementedException::what() const CATCH_NOEXCEPT {
        return m_what.c_str();
    }
} // end namespace Catch
// #included from: catch_context_impl.hpp
#define TWOBLUECUBES_CATCH_CONTEXT_IMPL_HPP_INCLUDED
// #included from: catch_stream.hpp
#define TWOBLUECUBES_CATCH_STREAM_HPP_INCLUDED
#include <stdexcept>
#include <cstdio>
#include <iostream>
namespace Catch {
    // Fixed-size streambuf that forwards buffered characters to a WriterF
    // functor on sync/overflow (used e.g. for the debug-console stream).
    template<typename WriterF, size_t bufferSize=256>
    class StreamBufImpl : public StreamBufBase {
        char data[bufferSize];
        WriterF m_writer;
    public:
        StreamBufImpl() {
            setp( data, data + sizeof(data) );
        }
        ~StreamBufImpl() CATCH_NOEXCEPT {
            // Flush anything still buffered.
            sync();
        }
    private:
        int overflow( int c ) {
            sync();
            if( c != EOF ) {
                // With a zero-capacity put area, forward the char directly;
                // otherwise re-buffer it.
                if( pbase() == epptr() )
                    m_writer( std::string( 1, static_cast<char>( c ) ) );
                else
                    sputc( static_cast<char>( c ) );
            }
            return 0;
        }
        int sync() {
            if( pbase() != pptr() ) {
                // Hand the buffered range to the writer, then reset the
                // put area.
                m_writer( std::string( pbase(), static_cast<std::string::size_type>( pptr() - pbase() ) ) );
                setp( pbase(), epptr() );
            }
            return 0;
        }
    };
    ///////////////////////////////////////////////////////////////////////////
    // Opens the named file for writing; throws std::domain_error on failure.
    FileStream::FileStream( std::string const& filename ) {
        m_ofs.open( filename.c_str() );
        if( m_ofs.fail() ) {
            std::ostringstream oss;
            oss << "Unable to open file: '" << filename << '\'';
            throw std::domain_error( oss.str() );
        }
    }
    std::ostream& FileStream::stream() const {
        return m_ofs;
    }
    // Writer functor that routes text to the platform debug console.
    struct OutputDebugWriter {
        void operator()( std::string const&str ) {
            writeToDebugConsole( str );
        }
    };
    DebugOutStream::DebugOutStream()
    :   m_streamBuf( new StreamBufImpl<OutputDebugWriter>() ),
        m_os( m_streamBuf.get() )
    {}
    std::ostream& DebugOutStream::stream() const {
        return m_os;
    }
    // Store the streambuf from cout up-front because
    // cout may get redirected when running tests
    CoutStream::CoutStream()
    :   m_os( Catch::cout().rdbuf() )
    {}
    std::ostream& CoutStream::stream() const {
        return m_os;
    }
#ifndef CATCH_CONFIG_NOSTDOUT // If you #define this you must implement these functions
    std::ostream& cout() {
        return std::cout;
    }
    std::ostream& cerr() {
        return std::cerr;
    }
#endif
}
namespace Catch {
    // Process-wide mutable context: current config, runner, result capture
    // and per-test generator state. Accessed via the free functions below.
    class Context : public IMutableContext {
        Context() : m_config( CATCH_NULL ), m_runner( CATCH_NULL ), m_resultCapture( CATCH_NULL ) {}
        Context( Context const& );
        void operator=( Context const& );
    public:
        virtual ~Context() {
            deleteAllValues( m_generatorsByTestName );
        }
    public: // IContext
        virtual IResultCapture* getResultCapture() {
            return m_resultCapture;
        }
        virtual IRunner* getRunner() {
            return m_runner;
        }
        // Current index of the generator identified by fileInfo (creating it
        // with the given size on first use).
        virtual size_t getGeneratorIndex( std::string const& fileInfo, size_t totalSize ) {
            return getGeneratorsForCurrentTest()
            .getGeneratorInfo( fileInfo, totalSize )
            .getCurrentIndex();
        }
        virtual bool advanceGeneratorsForCurrentTest() {
            IGeneratorsForTest* generators = findGeneratorsForCurrentTest();
            return generators && generators->moveNext();
        }
        virtual Ptr<IConfig const> getConfig() const {
            return m_config;
        }
    public: // IMutableContext
        virtual void setResultCapture( IResultCapture* resultCapture ) {
            m_resultCapture = resultCapture;
        }
        virtual void setRunner( IRunner* runner ) {
            m_runner = runner;
        }
        virtual void setConfig( Ptr<IConfig const> const& config ) {
            m_config = config;
        }
        friend IMutableContext& getCurrentMutableContext();
    private:
        // Looks up the generator set for the currently running test, or null.
        IGeneratorsForTest* findGeneratorsForCurrentTest() {
            std::string testName = getResultCapture()->getCurrentTestName();
            std::map<std::string, IGeneratorsForTest*>::const_iterator it =
                m_generatorsByTestName.find( testName );
            return it != m_generatorsByTestName.end()
                ? it->second
                : CATCH_NULL;
        }
        // As above, but creates (and stores) the set on first use.
        IGeneratorsForTest& getGeneratorsForCurrentTest() {
            IGeneratorsForTest* generators = findGeneratorsForCurrentTest();
            if( !generators ) {
                std::string testName = getResultCapture()->getCurrentTestName();
                generators = createGeneratorsForTest();
                m_generatorsByTestName.insert( std::make_pair( testName, generators ) );
            }
            return *generators;
        }
    private:
        Ptr<IConfig const> m_config;
        IRunner* m_runner;
        IResultCapture* m_resultCapture;
        std::map<std::string, IGeneratorsForTest*> m_generatorsByTestName;
    };
    namespace {
        // Lazily created global context; freed by cleanUpContext().
        Context* currentContext = CATCH_NULL;
    }
    IMutableContext& getCurrentMutableContext() {
        if( !currentContext )
            currentContext = new Context();
        return *currentContext;
    }
    IContext& getCurrentContext() {
        return getCurrentMutableContext();
    }
    void cleanUpContext() {
        delete currentContext;
        currentContext = CATCH_NULL;
    }
}
// #included from: catch_console_colour_impl.hpp
#define TWOBLUECUBES_CATCH_CONSOLE_COLOUR_IMPL_HPP_INCLUDED
namespace Catch {
    namespace {
        // Internal interface implemented per platform for console colouring.
        struct IColourImpl {
            virtual ~IColourImpl() {}
            virtual void use( Colour::Code _colourCode ) = 0;
        };
        // No-op implementation used when colour output is disabled.
        struct NoColourImpl : IColourImpl {
            void use( Colour::Code ) {}
            static IColourImpl* instance() {
                static NoColourImpl s_instance;
                return &s_instance;
            }
        };
    } // anon namespace
} // namespace Catch
#if !defined( CATCH_CONFIG_COLOUR_NONE ) && !defined( CATCH_CONFIG_COLOUR_WINDOWS ) && !defined( CATCH_CONFIG_COLOUR_ANSI )
# ifdef CATCH_PLATFORM_WINDOWS
# define CATCH_CONFIG_COLOUR_WINDOWS
# else
# define CATCH_CONFIG_COLOUR_ANSI
# endif
#endif
#if defined ( CATCH_CONFIG_COLOUR_WINDOWS ) /////////////////////////////////////////
namespace Catch {
namespace {
    // Console colouring via the Win32 console API. The original background
    // and foreground attributes are captured at construction and restored
    // for Colour::None.
    class Win32ColourImpl : public IColourImpl {
    public:
        Win32ColourImpl() : stdoutHandle( GetStdHandle(STD_OUTPUT_HANDLE) )
        {
            CONSOLE_SCREEN_BUFFER_INFO csbiInfo;
            GetConsoleScreenBufferInfo( stdoutHandle, &csbiInfo );
            originalForegroundAttributes = csbiInfo.wAttributes & ~( BACKGROUND_GREEN | BACKGROUND_RED | BACKGROUND_BLUE | BACKGROUND_INTENSITY );
            originalBackgroundAttributes = csbiInfo.wAttributes & ~( FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_BLUE | FOREGROUND_INTENSITY );
        }
        virtual void use( Colour::Code _colourCode ) {
            switch( _colourCode ) {
                case Colour::None:      return setTextAttribute( originalForegroundAttributes );
                case Colour::White:     return setTextAttribute( FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_BLUE );
                case Colour::Red:       return setTextAttribute( FOREGROUND_RED );
                case Colour::Green:     return setTextAttribute( FOREGROUND_GREEN );
                case Colour::Blue:      return setTextAttribute( FOREGROUND_BLUE );
                case Colour::Cyan:      return setTextAttribute( FOREGROUND_BLUE | FOREGROUND_GREEN );
                case Colour::Yellow:    return setTextAttribute( FOREGROUND_RED | FOREGROUND_GREEN );
                case Colour::Grey:      return setTextAttribute( 0 );
                case Colour::LightGrey:     return setTextAttribute( FOREGROUND_INTENSITY );
                case Colour::BrightRed:     return setTextAttribute( FOREGROUND_INTENSITY | FOREGROUND_RED );
                case Colour::BrightGreen:   return setTextAttribute( FOREGROUND_INTENSITY | FOREGROUND_GREEN );
                case Colour::BrightWhite:   return setTextAttribute( FOREGROUND_INTENSITY | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_BLUE );
                case Colour::Bright: throw std::logic_error( "not a colour" );
            }
        }
    private:
        void setTextAttribute( WORD _textAttribute ) {
            // Always reapply the original background bits alongside the new
            // foreground colour.
            SetConsoleTextAttribute( stdoutHandle, _textAttribute | originalBackgroundAttributes );
        }
        HANDLE stdoutHandle;
        WORD originalForegroundAttributes;
        WORD originalBackgroundAttributes;
    };
    // Resolves UseColour::Auto to yes/no (colour off under a debugger) and
    // returns the matching implementation.
    IColourImpl* platformColourInstance() {
        static Win32ColourImpl s_instance;
        Ptr<IConfig const> config = getCurrentContext().getConfig();
        UseColour::YesOrNo colourMode = config
            ? config->useColour()
            : UseColour::Auto;
        if( colourMode == UseColour::Auto )
            colourMode = !isDebuggerActive()
                ? UseColour::Yes
                : UseColour::No;
        return colourMode == UseColour::Yes
            ? &s_instance
            : NoColourImpl::instance();
    }
} // end anon namespace
} // end namespace Catch
#elif defined( CATCH_CONFIG_COLOUR_ANSI ) //////////////////////////////////////
#include <unistd.h>
namespace Catch {
namespace {
    // use POSIX/ ANSI console terminal codes
    // Thanks to Adam Strzelecki for original contribution
    // (http://github.com/nanoant)
    // https://github.com/philsquared/Catch/pull/131
    class PosixColourImpl : public IColourImpl {
    public:
        virtual void use( Colour::Code _colourCode ) {
            switch( _colourCode ) {
                case Colour::None:
                case Colour::White:     return setColour( "[0m" );
                case Colour::Red:       return setColour( "[0;31m" );
                case Colour::Green:     return setColour( "[0;32m" );
                case Colour::Blue:      return setColour( "[0;34m" );
                case Colour::Cyan:      return setColour( "[0;36m" );
                case Colour::Yellow:    return setColour( "[0;33m" );
                case Colour::Grey:      return setColour( "[1;30m" );
                case Colour::LightGrey:     return setColour( "[0;37m" );
                case Colour::BrightRed:     return setColour( "[1;31m" );
                case Colour::BrightGreen:   return setColour( "[1;32m" );
                case Colour::BrightWhite:   return setColour( "[1;37m" );
                case Colour::Bright: throw std::logic_error( "not a colour" );
            }
        }
        static IColourImpl* instance() {
            static PosixColourImpl s_instance;
            return &s_instance;
        }
    private:
        void setColour( const char* _escapeCode ) {
            // Prefix with ESC to form the full ANSI escape sequence.
            Catch::cout() << '\033' << _escapeCode;
        }
    };
    // Resolves UseColour::Auto to yes/no (requires a tty and no debugger)
    // and returns the matching implementation.
    IColourImpl* platformColourInstance() {
        Ptr<IConfig const> config = getCurrentContext().getConfig();
        UseColour::YesOrNo colourMode = config
            ? config->useColour()
            : UseColour::Auto;
        if( colourMode == UseColour::Auto )
            colourMode = (!isDebuggerActive() && isatty(STDOUT_FILENO) )
                ? UseColour::Yes
                : UseColour::No;
        return colourMode == UseColour::Yes
            ? PosixColourImpl::instance()
            : NoColourImpl::instance();
    }
} // end anon namespace
} // end namespace Catch
#else // not Windows or ANSI ///////////////////////////////////////////////
namespace Catch {
    // Fallback when neither Windows nor ANSI colouring is available: always
    // the no-op implementation.
    static IColourImpl* platformColourInstance() { return NoColourImpl::instance(); }
} // end namespace Catch
#endif // Windows/ ANSI/ None
namespace Catch {
    // RAII colour guard: applies the colour on construction, resets on
    // destruction unless responsibility was transferred by copying.
    Colour::Colour( Code _colourCode ) : m_moved( false ) { use( _colourCode ); }
    // Copying marks the source as moved-from so only the copy resets.
    Colour::Colour( Colour const& _other ) : m_moved( false ) { const_cast<Colour&>( _other ).m_moved = true; }
    Colour::~Colour(){ if( !m_moved ) use( None ); }
    void Colour::use( Code _colourCode ) {
        // Platform implementation is chosen once and cached.
        static IColourImpl* impl = platformColourInstance();
        impl->use( _colourCode );
    }
} // end namespace Catch
// #included from: catch_generators_impl.hpp
#define TWOBLUECUBES_CATCH_GENERATORS_IMPL_HPP_INCLUDED
#include <vector>
#include <string>
#include <map>
namespace Catch {
    // Tracks the current position of a single generator; moveNext() wraps
    // back to 0 and reports false when the end is reached.
    struct GeneratorInfo : IGeneratorInfo {
        GeneratorInfo( std::size_t size )
        :   m_size( size ),
            m_currentIndex( 0 )
        {}
        bool moveNext() {
            if( ++m_currentIndex == m_size ) {
                m_currentIndex = 0;
                return false;
            }
            return true;
        }
        std::size_t getCurrentIndex() const {
            return m_currentIndex;
        }
        std::size_t m_size;
        std::size_t m_currentIndex;
    };
    ///////////////////////////////////////////////////////////////////////////
    // The set of generators owned by one test case, keyed by file/line id.
    class GeneratorsForTest : public IGeneratorsForTest {
    public:
        ~GeneratorsForTest() {
            deleteAll( m_generatorsInOrder );
        }
        // Returns the generator for this id, creating it on first use.
        IGeneratorInfo& getGeneratorInfo( std::string const& fileInfo, std::size_t size ) {
            std::map<std::string, IGeneratorInfo*>::const_iterator it = m_generatorsByName.find( fileInfo );
            if( it == m_generatorsByName.end() ) {
                IGeneratorInfo* info = new GeneratorInfo( size );
                m_generatorsByName.insert( std::make_pair( fileInfo, info ) );
                m_generatorsInOrder.push_back( info );
                return *info;
            }
            return *it->second;
        }
        // Odometer-style advance: a generator that wraps (returns false)
        // carries over into advancing the next one; false once all wrapped.
        bool moveNext() {
            std::vector<IGeneratorInfo*>::const_iterator it = m_generatorsInOrder.begin();
            std::vector<IGeneratorInfo*>::const_iterator itEnd = m_generatorsInOrder.end();
            for(; it != itEnd; ++it ) {
                if( (*it)->moveNext() )
                    return true;
            }
            return false;
        }
    private:
        std::map<std::string, IGeneratorInfo*> m_generatorsByName;
        std::vector<IGeneratorInfo*> m_generatorsInOrder;
    };
    IGeneratorsForTest* createGeneratorsForTest()
    {
        return new GeneratorsForTest();
    }
} // end namespace Catch
// #included from: catch_assertionresult.hpp
#define TWOBLUECUBES_CATCH_ASSERTIONRESULT_HPP_INCLUDED
namespace Catch {
    AssertionInfo::AssertionInfo(   std::string const& _macroName,
                                    SourceLineInfo const& _lineInfo,
                                    std::string const& _capturedExpression,
                                    ResultDisposition::Flags _resultDisposition )
    :   macroName( _macroName ),
        lineInfo( _lineInfo ),
        capturedExpression( _capturedExpression ),
        resultDisposition( _resultDisposition )
    {}
    AssertionResult::AssertionResult() {}
    AssertionResult::AssertionResult( AssertionInfo const& info, AssertionResultData const& data )
    :   m_info( info ),
        m_resultData( data )
    {}
    AssertionResult::~AssertionResult() {}
    // Result was a success
    bool AssertionResult::succeeded() const {
        return Catch::isOk( m_resultData.resultType );
    }
    // Result was a success, or failure is suppressed
    bool AssertionResult::isOk() const {
        return Catch::isOk( m_resultData.resultType ) || shouldSuppressFailure( m_info.resultDisposition );
    }
    ResultWas::OfType AssertionResult::getResultType() const {
        return m_resultData.resultType;
    }
    bool AssertionResult::hasExpression() const {
        return !m_info.capturedExpression.empty();
    }
    bool AssertionResult::hasMessage() const {
        return !m_resultData.message.empty();
    }
    // The captured expression, with a leading '!' for CHECK_FALSE-style tests.
    std::string AssertionResult::getExpression() const {
        if( isFalseTest( m_info.resultDisposition ) )
            return '!' + m_info.capturedExpression;
        else
            return m_info.capturedExpression;
    }
    // The expression as written in the source, wrapped in its macro name.
    std::string AssertionResult::getExpressionInMacro() const {
        if( m_info.macroName.empty() )
            return m_info.capturedExpression;
        else
            return m_info.macroName + "( " + m_info.capturedExpression + " )";
    }
    bool AssertionResult::hasExpandedExpression() const {
        return hasExpression() && getExpandedExpression() != getExpression();
    }
    // The expression with argument values substituted in.
    std::string AssertionResult::getExpandedExpression() const {
        return m_resultData.reconstructExpression();
    }
    std::string AssertionResult::getMessage() const {
        return m_resultData.message;
    }
    SourceLineInfo AssertionResult::getSourceInfo() const {
        return m_info.lineInfo;
    }
    std::string AssertionResult::getTestMacroName() const {
        return m_info.macroName;
    }
    void AssertionResult::discardDecomposedExpression() const {
        m_resultData.decomposedExpression = CATCH_NULL;
    }
    void AssertionResult::expandDecomposedExpression() const {
        m_resultData.reconstructExpression();
    }
} // end namespace Catch
// #included from: catch_test_case_info.hpp
#define TWOBLUECUBES_CATCH_TEST_CASE_INFO_HPP_INCLUDED
#include <cctype>
namespace Catch {
    // Maps a (lower-cased) tag to its special meaning, if any.
    inline TestCaseInfo::SpecialProperties parseSpecialTag( std::string const& tag ) {
        if( startsWith( tag, '.' ) ||
            tag == "hide" ||
            tag == "!hide" )
            return TestCaseInfo::IsHidden;
        else if( tag == "!throws" )
            return TestCaseInfo::Throws;
        else if( tag == "!shouldfail" )
            return TestCaseInfo::ShouldFail;
        else if( tag == "!mayfail" )
            return TestCaseInfo::MayFail;
        else if( tag == "!nonportable" )
            return TestCaseInfo::NonPortable;
        else
            return TestCaseInfo::None;
    }
    // Tags starting with a non-alphanumeric character are reserved for Catch
    // unless they parse as one of the known special tags above.
    inline bool isReservedTag( std::string const& tag ) {
        return parseSpecialTag( tag ) == TestCaseInfo::None && tag.size() > 0 && !std::isalnum( static_cast<unsigned char>(tag[0]) );
    }
    // Prints an error (with source location) and exits the process if the
    // tag is reserved.
    inline void enforceNotReservedTag( std::string const& tag, SourceLineInfo const& _lineInfo ) {
        if( isReservedTag( tag ) ) {
            {
                Colour colourGuard( Colour::Red );
                Catch::cerr()
                    << "Tag name [" << tag << "] not allowed.\n"
                    << "Tag names starting with non alpha-numeric characters are reserved\n";
            }
            {
                Colour colourGuard( Colour::FileName );
                Catch::cerr() << _lineInfo << std::endl;
            }
            exit(1);
        }
    }
    // Builds a TestCase from the raw registration data, splitting the
    // description string into free text and "[tag]" tokens.
    TestCase makeTestCase(  ITestCase* _testCase,
                            std::string const& _className,
                            std::string const& _name,
                            std::string const& _descOrTags,
                            SourceLineInfo const& _lineInfo )
    {
        bool isHidden( startsWith( _name, "./" ) ); // Legacy support
        // Parse out tags
        std::set<std::string> tags;
        std::string desc, tag;
        bool inTag = false;
        for( std::size_t i = 0; i < _descOrTags.size(); ++i ) {
            char c = _descOrTags[i];
            if( !inTag ) {
                if( c == '[' )
                    inTag = true;
                else
                    desc += c;
            }
            else {
                if( c == ']' ) {
                    TestCaseInfo::SpecialProperties prop = parseSpecialTag( tag );
                    if( prop == TestCaseInfo::IsHidden )
                        isHidden = true;
                    else if( prop == TestCaseInfo::None )
                        enforceNotReservedTag( tag, _lineInfo );
                    tags.insert( tag );
                    tag.clear();
                    inTag = false;
                }
                else
                    tag += c;
            }
        }
        if( isHidden ) {
            // Hidden tests get both spellings of the hidden tag.
            tags.insert( "hide" );
            tags.insert( "." );
        }
        TestCaseInfo info( _name, _className, desc, tags, _lineInfo );
        return TestCase( _testCase, info );
    }
    // Replaces the tag set, recomputing the lower-cased set, the special
    // property flags and the "[a][b]" display string.
    void setTags( TestCaseInfo& testCaseInfo, std::set<std::string> const& tags )
    {
        testCaseInfo.tags = tags;
        testCaseInfo.lcaseTags.clear();
        std::ostringstream oss;
        for( std::set<std::string>::const_iterator it = tags.begin(), itEnd = tags.end(); it != itEnd; ++it ) {
            oss << '[' << *it << ']';
            std::string lcaseTag = toLower( *it );
            testCaseInfo.properties = static_cast<TestCaseInfo::SpecialProperties>( testCaseInfo.properties | parseSpecialTag( lcaseTag ) );
            testCaseInfo.lcaseTags.insert( lcaseTag );
        }
        testCaseInfo.tagsAsString = oss.str();
    }
    TestCaseInfo::TestCaseInfo( std::string const& _name,
                                std::string const& _className,
                                std::string const& _description,
                                std::set<std::string> const& _tags,
                                SourceLineInfo const& _lineInfo )
    :   name( _name ),
        className( _className ),
        description( _description ),
        lineInfo( _lineInfo ),
        properties( None )
    {
        // Derives lcaseTags/properties/tagsAsString from _tags.
        setTags( *this, _tags );
    }
    TestCaseInfo::TestCaseInfo( TestCaseInfo const& other )
    :   name( other.name ),
        className( other.className ),
        description( other.description ),
        tags( other.tags ),
        lcaseTags( other.lcaseTags ),
        tagsAsString( other.tagsAsString ),
        lineInfo( other.lineInfo ),
        properties( other.properties )
    {}
    bool TestCaseInfo::isHidden() const {
        return ( properties & IsHidden ) != 0;
    }
    bool TestCaseInfo::throws() const {
        return ( properties & Throws ) != 0;
    }
    bool TestCaseInfo::okToFail() const {
        return ( properties & (ShouldFail | MayFail ) ) != 0;
    }
    bool TestCaseInfo::expectedToFail() const {
        return ( properties & (ShouldFail ) ) != 0;
    }
    TestCase::TestCase( ITestCase* testCase, TestCaseInfo const& info ) : TestCaseInfo( info ), test( testCase ) {}
    TestCase::TestCase( TestCase const& other )
    :   TestCaseInfo( other ),
        test( other.test )
    {}
    // Returns a copy of this test case under a different name (used for
    // anonymous test cases).
    TestCase TestCase::withName( std::string const& _newName ) const {
        TestCase other( *this );
        other.name = _newName;
        return other;
    }
    void TestCase::swap( TestCase& other ) {
        test.swap( other.test );
        name.swap( other.name );
        className.swap( other.className );
        description.swap( other.description );
        tags.swap( other.tags );
        lcaseTags.swap( other.lcaseTags );
        tagsAsString.swap( other.tagsAsString );
        std::swap( TestCaseInfo::properties, static_cast<TestCaseInfo&>( other ).properties );
        std::swap( lineInfo, other.lineInfo );
    }
    void TestCase::invoke() const {
        test->invoke();
    }
    bool TestCase::operator == ( TestCase const& other ) const {
        return  test.get() == other.test.get() &&
                name == other.name &&
                className == other.className;
    }
    bool TestCase::operator < ( TestCase const& other ) const {
        return name < other.name;
    }
    // Copy-and-swap assignment.
    TestCase& TestCase::operator = ( TestCase const& other ) {
        TestCase temp( other );
        swap( temp );
        return *this;
    }
    TestCaseInfo const& TestCase::getTestCaseInfo() const
    {
        return *this;
    }
} // end namespace Catch
// #included from: catch_version.hpp
#define TWOBLUECUBES_CATCH_VERSION_HPP_INCLUDED
namespace Catch {
    Version::Version
        (   unsigned int _majorVersion,
            unsigned int _minorVersion,
            unsigned int _patchNumber,
            std::string const& _branchName,
            unsigned int _buildNumber )
    :   majorVersion( _majorVersion ),
        minorVersion( _minorVersion ),
        patchNumber( _patchNumber ),
        branchName( _branchName ),
        buildNumber( _buildNumber )
    {}
    // Formats as "major.minor.patch" with "-branch.build" appended for
    // non-release (branch) builds.
    std::ostream& operator << ( std::ostream& os, Version const& version ) {
        os  << version.majorVersion << '.'
            << version.minorVersion << '.'
            << version.patchNumber;
        if( !version.branchName.empty() ) {
            os  << '-' << version.branchName
                << '.' << version.buildNumber;
        }
        return os;
    }
    // The version of this Catch header.
    Version libraryVersion( 1, 7, 2, "", 0 );
}
// #included from: catch_message.hpp
#define TWOBLUECUBES_CATCH_MESSAGE_HPP_INCLUDED
namespace Catch {
    MessageInfo::MessageInfo(   std::string const& _macroName,
                                SourceLineInfo const& _lineInfo,
                                ResultWas::OfType _type )
    :   macroName( _macroName ),
        lineInfo( _lineInfo ),
        type( _type ),
        sequence( ++globalCount )
    {}
    // This may need protecting if threading support is added
    unsigned int MessageInfo::globalCount = 0;
    ////////////////////////////////////////////////////////////////////////////
    // RAII holder for INFO()-style messages: pushed onto the result capture
    // on construction, popped on destruction.
    ScopedMessage::ScopedMessage( MessageBuilder const& builder )
    : m_info( builder.m_info )
    {
        m_info.message = builder.m_stream.str();
        getResultCapture().pushScopedMessage( m_info );
    }
    ScopedMessage::ScopedMessage( ScopedMessage const& other )
    : m_info( other.m_info )
    {}
    ScopedMessage::~ScopedMessage() {
        getResultCapture().popScopedMessage( m_info );
    }
} // end namespace Catch
// #included from: catch_legacy_reporter_adapter.hpp
#define TWOBLUECUBES_CATCH_LEGACY_REPORTER_ADAPTER_HPP_INCLUDED
// #included from: catch_legacy_reporter_adapter.h
#define TWOBLUECUBES_CATCH_LEGACY_REPORTER_ADAPTER_H_INCLUDED
namespace Catch
{
    // Deprecated
    // Original (pre-streaming) reporter interface, kept for backwards
    // compatibility and driven through LegacyReporterAdapter below.
    struct IReporter : IShared {
        virtual ~IReporter();
        virtual bool shouldRedirectStdout() const = 0;
        virtual void StartTesting() = 0;
        virtual void EndTesting( Totals const& totals ) = 0;
        virtual void StartGroup( std::string const& groupName ) = 0;
        virtual void EndGroup( std::string const& groupName, Totals const& totals ) = 0;
        virtual void StartTestCase( TestCaseInfo const& testInfo ) = 0;
        virtual void EndTestCase( TestCaseInfo const& testInfo, Totals const& totals, std::string const& stdOut, std::string const& stdErr ) = 0;
        virtual void StartSection( std::string const& sectionName, std::string const& description ) = 0;
        virtual void EndSection( std::string const& sectionName, Counts const& assertions ) = 0;
        virtual void NoAssertionsInSection( std::string const& sectionName ) = 0;
        virtual void NoAssertionsInTestCase( std::string const& testName ) = 0;
        virtual void Aborted() = 0;
        virtual void Result( AssertionResult const& result ) = 0;
    };
    // Adapts a legacy IReporter to the streaming-reporter event interface.
    class LegacyReporterAdapter : public SharedImpl<IStreamingReporter>
    {
    public:
        LegacyReporterAdapter( Ptr<IReporter> const& legacyReporter );
        virtual ~LegacyReporterAdapter();
        virtual ReporterPreferences getPreferences() const;
        virtual void noMatchingTestCases( std::string const& );
        virtual void testRunStarting( TestRunInfo const& );
        virtual void testGroupStarting( GroupInfo const& groupInfo );
        virtual void testCaseStarting( TestCaseInfo const& testInfo );
        virtual void sectionStarting( SectionInfo const& sectionInfo );
        virtual void assertionStarting( AssertionInfo const& );
        virtual bool assertionEnded( AssertionStats const& assertionStats );
        virtual void sectionEnded( SectionStats const& sectionStats );
        virtual void testCaseEnded( TestCaseStats const& testCaseStats );
        virtual void testGroupEnded( TestGroupStats const& testGroupStats );
        virtual void testRunEnded( TestRunStats const& testRunStats );
        virtual void skipTest( TestCaseInfo const& );
    private:
        Ptr<IReporter> m_legacyReporter;
    };
}
namespace Catch
{
// LegacyReporterAdapter definitions: each streaming-interface callback is
// forwarded onto the corresponding legacy IReporter method.
LegacyReporterAdapter::LegacyReporterAdapter( Ptr<IReporter> const& legacyReporter )
: m_legacyReporter( legacyReporter )
{}
LegacyReporterAdapter::~LegacyReporterAdapter() {}
ReporterPreferences LegacyReporterAdapter::getPreferences() const {
ReporterPreferences prefs;
prefs.shouldRedirectStdOut = m_legacyReporter->shouldRedirectStdout();
return prefs;
}
void LegacyReporterAdapter::noMatchingTestCases( std::string const& ) {}
void LegacyReporterAdapter::testRunStarting( TestRunInfo const& ) {
m_legacyReporter->StartTesting();
}
void LegacyReporterAdapter::testGroupStarting( GroupInfo const& groupInfo ) {
m_legacyReporter->StartGroup( groupInfo.name );
}
void LegacyReporterAdapter::testCaseStarting( TestCaseInfo const& testInfo ) {
m_legacyReporter->StartTestCase( testInfo );
}
void LegacyReporterAdapter::sectionStarting( SectionInfo const& sectionInfo ) {
m_legacyReporter->StartSection( sectionInfo.name, sectionInfo.description );
}
void LegacyReporterAdapter::assertionStarting( AssertionInfo const& ) {
// Not on legacy interface
}
bool LegacyReporterAdapter::assertionEnded( AssertionStats const& assertionStats ) {
// The legacy interface has no separate channel for INFO messages, so on a
// failed assertion each accompanying info message is re-packaged as an
// individual Result before the assertion result itself is reported.
if( assertionStats.assertionResult.getResultType() != ResultWas::Ok ) {
for( std::vector<MessageInfo>::const_iterator it = assertionStats.infoMessages.begin(), itEnd = assertionStats.infoMessages.end();
it != itEnd;
++it ) {
if( it->type == ResultWas::Info ) {
ResultBuilder rb( it->macroName.c_str(), it->lineInfo, "", ResultDisposition::Normal );
rb << it->message;
rb.setResultType( ResultWas::Info );
AssertionResult result = rb.build();
m_legacyReporter->Result( result );
}
}
}
m_legacyReporter->Result( assertionStats.assertionResult );
return true; // messages buffer may be cleared
}
void LegacyReporterAdapter::sectionEnded( SectionStats const& sectionStats ) {
if( sectionStats.missingAssertions )
m_legacyReporter->NoAssertionsInSection( sectionStats.sectionInfo.name );
m_legacyReporter->EndSection( sectionStats.sectionInfo.name, sectionStats.assertions );
}
void LegacyReporterAdapter::testCaseEnded( TestCaseStats const& testCaseStats ) {
m_legacyReporter->EndTestCase
( testCaseStats.testInfo,
testCaseStats.totals,
testCaseStats.stdOut,
testCaseStats.stdErr );
}
void LegacyReporterAdapter::testGroupEnded( TestGroupStats const& testGroupStats ) {
if( testGroupStats.aborting )
m_legacyReporter->Aborted();
m_legacyReporter->EndGroup( testGroupStats.groupInfo.name, testGroupStats.totals );
}
void LegacyReporterAdapter::testRunEnded( TestRunStats const& testRunStats ) {
m_legacyReporter->EndTesting( testRunStats.totals );
}
void LegacyReporterAdapter::skipTest( TestCaseInfo const& ) {
// Not on legacy interface
}
}
// #included from: catch_timer.hpp
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wc++11-long-long"
#endif
#ifdef CATCH_PLATFORM_WINDOWS
#else
#include <sys/time.h>
#endif
namespace Catch {
namespace {
#ifdef CATCH_PLATFORM_WINDOWS
// Windows: QueryPerformanceCounter ticks, rebased to the first call and
// scaled to microseconds using the counter frequency.
uint64_t getCurrentTicks() {
static uint64_t hz=0, hzo=0;
if (!hz) {
QueryPerformanceFrequency( reinterpret_cast<LARGE_INTEGER*>( &hz ) );
QueryPerformanceCounter( reinterpret_cast<LARGE_INTEGER*>( &hzo ) );
}
uint64_t t;
QueryPerformanceCounter( reinterpret_cast<LARGE_INTEGER*>( &t ) );
return ((t-hzo)*1000000)/hz;
}
#else
// POSIX: wall-clock time from gettimeofday, expressed in microseconds.
uint64_t getCurrentTicks() {
timeval t;
gettimeofday(&t,CATCH_NULL);
return static_cast<uint64_t>( t.tv_sec ) * 1000000ull + static_cast<uint64_t>( t.tv_usec );
}
#endif
}
// Timer measures elapsed wall-clock time since the last call to start().
void Timer::start() {
m_ticks = getCurrentTicks();
}
// NOTE(review): the narrowing cast truncates for intervals that overflow
// unsigned int (~71 minutes of microseconds) - acceptable for test timing.
unsigned int Timer::getElapsedMicroseconds() const {
return static_cast<unsigned int>(getCurrentTicks() - m_ticks);
}
unsigned int Timer::getElapsedMilliseconds() const {
return static_cast<unsigned int>(getElapsedMicroseconds()/1000);
}
double Timer::getElapsedSeconds() const {
return getElapsedMicroseconds()/1000000.0;
}
} // namespace Catch
#ifdef __clang__
#pragma clang diagnostic pop
#endif
// #included from: catch_common.hpp
#define TWOBLUECUBES_CATCH_COMMON_HPP_INCLUDED
#include <cstring>
#include <cctype>
namespace Catch {
// Case-sensitive string predicate helpers used throughout Catch.

// True when `s` begins with the whole of `prefix` (an empty prefix matches).
bool startsWith( std::string const& s, std::string const& prefix ) {
    if( prefix.size() > s.size() )
        return false;
    return s.compare( 0, prefix.size(), prefix ) == 0;
}
// True when the first character of `s` is `prefix`.
bool startsWith( std::string const& s, char prefix ) {
    return s.size() > 0 && *s.begin() == prefix;
}
// True when `s` finishes with the whole of `suffix` (an empty suffix matches).
bool endsWith( std::string const& s, std::string const& suffix ) {
    if( suffix.size() > s.size() )
        return false;
    return s.compare( s.size() - suffix.size(), suffix.size(), suffix ) == 0;
}
// True when the last character of `s` is `suffix`.
bool endsWith( std::string const& s, char suffix ) {
    return s.size() > 0 && *s.rbegin() == suffix;
}
// True when `infix` occurs anywhere within `s`.
bool contains( std::string const& s, std::string const& infix ) {
    return std::string::npos != s.find( infix );
}
// Lower-cases a single character via the C locale's tolower.
char toLowerCh(char c) {
    return static_cast<char>( std::tolower( c ) );
}
// Lower-cases `s` in place, one character at a time.
void toLowerInPlace( std::string& s ) {
    for( std::string::size_type i = 0; i < s.size(); ++i )
        s[i] = toLowerCh( s[i] );
}
// Returns a lower-cased copy of `s`, leaving the original untouched.
std::string toLower( std::string const& s ) {
    std::string result( s );
    toLowerInPlace( result );
    return result;
}
// Strips leading and trailing whitespace (newline, carriage return, tab,
// space). A string that is entirely whitespace trims to the empty string.
std::string trim( std::string const& str ) {
    static char const* whitespaceChars = "\n\r\t ";
    std::string::size_type first = str.find_first_not_of( whitespaceChars );
    if( first == std::string::npos )
        return std::string();
    std::string::size_type last = str.find_last_not_of( whitespaceChars );
    return str.substr( first, last - first + 1 );
}
// Replaces every occurrence of `replaceThis` in `str` with `withThis`, in
// place. The scan resumes after each inserted replacement, so occurrences of
// `replaceThis` introduced by the substitution itself are not re-matched and
// the loop terminates even when `withThis` contains `replaceThis`.
// Returns true if at least one replacement was made.
bool replaceInPlace( std::string& str, std::string const& replaceThis, std::string const& withThis ) {
    // Guard: searching for "" matches at every position and never advances,
    // which made the original loop spin forever. Treat it as "nothing to do".
    if( replaceThis.empty() )
        return false;
    bool replaced = false;
    std::size_t i = str.find( replaceThis );
    while( i != std::string::npos ) {
        replaced = true;
        str.replace( i, replaceThis.size(), withThis );
        // find() from a position past the end simply yields npos, so no
        // explicit bounds check is needed here.
        i = str.find( replaceThis, i + withThis.size() );
    }
    return replaced;
}
// pluralise: helper for "1 test" / "2 tests" style output.
pluralise::pluralise( std::size_t count, std::string const& label )
: m_count( count ),
m_label( label )
{}
// Appends a trailing 's' for any count other than one (naive English plural).
std::ostream& operator << ( std::ostream& os, pluralise const& pluraliser ) {
os << pluraliser.m_count << ' ' << pluraliser.m_label;
if( pluraliser.m_count != 1 )
os << 's';
return os;
}
SourceLineInfo::SourceLineInfo() : file(""), line( 0 ){}
SourceLineInfo::SourceLineInfo( char const* _file, std::size_t _line )
: file( _file ),
line( _line )
{}
bool SourceLineInfo::empty() const {
return file[0] == '\0';
}
// Pointer comparison first as a cheap shortcut; fall back to strcmp for
// equal file names compiled into different string literals.
bool SourceLineInfo::operator == ( SourceLineInfo const& other ) const {
return line == other.line && (file == other.file || std::strcmp(file, other.file) == 0);
}
bool SourceLineInfo::operator < ( SourceLineInfo const& other ) const {
return line < other.line || ( line == other.line && (std::strcmp(file, other.file) < 0));
}
// Seeds the C RNG from the configured seed; zero means "do not seed".
void seedRng( IConfig const& config ) {
if( config.rngSeed() != 0 )
std::srand( config.rngSeed() );
}
unsigned int rngSeed() {
return getCurrentContext().getConfig()->rngSeed();
}
// Formats a source location in the compiler's native style so IDEs can
// parse it: "file(line)" for MSVC-likes, "file:line" for GCC-likes.
std::ostream& operator << ( std::ostream& os, SourceLineInfo const& info ) {
#ifndef __GNUG__
os << info.file << '(' << info.line << ')';
#else
os << info.file << ':' << info.line;
#endif
return os;
}
// Reports an internal framework error. The alwaysTrue() guard defeats
// "unreachable code" warnings after the throw.
void throwLogicError( std::string const& message, SourceLineInfo const& locationInfo ) {
std::ostringstream oss;
oss << locationInfo << ": Internal Catch error: '" << message << '\'';
if( alwaysTrue() )
throw std::logic_error( oss.str() );
}
}
// #included from: catch_section.hpp
#define TWOBLUECUBES_CATCH_SECTION_HPP_INCLUDED
namespace Catch {
SectionInfo::SectionInfo
( SourceLineInfo const& _lineInfo,
std::string const& _name,
std::string const& _description )
: name( _name ),
description( _description ),
lineInfo( _lineInfo )
{}
// RAII guard for a SECTION block. Construction asks the result capture
// whether this section should run on the current pass; the timer only
// matters when it does.
Section::Section( SectionInfo const& info )
: m_info( info ),
m_sectionIncluded( getResultCapture().sectionStarted( m_info, m_assertions ) )
{
m_timer.start();
}
Section::~Section() {
if( m_sectionIncluded ) {
SectionEndInfo endInfo( m_info, m_assertions, m_timer.getElapsedSeconds() );
// If we are unwinding due to an exception, report the section as
// ended early rather than completed normally.
if( std::uncaught_exception() )
getResultCapture().sectionEndedEarly( endInfo );
else
getResultCapture().sectionEnded( endInfo );
}
}
// This indicates whether the section should be executed or not
Section::operator bool() const {
return m_sectionIncluded;
}
} // end namespace Catch
// #included from: catch_debugger.hpp
#define TWOBLUECUBES_CATCH_DEBUGGER_HPP_INCLUDED
#ifdef CATCH_PLATFORM_MAC
#include <assert.h>
#include <stdbool.h>
#include <sys/types.h>
#include <unistd.h>
#include <sys/sysctl.h>
namespace Catch{
// The following function is taken directly from the following technical note:
// http://developer.apple.com/library/mac/#qa/qa2004/qa1361.html
// Returns true if the current process is being debugged (either
// running under the debugger or has a debugger attached post facto).
// Returns true when a debugger is attached, by querying the kernel's
// process info for the P_TRACED flag (see the Apple technical note cited
// above). Fails safe: any sysctl error reports "no debugger".
bool isDebuggerActive(){
int mib[4];
struct kinfo_proc info;
size_t size;
// Initialize the flags so that, if sysctl fails for some bizarre
// reason, we get a predictable result.
info.kp_proc.p_flag = 0;
// Initialize mib, which tells sysctl the info we want, in this case
// we're looking for information about a specific process ID.
mib[0] = CTL_KERN;
mib[1] = KERN_PROC;
mib[2] = KERN_PROC_PID;
mib[3] = getpid();
// Call sysctl.
size = sizeof(info);
if( sysctl(mib, sizeof(mib) / sizeof(*mib), &info, &size, CATCH_NULL, 0) != 0 ) {
Catch::cerr() << "\n** Call to sysctl failed - unable to determine if debugger is active **\n" << std::endl;
return false;
}
// We're being debugged if the P_TRACED flag is set.
return ( (info.kp_proc.p_flag & P_TRACED) != 0 );
}
} // namespace Catch
#elif defined(CATCH_PLATFORM_LINUX)
#include <fstream>
#include <string>
namespace Catch{
// The standard POSIX way of detecting a debugger is to attempt to
// ptrace() the process, but this needs to be done from a child and not
// this process itself to still allow attaching to this process later
// if wanted, so is rather heavy. Under Linux we have the PID of the
// "debugger" (which doesn't need to be gdb, of course, it could also
// be strace, for example) in /proc/$PID/status, so just get it from
// there instead.
// Detects an attached tracer (gdb, strace, ...) by reading the TracerPid
// field of /proc/self/status; a non-zero pid there means we are traced.
bool isDebuggerActive(){
    static const int PREFIX_LEN = 11; // length of "TracerPid:\t"
    std::ifstream in("/proc/self/status");
    std::string line;
    while( std::getline(in, line) ) {
        if( line.compare(0, PREFIX_LEN, "TracerPid:\t") != 0 )
            continue;
        // A traced process has a non-zero pid here; pids never carry a
        // leading zero, so inspecting the first digit is sufficient.
        return line.length() > PREFIX_LEN && line[PREFIX_LEN] != '0';
    }
    return false;
}
} // namespace Catch
#elif defined(_MSC_VER)
// MSVC: ask the Win32 API directly (declared here to avoid <windows.h>).
extern "C" __declspec(dllimport) int __stdcall IsDebuggerPresent();
namespace Catch {
bool isDebuggerActive() {
return IsDebuggerPresent() != 0;
}
}
#elif defined(__MINGW32__)
// MinGW: same Win32 entry point, separate declaration for this toolchain.
extern "C" __declspec(dllimport) int __stdcall IsDebuggerPresent();
namespace Catch {
bool isDebuggerActive() {
return IsDebuggerPresent() != 0;
}
}
#else
// Unknown platform: assume no debugger is attached.
namespace Catch {
inline bool isDebuggerActive() { return false; }
}
#endif // Platform
#ifdef CATCH_PLATFORM_WINDOWS
namespace Catch {
// Windows: route text to the attached debugger's output window.
void writeToDebugConsole( std::string const& text ) {
::OutputDebugStringA( text.c_str() );
}
}
#else
namespace Catch {
// Other platforms: fall back to normal stdout.
void writeToDebugConsole( std::string const& text ) {
// !TBD: Need a version for Mac/ XCode and other IDEs
Catch::cout() << text;
}
}
#endif // Platform
// #included from: catch_tostring.hpp
#define TWOBLUECUBES_CATCH_TOSTRING_HPP_INCLUDED
namespace Catch {
namespace Detail {
// Placeholder rendered for values Catch cannot stringify.
const std::string unprintableString = "{?}";
namespace {
// Integers above this are also shown in hex by the toString overloads.
const int hexThreshold = 255;
// Runtime endianness probe: writes 1 into an int and inspects which end
// of its byte representation the 1 landed in.
struct Endianness {
enum Arch { Big, Little };
static Arch which() {
union _{
int asInt;
char asChar[sizeof (int)];
} u;
u.asInt = 1;
return ( u.asChar[sizeof(int)-1] == 1 ) ? Big : Little;
}
};
}
// Renders `size` bytes at `object` as a big-endian-style hex string
// (most significant byte first), e.g. "0x0000002a".
std::string rawMemoryToString( const void *object, std::size_t size )
{
// Reverse order for little endian architectures
int i = 0, end = static_cast<int>( size ), inc = 1;
if( Endianness::which() == Endianness::Little ) {
i = end-1;
end = inc = -1;
}
unsigned char const *bytes = static_cast<unsigned char const *>(object);
std::ostringstream os;
os << "0x" << std::setfill('0') << std::hex;
for( ; i != end; i += inc )
os << std::setw(2) << static_cast<unsigned>(bytes[i]);
return os.str();
}
}
// Quotes a string for test output; when showInvisibles is configured,
// newlines and tabs are rendered as visible escape sequences.
std::string toString( std::string const& value ) {
std::string s = value;
if( getCurrentContext().getConfig()->showInvisibles() ) {
for(size_t i = 0; i < s.size(); ++i ) {
std::string subs;
switch( s[i] ) {
case '\n': subs = "\\n"; break;
case '\t': subs = "\\t"; break;
default: break;
}
if( !subs.empty() ) {
s = s.substr( 0, i ) + subs + s.substr( i+1 );
++i; // skip past the inserted backslash
}
}
}
return '"' + s + '"';
}
// Wide strings are narrowed naively: characters above 0xff become '?'.
std::string toString( std::wstring const& value ) {
std::string s;
s.reserve( value.size() );
for(size_t i = 0; i < value.size(); ++i )
s += value[i] <= 0xff ? static_cast<char>( value[i] ) : '?';
return Catch::toString( s );
}
// Null C strings get a distinct marker rather than dereferencing null.
std::string toString( const char* const value ) {
return value ? Catch::toString( std::string( value ) ) : std::string( "{null string}" );
}
std::string toString( char* const value ) {
return Catch::toString( static_cast<const char*>( value ) );
}
std::string toString( const wchar_t* const value )
{
return value ? Catch::toString( std::wstring(value) ) : std::string( "{null string}" );
}
std::string toString( wchar_t* const value )
{
return Catch::toString( static_cast<const wchar_t*>( value ) );
}
// Renders an int in decimal; values above Detail::hexThreshold also get a
// parenthesised hex form, e.g. "300 (0x12c)".
std::string toString( int value ) {
    std::ostringstream out;
    out << value;
    bool showHex = value > Detail::hexThreshold;
    if( showHex )
        out << " (0x" << std::hex << value << ')';
    return out.str();
}
// Same decimal-plus-hex rendering for unsigned long values.
std::string toString( unsigned long value ) {
    std::ostringstream out;
    out << value;
    bool showHex = value > Detail::hexThreshold;
    if( showHex )
        out << " (0x" << std::hex << value << ')';
    return out.str();
}
// unsigned int shares the unsigned long rendering.
std::string toString( unsigned int value ) {
    return Catch::toString( static_cast<unsigned long>( value ) );
}
// Formats a floating point value in fixed notation with `precision` digits
// after the decimal point, then strips redundant trailing zeros while always
// keeping at least one digit after the point (so 2.0 stays "2.0").
template<typename T>
std::string fpToString( T value, int precision ) {
    std::ostringstream out;
    out << std::setprecision( precision ) << std::fixed << value;
    std::string text = out.str();
    std::size_t lastKept = text.find_last_not_of( '0' );
    if( lastKept != std::string::npos && lastKept != text.size() - 1 ) {
        if( text[lastKept] == '.' )
            ++lastKept; // retain one zero directly after the point
        text.erase( lastKept + 1 );
    }
    return text;
}
// Doubles are shown with 10 fixed decimal places (before zero-stripping).
std::string toString( const double value ) {
    return fpToString( value, 10 );
}
// Floats use 5 places and carry an 'f' suffix to mirror source literals.
std::string toString( const float value ) {
    std::string text = fpToString( value, 5 );
    text += 'f';
    return text;
}
// Booleans render as the keywords "true" / "false".
std::string toString( bool value ) {
    if( value )
        return "true";
    return "false";
}
// Printable representation of a char: named escape sequences for the common
// control characters, a numeric rendering for other control codes, and a
// single-quoted literal for everything else.
std::string toString( char value ) {
    switch( value ) {
        case '\r': return "'\\r'";
        case '\f': return "'\\f'";
        case '\n': return "'\\n'";
        case '\t': return "'\\t'";
        default: break;
    }
    if ( '\0' <= value && value < ' ' )
        return toString( static_cast<unsigned int>( value ) );
    char chstr[] = "' '";
    chstr[1] = value;
    return chstr;
}
// signed/unsigned char share the plain char rendering.
std::string toString( signed char value ) {
    return toString( static_cast<char>( value ) );
}
std::string toString( unsigned char value ) {
    return toString( static_cast<char>( value ) );
}
#ifdef CATCH_CONFIG_CPP11_LONG_LONG
// long long variants mirror the int/unsigned long renderings: decimal,
// plus a hex form for values above the threshold.
std::string toString( long long value ) {
std::ostringstream oss;
oss << value;
if( value > Detail::hexThreshold )
oss << " (0x" << std::hex << value << ')';
return oss.str();
}
std::string toString( unsigned long long value ) {
std::ostringstream oss;
oss << value;
if( value > Detail::hexThreshold )
oss << " (0x" << std::hex << value << ')';
return oss.str();
}
#endif
#ifdef CATCH_CONFIG_CPP11_NULLPTR
std::string toString( std::nullptr_t ) {
return "nullptr";
}
#endif
#ifdef __OBJC__
// Objective-C values: NSStrings are shown as @"..." style (nil-safe),
// other objects via their -description.
std::string toString( NSString const * const& nsstring ) {
if( !nsstring )
return "nil";
return "@" + toString([nsstring UTF8String]);
}
std::string toString( NSString * CATCH_ARC_STRONG const& nsstring ) {
if( !nsstring )
return "nil";
return "@" + toString([nsstring UTF8String]);
}
std::string toString( NSObject* const& nsObject ) {
return toString( [nsObject description] );
}
#endif
} // end namespace Catch
// #included from: catch_result_builder.hpp
#define TWOBLUECUBES_CATCH_RESULT_BUILDER_HPP_INCLUDED
namespace Catch {
// Joins a captured expression with an optional second macro argument.
// An absent argument - empty, or the stringised empty literal "" - leaves
// the expression unchanged; otherwise it is appended after ", ".
std::string capturedExpressionWithSecondArgument( std::string const& capturedExpression, std::string const& secondArg ) {
    if( secondArg.empty() || secondArg == "\"\"" )
        return capturedExpression;
    return capturedExpression + ", " + secondArg;
}
// ResultBuilder accumulates everything needed to report one assertion:
// the macro/source info, streamed message text, and the eventual result type.
ResultBuilder::ResultBuilder( char const* macroName,
SourceLineInfo const& lineInfo,
char const* capturedExpression,
ResultDisposition::Flags resultDisposition,
char const* secondArg )
: m_assertionInfo( macroName, lineInfo, capturedExpressionWithSecondArgument( capturedExpression, secondArg ), resultDisposition ),
m_shouldDebugBreak( false ),
m_shouldThrow( false )
{}
ResultBuilder& ResultBuilder::setResultType( ResultWas::OfType result ) {
m_data.resultType = result;
return *this;
}
ResultBuilder& ResultBuilder::setResultType( bool result ) {
m_data.resultType = result ? ResultWas::Ok : ResultWas::ExpressionFailed;
return *this;
}
void ResultBuilder::endExpression( DecomposedExpression const& expr ) {
AssertionResult result = build( expr );
handleResult( result );
}
// Converts the currently-active exception into a reported result.
void ResultBuilder::useActiveException( ResultDisposition::Flags resultDisposition ) {
m_assertionInfo.resultDisposition = resultDisposition;
m_stream.oss << Catch::translateActiveException();
captureResult( ResultWas::ThrewException );
}
void ResultBuilder::captureResult( ResultWas::OfType resultType ) {
setResultType( resultType );
captureExpression();
}
// For REQUIRE_THROWS_WITH style assertions: an empty expected message
// matches any exception text.
void ResultBuilder::captureExpectedException( std::string const& expectedMessage ) {
if( expectedMessage.empty() )
captureExpectedException( Matchers::Impl::Generic::AllOf<std::string>() );
else
captureExpectedException( Matchers::Equals( expectedMessage ) );
}
void ResultBuilder::captureExpectedException( Matchers::Impl::Matcher<std::string> const& matcher ) {
assert( !isFalseTest( m_assertionInfo.resultDisposition ) );
AssertionResultData data = m_data;
data.resultType = ResultWas::Ok;
data.reconstructedExpression = m_assertionInfo.capturedExpression;
std::string actualMessage = Catch::translateActiveException();
if( !matcher.match( actualMessage ) ) {
data.resultType = ResultWas::ExpressionFailed;
data.reconstructedExpression = actualMessage;
}
AssertionResult result( m_assertionInfo, data );
handleResult( result );
}
void ResultBuilder::captureExpression() {
AssertionResult result = build();
handleResult( result );
}
// Routes a finished result to the active capture, then records whether the
// run should debug-break and/or unwind (via react()) on failure.
void ResultBuilder::handleResult( AssertionResult const& result )
{
getResultCapture().assertionEnded( result );
if( !result.isOk() ) {
if( getCurrentContext().getConfig()->shouldDebugBreak() )
m_shouldDebugBreak = true;
if( getCurrentContext().getRunner()->aborting() || (m_assertionInfo.resultDisposition & ResultDisposition::Normal) )
m_shouldThrow = true;
}
}
// Throws to unwind out of the test case when a non-continuable assertion
// failed (decided earlier in handleResult).
void ResultBuilder::react() {
if( m_shouldThrow )
throw Catch::TestFailureException();
}
bool ResultBuilder::shouldDebugBreak() const { return m_shouldDebugBreak; }
bool ResultBuilder::allowThrows() const { return getCurrentContext().getConfig()->allowThrows(); }
AssertionResult ResultBuilder::build() const
{
return build( *this );
}
// CAVEAT: The returned AssertionResult stores a pointer to the argument expr,
// a temporary DecomposedExpression, which in turn holds references to
// operands, possibly temporary as well.
// It should immediately be passed to handleResult; if the expression
// needs to be reported, its string expansion must be composed before
// the temporaries are destroyed.
AssertionResult ResultBuilder::build( DecomposedExpression const& expr ) const
{
// The result type must have been set (via setResultType) before building.
assert( m_data.resultType != ResultWas::Unknown );
AssertionResultData data = m_data;
// Flip bool results if FalseTest flag is set
if( isFalseTest( m_assertionInfo.resultDisposition ) ) {
data.negate( expr.isBinaryExpression() );
}
data.message = m_stream.oss.str();
data.decomposedExpression = &expr; // for lazy reconstruction
return AssertionResult( m_assertionInfo, data );
}
// Fallback reconstruction when no decomposed expression is available:
// just echo the literally captured expression text.
void ResultBuilder::reconstructExpression( std::string& dest ) const {
dest = m_assertionInfo.capturedExpression;
}
} // end namespace Catch
// #included from: catch_tag_alias_registry.hpp
#define TWOBLUECUBES_CATCH_TAG_ALIAS_REGISTRY_HPP_INCLUDED
// #included from: catch_tag_alias_registry.h
#define TWOBLUECUBES_CATCH_TAG_ALIAS_REGISTRY_H_INCLUDED
#include <map>
namespace Catch {
// Process-wide registry mapping "[@alias]" names to tag expressions,
// populated at static-init time by CATCH_REGISTER_TAG_ALIAS.
class TagAliasRegistry : public ITagAliasRegistry {
public:
virtual ~TagAliasRegistry();
virtual Option<TagAlias> find( std::string const& alias ) const;
virtual std::string expandAliases( std::string const& unexpandedTestSpec ) const;
void add( char const* alias, char const* tag, SourceLineInfo const& lineInfo );
static TagAliasRegistry& get(); // Meyers singleton accessor
private:
std::map<std::string, TagAlias> m_registry;
};
} // end namespace Catch
namespace Catch {
TagAliasRegistry::~TagAliasRegistry() {}
// Looks up `alias` in the registry; yields an empty Option when absent.
Option<TagAlias> TagAliasRegistry::find( std::string const& alias ) const {
    std::map<std::string, TagAlias>::const_iterator it = m_registry.find( alias );
    return it == m_registry.end() ? Option<TagAlias>() : Option<TagAlias>( it->second );
}
std::string TagAliasRegistry::expandAliases( std::string const& unexpandedTestSpec ) const {
std::string expandedTestSpec = unexpandedTestSpec;
for( std::map<std::string, TagAlias>::const_iterator it = m_registry.begin(), itEnd = m_registry.end();
it != itEnd;
++it ) {
std::size_t pos = expandedTestSpec.find( it->first );
if( pos != std::string::npos ) {
expandedTestSpec = expandedTestSpec.substr( 0, pos ) +
it->second.tag +
expandedTestSpec.substr( pos + it->first.size() );
}
}
return expandedTestSpec;
}
// Registers a tag alias of the form "[@name]" for the tag expression `tag`.
// Throws std::domain_error when the alias is malformed or already registered
// (the message includes both registration sites in the duplicate case).
void TagAliasRegistry::add( char const* alias, char const* tag, SourceLineInfo const& lineInfo ) {
    if( !startsWith( alias, "[@" ) || !endsWith( alias, ']' ) ) {
        std::ostringstream oss;
        oss << "error: tag alias, \"" << alias << "\" is not of the form [@alias name].\n" << lineInfo;
        // Pass the std::string directly; the original's .c_str() forced a
        // needless round trip through a C string.
        throw std::domain_error( oss.str() );
    }
    if( !m_registry.insert( std::make_pair( alias, TagAlias( tag, lineInfo ) ) ).second ) {
        std::ostringstream oss;
        oss << "error: tag alias, \"" << alias << "\" already registered.\n"
            << "\tFirst seen at " << find(alias)->lineInfo << '\n'
            << "\tRedefined at " << lineInfo;
        throw std::domain_error( oss.str() );
    }
}
// Singleton accessor (function-local static: constructed on first use).
TagAliasRegistry& TagAliasRegistry::get() {
static TagAliasRegistry instance;
return instance;
}
ITagAliasRegistry::~ITagAliasRegistry() {}
ITagAliasRegistry const& ITagAliasRegistry::get() { return TagAliasRegistry::get(); }
// Static-init helper behind CATCH_REGISTER_TAG_ALIAS. Registration errors
// are fatal: they are printed in red and the process exits immediately,
// since they indicate a bad test-source construct.
RegistrarForTagAliases::RegistrarForTagAliases( char const* alias, char const* tag, SourceLineInfo const& lineInfo ) {
try {
TagAliasRegistry::get().add( alias, tag, lineInfo );
}
catch( std::exception& ex ) {
Colour colourGuard( Colour::Red );
Catch::cerr() << ex.what() << std::endl;
exit(1);
}
}
} // end namespace Catch
// #included from: ../reporters/catch_reporter_multi.hpp
#define TWOBLUECUBES_CATCH_REPORTER_MULTI_HPP_INCLUDED
namespace Catch {
// Fans every IStreamingReporter event out to a list of child reporters,
// allowing e.g. console and XML output simultaneously. Each callback simply
// forwards to every registered child, in registration order.
class MultipleReporters : public SharedImpl<IStreamingReporter> {
typedef std::vector<Ptr<IStreamingReporter> > Reporters;
Reporters m_reporters;
public:
void add( Ptr<IStreamingReporter> const& reporter ) {
m_reporters.push_back( reporter );
}
public: // IStreamingReporter
// Preferences are taken from the first child only; callers must ensure
// at least one reporter has been added before querying.
virtual ReporterPreferences getPreferences() const CATCH_OVERRIDE {
return m_reporters[0]->getPreferences();
}
virtual void noMatchingTestCases( std::string const& spec ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->noMatchingTestCases( spec );
}
virtual void testRunStarting( TestRunInfo const& testRunInfo ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->testRunStarting( testRunInfo );
}
virtual void testGroupStarting( GroupInfo const& groupInfo ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->testGroupStarting( groupInfo );
}
virtual void testCaseStarting( TestCaseInfo const& testInfo ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->testCaseStarting( testInfo );
}
virtual void sectionStarting( SectionInfo const& sectionInfo ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->sectionStarting( sectionInfo );
}
virtual void assertionStarting( AssertionInfo const& assertionInfo ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->assertionStarting( assertionInfo );
}
// The return value indicates if the messages buffer should be cleared:
// true if ANY child wants it cleared.
virtual bool assertionEnded( AssertionStats const& assertionStats ) CATCH_OVERRIDE {
bool clearBuffer = false;
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
clearBuffer |= (*it)->assertionEnded( assertionStats );
return clearBuffer;
}
virtual void sectionEnded( SectionStats const& sectionStats ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->sectionEnded( sectionStats );
}
virtual void testCaseEnded( TestCaseStats const& testCaseStats ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->testCaseEnded( testCaseStats );
}
virtual void testGroupEnded( TestGroupStats const& testGroupStats ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->testGroupEnded( testGroupStats );
}
virtual void testRunEnded( TestRunStats const& testRunStats ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->testRunEnded( testRunStats );
}
virtual void skipTest( TestCaseInfo const& testInfo ) CATCH_OVERRIDE {
for( Reporters::const_iterator it = m_reporters.begin(), itEnd = m_reporters.end();
it != itEnd;
++it )
(*it)->skipTest( testInfo );
}
// Identifies this reporter as a multi-reporter so addReporter() can
// append to it instead of nesting wrappers.
virtual MultipleReporters* tryAsMulti() CATCH_OVERRIDE {
return this;
}
};
// Combines reporters: with no existing reporter the additional one is used
// directly; otherwise the result is a MultipleReporters (reusing an existing
// one, or wrapping the single existing reporter in a new one) with
// `additionalReporter` appended.
Ptr<IStreamingReporter> addReporter( Ptr<IStreamingReporter> const& existingReporter, Ptr<IStreamingReporter> const& additionalReporter ) {
    if( !existingReporter )
        return additionalReporter;
    Ptr<IStreamingReporter> resultingReporter;
    MultipleReporters* multi = existingReporter->tryAsMulti();
    if( multi ) {
        resultingReporter = existingReporter;
    }
    else {
        multi = new MultipleReporters;
        resultingReporter = Ptr<IStreamingReporter>( multi );
        // existingReporter is known non-null in this branch; the original
        // code re-checked it here redundantly.
        multi->add( existingReporter );
    }
    multi->add( additionalReporter );
    return resultingReporter;
}
} // end namespace Catch
// #included from: ../reporters/catch_reporter_xml.hpp
#define TWOBLUECUBES_CATCH_REPORTER_XML_HPP_INCLUDED
// #included from: catch_reporter_bases.hpp
#define TWOBLUECUBES_CATCH_REPORTER_BASES_HPP_INCLUDED
#include <cstring>
namespace Catch {
// Convenience base for streaming reporters: tracks the current run/group/
// test-case info and the open-section stack so derived reporters only need
// to override the events they care about.
struct StreamingReporterBase : SharedImpl<IStreamingReporter> {
StreamingReporterBase( ReporterConfig const& _config )
: m_config( _config.fullConfig() ),
stream( _config.stream() )
{
m_reporterPrefs.shouldRedirectStdOut = false;
}
virtual ReporterPreferences getPreferences() const CATCH_OVERRIDE {
return m_reporterPrefs;
}
virtual ~StreamingReporterBase() CATCH_OVERRIDE;
virtual void noMatchingTestCases( std::string const& ) CATCH_OVERRIDE {}
virtual void testRunStarting( TestRunInfo const& _testRunInfo ) CATCH_OVERRIDE {
currentTestRunInfo = _testRunInfo;
}
virtual void testGroupStarting( GroupInfo const& _groupInfo ) CATCH_OVERRIDE {
currentGroupInfo = _groupInfo;
}
virtual void testCaseStarting( TestCaseInfo const& _testInfo ) CATCH_OVERRIDE {
currentTestCaseInfo = _testInfo;
}
virtual void sectionStarting( SectionInfo const& _sectionInfo ) CATCH_OVERRIDE {
m_sectionStack.push_back( _sectionInfo );
}
virtual void sectionEnded( SectionStats const& /* _sectionStats */ ) CATCH_OVERRIDE {
m_sectionStack.pop_back();
}
virtual void testCaseEnded( TestCaseStats const& /* _testCaseStats */ ) CATCH_OVERRIDE {
currentTestCaseInfo.reset();
}
virtual void testGroupEnded( TestGroupStats const& /* _testGroupStats */ ) CATCH_OVERRIDE {
currentGroupInfo.reset();
}
virtual void testRunEnded( TestRunStats const& /* _testRunStats */ ) CATCH_OVERRIDE {
currentTestCaseInfo.reset();
currentGroupInfo.reset();
currentTestRunInfo.reset();
}
virtual void skipTest( TestCaseInfo const& ) CATCH_OVERRIDE {
// Don't do anything with this by default.
// It can optionally be overridden in the derived class.
}
Ptr<IConfig const> m_config;
std::ostream& stream; // destination owned by the config, not this object
LazyStat<TestRunInfo> currentTestRunInfo;
LazyStat<GroupInfo> currentGroupInfo;
LazyStat<TestCaseInfo> currentTestCaseInfo;
std::vector<SectionInfo> m_sectionStack; // innermost open section is at the back
ReporterPreferences m_reporterPrefs;
};
struct CumulativeReporterBase : SharedImpl<IStreamingReporter> {
template<typename T, typename ChildNodeT>
struct Node : SharedImpl<> {
explicit Node( T const& _value ) : value( _value ) {}
virtual ~Node() {}
typedef std::vector<Ptr<ChildNodeT> > ChildNodes;
T value;
ChildNodes children;
};
struct SectionNode : SharedImpl<> {
explicit SectionNode( SectionStats const& _stats ) : stats( _stats ) {}
virtual ~SectionNode();
bool operator == ( SectionNode const& other ) const {
return stats.sectionInfo.lineInfo == other.stats.sectionInfo.lineInfo;
}
bool operator == ( Ptr<SectionNode> const& other ) const {
return operator==( *other );
}
SectionStats stats;
typedef std::vector<Ptr<SectionNode> > ChildSections;
typedef std::vector<AssertionStats> Assertions;
ChildSections childSections;
Assertions assertions;
std::string stdOut;
std::string stdErr;
};
struct BySectionInfo {
BySectionInfo( SectionInfo const& other ) : m_other( other ) {}
BySectionInfo( BySectionInfo const& other ) : m_other( other.m_other ) {}
bool operator() ( Ptr<SectionNode> const& node ) const {
return node->stats.sectionInfo.lineInfo == m_other.lineInfo;
}
private:
void operator=( BySectionInfo const& );
SectionInfo const& m_other;
};
typedef Node<TestCaseStats, SectionNode> TestCaseNode;
typedef Node<TestGroupStats, TestCaseNode> TestGroupNode;
typedef Node<TestRunStats, TestGroupNode> TestRunNode;
CumulativeReporterBase( ReporterConfig const& _config )
: m_config( _config.fullConfig() ),
stream( _config.stream() )
{
m_reporterPrefs.shouldRedirectStdOut = false;
}
~CumulativeReporterBase();
virtual ReporterPreferences getPreferences() const CATCH_OVERRIDE {
return m_reporterPrefs;
}
virtual void testRunStarting( TestRunInfo const& ) CATCH_OVERRIDE {}
virtual void testGroupStarting( GroupInfo const& ) CATCH_OVERRIDE {}
virtual void testCaseStarting( TestCaseInfo const& ) CATCH_OVERRIDE {}
virtual void sectionStarting( SectionInfo const& sectionInfo ) CATCH_OVERRIDE {
SectionStats incompleteStats( sectionInfo, Counts(), 0, false );
Ptr<SectionNode> node;
if( m_sectionStack.empty() ) {
if( !m_rootSection )
m_rootSection = new SectionNode( incompleteStats );
node = m_rootSection;
}
else {
SectionNode& parentNode = *m_sectionStack.back();
SectionNode::ChildSections::const_iterator it =
std::find_if( parentNode.childSections.begin(),
parentNode.childSections.end(),
BySectionInfo( sectionInfo ) );
if( it == parentNode.childSections.end() ) {
node = new SectionNode( incompleteStats );
parentNode.childSections.push_back( node );
}
else
node = *it;
}
m_sectionStack.push_back( node );
m_deepestSection = node;
}
virtual void assertionStarting( AssertionInfo const& ) CATCH_OVERRIDE {}
virtual bool assertionEnded( AssertionStats const& assertionStats ) CATCH_OVERRIDE {
// An assertion always occurs inside at least the implicit root section
assert( !m_sectionStack.empty() );
SectionNode& sectionNode = *m_sectionStack.back();
sectionNode.assertions.push_back( assertionStats );
// AssertionResult holds a pointer to a temporary DecomposedExpression,
// which getExpandedExpression() calls to build the expression string.
// Our section stack copy of the assertionResult will likely outlive the
// temporary, so it must be expanded or discarded now to avoid calling
// a destroyed object later.
prepareExpandedExpression( sectionNode.assertions.back().assertionResult );
return true;
}
virtual void sectionEnded( SectionStats const& sectionStats ) CATCH_OVERRIDE {
assert( !m_sectionStack.empty() );
// Overwrite the placeholder stats stored by sectionStarting() with the
// final numbers, then pop the finished section off the stack
SectionNode& node = *m_sectionStack.back();
node.stats = sectionStats;
m_sectionStack.pop_back();
}
virtual void testCaseEnded( TestCaseStats const& testCaseStats ) CATCH_OVERRIDE {
Ptr<TestCaseNode> node = new TestCaseNode( testCaseStats );
// All sections must have been closed before the test case ends
assert( m_sectionStack.size() == 0 );
node->children.push_back( m_rootSection );
m_testCases.push_back( node );
m_rootSection.reset();
// Per-test-case std stream captures are stored on the deepest section
// that was reached during the run
assert( m_deepestSection );
m_deepestSection->stdOut = testCaseStats.stdOut;
m_deepestSection->stdErr = testCaseStats.stdErr;
}
virtual void testGroupEnded( TestGroupStats const& testGroupStats ) CATCH_OVERRIDE {
Ptr<TestGroupNode> node = new TestGroupNode( testGroupStats );
// Adopt the accumulated test cases into this group's node (and clear
// the accumulator for the next group)
node->children.swap( m_testCases );
m_testGroups.push_back( node );
}
virtual void testRunEnded( TestRunStats const& testRunStats ) CATCH_OVERRIDE {
Ptr<TestRunNode> node = new TestRunNode( testRunStats );
node->children.swap( m_testGroups );
m_testRuns.push_back( node );
// The full tree is now assembled — let the derived reporter emit it
testRunEndedCumulative();
}
// Implemented by derived reporters to write the assembled result tree
virtual void testRunEndedCumulative() = 0;
virtual void skipTest( TestCaseInfo const& ) CATCH_OVERRIDE {}
virtual void prepareExpandedExpression( AssertionResult& result ) const {
// Expand failing expressions now, while the decomposed expression is
// still alive; discard for passing ones to avoid unnecessary work
if( result.isOk() )
result.discardDecomposedExpression();
else
result.expandDecomposedExpression();
}
Ptr<IConfig const> m_config;  // full configuration for the run
std::ostream& stream;         // destination output stream
std::vector<AssertionStats> m_assertions;
std::vector<std::vector<Ptr<SectionNode> > > m_sections;
std::vector<Ptr<TestCaseNode> > m_testCases;    // cases in the current group
std::vector<Ptr<TestGroupNode> > m_testGroups;  // groups in the current run
std::vector<Ptr<TestRunNode> > m_testRuns;      // completed runs
Ptr<SectionNode> m_rootSection;     // root section of the current test case
Ptr<SectionNode> m_deepestSection;  // most recently entered section
std::vector<Ptr<SectionNode> > m_sectionStack;  // currently open sections
ReporterPreferences m_reporterPrefs;
};
// Returns a console-width-minus-one run of the character C (e.g. a line
// of '-' or '~' used as a visual divider). The buffer is built once per
// distinct C (function-local static) and reused on subsequent calls.
template<char C>
char const* getLineOfChars() {
    static char line[CATCH_CONFIG_CONSOLE_WIDTH] = {0};
    if( line[0] == '\0' ) {
        // First call for this C: fill every slot except the terminating NUL
        for( int i = 0; i < CATCH_CONFIG_CONSOLE_WIDTH-1; ++i )
            line[i] = C;
        line[CATCH_CONFIG_CONSOLE_WIDTH-1] = '\0';
    }
    return line;
}
// Base for event listeners: a streaming reporter that observes events but
// produces no output of its own.
// NOTE(review): returning false from assertionEnded presumably signals the
// event was not consumed so the main reporter still handles it — confirm
// against the IStreamingReporter contract.
struct TestEventListenerBase : StreamingReporterBase {
TestEventListenerBase( ReporterConfig const& _config )
: StreamingReporterBase( _config )
{}
virtual void assertionStarting( AssertionInfo const& ) CATCH_OVERRIDE {}
virtual bool assertionEnded( AssertionStats const& ) CATCH_OVERRIDE {
return false;
}
};
} // end namespace Catch
// #included from: ../internal/catch_reporter_registrars.hpp
#define TWOBLUECUBES_CATCH_REPORTER_REGISTRARS_HPP_INCLUDED
namespace Catch {
// Self-registering factory for reporters written against the legacy
// IReporter interface; created instances are wrapped in a
// LegacyReporterAdapter so they satisfy IStreamingReporter.
// NOTE(review): unlike ReporterRegistrar/ListenerRegistrar below, this
// factory derives from IReporterFactory directly rather than
// SharedImpl<IReporterFactory> — verify ref-counting is still provided.
template<typename T>
class LegacyReporterRegistrar {
class ReporterFactory : public IReporterFactory {
virtual IStreamingReporter* create( ReporterConfig const& config ) const {
return new LegacyReporterAdapter( new T( config ) );
}
virtual std::string getDescription() const {
return T::getDescription();
}
};
public:
LegacyReporterRegistrar( std::string const& name ) {
// Registration happens at construction; instances are declared at
// namespace scope by INTERNAL_CATCH_REGISTER_LEGACY_REPORTER
getMutableRegistryHub().registerReporter( name, new ReporterFactory() );
}
};
// Self-registering factory for reporters implementing the current
// IStreamingReporter interface.
template<typename T>
class ReporterRegistrar {
class ReporterFactory : public SharedImpl<IReporterFactory> {
// *** Please Note ***:
// - If you end up here looking at a compiler error because it's trying to register
// your custom reporter class be aware that the native reporter interface has changed
// to IStreamingReporter. The "legacy" interface, IReporter, is still supported via
// an adapter. Just use REGISTER_LEGACY_REPORTER to take advantage of the adapter.
// However please consider updating to the new interface as the old one is now
// deprecated and will probably be removed quite soon!
// Please contact me via github if you have any questions at all about this.
// In fact, ideally, please contact me anyway to let me know you've hit this - as I have
// no idea who is actually using custom reporters at all (possibly no-one!).
// The new interface is designed to minimise exposure to interface changes in the future.
virtual IStreamingReporter* create( ReporterConfig const& config ) const {
return new T( config );
}
virtual std::string getDescription() const {
return T::getDescription();
}
};
public:
ReporterRegistrar( std::string const& name ) {
// Runs at static-initialisation time via the REGISTER_REPORTER macro
getMutableRegistryHub().registerReporter( name, new ReporterFactory() );
}
};
// Self-registering factory for event listeners; unlike reporters,
// listeners have no name (they are not user-selectable).
template<typename T>
class ListenerRegistrar {
class ListenerFactory : public SharedImpl<IReporterFactory> {
virtual IStreamingReporter* create( ReporterConfig const& config ) const {
return new T( config );
}
virtual std::string getDescription() const {
// Listeners are not selectable by name, so no description is needed
return std::string();
}
};
public:
ListenerRegistrar() {
getMutableRegistryHub().registerListener( new ListenerFactory() );
}
};
}
// Registers a reporter written against the legacy IReporter interface,
// wrapping it via LegacyReporterAdapter.
#define INTERNAL_CATCH_REGISTER_LEGACY_REPORTER( name, reporterType ) \
namespace{ Catch::LegacyReporterRegistrar<reporterType> catch_internal_RegistrarFor##reporterType( name ); }
// Registers a reporter implementing IStreamingReporter under the given name.
#define INTERNAL_CATCH_REGISTER_REPORTER( name, reporterType ) \
namespace{ Catch::ReporterRegistrar<reporterType> catch_internal_RegistrarFor##reporterType( name ); }
// Registers an event listener (unnamed; always instantiated).
#define INTERNAL_CATCH_REGISTER_LISTENER( listenerType ) \
namespace{ Catch::ListenerRegistrar<listenerType> catch_internal_RegistrarFor##listenerType; }
// #included from: ../internal/catch_xmlwriter.hpp
#define TWOBLUECUBES_CATCH_XMLWRITER_HPP_INCLUDED
#include <sstream>
#include <string>
#include <vector>
#include <iomanip>
namespace Catch {
// Streams a string with XML-special characters escaped, either for text
// nodes or for (double-quoted) attribute values.
//
// Fixes in this revision:
// - The escape strings had been corrupted to identity writes (`os << "<"`
//   for '<', etc.) and `os << """;` (a syntax error) for '"'; restored the
//   proper XML entities &lt; &amp; &gt; &quot;.
// - The "]]>" check used `i > 2`, which missed the sequence at the very
//   start of a string ('>' at index 2); changed to `i >= 2`.
class XmlEncode {
public:
    enum ForWhat { ForTextNodes, ForAttributes };

    XmlEncode( std::string const& str, ForWhat forWhat = ForTextNodes )
    :   m_str( str ),
        m_forWhat( forWhat )
    {}

    void encodeTo( std::ostream& os ) const {
        // Apostrophe escaping not necessary if we always use " to write attributes
        // (see: http://www.w3.org/TR/xml/#syntax)
        for( std::size_t i = 0; i < m_str.size(); ++ i ) {
            char c = m_str[i];
            switch( c ) {
                case '<':   os << "&lt;"; break;
                case '&':   os << "&amp;"; break;

                case '>':
                    // '>' only needs escaping when it would terminate a
                    // CDATA-like "]]>" sequence (see: http://www.w3.org/TR/xml/#syntax)
                    if( i >= 2 && m_str[i-1] == ']' && m_str[i-2] == ']' )
                        os << "&gt;";
                    else
                        os << c;
                    break;

                case '\"':
                    // Quotes only need escaping inside attribute values,
                    // which this writer always delimits with "
                    if( m_forWhat == ForAttributes )
                        os << "&quot;";
                    else
                        os << c;
                    break;

                default:
                    // Escape control chars - based on contribution by @espenalb in PR #465 and
                    // by @mrpi PR #588
                    if ( ( c >= 0 && c < '\x09' ) || ( c > '\x0D' && c < '\x20') || c=='\x7F' ) {
                        // see http://stackoverflow.com/questions/404107/why-are-control-characters-illegal-in-xml-1-0
                        os << "\\x" << std::uppercase << std::hex << std::setfill('0') << std::setw(2)
                           << static_cast<int>( c );
                    }
                    else
                        os << c;
            }
        }
    }

    friend std::ostream& operator << ( std::ostream& os, XmlEncode const& xmlEncode ) {
        xmlEncode.encodeTo( os );
        return os;
    }

private:
    std::string m_str;   // raw text to encode
    ForWhat m_forWhat;   // selects text-node vs attribute escaping rules
};
// Minimal stream-based XML writer. Tracks open tags and indentation;
// elements are closed explicitly with endElement() or via RAII
// ScopedElement handles. Attributes may only be appended while the most
// recent start tag is still "open" (before any content is written).
class XmlWriter {
public:
// RAII handle that closes its element when destroyed. Copying transfers
// ownership (the source is nulled) so it can be returned by value
// pre-C++11.
class ScopedElement {
public:
ScopedElement( XmlWriter* writer )
: m_writer( writer )
{}
ScopedElement( ScopedElement const& other )
: m_writer( other.m_writer ){
// Ownership transfer: only one handle may close the element
other.m_writer = CATCH_NULL;
}
~ScopedElement() {
if( m_writer )
m_writer->endElement();
}
ScopedElement& writeText( std::string const& text, bool indent = true ) {
m_writer->writeText( text, indent );
return *this;
}
template<typename T>
ScopedElement& writeAttribute( std::string const& name, T const& attribute ) {
m_writer->writeAttribute( name, attribute );
return *this;
}
private:
// mutable so the copy constructor can null out its source
mutable XmlWriter* m_writer;
};
XmlWriter()
: m_tagIsOpen( false ),
m_needsNewline( false ),
m_os( Catch::cout() )
{
writeDeclaration();
}
XmlWriter( std::ostream& os )
: m_tagIsOpen( false ),
m_needsNewline( false ),
m_os( os )
{
writeDeclaration();
}
~XmlWriter() {
// Close any elements still open so the document stays well formed
while( !m_tags.empty() )
endElement();
}
// Opens <name>; the tag is left "open" (no '>' yet) so attributes can
// still be appended
XmlWriter& startElement( std::string const& name ) {
ensureTagClosed();
newlineIfNecessary();
m_os << m_indent << '<' << name;
m_tags.push_back( name );
m_indent += "  ";
m_tagIsOpen = true;
return *this;
}
ScopedElement scopedElement( std::string const& name ) {
ScopedElement scoped( this );
startElement( name );
return scoped;
}
XmlWriter& endElement() {
newlineIfNecessary();
m_indent = m_indent.substr( 0, m_indent.size()-2 );
if( m_tagIsOpen ) {
// No content was written since startElement: emit a self-closing tag
m_os << "/>";
m_tagIsOpen = false;
}
else {
m_os << m_indent << "</" << m_tags.back() << ">";
}
m_os << std::endl;
m_tags.pop_back();
return *this;
}
// Writes name="value" onto the currently open tag (skipped when either
// part is empty); the value is escaped for attribute context
XmlWriter& writeAttribute( std::string const& name, std::string const& attribute ) {
if( !name.empty() && !attribute.empty() )
m_os << ' ' << name << "=\"" << XmlEncode( attribute, XmlEncode::ForAttributes ) << '"';
return *this;
}
XmlWriter& writeAttribute( std::string const& name, bool attribute ) {
m_os << ' ' << name << "=\"" << ( attribute ? "true" : "false" ) << '"';
return *this;
}
// Fallback overload: any streamable value is converted via ostringstream
template<typename T>
XmlWriter& writeAttribute( std::string const& name, T const& attribute ) {
std::ostringstream oss;
oss << attribute;
return writeAttribute( name, oss.str() );
}
XmlWriter& writeText( std::string const& text, bool indent = true ) {
if( !text.empty() ){
bool tagWasOpen = m_tagIsOpen;
ensureTagClosed();
// Only indent when this text directly follows a freshly opened tag
if( tagWasOpen && indent )
m_os << m_indent;
m_os << XmlEncode( text );
m_needsNewline = true;
}
return *this;
}
XmlWriter& writeComment( std::string const& text ) {
ensureTagClosed();
m_os << m_indent << "<!--" << text << "-->";
m_needsNewline = true;
return *this;
}
void writeStylesheetRef( std::string const& url ) {
m_os << "<?xml-stylesheet type=\"text/xsl\" href=\"" << url << "\"?>\n";
}
XmlWriter& writeBlankLine() {
ensureTagClosed();
m_os << '\n';
return *this;
}
// Emits the deferred '>' of the most recently started tag, if any
void ensureTagClosed() {
if( m_tagIsOpen ) {
m_os << ">" << std::endl;
m_tagIsOpen = false;
}
}
private:
// Non-copyable (holds a stream reference); pre-C++11 "= delete" idiom
XmlWriter( XmlWriter const& );
void operator=( XmlWriter const& );
void writeDeclaration() {
m_os << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
}
void newlineIfNecessary() {
if( m_needsNewline ) {
m_os << std::endl;
m_needsNewline = false;
}
}
bool m_tagIsOpen;     // '<name' written but not yet closed with '>'
bool m_needsNewline;  // text/comment written without a trailing newline
std::vector<std::string> m_tags;  // stack of open element names
std::string m_indent;             // current indentation, two spaces per level
std::ostream& m_os;
};
}
// #included from: catch_reenable_warnings.h
#define TWOBLUECUBES_CATCH_REENABLE_WARNINGS_H_INCLUDED
#ifdef __clang__
# ifdef __ICC // icpc defines the __clang__ macro
# pragma warning(pop)
# else
# pragma clang diagnostic pop
# endif
#elif defined __GNUC__
# pragma GCC diagnostic pop
#endif
namespace Catch {
// Streaming reporter that emits results as Catch's own XML format as the
// run progresses.
class XmlReporter : public StreamingReporterBase {
public:
XmlReporter( ReporterConfig const& _config )
: StreamingReporterBase( _config ),
m_xml(_config.stream()),
m_sectionDepth( 0 )
{
// Capture stdout/stderr so they can be embedded in the XML output
m_reporterPrefs.shouldRedirectStdOut = true;
}
virtual ~XmlReporter() CATCH_OVERRIDE;
static std::string getDescription() {
return "Reports test results as an XML document";
}
// Derived reporters may supply an xml-stylesheet reference; empty = none
virtual std::string getStylesheetRef() const {
return std::string();
}
public: // StreamingReporterBase
virtual void noMatchingTestCases( std::string const& s ) CATCH_OVERRIDE {
StreamingReporterBase::noMatchingTestCases( s );
}
virtual void testRunStarting( TestRunInfo const& testInfo ) CATCH_OVERRIDE {
StreamingReporterBase::testRunStarting( testInfo );
std::string stylesheetRef = getStylesheetRef();
if( !stylesheetRef.empty() )
m_xml.writeStylesheetRef( stylesheetRef );
m_xml.startElement( "Catch" );
if( !m_config->name().empty() )
m_xml.writeAttribute( "name", m_config->name() );
}
virtual void testGroupStarting( GroupInfo const& groupInfo ) CATCH_OVERRIDE {
StreamingReporterBase::testGroupStarting( groupInfo );
m_xml.startElement( "Group" )
.writeAttribute( "name", groupInfo.name );
}
virtual void testCaseStarting( TestCaseInfo const& testInfo ) CATCH_OVERRIDE {
StreamingReporterBase::testCaseStarting(testInfo);
m_xml.startElement( "TestCase" )
.writeAttribute( "name", trim( testInfo.name ) )
.writeAttribute( "description", testInfo.description )
.writeAttribute( "tags", testInfo.tagsAsString );
if ( m_config->showDurations() == ShowDurations::Always )
m_testCaseTimer.start();
m_xml.ensureTagClosed();
}
virtual void sectionStarting( SectionInfo const& sectionInfo ) CATCH_OVERRIDE {
StreamingReporterBase::sectionStarting( sectionInfo );
// Depth 1 is the test case itself; only nested sections get elements
if( m_sectionDepth++ > 0 ) {
m_xml.startElement( "Section" )
.writeAttribute( "name", trim( sectionInfo.name ) )
.writeAttribute( "description", sectionInfo.description );
m_xml.ensureTagClosed();
}
}
virtual void assertionStarting( AssertionInfo const& ) CATCH_OVERRIDE { }
virtual bool assertionEnded( AssertionStats const& assertionStats ) CATCH_OVERRIDE {
const AssertionResult& assertionResult = assertionStats.assertionResult;
// Print any info messages in <Info> tags.
if( assertionStats.assertionResult.getResultType() != ResultWas::Ok ) {
for( std::vector<MessageInfo>::const_iterator it = assertionStats.infoMessages.begin(), itEnd = assertionStats.infoMessages.end();
it != itEnd;
++it ) {
if( it->type == ResultWas::Info ) {
m_xml.scopedElement( "Info" )
.writeText( it->message );
} else if ( it->type == ResultWas::Warning ) {
m_xml.scopedElement( "Warning" )
.writeText( it->message );
}
}
}
// Drop out if result was successful but we're not printing them.
if( !m_config->includeSuccessfulResults() && isOk(assertionResult.getResultType()) )
return true;
// Print the expression if there is one.
if( assertionResult.hasExpression() ) {
m_xml.startElement( "Expression" )
.writeAttribute( "success", assertionResult.succeeded() )
.writeAttribute( "type", assertionResult.getTestMacroName() )
.writeAttribute( "filename", assertionResult.getSourceInfo().file )
.writeAttribute( "line", assertionResult.getSourceInfo().line );
m_xml.scopedElement( "Original" )
.writeText( assertionResult.getExpression() );
m_xml.scopedElement( "Expanded" )
.writeText( assertionResult.getExpandedExpression() );
}
// And... Print a result applicable to each result type.
switch( assertionResult.getResultType() ) {
case ResultWas::ThrewException:
m_xml.scopedElement( "Exception" )
.writeAttribute( "filename", assertionResult.getSourceInfo().file )
.writeAttribute( "line", assertionResult.getSourceInfo().line )
.writeText( assertionResult.getMessage() );
break;
case ResultWas::FatalErrorCondition:
m_xml.scopedElement( "FatalErrorCondition" )
.writeAttribute( "filename", assertionResult.getSourceInfo().file )
.writeAttribute( "line", assertionResult.getSourceInfo().line )
.writeText( assertionResult.getMessage() );
break;
case ResultWas::Info:
m_xml.scopedElement( "Info" )
.writeText( assertionResult.getMessage() );
break;
case ResultWas::Warning:
// Warning will already have been written
break;
case ResultWas::ExplicitFailure:
m_xml.scopedElement( "Failure" )
.writeText( assertionResult.getMessage() );
break;
default:
break;
}
// Close the <Expression> element opened above, if any
if( assertionResult.hasExpression() )
m_xml.endElement();
return true;
}
virtual void sectionEnded( SectionStats const& sectionStats ) CATCH_OVERRIDE {
StreamingReporterBase::sectionEnded( sectionStats );
if( --m_sectionDepth > 0 ) {
XmlWriter::ScopedElement e = m_xml.scopedElement( "OverallResults" );
e.writeAttribute( "successes", sectionStats.assertions.passed );
e.writeAttribute( "failures", sectionStats.assertions.failed );
e.writeAttribute( "expectedFailures", sectionStats.assertions.failedButOk );
if ( m_config->showDurations() == ShowDurations::Always )
e.writeAttribute( "durationInSeconds", sectionStats.durationInSeconds );
// Closes <OverallResults>; e's destructor then closes <Section>
m_xml.endElement();
}
}
virtual void testCaseEnded( TestCaseStats const& testCaseStats ) CATCH_OVERRIDE {
StreamingReporterBase::testCaseEnded( testCaseStats );
XmlWriter::ScopedElement e = m_xml.scopedElement( "OverallResult" );
e.writeAttribute( "success", testCaseStats.totals.assertions.allOk() );
if ( m_config->showDurations() == ShowDurations::Always )
e.writeAttribute( "durationInSeconds", m_testCaseTimer.getElapsedSeconds() );
if( !testCaseStats.stdOut.empty() )
m_xml.scopedElement( "StdOut" ).writeText( trim( testCaseStats.stdOut ), false );
if( !testCaseStats.stdErr.empty() )
m_xml.scopedElement( "StdErr" ).writeText( trim( testCaseStats.stdErr ), false );
// Closes <OverallResult>; e's destructor then closes <TestCase>
m_xml.endElement();
}
virtual void testGroupEnded( TestGroupStats const& testGroupStats ) CATCH_OVERRIDE {
StreamingReporterBase::testGroupEnded( testGroupStats );
// TODO: Check testGroupStats.aborting and act accordingly.
m_xml.scopedElement( "OverallResults" )
.writeAttribute( "successes", testGroupStats.totals.assertions.passed )
.writeAttribute( "failures", testGroupStats.totals.assertions.failed )
.writeAttribute( "expectedFailures", testGroupStats.totals.assertions.failedButOk );
// Closes the <Group> element
m_xml.endElement();
}
virtual void testRunEnded( TestRunStats const& testRunStats ) CATCH_OVERRIDE {
StreamingReporterBase::testRunEnded( testRunStats );
m_xml.scopedElement( "OverallResults" )
.writeAttribute( "successes", testRunStats.totals.assertions.passed )
.writeAttribute( "failures", testRunStats.totals.assertions.failed )
.writeAttribute( "expectedFailures", testRunStats.totals.assertions.failedButOk );
// Closes the root <Catch> element
m_xml.endElement();
}
private:
Timer m_testCaseTimer;
XmlWriter m_xml;
int m_sectionDepth;  // 0 = outside any section; 1 = test-case level
};
INTERNAL_CATCH_REGISTER_REPORTER( "xml", XmlReporter )
} // end namespace Catch
// #included from: ../reporters/catch_reporter_junit.hpp
#define TWOBLUECUBES_CATCH_REPORTER_JUNIT_HPP_INCLUDED
#include <assert.h>
namespace Catch {
namespace {
// Formats the current UTC time as "YYYY-MM-DDTHH:MM:SSZ" (ISO 8601),
// used for the JUnit "timestamp" attribute.
std::string getCurrentTimestamp() {
// Beware, this is not reentrant because of backward compatibility issues
// Also, UTC only, again because of backward compatibility (%z is C++11)
time_t rawtime;
std::time(&rawtime);
// Buffer sized from a sample timestamp literal (sizeof includes the NUL)
const size_t timeStampSize = sizeof("2017-01-16T17:06:45Z");
#ifdef _MSC_VER
// MSVC: use the bounds-checked, reentrant gmtime_s
std::tm timeInfo = {};
gmtime_s(&timeInfo, &rawtime);
#else
// Elsewhere: std::gmtime returns a pointer to shared static storage
std::tm* timeInfo;
timeInfo = std::gmtime(&rawtime);
#endif
char timeStamp[timeStampSize];
const char * const fmt = "%Y-%m-%dT%H:%M:%SZ";
#ifdef _MSC_VER
std::strftime(timeStamp, timeStampSize, fmt, &timeInfo);
#else
std::strftime(timeStamp, timeStampSize, fmt, timeInfo);
#endif
return std::string(timeStamp);
}
}
// Reporter producing JUnit/Ant "junitreport"-compatible XML. Built on the
// cumulative base because suite-level counts ("tests", "failures", ...)
// must be known before the <testsuite> element can be written.
class JunitReporter : public CumulativeReporterBase {
public:
JunitReporter( ReporterConfig const& _config )
: CumulativeReporterBase( _config ),
xml( _config.stream() )
{
// Capture stdout/stderr for the <system-out>/<system-err> elements
m_reporterPrefs.shouldRedirectStdOut = true;
}
virtual ~JunitReporter() CATCH_OVERRIDE;
static std::string getDescription() {
return "Reports test results in an XML format that looks like Ant's junitreport target";
}
virtual void noMatchingTestCases( std::string const& /*spec*/ ) CATCH_OVERRIDE {}
virtual void testRunStarting( TestRunInfo const& runInfo ) CATCH_OVERRIDE {
CumulativeReporterBase::testRunStarting( runInfo );
xml.startElement( "testsuites" );
}
virtual void testGroupStarting( GroupInfo const& groupInfo ) CATCH_OVERRIDE {
// Reset the per-suite accumulators for the new group
suiteTimer.start();
stdOutForSuite.str("");
stdErrForSuite.str("");
unexpectedExceptions = 0;
CumulativeReporterBase::testGroupStarting( groupInfo );
}
virtual bool assertionEnded( AssertionStats const& assertionStats ) CATCH_OVERRIDE {
// JUnit distinguishes "errors" (unexpected exceptions) from "failures"
if( assertionStats.assertionResult.getResultType() == ResultWas::ThrewException )
unexpectedExceptions++;
return CumulativeReporterBase::assertionEnded( assertionStats );
}
virtual void testCaseEnded( TestCaseStats const& testCaseStats ) CATCH_OVERRIDE {
stdOutForSuite << testCaseStats.stdOut;
stdErrForSuite << testCaseStats.stdErr;
CumulativeReporterBase::testCaseEnded( testCaseStats );
}
virtual void testGroupEnded( TestGroupStats const& testGroupStats ) CATCH_OVERRIDE {
// Snapshot elapsed time before the base class finalises the group node
double suiteTime = suiteTimer.getElapsedSeconds();
CumulativeReporterBase::testGroupEnded( testGroupStats );
writeGroup( *m_testGroups.back(), suiteTime );
}
virtual void testRunEndedCumulative() CATCH_OVERRIDE {
// Close the root <testsuites> element
xml.endElement();
}
// Emits one <testsuite> element for a completed test group
void writeGroup( TestGroupNode const& groupNode, double suiteTime ) {
XmlWriter::ScopedElement e = xml.scopedElement( "testsuite" );
TestGroupStats const& stats = groupNode.value;
xml.writeAttribute( "name", stats.groupInfo.name );
xml.writeAttribute( "errors", unexpectedExceptions );
// "failures" excludes unexpected exceptions, which count as errors
xml.writeAttribute( "failures", stats.totals.assertions.failed-unexpectedExceptions );
xml.writeAttribute( "tests", stats.totals.assertions.total() );
xml.writeAttribute( "hostname", "tbd" ); // !TBD
if( m_config->showDurations() == ShowDurations::Never )
xml.writeAttribute( "time", "" );
else
xml.writeAttribute( "time", suiteTime );
xml.writeAttribute( "timestamp", getCurrentTimestamp() );
// Write test cases
for( TestGroupNode::ChildNodes::const_iterator
it = groupNode.children.begin(), itEnd = groupNode.children.end();
it != itEnd;
++it )
writeTestCase( **it );
xml.scopedElement( "system-out" ).writeText( trim( stdOutForSuite.str() ), false );
xml.scopedElement( "system-err" ).writeText( trim( stdErrForSuite.str() ), false );
}
void writeTestCase( TestCaseNode const& testCaseNode ) {
TestCaseStats const& stats = testCaseNode.value;
// All test cases have exactly one section - which represents the
// test case itself. That section may have 0-n nested sections
assert( testCaseNode.children.size() == 1 );
SectionNode const& rootSection = *testCaseNode.children.front();
std::string className = stats.testInfo.className;
if( className.empty() ) {
if( rootSection.childSections.empty() )
className = "global";
}
writeSection( className, "", rootSection );
}
// Recursively writes a section as a <testcase>; nested section names are
// accumulated into rootName joined with '/'
void writeSection( std::string const& className,
std::string const& rootName,
SectionNode const& sectionNode ) {
std::string name = trim( sectionNode.stats.sectionInfo.name );
if( !rootName.empty() )
name = rootName + '/' + name;
// Only sections with assertions or captured output get an element
if( !sectionNode.assertions.empty() ||
!sectionNode.stdOut.empty() ||
!sectionNode.stdErr.empty() ) {
XmlWriter::ScopedElement e = xml.scopedElement( "testcase" );
if( className.empty() ) {
xml.writeAttribute( "classname", name );
xml.writeAttribute( "name", "root" );
}
else {
xml.writeAttribute( "classname", className );
xml.writeAttribute( "name", name );
}
xml.writeAttribute( "time", Catch::toString( sectionNode.stats.durationInSeconds ) );
writeAssertions( sectionNode );
if( !sectionNode.stdOut.empty() )
xml.scopedElement( "system-out" ).writeText( trim( sectionNode.stdOut ), false );
if( !sectionNode.stdErr.empty() )
xml.scopedElement( "system-err" ).writeText( trim( sectionNode.stdErr ), false );
}
for( SectionNode::ChildSections::const_iterator
it = sectionNode.childSections.begin(),
itEnd = sectionNode.childSections.end();
it != itEnd;
++it )
if( className.empty() )
writeSection( name, "", **it );
else
writeSection( className, name, **it );
}
void writeAssertions( SectionNode const& sectionNode ) {
for( SectionNode::Assertions::const_iterator
it = sectionNode.assertions.begin(), itEnd = sectionNode.assertions.end();
it != itEnd;
++it )
writeAssertion( *it );
}
// Writes a single failing assertion as an <error>/<failure> element;
// passing assertions produce no output in JUnit format
void writeAssertion( AssertionStats const& stats ) {
AssertionResult const& result = stats.assertionResult;
if( !result.isOk() ) {
std::string elementName;
switch( result.getResultType() ) {
case ResultWas::ThrewException:
case ResultWas::FatalErrorCondition:
elementName = "error";
break;
case ResultWas::ExplicitFailure:
elementName = "failure";
break;
case ResultWas::ExpressionFailed:
elementName = "failure";
break;
case ResultWas::DidntThrowException:
elementName = "failure";
break;
// We should never see these here:
case ResultWas::Info:
case ResultWas::Warning:
case ResultWas::Ok:
case ResultWas::Unknown:
case ResultWas::FailureBit:
case ResultWas::Exception:
elementName = "internalError";
break;
}
XmlWriter::ScopedElement e = xml.scopedElement( elementName );
xml.writeAttribute( "message", result.getExpandedExpression() );
xml.writeAttribute( "type", result.getTestMacroName() );
std::ostringstream oss;
if( !result.getMessage().empty() )
oss << result.getMessage() << '\n';
for( std::vector<MessageInfo>::const_iterator
it = stats.infoMessages.begin(),
itEnd = stats.infoMessages.end();
it != itEnd;
++it )
if( it->type == ResultWas::Info )
oss << it->message << '\n';
oss << "at " << result.getSourceInfo();
xml.writeText( oss.str(), false );
}
}
XmlWriter xml;
Timer suiteTimer;
std::ostringstream stdOutForSuite;   // stdout accumulated across the group
std::ostringstream stdErrForSuite;   // stderr accumulated across the group
unsigned int unexpectedExceptions;   // ThrewException count in current group
};
INTERNAL_CATCH_REGISTER_REPORTER( "junit", JunitReporter )
} // end namespace Catch
// #included from: ../reporters/catch_reporter_console.hpp
#define TWOBLUECUBES_CATCH_REPORTER_CONSOLE_HPP_INCLUDED
namespace Catch {
struct ConsoleReporter : StreamingReporterBase {
ConsoleReporter( ReporterConfig const& _config )
: StreamingReporterBase( _config ),
m_headerPrinted( false )
{}
virtual ~ConsoleReporter() CATCH_OVERRIDE;
static std::string getDescription() {
return "Reports test results as plain lines of text";
}
virtual void noMatchingTestCases( std::string const& spec ) CATCH_OVERRIDE {
stream << "No test cases matched '" << spec << '\'' << std::endl;
}
// Nothing to do until the assertion's result is known
virtual void assertionStarting( AssertionInfo const& ) CATCH_OVERRIDE {
}
// Prints the assertion result. Successful assertions are normally
// suppressed; warnings are the one exception — they are still shown,
// but without their info messages.
virtual bool assertionEnded( AssertionStats const& _assertionStats ) CATCH_OVERRIDE {
    AssertionResult const& result = _assertionStats.assertionResult;

    bool suppressSuccess = !m_config->includeSuccessfulResults() && result.isOk();
    bool printInfoMessages = true;
    if( suppressSuccess ) {
        if( result.getResultType() != ResultWas::Warning )
            return false;
        printInfoMessages = false;
    }

    // Make sure the run/group/test-case headers have been emitted first
    lazyPrint();

    AssertionPrinter( stream, _assertionStats, printInfoMessages ).print();
    stream << std::endl;
    return true;
}
virtual void sectionStarting( SectionInfo const& _sectionInfo ) CATCH_OVERRIDE {
// A new section means the test case/section header must be reprinted
// the next time something is output
m_headerPrinted = false;
StreamingReporterBase::sectionStarting( _sectionInfo );
}
virtual void sectionEnded( SectionStats const& _sectionStats ) CATCH_OVERRIDE {
if( _sectionStats.missingAssertions ) {
lazyPrint();
// RAII guard: colours the following output as an error for its scope
Colour colour( Colour::ResultError );
if( m_sectionStack.size() > 1 )
stream << "\nNo assertions in section";
else
stream << "\nNo assertions in test case";
stream << " '" << _sectionStats.sectionInfo.name << "'\n" << std::endl;
}
if( m_headerPrinted ) {
if( m_config->showDurations() == ShowDurations::Always )
stream << "Completed in " << _sectionStats.durationInSeconds << 's' << std::endl;
m_headerPrinted = false;
}
else {
// Header was never printed (nothing was output for this section),
// so name the section explicitly alongside its duration
if( m_config->showDurations() == ShowDurations::Always )
stream << _sectionStats.sectionInfo.name << " completed in " << _sectionStats.durationInSeconds << 's' << std::endl;
}
StreamingReporterBase::sectionEnded( _sectionStats );
}
virtual void testCaseEnded( TestCaseStats const& _testCaseStats ) CATCH_OVERRIDE {
StreamingReporterBase::testCaseEnded( _testCaseStats );
m_headerPrinted = false;
}
virtual void testGroupEnded( TestGroupStats const& _testGroupStats ) CATCH_OVERRIDE {
// Only print a group summary if the group header was actually used
// (i.e. something in this group produced output)
if( currentGroupInfo.used ) {
printSummaryDivider();
stream << "Summary for group '" << _testGroupStats.groupInfo.name << "':\n";
printTotals( _testGroupStats.totals );
stream << '\n' << std::endl;
}
StreamingReporterBase::testGroupEnded( _testGroupStats );
}
virtual void testRunEnded( TestRunStats const& _testRunStats ) CATCH_OVERRIDE {
printTotalsDivider( _testRunStats.totals );
printTotals( _testRunStats.totals );
stream << std::endl;
StreamingReporterBase::testRunEnded( _testRunStats );
}
private:
// Formats a single assertion result for the console: source location,
// pass/fail verdict, original and expanded expressions, and messages.
// All formatting decisions are made in the constructor's switch; print()
// just emits the pieces.
class AssertionPrinter {
// Non-assignable (holds references); pre-C++11 "= delete" idiom
void operator= ( AssertionPrinter const& );
public:
AssertionPrinter( std::ostream& _stream, AssertionStats const& _stats, bool _printInfoMessages )
: stream( _stream ),
stats( _stats ),
result( _stats.assertionResult ),
colour( Colour::None ),
message( result.getMessage() ),
messages( _stats.infoMessages ),
printInfoMessages( _printInfoMessages )
{
// Choose colour, verdict text and message label per result type
switch( result.getResultType() ) {
case ResultWas::Ok:
colour = Colour::Success;
passOrFail = "PASSED";
//if( result.hasMessage() )
if( _stats.infoMessages.size() == 1 )
messageLabel = "with message";
if( _stats.infoMessages.size() > 1 )
messageLabel = "with messages";
break;
case ResultWas::ExpressionFailed:
// A failed expression may still be "ok" (e.g. CHECK_NOFAIL)
if( result.isOk() ) {
colour = Colour::Success;
passOrFail = "FAILED - but was ok";
}
else {
colour = Colour::Error;
passOrFail = "FAILED";
}
if( _stats.infoMessages.size() == 1 )
messageLabel = "with message";
if( _stats.infoMessages.size() > 1 )
messageLabel = "with messages";
break;
case ResultWas::ThrewException:
colour = Colour::Error;
passOrFail = "FAILED";
messageLabel = "due to unexpected exception with message";
break;
case ResultWas::FatalErrorCondition:
colour = Colour::Error;
passOrFail = "FAILED";
messageLabel = "due to a fatal error condition";
break;
case ResultWas::DidntThrowException:
colour = Colour::Error;
passOrFail = "FAILED";
messageLabel = "because no exception was thrown where one was expected";
break;
case ResultWas::Info:
messageLabel = "info";
break;
case ResultWas::Warning:
messageLabel = "warning";
break;
case ResultWas::ExplicitFailure:
passOrFail = "FAILED";
colour = Colour::Error;
if( _stats.infoMessages.size() == 1 )
messageLabel = "explicitly with message";
if( _stats.infoMessages.size() > 1 )
messageLabel = "explicitly with messages";
break;
// These cases are here to prevent compiler warnings
case ResultWas::Unknown:
case ResultWas::FailureBit:
case ResultWas::Exception:
passOrFail = "** internal error **";
colour = Colour::Error;
break;
}
}
void print() const {
printSourceInfo();
if( stats.totals.assertions.total() > 0 ) {
if( result.isOk() )
stream << '\n';
printResultType();
printOriginalExpression();
printReconstructedExpression();
}
else {
stream << '\n';
}
printMessage();
}
private:
void printResultType() const {
if( !passOrFail.empty() ) {
Colour colourGuard( colour );
stream << passOrFail << ":\n";
}
}
void printOriginalExpression() const {
if( result.hasExpression() ) {
Colour colourGuard( Colour::OriginalExpression );
stream << "  ";
stream << result.getExpressionInMacro();
stream << '\n';
}
}
void printReconstructedExpression() const {
if( result.hasExpandedExpression() ) {
stream << "with expansion:\n";
Colour colourGuard( Colour::ReconstructedExpression );
stream << Text( result.getExpandedExpression(), TextAttributes().setIndent(2) ) << '\n';
}
}
void printMessage() const {
if( !messageLabel.empty() )
stream << messageLabel << ':' << '\n';
for( std::vector<MessageInfo>::const_iterator it = messages.begin(), itEnd = messages.end();
it != itEnd;
++it ) {
// If this assertion is a warning ignore any INFO messages
if( printInfoMessages || it->type != ResultWas::Info )
stream << Text( it->message, TextAttributes().setIndent(2) ) << '\n';
}
}
void printSourceInfo() const {
Colour colourGuard( Colour::FileName );
stream << result.getSourceInfo() << ": ";
}
std::ostream& stream;
AssertionStats const& stats;
AssertionResult const& result;
Colour::Code colour;          // colour for the verdict line
std::string passOrFail;       // e.g. "PASSED" / "FAILED"
std::string messageLabel;     // e.g. "with message"
std::string message;
std::vector<MessageInfo> messages;
bool printInfoMessages;       // false when suppressing INFO for warnings
};
// Prints the run/group/test-case headers on demand, so they only appear
// once something in their scope actually produces output
void lazyPrint() {
if( !currentTestRunInfo.used )
lazyPrintRunInfo();
if( !currentGroupInfo.used )
lazyPrintGroupInfo();
if( !m_headerPrinted ) {
printTestCaseAndSectionHeader();
m_headerPrinted = true;
}
}
void lazyPrintRunInfo() {
stream << '\n' << getLineOfChars<'~'>() << '\n';
Colour colour( Colour::SecondaryText );
stream << currentTestRunInfo->name
<< " is a Catch v" << libraryVersion << " host application.\n"
<< "Run with -? for options\n\n";
if( m_config->rngSeed() != 0 )
stream << "Randomness seeded to: " << m_config->rngSeed() << "\n\n";
currentTestRunInfo.used = true;
}
void lazyPrintGroupInfo() {
// Only worth printing a group header when there are multiple, named groups
if( !currentGroupInfo->name.empty() && currentGroupInfo->groupsCounts > 1 ) {
printClosedHeader( "Group: " + currentGroupInfo->name );
currentGroupInfo.used = true;
}
}
void printTestCaseAndSectionHeader() {
assert( !m_sectionStack.empty() );
printOpenHeader( currentTestCaseInfo->name );
if( m_sectionStack.size() > 1 ) {
Colour colourGuard( Colour::Headers );
std::vector<SectionInfo>::const_iterator
it = m_sectionStack.begin()+1, // Skip first section (test case)
itEnd = m_sectionStack.end();
for( ; it != itEnd; ++it )
printHeaderString( it->name, 2 );
}
// Show the source location of the innermost section, if known
SourceLineInfo lineInfo = m_sectionStack.back().lineInfo;
if( !lineInfo.empty() ){
stream << getLineOfChars<'-'>() << '\n';
Colour colourGuard( Colour::FileName );
stream << lineInfo << '\n';
}
stream << getLineOfChars<'.'>() << '\n' << std::endl;
}
// Header with both top and bottom divider lines
void printClosedHeader( std::string const& _name ) {
printOpenHeader( _name );
stream << getLineOfChars<'.'>() << '\n';
}
// Header with only a top divider line
void printOpenHeader( std::string const& _name ) {
stream << getLineOfChars<'-'>() << '\n';
{
Colour colourGuard( Colour::Headers );
printHeaderString( _name );
}
}
// if string has a : in first line will set indent to follow it on
// subsequent lines
void printHeaderString( std::string const& _string, std::size_t indent = 0 ) {
std::size_t i = _string.find( ": " );
if( i != std::string::npos )
i+=2;
else
i = 0;
stream << Text( _string, TextAttributes()
.setIndent( indent+i)
.setInitialIndent( indent ) ) << '\n';
}
struct SummaryColumn {
SummaryColumn( std::string const& _label, Colour::Code _colour )
: label( _label ),
colour( _colour )
{}
SummaryColumn addRow( std::size_t count ) {
std::ostringstream oss;
oss << count;
std::string row = oss.str();
for( std::vector<std::string>::iterator it = rows.begin(); it != rows.end(); ++it ) {
while( it->size() < row.size() )
*it = ' ' + *it;
while( it->size() > row.size() )
row = ' ' + row;
}
rows.push_back( row );
return *this;
}
std::string label;
Colour::Code colour;
std::vector<std::string> rows;
};
void printTotals( Totals const& totals ) {
if( totals.testCases.total() == 0 ) {
stream << Colour( Colour::Warning ) << "No tests ran\n";
}
else if( totals.assertions.total() > 0 && totals.testCases.allPassed() ) {
stream << Colour( Colour::ResultSuccess ) << "All tests passed";
stream << " ("
<< pluralise( totals.assertions.passed, "assertion" ) << " in "
<< pluralise( totals.testCases.passed, "test case" ) << ')'
<< '\n';
}
else {
std::vector<SummaryColumn> columns;
columns.push_back( SummaryColumn( "", Colour::None )
.addRow( totals.testCases.total() )
.addRow( totals.assertions.total() ) );
columns.push_back( SummaryColumn( "passed", Colour::Success )
.addRow( totals.testCases.passed )
.addRow( totals.assertions.passed ) );
columns.push_back( SummaryColumn( "failed", Colour::ResultError )
.addRow( totals.testCases.failed )
.addRow( totals.assertions.failed ) );
columns.push_back( SummaryColumn( "failed as expected", Colour::ResultExpectedFailure )
.addRow( totals.testCases.failedButOk )
.addRow( totals.assertions.failedButOk ) );
printSummaryRow( "test cases", columns, 0 );
printSummaryRow( "assertions", columns, 1 );
}
}
void printSummaryRow( std::string const& label, std::vector<SummaryColumn> const& cols, std::size_t row ) {
for( std::vector<SummaryColumn>::const_iterator it = cols.begin(); it != cols.end(); ++it ) {
std::string value = it->rows[row];
if( it->label.empty() ) {
stream << label << ": ";
if( value != "0" )
stream << value;
else
stream << Colour( Colour::Warning ) << "- none -";
}
else if( value != "0" ) {
stream << Colour( Colour::LightGrey ) << " | ";
stream << Colour( it->colour )
<< value << ' ' << it->label;
}
}
stream << '\n';
}
static std::size_t makeRatio( std::size_t number, std::size_t total ) {
std::size_t ratio = total > 0 ? CATCH_CONFIG_CONSOLE_WIDTH * number/ total : 0;
return ( ratio == 0 && number > 0 ) ? 1 : ratio;
}
static std::size_t& findMax( std::size_t& i, std::size_t& j, std::size_t& k ) {
if( i > j && i > k )
return i;
else if( j > k )
return j;
else
return k;
}
void printTotalsDivider( Totals const& totals ) {
if( totals.testCases.total() > 0 ) {
std::size_t failedRatio = makeRatio( totals.testCases.failed, totals.testCases.total() );
std::size_t failedButOkRatio = makeRatio( totals.testCases.failedButOk, totals.testCases.total() );
std::size_t passedRatio = makeRatio( totals.testCases.passed, totals.testCases.total() );
while( failedRatio + failedButOkRatio + passedRatio < CATCH_CONFIG_CONSOLE_WIDTH-1 )
findMax( failedRatio, failedButOkRatio, passedRatio )++;
while( failedRatio + failedButOkRatio + passedRatio > CATCH_CONFIG_CONSOLE_WIDTH-1 )
findMax( failedRatio, failedButOkRatio, passedRatio )--;
stream << Colour( Colour::Error ) << std::string( failedRatio, '=' );
stream << Colour( Colour::ResultExpectedFailure ) << std::string( failedButOkRatio, '=' );
if( totals.testCases.allPassed() )
stream << Colour( Colour::ResultSuccess ) << std::string( passedRatio, '=' );
else
stream << Colour( Colour::Success ) << std::string( passedRatio, '=' );
}
else {
stream << Colour( Colour::Warning ) << std::string( CATCH_CONFIG_CONSOLE_WIDTH-1, '=' );
}
stream << '\n';
}
void printSummaryDivider() {
stream << getLineOfChars<'-'>() << '\n';
}
private:
bool m_headerPrinted;
};
INTERNAL_CATCH_REGISTER_REPORTER( "console", ConsoleReporter )
} // end namespace Catch
// #included from: ../reporters/catch_reporter_compact.hpp
#define TWOBLUECUBES_CATCH_REPORTER_COMPACT_HPP_INCLUDED
namespace Catch {
struct CompactReporter : StreamingReporterBase {
CompactReporter( ReporterConfig const& _config )
: StreamingReporterBase( _config )
{}
virtual ~CompactReporter();
static std::string getDescription() {
return "Reports test results on a single line, suitable for IDEs";
}
virtual ReporterPreferences getPreferences() const {
ReporterPreferences prefs;
prefs.shouldRedirectStdOut = false;
return prefs;
}
virtual void noMatchingTestCases( std::string const& spec ) {
stream << "No test cases matched '" << spec << '\'' << std::endl;
}
virtual void assertionStarting( AssertionInfo const& ) {
}
virtual bool assertionEnded( AssertionStats const& _assertionStats ) {
AssertionResult const& result = _assertionStats.assertionResult;
bool printInfoMessages = true;
// Drop out if result was successful and we're not printing those
if( !m_config->includeSuccessfulResults() && result.isOk() ) {
if( result.getResultType() != ResultWas::Warning )
return false;
printInfoMessages = false;
}
AssertionPrinter printer( stream, _assertionStats, printInfoMessages );
printer.print();
stream << std::endl;
return true;
}
virtual void testRunEnded( TestRunStats const& _testRunStats ) {
printTotals( _testRunStats.totals );
stream << '\n' << std::endl;
StreamingReporterBase::testRunEnded( _testRunStats );
}
private:
class AssertionPrinter {
void operator= ( AssertionPrinter const& );
public:
AssertionPrinter( std::ostream& _stream, AssertionStats const& _stats, bool _printInfoMessages )
: stream( _stream )
, stats( _stats )<|fim▁hole|> , messages( _stats.infoMessages )
, itMessage( _stats.infoMessages.begin() )
, printInfoMessages( _printInfoMessages )
{}
void print() {
printSourceInfo();
itMessage = messages.begin();
switch( result.getResultType() ) {
case ResultWas::Ok:
printResultType( Colour::ResultSuccess, passedString() );
printOriginalExpression();
printReconstructedExpression();
if ( ! result.hasExpression() )
printRemainingMessages( Colour::None );
else
printRemainingMessages();
break;
case ResultWas::ExpressionFailed:
if( result.isOk() )
printResultType( Colour::ResultSuccess, failedString() + std::string( " - but was ok" ) );
else
printResultType( Colour::Error, failedString() );
printOriginalExpression();
printReconstructedExpression();
printRemainingMessages();
break;
case ResultWas::ThrewException:
printResultType( Colour::Error, failedString() );
printIssue( "unexpected exception with message:" );
printMessage();
printExpressionWas();
printRemainingMessages();
break;
case ResultWas::FatalErrorCondition:
printResultType( Colour::Error, failedString() );
printIssue( "fatal error condition with message:" );
printMessage();
printExpressionWas();
printRemainingMessages();
break;
case ResultWas::DidntThrowException:
printResultType( Colour::Error, failedString() );
printIssue( "expected exception, got none" );
printExpressionWas();
printRemainingMessages();
break;
case ResultWas::Info:
printResultType( Colour::None, "info" );
printMessage();
printRemainingMessages();
break;
case ResultWas::Warning:
printResultType( Colour::None, "warning" );
printMessage();
printRemainingMessages();
break;
case ResultWas::ExplicitFailure:
printResultType( Colour::Error, failedString() );
printIssue( "explicitly" );
printRemainingMessages( Colour::None );
break;
// These cases are here to prevent compiler warnings
case ResultWas::Unknown:
case ResultWas::FailureBit:
case ResultWas::Exception:
printResultType( Colour::Error, "** internal error **" );
break;
}
}
private:
// Colour::LightGrey
static Colour::Code dimColour() { return Colour::FileName; }
#ifdef CATCH_PLATFORM_MAC
static const char* failedString() { return "FAILED"; }
static const char* passedString() { return "PASSED"; }
#else
static const char* failedString() { return "failed"; }
static const char* passedString() { return "passed"; }
#endif
void printSourceInfo() const {
Colour colourGuard( Colour::FileName );
stream << result.getSourceInfo() << ':';
}
void printResultType( Colour::Code colour, std::string passOrFail ) const {
if( !passOrFail.empty() ) {
{
Colour colourGuard( colour );
stream << ' ' << passOrFail;
}
stream << ':';
}
}
void printIssue( std::string issue ) const {
stream << ' ' << issue;
}
void printExpressionWas() {
if( result.hasExpression() ) {
stream << ';';
{
Colour colour( dimColour() );
stream << " expression was:";
}
printOriginalExpression();
}
}
void printOriginalExpression() const {
if( result.hasExpression() ) {
stream << ' ' << result.getExpression();
}
}
void printReconstructedExpression() const {
if( result.hasExpandedExpression() ) {
{
Colour colour( dimColour() );
stream << " for: ";
}
stream << result.getExpandedExpression();
}
}
void printMessage() {
if ( itMessage != messages.end() ) {
stream << " '" << itMessage->message << '\'';
++itMessage;
}
}
void printRemainingMessages( Colour::Code colour = dimColour() ) {
if ( itMessage == messages.end() )
return;
// using messages.end() directly yields compilation error:
std::vector<MessageInfo>::const_iterator itEnd = messages.end();
const std::size_t N = static_cast<std::size_t>( std::distance( itMessage, itEnd ) );
{
Colour colourGuard( colour );
stream << " with " << pluralise( N, "message" ) << ':';
}
for(; itMessage != itEnd; ) {
// If this assertion is a warning ignore any INFO messages
if( printInfoMessages || itMessage->type != ResultWas::Info ) {
stream << " '" << itMessage->message << '\'';
if ( ++itMessage != itEnd ) {
Colour colourGuard( dimColour() );
stream << " and";
}
}
}
}
private:
std::ostream& stream;
AssertionStats const& stats;
AssertionResult const& result;
std::vector<MessageInfo> messages;
std::vector<MessageInfo>::const_iterator itMessage;
bool printInfoMessages;
};
// Colour, message variants:
// - white: No tests ran.
// - red: Failed [both/all] N test cases, failed [both/all] M assertions.
// - white: Passed [both/all] N test cases (no assertions).
// - red: Failed N tests cases, failed M assertions.
// - green: Passed [both/all] N tests cases with M assertions.
std::string bothOrAll( std::size_t count ) const {
return count == 1 ? std::string() : count == 2 ? "both " : "all " ;
}
void printTotals( const Totals& totals ) const {
if( totals.testCases.total() == 0 ) {
stream << "No tests ran.";
}
else if( totals.testCases.failed == totals.testCases.total() ) {
Colour colour( Colour::ResultError );
const std::string qualify_assertions_failed =
totals.assertions.failed == totals.assertions.total() ?
bothOrAll( totals.assertions.failed ) : std::string();
stream <<
"Failed " << bothOrAll( totals.testCases.failed )
<< pluralise( totals.testCases.failed, "test case" ) << ", "
"failed " << qualify_assertions_failed <<
pluralise( totals.assertions.failed, "assertion" ) << '.';
}
else if( totals.assertions.total() == 0 ) {
stream <<
"Passed " << bothOrAll( totals.testCases.total() )
<< pluralise( totals.testCases.total(), "test case" )
<< " (no assertions).";
}
else if( totals.assertions.failed ) {
Colour colour( Colour::ResultError );
stream <<
"Failed " << pluralise( totals.testCases.failed, "test case" ) << ", "
"failed " << pluralise( totals.assertions.failed, "assertion" ) << '.';
}
else {
Colour colour( Colour::ResultSuccess );
stream <<
"Passed " << bothOrAll( totals.testCases.passed )
<< pluralise( totals.testCases.passed, "test case" ) <<
" with " << pluralise( totals.assertions.passed, "assertion" ) << '.';
}
}
};
INTERNAL_CATCH_REGISTER_REPORTER( "compact", CompactReporter )
} // end namespace Catch
namespace Catch {
// These are all here to avoid warnings about not having any out of line
// virtual methods
NonCopyable::~NonCopyable() {}
IShared::~IShared() {}
IStream::~IStream() CATCH_NOEXCEPT {}
FileStream::~FileStream() CATCH_NOEXCEPT {}
CoutStream::~CoutStream() CATCH_NOEXCEPT {}
DebugOutStream::~DebugOutStream() CATCH_NOEXCEPT {}
StreamBufBase::~StreamBufBase() CATCH_NOEXCEPT {}
IContext::~IContext() {}
IResultCapture::~IResultCapture() {}
ITestCase::~ITestCase() {}
ITestCaseRegistry::~ITestCaseRegistry() {}
IRegistryHub::~IRegistryHub() {}
IMutableRegistryHub::~IMutableRegistryHub() {}
IExceptionTranslator::~IExceptionTranslator() {}
IExceptionTranslatorRegistry::~IExceptionTranslatorRegistry() {}
IReporter::~IReporter() {}
IReporterFactory::~IReporterFactory() {}
IReporterRegistry::~IReporterRegistry() {}
IStreamingReporter::~IStreamingReporter() {}
AssertionStats::~AssertionStats() {}
SectionStats::~SectionStats() {}
TestCaseStats::~TestCaseStats() {}
TestGroupStats::~TestGroupStats() {}
TestRunStats::~TestRunStats() {}
CumulativeReporterBase::SectionNode::~SectionNode() {}
CumulativeReporterBase::~CumulativeReporterBase() {}
StreamingReporterBase::~StreamingReporterBase() {}
ConsoleReporter::~ConsoleReporter() {}
CompactReporter::~CompactReporter() {}
IRunner::~IRunner() {}
IMutableContext::~IMutableContext() {}
IConfig::~IConfig() {}
XmlReporter::~XmlReporter() {}
JunitReporter::~JunitReporter() {}
TestRegistry::~TestRegistry() {}
FreeFunctionTestCase::~FreeFunctionTestCase() {}
IGeneratorInfo::~IGeneratorInfo() {}
IGeneratorsForTest::~IGeneratorsForTest() {}
WildcardPattern::~WildcardPattern() {}
TestSpec::Pattern::~Pattern() {}
TestSpec::NamePattern::~NamePattern() {}
TestSpec::TagPattern::~TagPattern() {}
TestSpec::ExcludedPattern::~ExcludedPattern() {}
Matchers::Impl::StdString::Equals::~Equals() {}
Matchers::Impl::StdString::Contains::~Contains() {}
Matchers::Impl::StdString::StartsWith::~StartsWith() {}
Matchers::Impl::StdString::EndsWith::~EndsWith() {}
void Config::dummy() {}
namespace TestCaseTracking {
ITracker::~ITracker() {}
TrackerBase::~TrackerBase() {}
SectionTracker::~SectionTracker() {}
IndexTracker::~IndexTracker() {}
}
}
#ifdef __clang__
#pragma clang diagnostic pop
#endif
#endif
#ifdef CATCH_CONFIG_MAIN
// #included from: internal/catch_default_main.hpp
#define TWOBLUECUBES_CATCH_DEFAULT_MAIN_HPP_INCLUDED
#ifndef __OBJC__
// Standard C/C++ main entry point
int main (int argc, char * argv[]) {
int result = Catch::Session().run( argc, argv );
return ( result < 0xff ? result : 0xff );
}
#else // __OBJC__
// Objective-C entry point
int main (int argc, char * const argv[]) {
#if !CATCH_ARC_ENABLED
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
#endif
Catch::registerTestMethods();
int result = Catch::Session().run( argc, (char* const*)argv );
#if !CATCH_ARC_ENABLED
[pool drain];
#endif
return ( result < 0xff ? result : 0xff );
}
#endif // __OBJC__
#endif
#ifdef CLARA_CONFIG_MAIN_NOT_DEFINED
# undef CLARA_CONFIG_MAIN
#endif
//////
// If this config identifier is defined then all CATCH macros are prefixed with CATCH_
#ifdef CATCH_CONFIG_PREFIX_ALL
#define CATCH_REQUIRE( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::Normal, "CATCH_REQUIRE" )
#define CATCH_REQUIRE_FALSE( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::Normal | Catch::ResultDisposition::FalseTest, "CATCH_REQUIRE_FALSE" )
#define CATCH_REQUIRE_THROWS( expr ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::Normal, "", "CATCH_REQUIRE_THROWS" )
#define CATCH_REQUIRE_THROWS_AS( expr, exceptionType ) INTERNAL_CATCH_THROWS_AS( expr, exceptionType, Catch::ResultDisposition::Normal, "CATCH_REQUIRE_THROWS_AS" )
#define CATCH_REQUIRE_THROWS_WITH( expr, matcher ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::Normal, matcher, "CATCH_REQUIRE_THROWS_WITH" )
#define CATCH_REQUIRE_NOTHROW( expr ) INTERNAL_CATCH_NO_THROW( expr, Catch::ResultDisposition::Normal, "CATCH_REQUIRE_NOTHROW" )
#define CATCH_CHECK( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::ContinueOnFailure, "CATCH_CHECK" )
#define CATCH_CHECK_FALSE( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::ContinueOnFailure | Catch::ResultDisposition::FalseTest, "CATCH_CHECK_FALSE" )
#define CATCH_CHECKED_IF( expr ) INTERNAL_CATCH_IF( expr, Catch::ResultDisposition::ContinueOnFailure, "CATCH_CHECKED_IF" )
#define CATCH_CHECKED_ELSE( expr ) INTERNAL_CATCH_ELSE( expr, Catch::ResultDisposition::ContinueOnFailure, "CATCH_CHECKED_ELSE" )
#define CATCH_CHECK_NOFAIL( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::ContinueOnFailure | Catch::ResultDisposition::SuppressFail, "CATCH_CHECK_NOFAIL" )
#define CATCH_CHECK_THROWS( expr ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::ContinueOnFailure, "", "CATCH_CHECK_THROWS" )
#define CATCH_CHECK_THROWS_AS( expr, exceptionType ) INTERNAL_CATCH_THROWS_AS( expr, exceptionType, Catch::ResultDisposition::ContinueOnFailure, "CATCH_CHECK_THROWS_AS" )
#define CATCH_CHECK_THROWS_WITH( expr, matcher ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::ContinueOnFailure, matcher, "CATCH_CHECK_THROWS_WITH" )
#define CATCH_CHECK_NOTHROW( expr ) INTERNAL_CATCH_NO_THROW( expr, Catch::ResultDisposition::ContinueOnFailure, "CATCH_CHECK_NOTHROW" )
#define CATCH_CHECK_THAT( arg, matcher ) INTERNAL_CHECK_THAT( arg, matcher, Catch::ResultDisposition::ContinueOnFailure, "CATCH_CHECK_THAT" )
#define CATCH_REQUIRE_THAT( arg, matcher ) INTERNAL_CHECK_THAT( arg, matcher, Catch::ResultDisposition::Normal, "CATCH_REQUIRE_THAT" )
#define CATCH_INFO( msg ) INTERNAL_CATCH_INFO( msg, "CATCH_INFO" )
#define CATCH_WARN( msg ) INTERNAL_CATCH_MSG( Catch::ResultWas::Warning, Catch::ResultDisposition::ContinueOnFailure, "CATCH_WARN", msg )
#define CATCH_SCOPED_INFO( msg ) INTERNAL_CATCH_INFO( msg, "CATCH_INFO" )
#define CATCH_CAPTURE( msg ) INTERNAL_CATCH_INFO( #msg " := " << msg, "CATCH_CAPTURE" )
#define CATCH_SCOPED_CAPTURE( msg ) INTERNAL_CATCH_INFO( #msg " := " << msg, "CATCH_CAPTURE" )
#ifdef CATCH_CONFIG_VARIADIC_MACROS
#define CATCH_TEST_CASE( ... ) INTERNAL_CATCH_TESTCASE( __VA_ARGS__ )
#define CATCH_TEST_CASE_METHOD( className, ... ) INTERNAL_CATCH_TEST_CASE_METHOD( className, __VA_ARGS__ )
#define CATCH_METHOD_AS_TEST_CASE( method, ... ) INTERNAL_CATCH_METHOD_AS_TEST_CASE( method, __VA_ARGS__ )
#define CATCH_REGISTER_TEST_CASE( Function, ... ) INTERNAL_CATCH_REGISTER_TESTCASE( Function, __VA_ARGS__ )
#define CATCH_SECTION( ... ) INTERNAL_CATCH_SECTION( __VA_ARGS__ )
#define CATCH_FAIL( ... ) INTERNAL_CATCH_MSG( Catch::ResultWas::ExplicitFailure, Catch::ResultDisposition::Normal, "CATCH_FAIL", __VA_ARGS__ )
#define CATCH_SUCCEED( ... ) INTERNAL_CATCH_MSG( Catch::ResultWas::Ok, Catch::ResultDisposition::ContinueOnFailure, "CATCH_SUCCEED", __VA_ARGS__ )
#else
#define CATCH_TEST_CASE( name, description ) INTERNAL_CATCH_TESTCASE( name, description )
#define CATCH_TEST_CASE_METHOD( className, name, description ) INTERNAL_CATCH_TEST_CASE_METHOD( className, name, description )
#define CATCH_METHOD_AS_TEST_CASE( method, name, description ) INTERNAL_CATCH_METHOD_AS_TEST_CASE( method, name, description )
#define CATCH_REGISTER_TEST_CASE( function, name, description ) INTERNAL_CATCH_REGISTER_TESTCASE( function, name, description )
#define CATCH_SECTION( name, description ) INTERNAL_CATCH_SECTION( name, description )
#define CATCH_FAIL( msg ) INTERNAL_CATCH_MSG( Catch::ResultWas::ExplicitFailure, Catch::ResultDisposition::Normal, "CATCH_FAIL", msg )
#define CATCH_SUCCEED( msg ) INTERNAL_CATCH_MSG( Catch::ResultWas::Ok, Catch::ResultDisposition::ContinueOnFailure, "CATCH_SUCCEED", msg )
#endif
#define CATCH_ANON_TEST_CASE() INTERNAL_CATCH_TESTCASE( "", "" )
#define CATCH_REGISTER_REPORTER( name, reporterType ) INTERNAL_CATCH_REGISTER_REPORTER( name, reporterType )
#define CATCH_REGISTER_LEGACY_REPORTER( name, reporterType ) INTERNAL_CATCH_REGISTER_LEGACY_REPORTER( name, reporterType )
#define CATCH_GENERATE( expr) INTERNAL_CATCH_GENERATE( expr )
// "BDD-style" convenience wrappers
#ifdef CATCH_CONFIG_VARIADIC_MACROS
#define CATCH_SCENARIO( ... ) CATCH_TEST_CASE( "Scenario: " __VA_ARGS__ )
#define CATCH_SCENARIO_METHOD( className, ... ) INTERNAL_CATCH_TEST_CASE_METHOD( className, "Scenario: " __VA_ARGS__ )
#else
#define CATCH_SCENARIO( name, tags ) CATCH_TEST_CASE( "Scenario: " name, tags )
#define CATCH_SCENARIO_METHOD( className, name, tags ) INTERNAL_CATCH_TEST_CASE_METHOD( className, "Scenario: " name, tags )
#endif
#define CATCH_GIVEN( desc ) CATCH_SECTION( std::string( "Given: ") + desc, "" )
#define CATCH_WHEN( desc ) CATCH_SECTION( std::string( " When: ") + desc, "" )
#define CATCH_AND_WHEN( desc ) CATCH_SECTION( std::string( " And: ") + desc, "" )
#define CATCH_THEN( desc ) CATCH_SECTION( std::string( " Then: ") + desc, "" )
#define CATCH_AND_THEN( desc ) CATCH_SECTION( std::string( " And: ") + desc, "" )
// If CATCH_CONFIG_PREFIX_ALL is not defined then the CATCH_ prefix is not required
#else
#define REQUIRE( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::Normal, "REQUIRE" )
#define REQUIRE_FALSE( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::Normal | Catch::ResultDisposition::FalseTest, "REQUIRE_FALSE" )
#define REQUIRE_THROWS( expr ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::Normal, "", "REQUIRE_THROWS" )
#define REQUIRE_THROWS_AS( expr, exceptionType ) INTERNAL_CATCH_THROWS_AS( expr, exceptionType, Catch::ResultDisposition::Normal, "REQUIRE_THROWS_AS" )
#define REQUIRE_THROWS_WITH( expr, matcher ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::Normal, matcher, "REQUIRE_THROWS_WITH" )
#define REQUIRE_NOTHROW( expr ) INTERNAL_CATCH_NO_THROW( expr, Catch::ResultDisposition::Normal, "REQUIRE_NOTHROW" )
#define CHECK( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::ContinueOnFailure, "CHECK" )
#define CHECK_FALSE( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::ContinueOnFailure | Catch::ResultDisposition::FalseTest, "CHECK_FALSE" )
#define CHECKED_IF( expr ) INTERNAL_CATCH_IF( expr, Catch::ResultDisposition::ContinueOnFailure, "CHECKED_IF" )
#define CHECKED_ELSE( expr ) INTERNAL_CATCH_ELSE( expr, Catch::ResultDisposition::ContinueOnFailure, "CHECKED_ELSE" )
#define CHECK_NOFAIL( expr ) INTERNAL_CATCH_TEST( expr, Catch::ResultDisposition::ContinueOnFailure | Catch::ResultDisposition::SuppressFail, "CHECK_NOFAIL" )
#define CHECK_THROWS( expr ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::ContinueOnFailure, "", "CHECK_THROWS" )
#define CHECK_THROWS_AS( expr, exceptionType ) INTERNAL_CATCH_THROWS_AS( expr, exceptionType, Catch::ResultDisposition::ContinueOnFailure, "CHECK_THROWS_AS" )
#define CHECK_THROWS_WITH( expr, matcher ) INTERNAL_CATCH_THROWS( expr, Catch::ResultDisposition::ContinueOnFailure, matcher, "CHECK_THROWS_WITH" )
#define CHECK_NOTHROW( expr ) INTERNAL_CATCH_NO_THROW( expr, Catch::ResultDisposition::ContinueOnFailure, "CHECK_NOTHROW" )
#define CHECK_THAT( arg, matcher ) INTERNAL_CHECK_THAT( arg, matcher, Catch::ResultDisposition::ContinueOnFailure, "CHECK_THAT" )
#define REQUIRE_THAT( arg, matcher ) INTERNAL_CHECK_THAT( arg, matcher, Catch::ResultDisposition::Normal, "REQUIRE_THAT" )
#define INFO( msg ) INTERNAL_CATCH_INFO( msg, "INFO" )
#define WARN( msg ) INTERNAL_CATCH_MSG( Catch::ResultWas::Warning, Catch::ResultDisposition::ContinueOnFailure, "WARN", msg )
#define SCOPED_INFO( msg ) INTERNAL_CATCH_INFO( msg, "INFO" )
#define CAPTURE( msg ) INTERNAL_CATCH_INFO( #msg " := " << msg, "CAPTURE" )
#define SCOPED_CAPTURE( msg ) INTERNAL_CATCH_INFO( #msg " := " << msg, "CAPTURE" )
#ifdef CATCH_CONFIG_VARIADIC_MACROS
#define TEST_CASE( ... ) INTERNAL_CATCH_TESTCASE( __VA_ARGS__ )
#define TEST_CASE_METHOD( className, ... ) INTERNAL_CATCH_TEST_CASE_METHOD( className, __VA_ARGS__ )
#define METHOD_AS_TEST_CASE( method, ... ) INTERNAL_CATCH_METHOD_AS_TEST_CASE( method, __VA_ARGS__ )
#define REGISTER_TEST_CASE( Function, ... ) INTERNAL_CATCH_REGISTER_TESTCASE( Function, __VA_ARGS__ )
#define SECTION( ... ) INTERNAL_CATCH_SECTION( __VA_ARGS__ )
#define FAIL( ... ) INTERNAL_CATCH_MSG( Catch::ResultWas::ExplicitFailure, Catch::ResultDisposition::Normal, "FAIL", __VA_ARGS__ )
#define SUCCEED( ... ) INTERNAL_CATCH_MSG( Catch::ResultWas::Ok, Catch::ResultDisposition::ContinueOnFailure, "SUCCEED", __VA_ARGS__ )
#else
#define TEST_CASE( name, description ) INTERNAL_CATCH_TESTCASE( name, description )
#define TEST_CASE_METHOD( className, name, description ) INTERNAL_CATCH_TEST_CASE_METHOD( className, name, description )
#define METHOD_AS_TEST_CASE( method, name, description ) INTERNAL_CATCH_METHOD_AS_TEST_CASE( method, name, description )
#define REGISTER_TEST_CASE( method, name, description ) INTERNAL_CATCH_REGISTER_TESTCASE( method, name, description )
#define SECTION( name, description ) INTERNAL_CATCH_SECTION( name, description )
#define FAIL( msg ) INTERNAL_CATCH_MSG( Catch::ResultWas::ExplicitFailure, Catch::ResultDisposition::Normal, "FAIL", msg )
#define SUCCEED( msg ) INTERNAL_CATCH_MSG( Catch::ResultWas::Ok, Catch::ResultDisposition::ContinueOnFailure, "SUCCEED", msg )
#endif
#define ANON_TEST_CASE() INTERNAL_CATCH_TESTCASE( "", "" )
#define REGISTER_REPORTER( name, reporterType ) INTERNAL_CATCH_REGISTER_REPORTER( name, reporterType )
#define REGISTER_LEGACY_REPORTER( name, reporterType ) INTERNAL_CATCH_REGISTER_LEGACY_REPORTER( name, reporterType )
#define GENERATE( expr) INTERNAL_CATCH_GENERATE( expr )
#endif
#define CATCH_TRANSLATE_EXCEPTION( signature ) INTERNAL_CATCH_TRANSLATE_EXCEPTION( signature )
// "BDD-style" convenience wrappers
#ifdef CATCH_CONFIG_VARIADIC_MACROS
#define SCENARIO( ... ) TEST_CASE( "Scenario: " __VA_ARGS__ )
#define SCENARIO_METHOD( className, ... ) INTERNAL_CATCH_TEST_CASE_METHOD( className, "Scenario: " __VA_ARGS__ )
#else
#define SCENARIO( name, tags ) TEST_CASE( "Scenario: " name, tags )
#define SCENARIO_METHOD( className, name, tags ) INTERNAL_CATCH_TEST_CASE_METHOD( className, "Scenario: " name, tags )
#endif
#define GIVEN( desc ) SECTION( std::string(" Given: ") + desc, "" )
#define WHEN( desc ) SECTION( std::string(" When: ") + desc, "" )
#define AND_WHEN( desc ) SECTION( std::string("And when: ") + desc, "" )
#define THEN( desc ) SECTION( std::string(" Then: ") + desc, "" )
#define AND_THEN( desc ) SECTION( std::string(" And: ") + desc, "" )
using Catch::Detail::Approx;
#endif // TWOBLUECUBES_SINGLE_INCLUDE_CATCH_HPP_INCLUDED<|fim▁end|> | , result( _stats.assertionResult ) |
<|file_name|>regions-static-closure.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
#![feature(unboxed_closures)]
struct closure_box<'a> {
cl: Box<FnMut() + 'a>,
}
fn box_it<'a>(x: Box<FnMut() + 'a>) -> closure_box<'a> {
closure_box {cl: x}
}
fn call_static_closure(mut cl: closure_box<'static>) {
cl.cl.call_mut(())
}
pub fn main() {<|fim▁hole|>}<|fim▁end|> | let cl_box = box_it(box || println!("Hello, world!"));
call_static_closure(cl_box); |
<|file_name|>check_const.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies that the types and values of const and static items
// are safe. The rules enforced by this module are:
//
// - For each *mutable* static item, it checks that its **type**:
// - doesn't have a destructor
// - doesn't own a box
//
// - For each *immutable* static item, it checks that its **value**:
// - doesn't own a box
// - doesn't contain a struct literal or a call to an enum variant / struct constructor where
// - the type of the struct/enum has a dtor
//
// Rules Enforced Elsewhere:
// - It's not possible to take the address of a static item with unsafe interior. This is enforced
// by borrowck::gather_loans
use middle::cast::{CastKind};
use middle::const_eval;
use middle::def;
use middle::expr_use_visitor as euv;
use middle::infer;
use middle::mem_categorization as mc;
use middle::traits;
use middle::ty::{self, Ty};
use util::nodemap::NodeMap;
use syntax::ast;
use syntax::codemap::Span;
use syntax::visit::{self, Visitor};
use std::collections::hash_map::Entry;
// Const qualification, from partial to completely promotable.
bitflags! {
#[derive(RustcEncodable, RustcDecodable)]
flags ConstQualif: u8 {
// Inner mutability (can not be placed behind a reference) or behind
// &mut in a non-global expression. Can be copied from static memory.
const MUTABLE_MEM = 1 << 0,
// Constant value with a type that implements Drop. Can be copied
// from static memory, similar to MUTABLE_MEM.
const NEEDS_DROP = 1 << 1,
// Even if the value can be placed in static memory, copying it from
// there is more expensive than in-place instantiation, and/or it may
// be too large. This applies to [T; N] and everything containing it.
// N.B.: references need to clear this flag to not end up on the stack.
const PREFER_IN_PLACE = 1 << 2,
// May use more than 0 bytes of memory, doesn't impact the constness
// directly, but is not allowed to be borrowed mutably in a constant.
const NON_ZERO_SIZED = 1 << 3,
// Actually borrowed, has to always be in static memory. Does not
// propagate, and requires the expression to behave like a 'static
// lvalue. The set of expressions with this flag is the minimum
// that have to be promoted.
const HAS_STATIC_BORROWS = 1 << 4,
// Invalid const for miscellaneous reasons (e.g. not implemented).
const NOT_CONST = 1 << 5,
// Borrowing the expression won't produce &'static T if any of these
// bits are set, though the value could be copied from static memory
// if `NOT_CONST` isn't set.
const NON_STATIC_BORROWS = ConstQualif::MUTABLE_MEM.bits |
ConstQualif::NEEDS_DROP.bits |
ConstQualif::NOT_CONST.bits
}
}
#[derive(Copy, Clone, Eq, PartialEq)]
enum Mode {
Const,
ConstFn,
Static,
StaticMut,
// An expression that occurs outside of any constant context
// (i.e. `const`, `static`, array lengths, etc.). The value
// can be variable at runtime, but will be promotable to
// static memory if we can prove it is actually constant.
Var,
}
struct CheckCrateVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
mode: Mode,
qualif: ConstQualif,
rvalue_borrows: NodeMap<ast::Mutability>
}
impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
fn with_mode<F, R>(&mut self, mode: Mode, f: F) -> R where
F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R,
{
let (old_mode, old_qualif) = (self.mode, self.qualif);
self.mode = mode;
self.qualif = ConstQualif::empty();
let r = f(self);
self.mode = old_mode;
self.qualif = old_qualif;
r
}
fn with_euv<'b, F, R>(&'b mut self, item_id: Option<ast::NodeId>, f: F) -> R where
F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'b, 'tcx>) -> R,
{
let param_env = match item_id {
Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id),
None => self.tcx.empty_parameter_environment()
};
let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, Some(param_env), false);
f(&mut euv::ExprUseVisitor::new(self, &infcx))
}
fn global_expr(&mut self, mode: Mode, expr: &ast::Expr) -> ConstQualif {
assert!(mode != Mode::Var);
match self.tcx.const_qualif_map.borrow_mut().entry(expr.id) {
Entry::Occupied(entry) => return *entry.get(),
Entry::Vacant(entry) => {
// Prevent infinite recursion on re-entry.
entry.insert(ConstQualif::empty());
}
}
self.with_mode(mode, |this| {
this.with_euv(None, |euv| euv.consume_expr(expr));
this.visit_expr(expr);
this.qualif
})
}
fn fn_like(&mut self,
fk: visit::FnKind,
fd: &ast::FnDecl,
b: &ast::Block,
s: Span,
fn_id: ast::NodeId)
-> ConstQualif {
match self.tcx.const_qualif_map.borrow_mut().entry(fn_id) {
Entry::Occupied(entry) => return *entry.get(),
Entry::Vacant(entry) => {
// Prevent infinite recursion on re-entry.
entry.insert(ConstQualif::empty());
}
}
let mode = match fk {
visit::FkItemFn(_, _, _, ast::Constness::Const, _, _) => {
Mode::ConstFn
}
visit::FkMethod(_, m, _) => {
if m.constness == ast::Constness::Const {
Mode::ConstFn
} else {
Mode::Var
}
}
_ => Mode::Var
};
// Ensure the arguments are simple, not mutable/by-ref or patterns.
if mode == Mode::ConstFn {
for arg in &fd.inputs {
match arg.pat.node {
ast::PatIdent(ast::BindByValue(ast::MutImmutable), _, None) => {}
_ => {
span_err!(self.tcx.sess, arg.pat.span, E0022,
"arguments of constant functions can only \
be immutable by-value bindings");
}
}
}
}
let qualif = self.with_mode(mode, |this| {
this.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b));
visit::walk_fn(this, fk, fd, b, s);
this.qualif
});
// Keep only bits that aren't affected by function body (NON_ZERO_SIZED),
// and bits that don't change semantics, just optimizations (PREFER_IN_PLACE).
let qualif = qualif & (ConstQualif::NON_ZERO_SIZED | ConstQualif::PREFER_IN_PLACE);
self.tcx.const_qualif_map.borrow_mut().insert(fn_id, qualif);
qualif
}
fn add_qualif(&mut self, qualif: ConstQualif) {
self.qualif = self.qualif | qualif;
}<|fim▁hole|> fn handle_const_fn_call(&mut self,
expr: &ast::Expr,
def_id: ast::DefId,
ret_ty: Ty<'tcx>)
-> bool {
if let Some(fn_like) = const_eval::lookup_const_fn_by_id(self.tcx, def_id) {
if
// we are in a static/const initializer
self.mode != Mode::Var &&
// feature-gate is not enabled
!self.tcx.sess.features.borrow().const_fn &&
// this doesn't come from a macro that has #[allow_internal_unstable]
!self.tcx.sess.codemap().span_allows_unstable(expr.span)
{
self.tcx.sess.span_err(
expr.span,
&format!("const fns are an unstable feature"));
fileline_help!(
self.tcx.sess,
expr.span,
"in Nightly builds, add `#![feature(const_fn)]` to the crate \
attributes to enable");
}
let qualif = self.fn_like(fn_like.kind(),
fn_like.decl(),
fn_like.body(),
fn_like.span(),
fn_like.id());
self.add_qualif(qualif);
if ret_ty.type_contents(self.tcx).interior_unsafe() {
self.add_qualif(ConstQualif::MUTABLE_MEM);
}
true
} else {
false
}
}
fn record_borrow(&mut self, id: ast::NodeId, mutbl: ast::Mutability) {
match self.rvalue_borrows.entry(id) {
Entry::Occupied(mut entry) => {
// Merge the two borrows, taking the most demanding
// one, mutability-wise.
if mutbl == ast::MutMutable {
entry.insert(mutbl);
}
}
Entry::Vacant(entry) => {
entry.insert(mutbl);
}
}
}
fn msg(&self) -> &'static str {
match self.mode {
Mode::Const => "constant",
Mode::ConstFn => "constant function",
Mode::StaticMut | Mode::Static => "static",
Mode::Var => unreachable!(),
}
}
fn check_static_mut_type(&self, e: &ast::Expr) {
let node_ty = self.tcx.node_id_to_type(e.id);
let tcontents = node_ty.type_contents(self.tcx);
let suffix = if tcontents.has_dtor() {
"destructors"
} else if tcontents.owns_owned() {
"boxes"
} else {
return
};
span_err!(self.tcx.sess, e.span, E0397,
"mutable statics are not allowed to have {}", suffix);
}
fn check_static_type(&self, e: &ast::Expr) {
let ty = self.tcx.node_id_to_type(e.id);
let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, None, false);
let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic);
let mut fulfill_cx = infcx.fulfillment_cx.borrow_mut();
fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause);
match fulfill_cx.select_all_or_error(&infcx) {
Ok(()) => { },
Err(ref errors) => {
traits::report_fulfillment_errors(&infcx, errors);
}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
fn visit_item(&mut self, i: &ast::Item) {
debug!("visit_item(item={})", self.tcx.map.node_to_string(i.id));
match i.node {
ast::ItemStatic(_, ast::MutImmutable, ref expr) => {
self.check_static_type(&**expr);
self.global_expr(Mode::Static, &**expr);
}
ast::ItemStatic(_, ast::MutMutable, ref expr) => {
self.check_static_mut_type(&**expr);
self.global_expr(Mode::StaticMut, &**expr);
}
ast::ItemConst(_, ref expr) => {
self.global_expr(Mode::Const, &**expr);
}
ast::ItemEnum(ref enum_definition, _) => {
for var in &enum_definition.variants {
if let Some(ref ex) = var.node.disr_expr {
self.global_expr(Mode::Const, &**ex);
}
}
}
_ => {
self.with_mode(Mode::Var, |v| visit::walk_item(v, i));
}
}
}
fn visit_trait_item(&mut self, t: &'v ast::TraitItem) {
match t.node {
ast::ConstTraitItem(_, ref default) => {
if let Some(ref expr) = *default {
self.global_expr(Mode::Const, &*expr);
} else {
visit::walk_trait_item(self, t);
}
}
_ => self.with_mode(Mode::Var, |v| visit::walk_trait_item(v, t)),
}
}
fn visit_impl_item(&mut self, i: &'v ast::ImplItem) {
match i.node {
ast::ConstImplItem(_, ref expr) => {
self.global_expr(Mode::Const, &*expr);
}
_ => self.with_mode(Mode::Var, |v| visit::walk_impl_item(v, i)),
}
}
fn visit_fn(&mut self,
fk: visit::FnKind<'v>,
fd: &'v ast::FnDecl,
b: &'v ast::Block,
s: Span,
fn_id: ast::NodeId) {
self.fn_like(fk, fd, b, s, fn_id);
}
fn visit_pat(&mut self, p: &ast::Pat) {
match p.node {
ast::PatLit(ref lit) => {
self.global_expr(Mode::Const, &**lit);
}
ast::PatRange(ref start, ref end) => {
self.global_expr(Mode::Const, &**start);
self.global_expr(Mode::Const, &**end);
}
_ => visit::walk_pat(self, p)
}
}
fn visit_block(&mut self, block: &ast::Block) {
// Check all statements in the block
for stmt in &block.stmts {
let span = match stmt.node {
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(_) => decl.span,
// Item statements are allowed
ast::DeclItem(_) => continue
}
}
ast::StmtExpr(ref expr, _) => expr.span,
ast::StmtSemi(ref semi, _) => semi.span,
ast::StmtMac(..) => {
self.tcx.sess.span_bug(stmt.span, "unexpanded statement \
macro in const?!")
}
};
self.add_qualif(ConstQualif::NOT_CONST);
if self.mode != Mode::Var {
span_err!(self.tcx.sess, span, E0016,
"blocks in {}s are limited to items and \
tail expressions", self.msg());
}
}
visit::walk_block(self, block);
}
fn visit_expr(&mut self, ex: &ast::Expr) {
let mut outer = self.qualif;
self.qualif = ConstQualif::empty();
let node_ty = self.tcx.node_id_to_type(ex.id);
check_expr(self, ex, node_ty);
check_adjustments(self, ex);
// Special-case some expressions to avoid certain flags bubbling up.
match ex.node {
ast::ExprCall(ref callee, ref args) => {
for arg in args {
self.visit_expr(&**arg)
}
let inner = self.qualif;
self.visit_expr(&**callee);
// The callee's size doesn't count in the call.
let added = self.qualif - inner;
self.qualif = inner | (added - ConstQualif::NON_ZERO_SIZED);
}
ast::ExprRepeat(ref element, _) => {
self.visit_expr(&**element);
// The count is checked elsewhere (typeck).
let count = match node_ty.sty {
ty::TyArray(_, n) => n,
_ => unreachable!()
};
// [element; 0] is always zero-sized.
if count == 0 {
self.qualif.remove(ConstQualif::NON_ZERO_SIZED | ConstQualif::PREFER_IN_PLACE);
}
}
ast::ExprMatch(ref discr, ref arms, _) => {
// Compute the most demanding borrow from all the arms'
// patterns and set that on the discriminator.
let mut borrow = None;
for pat in arms.iter().flat_map(|arm| &arm.pats) {
let pat_borrow = self.rvalue_borrows.remove(&pat.id);
match (borrow, pat_borrow) {
(None, _) | (_, Some(ast::MutMutable)) => {
borrow = pat_borrow;
}
_ => {}
}
}
if let Some(mutbl) = borrow {
self.record_borrow(discr.id, mutbl);
}
visit::walk_expr(self, ex);
}
// Division by zero and overflow checking.
ast::ExprBinary(op, _, _) => {
visit::walk_expr(self, ex);
let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem;
match node_ty.sty {
ty::TyUint(_) | ty::TyInt(_) if div_or_rem => {
if !self.qualif.intersects(ConstQualif::NOT_CONST) {
match const_eval::eval_const_expr_partial(self.tcx, ex, None) {
Ok(_) => {}
Err(msg) => {
span_err!(self.tcx.sess, msg.span, E0020,
"{} in a constant expression",
msg.description())
}
}
}
}
_ => {}
}
}
_ => visit::walk_expr(self, ex)
}
// Handle borrows on (or inside the autorefs of) this expression.
match self.rvalue_borrows.remove(&ex.id) {
Some(ast::MutImmutable) => {
// Constants cannot be borrowed if they contain interior mutability as
// it means that our "silent insertion of statics" could change
// initializer values (very bad).
// If the type doesn't have interior mutability, then `ConstQualif::MUTABLE_MEM` has
// propagated from another error, so erroring again would be just noise.
let tc = node_ty.type_contents(self.tcx);
if self.qualif.intersects(ConstQualif::MUTABLE_MEM) && tc.interior_unsafe() {
outer = outer | ConstQualif::NOT_CONST;
if self.mode != Mode::Var {
self.tcx.sess.span_err(ex.span,
"cannot borrow a constant which contains \
interior mutability, create a static instead");
}
}
// If the reference has to be 'static, avoid in-place initialization
// as that will end up pointing to the stack instead.
if !self.qualif.intersects(ConstQualif::NON_STATIC_BORROWS) {
self.qualif = self.qualif - ConstQualif::PREFER_IN_PLACE;
self.add_qualif(ConstQualif::HAS_STATIC_BORROWS);
}
}
Some(ast::MutMutable) => {
// `&mut expr` means expr could be mutated, unless it's zero-sized.
if self.qualif.intersects(ConstQualif::NON_ZERO_SIZED) {
if self.mode == Mode::Var {
outer = outer | ConstQualif::NOT_CONST;
self.add_qualif(ConstQualif::MUTABLE_MEM);
} else {
span_err!(self.tcx.sess, ex.span, E0017,
"references in {}s may only refer \
to immutable values", self.msg())
}
}
if !self.qualif.intersects(ConstQualif::NON_STATIC_BORROWS) {
self.add_qualif(ConstQualif::HAS_STATIC_BORROWS);
}
}
None => {}
}
self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif);
// Don't propagate certain flags.
self.qualif = outer | (self.qualif - ConstQualif::HAS_STATIC_BORROWS);
}
}
/// This function is used to enforce the constraints on
/// const/static items. It walks through the *value*
/// of the item walking down the expression and evaluating
/// every nested expression. If the expression is not part
/// of a const/static item, it is qualified for promotion
/// instead of producing errors.
fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>,
e: &ast::Expr, node_ty: Ty<'tcx>) {
match node_ty.sty {
ty::TyStruct(did, _) |
ty::TyEnum(did, _) if v.tcx.has_dtor(did) => {
v.add_qualif(ConstQualif::NEEDS_DROP);
if v.mode != Mode::Var {
v.tcx.sess.span_err(e.span,
&format!("{}s are not allowed to have destructors",
v.msg()));
}
}
_ => {}
}
let method_call = ty::MethodCall::expr(e.id);
match e.node {
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) if v.tcx.tables.borrow().method_map.contains_key(&method_call) => {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0011,
"user-defined operators are not allowed in {}s", v.msg());
}
}
ast::ExprBox(..) |
ast::ExprUnary(ast::UnUniq, _) => {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0010,
"allocations are not allowed in {}s", v.msg());
}
}
ast::ExprUnary(op, ref inner) => {
match v.tcx.node_id_to_type(inner.id).sty {
ty::TyRawPtr(_) => {
assert!(op == ast::UnDeref);
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0396,
"raw pointers cannot be dereferenced in {}s", v.msg());
}
}
_ => {}
}
}
ast::ExprBinary(op, ref lhs, _) => {
match v.tcx.node_id_to_type(lhs.id).sty {
ty::TyRawPtr(_) => {
assert!(op.node == ast::BiEq || op.node == ast::BiNe ||
op.node == ast::BiLe || op.node == ast::BiLt ||
op.node == ast::BiGe || op.node == ast::BiGt);
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0395,
"raw pointers cannot be compared in {}s", v.msg());
}
}
_ => {}
}
}
ast::ExprCast(ref from, _) => {
debug!("Checking const cast(id={})", from.id);
match v.tcx.cast_kinds.borrow().get(&from.id) {
None => v.tcx.sess.span_bug(e.span, "no kind for cast"),
Some(&CastKind::PtrAddrCast) | Some(&CastKind::FnPtrAddrCast) => {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0018,
"raw pointers cannot be cast to integers in {}s", v.msg());
}
}
_ => {}
}
}
ast::ExprPath(..) => {
let def = v.tcx.def_map.borrow().get(&e.id).map(|d| d.full_def());
match def {
Some(def::DefVariant(_, _, _)) => {
// Count the discriminator or function pointer.
v.add_qualif(ConstQualif::NON_ZERO_SIZED);
}
Some(def::DefStruct(_)) => {
if let ty::TyBareFn(..) = node_ty.sty {
// Count the function pointer.
v.add_qualif(ConstQualif::NON_ZERO_SIZED);
}
}
Some(def::DefFn(..)) | Some(def::DefMethod(..)) => {
// Count the function pointer.
v.add_qualif(ConstQualif::NON_ZERO_SIZED);
}
Some(def::DefStatic(..)) => {
match v.mode {
Mode::Static | Mode::StaticMut => {}
Mode::Const | Mode::ConstFn => {
span_err!(v.tcx.sess, e.span, E0013,
"{}s cannot refer to other statics, insert \
an intermediate constant instead", v.msg());
}
Mode::Var => v.add_qualif(ConstQualif::NOT_CONST)
}
}
Some(def::DefConst(did)) |
Some(def::DefAssociatedConst(did, _)) => {
if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did,
Some(e.id)) {
let inner = v.global_expr(Mode::Const, expr);
v.add_qualif(inner);
} else {
v.tcx.sess.span_bug(e.span,
"DefConst or DefAssociatedConst \
doesn't point to a constant");
}
}
Some(def::DefLocal(_)) if v.mode == Mode::ConstFn => {
// Sadly, we can't determine whether the types are zero-sized.
v.add_qualif(ConstQualif::NOT_CONST | ConstQualif::NON_ZERO_SIZED);
}
def => {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
debug!("(checking const) found bad def: {:?}", def);
span_err!(v.tcx.sess, e.span, E0014,
"paths in {}s may only refer to constants \
or functions", v.msg());
}
}
}
}
ast::ExprCall(ref callee, _) => {
let mut callee = &**callee;
loop {
callee = match callee.node {
ast::ExprParen(ref inner) => &**inner,
ast::ExprBlock(ref block) => match block.expr {
Some(ref tail) => &**tail,
None => break
},
_ => break
};
}
let def = v.tcx.def_map.borrow().get(&callee.id).map(|d| d.full_def());
let is_const = match def {
Some(def::DefStruct(..)) => true,
Some(def::DefVariant(..)) => {
// Count the discriminator.
v.add_qualif(ConstQualif::NON_ZERO_SIZED);
true
}
Some(def::DefMethod(did, def::FromImpl(_))) |
Some(def::DefFn(did, _)) => {
v.handle_const_fn_call(e, did, node_ty)
}
_ => false
};
if !is_const {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0015,
"function calls in {}s are limited to \
constant functions, \
struct and enum constructors", v.msg());
}
}
}
ast::ExprMethodCall(..) => {
let method = v.tcx.tables.borrow().method_map[&method_call];
let is_const = match v.tcx.impl_or_trait_item(method.def_id).container() {
ty::ImplContainer(_) => v.handle_const_fn_call(e, method.def_id, node_ty),
ty::TraitContainer(_) => false
};
if !is_const {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0378,
"method calls in {}s are limited to \
constant inherent methods", v.msg());
}
}
}
ast::ExprStruct(..) => {
let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id());
if did == v.tcx.lang_items.unsafe_cell_type() {
v.add_qualif(ConstQualif::MUTABLE_MEM);
}
}
ast::ExprLit(_) |
ast::ExprAddrOf(..) => {
v.add_qualif(ConstQualif::NON_ZERO_SIZED);
}
ast::ExprRepeat(..) => {
v.add_qualif(ConstQualif::PREFER_IN_PLACE);
}
ast::ExprClosure(..) => {
// Paths in constant contexts cannot refer to local variables,
// as there are none, and thus closures can't have upvars there.
if v.tcx.with_freevars(e.id, |fv| !fv.is_empty()) {
assert!(v.mode == Mode::Var,
"global closures can't capture anything");
v.add_qualif(ConstQualif::NOT_CONST);
}
}
ast::ExprBlock(_) |
ast::ExprIndex(..) |
ast::ExprField(..) |
ast::ExprTupField(..) |
ast::ExprVec(_) |
ast::ExprParen(..) |
ast::ExprTup(..) => {}
// Conditional control flow (possible to implement).
ast::ExprMatch(..) |
ast::ExprIf(..) |
ast::ExprIfLet(..) |
// Loops (not very meaningful in constants).
ast::ExprWhile(..) |
ast::ExprWhileLet(..) |
ast::ExprForLoop(..) |
ast::ExprLoop(..) |
// More control flow (also not very meaningful).
ast::ExprBreak(_) |
ast::ExprAgain(_) |
ast::ExprRet(_) |
// Miscellaneous expressions that could be implemented.
ast::ExprRange(..) |
// Expressions with side-effects.
ast::ExprAssign(..) |
ast::ExprAssignOp(..) |
ast::ExprInlineAsm(_) |
ast::ExprMac(_) => {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0019,
"{} contains unimplemented expression type", v.msg());
}
}
}
}
/// Check the adjustments of an expression
fn check_adjustments<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &ast::Expr) {
match v.tcx.tables.borrow().adjustments.get(&e.id) {
None | Some(&ty::AdjustReifyFnPointer) | Some(&ty::AdjustUnsafeFnPointer) => {}
Some(&ty::AdjustDerefRef(ty::AutoDerefRef { autoderefs, .. })) => {
if (0..autoderefs as u32).any(|autoderef| {
v.tcx.is_overloaded_autoderef(e.id, autoderef)
}) {
v.add_qualif(ConstQualif::NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0400,
"user-defined dereference operators are not allowed in {}s",
v.msg());
}
}
}
}
}
pub fn check_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut CheckCrateVisitor {
tcx: tcx,
mode: Mode::Var,
qualif: ConstQualif::NOT_CONST,
rvalue_borrows: NodeMap()
}, tcx.map.krate());
tcx.sess.abort_if_errors();
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> {
fn consume(&mut self,
_consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt,
_mode: euv::ConsumeMode) {
let mut cur = &cmt;
loop {
match cur.cat {
mc::cat_static_item => {
if self.mode != Mode::Var {
// statics cannot be consumed by value at any time, that would imply
// that they're an initializer (what a const is for) or kept in sync
// over time (not feasible), so deny it outright.
span_err!(self.tcx.sess, consume_span, E0394,
"cannot refer to other statics by value, use the \
address-of operator or a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => cur = cmt,
mc::cat_rvalue(..) |
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
cmt: mc::cmt<'tcx>,
_loan_region: ty::Region,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause)
{
// Kind of hacky, but we allow Unsafe coercions in constants.
// These occur when we convert a &T or *T to a *U, as well as
// when making a thin pointer (e.g., `*T`) into a fat pointer
// (e.g., `*Trait`).
match loan_cause {
euv::LoanCause::AutoUnsafe => {
return;
}
_ => { }
}
let mut cur = &cmt;
let mut is_interior = false;
loop {
match cur.cat {
mc::cat_rvalue(..) => {
if loan_cause == euv::MatchDiscriminant {
// Ignore the dummy immutable borrow created by EUV.
break;
}
let mutbl = bk.to_mutbl_lossy();
if mutbl == ast::MutMutable && self.mode == Mode::StaticMut {
// Mutable slices are the only `&mut` allowed in
// globals, but only in `static mut`, nowhere else.
// FIXME: This exception is really weird... there isn't
// any fundamental reason to restrict this based on
// type of the expression. `&mut [1]` has exactly the
// same representation as &mut 1.
match cmt.ty.sty {
ty::TyArray(_, _) | ty::TySlice(_) => break,
_ => {}
}
}
self.record_borrow(borrow_id, mutbl);
break;
}
mc::cat_static_item => {
if is_interior && self.mode != Mode::Var {
// Borrowed statics can specifically *only* have their address taken,
// not any number of other borrows such as borrowing fields, reading
// elements of an array, etc.
self.tcx.sess.span_err(borrow_span,
"cannot refer to the interior of another \
static, use a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => {
is_interior = true;
cur = cmt;
}
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn decl_without_init(&mut self,
_id: ast::NodeId,
_span: Span) {}
fn mutate(&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
_assignee_cmt: mc::cmt,
_mode: euv::MutateMode) {}
fn matched_pat(&mut self,
_: &ast::Pat,
_: mc::cmt,
_: euv::MatchMode) {}
fn consume_pat(&mut self,
_consume_pat: &ast::Pat,
_cmt: mc::cmt,
_mode: euv::ConsumeMode) {}
}<|fim▁end|> |
/// Returns true if the call is to a const fn or method. |
<|file_name|>healthy.go<|end_file_name|><|fim▁begin|>package etcdutil
import (
"log"
"math/rand"
"path"
"strconv"
"time"
"github.com/coreos/go-etcd/etcd"
)
// heartbeat to etcd cluster until stop
func Heartbeat(client *etcd.Client, name string, taskID uint64, interval time.Duration, stop chan struct{}) error {
for {
_, err := client.Set(TaskHealthyPath(name, taskID), "health", computeTTL(interval))
if err != nil {
return err
}
select {
case <-time.After(interval):
case <-stop:
return nil
}
}
}
// detect failure of the given taskID
func DetectFailure(client *etcd.Client, name string, stop chan bool) error {
receiver := make(chan *etcd.Response, 1)
go client.Watch(HealthyPath(name), 0, true, receiver, stop)
for resp := range receiver {
if resp.Action != "expire" {
continue
}
if err := ReportFailure(client, name, path.Base(resp.Node.Key)); err != nil {
return err
}
}
return nil
}
// report failure to etcd cluster
// If a framework detects a failure, it tries to report failure to /FreeTasks/{taskID}
func ReportFailure(client *etcd.Client, name, failedTask string) error {
_, err := client.Set(FreeTaskPath(name, failedTask), "failed", 0)
return err
}
// WaitFreeTask blocks until it gets a hint of free task
func WaitFreeTask(client *etcd.Client, name string, logger *log.Logger) (uint64, error) {
slots, err := client.Get(FreeTaskDir(name), false, true)
if err != nil {
return 0, err
}
if total := len(slots.Node.Nodes); total > 0 {
ri := rand.Intn(total)
s := slots.Node.Nodes[ri]
idStr := path.Base(s.Key)
id, err := strconv.ParseUint(idStr, 0, 64)
if err != nil {
return 0, err<|fim▁hole|>
watchIndex := slots.EtcdIndex + 1
respChan := make(chan *etcd.Response, 1)
go func() {
for {
logger.Printf("start to wait failure at index %d", watchIndex)
resp, err := client.Watch(FreeTaskDir(name), watchIndex, true, nil, nil)
if err != nil {
logger.Printf("WARN: WaitFailure watch failed: %v", err)
return
}
if resp.Action == "set" {
respChan <- resp
return
}
watchIndex = resp.EtcdIndex + 1
}
}()
var resp *etcd.Response
var waitTime uint64 = 0
for {
select {
case resp = <-respChan:
idStr := path.Base(resp.Node.Key)
id, err := strconv.ParseUint(idStr, 10, 64)
if err != nil {
return 0, err
}
return id, nil
case <-time.After(10 * time.Second):
waitTime++
logger.Printf("Node already wait failure for %d0s", waitTime)
}
}
}
func computeTTL(interval time.Duration) uint64 {
if interval/time.Second < 1 {
return 3
}
return 3 * uint64(interval/time.Second)
}<|fim▁end|> | }
logger.Printf("got free task %v, randomly choose %d to try...", ListKeys(slots.Node.Nodes), ri)
return id, nil
} |
<|file_name|>notifier.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>..
#import pynotify, gtk, gettext
from gi.repository import Gtk, Gdk, Notify
import gettext
import popupmenu
from autokey.configmanager import *
from autokey import common
HAVE_APPINDICATOR = False
try:
from gi.repository import AppIndicator3
HAVE_APPINDICATOR = True
except ImportError:
pass
gettext.install("autokey")
TOOLTIP_RUNNING = _("AutoKey - running")
TOOLTIP_PAUSED = _("AutoKey - paused")
def get_notifier(app):
if HAVE_APPINDICATOR:
return IndicatorNotifier(app)
else:
return Notifier(app)
class Notifier:
"""
Encapsulates all functionality related to the notification icon, notifications, and tray menu.
"""
def __init__(self, autokeyApp):
Notify.init("AutoKey")
self.app = autokeyApp
self.configManager = autokeyApp.service.configManager
self.icon = Gtk.StatusIcon.new_from_icon_name(ConfigManager.SETTINGS[NOTIFICATION_ICON])
self.update_tool_tip()
self.icon.connect("popup_menu", self.on_popup_menu)
self.icon.connect("activate", self.on_show_configure)
self.errorItem = None
self.update_visible_status()
def update_visible_status(self):
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
self.icon.set_visible(True)
else:
self.icon.set_visible(False)
def update_tool_tip(self):
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
if ConfigManager.SETTINGS[SERVICE_RUNNING]:
self.icon.set_tooltip_text(TOOLTIP_RUNNING)
else:
self.icon.set_tooltip_text(TOOLTIP_PAUSED)
def hide_icon(self):
self.icon.set_visible(False)
def rebuild_menu(self):
pass
# Signal Handlers ----
def on_popup_menu(self, status_icon, button, activate_time, data=None):
# Main Menu items
enableMenuItem = Gtk.CheckMenuItem(_("Enable Expansions"))
enableMenuItem.set_active(self.app.service.is_running())
enableMenuItem.set_sensitive(not self.app.serviceDisabled)
configureMenuItem = Gtk.ImageMenuItem(_("Show Main Window"))
configureMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_PREFERENCES, Gtk.IconSize.MENU))
removeMenuItem = Gtk.ImageMenuItem(_("Remove icon"))
removeMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_CLOSE, Gtk.IconSize.MENU))
quitMenuItem = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_QUIT, None)
# Menu signals
enableMenuItem.connect("toggled", self.on_enable_toggled)
configureMenuItem.connect("activate", self.on_show_configure)
removeMenuItem.connect("activate", self.on_remove_icon)
quitMenuItem.connect("activate", self.on_destroy_and_exit)
# Get phrase folders to add to main menu
folders = []
items = []
for folder in self.configManager.allFolders:
if folder.showInTrayMenu:
folders.append(folder)
for item in self.configManager.allItems:
if item.showInTrayMenu:
items.append(item)
# Construct main menu
menu = popupmenu.PopupMenu(self.app.service, folders, items, False)
if len(items) > 0:
menu.append(Gtk.SeparatorMenuItem())
menu.append(enableMenuItem)
if self.errorItem is not None:
menu.append(self.errorItem)
menu.append(configureMenuItem)
menu.append(removeMenuItem)
menu.append(quitMenuItem)
menu.show_all()
menu.popup(None, None, None, None, button, activate_time)
def on_enable_toggled(self, widget, data=None):
if widget.active:
self.app.unpause_service()
else:
self.app.pause_service()
def on_show_configure(self, widget, data=None):
self.app.show_configure()
def on_remove_icon(self, widget, data=None):
self.icon.set_visible(False)
ConfigManager.SETTINGS[SHOW_TRAY_ICON] = False
def on_destroy_and_exit(self, widget, data=None):
self.app.shutdown()
def notify_error(self, message):
self.show_notify(message, Gtk.STOCK_DIALOG_ERROR)
self.errorItem = Gtk.MenuItem(_("View script error"))
self.errorItem.connect("activate", self.on_show_error)
self.icon.set_from_icon_name(common.ICON_FILE_NOTIFICATION_ERROR)
def on_show_error(self, widget, data=None):
self.app.show_script_error()
self.errorItem = None
self.icon.set_from_icon_name(ConfigManager.SETTINGS[NOTIFICATION_ICON])
def show_notify(self, message, iconName):
Gdk.threads_enter()
n = Notify.Notification.new("AutoKey", message, iconName)
n.set_urgency(Notify.Urgency.LOW)
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
n.attach_to_status_icon(self.icon)
n.show()
Gdk.threads_leave()
class IndicatorNotifier:
def __init__(self, autokeyApp):
Notify.init("AutoKey")
self.app = autokeyApp<|fim▁hole|> AppIndicator3.IndicatorCategory.APPLICATION_STATUS)
self.indicator.set_attention_icon(common.ICON_FILE_NOTIFICATION_ERROR)
self.update_visible_status()
self.rebuild_menu()
def update_visible_status(self):
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
self.indicator.set_status(AppIndicator3.IndicatorStatus.ACTIVE)
else:
self.indicator.set_status(AppIndicator3.IndicatorStatus.PASSIVE)
def hide_icon(self):
self.indicator.set_status(AppIndicator3.IndicatorStatus.PASSIVE)
def rebuild_menu(self):
# Main Menu items
self.errorItem = Gtk.MenuItem(_("View script error"))
enableMenuItem = Gtk.CheckMenuItem(_("Enable Expansions"))
enableMenuItem.set_active(self.app.service.is_running())
enableMenuItem.set_sensitive(not self.app.serviceDisabled)
configureMenuItem = Gtk.ImageMenuItem(_("Show Main Window"))
configureMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_PREFERENCES, Gtk.IconSize.MENU))
removeMenuItem = Gtk.ImageMenuItem(_("Remove icon"))
removeMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_CLOSE, Gtk.IconSize.MENU))
quitMenuItem = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_QUIT, None)
# Menu signals
enableMenuItem.connect("toggled", self.on_enable_toggled)
configureMenuItem.connect("activate", self.on_show_configure)
removeMenuItem.connect("activate", self.on_remove_icon)
quitMenuItem.connect("activate", self.on_destroy_and_exit)
self.errorItem.connect("activate", self.on_show_error)
# Get phrase folders to add to main menu
folders = []
items = []
for folder in self.configManager.allFolders:
if folder.showInTrayMenu:
folders.append(folder)
for item in self.configManager.allItems:
if item.showInTrayMenu:
items.append(item)
# Construct main menu
self.menu = popupmenu.PopupMenu(self.app.service, folders, items, False)
if len(items) > 0:
self.menu.append(Gtk.SeparatorMenuItem())
self.menu.append(self.errorItem)
self.menu.append(enableMenuItem)
self.menu.append(configureMenuItem)
self.menu.append(removeMenuItem)
self.menu.append(quitMenuItem)
self.menu.show_all()
self.errorItem.hide()
self.indicator.set_menu(self.menu)
def notify_error(self, message):
self.show_notify(message, Gtk.STOCK_DIALOG_ERROR)
self.errorItem.show()
self.indicator.set_status(AppIndicator3.IndicatorStatus.ATTENTION)
def show_notify(self, message, iconName):
Gdk.threads_enter()
n = Notify.Notification.new("AutoKey", message, iconName)
n.set_urgency(Notify.Urgency.LOW)
n.show()
Gdk.threads_leave()
def update_tool_tip(self):
pass
def on_show_error(self, widget, data=None):
self.app.show_script_error()
self.errorItem.hide()
self.update_visible_status()
def on_enable_toggled(self, widget, data=None):
if widget.active:
self.app.unpause_service()
else:
self.app.pause_service()
def on_show_configure(self, widget, data=None):
self.app.show_configure()
def on_remove_icon(self, widget, data=None):
self.indicator.set_status(AppIndicator3.IndicatorStatus.PASSIVE)
ConfigManager.SETTINGS[SHOW_TRAY_ICON] = False
def on_destroy_and_exit(self, widget, data=None):
self.app.shutdown()
class UnityLauncher(IndicatorNotifier):
SHOW_ITEM_STRING = _("Add to quicklist/notification menu")
#def __init__(self, autokeyApp):
# IndicatorNotifier.__init__(self, autokeyApp)
def __getQuickItem(self, label):
item = Dbusmenu.Menuitem.new()
item.property_set(Dbusmenu.MENUITEM_PROP_LABEL, label)
item.property_set_bool(Dbusmenu.MENUITEM_PROP_VISIBLE, True)
return item
def rebuild_menu(self):
IndicatorNotifier.rebuild_menu(self)
print threading.currentThread().name
#try:
from gi.repository import Unity, Dbusmenu
HAVE_UNITY = True
print "have unity"
#except ImportError:
# return
print "rebuild unity menu"
self.launcher = Unity.LauncherEntry.get_for_desktop_id ("autokey-gtk.desktop")
# Main Menu items
enableMenuItem = self.__getQuickItem(_("Enable Expansions"))
enableMenuItem.property_set(Dbusmenu.MENUITEM_PROP_TOGGLE_TYPE, Dbusmenu.MENUITEM_TOGGLE_CHECK)
#if self.app.service.is_running():
# enableMenuItem.property_set_int(Dbusmenu.MENUITEM_PROP_TOGGLE_STATE, Dbusmenu.MENUITEM_TOGGLE_STATE_CHECKED)
#else:
# enableMenuItem.property_set_int(Dbusmenu.MENUITEM_PROP_TOGGLE_STATE, Dbusmenu.MENUITEM_TOGGLE_STATE_UNCHECKED)
enableMenuItem.property_set_int(Dbusmenu.MENUITEM_PROP_TOGGLE_STATE, int(self.app.service.is_running()))
enableMenuItem.property_set_bool(Dbusmenu.MENUITEM_PROP_ENABLED, not self.app.serviceDisabled)
configureMenuItem = self.__getQuickItem(_("Show Main Window"))
# Menu signals
enableMenuItem.connect("item-activated", self.on_ql_enable_toggled, None)
configureMenuItem.connect("item-activated", self.on_show_configure, None)
# Get phrase folders to add to main menu
# folders = []
# items = []
# for folder in self.configManager.allFolders:
# if folder.showInTrayMenu:
# folders.append(folder)
#
# for item in self.configManager.allItems:
# if item.showInTrayMenu:
# items.append(item)
# Construct main menu
quicklist = Dbusmenu.Menuitem.new()
#if len(items) > 0:
# self.menu.append(Gtk.SeparatorMenuItem())
quicklist.child_append(enableMenuItem)
quicklist.child_append(configureMenuItem)
self.launcher.set_property ("quicklist", quicklist)
def on_ql_enable_toggled(self, menuitem, data=None):
if menuitem.property_get_int(Menuitem.MENUITEM_PROP_TOGGLE_STATE) == Menuitem.MENUITEM_TOGGLE_STATE_CHECKED:
self.app.unpause_service()
else:
self.app.pause_service()<|fim▁end|> | self.configManager = autokeyApp.service.configManager
self.indicator = AppIndicator3.Indicator.new("AutoKey", ConfigManager.SETTINGS[NOTIFICATION_ICON], |
<|file_name|>test_tutorial001.py<|end_file_name|><|fim▁begin|>from fastapi.testclient import TestClient
from docs_src.request_files.tutorial001 import app
client = TestClient(app)
openapi_schema = {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/files/": {
"post": {
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"summary": "Create File",
"operationId": "create_file_files__post",
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/Body_create_file_files__post"
}
}
},
"required": True,
},
}
},
"/uploadfile/": {
"post": {
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}<|fim▁hole|> },
"summary": "Create Upload File",
"operationId": "create_upload_file_uploadfile__post",
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/Body_create_upload_file_uploadfile__post"
}
}
},
"required": True,
},
}
},
},
"components": {
"schemas": {
"Body_create_upload_file_uploadfile__post": {
"title": "Body_create_upload_file_uploadfile__post",
"required": ["file"],
"type": "object",
"properties": {
"file": {"title": "File", "type": "string", "format": "binary"}
},
},
"Body_create_file_files__post": {
"title": "Body_create_file_files__post",
"required": ["file"],
"type": "object",
"properties": {
"file": {"title": "File", "type": "string", "format": "binary"}
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {"type": "string"},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
def test_openapi_schema():
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == openapi_schema
file_required = {
"detail": [
{
"loc": ["body", "file"],
"msg": "field required",
"type": "value_error.missing",
}
]
}
def test_post_form_no_body():
response = client.post("/files/")
assert response.status_code == 422, response.text
assert response.json() == file_required
def test_post_body_json():
response = client.post("/files/", json={"file": "Foo"})
assert response.status_code == 422, response.text
assert response.json() == file_required
def test_post_file(tmp_path):
path = tmp_path / "test.txt"
path.write_bytes(b"<file content>")
client = TestClient(app)
with path.open("rb") as file:
response = client.post("/files/", files={"file": file})
assert response.status_code == 200, response.text
assert response.json() == {"file_size": 14}
def test_post_large_file(tmp_path):
default_pydantic_max_size = 2 ** 16
path = tmp_path / "test.txt"
path.write_bytes(b"x" * (default_pydantic_max_size + 1))
client = TestClient(app)
with path.open("rb") as file:
response = client.post("/files/", files={"file": file})
assert response.status_code == 200, response.text
assert response.json() == {"file_size": default_pydantic_max_size + 1}
def test_post_upload_file(tmp_path):
path = tmp_path / "test.txt"
path.write_bytes(b"<file content>")
client = TestClient(app)
with path.open("rb") as file:
response = client.post("/uploadfile/", files={"file": file})
assert response.status_code == 200, response.text
assert response.json() == {"filename": "test.txt"}<|fim▁end|> | },
}, |
<|file_name|>usergroup.py<|end_file_name|><|fim▁begin|># https://djangosnippets.org/snippets/2566/
from django import template
from django.template import resolve_variable, NodeList
from django.contrib.auth.models import Group
register = template.Library()
@register.tag()<|fim▁hole|> Usage: {% ifusergroup Admins %} ... {% endifusergroup %}, or
{% ifusergroup Admins|Group1|"Group 2" %} ... {% endifusergroup %}, or
{% ifusergroup Admins %} ... {% else %} ... {% endifusergroup %}
"""
try:
_, group = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError("Tag 'ifusergroup' requires 1 argument.")
nodelist_true = parser.parse(('else', 'endifusergroup'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endifusergroup',))
parser.delete_first_token()
else:
nodelist_false = NodeList()
return GroupCheckNode(group, nodelist_true, nodelist_false)
class GroupCheckNode(template.Node):
def __init__(self, group, nodelist_true, nodelist_false):
self.group = group
self.nodelist_true = nodelist_true
self.nodelist_false = nodelist_false
def render(self, context):
user = resolve_variable('user', context)
if not user.is_authenticated():
return self.nodelist_false.render(context)
for group in self.group.split("|"):
group = group[1:-1] if group.startswith('"') and group.endswith('"') else group
try:
if Group.objects.get(name=group) in user.groups.all():
return self.nodelist_true.render(context)
except Group.DoesNotExist:
pass
return self.nodelist_false.render(context)<|fim▁end|> | def ifusergroup(parser, token):
""" Check to see if the currently logged in user belongs to a specific
group. Requires the Django authentication contrib app and middleware.
|
<|file_name|>test_update_network.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2015,2016,2018 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the update network command."""<|fim▁hole|>
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestUpdateNetwork(TestBrokerCommand):
def test_100_update(self):
command = ["update", "network", "--network", "excx-net",
"--network_environment", "excx",
"--building", "ut", "--type", "dmz-net",
"--side", "b", "--comments", "New network comments"]
self.noouttest(command)
def test_110_verify(self):
command = ["show", "network", "--network", "excx-net",
"--network_environment", "excx"]
out = self.commandtest(command)
self.matchoutput(out, "Comments: New network comments", command)
self.matchoutput(out, "Sysloc: ut.ny.na", command)
self.matchoutput(out, "Network Type: dmz-net", command)
self.matchoutput(out, "Side: b", command)
def test_120_update_rename(self):
command = ["update", "network", "--network", "netsvcmap",
"--rename_to", "rename-test", "--comments", "New comment"]
self.noouttest(command)
def test_121_update_rename_verify(self):
command = ["show", "network", "--network", "rename-test"]
out = self.commandtest(command)
self.matchoutput(out, "Network: rename-test", command)
self.matchoutput(out, "Comments: New comment", command)
def test_122_update_rename_existing(self):
net = self.net["np06bals03_v103"]
command = ["update", "network", "--network", "rename-test",
"--rename_to", "np06bals03_v103"]
out,err = self.successtest(command)
self.matchoutput(err, "WARNING: Network name {} is already used for address {}/{}."
.format("np06bals03_v103", net.ip, net.prefixlen), command)
command = ["update", "network", "--ip", net.ip, "--rename_to", "netsvcmap"]
self.noouttest(command)
def test_200_update_utdmz1(self):
net = self.net["ut_dmz1"]
command = ["update_network",
"--ip=%s" % net.ip,
"--network_compartment="]
self.noouttest(command)
def test_201_verify_utdmz1(self):
command = ["search", "network", "--network_compartment", "perimeter.ut"]
self.noouttest(command)
# There should be a test_constraint_network.py one day...
def test_900_delinuse(self):
net = self.net["unknown0"]
command = ["del", "network", "--ip", net.ip]
out = self.badrequesttest(command)
self.matchoutput(out, "Network %s [%s] is still in use" %
(net.name, net), command)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestUpdateNetwork)
unittest.TextTestRunner(verbosity=2).run(suite)<|fim▁end|> | |
<|file_name|>L_gen.py<|end_file_name|><|fim▁begin|>import random
T = 10
for tt in range(T):
n = 10 ** 6
print(n)<|fim▁hole|> print()
for i in range(n):
print(random.randint(1, 1000), end = ' ')
print()<|fim▁end|> | for i in range(n):
print(random.randint(0, 1000), end = ' ') |
<|file_name|>client_mock_tcp.rs<|end_file_name|><|fim▁begin|>#![feature(default_type_params)]
extern crate curl;
extern crate http;
extern crate hyper;
extern crate test;
use std::fmt::{mod, Show};
use std::str::from_str;
use std::io::{IoResult, MemReader};
use std::io::net::ip::{SocketAddr, ToSocketAddr};
use std::os;
use std::path::BytesContainer;
use http::connecter::Connecter;
use hyper::net;
static README: &'static [u8] = include_bin!("../README.md");
struct MockStream {
read: MemReader,
}
impl Clone for MockStream {
fn clone(&self) -> MockStream {
MockStream::new()
}
}
impl MockStream {
fn new() -> MockStream {
let head = b"HTTP/1.1 200 OK\r\nServer: Mock\r\n\r\n";
let mut res = head.to_vec();
res.push_all(README);
MockStream {
read: MemReader::new(res),
}
}
}
impl Reader for MockStream {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
self.read.read(buf)
}
}
impl Writer for MockStream {
fn write(&mut self, _msg: &[u8]) -> IoResult<()> {
// we're mocking, what do we care.
Ok(())
}
}
#[bench]
fn bench_mock_curl(b: &mut test::Bencher) {
let mut cwd = os::getcwd().unwrap();
cwd.push("README.md");
let s = format!("file://{}", cwd.container_as_str().unwrap());
let url = s.as_slice();
b.iter(|| {
curl::http::handle()
.get(url)
.header("X-Foo", "Bar")
.exec()
.unwrap()
});
}
#[deriving(Clone)]
struct Foo;
impl hyper::header::Header for Foo {
fn header_name(_: Option<Foo>) -> &'static str {
"x-foo"
}
fn parse_header(_: &[Vec<u8>]) -> Option<Foo> {
None
}
}
impl hyper::header::HeaderFormat for Foo {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {<|fim▁hole|> }
}
impl net::NetworkStream for MockStream {
fn peer_name(&mut self) -> IoResult<SocketAddr> {
Ok(from_str("127.0.0.1:1337").unwrap())
}
}
impl net::NetworkConnector for MockStream {
fn connect<To: ToSocketAddr>(_addr: To, _scheme: &str) -> IoResult<MockStream> {
Ok(MockStream::new())
}
}
#[bench]
fn bench_mock_hyper(b: &mut test::Bencher) {
let url = "http://127.0.0.1:1337/";
b.iter(|| {
let mut req = hyper::client::Request::with_stream::<MockStream>(
hyper::Get, hyper::Url::parse(url).unwrap()).unwrap();
req.headers_mut().set(Foo);
req
.start().unwrap()
.send().unwrap()
.read_to_string().unwrap()
});
}
impl Connecter for MockStream {
fn connect(_addr: SocketAddr, _host: &str, _use_ssl: bool) -> IoResult<MockStream> {
Ok(MockStream::new())
}
}
#[bench]
fn bench_mock_http(b: &mut test::Bencher) {
let url = "http://127.0.0.1:1337/";
b.iter(|| {
let mut req: http::client::RequestWriter<MockStream> = http::client::RequestWriter::new(
http::method::Get,
hyper::Url::parse(url).unwrap()
).unwrap();
req.headers.extensions.insert("x-foo".to_string(), "Bar".to_string());
// cant unwrap because Err contains RequestWriter, which does not implement Show
let mut res = match req.read_response() {
Ok(res) => res,
Err(..) => panic!("http response failed")
};
res.read_to_string().unwrap();
});
}<|fim▁end|> | "Bar".fmt(fmt) |
<|file_name|>CircuitRequest.java<|end_file_name|><|fim▁begin|>package org.opennaas.extensions.genericnetwork.model.circuit.request;
/*
* #%L
* OpenNaaS :: Generic Network
* %%
* Copyright (C) 2007 - 2014 Fundació Privada i2CAT, Internet i Innovació a Catalunya
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.NormalizedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.opennaas.extensions.genericnetwork.model.circuit.QoSPolicy;
/**
*
* @author Adrian Rosello Rey (i2CAT)
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"source",
"destination",
"label",
"qosPolicy"
})
@XmlRootElement(name = "qos_policy_request", namespace = "opennaas.api")
public class CircuitRequest {
@XmlAttribute(name = "atomic")
@XmlJavaTypeAdapter(NormalizedStringAdapter.class)
private String atomic;
@XmlElement(required = true)
private Source source;
@XmlElement(required = true)
private Destination destination;
@XmlElement(required = true)
private String label;
@XmlElement(name = "qos_policy")
private QoSPolicy qosPolicy;
public String getAtomic() {
return atomic;
}
public void setAtomic(String atomic) {
this.atomic = atomic;
}
public Source getSource() {
return source;
}
public void setSource(Source source) {
this.source = source;
}
public Destination getDestination() {
return destination;
}
public void setDestination(Destination destination) {
this.destination = destination;
}
public String getLabel() {
return label;
}
public void setLabel(String label) {
this.label = label;
}
public QoSPolicy getQosPolicy() {
return qosPolicy;
}
public void setQosPolicy(QoSPolicy qosPolicy) {
this.qosPolicy = qosPolicy;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((atomic == null) ? 0 : atomic.hashCode());
result = prime * result + ((destination == null) ? 0 : destination.hashCode());
result = prime * result + ((label == null) ? 0 : label.hashCode());
result = prime * result + ((qosPolicy == null) ? 0 : qosPolicy.hashCode());
result = prime * result + ((source == null) ? 0 : source.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CircuitRequest other = (CircuitRequest) obj;
if (atomic == null) {
if (other.atomic != null)
return false;
} else if (!atomic.equals(other.atomic))
return false;
if (destination == null) {
if (other.destination != null)
return false;
} else if (!destination.equals(other.destination))
return false;
if (label == null) {
if (other.label != null)
return false;
} else if (!label.equals(other.label))
return false;
if (qosPolicy == null) {
if (other.qosPolicy != null)
return false;
} else if (!qosPolicy.equals(other.qosPolicy))
return false;
if (source == null) {
if (other.source != null)
return false;
} else if (!source.equals(other.source))
return false;
return true;
}
}<|fim▁end|> | *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
<|file_name|>shuffle.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import platform
import shutil
import warnings
import gc
import itertools
import operator
import random
import pyspark.heapq3 as heapq
from pyspark.serializers import BatchedSerializer, PickleSerializer, FlattenedValuesSerializer, \
CompressedSerializer, AutoBatchedSerializer
try:
import psutil
process = None
def get_used_memory():
""" Return the used memory in MB """
global process
if process is None or process._pid != os.getpid():
process = psutil.Process(os.getpid())
if hasattr(process, "memory_info"):
info = process.memory_info()
else:
info = process.get_memory_info()
return info.rss >> 20
except ImportError:
def get_used_memory():
""" Return the used memory in MB """
if platform.system() == 'Linux':
for line in open('/proc/self/status'):
if line.startswith('VmRSS:'):
return int(line.split()[1]) >> 10
else:
warnings.warn("Please install psutil to have better "
"support with spilling")
if platform.system() == "Darwin":
import resource
rss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
return rss >> 20
# TODO: support windows
return 0
def _get_local_dirs(sub):
""" Get all the directories """
path = os.environ.get("SPARK_LOCAL_DIRS", "/tmp")
dirs = path.split(",")
if len(dirs) > 1:
# different order in different processes and instances
rnd = random.Random(os.getpid() + id(dirs))
random.shuffle(dirs, rnd.random)
return [os.path.join(d, "python", str(os.getpid()), sub) for d in dirs]
# global stats
MemoryBytesSpilled = 0
DiskBytesSpilled = 0
class Aggregator(object):
"""
Aggregator has tree functions to merge values into combiner.
createCombiner: (value) -> combiner
mergeValue: (combine, value) -> combiner
mergeCombiners: (combiner, combiner) -> combiner
"""
def __init__(self, createCombiner, mergeValue, mergeCombiners):
self.createCombiner = createCombiner
self.mergeValue = mergeValue
self.mergeCombiners = mergeCombiners
class SimpleAggregator(Aggregator):
"""
SimpleAggregator is useful for the cases that combiners have
same type with values
"""
def __init__(self, combiner):
Aggregator.__init__(self, lambda x: x, combiner, combiner)
class Merger(object):
"""
Merge shuffled data together by aggregator
"""
def __init__(self, aggregator):
self.agg = aggregator
def mergeValues(self, iterator):
""" Combine the items by creator and combiner """
raise NotImplementedError
def mergeCombiners(self, iterator):
""" Merge the combined items by mergeCombiner """
raise NotImplementedError
def items(self):
""" Return the merged items ad iterator """
raise NotImplementedError
def _compressed_serializer(self, serializer=None):
# always use PickleSerializer to simplify implementation
ser = PickleSerializer()
return AutoBatchedSerializer(CompressedSerializer(ser))
class ExternalMerger(Merger):
"""
External merger will dump the aggregated data into disks when
memory usage goes above the limit, then merge them together.
This class works as follows:
- It repeatedly combine the items and save them in one dict in
memory.
- When the used memory goes above memory limit, it will split
the combined data into partitions by hash code, dump them
into disk, one file per partition.
- Then it goes through the rest of the iterator, combine items
into different dict by hash. Until the used memory goes over
memory limit, it dump all the dicts into disks, one file per
dict. Repeat this again until combine all the items.
- Before return any items, it will load each partition and
combine them separately. Yield them before loading next
partition.
- During loading a partition, if the memory goes over limit,
it will partition the loaded data and dump them into disks
and load them partition by partition again.
`data` and `pdata` are used to hold the merged items in memory.
At first, all the data are merged into `data`. Once the used
memory goes over limit, the items in `data` are dumped into
disks, `data` will be cleared, all rest of items will be merged
into `pdata` and then dumped into disks. Before returning, all
the items in `pdata` will be dumped into disks.
Finally, if any items were spilled into disks, each partition
will be merged into `data` and be yielded, then cleared.
>>> agg = SimpleAggregator(lambda x, y: x + y)
>>> merger = ExternalMerger(agg, 10)
>>> N = 10000
>>> merger.mergeValues(zip(range(N), range(N)))
>>> assert merger.spills > 0
>>> sum(v for k,v in merger.items())
49995000
>>> merger = ExternalMerger(agg, 10)
>>> merger.mergeCombiners(zip(range(N), range(N)))
>>> assert merger.spills > 0
>>> sum(v for k,v in merger.items())
49995000
"""
# the max total partitions created recursively
MAX_TOTAL_PARTITIONS = 4096
def __init__(self, aggregator, memory_limit=512, serializer=None,
localdirs=None, scale=1, partitions=59, batch=1000):
Merger.__init__(self, aggregator)
self.memory_limit = memory_limit
self.serializer = _compressed_serializer(serializer)
self.localdirs = localdirs or _get_local_dirs(str(id(self)))
# number of partitions when spill data into disks
self.partitions = partitions
# check the memory after # of items merged
self.batch = batch
# scale is used to scale down the hash of key for recursive hash map
self.scale = scale
# un-partitioned merged data
self.data = {}
# partitioned merged data, list of dicts
self.pdata = []
# number of chunks dumped into disks
self.spills = 0
# randomize the hash of key, id(o) is the address of o (aligned by 8)
self._seed = id(self) + 7
def _get_spill_dir(self, n):
""" Choose one directory for spill by number n """
return os.path.join(self.localdirs[n % len(self.localdirs)], str(n))
def _next_limit(self):
"""
Return the next memory limit. If the memory is not released
after spilling, it will dump the data only when the used memory
starts to increase.
"""
return max(self.memory_limit, get_used_memory() * 1.05)
def mergeValues(self, iterator):
""" Combine the items by creator and combiner """
# speedup attribute lookup
creator, comb = self.agg.createCombiner, self.agg.mergeValue
c, data, pdata, hfun, batch = 0, self.data, self.pdata, self._partition, self.batch
limit = self.memory_limit
for k, v in iterator:
d = pdata[hfun(k)] if pdata else data
d[k] = comb(d[k], v) if k in d else creator(v)
c += 1
if c >= batch:
if get_used_memory() >= limit:
self._spill()
limit = self._next_limit()
batch /= 2
c = 0
else:
batch *= 1.5
if get_used_memory() >= limit:
self._spill()
def _partition(self, key):
""" Return the partition for key """
return hash((key, self._seed)) % self.partitions
def _object_size(self, obj):
""" How much of memory for this obj, assume that all the objects
consume similar bytes of memory
"""
return 1
def mergeCombiners(self, iterator, limit=None):
""" Merge (K,V) pair by mergeCombiner """
if limit is None:
limit = self.memory_limit
# speedup attribute lookup
comb, hfun, objsize = self.agg.mergeCombiners, self._partition, self._object_size
c, data, pdata, batch = 0, self.data, self.pdata, self.batch
for k, v in iterator:
d = pdata[hfun(k)] if pdata else data
d[k] = comb(d[k], v) if k in d else v
if not limit:
continue
c += objsize(v)
if c > batch:
if get_used_memory() > limit:
self._spill()
limit = self._next_limit()
batch /= 2
c = 0
else:
batch *= 1.5
if limit and get_used_memory() >= limit:
self._spill()
def _spill(self):
"""
dump already partitioned data into disks.
It will dump the data in batch for better performance.
"""
global MemoryBytesSpilled, DiskBytesSpilled
path = self._get_spill_dir(self.spills)
if not os.path.exists(path):
os.makedirs(path)
used_memory = get_used_memory()
if not self.pdata:
# The data has not been partitioned, it will iterator the
# dataset once, write them into different files, has no
# additional memory. It only called when the memory goes
# above limit at the first time.
# open all the files for writing
streams = [open(os.path.join(path, str(i)), 'wb')
for i in range(self.partitions)]
for k, v in self.data.items():
h = self._partition(k)
# put one item in batch, make it compatible with load_stream
# it will increase the memory if dump them in batch
<|fim▁hole|>
for s in streams:
DiskBytesSpilled += s.tell()
s.close()
self.data.clear()
self.pdata.extend([{} for i in range(self.partitions)])
else:
for i in range(self.partitions):
p = os.path.join(path, str(i))
with open(p, "wb") as f:
# dump items in batch
self.serializer.dump_stream(iter(self.pdata[i].items()), f)
self.pdata[i].clear()
DiskBytesSpilled += os.path.getsize(p)
self.spills += 1
gc.collect() # release the memory as much as possible
MemoryBytesSpilled += max(used_memory - get_used_memory(), 0) << 20
def items(self):
""" Return all merged items as iterator """
if not self.pdata and not self.spills:
return iter(self.data.items())
return self._external_items()
def _external_items(self):
""" Return all partitioned items as iterator """
assert not self.data
if any(self.pdata):
self._spill()
# disable partitioning and spilling when merge combiners from disk
self.pdata = []
try:
for i in range(self.partitions):
for v in self._merged_items(i):
yield v
self.data.clear()
# remove the merged partition
for j in range(self.spills):
path = self._get_spill_dir(j)
os.remove(os.path.join(path, str(i)))
finally:
self._cleanup()
def _merged_items(self, index):
self.data = {}
limit = self._next_limit()
for j in range(self.spills):
path = self._get_spill_dir(j)
p = os.path.join(path, str(index))
# do not check memory during merging
with open(p, "rb") as f:
self.mergeCombiners(self.serializer.load_stream(f), 0)
# limit the total partitions
if (self.scale * self.partitions < self.MAX_TOTAL_PARTITIONS
and j < self.spills - 1
and get_used_memory() > limit):
self.data.clear() # will read from disk again
gc.collect() # release the memory as much as possible
return self._recursive_merged_items(index)
return self.data.items()
def _recursive_merged_items(self, index):
"""
merge the partitioned items and return the as iterator
If one partition can not be fit in memory, then them will be
partitioned and merged recursively.
"""
subdirs = [os.path.join(d, "parts", str(index)) for d in self.localdirs]
m = ExternalMerger(self.agg, self.memory_limit, self.serializer, subdirs,
self.scale * self.partitions, self.partitions, self.batch)
m.pdata = [{} for _ in range(self.partitions)]
limit = self._next_limit()
for j in range(self.spills):
path = self._get_spill_dir(j)
p = os.path.join(path, str(index))
with open(p, 'rb') as f:
m.mergeCombiners(self.serializer.load_stream(f), 0)
if get_used_memory() > limit:
m._spill()
limit = self._next_limit()
return m._external_items()
def _cleanup(self):
""" Clean up all the files in disks """
for d in self.localdirs:
shutil.rmtree(d, True)
class ExternalSorter(object):
"""
ExtenalSorter will divide the elements into chunks, sort them in
memory and dump them into disks, finally merge them back.
The spilling will only happen when the used memory goes above
the limit.
>>> sorter = ExternalSorter(1) # 1M
>>> import random
>>> l = list(range(1024))
>>> random.shuffle(l)
>>> sorted(l) == list(sorter.sorted(l))
True
>>> sorted(l) == list(sorter.sorted(l, key=lambda x: -x, reverse=True))
True
"""
def __init__(self, memory_limit, serializer=None):
self.memory_limit = memory_limit
self.local_dirs = _get_local_dirs("sort")
self.serializer = _compressed_serializer(serializer)
def _get_path(self, n):
""" Choose one directory for spill by number n """
d = self.local_dirs[n % len(self.local_dirs)]
if not os.path.exists(d):
os.makedirs(d)
return os.path.join(d, str(n))
def _next_limit(self):
"""
Return the next memory limit. If the memory is not released
after spilling, it will dump the data only when the used memory
starts to increase.
"""
return max(self.memory_limit, get_used_memory() * 1.05)
def sorted(self, iterator, key=None, reverse=False):
"""
Sort the elements in iterator, do external sort when the memory
goes above the limit.
"""
global MemoryBytesSpilled, DiskBytesSpilled
batch, limit = 100, self._next_limit()
chunks, current_chunk = [], []
iterator = iter(iterator)
while True:
# pick elements in batch
chunk = list(itertools.islice(iterator, batch))
current_chunk.extend(chunk)
if len(chunk) < batch:
break
used_memory = get_used_memory()
if used_memory > limit:
# sort them inplace will save memory
current_chunk.sort(key=key, reverse=reverse)
path = self._get_path(len(chunks))
with open(path, 'wb') as f:
self.serializer.dump_stream(current_chunk, f)
def load(f):
for v in self.serializer.load_stream(f):
yield v
# close the file explicit once we consume all the items
# to avoid ResourceWarning in Python3
f.close()
chunks.append(load(open(path, 'rb')))
current_chunk = []
MemoryBytesSpilled += max(used_memory - get_used_memory(), 0) << 20
DiskBytesSpilled += os.path.getsize(path)
os.unlink(path) # data will be deleted after close
elif not chunks:
batch = min(int(batch * 1.5), 10000)
current_chunk.sort(key=key, reverse=reverse)
if not chunks:
return current_chunk
if current_chunk:
chunks.append(iter(current_chunk))
return heapq.merge(chunks, key=key, reverse=reverse)
class ExternalList(object):
"""
ExternalList can have many items which cannot be hold in memory in
the same time.
>>> l = ExternalList(list(range(100)))
>>> len(l)
100
>>> l.append(10)
>>> len(l)
101
>>> for i in range(20240):
... l.append(i)
>>> len(l)
20341
>>> import pickle
>>> l2 = pickle.loads(pickle.dumps(l))
>>> len(l2)
20341
>>> list(l2)[100]
10
"""
LIMIT = 10240
def __init__(self, values):
self.values = values
self.count = len(values)
self._file = None
self._ser = None
def __getstate__(self):
if self._file is not None:
self._file.flush()
with os.fdopen(os.dup(self._file.fileno()), "rb") as f:
f.seek(0)
serialized = f.read()
else:
serialized = b''
return self.values, self.count, serialized
def __setstate__(self, item):
self.values, self.count, serialized = item
if serialized:
self._open_file()
self._file.write(serialized)
else:
self._file = None
self._ser = None
def __iter__(self):
if self._file is not None:
self._file.flush()
# read all items from disks first
with os.fdopen(os.dup(self._file.fileno()), 'rb') as f:
f.seek(0)
for v in self._ser.load_stream(f):
yield v
for v in self.values:
yield v
def __len__(self):
return self.count
def append(self, value):
self.values.append(value)
self.count += 1
# dump them into disk if the key is huge
if len(self.values) >= self.LIMIT:
self._spill()
def _open_file(self):
dirs = _get_local_dirs("objects")
d = dirs[id(self) % len(dirs)]
if not os.path.exists(d):
os.makedirs(d)
p = os.path.join(d, str(id(self)))
self._file = open(p, "w+b", 65536)
self._ser = BatchedSerializer(CompressedSerializer(PickleSerializer()), 1024)
os.unlink(p)
def __del__(self):
if self._file:
self._file.close()
self._file = None
def _spill(self):
""" dump the values into disk """
global MemoryBytesSpilled, DiskBytesSpilled
if self._file is None:
self._open_file()
used_memory = get_used_memory()
pos = self._file.tell()
self._ser.dump_stream(self.values, self._file)
self.values = []
gc.collect()
DiskBytesSpilled += self._file.tell() - pos
MemoryBytesSpilled += max(used_memory - get_used_memory(), 0) << 20
class ExternalListOfList(ExternalList):
"""
An external list for list.
>>> l = ExternalListOfList([[i, i] for i in range(100)])
>>> len(l)
200
>>> l.append(range(10))
>>> len(l)
210
>>> len(list(l))
210
"""
def __init__(self, values):
ExternalList.__init__(self, values)
self.count = sum(len(i) for i in values)
def append(self, value):
ExternalList.append(self, value)
# already counted 1 in ExternalList.append
self.count += len(value) - 1
def __iter__(self):
for values in ExternalList.__iter__(self):
for v in values:
yield v
class GroupByKey(object):
"""
Group a sorted iterator as [(k1, it1), (k2, it2), ...]
>>> k = [i // 3 for i in range(6)]
>>> v = [[i] for i in range(6)]
>>> g = GroupByKey(zip(k, v))
>>> [(k, list(it)) for k, it in g]
[(0, [0, 1, 2]), (1, [3, 4, 5])]
"""
def __init__(self, iterator):
self.iterator = iterator
def __iter__(self):
key, values = None, None
for k, v in self.iterator:
if values is not None and k == key:
values.append(v)
else:
if values is not None:
yield (key, values)
key = k
values = ExternalListOfList([v])
if values is not None:
yield (key, values)
class ExternalGroupBy(ExternalMerger):
"""
Group by the items by key. If any partition of them can not been
hold in memory, it will do sort based group by.
This class works as follows:
- It repeatedly group the items by key and save them in one dict in
memory.
- When the used memory goes above memory limit, it will split
the combined data into partitions by hash code, dump them
into disk, one file per partition. If the number of keys
in one partitions is smaller than 1000, it will sort them
by key before dumping into disk.
- Then it goes through the rest of the iterator, group items
by key into different dict by hash. Until the used memory goes over
memory limit, it dump all the dicts into disks, one file per
dict. Repeat this again until combine all the items. It
also will try to sort the items by key in each partition
before dumping into disks.
- It will yield the grouped items partitions by partitions.
If the data in one partitions can be hold in memory, then it
will load and combine them in memory and yield.
- If the dataset in one partition cannot be hold in memory,
it will sort them first. If all the files are already sorted,
it merge them by heap.merge(), so it will do external sort
for all the files.
- After sorting, `GroupByKey` class will put all the continuous
items with the same key as a group, yield the values as
an iterator.
"""
SORT_KEY_LIMIT = 1000
def flattened_serializer(self):
assert isinstance(self.serializer, BatchedSerializer)
ser = self.serializer
return FlattenedValuesSerializer(ser, 20)
def _object_size(self, obj):
return len(obj)
def _spill(self):
"""
dump already partitioned data into disks.
"""
global MemoryBytesSpilled, DiskBytesSpilled
path = self._get_spill_dir(self.spills)
if not os.path.exists(path):
os.makedirs(path)
used_memory = get_used_memory()
if not self.pdata:
# The data has not been partitioned, it will iterator the
# data once, write them into different files, has no
# additional memory. It only called when the memory goes
# above limit at the first time.
# open all the files for writing
streams = [open(os.path.join(path, str(i)), 'wb')
for i in range(self.partitions)]
# If the number of keys is small, then the overhead of sort is small
# sort them before dumping into disks
self._sorted = len(self.data) < self.SORT_KEY_LIMIT
if self._sorted:
self.serializer = self.flattened_serializer()
for k in sorted(self.data.keys()):
h = self._partition(k)
self.serializer.dump_stream([(k, self.data[k])], streams[h])
else:
for k, v in self.data.items():
h = self._partition(k)
self.serializer.dump_stream([(k, v)], streams[h])
for s in streams:
DiskBytesSpilled += s.tell()
s.close()
self.data.clear()
# self.pdata is cached in `mergeValues` and `mergeCombiners`
self.pdata.extend([{} for i in range(self.partitions)])
else:
for i in range(self.partitions):
p = os.path.join(path, str(i))
with open(p, "wb") as f:
# dump items in batch
if self._sorted:
# sort by key only (stable)
sorted_items = sorted(self.pdata[i].items(), key=operator.itemgetter(0))
self.serializer.dump_stream(sorted_items, f)
else:
self.serializer.dump_stream(self.pdata[i].items(), f)
self.pdata[i].clear()
DiskBytesSpilled += os.path.getsize(p)
self.spills += 1
gc.collect() # release the memory as much as possible
MemoryBytesSpilled += max(used_memory - get_used_memory(), 0) << 20
def _merged_items(self, index):
size = sum(os.path.getsize(os.path.join(self._get_spill_dir(j), str(index)))
for j in range(self.spills))
# if the memory can not hold all the partition,
# then use sort based merge. Because of compression,
# the data on disks will be much smaller than needed memory
if size >= self.memory_limit << 17: # * 1M / 8
return self._merge_sorted_items(index)
self.data = {}
for j in range(self.spills):
path = self._get_spill_dir(j)
p = os.path.join(path, str(index))
# do not check memory during merging
with open(p, "rb") as f:
self.mergeCombiners(self.serializer.load_stream(f), 0)
return self.data.items()
def _merge_sorted_items(self, index):
""" load a partition from disk, then sort and group by key """
def load_partition(j):
path = self._get_spill_dir(j)
p = os.path.join(path, str(index))
with open(p, 'rb', 65536) as f:
for v in self.serializer.load_stream(f):
yield v
disk_items = [load_partition(j) for j in range(self.spills)]
if self._sorted:
# all the partitions are already sorted
sorted_items = heapq.merge(disk_items, key=operator.itemgetter(0))
else:
# Flatten the combined values, so it will not consume huge
# memory during merging sort.
ser = self.flattened_serializer()
sorter = ExternalSorter(self.memory_limit, ser)
sorted_items = sorter.sorted(itertools.chain(*disk_items),
key=operator.itemgetter(0))
return ((k, vs) for k, vs in GroupByKey(sorted_items))
if __name__ == "__main__":
import doctest
(failure_count, test_count) = doctest.testmod()
if failure_count:
exit(-1)<|fim▁end|> | self.serializer.dump_stream([(k, v)], streams[h])
|
<|file_name|>api.py<|end_file_name|><|fim▁begin|>from jsonapi_requests import base
from jsonapi_requests.orm import registry
class OrmApi:
def __init__(self, api):
self.type_registry = registry.TypeRegistry()
self.api = api
@classmethod
def config(cls, *args, **kwargs):
return cls(base.Api.config(*args, **kwargs))
def endpoint(self, path):<|fim▁hole|><|fim▁end|> | return self.api.endpoint(path) |
<|file_name|>repl.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
#Written by Kjetil Matheussen: k.s.matheussen@notam02.no
import sys
import os
import urllib2
import readline
executable_path = os.path.split(os.path.abspath(os.path.realpath(sys.argv[0])))[0]
# TODO: Use bin/packages/s7/s7webserver instead, and delete the local s7webserver directory.
sys.path += [os.path.join(executable_path,os.path.pardir,"s7webserver")]
import s7webserver_repl
portnum = "5080"<|fim▁hole|>
s7webserver_repl.start("radium>", "http://localhost:"+portnum)<|fim▁end|> | if len(sys.argv)>1:
portnum = sys.argv[1] |
<|file_name|>orgUserServiceTest.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
describe("Unit: orgUserService", function () {
var $httpBackend;
var orgUserServiceSUT;
var targetProviderStub;
var $rootScope;
beforeEach(module('app'));
beforeEach(module(function($provide){
targetProviderStub = {}
$provide.value('targetProvider', targetProviderStub);
}));
beforeEach(inject(function (_orgUserService_, $injector, _$httpBackend_, _$rootScope_) {
$httpBackend = _$httpBackend_;
orgUserServiceSUT = _orgUserService_;
$rootScope = _$rootScope_;
}));
afterEach(function () {
$httpBackend.verifyNoOutstandingExpectation();
$httpBackend.verifyNoOutstandingRequest();<|fim▁hole|>
it('should call for users from apropriate organization on refresh', inject(function () {
targetProviderStub.getOrganization = sinon.stub().returns({ guid: "1234" });
var callbackSpied = sinon.stub();
$httpBackend.expectGET('/rest/orgs/1234/users').respond(200, []);
orgUserServiceSUT.getAll()
.then(callbackSpied);
$httpBackend.flush();
expect(callbackSpied.called).to.be.true;
}));
it('should fail while calling for users form unavailable organization on refresh', inject(function () {
targetProviderStub.getOrganization = sinon.stub().returns(null);
var errcallbackSpied = sinon.stub();
orgUserServiceSUT.getAll().catch(errcallbackSpied);
$rootScope.$digest();
expect(errcallbackSpied.called).to.be.true;
}));
it('should send POST on adding user', inject(function () {
var user = {
username: "waclaw",
roles: ["manager"]
};
targetProviderStub.getOrganization = sinon.stub().returns({ guid: "1234" });
$httpBackend.expectPOST('/rest/orgs/1234/users').respond(201, {});
var callbackSpied = sinon.stub();
orgUserServiceSUT.addUser(user).then(callbackSpied);
$httpBackend.flush();
expect(callbackSpied.called).to.be.true;
}));
});<|fim▁end|> | }); |
<|file_name|>addfiledialog.py<|end_file_name|><|fim▁begin|>from PyQt4.QtGui import *
import pypipe.formats
import pypipe.basefile
from pypipe.core import pipeline
from widgets.combobox import ComboBox
class AddFileDialog(QDialog):
def __init__(self, parent=None):
super(AddFileDialog, self).__init__(parent)
self.formats_combo = ComboBox()
self.filename_edit = QLineEdit()
self.open_button = QPushButton('Open')
self.ok_button = QPushButton('&OK')
self.cancel_button = QPushButton('&Cancel')
self.setWindowTitle('Add file')
top_layout = QVBoxLayout()
top_layout.addWidget(QLabel('<b>File format:</b>'))
top_layout.addWidget(self.formats_combo)
top_layout.addWidget(QLabel('<b>File Name:</b>'))
center_layout = QHBoxLayout()
center_layout.addWidget(self.filename_edit)
center_layout.addWidget(self.open_button)
bottom_layout = QHBoxLayout()
bottom_layout.addWidget(self.ok_button)
bottom_layout.addWidget(self.cancel_button)
layout = QVBoxLayout()
layout.addLayout(top_layout)
layout.addLayout(center_layout)
layout.addLayout(bottom_layout)
self.setLayout(layout)
self.formats_combo.add_classes_from_module(pypipe.formats)
self.connect_all()
def connect_all(self):
self.cancel_button.clicked.connect(self.reject)<|fim▁hole|> self.formats_combo.currentIndexChanged.connect(self.turn_ok_button)
self.ok_button.clicked.connect(self.accept)
self.open_button.clicked.connect(self.open_file)
def turn_ok_button(self):
try:
f = self.get_file()
self.ok_button.setEnabled(True)
except pypipe.basefile.FileNotExistsError:
self.ok_button.setEnabled(False)
return
if pypipe.core.pipeline.can_add_file(f):
self.ok_button.setEnabled(True)
else:
self.ok_button.setEnabled(False)
def open_file(self):
file_name = QFileDialog.getOpenFileName(self, 'Open file')
self.filename_edit.setText(file_name)
def get_file(self):
init = self.formats_combo.get_current_item()
path = str(self.filename_edit.text())
return init(path)
def exec_(self):
self.turn_ok_button()
super(AddFileDialog, self).exec_()<|fim▁end|> | self.filename_edit.textChanged.connect(self.turn_ok_button) |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
const EmberApp = require('./ember-app');
/**
* FastBoot renders your Ember.js applications in Node.js. Start by
* instantiating this class with the path to your compiled Ember app:
*
*
* #### Sandboxing
*
* For security and correctness reasons, Ember applications running in FastBoot
* are run inside a sandbox that prohibits them from accessing the normal
* Node.js environment.
*
* This sandbox is the built-in `VMSandbox` class, which uses
* Node's `vm` module. You may add and/or override sandbox variables by
* passing the `addOrOverrideSandboxGlobals` option.
*
* @example
* const FastBoot = require('fastboot');
*
* let app = new FastBoot({
* distPath: 'path/to/dist',
* buildSandboxGlobals(globals) {
* return Object.assign({}, globals, {
* // custom globals
* });
* },
* });
*
* app.visit('/photos')
* .then(result => result.html())
* .then(html => res.send(html));
*/
class FastBoot {
/**
* Create a new FastBoot instance.
*
* @param {Object} options
* @param {string} options.distPath the path to the built Ember application
* @param {Boolean} [options.resilient=false] if true, errors during rendering won't reject the `visit()` promise but instead resolve to a {@link Result}
* @param {Function} [options.buildSandboxGlobals] a function used to build the final set of global properties setup within the sandbox
* @param {Number} [options.maxSandboxQueueSize] - maximum sandbox queue size when using buildSandboxPerRequest flag.
*/
constructor(options = {}) {
let { distPath, buildSandboxGlobals, maxSandboxQueueSize } = options;
this.resilient = 'resilient' in options ? Boolean(options.resilient) : false;
this.distPath = distPath;
// deprecate the legacy path, but support it
if (buildSandboxGlobals === undefined && options.sandboxGlobals !== undefined) {
console.warn(
'[DEPRECATION] Instantiating `fastboot` with a `sandboxGlobals` option has been deprecated. Please migrate to specifying `buildSandboxGlobals` instead.'
);
buildSandboxGlobals = globals => Object.assign({}, globals, options.sandboxGlobals);
}
this.buildSandboxGlobals = buildSandboxGlobals;
this.maxSandboxQueueSize = maxSandboxQueueSize;
this._buildEmberApp(this.distPath, this.buildSandboxGlobals, maxSandboxQueueSize);
}
/**
* Renders the Ember app at a specific URL, returning a promise that resolves
* to a {@link Result}, giving you access to the rendered HTML as well as
* metadata about the request such as the HTTP status code.
*
* @param {string} path the URL path to render, like `/photos/1`
* @param {Object} options
* @param {Boolean} [options.resilient] whether to reject the returned promise if there is an error during rendering. Overrides the instance's `resilient` setting
* @param {string} [options.html] the HTML document to insert the rendered app into. Uses the built app's index.html by default.
* @param {Object} [options.metadata] per request meta data that need to be exposed in the app.
* @param {Boolean} [options.shouldRender] whether the app should do rendering or not. If set to false, it puts the app in routing-only.
* @param {Boolean} [options.disableShoebox] whether we should send the API data in the shoebox. If set to false, it will not send the API data used for rendering the app on server side in the index.html.
* @param {Integer} [options.destroyAppInstanceInMs] whether to destroy the instance in the given number of ms. This is a failure mechanism to not wedge the Node process (See: https://github.com/ember-fastboot/fastboot/issues/90)
* @param {Boolean} [options.buildSandboxPerVisit=false] whether to create a new sandbox context per-visit (slows down each visit, but guarantees no prototype leakages can occur), or reuse the existing sandbox (faster per-request, but each request shares the same set of prototypes)
* @returns {Promise<Result>} result
*/
async visit(path, options = {}) {
let resilient = 'resilient' in options ? options.resilient : this.resilient;
let result = await this._app.visit(path, options);
if (!resilient && result.error) {
throw result.error;
} else {
return result;
}
}
/**
* Destroy the existing Ember application instance, and recreate it from the provided dist path.
* This is commonly done when `dist` has been updated, and you need to prepare to serve requests<|fim▁hole|> *
* @param {Object} options
* @param {string} options.distPath the path to the built Ember application
*/
reload({ distPath }) {
if (this._app) {
this._app.destroy();
}
this._buildEmberApp(distPath);
}
_buildEmberApp(
distPath = this.distPath,
buildSandboxGlobals = this.buildSandboxGlobals,
maxSandboxQueueSize = this.maxSandboxQueueSize
) {
if (!distPath) {
throw new Error(
'You must instantiate FastBoot with a distPath ' +
'option that contains a path to a dist directory ' +
'produced by running ember fastboot:build in your Ember app:' +
'\n\n' +
'new FastBootServer({\n' +
" distPath: 'path/to/dist'\n" +
'});'
);
}
this.distPath = distPath;
this._app = new EmberApp({
distPath,
buildSandboxGlobals,
maxSandboxQueueSize,
});
}
}
module.exports = FastBoot;<|fim▁end|> | * with the updated assets. |
<|file_name|>member-add.component.spec.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-unused-variable */
import { TestBed, async } from "@angular/core/testing";
import { FormBuilder } from "@angular/forms";
import { Router } from "@angular/router";
import { MemberAddComponent } from "./member-add.component";
import { StubMemberService } from "../member.service.stub";
<|fim▁hole|> function mockComp(): MemberAddComponent {
const keys = [];
for (const key in Router.prototype) {
if (Router.prototype.hasOwnProperty(key)) {
keys.push(key);
}
}
const builder = new FormBuilder();
const router = jasmine.createSpyObj("MockRouter", keys);
const service = new StubMemberService();
return new MemberAddComponent(builder, service, router);
}
it("should create an instance", async(() => {
const component = mockComp();
expect(component).toBeTruthy();
}));
});<|fim▁end|> | describe("Component: MemberAdd", () => { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>macro_rules! os_required {
() => {
panic!("mio must be compiled with `os-poll` to run.")
};
}
mod selector;
pub(crate) use self::selector::{event, Event, Events, Selector};
mod waker;
pub(crate) use self::waker::Waker;
cfg_net! {
pub(crate) mod tcp;
pub(crate) mod udp;
#[cfg(unix)]
pub(crate) mod uds;
}
cfg_io_source! {
use std::io;
#[cfg(windows)]
use std::os::windows::io::RawSocket;
#[cfg(windows)]
use crate::{Registry, Token, Interest};
pub(crate) struct IoSourceState;
impl IoSourceState {
pub fn new() -> IoSourceState {
IoSourceState
}
pub fn do_io<T, F, R>(&self, f: F, io: &T) -> io::Result<R>
where
F: FnOnce(&T) -> io::Result<R>,
{
// We don't hold state, so we can just call the function and
// return.
f(io)
}
}
#[cfg(windows)]
impl IoSourceState {
pub fn register(
&mut self,
_: &Registry,
_: Token,
_: Interest,
_: RawSocket,
) -> io::Result<()> {
os_required!()
}
pub fn reregister(
&mut self,
_: &Registry,
_: Token,
_: Interest,
) -> io::Result<()> {
os_required!()
}
pub fn deregister(&mut self) -> io::Result<()> {<|fim▁hole|><|fim▁end|> | os_required!()
}
}
} |
<|file_name|>error.cc<|end_file_name|><|fim▁begin|>/*
* descripten - ECMAScript to native compiler
* Copyright (C) 2011-2014 Christian Kindahl
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*<|fim▁hole|> * This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "conversion.hh"
#include "error.hh"
#include "frame.hh"
#include "global.hh"
#include "property.hh"
#include "prototype.hh"
#include "standard.hh"
#include "utility.hh"
EsFunction::NativeFunction EsError::default_fun_ = es_std_err;
EsFunction *EsError::default_constr_ = NULL;
EsObject *EsError::prototype()
{
return es_proto_err();
}
EsError::EsError()
: name_(_ESTR("Error")) // VERIFIED: 15.11.4.2
{
}
EsError::EsError(const EsString *message)
: name_(_ESTR("Error"))
, message_(message)
{
}
EsError::EsError(const EsString *name, const EsString *message)
: name_(name)
, message_(message)
{
}
EsError::~EsError()
{
}
void EsError::make_proto()
{
prototype_ = es_proto_obj(); // VERIFIED: 15.11.4
class_ = _USTR("Error"); // VERIFIED: 15.11.4
extensible_ = true;
// 15.11.4
define_new_own_property(property_keys.constructor,
EsPropertyDescriptor(false, true, true,
EsValue::from_obj(default_constr()))); // VERIFIED: 15.11.4.1
define_new_own_property(property_keys.name,
EsPropertyDescriptor(false, true, true,
EsValue::from_str(_ESTR("Error")))); // VERIFIED: 15.11.4.2
define_new_own_property(property_keys.message,
EsPropertyDescriptor(false, true, true,
EsValue::from_str(EsString::create()))); // VERIFIED: 15.11.4.3
define_new_own_property(property_keys.to_string,
EsPropertyDescriptor(false, true, true,
EsValue::from_obj(EsBuiltinFunction::create_inst(es_global_env(),
es_std_err_proto_to_str, 0)))); // VERIFIED: 15.11.4.4
}
EsError *EsError::create_raw()
{
return new (GC)EsError();
}
EsError *EsError::create_inst(const EsString *message)
{
EsError *e = new (GC)EsError(message);
e->prototype_ = es_proto_err(); // VERIFIED: 15.11.5
e->class_ = _USTR("Error"); // VERIFIED: 15.11.5
e->extensible_ = true;
if (!message->empty())
{
e->define_new_own_property(property_keys.message,
EsPropertyDescriptor(false, true, true,
EsValue::from_str(message)));
}
return e;
}
EsFunction *EsError::default_constr()
{
if (default_constr_ == NULL)
default_constr_ = EsErrorConstructor<EsError>::create_inst();
return default_constr_;
}
template <typename T>
EsFunction *EsNativeError<T>::default_constr_ = NULL;
template <typename T>
EsNativeError<T>::EsNativeError(const EsString *name, const EsString *message)
: EsError(name, message)
{
}
template <typename T>
EsNativeError<T>::~EsNativeError()
{
}
template <typename T>
void EsNativeError<T>::make_proto()
{
prototype_ = es_proto_err(); // VERIFIED: 15.11.7.7
class_ = _USTR("Error"); // VERIFIED: 15.11.7.7
extensible_ = true;
// 15.11.7
define_new_own_property(property_keys.constructor,
EsPropertyDescriptor(false, true, true,
EsValue::from_obj(default_constr()))); // VERIFIED: 15.11.7.8
define_new_own_property(property_keys.name,
EsPropertyDescriptor(false, true, true,
EsValue::from_str(name()))); // VERIFIED: 15.11.7.9
define_new_own_property(property_keys.message,
EsPropertyDescriptor(false, true, true,
EsValue::from_str(EsString::create()))); // VERIFIED: 15.11.7.10
}
template <typename T>
T *EsNativeError<T>::create_raw()
{
return new (GC)T(EsString::create());
}
template <typename T>
T *EsNativeError<T>::create_inst(const EsString *message)
{
T *e = new (GC)T(message);
e->prototype_ = T::prototype(); // VERIFIED: 15.11.7.2
e->class_ = _USTR("Error"); // VERIFIED: 15.11.7.2
e->extensible_ = true; // VERIFIED: 15.11.7.2
if (!message->empty())
{
e->define_new_own_property(property_keys.message,
EsPropertyDescriptor(false, true, true,
EsValue::from_str(message)));
}
return e;
}
template <typename T>
EsFunction *EsNativeError<T>::default_constr()
{
if (default_constr_ == NULL)
default_constr_ = EsErrorConstructor<T>::create_inst();
return default_constr_;
}
template <typename T>
EsErrorConstructor<T>::EsErrorConstructor(EsLexicalEnvironment *scope,
NativeFunction fun, int len, bool strict)
: EsFunction(scope, fun, strict, 1, false)
{
}
template <typename T>
EsFunction *EsErrorConstructor<T>::create_inst()
{
EsErrorConstructor *f = new (GC)EsErrorConstructor(es_global_env(), T::default_fun_, 1, false);
f->prototype_ = es_proto_fun(); // VERIFIED: 15.11.7.5
f->class_ = _USTR("Function");
f->extensible_ = true;
// 15.11.7
f->define_new_own_property(property_keys.length,
EsPropertyDescriptor(false, false, false,
EsValue::from_u32(1))); // VERIFIED: 15.11.7.5
f->define_new_own_property(property_keys.prototype,
EsPropertyDescriptor(false, false, false,
EsValue::from_obj(T::prototype()))); // VERIFIED: 15.11.7.6
return f;
}
template <typename T>
bool EsErrorConstructor<T>::constructT(EsCallFrame &frame)
{
const EsString *msg_str = EsString::create();
EsValue msg = frame.arg(0);
if (!msg.is_undefined())
{
msg_str = msg.to_stringT();
if (!msg_str)
return false;
}
frame.set_result(EsValue::from_obj(T::create_inst(msg_str)));
return true;
}
EsObject *EsEvalError::prototype()
{
return es_proto_eval_err();
}
EsObject *EsRangeError::prototype()
{
return es_proto_range_err();
}
EsObject *EsReferenceError::prototype()
{
return es_proto_ref_err();
}
EsObject *EsSyntaxError::prototype()
{
return es_proto_syntax_err();
}
EsObject *EsTypeError::prototype()
{
return es_proto_type_err();
}
EsObject *EsUriError::prototype()
{
return es_proto_uri_err();
}
// Template specialization.
template <>
EsFunction::NativeFunction EsNativeError<EsEvalError>::default_fun_ = es_std_eval_err;
template <>
EsFunction::NativeFunction EsNativeError<EsRangeError>::default_fun_ = es_std_range_err;
template <>
EsFunction::NativeFunction EsNativeError<EsReferenceError>::default_fun_ = es_std_ref_err;
template <>
EsFunction::NativeFunction EsNativeError<EsSyntaxError>::default_fun_ = es_std_syntax_err;
template <>
EsFunction::NativeFunction EsNativeError<EsTypeError>::default_fun_ = es_std_type_err;
template <>
EsFunction::NativeFunction EsNativeError<EsUriError>::default_fun_ = es_std_uri_err;
// Explicit template instantiation.
template class EsNativeError<EsEvalError>;
template class EsNativeError<EsRangeError>;
template class EsNativeError<EsReferenceError>;
template class EsNativeError<EsSyntaxError>;
template class EsNativeError<EsTypeError>;
template class EsNativeError<EsUriError>;<|fim▁end|> | |
<|file_name|>PageProcessorCompiler.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.gen;
import com.facebook.presto.byteCode.ByteCodeBlock;
import com.facebook.presto.byteCode.ByteCodeNode;
import com.facebook.presto.byteCode.ClassDefinition;
import com.facebook.presto.byteCode.MethodDefinition;
import com.facebook.presto.byteCode.Parameter;
import com.facebook.presto.byteCode.ParameterizedType;
import com.facebook.presto.byteCode.Scope;
import com.facebook.presto.byteCode.Variable;
import com.facebook.presto.byteCode.control.ForLoop;
import com.facebook.presto.byteCode.control.IfStatement;
import com.facebook.presto.byteCode.instruction.LabelNode;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.operator.PageProcessor;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.PageBuilder;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.relational.CallExpression;
import com.facebook.presto.sql.relational.ConstantExpression;
import com.facebook.presto.sql.relational.Expressions;
import com.facebook.presto.sql.relational.InputReferenceExpression;
import com.facebook.presto.sql.relational.RowExpression;
import com.facebook.presto.sql.relational.RowExpressionVisitor;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.primitives.Primitives;
import io.airlift.slice.Slice;
import java.util.List;
import java.util.TreeSet;
import static com.facebook.presto.byteCode.Access.PUBLIC;
import static com.facebook.presto.byteCode.Access.a;
import static com.facebook.presto.byteCode.OpCode.NOP;
import static com.facebook.presto.byteCode.Parameter.arg;
import static com.facebook.presto.byteCode.ParameterizedType.type;
import static com.facebook.presto.sql.gen.ByteCodeUtils.generateWrite;
import static com.facebook.presto.sql.gen.ByteCodeUtils.loadConstant;
import static java.lang.String.format;
import static java.util.Collections.nCopies;
public class PageProcessorCompiler
implements BodyCompiler<PageProcessor>
{
private final Metadata metadata;
public PageProcessorCompiler(Metadata metadata)
{
this.metadata = metadata;
}
@Override
public void generateMethods(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, RowExpression filter, List<RowExpression> projections)
{
generateProcessMethod(classDefinition, filter, projections);
generateFilterMethod(classDefinition, callSiteBinder, filter);
for (int i = 0; i < projections.size(); i++) {
generateProjectMethod(classDefinition, callSiteBinder, "project_" + i, projections.get(i));
}
}
private void generateProcessMethod(ClassDefinition classDefinition, RowExpression filter, List<RowExpression> projections)
{
Parameter session = arg("session", ConnectorSession.class);
Parameter page = arg("page", Page.class);
Parameter start = arg("start", int.class);
Parameter end = arg("end", int.class);
Parameter pageBuilder = arg("pageBuilder", PageBuilder.class);
MethodDefinition method = classDefinition.declareMethod(a(PUBLIC), "process", type(int.class), session, page, start, end, pageBuilder);
Scope scope = method.getScope();
Variable thisVariable = method.getThis();
Variable position = scope.declareVariable(int.class, "position");
method.getBody()
.comment("int position = start;")
.getVariable(start)
.putVariable(position);
List<Integer> allInputChannels = getInputChannels(Iterables.concat(projections, ImmutableList.of(filter)));
for (int channel : allInputChannels) {
Variable blockVariable = scope.declareVariable(Block.class, "block_" + channel);
method.getBody()
.comment("Block %s = page.getBlock(%s);", blockVariable.getName(), channel)
.getVariable(page)
.push(channel)
.invokeVirtual(Page.class, "getBlock", Block.class, int.class)
.putVariable(blockVariable);
}
//
// for loop loop body
//
LabelNode done = new LabelNode("done");
ByteCodeBlock loopBody = new ByteCodeBlock();
ForLoop loop = new ForLoop()
.initialize(NOP)
.condition(new ByteCodeBlock()
.comment("position < end")
.getVariable(position)
.getVariable(end)
.invokeStatic(CompilerOperations.class, "lessThan", boolean.class, int.class, int.class)
)
.update(new ByteCodeBlock()
.comment("position++")
.incrementVariable(position, (byte) 1))
.body(loopBody);
loopBody.comment("if (pageBuilder.isFull()) break;")
.getVariable(pageBuilder)
.invokeVirtual(PageBuilder.class, "isFull", boolean.class)
.ifTrueGoto(done);
// if (filter(cursor))
IfStatement filterBlock = new IfStatement();
filterBlock.condition()
.append(thisVariable)
.getVariable(session)
.append(pushBlockVariables(scope, getInputChannels(filter)))
.getVariable(position)
.invokeVirtual(classDefinition.getType(),
"filter",
type(boolean.class),
ImmutableList.<ParameterizedType>builder()
.add(type(ConnectorSession.class))
.addAll(nCopies(getInputChannels(filter).size(), type(Block.class)))
.add(type(int.class))
.build());
filterBlock.ifTrue()
.append(pageBuilder)
.invokeVirtual(PageBuilder.class, "declarePosition", void.class);
for (int projectionIndex = 0; projectionIndex < projections.size(); projectionIndex++) {
List<Integer> inputChannels = getInputChannels(projections.get(projectionIndex));
filterBlock.ifTrue()
.append(thisVariable)
.append(session)
.append(pushBlockVariables(scope, inputChannels))
.getVariable(position);
filterBlock.ifTrue()
.comment("pageBuilder.getBlockBuilder(%d)", projectionIndex)
.append(pageBuilder)
.push(projectionIndex)
.invokeVirtual(PageBuilder.class, "getBlockBuilder", BlockBuilder.class, int.class);
filterBlock.ifTrue()
.comment("project_%d(session, block_%s, position, blockBuilder)", projectionIndex, inputChannels)
.invokeVirtual(classDefinition.getType(),
"project_" + projectionIndex,
type(void.class),
ImmutableList.<ParameterizedType>builder()
.add(type(ConnectorSession.class))
.addAll(nCopies(inputChannels.size(), type(Block.class)))
.add(type(int.class))
.add(type(BlockBuilder.class))
.build());
}
loopBody.append(filterBlock);
method.getBody()
.append(loop)
.visitLabel(done)
.comment("return position;")
.getVariable(position)
.retInt();
}
private void generateFilterMethod(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, RowExpression filter)
{
Parameter session = arg("session", ConnectorSession.class);
List<Parameter> blocks = toBlockParameters(getInputChannels(filter));
Parameter position = arg("position", int.class);
MethodDefinition method = classDefinition.declareMethod(
a(PUBLIC),
"filter",
type(boolean.class),
ImmutableList.<Parameter>builder()
.add(session)
.addAll(blocks)
.add(position)
.build());
method.comment("Filter: %s", filter.toString());
Scope scope = method.getScope();<|fim▁hole|> Variable wasNullVariable = scope.declareVariable(type(boolean.class), "wasNull");
ByteCodeExpressionVisitor visitor = new ByteCodeExpressionVisitor(
callSiteBinder,
fieldReferenceCompiler(callSiteBinder, position, wasNullVariable),
metadata.getFunctionRegistry());
ByteCodeNode body = filter.accept(visitor, scope);
LabelNode end = new LabelNode("end");
method
.getBody()
.comment("boolean wasNull = false;")
.putVariable(wasNullVariable, false)
.append(body)
.getVariable(wasNullVariable)
.ifFalseGoto(end)
.pop(boolean.class)
.push(false)
.visitLabel(end)
.retBoolean();
}
private void generateProjectMethod(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, String methodName, RowExpression projection)
{
Parameter session = arg("session", ConnectorSession.class);
List<Parameter> inputs = toBlockParameters(getInputChannels(projection));
Parameter position = arg("position", int.class);
Parameter output = arg("output", BlockBuilder.class);
MethodDefinition method = classDefinition.declareMethod(
a(PUBLIC),
methodName,
type(void.class),
ImmutableList.<Parameter>builder()
.add(session)
.addAll(inputs)
.add(position)
.add(output)
.build());
method.comment("Projection: %s", projection.toString());
Scope scope = method.getScope();
Variable wasNullVariable = scope.declareVariable(type(boolean.class), "wasNull");
ByteCodeBlock body = method.getBody()
.comment("boolean wasNull = false;")
.putVariable(wasNullVariable, false);
ByteCodeExpressionVisitor visitor = new ByteCodeExpressionVisitor(callSiteBinder, fieldReferenceCompiler(callSiteBinder, position, wasNullVariable), metadata.getFunctionRegistry());
body.getVariable(output)
.comment("evaluate projection: " + projection.toString())
.append(projection.accept(visitor, scope))
.append(generateWrite(callSiteBinder, scope, wasNullVariable, projection.getType()))
.ret();
}
private static List<Integer> getInputChannels(Iterable<RowExpression> expressions)
{
TreeSet<Integer> channels = new TreeSet<>();
for (RowExpression expression : Expressions.subExpressions(expressions)) {
if (expression instanceof InputReferenceExpression) {
channels.add(((InputReferenceExpression) expression).getField());
}
}
return ImmutableList.copyOf(channels);
}
private static List<Integer> getInputChannels(RowExpression expression)
{
return getInputChannels(ImmutableList.of(expression));
}
private static List<Parameter> toBlockParameters(List<Integer> inputChannels)
{
ImmutableList.Builder<Parameter> parameters = ImmutableList.builder();
for (int channel : inputChannels) {
parameters.add(arg("block_" + channel, Block.class));
}
return parameters.build();
}
private static ByteCodeNode pushBlockVariables(Scope scope, List<Integer> inputs)
{
ByteCodeBlock block = new ByteCodeBlock();
for (int channel : inputs) {
block.append(scope.getVariable("block_" + channel));
}
return block;
}
private RowExpressionVisitor<Scope, ByteCodeNode> fieldReferenceCompiler(final CallSiteBinder callSiteBinder, final Variable positionVariable, final Variable wasNullVariable)
{
return new RowExpressionVisitor<Scope, ByteCodeNode>()
{
@Override
public ByteCodeNode visitInputReference(InputReferenceExpression node, Scope scope)
{
int field = node.getField();
Type type = node.getType();
Variable block = scope.getVariable("block_" + field);
Class<?> javaType = type.getJavaType();
if (!javaType.isPrimitive() && javaType != Slice.class) {
javaType = Object.class;
}
IfStatement ifStatement = new IfStatement();
ifStatement.condition()
.setDescription(format("block_%d.get%s()", field, type))
.append(block)
.getVariable(positionVariable)
.invokeInterface(Block.class, "isNull", boolean.class, int.class);
ifStatement.ifTrue()
.putVariable(wasNullVariable, true)
.pushJavaDefault(javaType);
String methodName = "get" + Primitives.wrap(javaType).getSimpleName();
ifStatement.ifFalse()
.append(loadConstant(callSiteBinder.bind(type, Type.class)))
.append(block)
.getVariable(positionVariable)
.invokeInterface(Type.class, methodName, javaType, Block.class, int.class);
return ifStatement;
}
@Override
public ByteCodeNode visitCall(CallExpression call, Scope scope)
{
throw new UnsupportedOperationException("not yet implemented");
}
@Override
public ByteCodeNode visitConstant(ConstantExpression literal, Scope scope)
{
throw new UnsupportedOperationException("not yet implemented");
}
};
}
}<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from app.models import Home, Room, Thermostat, Door, Light, Refrigerator
"""
Administrator interface customization
This module contains customization classes to the admin interface
rendered by Django. This file is interpreted at run time to serve
the custom administrator actions that correspond to the application's
custom models.
"""
class ThermostatAdmin(admin.ModelAdmin):
"""
ModelAdmin
"""
list_display = ('name','home','current_temp','set_temp','pk')
search_fields = ('name','home')
class ThermostatInline(admin.StackedInline):
"""
StackedInline
"""
model = Thermostat
class DoorAdmin(admin.ModelAdmin):
"""
ModelAdmin
"""
list_display = ('name','room','is_locked','is_open','pk')
search_fields = ('name','room')
class DoorInline(admin.StackedInline):
"""<|fim▁hole|>
class LightAdmin(admin.ModelAdmin):
"""
ModelAdmin
"""
list_display = ('name','room','is_on','pk')
search_fields = ('name','room')
class LightInline(admin.StackedInline):
"""
StackedInline
"""
model = Light
class RefrigeratorAdmin(admin.ModelAdmin):
"""
ModelAdmin
"""
list_display = ('name','room','fridge_set_temp','fridge_current_temp','freezer_set_temp','freezer_current_temp','pk')
search_fields = ('name','room')
class RefrigeratorInline(admin.StackedInline):
"""
StackedInline
"""
model = Refrigerator
class RoomAdmin(admin.ModelAdmin):
"""
ModelAdmin
"""
list_display = ('name','home','room_type','pk')
search_fields = ('name','home')
inlines = (DoorInline, LightInline, RefrigeratorInline,)
class RoomInline(admin.StackedInline):
"""
StackedInline
"""
model = Room
class HomeAdmin(admin.ModelAdmin):
list_display = ('name','owner','position','secret_key','pk')
search_fields = ('name',)
readonly_fields=('secret_key',)
inlines = (ThermostatInline, RoomInline, )
admin.site.register(Home, HomeAdmin)
admin.site.register(Thermostat, ThermostatAdmin)
admin.site.register(Room, RoomAdmin)
admin.site.register(Door, DoorAdmin)
admin.site.register(Light, LightAdmin)
admin.site.register(Refrigerator, RefrigeratorAdmin)<|fim▁end|> | StackedInline
"""
model = Door |
<|file_name|>vmware_local_user_manager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, IBM Corp
# Author(s): Andreas Nafpliotis <nafpliot@de.ibm.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_local_user_manager
short_description: Manage local users on an ESXi host
description:
- Manage local users on an ESXi host
version_added: "2.2"
author:
- Andreas Nafpliotis (@nafpliot-ibm)
notes:
- Tested on ESXi 6.0
- Be sure that the ESXi user used for login, has the appropriate rights to create / delete / edit users
requirements:
- "python >= 2.6"
- PyVmomi installed
options:
local_user_name:
description:
- The local user name to be changed.
required: True
local_user_password:
description:
- The password to be set.
required: False
local_user_description:
description:
- Description for the user.
required: False
state:
description:
- Indicate desired state of the user. If the user already exists when C(state=present), the user info is updated
choices: ['present', 'absent']
default: present
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Example vmware_local_user_manager command from Ansible Playbooks
- name: Add local user to ESXi
local_action:
module: vmware_local_user_manager
hostname: esxi_hostname
username: root
password: vmware
local_user_name: foo
'''
RETURN = '''# '''
try:
from pyVmomi import vim, vmodl
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec
class VMwareLocalUserManager(PyVmomi):
def __init__(self, module):
super(VMwareLocalUserManager, self).__init__(module)
self.local_user_name = self.module.params['local_user_name']
self.local_user_password = self.module.params['local_user_password']
self.local_user_description = self.module.params['local_user_description']
self.state = self.module.params['state']
if self.is_vcenter():
self.module.fail_json(msg="Failed to get local account manager settings "
"from ESXi server: %s" % self.module.params['hostname'],
details="It seems that %s is a vCenter server instead of an "
"ESXi server" % self.module.params['hostname'])
def process_state(self):
try:
local_account_manager_states = {
'absent': {
'present': self.state_remove_user,
'absent': self.state_exit_unchanged,
},
'present': {
'present': self.state_update_user,
'absent': self.state_create_user,
}
}
local_account_manager_states[self.state][self.check_local_user_manager_state()]()
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
except Exception as e:
self.module.fail_json(msg=str(e))
def check_local_user_manager_state(self):
user_account = self.find_user_account()
if not user_account:
return 'absent'
else:
return 'present'
def find_user_account(self):
searchStr = self.local_user_name
exactMatch = True
findUsers = True
findGroups = False
user_account = self.content.userDirectory.RetrieveUserGroups(None, searchStr, None, None, exactMatch, findUsers, findGroups)
return user_account
def create_account_spec(self):
account_spec = vim.host.LocalAccountManager.AccountSpecification()
account_spec.id = self.local_user_name
account_spec.password = self.local_user_password
account_spec.description = self.local_user_description
return account_spec
def state_create_user(self):
account_spec = self.create_account_spec()
try:
self.content.accountManager.CreateUser(account_spec)
self.module.exit_json(changed=True)
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
def state_update_user(self):
account_spec = self.create_account_spec()
try:
self.content.accountManager.UpdateUser(account_spec)<|fim▁hole|> self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
def state_remove_user(self):
try:
self.content.accountManager.RemoveUser(self.local_user_name)
self.module.exit_json(changed=True)
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
def state_exit_unchanged(self):
self.module.exit_json(changed=False)
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(local_user_name=dict(required=True, type='str'),
local_user_password=dict(type='str', no_log=True),
local_user_description=dict(type='str'),
state=dict(default='present', choices=['present', 'absent'], type='str')))
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=False)
vmware_local_user_manager = VMwareLocalUserManager(module)
vmware_local_user_manager.process_state()
if __name__ == '__main__':
main()<|fim▁end|> | self.module.exit_json(changed=True)
except vmodl.RuntimeFault as runtime_fault: |
<|file_name|>clob_string.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------
# clob_string.py (Section 7.2)
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# Copyright (c) 2017, 2018, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------
from __future__ import print_function
import cx_Oracle
import db_config
con = cx_Oracle.connect(db_config.user, db_config.pw, db_config.dsn)
cur = con.cursor()
print("Inserting data...")
cur.execute("truncate table testclobs")
longString = ""
for i in range(5):
char = chr(ord('A') + i)<|fim▁hole|> longString += char * 250
cur.execute("insert into testclobs values (:1, :2)",
(i + 1, "String data " + longString + ' End of string'))
con.commit()
def OutputTypeHandler(cursor, name, defaultType, size, precision, scale):
if defaultType == cx_Oracle.CLOB:
return cursor.var(cx_Oracle.LONG_STRING, arraysize = cursor.arraysize)
con.outputtypehandler = OutputTypeHandler
print("Querying data...")
cur.prepare("select * from testclobs where id = :id")
cur.execute(None, {'id': 1})
(id, clobdata) = cur.fetchone()
print("CLOB length:", len(clobdata))
print("CLOB data:", clobdata)<|fim▁end|> | |
<|file_name|>testRaise.py<|end_file_name|><|fim▁begin|>class fooexception(Exception):
def __init__(self, msg):
Exception.__init__(self)
print msg
def __init__(self):
Exception.__init__(self)
print "i am a fooexception"
data = 2
raise "foo"
raise "foo", data
# before
raise fooexception # on-line
# after
# before
raise fooexception, "bla" # on-line
# after
raise fooexception, [1, 2, 3]
raise fooexception, range(3)
raise fooexception, (1, 2, 3)
raise fooexception, (1, 2, 3), 1
# after
raise fooexception, (1, 2, 3), "foo" # on-line
# after
raise fooexception, (1, 2, 3), (1, 2, 3)
raise fooexception, (1, 2, 3), [1, 2, 3]
# after
raise fooexception, (1, 2, 3), range(1) # on-line
# after
raise fooexception, (1, 2, 3), (1 + 1)<|fim▁hole|><|fim▁end|> | raise
raise fooexception, (1, 2, 3), 1 + 1
raise |
<|file_name|>0013_auto__add_field_userprofile_receive_notification.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'UserProfile.receive_notification'
db.add_column('truekko_userprofile', 'receive_notification',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'UserProfile.receive_notification'
db.delete_column('truekko_userprofile', 'receive_notification')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'truekko.channel': {
'Meta': {'object_name': 'Channel'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '800', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'}),
'wall': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'channels'", 'null': 'True', 'to': "orm['truekko.Wall']"})
},
'truekko.commitment': {
'Meta': {'ordering': "['-date']", 'object_name': 'Commitment'},
'comment': ('django.db.models.fields.TextField', [], {}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'WAI'", 'max_length': '3'}),
'swap': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commitments'", 'to': "orm['truekko.Swap']"}),
'user_from': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'my_commitments'", 'to': "orm['auth.User']"}),
'user_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commitments_to_me'", 'to': "orm['auth.User']"})
},
'truekko.denounce': {
'Meta': {'ordering': "['-date']", 'object_name': 'Denounce'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['truekko.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'msg': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'default': "'PEN'", 'max_length': '3'}),
'user_from': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dennounces_from'", 'to': "orm['auth.User']"}),
'user_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dennounces_to'", 'to': "orm['auth.User']"})
},
'truekko.follow': {
'Meta': {'object_name': 'Follow'},
'follower': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followings'", 'to': "orm['auth.User']"}),
'following': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'truekko.group': {
'Meta': {'object_name': 'Group'},
'channel': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['truekko.Channel']"}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '800', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'web': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'truekko.item': {
'Meta': {'ordering': "['-pub_date']", 'object_name': 'Item'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'offer_or_demand': ('django.db.models.fields.CharField', [], {'default': "'OFF'", 'max_length': '3'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.IntegerField', [], {}),
'price_type': ('django.db.models.fields.CharField', [], {'default': "'ETK'", 'max_length': '20'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'IT'", 'max_length': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['auth.User']"})
},
'truekko.itemtagged': {
'Meta': {'object_name': 'ItemTagged'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['truekko.Item']"}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['truekko.Tag']"})
},
'truekko.membership': {
'Meta': {'object_name': 'Membership'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['truekko.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),<|fim▁hole|> },
'truekko.swap': {
'Meta': {'ordering': "['-date']", 'object_name': 'Swap'},
'credits_from': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'credits_to': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'done_msg': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'swap_mode': ('django.db.models.fields.CharField', [], {'default': "'NON'", 'max_length': '3'}),
'user_from': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'swaps_from'", 'to': "orm['auth.User']"}),
'user_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'swaps_to'", 'to': "orm['auth.User']"})
},
'truekko.swapcomment': {
'Meta': {'object_name': 'SwapComment'},
'comment': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'swap': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['truekko.Swap']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'truekko.swapitems': {
'Meta': {'object_name': 'SwapItems'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['truekko.Item']"}),
'swap': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['truekko.Swap']"})
},
'truekko.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'truekko.transfer': {
'Meta': {'object_name': 'Transfer'},
'concept': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'credits': ('django.db.models.fields.PositiveIntegerField', [], {}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'group_from': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'transfer_form'", 'null': 'True', 'to': "orm['truekko.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user_from': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'transfer_from'", 'null': 'True', 'to': "orm['auth.User']"}),
'user_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'transfer_to'", 'to': "orm['auth.User']"})
},
'truekko.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'credits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '300', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'Unlocated'", 'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Unnamed'", 'max_length': '100'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'rating_score': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'rating_votes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'}),
'receive_notification': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'web': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'truekko.wall': {
'Meta': {'object_name': 'Wall'},
'description': ('django.db.models.fields.TextField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'walls'", 'null': 'True', 'to': "orm['truekko.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'walls'", 'null': 'True', 'to': "orm['auth.User']"})
},
'truekko.wallmessage': {
'Meta': {'ordering': "['-date']", 'object_name': 'WallMessage'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'msg': ('django.db.models.fields.TextField', [], {}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'childs'", 'null': 'True', 'to': "orm['truekko.WallMessage']"}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': "orm['auth.User']"}),
'wall': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': "orm['truekko.Wall']"})
}
}
complete_apps = ['truekko']<|fim▁end|> | 'role': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) |
<|file_name|>effectset.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
################################################################################
# Copyright 2014, Distributed Meta-Analysis System
################################################################################
"""Software structure for generating Monte-Carlo collections of results.
NOTE: Highest resolution regions are implicitly assumed to be
FIPS-coded counties, but the logic does not require them to be. FIPS
language should be replaced with generic ID references.
A key structure is the make_generator(fips, times, values) function.
make_generator is passed to the functions that iterate through
different weather forecasts, such as make_tar_ncdf. It is then called
with each location and daily weather data. fips is a single county
code, times is a list of yyyyddd formated date values, values is a
list of weather values.
The output of make_generator() is a generator, producing tuples (year,
effect), for whichever years an effect can be computed.
Output file structure:
Each bundle of output impact results of a given type and for a given
weather forecast are in a gzipped tar file containing a single
directory <name>, containing a separate csv file (an effect file) for each
region. The format of the csv file is:
year,<label>[,<other labels>]*
<year>,<impact>[,<prior calculated impact>]*
Basic processing logic:
Some functions, like find_ncdfs_allreal, discover collections of
forecasted variables (within the WDS directory structure), and provide
through enumerators. Variable collections are dictionaries {variable:
REFERENCE}, where REFERENCE may be a filename, a netCDF, or a
dictionary of {original: netCDF object, data: [days x counties],
times: [yyyyddd]}. [VRD]
Controllers (elsewhere) loop through these, and for each available
forecast call a make_tar_* function passing in a make_generator
function. The make_tar_* functions call make_generator with each
individual region, retrieving a set of results, and then package those
results into the output file format.
Temporary directories (characterized by random letters) are used to
hold the results as they're being generated (before being bundled into
tars).
"""
__copyright__ = "Copyright 2014, Distributed Meta-Analysis System"
__author__ = "James Rising"
__credits__ = ["James Rising"]
__maintainer__ = "James Rising"
__email__ = "jar2234@columbia.edu"
__status__ = "Production"
__version__ = "$Revision$"
# $Source$
import csv
import os
import random
import re
import shutil
import string
import tarfile

import numpy as np
try:
# this is required for nc4's, but we can wait to fail
from netCDF4 import Dataset
except:
pass
# Sentinel region IDs passed to make_generator in place of a real FIPS code.
FIPS_COMPLETE = '__complete__' # special FIPS code for the last county
LATEX_STRING = '__latexstr__' # special FIPS code for making a LaTeX representation
### Effect Bundle Generation
## Temporary directory management
def enter_local_tempdir(prefix=''):
"""Create and set the working directory as a new temporary directory.
Returns the name of the temporary directory (to be passed to
exit_local_tempdir).<|fim▁hole|> os.mkdir(prefix + suffix)
os.chdir(prefix + suffix)
return prefix + suffix
def exit_local_tempdir(tempdir, killit=True):
    """Return to the root output directory (and optionally delete the
    temporary directory).

    tempdir: the output of enter_local_tempdir.
    killit: when True, also remove the temporary directory's contents.
    """
    os.chdir("..")
    if not killit:
        return
    kill_local_tempdir(tempdir)
def kill_local_tempdir(tempdir):
    """Remove all contents of a temporary directory.

    Call after exit_local_tempdir is called, only if killit=False.

    tempdir: directory name, as returned by enter_local_tempdir.
    """
    # Previously this shelled out ("rm -r " + tempdir), which was vulnerable
    # to shell injection through the directory name and silently ignored the
    # exit status.  shutil.rmtree with ignore_errors=True keeps the same
    # best-effort, never-raises behavior without invoking a shell.
    shutil.rmtree(tempdir, ignore_errors=True)
## General helper functions for creation
def send_fips_complete(make_generator):
    """Call after the last county of a loop of counties, to clean up any memory.

    Invokes make_generator once with the FIPS_COMPLETE sentinel (no times or
    data) and advances it one step so that any cleanup code inside the
    generator runs.  All outcomes are tolerated: StopIteration means the
    generator had nothing to clean up; any other exception is printed and
    ignored so bundle generation is never aborted by cleanup.
    """
    print "Complete the FIPS"
    try:
        # .next() forces the generator body to execute up to its first yield.
        iterator = make_generator(FIPS_COMPLETE, None, None).next()
        print "Success"
    except StopIteration, e:
        pass
    except Exception, e:
        print e
        pass
def get_target_path(targetdir, name):
    """Join name onto targetdir when a target directory is provided.

    targetdir: directory path, or None to use name unchanged.
    name: bundle file or directory name.
    """
    if targetdir is None:
        return name
    return os.path.join(targetdir, name)
def write_effect_file(path, fips, generator, collabel):
    """Write the effects for a single FIPS-coded county as a CSV file.

    path: relative path for the containing directory
    fips: the unique id of the region (becomes the file name <fips>.csv)
    generator: an enumerator of tuples/lists with individual rows
    collabel: label for one (string) or more (list) columns after the
        year column
    """
    # Build the header up front: one label or several after "year".
    if isinstance(collabel, list):
        header = ["year"] + collabel
    else:
        header = ["year", collabel]
    with open(os.path.join(path, fips + '.csv'), 'wb') as csvfp:
        csvwriter = csv.writer(csvfp, quoting=csv.QUOTE_MINIMAL)
        csvwriter.writerow(header)
        # Stream every row produced by the generator straight to disk.
        for row in generator:
            csvwriter.writerow(row)
## Top-level bundle creation functions
def make_tar_dummy(name, acradir, make_generator, targetdir=None, collabel="fraction"):
    """Constructs a tar of files for each county, using NO DATA.

    Calls make_generator for each county, using a filename of
    counties.

    name: the name of the effect bundle.
    acradir: path to the DMAS acra directory.
    make_generator(fips, times, daily): returns an iterator of (year, effect).
    targetdir: path to a final destination for the bundle
    collabel: the label for the effect column
    """
    tempdir = enter_local_tempdir()
    os.mkdir(name) # directory for county files
    # Generate an effect file for each county in regionsA
    with open(os.path.join(acradir, 'regions/regionsANSI.csv')) as countyfp:
        reader = csv.reader(countyfp)
        reader.next() # ignore header
        # Each row is a county
        for row in reader:
            fips = canonical_fips(row[0])
            print fips
            # Call generator (with no data)
            generator = make_generator(fips, None, None)
            if generator is None:
                continue
            # Construct the effect file
            write_effect_file(name, fips, generator, collabel)
    # Let the generator clean up after the final county.
    send_fips_complete(make_generator)
    # Generate the bundle tar
    target = get_target_path(targetdir, name)
    os.system("tar -czf " + os.path.join("..", target) + ".tar.gz " + name)
    # Remove the working directory
    exit_local_tempdir(tempdir)
def make_tar_duplicate(name, filepath, make_generator, targetdir=None, collabel="fraction"):
    """Constructs a tar of files for each county that is described in
    an existing bundle.  Passes NO DATA to make_generator.

    name: the name of the effect bundle.
    filepath: path to an existing effect bundle
    make_generator(fips, times, daily): returns an iterator of (year, effect).
    targetdir: path to a final destination for the bundle
    collabel: the label for the effect column
    """
    tempdir = enter_local_tempdir()
    os.mkdir(name)
    # Iterate through all FIPS-titled files in the effect bundle
    with tarfile.open(filepath) as tar:
        # [1:] skips the bundle's top-level directory entry; each remaining
        # name looks like <bundle>/<fips>.csv, so strip dir and ".csv".
        for item in tar.getnames()[1:]:
            fips = item.split('/')[1][0:-4]
            print fips
            # Call make_generator with no data
            generator = make_generator(fips, None, None)
            if generator is None:
                continue
            # Construct the effect file
            write_effect_file(name, fips, generator, collabel)
    # Let the generator clean up after the final county.
    send_fips_complete(make_generator)
    # Generate the bundle tar
    target = get_target_path(targetdir, name)
    os.system("tar -czf " + os.path.join("..", target) + ".tar.gz " + name)
    # Remove the working directory
    exit_local_tempdir(tempdir)
def make_tar_ncdf(name, weather_ncdf, var, make_generator, targetdir=None, collabel="fraction"):
    """Constructs a tar of files for each county, describing yearly results.

    name: the name of the effect bundle.
    weather_ncdf: str for one, or {variable: filename} for calling
        generator with {variable: data}.
    var: str for one, or [str] for calling generator with {variable: data}
    make_generator(fips, times, daily): returns an iterator of (year, effect).
    targetdir: path to a final destination for the bundle, or a
        function to take the data
    collabel: the label for the effect column
    """
    # If this is a function, we just start iterating (no bundle is written).
    if hasattr(targetdir, '__call__'):
        call_with_generator(name, weather_ncdf, var, make_generator, targetdir)
        return
    # Create the working directory
    tempdir = enter_local_tempdir()
    os.mkdir(name)
    # Helper function for calling write_effect_file with collabel
    def write_csv(name, fips, generator):
        write_effect_file(name, fips, generator, collabel)
    # Iterate through the data
    call_with_generator(name, weather_ncdf, var, make_generator, write_csv)
    # Create the effect bundle
    target = get_target_path(targetdir, name)
    os.system("tar -czf " + os.path.join("..", target) + ".tar.gz " + name)
    # Remove the working directory
    exit_local_tempdir(tempdir)
def yield_given(name, yyyyddd, weather, make_generator):
    """Yields (as an iterator) rows of the result of applying make_generator
    to the given weather.

    name: the name of the effect bundle.
    yyyyddd: YYYYDDD formatted date values.
    weather: a dictionary to call generator with {variable: data}.
    make_generator(fips, times, daily): returns an iterator of (year, effect).
    """
    # A fips code of 0 marks "no specific region" for this single call.
    generator = make_generator(0, yyyyddd, weather)
    if generator is None:
        return
    # Pass every produced row straight through to the caller.
    for row in generator:
        yield row
    # Signal the end of the counties so the generator can clean up.
    send_fips_complete(make_generator)
def call_with_generator(name, weather_ncdf, var, make_generator, targetfunc):
    """Helper function for calling make_generator with each variable
    set. In cases with multiple weather datasets, assumes all use the
    same clock (sequence of times) and geography (sequence of
    counties).

    name: the name of the effect bundle.
    weather_ncdf: str for one, or {variable: filename} for calling
        generator with {variable: data}.
    var: str for one, or [str] for calling generator with {variable: data}
    make_generator(fips, times, daily): returns an iterator of (year, effect).
        NOTE(review): the loop below also passes lat=/lon= keyword arguments,
        so make_generator must accept them -- confirm against callers.
    targetfunc: function(name, fips, generator) to handle results
    """
    if isinstance(weather_ncdf, dict) and isinstance(var, list):
        # In this case, we generate a dictionary of variables
        weather = {}
        times = None # All input assumed to have same clock
        # Filter by the variables in var
        for variable in var:
            # Retrieve the netcdf object (rootgrp) and add to weather dict
            if isinstance(weather_ncdf[variable], str):
                # Open this up as a netCDF and read data into array
                rootgrp = Dataset(weather_ncdf[variable], 'r+', format='NETCDF4')
                weather[variable] = rootgrp.variables[variable][:,:]
            elif isinstance(weather_ncdf[variable], dict):
                # This is an {original, data, times} dictionary
                rootgrp = weather_ncdf[variable]['original']
                weather[variable] = weather_ncdf[variable]['data']
                if 'times' in weather_ncdf[variable]:
                    times = weather_ncdf[variable]['times']
            else:
                # This is already a netcdf object
                rootgrp = weather_ncdf[variable]
                weather[variable] = rootgrp.variables[variable][:,:]
        # Collect additional information from netcdf object
        # (taken from the last variable's file -- assumes shared geography)
        counties = rootgrp.variables['fips']
        lats = rootgrp.variables['lat']
        lons = rootgrp.variables['lon']
        if times is None:
            times = rootgrp.variables['time']
    else:
        # We just want a single variable (not a dictionary of them)
        # Retrieve the netcdf object (rootgrp) and add to weather dict
        if isinstance(weather_ncdf, str):
            # Open this up as a netCDF and read into array
            rootgrp = Dataset(weather_ncdf, 'r+', format='NETCDF4')
            weather = rootgrp.variables[var][:,:]
        elif isinstance(weather_ncdf, dict):
            # This is an {original, data, times} dictionary
            rootgrp = weather_ncdf['original']
            weather = weather_ncdf['data']
        else:
            # This is already a netcdf object
            rootgrp = weather_ncdf
            weather = rootgrp.variables[var][:,:]
        # Collect additional information from netcdf object
        counties = rootgrp.variables['fips']
        lats = rootgrp.variables['lat']
        lons = rootgrp.variables['lon']
        times = rootgrp.variables['time']
    # Loop through counties, calling make_generator with each
    for ii in range(len(counties)):
        fips = canonical_fips(counties[ii])
        print fips
        # Extract the weather just for this county
        if not isinstance(weather, dict):
            daily = weather[:,ii]
        else:
            daily = {}
            for variable in weather:
                daily[variable] = weather[variable][:,ii]
        # Call make_generator for this county
        generator = make_generator(fips, times, daily, lat=lats[ii], lon=lons[ii])
        if generator is None:
            continue
        # Call targetfunc with the result
        targetfunc(name, fips, generator)
    # Signal the end of the counties
    send_fips_complete(make_generator)
def make_tar_ncdf_profile(weather_ncdf, var, make_generator):
    """Like make_tar_ncdf, except that just goes through the motions,
    and only for 100 counties.  Results are printed rather than written
    to any file (useful for profiling / smoke-testing).

    weather_ncdf: str for one, or {variable: filename} for calling
        generator with {variable: data}.
    var: str for one, or [str] for calling generator with {variable: data}
    """
    # Open a single netCDF if only one filename passed in
    if isinstance(weather_ncdf, str):
        # Collect the necessary info
        rootgrp = Dataset(weather_ncdf, 'r+', format='NETCDF4')
        counties = rootgrp.variables['fips']
        lats = rootgrp.variables['lat']
        lons = rootgrp.variables['lon']
        times = rootgrp.variables['time']
        weather = rootgrp.variables[var][:,:]
    else:
        # Open all netCDF referenced in var
        weather = {} # Construct a dictionary of [yyyyddd x county] arrays
        for variable in var:
            rootgrp = Dataset(weather_ncdf[variable], 'r+', format='NETCDF4')
            # Geography/clock taken from the last file -- assumes shared grid.
            counties = rootgrp.variables['fips']
            lats = rootgrp.variables['lat']
            lons = rootgrp.variables['lon']
            times = rootgrp.variables['time']
            weather[variable] = rootgrp.variables[variable][:,:]
    # Just do 100 counties
    for ii in range(100):
        # Always using 5 digit fips
        fips = canonical_fips(counties[ii])
        print fips
        # Construct the input array for this county
        if not isinstance(weather, dict):
            daily = weather[:,ii]
        else:
            daily = {}
            for variable in weather:
                daily[variable] = weather[variable][:,ii]
        # Generate the generator
        generator = make_generator(fips, times, daily, lat=lats[ii], lon=lons[ii])
        if generator is None:
            continue
        # Just print out the results
        print "year", "fraction"
        for (year, effect) in generator:
            print year, effect
### Effect calculation functions
## make_generator functions
def load_tar_make_generator(targetdir, name, column=None):
    """Load existing data for additional calculations.

    Extracts the bundle into a 'loader-' tempdir, and returns a
    make_generator-compatible closure that replays each county's stored
    effect file.

    targetdir: relative path to a directory of effect bundles.
    name: the effect name (so the effect bundle is at <targetdir>/<name>.tar.gz)
    column: if given, yield (year, row[column]) pairs; otherwise yield the
        whole row (year plus all value columns as floats).
    """
    # Extract the existing tar into a loader tempdir
    tempdir = enter_local_tempdir('loader-')
    os.system("tar -xzf " + os.path.join("..", targetdir, name + ".tar.gz"))
    # killit=False: the extracted files must outlive this function; the
    # closure below removes them when it receives FIPS_COMPLETE.
    exit_local_tempdir(tempdir, killit=False)
    def generate(fips, yyyyddd, temps, *args, **kw):
        # When all of the counties are done, kill the local dir
        if fips == FIPS_COMPLETE:
            print "Remove", tempdir
            # We might be in another tempdir-- check
            if os.path.exists(tempdir):
                kill_local_tempdir(tempdir)
            else:
                kill_local_tempdir(os.path.join('..', tempdir))
            return
        # Open up the effect for this bundle
        fipspath = os.path.join(tempdir, name, fips + ".csv")
        if not os.path.exists(fipspath):
            fipspath = os.path.join('..', fipspath)
        if not os.path.exists(fipspath):
            # If we can't find this, just return a single year with 0 effect
            print fipspath + " doesn't exist"
            yield (yyyyddd[0] / 1000, 0)
            # NOTE(review): raising StopIteration inside a generator ends it
            # in Python 2, but becomes a RuntimeError under PEP 479 (Py3).
            raise StopIteration()
        with open(fipspath) as fp:
            reader = csv.reader(fp)
            reader.next() # ignore header
            # yield the same values that generated this effect file
            for row in reader:
                if column is None:
                    yield [int(row[0])] + map(float, row[1:])
                else:
                    yield (int(row[0]), float(row[column]))
    return generate
### Aggregation from counties to larger regions
def aggregate_tar(name, scale_dict=None, targetdir=None, collabel="fraction", get_region=None, report_all=False):
    """Aggregates results from counties to larger regions.

    name: the name of an impact, already constructed into an effect bundle
    scale_dict: a dictionary of weights, per county
    targetdir: directory holding both county bundle and to hold region bundle
    collabel: Label for result column(s)
    get_region: either None (uses first two digits of FIPS-- aggregates to state),
        True (combine all counties-- aggregate to national),
        or a function(fips) => code which aggregates each set of counties producing the same name
    report_all: if true, include a whole sequence of results; otherwise, just take first one
    """
    # Get a region name and a get_region function
    region_name = 'region' # final bundle will use this as a suffix
    if get_region is None: # aggregate to state
        get_region = lambda fips: fips[0:2]
        region_name = 'state'
    elif get_region is True: # aggregate to nation
        get_region = lambda fips: 'national'
        region_name = 'national'
    else:
        # get a title, if get_region returns one for dummy-fips "_title_"
        try:
            title = get_region('_title_')
            if title is not None:
                region_name = title
        except:
            pass
    regions = {} # {region code: {year: (numer, denom)}}
    # This is the effect bundle to aggregate
    target = get_target_path(targetdir, name)
    # Generate a temporary directory to extract county results
    tempdir = enter_local_tempdir()
    # Extract all of the results
    os.system("tar -xzf " + os.path.join("..", target) + ".tar.gz")
    # Go through all counties
    for filename in os.listdir(name):
        # If this is a county file
        match = re.match(r'(\d{5})\.csv', filename)
        if match:
            code = match.groups(1)[0] # get the FIPS code
            # Check that it's in the scale_dict
            if scale_dict is not None and code not in scale_dict:
                continue
            # Check which region it is in
            region = get_region(code)
            if region is None:
                continue
            # Prepare the dictionary of results for this region, if necessary
            if region not in regions:
                regions[region] = {} # year => (numer, denom)
            # Get out the current dictionary of years
            years = regions[region]
            # Go through every year in this effect file
            with open(os.path.join(name, filename)) as csvfp:
                reader = csv.reader(csvfp, delimiter=',')
                reader.next()
                if report_all: # Report entire sequence of results
                    for row in reader:
                        # Get the numerator and denominator for this weighted sum
                        if row[0] not in years:
                            numer, denom = (np.array([0] * (len(row)-1)), 0)
                        else:
                            numer, denom = years[row[0]]
                        # Add on one more value to the weighted sum
                        try:
                            numer = numer + np.array(map(float, row[1:])) * (scale_dict[code] if scale_dict is not None else 1)
                            denom = denom + (scale_dict[code] if scale_dict is not None else 1)
                        except Exception, e:
                            print e
                        # Put the weighted sum calculation back in for this year
                        years[row[0]] = (numer, denom)
                else: # Just report the first result
                    for row in reader:
                        # Get the numerator and denominator for this weighted sum
                        if row[0] not in years:
                            numer, denom = (0, 0)
                        else:
                            numer, denom = years[row[0]]
                        # Add on one more value to the weighted sum
                        numer = numer + float(row[1]) * (scale_dict[code] if scale_dict is not None else 1)
                        denom = denom + (scale_dict[code] if scale_dict is not None else 1)
                        # Put the weighted sum calculation back in for this year
                        years[row[0]] = (numer, denom)
    # Remove all county results from extracted tar
    os.system("rm -r " + name)
    # Start producing directory of region results
    dirregion = name + '-' + region_name
    if not os.path.exists(dirregion):
        os.mkdir(dirregion)
    # For each region that got a result
    for region in regions:
        # Create a new CSV effect file
        with open(os.path.join(dirregion, region + '.csv'), 'wb') as csvfp:
            writer = csv.writer(csvfp, quoting=csv.QUOTE_MINIMAL)
            # Include a header row
            if not isinstance(collabel, list):
                writer.writerow(["year", collabel])
            else:
                writer.writerow(["year"] + collabel)
            # Construct a sorted list of years from the keys of this region's dictionary
            years = map(str, sorted(map(int, regions[region].keys())))
            # For each year, output the weighted average
            for year in years:
                if regions[region][year][1] == 0: # the denom is 0-- never got a value
                    writer.writerow([year, 'NA'])
                else:
                    # Write out the year's result
                    if report_all:
                        writer.writerow([year] + list(regions[region][year][0] / float(regions[region][year][1])))
                    else:
                        writer.writerow([year, float(regions[region][year][0]) / regions[region][year][1]])
    # Construct the effect bundle
    target = get_target_path(targetdir, dirregion)
    os.system("tar -czf " + os.path.join("..", target) + ".tar.gz " + dirregion)
    # Clean up temporary directory
    exit_local_tempdir(tempdir)
suffix = ''.join(random.choice(string.lowercase) for i in range(6))
|
<|file_name|>ProceduralShape.cpp<|end_file_name|><|fim▁begin|>/* This file is part of TSRE5.
*
* TSRE5 - train sim game engine and MSTS/OR Editors.
* Copyright (C) 2016 Piotr Gadecki <pgadecki@gmail.com>
*
* Licensed under GNU General Public License 3.0 or later.
*
* See LICENSE.md or https://www.gnu.org/licenses/gpl.html
*/
#include "ProceduralShape.h"
#include "ObjFile.h"
#include "Game.h"
#include "GLMatrix.h"
#include "TrackShape.h"
#include "Route.h"
#include "TSectionDAT.h"
#include <QDateTime>
#include <QFile>
#include <math.h>
#include "Intersections.h"
#include "ComplexLine.h"
#include "ShapeTemplates.h"
// Cache of generated geometry, keyed by the hash built in GetShapeHash.
QHash<QString, QVector<OglObj*>> ProceduralShape::Shapes;
// Shared template definitions, loaded once in Load().
ShapeTemplates *ProceduralShape::ShapeTemplateFile = NULL;
GlobalDefinitions *ProceduralShape::GlobalDefinitionFile = NULL;
// True once Load() has initialized the shared template data.
bool ProceduralShape::Loaded = false;
// Cache of loaded .obj mesh files, keyed by resolved path.
QMap<QString, ObjFile*> ProceduralShape::Files;
float ProceduralShape::Alpha = 0;
// Running count of distinct generated shapes (debug/logging only).
unsigned int ProceduralShape::ShapeCount = 0;
ObjFile* ProceduralShape::GetObjFile(QString name) {
    // Prefer a route-local override; fall back to the application data copy.
    QString routePath = Game::root + "/routes/" + Game::route + "/procedural/" + name;
    routePath.replace("//", "/");
    QString appPath = QString("tsre_appdata/") + Game::AppDataVersion + "/procedural/" + name;
    appPath.replace("//", "/");
    QString chosen = appPath;
    QFile routeFile(routePath);
    if (routeFile.exists())
        chosen = routePath;
    // Lazily load and cache the mesh file.
    if (Files[chosen] == NULL)
        Files[chosen] = new ObjFile(chosen);
    return Files[chosen];
}
QString ProceduralShape::GetTexturePath(QString textureName){
    // Prefer a route-local texture; fall back to the application data copy.
    QString routePath = Game::root + "/routes/" + Game::route + "/procedural/" + textureName;
    routePath.replace("//", "/");
    QString appPath = QString("tsre_appdata/") + Game::AppDataVersion + "/procedural/" + textureName;
    appPath.replace("//", "/");
    QFile routeFile(routePath);
    if (routeFile.exists())
        return routePath;
    return appPath;
}
void ProceduralShape::Load() {
    // One-time initialization of the shared template data; later calls no-op.
    if (Loaded)
        return;
    Alpha = -0.3;
    // Load Templates
    ShapeTemplateFile = new ShapeTemplates();
    Loaded = true;
}
QString ProceduralShape::GetShapeHash(QString templateName, TrackShape* tsh, QMap<int, float> &angles, int shapeOffset){
    // Encode every angle entry as <key-hex><angle*100-hex>_ so the cache key
    // distinguishes different switch/junction positions.
    QString angleTag;
    QMapIterator<int, float> it(angles);
    while (it.hasNext()) {
        it.next();
        angleTag += QString::number(it.key(), 16) + QString::number((int)(it.value()*100), 16) + "_";
    }
    return tsh->getHashString() + QString::number(shapeOffset, 16) + angleTag + templateName;
}
QString ProceduralShape::GetShapeHash(QString templateName, QVector<TSection> &sections, int shapeOffset){
    // Concatenate each section's hash, the variant offset and template name.
    QString key;
    for (int idx = 0; idx < sections.size(); idx++)
        key += QString::number(sections[idx].getHash(), 16);
    key += QString::number(shapeOffset, 16);
    key += templateName;
    return key;
}
QString ProceduralShape::GetShapeHash(QString templateName, ComplexLine &line, int shapeOffset){
    // Cache key: line geometry hash + variant offset (hex) + template name.
    return line.getHash() + QString::number(shapeOffset, 16) + templateName;
}
void ProceduralShape::GetShape(QString templateName, QVector<OglObj*>& shape, TrackShape* tsh, QMap<int, float> &angles) {
    // Look up the cache first; generate the geometry only on a miss.
    QString key = ProceduralShape::GetShapeHash(templateName, tsh, angles, 0);
    if (ProceduralShape::Shapes[key].isEmpty()) {
        qDebug() << "New Procedural Shape: "<< ShapeCount++ << key;
        ProceduralShape::GenShape(templateName, ProceduralShape::Shapes[key], tsh, angles);
    }
    shape.append(ProceduralShape::Shapes[key]);
}
// Generate geometry for a whole TrackShape: one ComplexLine per path, then
// one mesh batch per template element (ties, rails, ballast) per path.
void ProceduralShape::GenShape(QString templateName, QVector<OglObj*>& shape, TrackShape* tsh, QMap<int, float> &angles) {
    if (!Loaded)
        Load();
    if (tsh == NULL)
        return;
    /*if(tsh->numpaths == 2 && tsh->xoverpts > 0){
        return GenTrackShape(shape, tsh, angles);
    }
    if(tsh->numpaths == 2 && tsh->mainroute > -1){
        return GenTrackShape(shape, tsh, angles);
    }*/
    // Fall back to the default track template when none is specified.
    if(templateName == "" || templateName == "DEFAULT")
        templateName = "DefaultTrack";
    if(ShapeTemplateFile->templates[templateName] == NULL)
        return;
    ShapeTemplate *sTemplate = ShapeTemplateFile->templates[templateName];
    // Build one centerline per path from the route's track sections.
    ComplexLine *line = new ComplexLine[tsh->numpaths];
    for (int j = 0; j < tsh->numpaths; j++) {
        TrackShape::SectionIdx *section = &tsh->path[j];
        QVector<TSection> sections;
        for (int i = 0; i < section->n; i++) {
            if (Game::currentRoute->tsection->sekcja[(int) section->sect[i]] != NULL)
                sections.push_back(*Game::currentRoute->tsection->sekcja[(int) section->sect[i]]);
        }
        line[j].init(sections);
    }
    QHashIterator<QString, ShapeTemplateElement*> i(sTemplate->elements);
    while (i.hasNext()) {
        i.next();
        if(i.value() == NULL)
            continue;
        if(i.value()->type == ShapeTemplateElement::TIE){
            // Crossovers and dual-route shapes get specially merged ties.
            if(tsh->numpaths == 2 && tsh->xoverpts > 0){
                GenAdvancedTie(i.value(), shape, tsh, angles);
            } else if(tsh->numpaths == 2 && tsh->mainroute > -1){
                GenAdvancedTie(i.value(), shape, tsh, angles);
            } else {
                // angles[j*2] / angles[j*2+1] are the begin/end roll angles
                // for path j -- presumably superelevation; verify upstream.
                for (int j = 0; j < tsh->numpaths; j++) {
                    GenTie(i.value(), shape, line[j], tsh->path[j].pos, -tsh->path[j].rotDeg, angles[j * 2], angles[j * 2 + 1]);
                }
            }
        }
        if(i.value()->type == ShapeTemplateElement::RAIL)
            for (int j = 0; j < tsh->numpaths; j++)
                GenRails(i.value(), shape, line[j], tsh->path[j].pos, -tsh->path[j].rotDeg, angles[j * 2], angles[j * 2 + 1]);
        if(i.value()->type == ShapeTemplateElement::BALLAST)
            for (int j = 0; j < tsh->numpaths; j++)
                GenBallast(i.value(), shape, line[j], tsh->path[j].pos, -tsh->path[j].rotDeg, angles[j * 2], angles[j * 2 + 1]);
    }
    delete[] line;
    return;
}
void ProceduralShape::GetShape(QString templateName, QVector<OglObj*>& shape, QVector<TSection> §ions, int shapeOffset) {
QString hash = ProceduralShape::GetShapeHash(templateName, sections, shapeOffset);
if(ProceduralShape::Shapes[hash].size() == 0){
qDebug() << "New Procedural Shape: "<< ShapeCount++ << hash;
ProceduralShape::GenShape(templateName, ProceduralShape::Shapes[hash], sections, shapeOffset);
}
shape.append(ProceduralShape::Shapes[hash]);
}
void ProceduralShape::GenShape(QString templateName, QVector<OglObj*>& shape, QVector<TSection> §ions, int shapeOffset) {
if (!Loaded)
Load();
ComplexLine line;
line.init(sections);
ProceduralShape::GenShape(templateName, shape, line, shapeOffset);
}
void ProceduralShape::GetShape(QString templateName, QVector<OglObj*>& shape, ComplexLine& line, int shapeOffset) {
    // Look up the cache first; generate the geometry only on a miss.
    QString key = ProceduralShape::GetShapeHash(templateName, line, shapeOffset);
    if (ProceduralShape::Shapes[key].isEmpty()) {
        qDebug() << "New Procedural Shape: "<< ShapeCount++ << key;
        ProceduralShape::GenShape(templateName, ProceduralShape::Shapes[key], line, shapeOffset);
    }
    shape.append(ProceduralShape::Shapes[key]);
}
// Generate geometry for a single ComplexLine using the named template.
// Appends one OglObj batch per template element to 'shape'.
void ProceduralShape::GenShape(QString templateName, QVector<OglObj*>& shape, ComplexLine& line, int shapeOffset){
    // Guard added for consistency with the other GenShape overloads:
    // without it, ShapeTemplateFile may still be NULL on first use.
    if (!Loaded)
        Load();
    Alpha = -0.3;
    if(templateName == "" || templateName == "DEFAULT")
        templateName = "DefaultTrack";
    if(ShapeTemplateFile->templates[templateName] == NULL)
        return;
    ShapeTemplate *sTemplate = ShapeTemplateFile->templates[templateName];
    QHashIterator<QString, ShapeTemplateElement*> i(sTemplate->elements);
    while (i.hasNext()) {
        i.next();
        // Fixed: the original tested "i.value()->type == NULL", which
        // dereferences a possibly-null element and would wrongly skip any
        // element whose type enum equals 0.  The sibling GenShape overload
        // checks the pointer itself, which is what is intended here.
        if(i.value() == NULL)
            continue;
        if(i.value()->type == ShapeTemplateElement::TIE)
            GenTie(i.value(), shape, line);
        if(i.value()->type == ShapeTemplateElement::RAIL)
            GenRails(i.value(), shape, line);
        if(i.value()->type == ShapeTemplateElement::BALLAST)
            GenBallast(i.value(), shape, line);
        if(i.value()->type == ShapeTemplateElement::STRETCH)
            GenStretch(i.value(), shape, line, shapeOffset);
        if(i.value()->type == ShapeTemplateElement::POINT)
            GenPointShape(i.value(), shape, line, shapeOffset);
    }
}
void ProceduralShape::GenRails(ShapeTemplateElement *stemplate, QVector<OglObj*> &shape, ComplexLine &line) {
float* p = new float[2000000];
float* ptr = p;
float q[4];
float posRot[6];
float matrix1[16];
float matrix2[16];
float vOffset[3];
ObjFile *tFile;
QString* texturePath;
tFile = GetObjFile(stemplate->shape.first());
<|fim▁hole|> Quat::fromRotationXYZ(q, (float*) (posRot + 3));
Vec3::set(vOffset, stemplate->xOffset, 0.0, 0.0);
Vec3::transformQuat(vOffset, vOffset, q);
Vec3::add(posRot, vOffset, posRot);
Mat4::fromRotationTranslation(matrix1, q, posRot);
line.getDrawPosition(posRot, i + step, stemplate->xOffset);
Quat::fromRotationXYZ(q, (float*) (posRot + 3));
Vec3::set(vOffset, stemplate->xOffset, 0.0, 0.0);
Vec3::transformQuat(vOffset, vOffset, q);
Vec3::add(posRot, vOffset, posRot);
Mat4::fromRotationTranslation(matrix2, q, posRot);
PushShapePartExpand(ptr, tFile, stemplate->yOffset, matrix1, matrix2, q, i, i + step);
}
texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
shape.push_back(new OglObj());
shape.back()->setMaterial(texturePath);
shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
delete[] p;
}
// Generate rail geometry along 'line', pre-transformed into the track-shape
// frame given by sPos/sAngle, with the roll angle interpolated linearly from
// angleB (start) to angleE (end) -- presumably superelevation; verify upstream.
void ProceduralShape::GenRails(ShapeTemplateElement *stemplate, QVector<OglObj*>& shape, ComplexLine& line, float* sPos, float sAngle, float angleB, float angleE) {
    float matrixS[16];
    // Scratch vertex buffer; ptr advances as PushShapePartExpand appends.
    float* p = new float[4000000];
    float* ptr = p;
    float q[4];
    float qr[4];
    float posRot[6];
    float matrix1[16];
    float matrix2[16];
    ObjFile *tFile;
    QString* texturePath;
    float pp[3];
    float zangle;
    float vOffset[3];
    // Shape-level transform: yaw by sAngle (degrees) around the mirrored origin.
    Quat::fill(q);
    Quat::rotateY(q, q, sAngle * M_PI / 180.0);
    Vec3::set(pp, -sPos[0], sPos[1], sPos[2]);
    Mat4::fromRotationTranslation(matrixS, q, pp);
    tFile = GetObjFile(stemplate->shape.first());
    // Extrude the rail profile in 3 m segments along the line.
    float step = 3;
    for (float i = 0; i < line.length; i += step) {
        // Segment start: frame at distance i, offset sideways by xOffset.
        line.getDrawPosition(posRot, i, stemplate->xOffset);
        Quat::fill(qr);
        Quat::rotateY(qr, qr, sAngle * M_PI / 180.0);
        Quat::fromRotationXYZ(q, (float*) (posRot + 3));
        zangle = angleB*(1.0 - i / line.length) + angleE*(i / line.length);
        Quat::rotateZ(q, q, zangle);
        Vec3::set(vOffset, stemplate->xOffset, 0.0, 0.0);
        Vec3::transformQuat(vOffset, vOffset, q);
        Vec3::add(posRot, vOffset, posRot);
        Mat4::fromRotationTranslation(matrix1, q, posRot);
        Mat4::multiply(matrix1, matrixS, matrix1);
        // Segment end: same construction at distance i + step.
        line.getDrawPosition(posRot, i + step, stemplate->xOffset);
        Quat::fromRotationXYZ(q, (float*) (posRot + 3));
        zangle = angleB*(1.0 - (i + step) / line.length) + angleE*((i + step) / line.length);
        Quat::rotateZ(q, q, zangle);
        Vec3::set(vOffset, stemplate->xOffset, 0.0, 0.0);
        Vec3::transformQuat(vOffset, vOffset, q);
        Vec3::add(posRot, vOffset, posRot);
        Mat4::fromRotationTranslation(matrix2, q, posRot);
        Mat4::multiply(matrix2, matrixS, matrix2);
        Quat::multiply(qr, qr, q);
        PushShapePartExpand(ptr, tFile, stemplate->yOffset, matrix1, matrix2, qr, i, i + step);
    }
    texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
    shape.push_back(new OglObj());
    shape.back()->setMaterial(texturePath);
    shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
    shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
    delete[] p;
}
// Place a single (non-extruded) mesh at the start of 'line'.  shapeOffset
// selects one of the template's shape variants (wrapped by modulo).
void ProceduralShape::GenPointShape(ShapeTemplateElement *stemplate, QVector<OglObj*> &shape, ComplexLine &line, int shapeOffset) {
    // Scratch vertex buffer; ptr advances as PushShapePart appends.
    float* p = new float[2000000];
    float* ptr = p;
    float q[4];
    float posRot[6];
    float matrix1[16];
    ObjFile *tFile;
    QString* texturePath;
    shapeOffset = shapeOffset % stemplate->shape.size();
    tFile = GetObjFile(stemplate->shape[shapeOffset]);
    // Anchor the mesh at the line's start position and orientation.
    line.getDrawPosition(posRot, 0);
    Quat::fromRotationXYZ(q, (float*) (posRot + 3));
    Mat4::fromRotationTranslation(matrix1, q, posRot);
    PushShapePart(ptr, tFile, 0.0, matrix1, q);
    texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
    shape.push_back(new OglObj());
    shape.back()->setMaterial(texturePath);
    shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
    shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
    // Removed: unused matrix2 scratch array and a dead "ptr = p;" just
    // before the buffer was freed, plus a commented-out PushShapePart call.
    delete[] p;
}
// Place one mesh at the start of 'line' and stretch it along the full line
// length.  shapeOffset selects one of the template's variants (modulo-wrapped).
void ProceduralShape::GenStretch(ShapeTemplateElement *stemplate, QVector<OglObj*> &shape, ComplexLine &line, int shapeOffset) {
    // Scratch vertex buffer; ptr advances as PushShapePartStretch appends.
    float* p = new float[2000000];
    float* ptr = p;
    float q[4];
    float posRot[6];
    float matrix1[16];
    ObjFile *tFile;
    QString* texturePath;
    shapeOffset = shapeOffset % stemplate->shape.size();
    tFile = GetObjFile(stemplate->shape[shapeOffset]);
    // Anchor at the line's start; the helper stretches to line.length.
    line.getDrawPosition(posRot, 0);
    Quat::fromRotationXYZ(q, (float*) (posRot + 3));
    Mat4::fromRotationTranslation(matrix1, q, posRot);
    PushShapePartStretch(ptr, tFile, 0.0, matrix1, q, line.length);
    texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
    shape.push_back(new OglObj());
    shape.back()->setMaterial(texturePath);
    shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
    shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
    // Removed: unused matrix2 scratch array and a dead "ptr = p;" just
    // before the buffer was freed.
    delete[] p;
}
// Extrude the ballast profile along 'line' in fixed-length segments.
void ProceduralShape::GenBallast(ShapeTemplateElement *stemplate, QVector<OglObj*> &shape, ComplexLine &line) {
    // Scratch vertex buffer; ptr advances as PushShapePartExpand appends.
    float* p = new float[2000000];
    float* ptr = p;
    float q[4];
    float posRot[6];
    float matrix1[16];
    float matrix2[16];
    ObjFile *tFile;
    QString* texturePath;
    tFile = GetObjFile(stemplate->shape.first());
    // 4 m segments along the line; each segment interpolates between the
    // frames at its two endpoints.
    float step = 4;
    for (float i = 0; i < line.length; i += step) {
        line.getDrawPosition(posRot, i);
        Quat::fromRotationXYZ(q, (float*) (posRot + 3));
        Mat4::fromRotationTranslation(matrix1, q, posRot);
        line.getDrawPosition(posRot, i + step);
        Quat::fromRotationXYZ(q, (float*) (posRot + 3));
        Mat4::fromRotationTranslation(matrix2, q, posRot);
        PushShapePartExpand(ptr, tFile, stemplate->yOffset, matrix1, matrix2, q, i, i + step);
    }
    texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
    shape.push_back(new OglObj());
    shape.back()->setMaterial(texturePath);
    shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
    shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
    ptr = p;
    delete[] p;
}
// Extrude ballast along 'line', pre-transformed into the track-shape frame
// given by sPos/sAngle, with roll interpolated from angleB to angleE.
void ProceduralShape::GenBallast(ShapeTemplateElement *stemplate, QVector<OglObj*>& shape, ComplexLine& line, float* sPos, float sAngle, float angleB, float angleE) {
    float matrixS[16];
    // Scratch vertex buffer; ptr advances as PushShapePartExpand appends.
    float* p = new float[4000000];
    float* ptr = p;
    float q[4];
    float qr[4];
    float posRot[6];
    float matrix1[16];
    float matrix2[16];
    ObjFile *tFile;
    QString* texturePath;
    float pp[3];
    float zangle;
    // Shape-level transform: yaw by sAngle (degrees) around the mirrored origin.
    Quat::fill(q);
    Quat::rotateY(q, q, sAngle * M_PI / 180.0);
    Vec3::set(pp, -sPos[0], sPos[1], sPos[2]);
    Mat4::fromRotationTranslation(matrixS, q, pp);
    tFile = GetObjFile(stemplate->shape.first());
    // 4 m segments along the line.
    float step = 4;
    for (float i = 0; i < line.length; i += step) {
        line.getDrawPosition(posRot, i);
        Quat::fill(qr);
        Quat::rotateY(qr, qr, sAngle * M_PI / 180.0);
        Quat::fromRotationXYZ(q, (float*) (posRot + 3));
        zangle = angleB*(1.0 - i / line.length) + angleE*(i / line.length);
        Quat::rotateZ(q, q, zangle);
        Mat4::fromRotationTranslation(matrix1, q, posRot);
        Mat4::multiply(matrix1, matrixS, matrix1);
        line.getDrawPosition(posRot, i + step);
        Quat::fromRotationXYZ(q, (float*) (posRot + 3));
        zangle = angleB*(1.0 - (i + step) / line.length) + angleE*((i + step) / line.length);
        Quat::rotateZ(q, q, zangle);
        Mat4::fromRotationTranslation(matrix2, q, posRot);
        Mat4::multiply(matrix2, matrixS, matrix2);
        Quat::multiply(qr, qr, q);
        PushShapePartExpand(ptr, tFile, stemplate->yOffset, matrix1, matrix2, qr, i, i + step);
    }
    texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
    shape.push_back(new OglObj());
    shape.back()->setMaterial(texturePath);
    shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
    shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
    delete[] p;
}
void ProceduralShape::GenTie(ShapeTemplateElement *stemplate, QVector<OglObj*> &shape, ComplexLine &line) {
float* p = new float[2000000];
float* ptr = p;
float q[4];
float posRot[6];
float matrix1[16];
float matrix2[16];
ObjFile *tFile;
QString* texturePath;
tFile = GetObjFile(stemplate->shape.first());
for (float i = 0; i < line.length; i += 0.65) {
line.getDrawPosition(posRot, i);
Quat::fromRotationXYZ(q, (float*) (posRot + 3));
Mat4::fromRotationTranslation(matrix1, q, posRot);
PushShapePart(ptr, tFile, 0.155, matrix1, q);
}
texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
shape.push_back(new OglObj());
shape.back()->setMaterial(texturePath);
shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
delete[] p;
}
void ProceduralShape::GenTie(ShapeTemplateElement *stemplate, QVector<OglObj*> &shape, ComplexLine &line, float *sPos, float sAngle, float angleB, float angleE) {
float matrixS[16];
float* p = new float[4000000];
float* ptr = p;
float q[4];
float qr[4];
float posRot[6];
float matrix1[16];
float matrix2[16];
ObjFile *tFile;
QString* texturePath;
float pp[3];
float zangle;
Quat::fill(q);
Quat::rotateY(q, q, sAngle * M_PI / 180.0);
Vec3::set(pp, -sPos[0], sPos[1], sPos[2]);
Mat4::fromRotationTranslation(matrixS, q, pp);
tFile = GetObjFile(stemplate->shape.first());
for (float i = 0; i < line.length; i += 0.65) {
line.getDrawPosition(posRot, i);
Quat::fill(qr);
Quat::rotateY(qr, qr, sAngle * M_PI / 180.0);
Quat::fromRotationXYZ(q, (float*) (posRot + 3));
zangle = angleB * (1.0 - i / line.length) + angleE * (i / line.length);
Quat::rotateZ(q, q, zangle);
Mat4::fromRotationTranslation(matrix1, q, posRot);
Quat::multiply(qr, qr, q);
Mat4::multiply(matrix1, matrixS, matrix1);
PushShapePart(ptr, tFile, 0.155, matrix1, qr);
}
texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
shape.push_back(new OglObj());
shape.back()->setMaterial(texturePath);
shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
delete[] p;
}
void ProceduralShape::GenAdvancedTie(ShapeTemplateElement *stemplate, QVector<OglObj*>& shape, TrackShape* tsh, QMap<int, float>& angles) {
float matrixS[16];
float matrixS1[16];
float matrixS2[16];
QVector<QVector < ShapePrimitive>> primitives;
if (tsh->numpaths == 2) {
//for(int j = 0; j < tsh->numpaths; j++){
TrackShape::SectionIdx *section = &tsh->path[0];
QVector<TSection> sections1;
QVector<TSection> sections2;
for (int i = 0; i < section->n; i++) {
if (Game::currentRoute->tsection->sekcja[(int) section->sect[i]] != NULL)
sections1.push_back(*Game::currentRoute->tsection->sekcja[(int) section->sect[i]]);
}
section = &tsh->path[1];
for (int i = 0; i < section->n; i++) {
if (Game::currentRoute->tsection->sekcja[(int) section->sect[i]] != NULL)
sections2.push_back(*Game::currentRoute->tsection->sekcja[(int) section->sect[i]]);
}
//float* p = new float[4000000];
//float* ptr = p;
float q[4];
float q1[4];
float q2[4];
float qr[4];
float posRot[6];
float posRot1[6];
float posRot2[6];
float matrix1[16];
float matrix2[16];
ObjFile *tFile;
ComplexLine line1;
ComplexLine line2;
line1.init(sections1);
line2.init(sections2);
//qDebug() << line.length << "length";
float pp[3];
float zangle;
Quat::fill(q1);
Quat::rotateY(q1, q1, -tsh->path[0].rotDeg * M_PI / 180.0);
Vec3::set(pp, -tsh->path[0].pos[0], tsh->path[0].pos[1], tsh->path[0].pos[2]);
Mat4::fromRotationTranslation(matrixS1, q1, pp);
Quat::fill(q2);
Quat::rotateY(q2, q2, -tsh->path[1].rotDeg * M_PI / 180.0);
Vec3::set(pp, -tsh->path[1].pos[0], tsh->path[1].pos[1], tsh->path[1].pos[2]);
Mat4::fromRotationTranslation(matrixS2, q2, pp);
//Quat::multiply(q, q1, q2);
//Quat
bool junct = false;
if (tsh->mainroute > -1)
junct = true;
primitives.push_back(QVector<ShapePrimitive>());
tFile = GetObjFile(stemplate->shape.first());
float length = line1.length;
ComplexLine *line3 = &line2;
int pathidx = 1;
Mat4::copy(matrixS, matrixS2);
if (line2.length < line1.length) {
length = line2.length;
line3 = &line1;
pathidx = 0;
Mat4::copy(matrixS, matrixS1);
}
float i = 0;
for (i = 0; i < length; i += 0.65) {
line1.getDrawPosition(posRot1, i);
line2.getDrawPosition(posRot2, i);
Vec3::transformMat4(posRot1, posRot1, matrixS1);
Vec3::transformMat4(posRot2, posRot2, matrixS2);
float distance = Vec3::dist(posRot1, posRot2)*0.5;
Vec3::add(posRot, posRot1, posRot2);
Vec3::scale(posRot, posRot, 0.5);
posRot[3] = posRot1[3]*0.5 + posRot2[3]*0.5;
posRot[4] = posRot1[4]*0.5 + posRot2[4]*0.5;
posRot[5] = posRot1[5]*0.5 + posRot2[5]*0.5;
Quat::fill(qr);
Quat::rotateY(qr, qr, -tsh->path[0].rotDeg * M_PI / 180.0);
Quat::fromRotationXYZ(q1, (float*) (posRot1 + 3));
Quat::multiply(q1, qr, q1);
Quat::fill(qr);
Quat::rotateY(qr, qr, -tsh->path[1].rotDeg * M_PI / 180.0);
Quat::fromRotationXYZ(q2, (float*) (posRot2 + 3));
Quat::multiply(q2, qr, q2);
if (!junct)
Quat::slerp(q, q1, q2, 0.5);
else if (tsh->mainroute == 0)
Quat::copy(q, q1);
else
Quat::copy(q, q2);
//Quat::multiply(q, q1, q2);
zangle = 0; //angles[j*2]*(1.0-i/line1.length) + angles[j*2+1]*(i/line1.length);
//Quat::rotateZ(q, q, zangle);
Mat4::fromRotationTranslation(matrix1, q, posRot);
//Quat::multiply(qr, qr, q);
//Mat4::multiply(matrix1, matrixS, matrix1);
primitives[0].push_back(ShapePrimitive());
primitives[0].back().data = new float[1800];
float* ptr = primitives[0].back().data;
primitives[0].back().count = tFile->count;
//qDebug() << "count" << tFile->count;
//Vec3::copy(primitives[0].back().pos, posRot);
//Vec3::transformMat4(primitives[0].back().pos, primitives[0].back().pos, matrixS);
//Mat4::copy(primitives[0].back().matrix, matrix1);
//Quat::copy(primitives[0].back().quat, qr);
//primitives[0].back().rotY = -tsh->path[0].rotDeg*M_PI/180.0;
//primitives[0].back().rotZ = zangle;
//primitives[0].back().templatePtr = tFile;
PushShapePart(ptr, tFile, 0.155, matrix1, q, distance);
}
for (; i < line3->length; i += 0.65) {
line3->getDrawPosition(posRot, i);
Quat::fill(qr);
Quat::rotateY(qr, qr, -tsh->path[pathidx].rotDeg * M_PI / 180.0);
Quat::fromRotationXYZ(q, (float*) (posRot + 3));
zangle = 0;
Quat::rotateZ(q, q, zangle);
Mat4::fromRotationTranslation(matrix1, q, posRot);
Quat::multiply(qr, qr, q);
Mat4::multiply(matrix1, matrixS, matrix1);
primitives[0].push_back(ShapePrimitive());
primitives[0].back().data = new float[1800];
float* ptr = primitives[0].back().data;
primitives[0].back().count = tFile->count;
//qDebug() << "count" << tFile->count;
Vec3::copy(primitives[0].back().pos, posRot);
Vec3::transformMat4(primitives[0].back().pos, primitives[0].back().pos, matrixS);
Mat4::copy(primitives[0].back().matrix, matrix1);
Quat::copy(primitives[0].back().quat, qr);
primitives[0].back().rotY = -tsh->path[pathidx].rotDeg * M_PI / 180.0;
primitives[0].back().rotZ = zangle;
primitives[0].back().templatePtr = tFile;
PushShapePart(ptr, tFile, 0.155, matrix1, qr);
}
}
/*for(int j = 0; j < tsh->numpaths; j++){
TrackShape::SectionIdx *section = &tsh->path[j];
QVector<TSection> sections;
for(int i = 0; i < section->n; i++){
if(Game::currentRoute->tsection->sekcja[(int)section->sect[i]] != NULL)
sections.push_back(*Game::currentRoute->tsection->sekcja[(int)section->sect[i]]);
}
//float* p = new float[4000000];
//float* ptr = p;
float q[4];
float qr[4];
float posRot[6];
float matrix1[16];
float matrix2[16];
ObjFile *tFile;
ComplexLine line;
line.init(sections);
//qDebug() << line.length << "length";
float pp[3];
float zangle;
Quat::fill(q);
Quat::rotateY(q, q, -tsh->path[j].rotDeg*M_PI/180.0);
Vec3::set(pp, -tsh->path[j].pos[0], tsh->path[j].pos[1], tsh->path[j].pos[2]);
Mat4::fromRotationTranslation(matrixS, q, pp);
primitives.push_back(QVector<ShapePrimitive>());
tFile = GetObjFile("inbk3.obj");
for(float i = 0; i < line.length; i += 0.65){
line.getDrawPosition(posRot, i);
Quat::fill(qr);
Quat::rotateY(qr, qr, -tsh->path[j].rotDeg*M_PI/180.0);
Quat::fromRotationXYZ(q, (float*)(posRot+3));
zangle = angles[j*2]*(1.0-i/line.length) + angles[j*2+1]*(i/line.length);
Quat::rotateZ(q, q, zangle);
Mat4::fromRotationTranslation(matrix1, q, posRot);
Quat::multiply(qr, qr, q);
Mat4::multiply(matrix1, matrixS, matrix1);
primitives[j].push_back(ShapePrimitive());
primitives[j].back().data = new float[1800];
float* ptr = primitives[j].back().data;
primitives[j].back().count = tFile->count;
qDebug() << "count" << tFile->count;
Vec3::copy(primitives[j].back().pos, posRot);
Vec3::transformMat4(primitives[j].back().pos, primitives[j].back().pos, matrixS);
Mat4::copy(primitives[j].back().matrix, matrix1);
Quat::copy(primitives[j].back().quat, qr);
primitives[j].back().rotY = -tsh->path[j].rotDeg*M_PI/180.0;
primitives[j].back().rotZ = zangle;
primitives[j].back().templatePtr = tFile;
PushShapePart(ptr, tFile, 0.155, matrix1, qr);
}
}
if(primitives.count() == 2){
primitives.push_back(QVector<ShapePrimitive>());
ObjFile *tFile;
float pos[3];
Vec3::set(pos, 0, 0, 0);
for(int j = 0; j < primitives[0].count(); j++){
for(int i = 0; i < primitives[1].count(); i++){
if(primitives[0][j].disabled || primitives[1][i].disabled)
continue;
if(Intersections::shapeIntersectsShape(
primitives[0][j].data,
primitives[1][i].data,
primitives[0][j].count*9,
primitives[1][i].count*9,
9,
9,
pos) > 0
){
primitives[0][j].disabled = true;
primitives[1][i].disabled = true;
primitives[2].push_back(ShapePrimitive());
primitives[2].back().data = new float[1800];
float* ptr = primitives[2].back().data;
tFile = primitives[0][j].templatePtr;
primitives[2].back().count = tFile->count;
float matrix[16];
float q[4];
float pos[3];
Vec3::add(pos, primitives[0][j].pos, primitives[1][i].pos);
Vec3::scale(pos, pos, 0.5);
/*
Quat::fill(q);
float zangle = primitives[0][j].rotZ*0.5 + primitives[1][i].rotZ * 0.5;
float yangle = primitives[0][j].rotY*0.5 + primitives[1][i].rotY * 0.5;
Quat::rotateY(q, q, yangle);
Quat::rotateZ(q, q, zangle);*/
//Quat::multiply(q, primitives[0][j].quat, primitives[1][i].quat);
//Mat4::fromRotationTranslation(matrix, q, pos);
//for(int i = 0; i < 16; i++){
// matrix[i] = primitives[0][j].matrix[i]*0.5 + primitives[1][i].matrix[i]*0.5;
//}
//Mat4::multiply(matrix, primitives[0][j].matrix, primitives[1][i].matrix);
//PushShapePart(ptr, tFile, 0.155, matrix, q);
//PushShapePart(ptr, tFile, 0.155, primitives[1][i].matrix, primitives[1][i].quat);
//}
//}
//}
//}*/
float* p = new float[4000000];
float* ptr = p;
QString* texturePath;
for (int i = 0; i < primitives.count(); i++) {
for (int j = 0; j < (primitives[i]).count(); j++) {
if ((primitives[i])[j].disabled)
continue;
memcpy(ptr, (primitives[i])[j].data, (primitives[i])[j].count * 9 * 4);
ptr += (primitives[i])[j].count * 9;
}
}
texturePath = new QString(ProceduralShape::GetTexturePath(stemplate->texture));
shape.push_back(new OglObj());
shape.back()->setMaterial(texturePath);
shape.back()->init(p, ptr - p, RenderItem::VNTA, GL_TRIANGLES);
shape.back()->setDistanceRange(stemplate->minDistance, stemplate->maxDistance);
delete[] p;
}
void ProceduralShape::PushShapePart(float* &ptr, ObjFile* tFile, float offsetY, float* matrix, float* qrot, float distance) {
int j = 0;
float p[3];
for (int i = 0; i < tFile->count; i++) {
p[0] = tFile->points[j++];
p[1] = tFile->points[j++];
p[2] = tFile->points[j++];
if (distance > 0.01) {
if (p[0] > 0)
p[0] += distance;
if (p[0] < 0)
p[0] -= distance;
}
Vec3::transformMat4(p, p, matrix);
*ptr++ = p[0];
*ptr++ = p[1] + offsetY;
*ptr++ = p[2];
p[0] = tFile->points[j++];
p[1] = tFile->points[j++];
p[2] = tFile->points[j++];
Vec3::transformQuat(p, p, qrot);
*ptr++ = p[0];
*ptr++ = p[1];
*ptr++ = p[2];
*ptr++ = tFile->points[j++];
*ptr++ = tFile->points[j++];
*ptr++ = Alpha;
}
}
void ProceduralShape::PushShapePartExpand(float* &ptr, ObjFile* tFile, float offsetY, float* matrix1, float* matrix2, float* qrot, float dist1, float dist2) {
int j = 0;
float p[3];
float texY = tFile->texYmin;
float texYstep = tFile->texYmax - tFile->texYmin;
float itexy = 0;
for (int i = 0; i < tFile->count; i++) {
p[0] = tFile->points[j++];
p[1] = tFile->points[j++];
p[2] = tFile->points[j++];
if (fabs(p[2]) < 0.5) {
p[2] = 0;
Vec3::transformMat4(p, p, matrix1);
itexy = texYstep*dist1;
} else {
p[2] = 0;
Vec3::transformMat4(p, p, matrix2);
itexy = texYstep*dist2;
}
*ptr++ = p[0];
*ptr++ = p[1] + offsetY;
*ptr++ = p[2];
p[0] = tFile->points[j++];
p[1] = tFile->points[j++];
p[2] = tFile->points[j++];
Vec3::transformQuat(p, p, qrot);
*ptr++ = p[0];
*ptr++ = p[1];
*ptr++ = p[2];
*ptr++ = tFile->points[j++];
j++;
*ptr++ = texY + itexy;
*ptr++ = Alpha;
}
}
void ProceduralShape::PushShapePartStretch(float* &ptr, ObjFile* tFile, float offsetY, float* matrix, float* qrot, float length) {
int j = 0;
float p[3];
for (int i = 0; i < tFile->count; i++) {
p[0] = tFile->points[j++];
p[1] = tFile->points[j++];
p[2] = tFile->points[j++]*length;
Vec3::transformMat4(p, p, matrix);
*ptr++ = p[0];
*ptr++ = p[1] + offsetY;
*ptr++ = p[2];
p[0] = tFile->points[j++];
p[1] = tFile->points[j++];
p[2] = tFile->points[j++];
Vec3::transformQuat(p, p, qrot);
*ptr++ = p[0];
*ptr++ = p[1];
*ptr++ = p[2];
*ptr++ = tFile->points[j++];
*ptr++ = tFile->points[j++];
*ptr++ = Alpha;
}
}<|fim▁end|> | float step = 3;
for (float i = 0; i < line.length; i += step) {
line.getDrawPosition(posRot, i, stemplate->xOffset);
|
<|file_name|>bmn-paraBlastKepler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
### cut a fasta query (arg1) in "nbPart" (arg3)
### subFasta files and launches a blast programm (arg4)
### vs the bank (arg2) for each subfasta and then
### concatenates the tab delimited blast results in
### one file. You can add any additional blastall option in the
### arg5 with quotes like this : "-e 0.001 -a 2 -W 5"
### example of execution :
### paraBlast.py query.fasta blastdb 10 tblastx "-e 0.001 -a 2 -W 5"
import string
import sys
import os
import subprocess
import tempfile
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from StringIO import StringIO
fastafile = sys.argv[1]
#fastafile = "/bank/fasta/Roth/E1.454.fasta"
bank = sys.argv[2]
#bank = "/bank/blastdb/E1"
nbPart=int(sys.argv[3])
#nbPart=int(5)
my_blast_prog = sys.argv[4]
#my_blast_prog = "tblastx"
blastOpt=sys.argv[5]
#blastOpt="-e 0.001 -a 2 -W 5"
my_blast_exe = "/usr/local/bin/blast/bin/blastall"
nbResidues=0
meanLen=0
nbSeqs=0
seqs=[]
#### reading the fasta file to cut
handle = open(fastafile)<|fim▁hole|>for seq_record in SeqIO.parse(handle, "fasta"):
seqs.append(seq_record)
nbSeqs+=1
nbResidues+=len(seq_record.seq)
handle.close()
#### prints some infos about the input fasta file
meanLen=nbResidues/nbSeqs
print "sequences -- residues -- mean sequence length"
print nbSeqs,"--",nbResidues,"--", meanLen
#### creates a temp directory and
#### writes the divided-input fasta files into it
wDir= "/scratch/USERS/prestat"
tmpDir=tempfile.mkdtemp(prefix="parablast",dir= wDir)
nbSeqsbyfile=nbSeqs/nbPart
modulo=nbSeqs%nbPart
iteSeqs=0
for i in range(0,nbPart-1):
tmpFasta=tempfile.mkstemp(dir=tmpDir,suffix="."+str(i)+".fasta")
SeqIO.write(seqs[iteSeqs:iteSeqs+nbSeqsbyfile], tmpFasta[1], "fasta")
iteSeqs+=nbSeqsbyfile
tmpFasta=tempfile.mkstemp(dir=tmpDir,suffix="."+str(nbPart)+".fasta")
SeqIO.write(seqs[iteSeqs:nbSeqs], tmpFasta[1], "fasta")
#### runs the blast
my_blast_files = os.listdir(tmpDir)
myProcesses=[]
for blast_file in my_blast_files:
cmd= "blastall -m 8"+" "+ " "+\
"-p"+" "+ my_blast_prog + " "+\
"-i"+" "+ tmpDir+"/"+blast_file + " "+\
"-d"+" "+ bank + " "+\
"-o"+" "+ tmpDir+"/"+blast_file.replace("fasta","blast") + " "+\
blastOpt
myProcesses.append(subprocess.Popen(cmd,shell=True))
#### waits for the end of all processes
for i in myProcesses:
i.wait()
#### concatenates the blast files results
#### and removes the temp files used
os.system("cat " + tmpDir+"/"+"*.blast > "+ wDir + '/' + str.split(fastafile,'/')[-1]+".vs."+str.split(bank,'/')[-1]+".blast")
os.system("rm -rf "+ tmpDir)<|fim▁end|> | |
<|file_name|>rules.py<|end_file_name|><|fim▁begin|># Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from typing import Tuple
from pants.backend.python.lint.docformatter.skip_field import SkipDocformatterField
from pants.backend.python.lint.docformatter.subsystem import Docformatter
from pants.backend.python.lint.python_fmt import PythonFmtRequest
from pants.backend.python.target_types import PythonSources
from pants.backend.python.util_rules import pex
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import PexRequest, PexRequirements, VenvPex, VenvPexProcess
from pants.core.goals.fmt import FmtResult
from pants.core.goals.lint import LintRequest, LintResult, LintResults
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.fs import Digest
from pants.engine.process import FallibleProcessResult, Process, ProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import FieldSet, Target
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
from pants.util.strutil import pluralize
@dataclass(frozen=True)
class DocformatterFieldSet(FieldSet):
required_fields = (PythonSources,)
sources: PythonSources
@classmethod
def opt_out(cls, tgt: Target) -> bool:
return tgt.get(SkipDocformatterField).value
class DocformatterRequest(PythonFmtRequest, LintRequest):
field_set_type = DocformatterFieldSet
@dataclass(frozen=True)
class SetupRequest:
request: DocformatterRequest
check_only: bool
@dataclass(frozen=True)
class Setup:
process: Process<|fim▁hole|> *, source_files: SourceFiles, docformatter: Docformatter, check_only: bool
) -> Tuple[str, ...]:
return ("--check" if check_only else "--in-place", *docformatter.args, *source_files.files)
@rule(level=LogLevel.DEBUG)
async def setup_docformatter(setup_request: SetupRequest, docformatter: Docformatter) -> Setup:
docformatter_pex_request = Get(
VenvPex,
PexRequest(
output_filename="docformatter.pex",
internal_only=True,
requirements=PexRequirements(docformatter.all_requirements),
interpreter_constraints=InterpreterConstraints(docformatter.interpreter_constraints),
main=docformatter.main,
),
)
source_files_request = Get(
SourceFiles,
SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
)
source_files, docformatter_pex = await MultiGet(source_files_request, docformatter_pex_request)
source_files_snapshot = (
source_files.snapshot
if setup_request.request.prior_formatter_result is None
else setup_request.request.prior_formatter_result
)
process = await Get(
Process,
VenvPexProcess(
docformatter_pex,
argv=generate_args(
source_files=source_files,
docformatter=docformatter,
check_only=setup_request.check_only,
),
input_digest=source_files_snapshot.digest,
output_files=source_files_snapshot.files,
description=(
f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'file')}."
),
level=LogLevel.DEBUG,
),
)
return Setup(process, original_digest=source_files_snapshot.digest)
@rule(desc="Format with docformatter", level=LogLevel.DEBUG)
async def docformatter_fmt(request: DocformatterRequest, docformatter: Docformatter) -> FmtResult:
if docformatter.skip:
return FmtResult.skip(formatter_name="Docformatter")
setup = await Get(Setup, SetupRequest(request, check_only=False))
result = await Get(ProcessResult, Process, setup.process)
return FmtResult.from_process_result(
result, original_digest=setup.original_digest, formatter_name="Docformatter"
)
@rule(desc="Lint with docformatter", level=LogLevel.DEBUG)
async def docformatter_lint(
request: DocformatterRequest, docformatter: Docformatter
) -> LintResults:
if docformatter.skip:
return LintResults([], linter_name="Docformatter")
setup = await Get(Setup, SetupRequest(request, check_only=True))
result = await Get(FallibleProcessResult, Process, setup.process)
return LintResults(
[LintResult.from_fallible_process_result(result)], linter_name="Docformatter"
)
def rules():
return [
*collect_rules(),
UnionRule(PythonFmtRequest, DocformatterRequest),
UnionRule(LintRequest, DocformatterRequest),
*pex.rules(),
]<|fim▁end|> | original_digest: Digest
def generate_args( |
<|file_name|>traversal.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Traversals over the DOM and flow trees, running the layout computations.
use construct::FlowConstructor;
use context::LayoutContext;
use display_list_builder::DisplayListBuildState;
use flow::{FlowFlags, Flow, GetBaseFlow, ImmutableFlowUtils};
use script_layout_interface::wrapper_traits::{LayoutNode, ThreadSafeLayoutNode};
use servo_config::opts;
use style::context::{SharedStyleContext, StyleContext};
use style::data::ElementData;
use style::dom::{NodeInfo, TElement, TNode};
use style::selector_parser::RestyleDamage;
use style::servo::restyle_damage::ServoRestyleDamage;
use style::traversal::{DomTraversal, recalc_style_at};
use style::traversal::PerLevelTraversalData;
use wrapper::{GetRawData, LayoutNodeLayoutData};
use wrapper::ThreadSafeLayoutNodeHelpers;
pub struct RecalcStyleAndConstructFlows<'a> {
context: LayoutContext<'a>,
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
pub fn layout_context(&self) -> &LayoutContext<'a> {
&self.context
}
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
/// Creates a traversal context, taking ownership of the shared layout context.
pub fn new(context: LayoutContext<'a>) -> Self {
RecalcStyleAndConstructFlows {
context: context,
}
}
/// Consumes this traversal context, returning ownership of the shared layout
/// context to the caller.
pub fn destroy(self) -> LayoutContext<'a> {
self.context
}
}
#[allow(unsafe_code)]
impl<'a, E> DomTraversal<E> for RecalcStyleAndConstructFlows<'a>
where E: TElement,
E::ConcreteNode: LayoutNode,
E::FontMetricsProvider: Send,
{
fn process_preorder<F>(&self, traversal_data: &PerLevelTraversalData,
context: &mut StyleContext<E>, node: E::ConcreteNode,
note_child: F)
where F: FnMut(E::ConcreteNode)
{
// FIXME(pcwalton): Stop allocating here. Ideally this should just be
// done by the HTML parser.
unsafe { node.initialize_data() };
if !node.is_text_node() {
let el = node.as_element().unwrap();
let mut data = el.mutate_data().unwrap();
recalc_style_at(self, traversal_data, context, el, &mut data, note_child);
}
}
fn process_postorder(&self, _style_context: &mut StyleContext<E>, node: E::ConcreteNode) {
construct_flows_at(&self.context, node);
}
fn text_node_needs_traversal(node: E::ConcreteNode, parent_data: &ElementData) -> bool {
// Text nodes never need styling. However, there are two cases they may need
// flow construction:
// (1) They child doesn't yet have layout data (preorder traversal initializes it).
// (2) The parent element has restyle damage (so the text flow also needs fixup).
node.get_raw_data().is_none() || !parent_data.damage.is_empty()
}
fn shared_context(&self) -> &SharedStyleContext {
&self.context.style_context
}
}
/// A top-down traversal.
pub trait PreorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&self, _flow: &mut Flow) -> bool {
true
}
/// Returns true if this node must be processed in-order. If this returns false,
/// we skip the operation for this node, but continue processing the descendants.
/// This is called *after* parent nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in preorder.
fn traverse(&self, flow: &mut Flow) {
if !self.should_process_subtree(flow) {
return;
}
if self.should_process(flow) {
self.process(flow);
}
for kid in flow.mut_base().child_iter_mut() {
self.traverse(kid);
}
}
/// Traverse the Absolute flow tree in preorder.
///
/// Traverse all your direct absolute descendants, who will then traverse
/// their direct absolute descendants.
///
/// Return true if the traversal is to continue or false to stop.
fn traverse_absolute_flows(&self, flow: &mut Flow) {
if self.should_process(flow) {
self.process(flow);
}
for descendant_link in flow.mut_base().abs_descendants.iter() {
self.traverse_absolute_flows(descendant_link)
}
}
}
/// A bottom-up traversal, with a optional in-order pass.
pub trait PostorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns false if this node must be processed in-order. If this returns false, we skip the
/// operation for this node, but continue processing the ancestors. This is called *after*
/// child nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in postorder.
fn traverse(&self, flow: &mut Flow) {
for kid in flow.mut_base().child_iter_mut() {
self.traverse(kid);
}
if self.should_process(flow) {
self.process(flow);<|fim▁hole|>
/// An in-order (sequential only) traversal.
pub trait InorderFlowTraversal {
/// The operation to perform. Returns the level of the tree we're at.
fn process(&mut self, flow: &mut Flow, level: u32);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&mut self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in-order.
fn traverse(&mut self, flow: &mut Flow, level: u32) {
if !self.should_process_subtree(flow) {
return;
}
self.process(flow, level);
for kid in flow.mut_base().child_iter_mut() {
self.traverse(kid, level + 1);
}
}
}
/// A bottom-up, parallelizable traversal.
pub trait PostorderNodeMutTraversal<ConcreteThreadSafeLayoutNode: ThreadSafeLayoutNode> {
/// The operation to perform. Return true to continue or false to stop.
fn process(&mut self, node: &ConcreteThreadSafeLayoutNode);
}
/// The flow construction traversal, which builds flows for styled nodes.
#[inline]
#[allow(unsafe_code)]
fn construct_flows_at<N>(context: &LayoutContext, node: N)
where N: LayoutNode,
{
debug!("construct_flows_at: {:?}", node);
// Construct flows for this node.
{
let tnode = node.to_threadsafe();
// Always reconstruct if incremental layout is turned off.
let nonincremental_layout = opts::get().nonincremental_layout;
if nonincremental_layout || tnode.restyle_damage() != RestyleDamage::empty() ||
node.as_element().map_or(false, |el| el.has_dirty_descendants()) {
let mut flow_constructor = FlowConstructor::new(context);
if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) {
flow_constructor.process(&tnode);
debug!("Constructed flow for {:?}: {:x}",
tnode,
tnode.flow_debug_id());
}
}
tnode.mutate_layout_data().unwrap().flags.insert(::data::LayoutDataFlags::HAS_BEEN_TRAVERSED);
}
if let Some(el) = node.as_element() {
unsafe { el.unset_dirty_descendants(); }
}
}
/// The bubble-inline-sizes traversal, the first part of layout computation. This computes
/// preferred and intrinsic inline-sizes and bubbles them up the tree.
pub struct BubbleISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for BubbleISizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
flow.bubble_inline_sizes();
flow.mut_base().restyle_damage.remove(ServoRestyleDamage::BUBBLE_ISIZES);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow.base().restyle_damage.contains(ServoRestyleDamage::BUBBLE_ISIZES)
}
}
/// The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for AssignISizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
flow.assign_inline_sizes(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow.base().restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW)
}
}
/// The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of
/// layout computation. Determines the final block-sizes for all layout objects and computes
/// positions. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignBSizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for AssignBSizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
// Can't do anything with anything that floats might flow through until we reach their
// inorder parent.
//
// NB: We must return without resetting the restyle bits for these, as we haven't actually
// reflowed anything!
if flow.floats_might_flow_through() {
return
}
flow.assign_block_size(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
let base = flow.base();
base.restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW) &&
// The fragmentation countainer is responsible for calling Flow::fragment recursively
!base.flags.contains(FlowFlags::CAN_BE_FRAGMENTED)
}
}
pub struct ComputeStackingRelativePositions<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for ComputeStackingRelativePositions<'a> {
#[inline]
fn should_process_subtree(&self, flow: &mut Flow) -> bool {
flow.base().restyle_damage.contains(ServoRestyleDamage::REPOSITION)
}
#[inline]
fn process(&self, flow: &mut Flow) {
flow.compute_stacking_relative_position(self.layout_context);
flow.mut_base().restyle_damage.remove(ServoRestyleDamage::REPOSITION)
}
}
pub struct BuildDisplayList<'a> {
pub state: DisplayListBuildState<'a>,
}
impl<'a> BuildDisplayList<'a> {
#[inline]
pub fn traverse(&mut self, flow: &mut Flow) {
let parent_stacking_context_id = self.state.current_stacking_context_id;
self.state.current_stacking_context_id = flow.base().stacking_context_id;
let parent_clipping_and_scrolling = self.state.current_clipping_and_scrolling;
self.state.current_clipping_and_scrolling = flow.clipping_and_scrolling();
flow.build_display_list(&mut self.state);
flow.mut_base().restyle_damage.remove(ServoRestyleDamage::REPAINT);
for kid in flow.mut_base().child_iter_mut() {
self.traverse(kid);
}
self.state.current_stacking_context_id = parent_stacking_context_id;
self.state.current_clipping_and_scrolling = parent_clipping_and_scrolling;
}
}<|fim▁end|> | }
}
} |
<|file_name|>day8.rs<|end_file_name|><|fim▁begin|>use std::io::prelude::*;
use std::fs::File;
const MAX_X: usize = 50;
const MAX_Y: usize = 6;
fn print_board(board: &Vec<Vec<bool>>) {
let mut count = 0;
for i in 0..MAX_Y {
let mut line = String::new();;
for j in 0..MAX_X {<|fim▁hole|> line += "#";
count += 1;
} else {
line += ".";
}
}
println!("{}", line);
}
println!("\n{} pixels lit", count);
}
fn add_rect(board: &mut Vec<Vec<bool>>, x: usize, y: usize)
{
for i in 0..y {
for j in 0..x {
board[i][j] = true;
}
}
}
fn rotate_row(board: &mut Vec<Vec<bool>>, y: usize)
{
let mut val = board[y][0];
for x in 1..MAX_X {
let temp = board[y][x];
board[y][x] = val;
val = temp;
}
board[y][0] = val;
}
fn rotate_column(board: &mut Vec<Vec<bool>>, x: usize)
{
let mut val = board[0][x];
for y in 1..MAX_Y {
let temp = board[y][x];
board[y][x] = val;
val = temp;
}
board[0][x] = val;
}
fn main() {
let mut f = File::open("day8.txt").unwrap();
let mut f_s = String::new();
f.read_to_string(&mut f_s).unwrap();
let mut board = vec![vec![false; 50]; 6];
for l in f_s.lines() {
let line = l.to_string();
if line.contains("rect") {
let (_, last) = l.split_at(5);
let mut last_str = last.to_string();
let cross_pos = last_str.find("x").unwrap();
last_str.remove(cross_pos);
let (x, y) = last_str.split_at(cross_pos);
add_rect(&mut board, x.parse().unwrap(), y.parse().unwrap())
} else if line.contains("column") {
let (_, last) = l.split_at(16);
let mut last_str = last.to_string();
let by_pos = last_str.find(" by").unwrap();
for _ in 0..4{
last_str.remove(by_pos);
}
let (x, times) = last_str.split_at(by_pos);
for _ in 0..times.parse().unwrap() {
rotate_column(&mut board, x.parse().unwrap())
}
} else if line.contains("row") {
let (_, last) = l.split_at(13);
let mut last_str = last.to_string();
let by_pos = last_str.find(" by").unwrap();
for _ in 0..4{
last_str.remove(by_pos);
}
let (y, times) = last_str.split_at(by_pos);
for _ in 0..times.parse().unwrap() {
rotate_row(&mut board, y.parse().unwrap())
}
}
}
print_board(&board);
}<|fim▁end|> | if board[i][j] { |
<|file_name|>print_MODFLOW_inputs_res_NWT.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Sun Sep 17 22:06:52 2017
Based on: print_MODFLOW_inputs_res_NWT.m
@author: gcng
"""
# print_MODFLOW_inputs
import numpy as np
import MODFLOW_NWT_lib as mf # functions to write individual MODFLOW files
import os # os functions
from ConfigParser import SafeConfigParser
parser = SafeConfigParser()
parser.read('settings.ini')
LOCAL_DIR = parser.get('settings', 'local_dir')
GSFLOW_DIR = LOCAL_DIR + "/GSFLOW"
# - directories
sw_2005_NWT = 2 # 1 for MODFLOW-2005; 2 for MODFLOW-NWT algorithm (both can be
# carried out with MODFLOW-NWT code)
fl_BoundConstH = 0 # 1 for const head at high elev boundary, needed for numerical
# convergence for AGU2016 poster. Maybe resolved with MODFLOW-NWT?
if sw_2005_NWT == 1:
# MODFLOW input files
GSFLOW_indir = GSFLOW_DIR + '/inputs/MODFLOW_2005/'<|fim▁hole|> # MODFLOW input files
GSFLOW_indir = GSFLOW_DIR + '/inputs/MODFLOW_NWT/'
# MODFLOW output files
GSFLOW_outdir = GSFLOW_DIR + '/outputs/MODFLOW_NWT/'
infile_pre = 'test2lay_py';
NLAY = 2;
DZ = [100, 50] # [NLAYx1] [m] ***testing
# DZ = [350, 100] # [NLAYx1] [m] ***testing
# length of transient stress period (follows 1-day steady-state period) [d]
# perlen_tr = 365; # [d], ok if too long
# perlen_tr = 365*5 + ceil(365*5/4); # [d], includes leap years; ok if too long (I think, but maybe run time is longer?)
perlen_tr = 365*30 + np.ceil(365*30/4) # [d], includes leap years; ok if too long (I think, but maybe run time is longer?)
GIS_indir = GSFLOW_DIR + '/DataToReadIn/GIS/';
# use restart file as initial cond (empty string to not use restart file)
fil_res_in = '' # empty string to not use restart file
#fil_res_in = '/home/gcng/workspace/Pfil_res_inrojectFiles/AndesWaterResources/GSFLOW/outputs/MODFLOW/test2lay_melt_30yr.out' % empty string to not use restart file
# for various files: ba6, dis, uzf, lpf
surfz_fil = GIS_indir + 'topo.asc'
# surfz_fil = GIS_indir + 'SRTM_new_20161208.asc'
# for various files: ba6, uzf
mask_fil = GIS_indir + 'basinmask_dischargept.asc'
# for sfr
reach_fil = GIS_indir + 'reach_data.txt'
segment_fil_all = [GIS_indir + 'segment_data_4A_INFORMATION_Man.csv',
GIS_indir + 'segment_data_4B_UPSTREAM_Man.csv',
GIS_indir + 'segment_data_4C_DOWNSTREAM_Man.csv']
# create MODFLOW input directory if it does not exist:
if not os.path.isdir(GSFLOW_indir):
os.makedirs(GSFLOW_indir)
# while we're at it, create MODFLOW output file if it does not exist:
if not os.path.isdir(GSFLOW_outdir):
os.makedirs(GSFLOW_outdir)
##
mf.write_dis_MOD2_f(GSFLOW_indir, infile_pre, surfz_fil, NLAY, DZ, perlen_tr);
mf.write_ba6_MOD3_2(GSFLOW_indir, infile_pre, mask_fil, fl_BoundConstH); # list this below write_dis_MOD2_f
# flow algorithm
if sw_2005_NWT == 1:
mf.write_lpf_MOD2_f2_2(GSFLOW_indir, infile_pre, surfz_fil, NLAY);
elif sw_2005_NWT == 2:
# MODFLOW-NWT files
mf.write_upw_MOD2_f2_2(GSFLOW_indir, infile_pre, surfz_fil, NLAY);
mf.NWT_write_file(GSFLOW_indir, infile_pre);
# unsat zone and streamflow input files
mf.make_uzf3_f_2(GSFLOW_indir, infile_pre, surfz_fil, mask_fil);
mf.make_sfr2_f_Mannings(GSFLOW_indir, infile_pre, reach_fil, segment_fil_all); # list this below write_dis_MOD2_f
# Write PCG file (only used for MODFLOW-2005, but this function also creates OC file)
mf.write_OC_PCG_MOD_f(GSFLOW_indir, infile_pre, perlen_tr);
# Write namefile
mf.write_nam_MOD_f2_NWT(GSFLOW_indir, GSFLOW_outdir, infile_pre, fil_res_in, sw_2005_NWT);<|fim▁end|> | # MODFLOW output files
GSFLOW_outdir = GSFLOW_DIR + '/outputs/MODFLOW_2005/'
elif sw_2005_NWT == 2: |
<|file_name|>ws_BinaryClass_100_SVC_sigmoid_sqlite_code_gen.py<|end_file_name|><|fim▁begin|>from sklearn2sql_heroku.tests.classification import generic as class_gen
<|fim▁hole|><|fim▁end|> |
class_gen.test_model("SVC_sigmoid" , "BinaryClass_100" , "sqlite") |
<|file_name|>HUnitDaughter2Woman_ConnectedLHS.py<|end_file_name|><|fim▁begin|>from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HUnitDaughter2Woman_ConnectedLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""<|fim▁hole|>
super(HUnitDaughter2Woman_ConnectedLHS, self).__init__(name='HUnitDaughter2Woman_ConnectedLHS', num_nodes=0, edges=[])
# Add the edges
self.add_edges([])
# Set the graph attributes
self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule']
self["MT_constraint__"] = """return True"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'HUnitDaughter2Woman_ConnectedLHS')
self["equations"] = []
# Set the node attributes
# match class Family(Fam) node
self.add_node()
self.vs[0]["MT_pre__attr1"] = """return True"""
self.vs[0]["MT_label__"] = """1"""
self.vs[0]["mm__"] = """MT_pre__Family"""
self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Fam')
# match class Child(Child) node
self.add_node()
self.vs[1]["MT_pre__attr1"] = """return True"""
self.vs[1]["MT_label__"] = """2"""
self.vs[1]["mm__"] = """MT_pre__Child"""
self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Child')
# match association null--daughters-->nullnode
self.add_node()
self.vs[2]["MT_pre__attr1"] = """return attr_value == "daughters" """
self.vs[2]["MT_label__"] = """3"""
self.vs[2]["mm__"] = """MT_pre__directLink_S"""
self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Famassoc2Child')
# Add the edges
self.add_edges([
(0,2), # match class null(Fam) -> association daughters
(2,1), # association null -> match class null(Child)
])
# define evaluation methods for each match class.
def eval_attr11(self, attr_value, this):
return True
def eval_attr12(self, attr_value, this):
return True
# define evaluation methods for each match association.
def eval_attr13(self, attr_value, this):
return attr_value == "daughters"
def constraint(self, PreNode, graph):
return True<|fim▁end|> | Creates the himesis graph representing the AToM3 model HUnitDaughter2Woman_ConnectedLHS
"""
# Flag this instance as compiled now
self.is_compiled = True |
<|file_name|>fan.py<|end_file_name|><|fim▁begin|>"""
Support for ISY994 fans.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/fan.isy994/
"""
import logging
from typing import Callable
from homeassistant.components.fan import (FanEntity, DOMAIN, SPEED_OFF,
SPEED_LOW, SPEED_MEDIUM,
SPEED_HIGH, SUPPORT_SET_SPEED)
from homeassistant.components.isy994 import (ISY994_NODES, ISY994_PROGRAMS,
ISYDevice)
from homeassistant.helpers.typing import ConfigType
_LOGGER = logging.getLogger(__name__)
VALUE_TO_STATE = {
0: SPEED_OFF,
63: SPEED_LOW,
64: SPEED_LOW,
190: SPEED_MEDIUM,
191: SPEED_MEDIUM,
255: SPEED_HIGH,<|fim▁hole|> STATE_TO_VALUE[VALUE_TO_STATE[key]] = key
def setup_platform(hass, config: ConfigType,
add_entities: Callable[[list], None], discovery_info=None):
"""Set up the ISY994 fan platform."""
devices = []
for node in hass.data[ISY994_NODES][DOMAIN]:
devices.append(ISYFanDevice(node))
for name, status, actions in hass.data[ISY994_PROGRAMS][DOMAIN]:
devices.append(ISYFanProgram(name, status, actions))
add_entities(devices)
class ISYFanDevice(ISYDevice, FanEntity):
"""Representation of an ISY994 fan device."""
@property
def speed(self) -> str:
"""Return the current speed."""
return VALUE_TO_STATE.get(self.value)
@property
def is_on(self) -> bool:
"""Get if the fan is on."""
return self.value != 0
def set_speed(self, speed: str) -> None:
"""Send the set speed command to the ISY994 fan device."""
self._node.on(val=STATE_TO_VALUE.get(speed, 255))
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Send the turn on command to the ISY994 fan device."""
self.set_speed(speed)
def turn_off(self, **kwargs) -> None:
"""Send the turn off command to the ISY994 fan device."""
self._node.off()
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_SET_SPEED
class ISYFanProgram(ISYFanDevice):
"""Representation of an ISY994 fan program."""
def __init__(self, name: str, node, actions) -> None:
"""Initialize the ISY994 fan program."""
super().__init__(node)
self._name = name
self._actions = actions
def turn_off(self, **kwargs) -> None:
"""Send the turn on command to ISY994 fan program."""
if not self._actions.runThen():
_LOGGER.error("Unable to turn off the fan")
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Send the turn off command to ISY994 fan program."""
if not self._actions.runElse():
_LOGGER.error("Unable to turn on the fan")
@property
def supported_features(self) -> int:
"""Flag supported features."""
return 0<|fim▁end|> | }
STATE_TO_VALUE = {}
for key in VALUE_TO_STATE: |
<|file_name|>_y.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class YValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="y", parent_name="volume.caps", **kwargs):
super(YValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Y"),
data_docs=kwargs.pop(
"data_docs",
"""
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the y `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
""",
),<|fim▁hole|><|fim▁end|> | **kwargs
) |
<|file_name|>newc.go<|end_file_name|><|fim▁begin|>// Copyright 2013-2017 the u-root Authors. All rights reserved
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cpio
import (
"bytes"
"encoding/binary"
"encoding/hex"
"fmt"
"io"
"os"
"github.com/u-root/u-root/pkg/uio"
)
const (
newcMagic = "070701"
magicLen = 6
)
// Newc is the newc CPIO record format.
var Newc RecordFormat = newc{magic: newcMagic}
type header struct {
Ino uint32
Mode uint32
UID uint32
GID uint32
NLink uint32
MTime uint32
FileSize uint32
Major uint32
Minor uint32
Rmajor uint32
Rminor uint32
NameLength uint32
CRC uint32
}
func headerFromInfo(i Info) header {
var h header
h.Ino = uint32(i.Ino)
h.Mode = uint32(i.Mode)
h.UID = uint32(i.UID)
h.GID = uint32(i.GID)
h.NLink = uint32(i.NLink)
h.MTime = uint32(i.MTime)
h.FileSize = uint32(i.FileSize)
h.Major = uint32(i.Major)
h.Minor = uint32(i.Minor)
h.Rmajor = uint32(i.Rmajor)
h.Rminor = uint32(i.Rminor)
h.NameLength = uint32(len(i.Name)) + 1
return h
}
func (h header) Info() Info {
var i Info
i.Ino = uint64(h.Ino)
i.Mode = uint64(h.Mode)
i.UID = uint64(h.UID)
i.GID = uint64(h.GID)
i.NLink = uint64(h.NLink)
i.MTime = uint64(h.MTime)
i.FileSize = uint64(h.FileSize)
i.Major = uint64(h.Major)
i.Minor = uint64(h.Minor)
i.Rmajor = uint64(h.Rmajor)
i.Rminor = uint64(h.Rminor)
return i
}
// newc implements RecordFormat for the newc format.
type newc struct {
magic string
}
// round4 returns the next multiple of 4 close to n.
func round4(n int64) int64 {
return (n + 3) &^ 0x3
}
type writer struct {
n newc
w io.Writer
pos int64
}
// Writer implements RecordFormat.Writer.
func (n newc) Writer(w io.Writer) RecordWriter {
return NewDedupWriter(&writer{n: n, w: w})
}
func (w *writer) Write(b []byte) (int, error) {
n, err := w.w.Write(b)
if err != nil {
return 0, err
}
w.pos += int64(n)
return n, nil
}
func (w *writer) pad() error {
if o := round4(w.pos); o != w.pos {
var pad [3]byte
if _, err := w.Write(pad[:o-w.pos]); err != nil {
return err
}
}
return nil
}
// WriteRecord writes newc cpio records. It pads the header+name write to 4
// byte alignment and pads the data write as well.
func (w *writer) WriteRecord(f Record) error {
// Write magic.
if _, err := w.Write([]byte(w.n.magic)); err != nil {
return err
}
buf := &bytes.Buffer{}
hdr := headerFromInfo(f.Info)
if f.ReaderAt == nil {
hdr.FileSize = 0
}
hdr.CRC = 0
if err := binary.Write(buf, binary.BigEndian, hdr); err != nil {
return err
}
hexBuf := make([]byte, hex.EncodedLen(buf.Len()))
n := hex.Encode(hexBuf, buf.Bytes())
// It's much easier to debug if we match GNU output format.
hexBuf = bytes.ToUpper(hexBuf)
// Write header.
if _, err := w.Write(hexBuf[:n]); err != nil {
return err
}
// Append NULL char.
cstr := append([]byte(f.Info.Name), 0)
// Write name.
if _, err := w.Write(cstr); err != nil {
return err
}
// Pad to a multiple of 4.
if err := w.pad(); err != nil {
return err
}
// Some files do not have any content.
if f.ReaderAt == nil {
return nil
}
// Write file contents.
m, err := io.Copy(w, uio.Reader(f))
if err != nil {
return err
}
if m != int64(f.Info.FileSize) {
return fmt.Errorf("WriteRecord: %s: wrote %d bytes of file instead of %d bytes; archive is now corrupt", f.Info.Name, m, f.Info.FileSize)
}
if c, ok := f.ReaderAt.(io.Closer); ok {
if err := c.Close(); err != nil {
return err
}
}
if m > 0 {
return w.pad()
}
return nil
}
type reader struct {
n newc
r io.ReaderAt
pos int64
}
// discarder is used to implement ReadAt from a Reader<|fim▁hole|>// by reading, and discarding, data until the offset
// is reached. It can only go forward. It is designed
// for pipe-like files.
type discarder struct {
r io.Reader
pos int64
}
// ReadAt implements ReadAt for a discarder.
// It is an error for the offset to be negative.
func (r *discarder) ReadAt(p []byte, off int64) (int, error) {
if off-r.pos < 0 {
return 0, fmt.Errorf("negative seek on discarder not allowed")
}
if off != r.pos {
i, err := io.Copy(io.Discard, io.LimitReader(r.r, off-r.pos))
if err != nil || i != off-r.pos {
return 0, err
}
r.pos += i
}
n, err := io.ReadFull(r.r, p)
if err != nil {
return n, err
}
r.pos += int64(n)
return n, err
}
var _ io.ReaderAt = &discarder{}
// Reader implements RecordFormat.Reader.
func (n newc) Reader(r io.ReaderAt) RecordReader {
return EOFReader{&reader{n: n, r: r}}
}
// NewFileReader implements RecordFormat.Reader. If the file
// implements ReadAt, then it is used for greater efficiency.
// If it only implements Read, then a discarder will be used
// instead.
// Note a complication:
// r, _, _ := os.Pipe()
// var b [2]byte
// _, err := r.ReadAt(b[:], 0)
// fmt.Printf("%v", err)
// Pipes claim to implement ReadAt; most Unix kernels
// do not agree. Even a seek to the current position fails.
// This means that
// if rat, ok := r.(io.ReaderAt); ok {
// would seem to work, but would fail when the
// actual ReadAt on the pipe occurs, even for offset 0,
// which does not require a seek! The kernel checks for
// whether the fd is seekable and returns an error,
// even for values of offset which won't require a seek.
// So, the code makes a simple test: can we seek to
// current offset? If not, then the file is wrapped with a
// discardreader. The discard reader is far less efficient
// but allows cpio to read from a pipe.
func (n newc) NewFileReader(f *os.File) (RecordReader, error) {
_, err := f.Seek(0, 0)
if err == nil {
return EOFReader{&reader{n: n, r: f}}, nil
}
return EOFReader{&reader{n: n, r: &discarder{r: f}}}, nil
}
func (r *reader) read(p []byte) error {
n, err := r.r.ReadAt(p, r.pos)
if err == io.EOF {
return io.EOF
}
if err != nil || n != len(p) {
return fmt.Errorf("ReadAt(pos = %d): got %d, want %d bytes; error %v", r.pos, n, len(p), err)
}
r.pos += int64(n)
return nil
}
func (r *reader) readAligned(p []byte) error {
err := r.read(p)
r.pos = round4(r.pos)
return err
}
// ReadRecord implements RecordReader for the newc cpio format.
func (r *reader) ReadRecord() (Record, error) {
hdr := header{}
recPos := r.pos
buf := make([]byte, hex.EncodedLen(binary.Size(hdr))+magicLen)
if err := r.read(buf); err != nil {
return Record{}, err
}
// Check the magic.
if magic := string(buf[:magicLen]); magic != r.n.magic {
return Record{}, fmt.Errorf("reader: magic got %q, want %q", magic, r.n.magic)
}
// Decode hex header fields.
dst := make([]byte, binary.Size(hdr))
if _, err := hex.Decode(dst, buf[magicLen:]); err != nil {
return Record{}, fmt.Errorf("reader: error decoding hex: %v", err)
}
if err := binary.Read(bytes.NewReader(dst), binary.BigEndian, &hdr); err != nil {
return Record{}, err
}
Debug("Decoded header is %v\n", hdr)
// Get the name.
nameBuf := make([]byte, hdr.NameLength)
if err := r.readAligned(nameBuf); err != nil {
Debug("name read failed")
return Record{}, err
}
info := hdr.Info()
info.Name = string(nameBuf[:hdr.NameLength-1])
recLen := uint64(r.pos - recPos)
filePos := r.pos
content := io.NewSectionReader(r.r, r.pos, int64(hdr.FileSize))
r.pos = round4(r.pos + int64(hdr.FileSize))
return Record{
Info: info,
ReaderAt: content,
RecLen: recLen,
RecPos: recPos,
FilePos: filePos,
}, nil
}
func init() {
formatMap["newc"] = Newc
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .DiscreteFactor import State, DiscreteFactor
from .CPD import TabularCPD<|fim▁hole|> 'State'
]<|fim▁end|> | from .JointProbabilityDistribution import JointProbabilityDistribution
__all__ = ['TabularCPD',
'DiscreteFactor', |
<|file_name|>vlog.go<|end_file_name|><|fim▁begin|>package common
import (
"fmt"
"os"
)
type LogLevelType int
var (
ShowLogLevel = true
ErrorsFatal = true
LogLevel = LogLevelType(0)
)
var (
levelString = []string{"[ERR] ", "[WRN] ", "[LOG] "}
unknownLevel = "[???] "
)
func (l LogLevelType) String() string {
if l < 0 || int(l) >= len(levelString) {
return unknownLevel
}
s := levelString[l]
return s
}
const ( //FIXME imp. a type w/ String() instead
ERR = iota
WARN
LOG
)
const (
LogLevelMin = 0
LogLevelMax = 2
)
func SetLogLevel(l uint) {
if l > LogLevelMax {
l = LogLevelMax
}
LogLevel = LogLevelType(int(l))
}
/* Verbose print function.
* Prints out given message on a given level (with proper suffix if ShowLogLevel is set)
* If level is ERR, exits the program with error code 1. */
func Printf(level LogLevelType, format string, a ...interface{}) {
if level == ERR && ErrorsFatal {
defer os.Exit(1)
}
if LogLevel < level {<|fim▁hole|> }
fmt.Fprintf(os.Stderr, format, a...)
}
func Print(level LogLevelType, a ...interface{}) {
if level == ERR && ErrorsFatal {
defer os.Exit(1)
}
if LogLevel < level {
return
}
if ShowLogLevel {
fmt.Fprint(os.Stderr, level)
}
fmt.Fprint(os.Stderr, a...)
}
func Println(level LogLevelType, a ...interface{}) {
if level == ERR && ErrorsFatal {
defer os.Exit(1)
}
if LogLevel < level {
return
}
if ShowLogLevel {
fmt.Fprint(os.Stderr, level)
}
fmt.Fprintln(os.Stderr, a...)
}
func Logf(format string, a ...interface{}) {
Printf(LOG, format, a...)
}
func Log(a ...interface{}) {
Print(LOG, a...)
}
func Logln(a ...interface{}) {
Println(LOG, a...)
}
func Warnf(format string, a ...interface{}) {
Printf(WARN, format, a...)
}
func Warn(a ...interface{}) {
Print(WARN, a...)
}
func Warnln(a ...interface{}) {
Println(WARN, a...)
}
func Errorf(format string, a ...interface{}) {
Printf(ERR, format, a...)
}
func Error(a ...interface{}) {
Print(ERR, a...)
}
func Errorln(a ...interface{}) {
Println(ERR, a...)
}<|fim▁end|> | return
}
if ShowLogLevel {
fmt.Fprint(os.Stderr, level) |
<|file_name|>query.go<|end_file_name|><|fim▁begin|>// Copyright 2015, Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package vtctl
import (
"encoding/hex"
"encoding/json"
"flag"
"fmt"
"strconv"
"strings"
"time"
"github.com/youtube/vitess/go/vt/tabletserver/tabletconn"
"github.com/youtube/vitess/go/vt/topo"
"github.com/youtube/vitess/go/vt/topo/topoproto"
"github.com/youtube/vitess/go/vt/vtgate/vtgateconn"
"github.com/youtube/vitess/go/vt/wrangler"
"golang.org/x/net/context"
topodatapb "github.com/youtube/vitess/go/vt/proto/topodata"
)
// This file contains the query command group for vtctl.
const queriesGroupName = "Queries"
func init() {
addCommandGroup(queriesGroupName)
// VtGate commands
addCommand(queriesGroupName, command{
"VtGateExecute",
commandVtGateExecute,
"-server <vtgate> [-bind_variables <JSON map>] [-connect_timeout <connect timeout>] [-tablet_type <tablet type>] <sql>",
"Executes the given SQL query with the provided bound variables against the vtgate server."})
addCommand(queriesGroupName, command{
"VtGateExecuteShards",
commandVtGateExecuteShards,
"-server <vtgate> -keyspace <keyspace> -shards <shard0>,<shard1>,... [-bind_variables <JSON map>] [-connect_timeout <connect timeout>] [-tablet_type <tablet type>] <sql>",
"Executes the given SQL query with the provided bound variables against the vtgate server. It is routed to the provided shards."})
addCommand(queriesGroupName, command{
"VtGateExecuteKeyspaceIds",
commandVtGateExecuteKeyspaceIds,
"-server <vtgate> -keyspace <keyspace> -keyspace_ids <ks1 in hex>,<k2 in hex>,... [-bind_variables <JSON map>] [-connect_timeout <connect timeout>] [-tablet_type <tablet type>] <sql>",
"Executes the given SQL query with the provided bound variables against the vtgate server. It is routed to the shards that contain the provided keyspace ids."})
addCommand(queriesGroupName, command{
"VtGateSplitQuery",
commandVtGateSplitQuery,
"-server <vtgate> -keyspace <keyspace> [-split_column <split_column>] -split_count <split_count> [-bind_variables <JSON map>] [-connect_timeout <connect timeout>] <sql>",
"Executes the SplitQuery computation for the given SQL query with the provided bound variables against the vtgate server (this is the base query for Map-Reduce workloads, and is provided here for debug / test purposes)."})
// VtTablet commands
addCommand(queriesGroupName, command{
"VtTabletExecute",
commandVtTabletExecute,
"[-bind_variables <JSON map>] [-connect_timeout <connect timeout>] [-transaction_id <transaction_id>] [-tablet_type <tablet_type>] -keyspace <keyspace> -shard <shard> <tablet alias> <sql>",
"Executes the given query on the given tablet."})
addCommand(queriesGroupName, command{
"VtTabletBegin",
commandVtTabletBegin,
"[-connect_timeout <connect timeout>] [-tablet_type <tablet_type>] -keyspace <keyspace> -shard <shard> <tablet alias>",
"Starts a transaction on the provided server."})
addCommand(queriesGroupName, command{
"VtTabletCommit",
commandVtTabletCommit,
"[-connect_timeout <connect timeout>] [-tablet_type <tablet_type>] -keyspace <keyspace> -shard <shard> <tablet alias> <transaction_id>",
"Commits a transaction on the provided server."})
addCommand(queriesGroupName, command{
"VtTabletRollback",
commandVtTabletRollback,
"[-connect_timeout <connect timeout>] [-tablet_type <tablet_type>] -keyspace <keyspace> -shard <shard> <tablet alias> <transaction_id>",
"Rollbacks a transaction on the provided server."})
addCommand(queriesGroupName, command{
"VtTabletStreamHealth",
commandVtTabletStreamHealth,
"[-count <count, default 1>] [-connect_timeout <connect timeout>] <tablet alias>",
"Executes the StreamHealth streaming query to a vttablet process. Will stop after getting <count> answers."})
}
type bindvars map[string]interface{}
func (bv *bindvars) String() string {
b, err := json.Marshal(bv)
if err != nil {
return err.Error()
}
return string(b)
}
func (bv *bindvars) Set(s string) (err error) {
err = json.Unmarshal([]byte(s), &bv)
if err != nil {
return fmt.Errorf("error json-unmarshaling '%v': %v", s, err)
}
// json reads all numbers as float64
// So, we just ditch floats for bindvars
for k, v := range *bv {
if f, ok := v.(float64); ok {
if f > 0 {
(*bv)[k] = uint64(f)
} else {
(*bv)[k] = int64(f)
}
}
}
return nil
}
// For internal flag compatibility
func (bv *bindvars) Get() interface{} {
return bv
}
func newBindvars(subFlags *flag.FlagSet) *bindvars {
var bv bindvars
subFlags.Var(&bv, "bind_variables", "bind variables as a json list")
return &bv
}
func commandVtGateExecute(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
server := subFlags.String("server", "", "VtGate server to connect to")
bindVariables := newBindvars(subFlags)
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vtgate client")
tabletType := subFlags.String("tablet_type", "master", "tablet type to query")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 1 {
return fmt.Errorf("the <sql> argument is required for the VtGateExecute command")
}
t, err := parseTabletType(*tabletType, []topodatapb.TabletType{topodatapb.TabletType_MASTER, topodatapb.TabletType_REPLICA, topodatapb.TabletType_RDONLY})
if err != nil {
return err
}
vtgateConn, err := vtgateconn.Dial(ctx, *server, *connectTimeout)
if err != nil {
return fmt.Errorf("error connecting to vtgate '%v': %v", *server, err)
}
defer vtgateConn.Close()
qr, err := vtgateConn.Execute(ctx, subFlags.Arg(0), *bindVariables, t)
if err != nil {
return fmt.Errorf("Execute failed: %v", err)
}
return printJSON(wr, qr)
}
func commandVtGateExecuteShards(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
server := subFlags.String("server", "", "VtGate server to connect to")
bindVariables := newBindvars(subFlags)
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vtgate client")
tabletType := subFlags.String("tablet_type", "master", "tablet type to query")
keyspace := subFlags.String("keyspace", "", "keyspace to send query to")
shardsStr := subFlags.String("shards", "", "comma-separated list of shards to send query to")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 1 {
return fmt.Errorf("the <sql> argument is required for the VtGateExecuteShards command")
}
t, err := parseTabletType(*tabletType, []topodatapb.TabletType{topodatapb.TabletType_MASTER, topodatapb.TabletType_REPLICA, topodatapb.TabletType_RDONLY})
if err != nil {
return err
}
var shards []string
if *shardsStr != "" {
shards = strings.Split(*shardsStr, ",")
}
vtgateConn, err := vtgateconn.Dial(ctx, *server, *connectTimeout)
if err != nil {
return fmt.Errorf("error connecting to vtgate '%v': %v", *server, err)
}
defer vtgateConn.Close()
qr, err := vtgateConn.ExecuteShards(ctx, subFlags.Arg(0), *keyspace, shards, *bindVariables, t)
if err != nil {
return fmt.Errorf("Execute failed: %v", err)
}
return printJSON(wr, qr)
}
func commandVtGateExecuteKeyspaceIds(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
server := subFlags.String("server", "", "VtGate server to connect to")
bindVariables := newBindvars(subFlags)
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vtgate client")
tabletType := subFlags.String("tablet_type", "master", "tablet type to query")
keyspace := subFlags.String("keyspace", "", "keyspace to send query to")
keyspaceIDsStr := subFlags.String("keyspace_ids", "", "comma-separated list of keyspace ids (in hex) that will map into shards to send query to")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 1 {
return fmt.Errorf("the <sql> argument is required for the VtGateExecuteKeyspaceIds command")
}
t, err := parseTabletType(*tabletType, []topodatapb.TabletType{topodatapb.TabletType_MASTER, topodatapb.TabletType_REPLICA, topodatapb.TabletType_RDONLY})
if err != nil {
return err
}
var keyspaceIDs [][]byte
if *keyspaceIDsStr != "" {
keyspaceIDHexs := strings.Split(*keyspaceIDsStr, ",")
keyspaceIDs = make([][]byte, len(keyspaceIDHexs))
for i, keyspaceIDHex := range keyspaceIDHexs {
keyspaceIDs[i], err = hex.DecodeString(keyspaceIDHex)
if err != nil {
return fmt.Errorf("cannot hex-decode value %v '%v': %v", i, keyspaceIDHex, err)
}
}
}
vtgateConn, err := vtgateconn.Dial(ctx, *server, *connectTimeout)
if err != nil {
return fmt.Errorf("error connecting to vtgate '%v': %v", *server, err)
}
defer vtgateConn.Close()
qr, err := vtgateConn.ExecuteKeyspaceIds(ctx, subFlags.Arg(0), *keyspace, keyspaceIDs, *bindVariables, t)
if err != nil {
return fmt.Errorf("Execute failed: %v", err)
}
return printJSON(wr, qr)
}
func commandVtGateSplitQuery(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
server := subFlags.String("server", "", "VtGate server to connect to")
bindVariables := newBindvars(subFlags)
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vtgate client")
splitColumn := subFlags.String("split_column", "", "force the use of this column to split the query")
splitCount := subFlags.Int("split_count", 16, "number of splits to generate")
keyspace := subFlags.String("keyspace", "", "keyspace to send query to")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 1 {
return fmt.Errorf("the <sql> argument is required for the VtGateSplitQuery command")
}
vtgateConn, err := vtgateconn.Dial(ctx, *server, *connectTimeout)
if err != nil {
return fmt.Errorf("error connecting to vtgate '%v': %v", *server, err)
}
defer vtgateConn.Close()
r, err := vtgateConn.SplitQuery(ctx, *keyspace, subFlags.Arg(0), *bindVariables, *splitColumn, int64(*splitCount))
if err != nil {
return fmt.Errorf("SplitQuery failed: %v", err)
}
return printJSON(wr, r)
}
func commandVtTabletExecute(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
transactionID := subFlags.Int("transaction_id", 0, "transaction id to use, if inside a transaction.")
bindVariables := newBindvars(subFlags)
keyspace := subFlags.String("keyspace", "", "keyspace the tablet belongs to")
shard := subFlags.String("shard", "", "shard the tablet belongs to")
tabletType := subFlags.String("tablet_type", "unknown", "tablet type we expect from the tablet (use unknown to use sessionId)")
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vttablet client")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 2 {
return fmt.Errorf("the <tablet_alias> and <sql> arguments are required for the VtTabletExecute command")
}
tt, err := topoproto.ParseTabletType(*tabletType)
if err != nil {
return err
}
tabletAlias, err := topoproto.ParseTabletAlias(subFlags.Arg(0))
if err != nil {
return err
}
tabletInfo, err := wr.TopoServer().GetTablet(ctx, tabletAlias)
if err != nil {
return err
}
ep, err := topo.TabletEndPoint(tabletInfo.Tablet)
if err != nil {
return fmt.Errorf("cannot get EndPoint from tablet record: %v", err)
}
conn, err := tabletconn.GetDialer()(ctx, ep, *keyspace, *shard, tt, *connectTimeout)
if err != nil {
return fmt.Errorf("cannot connect to tablet %v: %v", tabletAlias, err)
}
defer conn.Close()
qr, err := conn.Execute(ctx, subFlags.Arg(1), *bindVariables, int64(*transactionID))
if err != nil {
return fmt.Errorf("Execute failed: %v", err)
}
return printJSON(wr, qr)
}
func commandVtTabletBegin(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
keyspace := subFlags.String("keyspace", "", "keyspace the tablet belongs to")
shard := subFlags.String("shard", "", "shard the tablet belongs to")
tabletType := subFlags.String("tablet_type", "unknown", "tablet type we expect from the tablet (use unknown to use sessionId)")
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vttablet client")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 1 {
return fmt.Errorf("the <tablet_alias> argument is required for the VtTabletBegin command")
}
tt, err := topoproto.ParseTabletType(*tabletType)
if err != nil {
return err
}
tabletAlias, err := topoproto.ParseTabletAlias(subFlags.Arg(0))
if err != nil {
return err
}
tabletInfo, err := wr.TopoServer().GetTablet(ctx, tabletAlias)
if err != nil {
return err
}
ep, err := topo.TabletEndPoint(tabletInfo.Tablet)
if err != nil {
return fmt.Errorf("cannot get EndPoint from tablet record: %v", err)
}
conn, err := tabletconn.GetDialer()(ctx, ep, *keyspace, *shard, tt, *connectTimeout)
if err != nil {
return fmt.Errorf("cannot connect to tablet %v: %v", tabletAlias, err)
}
defer conn.Close()
transactionID, err := conn.Begin(ctx)
if err != nil {
return fmt.Errorf("Begin failed: %v", err)
}
result := map[string]int64{
"transaction_id": transactionID,
}
return printJSON(wr, result)
}
func commandVtTabletCommit(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
keyspace := subFlags.String("keyspace", "", "keyspace the tablet belongs to")
shard := subFlags.String("shard", "", "shard the tablet belongs to")
tabletType := subFlags.String("tablet_type", "unknown", "tablet type we expect from the tablet (use unknown to use sessionId)")
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vttablet client")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 2 {
return fmt.Errorf("the <tablet_alias> and <transaction_id> arguments are required for the VtTabletCommit command")
}
transactionID, err := strconv.ParseInt(subFlags.Arg(1), 10, 64)
if err != nil {
return err
}
tt, err := topoproto.ParseTabletType(*tabletType)
if err != nil {
return err
}
tabletAlias, err := topoproto.ParseTabletAlias(subFlags.Arg(0))
if err != nil {
return err
}
tabletInfo, err := wr.TopoServer().GetTablet(ctx, tabletAlias)
if err != nil {
return err
}
ep, err := topo.TabletEndPoint(tabletInfo.Tablet)
if err != nil {
return fmt.Errorf("cannot get EndPoint from tablet record: %v", err)
}
conn, err := tabletconn.GetDialer()(ctx, ep, *keyspace, *shard, tt, *connectTimeout)
if err != nil {
return fmt.Errorf("cannot connect to tablet %v: %v", tabletAlias, err)
}
defer conn.Close()
return conn.Commit(ctx, transactionID)
}
func commandVtTabletRollback(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
keyspace := subFlags.String("keyspace", "", "keyspace the tablet belongs to")
shard := subFlags.String("shard", "", "shard the tablet belongs to")
tabletType := subFlags.String("tablet_type", "unknown", "tablet type we expect from the tablet (use unknown to use sessionId)")
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vttablet client")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 2 {
return fmt.Errorf("the <tablet_alias> and <transaction_id> arguments are required for the VtTabletRollback command")
}
transactionID, err := strconv.ParseInt(subFlags.Arg(1), 10, 64)
if err != nil {
return err
}
tt, err := topoproto.ParseTabletType(*tabletType)
if err != nil {
return err
}
tabletAlias, err := topoproto.ParseTabletAlias(subFlags.Arg(0))
if err != nil {
return err
}
tabletInfo, err := wr.TopoServer().GetTablet(ctx, tabletAlias)
if err != nil {
return err
}
ep, err := topo.TabletEndPoint(tabletInfo.Tablet)
if err != nil {
return fmt.Errorf("cannot get EndPoint from tablet record: %v", err)
}
conn, err := tabletconn.GetDialer()(ctx, ep, *keyspace, *shard, tt, *connectTimeout)
if err != nil {
return fmt.Errorf("cannot connect to tablet %v: %v", tabletAlias, err)
}
defer conn.Close()
return conn.Rollback(ctx, transactionID)
}
func commandVtTabletStreamHealth(ctx context.Context, wr *wrangler.Wrangler, subFlags *flag.FlagSet, args []string) error {
count := subFlags.Int("count", 1, "number of responses to wait for")
connectTimeout := subFlags.Duration("connect_timeout", 30*time.Second, "Connection timeout for vttablet client")
if err := subFlags.Parse(args); err != nil {
return err
}
if subFlags.NArg() != 1 {
return fmt.Errorf("The <tablet alias> argument is required for the VtTabletStreamHealth command.")
}
tabletAlias, err := topoproto.ParseTabletAlias(subFlags.Arg(0))
if err != nil {
return err
}
tabletInfo, err := wr.TopoServer().GetTablet(ctx, tabletAlias)
if err != nil {
return err
}
ep, err := topo.TabletEndPoint(tabletInfo.Tablet)<|fim▁hole|> // pass in a non-UNKNOWN tablet type to not use sessionId
conn, err := tabletconn.GetDialer()(ctx, ep, "", "", topodatapb.TabletType_MASTER, *connectTimeout)
if err != nil {
return fmt.Errorf("cannot connect to tablet %v: %v", tabletAlias, err)
}
stream, errFunc, err := conn.StreamHealth(ctx)
if err != nil {
return err
}
for i := 0; i < *count; i++ {
shr, ok := <-stream
if !ok {
return fmt.Errorf("stream ended early: %v", errFunc())
}
data, err := json.Marshal(shr)
if err != nil {
wr.Logger().Errorf("cannot json-marshal structure: %v", err)
} else {
wr.Logger().Printf("%v\n", string(data))
}
}
return nil
}<|fim▁end|> | if err != nil {
return fmt.Errorf("cannot get EndPoint from tablet record: %v", err)
}
|
<|file_name|>test_admin_global_index.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the main global admin page."""
import copy
import config
import view_tests_base
class AdminGlobalIndexViewTests(view_tests_base.ViewTestsBase):
"""Tests the global admin index view."""
_PRIOR_CONFIG = {
'sms_number_to_repo': '{"+15551234567": "haiti"}',
'repo_aliases': '{"h": "haiti"}',
'brand': 'none',
'privacy_policy_url': 'www.example.com/privacy',
'tos_url': 'www.example.com/tos',
'feedback_url': 'www.example.com/feedback',
'captcha_site_key': 'captcha-key',
'captcha_secret_key': 'captcha-secret-key',
'analytics_id': 'analytics-id',
'amp_gtm_id': 'amp-gtm-id',
'maps_api_key': 'maps-api-key',
'translate_api_key': 'translate-api-key',
'notification_email': 'notifications@example.com',
'unreviewed_notes_threshold': 12,
}
_BASE_POST_PARAMS = {
'sms_number_to_repo': '{"+15551234567": "haiti"}',
'repo_aliases': '{"h": "haiti"}',
'brand': 'none',
'privacy_policy_url': 'www.example.com/privacy',
'tos_url': 'www.example.com/tos',
'feedback_url': 'www.example.com/feedback',
'captcha_site_key': 'captcha-key',
'captcha_secret_key': 'captcha-secret-key',
'analytics_id': 'analytics-id',
'amp_gtm_id': 'amp-gtm-id',
'maps_api_key': 'maps-api-key',
'translate_api_key': 'translate-api-key',
'notification_email': 'notifications@example.com',
'unreviewed_notes_threshold': '12',
}
def setUp(self):
super(AdminGlobalIndexViewTests, self).setUp()
self.data_generator.repo()
config.set_for_repo('*', **AdminGlobalIndexViewTests._PRIOR_CONFIG)
self.login_as_superadmin()
def test_get(self):
"""Tests GET requests."""
resp = self.client.get('/global/admin/', secure=True)
self.assertEqual(
resp.context.get('sms_config'), {
'sms_number_to_repo': '"{\\"+15551234567\\": \\"haiti\\"}"',
})
self.assertEqual(
resp.context.get('repo_alias_config'), {
'repo_aliases': '"{\\"h\\": \\"haiti\\"}"',
})
self.assertEqual(
resp.context.get('site_info_config'), {
'brand': 'none',
'privacy_policy_url': 'www.example.com/privacy',
'tos_url': 'www.example.com/tos',
'feedback_url': 'www.example.com/feedback',
})
self.assertEqual(
resp.context.get('recaptcha_config'), {
'captcha_site_key': 'captcha-key',
'captcha_secret_key': 'captcha-secret-key',
})
self.assertEqual(
resp.context.get('ganalytics_config'), {
'analytics_id': 'analytics-id',
'amp_gtm_id': 'amp-gtm-id',
})
self.assertEqual(
resp.context.get('gmaps_config'), {
'maps_api_key': 'maps-api-key',
})
self.assertEqual(
resp.context.get('gtranslate_config'), {
'translate_api_key': 'translate-api-key',
})
self.assertEqual(
resp.context.get('notification_config'), {
'notification_email': 'notifications@example.com',
'unreviewed_notes_threshold': '12',
})<|fim▁hole|> conf = config.Configuration('*')
self.assertEqual(conf.sms_number_to_repo, {'+1800pfhaiti': 'haiti'})
def test_edit_repo_alias_config(self):
self._post_with_params(repo_aliases='{"e": "ecuador"}')
conf = config.Configuration('*')
self.assertEqual(conf.repo_aliases, {'e': 'ecuador'})
def test_edit_site_info_config(self):
self._post_with_params(
brand='google',
privacy_policy_url='othersite.org/privacy',
tos_url='othersite.org/tos',
feedback_url='othersite.org/feedback')
conf = config.Configuration('*')
self.assertEqual(conf.brand, 'google')
self.assertEqual(conf.privacy_policy_url, 'othersite.org/privacy')
self.assertEqual(conf.tos_url, 'othersite.org/tos')
self.assertEqual(conf.feedback_url, 'othersite.org/feedback')
def test_edit_recaptcha_config(self):
self._post_with_params(
captcha_site_key='NEW-captcha-key',
captcha_secret_key='NEW-captcha-secret-key')
conf = config.Configuration('*')
self.assertEqual(conf.captcha_site_key, 'NEW-captcha-key')
self.assertEqual(conf.captcha_secret_key, 'NEW-captcha-secret-key')
def test_edit_ganalytics_config(self):
self._post_with_params(
analytics_id='NEW-analytics-id',
amp_gtm_id='NEW-amp-gtm-id')
conf = config.Configuration('*')
self.assertEqual(conf.analytics_id, 'NEW-analytics-id')
self.assertEqual(conf.amp_gtm_id, 'NEW-amp-gtm-id')
def test_edit_gmaps_config(self):
self._post_with_params(maps_api_key='NEW-maps-api-key')
conf = config.Configuration('*')
self.assertEqual(conf.maps_api_key, 'NEW-maps-api-key')
def test_edit_gtranslate_config(self):
self._post_with_params(translate_api_key='NEW-translate-api-key')
conf = config.Configuration('*')
self.assertEqual(conf.translate_api_key, 'NEW-translate-api-key')
def test_edit_notification_config(self):
self._post_with_params(
notification_email='notifications@othersite.org',
unreviewed_notes_threshold='86')
conf = config.Configuration('*')
self.assertEqual(conf.notification_email, 'notifications@othersite.org')
self.assertEqual(conf.unreviewed_notes_threshold, 86)
def _post_with_params(self, **kwargs):
get_doc = self.to_doc(self.client.get('/global/admin', secure=True))
xsrf_token = get_doc.cssselect_one('input[name="xsrf_token"]').get(
'value')
post_params = copy.deepcopy(AdminGlobalIndexViewTests._BASE_POST_PARAMS)
post_params['xsrf_token'] = xsrf_token
post_params.update(kwargs)
return self.client.post('/global/admin/', post_params, secure=True)<|fim▁end|> |
def test_edit_sms_config(self):
self._post_with_params(sms_number_to_repo='{"+1800pfhaiti": "haiti"}') |
<|file_name|>gen.go<|end_file_name|><|fim▁begin|>// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build ignore
//go:generate go run gen.go
// This program generates internet protocol constants and tables by
// reading IANA protocol registries.
package main
import (
"bytes"
"encoding/xml"
"fmt"
"go/format"
"io"
"io/ioutil"
"net/http"
"os"
"strconv"
"strings"
)
var registries = []struct {
url string
parse func(io.Writer, io.Reader) error
}{
{
"http://www.iana.org/assignments/dscp-registry/dscp-registry.xml",
parseDSCPRegistry,
},
{
"http://www.iana.org/assignments/ipv4-tos-byte/ipv4-tos-byte.xml",
parseTOSTCByte,
},
{
"http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xml",
parseProtocolNumbers,
},
}
func main() {
var bb bytes.Buffer
fmt.Fprintf(&bb, "// go generate gen.go\n")
fmt.Fprintf(&bb, "// GENERATED BY THE COMMAND ABOVE; DO NOT EDIT\n\n")
fmt.Fprintf(&bb, "// Package iana provides protocol number resources managed by the Internet Assigned Numbers Authority (IANA).\n")
fmt.Fprintf(&bb, `package iana // import "golang.org/x/net/internal/iana"` + "\n\n")
for _, r := range registries {
resp, err := http.Get(r.url)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
fmt.Fprintf(os.Stderr, "got HTTP status code %v for %v\n", resp.StatusCode, r.url)
os.Exit(1)<|fim▁hole|> os.Exit(1)
}
fmt.Fprintf(&bb, "\n")
}
b, err := format.Source(bb.Bytes())
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
if err := ioutil.WriteFile("const.go", b, 0644); err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
func parseDSCPRegistry(w io.Writer, r io.Reader) error {
dec := xml.NewDecoder(r)
var dr dscpRegistry
if err := dec.Decode(&dr); err != nil {
return err
}
drs := dr.escape()
fmt.Fprintf(w, "// %s, Updated: %s\n", dr.Title, dr.Updated)
fmt.Fprintf(w, "const (\n")
for _, dr := range drs {
fmt.Fprintf(w, "DiffServ%s = %#x", dr.Name, dr.Value)
fmt.Fprintf(w, "// %s\n", dr.OrigName)
}
fmt.Fprintf(w, ")\n")
return nil
}
type dscpRegistry struct {
XMLName xml.Name `xml:"registry"`
Title string `xml:"title"`
Updated string `xml:"updated"`
Note string `xml:"note"`
RegTitle string `xml:"registry>title"`
PoolRecords []struct {
Name string `xml:"name"`
Space string `xml:"space"`
} `xml:"registry>record"`
Records []struct {
Name string `xml:"name"`
Space string `xml:"space"`
} `xml:"registry>registry>record"`
}
type canonDSCPRecord struct {
OrigName string
Name string
Value int
}
func (drr *dscpRegistry) escape() []canonDSCPRecord {
drs := make([]canonDSCPRecord, len(drr.Records))
sr := strings.NewReplacer(
"+", "",
"-", "",
"/", "",
".", "",
" ", "",
)
for i, dr := range drr.Records {
s := strings.TrimSpace(dr.Name)
drs[i].OrigName = s
drs[i].Name = sr.Replace(s)
n, err := strconv.ParseUint(dr.Space, 2, 8)
if err != nil {
continue
}
drs[i].Value = int(n) << 2
}
return drs
}
func parseTOSTCByte(w io.Writer, r io.Reader) error {
dec := xml.NewDecoder(r)
var ttb tosTCByte
if err := dec.Decode(&ttb); err != nil {
return err
}
trs := ttb.escape()
fmt.Fprintf(w, "// %s, Updated: %s\n", ttb.Title, ttb.Updated)
fmt.Fprintf(w, "const (\n")
for _, tr := range trs {
fmt.Fprintf(w, "%s = %#x", tr.Keyword, tr.Value)
fmt.Fprintf(w, "// %s\n", tr.OrigKeyword)
}
fmt.Fprintf(w, ")\n")
return nil
}
type tosTCByte struct {
XMLName xml.Name `xml:"registry"`
Title string `xml:"title"`
Updated string `xml:"updated"`
Note string `xml:"note"`
RegTitle string `xml:"registry>title"`
Records []struct {
Binary string `xml:"binary"`
Keyword string `xml:"keyword"`
} `xml:"registry>record"`
}
type canonTOSTCByteRecord struct {
OrigKeyword string
Keyword string
Value int
}
func (ttb *tosTCByte) escape() []canonTOSTCByteRecord {
trs := make([]canonTOSTCByteRecord, len(ttb.Records))
sr := strings.NewReplacer(
"Capable", "",
"(", "",
")", "",
"+", "",
"-", "",
"/", "",
".", "",
" ", "",
)
for i, tr := range ttb.Records {
s := strings.TrimSpace(tr.Keyword)
trs[i].OrigKeyword = s
ss := strings.Split(s, " ")
if len(ss) > 1 {
trs[i].Keyword = strings.Join(ss[1:], " ")
} else {
trs[i].Keyword = ss[0]
}
trs[i].Keyword = sr.Replace(trs[i].Keyword)
n, err := strconv.ParseUint(tr.Binary, 2, 8)
if err != nil {
continue
}
trs[i].Value = int(n)
}
return trs
}
func parseProtocolNumbers(w io.Writer, r io.Reader) error {
dec := xml.NewDecoder(r)
var pn protocolNumbers
if err := dec.Decode(&pn); err != nil {
return err
}
prs := pn.escape()
prs = append([]canonProtocolRecord{{
Name: "IP",
Descr: "IPv4 encapsulation, pseudo protocol number",
Value: 0,
}}, prs...)
fmt.Fprintf(w, "// %s, Updated: %s\n", pn.Title, pn.Updated)
fmt.Fprintf(w, "const (\n")
for _, pr := range prs {
if pr.Name == "" {
continue
}
fmt.Fprintf(w, "Protocol%s = %d", pr.Name, pr.Value)
s := pr.Descr
if s == "" {
s = pr.OrigName
}
fmt.Fprintf(w, "// %s\n", s)
}
fmt.Fprintf(w, ")\n")
return nil
}
type protocolNumbers struct {
XMLName xml.Name `xml:"registry"`
Title string `xml:"title"`
Updated string `xml:"updated"`
RegTitle string `xml:"registry>title"`
Note string `xml:"registry>note"`
Records []struct {
Value string `xml:"value"`
Name string `xml:"name"`
Descr string `xml:"description"`
} `xml:"registry>record"`
}
type canonProtocolRecord struct {
OrigName string
Name string
Descr string
Value int
}
func (pn *protocolNumbers) escape() []canonProtocolRecord {
prs := make([]canonProtocolRecord, len(pn.Records))
sr := strings.NewReplacer(
"-in-", "in",
"-within-", "within",
"-over-", "over",
"+", "P",
"-", "",
"/", "",
".", "",
" ", "",
)
for i, pr := range pn.Records {
if strings.Contains(pr.Name, "Deprecated") ||
strings.Contains(pr.Name, "deprecated") {
continue
}
prs[i].OrigName = pr.Name
s := strings.TrimSpace(pr.Name)
switch pr.Name {
case "ISIS over IPv4":
prs[i].Name = "ISIS"
case "manet":
prs[i].Name = "MANET"
default:
prs[i].Name = sr.Replace(s)
}
ss := strings.Split(pr.Descr, "\n")
for i := range ss {
ss[i] = strings.TrimSpace(ss[i])
}
if len(ss) > 1 {
prs[i].Descr = strings.Join(ss, " ")
} else {
prs[i].Descr = ss[0]
}
prs[i].Value, _ = strconv.Atoi(pr.Value)
}
return prs
}<|fim▁end|> | }
if err := r.parse(&bb, resp.Body); err != nil {
fmt.Fprintln(os.Stderr, err) |
<|file_name|>test_RegexQuery.py<|end_file_name|><|fim▁begin|># ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
from unittest import TestCase, main
from lucene import *
from PyLuceneTestCase import PyLuceneTestCase
class TestRegexQuery(PyLuceneTestCase):
FN = "field"
def setUp(self):
PyLuceneTestCase.setUp(self)
writer = self.getWriter(analyzer=SimpleAnalyzer(self.TEST_VERSION))
doc = Document()
doc.add(Field(self.FN, "the quick brown fox jumps over the lazy dog", TextField.TYPE_NOT_STORED))
writer.addDocument(doc)
writer.commit()
writer.close()
self.searcher = self.getSearcher()
def tearDown(self):
del self.searcher
def newTerm(self, value):
return Term(self.FN, value)
def regexQueryNrHits(self, regex):
query = RegexQuery(self.newTerm(regex))
return self.searcher.search(query, 50).totalHits
def spanRegexQueryNrHits(self, regex1, regex2, slop, ordered):
srq1 = SpanMultiTermQueryWrapper(RegexQuery(self.newTerm(regex1)))
srq2 = SpanMultiTermQueryWrapper(RegexQuery(self.newTerm(regex2)))
query = SpanNearQuery([srq1, srq2], slop, ordered)
return self.searcher.search(query, 50).totalHits
def testRegex1(self):
self.assertEqual(1, self.regexQueryNrHits("^q.[aeiou]c.*$"))
def testRegex2(self):
self.assertEqual(0, self.regexQueryNrHits("^.[aeiou]c.*$"))
def testRegex3(self):
self.assertEqual(0, self.regexQueryNrHits("^q.[aeiou]c$"))
def testSpanRegex1(self):
self.assertEqual(1, self.spanRegexQueryNrHits("^q.[aeiou]c.*$",
"dog", 6, True))
def testSpanRegex2(self):
self.assertEqual(0, self.spanRegexQueryNrHits("^q.[aeiou]c.*$",
"dog", 5, True))
if __name__ == "__main__":
import sys, lucene
lucene.initVM()
if '-loop' in sys.argv:
sys.argv.remove('-loop')
while True:
try:
main()
except:
pass
else:
main()<|fim▁end|> | |
<|file_name|>modules.py<|end_file_name|><|fim▁begin|>__author__ = 'flaviocaetano'
from django.utils.translation import ugettext_lazy as _
from admin_tools.dashboard import modules
import psutil
class PelicanAdmin(modules.DashboardModule):
"""Dashboard module for Pelican service administration.
"""
title = 'Pelican Admin'
template = 'pelican_admin.html'
def __init__(self, *args, **kwargs):
super(PelicanAdmin, self).__init__(*args, **kwargs)
self.pelican_status = False
for p in psutil.process_iter():<|fim▁hole|> break
except psutil.AccessDenied, e:
pass
def is_empty(self):
return False<|fim▁end|> | try:
if "pelican" in str(p.cmdline).lower():
self.pelican_status = True |
<|file_name|>SHAKE256.py<|end_file_name|><|fim▁begin|># ===================================================================
#
# Copyright (c) 2015, Legrandin <helderijs@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
from Cryptodome.Util.py3compat import bord
from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib,
VoidPointer, SmartPointer,
create_string_buffer,
get_raw_buffer, c_size_t,
c_uint8_ptr)
from Cryptodome.Hash.keccak import _raw_keccak_lib
class SHAKE256_XOF(object):
"""A SHAKE256 hash object.
Do not instantiate directly.
Use the :func:`new` function.
:ivar oid: ASN.1 Object ID
:vartype oid: string
"""
# ASN.1 Object ID
oid = "2.16.840.1.101.3.4.2.12"
def __init__(self, data=None):
state = VoidPointer()
result = _raw_keccak_lib.keccak_init(state.address_of(),
c_size_t(64),
0x1F)<|fim▁hole|> self._state = SmartPointer(state.get(),
_raw_keccak_lib.keccak_destroy)
self._is_squeezing = False
if data:
self.update(data)
def update(self, data):
"""Continue hashing of a message by consuming the next chunk of data.
Args:
data (byte string/byte array/memoryview): The next chunk of the message being hashed.
"""
if self._is_squeezing:
raise TypeError("You cannot call 'update' after the first 'read'")
result = _raw_keccak_lib.keccak_absorb(self._state.get(),
c_uint8_ptr(data),
c_size_t(len(data)))
if result:
raise ValueError("Error %d while updating SHAKE256 state"
% result)
return self
def read(self, length):
"""
Compute the next piece of XOF output.
.. note::
You cannot use :meth:`update` anymore after the first call to
:meth:`read`.
Args:
length (integer): the amount of bytes this method must return
:return: the next piece of XOF output (of the given length)
:rtype: byte string
"""
self._is_squeezing = True
bfr = create_string_buffer(length)
result = _raw_keccak_lib.keccak_squeeze(self._state.get(),
bfr,
c_size_t(length))
if result:
raise ValueError("Error %d while extracting from SHAKE256"
% result)
return get_raw_buffer(bfr)
def new(self, data=None):
return type(self)(data=data)
def new(data=None):
"""Return a fresh instance of a SHAKE256 object.
Args:
data (byte string/byte array/memoryview):
The very first chunk of the message to hash.
It is equivalent to an early call to :meth:`update`.
Optional.
:Return: A :class:`SHAKE256_XOF` object
"""
return SHAKE256_XOF(data=data)<|fim▁end|> | if result:
raise ValueError("Error %d while instantiating SHAKE256"
% result) |
<|file_name|>find_cmake.py<|end_file_name|><|fim▁begin|># -*- python -*-
# stdlib imports ---
import os
import os.path as osp
import textwrap
# waf imports ---
import waflib.Utils
import waflib.Logs as msg
from waflib.Configure import conf
#
_heptooldir = osp.dirname(osp.abspath(__file__))
def options(opt):<|fim▁hole|> opt.add_option(
'--with-cmake',
default=None,
help="Look for CMake at the given path")
return
def configure(conf):
conf.load('hwaf-base', tooldir=_heptooldir)
return
@conf
def find_cmake(ctx, **kwargs):
if not ctx.env.HWAF_FOUND_C_COMPILER:
ctx.fatal('load a C compiler first')
pass
if not ctx.env.HWAF_FOUND_CXX_COMPILER:
ctx.fatal('load a C++ compiler first')
pass
path_list = waflib.Utils.to_list(kwargs.get('path_list', []))
if getattr(ctx.options, 'with_cmake', None):
topdir = ctx.options.with_cmake
topdir = ctx.hwaf_subst_vars(topdir)
path_list.append(osp.join(topdir, "bin"))
pass
kwargs['path_list'] = path_list
ctx.find_program(
"cmake",
var="CMAKE",
**kwargs)
kwargs['mandatory'] = False
ctx.find_program(
"ccmake",
var="CCMAKE",
**kwargs)
ctx.find_program(
"cpack",
var="CPACK",
**kwargs)
ctx.find_program(
"ctest",
var="CTEST",
**kwargs)
version="N/A"
cmd = [ctx.env.CMAKE, "--version"]
lines=ctx.cmd_and_log(cmd).splitlines()
for l in lines:
l = l.lower()
if "version" in l:
version=l[l.find("version")+len("version"):].strip()
break
pass
ctx.start_msg("CMake version")
ctx.end_msg(version)
ctx.hwaf_declare_runtime_env('CMAKE')
ctx.env.CMAKE_HOME = osp.dirname(osp.dirname(ctx.env.CMAKE))
ctx.env.CMAKE_VERSION = version
ctx.env.HWAF_FOUND_CMAKE = 1
return
## EOF ##<|fim▁end|> |
opt.load('hwaf-base', tooldir=_heptooldir)
|
<|file_name|>chartable.rs<|end_file_name|><|fim▁begin|>//! char table related functions
use remacs_macros::lisp_fn;
<|fim▁hole|> hashtable::LispHashTableRef,
lisp::{ExternalPtr, LispObject, LispStructuralEqual},
remacs_sys::{
char_table_specials, equal_kind, pvec_type, EmacsInt, Lisp_Char_Table, Lisp_Sub_Char_Table,
Lisp_Type, More_Lisp_Bits, CHARTAB_SIZE_BITS,
},
remacs_sys::{uniprop_table_uncompress, CHAR_TABLE_SET},
remacs_sys::{Qchar_code_property_table, Qchar_table_p},
vectors::LispVectorlikeRef,
};
pub type LispCharTableRef = ExternalPtr<Lisp_Char_Table>;
pub type LispSubCharTableRef = ExternalPtr<Lisp_Sub_Char_Table>;
#[repr(transparent)]
pub struct LispSubCharTableAsciiRef(ExternalPtr<Lisp_Sub_Char_Table>);
impl LispObject {
pub fn is_char_table(self) -> bool {
self.as_vectorlike()
.map_or(false, |v| v.is_pseudovector(pvec_type::PVEC_CHAR_TABLE))
}
pub fn as_char_table(self) -> Option<LispCharTableRef> {
self.into()
}
pub fn force_char_table(self) -> LispCharTableRef {
unsafe { self.to_char_table_unchecked() }
}
pub unsafe fn to_char_table_unchecked(self) -> LispCharTableRef {
LispCharTableRef::new(self.get_untaggedptr() as *mut Lisp_Char_Table)
}
}
impl From<LispObject> for LispCharTableRef {
fn from(o: LispObject) -> Self {
if let Some(chartable) = o.as_char_table() {
chartable
} else {
wrong_type!(Qchar_table_p, o)
}
}
}
impl From<LispObject> for Option<LispCharTableRef> {
fn from(o: LispObject) -> Self {
o.as_vectorlike().and_then(LispVectorlikeRef::as_char_table)
}
}
impl From<LispCharTableRef> for LispObject {
fn from(ct: LispCharTableRef) -> Self {
Self::tag_ptr(ct, Lisp_Type::Lisp_Vectorlike)
}
}
impl LispObject {
pub fn as_sub_char_table(self) -> Option<LispSubCharTableRef> {
self.as_vectorlike()
.and_then(LispVectorlikeRef::as_sub_char_table)
}
pub fn as_sub_char_table_ascii(self) -> Option<LispSubCharTableAsciiRef> {
self.as_vectorlike()
.and_then(LispVectorlikeRef::as_sub_char_table_ascii)
}
}
fn chartab_size(depth: i32) -> usize {
match depth {
0 => 1 << CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_0 as isize,
1 => 1 << CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_1 as isize,
2 => 1 << CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_2 as isize,
3 => 1 << CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_3 as isize,
_ => panic!("Invalid depth for chartab"),
}
}
fn chartab_idx(c: isize, depth: i32, min_char: i32) -> usize {
// Number of characters (in bits) each element of Nth level char-table covers.
let bits = match depth {
0 => {
CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_1
+ CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_2
+ CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_3
}
1 => CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_2 + CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_3,
2 => CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_3,
3 => 0,
_ => {
error!("Invalid char table depth");
}
};
((c - min_char as isize) >> bits) as usize
}
/// Nonzero iff OBJ is a string representing uniprop values of 128
/// succeeding characters (the bottom level of a char-table) by a
/// compressed format. We are sure that no property value has a string
/// starting with '\001' nor '\002'.
fn uniprop_compressed_form_p(obj: LispObject) -> bool {
match obj.as_string() {
Some(s) => !s.is_empty() && (s.byte_at(0) == 1 || s.byte_at(0) == 2),
None => false,
}
}
impl LispCharTableRef {
pub fn is_uniprop(self) -> bool {
self.purpose == Qchar_code_property_table && self.extra_slots() == 5
}
pub fn extra_slots(self) -> isize {
(unsafe { self.header.size } & More_Lisp_Bits::PSEUDOVECTOR_SIZE_MASK as isize)
- (1 << CHARTAB_SIZE_BITS::CHARTAB_SIZE_BITS_0 as isize)
}
pub fn get(self, c: isize) -> LispObject {
let mut val = if is_ascii(c) {
let tmp = self.ascii;
if let Some(sub) = tmp.as_sub_char_table_ascii() {
sub.get(c)
} else {
tmp
}
} else {
let tmp = self
.contents
.get(chartab_idx(c, 0, 0) as usize)
.map_or_else(|| error!("Index out of range"), |tmp| *tmp);
if let Some(sub) = tmp.as_sub_char_table() {
sub.get(c, self.is_uniprop())
} else {
tmp
}
};
if val.is_nil() {
val = self.defalt; // sic
if val.is_nil() {
if let Some(parent) = self.parent.as_char_table() {
val = parent.get(c);
}
}
}
val
}
pub fn set(self, idx: isize, value: LispObject) {
verify_lisp_type!(idx as EmacsInt, Qcharacterp);
self.set_unchecked(idx, value);
}
pub fn set_unchecked(self, idx: isize, value: LispObject) {
unsafe { CHAR_TABLE_SET(self.into(), idx as i32, value) };
}
}
impl LispStructuralEqual for LispCharTableRef {
fn equal(
&self,
other: Self,
kind: equal_kind::Type,
depth: i32,
ht: &mut LispHashTableRef,
) -> bool {
let mut size1 = (unsafe { self.header.size }
& More_Lisp_Bits::PSEUDOVECTOR_SIZE_MASK as isize) as usize;
let size2 = (unsafe { other.header.size } & More_Lisp_Bits::PSEUDOVECTOR_SIZE_MASK as isize)
as usize;
if size1 != size2 {
return false;
}
let extras = if size1 > char_table_specials::CHAR_TABLE_STANDARD_SLOTS as usize {
let tmp = size1 - char_table_specials::CHAR_TABLE_STANDARD_SLOTS as usize;
size1 = char_table_specials::CHAR_TABLE_STANDARD_SLOTS as usize;
tmp
} else {
0
};
// char table is 4 LispObjects + an array
size1 -= 4;
if !self
.defalt
.equal_internal(other.defalt, kind, depth + 1, ht)
{
return false;
}
if !self
.parent
.equal_internal(other.parent, kind, depth + 1, ht)
{
return false;
}
if !self
.purpose
.equal_internal(other.purpose, kind, depth + 1, ht)
{
return false;
}
if !self.ascii.equal_internal(other.ascii, kind, depth + 1, ht) {
return false;
}
let all_equal = (0..size1).all(|i| {
let v1 = self.contents[i];
let v2 = other.contents[i];
v1.equal_internal(v2, kind, depth + 1, ht)
});
if !all_equal {
return false;
}
if extras == 0 {
true
} else {
let self_extras = unsafe { self.extras.as_slice(extras) };
let other_extras = unsafe { other.extras.as_slice(extras) };
(0..extras).all(|i| {
let v1 = self_extras[i];
let v2 = other_extras[i];
v1.equal_internal(v2, kind, depth + 1, ht)
})
}
}
}
impl LispSubCharTableAsciiRef {
fn _get(self, idx: usize) -> LispObject {
self.0._get(idx)
}
pub fn get(self, c: isize) -> LispObject {
let d = self.0.depth;
let m = self.0.min_char;
self._get(chartab_idx(c, d, m))
}
}
impl LispStructuralEqual for LispSubCharTableAsciiRef {
fn equal(
&self,
other: Self,
kind: equal_kind::Type,
depth: i32,
ht: &mut LispHashTableRef,
) -> bool {
self.0.equal(other.0, kind, depth, ht)
}
}
impl From<LispSubCharTableAsciiRef> for LispObject {
fn from(s: LispSubCharTableAsciiRef) -> Self {
Self::tag_ptr(s.0, Lisp_Type::Lisp_Vectorlike)
}
}
impl From<LispSubCharTableRef> for LispObject {
fn from(s: LispSubCharTableRef) -> Self {
Self::tag_ptr(s, Lisp_Type::Lisp_Vectorlike)
}
}
impl LispSubCharTableRef {
fn _get(self, idx: usize) -> LispObject {
unsafe {
let d = self.depth;
self.contents.as_slice(chartab_size(d))[idx]
}
}
pub fn get(self, c: isize, is_uniprop: bool) -> LispObject {
let idx = chartab_idx(c, self.depth, self.min_char);
let mut val = self._get(idx);
if is_uniprop && uniprop_compressed_form_p(val) {
val = unsafe { uniprop_table_uncompress(self.into(), idx as libc::c_int) };
}
if let Some(sub) = val.as_sub_char_table() {
val = sub.get(c, is_uniprop)
}
val
}
}
impl LispStructuralEqual for LispSubCharTableRef {
fn equal(
&self,
other: Self,
kind: equal_kind::Type,
depth: i32,
ht: &mut LispHashTableRef,
) -> bool {
let mut size1 =
unsafe { self.header.size as usize & More_Lisp_Bits::PSEUDOVECTOR_SIZE_MASK as usize };
let size2 =
unsafe { other.header.size as usize & More_Lisp_Bits::PSEUDOVECTOR_SIZE_MASK as usize };
if size1 != size2 {
return false;
}
size1 -= 2; // account for depth and min_char
if self.depth != other.depth {
return false;
}
if self.min_char != other.min_char {
return false;
}
let slice1 = unsafe { self.contents.as_slice(size1) };
let slice2 = unsafe { other.contents.as_slice(size1) };
(0..size1).all(|i| {
let v1 = slice1[i];
let v2 = slice2[i];
v1.equal_internal(v2, kind, depth + 1, ht)
})
}
}
const fn is_ascii(c: isize) -> bool {
c < 128
}
/// Return the subtype of char-table CHARTABLE. The value is a symbol.
#[lisp_fn]
pub fn char_table_subtype(chartable: LispCharTableRef) -> LispObject {
chartable.purpose
}
/// Return the parent char-table of CHARTABLE.
/// The value is either nil or another char-table.
/// If CHAR-TABLE holds nil for a given character,
/// then the actual applicable value is inherited from the parent char-table
/// (or from its parents, if necessary).
#[lisp_fn]
pub fn char_table_parent(chartable: LispCharTableRef) -> Option<LispCharTableRef> {
chartable.parent.as_char_table()
}
/// Set the parent char-table of CHARTABLE to PARENT.
/// Return PARENT. PARENT must be either nil or another char-table.
#[lisp_fn]
pub fn set_char_table_parent(mut chartable: LispCharTableRef, parent: Option<LispCharTableRef>) {
let mut temp = parent;
while temp.is_some() {
if let Some(p) = temp {
if chartable.eq(&p) {
error!("Attempt to make a chartable to be its own parent");
}
temp = char_table_parent(p);
}
}
chartable.parent = parent.into();
//parent
}
include!(concat!(env!("OUT_DIR"), "/chartable_exports.rs"));<|fim▁end|> | use crate::{ |
<|file_name|>irc.go<|end_file_name|><|fim▁begin|>// Package irc provides a basic implementation of the IRC protocol.
package irc
import (
"bufio"
"fmt"
"io"
"log"
"net"
"regexp"
"strings"
"time"
)
// IRC represents an connection to a channel.
type IRC struct {
// The IRC server to connect to.
server string
// The server port to connect to.
port int
// The IRC channel to connect to.
Channel string
// The connection to the IRC server.
conn net.Conn
// The channel where to send PING messages.
ping chan string
// The channel where to send messages that should
// be sent back to the server.
out chan string<|fim▁hole|> subscriptions map[*regexp.Regexp]chan string
}
// New connects to the specified server:port and returns
// an IRC value for interacting with the server.
func NewIRC(server string, port int, channel string) IRC {
conn := connect(server, port)
irc := IRC{
server: server,
port: port,
Channel: channel,
conn: conn,
ping: make(chan string),
out: make(chan string),
subscriptions: make(map[*regexp.Regexp]chan string),
}
go irc.handleRead()
go irc.handlePing()
go irc.handleWrite()
return irc
}
// Close closes the underlying IRC connection.
func (irc IRC) Close() {
irc.conn.Close()
close(irc.ping)
close(irc.out)
for _, c := range irc.subscriptions {
close(c)
}
}
// SendMessages sends the given list of messages over the wire
// to the connected channel.
func (irc IRC) SendMessages(messages ...string) {
for _, msg := range messages {
irc.out <- fmt.Sprintf("PRIVMSG %s :%s", irc.Channel, msg)
}
}
// Join joins the configured channel with the given
// user credentials.
func (irc IRC) Join(user string, passwd string) {
irc.out <- fmt.Sprintf("NICK %s", user)
irc.out <- fmt.Sprintf("USER %s 0.0.0.0 0.0.0.0 :%s", user, user)
irc.out <- fmt.Sprintf("JOIN %s %s", irc.Channel, passwd)
}
// Subscribe configures a message subscription pattern that,
// when matched, causes the message to be sent to the specified
// channel.
func (irc IRC) Subscribe(pattern *regexp.Regexp, channel chan string) {
irc.subscriptions[pattern] = channel
}
// handleRead reads all messages sent to the IRC channel.
// If it's a "PING" message, forwards it to the ping channel;
// otherwise, looks for a subscription that matches the message
// and forwards it to the registered channel.
func (irc *IRC) handleRead() {
buf := bufio.NewReaderSize(irc.conn, 512)
for {
msg, err := buf.ReadString('\n')
if err != nil {
if recoverable(err) {
log.Printf("Error [%s] while reading message, reconnecting in 1s...\n", err)
<-time.After(1 * time.Second)
irc.conn = connect(irc.server, irc.port)
continue
} else {
log.Fatalf("Unrecoverable error while reading message: %v\n", err)
}
}
msg = msg[:len(msg)-2]
if strings.Contains(msg, "PING") {
irc.ping <- msg
} else {
for pattern, channel := range irc.subscriptions {
if pattern.Match([]byte(msg)) {
channel <- msg
}
}
}
}
}
// handleWrite reads messages from the out channel
// and sends them over the wire.
func (irc IRC) handleWrite() {
for msg := range irc.out {
irc.send(msg)
}
}
// handlePing reads messages from the ping channel
// and sends the "PONG" response to the server originating
// the "PING" request.
func (irc IRC) handlePing() {
for ping := range irc.ping {
server := strings.Split(ping, ":")[1]
irc.out <- fmt.Sprintf("PONG %s", server)
log.Printf("[IRC] PONG sent to %s\n", server)
}
}
// send is responsible for writing the bytes over the wire.
func (irc IRC) send(msg string) {
_, err := irc.conn.Write([]byte(fmt.Sprintf("%s\r\n", msg)))
if err != nil {
log.Fatal(err)
}
}
// connect dials to the configured server and returns
// the connection.
func connect(server string, port int) net.Conn {
conn, err := net.Dial("tcp", fmt.Sprintf("%s:%d", server, port))
if err != nil {
log.Fatal(err)
}
log.Printf("[IRC] Connected to %s (%s).\n", server, conn.RemoteAddr())
return conn
}
// recoverable checks if the given error is temporary and could
// be recovered from.
func recoverable(err error) bool {
if e, netError := err.(net.Error); netError && e.Temporary() {
return true
} else if err == io.EOF {
return true
}
return false
}<|fim▁end|> |
// A map where the key is a regexp pattern to be matched against,
// and the value is a channel where to send messages that match
// the specified pattern. |
<|file_name|>dominators.rs<|end_file_name|><|fim▁begin|>//! Compute dominators of a control-flow graph.
//!
//! # The Dominance Relation
//!
//! In a directed graph with a root node **R**, a node **A** is said to *dominate* a
//! node **B** iff every path from **R** to **B** contains **A**.
//!
//! The node **A** is said to *strictly dominate* the node **B** iff **A** dominates
//! **B** and **A ≠ B**.
//!
//! The node **A** is said to be the *immediate dominator* of a node **B** iff it
//! strictly dominates **B** and there does not exist any node **C** where **A**
//! dominates **C** and **C** dominates **B**.
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet, hash_map::Iter};
use std::hash::Hash;
use crate::visit::{DfsPostOrder, GraphBase, IntoNeighbors, Visitable, Walker};
/// The dominance relation for some graph and root.
#[derive(Debug, Clone)]
pub struct Dominators<N>
where
N: Copy + Eq + Hash,
{
root: N,
dominators: HashMap<N, N>,
}
impl<N> Dominators<N>
where
N: Copy + Eq + Hash,
{
/// Get the root node used to construct these dominance relations.
pub fn root(&self) -> N {
self.root
}
/// Get the immediate dominator of the given node.
///
/// Returns `None` for any node that is not reachable from the root, and for
/// the root itself.<|fim▁hole|> pub fn immediate_dominator(&self, node: N) -> Option<N> {
if node == self.root {
None
} else {
self.dominators.get(&node).cloned()
}
}
/// Iterate over the given node's strict dominators.
///
/// If the given node is not reachable from the root, then `None` is
/// returned.
pub fn strict_dominators(&self, node: N) -> Option<DominatorsIter<N>> {
if self.dominators.contains_key(&node) {
Some(DominatorsIter {
dominators: self,
node: self.immediate_dominator(node),
})
} else {
None
}
}
/// Iterate over all of the given node's dominators (including the given
/// node itself).
///
/// If the given node is not reachable from the root, then `None` is
/// returned.
pub fn dominators(&self, node: N) -> Option<DominatorsIter<N>> {
if self.dominators.contains_key(&node) {
Some(DominatorsIter {
dominators: self,
node: Some(node),
})
} else {
None
}
}
/// Iterate over all nodes immediately dominated by the given node (not
/// including the given node itself).
pub fn immediately_dominated_by(&self, node: N) -> DominatedByIter<N> {
DominatedByIter {
iter: self.dominators.iter(),
node: node
}
}
}
/// Iterator for a node's dominators.
pub struct DominatorsIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
dominators: &'a Dominators<N>,
node: Option<N>,
}
impl<'a, N> Iterator for DominatorsIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
type Item = N;
fn next(&mut self) -> Option<Self::Item> {
let next = self.node.take();
if let Some(next) = next {
self.node = self.dominators.immediate_dominator(next);
}
next
}
}
/// Iterator for nodes dominated by a given node.
pub struct DominatedByIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
iter: Iter<'a, N, N>,
node: N,
}
impl<'a, N> Iterator for DominatedByIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
type Item = N;
fn next(&mut self) -> Option<Self::Item> {
while let Some(next) = self.iter.next() {
if next.1 == &self.node {
return Some(*next.0);
}
}
None
}
}
/// The undefined dominator sentinel, for when we have not yet discovered a
/// node's dominator.
const UNDEFINED: usize = ::std::usize::MAX;
/// This is an implementation of the engineered ["Simple, Fast Dominance
/// Algorithm"][0] discovered by Cooper et al.
///
/// This algorithm is **O(|V|²)**, and therefore has slower theoretical running time
/// than the Lengauer-Tarjan algorithm (which is **O(|E| log |V|)**. However,
/// Cooper et al found it to be faster in practice on control flow graphs of up
/// to ~30,000 vertices.
///
/// [0]: http://www.cs.rice.edu/~keith/EMBED/dom.pdf
pub fn simple_fast<G>(graph: G, root: G::NodeId) -> Dominators<G::NodeId>
where
G: IntoNeighbors + Visitable,
<G as GraphBase>::NodeId: Eq + Hash,
{
let (post_order, predecessor_sets) = simple_fast_post_order(graph, root);
let length = post_order.len();
debug_assert!(length > 0);
debug_assert!(post_order.last() == Some(&root));
// From here on out we use indices into `post_order` instead of actual
// `NodeId`s wherever possible. This greatly improves the performance of
// this implementation, but we have to pay a little bit of upfront cost to
// convert our data structures to play along first.
// Maps a node to its index into `post_order`.
let node_to_post_order_idx: HashMap<_, _> = post_order
.iter()
.enumerate()
.map(|(idx, &node)| (node, idx))
.collect();
// Maps a node's `post_order` index to its set of predecessors's indices
// into `post_order` (as a vec).
let idx_to_predecessor_vec =
predecessor_sets_to_idx_vecs(&post_order, &node_to_post_order_idx, predecessor_sets);
let mut dominators = vec![UNDEFINED; length];
dominators[length - 1] = length - 1;
let mut changed = true;
while changed {
changed = false;
// Iterate in reverse post order, skipping the root.
for idx in (0..length - 1).rev() {
debug_assert!(post_order[idx] != root);
// Take the intersection of every predecessor's dominator set; that
// is the current best guess at the immediate dominator for this
// node.
let new_idom_idx = {
let mut predecessors = idx_to_predecessor_vec[idx]
.iter()
.filter(|&&p| dominators[p] != UNDEFINED);
let new_idom_idx = predecessors.next().expect(
"Because the root is initialized to dominate itself, and is the \
first node in every path, there must exist a predecessor to this \
node that also has a dominator",
);
predecessors.fold(*new_idom_idx, |new_idom_idx, &predecessor_idx| {
intersect(&dominators, new_idom_idx, predecessor_idx)
})
};
debug_assert!(new_idom_idx < length);
if new_idom_idx != dominators[idx] {
dominators[idx] = new_idom_idx;
changed = true;
}
}
}
// All done! Translate the indices back into proper `G::NodeId`s.
debug_assert!(!dominators.iter().any(|&dom| dom == UNDEFINED));
Dominators {
root,
dominators: dominators
.into_iter()
.enumerate()
.map(|(idx, dom_idx)| (post_order[idx], post_order[dom_idx]))
.collect(),
}
}
fn intersect(dominators: &[usize], mut finger1: usize, mut finger2: usize) -> usize {
loop {
match finger1.cmp(&finger2) {
Ordering::Less => finger1 = dominators[finger1],
Ordering::Greater => finger2 = dominators[finger2],
Ordering::Equal => return finger1,
}
}
}
fn predecessor_sets_to_idx_vecs<N>(
post_order: &[N],
node_to_post_order_idx: &HashMap<N, usize>,
mut predecessor_sets: HashMap<N, HashSet<N>>,
) -> Vec<Vec<usize>>
where
N: Copy + Eq + Hash,
{
post_order
.iter()
.map(|node| {
predecessor_sets
.remove(node)
.map(|predecessors| {
predecessors
.into_iter()
.map(|p| *node_to_post_order_idx.get(&p).unwrap())
.collect()
})
.unwrap_or_else(Vec::new)
})
.collect()
}
fn simple_fast_post_order<G>(
graph: G,
root: G::NodeId,
) -> (Vec<G::NodeId>, HashMap<G::NodeId, HashSet<G::NodeId>>)
where
G: IntoNeighbors + Visitable,
<G as GraphBase>::NodeId: Eq + Hash,
{
let mut post_order = vec![];
let mut predecessor_sets = HashMap::new();
for node in DfsPostOrder::new(graph, root).iter(graph) {
post_order.push(node);
for successor in graph.neighbors(node) {
predecessor_sets
.entry(successor)
.or_insert_with(HashSet::new)
.insert(node);
}
}
(post_order, predecessor_sets)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_iter_dominators() {
let doms: Dominators<u32> = Dominators {
root: 0,
dominators: [(2, 1), (1, 0), (0, 0)].iter().cloned().collect(),
};
let all_doms: Vec<_> = doms.dominators(2).unwrap().collect();
assert_eq!(vec![2, 1, 0], all_doms);
assert_eq!(None::<()>, doms.dominators(99).map(|_| unreachable!()));
let strict_doms: Vec<_> = doms.strict_dominators(2).unwrap().collect();
assert_eq!(vec![1, 0], strict_doms);
assert_eq!(
None::<()>,
doms.strict_dominators(99).map(|_| unreachable!())
);
let dom_by: Vec<_> = doms.immediately_dominated_by(1).collect();
assert_eq!(vec![2], dom_by);
assert_eq!(None, doms.immediately_dominated_by(99).next());
}
}<|fim▁end|> | |
<|file_name|>HashTableTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.operators.hash;
import org.apache.flink.api.common.typeutils.GenericPairComparator;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypePairComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.ByteValueSerializer;
import org.apache.flink.api.common.typeutils.base.LongComparator;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArrayComparator;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.runtime.TupleComparator;
import org.apache.flink.api.java.typeutils.runtime.TupleSerializer;
import org.apache.flink.api.java.typeutils.runtime.ValueComparator;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.types.ByteValue;
import org.apache.flink.util.MutableObjectIterator;
import org.junit.Test;
import org.junit.Assert;
import org.mockito.Mockito;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
public class HashTableTest {
private final TypeSerializer<Tuple2<Long, byte[]>> buildSerializer;
private final TypeSerializer<Long> probeSerializer;
private final TypeComparator<Tuple2<Long, byte[]>> buildComparator;
private final TypeComparator<Long> probeComparator;
private final TypePairComparator<Long, Tuple2<Long, byte[]>> pairComparator;
public HashTableTest() {
TypeSerializer<?>[] fieldSerializers = { LongSerializer.INSTANCE, BytePrimitiveArraySerializer.INSTANCE };
@SuppressWarnings("unchecked")
Class<Tuple2<Long, byte[]>> clazz = (Class<Tuple2<Long, byte[]>>) (Class<?>) Tuple2.class;
this.buildSerializer = new TupleSerializer<Tuple2<Long, byte[]>>(clazz, fieldSerializers);
this.probeSerializer = LongSerializer.INSTANCE;
TypeComparator<?>[] comparators = { new LongComparator(true) };
TypeSerializer<?>[] comparatorSerializers = { LongSerializer.INSTANCE };
this.buildComparator = new TupleComparator<Tuple2<Long, byte[]>>(new int[] {0}, comparators, comparatorSerializers);
this.probeComparator = new LongComparator(true);
this.pairComparator = new TypePairComparator<Long, Tuple2<Long, byte[]>>() {
private long ref;
@Override
public void setReference(Long reference) {
ref = reference;
}
@Override
public boolean equalToReference(Tuple2<Long, byte[]> candidate) {
//noinspection UnnecessaryUnboxing
return candidate.f0.longValue() == ref;
}
@Override
public int compareToReference(Tuple2<Long, byte[]> candidate) {
long x = ref;
long y = candidate.f0;
return (x < y) ? -1 : ((x == y) ? 0 : 1);
}
};
}
// ------------------------------------------------------------------------
// Tests
// ------------------------------------------------------------------------
/**
* This tests a combination of values that lead to a corner case situation where memory
* was missing and the computation deadlocked.
*/
@Test
public void testBufferMissingForProbing() {
final IOManager ioMan = new IOManagerAsync();
try {
final int pageSize = 32*1024;
final int numSegments = 34;
final int numRecords = 3400;
final int recordLen = 270;
final byte[] payload = new byte[recordLen - 8 - 4];
List<MemorySegment> memory = getMemory(numSegments, pageSize);
MutableHashTable<Tuple2<Long, byte[]>, Long> table = new MutableHashTable<>(
buildSerializer, probeSerializer, buildComparator, probeComparator,
pairComparator, memory, ioMan, 16, false);
table.open(new TupleBytesIterator(payload, numRecords), new LongIterator(10000));
try {
while (table.nextRecord()) {
MutableObjectIterator<Tuple2<Long, byte[]>> matches = table.getBuildSideIterator();
while (matches.next() != null);
}
}
catch (RuntimeException e) {
if (!e.getMessage().contains("exceeded maximum number of recursions")) {
e.printStackTrace();
fail("Test failed with unexpected exception");
}
}
finally {
table.close();
}
checkNoTempFilesRemain(ioMan);
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
finally {
ioMan.shutdown();
}
}
/**
* This tests the case where no additional partition buffers are used at the point when spilling
* is triggered, testing that overflow bucket buffers are taken into account when deciding which
* partition to spill.
*/
@Test
public void testSpillingFreesOnlyOverflowSegments() {
final IOManager ioMan = new IOManagerAsync();
final TypeSerializer<ByteValue> serializer = ByteValueSerializer.INSTANCE;
final TypeComparator<ByteValue> buildComparator = new ValueComparator<>(true, ByteValue.class);
final TypeComparator<ByteValue> probeComparator = new ValueComparator<>(true, ByteValue.class);
@SuppressWarnings("unchecked")
final TypePairComparator<ByteValue, ByteValue> pairComparator = Mockito.mock(TypePairComparator.class);
try {
final int pageSize = 32*1024;
final int numSegments = 34;
List<MemorySegment> memory = getMemory(numSegments, pageSize);
MutableHashTable<ByteValue, ByteValue> table = new MutableHashTable<>(
serializer, serializer, buildComparator, probeComparator,
pairComparator, memory, ioMan, 1, false);
table.open(new ByteValueIterator(100000000), new ByteValueIterator(1));
table.close();
checkNoTempFilesRemain(ioMan);
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
finally {
ioMan.shutdown();
}<|fim▁hole|> /**
* Tests that the MutableHashTable spills its partitions when creating the initial table
* without overflow segments in the partitions. This means that the records are large.
*/
@Test
public void testSpillingWhenBuildingTableWithoutOverflow() throws Exception {
final IOManager ioMan = new IOManagerAsync();
final TypeSerializer<byte[]> serializer = BytePrimitiveArraySerializer.INSTANCE;
final TypeComparator<byte[]> buildComparator = new BytePrimitiveArrayComparator(true);
final TypeComparator<byte[]> probeComparator = new BytePrimitiveArrayComparator(true);
@SuppressWarnings("unchecked")
final TypePairComparator<byte[], byte[]> pairComparator = new GenericPairComparator<>(
new BytePrimitiveArrayComparator(true), new BytePrimitiveArrayComparator(true));
final int pageSize = 128;
final int numSegments = 33;
List<MemorySegment> memory = getMemory(numSegments, pageSize);
MutableHashTable<byte[], byte[]> table = new MutableHashTable<byte[], byte[]>(
serializer,
serializer,
buildComparator,
probeComparator,
pairComparator,
memory,
ioMan,
1,
false);
int numElements = 9;
table.open(
new CombiningIterator<byte[]>(
new ByteArrayIterator(numElements, 128,(byte) 0),
new ByteArrayIterator(numElements, 128,(byte) 1)),
new CombiningIterator<byte[]>(
new ByteArrayIterator(1, 128,(byte) 0),
new ByteArrayIterator(1, 128,(byte) 1)));
while(table.nextRecord()) {
MutableObjectIterator<byte[]> iterator = table.getBuildSideIterator();
int counter = 0;
while(iterator.next() != null) {
counter++;
}
// check that we retrieve all our elements
Assert.assertEquals(numElements, counter);
}
table.close();
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
private static List<MemorySegment> getMemory(int numSegments, int segmentSize) {
ArrayList<MemorySegment> list = new ArrayList<MemorySegment>(numSegments);
for (int i = 0; i < numSegments; i++) {
list.add(MemorySegmentFactory.allocateUnpooledSegment(segmentSize));
}
return list;
}
private static void checkNoTempFilesRemain(IOManager ioManager) {
for (File dir : ioManager.getSpillingDirectories()) {
for (String file : dir.list()) {
if (file != null && !(file.equals(".") || file.equals(".."))) {
fail("hash table did not clean up temp files. remaining file: " + file);
}
}
}
}
private static class TupleBytesIterator implements MutableObjectIterator<Tuple2<Long, byte[]>> {
private final byte[] payload;
private final int numRecords;
private int count = 0;
TupleBytesIterator(byte[] payload, int numRecords) {
this.payload = payload;
this.numRecords = numRecords;
}
@Override
public Tuple2<Long, byte[]> next(Tuple2<Long, byte[]> reuse) {
return next();
}
@Override
public Tuple2<Long, byte[]> next() {
if (count++ < numRecords) {
return new Tuple2<>(42L, payload);
} else {
return null;
}
}
}
private static class ByteArrayIterator implements MutableObjectIterator<byte[]> {
private final long numRecords;
private long counter = 0;
private final byte[] arrayValue;
ByteArrayIterator(long numRecords, int length, byte value) {
this.numRecords = numRecords;
arrayValue = new byte[length];
Arrays.fill(arrayValue, value);
}
@Override
public byte[] next(byte[] array) {
return next();
}
@Override
public byte[] next() {
if (counter++ < numRecords) {
return arrayValue;
} else {
return null;
}
}
}
private static class LongIterator implements MutableObjectIterator<Long> {
private final long numRecords;
private long value = 0;
LongIterator(long numRecords) {
this.numRecords = numRecords;
}
@Override
public Long next(Long aLong) {
return next();
}
@Override
public Long next() {
if (value < numRecords) {
return value++;
} else {
return null;
}
}
}
private static class ByteValueIterator implements MutableObjectIterator<ByteValue> {
private final long numRecords;
private long value = 0;
ByteValueIterator(long numRecords) {
this.numRecords = numRecords;
}
@Override
public ByteValue next(ByteValue aLong) {
return next();
}
@Override
public ByteValue next() {
if (value++ < numRecords) {
return new ByteValue((byte) 0);
} else {
return null;
}
}
}
private static class CombiningIterator<T> implements MutableObjectIterator<T> {
private final MutableObjectIterator<T> left;
private final MutableObjectIterator<T> right;
public CombiningIterator(MutableObjectIterator<T> left, MutableObjectIterator<T> right) {
this.left = left;
this.right = right;
}
@Override
public T next(T reuse) throws IOException {
T value = left.next(reuse);
if (value == null) {
return right.next(reuse);
} else {
return value;
}
}
@Override
public T next() throws IOException {
T value = left.next();
if (value == null) {
return right.next();
} else {
return value;
}
}
}
}<|fim▁end|> | }
|
<|file_name|>ItemOnItemVerificationHandler.java<|end_file_name|><|fim▁begin|>package org.apollo.game.event.handler.impl;
import org.apollo.game.event.handler.EventHandler;
import org.apollo.game.event.handler.EventHandlerContext;
import org.apollo.game.event.impl.ItemOnItemEvent;
import org.apollo.game.model.Inventory;
import org.apollo.game.model.Item;
import org.apollo.game.model.Player;
/**
* An {@link EventHandler} which verifies the target item in {@link ItemOnItemEvent}s.
*
* @author Chris Fletcher
*/
public final class ItemOnItemVerificationHandler extends EventHandler<ItemOnItemEvent> {
@Override
public void handle(EventHandlerContext ctx, Player player, ItemOnItemEvent event) {
Inventory inventory = ItemVerificationHandler.interfaceToInventory(player, event.getTargetInterfaceId());
int slot = event.getTargetSlot();
if (slot < 0 || slot >= inventory.capacity()) {<|fim▁hole|> }
Item item = inventory.get(slot);
if (item == null || item.getId() != event.getTargetId()) {
ctx.breakHandlerChain();
}
}
}<|fim▁end|> | ctx.breakHandlerChain();
return; |
<|file_name|>disk.py<|end_file_name|><|fim▁begin|>#coding:UTF-8
"""
磁盘监控模块<|fim▁hole|>
from config import disk
from lib import core
import os,re
def init():
"对外接口"
sign=True
for t in disk.DISK_PATH:
warn,data=check(t)
if not warn:
login_time=time.time()
message="磁盘监控预警提示,磁盘使用率超过%s"%(disk.DISK_USED)+"%\n监控结果:"+data
message=message.decode("UTF-8")
print message
core.sendEmail(message)
print u"邮件已经发出"
sign=False
return sign
def getIntervalTime():
"获取检测间隔时间"
return disk.DISK_DELAY
def check(path):
"检测是否超出预警"
r=os.popen("df -h "+path)
for line in r:
data=line.rstrip()
datas=re.split(r'\s+',data)
used=datas[4].encode("UTF-8").replace("%","")
return int(used) < disk.DISK_USED,data<|fim▁end|> | """ |
<|file_name|>history.js<|end_file_name|><|fim▁begin|>import {createBrowserHistory} from 'history';<|fim▁hole|>const history = createBrowserHistory({basename: stringUtil.withoutTrailingSlash(ConfigStore.getBaseURI())});
export default history;<|fim▁end|> |
import stringUtil from '@shared/util/stringUtil';
import ConfigStore from '../stores/ConfigStore';
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from './types';
export * from './toast.component';
export * from './toast-instance';
export * from './toast.module';
export * from './toast.service';
export * from './toast';<|fim▁end|> | |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
<|fim▁hole|> os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sim.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|> | if __name__ == "__main__": |
<|file_name|>iter.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, ilammy
//
// Licensed under MIT license (see LICENSE file in the root directory).
// This file may be copied, distributed, and modified only in accordance
// with the terms specified by this license.
//! Iterator utilities.
use std::iter::Iterator;
/// Non-short-circuiting version of `Zip`. See `longest_zip()` for details.
pub struct LongestZip<A, B> {
a: A,
b: B,
}
impl<A, B> Iterator for LongestZip<A, B> where A: Iterator, B: Iterator {
type Item = (Option<A::Item>, Option<B::Item>);
fn next(&mut self) -> Option<Self::Item> {
let v_a = self.a.next();
let v_b = self.b.next();
if v_a.is_some() || v_b.is_some() {
return Some((v_a, v_b));
} else {
return None;
}
}
}
/// Returns an iterator which simulataneously walks over two other iterators until _both_ of
/// them are exhausted. It is similar to `zip()` method of `Iterator`, but it does not stop
/// when one of the iterators in exhausted.
///
/// Example:
/// ```<|fim▁hole|>/// (Some(&2), Some(&6)),
/// (Some(&3), None)
/// ]);
/// ```
pub fn longest_zip<A, B>(iter1: A, iter2: B) -> LongestZip<A::IntoIter, B::IntoIter>
where A: IntoIterator, B: IntoIterator
{
LongestZip { a: iter1.into_iter(), b: iter2.into_iter() }
}<|fim▁end|> | /// assert_eq!(longest_zip(&[1, 2, 3], &[5, 6]).collect::<Vec<_>>(),
/// &[
/// (Some(&1), Some(&5)), |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.