file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
index.ts | import AssertAgainstNamedBlocks from './assert-against-named-blocks';
import AssertIfHelperWithoutArguments from './assert-if-helper-without-arguments';
import AssertInputHelperWithoutBlock from './assert-input-helper-without-block';
import AssertLocalVariableShadowingHelperInvocation from './assert-local-variable-shadowing-helper-invocation';
import AssertReservedNamedArguments from './assert-reserved-named-arguments';
import AssertSplattributeExpressions from './assert-splattribute-expression';
import DeprecateSendAction from './deprecate-send-action';
import TransformActionSyntax from './transform-action-syntax';
import TransformAttrsIntoArgs from './transform-attrs-into-args';
import TransformComponentInvocation from './transform-component-invocation';
import TransformEachInIntoEach from './transform-each-in-into-each';
import TransformEachTrackArray from './transform-each-track-array';
import TransformHasBlockSyntax from './transform-has-block-syntax';
import TransformInElement from './transform-in-element';
import TransformLinkTo from './transform-link-to';
import TransformOldClassBindingSyntax from './transform-old-class-binding-syntax';
import TransformQuotedBindingsIntoJustBindings from './transform-quoted-bindings-into-just-bindings';
import TransformWrapMountAndOutlet from './transform-wrap-mount-and-outlet';
import { EMBER_NAMED_BLOCKS } from '@ember/canary-features';
import { SEND_ACTION } from '@ember/deprecated-features';
// order of plugins is important
const transforms = [
TransformComponentInvocation,
TransformOldClassBindingSyntax,
TransformQuotedBindingsIntoJustBindings,
AssertReservedNamedArguments,
TransformActionSyntax,
TransformAttrsIntoArgs,
TransformEachInIntoEach,
TransformHasBlockSyntax,
AssertLocalVariableShadowingHelperInvocation,
TransformLinkTo,
AssertInputHelperWithoutBlock,
TransformInElement,
AssertIfHelperWithoutArguments,
AssertSplattributeExpressions,
TransformEachTrackArray,
TransformWrapMountAndOutlet,
];
if (SEND_ACTION) |
if (!EMBER_NAMED_BLOCKS) {
transforms.push(AssertAgainstNamedBlocks);
}
export default Object.freeze(transforms);
| {
transforms.push(DeprecateSendAction);
} | conditional_block |
getRelayQueries.js | /**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getRelayQueries
* @flow
*/
'use strict';
var Map = require('Map');
import type {
RelayLazyContainer,
RelayQueryConfig
} from 'RelayContainer';
import type {RelayQuerySet} from 'RelayInternalTypes';
var RelayMetaRoute = require('RelayMetaRoute');
var RelayProfiler = require('RelayProfiler');
var RelayQuery = require('RelayQuery');
var buildRQL = require('buildRQL');
var invariant = require('invariant');
var stableStringify = require('stableStringify');
var warning = require('warning');
var queryCache = new Map();
/**
* @internal
*
* `getRelayQueries` retrieves all queries for a component given a route.
*/
function getRelayQueries(
Component: RelayLazyContainer,
route: RelayQueryConfig
): RelayQuerySet {
if (!queryCache.has(Component)) { | return cache[cacheKey];
}
var querySet = {};
Component.getFragmentNames().forEach(fragmentName => {
querySet[fragmentName] = null;
});
Object.keys(route.queries).forEach(queryName => {
if (!Component.hasFragment(queryName)) {
warning(
false,
'Relay.QL: query `%s.queries.%s` is invalid, expected fragment ' +
'`%s.fragments.%s` to be defined.',
route.name,
queryName,
Component.displayName,
queryName
);
return;
}
var queryBuilder = route.queries[queryName];
if (queryBuilder) {
var concreteQuery = buildRQL.Query(
queryBuilder,
Component,
queryName,
route.params
);
invariant(
concreteQuery !== undefined,
'Relay.QL: query `%s.queries.%s` is invalid, a typical query is ' +
'defined using: () => Relay.QL`query { ... }`.',
route.name,
queryName
);
if (concreteQuery) {
var rootQuery = RelayQuery.Node.createQuery(
concreteQuery,
RelayMetaRoute.get(route.name),
route.params
);
var rootCall = rootQuery.getRootCall();
if (rootCall.value !== undefined) {
querySet[queryName] = rootQuery;
return;
}
}
}
querySet[queryName] = null;
});
cache[cacheKey] = querySet;
return querySet;
}
module.exports = RelayProfiler.instrument('Relay.getQueries', getRelayQueries); | queryCache.set(Component, {});
}
var cacheKey = route.name + ':' + stableStringify(route.params);
var cache = queryCache.get(Component);
if (cache.hasOwnProperty(cacheKey)) { | random_line_split |
getRelayQueries.js | /**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getRelayQueries
* @flow
*/
'use strict';
var Map = require('Map');
import type {
RelayLazyContainer,
RelayQueryConfig
} from 'RelayContainer';
import type {RelayQuerySet} from 'RelayInternalTypes';
var RelayMetaRoute = require('RelayMetaRoute');
var RelayProfiler = require('RelayProfiler');
var RelayQuery = require('RelayQuery');
var buildRQL = require('buildRQL');
var invariant = require('invariant');
var stableStringify = require('stableStringify');
var warning = require('warning');
var queryCache = new Map();
/**
* @internal
*
* `getRelayQueries` retrieves all queries for a component given a route.
*/
function getRelayQueries(
Component: RelayLazyContainer,
route: RelayQueryConfig
): RelayQuerySet |
module.exports = RelayProfiler.instrument('Relay.getQueries', getRelayQueries);
| {
if (!queryCache.has(Component)) {
queryCache.set(Component, {});
}
var cacheKey = route.name + ':' + stableStringify(route.params);
var cache = queryCache.get(Component);
if (cache.hasOwnProperty(cacheKey)) {
return cache[cacheKey];
}
var querySet = {};
Component.getFragmentNames().forEach(fragmentName => {
querySet[fragmentName] = null;
});
Object.keys(route.queries).forEach(queryName => {
if (!Component.hasFragment(queryName)) {
warning(
false,
'Relay.QL: query `%s.queries.%s` is invalid, expected fragment ' +
'`%s.fragments.%s` to be defined.',
route.name,
queryName,
Component.displayName,
queryName
);
return;
}
var queryBuilder = route.queries[queryName];
if (queryBuilder) {
var concreteQuery = buildRQL.Query(
queryBuilder,
Component,
queryName,
route.params
);
invariant(
concreteQuery !== undefined,
'Relay.QL: query `%s.queries.%s` is invalid, a typical query is ' +
'defined using: () => Relay.QL`query { ... }`.',
route.name,
queryName
);
if (concreteQuery) {
var rootQuery = RelayQuery.Node.createQuery(
concreteQuery,
RelayMetaRoute.get(route.name),
route.params
);
var rootCall = rootQuery.getRootCall();
if (rootCall.value !== undefined) {
querySet[queryName] = rootQuery;
return;
}
}
}
querySet[queryName] = null;
});
cache[cacheKey] = querySet;
return querySet;
} | identifier_body |
getRelayQueries.js | /**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getRelayQueries
* @flow
*/
'use strict';
var Map = require('Map');
import type {
RelayLazyContainer,
RelayQueryConfig
} from 'RelayContainer';
import type {RelayQuerySet} from 'RelayInternalTypes';
var RelayMetaRoute = require('RelayMetaRoute');
var RelayProfiler = require('RelayProfiler');
var RelayQuery = require('RelayQuery');
var buildRQL = require('buildRQL');
var invariant = require('invariant');
var stableStringify = require('stableStringify');
var warning = require('warning');
var queryCache = new Map();
/**
* @internal
*
* `getRelayQueries` retrieves all queries for a component given a route.
*/
function | (
Component: RelayLazyContainer,
route: RelayQueryConfig
): RelayQuerySet {
if (!queryCache.has(Component)) {
queryCache.set(Component, {});
}
var cacheKey = route.name + ':' + stableStringify(route.params);
var cache = queryCache.get(Component);
if (cache.hasOwnProperty(cacheKey)) {
return cache[cacheKey];
}
var querySet = {};
Component.getFragmentNames().forEach(fragmentName => {
querySet[fragmentName] = null;
});
Object.keys(route.queries).forEach(queryName => {
if (!Component.hasFragment(queryName)) {
warning(
false,
'Relay.QL: query `%s.queries.%s` is invalid, expected fragment ' +
'`%s.fragments.%s` to be defined.',
route.name,
queryName,
Component.displayName,
queryName
);
return;
}
var queryBuilder = route.queries[queryName];
if (queryBuilder) {
var concreteQuery = buildRQL.Query(
queryBuilder,
Component,
queryName,
route.params
);
invariant(
concreteQuery !== undefined,
'Relay.QL: query `%s.queries.%s` is invalid, a typical query is ' +
'defined using: () => Relay.QL`query { ... }`.',
route.name,
queryName
);
if (concreteQuery) {
var rootQuery = RelayQuery.Node.createQuery(
concreteQuery,
RelayMetaRoute.get(route.name),
route.params
);
var rootCall = rootQuery.getRootCall();
if (rootCall.value !== undefined) {
querySet[queryName] = rootQuery;
return;
}
}
}
querySet[queryName] = null;
});
cache[cacheKey] = querySet;
return querySet;
}
module.exports = RelayProfiler.instrument('Relay.getQueries', getRelayQueries);
| getRelayQueries | identifier_name |
getRelayQueries.js | /**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getRelayQueries
* @flow
*/
'use strict';
var Map = require('Map');
import type {
RelayLazyContainer,
RelayQueryConfig
} from 'RelayContainer';
import type {RelayQuerySet} from 'RelayInternalTypes';
var RelayMetaRoute = require('RelayMetaRoute');
var RelayProfiler = require('RelayProfiler');
var RelayQuery = require('RelayQuery');
var buildRQL = require('buildRQL');
var invariant = require('invariant');
var stableStringify = require('stableStringify');
var warning = require('warning');
var queryCache = new Map();
/**
* @internal
*
* `getRelayQueries` retrieves all queries for a component given a route.
*/
function getRelayQueries(
Component: RelayLazyContainer,
route: RelayQueryConfig
): RelayQuerySet {
if (!queryCache.has(Component)) {
queryCache.set(Component, {});
}
var cacheKey = route.name + ':' + stableStringify(route.params);
var cache = queryCache.get(Component);
if (cache.hasOwnProperty(cacheKey)) {
return cache[cacheKey];
}
var querySet = {};
Component.getFragmentNames().forEach(fragmentName => {
querySet[fragmentName] = null;
});
Object.keys(route.queries).forEach(queryName => {
if (!Component.hasFragment(queryName)) {
warning(
false,
'Relay.QL: query `%s.queries.%s` is invalid, expected fragment ' +
'`%s.fragments.%s` to be defined.',
route.name,
queryName,
Component.displayName,
queryName
);
return;
}
var queryBuilder = route.queries[queryName];
if (queryBuilder) |
querySet[queryName] = null;
});
cache[cacheKey] = querySet;
return querySet;
}
module.exports = RelayProfiler.instrument('Relay.getQueries', getRelayQueries);
| {
var concreteQuery = buildRQL.Query(
queryBuilder,
Component,
queryName,
route.params
);
invariant(
concreteQuery !== undefined,
'Relay.QL: query `%s.queries.%s` is invalid, a typical query is ' +
'defined using: () => Relay.QL`query { ... }`.',
route.name,
queryName
);
if (concreteQuery) {
var rootQuery = RelayQuery.Node.createQuery(
concreteQuery,
RelayMetaRoute.get(route.name),
route.params
);
var rootCall = rootQuery.getRootCall();
if (rootCall.value !== undefined) {
querySet[queryName] = rootQuery;
return;
}
}
} | conditional_block |
hello-js.js | var stream = require('stream'),
util = require('util'),
Transform = stream.Transform;
function HelloJsTransform () | ;
util.inherits(HelloJsTransform, Transform);
HelloJsTransform.prototype._transform = function (chunk, enc, cb) {
if (this.data != '') {
this.push(this.data);
this.data = '';
}
for (var i = 0, char; i < chunk.length; i++) {
char = chunk[i];
if (this.startedReading) {
this.push(',')
}
this.push(char.toString());
this.startedReading = true;
}
cb();
};
HelloJsTransform.prototype._flush = function (cb) {
this.push(this.endData);
cb();
};
module.exports = HelloJsTransform;
| {
if (!(this instanceof HelloJsTransform)) {
return new HelloJsTransform();
}
this.data = 'var str=[';
this.endData = '];\nstr.forEach(function (char) {\n' +
' process.stdout.write(String.fromCharCode(char));\n' +
'});';
this.startedReading = false;
Transform.call(this);
} | identifier_body |
hello-js.js | var stream = require('stream'),
util = require('util'),
Transform = stream.Transform;
function HelloJsTransform () {
if (!(this instanceof HelloJsTransform)) {
return new HelloJsTransform();
}
this.data = 'var str=[';
this.endData = '];\nstr.forEach(function (char) {\n' +
' process.stdout.write(String.fromCharCode(char));\n' +
'});';
this.startedReading = false;
Transform.call(this);
};
util.inherits(HelloJsTransform, Transform);
HelloJsTransform.prototype._transform = function (chunk, enc, cb) {
if (this.data != '') {
this.push(this.data);
this.data = '';
}
for (var i = 0, char; i < chunk.length; i++) |
cb();
};
HelloJsTransform.prototype._flush = function (cb) {
this.push(this.endData);
cb();
};
module.exports = HelloJsTransform;
| {
char = chunk[i];
if (this.startedReading) {
this.push(',')
}
this.push(char.toString());
this.startedReading = true;
} | conditional_block |
hello-js.js | var stream = require('stream'),
util = require('util'),
Transform = stream.Transform;
function | () {
if (!(this instanceof HelloJsTransform)) {
return new HelloJsTransform();
}
this.data = 'var str=[';
this.endData = '];\nstr.forEach(function (char) {\n' +
' process.stdout.write(String.fromCharCode(char));\n' +
'});';
this.startedReading = false;
Transform.call(this);
};
util.inherits(HelloJsTransform, Transform);
HelloJsTransform.prototype._transform = function (chunk, enc, cb) {
if (this.data != '') {
this.push(this.data);
this.data = '';
}
for (var i = 0, char; i < chunk.length; i++) {
char = chunk[i];
if (this.startedReading) {
this.push(',')
}
this.push(char.toString());
this.startedReading = true;
}
cb();
};
HelloJsTransform.prototype._flush = function (cb) {
this.push(this.endData);
cb();
};
module.exports = HelloJsTransform;
| HelloJsTransform | identifier_name |
hello-js.js | var stream = require('stream'),
util = require('util'),
Transform = stream.Transform;
| this.data = 'var str=[';
this.endData = '];\nstr.forEach(function (char) {\n' +
' process.stdout.write(String.fromCharCode(char));\n' +
'});';
this.startedReading = false;
Transform.call(this);
};
util.inherits(HelloJsTransform, Transform);
HelloJsTransform.prototype._transform = function (chunk, enc, cb) {
if (this.data != '') {
this.push(this.data);
this.data = '';
}
for (var i = 0, char; i < chunk.length; i++) {
char = chunk[i];
if (this.startedReading) {
this.push(',')
}
this.push(char.toString());
this.startedReading = true;
}
cb();
};
HelloJsTransform.prototype._flush = function (cb) {
this.push(this.endData);
cb();
};
module.exports = HelloJsTransform; | function HelloJsTransform () {
if (!(this instanceof HelloJsTransform)) {
return new HelloJsTransform();
}
| random_line_split |
EntryWidgetPieAlertsXDR.py | import demistomock as demisto
incident = demisto.incidents()
data = {
"Type": 17,
"ContentsFormat": "pie",
"Contents": {
"stats": [
{
"data": [ | "name": "high",
"label": "incident.severity.high",
"color": "rgb(255, 23, 68)"
},
{
"data": [
int(incident[0].get('CustomFields', {}).get('xdrmediumseverityalertcount', 0))
],
"groups": None,
"name": "medium",
"label": "incident.severity.medium",
"color": "rgb(255, 144, 0)"
},
{
"data": [
int(incident[0].get('CustomFields', {}).get('xdrlowseverityalertcount', 0))
],
"groups": None,
"name": "low",
"label": "incident.severity.low",
"color": "rgb(0, 205, 51)"
},
],
"params": {
"layout": "horizontal"
}
}
}
demisto.results(data) | int(incident[0].get('CustomFields', {}).get('xdrhighseverityalertcount', 0))
],
"groups": None, | random_line_split |
power.model.ts | export class Power{
public id:string;
public code:string;
public url:string;
public title:string;
public explain:string;
public menuId:string;
public type:string;
public isValid:boolean;
public isChecked:boolean=false;
public operation:Array<string>=new Array<string>();
public operationChecked:Array<string>=new Array<string>();
public operationMap:Array<string>=new Array<string>();
public checkboxList:Array<CheckboxList>=[]
}
export class RolePower extends Power{
public roleId:string;
}
export class NavMenu{
public id:string;
public code:string;
public url:string;
public isValid:boolean=true;
public isLeaf:boolean=false;
public title:string;
public isChecked:boolean=false;
}
export class PowerFun{
public isSHOW:boolean;
public isADD:boolean;
public isUPDATE:boolean;
public isDELETE:boolean;
public isCHECK:boolean;
}
export class RoleInfo{
public id:string;
public roleName:string;
public name:string;
public desc:string;
}
export class Tree{
public id:string;
public pid:string;
public name:string;
public isLeaf:boolean;
public IsSubMenu:boolean;
public subTrees:Array<Tree>=[];
constructor(id:string,pid:string,name:string,isLeaf:boolean){
this.id=id;
this.pid=pid; | this.name=name;
this.isLeaf=isLeaf;
}
} | random_line_split | |
power.model.ts | export class Power{
public id:string;
public code:string;
public url:string;
public title:string;
public explain:string;
public menuId:string;
public type:string;
public isValid:boolean;
public isChecked:boolean=false;
public operation:Array<string>=new Array<string>();
public operationChecked:Array<string>=new Array<string>();
public operationMap:Array<string>=new Array<string>();
public checkboxList:Array<CheckboxList>=[]
}
export class RolePower extends Power{
public roleId:string;
}
export class NavMenu{
public id:string;
public code:string;
public url:string;
public isValid:boolean=true;
public isLeaf:boolean=false;
public title:string;
public isChecked:boolean=false;
}
export class PowerFun{
public isSHOW:boolean;
public isADD:boolean;
public isUPDATE:boolean;
public isDELETE:boolean;
public isCHECK:boolean;
}
export class | {
public id:string;
public roleName:string;
public name:string;
public desc:string;
}
export class Tree{
public id:string;
public pid:string;
public name:string;
public isLeaf:boolean;
public IsSubMenu:boolean;
public subTrees:Array<Tree>=[];
constructor(id:string,pid:string,name:string,isLeaf:boolean){
this.id=id;
this.pid=pid;
this.name=name;
this.isLeaf=isLeaf;
}
} | RoleInfo | identifier_name |
patterngenerator.py | """
PatternGenerator abstract class, basic example concrete class, and
multichannel support.
PatternGenerators support both single-channel patterns, i.e. bare
arrays, and multiple channels, such as for color images. See
``PatternGenerator.__call__`` and ``PatternGenerator.channels`` for
more information.
"""
import numpy as np
from numpy import pi
import collections
import param
from param.parameterized import ParamOverrides
from holoviews import HoloMap, Image, RGB, Dimension
from holoviews.core import BoundingBox, BoundingRegionParameter, SheetCoordinateSystem
from .transferfn import TransferFn
# CEBALERT: PatternGenerator has become a bit of a monster abstract
# class. Can it be split into the minimum required to specify the
# interface, with a subclass implementing the rest (this subclass
# still being above the rest of the PatternGenerators)? We want to
# make it easy to add new types of PatternGenerator that don't match
# the assumptions of the current ones (OneDPowerSpectrum is an example
# of a PG that doesn't match the current assumptions), but still lets
# them be used like the current ones.
# (PatternGenerator-->TwoDPatternGenerator?)
# JLALERT: PatternGenerator should have
# override_plasticity_state/restore_plasticity_state functions which
# can override the plasticity of any output_fn that has state, in case
# anyone ever uses such an object in a PatternGenerator. Will also
# need to support Composite patterns.
class PatternGenerator(param.Parameterized):
"""
A class hierarchy for callable objects that can generate 2D patterns.
Once initialized, PatternGenerators can be called to generate a
value or a matrix of values from a 2D function, typically
accepting at least x and y.
A PatternGenerator's Parameters can make use of Parameter's
precedence attribute to specify the order in which they should
appear, e.g. in a GUI. The precedence attribute has a nominal
range of 0.0 to 1.0, with ordering going from 0.0 (first) to 1.0
(last), but any value is allowed.
The orientation and layout of the pattern matrices is defined by
the SheetCoordinateSystem class, which see.
Note that not every parameter defined for a PatternGenerator will
be used by every subclass. For instance, a Constant pattern will
ignore the x, y, orientation, and size parameters, because the
pattern does not vary with any of those parameters. However,
those parameters are still defined for all PatternGenerators, even
Constant patterns, to allow PatternGenerators to be scaled, rotated,
translated, etc. uniformly.
"""
__abstract = True
bounds = BoundingRegionParameter(
default=BoundingBox(points=((-0.5,-0.5), (0.5,0.5))),precedence=-1,
doc="BoundingBox of the area in which the pattern is generated.")
xdensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the x direction.""")
ydensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the y direction.
Typically the same as the xdensity.""")
x = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.20,doc="""
X-coordinate location of pattern center.""")
y = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.21,doc="""
Y-coordinate location of pattern center.""")
z = param.ClassSelector(default=None, precedence=-1, class_=Dimension, doc="""
The Dimension object associated with the z-values generated by
the PatternGenerator . If None, uses the default set by
HoloViews.Image.""")
group = param.String(default='Pattern', precedence=-1, doc="""
The group name assigned to the returned HoloViews object.""")
position = param.Composite(attribs=['x','y'],precedence=-1,doc="""
Coordinates of location of pattern center.
Provides a convenient way to set the x and y parameters together
as a tuple (x,y), but shares the same actual storage as x and y
(and thus only position OR x and y need to be specified).""")
orientation = param.Number(default=0.0,softbounds=(0.0,2*pi),precedence=0.40,doc="""
Polar angle of pattern, i.e., the orientation in the Cartesian coordinate
system, with zero at 3 o'clock and increasing counterclockwise.""")
size = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,6.0),
precedence=0.30,doc="""Determines the overall size of the pattern.""")
scale = param.Number(default=1.0,softbounds=(0.0,2.0),precedence=0.10,doc="""
Multiplicative strength of input pattern, defaulting to 1.0""")
offset = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.11,doc="""
Additive offset to input pattern, defaulting to 0.0""")
mask = param.Parameter(default=None,precedence=-1,doc="""
Optional object (expected to be an array) with which to multiply the
pattern array after it has been created, before any output_fns are
applied. This can be used to shape the pattern.""")
# Note that the class type is overridden to PatternGenerator below
mask_shape = param.ClassSelector(param.Parameterized,default=None,precedence=0.06,doc="""
Optional PatternGenerator used to construct a mask to be applied to
the pattern.""")
output_fns = param.HookList(default=[], precedence=0.08,doc="""
Optional function(s) to apply to the pattern array after it has been created.
Can be used for normalization, thresholding, etc.""")
def __init__(self,**params):
super(PatternGenerator, self).__init__(**params)
self.set_matrix_dimensions(self.bounds, self.xdensity, self.ydensity)
def __call__(self,**params_to_override):
"""
Call the subclass's 'function' method on a rotated and scaled
coordinate system.
Creates and fills an array with the requested pattern. If
called without any params, uses the values for the Parameters
as currently set on the object. Otherwise, any params
specified override those currently set on the object.
"""
if 'output_fns' in params_to_override:
self.warning("Output functions specified through the call method will be ignored.")
p=ParamOverrides(self,params_to_override)
# CEBERRORALERT: position parameter is not currently
# supported. We should delete the position parameter or fix
# this.
#
# position=params_to_override.get('position',None) if position
# is not None: x,y = position
self._setup_xy(p.bounds,p.xdensity,p.ydensity,p.x,p.y,p.orientation)
fn_result = self.function(p)
self._apply_mask(p,fn_result)
if p.scale != 1.0:
result = p.scale * fn_result
else:
result = fn_result
if p.offset != 0.0:
result += p.offset
for of in p.output_fns:
of(result)
return result
def __getitem__(self, coords):
value_dims = {}
if self.num_channels() in [0, 1]:
raster, data = Image, self()
value_dims = {'value_dimensions':[self.z]} if self.z else value_dims
elif self.num_channels() in [3,4]:
|
image = raster(data, bounds=self.bounds,
**dict(group=self.group,
label=self.__class__.__name__, **value_dims))
# Works round a bug fixed shortly after HoloViews 1.0.0 release
return image if isinstance(coords, slice) else image.__getitem__(coords)
def channels(self, use_cached=False, **params_to_override):
"""
Channels() adds a shared interface for single channel and
multichannel structures. It will always return an ordered
dict: its first element is the single channel of the pattern
(if single-channel) or the channel average (if multichannel);
the successive elements are the individual channels' arrays
(key: 0,1,..N-1).
"""
return collections.OrderedDict({ 'default':self.__call__(**params_to_override) })
def num_channels(self):
"""
Query the number of channels implemented by the
PatternGenerator. In case of single-channel generators this
will return 1; in case of multichannel, it will return the
number of channels (eg, in the case of RGB images it would
return '3', Red-Green-Blue, even though the OrderedDict
returned by channels() will have 4 elements -- the 3 channels
+ their average).
"""
return 1
def _setup_xy(self,bounds,xdensity,ydensity,x,y,orientation):
"""
Produce pattern coordinate matrices from the bounds and
density (or rows and cols), and transforms them according to
x, y, and orientation.
"""
self.debug("bounds=%s, xdensity=%s, ydensity=%s, x=%s, y=%s, orientation=%s",bounds,xdensity,ydensity,x,y,orientation)
# Generate vectors representing coordinates at which the pattern
# will be sampled.
# CB: note to myself - use slice_._scs if supplied?
x_points,y_points = SheetCoordinateSystem(bounds,xdensity,ydensity).sheetcoordinates_of_matrixidx()
# Generate matrices of x and y sheet coordinates at which to
# sample pattern, at the correct orientation
self.pattern_x, self.pattern_y = self._create_and_rotate_coordinate_arrays(x_points-x,y_points-y,orientation)
def function(self,p):
"""
Function to draw a pattern that will then be scaled and rotated.
Instead of implementing __call__ directly, PatternGenerator
subclasses will typically implement this helper function used
by __call__, because that way they can let __call__ handle the
scaling and rotation for them. Alternatively, __call__ itself
can be reimplemented entirely by a subclass (e.g. if it does
not need to do any scaling or rotation), in which case this
function will be ignored.
"""
raise NotImplementedError
def _create_and_rotate_coordinate_arrays(self, x, y, orientation):
"""
Create pattern matrices from x and y vectors, and rotate them
to the specified orientation.
"""
# Using this two-liner requires that x increase from left to
# right and y decrease from left to right; I don't think it
# can be rewritten in so little code otherwise - but please
# prove me wrong.
pattern_y = np.subtract.outer(np.cos(orientation)*y, np.sin(orientation)*x)
pattern_x = np.add.outer(np.sin(orientation)*y, np.cos(orientation)*x)
return pattern_x, pattern_y
def _apply_mask(self,p,mat):
"""Create (if necessary) and apply the mask to the given matrix mat."""
mask = p.mask
ms=p.mask_shape
if ms is not None:
mask = ms(x=p.x+p.size*(ms.x*np.cos(p.orientation)-ms.y*np.sin(p.orientation)),
y=p.y+p.size*(ms.x*np.sin(p.orientation)+ms.y*np.cos(p.orientation)),
orientation=ms.orientation+p.orientation,size=ms.size*p.size,
bounds=p.bounds,ydensity=p.ydensity,xdensity=p.xdensity)
if mask is not None:
mat*=mask
def set_matrix_dimensions(self, bounds, xdensity, ydensity):
"""
Change the dimensions of the matrix into which the pattern
will be drawn. Users of this class should call this method
rather than changing the bounds, xdensity, and ydensity
parameters directly. Subclasses can override this method to
update any internal data structures that may depend on the
matrix dimensions.
"""
self.bounds = bounds
self.xdensity = xdensity
self.ydensity = ydensity
scs = SheetCoordinateSystem(bounds, xdensity, ydensity)
for of in self.output_fns:
if isinstance(of, TransferFn):
of.initialize(SCS=scs, shape=scs.shape)
def state_push(self):
"Save the state of the output functions, to be restored with state_pop."
for of in self.output_fns:
if hasattr(of,'state_push'):
of.state_push()
super(PatternGenerator, self).state_push()
def state_pop(self):
"Restore the state of the output functions saved by state_push."
for of in self.output_fns:
if hasattr(of,'state_pop'):
of.state_pop()
super(PatternGenerator, self).state_pop()
def anim(self, duration, offset=0, timestep=1,
label=None, unit=None,
time_fn=param.Dynamic.time_fn):
"""
duration: The temporal duration to animate in the units
defined on the global time function.
offset: The temporal offset from which the animation is
generated given the supplied pattern
timestep: The time interval between successive frames. The
duration must be an exact multiple of the timestep.
label: A label string to override the label of the global time
function (if not None).
unit: The unit string to override the unit value of the global
time function (if not None).
time_fn: The global time function object that is shared across
the time-varying objects that are being sampled.
Note that the offset, timestep and time_fn only affect
patterns parameterized by time-dependent number
generators. Otherwise, the frames are generated by successive
call to the pattern which may or may not be varying (e.g to
view the patterns contained within a Selector).
"""
frames = (duration // timestep) + 1
if duration % timestep != 0:
raise ValueError("The duration value must be an exact multiple of the timestep.")
if label is None:
label = time_fn.label if hasattr(time_fn, 'label') else 'Time'
unit = time_fn.unit if (not unit and hasattr(time_fn, 'unit')) else unit
vmap = HoloMap(kdims=[Dimension(label, unit=unit if unit else '')])
self.state_push()
with time_fn as t:
t(offset)
for i in range(frames):
vmap[t()] = self[:]
t += timestep
self.state_pop()
return vmap
## Support for compositional expressions of PatternGenerator objects
def _promote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [self,other]
def _rpromote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [other,self]
# Could define any of Python's operators here, esp. if they have operator or ufunc equivalents
def __add__ (self,other): return Composite(generators=self._promote(other),operator=np.add)
def __sub__ (self,other): return Composite(generators=self._promote(other),operator=np.subtract)
def __mul__ (self,other): return Composite(generators=self._promote(other),operator=np.multiply)
def __mod__ (self,other): return Composite(generators=self._promote(other),operator=np.mod)
def __pow__ (self,other): return Composite(generators=self._promote(other),operator=np.power)
def __div__ (self,other): return Composite(generators=self._promote(other),operator=np.divide)
def __and__ (self,other): return Composite(generators=self._promote(other),operator=np.minimum)
def __or__ (self,other): return Composite(generators=self._promote(other),operator=np.maximum)
def __radd__ (self,other): return Composite(generators=self._rpromote(other),operator=np.add)
def __rsub__ (self,other): return Composite(generators=self._rpromote(other),operator=np.subtract)
def __rmul__ (self,other): return Composite(generators=self._rpromote(other),operator=np.multiply)
def __rmod__ (self,other): return Composite(generators=self._rpromote(other),operator=np.mod)
def __rpow__ (self,other): return Composite(generators=self._rpromote(other),operator=np.power)
def __rdiv__ (self,other): return Composite(generators=self._rpromote(other),operator=np.divide)
def __rand__ (self,other): return Composite(generators=self._rpromote(other),operator=np.minimum)
def __ror__ (self,other): return Composite(generators=self._rpromote(other),operator=np.maximum)
def __neg__ (self): return Composite(generators=[Constant(scale=0),self],operator=np.subtract)
class abs_first(object):
@staticmethod
def reduce(x): return np.abs(x[0])
def __abs__ (self): return Composite(generators=[self],operator=self.abs_first)
def pil(self, **params_to_override):
"""Returns a PIL image for this pattern, overriding parameters if provided."""
from PIL.Image import fromarray
nchans = self.num_channels()
if nchans in [0, 1]:
mode, arr = None, self(**params_to_override)
arr = (255.0 / arr.max() * (arr - arr.min())).astype(np.uint8)
elif nchans in [3,4]:
mode = 'RGB' if nchans==3 else 'RGBA'
arr = np.dstack(self.channels(**params_to_override).values()[1:])
arr = (255.0*arr).astype(np.uint8)
else:
raise ValueError("Unsupported number of channels")
return fromarray(arr, mode)
# Override class type; must be set here rather than when mask_shape is declared,
# to avoid referring to class not yet constructed
PatternGenerator.params('mask_shape').class_=PatternGenerator
# Trivial example of a PatternGenerator, provided for when a default is
# needed. The other concrete PatternGenerator classes are stored
# elsewhere, to be imported as needed.
class Constant(PatternGenerator):
"""Constant pattern generator, i.e., a solid, uniform field of the same value."""
# The orientation is ignored, so we don't show it in
# auto-generated lists of parameters (e.g. in the GUI)
orientation = param.Number(precedence=-1)
# Optimization: We use a simpler __call__ method here to skip the
# coordinate transformations (which would have no effect anyway)
def __call__(self,**params_to_override):
p = ParamOverrides(self,params_to_override)
shape = SheetCoordinateSystem(p.bounds,p.xdensity,p.ydensity).shape
result = p.scale*np.ones(shape, np.float)+p.offset
self._apply_mask(p,result)
for of in p.output_fns:
of(result)
return result
class CompositeBase(PatternGenerator):
"""
PatternGenerator that combines or selects from a list of other
PatternGenerators.
"""
__abstract=True
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None),precedence=0.97, doc="""
List of patterns to combine or select from. The default pattern is a blank pattern,
and thus should be overridden for any useful work.""")
size = param.Number(default=1.0,doc="""Scaling factor applied to all sub-patterns.""")
class Composite(CompositeBase):
"""
PatternGenerator that accepts a list of other PatternGenerators.
To create a new pattern, asks each of the PatternGenerators in the
list to create a pattern, then it combines the patterns to create
a single pattern that it returns.
"""
# The Accum_Replace operator from LISSOM is not yet supported,
# but it should be added once PatternGenerator bounding boxes
# are respected and/or GenericImage patterns support transparency.
operator = param.Parameter(np.maximum,precedence=0.98,doc="""
Binary Numpy function used to combine the individual patterns.
Any binary Numpy array "ufunc" returning the same
type of array as the operands and supporting the reduce
operator is allowed here. Supported ufuncs include::
add
subtract
multiply
divide
maximum
minimum
remainder
power
The most useful ones are probably add and maximum, but there
are uses for at least some of the others as well (e.g. to
remove pieces of other patterns).
You can also write your own operators, by making a class that
has a static method named "reduce" that returns an array of the
same size and type as the arrays in the list. For example::
class return_first(object):
@staticmethod
def reduce(x):
return x[0]
""")
def _advance_pattern_generators(self,p):
"""
Subclasses can override this method to provide constraints on
the values of generators' parameters and/or eliminate
generators from this list if necessary.
"""
return p.generators
def state_push(self):
"""
Push the state of all generators
"""
super(Composite,self).state_push()
for gen in self.generators:
gen.state_push()
def state_pop(self):
"""
Pop the state of all generators
"""
super(Composite,self).state_pop()
for gen in self.generators:
gen.state_pop()
# JABALERT: To support large numbers of patterns on a large input region,
# should be changed to evaluate each pattern in a small box, and then
# combine them at the full Composite Bounding box size.
def function(self,p):
"""Constructs combined pattern out of the individual ones."""
generators = self._advance_pattern_generators(p)
assert hasattr(p.operator,'reduce'),repr(p.operator)+" does not support 'reduce'."
# CEBALERT: mask gets applied by all PGs including the Composite itself
# (leads to redundant calculations in current lissom_oo_or usage, but
# will lead to problems/limitations in the future).
patterns = [pg(xdensity=p.xdensity,ydensity=p.ydensity,
bounds=p.bounds,mask=p.mask,
x=p.x+p.size*(pg.x*np.cos(p.orientation)- pg.y*np.sin(p.orientation)),
y=p.y+p.size*(pg.x*np.sin(p.orientation)+ pg.y*np.cos(p.orientation)),
orientation=pg.orientation+p.orientation,
size=pg.size*p.size)
for pg in generators]
image_array = p.operator.reduce(patterns)
return image_array
class ChannelTransform(param.Parameterized):
"""
A ChannelTransform is a callable object that takes channels as
input (an ordered dictionary of arrays) and transforms their
contents in some way before returning them.
"""
__abstract = True
def __call__(self, channels):
raise NotImplementedError
# Example of a ChannelTransform
class CorrelateChannels(ChannelTransform):
"""
Correlate channels by mixing a fraction of one channel into another.
"""
from_channel = param.Number(default=1, doc="""
Name of the channel to take data from.""")
to_channel = param.Number(default=2, doc="""
Name of the channel to change data of.""")
strength = param.Number(default=0, doc="""
Strength of the correlation to add, with 0 being no change,
and 1.0 overwriting to_channel with from_channel.""")
def __call__(self, channel_data):
channel_data[self.to_channel] = \
self.strength*channel_data[self.from_channel] + \
(1-self.strength)*channel_data[self.to_channel]
return channel_data
class ChannelGenerator(PatternGenerator):
"""
Abstract base class for patterns supporting multiple channels natively.
"""
__abstract = True
channel_transforms = param.HookList(class_=ChannelTransform,default=[],doc="""
Optional functions to apply post processing to the set of channels.""")
def __init__(self, **params):
self._original_channel_data = [] # channel data before processing
self._channel_data = [] # channel data after processing
super(ChannelGenerator, self).__init__(**params)
def channels(self, use_cached=False, **params_to_override):
res = collections.OrderedDict()
if not use_cached:
default = self(**params_to_override)
res['default'] = default
else:
res['default'] = None
for i in range(len(self._channel_data)):
res[i] = self._channel_data[i]
return res
def num_channels(self):
return len(self._channel_data)
class ComposeChannels(ChannelGenerator):
"""
Create a multi-channel PatternGenerator from a list of
PatternGenerators, with the specified channel_transforms applied.
"""
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None), doc="""
List of patterns to use for each channel. Generators which already have more than one
channel will only contribute to a single channel of ComposeChannels.""")
def __init__(self,**params):
super(ComposeChannels,self).__init__(**params)
for i in range(len(self.generators)):
self._channel_data.append( None )
def __call__(self,**params):
# Generates all channels, then returns the default channel
p = param.ParamOverrides(self,params)
params['xdensity']=p.xdensity
params['ydensity']=p.ydensity
params['bounds']=p.bounds
# (not **p)
for i in range(len(p.generators)):
self._channel_data[i] = p.generators[i]( **params )
for c in self.channel_transforms:
self._channel_data = c(self._channel_data)
return sum(act for act in self._channel_data)/len(self._channel_data)
| raster = RGB
data = np.dstack(self.channels().values()[1:]) | conditional_block |
patterngenerator.py | """
PatternGenerator abstract class, basic example concrete class, and
multichannel support.
PatternGenerators support both single-channel patterns, i.e. bare
arrays, and multiple channels, such as for color images. See
``PatternGenerator.__call__`` and ``PatternGenerator.channels`` for
more information.
"""
import numpy as np
from numpy import pi
import collections
import param
from param.parameterized import ParamOverrides
from holoviews import HoloMap, Image, RGB, Dimension
from holoviews.core import BoundingBox, BoundingRegionParameter, SheetCoordinateSystem
from .transferfn import TransferFn
# CEBALERT: PatternGenerator has become a bit of a monster abstract
# class. Can it be split into the minimum required to specify the
# interface, with a subclass implementing the rest (this subclass
# still being above the rest of the PatternGenerators)? We want to
# make it easy to add new types of PatternGenerator that don't match
# the assumptions of the current ones (OneDPowerSpectrum is an example
# of a PG that doesn't match the current assumptions), but still lets
# them be used like the current ones.
# (PatternGenerator-->TwoDPatternGenerator?)
# JLALERT: PatternGenerator should have
# override_plasticity_state/restore_plasticity_state functions which
# can override the plasticity of any output_fn that has state, in case
# anyone ever uses such an object in a PatternGenerator. Will also
# need to support Composite patterns.
class PatternGenerator(param.Parameterized):
"""
A class hierarchy for callable objects that can generate 2D patterns.
Once initialized, PatternGenerators can be called to generate a
value or a matrix of values from a 2D function, typically
accepting at least x and y.
A PatternGenerator's Parameters can make use of Parameter's
precedence attribute to specify the order in which they should
appear, e.g. in a GUI. The precedence attribute has a nominal
range of 0.0 to 1.0, with ordering going from 0.0 (first) to 1.0
(last), but any value is allowed.
The orientation and layout of the pattern matrices is defined by
the SheetCoordinateSystem class, which see.
Note that not every parameter defined for a PatternGenerator will
be used by every subclass. For instance, a Constant pattern will
ignore the x, y, orientation, and size parameters, because the
pattern does not vary with any of those parameters. However,
those parameters are still defined for all PatternGenerators, even
Constant patterns, to allow PatternGenerators to be scaled, rotated,
translated, etc. uniformly.
"""
__abstract = True
bounds = BoundingRegionParameter(
default=BoundingBox(points=((-0.5,-0.5), (0.5,0.5))),precedence=-1,
doc="BoundingBox of the area in which the pattern is generated.")
xdensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the x direction.""")
ydensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the y direction.
Typically the same as the xdensity.""")
x = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.20,doc="""
X-coordinate location of pattern center.""")
y = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.21,doc="""
Y-coordinate location of pattern center.""")
z = param.ClassSelector(default=None, precedence=-1, class_=Dimension, doc="""
The Dimension object associated with the z-values generated by
the PatternGenerator . If None, uses the default set by
HoloViews.Image.""")
group = param.String(default='Pattern', precedence=-1, doc="""
The group name assigned to the returned HoloViews object.""")
position = param.Composite(attribs=['x','y'],precedence=-1,doc="""
Coordinates of location of pattern center.
Provides a convenient way to set the x and y parameters together
as a tuple (x,y), but shares the same actual storage as x and y
(and thus only position OR x and y need to be specified).""")
orientation = param.Number(default=0.0,softbounds=(0.0,2*pi),precedence=0.40,doc="""
Polar angle of pattern, i.e., the orientation in the Cartesian coordinate
system, with zero at 3 o'clock and increasing counterclockwise.""")
size = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,6.0),
precedence=0.30,doc="""Determines the overall size of the pattern.""")
scale = param.Number(default=1.0,softbounds=(0.0,2.0),precedence=0.10,doc="""
Multiplicative strength of input pattern, defaulting to 1.0""")
offset = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.11,doc="""
Additive offset to input pattern, defaulting to 0.0""")
mask = param.Parameter(default=None,precedence=-1,doc="""
Optional object (expected to be an array) with which to multiply the
pattern array after it has been created, before any output_fns are
applied. This can be used to shape the pattern.""")
# Note that the class type is overridden to PatternGenerator below
mask_shape = param.ClassSelector(param.Parameterized,default=None,precedence=0.06,doc="""
Optional PatternGenerator used to construct a mask to be applied to
the pattern.""")
output_fns = param.HookList(default=[], precedence=0.08,doc="""
Optional function(s) to apply to the pattern array after it has been created.
Can be used for normalization, thresholding, etc.""")
def __init__(self,**params):
super(PatternGenerator, self).__init__(**params)
self.set_matrix_dimensions(self.bounds, self.xdensity, self.ydensity)
def __call__(self,**params_to_override):
"""
Call the subclass's 'function' method on a rotated and scaled
coordinate system.
Creates and fills an array with the requested pattern. If
called without any params, uses the values for the Parameters
as currently set on the object. Otherwise, any params
specified override those currently set on the object.
"""
if 'output_fns' in params_to_override:
self.warning("Output functions specified through the call method will be ignored.")
p=ParamOverrides(self,params_to_override)
# CEBERRORALERT: position parameter is not currently
# supported. We should delete the position parameter or fix
# this.
#
# position=params_to_override.get('position',None) if position
# is not None: x,y = position
self._setup_xy(p.bounds,p.xdensity,p.ydensity,p.x,p.y,p.orientation)
fn_result = self.function(p)
self._apply_mask(p,fn_result)
if p.scale != 1.0:
result = p.scale * fn_result
else:
result = fn_result
if p.offset != 0.0:
result += p.offset
for of in p.output_fns:
of(result)
return result
def __getitem__(self, coords):
value_dims = {}
if self.num_channels() in [0, 1]:
raster, data = Image, self()
value_dims = {'value_dimensions':[self.z]} if self.z else value_dims
elif self.num_channels() in [3,4]:
raster = RGB
data = np.dstack(self.channels().values()[1:])
image = raster(data, bounds=self.bounds,
**dict(group=self.group,
label=self.__class__.__name__, **value_dims))
# Works round a bug fixed shortly after HoloViews 1.0.0 release
return image if isinstance(coords, slice) else image.__getitem__(coords)
def channels(self, use_cached=False, **params_to_override):
"""
Channels() adds a shared interface for single channel and
multichannel structures. It will always return an ordered
dict: its first element is the single channel of the pattern
(if single-channel) or the channel average (if multichannel);
the successive elements are the individual channels' arrays
(key: 0,1,..N-1).
"""
return collections.OrderedDict({ 'default':self.__call__(**params_to_override) })
def num_channels(self):
"""
Query the number of channels implemented by the
PatternGenerator. In case of single-channel generators this
will return 1; in case of multichannel, it will return the
number of channels (eg, in the case of RGB images it would
return '3', Red-Green-Blue, even though the OrderedDict
returned by channels() will have 4 elements -- the 3 channels
+ their average).
"""
return 1
def _setup_xy(self,bounds,xdensity,ydensity,x,y,orientation):
"""
Produce pattern coordinate matrices from the bounds and
density (or rows and cols), and transforms them according to
x, y, and orientation.
"""
self.debug("bounds=%s, xdensity=%s, ydensity=%s, x=%s, y=%s, orientation=%s",bounds,xdensity,ydensity,x,y,orientation)
# Generate vectors representing coordinates at which the pattern
# will be sampled.
# CB: note to myself - use slice_._scs if supplied?
x_points,y_points = SheetCoordinateSystem(bounds,xdensity,ydensity).sheetcoordinates_of_matrixidx()
# Generate matrices of x and y sheet coordinates at which to
# sample pattern, at the correct orientation
self.pattern_x, self.pattern_y = self._create_and_rotate_coordinate_arrays(x_points-x,y_points-y,orientation)
def function(self,p):
"""
Function to draw a pattern that will then be scaled and rotated.
Instead of implementing __call__ directly, PatternGenerator
subclasses will typically implement this helper function used
by __call__, because that way they can let __call__ handle the
scaling and rotation for them. Alternatively, __call__ itself
can be reimplemented entirely by a subclass (e.g. if it does
not need to do any scaling or rotation), in which case this
function will be ignored.
"""
raise NotImplementedError
def _create_and_rotate_coordinate_arrays(self, x, y, orientation):
"""
Create pattern matrices from x and y vectors, and rotate them
to the specified orientation.
"""
# Using this two-liner requires that x increase from left to
# right and y decrease from left to right; I don't think it
# can be rewritten in so little code otherwise - but please
# prove me wrong.
pattern_y = np.subtract.outer(np.cos(orientation)*y, np.sin(orientation)*x)
pattern_x = np.add.outer(np.sin(orientation)*y, np.cos(orientation)*x)
return pattern_x, pattern_y
def _apply_mask(self,p,mat):
"""Create (if necessary) and apply the mask to the given matrix mat."""
mask = p.mask
ms=p.mask_shape
if ms is not None:
mask = ms(x=p.x+p.size*(ms.x*np.cos(p.orientation)-ms.y*np.sin(p.orientation)),
y=p.y+p.size*(ms.x*np.sin(p.orientation)+ms.y*np.cos(p.orientation)),
orientation=ms.orientation+p.orientation,size=ms.size*p.size,
bounds=p.bounds,ydensity=p.ydensity,xdensity=p.xdensity)
if mask is not None:
mat*=mask
def set_matrix_dimensions(self, bounds, xdensity, ydensity):
"""
Change the dimensions of the matrix into which the pattern
will be drawn. Users of this class should call this method
rather than changing the bounds, xdensity, and ydensity
parameters directly. Subclasses can override this method to
update any internal data structures that may depend on the
matrix dimensions.
"""
self.bounds = bounds
self.xdensity = xdensity
self.ydensity = ydensity
scs = SheetCoordinateSystem(bounds, xdensity, ydensity)
for of in self.output_fns:
if isinstance(of, TransferFn):
of.initialize(SCS=scs, shape=scs.shape)
def state_push(self):
"Save the state of the output functions, to be restored with state_pop."
for of in self.output_fns:
if hasattr(of,'state_push'):
of.state_push()
super(PatternGenerator, self).state_push()
def state_pop(self):
"Restore the state of the output functions saved by state_push."
for of in self.output_fns:
if hasattr(of,'state_pop'):
of.state_pop()
super(PatternGenerator, self).state_pop()
def anim(self, duration, offset=0, timestep=1,
label=None, unit=None,
time_fn=param.Dynamic.time_fn):
"""
duration: The temporal duration to animate in the units
defined on the global time function.
offset: The temporal offset from which the animation is
generated given the supplied pattern
timestep: The time interval between successive frames. The
duration must be an exact multiple of the timestep.
label: A label string to override the label of the global time
function (if not None).
unit: The unit string to override the unit value of the global
time function (if not None).
time_fn: The global time function object that is shared across
the time-varying objects that are being sampled.
Note that the offset, timestep and time_fn only affect
patterns parameterized by time-dependent number
generators. Otherwise, the frames are generated by successive
call to the pattern which may or may not be varying (e.g to
view the patterns contained within a Selector).
"""
frames = (duration // timestep) + 1
if duration % timestep != 0:
raise ValueError("The duration value must be an exact multiple of the timestep.")
if label is None:
label = time_fn.label if hasattr(time_fn, 'label') else 'Time'
unit = time_fn.unit if (not unit and hasattr(time_fn, 'unit')) else unit
vmap = HoloMap(kdims=[Dimension(label, unit=unit if unit else '')])
self.state_push()
with time_fn as t:
t(offset)
for i in range(frames):
vmap[t()] = self[:]
t += timestep
self.state_pop()
return vmap
## Support for compositional expressions of PatternGenerator objects
def _promote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [self,other]
def _rpromote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [other,self]
# Could define any of Python's operators here, esp. if they have operator or ufunc equivalents
def __add__ (self,other): return Composite(generators=self._promote(other),operator=np.add)
def __sub__ (self,other): return Composite(generators=self._promote(other),operator=np.subtract)
def __mul__ (self,other): return Composite(generators=self._promote(other),operator=np.multiply)
def __mod__ (self,other): return Composite(generators=self._promote(other),operator=np.mod)
def __pow__ (self,other): return Composite(generators=self._promote(other),operator=np.power)
def __div__ (self,other): return Composite(generators=self._promote(other),operator=np.divide)
def __and__ (self,other): return Composite(generators=self._promote(other),operator=np.minimum)
def __or__ (self,other): return Composite(generators=self._promote(other),operator=np.maximum)
def __radd__ (self,other): return Composite(generators=self._rpromote(other),operator=np.add)
def __rsub__ (self,other): return Composite(generators=self._rpromote(other),operator=np.subtract)
def __rmul__ (self,other): return Composite(generators=self._rpromote(other),operator=np.multiply)
def __rmod__ (self,other): return Composite(generators=self._rpromote(other),operator=np.mod)
def __rpow__ (self,other): return Composite(generators=self._rpromote(other),operator=np.power)
def __rdiv__ (self,other): return Composite(generators=self._rpromote(other),operator=np.divide)
def __rand__ (self,other): return Composite(generators=self._rpromote(other),operator=np.minimum)
def __ror__ (self,other): return Composite(generators=self._rpromote(other),operator=np.maximum)
def __neg__ (self): return Composite(generators=[Constant(scale=0),self],operator=np.subtract)
class abs_first(object):
@staticmethod
def reduce(x): return np.abs(x[0])
def __abs__ (self): return Composite(generators=[self],operator=self.abs_first)
def pil(self, **params_to_override):
"""Returns a PIL image for this pattern, overriding parameters if provided."""
from PIL.Image import fromarray
nchans = self.num_channels()
if nchans in [0, 1]:
mode, arr = None, self(**params_to_override)
arr = (255.0 / arr.max() * (arr - arr.min())).astype(np.uint8)
elif nchans in [3,4]:
mode = 'RGB' if nchans==3 else 'RGBA'
arr = np.dstack(self.channels(**params_to_override).values()[1:])
arr = (255.0*arr).astype(np.uint8)
else:
raise ValueError("Unsupported number of channels")
return fromarray(arr, mode)
# Override class type; must be set here rather than when mask_shape is declared,
# to avoid referring to class not yet constructed
PatternGenerator.params('mask_shape').class_=PatternGenerator
# Trivial example of a PatternGenerator, provided for when a default is
# needed. The other concrete PatternGenerator classes are stored
# elsewhere, to be imported as needed.
class Constant(PatternGenerator):
"""Constant pattern generator, i.e., a solid, uniform field of the same value."""
# The orientation is ignored, so we don't show it in
# auto-generated lists of parameters (e.g. in the GUI)
orientation = param.Number(precedence=-1)
# Optimization: We use a simpler __call__ method here to skip the
# coordinate transformations (which would have no effect anyway)
def __call__(self,**params_to_override):
p = ParamOverrides(self,params_to_override)
shape = SheetCoordinateSystem(p.bounds,p.xdensity,p.ydensity).shape
result = p.scale*np.ones(shape, np.float)+p.offset
self._apply_mask(p,result)
for of in p.output_fns:
of(result)
return result
class CompositeBase(PatternGenerator):
"""
PatternGenerator that combines or selects from a list of other
PatternGenerators.
"""
__abstract=True
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None),precedence=0.97, doc="""
List of patterns to combine or select from. The default pattern is a blank pattern,
and thus should be overridden for any useful work.""")
size = param.Number(default=1.0,doc="""Scaling factor applied to all sub-patterns.""")
class Composite(CompositeBase):
"""
PatternGenerator that accepts a list of other PatternGenerators.
To create a new pattern, asks each of the PatternGenerators in the
list to create a pattern, then it combines the patterns to create
a single pattern that it returns.
"""
# The Accum_Replace operator from LISSOM is not yet supported,
# but it should be added once PatternGenerator bounding boxes
# are respected and/or GenericImage patterns support transparency.
operator = param.Parameter(np.maximum,precedence=0.98,doc="""
Binary Numpy function used to combine the individual patterns.
Any binary Numpy array "ufunc" returning the same
type of array as the operands and supporting the reduce
operator is allowed here. Supported ufuncs include::
add
subtract
multiply
divide
maximum
minimum
remainder
power
The most useful ones are probably add and maximum, but there
are uses for at least some of the others as well (e.g. to
remove pieces of other patterns).
You can also write your own operators, by making a class that
has a static method named "reduce" that returns an array of the
same size and type as the arrays in the list. For example::
class return_first(object):
@staticmethod
def reduce(x):
return x[0]
""")
def _advance_pattern_generators(self,p):
"""
Subclasses can override this method to provide constraints on
the values of generators' parameters and/or eliminate
generators from this list if necessary.
"""
return p.generators
def state_push(self):
"""
Push the state of all generators
"""
super(Composite,self).state_push()
for gen in self.generators:
gen.state_push()
def state_pop(self):
"""
Pop the state of all generators
"""
super(Composite,self).state_pop()
for gen in self.generators:
gen.state_pop()
# JABALERT: To support large numbers of patterns on a large input region,
# should be changed to evaluate each pattern in a small box, and then
# combine them at the full Composite Bounding box size.
def function(self,p):
"""Constructs combined pattern out of the individual ones."""
generators = self._advance_pattern_generators(p)
assert hasattr(p.operator,'reduce'),repr(p.operator)+" does not support 'reduce'."
# CEBALERT: mask gets applied by all PGs including the Composite itself
# (leads to redundant calculations in current lissom_oo_or usage, but
# will lead to problems/limitations in the future).
patterns = [pg(xdensity=p.xdensity,ydensity=p.ydensity,
bounds=p.bounds,mask=p.mask,
x=p.x+p.size*(pg.x*np.cos(p.orientation)- pg.y*np.sin(p.orientation)),
y=p.y+p.size*(pg.x*np.sin(p.orientation)+ pg.y*np.cos(p.orientation)),
orientation=pg.orientation+p.orientation, |
class ChannelTransform(param.Parameterized):
"""
A ChannelTransform is a callable object that takes channels as
input (an ordered dictionary of arrays) and transforms their
contents in some way before returning them.
"""
__abstract = True
def __call__(self, channels):
raise NotImplementedError
# Example of a ChannelTransform
class CorrelateChannels(ChannelTransform):
"""
Correlate channels by mixing a fraction of one channel into another.
"""
from_channel = param.Number(default=1, doc="""
Name of the channel to take data from.""")
to_channel = param.Number(default=2, doc="""
Name of the channel to change data of.""")
strength = param.Number(default=0, doc="""
Strength of the correlation to add, with 0 being no change,
and 1.0 overwriting to_channel with from_channel.""")
def __call__(self, channel_data):
channel_data[self.to_channel] = \
self.strength*channel_data[self.from_channel] + \
(1-self.strength)*channel_data[self.to_channel]
return channel_data
class ChannelGenerator(PatternGenerator):
"""
Abstract base class for patterns supporting multiple channels natively.
"""
__abstract = True
channel_transforms = param.HookList(class_=ChannelTransform,default=[],doc="""
Optional functions to apply post processing to the set of channels.""")
def __init__(self, **params):
self._original_channel_data = [] # channel data before processing
self._channel_data = [] # channel data after processing
super(ChannelGenerator, self).__init__(**params)
def channels(self, use_cached=False, **params_to_override):
res = collections.OrderedDict()
if not use_cached:
default = self(**params_to_override)
res['default'] = default
else:
res['default'] = None
for i in range(len(self._channel_data)):
res[i] = self._channel_data[i]
return res
def num_channels(self):
return len(self._channel_data)
class ComposeChannels(ChannelGenerator):
"""
Create a multi-channel PatternGenerator from a list of
PatternGenerators, with the specified channel_transforms applied.
"""
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None), doc="""
List of patterns to use for each channel. Generators which already have more than one
channel will only contribute to a single channel of ComposeChannels.""")
def __init__(self,**params):
super(ComposeChannels,self).__init__(**params)
for i in range(len(self.generators)):
self._channel_data.append( None )
def __call__(self,**params):
# Generates all channels, then returns the default channel
p = param.ParamOverrides(self,params)
params['xdensity']=p.xdensity
params['ydensity']=p.ydensity
params['bounds']=p.bounds
# (not **p)
for i in range(len(p.generators)):
self._channel_data[i] = p.generators[i]( **params )
for c in self.channel_transforms:
self._channel_data = c(self._channel_data)
return sum(act for act in self._channel_data)/len(self._channel_data) | size=pg.size*p.size)
for pg in generators]
image_array = p.operator.reduce(patterns)
return image_array | random_line_split |
patterngenerator.py | """
PatternGenerator abstract class, basic example concrete class, and
multichannel support.
PatternGenerators support both single-channel patterns, i.e. bare
arrays, and multiple channels, such as for color images. See
``PatternGenerator.__call__`` and ``PatternGenerator.channels`` for
more information.
"""
import numpy as np
from numpy import pi
import collections
import param
from param.parameterized import ParamOverrides
from holoviews import HoloMap, Image, RGB, Dimension
from holoviews.core import BoundingBox, BoundingRegionParameter, SheetCoordinateSystem
from .transferfn import TransferFn
# CEBALERT: PatternGenerator has become a bit of a monster abstract
# class. Can it be split into the minimum required to specify the
# interface, with a subclass implementing the rest (this subclass
# still being above the rest of the PatternGenerators)? We want to
# make it easy to add new types of PatternGenerator that don't match
# the assumptions of the current ones (OneDPowerSpectrum is an example
# of a PG that doesn't match the current assumptions), but still lets
# them be used like the current ones.
# (PatternGenerator-->TwoDPatternGenerator?)
# JLALERT: PatternGenerator should have
# override_plasticity_state/restore_plasticity_state functions which
# can override the plasticity of any output_fn that has state, in case
# anyone ever uses such an object in a PatternGenerator. Will also
# need to support Composite patterns.
class PatternGenerator(param.Parameterized):
"""
A class hierarchy for callable objects that can generate 2D patterns.
Once initialized, PatternGenerators can be called to generate a
value or a matrix of values from a 2D function, typically
accepting at least x and y.
A PatternGenerator's Parameters can make use of Parameter's
precedence attribute to specify the order in which they should
appear, e.g. in a GUI. The precedence attribute has a nominal
range of 0.0 to 1.0, with ordering going from 0.0 (first) to 1.0
(last), but any value is allowed.
The orientation and layout of the pattern matrices is defined by
the SheetCoordinateSystem class, which see.
Note that not every parameter defined for a PatternGenerator will
be used by every subclass. For instance, a Constant pattern will
ignore the x, y, orientation, and size parameters, because the
pattern does not vary with any of those parameters. However,
those parameters are still defined for all PatternGenerators, even
Constant patterns, to allow PatternGenerators to be scaled, rotated,
translated, etc. uniformly.
"""
__abstract = True
bounds = BoundingRegionParameter(
default=BoundingBox(points=((-0.5,-0.5), (0.5,0.5))),precedence=-1,
doc="BoundingBox of the area in which the pattern is generated.")
xdensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the x direction.""")
ydensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the y direction.
Typically the same as the xdensity.""")
x = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.20,doc="""
X-coordinate location of pattern center.""")
y = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.21,doc="""
Y-coordinate location of pattern center.""")
z = param.ClassSelector(default=None, precedence=-1, class_=Dimension, doc="""
The Dimension object associated with the z-values generated by
the PatternGenerator . If None, uses the default set by
HoloViews.Image.""")
group = param.String(default='Pattern', precedence=-1, doc="""
The group name assigned to the returned HoloViews object.""")
position = param.Composite(attribs=['x','y'],precedence=-1,doc="""
Coordinates of location of pattern center.
Provides a convenient way to set the x and y parameters together
as a tuple (x,y), but shares the same actual storage as x and y
(and thus only position OR x and y need to be specified).""")
orientation = param.Number(default=0.0,softbounds=(0.0,2*pi),precedence=0.40,doc="""
Polar angle of pattern, i.e., the orientation in the Cartesian coordinate
system, with zero at 3 o'clock and increasing counterclockwise.""")
size = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,6.0),
precedence=0.30,doc="""Determines the overall size of the pattern.""")
scale = param.Number(default=1.0,softbounds=(0.0,2.0),precedence=0.10,doc="""
Multiplicative strength of input pattern, defaulting to 1.0""")
offset = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.11,doc="""
Additive offset to input pattern, defaulting to 0.0""")
mask = param.Parameter(default=None,precedence=-1,doc="""
Optional object (expected to be an array) with which to multiply the
pattern array after it has been created, before any output_fns are
applied. This can be used to shape the pattern.""")
# Note that the class type is overridden to PatternGenerator below
mask_shape = param.ClassSelector(param.Parameterized,default=None,precedence=0.06,doc="""
Optional PatternGenerator used to construct a mask to be applied to
the pattern.""")
output_fns = param.HookList(default=[], precedence=0.08,doc="""
Optional function(s) to apply to the pattern array after it has been created.
Can be used for normalization, thresholding, etc.""")
def __init__(self,**params):
super(PatternGenerator, self).__init__(**params)
self.set_matrix_dimensions(self.bounds, self.xdensity, self.ydensity)
def __call__(self,**params_to_override):
"""
Call the subclass's 'function' method on a rotated and scaled
coordinate system.
Creates and fills an array with the requested pattern. If
called without any params, uses the values for the Parameters
as currently set on the object. Otherwise, any params
specified override those currently set on the object.
"""
if 'output_fns' in params_to_override:
self.warning("Output functions specified through the call method will be ignored.")
p=ParamOverrides(self,params_to_override)
# CEBERRORALERT: position parameter is not currently
# supported. We should delete the position parameter or fix
# this.
#
# position=params_to_override.get('position',None) if position
# is not None: x,y = position
self._setup_xy(p.bounds,p.xdensity,p.ydensity,p.x,p.y,p.orientation)
fn_result = self.function(p)
self._apply_mask(p,fn_result)
if p.scale != 1.0:
result = p.scale * fn_result
else:
result = fn_result
if p.offset != 0.0:
result += p.offset
for of in p.output_fns:
of(result)
return result
def __getitem__(self, coords):
value_dims = {}
if self.num_channels() in [0, 1]:
raster, data = Image, self()
value_dims = {'value_dimensions':[self.z]} if self.z else value_dims
elif self.num_channels() in [3,4]:
raster = RGB
data = np.dstack(self.channels().values()[1:])
image = raster(data, bounds=self.bounds,
**dict(group=self.group,
label=self.__class__.__name__, **value_dims))
# Works round a bug fixed shortly after HoloViews 1.0.0 release
return image if isinstance(coords, slice) else image.__getitem__(coords)
def channels(self, use_cached=False, **params_to_override):
"""
Channels() adds a shared interface for single channel and
multichannel structures. It will always return an ordered
dict: its first element is the single channel of the pattern
(if single-channel) or the channel average (if multichannel);
the successive elements are the individual channels' arrays
(key: 0,1,..N-1).
"""
return collections.OrderedDict({ 'default':self.__call__(**params_to_override) })
def num_channels(self):
"""
Query the number of channels implemented by the
PatternGenerator. In case of single-channel generators this
will return 1; in case of multichannel, it will return the
number of channels (eg, in the case of RGB images it would
return '3', Red-Green-Blue, even though the OrderedDict
returned by channels() will have 4 elements -- the 3 channels
+ their average).
"""
return 1
def _setup_xy(self,bounds,xdensity,ydensity,x,y,orientation):
"""
Produce pattern coordinate matrices from the bounds and
density (or rows and cols), and transforms them according to
x, y, and orientation.
"""
self.debug("bounds=%s, xdensity=%s, ydensity=%s, x=%s, y=%s, orientation=%s",bounds,xdensity,ydensity,x,y,orientation)
# Generate vectors representing coordinates at which the pattern
# will be sampled.
# CB: note to myself - use slice_._scs if supplied?
x_points,y_points = SheetCoordinateSystem(bounds,xdensity,ydensity).sheetcoordinates_of_matrixidx()
# Generate matrices of x and y sheet coordinates at which to
# sample pattern, at the correct orientation
self.pattern_x, self.pattern_y = self._create_and_rotate_coordinate_arrays(x_points-x,y_points-y,orientation)
def function(self,p):
"""
Function to draw a pattern that will then be scaled and rotated.
Instead of implementing __call__ directly, PatternGenerator
subclasses will typically implement this helper function used
by __call__, because that way they can let __call__ handle the
scaling and rotation for them. Alternatively, __call__ itself
can be reimplemented entirely by a subclass (e.g. if it does
not need to do any scaling or rotation), in which case this
function will be ignored.
"""
raise NotImplementedError
def _create_and_rotate_coordinate_arrays(self, x, y, orientation):
"""
Create pattern matrices from x and y vectors, and rotate them
to the specified orientation.
"""
# Using this two-liner requires that x increase from left to
# right and y decrease from left to right; I don't think it
# can be rewritten in so little code otherwise - but please
# prove me wrong.
pattern_y = np.subtract.outer(np.cos(orientation)*y, np.sin(orientation)*x)
pattern_x = np.add.outer(np.sin(orientation)*y, np.cos(orientation)*x)
return pattern_x, pattern_y
def _apply_mask(self,p,mat):
"""Create (if necessary) and apply the mask to the given matrix mat."""
mask = p.mask
ms=p.mask_shape
if ms is not None:
mask = ms(x=p.x+p.size*(ms.x*np.cos(p.orientation)-ms.y*np.sin(p.orientation)),
y=p.y+p.size*(ms.x*np.sin(p.orientation)+ms.y*np.cos(p.orientation)),
orientation=ms.orientation+p.orientation,size=ms.size*p.size,
bounds=p.bounds,ydensity=p.ydensity,xdensity=p.xdensity)
if mask is not None:
mat*=mask
def set_matrix_dimensions(self, bounds, xdensity, ydensity):
"""
Change the dimensions of the matrix into which the pattern
will be drawn. Users of this class should call this method
rather than changing the bounds, xdensity, and ydensity
parameters directly. Subclasses can override this method to
update any internal data structures that may depend on the
matrix dimensions.
"""
self.bounds = bounds
self.xdensity = xdensity
self.ydensity = ydensity
scs = SheetCoordinateSystem(bounds, xdensity, ydensity)
for of in self.output_fns:
if isinstance(of, TransferFn):
of.initialize(SCS=scs, shape=scs.shape)
def state_push(self):
"Save the state of the output functions, to be restored with state_pop."
for of in self.output_fns:
if hasattr(of,'state_push'):
of.state_push()
super(PatternGenerator, self).state_push()
def state_pop(self):
"Restore the state of the output functions saved by state_push."
for of in self.output_fns:
if hasattr(of,'state_pop'):
of.state_pop()
super(PatternGenerator, self).state_pop()
def anim(self, duration, offset=0, timestep=1,
label=None, unit=None,
time_fn=param.Dynamic.time_fn):
"""
duration: The temporal duration to animate in the units
defined on the global time function.
offset: The temporal offset from which the animation is
generated given the supplied pattern
timestep: The time interval between successive frames. The
duration must be an exact multiple of the timestep.
label: A label string to override the label of the global time
function (if not None).
unit: The unit string to override the unit value of the global
time function (if not None).
time_fn: The global time function object that is shared across
the time-varying objects that are being sampled.
Note that the offset, timestep and time_fn only affect
patterns parameterized by time-dependent number
generators. Otherwise, the frames are generated by successive
call to the pattern which may or may not be varying (e.g to
view the patterns contained within a Selector).
"""
frames = (duration // timestep) + 1
if duration % timestep != 0:
raise ValueError("The duration value must be an exact multiple of the timestep.")
if label is None:
label = time_fn.label if hasattr(time_fn, 'label') else 'Time'
unit = time_fn.unit if (not unit and hasattr(time_fn, 'unit')) else unit
vmap = HoloMap(kdims=[Dimension(label, unit=unit if unit else '')])
self.state_push()
with time_fn as t:
t(offset)
for i in range(frames):
vmap[t()] = self[:]
t += timestep
self.state_pop()
return vmap
## Support for compositional expressions of PatternGenerator objects
def _promote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [self,other]
def _rpromote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [other,self]
# Could define any of Python's operators here, esp. if they have operator or ufunc equivalents
def __add__ (self,other): return Composite(generators=self._promote(other),operator=np.add)
def __sub__ (self,other): return Composite(generators=self._promote(other),operator=np.subtract)
def __mul__ (self,other): return Composite(generators=self._promote(other),operator=np.multiply)
def __mod__ (self,other): return Composite(generators=self._promote(other),operator=np.mod)
def __pow__ (self,other): return Composite(generators=self._promote(other),operator=np.power)
def __div__ (self,other): return Composite(generators=self._promote(other),operator=np.divide)
def __and__ (self,other): return Composite(generators=self._promote(other),operator=np.minimum)
def __or__ (self,other): return Composite(generators=self._promote(other),operator=np.maximum)
def __radd__ (self,other): return Composite(generators=self._rpromote(other),operator=np.add)
def __rsub__ (self,other): return Composite(generators=self._rpromote(other),operator=np.subtract)
def __rmul__ (self,other): return Composite(generators=self._rpromote(other),operator=np.multiply)
def __rmod__ (self,other): return Composite(generators=self._rpromote(other),operator=np.mod)
def __rpow__ (self,other): return Composite(generators=self._rpromote(other),operator=np.power)
def __rdiv__ (self,other): return Composite(generators=self._rpromote(other),operator=np.divide)
def __rand__ (self,other): return Composite(generators=self._rpromote(other),operator=np.minimum)
def __ror__ (self,other): return Composite(generators=self._rpromote(other),operator=np.maximum)
def __neg__ (self): return Composite(generators=[Constant(scale=0),self],operator=np.subtract)
class abs_first(object):
@staticmethod
def reduce(x): return np.abs(x[0])
def __abs__ (self): return Composite(generators=[self],operator=self.abs_first)
def pil(self, **params_to_override):
"""Returns a PIL image for this pattern, overriding parameters if provided."""
from PIL.Image import fromarray
nchans = self.num_channels()
if nchans in [0, 1]:
mode, arr = None, self(**params_to_override)
arr = (255.0 / arr.max() * (arr - arr.min())).astype(np.uint8)
elif nchans in [3,4]:
mode = 'RGB' if nchans==3 else 'RGBA'
arr = np.dstack(self.channels(**params_to_override).values()[1:])
arr = (255.0*arr).astype(np.uint8)
else:
raise ValueError("Unsupported number of channels")
return fromarray(arr, mode)
# Override class type; must be set here rather than when mask_shape is declared,
# to avoid referring to class not yet constructed
PatternGenerator.params('mask_shape').class_=PatternGenerator
# Trivial example of a PatternGenerator, provided for when a default is
# needed. The other concrete PatternGenerator classes are stored
# elsewhere, to be imported as needed.
class Constant(PatternGenerator):
"""Constant pattern generator, i.e., a solid, uniform field of the same value."""
# The orientation is ignored, so we don't show it in
# auto-generated lists of parameters (e.g. in the GUI)
orientation = param.Number(precedence=-1)
# Optimization: We use a simpler __call__ method here to skip the
# coordinate transformations (which would have no effect anyway)
def __call__(self,**params_to_override):
p = ParamOverrides(self,params_to_override)
shape = SheetCoordinateSystem(p.bounds,p.xdensity,p.ydensity).shape
result = p.scale*np.ones(shape, np.float)+p.offset
self._apply_mask(p,result)
for of in p.output_fns:
of(result)
return result
class CompositeBase(PatternGenerator):
"""
PatternGenerator that combines or selects from a list of other
PatternGenerators.
"""
__abstract=True
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None),precedence=0.97, doc="""
List of patterns to combine or select from. The default pattern is a blank pattern,
and thus should be overridden for any useful work.""")
size = param.Number(default=1.0,doc="""Scaling factor applied to all sub-patterns.""")
class Composite(CompositeBase):
"""
PatternGenerator that accepts a list of other PatternGenerators.
To create a new pattern, asks each of the PatternGenerators in the
list to create a pattern, then it combines the patterns to create
a single pattern that it returns.
"""
# The Accum_Replace operator from LISSOM is not yet supported,
# but it should be added once PatternGenerator bounding boxes
# are respected and/or GenericImage patterns support transparency.
operator = param.Parameter(np.maximum,precedence=0.98,doc="""
Binary Numpy function used to combine the individual patterns.
Any binary Numpy array "ufunc" returning the same
type of array as the operands and supporting the reduce
operator is allowed here. Supported ufuncs include::
add
subtract
multiply
divide
maximum
minimum
remainder
power
The most useful ones are probably add and maximum, but there
are uses for at least some of the others as well (e.g. to
remove pieces of other patterns).
You can also write your own operators, by making a class that
has a static method named "reduce" that returns an array of the
same size and type as the arrays in the list. For example::
class return_first(object):
@staticmethod
def reduce(x):
return x[0]
""")
def _advance_pattern_generators(self,p):
"""
Subclasses can override this method to provide constraints on
the values of generators' parameters and/or eliminate
generators from this list if necessary.
"""
return p.generators
def state_push(self):
"""
Push the state of all generators
"""
super(Composite,self).state_push()
for gen in self.generators:
gen.state_push()
def state_pop(self):
"""
Pop the state of all generators
"""
super(Composite,self).state_pop()
for gen in self.generators:
gen.state_pop()
# JABALERT: To support large numbers of patterns on a large input region,
# should be changed to evaluate each pattern in a small box, and then
# combine them at the full Composite Bounding box size.
def function(self,p):
"""Constructs combined pattern out of the individual ones."""
generators = self._advance_pattern_generators(p)
assert hasattr(p.operator,'reduce'),repr(p.operator)+" does not support 'reduce'."
# CEBALERT: mask gets applied by all PGs including the Composite itself
# (leads to redundant calculations in current lissom_oo_or usage, but
# will lead to problems/limitations in the future).
patterns = [pg(xdensity=p.xdensity,ydensity=p.ydensity,
bounds=p.bounds,mask=p.mask,
x=p.x+p.size*(pg.x*np.cos(p.orientation)- pg.y*np.sin(p.orientation)),
y=p.y+p.size*(pg.x*np.sin(p.orientation)+ pg.y*np.cos(p.orientation)),
orientation=pg.orientation+p.orientation,
size=pg.size*p.size)
for pg in generators]
image_array = p.operator.reduce(patterns)
return image_array
class ChannelTransform(param.Parameterized):
"""
A ChannelTransform is a callable object that takes channels as
input (an ordered dictionary of arrays) and transforms their
contents in some way before returning them.
"""
__abstract = True
def __call__(self, channels):
raise NotImplementedError
# Example of a ChannelTransform
class CorrelateChannels(ChannelTransform):
"""
Correlate channels by mixing a fraction of one channel into another.
"""
from_channel = param.Number(default=1, doc="""
Name of the channel to take data from.""")
to_channel = param.Number(default=2, doc="""
Name of the channel to change data of.""")
strength = param.Number(default=0, doc="""
Strength of the correlation to add, with 0 being no change,
and 1.0 overwriting to_channel with from_channel.""")
def __call__(self, channel_data):
channel_data[self.to_channel] = \
self.strength*channel_data[self.from_channel] + \
(1-self.strength)*channel_data[self.to_channel]
return channel_data
class ChannelGenerator(PatternGenerator):
|
class ComposeChannels(ChannelGenerator):
"""
Create a multi-channel PatternGenerator from a list of
PatternGenerators, with the specified channel_transforms applied.
"""
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None), doc="""
List of patterns to use for each channel. Generators which already have more than one
channel will only contribute to a single channel of ComposeChannels.""")
def __init__(self,**params):
super(ComposeChannels,self).__init__(**params)
for i in range(len(self.generators)):
self._channel_data.append( None )
def __call__(self,**params):
# Generates all channels, then returns the default channel
p = param.ParamOverrides(self,params)
params['xdensity']=p.xdensity
params['ydensity']=p.ydensity
params['bounds']=p.bounds
# (not **p)
for i in range(len(p.generators)):
self._channel_data[i] = p.generators[i]( **params )
for c in self.channel_transforms:
self._channel_data = c(self._channel_data)
return sum(act for act in self._channel_data)/len(self._channel_data)
| """
Abstract base class for patterns supporting multiple channels natively.
"""
__abstract = True
channel_transforms = param.HookList(class_=ChannelTransform,default=[],doc="""
Optional functions to apply post processing to the set of channels.""")
def __init__(self, **params):
self._original_channel_data = [] # channel data before processing
self._channel_data = [] # channel data after processing
super(ChannelGenerator, self).__init__(**params)
def channels(self, use_cached=False, **params_to_override):
res = collections.OrderedDict()
if not use_cached:
default = self(**params_to_override)
res['default'] = default
else:
res['default'] = None
for i in range(len(self._channel_data)):
res[i] = self._channel_data[i]
return res
def num_channels(self):
return len(self._channel_data) | identifier_body |
patterngenerator.py | """
PatternGenerator abstract class, basic example concrete class, and
multichannel support.
PatternGenerators support both single-channel patterns, i.e. bare
arrays, and multiple channels, such as for color images. See
``PatternGenerator.__call__`` and ``PatternGenerator.channels`` for
more information.
"""
import numpy as np
from numpy import pi
import collections
import param
from param.parameterized import ParamOverrides
from holoviews import HoloMap, Image, RGB, Dimension
from holoviews.core import BoundingBox, BoundingRegionParameter, SheetCoordinateSystem
from .transferfn import TransferFn
# CEBALERT: PatternGenerator has become a bit of a monster abstract
# class. Can it be split into the minimum required to specify the
# interface, with a subclass implementing the rest (this subclass
# still being above the rest of the PatternGenerators)? We want to
# make it easy to add new types of PatternGenerator that don't match
# the assumptions of the current ones (OneDPowerSpectrum is an example
# of a PG that doesn't match the current assumptions), but still lets
# them be used like the current ones.
# (PatternGenerator-->TwoDPatternGenerator?)
# JLALERT: PatternGenerator should have
# override_plasticity_state/restore_plasticity_state functions which
# can override the plasticity of any output_fn that has state, in case
# anyone ever uses such an object in a PatternGenerator. Will also
# need to support Composite patterns.
class PatternGenerator(param.Parameterized):
"""
A class hierarchy for callable objects that can generate 2D patterns.
Once initialized, PatternGenerators can be called to generate a
value or a matrix of values from a 2D function, typically
accepting at least x and y.
A PatternGenerator's Parameters can make use of Parameter's
precedence attribute to specify the order in which they should
appear, e.g. in a GUI. The precedence attribute has a nominal
range of 0.0 to 1.0, with ordering going from 0.0 (first) to 1.0
(last), but any value is allowed.
The orientation and layout of the pattern matrices is defined by
the SheetCoordinateSystem class, which see.
Note that not every parameter defined for a PatternGenerator will
be used by every subclass. For instance, a Constant pattern will
ignore the x, y, orientation, and size parameters, because the
pattern does not vary with any of those parameters. However,
those parameters are still defined for all PatternGenerators, even
Constant patterns, to allow PatternGenerators to be scaled, rotated,
translated, etc. uniformly.
"""
__abstract = True
bounds = BoundingRegionParameter(
default=BoundingBox(points=((-0.5,-0.5), (0.5,0.5))),precedence=-1,
doc="BoundingBox of the area in which the pattern is generated.")
xdensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the x direction.""")
ydensity = param.Number(default=256,bounds=(0,None),precedence=-1,doc="""
Density (number of samples per 1.0 length) in the y direction.
Typically the same as the xdensity.""")
x = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.20,doc="""
X-coordinate location of pattern center.""")
y = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.21,doc="""
Y-coordinate location of pattern center.""")
z = param.ClassSelector(default=None, precedence=-1, class_=Dimension, doc="""
The Dimension object associated with the z-values generated by
the PatternGenerator . If None, uses the default set by
HoloViews.Image.""")
group = param.String(default='Pattern', precedence=-1, doc="""
The group name assigned to the returned HoloViews object.""")
position = param.Composite(attribs=['x','y'],precedence=-1,doc="""
Coordinates of location of pattern center.
Provides a convenient way to set the x and y parameters together
as a tuple (x,y), but shares the same actual storage as x and y
(and thus only position OR x and y need to be specified).""")
orientation = param.Number(default=0.0,softbounds=(0.0,2*pi),precedence=0.40,doc="""
Polar angle of pattern, i.e., the orientation in the Cartesian coordinate
system, with zero at 3 o'clock and increasing counterclockwise.""")
size = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,6.0),
precedence=0.30,doc="""Determines the overall size of the pattern.""")
scale = param.Number(default=1.0,softbounds=(0.0,2.0),precedence=0.10,doc="""
Multiplicative strength of input pattern, defaulting to 1.0""")
offset = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.11,doc="""
Additive offset to input pattern, defaulting to 0.0""")
mask = param.Parameter(default=None,precedence=-1,doc="""
Optional object (expected to be an array) with which to multiply the
pattern array after it has been created, before any output_fns are
applied. This can be used to shape the pattern.""")
# Note that the class type is overridden to PatternGenerator below
mask_shape = param.ClassSelector(param.Parameterized,default=None,precedence=0.06,doc="""
Optional PatternGenerator used to construct a mask to be applied to
the pattern.""")
output_fns = param.HookList(default=[], precedence=0.08,doc="""
Optional function(s) to apply to the pattern array after it has been created.
Can be used for normalization, thresholding, etc.""")
def __init__(self,**params):
super(PatternGenerator, self).__init__(**params)
self.set_matrix_dimensions(self.bounds, self.xdensity, self.ydensity)
def __call__(self,**params_to_override):
"""
Call the subclass's 'function' method on a rotated and scaled
coordinate system.
Creates and fills an array with the requested pattern. If
called without any params, uses the values for the Parameters
as currently set on the object. Otherwise, any params
specified override those currently set on the object.
"""
if 'output_fns' in params_to_override:
self.warning("Output functions specified through the call method will be ignored.")
p=ParamOverrides(self,params_to_override)
# CEBERRORALERT: position parameter is not currently
# supported. We should delete the position parameter or fix
# this.
#
# position=params_to_override.get('position',None) if position
# is not None: x,y = position
self._setup_xy(p.bounds,p.xdensity,p.ydensity,p.x,p.y,p.orientation)
fn_result = self.function(p)
self._apply_mask(p,fn_result)
if p.scale != 1.0:
result = p.scale * fn_result
else:
result = fn_result
if p.offset != 0.0:
result += p.offset
for of in p.output_fns:
of(result)
return result
def __getitem__(self, coords):
value_dims = {}
if self.num_channels() in [0, 1]:
raster, data = Image, self()
value_dims = {'value_dimensions':[self.z]} if self.z else value_dims
elif self.num_channels() in [3,4]:
raster = RGB
data = np.dstack(self.channels().values()[1:])
image = raster(data, bounds=self.bounds,
**dict(group=self.group,
label=self.__class__.__name__, **value_dims))
# Works round a bug fixed shortly after HoloViews 1.0.0 release
return image if isinstance(coords, slice) else image.__getitem__(coords)
def channels(self, use_cached=False, **params_to_override):
"""
Channels() adds a shared interface for single channel and
multichannel structures. It will always return an ordered
dict: its first element is the single channel of the pattern
(if single-channel) or the channel average (if multichannel);
the successive elements are the individual channels' arrays
(key: 0,1,..N-1).
"""
return collections.OrderedDict({ 'default':self.__call__(**params_to_override) })
def num_channels(self):
"""
Query the number of channels implemented by the
PatternGenerator. In case of single-channel generators this
will return 1; in case of multichannel, it will return the
number of channels (eg, in the case of RGB images it would
return '3', Red-Green-Blue, even though the OrderedDict
returned by channels() will have 4 elements -- the 3 channels
+ their average).
"""
return 1
def _setup_xy(self,bounds,xdensity,ydensity,x,y,orientation):
"""
Produce pattern coordinate matrices from the bounds and
density (or rows and cols), and transforms them according to
x, y, and orientation.
"""
self.debug("bounds=%s, xdensity=%s, ydensity=%s, x=%s, y=%s, orientation=%s",bounds,xdensity,ydensity,x,y,orientation)
# Generate vectors representing coordinates at which the pattern
# will be sampled.
# CB: note to myself - use slice_._scs if supplied?
x_points,y_points = SheetCoordinateSystem(bounds,xdensity,ydensity).sheetcoordinates_of_matrixidx()
# Generate matrices of x and y sheet coordinates at which to
# sample pattern, at the correct orientation
self.pattern_x, self.pattern_y = self._create_and_rotate_coordinate_arrays(x_points-x,y_points-y,orientation)
def function(self,p):
"""
Function to draw a pattern that will then be scaled and rotated.
Instead of implementing __call__ directly, PatternGenerator
subclasses will typically implement this helper function used
by __call__, because that way they can let __call__ handle the
scaling and rotation for them. Alternatively, __call__ itself
can be reimplemented entirely by a subclass (e.g. if it does
not need to do any scaling or rotation), in which case this
function will be ignored.
"""
raise NotImplementedError
def _create_and_rotate_coordinate_arrays(self, x, y, orientation):
"""
Create pattern matrices from x and y vectors, and rotate them
to the specified orientation.
"""
# Using this two-liner requires that x increase from left to
# right and y decrease from left to right; I don't think it
# can be rewritten in so little code otherwise - but please
# prove me wrong.
pattern_y = np.subtract.outer(np.cos(orientation)*y, np.sin(orientation)*x)
pattern_x = np.add.outer(np.sin(orientation)*y, np.cos(orientation)*x)
return pattern_x, pattern_y
def _apply_mask(self,p,mat):
"""Create (if necessary) and apply the mask to the given matrix mat."""
mask = p.mask
ms=p.mask_shape
if ms is not None:
mask = ms(x=p.x+p.size*(ms.x*np.cos(p.orientation)-ms.y*np.sin(p.orientation)),
y=p.y+p.size*(ms.x*np.sin(p.orientation)+ms.y*np.cos(p.orientation)),
orientation=ms.orientation+p.orientation,size=ms.size*p.size,
bounds=p.bounds,ydensity=p.ydensity,xdensity=p.xdensity)
if mask is not None:
mat*=mask
def set_matrix_dimensions(self, bounds, xdensity, ydensity):
"""
Change the dimensions of the matrix into which the pattern
will be drawn. Users of this class should call this method
rather than changing the bounds, xdensity, and ydensity
parameters directly. Subclasses can override this method to
update any internal data structures that may depend on the
matrix dimensions.
"""
self.bounds = bounds
self.xdensity = xdensity
self.ydensity = ydensity
scs = SheetCoordinateSystem(bounds, xdensity, ydensity)
for of in self.output_fns:
if isinstance(of, TransferFn):
of.initialize(SCS=scs, shape=scs.shape)
def state_push(self):
"Save the state of the output functions, to be restored with state_pop."
for of in self.output_fns:
if hasattr(of,'state_push'):
of.state_push()
super(PatternGenerator, self).state_push()
def state_pop(self):
"Restore the state of the output functions saved by state_push."
for of in self.output_fns:
if hasattr(of,'state_pop'):
of.state_pop()
super(PatternGenerator, self).state_pop()
def anim(self, duration, offset=0, timestep=1,
label=None, unit=None,
time_fn=param.Dynamic.time_fn):
"""
duration: The temporal duration to animate in the units
defined on the global time function.
offset: The temporal offset from which the animation is
generated given the supplied pattern
timestep: The time interval between successive frames. The
duration must be an exact multiple of the timestep.
label: A label string to override the label of the global time
function (if not None).
unit: The unit string to override the unit value of the global
time function (if not None).
time_fn: The global time function object that is shared across
the time-varying objects that are being sampled.
Note that the offset, timestep and time_fn only affect
patterns parameterized by time-dependent number
generators. Otherwise, the frames are generated by successive
call to the pattern which may or may not be varying (e.g to
view the patterns contained within a Selector).
"""
frames = (duration // timestep) + 1
if duration % timestep != 0:
raise ValueError("The duration value must be an exact multiple of the timestep.")
if label is None:
label = time_fn.label if hasattr(time_fn, 'label') else 'Time'
unit = time_fn.unit if (not unit and hasattr(time_fn, 'unit')) else unit
vmap = HoloMap(kdims=[Dimension(label, unit=unit if unit else '')])
self.state_push()
with time_fn as t:
t(offset)
for i in range(frames):
vmap[t()] = self[:]
t += timestep
self.state_pop()
return vmap
## Support for compositional expressions of PatternGenerator objects
def _promote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [self,other]
def _rpromote(self,other):
if not isinstance(other,PatternGenerator):
other = Constant(scale=other,offset=0)
return [other,self]
# Could define any of Python's operators here, esp. if they have operator or ufunc equivalents
def __add__ (self,other): return Composite(generators=self._promote(other),operator=np.add)
def __sub__ (self,other): return Composite(generators=self._promote(other),operator=np.subtract)
def __mul__ (self,other): return Composite(generators=self._promote(other),operator=np.multiply)
def __mod__ (self,other): return Composite(generators=self._promote(other),operator=np.mod)
def __pow__ (self,other): return Composite(generators=self._promote(other),operator=np.power)
def __div__ (self,other): return Composite(generators=self._promote(other),operator=np.divide)
def __and__ (self,other): return Composite(generators=self._promote(other),operator=np.minimum)
def __or__ (self,other): return Composite(generators=self._promote(other),operator=np.maximum)
def __radd__ (self,other): return Composite(generators=self._rpromote(other),operator=np.add)
def | (self,other): return Composite(generators=self._rpromote(other),operator=np.subtract)
def __rmul__ (self,other): return Composite(generators=self._rpromote(other),operator=np.multiply)
def __rmod__ (self,other): return Composite(generators=self._rpromote(other),operator=np.mod)
def __rpow__ (self,other): return Composite(generators=self._rpromote(other),operator=np.power)
def __rdiv__ (self,other): return Composite(generators=self._rpromote(other),operator=np.divide)
def __rand__ (self,other): return Composite(generators=self._rpromote(other),operator=np.minimum)
def __ror__ (self,other): return Composite(generators=self._rpromote(other),operator=np.maximum)
def __neg__ (self): return Composite(generators=[Constant(scale=0),self],operator=np.subtract)
class abs_first(object):
@staticmethod
def reduce(x): return np.abs(x[0])
def __abs__ (self): return Composite(generators=[self],operator=self.abs_first)
def pil(self, **params_to_override):
"""Returns a PIL image for this pattern, overriding parameters if provided."""
from PIL.Image import fromarray
nchans = self.num_channels()
if nchans in [0, 1]:
mode, arr = None, self(**params_to_override)
arr = (255.0 / arr.max() * (arr - arr.min())).astype(np.uint8)
elif nchans in [3,4]:
mode = 'RGB' if nchans==3 else 'RGBA'
arr = np.dstack(self.channels(**params_to_override).values()[1:])
arr = (255.0*arr).astype(np.uint8)
else:
raise ValueError("Unsupported number of channels")
return fromarray(arr, mode)
# Override class type; must be set here rather than when mask_shape is declared,
# to avoid referring to class not yet constructed
PatternGenerator.params('mask_shape').class_=PatternGenerator
# Trivial example of a PatternGenerator, provided for when a default is
# needed. The other concrete PatternGenerator classes are stored
# elsewhere, to be imported as needed.
class Constant(PatternGenerator):
"""Constant pattern generator, i.e., a solid, uniform field of the same value."""
# The orientation is ignored, so we don't show it in
# auto-generated lists of parameters (e.g. in the GUI)
orientation = param.Number(precedence=-1)
# Optimization: We use a simpler __call__ method here to skip the
# coordinate transformations (which would have no effect anyway)
def __call__(self,**params_to_override):
p = ParamOverrides(self,params_to_override)
shape = SheetCoordinateSystem(p.bounds,p.xdensity,p.ydensity).shape
result = p.scale*np.ones(shape, np.float)+p.offset
self._apply_mask(p,result)
for of in p.output_fns:
of(result)
return result
class CompositeBase(PatternGenerator):
"""
PatternGenerator that combines or selects from a list of other
PatternGenerators.
"""
__abstract=True
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None),precedence=0.97, doc="""
List of patterns to combine or select from. The default pattern is a blank pattern,
and thus should be overridden for any useful work.""")
size = param.Number(default=1.0,doc="""Scaling factor applied to all sub-patterns.""")
class Composite(CompositeBase):
"""
PatternGenerator that accepts a list of other PatternGenerators.
To create a new pattern, asks each of the PatternGenerators in the
list to create a pattern, then it combines the patterns to create
a single pattern that it returns.
"""
# The Accum_Replace operator from LISSOM is not yet supported,
# but it should be added once PatternGenerator bounding boxes
# are respected and/or GenericImage patterns support transparency.
operator = param.Parameter(np.maximum,precedence=0.98,doc="""
Binary Numpy function used to combine the individual patterns.
Any binary Numpy array "ufunc" returning the same
type of array as the operands and supporting the reduce
operator is allowed here. Supported ufuncs include::
add
subtract
multiply
divide
maximum
minimum
remainder
power
The most useful ones are probably add and maximum, but there
are uses for at least some of the others as well (e.g. to
remove pieces of other patterns).
You can also write your own operators, by making a class that
has a static method named "reduce" that returns an array of the
same size and type as the arrays in the list. For example::
class return_first(object):
@staticmethod
def reduce(x):
return x[0]
""")
def _advance_pattern_generators(self,p):
"""
Subclasses can override this method to provide constraints on
the values of generators' parameters and/or eliminate
generators from this list if necessary.
"""
return p.generators
def state_push(self):
"""
Push the state of all generators
"""
super(Composite,self).state_push()
for gen in self.generators:
gen.state_push()
def state_pop(self):
"""
Pop the state of all generators
"""
super(Composite,self).state_pop()
for gen in self.generators:
gen.state_pop()
# JABALERT: To support large numbers of patterns on a large input region,
# should be changed to evaluate each pattern in a small box, and then
# combine them at the full Composite Bounding box size.
def function(self,p):
"""Constructs combined pattern out of the individual ones."""
generators = self._advance_pattern_generators(p)
assert hasattr(p.operator,'reduce'),repr(p.operator)+" does not support 'reduce'."
# CEBALERT: mask gets applied by all PGs including the Composite itself
# (leads to redundant calculations in current lissom_oo_or usage, but
# will lead to problems/limitations in the future).
patterns = [pg(xdensity=p.xdensity,ydensity=p.ydensity,
bounds=p.bounds,mask=p.mask,
x=p.x+p.size*(pg.x*np.cos(p.orientation)- pg.y*np.sin(p.orientation)),
y=p.y+p.size*(pg.x*np.sin(p.orientation)+ pg.y*np.cos(p.orientation)),
orientation=pg.orientation+p.orientation,
size=pg.size*p.size)
for pg in generators]
image_array = p.operator.reduce(patterns)
return image_array
class ChannelTransform(param.Parameterized):
"""
A ChannelTransform is a callable object that takes channels as
input (an ordered dictionary of arrays) and transforms their
contents in some way before returning them.
"""
__abstract = True
def __call__(self, channels):
raise NotImplementedError
# Example of a ChannelTransform
class CorrelateChannels(ChannelTransform):
"""
Correlate channels by mixing a fraction of one channel into another.
"""
from_channel = param.Number(default=1, doc="""
Name of the channel to take data from.""")
to_channel = param.Number(default=2, doc="""
Name of the channel to change data of.""")
strength = param.Number(default=0, doc="""
Strength of the correlation to add, with 0 being no change,
and 1.0 overwriting to_channel with from_channel.""")
def __call__(self, channel_data):
channel_data[self.to_channel] = \
self.strength*channel_data[self.from_channel] + \
(1-self.strength)*channel_data[self.to_channel]
return channel_data
class ChannelGenerator(PatternGenerator):
"""
Abstract base class for patterns supporting multiple channels natively.
"""
__abstract = True
channel_transforms = param.HookList(class_=ChannelTransform,default=[],doc="""
Optional functions to apply post processing to the set of channels.""")
def __init__(self, **params):
self._original_channel_data = [] # channel data before processing
self._channel_data = [] # channel data after processing
super(ChannelGenerator, self).__init__(**params)
def channels(self, use_cached=False, **params_to_override):
res = collections.OrderedDict()
if not use_cached:
default = self(**params_to_override)
res['default'] = default
else:
res['default'] = None
for i in range(len(self._channel_data)):
res[i] = self._channel_data[i]
return res
def num_channels(self):
return len(self._channel_data)
class ComposeChannels(ChannelGenerator):
"""
Create a multi-channel PatternGenerator from a list of
PatternGenerators, with the specified channel_transforms applied.
"""
generators = param.List(class_=PatternGenerator,default=[Constant(scale=0.0)],
bounds=(1,None), doc="""
List of patterns to use for each channel. Generators which already have more than one
channel will only contribute to a single channel of ComposeChannels.""")
def __init__(self,**params):
super(ComposeChannels,self).__init__(**params)
for i in range(len(self.generators)):
self._channel_data.append( None )
def __call__(self,**params):
# Generates all channels, then returns the default channel
p = param.ParamOverrides(self,params)
params['xdensity']=p.xdensity
params['ydensity']=p.ydensity
params['bounds']=p.bounds
# (not **p)
for i in range(len(p.generators)):
self._channel_data[i] = p.generators[i]( **params )
for c in self.channel_transforms:
self._channel_data = c(self._channel_data)
return sum(act for act in self._channel_data)/len(self._channel_data)
| __rsub__ | identifier_name |
polls.py | from faker import Faker
from machina.core.db.models import get_model
from machina.test.factories.auth import UserFactory
from machina.test.factories.conversation import TopicFactory
faker = Faker()
TopicPoll = get_model('forum_polls', 'TopicPoll')
TopicPollOption = get_model('forum_polls', 'TopicPollOption')
TopicPollVote = get_model('forum_polls', 'TopicPollVote')
class TopicPollFactory(factory.django.DjangoModelFactory):
topic = factory.SubFactory(TopicFactory)
question = faker.text(max_nb_chars=200)
class Meta:
model = TopicPoll
class TopicPollOptionFactory(factory.django.DjangoModelFactory):
poll = factory.SubFactory(TopicPollFactory)
text = faker.text(max_nb_chars=100)
class Meta:
model = TopicPollOption
class TopicPollVoteFactory(factory.django.DjangoModelFactory):
poll_option = factory.SubFactory(TopicPollOptionFactory)
voter = factory.SubFactory(UserFactory)
class Meta:
model = TopicPollVote | import factory
import factory.django | random_line_split | |
polls.py | import factory
import factory.django
from faker import Faker
from machina.core.db.models import get_model
from machina.test.factories.auth import UserFactory
from machina.test.factories.conversation import TopicFactory
faker = Faker()
TopicPoll = get_model('forum_polls', 'TopicPoll')
TopicPollOption = get_model('forum_polls', 'TopicPollOption')
TopicPollVote = get_model('forum_polls', 'TopicPollVote')
class TopicPollFactory(factory.django.DjangoModelFactory):
topic = factory.SubFactory(TopicFactory)
question = faker.text(max_nb_chars=200)
class Meta:
|
class TopicPollOptionFactory(factory.django.DjangoModelFactory):
poll = factory.SubFactory(TopicPollFactory)
text = faker.text(max_nb_chars=100)
class Meta:
model = TopicPollOption
class TopicPollVoteFactory(factory.django.DjangoModelFactory):
poll_option = factory.SubFactory(TopicPollOptionFactory)
voter = factory.SubFactory(UserFactory)
class Meta:
model = TopicPollVote
| model = TopicPoll | identifier_body |
polls.py | import factory
import factory.django
from faker import Faker
from machina.core.db.models import get_model
from machina.test.factories.auth import UserFactory
from machina.test.factories.conversation import TopicFactory
faker = Faker()
TopicPoll = get_model('forum_polls', 'TopicPoll')
TopicPollOption = get_model('forum_polls', 'TopicPollOption')
TopicPollVote = get_model('forum_polls', 'TopicPollVote')
class TopicPollFactory(factory.django.DjangoModelFactory):
topic = factory.SubFactory(TopicFactory)
question = faker.text(max_nb_chars=200)
class Meta:
model = TopicPoll
class TopicPollOptionFactory(factory.django.DjangoModelFactory):
poll = factory.SubFactory(TopicPollFactory)
text = faker.text(max_nb_chars=100)
class Meta:
model = TopicPollOption
class TopicPollVoteFactory(factory.django.DjangoModelFactory):
poll_option = factory.SubFactory(TopicPollOptionFactory)
voter = factory.SubFactory(UserFactory)
class | :
model = TopicPollVote
| Meta | identifier_name |
log-knows-the-names-of-variants.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum foo {
a(uint),
b(~str),
c,
}
enum bar {
d, e, f
}
pub fn main() {
assert_eq!(~"a(22)", fmt!("%?", a(22u)));
assert_eq!(~"b(~\"hi\")", fmt!("%?", b(~"hi")));
assert_eq!(~"c", fmt!("%?", c)); | assert_eq!(~"d", fmt!("%?", d));
} | random_line_split | |
log-knows-the-names-of-variants.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum foo {
a(uint),
b(~str),
c,
}
enum bar {
d, e, f
}
pub fn main() | {
assert_eq!(~"a(22)", fmt!("%?", a(22u)));
assert_eq!(~"b(~\"hi\")", fmt!("%?", b(~"hi")));
assert_eq!(~"c", fmt!("%?", c));
assert_eq!(~"d", fmt!("%?", d));
} | identifier_body | |
log-knows-the-names-of-variants.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum | {
a(uint),
b(~str),
c,
}
enum bar {
d, e, f
}
pub fn main() {
assert_eq!(~"a(22)", fmt!("%?", a(22u)));
assert_eq!(~"b(~\"hi\")", fmt!("%?", b(~"hi")));
assert_eq!(~"c", fmt!("%?", c));
assert_eq!(~"d", fmt!("%?", d));
}
| foo | identifier_name |
test_naive_completion.py | from __future__ import unicode_literals
import pytest
from prompt_toolkit.completion import Completion
from prompt_toolkit.document import Document
@pytest.fixture
def completer():
import mycli.sqlcompleter as sqlcompleter
return sqlcompleter.SQLCompleter(smart_completion=False)
@pytest.fixture
def complete_event():
from mock import Mock
return Mock()
def test_empty_string_completion(completer, complete_event):
text = ''
position = 0
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set(map(Completion, completer.all_completions))
def test_select_keyword_completion(completer, complete_event):
text = 'SEL'
position = len('SEL')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set([Completion(text='SELECT', start_position=-3)])
def | (completer, complete_event):
text = 'SELECT MA'
position = len('SELECT MA')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set([
Completion(text='MAX', start_position=-2),
Completion(text='MASTER', start_position=-2)])
def test_column_name_completion(completer, complete_event):
text = 'SELECT FROM users'
position = len('SELECT ')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set(map(Completion, completer.all_completions))
| test_function_name_completion | identifier_name |
test_naive_completion.py | from __future__ import unicode_literals
import pytest
from prompt_toolkit.completion import Completion
from prompt_toolkit.document import Document
@pytest.fixture
def completer():
import mycli.sqlcompleter as sqlcompleter
return sqlcompleter.SQLCompleter(smart_completion=False)
@pytest.fixture
def complete_event():
from mock import Mock
return Mock()
def test_empty_string_completion(completer, complete_event):
text = ''
position = 0
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set(map(Completion, completer.all_completions))
def test_select_keyword_completion(completer, complete_event):
text = 'SEL'
position = len('SEL')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set([Completion(text='SELECT', start_position=-3)])
def test_function_name_completion(completer, complete_event):
text = 'SELECT MA'
position = len('SELECT MA')
result = set(completer.get_completions( | Completion(text='MAX', start_position=-2),
Completion(text='MASTER', start_position=-2)])
def test_column_name_completion(completer, complete_event):
text = 'SELECT FROM users'
position = len('SELECT ')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set(map(Completion, completer.all_completions)) | Document(text=text, cursor_position=position),
complete_event))
assert result == set([ | random_line_split |
test_naive_completion.py | from __future__ import unicode_literals
import pytest
from prompt_toolkit.completion import Completion
from prompt_toolkit.document import Document
@pytest.fixture
def completer():
import mycli.sqlcompleter as sqlcompleter
return sqlcompleter.SQLCompleter(smart_completion=False)
@pytest.fixture
def complete_event():
from mock import Mock
return Mock()
def test_empty_string_completion(completer, complete_event):
text = ''
position = 0
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set(map(Completion, completer.all_completions))
def test_select_keyword_completion(completer, complete_event):
|
def test_function_name_completion(completer, complete_event):
text = 'SELECT MA'
position = len('SELECT MA')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set([
Completion(text='MAX', start_position=-2),
Completion(text='MASTER', start_position=-2)])
def test_column_name_completion(completer, complete_event):
text = 'SELECT FROM users'
position = len('SELECT ')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set(map(Completion, completer.all_completions))
| text = 'SEL'
position = len('SEL')
result = set(completer.get_completions(
Document(text=text, cursor_position=position),
complete_event))
assert result == set([Completion(text='SELECT', start_position=-3)]) | identifier_body |
borrowck-field-sensitivity.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
struct A { a: int, b: Box<int> }
fn deref_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn deref_after_fu_move() {
let x = A { a: 1, b: box 2 };
let y = A { a: 3, .. x };
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn borrow_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
}
fn borrow_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
}
fn move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
drop(x.b); //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn fu_move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
let _y = A { a: 3, .. x }; //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn mut_borrow_after_mut_borrow() {
let mut x = A { a: 1, b: box 2 };
let p = &mut x.a;
let q = &mut x.a; //~ ERROR cannot borrow `x.a` as mutable more than once at a time
drop(*p);
drop(*q);
}
| drop(x.b);
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn fu_move_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
let _z = A { a: 3, .. x }; //~ ERROR use of moved value: `x.b`
}
fn fu_move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let _z = A { a: 4, .. x }; //~ ERROR use of moved value: `x.b`
}
// The following functions aren't yet accepted, but they should be.
fn copy_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
drop(x.a); //~ ERROR use of possibly uninitialized variable: `x.a`
}
fn borrow_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
let p = &x.a; //~ ERROR use of possibly uninitialized variable: `x.a`
drop(*p);
}
fn move_after_field_assign_after_uninit() {
let mut x: A;
x.b = box 1;
drop(x.b); //~ ERROR use of possibly uninitialized variable: `x.b`
}
fn main() {
deref_after_move();
deref_after_fu_move();
borrow_after_move();
borrow_after_fu_move();
move_after_borrow();
fu_move_after_borrow();
mut_borrow_after_mut_borrow();
move_after_move();
move_after_fu_move();
fu_move_after_move();
fu_move_after_fu_move();
copy_after_field_assign_after_uninit();
borrow_after_field_assign_after_uninit();
move_after_field_assign_after_uninit();
} | fn move_after_move() {
let x = A { a: 1, b: box 2 }; | random_line_split |
borrowck-field-sensitivity.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
struct A { a: int, b: Box<int> }
fn deref_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn deref_after_fu_move() {
let x = A { a: 1, b: box 2 };
let y = A { a: 3, .. x };
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn borrow_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
}
fn borrow_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
}
fn move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
drop(x.b); //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn fu_move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
let _y = A { a: 3, .. x }; //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn mut_borrow_after_mut_borrow() {
let mut x = A { a: 1, b: box 2 };
let p = &mut x.a;
let q = &mut x.a; //~ ERROR cannot borrow `x.a` as mutable more than once at a time
drop(*p);
drop(*q);
}
fn move_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn | () {
let x = A { a: 1, b: box 2 };
drop(x.b);
let _z = A { a: 3, .. x }; //~ ERROR use of moved value: `x.b`
}
fn fu_move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let _z = A { a: 4, .. x }; //~ ERROR use of moved value: `x.b`
}
// The following functions aren't yet accepted, but they should be.
fn copy_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
drop(x.a); //~ ERROR use of possibly uninitialized variable: `x.a`
}
fn borrow_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
let p = &x.a; //~ ERROR use of possibly uninitialized variable: `x.a`
drop(*p);
}
fn move_after_field_assign_after_uninit() {
let mut x: A;
x.b = box 1;
drop(x.b); //~ ERROR use of possibly uninitialized variable: `x.b`
}
fn main() {
deref_after_move();
deref_after_fu_move();
borrow_after_move();
borrow_after_fu_move();
move_after_borrow();
fu_move_after_borrow();
mut_borrow_after_mut_borrow();
move_after_move();
move_after_fu_move();
fu_move_after_move();
fu_move_after_fu_move();
copy_after_field_assign_after_uninit();
borrow_after_field_assign_after_uninit();
move_after_field_assign_after_uninit();
}
| fu_move_after_move | identifier_name |
borrowck-field-sensitivity.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
struct A { a: int, b: Box<int> }
fn deref_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn deref_after_fu_move() {
let x = A { a: 1, b: box 2 };
let y = A { a: 3, .. x };
drop(*x.b); //~ ERROR use of moved value: `*x.b`
}
fn borrow_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
}
fn borrow_after_fu_move() |
fn move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
drop(x.b); //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn fu_move_after_borrow() {
let x = A { a: 1, b: box 2 };
let p = &x.b;
let _y = A { a: 3, .. x }; //~ ERROR cannot move out of `x.b` because it is borrowed
drop(**p);
}
fn mut_borrow_after_mut_borrow() {
let mut x = A { a: 1, b: box 2 };
let p = &mut x.a;
let q = &mut x.a; //~ ERROR cannot borrow `x.a` as mutable more than once at a time
drop(*p);
drop(*q);
}
fn move_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
drop(x.b); //~ ERROR use of moved value: `x.b`
}
fn fu_move_after_move() {
let x = A { a: 1, b: box 2 };
drop(x.b);
let _z = A { a: 3, .. x }; //~ ERROR use of moved value: `x.b`
}
fn fu_move_after_fu_move() {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let _z = A { a: 4, .. x }; //~ ERROR use of moved value: `x.b`
}
// The following functions aren't yet accepted, but they should be.
fn copy_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
drop(x.a); //~ ERROR use of possibly uninitialized variable: `x.a`
}
fn borrow_after_field_assign_after_uninit() {
let mut x: A;
x.a = 1;
let p = &x.a; //~ ERROR use of possibly uninitialized variable: `x.a`
drop(*p);
}
fn move_after_field_assign_after_uninit() {
let mut x: A;
x.b = box 1;
drop(x.b); //~ ERROR use of possibly uninitialized variable: `x.b`
}
fn main() {
deref_after_move();
deref_after_fu_move();
borrow_after_move();
borrow_after_fu_move();
move_after_borrow();
fu_move_after_borrow();
mut_borrow_after_mut_borrow();
move_after_move();
move_after_fu_move();
fu_move_after_move();
fu_move_after_fu_move();
copy_after_field_assign_after_uninit();
borrow_after_field_assign_after_uninit();
move_after_field_assign_after_uninit();
}
| {
let x = A { a: 1, b: box 2 };
let _y = A { a: 3, .. x };
let p = &x.b; //~ ERROR use of moved value: `x.b`
drop(**p);
} | identifier_body |
mixer.js | /*
* Copyright © 2020 Luciano Iam <oss@lucianoiam.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
import { ChildComponent } from '../base/component.js';
import { StateNode } from '../base/protocol.js';
import Strip from './strip.js';
export default class Mixer extends ChildComponent {
constructor (parent) {
super(parent);
this._strips = {};
this._ready = false;
}
get ready () {
return this._ready;
}
get strips () {
return Object.values(this._strips);
}
getStripByName (name) {
name = name.trim().toLowerCase();
return this.strips.find(strip => strip.name.trim().toLowerCase() == name);
}
handle (node, addr, val) { |
}
|
if (node.startsWith('strip')) {
if (node == StateNode.STRIP_DESCRIPTION) {
this._strips[addr] = new Strip(this, addr, val);
this.notifyPropertyChanged('strips');
return true;
} else {
const stripAddr = [addr[0]];
if (stripAddr in this._strips) {
return this._strips[stripAddr].handle(node, addr, val);
}
}
} else {
// all initial strip description messages have been received at this point
if (!this._ready) {
this.updateLocal('ready', true);
// passthrough by allowing to return false
}
}
return false;
}
| identifier_body |
mixer.js | /*
* Copyright © 2020 Luciano Iam <oss@lucianoiam.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
import { ChildComponent } from '../base/component.js';
import { StateNode } from '../base/protocol.js';
import Strip from './strip.js';
export default class Mixer extends ChildComponent {
constructor (parent) {
super(parent);
this._strips = {};
this._ready = false;
}
get ready () {
return this._ready;
}
get strips () {
return Object.values(this._strips);
}
getStripByName (name) {
name = name.trim().toLowerCase();
return this.strips.find(strip => strip.name.trim().toLowerCase() == name);
}
handle (node, addr, val) {
if (node.startsWith('strip')) {
if (node == StateNode.STRIP_DESCRIPTION) { | else {
const stripAddr = [addr[0]];
if (stripAddr in this._strips) {
return this._strips[stripAddr].handle(node, addr, val);
}
}
} else {
// all initial strip description messages have been received at this point
if (!this._ready) {
this.updateLocal('ready', true);
// passthrough by allowing to return false
}
}
return false;
}
}
|
this._strips[addr] = new Strip(this, addr, val);
this.notifyPropertyChanged('strips');
return true;
} | conditional_block |
mixer.js | /*
* Copyright © 2020 Luciano Iam <oss@lucianoiam.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
import { ChildComponent } from '../base/component.js';
import { StateNode } from '../base/protocol.js';
import Strip from './strip.js';
| constructor (parent) {
super(parent);
this._strips = {};
this._ready = false;
}
get ready () {
return this._ready;
}
get strips () {
return Object.values(this._strips);
}
getStripByName (name) {
name = name.trim().toLowerCase();
return this.strips.find(strip => strip.name.trim().toLowerCase() == name);
}
handle (node, addr, val) {
if (node.startsWith('strip')) {
if (node == StateNode.STRIP_DESCRIPTION) {
this._strips[addr] = new Strip(this, addr, val);
this.notifyPropertyChanged('strips');
return true;
} else {
const stripAddr = [addr[0]];
if (stripAddr in this._strips) {
return this._strips[stripAddr].handle(node, addr, val);
}
}
} else {
// all initial strip description messages have been received at this point
if (!this._ready) {
this.updateLocal('ready', true);
// passthrough by allowing to return false
}
}
return false;
}
} | export default class Mixer extends ChildComponent {
| random_line_split |
mixer.js | /*
* Copyright © 2020 Luciano Iam <oss@lucianoiam.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
import { ChildComponent } from '../base/component.js';
import { StateNode } from '../base/protocol.js';
import Strip from './strip.js';
export default class Mixer extends ChildComponent {
constructor (parent) {
super(parent);
this._strips = {};
this._ready = false;
}
get r | () {
return this._ready;
}
get strips () {
return Object.values(this._strips);
}
getStripByName (name) {
name = name.trim().toLowerCase();
return this.strips.find(strip => strip.name.trim().toLowerCase() == name);
}
handle (node, addr, val) {
if (node.startsWith('strip')) {
if (node == StateNode.STRIP_DESCRIPTION) {
this._strips[addr] = new Strip(this, addr, val);
this.notifyPropertyChanged('strips');
return true;
} else {
const stripAddr = [addr[0]];
if (stripAddr in this._strips) {
return this._strips[stripAddr].handle(node, addr, val);
}
}
} else {
// all initial strip description messages have been received at this point
if (!this._ready) {
this.updateLocal('ready', true);
// passthrough by allowing to return false
}
}
return false;
}
}
| eady | identifier_name |
ember-cli-build-options.js | 'use strict';
/**
* Configuration options for Ember CLI App used to manage broccoli build tree for DataHub web.
* Returns a method to import build dependencies and an options
* object with configuration attributes
*
* @param {string} env current build application environment
* @returns { options: object }
*/
module.exports = function(env) {
const isTesting = env === 'test';
const isProduction = env === 'production';
return {
options: {
// Configuration options for ember-auto-import library
autoImport: {
// Note: restliparams has an outDir of lib, but autoImport looks for dist
alias: {
restliparams: 'restliparams/lib'
},
webpack: {
node: {
// this will add support for 'require('path')' in browsers
// this is needed by minimatch dependency
path: true
}
},
exclude: ['@glimmer/tracking']
},
// Configurations options for ember-ace editor library
ace: isTesting
? {}
: {
modes: ['json', 'graphqlschema', 'text'],
workers: ['json', 'graphqlschema', 'text'],
exts: ['searchbox']
},
babel: {
sourceMaps: env === 'development' ? 'inline' : false,
targets: {
browsers: ['last 3 versions']
}
},
'ember-cli-babel': {
includePolyfill: !isTesting
},
storeConfigInMeta: false,
SRI: {
enabled: false
},
fingerprint: {
enabled: isProduction
},
'ember-cli-uglify': {
enabled: isProduction,
// Improve build times by using the Fast Minify Mode
// For our internal use case, app load times are not a significant bottleneck currently
// https://github.com/mishoo/UglifyJS2#uglify-fast-minify-mode
uglify: {
compress: false,
mangle: true
}
},
outputPaths: {
app: {
html: 'index.html',
css: {
app: '/assets/datahub-web.css'
},
js: '/assets/datahub-web.js'
},
vendor: { | css: '/assets/vendor.css',
js: '/assets/vendor.js'
}
},
svgJar: {
sourceDirs: ['public/assets/images/svgs']
},
// Configuration options specifying inclusion of Mirage addon files in the application tree
'mirage-from-addon': {
includeAll: true,
exclude: [/scenarios\/default/, /config/]
}
}
};
}; | random_line_split | |
cros_test_lib.py | #!/usr/bin/python
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Cros unit test library, with utility functions."""
from __future__ import print_function
import collections
import contextlib
import cStringIO
import exceptions
import mox
import os
import re
import sys
import unittest
import osutils
import terminal
import cros_build_lib
if 'chromite' not in sys.modules:
# TODO(build): Finish test wrapper (http://crosbug.com/37517).
# Until then, we detect the chromite manipulation not yet having
# occurred, and inject it ourselves.
# We cannot just import chromite since this module is still accessed
# from non chromite.lib.cros_test_lib pathways (which will be resolved
# implicitly via 37517).
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '../third_party'))
import mock
Directory = collections.namedtuple('Directory', ['name', 'contents'])
def _FlattenStructure(base_path, dir_struct):
"""Converts a directory structure to a list of paths."""
flattened = []
for obj in dir_struct:
if isinstance(obj, Directory):
new_base = os.path.join(base_path, obj.name).rstrip(os.sep)
flattened.append(new_base + os.sep)
flattened.extend(_FlattenStructure(new_base, obj.contents))
else:
assert(isinstance(obj, basestring))
flattened.append(os.path.join(base_path, obj))
return flattened
def CreateOnDiskHierarchy(base_path, dir_struct):
"""Creates on-disk representation of an in-memory directory structure.
Arguments:
base_path: The absolute root of the directory structure.
dir_struct: A recursively defined data structure that represents a
directory tree. The basic form is a list. Elements can be file names or
cros_test_lib.Directory objects. The 'contents' attribute of Directory
types is a directory structure representing the contents of the directory.
Examples:
- ['file1', 'file2']
- ['file1', Directory('directory', ['deepfile1', 'deepfile2']), 'file2']
"""
flattened = _FlattenStructure(base_path, dir_struct)
for f in flattened:
f = os.path.join(base_path, f)
if f.endswith(os.sep):
os.mkdir(f)
else:
osutils.Touch(f, makedirs=True)
def _VerifyDirectoryIterables(existing, expected):
"""Compare two iterables representing contents of a directory.
Paths in |existing| and |expected| will be compared for exact match.
Arguments:
existing: An iterable containing paths that exist.
expected: An iterable of paths that are expected.
Raises:
AssertionError when there is any divergence between |existing| and
|expected|.
"""
def FormatPaths(paths):
return '\n'.join(sorted(paths))
existing = set(existing)
expected = set(expected)
unexpected = existing - expected
if unexpected:
raise AssertionError('Found unexpected paths:\n%s'
% FormatPaths(unexpected))
missing = expected - existing
if missing:
raise AssertionError('These files were expected but not found:\n%s'
% FormatPaths(missing))
def _DirectoryIterator(base_path):
"""Iterates through the files and subdirs of a directory."""
for root, dirs, files in os.walk(base_path):
for e in [d + os.sep for d in dirs] + files:
yield os.path.join(root, e)
def VerifyOnDiskHierarchy(base_path, dir_struct):
"""Verify that an on-disk directory tree exactly matches a given structure.
Arguments:
See arguments of CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the on-disk
structure and the structure specified by 'dir_struct'.
"""
expected = _FlattenStructure(base_path, dir_struct)
_VerifyDirectoryIterables(_DirectoryIterator(base_path), expected)
def VerifyTarball(tarball, dir_struct):
"""Compare the contents of a tarball against a directory structure.
Arguments:
tarball: Path to the tarball.
dir_struct: See CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the tarball and the
structure specified by 'dir_struct'.
"""
contents = cros_build_lib.RunCommandCaptureOutput(
['tar', '-tf', tarball]).output.splitlines()
normalized = set()
for p in contents:
norm = os.path.normpath(p)
if p.endswith('/'):
norm += '/'
if norm in normalized:
raise AssertionError('Duplicate entry %r found in %r!' % (norm, tarball))
normalized.add(norm)
expected = _FlattenStructure('', dir_struct)
_VerifyDirectoryIterables(normalized, expected)
def _walk_mro_stacking(obj, attr, reverse=False):
iterator = iter if reverse else reversed
methods = (getattr(x, attr, None) for x in iterator(obj.__class__.__mro__))
seen = set()
for x in filter(None, methods):
x = getattr(x, 'im_func', x)
if x not in seen:
seen.add(x)
yield x
def _stacked_setUp(self):
self.__test_was_run__ = False
try:
for target in _walk_mro_stacking(self, '__raw_setUp__'):
target(self)
except:
# TestCase doesn't trigger tearDowns if setUp failed; thus
# manually force it ourselves to ensure cleanup occurs.
_stacked_tearDown(self)
raise
# Now mark the object as fully setUp; this is done so that
# any last minute assertions in tearDown can know if they should
# run or not.
self.__test_was_run__ = True
def _stacked_tearDown(self):
exc_info = None
for target in _walk_mro_stacking(self, '__raw_tearDown__', True):
#pylint: disable=W0702
try:
target(self)
except:
# Preserve the exception, throw it after running
# all tearDowns; we throw just the first also. We suppress
# pylint's warning here since it can't understand that we're
# actually raising the exception, just in a nonstandard way.
if exc_info is None:
exc_info = sys.exc_info()
if exc_info:
# Chuck the saved exception, w/ the same TB from
# when it occurred.
raise exc_info[0], exc_info[1], exc_info[2]
class StackedSetup(type):
"""Metaclass that extracts automatically stacks setUp and tearDown calls.
Basically this exists to make it easier to do setUp *correctly*, while also
suppressing some unittests misbehaviours- for example, the fact that if a
setUp throws an exception the corresponding tearDown isn't ran. This sorts
it.
Usage of it is via usual metaclass approach; just set
`__metaclass__ = StackedSetup` .
Note that this metaclass is designed such that because this is a metaclass,
rather than just a scope mutator, all derivative classes derive from this
metaclass; thus all derivative TestCase classes get automatic stacking."""
def __new__(mcs, name, bases, scope):
if 'setUp' in scope:
scope['__raw_setUp__'] = scope.pop('setUp')
scope['setUp'] = _stacked_setUp
if 'tearDown' in scope:
scope['__raw_tearDown__'] = scope.pop('tearDown')
scope['tearDown'] = _stacked_tearDown
return type.__new__(mcs, name, bases, scope)
class EasyAttr(dict):
"""Convenient class for simulating objects with attributes in tests.
An EasyAttr object can be created with any attributes initialized very
easily. Examples:
1) An object with .id=45 and .name="Joe":
testobj = EasyAttr(id=45, name="Joe")
2) An object with .title.text="Big" and .owner.text="Joe":
testobj = EasyAttr(title=EasyAttr(text="Big"), owner=EasyAttr(text="Joe"))
"""
__slots__ = ()
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
return AttributeError(attr)
def __delattr__(self, attr):
try:
self.pop(attr)
except KeyError:
raise AttributeError(attr)
def __setattr__(self, attr, value):
self[attr] = value
def __dir__(self):
return self.keys()
class OutputCapturer(object):
"""Class with limited support for capturing test stdout/stderr output.
Class is designed as a 'ContextManager'. Example usage in a test method
of an object of TestCase:
with self.OutputCapturer() as output:
# Capturing of stdout/stderr automatically starts now.
# Do stuff that sends output to stdout/stderr.
# Capturing automatically stops at end of 'with' block.
# stdout/stderr can be retrieved from the OutputCapturer object:
stdout = output.getStdoutLines() # Or other access methods
# Some Assert methods are only valid if capturing was used in test.
self.AssertOutputContainsError() # Or other related methods
"""
# These work with error output from operation module.
OPER_MSG_SPLIT_RE = re.compile(r'^\033\[1;.*?\033\[0m$|^[^\n]*$',
re.DOTALL | re.MULTILINE)
ERROR_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.RED,), re.DOTALL)
WARNING_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.YELLOW,), re.DOTALL)
__slots__ = ['_stderr', '_stderr_cap', '_stdout', '_stdout_cap']
def __init__(self):
self._stdout = None
self._stderr = None
self._stdout_cap = None
self._stderr_cap = None
def __enter__(self):
# This method is called with entering 'with' block.
self.StartCapturing()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# This method is called when exiting 'with' block.
self.StopCapturing()
if exc_type:
print('Exception during output capturing: %r' % (exc_val,))
stdout = self.GetStdout()
if stdout:
print('Captured stdout was:\n%s' % stdout)
else:
print('No captured stdout')
stderr = self.GetStderr()
if stderr:
print('Captured stderr was:\n%s' % stderr)
else:
print('No captured stderr')
def StartCapturing(self):
"""Begin capturing stdout and stderr."""
self._stdout = sys.stdout
self._stderr = sys.stderr
sys.stdout = self._stdout_cap = cStringIO.StringIO()
sys.stderr = self._stderr_cap = cStringIO.StringIO()
def StopCapturing(self):
"""Stop capturing stdout and stderr."""
# The only reason to check stdout or stderr separately might
# have capturing on independently is if StartCapturing did not complete.
if self._stdout:
sys.stdout = self._stdout
self._stdout = None
if self._stderr:
sys.stderr = self._stderr
self._stderr = None
def ClearCaptured(self):
# Only valid if capturing is not on.
assert self._stdout is None and self._stderr is None
self._stdout_cap = None
self._stderr_cap = None
def GetStdout(self):
"""Return captured stdout so far."""
return self._stdout_cap.getvalue()
def GetStderr(self):
"""Return captured stderr so far."""
return self._stderr_cap.getvalue()
def _GetOutputLines(self, output, include_empties):
"""Split |output| into lines, optionally |include_empties|.
Return array of lines.
"""
lines = self.OPER_MSG_SPLIT_RE.findall(output)
if not include_empties:
lines = [ln for ln in lines if ln]
return lines
def GetStdoutLines(self, include_empties=True):
"""Return captured stdout so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStdout(), include_empties)
def GetStderrLines(self, include_empties=True):
"""Return captured stderr so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStderr(), include_empties)
class TestCase(unittest.TestCase):
__metaclass__ = StackedSetup
# List of vars chromite is globally sensitive to and that should
# be suppressed for tests.
ENVIRON_VARIABLE_SUPPRESSIONS = ('CROS_CACHEDIR',)
def __init__(self, *args, **kwds):
unittest.TestCase.__init__(self, *args, **kwds)
# This is set to keep pylint from complaining.
self.__test_was_run__ = False
def setUp(self):
self.__saved_env__ = os.environ.copy()
self.__saved_cwd__ = os.getcwd()
self.__saved_umask__ = os.umask(022)
for x in self.ENVIRON_VARIABLE_SUPPRESSIONS:
os.environ.pop(x, None)
def tearDown(self):
osutils.SetEnvironment(self.__saved_env__)
os.chdir(self.__saved_cwd__)
os.umask(self.__saved_umask__)
def assertRaises2(self, exception, functor, *args, **kwargs):
"""Like assertRaises, just with checking of the excpetion.
args:
exception: The expected exception type to intecept.
functor: The function to invoke.
args: Positional args to pass to the function.
kwargs: Optional args to pass to the function. Note we pull
exact_kls, msg, and check_attrs from these kwargs.
exact_kls: If given, the exception raise must be *exactly* that class
type; derivatives are a failure.
check_attrs: If given, a mapping of attribute -> value to assert on
the resultant exception. Thus if you wanted to catch a ENOENT, you
would do:
assertRaises2(EnvironmentError, func, args,
attrs={"errno":errno.ENOENT})
msg: The error message to be displayed if the exception isn't raised.
If not given, a suitable one is defaulted to.
returns: The exception object.
"""
exact_kls = kwargs.pop("exact_kls", None)
check_attrs = kwargs.pop("check_attrs", {})
msg = kwargs.pop("msg", None)
if msg is None:
msg = ("%s(*%r, **%r) didn't throw an exception"
% (functor.__name__, args, kwargs))
try:
functor(*args, **kwargs)
raise AssertionError(msg)
except exception, e:
if exact_kls:
self.assertEqual(e.__class__, exception)
bad = []
for attr, required in check_attrs.iteritems():
self.assertTrue(hasattr(e, attr),
msg="%s lacks attr %s" % (e, attr))
value = getattr(e, attr)
if value != required:
bad.append("%s attr is %s, needed to be %s"
% (attr, value, required))
if bad:
raise AssertionError("\n".join(bad))
return e
class OutputTestCase(TestCase):
"""Base class for cros unit tests with utility methods."""
def __init__(self, *args, **kwds):
"""Base class __init__ takes a second argument."""
TestCase.__init__(self, *args, **kwds)
self._output_capturer = None
def OutputCapturer(self):
"""Create and return OutputCapturer object."""
self._output_capturer = OutputCapturer()
return self._output_capturer
def _GetOutputCapt(self):
"""Internal access to existing OutputCapturer.
Raises RuntimeError if output capturing was never on.
"""
if self._output_capturer:
return self._output_capturer
raise RuntimeError('Output capturing was never turned on for this test.')
def _GenCheckMsgFunc(self, prefix_re, line_re):
"""Return boolean func to check a line given |prefix_re| and |line_re|."""
def _method(line):
if prefix_re:
# Prefix regexp will strip off prefix (and suffix) from line.
match = prefix_re.search(line)
if match:
line = match.group(1)
else:
return False
return line_re.search(line) if line_re else True
# Provide a description of what this function looks for in a line. Error
# messages can make use of this.
_method.description = None
if prefix_re and line_re:
_method.description = ('line matching prefix regexp %r then regexp %r' %
(prefix_re.pattern, line_re.pattern))
elif prefix_re:
_method.description = 'line matching prefix regexp %r' % prefix_re.pattern
elif line_re:
_method.description = 'line matching regexp %r' % line_re.pattern
else:
raise RuntimeError('Nonsensical usage of _GenCheckMsgFunc: '
'no prefix_re or line_re')
return _method
def _ContainsMsgLine(self, lines, msg_check_func):
return any(msg_check_func(ln) for ln in lines)
def _GenOutputDescription(self, check_stdout, check_stderr):
# Some extra logic to make an error message useful.
if check_stdout and check_stderr:
return 'stdout or stderr'
elif check_stdout:
return 'stdout'
elif check_stderr:
return 'stderr'
def _AssertOutputContainsMsg(self, check_msg_func, invert,
check_stdout, check_stderr):
assert check_stdout or check_stderr
lines = []
if check_stdout:
lines.extend(self._GetOutputCapt().GetStdoutLines())
if check_stderr:
lines.extend(self._GetOutputCapt().GetStderrLines())
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
if invert:
msg = ('expected %s to not contain %s,\nbut found it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertFalse(result, msg=msg)
else:
msg = ('expected %s to contain %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputContainsError(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one error line.
If |regexp| is non-null, then the error line must also match it.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsWarning(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one warning line.
If |regexp| is non-null, then the warning line must also match it. | Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsLine(self, regexp, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains line matching |regexp|.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def _AssertOutputEndsInMsg(self, check_msg_func,
check_stdout, check_stderr):
"""Pass if requested output(s) ends(end) with an error message."""
assert check_stdout or check_stderr
lines = []
if check_stdout:
stdout_lines = self._GetOutputCapt().GetStdoutLines(include_empties=False)
if stdout_lines:
lines.append(stdout_lines[-1])
if check_stderr:
stderr_lines = self._GetOutputCapt().GetStderrLines(include_empties=False)
if stderr_lines:
lines.append(stderr_lines[-1])
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
msg = ('expected %s to end with %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputEndsInError(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in error line.
If |regexp| is non-null, then the error line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInWarning(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in warning line.
If |regexp| is non-null, then the warning line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInLine(self, regexp,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in line matching |regexp|.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def FuncCatchSystemExit(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| and catch exceptions.SystemExit.
Return tuple (return value or None, SystemExit number code or None).
"""
try:
returnval = func(*args, **kwargs)
return returnval, None
except exceptions.SystemExit as ex:
exit_code = ex.args[0]
return None, exit_code
def AssertFuncSystemExitZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a SystemExit with exit code 0 then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected system exit code 0, but caught none')
self.assertTrue(exit_code == 0,
msg='Expected system exit code 0, but caught %d' %
exit_code)
def AssertFuncSystemExitNonZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a non-zero SystemExit code then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected non-zero system exit code, but caught none')
self.assertFalse(exit_code == 0,
msg='Expected non-zero system exit code, but caught %d' %
exit_code)
def AssertRaisesAndReturn(self, error, func, *args, **kwargs):
"""Like assertRaises, but return exception raised."""
try:
func(*args, **kwargs)
self.assertTrue(False, msg='Expected %s but got none' % error)
except error as ex:
return ex
class TempDirTestCase(TestCase):
"""Mixin used to give each test a tempdir that is cleansed upon finish"""
sudo_cleanup = False
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
self.tempdir = None
def setUp(self):
#pylint: disable=W0212
osutils._TempDirSetup(self)
def tearDown(self):
#pylint: disable=W0212
osutils._TempDirTearDown(self, self.sudo_cleanup)
class _RunCommandMock(mox.MockObject):
"""Custom mock class used to suppress arguments we don't care about"""
DEFAULT_IGNORED_ARGS = ('print_cmd',)
def __call__(self, *args, **kwds):
for arg in self.DEFAULT_IGNORED_ARGS:
kwds.setdefault(arg, mox.IgnoreArg())
return mox.MockObject.__call__(self, *args, **kwds)
class LessAnnoyingMox(mox.Mox):
"""Mox derivative that slips in our suppressions to mox.
This is used by default via MoxTestCase; namely, this suppresses
certain arguments awareness that we don't care about via switching
in (dependent on the namespace requested) overriding MockObject
classing.
Via this, it makes maintenance much simpler- simplest example, if code
doesn't explicitly assert that print_cmd must be true/false... then
we don't care about what argument is set (it has no effect beyond output).
Mox normally *would* care, making it a pita to maintain. This selectively
suppresses that awareness, making it maintainable.
"""
mock_classes = {}.fromkeys(
['chromite.lib.cros_build_lib.%s' % x
for x in dir(cros_build_lib) if "RunCommand" in x],
_RunCommandMock)
@staticmethod
def _GetNamespace(obj):
return '%s.%s' % (obj.__module__, obj.__name__)
def CreateMock(self, obj, attrs=None):
if attrs is None:
attrs = {}
kls = self.mock_classes.get(
self._GetNamespace(obj), mox.MockObject)
# Copy attrs; I don't trust mox to not be stupid here.
new_mock = kls(obj, attrs=attrs)
self._mock_objects.append(new_mock)
return new_mock
class MoxTestCase(TestCase):
"""Mox based test case; compatible with StackedSetup"""
mox_suppress_verify_all = False
def setUp(self):
self.mox = LessAnnoyingMox()
self.stubs = mox.stubout.StubOutForTesting()
def tearDown(self):
try:
if self.__test_was_run__ and not self.mox_suppress_verify_all:
# This means the test code was actually ran.
# force a verifyall
self.mox.VerifyAll()
finally:
if hasattr(self, 'mox'):
self.mox.UnsetStubs()
if hasattr(self, 'stubs'):
self.stubs.UnsetAll()
self.stubs.SmartUnsetAll()
class MoxTempDirTestCase(TempDirTestCase, MoxTestCase):
"""Convenience class mixing TempDir and Mox"""
class MoxOutputTestCase(OutputTestCase, MoxTestCase):
"""Conevenience class mixing OutputTestCase and MoxTestCase."""
class MockTestCase(TestCase):
"""Python-mock based test case; compatible with StackedSetup"""
def setUp(self):
self._patchers = []
def tearDown(self):
# We can't just run stopall() by itself, and need to stop our patchers
# manually since stopall() doesn't handle repatching.
cros_build_lib.SafeRun([p.stop for p in reversed(self._patchers)] +
[mock.patch.stopall])
def StartPatcher(self, patcher):
"""Call start() on the patcher, and stop() in tearDown."""
m = patcher.start()
self._patchers.append(patcher)
return m
def PatchObject(self, *args, **kwargs):
"""Create and start a mock.patch.object().
stop() will be called automatically during tearDown.
"""
return self.StartPatcher(mock.patch.object(*args, **kwargs))
# MockTestCase must be before TempDirTestCase in this inheritance order,
# because MockTestCase.StartPatcher() calls may be for PartialMocks, which
# create their own temporary directory. The teardown for those directories
# occurs during MockTestCase.tearDown(), which needs to be run before
# TempDirTestCase.tearDown().
class MockTempDirTestCase(MockTestCase, TempDirTestCase):
"""Convenience class mixing TempDir and Mock."""
def FindTests(directory, module_namespace=''):
"""Find all *_unittest.py, and return their python namespaces.
Args:
directory: The directory to scan for tests.
module_namespace: What namespace to prefix all found tests with.
Returns:
A list of python unittests in python namespace form.
"""
results = cros_build_lib.RunCommandCaptureOutput(
['find', '.', '-name', '*_unittest.py', '-printf', '%P\n'],
cwd=directory, print_cmd=False).output.splitlines()
# Drop the trailing .py, inject in the name if one was given.
if module_namespace:
module_namespace += '.'
return [module_namespace + x[:-3].replace('/', '.') for x in results]
@contextlib.contextmanager
def DisableLogging():
"""Temporarily disable chromite logging."""
backup = cros_build_lib.logger.disabled
try:
cros_build_lib.logger.disabled = True
yield
finally:
cros_build_lib.logger.disabled = backup
def main(**kwds):
"""Helper wrapper around unittest.main. Invoke this, not unittest.main.
Any passed in kwds are passed directly down to unittest.main; via this, you
can inject custom argv for example (to limit what tests run).
"""
# Default to exit=True; this matches old behaviour, and allows unittest
# to trigger sys.exit on its own. Unfortunately, the exit keyword is only
# available in 2.7- as such, handle it ourselves.
allow_exit = kwds.pop('exit', True)
cros_build_lib.SetupBasicLogging()
try:
unittest.main(**kwds)
raise SystemExit(0)
except SystemExit, e:
if e.__class__ != SystemExit or allow_exit:
raise
# Redo the exit code ourselves- unittest throws True on occasion.
# This is why the lack of typing for SystemExit code attribute makes life
# suck, in parallel to unittest being special.
# Finally, note that it's possible for code to be a string...
if isinstance(e.code, (int, long)):
# This is done since exit code may be something other than 1/0; if they
# explicitly pass it, we'll honor it.
return e.code
return 1 if e.code else 0 | If |invert| is true, then assert the line is NOT found.
| random_line_split |
cros_test_lib.py | #!/usr/bin/python
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Cros unit test library, with utility functions."""
from __future__ import print_function
import collections
import contextlib
import cStringIO
import exceptions
import mox
import os
import re
import sys
import unittest
import osutils
import terminal
import cros_build_lib
if 'chromite' not in sys.modules:
# TODO(build): Finish test wrapper (http://crosbug.com/37517).
# Until then, we detect the chromite manipulation not yet having
# occurred, and inject it ourselves.
# We cannot just import chromite since this module is still accessed
# from non chromite.lib.cros_test_lib pathways (which will be resolved
# implicitly via 37517).
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '../third_party'))
import mock
Directory = collections.namedtuple('Directory', ['name', 'contents'])
def _FlattenStructure(base_path, dir_struct):
"""Converts a directory structure to a list of paths."""
flattened = []
for obj in dir_struct:
if isinstance(obj, Directory):
new_base = os.path.join(base_path, obj.name).rstrip(os.sep)
flattened.append(new_base + os.sep)
flattened.extend(_FlattenStructure(new_base, obj.contents))
else:
assert(isinstance(obj, basestring))
flattened.append(os.path.join(base_path, obj))
return flattened
def CreateOnDiskHierarchy(base_path, dir_struct):
"""Creates on-disk representation of an in-memory directory structure.
Arguments:
base_path: The absolute root of the directory structure.
dir_struct: A recursively defined data structure that represents a
directory tree. The basic form is a list. Elements can be file names or
cros_test_lib.Directory objects. The 'contents' attribute of Directory
types is a directory structure representing the contents of the directory.
Examples:
- ['file1', 'file2']
- ['file1', Directory('directory', ['deepfile1', 'deepfile2']), 'file2']
"""
flattened = _FlattenStructure(base_path, dir_struct)
for f in flattened:
f = os.path.join(base_path, f)
if f.endswith(os.sep):
os.mkdir(f)
else:
osutils.Touch(f, makedirs=True)
def _VerifyDirectoryIterables(existing, expected):
"""Compare two iterables representing contents of a directory.
Paths in |existing| and |expected| will be compared for exact match.
Arguments:
existing: An iterable containing paths that exist.
expected: An iterable of paths that are expected.
Raises:
AssertionError when there is any divergence between |existing| and
|expected|.
"""
def FormatPaths(paths):
return '\n'.join(sorted(paths))
existing = set(existing)
expected = set(expected)
unexpected = existing - expected
if unexpected:
raise AssertionError('Found unexpected paths:\n%s'
% FormatPaths(unexpected))
missing = expected - existing
if missing:
raise AssertionError('These files were expected but not found:\n%s'
% FormatPaths(missing))
def _DirectoryIterator(base_path):
"""Iterates through the files and subdirs of a directory."""
for root, dirs, files in os.walk(base_path):
for e in [d + os.sep for d in dirs] + files:
yield os.path.join(root, e)
def VerifyOnDiskHierarchy(base_path, dir_struct):
"""Verify that an on-disk directory tree exactly matches a given structure.
Arguments:
See arguments of CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the on-disk
structure and the structure specified by 'dir_struct'.
"""
expected = _FlattenStructure(base_path, dir_struct)
_VerifyDirectoryIterables(_DirectoryIterator(base_path), expected)
def VerifyTarball(tarball, dir_struct):
"""Compare the contents of a tarball against a directory structure.
Arguments:
tarball: Path to the tarball.
dir_struct: See CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the tarball and the
structure specified by 'dir_struct'.
"""
contents = cros_build_lib.RunCommandCaptureOutput(
['tar', '-tf', tarball]).output.splitlines()
normalized = set()
for p in contents:
norm = os.path.normpath(p)
if p.endswith('/'):
norm += '/'
if norm in normalized:
raise AssertionError('Duplicate entry %r found in %r!' % (norm, tarball))
normalized.add(norm)
expected = _FlattenStructure('', dir_struct)
_VerifyDirectoryIterables(normalized, expected)
def _walk_mro_stacking(obj, attr, reverse=False):
iterator = iter if reverse else reversed
methods = (getattr(x, attr, None) for x in iterator(obj.__class__.__mro__))
seen = set()
for x in filter(None, methods):
x = getattr(x, 'im_func', x)
if x not in seen:
seen.add(x)
yield x
def _stacked_setUp(self):
self.__test_was_run__ = False
try:
for target in _walk_mro_stacking(self, '__raw_setUp__'):
target(self)
except:
# TestCase doesn't trigger tearDowns if setUp failed; thus
# manually force it ourselves to ensure cleanup occurs.
_stacked_tearDown(self)
raise
# Now mark the object as fully setUp; this is done so that
# any last minute assertions in tearDown can know if they should
# run or not.
self.__test_was_run__ = True
def _stacked_tearDown(self):
exc_info = None
for target in _walk_mro_stacking(self, '__raw_tearDown__', True):
#pylint: disable=W0702
try:
target(self)
except:
# Preserve the exception, throw it after running
# all tearDowns; we throw just the first also. We suppress
# pylint's warning here since it can't understand that we're
# actually raising the exception, just in a nonstandard way.
if exc_info is None:
exc_info = sys.exc_info()
if exc_info:
# Chuck the saved exception, w/ the same TB from
# when it occurred.
raise exc_info[0], exc_info[1], exc_info[2]
class StackedSetup(type):
"""Metaclass that extracts automatically stacks setUp and tearDown calls.
Basically this exists to make it easier to do setUp *correctly*, while also
suppressing some unittests misbehaviours- for example, the fact that if a
setUp throws an exception the corresponding tearDown isn't ran. This sorts
it.
Usage of it is via usual metaclass approach; just set
`__metaclass__ = StackedSetup` .
Note that this metaclass is designed such that because this is a metaclass,
rather than just a scope mutator, all derivative classes derive from this
metaclass; thus all derivative TestCase classes get automatic stacking."""
def __new__(mcs, name, bases, scope):
if 'setUp' in scope:
scope['__raw_setUp__'] = scope.pop('setUp')
scope['setUp'] = _stacked_setUp
if 'tearDown' in scope:
scope['__raw_tearDown__'] = scope.pop('tearDown')
scope['tearDown'] = _stacked_tearDown
return type.__new__(mcs, name, bases, scope)
class EasyAttr(dict):
"""Convenient class for simulating objects with attributes in tests.
An EasyAttr object can be created with any attributes initialized very
easily. Examples:
1) An object with .id=45 and .name="Joe":
testobj = EasyAttr(id=45, name="Joe")
2) An object with .title.text="Big" and .owner.text="Joe":
testobj = EasyAttr(title=EasyAttr(text="Big"), owner=EasyAttr(text="Joe"))
"""
__slots__ = ()
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
return AttributeError(attr)
def __delattr__(self, attr):
try:
self.pop(attr)
except KeyError:
raise AttributeError(attr)
def __setattr__(self, attr, value):
self[attr] = value
def __dir__(self):
return self.keys()
class OutputCapturer(object):
"""Class with limited support for capturing test stdout/stderr output.
Class is designed as a 'ContextManager'. Example usage in a test method
of an object of TestCase:
with self.OutputCapturer() as output:
# Capturing of stdout/stderr automatically starts now.
# Do stuff that sends output to stdout/stderr.
# Capturing automatically stops at end of 'with' block.
# stdout/stderr can be retrieved from the OutputCapturer object:
stdout = output.getStdoutLines() # Or other access methods
# Some Assert methods are only valid if capturing was used in test.
self.AssertOutputContainsError() # Or other related methods
"""
# These work with error output from operation module.
OPER_MSG_SPLIT_RE = re.compile(r'^\033\[1;.*?\033\[0m$|^[^\n]*$',
re.DOTALL | re.MULTILINE)
ERROR_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.RED,), re.DOTALL)
WARNING_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.YELLOW,), re.DOTALL)
__slots__ = ['_stderr', '_stderr_cap', '_stdout', '_stdout_cap']
def __init__(self):
self._stdout = None
self._stderr = None
self._stdout_cap = None
self._stderr_cap = None
def __enter__(self):
# This method is called with entering 'with' block.
self.StartCapturing()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# This method is called when exiting 'with' block.
self.StopCapturing()
if exc_type:
print('Exception during output capturing: %r' % (exc_val,))
stdout = self.GetStdout()
if stdout:
print('Captured stdout was:\n%s' % stdout)
else:
print('No captured stdout')
stderr = self.GetStderr()
if stderr:
print('Captured stderr was:\n%s' % stderr)
else:
print('No captured stderr')
def StartCapturing(self):
"""Begin capturing stdout and stderr."""
self._stdout = sys.stdout
self._stderr = sys.stderr
sys.stdout = self._stdout_cap = cStringIO.StringIO()
sys.stderr = self._stderr_cap = cStringIO.StringIO()
def StopCapturing(self):
"""Stop capturing stdout and stderr."""
# The only reason to check stdout or stderr separately might
# have capturing on independently is if StartCapturing did not complete.
if self._stdout:
sys.stdout = self._stdout
self._stdout = None
if self._stderr:
sys.stderr = self._stderr
self._stderr = None
def ClearCaptured(self):
# Only valid if capturing is not on.
assert self._stdout is None and self._stderr is None
self._stdout_cap = None
self._stderr_cap = None
def GetStdout(self):
"""Return captured stdout so far."""
return self._stdout_cap.getvalue()
def GetStderr(self):
"""Return captured stderr so far."""
return self._stderr_cap.getvalue()
def _GetOutputLines(self, output, include_empties):
"""Split |output| into lines, optionally |include_empties|.
Return array of lines.
"""
lines = self.OPER_MSG_SPLIT_RE.findall(output)
if not include_empties:
lines = [ln for ln in lines if ln]
return lines
def GetStdoutLines(self, include_empties=True):
"""Return captured stdout so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStdout(), include_empties)
def GetStderrLines(self, include_empties=True):
"""Return captured stderr so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStderr(), include_empties)
class TestCase(unittest.TestCase):
__metaclass__ = StackedSetup
# List of vars chromite is globally sensitive to and that should
# be suppressed for tests.
ENVIRON_VARIABLE_SUPPRESSIONS = ('CROS_CACHEDIR',)
def __init__(self, *args, **kwds):
unittest.TestCase.__init__(self, *args, **kwds)
# This is set to keep pylint from complaining.
self.__test_was_run__ = False
def setUp(self):
self.__saved_env__ = os.environ.copy()
self.__saved_cwd__ = os.getcwd()
self.__saved_umask__ = os.umask(022)
for x in self.ENVIRON_VARIABLE_SUPPRESSIONS:
os.environ.pop(x, None)
def tearDown(self):
osutils.SetEnvironment(self.__saved_env__)
os.chdir(self.__saved_cwd__)
os.umask(self.__saved_umask__)
def assertRaises2(self, exception, functor, *args, **kwargs):
"""Like assertRaises, just with checking of the excpetion.
args:
exception: The expected exception type to intecept.
functor: The function to invoke.
args: Positional args to pass to the function.
kwargs: Optional args to pass to the function. Note we pull
exact_kls, msg, and check_attrs from these kwargs.
exact_kls: If given, the exception raise must be *exactly* that class
type; derivatives are a failure.
check_attrs: If given, a mapping of attribute -> value to assert on
the resultant exception. Thus if you wanted to catch a ENOENT, you
would do:
assertRaises2(EnvironmentError, func, args,
attrs={"errno":errno.ENOENT})
msg: The error message to be displayed if the exception isn't raised.
If not given, a suitable one is defaulted to.
returns: The exception object.
"""
exact_kls = kwargs.pop("exact_kls", None)
check_attrs = kwargs.pop("check_attrs", {})
msg = kwargs.pop("msg", None)
if msg is None:
msg = ("%s(*%r, **%r) didn't throw an exception"
% (functor.__name__, args, kwargs))
try:
functor(*args, **kwargs)
raise AssertionError(msg)
except exception, e:
if exact_kls:
self.assertEqual(e.__class__, exception)
bad = []
for attr, required in check_attrs.iteritems():
self.assertTrue(hasattr(e, attr),
msg="%s lacks attr %s" % (e, attr))
value = getattr(e, attr)
if value != required:
bad.append("%s attr is %s, needed to be %s"
% (attr, value, required))
if bad:
raise AssertionError("\n".join(bad))
return e
class OutputTestCase(TestCase):
"""Base class for cros unit tests with utility methods."""
def __init__(self, *args, **kwds):
"""Base class __init__ takes a second argument."""
TestCase.__init__(self, *args, **kwds)
self._output_capturer = None
def OutputCapturer(self):
"""Create and return OutputCapturer object."""
self._output_capturer = OutputCapturer()
return self._output_capturer
def _GetOutputCapt(self):
"""Internal access to existing OutputCapturer.
Raises RuntimeError if output capturing was never on.
"""
if self._output_capturer:
return self._output_capturer
raise RuntimeError('Output capturing was never turned on for this test.')
def _GenCheckMsgFunc(self, prefix_re, line_re):
"""Return boolean func to check a line given |prefix_re| and |line_re|."""
def _method(line):
if prefix_re:
# Prefix regexp will strip off prefix (and suffix) from line.
match = prefix_re.search(line)
if match:
line = match.group(1)
else:
return False
return line_re.search(line) if line_re else True
# Provide a description of what this function looks for in a line. Error
# messages can make use of this.
_method.description = None
if prefix_re and line_re:
_method.description = ('line matching prefix regexp %r then regexp %r' %
(prefix_re.pattern, line_re.pattern))
elif prefix_re:
_method.description = 'line matching prefix regexp %r' % prefix_re.pattern
elif line_re:
_method.description = 'line matching regexp %r' % line_re.pattern
else:
raise RuntimeError('Nonsensical usage of _GenCheckMsgFunc: '
'no prefix_re or line_re')
return _method
def _ContainsMsgLine(self, lines, msg_check_func):
return any(msg_check_func(ln) for ln in lines)
def _GenOutputDescription(self, check_stdout, check_stderr):
# Some extra logic to make an error message useful.
if check_stdout and check_stderr:
return 'stdout or stderr'
elif check_stdout:
return 'stdout'
elif check_stderr:
return 'stderr'
def _AssertOutputContainsMsg(self, check_msg_func, invert,
check_stdout, check_stderr):
assert check_stdout or check_stderr
lines = []
if check_stdout:
lines.extend(self._GetOutputCapt().GetStdoutLines())
if check_stderr:
lines.extend(self._GetOutputCapt().GetStderrLines())
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
if invert:
msg = ('expected %s to not contain %s,\nbut found it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertFalse(result, msg=msg)
else:
msg = ('expected %s to contain %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputContainsError(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one error line.
If |regexp| is non-null, then the error line must also match it.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsWarning(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one warning line.
If |regexp| is non-null, then the warning line must also match it.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsLine(self, regexp, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains line matching |regexp|.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def _AssertOutputEndsInMsg(self, check_msg_func,
check_stdout, check_stderr):
"""Pass if requested output(s) ends(end) with an error message."""
assert check_stdout or check_stderr
lines = []
if check_stdout:
stdout_lines = self._GetOutputCapt().GetStdoutLines(include_empties=False)
if stdout_lines:
lines.append(stdout_lines[-1])
if check_stderr:
stderr_lines = self._GetOutputCapt().GetStderrLines(include_empties=False)
if stderr_lines:
lines.append(stderr_lines[-1])
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
msg = ('expected %s to end with %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputEndsInError(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in error line.
If |regexp| is non-null, then the error line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInWarning(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in warning line.
If |regexp| is non-null, then the warning line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInLine(self, regexp,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in line matching |regexp|.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def FuncCatchSystemExit(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| and catch exceptions.SystemExit.
Return tuple (return value or None, SystemExit number code or None).
"""
try:
returnval = func(*args, **kwargs)
return returnval, None
except exceptions.SystemExit as ex:
exit_code = ex.args[0]
return None, exit_code
def AssertFuncSystemExitZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a SystemExit with exit code 0 then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected system exit code 0, but caught none')
self.assertTrue(exit_code == 0,
msg='Expected system exit code 0, but caught %d' %
exit_code)
def AssertFuncSystemExitNonZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a non-zero SystemExit code then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected non-zero system exit code, but caught none')
self.assertFalse(exit_code == 0,
msg='Expected non-zero system exit code, but caught %d' %
exit_code)
def AssertRaisesAndReturn(self, error, func, *args, **kwargs):
"""Like assertRaises, but return exception raised."""
try:
func(*args, **kwargs)
self.assertTrue(False, msg='Expected %s but got none' % error)
except error as ex:
return ex
class TempDirTestCase(TestCase):
"""Mixin used to give each test a tempdir that is cleansed upon finish"""
sudo_cleanup = False
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
self.tempdir = None
def setUp(self):
#pylint: disable=W0212
osutils._TempDirSetup(self)
def tearDown(self):
#pylint: disable=W0212
osutils._TempDirTearDown(self, self.sudo_cleanup)
class _RunCommandMock(mox.MockObject):
"""Custom mock class used to suppress arguments we don't care about"""
DEFAULT_IGNORED_ARGS = ('print_cmd',)
def __call__(self, *args, **kwds):
for arg in self.DEFAULT_IGNORED_ARGS:
kwds.setdefault(arg, mox.IgnoreArg())
return mox.MockObject.__call__(self, *args, **kwds)
class LessAnnoyingMox(mox.Mox):
"""Mox derivative that slips in our suppressions to mox.
This is used by default via MoxTestCase; namely, this suppresses
certain arguments awareness that we don't care about via switching
in (dependent on the namespace requested) overriding MockObject
classing.
Via this, it makes maintenance much simpler- simplest example, if code
doesn't explicitly assert that print_cmd must be true/false... then
we don't care about what argument is set (it has no effect beyond output).
Mox normally *would* care, making it a pita to maintain. This selectively
suppresses that awareness, making it maintainable.
"""
mock_classes = {}.fromkeys(
['chromite.lib.cros_build_lib.%s' % x
for x in dir(cros_build_lib) if "RunCommand" in x],
_RunCommandMock)
@staticmethod
def _GetNamespace(obj):
return '%s.%s' % (obj.__module__, obj.__name__)
def CreateMock(self, obj, attrs=None):
if attrs is None:
attrs = {}
kls = self.mock_classes.get(
self._GetNamespace(obj), mox.MockObject)
# Copy attrs; I don't trust mox to not be stupid here.
new_mock = kls(obj, attrs=attrs)
self._mock_objects.append(new_mock)
return new_mock
class MoxTestCase(TestCase):
"""Mox based test case; compatible with StackedSetup"""
mox_suppress_verify_all = False
def setUp(self):
self.mox = LessAnnoyingMox()
self.stubs = mox.stubout.StubOutForTesting()
def tearDown(self):
try:
if self.__test_was_run__ and not self.mox_suppress_verify_all:
# This means the test code was actually ran.
# force a verifyall
self.mox.VerifyAll()
finally:
if hasattr(self, 'mox'):
self.mox.UnsetStubs()
if hasattr(self, 'stubs'):
self.stubs.UnsetAll()
self.stubs.SmartUnsetAll()
class MoxTempDirTestCase(TempDirTestCase, MoxTestCase):
"""Convenience class mixing TempDir and Mox"""
class MoxOutputTestCase(OutputTestCase, MoxTestCase):
"""Conevenience class mixing OutputTestCase and MoxTestCase."""
class MockTestCase(TestCase):
"""Python-mock based test case; compatible with StackedSetup"""
def setUp(self):
self._patchers = []
def tearDown(self):
# We can't just run stopall() by itself, and need to stop our patchers
# manually since stopall() doesn't handle repatching.
cros_build_lib.SafeRun([p.stop for p in reversed(self._patchers)] +
[mock.patch.stopall])
def StartPatcher(self, patcher):
"""Call start() on the patcher, and stop() in tearDown."""
m = patcher.start()
self._patchers.append(patcher)
return m
def PatchObject(self, *args, **kwargs):
"""Create and start a mock.patch.object().
stop() will be called automatically during tearDown.
"""
return self.StartPatcher(mock.patch.object(*args, **kwargs))
# MockTestCase must be before TempDirTestCase in this inheritance order,
# because MockTestCase.StartPatcher() calls may be for PartialMocks, which
# create their own temporary directory. The teardown for those directories
# occurs during MockTestCase.tearDown(), which needs to be run before
# TempDirTestCase.tearDown().
class MockTempDirTestCase(MockTestCase, TempDirTestCase):
"""Convenience class mixing TempDir and Mock."""
def FindTests(directory, module_namespace=''):
"""Find all *_unittest.py, and return their python namespaces.
Args:
directory: The directory to scan for tests.
module_namespace: What namespace to prefix all found tests with.
Returns:
A list of python unittests in python namespace form.
"""
results = cros_build_lib.RunCommandCaptureOutput(
['find', '.', '-name', '*_unittest.py', '-printf', '%P\n'],
cwd=directory, print_cmd=False).output.splitlines()
# Drop the trailing .py, inject in the name if one was given.
if module_namespace:
module_namespace += '.'
return [module_namespace + x[:-3].replace('/', '.') for x in results]
@contextlib.contextmanager
def DisableLogging():
"""Temporarily disable chromite logging."""
backup = cros_build_lib.logger.disabled
try:
cros_build_lib.logger.disabled = True
yield
finally:
cros_build_lib.logger.disabled = backup
def | (**kwds):
"""Helper wrapper around unittest.main. Invoke this, not unittest.main.
Any passed in kwds are passed directly down to unittest.main; via this, you
can inject custom argv for example (to limit what tests run).
"""
# Default to exit=True; this matches old behaviour, and allows unittest
# to trigger sys.exit on its own. Unfortunately, the exit keyword is only
# available in 2.7- as such, handle it ourselves.
allow_exit = kwds.pop('exit', True)
cros_build_lib.SetupBasicLogging()
try:
unittest.main(**kwds)
raise SystemExit(0)
except SystemExit, e:
if e.__class__ != SystemExit or allow_exit:
raise
# Redo the exit code ourselves- unittest throws True on occasion.
# This is why the lack of typing for SystemExit code attribute makes life
# suck, in parallel to unittest being special.
# Finally, note that it's possible for code to be a string...
if isinstance(e.code, (int, long)):
# This is done since exit code may be something other than 1/0; if they
# explicitly pass it, we'll honor it.
return e.code
return 1 if e.code else 0
| main | identifier_name |
cros_test_lib.py | #!/usr/bin/python
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Cros unit test library, with utility functions."""
from __future__ import print_function
import collections
import contextlib
import cStringIO
import exceptions
import mox
import os
import re
import sys
import unittest
import osutils
import terminal
import cros_build_lib
if 'chromite' not in sys.modules:
# TODO(build): Finish test wrapper (http://crosbug.com/37517).
# Until then, we detect the chromite manipulation not yet having
# occurred, and inject it ourselves.
# We cannot just import chromite since this module is still accessed
# from non chromite.lib.cros_test_lib pathways (which will be resolved
# implicitly via 37517).
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '../third_party'))
import mock
Directory = collections.namedtuple('Directory', ['name', 'contents'])
def _FlattenStructure(base_path, dir_struct):
"""Converts a directory structure to a list of paths."""
flattened = []
for obj in dir_struct:
if isinstance(obj, Directory):
new_base = os.path.join(base_path, obj.name).rstrip(os.sep)
flattened.append(new_base + os.sep)
flattened.extend(_FlattenStructure(new_base, obj.contents))
else:
assert(isinstance(obj, basestring))
flattened.append(os.path.join(base_path, obj))
return flattened
def CreateOnDiskHierarchy(base_path, dir_struct):
"""Creates on-disk representation of an in-memory directory structure.
Arguments:
base_path: The absolute root of the directory structure.
dir_struct: A recursively defined data structure that represents a
directory tree. The basic form is a list. Elements can be file names or
cros_test_lib.Directory objects. The 'contents' attribute of Directory
types is a directory structure representing the contents of the directory.
Examples:
- ['file1', 'file2']
- ['file1', Directory('directory', ['deepfile1', 'deepfile2']), 'file2']
"""
flattened = _FlattenStructure(base_path, dir_struct)
for f in flattened:
f = os.path.join(base_path, f)
if f.endswith(os.sep):
os.mkdir(f)
else:
osutils.Touch(f, makedirs=True)
def _VerifyDirectoryIterables(existing, expected):
"""Compare two iterables representing contents of a directory.
Paths in |existing| and |expected| will be compared for exact match.
Arguments:
existing: An iterable containing paths that exist.
expected: An iterable of paths that are expected.
Raises:
AssertionError when there is any divergence between |existing| and
|expected|.
"""
def FormatPaths(paths):
return '\n'.join(sorted(paths))
existing = set(existing)
expected = set(expected)
unexpected = existing - expected
if unexpected:
raise AssertionError('Found unexpected paths:\n%s'
% FormatPaths(unexpected))
missing = expected - existing
if missing:
raise AssertionError('These files were expected but not found:\n%s'
% FormatPaths(missing))
def _DirectoryIterator(base_path):
"""Iterates through the files and subdirs of a directory."""
for root, dirs, files in os.walk(base_path):
for e in [d + os.sep for d in dirs] + files:
yield os.path.join(root, e)
def VerifyOnDiskHierarchy(base_path, dir_struct):
"""Verify that an on-disk directory tree exactly matches a given structure.
Arguments:
See arguments of CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the on-disk
structure and the structure specified by 'dir_struct'.
"""
expected = _FlattenStructure(base_path, dir_struct)
_VerifyDirectoryIterables(_DirectoryIterator(base_path), expected)
def VerifyTarball(tarball, dir_struct):
"""Compare the contents of a tarball against a directory structure.
Arguments:
tarball: Path to the tarball.
dir_struct: See CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the tarball and the
structure specified by 'dir_struct'.
"""
contents = cros_build_lib.RunCommandCaptureOutput(
['tar', '-tf', tarball]).output.splitlines()
normalized = set()
for p in contents:
norm = os.path.normpath(p)
if p.endswith('/'):
norm += '/'
if norm in normalized:
|
normalized.add(norm)
expected = _FlattenStructure('', dir_struct)
_VerifyDirectoryIterables(normalized, expected)
def _walk_mro_stacking(obj, attr, reverse=False):
iterator = iter if reverse else reversed
methods = (getattr(x, attr, None) for x in iterator(obj.__class__.__mro__))
seen = set()
for x in filter(None, methods):
x = getattr(x, 'im_func', x)
if x not in seen:
seen.add(x)
yield x
def _stacked_setUp(self):
self.__test_was_run__ = False
try:
for target in _walk_mro_stacking(self, '__raw_setUp__'):
target(self)
except:
# TestCase doesn't trigger tearDowns if setUp failed; thus
# manually force it ourselves to ensure cleanup occurs.
_stacked_tearDown(self)
raise
# Now mark the object as fully setUp; this is done so that
# any last minute assertions in tearDown can know if they should
# run or not.
self.__test_was_run__ = True
def _stacked_tearDown(self):
exc_info = None
for target in _walk_mro_stacking(self, '__raw_tearDown__', True):
#pylint: disable=W0702
try:
target(self)
except:
# Preserve the exception, throw it after running
# all tearDowns; we throw just the first also. We suppress
# pylint's warning here since it can't understand that we're
# actually raising the exception, just in a nonstandard way.
if exc_info is None:
exc_info = sys.exc_info()
if exc_info:
# Chuck the saved exception, w/ the same TB from
# when it occurred.
raise exc_info[0], exc_info[1], exc_info[2]
class StackedSetup(type):
"""Metaclass that extracts automatically stacks setUp and tearDown calls.
Basically this exists to make it easier to do setUp *correctly*, while also
suppressing some unittests misbehaviours- for example, the fact that if a
setUp throws an exception the corresponding tearDown isn't ran. This sorts
it.
Usage of it is via usual metaclass approach; just set
`__metaclass__ = StackedSetup` .
Note that this metaclass is designed such that because this is a metaclass,
rather than just a scope mutator, all derivative classes derive from this
metaclass; thus all derivative TestCase classes get automatic stacking."""
def __new__(mcs, name, bases, scope):
if 'setUp' in scope:
scope['__raw_setUp__'] = scope.pop('setUp')
scope['setUp'] = _stacked_setUp
if 'tearDown' in scope:
scope['__raw_tearDown__'] = scope.pop('tearDown')
scope['tearDown'] = _stacked_tearDown
return type.__new__(mcs, name, bases, scope)
class EasyAttr(dict):
"""Convenient class for simulating objects with attributes in tests.
An EasyAttr object can be created with any attributes initialized very
easily. Examples:
1) An object with .id=45 and .name="Joe":
testobj = EasyAttr(id=45, name="Joe")
2) An object with .title.text="Big" and .owner.text="Joe":
testobj = EasyAttr(title=EasyAttr(text="Big"), owner=EasyAttr(text="Joe"))
"""
__slots__ = ()
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
return AttributeError(attr)
def __delattr__(self, attr):
try:
self.pop(attr)
except KeyError:
raise AttributeError(attr)
def __setattr__(self, attr, value):
self[attr] = value
def __dir__(self):
return self.keys()
class OutputCapturer(object):
"""Class with limited support for capturing test stdout/stderr output.
Class is designed as a 'ContextManager'. Example usage in a test method
of an object of TestCase:
with self.OutputCapturer() as output:
# Capturing of stdout/stderr automatically starts now.
# Do stuff that sends output to stdout/stderr.
# Capturing automatically stops at end of 'with' block.
# stdout/stderr can be retrieved from the OutputCapturer object:
stdout = output.getStdoutLines() # Or other access methods
# Some Assert methods are only valid if capturing was used in test.
self.AssertOutputContainsError() # Or other related methods
"""
# These work with error output from operation module.
OPER_MSG_SPLIT_RE = re.compile(r'^\033\[1;.*?\033\[0m$|^[^\n]*$',
re.DOTALL | re.MULTILINE)
ERROR_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.RED,), re.DOTALL)
WARNING_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.YELLOW,), re.DOTALL)
__slots__ = ['_stderr', '_stderr_cap', '_stdout', '_stdout_cap']
def __init__(self):
self._stdout = None
self._stderr = None
self._stdout_cap = None
self._stderr_cap = None
def __enter__(self):
# This method is called with entering 'with' block.
self.StartCapturing()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# This method is called when exiting 'with' block.
self.StopCapturing()
if exc_type:
print('Exception during output capturing: %r' % (exc_val,))
stdout = self.GetStdout()
if stdout:
print('Captured stdout was:\n%s' % stdout)
else:
print('No captured stdout')
stderr = self.GetStderr()
if stderr:
print('Captured stderr was:\n%s' % stderr)
else:
print('No captured stderr')
def StartCapturing(self):
"""Begin capturing stdout and stderr."""
self._stdout = sys.stdout
self._stderr = sys.stderr
sys.stdout = self._stdout_cap = cStringIO.StringIO()
sys.stderr = self._stderr_cap = cStringIO.StringIO()
def StopCapturing(self):
"""Stop capturing stdout and stderr."""
# The only reason to check stdout or stderr separately might
# have capturing on independently is if StartCapturing did not complete.
if self._stdout:
sys.stdout = self._stdout
self._stdout = None
if self._stderr:
sys.stderr = self._stderr
self._stderr = None
def ClearCaptured(self):
# Only valid if capturing is not on.
assert self._stdout is None and self._stderr is None
self._stdout_cap = None
self._stderr_cap = None
def GetStdout(self):
"""Return captured stdout so far."""
return self._stdout_cap.getvalue()
def GetStderr(self):
"""Return captured stderr so far."""
return self._stderr_cap.getvalue()
def _GetOutputLines(self, output, include_empties):
"""Split |output| into lines, optionally |include_empties|.
Return array of lines.
"""
lines = self.OPER_MSG_SPLIT_RE.findall(output)
if not include_empties:
lines = [ln for ln in lines if ln]
return lines
def GetStdoutLines(self, include_empties=True):
"""Return captured stdout so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStdout(), include_empties)
def GetStderrLines(self, include_empties=True):
"""Return captured stderr so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStderr(), include_empties)
class TestCase(unittest.TestCase):
__metaclass__ = StackedSetup
# List of vars chromite is globally sensitive to and that should
# be suppressed for tests.
ENVIRON_VARIABLE_SUPPRESSIONS = ('CROS_CACHEDIR',)
def __init__(self, *args, **kwds):
unittest.TestCase.__init__(self, *args, **kwds)
# This is set to keep pylint from complaining.
self.__test_was_run__ = False
def setUp(self):
self.__saved_env__ = os.environ.copy()
self.__saved_cwd__ = os.getcwd()
self.__saved_umask__ = os.umask(022)
for x in self.ENVIRON_VARIABLE_SUPPRESSIONS:
os.environ.pop(x, None)
def tearDown(self):
osutils.SetEnvironment(self.__saved_env__)
os.chdir(self.__saved_cwd__)
os.umask(self.__saved_umask__)
def assertRaises2(self, exception, functor, *args, **kwargs):
"""Like assertRaises, just with checking of the excpetion.
args:
exception: The expected exception type to intecept.
functor: The function to invoke.
args: Positional args to pass to the function.
kwargs: Optional args to pass to the function. Note we pull
exact_kls, msg, and check_attrs from these kwargs.
exact_kls: If given, the exception raise must be *exactly* that class
type; derivatives are a failure.
check_attrs: If given, a mapping of attribute -> value to assert on
the resultant exception. Thus if you wanted to catch a ENOENT, you
would do:
assertRaises2(EnvironmentError, func, args,
attrs={"errno":errno.ENOENT})
msg: The error message to be displayed if the exception isn't raised.
If not given, a suitable one is defaulted to.
returns: The exception object.
"""
exact_kls = kwargs.pop("exact_kls", None)
check_attrs = kwargs.pop("check_attrs", {})
msg = kwargs.pop("msg", None)
if msg is None:
msg = ("%s(*%r, **%r) didn't throw an exception"
% (functor.__name__, args, kwargs))
try:
functor(*args, **kwargs)
raise AssertionError(msg)
except exception, e:
if exact_kls:
self.assertEqual(e.__class__, exception)
bad = []
for attr, required in check_attrs.iteritems():
self.assertTrue(hasattr(e, attr),
msg="%s lacks attr %s" % (e, attr))
value = getattr(e, attr)
if value != required:
bad.append("%s attr is %s, needed to be %s"
% (attr, value, required))
if bad:
raise AssertionError("\n".join(bad))
return e
class OutputTestCase(TestCase):
"""Base class for cros unit tests with utility methods."""
def __init__(self, *args, **kwds):
"""Base class __init__ takes a second argument."""
TestCase.__init__(self, *args, **kwds)
self._output_capturer = None
def OutputCapturer(self):
"""Create and return OutputCapturer object."""
self._output_capturer = OutputCapturer()
return self._output_capturer
def _GetOutputCapt(self):
"""Internal access to existing OutputCapturer.
Raises RuntimeError if output capturing was never on.
"""
if self._output_capturer:
return self._output_capturer
raise RuntimeError('Output capturing was never turned on for this test.')
def _GenCheckMsgFunc(self, prefix_re, line_re):
"""Return boolean func to check a line given |prefix_re| and |line_re|."""
def _method(line):
if prefix_re:
# Prefix regexp will strip off prefix (and suffix) from line.
match = prefix_re.search(line)
if match:
line = match.group(1)
else:
return False
return line_re.search(line) if line_re else True
# Provide a description of what this function looks for in a line. Error
# messages can make use of this.
_method.description = None
if prefix_re and line_re:
_method.description = ('line matching prefix regexp %r then regexp %r' %
(prefix_re.pattern, line_re.pattern))
elif prefix_re:
_method.description = 'line matching prefix regexp %r' % prefix_re.pattern
elif line_re:
_method.description = 'line matching regexp %r' % line_re.pattern
else:
raise RuntimeError('Nonsensical usage of _GenCheckMsgFunc: '
'no prefix_re or line_re')
return _method
def _ContainsMsgLine(self, lines, msg_check_func):
return any(msg_check_func(ln) for ln in lines)
def _GenOutputDescription(self, check_stdout, check_stderr):
# Some extra logic to make an error message useful.
if check_stdout and check_stderr:
return 'stdout or stderr'
elif check_stdout:
return 'stdout'
elif check_stderr:
return 'stderr'
def _AssertOutputContainsMsg(self, check_msg_func, invert,
check_stdout, check_stderr):
assert check_stdout or check_stderr
lines = []
if check_stdout:
lines.extend(self._GetOutputCapt().GetStdoutLines())
if check_stderr:
lines.extend(self._GetOutputCapt().GetStderrLines())
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
if invert:
msg = ('expected %s to not contain %s,\nbut found it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertFalse(result, msg=msg)
else:
msg = ('expected %s to contain %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputContainsError(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one error line.
If |regexp| is non-null, then the error line must also match it.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsWarning(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one warning line.
If |regexp| is non-null, then the warning line must also match it.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsLine(self, regexp, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains line matching |regexp|.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def _AssertOutputEndsInMsg(self, check_msg_func,
check_stdout, check_stderr):
"""Pass if requested output(s) ends(end) with an error message."""
assert check_stdout or check_stderr
lines = []
if check_stdout:
stdout_lines = self._GetOutputCapt().GetStdoutLines(include_empties=False)
if stdout_lines:
lines.append(stdout_lines[-1])
if check_stderr:
stderr_lines = self._GetOutputCapt().GetStderrLines(include_empties=False)
if stderr_lines:
lines.append(stderr_lines[-1])
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
msg = ('expected %s to end with %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputEndsInError(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in error line.
If |regexp| is non-null, then the error line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInWarning(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in warning line.
If |regexp| is non-null, then the warning line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInLine(self, regexp,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in line matching |regexp|.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def FuncCatchSystemExit(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| and catch exceptions.SystemExit.
Return tuple (return value or None, SystemExit number code or None).
"""
try:
returnval = func(*args, **kwargs)
return returnval, None
except exceptions.SystemExit as ex:
exit_code = ex.args[0]
return None, exit_code
def AssertFuncSystemExitZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a SystemExit with exit code 0 then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected system exit code 0, but caught none')
self.assertTrue(exit_code == 0,
msg='Expected system exit code 0, but caught %d' %
exit_code)
def AssertFuncSystemExitNonZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a non-zero SystemExit code then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected non-zero system exit code, but caught none')
self.assertFalse(exit_code == 0,
msg='Expected non-zero system exit code, but caught %d' %
exit_code)
def AssertRaisesAndReturn(self, error, func, *args, **kwargs):
"""Like assertRaises, but return exception raised."""
try:
func(*args, **kwargs)
self.assertTrue(False, msg='Expected %s but got none' % error)
except error as ex:
return ex
class TempDirTestCase(TestCase):
"""Mixin used to give each test a tempdir that is cleansed upon finish"""
sudo_cleanup = False
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
self.tempdir = None
def setUp(self):
#pylint: disable=W0212
osutils._TempDirSetup(self)
def tearDown(self):
#pylint: disable=W0212
osutils._TempDirTearDown(self, self.sudo_cleanup)
class _RunCommandMock(mox.MockObject):
"""Custom mock class used to suppress arguments we don't care about"""
DEFAULT_IGNORED_ARGS = ('print_cmd',)
def __call__(self, *args, **kwds):
for arg in self.DEFAULT_IGNORED_ARGS:
kwds.setdefault(arg, mox.IgnoreArg())
return mox.MockObject.__call__(self, *args, **kwds)
class LessAnnoyingMox(mox.Mox):
"""Mox derivative that slips in our suppressions to mox.
This is used by default via MoxTestCase; namely, this suppresses
certain arguments awareness that we don't care about via switching
in (dependent on the namespace requested) overriding MockObject
classing.
Via this, it makes maintenance much simpler- simplest example, if code
doesn't explicitly assert that print_cmd must be true/false... then
we don't care about what argument is set (it has no effect beyond output).
Mox normally *would* care, making it a pita to maintain. This selectively
suppresses that awareness, making it maintainable.
"""
mock_classes = {}.fromkeys(
['chromite.lib.cros_build_lib.%s' % x
for x in dir(cros_build_lib) if "RunCommand" in x],
_RunCommandMock)
@staticmethod
def _GetNamespace(obj):
return '%s.%s' % (obj.__module__, obj.__name__)
def CreateMock(self, obj, attrs=None):
if attrs is None:
attrs = {}
kls = self.mock_classes.get(
self._GetNamespace(obj), mox.MockObject)
# Copy attrs; I don't trust mox to not be stupid here.
new_mock = kls(obj, attrs=attrs)
self._mock_objects.append(new_mock)
return new_mock
class MoxTestCase(TestCase):
"""Mox based test case; compatible with StackedSetup"""
mox_suppress_verify_all = False
def setUp(self):
self.mox = LessAnnoyingMox()
self.stubs = mox.stubout.StubOutForTesting()
def tearDown(self):
try:
if self.__test_was_run__ and not self.mox_suppress_verify_all:
# This means the test code was actually ran.
# force a verifyall
self.mox.VerifyAll()
finally:
if hasattr(self, 'mox'):
self.mox.UnsetStubs()
if hasattr(self, 'stubs'):
self.stubs.UnsetAll()
self.stubs.SmartUnsetAll()
class MoxTempDirTestCase(TempDirTestCase, MoxTestCase):
"""Convenience class mixing TempDir and Mox"""
class MoxOutputTestCase(OutputTestCase, MoxTestCase):
"""Conevenience class mixing OutputTestCase and MoxTestCase."""
class MockTestCase(TestCase):
"""Python-mock based test case; compatible with StackedSetup"""
def setUp(self):
self._patchers = []
def tearDown(self):
# We can't just run stopall() by itself, and need to stop our patchers
# manually since stopall() doesn't handle repatching.
cros_build_lib.SafeRun([p.stop for p in reversed(self._patchers)] +
[mock.patch.stopall])
def StartPatcher(self, patcher):
"""Call start() on the patcher, and stop() in tearDown."""
m = patcher.start()
self._patchers.append(patcher)
return m
def PatchObject(self, *args, **kwargs):
"""Create and start a mock.patch.object().
stop() will be called automatically during tearDown.
"""
return self.StartPatcher(mock.patch.object(*args, **kwargs))
# MockTestCase must be before TempDirTestCase in this inheritance order,
# because MockTestCase.StartPatcher() calls may be for PartialMocks, which
# create their own temporary directory. The teardown for those directories
# occurs during MockTestCase.tearDown(), which needs to be run before
# TempDirTestCase.tearDown().
class MockTempDirTestCase(MockTestCase, TempDirTestCase):
"""Convenience class mixing TempDir and Mock."""
def FindTests(directory, module_namespace=''):
"""Find all *_unittest.py, and return their python namespaces.
Args:
directory: The directory to scan for tests.
module_namespace: What namespace to prefix all found tests with.
Returns:
A list of python unittests in python namespace form.
"""
results = cros_build_lib.RunCommandCaptureOutput(
['find', '.', '-name', '*_unittest.py', '-printf', '%P\n'],
cwd=directory, print_cmd=False).output.splitlines()
# Drop the trailing .py, inject in the name if one was given.
if module_namespace:
module_namespace += '.'
return [module_namespace + x[:-3].replace('/', '.') for x in results]
@contextlib.contextmanager
def DisableLogging():
"""Temporarily disable chromite logging."""
backup = cros_build_lib.logger.disabled
try:
cros_build_lib.logger.disabled = True
yield
finally:
cros_build_lib.logger.disabled = backup
def main(**kwds):
"""Helper wrapper around unittest.main. Invoke this, not unittest.main.
Any passed in kwds are passed directly down to unittest.main; via this, you
can inject custom argv for example (to limit what tests run).
"""
# Default to exit=True; this matches old behaviour, and allows unittest
# to trigger sys.exit on its own. Unfortunately, the exit keyword is only
# available in 2.7- as such, handle it ourselves.
allow_exit = kwds.pop('exit', True)
cros_build_lib.SetupBasicLogging()
try:
unittest.main(**kwds)
raise SystemExit(0)
except SystemExit, e:
if e.__class__ != SystemExit or allow_exit:
raise
# Redo the exit code ourselves- unittest throws True on occasion.
# This is why the lack of typing for SystemExit code attribute makes life
# suck, in parallel to unittest being special.
# Finally, note that it's possible for code to be a string...
if isinstance(e.code, (int, long)):
# This is done since exit code may be something other than 1/0; if they
# explicitly pass it, we'll honor it.
return e.code
return 1 if e.code else 0
| raise AssertionError('Duplicate entry %r found in %r!' % (norm, tarball)) | conditional_block |
cros_test_lib.py | #!/usr/bin/python
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Cros unit test library, with utility functions."""
from __future__ import print_function
import collections
import contextlib
import cStringIO
import exceptions
import mox
import os
import re
import sys
import unittest
import osutils
import terminal
import cros_build_lib
if 'chromite' not in sys.modules:
# TODO(build): Finish test wrapper (http://crosbug.com/37517).
# Until then, we detect the chromite manipulation not yet having
# occurred, and inject it ourselves.
# We cannot just import chromite since this module is still accessed
# from non chromite.lib.cros_test_lib pathways (which will be resolved
# implicitly via 37517).
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '../third_party'))
import mock
Directory = collections.namedtuple('Directory', ['name', 'contents'])
def _FlattenStructure(base_path, dir_struct):
"""Converts a directory structure to a list of paths."""
flattened = []
for obj in dir_struct:
if isinstance(obj, Directory):
new_base = os.path.join(base_path, obj.name).rstrip(os.sep)
flattened.append(new_base + os.sep)
flattened.extend(_FlattenStructure(new_base, obj.contents))
else:
assert(isinstance(obj, basestring))
flattened.append(os.path.join(base_path, obj))
return flattened
def CreateOnDiskHierarchy(base_path, dir_struct):
"""Creates on-disk representation of an in-memory directory structure.
Arguments:
base_path: The absolute root of the directory structure.
dir_struct: A recursively defined data structure that represents a
directory tree. The basic form is a list. Elements can be file names or
cros_test_lib.Directory objects. The 'contents' attribute of Directory
types is a directory structure representing the contents of the directory.
Examples:
- ['file1', 'file2']
- ['file1', Directory('directory', ['deepfile1', 'deepfile2']), 'file2']
"""
flattened = _FlattenStructure(base_path, dir_struct)
for f in flattened:
f = os.path.join(base_path, f)
if f.endswith(os.sep):
os.mkdir(f)
else:
osutils.Touch(f, makedirs=True)
def _VerifyDirectoryIterables(existing, expected):
"""Compare two iterables representing contents of a directory.
Paths in |existing| and |expected| will be compared for exact match.
Arguments:
existing: An iterable containing paths that exist.
expected: An iterable of paths that are expected.
Raises:
AssertionError when there is any divergence between |existing| and
|expected|.
"""
def FormatPaths(paths):
return '\n'.join(sorted(paths))
existing = set(existing)
expected = set(expected)
unexpected = existing - expected
if unexpected:
raise AssertionError('Found unexpected paths:\n%s'
% FormatPaths(unexpected))
missing = expected - existing
if missing:
raise AssertionError('These files were expected but not found:\n%s'
% FormatPaths(missing))
def _DirectoryIterator(base_path):
"""Iterates through the files and subdirs of a directory."""
for root, dirs, files in os.walk(base_path):
for e in [d + os.sep for d in dirs] + files:
yield os.path.join(root, e)
def VerifyOnDiskHierarchy(base_path, dir_struct):
"""Verify that an on-disk directory tree exactly matches a given structure.
Arguments:
See arguments of CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the on-disk
structure and the structure specified by 'dir_struct'.
"""
expected = _FlattenStructure(base_path, dir_struct)
_VerifyDirectoryIterables(_DirectoryIterator(base_path), expected)
def VerifyTarball(tarball, dir_struct):
"""Compare the contents of a tarball against a directory structure.
Arguments:
tarball: Path to the tarball.
dir_struct: See CreateOnDiskHierarchy()
Raises:
AssertionError when there is any divergence between the tarball and the
structure specified by 'dir_struct'.
"""
contents = cros_build_lib.RunCommandCaptureOutput(
['tar', '-tf', tarball]).output.splitlines()
normalized = set()
for p in contents:
norm = os.path.normpath(p)
if p.endswith('/'):
norm += '/'
if norm in normalized:
raise AssertionError('Duplicate entry %r found in %r!' % (norm, tarball))
normalized.add(norm)
expected = _FlattenStructure('', dir_struct)
_VerifyDirectoryIterables(normalized, expected)
def _walk_mro_stacking(obj, attr, reverse=False):
iterator = iter if reverse else reversed
methods = (getattr(x, attr, None) for x in iterator(obj.__class__.__mro__))
seen = set()
for x in filter(None, methods):
x = getattr(x, 'im_func', x)
if x not in seen:
seen.add(x)
yield x
def _stacked_setUp(self):
self.__test_was_run__ = False
try:
for target in _walk_mro_stacking(self, '__raw_setUp__'):
target(self)
except:
# TestCase doesn't trigger tearDowns if setUp failed; thus
# manually force it ourselves to ensure cleanup occurs.
_stacked_tearDown(self)
raise
# Now mark the object as fully setUp; this is done so that
# any last minute assertions in tearDown can know if they should
# run or not.
self.__test_was_run__ = True
def _stacked_tearDown(self):
exc_info = None
for target in _walk_mro_stacking(self, '__raw_tearDown__', True):
#pylint: disable=W0702
try:
target(self)
except:
# Preserve the exception, throw it after running
# all tearDowns; we throw just the first also. We suppress
# pylint's warning here since it can't understand that we're
# actually raising the exception, just in a nonstandard way.
if exc_info is None:
exc_info = sys.exc_info()
if exc_info:
# Chuck the saved exception, w/ the same TB from
# when it occurred.
raise exc_info[0], exc_info[1], exc_info[2]
class StackedSetup(type):
"""Metaclass that extracts automatically stacks setUp and tearDown calls.
Basically this exists to make it easier to do setUp *correctly*, while also
suppressing some unittests misbehaviours- for example, the fact that if a
setUp throws an exception the corresponding tearDown isn't ran. This sorts
it.
Usage of it is via usual metaclass approach; just set
`__metaclass__ = StackedSetup` .
Note that this metaclass is designed such that because this is a metaclass,
rather than just a scope mutator, all derivative classes derive from this
metaclass; thus all derivative TestCase classes get automatic stacking."""
def __new__(mcs, name, bases, scope):
if 'setUp' in scope:
scope['__raw_setUp__'] = scope.pop('setUp')
scope['setUp'] = _stacked_setUp
if 'tearDown' in scope:
scope['__raw_tearDown__'] = scope.pop('tearDown')
scope['tearDown'] = _stacked_tearDown
return type.__new__(mcs, name, bases, scope)
class EasyAttr(dict):
"""Convenient class for simulating objects with attributes in tests.
An EasyAttr object can be created with any attributes initialized very
easily. Examples:
1) An object with .id=45 and .name="Joe":
testobj = EasyAttr(id=45, name="Joe")
2) An object with .title.text="Big" and .owner.text="Joe":
testobj = EasyAttr(title=EasyAttr(text="Big"), owner=EasyAttr(text="Joe"))
"""
__slots__ = ()
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
return AttributeError(attr)
def __delattr__(self, attr):
try:
self.pop(attr)
except KeyError:
raise AttributeError(attr)
def __setattr__(self, attr, value):
self[attr] = value
def __dir__(self):
return self.keys()
class OutputCapturer(object):
"""Class with limited support for capturing test stdout/stderr output.
Class is designed as a 'ContextManager'. Example usage in a test method
of an object of TestCase:
with self.OutputCapturer() as output:
# Capturing of stdout/stderr automatically starts now.
# Do stuff that sends output to stdout/stderr.
# Capturing automatically stops at end of 'with' block.
# stdout/stderr can be retrieved from the OutputCapturer object:
stdout = output.getStdoutLines() # Or other access methods
# Some Assert methods are only valid if capturing was used in test.
self.AssertOutputContainsError() # Or other related methods
"""
# These work with error output from operation module.
OPER_MSG_SPLIT_RE = re.compile(r'^\033\[1;.*?\033\[0m$|^[^\n]*$',
re.DOTALL | re.MULTILINE)
ERROR_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.RED,), re.DOTALL)
WARNING_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
(30 + terminal.Color.YELLOW,), re.DOTALL)
__slots__ = ['_stderr', '_stderr_cap', '_stdout', '_stdout_cap']
def __init__(self):
self._stdout = None
self._stderr = None
self._stdout_cap = None
self._stderr_cap = None
def __enter__(self):
# This method is called with entering 'with' block.
self.StartCapturing()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# This method is called when exiting 'with' block.
self.StopCapturing()
if exc_type:
print('Exception during output capturing: %r' % (exc_val,))
stdout = self.GetStdout()
if stdout:
print('Captured stdout was:\n%s' % stdout)
else:
print('No captured stdout')
stderr = self.GetStderr()
if stderr:
print('Captured stderr was:\n%s' % stderr)
else:
print('No captured stderr')
def StartCapturing(self):
"""Begin capturing stdout and stderr."""
self._stdout = sys.stdout
self._stderr = sys.stderr
sys.stdout = self._stdout_cap = cStringIO.StringIO()
sys.stderr = self._stderr_cap = cStringIO.StringIO()
def StopCapturing(self):
"""Stop capturing stdout and stderr."""
# The only reason to check stdout or stderr separately might
# have capturing on independently is if StartCapturing did not complete.
if self._stdout:
sys.stdout = self._stdout
self._stdout = None
if self._stderr:
sys.stderr = self._stderr
self._stderr = None
def ClearCaptured(self):
# Only valid if capturing is not on.
assert self._stdout is None and self._stderr is None
self._stdout_cap = None
self._stderr_cap = None
def GetStdout(self):
"""Return captured stdout so far."""
return self._stdout_cap.getvalue()
def GetStderr(self):
"""Return captured stderr so far."""
return self._stderr_cap.getvalue()
def _GetOutputLines(self, output, include_empties):
"""Split |output| into lines, optionally |include_empties|.
Return array of lines.
"""
lines = self.OPER_MSG_SPLIT_RE.findall(output)
if not include_empties:
lines = [ln for ln in lines if ln]
return lines
def GetStdoutLines(self, include_empties=True):
"""Return captured stdout so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStdout(), include_empties)
def GetStderrLines(self, include_empties=True):
"""Return captured stderr so far as array of lines.
If |include_empties| is false filter out all empty lines.
"""
return self._GetOutputLines(self.GetStderr(), include_empties)
class TestCase(unittest.TestCase):
__metaclass__ = StackedSetup
# List of vars chromite is globally sensitive to and that should
# be suppressed for tests.
ENVIRON_VARIABLE_SUPPRESSIONS = ('CROS_CACHEDIR',)
def __init__(self, *args, **kwds):
unittest.TestCase.__init__(self, *args, **kwds)
# This is set to keep pylint from complaining.
self.__test_was_run__ = False
def setUp(self):
self.__saved_env__ = os.environ.copy()
self.__saved_cwd__ = os.getcwd()
self.__saved_umask__ = os.umask(022)
for x in self.ENVIRON_VARIABLE_SUPPRESSIONS:
os.environ.pop(x, None)
def tearDown(self):
osutils.SetEnvironment(self.__saved_env__)
os.chdir(self.__saved_cwd__)
os.umask(self.__saved_umask__)
def assertRaises2(self, exception, functor, *args, **kwargs):
"""Like assertRaises, just with checking of the excpetion.
args:
exception: The expected exception type to intecept.
functor: The function to invoke.
args: Positional args to pass to the function.
kwargs: Optional args to pass to the function. Note we pull
exact_kls, msg, and check_attrs from these kwargs.
exact_kls: If given, the exception raise must be *exactly* that class
type; derivatives are a failure.
check_attrs: If given, a mapping of attribute -> value to assert on
the resultant exception. Thus if you wanted to catch a ENOENT, you
would do:
assertRaises2(EnvironmentError, func, args,
attrs={"errno":errno.ENOENT})
msg: The error message to be displayed if the exception isn't raised.
If not given, a suitable one is defaulted to.
returns: The exception object.
"""
exact_kls = kwargs.pop("exact_kls", None)
check_attrs = kwargs.pop("check_attrs", {})
msg = kwargs.pop("msg", None)
if msg is None:
msg = ("%s(*%r, **%r) didn't throw an exception"
% (functor.__name__, args, kwargs))
try:
functor(*args, **kwargs)
raise AssertionError(msg)
except exception, e:
if exact_kls:
self.assertEqual(e.__class__, exception)
bad = []
for attr, required in check_attrs.iteritems():
self.assertTrue(hasattr(e, attr),
msg="%s lacks attr %s" % (e, attr))
value = getattr(e, attr)
if value != required:
bad.append("%s attr is %s, needed to be %s"
% (attr, value, required))
if bad:
raise AssertionError("\n".join(bad))
return e
class OutputTestCase(TestCase):
"""Base class for cros unit tests with utility methods."""
def __init__(self, *args, **kwds):
"""Base class __init__ takes a second argument."""
TestCase.__init__(self, *args, **kwds)
self._output_capturer = None
def OutputCapturer(self):
"""Create and return OutputCapturer object."""
self._output_capturer = OutputCapturer()
return self._output_capturer
def _GetOutputCapt(self):
"""Internal access to existing OutputCapturer.
Raises RuntimeError if output capturing was never on.
"""
if self._output_capturer:
return self._output_capturer
raise RuntimeError('Output capturing was never turned on for this test.')
def _GenCheckMsgFunc(self, prefix_re, line_re):
"""Return boolean func to check a line given |prefix_re| and |line_re|."""
def _method(line):
if prefix_re:
# Prefix regexp will strip off prefix (and suffix) from line.
match = prefix_re.search(line)
if match:
line = match.group(1)
else:
return False
return line_re.search(line) if line_re else True
# Provide a description of what this function looks for in a line. Error
# messages can make use of this.
_method.description = None
if prefix_re and line_re:
_method.description = ('line matching prefix regexp %r then regexp %r' %
(prefix_re.pattern, line_re.pattern))
elif prefix_re:
_method.description = 'line matching prefix regexp %r' % prefix_re.pattern
elif line_re:
_method.description = 'line matching regexp %r' % line_re.pattern
else:
raise RuntimeError('Nonsensical usage of _GenCheckMsgFunc: '
'no prefix_re or line_re')
return _method
def _ContainsMsgLine(self, lines, msg_check_func):
return any(msg_check_func(ln) for ln in lines)
def _GenOutputDescription(self, check_stdout, check_stderr):
# Some extra logic to make an error message useful.
if check_stdout and check_stderr:
return 'stdout or stderr'
elif check_stdout:
return 'stdout'
elif check_stderr:
return 'stderr'
def _AssertOutputContainsMsg(self, check_msg_func, invert,
check_stdout, check_stderr):
assert check_stdout or check_stderr
lines = []
if check_stdout:
lines.extend(self._GetOutputCapt().GetStdoutLines())
if check_stderr:
lines.extend(self._GetOutputCapt().GetStderrLines())
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
if invert:
msg = ('expected %s to not contain %s,\nbut found it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertFalse(result, msg=msg)
else:
msg = ('expected %s to contain %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputContainsError(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one error line.
If |regexp| is non-null, then the error line must also match it.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsWarning(self, regexp=None, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains at least one warning line.
If |regexp| is non-null, then the warning line must also match it.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def AssertOutputContainsLine(self, regexp, invert=False,
check_stdout=True, check_stderr=False):
"""Assert requested output contains line matching |regexp|.
If |invert| is true, then assert the line is NOT found.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputContainsMsg(check_msg_func, invert,
check_stdout, check_stderr)
def _AssertOutputEndsInMsg(self, check_msg_func,
check_stdout, check_stderr):
"""Pass if requested output(s) ends(end) with an error message."""
assert check_stdout or check_stderr
lines = []
if check_stdout:
stdout_lines = self._GetOutputCapt().GetStdoutLines(include_empties=False)
if stdout_lines:
lines.append(stdout_lines[-1])
if check_stderr:
stderr_lines = self._GetOutputCapt().GetStderrLines(include_empties=False)
if stderr_lines:
lines.append(stderr_lines[-1])
result = self._ContainsMsgLine(lines, check_msg_func)
# Some extra logic to make an error message useful.
output_desc = self._GenOutputDescription(check_stdout, check_stderr)
msg = ('expected %s to end with %s,\nbut did not find it in:\n%s' %
(output_desc, check_msg_func.description, lines))
self.assertTrue(result, msg=msg)
def AssertOutputEndsInError(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in error line.
If |regexp| is non-null, then the error line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.ERROR_MSG_RE, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInWarning(self, regexp=None,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in warning line.
If |regexp| is non-null, then the warning line must also match it.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(OutputCapturer.WARNING_MSG_RE,
regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def AssertOutputEndsInLine(self, regexp,
check_stdout=True, check_stderr=False):
"""Assert requested output ends in line matching |regexp|.
Raises RuntimeError if output capturing was never one for this test.
"""
check_msg_func = self._GenCheckMsgFunc(None, regexp)
return self._AssertOutputEndsInMsg(check_msg_func,
check_stdout, check_stderr)
def FuncCatchSystemExit(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| and catch exceptions.SystemExit.
Return tuple (return value or None, SystemExit number code or None).
"""
try:
returnval = func(*args, **kwargs)
return returnval, None
except exceptions.SystemExit as ex:
exit_code = ex.args[0]
return None, exit_code
def AssertFuncSystemExitZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a SystemExit with exit code 0 then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected system exit code 0, but caught none')
self.assertTrue(exit_code == 0,
msg='Expected system exit code 0, but caught %d' %
exit_code)
def AssertFuncSystemExitNonZero(self, func, *args, **kwargs):
"""Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
If the func does not raise a non-zero SystemExit code then assert.
"""
exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
self.assertFalse(exit_code is None,
msg='Expected non-zero system exit code, but caught none')
self.assertFalse(exit_code == 0,
msg='Expected non-zero system exit code, but caught %d' %
exit_code)
def AssertRaisesAndReturn(self, error, func, *args, **kwargs):
"""Like assertRaises, but return exception raised."""
try:
func(*args, **kwargs)
self.assertTrue(False, msg='Expected %s but got none' % error)
except error as ex:
return ex
class TempDirTestCase(TestCase):
"""Mixin used to give each test a tempdir that is cleansed upon finish"""
sudo_cleanup = False
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
self.tempdir = None
def setUp(self):
#pylint: disable=W0212
osutils._TempDirSetup(self)
def tearDown(self):
#pylint: disable=W0212
osutils._TempDirTearDown(self, self.sudo_cleanup)
class _RunCommandMock(mox.MockObject):
"""Custom mock class used to suppress arguments we don't care about"""
DEFAULT_IGNORED_ARGS = ('print_cmd',)
def __call__(self, *args, **kwds):
for arg in self.DEFAULT_IGNORED_ARGS:
kwds.setdefault(arg, mox.IgnoreArg())
return mox.MockObject.__call__(self, *args, **kwds)
class LessAnnoyingMox(mox.Mox):
"""Mox derivative that slips in our suppressions to mox.
This is used by default via MoxTestCase; namely, this suppresses
certain arguments awareness that we don't care about via switching
in (dependent on the namespace requested) overriding MockObject
classing.
Via this, it makes maintenance much simpler- simplest example, if code
doesn't explicitly assert that print_cmd must be true/false... then
we don't care about what argument is set (it has no effect beyond output).
Mox normally *would* care, making it a pita to maintain. This selectively
suppresses that awareness, making it maintainable.
"""
mock_classes = {}.fromkeys(
['chromite.lib.cros_build_lib.%s' % x
for x in dir(cros_build_lib) if "RunCommand" in x],
_RunCommandMock)
@staticmethod
def _GetNamespace(obj):
return '%s.%s' % (obj.__module__, obj.__name__)
def CreateMock(self, obj, attrs=None):
if attrs is None:
attrs = {}
kls = self.mock_classes.get(
self._GetNamespace(obj), mox.MockObject)
# Copy attrs; I don't trust mox to not be stupid here.
new_mock = kls(obj, attrs=attrs)
self._mock_objects.append(new_mock)
return new_mock
class MoxTestCase(TestCase):
"""Mox based test case; compatible with StackedSetup"""
mox_suppress_verify_all = False
def setUp(self):
self.mox = LessAnnoyingMox()
self.stubs = mox.stubout.StubOutForTesting()
def tearDown(self):
try:
if self.__test_was_run__ and not self.mox_suppress_verify_all:
# This means the test code was actually ran.
# force a verifyall
self.mox.VerifyAll()
finally:
if hasattr(self, 'mox'):
self.mox.UnsetStubs()
if hasattr(self, 'stubs'):
self.stubs.UnsetAll()
self.stubs.SmartUnsetAll()
class MoxTempDirTestCase(TempDirTestCase, MoxTestCase):
"""Convenience class mixing TempDir and Mox"""
class MoxOutputTestCase(OutputTestCase, MoxTestCase):
"""Conevenience class mixing OutputTestCase and MoxTestCase."""
class MockTestCase(TestCase):
"""Python-mock based test case; compatible with StackedSetup"""
def setUp(self):
self._patchers = []
def tearDown(self):
# We can't just run stopall() by itself, and need to stop our patchers
# manually since stopall() doesn't handle repatching.
cros_build_lib.SafeRun([p.stop for p in reversed(self._patchers)] +
[mock.patch.stopall])
def StartPatcher(self, patcher):
"""Call start() on the patcher, and stop() in tearDown."""
m = patcher.start()
self._patchers.append(patcher)
return m
def PatchObject(self, *args, **kwargs):
"""Create and start a mock.patch.object().
stop() will be called automatically during tearDown.
"""
return self.StartPatcher(mock.patch.object(*args, **kwargs))
# MockTestCase must be before TempDirTestCase in this inheritance order,
# because MockTestCase.StartPatcher() calls may be for PartialMocks, which
# create their own temporary directory. The teardown for those directories
# occurs during MockTestCase.tearDown(), which needs to be run before
# TempDirTestCase.tearDown().
class MockTempDirTestCase(MockTestCase, TempDirTestCase):
"""Convenience class mixing TempDir and Mock."""
def FindTests(directory, module_namespace=''):
"""Find all *_unittest.py, and return their python namespaces.
Args:
directory: The directory to scan for tests.
module_namespace: What namespace to prefix all found tests with.
Returns:
A list of python unittests in python namespace form.
"""
results = cros_build_lib.RunCommandCaptureOutput(
['find', '.', '-name', '*_unittest.py', '-printf', '%P\n'],
cwd=directory, print_cmd=False).output.splitlines()
# Drop the trailing .py, inject in the name if one was given.
if module_namespace:
module_namespace += '.'
return [module_namespace + x[:-3].replace('/', '.') for x in results]
@contextlib.contextmanager
def DisableLogging():
|
def main(**kwds):
"""Helper wrapper around unittest.main. Invoke this, not unittest.main.
Any passed in kwds are passed directly down to unittest.main; via this, you
can inject custom argv for example (to limit what tests run).
"""
# Default to exit=True; this matches old behaviour, and allows unittest
# to trigger sys.exit on its own. Unfortunately, the exit keyword is only
# available in 2.7- as such, handle it ourselves.
allow_exit = kwds.pop('exit', True)
cros_build_lib.SetupBasicLogging()
try:
unittest.main(**kwds)
raise SystemExit(0)
except SystemExit, e:
if e.__class__ != SystemExit or allow_exit:
raise
# Redo the exit code ourselves- unittest throws True on occasion.
# This is why the lack of typing for SystemExit code attribute makes life
# suck, in parallel to unittest being special.
# Finally, note that it's possible for code to be a string...
if isinstance(e.code, (int, long)):
# This is done since exit code may be something other than 1/0; if they
# explicitly pass it, we'll honor it.
return e.code
return 1 if e.code else 0
| """Temporarily disable chromite logging."""
backup = cros_build_lib.logger.disabled
try:
cros_build_lib.logger.disabled = True
yield
finally:
cros_build_lib.logger.disabled = backup | identifier_body |
de-LI.js | /*!
* numbro.js language configuration
* language : German
* locale: Liechtenstein
* author : Michael Piefel : https://github.com/piefel (based on work from Marco Krage : https://github.com/sinky)
*/
(function () {
'use strict';
var language = {
langLocaleCode: 'de-LI',
cultureCode: 'de-LI',
delimiters: { | },
abbreviations: {
thousand: 'k',
million: 'm',
billion: 'b',
trillion: 't'
},
ordinal: function () {
return '.';
},
currency: {
symbol: 'CHF',
position: 'postfix',
code: 'CHF'
},
defaults: {
currencyFormat: ',4 a'
},
formats: {
fourDigits: '4 a',
fullWithTwoDecimals: ',0.00 $',
fullWithTwoDecimalsNoCurrency: ',0.00',
fullWithNoDecimals: ',0 $'
}
};
// CommonJS
if (typeof module !== 'undefined' && module.exports) {
module.exports = language;
}
// Browser
if (typeof window !== 'undefined' && window.numbro && window.numbro.culture) {
window.numbro.culture(language.cultureCode, language);
}
}.call(typeof window === 'undefined' ? this : window)); | thousands: '\'',
decimal: '.' | random_line_split |
de-LI.js | /*!
* numbro.js language configuration
* language : German
* locale: Liechtenstein
* author : Michael Piefel : https://github.com/piefel (based on work from Marco Krage : https://github.com/sinky)
*/
(function () {
'use strict';
var language = {
langLocaleCode: 'de-LI',
cultureCode: 'de-LI',
delimiters: {
thousands: '\'',
decimal: '.'
},
abbreviations: {
thousand: 'k',
million: 'm',
billion: 'b',
trillion: 't'
},
ordinal: function () {
return '.';
},
currency: {
symbol: 'CHF',
position: 'postfix',
code: 'CHF'
},
defaults: {
currencyFormat: ',4 a'
},
formats: {
fourDigits: '4 a',
fullWithTwoDecimals: ',0.00 $',
fullWithTwoDecimalsNoCurrency: ',0.00',
fullWithNoDecimals: ',0 $'
}
};
// CommonJS
if (typeof module !== 'undefined' && module.exports) |
// Browser
if (typeof window !== 'undefined' && window.numbro && window.numbro.culture) {
window.numbro.culture(language.cultureCode, language);
}
}.call(typeof window === 'undefined' ? this : window));
| {
module.exports = language;
} | conditional_block |
linux_base.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use target::TargetOptions;
use std::default::Default;
pub fn opts() -> TargetOptions | {
TargetOptions {
linker: "cc".to_string(),
dynamic_linking: true,
executables: true,
morestack: true,
linker_is_gnu: true,
has_rpath: true,
pre_link_args: vec![
// We want to be able to strip as much executable code as possible
// from the linker command line, and this flag indicates to the
// linker that it can avoid linking in dynamic libraries that don't
// actually satisfy any symbols up to that point (as with many other
// resolutions the linker does). This option only applies to all
// following libraries so we're sure to pass it as one of the first
// arguments.
"-Wl,--as-needed".to_string(),
],
position_independent_executables: true,
.. Default::default()
}
} | identifier_body | |
linux_base.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use target::TargetOptions;
use std::default::Default;
pub fn opts() -> TargetOptions {
TargetOptions {
linker: "cc".to_string(),
dynamic_linking: true,
executables: true,
morestack: true,
linker_is_gnu: true,
has_rpath: true,
pre_link_args: vec![
// We want to be able to strip as much executable code as possible
// from the linker command line, and this flag indicates to the
// linker that it can avoid linking in dynamic libraries that don't
// actually satisfy any symbols up to that point (as with many other
// resolutions the linker does). This option only applies to all
// following libraries so we're sure to pass it as one of the first
// arguments.
"-Wl,--as-needed".to_string(), | ],
position_independent_executables: true,
.. Default::default()
}
} | random_line_split | |
linux_base.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use target::TargetOptions;
use std::default::Default;
pub fn | () -> TargetOptions {
TargetOptions {
linker: "cc".to_string(),
dynamic_linking: true,
executables: true,
morestack: true,
linker_is_gnu: true,
has_rpath: true,
pre_link_args: vec![
// We want to be able to strip as much executable code as possible
// from the linker command line, and this flag indicates to the
// linker that it can avoid linking in dynamic libraries that don't
// actually satisfy any symbols up to that point (as with many other
// resolutions the linker does). This option only applies to all
// following libraries so we're sure to pass it as one of the first
// arguments.
"-Wl,--as-needed".to_string(),
],
position_independent_executables: true,
.. Default::default()
}
}
| opts | identifier_name |
RiskScatterPanel.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pprint
import random
import wx
import armid
from Borg import Borg
| FigureCanvasWxAgg as FigCanvas, \
NavigationToolbar2WxAgg as NavigationToolbar
def riskColourCode(riskScore):
if (riskScore <= 1):
return '#fef2ec'
elif (riskScore == 2):
return '#fcd9c8'
elif (riskScore == 3):
return '#f7ac91'
elif (riskScore == 4):
return '#f67e61'
elif (riskScore == 5):
return '#f2543d'
elif (riskScore == 6):
return '#e42626'
elif (riskScore == 7):
return '#b9051a'
elif (riskScore == 8):
return '#900014'
else:
return '#52000D'
class RiskScatterPanel(wx.Panel):
def __init__(self,parent):
wx.Panel.__init__(self,parent,armid.RISKSCATTER_ID)
b = Borg()
self.dbProxy = b.dbProxy
self.dpi = 100
self.fig = Figure((5.0, 4.0), dpi=self.dpi)
self.canvas = FigCanvas(self, -1, self.fig)
self.axes = self.fig.add_subplot(111,xlabel='Severity',ylabel='Likelihood',autoscale_on=False)
self.axes.set_xticklabels(['Marginal','Critical','Catastrophic'])
self.axes.set_yticks([0,1,2,3,4,5])
self.toolbar = NavigationToolbar(self.canvas)
envs = self.dbProxy.getDimensionNames('environment')
self.envCombo = wx.ComboBox(self,armid.RISKSCATTER_COMBOENVIRONMENT_ID,envs[0],choices=envs,size=(300,-1),style=wx.CB_DROPDOWN)
self.envCombo.Bind(wx.EVT_COMBOBOX,self.onEnvironmentChange)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.toolbar, 0, wx.EXPAND)
self.vbox.Add(self.envCombo,0, wx.EXPAND)
self.vbox.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)
self.SetSizer(self.vbox)
self.vbox.Fit(self)
self.drawScatter(envs[0])
def drawScatter(self,envName):
self.axes.clear()
self.axes.grid(True)
self.axes.set_xlabel('Severity')
self.axes.set_ylabel('Likelihood')
self.axes.set_xbound(0,4)
self.axes.set_ybound(0,5)
xs,ys,cs = self.dbProxy.riskScatter(envName)
ccs = []
for c in cs:
ccs.append(riskColourCode(c))
if ((len(xs) > 0) and (len(ys) > 0)):
self.axes.scatter(xs,ys,c=ccs,marker='d')
self.canvas.draw()
def onEnvironmentChange(self,evt):
envName = self.envCombo.GetStringSelection()
self.drawScatter(envName)
def on_save_plot(self, event):
fileChoices = "PNG (*.png)|*.png"
dlg = wx.FileDialog(self,message="Save risk scatter",defaultDir=os.getcwd(),defaultFile="scatter.png",wildcard=fileChoices,style=wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
self.canvas.print_figure(path, dpi=self.dpi) | import matplotlib
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import \ | random_line_split |
RiskScatterPanel.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pprint
import random
import wx
import armid
from Borg import Borg
import matplotlib
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import \
FigureCanvasWxAgg as FigCanvas, \
NavigationToolbar2WxAgg as NavigationToolbar
def riskColourCode(riskScore):
if (riskScore <= 1):
return '#fef2ec'
elif (riskScore == 2):
return '#fcd9c8'
elif (riskScore == 3):
return '#f7ac91'
elif (riskScore == 4):
return '#f67e61'
elif (riskScore == 5):
return '#f2543d'
elif (riskScore == 6):
return '#e42626'
elif (riskScore == 7):
return '#b9051a'
elif (riskScore == 8):
return '#900014'
else:
return '#52000D'
class RiskScatterPanel(wx.Panel):
def __init__(self,parent):
wx.Panel.__init__(self,parent,armid.RISKSCATTER_ID)
b = Borg()
self.dbProxy = b.dbProxy
self.dpi = 100
self.fig = Figure((5.0, 4.0), dpi=self.dpi)
self.canvas = FigCanvas(self, -1, self.fig)
self.axes = self.fig.add_subplot(111,xlabel='Severity',ylabel='Likelihood',autoscale_on=False)
self.axes.set_xticklabels(['Marginal','Critical','Catastrophic'])
self.axes.set_yticks([0,1,2,3,4,5])
self.toolbar = NavigationToolbar(self.canvas)
envs = self.dbProxy.getDimensionNames('environment')
self.envCombo = wx.ComboBox(self,armid.RISKSCATTER_COMBOENVIRONMENT_ID,envs[0],choices=envs,size=(300,-1),style=wx.CB_DROPDOWN)
self.envCombo.Bind(wx.EVT_COMBOBOX,self.onEnvironmentChange)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.toolbar, 0, wx.EXPAND)
self.vbox.Add(self.envCombo,0, wx.EXPAND)
self.vbox.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)
self.SetSizer(self.vbox)
self.vbox.Fit(self)
self.drawScatter(envs[0])
def drawScatter(self,envName):
|
def onEnvironmentChange(self,evt):
envName = self.envCombo.GetStringSelection()
self.drawScatter(envName)
def on_save_plot(self, event):
fileChoices = "PNG (*.png)|*.png"
dlg = wx.FileDialog(self,message="Save risk scatter",defaultDir=os.getcwd(),defaultFile="scatter.png",wildcard=fileChoices,style=wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
self.canvas.print_figure(path, dpi=self.dpi)
| self.axes.clear()
self.axes.grid(True)
self.axes.set_xlabel('Severity')
self.axes.set_ylabel('Likelihood')
self.axes.set_xbound(0,4)
self.axes.set_ybound(0,5)
xs,ys,cs = self.dbProxy.riskScatter(envName)
ccs = []
for c in cs:
ccs.append(riskColourCode(c))
if ((len(xs) > 0) and (len(ys) > 0)):
self.axes.scatter(xs,ys,c=ccs,marker='d')
self.canvas.draw() | identifier_body |
RiskScatterPanel.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pprint
import random
import wx
import armid
from Borg import Borg
import matplotlib
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import \
FigureCanvasWxAgg as FigCanvas, \
NavigationToolbar2WxAgg as NavigationToolbar
def riskColourCode(riskScore):
if (riskScore <= 1):
|
elif (riskScore == 2):
return '#fcd9c8'
elif (riskScore == 3):
return '#f7ac91'
elif (riskScore == 4):
return '#f67e61'
elif (riskScore == 5):
return '#f2543d'
elif (riskScore == 6):
return '#e42626'
elif (riskScore == 7):
return '#b9051a'
elif (riskScore == 8):
return '#900014'
else:
return '#52000D'
class RiskScatterPanel(wx.Panel):
def __init__(self,parent):
wx.Panel.__init__(self,parent,armid.RISKSCATTER_ID)
b = Borg()
self.dbProxy = b.dbProxy
self.dpi = 100
self.fig = Figure((5.0, 4.0), dpi=self.dpi)
self.canvas = FigCanvas(self, -1, self.fig)
self.axes = self.fig.add_subplot(111,xlabel='Severity',ylabel='Likelihood',autoscale_on=False)
self.axes.set_xticklabels(['Marginal','Critical','Catastrophic'])
self.axes.set_yticks([0,1,2,3,4,5])
self.toolbar = NavigationToolbar(self.canvas)
envs = self.dbProxy.getDimensionNames('environment')
self.envCombo = wx.ComboBox(self,armid.RISKSCATTER_COMBOENVIRONMENT_ID,envs[0],choices=envs,size=(300,-1),style=wx.CB_DROPDOWN)
self.envCombo.Bind(wx.EVT_COMBOBOX,self.onEnvironmentChange)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.toolbar, 0, wx.EXPAND)
self.vbox.Add(self.envCombo,0, wx.EXPAND)
self.vbox.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)
self.SetSizer(self.vbox)
self.vbox.Fit(self)
self.drawScatter(envs[0])
def drawScatter(self,envName):
self.axes.clear()
self.axes.grid(True)
self.axes.set_xlabel('Severity')
self.axes.set_ylabel('Likelihood')
self.axes.set_xbound(0,4)
self.axes.set_ybound(0,5)
xs,ys,cs = self.dbProxy.riskScatter(envName)
ccs = []
for c in cs:
ccs.append(riskColourCode(c))
if ((len(xs) > 0) and (len(ys) > 0)):
self.axes.scatter(xs,ys,c=ccs,marker='d')
self.canvas.draw()
def onEnvironmentChange(self,evt):
envName = self.envCombo.GetStringSelection()
self.drawScatter(envName)
def on_save_plot(self, event):
fileChoices = "PNG (*.png)|*.png"
dlg = wx.FileDialog(self,message="Save risk scatter",defaultDir=os.getcwd(),defaultFile="scatter.png",wildcard=fileChoices,style=wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
self.canvas.print_figure(path, dpi=self.dpi)
| return '#fef2ec' | conditional_block |
RiskScatterPanel.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pprint
import random
import wx
import armid
from Borg import Borg
import matplotlib
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import \
FigureCanvasWxAgg as FigCanvas, \
NavigationToolbar2WxAgg as NavigationToolbar
def riskColourCode(riskScore):
if (riskScore <= 1):
return '#fef2ec'
elif (riskScore == 2):
return '#fcd9c8'
elif (riskScore == 3):
return '#f7ac91'
elif (riskScore == 4):
return '#f67e61'
elif (riskScore == 5):
return '#f2543d'
elif (riskScore == 6):
return '#e42626'
elif (riskScore == 7):
return '#b9051a'
elif (riskScore == 8):
return '#900014'
else:
return '#52000D'
class RiskScatterPanel(wx.Panel):
def __init__(self,parent):
wx.Panel.__init__(self,parent,armid.RISKSCATTER_ID)
b = Borg()
self.dbProxy = b.dbProxy
self.dpi = 100
self.fig = Figure((5.0, 4.0), dpi=self.dpi)
self.canvas = FigCanvas(self, -1, self.fig)
self.axes = self.fig.add_subplot(111,xlabel='Severity',ylabel='Likelihood',autoscale_on=False)
self.axes.set_xticklabels(['Marginal','Critical','Catastrophic'])
self.axes.set_yticks([0,1,2,3,4,5])
self.toolbar = NavigationToolbar(self.canvas)
envs = self.dbProxy.getDimensionNames('environment')
self.envCombo = wx.ComboBox(self,armid.RISKSCATTER_COMBOENVIRONMENT_ID,envs[0],choices=envs,size=(300,-1),style=wx.CB_DROPDOWN)
self.envCombo.Bind(wx.EVT_COMBOBOX,self.onEnvironmentChange)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.toolbar, 0, wx.EXPAND)
self.vbox.Add(self.envCombo,0, wx.EXPAND)
self.vbox.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)
self.SetSizer(self.vbox)
self.vbox.Fit(self)
self.drawScatter(envs[0])
def drawScatter(self,envName):
self.axes.clear()
self.axes.grid(True)
self.axes.set_xlabel('Severity')
self.axes.set_ylabel('Likelihood')
self.axes.set_xbound(0,4)
self.axes.set_ybound(0,5)
xs,ys,cs = self.dbProxy.riskScatter(envName)
ccs = []
for c in cs:
ccs.append(riskColourCode(c))
if ((len(xs) > 0) and (len(ys) > 0)):
self.axes.scatter(xs,ys,c=ccs,marker='d')
self.canvas.draw()
def onEnvironmentChange(self,evt):
envName = self.envCombo.GetStringSelection()
self.drawScatter(envName)
def | (self, event):
fileChoices = "PNG (*.png)|*.png"
dlg = wx.FileDialog(self,message="Save risk scatter",defaultDir=os.getcwd(),defaultFile="scatter.png",wildcard=fileChoices,style=wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
self.canvas.print_figure(path, dpi=self.dpi)
| on_save_plot | identifier_name |
function-parsers.ts | import * as _ from "lodash";
import * as positionHelper from "./position-helper";
import { DataCell } from "./table-models";
import { Step } from "./models";
import { parseCallExpression } from "./primitive-parsers";
import {
isVariable,
parseTypeAndName,
parseVariableDeclaration,
} from "./variable-parsers";
export function parseStep(
firstDataCell: DataCell,
restDataCells: DataCell[]
): Step {
let stepContent;
const lastCell = _.last(restDataCells) || firstDataCell;
const stepLocation = positionHelper.locationFromStartEnd(
firstDataCell.location,
lastCell.location
);
if (isVariable(firstDataCell)) {
const typeAndName = parseTypeAndName(firstDataCell);
const callExpression = parseCallExpression(restDataCells);
stepContent = parseVariableDeclaration(
typeAndName,
[callExpression],
stepLocation
);
} else |
return new Step(stepContent, stepLocation);
}
| {
stepContent = parseCallExpression([firstDataCell, ...restDataCells]);
} | conditional_block |
function-parsers.ts | import * as _ from "lodash";
import * as positionHelper from "./position-helper";
import { DataCell } from "./table-models";
import { Step } from "./models";
import { parseCallExpression } from "./primitive-parsers";
import {
isVariable,
parseTypeAndName,
parseVariableDeclaration,
} from "./variable-parsers";
export function | (
firstDataCell: DataCell,
restDataCells: DataCell[]
): Step {
let stepContent;
const lastCell = _.last(restDataCells) || firstDataCell;
const stepLocation = positionHelper.locationFromStartEnd(
firstDataCell.location,
lastCell.location
);
if (isVariable(firstDataCell)) {
const typeAndName = parseTypeAndName(firstDataCell);
const callExpression = parseCallExpression(restDataCells);
stepContent = parseVariableDeclaration(
typeAndName,
[callExpression],
stepLocation
);
} else {
stepContent = parseCallExpression([firstDataCell, ...restDataCells]);
}
return new Step(stepContent, stepLocation);
}
| parseStep | identifier_name |
function-parsers.ts | import * as _ from "lodash";
import * as positionHelper from "./position-helper";
import { DataCell } from "./table-models";
import { Step } from "./models";
import { parseCallExpression } from "./primitive-parsers";
import {
isVariable,
parseTypeAndName,
parseVariableDeclaration,
} from "./variable-parsers";
export function parseStep(
firstDataCell: DataCell,
restDataCells: DataCell[]
): Step {
let stepContent;
const lastCell = _.last(restDataCells) || firstDataCell; | const stepLocation = positionHelper.locationFromStartEnd(
firstDataCell.location,
lastCell.location
);
if (isVariable(firstDataCell)) {
const typeAndName = parseTypeAndName(firstDataCell);
const callExpression = parseCallExpression(restDataCells);
stepContent = parseVariableDeclaration(
typeAndName,
[callExpression],
stepLocation
);
} else {
stepContent = parseCallExpression([firstDataCell, ...restDataCells]);
}
return new Step(stepContent, stepLocation);
} | random_line_split | |
function-parsers.ts | import * as _ from "lodash";
import * as positionHelper from "./position-helper";
import { DataCell } from "./table-models";
import { Step } from "./models";
import { parseCallExpression } from "./primitive-parsers";
import {
isVariable,
parseTypeAndName,
parseVariableDeclaration,
} from "./variable-parsers";
export function parseStep(
firstDataCell: DataCell,
restDataCells: DataCell[]
): Step | {
let stepContent;
const lastCell = _.last(restDataCells) || firstDataCell;
const stepLocation = positionHelper.locationFromStartEnd(
firstDataCell.location,
lastCell.location
);
if (isVariable(firstDataCell)) {
const typeAndName = parseTypeAndName(firstDataCell);
const callExpression = parseCallExpression(restDataCells);
stepContent = parseVariableDeclaration(
typeAndName,
[callExpression],
stepLocation
);
} else {
stepContent = parseCallExpression([firstDataCell, ...restDataCells]);
}
return new Step(stepContent, stepLocation);
} | identifier_body | |
edit.js | var edit = (function(){
// Cache DOM
var $defaultSection = $('#about_you_dashboard');
var $defaultLink = $("#user_menu li a[href='#about_you']");
var $rowSections = $('.container-fluid .hidden');
var $currentRow = $('.container-fluid .hidden' + window.location.hash + '_dashboard');
var $menuLinks = $('#user_menu li a');
var $hashLink = $.grep( $( '#user_menu li a' ), function (o) {
return o.hash == window.location.hash;
});
| $menuLinks.on('click', toggleMenuActive);
// Functions
function showHashLocation() {
if(window.location.hash == '') {
$defaultSection.removeClass('hidden');
$($defaultLink).parent().addClass('active');
} else {
$currentRow.removeClass('hidden');
$($hashLink).parent().addClass('active');
}
}
function toggleMenuActive() {
$menuLinks.parent().removeClass('active');
$rowSections.addClass('hidden');
$(this).parent().addClass('active');
$('.container-fluid' + this.hash + '_dashboard').removeClass('hidden');
}
showHashLocation();
return{};
})(); | // Bind Events | random_line_split |
edit.js | var edit = (function(){
// Cache DOM
var $defaultSection = $('#about_you_dashboard');
var $defaultLink = $("#user_menu li a[href='#about_you']");
var $rowSections = $('.container-fluid .hidden');
var $currentRow = $('.container-fluid .hidden' + window.location.hash + '_dashboard');
var $menuLinks = $('#user_menu li a');
var $hashLink = $.grep( $( '#user_menu li a' ), function (o) {
return o.hash == window.location.hash;
});
// Bind Events
$menuLinks.on('click', toggleMenuActive);
// Functions
function showHashLocation() {
if(window.location.hash == '') {
$defaultSection.removeClass('hidden');
$($defaultLink).parent().addClass('active');
} else |
}
function toggleMenuActive() {
$menuLinks.parent().removeClass('active');
$rowSections.addClass('hidden');
$(this).parent().addClass('active');
$('.container-fluid' + this.hash + '_dashboard').removeClass('hidden');
}
showHashLocation();
return{};
})(); | {
$currentRow.removeClass('hidden');
$($hashLink).parent().addClass('active');
} | conditional_block |
edit.js | var edit = (function(){
// Cache DOM
var $defaultSection = $('#about_you_dashboard');
var $defaultLink = $("#user_menu li a[href='#about_you']");
var $rowSections = $('.container-fluid .hidden');
var $currentRow = $('.container-fluid .hidden' + window.location.hash + '_dashboard');
var $menuLinks = $('#user_menu li a');
var $hashLink = $.grep( $( '#user_menu li a' ), function (o) {
return o.hash == window.location.hash;
});
// Bind Events
$menuLinks.on('click', toggleMenuActive);
// Functions
function showHashLocation() |
function toggleMenuActive() {
$menuLinks.parent().removeClass('active');
$rowSections.addClass('hidden');
$(this).parent().addClass('active');
$('.container-fluid' + this.hash + '_dashboard').removeClass('hidden');
}
showHashLocation();
return{};
})(); | {
if(window.location.hash == '') {
$defaultSection.removeClass('hidden');
$($defaultLink).parent().addClass('active');
} else {
$currentRow.removeClass('hidden');
$($hashLink).parent().addClass('active');
}
} | identifier_body |
edit.js | var edit = (function(){
// Cache DOM
var $defaultSection = $('#about_you_dashboard');
var $defaultLink = $("#user_menu li a[href='#about_you']");
var $rowSections = $('.container-fluid .hidden');
var $currentRow = $('.container-fluid .hidden' + window.location.hash + '_dashboard');
var $menuLinks = $('#user_menu li a');
var $hashLink = $.grep( $( '#user_menu li a' ), function (o) {
return o.hash == window.location.hash;
});
// Bind Events
$menuLinks.on('click', toggleMenuActive);
// Functions
function | () {
if(window.location.hash == '') {
$defaultSection.removeClass('hidden');
$($defaultLink).parent().addClass('active');
} else {
$currentRow.removeClass('hidden');
$($hashLink).parent().addClass('active');
}
}
function toggleMenuActive() {
$menuLinks.parent().removeClass('active');
$rowSections.addClass('hidden');
$(this).parent().addClass('active');
$('.container-fluid' + this.hash + '_dashboard').removeClass('hidden');
}
showHashLocation();
return{};
})(); | showHashLocation | identifier_name |
mail_mail.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import logging
import re
from urllib import urlencode
from urlparse import urljoin
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class mail_mail(osv.Model):
""" Model holding RFC2822 email messages to send. This model also provides
facilities to queue and send new email messages. """
_name = 'mail.mail'
_description = 'Outgoing Mails'
_inherits = {'mail.message': 'mail_message_id'}
_order = 'id desc'
_columns = {
'mail_message_id': fields.many2one('mail.message', 'Message', required=True, ondelete='cascade'),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing mail server', readonly=1),
'state': fields.selection([
('outgoing', 'Outgoing'),
('sent', 'Sent'),
('received', 'Received'),
('exception', 'Delivery Failed'),
('cancel', 'Cancelled'),
], 'Status', readonly=True),
'auto_delete': fields.boolean('Auto Delete',
help="Permanently delete this email after sending it, to save space"),
'references': fields.text('References', help='Message references, such as identifiers of previous messages', readonly=1),
'email_from': fields.char('From', help='Message sender, taken from user preferences.'),
'email_to': fields.text('To', help='Message recipients'),
'email_cc': fields.char('Cc', help='Carbon copy message recipients'),
'reply_to': fields.char('Reply-To', help='Preferred response address for the message'),
'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"),
# Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification
# and during unlink() we will not cascade delete the parent and its attachments
'notification': fields.boolean('Is Notification')
}
def _get_default_from(self, cr, uid, context=None):
this = self.pool.get('res.users').browse(cr, uid, uid, context=context)
if this.alias_domain:
return '%s@%s' % (this.alias_name, this.alias_domain)
elif this.email:
return this.email
raise osv.except_osv(_('Invalid Action!'), _("Unable to send email, please configure the sender's email address or alias."))
_defaults = {
'state': 'outgoing',
'email_from': lambda self, cr, uid, ctx=None: self._get_default_from(cr, uid, ctx),
}
def default_get(self, cr, uid, fields, context=None):
# protection for `default_type` values leaking from menu action context (e.g. for invoices)
# To remove when automatic context propagation is removed in web client
if context and context.get('default_type') and context.get('default_type') not in self._all_columns['type'].column.selection:
context = dict(context, default_type=None)
return super(mail_mail, self).default_get(cr, uid, fields, context=context)
def create(self, cr, uid, values, context=None):
if 'notification' not in values and values.get('mail_message_id'):
values['notification'] = True
return super(mail_mail, self).create(cr, uid, values, context=context)
def unlink(self, cr, uid, ids, context=None):
# cascade-delete the parent message for all mails that are not created for a notification
ids_to_cascade = self.search(cr, uid, [('notification', '=', False), ('id', 'in', ids)])
parent_msg_ids = [m.mail_message_id.id for m in self.browse(cr, uid, ids_to_cascade, context=context)]
res = super(mail_mail, self).unlink(cr, uid, ids, context=context)
self.pool.get('mail.message').unlink(cr, uid, parent_msg_ids, context=context)
return res
def mark_outgoing(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'outgoing'}, context=context)
def cancel(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
def process_email_queue(self, cr, uid, ids=None, context=None):
"""Send immediately queued messages, committing after each
message is sent - this is not transactional and should
not be called during another transaction!
:param list ids: optional list of emails ids to send. If passed
no search is performed, and these ids are used
instead.
:param dict context: if a 'filters' key is present in context,
this value will be used as an additional
filter to further restrict the outgoing
messages to send (by default all 'outgoing'
messages are sent).
"""
if context is None:
context = {}
if not ids:
filters = ['&', ('state', '=', 'outgoing'), ('type', '=', 'email')]
if 'filters' in context:
filters.extend(context['filters'])
ids = self.search(cr, uid, filters, context=context)
res = None
try:
# Force auto-commit - this is meant to be called by
# the scheduler, and we can't allow rolling back the status
# of previously sent emails!
res = self.send(cr, uid, ids, auto_commit=True, context=context)
except Exception:
_logger.exception("Failed processing mail queue")
return res
def _postprocess_sent_message(self, cr, uid, mail, context=None):
"""Perform any post-processing necessary after sending ``mail``
successfully, including deleting it completely along with its
attachment if the ``auto_delete`` flag of the mail was set.
Overridden by subclasses for extra post-processing behaviors.
:param browse_record mail: the mail that was just sent
:return: True
"""
if mail.auto_delete:
# done with SUPERUSER_ID to avoid giving large unlink access rights
self.unlink(cr, SUPERUSER_ID, [mail.id], context=context)
return True
def send_get_mail_subject(self, cr, uid, mail, force=False, partner=None, context=None):
""" If subject is void and record_name defined: '<Author> posted on <Resource>'
:param boolean force: force the subject replacement
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
if force or (not mail.subject and mail.model and mail.res_id):
return 'Re: %s' % (mail.record_name)
return mail.subject
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
""" Return a specific ir_email body. The main purpose of this method
is to be inherited by Portal, to add a link for signing in, in
each notification email a partner receives.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
body = mail.body_html
# partner is a user, link to a related document (incentive to install portal)
if partner and partner.user_ids and mail.model and mail.res_id \
and self.check_access_rights(cr, partner.user_ids[0].id, 'read', raise_exception=False):
related_user = partner.user_ids[0]
try:
self.pool.get(mail.model).check_access_rule(cr, related_user.id, [mail.res_id], 'read', context=context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
# the parameters to encode for the query and fragment part of url
query = {'db': cr.dbname}
fragment = {
'login': related_user.login,
'model': mail.model,
'id': mail.res_id,
}
url = urljoin(base_url, "?%s#%s" % (urlencode(query), urlencode(fragment)))
text = _("""<p>Access this document <a href="%s">directly in OpenERP</a></p>""") % url
body = tools.append_content_to_html(body, ("<div><p>%s</p></div>" % text), plaintext=False)
except except_orm, e:
pass
return body
def send_get_mail_reply_to(self, cr, uid, mail, partner=None, context=None):
""" Return a specific ir_email reply_to.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
if mail.reply_to:
return mail.reply_to
email_reply_to = False
# if model and res_id: try to use ``message_get_reply_to`` that returns the document alias
if mail.model and mail.res_id and hasattr(self.pool.get(mail.model), 'message_get_reply_to'):
email_reply_to = self.pool.get(mail.model).message_get_reply_to(cr, uid, [mail.res_id], context=context)[0]
# no alias reply_to -> reply_to will be the email_from, only the email part
if not email_reply_to and mail.email_from:
emails = tools.email_split(mail.email_from)
if emails:
email_reply_to = emails[0]
# format 'Document name <email_address>'
if email_reply_to and mail.model and mail.res_id:
document_name = self.pool.get(mail.model).name_get(cr, SUPERUSER_ID, [mail.res_id], context=context)[0]
if document_name:
# sanitize document name
sanitized_doc_name = re.sub(r'[^\w+.]+', '-', document_name[1])
# generate reply to
email_reply_to = _('"Followers of %s" <%s>') % (sanitized_doc_name, email_reply_to)
return email_reply_to
def send_get_email_dict(self, cr, uid, mail, partner=None, context=None):
|
def send(self, cr, uid, ids, auto_commit=False, recipient_ids=None, context=None):
""" Sends the selected emails immediately, ignoring their current
state (mails that have already been sent should not be passed
unless they should actually be re-sent).
Emails successfully delivered are marked as 'sent', and those
that fail to be deliver are marked as 'exception', and the
corresponding error mail is output in the server logs.
:param bool auto_commit: whether to force a commit of the mail status
after sending each mail (meant only for scheduler processing);
should never be True during normal transactions (default: False)
:param list recipient_ids: specific list of res.partner recipients.
If set, one email is sent to each partner. Its is possible to
tune the sent email through ``send_get_mail_body`` and ``send_get_mail_subject``.
If not specified, one email is sent to mail_mail.email_to.
:return: True
"""
ir_mail_server = self.pool.get('ir.mail_server')
for mail in self.browse(cr, uid, ids, context=context):
try:
# handle attachments
attachments = []
for attach in mail.attachment_ids:
attachments.append((attach.datas_fname, base64.b64decode(attach.datas)))
# specific behavior to customize the send email for notified partners
email_list = []
if recipient_ids:
for partner in self.pool.get('res.partner').browse(cr, SUPERUSER_ID, recipient_ids, context=context):
email_list.append(self.send_get_email_dict(cr, uid, mail, partner=partner, context=context))
else:
email_list.append(self.send_get_email_dict(cr, uid, mail, context=context))
# build an RFC2822 email.message.Message object and send it without queuing
for email in email_list:
msg = ir_mail_server.build_email(
email_from = mail.email_from,
email_to = email.get('email_to'),
subject = email.get('subject'),
body = email.get('body'),
body_alternative = email.get('body_alternative'),
email_cc = tools.email_split(mail.email_cc),
reply_to = email.get('reply_to'),
attachments = attachments,
message_id = mail.message_id,
references = mail.references,
object_id = mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
subtype = 'html',
subtype_alternative = 'plain')
res = ir_mail_server.send_email(cr, uid, msg,
mail_server_id=mail.mail_server_id.id, context=context)
if res:
mail.write({'state': 'sent', 'message_id': res})
mail_sent = True
else:
mail.write({'state': 'exception'})
mail_sent = False
# /!\ can't use mail.state here, as mail.refresh() will cause an error
# see revid:odo@openerp.com-20120622152536-42b2s28lvdv3odyr in 6.1
if mail_sent:
self._postprocess_sent_message(cr, uid, mail, context=context)
except Exception:
_logger.exception('failed sending mail.mail %s', mail.id)
mail.write({'state': 'exception'})
if auto_commit == True:
cr.commit()
return True
| """ Return a dictionary for specific email values, depending on a
partner, or generic to the whole recipients given by mail.email_to.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
body = self.send_get_mail_body(cr, uid, mail, partner=partner, context=context)
subject = self.send_get_mail_subject(cr, uid, mail, partner=partner, context=context)
reply_to = self.send_get_mail_reply_to(cr, uid, mail, partner=partner, context=context)
body_alternative = tools.html2plaintext(body)
# generate email_to, heuristic:
# 1. if 'partner' is specified and there is a related document: Followers of 'Doc' <email>
# 2. if 'partner' is specified, but no related document: Partner Name <email>
# 3; fallback on mail.email_to that we split to have an email addresses list
if partner and mail.record_name:
sanitized_record_name = re.sub(r'[^\w+.]+', '-', mail.record_name)
email_to = [_('"Followers of %s" <%s>') % (sanitized_record_name, partner.email)]
elif partner:
email_to = ['%s <%s>' % (partner.name, partner.email)]
else:
email_to = tools.email_split(mail.email_to)
return {
'body': body,
'body_alternative': body_alternative,
'subject': subject,
'email_to': email_to,
'reply_to': reply_to,
} | identifier_body |
mail_mail.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
# | # published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import logging
import re
from urllib import urlencode
from urlparse import urljoin
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class mail_mail(osv.Model):
""" Model holding RFC2822 email messages to send. This model also provides
facilities to queue and send new email messages. """
_name = 'mail.mail'
_description = 'Outgoing Mails'
_inherits = {'mail.message': 'mail_message_id'}
_order = 'id desc'
_columns = {
'mail_message_id': fields.many2one('mail.message', 'Message', required=True, ondelete='cascade'),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing mail server', readonly=1),
'state': fields.selection([
('outgoing', 'Outgoing'),
('sent', 'Sent'),
('received', 'Received'),
('exception', 'Delivery Failed'),
('cancel', 'Cancelled'),
], 'Status', readonly=True),
'auto_delete': fields.boolean('Auto Delete',
help="Permanently delete this email after sending it, to save space"),
'references': fields.text('References', help='Message references, such as identifiers of previous messages', readonly=1),
'email_from': fields.char('From', help='Message sender, taken from user preferences.'),
'email_to': fields.text('To', help='Message recipients'),
'email_cc': fields.char('Cc', help='Carbon copy message recipients'),
'reply_to': fields.char('Reply-To', help='Preferred response address for the message'),
'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"),
# Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification
# and during unlink() we will not cascade delete the parent and its attachments
'notification': fields.boolean('Is Notification')
}
def _get_default_from(self, cr, uid, context=None):
this = self.pool.get('res.users').browse(cr, uid, uid, context=context)
if this.alias_domain:
return '%s@%s' % (this.alias_name, this.alias_domain)
elif this.email:
return this.email
raise osv.except_osv(_('Invalid Action!'), _("Unable to send email, please configure the sender's email address or alias."))
_defaults = {
'state': 'outgoing',
'email_from': lambda self, cr, uid, ctx=None: self._get_default_from(cr, uid, ctx),
}
def default_get(self, cr, uid, fields, context=None):
# protection for `default_type` values leaking from menu action context (e.g. for invoices)
# To remove when automatic context propagation is removed in web client
if context and context.get('default_type') and context.get('default_type') not in self._all_columns['type'].column.selection:
context = dict(context, default_type=None)
return super(mail_mail, self).default_get(cr, uid, fields, context=context)
def create(self, cr, uid, values, context=None):
if 'notification' not in values and values.get('mail_message_id'):
values['notification'] = True
return super(mail_mail, self).create(cr, uid, values, context=context)
def unlink(self, cr, uid, ids, context=None):
# cascade-delete the parent message for all mails that are not created for a notification
ids_to_cascade = self.search(cr, uid, [('notification', '=', False), ('id', 'in', ids)])
parent_msg_ids = [m.mail_message_id.id for m in self.browse(cr, uid, ids_to_cascade, context=context)]
res = super(mail_mail, self).unlink(cr, uid, ids, context=context)
self.pool.get('mail.message').unlink(cr, uid, parent_msg_ids, context=context)
return res
def mark_outgoing(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'outgoing'}, context=context)
def cancel(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
def process_email_queue(self, cr, uid, ids=None, context=None):
"""Send immediately queued messages, committing after each
message is sent - this is not transactional and should
not be called during another transaction!
:param list ids: optional list of emails ids to send. If passed
no search is performed, and these ids are used
instead.
:param dict context: if a 'filters' key is present in context,
this value will be used as an additional
filter to further restrict the outgoing
messages to send (by default all 'outgoing'
messages are sent).
"""
if context is None:
context = {}
if not ids:
filters = ['&', ('state', '=', 'outgoing'), ('type', '=', 'email')]
if 'filters' in context:
filters.extend(context['filters'])
ids = self.search(cr, uid, filters, context=context)
res = None
try:
# Force auto-commit - this is meant to be called by
# the scheduler, and we can't allow rolling back the status
# of previously sent emails!
res = self.send(cr, uid, ids, auto_commit=True, context=context)
except Exception:
_logger.exception("Failed processing mail queue")
return res
def _postprocess_sent_message(self, cr, uid, mail, context=None):
"""Perform any post-processing necessary after sending ``mail``
successfully, including deleting it completely along with its
attachment if the ``auto_delete`` flag of the mail was set.
Overridden by subclasses for extra post-processing behaviors.
:param browse_record mail: the mail that was just sent
:return: True
"""
if mail.auto_delete:
# done with SUPERUSER_ID to avoid giving large unlink access rights
self.unlink(cr, SUPERUSER_ID, [mail.id], context=context)
return True
def send_get_mail_subject(self, cr, uid, mail, force=False, partner=None, context=None):
""" If subject is void and record_name defined: '<Author> posted on <Resource>'
:param boolean force: force the subject replacement
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
if force or (not mail.subject and mail.model and mail.res_id):
return 'Re: %s' % (mail.record_name)
return mail.subject
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
    """ Return a specific ir_email body. The main purpose of this method
        is to be inherited by Portal, to add a link for signing in, in
        each notification email a partner receives.
        :param browse_record mail: mail.mail browse_record
        :param browse_record partner: specific recipient partner
    """
    body = mail.body_html
    # partner is a user, link to a related document (incentive to install portal)
    if partner and partner.user_ids and mail.model and mail.res_id \
            and self.check_access_rights(cr, partner.user_ids[0].id, 'read', raise_exception=False):
        related_user = partner.user_ids[0]
        try:
            # raises except_orm when the user may not read the related record;
            # in that case the access link is simply not appended (best effort)
            self.pool.get(mail.model).check_access_rule(cr, related_user.id, [mail.res_id], 'read', context=context)
            base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
            # the parameters to encode for the query and fragment part of url
            query = {'db': cr.dbname}
            fragment = {
                'login': related_user.login,
                'model': mail.model,
                'id': mail.res_id,
            }
            url = urljoin(base_url, "?%s#%s" % (urlencode(query), urlencode(fragment)))
            text = _("""<p>Access this document <a href="%s">directly in OpenERP</a></p>""") % url
            body = tools.append_content_to_html(body, ("<div><p>%s</p></div>" % text), plaintext=False)
        except except_orm, e:
            # no read access on the document: keep the plain body
            pass
    return body
def send_get_mail_reply_to(self, cr, uid, mail, partner=None, context=None):
    """ Return a specific ir_email reply_to.

        Resolution order: the mail's explicit ``reply_to``; the related
        document's alias via ``message_get_reply_to`` when available; the
        bare email part of ``email_from`` as a last resort.

        :param browse_record mail: mail.mail browse_record
        :param browse_record partner: specific recipient partner
    """
    if mail.reply_to:
        return mail.reply_to
    email_reply_to = False
    # if model and res_id: try to use ``message_get_reply_to`` that returns the document alias
    if mail.model and mail.res_id and hasattr(self.pool.get(mail.model), 'message_get_reply_to'):
        email_reply_to = self.pool.get(mail.model).message_get_reply_to(cr, uid, [mail.res_id], context=context)[0]
    # no alias reply_to -> reply_to will be the email_from, only the email part
    if not email_reply_to and mail.email_from:
        emails = tools.email_split(mail.email_from)
        if emails:
            email_reply_to = emails[0]
    # format 'Document name <email_address>'
    if email_reply_to and mail.model and mail.res_id:
        # name_get as SUPERUSER_ID: the recipient may not have read access
        document_name = self.pool.get(mail.model).name_get(cr, SUPERUSER_ID, [mail.res_id], context=context)[0]
        if document_name:
            # sanitize document name: collapse anything outside [\w+.] to '-'
            sanitized_doc_name = re.sub(r'[^\w+.]+', '-', document_name[1])
            # generate reply to
            email_reply_to = _('"Followers of %s" <%s>') % (sanitized_doc_name, email_reply_to)
    return email_reply_to
def send_get_email_dict(self, cr, uid, mail, partner=None, context=None):
    """ Return a dictionary for specific email values, depending on a
        partner, or generic to the whole recipients given by mail.email_to.

        :param browse_record mail: mail.mail browse_record
        :param browse_record partner: specific recipient partner
        :return: dict with keys body, body_alternative, subject,
                 email_to (list), reply_to
    """
    body = self.send_get_mail_body(cr, uid, mail, partner=partner, context=context)
    subject = self.send_get_mail_subject(cr, uid, mail, partner=partner, context=context)
    reply_to = self.send_get_mail_reply_to(cr, uid, mail, partner=partner, context=context)
    # plaintext fallback derived from the HTML body
    body_alternative = tools.html2plaintext(body)
    # generate email_to, heuristic:
    # 1. if 'partner' is specified and there is a related document: Followers of 'Doc' <email>
    # 2. if 'partner' is specified, but no related document: Partner Name <email>
    # 3; fallback on mail.email_to that we split to have an email addresses list
    if partner and mail.record_name:
        sanitized_record_name = re.sub(r'[^\w+.]+', '-', mail.record_name)
        email_to = [_('"Followers of %s" <%s>') % (sanitized_record_name, partner.email)]
    elif partner:
        email_to = ['%s <%s>' % (partner.name, partner.email)]
    else:
        email_to = tools.email_split(mail.email_to)
    return {
        'body': body,
        'body_alternative': body_alternative,
        'subject': subject,
        'email_to': email_to,
        'reply_to': reply_to,
    }
def send(self, cr, uid, ids, auto_commit=False, recipient_ids=None, context=None):
    """ Sends the selected emails immediately, ignoring their current
        state (mails that have already been sent should not be passed
        unless they should actually be re-sent).
        Emails successfully delivered are marked as 'sent', and those
        that fail to be deliver are marked as 'exception', and the
        corresponding error mail is output in the server logs.

        :param bool auto_commit: whether to force a commit of the mail status
            after sending each mail (meant only for scheduler processing);
            should never be True during normal transactions (default: False)
        :param list recipient_ids: specific list of res.partner recipients.
            If set, one email is sent to each partner. Its is possible to
            tune the sent email through ``send_get_mail_body`` and ``send_get_mail_subject``.
            If not specified, one email is sent to mail_mail.email_to.
        :return: True
    """
    ir_mail_server = self.pool.get('ir.mail_server')
    for mail in self.browse(cr, uid, ids, context=context):
        try:
            # handle attachments: decode stored base64 payloads
            attachments = []
            for attach in mail.attachment_ids:
                attachments.append((attach.datas_fname, base64.b64decode(attach.datas)))
            # specific behavior to customize the send email for notified partners
            email_list = []
            if recipient_ids:
                for partner in self.pool.get('res.partner').browse(cr, SUPERUSER_ID, recipient_ids, context=context):
                    email_list.append(self.send_get_email_dict(cr, uid, mail, partner=partner, context=context))
            else:
                email_list.append(self.send_get_email_dict(cr, uid, mail, context=context))
            # build an RFC2822 email.message.Message object and send it without queuing
            for email in email_list:
                msg = ir_mail_server.build_email(
                    email_from=mail.email_from,
                    email_to=email.get('email_to'),
                    subject=email.get('subject'),
                    body=email.get('body'),
                    body_alternative=email.get('body_alternative'),
                    email_cc=tools.email_split(mail.email_cc),
                    reply_to=email.get('reply_to'),
                    attachments=attachments,
                    message_id=mail.message_id,
                    references=mail.references,
                    object_id=mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
                    subtype='html',
                    subtype_alternative='plain')
                res = ir_mail_server.send_email(cr, uid, msg,
                    mail_server_id=mail.mail_server_id.id, context=context)
                if res:
                    mail.write({'state': 'sent', 'message_id': res})
                    mail_sent = True
                else:
                    mail.write({'state': 'exception'})
                    mail_sent = False
                # /!\ can't use mail.state here, as mail.refresh() will cause an error
                # see revid:odo@openerp.com-20120622152536-42b2s28lvdv3odyr in 6.1
                if mail_sent:
                    self._postprocess_sent_message(cr, uid, mail, context=context)
        except Exception:
            # failure of one mail must not abort the whole batch
            _logger.exception('failed sending mail.mail %s', mail.id)
            mail.write({'state': 'exception'})
        # commit after each mail when requested (scheduler mode), so the
        # status of already-processed mails survives a later failure
        if auto_commit:
            cr.commit()
    return True
# it under the terms of the GNU Affero General Public License as
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import logging
import re
from urllib import urlencode
from urlparse import urljoin
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class mail_mail(osv.Model):
    """ Model holding RFC2822 email messages to send. This model also provides
        facilities to queue and send new email messages. """
    _name = 'mail.mail'
    _description = 'Outgoing Mails'
    _inherits = {'mail.message': 'mail_message_id'}
    _order = 'id desc'

    _columns = {
        'mail_message_id': fields.many2one('mail.message', 'Message', required=True, ondelete='cascade'),
        'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing mail server', readonly=1),
        'state': fields.selection([
            ('outgoing', 'Outgoing'),
            ('sent', 'Sent'),
            ('received', 'Received'),
            ('exception', 'Delivery Failed'),
            ('cancel', 'Cancelled'),
        ], 'Status', readonly=True),
        'auto_delete': fields.boolean('Auto Delete',
            help="Permanently delete this email after sending it, to save space"),
        'references': fields.text('References', help='Message references, such as identifiers of previous messages', readonly=1),
        'email_from': fields.char('From', help='Message sender, taken from user preferences.'),
        'email_to': fields.text('To', help='Message recipients'),
        'email_cc': fields.char('Cc', help='Carbon copy message recipients'),
        'reply_to': fields.char('Reply-To', help='Preferred response address for the message'),
        'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"),
        # Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification
        # and during unlink() we will not cascade delete the parent and its attachments
        'notification': fields.boolean('Is Notification')
    }

    def _get_default_from(self, cr, uid, context=None):
        """Default sender: the user's alias (alias_name@alias_domain) when
        configured, else the user's email; raise when neither is set."""
        this = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        if this.alias_domain:
            return '%s@%s' % (this.alias_name, this.alias_domain)
        elif this.email:
            return this.email
        raise osv.except_osv(_('Invalid Action!'), _("Unable to send email, please configure the sender's email address or alias."))

    _defaults = {
        'state': 'outgoing',
        'email_from': lambda self, cr, uid, ctx=None: self._get_default_from(cr, uid, ctx),
    }

    def default_get(self, cr, uid, fields, context=None):
        """Drop a ``default_type`` context value that does not belong to
        mail.message's ``type`` selection."""
        # protection for `default_type` values leaking from menu action context (e.g. for invoices)
        # To remove when automatic context propagation is removed in web client
        if context and context.get('default_type') and context.get('default_type') not in self._all_columns['type'].column.selection:
            context = dict(context, default_type=None)
        return super(mail_mail, self).default_get(cr, uid, fields, context=context)

    def create(self, cr, uid, values, context=None):
        """Flag mails created with an existing mail_message_id as
        notifications (see unlink() for the cascade-delete consequence)."""
        if 'notification' not in values and values.get('mail_message_id'):
            values['notification'] = True
        return super(mail_mail, self).create(cr, uid, values, context=context)

    def unlink(self, cr, uid, ids, context=None):
        """Delete mails; also cascade-delete the parent mail.message of
        every mail that is not a notification."""
        # cascade-delete the parent message for all mails that are not created for a notification
        ids_to_cascade = self.search(cr, uid, [('notification', '=', False), ('id', 'in', ids)])
        parent_msg_ids = [m.mail_message_id.id for m in self.browse(cr, uid, ids_to_cascade, context=context)]
        res = super(mail_mail, self).unlink(cr, uid, ids, context=context)
        self.pool.get('mail.message').unlink(cr, uid, parent_msg_ids, context=context)
        return res

    def mark_outgoing(self, cr, uid, ids, context=None):
        # put the mails back in the outgoing queue
        return self.write(cr, uid, ids, {'state': 'outgoing'}, context=context)

    def cancel(self, cr, uid, ids, context=None):
        # cancelled mails are skipped by the scheduler
        return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)

    def process_email_queue(self, cr, uid, ids=None, context=None):
        """Send immediately queued messages, committing after each
           message is sent - this is not transactional and should
           not be called during another transaction!

           :param list ids: optional list of emails ids to send. If passed
                            no search is performed, and these ids are used
                            instead.
           :param dict context: if a 'filters' key is present in context,
                                this value will be used as an additional
                                filter to further restrict the outgoing
                                messages to send (by default all 'outgoing'
                                messages are sent).
        """
        if context is None:
            context = {}
        if not ids:
            filters = ['&', ('state', '=', 'outgoing'), ('type', '=', 'email')]
            if 'filters' in context:
                filters.extend(context['filters'])
            ids = self.search(cr, uid, filters, context=context)
        res = None
        try:
            # Force auto-commit - this is meant to be called by
            # the scheduler, and we can't allow rolling back the status
            # of previously sent emails!
            res = self.send(cr, uid, ids, auto_commit=True, context=context)
        except Exception:
            _logger.exception("Failed processing mail queue")
        return res

    def _postprocess_sent_message(self, cr, uid, mail, context=None):
        """Perform any post-processing necessary after sending ``mail``
        successfully, including deleting it completely along with its
        attachment if the ``auto_delete`` flag of the mail was set.
        Overridden by subclasses for extra post-processing behaviors.

        :param browse_record mail: the mail that was just sent
        :return: True
        """
        if mail.auto_delete:
            # done with SUPERUSER_ID to avoid giving large unlink access rights
            self.unlink(cr, SUPERUSER_ID, [mail.id], context=context)
        return True

    def send_get_mail_subject(self, cr, uid, mail, force=False, partner=None, context=None):
        """ If subject is void and record_name defined: '<Author> posted on <Resource>'
            :param boolean force: force the subject replacement
            :param browse_record mail: mail.mail browse_record
            :param browse_record partner: specific recipient partner
        """
        if force or (not mail.subject and mail.model and mail.res_id):
            return 'Re: %s' % (mail.record_name)
        return mail.subject

    def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
        """ Return a specific ir_email body. The main purpose of this method
            is to be inherited by Portal, to add a link for signing in, in
            each notification email a partner receives.
            :param browse_record mail: mail.mail browse_record
            :param browse_record partner: specific recipient partner
        """
        body = mail.body_html
        # partner is a user, link to a related document (incentive to install portal)
        if partner and partner.user_ids and mail.model and mail.res_id \
                and self.check_access_rights(cr, partner.user_ids[0].id, 'read', raise_exception=False):
            related_user = partner.user_ids[0]
            try:
                # raises except_orm when the user may not read the record;
                # the access link is then simply not appended (best effort)
                self.pool.get(mail.model).check_access_rule(cr, related_user.id, [mail.res_id], 'read', context=context)
                base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
                # the parameters to encode for the query and fragment part of url
                query = {'db': cr.dbname}
                fragment = {
                    'login': related_user.login,
                    'model': mail.model,
                    'id': mail.res_id,
                }
                url = urljoin(base_url, "?%s#%s" % (urlencode(query), urlencode(fragment)))
                text = _("""<p>Access this document <a href="%s">directly in OpenERP</a></p>""") % url
                body = tools.append_content_to_html(body, ("<div><p>%s</p></div>" % text), plaintext=False)
            except except_orm:
                pass
        return body

    def send_get_mail_reply_to(self, cr, uid, mail, partner=None, context=None):
        """ Return a specific ir_email reply_to.
            :param browse_record mail: mail.mail browse_record
            :param browse_record partner: specific recipient partner
        """
        if mail.reply_to:
            return mail.reply_to
        email_reply_to = False
        # if model and res_id: try to use ``message_get_reply_to`` that returns the document alias
        if mail.model and mail.res_id and hasattr(self.pool.get(mail.model), 'message_get_reply_to'):
            email_reply_to = self.pool.get(mail.model).message_get_reply_to(cr, uid, [mail.res_id], context=context)[0]
        # no alias reply_to -> reply_to will be the email_from, only the email part
        if not email_reply_to and mail.email_from:
            emails = tools.email_split(mail.email_from)
            if emails:
                email_reply_to = emails[0]
        # format 'Document name <email_address>'
        if email_reply_to and mail.model and mail.res_id:
            document_name = self.pool.get(mail.model).name_get(cr, SUPERUSER_ID, [mail.res_id], context=context)[0]
            if document_name:
                # sanitize document name
                sanitized_doc_name = re.sub(r'[^\w+.]+', '-', document_name[1])
                # generate reply to
                email_reply_to = _('"Followers of %s" <%s>') % (sanitized_doc_name, email_reply_to)
        return email_reply_to

    def send_get_email_dict(self, cr, uid, mail, partner=None, context=None):
        """ Return a dictionary for specific email values, depending on a
            partner, or generic to the whole recipients given by mail.email_to.
            :param browse_record mail: mail.mail browse_record
            :param browse_record partner: specific recipient partner
        """
        body = self.send_get_mail_body(cr, uid, mail, partner=partner, context=context)
        subject = self.send_get_mail_subject(cr, uid, mail, partner=partner, context=context)
        reply_to = self.send_get_mail_reply_to(cr, uid, mail, partner=partner, context=context)
        body_alternative = tools.html2plaintext(body)
        # generate email_to, heuristic:
        # 1. if 'partner' is specified and there is a related document: Followers of 'Doc' <email>
        # 2. if 'partner' is specified, but no related document: Partner Name <email>
        # 3; fallback on mail.email_to that we split to have an email addresses list
        if partner and mail.record_name:
            sanitized_record_name = re.sub(r'[^\w+.]+', '-', mail.record_name)
            email_to = [_('"Followers of %s" <%s>') % (sanitized_record_name, partner.email)]
        elif partner:
            email_to = ['%s <%s>' % (partner.name, partner.email)]
        else:
            email_to = tools.email_split(mail.email_to)
        return {
            'body': body,
            'body_alternative': body_alternative,
            'subject': subject,
            'email_to': email_to,
            'reply_to': reply_to,
        }

    def send(self, cr, uid, ids, auto_commit=False, recipient_ids=None, context=None):
        """ Sends the selected emails immediately, ignoring their current
            state (mails that have already been sent should not be passed
            unless they should actually be re-sent).
            Emails successfully delivered are marked as 'sent', and those
            that fail to be deliver are marked as 'exception', and the
            corresponding error mail is output in the server logs.

            :param bool auto_commit: whether to force a commit of the mail status
                after sending each mail (meant only for scheduler processing);
                should never be True during normal transactions (default: False)
            :param list recipient_ids: specific list of res.partner recipients.
                If set, one email is sent to each partner. Its is possible to
                tune the sent email through ``send_get_mail_body`` and ``send_get_mail_subject``.
                If not specified, one email is sent to mail_mail.email_to.
            :return: True
        """
        ir_mail_server = self.pool.get('ir.mail_server')
        for mail in self.browse(cr, uid, ids, context=context):
            try:
                # handle attachments: decode stored base64 payloads
                attachments = []
                for attach in mail.attachment_ids:
                    attachments.append((attach.datas_fname, base64.b64decode(attach.datas)))
                # specific behavior to customize the send email for notified partners
                email_list = []
                if recipient_ids:
                    for partner in self.pool.get('res.partner').browse(cr, SUPERUSER_ID, recipient_ids, context=context):
                        email_list.append(self.send_get_email_dict(cr, uid, mail, partner=partner, context=context))
                else:
                    email_list.append(self.send_get_email_dict(cr, uid, mail, context=context))
                # build an RFC2822 email.message.Message object and send it without queuing
                for email in email_list:
                    msg = ir_mail_server.build_email(
                        email_from=mail.email_from,
                        email_to=email.get('email_to'),
                        subject=email.get('subject'),
                        body=email.get('body'),
                        body_alternative=email.get('body_alternative'),
                        email_cc=tools.email_split(mail.email_cc),
                        reply_to=email.get('reply_to'),
                        attachments=attachments,
                        message_id=mail.message_id,
                        references=mail.references,
                        object_id=mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
                        subtype='html',
                        subtype_alternative='plain')
                    res = ir_mail_server.send_email(cr, uid, msg,
                        mail_server_id=mail.mail_server_id.id, context=context)
                    if res:
                        mail.write({'state': 'sent', 'message_id': res})
                        mail_sent = True
                    else:
                        mail.write({'state': 'exception'})
                        mail_sent = False
                    # /!\ can't use mail.state here, as mail.refresh() will cause an error
                    # see revid:odo@openerp.com-20120622152536-42b2s28lvdv3odyr in 6.1
                    if mail_sent:
                        self._postprocess_sent_message(cr, uid, mail, context=context)
            except Exception:
                _logger.exception('failed sending mail.mail %s', mail.id)
                mail.write({'state': 'exception'})
            if auto_commit:
                cr.commit()
        return True
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import logging
import re
from urllib import urlencode
from urlparse import urljoin
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class mail_mail(osv.Model):
""" Model holding RFC2822 email messages to send. This model also provides
facilities to queue and send new email messages. """
_name = 'mail.mail'
_description = 'Outgoing Mails'
_inherits = {'mail.message': 'mail_message_id'}
_order = 'id desc'
_columns = {
'mail_message_id': fields.many2one('mail.message', 'Message', required=True, ondelete='cascade'),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing mail server', readonly=1),
'state': fields.selection([
('outgoing', 'Outgoing'),
('sent', 'Sent'),
('received', 'Received'),
('exception', 'Delivery Failed'),
('cancel', 'Cancelled'),
], 'Status', readonly=True),
'auto_delete': fields.boolean('Auto Delete',
help="Permanently delete this email after sending it, to save space"),
'references': fields.text('References', help='Message references, such as identifiers of previous messages', readonly=1),
'email_from': fields.char('From', help='Message sender, taken from user preferences.'),
'email_to': fields.text('To', help='Message recipients'),
'email_cc': fields.char('Cc', help='Carbon copy message recipients'),
'reply_to': fields.char('Reply-To', help='Preferred response address for the message'),
'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"),
# Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification
# and during unlink() we will not cascade delete the parent and its attachments
'notification': fields.boolean('Is Notification')
}
def _get_default_from(self, cr, uid, context=None):
    """Return the default sender address for outgoing mails: the current
    user's alias (``alias_name@alias_domain``) when configured, else the
    user's email address.

    :raise osv.except_osv: when neither an alias nor an email is set
    """
    this = self.pool.get('res.users').browse(cr, uid, uid, context=context)
    if this.alias_domain:
        # restored: this line was lost (placeholder) in the source; it matches
        # the duplicated definition of this model earlier in the file
        return '%s@%s' % (this.alias_name, this.alias_domain)
    elif this.email:
        return this.email
    raise osv.except_osv(_('Invalid Action!'), _("Unable to send email, please configure the sender's email address or alias."))
_defaults = {
'state': 'outgoing',
'email_from': lambda self, cr, uid, ctx=None: self._get_default_from(cr, uid, ctx),
}
def default_get(self, cr, uid, fields, context=None):
    """Override of default_get: drop a ``default_type`` context value that
    does not belong to mail.message's ``type`` selection."""
    # protection for `default_type` values leaking from menu action context (e.g. for invoices)
    # To remove when automatic context propagation is removed in web client
    if context and context.get('default_type') and context.get('default_type') not in self._all_columns['type'].column.selection:
        context = dict(context, default_type=None)
    return super(mail_mail, self).default_get(cr, uid, fields, context=context)
def create(self, cr, uid, values, context=None):
    """Override of create: when a ``mail_message_id`` is given and the
    caller did not set ``notification`` explicitly, flag the mail as a
    notification (unlink() then keeps the parent message)."""
    if 'notification' not in values and values.get('mail_message_id'):
        values['notification'] = True
    return super(mail_mail, self).create(cr, uid, values, context=context)
def unlink(self, cr, uid, ids, context=None):
    """Override of unlink: also cascade-delete the parent mail.message of
    every mail that was not created as a notification."""
    # cascade-delete the parent message for all mails that are not created for a notification
    ids_to_cascade = self.search(cr, uid, [('notification', '=', False), ('id', 'in', ids)])
    parent_msg_ids = [m.mail_message_id.id for m in self.browse(cr, uid, ids_to_cascade, context=context)]
    res = super(mail_mail, self).unlink(cr, uid, ids, context=context)
    # delete the parents after the mails themselves (ondelete='cascade' on mail_message_id)
    self.pool.get('mail.message').unlink(cr, uid, parent_msg_ids, context=context)
    return res
def mark_outgoing(self, cr, uid, ids, context=None):
    # put the given mails back in the 'outgoing' queue state
    return self.write(cr, uid, ids, {'state': 'outgoing'}, context=context)
def cancel(self, cr, uid, ids, context=None):
    # cancelled mails are skipped by the scheduler (see process_email_queue)
    return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
def process_email_queue(self, cr, uid, ids=None, context=None):
    """Send immediately queued messages, committing after each
       message is sent - this is not transactional and should
       not be called during another transaction!

       :param list ids: optional list of emails ids to send. If passed
                        no search is performed, and these ids are used
                        instead.
       :param dict context: if a 'filters' key is present in context,
                            this value will be used as an additional
                            filter to further restrict the outgoing
                            messages to send (by default all 'outgoing'
                            messages are sent).
    """
    if context is None:
        context = {}
    if not ids:
        # default domain: queued ('outgoing') emails only
        filters = ['&', ('state', '=', 'outgoing'), ('type', '=', 'email')]
        if 'filters' in context:
            filters.extend(context['filters'])
        ids = self.search(cr, uid, filters, context=context)
    res = None
    try:
        # Force auto-commit - this is meant to be called by
        # the scheduler, and we can't allow rolling back the status
        # of previously sent emails!
        res = self.send(cr, uid, ids, auto_commit=True, context=context)
    except Exception:
        # swallow and log: a queue failure must not crash the scheduler
        _logger.exception("Failed processing mail queue")
    return res
def _postprocess_sent_message(self, cr, uid, mail, context=None):
    """Perform any post-processing necessary after sending ``mail``
    successfully, including deleting it completely along with its
    attachment if the ``auto_delete`` flag of the mail was set.
    Overridden by subclasses for extra post-processing behaviors.

    :param browse_record mail: the mail that was just sent
    :return: True
    """
    if mail.auto_delete:
        # done with SUPERUSER_ID to avoid giving large unlink access rights
        self.unlink(cr, SUPERUSER_ID, [mail.id], context=context)
    return True
def send_get_mail_subject(self, cr, uid, mail, force=False, partner=None, context=None):
    """ If subject is void and record_name defined: '<Author> posted on <Resource>'
        :param boolean force: force the subject replacement
        :param browse_record mail: mail.mail browse_record
        :param browse_record partner: specific recipient partner
    """
    # replace when forced, or when there is no subject but a related document
    if force or (not mail.subject and mail.model and mail.res_id):
        return 'Re: %s' % (mail.record_name)
    return mail.subject
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
    """ Return a specific ir_email body. The main purpose of this method
        is to be inherited by Portal, to add a link for signing in, in
        each notification email a partner receives.
        :param browse_record mail: mail.mail browse_record
        :param browse_record partner: specific recipient partner
    """
    body = mail.body_html
    # partner is a user, link to a related document (incentive to install portal)
    if partner and partner.user_ids and mail.model and mail.res_id \
            and self.check_access_rights(cr, partner.user_ids[0].id, 'read', raise_exception=False):
        related_user = partner.user_ids[0]
        try:
            # raises except_orm when the user may not read the related record;
            # in that case the access link is simply not appended (best effort)
            self.pool.get(mail.model).check_access_rule(cr, related_user.id, [mail.res_id], 'read', context=context)
            base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
            # the parameters to encode for the query and fragment part of url
            query = {'db': cr.dbname}
            fragment = {
                'login': related_user.login,
                'model': mail.model,
                'id': mail.res_id,
            }
            url = urljoin(base_url, "?%s#%s" % (urlencode(query), urlencode(fragment)))
            text = _("""<p>Access this document <a href="%s">directly in OpenERP</a></p>""") % url
            body = tools.append_content_to_html(body, ("<div><p>%s</p></div>" % text), plaintext=False)
        except except_orm, e:
            # no read access on the document: keep the plain body
            pass
    return body
def send_get_mail_reply_to(self, cr, uid, mail, partner=None, context=None):
    """ Return a specific ir_email reply_to.

        Resolution order: the mail's explicit ``reply_to``; the related
        document's alias via ``message_get_reply_to`` when available; the
        bare email part of ``email_from`` as a last resort.

        :param browse_record mail: mail.mail browse_record
        :param browse_record partner: specific recipient partner
    """
    if mail.reply_to:
        return mail.reply_to
    email_reply_to = False
    # if model and res_id: try to use ``message_get_reply_to`` that returns the document alias
    if mail.model and mail.res_id and hasattr(self.pool.get(mail.model), 'message_get_reply_to'):
        email_reply_to = self.pool.get(mail.model).message_get_reply_to(cr, uid, [mail.res_id], context=context)[0]
    # no alias reply_to -> reply_to will be the email_from, only the email part
    if not email_reply_to and mail.email_from:
        emails = tools.email_split(mail.email_from)
        if emails:
            email_reply_to = emails[0]
    # format 'Document name <email_address>'
    if email_reply_to and mail.model and mail.res_id:
        # name_get as SUPERUSER_ID: the recipient may not have read access
        document_name = self.pool.get(mail.model).name_get(cr, SUPERUSER_ID, [mail.res_id], context=context)[0]
        if document_name:
            # sanitize document name: collapse anything outside [\w+.] to '-'
            sanitized_doc_name = re.sub(r'[^\w+.]+', '-', document_name[1])
            # generate reply to
            email_reply_to = _('"Followers of %s" <%s>') % (sanitized_doc_name, email_reply_to)
    return email_reply_to
def send_get_email_dict(self, cr, uid, mail, partner=None, context=None):
    """ Return a dictionary for specific email values, depending on a
        partner, or generic to the whole recipients given by mail.email_to.

        :param browse_record mail: mail.mail browse_record
        :param browse_record partner: specific recipient partner
        :return: dict with keys body, body_alternative, subject,
                 email_to (list), reply_to
    """
    body = self.send_get_mail_body(cr, uid, mail, partner=partner, context=context)
    subject = self.send_get_mail_subject(cr, uid, mail, partner=partner, context=context)
    reply_to = self.send_get_mail_reply_to(cr, uid, mail, partner=partner, context=context)
    # plaintext fallback derived from the HTML body
    body_alternative = tools.html2plaintext(body)
    # generate email_to, heuristic:
    # 1. if 'partner' is specified and there is a related document: Followers of 'Doc' <email>
    # 2. if 'partner' is specified, but no related document: Partner Name <email>
    # 3; fallback on mail.email_to that we split to have an email addresses list
    if partner and mail.record_name:
        sanitized_record_name = re.sub(r'[^\w+.]+', '-', mail.record_name)
        email_to = [_('"Followers of %s" <%s>') % (sanitized_record_name, partner.email)]
    elif partner:
        email_to = ['%s <%s>' % (partner.name, partner.email)]
    else:
        email_to = tools.email_split(mail.email_to)
    return {
        'body': body,
        'body_alternative': body_alternative,
        'subject': subject,
        'email_to': email_to,
        'reply_to': reply_to,
    }
def send(self, cr, uid, ids, auto_commit=False, recipient_ids=None, context=None):
""" Sends the selected emails immediately, ignoring their current
state (mails that have already been sent should not be passed
unless they should actually be re-sent).
Emails successfully delivered are marked as 'sent', and those
that fail to be deliver are marked as 'exception', and the
corresponding error mail is output in the server logs.
:param bool auto_commit: whether to force a commit of the mail status
after sending each mail (meant only for scheduler processing);
should never be True during normal transactions (default: False)
:param list recipient_ids: specific list of res.partner recipients.
If set, one email is sent to each partner. Its is possible to
tune the sent email through ``send_get_mail_body`` and ``send_get_mail_subject``.
If not specified, one email is sent to mail_mail.email_to.
:return: True
"""
ir_mail_server = self.pool.get('ir.mail_server')
for mail in self.browse(cr, uid, ids, context=context):
try:
# handle attachments
attachments = []
for attach in mail.attachment_ids:
attachments.append((attach.datas_fname, base64.b64decode(attach.datas)))
# specific behavior to customize the send email for notified partners
email_list = []
if recipient_ids:
for partner in self.pool.get('res.partner').browse(cr, SUPERUSER_ID, recipient_ids, context=context):
email_list.append(self.send_get_email_dict(cr, uid, mail, partner=partner, context=context))
else:
email_list.append(self.send_get_email_dict(cr, uid, mail, context=context))
# build an RFC2822 email.message.Message object and send it without queuing
for email in email_list:
msg = ir_mail_server.build_email(
email_from = mail.email_from,
email_to = email.get('email_to'),
subject = email.get('subject'),
body = email.get('body'),
body_alternative = email.get('body_alternative'),
email_cc = tools.email_split(mail.email_cc),
reply_to = email.get('reply_to'),
attachments = attachments,
message_id = mail.message_id,
references = mail.references,
object_id = mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
subtype = 'html',
subtype_alternative = 'plain')
res = ir_mail_server.send_email(cr, uid, msg,
mail_server_id=mail.mail_server_id.id, context=context)
if res:
mail.write({'state': 'sent', 'message_id': res})
mail_sent = True
else:
mail.write({'state': 'exception'})
mail_sent = False
# /!\ can't use mail.state here, as mail.refresh() will cause an error
# see revid:odo@openerp.com-20120622152536-42b2s28lvdv3odyr in 6.1
if mail_sent:
self._postprocess_sent_message(cr, uid, mail, context=context)
except Exception:
_logger.exception('failed sending mail.mail %s', mail.id)
mail.write({'state': 'exception'})
if auto_commit == True:
cr.commit()
return True
| return '%s@%s' % (this.alias_name, this.alias_domain) | conditional_block |
course.ts | import { Request, Response, Router } from 'express';
import Parameter = require('pinput');
import { SuccessResponse } from '../../../common/responses';
import EquivalencyDao from '../../../queries/EquivalencyDao';
import { numberParam, subjectParam } from '../../params';
import RouteModule from '../../RouteModule';
import { runQuery } from './util';
import {
validateInstitutionAcronym,
} from './validation';
export default function(): RouteModule {
const dao = new EquivalencyDao();
const r = Router();
const validateInstitutions = (institutions: string[]) => {
for (const i of institutions) {
if (!validateInstitutionAcronym(i)) return false;
} | return true;
};
const institutionsParam = (req: Request) =>
new Parameter({
name: 'institutions',
rawInput: req.params.institutions,
validate: validateInstitutions,
preprocess: (inst) => inst.toUpperCase(),
array: true
});
r.get('/', async (req: Request, res: Response) => {
const resp: SuccessResponse = {
status: 200,
data: await dao.subjects()
};
res.json(resp);
});
r.get('/:subject', async (req: Request, res: Response) => {
return runQuery(
[subjectParam(req)],
(subj: string) => dao.numbersForSubject(subj),
res
);
});
r.get('/:subject/:number', async (req: Request, res: Response) => {
return runQuery(
// parameters
[subjectParam(req), numberParam(req)],
// query function
(subj: string, numb: string) => dao.course(subj, numb),
// response
res
);
});
r.get('/:subject/:number/:institutions', async (req: Request, res: Response) => {
return runQuery(
// parameters
[subjectParam(req), numberParam(req), institutionsParam(req)],
// query function
(subj: string, num: string, institutions: string[]) =>
dao.forCourse(subj, num, institutions),
// response
res
);
});
return {
mountPoint: '/course',
router: r
};
} | random_line_split | |
backend.rs | use std::env;
use dotenv::dotenv;
use diesel;
use diesel::prelude::*;
use diesel::pg::PgConnection;
use super::models::*;
use ::services::schema;
pub struct Backend {
connection: PgConnection
}
impl Backend {
pub fn new() -> Self {
dotenv().ok();
let database_url = env::var("DATABASE_URL")
.expect("DATABASE_URL must be set");
let connection = PgConnection::establish(&database_url)
.expect(&format!("Error connecting to {}", database_url));
Self {
connection
}
}
pub fn vote(&self, user: &str, entity: &str, up: i32, down: i32) {
use self::schema::{users, voteables, votes};
let entity = &entity.to_lowercase();
use self::schema::users::dsl as us;
let mut res: Vec<User> = us::users.filter(us::user_id.eq(user))
.load(&self.connection).unwrap();
let user = match res.len() {
0 => {
let new_user = NewUser { user_id: user };
diesel::insert(&new_user).into(users::table)
.get_result(&self.connection)
.expect("Error creating new user")
},
_ => res.pop().unwrap(),
};
use self::schema::voteables::dsl::*;
let mut res: Vec<Voteable> = voteables.filter(value.eq(entity))
.load(&self.connection).unwrap();
let mut voteable = match res.len() {
0 => {
let new_voteable = NewVoteable {
value: entity,
total_up: 0,
total_down: 0,
};
let res = diesel::insert(&new_voteable)
.into(voteables::table)
.get_result(&self.connection);
if let Err(e) = res{
return;
}
res.unwrap()
},
_ => res.pop().unwrap(),
};
voteable.total_up += up;
voteable.total_down += down;
voteable.save_changes::<Voteable>(&self.connection);
use ::services::schema::votes::dsl as vts;
let mut res: Vec<Vote> = vts::votes.filter(vts::user_id.eq(user.id))
.filter(vts::voteable_id.eq(voteable.id))
.load(&self.connection).unwrap();
let mut vote = match res.len() {
0 => {
let new_vote = NewVote{
user_id: user.id,
voteable_id: voteable.id,
up: 0,
down: 0,
};
diesel::insert(&new_vote).into(votes::table)
.get_result(&self.connection)
.expect("Error creating new vote")
},
_ => res.pop().unwrap(),
};
vote.up += up;
vote.down += down;
vote.save_changes::<Vote>(&self.connection);
}
pub fn get_upvotes(&self, entity: &str) -> Option<Voteable> |
}
| {
use self::schema::voteables::dsl::*;
let entity = &entity.to_lowercase();
let mut res = voteables.filter(value.eq(entity))
.load(&self.connection).unwrap();
match res.len() {
0 => None,
_ => Some(res.pop().unwrap()),
}
} | identifier_body |
backend.rs | use std::env;
use dotenv::dotenv;
use diesel;
use diesel::prelude::*;
use diesel::pg::PgConnection;
use super::models::*;
use ::services::schema;
pub struct Backend {
connection: PgConnection
}
impl Backend {
pub fn new() -> Self {
dotenv().ok();
let database_url = env::var("DATABASE_URL")
.expect("DATABASE_URL must be set");
let connection = PgConnection::establish(&database_url)
.expect(&format!("Error connecting to {}", database_url));
Self {
connection
}
}
pub fn | (&self, user: &str, entity: &str, up: i32, down: i32) {
use self::schema::{users, voteables, votes};
let entity = &entity.to_lowercase();
use self::schema::users::dsl as us;
let mut res: Vec<User> = us::users.filter(us::user_id.eq(user))
.load(&self.connection).unwrap();
let user = match res.len() {
0 => {
let new_user = NewUser { user_id: user };
diesel::insert(&new_user).into(users::table)
.get_result(&self.connection)
.expect("Error creating new user")
},
_ => res.pop().unwrap(),
};
use self::schema::voteables::dsl::*;
let mut res: Vec<Voteable> = voteables.filter(value.eq(entity))
.load(&self.connection).unwrap();
let mut voteable = match res.len() {
0 => {
let new_voteable = NewVoteable {
value: entity,
total_up: 0,
total_down: 0,
};
let res = diesel::insert(&new_voteable)
.into(voteables::table)
.get_result(&self.connection);
if let Err(e) = res{
return;
}
res.unwrap()
},
_ => res.pop().unwrap(),
};
voteable.total_up += up;
voteable.total_down += down;
voteable.save_changes::<Voteable>(&self.connection);
use ::services::schema::votes::dsl as vts;
let mut res: Vec<Vote> = vts::votes.filter(vts::user_id.eq(user.id))
.filter(vts::voteable_id.eq(voteable.id))
.load(&self.connection).unwrap();
let mut vote = match res.len() {
0 => {
let new_vote = NewVote{
user_id: user.id,
voteable_id: voteable.id,
up: 0,
down: 0,
};
diesel::insert(&new_vote).into(votes::table)
.get_result(&self.connection)
.expect("Error creating new vote")
},
_ => res.pop().unwrap(),
};
vote.up += up;
vote.down += down;
vote.save_changes::<Vote>(&self.connection);
}
pub fn get_upvotes(&self, entity: &str) -> Option<Voteable> {
use self::schema::voteables::dsl::*;
let entity = &entity.to_lowercase();
let mut res = voteables.filter(value.eq(entity))
.load(&self.connection).unwrap();
match res.len() {
0 => None,
_ => Some(res.pop().unwrap()),
}
}
}
| vote | identifier_name |
backend.rs | use std::env;
use dotenv::dotenv;
use diesel;
use diesel::prelude::*;
use diesel::pg::PgConnection;
use super::models::*;
use ::services::schema;
pub struct Backend {
connection: PgConnection
}
impl Backend {
pub fn new() -> Self {
dotenv().ok();
let database_url = env::var("DATABASE_URL")
.expect("DATABASE_URL must be set");
let connection = PgConnection::establish(&database_url)
.expect(&format!("Error connecting to {}", database_url));
Self {
connection
}
}
pub fn vote(&self, user: &str, entity: &str, up: i32, down: i32) {
use self::schema::{users, voteables, votes};
let entity = &entity.to_lowercase();
use self::schema::users::dsl as us;
let mut res: Vec<User> = us::users.filter(us::user_id.eq(user))
.load(&self.connection).unwrap();
let user = match res.len() {
0 => {
let new_user = NewUser { user_id: user };
diesel::insert(&new_user).into(users::table)
.get_result(&self.connection)
.expect("Error creating new user")
},
_ => res.pop().unwrap(),
};
use self::schema::voteables::dsl::*;
let mut res: Vec<Voteable> = voteables.filter(value.eq(entity))
.load(&self.connection).unwrap();
let mut voteable = match res.len() {
0 => {
let new_voteable = NewVoteable {
value: entity,
total_up: 0,
total_down: 0,
};
let res = diesel::insert(&new_voteable)
.into(voteables::table)
.get_result(&self.connection);
if let Err(e) = res{
return;
}
res.unwrap()
},
_ => res.pop().unwrap(),
};
voteable.total_up += up; | voteable.save_changes::<Voteable>(&self.connection);
use ::services::schema::votes::dsl as vts;
let mut res: Vec<Vote> = vts::votes.filter(vts::user_id.eq(user.id))
.filter(vts::voteable_id.eq(voteable.id))
.load(&self.connection).unwrap();
let mut vote = match res.len() {
0 => {
let new_vote = NewVote{
user_id: user.id,
voteable_id: voteable.id,
up: 0,
down: 0,
};
diesel::insert(&new_vote).into(votes::table)
.get_result(&self.connection)
.expect("Error creating new vote")
},
_ => res.pop().unwrap(),
};
vote.up += up;
vote.down += down;
vote.save_changes::<Vote>(&self.connection);
}
pub fn get_upvotes(&self, entity: &str) -> Option<Voteable> {
use self::schema::voteables::dsl::*;
let entity = &entity.to_lowercase();
let mut res = voteables.filter(value.eq(entity))
.load(&self.connection).unwrap();
match res.len() {
0 => None,
_ => Some(res.pop().unwrap()),
}
}
} | voteable.total_down += down; | random_line_split |
test_kms.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
KMS activation tester
"""
import os
import unittest
from cStringIO import StringIO
import commands.redhat.kms
class TestKMSUpdates(unittest.TestCase):
def test_redhat_up2date(self):
"""Test updating up2date config for Red Hat"""
outfiles = commands.redhat.kms.configure_up2date([
'proxy1.example.com', 'proxy2.example.com'])
self.assertEqual(outfiles['/etc/sysconfig/rhn/up2date'], '\n'.join([
'# Automatically generated Red Hat Update Agent config file, '
'do not edit.',
'# Format: 1.0',
'versionOverride[comment]=Override the automatically determined '
'system version',
'versionOverride=',
'',
'enableProxyAuth[comment]=To use an authenticated proxy or not',
'enableProxyAuth=0',
'',
'networkRetries[comment]=Number of attempts to make at network '
'connections before giving up',
'networkRetries=5',
'',
'hostedWhitelist[comment]=None',
'hostedWhitelist=',
'',
'enableProxy[comment]=Use a HTTP Proxy',
'enableProxy=0',
'',
'serverURL[comment]=Remote server URL',
'serverURL=https://proxy1.example.com/XMLRPC;'
'https://proxy2.example.com/XMLRPC;',
'',
'proxyPassword[comment]=The password to use for an authenticated '
'proxy',
'proxyPassword=',
'',
'noSSLServerURL[comment]=None',
'noSSLServerURL=http://proxy1.example.com/XMLRPC;'
'http://proxy2.example.com/XMLRPC;',
'',
'proxyUser[comment]=The username for an authenticated proxy',
'proxyUser=',
'',
'disallowConfChanges[comment]=Config options that can not be '
'overwritten by a config update action',
'disallowConfChanges=noReboot;sslCACert;useNoSSLForPackages;'
'noSSLServerURL;serverURL;disallowConfChanges;',
'',
'sslCACert[comment]=The CA cert used to verify the ssl server',
'sslCACert=/usr/share/rhn/RHN-ORG-TRUSTED-SSL-CERT',
'',
'debug[comment]=Whether or not debugging is enabled',
'debug=0',
'',
'httpProxy[comment]=HTTP proxy in host:port format, e.g. '
'squid.redhat.com:3128',
'httpProxy=',
'',
'systemIdPath[comment]=Location of system id',
'systemIdPath=/etc/sysconfig/rhn/systemid',
'',
'noReboot[comment]=Disable the reboot action',
'noReboot=0']) + '\n')
if __name__ == "__main__":
| agent_test.main() | conditional_block | |
test_kms.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
KMS activation tester
"""
import os
import unittest
from cStringIO import StringIO
import commands.redhat.kms
class TestKMSUpdates(unittest.TestCase):
def test_redhat_up2date(self):
|
if __name__ == "__main__":
agent_test.main()
| """Test updating up2date config for Red Hat"""
outfiles = commands.redhat.kms.configure_up2date([
'proxy1.example.com', 'proxy2.example.com'])
self.assertEqual(outfiles['/etc/sysconfig/rhn/up2date'], '\n'.join([
'# Automatically generated Red Hat Update Agent config file, '
'do not edit.',
'# Format: 1.0',
'versionOverride[comment]=Override the automatically determined '
'system version',
'versionOverride=',
'',
'enableProxyAuth[comment]=To use an authenticated proxy or not',
'enableProxyAuth=0',
'',
'networkRetries[comment]=Number of attempts to make at network '
'connections before giving up',
'networkRetries=5',
'',
'hostedWhitelist[comment]=None',
'hostedWhitelist=',
'',
'enableProxy[comment]=Use a HTTP Proxy',
'enableProxy=0',
'',
'serverURL[comment]=Remote server URL',
'serverURL=https://proxy1.example.com/XMLRPC;'
'https://proxy2.example.com/XMLRPC;',
'',
'proxyPassword[comment]=The password to use for an authenticated '
'proxy',
'proxyPassword=',
'',
'noSSLServerURL[comment]=None',
'noSSLServerURL=http://proxy1.example.com/XMLRPC;'
'http://proxy2.example.com/XMLRPC;',
'',
'proxyUser[comment]=The username for an authenticated proxy',
'proxyUser=',
'',
'disallowConfChanges[comment]=Config options that can not be '
'overwritten by a config update action',
'disallowConfChanges=noReboot;sslCACert;useNoSSLForPackages;'
'noSSLServerURL;serverURL;disallowConfChanges;',
'',
'sslCACert[comment]=The CA cert used to verify the ssl server',
'sslCACert=/usr/share/rhn/RHN-ORG-TRUSTED-SSL-CERT',
'',
'debug[comment]=Whether or not debugging is enabled',
'debug=0',
'',
'httpProxy[comment]=HTTP proxy in host:port format, e.g. '
'squid.redhat.com:3128',
'httpProxy=',
'',
'systemIdPath[comment]=Location of system id',
'systemIdPath=/etc/sysconfig/rhn/systemid',
'',
'noReboot[comment]=Disable the reboot action',
'noReboot=0']) + '\n') | identifier_body |
test_kms.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
KMS activation tester
"""
import os
import unittest
from cStringIO import StringIO
import commands.redhat.kms
class TestKMSUpdates(unittest.TestCase):
def | (self):
"""Test updating up2date config for Red Hat"""
outfiles = commands.redhat.kms.configure_up2date([
'proxy1.example.com', 'proxy2.example.com'])
self.assertEqual(outfiles['/etc/sysconfig/rhn/up2date'], '\n'.join([
'# Automatically generated Red Hat Update Agent config file, '
'do not edit.',
'# Format: 1.0',
'versionOverride[comment]=Override the automatically determined '
'system version',
'versionOverride=',
'',
'enableProxyAuth[comment]=To use an authenticated proxy or not',
'enableProxyAuth=0',
'',
'networkRetries[comment]=Number of attempts to make at network '
'connections before giving up',
'networkRetries=5',
'',
'hostedWhitelist[comment]=None',
'hostedWhitelist=',
'',
'enableProxy[comment]=Use a HTTP Proxy',
'enableProxy=0',
'',
'serverURL[comment]=Remote server URL',
'serverURL=https://proxy1.example.com/XMLRPC;'
'https://proxy2.example.com/XMLRPC;',
'',
'proxyPassword[comment]=The password to use for an authenticated '
'proxy',
'proxyPassword=',
'',
'noSSLServerURL[comment]=None',
'noSSLServerURL=http://proxy1.example.com/XMLRPC;'
'http://proxy2.example.com/XMLRPC;',
'',
'proxyUser[comment]=The username for an authenticated proxy',
'proxyUser=',
'',
'disallowConfChanges[comment]=Config options that can not be '
'overwritten by a config update action',
'disallowConfChanges=noReboot;sslCACert;useNoSSLForPackages;'
'noSSLServerURL;serverURL;disallowConfChanges;',
'',
'sslCACert[comment]=The CA cert used to verify the ssl server',
'sslCACert=/usr/share/rhn/RHN-ORG-TRUSTED-SSL-CERT',
'',
'debug[comment]=Whether or not debugging is enabled',
'debug=0',
'',
'httpProxy[comment]=HTTP proxy in host:port format, e.g. '
'squid.redhat.com:3128',
'httpProxy=',
'',
'systemIdPath[comment]=Location of system id',
'systemIdPath=/etc/sysconfig/rhn/systemid',
'',
'noReboot[comment]=Disable the reboot action',
'noReboot=0']) + '\n')
if __name__ == "__main__":
agent_test.main()
| test_redhat_up2date | identifier_name |
test_kms.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
KMS activation tester
"""
import os
import unittest
from cStringIO import StringIO
import commands.redhat.kms
class TestKMSUpdates(unittest.TestCase):
def test_redhat_up2date(self):
"""Test updating up2date config for Red Hat"""
outfiles = commands.redhat.kms.configure_up2date([
'proxy1.example.com', 'proxy2.example.com'])
self.assertEqual(outfiles['/etc/sysconfig/rhn/up2date'], '\n'.join([
'# Automatically generated Red Hat Update Agent config file, '
'do not edit.',
'# Format: 1.0',
'versionOverride[comment]=Override the automatically determined '
'system version',
'versionOverride=',
'',
'enableProxyAuth[comment]=To use an authenticated proxy or not',
'enableProxyAuth=0',
'',
'networkRetries[comment]=Number of attempts to make at network '
'connections before giving up',
'networkRetries=5',
'',
'hostedWhitelist[comment]=None',
'hostedWhitelist=',
'',
'enableProxy[comment]=Use a HTTP Proxy',
'enableProxy=0',
'',
'serverURL[comment]=Remote server URL',
'serverURL=https://proxy1.example.com/XMLRPC;'
'https://proxy2.example.com/XMLRPC;',
'',
'proxyPassword[comment]=The password to use for an authenticated '
'proxy',
'proxyPassword=',
'',
'noSSLServerURL[comment]=None',
'noSSLServerURL=http://proxy1.example.com/XMLRPC;'
'http://proxy2.example.com/XMLRPC;',
'',
'proxyUser[comment]=The username for an authenticated proxy',
'proxyUser=',
'',
'disallowConfChanges[comment]=Config options that can not be '
'overwritten by a config update action',
'disallowConfChanges=noReboot;sslCACert;useNoSSLForPackages;'
'noSSLServerURL;serverURL;disallowConfChanges;',
'',
'sslCACert[comment]=The CA cert used to verify the ssl server',
'sslCACert=/usr/share/rhn/RHN-ORG-TRUSTED-SSL-CERT',
'',
'debug[comment]=Whether or not debugging is enabled',
'debug=0',
'', | 'httpProxy[comment]=HTTP proxy in host:port format, e.g. '
'squid.redhat.com:3128',
'httpProxy=',
'',
'systemIdPath[comment]=Location of system id',
'systemIdPath=/etc/sysconfig/rhn/systemid',
'',
'noReboot[comment]=Disable the reboot action',
'noReboot=0']) + '\n')
if __name__ == "__main__":
agent_test.main() | random_line_split | |
TrainAlchemy1.js | Lexicon.add('dregus/alchemist/TrainAlchemy1', { | ],
lost: [
{ currency:true, count:100 },
],
links: [
{ name:'Complete', action:'event:complete' },
],
body:[
{ tag:"p", text:"Proctor motions for you to follow him upstairs into the tower, and after a lengthy climb you "+
"reach a small but well stocked laboratory. There you and the alchemist pour over eldritch texts, "+
"speaking the unutterable, concocting the abominable, and learning that which should have been left "+
"unknown." },
{ tag:"p", _:[
"After what feels like days of study you come down from the tower, out into the ",
{ tag:"span", requirements:['Game:IsNight'], text:"cold desert night." },
{ tag:"span", requirements:['Game:IsDay'], text:"desert sunlight." },
"You're not sure how long it's been. Maybe days, perhaps only just hours. You feel changed, more ",
"knowledgeable certainly, but with the comprehension that there's far more that you don't understand ",
"then you do. It's unsettling, but exhilarating too.",
]},
],
}); |
layout: 'event',
gained: [
{ skill:'alchemy' }, | random_line_split |
selector_parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Gecko-specific bits for selector-parsing.
use cssparser::{Parser, ToCss};
use element_state::ElementState;
use gecko_bindings::structs::CSSPseudoClassType;
use selector_parser::{SelectorParser, PseudoElementCascadeType};
use selectors::parser::{ComplexSelector, SelectorMethods};
use selectors::visitor::SelectorVisitor;
use std::borrow::Cow;
use std::fmt;
use string_cache::{Atom, Namespace, WeakAtom, WeakNamespace};
pub use gecko::pseudo_element::{PseudoElement, EAGER_PSEUDOS, EAGER_PSEUDO_COUNT};
pub use gecko::snapshot::SnapshotMap;
bitflags! {
flags NonTSPseudoClassFlag: u8 {
// See NonTSPseudoClass::is_internal()
const PSEUDO_CLASS_INTERNAL = 0x01,
}
}
macro_rules! pseudo_class_name {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
#[doc = "Our representation of a non tree-structural pseudo-class."]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum NonTSPseudoClass {
$(
#[doc = $css]
$name,
)*
$(
#[doc = $s_css]
$s_name(Box<[u16]>),
)*
/// The non-standard `:-moz-any` pseudo-class.
///
/// TODO(emilio): We disallow combinators and pseudos here, so we
/// should use SimpleSelector instead
MozAny(Box<[ComplexSelector<SelectorImpl>]>),
}
}
}
apply_non_ts_list!(pseudo_class_name);
impl ToCss for NonTSPseudoClass {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
use cssparser::CssStringWriter;
use fmt::Write;
macro_rules! pseudo_class_serialize {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => concat!(":", $css),)*
$(NonTSPseudoClass::$s_name(ref s) => {
write!(dest, ":{}(", $s_css)?;
{
// FIXME(emilio): Avoid the extra allocation!
let mut css = CssStringWriter::new(dest);
// Discount the null char in the end from the
// string.
css.write_str(&String::from_utf16(&s[..s.len() - 1]).unwrap())?;
}
return dest.write_str(")")
}, )*
NonTSPseudoClass::MozAny(ref selectors) => {
dest.write_str(":-moz-any(")?;
let mut iter = selectors.iter();
let first = iter.next().expect(":-moz-any must have at least 1 selector");
first.to_css(dest)?;
for selector in iter {
dest.write_str(", ")?;
selector.to_css(dest)?;
}
return dest.write_str(")")
}
}
}
}
let ser = apply_non_ts_list!(pseudo_class_serialize);
dest.write_str(ser)
}
}
impl SelectorMethods for NonTSPseudoClass {
type Impl = SelectorImpl;
fn visit<V>(&self, visitor: &mut V) -> bool
where V: SelectorVisitor<Impl = Self::Impl>,
{
if let NonTSPseudoClass::MozAny(ref selectors) = *self |
true
}
}
impl NonTSPseudoClass {
/// A pseudo-class is internal if it can only be used inside
/// user agent style sheets.
pub fn is_internal(&self) -> bool {
macro_rules! check_flag {
(_) => (false);
($flags:expr) => ($flags.contains(PSEUDO_CLASS_INTERNAL));
}
macro_rules! pseudo_class_check_internal {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => check_flag!($flags),)*
$(NonTSPseudoClass::$s_name(..) => check_flag!($s_flags),)*
NonTSPseudoClass::MozAny(_) => false,
}
}
}
apply_non_ts_list!(pseudo_class_check_internal)
}
/// https://drafts.csswg.org/selectors-4/#useraction-pseudos
///
/// We intentionally skip the link-related ones.
pub fn is_safe_user_action_state(&self) -> bool {
matches!(*self, NonTSPseudoClass::Hover |
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus)
}
/// Get the state flag associated with a pseudo-class, if any.
pub fn state_flag(&self) -> ElementState {
macro_rules! flag {
(_) => (ElementState::empty());
($state:ident) => (::element_state::$state);
}
macro_rules! pseudo_class_state {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => flag!($state),)*
$(NonTSPseudoClass::$s_name(..) => flag!($s_state),)*
NonTSPseudoClass::MozAny(..) => ElementState::empty(),
}
}
}
apply_non_ts_list!(pseudo_class_state)
}
/// Returns true if the given pseudoclass should trigger style sharing cache revalidation.
pub fn needs_cache_revalidation(&self) -> bool {
// :dir() depends on state only, but doesn't use state_flag because its
// semantics don't quite match. Nevertheless, it doesn't need cache
// revalidation, because we already compare states for elements and
// candidates.
self.state_flag().is_empty() &&
!matches!(*self,
NonTSPseudoClass::MozAny(_) |
NonTSPseudoClass::Dir(_) |
NonTSPseudoClass::MozIsHTML |
NonTSPseudoClass::MozPlaceholder)
}
/// Convert NonTSPseudoClass to Gecko's CSSPseudoClassType.
pub fn to_gecko_pseudoclasstype(&self) -> Option<CSSPseudoClassType> {
macro_rules! gecko_type {
(_) => (None);
($gecko_type:ident) =>
(Some(::gecko_bindings::structs::CSSPseudoClassType::$gecko_type));
}
macro_rules! pseudo_class_geckotype {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => gecko_type!($gecko_type),)*
$(NonTSPseudoClass::$s_name(..) => gecko_type!($s_gecko_type),)*
NonTSPseudoClass::MozAny(_) => gecko_type!(any),
}
}
}
apply_non_ts_list!(pseudo_class_geckotype)
}
}
/// The dummy struct we use to implement our selector parsing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SelectorImpl;
impl ::selectors::SelectorImpl for SelectorImpl {
type AttrValue = Atom;
type Identifier = Atom;
type ClassName = Atom;
type LocalName = Atom;
type NamespacePrefix = Atom;
type NamespaceUrl = Namespace;
type BorrowedNamespaceUrl = WeakNamespace;
type BorrowedLocalName = WeakAtom;
type PseudoElement = PseudoElement;
type NonTSPseudoClass = NonTSPseudoClass;
}
impl<'a> ::selectors::Parser for SelectorParser<'a> {
type Impl = SelectorImpl;
fn parse_non_ts_pseudo_class(&self, name: Cow<str>) -> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_parse {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match_ignore_ascii_case! { &name,
$($css => NonTSPseudoClass::$name,)*
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_non_ts_functional_pseudo_class(&self,
name: Cow<str>,
parser: &mut Parser)
-> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_string_parse {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match_ignore_ascii_case! { &name,
$($s_css => {
let name = parser.expect_ident_or_string()?;
// convert to null terminated utf16 string
// since that's what Gecko deals with
let utf16: Vec<u16> = name.encode_utf16().chain(Some(0u16)).collect();
NonTSPseudoClass::$s_name(utf16.into_boxed_slice())
}, )*
"-moz-any" => {
let selectors = parser.parse_comma_separated(|input| {
ComplexSelector::parse(self, input)
})?;
// Selectors inside `:-moz-any` may not include combinators.
if selectors.iter().flat_map(|x| x.iter_raw()).any(|s| s.is_combinator()) {
return Err(())
}
NonTSPseudoClass::MozAny(selectors.into_boxed_slice())
}
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_string_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_pseudo_element(&self, name: Cow<str>) -> Result<PseudoElement, ()> {
PseudoElement::from_slice(&name, self.in_user_agent_stylesheet())
.ok_or(())
}
fn default_namespace(&self) -> Option<Namespace> {
self.namespaces.default.clone()
}
fn namespace_for_prefix(&self, prefix: &Atom) -> Option<Namespace> {
self.namespaces.prefixes.get(prefix).cloned()
}
}
impl SelectorImpl {
#[inline]
/// Legacy alias for PseudoElement::cascade_type.
pub fn pseudo_element_cascade_type(pseudo: &PseudoElement) -> PseudoElementCascadeType {
pseudo.cascade_type()
}
/// A helper to traverse each eagerly cascaded pseudo-element, executing
/// `fun` on it.
#[inline]
pub fn each_eagerly_cascaded_pseudo_element<F>(mut fun: F)
where F: FnMut(PseudoElement),
{
for pseudo in &EAGER_PSEUDOS {
fun(pseudo.clone())
}
}
#[inline]
/// Executes a function for each pseudo-element.
pub fn each_pseudo_element<F>(fun: F)
where F: FnMut(PseudoElement),
{
PseudoElement::each(fun)
}
#[inline]
/// Returns the relevant state flag for a given non-tree-structural
/// pseudo-class.
pub fn pseudo_class_state_flag(pc: &NonTSPseudoClass) -> ElementState {
pc.state_flag()
}
}
| {
for selector in selectors.iter() {
if !selector.visit(visitor) {
return false;
}
}
} | conditional_block |
selector_parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Gecko-specific bits for selector-parsing.
use cssparser::{Parser, ToCss};
use element_state::ElementState;
use gecko_bindings::structs::CSSPseudoClassType;
use selector_parser::{SelectorParser, PseudoElementCascadeType};
use selectors::parser::{ComplexSelector, SelectorMethods};
use selectors::visitor::SelectorVisitor;
use std::borrow::Cow;
use std::fmt;
use string_cache::{Atom, Namespace, WeakAtom, WeakNamespace};
pub use gecko::pseudo_element::{PseudoElement, EAGER_PSEUDOS, EAGER_PSEUDO_COUNT};
pub use gecko::snapshot::SnapshotMap;
bitflags! {
flags NonTSPseudoClassFlag: u8 {
// See NonTSPseudoClass::is_internal()
const PSEUDO_CLASS_INTERNAL = 0x01,
}
}
macro_rules! pseudo_class_name {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
#[doc = "Our representation of a non tree-structural pseudo-class."]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum NonTSPseudoClass {
$(
#[doc = $css]
$name,
)*
$(
#[doc = $s_css]
$s_name(Box<[u16]>),
)*
/// The non-standard `:-moz-any` pseudo-class.
///
/// TODO(emilio): We disallow combinators and pseudos here, so we
/// should use SimpleSelector instead
MozAny(Box<[ComplexSelector<SelectorImpl>]>),
}
}
}
apply_non_ts_list!(pseudo_class_name);
impl ToCss for NonTSPseudoClass {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
use cssparser::CssStringWriter;
use fmt::Write;
macro_rules! pseudo_class_serialize {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => concat!(":", $css),)*
$(NonTSPseudoClass::$s_name(ref s) => {
write!(dest, ":{}(", $s_css)?;
{
// FIXME(emilio): Avoid the extra allocation!
let mut css = CssStringWriter::new(dest);
// Discount the null char in the end from the
// string.
css.write_str(&String::from_utf16(&s[..s.len() - 1]).unwrap())?;
}
return dest.write_str(")")
}, )*
NonTSPseudoClass::MozAny(ref selectors) => {
dest.write_str(":-moz-any(")?;
let mut iter = selectors.iter();
let first = iter.next().expect(":-moz-any must have at least 1 selector");
first.to_css(dest)?;
for selector in iter {
dest.write_str(", ")?;
selector.to_css(dest)?;
}
return dest.write_str(")")
}
}
}
}
let ser = apply_non_ts_list!(pseudo_class_serialize);
dest.write_str(ser)
}
}
impl SelectorMethods for NonTSPseudoClass {
type Impl = SelectorImpl;
fn visit<V>(&self, visitor: &mut V) -> bool
where V: SelectorVisitor<Impl = Self::Impl>,
{
if let NonTSPseudoClass::MozAny(ref selectors) = *self {
for selector in selectors.iter() {
if !selector.visit(visitor) {
return false;
}
}
}
true
}
}
impl NonTSPseudoClass {
/// A pseudo-class is internal if it can only be used inside
/// user agent style sheets.
pub fn is_internal(&self) -> bool {
macro_rules! check_flag {
(_) => (false);
($flags:expr) => ($flags.contains(PSEUDO_CLASS_INTERNAL));
}
macro_rules! pseudo_class_check_internal {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => check_flag!($flags),)*
$(NonTSPseudoClass::$s_name(..) => check_flag!($s_flags),)*
NonTSPseudoClass::MozAny(_) => false,
}
}
}
apply_non_ts_list!(pseudo_class_check_internal)
}
/// https://drafts.csswg.org/selectors-4/#useraction-pseudos
///
/// We intentionally skip the link-related ones.
pub fn | (&self) -> bool {
matches!(*self, NonTSPseudoClass::Hover |
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus)
}
/// Get the state flag associated with a pseudo-class, if any.
pub fn state_flag(&self) -> ElementState {
macro_rules! flag {
(_) => (ElementState::empty());
($state:ident) => (::element_state::$state);
}
macro_rules! pseudo_class_state {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => flag!($state),)*
$(NonTSPseudoClass::$s_name(..) => flag!($s_state),)*
NonTSPseudoClass::MozAny(..) => ElementState::empty(),
}
}
}
apply_non_ts_list!(pseudo_class_state)
}
/// Returns true if the given pseudoclass should trigger style sharing cache revalidation.
pub fn needs_cache_revalidation(&self) -> bool {
// :dir() depends on state only, but doesn't use state_flag because its
// semantics don't quite match. Nevertheless, it doesn't need cache
// revalidation, because we already compare states for elements and
// candidates.
self.state_flag().is_empty() &&
!matches!(*self,
NonTSPseudoClass::MozAny(_) |
NonTSPseudoClass::Dir(_) |
NonTSPseudoClass::MozIsHTML |
NonTSPseudoClass::MozPlaceholder)
}
/// Convert NonTSPseudoClass to Gecko's CSSPseudoClassType.
pub fn to_gecko_pseudoclasstype(&self) -> Option<CSSPseudoClassType> {
macro_rules! gecko_type {
(_) => (None);
($gecko_type:ident) =>
(Some(::gecko_bindings::structs::CSSPseudoClassType::$gecko_type));
}
macro_rules! pseudo_class_geckotype {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => gecko_type!($gecko_type),)*
$(NonTSPseudoClass::$s_name(..) => gecko_type!($s_gecko_type),)*
NonTSPseudoClass::MozAny(_) => gecko_type!(any),
}
}
}
apply_non_ts_list!(pseudo_class_geckotype)
}
}
/// The dummy struct we use to implement our selector parsing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SelectorImpl;
impl ::selectors::SelectorImpl for SelectorImpl {
type AttrValue = Atom;
type Identifier = Atom;
type ClassName = Atom;
type LocalName = Atom;
type NamespacePrefix = Atom;
type NamespaceUrl = Namespace;
type BorrowedNamespaceUrl = WeakNamespace;
type BorrowedLocalName = WeakAtom;
type PseudoElement = PseudoElement;
type NonTSPseudoClass = NonTSPseudoClass;
}
impl<'a> ::selectors::Parser for SelectorParser<'a> {
type Impl = SelectorImpl;
fn parse_non_ts_pseudo_class(&self, name: Cow<str>) -> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_parse {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match_ignore_ascii_case! { &name,
$($css => NonTSPseudoClass::$name,)*
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_non_ts_functional_pseudo_class(&self,
name: Cow<str>,
parser: &mut Parser)
-> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_string_parse {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match_ignore_ascii_case! { &name,
$($s_css => {
let name = parser.expect_ident_or_string()?;
// convert to null terminated utf16 string
// since that's what Gecko deals with
let utf16: Vec<u16> = name.encode_utf16().chain(Some(0u16)).collect();
NonTSPseudoClass::$s_name(utf16.into_boxed_slice())
}, )*
"-moz-any" => {
let selectors = parser.parse_comma_separated(|input| {
ComplexSelector::parse(self, input)
})?;
// Selectors inside `:-moz-any` may not include combinators.
if selectors.iter().flat_map(|x| x.iter_raw()).any(|s| s.is_combinator()) {
return Err(())
}
NonTSPseudoClass::MozAny(selectors.into_boxed_slice())
}
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_string_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_pseudo_element(&self, name: Cow<str>) -> Result<PseudoElement, ()> {
PseudoElement::from_slice(&name, self.in_user_agent_stylesheet())
.ok_or(())
}
fn default_namespace(&self) -> Option<Namespace> {
self.namespaces.default.clone()
}
fn namespace_for_prefix(&self, prefix: &Atom) -> Option<Namespace> {
self.namespaces.prefixes.get(prefix).cloned()
}
}
impl SelectorImpl {
#[inline]
/// Legacy alias for PseudoElement::cascade_type.
pub fn pseudo_element_cascade_type(pseudo: &PseudoElement) -> PseudoElementCascadeType {
pseudo.cascade_type()
}
/// A helper to traverse each eagerly cascaded pseudo-element, executing
/// `fun` on it.
#[inline]
pub fn each_eagerly_cascaded_pseudo_element<F>(mut fun: F)
where F: FnMut(PseudoElement),
{
for pseudo in &EAGER_PSEUDOS {
fun(pseudo.clone())
}
}
#[inline]
/// Executes a function for each pseudo-element.
pub fn each_pseudo_element<F>(fun: F)
where F: FnMut(PseudoElement),
{
PseudoElement::each(fun)
}
#[inline]
/// Returns the relevant state flag for a given non-tree-structural
/// pseudo-class.
pub fn pseudo_class_state_flag(pc: &NonTSPseudoClass) -> ElementState {
pc.state_flag()
}
}
| is_safe_user_action_state | identifier_name |
selector_parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Gecko-specific bits for selector-parsing.
use cssparser::{Parser, ToCss};
use element_state::ElementState;
use gecko_bindings::structs::CSSPseudoClassType;
use selector_parser::{SelectorParser, PseudoElementCascadeType};
use selectors::parser::{ComplexSelector, SelectorMethods};
use selectors::visitor::SelectorVisitor;
use std::borrow::Cow;
use std::fmt;
use string_cache::{Atom, Namespace, WeakAtom, WeakNamespace};
pub use gecko::pseudo_element::{PseudoElement, EAGER_PSEUDOS, EAGER_PSEUDO_COUNT};
pub use gecko::snapshot::SnapshotMap;
bitflags! {
flags NonTSPseudoClassFlag: u8 {
// See NonTSPseudoClass::is_internal()
const PSEUDO_CLASS_INTERNAL = 0x01,
}
}
macro_rules! pseudo_class_name {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
#[doc = "Our representation of a non tree-structural pseudo-class."]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum NonTSPseudoClass {
$(
#[doc = $css]
$name,
)*
$(
#[doc = $s_css]
$s_name(Box<[u16]>),
)*
/// The non-standard `:-moz-any` pseudo-class.
///
/// TODO(emilio): We disallow combinators and pseudos here, so we
/// should use SimpleSelector instead
MozAny(Box<[ComplexSelector<SelectorImpl>]>),
}
}
}
apply_non_ts_list!(pseudo_class_name);
impl ToCss for NonTSPseudoClass {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
use cssparser::CssStringWriter;
use fmt::Write;
macro_rules! pseudo_class_serialize {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => concat!(":", $css),)*
$(NonTSPseudoClass::$s_name(ref s) => {
write!(dest, ":{}(", $s_css)?;
{
// FIXME(emilio): Avoid the extra allocation!
let mut css = CssStringWriter::new(dest);
// Discount the null char in the end from the
// string.
css.write_str(&String::from_utf16(&s[..s.len() - 1]).unwrap())?;
}
return dest.write_str(")")
}, )*
NonTSPseudoClass::MozAny(ref selectors) => {
dest.write_str(":-moz-any(")?;
let mut iter = selectors.iter();
let first = iter.next().expect(":-moz-any must have at least 1 selector");
first.to_css(dest)?;
for selector in iter {
dest.write_str(", ")?;
selector.to_css(dest)?;
}
return dest.write_str(")")
}
}
}
}
let ser = apply_non_ts_list!(pseudo_class_serialize);
dest.write_str(ser)
}
}
impl SelectorMethods for NonTSPseudoClass {
type Impl = SelectorImpl;
fn visit<V>(&self, visitor: &mut V) -> bool
where V: SelectorVisitor<Impl = Self::Impl>,
{
if let NonTSPseudoClass::MozAny(ref selectors) = *self {
for selector in selectors.iter() {
if !selector.visit(visitor) {
return false;
}
}
}
true
}
}
impl NonTSPseudoClass {
/// A pseudo-class is internal if it can only be used inside
/// user agent style sheets.
pub fn is_internal(&self) -> bool {
macro_rules! check_flag {
(_) => (false);
($flags:expr) => ($flags.contains(PSEUDO_CLASS_INTERNAL));
}
macro_rules! pseudo_class_check_internal {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => check_flag!($flags),)*
$(NonTSPseudoClass::$s_name(..) => check_flag!($s_flags),)*
NonTSPseudoClass::MozAny(_) => false,
}
}
}
apply_non_ts_list!(pseudo_class_check_internal)
}
/// https://drafts.csswg.org/selectors-4/#useraction-pseudos
///
/// We intentionally skip the link-related ones.
pub fn is_safe_user_action_state(&self) -> bool |
/// Get the state flag associated with a pseudo-class, if any.
pub fn state_flag(&self) -> ElementState {
macro_rules! flag {
(_) => (ElementState::empty());
($state:ident) => (::element_state::$state);
}
macro_rules! pseudo_class_state {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => flag!($state),)*
$(NonTSPseudoClass::$s_name(..) => flag!($s_state),)*
NonTSPseudoClass::MozAny(..) => ElementState::empty(),
}
}
}
apply_non_ts_list!(pseudo_class_state)
}
/// Returns true if the given pseudoclass should trigger style sharing cache revalidation.
pub fn needs_cache_revalidation(&self) -> bool {
// :dir() depends on state only, but doesn't use state_flag because its
// semantics don't quite match. Nevertheless, it doesn't need cache
// revalidation, because we already compare states for elements and
// candidates.
self.state_flag().is_empty() &&
!matches!(*self,
NonTSPseudoClass::MozAny(_) |
NonTSPseudoClass::Dir(_) |
NonTSPseudoClass::MozIsHTML |
NonTSPseudoClass::MozPlaceholder)
}
/// Convert NonTSPseudoClass to Gecko's CSSPseudoClassType.
pub fn to_gecko_pseudoclasstype(&self) -> Option<CSSPseudoClassType> {
macro_rules! gecko_type {
(_) => (None);
($gecko_type:ident) =>
(Some(::gecko_bindings::structs::CSSPseudoClassType::$gecko_type));
}
macro_rules! pseudo_class_geckotype {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => gecko_type!($gecko_type),)*
$(NonTSPseudoClass::$s_name(..) => gecko_type!($s_gecko_type),)*
NonTSPseudoClass::MozAny(_) => gecko_type!(any),
}
}
}
apply_non_ts_list!(pseudo_class_geckotype)
}
}
/// The dummy struct we use to implement our selector parsing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SelectorImpl;
impl ::selectors::SelectorImpl for SelectorImpl {
type AttrValue = Atom;
type Identifier = Atom;
type ClassName = Atom;
type LocalName = Atom;
type NamespacePrefix = Atom;
type NamespaceUrl = Namespace;
type BorrowedNamespaceUrl = WeakNamespace;
type BorrowedLocalName = WeakAtom;
type PseudoElement = PseudoElement;
type NonTSPseudoClass = NonTSPseudoClass;
}
impl<'a> ::selectors::Parser for SelectorParser<'a> {
type Impl = SelectorImpl;
fn parse_non_ts_pseudo_class(&self, name: Cow<str>) -> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_parse {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match_ignore_ascii_case! { &name,
$($css => NonTSPseudoClass::$name,)*
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_non_ts_functional_pseudo_class(&self,
name: Cow<str>,
parser: &mut Parser)
-> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_string_parse {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match_ignore_ascii_case! { &name,
$($s_css => {
let name = parser.expect_ident_or_string()?;
// convert to null terminated utf16 string
// since that's what Gecko deals with
let utf16: Vec<u16> = name.encode_utf16().chain(Some(0u16)).collect();
NonTSPseudoClass::$s_name(utf16.into_boxed_slice())
}, )*
"-moz-any" => {
let selectors = parser.parse_comma_separated(|input| {
ComplexSelector::parse(self, input)
})?;
// Selectors inside `:-moz-any` may not include combinators.
if selectors.iter().flat_map(|x| x.iter_raw()).any(|s| s.is_combinator()) {
return Err(())
}
NonTSPseudoClass::MozAny(selectors.into_boxed_slice())
}
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_string_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_pseudo_element(&self, name: Cow<str>) -> Result<PseudoElement, ()> {
PseudoElement::from_slice(&name, self.in_user_agent_stylesheet())
.ok_or(())
}
fn default_namespace(&self) -> Option<Namespace> {
self.namespaces.default.clone()
}
fn namespace_for_prefix(&self, prefix: &Atom) -> Option<Namespace> {
self.namespaces.prefixes.get(prefix).cloned()
}
}
impl SelectorImpl {
#[inline]
/// Legacy alias for PseudoElement::cascade_type.
pub fn pseudo_element_cascade_type(pseudo: &PseudoElement) -> PseudoElementCascadeType {
pseudo.cascade_type()
}
/// A helper to traverse each eagerly cascaded pseudo-element, executing
/// `fun` on it.
#[inline]
pub fn each_eagerly_cascaded_pseudo_element<F>(mut fun: F)
where F: FnMut(PseudoElement),
{
for pseudo in &EAGER_PSEUDOS {
fun(pseudo.clone())
}
}
#[inline]
/// Executes a function for each pseudo-element.
pub fn each_pseudo_element<F>(fun: F)
where F: FnMut(PseudoElement),
{
PseudoElement::each(fun)
}
#[inline]
/// Returns the relevant state flag for a given non-tree-structural
/// pseudo-class.
pub fn pseudo_class_state_flag(pc: &NonTSPseudoClass) -> ElementState {
pc.state_flag()
}
}
| {
matches!(*self, NonTSPseudoClass::Hover |
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus)
} | identifier_body |
selector_parser.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Gecko-specific bits for selector-parsing.
use cssparser::{Parser, ToCss};
use element_state::ElementState;
use gecko_bindings::structs::CSSPseudoClassType;
use selector_parser::{SelectorParser, PseudoElementCascadeType};
use selectors::parser::{ComplexSelector, SelectorMethods};
use selectors::visitor::SelectorVisitor;
use std::borrow::Cow;
use std::fmt;
use string_cache::{Atom, Namespace, WeakAtom, WeakNamespace};
pub use gecko::pseudo_element::{PseudoElement, EAGER_PSEUDOS, EAGER_PSEUDO_COUNT};
pub use gecko::snapshot::SnapshotMap;
bitflags! {
flags NonTSPseudoClassFlag: u8 {
// See NonTSPseudoClass::is_internal()
const PSEUDO_CLASS_INTERNAL = 0x01,
}
}
macro_rules! pseudo_class_name {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
#[doc = "Our representation of a non tree-structural pseudo-class."]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum NonTSPseudoClass {
$(
#[doc = $css]
$name,
)*
$(
#[doc = $s_css]
$s_name(Box<[u16]>),
)*
/// The non-standard `:-moz-any` pseudo-class.
///
/// TODO(emilio): We disallow combinators and pseudos here, so we
/// should use SimpleSelector instead
MozAny(Box<[ComplexSelector<SelectorImpl>]>),
}
}
}
apply_non_ts_list!(pseudo_class_name);
impl ToCss for NonTSPseudoClass {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
use cssparser::CssStringWriter;
use fmt::Write;
macro_rules! pseudo_class_serialize {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => concat!(":", $css),)*
$(NonTSPseudoClass::$s_name(ref s) => {
write!(dest, ":{}(", $s_css)?;
{
// FIXME(emilio): Avoid the extra allocation!
let mut css = CssStringWriter::new(dest);
// Discount the null char in the end from the
// string.
css.write_str(&String::from_utf16(&s[..s.len() - 1]).unwrap())?;
}
return dest.write_str(")")
}, )*
NonTSPseudoClass::MozAny(ref selectors) => {
dest.write_str(":-moz-any(")?;
let mut iter = selectors.iter();
let first = iter.next().expect(":-moz-any must have at least 1 selector");
first.to_css(dest)?;
for selector in iter {
dest.write_str(", ")?;
selector.to_css(dest)?;
}
return dest.write_str(")")
}
}
}
}
let ser = apply_non_ts_list!(pseudo_class_serialize);
dest.write_str(ser)
}
}
impl SelectorMethods for NonTSPseudoClass {
type Impl = SelectorImpl;
fn visit<V>(&self, visitor: &mut V) -> bool
where V: SelectorVisitor<Impl = Self::Impl>,
{
if let NonTSPseudoClass::MozAny(ref selectors) = *self {
for selector in selectors.iter() {
if !selector.visit(visitor) {
return false;
}
}
}
true
}
}
impl NonTSPseudoClass {
/// A pseudo-class is internal if it can only be used inside
/// user agent style sheets.
pub fn is_internal(&self) -> bool {
macro_rules! check_flag {
(_) => (false);
($flags:expr) => ($flags.contains(PSEUDO_CLASS_INTERNAL));
}
macro_rules! pseudo_class_check_internal {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => check_flag!($flags),)*
$(NonTSPseudoClass::$s_name(..) => check_flag!($s_flags),)*
NonTSPseudoClass::MozAny(_) => false,
}
}
}
apply_non_ts_list!(pseudo_class_check_internal)
}
/// https://drafts.csswg.org/selectors-4/#useraction-pseudos
///
/// We intentionally skip the link-related ones.
pub fn is_safe_user_action_state(&self) -> bool {
matches!(*self, NonTSPseudoClass::Hover |
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus)
}
/// Get the state flag associated with a pseudo-class, if any.
pub fn state_flag(&self) -> ElementState {
macro_rules! flag {
(_) => (ElementState::empty());
($state:ident) => (::element_state::$state);
}
macro_rules! pseudo_class_state {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => flag!($state),)*
$(NonTSPseudoClass::$s_name(..) => flag!($s_state),)*
NonTSPseudoClass::MozAny(..) => ElementState::empty(),
}
}
}
apply_non_ts_list!(pseudo_class_state)
}
/// Returns true if the given pseudoclass should trigger style sharing cache revalidation.
pub fn needs_cache_revalidation(&self) -> bool {
// :dir() depends on state only, but doesn't use state_flag because its
// semantics don't quite match. Nevertheless, it doesn't need cache
// revalidation, because we already compare states for elements and
// candidates.
self.state_flag().is_empty() &&
!matches!(*self,
NonTSPseudoClass::MozAny(_) |
NonTSPseudoClass::Dir(_) |
NonTSPseudoClass::MozIsHTML |
NonTSPseudoClass::MozPlaceholder)
}
/// Convert NonTSPseudoClass to Gecko's CSSPseudoClassType.
pub fn to_gecko_pseudoclasstype(&self) -> Option<CSSPseudoClassType> {
macro_rules! gecko_type {
(_) => (None);
($gecko_type:ident) =>
(Some(::gecko_bindings::structs::CSSPseudoClassType::$gecko_type));
}
macro_rules! pseudo_class_geckotype {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match *self {
$(NonTSPseudoClass::$name => gecko_type!($gecko_type),)*
$(NonTSPseudoClass::$s_name(..) => gecko_type!($s_gecko_type),)*
NonTSPseudoClass::MozAny(_) => gecko_type!(any),
}
}
}
apply_non_ts_list!(pseudo_class_geckotype)
}
}
/// The dummy struct we use to implement our selector parsing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SelectorImpl;
impl ::selectors::SelectorImpl for SelectorImpl {
type AttrValue = Atom;
type Identifier = Atom;
type ClassName = Atom;
type LocalName = Atom;
type NamespacePrefix = Atom;
type NamespaceUrl = Namespace;
type BorrowedNamespaceUrl = WeakNamespace;
type BorrowedLocalName = WeakAtom;
type PseudoElement = PseudoElement;
type NonTSPseudoClass = NonTSPseudoClass;
}
impl<'a> ::selectors::Parser for SelectorParser<'a> {
type Impl = SelectorImpl;
fn parse_non_ts_pseudo_class(&self, name: Cow<str>) -> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_parse {
(bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => {
match_ignore_ascii_case! { &name,
$($css => NonTSPseudoClass::$name,)*
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_non_ts_functional_pseudo_class(&self,
name: Cow<str>,
parser: &mut Parser)
-> Result<NonTSPseudoClass, ()> {
macro_rules! pseudo_class_string_parse { | match_ignore_ascii_case! { &name,
$($s_css => {
let name = parser.expect_ident_or_string()?;
// convert to null terminated utf16 string
// since that's what Gecko deals with
let utf16: Vec<u16> = name.encode_utf16().chain(Some(0u16)).collect();
NonTSPseudoClass::$s_name(utf16.into_boxed_slice())
}, )*
"-moz-any" => {
let selectors = parser.parse_comma_separated(|input| {
ComplexSelector::parse(self, input)
})?;
// Selectors inside `:-moz-any` may not include combinators.
if selectors.iter().flat_map(|x| x.iter_raw()).any(|s| s.is_combinator()) {
return Err(())
}
NonTSPseudoClass::MozAny(selectors.into_boxed_slice())
}
_ => return Err(())
}
}
}
let pseudo_class = apply_non_ts_list!(pseudo_class_string_parse);
if !pseudo_class.is_internal() || self.in_user_agent_stylesheet() {
Ok(pseudo_class)
} else {
Err(())
}
}
fn parse_pseudo_element(&self, name: Cow<str>) -> Result<PseudoElement, ()> {
PseudoElement::from_slice(&name, self.in_user_agent_stylesheet())
.ok_or(())
}
fn default_namespace(&self) -> Option<Namespace> {
self.namespaces.default.clone()
}
fn namespace_for_prefix(&self, prefix: &Atom) -> Option<Namespace> {
self.namespaces.prefixes.get(prefix).cloned()
}
}
impl SelectorImpl {
#[inline]
/// Legacy alias for PseudoElement::cascade_type.
pub fn pseudo_element_cascade_type(pseudo: &PseudoElement) -> PseudoElementCascadeType {
pseudo.cascade_type()
}
/// A helper to traverse each eagerly cascaded pseudo-element, executing
/// `fun` on it.
#[inline]
pub fn each_eagerly_cascaded_pseudo_element<F>(mut fun: F)
where F: FnMut(PseudoElement),
{
for pseudo in &EAGER_PSEUDOS {
fun(pseudo.clone())
}
}
#[inline]
/// Executes a function for each pseudo-element.
pub fn each_pseudo_element<F>(fun: F)
where F: FnMut(PseudoElement),
{
PseudoElement::each(fun)
}
#[inline]
/// Returns the relevant state flag for a given non-tree-structural
/// pseudo-class.
pub fn pseudo_class_state_flag(pc: &NonTSPseudoClass) -> ElementState {
pc.state_flag()
}
} | (bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*]) => { | random_line_split |
auth.js | /*!
* jixiu-h5 Javascript Library
* weiyining - v1.0.0 (2016)
* https://www.jixiuapp.com/ | Released under MIT license
*/
import UserService from '../service/user';
/**
* 验证组件,检测当前用户是否登陆,如没有登陆就跳转到登陆页
* @function Auth
* @author seven
* @version 1.0
*/
const Auth = (nextState, replace, callback) =>
{
if(!UserService.IsAuthenticated())
{
var api = new UserService();
let userRe | port default Auth; | sult = api.activation();
if(userResult==null)
{
replace({
pathname:`/${nextState.params.app}/login`,
state: {nextPathname: nextState.location.pathname}
});
}
//api.activation().then(r=>
//{
// if(r==null)
// {
// replace({
// pathname:`/${nextState.params.app}/login`,
// state: {nextPathname: nextState.location.pathname}
// });
// }
//});
}
if(callback)
{
callback();
}
};
ex | conditional_block |
auth.js | /*!
* jixiu-h5 Javascript Library
* weiyining - v1.0.0 (2016)
* https://www.jixiuapp.com/ | Released under MIT license
*/
import UserService from '../service/user';
| * @function Auth
* @author seven
* @version 1.0
*/
const Auth = (nextState, replace, callback) =>
{
if(!UserService.IsAuthenticated())
{
var api = new UserService();
let userResult = api.activation();
if(userResult==null)
{
replace({
pathname:`/${nextState.params.app}/login`,
state: {nextPathname: nextState.location.pathname}
});
}
//api.activation().then(r=>
//{
// if(r==null)
// {
// replace({
// pathname:`/${nextState.params.app}/login`,
// state: {nextPathname: nextState.location.pathname}
// });
// }
//});
}
if(callback)
{
callback();
}
};
export default Auth; | /**
* 验证组件,检测当前用户是否登陆,如没有登陆就跳转到登陆页 | random_line_split |
overloaded-index-in-field.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test using overloaded indexing when the "map" is stored in a
// field. This caused problems at some point.
#![feature(core)]
use std::ops::Index;
struct Foo {
x: isize,
y: isize,
}
struct Bar {
foo: Foo
}
impl Index<isize> for Foo {
type Output = isize;
fn index(&self, z: isize) -> &isize {
if z == 0 | else {
&self.y
}
}
}
trait Int {
fn get(self) -> isize;
fn get_from_ref(&self) -> isize;
fn inc(&mut self);
}
impl Int for isize {
fn get(self) -> isize { self }
fn get_from_ref(&self) -> isize { *self }
fn inc(&mut self) { *self += 1; }
}
fn main() {
let f = Bar { foo: Foo {
x: 1,
y: 2,
} };
assert_eq!(f.foo[1].get(), 2);
}
| {
&self.x
} | conditional_block |
overloaded-index-in-field.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test using overloaded indexing when the "map" is stored in a
// field. This caused problems at some point.
#![feature(core)]
use std::ops::Index;
struct Foo {
x: isize,
y: isize,
}
struct Bar {
foo: Foo
}
impl Index<isize> for Foo {
type Output = isize;
fn index(&self, z: isize) -> &isize {
if z == 0 {
&self.x
} else {
&self.y
}
}
}
trait Int {
fn get(self) -> isize;
fn get_from_ref(&self) -> isize;
fn inc(&mut self);
}
impl Int for isize {
fn get(self) -> isize { self }
fn get_from_ref(&self) -> isize { *self }
fn inc(&mut self) { *self += 1; }
}
fn main() {
let f = Bar { foo: Foo {
x: 1,
y: 2,
} };
assert_eq!(f.foo[1].get(), 2);
} | random_line_split | |
overloaded-index-in-field.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test using overloaded indexing when the "map" is stored in a
// field. This caused problems at some point.
#![feature(core)]
use std::ops::Index;
struct | {
x: isize,
y: isize,
}
struct Bar {
foo: Foo
}
impl Index<isize> for Foo {
type Output = isize;
fn index(&self, z: isize) -> &isize {
if z == 0 {
&self.x
} else {
&self.y
}
}
}
trait Int {
fn get(self) -> isize;
fn get_from_ref(&self) -> isize;
fn inc(&mut self);
}
impl Int for isize {
fn get(self) -> isize { self }
fn get_from_ref(&self) -> isize { *self }
fn inc(&mut self) { *self += 1; }
}
fn main() {
let f = Bar { foo: Foo {
x: 1,
y: 2,
} };
assert_eq!(f.foo[1].get(), 2);
}
| Foo | identifier_name |
overloaded-index-in-field.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test using overloaded indexing when the "map" is stored in a
// field. This caused problems at some point.
#![feature(core)]
use std::ops::Index;
struct Foo {
x: isize,
y: isize,
}
struct Bar {
foo: Foo
}
impl Index<isize> for Foo {
type Output = isize;
fn index(&self, z: isize) -> &isize |
}
trait Int {
fn get(self) -> isize;
fn get_from_ref(&self) -> isize;
fn inc(&mut self);
}
impl Int for isize {
fn get(self) -> isize { self }
fn get_from_ref(&self) -> isize { *self }
fn inc(&mut self) { *self += 1; }
}
fn main() {
let f = Bar { foo: Foo {
x: 1,
y: 2,
} };
assert_eq!(f.foo[1].get(), 2);
}
| {
if z == 0 {
&self.x
} else {
&self.y
}
} | identifier_body |
server.js | var fs = require("fs");
var express = require("express"),
optimist = require("optimist"),
gitstatic = require("../");
var argv = optimist.usage("Usage: $0")
.options("h", {
alias: "help",
describe: "display this help text"
})
.options("repository", {
default: ".git",
describe: "path to bare git repository"
})
.options("port", {
default: 3000,
describe: "http port"
})
.check(function(argv) {
if (argv.help) throw ""; | var server = express();
server.get(/^\/.*/, gitstatic.route()
.repository(argv.repository));
server.listen(argv.port); | try { var stats = fs.statSync(argv.repository); } catch (e) { throw "Error: " + e.message; }
if (!stats.isDirectory()) throw "Error: invalid --repository directory.";
})
.argv;
| random_line_split |
associated-types-normalize-in-bounds-ufcs.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we normalize associated types that appear in bounds; if
// we didn't, the call to `self.split2()` fails to type check.
// pretty-expanded FIXME #23616
use std::marker::PhantomData;
struct Splits<'a, T:'a, P>(PhantomData<(&'a T, P)>);
struct SplitsN<I>(PhantomData<I>);
trait SliceExt2 {
type Item;
fn split2<'a, P>(&'a self, pred: P) -> Splits<'a, Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn splitn2<'a, P>(&'a self, n: u32, pred: P) -> SplitsN<Splits<'a, Self::Item, P>>
where P: FnMut(&Self::Item) -> bool;
}
impl<T> SliceExt2 for [T] {
type Item = T;
fn split2<P>(&self, pred: P) -> Splits<T, P> where P: FnMut(&T) -> bool {
loop {}
}
fn splitn2<P>(&self, n: u32, pred: P) -> SplitsN<Splits<T, P>> where P: FnMut(&T) -> bool {
SliceExt2::split2(self, pred);
loop {}
}
}
fn main() { } | // | random_line_split |
associated-types-normalize-in-bounds-ufcs.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we normalize associated types that appear in bounds; if
// we didn't, the call to `self.split2()` fails to type check.
// pretty-expanded FIXME #23616
use std::marker::PhantomData;
struct Splits<'a, T:'a, P>(PhantomData<(&'a T, P)>);
struct SplitsN<I>(PhantomData<I>);
trait SliceExt2 {
type Item;
fn split2<'a, P>(&'a self, pred: P) -> Splits<'a, Self::Item, P>
where P: FnMut(&Self::Item) -> bool;
fn splitn2<'a, P>(&'a self, n: u32, pred: P) -> SplitsN<Splits<'a, Self::Item, P>>
where P: FnMut(&Self::Item) -> bool;
}
impl<T> SliceExt2 for [T] {
type Item = T;
fn split2<P>(&self, pred: P) -> Splits<T, P> where P: FnMut(&T) -> bool {
loop {}
}
fn splitn2<P>(&self, n: u32, pred: P) -> SplitsN<Splits<T, P>> where P: FnMut(&T) -> bool {
SliceExt2::split2(self, pred);
loop {}
}
}
fn | () { }
| main | identifier_name |
progress.service.ts | import { Injectable } from '@angular/core';
@Injectable({
providedIn: 'root'
})
export class ProgressService {
private progress: number = 0;
private _total: number = 0;
set total(t: number) {
this._total = t;
}
get percentValue(): number {
let p: number = Math.floor((this.progress / this._total) * 100);
if (p < 0 || isNaN(p) || p === Infinity) {
p = 0;
} else if (p > 100) {
p = 100;
}
return p;
}
| () { }
addToProgress(n: number) {
if (n < 0 || isNaN(n) || n === Infinity) {
n = 0;
}
this.progress += n;
}
updateProgress(n: number) {
if (n < 0 || isNaN(n) || n === Infinity) {
n = 0
} else if (n > 100) {
n = 100
}
this.progress = n;
}
isComplete(): boolean {
if (this.progress >= this._total && this.progress > 0 && this._total > 0) {
return true;
}
return false;
}
} | constructor | identifier_name |
progress.service.ts | import { Injectable } from '@angular/core';
@Injectable({
providedIn: 'root'
})
export class ProgressService {
private progress: number = 0;
private _total: number = 0;
set total(t: number) {
this._total = t;
}
get percentValue(): number {
let p: number = Math.floor((this.progress / this._total) * 100);
if (p < 0 || isNaN(p) || p === Infinity) {
p = 0;
} else if (p > 100) {
p = 100;
}
return p;
}
constructor() { }
addToProgress(n: number) {
if (n < 0 || isNaN(n) || n === Infinity) {
n = 0;
}
this.progress += n;
}
updateProgress(n: number) {
if (n < 0 || isNaN(n) || n === Infinity) {
n = 0
} else if (n > 100) {
n = 100
}
this.progress = n;
}
isComplete(): boolean {
if (this.progress >= this._total && this.progress > 0 && this._total > 0) |
return false;
}
} | {
return true;
} | conditional_block |
progress.service.ts | import { Injectable } from '@angular/core';
@Injectable({
providedIn: 'root'
})
export class ProgressService {
private progress: number = 0;
private _total: number = 0;
set total(t: number) {
this._total = t;
}
get percentValue(): number {
let p: number = Math.floor((this.progress / this._total) * 100);
if (p < 0 || isNaN(p) || p === Infinity) {
p = 0;
} else if (p > 100) {
p = 100;
}
return p;
}
constructor() { }
addToProgress(n: number) {
if (n < 0 || isNaN(n) || n === Infinity) {
n = 0;
}
this.progress += n;
}
updateProgress(n: number) {
if (n < 0 || isNaN(n) || n === Infinity) {
n = 0
} else if (n > 100) {
n = 100
}
this.progress = n;
}
isComplete(): boolean |
} | {
if (this.progress >= this._total && this.progress > 0 && this._total > 0) {
return true;
}
return false;
} | identifier_body |
progress.service.ts | import { Injectable } from '@angular/core';
@Injectable({
providedIn: 'root'
})
export class ProgressService {
private progress: number = 0;
private _total: number = 0;
set total(t: number) {
this._total = t;
}
get percentValue(): number {
let p: number = Math.floor((this.progress / this._total) * 100);
if (p < 0 || isNaN(p) || p === Infinity) {
p = 0;
} else if (p > 100) {
p = 100;
}
return p;
}
constructor() { }
addToProgress(n: number) {
if (n < 0 || isNaN(n) || n === Infinity) {
n = 0;
}
this.progress += n;
}
updateProgress(n: number) { | if (n < 0 || isNaN(n) || n === Infinity) {
n = 0
} else if (n > 100) {
n = 100
}
this.progress = n;
}
isComplete(): boolean {
if (this.progress >= this._total && this.progress > 0 && this._total > 0) {
return true;
}
return false;
}
} | random_line_split | |
index.tsx | import React, { Component } from 'react'
import { ActivityIndicator, View } from 'react-native'
import { decode } from 'he'
import styled from 'styled-components/native'
import Image from 'react-native-fast-image'
import TruncatedHTML from '../TruncatedHTML'
import BlockItemIcon from '../BlockItemIcon'
import { TouchableHighlight } from '../UI/Layout'
import BlockModalMenu from '../BlockModalMenu'
import { openModal } from '../Modal'
import navigationService from '../../utilities/navigationService'
import { Typography, Units, lineHeightFor } from '../../constants/Style'
import { baseStyles } from '../../constants/HtmlView'
import { getLayout } from '../../utilities/screenSize'
import { StyledText } from '../StyledText'
export const BLOCK_METADATA_HEIGHT = Units.scale[4] + Units.base
export const BLOCK_SIZES = () => ({
'1-up': Units.window.width() - Units.scale[4] * 2,
'2-up': Units.window.width() / 2 - Units.scale[1] * 3,
'3-up': Units.window.width() / 3 - Units.scale[1] * 4,
'4-up': Units.window.width() / 4 - Units.scale[1] * 5,
})
interface IContainerProps {
size: TBlockItemSize
onPress: () => void
onLongPress: () => void
}
const Container = styled<IContainerProps>(TouchableHighlight)`
width: ${x => BLOCK_SIZES()[x.size]};
height: ${x => BLOCK_SIZES()[x.size] + BLOCK_METADATA_HEIGHT};
`
interface IOutlineProps {
hasImage: boolean
size: TBlockItemSize
}
const Outline = styled.View<IOutlineProps>`
width: 100%;
height: ${x => BLOCK_SIZES()[x.size]};
border-width: ${({ theme, hasImage, size }) => (hasImage && size === '2-up' ? 0 : theme.borderWidth)};
border-color: ${({ theme }) => theme.borderColor};
overflow: hidden;
`
interface IThumbnailProps {
cache: string
source: {
uri: string
cache: string
}
resizeMode: string
}
const Thumbnail = styled<IThumbnailProps>(Image).attrs({
resizeMode: 'contain',
})`
width: 100%;
height: 100%;
` | const Metadata = styled.View<IMetadataProps>`
height: ${BLOCK_METADATA_HEIGHT};
padding-top: ${Units.scale[2]};
`
const MetadataTitle = styled.View`
flex-direction: row;
align-items: center;
justify-content: center;
`
interface ITitleProps {
size: TBlockItemSize
}
const Title = styled(StyledText)<ITitleProps>`
color: ${({ theme }) => theme.labels};
font-size: ${x => Typography.fontSize[x.size === '1-up' ? 'base' : 'small']};
line-height: ${x => lineHeightFor(x.size === '1-up' ? 'base' : 'small')};
max-width: 90%;
flex-basis: auto;
text-align: center;
`
const ProcessingIndicator = styled(ActivityIndicator)`
height: 100%;
`
export type TBlockItemSize = '4-up' | '3-up' | '2-up' | '1-up'
interface IBlockItemProps {
size?: TBlockItemSize
block: any
channel?: any
style?: any
}
export default class BlockItem extends Component<IBlockItemProps, {}> {
onPress = () => {
const { state } = this.props.block
state === 'available' &&
navigationService.navigate('block', {
id: this.props.block.id,
title: this.props.block.title,
})
}
onLongPress = () => {
const { block, channel } = this.props
openModal({
children: <BlockModalMenu block={block} channel={channel} />,
})
}
render() {
const {
size,
block,
block: {
state,
kind: { __typename },
},
...rest
} = this.props
const title = block.title ? decode(block.title) : null
let inner
switch (__typename) {
case 'Attachment':
case 'Embed':
case 'Link':
case 'Image':
if (block.kind.image_url) {
inner = (
<Thumbnail
cache="force-cache"
source={{
uri: block.kind.image_url,
}}
/>
)
} else {
inner = <View />
}
break
case 'Text':
inner = (
<TruncatedHTML size={size} value={block.kind.content} numberOfFadedLines={1} stylesheet={baseStyles()} />
)
break
default:
inner = <StyledText>{block.title}</StyledText>
break
}
if (state !== 'available' && state !== 'failed') {
inner = <ProcessingIndicator />
}
return (
<Container size={size} onPress={this.onPress} onLongPress={this.onLongPress} {...rest}>
<Outline hasImage={!!block.kind.image_url} size={size}>
{inner}
</Outline>
<Metadata size={size}>
<MetadataTitle>
{title && (
<Title size={size} numberOfLines={1}>
{title}
</Title>
)}
<BlockItemIcon type={__typename} />
</MetadataTitle>
</Metadata>
</Container>
)
}
static defaultProps = {
size: getLayout(['2-up', '3-up', '4-up']),
channel: null,
}
} |
interface IMetadataProps {
size: TBlockItemSize
}
| random_line_split |
urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# myapp.urls
##
##
# Copyright (C) $YEAR$, $AUTHOR_NAME$ <$AUTHOR_EMAIL$>
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of version 3 of the GNU Affero General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | # You should have received a copy of the GNU General Public License along
# with this source code; if not, see <http://www.gnu.org/licenses/>,
# or write to
#
# Free Software Foundation, Inc.
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301 USA
##
##
# End of File
## | # GNU Affero General Public License for more details.
# | random_line_split |
vue-form.js | import Vue from 'vue';
import VueForm from 'vue-form';
Vue.use(VueForm, {
validators: {
'step': function(value, stepValue) {
return stepValue === `any` || Number(value) % Number(stepValue) === 0;
},
'data-exclusive-minimum': function(value, exclusiveMinimum) {
return Number(value) > Number(exclusiveMinimum);
},
'data-exclusive-maximum': function(value, exclusiveMaximum) {
return Number(value) < Number(exclusiveMaximum);
},
'complete-range': function(range) {
return range === null || (range[0] !== null && range[1] !== null);
},
'valid-range': function(range) {
if (range === null) {
// allowed range
return true;
}
if (range[0] === null || range[1] === null) |
if (Number.isNaN(range[0]) || Number.isNaN(range[1])) {
// let number validator handle this
return true;
}
return range[0] <= range[1];
},
'categories-not-empty': function(categories) {
return categories.length > 0;
},
'complete-dimensions': function(dimensions) {
return dimensions === null || (dimensions[0] !== null && dimensions[1] !== null && dimensions[2] !== null);
},
'start-with-uppercase-or-number': function(value) {
return /^[\dA-Z]/.test(value);
},
'no-mode-name': function(value) {
return !/\bmode\b/i.test(value);
},
'no-fine-channel-name': function(value) {
if (/\bfine\b|\d+[\s_-]*bit/i.test(value)) {
return false;
}
return !/\bLSB\b|\bMSB\b/.test(value);
},
'entity-complete': function(value, attributeValue, vnode) {
const component = vnode.componentInstance;
if (component.hasNumber) {
return component.selectedNumber !== `` && component.selectedNumber !== null;
}
return true;
},
'entities-have-same-units': function(value, attributeValue, vnode) {
return vnode.componentInstance.hasSameUnit;
},
'valid-color-hex-list': function(value) {
return /^\s*#[\da-f]{6}(?:\s*,\s*#[\da-f]{6})*\s*$/i.test(value);
},
'max-file-size': function(file, attributeValue) {
if (typeof file === `object`) {
let maxSize = Number.parseInt(attributeValue, 10);
if (attributeValue.includes(`M`)) {
maxSize *= 1000 * 1000;
}
else if (attributeValue.includes(`k`)) {
maxSize *= 1000;
}
return file.size <= maxSize;
}
return true;
},
},
});
| {
// let complete-range validator handle this
return true;
} | conditional_block |
vue-form.js | import Vue from 'vue';
import VueForm from 'vue-form';
Vue.use(VueForm, {
validators: {
'step': function(value, stepValue) {
return stepValue === `any` || Number(value) % Number(stepValue) === 0;
},
'data-exclusive-minimum': function(value, exclusiveMinimum) {
return Number(value) > Number(exclusiveMinimum);
},
'data-exclusive-maximum': function(value, exclusiveMaximum) {
return Number(value) < Number(exclusiveMaximum);
},
'complete-range': function(range) {
return range === null || (range[0] !== null && range[1] !== null);
},
'valid-range': function(range) {
if (range === null) {
// allowed range
return true;
}
if (range[0] === null || range[1] === null) {
// let complete-range validator handle this
return true;
}
if (Number.isNaN(range[0]) || Number.isNaN(range[1])) {
// let number validator handle this
return true;
}
return range[0] <= range[1];
},
'categories-not-empty': function(categories) {
return categories.length > 0;
},
'complete-dimensions': function(dimensions) {
return dimensions === null || (dimensions[0] !== null && dimensions[1] !== null && dimensions[2] !== null);
},
'start-with-uppercase-or-number': function(value) {
return /^[\dA-Z]/.test(value);
},
'no-mode-name': function(value) {
return !/\bmode\b/i.test(value);
},
'no-fine-channel-name': function(value) {
if (/\bfine\b|\d+[\s_-]*bit/i.test(value)) {
return false;
}
return !/\bLSB\b|\bMSB\b/.test(value);
},
'entity-complete': function(value, attributeValue, vnode) {
const component = vnode.componentInstance;
if (component.hasNumber) {
return component.selectedNumber !== `` && component.selectedNumber !== null;
}
return true;
},
'entities-have-same-units': function(value, attributeValue, vnode) {
return vnode.componentInstance.hasSameUnit;
},
'valid-color-hex-list': function(value) {
return /^\s*#[\da-f]{6}(?:\s*,\s*#[\da-f]{6})*\s*$/i.test(value);
},
'max-file-size': function(file, attributeValue) { | maxSize *= 1000 * 1000;
}
else if (attributeValue.includes(`k`)) {
maxSize *= 1000;
}
return file.size <= maxSize;
}
return true;
},
},
}); | if (typeof file === `object`) {
let maxSize = Number.parseInt(attributeValue, 10);
if (attributeValue.includes(`M`)) { | random_line_split |
layout.ts | /**
* @license
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Import Modules
import '../modules/span_graph_module';
import {LitModuleType} from '../core/lit_module';
import {LitComponentLayouts} from '../lib/types';
import {AnnotatedTextGoldModule, AnnotatedTextModule} from '../modules/annotated_text_module';
import {AttentionModule} from '../modules/attention_module';
import {ClassificationModule} from '../modules/classification_module';
import {ColorModule} from '../modules/color_module';
import {ConfusionMatrixModule} from '../modules/confusion_matrix_module';
import {CounterfactualExplainerModule} from '../modules/counterfactual_explainer_module';
import {DataTableModule, SimpleDataTableModule} from '../modules/data_table_module';
import {DatapointEditorModule, SimpleDatapointEditorModule} from '../modules/datapoint_editor_module';
import {EmbeddingsModule} from '../modules/embeddings_module';
import {GeneratedImageModule} from '../modules/generated_image_module';
import {GeneratedTextModule} from '../modules/generated_text_module';
import {GeneratorModule} from '../modules/generator_module';
import {LanguageModelPredictionModule} from '../modules/lm_prediction_module';
import {MetricsModule} from '../modules/metrics_module';
import {MultilabelModule} from '../modules/multilabel_module';
import {PdpModule} from '../modules/pdp_module';
import {RegressionModule} from '../modules/regression_module';
import {SalienceMapModule} from '../modules/salience_map_module';
import {ScalarModule} from '../modules/scalar_module';
import {SequenceSalienceModule} from '../modules/sequence_salience_module';
import {SliceModule} from '../modules/slice_module';
import {SpanGraphGoldModuleVertical, SpanGraphModuleVertical} from '../modules/span_graph_module';
import {TCAVModule} from '../modules/tcav_module';
import {ThresholderModule} from '../modules/thresholder_module';
// clang-format off
const MODEL_PREDS_MODULES: LitModuleType[] = [
SpanGraphGoldModuleVertical,
SpanGraphModuleVertical,
ClassificationModule,
MultilabelModule,
RegressionModule,
LanguageModelPredictionModule, | GeneratedTextModule,
AnnotatedTextGoldModule,
AnnotatedTextModule,
GeneratedImageModule,
];
const DEFAULT_MAIN_GROUP: LitModuleType[] = [
DataTableModule,
DatapointEditorModule,
SliceModule,
ColorModule,
];
// clang-format on
// clang-format off
/**
* Possible layouts for LIT (component groups and settigns.)
*/
export const LAYOUTS: LitComponentLayouts = {
/**
* A "simple demo server" layout.
*/
'simple': {
upper: {
"Editor": [SimpleDatapointEditorModule],
"Examples": [SimpleDataTableModule],
},
lower: {
'Predictions': [ ...MODEL_PREDS_MODULES],
'Salience': [SalienceMapModule, SequenceSalienceModule],
},
layoutSettings: {
hideToolbar: true,
mainHeight: 30,
centerPage: true
},
description: 'A basic layout just containing a datapoint creator/editor, the predictions, and the data table. There are also some visual simplifications: the toolbar is hidden, and the modules are centered on the page rather than being full width.'
},
/**
* A default layout for LIT Modules
*/
'default': {
components : {
'Main': [EmbeddingsModule, ...DEFAULT_MAIN_GROUP],
'Predictions': [
...MODEL_PREDS_MODULES,
ScalarModule,
PdpModule,
],
'Explanations': [
...MODEL_PREDS_MODULES,
SalienceMapModule,
SequenceSalienceModule,
AttentionModule,
],
'Metrics': [
MetricsModule,
ConfusionMatrixModule,
ThresholderModule,
],
'Counterfactuals': [GeneratorModule, CounterfactualExplainerModule],
'TCAV': [
TCAVModule,
],
},
description: "The default LIT layout, which includes the data table and data point editor, the performance and metrics, predictions, explanations, and counterfactuals."
},
};
// clang-format on | random_line_split | |
custom-typings.d.ts | /*
* Custom Type Definitions
* When including 3rd party modules you also need to include the type definition for the module
* if they don't provide one within the module. You can try to install it with typings
typings install node --save
* If you can't find the type definition in the registry we can make an ambient definition in
* this file for now. For example
declare module "my-module" {
export function doesSomething(value: string): string;
}
*
* If you're prototying and you will fix the types later you can also declare it as type any
*
declare var assert: any;
*
* If you're importing a module that uses Node.js modules which are CommonJS you need to import as
*
import * as _ from 'lodash'
* You can include your type definitions in this file until you create one for the typings registry
* see https://github.com/typings/registry
*
*/
// Extra variables that live on Global that will be replaced by webpack DefinePlugin
declare var ENV: string;
declare var HMR: boolean;
interface GlobalEnvironment {
ENV;
HMR;
}
interface WebpackModule {
hot: {
data?: any,
idle: any,
accept(dependencies?: string | string[], callback?: (updatedDependencies?: any) => void): void;
decline(dependencies?: string | string[]): void;
dispose(callback?: (data?: any) => void): void;
addDisposeHandler(callback?: (data?: any) => void): void;
removeDisposeHandler(callback?: (data?: any) => void): void;
check(autoApply?: any, callback?: (err?: Error, outdatedModules?: any[]) => void): void;
apply(options?: any, callback?: (err?: Error, outdatedModules?: any[]) => void): void;
status(callback?: (status?: string) => void): void | string;
removeStatusHandler(callback?: (status?: string) => void): void;
};
}
interface WebpackRequire {
context(file: string, flag?: boolean, exp?: RegExp): any;
}
interface ErrorStackTraceLimit {
stackTraceLimit: number;
}
// Extend typings
interface NodeRequire extends WebpackRequire {}
interface ErrorConstructor extends ErrorStackTraceLimit {}
interface NodeModule extends WebpackModule {}
interface Global extends GlobalEnvironment {}
declare namespace Reflect {
function decorate(decorators: ClassDecorator[], target: Function): Function;
function decorate(
decorators: (PropertyDecorator | MethodDecorator)[],
target: Object,
targetKey: string | symbol,
descriptor?: PropertyDescriptor): PropertyDescriptor;
function metadata(metadataKey: any, metadataValue: any): {
(target: Function): void;
(target: Object, propertyKey: string | symbol): void;
};
function defineMetadata(metadataKey: any, metadataValue: any, target: Object): void;
function defineMetadata(
metadataKey: any,
metadataValue: any,
target: Object,
targetKey: string | symbol): void;
function hasMetadata(metadataKey: any, target: Object): boolean;
function hasMetadata(metadataKey: any, target: Object, targetKey: string | symbol): boolean;
function hasOwnMetadata(metadataKey: any, target: Object): boolean;
function hasOwnMetadata(metadataKey: any, target: Object, targetKey: string | symbol): boolean;
function getMetadata(metadataKey: any, target: Object): any;
function getMetadata(metadataKey: any, target: Object, targetKey: string | symbol): any;
function getOwnMetadata(metadataKey: any, target: Object): any;
function getOwnMetadata(metadataKey: any, target: Object, targetKey: string | symbol): any;
function getMetadataKeys(target: Object): any[];
function getMetadataKeys(target: Object, targetKey: string | symbol): any[];
function getOwnMetadataKeys(target: Object): any[];
function getOwnMetadataKeys(target: Object, targetKey: string | symbol): any[];
function deleteMetadata(metadataKey: any, target: Object): boolean;
function deleteMetadata(metadataKey: any, target: Object, targetKey: string | symbol): boolean;
}
// We need this here since there is a problem with Zone.js typings
interface Thenable<T> {
then<U>(
onFulfilled?: (value: T) => U | Thenable<U>,
onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
then<U>(
onFulfilled?: (value: T) => U | Thenable<U>,
onRejected?: (error: any) => void): Thenable<U>;
catch<U>(onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
}
declare namespace app {
export namespace Faroo {
export interface Constants {
apiBase: string;
sources: {
web: string;
news: string;
topics: string;
trends: string;
}
}
export interface Response {
query: string; | start: number;
length: number;
time: number;
results: Result[];
}
export interface Result {
title: string;
kwic: string;
content: string;
url: string;
iurl: string;
author: string;
date: number;
domain: string;
news: boolean;
number: boolean;
}
}
export interface Search {
get(
source: string,
query: string,
start?: number,
length?: number
): any;
web(
query: string,
start?: number,
length?: number
): any;
news(
query: string,
start?: number,
length?: number
): any;
topics(
start?: number,
length?: number
): any;
trends(
start?: number,
length?: number
): any;
}
} | suggestions: any[];
count: number; | random_line_split |
middleware.ts | import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as sourcemaps from 'gulp-sourcemaps';
import * as plumber from 'gulp-plumber';
import * as postcss from 'gulp-postcss';
import * as concat from 'gulp-concat';
import * as tap from 'gulp-tap';
import * as gulpif from 'gulp-if';
const ERROR_PREFIX = '[postcss-middleware]';
function PostCssMiddleware(options: PostCssMiddleware.Options = <any>{}) {
if (!options.plugins) {
throw new Error(`${ERROR_PREFIX} missing required option: plugins`);
}
if (!Array.isArray(options.plugins)) {
throw new TypeError(`${ERROR_PREFIX} plugins option must be an array`);
}
if (options.src && typeof options.src !== 'function') {
throw new TypeError(`${ERROR_PREFIX} src option must be a function`);
}
if (options.options && typeof options.options !== 'object') {
throw new TypeError(`${ERROR_PREFIX} options option must be an object`);
}
const src = options.src || (req => path.join(__dirname, req.url));
return (req, res, next: Function) => {
if (req.method !== 'GET' && req.method !== 'HEAD') {
next();
return;
}
const globs = src(req);
if (typeof globs !== 'string' && !Array.isArray(globs)) {
next(new TypeError(`${ERROR_PREFIX} src callback must return a glob string or array`));
return;
}
let isFileFound = false;
vfs.src(globs, <any>{ allowEmpty: false })
.on('error', err => {
if (err.message.match(/File not found/i)) |
return next(err);
})
.pipe(plumber({ errorHandler: err => next(err) }))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.init()))
.pipe(postcss(options.plugins, options.options))
.pipe(concat('.css'))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.write()))
.pipe(tap(file => {
isFileFound = true;
res.writeHead(200, {
'Content-Type': 'text/css'
});
res.end(file.contents);
}))
.on('end', () => {
if (!isFileFound) {
next();
}
});
};
}
module PostCssMiddleware {
export interface Options {
/**
* An array of PostCSS plugins.
*/
plugins: Function[];
/**
* PostCSS options
*/
options?: Object;
/**
* Build the file path to the source file(s) you wish to read.
*/
src?:
/**
* @param request The Express app's request object.
* @returns A glob string or an array of glob strings. All files matched
* will be concatenated in the response.
*/
(request: any) => string|string[];
/**
* Generate inlined sourcemaps.
*/
inlineSourcemaps?: boolean;
}
}
export = PostCssMiddleware;
| {
return next();
} | conditional_block |
middleware.ts | import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as sourcemaps from 'gulp-sourcemaps';
import * as plumber from 'gulp-plumber';
import * as postcss from 'gulp-postcss';
import * as concat from 'gulp-concat';
import * as tap from 'gulp-tap';
import * as gulpif from 'gulp-if';
const ERROR_PREFIX = '[postcss-middleware]';
function PostCssMiddleware(options: PostCssMiddleware.Options = <any>{}) {
if (!options.plugins) {
throw new Error(`${ERROR_PREFIX} missing required option: plugins`);
}
if (!Array.isArray(options.plugins)) {
throw new TypeError(`${ERROR_PREFIX} plugins option must be an array`);
}
if (options.src && typeof options.src !== 'function') {
throw new TypeError(`${ERROR_PREFIX} src option must be a function`);
}
if (options.options && typeof options.options !== 'object') {
throw new TypeError(`${ERROR_PREFIX} options option must be an object`);
}
const src = options.src || (req => path.join(__dirname, req.url));
return (req, res, next: Function) => {
if (req.method !== 'GET' && req.method !== 'HEAD') {
next();
return;
}
const globs = src(req);
if (typeof globs !== 'string' && !Array.isArray(globs)) {
next(new TypeError(`${ERROR_PREFIX} src callback must return a glob string or array`));
return;
}
let isFileFound = false;
vfs.src(globs, <any>{ allowEmpty: false }) | return next();
}
return next(err);
})
.pipe(plumber({ errorHandler: err => next(err) }))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.init()))
.pipe(postcss(options.plugins, options.options))
.pipe(concat('.css'))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.write()))
.pipe(tap(file => {
isFileFound = true;
res.writeHead(200, {
'Content-Type': 'text/css'
});
res.end(file.contents);
}))
.on('end', () => {
if (!isFileFound) {
next();
}
});
};
}
module PostCssMiddleware {
export interface Options {
/**
* An array of PostCSS plugins.
*/
plugins: Function[];
/**
* PostCSS options
*/
options?: Object;
/**
* Build the file path to the source file(s) you wish to read.
*/
src?:
/**
* @param request The Express app's request object.
* @returns A glob string or an array of glob strings. All files matched
* will be concatenated in the response.
*/
(request: any) => string|string[];
/**
* Generate inlined sourcemaps.
*/
inlineSourcemaps?: boolean;
}
}
export = PostCssMiddleware; | .on('error', err => {
if (err.message.match(/File not found/i)) { | random_line_split |
middleware.ts | import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as sourcemaps from 'gulp-sourcemaps';
import * as plumber from 'gulp-plumber';
import * as postcss from 'gulp-postcss';
import * as concat from 'gulp-concat';
import * as tap from 'gulp-tap';
import * as gulpif from 'gulp-if';
const ERROR_PREFIX = '[postcss-middleware]';
function PostCssMiddleware(options: PostCssMiddleware.Options = <any>{}) |
module PostCssMiddleware {
export interface Options {
/**
* An array of PostCSS plugins.
*/
plugins: Function[];
/**
* PostCSS options
*/
options?: Object;
/**
* Build the file path to the source file(s) you wish to read.
*/
src?:
/**
* @param request The Express app's request object.
* @returns A glob string or an array of glob strings. All files matched
* will be concatenated in the response.
*/
(request: any) => string|string[];
/**
* Generate inlined sourcemaps.
*/
inlineSourcemaps?: boolean;
}
}
export = PostCssMiddleware;
| {
if (!options.plugins) {
throw new Error(`${ERROR_PREFIX} missing required option: plugins`);
}
if (!Array.isArray(options.plugins)) {
throw new TypeError(`${ERROR_PREFIX} plugins option must be an array`);
}
if (options.src && typeof options.src !== 'function') {
throw new TypeError(`${ERROR_PREFIX} src option must be a function`);
}
if (options.options && typeof options.options !== 'object') {
throw new TypeError(`${ERROR_PREFIX} options option must be an object`);
}
const src = options.src || (req => path.join(__dirname, req.url));
return (req, res, next: Function) => {
if (req.method !== 'GET' && req.method !== 'HEAD') {
next();
return;
}
const globs = src(req);
if (typeof globs !== 'string' && !Array.isArray(globs)) {
next(new TypeError(`${ERROR_PREFIX} src callback must return a glob string or array`));
return;
}
let isFileFound = false;
vfs.src(globs, <any>{ allowEmpty: false })
.on('error', err => {
if (err.message.match(/File not found/i)) {
return next();
}
return next(err);
})
.pipe(plumber({ errorHandler: err => next(err) }))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.init()))
.pipe(postcss(options.plugins, options.options))
.pipe(concat('.css'))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.write()))
.pipe(tap(file => {
isFileFound = true;
res.writeHead(200, {
'Content-Type': 'text/css'
});
res.end(file.contents);
}))
.on('end', () => {
if (!isFileFound) {
next();
}
});
};
} | identifier_body |
middleware.ts | import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as sourcemaps from 'gulp-sourcemaps';
import * as plumber from 'gulp-plumber';
import * as postcss from 'gulp-postcss';
import * as concat from 'gulp-concat';
import * as tap from 'gulp-tap';
import * as gulpif from 'gulp-if';
const ERROR_PREFIX = '[postcss-middleware]';
function | (options: PostCssMiddleware.Options = <any>{}) {
if (!options.plugins) {
throw new Error(`${ERROR_PREFIX} missing required option: plugins`);
}
if (!Array.isArray(options.plugins)) {
throw new TypeError(`${ERROR_PREFIX} plugins option must be an array`);
}
if (options.src && typeof options.src !== 'function') {
throw new TypeError(`${ERROR_PREFIX} src option must be a function`);
}
if (options.options && typeof options.options !== 'object') {
throw new TypeError(`${ERROR_PREFIX} options option must be an object`);
}
const src = options.src || (req => path.join(__dirname, req.url));
return (req, res, next: Function) => {
if (req.method !== 'GET' && req.method !== 'HEAD') {
next();
return;
}
const globs = src(req);
if (typeof globs !== 'string' && !Array.isArray(globs)) {
next(new TypeError(`${ERROR_PREFIX} src callback must return a glob string or array`));
return;
}
let isFileFound = false;
vfs.src(globs, <any>{ allowEmpty: false })
.on('error', err => {
if (err.message.match(/File not found/i)) {
return next();
}
return next(err);
})
.pipe(plumber({ errorHandler: err => next(err) }))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.init()))
.pipe(postcss(options.plugins, options.options))
.pipe(concat('.css'))
.pipe(gulpif(options.inlineSourcemaps, sourcemaps.write()))
.pipe(tap(file => {
isFileFound = true;
res.writeHead(200, {
'Content-Type': 'text/css'
});
res.end(file.contents);
}))
.on('end', () => {
if (!isFileFound) {
next();
}
});
};
}
module PostCssMiddleware {
export interface Options {
/**
* An array of PostCSS plugins.
*/
plugins: Function[];
/**
* PostCSS options
*/
options?: Object;
/**
* Build the file path to the source file(s) you wish to read.
*/
src?:
/**
* @param request The Express app's request object.
* @returns A glob string or an array of glob strings. All files matched
* will be concatenated in the response.
*/
(request: any) => string|string[];
/**
* Generate inlined sourcemaps.
*/
inlineSourcemaps?: boolean;
}
}
export = PostCssMiddleware;
| PostCssMiddleware | identifier_name |
environment.js | var fs = null;
function showWarning(resolution, APISupport) |
function setupEnvironment() {
// check resolution
if(screen.width<1280 || screen.height < 768) {
showWarning(true, false);
}
// setup file system
try {
window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
window.BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder;
window.requestFileSystem(window.TEMPORARY, 5*1024*1024 /*5MB*/,
// callback if the new filesystem is correctly initiated
function(newFS) {
fs = newFS;
setupPictureUpload(); // locate in settings.js because of asynchronous shit
setupRestoreState();
},
// could not initiate the filesystem; no point in going further
function(e) {
showWarning(false, true);
errorHandler(e);});
} catch(err) {
showWarning(false, true);
}
window.onresize = function() {
resizeCanvas();
resizeSettings();
refresh();
}
}
function resizeCanvas() {
canvas.width = document.width;
canvas.height = document.height;
}
| {
// set up text according to actual issue
if(resolution && APISupport) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>\
<p>You also have to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
} else if(resolution) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>');
} else if (APISupport) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
}
// display error message if there's an issue
if(resolution || APISupport) {
$('section#warning').show();
}
} | identifier_body |
environment.js | var fs = null;
function showWarning(resolution, APISupport) {
// set up text according to actual issue
if(resolution && APISupport) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>\
<p>You also have to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
} else if(resolution) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>');
} else if (APISupport) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
}
// display error message if there's an issue
if(resolution || APISupport) {
$('section#warning').show();
}
}
function | () {
// check resolution
if(screen.width<1280 || screen.height < 768) {
showWarning(true, false);
}
// setup file system
try {
window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
window.BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder;
window.requestFileSystem(window.TEMPORARY, 5*1024*1024 /*5MB*/,
// callback if the new filesystem is correctly initiated
function(newFS) {
fs = newFS;
setupPictureUpload(); // locate in settings.js because of asynchronous shit
setupRestoreState();
},
// could not initiate the filesystem; no point in going further
function(e) {
showWarning(false, true);
errorHandler(e);});
} catch(err) {
showWarning(false, true);
}
window.onresize = function() {
resizeCanvas();
resizeSettings();
refresh();
}
}
function resizeCanvas() {
canvas.width = document.width;
canvas.height = document.height;
}
| setupEnvironment | identifier_name |
environment.js | var fs = null;
function showWarning(resolution, APISupport) {
// set up text according to actual issue
if(resolution && APISupport) | else if(resolution) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>');
} else if (APISupport) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
}
// display error message if there's an issue
if(resolution || APISupport) {
$('section#warning').show();
}
}
function setupEnvironment() {
// check resolution
if(screen.width<1280 || screen.height < 768) {
showWarning(true, false);
}
// setup file system
try {
window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
window.BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder;
window.requestFileSystem(window.TEMPORARY, 5*1024*1024 /*5MB*/,
// callback if the new filesystem is correctly initiated
function(newFS) {
fs = newFS;
setupPictureUpload(); // locate in settings.js because of asynchronous shit
setupRestoreState();
},
// could not initiate the filesystem; no point in going further
function(e) {
showWarning(false, true);
errorHandler(e);});
} catch(err) {
showWarning(false, true);
}
window.onresize = function() {
resizeCanvas();
resizeSettings();
refresh();
}
}
function resizeCanvas() {
canvas.width = document.width;
canvas.height = document.height;
}
| {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>\
<p>You also have to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
} | conditional_block |
environment.js | var fs = null;
| <p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>\
<p>You also have to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
} else if(resolution) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to have a resolution of at least 1280x768px</p>');
} else if (APISupport) {
$('section#warning').html('\
<h1>Oops</h1>\
<p>In order to take advantage of framing app, you need to use a browser supporting the HTML5 FileSystem & FileWriter API.</p>\
<p>In plain english: Google Chrome (at least version 13.0) </p>');
}
// display error message if there's an issue
if(resolution || APISupport) {
$('section#warning').show();
}
}
function setupEnvironment() {
// check resolution
if(screen.width<1280 || screen.height < 768) {
showWarning(true, false);
}
// setup file system
try {
window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
window.BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder;
window.requestFileSystem(window.TEMPORARY, 5*1024*1024 /*5MB*/,
// callback if the new filesystem is correctly initiated
function(newFS) {
fs = newFS;
setupPictureUpload(); // locate in settings.js because of asynchronous shit
setupRestoreState();
},
// could not initiate the filesystem; no point in going further
function(e) {
showWarning(false, true);
errorHandler(e);});
} catch(err) {
showWarning(false, true);
}
window.onresize = function() {
resizeCanvas();
resizeSettings();
refresh();
}
}
function resizeCanvas() {
canvas.width = document.width;
canvas.height = document.height;
} | function showWarning(resolution, APISupport) {
// set up text according to actual issue
if(resolution && APISupport) {
$('section#warning').html('\
<h1>Oops</h1>\ | random_line_split |
BaseController.ts | /**
* BaseController
*
* Classe abstrata com as funções comuns a todos os controllers
*
* @author Henrique de Castro | */
import * as express from 'express';
export class BaseController {
/**
* showSuccess
*
* Exibe um retorno de sucesso
*
* @author Henrique de Castro
* @since 12/2016
* @param array
* @param object
* @return void
*/
public showSucess(data: Object, res: express.Response) {
// Adiciona o sucesso nos dados
let ret = {};
ret['status'] = true;
ret['error'] = false;
ret['data'] = data;
// Exibe o retorno
res.json(ret);
}
/**
* showError
*
* Exibe um retorno de erro
*
* @author Henrique de Castro
* @since 12/2016
* @param string
* @param object
* @return void
*/
public showError(error: String, res: express.Response) {
// Adiciona erro nos dados
let data = {};
data['status'] = false;
data['error'] = error;
// Exibe o retorno
res.json(data);
}
/**
* showAccessDenied
*
* Exibe um retorno de acesso negado
*
* @author Henrique de Castro
* @since 12/2016
* @param object
* @return void
*/
public showAccessDenied(req: express.Request, res: express.Response) {
// Adiciona erro nos dados
let data = {};
data['status'] = false;
data['error'] = 'Acesso negado';
// Exibe o retorno
res.statusCode = 401;
res.json(data);
}
} | * @since 12/2016 | random_line_split |
BaseController.ts | /**
* BaseController
*
* Classe abstrata com as funções comuns a todos os controllers
*
* @author Henrique de Castro
* @since 12/2016
*/
import * as express from 'express';
export class BaseController {
/**
* showSuccess
*
* Exibe um retorno de sucesso
*
* @author Henrique de Castro
* @since 12/2016
* @param array
* @param object
* @return void
*/
public showSucess(data: Object, res: express.Response) {
// Adiciona o sucesso nos dados
let ret = {};
ret['status'] = true;
ret['error'] = false;
ret['data'] = data;
// Exibe o retorno
res.json(ret);
}
/**
* showError
*
* Exibe um retorno de erro
*
* @author Henrique de Castro
* @since 12/2016
* @param string
* @param object
* @return void
*/
public showError(error: String, res: express.Response) {
// Adiciona erro nos dados
let data = {};
data['status'] = false;
data['error'] = error;
// Exibe o retorno
res.json(data);
}
/**
* showAccessDenied
*
* Exibe um retorno de acesso negado
*
* @author Henrique de Castro
* @since 12/2016
* @param object
* @return void
*/
public showAccessDenied(req: express.Request, res: express.Response) {
| }
|
// Adiciona erro nos dados
let data = {};
data['status'] = false;
data['error'] = 'Acesso negado';
// Exibe o retorno
res.statusCode = 401;
res.json(data);
}
| identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.