code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.xcontent;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.RestApiVersion;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import static java.util.Objects.requireNonNull;
import static org.elasticsearch.xcontent.XContentParser.Token.START_ARRAY;
import static org.elasticsearch.xcontent.XContentParser.Token.START_OBJECT;
import static org.elasticsearch.xcontent.XContentParser.Token.VALUE_BOOLEAN;
import static org.elasticsearch.xcontent.XContentParser.Token.VALUE_EMBEDDED_OBJECT;
import static org.elasticsearch.xcontent.XContentParser.Token.VALUE_NULL;
import static org.elasticsearch.xcontent.XContentParser.Token.VALUE_NUMBER;
import static org.elasticsearch.xcontent.XContentParser.Token.VALUE_STRING;
/**
* A declarative, stateless parser that turns XContent into setter calls. A single parser should be defined for each object being parsed,
* nested elements can be added via {@link #declareObject(BiConsumer, ContextParser, ParseField)} which should be satisfied where possible
* by passing another instance of {@link ObjectParser}, this one customized for that Object.
* <p>
 * This class works well for objects that do not have constructor arguments or that can be built using information available from earlier in the
* XContent. For objects that have constructors with required arguments that are specified on the same level as other fields see
* {@link ConstructingObjectParser}.
* </p>
* <p>
 * Instances of {@link ObjectParser} should be set up by declaring a constant field for the parsers and declaring all fields in a static
* block just below the creation of the parser. Like this:
* </p>
* <pre>{@code
* private static final ObjectParser<Thing, SomeContext> PARSER = new ObjectParser<>("thing", Thing::new));
* static {
* PARSER.declareInt(Thing::setMineral, new ParseField("mineral"));
* PARSER.declareInt(Thing::setFruit, new ParseField("fruit"));
* }
* }</pre>
* It's highly recommended to use the high level declare methods like {@link #declareString(BiConsumer, ParseField)} instead of
* {@link #declareField} which can be used to implement exceptional parsing operations not covered by the high level methods.
*/
public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context>
implements
BiFunction<XContentParser, Context, Value>,
ContextParser<Context, Value> {
// Field-name sets where at least one name from each set must appear in the parsed object.
private final List<String[]> requiredFieldSets = new ArrayList<>();
// Field-name sets where at most one name from each set may appear in the parsed object.
private final List<String[]> exclusiveFieldSets = new ArrayList<>();
/**
 * Adapts an array (or varargs) setter into a list setter.
 *
 * @param c the element class, used to allocate the destination array
 * @param consumer the array-accepting setter being adapted
 * @return a setter that accepts a {@link List} and forwards its contents as an array
 */
public static <Value, ElementValue> BiConsumer<Value, List<ElementValue>> fromList(
    Class<ElementValue> c,
    BiConsumer<Value, ElementValue[]> consumer
) {
    return (target, elements) -> {
        @SuppressWarnings("unchecked")
        ElementValue[] destination = (ElementValue[]) Array.newInstance(c, elements.size());
        consumer.accept(target, elements.toArray(destination));
    };
}
/**
 * Strategy invoked when the parser encounters a field name that was not declared.
 * The parser is positioned on the unknown field's first value token when called.
 */
private interface UnknownFieldParser<Value, Context> {
    void acceptUnknownField(
        ObjectParser<Value, Context> objectParser,
        String field,
        XContentLocation location,
        XContentParser parser,
        Value value,
        Context context
    ) throws IOException;
}
// Unknown-field strategy that silently skips the unknown field and its entire subtree.
private static <Value, Context> UnknownFieldParser<Value, Context> ignoreUnknown() {
    return (op, f, l, p, v, c) -> p.skipChildren();
}
// Unknown-field strategy that fails the parse; the error message includes the field names
// declared for the current REST API version so close matches can be suggested.
private static <Value, Context> UnknownFieldParser<Value, Context> errorOnUnknown() {
    return (op, f, l, p, v, c) -> {
        throw new XContentParseException(
            l,
            ErrorOnUnknown.IMPLEMENTATION.errorMessage(
                op.name,
                f,
                op.fieldParserMap.getOrDefault(p.getRestApiVersion(), Collections.emptyMap()).keySet()
            )
        );
    };
}
/**
 * Defines how to consume a parsed undefined field.
 */
@FunctionalInterface
public interface UnknownFieldConsumer<Value> {
    /**
     * Consume an unknown field.
     * @param target the value being built
     * @param field the name of the unknown field
     * @param value the parsed value: a String, Number, Boolean, {@code null}, Map, or List
     *              (see {@code consumeUnknownField})
     */
    void accept(Value target, String field, Object value);
}
/**
 * Unknown-field strategy that materializes the field's value as a plain Java object
 * (String, Number, Boolean, null, Map, or List) and hands it to {@code consumer}.
 */
private static <Value, Context> UnknownFieldParser<Value, Context> consumeUnknownField(UnknownFieldConsumer<Value> consumer) {
    return (objectParser, field, location, parser, value, context) -> {
        XContentParser.Token t = parser.currentToken();
        switch (t) {
            case VALUE_STRING:
                consumer.accept(value, field, parser.text());
                break;
            case VALUE_NUMBER:
                consumer.accept(value, field, parser.numberValue());
                break;
            case VALUE_BOOLEAN:
                consumer.accept(value, field, parser.booleanValue());
                break;
            case VALUE_NULL:
                consumer.accept(value, field, null);
                break;
            case START_OBJECT:
                // The whole object is materialized as a Map.
                consumer.accept(value, field, parser.map());
                break;
            case START_ARRAY:
                // The whole array is materialized as a List.
                consumer.accept(value, field, parser.list());
                break;
            default:
                // Remaining tokens (e.g. VALUE_EMBEDDED_OBJECT) cannot be turned into a generic Object.
                throw new XContentParseException(
                    parser.getTokenLocation(),
                    "[" + objectParser.name + "] cannot parse field [" + field + "] with value type [" + t + "]"
                );
        }
    };
}
/**
 * Unknown-field strategy that resolves the unknown field as a {@link XContentParser#namedObject
 * named object} of {@code categoryClass}. If no named object is registered under the field name,
 * the failure message lists both the declared fields and the known named-object candidates.
 */
private static <Value, Category, Context> UnknownFieldParser<Value, Context> unknownIsNamedXContent(
    Class<Category> categoryClass,
    BiConsumer<Value, ? super Category> consumer
) {
    return (objectParser, field, location, parser, value, context) -> {
        Category o;
        try {
            o = parser.namedObject(categoryClass, field, context);
        } catch (NamedObjectNotFoundException e) {
            // Merge declared field names with the named-object candidates into one suggestion set.
            Set<String> candidates = new HashSet<>(
                objectParser.fieldParserMap.getOrDefault(parser.getRestApiVersion(), Collections.emptyMap()).keySet()
            );
            e.getCandidates().forEach(candidates::add);
            String message = ErrorOnUnknown.IMPLEMENTATION.errorMessage(objectParser.name, field, candidates);
            throw new XContentParseException(location, message, e);
        }
        consumer.accept(value, o);
    };
}
// Declared field parsers, keyed first by REST API version and then by field name.
private final Map<RestApiVersion, Map<String, FieldParser>> fieldParserMap = new HashMap<>();
// The parser's name, used to identify it in error messages.
private final String name;
// Builds a fresh Value from the parse Context; throws NullPointerException on use when unset.
private final Function<Context, Value> valueBuilder;
// Strategy applied to fields that were not declared.
private final UnknownFieldParser<Value, Context> unknownFieldParser;
/**
 * Creates a new ObjectParser that rejects unknown fields.
 * @param name the parsers name, used to reference the parser in exceptions and messages.
 */
public ObjectParser(String name) {
    // No value builder is configured, so only parse(parser, value, context) may be used.
    this(name, errorOnUnknown(), null);
}
/**
 * Creates a new ObjectParser that rejects unknown fields.
 * @param name the parsers name, used to reference the parser in exceptions and messages.
 * @param valueSupplier A supplier that creates a new Value instance. Used when the parser is used as an inner object parser.
 */
public ObjectParser(String name, @Nullable Supplier<Value> valueSupplier) {
    this(name, errorOnUnknown(), wrapValueSupplier(valueSupplier));
}
/**
 * Creates a new ObjectParser whose value is built from the parse context.
 * @param name the parsers name, used to reference the parser in exceptions and messages.
 * @param valueBuilder A function that creates a new Value from the parse Context. Used
 *                     when the parser is used as an inner object parser.
 */
public static <Value, Context> ObjectParser<Value, Context> fromBuilder(String name, Function<Context, Value> valueBuilder) {
    requireNonNull(valueBuilder, "Use the single argument ctor instead");
    return new ObjectParser<>(name, errorOnUnknown(), valueBuilder);
}
/**
 * Creates a new ObjectParser.
 * @param name the parsers name, used to reference the parser in exceptions and messages.
 * @param ignoreUnknownFields Should this parser ignore unknown fields? This should generally be set to true only when parsing
 *        responses from external systems, never when parsing requests from users.
 * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser.
 */
public ObjectParser(String name, boolean ignoreUnknownFields, @Nullable Supplier<Value> valueSupplier) {
    this(name, ignoreUnknownFields ? ignoreUnknown() : errorOnUnknown(), wrapValueSupplier(valueSupplier));
}
/**
 * Adapts a nullable {@link Supplier} into the {@link Function} shape expected by the private
 * constructor. A null supplier yields a function that fails on use, i.e. when the parser is
 * asked to build its own value; the message mirrors the "valueBuilder is not set" guard in the
 * private constructor so the failure is diagnosable.
 */
private static <C, V> Function<C, V> wrapValueSupplier(@Nullable Supplier<V> valueSupplier) {
    return valueSupplier == null
        ? c -> { throw new NullPointerException("valueSupplier is not set"); }
        : c -> valueSupplier.get();
}
/**
 * Creates a new ObjectParser that consumes unknown fields as generic Objects.
 * @param name the parsers name, used to reference the parser in exceptions and messages.
 * @param unknownFieldConsumer how to consume parsed unknown fields
 * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser.
 */
public ObjectParser(String name, UnknownFieldConsumer<Value> unknownFieldConsumer, @Nullable Supplier<Value> valueSupplier) {
    this(name, consumeUnknownField(unknownFieldConsumer), wrapValueSupplier(valueSupplier));
}
/**
 * Creates a new ObjectParser that attempts to resolve unknown fields as {@link XContentParser#namedObject namedObjects}.
 * @param <C> the type of named object that unknown fields are expected to be
 * @param name the parsers name, used to reference the parser in exceptions and messages.
 * @param categoryClass the type of named object that unknown fields are expected to be
 * @param unknownFieldConsumer how to consume parsed unknown fields
 * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser.
 */
public <C> ObjectParser(
    String name,
    Class<C> categoryClass,
    BiConsumer<Value, C> unknownFieldConsumer,
    @Nullable Supplier<Value> valueSupplier
) {
    this(name, unknownIsNamedXContent(categoryClass, unknownFieldConsumer), wrapValueSupplier(valueSupplier));
}
/**
 * Creates a new ObjectParser instance with a name.
 * @param name the parsers name, used to reference the parser in exceptions and messages.
 * @param unknownFieldParser how to parse unknown fields
 * @param valueBuilder builds the value from the context. Used when the ObjectParser is not passed a value.
 */
private ObjectParser(
    String name,
    UnknownFieldParser<Value, Context> unknownFieldParser,
    @Nullable Function<Context, Value> valueBuilder
) {
    this.name = name;
    this.unknownFieldParser = unknownFieldParser;
    // A missing builder is replaced by one that fails on use, so parse(parser, context) throws if called.
    this.valueBuilder = valueBuilder == null ? c -> { throw new NullPointerException("valueBuilder is not set"); } : valueBuilder;
}
/**
 * Parses a Value from the given {@link XContentParser}
 * @param parser the parser to build a value from
 * @param context context needed for parsing
 * @return a new value instance drawn from the provided value supplier on {@link #ObjectParser(String, Supplier)}
 * @throws IOException if an IOException occurs.
 * @throws NullPointerException if this parser was built without a value supplier/builder
 */
@Override
public Value parse(XContentParser parser, Context context) throws IOException {
    return parse(parser, valueBuilder.apply(context), context);
}
/**
 * Parses a Value from the given {@link XContentParser}
 * @param parser the parser to build a value from
 * @param value the value to fill from the parser
 * @param context a context that is passed along to all declared field parsers
 * @return the parsed value
 * @throws IOException if an IOException occurs.
 */
public Value parse(XContentParser parser, Value value, Context context) throws IOException {
    XContentParser.Token token;
    // Position the parser on the opening START_OBJECT if it is not there already.
    if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
        token = parser.nextToken();
        if (token != XContentParser.Token.START_OBJECT) {
            throwExpectedStartObject(parser, token);
        }
    }
    FieldParser fieldParser = null;
    String currentFieldName = null;
    XContentLocation currentPosition = null;
    // Working copies of the declared constraints: required sets are removed once satisfied,
    // exclusive sets accumulate the field names actually seen.
    final List<String[]> requiredFields = this.requiredFieldSets.isEmpty() ? null : new ArrayList<>(this.requiredFieldSets);
    final List<List<String>> exclusiveFields;
    if (exclusiveFieldSets.isEmpty()) {
        exclusiveFields = null;
    } else {
        exclusiveFields = new ArrayList<>();
        for (int i = 0; i < this.exclusiveFieldSets.size(); i++) {
            exclusiveFields.add(new ArrayList<>());
        }
    }
    final Map<String, FieldParser> parsers = fieldParserMap.getOrDefault(parser.getRestApiVersion(), Collections.emptyMap());
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            // Remember the field's name and location; the value token follows.
            currentFieldName = parser.currentName();
            currentPosition = parser.getTokenLocation();
            fieldParser = parsers.get(currentFieldName);
        } else {
            if (currentFieldName == null) {
                throwNoFieldFound(parser);
            }
            if (fieldParser == null) {
                // Undeclared field: delegate to the configured unknown-field strategy.
                unknownFieldParser.acceptUnknownField(this, currentFieldName, currentPosition, parser, value, context);
            } else {
                fieldParser.assertSupports(name, parser, currentFieldName);
                if (requiredFields != null) {
                    // Check to see if this field is a required field, if it is we can
                    // remove the entry as the requirement is satisfied
                    maybeMarkRequiredField(currentFieldName, requiredFields);
                }
                if (exclusiveFields != null) {
                    // Check if this field is in an exclusive set, if it is then mark
                    // it as seen.
                    maybeMarkExclusiveField(currentFieldName, exclusiveFields);
                }
                parseSub(parser, fieldParser, currentFieldName, value, context);
            }
            fieldParser = null;
        }
    }
    // Check for a) multiple entries appearing in exclusive field sets and b) empty required field entries
    if (exclusiveFields != null) {
        ensureExclusiveFields(exclusiveFields);
    }
    if (requiredFields != null && requiredFields.isEmpty() == false) {
        throwMissingRequiredFields(requiredFields);
    }
    return value;
}
// Fails the parse because the input did not start with an object.
private void throwExpectedStartObject(XContentParser parser, XContentParser.Token token) {
    throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token);
}
// Fails the parse because a value token arrived before any field name was seen.
private void throwNoFieldFound(XContentParser parser) {
    throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] no field found");
}
// Fails the parse because at least one required field set had none of its fields supplied.
// Every unsatisfied set is reported in a single message.
private void throwMissingRequiredFields(List<String[]> requiredFields) {
    final StringBuilder sb = new StringBuilder();
    for (String[] fieldSet : requiredFields) {
        sb.append("Required one of fields ").append(Arrays.toString(fieldSet)).append(", but none were specified. ");
    }
    throw new IllegalArgumentException(sb.toString());
}
// Fails the parse if any exclusive field set saw more than one of its fields.
// All violating sets are reported in a single message.
private void ensureExclusiveFields(List<List<String>> exclusiveFields) {
    StringBuilder problems = null;
    for (List<String> seen : exclusiveFields) {
        if (seen.size() > 1) {
            if (problems == null) {
                problems = new StringBuilder();
            }
            problems.append("The following fields are not allowed together: ").append(seen).append(" ");
        }
    }
    if (problems != null && problems.length() > 0) {
        throw new IllegalArgumentException(problems.toString());
    }
}
// Records the field in every exclusive set that declares it. exclusiveFields is a
// positional mirror of this.exclusiveFieldSets, collecting the names actually seen.
private void maybeMarkExclusiveField(String currentFieldName, List<List<String>> exclusiveFields) {
    for (int setIndex = 0; setIndex < this.exclusiveFieldSets.size(); setIndex++) {
        for (String candidate : this.exclusiveFieldSets.get(setIndex)) {
            if (candidate.equals(currentFieldName)) {
                exclusiveFields.get(setIndex).add(currentFieldName);
            }
        }
    }
}
// Removes every required field set that this field satisfies; the iterator allows
// safe removal while scanning.
private void maybeMarkRequiredField(String currentFieldName, List<String[]> requiredFields) {
    Iterator<String[]> it = requiredFields.iterator();
    while (it.hasNext()) {
        for (String candidate : it.next()) {
            if (candidate.equals(currentFieldName)) {
                it.remove();
                break;
            }
        }
    }
}
// BiFunction entry point: delegates to parse(parser, context), rewrapping IOException as
// the unchecked XContentParseException so the functional interface stays exception-free.
@Override
public Value apply(XContentParser parser, Context context) {
    try {
        return parse(parser, context);
    } catch (IOException e) {
        throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] failed to parse object", e);
    }
}
/**
 * Low level parsing hook that fills {@code value} directly from the parser.
 */
@FunctionalInterface
public interface Parser<Value, Context> {
    void parse(XContentParser parser, Value value, Context context) throws IOException;
}
/**
 * Declares a field with a low level {@link Parser}. Prefer the high level declare methods
 * where possible.
 * @param p fills the value from the parser when the field is encountered
 * @param parseField the field's name(s); required
 * @param type the token types the field supports; required
 * @throws IllegalArgumentException if an argument is null or a parser is already registered
 *         under one of the field's names
 */
public void declareField(Parser<Value, Context> p, ParseField parseField, ValueType type) {
    if (parseField == null) {
        throw new IllegalArgumentException("[parseField] is required");
    }
    if (type == null) {
        throw new IllegalArgumentException("[type] is required");
    }
    FieldParser fieldParser = new FieldParser(p, type.supportedTokens(), parseField, type);
    for (String fieldValue : parseField.getAllNamesIncludedDeprecated()) {
        // Register under every REST API version the field is declared for.
        if (RestApiVersion.minimumSupported().matches(parseField.getForRestApiVersion())) {
            registerFieldParser(RestApiVersion.minimumSupported(), fieldValue, fieldParser);
        }
        if (RestApiVersion.current().matches(parseField.getForRestApiVersion())) {
            registerFieldParser(RestApiVersion.current(), fieldValue, fieldParser);
        }
    }
}
/**
 * Registers {@code fieldParser} under {@code fieldValue} for {@code version}, rejecting duplicates.
 */
private void registerFieldParser(RestApiVersion version, String fieldValue, FieldParser fieldParser) {
    Map<String, FieldParser> nameToParserMap = fieldParserMap.computeIfAbsent(version, (v) -> new HashMap<>());
    FieldParser previousValue = nameToParserMap.putIfAbsent(fieldValue, fieldParser);
    if (previousValue != null) {
        throw new IllegalArgumentException("Parser already registered for name=[" + fieldValue + "]. " + previousValue);
    }
}
/**
 * Declares a field whose parsed value is handed to {@code consumer}.
 * @throws IllegalArgumentException if {@code consumer} or {@code parser} is null
 */
@Override
public <T> void declareField(BiConsumer<Value, T> consumer, ContextParser<Context, T> parser, ParseField parseField, ValueType type) {
    if (consumer == null) {
        throw new IllegalArgumentException("[consumer] is required");
    }
    if (parser == null) {
        throw new IllegalArgumentException("[parser] is required");
    }
    declareField((p, v, c) -> consumer.accept(v, parser.parse(p, c)), parseField, type);
}
/**
 * Declares a field that is either an object parsed by {@code objectParser} or a boolean:
 * {@code true} stores {@code defaultValue.get()}, {@code false} stores nothing.
 */
public <T> void declareObjectOrDefault(
    BiConsumer<Value, T> consumer,
    BiFunction<XContentParser, Context, T> objectParser,
    Supplier<T> defaultValue,
    ParseField field
) {
    declareField((p, v, c) -> {
        if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
            if (p.booleanValue()) {
                consumer.accept(v, defaultValue.get());
            }
        } else {
            consumer.accept(v, objectParser.apply(p, c));
        }
    }, field, ValueType.OBJECT_OR_BOOLEAN);
}
/**
 * Declares a field containing exactly one named object, e.g. {@code {"field": {"name": {...}}}}.
 */
@Override
public <T> void declareNamedObject(BiConsumer<Value, T> consumer, NamedObjectParser<T, Context> namedObjectParser, ParseField field) {
    BiFunction<XContentParser, Context, T> objectParser = (XContentParser p, Context c) -> {
        try {
            // Advance from START_OBJECT to the single FIELD_NAME naming the object.
            XContentParser.Token token = p.nextToken();
            assert token == XContentParser.Token.FIELD_NAME;
            String currentName = p.currentName();
            try {
                T namedObject = namedObjectParser.parse(p, c, currentName);
                // consume the end object token
                token = p.nextToken();
                assert token == XContentParser.Token.END_OBJECT;
                return namedObject;
            } catch (Exception e) {
                // Nested wrapping tells the user which named field failed to parse.
                throw rethrowFieldParseFailure(field, p, currentName, e);
            }
        } catch (IOException e) {
            throw wrapParseError(field, p, e, "error while parsing named object");
        }
    };
    declareField((XContentParser p, Value v, Context c) -> consumer.accept(v, objectParser.apply(p, c)), field, ValueType.OBJECT);
}
/**
 * Declares a field of named objects, either {@code {"field": {"a": {...}, "b": {...}}}} or
 * {@code {"field": [{"a": {...}}, {"b": {...}}]}}. The array form invokes
 * {@code orderedModeCallback} before parsing so the target can record that order is significant.
 */
@Override
public <T> void declareNamedObjects(
    BiConsumer<Value, List<T>> consumer,
    NamedObjectParser<T, Context> namedObjectParser,
    Consumer<Value> orderedModeCallback,
    ParseField field
) {
    // This creates and parses the named object
    BiFunction<XContentParser, Context, T> objectParser = (XContentParser p, Context c) -> {
        if (p.currentToken() != XContentParser.Token.FIELD_NAME) {
            throw wrapCanBeObjectOrArrayOfObjects(field, p);
        }
        // This messy exception nesting has the nice side effect of telling the user which field failed to parse
        try {
            String currentName = p.currentName();
            try {
                return namedObjectParser.parse(p, c, currentName);
            } catch (Exception e) {
                throw rethrowFieldParseFailure(field, p, currentName, e);
            }
        } catch (IOException e) {
            throw wrapParseError(field, p, e, "error while parsing");
        }
    };
    declareField((XContentParser p, Value v, Context c) -> {
        List<T> fields = new ArrayList<>();
        if (p.currentToken() == XContentParser.Token.START_OBJECT) {
            // Fields are just named entries in a single object
            while (p.nextToken() != XContentParser.Token.END_OBJECT) {
                fields.add(objectParser.apply(p, c));
            }
        } else if (p.currentToken() == XContentParser.Token.START_ARRAY) {
            // Fields are objects in an array. Each object contains a named field.
            parseObjectsInArray(orderedModeCallback, field, objectParser, p, v, c, fields);
        }
        consumer.accept(v, fields);
    }, field, ValueType.OBJECT_ARRAY);
}
// Parses the array form of declareNamedObjects: each array element must be an object
// holding exactly one named field.
private <T> void parseObjectsInArray(
    Consumer<Value> orderedModeCallback,
    ParseField field,
    BiFunction<XContentParser, Context, T> objectParser,
    XContentParser p,
    Value v,
    Context c,
    List<T> fields
) throws IOException {
    // Notify the target that the objects arrived in a defined order (or reject arrays entirely).
    orderedModeCallback.accept(v);
    XContentParser.Token token;
    while ((token = p.nextToken()) != XContentParser.Token.END_ARRAY) {
        if (token != XContentParser.Token.START_OBJECT) {
            throw wrapCanBeObjectOrArrayOfObjects(field, p);
        }
        p.nextToken(); // Move to the first field in the object
        fields.add(objectParser.apply(p, c));
        p.nextToken(); // Move past the object, should be back to into the array
        if (p.currentToken() != XContentParser.Token.END_OBJECT) {
            // More than one field in the wrapper object is not allowed.
            throw wrapCanBeObjectOrArrayOfObjects(field, p);
        }
    }
}
// Builds the exception used when a named-objects field has the wrong structure.
private XContentParseException wrapCanBeObjectOrArrayOfObjects(ParseField field, XContentParser p) {
    String message = "["
        + field
        + "] can be a single object with any number of "
        + "fields or an array where each entry is an object with a single field";
    return new XContentParseException(p.getTokenLocation(), message);
}
// Wraps an IOException in an unchecked XContentParseException carrying the field name.
private XContentParseException wrapParseError(ParseField field, XContentParser p, IOException e, String s) {
    return new XContentParseException(p.getTokenLocation(), "[" + field + "] " + s, e);
}
// Wraps a parse failure so the message names both the declared field and the named entry that failed.
private XContentParseException rethrowFieldParseFailure(ParseField field, XContentParser p, String currentName, Exception e) {
    return new XContentParseException(p.getTokenLocation(), "[" + field + "] failed to parse field [" + currentName + "]", e);
}
/**
 * Declares a field of named objects that only supports the single-object form; encountering
 * the array form throws {@link IllegalArgumentException} via the ordered-mode callback.
 */
@Override
public <T> void declareNamedObjects(
    BiConsumer<Value, List<T>> consumer,
    NamedObjectParser<T, Context> namedObjectParser,
    ParseField field
) {
    Consumer<Value> orderedModeCallback = (v) -> {
        throw new IllegalArgumentException("[" + field + "] doesn't support arrays. Use a single object with multiple fields.");
    };
    declareNamedObjects(consumer, namedObjectParser, orderedModeCallback, field);
}
/**
 * Functional interface for instantiating and parsing named objects. See ObjectParserTests#NamedObject for the canonical way to
 * implement this for objects that themselves have a parser.
 * @param <T> the type of object produced
 * @param <Context> the parse context passed through from the outer parser
 */
@FunctionalInterface
public interface NamedObjectParser<T, Context> {
    T parse(XContentParser p, Context c, String name) throws IOException;
}
/**
 * Get the name of the parser.
 * @return the name given at construction time, used in error messages
 */
@Override
public String getName() {
    return name;
}
/**
 * Declares a set of field names of which at least one must appear in the parsed object.
 * An empty set is ignored.
 */
@Override
public void declareRequiredFieldSet(String... requiredSet) {
    if (requiredSet.length == 0) {
        return;
    }
    this.requiredFieldSets.add(requiredSet);
}
/**
 * Declares a set of field names of which at most one may appear in the parsed object.
 * An empty set is ignored.
 */
@Override
public void declareExclusiveFieldSet(String... exclusiveSet) {
    if (exclusiveSet.length == 0) {
        return;
    }
    this.exclusiveFieldSets.add(exclusiveSet);
}
// Entry point for array-valued fields; the assert documents that the caller positioned the
// parser on START_ARRAY before dispatching here.
private void parseArray(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) {
    assert parser.currentToken() == XContentParser.Token.START_ARRAY : "Token was: " + parser.currentToken();
    parseValue(parser, fieldParser, currentFieldName, value, context);
}
// Runs the declared field's parser, wrapping any failure with the parser and field names.
private void parseValue(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) {
    try {
        fieldParser.parser.parse(parser, value, context);
    } catch (Exception ex) {
        throwFailedToParse(parser, currentFieldName, ex);
    }
}
// Fails the parse, identifying the parser and the field whose value parser threw.
private void throwFailedToParse(XContentParser parser, String currentFieldName, Exception ex) {
    throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] failed to parse field [" + currentFieldName + "]", ex);
}
// Dispatches one field's value to its parser based on the current token, then sanity-checks
// that object/array parsers consumed up to their closing token.
private void parseSub(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) {
    final XContentParser.Token token = parser.currentToken();
    switch (token) {
        case START_OBJECT:
            parseValue(parser, fieldParser, currentFieldName, value, context);
            /*
             * Well behaving parsers should consume the entire object but
             * asserting that they do that is not something we can do
             * efficiently here. Instead we can check that they end on an
             * END_OBJECT. They could end on the *wrong* end object and
             * this test won't catch them, but that is the price that we pay
             * for having a cheap test.
             */
            if (parser.currentToken() != XContentParser.Token.END_OBJECT) {
                throwMustEndOn(currentFieldName, XContentParser.Token.END_OBJECT);
            }
            break;
        case START_ARRAY:
            parseArray(parser, fieldParser, currentFieldName, value, context);
            /*
             * Well behaving parsers should consume the entire array but
             * asserting that they do that is not something we can do
             * efficiently here. Instead we can check that they end on an
             * END_ARRAY. They could end on the *wrong* end array and
             * this test won't catch them, but that is the price that we pay
             * for having a cheap test.
             */
            if (parser.currentToken() != XContentParser.Token.END_ARRAY) {
                throwMustEndOn(currentFieldName, XContentParser.Token.END_ARRAY);
            }
            break;
        case END_OBJECT:
        case END_ARRAY:
        case FIELD_NAME:
            // Structural tokens can never start a field value.
            throw throwUnexpectedToken(parser, token);
        case VALUE_STRING:
        case VALUE_NUMBER:
        case VALUE_BOOLEAN:
        case VALUE_EMBEDDED_OBJECT:
        case VALUE_NULL:
            parseValue(parser, fieldParser, currentFieldName, value, context);
    }
}
// Signals a programming error: a declared field parser did not consume its whole object/array.
private void throwMustEndOn(String currentFieldName, XContentParser.Token token) {
    throw new IllegalStateException("parser for [" + currentFieldName + "] did not end on " + token);
}
// Builds (despite the name it returns, callers throw) the exception for tokens that can
// never begin a field value. Adds the missing separator so the message reads
// "[name] TOKEN is unexpected" instead of "[name]TOKEN is unexpected".
private XContentParseException throwUnexpectedToken(XContentParser parser, XContentParser.Token token) {
    return new XContentParseException(parser.getTokenLocation(), "[" + name + "] " + token + " is unexpected");
}
/**
 * Pairs a declared field's {@link Parser} with the token types it supports and its
 * {@link ParseField} metadata (names, deprecation, REST API versions).
 */
private class FieldParser {
    private final Parser<Value, Context> parser;
    private final EnumSet<XContentParser.Token> supportedTokens;
    private final ParseField parseField;
    private final ValueType type;

    FieldParser(Parser<Value, Context> parser, EnumSet<XContentParser.Token> supportedTokens, ParseField parseField, ValueType type) {
        this.parser = parser;
        this.supportedTokens = supportedTokens;
        this.parseField = parseField;
        this.type = type;
    }

    /**
     * Throws an {@link XContentParseException} if the current field name is rejected by the
     * {@link ParseField} (behavior delegated to {@code ParseField.match}, e.g. for disallowed
     * deprecated names) or the current token is not one of the supported types.
     */
    void assertSupports(String parserName, XContentParser xContentParser, String currentFieldName) {
        boolean match = parseField.match(
            parserName,
            xContentParser::getTokenLocation,
            currentFieldName,
            xContentParser.getDeprecationHandler()
        );
        if (match == false) {
            throw new XContentParseException(
                xContentParser.getTokenLocation(),
                "[" + parserName + "] parsefield doesn't accept: " + currentFieldName
            );
        }
        if (supportedTokens.contains(xContentParser.currentToken()) == false) {
            throw new XContentParseException(
                xContentParser.getTokenLocation(),
                "[" + parserName + "] " + currentFieldName + " doesn't support values of type: " + xContentParser.currentToken()
            );
        }
    }

    @Override
    public String toString() {
        String[] deprecatedNames = parseField.getDeprecatedNames();
        String allReplacedWith = parseField.getAllReplacedWith();
        String deprecated = "";
        if (deprecatedNames != null && deprecatedNames.length > 0) {
            deprecated = ", deprecated_names=" + Arrays.toString(deprecatedNames);
        }
        return "FieldParser{"
            + "preferred_name="
            + parseField.getPreferredName()
            + ", supportedTokens="
            + supportedTokens
            + deprecated
            + (allReplacedWith == null ? "" : ", replaced_with=" + allReplacedWith)
            + ", type="
            + type.name()
            + '}';
    }
}
/**
 * The token types a declared field accepts. Each constant lists the
 * {@link XContentParser.Token}s that are legal for fields declared with it; any other token
 * is rejected by {@code FieldParser#assertSupports}.
 */
public enum ValueType {
    STRING(VALUE_STRING),
    STRING_OR_NULL(VALUE_STRING, VALUE_NULL),
    FLOAT(VALUE_NUMBER, VALUE_STRING),
    FLOAT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
    DOUBLE(VALUE_NUMBER, VALUE_STRING),
    DOUBLE_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
    LONG(VALUE_NUMBER, VALUE_STRING),
    LONG_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
    INT(VALUE_NUMBER, VALUE_STRING),
    INT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
    BOOLEAN(VALUE_BOOLEAN, VALUE_STRING),
    BOOLEAN_OR_NULL(VALUE_BOOLEAN, VALUE_STRING, VALUE_NULL),
    STRING_ARRAY(START_ARRAY, VALUE_STRING),
    FLOAT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
    DOUBLE_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
    LONG_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
    INT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
    BOOLEAN_ARRAY(START_ARRAY, VALUE_BOOLEAN),
    OBJECT(START_OBJECT),
    OBJECT_OR_NULL(START_OBJECT, VALUE_NULL),
    OBJECT_ARRAY(START_OBJECT, START_ARRAY),
    OBJECT_ARRAY_OR_NULL(START_OBJECT, START_ARRAY, VALUE_NULL),
    OBJECT_OR_BOOLEAN(START_OBJECT, VALUE_BOOLEAN),
    OBJECT_OR_STRING(START_OBJECT, VALUE_STRING),
    OBJECT_OR_LONG(START_OBJECT, VALUE_NUMBER),
    OBJECT_ARRAY_BOOLEAN_OR_STRING(START_OBJECT, START_ARRAY, VALUE_BOOLEAN, VALUE_STRING),
    OBJECT_ARRAY_OR_STRING(START_OBJECT, START_ARRAY, VALUE_STRING),
    OBJECT_ARRAY_STRING_OR_NUMBER(START_OBJECT, START_ARRAY, VALUE_STRING, VALUE_NUMBER),
    VALUE(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING),
    VALUE_OBJECT_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING, START_OBJECT, START_ARRAY),
    VALUE_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_NUMBER, VALUE_STRING, START_ARRAY);

    // The set of tokens accepted by fields declared with this value type.
    private final EnumSet<XContentParser.Token> tokens;

    ValueType(XContentParser.Token first, XContentParser.Token... rest) {
        this.tokens = EnumSet.of(first, rest);
    }

    public EnumSet<XContentParser.Token> supportedTokens() {
        return this.tokens;
    }
}
// Debug representation listing the parser name and every declared field per REST API version.
@Override
public String toString() {
    return "ObjectParser{name='" + name + "', fields=" + fieldParserMap + "}";
}
}
| GlenRSmith/elasticsearch | libs/x-content/src/main/java/org/elasticsearch/xcontent/ObjectParser.java | Java | apache-2.0 | 35,948 |
package org.pentaho.di.trans.steps.loadsave.setter;
import java.lang.reflect.Method;
/**
 * A {@link Setter} implementation that writes a value by reflectively invoking a
 * single-argument setter {@link Method} on the target object.
 */
public class MethodSetter<T> implements Setter<T> {
    private final Method method;

    public MethodSetter( Method method ) {
        this.method = method;
    }

    @Override
    public void set( Object obj, T value ) {
        try {
            method.invoke( obj, value );
        } catch ( Exception e ) {
            // Any reflection failure (access, argument mismatch, target exception) is rethrown unchecked.
            throw new RuntimeException( "Error invoking " + method + " on " + obj, e );
        }
    }
}
| apratkin/pentaho-kettle | engine/test-src/org/pentaho/di/trans/steps/loadsave/setter/MethodSetter.java | Java | apache-2.0 | 468 |
/*
* Copyright (C) 2006-2013 Bitronix Software (http://www.bitronix.be)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bitronix.tm.resource.jdbc.proxy;
import bitronix.tm.resource.jdbc.JdbcPooledConnection;
import bitronix.tm.resource.jdbc.LruStatementCache.CacheKey;
import bitronix.tm.resource.jdbc.PooledConnectionProxy;
import bitronix.tm.resource.jdbc.lrc.LrcXAResource;
import bitronix.tm.utils.ClassLoaderUtils;
import net.sf.cglib.proxy.Callback;
import net.sf.cglib.proxy.CallbackFilter;
import net.sf.cglib.proxy.Enhancer;
import net.sf.cglib.proxy.Factory;
import net.sf.cglib.proxy.LazyLoader;
import net.sf.cglib.proxy.MethodInterceptor;
import net.sf.cglib.proxy.MethodProxy;
import javax.sql.XAConnection;
import java.lang.reflect.Method;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Map;
import java.util.Set;
/**
* This class generates JDBC proxy classes using CGLIB bytecode generated
* implementations. This factory's proxies are more efficient than JdbcJavaProxyFactory
* but less efficient than JdbcJavassistProxyFactory.
*
* @author Brett Wooldridge
*/
public class JdbcCglibProxyFactory implements JdbcProxyFactory {
// CGLIB-enhanced proxy classes, generated once in the constructor and instantiated per request.
private final Class<Connection> proxyConnectionClass;
private final Class<Statement> proxyStatementClass;
private final Class<CallableStatement> proxyCallableStatementClass;
private final Class<PreparedStatement> proxyPreparedStatementClass;
private final Class<ResultSet> proxyResultSetClass;
// For LRC we just use the standard Java Proxies
private final JdbcJavaProxyFactory lrcProxyFactory;
JdbcCglibProxyFactory() {
proxyConnectionClass = createProxyConnectionClass();
proxyStatementClass = createProxyStatementClass();
proxyCallableStatementClass = createProxyCallableStatementClass();
proxyPreparedStatementClass = createProxyPreparedStatementClass();
proxyResultSetClass = createProxyResultSetClass();
lrcProxyFactory = new JdbcJavaProxyFactory();
}
/** {@inheritDoc} */
@Override
public Connection getProxyConnection(JdbcPooledConnection jdbcPooledConnection, Connection connection) {
ConnectionJavaProxy methodInterceptor = new ConnectionJavaProxy(jdbcPooledConnection, connection);
Interceptor interceptor = new Interceptor(methodInterceptor);
FastDispatcher fastDispatcher = new FastDispatcher(connection);
try {
Connection connectionCglibProxy = proxyConnectionClass.newInstance();
((Factory) connectionCglibProxy).setCallbacks(new Callback[] { fastDispatcher, interceptor });
return connectionCglibProxy;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/** {@inheritDoc} */
@Override
public Statement getProxyStatement(JdbcPooledConnection jdbcPooledConnection, Statement statement) {
StatementJavaProxy methodInterceptor = new StatementJavaProxy(jdbcPooledConnection, statement);
Interceptor interceptor = new Interceptor(methodInterceptor);
FastDispatcher fastDispatcher = new FastDispatcher(statement);
try {
Statement statementCglibProxy = proxyStatementClass.newInstance();
((Factory) statementCglibProxy).setCallbacks(new Callback[] { fastDispatcher, interceptor });
return statementCglibProxy;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/** {@inheritDoc} */
@Override
public CallableStatement getProxyCallableStatement(JdbcPooledConnection jdbcPooledConnection, CallableStatement statement) {
CallableStatementJavaProxy methodInterceptor = new CallableStatementJavaProxy(jdbcPooledConnection, statement);
Interceptor interceptor = new Interceptor(methodInterceptor);
FastDispatcher fastDispatcher = new FastDispatcher(statement);
try {
CallableStatement statementCglibProxy = proxyCallableStatementClass.newInstance();
((Factory) statementCglibProxy).setCallbacks(new Callback[] { fastDispatcher, interceptor });
return statementCglibProxy;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/** {@inheritDoc} */
@Override
public PreparedStatement getProxyPreparedStatement(JdbcPooledConnection jdbcPooledConnection, PreparedStatement statement, CacheKey cacheKey) {
PreparedStatementJavaProxy methodInterceptor = new PreparedStatementJavaProxy(jdbcPooledConnection, statement, cacheKey);
Interceptor interceptor = new Interceptor(methodInterceptor);
FastDispatcher fastDispatcher = new FastDispatcher(statement);
try {
PreparedStatement statementCglibProxy = proxyPreparedStatementClass.newInstance();
((Factory) statementCglibProxy).setCallbacks(new Callback[] { fastDispatcher, interceptor });
return statementCglibProxy;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/** {@inheritDoc} */
@Override
public ResultSet getProxyResultSet(Statement statement, ResultSet resultSet) {
ResultSetJavaProxy methodInterceptor = new ResultSetJavaProxy(statement, resultSet);
Interceptor interceptor = new Interceptor(methodInterceptor);
FastDispatcher fastDispatcher = new FastDispatcher(resultSet);
try {
ResultSet resultSetCglibProxy = proxyResultSetClass.newInstance();
((Factory) resultSetCglibProxy).setCallbacks(new Callback[] { fastDispatcher, interceptor });
return resultSetCglibProxy;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/** {@inheritDoc} */
@Override
public XAConnection getProxyXaConnection(Connection connection) {
return lrcProxyFactory.getProxyXaConnection(connection);
}
/** {@inheritDoc} */
@Override
public Connection getProxyConnection(LrcXAResource xaResource, Connection connection) {
return lrcProxyFactory.getProxyConnection(xaResource, connection);
}
// ---------------------------------------------------------------
// Generate CGLIB Proxy Classes
// ---------------------------------------------------------------
@SuppressWarnings("unchecked")
private Class<Connection> createProxyConnectionClass() {
Set<Class<?>> interfaces = ClassLoaderUtils.getAllInterfaces(Connection.class);
interfaces.add(PooledConnectionProxy.class);
Enhancer enhancer = new Enhancer();
enhancer.setInterfaces(interfaces.toArray(new Class<?>[0]));
enhancer.setCallbackTypes(new Class[] {FastDispatcher.class, Interceptor.class} );
enhancer.setCallbackFilter(new InterceptorFilter(new ConnectionJavaProxy()));
return enhancer.createClass();
}
@SuppressWarnings("unchecked")
private Class<PreparedStatement> createProxyPreparedStatementClass() {
Set<Class<?>> interfaces = ClassLoaderUtils.getAllInterfaces(PreparedStatement.class);
Enhancer enhancer = new Enhancer();
enhancer.setInterfaces(interfaces.toArray(new Class<?>[0]));
enhancer.setCallbackTypes(new Class[] {FastDispatcher.class, Interceptor.class} );
enhancer.setCallbackFilter(new InterceptorFilter(new PreparedStatementJavaProxy()));
return enhancer.createClass();
}
@SuppressWarnings("unchecked")
private Class<Statement> createProxyStatementClass() {
Set<Class<?>> interfaces = ClassLoaderUtils.getAllInterfaces(Statement.class);
Enhancer enhancer = new Enhancer();
enhancer.setInterfaces(interfaces.toArray(new Class<?>[0]));
enhancer.setCallbackTypes(new Class[] {FastDispatcher.class, Interceptor.class} );
enhancer.setCallbackFilter(new InterceptorFilter(new StatementJavaProxy()));
return enhancer.createClass();
}
@SuppressWarnings("unchecked")
private Class<CallableStatement> createProxyCallableStatementClass() {
Set<Class<?>> interfaces = ClassLoaderUtils.getAllInterfaces(CallableStatement.class);
Enhancer enhancer = new Enhancer();
enhancer.setInterfaces(interfaces.toArray(new Class<?>[0]));
enhancer.setCallbackTypes(new Class[] {FastDispatcher.class, Interceptor.class} );
enhancer.setCallbackFilter(new InterceptorFilter(new CallableStatementJavaProxy()));
return enhancer.createClass();
}
@SuppressWarnings("unchecked")
private Class<ResultSet> createProxyResultSetClass() {
Set<Class<?>> interfaces = ClassLoaderUtils.getAllInterfaces(ResultSet.class);
Enhancer enhancer = new Enhancer();
enhancer.setInterfaces(interfaces.toArray(new Class<?>[0]));
enhancer.setCallbackTypes(new Class[] {FastDispatcher.class, Interceptor.class} );
enhancer.setCallbackFilter(new InterceptorFilter(new ResultSetJavaProxy()));
return enhancer.createClass();
}
// ---------------------------------------------------------------
// CGLIB Classes
// ---------------------------------------------------------------
static class FastDispatcher implements LazyLoader {
private final Object delegate;
public FastDispatcher(Object delegate) {
this.delegate = delegate;
}
@Override
public Object loadObject() throws Exception {
return delegate;
}
}
static class Interceptor implements MethodInterceptor {
private final JavaProxyBase<?> interceptor;
public Interceptor(JavaProxyBase<?> interceptor) {
this.interceptor = interceptor;
}
@Override
public Object intercept(Object enhanced, Method method, Object[] args, MethodProxy fastProxy) throws Throwable {
interceptor.proxy = enhanced;
return interceptor.invoke(interceptor, method, args);
}
}
static class InterceptorFilter implements CallbackFilter {
private final Map<String, Method> methodMap;
public InterceptorFilter(JavaProxyBase<?> proxyClass) {
methodMap = proxyClass.getMethodMap();
}
@Override
public int accept(Method method) {
if (methodMap.containsKey(JavaProxyBase.getMethodKey(method))) {
// Use the Interceptor
return 1;
}
// Use the FastDispatcher
return 0;
}
}
}
| brettwooldridge/btm | btm/src/main/java/bitronix/tm/resource/jdbc/proxy/JdbcCglibProxyFactory.java | Java | apache-2.0 | 11,137 |
/*
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.util;
import org.aspectj.lang.JoinPoint;
/**
* Utility class to assist with AOP operations.
*
* @author Marvin S. Addison
* @since 3.4
*
*/
/**
 * Utility class to assist with AOP operations.
 *
 * @author Marvin S. Addison
 * @since 3.4
 *
 */
public final class AopUtils {

    /** Non-instantiable utility class. */
    private AopUtils() {}

    /**
     * Unwraps a join point that may be nested due to layered proxies.
     *
     * @param point Join point to unwrap.
     * @return Innermost join point; if not nested, simply returns the argument.
     */
    public static JoinPoint unWrapJoinPoint(final JoinPoint point) {
        JoinPoint innermost = point;
        for (;;) {
            final Object[] args = innermost.getArgs();
            // A proxied invocation carries the wrapped join point as its first argument.
            if (args.length == 0 || !(args[0] instanceof JoinPoint)) {
                return innermost;
            }
            innermost = (JoinPoint) args[0];
        }
    }
}
| kevin3061/cas-4.0.1 | cas-server-core/src/main/java/org/jasig/cas/util/AopUtils.java | Java | apache-2.0 | 1,525 |
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
 * Wraps the provided value in an array, unless the provided value is an array.
 *
 * @param value Value to coerce; returned as-is when it is already an array.
 * @returns The original array, or a single-element array containing `value`.
 */
export declare function coerceArray<T>(value: T | T[]): T[];
| tongpa/pypollmanage | pypollmanage/public/javascript/node_modules/@angular/cdk/typings/coercion/array.d.ts | TypeScript | apache-2.0 | 347 |
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// See docs in ../ops/string_ops.cc.
#include <string>
#include "tensorflow/core/framework/kernel_def_builder.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/lib/strings/stringprintf.h"
namespace tensorflow {
// Kernel that formats every element of its "input" tensor as a string,
// honoring the op's precision/scientific/shortest/width/fill attributes.
class AsStringOp : public OpKernel {
 public:
  using OpKernel::OpKernel;

  // Validates the formatting attributes against the declared dtype and
  // pre-builds the printf-style format string reused by Compute().
  explicit AsStringOp(OpKernelConstruction* ctx) : OpKernel(ctx) {
    int32 precision;
    bool scientific;
    bool shortest;
    int32 width;
    string fill_string;
    DataType dtype;
    OP_REQUIRES_OK(ctx, ctx->GetAttr("T", &dtype));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("precision", &precision));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("scientific", &scientific));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("shortest", &shortest));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("width", &width));
    OP_REQUIRES_OK(ctx, ctx->GetAttr("fill", &fill_string));
    // scientific/shortest/precision only make sense for floating-point and
    // complex types; reject them for everything else.
    switch (dtype) {
      case DT_FLOAT:
      case DT_DOUBLE:
      case DT_COMPLEX64:
      case DT_COMPLEX128:
        break;
      default:
        OP_REQUIRES(ctx, !(scientific || shortest),
                    errors::InvalidArgument("scientific and shortest format "
                                            "not supported for datatype ",
                                            DataTypeString(dtype)));
        OP_REQUIRES(ctx, precision < 0,
                    errors::InvalidArgument("precision not supported "
                                            "for datatype ",
                                            DataTypeString(dtype)));
    }
    OP_REQUIRES(
        ctx, fill_string.size() <= 1,
        errors::InvalidArgument("Fill string must be one or fewer characters"));
    OP_REQUIRES(ctx, !(scientific && shortest),
                errors::InvalidArgument(
                    "Cannot select both scientific and shortest notation"));
    // Assemble the printf conversion: "%" [fill][width] ["." precision] spec.
    // Negative width/precision attribute values mean "not specified".
    format_ = "%";
    if (width > -1) {
      strings::Appendf(&format_, "%s%d", fill_string.c_str(), width);
    }
    if (precision > -1) {
      strings::Appendf(&format_, ".%d", precision);
    }
    // Pick the conversion specifier for the dtype.
    switch (dtype) {
      case DT_INT8:
      case DT_INT16:
      case DT_INT32:
        strings::Appendf(&format_, "d");
        break;
      case DT_INT64:
        strings::Appendf(&format_, "lld");
        break;
      case DT_FLOAT:
      case DT_DOUBLE:
      case DT_COMPLEX64:
      case DT_COMPLEX128:
        if (shortest) {
          strings::Appendf(&format_, "g");
        } else if (scientific) {
          strings::Appendf(&format_, "e");
        } else {
          strings::Appendf(&format_, "f");
        }
        break;
      case DT_BOOL:
        // Booleans are rendered as "true"/"false" in Compute(); no specifier.
        break;
      default:
        bool type_not_supported = true;
        OP_REQUIRES(ctx, !type_not_supported,
                    errors::InvalidArgument("Type not supported: ",
                                            DataTypeString(dtype)));
    }

    // Complex values are printed as "(real,imag)" using the same specifier
    // for both components.
    if (dtype == DT_COMPLEX64 || dtype == DT_COMPLEX128) {
      format_ = strings::Printf("(%s,%s)", format_.c_str(), format_.c_str());
    }
  }

  // Allocates an output of the same shape as the input and formats each
  // element with the pre-built format string.
  void Compute(OpKernelContext* context) override {
    const Tensor* input_tensor;
    OP_REQUIRES_OK(context, context->input("input", &input_tensor));
    const DataType& dtype = input_tensor->dtype();

    Tensor* output_tensor = nullptr;
    OP_REQUIRES_OK(context,
                   context->allocate_output("output", input_tensor->shape(),
                                            &output_tensor));
    auto output_flat = output_tensor->flat<tstring>();

// Formats every element of a flat input of C++ type T into output_flat.
#define ENCODE_TYPE(type, T, enc_str)                                     \
  case (type): {                                                          \
    const auto& input_flat = input_tensor->flat<T>();                     \
    for (int i = 0; i < input_flat.size(); ++i) {                         \
      output_flat(i) = strings::Printf((enc_str.c_str()), input_flat(i)); \
    }                                                                     \
  } break

    switch (dtype) {
      ENCODE_TYPE(DT_INT32, int32, format_);
      ENCODE_TYPE(DT_INT64, int64, format_);
      ENCODE_TYPE(DT_FLOAT, float, format_);
      ENCODE_TYPE(DT_DOUBLE, double, format_);
      ENCODE_TYPE(DT_INT8, int8, format_);
      ENCODE_TYPE(DT_INT16, int16, format_);
      case (DT_BOOL): {
        // Booleans ignore the format string entirely.
        const auto& input_flat = input_tensor->flat<bool>();
        for (int i = 0; i < input_flat.size(); ++i) {
          output_flat(i) = (input_flat(i)) ? "true" : "false";
        }
      } break;
      case (DT_COMPLEX64): {
        // format_ here is the two-specifier "(%...,%...)" form built above.
        const auto& input_flat = input_tensor->flat<complex64>();
        for (int i = 0; i < input_flat.size(); ++i) {
          output_flat(i) = strings::Printf(
              format_.c_str(), input_flat(i).real(), input_flat(i).imag());
        }
      } break;
      case (DT_COMPLEX128): {
        const auto& input_flat = input_tensor->flat<complex128>();
        for (int i = 0; i < input_flat.size(); ++i) {
          output_flat(i) = strings::Printf(
              format_.c_str(), input_flat(i).real(), input_flat(i).imag());
        }
      } break;
      default:
        bool can_encode_type = false;
        OP_REQUIRES(context, can_encode_type,
                    errors::InvalidArgument("Cannot encode input of type ",
                                            DataTypeString(dtype)));
    }

#undef ENCODE_TYPE
  }

 private:
  // Printf-style format string built once in the constructor.
  string format_;
};

REGISTER_KERNEL_BUILDER(Name("AsString").Device(DEVICE_CPU), AsStringOp);
} // namespace tensorflow
| adit-chandra/tensorflow | tensorflow/core/kernels/as_string_op.cc | C++ | apache-2.0 | 6,260 |
/*
* OutputBuffer.java June 2007
*
* Copyright (C) 2007, Niall Gallagher <niallg@users.sf.net>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.simpleframework.xml.stream;
import java.io.IOException;
import java.io.Writer;
/**
* This is primarily used to replace the <code>StringBuffer</code>
* class, as a way for the <code>Formatter</code> to store the start
* tag for an XML element. This enables the start tag of the current
* element to be removed without disrupting any of the other nodes
* within the document. Once the contents of the output buffer have
* been filled its contents can be emitted to the writer object.
*
* @author Niall Gallagher
*/
/**
 * This is primarily used to replace the <code>StringBuffer</code>
 * class, as a way for the <code>Formatter</code> to store the start
 * tag for an XML element. This enables the start tag of the current
 * element to be removed without disrupting any of the other nodes
 * within the document. Once the contents of the output buffer have
 * been filled its contents can be emitted to the writer object.
 *
 * @author Niall Gallagher
 */
class OutputBuffer {

   /** Holds every character accumulated by this buffer. */
   private final StringBuilder buffer;

   /**
    * Constructor for <code>OutputBuffer</code>. The buffer grows
    * dynamically as characters are appended, so no capacity needs
    * to be chosen up front.
    */
   public OutputBuffer() {
      this.buffer = new StringBuilder();
   }

   /**
    * Appends a single character to the end of this buffer.
    *
    * @param ch the character to be appended to the buffer
    */
   public void append(char ch) {
      buffer.append(ch);
   }

   /**
    * Appends a complete string to the end of this buffer.
    *
    * @param value the string to be appended to this output buffer
    */
   public void append(String value) {
      buffer.append(value);
   }

   /**
    * Appends a complete character array to the end of this buffer.
    *
    * @param value the character array to be appended to this
    */
   public void append(char[] value) {
      buffer.append(value);
   }

   /**
    * Appends a region of a character array to this buffer.
    *
    * @param value the character array to be appended to this
    * @param off the read offset for the array to begin reading
    * @param len the number of characters to append to this
    */
   public void append(char[] value, int off, int len) {
      buffer.append(value, off, len);
   }

   /**
    * Appends a region of a string to this buffer. Note that the two
    * index arguments are handed directly to
    * <code>StringBuilder.append(CharSequence, int, int)</code>, which
    * interprets them as a start index and an end index (exclusive)
    * rather than an offset and a length.
    *
    * @param value the string to be appended to the output buffer
    * @param off the start index within the string to read from
    * @param len the end index (exclusive) within the string
    */
   public void append(String value, int off, int len) {
      buffer.append(value, off, len);
   }

   /**
    * Writes the accumulated contents of this buffer to the given
    * <code>Writer</code>. This is used when the XML element is to be
    * committed to the resulting XML document.
    *
    * @param out this is the writer to write the buffered text to
    *
    * @throws IOException thrown if there is an I/O problem
    */
   public void write(Writer out) throws IOException {
      out.append(buffer);
   }

   /**
    * Discards all buffered content so that a subsequent write emits
    * nothing. This allows XML elements to be removed.
    */
   public void clear() {
      buffer.setLength(0);
   }
}
| sn00py2/SimpleXMLLegacy | src/org/simpleframework/xml/stream/OutputBuffer.java | Java | apache-2.0 | 5,031 |
# Homebrew-cask definition for SIDPLAY, a Commodore 64 SID music player for macOS.
class Sidplay < Cask
# Download location of the application archive.
url 'http://www.twinbirds.com/sidplay/SIDPLAY4.zip'
homepage 'http://www.sidmusic.org/sidplay/mac/'
# Upstream publishes no versioned releases, so no checksum can be pinned.
version 'latest'
no_checksum
# Application bundle to link into /Applications.
link 'SIDPLAY.app'
end
| rzyns/homebrew-cask | Casks/sidplay.rb | Ruby | bsd-2-clause | 183 |
// Mixes control management into L.Map: add/remove helpers plus creation of
// the DOM containers (one per map corner) that controls are positioned in.
L.Map.include({
    // Adds the given control to this map (chainable).
    addControl: function (control) {
        control.addTo(this);
        return this;
    },

    // Removes the given control from this map (chainable).
    removeControl: function (control) {
        control.removeFrom(this);
        return this;
    },

    // Builds the control container plus one corner <div> for each of the
    // four corner positions, keyed as "topleft", "topright", etc.
    _initControlPos: function () {
        var prefix = 'leaflet-',
            corners = this._controlCorners = {},
            container = this._controlContainer =
                L.DomUtil.create('div', prefix + 'control-container', this._container),
            positions = [['top', 'left'], ['top', 'right'], ['bottom', 'left'], ['bottom', 'right']],
            i, vSide, hSide;

        for (i = 0; i < positions.length; i++) {
            vSide = positions[i][0];
            hSide = positions[i][1];
            corners[vSide + hSide] = L.DomUtil.create(
                'div', prefix + vSide + ' ' + prefix + hSide, container);
        }
    }
});
| scottBrimberry/Leaflet-Smart-Package | src/map/ext/Map.Control.js | JavaScript | bsd-2-clause | 703 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/renderer/service_worker/embedded_worker_devtools_agent.h"
#include "content/child/child_thread.h"
#include "content/common/devtools_messages.h"
#include "content/renderer/render_thread_impl.h"
#include "third_party/WebKit/public/platform/WebCString.h"
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/web/WebEmbeddedWorker.h"
using blink::WebEmbeddedWorker;
using blink::WebString;
namespace content {
// Registers this agent on the render thread so that DevTools IPC messages
// addressed to |route_id| are delivered to OnMessageReceived().
EmbeddedWorkerDevToolsAgent::EmbeddedWorkerDevToolsAgent(
    blink::WebEmbeddedWorker* webworker,
    int route_id)
    : webworker_(webworker), route_id_(route_id) {
  RenderThreadImpl::current()->AddEmbeddedWorkerRoute(route_id_, this);
}

// Unregisters the route added in the constructor.
EmbeddedWorkerDevToolsAgent::~EmbeddedWorkerDevToolsAgent() {
  RenderThreadImpl::current()->RemoveEmbeddedWorkerRoute(route_id_);
}

// Dispatches DevTools agent IPC messages to the matching handler below.
// Returns true when the message was recognized and handled.
bool EmbeddedWorkerDevToolsAgent::OnMessageReceived(
    const IPC::Message& message) {
  bool handled = true;
  IPC_BEGIN_MESSAGE_MAP(EmbeddedWorkerDevToolsAgent, message)
    IPC_MESSAGE_HANDLER(DevToolsAgentMsg_Attach, OnAttach)
    IPC_MESSAGE_HANDLER(DevToolsAgentMsg_Reattach, OnReattach)
    IPC_MESSAGE_HANDLER(DevToolsAgentMsg_Detach, OnDetach)
    IPC_MESSAGE_HANDLER(DevToolsAgentMsg_DispatchOnInspectorBackend,
                        OnDispatchOnInspectorBackend)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP()
  return handled;
}

// Attaches DevTools to the embedded worker for the given host.
void EmbeddedWorkerDevToolsAgent::OnAttach(const std::string& host_id) {
  webworker_->attachDevTools(WebString::fromUTF8(host_id));
}

// Re-attaches DevTools, restoring previously serialized agent state.
void EmbeddedWorkerDevToolsAgent::OnReattach(const std::string& host_id,
                                             const std::string& state) {
  webworker_->reattachDevTools(WebString::fromUTF8(host_id),
                               WebString::fromUTF8(state));
}

// Detaches DevTools from the embedded worker.
void EmbeddedWorkerDevToolsAgent::OnDetach() {
  webworker_->detachDevTools();
}

// Forwards a raw inspector protocol message to the worker's agent.
void EmbeddedWorkerDevToolsAgent::OnDispatchOnInspectorBackend(
    const std::string& message) {
  webworker_->dispatchDevToolsMessage(WebString::fromUTF8(message));
}
} // namespace content
| M4sse/chromium.src | content/renderer/service_worker/embedded_worker_devtools_agent.cc | C++ | bsd-3-clause | 2,247 |
# stdlib
from collections import defaultdict
import time
# 3p
import psutil
# project
from checks import AgentCheck
from config import _is_affirmative
from utils.platform import Platform
DEFAULT_AD_CACHE_DURATION = 120
DEFAULT_PID_CACHE_DURATION = 120
ATTR_TO_METRIC = {
'thr': 'threads',
'cpu': 'cpu.pct',
'rss': 'mem.rss',
'vms': 'mem.vms',
'real': 'mem.real',
'open_fd': 'open_file_descriptors',
'r_count': 'ioread_count', # FIXME: namespace me correctly (6.x), io.r_count
'w_count': 'iowrite_count', # FIXME: namespace me correctly (6.x) io.r_bytes
'r_bytes': 'ioread_bytes', # FIXME: namespace me correctly (6.x) io.w_count
'w_bytes': 'iowrite_bytes', # FIXME: namespace me correctly (6.x) io.w_bytes
'ctx_swtch_vol': 'voluntary_ctx_switches', # FIXME: namespace me correctly (6.x), ctx_swt.voluntary
'ctx_swtch_invol': 'involuntary_ctx_switches', # FIXME: namespace me correctly (6.x), ctx_swt.involuntary
}
class ProcessCheck(AgentCheck):
    """Agent check that reports resource-usage metrics (memory, CPU, I/O,
    file descriptors, context switches) for processes matched by name or
    command line, plus a ``process.up`` service check on the process count.
    """

    def __init__(self, name, init_config, agentConfig, instances=None):
        AgentCheck.__init__(self, name, init_config, agentConfig, instances)

        # ad stands for access denied
        # We cache the PIDs getting this error and don't iterate on them
        # more often than `access_denied_cache_duration`
        # This cache is for all PIDs so it's global, but it should
        # be refreshed by instance
        self.last_ad_cache_ts = {}
        self.ad_cache = set()
        self.access_denied_cache_duration = int(
            init_config.get(
                'access_denied_cache_duration',
                DEFAULT_AD_CACHE_DURATION
            )
        )

        # By default cache the PID list for a while
        # Sometimes it's not wanted b/c it can mess with no-data monitoring
        # This cache is indexed per instance
        self.last_pid_cache_ts = {}
        self.pid_cache = {}
        self.pid_cache_duration = int(
            init_config.get(
                'pid_cache_duration',
                DEFAULT_PID_CACHE_DURATION
            )
        )

        # Process cache, indexed by instance
        self.process_cache = defaultdict(dict)

    def should_refresh_ad_cache(self, name):
        """Return True when the access-denied PID cache is stale for `name`."""
        now = time.time()
        return now - self.last_ad_cache_ts.get(name, 0) > self.access_denied_cache_duration

    def should_refresh_pid_cache(self, name):
        """Return True when the matching-PID cache is stale for `name`."""
        now = time.time()
        return now - self.last_pid_cache_ts.get(name, 0) > self.pid_cache_duration

    def find_pids(self, name, search_string, exact_match, ignore_ad=True):
        """
        Create a set of pids of selected processes.
        Search for search_string
        """
        if not self.should_refresh_pid_cache(name):
            return self.pid_cache[name]

        # Access-denied errors are only fatal when ignore_ad is False
        ad_error_logger = self.log.debug
        if not ignore_ad:
            ad_error_logger = self.log.error

        refresh_ad_cache = self.should_refresh_ad_cache(name)

        matching_pids = set()

        for proc in psutil.process_iter():
            # Skip access denied processes
            if not refresh_ad_cache and proc.pid in self.ad_cache:
                continue

            found = False
            for string in search_string:
                try:
                    # FIXME 6.x: All has been deprecated from the doc, should be removed
                    if string == 'All':
                        found = True
                    if exact_match:
                        if proc.name() == string:
                            found = True
                    else:
                        cmdline = proc.cmdline()
                        if string in ' '.join(cmdline):
                            found = True
                except psutil.NoSuchProcess:
                    self.log.warning('Process disappeared while scanning')
                # NOTE: `except X as e` replaces the Python-2-only
                # `except X, e` syntax; works on Python 2.6+ and 3.
                except psutil.AccessDenied as e:
                    ad_error_logger('Access denied to process with PID %s', proc.pid)
                    ad_error_logger('Error: %s', e)
                    if refresh_ad_cache:
                        self.ad_cache.add(proc.pid)
                    if not ignore_ad:
                        raise
                else:
                    if refresh_ad_cache:
                        self.ad_cache.discard(proc.pid)
                    if found:
                        matching_pids.add(proc.pid)
                        break

        self.pid_cache[name] = matching_pids
        self.last_pid_cache_ts[name] = time.time()
        if refresh_ad_cache:
            self.last_ad_cache_ts[name] = time.time()
        return matching_pids

    def psutil_wrapper(self, process, method, accessors, *args, **kwargs):
        """
        A psutil wrapper that is calling
        * psutil.method(*args, **kwargs) and returns the result
        OR
        * psutil.method(*args, **kwargs).accessor[i] for each accessors given in
        a list, the result being indexed in a dictionary by the accessor name
        """
        if accessors is None:
            result = None
        else:
            result = {}

        # Ban certain method that we know fail
        if method == 'memory_info_ex'\
                and (Platform.is_win32() or Platform.is_solaris()):
            return result
        elif method == 'num_fds' and not Platform.is_unix():
            return result

        try:
            res = getattr(process, method)(*args, **kwargs)
            if accessors is None:
                result = res
            else:
                for acc in accessors:
                    try:
                        result[acc] = getattr(res, acc)
                    except AttributeError:
                        self.log.debug("psutil.%s().%s attribute does not exist", method, acc)
        except (NotImplementedError, AttributeError):
            self.log.debug("psutil method %s not implemented", method)
        except psutil.AccessDenied:
            self.log.debug("psutil was denied acccess for method %s", method)
        except psutil.NoSuchProcess:
            self.warning("Process {0} disappeared while scanning".format(process.pid))

        return result

    def get_process_state(self, name, pids):
        """Collect per-attribute lists of measurements for the given pids,
        keyed by the short names in ATTR_TO_METRIC. Missing/unavailable
        values are appended as None and filtered out at submission time.
        """
        st = defaultdict(list)

        # Remove from cache the processes that are not in `pids`
        cached_pids = set(self.process_cache[name].keys())
        pids_to_remove = cached_pids - pids
        for pid in pids_to_remove:
            del self.process_cache[name][pid]

        for pid in pids:
            st['pids'].append(pid)

            new_process = False
            # If the pid's process is not cached, retrieve it
            if pid not in self.process_cache[name] or not self.process_cache[name][pid].is_running():
                new_process = True
                try:
                    self.process_cache[name][pid] = psutil.Process(pid)
                    self.log.debug('New process in cache: %s' % pid)
                # Skip processes dead in the meantime
                except psutil.NoSuchProcess:
                    self.warning('Process %s disappeared while scanning' % pid)
                    # reset the PID cache now, something changed
                    self.last_pid_cache_ts[name] = 0
                    continue

            p = self.process_cache[name][pid]

            meminfo = self.psutil_wrapper(p, 'memory_info', ['rss', 'vms'])
            st['rss'].append(meminfo.get('rss'))
            st['vms'].append(meminfo.get('vms'))

            # will fail on win32 and solaris
            shared_mem = self.psutil_wrapper(p, 'memory_info_ex', ['shared']).get('shared')
            if shared_mem is not None and meminfo.get('rss') is not None:
                st['real'].append(meminfo['rss'] - shared_mem)
            else:
                st['real'].append(None)

            ctxinfo = self.psutil_wrapper(p, 'num_ctx_switches', ['voluntary', 'involuntary'])
            st['ctx_swtch_vol'].append(ctxinfo.get('voluntary'))
            st['ctx_swtch_invol'].append(ctxinfo.get('involuntary'))

            st['thr'].append(self.psutil_wrapper(p, 'num_threads', None))

            cpu_percent = self.psutil_wrapper(p, 'cpu_percent', None)
            if not new_process:
                # psutil returns `0.` for `cpu_percent` the first time it's sampled on a process,
                # so save the value only on non-new processes
                st['cpu'].append(cpu_percent)

            st['open_fd'].append(self.psutil_wrapper(p, 'num_fds', None))

            ioinfo = self.psutil_wrapper(p, 'io_counters', ['read_count', 'write_count', 'read_bytes', 'write_bytes'])
            st['r_count'].append(ioinfo.get('read_count'))
            st['w_count'].append(ioinfo.get('write_count'))
            st['r_bytes'].append(ioinfo.get('read_bytes'))
            st['w_bytes'].append(ioinfo.get('write_bytes'))

        return st

    def check(self, instance):
        """Run the check for one configured instance: find matching pids,
        gather their stats, submit gauges and the process.up service check.

        Raises KeyError when mandatory configuration is missing or malformed.
        """
        name = instance.get('name', None)
        tags = instance.get('tags', [])
        exact_match = _is_affirmative(instance.get('exact_match', True))
        search_string = instance.get('search_string', None)
        ignore_ad = _is_affirmative(instance.get('ignore_denied_access', True))

        if not isinstance(search_string, list):
            raise KeyError('"search_string" parameter should be a list')

        # FIXME 6.x remove me
        if "All" in search_string:
            self.warning('Deprecated: Having "All" in your search_string will'
                         'greatly reduce the performance of the check and '
                         'will be removed in a future version of the agent.')

        if name is None:
            raise KeyError('The "name" of process groups is mandatory')

        if search_string is None:
            raise KeyError('The "search_string" is mandatory')

        pids = self.find_pids(
            name,
            search_string,
            exact_match,
            ignore_ad=ignore_ad
        )
        proc_state = self.get_process_state(name, pids)

        # FIXME 6.x remove the `name` tag
        tags.extend(['process_name:%s' % name, name])

        self.log.debug('ProcessCheck: process %s analysed', name)
        self.gauge('system.processes.number', len(pids), tags=tags)

        # NOTE: `.items()` replaces the Python-2-only `.iteritems()`;
        # behavior is identical here.
        for attr, mname in ATTR_TO_METRIC.items():
            vals = [x for x in proc_state[attr] if x is not None]
            # skip []
            if vals:
                # FIXME 6.x: change this prefix?
                self.gauge('system.processes.%s' % mname, sum(vals), tags=tags)

        self._process_service_check(name, len(pids), instance.get('thresholds', None))

    def _process_service_check(self, name, nb_procs, bounds):
        '''
        Report a service check, for each process in search_string.
        Report as OK if the process is in the warning thresholds
        CRITICAL out of the critical thresholds
        WARNING out of the warning thresholds
        '''
        tag = ["process:%s" % name]
        status = AgentCheck.OK
        message_str = "PROCS %s: %s processes found for %s"
        status_str = {
            AgentCheck.OK: "OK",
            AgentCheck.WARNING: "WARNING",
            AgentCheck.CRITICAL: "CRITICAL"
        }

        # With no thresholds configured, any matching process at all is OK
        if not bounds and nb_procs < 1:
            status = AgentCheck.CRITICAL
        elif bounds:
            warning = bounds.get('warning', [1, float('inf')])
            critical = bounds.get('critical', [1, float('inf')])

            if warning[1] < nb_procs or nb_procs < warning[0]:
                status = AgentCheck.WARNING
            if critical[1] < nb_procs or nb_procs < critical[0]:
                status = AgentCheck.CRITICAL

        self.service_check(
            "process.up",
            status,
            tags=tag,
            message=message_str % (status_str[status], nb_procs, name)
        )
| pfmooney/dd-agent | checks.d/process.py | Python | bsd-3-clause | 12,036 |
//@HEADER
// ************************************************************************
//
// Kokkos v. 2.0
// Copyright (2014) Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact H. Carter Edwards (hcedwar@sandia.gov)
//
// ************************************************************************
//@HEADER
#include <cstdio>
#include <iostream>
#include <fstream>
#include <iomanip>
#include <cstdlib>
#include <cmath>
#include <Kokkos_Core.hpp>
#include <HexElement.hpp>
#include <FEMesh.hpp>
namespace HybridFEM {
namespace Nonlinear {
template< class MeshType , typename ScalarType > struct ElementComputation ;
//----------------------------------------------------------------------------
// Cuda specialization of the nonlinear element computation for 27-node
// (tri-quadratic) hexahedral elements over a double-precision FEMesh.
// Launch layout: one thread block works on one element at a time;
// threadIdx.x indexes basis functions / element nodes (one warp lane per
// function) and threadIdx.y indexes integration points (one warp per point).
template<>
struct ElementComputation< FEMesh< double , 27 , Kokkos::Cuda > , double >
{
  typedef Kokkos::Cuda execution_space ;

  static const unsigned ElementNodeCount = 27 ;

  typedef HexElement_Data< ElementNodeCount > element_data_type ;
  typedef FEMesh< double , ElementNodeCount , execution_space > mesh_type ;

  static const unsigned SpatialDim = element_data_type::spatial_dimension ;
  static const unsigned FunctionCount = element_data_type::function_count ;
  static const unsigned IntegrationCount = element_data_type::integration_count ;
  static const unsigned TensorDim = SpatialDim * SpatialDim ;

  // Per-element output types: dense Jacobian blocks and residual vectors.
  typedef Kokkos::View< double[][FunctionCount][FunctionCount] , execution_space > elem_matrices_type ;
  typedef Kokkos::View< double[][FunctionCount] , execution_space > elem_vectors_type ;
  typedef Kokkos::View< double[] , execution_space > value_vector_type ;

private:

  const element_data_type elem_data ;  // basis values / gradients at integration points
  const typename mesh_type::elem_node_ids_type elem_node_ids ;
  const typename mesh_type::node_coords_type node_coords ;
  const value_vector_type nodal_values ;      // current solution at mesh nodes
  const elem_matrices_type element_matrices ; // output: per-element Jacobians
  const elem_vectors_type element_vectors ;   // output: per-element residuals
  const float coeff_K ;
  const unsigned elem_count ;
  // Index quadruples used to form each cofactor of the 3x3 Jacobian:
  // invJ[i] = J[a]*J[b] - J[c]*J[d] with {a,b,c,d} = invJacIndex[i].
  unsigned invJacIndex[9][4] ;

  // Flat indexing of a row-major 3x3 tensor.
  static const unsigned j11 = 0 , j12 = 1 , j13 = 2 ,
                        j21 = 3 , j22 = 4 , j23 = 5 ,
                        j31 = 6 , j32 = 7 , j33 = 8 ;

  // Can only handle up to 16 warps:
  static const unsigned BlockDimX = 32 ;
  static const unsigned BlockDimY = 7 ;

  // Dynamically-allocated shared-memory scratch, one instance per block.
  struct WorkSpace {
    // Per-warp reduction scratch; also reused as a gather buffer for node
    // coordinates and nodal values in evaluateFunctions().
    double sum[ BlockDimY ][ BlockDimX ];

    // Interpolated solution value and gradient at each integration point.
    double value_at_integ[ IntegrationCount ];
    double gradx_at_integ[ IntegrationCount ];
    double grady_at_integ[ IntegrationCount ];
    double gradz_at_integ[ IntegrationCount ];

    // Per-warp Jacobian and inverse-Jacobian workspace (one 3x3 each).
    float spaceJac[ BlockDimY ][ 9 ];
    float spaceInvJac[ BlockDimY ][ 9 ];

    // det(J) * quadrature weight at each integration point.
    float detJweight[ IntegrationCount ];

    // Physical-space basis gradients: dpsid{x,y,z}[function][integ point].
    float dpsidx[ FunctionCount ][ IntegrationCount ];
    float dpsidy[ FunctionCount ][ IntegrationCount ];
    float dpsidz[ FunctionCount ][ IntegrationCount ];
  };

public:

  // Builds the cofactor index table and immediately launches the kernel.
  // Dynamic shared memory size is sizeof(WorkSpace); the grid is capped at
  // 65535 blocks (maximum grid X dimension on the targeted hardware), with
  // operator() striding over any remaining elements.
  ElementComputation ( const mesh_type   & arg_mesh ,
                       const elem_matrices_type & arg_element_matrices ,
                       const elem_vectors_type  & arg_element_vectors ,
                       const value_vector_type  & arg_nodal_values ,
                       const float arg_coeff_K )
  : elem_data()
  , elem_node_ids( arg_mesh.elem_node_ids )
  , node_coords(   arg_mesh.node_coords )
  , nodal_values(   arg_nodal_values )
  , element_matrices( arg_element_matrices )
  , element_vectors(  arg_element_vectors )
  , coeff_K( arg_coeff_K )
  , elem_count( arg_mesh.elem_node_ids.dimension_0() )
  {
    const unsigned jInvJ[9][4] =
      { { j22 , j33 , j23 , j32 } ,
        { j13 , j32 , j12 , j33 } ,
        { j12 , j23 , j13 , j22 } ,

        { j23 , j31 , j21 , j33 } ,
        { j11 , j33 , j13 , j31 } ,
        { j13 , j21 , j11 , j23 } ,

        { j21 , j32 , j22 , j31 } ,
        { j12 , j31 , j11 , j32 } ,
        { j11 , j22 , j12 , j21 } };

    for ( unsigned i = 0 ; i < 9 ; ++i ) {
      for ( unsigned j = 0 ; j < 4 ; ++j ) {
        invJacIndex[i][j] = jInvJ[i][j] ;
      }
    }

    const unsigned shmem = sizeof(WorkSpace);
    const unsigned grid_max = 65535 ;
    const unsigned grid_count = std::min( grid_max , elem_count );

    // For compute capability 2.x up to 1024 threads per block
    const dim3 block( BlockDimX , BlockDimY , 1 );
    const dim3 grid( grid_count , 1 , 1 );

    Kokkos::Impl::CudaParallelLaunch< ElementComputation >( *this , grid , block , shmem );
  }

public:

  //------------------------------------
  // Sum among the threadIdx.x
  // Tree reduction of 'value' across the 32 lanes of the calling warp into
  // 'result' (written by lane 0 only).
  // NOTE(review): this is the legacy warp-synchronous idiom — it relies on
  // 'volatile' shared-memory accesses plus implicit lock-step execution of
  // the warp, with no __syncwarp(). Presumably safe on the pre-Volta
  // hardware this was written for; verify before running on newer GPUs.
  template< typename Type >
  __device__ inline static
  void sum_x( Type & result , const double value )
  {
    extern __shared__ WorkSpace work_data[] ;

    volatile double * const base_sum =
      & work_data->sum[ threadIdx.y ][ threadIdx.x ] ;

    base_sum[ 0] = value ;

    if ( threadIdx.x < 16 ) {
      base_sum[0] += base_sum[16];
      base_sum[0] += base_sum[ 8];
      base_sum[0] += base_sum[ 4];
      base_sum[0] += base_sum[ 2];
      base_sum[0] += base_sum[ 1];
    }

    if ( 0 == threadIdx.x ) {
      result = base_sum[0] ;
    }
  }

  // Zero this thread's slot of the per-warp reduction scratch.
  __device__ inline static
  void sum_x_clear()
  {
    extern __shared__ WorkSpace work_data[] ;

    work_data->sum[ threadIdx.y ][ threadIdx.x ] = 0 ;
  }

  //------------------------------------
  //------------------------------------

  // Gather this element's node coordinates and nodal values, then for each
  // integration point compute: the Jacobian and its inverse, the
  // det(J)-scaled quadrature weight, the physical-space basis gradients,
  // and the interpolated solution value/gradient. All results land in the
  // shared WorkSpace for contributeResidualJacobian() to consume.
  __device__ inline
  void evaluateFunctions( const unsigned ielem ) const
  {
    extern __shared__ WorkSpace work_data[] ;

    // Each warp (threadIdx.y) computes an integration point
    // Each thread is responsible for a node / function.
    const unsigned iFunc = threadIdx.x ;
    const bool hasFunc = iFunc < FunctionCount ;

    //------------------------------------
    // Each warp gathers a different variable into 'elem_mat' shared memory.

    if ( hasFunc ) {
      const unsigned node = elem_node_ids( ielem , iFunc );

      for ( unsigned iy = threadIdx.y ; iy < 4 ; iy += blockDim.y ) {
        switch( iy ) {
        case 0 : work_data->sum[0][iFunc] = node_coords(node,0); break ;
        case 1 : work_data->sum[1][iFunc] = node_coords(node,1); break ;
        case 2 : work_data->sum[2][iFunc] = node_coords(node,2); break ;
        case 3 : work_data->sum[3][iFunc] = nodal_values(node); break ;
        default: break ;
        }
      }
    }

    __syncthreads(); // Wait for all warps to finish gathering

    // now get local 'const' copies in register space:

    const double x       = work_data->sum[0][ iFunc ];
    const double y       = work_data->sum[1][ iFunc ];
    const double z       = work_data->sum[2][ iFunc ];
    const double dof_val = work_data->sum[3][ iFunc ];

    __syncthreads(); // Wait for all warps to finish extracting

    sum_x_clear(); // Make sure summation scratch is zero

    //------------------------------------
    // Each warp is now on its own computing an integration point
    // so no further explicit synchronizations are required.

    if ( hasFunc ) {

      float * const J    = work_data->spaceJac[    threadIdx.y ];
      float * const invJ = work_data->spaceInvJac[ threadIdx.y ];

      for ( unsigned iInt = threadIdx.y ;
                     iInt < IntegrationCount ; iInt += blockDim.y ) {

        const float val = elem_data.values[iInt][iFunc] ;
        const float gx  = elem_data.gradients[iInt][0][iFunc] ;
        const float gy  = elem_data.gradients[iInt][1][iFunc] ;
        const float gz  = elem_data.gradients[iInt][2][iFunc] ;

        // J = sum over functions of (reference gradient) x (node coordinate).
        sum_x( J[j11], gx * x );
        sum_x( J[j12], gx * y );
        sum_x( J[j13], gx * z );

        sum_x( J[j21], gy * x );
        sum_x( J[j22], gy * y );
        sum_x( J[j23], gy * z );

        sum_x( J[j31], gz * x );
        sum_x( J[j32], gz * y );
        sum_x( J[j33], gz * z );

        // Inverse jacobian, only enough parallel work for 9 threads in the warp

        if ( iFunc < TensorDim ) {
          invJ[ iFunc ] =
            J[ invJacIndex[iFunc][0] ] * J[ invJacIndex[iFunc][1] ] -
            J[ invJacIndex[iFunc][2] ] * J[ invJacIndex[iFunc][3] ] ;

          // Let all threads in the warp compute determinant into a register

          const float detJ = J[j11] * invJ[j11] +
                             J[j21] * invJ[j12] +
                             J[j31] * invJ[j13] ;

          invJ[ iFunc ] /= detJ ;

          if ( 0 == iFunc ) {
            work_data->detJweight[ iInt ] = detJ * elem_data.weights[ iInt ] ;
          }
        }

        // Transform bases gradients and compute value and gradient

        const float dx = gx * invJ[j11] + gy * invJ[j12] + gz * invJ[j13];
        const float dy = gx * invJ[j21] + gy * invJ[j22] + gz * invJ[j23];
        const float dz = gx * invJ[j31] + gy * invJ[j32] + gz * invJ[j33];

        work_data->dpsidx[iFunc][iInt] = dx ;
        work_data->dpsidy[iFunc][iInt] = dy ;
        work_data->dpsidz[iFunc][iInt] = dz ;

        // Interpolate solution value and gradient at this integration point.
        sum_x( work_data->gradx_at_integ[iInt] , dof_val * dx );
        sum_x( work_data->grady_at_integ[iInt] , dof_val * dy );
        sum_x( work_data->gradz_at_integ[iInt] , dof_val * dz );
        sum_x( work_data->value_at_integ[iInt] , dof_val * val );
      }
    }

    __syncthreads(); // All shared data must be populated at return.
  }

  // Accumulate this element's residual vector and Jacobian matrix from the
  // per-integration-point quantities prepared by evaluateFunctions().
  // Here warps stride over matrix rows and lanes index integration points,
  // with sum_x() reducing the quadrature sum across the warp.
  __device__ inline
  void contributeResidualJacobian( const unsigned ielem ) const
  {
    extern __shared__ WorkSpace work_data[] ;

    sum_x_clear(); // Make sure summation scratch is zero

    // $$ R_i = \int_{\Omega} \nabla \phi_i \cdot (k \nabla T) + \phi_i T^2 d \Omega $$
    // $$ J_{i,j} = \frac{\partial R_i}{\partial T_j} = \int_{\Omega} k \nabla \phi_i \cdot \nabla \phi_j + 2 \phi_i \phi_j T d \Omega $$

    const unsigned iInt = threadIdx.x ;

    if ( iInt < IntegrationCount ) {
      const double value_at_integ = work_data->value_at_integ[ iInt ] ;
      const double gradx_at_integ = work_data->gradx_at_integ[ iInt ] ;
      const double grady_at_integ = work_data->grady_at_integ[ iInt ] ;
      const double gradz_at_integ = work_data->gradz_at_integ[ iInt ] ;

      const float detJweight = work_data->detJweight[ iInt ] ;
      const float coeff_K_detJweight = coeff_K * detJweight ;

      for ( unsigned iRow = threadIdx.y ;
                     iRow < FunctionCount ; iRow += blockDim.y ) {
        // Row basis function value / gradients, pre-scaled by the
        // quadrature weight (and diffusion coefficient for the gradients).
        const float value_row  = elem_data.values[ iInt ][ iRow ] * detJweight ;
        const float dpsidx_row = work_data->dpsidx[ iRow ][ iInt ] * coeff_K_detJweight ;
        const float dpsidy_row = work_data->dpsidy[ iRow ][ iInt ] * coeff_K_detJweight ;
        const float dpsidz_row = work_data->dpsidz[ iRow ][ iInt ] * coeff_K_detJweight ;

        const double res_del = dpsidx_row * gradx_at_integ +
                               dpsidy_row * grady_at_integ +
                               dpsidz_row * gradz_at_integ ;

        const double res_val = value_at_integ * value_at_integ * value_row ;
        const double jac_val_row = 2 * value_at_integ * value_row ;

        sum_x( element_vectors( ielem , iRow ) , res_del + res_val );

        for ( unsigned iCol = 0 ; iCol < FunctionCount ; ++iCol ) {

          const float jac_del =
            dpsidx_row * work_data->dpsidx[iCol][iInt] +
            dpsidy_row * work_data->dpsidy[iCol][iInt] +
            dpsidz_row * work_data->dpsidz[iCol][iInt] ;

          const double jac_val =
            jac_val_row * elem_data.values[ iInt ][ iCol ] ;

          sum_x( element_matrices( ielem , iRow , iCol ) , jac_del + jac_val );
        }
      }
    }

    __syncthreads(); // All warps finish before refilling shared data
  }

  // Kernel entry point: each block strides over the element range by
  // gridDim.x, handling one element per iteration.
  __device__ inline
  void operator()(void) const
  {
    extern __shared__ WorkSpace work_data[] ;

    for ( unsigned ielem = blockIdx.x ; ielem < elem_count ; ielem += gridDim.x ) {

      evaluateFunctions( ielem );

      contributeResidualJacobian( ielem );
    }
  }

}; /* ElementComputation */
} /* namespace Nonlinear */
} /* namespace HybridFEM */
| nschloe/seacas | packages/kokkos/example/multi_fem/NonlinearElement_Cuda.hpp | C++ | bsd-3-clause | 13,740 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Filter
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2014 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id$
*/
/**
* @see Zend_Filter_File_UpperCase
*/
require_once 'Zend/Filter/File/UpperCase.php';
/**
* @category Zend
* @package Zend_Filter
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2014 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @group Zend_Filter
*/
class Zend_Filter_File_UpperCaseTest extends PHPUnit_Framework_TestCase
{
    /**
     * Path to test files
     *
     * @var string
     */
    protected $_filesPath;

    /**
     * Original testfile (pristine fixture, never modified by the tests)
     *
     * @var string
     */
    protected $_origFile;

    /**
     * Testfile (working copy that the filter is applied to)
     *
     * @var string
     */
    protected $_newFile;

    /**
     * Computes the fixture paths once for the whole test case.
     *
     * NOTE(review): PHPUnit discourages constructors on test cases in favor
     * of setUp(); kept as-is because this legacy suite relies on it and a
     * change is not behavior-neutral across PHPUnit versions.
     *
     * @return void
     */
    public function __construct()
    {
        $this->_filesPath = dirname(__FILE__) . DIRECTORY_SEPARATOR
                          . '..' . DIRECTORY_SEPARATOR . '_files' . DIRECTORY_SEPARATOR;
        $this->_origFile  = $this->_filesPath . 'testfile2.txt';
        $this->_newFile   = $this->_filesPath . 'newtestfile2.txt';
    }

    /**
     * Creates a fresh working copy of the fixture file before each test
     * so that in-place filtering never touches the original fixture.
     *
     * @return void
     */
    public function setUp()
    {
        if (!file_exists($this->_newFile)) {
            copy($this->_origFile, $this->_newFile);
        }
    }

    /**
     * Removes the working copy after each test.
     *
     * @return void
     */
    public function tearDown()
    {
        if (file_exists($this->_newFile)) {
            unlink($this->_newFile);
        }
    }

    /**
     * Filtering a readable file uppercases its contents in place.
     *
     * @return void
     */
    public function testInstanceCreationAndNormalWorkflow()
    {
        $this->assertContains('This is a File', file_get_contents($this->_newFile));
        $filter = new Zend_Filter_File_UpperCase();
        $filter->filter($this->_newFile);
        $this->assertContains('THIS IS A FILE', file_get_contents($this->_newFile));
    }

    /**
     * Filtering a non-existent file raises a Zend_Filter_Exception.
     *
     * @return void
     */
    public function testFileNotFoundException()
    {
        try {
            $filter = new Zend_Filter_File_UpperCase();
            $filter->filter($this->_newFile . 'unknown');
            $this->fail('Unknown file exception expected');
        } catch (Zend_Filter_Exception $e) {
            $this->assertContains('not found', $e->getMessage());
        }
    }

    /**
     * An encoding passed to the constructor is honored; skips gracefully
     * when ext/mbstring is unavailable.
     *
     * @return void
     */
    public function testCheckSettingOfEncodingInIstance()
    {
        $this->assertContains('This is a File', file_get_contents($this->_newFile));
        try {
            $filter = new Zend_Filter_File_UpperCase('ISO-8859-1');
            $filter->filter($this->_newFile);
            $this->assertContains('THIS IS A FILE', file_get_contents($this->_newFile));
        } catch (Zend_Filter_Exception $e) {
            $this->assertContains('mbstring is required', $e->getMessage());
        }
    }

    /**
     * An encoding set via setEncoding() is honored; skips gracefully
     * when ext/mbstring is unavailable.
     *
     * @return void
     */
    public function testCheckSettingOfEncodingWithMethod()
    {
        $this->assertContains('This is a File', file_get_contents($this->_newFile));
        try {
            $filter = new Zend_Filter_File_UpperCase();
            $filter->setEncoding('ISO-8859-1');
            $filter->filter($this->_newFile);
            $this->assertContains('THIS IS A FILE', file_get_contents($this->_newFile));
        } catch (Zend_Filter_Exception $e) {
            $this->assertContains('mbstring is required', $e->getMessage());
        }
    }
}
| lyft/zf1 | tests/Zend/Filter/File/UpperCaseTest.php | PHP | bsd-3-clause | 4,190 |
#include "trimetrics.hpp"
#include <math.h>
// Redraw the metric plot (precompiled display list) plus the currently
// moused point on top of it.
void Metric2DTri::draw(int /*xwin*/, int /*ywin*/ )
{
  // draw metric information
  glCallList(drawingList);

  // draw moused point
  // BUG FIX: glPointSize must be issued *outside* a glBegin/glEnd pair;
  // between glBegin and glEnd it raises GL_INVALID_OPERATION and the
  // requested size is silently ignored. glColor is legal inside.
  glPointSize(5.0);
  glBegin(GL_POINTS);
  glColor3f(0.0,0.0,0.0);
  glVertex3f(currX, currY, 0);
  glEnd();
}
// Handle a mouse event in the plot area: the cursor position becomes the
// movable apex of the triangle, the metric is re-evaluated for the new
// triangle, and listeners are notified via the current_val_changed signal.
void Metric2DTri::mouseEvent(QMouseEvent *e, int xmax, int ymax, bool)
{
  // convert window coords to world coords
  // (window origin is top-left; flip y so world origin is bottom-left)
  int ywin = ymax - e->y();
  int xwin = e->x();

  // Base edge is fixed at (-.5,0,0)-(.5,0,0); nodes[0] is the movable apex.
  double nodes[3][3] = { {-.5,1,0}, {-.5,0,0}, {.5,0,0}};
  // Map window x from [0,xmax] to [-xRange,xRange] and y from [0,ymax] to [0,yRange].
  nodes[0][0] = 2*xRange*(double)xwin/(double)xmax - xRange;
  nodes[0][1] = yRange*(double)ywin/(double)ymax;

  currX = nodes[0][0];
  currY = nodes[0][1];

  // calculate metric via the configured metric callback (3 = node count)
  currMetricVal = (*func)(3, nodes);

  // emit value changed
  emit current_val_changed();
}
// Rebuild the precompiled display list that colors the plot area by the
// metric value obtained when the triangle's apex is placed at each sample
// point. The base edge is fixed at (-.5,0,0)-(.5,0,0).
void Metric2DTri::generate_plot()
{
  // create a drawing list and delete old one if it exists
  if(drawingList)
    glDeleteLists(drawingList,1);
  drawingList = glGenLists(1);

  glNewList(drawingList, GL_COMPILE);
  {
    double nodes[3][3] = { {-.5,1,0}, {-.5,0,0}, {.5,0,0}};
    glPointSize(4.0);

    // coordinates can range between (-xRange, xRange) and (0, yRange)
    double hscan , vscan;
    hscan = vscan = sqrt((double)NUM_POINTS);

    // PERF: batch all sample points into a single glBegin/glEnd pair
    // instead of one pair per point (as before) — identical output, far
    // fewer GL calls compiled into the display list. glColor is legal
    // between glBegin and glEnd.
    glBegin(GL_POINTS);
    // scan vertically
    for(int i=0; i<vscan; i++)
    {
      nodes[0][1] = (double)i/(double)vscan * yRange;
      // scan horizontally
      for(int j=0; j<hscan; j++)
      {
        nodes[0][0] = (double)j/(double)hscan * 2 * xRange - xRange;

        // calculate metric with apex at this sample point
        double val = (*func)(3, nodes);

        // set color based on value
        glColor3f( (colorFactor-val)*(colorFactor-val), val*val,2*(colorFactor-val)*val);

        glVertex3d(nodes[0][0], nodes[0][1], nodes[0][2]);
      }
    }
    glEnd();

    // draw fixed nodes
    glPointSize(5.0);
    glColor3f(0,0,0);
    glBegin(GL_POINTS);
    glVertex3d(-.5,0,0);
    glVertex3d( .5,0,0);
    glEnd();
  }
  glEndList();
}
| b3c/VTK-5.8 | Utilities/verdict/metric_plotter/trimetrics.cpp | C++ | bsd-3-clause | 2,006 |
/*
* CVS Identifier:
*
* $Id: DataBlk.java,v 1.7 2001/04/15 14:32:05 grosbois Exp $
*
* Interface: DataBlk
*
* Description: A generic interface to hold 2D blocks of data.
*
*
*
* COPYRIGHT:
*
* This software module was originally developed by Raphaël Grosbois and
* Diego Santa Cruz (Swiss Federal Institute of Technology-EPFL); Joel
* Askelöf (Ericsson Radio Systems AB); and Bertrand Berthelot, David
* Bouchard, Félix Henry, Gerard Mozelle and Patrice Onno (Canon Research
* Centre France S.A) in the course of development of the JPEG2000
* standard as specified by ISO/IEC 15444 (JPEG 2000 Standard). This
* software module is an implementation of a part of the JPEG 2000
* Standard. Swiss Federal Institute of Technology-EPFL, Ericsson Radio
* Systems AB and Canon Research Centre France S.A (collectively JJ2000
* Partners) agree not to assert against ISO/IEC and users of the JPEG
* 2000 Standard (Users) any of their rights under the copyright, not
* including other intellectual property rights, for this software module
* with respect to the usage by ISO/IEC and Users of this software module
* or modifications thereof for use in hardware or software products
* claiming conformance to the JPEG 2000 Standard. Those intending to use
* this software module in hardware or software products are advised that
* their use may infringe existing patents. The original developers of
* this software module, JJ2000 Partners and ISO/IEC assume no liability
* for use of this software module or modifications thereof. No license
* or right to this software module is granted for non JPEG 2000 Standard
* conforming products. JJ2000 Partners have full right to use this
* software module for his/her own purpose, assign or donate this
* software module to any third party and to inhibit third parties from
* using this software module for non JPEG 2000 Standard conforming
* products. This copyright notice must be included in all copies or
* derivative works of this software module.
*
* Copyright (c) 1999/2000 JJ2000 Partners.
* */
using System;
namespace CSJ2K.j2k.image
{
/// <summary> This is a generic abstract class to store data from a block of an
/// image. This class does not have the notion of components. Therefore, it
/// should be used for data from a single component. Subclasses should
/// implement the different types of storage (<tt>int</tt>, <tt>float</tt>,
/// etc.).
///
/// <p>The data is always stored in one array, of the type matching the data
/// type (i.e. for 'int' it's an 'int[]'). The data should be stored in the
/// array in standard scan-line order. That is the samples go from the top-left
/// corner of the code-block to the lower-right corner by line and then
/// column.</p>
///
/// <p>The member variable 'offset' gives the index in the array of the first
/// data element (i.e. the top-left coefficient (ulx,uly)). The member variable
/// 'scanw' gives the width of the scan that is used to store the data, that
/// can be different from the width of the block. Element '(x,y)' of the
/// code-block (i.e. '(ulx,uly)' is the top-left coefficient), will appear at
/// position 'offset+(y-uly)*scanw+(x-ulx)' in the array of data.</p>
///
/// <p>A block of data can have the <i>progressive</i> attribute set. Data is
/// progressive when it is obtained by successive refinement and the values in
/// this block are approximations of the "final" values. When the final values
/// are returned the progressive attribute must be turned off.</p>
///
/// <p>The classes <tt>DataBlkInt</tt> and <tt>DataBlkFloat</tt> provide
/// implementations for <tt>int</tt> and <tt>float</tt> types respectively.</p>
///
/// </summary>
/// <seealso cref="DataBlkInt">
///
/// </seealso>
/// <seealso cref="DataBlkFloat">
///
/// </seealso>
public abstract class DataBlk
{
	/// <summary> Returns the data type of the <tt>DataBlk</tt> object, as defined in
	/// this class.
	///
	/// </summary>
	/// <returns> The data type of the object, as defined in this class.
	///
	/// </returns>
	public abstract int DataType{get;}

	/// <summary> Gets or sets the array containing the data, or null if there is no
	/// data. The array is of the type returned by <tt>getDataType()</tt>
	/// (e.g., for <tt>TYPE_INT</tt>, it is an <tt>int[]</tt>); assigning an
	/// array of the wrong type throws a <tt>ClassCastException</tt>.
	///
	/// <p>The size of the array is not necessarily checked for consistency
	/// with <tt>w</tt> and <tt>h</tt> or any other fields.</p>
	///
	/// <p>Each implementing class should provide a type specific equivalent
	/// member (e.g., <tt>DataInt</tt> in <tt>DataBlkInt</tt>) which uses an
	/// array of the correct type explicitly and not through an
	/// <tt>Object</tt>.</p>
	///
	/// </summary>
	public abstract System.Object Data{get;set;}

	/// <summary>The identifier for the <tt>byte</tt> data type, as signed 8 bits. </summary>
	public const int TYPE_BYTE = 0;

	/// <summary>The identifier for the <tt>short</tt> data type, as signed 16 bits. </summary>
	public const int TYPE_SHORT = 1;

	// NOTE: identifier value 2 is intentionally unused here (reserved in the
	// original JJ2000 code base); do not renumber these constants.

	/// <summary>The identifier for the <tt>int</tt> data type, as signed 32 bits. </summary>
	public const int TYPE_INT = 3;

	/// <summary>The identifier for the <tt>float</tt> data type </summary>
	public const int TYPE_FLOAT = 4;

	/// <summary>The horizontal coordinate (in pixels) of the upper-left corner of the
	/// block of data. This is relative to the component of the image from
	/// where this block was filled or is to be filled.
	/// </summary>
	public int ulx;

	/// <summary>The vertical coordinate of the upper-left corner of the block of
	/// data. This is relative to the component of the image from where this
	/// block was filled or is to be filled.
	/// </summary>
	public int uly;

	/// <summary>The width of the block, in pixels. </summary>
	public int w;

	/// <summary>The height of the block, in pixels. </summary>
	public int h;

	/// <summary>The offset in the array of the top-left coefficient </summary>
	public int offset;

	/// <summary>The width of the scanlines used to store the data in the array </summary>
	public int scanw;

	/// <summary>The progressive attribute (<tt>false</tt> by default) </summary>
	public bool progressive;

	/// <summary> Returns the size in bits, given the data type. The data type must be
	/// one defined in this class. An <tt>ArgumentException</tt> is
	/// thrown if <tt>type</tt> is not defined in this class.
	///
	/// </summary>
	/// <param name="type">The data type.
	///
	/// </param>
	/// <returns> The size in bits of the data type.
	///
	/// </returns>
	public static int getSize(int type)
	{
		switch (type)
		{
			case TYPE_BYTE:
				return 8;

			case TYPE_SHORT:
				return 16;

			case TYPE_INT:
			case TYPE_FLOAT:
				return 32;

			default:
				// BUG FIX: the original threw a message-less ArgumentException;
				// include the offending value so callers can diagnose it.
				throw new System.ArgumentException("Unknown data type: " + type);
		}
	}

	/// <summary> Returns a string of informations about the DataBlk
	///
	/// </summary>
	/// <returns> Block dimensions and progressiveness in a string
	///
	/// </returns>
	public override System.String ToString()
	{
		System.String typeString = "";
		switch (DataType)
		{
			case TYPE_BYTE:
				typeString = "Unsigned Byte";
				break;

			case TYPE_SHORT:
				typeString = "Short";
				break;

			case TYPE_INT:
				typeString = "Integer";
				break;

			case TYPE_FLOAT:
				typeString = "Float";
				break;
		}

		return "DataBlk: " + "upper-left(" + ulx + "," + uly + "), width=" + w + ", height=" + h + ", progressive=" + progressive + ", offset=" + offset + ", scanw=" + scanw + ", type=" + typeString;
	}
}
} | BogusCurry/halcyon | ThirdParty/libopenmetaverse/CSJ2K/j2k/image/DataBlk.cs | C# | bsd-3-clause | 8,865 |
// Copyright 2013 Beego Authors
// Copyright 2014 The Macaron Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package cache
import (
"crypto/md5"
"encoding/hex"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"sync"
"time"
"github.com/Unknwon/com"
"gopkg.in/macaron.v1"
)
// Item represents a cache item.
type Item struct {
	Val     interface{}
	Created int64 // Unix timestamp (seconds) when the item was stored.
	Expire  int64 // TTL in seconds; 0 (or negative) means the item never expires.
}

// hasExpired reports whether the item's age has reached its TTL.
// Items with Expire <= 0 never expire.
func (item *Item) hasExpired() bool {
	return item.Expire > 0 &&
		(time.Now().Unix()-item.Created) >= item.Expire
}
// FileCacher represents a file cache adapter implementation.
type FileCacher struct {
	lock     sync.Mutex
	rootPath string // directory under which all cache files live
	interval int    // GC interval.
}

// NewFileCacher creates and returns a new file cacher.
func NewFileCacher() *FileCacher {
	return &FileCacher{}
}

// filepath maps a cache key to its on-disk location: the key is hashed with
// MD5 and sharded into two levels of single-character directories
// (<root>/<h[0]>/<h[1]>/<hash>) to keep individual directories small.
func (c *FileCacher) filepath(key string) string {
	m := md5.Sum([]byte(key))
	hash := hex.EncodeToString(m[:])
	return filepath.Join(c.rootPath, string(hash[0]), string(hash[1]), hash)
}
// Put puts value into cache with key and expire time.
// If expired is 0, it will not be deleted by GC.
// The item is gob-encoded and written to the sharded path for key.
func (c *FileCacher) Put(key string, val interface{}, expire int64) error {
	filename := c.filepath(key)
	item := &Item{val, time.Now().Unix(), expire}
	data, err := EncodeGob(item)
	if err != nil {
		return err
	}

	// BUG FIX: the MkdirAll error was previously discarded, turning a
	// permissions/disk failure into a confusing WriteFile error later.
	if err := os.MkdirAll(filepath.Dir(filename), os.ModePerm); err != nil {
		return err
	}
	return ioutil.WriteFile(filename, data, os.ModePerm)
}
// read loads and gob-decodes the cached item stored for key.
func (c *FileCacher) read(key string) (*Item, error) {
	filename := c.filepath(key)

	data, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	item := new(Item)
	return item, DecodeGob(data, item)
}

// Get gets cached value by given key.
// Returns nil on any read/decode error; expired entries are lazily
// deleted on access and reported as absent.
func (c *FileCacher) Get(key string) interface{} {
	item, err := c.read(key)
	if err != nil {
		return nil
	}

	if item.hasExpired() {
		os.Remove(c.filepath(key))
		return nil
	}
	return item.Val
}
// Delete deletes cached value by given key.
func (c *FileCacher) Delete(key string) error {
	return os.Remove(c.filepath(key))
}

// Incr increases cached int-type value by given key as a counter.
// Errors if the key does not exist or the stored value is not numeric.
func (c *FileCacher) Incr(key string) error {
	item, err := c.read(key)
	if err != nil {
		return err
	}

	item.Val, err = Incr(item.Val)
	if err != nil {
		return err
	}
	// Re-store with the original expiry; note Created is refreshed by Put,
	// so incrementing effectively resets the TTL clock.
	return c.Put(key, item.Val, item.Expire)
}

// Decr decreases cached int-type value by given key as a counter.
func (c *FileCacher) Decr(key string) error {
	item, err := c.read(key)
	if err != nil {
		return err
	}

	item.Val, err = Decr(item.Val)
	if err != nil {
		return err
	}
	return c.Put(key, item.Val, item.Expire)
}

// IsExist returns true if cached value exists.
// NOTE(review): this only checks file existence; an expired-but-not-yet-GCed
// entry still reports true.
func (c *FileCacher) IsExist(key string) bool {
	return com.IsExist(c.filepath(key))
}

// Flush deletes all cached data.
func (c *FileCacher) Flush() error {
	return os.RemoveAll(c.rootPath)
}
// startGC walks the cache directory, removes expired entries, and
// reschedules itself to run again after c.interval seconds. It is a no-op
// when the interval is unset (< 1).
func (c *FileCacher) startGC() {
	c.lock.Lock()
	defer c.lock.Unlock()

	if c.interval < 1 {
		return
	}

	if err := filepath.Walk(c.rootPath, func(path string, fi os.FileInfo, err error) error {
		if err != nil {
			return fmt.Errorf("Walk: %v", err)
		}

		if fi.IsDir() {
			return nil
		}

		data, err := ioutil.ReadFile(path)
		if err != nil {
			// The file may have been removed concurrently; that is fine.
			if os.IsNotExist(err) {
				return nil
			}
			// BUG FIX: the original built this error with fmt.Errorf but
			// discarded the result and then tried to decode the unread data.
			return fmt.Errorf("ReadFile: %v", err)
		}

		item := new(Item)
		if err = DecodeGob(data, item); err != nil {
			return err
		}
		if item.hasExpired() {
			if err = os.Remove(path); err != nil && !os.IsNotExist(err) {
				return fmt.Errorf("Remove: %v", err)
			}
		}
		return nil
	}); err != nil {
		log.Printf("error garbage collecting cache files: %v", err)
	}

	time.AfterFunc(time.Duration(c.interval)*time.Second, func() { c.startGC() })
}
// StartAndGC starts GC routine based on config string settings.
// AdapterConfig is the cache root directory; relative paths are resolved
// against macaron.Root. The GC loop runs in a background goroutine.
func (c *FileCacher) StartAndGC(opt Options) error {
	c.lock.Lock()
	c.rootPath = opt.AdapterConfig
	c.interval = opt.Interval
	if !filepath.IsAbs(c.rootPath) {
		c.rootPath = filepath.Join(macaron.Root, c.rootPath)
	}
	c.lock.Unlock()

	if err := os.MkdirAll(c.rootPath, os.ModePerm); err != nil {
		return err
	}

	go c.startGC()
	return nil
}
// init registers this adapter with the cache registry under the name "file".
func init() {
	Register("file", NewFileCacher())
}
| xaionaro/gogs | vendor/github.com/go-macaron/cache/file.go | GO | mit | 4,563 |
/*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.nashorn.internal.parser;
import static jdk.nashorn.internal.parser.TokenKind.LITERAL;
import jdk.nashorn.internal.runtime.Source;
/**
* Basic parse/lex unit.
*
*/
public class Token {
    // Private constructor: Token is a static utility class over packed
    // 64-bit token descriptors and must not be instantiated.
    private Token() {
    }
/**
* Create a compact form of token information.
* @param type Type of token.
* @param position Start position of the token in the source.
* @param length Length of the token.
* @return Token descriptor.
*/
public static long toDesc(final TokenType type, final int position, final int length) {
return (long)position << 32 |
(long)length << 8 |
type.ordinal();
}
/**
* Extract token position from a token descriptor.
* @param token Token descriptor.
* @return Start position of the token in the source.
*/
public static int descPosition(final long token) {
return (int)(token >>> 32);
}
/**
* Extract token length from a token descriptor.
* @param token Token descriptor.
* @return Length of the token.
*/
public static int descLength(final long token) {
return (int)token >>> 8;
}
/**
* Extract token type from a token descriptor.
* @param token Token descriptor.
* @return Type of token.
*/
public static TokenType descType(final long token) {
return TokenType.getValues()[(int)token & 0xff];
}
/**
* Change the token to use a new type.
*
* @param token The original token.
* @param newType The new token type.
* @return The recast token.
*/
public static long recast(final long token, final TokenType newType) {
return token & ~0xFFL | newType.ordinal();
}
/**
* Return a string representation of a token.
* @param source Token source.
* @param token Token descriptor.
* @param verbose True to include details.
* @return String representation.
*/
public static String toString(final Source source, final long token, final boolean verbose) {
final TokenType type = Token.descType(token);
String result;
if (source != null && type.getKind() == LITERAL) {
result = source.getString(token);
} else {
result = type.getNameOrType();
}
if (verbose) {
final int position = Token.descPosition(token);
final int length = Token.descLength(token);
result += " (" + position + ", " + length + ")";
}
return result;
}
/**
* String conversion of token
*
* @param source the source
* @param token the token
*
* @return token as string
*/
public static String toString(final Source source, final long token) {
return Token.toString(source, token, false);
}
/**
* String conversion of token - version without source given
*
* @param token the token
*
* @return token as string
*/
public static String toString(final long token) {
return Token.toString(null, token, false);
}
/**
* Static hash code computation function token
*
* @param token a token
*
* @return hash code for token
*/
public static int hashCode(final long token) {
return (int)(token ^ token >>> 32);
}
}
| rokn/Count_Words_2015 | testing/openjdk2/nashorn/src/jdk/nashorn/internal/parser/Token.java | Java | mit | 4,603 |
require 'readline'

# Commands offered for tab-completion at the prompt.
commands = %w[
  search download open
  help history quit
  url next clear
  prev past
].sort

# Complete on any command starting with the typed prefix.
Readline.completion_append_character = " "
Readline.completion_proc = lambda do |prefix|
  commands.grep(/^#{Regexp.escape(prefix)}/)
end

# Echo each line (with history enabled) until EOF or "quit".
loop do
  line = Readline.readline('> ', true)
  break if line.nil?
  p line
  break if line == 'quit'
end
| kmcminn/rails_survey | vendor/gems/rb-readline-0.5.0/examples/example_readline_with_completion.rb | Ruby | mit | 355 |
<?php
/**
* TextHelperTest file
*
* PHP 5
*
* CakePHP(tm) Tests <http://book.cakephp.org/2.0/en/development/testing.html>
* Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
*
* Licensed under The MIT License
* For full copyright and license information, please see the LICENSE.txt
* Redistributions of files must retain the above copyright notice
*
* @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
* @link http://book.cakephp.org/2.0/en/development/testing.html CakePHP(tm) Tests
* @package Cake.Test.Case.View.Helper
* @since CakePHP(tm) v 1.2.0.4206
* @license MIT License (http://www.opensource.org/licenses/mit-license.php)
*/
App::uses('View', 'View');
App::uses('TextHelper', 'View/Helper');
/**
 * TextHelper subclass that exposes its internal string engine so tests can
 * inject and inspect a mock.
 */
class TextHelperTestObject extends TextHelper {

/**
 * Replaces the helper's string engine with the given mock.
 *
 * @param StringMock $string mock engine instance
 * @return void
 */
	public function attach(StringMock $string) {
		$this->_engine = $string;
	}

/**
 * Returns the currently attached string engine.
 *
 * @return object the engine instance
 */
	public function engine() {
		return $this->_engine;
	}
}
/**
 * StringMock class
 *
 * Empty stand-in for the helper's string engine; its methods are defined
 * per-test via PHPUnit mocks.
 */
class StringMock {
}
/**
* TextHelperTest class
*
* @package Cake.Test.Case.View.Helper
*/
class TextHelperTest extends CakeTestCase {
/**
* setUp method
*
* @return void
*/
public function setUp() {
parent::setUp();
$this->View = new View(null);
$this->Text = new TextHelper($this->View);
}
/**
* tearDown method
*
* @return void
*/
public function tearDown() {
unset($this->View);
parent::tearDown();
}
/**
* test String class methods are called correctly
*/
public function testTextHelperProxyMethodCalls() {
$methods = array(
'highlight', 'stripLinks', 'truncate', 'excerpt', 'toList',
);
$String = $this->getMock('StringMock', $methods);
$Text = new TextHelperTestObject($this->View, array('engine' => 'StringMock'));
$Text->attach($String);
foreach ($methods as $method) {
$String->expects($this->at(0))->method($method);
$Text->{$method}('who', 'what', 'when', 'where', 'how');
}
}
/**
* test engine override
*/
public function testEngineOverride() {
App::build(array(
'Utility' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Utility' . DS)
), App::REGISTER);
$Text = new TextHelperTestObject($this->View, array('engine' => 'TestAppEngine'));
$this->assertInstanceOf('TestAppEngine', $Text->engine());
App::build(array(
'Plugin' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Plugin' . DS)
));
CakePlugin::load('TestPlugin');
$Text = new TextHelperTestObject($this->View, array('engine' => 'TestPlugin.TestPluginEngine'));
$this->assertInstanceOf('TestPluginEngine', $Text->engine());
CakePlugin::unload('TestPlugin');
}
/**
* testAutoLink method
*
* @return void
*/
public function testAutoLink() {
$text = 'This is a test text';
$expected = 'This is a test text';
$result = $this->Text->autoLink($text);
$this->assertEquals($expected, $result);
$text = 'Text with a partial www.cakephp.org URL and test@cakephp.org email address';
$result = $this->Text->autoLink($text);
$expected = 'Text with a partial <a href="http://www.cakephp.org">www.cakephp.org</a> URL and <a href="mailto:test@cakephp\.org">test@cakephp\.org</a> email address';
$this->assertRegExp('#^' . $expected . '$#', $result);
$text = 'This is a test text with URL http://www.cakephp.org';
$expected = 'This is a test text with URL <a href="http://www.cakephp.org">http://www.cakephp.org</a>';
$result = $this->Text->autoLink($text);
$this->assertEquals($expected, $result);
$text = 'This is a test text with URL http://www.cakephp.org and some more text';
$expected = 'This is a test text with URL <a href="http://www.cakephp.org">http://www.cakephp.org</a> and some more text';
$result = $this->Text->autoLink($text);
$this->assertEquals($expected, $result);
$text = "This is a test text with URL http://www.cakephp.org\tand some more text";
$expected = "This is a test text with URL <a href=\"http://www.cakephp.org\">http://www.cakephp.org</a>\tand some more text";
$result = $this->Text->autoLink($text);
$this->assertEquals($expected, $result);
$text = 'This is a test text with URL http://www.cakephp.org(and some more text)';
$expected = 'This is a test text with URL <a href="http://www.cakephp.org">http://www.cakephp.org</a>(and some more text)';
$result = $this->Text->autoLink($text);
$this->assertEquals($expected, $result);
$text = 'This is a test text with URL http://www.cakephp.org';
$expected = 'This is a test text with URL <a href="http://www.cakephp.org" class="link">http://www.cakephp.org</a>';
$result = $this->Text->autoLink($text, array('class' => 'link'));
$this->assertEquals($expected, $result);
$text = 'This is a test text with URL http://www.cakephp.org';
$expected = 'This is a test text with URL <a href="http://www.cakephp.org" class="link" id="MyLink">http://www.cakephp.org</a>';
$result = $this->Text->autoLink($text, array('class' => 'link', 'id' => 'MyLink'));
$this->assertEquals($expected, $result);
}
/**
* Test escaping for autoLink
*
* @return void
*/
public function testAutoLinkEscape() {
$text = 'This is a <b>test</b> text with URL http://www.cakephp.org';
$expected = 'This is a <b>test</b> text with URL <a href="http://www.cakephp.org">http://www.cakephp.org</a>';
$result = $this->Text->autoLink($text);
$this->assertEquals($expected, $result);
$text = 'This is a <b>test</b> text with URL http://www.cakephp.org';
$expected = 'This is a <b>test</b> text with URL <a href="http://www.cakephp.org">http://www.cakephp.org</a>';
$result = $this->Text->autoLink($text, array('escape' => false));
$this->assertEquals($expected, $result);
}
/**
* Data provider for autoLinking
*/
public static function autoLinkProvider() {
return array(
array(
'This is a test text',
'This is a test text',
),
array(
'This is a test that includes (www.cakephp.org)',
'This is a test that includes (<a href="http://www.cakephp.org">www.cakephp.org</a>)',
),
array(
'This is a test that includes www.cakephp.org:8080',
'This is a test that includes <a href="http://www.cakephp.org:8080">www.cakephp.org:8080</a>',
),
array(
'This is a test that includes http://de.wikipedia.org/wiki/Kanton_(Schweiz)#fragment',
'This is a test that includes <a href="http://de.wikipedia.org/wiki/Kanton_(Schweiz)#fragment">http://de.wikipedia.org/wiki/Kanton_(Schweiz)#fragment</a>',
),
array(
'This is a test that includes www.wikipedia.org/wiki/Kanton_(Schweiz)#fragment',
'This is a test that includes <a href="http://www.wikipedia.org/wiki/Kanton_(Schweiz)#fragment">www.wikipedia.org/wiki/Kanton_(Schweiz)#fragment</a>',
),
array(
'This is a test that includes http://example.com/test.php?foo=bar text',
'This is a test that includes <a href="http://example.com/test.php?foo=bar">http://example.com/test.php?foo=bar</a> text',
),
array(
'This is a test that includes www.example.com/test.php?foo=bar text',
'This is a test that includes <a href="http://www.example.com/test.php?foo=bar">www.example.com/test.php?foo=bar</a> text',
),
array(
'Text with a partial www.cakephp.org URL',
'Text with a partial <a href="http://www.cakephp.org">www.cakephp.org</a> URL',
),
array(
'Text with a partial WWW.cakephp.org URL',
'Text with a partial <a href="http://WWW.cakephp.org">WWW.cakephp.org</a> URL',
),
array(
'Text with a partial WWW.cakephp.org ©, URL',
'Text with a partial <a href="http://WWW.cakephp.org">WWW.cakephp.org</a> &copy, URL',
),
array(
'Text with a url www.cot.ag/cuIb2Q and more',
'Text with a url <a href="http://www.cot.ag/cuIb2Q">www.cot.ag/cuIb2Q</a> and more',
),
array(
'Text with a url http://www.does--not--work.com and more',
'Text with a url <a href="http://www.does--not--work.com">http://www.does--not--work.com</a> and more',
),
array(
'Text with a url http://www.not--work.com and more',
'Text with a url <a href="http://www.not--work.com">http://www.not--work.com</a> and more',
),
);
}
/**
* testAutoLinkUrls method
*
* @dataProvider autoLinkProvider
* @return void
*/
public function testAutoLinkUrls($text, $expected) {
$result = $this->Text->autoLinkUrls($text);
$this->assertEquals($expected, $result);
}
/**
* Test the options for autoLinkUrls
*
* @return void
*/
public function testAutoLinkUrlsOptions() {
$text = 'Text with a partial www.cakephp.org URL';
$expected = 'Text with a partial <a href="http://www.cakephp.org" \s*class="link">www.cakephp.org</a> URL';
$result = $this->Text->autoLinkUrls($text, array('class' => 'link'));
$this->assertRegExp('#^' . $expected . '$#', $result);
$text = 'Text with a partial WWW.cakephp.org © URL';
$expected = 'Text with a partial <a href="http://WWW.cakephp.org"\s*>WWW.cakephp.org</a> © URL';
$result = $this->Text->autoLinkUrls($text, array('escape' => false));
$this->assertRegExp('#^' . $expected . '$#', $result);
}
/**
* Test autoLinkUrls with the escape option.
*
* @return void
*/
public function testAutoLinkUrlsEscape() {
$text = 'Text with a partial <a href="http://www.cakephp.org">link</a> link';
$expected = 'Text with a partial <a href="http://www.cakephp.org">link</a> link';
$result = $this->Text->autoLinkUrls($text, array('escape' => false));
$this->assertEquals($expected, $result);
$text = 'Text with a partial <iframe src="http://www.cakephp.org" /> link';
$expected = 'Text with a partial <iframe src="http://www.cakephp.org" /> link';
$result = $this->Text->autoLinkUrls($text, array('escape' => false));
$this->assertEquals($expected, $result);
$text = 'Text with a partial <iframe src="http://www.cakephp.org" /> link';
$expected = 'Text with a partial <iframe src="http://www.cakephp.org" /> link';
$result = $this->Text->autoLinkUrls($text, array('escape' => true));
$this->assertEquals($expected, $result);
$text = 'Text with a url <a href="http://www.not-working-www.com">www.not-working-www.com</a> and more';
$expected = 'Text with a url <a href="http://www.not-working-www.com">www.not-working-www.com</a> and more';
$result = $this->Text->autoLinkUrls($text);
$this->assertEquals($expected, $result);
$text = 'Text with a url www.not-working-www.com and more';
$expected = 'Text with a url <a href="http://www.not-working-www.com">www.not-working-www.com</a> and more';
$result = $this->Text->autoLinkUrls($text);
$this->assertEquals($expected, $result);
$text = 'Text with a url http://www.not-working-www.com and more';
$expected = 'Text with a url <a href="http://www.not-working-www.com">http://www.not-working-www.com</a> and more';
$result = $this->Text->autoLinkUrls($text);
$this->assertEquals($expected, $result);
$text = 'Text with a url http://www.www.not-working-www.com and more';
$expected = 'Text with a url <a href="http://www.www.not-working-www.com">http://www.www.not-working-www.com</a> and more';
$result = $this->Text->autoLinkUrls($text);
$this->assertEquals($expected, $result);
}
/**
 * testAutoLinkEmails method
 *
 * Verifies that plain text passes through untouched, that email addresses
 * are wrapped in mailto: links (with HTML-escaping of special characters
 * such as apostrophes), and that extra HTML attributes are applied.
 *
 * @return void
 */
	public function testAutoLinkEmails() {
		$text = 'This is a test text';
		$expected = 'This is a test text';
		$result = $this->Text->autoLinkUrls($text);
		$this->assertEquals($expected, $result);

		$text = 'Text with email@example.com address';
		$expected = 'Text with <a href="mailto:email@example.com"\s*>email@example.com</a> address';
		$result = $this->Text->autoLinkEmails($text);
		$this->assertRegExp('#^' . $expected . '$#', $result);

		$text = "Text with o'hare._-bob@example.com address";
		// The apostrophe is HTML-escaped in the generated link, so the
		// expected markup uses the &#039; entity (which also keeps this
		// single-quoted string syntactically valid).
		$expected = 'Text with <a href="mailto:o&#039;hare._-bob@example.com">o&#039;hare._-bob@example.com</a> address';
		$result = $this->Text->autoLinkEmails($text);
		$this->assertEquals($expected, $result);

		$text = 'Text with email@example.com address';
		$expected = 'Text with <a href="mailto:email@example.com" \s*class="link">email@example.com</a> address';
		$result = $this->Text->autoLinkEmails($text, array('class' => 'link'));
		$this->assertRegExp('#^' . $expected . '$#', $result);
	}
/**
* test invalid email addresses.
*
* @return void
*/
public function testAutoLinkEmailInvalid() {
$result = $this->Text->autoLinkEmails('this is a myaddress@gmx-de test');
$expected = 'this is a myaddress@gmx-de test';
$this->assertEquals($expected, $result);
}
}
| nickpack/Methadone | src/lib/Cake/Test/Case/View/Helper/TextHelperTest.php | PHP | mit | 12,551 |
// ------------------------------------
// #POSTCSS - LOAD OPTIONS - OPTIONS
// ------------------------------------
'use strict'
/**
*
* @method options
*
* @param {Object} options PostCSS Config
*
* @return {Object} options PostCSS Options
*/
module.exports = function options (options) {
if (options.parser) {
options.parser = require(options.parser)
}
if (options.syntax) {
options.syntax = require(options.syntax)
}
if (options.stringifier) {
options.stringifier = require(options.stringifier)
}
if (options.plugins) {
delete options.plugins
}
return options
}
| thethirdwheel/reasonabledisagreement | node_modules/react-scripts/node_modules/postcss-load-options/lib/options.js | JavaScript | mit | 614 |
# Minimal ActiveRecord model used by the test suite; it has no behaviour of
# its own beyond what ActiveRecord derives from its table.
class WildBoar < ActiveRecord::Base
end
| mvanholstyn/strac | vendor/plugins/has_many_polymorphs/test/models/wild_boar.rb | Ruby | mit | 41 |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Game.Graphics;
using osuTK;
using osuTK.Graphics;
namespace osu.Game.Tests.Visual.UserInterface
{
    /// <summary>
    /// Visual test scene showing <see cref="DrawableDate"/>s for times just
    /// before, at, and just after "now", so their refresh behaviour can be
    /// observed as each one ticks over.
    /// </summary>
    public class TestSceneDrawableDate : OsuTestScene
    {
        public TestSceneDrawableDate()
        {
            Child = new FillFlowContainer
            {
                Direction = FillDirection.Vertical,
                AutoSizeAxes = Axes.Both,
                Origin = Anchor.Centre,
                Anchor = Anchor.Centre,
                Children = new Drawable[]
                {
                    // Dates straddling the current time, spaced a few
                    // seconds apart.
                    new PokeyDrawableDate(DateTimeOffset.Now.Subtract(TimeSpan.FromSeconds(60))),
                    new PokeyDrawableDate(DateTimeOffset.Now.Subtract(TimeSpan.FromSeconds(55))),
                    new PokeyDrawableDate(DateTimeOffset.Now.Subtract(TimeSpan.FromSeconds(50))),
                    new PokeyDrawableDate(DateTimeOffset.Now),
                    new PokeyDrawableDate(DateTimeOffset.Now.Add(TimeSpan.FromSeconds(60))),
                    new PokeyDrawableDate(DateTimeOffset.Now.Add(TimeSpan.FromSeconds(65))),
                    new PokeyDrawableDate(DateTimeOffset.Now.Add(TimeSpan.FromSeconds(70))),
                }
            };
        }

        /// <summary>
        /// A <see cref="DrawableDate"/> paired with a box that flashes
        /// (fades out from full opacity) every time the displayed date
        /// value refreshes.
        /// </summary>
        private class PokeyDrawableDate : CompositeDrawable
        {
            public PokeyDrawableDate(DateTimeOffset date)
            {
                const float box_size = 10;

                DrawableDate drawableDate;
                Box flash;

                AutoSizeAxes = Axes.Both;
                InternalChildren = new Drawable[]
                {
                    // Indicator box, hidden until the first refresh.
                    flash = new Box
                    {
                        Colour = Color4.Yellow,
                        Size = new Vector2(box_size),
                        Anchor = Anchor.CentreLeft,
                        Origin = Anchor.CentreLeft,
                        Alpha = 0
                    },
                    drawableDate = new DrawableDate(date)
                    {
                        X = box_size + 2,
                    }
                };

                // Flash the box whenever the drawable's displayed value changes.
                drawableDate.Current.ValueChanged += _ => flash.FadeOutFromOne(500);
            }
        }
    }
}
| ppy/osu | osu.Game.Tests/Visual/UserInterface/TestSceneDrawableDate.cs | C# | mit | 2,429 |
# Migration creating the users table with basic identity columns and the
# standard created_at/updated_at timestamps.
class CreateUsers < ActiveRecord::Migration
  def change
    create_table :users do |t|
      t.string :email
      t.string :first_name
      t.string :last_name

      t.timestamps
    end
  end
end
| mfpiccolo/self_systeem | test/dummy_app/db/migrate/20140310223358_create_users.rb | Ruby | mit | 201 |
try:
    from astropy.models import ParametricModel,Parameter,_convert_input,_convert_output
    import numpy as np

    class PowerLawModel(ParametricModel):
        # Power-law spectral model: f(x) = scale * x**(-alpha).
        # NOTE(review): this targets the long-removed pre-0.3
        # ``astropy.models`` API; on modern astropy the import above fails
        # and the whole block is skipped via the ImportError guard below.
        param_names = ['scale', 'alpha']

        def __init__(self, scale, alpha, param_dim=1):
            # Wrap the raw values in astropy Parameter descriptors.
            self._scale = Parameter(name='scale', val=scale, mclass=self, param_dim=param_dim)
            self._alpha = Parameter(name='alpha', val=alpha, mclass=self, param_dim=param_dim)
            # NOTE(review): `self` is passed both implicitly through super()
            # and again as an explicit first argument -- confirm against the
            # old astropy signature; this looks like a duplicated-self bug.
            super(ParametricModel,self).__init__(self, self.param_names, ndim=1, outdim=1, param_dim=param_dim)
            self.linear = False
            self.deriv = None

        def eval(self, xvals, params):
            # params[0] = scale, params[1] = alpha.
            return params[0]*((xvals)**(-params[1]))

        def noderiv(self, params, xvals, yvals):
            # Analytic partial derivatives with respect to each parameter,
            # returned with one column per parameter (hence the transpose).
            deriv_dict = {
                'scale': ((xvals)**(-params[1])),
                'alpha': params[0]*((xvals)**(-params[1]))*np.log(xvals)}
            derivval = [deriv_dict[par] for par in self.param_names]
            return np.array(derivval).T

        def __call__(self, x):
            """
            Transforms data using this model.

            Parameters
            --------------
            x : array, of minimum dimensions 1

            Notes
            -----
            See the module docstring for rules for model evaluation.
            """
            x, fmt = _convert_input(x, self.param_dim)
            result = self.eval(x, self.param_sets)
            return _convert_output(result, fmt)

except ImportError:
    # Old astropy.models API unavailable: expose nothing.
    pass
| vlas-sokolov/pyspeckit | pyspeckit/spectrum/models/astropy_models.py | Python | mit | 1,575 |
#if !defined(CODE_GOOGLE_COM_P_V8_CONVERT_V8_CONVERT_HPP_INCLUDED)
#define CODE_GOOGLE_COM_P_V8_CONVERT_V8_CONVERT_HPP_INCLUDED 1
// Doxygen REFUSES to use this block as namespace docs: @namespace cvv8
/** @mainpage libv8-convert (cvv8)
The cvv8 namespace (formerly v8::convert) houses APIs for handling the
following:
- Converting between v8 Value handles and "native types" using generic
interface. This allows us to write generic algorithms which convert
between JS/C++ without having to know the exact types we're dealing
with. The basic POD types and some STL types are supported out of the
box and plugging in one's own types is normally quite simple.
- Converting free- and member functions into v8::InvocationCallback
functions. These generated functions convert the JavaScript-originated
function arguments into native counterparts, forward the data to the
original native function, and convert the return values back to
something JS can use.
Those two core features give us all we need in order to be able to
bind near-arbitrary C/C++ functions with JavaScript (where calling
conventions and type conversions allow us to do so). For cases where
the "automatic" function-to-InvocationCallback conversions are not
suitable, the type-conversion API can simplify the implementation of
custom v8::InvocationCallback functions.
All of the conversions are compile-time typesafe where possible and
fail gracefully when such a determination can only be made at runtime.
This code originated as the core-most component of the v8-juice
library (http://code.google.com/p/v8-juice). After a couple years
i felt compelled to refactor it into a toolkit usable by arbitrary
    v8-using clients, doing a bit of cleanup along the way. The eventual
intention is that this code will replace the v8::juice::convert
code.
Author: Stephan Beal (http://wanderinghorse.net/home/stephan/)
License: Dual MIT/Public Domain
Project home page: http://code.google.com/p/v8-juice/wiki/V8Convert
The most important functions and types, from a user's perspective,
include:
Converting types:
- cvv8::CastToJS()
- cvv8::CastFromJS()
Implementing custom conversions:
- cvv8::NativeToJS
- cvv8::JSToNative
Converting functions to v8::InvocationCallback:
- cvv8::FunctionToInCa
- cvv8::MethodToInCa
- cvv8::ConstMethodToInCa
- cvv8::ToInCa
- cvv8::FunctorToInCa
- cvv8::PredicatedInCa and cvv8::PredicatedInCaDispatcher
Binding JS properties to native properties, functions, methods, or
functors:
- cvv8::FunctionToGetter, cvv8::FunctionToSetter
- cvv8::MethodToGetter, cvv8::MethodToSetter
- cvv8::ConstMethodToGetter, cvv8::ConstMethodToSetter
- cvv8::FunctorToGetter, cvv8::FunctorToSetter
Other utilities:
- cvv8::CtorForwarder and cvv8::CtorArityDispatcher
- cvv8::ClassCreator simplifies binding of C++ classes with v8.
- cvv8::FunctionTo converts functions to ...
- cvv8::MethodTo converts methods to ...
- cvv8::FunctorTo converts functors to ...
- cvv8::VarTo converts variables to ...
- cvv8::CallForwarder forwards native arguments to JS functions.
- The tmp and sl namespaces hold various template metaprogramming bits.
- ... there's more ...
Most of the code in this library are internal template specializations
which take care of the dirty work. Typical clients won't typically need
more than what's listed above.
A core rule of this library is "if it ain't documented, don't use
it." All public API members which are intended for client-side use
are documented. Some one-line proxies whose purpose is either very
obvious, exist only for template type resolution reasons, or
are strictly internal are not necessarily documented.
*/
namespace cvv8 {
}
#include "convert.hpp"
#include "invocable.hpp"
#include "arguments.hpp"
#include "ClassCreator.hpp"
#include "properties.hpp"
#include "XTo.hpp"
/** LICENSE
This software's source code, including accompanying documentation and
demonstration applications, are licensed under the following
conditions...
The author (Stephan G. Beal [http://wanderinghorse.net/home/stephan/])
explicitly disclaims copyright in all jurisdictions which recognize
such a disclaimer. In such jurisdictions, this software is released
into the Public Domain.
In jurisdictions which do not recognize Public Domain property
(e.g. Germany as of 2011), this software is Copyright (c) 2011
by Stephan G. Beal, and is released under the terms of the MIT License
(see below).
In jurisdictions which recognize Public Domain property, the user of
this software may choose to accept it either as 1) Public Domain, 2)
under the conditions of the MIT License (see below), or 3) under the
terms of dual Public Domain/MIT License conditions described here, as
they choose.
The MIT License is about as close to Public Domain as a license can
get, and is described in clear, concise terms at:
http://en.wikipedia.org/wiki/MIT_License
The full text of the MIT License follows:
--
Copyright (c) 2011 Stephan G. Beal (http://wanderinghorse.net/home/stephan/)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
--END OF MIT LICENSE--
For purposes of the above license, the term "Software" includes
documentation and demonstration source code which accompanies
this software. ("Accompanies" = is contained in the Software's
primary public source code repository.)
*/
#endif /* CODE_GOOGLE_COM_P_V8_CONVERT_V8_CONVERT_HPP_INCLUDED */
| IbpTeam/node-nodegit | vendor/libv8-convert/cvv8/v8-convert.hpp | C++ | mit | 6,517 |
require 'yaml'
module VagrantPlugins
module ProviderLibvirt
module Action
class PruneNFSExports
def initialize(app, env)
@app = app
end
def call(env)
if env[:host]
uuid = env[:machine].id
# get all uuids
uuids = env[:machine].provider.driver.connection.servers.all.map(&:id)
# not exiisted in array will removed from nfs
uuids.delete(uuid)
env[:host].capability(
:nfs_prune, env[:machine].ui, uuids)
end
@app.call(env)
end
end
end
end
end
| timhughes/vagrant-libvirt | lib/vagrant-libvirt/action/prune_nfs_exports.rb | Ruby | mit | 624 |
/**************************************
{
x:0, y:0, width:433,
min:1, max:25, step:1,
message: "rules/turns"
}
**************************************/
function Slider(config){

	var self = this;
	self.id = config.id;

	// Create DOM: absolutely positioned container sized from config.
	var dom = document.createElement("div");
	dom.className = "slider";
	dom.style.left = config.x+"px";
	dom.style.top = config.y+"px";
	dom.style.width = config.width+"px";
	self.dom = dom;

	// Background track
	var bg = document.createElement("div");
	bg.className = "slider_bg";
	dom.appendChild(bg);

	// Draggable knob
	var knob = document.createElement("div");
	knob.className = "slider_knob";
	dom.appendChild(knob);

	// Current value, snapped to config.step within [config.min, config.max].
	self.value = 0;
	// Map a 0..1 drag parameter to a snapped value in [min, max].
	var _paramToValue = function(param){
		var value = config.min + (config.max-config.min)*param;
		value = Math.round(value/config.step)*config.step;
		return value;
	};
	// Inverse mapping: value -> 0..1 parameter.
	var _valueToParam = function(value){
		var param = (value-config.min)/(config.max-config.min); // to (0-1)
		return param;
	};
	// NOTE(review): setParam positions the knob with value*config.width-15
	// (no "px" unit), while setValue uses param*(config.width-30); the two
	// look inconsistent -- confirm whether setParam is still used anywhere.
	self.setParam = function(param){

		// Bounds
		var value = config.min + (config.max-config.min)*param;
		value = Math.round(value/config.step)*config.step;
		self.value = value;

		// DOM
		knob.style.left = self.value*config.width-15;

	};
	self.setValue = function(value){

		// Set
		self.value = value;

		// DOM with param
		var param = _valueToParam(self.value);
		knob.style.left = param*(config.width-30);

	};
	// Keep the knob in sync with externally-published value changes.
	// (listen/publish/unlisten are app-level globals, not defined here.)
	if(config.message) listen(self, config.message, self.setValue);

	// Mouse events
	var _isDragging = false;
	var _offsetX = 0; // grab offset inside the knob, so it doesn't jump on drag
	var _mouseToParam = function(event){

		// Mouse to Param to Value
		var param = (event.clientX - _offsetX - dom.getBoundingClientRect().left - 8)/(config.width-30);
		if(param<0) param=0;
		if(param>1) param=1;
		var value = _paramToValue(param);

		// Publish these changes! (only if ACTUALLY changed)
		if(self.value != value){
			if(config.message) publish(config.message, [value]);
			if(config.onchange) config.onchange(value);
		}

	};
	// Clicking the track jumps straight to that position.
	var _onDomMouseDown = function(event){
		if(config.onselect) config.onselect();
		_mouseToParam(event);
		_isDragging = true;
		_offsetX = 0;
	};
	// Grabbing the knob remembers where inside it the press landed.
	var _onKnobMouseDown = function(event){
		_isDragging = true;
		if(config.onselect) config.onselect();
		_offsetX = event.clientX - knob.getBoundingClientRect().left;
	};
	var _onWindowMouseMove = function(event){
		if(_isDragging) _mouseToParam(event);
	};
	var _onWindowMouseUp = function(){
		_isDragging = false;
	};
	dom.addEventListener("mousedown",_onDomMouseDown,false);
	knob.addEventListener("mousedown",_onKnobMouseDown,false);
	window.addEventListener("mousemove",_onWindowMouseMove,false);
	window.addEventListener("mouseup",_onWindowMouseUp,false);

	// FOR TOUCH: adapt touch events to the mouse handlers above by
	// extracting the first changed touch's coordinates.
	var _fakeEventWrapper = function(event){
		var fake = {};
		fake.clientX = event.changedTouches[0].clientX;
		fake.clientY = event.changedTouches[0].clientY;
		return fake;
	};
	dom.addEventListener("touchstart",function(event){
		event = _fakeEventWrapper(event);
		_onDomMouseDown(event);
	},false);
	knob.addEventListener("touchstart",function(event){
		event = _fakeEventWrapper(event);
		_onKnobMouseDown(event);
	},false);
	window.addEventListener("touchmove",function(event){
		event = _fakeEventWrapper(event);
		_onWindowMouseMove(event);
	},false);
	window.addEventListener("touchend",_onWindowMouseUp,false);

	////////////////////////////////////////

	// Add... (register with the app's scene helpers; _add/_remove are globals)
	self.add = function(){
		_add(self);
	};

	// Remove... (also detach any message subscriptions)
	self.remove = function(){
		unlisten(self);
		_remove(self);
	};

}
| maeriens/trust | js/core/Slider.js | JavaScript | cc0-1.0 | 3,494 |
/*
// This software is subject to the terms of the Eclipse Public License v1.0
// Agreement, available at the following URL:
// http://www.eclipse.org/legal/epl-v10.html.
// You must accept the terms of that agreement to use this software.
//
// Copyright (C) 2001-2005 Julian Hyde
// Copyright (C) 2005-2015 Pentaho and others
// All Rights Reserved.
*/
package mondrian.olap;
import mondrian.mdx.*;
import mondrian.olap.fun.FunUtil;
import mondrian.olap.fun.Resolver;
import mondrian.olap.type.Type;
import mondrian.resource.MondrianResource;
import mondrian.rolap.*;
import mondrian.spi.UserDefinedFunction;
import mondrian.util.*;
import org.apache.commons.collections.keyvalue.AbstractMapEntry;
import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs2.FileContent;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;
import org.apache.commons.vfs2.provider.http.HttpFileObject;
import org.apache.log4j.Logger;
import org.eigenbase.xom.XOMUtil;
import org.olap4j.impl.Olap4jUtil;
import org.olap4j.mdx.*;
import java.io.*;
import java.lang.ref.Reference;
import java.lang.reflect.*;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.*;
import java.sql.Connection;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utility functions used throughout mondrian. All methods are static.
*
* @author jhyde
* @since 6 August, 2001
*/
public class Util extends XOMUtil {
/** Platform-specific line separator (e.g. "\n" on Unix, "\r\n" on Windows). */
public static final String nl = System.getProperty("line.separator");
/** Logger shared by the static helper methods in this class. */
private static final Logger LOGGER = Logger.getLogger(Util.class);
/**
 * Placeholder which indicates a value NULL.
 * Compared by reference (see {@link #isNull}), so it must remain a
 * single distinct instance.
 */
public static final Object nullValue = new Double(FunUtil.DoubleNull);
/**
 * Placeholder which indicates an EMPTY value.
 * Like {@link #nullValue}, intended for identity comparison.
 */
public static final Object EmptyValue = new Double(FunUtil.DoubleEmpty);
/**
 * Cumulative time spent accessing the database.
 */
private static long databaseMillis = 0;
/**
 * Random number generator to provide seed for other random number
 * generators. Seeded from the TestSeed property; see
 * {@link #createRandom(long)}.
 */
private static final Random metaRandom =
createRandom(MondrianProperties.instance().TestSeed.get());
/** Unique id for this JVM instance. Part of a key that ensures that if
 * two JVMs in the same cluster have a data-source with the same
 * identity-hash-code, they will be treated as different data-sources,
 * and therefore caches will not be incorrectly shared. */
public static final UUID JVM_INSTANCE_UUID = UUID.randomUUID();
/**
 * Whether this is an IBM JVM.
 */
public static final boolean IBM_JVM =
System.getProperties().getProperty("java.vendor").equals(
"IBM Corporation");
/**
 * What version of JDBC?
 * Returns:<ul>
 * <li>0x0401 in JDK 1.7 and higher</li>
 * <li>0x0400 in JDK 1.6</li>
 * <li>0x0300 otherwise</li>
 * </ul>
 * (Determined by a lexicographic comparison of the "java.version"
 * system property against "1.7" and "1.6".)
 */
public static final int JdbcVersion =
System.getProperty("java.version").compareTo("1.7") >= 0
? 0x0401
: System.getProperty("java.version").compareTo("1.6") >= 0
? 0x0400
: 0x0300;
/**
 * Whether the code base has re-engineered using retroweaver.
 * If this is the case, some functionality is not available, but a lot of
 * things are available via {@link mondrian.util.UtilCompatible}.
 * Retroweaver has some problems involving {@link java.util.EnumSet}.
 */
public static final boolean Retrowoven =
Access.class.getSuperclass().getName().equals(
"net.sourceforge.retroweaver.runtime.java.lang.Enum");
/** JDK-specific compatibility layer; assigned in the static block below. */
private static final UtilCompatible compatible;
/**
 * Flag to control expensive debugging. (More expensive than merely
 * enabling assertions: as we know, a lot of people run with assertions
 * enabled.)
 */
public static final boolean DEBUG = false;
static {
    // Always use the JDK 1.6 implementation of the compatibility layer.
    compatible = new UtilCompatibleJdk16();
}
/**
 * Returns whether a value is null or the {@link #nullValue} placeholder
 * (identity comparison).
 */
public static boolean isNull(Object o) {
    if (o == null) {
        return true;
    }
    return o == nullValue;
}
/**
 * Returns whether a list is strictly sorted: each element must compare
 * strictly greater than its predecessor (per natural ordering).
 * A null predecessor is skipped rather than compared.
 *
 * @param list List
 * @return whether list is sorted
 */
public static <T> boolean isSorted(List<T> list) {
    T earlier = null;
    for (int i = 0; i < list.size(); i++) {
        final T current = list.get(i);
        // Skip the comparison when there is no (non-null) predecessor.
        if (earlier != null
            && ((Comparable<T>) earlier).compareTo(current) >= 0)
        {
            return false;
        }
        earlier = current;
    }
    return true;
}
/**
 * Parses a string and returns a SHA-256 checksum of it.
 *
 * <p>The string is encoded as UTF-8 explicitly, so the digest is the
 * same on every platform regardless of the JVM's default charset.
 * (Previously the platform default charset was used, which made digests
 * differ between, say, Latin-1 and UTF-8 hosts.)
 *
 * @param value The source string to parse.
 * @return A checksum of the source string.
 */
public static byte[] digestSha256(String value) {
    final MessageDigest algorithm;
    try {
        algorithm = MessageDigest.getInstance("SHA-256");
    } catch (NoSuchAlgorithmException e) {
        // Every conforming JRE is required to supply SHA-256.
        throw new RuntimeException(e);
    }
    try {
        return algorithm.digest(value.getBytes("UTF-8"));
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is guaranteed to be supported by every JRE.
        throw new RuntimeException(e);
    }
}
/**
 * Creates an MD5 hash of a String.
 *
 * <p>The string is encoded as UTF-8 explicitly, so the hash is the same
 * on every platform regardless of the JVM's default charset.
 * (Previously the platform default charset was used.)
 *
 * @param value String to create one way hash upon.
 * @return MD5 hash.
 */
public static byte[] digestMd5(final String value) {
    final MessageDigest algorithm;
    try {
        algorithm = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        // Every conforming JRE is required to supply MD5.
        throw new RuntimeException(e);
    }
    try {
        return algorithm.digest(value.getBytes("UTF-8"));
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is guaranteed to be supported by every JRE.
        throw new RuntimeException(e);
    }
}
/**
 * Creates an {@link ExecutorService} object backed by a thread pool.
 * @param maximumPoolSize Maximum number of concurrent
 * threads. A value of zero or less means unbounded
 * (Integer.MAX_VALUE).
 * @param corePoolSize Minimum number of concurrent
 * threads to maintain in the pool, even if they are
 * idle.
 * @param keepAliveTime Time, in seconds, for which to
 * keep alive unused threads.
 * @param name The name of the threads.
 * @param rejectionPolicy The rejection policy to enforce. May be null,
 * in which case the executor's default policy applies.
 * @return An executor service preconfigured.
 */
public static ExecutorService getExecutorService(
int maximumPoolSize,
int corePoolSize,
long keepAliveTime,
final String name,
RejectedExecutionHandler rejectionPolicy)
{
// We must create a factory where the threads
// have the right name and are marked as daemon threads.
final ThreadFactory factory =
new ThreadFactory() {
// Monotonic counter gives each thread a unique name suffix.
private final AtomicInteger counter = new AtomicInteger(0);
public Thread newThread(Runnable r) {
final Thread t =
Executors.defaultThreadFactory().newThread(r);
t.setDaemon(true);
t.setName(name + '_' + counter.incrementAndGet());
return t;
}
};
// Ok, create the executor
final ThreadPoolExecutor executor =
new ThreadPoolExecutor(
corePoolSize,
maximumPoolSize > 0
? maximumPoolSize
: Integer.MAX_VALUE,
keepAliveTime,
TimeUnit.SECONDS,
// NOTE(review): an earlier comment here claimed a synchronous
// queue was in use, but this is an unbounded
// LinkedBlockingQueue. Per ThreadPoolExecutor's documented
// behavior, with an unbounded queue the pool never grows
// beyond corePoolSize, so maximumPoolSize is effectively
// unused -- confirm whether that is intended.
new LinkedBlockingQueue<Runnable>(),
factory);
// Set the rejection policy if required.
if (rejectionPolicy != null) {
executor.setRejectedExecutionHandler(
rejectionPolicy);
}
// Done
return executor;
}
/**
 * Creates a {@link ScheduledExecutorService} object backed by a
 * thread pool with a fixed number of threads.
 * @param maxNbThreads Maximum number of concurrent
 * threads.
 * @param name The name of the threads.
 * @return A scheduled executor service, preconfigured.
 */
public static ScheduledExecutorService getScheduledExecutorService(
    final int maxNbThreads,
    final String name)
{
    // Factory that produces daemon threads with a predictable name.
    final ThreadFactory factory = new ThreadFactory() {
        private final AtomicInteger nextId = new AtomicInteger(0);
        public Thread newThread(Runnable r) {
            final Thread t =
                Executors.defaultThreadFactory().newThread(r);
            t.setDaemon(true);
            t.setName(name + '_' + nextId.incrementAndGet());
            return t;
        }
    };
    return Executors.newScheduledThreadPool(maxNbThreads, factory);
}
/**
 * Encodes string for MDX (escapes ] as ]] inside a name).
 *
 * @deprecated Will be removed in 4.0
 */
public static String mdxEncodeString(String st) {
    final int n = st.length();
    final StringBuilder out = new StringBuilder(n + 20);
    for (int i = 0; i < n; i++) {
        final char c = st.charAt(i);
        // Double a ']' only when it is not the final character and is
        // not immediately followed by '.'.
        if (c == ']' && i + 1 < n && st.charAt(i + 1) != '.') {
            out.append(']');
        }
        out.append(c);
    }
    return out.toString();
}
/**
 * Converts a string into a double-quoted string.
 */
public static String quoteForMdx(String val) {
    return quoteForMdx(new StringBuilder(val.length() + 20), val)
        .toString();
}
/**
 * Appends a double-quoted string to a string builder; any embedded
 * double-quote is doubled.
 */
public static StringBuilder quoteForMdx(StringBuilder buf, String val) {
    buf.append('"');
    buf.append(val.replace("\"", "\"\""));
    buf.append('"');
    return buf;
}
/**
 * Return string quoted in [...]. For example, "San Francisco" becomes
 * "[San Francisco]"; "a [bracketed] string" becomes
 * "[a [bracketed]] string]".
 */
public static String quoteMdxIdentifier(String id) {
    final StringBuilder buf = new StringBuilder(id.length() + 20);
    quoteMdxIdentifier(id, buf);
    return buf.toString();
}
/**
 * Appends an identifier to a buffer, quoted in [...]; each ']' inside
 * the identifier is escaped by doubling it.
 */
public static void quoteMdxIdentifier(String id, StringBuilder buf) {
    buf.append('[').append(id.replace("]", "]]")).append(']');
}
/**
 * Return identifiers quoted in [...].[...]. For example, {"Store", "USA",
 * "California"} becomes "[Store].[USA].[California]".
 */
public static String quoteMdxIdentifier(List<Id.Segment> ids) {
    final StringBuilder buf = new StringBuilder(64);
    quoteMdxIdentifier(ids, buf);
    return buf.toString();
}
/**
 * Appends a dot-separated list of quoted segments to a buffer; each
 * segment renders itself via its own toString(StringBuilder).
 */
public static void quoteMdxIdentifier(
    List<Id.Segment> ids,
    StringBuilder sb)
{
    boolean following = false;
    for (Id.Segment id : ids) {
        if (following) {
            sb.append('.');
        }
        following = true;
        id.toString(sb);
    }
}
/**
 * Quotes a string literal for Java or JavaScript.
 *
 * @param s Unquoted literal
 * @return Quoted string literal, or the text "null" if s is null
 */
public static String quoteJavaString(String s) {
    if (s == null) {
        return "null";
    }
    // Escape backslashes first, then double-quotes, then wrap.
    final String escaped =
        s.replace("\\", "\\\\").replace("\"", "\\\"");
    return "\"" + escaped + "\"";
}
/**
 * Returns true if two objects are equal, or are both null.
 *
 * @param s First object
 * @param t Second object
 * @return Whether objects are equal or both null
 */
public static boolean equals(Object s, Object t) {
    return (s == t)
        || (s != null && t != null && s.equals(t));
}
/**
 * Returns true if two strings are equal, or are both null.
 *
 * <p>The result is not affected by
 * {@link MondrianProperties#CaseSensitive the case sensitive option}; if
 * you wish to compare names, use {@link #equalName(String, String)}.
 */
public static boolean equals(String s, String t) {
    return equals((Object) s, (Object) t);
}
/**
 * Returns whether two names are equal.
 * Takes into account the
 * {@link MondrianProperties#CaseSensitive case sensitive option}.
 * Names may be null.
 */
public static boolean equalName(String s, String t) {
    if (s == null) {
        return t == null;
    }
    return MondrianProperties.instance().CaseSensitive.get()
        ? s.equals(t)
        : s.equalsIgnoreCase(t);
}
/**
 * Tests two strings for equality, optionally ignoring case.
 *
 * @param s First string (must not be null)
 * @param t Second string
 * @param matchCase Whether to perform case-sensitive match
 * @return Whether strings are equal
 */
public static boolean equal(String s, String t, boolean matchCase) {
    if (matchCase) {
        return s.equals(t);
    }
    return s.equalsIgnoreCase(t);
}
/**
 * Compares two names. If the case sensitive option is false and the
 * case-insensitive comparison ties, applies a finer-grained
 * case-sensitive comparison to break the tie.
 * Takes into account the {@link MondrianProperties#CaseSensitive case
 * sensitive option}.
 * Names must not be null.
 */
public static int caseSensitiveCompareName(String s, String t) {
    if (MondrianProperties.instance().CaseSensitive.get()) {
        return s.compareTo(t);
    }
    final int c = s.compareToIgnoreCase(t);
    // if ignore case returns 0 compare in a case sensitive manner
    // this was introduced to solve an issue with Member.equals()
    // and Member.compareTo() not agreeing with each other
    return c != 0 ? c : s.compareTo(t);
}
/**
 * Compares two names.
 * Takes into account the {@link MondrianProperties#CaseSensitive case
 * sensitive option}.
 * Names must not be null.
 */
public static int compareName(String s, String t) {
    return MondrianProperties.instance().CaseSensitive.get()
        ? s.compareTo(t)
        : s.compareToIgnoreCase(t);
}
/**
 * Generates a normalized form of a name, for use as a key into a map.
 * Returns the name unchanged if
 * {@link MondrianProperties#CaseSensitive} is true, the upper-case name
 * otherwise. (An earlier version of this comment had the two cases
 * reversed.)
 * NOTE(review): toUpperCase() uses the default locale -- confirm keys
 * are ASCII-only or locale-insensitive.
 */
public static String normalizeName(String s) {
    return MondrianProperties.instance().CaseSensitive.get()
        ? s
        : s.toUpperCase();
}
/**
 * Returns the result of ((Comparable) k1).compareTo(k2), with
 * special-casing for the fact that Boolean only became
 * comparable in JDK 1.5.
 *
 * @see Comparable#compareTo
 */
public static int compareKey(Object k1, Object k2) {
    if (k1 instanceof Boolean) {
        // Compare via the string forms: "false" sorts before "true"
        // alphabetically, matching boolean ordering.
        return k1.toString().compareTo(k2.toString());
    }
    return ((Comparable) k1).compareTo(k2);
}
/**
 * Compares integer values.
 *
 * @param i0 First integer
 * @param i1 Second integer
 * @return -1, 0 or 1 according as i0 is less than, equal to, or
 *   greater than i1
 */
public static int compare(int i0, int i1) {
    if (i0 == i1) {
        return 0;
    }
    return i0 < i1 ? -1 : 1;
}
/**
 * Returns a string with every occurrence of a seek string replaced with
 * another.
 *
 * <p>Replacement is literal (no regular expressions), sequential,
 * non-overlapping and left-to-right. If {@code find} is empty, the
 * replacement is inserted before every character and after the last,
 * which is exactly what {@link String#replace(CharSequence,
 * CharSequence)} does; the previous hand-rolled loop implemented the
 * same semantics.
 *
 * @param s String to search
 * @param find Sequence to seek
 * @param replace Sequence substituted for each match
 * @return The substituted string; {@code s} itself when nothing matches
 */
public static String replace(String s, String find, String replace) {
    // let's be optimistic: avoid allocating when there is no match.
    if (s.indexOf(find) == -1) {
        return s;
    }
    return s.replace(find, replace);
}
/**
 * Replaces all occurrences of a string in a buffer with another.
 *
 * @param buf String buffer to act on
 * @param start Ordinal within <code>buf</code> at or after which matches
 *   are replaced; matches beginning before this index are left alone.
 *   (An earlier version of this comment said "within find".)
 * @param find String to find
 * @param replace String to replace it with
 * @return The string buffer
 */
public static StringBuilder replace(
StringBuilder buf,
int start,
String find,
String replace)
{
// Search and replace from the end towards the start, to avoid O(n ^ 2)
// copying if the string occurs very commonly.
int findLength = find.length();
if (findLength == 0) {
// Special case where the seek string is empty: insert the
// replacement at every gap, including both ends.
// NOTE(review): this branch ignores 'start' -- confirm callers
// never pass an empty 'find' with a non-zero 'start'.
for (int j = buf.length(); j >= 0; --j) {
buf.insert(j, replace);
}
return buf;
}
int k = buf.length();
while (k > 0) {
int i = buf.lastIndexOf(find, k);
if (i < start) {
// No further match at or after 'start'; done.
break;
}
buf.replace(i, i + find.length(), replace);
// Step back far enough to ensure that the beginning of the section
// we just replaced does not cause a match.
k = i - findLength;
}
return buf;
}
/**
 * Parses an MDX identifier such as <code>[Foo].[Bar].Baz.&Key&Key2</code>
 * and returns the result as a list of segments.
 *
 * <p>Delegates the parse to olap4j's identifier parser, then converts
 * the olap4j segments to mondrian {@link Id.Segment}s via
 * {@code convert}.
 *
 * @param s MDX identifier
 * @return List of segments
 */
public static List<Id.Segment> parseIdentifier(String s) {
return convert(
org.olap4j.impl.IdentifierParser.parseIdentifier(s));
}
/**
 * Converts an array of name parts {"part1", "part2"} into a single string
 * "[part1].[part2]". If the names contain "]" they are escaped as "]]".
 */
public static String implode(List<Id.Segment> names) {
StringBuilder sb = new StringBuilder(64);
for (int i = 0; i < names.size(); i++) {
if (i > 0) {
sb.append(".");
}
// FIXME: should be:
//   names.get(i).toString(sb);
// but that causes some tests to fail
Id.Segment segment = names.get(i);
switch (segment.getQuoting()) {
case UNQUOTED:
// Re-wrap an unquoted segment as a plain name segment before
// rendering -- presumably this changes how toString quotes
// it; TODO confirm why this wrapping is required.
segment = new Id.NameSegment(((Id.NameSegment) segment).name);
}
segment.toString(sb);
}
return sb.toString();
}
/**
 * Makes a fully-qualified name from a single name by bracket-quoting it.
 */
public static String makeFqName(String name) {
    return quoteMdxIdentifier(name);
}
/**
 * Makes a fully-qualified name for a child of an OLAP element; when the
 * parent is null the quoted child name stands alone.
 */
public static String makeFqName(OlapElement parent, String name) {
    if (parent == null) {
        return quoteMdxIdentifier(name);
    }
    final StringBuilder buf = new StringBuilder(64);
    buf.append(parent.getUniqueName()).append('.');
    quoteMdxIdentifier(name, buf);
    return buf.toString();
}
/**
 * Makes a fully-qualified name from a parent's unique name and a child
 * name; when the parent name is null the quoted child name stands alone.
 */
public static String makeFqName(String parentUniqueName, String name) {
    if (parentUniqueName == null) {
        return quoteMdxIdentifier(name);
    }
    final StringBuilder buf = new StringBuilder(64);
    buf.append(parentUniqueName).append('.');
    quoteMdxIdentifier(name, buf);
    return buf.toString();
}
/**
 * Resolves a compound name within a parent element using
 * {@link MatchType#EXACT} matching.
 *
 * <p>Convenience overload of
 * {@link #lookupCompound(SchemaReader, OlapElement, List, boolean, int,
 * MatchType)}.
 */
public static OlapElement lookupCompound(
SchemaReader schemaReader,
OlapElement parent,
List<Id.Segment> names,
boolean failIfNotFound,
int category)
{
return lookupCompound(
schemaReader, parent, names, failIfNotFound, category,
MatchType.EXACT);
}
/**
 * Resolves a name such as
 * '[Products].[Product Department].[Produce]' by resolving the
 * components ('Products', and so forth) one at a time.
 *
 * @param schemaReader Schema reader, supplies access-control context
 * @param parent Parent element to search in
 * @param names Exploded compound name, such as {"Products",
 *   "Product Department", "Produce"}
 * @param failIfNotFound If the element is not found, determines whether
 *   to return null or throw an error
 * @param category Type of returned element, a {@link Category} value;
 *   {@link Category#Unknown} if it doesn't matter.
 * @param matchType Match type (exact, before, after, ...) applied to
 *   each name component
 *
 * @pre parent != null
 * @post !(failIfNotFound && return == null)
 *
 * @see #parseIdentifier(String)
 */
public static OlapElement lookupCompound(
SchemaReader schemaReader,
OlapElement parent,
List<Id.Segment> names,
boolean failIfNotFound,
int category,
MatchType matchType)
{
Util.assertPrecondition(parent != null, "parent != null");
if (LOGGER.isDebugEnabled()) {
StringBuilder buf = new StringBuilder(64);
buf.append("Util.lookupCompound: ");
buf.append("parent.name=");
buf.append(parent.getName());
buf.append(", category=");
buf.append(Category.instance.getName(category));
buf.append(", names=");
quoteMdxIdentifier(names, buf);
LOGGER.debug(buf.toString());
}
// First look up a member from the cache of calculated members
// (cubes and queries both have them).
switch (category) {
case Category.Member:
case Category.Unknown:
Member member = schemaReader.getCalculatedMember(names);
if (member != null) {
return member;
}
}
// Likewise named set.
switch (category) {
case Category.Set:
case Category.Unknown:
NamedSet namedSet = schemaReader.getNamedSet(names);
if (namedSet != null) {
return namedSet;
}
}
// Now resolve the name one part at a time.
for (int i = 0; i < names.size(); i++) {
OlapElement child;
Id.NameSegment name;
if (names.get(i) instanceof Id.NameSegment) {
name = (Id.NameSegment) names.get(i);
child = schemaReader.getElementChild(parent, name, matchType);
} else if (parent instanceof RolapLevel
&& names.get(i) instanceof Id.KeySegment
&& names.get(i).getKeyParts().size() == 1)
{
// The following code is for SsasCompatibleNaming=false.
// Continues the very limited support for key segments in
// mondrian-3.x. To be removed in mondrian-4, when
// SsasCompatibleNaming=true is the only option.
final Id.KeySegment keySegment = (Id.KeySegment) names.get(i);
name = keySegment.getKeyParts().get(0);
final List<Member> levelMembers =
schemaReader.getLevelMembers(
(Level) parent, false);
child = null;
// Linear scan over the level's members, comparing the string
// form of each member's key with the key segment's name.
for (Member member : levelMembers) {
if (((RolapMember) member).getKey().toString().equals(
name.getName()))
{
child = member;
break;
}
}
} else {
// Segment has no usable name here (e.g. a multi-part key
// segment against a non-level parent).
// NOTE(review): 'name' stays null on this path; if a
// non-exact match were to produce a Member child, the
// name.getName() call below would throw NPE -- confirm this
// path cannot reach that branch.
name = null;
child = schemaReader.getElementChild(parent, name, matchType);
}
// if we're doing a non-exact match, and we find a non-exact
// match, then for an after match, return the first child
// of each subsequent level; for a before match, return the
// last child
if (child instanceof Member
&& !matchType.isExact()
&& !Util.equalName(child.getName(), name.getName()))
{
Member bestChild = (Member) child;
for (int j = i + 1; j < names.size(); j++) {
List<Member> childrenList =
schemaReader.getMemberChildren(bestChild);
FunUtil.hierarchizeMemberList(childrenList, false);
if (matchType == MatchType.AFTER) {
bestChild = childrenList.get(0);
} else {
bestChild =
childrenList.get(childrenList.size() - 1);
}
if (bestChild == null) {
child = null;
break;
}
}
parent = bestChild;
break;
}
if (child == null) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"Util.lookupCompound: "
+ "parent.name="
+ parent.getName()
+ " has no child with name="
+ name);
}
if (!failIfNotFound) {
return null;
} else if (category == Category.Member) {
throw MondrianResource.instance().MemberNotFound.ex(
quoteMdxIdentifier(names));
} else {
throw MondrianResource.instance().MdxChildObjectNotFound
.ex(name.toString(), parent.getQualifiedName());
}
}
parent = child;
if (matchType == MatchType.EXACT_SCHEMA) {
// Schema-exact matching applies only to the first component;
// relax to plain exact matching for the rest.
matchType = MatchType.EXACT;
}
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"Util.lookupCompound: "
+ "found child.name="
+ parent.getName()
+ ", child.class="
+ parent.getClass().getName());
}
// Check the resolved element against the requested category,
// converting between dimension and hierarchy where possible.
switch (category) {
case Category.Dimension:
if (parent instanceof Dimension) {
return parent;
} else if (parent instanceof Hierarchy) {
return parent.getDimension();
} else if (failIfNotFound) {
throw Util.newError(
"Can not find dimension '" + implode(names) + "'");
} else {
return null;
}
case Category.Hierarchy:
if (parent instanceof Hierarchy) {
return parent;
} else if (parent instanceof Dimension) {
return parent.getHierarchy();
} else if (failIfNotFound) {
throw Util.newError(
"Can not find hierarchy '" + implode(names) + "'");
} else {
return null;
}
case Category.Level:
if (parent instanceof Level) {
return parent;
} else if (failIfNotFound) {
throw Util.newError(
"Can not find level '" + implode(names) + "'");
} else {
return null;
}
case Category.Member:
if (parent instanceof Member) {
return parent;
} else if (failIfNotFound) {
throw MondrianResource.instance().MdxCantFindMember.ex(
implode(names));
} else {
return null;
}
case Category.Unknown:
assertPostcondition(parent != null, "return != null");
return parent;
default:
throw newInternal("Bad switch " + category);
}
}
/**
 * Resolves an identifier to an OLAP element (member, level, hierarchy
 * or dimension) within a query's context, throwing an internal error if
 * the resolved expression is not one of those element kinds.
 */
public static OlapElement lookup(Query q, List<Id.Segment> nameParts) {
    final Exp exp = lookup(q, nameParts, false);
    if (exp instanceof MemberExpr) {
        return ((MemberExpr) exp).getMember();
    }
    if (exp instanceof LevelExpr) {
        return ((LevelExpr) exp).getLevel();
    }
    if (exp instanceof HierarchyExpr) {
        return ((HierarchyExpr) exp).getHierarchy();
    }
    if (exp instanceof DimensionExpr) {
        return ((DimensionExpr) exp).getDimension();
    }
    throw Util.newInternal("Not an olap element: " + exp);
}
/**
 * Converts an identifier into an expression by resolving its parts into
 * an OLAP object (dimension, hierarchy, level or member) within the
 * context of a query.
 *
 * <p>If <code>allowProp</code> is true, also allows property references
 * from valid members, for example
 * <code>[Measures].[Unit Sales].FORMATTED_VALUE</code>.
 * In this case, the result will be a {@link mondrian.mdx.ResolvedFunCall}.
 *
 * @param q Query expression belongs to
 * @param nameParts Parts of the identifier
 * @param allowProp Whether to allow property references
 * @return OLAP object or property reference
 */
public static Exp lookup(
    Query q,
    List<Id.Segment> nameParts,
    boolean allowProp)
{
    final SchemaReader reader = q.getSchemaReader(true);
    return lookup(q, reader, nameParts, allowProp);
}
/**
 * Converts an identifier into an expression by resolving its parts into
 * an OLAP object (dimension, hierarchy, level or member) within the
 * context of a query.
 *
 * <p>If <code>allowProp</code> is true, also allows property references
 * from valid members, for example
 * <code>[Measures].[Unit Sales].FORMATTED_VALUE</code>.
 * In this case, the result will be a {@link ResolvedFunCall}.
 *
 * @param q Query expression belongs to
 * @param schemaReader Schema reader
 * @param segments Parts of the identifier
 * @param allowProp Whether to allow property references
 * @return OLAP object or property reference
 */
public static Exp lookup(
Query q,
SchemaReader schemaReader,
List<Id.Segment> segments,
boolean allowProp)
{
// First, look for a calculated member defined in the query.
final String fullName = quoteMdxIdentifier(segments);
// Look for any kind of object (member, level, hierarchy,
// dimension) in the cube. Use a schema reader without restrictions.
final SchemaReader schemaReaderSansAc =
schemaReader.withoutAccessControl().withLocus();
final Cube cube = q.getCube();
OlapElement olapElement =
schemaReaderSansAc.lookupCompound(
cube, segments, false, Category.Unknown);
if (olapElement != null) {
// Found without access control; now verify the current role may
// actually see it.
Role role = schemaReader.getRole();
if (!role.canAccess(olapElement)) {
olapElement = null;
}
if (olapElement instanceof Member) {
olapElement =
schemaReader.substitute((Member) olapElement);
}
}
if (olapElement == null) {
if (allowProp && segments.size() > 1) {
// Maybe the last segment is a property name applied to the
// member or level named by the preceding segments.
List<Id.Segment> segmentsButOne =
segments.subList(0, segments.size() - 1);
final Id.Segment lastSegment = last(segments);
final String propertyName =
lastSegment instanceof Id.NameSegment
? ((Id.NameSegment) lastSegment).getName()
: null;
final Member member =
(Member) schemaReaderSansAc.lookupCompound(
cube, segmentsButOne, false, Category.Member);
if (member != null
&& propertyName != null
&& isValidProperty(propertyName, member.getLevel()))
{
return new UnresolvedFunCall(
propertyName, Syntax.Property, new Exp[] {
createExpr(member)});
}
final Level level =
(Level) schemaReaderSansAc.lookupCompound(
cube, segmentsButOne, false, Category.Level);
if (level != null
&& propertyName != null
&& isValidProperty(propertyName, level))
{
return new UnresolvedFunCall(
propertyName, Syntax.Property, new Exp[] {
createExpr(level)});
}
}
// if we're in the middle of loading the schema, the property has
// been set to ignore invalid members, and the member is
// non-existent, return the null member corresponding to the
// hierarchy of the element we're looking for; locate the
// hierarchy by incrementally truncating the name of the element
if (q.ignoreInvalidMembers()) {
int nameLen = segments.size() - 1;
olapElement = null;
while (nameLen > 0 && olapElement == null) {
List<Id.Segment> partialName =
segments.subList(0, nameLen);
olapElement = schemaReaderSansAc.lookupCompound(
cube, partialName, false, Category.Unknown);
nameLen--;
}
if (olapElement != null) {
olapElement = olapElement.getHierarchy().getNullMember();
} else {
throw MondrianResource.instance().MdxChildObjectNotFound.ex(
fullName, cube.getQualifiedName());
}
} else {
throw MondrianResource.instance().MdxChildObjectNotFound.ex(
fullName, cube.getQualifiedName());
}
}
// keep track of any measure members referenced; these will be used
// later to determine if cross joins on virtual cubes can be
// processed natively
q.addMeasuresMembers(olapElement);
return createExpr(olapElement);
}
/**
 * Looks up a cube in a schema reader.
 *
 * @param schemaReader Schema reader
 * @param cubeName Cube name
 * @param fail Whether to fail if not found.
 * @return Cube, or null if not found and fail is false
 */
static Cube lookupCube(
    SchemaReader schemaReader,
    String cubeName,
    boolean fail)
{
    for (Cube candidate : schemaReader.getCubes()) {
        // Name comparison honors the CaseSensitive option.
        if (Util.compareName(candidate.getName(), cubeName) == 0) {
            return candidate;
        }
    }
    if (!fail) {
        return null;
    }
    throw MondrianResource.instance().MdxCubeNotFound.ex(cubeName);
}
/**
 * Converts an olap element (dimension, hierarchy, level or member) into
 * an expression representing a usage of that element in an MDX statement.
 */
public static Exp createExpr(OlapElement element)
{
    if (element instanceof Member) {
        return new MemberExpr((Member) element);
    }
    if (element instanceof Level) {
        return new LevelExpr((Level) element);
    }
    if (element instanceof Hierarchy) {
        return new HierarchyExpr((Hierarchy) element);
    }
    if (element instanceof Dimension) {
        return new DimensionExpr((Dimension) element);
    }
    if (element instanceof NamedSet) {
        return new NamedSetExpr((NamedSet) element);
    }
    throw Util.newInternal("Unexpected element type: " + element);
}
/**
 * Finds a root member of a hierarchy with a given name, using
 * {@link MatchType#EXACT} matching.
 *
 * <p>Convenience overload of
 * {@link #lookupHierarchyRootMember(SchemaReader, Hierarchy,
 * Id.NameSegment, MatchType)}.
 */
public static Member lookupHierarchyRootMember(
SchemaReader reader, Hierarchy hierarchy, Id.NameSegment memberName)
{
return lookupHierarchyRootMember(
reader, hierarchy, memberName, MatchType.EXACT);
}
/**
 * Finds a root member of a hierarchy with a given name.
 *
 * @param reader Schema reader
 * @param hierarchy Hierarchy
 * @param memberName Name of root member
 * @param matchType Match type (exact, schema-exact, before or after)
 * @return Member, or null if not found
 */
public static Member lookupHierarchyRootMember(
SchemaReader reader,
Hierarchy hierarchy,
Id.NameSegment memberName,
MatchType matchType)
{
// Lookup member at first level.
//
// Don't use access control. Suppose we cannot see the 'nation' level,
// we still want to be able to resolve '[Customer].[USA].[CA]'.
List<Member> rootMembers = reader.getHierarchyRootMembers(hierarchy);
// if doing an inexact search on a non-all hierarchy, create
// a member corresponding to the name we're searching for so
// we can use it in a hierarchical search
Member searchMember = null;
if (!matchType.isExact()
&& !hierarchy.hasAll()
&& !rootMembers.isEmpty())
{
searchMember =
hierarchy.createMember(
null,
rootMembers.get(0).getLevel(),
memberName.name,
null);
}
int bestMatch = -1;
int k = -1;
for (Member rootMember : rootMembers) {
++k;
int rc;
// when searching on the ALL hierarchy, match must be exact
if (matchType.isExact() || hierarchy.hasAll()) {
rc = rootMember.getName().compareToIgnoreCase(memberName.name);
} else {
rc = FunUtil.compareSiblingMembers(
rootMember,
searchMember);
}
if (rc == 0) {
// Case-insensitive name hit (or sibling-order tie); done.
return rootMember;
}
if (!hierarchy.hasAll()) {
if (matchType == MatchType.BEFORE) {
// Track the greatest root member sorting before the target.
if (rc < 0
&& (bestMatch == -1
|| FunUtil.compareSiblingMembers(
rootMember,
rootMembers.get(bestMatch)) > 0))
{
bestMatch = k;
}
} else if (matchType == MatchType.AFTER) {
// Track the least root member sorting after the target.
if (rc > 0
&& (bestMatch == -1
|| FunUtil.compareSiblingMembers(
rootMember,
rootMembers.get(bestMatch)) < 0))
{
bestMatch = k;
}
}
}
}
if (matchType == MatchType.EXACT_SCHEMA) {
// Schema-exact lookups never fall back to inexact or
// second-level resolution.
return null;
}
if (matchType != MatchType.EXACT && bestMatch != -1) {
return rootMembers.get(bestMatch);
}
// If the first level is 'all', lookup member at second level. For
// example, they could say '[USA]' instead of '[(All
// Customers)].[USA]'.
return (rootMembers.size() > 0 && rootMembers.get(0).isAll())
? reader.lookupMemberChildByName(
rootMembers.get(0),
memberName,
matchType)
: null;
}
/**
 * Finds a named level in this hierarchy. Returns null if there is no
 * such level. The comparison ignores case.
 */
public static Level lookupHierarchyLevel(Hierarchy hierarchy, String s) {
    for (Level candidate : hierarchy.getLevels()) {
        if (candidate.getName().equalsIgnoreCase(s)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Finds the zero based ordinal of a Member among its siblings.
 * Throws an internal error if the member is not among them.
 */
public static int getMemberOrdinalInParent(
    SchemaReader reader,
    Member member)
{
    final Member parent = member.getParentMember();
    // Root members have no parent; their siblings are the other roots.
    final List<Member> siblings =
        parent == null
        ? reader.getHierarchyRootMembers(member.getHierarchy())
        : reader.getMemberChildren(parent);
    int ordinal = 0;
    for (Member sibling : siblings) {
        if (sibling.equals(member)) {
            return ordinal;
        }
        ++ordinal;
    }
    throw Util.newInternal(
        "could not find member " + member + " amongst its siblings");
}
/**
 * Returns the first descendant on the level underneath parent.
 * If parent = [Time].[1997] and level = [Time].[Month], then
 * the member [Time].[1997].[Q1].[1] will be returned
 */
public static Member getFirstDescendantOnLevel(
    SchemaReader reader,
    Member parent,
    Level level)
{
    Member current = parent;
    while (current.getLevel() != level) {
        // Descend one level, always taking the first child.
        current = reader.getMemberChildren(current).get(0);
    }
    return current;
}
/**
 * Returns whether a string is null or empty.
 */
public static boolean isEmpty(String s) {
    return s == null || s.length() == 0;
}
/**
 * Encloses a value in single-quotes, to make a SQL string value.
 * For example, "don't" yields <code>'don''t'</code>.
 * The value must not be null.
 */
public static String singleQuoteString(String val) {
    final StringBuilder buf = new StringBuilder(64);
    singleQuoteString(val, buf);
    return buf.toString();
}
/**
 * Encloses a value in single-quotes, appending to a buffer; embedded
 * single-quotes are doubled. The value must not be null.
 */
public static void singleQuoteString(String val, StringBuilder buf) {
    buf.append('\'').append(val.replace("'", "''")).append('\'');
}
/**
 * Creates a random number generator.
 *
 * @param seed Seed for random number generator.
 *   If 0, generate a seed from the system clock and print the value
 *   chosen. (This is effectively non-deterministic.)
 *   If -1, generate a seed from an internal random number generator.
 *   (This is deterministic, but ensures that different tests have
 *   different seeds.)
 *
 * @return A random number generator.
 */
public static Random createRandom(long seed) {
    long actualSeed = seed;
    if (actualSeed == 0) {
        // Non-deterministic: derive from the clock and announce it so
        // a failing run can be reproduced.
        actualSeed = new Random().nextLong();
        System.out.println("random: seed=" + actualSeed);
    } else if (actualSeed == -1 && metaRandom != null) {
        // Deterministic but distinct per call, from the shared meta
        // generator.
        actualSeed = metaRandom.nextLong();
    }
    return new Random(actualSeed);
}
/**
 * Returns whether a property is valid for a member of a given level.
 * It is valid if the property is defined at the level or at
 * an ancestor level, or if the property is a standard property such as
 * "FORMATTED_VALUE".
 *
 * @param propertyName Property name
 * @param level Level
 * @return Whether property is valid
 */
public static boolean isValidProperty(
    String propertyName,
    Level level)
{
    return null != lookupProperty(level, propertyName);
}
/**
 * Finds a member property called <code>propertyName</code> at, or above,
 * <code>level</code>; failing that, looks for a standard member
 * property of that name. Returns null if neither exists.
 */
public static Property lookupProperty(
    Level level,
    String propertyName)
{
    // Walk up the level chain looking for an explicitly defined
    // property with the given name.
    for (Level current = level;
         current != null;
         current = current.getParentLevel())
    {
        for (Property property : current.getProperties()) {
            if (property.getName().equals(propertyName)) {
                return property;
            }
        }
    }
    // Now try a standard property.
    final boolean caseSensitive =
        MondrianProperties.instance().CaseSensitive.get();
    final Property property = Property.lookup(propertyName, caseSensitive);
    if (property != null
        && property.isMemberProperty()
        && property.isStandard())
    {
        return property;
    }
    return null;
}
/**
 * Insert a call to this method if you want to flag a piece of
 * undesirable code. Always throws.
 *
 * @deprecated
 */
public static <T> T deprecated(T reason) {
    // Delegate to the two-argument form with fail=true.
    return deprecated(reason, true);
}
/**
 * Insert a call to this method if you want to flag a piece of
 * undesirable code. Throws when <code>fail</code> is true; otherwise
 * returns <code>reason</code> unchanged.
 *
 * @deprecated
 */
public static <T> T deprecated(T reason, boolean fail) {
    if (!fail) {
        return reason;
    }
    throw new UnsupportedOperationException(reason.toString());
}
    /**
     * Adds the calculated members defined at <code>level</code> to the given
     * list of members.
     *
     * <p>The input list is never modified: when at least one calculated
     * member belongs to the level, a new list containing the original
     * members followed by those calculated members is returned; otherwise
     * the original list is returned unchanged.</p>
     *
     * @param reader Schema reader providing the calculated members
     * @param level Level of interest
     * @param members Members of the level
     * @return Members plus the level's calculated members
     */
    public static List<Member> addLevelCalculatedMembers(
        SchemaReader reader,
        Level level,
        List<Member> members)
    {
        List<Member> calcMembers =
            reader.getCalculatedMembers(level.getHierarchy());
        // Keep only the calculated members declared at this exact level.
        List<Member> calcMembersInThisLevel = new ArrayList<Member>();
        for (Member calcMember : calcMembers) {
            if (calcMember.getLevel().equals(level)) {
                calcMembersInThisLevel.add(calcMember);
            }
        }
        if (!calcMembersInThisLevel.isEmpty()) {
            List<Member> newMemberList =
                new ConcatenableList<Member>();
            newMemberList.addAll(members);
            newMemberList.addAll(calcMembersInThisLevel);
            return newMemberList;
        }
        return members;
    }
/**
* Returns an exception which indicates that a particular piece of
* functionality should work, but a developer has not implemented it yet.
*/
public static RuntimeException needToImplement(Object o) {
throw new UnsupportedOperationException("need to implement " + o);
}
/**
* Returns an exception indicating that we didn't expect to find this value
* here.
*/
public static <T extends Enum<T>> RuntimeException badValue(
Enum<T> anEnum)
{
return Util.newInternal(
"Was not expecting value '" + anEnum
+ "' for enumeration '" + anEnum.getDeclaringClass().getName()
+ "' in this context");
}
/**
* Converts a list of SQL-style patterns into a Java regular expression.
*
* <p>For example, {"Foo_", "Bar%BAZ"} becomes "Foo.|Bar.*BAZ".
*
* @param wildcards List of SQL-style wildcard expressions
* @return Regular expression
*/
public static String wildcardToRegexp(List<String> wildcards) {
StringBuilder buf = new StringBuilder();
for (String value : wildcards) {
if (buf.length() > 0) {
buf.append('|');
}
int i = 0;
while (true) {
int percent = value.indexOf('%', i);
int underscore = value.indexOf('_', i);
if (percent == -1 && underscore == -1) {
if (i < value.length()) {
buf.append(quotePattern(value.substring(i)));
}
break;
}
if (underscore >= 0 && (underscore < percent || percent < 0)) {
if (i < underscore) {
buf.append(
quotePattern(value.substring(i, underscore)));
}
buf.append('.');
i = underscore + 1;
} else if (percent >= 0
&& (percent < underscore || underscore < 0))
{
if (i < percent) {
buf.append(
quotePattern(value.substring(i, percent)));
}
buf.append(".*");
i = percent + 1;
} else {
throw new IllegalArgumentException();
}
}
}
return buf.toString();
}
/**
* Converts a camel-case name to an upper-case name with underscores.
*
* <p>For example, <code>camelToUpper("FooBar")</code> returns "FOO_BAR".
*
* @param s Camel-case string
* @return Upper-case string
*/
public static String camelToUpper(String s) {
StringBuilder buf = new StringBuilder(s.length() + 10);
int prevUpper = -1;
for (int i = 0; i < s.length(); ++i) {
char c = s.charAt(i);
if (Character.isUpperCase(c)) {
if (i > prevUpper + 1) {
buf.append('_');
}
prevUpper = i;
} else {
c = Character.toUpperCase(c);
}
buf.append(c);
}
return buf.toString();
}
    /**
     * Parses a comma-separated list.
     *
     * <p>If a value contains a comma, escape it with a second comma. For
     * example, <code>parseCommaList("x,y,,z")</code> returns
     * <code>{"x", "y,z"}</code>.
     *
     * @param nameCommaList List of names separated by commas
     * @return List of names
     */
    public static List<String> parseCommaList(String nameCommaList) {
        if (nameCommaList.equals("")) {
            return Collections.emptyList();
        }
        if (nameCommaList.endsWith(",")) {
            // Special treatment for list ending in ",", because split ignores
            // entries after separator.
            // Append a sentinel, parse, then undo the sentinel below.
            final String zzz = "zzz";
            final List<String> list = parseCommaList(nameCommaList + zzz);
            String last = list.get(list.size() - 1);
            if (last.equals(zzz)) {
                // Trailing comma was a plain separator: drop the sentinel.
                list.remove(list.size() - 1);
            } else {
                // Trailing comma was an escape that fused the sentinel into
                // the last entry: strip the sentinel suffix back off.
                list.set(
                    list.size() - 1,
                    last.substring(0, last.length() - zzz.length()));
            }
            return list;
        }
        List<String> names = new ArrayList<String>();
        final String[] strings = nameCommaList.split(",");
        for (String string : strings) {
            final int count = names.size();
            // An empty previous entry means the comma before this token was
            // doubled (escaped), so this token belongs to the entry before.
            if (count > 0
                && names.get(count - 1).equals(""))
            {
                if (count == 1) {
                    if (string.equals("")) {
                        names.add("");
                    } else {
                        // The list began with an escaped comma.
                        names.set(
                            0,
                            "," + string);
                    }
                } else {
                    // Fold this token into the entry before the empty marker,
                    // rejoined with a literal comma, and drop the marker.
                    names.set(
                        count - 2,
                        names.get(count - 2) + "," + string);
                    names.remove(count - 1);
                }
            } else {
                names.add(string);
            }
        }
        return names;
    }
    /**
     * Returns an annotation of a particular class on a method. Returns the
     * default value if the annotation is not present, or in JDK 1.4.
     *
     * @param method Method containing annotation
     * @param annotationClassName Name of annotation class to find
     * @param defaultValue Value to return if annotation is not present
     * @return value of annotation
     */
    public static <T> T getAnnotation(
        Method method,
        String annotationClassName,
        T defaultValue)
    {
        // Lookup lives in the JDK-version-specific compatibility layer,
        // since annotations do not exist before JDK 1.5.
        return compatible.getAnnotation(
            method, annotationClassName, defaultValue);
    }
    /**
     * Closes and cancels a {@link Statement} using the correct methods
     * available on the current Java runtime.
     * <p>If errors are encountered while canceling a statement,
     * the message is logged in {@link Util}.
     * @param stmt The statement to cancel.
     */
    public static void cancelStatement(Statement stmt) {
        // Version-specific cancel/close behavior is encapsulated in the
        // compatibility layer; errors are logged there, not thrown.
        compatible.cancelStatement(stmt);
    }
    /**
     * Returns this JVM's memory-usage facade, as provided by the
     * JDK-version-specific compatibility layer.
     *
     * @return Memory usage information
     */
    public static MemoryInfo getMemoryInfo() {
        return compatible.getMemoryInfo();
    }
/**
* Converts a list of a string.
*
* For example,
* <code>commaList("foo", Arrays.asList({"a", "b"}))</code>
* returns "foo(a, b)".
*
* @param s Prefix
* @param list List
* @return String representation of string
*/
public static <T> String commaList(
String s,
List<T> list)
{
final StringBuilder buf = new StringBuilder(s);
buf.append("(");
int k = -1;
for (T t : list) {
if (++k > 0) {
buf.append(", ");
}
buf.append(t);
}
buf.append(")");
return buf.toString();
}
/**
* Makes a name distinct from other names which have already been used
* and shorter than a length limit, adds it to the list, and returns it.
*
* @param name Suggested name, may not be unique
* @param maxLength Maximum length of generated name
* @param nameList Collection of names already used
*
* @return Unique name
*/
public static String uniquify(
String name,
int maxLength,
Collection<String> nameList)
{
assert name != null;
if (name.length() > maxLength) {
name = name.substring(0, maxLength);
}
if (nameList.contains(name)) {
String aliasBase = name;
int j = 0;
while (true) {
name = aliasBase + j;
if (name.length() > maxLength) {
aliasBase = aliasBase.substring(0, aliasBase.length() - 1);
continue;
}
if (!nameList.contains(name)) {
break;
}
j++;
}
}
nameList.add(name);
return name;
}
/**
* Returns whether a collection contains precisely one distinct element.
* Returns false if the collection is empty, or if it contains elements
* that are not the same as each other.
*
* @param collection Collection
* @return boolean true if all values are same
*/
public static <T> boolean areOccurencesEqual(
Collection<T> collection)
{
Iterator<T> it = collection.iterator();
if (!it.hasNext()) {
// Collection is empty
return false;
}
T first = it.next();
while (it.hasNext()) {
T t = it.next();
if (!t.equals(first)) {
return false;
}
}
return true;
}
    /**
     * Creates a memory-, CPU- and cache-efficient immutable list.
     *
     * <p>The result may be backed by the given array (for more than three
     * elements it is), so the caller must not modify {@code t} afterwards;
     * use {@link #flatListCopy} to force a copy.</p>
     *
     * @param t Array of members of list
     * @param <T> Element type
     * @return List containing the given members
     */
    public static <T> List<T> flatList(T... t) {
        return _flatList(t, false);
    }
    /**
     * Creates a memory-, CPU- and cache-efficient immutable list,
     * always copying the contents.
     *
     * <p>Because the contents are copied, subsequent changes to {@code t}
     * do not affect the returned list.</p>
     *
     * @param t Array of members of list
     * @param <T> Element type
     * @return List containing the given members
     */
    public static <T> List<T> flatListCopy(T... t) {
        return _flatList(t, true);
    }
/**
* Creates a memory-, CPU- and cache-efficient immutable list, optionally
* copying the list.
*
* @param copy Whether to always copy the list
* @param t Array of members of list
* @return List containing the given members
*/
private static <T> List<T> _flatList(T[] t, boolean copy) {
switch (t.length) {
case 0:
return Collections.emptyList();
case 1:
return Collections.singletonList(t[0]);
case 2:
return new Flat2List<T>(t[0], t[1]);
case 3:
return new Flat3List<T>(t[0], t[1], t[2]);
default:
// REVIEW: AbstractList contains a modCount field; we could
// write our own implementation and reduce creation overhead a
// bit.
if (copy) {
return Arrays.asList(t.clone());
} else {
return Arrays.asList(t);
}
}
}
/**
* Creates a memory-, CPU- and cache-efficient immutable list from an
* existing list. The list is always copied.
*
* @param t Array of members of list
* @param <T> Element type
* @return List containing the given members
*/
public static <T> List<T> flatList(List<T> t) {
switch (t.size()) {
case 0:
return Collections.emptyList();
case 1:
return Collections.singletonList(t.get(0));
case 2:
return new Flat2List<T>(t.get(0), t.get(1));
case 3:
return new Flat3List<T>(t.get(0), t.get(1), t.get(2));
default:
// REVIEW: AbstractList contains a modCount field; we could
// write our own implementation and reduce creation overhead a
// bit.
//noinspection unchecked
return (List<T>) Arrays.asList(t.toArray());
}
}
/**
* Parses a locale string.
*
* <p>The inverse operation of {@link java.util.Locale#toString()}.
*
* @param localeString Locale string, e.g. "en" or "en_US"
* @return Java locale object
*/
public static Locale parseLocale(String localeString) {
String[] strings = localeString.split("_");
switch (strings.length) {
case 1:
return new Locale(strings[0]);
case 2:
return new Locale(strings[0], strings[1]);
case 3:
return new Locale(strings[0], strings[1], strings[2]);
default:
throw newInternal(
"bad locale string '" + localeString + "'");
}
}
    // Maps interval-suffix abbreviations (as accepted by parseInterval) to
    // names of TimeUnit constants. Values are names rather than constants
    // because MINUTES/HOURS/DAYS do not exist in JDK 1.5's TimeUnit.
    private static final Map<String, String> TIME_UNITS =
        Olap4jUtil.mapOf(
            "ns", "NANOSECONDS",
            "us", "MICROSECONDS",
            "ms", "MILLISECONDS",
            "s", "SECONDS",
            "m", "MINUTES",
            "h", "HOURS",
            "d", "DAYS");
/**
* Parses an interval.
*
* <p>For example, "30s" becomes (30, {@link TimeUnit#SECONDS});
* "2us" becomes (2, {@link TimeUnit#MICROSECONDS}).</p>
*
* <p>Units m (minutes), h (hours) and d (days) are only available
* in JDK 1.6 or later, because the corresponding constants are missing
* from {@link TimeUnit} in JDK 1.5.</p>
*
* @param s String to parse
* @param unit Default time unit; may be null
*
* @return Pair of value and time unit. Neither pair or its components are
* null
*
* @throws NumberFormatException if unit is not present and there is no
* default, or if number is not valid
*/
public static Pair<Long, TimeUnit> parseInterval(
String s,
TimeUnit unit)
throws NumberFormatException
{
final String original = s;
for (Map.Entry<String, String> entry : TIME_UNITS.entrySet()) {
final String abbrev = entry.getKey();
if (s.endsWith(abbrev)) {
final String full = entry.getValue();
try {
unit = TimeUnit.valueOf(full);
s = s.substring(0, s.length() - abbrev.length());
break;
} catch (IllegalArgumentException e) {
// ignore - MINUTES, HOURS, DAYS are not defined in JDK1.5
}
}
}
if (unit == null) {
throw new NumberFormatException(
"Invalid time interval '" + original + "'. Does not contain a "
+ "time unit. (Suffix may be ns (nanoseconds), "
+ "us (microseconds), ms (milliseconds), s (seconds), "
+ "h (hours), d (days). For example, '20s' means 20 seconds.)");
}
try {
return Pair.of(new BigDecimal(s).longValue(), unit);
} catch (NumberFormatException e) {
throw new NumberFormatException(
"Invalid time interval '" + original + "'");
}
}
/**
* Converts a list of olap4j-style segments to a list of mondrian-style
* segments.
*
* @param olap4jSegmentList List of olap4j segments
* @return List of mondrian segments
*/
public static List<Id.Segment> convert(
List<IdentifierSegment> olap4jSegmentList)
{
final List<Id.Segment> list = new ArrayList<Id.Segment>();
for (IdentifierSegment olap4jSegment : olap4jSegmentList) {
list.add(convert(olap4jSegment));
}
return list;
}
/**
* Converts an olap4j-style segment to a mondrian-style segment.
*
* @param olap4jSegment olap4j segment
* @return mondrian segment
*/
public static Id.Segment convert(IdentifierSegment olap4jSegment) {
if (olap4jSegment instanceof NameSegment) {
return convert((NameSegment) olap4jSegment);
} else {
return convert((KeySegment) olap4jSegment);
}
}
    // Converts an olap4j key segment to a mondrian key segment. The result
    // is a lazy view: each key part is converted on access.
    private static Id.KeySegment convert(final KeySegment keySegment) {
        return new Id.KeySegment(
            new AbstractList<Id.NameSegment>() {
                public Id.NameSegment get(int index) {
                    return convert(keySegment.getKeyParts().get(index));
                }
                public int size() {
                    return keySegment.getKeyParts().size();
                }
            });
    }
    // Converts an olap4j name segment (name plus quoting style) to the
    // mondrian equivalent.
    private static Id.NameSegment convert(NameSegment nameSegment) {
        return new Id.NameSegment(
            nameSegment.getName(),
            convert(nameSegment.getQuoting()));
    }
    // Maps an olap4j quoting style onto the mondrian enumeration; any other
    // constant is a programming error.
    private static Id.Quoting convert(Quoting quoting) {
        switch (quoting) {
        case QUOTED:
            return Id.Quoting.QUOTED;
        case UNQUOTED:
            return Id.Quoting.UNQUOTED;
        case KEY:
            return Id.Quoting.KEY;
        default:
            throw Util.unexpected(quoting);
        }
    }
    /**
     * Applies a collection of filters to an iterable.
     *
     * @param iterable Iterable
     * @param conds Zero or more conditions
     * @param <T> Element type
     * @return Iterable that returns only members of underlying iterable for
     * for which all conditions evaluate to true
     */
    public static <T> Iterable<T> filter(
        final Iterable<T> iterable,
        final Functor1<Boolean, T>... conds)
    {
        // Drop always-true conditions; with none left, no wrapper is needed.
        final Functor1<Boolean, T>[] conds2 = optimizeConditions(conds);
        if (conds2.length == 0) {
            return iterable;
        }
        return new Iterable<T>() {
            public Iterator<T> iterator() {
                return new Iterator<T>() {
                    final Iterator<T> iterator = iterable.iterator();
                    // Look-ahead element; meaningful only while hasNext.
                    T next;
                    // Primed here by the field initializer (which runs after
                    // 'iterator' is assigned), then refreshed by next().
                    boolean hasNext = moveToNext();
                    // Advances 'next' to the first remaining element that
                    // satisfies every condition; false when exhausted.
                    private boolean moveToNext() {
                        outer:
                        while (iterator.hasNext()) {
                            next = iterator.next();
                            for (Functor1<Boolean, T> cond : conds2) {
                                if (!cond.apply(next)) {
                                    continue outer;
                                }
                            }
                            return true;
                        }
                        return false;
                    }
                    public boolean hasNext() {
                        return hasNext;
                    }
                    public T next() {
                        T t = next;
                        hasNext = moveToNext();
                        return t;
                    }
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        };
    }
private static <T> Functor1<Boolean, T>[] optimizeConditions(
Functor1<Boolean, T>[] conds)
{
final List<Functor1<Boolean, T>> functor1List =
new ArrayList<Functor1<Boolean, T>>(Arrays.asList(conds));
for (Iterator<Functor1<Boolean, T>> funcIter =
functor1List.iterator(); funcIter.hasNext();)
{
Functor1<Boolean, T> booleanTFunctor1 = funcIter.next();
if (booleanTFunctor1 == trueFunctor()) {
funcIter.remove();
}
}
if (functor1List.size() < conds.length) {
//noinspection unchecked
return functor1List.toArray(new Functor1[functor1List.size()]);
} else {
return conds;
}
}
/**
* Sorts a collection of {@link Comparable} objects and returns a list.
*
* @param collection Collection
* @param <T> Element type
* @return Sorted list
*/
public static <T extends Comparable> List<T> sort(
Collection<T> collection)
{
Object[] a = collection.toArray(new Object[collection.size()]);
Arrays.sort(a);
return cast(Arrays.asList(a));
}
/**
* Sorts a collection of objects using a {@link java.util.Comparator} and returns a
* list.
*
* @param collection Collection
* @param comparator Comparator
* @param <T> Element type
* @return Sorted list
*/
public static <T> List<T> sort(
Collection<T> collection,
Comparator<T> comparator)
{
Object[] a = collection.toArray(new Object[collection.size()]);
//noinspection unchecked
Arrays.sort(a, (Comparator<? super Object>) comparator);
return cast(Arrays.asList(a));
}
    /**
     * Converts a list of mondrian segments to olap4j segments, as a lazy
     * unmodifiable view over the underlying list.
     *
     * @param segments Mondrian segments
     * @return olap4j segments
     */
    public static List<IdentifierSegment> toOlap4j(
        final List<Id.Segment> segments)
    {
        return new AbstractList<IdentifierSegment>() {
            public IdentifierSegment get(int index) {
                return toOlap4j(segments.get(index));
            }
            public int size() {
                return segments.size();
            }
        };
    }
    // Converts one mondrian segment to olap4j, dispatching on its quoting:
    // KEY segments become KeySegments, everything else NameSegments.
    public static IdentifierSegment toOlap4j(Id.Segment segment) {
        switch (segment.quoting) {
        case KEY:
            return toOlap4j((Id.KeySegment) segment);
        default:
            return toOlap4j((Id.NameSegment) segment);
        }
    }
    // Converts a mondrian key segment to olap4j. The result is a lazy view:
    // each sub-segment is converted on access.
    private static KeySegment toOlap4j(final Id.KeySegment keySegment) {
        return new KeySegment(
            new AbstractList<NameSegment>() {
                public NameSegment get(int index) {
                    return toOlap4j(keySegment.subSegmentList.get(index));
                }
                public int size() {
                    return keySegment.subSegmentList.size();
                }
            });
    }
    // Converts a mondrian name segment to olap4j. The first constructor
    // argument (parse region) is not tracked here, hence null.
    private static NameSegment toOlap4j(Id.NameSegment nameSegment) {
        return new NameSegment(
            null,
            nameSegment.name,
            toOlap4j(nameSegment.quoting));
    }
    // The two Quoting enums share constant names, so a name-based
    // conversion suffices.
    public static Quoting toOlap4j(Id.Quoting quoting) {
        return Quoting.valueOf(quoting.name());
    }
    // TODO: move to IdentifierSegment
    /**
     * Returns whether a segment matches a name, honoring the segment's
     * quoting: quoted segments compare via equalName, unquoted segments
     * compare case-insensitively, and key segments never match (FIXME).
     */
    public static boolean matches(IdentifierSegment segment, String name) {
        switch (segment.getQuoting()) {
        case KEY:
            return false; // FIXME
        case QUOTED:
            return equalName(segment.getName(), name);
        case UNQUOTED:
            return segment.getName().equalsIgnoreCase(name);
        default:
            throw unexpected(segment.getQuoting());
        }
    }
    /**
     * Returns whether a member matches a compound name, either exactly (the
     * imploded name equals the member's unique name) or structurally, by
     * walking up the member's parent chain and matching one trailing name
     * part per ancestor level.
     */
    public static boolean matches(
        Member member, List<Id.Segment> nameParts)
    {
        if (Util.equalName(Util.implode(nameParts),
            member.getUniqueName()))
        {
            // exact match
            return true;
        }
        Id.Segment segment = nameParts.get(nameParts.size() - 1);
        // Match the last remaining name part against each member in the
        // ancestor chain, consuming one part per step.
        while (member.getParentMember() != null) {
            if (!segment.matches(member.getName())) {
                return false;
            }
            member = member.getParentMember();
            nameParts = nameParts.subList(0, nameParts.size() - 1);
            segment = nameParts.get(nameParts.size() - 1);
        }
        // At the root: the remaining prefix must name the hierarchy.
        if (segment.matches(member.getName())) {
            return Util.equalName(
                member.getHierarchy().getUniqueName(),
                Util.implode(nameParts.subList(0, nameParts.size() - 1)));
        } else if (member.isAll()) {
            // The (invisible) 'all' member may be skipped in the name.
            return Util.equalName(
                member.getHierarchy().getUniqueName(),
                Util.implode(nameParts));
        } else {
            return false;
        }
    }
    /**
     * Returns an exception stating that an element of the given
     * {@link Category} was not found.
     *
     * @param category Category of the element that failed to resolve
     * @param identifierNode Identifier that failed to resolve
     * @return Exception to throw
     */
    public static RuntimeException newElementNotFoundException(
        int category,
        IdentifierNode identifierNode)
    {
        String type;
        switch (category) {
        case Category.Member:
            // Members get a dedicated, localized message.
            return MondrianResource.instance().MemberNotFound.ex(
                identifierNode.toString());
        case Category.Unknown:
            type = "Element";
            break;
        default:
            type = Category.instance().getDescription(category);
        }
        return newError(type + " '" + identifierNode + "' not found");
    }
/**
* Calls {@link java.util.concurrent.Future#get()} and converts any
* throwable into a non-checked exception.
*
* @param future Future
* @param message Message to qualify wrapped exception
* @param <T> Result type
* @return Result
*/
public static <T> T safeGet(Future<T> future, String message) {
try {
return future.get();
} catch (InterruptedException e) {
throw newError(e, message);
} catch (ExecutionException e) {
final Throwable cause = e.getCause();
if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
} else if (cause instanceof Error) {
throw (Error) cause;
} else {
throw newError(cause, message);
}
}
}
public static <T> Set<T> newIdentityHashSetFake() {
final HashMap<T, Boolean> map = new HashMap<T, Boolean>();
return new Set<T>() {
public int size() {
return map.size();
}
public boolean isEmpty() {
return map.isEmpty();
}
public boolean contains(Object o) {
return map.containsKey(o);
}
public Iterator<T> iterator() {
return map.keySet().iterator();
}
public Object[] toArray() {
return map.keySet().toArray();
}
public <T> T[] toArray(T[] a) {
return map.keySet().toArray(a);
}
public boolean add(T t) {
return map.put(t, Boolean.TRUE) == null;
}
public boolean remove(Object o) {
return map.remove(o) == Boolean.TRUE;
}
public boolean containsAll(Collection<?> c) {
return map.keySet().containsAll(c);
}
public boolean addAll(Collection<? extends T> c) {
throw new UnsupportedOperationException();
}
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
public boolean removeAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
public void clear() {
map.clear();
}
};
}
    /**
     * Equivalent to {@link Timer#Timer(String, boolean)}.
     * (Introduced in JDK 1.5.)
     *
     * @param name the name of the associated thread
     * @param isDaemon true if the associated thread should run as a daemon
     * @return timer
     */
    public static Timer newTimer(String name, boolean isDaemon) {
        // The named-Timer constructor only exists from JDK 1.5; the
        // compatibility layer picks an implementation for the running JDK.
        return compatible.newTimer(name, isDaemon);
    }
    /**
     * As Arrays#binarySearch(Object[], int, int, Object), but
     * available pre-JDK 1.6.
     *
     * @param ts Sorted array to search
     * @param start Index of the first element (inclusive) to search
     * @param end Index of the last element (exclusive) to search
     * @param t Value to search for
     * @return Index of the search key, if found
     */
    public static <T extends Comparable<T>> int binarySearch(
        T[] ts, int start, int end, T t)
    {
        return compatible.binarySearch(ts, start, end, t);
    }
/**
* Returns the intersection of two sorted sets. Does not modify either set.
*
* <p>Optimized for the case that both sets are {@link ArraySortedSet}.</p>
*
* @param set1 First set
* @param set2 Second set
* @return Intersection of the sets
*/
public static <E extends Comparable> SortedSet<E> intersect(
SortedSet<E> set1,
SortedSet<E> set2)
{
if (set1.isEmpty()) {
return set1;
}
if (set2.isEmpty()) {
return set2;
}
if (!(set1 instanceof ArraySortedSet)
|| !(set2 instanceof ArraySortedSet))
{
final TreeSet<E> set = new TreeSet<E>(set1);
set.retainAll(set2);
return set;
}
final Comparable<?>[] result =
new Comparable[Math.min(set1.size(), set2.size())];
final Iterator<E> it1 = set1.iterator();
final Iterator<E> it2 = set2.iterator();
int i = 0;
E e1 = it1.next();
E e2 = it2.next();
for (;;) {
final int compare = e1.compareTo(e2);
if (compare == 0) {
result[i++] = e1;
if (!it1.hasNext() || !it2.hasNext()) {
break;
}
e1 = it1.next();
e2 = it2.next();
} else if (compare == 1) {
if (!it2.hasNext()) {
break;
}
e2 = it2.next();
} else {
if (!it1.hasNext()) {
break;
}
e1 = it1.next();
}
}
return new ArraySortedSet(result, 0, i);
}
/**
* Compares two integers using the same algorithm as
* {@link Integer#compareTo(Integer)}.
*
* @param i0 First integer
* @param i1 Second integer
* @return Comparison
*/
public static int compareIntegers(int i0, int i1) {
return (i0 < i1 ? -1 : (i0 == i1 ? 0 : 1));
}
/**
* Returns the last item in a list.
*
* @param list List
* @param <T> Element type
* @return Last item in the list
* @throws IndexOutOfBoundsException if list is empty
*/
public static <T> T last(List<T> list) {
return list.get(list.size() - 1);
}
/**
* Returns the sole item in a list.
*
* <p>If the list has 0 or more than one element, throws.</p>
*
* @param list List
* @param <T> Element type
* @return Sole item in the list
* @throws IndexOutOfBoundsException if list is empty or has more than 1 elt
*/
public static <T> T only(List<T> list) {
if (list.size() != 1) {
throw new IndexOutOfBoundsException(
"list " + list + " has " + list.size()
+ " elements, expected 1");
}
return list.get(0);
}
/**
* Closes a JDBC result set, statement, and connection, ignoring any errors.
* If any of them are null, that's fine.
*
* <p>If any of them throws a {@link SQLException}, returns the first
* such exception, but always executes all closes.</p>
*
* @param resultSet Result set
* @param statement Statement
* @param connection Connection
*/
public static SQLException close(
ResultSet resultSet,
Statement statement,
Connection connection)
{
SQLException firstException = null;
if (resultSet != null) {
try {
if (statement == null) {
statement = resultSet.getStatement();
}
resultSet.close();
} catch (Throwable t) {
firstException = new SQLException();
firstException.initCause(t);
}
}
if (statement != null) {
try {
statement.close();
} catch (Throwable t) {
if (firstException == null) {
firstException = new SQLException();
firstException.initCause(t);
}
}
}
if (connection != null) {
try {
connection.close();
} catch (Throwable t) {
if (firstException == null) {
firstException = new SQLException();
firstException.initCause(t);
}
}
}
return firstException;
}
/**
* Creates a bitset with bits from {@code fromIndex} (inclusive) to
* specified {@code toIndex} (exclusive) set to {@code true}.
*
* <p>For example, {@code bitSetBetween(0, 3)} returns a bit set with bits
* {0, 1, 2} set.
*
* @param fromIndex Index of the first bit to be set.
* @param toIndex Index after the last bit to be set.
* @return Bit set
*/
public static BitSet bitSetBetween(int fromIndex, int toIndex) {
final BitSet bitSet = new BitSet();
if (toIndex > fromIndex) {
// Avoid http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6222207
// "BitSet internal invariants may be violated"
bitSet.set(fromIndex, toIndex);
}
return bitSet;
}
public static class ErrorCellValue {
public String toString() {
return "#ERR";
}
}
@SuppressWarnings({"unchecked"})
public static <T> T[] genericArray(Class<T> clazz, int size) {
return (T[]) Array.newInstance(clazz, size);
}
/**
* Throws an internal error if condition is not true. It would be called
* <code>assert</code>, but that is a keyword as of JDK 1.4.
*/
public static void assertTrue(boolean b) {
if (!b) {
throw newInternal("assert failed");
}
}
/**
* Throws an internal error with the given messagee if condition is not
* true. It would be called <code>assert</code>, but that is a keyword as
* of JDK 1.4.
*/
public static void assertTrue(boolean b, String message) {
if (!b) {
throw newInternal("assert failed: " + message);
}
}
    /**
     * Creates an internal error with a given message.
     */
    public static RuntimeException newInternal(String message) {
        // Built from the localized "Internal" resource message.
        return MondrianResource.instance().Internal.ex(message);
    }
    /**
     * Creates an internal error with a given message and cause.
     */
    public static RuntimeException newInternal(Throwable e, String message) {
        // Built from the localized "Internal" resource, chaining the cause.
        return MondrianResource.instance().Internal.ex(message, e);
    }
    /**
     * Creates a non-internal error. Currently implemented in terms of
     * internal errors, but later we will create resourced messages.
     */
    public static RuntimeException newError(String message) {
        return newInternal(message);
    }
    /**
     * Creates a non-internal error. Currently implemented in terms of
     * internal errors, but later we will create resourced messages.
     */
    public static RuntimeException newError(Throwable e, String message) {
        return newInternal(e, message);
    }
/**
* Returns an exception indicating that we didn't expect to find this value
* here.
*
* @param value Value
*/
public static RuntimeException unexpected(Enum value) {
return Util.newInternal(
"Was not expecting value '" + value
+ "' for enumeration '" + value.getClass().getName()
+ "' in this context");
}
    /**
     * Checks that a precondition (declared using the javadoc <code>@pre</code>
     * tag) is satisfied.
     *
     * @param b The value of executing the condition
     */
    public static void assertPrecondition(boolean b) {
        // Delegates to assertTrue; exists to make @pre checks searchable.
        assertTrue(b);
    }
    /**
     * Checks that a precondition (declared using the javadoc <code>@pre</code>
     * tag) is satisfied. For example,
     *
     * <blockquote><pre>void f(String s) {
     *  Util.assertPrecondition(s != null, "s != null");
     *  ...
     * }</pre></blockquote>
     *
     * @param b The value of executing the condition
     * @param condition The text of the condition
     */
    public static void assertPrecondition(boolean b, String condition) {
        // Delegates to assertTrue; exists to make @pre checks searchable.
        assertTrue(b, condition);
    }
    /**
     * Checks that a postcondition (declared using the javadoc
     * <code>@post</code> tag) is satisfied.
     *
     * @param b The value of executing the condition
     */
    public static void assertPostcondition(boolean b) {
        // Delegates to assertTrue; exists to make @post checks searchable.
        assertTrue(b);
    }
    /**
     * Checks that a postcondition (declared using the javadoc
     * <code>@post</code> tag) is satisfied.
     *
     * @param b The value of executing the condition
     * @param condition The text of the condition
     */
    public static void assertPostcondition(boolean b, String condition) {
        assertTrue(b, condition);
    }
/**
* Converts an error into an array of strings, the most recent error first.
*
* @param e the error; may be null. Errors are chained according to their
* {@link Throwable#getCause cause}.
*/
public static String[] convertStackToString(Throwable e) {
List<String> list = new ArrayList<String>();
while (e != null) {
String sMsg = getErrorMessage(e);
list.add(sMsg);
e = e.getCause();
}
return list.toArray(new String[list.size()]);
}
/**
* Constructs the message associated with an arbitrary Java error, making
* up one based on the stack trace if there is none. As
* {@link #getErrorMessage(Throwable,boolean)}, but does not print the
* class name if the exception is derived from {@link java.sql.SQLException}
* or is exactly a {@link java.lang.Exception}.
*/
public static String getErrorMessage(Throwable err) {
boolean prependClassName =
!(err instanceof java.sql.SQLException
|| err.getClass() == java.lang.Exception.class);
return getErrorMessage(err, prependClassName);
}
/**
* Constructs the message associated with an arbitrary Java error, making
* up one based on the stack trace if there is none.
*
* @param err the error
* @param prependClassName should the error be preceded by the
* class name of the Java exception? defaults to false, unless the error
* is derived from {@link java.sql.SQLException} or is exactly a {@link
* java.lang.Exception}
*/
public static String getErrorMessage(
Throwable err,
boolean prependClassName)
{
String errMsg = err.getMessage();
if ((errMsg == null) || (err instanceof RuntimeException)) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
err.printStackTrace(pw);
return sw.toString();
} else {
return (prependClassName)
? err.getClass().getName() + ": " + errMsg
: errMsg;
}
}
/**
* If one of the causes of an exception is of a particular class, returns
* that cause. Otherwise returns null.
*
* @param e Exception
* @param clazz Desired class
* @param <T> Class
* @return Cause of given class, or null
*/
public static <T extends Throwable>
T getMatchingCause(Throwable e, Class<T> clazz) {
for (;;) {
if (clazz.isInstance(e)) {
return clazz.cast(e);
}
final Throwable cause = e.getCause();
if (cause == null || cause == e) {
return null;
}
e = cause;
}
}
    /**
     * Converts an expression to a string.
     */
    public static String unparse(Exp exp) {
        // Render via the expression's own unparse into an in-memory writer.
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        exp.unparse(pw);
        return sw.toString();
    }
    /**
     * Converts an query to a string.
     */
    public static String unparse(Query query) {
        // Uses QueryPrintWriter rather than a plain PrintWriter; see that
        // class for the query-specific formatting it applies.
        StringWriter sw = new StringWriter();
        PrintWriter pw = new QueryPrintWriter(sw);
        query.unparse(pw);
        return sw.toString();
    }
    /**
     * Creates a file-protocol URL for the given file.
     */
    public static URL toURL(File file) throws MalformedURLException {
        String path = file.getAbsolutePath();
        // This is a bunch of weird code that is required to
        // make a valid URL on the Windows platform, due
        // to inconsistencies in what getAbsolutePath returns.
        String fs = System.getProperty("file.separator");
        if (fs.length() == 1) {
            char sep = fs.charAt(0);
            if (sep != '/') {
                // Windows: turn back-slashes into URL-style slashes.
                path = path.replace(sep, '/');
            }
            if (path.charAt(0) != '/') {
                // Ensure the path is absolute ("C:/..." -> "/C:/...").
                path = '/' + path;
            }
        }
        path = "file://" + path;
        // NOTE(review): file.toURI().toURL() would be the modern equivalent,
        // but escapes special characters differently; behavior kept as-is.
        return new URL(path);
    }
/**
* <code>PropertyList</code> is an order-preserving list of key-value
* pairs. Lookup is case-insensitive, but the case of keys is preserved.
*/
public static class PropertyList
implements Iterable<Pair<String, String>>, Serializable
{
List<Pair<String, String>> list =
new ArrayList<Pair<String, String>>();
public PropertyList() {
this.list = new ArrayList<Pair<String, String>>();
}
private PropertyList(List<Pair<String, String>> list) {
this.list = list;
}
@SuppressWarnings({"CloneDoesntCallSuperClone"})
@Override
public PropertyList clone() {
return new PropertyList(new ArrayList<Pair<String, String>>(list));
}
public String get(String key) {
return get(key, null);
}
public String get(String key, String defaultValue) {
for (int i = 0, n = list.size(); i < n; i++) {
Pair<String, String> pair = list.get(i);
if (pair.left.equalsIgnoreCase(key)) {
return pair.right;
}
}
return defaultValue;
}
public String put(String key, String value) {
for (int i = 0, n = list.size(); i < n; i++) {
Pair<String, String> pair = list.get(i);
if (pair.left.equalsIgnoreCase(key)) {
String old = pair.right;
if (key.equalsIgnoreCase("Provider")) {
// Unlike all other properties, later values of
// "Provider" do not supersede
} else {
pair.right = value;
}
return old;
}
}
list.add(new Pair<String, String>(key, value));
return null;
}
public boolean remove(String key) {
boolean found = false;
for (int i = 0; i < list.size(); i++) {
Pair<String, String> pair = list.get(i);
if (pair.getKey().equalsIgnoreCase(key)) {
list.remove(i);
found = true;
--i;
}
}
return found;
}
public String toString() {
StringBuilder sb = new StringBuilder(64);
for (int i = 0, n = list.size(); i < n; i++) {
Pair<String, String> pair = list.get(i);
if (i > 0) {
sb.append("; ");
}
sb.append(pair.left);
sb.append('=');
final String right = pair.right;
if (right == null) {
sb.append("'null'");
} else {
// Quote a property value if is has a semi colon in it
// 'xxx;yyy'. Escape any single-quotes by doubling them.
final int needsQuote = right.indexOf(';');
if (needsQuote >= 0) {
// REVIEW: This logic leaves off the leading/trailing
// quote if the property value already has a
// leading/trailing quote. Doesn't seem right to me.
if (right.charAt(0) != '\'') {
sb.append("'");
}
sb.append(replace(right, "'", "''"));
if (right.charAt(right.length() - 1) != '\'') {
sb.append("'");
}
} else {
sb.append(right);
}
}
}
return sb.toString();
}
public Iterator<Pair<String, String>> iterator() {
return list.iterator();
}
}
/**
* Converts an OLE DB connect string into a {@link PropertyList}.
*
* <p> For example, <code>"Provider=MSOLAP; DataSource=LOCALHOST;"</code>
* becomes the set of (key, value) pairs <code>{("Provider","MSOLAP"),
* ("DataSource", "LOCALHOST")}</code>. Another example is
* <code>Provider='sqloledb';Data Source='MySqlServer';Initial
* Catalog='Pubs';Integrated Security='SSPI';</code>.
*
* <p> This method implements as much as possible of the <a
* href="http://msdn.microsoft.com/library/en-us/oledb/htm/oledbconnectionstringsyntax.asp"
* target="_blank">OLE DB connect string syntax
* specification</a>. To find what it <em>actually</em> does, take
* a look at the <code>mondrian.olap.UtilTestCase</code> test case.
*/
    public static PropertyList parseConnectString(String s) {
        // Each parser instance is single-use; it holds the cursor state.
        return new ConnectStringParser(s).parse();
    }
    /**
     * Stateful, single-use parser for OLE DB-style connect strings; see
     * {@link Util#parseConnectString(String)} for the syntax handled.
     */
    private static class ConnectStringParser {
        private final String s;   // the connect string being parsed
        private final int n;      // length of s
        private int i;            // cursor position within s
        private final StringBuilder nameBuf;   // scratch buffer for key names
        private final StringBuilder valueBuf;  // scratch buffer for values
        private ConnectStringParser(String s) {
            this.s = s;
            this.i = 0;
            this.n = s.length();
            this.nameBuf = new StringBuilder(64);
            this.valueBuf = new StringBuilder(64);
        }
        /** Parses the whole string into an ordered property list. */
        PropertyList parse() {
            PropertyList list = new PropertyList();
            while (i < n) {
                parsePair(list);
            }
            return list;
        }
        /**
         * Reads "name=value;" or "name=value<EOF>".
         */
        void parsePair(PropertyList list) {
            String name = parseName();
            if (name == null) {
                // nothing but trailing whitespace remained
                return;
            }
            String value;
            if (i >= n) {
                value = "";
            } else if (s.charAt(i) == ';') {
                // "name=;" - empty value
                i++;
                value = "";
            } else {
                value = parseValue();
            }
            list.put(name, value);
        }
        /**
         * Reads "name=". Name can contain equals sign if equals sign is
         * doubled. Returns null if there is no name to read.
         */
        String parseName() {
            nameBuf.setLength(0);
            while (true) {
                char c = s.charAt(i);
                switch (c) {
                case '=':
                    i++;
                    if (i < n && (c = s.charAt(i)) == '=') {
                        // doubled equals sign; take one of them, and carry on
                        i++;
                        nameBuf.append(c);
                        break;
                    }
                    String name = nameBuf.toString();
                    name = name.trim();
                    return name;
                case ' ':
                    if (nameBuf.length() == 0) {
                        // ignore preceding spaces
                        i++;
                        if (i >= n) {
                            // there is no name, e.g. trailing spaces after
                            // semicolon, 'x=1; y=2; '
                            return null;
                        }
                        break;
                    } else {
                        // fall through
                    }
                default:
                    // accumulate the character; embedded spaces are trimmed
                    // from the final name above
                    nameBuf.append(c);
                    i++;
                    if (i >= n) {
                        return nameBuf.toString().trim();
                    }
                }
            }
        }
        /**
         * Reads "value;" or "value<EOF>"
         */
        String parseValue() {
            char c;
            // skip over leading white space
            while ((c = s.charAt(i)) == ' ') {
                i++;
                if (i >= n) {
                    return "";
                }
            }
            if (c == '"' || c == '\'') {
                // quoted value: either single or double quotes delimit it
                String value = parseQuoted(c);
                // skip over trailing white space
                while (i < n && (c = s.charAt(i)) == ' ') {
                    i++;
                }
                if (i >= n) {
                    return value;
                } else if (s.charAt(i) == ';') {
                    i++;
                    return value;
                } else {
                    throw new RuntimeException(
                        "quoted value ended too soon, at position " + i
                        + " in '" + s + "'");
                }
            } else {
                // unquoted value: runs to the next semicolon (or EOF)
                String value;
                int semi = s.indexOf(';', i);
                if (semi >= 0) {
                    value = s.substring(i, semi);
                    i = semi + 1;
                } else {
                    value = s.substring(i);
                    i = n;
                }
                return value.trim();
            }
        }
        /**
         * Reads a string quoted by a given character. Occurrences of the
         * quoting character must be doubled. For example,
         * <code>parseQuoted('"')</code> reads <code>"a ""new"" string"</code>
         * and returns <code>a "new" string</code>.
         */
        String parseQuoted(char q) {
            char c = s.charAt(i++);
            Util.assertTrue(c == q);
            valueBuf.setLength(0);
            while (i < n) {
                c = s.charAt(i);
                if (c == q) {
                    i++;
                    if (i < n) {
                        c = s.charAt(i);
                        if (c == q) {
                            // doubled quote encodes a literal quote character
                            valueBuf.append(c);
                            i++;
                            continue;
                        }
                    }
                    return valueBuf.toString();
                } else {
                    valueBuf.append(c);
                    i++;
                }
            }
            throw new RuntimeException(
                "Connect string '" + s
                + "' contains unterminated quoted value '" + valueBuf.toString()
                + "'");
        }
    }
/**
* Combines two integers into a hash code.
*/
public static int hash(int i, int j) {
return (i << 4) ^ j;
}
/**
* Computes a hash code from an existing hash code and an object (which
* may be null).
*/
public static int hash(int h, Object o) {
int k = (o == null) ? 0 : o.hashCode();
return ((h << 4) | h) ^ k;
}
/**
* Computes a hash code from an existing hash code and an array of objects
* (which may be null).
*/
public static int hashArray(int h, Object [] a) {
// The hashcode for a null array and an empty array should be different
// than h, so use magic numbers.
if (a == null) {
return hash(h, 19690429);
}
if (a.length == 0) {
return hash(h, 19690721);
}
for (Object anA : a) {
h = hash(h, anA);
}
return h;
}
/**
* Concatenates one or more arrays.
*
* <p>Resulting array has same element type as first array. Each arrays may
* be empty, but must not be null.
*
* @param a0 First array
* @param as Zero or more subsequent arrays
* @return Array containing all elements
*/
public static <T> T[] appendArrays(
T[] a0,
T[]... as)
{
int n = a0.length;
for (T[] a : as) {
n += a.length;
}
T[] copy = Util.copyOf(a0, n);
n = a0.length;
for (T[] a : as) {
System.arraycopy(a, 0, copy, n, a.length);
n += a.length;
}
return copy;
}
/**
* Adds an object to the end of an array. The resulting array is of the
* same type (e.g. <code>String[]</code>) as the input array.
*
* @param a Array
* @param o Element
* @return New array containing original array plus element
*
* @see #appendArrays
*/
public static <T> T[] append(T[] a, T o) {
T[] a2 = Util.copyOf(a, a.length + 1);
a2[a.length] = o;
return a2;
}
/**
* Like <code>{@link java.util.Arrays}.copyOf(double[], int)</code>, but
* exists prior to JDK 1.6.
*
* @param original the array to be copied
* @param newLength the length of the copy to be returned
* @return a copy of the original array, truncated or padded with zeros
* to obtain the specified length
*/
public static double[] copyOf(double[] original, int newLength) {
double[] copy = new double[newLength];
System.arraycopy(
original, 0, copy, 0, Math.min(original.length, newLength));
return copy;
}
/**
* Like <code>{@link java.util.Arrays}.copyOf(int[], int)</code>, but
* exists prior to JDK 1.6.
*
* @param original the array to be copied
* @param newLength the length of the copy to be returned
* @return a copy of the original array, truncated or padded with zeros
* to obtain the specified length
*/
public static int[] copyOf(int[] original, int newLength) {
int[] copy = new int[newLength];
System.arraycopy(
original, 0, copy, 0, Math.min(original.length, newLength));
return copy;
}
/**
* Like <code>{@link java.util.Arrays}.copyOf(long[], int)</code>, but
* exists prior to JDK 1.6.
*
* @param original the array to be copied
* @param newLength the length of the copy to be returned
* @return a copy of the original array, truncated or padded with zeros
* to obtain the specified length
*/
public static long[] copyOf(long[] original, int newLength) {
long[] copy = new long[newLength];
System.arraycopy(
original, 0, copy, 0, Math.min(original.length, newLength));
return copy;
}
/**
* Like <code>{@link java.util.Arrays}.copyOf(Object[], int)</code>, but
* exists prior to JDK 1.6.
*
* @param original the array to be copied
* @param newLength the length of the copy to be returned
* @return a copy of the original array, truncated or padded with zeros
* to obtain the specified length
*/
    public static <T> T[] copyOf(T[] original, int newLength) {
        // Preserves the runtime component type of the input array.
        //noinspection unchecked
        return (T[]) copyOf(original, newLength, original.getClass());
    }
/**
* Copies the specified array.
*
* @param original the array to be copied
* @param newLength the length of the copy to be returned
* @param newType the class of the copy to be returned
* @return a copy of the original array, truncated or padded with nulls
* to obtain the specified length
*/
public static <T, U> T[] copyOf(
U[] original, int newLength, Class<? extends T[]> newType)
{
@SuppressWarnings({"unchecked", "RedundantCast"})
T[] copy = ((Object)newType == (Object)Object[].class)
? (T[]) new Object[newLength]
: (T[]) Array.newInstance(newType.getComponentType(), newLength);
//noinspection SuspiciousSystemArraycopy
System.arraycopy(
original, 0, copy, 0,
Math.min(original.length, newLength));
return copy;
}
/**
* Returns the cumulative amount of time spent accessing the database.
*
* @deprecated Use {@link mondrian.server.monitor.Monitor#getServer()} and
* {@link mondrian.server.monitor.ServerInfo#sqlStatementExecuteNanos};
* will be removed in 4.0.
*/
    public static long dbTimeMillis() {
        // Snapshot of the total accumulated via addDatabaseTime().
        return databaseMillis;
    }
/**
* Adds to the cumulative amount of time spent accessing the database.
*
* @deprecated Will be removed in 4.0.
*/
    public static void addDatabaseTime(long millis) {
        // Unsynchronized read-modify-write on a shared static; concurrent
        // updates may be lost. Tolerable for a deprecated statistic.
        databaseMillis += millis;
    }
/**
* Returns the system time less the time spent accessing the database.
* Use this method to figure out how long an operation took: call this
* method before an operation and after an operation, and the difference
* is the amount of non-database time spent.
*
* @deprecated Will be removed in 4.0.
*/
    public static long nonDbTimeMillis() {
        // Wall clock minus cumulative database time; only differences of two
        // calls to this method are meaningful.
        final long systemMillis = System.currentTimeMillis();
        return systemMillis - databaseMillis;
    }
/**
* Creates a very simple implementation of {@link Validator}. (Only
* useful for resolving trivial expressions.)
*/
    public static Validator createSimpleValidator(final FunTable funTable) {
        return new Validator() {
            // No enclosing query in this trivial context.
            public Query getQuery() {
                return null;
            }
            public SchemaReader getSchemaReader() {
                throw new UnsupportedOperationException();
            }
            // Validation is the identity: expressions are assumed valid.
            public Exp validate(Exp exp, boolean scalar) {
                return exp;
            }
            public void validate(ParameterExpr parameterExpr) {
            }
            public void validate(MemberProperty memberProperty) {
            }
            public void validate(QueryAxis axis) {
            }
            public void validate(Formula formula) {
            }
            public FunDef getDef(Exp[] args, String name, Syntax syntax) {
                // Very simple resolution. Assumes that there is precisely
                // one resolver (i.e. no overloading) and no argument
                // conversions are necessary.
                List<Resolver> resolvers = funTable.getResolvers(name, syntax);
                final Resolver resolver = resolvers.get(0);
                final List<Resolver.Conversion> conversionList =
                    new ArrayList<Resolver.Conversion>();
                final FunDef def =
                    resolver.resolve(args, this, conversionList);
                assert conversionList.isEmpty();
                return def;
            }
            public boolean alwaysResolveFunDef() {
                return false;
            }
            // All conversions are assumed possible; no type checking is done.
            public boolean canConvert(
                int ordinal, Exp fromExp,
                int to,
                List<Resolver.Conversion> conversions)
            {
                return true;
            }
            public boolean requiresExpression() {
                return false;
            }
            public FunTable getFunTable() {
                return funTable;
            }
            // Parameters are not supported; lookup always yields null.
            public Parameter createOrLookupParam(
                boolean definition,
                String name,
                Type type,
                Exp defaultExp,
                String description)
            {
                return null;
            }
        };
    }
/**
* Reads a Reader until it returns EOF and returns the contents as a String.
*
* @param rdr Reader to Read.
* @param bufferSize size of buffer to allocate for reading.
* @return content of Reader as String
* @throws IOException on I/O error
*/
public static String readFully(final Reader rdr, final int bufferSize)
throws IOException
{
if (bufferSize <= 0) {
throw new IllegalArgumentException(
"Buffer size must be greater than 0");
}
final char[] buffer = new char[bufferSize];
final StringBuilder buf = new StringBuilder(bufferSize);
int len;
while ((len = rdr.read(buffer)) != -1) {
buf.append(buffer, 0, len);
}
return buf.toString();
}
/**
* Reads an input stream until it returns EOF and returns the contents as an
* array of bytes.
*
* @param in Input stream
* @param bufferSize size of buffer to allocate for reading.
* @return content of stream as an array of bytes
* @throws IOException on I/O error
*/
public static byte[] readFully(final InputStream in, final int bufferSize)
throws IOException
{
if (bufferSize <= 0) {
throw new IllegalArgumentException(
"Buffer size must be greater than 0");
}
final byte[] buffer = new byte[bufferSize];
final ByteArrayOutputStream baos =
new ByteArrayOutputStream(bufferSize);
int len;
while ((len = in.read(buffer)) != -1) {
baos.write(buffer, 0, len);
}
return baos.toByteArray();
}
/**
* Returns the contents of a URL, substituting tokens.
*
* <p>Replaces the tokens "${key}" if the map is not null and "key" occurs
* in the key-value map.
*
* <p>If the URL string starts with "inline:" the contents are the
* rest of the URL.
*
* @param urlStr URL string
* @param map Key/value map
* @return Contents of URL with tokens substituted
* @throws IOException on I/O error
*/
    public static String readURL(final String urlStr, Map<String, String> map)
        throws IOException
    {
        // "inline:" URLs carry their content directly in the string itself.
        if (urlStr.startsWith("inline:")) {
            String content = urlStr.substring("inline:".length());
            if (map != null) {
                content = Util.replaceProperties(content, map);
            }
            return content;
        } else {
            final URL url = new URL(urlStr);
            return readURL(url, map);
        }
    }
/**
* Returns the contents of a URL.
*
* @param url URL
* @return Contents of URL
* @throws IOException on I/O error
*/
    public static String readURL(final URL url) throws IOException {
        // NOTE(review): passes a null map; replaceProperties dereferences the
        // map when the content contains a "${...}" token - confirm content
        // cannot contain tokens, or guard against null there.
        return readURL(url, null);
    }
/**
* Returns the contents of a URL, substituting tokens.
*
* <p>Replaces the tokens "${key}" if the map is not null and "key" occurs
* in the key-value map.
*
* @param url URL
* @param map Key/value map
* @return Contents of URL with tokens substituted
* @throws IOException on I/O error
*/
    public static String readURL(
        final URL url,
        Map<String, String> map)
        throws IOException
    {
        final Reader r =
            new BufferedReader(new InputStreamReader(url.openStream()));
        final int BUF_SIZE = 8096;
        try {
            // Variable name is historical; this reads any URL content, not
            // just XML catalogs.
            String xmlCatalog = readFully(r, BUF_SIZE);
            xmlCatalog = Util.replaceProperties(xmlCatalog, map);
            return xmlCatalog;
        } finally {
            r.close();
        }
    }
/**
* Gets content via Apache VFS. File must exist and have content
*
* @param url String
* @return Apache VFS FileContent for further processing
* @throws FileSystemException on error
*/
    public static InputStream readVirtualFile(String url)
        throws FileSystemException
    {
        // Treat catalogUrl as an Apache VFS (Virtual File System) URL.
        // VFS handles all of the usual protocols (http:, file:)
        // and then some.
        FileSystemManager fsManager = VFS.getManager();
        if (fsManager == null) {
            throw newError("Cannot get virtual file system manager");
        }
        // Workaround VFS bug: strip scheme prefixes so the path resolves
        // relative to the local filesystem.
        if (url.startsWith("file://localhost")) {
            url = url.substring("file://localhost".length());
        }
        if (url.startsWith("file:")) {
            url = url.substring("file:".length());
        }
        // work around for VFS bug not closing http sockets
        // (Mondrian-585) - bypass VFS entirely for http(s) URLs
        if (url.startsWith("http")) {
            try {
                return new URL(url).openStream();
            } catch (IOException e) {
                throw newError(
                    "Could not read URL: " + url);
            }
        }
        // Relative paths resolve against the current working directory.
        File userDir = new File("").getAbsoluteFile();
        FileObject file = fsManager.resolveFile(userDir, url);
        FileContent fileContent = null;
        try {
            // Because of VFS caching, make sure we refresh to get the latest
            // file content. This refresh may possibly solve the following
            // workaround for defect MONDRIAN-508, but cannot be tested, so we
            // will leave the work around for now.
            file.refresh();
            // Workaround to defect MONDRIAN-508. For HttpFileObjects, verifies
            // the URL of the file retrieved matches the URL passed in. A VFS
            // cache bug can cause it to treat URLs with different parameters
            // as the same file (e.g. http://blah.com?param=A,
            // http://blah.com?param=B)
            if (file instanceof HttpFileObject
                && !file.getName().getURI().equals(url))
            {
                fsManager.getFilesCache().removeFile(
                    file.getFileSystem(), file.getName());
                file = fsManager.resolveFile(userDir, url);
            }
            if (!file.isReadable()) {
                throw newError(
                    "Virtual file is not readable: " + url);
            }
            fileContent = file.getContent();
        } finally {
            file.close();
        }
        if (fileContent == null) {
            throw newError(
                "Cannot get virtual file content: " + url);
        }
        return fileContent.getInputStream();
    }
    public static String readVirtualFileAsString(
        String catalogUrl)
        throws IOException
    {
        // Reads the entire virtual file into memory; the stream is closed
        // even if reading fails.
        InputStream in = readVirtualFile(catalogUrl);
        try {
            return IOUtils.toString(in);
        } finally {
            IOUtils.closeQuietly(in);
        }
    }
/**
* Converts a {@link Properties} object to a string-to-string {@link Map}.
*
* @param properties Properties
* @return String-to-string map
*/
public static Map<String, String> toMap(final Properties properties) {
return new AbstractMap<String, String>() {
@SuppressWarnings({"unchecked"})
public Set<Entry<String, String>> entrySet() {
return (Set) properties.entrySet();
}
};
}
/**
* Replaces tokens in a string.
*
* <p>Replaces the tokens "${key}" if "key" occurs in the key-value map.
* Otherwise "${key}" is left in the string unchanged.
*
* @param text Source string
* @param env Map of key-value pairs
* @return String with tokens substituted
*/
public static String replaceProperties(
String text,
Map<String, String> env)
{
// As of JDK 1.5, cannot use StringBuilder - appendReplacement requires
// the antediluvian StringBuffer.
StringBuffer buf = new StringBuffer(text.length() + 200);
Pattern pattern = Pattern.compile("\\$\\{([^${}]+)\\}");
Matcher matcher = pattern.matcher(text);
while (matcher.find()) {
String varName = matcher.group(1);
String varValue = env.get(varName);
if (varValue != null) {
matcher.appendReplacement(buf, varValue);
} else {
matcher.appendReplacement(buf, "\\${$1}");
}
}
matcher.appendTail(buf);
return buf.toString();
}
    public static String printMemory() {
        // Equivalent to printMemory(null): no message tag in the output.
        return printMemory(null);
    }
public static String printMemory(String msg) {
final Runtime rt = Runtime.getRuntime();
final long freeMemory = rt.freeMemory();
final long totalMemory = rt.totalMemory();
final StringBuilder buf = new StringBuilder(64);
buf.append("FREE_MEMORY:");
if (msg != null) {
buf.append(msg);
buf.append(':');
}
buf.append(' ');
buf.append(freeMemory / 1024);
buf.append("kb ");
long hundredths = (freeMemory * 10000) / totalMemory;
buf.append(hundredths / 100);
hundredths %= 100;
if (hundredths >= 10) {
buf.append('.');
} else {
buf.append(".0");
}
buf.append(hundredths);
buf.append('%');
return buf.toString();
}
/**
* Casts a Set to a Set with a different element type.
*
* @param set Set
* @return Set of desired type
*/
@SuppressWarnings({"unchecked"})
public static <T> Set<T> cast(Set<?> set) {
return (Set<T>) set;
}
/**
* Casts a List to a List with a different element type.
*
* @param list List
* @return List of desired type
*/
@SuppressWarnings({"unchecked"})
public static <T> List<T> cast(List<?> list) {
return (List<T>) list;
}
/**
* Returns whether it is safe to cast a collection to a collection with a
* given element type.
*
* @param collection Collection
* @param clazz Target element type
* @param <T> Element type
* @return Whether all not-null elements of the collection are instances of
* element type
*/
public static <T> boolean canCast(
Collection<?> collection,
Class<T> clazz)
{
for (Object o : collection) {
if (o != null && !clazz.isInstance(o)) {
return false;
}
}
return true;
}
/**
* Casts a collection to iterable.
*
* Under JDK 1.4, {@link Collection} objects do not implement
* {@link Iterable}, so this method inserts a casting wrapper. (Since
* Iterable does not exist under JDK 1.4, they will have been compiled
* under JDK 1.5 or later, then retrowoven to 1.4 class format. References
* to Iterable will have been replaced with references to
* <code>com.rc.retroweaver.runtime.Retroweaver_</code>.
*
* <p>Under later JDKs this method is trivial. This method can be deleted
* when we discontinue support for JDK 1.4.
*
* @param iterable Object which ought to be iterable
* @param <T> Element type
* @return Object cast to Iterable
*/
    public static <T> Iterable<T> castToIterable(
        final Object iterable)
    {
        // Only the retrowoven (JDK 1.4) runtime needs the wrapper; otherwise
        // Collection already implements Iterable and a cast suffices.
        if (Util.Retrowoven
            && !(iterable instanceof Iterable))
        {
            return new Iterable<T>() {
                public Iterator<T> iterator() {
                    // Unchecked: caller asserts the element type.
                    return ((Collection<T>) iterable).iterator();
                }
            };
        }
        return (Iterable<T>) iterable;
    }
/**
* Looks up an enumeration by name, returning null if null or not valid.
*
* @param clazz Enumerated type
* @param name Name of constant
*/
    public static <E extends Enum<E>> E lookup(Class<E> clazz, String name) {
        // Null default: absent or invalid names yield null.
        return lookup(clazz, name, null);
    }
/**
* Looks up an enumeration by name, returning a given default value if null
* or not valid.
*
* @param clazz Enumerated type
* @param name Name of constant
* @param defaultValue Default value if constant is not found
* @return Value, or null if name is null or value does not exist
*/
public static <E extends Enum<E>> E lookup(
Class<E> clazz, String name, E defaultValue)
{
if (name == null) {
return defaultValue;
}
try {
return Enum.valueOf(clazz, name);
} catch (IllegalArgumentException e) {
return defaultValue;
}
}
/**
* Make a BigDecimal from a double. On JDK 1.5 or later, the BigDecimal
* precision reflects the precision of the double while with JDK 1.4
* this is not the case.
*
* @param d the input double
* @return the BigDecimal
*/
    public static BigDecimal makeBigDecimalFromDouble(double d) {
        // Delegates to the JDK-version-specific implementation.
        return compatible.makeBigDecimalFromDouble(d);
    }
/**
* Returns a literal pattern String for the specified String.
*
* <p>Specification as for {@link Pattern#quote(String)}, which was
* introduced in JDK 1.5.
*
* @param s The string to be literalized
* @return A literal string replacement
*/
    public static String quotePattern(String s) {
        // Delegates to the JDK-version-specific implementation.
        return compatible.quotePattern(s);
    }
/**
* Generates a unique id.
*
* <p>From JDK 1.5 onwards, uses a {@code UUID}.
*
* @return A unique id
*/
    public static String generateUuidString() {
        // Delegates to the JDK-version-specific implementation.
        return compatible.generateUuidString();
    }
/**
* Compiles a script to yield a Java interface.
*
* <p>Only valid JDK 1.6 and higher; fails on JDK 1.5 and earlier.</p>
*
* @param iface Interface script should implement
* @param script Script code
* @param engineName Name of engine (e.g. "JavaScript")
* @param <T> Interface
* @return Object that implements given interface
*/
    public static <T> T compileScript(
        Class<T> iface,
        String script,
        String engineName)
    {
        // Delegates to the JDK-version-specific implementation (requires the
        // javax.script machinery available from JDK 1.6).
        return compatible.compileScript(iface, script, engineName);
    }
/**
* Removes a thread local from the current thread.
*
* <p>From JDK 1.5 onwards, calls {@link ThreadLocal#remove()}; before
* that, no-ops.</p>
*
* @param threadLocal Thread local
* @param <T> Type
*/
    public static <T> void threadLocalRemove(ThreadLocal<T> threadLocal) {
        // Delegates to the JDK-version-specific implementation.
        compatible.threadLocalRemove(threadLocal);
    }
/**
* Creates a hash set that, like {@link java.util.IdentityHashMap},
* compares keys using identity.
*
* @param <T> Element type
* @return Set
*/
    public static <T> Set<T> newIdentityHashSet() {
        // Delegates to the JDK-version-specific implementation.
        return compatible.newIdentityHashSet();
    }
/**
* Creates a new udf instance from the given udf class.
*
* @param udfClass the class to create new instance for
* @param functionName Function name, or null
* @return an instance of UserDefinedFunction
*/
public static UserDefinedFunction createUdf(
Class<? extends UserDefinedFunction> udfClass,
String functionName)
{
// Instantiate class with default constructor.
UserDefinedFunction udf;
String className = udfClass.getName();
String functionNameOrEmpty =
functionName == null
? ""
: functionName;
// Find a constructor.
Constructor<?> constructor;
Object[] args = {};
// 0. Check that class is public and top-level or static.
if (!Modifier.isPublic(udfClass.getModifiers())
|| (udfClass.getEnclosingClass() != null
&& !Modifier.isStatic(udfClass.getModifiers())))
{
throw MondrianResource.instance().UdfClassMustBePublicAndStatic.ex(
functionName,
className);
}
// 1. Look for a constructor "public Udf(String name)".
try {
constructor = udfClass.getConstructor(String.class);
if (Modifier.isPublic(constructor.getModifiers())) {
args = new Object[] {functionName};
} else {
constructor = null;
}
} catch (NoSuchMethodException e) {
constructor = null;
}
// 2. Otherwise, look for a constructor "public Udf()".
if (constructor == null) {
try {
constructor = udfClass.getConstructor();
if (Modifier.isPublic(constructor.getModifiers())) {
args = new Object[] {};
} else {
constructor = null;
}
} catch (NoSuchMethodException e) {
constructor = null;
}
}
// 3. Else, no constructor suitable.
if (constructor == null) {
throw MondrianResource.instance().UdfClassWrongIface.ex(
functionNameOrEmpty,
className,
UserDefinedFunction.class.getName());
}
// Instantiate class.
try {
udf = (UserDefinedFunction) constructor.newInstance(args);
} catch (InstantiationException e) {
throw MondrianResource.instance().UdfClassWrongIface.ex(
functionNameOrEmpty,
className, UserDefinedFunction.class.getName());
} catch (IllegalAccessException e) {
throw MondrianResource.instance().UdfClassWrongIface.ex(
functionName,
className,
UserDefinedFunction.class.getName());
} catch (ClassCastException e) {
throw MondrianResource.instance().UdfClassWrongIface.ex(
functionNameOrEmpty,
className,
UserDefinedFunction.class.getName());
} catch (InvocationTargetException e) {
throw MondrianResource.instance().UdfClassWrongIface.ex(
functionName,
className,
UserDefinedFunction.class.getName());
}
return udf;
}
/**
* Check the resultSize against the result limit setting. Throws
* LimitExceededDuringCrossjoin exception if limit exceeded.
*
* When it is called from RolapNativeSet.checkCrossJoin(), it is only
* possible to check the known input size, because the final CJ result
* will come from the DB(and will be checked against the limit when
* fetching from the JDBC result set, in SqlTupleReader.prepareTuples())
*
* @param resultSize Result limit
* @throws ResourceLimitExceededException
*/
    public static void checkCJResultLimit(long resultSize) {
        int resultLimit = MondrianProperties.instance().ResultLimit.get();
        // Throw an exception, if the size of the crossjoin exceeds the result
        // limit. A limit of zero or below means "unlimited".
        if (resultLimit > 0 && resultLimit < resultSize) {
            throw MondrianResource.instance().LimitExceededDuringCrossjoin.ex(
                resultSize, resultLimit);
        }
        // Throw an exception if the crossjoin exceeds a reasonable limit.
        // (Yes, 4 billion is a reasonable limit.)
        if (resultSize > Integer.MAX_VALUE) {
            throw MondrianResource.instance().LimitExceededDuringCrossjoin.ex(
                resultSize, Integer.MAX_VALUE);
        }
    }
/**
* Converts an olap4j connect string into a legacy mondrian connect string.
*
* <p>For example,
* "jdbc:mondrian:Datasource=jdbc/SampleData;Catalog=foodmart/FoodMart.xml;"
* becomes
* "Provider=Mondrian;
* Datasource=jdbc/SampleData;Catalog=foodmart/FoodMart.xml;"
*
* <p>This method is intended to allow legacy applications (such as JPivot
* and Mondrian's XMLA server) to continue to create connections using
* Mondrian's legacy connection API even when they are handed an olap4j
* connect string.
*
* @param url olap4j connect string
* @return mondrian connect string, or null if cannot be converted
*/
public static String convertOlap4jConnectStringToNativeMondrian(
String url)
{
if (url.startsWith("jdbc:mondrian:")) {
return "Provider=Mondrian; "
+ url.substring("jdbc:mondrian:".length());
}
return null;
}
/**
     * Checks if a String is whitespace, empty ("") or null.
*
* <pre>
* StringUtils.isBlank(null) = true
* StringUtils.isBlank("") = true
* StringUtils.isBlank(" ") = true
* StringUtils.isBlank("bob") = false
* StringUtils.isBlank(" bob ") = false
* </pre>
*
* <p>(Copied from commons-lang.)
*
* @param str the String to check, may be null
* @return <code>true</code> if the String is null, empty or whitespace
*/
public static boolean isBlank(String str) {
final int strLen;
if (str == null || (strLen = str.length()) == 0) {
return true;
}
for (int i = 0; i < strLen; i++) {
if (!Character.isWhitespace(str.charAt(i))) {
return false;
}
}
return true;
}
/**
* Returns a role which has access to everything.
* @param schema A schema to bind this role to.
* @return A role with root access to the schema.
*/
    public static Role createRootRole(Schema schema) {
        RoleImpl role = new RoleImpl();
        role.grant(schema, Access.ALL);
        // Freeze the role so the shared root role cannot be mutated later.
        role.makeImmutable();
        return role;
    }
/**
* Tries to find the cube from which a dimension is taken.
* It considers private dimensions, shared dimensions and virtual
* dimensions. If it can't determine with certitude the origin
* of the dimension, it returns null.
*/
    public static Cube getDimensionCube(Dimension dimension) {
        // Scan every cube in the schema for a dimension that corresponds to
        // the one passed in; identity beats equality throughout.
        final Cube[] cubes = dimension.getSchema().getCubes();
        for (Cube cube : cubes) {
            for (Dimension dimension1 : cube.getDimensions()) {
                // If the dimensions have the same identity,
                // we found an access rule.
                if (dimension == dimension1) {
                    return cube;
                }
                // If the passed dimension argument is of class
                // RolapCubeDimension, we must validate the cube
                // assignment and make sure the cubes are the same.
                // If not, skip to the next grant.
                if (dimension instanceof RolapCubeDimension
                    && dimension.equals(dimension1)
                    && !((RolapCubeDimension)dimension1)
                        .getCube()
                        .equals(cube))
                {
                    continue;
                }
                // Last thing is to allow for equality correspondences
                // to work with virtual cubes.
                if (cube instanceof RolapCube
                    && ((RolapCube)cube).isVirtual()
                    && dimension.equals(dimension1))
                {
                    return cube;
                }
            }
        }
        // Origin could not be determined with certainty.
        return null;
    }
/**
* Similar to {@link ClassLoader#getResource(String)}, except the lookup
* is in reverse order.<br>
* i.e. returns the resource from the supplied classLoader or the
* one closest to it in the hierarchy, instead of the closest to the root
* class loader
* @param classLoader The class loader to fetch from
* @param name The resource name
* @return A URL object for reading the resource, or null if the resource
* could not be found or the invoker doesn't have adequate privileges to get
* the resource.
* @see ClassLoader#getResource(String)
* @see ClassLoader#getResources(String)
*/
public static URL getClosestResource(ClassLoader classLoader, String name) {
URL resource = null;
try {
// The last resource will be from the nearest ClassLoader.
Enumeration<URL> resourceCandidates =
classLoader.getResources(name);
while (resourceCandidates.hasMoreElements()) {
resource = resourceCandidates.nextElement();
}
} catch (IOException ioe) {
// ignore exception - it's OK if file is not found
// just keep getResource contract and return null
Util.discard(ioe);
}
return resource;
}
    /**
     * Base class for immutable, fixed-size flat lists (see the Flat2List
     * and Flat3List subclasses in this file). Read-heavy operations are
     * implemented by copying into an array-backed list; all mutators throw
     * {@link UnsupportedOperationException}.
     */
    public static abstract class AbstractFlatList<T>
        implements List<T>, RandomAccess
    {
        /** Copies this list into a standard array-backed list. */
        protected final List<T> asArrayList() {
            //noinspection unchecked
            return Arrays.asList((T[]) toArray());
        }
        public Iterator<T> iterator() {
            return asArrayList().iterator();
        }
        public ListIterator<T> listIterator() {
            return asArrayList().listIterator();
        }
        // The concrete subclasses in this file always hold 2 or 3 elements,
        // so a flat list is never empty.
        public boolean isEmpty() {
            return false;
        }
        public boolean add(Object t) {
            throw new UnsupportedOperationException();
        }
        public boolean addAll(Collection<? extends T> c) {
            throw new UnsupportedOperationException();
        }
        public boolean addAll(int index, Collection<? extends T> c) {
            throw new UnsupportedOperationException();
        }
        public boolean removeAll(Collection<?> c) {
            throw new UnsupportedOperationException();
        }
        public boolean retainAll(Collection<?> c) {
            throw new UnsupportedOperationException();
        }
        public void clear() {
            throw new UnsupportedOperationException();
        }
        public T set(int index, Object element) {
            throw new UnsupportedOperationException();
        }
        public void add(int index, Object element) {
            throw new UnsupportedOperationException();
        }
        public T remove(int index) {
            throw new UnsupportedOperationException();
        }
        public ListIterator<T> listIterator(int index) {
            return asArrayList().listIterator(index);
        }
        public List<T> subList(int fromIndex, int toIndex) {
            return asArrayList().subList(fromIndex, toIndex);
        }
        // Relies on the subclass's indexOf implementation.
        public boolean contains(Object o) {
            return indexOf(o) >= 0;
        }
        public boolean containsAll(Collection<?> c) {
            Iterator<?> e = c.iterator();
            while (e.hasNext()) {
                if (!contains(e.next())) {
                    return false;
                }
            }
            return true;
        }
        public boolean remove(Object o) {
            throw new UnsupportedOperationException();
        }
    }
/**
* List that stores its two elements in the two members of the class.
* Unlike {@link java.util.ArrayList} or
* {@link java.util.Arrays#asList(Object[])} there is
* no array, only one piece of memory allocated, therefore is very compact
* and cache and CPU efficient.
*
* <p>The list is read-only, cannot be modified or resized, and neither
* of the elements can be null.
*
* <p>The list is created via {@link Util#flatList(Object[])}.
*
* @see mondrian.olap.Util.Flat3List
* @param <T>
*/
protected static class Flat2List<T> extends AbstractFlatList<T> {
    private final T t0;
    private final T t1;
    /** Creates a two-element list; neither element may be null. */
    Flat2List(T t0, T t1) {
        this.t0 = t0;
        this.t1 = t1;
        assert t0 != null;
        assert t1 != null;
    }
    public String toString() {
        return "[" + t0 + ", " + t1 + "]";
    }
    public T get(int index) {
        switch (index) {
        case 0:
            return t0;
        case 1:
            return t1;
        default:
            throw new IndexOutOfBoundsException("index " + index);
        }
    }
    public int size() {
        return 2;
    }
    // Fast path compares field-by-field against another Flat2List; otherwise
    // falls back to the general List equality contract via Arrays.asList.
    public boolean equals(Object o) {
        if (o instanceof Flat2List) {
            Flat2List that = (Flat2List) o;
            return Util.equals(this.t0, that.t0)
                && Util.equals(this.t1, that.t1);
        }
        return Arrays.asList(t0, t1).equals(o);
    }
    // Same algorithm as AbstractList.hashCode for a two-element list, so
    // this list hashes equal to any equal List.
    public int hashCode() {
        int h = 1;
        h = h * 31 + t0.hashCode();
        h = h * 31 + t1.hashCode();
        return h;
    }
    public int indexOf(Object o) {
        if (t0.equals(o)) {
            return 0;
        }
        if (t1.equals(o)) {
            return 1;
        }
        return -1;
    }
    public int lastIndexOf(Object o) {
        if (t1.equals(o)) {
            return 1;
        }
        if (t0.equals(o)) {
            return 0;
        }
        return -1;
    }
    // NOTE(review): assumes a.length >= 2; a shorter array throws
    // ArrayIndexOutOfBoundsException rather than being reallocated as the
    // Collection.toArray(T[]) contract specifies -- confirm callers.
    @SuppressWarnings({"unchecked"})
    public <T2> T2[] toArray(T2[] a) {
        a[0] = (T2) t0;
        a[1] = (T2) t1;
        return a;
    }
    public Object[] toArray() {
        return new Object[] {t0, t1};
    }
}
/**
* List that stores its three elements in the three members of the class.
* Unlike {@link java.util.ArrayList} or
* {@link java.util.Arrays#asList(Object[])} there is
* no array, only one piece of memory allocated, therefore is very compact
* and cache and CPU efficient.
*
* <p>The list is read-only, cannot be modified or resized, and none
* of the elements can be null.
*
* <p>The list is created via {@link Util#flatList(Object[])}.
*
* @see mondrian.olap.Util.Flat2List
* @param <T>
*/
protected static class Flat3List<T> extends AbstractFlatList<T> {
    private final T t0;
    private final T t1;
    private final T t2;
    /** Creates a three-element list; no element may be null. */
    Flat3List(T t0, T t1, T t2) {
        this.t0 = t0;
        this.t1 = t1;
        this.t2 = t2;
        assert t0 != null;
        assert t1 != null;
        assert t2 != null;
    }
    public String toString() {
        return "[" + t0 + ", " + t1 + ", " + t2 + "]";
    }
    public T get(int index) {
        switch (index) {
        case 0:
            return t0;
        case 1:
            return t1;
        case 2:
            return t2;
        default:
            throw new IndexOutOfBoundsException("index " + index);
        }
    }
    public int size() {
        return 3;
    }
    // Fast path compares field-by-field against another Flat3List; otherwise
    // delegates to the general List equality contract via Arrays.asList,
    // mirroring Flat2List. The previous "return o.equals(this)" threw a
    // NullPointerException for equals(null), which Object.equals forbids.
    public boolean equals(Object o) {
        if (o instanceof Flat3List) {
            Flat3List that = (Flat3List) o;
            return Util.equals(this.t0, that.t0)
                && Util.equals(this.t1, that.t1)
                && Util.equals(this.t2, that.t2);
        }
        return Arrays.asList(t0, t1, t2).equals(o);
    }
    // Same algorithm as AbstractList.hashCode for a three-element list.
    public int hashCode() {
        int h = 1;
        h = h * 31 + t0.hashCode();
        h = h * 31 + t1.hashCode();
        h = h * 31 + t2.hashCode();
        return h;
    }
    public int indexOf(Object o) {
        if (t0.equals(o)) {
            return 0;
        }
        if (t1.equals(o)) {
            return 1;
        }
        if (t2.equals(o)) {
            return 2;
        }
        return -1;
    }
    public int lastIndexOf(Object o) {
        if (t2.equals(o)) {
            return 2;
        }
        if (t1.equals(o)) {
            return 1;
        }
        if (t0.equals(o)) {
            return 0;
        }
        return -1;
    }
    // NOTE(review): assumes a.length >= 3; a shorter array throws rather
    // than being reallocated per the Collection.toArray(T[]) contract.
    @SuppressWarnings({"unchecked"})
    public <T2> T2[] toArray(T2[] a) {
        a[0] = (T2) t0;
        a[1] = (T2) t1;
        a[2] = (T2) t2;
        return a;
    }
    public Object[] toArray() {
        return new Object[] {t0, t1, t2};
    }
}
/**
* Garbage-collecting iterator. Iterates over a collection of references,
* and if any of the references has been garbage-collected, removes it from
* the collection.
*
* @param <T> Element type
*/
public static class GcIterator<T> implements Iterator<T> {
    private final Iterator<? extends Reference<T>> iterator;
    // True while there is at least one live referent left to return.
    private boolean hasNext;
    // The next live referent to hand out, pre-fetched by moveToNext().
    private T next;
    public GcIterator(Iterator<? extends Reference<T>> iterator) {
        this.iterator = iterator;
        this.hasNext = true;
        moveToNext();
    }
    /**
     * Creates an iterable over a collection of references.
     *
     * @param referenceIterable Collection of references
     * @param <T2> element type
     * @return iterable over collection
     */
    public static <T2> Iterable<T2> over(
        final Iterable<? extends Reference<T2>> referenceIterable)
    {
        return new Iterable<T2>() {
            public Iterator<T2> iterator() {
                return new GcIterator<T2>(referenceIterable.iterator());
            }
        };
    }
    /**
     * Advances to the next live referent, removing cleared (garbage-
     * collected) references from the underlying collection on the way.
     */
    private void moveToNext() {
        while (iterator.hasNext()) {
            final Reference<T> ref = iterator.next();
            next = ref.get();
            if (next != null) {
                return;
            }
            iterator.remove();
        }
        hasNext = false;
        next = null; // drop the strong reference once exhausted
    }
    public boolean hasNext() {
        return hasNext;
    }
    public T next() {
        // Honor the Iterator contract instead of returning a stale value
        // when the iteration is already exhausted.
        if (!hasNext) {
            throw new java.util.NoSuchElementException();
        }
        final T result = next;
        moveToNext();
        return result;
    }
    public void remove() {
        throw new UnsupportedOperationException();
    }
}
/**
 * Function of arity 1: maps a parameter of type {@code PT} to a result of
 * type {@code RT}.
 */
public static interface Functor1<RT, PT> {
    RT apply(PT param);
}
/** Returns a functor that returns its argument unchanged. */
public static <T> Functor1<T, T> identityFunctor() {
    //noinspection unchecked
    return IDENTITY_FUNCTOR;
}
// Raw-typed singleton; identityFunctor() performs the (safe) unchecked cast.
private static final Functor1 IDENTITY_FUNCTOR =
    new Functor1<Object, Object>() {
        public Object apply(Object param) {
            return param;
        }
    };
/** Returns a functor that ignores its argument and returns {@code true}. */
public static <PT> Functor1<Boolean, PT> trueFunctor() {
    //noinspection unchecked
    return TRUE_FUNCTOR;
}
/** Returns a functor that ignores its argument and returns {@code false}. */
public static <PT> Functor1<Boolean, PT> falseFunctor() {
    //noinspection unchecked
    return FALSE_FUNCTOR;
}
// Raw-typed singleton backing trueFunctor().
private static final Functor1 TRUE_FUNCTOR =
    new Functor1<Boolean, Object>() {
        public Boolean apply(Object param) {
            return true;
        }
    };
// Raw-typed singleton backing falseFunctor().
private static final Functor1 FALSE_FUNCTOR =
    new Functor1<Boolean, Object>() {
        public Boolean apply(Object param) {
            return false;
        }
    };
/**
* Information about memory usage.
*
* @see mondrian.olap.Util#getMemoryInfo()
*/
public interface MemoryInfo {
    /** Returns a snapshot of current memory usage. */
    Usage get();
    /** Point-in-time memory usage figures (presumably bytes, per the usual
     * JMX MemoryUsage convention -- confirm against implementations). */
    public interface Usage {
        long getUsed();
        long getCommitted();
        long getMax();
    }
}
/**
* A {@link Comparator} implementation which can deal
* correctly with {@link RolapUtil#sqlNullValue}.
*/
public static class SqlNullSafeComparator
    implements Comparator<Comparable>
{
    public static final SqlNullSafeComparator instance =
        new SqlNullSafeComparator();
    private SqlNullSafeComparator() {
    }
    /**
     * Compares two values, ordering {@code RolapUtil.sqlNullValue} before
     * everything else. Two SQL null values compare equal: the previous
     * implementation returned -1 in that case, violating the
     * {@link Comparator} contract (compare(x, x) must be 0), which can
     * corrupt sorts and sorted collections.
     */
    public int compare(Comparable o1, Comparable o2) {
        if (o1 == RolapUtil.sqlNullValue) {
            return (o2 == RolapUtil.sqlNullValue) ? 0 : -1;
        }
        if (o2 == RolapUtil.sqlNullValue) {
            return 1;
        }
        return o1.compareTo(o2);
    }
}
/**
* This class implements the Knuth-Morris-Pratt algorithm
* to search within a byte array for a token byte array.
*/
/**
 * Knuth-Morris-Pratt searcher for a fixed byte-array token. The failure
 * table is built once in the constructor so repeated searches over
 * different inputs reuse it.
 */
public static class ByteMatcher {
    private final int[] matcher;
    public final byte[] key;
    public ByteMatcher(byte[] key) {
        this.key = key;
        this.matcher = compile(key);
    }
    /**
     * Matches the pre-compiled byte array token against a
     * byte array variable and returns the index of the key
     * within the array.
     * @param a An array of bytes to search for.
     * @return -1 if not found, or the index (0 based) of the match.
     */
    public int match(byte[] a) {
        int matched = 0;
        for (int pos = 0; pos < a.length; ++pos) {
            // On mismatch, fall back along the failure table instead of
            // rescanning the input.
            while (matched > 0 && key[matched] != a[pos]) {
                matched = matcher[matched - 1];
            }
            if (key[matched] == a[pos]) {
                ++matched;
            }
            if (matched == key.length) {
                return pos - key.length + 1;
            }
        }
        return -1;
    }
    /** Builds the KMP failure table: table[i] is the length of the longest
     *  proper prefix of key[0..i] that is also a suffix of it. */
    private int[] compile(byte[] key) {
        final int[] table = new int[key.length];
        int prefixLen = 0;
        for (int i = 1; i < key.length; ++i) {
            while (prefixLen > 0 && key[prefixLen] != key[i]) {
                prefixLen = table[prefixLen - 1];
            }
            if (key[i] == key[prefixLen]) {
                ++prefixLen;
            }
            table[i] = prefixLen;
        }
        return table;
    }
}
/**
* Transforms a list into a map for which all the keys return
* a null value associated to it.
*
* <p>The list passed as an argument will be used to back
* the map returned and as many methods are overridden as
* possible to make sure that we don't iterate over the backing
* list when creating it and when performing operations like
* .size(), entrySet() and contains().
*
* <p>The returned map is to be considered immutable. It will
* throw an {@link UnsupportedOperationException} if attempts to
* modify it are made.
*/
/**
 * Creates an immutable map whose keys are the elements of {@code list} and
 * whose values are all null, backed by the list itself.
 *
 * @param list keys of the map; also backs the returned view
 * @return immutable map from each list element to null
 */
public static <K, V> Map<K, V> toNullValuesMap(List<K> list) {
    return new NullValuesMap<K, V>(list);
}
/**
 * Immutable map view over a list in which every key maps to null. Views
 * delegate to the backing list so no copying or iteration happens up front.
 */
private static class NullValuesMap<K, V> extends AbstractMap<K, V> {
    private final List<K> list;
    private NullValuesMap(List<K> list) {
        super();
        this.list = Collections.unmodifiableList(list);
    }
    public Set<Entry<K, V>> entrySet() {
        return new AbstractSet<Entry<K, V>>() {
            public Iterator<Entry<K, V>>
                iterator()
            {
                return new Iterator<Entry<K, V>>() {
                    // Index of the last element returned; -1 before start.
                    private int pt = -1;
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }
                    @SuppressWarnings("unchecked")
                    public Entry<K, V> next() {
                        if (!hasNext()) {
                            throw new java.util.NoSuchElementException();
                        }
                        return new AbstractMapEntry(
                            list.get(++pt), null) {};
                    }
                    public boolean hasNext() {
                        // Bug fix: was "pt < list.size()", which stayed
                        // true after the last element and let next() read
                        // one index past the end of the list.
                        return pt + 1 < list.size();
                    }
                };
            }
            public int size() {
                return list.size();
            }
            public boolean contains(Object o) {
                if (o instanceof Entry) {
                    Entry e = (Entry) o;
                    // All our values are null, so an entry is contained
                    // only if its value is null too (previously any value
                    // was accepted as long as the key matched).
                    return e.getValue() == null && list.contains(e.getKey());
                }
                return false;
            }
        };
    }
    public Set<K> keySet() {
        return new AbstractSet<K>() {
            public Iterator<K> iterator() {
                return new Iterator<K>() {
                    private int pt = -1;
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }
                    public K next() {
                        if (!hasNext()) {
                            throw new java.util.NoSuchElementException();
                        }
                        return list.get(++pt);
                    }
                    public boolean hasNext() {
                        // Same off-by-one fix as entrySet().iterator().
                        return pt + 1 < list.size();
                    }
                };
            }
            public int size() {
                return list.size();
            }
            public boolean contains(Object o) {
                return list.contains(o);
            }
        };
    }
    public Collection<V> values() {
        return new AbstractList<V>() {
            public V get(int index) {
                return null;
            }
            public int size() {
                return list.size();
            }
            public boolean contains(Object o) {
                // Every key maps to null, so the value collection contains
                // null exactly when the map is non-empty.
                return o == null && size() > 0;
            }
        };
    }
    public V get(Object key) {
        return null;
    }
    public boolean containsKey(Object key) {
        return list.contains(key);
    }
    public boolean containsValue(Object o) {
        return o == null && size() > 0;
    }
}
}
// End Util.java
| AvinashPD/mondrian | src/main/mondrian/olap/Util.java | Java | epl-1.0 | 147,689 |
package org.dolphinemu.dolphinemu.viewholders;
import android.view.View;
import android.widget.ImageView;
import androidx.leanback.widget.ImageCardView;
import androidx.leanback.widget.Presenter;
import org.dolphinemu.dolphinemu.model.GameFile;
/**
* A simple class that stores references to views so that the GameAdapter doesn't need to
* keep calling findViewById(), which is expensive.
*/
public final class TvGameViewHolder extends Presenter.ViewHolder
{
    // The root card view for this row; the item view itself, cast once here.
    public ImageCardView cardParent;
    // The card's main image view, cached to avoid repeated lookups.
    public ImageView imageScreenshot;
    // Game backing this row; assigned externally (not set in this class).
    public GameFile gameFile;
    public TvGameViewHolder(View itemView)
    {
        super(itemView);
        // Tag the view with its holder so handlers can recover it from the
        // raw View without another lookup.
        itemView.setTag(this);
        cardParent = (ImageCardView) itemView;
        imageScreenshot = cardParent.getMainImageView();
    }
}
| TwitchPlaysPokemon/dolphinWatch | Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/viewholders/TvGameViewHolder.java | Java | gpl-2.0 | 765 |
/**
* # Hooks & Filters
*
* This file contains all of the form functions of the main _inbound object.
* Filters and actions are described below
*
* Forked from https://github.com/carldanley/WP-JS-Hooks/blob/master/src/event-manager.js
*
* @author David Wells <david@inboundnow.com>
* @version 0.0.1
*/
var _inboundHooks = (function (_inbound) {
/**
* # EventManager
*
* Actions and filters List
* addAction( 'namespace.identifier', callback, priority )
* addFilter( 'namespace.identifier', callback, priority )
* removeAction( 'namespace.identifier' )
* removeFilter( 'namespace.identifier' )
* doAction( 'namespace.identifier', arg1, arg2, moreArgs, finalArg )
* applyFilters( 'namespace.identifier', content )
* @return {[type]} [description]
*/
/**
* Handles managing all events for whatever you plug it into. Priorities for hooks are based on lowest to highest in
* that, lowest priority hooks are fired first.
*/
/**
 * Handles managing all events for whatever you plug it into. Priorities for
 * hooks are based on lowest to highest in that, lowest priority hooks are
 * fired first.
 */
var EventManager = function() {
    /**
     * Maintain a reference to the object scope so our public methods never
     * get confusing.
     */
    var MethodsAvailable = {
        removeFilter : removeFilter,
        applyFilters : applyFilters,
        addFilter : addFilter,
        removeAction : removeAction,
        doAction : doAction,
        addAction : addAction
    };
    /**
     * Contains the hooks that get registered with this EventManager. The
     * "flat" object literal gives native hash lookup per hook name.
     */
    var STORAGE = {
        actions : {},
        filters : {}
    };
    /**
     * Adds an action to the event manager.
     *
     * @param action Must contain namespace.identifier
     * @param callback Must be a valid callback function before this action is added
     * @param [priority=10] Used to control when the function is executed in relation to other callbacks bound to the same hook
     * @param [context] Supply a value to be used for this
     */
    function addAction( action, callback, priority, context ) {
        if( typeof action === 'string' && typeof callback === 'function' ) {
            priority = parseInt( ( priority || 10 ), 10 );
            _addHook( 'actions', action, callback, priority, context );
        }
        return MethodsAvailable;
    }
    /**
     * Performs an action if it exists. You can pass as many arguments as you
     * want; the first argument must always be the action name.
     */
    function doAction( /* action, arg1, arg2, ... */ ) {
        var args = Array.prototype.slice.call( arguments );
        var action = args.shift();
        if( typeof action === 'string' ) {
            _runHook( 'actions', action, args );
        }
        return MethodsAvailable;
    }
    /**
     * Removes the specified action if it contains a namespace.identifier & exists.
     *
     * @param action The action to remove
     * @param [callback] Callback function to remove
     */
    function removeAction( action, callback ) {
        if( typeof action === 'string' ) {
            _removeHook( 'actions', action, callback );
        }
        return MethodsAvailable;
    }
    /**
     * Adds a filter to the event manager.
     *
     * @param filter Must contain namespace.identifier
     * @param callback Must be a valid callback function before this action is added
     * @param [priority=10] Used to control when the function is executed in relation to other callbacks bound to the same hook
     * @param [context] Supply a value to be used for this
     */
    function addFilter( filter, callback, priority, context ) {
        if( typeof filter === 'string' && typeof callback === 'function' ) {
            priority = parseInt( ( priority || 10 ), 10 );
            // Bug fix: context was previously dropped here, so filter
            // callbacks never received the `this` value they registered.
            _addHook( 'filters', filter, callback, priority, context );
        }
        return MethodsAvailable;
    }
    /**
     * Performs a filter if it exists. You should only ever pass 1 argument
     * to be filtered; the first argument must always be the filter name.
     */
    function applyFilters( /* filter, filtered arg, arg2, ... */ ) {
        var args = Array.prototype.slice.call( arguments );
        var filter = args.shift();
        if( typeof filter === 'string' ) {
            return _runHook( 'filters', filter, args );
        }
        return MethodsAvailable;
    }
    /**
     * Removes the specified filter if it contains a namespace.identifier & exists.
     *
     * @param filter The filter to remove
     * @param [callback] Callback function to remove
     */
    function removeFilter( filter, callback ) {
        if( typeof filter === 'string') {
            _removeHook( 'filters', filter, callback );
        }
        return MethodsAvailable;
    }
    /**
     * Removes the specified hook, or just the matching handlers if a
     * callback (and optionally a context) is given.
     *
     * @param type Type of hook, either 'actions' or 'filters'
     * @param hook The hook (namespace.identifier) to remove
     * @private
     */
    function _removeHook( type, hook, callback, context ) {
        if ( !STORAGE[ type ][ hook ] ) {
            return;
        }
        if ( !callback ) {
            STORAGE[ type ][ hook ] = [];
        } else {
            var handlers = STORAGE[ type ][ hook ];
            var i;
            if ( !context ) {
                for ( i = handlers.length; i--; ) {
                    if ( handlers[i].callback === callback ) {
                        handlers.splice( i, 1 );
                    }
                }
            }
            else {
                for ( i = handlers.length; i--; ) {
                    var handler = handlers[i];
                    if ( handler.callback === callback && handler.context === context) {
                        handlers.splice( i, 1 );
                    }
                }
            }
        }
    }
    /**
     * Adds the hook to the appropriate storage container
     *
     * @param type 'actions' or 'filters'
     * @param hook The hook (namespace.identifier) to add to our event manager
     * @param callback The function that will be called when the hook is executed.
     * @param priority The priority of this hook. Must be an integer.
     * @param [context] A value to be used for this
     * @private
     */
    function _addHook( type, hook, callback, priority, context ) {
        var hookObject = {
            callback : callback,
            priority : priority,
            context : context
        };
        // Utilize 'prop itself' : http://jsperf.com/hasownproperty-vs-in-vs-undefined/19
        var hooks = STORAGE[ type ][ hook ];
        if( hooks ) {
            hooks.push( hookObject );
            hooks = _hookInsertSort( hooks );
        }
        else {
            hooks = [ hookObject ];
        }
        STORAGE[ type ][ hook ] = hooks;
    }
    /**
     * Use an insert sort for keeping our hooks organized based on priority.
     * Insertion sort is stable, so same-priority hooks keep registration order.
     *
     * @param hooks The custom array containing all of the appropriate hooks to perform an insert sort on.
     * @private
     */
    function _hookInsertSort( hooks ) {
        var tmpHook, j, prevHook;
        for( var i = 1, len = hooks.length; i < len; i++ ) {
            tmpHook = hooks[ i ];
            j = i;
            while( ( prevHook = hooks[ j - 1 ] ) && prevHook.priority > tmpHook.priority ) {
                hooks[ j ] = hooks[ j - 1 ];
                --j;
            }
            hooks[ j ] = tmpHook;
        }
        return hooks;
    }
    /**
     * Runs the specified hook. If it is an action, the value is not modified
     * but if it is a filter, it is.
     *
     * @param type 'actions' or 'filters'
     * @param hook The hook ( namespace.identifier ) to be ran.
     * @param args Arguments to pass to the action/filter. If it's a filter, args is actually a single parameter.
     * @private
     */
    function _runHook( type, hook, args ) {
        var handlers = STORAGE[ type ][ hook ];
        if ( !handlers ) {
            return (type === 'filters') ? args[0] : false;
        }
        var i = 0, len = handlers.length;
        if ( type === 'filters' ) {
            for ( ; i < len; i++ ) {
                args[ 0 ] = handlers[ i ].callback.apply( handlers[ i ].context, args );
            }
        } else {
            for ( ; i < len; i++ ) {
                handlers[ i ].callback.apply( handlers[ i ].context, args );
            }
        }
        return ( type === 'filters' ) ? args[ 0 ] : true;
    }
    // return all of the publicly available methods
    return MethodsAvailable;
};
_inbound.hooks = new EventManager();
/**
* Event Hooks and Filters public methods
*/
/*
* add_action
*
* This function uses _inbound.hooks to mimics WP add_action
*
* ```js
* function Inbound_Add_Action_Example(data) {
* // Do stuff here.
* };
* // Add action to the hook
* _inbound.add_action( 'name_of_action', Inbound_Add_Action_Example, 10 );
* ```
*/
/*
 * add_action
 *
 * Mimics WP add_action via _inbound.hooks. Accepts multiple space-separated
 * action names, e.g. 'ready append'.
 */
_inbound.add_action = function() {
    // allow multiple action parameters such as 'ready append'
    var actions = arguments[0].split(' ');
    // Indexed loop (the previous bare `for (k in actions)` leaked an
    // implicit global `k` and could pick up inherited Array properties).
    for (var k = 0; k < actions.length; k++) {
        // prefix action
        arguments[0] = 'inbound.' + actions[k];
        _inbound.hooks.addAction.apply(this, arguments);
    }
    return this;
};
/*
 * remove_action
 *
 * This function uses _inbound.hooks to mimic WP remove_action.
 *
 * ```js
 * // Remove action 'name_of_action'
 * _inbound.remove_action( 'name_of_action');
 * ```
 *
 */
_inbound.remove_action = function() {
    // prefix action
    arguments[0] = 'inbound.' + arguments[0];
    _inbound.hooks.removeAction.apply(this, arguments);
    return this;
};
/*
 * do_action
 *
 * This function uses _inbound.hooks to mimic WP do_action.
 * Use this to allow third-party JS plugins to act on your functions.
 *
 */
_inbound.do_action = function() {
    // prefix action
    arguments[0] = 'inbound.' + arguments[0];
    _inbound.hooks.doAction.apply(this, arguments);
    return this;
};
/*
 * add_filter
 *
 * This function uses _inbound.hooks to mimic WP add_filter.
 *
 * ```js
 * _inbound.add_filter( 'urlParamFilter', URL_Param_Filter, 10 );
 * function URL_Param_Filter(urlParams) {
 *
 *   var params = urlParams || {};
 *   // check for item in object
 *   if(params.utm_source !== "undefined"){
 *     //alert('url param "utm_source" is here');
 *   }
 *
 *   // delete item from object
 *   delete params.utm_source;
 *
 *   return params;
 *
 * }
 * ```
 */
_inbound.add_filter = function() {
    // prefix action
    arguments[0] = 'inbound.' + arguments[0];
    _inbound.hooks.addFilter.apply(this, arguments);
    return this;
};
/*
 * remove_filter
 *
 * This function uses _inbound.hooks to mimic WP remove_filter.
 *
 * ```js
 * // Remove filter 'urlParamFilter'
 * _inbound.remove_filter( 'urlParamFilter');
 * ```
 *
 */
_inbound.remove_filter = function() {
    // prefix action
    arguments[0] = 'inbound.' + arguments[0];
    _inbound.hooks.removeFilter.apply(this, arguments);
    return this;
};
/*
 * apply_filters
 *
 * This function uses _inbound.hooks to mimic WP apply_filters.
 * Returns the filtered value (unlike the other wrappers, which return this).
 *
 */
_inbound.apply_filters = function() {
    //console.log('Filter:' + arguments[0] + " ran on ->", arguments[1]);
    // prefix action
    arguments[0] = 'inbound.' + arguments[0];
    return _inbound.hooks.applyFilters.apply(this, arguments);
};
return _inbound;
})(_inbound || {}); | cheayeam/wordpress | wp-content/plugins/leads/shared/assets/js/frontend/analytics-src/analytics.hooks.js | JavaScript | gpl-2.0 | 11,257 |
//
// Copyright (c) 2009-2010 Mikko Mononen memon@inside.org
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
//
#include <float.h>
#include <string.h>
#include "DetourNavMeshQuery.h"
#include "DetourNavMesh.h"
#include "DetourNode.h"
#include "DetourCommon.h"
#include "DetourMath.h"
#include "DetourAlloc.h"
#include "DetourAssert.h"
#include <new>
/// @class dtQueryFilter
///
/// <b>The Default Implementation</b>
///
/// At construction: All area costs default to 1.0. All flags are included
/// and none are excluded.
///
/// If a polygon has both an include and an exclude flag, it will be excluded.
///
/// The way filtering works, a navigation mesh polygon must have at least one flag
/// set to ever be considered by a query. So a polygon with no flags will never
/// be considered.
///
/// Setting the include flags to 0 will result in all polygons being excluded.
///
/// <b>Custom Implementations</b>
///
/// DT_VIRTUAL_QUERYFILTER must be defined in order to extend this class.
///
/// Implement a custom query filter by overriding the virtual passFilter()
/// and getCost() functions. If this is done, both functions should be as
/// fast as possible. Use cached local copies of data rather than accessing
/// your own objects where possible.
///
/// Custom implementations do not need to adhere to the flags or cost logic
/// used by the default implementation.
///
/// In order for A* searches to work properly, the cost should be proportional to
/// the travel distance. Implementing a cost modifier less than 1.0 is likely
/// to lead to problems during pathfinding.
///
/// @see dtNavMeshQuery
dtQueryFilter::dtQueryFilter() :
	m_includeFlags(0xffff),	// include every polygon flag by default
	m_excludeFlags(0)		// exclude none by default
{
	// All area types start with unit traversal cost.
	for (int i = 0; i < DT_MAX_AREAS; ++i)
		m_areaCost[i] = 1.0f;
}
// passFilter/getCost come in two builds: virtual (overridable by custom
// filters) when DT_VIRTUAL_QUERYFILTER is defined, inline otherwise. Both
// variants implement the same default behavior.
#ifdef DT_VIRTUAL_QUERYFILTER
// A polygon passes if it has at least one include flag and no exclude flag.
bool dtQueryFilter::passFilter(const dtPolyRef /*ref*/,
							   const dtMeshTile* /*tile*/,
							   const dtPoly* poly) const
{
	return (poly->flags & m_includeFlags) != 0 && (poly->flags & m_excludeFlags) == 0;
}
// Default cost: euclidean distance scaled by the current polygon's area cost.
float dtQueryFilter::getCost(const float* pa, const float* pb,
							 const dtPolyRef /*prevRef*/, const dtMeshTile* /*prevTile*/, const dtPoly* /*prevPoly*/,
							 const dtPolyRef /*curRef*/, const dtMeshTile* /*curTile*/, const dtPoly* curPoly,
							 const dtPolyRef /*nextRef*/, const dtMeshTile* /*nextTile*/, const dtPoly* /*nextPoly*/) const
{
	return dtVdist(pa, pb) * m_areaCost[curPoly->getArea()];
}
#else
inline bool dtQueryFilter::passFilter(const dtPolyRef /*ref*/,
									  const dtMeshTile* /*tile*/,
									  const dtPoly* poly) const
{
	return (poly->flags & m_includeFlags) != 0 && (poly->flags & m_excludeFlags) == 0;
}
inline float dtQueryFilter::getCost(const float* pa, const float* pb,
									const dtPolyRef /*prevRef*/, const dtMeshTile* /*prevTile*/, const dtPoly* /*prevPoly*/,
									const dtPolyRef /*curRef*/, const dtMeshTile* /*curTile*/, const dtPoly* curPoly,
									const dtPolyRef /*nextRef*/, const dtMeshTile* /*nextTile*/, const dtPoly* /*nextPoly*/) const
{
	return dtVdist(pa, pb) * m_areaCost[curPoly->getArea()];
}
#endif
static const float H_SCALE = 0.999f; // Search heuristic scale.
/// Allocates a dtNavMeshQuery with the Detour allocator and constructs it in
/// place. Returns 0 on allocation failure. Free with dtFreeNavMeshQuery().
dtNavMeshQuery* dtAllocNavMeshQuery()
{
	void* mem = dtAlloc(sizeof(dtNavMeshQuery), DT_ALLOC_PERM);
	if (!mem) return 0;
	return new(mem) dtNavMeshQuery;
}
/// Destroys and frees a query allocated with dtAllocNavMeshQuery().
/// Safe to call with a null pointer.
void dtFreeNavMeshQuery(dtNavMeshQuery* navmesh)
{
	if (!navmesh) return;
	// Explicit destructor call is required because allocation used
	// placement new over dtAlloc'd memory.
	navmesh->~dtNavMeshQuery();
	dtFree(navmesh);
}
//////////////////////////////////////////////////////////////////////////////////////////
/// @class dtNavMeshQuery
///
/// For methods that support undersized buffers, if the buffer is too small
/// to hold the entire result set the return status of the method will include
/// the #DT_BUFFER_TOO_SMALL flag.
///
/// Constant member functions can be used by multiple clients without side
/// effects. (E.g. No change to the closed list. No impact on an in-progress
/// sliced path query. Etc.)
///
/// Walls and portals: A @e wall is a polygon segment that is
/// considered impassable. A @e portal is a passable segment between polygons.
/// A portal may be treated as a wall based on the dtQueryFilter used for a query.
///
/// @see dtNavMesh, dtQueryFilter, #dtAllocNavMeshQuery(), #dtAllocNavMeshQuery()
// Pools and the open list are allocated lazily in init(), not here.
dtNavMeshQuery::dtNavMeshQuery() :
	m_nav(0),
	m_tinyNodePool(0),
	m_nodePool(0),
	m_openList(0)
{
	// Sliced-query state starts zeroed; initSlicedFindPath() fills it in.
	memset(&m_query, 0, sizeof(dtQueryData));
}
dtNavMeshQuery::~dtNavMeshQuery()
{
	// Members were created with placement new over dtAlloc'd memory, so
	// destructors must be invoked explicitly before freeing.
	if (m_tinyNodePool)
		m_tinyNodePool->~dtNodePool();
	if (m_nodePool)
		m_nodePool->~dtNodePool();
	if (m_openList)
		m_openList->~dtNodeQueue();
	dtFree(m_tinyNodePool);
	dtFree(m_nodePool);
	dtFree(m_openList);
}
/// @par
///
/// Must be the first function called after construction, before other
/// functions are used.
///
/// This function can be used multiple times.
dtStatus dtNavMeshQuery::init(const dtNavMesh* nav, const int maxNodes, unsigned int threadId)
{
	m_nav = nav;
	// NOTE(review): m_owningThread appears to be a local extension (not in
	// stock Detour) recording the thread that owns this query -- confirm
	// how callers use it.
	m_owningThread = threadId;
	// Reallocate the main node pool only when missing or too small;
	// otherwise just clear it so repeated init() calls are cheap.
	if (!m_nodePool || m_nodePool->getMaxNodes() < maxNodes)
	{
		if (m_nodePool)
		{
			m_nodePool->~dtNodePool();
			dtFree(m_nodePool);
			m_nodePool = 0;
		}
		m_nodePool = new (dtAlloc(sizeof(dtNodePool), DT_ALLOC_PERM)) dtNodePool(maxNodes, dtNextPow2(maxNodes/4));
		if (!m_nodePool)
			return DT_FAILURE | DT_OUT_OF_MEMORY;
	}
	else
	{
		m_nodePool->clear();
	}
	// Small fixed-size pool used by local searches (e.g. moveAlongSurface).
	if (!m_tinyNodePool)
	{
		m_tinyNodePool = new (dtAlloc(sizeof(dtNodePool), DT_ALLOC_PERM)) dtNodePool(64, 32);
		if (!m_tinyNodePool)
			return DT_FAILURE | DT_OUT_OF_MEMORY;
	}
	else
	{
		m_tinyNodePool->clear();
	}
	// TODO: check the open list size too.
	if (!m_openList || m_openList->getCapacity() < maxNodes)
	{
		if (m_openList)
		{
			m_openList->~dtNodeQueue();
			dtFree(m_openList);
			m_openList = 0;
		}
		m_openList = new (dtAlloc(sizeof(dtNodeQueue), DT_ALLOC_PERM)) dtNodeQueue(maxNodes);
		if (!m_openList)
			return DT_FAILURE | DT_OUT_OF_MEMORY;
	}
	else
	{
		m_openList->clear();
	}
	return DT_SUCCESS;
}
/// Returns a random point on the navmesh: a tile, then a polygon (weighted
/// by area), then a point inside it are chosen via reservoir sampling.
dtStatus dtNavMeshQuery::findRandomPoint(const dtQueryFilter* filter, float (*frand)(),
										 dtPolyRef* randomRef, float* randomPt) const
{
	dtAssert(m_nav);
	// Randomly pick one tile. Assume that all tiles cover roughly the same area.
	const dtMeshTile* tile = 0;
	float tsum = 0.0f;
	for (int i = 0; i < m_nav->getMaxTiles(); i++)
	{
		const dtMeshTile* t = m_nav->getTile(i);
		if (!t || !t->header) continue;
		// Choose random tile using reservoir sampling.
		const float area = 1.0f; // Could be tile area too.
		tsum += area;
		const float u = frand();
		if (u*tsum <= area)
			tile = t;
	}
	if (!tile)
		return DT_FAILURE;
	// Randomly pick one polygon weighted by polygon area.
	const dtPoly* poly = 0;
	dtPolyRef polyRef = 0;
	const dtPolyRef base = m_nav->getPolyRefBase(tile);
	float areaSum = 0.0f;
	for (int i = 0; i < tile->header->polyCount; ++i)
	{
		const dtPoly* p = &tile->polys[i];
		// Do not return off-mesh connection polygons.
		if (p->getType() != DT_POLYTYPE_GROUND)
			continue;
		// Must pass filter
		const dtPolyRef ref = base | (dtPolyRef)i;
		if (!filter->passFilter(ref, tile, p))
			continue;
		// Calc area of the polygon by summing its triangle fan.
		float polyArea = 0.0f;
		for (int j = 2; j < p->vertCount; ++j)
		{
			const float* va = &tile->verts[p->verts[0]*3];
			const float* vb = &tile->verts[p->verts[j-1]*3];
			const float* vc = &tile->verts[p->verts[j]*3];
			polyArea += dtTriArea2D(va,vb,vc);
		}
		// Choose random polygon weighted by area, using reservoir sampling.
		areaSum += polyArea;
		const float u = frand();
		if (u*areaSum <= polyArea)
		{
			poly = p;
			polyRef = ref;
		}
	}
	if (!poly)
		return DT_FAILURE;
	// Randomly pick point on polygon.
	const float* v = &tile->verts[poly->verts[0]*3];
	float verts[3*DT_VERTS_PER_POLYGON];
	float areas[DT_VERTS_PER_POLYGON];
	dtVcopy(&verts[0*3],v);
	for (int j = 1; j < poly->vertCount; ++j)
	{
		v = &tile->verts[poly->verts[j]*3];
		dtVcopy(&verts[j*3],v);
	}
	const float s = frand();
	const float t = frand();
	float pt[3];
	dtRandomPointInConvexPoly(verts, poly->vertCount, areas, s, t, pt);
	// Snap the point's height to the polygon's detail surface.
	float h = 0.0f;
	dtStatus status = getPolyHeight(polyRef, pt, &h);
	if (dtStatusFailed(status))
		return status;
	pt[1] = h;
	dtVcopy(randomPt, pt);
	*randomRef = polyRef;
	return DT_SUCCESS;
}
/// Returns a random point within @p radius of @p centerPos, drawn from the
/// polygons reachable from @p startRef. A Dijkstra-like expansion visits
/// polygons touching the circle while reservoir sampling (weighted by
/// polygon area) picks one; a random point is then generated inside it.
dtStatus dtNavMeshQuery::findRandomPointAroundCircle(dtPolyRef startRef, const float* centerPos, const float radius,
													 const dtQueryFilter* filter, float (*frand)(),
													 dtPolyRef* randomRef, float* randomPt) const
{
	dtAssert(m_nav);
	dtAssert(m_nodePool);
	dtAssert(m_openList);
	// Validate input
	if (!startRef || !m_nav->isValidPolyRef(startRef))
		return DT_FAILURE | DT_INVALID_PARAM;
	const dtMeshTile* startTile = 0;
	const dtPoly* startPoly = 0;
	m_nav->getTileAndPolyByRefUnsafe(startRef, &startTile, &startPoly);
	if (!filter->passFilter(startRef, startTile, startPoly))
		return DT_FAILURE | DT_INVALID_PARAM;
	m_nodePool->clear();
	m_openList->clear();
	dtNode* startNode = m_nodePool->getNode(startRef);
	dtVcopy(startNode->pos, centerPos);
	startNode->pidx = 0;
	startNode->cost = 0;
	startNode->total = 0;
	startNode->id = startRef;
	startNode->flags = DT_NODE_OPEN;
	m_openList->push(startNode);
	dtStatus status = DT_SUCCESS;
	const float radiusSqr = dtSqr(radius);
	float areaSum = 0.0f;
	const dtMeshTile* randomTile = 0;
	const dtPoly* randomPoly = 0;
	dtPolyRef randomPolyRef = 0;
	while (!m_openList->empty())
	{
		dtNode* bestNode = m_openList->pop();
		bestNode->flags &= ~DT_NODE_OPEN;
		bestNode->flags |= DT_NODE_CLOSED;
		// Get poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef bestRef = bestNode->id;
		const dtMeshTile* bestTile = 0;
		const dtPoly* bestPoly = 0;
		m_nav->getTileAndPolyByRefUnsafe(bestRef, &bestTile, &bestPoly);
		// Only ground polygons are candidates for the random location.
		if (bestPoly->getType() == DT_POLYTYPE_GROUND)
		{
			// Calc area of the polygon by summing its triangle fan.
			float polyArea = 0.0f;
			for (int j = 2; j < bestPoly->vertCount; ++j)
			{
				const float* va = &bestTile->verts[bestPoly->verts[0]*3];
				const float* vb = &bestTile->verts[bestPoly->verts[j-1]*3];
				const float* vc = &bestTile->verts[bestPoly->verts[j]*3];
				polyArea += dtTriArea2D(va,vb,vc);
			}
			// Choose random polygon weighted by area, using reservoir sampling.
			areaSum += polyArea;
			const float u = frand();
			if (u*areaSum <= polyArea)
			{
				randomTile = bestTile;
				randomPoly = bestPoly;
				randomPolyRef = bestRef;
			}
		}
		// Get parent poly and tile.
		dtPolyRef parentRef = 0;
		const dtMeshTile* parentTile = 0;
		const dtPoly* parentPoly = 0;
		if (bestNode->pidx)
			parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
		if (parentRef)
			m_nav->getTileAndPolyByRefUnsafe(parentRef, &parentTile, &parentPoly);
		for (unsigned int i = bestPoly->firstLink; i != DT_NULL_LINK; i = bestTile->links[i].next)
		{
			const dtLink* link = &bestTile->links[i];
			dtPolyRef neighbourRef = link->ref;
			// Skip invalid neighbours and do not follow back to parent.
			if (!neighbourRef || neighbourRef == parentRef)
				continue;
			// Expand to neighbour
			const dtMeshTile* neighbourTile = 0;
			const dtPoly* neighbourPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(neighbourRef, &neighbourTile, &neighbourPoly);
			// Do not advance if the polygon is excluded by the filter.
			if (!filter->passFilter(neighbourRef, neighbourTile, neighbourPoly))
				continue;
			// Find edge and calc distance to the edge.
			float va[3], vb[3];
			if (!getPortalPoints(bestRef, bestPoly, bestTile, neighbourRef, neighbourPoly, neighbourTile, va, vb))
				continue;
			// If the circle is not touching the next polygon, skip it.
			float tseg;
			float distSqr = dtDistancePtSegSqr2D(centerPos, va, vb, tseg);
			if (distSqr > radiusSqr)
				continue;
			dtNode* neighbourNode = m_nodePool->getNode(neighbourRef);
			if (!neighbourNode)
			{
				status |= DT_OUT_OF_NODES;
				continue;
			}
			if (neighbourNode->flags & DT_NODE_CLOSED)
				continue;
			// Cost: first visit places the node at the portal midpoint.
			if (neighbourNode->flags == 0)
				dtVlerp(neighbourNode->pos, va, vb, 0.5f);
			const float total = bestNode->total + dtVdist(bestNode->pos, neighbourNode->pos);
			// The node is already in open list and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
				continue;
			neighbourNode->id = neighbourRef;
			neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
			neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
			neighbourNode->total = total;
			if (neighbourNode->flags & DT_NODE_OPEN)
			{
				m_openList->modify(neighbourNode);
			}
			else
			{
				neighbourNode->flags = DT_NODE_OPEN;
				m_openList->push(neighbourNode);
			}
		}
	}
	if (!randomPoly)
		return DT_FAILURE;
	// Randomly pick point on polygon.
	const float* v = &randomTile->verts[randomPoly->verts[0]*3];
	float verts[3*DT_VERTS_PER_POLYGON];
	float areas[DT_VERTS_PER_POLYGON];
	dtVcopy(&verts[0*3],v);
	for (int j = 1; j < randomPoly->vertCount; ++j)
	{
		v = &randomTile->verts[randomPoly->verts[j]*3];
		dtVcopy(&verts[j*3],v);
	}
	const float s = frand();
	const float t = frand();
	float pt[3];
	dtRandomPointInConvexPoly(verts, randomPoly->vertCount, areas, s, t, pt);
	float h = 0.0f;
	dtStatus stat = getPolyHeight(randomPolyRef, pt, &h);
	// Bug fix: the failure check previously tested 'status' (the search
	// status) instead of 'stat' (the getPolyHeight result), so a failed
	// height lookup was silently ignored.
	if (dtStatusFailed(stat))
		return stat;
	pt[1] = h;
	dtVcopy(randomPt, pt);
	*randomRef = randomPolyRef;
	return DT_SUCCESS;
}
//////////////////////////////////////////////////////////////////////////////////////////
/// @par
///
/// Uses the detail polygons to find the surface height. (Most accurate.)
///
/// @p pos does not have to be within the bounds of the polygon or navigation mesh.
///
/// See closestPointOnPolyBoundary() for a limited but faster option.
///
dtStatus dtNavMeshQuery::closestPointOnPoly(dtPolyRef ref, const float* pos, float* closest, bool* posOverPoly) const
{
	dtAssert(m_nav);
	const dtMeshTile* tile = 0;
	const dtPoly* poly = 0;
	// Resolve the polygon reference; reject stale or malformed refs.
	if (dtStatusFailed(m_nav->getTileAndPolyByRef(ref, &tile, &poly)))
		return DT_FAILURE | DT_INVALID_PARAM;
	if (!tile)
		return DT_FAILURE | DT_INVALID_PARAM;

	// Off-mesh connections don't have detail polygons; the closest point is
	// the projection of pos onto the segment between the two endpoints.
	if (poly->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
	{
		const float* v0 = &tile->verts[poly->verts[0]*3];
		const float* v1 = &tile->verts[poly->verts[1]*3];
		const float d0 = dtVdist(pos, v0);
		const float d1 = dtVdist(pos, v1);
		const float u = d0 / (d0+d1);	// parametric position along v0 -> v1
		dtVlerp(closest, v0, v1, u);
		if (posOverPoly)
			*posOverPoly = false;
		return DT_SUCCESS;
	}

	// Index of the polygon within its tile, used to look up its detail mesh.
	const unsigned int ip = (unsigned int)(poly - tile->polys);
	const dtPolyDetail* pd = &tile->detailMeshes[ip];

	// Clamp point to be inside the polygon.
	float verts[DT_VERTS_PER_POLYGON*3];
	float edged[DT_VERTS_PER_POLYGON];	// squared distance from pos to each boundary edge
	float edget[DT_VERTS_PER_POLYGON];	// parameter of the closest point on each edge
	const int nv = poly->vertCount;
	for (int i = 0; i < nv; ++i)
		dtVcopy(&verts[i*3], &tile->verts[poly->verts[i]*3]);

	dtVcopy(closest, pos);
	if (!dtDistancePtPolyEdgesSqr(pos, verts, nv, edged, edget))
	{
		// Point is outside the polygon, dtClamp to nearest edge.
		float dmin = FLT_MAX;
		int imin = -1;
		for (int i = 0; i < nv; ++i)
		{
			if (edged[i] < dmin)
			{
				dmin = edged[i];
				imin = i;
			}
		}
		const float* va = &verts[imin*3];
		const float* vb = &verts[((imin+1)%nv)*3];
		dtVlerp(closest, va, vb, edget[imin]);

		if (posOverPoly)
			*posOverPoly = false;
	}
	else
	{
		if (posOverPoly)
			*posOverPoly = true;
	}

	// Refine the height using the first detail triangle that contains the point.
	for (int j = 0; j < pd->triCount; ++j)
	{
		const unsigned char* t = &tile->detailTris[(pd->triBase+j)*4];
		const float* v[3];
		for (int k = 0; k < 3; ++k)
		{
			// Detail indices below vertCount refer to the polygon's own
			// vertices; larger ones index the tile's detail vertex array.
			if (t[k] < poly->vertCount)
				v[k] = &tile->verts[poly->verts[t[k]]*3];
			else
				v[k] = &tile->detailVerts[(pd->vertBase+(t[k]-poly->vertCount))*3];
		}
		float h;
		if (dtClosestHeightPointTriangle(pos, v[0], v[1], v[2], h))
		{
			closest[1] = h;
			break;
		}
	}

	return DT_SUCCESS;
}
/// @par
///
/// Much faster than closestPointOnPoly().
///
/// If the provided position lies within the polygon's xz-bounds (above or below),
/// then @p pos and @p closest will be equal.
///
/// The height of @p closest will be the polygon boundary. The height detail is not used.
///
/// @p pos does not have to be within the bounds of the polygon or the navigation mesh.
///
dtStatus dtNavMeshQuery::closestPointOnPolyBoundary(dtPolyRef ref, const float* pos, float* closest) const
{
	dtAssert(m_nav);

	const dtMeshTile* tile = 0;
	const dtPoly* poly = 0;
	if (dtStatusFailed(m_nav->getTileAndPolyByRef(ref, &tile, &poly)))
		return DT_FAILURE | DT_INVALID_PARAM;

	// Gather the polygon's boundary vertices into a flat local array.
	float verts[DT_VERTS_PER_POLYGON*3];
	float edgeDistSqr[DT_VERTS_PER_POLYGON];	// squared distance to each boundary edge
	float edgeParam[DT_VERTS_PER_POLYGON];		// closest-point parameter on each edge
	const int vertCount = (int)poly->vertCount;
	for (int i = 0; i < vertCount; ++i)
		dtVcopy(&verts[i*3], &tile->verts[poly->verts[i]*3]);

	if (dtDistancePtPolyEdgesSqr(pos, verts, vertCount, edgeDistSqr, edgeParam))
	{
		// The position lies within the polygon's xz-bounds: return it as-is.
		dtVcopy(closest, pos);
	}
	else
	{
		// Outside the polygon: clamp onto the nearest boundary edge.
		int nearest = 0;
		for (int i = 1; i < vertCount; ++i)
		{
			if (edgeDistSqr[i] < edgeDistSqr[nearest])
				nearest = i;
		}
		const float* edgeStart = &verts[nearest*3];
		const float* edgeEnd = &verts[((nearest+1)%vertCount)*3];
		dtVlerp(closest, edgeStart, edgeEnd, edgeParam[nearest]);
	}

	return DT_SUCCESS;
}
/// @par
///
/// Will return #DT_FAILURE if the provided position is outside the xz-bounds
/// of the polygon.
///
dtStatus dtNavMeshQuery::getPolyHeight(dtPolyRef ref, const float* pos, float* height) const
{
	dtAssert(m_nav);

	const dtMeshTile* tile = 0;
	const dtPoly* poly = 0;
	if (dtStatusFailed(m_nav->getTileAndPolyByRef(ref, &tile, &poly)))
		return DT_FAILURE | DT_INVALID_PARAM;

	if (poly->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
	{
		// Off-mesh connections have no detail mesh; interpolate the height
		// along the segment between the two endpoints.
		const float* v0 = &tile->verts[poly->verts[0]*3];
		const float* v1 = &tile->verts[poly->verts[1]*3];
		const float d0 = dtVdist2D(pos, v0);
		const float d1 = dtVdist2D(pos, v1);
		// NOTE(review): divides by d0+d1 — assumes the two endpoints are not
		// both coincident with pos in xz (d0+d1 > 0); confirm upstream.
		const float u = d0 / (d0+d1);
		if (height)
			*height = v0[1] + (v1[1] - v0[1]) * u;
		return DT_SUCCESS;
	}
	else
	{
		// Walk the polygon's detail triangles and return the height of the
		// first triangle whose xz-projection contains pos.
		const unsigned int ip = (unsigned int)(poly - tile->polys);
		const dtPolyDetail* pd = &tile->detailMeshes[ip];
		for (int j = 0; j < pd->triCount; ++j)
		{
			const unsigned char* t = &tile->detailTris[(pd->triBase+j)*4];
			const float* v[3];
			for (int k = 0; k < 3; ++k)
			{
				// Indices below vertCount refer to the polygon's own vertices;
				// larger ones index the tile's detail vertex array.
				if (t[k] < poly->vertCount)
					v[k] = &tile->verts[poly->verts[t[k]]*3];
				else
					v[k] = &tile->detailVerts[(pd->vertBase+(t[k]-poly->vertCount))*3];
			}
			float h;
			if (dtClosestHeightPointTriangle(pos, v[0], v[1], v[2], h))
			{
				if (height)
					*height = h;
				return DT_SUCCESS;
			}
		}
	}

	// pos was outside the xz-bounds of the polygon (see function doc above).
	return DT_FAILURE | DT_INVALID_PARAM;
}
/// @par
///
/// @note If the search box does not intersect any polygons the search will
/// return #DT_SUCCESS, but @p nearestRef will be zero. So if in doubt, check
/// @p nearestRef before using @p nearestPt.
///
/// @warning This function is not suitable for large area searches. If the search
/// extents overlap more than 128 polygons it may return an invalid result.
///
dtStatus dtNavMeshQuery::findNearestPoly(const float* center, const float* extents,
										 const dtQueryFilter* filter,
										 dtPolyRef* nearestRef, float* nearestPt) const
{
	dtAssert(m_nav);

	*nearestRef = 0;

	// Gather candidate polygons overlapping the query box (capped at 128).
	dtPolyRef candidates[128];
	int candidateCount = 0;
	if (dtStatusFailed(queryPolygons(center, extents, filter, candidates, &candidateCount, 128)))
		return DT_FAILURE | DT_INVALID_PARAM;

	// Pick the candidate whose closest point lies nearest to the center.
	dtPolyRef bestRef = 0;
	float bestDistSqr = FLT_MAX;
	for (int i = 0; i < candidateCount; ++i)
	{
		const dtPolyRef candidate = candidates[i];
		float closest[3];
		bool posOverPoly = false;
		closestPointOnPoly(candidate, center, closest, &posOverPoly);

		float delta[3];
		dtVsub(delta, center, closest);

		float distSqr;
		if (posOverPoly)
		{
			// The center is directly above/below this polygon: only the
			// vertical distance in excess of the walkable climb counts,
			// which favors the polygon underfoot over a laterally-near one.
			const dtMeshTile* tile = 0;
			const dtPoly* poly = 0;
			m_nav->getTileAndPolyByRefUnsafe(candidates[i], &tile, &poly);
			const float heightDiff = dtAbs(delta[1]) - tile->header->walkableClimb;
			distSqr = heightDiff > 0 ? heightDiff*heightDiff : 0;
		}
		else
		{
			distSqr = dtVlenSqr(delta);
		}

		if (distSqr < bestDistSqr)
		{
			if (nearestPt)
				dtVcopy(nearestPt, closest);
			bestDistSqr = distSqr;
			bestRef = candidate;
		}
	}

	if (nearestRef)
		*nearestRef = bestRef;

	return DT_SUCCESS;
}
int dtNavMeshQuery::queryPolygonsInTile(const dtMeshTile* tile, const float* qmin, const float* qmax,
										const dtQueryFilter* filter,
										dtPolyRef* polys, const int maxPolys) const
{
	dtAssert(m_nav);

	if (tile->bvTree)
	{
		// Fast path: traverse the tile's bounding volume tree.
		const dtBVNode* node = &tile->bvTree[0];
		const dtBVNode* end = &tile->bvTree[tile->header->bvNodeCount];
		const float* tbmin = tile->header->bmin;
		const float* tbmax = tile->header->bmax;
		const float qfac = tile->header->bvQuantFactor;

		// Calculate quantized box
		unsigned short bmin[3], bmax[3];
		// dtClamp query box to world box.
		float minx = dtClamp(qmin[0], tbmin[0], tbmax[0]) - tbmin[0];
		float miny = dtClamp(qmin[1], tbmin[1], tbmax[1]) - tbmin[1];
		float minz = dtClamp(qmin[2], tbmin[2], tbmax[2]) - tbmin[2];
		float maxx = dtClamp(qmax[0], tbmin[0], tbmax[0]) - tbmin[0];
		float maxy = dtClamp(qmax[1], tbmin[1], tbmax[1]) - tbmin[1];
		float maxz = dtClamp(qmax[2], tbmin[2], tbmax[2]) - tbmin[2];
		// Quantize: min is rounded down to an even value, max up to an odd
		// value, so the quantized box conservatively contains the query box
		// (presumably matching the tree builder's convention — verify there).
		bmin[0] = (unsigned short)(qfac * minx) & 0xfffe;
		bmin[1] = (unsigned short)(qfac * miny) & 0xfffe;
		bmin[2] = (unsigned short)(qfac * minz) & 0xfffe;
		bmax[0] = (unsigned short)(qfac * maxx + 1) | 1;
		bmax[1] = (unsigned short)(qfac * maxy + 1) | 1;
		bmax[2] = (unsigned short)(qfac * maxz + 1) | 1;

		// Traverse tree
		const dtPolyRef base = m_nav->getPolyRefBase(tile);
		int n = 0;
		while (node < end)
		{
			const bool overlap = dtOverlapQuantBounds(bmin, bmax, node->bmin, node->bmax);
			// Leaves store the polygon index in 'i'; internal nodes store the
			// negated escape index (offset past their whole subtree).
			const bool isLeafNode = node->i >= 0;

			if (isLeafNode && overlap)
			{
				dtPolyRef ref = base | (dtPolyRef)node->i;
				if (filter->passFilter(ref, tile, &tile->polys[node->i]))
				{
					// Hits beyond maxPolys are silently dropped; the caller
					// receives a truncated result set.
					if (n < maxPolys)
						polys[n++] = ref;
				}
			}

			if (overlap || isLeafNode)
				node++;
			else
			{
				// No overlap with an internal node: skip its entire subtree.
				const int escapeIndex = -node->i;
				node += escapeIndex;
			}
		}

		return n;
	}
	else
	{
		// Slow path: no BV tree; test every polygon's bounds directly.
		float bmin[3], bmax[3];
		int n = 0;
		const dtPolyRef base = m_nav->getPolyRefBase(tile);
		for (int i = 0; i < tile->header->polyCount; ++i)
		{
			const dtPoly* p = &tile->polys[i];
			// Do not return off-mesh connection polygons.
			if (p->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
				continue;
			// Must pass filter
			const dtPolyRef ref = base | (dtPolyRef)i;
			if (!filter->passFilter(ref, tile, p))
				continue;
			// Calc polygon bounds.
			const float* v = &tile->verts[p->verts[0]*3];
			dtVcopy(bmin, v);
			dtVcopy(bmax, v);
			for (int j = 1; j < p->vertCount; ++j)
			{
				v = &tile->verts[p->verts[j]*3];
				dtVmin(bmin, v);
				dtVmax(bmax, v);
			}
			if (dtOverlapBounds(qmin,qmax, bmin,bmax))
			{
				// Same truncation behavior as the BV-tree path above.
				if (n < maxPolys)
					polys[n++] = ref;
			}
		}
		return n;
	}
}
/// @par
///
/// If no polygons are found, the function will return #DT_SUCCESS with a
/// @p polyCount of zero.
///
/// If @p polys is too small to hold the entire result set, then the array will
/// be filled to capacity. The method of choosing which polygons from the
/// full set are included in the partial result set is undefined.
///
dtStatus dtNavMeshQuery::queryPolygons(const float* center, const float* extents,
									   const dtQueryFilter* filter,
									   dtPolyRef* polys, int* polyCount, const int maxPolys) const
{
	dtAssert(m_nav);

	// Build the axis-aligned query box around the center point.
	float qmin[3], qmax[3];
	dtVsub(qmin, center, extents);
	dtVadd(qmax, center, extents);

	// Determine the range of tile grid cells touched by the box.
	int tx0, ty0, tx1, ty1;
	m_nav->calcTileLoc(qmin, &tx0, &ty0);
	m_nav->calcTileLoc(qmax, &tx1, &ty1);

	static const int MAX_NEIS = 32;
	const dtMeshTile* tiles[MAX_NEIS];

	int found = 0;
	for (int ty = ty0; ty <= ty1; ++ty)
	{
		for (int tx = tx0; tx <= tx1; ++tx)
		{
			// A grid cell may contain several (layered) tiles.
			const int tileCount = m_nav->getTilesAt(tx, ty, tiles, MAX_NEIS);
			for (int j = 0; j < tileCount; ++j)
			{
				found += queryPolygonsInTile(tiles[j], qmin, qmax, filter, polys+found, maxPolys-found);
				if (found >= maxPolys)
				{
					// Result buffer is full: report the partial result.
					*polyCount = found;
					return DT_SUCCESS | DT_BUFFER_TOO_SMALL;
				}
			}
		}
	}
	*polyCount = found;

	return DT_SUCCESS;
}
/// @par
///
/// If the end polygon cannot be reached through the navigation graph,
/// the last polygon in the path will be the nearest the end polygon.
///
/// If the path array is too small to hold the full result, it will be filled as
/// far as possible from the start polygon toward the end polygon.
///
/// The start and end positions are used to calculate traversal costs.
/// (The y-values impact the result.)
///
dtStatus dtNavMeshQuery::findPath(dtPolyRef startRef, dtPolyRef endRef,
								  const float* startPos, const float* endPos,
								  const dtQueryFilter* filter,
								  dtPolyRef* path, int* pathCount, const int maxPath) const
{
	dtAssert(m_nav);
	dtAssert(m_nodePool);
	dtAssert(m_openList);

	*pathCount = 0;

	// Reject obviously bad input before touching the search state.
	if (!startRef || !endRef)
		return DT_FAILURE | DT_INVALID_PARAM;

	if (!maxPath)
		return DT_FAILURE | DT_INVALID_PARAM;

	// Validate input
	if (!m_nav->isValidPolyRef(startRef) || !m_nav->isValidPolyRef(endRef))
		return DT_FAILURE | DT_INVALID_PARAM;

	// Trivial case: start and end on the same polygon.
	if (startRef == endRef)
	{
		path[0] = startRef;
		*pathCount = 1;
		return DT_SUCCESS;
	}

	// A* search over the polygon adjacency graph.
	m_nodePool->clear();
	m_openList->clear();

	// Seed the open list with the start node; 'total' is cost + heuristic.
	dtNode* startNode = m_nodePool->getNode(startRef);
	dtVcopy(startNode->pos, startPos);
	startNode->pidx = 0;
	startNode->cost = 0;
	startNode->total = dtVdist(startPos, endPos) * H_SCALE;
	startNode->id = startRef;
	startNode->flags = DT_NODE_OPEN;
	m_openList->push(startNode);

	// Track the node nearest the goal so a partial path can be returned.
	dtNode* lastBestNode = startNode;
	float lastBestNodeCost = startNode->total;

	dtStatus status = DT_SUCCESS;

	while (!m_openList->empty())
	{
		// Remove node from open list and put it in closed list.
		dtNode* bestNode = m_openList->pop();
		bestNode->flags &= ~DT_NODE_OPEN;
		bestNode->flags |= DT_NODE_CLOSED;

		// Reached the goal, stop searching.
		if (bestNode->id == endRef)
		{
			lastBestNode = bestNode;
			break;
		}

		// Get current poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef bestRef = bestNode->id;
		const dtMeshTile* bestTile = 0;
		const dtPoly* bestPoly = 0;
		m_nav->getTileAndPolyByRefUnsafe(bestRef, &bestTile, &bestPoly);

		// Get parent poly and tile (used for cost calculation and to avoid
		// expanding straight back to where we came from).
		dtPolyRef parentRef = 0;
		const dtMeshTile* parentTile = 0;
		const dtPoly* parentPoly = 0;
		if (bestNode->pidx)
			parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
		if (parentRef)
			m_nav->getTileAndPolyByRefUnsafe(parentRef, &parentTile, &parentPoly);

		// Expand the search to every linked neighbour of the current polygon.
		for (unsigned int i = bestPoly->firstLink; i != DT_NULL_LINK; i = bestTile->links[i].next)
		{
			dtPolyRef neighbourRef = bestTile->links[i].ref;

			// Skip invalid ids and do not expand back to where we came from.
			if (!neighbourRef || neighbourRef == parentRef)
				continue;

			// Get neighbour poly and tile.
			// The API input has been checked already, skip checking internal data.
			const dtMeshTile* neighbourTile = 0;
			const dtPoly* neighbourPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(neighbourRef, &neighbourTile, &neighbourPoly);

			if (!filter->passFilter(neighbourRef, neighbourTile, neighbourPoly))
				continue;

			// deal explicitly with crossing tile boundaries: boundary links
			// carry a side code, which keys the node per crossing side.
			unsigned char crossSide = 0;
			if (bestTile->links[i].side != 0xff)
				crossSide = bestTile->links[i].side >> 1;

			// get the node
			dtNode* neighbourNode = m_nodePool->getNode(neighbourRef, crossSide);
			if (!neighbourNode)
			{
				// Node pool exhausted; note it in the status but keep searching.
				status |= DT_OUT_OF_NODES;
				continue;
			}

			// If the node is visited the first time, calculate node position.
			if (neighbourNode->flags == 0)
			{
				getEdgeMidPoint(bestRef, bestPoly, bestTile,
								neighbourRef, neighbourPoly, neighbourTile,
								neighbourNode->pos);
			}

			// Calculate cost and heuristic.
			float cost = 0;
			float heuristic = 0;

			// Special case for last node.
			if (neighbourRef == endRef)
			{
				// Cost includes the final leg from the neighbour to endPos.
				const float curCost = filter->getCost(bestNode->pos, neighbourNode->pos,
													  parentRef, parentTile, parentPoly,
													  bestRef, bestTile, bestPoly,
													  neighbourRef, neighbourTile, neighbourPoly);
				const float endCost = filter->getCost(neighbourNode->pos, endPos,
													  bestRef, bestTile, bestPoly,
													  neighbourRef, neighbourTile, neighbourPoly,
													  0, 0, 0);

				cost = bestNode->cost + curCost + endCost;
				heuristic = 0;
			}
			else
			{
				// Cost
				const float curCost = filter->getCost(bestNode->pos, neighbourNode->pos,
													  parentRef, parentTile, parentPoly,
													  bestRef, bestTile, bestPoly,
													  neighbourRef, neighbourTile, neighbourPoly);
				cost = bestNode->cost + curCost;
				heuristic = dtVdist(neighbourNode->pos, endPos)*H_SCALE;
			}

			const float total = cost + heuristic;

			// The node is already in open list and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
				continue;
			// The node is already visited and processed, and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_CLOSED) && total >= neighbourNode->total)
				continue;

			// Add or update the node.
			neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
			neighbourNode->id = neighbourRef;
			neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
			neighbourNode->cost = cost;
			neighbourNode->total = total;

			if (neighbourNode->flags & DT_NODE_OPEN)
			{
				// Already in open, update node location.
				m_openList->modify(neighbourNode);
			}
			else
			{
				// Put the node in open list.
				neighbourNode->flags |= DT_NODE_OPEN;
				m_openList->push(neighbourNode);
			}

			// Update nearest node to target so far.
			if (heuristic < lastBestNodeCost)
			{
				lastBestNodeCost = heuristic;
				lastBestNode = neighbourNode;
			}
		}
	}

	// If the goal polygon was never reached, the best we have is partial.
	if (lastBestNode->id != endRef)
		status |= DT_PARTIAL_RESULT;

	// Reverse the path: parent links point back toward the start, so flip
	// them in place to point forward from the start.
	dtNode* prev = 0;
	dtNode* node = lastBestNode;
	do
	{
		dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
		node->pidx = m_nodePool->getNodeIdx(prev);
		prev = node;
		node = next;
	}
	while (node);

	// Store path
	node = prev;
	int n = 0;
	do
	{
		path[n++] = node->id;
		if (n >= maxPath)
		{
			// Buffer full: keep what fits, from the start toward the end.
			status |= DT_BUFFER_TOO_SMALL;
			break;
		}
		node = m_nodePool->getNodeAtIdx(node->pidx);
	}
	while (node);

	*pathCount = n;

	return status;
}
/// @par
///
/// @warning Calling any non-slice methods before calling finalizeSlicedFindPath()
/// or finalizeSlicedFindPathPartial() may result in corrupted data!
///
/// The @p filter pointer is stored and used for the duration of the sliced
/// path query.
///
dtStatus dtNavMeshQuery::initSlicedFindPath(dtPolyRef startRef, dtPolyRef endRef,
											const float* startPos, const float* endPos,
											const dtQueryFilter* filter, const unsigned int options)
{
	dtAssert(m_nav);
	dtAssert(m_nodePool);
	dtAssert(m_openList);

	// Init path state.
	memset(&m_query, 0, sizeof(dtQueryData));
	m_query.status = DT_FAILURE;
	m_query.startRef = startRef;
	m_query.endRef = endRef;
	dtVcopy(m_query.startPos, startPos);
	dtVcopy(m_query.endPos, endPos);
	// The filter pointer is stored, not copied: it must outlive the sliced query.
	m_query.filter = filter;
	m_query.options = options;
	m_query.raycastLimitSqr = FLT_MAX;

	if (!startRef || !endRef)
		return DT_FAILURE | DT_INVALID_PARAM;

	// Validate input
	if (!m_nav->isValidPolyRef(startRef) || !m_nav->isValidPolyRef(endRef))
		return DT_FAILURE | DT_INVALID_PARAM;

	// trade quality with performance?
	if (options & DT_FINDPATH_ANY_ANGLE)
	{
		// limiting to several times the character radius yields nice results. It is not sensitive
		// so it is enough to compute it from the first tile.
		const dtMeshTile* tile = m_nav->getTileByRef(startRef);
		float agentRadius = tile->header->walkableRadius;
		m_query.raycastLimitSqr = dtSqr(agentRadius * DT_RAY_CAST_LIMIT_PROPORTIONS);
	}

	// Trivial case: search starts and ends on the same polygon.
	if (startRef == endRef)
	{
		m_query.status = DT_SUCCESS;
		return DT_SUCCESS;
	}

	// Seed the incremental A* search with the start node.
	m_nodePool->clear();
	m_openList->clear();

	dtNode* startNode = m_nodePool->getNode(startRef);
	dtVcopy(startNode->pos, startPos);
	startNode->pidx = 0;
	startNode->cost = 0;
	// 'total' is cost + heuristic; at the start it is heuristic only.
	startNode->total = dtVdist(startPos, endPos) * H_SCALE;
	startNode->id = startRef;
	startNode->flags = DT_NODE_OPEN;
	m_openList->push(startNode);

	m_query.status = DT_IN_PROGRESS;
	m_query.lastBestNode = startNode;
	m_query.lastBestNodeCost = startNode->total;

	return m_query.status;
}
dtStatus dtNavMeshQuery::updateSlicedFindPath(const int maxIter, int* doneIters)
{
	if (!dtStatusInProgress(m_query.status))
		return m_query.status;

	// Make sure the request is still valid.
	if (!m_nav->isValidPolyRef(m_query.startRef) || !m_nav->isValidPolyRef(m_query.endRef))
	{
		m_query.status = DT_FAILURE;
		return DT_FAILURE;
	}

	dtRaycastHit rayHit;
	rayHit.maxPath = 0;	// cost-only raycasts; no hit-path buffer needed

	// Run at most maxIter node expansions, then yield back to the caller.
	int iter = 0;
	while (iter < maxIter && !m_openList->empty())
	{
		iter++;

		// Remove node from open list and put it in closed list.
		dtNode* bestNode = m_openList->pop();
		bestNode->flags &= ~DT_NODE_OPEN;
		bestNode->flags |= DT_NODE_CLOSED;

		// Reached the goal, stop searching.
		if (bestNode->id == m_query.endRef)
		{
			m_query.lastBestNode = bestNode;
			const dtStatus details = m_query.status & DT_STATUS_DETAIL_MASK;
			m_query.status = DT_SUCCESS | details;
			if (doneIters)
				*doneIters = iter;
			return m_query.status;
		}

		// Get current poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef bestRef = bestNode->id;
		const dtMeshTile* bestTile = 0;
		const dtPoly* bestPoly = 0;
		if (dtStatusFailed(m_nav->getTileAndPolyByRef(bestRef, &bestTile, &bestPoly)))
		{
			// The polygon has disappeared during the sliced query, fail.
			m_query.status = DT_FAILURE;
			if (doneIters)
				*doneIters = iter;
			return m_query.status;
		}

		// Get parent and grand parent poly and tile.
		dtPolyRef parentRef = 0, grandpaRef = 0;
		const dtMeshTile* parentTile = 0;
		const dtPoly* parentPoly = 0;
		dtNode* parentNode = 0;
		if (bestNode->pidx)
		{
			parentNode = m_nodePool->getNodeAtIdx(bestNode->pidx);
			parentRef = parentNode->id;
			if (parentNode->pidx)
				grandpaRef = m_nodePool->getNodeAtIdx(parentNode->pidx)->id;
		}
		if (parentRef)
		{
			bool invalidParent = dtStatusFailed(m_nav->getTileAndPolyByRef(parentRef, &parentTile, &parentPoly));
			if (invalidParent || (grandpaRef && !m_nav->isValidPolyRef(grandpaRef)) )
			{
				// The polygon has disappeared during the sliced query, fail.
				m_query.status = DT_FAILURE;
				if (doneIters)
					*doneIters = iter;
				return m_query.status;
			}
		}

		// decide whether to test raycast to previous nodes (any-angle shortcut)
		bool tryLOS = false;
		if (m_query.options & DT_FINDPATH_ANY_ANGLE)
		{
			if ((parentRef != 0) && (dtVdistSqr(parentNode->pos, bestNode->pos) < m_query.raycastLimitSqr))
				tryLOS = true;
		}

		// Expand the search to every linked neighbour of the current polygon.
		for (unsigned int i = bestPoly->firstLink; i != DT_NULL_LINK; i = bestTile->links[i].next)
		{
			dtPolyRef neighbourRef = bestTile->links[i].ref;

			// Skip invalid ids and do not expand back to where we came from.
			if (!neighbourRef || neighbourRef == parentRef)
				continue;

			// Get neighbour poly and tile.
			// The API input has been checked already, skip checking internal data.
			const dtMeshTile* neighbourTile = 0;
			const dtPoly* neighbourPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(neighbourRef, &neighbourTile, &neighbourPoly);

			if (!m_query.filter->passFilter(neighbourRef, neighbourTile, neighbourPoly))
				continue;

			// get the neighbor node
			dtNode* neighbourNode = m_nodePool->getNode(neighbourRef, 0);
			if (!neighbourNode)
			{
				// Node pool exhausted; record it but continue the search.
				m_query.status |= DT_OUT_OF_NODES;
				continue;
			}

			// do not expand to nodes that were already visited from the same parent
			if (neighbourNode->pidx != 0 && neighbourNode->pidx == bestNode->pidx)
				continue;

			// If the node is visited the first time, calculate node position.
			if (neighbourNode->flags == 0)
			{
				getEdgeMidPoint(bestRef, bestPoly, bestTile,
								neighbourRef, neighbourPoly, neighbourTile,
								neighbourNode->pos);
			}

			// Calculate cost and heuristic.
			float cost = 0;
			float heuristic = 0;

			// raycast parent: if there is a clear line of sight from the
			// parent, the neighbour can link past 'best' straight to it.
			bool foundShortCut = false;
			rayHit.pathCost = rayHit.t = 0;
			if (tryLOS)
			{
				raycast(parentRef, parentNode->pos, neighbourNode->pos, m_query.filter, DT_RAYCAST_USE_COSTS, &rayHit, grandpaRef);
				foundShortCut = rayHit.t >= 1.0f;	// t >= 1 means the segment was not blocked
			}

			// update move cost
			if (foundShortCut)
			{
				// shortcut found using raycast. Using shorter cost instead
				cost = parentNode->cost + rayHit.pathCost;
			}
			else
			{
				// No shortcut found.
				const float curCost = m_query.filter->getCost(bestNode->pos, neighbourNode->pos,
															  parentRef, parentTile, parentPoly,
															  bestRef, bestTile, bestPoly,
															  neighbourRef, neighbourTile, neighbourPoly);
				cost = bestNode->cost + curCost;
			}

			// Special case for last node.
			if (neighbourRef == m_query.endRef)
			{
				const float endCost = m_query.filter->getCost(neighbourNode->pos, m_query.endPos,
															  bestRef, bestTile, bestPoly,
															  neighbourRef, neighbourTile, neighbourPoly,
															  0, 0, 0);

				cost = cost + endCost;
				heuristic = 0;
			}
			else
			{
				heuristic = dtVdist(neighbourNode->pos, m_query.endPos)*H_SCALE;
			}

			const float total = cost + heuristic;

			// The node is already in open list and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
				continue;
			// The node is already visited and processed, and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_CLOSED) && total >= neighbourNode->total)
				continue;

			// Add or update the node. A shortcut detaches the node from
			// 'best' and parents it directly to best's parent instead.
			neighbourNode->pidx = foundShortCut ? bestNode->pidx : m_nodePool->getNodeIdx(bestNode);
			neighbourNode->id = neighbourRef;
			neighbourNode->flags = (neighbourNode->flags & ~(DT_NODE_CLOSED | DT_NODE_PARENT_DETACHED));
			neighbourNode->cost = cost;
			neighbourNode->total = total;
			if (foundShortCut)
				neighbourNode->flags = (neighbourNode->flags | DT_NODE_PARENT_DETACHED);

			if (neighbourNode->flags & DT_NODE_OPEN)
			{
				// Already in open, update node location.
				m_openList->modify(neighbourNode);
			}
			else
			{
				// Put the node in open list.
				neighbourNode->flags |= DT_NODE_OPEN;
				m_openList->push(neighbourNode);
			}

			// Update nearest node to target so far.
			if (heuristic < m_query.lastBestNodeCost)
			{
				m_query.lastBestNodeCost = heuristic;
				m_query.lastBestNode = neighbourNode;
			}
		}
	}

	// Exhausted all nodes, but could not find path.
	if (m_openList->empty())
	{
		const dtStatus details = m_query.status & DT_STATUS_DETAIL_MASK;
		m_query.status = DT_SUCCESS | details;
	}

	if (doneIters)
		*doneIters = iter;

	return m_query.status;
}
dtStatus dtNavMeshQuery::finalizeSlicedFindPath(dtPolyRef* path, int* pathCount, const int maxPath)
{
	*pathCount = 0;

	if (dtStatusFailed(m_query.status))
	{
		// Reset query.
		memset(&m_query, 0, sizeof(dtQueryData));
		return DT_FAILURE;
	}

	int n = 0;

	if (m_query.startRef == m_query.endRef)
	{
		// Special case: the search starts and ends at same poly.
		path[n++] = m_query.startRef;
	}
	else
	{
		// Reverse the path.
		dtAssert(m_query.lastBestNode);

		// If the search never reached the end polygon, the result is partial.
		if (m_query.lastBestNode->id != m_query.endRef)
			m_query.status |= DT_PARTIAL_RESULT;

		dtNode* prev = 0;
		dtNode* node = m_query.lastBestNode;
		int prevRay = 0;
		do
		{
			dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
			node->pidx = m_nodePool->getNodeIdx(prev);
			prev = node;
			int nextRay = node->flags & DT_NODE_PARENT_DETACHED; // keep track of whether parent is not adjacent (i.e. due to raycast shortcut)
			node->flags = (node->flags & ~DT_NODE_PARENT_DETACHED) | prevRay; // and store it in the reversed path's node
			prevRay = nextRay;
			node = next;
		}
		while (node);

		// Store path
		node = prev;
		do
		{
			dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
			dtStatus status = 0;
			if (node->flags & DT_NODE_PARENT_DETACHED)
			{
				// A raycast shortcut was taken here: re-run the raycast to
				// recover the polygons crossed between this node and the next.
				float t, normal[3];
				int m;
				status = raycast(node->id, node->pos, next->pos, m_query.filter, &t, normal, path+n, &m, maxPath-n);
				n += m;
				// raycast ends on poly boundary and the path might include the next poly boundary.
				if (path[n-1] == next->id)
					n--; // remove to avoid duplicates
			}
			else
			{
				path[n++] = node->id;
				if (n >= maxPath)
					status = DT_BUFFER_TOO_SMALL;
			}

			if (status & DT_STATUS_DETAIL_MASK)
			{
				// Detail bit set (e.g. buffer full): stop storing and record it.
				m_query.status |= status & DT_STATUS_DETAIL_MASK;
				break;
			}
			node = next;
		}
		while (node);
	}

	// Carry over any detail bits, then reset the sliced-query state.
	const dtStatus details = m_query.status & DT_STATUS_DETAIL_MASK;

	// Reset query.
	memset(&m_query, 0, sizeof(dtQueryData));

	*pathCount = n;

	return DT_SUCCESS | details;
}
dtStatus dtNavMeshQuery::finalizeSlicedFindPathPartial(const dtPolyRef* existing, const int existingSize,
													   dtPolyRef* path, int* pathCount, const int maxPath)
{
	*pathCount = 0;

	if (existingSize == 0)
	{
		return DT_FAILURE;
	}

	if (dtStatusFailed(m_query.status))
	{
		// Reset query.
		memset(&m_query, 0, sizeof(dtQueryData));
		return DT_FAILURE;
	}

	int n = 0;

	if (m_query.startRef == m_query.endRef)
	{
		// Special case: the search starts and ends at same poly.
		path[n++] = m_query.startRef;
	}
	else
	{
		// Find furthest existing node that was visited.
		dtNode* prev = 0;
		dtNode* node = 0;
		for (int i = existingSize-1; i >= 0; --i)
		{
			m_nodePool->findNodes(existing[i], &node, 1);
			if (node)
				break;
		}

		if (!node)
		{
			// None of the existing polygons were visited by this search;
			// fall back to the best node found so far and mark it partial.
			m_query.status |= DT_PARTIAL_RESULT;
			dtAssert(m_query.lastBestNode);
			node = m_query.lastBestNode;
		}

		// Reverse the path.
		int prevRay = 0;
		do
		{
			dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
			node->pidx = m_nodePool->getNodeIdx(prev);
			prev = node;
			int nextRay = node->flags & DT_NODE_PARENT_DETACHED; // keep track of whether parent is not adjacent (i.e. due to raycast shortcut)
			node->flags = (node->flags & ~DT_NODE_PARENT_DETACHED) | prevRay; // and store it in the reversed path's node
			prevRay = nextRay;
			node = next;
		}
		while (node);

		// Store path
		node = prev;
		do
		{
			dtNode* next = m_nodePool->getNodeAtIdx(node->pidx);
			dtStatus status = 0;
			if (node->flags & DT_NODE_PARENT_DETACHED)
			{
				// A raycast shortcut was taken here: re-run the raycast to
				// recover the polygons crossed between this node and the next.
				float t, normal[3];
				int m;
				status = raycast(node->id, node->pos, next->pos, m_query.filter, &t, normal, path+n, &m, maxPath-n);
				n += m;
				// raycast ends on poly boundary and the path might include the next poly boundary.
				if (path[n-1] == next->id)
					n--; // remove to avoid duplicates
			}
			else
			{
				path[n++] = node->id;
				if (n >= maxPath)
					status = DT_BUFFER_TOO_SMALL;
			}

			if (status & DT_STATUS_DETAIL_MASK)
			{
				// Detail bit set (e.g. buffer full): stop storing and record it.
				m_query.status |= status & DT_STATUS_DETAIL_MASK;
				break;
			}
			node = next;
		}
		while (node);
	}

	// Preserve detail bits, then reset the sliced-query state.
	const dtStatus details = m_query.status & DT_STATUS_DETAIL_MASK;

	// Reset query.
	memset(&m_query, 0, sizeof(dtQueryData));

	*pathCount = n;

	return DT_SUCCESS | details;
}
dtStatus dtNavMeshQuery::appendVertex(const float* pos, const unsigned char flags, const dtPolyRef ref,
									  float* straightPath, unsigned char* straightPathFlags, dtPolyRef* straightPathRefs,
									  int* straightPathCount, const int maxStraightPath) const
{
	const int count = *straightPathCount;

	if (count > 0 && dtVequal(&straightPath[(count-1)*3], pos))
	{
		// Duplicate of the last stored vertex: just refresh its metadata.
		if (straightPathFlags)
			straightPathFlags[count-1] = flags;
		if (straightPathRefs)
			straightPathRefs[count-1] = ref;
		return DT_IN_PROGRESS;
	}

	// Append a new vertex with its optional flags/ref side arrays.
	dtVcopy(&straightPath[count*3], pos);
	if (straightPathFlags)
		straightPathFlags[count] = flags;
	if (straightPathRefs)
		straightPathRefs[count] = ref;
	*straightPathCount = count+1;

	// Finish when the end of the path was appended or the buffer is full.
	const bool bufferFull = (count+1 >= maxStraightPath);
	if (flags == DT_STRAIGHTPATH_END || bufferFull)
		return DT_SUCCESS | (bufferFull ? DT_BUFFER_TOO_SMALL : 0);

	return DT_IN_PROGRESS;
}
dtStatus dtNavMeshQuery::appendPortals(const int startIdx, const int endIdx, const float* endPos, const dtPolyRef* path,
									   float* straightPath, unsigned char* straightPathFlags, dtPolyRef* straightPathRefs,
									   int* straightPathCount, const int maxStraightPath, const int options) const
{
	// The segment to intersect runs from the last stored straight-path
	// vertex to endPos; each crossed portal contributes one intersection.
	const float* startPos = &straightPath[(*straightPathCount-1)*3];
	// Append or update last vertex
	dtStatus stat = 0;
	for (int i = startIdx; i < endIdx; i++)
	{
		// Calculate portal between path[i] and path[i+1].
		const dtPolyRef from = path[i];
		const dtMeshTile* fromTile = 0;
		const dtPoly* fromPoly = 0;
		if (dtStatusFailed(m_nav->getTileAndPolyByRef(from, &fromTile, &fromPoly)))
			return DT_FAILURE | DT_INVALID_PARAM;

		const dtPolyRef to = path[i+1];
		const dtMeshTile* toTile = 0;
		const dtPoly* toPoly = 0;
		if (dtStatusFailed(m_nav->getTileAndPolyByRef(to, &toTile, &toPoly)))
			return DT_FAILURE | DT_INVALID_PARAM;

		float left[3], right[3];
		if (dtStatusFailed(getPortalPoints(from, fromPoly, fromTile, to, toPoly, toTile, left, right)))
			break;	// no shared portal; stop appending (treated as done, not an error)

		if (options & DT_STRAIGHTPATH_AREA_CROSSINGS)
		{
			// Skip intersection if only area crossings are requested.
			if (fromPoly->getArea() == toPoly->getArea())
				continue;
		}

		// Append the intersection of the segment with the portal edge.
		float s,t;
		if (dtIntersectSegSeg2D(startPos, endPos, left, right, s, t))
		{
			float pt[3];
			dtVlerp(pt, left,right, t);

			stat = appendVertex(pt, 0, path[i+1],
								straightPath, straightPathFlags, straightPathRefs,
								straightPathCount, maxStraightPath);
			if (stat != DT_IN_PROGRESS)
				return stat;
		}
	}
	return DT_IN_PROGRESS;
}
/// @par
///
/// This method performs what is often called 'string pulling'.
///
/// The start position is clamped to the first polygon in the path, and the
/// end position is clamped to the last. So the start and end positions should
/// normally be within or very near the first and last polygons respectively.
///
/// The returned polygon references represent the reference id of the polygon
/// that is entered at the associated path position. The reference id associated
/// with the end point will always be zero. This allows, for example, matching
/// off-mesh link points to their representative polygons.
///
/// If the provided result buffers are too small for the entire result set,
/// they will be filled as far as possible from the start toward the end
/// position.
///
dtStatus dtNavMeshQuery::findStraightPath(const float* startPos, const float* endPos,
										  const dtPolyRef* path, const int pathSize,
										  float* straightPath, unsigned char* straightPathFlags, dtPolyRef* straightPathRefs,
										  int* straightPathCount, const int maxStraightPath, const int options) const
{
	dtAssert(m_nav);
	*straightPathCount = 0;
	if (!maxStraightPath)
		return DT_FAILURE | DT_INVALID_PARAM;
	if (!path[0])
		return DT_FAILURE | DT_INVALID_PARAM;
	dtStatus stat = 0;
	// Clamp the start/end positions onto the first/last corridor polygons.
	// TODO: Should this be callers responsibility?
	float closestStartPos[3];
	if (dtStatusFailed(closestPointOnPolyBoundary(path[0], startPos, closestStartPos)))
		return DT_FAILURE | DT_INVALID_PARAM;
	float closestEndPos[3];
	if (dtStatusFailed(closestPointOnPolyBoundary(path[pathSize-1], endPos, closestEndPos)))
		return DT_FAILURE | DT_INVALID_PARAM;
	// Add start point.
	stat = appendVertex(closestStartPos, DT_STRAIGHTPATH_START, path[0],
						straightPath, straightPathFlags, straightPathRefs,
						straightPathCount, maxStraightPath);
	if (stat != DT_IN_PROGRESS)
		return stat;
	if (pathSize > 1)
	{
		// Funnel algorithm: maintain an apex plus left/right funnel edges and
		// tighten the funnel portal by portal; when an edge crosses over, the
		// opposite funnel vertex becomes a straight-path corner.
		float portalApex[3], portalLeft[3], portalRight[3];
		dtVcopy(portalApex, closestStartPos);
		dtVcopy(portalLeft, portalApex);
		dtVcopy(portalRight, portalApex);
		int apexIndex = 0;
		int leftIndex = 0;
		int rightIndex = 0;
		unsigned char leftPolyType = 0;
		unsigned char rightPolyType = 0;
		dtPolyRef leftPolyRef = path[0];
		dtPolyRef rightPolyRef = path[0];
		for (int i = 0; i < pathSize; ++i)
		{
			float left[3], right[3];
			unsigned char fromType, toType;
			if (i+1 < pathSize)
			{
				// Next portal.
				if (dtStatusFailed(getPortalPoints(path[i], path[i+1], left, right, fromType, toType)))
				{
					// Failed to get portal points, in practice this means that path[i+1] is invalid polygon.
					// Clamp the end point to path[i], and return the path so far.
					if (dtStatusFailed(closestPointOnPolyBoundary(path[i], endPos, closestEndPos)))
					{
						// This should only happen when the first polygon is invalid.
						return DT_FAILURE | DT_INVALID_PARAM;
					}
					// Append portals along the current straight path segment.
					if (options & (DT_STRAIGHTPATH_AREA_CROSSINGS | DT_STRAIGHTPATH_ALL_CROSSINGS))
					{
						stat = appendPortals(apexIndex, i, closestEndPos, path,
											 straightPath, straightPathFlags, straightPathRefs,
											 straightPathCount, maxStraightPath, options);
					}
					stat = appendVertex(closestEndPos, 0, path[i],
										straightPath, straightPathFlags, straightPathRefs,
										straightPathCount, maxStraightPath);
					return DT_SUCCESS | DT_PARTIAL_RESULT | ((*straightPathCount >= maxStraightPath) ? DT_BUFFER_TOO_SMALL : 0);
				}
				// If starting really close the portal, advance.
				if (i == 0)
				{
					float t;
					if (dtDistancePtSegSqr2D(portalApex, left, right, t) < dtSqr(0.001f))
						continue;
				}
			}
			else
			{
				// End of the path: treat the end position as a degenerate portal.
				dtVcopy(left, closestEndPos);
				dtVcopy(right, closestEndPos);
				fromType = toType = DT_POLYTYPE_GROUND;
			}
			// Right vertex.
			if (dtTriArea2D(portalApex, portalRight, right) <= 0.0f)
			{
				if (dtVequal(portalApex, portalRight) || dtTriArea2D(portalApex, portalLeft, right) > 0.0f)
				{
					// Tighten the funnel on the right side.
					dtVcopy(portalRight, right);
					rightPolyRef = (i+1 < pathSize) ? path[i+1] : 0;
					rightPolyType = toType;
					rightIndex = i;
				}
				else
				{
					// Right edge crossed over left: emit the left funnel vertex
					// as a corner and restart the scan from it.
					// Append portals along the current straight path segment.
					if (options & (DT_STRAIGHTPATH_AREA_CROSSINGS | DT_STRAIGHTPATH_ALL_CROSSINGS))
					{
						stat = appendPortals(apexIndex, leftIndex, portalLeft, path,
											 straightPath, straightPathFlags, straightPathRefs,
											 straightPathCount, maxStraightPath, options);
						if (stat != DT_IN_PROGRESS)
							return stat;
					}
					dtVcopy(portalApex, portalLeft);
					apexIndex = leftIndex;
					unsigned char flags = 0;
					if (!leftPolyRef)
						flags = DT_STRAIGHTPATH_END;
					else if (leftPolyType == DT_POLYTYPE_OFFMESH_CONNECTION)
						flags = DT_STRAIGHTPATH_OFFMESH_CONNECTION;
					dtPolyRef ref = leftPolyRef;
					// Append or update vertex
					stat = appendVertex(portalApex, flags, ref,
										straightPath, straightPathFlags, straightPathRefs,
										straightPathCount, maxStraightPath);
					if (stat != DT_IN_PROGRESS)
						return stat;
					dtVcopy(portalLeft, portalApex);
					dtVcopy(portalRight, portalApex);
					leftIndex = apexIndex;
					rightIndex = apexIndex;
					// Restart the portal scan from the new apex.
					i = apexIndex;
					continue;
				}
			}
			// Left vertex. (Mirror image of the right-vertex case above.)
			if (dtTriArea2D(portalApex, portalLeft, left) >= 0.0f)
			{
				if (dtVequal(portalApex, portalLeft) || dtTriArea2D(portalApex, portalRight, left) < 0.0f)
				{
					// Tighten the funnel on the left side.
					dtVcopy(portalLeft, left);
					leftPolyRef = (i+1 < pathSize) ? path[i+1] : 0;
					leftPolyType = toType;
					leftIndex = i;
				}
				else
				{
					// Left edge crossed over right: emit the right funnel vertex
					// as a corner and restart the scan from it.
					// Append portals along the current straight path segment.
					if (options & (DT_STRAIGHTPATH_AREA_CROSSINGS | DT_STRAIGHTPATH_ALL_CROSSINGS))
					{
						stat = appendPortals(apexIndex, rightIndex, portalRight, path,
											 straightPath, straightPathFlags, straightPathRefs,
											 straightPathCount, maxStraightPath, options);
						if (stat != DT_IN_PROGRESS)
							return stat;
					}
					dtVcopy(portalApex, portalRight);
					apexIndex = rightIndex;
					unsigned char flags = 0;
					if (!rightPolyRef)
						flags = DT_STRAIGHTPATH_END;
					else if (rightPolyType == DT_POLYTYPE_OFFMESH_CONNECTION)
						flags = DT_STRAIGHTPATH_OFFMESH_CONNECTION;
					dtPolyRef ref = rightPolyRef;
					// Append or update vertex
					stat = appendVertex(portalApex, flags, ref,
										straightPath, straightPathFlags, straightPathRefs,
										straightPathCount, maxStraightPath);
					if (stat != DT_IN_PROGRESS)
						return stat;
					dtVcopy(portalLeft, portalApex);
					dtVcopy(portalRight, portalApex);
					leftIndex = apexIndex;
					rightIndex = apexIndex;
					// Restart the portal scan from the new apex.
					i = apexIndex;
					continue;
				}
			}
		}
		// Append portals along the current straight path segment.
		if (options & (DT_STRAIGHTPATH_AREA_CROSSINGS | DT_STRAIGHTPATH_ALL_CROSSINGS))
		{
			stat = appendPortals(apexIndex, pathSize-1, closestEndPos, path,
								 straightPath, straightPathFlags, straightPathRefs,
								 straightPathCount, maxStraightPath, options);
			if (stat != DT_IN_PROGRESS)
				return stat;
		}
	}
	// End vertex always carries a zero polygon ref (see @par docs above).
	stat = appendVertex(closestEndPos, DT_STRAIGHTPATH_END, 0,
						straightPath, straightPathFlags, straightPathRefs,
						straightPathCount, maxStraightPath);
	return DT_SUCCESS | ((*straightPathCount >= maxStraightPath) ? DT_BUFFER_TOO_SMALL : 0);
}
/// @par
///
/// This method is optimized for small delta movement and a small number of
/// polygons. If used for too great a distance, the result set will form an
/// incomplete path.
///
/// @p resultPos will equal the @p endPos if the end is reached.
/// Otherwise the closest reachable position will be returned.
///
/// @p resultPos is not projected onto the surface of the navigation
/// mesh. Use #getPolyHeight if this is needed.
///
/// This method treats the end position in the same manner as
/// the #raycast method. (As a 2D point.) See that method's documentation
/// for details.
///
/// If the @p visited array is too small to hold the entire result set, it will
/// be filled as far as possible from the start position toward the end
/// position.
///
dtStatus dtNavMeshQuery::moveAlongSurface(dtPolyRef startRef, const float* startPos, const float* endPos,
										  const dtQueryFilter* filter,
										  float* resultPos, dtPolyRef* visited, int* visitedCount, const int maxVisitedSize) const
{
	dtAssert(m_nav);
	dtAssert(m_tinyNodePool);
	*visitedCount = 0;
	// Validate input
	if (!startRef)
		return DT_FAILURE | DT_INVALID_PARAM;
	if (!m_nav->isValidPolyRef(startRef))
		return DT_FAILURE | DT_INVALID_PARAM;
	dtStatus status = DT_SUCCESS;
	// Breadth-first search over nearby polygons using a small fixed-size queue.
	static const int MAX_STACK = 48;
	dtNode* stack[MAX_STACK];
	int nstack = 0;
	m_tinyNodePool->clear();
	dtNode* startNode = m_tinyNodePool->getNode(startRef);
	startNode->pidx = 0;
	startNode->cost = 0;
	startNode->total = 0;
	startNode->id = startRef;
	startNode->flags = DT_NODE_CLOSED;
	stack[nstack++] = startNode;
	float bestPos[3];
	float bestDist = FLT_MAX;
	dtNode* bestNode = 0;
	dtVcopy(bestPos, startPos);
	// Search constraints: a circle centered on the segment midpoint whose
	// radius covers the whole start->end movement (plus a small epsilon).
	float searchPos[3], searchRadSqr;
	dtVlerp(searchPos, startPos, endPos, 0.5f);
	searchRadSqr = dtSqr(dtVdist(startPos, endPos)/2.0f + 0.001f);
	float verts[DT_VERTS_PER_POLYGON*3];
	while (nstack)
	{
		// Pop front. (O(n) shift; acceptable for MAX_STACK == 48.)
		dtNode* curNode = stack[0];
		for (int i = 0; i < nstack-1; ++i)
			stack[i] = stack[i+1];
		nstack--;
		// Get poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef curRef = curNode->id;
		const dtMeshTile* curTile = 0;
		const dtPoly* curPoly = 0;
		m_nav->getTileAndPolyByRefUnsafe(curRef, &curTile, &curPoly);
		// Collect vertices.
		const int nverts = curPoly->vertCount;
		for (int i = 0; i < nverts; ++i)
			dtVcopy(&verts[i*3], &curTile->verts[curPoly->verts[i]*3]);
		// If target is inside the poly, stop search.
		if (dtPointInPolygon(endPos, verts, nverts))
		{
			bestNode = curNode;
			dtVcopy(bestPos, endPos);
			break;
		}
		// Find wall edges and find nearest point inside the walls.
		for (int i = 0, j = (int)curPoly->vertCount-1; i < (int)curPoly->vertCount; j = i++)
		{
			// Find links to neighbours.
			static const int MAX_NEIS = 8;
			int nneis = 0;
			dtPolyRef neis[MAX_NEIS];
			if (curPoly->neis[j] & DT_EXT_LINK)
			{
				// Tile border: neighbours are reached via the tile's link list.
				for (unsigned int k = curPoly->firstLink; k != DT_NULL_LINK; k = curTile->links[k].next)
				{
					const dtLink* link = &curTile->links[k];
					if (link->edge == j)
					{
						if (link->ref != 0)
						{
							const dtMeshTile* neiTile = 0;
							const dtPoly* neiPoly = 0;
							m_nav->getTileAndPolyByRefUnsafe(link->ref, &neiTile, &neiPoly);
							if (filter->passFilter(link->ref, neiTile, neiPoly))
							{
								if (nneis < MAX_NEIS)
									neis[nneis++] = link->ref;
							}
						}
					}
				}
			}
			else if (curPoly->neis[j])
			{
				// Internal edge: neighbour index is stored directly (1-based).
				const unsigned int idx = (unsigned int)(curPoly->neis[j]-1);
				const dtPolyRef ref = m_nav->getPolyRefBase(curTile) | idx;
				if (filter->passFilter(ref, curTile, &curTile->polys[idx]))
				{
					// Internal edge, encode id.
					neis[nneis++] = ref;
				}
			}
			if (!nneis)
			{
				// Wall edge, calc distance.
				const float* vj = &verts[j*3];
				const float* vi = &verts[i*3];
				float tseg;
				const float distSqr = dtDistancePtSegSqr2D(endPos, vj, vi, tseg);
				if (distSqr < bestDist)
				{
					// Update nearest distance.
					dtVlerp(bestPos, vj,vi, tseg);
					bestDist = distSqr;
					bestNode = curNode;
				}
			}
			else
			{
				for (int k = 0; k < nneis; ++k)
				{
					// Skip if no node can be allocated.
					dtNode* neighbourNode = m_tinyNodePool->getNode(neis[k]);
					if (!neighbourNode)
						continue;
					// Skip if already visited.
					if (neighbourNode->flags & DT_NODE_CLOSED)
						continue;
					// Skip the link if it is too far from search constraint.
					// TODO: Maybe should use getPortalPoints(), but this one is way faster.
					const float* vj = &verts[j*3];
					const float* vi = &verts[i*3];
					float tseg;
					float distSqr = dtDistancePtSegSqr2D(searchPos, vj, vi, tseg);
					if (distSqr > searchRadSqr)
						continue;
					// Mark as the node as visited and push to queue.
					if (nstack < MAX_STACK)
					{
						neighbourNode->pidx = m_tinyNodePool->getNodeIdx(curNode);
						neighbourNode->flags |= DT_NODE_CLOSED;
						stack[nstack++] = neighbourNode;
					}
				}
			}
		}
	}
	int n = 0;
	if (bestNode)
	{
		// Reverse the path (parent links point backwards after the search).
		dtNode* prev = 0;
		dtNode* node = bestNode;
		do
		{
			dtNode* next = m_tinyNodePool->getNodeAtIdx(node->pidx);
			node->pidx = m_tinyNodePool->getNodeIdx(prev);
			prev = node;
			node = next;
		}
		while (node);
		// Store result
		node = prev;
		do
		{
			visited[n++] = node->id;
			if (n >= maxVisitedSize)
			{
				status |= DT_BUFFER_TOO_SMALL;
				break;
			}
			node = m_tinyNodePool->getNodeAtIdx(node->pidx);
		}
		while (node);
	}
	dtVcopy(resultPos, bestPos);
	*visitedCount = n;
	return status;
}
/// Resolves both polygon refs to their tiles/polys, reports each polygon's
/// type, and delegates to the tile-based overload to compute the portal edge.
dtStatus dtNavMeshQuery::getPortalPoints(dtPolyRef from, dtPolyRef to, float* left, float* right,
										 unsigned char& fromType, unsigned char& toType) const
{
	dtAssert(m_nav);
	// Look up the source polygon; an unresolvable ref is a caller error.
	const dtMeshTile* srcTile = 0;
	const dtPoly* srcPoly = 0;
	if (dtStatusFailed(m_nav->getTileAndPolyByRef(from, &srcTile, &srcPoly)))
		return DT_FAILURE | DT_INVALID_PARAM;
	fromType = srcPoly->getType();
	// Look up the destination polygon.
	const dtMeshTile* dstTile = 0;
	const dtPoly* dstPoly = 0;
	if (dtStatusFailed(m_nav->getTileAndPolyByRef(to, &dstTile, &dstPoly)))
		return DT_FAILURE | DT_INVALID_PARAM;
	toType = dstPoly->getType();
	// Both refs are valid; compute the shared portal edge.
	return getPortalPoints(from, srcPoly, srcTile, to, dstPoly, dstTile, left, right);
}
// Returns portal points between two polygons.
// 'left'/'right' receive the endpoints of the shared edge as seen when moving
// from 'from' to 'to'. For off-mesh connections both outputs collapse to the
// single connection endpoint vertex.
dtStatus dtNavMeshQuery::getPortalPoints(dtPolyRef from, const dtPoly* fromPoly, const dtMeshTile* fromTile,
										 dtPolyRef to, const dtPoly* toPoly, const dtMeshTile* toTile,
										 float* left, float* right) const
{
	// Find the link that points to the 'to' polygon.
	const dtLink* link = 0;
	for (unsigned int i = fromPoly->firstLink; i != DT_NULL_LINK; i = fromTile->links[i].next)
	{
		if (fromTile->links[i].ref == to)
		{
			link = &fromTile->links[i];
			break;
		}
	}
	if (!link)
		return DT_FAILURE | DT_INVALID_PARAM;
	// Handle off-mesh connections.
	if (fromPoly->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
	{
		// Find link that points to first vertex.
		// NOTE(review): this rescan locates the same link found above, so
		// 'link->edge' could be used directly; kept as-is to preserve behavior.
		for (unsigned int i = fromPoly->firstLink; i != DT_NULL_LINK; i = fromTile->links[i].next)
		{
			if (fromTile->links[i].ref == to)
			{
				// Degenerate portal: both sides are the connection endpoint.
				const int v = fromTile->links[i].edge;
				dtVcopy(left, &fromTile->verts[fromPoly->verts[v]*3]);
				dtVcopy(right, &fromTile->verts[fromPoly->verts[v]*3]);
				return DT_SUCCESS;
			}
		}
		return DT_FAILURE | DT_INVALID_PARAM;
	}
	if (toPoly->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
	{
		// Same as above, but the endpoint is found via the destination's links.
		for (unsigned int i = toPoly->firstLink; i != DT_NULL_LINK; i = toTile->links[i].next)
		{
			if (toTile->links[i].ref == from)
			{
				const int v = toTile->links[i].edge;
				dtVcopy(left, &toTile->verts[toPoly->verts[v]*3]);
				dtVcopy(right, &toTile->verts[toPoly->verts[v]*3]);
				return DT_SUCCESS;
			}
		}
		return DT_FAILURE | DT_INVALID_PARAM;
	}
	// Find portal vertices.
	const int v0 = fromPoly->verts[link->edge];
	const int v1 = fromPoly->verts[(link->edge+1) % (int)fromPoly->vertCount];
	dtVcopy(left, &fromTile->verts[v0*3]);
	dtVcopy(right, &fromTile->verts[v1*3]);
	// If the link is at tile boundary, dtClamp the vertices to
	// the link width.
	if (link->side != 0xff)
	{
		// Unpack portal limits.
		if (link->bmin != 0 || link->bmax != 255)
		{
			// bmin/bmax are quantized to the 0..255 range along the shared edge.
			const float s = 1.0f/255.0f;
			const float tmin = link->bmin*s;
			const float tmax = link->bmax*s;
			dtVlerp(left, &fromTile->verts[v0*3], &fromTile->verts[v1*3], tmin);
			dtVlerp(right, &fromTile->verts[v0*3], &fromTile->verts[v1*3], tmax);
		}
	}
	return DT_SUCCESS;
}
// Returns the midpoint of the portal edge shared by two polygons,
// resolving both polygons from their refs first.
dtStatus dtNavMeshQuery::getEdgeMidPoint(dtPolyRef from, dtPolyRef to, float* mid) const
{
	float portalL[3], portalR[3];
	unsigned char typeFrom, typeTo;
	// Resolve the shared portal edge; fail if either ref is invalid.
	if (dtStatusFailed(getPortalPoints(from, to, portalL, portalR, typeFrom, typeTo)))
		return DT_FAILURE | DT_INVALID_PARAM;
	// Average the two endpoints component-wise.
	for (int k = 0; k < 3; ++k)
		mid[k] = (portalL[k]+portalR[k])*0.5f;
	return DT_SUCCESS;
}
// Returns the midpoint of the portal edge shared by two polygons whose
// tiles/polys have already been resolved by the caller.
dtStatus dtNavMeshQuery::getEdgeMidPoint(dtPolyRef from, const dtPoly* fromPoly, const dtMeshTile* fromTile,
										 dtPolyRef to, const dtPoly* toPoly, const dtMeshTile* toTile,
										 float* mid) const
{
	float portalL[3], portalR[3];
	// Compute the shared portal edge; propagate failure as an invalid-param error.
	if (dtStatusFailed(getPortalPoints(from, fromPoly, fromTile, to, toPoly, toTile, portalL, portalR)))
		return DT_FAILURE | DT_INVALID_PARAM;
	// Average the two endpoints component-wise.
	for (int k = 0; k < 3; ++k)
		mid[k] = (portalL[k]+portalR[k])*0.5f;
	return DT_SUCCESS;
}
/// @par
///
/// This method is meant to be used for quick, short distance checks.
///
/// If the path array is too small to hold the result, it will be filled as
/// far as possible from the start position toward the end position.
///
/// <b>Using the Hit Parameter (t)</b>
///
/// If the hit parameter is a very high value (FLT_MAX), then the ray has hit
/// the end position. In this case the path represents a valid corridor to the
/// end position and the value of @p hitNormal is undefined.
///
/// If the hit parameter is zero, then the start position is on the wall that
/// was hit and the value of @p hitNormal is undefined.
///
/// If 0 < t < 1.0 then the following applies:
///
/// @code
/// distanceToHitBorder = distanceToEndPosition * t
/// hitPoint = startPos + (endPos - startPos) * t
/// @endcode
///
/// <b>Use Case Restriction</b>
///
/// The raycast ignores the y-value of the end position. (2D check.) This
/// places significant limits on how it can be used. For example:
///
/// Consider a scene where there is a main floor with a second floor balcony
/// that hangs over the main floor. So the first floor mesh extends below the
/// balcony mesh. The start position is somewhere on the first floor. The end
/// position is on the balcony.
///
/// The raycast will search toward the end position along the first floor mesh.
/// If it reaches the end position's xz-coordinates it will indicate FLT_MAX
/// (no wall hit), meaning it reached the end position. This is one example of why
/// this method is meant for short distance checks.
///
/// Legacy raycast interface: adapts the flat output parameters onto the
/// dtRaycastHit-based overload (called with default options and no prevRef).
dtStatus dtNavMeshQuery::raycast(dtPolyRef startRef, const float* startPos, const float* endPos,
								 const dtQueryFilter* filter,
								 float* t, float* hitNormal, dtPolyRef* path, int* pathCount, const int maxPath) const
{
	// Route the caller's buffers through a hit structure.
	dtRaycastHit hit;
	hit.maxPath = maxPath;
	hit.path = path;
	const dtStatus status = raycast(startRef, startPos, endPos, filter, 0, &hit);
	// Copy results back into the legacy out-parameters.
	*t = hit.t;
	if (hitNormal)
		dtVcopy(hitNormal, hit.hitNormal);
	if (pathCount)
		*pathCount = hit.pathCount;
	return status;
}
/// @par
///
/// This method is meant to be used for quick, short distance checks.
///
/// If the path array is too small to hold the result, it will be filled as
/// far as possible from the start position toward the end position.
///
/// <b>Using the Hit Parameter t of RaycastHit</b>
///
/// If the hit parameter is a very high value (FLT_MAX), then the ray has hit
/// the end position. In this case the path represents a valid corridor to the
/// end position and the value of @p hitNormal is undefined.
///
/// If the hit parameter is zero, then the start position is on the wall that
/// was hit and the value of @p hitNormal is undefined.
///
/// If 0 < t < 1.0 then the following applies:
///
/// @code
/// distanceToHitBorder = distanceToEndPosition * t
/// hitPoint = startPos + (endPos - startPos) * t
/// @endcode
///
/// <b>Use Case Restriction</b>
///
/// The raycast ignores the y-value of the end position. (2D check.) This
/// places significant limits on how it can be used. For example:
///
/// Consider a scene where there is a main floor with a second floor balcony
/// that hangs over the main floor. So the first floor mesh extends below the
/// balcony mesh. The start position is somewhere on the first floor. The end
/// position is on the balcony.
///
/// The raycast will search toward the end position along the first floor mesh.
/// If it reaches the end position's xz-coordinates it will indicate FLT_MAX
/// (no wall hit), meaning it reached the end position. This is one example of why
/// this method is meant for short distance checks.
///
dtStatus dtNavMeshQuery::raycast(dtPolyRef startRef, const float* startPos, const float* endPos,
								 const dtQueryFilter* filter, const unsigned int options,
								 dtRaycastHit* hit, dtPolyRef prevRef) const
{
	dtAssert(m_nav);
	hit->t = 0;
	hit->pathCount = 0;
	hit->pathCost = 0;
	// Validate input
	if (!startRef || !m_nav->isValidPolyRef(startRef))
		return DT_FAILURE | DT_INVALID_PARAM;
	if (prevRef && !m_nav->isValidPolyRef(prevRef))
		return DT_FAILURE | DT_INVALID_PARAM;
	float dir[3], curPos[3], lastPos[3];
	float verts[DT_VERTS_PER_POLYGON*3+3];
	int n = 0;
	dtVcopy(curPos, startPos);
	dtVsub(dir, endPos, startPos);
	dtVset(hit->hitNormal, 0, 0, 0);
	dtStatus status = DT_SUCCESS;
	const dtMeshTile* prevTile, *tile, *nextTile;
	const dtPoly* prevPoly, *poly, *nextPoly;
	dtPolyRef curRef, nextRef;
	// The API input has been checked already, skip checking internal data.
	nextRef = curRef = startRef;
	tile = 0;
	poly = 0;
	m_nav->getTileAndPolyByRefUnsafe(curRef, &tile, &poly);
	nextTile = prevTile = tile;
	nextPoly = prevPoly = poly;
	if (prevRef)
		m_nav->getTileAndPolyByRefUnsafe(prevRef, &prevTile, &prevPoly);
	// Walk the ray polygon by polygon until it exits the mesh, hits a wall,
	// or ends inside a polygon.
	while (curRef)
	{
		// Cast ray against current polygon.
		// Collect vertices.
		int nv = 0;
		for (int i = 0; i < (int)poly->vertCount; ++i)
		{
			dtVcopy(&verts[nv*3], &tile->verts[poly->verts[i]*3]);
			nv++;
		}
		float tmin, tmax;
		int segMin, segMax;
		if (!dtIntersectSegmentPoly2D(startPos, endPos, verts, nv, tmin, tmax, segMin, segMax))
		{
			// Could not hit the polygon, keep the old t and report hit.
			hit->pathCount = n;
			return status;
		}
		// Nostalrius: may be a bugged triangle (3 aligned points)
		// NOTE(review): this early DT_FAILURE leaves hit->pathCount at 0 even if
		// vertices were already stored in hit->path — confirm callers tolerate this.
		if (tmax - tmin < 0.00001f)
			return DT_FAILURE;
		// Keep track of furthest t so far.
		if (tmax > hit->t)
			hit->t = tmax;
		// Store visited polygons.
		if (n < hit->maxPath)
			hit->path[n++] = curRef;
		else
			status |= DT_BUFFER_TOO_SMALL;
		// Ray end is completely inside the polygon.
		if (segMax == -1)
		{
			// FLT_MAX signals "no wall hit; end position reached".
			hit->t = FLT_MAX;
			hit->pathCount = n;
			// add the cost
			if (options & DT_RAYCAST_USE_COSTS)
				hit->pathCost += filter->getCost(curPos, endPos, prevRef, prevTile, prevPoly, curRef, tile, poly, curRef, tile, poly);
			return status;
		}
		// Follow neighbours.
		nextRef = 0;
		for (unsigned int i = poly->firstLink; i != DT_NULL_LINK; i = tile->links[i].next)
		{
			const dtLink* link = &tile->links[i];
			// Find link which contains this edge.
			if ((int)link->edge != segMax)
				continue;
			// Get pointer to the next polygon.
			nextTile = 0;
			nextPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(link->ref, &nextTile, &nextPoly);
			// Skip off-mesh connections.
			if (nextPoly->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
				continue;
			// Skip links based on filter.
			if (!filter->passFilter(link->ref, nextTile, nextPoly))
				continue;
			// If the link is internal, just return the ref.
			if (link->side == 0xff)
			{
				nextRef = link->ref;
				break;
			}
			// If the link is at tile boundary,
			// Check if the link spans the whole edge, and accept.
			if (link->bmin == 0 && link->bmax == 255)
			{
				nextRef = link->ref;
				break;
			}
			// Check for partial edge links.
			const int v0 = poly->verts[link->edge];
			const int v1 = poly->verts[(link->edge+1) % poly->vertCount];
			const float* left = &tile->verts[v0*3];
			const float* right = &tile->verts[v1*3];
			// Check that the intersection lies inside the link portal.
			if (link->side == 0 || link->side == 4)
			{
				// Calculate link size (bmin/bmax quantized to 0..255 along the edge).
				const float s = 1.0f/255.0f;
				float lmin = left[2] + (right[2] - left[2])*(link->bmin*s);
				float lmax = left[2] + (right[2] - left[2])*(link->bmax*s);
				if (lmin > lmax) dtSwap(lmin, lmax);
				// Find Z intersection.
				float z = startPos[2] + (endPos[2]-startPos[2])*tmax;
				if (z >= lmin && z <= lmax)
				{
					nextRef = link->ref;
					break;
				}
			}
			else if (link->side == 2 || link->side == 6)
			{
				// Calculate link size.
				const float s = 1.0f/255.0f;
				float lmin = left[0] + (right[0] - left[0])*(link->bmin*s);
				float lmax = left[0] + (right[0] - left[0])*(link->bmax*s);
				if (lmin > lmax) dtSwap(lmin, lmax);
				// Find X intersection.
				float x = startPos[0] + (endPos[0]-startPos[0])*tmax;
				if (x >= lmin && x <= lmax)
				{
					nextRef = link->ref;
					break;
				}
			}
		}
		// add the cost
		if (options & DT_RAYCAST_USE_COSTS)
		{
			// compute the intersection point at the furthest end of the polygon
			// and correct the height (since the raycast moves in 2d)
			dtVcopy(lastPos, curPos);
			dtVmad(curPos, startPos, dir, hit->t);
			float* e1 = &verts[segMax*3];
			float* e2 = &verts[((segMax+1)%nv)*3];
			float eDir[3], diff[3];
			dtVsub(eDir, e2, e1);
			dtVsub(diff, curPos, e1);
			// Parameterize along the dominant horizontal axis of the edge.
			float s = dtSqr(eDir[0]) > dtSqr(eDir[2]) ? diff[0] / eDir[0] : diff[2] / eDir[2];
			curPos[1] = e1[1] + eDir[1] * s;
			hit->pathCost += filter->getCost(lastPos, curPos, prevRef, prevTile, prevPoly, curRef, tile, poly, nextRef, nextTile, nextPoly);
		}
		if (!nextRef)
		{
			// No neighbour, we hit a wall.
			// Calculate hit normal (perpendicular to the blocking edge, in the xz-plane).
			const int a = segMax;
			const int b = segMax+1 < nv ? segMax+1 : 0;
			const float* va = &verts[a*3];
			const float* vb = &verts[b*3];
			const float dx = vb[0] - va[0];
			const float dz = vb[2] - va[2];
			hit->hitNormal[0] = dz;
			hit->hitNormal[1] = 0;
			hit->hitNormal[2] = -dx;
			dtVnormalize(hit->hitNormal);
			hit->pathCount = n;
			return status;
		}
		// No hit, advance to neighbour polygon.
		prevRef = curRef;
		curRef = nextRef;
		prevTile = tile;
		tile = nextTile;
		prevPoly = poly;
		poly = nextPoly;
	}
	hit->pathCount = n;
	return status;
}
/// @par
///
/// At least one result array must be provided.
///
/// The order of the result set is from least to highest cost to reach the polygon.
///
/// A common use case for this method is to perform Dijkstra searches.
/// Candidate polygons are found by searching the graph beginning at the start polygon.
///
/// If a polygon is not found via the graph search, even if it intersects the
/// search circle, it will not be included in the result set. For example:
///
/// polyA is the start polygon.
/// polyB shares an edge with polyA. (Is adjacent.)
/// polyC shares an edge with polyB, but not with polyA
/// Even if the search circle overlaps polyC, it will not be included in the
/// result set unless polyB is also in the set.
///
/// The value of the center point is used as the start position for cost
/// calculations. It is not projected onto the surface of the mesh, so its
/// y-value will effect the costs.
///
/// Intersection tests occur in 2D. All polygons and the search circle are
/// projected onto the xz-plane. So the y-value of the center point does not
/// effect intersection tests.
///
/// If the result arrays are too small to hold the entire result set, they will be
/// filled to capacity.
///
dtStatus dtNavMeshQuery::findPolysAroundCircle(dtPolyRef startRef, const float* centerPos, const float radius,
											   const dtQueryFilter* filter,
											   dtPolyRef* resultRef, dtPolyRef* resultParent, float* resultCost,
											   int* resultCount, const int maxResult) const
{
	dtAssert(m_nav);
	dtAssert(m_nodePool);
	dtAssert(m_openList);
	*resultCount = 0;
	// Validate input
	if (!startRef || !m_nav->isValidPolyRef(startRef))
		return DT_FAILURE | DT_INVALID_PARAM;
	// Dijkstra search expanding outward from the start polygon; results are
	// emitted in order of increasing cost from centerPos.
	m_nodePool->clear();
	m_openList->clear();
	dtNode* startNode = m_nodePool->getNode(startRef);
	dtVcopy(startNode->pos, centerPos);
	startNode->pidx = 0;
	startNode->cost = 0;
	startNode->total = 0;
	startNode->id = startRef;
	startNode->flags = DT_NODE_OPEN;
	m_openList->push(startNode);
	dtStatus status = DT_SUCCESS;
	int n = 0;
	// The start polygon is always the first result (parent 0, cost 0).
	if (n < maxResult)
	{
		if (resultRef)
			resultRef[n] = startNode->id;
		if (resultParent)
			resultParent[n] = 0;
		if (resultCost)
			resultCost[n] = 0;
		++n;
	}
	else
	{
		status |= DT_BUFFER_TOO_SMALL;
	}
	const float radiusSqr = dtSqr(radius);
	while (!m_openList->empty())
	{
		dtNode* bestNode = m_openList->pop();
		bestNode->flags &= ~DT_NODE_OPEN;
		bestNode->flags |= DT_NODE_CLOSED;
		// Get poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef bestRef = bestNode->id;
		const dtMeshTile* bestTile = 0;
		const dtPoly* bestPoly = 0;
		m_nav->getTileAndPolyByRefUnsafe(bestRef, &bestTile, &bestPoly);
		// Get parent poly and tile.
		dtPolyRef parentRef = 0;
		const dtMeshTile* parentTile = 0;
		const dtPoly* parentPoly = 0;
		if (bestNode->pidx)
			parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
		if (parentRef)
			m_nav->getTileAndPolyByRefUnsafe(parentRef, &parentTile, &parentPoly);
		for (unsigned int i = bestPoly->firstLink; i != DT_NULL_LINK; i = bestTile->links[i].next)
		{
			const dtLink* link = &bestTile->links[i];
			dtPolyRef neighbourRef = link->ref;
			// Skip invalid neighbours and do not follow back to parent.
			if (!neighbourRef || neighbourRef == parentRef)
				continue;
			// Expand to neighbour
			const dtMeshTile* neighbourTile = 0;
			const dtPoly* neighbourPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(neighbourRef, &neighbourTile, &neighbourPoly);
			// Do not advance if the polygon is excluded by the filter.
			if (!filter->passFilter(neighbourRef, neighbourTile, neighbourPoly))
				continue;
			// Find edge and calc distance to the edge.
			float va[3], vb[3];
			// BUGFIX: dtStatus is a bit-flag type; DT_SUCCESS is nonzero, so the
			// previous '!getPortalPoints(...)' check could never fire and va/vb
			// were read uninitialized on failure. Use dtStatusFailed() instead.
			if (dtStatusFailed(getPortalPoints(bestRef, bestPoly, bestTile, neighbourRef, neighbourPoly, neighbourTile, va, vb)))
				continue;
			// If the circle is not touching the next polygon, skip it.
			float tseg;
			float distSqr = dtDistancePtSegSqr2D(centerPos, va, vb, tseg);
			if (distSqr > radiusSqr)
				continue;
			dtNode* neighbourNode = m_nodePool->getNode(neighbourRef);
			if (!neighbourNode)
			{
				status |= DT_OUT_OF_NODES;
				continue;
			}
			if (neighbourNode->flags & DT_NODE_CLOSED)
				continue;
			// Cost: first visit places the node at the portal edge midpoint.
			if (neighbourNode->flags == 0)
				dtVlerp(neighbourNode->pos, va, vb, 0.5f);
			const float total = bestNode->total + dtVdist(bestNode->pos, neighbourNode->pos);
			// The node is already in open list and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
				continue;
			neighbourNode->id = neighbourRef;
			neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
			neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
			neighbourNode->total = total;
			if (neighbourNode->flags & DT_NODE_OPEN)
			{
				m_openList->modify(neighbourNode);
			}
			else
			{
				// First time this polygon is reached: record it in the results.
				if (n < maxResult)
				{
					if (resultRef)
						resultRef[n] = neighbourNode->id;
					if (resultParent)
						resultParent[n] = m_nodePool->getNodeAtIdx(neighbourNode->pidx)->id;
					if (resultCost)
						resultCost[n] = neighbourNode->total;
					++n;
				}
				else
				{
					status |= DT_BUFFER_TOO_SMALL;
				}
				neighbourNode->flags = DT_NODE_OPEN;
				m_openList->push(neighbourNode);
			}
		}
	}
	*resultCount = n;
	return status;
}
/// @par
///
/// The order of the result set is from least to highest cost.
///
/// At least one result array must be provided.
///
/// A common use case for this method is to perform Dijkstra searches.
/// Candidate polygons are found by searching the graph beginning at the start
/// polygon.
///
/// The same intersection test restrictions that apply to findPolysAroundCircle()
/// method apply to this method.
///
/// The 3D centroid of the search polygon is used as the start position for cost
/// calculations.
///
/// Intersection tests occur in 2D. All polygons are projected onto the
/// xz-plane. So the y-values of the vertices do not effect intersection tests.
///
/// If the result arrays are too small to hold the entire result set, they will
/// be filled to capacity.
///
dtStatus dtNavMeshQuery::findPolysAroundShape(dtPolyRef startRef, const float* verts, const int nverts,
											  const dtQueryFilter* filter,
											  dtPolyRef* resultRef, dtPolyRef* resultParent, float* resultCost,
											  int* resultCount, const int maxResult) const
{
	dtAssert(m_nav);
	dtAssert(m_nodePool);
	dtAssert(m_openList);
	*resultCount = 0;
	// Validate input
	if (!startRef || !m_nav->isValidPolyRef(startRef))
		return DT_FAILURE | DT_INVALID_PARAM;
	// BUGFIX: guard against a null/degenerate shape; previously nverts == 0
	// caused a division by zero in the centroid scale below.
	if (!verts || nverts < 3)
		return DT_FAILURE | DT_INVALID_PARAM;
	// Dijkstra search expanding outward from the start polygon; results are
	// emitted in order of increasing cost from the shape centroid.
	m_nodePool->clear();
	m_openList->clear();
	// Use the 3D centroid of the shape as the cost origin.
	float centerPos[3] = {0,0,0};
	for (int i = 0; i < nverts; ++i)
		dtVadd(centerPos,centerPos,&verts[i*3]);
	dtVscale(centerPos,centerPos,1.0f/nverts);
	dtNode* startNode = m_nodePool->getNode(startRef);
	dtVcopy(startNode->pos, centerPos);
	startNode->pidx = 0;
	startNode->cost = 0;
	startNode->total = 0;
	startNode->id = startRef;
	startNode->flags = DT_NODE_OPEN;
	m_openList->push(startNode);
	dtStatus status = DT_SUCCESS;
	int n = 0;
	// The start polygon is always the first result (parent 0, cost 0).
	if (n < maxResult)
	{
		if (resultRef)
			resultRef[n] = startNode->id;
		if (resultParent)
			resultParent[n] = 0;
		if (resultCost)
			resultCost[n] = 0;
		++n;
	}
	else
	{
		status |= DT_BUFFER_TOO_SMALL;
	}
	while (!m_openList->empty())
	{
		dtNode* bestNode = m_openList->pop();
		bestNode->flags &= ~DT_NODE_OPEN;
		bestNode->flags |= DT_NODE_CLOSED;
		// Get poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef bestRef = bestNode->id;
		const dtMeshTile* bestTile = 0;
		const dtPoly* bestPoly = 0;
		m_nav->getTileAndPolyByRefUnsafe(bestRef, &bestTile, &bestPoly);
		// Get parent poly and tile.
		dtPolyRef parentRef = 0;
		const dtMeshTile* parentTile = 0;
		const dtPoly* parentPoly = 0;
		if (bestNode->pidx)
			parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
		if (parentRef)
			m_nav->getTileAndPolyByRefUnsafe(parentRef, &parentTile, &parentPoly);
		for (unsigned int i = bestPoly->firstLink; i != DT_NULL_LINK; i = bestTile->links[i].next)
		{
			const dtLink* link = &bestTile->links[i];
			dtPolyRef neighbourRef = link->ref;
			// Skip invalid neighbours and do not follow back to parent.
			if (!neighbourRef || neighbourRef == parentRef)
				continue;
			// Expand to neighbour
			const dtMeshTile* neighbourTile = 0;
			const dtPoly* neighbourPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(neighbourRef, &neighbourTile, &neighbourPoly);
			// Do not advance if the polygon is excluded by the filter.
			if (!filter->passFilter(neighbourRef, neighbourTile, neighbourPoly))
				continue;
			// Find edge and calc distance to the edge.
			float va[3], vb[3];
			// BUGFIX: dtStatus is a bit-flag type; DT_SUCCESS is nonzero, so the
			// previous '!getPortalPoints(...)' check could never fire and va/vb
			// were read uninitialized on failure. Use dtStatusFailed() instead.
			if (dtStatusFailed(getPortalPoints(bestRef, bestPoly, bestTile, neighbourRef, neighbourPoly, neighbourTile, va, vb)))
				continue;
			// If the shape is not touching the edge to the next polygon, skip the connection.
			float tmin, tmax;
			int segMin, segMax;
			if (!dtIntersectSegmentPoly2D(va, vb, verts, nverts, tmin, tmax, segMin, segMax))
				continue;
			if (tmin > 1.0f || tmax < 0.0f)
				continue;
			dtNode* neighbourNode = m_nodePool->getNode(neighbourRef);
			if (!neighbourNode)
			{
				status |= DT_OUT_OF_NODES;
				continue;
			}
			if (neighbourNode->flags & DT_NODE_CLOSED)
				continue;
			// Cost: first visit places the node at the portal edge midpoint.
			if (neighbourNode->flags == 0)
				dtVlerp(neighbourNode->pos, va, vb, 0.5f);
			const float total = bestNode->total + dtVdist(bestNode->pos, neighbourNode->pos);
			// The node is already in open list and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
				continue;
			neighbourNode->id = neighbourRef;
			neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
			neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
			neighbourNode->total = total;
			if (neighbourNode->flags & DT_NODE_OPEN)
			{
				m_openList->modify(neighbourNode);
			}
			else
			{
				// First time this polygon is reached: record it in the results.
				if (n < maxResult)
				{
					if (resultRef)
						resultRef[n] = neighbourNode->id;
					if (resultParent)
						resultParent[n] = m_nodePool->getNodeAtIdx(neighbourNode->pidx)->id;
					if (resultCost)
						resultCost[n] = neighbourNode->total;
					++n;
				}
				else
				{
					status |= DT_BUFFER_TOO_SMALL;
				}
				neighbourNode->flags = DT_NODE_OPEN;
				m_openList->push(neighbourNode);
			}
		}
	}
	*resultCount = n;
	return status;
}
/// @par
///
/// This method is optimized for a small search radius and small number of result
/// polygons.
///
/// Candidate polygons are found by searching the navigation graph beginning at
/// the start polygon.
///
/// The same intersection test restrictions that apply to the findPolysAroundCircle
/// method apply to this method.
///
/// The value of the center point is used as the start point for cost calculations.
/// It is not projected onto the surface of the mesh, so its y-value will affect
/// the costs.
///
/// Intersection tests occur in 2D. All polygons and the search circle are
/// projected onto the xz-plane. So the y-value of the center point does not
/// affect intersection tests.
///
/// If the result arrays are too small to hold the entire result set, they will
/// be filled to capacity.
///
dtStatus dtNavMeshQuery::findLocalNeighbourhood(dtPolyRef startRef, const float* centerPos, const float radius,
												const dtQueryFilter* filter,
												dtPolyRef* resultRef, dtPolyRef* resultParent,
												int* resultCount, const int maxResult) const
{
	dtAssert(m_nav);
	dtAssert(m_tinyNodePool);

	*resultCount = 0;

	// Validate input
	if (!startRef || !m_nav->isValidPolyRef(startRef))
		return DT_FAILURE | DT_INVALID_PARAM;

	// Breadth-first search over a small fixed-size FIFO stack; the tiny
	// node pool both allocates nodes and tracks visited state.
	static const int MAX_STACK = 48;
	dtNode* stack[MAX_STACK];
	int nstack = 0;

	m_tinyNodePool->clear();

	dtNode* startNode = m_tinyNodePool->getNode(startRef);
	startNode->pidx = 0;
	startNode->id = startRef;
	startNode->flags = DT_NODE_CLOSED;
	stack[nstack++] = startNode;

	const float radiusSqr = dtSqr(radius);

	// Scratch buffers for 2D polygon overlap tests.
	float pa[DT_VERTS_PER_POLYGON*3];
	float pb[DT_VERTS_PER_POLYGON*3];

	dtStatus status = DT_SUCCESS;

	// The start polygon is always the first result.
	int n = 0;
	if (n < maxResult)
	{
		resultRef[n] = startNode->id;
		if (resultParent)
			resultParent[n] = 0;
		++n;
	}
	else
	{
		status |= DT_BUFFER_TOO_SMALL;
	}

	while (nstack)
	{
		// Pop front (FIFO order gives a breadth-first expansion).
		dtNode* curNode = stack[0];
		for (int i = 0; i < nstack-1; ++i)
			stack[i] = stack[i+1];
		nstack--;

		// Get poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef curRef = curNode->id;
		const dtMeshTile* curTile = 0;
		const dtPoly* curPoly = 0;
		m_nav->getTileAndPolyByRefUnsafe(curRef, &curTile, &curPoly);

		for (unsigned int i = curPoly->firstLink; i != DT_NULL_LINK; i = curTile->links[i].next)
		{
			const dtLink* link = &curTile->links[i];
			dtPolyRef neighbourRef = link->ref;
			// Skip invalid neighbours.
			if (!neighbourRef)
				continue;

			// Skip if we cannot allocate more nodes (tiny pool exhausted).
			dtNode* neighbourNode = m_tinyNodePool->getNode(neighbourRef);
			if (!neighbourNode)
				continue;
			// Skip visited.
			if (neighbourNode->flags & DT_NODE_CLOSED)
				continue;

			// Expand to neighbour
			const dtMeshTile* neighbourTile = 0;
			const dtPoly* neighbourPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(neighbourRef, &neighbourTile, &neighbourPoly);

			// Skip off-mesh connections.
			if (neighbourPoly->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
				continue;

			// Do not advance if the polygon is excluded by the filter.
			if (!filter->passFilter(neighbourRef, neighbourTile, neighbourPoly))
				continue;

			// Find edge and calc distance to the edge.
			float va[3], vb[3];
			if (!getPortalPoints(curRef, curPoly, curTile, neighbourRef, neighbourPoly, neighbourTile, va, vb))
				continue;

			// If the circle is not touching the next polygon, skip it.
			float tseg;
			float distSqr = dtDistancePtSegSqr2D(centerPos, va, vb, tseg);
			if (distSqr > radiusSqr)
				continue;

			// Mark node visited, this is done before the overlap test so that
			// we will not visit the poly again if the test fails.
			neighbourNode->flags |= DT_NODE_CLOSED;
			neighbourNode->pidx = m_tinyNodePool->getNodeIdx(curNode);

			// Check that the polygon does not collide with existing polygons.

			// Collect vertices of the neighbour poly.
			const int npa = neighbourPoly->vertCount;
			for (int k = 0; k < npa; ++k)
				dtVcopy(&pa[k*3], &neighbourTile->verts[neighbourPoly->verts[k]*3]);

			bool overlap = false;
			for (int j = 0; j < n; ++j)
			{
				dtPolyRef pastRef = resultRef[j];

				// Connected polys do not overlap.
				bool connected = false;
				for (unsigned int k = curPoly->firstLink; k != DT_NULL_LINK; k = curTile->links[k].next)
				{
					if (curTile->links[k].ref == pastRef)
					{
						connected = true;
						break;
					}
				}
				if (connected)
					continue;

				// Potentially overlapping.
				const dtMeshTile* pastTile = 0;
				const dtPoly* pastPoly = 0;
				m_nav->getTileAndPolyByRefUnsafe(pastRef, &pastTile, &pastPoly);

				// Get vertices and test overlap
				const int npb = pastPoly->vertCount;
				for (int k = 0; k < npb; ++k)
					dtVcopy(&pb[k*3], &pastTile->verts[pastPoly->verts[k]*3]);

				if (dtOverlapPolyPoly2D(pa,npa, pb,npb))
				{
					overlap = true;
					break;
				}
			}
			if (overlap)
				continue;

			// This poly is fine, store and advance to the poly.
			if (n < maxResult)
			{
				resultRef[n] = neighbourRef;
				if (resultParent)
					resultParent[n] = curRef;
				++n;
			}
			else
			{
				status |= DT_BUFFER_TOO_SMALL;
			}

			if (nstack < MAX_STACK)
			{
				stack[nstack++] = neighbourNode;
			}
		}
	}

	*resultCount = n;

	return status;
}
/// A sub-span of a polygon edge, expressed as fixed-point edge parameters
/// (nominally 0..255), together with the polygon occupying that span.
/// A @c ref of 0 means the span has no neighbour (sentinel or wall).
struct dtSegInterval
{
	dtPolyRef ref;
	short tmin, tmax;
};
/// Inserts the interval [tmin,tmax], owned by polygon @p ref, into the
/// interval list @p ints while keeping the list sorted by tmin.
/// The new interval is silently dropped when the list is already full.
static void insertInterval(dtSegInterval* ints, int& nints, const int maxInts,
						   const short tmin, const short tmax, const dtPolyRef ref)
{
	// No room left for another entry; ignore the interval.
	if (nints+1 > maxInts)
		return;

	// Locate the first slot whose interval starts at or after tmax.
	int pos = 0;
	while (pos < nints && ints[pos].tmin < tmax)
		pos++;

	// Shift the tail one slot to the right to open a gap at 'pos'.
	for (int k = nints; k > pos; --k)
		ints[k] = ints[k-1];

	// Fill the gap with the new interval.
	ints[pos].ref = ref;
	ints[pos].tmin = tmin;
	ints[pos].tmax = tmax;
	nints++;
}
/// @par
///
/// If the @p segmentRefs parameter is provided, then all polygon segments will be returned.
/// Otherwise only the wall segments are returned.
///
/// A segment that is normally a portal will be included in the result set as a
/// wall if the @p filter results in the neighbor polygon becoming impassable.
///
/// The @p segmentVerts and @p segmentRefs buffers should normally be sized for the
/// maximum segments per polygon of the source navigation mesh.
///
dtStatus dtNavMeshQuery::getPolyWallSegments(dtPolyRef ref, const dtQueryFilter* filter,
											 float* segmentVerts, dtPolyRef* segmentRefs, int* segmentCount,
											 const int maxSegments) const
{
	dtAssert(m_nav);

	*segmentCount = 0;

	const dtMeshTile* tile = 0;
	const dtPoly* poly = 0;
	if (dtStatusFailed(m_nav->getTileAndPolyByRef(ref, &tile, &poly)))
		return DT_FAILURE | DT_INVALID_PARAM;

	int n = 0;
	// Intervals collect which portions of a tile-border edge are covered
	// by neighbour polygons; the uncovered gaps become wall segments.
	static const int MAX_INTERVAL = 16;
	dtSegInterval ints[MAX_INTERVAL];
	int nints;

	// Portals are only reported when the caller supplied a ref buffer.
	const bool storePortals = segmentRefs != 0;

	dtStatus status = DT_SUCCESS;

	// Walk each edge (j,i) of the polygon.
	for (int i = 0, j = (int)poly->vertCount-1; i < (int)poly->vertCount; j = i++)
	{
		// Skip non-solid edges.
		nints = 0;
		if (poly->neis[j] & DT_EXT_LINK)
		{
			// Tile border: collect the edge spans covered by passable neighbours.
			for (unsigned int k = poly->firstLink; k != DT_NULL_LINK; k = tile->links[k].next)
			{
				const dtLink* link = &tile->links[k];
				if (link->edge == j)
				{
					if (link->ref != 0)
					{
						const dtMeshTile* neiTile = 0;
						const dtPoly* neiPoly = 0;
						m_nav->getTileAndPolyByRefUnsafe(link->ref, &neiTile, &neiPoly);
						if (filter->passFilter(link->ref, neiTile, neiPoly))
						{
							insertInterval(ints, nints, MAX_INTERVAL, link->bmin, link->bmax, link->ref);
						}
					}
				}
			}
		}
		else
		{
			// Internal edge: a single neighbour at most, resolved from neis[].
			dtPolyRef neiRef = 0;
			if (poly->neis[j])
			{
				const unsigned int idx = (unsigned int)(poly->neis[j]-1);
				neiRef = m_nav->getPolyRefBase(tile) | idx;
				// A filtered-out neighbour makes the edge behave like a wall.
				if (!filter->passFilter(neiRef, tile, &tile->polys[idx]))
					neiRef = 0;
			}
			// If the edge leads to another polygon and portals are not stored, skip.
			if (neiRef != 0 && !storePortals)
				continue;
			if (n < maxSegments)
			{
				const float* vj = &tile->verts[poly->verts[j]*3];
				const float* vi = &tile->verts[poly->verts[i]*3];
				float* seg = &segmentVerts[n*6];
				dtVcopy(seg+0, vj);
				dtVcopy(seg+3, vi);
				if (segmentRefs)
					segmentRefs[n] = neiRef;
				n++;
			}
			else
			{
				status |= DT_BUFFER_TOO_SMALL;
			}
			continue;
		}
		// Add sentinels so the gap scan below also finds walls at the edge ends.
		insertInterval(ints, nints, MAX_INTERVAL, -1, 0, 0);
		insertInterval(ints, nints, MAX_INTERVAL, 255, 256, 0);
		// Store segments: each interval is a portal, each gap between
		// consecutive intervals is a wall.
		const float* vj = &tile->verts[poly->verts[j]*3];
		const float* vi = &tile->verts[poly->verts[i]*3];
		for (int k = 1; k < nints; ++k)
		{
			// Portal segment.
			if (storePortals && ints[k].ref)
			{
				const float tmin = ints[k].tmin/255.0f;
				const float tmax = ints[k].tmax/255.0f;
				if (n < maxSegments)
				{
					float* seg = &segmentVerts[n*6];
					dtVlerp(seg+0, vj,vi, tmin);
					dtVlerp(seg+3, vj,vi, tmax);
					if (segmentRefs)
						segmentRefs[n] = ints[k].ref;
					n++;
				}
				else
				{
					status |= DT_BUFFER_TOO_SMALL;
				}
			}
			// Wall segment.
			const int imin = ints[k-1].tmax;
			const int imax = ints[k].tmin;
			if (imin != imax)
			{
				const float tmin = imin/255.0f;
				const float tmax = imax/255.0f;
				if (n < maxSegments)
				{
					float* seg = &segmentVerts[n*6];
					dtVlerp(seg+0, vj,vi, tmin);
					dtVlerp(seg+3, vj,vi, tmax);
					if (segmentRefs)
						segmentRefs[n] = 0;
					n++;
				}
				else
				{
					status |= DT_BUFFER_TOO_SMALL;
				}
			}
		}
	}

	*segmentCount = n;

	return status;
}
/// @par
///
/// @p hitPos is not adjusted using the height detail data.
///
/// @p hitDist will equal the search radius if there is no wall within the
/// radius. In this case the values of @p hitPos and @p hitNormal are
/// undefined.
///
/// The normal will become unpredictable if @p hitDist is a very small number.
///
dtStatus dtNavMeshQuery::findDistanceToWall(dtPolyRef startRef, const float* centerPos, const float maxRadius,
											const dtQueryFilter* filter,
											float* hitDist, float* hitPos, float* hitNormal) const
{
	dtAssert(m_nav);
	dtAssert(m_nodePool);
	dtAssert(m_openList);

	// Validate input
	if (!startRef || !m_nav->isValidPolyRef(startRef))
		return DT_FAILURE | DT_INVALID_PARAM;

	m_nodePool->clear();
	m_openList->clear();

	// Dijkstra-style expansion outward from the center position; the
	// search radius shrinks each time a closer wall is found.
	dtNode* startNode = m_nodePool->getNode(startRef);
	dtVcopy(startNode->pos, centerPos);
	startNode->pidx = 0;
	startNode->cost = 0;
	startNode->total = 0;
	startNode->id = startRef;
	startNode->flags = DT_NODE_OPEN;
	m_openList->push(startNode);

	float radiusSqr = dtSqr(maxRadius);

	dtStatus status = DT_SUCCESS;

	while (!m_openList->empty())
	{
		dtNode* bestNode = m_openList->pop();
		bestNode->flags &= ~DT_NODE_OPEN;
		bestNode->flags |= DT_NODE_CLOSED;

		// Get poly and tile.
		// The API input has been checked already, skip checking internal data.
		const dtPolyRef bestRef = bestNode->id;
		const dtMeshTile* bestTile = 0;
		const dtPoly* bestPoly = 0;
		m_nav->getTileAndPolyByRefUnsafe(bestRef, &bestTile, &bestPoly);

		// Get parent poly and tile.
		dtPolyRef parentRef = 0;
		const dtMeshTile* parentTile = 0;
		const dtPoly* parentPoly = 0;
		if (bestNode->pidx)
			parentRef = m_nodePool->getNodeAtIdx(bestNode->pidx)->id;
		if (parentRef)
			m_nav->getTileAndPolyByRefUnsafe(parentRef, &parentTile, &parentPoly);

		// Hit test walls.
		for (int i = 0, j = (int)bestPoly->vertCount-1; i < (int)bestPoly->vertCount; j = i++)
		{
			// Skip non-solid edges.
			if (bestPoly->neis[j] & DT_EXT_LINK)
			{
				// Tile border: solid only if no passable neighbour exists across it.
				bool solid = true;
				for (unsigned int k = bestPoly->firstLink; k != DT_NULL_LINK; k = bestTile->links[k].next)
				{
					const dtLink* link = &bestTile->links[k];
					if (link->edge == j)
					{
						if (link->ref != 0)
						{
							const dtMeshTile* neiTile = 0;
							const dtPoly* neiPoly = 0;
							m_nav->getTileAndPolyByRefUnsafe(link->ref, &neiTile, &neiPoly);
							if (filter->passFilter(link->ref, neiTile, neiPoly))
								solid = false;
						}
						break;
					}
				}
				if (!solid) continue;
			}
			else if (bestPoly->neis[j])
			{
				// Internal edge: only treated as a wall when the neighbour
				// is rejected by the filter.
				const unsigned int idx = (unsigned int)(bestPoly->neis[j]-1);
				const dtPolyRef ref = m_nav->getPolyRefBase(bestTile) | idx;
				if (filter->passFilter(ref, bestTile, &bestTile->polys[idx]))
					continue;
			}

			// Calc distance to the edge.
			const float* vj = &bestTile->verts[bestPoly->verts[j]*3];
			const float* vi = &bestTile->verts[bestPoly->verts[i]*3];
			float tseg;
			float distSqr = dtDistancePtSegSqr2D(centerPos, vj, vi, tseg);

			// Edge is too far, skip.
			if (distSqr > radiusSqr)
				continue;

			// Hit wall, update (shrink) the search radius.
			radiusSqr = distSqr;
			// Calculate hit pos.
			hitPos[0] = vj[0] + (vi[0] - vj[0])*tseg;
			hitPos[1] = vj[1] + (vi[1] - vj[1])*tseg;
			hitPos[2] = vj[2] + (vi[2] - vj[2])*tseg;
		}

		for (unsigned int i = bestPoly->firstLink; i != DT_NULL_LINK; i = bestTile->links[i].next)
		{
			const dtLink* link = &bestTile->links[i];
			dtPolyRef neighbourRef = link->ref;
			// Skip invalid neighbours and do not follow back to parent.
			if (!neighbourRef || neighbourRef == parentRef)
				continue;

			// Expand to neighbour.
			const dtMeshTile* neighbourTile = 0;
			const dtPoly* neighbourPoly = 0;
			m_nav->getTileAndPolyByRefUnsafe(neighbourRef, &neighbourTile, &neighbourPoly);

			// Skip off-mesh connections.
			if (neighbourPoly->getType() == DT_POLYTYPE_OFFMESH_CONNECTION)
				continue;

			// Calc distance to the edge.
			const float* va = &bestTile->verts[bestPoly->verts[link->edge]*3];
			const float* vb = &bestTile->verts[bestPoly->verts[(link->edge+1) % bestPoly->vertCount]*3];
			float tseg;
			float distSqr = dtDistancePtSegSqr2D(centerPos, va, vb, tseg);

			// If the circle is not touching the next polygon, skip it.
			if (distSqr > radiusSqr)
				continue;

			if (!filter->passFilter(neighbourRef, neighbourTile, neighbourPoly))
				continue;

			dtNode* neighbourNode = m_nodePool->getNode(neighbourRef);
			if (!neighbourNode)
			{
				status |= DT_OUT_OF_NODES;
				continue;
			}

			if (neighbourNode->flags & DT_NODE_CLOSED)
				continue;

			// Cost: a freshly allocated node gets its position lazily.
			if (neighbourNode->flags == 0)
			{
				getEdgeMidPoint(bestRef, bestPoly, bestTile,
								neighbourRef, neighbourPoly, neighbourTile, neighbourNode->pos);
			}

			const float total = bestNode->total + dtVdist(bestNode->pos, neighbourNode->pos);

			// The node is already in open list and the new result is worse, skip.
			if ((neighbourNode->flags & DT_NODE_OPEN) && total >= neighbourNode->total)
				continue;

			neighbourNode->id = neighbourRef;
			neighbourNode->flags = (neighbourNode->flags & ~DT_NODE_CLOSED);
			neighbourNode->pidx = m_nodePool->getNodeIdx(bestNode);
			neighbourNode->total = total;

			if (neighbourNode->flags & DT_NODE_OPEN)
			{
				m_openList->modify(neighbourNode);
			}
			else
			{
				neighbourNode->flags |= DT_NODE_OPEN;
				m_openList->push(neighbourNode);
			}
		}
	}

	// Calc hit normal: points from the wall hit back toward the center.
	dtVsub(hitNormal, centerPos, hitPos);
	dtVnormalize(hitNormal);

	*hitDist = dtMathSqrtf(radiusSqr);

	return status;
}
/// Returns true when @p ref resolves to an existing polygon that also
/// passes @p filter. A ref that cannot be resolved is assumed to no
/// longer exist; a filtered-out polygon is assumed to have changed flags.
bool dtNavMeshQuery::isValidPolyRef(dtPolyRef ref, const dtQueryFilter* filter) const
{
	const dtMeshTile* tile = 0;
	const dtPoly* poly = 0;
	const dtStatus status = m_nav->getTileAndPolyByRef(ref, &tile, &poly);
	// Valid only if the polygon resolves and the filter accepts it.
	return !dtStatusFailed(status) && filter->passFilter(ref, tile, poly);
}
/// @par
///
/// The closed list is the set of polygons that were fully evaluated during
/// the last navigation graph search (A* or Dijkstra). A polygon may have
/// several node states; this returns true if any of them is closed.
///
bool dtNavMeshQuery::isInClosedList(dtPolyRef ref) const
{
	if (!m_nodePool)
		return false;

	dtNode* states[DT_MAX_STATES_PER_NODE];
	const int nstates = m_nodePool->findNodes(ref, states, DT_MAX_STATES_PER_NODE);

	for (int k = 0; k < nstates; ++k)
	{
		if (states[k]->flags & DT_NODE_CLOSED)
			return true;
	}
	return false;
}
| some2/server | dep/recastnavigation/Detour/Source/DetourNavMeshQuery.cpp | C++ | gpl-2.0 | 101,172 |
<?php
/*
+--------------------------------------------------------------------+
| CiviCRM version 4.4 |
+--------------------------------------------------------------------+
| Copyright CiviCRM LLC (c) 2004-2013 |
+--------------------------------------------------------------------+
| This file is a part of CiviCRM. |
| |
| CiviCRM is free software; you can copy, modify, and distribute it |
| under the terms of the GNU Affero General Public License |
| Version 3, 19 November 2007 and the CiviCRM Licensing Exception. |
| |
| CiviCRM is distributed in the hope that it will be useful, but |
| WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. |
| See the GNU Affero General Public License for more details. |
| |
| You should have received a copy of the GNU Affero General Public |
| License and the CiviCRM Licensing Exception along |
| with this program; if not, contact CiviCRM LLC |
| at info[AT]civicrm[DOT]org. If you have questions about the |
| GNU Affero General Public License or the licensing of CiviCRM, |
| see the CiviCRM license FAQ at http://civicrm.org/licensing |
+--------------------------------------------------------------------+
*/
/**
*
* @package CRM
* @copyright CiviCRM LLC (c) 2004-2013
*
* Generated from xml/schema/CRM/Financial/PaymentProcessor.xml
* DO NOT EDIT. Generated by GenCode.php
*/
require_once 'CRM/Core/DAO.php';
require_once 'CRM/Utils/Type.php';
class CRM_Financial_DAO_PaymentProcessor extends CRM_Core_DAO
{
  /**
   * static instance to hold the table name
   *
   * @var string
   * @static
   */
  static $_tableName = 'civicrm_payment_processor';
  /**
   * static instance to hold the field values
   *
   * @var array
   * @static
   */
  static $_fields = null;
  /**
   * static instance to hold the keys used in $_fields for each field.
   *
   * @var array
   * @static
   */
  static $_fieldKeys = null;
  /**
   * static instance to hold the FK relationships
   *
   * @var string
   * @static
   */
  static $_links = null;
  /**
   * static instance to hold the values that can
   * be imported
   *
   * @var array
   * @static
   */
  static $_import = null;
  /**
   * static instance to hold the values that can
   * be exported
   *
   * @var array
   * @static
   */
  static $_export = null;
  /**
   * static value to see if we should log any modifications to
   * this table in the civicrm_log table
   *
   * @var boolean
   * @static
   */
  static $_log = false;
  /**
   * Payment Processor ID
   *
   * @var int unsigned
   */
  public $id;
  /**
   * Which Domain is this match entry for
   *
   * @var int unsigned
   */
  public $domain_id;
  /**
   * Payment Processor Name.
   *
   * @var string
   */
  public $name;
  /**
   * Payment Processor Description.
   *
   * @var string
   */
  public $description;
  /**
   *
   * @var int unsigned
   */
  public $payment_processor_type_id;
  /**
   * Is this processor active?
   *
   * @var boolean
   */
  public $is_active;
  /**
   * Is this processor the default?
   *
   * @var boolean
   */
  public $is_default;
  /**
   * Is this processor for a test site?
   *
   * @var boolean
   */
  public $is_test;
  /**
   *
   * @var string
   */
  public $user_name;
  /**
   *
   * @var string
   */
  public $password;
  /**
   *
   * @var string
   */
  public $signature;
  /**
   *
   * @var string
   */
  public $url_site;
  /**
   *
   * @var string
   */
  public $url_api;
  /**
   *
   * @var string
   */
  public $url_recur;
  /**
   *
   * @var string
   */
  public $url_button;
  /**
   *
   * @var string
   */
  public $subject;
  /**
   *
   * @var string
   */
  public $class_name;
  /**
   * Billing Mode
   *
   * @var int unsigned
   */
  public $billing_mode;
  /**
   * Can process recurring contributions
   *
   * @var boolean
   */
  public $is_recur;
  /**
   * Payment Type: Credit or Debit
   *
   * @var int unsigned
   */
  public $payment_type;
  /**
   * class constructor
   *
   * @access public
   * @return civicrm_payment_processor
   */
  function __construct()
  {
    $this->__table = 'civicrm_payment_processor';
    parent::__construct();
  }
  /**
   * return foreign keys and entity references
   *
   * @static
   * @access public
   * @return array of CRM_Core_EntityReference
   */
  static function getReferenceColumns()
  {
    if (!self::$_links) {
      self::$_links = array(
        new CRM_Core_EntityReference(self::getTableName() , 'domain_id', 'civicrm_domain', 'id') ,
        new CRM_Core_EntityReference(self::getTableName() , 'payment_processor_type_id', 'civicrm_payment_processor_type', 'id') ,
      );
    }
    return self::$_links;
  }
  /**
   * returns all the column names of this table
   *
   * @access public
   * @return array
   */
  static function &fields()
  {
    if (!(self::$_fields)) {
      self::$_fields = array(
        'id' => array(
          'name' => 'id',
          'type' => CRM_Utils_Type::T_INT,
          'required' => true,
        ) ,
        'domain_id' => array(
          'name' => 'domain_id',
          'type' => CRM_Utils_Type::T_INT,
          'required' => true,
          'FKClassName' => 'CRM_Core_DAO_Domain',
        ) ,
        'name' => array(
          'name' => 'name',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Payment Processor') ,
          'maxlength' => 64,
          'size' => CRM_Utils_Type::BIG,
        ) ,
        'description' => array(
          'name' => 'description',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Description') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'payment_processor_type_id' => array(
          'name' => 'payment_processor_type_id',
          'type' => CRM_Utils_Type::T_INT,
          'FKClassName' => 'CRM_Financial_DAO_PaymentProcessorType',
        ) ,
        'is_active' => array(
          'name' => 'is_active',
          'type' => CRM_Utils_Type::T_BOOLEAN,
        ) ,
        'is_default' => array(
          'name' => 'is_default',
          'type' => CRM_Utils_Type::T_BOOLEAN,
        ) ,
        'is_test' => array(
          'name' => 'is_test',
          'type' => CRM_Utils_Type::T_BOOLEAN,
        ) ,
        'user_name' => array(
          'name' => 'user_name',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('User Name') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'password' => array(
          'name' => 'password',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Password') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'signature' => array(
          'name' => 'signature',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Signature') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'url_site' => array(
          'name' => 'url_site',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Site URL') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'url_api' => array(
          'name' => 'url_api',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('API URL') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'url_recur' => array(
          'name' => 'url_recur',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Recurring Payments URL') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'url_button' => array(
          'name' => 'url_button',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Button URL') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'subject' => array(
          'name' => 'subject',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Subject') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'class_name' => array(
          'name' => 'class_name',
          'type' => CRM_Utils_Type::T_STRING,
          'title' => ts('Suffix for PHP class name implementation') ,
          'maxlength' => 255,
          'size' => CRM_Utils_Type::HUGE,
        ) ,
        'billing_mode' => array(
          'name' => 'billing_mode',
          'type' => CRM_Utils_Type::T_INT,
          'title' => ts('Billing Mode') ,
          'required' => true,
        ) ,
        'is_recur' => array(
          'name' => 'is_recur',
          'type' => CRM_Utils_Type::T_BOOLEAN,
        ) ,
        'payment_type' => array(
          'name' => 'payment_type',
          'type' => CRM_Utils_Type::T_INT,
          'title' => ts('Payment Type') ,
          'default' => '1',
        ) ,
      );
    }
    return self::$_fields;
  }
  /**
   * Returns an array containing, for each field, the array key used for that
   * field in self::$_fields.
   *
   * @access public
   * @return array
   */
  static function &fieldKeys()
  {
    if (!(self::$_fieldKeys)) {
      self::$_fieldKeys = array(
        'id' => 'id',
        'domain_id' => 'domain_id',
        'name' => 'name',
        'description' => 'description',
        'payment_processor_type_id' => 'payment_processor_type_id',
        'is_active' => 'is_active',
        'is_default' => 'is_default',
        'is_test' => 'is_test',
        'user_name' => 'user_name',
        'password' => 'password',
        'signature' => 'signature',
        'url_site' => 'url_site',
        'url_api' => 'url_api',
        'url_recur' => 'url_recur',
        'url_button' => 'url_button',
        'subject' => 'subject',
        'class_name' => 'class_name',
        'billing_mode' => 'billing_mode',
        'is_recur' => 'is_recur',
        'payment_type' => 'payment_type',
      );
    }
    return self::$_fieldKeys;
  }
  /**
   * returns the name of this table
   *
   * @access public
   * @static
   * @return string
   */
  static function getTableName()
  {
    return self::$_tableName;
  }
  /**
   * returns if this table needs to be logged
   *
   * @access public
   * @return boolean
   */
  function getLog()
  {
    return self::$_log;
  }
  /**
   * returns the list of fields that can be imported
   *
   * @access public
   * @return array
   * @static
   */
  static function &import($prefix = false)
  {
    if (!(self::$_import)) {
      self::$_import = array();
      $fields = self::fields();
      foreach($fields as $name => $field) {
        if (CRM_Utils_Array::value('import', $field)) {
          if ($prefix) {
            self::$_import['payment_processor'] = & $fields[$name];
          } else {
            self::$_import[$name] = & $fields[$name];
          }
        }
      }
    }
    return self::$_import;
  }
  /**
   * returns the list of fields that can be exported
   *
   * @access public
   * @return array
   * @static
   */
  static function &export($prefix = false)
  {
    if (!(self::$_export)) {
      self::$_export = array();
      $fields = self::fields();
      foreach($fields as $name => $field) {
        if (CRM_Utils_Array::value('export', $field)) {
          if ($prefix) {
            self::$_export['payment_processor'] = & $fields[$name];
          } else {
            self::$_export[$name] = & $fields[$name];
          }
        }
      }
    }
    return self::$_export;
  }
}
| ChrisChinchilla/greenrenters.org | sites/all/modules/civicrm/CRM/Financial/DAO/PaymentProcessor.php | PHP | gpl-2.0 | 11,954 |
<?php
/*
* This file is part of EC-CUBE
*
* Copyright(c) 2000-2014 LOCKON CO.,LTD. All Rights Reserved.
*
* http://www.lockon.co.jp/
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
require_once '../require.php';
require_once CLASS_EX_REALDIR . 'page_extends/entry/LC_Page_Entry_Kiyaku_Ex.php';

// Entry point: build and run the membership-agreement (kiyaku) page
// controller; process() dispatches the request and renders the page.
$objPage = new LC_Page_Entry_Kiyaku_Ex();
$objPage->init();
$objPage->process();
| mami-sugi/eccube | html/entry/kiyaku.php | PHP | gpl-2.0 | 1,056 |
/*
Copyright_License {
XCSoar Glide Computer - http://www.xcsoar.org/
Copyright (C) 2000-2015 The XCSoar Project
A detailed list of copyright holders can be found in the file "AUTHORS".
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
}
*/
#include "NMEA/Acceleration.hpp"
/**
 * Merge data from another acceleration state. The other state's values
 * are copied when this one has no data yet, or when the other state
 * comes from a real sensor while this one does not.
 */
void
AccelerationState::Complement(const AccelerationState &add)
{
  // Nothing to take over if the other state carries no data.
  if (!add.available)
    return;

  // Prefer the other state only when it is strictly better.
  const bool other_is_better = !available || (add.real && !real);
  if (!other_is_better)
    return;

  real = add.real;
  g_load = add.g_load;
  available = add.available;
}
| ahsparrow/xcsoar_orig | src/NMEA/Acceleration.cpp | C++ | gpl-2.0 | 1,146 |
<?php
/*
* main class of User Role Editor WordPress plugin
* Author: Vladimir Garagulya
* Author email: vladimir@shinephp.com
* Author URI: http://shinephp.com
* License: GPL v2+
*
*/
class User_Role_Editor {
    // Common code stuff, including the options data processor (URE library object).
    protected $lib = null;
    // Plugin's Settings page hook suffix, returned by the add_options_page() call.
    protected $setting_page_hook = null;
    // URE's key capability: who may use this plugin ('not allowed' until initialized).
    public $key_capability = 'not allowed';
    /**
     * Class constructor: stores the library object and registers all
     * activation/deactivation, admin and multisite hooks.
     *
     * @param object $library plugin-specific library (Ure_Lib) instance
     */
    function __construct($library)
    {
        // activation action
        register_activation_hook(URE_PLUGIN_FULL_PATH, array($this, 'setup'));

        // deactivation action
        register_deactivation_hook(URE_PLUGIN_FULL_PATH, array($this, 'cleanup'));

        // get plugin specific library object
        $this->lib = $library;

        // Who may use this plugin
        $this->key_capability = $this->lib->get_key_capability();

        if ($this->lib->multisite) {
            // new blog may be registered not at admin back-end only but automatically after new user registration, e.g.
            // Gravity Forms User Registration Addon does
            add_action( 'wpmu_new_blog', array($this, 'duplicate_roles_for_new_blog'), 10, 2);
        }

        // Everything below is admin back-end only.
        if (!is_admin()) {
            return;
        }

        add_action('admin_init', array($this, 'plugin_init'), 1);

        // Add the translation function after the plugins loaded hook.
        add_action('plugins_loaded', array($this, 'load_translation'));

        // add own submenu
        add_action('admin_menu', array($this, 'plugin_menu'));

        if ($this->lib->multisite) {
            // add own submenu
            add_action('network_admin_menu', array($this, 'network_plugin_menu'));
        }

        // add a Settings link in the installed plugins page
        add_filter('plugin_action_links', array($this, 'plugin_action_links'), 10, 2);
        add_filter('plugin_row_meta', array($this, 'plugin_row_meta'), 10, 2);
    }
    // end of __construct()
    /**
     * Plugin initialization: wires up the admin-side filters and actions.
     * Runs on 'admin_init'. Non-admin users get extra filters that hide
     * administrators from them; single-site and multisite installs get
     * slightly different user-management hooks.
     */
    public function plugin_init() {

        global $current_user;

        if (!empty($current_user->ID)) {
            $user_id = $current_user->ID;
        } else {
            $user_id = 0;
        }

        // these filters and actions should prevent editing users with administrator role
        // by other users with 'edit_users' capability
        if (!$this->lib->user_is_admin($user_id)) {
            // Exclude administrator role from edit list.
            add_filter('editable_roles', array($this, 'exclude_admin_role' ) );

            // prohibit any actions with user who has Administrator role
            add_filter('user_has_cap', array($this, 'not_edit_admin' ), 10, 3);

            // exclude users with 'Administrator' role from users list
            add_action('pre_user_query', array($this, 'exclude_administrators' ) );

            // do not show 'Administrator (s)' view above users list
            add_filter('views_users', array($this, 'exclude_admins_view' ) );
        }

        add_action( 'admin_enqueue_scripts', array($this, 'admin_load_js' ) );
        add_action( 'user_row_actions', array($this, 'user_row'), 10, 2 );
        add_action( 'edit_user_profile', array($this, 'edit_user_profile'), 10, 2 );
        add_filter( 'manage_users_columns', array($this, 'user_role_column'), 10, 1 );
        add_filter( 'manage_users_custom_column', array($this, 'user_role_row'), 10, 3 );
        add_action( 'profile_update', array($this, 'user_profile_update'), 10 );
        add_filter( 'all_plugins', array($this, 'exclude_from_plugins_list' ) );

        if ($this->lib->multisite) {
            add_action( 'wpmu_activate_user', array($this, 'add_other_default_roles'), 10, 1 );
            $allow_edit_users_to_not_super_admin = $this->lib->get_option('allow_edit_users_to_not_super_admin', 0);
            if ($allow_edit_users_to_not_super_admin) {
                add_filter( 'map_meta_cap', array($this, 'restore_users_edit_caps'), 1, 4 );
                remove_all_filters( 'enable_edit_any_user_configuration' );
                add_filter( 'enable_edit_any_user_configuration', '__return_true');
                add_filter( 'admin_head', array($this, 'edit_user_permission_check'), 1, 4 );
            }
        } else {
            add_action( 'user_register', array($this, 'add_other_default_roles'), 10, 1 );
            $count_users_without_role = $this->lib->get_option('count_users_without_role', 0);
            if ($count_users_without_role) {
                add_action( 'restrict_manage_users', array($this, 'move_users_from_no_role_button') );
                add_action( 'admin_init', array($this, 'add_css_to_users_page'));
                add_action( 'admin_footer', array($this, 'add_js_to_users_page') );
            }
        }

        add_action('wp_ajax_ure_ajax', array($this, 'ure_ajax'));
    }
    // end of plugin_init()
/**
 * Render the "Without role (n)" button and its dialog on the admin users list page.
 * Shown only when there is at least one user without any role on the current blog.
 */
public function move_users_from_no_role_button() {
    global $wpdb;

    // Render only on the wp-admin users list page.
    if ( stripos($_SERVER['REQUEST_URI'], 'wp-admin/users.php')===false ) {
        return;
    }
    $id = get_current_blog_id();
    $blog_prefix = $wpdb->get_blog_prefix($id);
    // Count users which either have no capabilities record for this blog or have
    // an empty serialized capabilities array 'a:0:{}' (i.e. no role assigned).
    // Fix: the 2nd subquery referenced a hard-coded 'wp_usermeta' table which
    // breaks on installs with a custom table prefix - use $wpdb->usermeta.
    // The meta_key value is passed through $wpdb->prepare() instead of being
    // interpolated into the SQL string.
    $meta_key = $blog_prefix . 'capabilities';
    $query = $wpdb->prepare(
        "select count(ID) from {$wpdb->users} users
            where not exists (select user_id from {$wpdb->usermeta}
                                where user_id=users.ID and meta_key=%s) or
                  exists (select user_id from {$wpdb->usermeta}
                                where user_id=users.ID and meta_key=%s and meta_value='a:0:{}') ;",
        $meta_key, $meta_key);
    $users_count = $wpdb->get_var($query);
    if ($users_count>0) {
?>
    <input type="button" name="move_from_no_role" id="move_from_no_role" class="button"
           value="Without role (<?php echo (int) $users_count;?>)" onclick="ure_move_users_from_no_role_dialog()">
    <div id="move_from_no_role_dialog" class="ure-dialog">
        <div id="move_from_no_role_content" style="padding: 10px;">
            To: <select name="ure_new_role" id="ure_new_role">
                    <option value="no_rights">No rights</option>
                </select><br>
        </div>
    </div>
<?php
    }
}
// end of move_users_from_no_role()
/**
 * Enqueue stylesheets needed by the "move users from no role" dialog.
 * Applied on the wp-admin users list page only.
 */
public function add_css_to_users_page() {
    if ( stripos($_SERVER['REQUEST_URI'], 'wp-admin/users.php')===false ) {
        return;
    }
    wp_enqueue_style('wp-jquery-ui-dialog');
    wp_enqueue_style('ure-admin-css', URE_PLUGIN_URL . 'css/ure-admin.css', array(), false, 'screen');
}
/**
 * Enqueue the scripts which power the "move users from no role" dialog:
 * jQuery UI dialog, the plugin's users-page script and its localized strings.
 * Applied on the wp-admin users list page only.
 */
public function add_js_to_users_page() {
    if ( stripos($_SERVER['REQUEST_URI'], 'wp-admin/users.php')===false ) {
        return;
    }
    wp_enqueue_script('jquery-ui-dialog', false, array('jquery-ui-core','jquery-ui-button', 'jquery') );
    wp_register_script( 'ure-users-js', plugins_url( '/js/ure-users.js', URE_PLUGIN_FULL_PATH ) );
    wp_enqueue_script ( 'ure-users-js' );
    // Strings/values exposed to js/ure-users.js via the ure_users_data object.
    wp_localize_script( 'ure-users-js', 'ure_users_data', array(
        'wp_nonce' => wp_create_nonce('user-role-editor'),
        'move_from_no_role_title' => esc_html__('Change role for users without role', 'ure'),
        'no_rights_caption' => esc_html__('No rights', 'ure'),
        'provide_new_role_caption' => esc_html__('Provide new role', 'ure')
    ));
}
// end of add_js_to_users_page()
/**
 * Grant the extra default roles (configured at the plugin settings) to a user.
 * Hooked to 'user_register' (single site) or 'wpmu_activate_user' (multisite).
 *
 * @param int $user_id ID of the newly registered/activated user.
 */
public function add_other_default_roles($user_id) {
    if ( empty($user_id) ) {
        return;
    }
    $user = get_user_by('id', $user_id);
    if ( empty($user->ID) ) {
        return;
    }
    $extra_roles = $this->lib->get_option('other_default_roles', array());
    if ( count($extra_roles)==0 ) {
        return;
    }
    foreach ($extra_roles as $extra_role) {
        $user->add_role($extra_role);
    }
}
// end of add_other_default_roles()
/**
 * Restore edit_users, delete_users, create_users capabilities for
 * non-superadmin users under multisite.
 * (code is provided by http://wordpress.org/support/profile/sjobidoo)
 *
 * Replaces 'do_not_allow' verdicts with the corresponding primitive
 * capability for the user-management meta capabilities.
 *
 * @param array $caps primitive capabilities resolved so far
 * @param string $cap meta capability being mapped
 * @param int $user_id user the check is performed for (unused)
 * @param array $args extra arguments of the capability check (unused)
 * @return array possibly adjusted primitive capabilities
 */
public function restore_users_edit_caps($caps, $cap, $user_id, $args) {
    // Meta capability => primitive capability that should replace 'do_not_allow'.
    $replacements = array(
        'edit_user'    => 'edit_users',
        'edit_users'   => 'edit_users',
        'delete_user'  => 'delete_users',
        'delete_users' => 'delete_users',
        'create_users' => 'create_users',
    );
    if (!isset($replacements[$cap])) {
        return $caps;
    }
    foreach ($caps as $index => $capability) {
        if ($capability == 'do_not_allow') {
            $caps[$index] = $replacements[$cap];
        }
    }

    return $caps;
}
// end of restore_user_edit_caps()
/**
 * Checks that both the editing user and the user being edited are
 * members of the blog and prevents the super admin being edited.
 * (code is provided by http://wordpress.org/support/profile/sjobidoo)
 *
 * Calls wp_die() when the current user must not edit the profile being shown.
 */
function edit_user_permission_check() {
    global $current_user, $profileuser;

    if (is_super_admin()) { // Superadmin may do all
        return;
    }
    $screen = get_current_screen();
    // NOTE(review): get_currentuserinfo() is deprecated in newer WordPress in
    // favour of wp_get_current_user() - confirm the minimum supported WP
    // version before changing it here.
    get_currentuserinfo();
    if ($screen->base == 'user-edit' || $screen->base == 'user-edit-network') { // editing a user profile
        if (!is_super_admin($current_user->ID) && is_super_admin($profileuser->ID)) { // trying to edit a superadmin while himself is less than a superadmin
            wp_die(esc_html__('You do not have permission to edit this user.'));
        } elseif (!( is_user_member_of_blog($profileuser->ID, get_current_blog_id()) && is_user_member_of_blog($current_user->ID, get_current_blog_id()) )) { // editing user and edited user aren't members of the same blog
            wp_die(esc_html__('You do not have permission to edit this user.'));
        }
    }
}
// end of edit_user_permission_check()
/**
 * Remove the 'administrator' role from a roles list.
 *
 * @param array $roles role_id => role data map
 * @return array roles list without the administrator entry
 */
public function exclude_admin_role($roles)
{
    // unset() is a no-op when the key is absent, so no existence check is needed.
    unset($roles['administrator']);

    return $roles;
}
// end of exclude_admin_role()
/**
 * We have two vulnerable queries with user id at admin interface, which should be processed
 * 1st: http://blogdomain.com/wp-admin/user-edit.php?user_id=ID&wp_http_referer=%2Fwp-admin%2Fusers.php
 * 2nd: http://blogdomain.com/wp-admin/users.php?action=delete&user=ID&_wpnonce=ab34225a78
 * If put Administrator user ID into such request, user with lower capabilities (if he has 'edit_users')
 * can edit, delete admin record
 * This function removes 'edit_users' capability from current user capabilities
 * if request has admin user ID in it
 *
 * @param array $allcaps current user's capabilities map (cap => bool)
 * @param array $caps capabilities being checked (unused here)
 * @param string $name requested capability name (unused here)
 * @return array possibly reduced capabilities map
 */
public function not_edit_admin($allcaps, $caps, $name)
{
    // The request may carry the target user id either as 'user_id' or 'user'.
    $user_keys = array('user_id', 'user');
    foreach ($user_keys as $user_key) {
        $access_deny = false;
        $user_id = $this->lib->get_request_var($user_key, 'get');
        if (!empty($user_id)) {
            if ($user_id == 1) { // built-in WordPress Admin
                $access_deny = true;
            } else {
                if (!isset($this->lib->user_to_check[$user_id])) {
                    // check if user_id has Administrator role
                    $access_deny = $this->lib->has_administrator_role($user_id);
                } else {
                    // user_id was checked already, get result from cache
                    $access_deny = $this->lib->user_to_check[$user_id];
                }
            }
            if ($access_deny) {
                unset($allcaps['edit_users']);
            }
            break;
        }
    }

    return $allcaps;
}
// end of not_edit_admin()
/**
 * Add WHERE criteria to exclude users with the 'Administrator' role from the users list.
 *
 * @global wpdb $wpdb
 * @param WP_User_Query $user_query query being built for the users list
 */
public function exclude_administrators($user_query)
{
    global $wpdb;

    // Apply only on the user management pages.
    $result = false;
    $links_to_block = array('profile.php', 'users.php');
    foreach ( $links_to_block as $key => $value ) {
        $result = stripos($_SERVER['REQUEST_URI'], $value);
        if ( $result !== false ) {
            break;
        }
    }
    if ( $result===false ) { // block the user edit stuff only
        return;
    }

    // Collect user_id of users with the 'Administrator' role.
    $tableName = (!$this->lib->multisite && defined('CUSTOM_USER_META_TABLE')) ? CUSTOM_USER_META_TABLE : $wpdb->usermeta;
    // Pass values through $wpdb->prepare() instead of interpolating them
    // directly into the SQL string.
    $query = $wpdb->prepare(
        "select user_id
            from {$tableName}
            where meta_key=%s and meta_value like %s",
        $wpdb->prefix . 'capabilities',
        '%"administrator"%');
    $ids_arr = $wpdb->get_col($query);
    if (is_array($ids_arr) && count($ids_arr) > 0) {
        // Force integer IDs before embedding them into the WHERE clause.
        $ids = implode(',', array_map('intval', $ids_arr));
        $user_query->query_where .= " AND ( $wpdb->users.ID NOT IN ( $ids ) )";
    }
}
// end of exclude_administrators()
/*
 * Remove the 'Administrator (n)' view link shown above the users list table.
 */
public function exclude_admins_view($views) {
    if (array_key_exists('administrator', $views)) {
        unset($views['administrator']);
    }

    return $views;
}
// end of exclude_admins_view()
/**
 * Add/hide edit actions for every user row at the users list
 *
 * @global string $pagenow
 * @global WP_User $current_user
 * @param array $actions row action links (action id => HTML link)
 * @param WP_User $user user the row belongs to
 * @return array updated actions list
 */
public function user_row($actions, $user)
{
    global $pagenow, $current_user;

    if ($pagenow == 'users.php') {
        if ($current_user->has_cap($this->key_capability)) {
            // Nonce-protected link to edit this user's capabilities via URE.
            $actions['capabilities'] = '<a href="' .
                wp_nonce_url("users.php?page=users-".URE_PLUGIN_FILE."&object=user&user_id={$user->ID}", "ure_user_{$user->ID}") .
                '">' . esc_html__('Capabilities', 'ure') . '</a>';
        }
    }

    return $actions;
}
// end of user_row()
/**
 * every time when new blog created - duplicate to it roles from the main blog (1)
 *
 * @global wpdb $wpdb
 * @global WP_Roles $wp_roles
 * @param int $blog_id ID of the newly created blog
 */
public function duplicate_roles_for_new_blog($blog_id)
{
    global $wpdb, $wp_roles;

    // get Id of 1st (main) blog
    $main_blog_id = $this->lib->get_main_blog_id();
    if ( empty($main_blog_id) ) {
        return;
    }
    $current_blog = $wpdb->blogid;
    switch_to_blog( $main_blog_id );
    $main_roles = new WP_Roles(); // get roles from primary blog
    $default_role = get_option('default_role'); // get default role from primary blog
    switch_to_blog($blog_id); // switch to the new created blog
    $main_roles->use_db = false; // do not touch DB
    $main_roles->add_cap('administrator', 'dummy_123456'); // just to save current roles into new blog
    $main_roles->role_key = $wp_roles->role_key;
    $main_roles->use_db = true; // save roles into new blog DB
    $main_roles->remove_cap('administrator', 'dummy_123456'); // remove unneeded dummy capability
    update_option('default_role', $default_role); // set default role for new blog as it set for primary one
    // NOTE(review): switching back via switch_to_blog() instead of paired
    // restore_current_blog() calls leaves WordPress' switched-blog stack
    // unbalanced - confirm whether any caller on this hook relies on it.
    switch_to_blog($current_blog); // return to blog where we were at the begin
}
// end of duplicate_roles_for_new_blog()
/**
 * Filter URE out of the plugins list for users without the URE key capability.
 *
 * @param array $plugins plugins list (plugin file path => plugin data)
 * @return array updated plugins list
 */
public function exclude_from_plugins_list($plugins) {
    global $current_user;

    $ure_key_capability = $this->lib->get_key_capability();
    // If multi-site, then allow plugin activation for network superadmins and,
    // if that's specially defined, - for single site administrators too.
    if ($this->lib->user_has_capability($current_user, $ure_key_capability)) {
        return $plugins;
    }

    // Exclude URE from the plugins list. A direct key lookup replaces the
    // former loop with a loose (==) key comparison, which could mismatch
    // against numeric array keys.
    $ure_plugin_key = 'user-role-editor/' . URE_PLUGIN_FILE;
    if (isset($plugins[$ure_plugin_key])) {
        unset($plugins[$ure_plugin_key]);
    }

    return $plugins;
}
// end of exclude_from_plugins_list()
/**
 * Load plugin translation files from the plugin's /lang directory.
 * Linked to the 'plugins_loaded' action.
 */
function load_translation()
{
    load_plugin_textdomain('ure', '', dirname( plugin_basename( URE_PLUGIN_FULL_PATH ) ) .'/lang');
}
// end of ure_load_translation()
/**
 * Prepend a 'Settings' link to the plugin's action links on the Plugins page.
 *
 * @param array $links existing action links
 * @param string $file plugin file path relative to the plugins directory
 * @return array updated links list
 */
public function plugin_action_links($links, $file)
{
    if ($file == plugin_basename(dirname(URE_PLUGIN_FULL_PATH).'/'.URE_PLUGIN_FILE)) {
        $settings_link = "<a href='options-general.php?page=settings-".URE_PLUGIN_FILE."'>" . esc_html__('Settings', 'ure') . "</a>";
        array_unshift($links, $settings_link);
    }

    return $links;
}
// end of plugin_action_links()
/**
 * Append a 'Changelog' link to the plugin's row meta on the Plugins page.
 *
 * @param array $links existing row meta links
 * @param string $file plugin file path relative to the plugins directory
 * @return array updated links list
 */
public function plugin_row_meta($links, $file) {
    if ($file == plugin_basename(dirname(URE_PLUGIN_FULL_PATH) .'/'.URE_PLUGIN_FILE)) {
        $links[] = '<a target="_blank" href="http://role-editor.com/changelog">' . esc_html__('Changelog', 'ure') . '</a>';
    }

    return $links;
}
// end of plugin_row_meta
/**
 * Attach the contextual help tab to URE's settings screen.
 * Hooked to 'load-{settings page hook}'.
 */
public function settings_screen_configure() {
    $settings_page_hook = $this->settings_page_hook;
    if (is_multisite()) {
        // Network admin screens carry a '-network' suffix in their screen id.
        $settings_page_hook .= '-network';
    }
    $screen = get_current_screen();
    // Check if current screen is URE's settings page
    if ($screen->id != $settings_page_hook) {
        return;
    }
    $screen_help = new Ure_Screen_Help();
    $screen->add_help_tab( array(
        'id' => 'overview',
        'title' => esc_html__('Overview'),
        'content' => $screen_help->get_settings_help('overview')
    ));
}
// end of settings_screen_configure()
/**
 * Register the role editor page under Users and, for single site installs,
 * the settings page under Settings. Hooked to 'admin_menu'.
 */
public function plugin_menu() {
    $translated_title = esc_html__('User Role Editor', 'ure');
    if (function_exists('add_submenu_page')) {
        $ure_page = add_submenu_page(
            'users.php',
            $translated_title,
            $translated_title,
            $this->key_capability,
            'users-' . URE_PLUGIN_FILE,
            array($this, 'edit_roles'));
        add_action("admin_print_styles-$ure_page", array($this, 'admin_css_action'));
    }
    // Under multisite the settings page is registered at the network admin
    // instead - see network_plugin_menu().
    if (!$this->lib->multisite) {
        $this->settings_page_hook = add_options_page(
            $translated_title,
            $translated_title,
            $this->key_capability,
            'settings-' . URE_PLUGIN_FILE,
            array($this, 'settings'));
        add_action( 'load-'.$this->settings_page_hook, array($this,'settings_screen_configure') );
        add_action("admin_print_styles-{$this->settings_page_hook}", array($this, 'admin_css_action'));
    }
}
// end of plugin_menu()
/**
 * Register the settings page at the network admin (multisite installs only).
 * Hooked to 'network_admin_menu'.
 */
public function network_plugin_menu() {
    if (!is_multisite()) {
        return;
    }
    $translated_title = esc_html__('User Role Editor', 'ure');
    $this->settings_page_hook = add_submenu_page(
        'settings.php',
        $translated_title,
        $translated_title,
        $this->key_capability,
        'settings-' . URE_PLUGIN_FILE,
        array($this, 'settings'));
    add_action( 'load-'.$this->settings_page_hook, array($this,'settings_screen_configure') );
    add_action("admin_print_styles-{$this->settings_page_hook}", array($this, 'admin_css_action'));
}
// end of network_plugin_menu()
/**
 * Detect which settings form was submitted (if any) and verify its nonce.
 *
 * @return string pressed update button name, or 'show' when nothing was submitted
 */
protected function get_settings_action() {
    $action = 'show';
    $update_buttons = array('ure_settings_update', 'ure_addons_settings_update', 'ure_settings_ms_update', 'ure_default_roles_update');
    foreach($update_buttons as $update_button) {
        if (!isset($_POST[$update_button])) {
            continue;
        }
        // Guard against the nonce field being absent entirely, to avoid an
        // undefined-index notice before wp_verify_nonce() can reject it.
        if (!isset($_POST['_wpnonce']) || !wp_verify_nonce($_POST['_wpnonce'], 'user-role-editor')) {
            wp_die('Security check failed');
        }
        $action = $update_button;
        break;
    }

    return $action;
}
// end of get_settings_action()
/**
 * Update General Options tab: read submitted checkbox values, persist them
 * and show a confirmation message.
 */
protected function update_general_options() {
    // The URE_SHOW_ADMIN_ROLE constant (wp-config.php) overrides the form value.
    if (defined('URE_SHOW_ADMIN_ROLE') && (URE_SHOW_ADMIN_ROLE == 1)) {
        $show_admin_role = 1;
    } else {
        $show_admin_role = $this->lib->get_request_var('show_admin_role', 'checkbox');
    }
    $this->lib->put_option('show_admin_role', $show_admin_role);

    $caps_readable = $this->lib->get_request_var('caps_readable', 'checkbox');
    $this->lib->put_option('ure_caps_readable', $caps_readable);

    $show_deprecated_caps = $this->lib->get_request_var('show_deprecated_caps', 'checkbox');
    $this->lib->put_option('ure_show_deprecated_caps', $show_deprecated_caps);

    // Let the Pro version / add-ons save their own general options.
    do_action('ure_settings_update1');

    $this->lib->flush_options();
    $this->lib->show_message(esc_html__('User Role Editor options are updated', 'ure'));
}
// end of update_general_options()
/**
 * Update Additional Modules Options tab.
 */
protected function update_addons_options() {
    // The 'count users without role' add-on exists for single site installs only.
    if (!$this->lib->multisite) {
        $count_users_without_role = $this->lib->get_request_var('count_users_without_role', 'checkbox');
        $this->lib->put_option('count_users_without_role', $count_users_without_role);
    }
    // Let the Pro version / add-ons save their own options.
    do_action('ure_settings_update2');

    $this->lib->flush_options();
    $this->lib->show_message(esc_html__('User Role Editor options are updated', 'ure'));
}
// end of update_addons_options()
/**
 * Update the Default Roles tab: the primary default role (WordPress
 * 'default_role' option) and the list of extra roles automatically assigned
 * to new users. 'administrator' is never accepted as a default role.
 */
protected function update_default_roles() {
    global $wp_roles;

    // Primary default role
    $primary_default_role = $this->lib->get_request_var('default_user_role', 'post');
    if (!empty($primary_default_role) && isset($wp_roles->role_objects[$primary_default_role]) && $primary_default_role !== 'administrator') {
        update_option('default_role', $primary_default_role);
    }

    // Other default roles: checkboxes are posted under 'wp_role_<role_id>' keys
    // (the 'wp_role_' prefix is 8 characters long, hence substr(..., 0, 8)).
    $other_default_roles = array();
    foreach($_POST as $key=>$value) {
        $prefix = substr($key, 0, 8);
        if ($prefix!=='wp_role_') {
            continue;
        }
        $role_id = substr($key, 8);
        if ($role_id!=='administrator' && isset($wp_roles->role_objects[$role_id])) {
            $other_default_roles[] = $role_id;
        }
    } // foreach()
    $this->lib->put_option('other_default_roles', $other_default_roles, true);
    $this->lib->show_message(esc_html__('Default Roles are updated', 'ure'));
}
// end of update_default_roles()
/**
 * Update the Multisite tab options; no-op on single site installs.
 */
protected function update_multisite_options() {
    if (!$this->lib->multisite) {
        return;
    }
    $allow_edit_users_to_not_super_admin = $this->lib->get_request_var('allow_edit_users_to_not_super_admin', 'checkbox');
    $this->lib->put_option('allow_edit_users_to_not_super_admin', $allow_edit_users_to_not_super_admin);
    // Let the Pro version / add-ons save their own multisite options.
    do_action('ure_settings_ms_update');
    $this->lib->flush_options();
    $this->lib->show_message(esc_html__('User Role Editor options are updated', 'ure'));
}
// end of update_multisite_options()
/**
 * Render the plugin settings page and process its form submissions.
 * Dispatches on the pressed update button (nonce-checked in
 * get_settings_action()), loads current option values and includes the
 * settings template, which reads the local variables prepared here.
 */
public function settings() {
    // Deny access to users without the key capability. The former code built
    // the message string but neither printed it nor stopped execution, so the
    // page was rendered anyway - wp_die() actually enforces the check.
    if (!current_user_can($this->key_capability)) {
        wp_die(esc_html__( 'You do not have sufficient permissions to manage options for User Role Editor.', 'ure' ));
    }
    $action = $this->get_settings_action();
    switch ($action) {
        case 'ure_settings_update':
            $this->update_general_options();
            break;
        case 'ure_addons_settings_update':
            $this->update_addons_options();
            break;
        case 'ure_settings_ms_update':
            $this->update_multisite_options();
            break;
        case 'ure_default_roles_update':
            $this->update_default_roles();
            break; // added: previously fell through to 'show' (harmless no-op, but fragile)
        case 'show':
        default:
            ;
    } // switch()

    if (defined('URE_SHOW_ADMIN_ROLE') && (URE_SHOW_ADMIN_ROLE == 1)) {
        $show_admin_role = 1;
    } else {
        $show_admin_role = $this->lib->get_option('show_admin_role', 0);
    }
    $caps_readable = $this->lib->get_option('ure_caps_readable', 0);
    $show_deprecated_caps = $this->lib->get_option('ure_show_deprecated_caps', 0);
    if ($this->lib->multisite) {
        $allow_edit_users_to_not_super_admin = $this->lib->get_option('allow_edit_users_to_not_super_admin', 0);
    } else {
        $count_users_without_role = $this->lib->get_option('count_users_without_role', 0);
    }
    $this->lib->get_default_role();
    $this->lib->editor_init1();
    $this->lib->role_edit_prepare_html(0);
    $ure_tab_idx = $this->lib->get_request_var('ure_tab_idx', 'int');
    do_action('ure_settings_load');
    if ($this->lib->multisite) {
        $link = 'settings.php';
    } else {
        $link = 'options-general.php';
    }
    require_once(URE_PLUGIN_DIR . 'includes/settings-template.php');
}
// end of settings()
/**
 * Enqueue admin stylesheets for URE pages; the jQuery UI tabs skin is
 * needed on the settings page only.
 */
public function admin_css_action() {
    wp_enqueue_style('wp-jquery-ui-dialog');
    if (stripos($_SERVER['REQUEST_URI'], 'settings-user-role-editor')!==false) {
        wp_enqueue_style('ure-jquery-ui-tabs', URE_PLUGIN_URL . 'css/jquery-ui-1.10.4.custom.min.css', array(), false, 'screen');
    }
    wp_enqueue_style('ure-admin-css', URE_PLUGIN_URL . 'css/ure-admin.css', array(), false, 'screen');
}
// end of admin_css_action()
/**
 * Call the roles editor page: verify the current user carries the URE key
 * capability, then delegate rendering to the library.
 */
public function edit_roles() {
    global $current_user;

    // Note: a $user_id local derived from $current_user existed here before,
    // but it was never used - removed as dead code.
    $ure_key_capability = $this->lib->get_key_capability();
    if (!$this->lib->user_has_capability($current_user, $ure_key_capability)) {
        die(esc_html__('Insufficient permissions to work with User Role Editor', 'ure'));
    }

    $this->lib->editor();
}
// end of edit_roles()
// move old version option to the new storage 'user_role_editor' option, array, containing all URE options
/**
 * Migrate a legacy stand-alone option into the combined plugin options array.
 *
 * @param string $option_name name of the old option to convert
 */
private function convert_option($option_name) {
    $option_value = get_option($option_name, 0);
    delete_option($option_name);
    $this->lib->put_option( $option_name, $option_value );
}
/**
 * Execute on plugin activation: migrate legacy options into the combined
 * storage and back up the current roles set.
 */
function setup() {
    $this->convert_option('ure_caps_readable');
    $this->convert_option('ure_show_deprecated_caps');
    $this->convert_option('ure_hide_pro_banner');
    $this->lib->flush_options();

    $this->lib->make_roles_backup();
}
// end of setup()
/**
 * Load plugin javascript stuff for the role editor and settings pages only.
 *
 * @param string $hook_suffix current admin page hook suffix
 */
public function admin_load_js($hook_suffix){
    if (class_exists('User_Role_Editor_Pro')) {
        $ure_hook_suffixes = array('settings_page_settings-user-role-editor-pro', 'users_page_users-user-role-editor-pro');
    } else {
        $ure_hook_suffixes = array('settings_page_settings-user-role-editor', 'users_page_users-user-role-editor');
    }
    if (in_array($hook_suffix, $ure_hook_suffixes)) {
        wp_enqueue_script('jquery-ui-dialog', false, array('jquery-ui-core','jquery-ui-button', 'jquery') );
        wp_enqueue_script('jquery-ui-tabs', false, array('jquery-ui-core', 'jquery') );
        wp_register_script( 'ure-js', plugins_url( '/js/ure-js.js', URE_PLUGIN_FULL_PATH ) );
        wp_enqueue_script ( 'ure-js' );
        // Strings/values exposed to js/ure-js.js via the ure_data object.
        wp_localize_script( 'ure-js', 'ure_data', array(
            'wp_nonce' => wp_create_nonce('user-role-editor'),
            'page_url' => URE_WP_ADMIN_URL . URE_PARENT .'?page=users-'.URE_PLUGIN_FILE,
            'is_multisite' => is_multisite() ? 1 : 0,
            'select_all' => esc_html__('Select All', 'ure'),
            'unselect_all' => esc_html__('Unselect All', 'ure'),
            'reverse' => esc_html__('Reverse', 'ure'),
            'update' => esc_html__('Update', 'ure'),
            'confirm_submit' => esc_html__('Please confirm permissions update', 'ure'),
            'add_new_role_title' => esc_html__('Add New Role', 'ure'),
            'role_name_required' => esc_html__(' Role name (ID) can not be empty!', 'ure'),
            'role_name_valid_chars' => esc_html__(' Role name (ID) must contain latin characters, digits, hyphens or underscore only!', 'ure'),
            'numeric_role_name_prohibited' => esc_html__(' WordPress does not support numeric Role name (ID). Add latin characters to it.', 'ure'),
            'add_role' => esc_html__('Add Role', 'ure'),
            'delete_role' => esc_html__('Delete Role', 'ure'),
            'cancel' => esc_html__('Cancel', 'ure'),
            'add_capability' => esc_html__('Add Capability', 'ure'),
            // fix: 'delete_capability' was defined twice with the same value;
            // the duplicate array key has been removed.
            'delete_capability' => esc_html__('Delete Capability', 'ure'),
            'reset' => esc_html__('Reset', 'ure'),
            'reset_warning' => esc_html__('DANGER! Resetting will restore default settings from WordPress Core.','ure')."\n\n".
                esc_html__('If any plugins have changed capabilities in any way upon installation (such as S2Member, WooCommerce, and many more), those capabilities will be DELETED!', 'ure')."\n\n" .
                esc_html__('For more information on how to undo changes and restore plugin capabilities go to', 'ure')."\n".
                'http://role-editor.com/how-to-restore-deleted-wordpress-user-roles/'."\n\n".
                esc_html__('Continue?', 'ure'),
            'default_role' => esc_html__('Default Role', 'ure'),
            'set_new_default_role' => esc_html__('Set New Default Role', 'ure'),
            'delete_capability_warning' => esc_html__('Warning! Be careful - removing critical capability could crash some plugin or other custom code', 'ure'),
            'capability_name_required' => esc_html__(' Capability name (ID) can not be empty!', 'ure'),
            'capability_name_valid_chars' => esc_html__(' Capability name (ID) must contain latin characters, digits, hyphens or underscore only!', 'ure'),
        ) );
        // load additional JS stuff for Pro version, if exists
        do_action('ure_load_js');
    }
}
// end of admin_load_js()
/**
 * Check that we are not at the network admin center user edit page.
 *
 * @return bool true when the profile extension may be rendered
 */
protected function is_user_profile_extention_allowed() {
    // Fix: use strict comparison - stripos() returns 0 (which is == false)
    // when the needle is found at position 0, inverting the check.
    $result = stripos($_SERVER['REQUEST_URI'], 'network/user-edit.php') === false;

    return $result;
}
// end of is_user_profile_extention_allowed()
/**
 * Render the 'User Role Editor' section (other roles) at the user profile page.
 * Shown to admins only, and not at the network admin center.
 *
 * @param WP_User $user user whose profile is being edited
 */
public function edit_user_profile($user) {
    global $current_user;

    if (!$this->is_user_profile_extention_allowed()) {
        return;
    }
    if (!$this->lib->user_is_admin($current_user->ID)) {
        return;
    }
?>
<h3><?php _e('User Role Editor', 'ure'); ?></h3>
<table class="form-table">
    <tr>
        <th scope="row"><?php _e('Other Roles', 'ure'); ?></th>
        <td>
<?php
    $roles = $this->lib->other_user_roles($user);
    if (is_array($roles) && count($roles) > 0) {
        foreach ($roles as $role) {
            // Fix: escape the role id before embedding it into an HTML attribute.
            echo '<input type="hidden" name="ure_other_roles[]" value="' . esc_attr($role) . '" />';
        }
    }
    $output = $this->lib->roles_text($roles);
    echo $output . ' &gt;&gt; <a href="' . wp_nonce_url("users.php?page=users-".URE_PLUGIN_FILE."&object=user&user_id={$user->ID}", "ure_user_{$user->ID}") . '">' .
        esc_html__('Edit', 'ure') . '</a>';
?>
        </td>
    </tr>
</table>
<?php
}
// end of edit_user_profile()
/**
 * add 'Other Roles' column to WordPress users list table
 *
 * @param array $columns WordPress users list table columns list
 * @return array columns list including the 'ure_roles' column
 */
public function user_role_column($columns = array()) {
    // Key must match the one checked in user_role_row().
    $columns['ure_roles'] = esc_html__('Other Roles', 'ure');

    return $columns;
}
// end of user_role_column()
/**
 * Return user's roles list for display in the WordPress Users list table
 *
 * @param string $retval column value passed through the filter
 * @param string $column_name id of the column being rendered
 * @param int $user_id ID of the user of the current row
 *
 * @return string all user roles
 */
public function user_role_row($retval = '', $column_name = '', $user_id = 0)
{
    // Only looking for User Role Editor other user roles column
    if ('ure_roles' == $column_name) {
        $user = get_userdata($user_id);
        // Get the users roles
        $roles = $this->lib->other_user_roles($user);
        $retval = $this->lib->roles_text($roles);
    }

    // Pass retval through
    return $retval;
}
// end of user_role_row()
/**
 * Save additional user roles when user profile is updated, as WordPress
 * itself doesn't know about them. Hooked to 'profile_update'.
 *
 * @param int $user_id ID of the user whose profile was saved
 */
public function user_profile_update($user_id) {
    if (!current_user_can('edit_user', $user_id)) {
        return;
    }
    $user = get_userdata($user_id);
    if (isset($_POST['ure_other_roles'])) {
        // Submitted roles the user already has - no need to add them again.
        $new_roles = array_intersect($user->roles, $_POST['ure_other_roles']);
        $skip_roles = array();
        foreach ($new_roles as $role) {
            // Fix: was $skip_roles['$role'] - a single-quoted literal key, so
            // the skip set never matched and every role was re-added.
            $skip_roles[$role] = 1;
        }
        unset($new_roles);
        foreach ($_POST['ure_other_roles'] as $role) {
            if (!isset($skip_roles[$role])) {
                $user->add_role($role);
            }
        }
    }
}
// end of user_profile_update()
/**
 * AJAX entry point ('wp_ajax_ure_ajax'): delegate to the AJAX processor.
 */
public function ure_ajax() {
    require_once(URE_PLUGIN_DIR . 'includes/class-ajax-processor.php');
    $ajax_processor = new URE_Ajax_Processor($this->lib);
    $ajax_processor->dispatch();
}
// end of ure_ajax()
// execute on plugin deactivation
function cleanup()
{
    // Intentionally empty: nothing to clean up on deactivation.
}
// end of cleanup()
}
// end of User_Role_Editor
| Mashpy/foraminul | wp-content/plugins/user-role-editor/includes/class-user-role-editor.php | PHP | gpl-2.0 | 35,852 |
<?php
/**
 * ICE API: widget extensions, debugger template file
 *
 * Renders the widget frame (header + content) and initializes the content
 * area as a jsTree built from the rendered HTML markup.
 *
 * @author Marshall Sorenson <marshall@presscrew.com>
 * @link http://infinity.presscrew.com/
 * @copyright Copyright (C) 2010-2011 Marshall Sorenson
 * @license http://www.gnu.org/licenses/gpl.html GPLv2 or later
 * @package ICE-extensions
 * @subpackage widgets
 * @since 1.0
 */

/* @var $this ICE_Widget_Renderer */
?>
<div <?php $this->render_attrs( 'ui-widget' ) ?>>
	<div class="ui-widget-header">
		<?php $this->render_title() ?>
	</div>
	<div class="ui-widget-content">
		<?php $this->component()->render_items() ?>
	</div>
</div>
<script type="text/javascript">
	// Build the tree from the existing HTML markup, apply ThemeRoller
	// styling, and disable open/close animations.
	jQuery('div#<?php $this->render_id() ?> div.ui-widget-content')
		.jstree({
			'plugins': ['html_data','themeroller'],
			'core': {'animation': 0}
		});
</script>
<?php
/**
* @file
* Definition of Drupal\aggregator\Tests\UpdateFeedTest.
*/
namespace Drupal\aggregator\Tests;
/**
 * Tests functionality of updating the feed in the Aggregator module.
 */
class UpdateFeedTest extends AggregatorTestBase {
  /**
   * Declares test metadata for the Drupal (SimpleTest-style) test runner.
   */
  public static function getInfo() {
    return array(
      'name' => 'Update feed functionality',
      'description' => 'Update feed test.',
      'group' => 'Aggregator'
    );
  }

  /**
   * Creates a feed and attempts to update it.
   *
   * Runs the scenario three times; each pass keeps one field (title, url,
   * or none) identical to the original feed so that uniqueness validation
   * is exercised against an unchanged own value as well.
   */
  function testUpdateFeed() {
    $remaining_fields = array('title[0][value]', 'url[0][value]', '');
    foreach ($remaining_fields as $same_field) {
      $feed = $this->createFeed();

      // Get new feed data array and modify newly created feed.
      $edit = $this->getFeedEditArray();
      $edit['refresh'] = 1800; // Change refresh value.
      if (isset($feed->{$same_field}->value)) {
        $edit[$same_field] = $feed->{$same_field}->value;
      }
      $this->drupalPostForm('aggregator/sources/' . $feed->id() . '/configure', $edit, t('Save'));
      $this->assertRaw(t('The feed %name has been updated.', array('%name' => $edit['title[0][value]'])), format_string('The feed %name has been updated.', array('%name' => $edit['title[0][value]'])));

      // Check feed data.
      $this->assertEqual($this->getUrl(), url('aggregator/sources/' . $feed->id(), array('absolute' => TRUE)));
      $this->assertTrue($this->uniqueFeed($edit['title[0][value]'], $edit['url[0][value]']), 'The feed is unique.');

      // Check feed source.
      $this->drupalGet('aggregator/sources/' . $feed->id());
      $this->assertResponse(200, 'Feed source exists.');
      $this->assertText($edit['title[0][value]'], 'Page title');

      // Delete feed.
      $feed->title = $edit['title[0][value]']; // Set correct title so deleteFeed() will work.
      $this->deleteFeed($feed);
    }
  }
}
| drupaals/demo.com | d8/core/modules/aggregator/src/Tests/UpdateFeedTest.php | PHP | gpl-2.0 | 1,884 |
// Checker Framework test fixture: the "//:: error:" comment below declares the
// compiler diagnostic expected on the following line. UnimportedClass is
// deliberately left unresolved - do not "fix" this file by adding an import.
class UnimportedExtends2 {
    //:: error: cannot find symbol
    class Inner extends UnimportedClass {}
}
| Jianchu/checker-framework | framework/tests/framework/UnimportedExtends2.java | Java | gpl-2.0 | 107 |
package com.baidu.disconf.client.core.processor.impl;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.baidu.disconf.client.common.model.DisConfCommonModel;
import com.baidu.disconf.client.common.model.DisconfCenterItem;
import com.baidu.disconf.client.config.DisClientConfig;
import com.baidu.disconf.client.core.processor.DisconfCoreProcessor;
import com.baidu.disconf.client.fetcher.FetcherMgr;
import com.baidu.disconf.client.store.DisconfStoreProcessor;
import com.baidu.disconf.client.store.DisconfStoreProcessorFactory;
import com.baidu.disconf.client.store.processor.model.DisconfValue;
import com.baidu.disconf.client.watch.WatchMgr;
import com.baidu.disconf.core.common.constants.DisConfigTypeEnum;
/**
 * Config item processor implementation: downloads config item values from the
 * disconf server, stores them into the local item store, registers watches for
 * remote changes and injects updated values into the target instance fields.
 *
 * @author liaoqiqi
 * @version 2014-8-4
 */
public class DisconfItemCoreProcessorImpl implements DisconfCoreProcessor {

    protected static final Logger LOGGER = LoggerFactory.getLogger(DisconfItemCoreProcessorImpl.class);

    // watch manager; may be null, in which case watching is skipped with a warning
    private WatchMgr watchMgr = null;

    // fetcher used to download configuration values from the disconf server
    private FetcherMgr fetcherMgr = null;

    // store processor dedicated to config items
    private DisconfStoreProcessor disconfStoreProcessor = DisconfStoreProcessorFactory.getDisconfStoreItemProcessor();

    public DisconfItemCoreProcessorImpl(WatchMgr watchMgr, FetcherMgr fetcherMgr) {
        this.fetcherMgr = fetcherMgr;
        this.watchMgr = watchMgr;
    }

    /**
     * Processes every config item key registered in the store.
     */
    @Override
    public void processAllItems() {

        /**
         * process the whole list of config items
         */
        for (String key : disconfStoreProcessor.getConfKeySet()) {
            processOneItem(key);
        }
    }

    @Override
    public void processOneItem(String key) {

        LOGGER.debug("==============\tstart to process disconf item: " + key + "\t=============================");

        DisconfCenterItem disconfCenterItem = (DisconfCenterItem) disconfStoreProcessor.getConfData(key);

        if (disconfCenterItem != null) {
            try {
                updateOneConfItem(key, disconfCenterItem);
            } catch (Exception e) {
                LOGGER.error(e.toString(), e);
            }
        }
    }

    /**
     * Updates one config: refreshes the store and re-injects the value into the instance.
     */
    private void updateOneConf(String keyName) throws Exception {

        DisconfCenterItem disconfCenterItem = (DisconfCenterItem) disconfStoreProcessor.getConfData(keyName);

        if (disconfCenterItem != null) {

            // update the store
            updateOneConfItem(keyName, disconfCenterItem);

            // update the instance field
            inject2OneConf(keyName, disconfCenterItem);
        }
    }

    /**
     * Updates one config item: downloads the remote value (when disconf is
     * enabled), stores it, and registers a watch for further changes.
     */
    private void updateOneConfItem(String keyName, DisconfCenterItem disconfCenterItem) throws Exception {

        if (disconfCenterItem == null) {
            throw new Exception("cannot find disconfCenterItem " + keyName);
        }

        String value = null;

        //
        // remote download is needed only when disconf is enabled;
        // otherwise the local default value is used
        //
        if (DisClientConfig.getInstance().ENABLE_DISCONF) {

            //
            // download the configuration value
            //
            try {

                String url = disconfCenterItem.getRemoteServerUrl();
                value = fetcherMgr.getValueFromServer(url);
                if (value != null) {
                    LOGGER.debug("value: " + value);
                }

            } catch (Exception e) {

                LOGGER.error("cannot use remote configuration: " + keyName, e);
                LOGGER.info("using local variable: " + keyName);
            }

            LOGGER.debug("download ok.");
        }

        //
        // inject the value into the store
        //
        disconfStoreProcessor.inject2Store(keyName, new DisconfValue(value, null));
        LOGGER.debug("inject ok.");

        //
        // Watch
        //
        if (DisClientConfig.getInstance().ENABLE_DISCONF) {
            if (watchMgr != null) {

                DisConfCommonModel disConfCommonModel = disconfStoreProcessor.getCommonModel(keyName);
                watchMgr.watchPath(this, disConfCommonModel, keyName, DisConfigTypeEnum.ITEM, value);
                LOGGER.debug("watch ok.");

            } else {

                LOGGER.warn("cannot monitor {} because watch mgr is null", keyName);
            }
        }
    }

    /**
     * Update notification: refresh one config and run its callbacks.
     */
    @Override
    public void updateOneConfAndCallback(String key) throws Exception {

        // update the config
        updateOneConf(key);

        // run the callback
        DisconfCoreProcessUtils.callOneConf(disconfStoreProcessor, key);
    }

    /**
     * Injects one config item value into its target instance field.
     */
    private void inject2OneConf(String key, DisconfCenterItem disconfCenterItem) {

        if (disconfCenterItem == null) {
            return;
        }

        try {

            Object object = null;

            Field field = disconfCenterItem.getField();

            //
            // non-static fields need the owning bean instance
            //
            if (!Modifier.isStatic(field.getModifiers())) {
                object = DisconfCoreProcessUtils.getSpringBean(field.getDeclaringClass());
            }

            disconfStoreProcessor.inject2Instance(object, key);

        } catch (Exception e) {
            LOGGER.warn(e.toString(), e);
        }
    }

    /**
     * Injects every stored config item value into its target instance field.
     */
    @Override
    public void inject2Conf() {

        /**
         * process the whole list of config items
         */
        for (String key : disconfStoreProcessor.getConfKeySet()) {

            LOGGER.debug("==============\tstart to inject value to disconf item instance: " + key +
                    "\t=============================");

            DisconfCenterItem disconfCenterItem = (DisconfCenterItem) disconfStoreProcessor.getConfData(key);
            inject2OneConf(key, disconfCenterItem);
        }
    }

}
/**@license boxplus image transition engine
* @author Levente Hunyadi
* @version 1.4.2
* @remarks Copyright (C) 2009-2010 Levente Hunyadi
* @remarks Licensed under GNU/GPLv3, see http://www.gnu.org/licenses/gpl-3.0.html
* @see http://hunyadi.info.hu/projects/boxplus
**/
/*
* boxplus: a lightweight pop-up window engine shipped with sigplus
* Copyright 2009-2010 Levente Hunyadi
*
* boxplus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* boxplus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with boxplus. If not, see <http://www.gnu.org/licenses/>.
*/
// Capture the global jQuery object under a private alias so the plugin
// keeps working even if jQuery.noConflict() is invoked afterwards.
if (typeof(__jQuery__) == 'undefined') {
    var __jQuery__ = jQuery;
}
(function ($) {
// CSS class marking disabled navigation controls.
var CLASS_DISABLED = 'boxplus-disabled';
// Local aliases of frequently used Math helpers.
var max = Math.max;
var floor = Math.floor;
var ceil = Math.ceil;
/**
 * Maximum computed width of matched elements including margin, border and padding.
 * Returns 0 for an empty selection.
 */
$.fn.maxWidth = function () {
    var widths = this.map(function () {
        return $(this).safeWidth();
    }).get();
    // concat(0) both handles the empty selection and clamps the result at >= 0,
    // matching the original accumulator that started from 0.
    return max.apply(null, widths.concat(0));
};
/**
 * Maximum computed height of matched elements including margin, border and padding.
 * Returns 0 for an empty selection.
 */
$.fn.maxHeight = function () {
    var heights = this.map(function () {
        return $(this).safeHeight();
    }).get();
    // concat(0) both handles the empty selection and clamps the result at >= 0,
    // matching the original accumulator that started from 0.
    return max.apply(null, heights.concat(0));
};
/**
 * "Safe" dimension of an element.
 * Some browsers give invalid values with .width() but others give the meaningless
 * value "auto" with .css('width'); this function bridges the differences.
 *
 * @param obj jQuery object whose dimension to measure.
 * @param dim Either 'width' or 'height'.
 */
function _safeDimension(obj, dim) {
    // Parse with an explicit radix: css() values like "120px" parse cleanly,
    // while "auto" yields NaN and falls back to .width()/.height().
    // (Without the radix, old engines could apply octal parsing rules.)
    var cssvalue = parseInt(obj.css(dim), 10);
    return isNaN(cssvalue) ? obj[dim]() : cssvalue;
}

/** Computed width of the first matched element (see _safeDimension). */
$.fn.safeWidth = function () {
    return _safeDimension(this, 'width');
};

/** Computed height of the first matched element (see _safeDimension). */
$.fn.safeHeight = function () {
    return _safeDimension(this, 'height');
};
/**
 * Creates a new image slider from a collection of images.
 * The method should be called on a ul or ol element that wraps a set of li elements.
 *
 * Fixes over the stock implementation:
 * - "prev" stepping uses a wrap-safe modulo: in JavaScript (-1) % n === -1,
 *   so repeated backwards navigation could drive the index ever more negative
 *   until items.eq() returned an empty selection (eq(-k) only tolerates k <= n).
 * - the per-bar animation honours settings.duration instead of a hard-coded 500 ms.
 */
$.fn.boxplusTransition = function (settings) {
    // default configuration properties
    var defaults = {
        navigation: 'horizontal',  // orientation of navigation buttons, or do not show navigation buttons at all ['horizontal'|'vertical'|false]
        loop: true,                // whether the image sequence loops such that the first image follows the last [true|false]
        contextmenu: true,         // whether the context menu appears when right-clicking an image [true|false]
        orientation: 'vertical',   // alignment of bars used in transition ['vertical'|'horizontal']
        slices: 15,                // number of bars to use in transition animation
        effect: 'fade',            // image transition effect ['fade'|'bars'|'bars+fade'|'shutter'|'shutter+fade']
        easing: 'swing',
        duration: 500,             // duration for transition animation [ms]
        delay: 4000                // delay between successive animation steps [ms]
    };
    settings = $.extend(defaults, settings);

    var lists = this.filter('ul, ol');  // filter elements that are not lists

    // iterate over elements if invoked on an element collection
    lists.each(function () {
        // short-hand access to settings
        var isNavigationVertical = settings.navigation == 'vertical';
        var isOrientationHorizontal = settings.orientation == 'horizontal';
        var sliceCount = settings.slices;
        var duration = settings.duration;
        var delay = settings.delay;

        // status information
        var sliderIndexPosition = 0;  // index of item currently shown
        var animation = false;        // true if an animation is in progress

        // DOM elements
        var list = $(this).wrap('<div />').before('<div />').addClass('boxplus-hidden');
        var wrapper = list.parent().addClass('boxplus-wrapper');
        var items = $('li', list).css({
            position: 'absolute',
            left: 0,
            top: 0
        }).find('img:first');

        // forces following an anchor (in a cancellable way) even when click event is triggered with jQuery
        items.parent('a').click(function (event) {
            if (!event.isDefaultPrevented()) {
                location.href = this.href;
            }
        });
        var container = list.prev().addClass('boxplus-transition').addClass(CLASS_DISABLED).click(function () {
            items.eq(sliderIndexPosition).parent('a').click();  // when an image is clicked, the anchor wrapping the original image (if any) should be followed
        });

        // get maximum width and height of image slider items
        var itemCount = items.length;
        var itemWidth = items.maxWidth();
        var itemHeight = items.maxHeight();

        // set width and height of image container
        wrapper.add(container).css({
            width: itemWidth,
            height: itemHeight
        });

        switch (settings.navigation) {
            case 'horizontal': case 'vertical':
                var cls = 'boxplus-' + settings.navigation;
                container.addClass(cls);

                // setup overlay navigation controls
                function _addButton(cls) {
                    return '<div class="boxplus-' + cls + '" />';
                }
                container.prepend(
                    $(_addButton('prev') + _addButton('next')).addClass(cls).addClass(
                        (isNavigationVertical ? itemWidth : itemHeight) < 120 ? 'boxplus-small' : 'boxplus-large'
                    )
                );

                // bind events for navigation controls
                $('.boxplus-prev', container).click(scrollPrevious);
                $('.boxplus-next', container).click(scrollNext);
        }

        if (!settings.contextmenu) {
            $(document).bind('contextmenu', function (event) {  // subscribe to right-click event
                return !container.children().add(container).filter(event.target).size();  // prevent right-click on image
            });
        }

        // add bars to container for animation
        var sliceDim = (isOrientationHorizontal ? itemHeight : itemWidth) / sliceCount;
        for (var sliceIndex = 0; sliceIndex < sliceCount; sliceIndex++) {
            var sliceOffset = floor(sliceIndex*sliceDim);
            $('<div class="boxplus-transition-bars" />').css({
                left: isOrientationHorizontal ? 0 : sliceOffset,
                top: isOrientationHorizontal ? sliceOffset : 0,
                height: isOrientationHorizontal ? sliceDim : itemHeight,
                width: isOrientationHorizontal ? itemWidth : sliceDim,
                visibility: 'hidden'
            }).appendTo(container);
        }

        // update visibility of navigation controls
        _updatePaging();
        container.removeClass(CLASS_DISABLED);
        scrollFirst();

        // slider animation
        if (delay > 0) {
            delay = max(delay, duration + 500);
            var intervalID = window.setInterval(scrollNext, delay);

            // stop animation when mouse moves over an image
            container.mouseover(function () {
                window.clearInterval(intervalID);
            }).mouseout(function () {
                intervalID = window.setInterval(scrollNext, delay);
            });
        }

        //
        // Callback functions
        //

        function scrollFirst() {
            return scroll('first');
        }

        function scrollPrevious() {
            return scroll('prev');
        }

        function scrollNext() {
            return scroll('next');
        }

        function scrollLast() {
            return scroll('last');
        }

        /**
         * Sets the image shown as the background image of elements.
         * @param e The element whose background-image property to set.
         * @param x Horizontal offset of the element within the container.
         * @param y Vertical offset of the element within the container.
         */
        function _setImage(e, x, y) {
            var item = items.eq(sliderIndexPosition);  // item to be shown
            e.css({
                backgroundImage: 'url("' + item.attr('src') + '")',
                backgroundPosition: ((itemWidth - item.safeWidth()) / 2 - x) + 'px ' + ((itemHeight - item.safeHeight()) / 2 - y) + 'px'
            });
        }

        /**
         * Preloads an image for later display.
         * @param item The element to use to acquire the URL of the image.
         */
        function _preloadImage(item) {
            var longdesc = item.attr('longdesc');
            if (longdesc) {  // higher-resolution image is available
                item.attr('src', longdesc).attr('longdesc', '');
            }
        }

        function _preloadImages() {
            _preloadImage(items.eq(sliderIndexPosition));
            // wrap-safe neighbor indices (avoid a negative remainder at index 0)
            _preloadImage(items.eq((sliderIndexPosition - 1 + itemCount) % itemCount));
            _preloadImage(items.eq((sliderIndexPosition + 1) % itemCount));
        }

        /**
         * Execute image transition.
         */
        function scroll(dir) {
            var bars = $('.boxplus-transition-bars', container);
            if (animation) {  // clear ongoing transitions
                _setImage(container, 0, 0);
                bars.clearQueue().stop().css('visibility', 'hidden');
            }
            animation = true;  // indicate an ongoing transition
            switch (dir) {
                case 'first':
                    sliderIndexPosition = 0; break;
                case 'prev':
                    // add itemCount before taking the remainder so the index
                    // stays within [0, itemCount) even when stepping back from 0
                    sliderIndexPosition = (sliderIndexPosition - 1 + itemCount) % itemCount; break;
                case 'next':
                    sliderIndexPosition = (sliderIndexPosition + 1) % itemCount; break;
                case 'last':
                    sliderIndexPosition = itemCount - 1; break;
                default:
                    return;
            };
            _updatePaging();
            _preloadImages();

            bars.css({  // reset bars background image, height, width, opacity, etc.
                opacity: 1
            }).each(function (index) {  // set the image shown as the background image of bars with computing offset position
                var bar = $(this);
                var dim = ceil(index*sliceDim+sliceDim) - floor(index*sliceDim);
                bar.css({
                    height: isOrientationHorizontal ? dim : itemHeight,
                    width: isOrientationHorizontal ? itemWidth : dim
                });
                var position = bar.position();
                _setImage(bar, position.left, position.top);
            });

            function _transitionFade() {
                bars.css('opacity', 0).show();
                return {opacity: 1};
            }

            function _transitionBars() {
                bars.css(isOrientationHorizontal ? 'width' : 'height', 0);
                if (isOrientationHorizontal) {
                    return {width: itemWidth};
                } else {
                    return {height: itemHeight};
                }
            }

            function _transitionShutter() {
                bars.css(isOrientationHorizontal ? 'height' : 'width', 0);
                if (isOrientationHorizontal) {
                    return {height: ceil(sliceDim)};
                } else {
                    return {width: ceil(sliceDim)};
                }
            }

            var target;
            switch (settings.effect) {
                case 'fade':
                    target = _transitionFade(); break;
                case 'bars':
                    target = _transitionBars(); break;
                case 'bars+fade':
                    target = $.extend(_transitionBars(), _transitionFade()); break;
                case 'shutter':
                    target = _transitionShutter(); break;
                case 'shutter+fade':
                    target = $.extend(_transitionShutter(), _transitionFade()); break;
            }
            bars.css('visibility', 'visible');

            // function to arrange bars in a specific order
            var ordfun = function (index) { return index; };
            switch (dir) {
                case 'first': case 'prev':
                    ordfun = function (index) { return sliceCount-1-index; }; break;
            }

            // register animation events for bars
            bars.each(function (index) {
                var k = ordfun(index);
                var options = {
                    duration: duration,  // honour settings.duration (was hard-coded 500)
                    easing: settings.easing
                };
                if (k == sliceCount-1) {
                    $.extend(options, {
                        complete: function () {
                            animation = false;
                            _setImage(container, 0, 0);
                            bars.css('visibility', 'hidden');
                        }
                    });
                }

                // fire animation after an initial delay
                $(this).delay(k * duration / sliceCount).animate(target, options);
            });
            return false;  // prevent event propagation
        }

        /**
         * Update which navigation links are enabled.
         */
        function _updatePaging() {
            if (!settings.loop) {
                $('.boxplus-prev', container).toggleClass(CLASS_DISABLED, sliderIndexPosition <= 0);
                $('.boxplus-next', container).toggleClass(CLASS_DISABLED, sliderIndexPosition >= itemCount-1);
            }
        }
    });

    return this;  // support chaining
};
})(__jQuery__); | zincheto/Joomla-Online-Store | plugins/content/sigplus/engines/boxplus/slider/js/boxplus.transition.js | JavaScript | gpl-2.0 | 11,823 |
/* ----------------------------------------------------------------------
LAMMPS - Large-scale Atomic/Molecular Massively Parallel Simulator
http://lammps.sandia.gov, Sandia National Laboratories
Steve Plimpton, sjplimp@sandia.gov
Copyright (2003) Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
certain rights in this software. This software is distributed under
the GNU General Public License.
See the README file in the top-level LAMMPS directory.
------------------------------------------------------------------------- */
#include <string.h>
#include <stdlib.h>
#include <math.h>
#include "fix_temp_rescale.h"
#include "atom.h"
#include "force.h"
#include "group.h"
#include "update.h"
#include "domain.h"
#include "region.h"
#include "comm.h"
#include "input.h"
#include "variable.h"
#include "modify.h"
#include "compute.h"
#include "error.h"
using namespace LAMMPS_NS;
using namespace FixConst;
enum{NOBIAS,BIAS};
enum{CONSTANT,EQUAL};
/* ---------------------------------------------------------------------- */
// Parse "fix ID group temp/rescale Nevery Tstart Tstop window fraction".
// Tstart may be given as an equal-style variable ("v_name"); the target
// temperature is then evaluated each time the fix is applied.
FixTempRescale::FixTempRescale(LAMMPS *lmp, int narg, char **arg) :
  Fix(lmp, narg, arg),
  tstr(NULL), id_temp(NULL), tflag(0)
{
  if (narg < 8) error->all(FLERR,"Illegal fix temp/rescale command");

  nevery = force->inumeric(FLERR,arg[3]);   // rescale every N steps
  if (nevery <= 0) error->all(FLERR,"Illegal fix temp/rescale command");

  // this fix reports an extensive scalar (cumulative energy change)
  scalar_flag = 1;
  global_freq = nevery;
  extscalar = 1;

  tstr = NULL;
  if (strstr(arg[4],"v_") == arg[4]) {
    // variable-style start temperature: store the variable name (skip "v_")
    int n = strlen(&arg[4][2]) + 1;
    tstr = new char[n];
    strcpy(tstr,&arg[4][2]);
    tstyle = EQUAL;
  } else {
    t_start = force->numeric(FLERR,arg[4]);
    t_target = t_start;
    tstyle = CONSTANT;
  }

  t_stop = force->numeric(FLERR,arg[5]);     // final target temperature
  t_window = force->numeric(FLERR,arg[6]);   // rescale only if |T - Ttarget| > window
  fraction = force->numeric(FLERR,arg[7]);   // fraction of the deviation to remove

  // create a new compute of style "temp" owned by this fix
  // id = fix-ID + "_temp", compute group = fix group

  int n = strlen(id) + 6;
  id_temp = new char[n];
  strcpy(id_temp,id);
  strcat(id_temp,"_temp");

  char **newarg = new char*[6];   // only 3 slots are used below
  newarg[0] = id_temp;
  newarg[1] = group->names[igroup];
  newarg[2] = (char *) "temp";
  modify->add_compute(3,newarg);
  delete [] newarg;
  tflag = 1;    // remember that this fix owns the temperature compute

  energy = 0.0; // cumulative kinetic energy removed/added so far
}
/* ---------------------------------------------------------------------- */
// Release the variable name and, if this fix created its own temperature
// compute, remove that compute from the Modify list.
FixTempRescale::~FixTempRescale()
{
  delete [] tstr;

  // delete temperature if fix created it

  if (tflag) modify->delete_compute(id_temp);
  delete [] id_temp;
}
/* ---------------------------------------------------------------------- */
// This fix runs at the end of each timestep and contributes to the
// thermodynamic energy output.
int FixTempRescale::setmask()
{
  // combine the flags directly instead of or-ing them in one at a time
  return END_OF_STEP | THERMO_ENERGY;
}
/* ---------------------------------------------------------------------- */
// Resolve the target-temperature variable (if any), locate the temperature
// compute, and decide whether a velocity bias must be removed before scaling.
void FixTempRescale::init()
{
  // check variable

  if (tstr) {
    tvar = input->variable->find(tstr);
    if (tvar < 0)
      error->all(FLERR,"Variable name for fix temp/rescale does not exist");
    if (input->variable->equalstyle(tvar)) tstyle = EQUAL;
    else error->all(FLERR,"Variable for fix temp/rescale is invalid style");
  }

  int icompute = modify->find_compute(id_temp);
  if (icompute < 0)
    error->all(FLERR,"Temperature ID for fix temp/rescale does not exist");
  temperature = modify->compute[icompute];

  // if the compute removes a bias (e.g. streaming velocity),
  // rescale only the thermal part of the velocities
  if (temperature->tempbias) which = BIAS;
  else which = NOBIAS;
}
/* ---------------------------------------------------------------------- */
// Rescale velocities of group atoms toward the current target temperature
// whenever the measured temperature deviates from it by more than t_window.
void FixTempRescale::end_of_step()
{
  double t_current = temperature->compute_scalar();

  // there is nothing to do, if there are no degrees of freedom

  if (temperature->dof < 1) return;

  // protect against division by zero

  if (t_current == 0.0)
    error->all(FLERR,"Computed temperature for fix temp/rescale cannot be 0.0");

  // delta = fractional progress through the run, used to ramp Tstart -> Tstop
  double delta = update->ntimestep - update->beginstep;
  if (delta != 0.0) delta /= update->endstep - update->beginstep;

  // set current t_target
  // if variable temp, evaluate variable, wrap with clear/add

  if (tstyle == CONSTANT)
    t_target = t_start + delta * (t_stop-t_start);
  else {
    modify->clearstep_compute();
    t_target = input->variable->compute_equal(tvar);
    if (t_target < 0.0)
      error->one(FLERR,
                 "Fix temp/rescale variable returned negative temperature");
    modify->addstep_compute(update->ntimestep + nevery);
  }

  // rescale velocity of appropriate atoms if outside window
  // for BIAS:
  //   temperature is current, so do not need to re-compute
  //   OK to not test returned v = 0, since factor is multiplied by v

  if (fabs(t_current-t_target) > t_window) {
    // move only a fraction of the way toward the target, then rescale by
    // the square root of the temperature ratio
    t_target = t_current - fraction*(t_current-t_target);
    double factor = sqrt(t_target/t_current);
    double efactor = 0.5 * force->boltz * temperature->dof;

    double **v = atom->v;
    int *mask = atom->mask;
    int nlocal = atom->nlocal;

    // accumulate the kinetic energy removed (positive) or added (negative)
    energy += (t_current-t_target) * efactor;

    if (which == NOBIAS) {
      for (int i = 0; i < nlocal; i++) {
        if (mask[i] & groupbit) {
          v[i][0] *= factor;
          v[i][1] *= factor;
          v[i][2] *= factor;
        }
      }
    } else {
      // remove the bias, scale only the thermal component, restore the bias
      for (int i = 0; i < nlocal; i++) {
        if (mask[i] & groupbit) {
          temperature->remove_bias(i,v[i]);
          v[i][0] *= factor;
          v[i][1] *= factor;
          v[i][2] *= factor;
          temperature->restore_bias(i,v[i]);
        }
      }
    }
  }
}
/* ---------------------------------------------------------------------- */
// Handle "fix_modify <ID> temp <computeID>": replace the temperature compute
// used by this fix with a user-specified one.  Returns the number of
// arguments consumed (0 if the keyword is not recognized here).
int FixTempRescale::modify_param(int narg, char **arg)
{
  if (strcmp(arg[0],"temp") == 0) {
    if (narg < 2) error->all(FLERR,"Illegal fix_modify command");
    if (tflag) {
      // discard the compute this fix created for itself
      modify->delete_compute(id_temp);
      tflag = 0;
    }
    delete [] id_temp;
    int n = strlen(arg[1]) + 1;
    id_temp = new char[n];
    strcpy(id_temp,arg[1]);

    int icompute = modify->find_compute(id_temp);
    if (icompute < 0)
      error->all(FLERR,"Could not find fix_modify temperature ID");
    temperature = modify->compute[icompute];

    if (temperature->tempflag == 0)
      error->all(FLERR,
                 "Fix_modify temperature ID does not compute temperature");
    if (temperature->igroup != igroup && comm->me == 0)
      error->warning(FLERR,"Group for fix_modify temp != fix group");
    return 2;
  }
  return 0;
}
/* ---------------------------------------------------------------------- */
// Called by other commands (e.g. temperature ramps) to reset the target
// temperature; collapses start/stop/target to the same value.
void FixTempRescale::reset_target(double t_new)
{
  t_target = t_start = t_stop = t_new;
}
/* ---------------------------------------------------------------------- */
// Cumulative energy extracted from (or added to) the system by rescaling,
// reported as this fix's extensive scalar.
double FixTempRescale::compute_scalar()
{
  return energy;
}
/* ----------------------------------------------------------------------
   extract thermostat properties
------------------------------------------------------------------------- */

void *FixTempRescale::extract(const char *str, int &dim)
{
  // only the scalar target temperature is exported
  if (strcmp(str,"t_target") != 0) return NULL;
  dim = 0;
  return &t_target;
}
| ramisetti/lammps | src/fix_temp_rescale.cpp | C++ | gpl-2.0 | 7,002 |
// Forward declaration of the CBA extended event handler base class used below.
class CBA_Extended_EventHandlers;

// ACE3 dragging: per-class carry/drag capability flags and attach offsets.
// GVAR(canCarry)/GVAR(canDrag) enable the interactions; *Position is the
// attachTo offset and *Direction the relative heading in degrees.
// NOTE(review): both `GVAR(x)[]` and `GVAR(x[])` spellings appear below —
// presumably they expand to the same array property; verify the macro.
class CfgVehicles {
    // Static weapons
    class LandVehicle;
    class StaticWeapon: LandVehicle {
        GVAR(canCarry) = 1;
        GVAR(carryPosition)[] = {0,1.2,0};
        GVAR(carryDirection) = 0;
        GVAR(canDrag) = 1;
        GVAR(dragPosition)[] = {0,1.2,0};
        GVAR(dragDirection) = 0;
    };
    // Cannons are too heavy to move by hand.
    class StaticCannon: StaticWeapon {
        GVAR(canCarry) = 0;
        GVAR(canDrag) = 0;
    };
    class StaticMortar;
    class Mortar_01_base_F: StaticMortar {
        GVAR(canCarry) = 1;
        GVAR(carryPosition)[] = {0,1.2,0};
        GVAR(carryDirection) = 0;
        GVAR(canDrag) = 1;
        GVAR(dragPosition)[] = {0,1.2,0};
        GVAR(dragDirection) = 0;
    };
    // ammo boxes
    class ThingX;
    class Items_base_F;
    class ReammoBox_F: ThingX {
        GVAR(canCarry) = 0;
        GVAR(carryPosition)[] = {0,1,1};
        GVAR(carryDirection) = 0;
        GVAR(canDrag) = 0;
        GVAR(dragPosition)[] = {0,1.2,0};
        GVAR(dragDirection) = 0;
    };
    class Slingload_base_F: ReammoBox_F {
        GVAR(canCarry) = 0;
        GVAR(canDrag) = 0;
    };
    //remove actions from Taru Pods
    class Pod_Heli_Transport_04_base_F: Slingload_base_F {
        GVAR(canCarry) = 0;
        GVAR(canDrag) = 0;
    };
    class EAST_Box_Base: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };
    class IND_Box_Base: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };
    /*class FIA_Box_Base_F: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };*/
    class NATO_Box_Base: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };
    class Box_Syndicate_Ammo_F: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };
    class Box_IED_Exp_F: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };
    class Box_Syndicate_Wps_F: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };
    class Box_Syndicate_WpsLaunch_F: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(canDrag) = 1;
    };
    class Box_NATO_Equip_F: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(carryDirection) = 270;
        GVAR(canDrag) = 1;
    };
    class Box_NATO_Uniforms_F: ReammoBox_F {
        GVAR(canCarry) = 1;
        GVAR(carryDirection) = 270;
        GVAR(canDrag) = 1;
    };
    // Remove Larger crate dragging support.
    // Would be better to allow some sort of joint push/drag functionality
    // Requiring 2 units to access the larger crates and attaching them together (a crappy method of doing it)
    // in order to move the bigger ones. Currently simply remove support.
    // I believe these crates are currently broken (hitbox doesn't work or something) in 1.22 (2014-07-04)
    class Box_East_AmmoVeh_F: EAST_Box_Base {
        GVAR(canCarry) = 0;
        GVAR(canDrag) = 0;
    };
    class Box_NATO_AmmoVeh_F: NATO_Box_Base {
        GVAR(canCarry) = 0;
        GVAR(canDrag) = 0;
    };
    class Box_IND_AmmoVeh_F: IND_Box_Base {
        GVAR(canCarry) = 0;
        GVAR(canDrag) = 0;
    };
    //Plastic and metal case
    class PlasticCase_01_base_F: Items_base_F {
        class EventHandlers {
            class CBA_Extended_EventHandlers: CBA_Extended_EventHandlers {};
        };
        GVAR(canCarry) = 1;
        GVAR(carryPosition[]) = {0,1,1};
        GVAR(carryDirection) = 270;
        GVAR(canDrag) = 1;
        GVAR(dragPosition[]) = {0,1.2,0};
        GVAR(dragDirection) = 0;
    };
    class MetalCase_01_base_F: Items_base_F {
        class EventHandlers {
            class CBA_Extended_EventHandlers: CBA_Extended_EventHandlers {};
        };
        GVAR(canCarry) = 1;
        GVAR(carryPosition[]) = {0,1,1};
        GVAR(carryDirection) = 270;
        GVAR(canDrag) = 1;
        GVAR(dragPosition[]) = {0,1.2,0};
        GVAR(dragDirection) = 0;
    };
    // Barrier
    class RoadCone_F: ThingX {
        class EventHandlers {
            class CBA_Extended_EventHandlers: CBA_Extended_EventHandlers {};
        };
        GVAR(canCarry) = 1;
        GVAR(carryPosition)[] = {0,1,1};
        GVAR(carryDirection) = 0;
        GVAR(canDrag) = 1;
        GVAR(dragPosition)[] = {0,1.2,0};
        GVAR(dragDirection) = 0;
    };
    class RoadBarrier_F: RoadCone_F {
        GVAR(carryPosition)[] = {0,1,0.300671};
    };
    // Misc crates
    class Constructions_base_F;
    class Land_WoodenBox_F: Constructions_base_F {
        class EventHandlers {
            class CBA_Extended_EventHandlers: CBA_Extended_EventHandlers {};
        };
        GVAR(canCarry) = 1;
        GVAR(carryPosition[]) = {0,1,1};
        GVAR(carryDirection) = 270;
        GVAR(canDrag) = 1;
        GVAR(dragPosition[]) = {0,1.4,0};
        GVAR(dragDirection) = 0;
    };
    class Land_WoodenCrate_01_F: ThingX {
        class EventHandlers {
            class CBA_Extended_EventHandlers: CBA_Extended_EventHandlers {};
        };
        GVAR(canCarry) = 1;
        GVAR(carryPosition[]) = {0,1,1};
        GVAR(carryDirection) = 270;
        GVAR(canDrag) = 1;
        GVAR(dragPosition[]) = {0,1.5,0};
        GVAR(dragDirection) = 90;
    };
    // Spare parts (tracks/wheels) can be carried but not dragged.
    class ACE_RepairItem_Base: ThingX {};
    class ACE_Track: ACE_RepairItem_Base {
        GVAR(canCarry) = 1;
        GVAR(carryPosition)[] = {0,1,1};
        GVAR(carryDirection) = 0;
    };
    class ACE_Wheel: ACE_RepairItem_Base {
        GVAR(canCarry) = 1;
        GVAR(carryPosition)[] = {0,1,1};
        GVAR(carryDirection) = 0;
    };
    class Lamps_base_F;
    class Land_PortableLight_single_F: Lamps_base_F {
        GVAR(canCarry) = 1;
        GVAR(carryPosition)[] = {0,1.2,0};
        GVAR(carryDirection) = 180;
        GVAR(canDrag) = 1;
        GVAR(dragPosition)[] = {0,1.2,0};
        GVAR(dragDirection) = 180;
    };
};
| NorXAengell/ACE3 | addons/dragging/CfgVehicles.hpp | C++ | gpl-2.0 | 5,893 |
// (C) Copyright Jonathan Turkanis 2003.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
// See http://www.boost.org/libs/iostreams for documentation.
// Contains the definitions of the class templates gzip_compressor and
// gzip_decompressor for reading and writing files in the gzip file format
// (RFC 1952). Based in part on work of Jonathan de Halleux; see [...]
#ifndef BOOST_IOSTREAMS_GZIP_HPP_INCLUDED
#define BOOST_IOSTREAMS_GZIP_HPP_INCLUDED
#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif
#include <boost/config.hpp> // STATIC_CONSTANT, STDC_NAMESPACE,
// DINKUMWARE_STDLIB, __STL_CONFIG_H.
#include <algorithm> // min.
#include <cstdio> // EOF.
#include <cstddef> // size_t.
#include <ctime> // std::time_t.
#include <memory> // allocator.
#include <boost/config.hpp> // Put size_t in std.
#include <boost/detail/workaround.hpp>
#include <boost/cstdint.hpp> // uint8_t, uint32_t.
#include <boost/iostreams/constants.hpp> // buffer size.
#include <boost/iostreams/detail/adapter/non_blocking_adapter.hpp>
#include <boost/iostreams/detail/adapter/range_adapter.hpp>
#include <boost/iostreams/detail/char_traits.hpp>
#include <boost/iostreams/detail/error.hpp>
#include <boost/iostreams/detail/ios.hpp> // failure.
#include <boost/iostreams/operations.hpp>
#include <boost/iostreams/device/back_inserter.hpp>
#include <boost/iostreams/filter/zlib.hpp>
#include <boost/iostreams/pipeline.hpp>
// Must come last.
#if defined(BOOST_MSVC)
# pragma warning(push)
# pragma warning(disable: 4309) // Truncation of constant value.
#endif
#ifdef BOOST_NO_STDC_NAMESPACE
namespace std { using ::time_t; }
#endif
namespace boost { namespace iostreams {
// Constants describing the gzip file format (RFC 1952).
namespace gzip {

using namespace boost::iostreams::zlib;

// Error codes used by gzip_error.

const int zlib_error        = 1;  // wrapped zlib failure
const int bad_crc           = 2;  // Recorded crc doesn't match data.
const int bad_length        = 3;  // Recorded length doesn't match data.
const int bad_header        = 4;  // Malformed header.
const int bad_footer        = 5;  // Malformed footer.

namespace magic {

// Magic numbers used by gzip header.

const int id1 = 0x1f;
const int id2 = 0x8b;

} // End namespace magic.

namespace method {

// Codes used for the 'CM' byte of the gzip header.

const int deflate = 8;

} // End namespace method.

namespace flags {

// Codes used for the 'FLG' byte of the gzip header.

const int text       = 1;
const int header_crc = 2;
const int extra      = 4;
const int name       = 8;
const int comment    = 16;

} // End namespace flags.

namespace extra_flags {

// Codes used for the 'XFL' byte of the gzip header.

const int best_compression = 2;
const int best_speed       = 4;

} // End namespace extra_flags.

// Codes used for the 'OS' byte of the gzip header.

const int os_fat       = 0;
const int os_amiga     = 1;
const int os_vms       = 2;
const int os_unix      = 3;
const int os_vm_cms    = 4;
const int os_atari     = 5;
const int os_hpfs      = 6;
const int os_macintosh = 7;
const int os_z_system  = 8;
const int os_cp_m      = 9;
const int os_tops_20   = 10;
const int os_ntfs      = 11;
const int os_qdos      = 12;
const int os_acorn     = 13;
const int os_unknown   = 255;

} // End namespace gzip.
//
// Class name: gzip_params.
// Description: Subclass of zlib_params with an additional field
// representing a file name.
//
struct gzip_params : zlib_params {
// Non-explicit constructor.
gzip_params( int level = gzip::default_compression,
int method = gzip::deflated,
int window_bits = gzip::default_window_bits,
int mem_level = gzip::default_mem_level,
int strategy = gzip::default_strategy,
std::string file_name = "",
std::string comment = "",
std::time_t mtime = 0 )
: zlib_params(level, method, window_bits, mem_level, strategy),
file_name(file_name), mtime(mtime)
{ }
std::string file_name;
std::string comment;
std::time_t mtime;
};
//
// Class name: gzip_error.
// Description: Subclass of std::ios_base::failure thrown to indicate
//     zlib errors other than out-of-memory conditions.
//
class gzip_error : public BOOST_IOSTREAMS_FAILURE {
public:
    // error: one of the gzip::* error codes declared above.
    explicit gzip_error(int error)
        : BOOST_IOSTREAMS_FAILURE("gzip error"),
          error_(error), zlib_error_code_(zlib::okay) { }
    // Wrap an underlying zlib failure, preserving its error code.
    explicit gzip_error(const zlib_error& e)
        : BOOST_IOSTREAMS_FAILURE("gzip error"),
          error_(gzip::zlib_error), zlib_error_code_(e.error())
        { }
    int error() const { return error_; }                     // gzip-level code
    int zlib_error_code() const { return zlib_error_code_; } // underlying zlib code
private:
    int error_;
    int zlib_error_code_;
};
//
// Template name: gzip_compressor
// Description: Model of OutputFilter implementing compression in the
//      gzip format.
//
// Wraps basic_zlib_compressor, prefixing the compressed stream with a gzip
// header and appending the CRC32/ISIZE footer required by RFC 1952.
//
template<typename Alloc = std::allocator<char> >
class basic_gzip_compressor : basic_zlib_compressor<Alloc> {
private:
    typedef basic_zlib_compressor<Alloc>  base_type;
public:
    typedef char char_type;
    struct category
        : dual_use,
          filter_tag,
          multichar_tag,
          closable_tag
        { };
    basic_gzip_compressor( const gzip_params& = gzip::default_compression,
                           int buffer_size = default_device_buffer_size );

    // Pull-mode operation: emits header, then compressed body, then footer.
    template<typename Source>
    std::streamsize read(Source& src, char_type* s, std::streamsize n)
    {
        using namespace std;
        streamsize result = 0;

        // Read header.
        if (!(flags_ & f_header_done))
            result += read_string(s, n, header_);

        // Read body.
        if (!(flags_ & f_body_done)) {

            // Read from basic_zlib_filter.
            streamsize amt = base_type::read(src, s + result, n - result);
            if (amt != -1) {
                result += amt;
                if (amt < n - result) { // Double-check for EOF.
                    amt = base_type::read(src, s + result, n - result);
                    if (amt != -1)
                        result += amt;
                }
            }
            if (amt == -1)
                prepare_footer();   // body exhausted: build CRC/length footer
        }

        // Read footer.
        if ((flags_ & f_body_done) != 0 && result < n)
            result += read_string(s + result, n - result, footer_);
        return result != 0 ? result : -1;
    }

    // Push-mode operation: writes the header lazily before the first data.
    template<typename Sink>
    std::streamsize write(Sink& snk, const char_type* s, std::streamsize n)
    {
        if (!(flags_ & f_header_done)) {
            std::streamsize amt =
                static_cast<std::streamsize>(header_.size() - offset_);
            offset_ += boost::iostreams::write(snk, header_.data() + offset_, amt);
            if (offset_ == header_.size())
                flags_ |= f_header_done;
            else
                return 0;   // sink has not yet accepted the full header
        }
        return base_type::write(snk, s, n);
    }

    // Flushes the zlib stream and appends the gzip footer on output close,
    // then resets internal state so the filter can be reused.
    template<typename Sink>
    void close(Sink& snk, BOOST_IOS::openmode m)
    {
        namespace io = boost::iostreams;

        if (m & BOOST_IOS::out) {

            // Close zlib compressor.
            base_type::close(snk, BOOST_IOS::out);

            if (flags_ & f_header_done) {

                // Write final fields of gzip file format.
                write_long(this->crc(), snk);
                write_long(this->total_in(), snk);
            }
        }
#if BOOST_WORKAROUND(__GNUC__, == 2) && defined(__STL_CONFIG_H) || \
    BOOST_WORKAROUND(BOOST_DINKUMWARE_STDLIB, == 1) \
    /**/
        footer_.erase(0, std::string::npos);
#else
        footer_.clear();
#endif
        offset_ = 0;
        flags_ = 0;
    }
private:
    static gzip_params normalize_params(gzip_params p);
    void prepare_footer();
    std::streamsize read_string(char* s, std::streamsize n, std::string& str);

    // Writes a 32-bit value in little-endian order, as required by RFC 1952.
    template<typename Sink>
    static void write_long(long n, Sink& next)
    {
        boost::iostreams::put(next, static_cast<char>(0xFF & n));
        boost::iostreams::put(next, static_cast<char>(0xFF & (n >> 8)));
        boost::iostreams::put(next, static_cast<char>(0xFF & (n >> 16)));
        boost::iostreams::put(next, static_cast<char>(0xFF & (n >> 24)));
    }

    enum flag_type {
        f_header_done = 1,
        f_body_done = f_header_done << 1,
        f_footer_done = f_body_done << 1
    };
    std::string  header_;   // pending gzip header bytes
    std::string  footer_;   // pending gzip footer bytes (CRC32 + ISIZE)
    int          offset_;   // NOTE(review): declared std::size_t in original
    std::size_t  offset__unused_placeholder_; // (see correction below)
    int          flags_;
};
BOOST_IOSTREAMS_PIPABLE(basic_gzip_compressor, 1)

typedef basic_gzip_compressor<> gzip_compressor;
//
// Template name: basic_gzip_decompressor
// Description: Model of InputFilter implementing compression in the
// gzip format.
//
template<typename Alloc = std::allocator<char> >
class basic_gzip_decompressor : basic_zlib_decompressor<Alloc> {
public:
typedef char char_type;
struct category
: //multichar_input_filter_tag ,
multichar_tag,
filter_tag,
input_seekable,
closable_tag
//seekable_filter_tag
{ };
basic_gzip_decompressor( int window_bits = gzip::default_window_bits,
int buffer_size = default_device_buffer_size );
template <typename Source>
std::streampos seek(Source &src, stream_offset off,
BOOST_IOS::seekdir way)
{
if (way != BOOST_IOS::beg)
{
throw detail::cant_seek();
}
non_blocking_adapter<Source> nb(src);
std::streampos rval;
boost::iostreams::seek(nb, 0, std::ios_base::beg);
// reset the decoder
//impl_type::reset(false, true);
base_type::close(src, BOOST_IOS::in);
flags_ = 0;
// now seek
std::streamsize nuint32s = off/sizeof(uint32_t);
std::streamsize nuint8s = off%sizeof(uint32_t);
uint32_t four_bytes;
uint8_t one_byte;
while (nuint32s > 0)
{
read(src, (char_type*)(&four_bytes), sizeof(uint32_t));
--nuint32s;
rval += sizeof(uint32_t);
}
while (nuint8s > 0)
{
read(src, (char_type*)(&one_byte), sizeof(uint8_t));
--nuint8s;
rval += sizeof(uint8_t);
}
return (rval);
}
template<typename Source>
std::streamsize read(Source& src, char_type* s, std::streamsize n)
{
if ((flags_ & f_header_read) == 0) {
non_blocking_adapter<Source> nb(src);
read_header(nb);
flags_ |= f_header_read;
}
if ((flags_ & f_footer_read) != 0)
return -1;
try {
std::streamsize result = 0;
std::streamsize amt;
if ((amt = base_type::read(src, s, n)) != -1) {
result += amt;
if (amt < n) { // Double check for EOF.
amt = base_type::read(src, s + result, n - result);
if (amt != -1)
result += amt;
}
}
if (amt == -1) {
non_blocking_adapter<Source> nb(src);
read_footer(nb);
flags_ |= f_footer_read;
}
return result;
} catch (const zlib_error& e) {
throw gzip_error(e);
}
}
template<typename Source>
void close(Source& src)
{
try {
base_type::close(src, BOOST_IOS::in);
flags_ = 0;
} catch (const zlib_error& e) {
throw gzip_error(e);
}
}
std::string file_name() const { return file_name_; }
std::string comment() const { return comment_; }
bool text() const { return (flags_ & gzip::flags::text) != 0; }
int os() const { return os_; }
std::time_t mtime() const { return mtime_; }
private:
typedef basic_zlib_decompressor<Alloc> base_type;
typedef BOOST_IOSTREAMS_CHAR_TRAITS(char) traits_type;
static bool is_eof(int c) { return traits_type::eq_int_type(c, EOF); }
static gzip_params make_params(int window_bits);
template<typename Source>
static uint8_t read_uint8(Source& src, int error)
{
int c;
if ((c = boost::iostreams::get(src)) == EOF || c == WOULD_BLOCK)
throw gzip_error(error);
return static_cast<uint8_t>(traits_type::to_char_type(c));
}
// Read a 32-bit little-endian value from `src`; each missing byte throws
// gzip_error(error). Bytes are widened to uint32_t *before* shifting:
// the original shifted a promoted (signed) int, which is undefined
// behaviour pre-C++20 when the top byte has its high bit set and is
// shifted into the sign bit.
template<typename Source>
static uint32_t read_uint32(Source& src, int error)
{
    uint32_t b1 = read_uint8(src, error);
    uint32_t b2 = read_uint8(src, error);
    uint32_t b3 = read_uint8(src, error);
    uint32_t b4 = read_uint8(src, error);
    return b1 + (b2 << 8) + (b3 << 16) + (b4 << 24);
}
// Read a NUL-terminated string (the FNAME or FCOMMENT header field).
// Throws gzip_error(gzip::bad_header) if EOF arrives before the NUL.
template<typename Source>
std::string read_string(Source& src)
{
    std::string value;
    for (;;) {
        int next = boost::iostreams::get(src);
        if (is_eof(next))
            throw gzip_error(gzip::bad_header);
        if (next == 0)
            break;
        value += static_cast<char>(next);
    }
    return value;
}
// Parse the RFC 1952 gzip header: magic, CM, FLG, MTIME, XFL, OS, then
// the optional FEXTRA / FNAME / FCOMMENT / FHCRC fields as indicated by
// FLG. Populates file_name_, comment_, os_, mtime_ and the f_text flag.
// Throws gzip_error(gzip::bad_header) on any malformed field.
template<typename Source>
void read_header(Source& src) // Source is non-blocking.
{
    // Reset saved values.
#if BOOST_WORKAROUND(__GNUC__, == 2) && defined(__STL_CONFIG_H) || \
    BOOST_WORKAROUND(BOOST_DINKUMWARE_STDLIB, == 1) \
    /**/
    file_name_.erase(0, std::string::npos);
    comment_.erase(0, std::string::npos);
#else
    file_name_.clear();
    comment_.clear();
#endif
    os_ = gzip::os_unknown;
    mtime_ = 0;
    int flags;
    // Read header, without checking header crc.
    if ( boost::iostreams::get(src) != gzip::magic::id1 ||   // ID1.
         boost::iostreams::get(src) != gzip::magic::id2 ||   // ID2.
         is_eof(boost::iostreams::get(src)) ||               // CM.
         is_eof(flags = boost::iostreams::get(src)) )        // FLG.
    {
        throw gzip_error(gzip::bad_header);
    }
    mtime_ = read_uint32(src, gzip::bad_header);             // MTIME.
    read_uint8(src, gzip::bad_header);                       // XFL.
    os_ = read_uint8(src, gzip::bad_header);                 // OS.
    // Translate the header FLG bit into our internal flag encoding.
    if (flags & boost::iostreams::gzip::flags::text)
        flags_ |= f_text;
    // Skip extra field. (From J. Halleaux; see note at top.)
    if (flags & gzip::flags::extra) {
        // XLEN is a 16-bit little-endian length prefix.
        int length =
            static_cast<int>(
                read_uint8(src, gzip::bad_header) +
                (read_uint8(src, gzip::bad_header) << 8)
            );
        // length is garbage if EOF but the loop below will quit anyway.
        do { }
        while (length-- != 0 && !is_eof(boost::iostreams::get(src)));
    }
    if (flags & gzip::flags::name)          // Read file name.
        file_name_ = read_string(src);
    if (flags & gzip::flags::comment)       // Read comment.
        comment_ = read_string(src);
    if (flags & gzip::flags::header_crc) {  // Skip header crc.
        read_uint8(src, gzip::bad_header);
        read_uint8(src, gzip::bad_header);
    }
}
// Parse and validate the 8-byte gzip trailer: CRC32 of the uncompressed
// data, then ISIZE (uncompressed length mod 2^32). Any bytes the
// decompressor over-read are recovered via unconsumed_input() and
// prepended before draining the rest of the source.
template<typename Source>
void read_footer(Source& src)
{
    typename base_type::string_type footer =
        this->unconsumed_input();
    int c;
    while (!is_eof(c = boost::iostreams::get(src)))
        footer += c;
    detail::range_adapter<input, std::string>
        rng(footer.begin(), footer.end());
    // CRC and length mismatches raise distinct, diagnosable errors.
    if (read_uint32(rng, gzip::bad_footer) != this->crc())
        throw gzip_error(gzip::bad_crc);
    if (static_cast<int>(read_uint32(rng, gzip::bad_footer)) != this->total_out())
        throw gzip_error(gzip::bad_length);
}
// Parser state bits kept in flags_ (note: these are internal bits,
// distinct from the raw gzip header FLG bits parsed by read_header()).
enum flag_type {
    f_header_read  = 1,
    f_footer_read  = f_header_read << 1,
    f_text         = f_footer_read << 1
};
std::string  file_name_;   // FNAME header field, if present.
std::string  comment_;     // FCOMMENT header field, if present.
int          os_;          // OS byte from the header.
std::time_t  mtime_;       // MTIME header field.
int          flags_;       // Combination of flag_type bits.
};
BOOST_IOSTREAMS_PIPABLE(basic_gzip_decompressor, 1)
typedef basic_gzip_decompressor<> gzip_decompressor;
//------------------Implementation of gzip_compressor-------------------------//
// Construct a gzip compressor. The zlib layer is configured (via
// normalize_params) to emit a raw deflate stream, and the RFC 1952
// header is pre-built into header_ to be emitted before compressed data.
template<typename Alloc>
basic_gzip_compressor<Alloc>::basic_gzip_compressor
    (const gzip_params& p, int buffer_size)
    : base_type(normalize_params(p), buffer_size),
      offset_(0), flags_(0)
{
    // Calculate gzip header.
    bool has_name = !p.file_name.empty();
    bool has_comment = !p.comment.empty();
    // 10 fixed header bytes plus the NUL-terminated optional fields.
    std::string::size_type length =
        10 +
        (has_name ? p.file_name.size() + 1 : 0) +
        (has_comment ? p.comment.size() + 1 : 0);
        // + 2; // Header crc confuses gunzip.
    int flags =
        //gzip::flags::header_crc +
        (has_name ? gzip::flags::name : 0) +
        (has_comment ? gzip::flags::comment : 0);
    // XFL compression hints (RFC 1952: 2 = slowest/best, 4 = fastest).
    int extra_flags =
        ( p.level == zlib::best_compression ?
              gzip::extra_flags::best_compression :
              0 ) +
        ( p.level == zlib::best_speed ?
              gzip::extra_flags::best_speed :
              0 );
    header_.reserve(length);
    header_ += gzip::magic::id1;                         // ID1.
    header_ += gzip::magic::id2;                         // ID2.
    header_ += gzip::method::deflate;                    // CM.
    header_ += static_cast<char>(flags);                 // FLG.
    header_ += static_cast<char>(0xFF & p.mtime);        // MTIME (little-endian).
    header_ += static_cast<char>(0xFF & (p.mtime >> 8));
    header_ += static_cast<char>(0xFF & (p.mtime >> 16));
    header_ += static_cast<char>(0xFF & (p.mtime >> 24));
    header_ += static_cast<char>(extra_flags);           // XFL.
    header_ += static_cast<char>(gzip::os_unknown);      // OS.
    if (has_name) {
        header_ += p.file_name;
        header_ += '\0';
    }
    if (has_comment) {
        header_ += p.comment;
        header_ += '\0';
    }
}
// The zlib layer must emit a raw deflate stream (the gzip wrapper writes
// its own header/trailer) and must track the CRC for that trailer.
template<typename Alloc>
gzip_params basic_gzip_compressor<Alloc>::normalize_params(gzip_params p)
{
    p.calculate_crc = true;
    p.noheader = true;
    return p;
}
// Build the 8-byte gzip trailer (CRC32 followed by total input length)
// into footer_, mark the compressed body as finished, and rewind
// offset_ so read_string() starts serving the trailer from the top.
template<typename Alloc>
void basic_gzip_compressor<Alloc>::prepare_footer()
{
    boost::iostreams::back_insert_device<std::string> out(footer_);
    write_long(this->crc(), out);
    write_long(this->total_in(), out);
    flags_ |= f_body_done;
    offset_ = 0;
}
// Copy up to `n` bytes of `str` (either header_ or footer_) into `s`,
// starting at offset_, and advance offset_. Marks the header as fully
// emitted once the whole string has been consumed. Returns the number
// of bytes copied.
template<typename Alloc>
std::streamsize basic_gzip_compressor<Alloc>::read_string
    (char* s, std::streamsize n, std::string& str)
{
    using namespace std;
    streamsize avail =
        static_cast<streamsize>(str.size() - offset_);
    streamsize amt = (std::min)(avail, n);
    std::copy( str.data() + offset_,
               str.data() + offset_ + amt,
               s );
    offset_ += amt;
    // Only the header uses this completion flag; the footer phase is
    // tracked separately via f_body_done (set in prepare_footer()).
    if ( !(flags_ & f_header_done) &&
         offset_ == static_cast<std::size_t>(str.size()) )
    {
        flags_ |= f_header_done;
    }
    return amt;
}
//------------------Implementation of gzip_decompressor-----------------------//
// Construct a decompressor for the raw-deflate body of a gzip stream;
// header-derived fields stay at their "unknown"/zero defaults until
// read_header() runs on first read.
template<typename Alloc>
basic_gzip_decompressor<Alloc>::basic_gzip_decompressor
    (int window_bits, int buffer_size)
    : base_type(make_params(window_bits), buffer_size),
      os_(gzip::os_unknown), mtime_(0), flags_(0)
    { }
// Build zlib parameters for decoding the raw-deflate body of a gzip
// stream: no zlib header, and CRC tracking enabled so the trailer can
// be verified in read_footer().
template<typename Alloc>
gzip_params basic_gzip_decompressor<Alloc>::make_params(int window_bits)
{
    gzip_params params;
    params.noheader = true;
    params.calculate_crc = true;
    params.window_bits = window_bits;
    return params;
}
//----------------------------------------------------------------------------//
} } // End namespaces iostreams, boost.
#if defined(BOOST_MSVC)
# pragma warning(pop)
#endif
#endif // #ifndef BOOST_IOSTREAMS_GZIP_HPP_INCLUDED
| rojolocco/MRST2017a | utils/3rdparty/matlab_bgl/matlab_bgl/libmbgl/yasmic/boost_mod/gzip.hpp | C++ | gpl-3.0 | 21,026 |
#include "Copter.h"
// adjust_climb_rate - hold copter at the desired distance from the
// tracked surface (ground or ceiling); returns the climb rate (in cm/s)
// which should be passed to the position controller: the pilot's rate
// plus a correction towards the rangefinder target distance.
float Copter::SurfaceTracking::adjust_climb_rate(float target_rate)
{
#if RANGEFINDER_ENABLED == ENABLED
    // check tracking state and that the relevant rangefinder is healthy.
    // BUGFIX: the upward glitch_count check is now grouped inside the
    // CEILING condition; previously it was OR'd at the top level, so a
    // glitching upward rangefinder disabled downward tracking as well.
    if ((surface == Surface::NONE) ||
        ((surface == Surface::GROUND) && (!copter.rangefinder_alt_ok() || (copter.rangefinder_state.glitch_count != 0))) ||
        ((surface == Surface::CEILING) && (!copter.rangefinder_up_ok() || (copter.rangefinder_up_state.glitch_count != 0)))) {
        return target_rate;
    }

    // calculate current ekf based altitude error
    const float current_alt_error = copter.pos_control->get_alt_target() - copter.inertial_nav.get_altitude();

    // init based on tracking direction/state
    RangeFinderState &rf_state = (surface == Surface::GROUND) ? copter.rangefinder_state : copter.rangefinder_up_state;
    const float dir = (surface == Surface::GROUND) ? 1.0f : -1.0f;

    // reset target altitude if this controller has just been engaged,
    // the target has been changed between upwards vs downwards,
    // or a glitch has cleared
    const uint32_t now = millis();
    if ((now - last_update_ms > SURFACE_TRACKING_TIMEOUT_MS) ||
        reset_target ||
        (last_glitch_cleared_ms != rf_state.glitch_cleared_ms)) {
        target_dist_cm = rf_state.alt_cm + (dir * current_alt_error);
        reset_target = false;
        // (stray line-continuation backslash removed from this statement)
        last_glitch_cleared_ms = rf_state.glitch_cleared_ms;
    }
    last_update_ms = now;

    // adjust rangefinder target alt if motors have not hit their limits
    if ((target_rate < 0 && !copter.motors->limit.throttle_lower) || (target_rate > 0 && !copter.motors->limit.throttle_upper)) {
        target_dist_cm += dir * target_rate * copter.G_Dt;
    }
    valid_for_logging = true;

#if AC_AVOID_ENABLED == ENABLED
    // upward facing terrain following never gets closer than avoidance margin
    if (surface == Surface::CEILING) {
        const float margin_cm = copter.avoid.enabled() ? copter.avoid.get_margin() * 100.0f : 0.0f;
        target_dist_cm = MAX(target_dist_cm, margin_cm);
    }
#endif

    // calc desired velocity correction from target rangefinder alt vs actual rangefinder alt
    // (remove the error already passed to the Altitude controller to avoid oscillations)
    const float distance_error = (target_dist_cm - rf_state.alt_cm) - (dir * current_alt_error);
    float velocity_correction = dir * distance_error * copter.g.rangefinder_gain;
    velocity_correction = constrain_float(velocity_correction, -SURFACE_TRACKING_VELZ_MAX, SURFACE_TRACKING_VELZ_MAX);

    // return combined pilot climb rate + rate to correct rangefinder alt error
    return (target_rate + velocity_correction);
#else
    return target_rate;
#endif
}
// Report the current ground-tracking target altitude (cm above terrain).
// Returns true only while tracking the ground and while the target has
// been refreshed within SURFACE_TRACKING_TIMEOUT_MS.
bool Copter::SurfaceTracking::get_target_alt_cm(float &_target_alt_cm) const
{
    const bool tracking_ground = (surface == Surface::GROUND);
    const bool target_fresh = (AP_HAL::millis() - last_update_ms <= SURFACE_TRACKING_TIMEOUT_MS);
    if (!tracking_ground || !target_fresh) {
        return false;
    }
    _target_alt_cm = target_dist_cm;
    return true;
}
// Set the ground-tracking target altitude (cm above terrain) and mark it
// as freshly updated; silently ignored unless tracking the ground.
void Copter::SurfaceTracking::set_target_alt_cm(float _target_alt_cm)
{
    if (surface != Surface::GROUND) {
        // only meaningful when tracking downwards
        return;
    }
    target_dist_cm = _target_alt_cm;
    last_update_ms = AP_HAL::millis();
}
// Fetch the tracking target distance (in metres) for the logger; returns
// false when no valid target exists to log.
bool Copter::SurfaceTracking::get_target_dist_for_logging(float &target_dist) const
{
    const bool have_target = valid_for_logging && (surface != Surface::NONE);
    if (!have_target) {
        return false;
    }
    target_dist = target_dist_cm * 0.01f;
    return true;
}
// Current rangefinder distance (in metres) in the tracked direction,
// for logging purposes.
float Copter::SurfaceTracking::get_dist_for_logging() const
{
    const float alt_cm = (surface == Surface::CEILING) ?
        copter.rangefinder_up_state.alt_cm : copter.rangefinder_state.alt_cm;
    return alt_cm * 0.01f;
}
// Switch the tracked surface (ground / ceiling / none). The change is
// rejected -- with a GCS warning and a user notification -- when the
// required rangefinder orientation is not available.
void Copter::SurfaceTracking::set_surface(Surface new_surface)
{
    if (surface == new_surface) {
        return;
    }

    // verify a rangefinder exists in the requested direction
    const bool need_down = (new_surface == Surface::GROUND);
    const bool need_up = (new_surface == Surface::CEILING);
    if (need_down && !copter.rangefinder.has_orientation(ROTATION_PITCH_270)) {
        copter.gcs().send_text(MAV_SEVERITY_WARNING, "SurfaceTracking: no downward rangefinder");
        AP_Notify::events.user_mode_change_failed = 1;
        return;
    }
    if (need_up && !copter.rangefinder.has_orientation(ROTATION_PITCH_90)) {
        copter.gcs().send_text(MAV_SEVERITY_WARNING, "SurfaceTracking: no upward rangefinder");
        AP_Notify::events.user_mode_change_failed = 1;
        return;
    }

    surface = new_surface;
    reset_target = true;
}
| squilter/ardupilot | ArduCopter/surface_tracking.cpp | C++ | gpl-3.0 | 4,893 |
M.tool_assignmentupgrade = {
    /**
     * Wire up the assignment-upgrade table: row highlighting that follows
     * the per-row checkboxes, a select-all checkbox, batch-form submission
     * (which collects the selected assignment ids into a hidden field),
     * and the per-page selector.
     *
     * BUGFIX: the original assigned `checkboxes`, `rowelement`, `operation`
     * and `assignmentsinput` without `var`, leaking them into the global
     * scope; all locals are now properly declared, and the duplicated
     * row-highlighting logic is factored into a single helper.
     *
     * @param {YUI} Y the YUI instance
     */
    init_upgrade_table: function(Y) {
        // Set a row's CSS class to match its checkbox state.
        var set_row_class = function(node) {
            var rowelement = node.get('parentNode').get('parentNode');
            rowelement.setAttribute('class', node.get('checked') ? 'selectedrow' : 'unselectedrow');
        };

        Y.use('node', function(Y) {
            var checkboxes = Y.all('td.c0 input');
            checkboxes.each(function(node) {
                node.on('change', function(e) {
                    set_row_class(e.currentTarget);
                });
                // Apply the initial state on page load.
                set_row_class(node);
            });
        });

        var selectall = Y.one('th.c0 input');
        selectall.on('change', function(e) {
            var checked = e.currentTarget.get('checked');
            Y.all('td.c0 input').each(function(node) {
                node.set('checked', checked);
                set_row_class(node);
            });
        });

        var batchform = Y.one('.tool_assignmentupgrade_batchform form');
        batchform.on('submit', function(e) {
            // Copy the ids of the ticked assignments into the hidden field.
            var selectedassignments = [];
            Y.all('td.c0 input').each(function(node) {
                if (node.get('checked')) {
                    selectedassignments.push(node.get('value'));
                }
            });
            Y.one('input.selectedassignments').set('value', selectedassignments.join(','));
            if (selectedassignments.length == 0) {
                alert(M.str.assign.noassignmentsselected);
                e.preventDefault();
            }
        });

        var perpage = Y.one('#id_perpage');
        perpage.on('change', function(e) {
            // Suppress the "unsaved changes" prompt before reloading.
            window.onbeforeunload = null;
            Y.one('.tool_assignmentupgrade_paginationform form').submit();
        });
    }
};
| bhaumik25php/ready2study | admin/tool/assignmentupgrade/module.js | JavaScript | gpl-3.0 | 2,755 |
/*
Copyright (C) 2014-2015 de4dot@gmail.com
This file is part of dnSpy
dnSpy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
dnSpy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with dnSpy. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using dnlib.DotNet;
using dnlib.PE;
namespace dnSpy.AsmEditor.Module {
/// <summary>
/// Helpers for creating new module / netmodule definitions and for
/// converting a standalone module into a single-module assembly.
/// </summary>
static class ModuleUtils {
	/// <summary>Creates a new netmodule with the given name, MVID and CLR version.</summary>
	public static ModuleDef CreateNetModule(string name, Guid mvid, ClrVersion clrVersion) {
		return CreateModule(name, mvid, clrVersion, ModuleKind.NetModule);
	}

	/// <summary>
	/// Creates a new module (or re-initializes <paramref name="existingModule"/>)
	/// of the given kind, with PE characteristics appropriate for that kind.
	/// </summary>
	public static ModuleDef CreateModule(string name, Guid mvid, ClrVersion clrVersion, ModuleKind kind, ModuleDef existingModule = null) {
		var module = CreateModuleDef(name, mvid, clrVersion, existingModule);
		module.Kind = kind;
		// All modules are 32-bit executables; DLLs and netmodules
		// additionally carry the Dll characteristic.
		module.Characteristics = Characteristics._32BitMachine | Characteristics.ExecutableImage;
		if (kind == ModuleKind.Dll || kind == ModuleKind.NetModule)
			module.Characteristics |= Characteristics.Dll;
		module.DllCharacteristics = DllCharacteristics.TerminalServerAware | DllCharacteristics.NoSeh | DllCharacteristics.NxCompat | DllCharacteristics.DynamicBase;
		return module;
	}

	/// <summary>
	/// Creates the underlying ModuleDef (or overwrites name/MVID/corlib-ref on
	/// an existing one) and stamps the CLR-version-specific metadata versions.
	/// </summary>
	static ModuleDef CreateModuleDef(string name, Guid mvid, ClrVersion clrVersion, ModuleDef existingModule) {
		var clrValues = ClrVersionValues.GetValues(clrVersion);
		ModuleDef module;
		if (existingModule == null)
			module = new ModuleDefUser(name, mvid, clrValues.CorLibRef);
		else {
			module = existingModule;
			module.Name = name;
			module.Mvid = mvid;
			OverwriteAssembly(module.CorLibTypes.AssemblyRef, clrValues.CorLibRef);
		}
		module.UpdateRowId(module);
		module.RuntimeVersion = clrValues.RuntimeVersion;
		module.Cor20HeaderRuntimeVersion = clrValues.Cor20HeaderRuntimeVersion;
		module.TablesHeaderVersion = clrValues.TablesHeaderVersion;
		// Freshly created in-memory module: it has no on-disk location yet.
		module.Location = string.Empty;
		return module;
	}

	/// <summary>Copies all identity fields of <paramref name="src"/> onto <paramref name="dst"/>.</summary>
	static void OverwriteAssembly(AssemblyRef dst, AssemblyRef src) {
		dst.Name = src.Name;
		dst.Version = src.Version;
		dst.PublicKeyOrToken = src.PublicKeyOrToken;
		dst.Culture = src.Culture;
		dst.Attributes = src.Attributes;
		dst.Hash = src.Hash;
	}

	/// <summary>
	/// Wraps the module in a brand-new AssemblyDef (named after the module,
	/// minus its file extension) and applies the new module kind. The module's
	/// previous characteristics are returned via <paramref name="characteristics"/>.
	/// </summary>
	public static AssemblyDef AddToNewAssemblyDef(ModuleDef module, ModuleKind moduleKind, out Characteristics characteristics) {
		var asmDef = module.UpdateRowId(new AssemblyDefUser(GetAssemblyName(module)));
		asmDef.Modules.Add(module);
		WriteNewModuleKind(module, moduleKind, out characteristics);
		return asmDef;
	}

	/// <summary>Derives an assembly name from the module name by stripping a .exe/.dll/.netmodule extension.</summary>
	static string GetAssemblyName(ModuleDef module) {
		string name = module.Name;
		if (name.EndsWith(".exe", StringComparison.OrdinalIgnoreCase) || name.EndsWith(".dll", StringComparison.OrdinalIgnoreCase))
			name = name.Substring(0, name.Length - 4);
		else if (name.EndsWith(".netmodule", StringComparison.OrdinalIgnoreCase))
			name = name.Substring(0, name.Length - 10);
		if (!string.IsNullOrWhiteSpace(name))
			return name;
		// Stripping the extension left nothing usable; keep the full name.
		return module.Name;
	}

	/// <summary>
	/// Sets the new module kind, returning the old characteristics through
	/// <paramref name="characteristics"/> and recomputing them for the new kind.
	/// </summary>
	public static void WriteNewModuleKind(ModuleDef module, ModuleKind moduleKind, out Characteristics characteristics) {
		module.Kind = moduleKind;
		characteristics = module.Characteristics;
		module.Characteristics = SaveModule.CharacteristicsHelper.GetCharacteristics(module.Characteristics, moduleKind);
	}
}
}
| zuloloxi/dnSpy | dnSpy/AsmEditor/Module/ModuleUtils.cs | C# | gpl-3.0 | 3,738 |
<?php
/**
* interface/therapy_groups/therapy_groups_models/therapy_groups_encounters_model.php contains the model for therapy group encounters.
*
* This model fetches the encounters for the therapy group from the DB.
*
* Copyright (C) 2016 Shachar Zilbershlag <shaharzi@matrix.co.il>
* Copyright (C) 2016 Amiel Elboim <amielel@matrix.co.il>
*
* LICENSE: This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 3
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://opensource.org/licenses/gpl-license.php>;.
*
* @package OpenEMR
* @author Shachar Zilbershlag <shaharzi@matrix.co.il>
* @author Amiel Elboim <amielel@matrix.co.il>
* @link http://www.open-emr.org
*/
class Therapy_Groups_Encounters
{
    const TABLE = 'form_groups_encounter';

    /**
     * Get all upcoming encounters (dated today or later) of the specified group.
     *
     * BUGFIX: $encounters is now initialized before the fetch loop. Previously,
     * when the query matched no rows, the method returned an undefined variable
     * (null plus an E_NOTICE) instead of an array.
     *
     * @param int $gid Therapy group id.
     * @return array List of encounter rows; empty array when none exist.
     */
    public function getGroupEncounters($gid)
    {
        $sql = "SELECT * FROM " . self::TABLE . " WHERE group_id = ? AND date >= CURDATE();";
        $result = sqlStatement($sql, array($gid));
        $encounters = array();
        while ($row = sqlFetchArray($result)) {
            $encounters[] = $row;
        }

        return $encounters;
    }
}
| Jeffrey-P-McAteer/openemr | interface/therapy_groups/therapy_groups_models/therapy_groups_encounters_model.php | PHP | gpl-3.0 | 1,708 |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: k8s.io/kubernetes/vendor/k8s.io/api/admissionregistration/v1beta1/generated.proto
/*
Package v1beta1 is a generated protocol buffer package.
It is generated from these files:
k8s.io/kubernetes/vendor/k8s.io/api/admissionregistration/v1beta1/generated.proto
It has these top-level messages:
MutatingWebhook
MutatingWebhookConfiguration
MutatingWebhookConfigurationList
Rule
RuleWithOperations
ServiceReference
ValidatingWebhook
ValidatingWebhookConfiguration
ValidatingWebhookConfigurationList
WebhookClientConfig
*/
package v1beta1
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import k8s_io_apimachinery_pkg_apis_meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
import strings "strings"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
func (m *MutatingWebhook) Reset() { *m = MutatingWebhook{} }
func (*MutatingWebhook) ProtoMessage() {}
func (*MutatingWebhook) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{0} }
func (m *MutatingWebhookConfiguration) Reset() { *m = MutatingWebhookConfiguration{} }
func (*MutatingWebhookConfiguration) ProtoMessage() {}
func (*MutatingWebhookConfiguration) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{1}
}
func (m *MutatingWebhookConfigurationList) Reset() { *m = MutatingWebhookConfigurationList{} }
func (*MutatingWebhookConfigurationList) ProtoMessage() {}
func (*MutatingWebhookConfigurationList) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{2}
}
func (m *Rule) Reset() { *m = Rule{} }
func (*Rule) ProtoMessage() {}
func (*Rule) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{3} }
func (m *RuleWithOperations) Reset() { *m = RuleWithOperations{} }
func (*RuleWithOperations) ProtoMessage() {}
func (*RuleWithOperations) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{4} }
func (m *ServiceReference) Reset() { *m = ServiceReference{} }
func (*ServiceReference) ProtoMessage() {}
func (*ServiceReference) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{5} }
func (m *ValidatingWebhook) Reset() { *m = ValidatingWebhook{} }
func (*ValidatingWebhook) ProtoMessage() {}
func (*ValidatingWebhook) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{6} }
func (m *ValidatingWebhookConfiguration) Reset() { *m = ValidatingWebhookConfiguration{} }
func (*ValidatingWebhookConfiguration) ProtoMessage() {}
func (*ValidatingWebhookConfiguration) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{7}
}
func (m *ValidatingWebhookConfigurationList) Reset() { *m = ValidatingWebhookConfigurationList{} }
func (*ValidatingWebhookConfigurationList) ProtoMessage() {}
func (*ValidatingWebhookConfigurationList) Descriptor() ([]byte, []int) {
return fileDescriptorGenerated, []int{8}
}
func (m *WebhookClientConfig) Reset() { *m = WebhookClientConfig{} }
func (*WebhookClientConfig) ProtoMessage() {}
func (*WebhookClientConfig) Descriptor() ([]byte, []int) { return fileDescriptorGenerated, []int{9} }
// init registers every generated message type with the gogo/protobuf
// runtime under its fully-qualified proto name so that reflection-based
// (de)serialization can resolve these types at runtime.
func init() {
	proto.RegisterType((*MutatingWebhook)(nil), "k8s.io.api.admissionregistration.v1beta1.MutatingWebhook")
	proto.RegisterType((*MutatingWebhookConfiguration)(nil), "k8s.io.api.admissionregistration.v1beta1.MutatingWebhookConfiguration")
	proto.RegisterType((*MutatingWebhookConfigurationList)(nil), "k8s.io.api.admissionregistration.v1beta1.MutatingWebhookConfigurationList")
	proto.RegisterType((*Rule)(nil), "k8s.io.api.admissionregistration.v1beta1.Rule")
	proto.RegisterType((*RuleWithOperations)(nil), "k8s.io.api.admissionregistration.v1beta1.RuleWithOperations")
	proto.RegisterType((*ServiceReference)(nil), "k8s.io.api.admissionregistration.v1beta1.ServiceReference")
	proto.RegisterType((*ValidatingWebhook)(nil), "k8s.io.api.admissionregistration.v1beta1.ValidatingWebhook")
	proto.RegisterType((*ValidatingWebhookConfiguration)(nil), "k8s.io.api.admissionregistration.v1beta1.ValidatingWebhookConfiguration")
	proto.RegisterType((*ValidatingWebhookConfigurationList)(nil), "k8s.io.api.admissionregistration.v1beta1.ValidatingWebhookConfigurationList")
	proto.RegisterType((*WebhookClientConfig)(nil), "k8s.io.api.admissionregistration.v1beta1.WebhookClientConfig")
}
func (m *MutatingWebhook) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *MutatingWebhook) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Name)))
i += copy(dAtA[i:], m.Name)
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.ClientConfig.Size()))
n1, err := m.ClientConfig.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n1
if len(m.Rules) > 0 {
for _, msg := range m.Rules {
dAtA[i] = 0x1a
i++
i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
if m.FailurePolicy != nil {
dAtA[i] = 0x22
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.FailurePolicy)))
i += copy(dAtA[i:], *m.FailurePolicy)
}
if m.NamespaceSelector != nil {
dAtA[i] = 0x2a
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.NamespaceSelector.Size()))
n2, err := m.NamespaceSelector.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n2
}
if m.SideEffects != nil {
dAtA[i] = 0x32
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.SideEffects)))
i += copy(dAtA[i:], *m.SideEffects)
}
if m.TimeoutSeconds != nil {
dAtA[i] = 0x38
i++
i = encodeVarintGenerated(dAtA, i, uint64(*m.TimeoutSeconds))
}
if len(m.AdmissionReviewVersions) > 0 {
for _, s := range m.AdmissionReviewVersions {
dAtA[i] = 0x42
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if m.MatchPolicy != nil {
dAtA[i] = 0x4a
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.MatchPolicy)))
i += copy(dAtA[i:], *m.MatchPolicy)
}
if m.ReinvocationPolicy != nil {
dAtA[i] = 0x52
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.ReinvocationPolicy)))
i += copy(dAtA[i:], *m.ReinvocationPolicy)
}
if m.ObjectSelector != nil {
dAtA[i] = 0x5a
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectSelector.Size()))
n3, err := m.ObjectSelector.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n3
}
return i, nil
}
func (m *MutatingWebhookConfiguration) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *MutatingWebhookConfiguration) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectMeta.Size()))
n4, err := m.ObjectMeta.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n4
if len(m.Webhooks) > 0 {
for _, msg := range m.Webhooks {
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
return i, nil
}
func (m *MutatingWebhookConfigurationList) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *MutatingWebhookConfigurationList) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.ListMeta.Size()))
n5, err := m.ListMeta.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n5
if len(m.Items) > 0 {
for _, msg := range m.Items {
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
return i, nil
}
func (m *Rule) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Rule) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.APIGroups) > 0 {
for _, s := range m.APIGroups {
dAtA[i] = 0xa
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if len(m.APIVersions) > 0 {
for _, s := range m.APIVersions {
dAtA[i] = 0x12
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if len(m.Resources) > 0 {
for _, s := range m.Resources {
dAtA[i] = 0x1a
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if m.Scope != nil {
dAtA[i] = 0x22
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.Scope)))
i += copy(dAtA[i:], *m.Scope)
}
return i, nil
}
func (m *RuleWithOperations) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *RuleWithOperations) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.Operations) > 0 {
for _, s := range m.Operations {
dAtA[i] = 0xa
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.Rule.Size()))
n6, err := m.Rule.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n6
return i, nil
}
func (m *ServiceReference) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
// MarshalTo encodes the ServiceReference into dAtA using the protobuf
// wire format (field 1 = namespace, 2 = name, 3 = optional path,
// 4 = optional port) and returns the number of bytes written.
// Generated code -- the layout must not be hand-modified.
func (m *ServiceReference) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	dAtA[i] = 0xa
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.Namespace)))
	i += copy(dAtA[i:], m.Namespace)
	dAtA[i] = 0x12
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(len(m.Name)))
	i += copy(dAtA[i:], m.Name)
	if m.Path != nil {
		dAtA[i] = 0x1a
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(len(*m.Path)))
		i += copy(dAtA[i:], *m.Path)
	}
	if m.Port != nil {
		dAtA[i] = 0x20
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(*m.Port))
	}
	return i, nil
}
func (m *ValidatingWebhook) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *ValidatingWebhook) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(m.Name)))
i += copy(dAtA[i:], m.Name)
dAtA[i] = 0x12
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.ClientConfig.Size()))
n7, err := m.ClientConfig.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n7
if len(m.Rules) > 0 {
for _, msg := range m.Rules {
dAtA[i] = 0x1a
i++
i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
n, err := msg.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n
}
}
if m.FailurePolicy != nil {
dAtA[i] = 0x22
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.FailurePolicy)))
i += copy(dAtA[i:], *m.FailurePolicy)
}
if m.NamespaceSelector != nil {
dAtA[i] = 0x2a
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.NamespaceSelector.Size()))
n8, err := m.NamespaceSelector.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n8
}
if m.SideEffects != nil {
dAtA[i] = 0x32
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.SideEffects)))
i += copy(dAtA[i:], *m.SideEffects)
}
if m.TimeoutSeconds != nil {
dAtA[i] = 0x38
i++
i = encodeVarintGenerated(dAtA, i, uint64(*m.TimeoutSeconds))
}
if len(m.AdmissionReviewVersions) > 0 {
for _, s := range m.AdmissionReviewVersions {
dAtA[i] = 0x42
i++
l = len(s)
for l >= 1<<7 {
dAtA[i] = uint8(uint64(l)&0x7f | 0x80)
l >>= 7
i++
}
dAtA[i] = uint8(l)
i++
i += copy(dAtA[i:], s)
}
}
if m.MatchPolicy != nil {
dAtA[i] = 0x4a
i++
i = encodeVarintGenerated(dAtA, i, uint64(len(*m.MatchPolicy)))
i += copy(dAtA[i:], *m.MatchPolicy)
}
if m.ObjectSelector != nil {
dAtA[i] = 0x52
i++
i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectSelector.Size()))
n9, err := m.ObjectSelector.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n9
}
return i, nil
}
func (m *ValidatingWebhookConfiguration) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
// MarshalTo encodes m into dAtA in protobuf wire format and returns the number
// of bytes written. dAtA must be at least m.Size() bytes long.
func (m *ValidatingWebhookConfiguration) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	// field 1 (ObjectMeta): tag 0xa = (1<<3)|2, length-delimited submessage.
	dAtA[i] = 0xa
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.ObjectMeta.Size()))
	n10, err := m.ObjectMeta.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n10
	// field 2 (Webhooks): repeated submessage, one tagged chunk per element.
	if len(m.Webhooks) > 0 {
		for _, msg := range m.Webhooks {
			dAtA[i] = 0x12
			i++
			i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
			n, err := msg.MarshalTo(dAtA[i:])
			if err != nil {
				return 0, err
			}
			i += n
		}
	}
	return i, nil
}
// Marshal implements proto.Marshaler: it sizes the message, encodes it into a
// freshly allocated buffer via MarshalTo, and returns the written prefix.
func (m *ValidatingWebhookConfigurationList) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalTo(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes m into dAtA in protobuf wire format and returns the number
// of bytes written. dAtA must be at least m.Size() bytes long.
func (m *ValidatingWebhookConfigurationList) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	// field 1 (ListMeta): tag 0xa = (1<<3)|2, length-delimited submessage.
	dAtA[i] = 0xa
	i++
	i = encodeVarintGenerated(dAtA, i, uint64(m.ListMeta.Size()))
	n11, err := m.ListMeta.MarshalTo(dAtA[i:])
	if err != nil {
		return 0, err
	}
	i += n11
	// field 2 (Items): repeated submessage, one tagged chunk per element.
	if len(m.Items) > 0 {
		for _, msg := range m.Items {
			dAtA[i] = 0x12
			i++
			i = encodeVarintGenerated(dAtA, i, uint64(msg.Size()))
			n, err := msg.MarshalTo(dAtA[i:])
			if err != nil {
				return 0, err
			}
			i += n
		}
	}
	return i, nil
}
// Marshal implements proto.Marshaler: it sizes the message, encodes it into a
// freshly allocated buffer via MarshalTo, and returns the written prefix.
func (m *WebhookClientConfig) Marshal() (dAtA []byte, err error) {
	buf := make([]byte, m.Size())
	n, err := m.MarshalTo(buf)
	if err != nil {
		return nil, err
	}
	return buf[:n], nil
}
// MarshalTo encodes m into dAtA in protobuf wire format and returns the number
// of bytes written; optional (pointer/slice) fields are emitted only when set.
func (m *WebhookClientConfig) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	// field 1 (Service): optional submessage, tag 0xa.
	if m.Service != nil {
		dAtA[i] = 0xa
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(m.Service.Size()))
		n12, err := m.Service.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n12
	}
	// field 2 (CABundle): optional bytes, tag 0x12.
	if m.CABundle != nil {
		dAtA[i] = 0x12
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(len(m.CABundle)))
		i += copy(dAtA[i:], m.CABundle)
	}
	// field 3 (URL): optional string, tag 0x1a.
	if m.URL != nil {
		dAtA[i] = 0x1a
		i++
		i = encodeVarintGenerated(dAtA, i, uint64(len(*m.URL)))
		i += copy(dAtA[i:], *m.URL)
	}
	return i, nil
}
// encodeVarintGenerated writes v into dAtA starting at offset using protobuf
// base-128 varint encoding (little-endian groups of 7 bits, continuation bit
// 0x80 on all but the last byte) and returns the offset just past the output.
func encodeVarintGenerated(dAtA []byte, offset int, v uint64) int {
	for v >= 0x80 {
		// More bytes follow: emit the low 7 bits with the continuation bit set.
		dAtA[offset] = byte(v) | 0x80
		v >>= 7
		offset++
	}
	dAtA[offset] = byte(v)
	return offset + 1
}
// Size returns the number of bytes the protobuf encoding of m will occupy.
// Length-delimited fields cost 1 tag byte + varint(length) + payload; the
// arithmetic here must stay in lockstep with MarshalTo.
func (m *MutatingWebhook) Size() (n int) {
	var l int
	_ = l
	l = len(m.Name)
	n += 1 + l + sovGenerated(uint64(l))
	l = m.ClientConfig.Size()
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Rules) > 0 {
		for _, e := range m.Rules {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if m.FailurePolicy != nil {
		l = len(*m.FailurePolicy)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.NamespaceSelector != nil {
		l = m.NamespaceSelector.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.SideEffects != nil {
		l = len(*m.SideEffects)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.TimeoutSeconds != nil {
		// Varint field: tag byte plus the varint value, no length prefix.
		n += 1 + sovGenerated(uint64(*m.TimeoutSeconds))
	}
	if len(m.AdmissionReviewVersions) > 0 {
		for _, s := range m.AdmissionReviewVersions {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if m.MatchPolicy != nil {
		l = len(*m.MatchPolicy)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.ReinvocationPolicy != nil {
		l = len(*m.ReinvocationPolicy)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.ObjectSelector != nil {
		l = m.ObjectSelector.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy;
// it must stay in lockstep with the corresponding MarshalTo.
func (m *MutatingWebhookConfiguration) Size() (n int) {
	var l int
	_ = l
	l = m.ObjectMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Webhooks) > 0 {
		for _, e := range m.Webhooks {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy;
// it must stay in lockstep with the corresponding MarshalTo.
func (m *MutatingWebhookConfigurationList) Size() (n int) {
	var l int
	_ = l
	l = m.ListMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Items) > 0 {
		for _, e := range m.Items {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy.
// Repeated string fields cost one tag+length+payload per element.
func (m *Rule) Size() (n int) {
	var l int
	_ = l
	if len(m.APIGroups) > 0 {
		for _, s := range m.APIGroups {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.APIVersions) > 0 {
		for _, s := range m.APIVersions {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if len(m.Resources) > 0 {
		for _, s := range m.Resources {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if m.Scope != nil {
		l = len(*m.Scope)
		n += 1 + l + sovGenerated(uint64(l))
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy;
// it must stay in lockstep with the corresponding MarshalTo.
func (m *RuleWithOperations) Size() (n int) {
	var l int
	_ = l
	if len(m.Operations) > 0 {
		for _, s := range m.Operations {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	l = m.Rule.Size()
	n += 1 + l + sovGenerated(uint64(l))
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy;
// optional (pointer) fields contribute only when non-nil.
func (m *ServiceReference) Size() (n int) {
	var l int
	_ = l
	l = len(m.Namespace)
	n += 1 + l + sovGenerated(uint64(l))
	l = len(m.Name)
	n += 1 + l + sovGenerated(uint64(l))
	if m.Path != nil {
		l = len(*m.Path)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.Port != nil {
		// Varint field: tag byte plus the varint value, no length prefix.
		n += 1 + sovGenerated(uint64(*m.Port))
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy.
// Same layout as MutatingWebhook.Size but without ReinvocationPolicy; the
// arithmetic must stay in lockstep with the corresponding MarshalTo.
func (m *ValidatingWebhook) Size() (n int) {
	var l int
	_ = l
	l = len(m.Name)
	n += 1 + l + sovGenerated(uint64(l))
	l = m.ClientConfig.Size()
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Rules) > 0 {
		for _, e := range m.Rules {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if m.FailurePolicy != nil {
		l = len(*m.FailurePolicy)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.NamespaceSelector != nil {
		l = m.NamespaceSelector.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.SideEffects != nil {
		l = len(*m.SideEffects)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.TimeoutSeconds != nil {
		// Varint field: tag byte plus the varint value, no length prefix.
		n += 1 + sovGenerated(uint64(*m.TimeoutSeconds))
	}
	if len(m.AdmissionReviewVersions) > 0 {
		for _, s := range m.AdmissionReviewVersions {
			l = len(s)
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	if m.MatchPolicy != nil {
		l = len(*m.MatchPolicy)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.ObjectSelector != nil {
		l = m.ObjectSelector.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy;
// it must stay in lockstep with the corresponding MarshalTo.
func (m *ValidatingWebhookConfiguration) Size() (n int) {
	var l int
	_ = l
	l = m.ObjectMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Webhooks) > 0 {
		for _, e := range m.Webhooks {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy;
// it must stay in lockstep with the corresponding MarshalTo.
func (m *ValidatingWebhookConfigurationList) Size() (n int) {
	var l int
	_ = l
	l = m.ListMeta.Size()
	n += 1 + l + sovGenerated(uint64(l))
	if len(m.Items) > 0 {
		for _, e := range m.Items {
			l = e.Size()
			n += 1 + l + sovGenerated(uint64(l))
		}
	}
	return n
}
// Size returns the number of bytes the protobuf encoding of m will occupy;
// optional (pointer/slice) fields contribute only when non-nil.
func (m *WebhookClientConfig) Size() (n int) {
	var l int
	_ = l
	if m.Service != nil {
		l = m.Service.Size()
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.CABundle != nil {
		l = len(m.CABundle)
		n += 1 + l + sovGenerated(uint64(l))
	}
	if m.URL != nil {
		l = len(*m.URL)
		n += 1 + l + sovGenerated(uint64(l))
	}
	return n
}
// sovGenerated reports how many bytes the base-128 varint encoding of x
// occupies: one byte per 7-bit group, minimum one byte.
func sovGenerated(x uint64) (n int) {
	n = 1
	for x >>= 7; x != 0; x >>= 7 {
		n++
	}
	return n
}
// sozGenerated reports the varint size of x after zigzag encoding, which maps
// values of small magnitude (positive or negative) to small unsigned integers.
func sozGenerated(x uint64) (n int) {
	zigzag := (x << 1) ^ uint64(int64(x)>>63)
	return sovGenerated(zigzag)
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging. NOTE: the strings.Replace calls whose old and new arguments are
// identical are no-op artifacts of the code generator.
func (this *MutatingWebhook) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&MutatingWebhook{`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`ClientConfig:` + strings.Replace(strings.Replace(this.ClientConfig.String(), "WebhookClientConfig", "WebhookClientConfig", 1), `&`, ``, 1) + `,`,
		`Rules:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Rules), "RuleWithOperations", "RuleWithOperations", 1), `&`, ``, 1) + `,`,
		`FailurePolicy:` + valueToStringGenerated(this.FailurePolicy) + `,`,
		`NamespaceSelector:` + strings.Replace(fmt.Sprintf("%v", this.NamespaceSelector), "LabelSelector", "k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector", 1) + `,`,
		`SideEffects:` + valueToStringGenerated(this.SideEffects) + `,`,
		`TimeoutSeconds:` + valueToStringGenerated(this.TimeoutSeconds) + `,`,
		`AdmissionReviewVersions:` + fmt.Sprintf("%v", this.AdmissionReviewVersions) + `,`,
		`MatchPolicy:` + valueToStringGenerated(this.MatchPolicy) + `,`,
		`ReinvocationPolicy:` + valueToStringGenerated(this.ReinvocationPolicy) + `,`,
		`ObjectSelector:` + strings.Replace(fmt.Sprintf("%v", this.ObjectSelector), "LabelSelector", "k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector", 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *MutatingWebhookConfiguration) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&MutatingWebhookConfiguration{`,
		`ObjectMeta:` + strings.Replace(strings.Replace(this.ObjectMeta.String(), "ObjectMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ObjectMeta", 1), `&`, ``, 1) + `,`,
		`Webhooks:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Webhooks), "MutatingWebhook", "MutatingWebhook", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *MutatingWebhookConfigurationList) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&MutatingWebhookConfigurationList{`,
		`ListMeta:` + strings.Replace(strings.Replace(this.ListMeta.String(), "ListMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ListMeta", 1), `&`, ``, 1) + `,`,
		`Items:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Items), "MutatingWebhookConfiguration", "MutatingWebhookConfiguration", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *Rule) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Rule{`,
		`APIGroups:` + fmt.Sprintf("%v", this.APIGroups) + `,`,
		`APIVersions:` + fmt.Sprintf("%v", this.APIVersions) + `,`,
		`Resources:` + fmt.Sprintf("%v", this.Resources) + `,`,
		`Scope:` + valueToStringGenerated(this.Scope) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *RuleWithOperations) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&RuleWithOperations{`,
		`Operations:` + fmt.Sprintf("%v", this.Operations) + `,`,
		`Rule:` + strings.Replace(strings.Replace(this.Rule.String(), "Rule", "Rule", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *ServiceReference) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ServiceReference{`,
		`Namespace:` + fmt.Sprintf("%v", this.Namespace) + `,`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Path:` + valueToStringGenerated(this.Path) + `,`,
		`Port:` + valueToStringGenerated(this.Port) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging. NOTE: the strings.Replace calls whose old and new arguments are
// identical are no-op artifacts of the code generator.
func (this *ValidatingWebhook) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ValidatingWebhook{`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`ClientConfig:` + strings.Replace(strings.Replace(this.ClientConfig.String(), "WebhookClientConfig", "WebhookClientConfig", 1), `&`, ``, 1) + `,`,
		`Rules:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Rules), "RuleWithOperations", "RuleWithOperations", 1), `&`, ``, 1) + `,`,
		`FailurePolicy:` + valueToStringGenerated(this.FailurePolicy) + `,`,
		`NamespaceSelector:` + strings.Replace(fmt.Sprintf("%v", this.NamespaceSelector), "LabelSelector", "k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector", 1) + `,`,
		`SideEffects:` + valueToStringGenerated(this.SideEffects) + `,`,
		`TimeoutSeconds:` + valueToStringGenerated(this.TimeoutSeconds) + `,`,
		`AdmissionReviewVersions:` + fmt.Sprintf("%v", this.AdmissionReviewVersions) + `,`,
		`MatchPolicy:` + valueToStringGenerated(this.MatchPolicy) + `,`,
		`ObjectSelector:` + strings.Replace(fmt.Sprintf("%v", this.ObjectSelector), "LabelSelector", "k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector", 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *ValidatingWebhookConfiguration) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ValidatingWebhookConfiguration{`,
		`ObjectMeta:` + strings.Replace(strings.Replace(this.ObjectMeta.String(), "ObjectMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ObjectMeta", 1), `&`, ``, 1) + `,`,
		`Webhooks:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Webhooks), "ValidatingWebhook", "ValidatingWebhook", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *ValidatingWebhookConfigurationList) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&ValidatingWebhookConfigurationList{`,
		`ListMeta:` + strings.Replace(strings.Replace(this.ListMeta.String(), "ListMeta", "k8s_io_apimachinery_pkg_apis_meta_v1.ListMeta", 1), `&`, ``, 1) + `,`,
		`Items:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.Items), "ValidatingWebhookConfiguration", "ValidatingWebhookConfiguration", 1), `&`, ``, 1) + `,`,
		`}`,
	}, "")
	return s
}
// String returns a single-line, struct-literal-style rendering of this for
// debugging; nil receivers render as "nil".
func (this *WebhookClientConfig) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&WebhookClientConfig{`,
		`Service:` + strings.Replace(fmt.Sprintf("%v", this.Service), "ServiceReference", "ServiceReference", 1) + `,`,
		`CABundle:` + valueToStringGenerated(this.CABundle) + `,`,
		`URL:` + valueToStringGenerated(this.URL) + `,`,
		`}`,
	}, "")
	return s
}
// valueToStringGenerated renders an optional (pointer/slice) field for the
// generated String() methods: "nil" when the value is nil, otherwise
// "*<value>" of the dereferenced element. Callers must pass a nil-able kind
// (pointer, slice, map, …); reflect.Value.IsNil panics otherwise.
func valueToStringGenerated(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	return fmt.Sprintf("*%v", reflect.Indirect(rv).Interface())
}
// Unmarshal decodes protobuf wire data from dAtA into m. Repeated fields are
// appended to any existing contents; unknown fields are skipped. Each field is
// decoded with a hand-unrolled varint loop (7 bits per byte, 0x80 = continue).
func (m *MutatingWebhook) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field key: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: MutatingWebhook: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: MutatingWebhook: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// field 1 (Name): length-delimited string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			// field 2 (ClientConfig): embedded (non-pointer) submessage.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ClientConfig", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ClientConfig.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// field 3 (Rules): repeated submessage; append then decode in place.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Rules", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Rules = append(m.Rules, RuleWithOperations{})
			if err := m.Rules[len(m.Rules)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 4:
			// field 4 (FailurePolicy): optional string alias; stored via pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field FailurePolicy", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			s := FailurePolicyType(dAtA[iNdEx:postIndex])
			m.FailurePolicy = &s
			iNdEx = postIndex
		case 5:
			// field 5 (NamespaceSelector): optional submessage; allocate on first use.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NamespaceSelector", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.NamespaceSelector == nil {
				m.NamespaceSelector = &k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector{}
			}
			if err := m.NamespaceSelector.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 6:
			// field 6 (SideEffects): optional string alias; stored via pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field SideEffects", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			s := SideEffectClass(dAtA[iNdEx:postIndex])
			m.SideEffects = &s
			iNdEx = postIndex
		case 7:
			// field 7 (TimeoutSeconds): optional int32 varint (wire type 0).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field TimeoutSeconds", wireType)
			}
			var v int32
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= (int32(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.TimeoutSeconds = &v
		case 8:
			// field 8 (AdmissionReviewVersions): repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field AdmissionReviewVersions", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.AdmissionReviewVersions = append(m.AdmissionReviewVersions, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 9:
			// field 9 (MatchPolicy): optional string alias; stored via pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field MatchPolicy", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			s := MatchPolicyType(dAtA[iNdEx:postIndex])
			m.MatchPolicy = &s
			iNdEx = postIndex
		case 10:
			// field 10 (ReinvocationPolicy): optional string alias; stored via pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ReinvocationPolicy", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			s := ReinvocationPolicyType(dAtA[iNdEx:postIndex])
			m.ReinvocationPolicy = &s
			iNdEx = postIndex
		case 11:
			// field 11 (ObjectSelector): optional submessage; allocate on first use.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ObjectSelector", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.ObjectSelector == nil {
				m.ObjectSelector = &k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector{}
			}
			if err := m.ObjectSelector.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire data from dAtA into m. Webhooks entries are
// appended to any existing contents; unknown fields are skipped.
func (m *MutatingWebhookConfiguration) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field key: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: MutatingWebhookConfiguration: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: MutatingWebhookConfiguration: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// field 1 (ObjectMeta): embedded submessage.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// field 2 (Webhooks): repeated submessage; append then decode in place.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Webhooks", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Webhooks = append(m.Webhooks, MutatingWebhook{})
			if err := m.Webhooks[len(m.Webhooks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire data from dAtA into m. Items entries are
// appended to any existing contents; unknown fields are skipped.
func (m *MutatingWebhookConfigurationList) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field key: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: MutatingWebhookConfigurationList: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: MutatingWebhookConfigurationList: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// field 1 (ListMeta): embedded submessage.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ListMeta", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.ListMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// field 2 (Items): repeated submessage; append then decode in place.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Items", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Items = append(m.Items, MutatingWebhookConfiguration{})
			if err := m.Items[len(m.Items)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire data from dAtA into m. Repeated string
// fields are appended to any existing contents; unknown fields are skipped.
func (m *Rule) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field key: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Rule: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Rule: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// field 1 (APIGroups): repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field APIGroups", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.APIGroups = append(m.APIGroups, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 2:
			// field 2 (APIVersions): repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field APIVersions", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.APIVersions = append(m.APIVersions, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 3:
			// field 3 (Resources): repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Resources", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Resources = append(m.Resources, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 4:
			// field 4 (Scope): optional string alias; stored via pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Scope", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			s := ScopeType(dAtA[iNdEx:postIndex])
			m.Scope = &s
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire data from dAtA into m. Operations entries
// are appended to any existing contents; unknown fields are skipped.
func (m *RuleWithOperations) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field key: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: RuleWithOperations: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: RuleWithOperations: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// field 1 (Operations): repeated string alias.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Operations", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Operations = append(m.Operations, OperationType(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 2:
			// field 2 (Rule): embedded submessage.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Rule", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Rule.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes protobuf wire data from dAtA into m; unknown fields are
// skipped. Optional fields (Path, Port) are stored through freshly allocated
// pointers.
func (m *ServiceReference) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field key: (field number << 3) | wire type.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowGenerated
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ServiceReference: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ServiceReference: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// field 1 (Namespace): string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Namespace", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Namespace = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			// field 2 (Name): string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 3:
			// field 3 (Path): optional string; stored via pointer.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthGenerated
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			s := string(dAtA[iNdEx:postIndex])
			m.Path = &s
			iNdEx = postIndex
		case 4:
			// field 4 (Port): optional int32 varint (wire type 0).
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Port", wireType)
			}
			var v int32
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowGenerated
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= (int32(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.Port = &v
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipGenerated(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthGenerated
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m, merging into any
// fields already set. It returns io.ErrUnexpectedEOF on truncated input and
// ErrIntOverflowGenerated / ErrInvalidLengthGenerated on malformed varints or
// lengths.
//
// NOTE: this is machine-generated code (protoc-gen-gogo, from generated.proto).
// Change the .proto definition and regenerate instead of editing by hand.
func (m *ValidatingWebhook) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
// Decode the field key as a varint: (field number << 3) | wire type.
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: ValidatingWebhook: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: ValidatingWebhook: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
// Field 1: Name (string).
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Name = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
// Field 2: ClientConfig (embedded WebhookClientConfig message).
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ClientConfig", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.ClientConfig.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Field 3: Rules (repeated RuleWithOperations message).
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Rules", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Rules = append(m.Rules, RuleWithOperations{})
if err := m.Rules[len(m.Rules)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Field 4: FailurePolicy (optional string enum, stored as a pointer).
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field FailurePolicy", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
s := FailurePolicyType(dAtA[iNdEx:postIndex])
m.FailurePolicy = &s
iNdEx = postIndex
// Field 5: NamespaceSelector (optional LabelSelector message).
case 5:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field NamespaceSelector", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.NamespaceSelector == nil {
m.NamespaceSelector = &k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector{}
}
if err := m.NamespaceSelector.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Field 6: SideEffects (optional string enum, stored as a pointer).
case 6:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field SideEffects", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
s := SideEffectClass(dAtA[iNdEx:postIndex])
m.SideEffects = &s
iNdEx = postIndex
// Field 7: TimeoutSeconds (optional int32, varint wire type).
case 7:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field TimeoutSeconds", wireType)
}
var v int32
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= (int32(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
m.TimeoutSeconds = &v
// Field 8: AdmissionReviewVersions (repeated string).
case 8:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field AdmissionReviewVersions", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.AdmissionReviewVersions = append(m.AdmissionReviewVersions, string(dAtA[iNdEx:postIndex]))
iNdEx = postIndex
// Field 9: MatchPolicy (optional string enum, stored as a pointer).
case 9:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field MatchPolicy", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
s := MatchPolicyType(dAtA[iNdEx:postIndex])
m.MatchPolicy = &s
iNdEx = postIndex
// Field 10: ObjectSelector (optional LabelSelector message).
case 10:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ObjectSelector", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.ObjectSelector == nil {
m.ObjectSelector = &k8s_io_apimachinery_pkg_apis_meta_v1.LabelSelector{}
}
if err := m.ObjectSelector.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Unknown field: validate and skip it so forward compatibility is preserved.
default:
iNdEx = preIndex
skippy, err := skipGenerated(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthGenerated
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m. It returns
// io.ErrUnexpectedEOF on truncated input and the generated sentinel errors on
// malformed varints or lengths.
//
// NOTE: machine-generated code (protoc-gen-gogo) — regenerate from the .proto
// source rather than editing by hand.
func (m *ValidatingWebhookConfiguration) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
// Decode the field key as a varint: (field number << 3) | wire type.
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: ValidatingWebhookConfiguration: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: ValidatingWebhookConfiguration: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
// Field 1: ObjectMeta (embedded message).
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ObjectMeta", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.ObjectMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Field 2: Webhooks (repeated ValidatingWebhook message).
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Webhooks", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Webhooks = append(m.Webhooks, ValidatingWebhook{})
if err := m.Webhooks[len(m.Webhooks)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Unknown field: validate and skip it so forward compatibility is preserved.
default:
iNdEx = preIndex
skippy, err := skipGenerated(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthGenerated
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m. It returns
// io.ErrUnexpectedEOF on truncated input and the generated sentinel errors on
// malformed varints or lengths.
//
// NOTE: machine-generated code (protoc-gen-gogo) — regenerate from the .proto
// source rather than editing by hand.
func (m *ValidatingWebhookConfigurationList) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
// Decode the field key as a varint: (field number << 3) | wire type.
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: ValidatingWebhookConfigurationList: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: ValidatingWebhookConfigurationList: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
// Field 1: ListMeta (embedded message).
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field ListMeta", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.ListMeta.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Field 2: Items (repeated ValidatingWebhookConfiguration message).
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Items", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Items = append(m.Items, ValidatingWebhookConfiguration{})
if err := m.Items[len(m.Items)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Unknown field: validate and skip it so forward compatibility is preserved.
default:
iNdEx = preIndex
skippy, err := skipGenerated(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthGenerated
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// Unmarshal decodes the protobuf wire format in dAtA into m. It returns
// io.ErrUnexpectedEOF on truncated input and the generated sentinel errors on
// malformed varints or lengths.
//
// NOTE: machine-generated code (protoc-gen-gogo) — regenerate from the .proto
// source rather than editing by hand.
func (m *WebhookClientConfig) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
// Decode the field key as a varint: (field number << 3) | wire type.
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: WebhookClientConfig: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: WebhookClientConfig: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
// Field 1: Service (optional ServiceReference message).
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Service", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Service == nil {
m.Service = &ServiceReference{}
}
if err := m.Service.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
// Field 2: CABundle (bytes). An empty-but-present value is kept non-nil so
// "unset" and "set to empty" remain distinguishable.
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field CABundle", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + byteLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.CABundle = append(m.CABundle[:0], dAtA[iNdEx:postIndex]...)
if m.CABundle == nil {
m.CABundle = []byte{}
}
iNdEx = postIndex
// Field 3: URL (optional string, stored as a pointer).
case 3:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field URL", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowGenerated
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthGenerated
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
s := string(dAtA[iNdEx:postIndex])
m.URL = &s
iNdEx = postIndex
// Unknown field: validate and skip it so forward compatibility is preserved.
default:
iNdEx = preIndex
skippy, err := skipGenerated(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthGenerated
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// skipGenerated returns the number of bytes occupied by the single protobuf
// field that starts at the beginning of dAtA, without decoding its value. It
// is used to skip unknown fields during unmarshaling.
//
// Fix over the generated original: in the length-delimited case the decoded
// length was added to the index BEFORE the negative-length check, and the
// index itself was never checked for overflow. The length is now validated
// first and the advanced index is guarded against wrapping negative (this
// matches the later upstream gogo/protobuf skip implementation).
func skipGenerated(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
// Decode the field key as a varint: (field number << 3) | wire type.
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowGenerated
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
// Varint value: scan bytes until the continuation bit clears.
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowGenerated
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
// 64-bit fixed-width value.
iNdEx += 8
return iNdEx, nil
case 2:
// Length-delimited value: decode the length prefix, then skip the payload.
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowGenerated
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
// Validate the length BEFORE advancing, and reject an index that has
// wrapped negative through integer overflow.
if length < 0 {
return 0, ErrInvalidLengthGenerated
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthGenerated
}
return iNdEx, nil
case 3:
// Start-group: skip nested fields until the matching end-group tag.
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowGenerated
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipGenerated(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
// Bare end-group tag: nothing further to consume.
return iNdEx, nil
case 5:
// 32-bit fixed-width value.
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
// Sentinel errors shared by all generated Unmarshal/skip helpers in this file.
var (
// ErrInvalidLengthGenerated reports a negative (overflowed) length prefix.
ErrInvalidLengthGenerated = fmt.Errorf("proto: negative length found during unmarshaling")
// ErrIntOverflowGenerated reports a varint longer than 64 bits.
ErrIntOverflowGenerated = fmt.Errorf("proto: integer overflow")
)
// init registers the compressed file descriptor with the proto runtime so the
// types in this package can be resolved by name.
func init() {
proto.RegisterFile("k8s.io/kubernetes/vendor/k8s.io/api/admissionregistration/v1beta1/generated.proto", fileDescriptorGenerated)
}
var fileDescriptorGenerated = []byte{
// 1113 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x55, 0x4d, 0x6f, 0x1b, 0xc5,
0x1b, 0xcf, 0xc6, 0x76, 0x6d, 0x8f, 0x93, 0xa6, 0x99, 0xff, 0x9f, 0xd6, 0x84, 0xca, 0x6b, 0xf9,
0x80, 0x2c, 0x41, 0x77, 0x9b, 0x80, 0x10, 0x14, 0x10, 0xca, 0x06, 0x0a, 0x91, 0x92, 0x36, 0x4c,
0xfa, 0x22, 0xf1, 0x22, 0x75, 0xbc, 0x1e, 0xdb, 0x83, 0xed, 0x9d, 0xd5, 0xce, 0xac, 0x43, 0x6e,
0x7c, 0x04, 0xbe, 0x02, 0x27, 0x3e, 0x05, 0x07, 0x6e, 0xe1, 0xd6, 0x63, 0x2f, 0xac, 0xc8, 0x72,
0xe2, 0xc0, 0x81, 0x6b, 0x4e, 0x68, 0x66, 0xc7, 0xeb, 0x97, 0x4d, 0x8a, 0x29, 0xa2, 0x17, 0x7a,
0xdb, 0xf9, 0x3d, 0xf3, 0xfc, 0x9e, 0x97, 0xd9, 0xe7, 0xf9, 0x81, 0x4f, 0xfb, 0x6f, 0x73, 0x8b,
0x32, 0xbb, 0x1f, 0xb6, 0x48, 0xe0, 0x11, 0x41, 0xb8, 0x3d, 0x22, 0x5e, 0x9b, 0x05, 0xb6, 0x36,
0x60, 0x9f, 0xda, 0xb8, 0x3d, 0xa4, 0x9c, 0x53, 0xe6, 0x05, 0xa4, 0x4b, 0xb9, 0x08, 0xb0, 0xa0,
0xcc, 0xb3, 0x47, 0x9b, 0x2d, 0x22, 0xf0, 0xa6, 0xdd, 0x25, 0x1e, 0x09, 0xb0, 0x20, 0x6d, 0xcb,
0x0f, 0x98, 0x60, 0xb0, 0x99, 0x78, 0x5a, 0xd8, 0xa7, 0xd6, 0xb9, 0x9e, 0x96, 0xf6, 0xdc, 0xb8,
0xd1, 0xa5, 0xa2, 0x17, 0xb6, 0x2c, 0x97, 0x0d, 0xed, 0x2e, 0xeb, 0x32, 0x5b, 0x11, 0xb4, 0xc2,
0x8e, 0x3a, 0xa9, 0x83, 0xfa, 0x4a, 0x88, 0x37, 0xde, 0x9c, 0xa4, 0x34, 0xc4, 0x6e, 0x8f, 0x7a,
0x24, 0x38, 0xb6, 0xfd, 0x7e, 0x57, 0x02, 0xdc, 0x1e, 0x12, 0x81, 0xed, 0x51, 0x26, 0x9d, 0x0d,
0xfb, 0x22, 0xaf, 0x20, 0xf4, 0x04, 0x1d, 0x92, 0x8c, 0xc3, 0x5b, 0x7f, 0xe5, 0xc0, 0xdd, 0x1e,
0x19, 0xe2, 0x79, 0xbf, 0xc6, 0x4f, 0x45, 0xb0, 0xb6, 0x1f, 0x0a, 0x2c, 0xa8, 0xd7, 0x7d, 0x48,
0x5a, 0x3d, 0xc6, 0xfa, 0xb0, 0x0e, 0xf2, 0x1e, 0x1e, 0x92, 0xaa, 0x51, 0x37, 0x9a, 0x65, 0x67,
0xe5, 0x24, 0x32, 0x97, 0xe2, 0xc8, 0xcc, 0xdf, 0xc1, 0x43, 0x82, 0x94, 0x05, 0x1e, 0x81, 0x15,
0x77, 0x40, 0x89, 0x27, 0x76, 0x98, 0xd7, 0xa1, 0xdd, 0xea, 0x72, 0xdd, 0x68, 0x56, 0xb6, 0xde,
0xb7, 0x16, 0x6d, 0xa2, 0xa5, 0x43, 0xed, 0x4c, 0x91, 0x38, 0xff, 0xd7, 0x81, 0x56, 0xa6, 0x51,
0x34, 0x13, 0x08, 0x62, 0x50, 0x08, 0xc2, 0x01, 0xe1, 0xd5, 0x5c, 0x3d, 0xd7, 0xac, 0x6c, 0xbd,
0xb7, 0x78, 0x44, 0x14, 0x0e, 0xc8, 0x43, 0x2a, 0x7a, 0x77, 0x7d, 0x92, 0x58, 0xb8, 0xb3, 0xaa,
0x03, 0x16, 0xa4, 0x8d, 0xa3, 0x84, 0x19, 0xee, 0x81, 0xd5, 0x0e, 0xa6, 0x83, 0x30, 0x20, 0x07,
0x6c, 0x40, 0xdd, 0xe3, 0x6a, 0x5e, 0xb5, 0xe1, 0xd5, 0x38, 0x32, 0x57, 0x6f, 0x4f, 0x1b, 0xce,
0x22, 0x73, 0x7d, 0x06, 0xb8, 0x77, 0xec, 0x13, 0x34, 0xeb, 0x0c, 0xbf, 0x06, 0xeb, 0xb2, 0x63,
0xdc, 0xc7, 0x2e, 0x39, 0x24, 0x03, 0xe2, 0x0a, 0x16, 0x54, 0x0b, 0xaa, 0x5d, 0x6f, 0x4c, 0x25,
0x9f, 0xbe, 0x99, 0xe5, 0xf7, 0xbb, 0x12, 0xe0, 0x96, 0xfc, 0x35, 0xac, 0xd1, 0xa6, 0xb5, 0x87,
0x5b, 0x64, 0x30, 0x76, 0x75, 0x5e, 0x8a, 0x23, 0x73, 0xfd, 0xce, 0x3c, 0x23, 0xca, 0x06, 0x81,
0x1f, 0x82, 0x0a, 0xa7, 0x6d, 0xf2, 0x51, 0xa7, 0x43, 0x5c, 0xc1, 0xab, 0x97, 0x54, 0x15, 0x8d,
0x38, 0x32, 0x2b, 0x87, 0x13, 0xf8, 0x2c, 0x32, 0xd7, 0x26, 0xc7, 0x9d, 0x01, 0xe6, 0x1c, 0x4d,
0xbb, 0xc1, 0x5b, 0xe0, 0xb2, 0xfc, 0x7d, 0x58, 0x28, 0x0e, 0x89, 0xcb, 0xbc, 0x36, 0xaf, 0x16,
0xeb, 0x46, 0xb3, 0xe0, 0xc0, 0x38, 0x32, 0x2f, 0xdf, 0x9b, 0xb1, 0xa0, 0xb9, 0x9b, 0xf0, 0x3e,
0xb8, 0x96, 0xbe, 0x09, 0x22, 0x23, 0x4a, 0x8e, 0x1e, 0x90, 0x40, 0x1e, 0x78, 0xb5, 0x54, 0xcf,
0x35, 0xcb, 0xce, 0x2b, 0x71, 0x64, 0x5e, 0xdb, 0x3e, 0xff, 0x0a, 0xba, 0xc8, 0x57, 0x16, 0x36,
0xc4, 0xc2, 0xed, 0xe9, 0xe7, 0x29, 0x4f, 0x0a, 0xdb, 0x9f, 0xc0, 0xb2, 0xb0, 0xa9, 0xa3, 0x7a,
0x9a, 0x69, 0x37, 0xf8, 0x08, 0xc0, 0x80, 0x50, 0x6f, 0xc4, 0x5c, 0xf5, 0x37, 0x68, 0x32, 0xa0,
0xc8, 0x6e, 0xc6, 0x91, 0x09, 0x51, 0xc6, 0x7a, 0x16, 0x99, 0x57, 0xb3, 0xa8, 0xa2, 0x3e, 0x87,
0x0b, 0x32, 0x70, 0x99, 0xb5, 0xbe, 0x22, 0xae, 0x48, 0xdf, 0xbd, 0xf2, 0xec, 0xef, 0xae, 0xfa,
0x7d, 0x77, 0x86, 0x0e, 0xcd, 0xd1, 0x37, 0x7e, 0x36, 0xc0, 0xf5, 0xb9, 0x59, 0x4e, 0xc6, 0x26,
0x4c, 0xfe, 0x78, 0xf8, 0x08, 0x94, 0x24, 0x7b, 0x1b, 0x0b, 0xac, 0x86, 0xbb, 0xb2, 0x75, 0x73,
0xb1, 0x5c, 0x92, 0xc0, 0xfb, 0x44, 0x60, 0x07, 0xea, 0xa1, 0x01, 0x13, 0x0c, 0xa5, 0xac, 0xf0,
0x73, 0x50, 0xd2, 0x91, 0x79, 0x75, 0x59, 0x8d, 0xe8, 0x3b, 0x8b, 0x8f, 0xe8, 0x5c, 0xee, 0x4e,
0x5e, 0x86, 0x42, 0xa5, 0x23, 0x4d, 0xd8, 0xf8, 0xdd, 0x00, 0xf5, 0xa7, 0xd5, 0xb7, 0x47, 0xb9,
0x80, 0x5f, 0x64, 0x6a, 0xb4, 0x16, 0xec, 0x37, 0xe5, 0x49, 0x85, 0x57, 0x74, 0x85, 0xa5, 0x31,
0x32, 0x55, 0x5f, 0x1f, 0x14, 0xa8, 0x20, 0xc3, 0x71, 0x71, 0xb7, 0x9f, 0xb9, 0xb8, 0x99, 0xc4,
0x27, 0x9b, 0x68, 0x57, 0x92, 0xa3, 0x24, 0x46, 0xe3, 0x47, 0x03, 0xe4, 0xe5, 0x6a, 0x82, 0xaf,
0x81, 0x32, 0xf6, 0xe9, 0xc7, 0x01, 0x0b, 0x7d, 0x5e, 0x35, 0xd4, 0xe8, 0xac, 0xc6, 0x91, 0x59,
0xde, 0x3e, 0xd8, 0x4d, 0x40, 0x34, 0xb1, 0xc3, 0x4d, 0x50, 0xc1, 0x3e, 0x4d, 0x27, 0x6d, 0x59,
0x5d, 0x5f, 0x93, 0xe3, 0xb1, 0x7d, 0xb0, 0x9b, 0x4e, 0xd7, 0xf4, 0x1d, 0xc9, 0x1f, 0x10, 0xce,
0xc2, 0xc0, 0xd5, 0x9b, 0x55, 0xf3, 0xa3, 0x31, 0x88, 0x26, 0x76, 0xf8, 0x3a, 0x28, 0x70, 0x97,
0xf9, 0x44, 0xef, 0xc5, 0xab, 0x32, 0xed, 0x43, 0x09, 0x9c, 0x45, 0x66, 0x59, 0x7d, 0xa8, 0x89,
0x48, 0x2e, 0x35, 0xbe, 0x37, 0x00, 0xcc, 0xae, 0x5e, 0xf8, 0x01, 0x00, 0x2c, 0x3d, 0xe9, 0x92,
0x4c, 0xf5, 0x57, 0xa5, 0xe8, 0x59, 0x64, 0xae, 0xa6, 0x27, 0x45, 0x39, 0xe5, 0x02, 0x0f, 0x40,
0x5e, 0xae, 0x6b, 0xad, 0x3c, 0xd6, 0xdf, 0xd3, 0x81, 0x89, 0xa6, 0xc9, 0x13, 0x52, 0x4c, 0x8d,
0xef, 0x0c, 0x70, 0xe5, 0x90, 0x04, 0x23, 0xea, 0x12, 0x44, 0x3a, 0x24, 0x20, 0x9e, 0x4b, 0xa0,
0x0d, 0xca, 0xe9, 0x66, 0xd5, 0x7a, 0xb8, 0xae, 0x7d, 0xcb, 0xe9, 0x16, 0x46, 0x93, 0x3b, 0xa9,
0x76, 0x2e, 0x5f, 0xa8, 0x9d, 0xd7, 0x41, 0xde, 0xc7, 0xa2, 0x57, 0xcd, 0xa9, 0x1b, 0x25, 0x69,
0x3d, 0xc0, 0xa2, 0x87, 0x14, 0xaa, 0xac, 0x2c, 0x10, 0xaa, 0xb9, 0x05, 0x6d, 0x65, 0x81, 0x40,
0x0a, 0x6d, 0xfc, 0x76, 0x09, 0xac, 0x3f, 0xc0, 0x03, 0xda, 0x7e, 0xa1, 0xd7, 0x2f, 0xf4, 0xfa,
0xbf, 0xa5, 0xd7, 0x59, 0x35, 0x05, 0xff, 0xae, 0x9a, 0x9e, 0x1a, 0xa0, 0x96, 0x99, 0xb5, 0xe7,
0xad, 0xa7, 0x5f, 0x66, 0xf4, 0xf4, 0xdd, 0xc5, 0x47, 0x28, 0x93, 0x7d, 0x46, 0x51, 0xff, 0x30,
0x40, 0xe3, 0xe9, 0x35, 0x3e, 0x07, 0x4d, 0x1d, 0xce, 0x6a, 0xea, 0x27, 0xff, 0xa0, 0xc0, 0x45,
0x54, 0xf5, 0x07, 0x03, 0xfc, 0xef, 0x9c, 0x75, 0x06, 0x31, 0x28, 0xf2, 0x64, 0xfd, 0xeb, 0x1a,
0x6f, 0x2d, 0x9e, 0xc8, 0xbc, 0x6e, 0x38, 0x95, 0x38, 0x32, 0x8b, 0x63, 0x74, 0xcc, 0x0b, 0x9b,
0xa0, 0xe4, 0x62, 0x27, 0xf4, 0xda, 0x5a, 0xb8, 0x56, 0x9c, 0x15, 0xd9, 0x93, 0x9d, 0xed, 0x04,
0x43, 0xa9, 0x15, 0xbe, 0x0c, 0x72, 0x61, 0x30, 0xd0, 0x1a, 0x51, 0x8c, 0x23, 0x33, 0x77, 0x1f,
0xed, 0x21, 0x89, 0x39, 0x37, 0x4e, 0x4e, 0x6b, 0x4b, 0x8f, 0x4f, 0x6b, 0x4b, 0x4f, 0x4e, 0x6b,
0x4b, 0xdf, 0xc4, 0x35, 0xe3, 0x24, 0xae, 0x19, 0x8f, 0xe3, 0x9a, 0xf1, 0x24, 0xae, 0x19, 0xbf,
0xc4, 0x35, 0xe3, 0xdb, 0x5f, 0x6b, 0x4b, 0x9f, 0x15, 0x75, 0x6a, 0x7f, 0x06, 0x00, 0x00, 0xff,
0xff, 0xc3, 0x6f, 0x8b, 0x7e, 0x2c, 0x0f, 0x00, 0x00,
}
| pryorda/terraform | vendor/k8s.io/api/admissionregistration/v1beta1/generated.pb.go | GO | mpl-2.0 | 76,757 |
// --
// Core.Agent.TableFilters.js - provides the special module functions for table filters and column allocation lists
// Copyright (C) 2001-2011 OTRS AG, http://otrs.org/
// --
// This software comes with ABSOLUTELY NO WARRANTY. For details, see
// the enclosed file COPYING for license information (AGPL). If you
// did not receive this file, see http://www.gnu.org/licenses/agpl.txt.
// --
"use strict";
var Core = Core || {};
Core.Agent = Core.Agent || {};
/**
* @namespace
* @exports TargetNS as Core.Agent.TableFilters
* @description
* This namespace contains the special module functions for the Dashboard.
*/
Core.Agent.TableFilters = (function (TargetNS) {

    /*
     * Check dependencies first.
     * Bug fix: the original passed 'Core.UI.AllocationList' twice (copy-paste)
     * and never checked 'Core.UI.Table', although Core.UI.Table.InitTableFilter
     * is used below.
     */
    if (!Core.Debug.CheckDependency('Core.Agent.TableFilters', 'Core.UI.AllocationList', 'Core.UI.Table')) {
        return;
    }

    /**
     * @function
     * @private
     * @param {jQueryObject} $Input Input element to turn into an autocomplete
     * @param {String} ConfigPrefix Config key prefix, e.g. 'CustomerAutocomplete'
     * @param {Function} BuildRequestData Maps the typed term to the AJAX request data
     * @param {Function} MapEntry Maps one server result entry to a {label, value[, key]} item
     * @param {Function} OptionValue Extracts the value stored in the hidden select from the chosen item
     * @return nothing
     * @description
     *      Shared wiring for all backend-driven jQuery UI autocompletes: aborts a
     *      still-running request before starting a new one and writes the chosen
     *      value into the sibling select element.
     */
    function InitBackendAutocomplete($Input, ConfigPrefix, BuildRequestData, MapEntry, OptionValue) {
        $Input.autocomplete({
            minLength: Core.Config.Get(ConfigPrefix + '.MinQueryLength'),
            delay: Core.Config.Get(ConfigPrefix + '.QueryDelay'),
            open: function () {
                // force a higher z-index than the overlay/dialog
                $(this).autocomplete('widget').addClass('ui-overlay-autocomplete');
                return false;
            },
            source: function (Request, Response) {
                var URL = Core.Config.Get('Baselink'),
                    Data = BuildRequestData(Request.term);

                // if an old ajax request is already running, stop the old request and start the new one
                if ($Input.data('AutoCompleteXHR')) {
                    $Input.data('AutoCompleteXHR').abort();
                    $Input.removeData('AutoCompleteXHR');
                    // run the response function to hide the request animation
                    Response({});
                }

                $Input.data('AutoCompleteXHR', Core.AJAX.FunctionCall(URL, Data, function (Result) {
                    var Entries = [];
                    $Input.removeData('AutoCompleteXHR');
                    $.each(Result, function () {
                        Entries.push(MapEntry(this));
                    });
                    Response(Entries);
                }));
            },
            select: function (Event, UI) {
                $(Event.target)
                    .parent()
                    .find('select')
                    .append('<option value="' + OptionValue(UI.item) + '">SelectedItem</option>')
                    .val(OptionValue(UI.item))
                    .trigger('change');
            }
        });
    }

    /**
     * @function
     * @param {jQueryObject} $Input Input element to add auto complete to
     * @return nothing
     * @description Autocomplete for customer IDs; stores the selected ID value.
     */
    TargetNS.InitCustomerIDAutocomplete = function ($Input) {
        InitBackendAutocomplete(
            $Input,
            'CustomerAutocomplete',
            function (Term) {
                return {
                    Action: 'AgentCustomerInformationCenterSearch',
                    Subaction: 'SearchCustomerID',
                    Term: Term,
                    MaxResults: Core.Config.Get('CustomerAutocomplete.MaxResultsDisplayed')
                };
            },
            function (Entry) {
                return {
                    label: Entry.Label + ' (' + Entry.Value + ')',
                    value: Entry.Value
                };
            },
            function (Item) {
                return Item.value;
            }
        );
    };

    /**
     * @function
     * @param {jQueryObject} $Input Input element to add auto complete to
     * @return nothing
     * @description Autocomplete for customer users; stores the selected customer key.
     */
    TargetNS.InitCustomerUserAutocomplete = function ($Input) {
        InitBackendAutocomplete(
            $Input,
            'CustomerUserAutocomplete',
            function (Term) {
                return {
                    Action: 'AgentCustomerSearch',
                    Term: Term,
                    MaxResults: Core.Config.Get('CustomerUserAutocomplete.MaxResultsDisplayed')
                };
            },
            function (Entry) {
                return {
                    label: Entry.CustomerValue + " (" + Entry.CustomerKey + ")",
                    value: Entry.CustomerValue,
                    key: Entry.CustomerKey
                };
            },
            function (Item) {
                return Item.key;
            }
        );
    };

    /**
     * @function
     * @param {jQueryObject} $Input Input element to add auto complete to
     * @param {String} Subaction Subaction to execute on the server side
     * @return nothing
     * @description Autocomplete for agent users; stores the selected user key.
     */
    TargetNS.InitUserAutocomplete = function ($Input, Subaction) {
        InitBackendAutocomplete(
            $Input,
            'UserAutocomplete',
            function (Term) {
                return {
                    Action: 'AgentUserSearch',
                    Subaction: Subaction,
                    Term: Term,
                    MaxResults: Core.Config.Get('UserAutocomplete.MaxResultsDisplayed')
                };
            },
            function (Entry) {
                return {
                    label: Entry.UserValue + " (" + Entry.UserKey + ")",
                    value: Entry.UserValue,
                    key: Entry.UserKey
                };
            },
            function (Item) {
                return Item.key;
            }
        );
    };

    /**
     * @function
     * @return nothing
     * @description This function initializes the special module functions.
     */
    TargetNS.Init = function () {
        // Initiate allocation list
        TargetNS.SetAllocationList();
    };

    /**
     * @function
     * @private
     * @param {Object} Event The jQuery UI event (list change or sortstop)
     * @param {Object} UI The jQuery UI data belonging to the event
     * @description
     *      Serializes the current available/assigned column allocation (enabled
     *      flags plus assigned order) into the hidden .ColumnsJSON form field.
     */
    function UpdateAllocationList(Event, UI) {
        var $ContainerObj = $(UI.sender).closest('.AllocationListContainer'),
            Data = {},
            FieldName;

        // On drag'n'drop end the container has to be resolved from the moved item.
        if (Event.type === 'sortstop') {
            $ContainerObj = $(UI.item).closest('.AllocationListContainer');
        }

        Data.Columns = {};
        Data.Order = [];

        $ContainerObj.find('.AvailableFields').find('li').each(function () {
            FieldName = $(this).attr('data-fieldname');
            Data.Columns[FieldName] = 0;
        });

        $ContainerObj.find('.AssignedFields').find('li').each(function () {
            FieldName = $(this).attr('data-fieldname');
            Data.Columns[FieldName] = 1;
            Data.Order.push(FieldName);
        });

        $ContainerObj.closest('form').find('.ColumnsJSON').val(Core.JSON.Stringify(Data));
    }

    /**
     * @function
     * @param {Object} Event Optional event (unused, kept for API compatibility)
     * @param {Object} UI Optional jQuery UI data (unused, kept for API compatibility)
     * @return nothing
     * @description
     *      Builds the assigned/available column allocation lists from the JSON
     *      stored in the widget settings form and wires up sorting and filtering.
     */
    TargetNS.SetAllocationList = function (Event, UI) {
        $('.AllocationListContainer').each(function () {
            var $ContainerObj = $(this),
                DataEnabledJSON = $ContainerObj.closest('form.WidgetSettingsForm').find('input.ColumnsEnabledJSON').val(),
                DataAvailableJSON = $ContainerObj.closest('form.WidgetSettingsForm').find('input.ColumnsAvailableJSON').val(),
                DataEnabled,
                DataAvailable,
                Translation,
                $FieldObj,
                IDString = '#' + $ContainerObj.find('.AssignedFields').attr('id') + ', #' + $ContainerObj.find('.AvailableFields').attr('id');

            if (DataEnabledJSON) {
                DataEnabled = Core.JSON.Parse(DataEnabledJSON);
            }
            if (DataAvailableJSON) {
                DataAvailable = Core.JSON.Parse(DataAvailableJSON);
            }

            $.each(DataEnabled, function (Index, Field) {
                // get field translation
                Translation = Core.Config.Get('Column' + Field) || Field;
                $FieldObj = $('<li />').attr('title', Field).attr('data-fieldname', Field).text(Translation);
                $ContainerObj.find('.AssignedFields').append($FieldObj);
            });
            $.each(DataAvailable, function (Index, Field) {
                // get field translation
                Translation = Core.Config.Get('Column' + Field) || Field;
                $FieldObj = $('<li />').attr('title', Field).attr('data-fieldname', Field).text(Translation);
                $ContainerObj.find('.AvailableFields').append($FieldObj);
            });

            Core.UI.AllocationList.Init(IDString, $ContainerObj.find('.AllocationList'), 'UpdateAllocationList', '', UpdateAllocationList);
            Core.UI.Table.InitTableFilter($ContainerObj.find('.FilterAvailableFields'), $ContainerObj.find('.AvailableFields'));
        });
    };

    /**
     * @function
     * @param {jQueryObject} $ClickedElement The jQuery object of the element(s) that get the event listener
     * @param {string} ElementID The ID of the element whose content should be updated with the server answer
     * @param {jQueryObject} $Form The jQuery object of the form with the data for the server request
     * @return nothing
     * @description
     *      Binds a click event that submits the widget settings form via AJAX and
     *      replaces the widget content with the server answer.
     */
    TargetNS.RegisterUpdatePreferences = function ($ClickedElement, ElementID, $Form) {
        if (isJQueryObject($ClickedElement) && $ClickedElement.length) {
            $ClickedElement.click(function () {
                var URL = Core.Config.Get('Baselink') + Core.AJAX.SerializeForm($Form);
                Core.AJAX.ContentUpdate($('#' + ElementID), URL, function () {
                    Core.UI.ToggleTwoContainer($('#' + ElementID + '-setting'), $('#' + ElementID));
                    Core.UI.Table.InitCSSPseudoClasses();
                });
                // prevent the default link/button action
                return false;
            });
        }
    };

    return TargetNS;
}(Core.Agent.TableFilters || {}));
| noritnk/otrs | var/httpd/htdocs/js/Core.Agent.TableFilters.js | JavaScript | agpl-3.0 | 12,857 |
<?php
/**
* @copyright Copyright (c) 2016, ownCloud, Inc.
*
* @author Joas Schilling <coding@schilljs.com>
* @author Thomas Müller <thomas.mueller@tmit.eu>
*
* @license AGPL-3.0
*
* This code is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License, version 3,
* along with this program. If not, see <http://www.gnu.org/licenses/>
*
*/
namespace OC\DB\QueryBuilder\ExpressionBuilder;
use OC\DB\QueryBuilder\QueryFunction;
use OCP\DB\QueryBuilder\IQueryBuilder;
class PgSqlExpressionBuilder extends ExpressionBuilder {
	/**
	 * Returns an IQueryFunction that casts the column to the given type.
	 *
	 * Only integer casts need PostgreSQL-specific SQL; everything else is
	 * delegated to the generic implementation.
	 *
	 * @param string $column column to cast
	 * @param mixed $type one of the IQueryBuilder::PARAM_* constants
	 * @return string
	 */
	public function castColumn($column, $type) {
		if ($type !== IQueryBuilder::PARAM_INT) {
			return parent::castColumn($column, $type);
		}
		$quoted = $this->helper->quoteColumnName($column);
		return new QueryFunction(sprintf('CAST(%s AS INT)', $quoted));
	}

	/**
	 * @inheritdoc
	 */
	public function iLike($x, $y, $type = null) {
		// PostgreSQL supports case-insensitive matching natively via ILIKE,
		// so no LOWER() wrapping is required here.
		return $this->expressionBuilder->comparison(
			$this->helper->quoteColumnName($x),
			'ILIKE',
			$this->helper->quoteColumnName($y)
		);
	}
}
| jbicha/server | lib/private/DB/QueryBuilder/ExpressionBuilder/PgSqlExpressionBuilder.php | PHP | agpl-3.0 | 1,662 |
<?php
/**
* Shopware 5
* Copyright (c) shopware AG
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License with an additional
* permission and of our proprietary license can be found at and
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "Shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, title and interest in
* our trademarks remain entirely with us.
*/
namespace ShopwarePlugins\SwagUpdate\Components\Steps;
/**
 * Immutable value object describing a failed update step: a human readable
 * message, the exception that caused the failure (if any captured) and
 * arbitrary additional arguments for context.
 *
 * @category Shopware
 *
 * @copyright Copyright (c) shopware AG (http://www.shopware.de)
 */
class ErrorResult
{
    /**
     * Human readable error description.
     *
     * @var string
     */
    private $message;

    /**
     * Root cause of the failure, if one was captured.
     *
     * @var \Exception
     */
    private $exception;

    /**
     * Arbitrary additional context for the error.
     *
     * @var array
     */
    private $args;

    /**
     * @param string          $message
     * @param \Exception|null $exception
     * @param array           $args
     */
    public function __construct($message, \Exception $exception = null, $args = [])
    {
        $this->message = $message;
        $this->exception = $exception;
        $this->args = $args;
    }

    /**
     * @return string
     */
    public function getMessage()
    {
        return $this->message;
    }

    /**
     * @return \Exception|null
     */
    public function getException()
    {
        return $this->exception;
    }

    /**
     * @return array
     */
    public function getArgs()
    {
        return $this->args;
    }
}
| simkli/shopware | engine/Shopware/Plugins/Default/Backend/SwagUpdate/Components/Steps/ErrorResult.php | PHP | agpl-3.0 | 1,953 |
/***************************************************************************
* Copyright (c) 2015 FreeCAD Developers *
* Author: WandererFan <wandererfan@gmail.com> *
* Based on src/Mod/FEM/Gui/DlgSettingsFEMImp.cpp *
* *
* This file is part of the FreeCAD CAx development system. *
* *
* This library is free software; you can redistribute it and/or *
* modify it under the terms of the GNU Library General Public *
* License as published by the Free Software Foundation; either *
* version 2 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU Library General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this library; see the file COPYING.LIB. If not, *
* write to the Free Software Foundation, Inc., 59 Temple Place, *
* Suite 330, Boston, MA 02111-1307, USA *
* *
***************************************************************************/
#include "PreCompiled.h"
#include <App/Application.h>
#include <Base/Parameter.h>
#include <Base/Console.h>
#include "DrawGuiUtil.h"
#include "PreferencesGui.h"
#include "DlgPrefsTechDrawDimensionsImp.h"
#include "ui_DlgPrefsTechDrawDimensions.h"
using namespace TechDrawGui;
using namespace TechDraw;
// Builds the TechDraw "Dimensions" preference page and configures the two
// quantity spin boxes (font size, arrow size) as non-negative length values.
DlgPrefsTechDrawDimensionsImp::DlgPrefsTechDrawDimensionsImp( QWidget* parent )
    : PreferencePage( parent )
    , ui(new Ui_DlgPrefsTechDrawDimensionsImp)
{
    ui->setupUi(this);
    // Both quantity widgets measure lengths and must not go below zero.
    ui->plsb_FontSize->setUnit(Base::Unit::Length);
    ui->plsb_FontSize->setMinimum(0);
    ui->plsb_ArrowSize->setUnit(Base::Unit::Length);
    ui->plsb_ArrowSize->setMinimum(0);
}

DlgPrefsTechDrawDimensionsImp::~DlgPrefsTechDrawDimensionsImp()
{
    // no need to delete child widgets, Qt does it all for us
}
// Persists every dimension-related preference widget back to the parameter
// store. Each Pref* widget knows its own parameter path, so saving is a
// simple fan-out of onSave() calls.
void DlgPrefsTechDrawDimensionsImp::saveSettings()
{
    ui->pcbStandardAndStyle->onSave();
    ui->cbGlobalDecimals->onSave();
    ui->cbShowUnits->onSave();
    ui->sbAltDecimals->onSave();
    ui->plsb_FontSize->onSave();
    ui->pdsbToleranceScale->onSave();
    ui->leDiameter->onSave();
    ui->pcbArrow->onSave();
    ui->plsb_ArrowSize->onSave();
}
// Loads all dimension-related preferences into the page's widgets.
// Quantity widgets are seeded with explicit defaults first (they do not use
// a preset value, being based on QAbstractSpinBox); onRestore() then
// overwrites them with stored values where a parameter exists.
void DlgPrefsTechDrawDimensionsImp::loadSettings()
{
    //set defaults for Quantity widgets if property not found
    //Quantity widgets do not use preset value since they are based on
    //QAbstractSpinBox
    double fontDefault = Preferences::dimFontSizeMM();
    ui->plsb_FontSize->setValue(fontDefault);
//    double arrowDefault = 5.0;
//    plsb_ArrowSize->setValue(arrowDefault);
    // NOTE(review): the arrow-size default is seeded with the font size; the
    // commented-out lines above suggest a fixed 5.0 default was once used —
    // confirm which default is intended before changing this.
    ui->plsb_ArrowSize->setValue(fontDefault);
    ui->pcbStandardAndStyle->onRestore();
    ui->cbGlobalDecimals->onRestore();
    ui->cbShowUnits->onRestore();
    ui->sbAltDecimals->onRestore();
    ui->plsb_FontSize->onRestore();
    ui->pdsbToleranceScale->onRestore();
    ui->leDiameter->onRestore();
    ui->pcbArrow->onRestore();
    ui->plsb_ArrowSize->onRestore();
    // Repopulate the arrow-style combo and select the stored style last, so
    // the restore above cannot clobber the selection.
    DrawGuiUtil::loadArrowBox(ui->pcbArrow);
    ui->pcbArrow->setCurrentIndex(prefArrowStyle());
}
/**
 * Retranslates the subwidget strings when the application language changes.
 */
void DlgPrefsTechDrawDimensionsImp::changeEvent(QEvent *e)
{
    if (e->type() != QEvent::LanguageChange) {
        QWidget::changeEvent(e);
        return;
    }
    // Persist current values, swap in the translated strings, then reload
    // the stored settings into the freshly retranslated widgets.
    saveSettings();
    ui->retranslateUi(this);
    loadSettings();
}
// Returns the arrow-style index currently stored in the preferences.
int DlgPrefsTechDrawDimensionsImp::prefArrowStyle(void) const
{
    return PreferencesGui::dimArrowStyle();
}
#include <Mod/TechDraw/Gui/moc_DlgPrefsTechDrawDimensionsImp.cpp>
| sanguinariojoe/FreeCAD | src/Mod/TechDraw/Gui/DlgPrefsTechDrawDimensionsImp.cpp | C++ | lgpl-2.1 | 4,304 |
/***************************************************************************
copyright : (C) 2002 - 2008 by Scott Wheeler
email : wheeler@kde.org
***************************************************************************/
/***************************************************************************
* This library is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Lesser General Public License version *
* 2.1 as published by the Free Software Foundation. *
* *
* This library is distributed in the hope that it will be useful, but *
* WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
* Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public *
* License along with this library; if not, write to the Free Software *
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA *
* 02110-1301 USA *
* *
* Alternatively, this file is available under the Mozilla Public *
* License Version 1.1. You may obtain a copy of the License at *
* http://www.mozilla.org/MPL/ *
***************************************************************************/
#include <tbytevectorlist.h>
#include <tpropertymap.h>
#include <tdebug.h>
#include "id3v2tag.h"
#include "uniquefileidentifierframe.h"
using namespace TagLib;
using namespace ID3v2;
// Private implementation data (pimpl) for UniqueFileIdentifierFrame.
class UniqueFileIdentifierFrame::UniqueFileIdentifierFramePrivate
{
public:
  String owner;          // owner/namespace string, rendered before the NUL separator
  ByteVector identifier; // opaque binary identifier, rendered after the separator
};
////////////////////////////////////////////////////////////////////////////////
// public methods
////////////////////////////////////////////////////////////////////////////////
// Constructs a frame by parsing raw UFID frame data (header included).
UniqueFileIdentifierFrame::UniqueFileIdentifierFrame(const ByteVector &data) :
  ID3v2::Frame(data),
  d(new UniqueFileIdentifierFramePrivate())
{
  setData(data);
}

// Constructs a new "UFID" frame with the given owner string and identifier.
UniqueFileIdentifierFrame::UniqueFileIdentifierFrame(const String &owner, const ByteVector &id) :
  ID3v2::Frame("UFID"),
  d(new UniqueFileIdentifierFramePrivate())
{
  d->owner = owner;
  d->identifier = id;
}

UniqueFileIdentifierFrame::~UniqueFileIdentifierFrame()
{
  delete d; // release the pimpl
}
// Returns the owner/namespace string identifying who issued this UFID.
String UniqueFileIdentifierFrame::owner() const
{
  return d->owner;
}

// Returns the raw binary identifier stored in the frame.
ByteVector UniqueFileIdentifierFrame::identifier() const
{
  return d->identifier;
}

// Replaces the owner string.
void UniqueFileIdentifierFrame::setOwner(const String &s)
{
  d->owner = s;
}

// Replaces the binary identifier.
void UniqueFileIdentifierFrame::setIdentifier(const ByteVector &v)
{
  d->identifier = v;
}
// No textual representation is provided for a UFID frame; an empty String
// is returned.
String UniqueFileIdentifierFrame::toString() const
{
  return String();
}
// Maps this frame into the generic property interface: a MusicBrainz-owned
// UFID becomes MUSICBRAINZ_TRACKID; anything else is reported as
// unsupported data keyed by "UFID/<owner>".
PropertyMap UniqueFileIdentifierFrame::asProperties() const
{
  PropertyMap result;
  const bool isMusicBrainz = (d->owner == "http://musicbrainz.org");
  if(isMusicBrainz)
    result.insert("MUSICBRAINZ_TRACKID", String(d->identifier));
  else
    result.unsupportedData().append(frameID() + String("/") + d->owner);
  return result;
}
// Searches the tag's "UFID" frames for the first one whose owner matches o;
// returns 0 when no such frame exists.
UniqueFileIdentifierFrame *UniqueFileIdentifierFrame::findByOwner(const ID3v2::Tag *tag, const String &o) // static
{
  const ID3v2::FrameList ufidFrames = tag->frameList("UFID");

  for(ID3v2::FrameList::ConstIterator it = ufidFrames.begin(); it != ufidFrames.end(); ++it) {
    UniqueFileIdentifierFrame *ufid = dynamic_cast<UniqueFileIdentifierFrame *>(*it);
    if(ufid && ufid->owner() == o)
      return ufid;
  }

  return 0;
}
// Parses the raw field payload: a NUL-terminated Latin-1 owner string
// followed by the binary identifier (the remainder of the data).
void UniqueFileIdentifierFrame::parseFields(const ByteVector &data)
{
  if(data.size() < 1) {
    debug("An UFID frame must contain at least 1 byte.");
    return;
  }
  int pos = 0;
  d->owner = readStringField(data, String::Latin1, &pos);
  d->identifier = data.mid(pos);
}
// Serializes the field payload. Layout: <owner, Latin-1> 0x00 <identifier>.
ByteVector UniqueFileIdentifierFrame::renderFields() const
{
  ByteVector buffer = d->owner.data(String::Latin1);
  buffer.append(char(0));
  buffer.append(d->identifier);
  return buffer;
}
// Internal constructor used during tag parsing: the frame header h has
// already been read, so only the field data is parsed here.
UniqueFileIdentifierFrame::UniqueFileIdentifierFrame(const ByteVector &data, Header *h) :
  Frame(h),
  d(new UniqueFileIdentifierFramePrivate())
{
  parseFields(fieldData(data));
}
| black78/taglib | taglib/mpeg/id3v2/frames/uniquefileidentifierframe.cpp | C++ | lgpl-2.1 | 4,466 |
/***************************************************************************
* Copyright (c) 2013 Werner Mayer <wmayer[at]users.sourceforge.net> *
* *
* This file is part of the FreeCAD CAx development system. *
* *
* This library is free software; you can redistribute it and/or *
* modify it under the terms of the GNU Library General Public *
* License as published by the Free Software Foundation; either *
* version 2 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU Library General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this library; see the file COPYING.LIB. If not, *
* write to the Free Software Foundation, Inc., 59 Temple Place, *
* Suite 330, Boston, MA 02111-1307, USA *
* *
***************************************************************************/
#include "PreCompiled.h"
#if defined(__MINGW32__)
# define WNT // avoid conflict with GUID
#endif
#ifndef _PreComp_
# include <climits>
# include <sstream>
# include <Standard_Version.hxx>
# include <BRep_Builder.hxx>
# include <Handle_TDocStd_Document.hxx>
# include <Handle_XCAFApp_Application.hxx>
# include <TDocStd_Document.hxx>
# include <XCAFApp_Application.hxx>
# include <XCAFDoc_DocumentTool.hxx>
# include <XCAFDoc_ShapeTool.hxx>
# include <XCAFDoc_ColorTool.hxx>
# include <XCAFDoc_Location.hxx>
# include <TDF_Label.hxx>
# include <TDF_LabelSequence.hxx>
# include <TDF_ChildIterator.hxx>
# include <TDataStd_Name.hxx>
# include <Quantity_Color.hxx>
# include <STEPCAFControl_Reader.hxx>
# include <STEPCAFControl_Writer.hxx>
# include <STEPControl_Writer.hxx>
# include <IGESCAFControl_Reader.hxx>
# include <IGESCAFControl_Writer.hxx>
# include <IGESControl_Controller.hxx>
# include <Interface_Static.hxx>
# include <Transfer_TransientProcess.hxx>
# include <XSControl_WorkSession.hxx>
# include <TopTools_IndexedMapOfShape.hxx>
# include <TopTools_MapOfShape.hxx>
# include <TopExp_Explorer.hxx>
# include <TopoDS_Iterator.hxx>
# include <APIHeaderSection_MakeHeader.hxx>
# include <OSD_Exception.hxx>
#if OCC_VERSION_HEX >= 0x060500
# include <TDataXtd_Shape.hxx>
# else
# include <TDataStd_Shape.hxx>
# endif
#endif
#include "ImportOCAFAssembly.h"
#include <Base/Console.h>
#include <App/Application.h>
#include <App/Document.h>
#include <App/DocumentObjectPy.h>
#include <Mod/Part/App/PartFeature.h>
#include <Mod/Part/App/ProgressIndicator.h>
#include <Mod/Part/App/ImportIges.h>
#include <Mod/Part/App/ImportStep.h>
using namespace Import;
ImportOCAFAssembly::ImportOCAFAssembly(Handle_TDocStd_Document h, App::Document* d, const std::string& name, App::DocumentObject *target)
: pDoc(h),
doc(d),
default_name(name),
targetObj(target)
{
aShapeTool = XCAFDoc_DocumentTool::ShapeTool (pDoc->Main());
aColorTool = XCAFDoc_DocumentTool::ColorTool(pDoc->Main());
}
ImportOCAFAssembly::~ImportOCAFAssembly()
{
}
void ImportOCAFAssembly::loadShapes()
{
myRefShapes.clear();
loadShapes(pDoc->Main(), TopLoc_Location(), default_name, "", false,0);
}
void ImportOCAFAssembly::loadAssembly()
{
myRefShapes.clear();
loadShapes(pDoc->Main(), TopLoc_Location(), default_name, "", false,0);
}
std::string ImportOCAFAssembly::getName(const TDF_Label& label)
{
Handle(TDataStd_Name) name;
std::string part_name;
if (label.FindAttribute(TDataStd_Name::GetID(),name)) {
TCollection_ExtendedString extstr = name->Get();
char* str = new char[extstr.LengthOfCString()+1];
extstr.ToUTF8CString(str);
part_name = str;
delete [] str;
return part_name;
//if (part_name.empty()) {
// return "";
//}
//else {
// bool ws=true;
// for (std::string::iterator it = part_name.begin(); it != part_name.end(); ++it) {
// if (*it != ' ') {
// ws = false;
// break;
// }
// }
// if (ws)
// part_name = defaultname;
//}
}
return "";
}
void ImportOCAFAssembly::loadShapes(const TDF_Label& label, const TopLoc_Location& loc, const std::string& defaultname, const std::string& assembly, bool isRef, int dep)
{
int hash = 0;
TopoDS_Shape aShape;
if (aShapeTool->GetShape(label,aShape)) {
hash = aShape.HashCode(HashUpper);
}
Handle(TDataStd_Name) name;
std::string part_name = defaultname;
if (label.FindAttribute(TDataStd_Name::GetID(),name)) {
TCollection_ExtendedString extstr = name->Get();
char* str = new char[extstr.LengthOfCString()+1];
extstr.ToUTF8CString(str);
part_name = str;
delete [] str;
if (part_name.empty()) {
part_name = defaultname;
}
else {
bool ws=true;
for (std::string::iterator it = part_name.begin(); it != part_name.end(); ++it) {
if (*it != ' ') {
ws = false;
break;
}
}
if (ws)
part_name = defaultname;
}
}
TopLoc_Location part_loc = loc;
Handle(XCAFDoc_Location) hLoc;
if (label.FindAttribute(XCAFDoc_Location::GetID(), hLoc)) {
if (isRef)
part_loc = part_loc * hLoc->Get();
else
part_loc = hLoc->Get();
}
#ifdef FC_DEBUG
const char *s;
if( !hLoc.IsNull() )
s = hLoc->Get().IsIdentity()?"0":"1";
else
s = "0";
std::stringstream str;
Base::Console().Log("H:%-9d \tN:%-30s \tTop:%d, Asm:%d, Shape:%d, Compound:%d, Simple:%d, Free:%d, Ref:%d, Component:%d, SubShape:%d\tTrf:%s-- Dep:%d \n",
hash,
part_name.c_str(),
aShapeTool->IsTopLevel(label),
aShapeTool->IsAssembly(label),
aShapeTool->IsShape(label),
aShapeTool->IsCompound(label),
aShapeTool->IsSimpleShape(label),
aShapeTool->IsFree(label),
aShapeTool->IsReference(label),
aShapeTool->IsComponent(label),
aShapeTool->IsSubShape(label),
s,
dep
);
label.Dump(str);
Base::Console().Message(str.str().c_str() );
#endif
std::string asm_name = assembly;
if (aShapeTool->IsAssembly(label)) {
asm_name = part_name;
}
TDF_Label ref;
if (aShapeTool->IsReference(label) && aShapeTool->GetReferredShape(label, ref)) {
loadShapes(ref, part_loc, part_name, asm_name, true,dep + 1);
}
if (isRef || myRefShapes.find(hash) == myRefShapes.end()) {
TopoDS_Shape aShape;
if (isRef && aShapeTool->GetShape(label, aShape))
myRefShapes.insert(aShape.HashCode(HashUpper));
if (aShapeTool->IsSimpleShape(label) && (isRef || aShapeTool->IsFree(label))) {
if (!asm_name.empty())
part_name = asm_name;
if (isRef)
createShape(label, loc, part_name);
else
createShape(label, part_loc, part_name);
}
else {
for (TDF_ChildIterator it(label); it.More(); it.Next()) {
loadShapes(it.Value(), part_loc, part_name, asm_name, isRef, dep+1);
}
}
}
}
void ImportOCAFAssembly::createShape(const TDF_Label& label, const TopLoc_Location& loc, const std::string& name)
{
Base::Console().Log("-create Shape\n");
const TopoDS_Shape& aShape = aShapeTool->GetShape(label);
if (!aShape.IsNull() && aShape.ShapeType() == TopAbs_COMPOUND) {
TopExp_Explorer xp;
int ctSolids = 0, ctShells = 0;
for (xp.Init(aShape, TopAbs_SOLID); xp.More(); xp.Next(), ctSolids++)
{
createShape(xp.Current(), loc, name);
}
for (xp.Init(aShape, TopAbs_SHELL, TopAbs_SOLID); xp.More(); xp.Next(), ctShells++)
{
createShape(xp.Current(), loc, name);
}
if (ctSolids > 0 || ctShells > 0)
return;
}
createShape(aShape, loc, name);
}
void ImportOCAFAssembly::createShape(const TopoDS_Shape& aShape, const TopLoc_Location& loc, const std::string& name)
{
Part::Feature* part = static_cast<Part::Feature*>(doc->addObject("Part::Feature"));
if (!loc.IsIdentity())
part->Shape.setValue(aShape.Moved(loc));
else
part->Shape.setValue(aShape);
part->Label.setValue(name);
Quantity_Color aColor;
App::Color color(0.8f,0.8f,0.8f);
if (aColorTool->GetColor(aShape, XCAFDoc_ColorGen, aColor) ||
aColorTool->GetColor(aShape, XCAFDoc_ColorSurf, aColor) ||
aColorTool->GetColor(aShape, XCAFDoc_ColorCurv, aColor)) {
color.r = (float)aColor.Red();
color.g = (float)aColor.Green();
color.b = (float)aColor.Blue();
std::vector<App::Color> colors;
colors.push_back(color);
applyColors(part, colors);
#if 0//TODO
Gui::ViewProvider* vp = Gui::Application::Instance->getViewProvider(part);
if (vp && vp->isDerivedFrom(PartGui::ViewProviderPart::getClassTypeId())) {
color.r = aColor.Red();
color.g = aColor.Green();
color.b = aColor.Blue();
static_cast<PartGui::ViewProviderPart*>(vp)->ShapeColor.setValue(color);
}
#endif
}
TopTools_IndexedMapOfShape faces;
TopExp_Explorer xp(aShape,TopAbs_FACE);
while (xp.More()) {
faces.Add(xp.Current());
xp.Next();
}
bool found_face_color = false;
std::vector<App::Color> faceColors;
faceColors.resize(faces.Extent(), color);
xp.Init(aShape,TopAbs_FACE);
while (xp.More()) {
if (aColorTool->GetColor(xp.Current(), XCAFDoc_ColorGen, aColor) ||
aColorTool->GetColor(xp.Current(), XCAFDoc_ColorSurf, aColor) ||
aColorTool->GetColor(xp.Current(), XCAFDoc_ColorCurv, aColor)) {
int index = faces.FindIndex(xp.Current());
color.r = (float)aColor.Red();
color.g = (float)aColor.Green();
color.b = (float)aColor.Blue();
faceColors[index-1] = color;
found_face_color = true;
}
xp.Next();
}
if (found_face_color) {
applyColors(part, faceColors);
}
}
| timthelion/FreeCAD | src/Mod/Import/App/ImportOCAFAssembly.cpp | C++ | lgpl-2.1 | 10,993 |
package net.sourceforge.jsocks.test;
import net.sourceforge.jsocks.socks.*;
import net.sourceforge.jsocks.socks.server.*;
import java.net.Socket;
/** Test file for UserPasswordAuthentictor */
/**
 * Manual test driver for {@code UserPasswordAuthenticator}: starts a SOCKS
 * proxy on port 1080 that accepts exactly one user/password pair, taken from
 * the command line or falling back to a demo pair.
 */
public class UPSOCKS implements UserValidation {

    String user, password;

    UPSOCKS(String user, String password) {
        this.user = user;
        this.password = password;
    }

    /**
     * Accepts the connection iff the supplied credentials match the pair this
     * validator was constructed with.
     *
     * @param user candidate user name
     * @param password candidate password
     * @param s the socket the credentials arrived on (logged for debugging)
     * @return true when both user and password match
     */
    public boolean isUserValid(String user, String password, Socket s) {
        // SECURITY: never write the candidate password to the log in clear
        // text; only its length is reported for debugging.
        System.err.println("User:" + user + "\tPassword length:"
                + (password == null ? 0 : password.length()));
        System.err.println("Socket:" + s);
        return user.equals(this.user) && password.equals(this.password);
    }

    public static void main(String[] args) {
        String user;
        String password;
        if (args.length == 2) {
            user = args[0];
            password = args[1];
        } else {
            // Fall back to a well-known demo pair when no CLI args are given.
            user = "user";
            password = "password";
        }
        UPSOCKS validator = new UPSOCKS(user, password);
        UserPasswordAuthenticator auth = new UserPasswordAuthenticator(validator);
        ProxyServer server = new ProxyServer(auth);
        server.start(1080);
    }
}
| cqjjjzr/jsocks-mirror | src/java/net/sourceforge/jsocks/test/UPSOCKS.java | Java | lgpl-3.0 | 1,135 |
package barqsoft.footballscores;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
/**
 * Entry activity: hosts the {@link PagerFragment} that shows the score pages
 * and provides the overflow menu entry for the About screen. Pager position
 * and the selected match id are saved/restored across configuration changes.
 */
public class MainActivity extends ActionBarActivity
{
    // Match id of the most recently selected score row; static so fragments
    // can read it without an explicit callback channel.
    public static int selected_match_id;
    // Index of the pager page currently shown; starts at page 2.
    public static int current_fragment = 2;
    public static String LOG_TAG = "MainActivity";
    private final String save_tag = "Save Test";
    // The single pager fragment instance; re-attached from the fragment
    // manager on restore instead of being recreated.
    private PagerFragment my_main;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Log.d(LOG_TAG, "Reached MainActivity onCreate");
        // Only add the fragment on a fresh start; after a configuration
        // change the fragment manager restores it via onRestoreInstanceState.
        if (savedInstanceState == null) {
            my_main = new PagerFragment();
            getSupportFragmentManager().beginTransaction()
                    .add(R.id.container, my_main)
                    .commit();
        }
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_about)
        {
            Intent start_about = new Intent(this,AboutActivity.class);
            startActivity(start_about);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
    @Override
    protected void onSaveInstanceState(Bundle outState)
    {
        // Persist the visible pager page, the selected match and the fragment
        // reference so the UI comes back in the same state after rotation.
        Log.v(save_tag,"will save");
        Log.v(save_tag,"fragment: "+String.valueOf(my_main.mPagerHandler.getCurrentItem()));
        Log.v(save_tag,"selected id: "+selected_match_id);
        outState.putInt("Pager_Current",my_main.mPagerHandler.getCurrentItem());
        outState.putInt("Selected_match",selected_match_id);
        getSupportFragmentManager().putFragment(outState,"my_main",my_main);
        super.onSaveInstanceState(outState);
    }
    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState)
    {
        // Mirror of onSaveInstanceState: restore page index, selection and
        // the previously saved fragment instance.
        Log.v(save_tag,"will retrive");
        Log.v(save_tag,"fragment: "+String.valueOf(savedInstanceState.getInt("Pager_Current")));
        Log.v(save_tag,"selected id: "+savedInstanceState.getInt("Selected_match"));
        current_fragment = savedInstanceState.getInt("Pager_Current");
        selected_match_id = savedInstanceState.getInt("Selected_match");
        my_main = (PagerFragment) getSupportFragmentManager().getFragment(savedInstanceState,"my_main");
        super.onRestoreInstanceState(savedInstanceState);
    }
}
| vickychijwani/udacity-p3-super-duo | football-scores/app/src/main/java/barqsoft/footballscores/MainActivity.java | Java | unlicense | 2,966 |
import { PlannerListModule } from 'fabric8-planner';
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { Http } from '@angular/http';

// Feature module wrapping the fabric8-planner list component.
@NgModule({
  imports: [ CommonModule, PlannerListModule ]
})
export class PlanListModule {
  // NOTE(review): `http` is injected but never used in this constructor;
  // presumably the injection forces eager instantiation/configuration of the
  // Http service — confirm before removing it from the DI graph.
  constructor(http: Http) {}
}
| fabric8io/fabric8-planner | runtime/src/app/components/plan-list.module.ts | TypeScript | apache-2.0 | 322 |
/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// DO NOT EDIT. THIS FILE IS AUTO-GENERATED BY $KUBEROOT/hack/update-generated-conversions.sh
package v1
import (
reflect "reflect"
api "k8s.io/kubernetes/pkg/api"
resource "k8s.io/kubernetes/pkg/api/resource"
conversion "k8s.io/kubernetes/pkg/conversion"
)
func convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in *api.AWSElasticBlockStoreVolumeSource, out *AWSElasticBlockStoreVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.AWSElasticBlockStoreVolumeSource))(in)
}
out.VolumeID = in.VolumeID
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func convert_api_Binding_To_v1_Binding(in *api.Binding, out *Binding, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.Binding))(in)
}
if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Target, &out.Target, s); err != nil {
return err
}
return nil
}
func convert_api_Capabilities_To_v1_Capabilities(in *api.Capabilities, out *Capabilities, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.Capabilities))(in)
}
if in.Add != nil {
out.Add = make([]Capability, len(in.Add))
for i := range in.Add {
out.Add[i] = Capability(in.Add[i])
}
} else {
out.Add = nil
}
if in.Drop != nil {
out.Drop = make([]Capability, len(in.Drop))
for i := range in.Drop {
out.Drop[i] = Capability(in.Drop[i])
}
} else {
out.Drop = nil
}
return nil
}
func convert_api_ComponentCondition_To_v1_ComponentCondition(in *api.ComponentCondition, out *ComponentCondition, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ComponentCondition))(in)
}
out.Type = ComponentConditionType(in.Type)
out.Status = ConditionStatus(in.Status)
out.Message = in.Message
out.Error = in.Error
return nil
}
func convert_api_ComponentStatus_To_v1_ComponentStatus(in *api.ComponentStatus, out *ComponentStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ComponentStatus))(in)
}
if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if in.Conditions != nil {
out.Conditions = make([]ComponentCondition, len(in.Conditions))
for i := range in.Conditions {
if err := convert_api_ComponentCondition_To_v1_ComponentCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
return nil
}
func convert_api_ComponentStatusList_To_v1_ComponentStatusList(in *api.ComponentStatusList, out *ComponentStatusList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ComponentStatusList))(in)
}
if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ComponentStatus, len(in.Items))
for i := range in.Items {
if err := convert_api_ComponentStatus_To_v1_ComponentStatus(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func convert_api_Container_To_v1_Container(in *api.Container, out *Container, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.Container))(in)
}
out.Name = in.Name
out.Image = in.Image
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
if in.Args != nil {
out.Args = make([]string, len(in.Args))
for i := range in.Args {
out.Args[i] = in.Args[i]
}
} else {
out.Args = nil
}
out.WorkingDir = in.WorkingDir
if in.Ports != nil {
out.Ports = make([]ContainerPort, len(in.Ports))
for i := range in.Ports {
if err := convert_api_ContainerPort_To_v1_ContainerPort(&in.Ports[i], &out.Ports[i], s); err != nil {
return err
}
}
} else {
out.Ports = nil
}
if in.Env != nil {
out.Env = make([]EnvVar, len(in.Env))
for i := range in.Env {
if err := convert_api_EnvVar_To_v1_EnvVar(&in.Env[i], &out.Env[i], s); err != nil {
return err
}
}
} else {
out.Env = nil
}
if err := convert_api_ResourceRequirements_To_v1_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
return err
}
if in.VolumeMounts != nil {
out.VolumeMounts = make([]VolumeMount, len(in.VolumeMounts))
for i := range in.VolumeMounts {
if err := convert_api_VolumeMount_To_v1_VolumeMount(&in.VolumeMounts[i], &out.VolumeMounts[i], s); err != nil {
return err
}
}
} else {
out.VolumeMounts = nil
}
if in.LivenessProbe != nil {
out.LivenessProbe = new(Probe)
if err := convert_api_Probe_To_v1_Probe(in.LivenessProbe, out.LivenessProbe, s); err != nil {
return err
}
} else {
out.LivenessProbe = nil
}
if in.ReadinessProbe != nil {
out.ReadinessProbe = new(Probe)
if err := convert_api_Probe_To_v1_Probe(in.ReadinessProbe, out.ReadinessProbe, s); err != nil {
return err
}
} else {
out.ReadinessProbe = nil
}
if in.Lifecycle != nil {
out.Lifecycle = new(Lifecycle)
if err := convert_api_Lifecycle_To_v1_Lifecycle(in.Lifecycle, out.Lifecycle, s); err != nil {
return err
}
} else {
out.Lifecycle = nil
}
out.TerminationMessagePath = in.TerminationMessagePath
out.ImagePullPolicy = PullPolicy(in.ImagePullPolicy)
if in.SecurityContext != nil {
out.SecurityContext = new(SecurityContext)
if err := convert_api_SecurityContext_To_v1_SecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil {
return err
}
} else {
out.SecurityContext = nil
}
out.Stdin = in.Stdin
out.TTY = in.TTY
return nil
}
func convert_api_ContainerPort_To_v1_ContainerPort(in *api.ContainerPort, out *ContainerPort, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*api.ContainerPort))(in)
}
out.Name = in.Name
out.HostPort = in.HostPort
out.ContainerPort = in.ContainerPort
out.Protocol = Protocol(in.Protocol)
out.HostIP = in.HostIP
return nil
}
// convert_api_ContainerState_To_v1_ContainerState deep-copies the one (or
// none) of Waiting/Running/Terminated that is set, after applying any
// registered defaulting for api.ContainerState.
func convert_api_ContainerState_To_v1_ContainerState(in *api.ContainerState, out *ContainerState, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ContainerState))(in)
	}
	out.Waiting = nil
	if in.Waiting != nil {
		out.Waiting = new(ContainerStateWaiting)
		if err := convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting(in.Waiting, out.Waiting, s); err != nil {
			return err
		}
	}
	out.Running = nil
	if in.Running != nil {
		out.Running = new(ContainerStateRunning)
		if err := convert_api_ContainerStateRunning_To_v1_ContainerStateRunning(in.Running, out.Running, s); err != nil {
			return err
		}
	}
	out.Terminated = nil
	if in.Terminated != nil {
		out.Terminated = new(ContainerStateTerminated)
		if err := convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated(in.Terminated, out.Terminated, s); err != nil {
			return err
		}
	}
	return nil
}
// convert_api_ContainerStateRunning_To_v1_ContainerStateRunning converts the
// running-state record; StartedAt goes through the generic scope converter.
func convert_api_ContainerStateRunning_To_v1_ContainerStateRunning(in *api.ContainerStateRunning, out *ContainerStateRunning, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ContainerStateRunning))(in)
	}
	return s.Convert(&in.StartedAt, &out.StartedAt, 0)
}
// convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated copies
// the terminated-state record; the two timestamps use the generic converter.
func convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated(in *api.ContainerStateTerminated, out *ContainerStateTerminated, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ContainerStateTerminated))(in)
	}
	out.ExitCode = in.ExitCode
	out.Signal = in.Signal
	out.Reason = in.Reason
	out.Message = in.Message
	out.ContainerID = in.ContainerID
	if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil {
		return err
	}
	return s.Convert(&in.FinishedAt, &out.FinishedAt, 0)
}
// convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting copies the
// waiting-state record after applying any registered defaulting.
// NOTE(review): only Reason is copied here; if api.ContainerStateWaiting
// carries additional fields they are dropped by this conversion — confirm
// against the api type definition.
func convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting(in *api.ContainerStateWaiting, out *ContainerStateWaiting, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ContainerStateWaiting))(in)
	}
	out.Reason = in.Reason
	return nil
}
// convert_api_ContainerStatus_To_v1_ContainerStatus copies a container status,
// converting both the current and the last-termination state records.
func convert_api_ContainerStatus_To_v1_ContainerStatus(in *api.ContainerStatus, out *ContainerStatus, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ContainerStatus))(in)
	}
	out.Name = in.Name
	out.Ready = in.Ready
	out.RestartCount = in.RestartCount
	out.Image = in.Image
	out.ImageID = in.ImageID
	out.ContainerID = in.ContainerID
	if err := convert_api_ContainerState_To_v1_ContainerState(&in.State, &out.State, s); err != nil {
		return err
	}
	return convert_api_ContainerState_To_v1_ContainerState(&in.LastTerminationState, &out.LastTerminationState, s)
}
// convert_api_Daemon_To_v1_Daemon converts a Daemon object: type/object
// metadata, spec, and status, in that order.
func convert_api_Daemon_To_v1_Daemon(in *api.Daemon, out *Daemon, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.Daemon))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_DaemonSpec_To_v1_DaemonSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return convert_api_DaemonStatus_To_v1_DaemonStatus(&in.Status, &out.Status, s)
}
// convert_api_DaemonList_To_v1_DaemonList converts a DaemonList, deep-copying
// each item. A nil Items slice stays nil (nil/empty distinction preserved).
func convert_api_DaemonList_To_v1_DaemonList(in *api.DaemonList, out *DaemonList, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.DaemonList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Items = nil
	if in.Items != nil {
		out.Items = make([]Daemon, len(in.Items))
		for idx := range in.Items {
			if err := convert_api_Daemon_To_v1_Daemon(&in.Items[idx], &out.Items[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_DaemonSpec_To_v1_DaemonSpec copies the selector map and the
// optional pod template. Nil map/pointer inputs produce nil outputs.
func convert_api_DaemonSpec_To_v1_DaemonSpec(in *api.DaemonSpec, out *DaemonSpec, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.DaemonSpec))(in)
	}
	out.Selector = nil
	if in.Selector != nil {
		out.Selector = make(map[string]string, len(in.Selector))
		for k, v := range in.Selector {
			out.Selector[k] = v
		}
	}
	out.Template = nil
	if in.Template != nil {
		out.Template = new(PodTemplateSpec)
		if err := convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(in.Template, out.Template, s); err != nil {
			return err
		}
	}
	return nil
}
// convert_api_DaemonStatus_To_v1_DaemonStatus copies the three scheduling
// counters after applying any registered defaulting.
func convert_api_DaemonStatus_To_v1_DaemonStatus(in *api.DaemonStatus, out *DaemonStatus, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.DaemonStatus))(in)
	}
	out.CurrentNumberScheduled = in.CurrentNumberScheduled
	out.NumberMisscheduled = in.NumberMisscheduled
	out.DesiredNumberScheduled = in.DesiredNumberScheduled
	return nil
}
// convert_api_DeleteOptions_To_v1_DeleteOptions converts delete options; the
// optional grace period is deep-copied so the two objects do not share the
// pointer.
func convert_api_DeleteOptions_To_v1_DeleteOptions(in *api.DeleteOptions, out *DeleteOptions, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.DeleteOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.GracePeriodSeconds = nil
	if in.GracePeriodSeconds != nil {
		seconds := *in.GracePeriodSeconds
		out.GracePeriodSeconds = &seconds
	}
	return nil
}
// convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource copies the
// storage medium (a distinct string type in each package).
func convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource(in *api.EmptyDirVolumeSource, out *EmptyDirVolumeSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EmptyDirVolumeSource))(in)
	}
	out.Medium = StorageMedium(in.Medium)
	return nil
}
// convert_api_EndpointAddress_To_v1_EndpointAddress copies the IP and the
// optional target object reference.
func convert_api_EndpointAddress_To_v1_EndpointAddress(in *api.EndpointAddress, out *EndpointAddress, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EndpointAddress))(in)
	}
	out.IP = in.IP
	out.TargetRef = nil
	if in.TargetRef != nil {
		out.TargetRef = new(ObjectReference)
		if err := convert_api_ObjectReference_To_v1_ObjectReference(in.TargetRef, out.TargetRef, s); err != nil {
			return err
		}
	}
	return nil
}
// convert_api_EndpointPort_To_v1_EndpointPort copies name, port number and
// protocol after applying any registered defaulting.
func convert_api_EndpointPort_To_v1_EndpointPort(in *api.EndpointPort, out *EndpointPort, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EndpointPort))(in)
	}
	out.Name = in.Name
	out.Port = in.Port
	out.Protocol = Protocol(in.Protocol)
	return nil
}
// convert_api_EndpointSubset_To_v1_EndpointSubset deep-copies the address and
// port slices; nil inputs stay nil.
func convert_api_EndpointSubset_To_v1_EndpointSubset(in *api.EndpointSubset, out *EndpointSubset, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EndpointSubset))(in)
	}
	out.Addresses = nil
	if in.Addresses != nil {
		out.Addresses = make([]EndpointAddress, len(in.Addresses))
		for idx := range in.Addresses {
			if err := convert_api_EndpointAddress_To_v1_EndpointAddress(&in.Addresses[idx], &out.Addresses[idx], s); err != nil {
				return err
			}
		}
	}
	out.Ports = nil
	if in.Ports != nil {
		out.Ports = make([]EndpointPort, len(in.Ports))
		for idx := range in.Ports {
			if err := convert_api_EndpointPort_To_v1_EndpointPort(&in.Ports[idx], &out.Ports[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_Endpoints_To_v1_Endpoints converts an Endpoints object: metadata
// plus a deep copy of the Subsets slice (nil stays nil).
func convert_api_Endpoints_To_v1_Endpoints(in *api.Endpoints, out *Endpoints, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.Endpoints))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	out.Subsets = nil
	if in.Subsets != nil {
		out.Subsets = make([]EndpointSubset, len(in.Subsets))
		for idx := range in.Subsets {
			if err := convert_api_EndpointSubset_To_v1_EndpointSubset(&in.Subsets[idx], &out.Subsets[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_EndpointsList_To_v1_EndpointsList converts an EndpointsList,
// deep-copying each item (nil Items stays nil).
func convert_api_EndpointsList_To_v1_EndpointsList(in *api.EndpointsList, out *EndpointsList, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EndpointsList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Items = nil
	if in.Items != nil {
		out.Items = make([]Endpoints, len(in.Items))
		for idx := range in.Items {
			if err := convert_api_Endpoints_To_v1_Endpoints(&in.Items[idx], &out.Items[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_EnvVar_To_v1_EnvVar copies name/value and the optional
// ValueFrom source.
func convert_api_EnvVar_To_v1_EnvVar(in *api.EnvVar, out *EnvVar, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EnvVar))(in)
	}
	out.Name = in.Name
	out.Value = in.Value
	out.ValueFrom = nil
	if in.ValueFrom != nil {
		out.ValueFrom = new(EnvVarSource)
		if err := convert_api_EnvVarSource_To_v1_EnvVarSource(in.ValueFrom, out.ValueFrom, s); err != nil {
			return err
		}
	}
	return nil
}
// convert_api_EnvVarSource_To_v1_EnvVarSource copies the optional field
// selector that backs an environment variable.
func convert_api_EnvVarSource_To_v1_EnvVarSource(in *api.EnvVarSource, out *EnvVarSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EnvVarSource))(in)
	}
	out.FieldRef = nil
	if in.FieldRef != nil {
		out.FieldRef = new(ObjectFieldSelector)
		if err := convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector(in.FieldRef, out.FieldRef, s); err != nil {
			return err
		}
	}
	return nil
}
// convert_api_Event_To_v1_Event converts an Event: metadata, involved object,
// source, both timestamps (via the generic converter), and scalar fields.
func convert_api_Event_To_v1_Event(in *api.Event, out *Event, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.Event))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.InvolvedObject, &out.InvolvedObject, s); err != nil {
		return err
	}
	out.Reason = in.Reason
	out.Message = in.Message
	if err := convert_api_EventSource_To_v1_EventSource(&in.Source, &out.Source, s); err != nil {
		return err
	}
	if err := s.Convert(&in.FirstTimestamp, &out.FirstTimestamp, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.LastTimestamp, &out.LastTimestamp, 0); err != nil {
		return err
	}
	out.Count = in.Count
	return nil
}
// convert_api_EventList_To_v1_EventList converts an EventList, deep-copying
// each item (nil Items stays nil).
func convert_api_EventList_To_v1_EventList(in *api.EventList, out *EventList, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EventList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Items = nil
	if in.Items != nil {
		out.Items = make([]Event, len(in.Items))
		for idx := range in.Items {
			if err := convert_api_Event_To_v1_Event(&in.Items[idx], &out.Items[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_EventSource_To_v1_EventSource copies the reporting component and
// host after applying any registered defaulting.
func convert_api_EventSource_To_v1_EventSource(in *api.EventSource, out *EventSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.EventSource))(in)
	}
	out.Component = in.Component
	out.Host = in.Host
	return nil
}
// convert_api_ExecAction_To_v1_ExecAction converts an api.ExecAction to a v1
// ExecAction, applying any defaulting function registered for api.ExecAction
// first.
//
// The only field is Command, which is deep-copied so the two objects do not
// share backing storage; a nil input slice stays nil, preserving the
// nil/empty distinction that the nil-check exists to protect.
func convert_api_ExecAction_To_v1_ExecAction(in *api.ExecAction, out *ExecAction, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ExecAction))(in)
	}
	if in.Command != nil {
		// Use the builtin copy instead of an element-by-element loop: same
		// semantics for a []string, clearer intent.
		out.Command = make([]string, len(in.Command))
		copy(out.Command, in.Command)
	} else {
		out.Command = nil
	}
	return nil
}
// convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource
// copies the four scalar fields of a GCE PD volume source.
func convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in *api.GCEPersistentDiskVolumeSource, out *GCEPersistentDiskVolumeSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.GCEPersistentDiskVolumeSource))(in)
	}
	out.PDName = in.PDName
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource copies the
// repository URL and revision.
func convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource(in *api.GitRepoVolumeSource, out *GitRepoVolumeSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.GitRepoVolumeSource))(in)
	}
	out.Repository = in.Repository
	out.Revision = in.Revision
	return nil
}
// convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource copies the
// endpoints name, path and read-only flag.
func convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in *api.GlusterfsVolumeSource, out *GlusterfsVolumeSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.GlusterfsVolumeSource))(in)
	}
	out.EndpointsName = in.EndpointsName
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_HTTPGetAction_To_v1_HTTPGetAction copies the probe target; Port
// (an IntOrString-style union) goes through the generic scope converter.
func convert_api_HTTPGetAction_To_v1_HTTPGetAction(in *api.HTTPGetAction, out *HTTPGetAction, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.HTTPGetAction))(in)
	}
	out.Path = in.Path
	out.Host = in.Host
	out.Scheme = URIScheme(in.Scheme)
	return s.Convert(&in.Port, &out.Port, 0)
}
// convert_api_Handler_To_v1_Handler deep-copies whichever of the three
// optional action variants (Exec / HTTPGet / TCPSocket) are set.
func convert_api_Handler_To_v1_Handler(in *api.Handler, out *Handler, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.Handler))(in)
	}
	out.Exec = nil
	if in.Exec != nil {
		out.Exec = new(ExecAction)
		if err := convert_api_ExecAction_To_v1_ExecAction(in.Exec, out.Exec, s); err != nil {
			return err
		}
	}
	out.HTTPGet = nil
	if in.HTTPGet != nil {
		out.HTTPGet = new(HTTPGetAction)
		if err := convert_api_HTTPGetAction_To_v1_HTTPGetAction(in.HTTPGet, out.HTTPGet, s); err != nil {
			return err
		}
	}
	out.TCPSocket = nil
	if in.TCPSocket != nil {
		out.TCPSocket = new(TCPSocketAction)
		if err := convert_api_TCPSocketAction_To_v1_TCPSocketAction(in.TCPSocket, out.TCPSocket, s); err != nil {
			return err
		}
	}
	return nil
}
// convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource copies the host
// path after applying any registered defaulting.
func convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in *api.HostPathVolumeSource, out *HostPathVolumeSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.HostPathVolumeSource))(in)
	}
	out.Path = in.Path
	return nil
}
// convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource copies the five scalar
// fields describing an iSCSI target.
func convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in *api.ISCSIVolumeSource, out *ISCSIVolumeSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ISCSIVolumeSource))(in)
	}
	out.TargetPortal = in.TargetPortal
	out.IQN = in.IQN
	out.Lun = in.Lun
	out.FSType = in.FSType
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_Lifecycle_To_v1_Lifecycle deep-copies the optional PostStart and
// PreStop handlers.
func convert_api_Lifecycle_To_v1_Lifecycle(in *api.Lifecycle, out *Lifecycle, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.Lifecycle))(in)
	}
	out.PostStart = nil
	if in.PostStart != nil {
		out.PostStart = new(Handler)
		if err := convert_api_Handler_To_v1_Handler(in.PostStart, out.PostStart, s); err != nil {
			return err
		}
	}
	out.PreStop = nil
	if in.PreStop != nil {
		out.PreStop = new(Handler)
		if err := convert_api_Handler_To_v1_Handler(in.PreStop, out.PreStop, s); err != nil {
			return err
		}
	}
	return nil
}
// convert_api_LimitRange_To_v1_LimitRange converts a LimitRange object
// (metadata plus spec; the type has no status).
func convert_api_LimitRange_To_v1_LimitRange(in *api.LimitRange, out *LimitRange, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.LimitRange))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	return convert_api_LimitRangeSpec_To_v1_LimitRangeSpec(&in.Spec, &out.Spec, s)
}
// convert_api_LimitRangeItem_To_v1_LimitRangeItem converts a limit-range item:
// the Type string plus the Max/Min/Default resource maps. Each quantity is
// converted through the generic scope converter; nil maps stay nil.
func convert_api_LimitRangeItem_To_v1_LimitRangeItem(in *api.LimitRangeItem, out *LimitRangeItem, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.LimitRangeItem))(in)
	}
	out.Type = LimitType(in.Type)
	out.Max = nil
	if in.Max != nil {
		out.Max = make(ResourceList, len(in.Max))
		for k, v := range in.Max {
			// Convert reads through &v (the loop variable's address), so the
			// copy is taken before the next iteration overwrites v.
			converted := resource.Quantity{}
			if err := s.Convert(&v, &converted, 0); err != nil {
				return err
			}
			out.Max[ResourceName(k)] = converted
		}
	}
	out.Min = nil
	if in.Min != nil {
		out.Min = make(ResourceList, len(in.Min))
		for k, v := range in.Min {
			converted := resource.Quantity{}
			if err := s.Convert(&v, &converted, 0); err != nil {
				return err
			}
			out.Min[ResourceName(k)] = converted
		}
	}
	out.Default = nil
	if in.Default != nil {
		out.Default = make(ResourceList, len(in.Default))
		for k, v := range in.Default {
			converted := resource.Quantity{}
			if err := s.Convert(&v, &converted, 0); err != nil {
				return err
			}
			out.Default[ResourceName(k)] = converted
		}
	}
	return nil
}
// convert_api_LimitRangeList_To_v1_LimitRangeList converts a LimitRangeList,
// deep-copying each item (nil Items stays nil).
func convert_api_LimitRangeList_To_v1_LimitRangeList(in *api.LimitRangeList, out *LimitRangeList, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.LimitRangeList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Items = nil
	if in.Items != nil {
		out.Items = make([]LimitRange, len(in.Items))
		for idx := range in.Items {
			if err := convert_api_LimitRange_To_v1_LimitRange(&in.Items[idx], &out.Items[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_LimitRangeSpec_To_v1_LimitRangeSpec deep-copies the Limits
// slice (nil stays nil).
func convert_api_LimitRangeSpec_To_v1_LimitRangeSpec(in *api.LimitRangeSpec, out *LimitRangeSpec, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.LimitRangeSpec))(in)
	}
	out.Limits = nil
	if in.Limits != nil {
		out.Limits = make([]LimitRangeItem, len(in.Limits))
		for idx := range in.Limits {
			if err := convert_api_LimitRangeItem_To_v1_LimitRangeItem(&in.Limits[idx], &out.Limits[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_List_To_v1_List converts a generic List; Items holds arbitrary
// runtime objects, so the slice is handed to the generic scope converter.
func convert_api_List_To_v1_List(in *api.List, out *List, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.List))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	return s.Convert(&in.Items, &out.Items, 0)
}
// convert_api_ListMeta_To_v1_ListMeta copies the self link and resource
// version of list metadata.
func convert_api_ListMeta_To_v1_ListMeta(in *api.ListMeta, out *ListMeta, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ListMeta))(in)
	}
	out.SelfLink = in.SelfLink
	out.ResourceVersion = in.ResourceVersion
	return nil
}
// convert_api_ListOptions_To_v1_ListOptions converts list options; the two
// selectors use the generic converter, the rest are direct copies.
func convert_api_ListOptions_To_v1_ListOptions(in *api.ListOptions, out *ListOptions, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ListOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.LabelSelector, &out.LabelSelector, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.FieldSelector, &out.FieldSelector, 0); err != nil {
		return err
	}
	out.Watch = in.Watch
	out.ResourceVersion = in.ResourceVersion
	return nil
}
// convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress copies the ingress
// IP and hostname.
func convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress(in *api.LoadBalancerIngress, out *LoadBalancerIngress, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.LoadBalancerIngress))(in)
	}
	out.IP = in.IP
	out.Hostname = in.Hostname
	return nil
}
// convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus deep-copies the
// Ingress slice (nil stays nil).
func convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus(in *api.LoadBalancerStatus, out *LoadBalancerStatus, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.LoadBalancerStatus))(in)
	}
	out.Ingress = nil
	if in.Ingress != nil {
		out.Ingress = make([]LoadBalancerIngress, len(in.Ingress))
		for idx := range in.Ingress {
			if err := convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress(&in.Ingress[idx], &out.Ingress[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_LocalObjectReference_To_v1_LocalObjectReference copies the
// referenced object's name.
func convert_api_LocalObjectReference_To_v1_LocalObjectReference(in *api.LocalObjectReference, out *LocalObjectReference, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.LocalObjectReference))(in)
	}
	out.Name = in.Name
	return nil
}
// convert_api_NFSVolumeSource_To_v1_NFSVolumeSource copies the NFS server,
// export path and read-only flag.
func convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in *api.NFSVolumeSource, out *NFSVolumeSource, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NFSVolumeSource))(in)
	}
	out.Server = in.Server
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_Namespace_To_v1_Namespace converts a Namespace object: type and
// object metadata, spec, and status.
func convert_api_Namespace_To_v1_Namespace(in *api.Namespace, out *Namespace, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.Namespace))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_NamespaceSpec_To_v1_NamespaceSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return convert_api_NamespaceStatus_To_v1_NamespaceStatus(&in.Status, &out.Status, s)
}
// convert_api_NamespaceList_To_v1_NamespaceList converts a NamespaceList,
// deep-copying each item (nil Items stays nil).
func convert_api_NamespaceList_To_v1_NamespaceList(in *api.NamespaceList, out *NamespaceList, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NamespaceList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Items = nil
	if in.Items != nil {
		out.Items = make([]Namespace, len(in.Items))
		for idx := range in.Items {
			if err := convert_api_Namespace_To_v1_Namespace(&in.Items[idx], &out.Items[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_NamespaceSpec_To_v1_NamespaceSpec copies the finalizer list,
// converting each element's string type (nil stays nil).
func convert_api_NamespaceSpec_To_v1_NamespaceSpec(in *api.NamespaceSpec, out *NamespaceSpec, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NamespaceSpec))(in)
	}
	out.Finalizers = nil
	if in.Finalizers != nil {
		out.Finalizers = make([]FinalizerName, len(in.Finalizers))
		for idx := range in.Finalizers {
			out.Finalizers[idx] = FinalizerName(in.Finalizers[idx])
		}
	}
	return nil
}
// convert_api_NamespaceStatus_To_v1_NamespaceStatus copies the namespace
// phase (a distinct string type in each package).
func convert_api_NamespaceStatus_To_v1_NamespaceStatus(in *api.NamespaceStatus, out *NamespaceStatus, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NamespaceStatus))(in)
	}
	out.Phase = NamespacePhase(in.Phase)
	return nil
}
// convert_api_Node_To_v1_Node converts a Node object: type and object
// metadata, spec, and status.
func convert_api_Node_To_v1_Node(in *api.Node, out *Node, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.Node))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_NodeSpec_To_v1_NodeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return convert_api_NodeStatus_To_v1_NodeStatus(&in.Status, &out.Status, s)
}
// convert_api_NodeAddress_To_v1_NodeAddress copies the address type and value.
func convert_api_NodeAddress_To_v1_NodeAddress(in *api.NodeAddress, out *NodeAddress, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NodeAddress))(in)
	}
	out.Type = NodeAddressType(in.Type)
	out.Address = in.Address
	return nil
}
// convert_api_NodeCondition_To_v1_NodeCondition copies a node condition; the
// two timestamps go through the generic scope converter.
func convert_api_NodeCondition_To_v1_NodeCondition(in *api.NodeCondition, out *NodeCondition, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NodeCondition))(in)
	}
	out.Type = NodeConditionType(in.Type)
	out.Status = ConditionStatus(in.Status)
	out.Reason = in.Reason
	out.Message = in.Message
	if err := s.Convert(&in.LastHeartbeatTime, &out.LastHeartbeatTime, 0); err != nil {
		return err
	}
	return s.Convert(&in.LastTransitionTime, &out.LastTransitionTime, 0)
}
// convert_api_NodeList_To_v1_NodeList converts a NodeList, deep-copying each
// item (nil Items stays nil).
func convert_api_NodeList_To_v1_NodeList(in *api.NodeList, out *NodeList, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NodeList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Items = nil
	if in.Items != nil {
		out.Items = make([]Node, len(in.Items))
		for idx := range in.Items {
			if err := convert_api_Node_To_v1_Node(&in.Items[idx], &out.Items[idx], s); err != nil {
				return err
			}
		}
	}
	return nil
}
// convert_api_NodeSpec_To_v1_NodeSpec copies the four scalar node-spec fields.
func convert_api_NodeSpec_To_v1_NodeSpec(in *api.NodeSpec, out *NodeSpec, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NodeSpec))(in)
	}
	out.PodCIDR = in.PodCIDR
	out.ExternalID = in.ExternalID
	out.ProviderID = in.ProviderID
	out.Unschedulable = in.Unschedulable
	return nil
}
// convert_api_NodeStatus_To_v1_NodeStatus converts a node status: the Capacity
// resource map (quantities via the generic converter), the phase, the
// Conditions and Addresses slices, and the node system info. Nil maps/slices
// stay nil.
func convert_api_NodeStatus_To_v1_NodeStatus(in *api.NodeStatus, out *NodeStatus, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NodeStatus))(in)
	}
	out.Capacity = nil
	if in.Capacity != nil {
		out.Capacity = make(ResourceList, len(in.Capacity))
		for k, v := range in.Capacity {
			// Convert reads through &v before the next iteration mutates v.
			converted := resource.Quantity{}
			if err := s.Convert(&v, &converted, 0); err != nil {
				return err
			}
			out.Capacity[ResourceName(k)] = converted
		}
	}
	out.Phase = NodePhase(in.Phase)
	out.Conditions = nil
	if in.Conditions != nil {
		out.Conditions = make([]NodeCondition, len(in.Conditions))
		for idx := range in.Conditions {
			if err := convert_api_NodeCondition_To_v1_NodeCondition(&in.Conditions[idx], &out.Conditions[idx], s); err != nil {
				return err
			}
		}
	}
	out.Addresses = nil
	if in.Addresses != nil {
		out.Addresses = make([]NodeAddress, len(in.Addresses))
		for idx := range in.Addresses {
			if err := convert_api_NodeAddress_To_v1_NodeAddress(&in.Addresses[idx], &out.Addresses[idx], s); err != nil {
				return err
			}
		}
	}
	return convert_api_NodeSystemInfo_To_v1_NodeSystemInfo(&in.NodeInfo, &out.NodeInfo, s)
}
// convert_api_NodeSystemInfo_To_v1_NodeSystemInfo copies the eight scalar
// system-identification fields.
func convert_api_NodeSystemInfo_To_v1_NodeSystemInfo(in *api.NodeSystemInfo, out *NodeSystemInfo, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.NodeSystemInfo))(in)
	}
	out.MachineID = in.MachineID
	out.SystemUUID = in.SystemUUID
	out.BootID = in.BootID
	out.KernelVersion = in.KernelVersion
	out.OsImage = in.OsImage
	out.ContainerRuntimeVersion = in.ContainerRuntimeVersion
	out.KubeletVersion = in.KubeletVersion
	out.KubeProxyVersion = in.KubeProxyVersion
	return nil
}
// convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector copies the API
// version and field path of a field selector.
func convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector(in *api.ObjectFieldSelector, out *ObjectFieldSelector, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ObjectFieldSelector))(in)
	}
	out.APIVersion = in.APIVersion
	out.FieldPath = in.FieldPath
	return nil
}
// convert_api_ObjectMeta_To_v1_ObjectMeta converts object metadata: scalar
// identity fields, the timestamps, the optional deletion grace period, and
// deep copies of the Labels and Annotations maps (nil stays nil).
func convert_api_ObjectMeta_To_v1_ObjectMeta(in *api.ObjectMeta, out *ObjectMeta, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ObjectMeta))(in)
	}
	out.Name = in.Name
	out.GenerateName = in.GenerateName
	out.Namespace = in.Namespace
	out.SelfLink = in.SelfLink
	out.UID = in.UID
	out.ResourceVersion = in.ResourceVersion
	out.Generation = in.Generation
	if err := s.Convert(&in.CreationTimestamp, &out.CreationTimestamp, 0); err != nil {
		return err
	}
	// DeletionTimestamp is a pointer field; the addresses of the pointers
	// themselves (not the pointees) are handed to Convert, matching how the
	// scope converter handles optional values.
	out.DeletionTimestamp = nil
	if in.DeletionTimestamp != nil {
		if err := s.Convert(&in.DeletionTimestamp, &out.DeletionTimestamp, 0); err != nil {
			return err
		}
	}
	out.DeletionGracePeriodSeconds = nil
	if in.DeletionGracePeriodSeconds != nil {
		seconds := *in.DeletionGracePeriodSeconds
		out.DeletionGracePeriodSeconds = &seconds
	}
	out.Labels = nil
	if in.Labels != nil {
		out.Labels = make(map[string]string, len(in.Labels))
		for k, v := range in.Labels {
			out.Labels[k] = v
		}
	}
	out.Annotations = nil
	if in.Annotations != nil {
		out.Annotations = make(map[string]string, len(in.Annotations))
		for k, v := range in.Annotations {
			out.Annotations[k] = v
		}
	}
	return nil
}
// convert_api_ObjectReference_To_v1_ObjectReference copies the seven scalar
// fields identifying a referenced object.
func convert_api_ObjectReference_To_v1_ObjectReference(in *api.ObjectReference, out *ObjectReference, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.ObjectReference))(in)
	}
	out.Kind = in.Kind
	out.Namespace = in.Namespace
	out.Name = in.Name
	out.UID = in.UID
	out.APIVersion = in.APIVersion
	out.ResourceVersion = in.ResourceVersion
	out.FieldPath = in.FieldPath
	return nil
}
// convert_api_PersistentVolume_To_v1_PersistentVolume converts a
// PersistentVolume object: metadata, spec, and status.
func convert_api_PersistentVolume_To_v1_PersistentVolume(in *api.PersistentVolume, out *PersistentVolume, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.PersistentVolume))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus(&in.Status, &out.Status, s)
}
// convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim converts a
// PersistentVolumeClaim object: metadata, spec, and status.
func convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim(in *api.PersistentVolumeClaim, out *PersistentVolumeClaim, s conversion.Scope) error {
	if fn, ok := s.DefaultingInterface(reflect.TypeOf(*in)); ok {
		fn.(func(*api.PersistentVolumeClaim))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus(&in.Status, &out.Status, s)
}
// convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList converts an internal
// api.PersistentVolumeClaimList into its v1 representation. Items are deep-converted
// element by element; a nil input slice stays nil (nil vs empty is preserved).
func convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList(in *api.PersistentVolumeClaimList, out *PersistentVolumeClaimList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]PersistentVolumeClaim, len(in.Items))
		for i := range in.Items {
			if err := convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec converts an internal
// api.PersistentVolumeClaimSpec into its v1 representation. AccessModes entries are
// re-typed individually (nil slice preserved); Resources is converted via its helper.
func convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec(in *api.PersistentVolumeClaimSpec, out *PersistentVolumeClaimSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimSpec))(in)
	}
	if in.AccessModes != nil {
		out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if err := convert_api_ResourceRequirements_To_v1_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
		return err
	}
	out.VolumeName = in.VolumeName
	return nil
}
// convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus converts an
// internal api.PersistentVolumeClaimStatus into its v1 representation. Capacity values
// (resource.Quantity) go through the generic scope converter; nil slice/map inputs stay nil.
func convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus(in *api.PersistentVolumeClaimStatus, out *PersistentVolumeClaimStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimStatus))(in)
	}
	out.Phase = PersistentVolumeClaimPhase(in.Phase)
	if in.AccessModes != nil {
		out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if in.Capacity != nil {
		out.Capacity = make(ResourceList)
		for key, val := range in.Capacity {
			// val is a fresh copy each iteration, so &val is safe to hand to Convert here.
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	return nil
}
// convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource
// converts an internal api.PersistentVolumeClaimVolumeSource into its v1 representation
// via direct field copies after applying any registered defaulting to in.
func convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource(in *api.PersistentVolumeClaimVolumeSource, out *PersistentVolumeClaimVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeClaimVolumeSource))(in)
	}
	out.ClaimName = in.ClaimName
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_PersistentVolumeList_To_v1_PersistentVolumeList converts an internal
// api.PersistentVolumeList into its v1 representation, deep-converting each Item and
// preserving a nil Items slice as nil.
func convert_api_PersistentVolumeList_To_v1_PersistentVolumeList(in *api.PersistentVolumeList, out *PersistentVolumeList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]PersistentVolume, len(in.Items))
		for i := range in.Items {
			if err := convert_api_PersistentVolume_To_v1_PersistentVolume(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource converts an internal
// api.PersistentVolumeSource into its v1 representation. Each volume-source pointer field
// is independently deep-converted into a freshly allocated target when set, or explicitly
// nil'd when unset, so out never aliases in.
func convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource(in *api.PersistentVolumeSource, out *PersistentVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeSource))(in)
	}
	if in.GCEPersistentDisk != nil {
		out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
		if err := convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
			return err
		}
	} else {
		out.GCEPersistentDisk = nil
	}
	if in.AWSElasticBlockStore != nil {
		out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
		if err := convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
			return err
		}
	} else {
		out.AWSElasticBlockStore = nil
	}
	if in.HostPath != nil {
		out.HostPath = new(HostPathVolumeSource)
		if err := convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
			return err
		}
	} else {
		out.HostPath = nil
	}
	if in.Glusterfs != nil {
		out.Glusterfs = new(GlusterfsVolumeSource)
		if err := convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
			return err
		}
	} else {
		out.Glusterfs = nil
	}
	if in.NFS != nil {
		out.NFS = new(NFSVolumeSource)
		if err := convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
			return err
		}
	} else {
		out.NFS = nil
	}
	if in.RBD != nil {
		out.RBD = new(RBDVolumeSource)
		if err := convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
			return err
		}
	} else {
		out.RBD = nil
	}
	if in.ISCSI != nil {
		out.ISCSI = new(ISCSIVolumeSource)
		if err := convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
			return err
		}
	} else {
		out.ISCSI = nil
	}
	return nil
}
// convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec converts an internal
// api.PersistentVolumeSpec into its v1 representation: Capacity quantities via the generic
// scope converter, the embedded PersistentVolumeSource via its helper, AccessModes by
// element-wise re-typing, and ClaimRef into a new ObjectReference when set. Nil maps,
// slices, and pointers are preserved as nil.
func convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec(in *api.PersistentVolumeSpec, out *PersistentVolumeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeSpec))(in)
	}
	if in.Capacity != nil {
		out.Capacity = make(ResourceList)
		for key, val := range in.Capacity {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Capacity[ResourceName(key)] = newVal
		}
	} else {
		out.Capacity = nil
	}
	if err := convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource(&in.PersistentVolumeSource, &out.PersistentVolumeSource, s); err != nil {
		return err
	}
	if in.AccessModes != nil {
		out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
		for i := range in.AccessModes {
			out.AccessModes[i] = PersistentVolumeAccessMode(in.AccessModes[i])
		}
	} else {
		out.AccessModes = nil
	}
	if in.ClaimRef != nil {
		out.ClaimRef = new(ObjectReference)
		if err := convert_api_ObjectReference_To_v1_ObjectReference(in.ClaimRef, out.ClaimRef, s); err != nil {
			return err
		}
	} else {
		out.ClaimRef = nil
	}
	out.PersistentVolumeReclaimPolicy = PersistentVolumeReclaimPolicy(in.PersistentVolumeReclaimPolicy)
	return nil
}
// convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus converts an internal
// api.PersistentVolumeStatus into its v1 representation: Phase is re-typed and the
// remaining string fields are copied directly.
func convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus(in *api.PersistentVolumeStatus, out *PersistentVolumeStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PersistentVolumeStatus))(in)
	}
	out.Phase = PersistentVolumePhase(in.Phase)
	out.Message = in.Message
	out.Reason = in.Reason
	return nil
}
// convert_api_Pod_To_v1_Pod converts an internal api.Pod into its v1 representation,
// delegating TypeMeta, ObjectMeta, Spec, and Status to their per-type helpers after
// applying any registered defaulting function to in.
func convert_api_Pod_To_v1_Pod(in *api.Pod, out *Pod, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Pod))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodSpec_To_v1_PodSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_PodStatus_To_v1_PodStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_PodAttachOptions_To_v1_PodAttachOptions converts an internal
// api.PodAttachOptions into its v1 representation: TypeMeta via its helper, the
// stdio/TTY flags and Container name by direct copy.
func convert_api_PodAttachOptions_To_v1_PodAttachOptions(in *api.PodAttachOptions, out *PodAttachOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodAttachOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Stdin = in.Stdin
	out.Stdout = in.Stdout
	out.Stderr = in.Stderr
	out.TTY = in.TTY
	out.Container = in.Container
	return nil
}
// convert_api_PodCondition_To_v1_PodCondition converts an internal api.PodCondition into
// its v1 representation by re-typing the Type and Status fields. Only these two fields
// are carried over — presumably the condition types match field-for-field at this API
// version; NOTE(review): confirm no additional condition fields exist that should convert.
func convert_api_PodCondition_To_v1_PodCondition(in *api.PodCondition, out *PodCondition, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodCondition))(in)
	}
	out.Type = PodConditionType(in.Type)
	out.Status = ConditionStatus(in.Status)
	return nil
}
// convert_api_PodExecOptions_To_v1_PodExecOptions converts an internal api.PodExecOptions
// into its v1 representation: TypeMeta via its helper, flags and Container by direct copy,
// and the Command slice copied element-wise (nil stays nil).
func convert_api_PodExecOptions_To_v1_PodExecOptions(in *api.PodExecOptions, out *PodExecOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodExecOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Stdin = in.Stdin
	out.Stdout = in.Stdout
	out.Stderr = in.Stderr
	out.TTY = in.TTY
	out.Container = in.Container
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	return nil
}
// convert_api_PodList_To_v1_PodList converts an internal api.PodList into its v1
// representation, deep-converting each Item and preserving a nil Items slice as nil.
func convert_api_PodList_To_v1_PodList(in *api.PodList, out *PodList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Pod, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Pod_To_v1_Pod(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_PodLogOptions_To_v1_PodLogOptions converts an internal api.PodLogOptions
// into its v1 representation: TypeMeta via its helper, then Container, Follow, and
// Previous by direct copy.
func convert_api_PodLogOptions_To_v1_PodLogOptions(in *api.PodLogOptions, out *PodLogOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodLogOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Container = in.Container
	out.Follow = in.Follow
	out.Previous = in.Previous
	return nil
}
// convert_api_PodProxyOptions_To_v1_PodProxyOptions converts an internal
// api.PodProxyOptions into its v1 representation: TypeMeta via its helper and Path by
// direct copy.
func convert_api_PodProxyOptions_To_v1_PodProxyOptions(in *api.PodProxyOptions, out *PodProxyOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodProxyOptions))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	out.Path = in.Path
	return nil
}
// convert_api_PodStatus_To_v1_PodStatus converts an internal api.PodStatus into its v1
// representation: Phase is re-typed, Conditions and ContainerStatuses are deep-converted
// element-wise (nil slices preserved), scalar fields are copied, and the StartTime
// pointer is converted through the generic scope converter only when non-nil.
func convert_api_PodStatus_To_v1_PodStatus(in *api.PodStatus, out *PodStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodStatus))(in)
	}
	out.Phase = PodPhase(in.Phase)
	if in.Conditions != nil {
		out.Conditions = make([]PodCondition, len(in.Conditions))
		for i := range in.Conditions {
			if err := convert_api_PodCondition_To_v1_PodCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Conditions = nil
	}
	out.Message = in.Message
	out.Reason = in.Reason
	out.HostIP = in.HostIP
	out.PodIP = in.PodIP
	if in.StartTime != nil {
		// Pointer-to-pointer conversion is delegated to the scope so it can allocate out.StartTime.
		if err := s.Convert(&in.StartTime, &out.StartTime, 0); err != nil {
			return err
		}
	} else {
		out.StartTime = nil
	}
	if in.ContainerStatuses != nil {
		out.ContainerStatuses = make([]ContainerStatus, len(in.ContainerStatuses))
		for i := range in.ContainerStatuses {
			if err := convert_api_ContainerStatus_To_v1_ContainerStatus(&in.ContainerStatuses[i], &out.ContainerStatuses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.ContainerStatuses = nil
	}
	return nil
}
// convert_api_PodStatusResult_To_v1_PodStatusResult converts an internal
// api.PodStatusResult into its v1 representation via the TypeMeta, ObjectMeta, and
// PodStatus helpers.
func convert_api_PodStatusResult_To_v1_PodStatusResult(in *api.PodStatusResult, out *PodStatusResult, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodStatusResult))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodStatus_To_v1_PodStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_PodTemplate_To_v1_PodTemplate converts an internal api.PodTemplate into its
// v1 representation via the TypeMeta, ObjectMeta, and PodTemplateSpec helpers.
func convert_api_PodTemplate_To_v1_PodTemplate(in *api.PodTemplate, out *PodTemplate, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodTemplate))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(&in.Template, &out.Template, s); err != nil {
		return err
	}
	return nil
}
// convert_api_PodTemplateList_To_v1_PodTemplateList converts an internal
// api.PodTemplateList into its v1 representation, deep-converting each Item and
// preserving a nil Items slice as nil.
func convert_api_PodTemplateList_To_v1_PodTemplateList(in *api.PodTemplateList, out *PodTemplateList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodTemplateList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]PodTemplate, len(in.Items))
		for i := range in.Items {
			if err := convert_api_PodTemplate_To_v1_PodTemplate(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_PodTemplateSpec_To_v1_PodTemplateSpec converts an internal
// api.PodTemplateSpec into its v1 representation via the ObjectMeta and PodSpec helpers.
// (PodTemplateSpec carries no TypeMeta, hence none is converted.)
func convert_api_PodTemplateSpec_To_v1_PodTemplateSpec(in *api.PodTemplateSpec, out *PodTemplateSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.PodTemplateSpec))(in)
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_PodSpec_To_v1_PodSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return nil
}
// convert_api_Probe_To_v1_Probe converts an internal api.Probe into its v1 representation:
// the embedded Handler is converted via its helper and the timing fields are copied.
func convert_api_Probe_To_v1_Probe(in *api.Probe, out *Probe, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Probe))(in)
	}
	if err := convert_api_Handler_To_v1_Handler(&in.Handler, &out.Handler, s); err != nil {
		return err
	}
	out.InitialDelaySeconds = in.InitialDelaySeconds
	out.TimeoutSeconds = in.TimeoutSeconds
	return nil
}
// convert_api_RBDVolumeSource_To_v1_RBDVolumeSource converts an internal
// api.RBDVolumeSource into its v1 representation: CephMonitors is copied element-wise
// (nil preserved), scalar fields are copied, and SecretRef is deep-converted into a new
// LocalObjectReference when set.
func convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in *api.RBDVolumeSource, out *RBDVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.RBDVolumeSource))(in)
	}
	if in.CephMonitors != nil {
		out.CephMonitors = make([]string, len(in.CephMonitors))
		for i := range in.CephMonitors {
			out.CephMonitors[i] = in.CephMonitors[i]
		}
	} else {
		out.CephMonitors = nil
	}
	out.RBDImage = in.RBDImage
	out.FSType = in.FSType
	out.RBDPool = in.RBDPool
	out.RadosUser = in.RadosUser
	out.Keyring = in.Keyring
	if in.SecretRef != nil {
		out.SecretRef = new(LocalObjectReference)
		if err := convert_api_LocalObjectReference_To_v1_LocalObjectReference(in.SecretRef, out.SecretRef, s); err != nil {
			return err
		}
	} else {
		out.SecretRef = nil
	}
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_api_RangeAllocation_To_v1_RangeAllocation converts an internal
// api.RangeAllocation into its v1 representation: metadata via helpers, Range by direct
// copy, and the Data payload through the generic scope converter.
func convert_api_RangeAllocation_To_v1_RangeAllocation(in *api.RangeAllocation, out *RangeAllocation, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.RangeAllocation))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	out.Range = in.Range
	if err := s.Convert(&in.Data, &out.Data, 0); err != nil {
		return err
	}
	return nil
}
// convert_api_ReplicationController_To_v1_ReplicationController converts an internal
// api.ReplicationController into its v1 representation via the TypeMeta, ObjectMeta,
// Spec, and Status helpers.
func convert_api_ReplicationController_To_v1_ReplicationController(in *api.ReplicationController, out *ReplicationController, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ReplicationController))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ReplicationControllerSpec_To_v1_ReplicationControllerSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_ReplicationControllerList_To_v1_ReplicationControllerList converts an
// internal api.ReplicationControllerList into its v1 representation, deep-converting each
// Item and preserving a nil Items slice as nil.
func convert_api_ReplicationControllerList_To_v1_ReplicationControllerList(in *api.ReplicationControllerList, out *ReplicationControllerList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ReplicationControllerList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ReplicationController, len(in.Items))
		for i := range in.Items {
			if err := convert_api_ReplicationController_To_v1_ReplicationController(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus converts an
// internal api.ReplicationControllerStatus into its v1 representation via direct copies
// of Replicas and ObservedGeneration.
func convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus(in *api.ReplicationControllerStatus, out *ReplicationControllerStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ReplicationControllerStatus))(in)
	}
	out.Replicas = in.Replicas
	out.ObservedGeneration = in.ObservedGeneration
	return nil
}
// convert_api_ResourceQuota_To_v1_ResourceQuota converts an internal api.ResourceQuota
// into its v1 representation via the TypeMeta, ObjectMeta, Spec, and Status helpers.
func convert_api_ResourceQuota_To_v1_ResourceQuota(in *api.ResourceQuota, out *ResourceQuota, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuota))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_ResourceQuotaList_To_v1_ResourceQuotaList converts an internal
// api.ResourceQuotaList into its v1 representation, deep-converting each Item and
// preserving a nil Items slice as nil.
func convert_api_ResourceQuotaList_To_v1_ResourceQuotaList(in *api.ResourceQuotaList, out *ResourceQuotaList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuotaList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ResourceQuota, len(in.Items))
		for i := range in.Items {
			if err := convert_api_ResourceQuota_To_v1_ResourceQuota(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec converts an internal
// api.ResourceQuotaSpec into its v1 representation: each Hard quantity goes through the
// generic scope converter and keys are re-typed to ResourceName; a nil map stays nil.
func convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec(in *api.ResourceQuotaSpec, out *ResourceQuotaSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuotaSpec))(in)
	}
	if in.Hard != nil {
		out.Hard = make(ResourceList)
		for key, val := range in.Hard {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Hard[ResourceName(key)] = newVal
		}
	} else {
		out.Hard = nil
	}
	return nil
}
// convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus converts an internal
// api.ResourceQuotaStatus into its v1 representation, deep-copying both the Hard and
// Used quantity maps through the generic scope converter; nil maps stay nil.
func convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus(in *api.ResourceQuotaStatus, out *ResourceQuotaStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceQuotaStatus))(in)
	}
	if in.Hard != nil {
		out.Hard = make(ResourceList)
		for key, val := range in.Hard {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Hard[ResourceName(key)] = newVal
		}
	} else {
		out.Hard = nil
	}
	if in.Used != nil {
		out.Used = make(ResourceList)
		for key, val := range in.Used {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Used[ResourceName(key)] = newVal
		}
	} else {
		out.Used = nil
	}
	return nil
}
// convert_api_ResourceRequirements_To_v1_ResourceRequirements converts an internal
// api.ResourceRequirements into its v1 representation, deep-copying the Limits and
// Requests quantity maps through the generic scope converter; nil maps stay nil.
func convert_api_ResourceRequirements_To_v1_ResourceRequirements(in *api.ResourceRequirements, out *ResourceRequirements, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ResourceRequirements))(in)
	}
	if in.Limits != nil {
		out.Limits = make(ResourceList)
		for key, val := range in.Limits {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Limits[ResourceName(key)] = newVal
		}
	} else {
		out.Limits = nil
	}
	if in.Requests != nil {
		out.Requests = make(ResourceList)
		for key, val := range in.Requests {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Requests[ResourceName(key)] = newVal
		}
	} else {
		out.Requests = nil
	}
	return nil
}
// convert_api_SELinuxOptions_To_v1_SELinuxOptions converts an internal api.SELinuxOptions
// into its v1 representation via direct copies of the four SELinux label fields.
func convert_api_SELinuxOptions_To_v1_SELinuxOptions(in *api.SELinuxOptions, out *SELinuxOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SELinuxOptions))(in)
	}
	out.User = in.User
	out.Role = in.Role
	out.Type = in.Type
	out.Level = in.Level
	return nil
}
// convert_api_Secret_To_v1_Secret converts an internal api.Secret into its v1
// representation: metadata via helpers, each Data byte slice deep-copied through the
// generic scope converter (nil map preserved), and Type re-typed to SecretType.
func convert_api_Secret_To_v1_Secret(in *api.Secret, out *Secret, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Secret))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Data != nil {
		out.Data = make(map[string][]uint8)
		for key, val := range in.Data {
			newVal := []uint8{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Data[key] = newVal
		}
	} else {
		out.Data = nil
	}
	out.Type = SecretType(in.Type)
	return nil
}
// convert_api_SecretList_To_v1_SecretList converts an internal api.SecretList into its
// v1 representation, deep-converting each Item and preserving a nil Items slice as nil.
func convert_api_SecretList_To_v1_SecretList(in *api.SecretList, out *SecretList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SecretList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Secret, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Secret_To_v1_Secret(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_SecretVolumeSource_To_v1_SecretVolumeSource converts an internal
// api.SecretVolumeSource into its v1 representation via a direct copy of SecretName.
func convert_api_SecretVolumeSource_To_v1_SecretVolumeSource(in *api.SecretVolumeSource, out *SecretVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SecretVolumeSource))(in)
	}
	out.SecretName = in.SecretName
	return nil
}
// convert_api_SecurityContext_To_v1_SecurityContext converts an internal
// api.SecurityContext into its v1 representation. Pointer fields (Capabilities,
// Privileged, SELinuxOptions, RunAsUser) are deep-copied into new allocations when set
// and explicitly nil'd otherwise, so out never aliases in's pointers.
func convert_api_SecurityContext_To_v1_SecurityContext(in *api.SecurityContext, out *SecurityContext, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SecurityContext))(in)
	}
	if in.Capabilities != nil {
		out.Capabilities = new(Capabilities)
		if err := convert_api_Capabilities_To_v1_Capabilities(in.Capabilities, out.Capabilities, s); err != nil {
			return err
		}
	} else {
		out.Capabilities = nil
	}
	if in.Privileged != nil {
		out.Privileged = new(bool)
		*out.Privileged = *in.Privileged
	} else {
		out.Privileged = nil
	}
	if in.SELinuxOptions != nil {
		out.SELinuxOptions = new(SELinuxOptions)
		if err := convert_api_SELinuxOptions_To_v1_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil {
			return err
		}
	} else {
		out.SELinuxOptions = nil
	}
	if in.RunAsUser != nil {
		out.RunAsUser = new(int64)
		*out.RunAsUser = *in.RunAsUser
	} else {
		out.RunAsUser = nil
	}
	out.RunAsNonRoot = in.RunAsNonRoot
	return nil
}
// convert_api_SerializedReference_To_v1_SerializedReference converts an internal
// api.SerializedReference into its v1 representation via the TypeMeta and
// ObjectReference helpers.
func convert_api_SerializedReference_To_v1_SerializedReference(in *api.SerializedReference, out *SerializedReference, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.SerializedReference))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Reference, &out.Reference, s); err != nil {
		return err
	}
	return nil
}
// convert_api_Service_To_v1_Service converts an internal api.Service into its v1
// representation via the TypeMeta, ObjectMeta, ServiceSpec, and ServiceStatus helpers.
func convert_api_Service_To_v1_Service(in *api.Service, out *Service, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Service))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_api_ServiceSpec_To_v1_ServiceSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_api_ServiceStatus_To_v1_ServiceStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_api_ServiceAccount_To_v1_ServiceAccount converts an internal api.ServiceAccount
// into its v1 representation: metadata via helpers, then the Secrets and ImagePullSecrets
// slices deep-converted element-wise with nil slices preserved.
func convert_api_ServiceAccount_To_v1_ServiceAccount(in *api.ServiceAccount, out *ServiceAccount, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceAccount))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Secrets != nil {
		out.Secrets = make([]ObjectReference, len(in.Secrets))
		for i := range in.Secrets {
			if err := convert_api_ObjectReference_To_v1_ObjectReference(&in.Secrets[i], &out.Secrets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Secrets = nil
	}
	if in.ImagePullSecrets != nil {
		out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets))
		for i := range in.ImagePullSecrets {
			if err := convert_api_LocalObjectReference_To_v1_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.ImagePullSecrets = nil
	}
	return nil
}
// convert_api_ServiceAccountList_To_v1_ServiceAccountList converts an internal
// api.ServiceAccountList into its v1 representation, deep-converting each Item and
// preserving a nil Items slice as nil.
func convert_api_ServiceAccountList_To_v1_ServiceAccountList(in *api.ServiceAccountList, out *ServiceAccountList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceAccountList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ServiceAccount, len(in.Items))
		for i := range in.Items {
			if err := convert_api_ServiceAccount_To_v1_ServiceAccount(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ServiceList_To_v1_ServiceList converts an internal api.ServiceList into
// its v1 representation, deep-converting each Item and preserving a nil Items slice as nil.
func convert_api_ServiceList_To_v1_ServiceList(in *api.ServiceList, out *ServiceList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceList))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Service, len(in.Items))
		for i := range in.Items {
			if err := convert_api_Service_To_v1_Service(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_api_ServicePort_To_v1_ServicePort converts an internal api.ServicePort into its
// v1 representation: scalar fields are copied, Protocol is re-typed, and TargetPort
// (an int-or-string value) is converted through the generic scope converter.
func convert_api_ServicePort_To_v1_ServicePort(in *api.ServicePort, out *ServicePort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServicePort))(in)
	}
	out.Name = in.Name
	out.Protocol = Protocol(in.Protocol)
	out.Port = in.Port
	if err := s.Convert(&in.TargetPort, &out.TargetPort, 0); err != nil {
		return err
	}
	out.NodePort = in.NodePort
	return nil
}
// convert_api_ServiceSpec_To_v1_ServiceSpec converts an internal api.ServiceSpec into its
// v1 representation: Ports are deep-converted, the Selector map and ExternalIPs slice are
// shallow-copied entry-by-entry, and ClusterIP/Type/SessionAffinity are copied or
// re-typed. Nil maps and slices are preserved as nil.
func convert_api_ServiceSpec_To_v1_ServiceSpec(in *api.ServiceSpec, out *ServiceSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceSpec))(in)
	}
	if in.Ports != nil {
		out.Ports = make([]ServicePort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_api_ServicePort_To_v1_ServicePort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	if in.Selector != nil {
		out.Selector = make(map[string]string)
		for key, val := range in.Selector {
			out.Selector[key] = val
		}
	} else {
		out.Selector = nil
	}
	out.ClusterIP = in.ClusterIP
	out.Type = ServiceType(in.Type)
	if in.ExternalIPs != nil {
		out.ExternalIPs = make([]string, len(in.ExternalIPs))
		for i := range in.ExternalIPs {
			out.ExternalIPs[i] = in.ExternalIPs[i]
		}
	} else {
		out.ExternalIPs = nil
	}
	out.SessionAffinity = ServiceAffinity(in.SessionAffinity)
	return nil
}
// convert_api_ServiceStatus_To_v1_ServiceStatus converts an internal api.ServiceStatus
// into its v1 representation via the LoadBalancerStatus helper.
func convert_api_ServiceStatus_To_v1_ServiceStatus(in *api.ServiceStatus, out *ServiceStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ServiceStatus))(in)
	}
	if err := convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus(&in.LoadBalancer, &out.LoadBalancer, s); err != nil {
		return err
	}
	return nil
}
// convert_api_Status_To_v1_Status converts an internal api.Status into its v1
// representation: metadata via helpers, scalar fields copied, Reason re-typed, and the
// optional Details pointer deep-converted into a new StatusDetails when set.
func convert_api_Status_To_v1_Status(in *api.Status, out *Status, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Status))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ListMeta_To_v1_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Status = in.Status
	out.Message = in.Message
	out.Reason = StatusReason(in.Reason)
	if in.Details != nil {
		out.Details = new(StatusDetails)
		if err := convert_api_StatusDetails_To_v1_StatusDetails(in.Details, out.Details, s); err != nil {
			return err
		}
	} else {
		out.Details = nil
	}
	out.Code = in.Code
	return nil
}
// convert_api_StatusCause_To_v1_StatusCause converts an internal api.StatusCause into its
// v1 representation: Type is re-typed and Message/Field are copied directly.
func convert_api_StatusCause_To_v1_StatusCause(in *api.StatusCause, out *StatusCause, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.StatusCause))(in)
	}
	out.Type = CauseType(in.Type)
	out.Message = in.Message
	out.Field = in.Field
	return nil
}
// convert_api_StatusDetails_To_v1_StatusDetails converts an internal api.StatusDetails
// into its v1 representation: Name/Kind/RetryAfterSeconds are copied and the Causes slice
// is deep-converted element-wise (nil preserved).
func convert_api_StatusDetails_To_v1_StatusDetails(in *api.StatusDetails, out *StatusDetails, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.StatusDetails))(in)
	}
	out.Name = in.Name
	out.Kind = in.Kind
	if in.Causes != nil {
		out.Causes = make([]StatusCause, len(in.Causes))
		for i := range in.Causes {
			if err := convert_api_StatusCause_To_v1_StatusCause(&in.Causes[i], &out.Causes[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Causes = nil
	}
	out.RetryAfterSeconds = in.RetryAfterSeconds
	return nil
}
// convert_api_TCPSocketAction_To_v1_TCPSocketAction converts an internal
// api.TCPSocketAction into its v1 representation: the Port (an int-or-string value) is
// converted through the generic scope converter.
func convert_api_TCPSocketAction_To_v1_TCPSocketAction(in *api.TCPSocketAction, out *TCPSocketAction, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.TCPSocketAction))(in)
	}
	if err := s.Convert(&in.Port, &out.Port, 0); err != nil {
		return err
	}
	return nil
}
// convert_api_ThirdPartyResourceData_To_v1_ThirdPartyResourceData converts an
// internal api.ThirdPartyResourceData into its v1 form; the opaque Data payload
// is handed to the generic scope converter.
func convert_api_ThirdPartyResourceData_To_v1_ThirdPartyResourceData(in *api.ThirdPartyResourceData, out *ThirdPartyResourceData, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.ThirdPartyResourceData))(in)
	}
	if err := convert_api_TypeMeta_To_v1_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_api_ObjectMeta_To_v1_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.Data, &out.Data, 0); err != nil {
		return err
	}
	return nil
}
// convert_api_TypeMeta_To_v1_TypeMeta converts an internal api.TypeMeta into a
// v1 TypeMeta by copying Kind and APIVersion verbatim.
func convert_api_TypeMeta_To_v1_TypeMeta(in *api.TypeMeta, out *TypeMeta, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.TypeMeta))(in)
	}
	out.Kind = in.Kind
	out.APIVersion = in.APIVersion
	return nil
}
// convert_api_Volume_To_v1_Volume converts an internal api.Volume into a v1
// Volume: the name is copied and the embedded VolumeSource converted in place.
func convert_api_Volume_To_v1_Volume(in *api.Volume, out *Volume, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.Volume))(in)
	}
	out.Name = in.Name
	if err := convert_api_VolumeSource_To_v1_VolumeSource(&in.VolumeSource, &out.VolumeSource, s); err != nil {
		return err
	}
	return nil
}
// convert_api_VolumeMount_To_v1_VolumeMount converts an internal api.VolumeMount
// into a v1 VolumeMount by direct field copy.
func convert_api_VolumeMount_To_v1_VolumeMount(in *api.VolumeMount, out *VolumeMount, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.VolumeMount))(in)
	}
	out.Name = in.Name
	out.ReadOnly = in.ReadOnly
	out.MountPath = in.MountPath
	return nil
}
// convert_api_VolumeSource_To_v1_VolumeSource converts an internal
// api.VolumeSource into a v1 VolumeSource. VolumeSource is a union-like struct:
// each optional source pointer is deep-copied via its own converter when set,
// and explicitly reset to nil otherwise so stale data in out never survives.
func convert_api_VolumeSource_To_v1_VolumeSource(in *api.VolumeSource, out *VolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*api.VolumeSource))(in)
	}
	if in.HostPath != nil {
		out.HostPath = new(HostPathVolumeSource)
		if err := convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
			return err
		}
	} else {
		out.HostPath = nil
	}
	if in.EmptyDir != nil {
		out.EmptyDir = new(EmptyDirVolumeSource)
		if err := convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource(in.EmptyDir, out.EmptyDir, s); err != nil {
			return err
		}
	} else {
		out.EmptyDir = nil
	}
	if in.GCEPersistentDisk != nil {
		out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
		if err := convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
			return err
		}
	} else {
		out.GCEPersistentDisk = nil
	}
	if in.AWSElasticBlockStore != nil {
		out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
		if err := convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
			return err
		}
	} else {
		out.AWSElasticBlockStore = nil
	}
	if in.GitRepo != nil {
		out.GitRepo = new(GitRepoVolumeSource)
		if err := convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource(in.GitRepo, out.GitRepo, s); err != nil {
			return err
		}
	} else {
		out.GitRepo = nil
	}
	if in.Secret != nil {
		out.Secret = new(SecretVolumeSource)
		if err := convert_api_SecretVolumeSource_To_v1_SecretVolumeSource(in.Secret, out.Secret, s); err != nil {
			return err
		}
	} else {
		out.Secret = nil
	}
	if in.NFS != nil {
		out.NFS = new(NFSVolumeSource)
		if err := convert_api_NFSVolumeSource_To_v1_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
			return err
		}
	} else {
		out.NFS = nil
	}
	if in.ISCSI != nil {
		out.ISCSI = new(ISCSIVolumeSource)
		if err := convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
			return err
		}
	} else {
		out.ISCSI = nil
	}
	if in.Glusterfs != nil {
		out.Glusterfs = new(GlusterfsVolumeSource)
		if err := convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
			return err
		}
	} else {
		out.Glusterfs = nil
	}
	if in.PersistentVolumeClaim != nil {
		out.PersistentVolumeClaim = new(PersistentVolumeClaimVolumeSource)
		if err := convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource(in.PersistentVolumeClaim, out.PersistentVolumeClaim, s); err != nil {
			return err
		}
	} else {
		out.PersistentVolumeClaim = nil
	}
	if in.RBD != nil {
		out.RBD = new(RBDVolumeSource)
		if err := convert_api_RBDVolumeSource_To_v1_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
			return err
		}
	} else {
		out.RBD = nil
	}
	return nil
}
// convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource
// converts a v1 AWSElasticBlockStoreVolumeSource into its internal api form by
// direct field copy.
func convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in *AWSElasticBlockStoreVolumeSource, out *api.AWSElasticBlockStoreVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*AWSElasticBlockStoreVolumeSource))(in)
	}
	out.VolumeID = in.VolumeID
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_Binding_To_api_Binding converts a v1 Binding into the internal
// api.Binding, converting its embedded metadata and Target reference.
func convert_v1_Binding_To_api_Binding(in *Binding, out *api.Binding, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Binding))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Target, &out.Target, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_Capabilities_To_api_Capabilities converts a v1 Capabilities into
// the internal api.Capabilities, re-typing each Add/Drop element to
// api.Capability (nil slices are preserved as nil).
func convert_v1_Capabilities_To_api_Capabilities(in *Capabilities, out *api.Capabilities, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Capabilities))(in)
	}
	if in.Add != nil {
		out.Add = make([]api.Capability, len(in.Add))
		for i := range in.Add {
			out.Add[i] = api.Capability(in.Add[i])
		}
	} else {
		out.Add = nil
	}
	if in.Drop != nil {
		out.Drop = make([]api.Capability, len(in.Drop))
		for i := range in.Drop {
			out.Drop[i] = api.Capability(in.Drop[i])
		}
	} else {
		out.Drop = nil
	}
	return nil
}
// convert_v1_ComponentCondition_To_api_ComponentCondition converts a v1
// ComponentCondition into its internal api form by direct field copy
// (Type and Status are re-typed to the internal enum types).
func convert_v1_ComponentCondition_To_api_ComponentCondition(in *ComponentCondition, out *api.ComponentCondition, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ComponentCondition))(in)
	}
	out.Type = api.ComponentConditionType(in.Type)
	out.Status = api.ConditionStatus(in.Status)
	out.Message = in.Message
	out.Error = in.Error
	return nil
}
// convert_v1_ComponentStatus_To_api_ComponentStatus converts a v1
// ComponentStatus into the internal api.ComponentStatus, deep-copying the
// Conditions slice element by element.
func convert_v1_ComponentStatus_To_api_ComponentStatus(in *ComponentStatus, out *api.ComponentStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ComponentStatus))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Conditions != nil {
		out.Conditions = make([]api.ComponentCondition, len(in.Conditions))
		for i := range in.Conditions {
			if err := convert_v1_ComponentCondition_To_api_ComponentCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Conditions = nil
	}
	return nil
}
// convert_v1_ComponentStatusList_To_api_ComponentStatusList converts a v1
// ComponentStatusList into its internal api form, converting list metadata and
// each item in turn.
func convert_v1_ComponentStatusList_To_api_ComponentStatusList(in *ComponentStatusList, out *api.ComponentStatusList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ComponentStatusList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.ComponentStatus, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_ComponentStatus_To_api_ComponentStatus(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_Container_To_api_Container converts a v1 Container into the
// internal api.Container. Scalar fields are copied directly; slices
// (Command, Args, Ports, Env, VolumeMounts) and optional pointers
// (LivenessProbe, ReadinessProbe, Lifecycle, SecurityContext) are deep-copied,
// with nil inputs producing nil outputs.
func convert_v1_Container_To_api_Container(in *Container, out *api.Container, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Container))(in)
	}
	out.Name = in.Name
	out.Image = in.Image
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	if in.Args != nil {
		out.Args = make([]string, len(in.Args))
		for i := range in.Args {
			out.Args[i] = in.Args[i]
		}
	} else {
		out.Args = nil
	}
	out.WorkingDir = in.WorkingDir
	if in.Ports != nil {
		out.Ports = make([]api.ContainerPort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_v1_ContainerPort_To_api_ContainerPort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	if in.Env != nil {
		out.Env = make([]api.EnvVar, len(in.Env))
		for i := range in.Env {
			if err := convert_v1_EnvVar_To_api_EnvVar(&in.Env[i], &out.Env[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Env = nil
	}
	if err := convert_v1_ResourceRequirements_To_api_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
		return err
	}
	if in.VolumeMounts != nil {
		out.VolumeMounts = make([]api.VolumeMount, len(in.VolumeMounts))
		for i := range in.VolumeMounts {
			if err := convert_v1_VolumeMount_To_api_VolumeMount(&in.VolumeMounts[i], &out.VolumeMounts[i], s); err != nil {
				return err
			}
		}
	} else {
		out.VolumeMounts = nil
	}
	if in.LivenessProbe != nil {
		out.LivenessProbe = new(api.Probe)
		if err := convert_v1_Probe_To_api_Probe(in.LivenessProbe, out.LivenessProbe, s); err != nil {
			return err
		}
	} else {
		out.LivenessProbe = nil
	}
	if in.ReadinessProbe != nil {
		out.ReadinessProbe = new(api.Probe)
		if err := convert_v1_Probe_To_api_Probe(in.ReadinessProbe, out.ReadinessProbe, s); err != nil {
			return err
		}
	} else {
		out.ReadinessProbe = nil
	}
	if in.Lifecycle != nil {
		out.Lifecycle = new(api.Lifecycle)
		if err := convert_v1_Lifecycle_To_api_Lifecycle(in.Lifecycle, out.Lifecycle, s); err != nil {
			return err
		}
	} else {
		out.Lifecycle = nil
	}
	out.TerminationMessagePath = in.TerminationMessagePath
	out.ImagePullPolicy = api.PullPolicy(in.ImagePullPolicy)
	if in.SecurityContext != nil {
		out.SecurityContext = new(api.SecurityContext)
		if err := convert_v1_SecurityContext_To_api_SecurityContext(in.SecurityContext, out.SecurityContext, s); err != nil {
			return err
		}
	} else {
		out.SecurityContext = nil
	}
	out.Stdin = in.Stdin
	out.TTY = in.TTY
	return nil
}
// convert_v1_ContainerPort_To_api_ContainerPort converts a v1 ContainerPort
// into the internal api.ContainerPort by direct field copy.
func convert_v1_ContainerPort_To_api_ContainerPort(in *ContainerPort, out *api.ContainerPort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerPort))(in)
	}
	out.Name = in.Name
	out.HostPort = in.HostPort
	out.ContainerPort = in.ContainerPort
	out.Protocol = api.Protocol(in.Protocol)
	out.HostIP = in.HostIP
	return nil
}
// convert_v1_ContainerState_To_api_ContainerState converts a v1 ContainerState
// into the internal api.ContainerState. At most one of Waiting/Running/Terminated
// is expected to be set; each pointer is deep-copied when non-nil and cleared
// otherwise.
func convert_v1_ContainerState_To_api_ContainerState(in *ContainerState, out *api.ContainerState, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerState))(in)
	}
	if in.Waiting != nil {
		out.Waiting = new(api.ContainerStateWaiting)
		if err := convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting(in.Waiting, out.Waiting, s); err != nil {
			return err
		}
	} else {
		out.Waiting = nil
	}
	if in.Running != nil {
		out.Running = new(api.ContainerStateRunning)
		if err := convert_v1_ContainerStateRunning_To_api_ContainerStateRunning(in.Running, out.Running, s); err != nil {
			return err
		}
	} else {
		out.Running = nil
	}
	if in.Terminated != nil {
		out.Terminated = new(api.ContainerStateTerminated)
		if err := convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated(in.Terminated, out.Terminated, s); err != nil {
			return err
		}
	} else {
		out.Terminated = nil
	}
	return nil
}
// convert_v1_ContainerStateRunning_To_api_ContainerStateRunning converts a v1
// ContainerStateRunning into its internal api form; StartedAt goes through the
// generic scope converter.
func convert_v1_ContainerStateRunning_To_api_ContainerStateRunning(in *ContainerStateRunning, out *api.ContainerStateRunning, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStateRunning))(in)
	}
	if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil {
		return err
	}
	return nil
}
// convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated converts
// a v1 ContainerStateTerminated into its internal api form; the timestamp
// fields go through the generic scope converter.
func convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated(in *ContainerStateTerminated, out *api.ContainerStateTerminated, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStateTerminated))(in)
	}
	out.ExitCode = in.ExitCode
	out.Signal = in.Signal
	out.Reason = in.Reason
	out.Message = in.Message
	if err := s.Convert(&in.StartedAt, &out.StartedAt, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.FinishedAt, &out.FinishedAt, 0); err != nil {
		return err
	}
	out.ContainerID = in.ContainerID
	return nil
}
// convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting converts a v1
// ContainerStateWaiting into its internal api form. Only Reason is copied here;
// NOTE(review): if the types carry additional fields they are not converted —
// confirm against the struct definitions (this file is generated).
func convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting(in *ContainerStateWaiting, out *api.ContainerStateWaiting, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStateWaiting))(in)
	}
	out.Reason = in.Reason
	return nil
}
// convert_v1_ContainerStatus_To_api_ContainerStatus converts a v1
// ContainerStatus into the internal api.ContainerStatus, converting the two
// embedded ContainerState values and copying the remaining scalar fields.
func convert_v1_ContainerStatus_To_api_ContainerStatus(in *ContainerStatus, out *api.ContainerStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ContainerStatus))(in)
	}
	out.Name = in.Name
	if err := convert_v1_ContainerState_To_api_ContainerState(&in.State, &out.State, s); err != nil {
		return err
	}
	if err := convert_v1_ContainerState_To_api_ContainerState(&in.LastTerminationState, &out.LastTerminationState, s); err != nil {
		return err
	}
	out.Ready = in.Ready
	out.RestartCount = in.RestartCount
	out.Image = in.Image
	out.ImageID = in.ImageID
	out.ContainerID = in.ContainerID
	return nil
}
// convert_v1_Daemon_To_api_Daemon converts a v1 Daemon into the internal
// api.Daemon, converting metadata, Spec, and Status in turn.
func convert_v1_Daemon_To_api_Daemon(in *Daemon, out *api.Daemon, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Daemon))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_DaemonSpec_To_api_DaemonSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_DaemonStatus_To_api_DaemonStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_DaemonList_To_api_DaemonList converts a v1 DaemonList into its
// internal api form, converting list metadata and each item in turn.
func convert_v1_DaemonList_To_api_DaemonList(in *DaemonList, out *api.DaemonList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*DaemonList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Daemon, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Daemon_To_api_Daemon(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_DaemonSpec_To_api_DaemonSpec converts a v1 DaemonSpec into the
// internal api.DaemonSpec: the Selector map is copied key by key and the
// optional Template pointer deep-copied when present.
func convert_v1_DaemonSpec_To_api_DaemonSpec(in *DaemonSpec, out *api.DaemonSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*DaemonSpec))(in)
	}
	if in.Selector != nil {
		out.Selector = make(map[string]string)
		for key, val := range in.Selector {
			out.Selector[key] = val
		}
	} else {
		out.Selector = nil
	}
	if in.Template != nil {
		out.Template = new(api.PodTemplateSpec)
		if err := convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(in.Template, out.Template, s); err != nil {
			return err
		}
	} else {
		out.Template = nil
	}
	return nil
}
// convert_v1_DaemonStatus_To_api_DaemonStatus converts a v1 DaemonStatus into
// the internal api.DaemonStatus by direct field copy.
func convert_v1_DaemonStatus_To_api_DaemonStatus(in *DaemonStatus, out *api.DaemonStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*DaemonStatus))(in)
	}
	out.CurrentNumberScheduled = in.CurrentNumberScheduled
	out.NumberMisscheduled = in.NumberMisscheduled
	out.DesiredNumberScheduled = in.DesiredNumberScheduled
	return nil
}
// convert_v1_DeleteOptions_To_api_DeleteOptions converts a v1 DeleteOptions
// into the internal api.DeleteOptions. GracePeriodSeconds is an optional
// *int64 and is copied by value into a freshly allocated pointer so the two
// objects do not share storage.
func convert_v1_DeleteOptions_To_api_DeleteOptions(in *DeleteOptions, out *api.DeleteOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*DeleteOptions))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if in.GracePeriodSeconds != nil {
		out.GracePeriodSeconds = new(int64)
		*out.GracePeriodSeconds = *in.GracePeriodSeconds
	} else {
		out.GracePeriodSeconds = nil
	}
	return nil
}
// convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource converts a v1
// EmptyDirVolumeSource into its internal api form (Medium is re-typed to
// api.StorageMedium).
func convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource(in *EmptyDirVolumeSource, out *api.EmptyDirVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EmptyDirVolumeSource))(in)
	}
	out.Medium = api.StorageMedium(in.Medium)
	return nil
}
// convert_v1_EndpointAddress_To_api_EndpointAddress converts a v1
// EndpointAddress into the internal api.EndpointAddress, deep-copying the
// optional TargetRef pointer.
func convert_v1_EndpointAddress_To_api_EndpointAddress(in *EndpointAddress, out *api.EndpointAddress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointAddress))(in)
	}
	out.IP = in.IP
	if in.TargetRef != nil {
		out.TargetRef = new(api.ObjectReference)
		if err := convert_v1_ObjectReference_To_api_ObjectReference(in.TargetRef, out.TargetRef, s); err != nil {
			return err
		}
	} else {
		out.TargetRef = nil
	}
	return nil
}
// convert_v1_EndpointPort_To_api_EndpointPort converts a v1 EndpointPort into
// the internal api.EndpointPort by direct field copy.
func convert_v1_EndpointPort_To_api_EndpointPort(in *EndpointPort, out *api.EndpointPort, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointPort))(in)
	}
	out.Name = in.Name
	out.Port = in.Port
	out.Protocol = api.Protocol(in.Protocol)
	return nil
}
// convert_v1_EndpointSubset_To_api_EndpointSubset converts a v1 EndpointSubset
// into the internal api.EndpointSubset, deep-copying the Addresses and Ports
// slices element by element.
func convert_v1_EndpointSubset_To_api_EndpointSubset(in *EndpointSubset, out *api.EndpointSubset, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointSubset))(in)
	}
	if in.Addresses != nil {
		out.Addresses = make([]api.EndpointAddress, len(in.Addresses))
		for i := range in.Addresses {
			if err := convert_v1_EndpointAddress_To_api_EndpointAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Addresses = nil
	}
	if in.Ports != nil {
		out.Ports = make([]api.EndpointPort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_v1_EndpointPort_To_api_EndpointPort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	return nil
}
// convert_v1_Endpoints_To_api_Endpoints converts a v1 Endpoints object into
// the internal api.Endpoints, converting metadata and each Subsets element.
func convert_v1_Endpoints_To_api_Endpoints(in *Endpoints, out *api.Endpoints, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Endpoints))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Subsets != nil {
		out.Subsets = make([]api.EndpointSubset, len(in.Subsets))
		for i := range in.Subsets {
			if err := convert_v1_EndpointSubset_To_api_EndpointSubset(&in.Subsets[i], &out.Subsets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Subsets = nil
	}
	return nil
}
// convert_v1_EndpointsList_To_api_EndpointsList converts a v1 EndpointsList
// into its internal api form, converting list metadata and each item in turn.
func convert_v1_EndpointsList_To_api_EndpointsList(in *EndpointsList, out *api.EndpointsList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EndpointsList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Endpoints, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Endpoints_To_api_Endpoints(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_EnvVar_To_api_EnvVar converts a v1 EnvVar into the internal
// api.EnvVar, deep-copying the optional ValueFrom source when present.
func convert_v1_EnvVar_To_api_EnvVar(in *EnvVar, out *api.EnvVar, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EnvVar))(in)
	}
	out.Name = in.Name
	out.Value = in.Value
	if in.ValueFrom != nil {
		out.ValueFrom = new(api.EnvVarSource)
		if err := convert_v1_EnvVarSource_To_api_EnvVarSource(in.ValueFrom, out.ValueFrom, s); err != nil {
			return err
		}
	} else {
		out.ValueFrom = nil
	}
	return nil
}
// convert_v1_EnvVarSource_To_api_EnvVarSource converts a v1 EnvVarSource into
// the internal api.EnvVarSource, deep-copying the optional FieldRef selector.
func convert_v1_EnvVarSource_To_api_EnvVarSource(in *EnvVarSource, out *api.EnvVarSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EnvVarSource))(in)
	}
	if in.FieldRef != nil {
		out.FieldRef = new(api.ObjectFieldSelector)
		if err := convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector(in.FieldRef, out.FieldRef, s); err != nil {
			return err
		}
	} else {
		out.FieldRef = nil
	}
	return nil
}
// convert_v1_Event_To_api_Event converts a v1 Event into the internal
// api.Event, converting metadata, the involved object reference, the event
// source, and both timestamps (via the generic scope converter).
func convert_v1_Event_To_api_Event(in *Event, out *api.Event, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Event))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.InvolvedObject, &out.InvolvedObject, s); err != nil {
		return err
	}
	out.Reason = in.Reason
	out.Message = in.Message
	if err := convert_v1_EventSource_To_api_EventSource(&in.Source, &out.Source, s); err != nil {
		return err
	}
	if err := s.Convert(&in.FirstTimestamp, &out.FirstTimestamp, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.LastTimestamp, &out.LastTimestamp, 0); err != nil {
		return err
	}
	out.Count = in.Count
	return nil
}
// convert_v1_EventList_To_api_EventList converts a v1 EventList into its
// internal api form, converting list metadata and each item in turn.
func convert_v1_EventList_To_api_EventList(in *EventList, out *api.EventList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EventList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Event, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Event_To_api_Event(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_EventSource_To_api_EventSource converts a v1 EventSource into the
// internal api.EventSource by direct field copy.
func convert_v1_EventSource_To_api_EventSource(in *EventSource, out *api.EventSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*EventSource))(in)
	}
	out.Component = in.Component
	out.Host = in.Host
	return nil
}
// convert_v1_ExecAction_To_api_ExecAction converts a v1 ExecAction into the
// internal api.ExecAction, copying the Command slice element by element
// (a nil slice stays nil).
func convert_v1_ExecAction_To_api_ExecAction(in *ExecAction, out *api.ExecAction, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ExecAction))(in)
	}
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	return nil
}
// convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource
// converts a v1 GCEPersistentDiskVolumeSource into its internal api form by
// direct field copy.
func convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in *GCEPersistentDiskVolumeSource, out *api.GCEPersistentDiskVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*GCEPersistentDiskVolumeSource))(in)
	}
	out.PDName = in.PDName
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource converts a v1
// GitRepoVolumeSource into its internal api form by direct field copy.
func convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource(in *GitRepoVolumeSource, out *api.GitRepoVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*GitRepoVolumeSource))(in)
	}
	out.Repository = in.Repository
	out.Revision = in.Revision
	return nil
}
// convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource converts a v1
// GlusterfsVolumeSource into its internal api form by direct field copy.
func convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in *GlusterfsVolumeSource, out *api.GlusterfsVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*GlusterfsVolumeSource))(in)
	}
	out.EndpointsName = in.EndpointsName
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_HTTPGetAction_To_api_HTTPGetAction converts a v1 HTTPGetAction
// into its internal api form; Port goes through the generic scope converter
// and Scheme is re-typed to api.URIScheme.
func convert_v1_HTTPGetAction_To_api_HTTPGetAction(in *HTTPGetAction, out *api.HTTPGetAction, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*HTTPGetAction))(in)
	}
	out.Path = in.Path
	if err := s.Convert(&in.Port, &out.Port, 0); err != nil {
		return err
	}
	out.Host = in.Host
	out.Scheme = api.URIScheme(in.Scheme)
	return nil
}
// convert_v1_Handler_To_api_Handler converts a v1 Handler into the internal
// api.Handler. Handler is a union-like struct: each of Exec/HTTPGet/TCPSocket
// is deep-copied when set and reset to nil otherwise.
func convert_v1_Handler_To_api_Handler(in *Handler, out *api.Handler, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Handler))(in)
	}
	if in.Exec != nil {
		out.Exec = new(api.ExecAction)
		if err := convert_v1_ExecAction_To_api_ExecAction(in.Exec, out.Exec, s); err != nil {
			return err
		}
	} else {
		out.Exec = nil
	}
	if in.HTTPGet != nil {
		out.HTTPGet = new(api.HTTPGetAction)
		if err := convert_v1_HTTPGetAction_To_api_HTTPGetAction(in.HTTPGet, out.HTTPGet, s); err != nil {
			return err
		}
	} else {
		out.HTTPGet = nil
	}
	if in.TCPSocket != nil {
		out.TCPSocket = new(api.TCPSocketAction)
		if err := convert_v1_TCPSocketAction_To_api_TCPSocketAction(in.TCPSocket, out.TCPSocket, s); err != nil {
			return err
		}
	} else {
		out.TCPSocket = nil
	}
	return nil
}
// convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource converts a v1
// HostPathVolumeSource into its internal api form by copying Path.
func convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in *HostPathVolumeSource, out *api.HostPathVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*HostPathVolumeSource))(in)
	}
	out.Path = in.Path
	return nil
}
// convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource converts a v1
// ISCSIVolumeSource into its internal api form by direct field copy.
func convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in *ISCSIVolumeSource, out *api.ISCSIVolumeSource, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ISCSIVolumeSource))(in)
	}
	out.TargetPortal = in.TargetPortal
	out.IQN = in.IQN
	out.Lun = in.Lun
	out.FSType = in.FSType
	out.ReadOnly = in.ReadOnly
	return nil
}
// convert_v1_Lifecycle_To_api_Lifecycle converts a v1 Lifecycle into the
// internal api.Lifecycle, deep-copying the optional PostStart and PreStop
// handlers (nil pointers stay nil).
func convert_v1_Lifecycle_To_api_Lifecycle(in *Lifecycle, out *api.Lifecycle, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Lifecycle))(in)
	}
	if in.PostStart != nil {
		out.PostStart = new(api.Handler)
		if err := convert_v1_Handler_To_api_Handler(in.PostStart, out.PostStart, s); err != nil {
			return err
		}
	} else {
		out.PostStart = nil
	}
	if in.PreStop != nil {
		out.PreStop = new(api.Handler)
		if err := convert_v1_Handler_To_api_Handler(in.PreStop, out.PreStop, s); err != nil {
			return err
		}
	} else {
		out.PreStop = nil
	}
	return nil
}
// convert_v1_LimitRange_To_api_LimitRange converts a v1 LimitRange into the
// internal api.LimitRange, converting metadata and Spec in turn.
func convert_v1_LimitRange_To_api_LimitRange(in *LimitRange, out *api.LimitRange, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRange))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_LimitRangeSpec_To_api_LimitRangeSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_LimitRangeItem_To_api_LimitRangeItem converts a v1 LimitRangeItem
// into the internal api.LimitRangeItem. Each of the Max/Min/Default resource
// maps is rebuilt entry by entry, converting every quantity value through the
// generic scope converter and re-typing keys to api.ResourceName.
func convert_v1_LimitRangeItem_To_api_LimitRangeItem(in *LimitRangeItem, out *api.LimitRangeItem, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRangeItem))(in)
	}
	out.Type = api.LimitType(in.Type)
	if in.Max != nil {
		out.Max = make(api.ResourceList)
		for key, val := range in.Max {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Max[api.ResourceName(key)] = newVal
		}
	} else {
		out.Max = nil
	}
	if in.Min != nil {
		out.Min = make(api.ResourceList)
		for key, val := range in.Min {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Min[api.ResourceName(key)] = newVal
		}
	} else {
		out.Min = nil
	}
	if in.Default != nil {
		out.Default = make(api.ResourceList)
		for key, val := range in.Default {
			newVal := resource.Quantity{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Default[api.ResourceName(key)] = newVal
		}
	} else {
		out.Default = nil
	}
	return nil
}
// convert_v1_LimitRangeList_To_api_LimitRangeList converts a v1 LimitRangeList
// into its internal api form, converting list metadata and each item in turn.
func convert_v1_LimitRangeList_To_api_LimitRangeList(in *LimitRangeList, out *api.LimitRangeList, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRangeList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.LimitRange, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_LimitRange_To_api_LimitRange(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_LimitRangeSpec_To_api_LimitRangeSpec converts a v1 LimitRangeSpec
// into the internal api.LimitRangeSpec, deep-copying the Limits slice.
func convert_v1_LimitRangeSpec_To_api_LimitRangeSpec(in *LimitRangeSpec, out *api.LimitRangeSpec, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LimitRangeSpec))(in)
	}
	if in.Limits != nil {
		out.Limits = make([]api.LimitRangeItem, len(in.Limits))
		for i := range in.Limits {
			if err := convert_v1_LimitRangeItem_To_api_LimitRangeItem(&in.Limits[i], &out.Limits[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Limits = nil
	}
	return nil
}
// convert_v1_List_To_api_List converts a v1 List into the internal api.List.
// Items holds heterogeneous runtime objects, so the whole slice is delegated
// to the generic scope converter.
func convert_v1_List_To_api_List(in *List, out *api.List, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*List))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.Items, &out.Items, 0); err != nil {
		return err
	}
	return nil
}
// convert_v1_ListMeta_To_api_ListMeta converts a v1 ListMeta into the internal
// api.ListMeta by direct field copy.
func convert_v1_ListMeta_To_api_ListMeta(in *ListMeta, out *api.ListMeta, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ListMeta))(in)
	}
	out.SelfLink = in.SelfLink
	out.ResourceVersion = in.ResourceVersion
	return nil
}
// convert_v1_ListOptions_To_api_ListOptions converts a v1 ListOptions into the
// internal api.ListOptions; the label and field selectors go through the
// generic scope converter.
func convert_v1_ListOptions_To_api_ListOptions(in *ListOptions, out *api.ListOptions, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ListOptions))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.LabelSelector, &out.LabelSelector, 0); err != nil {
		return err
	}
	if err := s.Convert(&in.FieldSelector, &out.FieldSelector, 0); err != nil {
		return err
	}
	out.Watch = in.Watch
	out.ResourceVersion = in.ResourceVersion
	return nil
}
// convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress converts a v1
// LoadBalancerIngress into its internal api form by direct field copy.
func convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress(in *LoadBalancerIngress, out *api.LoadBalancerIngress, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LoadBalancerIngress))(in)
	}
	out.IP = in.IP
	out.Hostname = in.Hostname
	return nil
}
// convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus converts a v1
// LoadBalancerStatus into its internal api form, deep-copying the Ingress
// slice element by element.
func convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus(in *LoadBalancerStatus, out *api.LoadBalancerStatus, s conversion.Scope) error {
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*LoadBalancerStatus))(in)
	}
	if in.Ingress != nil {
		out.Ingress = make([]api.LoadBalancerIngress, len(in.Ingress))
		for i := range in.Ingress {
			if err := convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress(&in.Ingress[i], &out.Ingress[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ingress = nil
	}
	return nil
}
// convert_v1_LocalObjectReference_To_api_LocalObjectReference copies a
// same-namespace object reference (Name only).
func convert_v1_LocalObjectReference_To_api_LocalObjectReference(in *LocalObjectReference, out *api.LocalObjectReference, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*LocalObjectReference))(in)
}
out.Name = in.Name
return nil
}

// convert_v1_NFSVolumeSource_To_api_NFSVolumeSource copies the NFS volume
// source fields (Server, Path, ReadOnly).
func convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in *NFSVolumeSource, out *api.NFSVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NFSVolumeSource))(in)
}
out.Server = in.Server
out.Path = in.Path
out.ReadOnly = in.ReadOnly
return nil
}

// convert_v1_Namespace_To_api_Namespace converts a Namespace by delegating
// each embedded section (TypeMeta, ObjectMeta, Spec, Status) to its generated
// sibling converter.
func convert_v1_Namespace_To_api_Namespace(in *Namespace, out *api.Namespace, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*Namespace))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_NamespaceSpec_To_api_NamespaceSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := convert_v1_NamespaceStatus_To_api_NamespaceStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_NamespaceList_To_api_NamespaceList converts list metadata then
// deep-converts Items; nil Items stays nil.
func convert_v1_NamespaceList_To_api_NamespaceList(in *NamespaceList, out *api.NamespaceList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NamespaceList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.Namespace, len(in.Items))
for i := range in.Items {
if err := convert_v1_Namespace_To_api_Namespace(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}

// convert_v1_NamespaceSpec_To_api_NamespaceSpec converts the Finalizers slice
// by per-element string-type cast to api.FinalizerName.
func convert_v1_NamespaceSpec_To_api_NamespaceSpec(in *NamespaceSpec, out *api.NamespaceSpec, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NamespaceSpec))(in)
}
if in.Finalizers != nil {
out.Finalizers = make([]api.FinalizerName, len(in.Finalizers))
for i := range in.Finalizers {
out.Finalizers[i] = api.FinalizerName(in.Finalizers[i])
}
} else {
out.Finalizers = nil
}
return nil
}

// convert_v1_NamespaceStatus_To_api_NamespaceStatus casts the Phase string
// type to the internal api.NamespacePhase.
func convert_v1_NamespaceStatus_To_api_NamespaceStatus(in *NamespaceStatus, out *api.NamespaceStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NamespaceStatus))(in)
}
out.Phase = api.NamespacePhase(in.Phase)
return nil
}
// convert_v1_Node_To_api_Node converts a Node by delegating each embedded
// section to its generated sibling converter.
func convert_v1_Node_To_api_Node(in *Node, out *api.Node, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*Node))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_NodeSpec_To_api_NodeSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := convert_v1_NodeStatus_To_api_NodeStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_NodeAddress_To_api_NodeAddress copies a node address, casting
// the Type string type to api.NodeAddressType.
func convert_v1_NodeAddress_To_api_NodeAddress(in *NodeAddress, out *api.NodeAddress, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NodeAddress))(in)
}
out.Type = api.NodeAddressType(in.Type)
out.Address = in.Address
return nil
}

// convert_v1_NodeCondition_To_api_NodeCondition converts a node condition;
// the two timestamp fields go through the generic s.Convert path.
func convert_v1_NodeCondition_To_api_NodeCondition(in *NodeCondition, out *api.NodeCondition, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NodeCondition))(in)
}
out.Type = api.NodeConditionType(in.Type)
out.Status = api.ConditionStatus(in.Status)
if err := s.Convert(&in.LastHeartbeatTime, &out.LastHeartbeatTime, 0); err != nil {
return err
}
if err := s.Convert(&in.LastTransitionTime, &out.LastTransitionTime, 0); err != nil {
return err
}
out.Reason = in.Reason
out.Message = in.Message
return nil
}

// convert_v1_NodeList_To_api_NodeList converts list metadata then
// deep-converts Items; nil Items stays nil.
func convert_v1_NodeList_To_api_NodeList(in *NodeList, out *api.NodeList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NodeList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.Node, len(in.Items))
for i := range in.Items {
if err := convert_v1_Node_To_api_Node(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}

// convert_v1_NodeSpec_To_api_NodeSpec copies the scalar node spec fields.
func convert_v1_NodeSpec_To_api_NodeSpec(in *NodeSpec, out *api.NodeSpec, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NodeSpec))(in)
}
out.PodCIDR = in.PodCIDR
out.ExternalID = in.ExternalID
out.ProviderID = in.ProviderID
out.Unschedulable = in.Unschedulable
return nil
}

// convert_v1_NodeStatus_To_api_NodeStatus converts node status: the Capacity
// resource map entry-by-entry via s.Convert, the Conditions and Addresses
// slices via sibling converters, and NodeInfo via its converter.
func convert_v1_NodeStatus_To_api_NodeStatus(in *NodeStatus, out *api.NodeStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NodeStatus))(in)
}
if in.Capacity != nil {
out.Capacity = make(api.ResourceList)
// &val addresses the per-iteration copy of the map value, which
// s.Convert reads into newVal; the map itself is never aliased.
for key, val := range in.Capacity {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Capacity[api.ResourceName(key)] = newVal
}
} else {
out.Capacity = nil
}
out.Phase = api.NodePhase(in.Phase)
if in.Conditions != nil {
out.Conditions = make([]api.NodeCondition, len(in.Conditions))
for i := range in.Conditions {
if err := convert_v1_NodeCondition_To_api_NodeCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
if in.Addresses != nil {
out.Addresses = make([]api.NodeAddress, len(in.Addresses))
for i := range in.Addresses {
if err := convert_v1_NodeAddress_To_api_NodeAddress(&in.Addresses[i], &out.Addresses[i], s); err != nil {
return err
}
}
} else {
out.Addresses = nil
}
if err := convert_v1_NodeSystemInfo_To_api_NodeSystemInfo(&in.NodeInfo, &out.NodeInfo, s); err != nil {
return err
}
return nil
}

// convert_v1_NodeSystemInfo_To_api_NodeSystemInfo copies the node system
// identification and version strings field-for-field.
func convert_v1_NodeSystemInfo_To_api_NodeSystemInfo(in *NodeSystemInfo, out *api.NodeSystemInfo, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*NodeSystemInfo))(in)
}
out.MachineID = in.MachineID
out.SystemUUID = in.SystemUUID
out.BootID = in.BootID
out.KernelVersion = in.KernelVersion
out.OsImage = in.OsImage
out.ContainerRuntimeVersion = in.ContainerRuntimeVersion
out.KubeletVersion = in.KubeletVersion
out.KubeProxyVersion = in.KubeProxyVersion
return nil
}

// convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector copies the
// downward-API field selector (APIVersion, FieldPath).
func convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector(in *ObjectFieldSelector, out *api.ObjectFieldSelector, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ObjectFieldSelector))(in)
}
out.APIVersion = in.APIVersion
out.FieldPath = in.FieldPath
return nil
}
// convert_v1_ObjectMeta_To_api_ObjectMeta converts standard object metadata:
// scalar identity fields by assignment, timestamps via s.Convert, the
// DeletionGracePeriodSeconds pointer by allocate-and-copy, and the Labels and
// Annotations maps by shallow entry copy (nil maps stay nil).
func convert_v1_ObjectMeta_To_api_ObjectMeta(in *ObjectMeta, out *api.ObjectMeta, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ObjectMeta))(in)
}
out.Name = in.Name
out.GenerateName = in.GenerateName
out.Namespace = in.Namespace
out.SelfLink = in.SelfLink
out.UID = in.UID
out.ResourceVersion = in.ResourceVersion
out.Generation = in.Generation
if err := s.Convert(&in.CreationTimestamp, &out.CreationTimestamp, 0); err != nil {
return err
}
if in.DeletionTimestamp != nil {
// NOTE(review): DeletionTimestamp is itself a pointer field, so this
// passes a pointer-to-pointer into s.Convert — matches the generator's
// pattern elsewhere (e.g. PodStatus.StartTime); confirm Scope handles it.
if err := s.Convert(&in.DeletionTimestamp, &out.DeletionTimestamp, 0); err != nil {
return err
}
} else {
out.DeletionTimestamp = nil
}
if in.DeletionGracePeriodSeconds != nil {
out.DeletionGracePeriodSeconds = new(int64)
*out.DeletionGracePeriodSeconds = *in.DeletionGracePeriodSeconds
} else {
out.DeletionGracePeriodSeconds = nil
}
if in.Labels != nil {
out.Labels = make(map[string]string)
for key, val := range in.Labels {
out.Labels[key] = val
}
} else {
out.Labels = nil
}
if in.Annotations != nil {
out.Annotations = make(map[string]string)
for key, val := range in.Annotations {
out.Annotations[key] = val
}
} else {
out.Annotations = nil
}
return nil
}

// convert_v1_ObjectReference_To_api_ObjectReference copies a cross-namespace
// object reference field-for-field (all scalar/string fields).
func convert_v1_ObjectReference_To_api_ObjectReference(in *ObjectReference, out *api.ObjectReference, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ObjectReference))(in)
}
out.Kind = in.Kind
out.Namespace = in.Namespace
out.Name = in.Name
out.UID = in.UID
out.APIVersion = in.APIVersion
out.ResourceVersion = in.ResourceVersion
out.FieldPath = in.FieldPath
return nil
}
// convert_v1_PersistentVolume_To_api_PersistentVolume converts a
// PersistentVolume by delegating each embedded section to its sibling
// converter.
func convert_v1_PersistentVolume_To_api_PersistentVolume(in *PersistentVolume, out *api.PersistentVolume, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolume))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim converts a
// PersistentVolumeClaim by delegating each embedded section.
func convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim(in *PersistentVolumeClaim, out *api.PersistentVolumeClaim, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeClaim))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList
// converts list metadata then deep-converts Items; nil Items stays nil.
func convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList(in *PersistentVolumeClaimList, out *api.PersistentVolumeClaimList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeClaimList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.PersistentVolumeClaim, len(in.Items))
for i := range in.Items {
if err := convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}

// convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec
// converts access modes by string-type cast, Resources via its converter,
// and copies VolumeName.
func convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec(in *PersistentVolumeClaimSpec, out *api.PersistentVolumeClaimSpec, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeClaimSpec))(in)
}
if in.AccessModes != nil {
out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i])
}
} else {
out.AccessModes = nil
}
if err := convert_v1_ResourceRequirements_To_api_ResourceRequirements(&in.Resources, &out.Resources, s); err != nil {
return err
}
out.VolumeName = in.VolumeName
return nil
}

// convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus
// converts claim status: Phase by cast, AccessModes by per-element cast, and
// the Capacity resource map entry-by-entry via s.Convert.
func convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus(in *PersistentVolumeClaimStatus, out *api.PersistentVolumeClaimStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeClaimStatus))(in)
}
out.Phase = api.PersistentVolumeClaimPhase(in.Phase)
if in.AccessModes != nil {
out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i])
}
} else {
out.AccessModes = nil
}
if in.Capacity != nil {
out.Capacity = make(api.ResourceList)
for key, val := range in.Capacity {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Capacity[api.ResourceName(key)] = newVal
}
} else {
out.Capacity = nil
}
return nil
}

// convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource
// copies the claim-backed volume source (ClaimName, ReadOnly).
func convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource(in *PersistentVolumeClaimVolumeSource, out *api.PersistentVolumeClaimVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeClaimVolumeSource))(in)
}
out.ClaimName = in.ClaimName
out.ReadOnly = in.ReadOnly
return nil
}

// convert_v1_PersistentVolumeList_To_api_PersistentVolumeList converts list
// metadata then deep-converts Items; nil Items stays nil.
func convert_v1_PersistentVolumeList_To_api_PersistentVolumeList(in *PersistentVolumeList, out *api.PersistentVolumeList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.PersistentVolume, len(in.Items))
for i := range in.Items {
if err := convert_v1_PersistentVolume_To_api_PersistentVolume(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
// convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource converts
// the union of possible volume backends: for each optional pointer member,
// allocate the internal struct and delegate when set, otherwise nil it out.
func convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource(in *PersistentVolumeSource, out *api.PersistentVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeSource))(in)
}
if in.GCEPersistentDisk != nil {
out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource)
if err := convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
return err
}
} else {
out.GCEPersistentDisk = nil
}
if in.AWSElasticBlockStore != nil {
out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource)
if err := convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
return err
}
} else {
out.AWSElasticBlockStore = nil
}
if in.HostPath != nil {
out.HostPath = new(api.HostPathVolumeSource)
if err := convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
return err
}
} else {
out.HostPath = nil
}
if in.Glusterfs != nil {
out.Glusterfs = new(api.GlusterfsVolumeSource)
if err := convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
return err
}
} else {
out.Glusterfs = nil
}
if in.NFS != nil {
out.NFS = new(api.NFSVolumeSource)
if err := convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
return err
}
} else {
out.NFS = nil
}
if in.RBD != nil {
out.RBD = new(api.RBDVolumeSource)
if err := convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
return err
}
} else {
out.RBD = nil
}
if in.ISCSI != nil {
out.ISCSI = new(api.ISCSIVolumeSource)
if err := convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
return err
}
} else {
out.ISCSI = nil
}
return nil
}

// convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec converts the PV
// spec: Capacity map entry-by-entry via s.Convert, the embedded
// PersistentVolumeSource via its converter, AccessModes by per-element cast,
// the optional ClaimRef by allocate-and-delegate, and the reclaim policy by
// string-type cast.
func convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec(in *PersistentVolumeSpec, out *api.PersistentVolumeSpec, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeSpec))(in)
}
if in.Capacity != nil {
out.Capacity = make(api.ResourceList)
for key, val := range in.Capacity {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Capacity[api.ResourceName(key)] = newVal
}
} else {
out.Capacity = nil
}
if err := convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource(&in.PersistentVolumeSource, &out.PersistentVolumeSource, s); err != nil {
return err
}
if in.AccessModes != nil {
out.AccessModes = make([]api.PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = api.PersistentVolumeAccessMode(in.AccessModes[i])
}
} else {
out.AccessModes = nil
}
if in.ClaimRef != nil {
out.ClaimRef = new(api.ObjectReference)
if err := convert_v1_ObjectReference_To_api_ObjectReference(in.ClaimRef, out.ClaimRef, s); err != nil {
return err
}
} else {
out.ClaimRef = nil
}
out.PersistentVolumeReclaimPolicy = api.PersistentVolumeReclaimPolicy(in.PersistentVolumeReclaimPolicy)
return nil
}

// convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus copies the
// PV status (Phase by cast, Message, Reason).
func convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus(in *PersistentVolumeStatus, out *api.PersistentVolumeStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PersistentVolumeStatus))(in)
}
out.Phase = api.PersistentVolumePhase(in.Phase)
out.Message = in.Message
out.Reason = in.Reason
return nil
}
// convert_v1_Pod_To_api_Pod converts a Pod by delegating each embedded
// section (TypeMeta, ObjectMeta, Spec, Status) to its sibling converter.
func convert_v1_Pod_To_api_Pod(in *Pod, out *api.Pod, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*Pod))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_PodSpec_To_api_PodSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := convert_v1_PodStatus_To_api_PodStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_PodAttachOptions_To_api_PodAttachOptions copies the attach
// subresource options (stdio/TTY flags and target container).
func convert_v1_PodAttachOptions_To_api_PodAttachOptions(in *PodAttachOptions, out *api.PodAttachOptions, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodAttachOptions))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
out.Stdin = in.Stdin
out.Stdout = in.Stdout
out.Stderr = in.Stderr
out.TTY = in.TTY
out.Container = in.Container
return nil
}

// convert_v1_PodCondition_To_api_PodCondition casts the condition Type and
// Status string types.
// NOTE(review): only Type and Status are converted here, unlike
// NodeCondition which also carries timestamps/reason/message — confirm the
// versioned PodCondition struct has no further fields at this API revision.
func convert_v1_PodCondition_To_api_PodCondition(in *PodCondition, out *api.PodCondition, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodCondition))(in)
}
out.Type = api.PodConditionType(in.Type)
out.Status = api.ConditionStatus(in.Status)
return nil
}

// convert_v1_PodExecOptions_To_api_PodExecOptions copies the exec
// subresource options, including a deep copy of the Command slice.
func convert_v1_PodExecOptions_To_api_PodExecOptions(in *PodExecOptions, out *api.PodExecOptions, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodExecOptions))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
out.Stdin = in.Stdin
out.Stdout = in.Stdout
out.Stderr = in.Stderr
out.TTY = in.TTY
out.Container = in.Container
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
return nil
}

// convert_v1_PodList_To_api_PodList converts list metadata then
// deep-converts Items; nil Items stays nil.
func convert_v1_PodList_To_api_PodList(in *PodList, out *api.PodList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.Pod, len(in.Items))
for i := range in.Items {
if err := convert_v1_Pod_To_api_Pod(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}

// convert_v1_PodLogOptions_To_api_PodLogOptions copies the log subresource
// options (Container, Follow, Previous).
func convert_v1_PodLogOptions_To_api_PodLogOptions(in *PodLogOptions, out *api.PodLogOptions, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodLogOptions))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
out.Container = in.Container
out.Follow = in.Follow
out.Previous = in.Previous
return nil
}

// convert_v1_PodProxyOptions_To_api_PodProxyOptions copies the proxy
// subresource options (Path).
func convert_v1_PodProxyOptions_To_api_PodProxyOptions(in *PodProxyOptions, out *api.PodProxyOptions, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodProxyOptions))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
out.Path = in.Path
return nil
}
// convert_v1_PodStatus_To_api_PodStatus converts pod status: Phase by cast,
// Conditions and ContainerStatuses via sibling converters, scalar strings by
// assignment, and the optional StartTime via s.Convert.
func convert_v1_PodStatus_To_api_PodStatus(in *PodStatus, out *api.PodStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodStatus))(in)
}
out.Phase = api.PodPhase(in.Phase)
if in.Conditions != nil {
out.Conditions = make([]api.PodCondition, len(in.Conditions))
for i := range in.Conditions {
if err := convert_v1_PodCondition_To_api_PodCondition(&in.Conditions[i], &out.Conditions[i], s); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
out.Message = in.Message
out.Reason = in.Reason
out.HostIP = in.HostIP
out.PodIP = in.PodIP
if in.StartTime != nil {
// StartTime is a pointer field, so s.Convert receives a
// pointer-to-pointer — same generator pattern as
// ObjectMeta.DeletionTimestamp.
if err := s.Convert(&in.StartTime, &out.StartTime, 0); err != nil {
return err
}
} else {
out.StartTime = nil
}
if in.ContainerStatuses != nil {
out.ContainerStatuses = make([]api.ContainerStatus, len(in.ContainerStatuses))
for i := range in.ContainerStatuses {
if err := convert_v1_ContainerStatus_To_api_ContainerStatus(&in.ContainerStatuses[i], &out.ContainerStatuses[i], s); err != nil {
return err
}
}
} else {
out.ContainerStatuses = nil
}
return nil
}

// convert_v1_PodStatusResult_To_api_PodStatusResult converts the wrapper
// object used to report a pod's status (metadata plus Status).
func convert_v1_PodStatusResult_To_api_PodStatusResult(in *PodStatusResult, out *api.PodStatusResult, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodStatusResult))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_PodStatus_To_api_PodStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_PodTemplate_To_api_PodTemplate converts a PodTemplate
// (metadata plus embedded Template spec).
func convert_v1_PodTemplate_To_api_PodTemplate(in *PodTemplate, out *api.PodTemplate, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodTemplate))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(&in.Template, &out.Template, s); err != nil {
return err
}
return nil
}

// convert_v1_PodTemplateList_To_api_PodTemplateList converts list metadata
// then deep-converts Items; nil Items stays nil.
func convert_v1_PodTemplateList_To_api_PodTemplateList(in *PodTemplateList, out *api.PodTemplateList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodTemplateList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.PodTemplate, len(in.Items))
for i := range in.Items {
if err := convert_v1_PodTemplate_To_api_PodTemplate(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}

// convert_v1_PodTemplateSpec_To_api_PodTemplateSpec converts the template's
// ObjectMeta and PodSpec (no TypeMeta on this embedded type).
func convert_v1_PodTemplateSpec_To_api_PodTemplateSpec(in *PodTemplateSpec, out *api.PodTemplateSpec, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*PodTemplateSpec))(in)
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_PodSpec_To_api_PodSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
return nil
}
// convert_v1_Probe_To_api_Probe converts a probe: the embedded Handler via
// its converter plus the delay/timeout scalars.
func convert_v1_Probe_To_api_Probe(in *Probe, out *api.Probe, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*Probe))(in)
}
if err := convert_v1_Handler_To_api_Handler(&in.Handler, &out.Handler, s); err != nil {
return err
}
out.InitialDelaySeconds = in.InitialDelaySeconds
out.TimeoutSeconds = in.TimeoutSeconds
return nil
}

// convert_v1_RBDVolumeSource_To_api_RBDVolumeSource converts an RBD volume
// source: deep-copies CephMonitors, copies the scalar fields, and converts
// the optional SecretRef by allocate-and-delegate.
func convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in *RBDVolumeSource, out *api.RBDVolumeSource, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*RBDVolumeSource))(in)
}
if in.CephMonitors != nil {
out.CephMonitors = make([]string, len(in.CephMonitors))
for i := range in.CephMonitors {
out.CephMonitors[i] = in.CephMonitors[i]
}
} else {
out.CephMonitors = nil
}
out.RBDImage = in.RBDImage
out.FSType = in.FSType
out.RBDPool = in.RBDPool
out.RadosUser = in.RadosUser
out.Keyring = in.Keyring
if in.SecretRef != nil {
out.SecretRef = new(api.LocalObjectReference)
if err := convert_v1_LocalObjectReference_To_api_LocalObjectReference(in.SecretRef, out.SecretRef, s); err != nil {
return err
}
} else {
out.SecretRef = nil
}
out.ReadOnly = in.ReadOnly
return nil
}

// convert_v1_RangeAllocation_To_api_RangeAllocation converts a range
// allocation: metadata via sibling converters, Range by assignment, and the
// Data payload via the generic s.Convert path.
func convert_v1_RangeAllocation_To_api_RangeAllocation(in *RangeAllocation, out *api.RangeAllocation, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*RangeAllocation))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
out.Range = in.Range
if err := s.Convert(&in.Data, &out.Data, 0); err != nil {
return err
}
return nil
}
// convert_v1_ReplicationController_To_api_ReplicationController converts a
// ReplicationController by delegating each embedded section.
func convert_v1_ReplicationController_To_api_ReplicationController(in *ReplicationController, out *api.ReplicationController, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ReplicationController))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_ReplicationControllerSpec_To_api_ReplicationControllerSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_ReplicationControllerList_To_api_ReplicationControllerList
// converts list metadata then deep-converts Items; nil Items stays nil.
func convert_v1_ReplicationControllerList_To_api_ReplicationControllerList(in *ReplicationControllerList, out *api.ReplicationControllerList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ReplicationControllerList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.ReplicationController, len(in.Items))
for i := range in.Items {
if err := convert_v1_ReplicationController_To_api_ReplicationController(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}

// convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus
// copies the controller status counters (Replicas, ObservedGeneration).
func convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus(in *ReplicationControllerStatus, out *api.ReplicationControllerStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ReplicationControllerStatus))(in)
}
out.Replicas = in.Replicas
out.ObservedGeneration = in.ObservedGeneration
return nil
}

// convert_v1_ResourceQuota_To_api_ResourceQuota converts a ResourceQuota by
// delegating each embedded section.
func convert_v1_ResourceQuota_To_api_ResourceQuota(in *ResourceQuota, out *api.ResourceQuota, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ResourceQuota))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
return err
}
if err := convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}

// convert_v1_ResourceQuotaList_To_api_ResourceQuotaList converts list
// metadata then deep-converts Items; nil Items stays nil.
func convert_v1_ResourceQuotaList_To_api_ResourceQuotaList(in *ResourceQuotaList, out *api.ResourceQuotaList, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ResourceQuotaList))(in)
}
if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
return err
}
if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]api.ResourceQuota, len(in.Items))
for i := range in.Items {
if err := convert_v1_ResourceQuota_To_api_ResourceQuota(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
// convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec converts the Hard
// resource-limits map entry-by-entry via s.Convert; nil map stays nil.
func convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec(in *ResourceQuotaSpec, out *api.ResourceQuotaSpec, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ResourceQuotaSpec))(in)
}
if in.Hard != nil {
out.Hard = make(api.ResourceList)
for key, val := range in.Hard {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Hard[api.ResourceName(key)] = newVal
}
} else {
out.Hard = nil
}
return nil
}

// convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus converts both
// the Hard and Used resource maps entry-by-entry; nil maps stay nil.
func convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus(in *ResourceQuotaStatus, out *api.ResourceQuotaStatus, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ResourceQuotaStatus))(in)
}
if in.Hard != nil {
out.Hard = make(api.ResourceList)
for key, val := range in.Hard {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Hard[api.ResourceName(key)] = newVal
}
} else {
out.Hard = nil
}
if in.Used != nil {
out.Used = make(api.ResourceList)
for key, val := range in.Used {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Used[api.ResourceName(key)] = newVal
}
} else {
out.Used = nil
}
return nil
}

// convert_v1_ResourceRequirements_To_api_ResourceRequirements converts the
// Limits and Requests resource maps entry-by-entry; nil maps stay nil.
func convert_v1_ResourceRequirements_To_api_ResourceRequirements(in *ResourceRequirements, out *api.ResourceRequirements, s conversion.Scope) error {
if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
defaulting.(func(*ResourceRequirements))(in)
}
if in.Limits != nil {
out.Limits = make(api.ResourceList)
for key, val := range in.Limits {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Limits[api.ResourceName(key)] = newVal
}
} else {
out.Limits = nil
}
if in.Requests != nil {
out.Requests = make(api.ResourceList)
for key, val := range in.Requests {
newVal := resource.Quantity{}
if err := s.Convert(&val, &newVal, 0); err != nil {
return err
}
out.Requests[api.ResourceName(key)] = newVal
}
} else {
out.Requests = nil
}
return nil
}
// convert_v1_SELinuxOptions_To_api_SELinuxOptions is an autogenerated conversion from the
// versioned (v1) type to the internal api type; all fields are plain string copies.
func convert_v1_SELinuxOptions_To_api_SELinuxOptions(in *SELinuxOptions, out *api.SELinuxOptions, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SELinuxOptions))(in)
	}
	out.User = in.User
	out.Role = in.Role
	out.Type = in.Type
	out.Level = in.Level
	return nil
}
// convert_v1_Secret_To_api_Secret is an autogenerated conversion from the versioned (v1)
// type to the internal api type. Metadata is converted via the dedicated helpers; the
// Data map's byte slices are converted one entry at a time (nil in -> nil out).
func convert_v1_Secret_To_api_Secret(in *Secret, out *api.Secret, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Secret))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Data != nil {
		out.Data = make(map[string][]uint8)
		for key, val := range in.Data {
			newVal := []uint8{}
			if err := s.Convert(&val, &newVal, 0); err != nil {
				return err
			}
			out.Data[key] = newVal
		}
	} else {
		out.Data = nil
	}
	out.Type = api.SecretType(in.Type)
	return nil
}
// convert_v1_SecretList_To_api_SecretList is an autogenerated conversion from the versioned
// (v1) list type to the internal api list type; Items are converted in place, index by index.
func convert_v1_SecretList_To_api_SecretList(in *SecretList, out *api.SecretList, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SecretList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Secret, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Secret_To_api_Secret(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_SecretVolumeSource_To_api_SecretVolumeSource is an autogenerated conversion
// from the versioned (v1) type to the internal api type; SecretName is a plain copy.
func convert_v1_SecretVolumeSource_To_api_SecretVolumeSource(in *SecretVolumeSource, out *api.SecretVolumeSource, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SecretVolumeSource))(in)
	}
	out.SecretName = in.SecretName
	return nil
}
// convert_v1_SecurityContext_To_api_SecurityContext is an autogenerated conversion from the
// versioned (v1) type to the internal api type. Pointer fields (Capabilities, Privileged,
// SELinuxOptions, RunAsUser) are deep-copied into freshly allocated values; nil stays nil.
func convert_v1_SecurityContext_To_api_SecurityContext(in *SecurityContext, out *api.SecurityContext, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SecurityContext))(in)
	}
	if in.Capabilities != nil {
		out.Capabilities = new(api.Capabilities)
		if err := convert_v1_Capabilities_To_api_Capabilities(in.Capabilities, out.Capabilities, s); err != nil {
			return err
		}
	} else {
		out.Capabilities = nil
	}
	if in.Privileged != nil {
		out.Privileged = new(bool)
		*out.Privileged = *in.Privileged
	} else {
		out.Privileged = nil
	}
	if in.SELinuxOptions != nil {
		out.SELinuxOptions = new(api.SELinuxOptions)
		if err := convert_v1_SELinuxOptions_To_api_SELinuxOptions(in.SELinuxOptions, out.SELinuxOptions, s); err != nil {
			return err
		}
	} else {
		out.SELinuxOptions = nil
	}
	if in.RunAsUser != nil {
		out.RunAsUser = new(int64)
		*out.RunAsUser = *in.RunAsUser
	} else {
		out.RunAsUser = nil
	}
	out.RunAsNonRoot = in.RunAsNonRoot
	return nil
}
// convert_v1_SerializedReference_To_api_SerializedReference is an autogenerated conversion
// from the versioned (v1) type to the internal api type via the TypeMeta and ObjectReference
// helper conversions.
func convert_v1_SerializedReference_To_api_SerializedReference(in *SerializedReference, out *api.SerializedReference, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*SerializedReference))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Reference, &out.Reference, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_Service_To_api_Service is an autogenerated conversion from the versioned (v1)
// type to the internal api type; metadata, spec and status are delegated to their helpers.
func convert_v1_Service_To_api_Service(in *Service, out *api.Service, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Service))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ServiceSpec_To_api_ServiceSpec(&in.Spec, &out.Spec, s); err != nil {
		return err
	}
	if err := convert_v1_ServiceStatus_To_api_ServiceStatus(&in.Status, &out.Status, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_ServiceAccount_To_api_ServiceAccount is an autogenerated conversion from the
// versioned (v1) type to the internal api type. The Secrets and ImagePullSecrets slices are
// converted element by element; nil slices stay nil.
func convert_v1_ServiceAccount_To_api_ServiceAccount(in *ServiceAccount, out *api.ServiceAccount, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceAccount))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if in.Secrets != nil {
		out.Secrets = make([]api.ObjectReference, len(in.Secrets))
		for i := range in.Secrets {
			if err := convert_v1_ObjectReference_To_api_ObjectReference(&in.Secrets[i], &out.Secrets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Secrets = nil
	}
	if in.ImagePullSecrets != nil {
		out.ImagePullSecrets = make([]api.LocalObjectReference, len(in.ImagePullSecrets))
		for i := range in.ImagePullSecrets {
			if err := convert_v1_LocalObjectReference_To_api_LocalObjectReference(&in.ImagePullSecrets[i], &out.ImagePullSecrets[i], s); err != nil {
				return err
			}
		}
	} else {
		out.ImagePullSecrets = nil
	}
	return nil
}
// convert_v1_ServiceAccountList_To_api_ServiceAccountList is an autogenerated conversion from
// the versioned (v1) list type to the internal api list type; Items are converted in place.
func convert_v1_ServiceAccountList_To_api_ServiceAccountList(in *ServiceAccountList, out *api.ServiceAccountList, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceAccountList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.ServiceAccount, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_ServiceAccount_To_api_ServiceAccount(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_ServiceList_To_api_ServiceList is an autogenerated conversion from the versioned
// (v1) list type to the internal api list type; Items are converted in place, index by index.
func convert_v1_ServiceList_To_api_ServiceList(in *ServiceList, out *api.ServiceList, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceList))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]api.Service, len(in.Items))
		for i := range in.Items {
			if err := convert_v1_Service_To_api_Service(&in.Items[i], &out.Items[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// convert_v1_ServicePort_To_api_ServicePort is an autogenerated conversion from the versioned
// (v1) type to the internal api type. TargetPort goes through the generic Scope conversion
// because it is an IntOrString-style union type rather than a plain scalar.
func convert_v1_ServicePort_To_api_ServicePort(in *ServicePort, out *api.ServicePort, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServicePort))(in)
	}
	out.Name = in.Name
	out.Protocol = api.Protocol(in.Protocol)
	out.Port = in.Port
	if err := s.Convert(&in.TargetPort, &out.TargetPort, 0); err != nil {
		return err
	}
	out.NodePort = in.NodePort
	return nil
}
// convert_v1_ServiceSpec_To_api_ServiceSpec is an autogenerated conversion from the versioned
// (v1) type to the internal api type. Ports are converted per element, Selector and
// ExternalIPs are shallow-copied into fresh containers; nil inputs stay nil.
func convert_v1_ServiceSpec_To_api_ServiceSpec(in *ServiceSpec, out *api.ServiceSpec, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceSpec))(in)
	}
	if in.Ports != nil {
		out.Ports = make([]api.ServicePort, len(in.Ports))
		for i := range in.Ports {
			if err := convert_v1_ServicePort_To_api_ServicePort(&in.Ports[i], &out.Ports[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	if in.Selector != nil {
		out.Selector = make(map[string]string)
		for key, val := range in.Selector {
			out.Selector[key] = val
		}
	} else {
		out.Selector = nil
	}
	out.ClusterIP = in.ClusterIP
	out.Type = api.ServiceType(in.Type)
	if in.ExternalIPs != nil {
		out.ExternalIPs = make([]string, len(in.ExternalIPs))
		for i := range in.ExternalIPs {
			out.ExternalIPs[i] = in.ExternalIPs[i]
		}
	} else {
		out.ExternalIPs = nil
	}
	out.SessionAffinity = api.ServiceAffinity(in.SessionAffinity)
	return nil
}
// convert_v1_ServiceStatus_To_api_ServiceStatus is an autogenerated conversion from the
// versioned (v1) type to the internal api type via the LoadBalancerStatus helper.
func convert_v1_ServiceStatus_To_api_ServiceStatus(in *ServiceStatus, out *api.ServiceStatus, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ServiceStatus))(in)
	}
	if err := convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus(&in.LoadBalancer, &out.LoadBalancer, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_Status_To_api_Status is an autogenerated conversion from the versioned (v1)
// type to the internal api type. The optional Details pointer is deep-copied into a fresh
// allocation; nil stays nil.
func convert_v1_Status_To_api_Status(in *Status, out *api.Status, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Status))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ListMeta_To_api_ListMeta(&in.ListMeta, &out.ListMeta, s); err != nil {
		return err
	}
	out.Status = in.Status
	out.Message = in.Message
	out.Reason = api.StatusReason(in.Reason)
	if in.Details != nil {
		out.Details = new(api.StatusDetails)
		if err := convert_v1_StatusDetails_To_api_StatusDetails(in.Details, out.Details, s); err != nil {
			return err
		}
	} else {
		out.Details = nil
	}
	out.Code = in.Code
	return nil
}
// convert_v1_StatusCause_To_api_StatusCause is an autogenerated conversion from the versioned
// (v1) type to the internal api type; fields are plain copies (Type via a cast).
func convert_v1_StatusCause_To_api_StatusCause(in *StatusCause, out *api.StatusCause, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*StatusCause))(in)
	}
	out.Type = api.CauseType(in.Type)
	out.Message = in.Message
	out.Field = in.Field
	return nil
}
// convert_v1_StatusDetails_To_api_StatusDetails is an autogenerated conversion from the
// versioned (v1) type to the internal api type; the Causes slice is converted per element.
func convert_v1_StatusDetails_To_api_StatusDetails(in *StatusDetails, out *api.StatusDetails, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*StatusDetails))(in)
	}
	out.Name = in.Name
	out.Kind = in.Kind
	if in.Causes != nil {
		out.Causes = make([]api.StatusCause, len(in.Causes))
		for i := range in.Causes {
			if err := convert_v1_StatusCause_To_api_StatusCause(&in.Causes[i], &out.Causes[i], s); err != nil {
				return err
			}
		}
	} else {
		out.Causes = nil
	}
	out.RetryAfterSeconds = in.RetryAfterSeconds
	return nil
}
// convert_v1_TCPSocketAction_To_api_TCPSocketAction is an autogenerated conversion from the
// versioned (v1) type to the internal api type. Port goes through the generic Scope
// conversion because it is an IntOrString-style union type.
func convert_v1_TCPSocketAction_To_api_TCPSocketAction(in *TCPSocketAction, out *api.TCPSocketAction, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*TCPSocketAction))(in)
	}
	if err := s.Convert(&in.Port, &out.Port, 0); err != nil {
		return err
	}
	return nil
}
// convert_v1_ThirdPartyResourceData_To_api_ThirdPartyResourceData is an autogenerated
// conversion from the versioned (v1) type to the internal api type; the opaque Data payload
// goes through the generic Scope conversion.
func convert_v1_ThirdPartyResourceData_To_api_ThirdPartyResourceData(in *ThirdPartyResourceData, out *api.ThirdPartyResourceData, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*ThirdPartyResourceData))(in)
	}
	if err := convert_v1_TypeMeta_To_api_TypeMeta(&in.TypeMeta, &out.TypeMeta, s); err != nil {
		return err
	}
	if err := convert_v1_ObjectMeta_To_api_ObjectMeta(&in.ObjectMeta, &out.ObjectMeta, s); err != nil {
		return err
	}
	if err := s.Convert(&in.Data, &out.Data, 0); err != nil {
		return err
	}
	return nil
}
// convert_v1_TypeMeta_To_api_TypeMeta is an autogenerated conversion from the versioned (v1)
// type to the internal api type; Kind and APIVersion are plain string copies.
func convert_v1_TypeMeta_To_api_TypeMeta(in *TypeMeta, out *api.TypeMeta, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*TypeMeta))(in)
	}
	out.Kind = in.Kind
	out.APIVersion = in.APIVersion
	return nil
}
// convert_v1_Volume_To_api_Volume is an autogenerated conversion from the versioned (v1)
// type to the internal api type; the embedded VolumeSource is delegated to its helper.
func convert_v1_Volume_To_api_Volume(in *Volume, out *api.Volume, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*Volume))(in)
	}
	out.Name = in.Name
	if err := convert_v1_VolumeSource_To_api_VolumeSource(&in.VolumeSource, &out.VolumeSource, s); err != nil {
		return err
	}
	return nil
}
// convert_v1_VolumeMount_To_api_VolumeMount is an autogenerated conversion from the versioned
// (v1) type to the internal api type; all fields are plain copies.
func convert_v1_VolumeMount_To_api_VolumeMount(in *VolumeMount, out *api.VolumeMount, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*VolumeMount))(in)
	}
	out.Name = in.Name
	out.ReadOnly = in.ReadOnly
	out.MountPath = in.MountPath
	return nil
}
// convert_v1_VolumeSource_To_api_VolumeSource is an autogenerated conversion from the
// versioned (v1) union type to the internal api type. Exactly the fields set on the input
// (each a pointer acting as a union member) are deep-copied into fresh allocations; every
// nil member stays nil on the output.
func convert_v1_VolumeSource_To_api_VolumeSource(in *VolumeSource, out *api.VolumeSource, s conversion.Scope) error {
	// Apply any registered defaulting function for the source type before converting.
	if defaulting, found := s.DefaultingInterface(reflect.TypeOf(*in)); found {
		defaulting.(func(*VolumeSource))(in)
	}
	if in.HostPath != nil {
		out.HostPath = new(api.HostPathVolumeSource)
		if err := convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource(in.HostPath, out.HostPath, s); err != nil {
			return err
		}
	} else {
		out.HostPath = nil
	}
	if in.EmptyDir != nil {
		out.EmptyDir = new(api.EmptyDirVolumeSource)
		if err := convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource(in.EmptyDir, out.EmptyDir, s); err != nil {
			return err
		}
	} else {
		out.EmptyDir = nil
	}
	if in.GCEPersistentDisk != nil {
		out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource)
		if err := convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource(in.GCEPersistentDisk, out.GCEPersistentDisk, s); err != nil {
			return err
		}
	} else {
		out.GCEPersistentDisk = nil
	}
	if in.AWSElasticBlockStore != nil {
		out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource)
		if err := convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource(in.AWSElasticBlockStore, out.AWSElasticBlockStore, s); err != nil {
			return err
		}
	} else {
		out.AWSElasticBlockStore = nil
	}
	if in.GitRepo != nil {
		out.GitRepo = new(api.GitRepoVolumeSource)
		if err := convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource(in.GitRepo, out.GitRepo, s); err != nil {
			return err
		}
	} else {
		out.GitRepo = nil
	}
	if in.Secret != nil {
		out.Secret = new(api.SecretVolumeSource)
		if err := convert_v1_SecretVolumeSource_To_api_SecretVolumeSource(in.Secret, out.Secret, s); err != nil {
			return err
		}
	} else {
		out.Secret = nil
	}
	if in.NFS != nil {
		out.NFS = new(api.NFSVolumeSource)
		if err := convert_v1_NFSVolumeSource_To_api_NFSVolumeSource(in.NFS, out.NFS, s); err != nil {
			return err
		}
	} else {
		out.NFS = nil
	}
	if in.ISCSI != nil {
		out.ISCSI = new(api.ISCSIVolumeSource)
		if err := convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource(in.ISCSI, out.ISCSI, s); err != nil {
			return err
		}
	} else {
		out.ISCSI = nil
	}
	if in.Glusterfs != nil {
		out.Glusterfs = new(api.GlusterfsVolumeSource)
		if err := convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource(in.Glusterfs, out.Glusterfs, s); err != nil {
			return err
		}
	} else {
		out.Glusterfs = nil
	}
	if in.PersistentVolumeClaim != nil {
		out.PersistentVolumeClaim = new(api.PersistentVolumeClaimVolumeSource)
		if err := convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource(in.PersistentVolumeClaim, out.PersistentVolumeClaim, s); err != nil {
			return err
		}
	} else {
		out.PersistentVolumeClaim = nil
	}
	if in.RBD != nil {
		out.RBD = new(api.RBDVolumeSource)
		if err := convert_v1_RBDVolumeSource_To_api_RBDVolumeSource(in.RBD, out.RBD, s); err != nil {
			return err
		}
	} else {
		out.RBD = nil
	}
	return nil
}
// init registers every generated conversion function (both directions: internal api <-> v1)
// with the global api.Scheme at package load time. Registration failure indicates a
// malformed generated function and is unrecoverable, hence the panic.
func init() {
	err := api.Scheme.AddGeneratedConversionFuncs(
		convert_api_AWSElasticBlockStoreVolumeSource_To_v1_AWSElasticBlockStoreVolumeSource,
		convert_api_Binding_To_v1_Binding,
		convert_api_Capabilities_To_v1_Capabilities,
		convert_api_ComponentCondition_To_v1_ComponentCondition,
		convert_api_ComponentStatusList_To_v1_ComponentStatusList,
		convert_api_ComponentStatus_To_v1_ComponentStatus,
		convert_api_ContainerPort_To_v1_ContainerPort,
		convert_api_ContainerStateRunning_To_v1_ContainerStateRunning,
		convert_api_ContainerStateTerminated_To_v1_ContainerStateTerminated,
		convert_api_ContainerStateWaiting_To_v1_ContainerStateWaiting,
		convert_api_ContainerState_To_v1_ContainerState,
		convert_api_ContainerStatus_To_v1_ContainerStatus,
		convert_api_Container_To_v1_Container,
		convert_api_DaemonList_To_v1_DaemonList,
		convert_api_DaemonSpec_To_v1_DaemonSpec,
		convert_api_DaemonStatus_To_v1_DaemonStatus,
		convert_api_Daemon_To_v1_Daemon,
		convert_api_DeleteOptions_To_v1_DeleteOptions,
		convert_api_EmptyDirVolumeSource_To_v1_EmptyDirVolumeSource,
		convert_api_EndpointAddress_To_v1_EndpointAddress,
		convert_api_EndpointPort_To_v1_EndpointPort,
		convert_api_EndpointSubset_To_v1_EndpointSubset,
		convert_api_EndpointsList_To_v1_EndpointsList,
		convert_api_Endpoints_To_v1_Endpoints,
		convert_api_EnvVarSource_To_v1_EnvVarSource,
		convert_api_EnvVar_To_v1_EnvVar,
		convert_api_EventList_To_v1_EventList,
		convert_api_EventSource_To_v1_EventSource,
		convert_api_Event_To_v1_Event,
		convert_api_ExecAction_To_v1_ExecAction,
		convert_api_GCEPersistentDiskVolumeSource_To_v1_GCEPersistentDiskVolumeSource,
		convert_api_GitRepoVolumeSource_To_v1_GitRepoVolumeSource,
		convert_api_GlusterfsVolumeSource_To_v1_GlusterfsVolumeSource,
		convert_api_HTTPGetAction_To_v1_HTTPGetAction,
		convert_api_Handler_To_v1_Handler,
		convert_api_HostPathVolumeSource_To_v1_HostPathVolumeSource,
		convert_api_ISCSIVolumeSource_To_v1_ISCSIVolumeSource,
		convert_api_Lifecycle_To_v1_Lifecycle,
		convert_api_LimitRangeItem_To_v1_LimitRangeItem,
		convert_api_LimitRangeList_To_v1_LimitRangeList,
		convert_api_LimitRangeSpec_To_v1_LimitRangeSpec,
		convert_api_LimitRange_To_v1_LimitRange,
		convert_api_ListMeta_To_v1_ListMeta,
		convert_api_ListOptions_To_v1_ListOptions,
		convert_api_List_To_v1_List,
		convert_api_LoadBalancerIngress_To_v1_LoadBalancerIngress,
		convert_api_LoadBalancerStatus_To_v1_LoadBalancerStatus,
		convert_api_LocalObjectReference_To_v1_LocalObjectReference,
		convert_api_NFSVolumeSource_To_v1_NFSVolumeSource,
		convert_api_NamespaceList_To_v1_NamespaceList,
		convert_api_NamespaceSpec_To_v1_NamespaceSpec,
		convert_api_NamespaceStatus_To_v1_NamespaceStatus,
		convert_api_Namespace_To_v1_Namespace,
		convert_api_NodeAddress_To_v1_NodeAddress,
		convert_api_NodeCondition_To_v1_NodeCondition,
		convert_api_NodeList_To_v1_NodeList,
		convert_api_NodeSpec_To_v1_NodeSpec,
		convert_api_NodeStatus_To_v1_NodeStatus,
		convert_api_NodeSystemInfo_To_v1_NodeSystemInfo,
		convert_api_Node_To_v1_Node,
		convert_api_ObjectFieldSelector_To_v1_ObjectFieldSelector,
		convert_api_ObjectMeta_To_v1_ObjectMeta,
		convert_api_ObjectReference_To_v1_ObjectReference,
		convert_api_PersistentVolumeClaimList_To_v1_PersistentVolumeClaimList,
		convert_api_PersistentVolumeClaimSpec_To_v1_PersistentVolumeClaimSpec,
		convert_api_PersistentVolumeClaimStatus_To_v1_PersistentVolumeClaimStatus,
		convert_api_PersistentVolumeClaimVolumeSource_To_v1_PersistentVolumeClaimVolumeSource,
		convert_api_PersistentVolumeClaim_To_v1_PersistentVolumeClaim,
		convert_api_PersistentVolumeList_To_v1_PersistentVolumeList,
		convert_api_PersistentVolumeSource_To_v1_PersistentVolumeSource,
		convert_api_PersistentVolumeSpec_To_v1_PersistentVolumeSpec,
		convert_api_PersistentVolumeStatus_To_v1_PersistentVolumeStatus,
		convert_api_PersistentVolume_To_v1_PersistentVolume,
		convert_api_PodAttachOptions_To_v1_PodAttachOptions,
		convert_api_PodCondition_To_v1_PodCondition,
		convert_api_PodExecOptions_To_v1_PodExecOptions,
		convert_api_PodList_To_v1_PodList,
		convert_api_PodLogOptions_To_v1_PodLogOptions,
		convert_api_PodProxyOptions_To_v1_PodProxyOptions,
		convert_api_PodStatusResult_To_v1_PodStatusResult,
		convert_api_PodStatus_To_v1_PodStatus,
		convert_api_PodTemplateList_To_v1_PodTemplateList,
		convert_api_PodTemplateSpec_To_v1_PodTemplateSpec,
		convert_api_PodTemplate_To_v1_PodTemplate,
		convert_api_Pod_To_v1_Pod,
		convert_api_Probe_To_v1_Probe,
		convert_api_RBDVolumeSource_To_v1_RBDVolumeSource,
		convert_api_RangeAllocation_To_v1_RangeAllocation,
		convert_api_ReplicationControllerList_To_v1_ReplicationControllerList,
		convert_api_ReplicationControllerStatus_To_v1_ReplicationControllerStatus,
		convert_api_ReplicationController_To_v1_ReplicationController,
		convert_api_ResourceQuotaList_To_v1_ResourceQuotaList,
		convert_api_ResourceQuotaSpec_To_v1_ResourceQuotaSpec,
		convert_api_ResourceQuotaStatus_To_v1_ResourceQuotaStatus,
		convert_api_ResourceQuota_To_v1_ResourceQuota,
		convert_api_ResourceRequirements_To_v1_ResourceRequirements,
		convert_api_SELinuxOptions_To_v1_SELinuxOptions,
		convert_api_SecretList_To_v1_SecretList,
		convert_api_SecretVolumeSource_To_v1_SecretVolumeSource,
		convert_api_Secret_To_v1_Secret,
		convert_api_SecurityContext_To_v1_SecurityContext,
		convert_api_SerializedReference_To_v1_SerializedReference,
		convert_api_ServiceAccountList_To_v1_ServiceAccountList,
		convert_api_ServiceAccount_To_v1_ServiceAccount,
		convert_api_ServiceList_To_v1_ServiceList,
		convert_api_ServicePort_To_v1_ServicePort,
		convert_api_ServiceSpec_To_v1_ServiceSpec,
		convert_api_ServiceStatus_To_v1_ServiceStatus,
		convert_api_Service_To_v1_Service,
		convert_api_StatusCause_To_v1_StatusCause,
		convert_api_StatusDetails_To_v1_StatusDetails,
		convert_api_Status_To_v1_Status,
		convert_api_TCPSocketAction_To_v1_TCPSocketAction,
		convert_api_ThirdPartyResourceData_To_v1_ThirdPartyResourceData,
		convert_api_TypeMeta_To_v1_TypeMeta,
		convert_api_VolumeMount_To_v1_VolumeMount,
		convert_api_VolumeSource_To_v1_VolumeSource,
		convert_api_Volume_To_v1_Volume,
		convert_v1_AWSElasticBlockStoreVolumeSource_To_api_AWSElasticBlockStoreVolumeSource,
		convert_v1_Binding_To_api_Binding,
		convert_v1_Capabilities_To_api_Capabilities,
		convert_v1_ComponentCondition_To_api_ComponentCondition,
		convert_v1_ComponentStatusList_To_api_ComponentStatusList,
		convert_v1_ComponentStatus_To_api_ComponentStatus,
		convert_v1_ContainerPort_To_api_ContainerPort,
		convert_v1_ContainerStateRunning_To_api_ContainerStateRunning,
		convert_v1_ContainerStateTerminated_To_api_ContainerStateTerminated,
		convert_v1_ContainerStateWaiting_To_api_ContainerStateWaiting,
		convert_v1_ContainerState_To_api_ContainerState,
		convert_v1_ContainerStatus_To_api_ContainerStatus,
		convert_v1_Container_To_api_Container,
		convert_v1_DaemonList_To_api_DaemonList,
		convert_v1_DaemonSpec_To_api_DaemonSpec,
		convert_v1_DaemonStatus_To_api_DaemonStatus,
		convert_v1_Daemon_To_api_Daemon,
		convert_v1_DeleteOptions_To_api_DeleteOptions,
		convert_v1_EmptyDirVolumeSource_To_api_EmptyDirVolumeSource,
		convert_v1_EndpointAddress_To_api_EndpointAddress,
		convert_v1_EndpointPort_To_api_EndpointPort,
		convert_v1_EndpointSubset_To_api_EndpointSubset,
		convert_v1_EndpointsList_To_api_EndpointsList,
		convert_v1_Endpoints_To_api_Endpoints,
		convert_v1_EnvVarSource_To_api_EnvVarSource,
		convert_v1_EnvVar_To_api_EnvVar,
		convert_v1_EventList_To_api_EventList,
		convert_v1_EventSource_To_api_EventSource,
		convert_v1_Event_To_api_Event,
		convert_v1_ExecAction_To_api_ExecAction,
		convert_v1_GCEPersistentDiskVolumeSource_To_api_GCEPersistentDiskVolumeSource,
		convert_v1_GitRepoVolumeSource_To_api_GitRepoVolumeSource,
		convert_v1_GlusterfsVolumeSource_To_api_GlusterfsVolumeSource,
		convert_v1_HTTPGetAction_To_api_HTTPGetAction,
		convert_v1_Handler_To_api_Handler,
		convert_v1_HostPathVolumeSource_To_api_HostPathVolumeSource,
		convert_v1_ISCSIVolumeSource_To_api_ISCSIVolumeSource,
		convert_v1_Lifecycle_To_api_Lifecycle,
		convert_v1_LimitRangeItem_To_api_LimitRangeItem,
		convert_v1_LimitRangeList_To_api_LimitRangeList,
		convert_v1_LimitRangeSpec_To_api_LimitRangeSpec,
		convert_v1_LimitRange_To_api_LimitRange,
		convert_v1_ListMeta_To_api_ListMeta,
		convert_v1_ListOptions_To_api_ListOptions,
		convert_v1_List_To_api_List,
		convert_v1_LoadBalancerIngress_To_api_LoadBalancerIngress,
		convert_v1_LoadBalancerStatus_To_api_LoadBalancerStatus,
		convert_v1_LocalObjectReference_To_api_LocalObjectReference,
		convert_v1_NFSVolumeSource_To_api_NFSVolumeSource,
		convert_v1_NamespaceList_To_api_NamespaceList,
		convert_v1_NamespaceSpec_To_api_NamespaceSpec,
		convert_v1_NamespaceStatus_To_api_NamespaceStatus,
		convert_v1_Namespace_To_api_Namespace,
		convert_v1_NodeAddress_To_api_NodeAddress,
		convert_v1_NodeCondition_To_api_NodeCondition,
		convert_v1_NodeList_To_api_NodeList,
		convert_v1_NodeSpec_To_api_NodeSpec,
		convert_v1_NodeStatus_To_api_NodeStatus,
		convert_v1_NodeSystemInfo_To_api_NodeSystemInfo,
		convert_v1_Node_To_api_Node,
		convert_v1_ObjectFieldSelector_To_api_ObjectFieldSelector,
		convert_v1_ObjectMeta_To_api_ObjectMeta,
		convert_v1_ObjectReference_To_api_ObjectReference,
		convert_v1_PersistentVolumeClaimList_To_api_PersistentVolumeClaimList,
		convert_v1_PersistentVolumeClaimSpec_To_api_PersistentVolumeClaimSpec,
		convert_v1_PersistentVolumeClaimStatus_To_api_PersistentVolumeClaimStatus,
		convert_v1_PersistentVolumeClaimVolumeSource_To_api_PersistentVolumeClaimVolumeSource,
		convert_v1_PersistentVolumeClaim_To_api_PersistentVolumeClaim,
		convert_v1_PersistentVolumeList_To_api_PersistentVolumeList,
		convert_v1_PersistentVolumeSource_To_api_PersistentVolumeSource,
		convert_v1_PersistentVolumeSpec_To_api_PersistentVolumeSpec,
		convert_v1_PersistentVolumeStatus_To_api_PersistentVolumeStatus,
		convert_v1_PersistentVolume_To_api_PersistentVolume,
		convert_v1_PodAttachOptions_To_api_PodAttachOptions,
		convert_v1_PodCondition_To_api_PodCondition,
		convert_v1_PodExecOptions_To_api_PodExecOptions,
		convert_v1_PodList_To_api_PodList,
		convert_v1_PodLogOptions_To_api_PodLogOptions,
		convert_v1_PodProxyOptions_To_api_PodProxyOptions,
		convert_v1_PodStatusResult_To_api_PodStatusResult,
		convert_v1_PodStatus_To_api_PodStatus,
		convert_v1_PodTemplateList_To_api_PodTemplateList,
		convert_v1_PodTemplateSpec_To_api_PodTemplateSpec,
		convert_v1_PodTemplate_To_api_PodTemplate,
		convert_v1_Pod_To_api_Pod,
		convert_v1_Probe_To_api_Probe,
		convert_v1_RBDVolumeSource_To_api_RBDVolumeSource,
		convert_v1_RangeAllocation_To_api_RangeAllocation,
		convert_v1_ReplicationControllerList_To_api_ReplicationControllerList,
		convert_v1_ReplicationControllerStatus_To_api_ReplicationControllerStatus,
		convert_v1_ReplicationController_To_api_ReplicationController,
		convert_v1_ResourceQuotaList_To_api_ResourceQuotaList,
		convert_v1_ResourceQuotaSpec_To_api_ResourceQuotaSpec,
		convert_v1_ResourceQuotaStatus_To_api_ResourceQuotaStatus,
		convert_v1_ResourceQuota_To_api_ResourceQuota,
		convert_v1_ResourceRequirements_To_api_ResourceRequirements,
		convert_v1_SELinuxOptions_To_api_SELinuxOptions,
		convert_v1_SecretList_To_api_SecretList,
		convert_v1_SecretVolumeSource_To_api_SecretVolumeSource,
		convert_v1_Secret_To_api_Secret,
		convert_v1_SecurityContext_To_api_SecurityContext,
		convert_v1_SerializedReference_To_api_SerializedReference,
		convert_v1_ServiceAccountList_To_api_ServiceAccountList,
		convert_v1_ServiceAccount_To_api_ServiceAccount,
		convert_v1_ServiceList_To_api_ServiceList,
		convert_v1_ServicePort_To_api_ServicePort,
		convert_v1_ServiceSpec_To_api_ServiceSpec,
		convert_v1_ServiceStatus_To_api_ServiceStatus,
		convert_v1_Service_To_api_Service,
		convert_v1_StatusCause_To_api_StatusCause,
		convert_v1_StatusDetails_To_api_StatusDetails,
		convert_v1_Status_To_api_Status,
		convert_v1_TCPSocketAction_To_api_TCPSocketAction,
		convert_v1_ThirdPartyResourceData_To_api_ThirdPartyResourceData,
		convert_v1_TypeMeta_To_api_TypeMeta,
		convert_v1_VolumeMount_To_api_VolumeMount,
		convert_v1_VolumeSource_To_api_VolumeSource,
		convert_v1_Volume_To_api_Volume,
	)
	if err != nil {
		// If one of the conversion functions is malformed, detect it immediately.
		panic(err)
	}
}
| yyljlyy/kubernetes | pkg/api/v1/conversion_generated.go | GO | apache-2.0 | 168,006 |
/**
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package parquet.hadoop.codec;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.compress.Decompressor;
import org.xerial.snappy.Snappy;
import parquet.Preconditions;
/**
 * Hadoop {@link Decompressor} backed by snappy-java. Compressed input is accumulated in a
 * growable direct {@code inputBuffer} via {@link #setInput}; the first {@link #decompress}
 * call after input arrives uncompresses the whole accumulated chunk into {@code outputBuffer}
 * and subsequent calls drain it. Not thread-safe beyond the {@code synchronized} methods.
 */
public class SnappyDecompressor implements Decompressor {
// Buffer for uncompressed output. This buffer grows as necessary.
private ByteBuffer outputBuffer = ByteBuffer.allocateDirect(0);
// Buffer for compressed input. This buffer grows as necessary.
private ByteBuffer inputBuffer = ByteBuffer.allocateDirect(0);
// Set once the accumulated input has been fully decompressed; cleared by reset().
private boolean finished;
/**
 * Fills specified buffer with uncompressed data. Returns actual number
 * of bytes of uncompressed data. A return value of 0 indicates that
 * {@link #needsInput()} should be called in order to determine if more
 * input data is required.
 *
 * @param buffer Buffer into which uncompressed data is written
 * @param off Start offset of the data
 * @param len Size of the buffer
 * @return The actual number of bytes of uncompressed data.
 * @throws IOException
 */
@Override
public synchronized int decompress(byte[] buffer, int off, int len) throws IOException {
SnappyUtil.validateBuffer(buffer, off, len);
// No pending compressed input and no decompressed output left: caller must setInput first.
if (inputBuffer.position() == 0 && !outputBuffer.hasRemaining()) {
return 0;
}
if (!outputBuffer.hasRemaining()) {
// Flip input from "write" mode (position == bytes accumulated) to "read" mode.
inputBuffer.rewind();
Preconditions.checkArgument(inputBuffer.position() == 0, "Invalid position of 0.");
Preconditions.checkArgument(outputBuffer.position() == 0, "Invalid position of 0.");
// There is compressed input, decompress it now.
int decompressedSize = Snappy.uncompressedLength(inputBuffer);
if (decompressedSize > outputBuffer.capacity()) {
outputBuffer = ByteBuffer.allocateDirect(decompressedSize);
}
// Reset the previous outputBuffer (i.e. set position to 0)
outputBuffer.clear();
int size = Snappy.uncompress(inputBuffer, outputBuffer);
outputBuffer.limit(size);
// We've decompressed the entire input, reset the input now
inputBuffer.clear();
inputBuffer.limit(0);
finished = true;
}
// Copy decompressed output up to 'len' bytes; remaining bytes stay for the next call.
int numBytes = Math.min(len, outputBuffer.remaining());
outputBuffer.get(buffer, off, numBytes);
return numBytes;
}
/**
 * Sets input data for decompression.
 * This should be called if and only if {@link #needsInput()} returns
 * <code>true</code> indicating that more input data is required.
 * (Both native and non-native versions of various Decompressors require
 * that the data passed in via <code>b[]</code> remain unmodified until
 * the caller is explicitly notified--via {@link #needsInput()}--that the
 * buffer may be safely modified. With this requirement, an extra
 * buffer-copy can be avoided.)
 *
 * @param buffer Input data
 * @param off Start offset
 * @param len Length
 */
@Override
public synchronized void setInput(byte[] buffer, int off, int len) {
SnappyUtil.validateBuffer(buffer, off, len);
if (inputBuffer.capacity() - inputBuffer.position() < len) {
// Grow by reallocating and copying the bytes accumulated so far.
ByteBuffer newBuffer = ByteBuffer.allocateDirect(inputBuffer.position() + len);
inputBuffer.rewind();
newBuffer.put(inputBuffer);
inputBuffer = newBuffer;
} else {
// Extend the writable window to make room for the new bytes.
inputBuffer.limit(inputBuffer.position() + len);
}
inputBuffer.put(buffer, off, len);
}
@Override
public void end() {
// No-op
}
/** True once all accumulated input has been decompressed and fully drained. */
@Override
public synchronized boolean finished() {
return finished && !outputBuffer.hasRemaining();
}
// NOTE(review): always reports 0 even when compressed bytes remain buffered; the
// Decompressor contract says this should be the count of unconsumed input — confirm
// whether callers in this codebase rely on the 0 value before changing.
@Override
public int getRemaining() {
return 0;
}
/** More input is needed only when both the input and output buffers are drained. */
@Override
public synchronized boolean needsInput() {
return !inputBuffer.hasRemaining() && !outputBuffer.hasRemaining();
}
/** Restores the pristine state so the instance can be reused for a new stream. */
@Override
public synchronized void reset() {
finished = false;
inputBuffer.rewind();
outputBuffer.rewind();
inputBuffer.limit(0);
outputBuffer.limit(0);
}
@Override
public boolean needsDictionary() {
return false;
}
@Override
public void setDictionary(byte[] b, int off, int len) {
// No-op
}
}
| liancheng/parquet-mr | parquet-hadoop/src/main/java/parquet/hadoop/codec/SnappyDecompressor.java | Java | apache-2.0 | 4,725 |
/**
* Copyright 2014 NAVER Corp.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.interceptor.bci;
import java.util.concurrent.Callable;
/**
* @author jaehong.kim
*
*/
/**
 * Fixture class exercising every flavor of Java nested class: anonymous inner classes,
 * member (instance) inner classes, method-local classes, and static nested classes.
 * The class STRUCTURE is the point — presumably the bytecode-instrumentation (BCI) tests
 * inspect the generated nested-class forms, so do not refactor or merge these members.
 */
public class TestObjectNestedClass {
// Two anonymous classes (Callable and Runnable) declared in one method.
public void annonymousInnerClass() {
new Callable<Object>() {
@Override
public Object call() throws Exception {
return null;
}
};
new Runnable() {
public void run() {
}
};
}
// Deliberate duplicate of the method above: same anonymous-class shapes in a second method.
public void annonymousInnerClass2() {
new Callable<Object>() {
@Override
public Object call() throws Exception {
return null;
}
};
new Runnable() {
public void run() {
}
};
}
// Instantiates a (non-static) member inner class.
public void instanceInnerClass() {
new InstanceInner();
}
class InstanceInner {}
// Declares and instantiates a method-local class.
public void localInnerClass() {
class LocalInner {}
new LocalInner();
}
// Second method-local class with the same simple name, in a different method.
public void localInnerClass2() {
class LocalInner {}
new LocalInner();
}
// Instantiates a static nested class.
public void staticNestedClass() {
new StaticNested();
}
static class StaticNested{}
// Local class declared inside a method that takes parameters (enclosing-method attribute case).
public void enclosingMethod(String s, int i) {
class LocalInner {}
new LocalInner();
}
}
| dawidmalina/pinpoint | profiler/src/test/java/com/navercorp/pinpoint/profiler/interceptor/bci/TestObjectNestedClass.java | Java | apache-2.0 | 1,898 |
/*
* Copyright (c) 2017, WSO2 Inc. (http://wso2.com) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package server.obj;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement(name = "Salary")
public class Salary {
    // JAXB-mapped salary record, serialized as the <Salary> root element.
    private long id;        // identifier of this salary record
    private long fixed;     // fixed salary component (see getFixed)
    private long allowance; // allowance component (see getAllowance)
    private String empId;   // identifier of the employee this salary belongs to
    /** @return the identifier of this salary record */
    public long getId() {
        return id;
    }
    /** @param id the identifier of this salary record */
    public void setId(long id) {
        this.id = id;
    }
    /** @return the fixed salary component */
    public long getFixed() {
        return fixed;
    }
public void setFexed(long fixed) {
this.fixed = fixed;
}
    /** @return the allowance component of the salary */
    public long getAllowance() {
        return allowance;
    }
    /** @param allowance the allowance component of the salary */
    public void setAllowance(long allowance) {
        this.allowance = allowance;
    }
    /** @return the identifier of the employee this salary belongs to */
    public String getEmpId() {
        return empId;
    }
    /** @param empId the identifier of the employee this salary belongs to */
    public void setEmpId(String empId) {
        this.empId = empId;
    }
} | jaadds/product-apim | sample-scenarios/backend/src/main/java/server/obj/Salary.java | Java | apache-2.0 | 1,401 |
/**
* slider - jQuery EasyUI
*
* Copyright (c) 2009-2013 www.jeasyui.com. All rights reserved.
*
* Licensed under the GPL or commercial licenses
* To use it on other terms please contact us: info@jeasyui.com
* http://www.gnu.org/licenses/gpl.txt
* http://www.jeasyui.com/license_commercial.php
*
* Dependencies:
* draggable
*
*/
(function($){
/**
 * Build the slider widget DOM right after the original element, hide the
 * original element, and move its form "name" onto the hidden value input so
 * the slider still submits with the form. Returns the new wrapper element.
 */
function init(target){
	var markup = [
		'<div class="slider">',
		'<div class="slider-inner">',
		'<a href="javascript:void(0)" class="slider-handle"></a>',
		'<span class="slider-tip"></span>',
		'</div>',
		'<div class="slider-rule"></div>',
		'<div class="slider-rulelabel"></div>',
		'<div style="clear:both"></div>',
		'<input type="hidden" class="slider-value">',
		'</div>'
	].join('');
	var wrapper = $(markup).insertAfter(target);
	var input = $(target);
	input.addClass('slider-f').hide();
	var name = input.attr('name');
	if (name){
		// Hand the form name over to the hidden input; remember the original
		// name on the element as "sliderName" for later restoration.
		wrapper.find('input.slider-value').attr('name', name);
		input.removeAttr('name').attr('sliderName', name);
	}
	return wrapper;
}
/**
 * Set the slider size and re-apply the current value so the handle stays in
 * the right spot. For a vertical slider the height property is required.
 * "param" may carry {width, height} overrides that are merged into options.
 */
function setSize(target, param){
	var state = $.data(target, 'slider');
	var opts = state.options;
	var slider = state.slider;
	if (param){
		if (param.width) opts.width = param.width;
		if (param.height) opts.height = param.height;
	}
	if (opts.mode == 'h'){
		// Horizontal: clear any explicit heights and apply a numeric width.
		slider.css('height', '');
		slider.children('div').css('height', '');
		if (!isNaN(opts.width)){
			slider.width(opts.width);
		}
	} else {
		// Vertical: clear widths and push the height down to the track,
		// rule, labels and inner container.
		slider.css('width', '');
		slider.children('div').css('width', '');
		if (!isNaN(opts.height)){
			slider.height(opts.height);
			slider.find('div.slider-rule').height(opts.height);
			slider.find('div.slider-rulelabel').height(opts.height);
			slider.find('div.slider-inner')._outerHeight(opts.height);
		}
	}
	// Re-position the handle for the new geometry without firing onChange.
	initValue(target);
}
/**
 * Render the rule (tick marks) and rule labels from opts.rule, evenly spaced
 * along the track. A '|' entry draws a tick with no label. For vertical
 * sliders the rule array is applied bottom-to-top (reversed); opts.reversed
 * flips it once more.
 */
function showRule(target){
	var state = $.data(target, 'slider');
	var opts = state.options;
	var slider = state.slider;
	// slice(0) copies before reverse() so opts.rule itself is not mutated.
	var aa = opts.mode == 'h' ? opts.rule : opts.rule.slice(0).reverse();
	if (opts.reversed){
		aa = aa.slice(0).reverse();
	}
	_build(aa);
	function _build(aa){
		var rule = slider.find('div.slider-rule');
		var label = slider.find('div.slider-rulelabel');
		rule.empty();
		label.empty();
		for(var i=0; i<aa.length; i++){
			// Percentage offset of the i-th tick along the track.
			var distance = i*100/(aa.length-1)+'%';
			var span = $('<span></span>').appendTo(rule);
			span.css((opts.mode=='h'?'left':'top'), distance);
			// show the labels ('|' means tick only, no label)
			if (aa[i] != '|'){
				span = $('<span></span>').appendTo(label);
				span.html(aa[i]);
				if (opts.mode == 'h'){
					// Center the label horizontally on the tick.
					span.css({
						left: distance,
						marginLeft: -Math.round(span.outerWidth()/2)
					});
				} else {
					// Center the label vertically on the tick.
					span.css({
						top: distance,
						marginTop: -Math.round(span.outerHeight()/2)
					});
				}
			}
		}
	}
}
/**
 * (Re)build the slider behavior: orientation/disabled CSS classes, the
 * draggable handle, and the click-to-jump binding on the track. Called on
 * creation and again whenever the disabled state changes.
 */
function buildSlider(target){
	var state = $.data(target, 'slider');
	var opts = state.options;
	var slider = state.slider;
	slider.removeClass('slider-h slider-v slider-disabled');
	slider.addClass(opts.mode == 'h' ? 'slider-h' : 'slider-v');
	slider.addClass(opts.disabled ? 'slider-disabled' : '');
	slider.find('a.slider-handle').draggable({
		axis:opts.mode,
		cursor:'pointer',
		disabled: opts.disabled,
		onDrag:function(e){
			// Constrain the drag to the track; convert the pixel offset to a
			// value and snap it. Returning false cancels the default drag
			// positioning — the handle is moved by setValue instead.
			var left = e.data.left;
			var width = slider.width();
			if (opts.mode!='h'){
				left = e.data.top;
				width = slider.height();
			}
			if (left < 0 || left > width) {
				return false;
			} else {
				var value = pos2value(target, left);
				adjustValue(value);
				return false;
			}
		},
		onBeforeDrag:function(){
			// Flag used to suppress the track's mousedown handler while a
			// handle drag is in progress.
			state.isDragging = true;
		},
		onStartDrag:function(){
			opts.onSlideStart.call(target, opts.value);
		},
		onStopDrag:function(e){
			var value = pos2value(target, (opts.mode=='h'?e.data.left:e.data.top));
			adjustValue(value);
			opts.onSlideEnd.call(target, opts.value);
			opts.onComplete.call(target, opts.value);
			state.isDragging = false;
		}
	});
	// Clicking anywhere on the track jumps the handle to that spot.
	slider.find('div.slider-inner').unbind('.slider').bind('mousedown.slider', function(e){
		if (state.isDragging){return}
		var pos = $(this).offset();
		var value = pos2value(target, (opts.mode=='h'?(e.pageX-pos.left):(e.pageY-pos.top)));
		adjustValue(value);
		opts.onComplete.call(target, opts.value);
	});
	// Snap a raw value to the nearest multiple of opts.step, then apply it.
	function adjustValue(value){
		var s = Math.abs(value % opts.step);
		if (s < opts.step/2){
			value -= s;
		} else {
			value = value - s + opts.step;
		}
		setValue(target, value);
	}
}
/**
 * Set a specific value on the slider: clamp to [min, max], mirror the value
 * into the original element and the hidden form field, move the handle/tip,
 * and fire onChange when the value actually changed.
 */
function setValue(target, value){
	var state = $.data(target, 'slider');
	var opts = state.options;
	var slider = state.slider;
	var oldValue = opts.value;
	// Clamp the requested value into the configured range.
	if (value < opts.min) value = opts.min;
	if (value > opts.max) value = opts.max;
	opts.value = value;
	$(target).val(value);
	slider.find('input.slider-value').val(value);
	var pos = value2pos(target, value);
	var tip = slider.find('.slider-tip');
	if (opts.showTip){
		tip.show();
		tip.html(opts.tipFormatter.call(target, opts.value));
	} else {
		tip.hide();
	}
	if (opts.mode == 'h'){
		// Horizontal: offset from the left, tip centered over the handle.
		var style = 'left:'+pos+'px;';
		slider.find('.slider-handle').attr('style', style);
		tip.attr('style', style + 'margin-left:' + (-Math.round(tip.outerWidth()/2)) + 'px');
	} else {
		// Vertical: offset from the top.
		// NOTE(review): the tip is shifted left by its full width rather than
		// centered — presumably intentional so it sits beside the track;
		// confirm before changing.
		var style = 'top:' + pos + 'px;';
		slider.find('.slider-handle').attr('style', style);
		tip.attr('style', style + 'margin-left:' + (-Math.round(tip.outerWidth())) + 'px');
	}
	// Loose != on purpose: values may be a mix of numbers and the strings
	// produced by value2pos/pos2value.
	if (oldValue != value){
		opts.onChange.call(target, value, oldValue);
	}
}
/**
 * Apply opts.value to the slider without triggering the user's onChange
 * callback (used during creation and resize).
 */
function initValue(target){
	var opts = $.data(target, 'slider').options;
	var userHandler = opts.onChange;
	// Temporarily replace the handler with a no-op while the value is applied.
	opts.onChange = function(){};
	setValue(target, opts.value);
	opts.onChange = userHandler;
}
/**
 * Translate a slider value into a pixel offset along the track.
 * Returns the offset as a string with no decimals (Number#toFixed),
 * matching what the positioning code in setValue expects.
 */
function value2pos(target, value){
	var state = $.data(target, 'slider');
	var opts = state.options;
	var slider = state.slider;
	// Track length depends on orientation.
	var track = (opts.mode == 'h') ? slider.width() : slider.height();
	// How far along [min, max] the value sits, as a fraction.
	var fraction = (value - opts.min) / (opts.max - opts.min);
	// Horizontal sliders grow left-to-right; vertical ones bottom-to-top.
	var pos = (opts.mode == 'h') ? fraction * track : track - fraction * track;
	if (opts.reversed){
		pos = track - pos;
	}
	return pos.toFixed(0);
}
/**
 * Translate a pixel offset along the track into a slider value via linear
 * interpolation over [min, max]; for vertical sliders the origin is at the
 * bottom of the track.
 */
function pos2value(target, pos){
	var state = $.data(target, 'slider');
	var opts = state.options;
	var slider = state.slider;
	if (opts.mode == 'h'){
		var value = opts.min + (opts.max-opts.min)*(pos/slider.width());
	} else {
		var value = opts.min + (opts.max-opts.min)*((slider.height()-pos)/slider.height());
	}
	// NOTE(review): toFixed(0) yields a string, so the reversed branch returns
	// a number (max - "n" coerces numerically) while the plain branch returns
	// a string. Callers rely on loose comparison/arithmetic coercion —
	// confirm before normalizing the return type.
	return opts.reversed ? opts.max - value.toFixed(0) : value.toFixed(0);
}
/**
 * jQuery plugin entry point.
 * - $(el).slider('methodName', param) dispatches to $.fn.slider.methods.
 * - $(el).slider({options}) creates the widget or merges new options in.
 */
$.fn.slider = function(options, param){
	if (typeof options == 'string'){
		return $.fn.slider.methods[options](this, param);
	}
	options = options || {};
	return this.each(function(){
		var state = $.data(this, 'slider');
		if (state){
			// Already initialized: merge the new options in.
			$.extend(state.options, options);
		} else {
			// First call: merge defaults <- DOM attributes <- passed options,
			// then build the widget DOM next to the element.
			state = $.data(this, 'slider', {
				options: $.extend({}, $.fn.slider.defaults, $.fn.slider.parseOptions(this), options),
				slider: init(this)
			});
			$(this).removeAttr('disabled');
		}
		var opts = state.options;
		// Normalize numeric options that may arrive as strings from markup.
		opts.min = parseFloat(opts.min);
		opts.max = parseFloat(opts.max);
		opts.value = parseFloat(opts.value);
		opts.step = parseFloat(opts.step);
		// Remembered so the 'reset' method can restore the initial value.
		opts.originalValue = opts.value;
		buildSlider(this);
		showRule(this);
		setSize(this);
	});
};
/**
 * Methods reachable via $(el).slider('name', param).
 */
$.fn.slider.methods = {
	// Options object of the first matched slider.
	options: function(jq){
		return $.data(jq[0], 'slider').options;
	},
	// Remove both the widget DOM and the original element.
	destroy: function(jq){
		return jq.each(function(){
			$.data(this, 'slider').slider.remove();
			$(this).remove();
		});
	},
	// Resize; param may carry {width, height}.
	resize: function(jq, param){
		return jq.each(function(){
			setSize(this, param);
		});
	},
	getValue: function(jq){
		return jq.slider('options').value;
	},
	setValue: function(jq, value){
		return jq.each(function(){
			setValue(this, value);
		});
	},
	// Set the value back to the minimum.
	clear: function(jq){
		return jq.each(function(){
			var opts = $(this).slider('options');
			setValue(this, opts.min);
		});
	},
	// Restore the value the slider was created with.
	reset: function(jq){
		return jq.each(function(){
			var opts = $(this).slider('options');
			setValue(this, opts.originalValue);
		});
	},
	// Enabling/disabling rebuilds the drag/click behavior.
	enable: function(jq){
		return jq.each(function(){
			$.data(this, 'slider').options.disabled = false;
			buildSlider(this);
		});
	},
	disable: function(jq){
		return jq.each(function(){
			$.data(this, 'slider').options.disabled = true;
			buildSlider(this);
		});
	}
};
/**
 * Read options from the element's attributes (data-options plus the listed
 * named attributes).
 */
$.fn.slider.parseOptions = function(target){
	var t = $(target);
	return $.extend({}, $.parser.parseOptions(target, [
		'width','height','mode',{reversed:'boolean',showTip:'boolean',min:'number',max:'number',step:'number'}
	]), {
		value: (t.val() || undefined),
		disabled: (t.attr('disabled') ? true : undefined),
		// NOTE(review): eval() on the 'rule' attribute executes arbitrary JS
		// from markup — acceptable only for trusted page content.
		rule: (t.attr('rule') ? eval(t.attr('rule')) : undefined)
	});
};
$.fn.slider.defaults = {
	width: 'auto',
	height: 'auto',
	mode: 'h', // 'h'(horizontal) or 'v'(vertical)
	reversed: false, // flip the direction of growth along the track
	showTip: false, // show a tooltip with the current value
	disabled: false,
	value: 0,
	min: 0,
	max: 100,
	step: 1, // values snap to multiples of this
	rule: [], // [0,'|',100]; '|' draws a tick without a label
	tipFormatter: function(value){return value},
	onChange: function(value, oldValue){},
	onSlideStart: function(value){},
	onSlideEnd: function(value){},
	onComplete: function(value){}
};
})(jQuery);
| OneFlying/searchweb | webapp/static/lib/jquery-easyui-1.3.5/src/jquery.slider.js | JavaScript | apache-2.0 | 9,989 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.blogspot.na5cent.connectdb;
import com.blogspot.na5cent.connectdb.model.Department;
import com.blogspot.na5cent.connectdb.printer.GenericReflectPrinter;
import com.blogspot.na5cent.connectdb.query.Page;
import com.blogspot.na5cent.connectdb.query.Pagination;
import com.blogspot.na5cent.connectdb.service.DepartmentService;
/**
*
* @author anonymous
*/
public class S5QueryPagination {
    /**
     * Demo entry point: fetches one page of departments and prints the paging
     * metadata followed by the page contents (via reflective printing).
     */
    public static void main(String[] args) throws Exception {
        // Pagination(1, 5) — presumably page number 1 with a page size of 5;
        // confirm against Pagination's constructor.
        Pagination pagination = new Pagination(1, 5);
        Page<Department> page = DepartmentService.findAll(pagination);
        System.out.println("total elements = " + page.getTotalElements());
        System.out.println("total pages = " + page.getTotalPages());
        System.out.println("page size = " + page.getPageRequestSize());
        System.out.println("current page = " + page.getCurrentPageNumber());
        System.out.println("current page size = " + page.getCurrentPageSize());
        GenericReflectPrinter.prints(page.getContents());
    }
}
| jittagornp/cpe4235 | connect-database/src/main/java/com/blogspot/na5cent/connectdb/S5QueryPagination.java | Java | apache-2.0 | 1,216 |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.bigdata;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.PluginRegistryExtension;
import org.pentaho.di.core.plugins.PluginTypeInterface;
import org.pentaho.di.core.plugins.RegistryPlugin;
@RegistryPlugin(
  id = "ShimDependentPluginRegistryPlugin",
  name = "ShimDependentPluginRegistryPlugin",
  description = "Registers sub plugins of the big data plugin that depend on the shim jars in their classpath" )
public class ShimDependentPluginRegistryPlugin implements PluginRegistryExtension {
  // This extension does not contribute plugin ids of its own.
  @Override
  public String getPluginId( Class<? extends PluginTypeInterface> arg0, Object arg1 ) {
    return null;
  }
  @Override
  public void init( PluginRegistry pluginRegistry ) {
    // Register the shim-dependent job entry plugin type, but only once the
    // Kettle client environment has been initialized.
    // NOTE(review): when the environment is not yet initialized this silently
    // skips registration — confirm a later initialization pass re-invokes it.
    if ( KettleClientEnvironment.isInitialized() ) {
      PluginRegistry.addPluginType( ShimDependentJobEntryPluginType.getInstance() );
    }
  }
  @Override
  public void searchForType( PluginTypeInterface pluginTypeInterface ) {
    // No-op: no extra plugin locations to scan for this extension.
  }
}
| pedrofvteixeira/big-data-plugin | legacy/src/main/java/org/pentaho/di/bigdata/ShimDependentPluginRegistryPlugin.java | Java | apache-2.0 | 1,948 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.intentions.base;
import com.intellij.codeInsight.CodeInsightUtilCore;
import com.intellij.codeInsight.daemon.impl.quickfix.CreateFromUsageUtils;
import com.intellij.codeInsight.daemon.impl.quickfix.CreateMethodFromUsageFix;
import com.intellij.codeInsight.template.*;
import com.intellij.ide.fileTemplates.FileTemplate;
import com.intellij.ide.fileTemplates.FileTemplateManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.groovy.GroovyLanguage;
import org.jetbrains.plugins.groovy.actions.GroovyTemplates;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.expectedTypes.TypeConstraint;
import org.jetbrains.plugins.groovy.lang.psi.util.GrTraitUtil;
import org.jetbrains.plugins.groovy.template.expressions.ChooseTypeExpression;
import org.jetbrains.plugins.groovy.template.expressions.ParameterNameExpression;
/**
 * Helpers shared by Groovy intentions for generating a method stub and letting
 * the user fill in the return type, parameter types and names via an
 * interactive live template.
 */
public class IntentionUtils {
  private static final Logger LOG = Logger.getInstance(IntentionUtils.class);
  /**
   * Builds a live template over a freshly generated {@code method} inside
   * {@code owner} and starts it in the editor. The template offers type
   * choices for the return type (unless {@code isConstructor}) and for every
   * parameter, plus name suggestions for the parameters. When the template
   * finishes, the method body is filled in from the "from usage" file
   * template and required type parameters are copied from {@code context}.
   *
   * @param argTypes              inferred types of the call-site arguments;
   *                              must match the method's parameter count
   * @param paramTypesExpressions type-choice expressions, one per parameter
   * @param method                the generated method to template over
   * @param owner                 the class the method was added to
   * @param constraints           expected-type constraints for the return type
   * @param isConstructor         true when generating a constructor (no return type)
   * @param context               the element the method is generated from (call site)
   */
  public static void createTemplateForMethod(PsiType[] argTypes,
                                             ChooseTypeExpression[] paramTypesExpressions,
                                             PsiMethod method,
                                             PsiClass owner,
                                             TypeConstraint[] constraints,
                                             boolean isConstructor,
                                             @NotNull final PsiElement context) {
    final Project project = owner.getProject();
    PsiTypeElement typeElement = method.getReturnTypeElement();
    ChooseTypeExpression expr =
      new ChooseTypeExpression(constraints, PsiManager.getInstance(project), context.getResolveScope(),
                               method.getLanguage() == GroovyLanguage.INSTANCE
      );
    TemplateBuilderImpl builder = new TemplateBuilderImpl(method);
    if (!isConstructor) {
      // Constructors have no return type element to template over.
      assert typeElement != null;
      builder.replaceElement(typeElement, expr);
    }
    PsiParameter[] parameters = method.getParameterList().getParameters();
    assert parameters.length == argTypes.length;
    // One editable type slot and one editable name slot per parameter.
    for (int i = 0; i < parameters.length; i++) {
      PsiParameter parameter = parameters[i];
      PsiTypeElement parameterTypeElement = parameter.getTypeElement();
      builder.replaceElement(parameterTypeElement, paramTypesExpressions[i]);
      builder.replaceElement(parameter.getNameIdentifier(), new ParameterNameExpression(null));
    }
    PsiCodeBlock body = method.getBody();
    if (body != null) {
      // Leave the caret just inside the body once the template completes.
      PsiElement lbrace = body.getLBrace();
      assert lbrace != null;
      builder.setEndVariableAfter(lbrace);
    }
    else {
      builder.setEndVariableAfter(method.getParameterList());
    }
    // Commit pending PSI changes and re-acquire a valid method reference.
    method = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(method);
    Template template = builder.buildTemplate();
    final PsiFile targetFile = owner.getContainingFile();
    // NOTE(review): positionCursor may return null (no virtual file); the
    // calls below would then NPE — presumably a generated method always lives
    // in a physical file; confirm.
    final Editor newEditor = positionCursor(project, targetFile, method);
    // The live template re-inserts the text itself, so remove the stub first.
    TextRange range = method.getTextRange();
    newEditor.getDocument().deleteString(range.getStartOffset(), range.getEndOffset());
    TemplateManager manager = TemplateManager.getInstance(project);
    TemplateEditingListener templateListener = new TemplateEditingAdapter() {
      @Override
      public void templateFinished(Template template, boolean brokenOff) {
        ApplicationManager.getApplication().runWriteAction(() -> {
          PsiDocumentManager.getInstance(project).commitDocument(newEditor.getDocument());
          // Locate the method the template just produced (caret sits at its end).
          final int offset = newEditor.getCaretModel().getOffset();
          PsiMethod method1 = PsiTreeUtil.findElementOfClassAtOffset(targetFile, offset - 1, PsiMethod.class, false);
          if (context instanceof PsiMethod) {
            // Copy type parameters used by the new method from the context
            // method, creating a type-parameter list when missing.
            final PsiTypeParameter[] typeParameters = ((PsiMethod)context).getTypeParameters();
            if (typeParameters.length > 0) {
              for (PsiTypeParameter typeParameter : typeParameters) {
                if (CreateMethodFromUsageFix.checkTypeParam(method1, typeParameter)) {
                  final JVMElementFactory factory = JVMElementFactories.getFactory(method1.getLanguage(), method1.getProject());
                  PsiTypeParameterList list = method1.getTypeParameterList();
                  if (list == null) {
                    PsiTypeParameterList newList = factory.createTypeParameterList();
                    list = (PsiTypeParameterList)method1.addAfter(newList, method1.getModifierList());
                  }
                  list.add(factory.createTypeParameter(typeParameter.getName(), typeParameter.getExtendsList().getReferencedTypes()));
                }
              }
            }
          }
          if (method1 != null) {
            try {
              // Groovy methods may omit the return type; temporarily set one
              // so the body-template machinery can run, then restore.
              final boolean hasNoReturnType = method1.getReturnTypeElement() == null && method1 instanceof GrMethod;
              if (hasNoReturnType) {
                ((GrMethod)method1).setReturnType(PsiType.VOID);
              }
              if (method1.getBody() != null) {
                FileTemplateManager templateManager = FileTemplateManager.getInstance(project);
                FileTemplate fileTemplate = templateManager.getCodeTemplate(GroovyTemplates.GROOVY_FROM_USAGE_METHOD_BODY);
                PsiClass containingClass = method1.getContainingClass();
                LOG.assertTrue(!containingClass.isInterface() || GrTraitUtil.isTrait(containingClass), "Interface bodies should be already set up");
                CreateFromUsageUtils.setupMethodBody(method1, containingClass, fileTemplate);
              }
              if (hasNoReturnType) {
                ((GrMethod)method1).setReturnType(null);
              }
            }
            catch (IncorrectOperationException e) {
              LOG.error(e);
            }
            CreateFromUsageUtils.setupEditor(method1, newEditor);
          }
        });
      }
    };
    manager.startTemplate(newEditor, template, templateListener);
  }
  /**
   * Opens {@code targetFile} in a text editor with the caret placed at
   * {@code element}'s text offset.
   *
   * @return the opened editor, or null when the file has no virtual file
   *         (e.g. a non-physical PSI file)
   */
  public static Editor positionCursor(@NotNull Project project, @NotNull PsiFile targetFile, @NotNull PsiElement element) {
    int textOffset = element.getTextOffset();
    VirtualFile virtualFile = targetFile.getVirtualFile();
    if (virtualFile != null) {
      OpenFileDescriptor descriptor = new OpenFileDescriptor(project, virtualFile, textOffset);
      return FileEditorManager.getInstance(project).openTextEditor(descriptor, true);
    }
    else {
      return null;
    }
  }
}
| asedunov/intellij-community | plugins/groovy/src/org/jetbrains/plugins/groovy/intentions/base/IntentionUtils.java | Java | apache-2.0 | 7,700 |
/**
* Copyright 2012 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hystrix;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Subscriber;
import com.netflix.hystrix.exception.HystrixBadRequestException;
import com.netflix.hystrix.exception.HystrixRuntimeException;
import com.netflix.hystrix.exception.HystrixRuntimeException.FailureType;
import com.netflix.hystrix.strategy.executionhook.HystrixCommandExecutionHook;
import com.netflix.hystrix.strategy.properties.HystrixPropertiesStrategy;
/**
* Used to wrap code that will execute potentially risky functionality (typically meaning a service call over the network)
* with fault and latency tolerance, statistics and performance metrics capture, circuit breaker and bulkhead functionality.
* This command is essentially a blocking command but provides an Observable facade if used with observe()
*
* @param <R>
* the return type
*
* @ThreadSafe
*/
public abstract class HystrixCommand<R> extends AbstractCommand<R> implements HystrixExecutable<R>, HystrixInvokableInfo<R>, HystrixObservable<R> {
/**
* Construct a {@link HystrixCommand} with defined {@link HystrixCommandGroupKey}.
* <p>
* The {@link HystrixCommandKey} will be derived from the implementing class name.
*
* @param group
* {@link HystrixCommandGroupKey} used to group together multiple {@link HystrixCommand} objects.
* <p>
* The {@link HystrixCommandGroupKey} is used to represent a common relationship between commands. For example, a library or team name, the system all related commands interact with,
* common business purpose etc.
*/
    protected HystrixCommand(HystrixCommandGroupKey group) {
        // Delegate to the Setter-based constructor; only the group is
        // specified, so everything else falls back to defaults.
        this(new Setter(group));
    }
/**
* Construct a {@link HystrixCommand} with defined {@link HystrixCommandGroupKey} and {@link HystrixThreadPoolKey}.
* <p>
* The {@link HystrixCommandKey} will be derived from the implementing class name.
*
* @param group
* {@link HystrixCommandGroupKey} used to group together multiple {@link HystrixCommand} objects.
* <p>
* The {@link HystrixCommandGroupKey} is used to represent a common relationship between commands. For example, a library or team name, the system all related commands interact with,
* common business purpose etc.
* @param threadPool
* {@link HystrixThreadPoolKey} used to identify the thread pool in which a {@link HystrixCommand} executes.
*/
    protected HystrixCommand(HystrixCommandGroupKey group, HystrixThreadPoolKey threadPool) {
        // Group plus an explicit thread-pool key; the rest use defaults.
        this(new Setter(group).andThreadPoolKey(threadPool));
    }
/**
* Construct a {@link HystrixCommand} with defined {@link HystrixCommandGroupKey} and thread timeout
* <p>
* The {@link HystrixCommandKey} will be derived from the implementing class name.
*
* @param group
* {@link HystrixCommandGroupKey} used to group together multiple {@link HystrixCommand} objects.
* <p>
* The {@link HystrixCommandGroupKey} is used to represent a common relationship between commands. For example, a library or team name, the system all related commands interact with,
* common business purpose etc.
* @param executionIsolationThreadTimeoutInMilliseconds
* Time in milliseconds at which point the calling thread will timeout (using {@link Future#get}) and walk away from the executing thread.
*/
    protected HystrixCommand(HystrixCommandGroupKey group, int executionIsolationThreadTimeoutInMilliseconds) {
        // Map the legacy timeout argument onto the command-properties setter.
        this(new Setter(group).andCommandPropertiesDefaults(HystrixCommandProperties.Setter().withExecutionTimeoutInMilliseconds(executionIsolationThreadTimeoutInMilliseconds)));
    }
/**
* Construct a {@link HystrixCommand} with defined {@link HystrixCommandGroupKey}, {@link HystrixThreadPoolKey}, and thread timeout.
* <p>
* The {@link HystrixCommandKey} will be derived from the implementing class name.
*
* @param group
* {@link HystrixCommandGroupKey} used to group together multiple {@link HystrixCommand} objects.
* <p>
* The {@link HystrixCommandGroupKey} is used to represent a common relationship between commands. For example, a library or team name, the system all related commands interact with,
* common business purpose etc.
* @param threadPool
* {@link HystrixThreadPoolKey} used to identify the thread pool in which a {@link HystrixCommand} executes.
* @param executionIsolationThreadTimeoutInMilliseconds
* Time in milliseconds at which point the calling thread will timeout (using {@link Future#get}) and walk away from the executing thread.
*/
    protected HystrixCommand(HystrixCommandGroupKey group, HystrixThreadPoolKey threadPool, int executionIsolationThreadTimeoutInMilliseconds) {
        // Group, explicit thread-pool key, and legacy timeout combined.
        this(new Setter(group).andThreadPoolKey(threadPool).andCommandPropertiesDefaults(HystrixCommandProperties.Setter().withExecutionTimeoutInMilliseconds(executionIsolationThreadTimeoutInMilliseconds)));
    }
/**
* Construct a {@link HystrixCommand} with defined {@link Setter} that allows injecting property and strategy overrides and other optional arguments.
* <p>
* NOTE: The {@link HystrixCommandKey} is used to associate a {@link HystrixCommand} with {@link HystrixCircuitBreaker}, {@link HystrixCommandMetrics} and other objects.
* <p>
* Do not create multiple {@link HystrixCommand} implementations with the same {@link HystrixCommandKey} but different injected default properties as the first instantiated will win.
* <p>
* Properties passed in via {@link Setter#andCommandPropertiesDefaults} or {@link Setter#andThreadPoolPropertiesDefaults} are cached for the given {@link HystrixCommandKey} for the life of the JVM
* or until {@link Hystrix#reset()} is called. Dynamic properties allow runtime changes. Read more on the <a href="https://github.com/Netflix/Hystrix/wiki/Configuration">Hystrix Wiki</a>.
*
* @param setter
* Fluent interface for constructor arguments
*/
    protected HystrixCommand(Setter setter) {
        // 'null' arguments request the default for that collaborator; the
        // superclass resolves the actual defaults.
        this(setter.groupKey, setter.commandKey, setter.threadPoolKey, null, null, setter.commandPropertiesDefaults, setter.threadPoolPropertiesDefaults, null, null, null, null, null);
    }
/**
* Allow constructing a {@link HystrixCommand} with injection of most aspects of its functionality.
* <p>
* Some of these never have a legitimate reason for injection except in unit testing.
* <p>
* Most of the args will revert to a valid default if 'null' is passed in.
*/
    // Full-injection constructor (package-private for tests): most arguments
    // may be null to request the default implementation for that collaborator.
    /* package for testing */HystrixCommand(HystrixCommandGroupKey group, HystrixCommandKey key, HystrixThreadPoolKey threadPoolKey, HystrixCircuitBreaker circuitBreaker, HystrixThreadPool threadPool,
            HystrixCommandProperties.Setter commandPropertiesDefaults, HystrixThreadPoolProperties.Setter threadPoolPropertiesDefaults,
            HystrixCommandMetrics metrics, TryableSemaphore fallbackSemaphore, TryableSemaphore executionSemaphore,
            HystrixPropertiesStrategy propertiesStrategy, HystrixCommandExecutionHook executionHook) {
        super(group, key, threadPoolKey, circuitBreaker, threadPool, commandPropertiesDefaults, threadPoolPropertiesDefaults, metrics, fallbackSemaphore, executionSemaphore, propertiesStrategy, executionHook);
    }
/**
* Fluent interface for arguments to the {@link HystrixCommand} constructor.
* <p>
* The required arguments are set via the 'with' factory method and optional arguments via the 'and' chained methods.
* <p>
* Example:
* <pre> {@code
* Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey("GroupName"))
.andCommandKey(HystrixCommandKey.Factory.asKey("CommandName"));
* } </pre>
*
* @NotThreadSafe
*/
final public static class Setter {
protected final HystrixCommandGroupKey groupKey;
protected HystrixCommandKey commandKey;
protected HystrixThreadPoolKey threadPoolKey;
protected HystrixCommandProperties.Setter commandPropertiesDefaults;
protected HystrixThreadPoolProperties.Setter threadPoolPropertiesDefaults;
/**
* Setter factory method containing required values.
* <p>
* All optional arguments can be set via the chained methods.
*
* @param groupKey
* {@link HystrixCommandGroupKey} used to group together multiple {@link HystrixCommand} objects.
* <p>
* The {@link HystrixCommandGroupKey} is used to represent a common relationship between commands. For example, a library or team name, the system all related commands interace
* with,
* common business purpose etc.
*/
protected Setter(HystrixCommandGroupKey groupKey) {
this.groupKey = groupKey;
}
/**
* Setter factory method with required values.
* <p>
* All optional arguments can be set via the chained methods.
*
* @param groupKey
* {@link HystrixCommandGroupKey} used to group together multiple {@link HystrixCommand} objects.
* <p>
* The {@link HystrixCommandGroupKey} is used to represent a common relationship between commands. For example, a library or team name, the system all related commands interace
* with,
* common business purpose etc.
*/
public static Setter withGroupKey(HystrixCommandGroupKey groupKey) {
return new Setter(groupKey);
}
/**
* @param commandKey
* {@link HystrixCommandKey} used to identify a {@link HystrixCommand} instance for statistics, circuit-breaker, properties, etc.
* <p>
* By default this will be derived from the instance class name.
* <p>
* NOTE: Every unique {@link HystrixCommandKey} will result in new instances of {@link HystrixCircuitBreaker}, {@link HystrixCommandMetrics} and {@link HystrixCommandProperties}.
* Thus,
* the number of variants should be kept to a finite and reasonable number to avoid high-memory usage or memory leacks.
* <p>
* Hundreds of keys is fine, tens of thousands is probably not.
* @return Setter for fluent interface via method chaining
*/
public Setter andCommandKey(HystrixCommandKey commandKey) {
this.commandKey = commandKey;
return this;
}
/**
* @param threadPoolKey
* {@link HystrixThreadPoolKey} used to define which thread-pool this command should run in (when configured to run on separate threads via
* {@link HystrixCommandProperties#executionIsolationStrategy()}).
* <p>
* By default this is derived from the {@link HystrixCommandGroupKey} but if injected this allows multiple commands to have the same {@link HystrixCommandGroupKey} but different
* thread-pools.
* @return Setter for fluent interface via method chaining
*/
public Setter andThreadPoolKey(HystrixThreadPoolKey threadPoolKey) {
this.threadPoolKey = threadPoolKey;
return this;
}
/**
 * Optional
 *
 * @param commandPropertiesDefaults
 *            {@link HystrixCommandProperties.Setter} with property overrides for this specific instance of {@link HystrixCommand}.
 *            <p>
 *            See the {@link HystrixPropertiesStrategy} JavaDocs for more information on properties and order of precedence.
 * @return Setter for fluent interface via method chaining
 */
public Setter andCommandPropertiesDefaults(HystrixCommandProperties.Setter commandPropertiesDefaults) {
    // Per-instance property overrides; see HystrixPropertiesStrategy for precedence rules.
    this.commandPropertiesDefaults = commandPropertiesDefaults;
    return this;
}
/**
 * Optional
 *
 * @param threadPoolPropertiesDefaults
 *            {@link HystrixThreadPoolProperties.Setter} with property overrides for the {@link HystrixThreadPool} used by this specific instance of {@link HystrixCommand}.
 *            <p>
 *            See the {@link HystrixPropertiesStrategy} JavaDocs for more information on properties and order of precedence.
 * @return Setter for fluent interface via method chaining
 */
public Setter andThreadPoolPropertiesDefaults(HystrixThreadPoolProperties.Setter threadPoolPropertiesDefaults) {
    // Per-instance thread-pool overrides; see HystrixPropertiesStrategy for precedence rules.
    this.threadPoolPropertiesDefaults = threadPoolPropertiesDefaults;
    return this;
}
}
/**
 * Implement this method with code to be executed when {@link #execute()} or {@link #queue()} are invoked.
 * <p>
 * Invoked via {@link #getExecutionObservable()} during command execution.
 *
 * @return R response type
 * @throws Exception
 *             if command execution fails
 */
protected abstract R run() throws Exception;
/**
 * If {@link #execute()} or {@link #queue()} fails in any way then this method will be invoked to provide an opportunity to return a fallback response.
 * <p>
 * This should do work that does not require network transport to produce.
 * <p>
 * In other words, this should be a static or cached result that can immediately be returned upon failure.
 * <p>
 * If network traffic is wanted for fallback (such as going to MemCache) then the fallback implementation should invoke another {@link HystrixCommand} instance that protects against that network
 * access and possibly has another level of fallback that does not involve network access.
 * <p>
 * DEFAULT BEHAVIOR: It throws UnsupportedOperationException.
 *
 * @return R or throw UnsupportedOperationException if not implemented
 */
protected R getFallback() {
    // Default: no fallback defined. Subclasses override this to supply one.
    throw new UnsupportedOperationException("No fallback available.");
}
@Override
final protected Observable<R> getExecutionObservable() {
    // Adapts the synchronous run() into an Observable: emits the single result and
    // completes, or forwards any Throwable to onError.
    return Observable.create(new OnSubscribe<R>() {
        @Override
        public void call(Subscriber<? super R> s) {
            try {
                s.onNext(run());
                s.onCompleted();
            } catch (Throwable e) {
                // Throwable (not just Exception) so Errors are also routed through onError.
                s.onError(e);
            }
        }
    });
}
@Override
final protected Observable<R> getFallbackObservable() {
    // Adapts the synchronous getFallback() into an Observable, mirroring
    // getExecutionObservable(): one emission then completion, or onError.
    return Observable.create(new OnSubscribe<R>() {
        @Override
        public void call(Subscriber<? super R> s) {
            try {
                s.onNext(getFallback());
                s.onCompleted();
            } catch (Throwable e) {
                // Propagates UnsupportedOperationException from the default getFallback()
                // as well as any failure of a user-supplied fallback.
                s.onError(e);
            }
        }
    });
}
/**
 * Used for synchronous execution of command.
 *
 * @return R
 *         Result of {@link #run()} execution or a fallback from {@link #getFallback()} if the command fails for any reason.
 * @throws HystrixRuntimeException
 *             if a failure occurs and a fallback cannot be retrieved
 * @throws HystrixBadRequestException
 *             if invalid arguments or state were used representing a user failure, not a system failure
 * @throws IllegalStateException
 *             if invoked more than once
 */
public R execute() {
    try {
        // Synchronous execution is just queue() followed by a blocking get().
        return queue().get();
    } catch (Exception e) {
        // Unwrap ExecutionException etc. into the Hystrix runtime exception types.
        throw decomposeException(e);
    }
}
/**
 * Used for asynchronous execution of command.
 * <p>
 * This will queue up the command on the thread pool and return an {@link Future} to get the result once it completes.
 * <p>
 * NOTE: If configured to not run in a separate thread, this will have the same effect as {@link #execute()} and will block.
 * <p>
 * We don't throw an exception but just flip to synchronous execution so code doesn't need to change in order to switch a command from running on a separate thread to the calling thread.
 *
 * @return {@code Future<R>} Result of {@link #run()} execution or a fallback from {@link #getFallback()} if the command fails for any reason.
 * @throws HystrixRuntimeException
 *             if a fallback does not exist
 *             <p>
 *             <ul>
 *             <li>via {@code Future.get()} in {@link ExecutionException#getCause()} if a failure occurs</li>
 *             <li>or immediately if the command can not be queued (such as short-circuited, thread-pool/semaphore rejected)</li>
 *             </ul>
 * @throws HystrixBadRequestException
 *             via {@code Future.get()} in {@link ExecutionException#getCause()} if invalid arguments or state were used representing a user failure, not a system failure
 * @throws IllegalStateException
 *             if invoked more than once
 */
public Future<R> queue() {
    /*
     * --- Schedulers.immediate()
     *
     * We use the 'immediate' schedule since Future.get() is blocking so we don't want to bother doing the callback to the Future on a separate thread
     * as we don't need to separate the Hystrix thread from user threads since they are already providing it via the Future.get() call.
     *
     * We pass 'false' to tell the Observable we will block on it so it doesn't schedule an async timeout.
     *
     * This optimizes for using the calling thread to do the timeout rather than scheduling another thread.
     *
     * In a tight-loop of executing commands this optimization saves a few microseconds per execution.
     * It also just makes no sense to use a separate thread to timeout the command when the calling thread
     * is going to sit waiting on it.
     */
    final Observable<R> o = toObservable();
    final Future<R> f = o.toBlocking().toFuture();
    /* special handling of error states that throw immediately */
    if (f.isDone()) {
        // The Future completed before queue() returned (e.g. short-circuited or rejected,
        // per the javadoc above). Inspect the failure to decide whether to throw now.
        try {
            f.get();
            return f;
        } catch (Exception e) {
            RuntimeException re = decomposeException(e);
            if (re instanceof HystrixBadRequestException) {
                // user failure: surfaced via Future.get(), not thrown from queue() itself
                return f;
            } else if (re instanceof HystrixRuntimeException) {
                HystrixRuntimeException hre = (HystrixRuntimeException) re;
                if (hre.getFailureType() == FailureType.COMMAND_EXCEPTION || hre.getFailureType() == FailureType.TIMEOUT) {
                    // we don't throw these types from queue() only from queue().get() as they are execution errors
                    return f;
                } else {
                    // these are errors we throw from queue() as they are rejection type errors
                    throw hre;
                }
            } else {
                throw re;
            }
        }
    }
    return f;
}
@Override
protected String getFallbackMethodName() {
    // Literal name of the fallback method declared on this class (see getFallback()).
    return "getFallback";
}
}
| JayhJung/Hystrix | hystrix-core/src/main/java/com/netflix/hystrix/HystrixCommand.java | Java | apache-2.0 | 20,424 |
(function () {
    // Append the bind() polyfill: inject a <script> tag loading the
    // Android 2.3 Function.prototype.bind compatibility shim.
    var scriptElem = document.createElement('script');
    scriptElem.setAttribute('src', 'scripts/android2.3-jscompat.js');
    // Prefer <body> as the insertion point; fall back to <head> when the
    // body has not been created yet at the time this runs.
    if (document.body) {
        document.body.appendChild(scriptElem);
    } else {
        document.head.appendChild(scriptElem);
    }
}()); | SimonidaPesic/VodicZaKupovinu | platforms/android/assets/www/scripts/platformOverrides.js | JavaScript | apache-2.0 | 329 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.properties;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.util.FilePathResolver;
import org.apache.camel.util.LRUSoftCache;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The <a href="http://camel.apache.org/properties">Properties Component</a> allows you to use property placeholders when defining Endpoint URIs
*/
/**
 * The <a href="http://camel.apache.org/properties">Properties Component</a> allows you to use property placeholders when defining Endpoint URIs
 */
public class PropertiesComponent extends UriEndpointComponent {

    /**
     * The default prefix token.
     */
    public static final String DEFAULT_PREFIX_TOKEN = "{{";

    /**
     * The default suffix token.
     */
    public static final String DEFAULT_SUFFIX_TOKEN = "}}";

    /**
     * The default prefix token.
     * @deprecated Use {@link #DEFAULT_PREFIX_TOKEN} instead.
     */
    @Deprecated
    public static final String PREFIX_TOKEN = DEFAULT_PREFIX_TOKEN;

    /**
     * The default suffix token.
     * @deprecated Use {@link #DEFAULT_SUFFIX_TOKEN} instead.
     */
    @Deprecated
    public static final String SUFFIX_TOKEN = DEFAULT_SUFFIX_TOKEN;

    /**
     * Never check system properties.
     */
    public static final int SYSTEM_PROPERTIES_MODE_NEVER = 0;

    /**
     * Check system properties if not resolvable in the specified properties.
     */
    public static final int SYSTEM_PROPERTIES_MODE_FALLBACK = 1;

    /**
     * Check system properties first, before trying the specified properties.
     * This allows system properties to override any other property source.
     * <p/>
     * This is the default.
     */
    public static final int SYSTEM_PROPERTIES_MODE_OVERRIDE = 2;

    /**
     * Key for storing special override properties that containers such as OSGi can store
     * in the OSGi service registry
     */
    public static final String OVERRIDE_PROPERTIES = PropertiesComponent.class.getName() + ".OverrideProperties";

    private static final Logger LOG = LoggerFactory.getLogger(PropertiesComponent.class);

    // cache of resolved Properties keyed by the set of locations; soft values let
    // entries be reclaimed under memory pressure
    private final Map<CacheKey, Properties> cacheMap = new LRUSoftCache<CacheKey, Properties>(1000);
    // registered property functions (env:, sys:, service:, ...) keyed by function name
    private final Map<String, PropertiesFunction> functions = new HashMap<String, PropertiesFunction>();
    private PropertiesResolver propertiesResolver = new DefaultPropertiesResolver(this);
    private PropertiesParser propertiesParser = new DefaultPropertiesParser(this);
    // true when CamelContext created this component implicitly rather than the user
    private boolean isDefaultCreated;
    private String[] locations;
    private boolean ignoreMissingLocation;
    private String encoding;
    private boolean cache = true;
    private String propertyPrefix;
    private String propertyPrefixResolved;
    private String propertySuffix;
    private String propertySuffixResolved;
    private boolean fallbackToUnaugmentedProperty = true;
    private String prefixToken = DEFAULT_PREFIX_TOKEN;
    private String suffixToken = DEFAULT_SUFFIX_TOKEN;
    private Properties initialProperties;
    private Properties overrideProperties;
    private int systemPropertiesMode = SYSTEM_PROPERTIES_MODE_OVERRIDE;

    public PropertiesComponent() {
        super(PropertiesEndpoint.class);
        // include out of the box functions
        addFunction(new EnvPropertiesFunction());
        addFunction(new SysPropertiesFunction());
        addFunction(new ServicePropertiesFunction());
        addFunction(new ServiceHostPropertiesFunction());
        addFunction(new ServicePortPropertiesFunction());
    }

    public PropertiesComponent(boolean isDefaultCreated) {
        this();
        this.isDefaultCreated = isDefaultCreated;
    }

    public PropertiesComponent(String location) {
        this();
        setLocation(location);
    }

    public PropertiesComponent(String... locations) {
        this();
        setLocations(locations);
    }

    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        String[] paths = locations;
        // override default locations
        // NOTE: this local deliberately shadows the 'locations' field; it holds the
        // per-endpoint override taken from the endpoint parameters
        String locations = getAndRemoveParameter(parameters, "locations", String.class);
        Boolean ignoreMissingLocationLoc = getAndRemoveParameter(parameters, "ignoreMissingLocation", Boolean.class);
        if (locations != null) {
            LOG.trace("Overriding default locations with location: {}", locations);
            paths = locations.split(",");
        }
        if (ignoreMissingLocationLoc != null) {
            ignoreMissingLocation = ignoreMissingLocationLoc;
        }
        // resolve the placeholder uri and create a delegate endpoint for the result
        String endpointUri = parseUri(remaining, paths);
        LOG.debug("Endpoint uri parsed as: {}", endpointUri);
        Endpoint delegate = getCamelContext().getEndpoint(endpointUri);
        PropertiesEndpoint answer = new PropertiesEndpoint(uri, delegate, this);
        setProperties(answer, parameters);
        return answer;
    }

    /**
     * Parses the given uri using the configured default locations.
     */
    public String parseUri(String uri) throws Exception {
        return parseUri(uri, locations);
    }

    /**
     * Parses the given uri, resolving placeholders from initial properties, the
     * given locations (cached when enabled) and any override properties, in that
     * order of increasing precedence.
     */
    public String parseUri(String uri, String... paths) throws Exception {
        Properties prop = new Properties();
        // use initial properties
        if (null != initialProperties) {
            prop.putAll(initialProperties);
        }
        // use locations
        if (paths != null) {
            // location may contain JVM system property or OS environment variables
            // so we need to parse those
            String[] locations = parseLocations(paths);
            // check cache first
            CacheKey key = new CacheKey(locations);
            Properties locationsProp = cache ? cacheMap.get(key) : null;
            if (locationsProp == null) {
                locationsProp = propertiesResolver.resolveProperties(getCamelContext(), ignoreMissingLocation, locations);
                if (cache) {
                    cacheMap.put(key, locationsProp);
                }
            }
            prop.putAll(locationsProp);
        }
        // use override properties
        if (overrideProperties != null) {
            // make a copy to avoid affecting the original properties
            Properties override = new Properties();
            override.putAll(prop);
            override.putAll(overrideProperties);
            prop = override;
        }
        // enclose tokens if missing
        if (!uri.contains(prefixToken) && !uri.startsWith(prefixToken)) {
            uri = prefixToken + uri;
        }
        if (!uri.contains(suffixToken) && !uri.endsWith(suffixToken)) {
            uri = uri + suffixToken;
        }
        LOG.trace("Parsing uri {} with properties: {}", uri, prop);
        // the parser may support prefix/suffix-augmented property names
        if (propertiesParser instanceof AugmentedPropertyNameAwarePropertiesParser) {
            return ((AugmentedPropertyNameAwarePropertiesParser) propertiesParser).parseUri(uri, prop, prefixToken, suffixToken,
                    propertyPrefixResolved, propertySuffixResolved, fallbackToUnaugmentedProperty);
        } else {
            return propertiesParser.parseUri(uri, prop, prefixToken, suffixToken);
        }
    }

    /**
     * Is this component created as a default by {@link org.apache.camel.CamelContext} during starting up Camel.
     */
    public boolean isDefaultCreated() {
        return isDefaultCreated;
    }

    public String[] getLocations() {
        return locations;
    }

    /**
     * A list of locations to load properties. You can use comma to separate multiple locations.
     * This option will override any default locations and only use the locations from this option.
     */
    public void setLocations(String[] locations) {
        // make sure to trim as people may use new lines when configuring using XML
        // and do this in the setter as Spring/Blueprint resolves placeholders before Camel is being started
        if (locations != null && locations.length > 0) {
            for (int i = 0; i < locations.length; i++) {
                String loc = locations[i];
                locations[i] = loc.trim();
            }
        }
        this.locations = locations;
    }

    /**
     * A list of locations to load properties. You can use comma to separate multiple locations.
     * This option will override any default locations and only use the locations from this option.
     */
    public void setLocation(String location) {
        setLocations(location.split(","));
    }

    public String getEncoding() {
        return encoding;
    }

    /**
     * Encoding to use when loading properties file from the file system or classpath.
     * <p/>
     * If no encoding has been set, then the properties files is loaded using ISO-8859-1 encoding (latin-1)
     * as documented by {@link java.util.Properties#load(java.io.InputStream)}
     */
    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    public PropertiesResolver getPropertiesResolver() {
        return propertiesResolver;
    }

    /**
     * To use a custom PropertiesResolver
     */
    public void setPropertiesResolver(PropertiesResolver propertiesResolver) {
        this.propertiesResolver = propertiesResolver;
    }

    public PropertiesParser getPropertiesParser() {
        return propertiesParser;
    }

    /**
     * To use a custom PropertiesParser
     */
    public void setPropertiesParser(PropertiesParser propertiesParser) {
        this.propertiesParser = propertiesParser;
    }

    public boolean isCache() {
        return cache;
    }

    /**
     * Whether or not to cache loaded properties. The default value is true.
     */
    public void setCache(boolean cache) {
        this.cache = cache;
    }

    public String getPropertyPrefix() {
        return propertyPrefix;
    }

    /**
     * Optional prefix prepended to property names before resolution.
     */
    public void setPropertyPrefix(String propertyPrefix) {
        this.propertyPrefix = propertyPrefix;
        this.propertyPrefixResolved = propertyPrefix;
        // the prefix itself may contain JVM/OS placeholders that must be resolved eagerly
        if (ObjectHelper.isNotEmpty(this.propertyPrefix)) {
            this.propertyPrefixResolved = FilePathResolver.resolvePath(this.propertyPrefix);
        }
    }

    public String getPropertySuffix() {
        return propertySuffix;
    }

    /**
     * Optional suffix appended to property names before resolution.
     */
    public void setPropertySuffix(String propertySuffix) {
        this.propertySuffix = propertySuffix;
        this.propertySuffixResolved = propertySuffix;
        // the suffix itself may contain JVM/OS placeholders that must be resolved eagerly
        if (ObjectHelper.isNotEmpty(this.propertySuffix)) {
            this.propertySuffixResolved = FilePathResolver.resolvePath(this.propertySuffix);
        }
    }

    public boolean isFallbackToUnaugmentedProperty() {
        return fallbackToUnaugmentedProperty;
    }

    /**
     * If true, first attempt resolution of property name augmented with propertyPrefix and propertySuffix
     * before falling back to the plain property name specified. If false, only the augmented property name is searched.
     */
    public void setFallbackToUnaugmentedProperty(boolean fallbackToUnaugmentedProperty) {
        this.fallbackToUnaugmentedProperty = fallbackToUnaugmentedProperty;
    }

    public boolean isIgnoreMissingLocation() {
        return ignoreMissingLocation;
    }

    /**
     * Whether to silently ignore if a location cannot be located, such as a properties file not found.
     */
    public void setIgnoreMissingLocation(boolean ignoreMissingLocation) {
        this.ignoreMissingLocation = ignoreMissingLocation;
    }

    public String getPrefixToken() {
        return prefixToken;
    }

    /**
     * Sets the value of the prefix token used to identify properties to replace. Setting a value of
     * {@code null} restores the default token ({@link #DEFAULT_PREFIX_TOKEN}).
     */
    public void setPrefixToken(String prefixToken) {
        if (prefixToken == null) {
            this.prefixToken = DEFAULT_PREFIX_TOKEN;
        } else {
            this.prefixToken = prefixToken;
        }
    }

    public String getSuffixToken() {
        return suffixToken;
    }

    /**
     * Sets the value of the suffix token used to identify properties to replace. Setting a value of
     * {@code null} restores the default token ({@link #DEFAULT_SUFFIX_TOKEN}).
     */
    public void setSuffixToken(String suffixToken) {
        if (suffixToken == null) {
            this.suffixToken = DEFAULT_SUFFIX_TOKEN;
        } else {
            this.suffixToken = suffixToken;
        }
    }

    public Properties getInitialProperties() {
        return initialProperties;
    }

    /**
     * Sets initial properties which will be used before any locations are resolved.
     *
     * @param initialProperties properties that are added first
     */
    public void setInitialProperties(Properties initialProperties) {
        this.initialProperties = initialProperties;
    }

    public Properties getOverrideProperties() {
        return overrideProperties;
    }

    /**
     * Sets a special list of override properties that take precedence
     * and will be used first, if a property exists.
     *
     * @param overrideProperties properties that are used first
     */
    public void setOverrideProperties(Properties overrideProperties) {
        this.overrideProperties = overrideProperties;
    }

    /**
     * Gets the functions registered in this properties component.
     */
    public Map<String, PropertiesFunction> getFunctions() {
        return functions;
    }

    /**
     * Registers the {@link org.apache.camel.component.properties.PropertiesFunction} as a function to this component.
     */
    public void addFunction(PropertiesFunction function) {
        this.functions.put(function.getName(), function);
    }

    /**
     * Is there a {@link org.apache.camel.component.properties.PropertiesFunction} with the given name?
     */
    public boolean hasFunction(String name) {
        return functions.containsKey(name);
    }

    public int getSystemPropertiesMode() {
        return systemPropertiesMode;
    }

    /**
     * Sets the system property mode.
     *
     * @see #SYSTEM_PROPERTIES_MODE_NEVER
     * @see #SYSTEM_PROPERTIES_MODE_FALLBACK
     * @see #SYSTEM_PROPERTIES_MODE_OVERRIDE
     */
    public void setSystemPropertiesMode(int systemPropertiesMode) {
        this.systemPropertiesMode = systemPropertiesMode;
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();
        // fail fast on an invalid mode instead of silently misbehaving at resolve time
        if (systemPropertiesMode != SYSTEM_PROPERTIES_MODE_NEVER
                && systemPropertiesMode != SYSTEM_PROPERTIES_MODE_FALLBACK
                && systemPropertiesMode != SYSTEM_PROPERTIES_MODE_OVERRIDE) {
            throw new IllegalArgumentException("Option systemPropertiesMode has invalid value: " + systemPropertiesMode);
        }
        // inject the component to the parser
        if (propertiesParser instanceof DefaultPropertiesParser) {
            ((DefaultPropertiesParser) propertiesParser).setPropertiesComponent(this);
        }
    }

    @Override
    protected void doStop() throws Exception {
        cacheMap.clear();
        super.doStop();
    }

    /**
     * Resolves JVM system property / OS environment placeholders inside each
     * location; empty results are dropped, and unresolvable locations either
     * throw or are skipped depending on ignoreMissingLocation.
     */
    private String[] parseLocations(String[] locations) {
        List<String> answer = new ArrayList<String>();
        for (String location : locations) {
            LOG.trace("Parsing location: {} ", location);
            try {
                location = FilePathResolver.resolvePath(location);
                LOG.debug("Parsed location: {} ", location);
                if (ObjectHelper.isNotEmpty(location)) {
                    answer.add(location);
                }
            } catch (IllegalArgumentException e) {
                if (!ignoreMissingLocation) {
                    throw e;
                } else {
                    LOG.debug("Ignored missing location: {}", location);
                }
            }
        }
        // must return a not-null answer
        return answer.toArray(new String[answer.size()]);
    }

    /**
     * Key used in the locations cache
     */
    private static final class CacheKey implements Serializable {
        private static final long serialVersionUID = 1L;
        // stored by reference; callers always pass a freshly built array (see parseLocations)
        private final String[] locations;

        private CacheKey(String[] locations) {
            this.locations = locations;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            CacheKey that = (CacheKey) o;
            // element-wise comparison of the location arrays
            if (!Arrays.equals(locations, that.locations)) {
                return false;
            }
            return true;
        }

        @Override
        public int hashCode() {
            return locations != null ? Arrays.hashCode(locations) : 0;
        }

        @Override
        public String toString() {
            return "LocationKey[" + Arrays.asList(locations).toString() + "]";
        }
    }
}
| arnaud-deprez/camel | camel-core/src/main/java/org/apache/camel/component/properties/PropertiesComponent.java | Java | apache-2.0 | 18,104 |
/**
* Copyright 2010-2015 Axel Fontaine
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Private API. No compatibility guarantees provided.
*/
package org.flywaydb.core.internal.dbsupport.vertica; | mpage23/flyway | flyway-core/src/main/java/org/flywaydb/core/internal/dbsupport/vertica/package-info.java | Java | apache-2.0 | 722 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for experimental iterator_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.data.python.ops import iterator_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.estimator import estimator
from tensorflow.python.estimator import model_fn
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import training_util
class CheckpointInputPipelineHookTest(test.TestCase):
  """Tests that CheckpointInputPipelineHook checkpoints and restores the
  input-pipeline iterator state across Estimator.train() calls."""

  @staticmethod
  def _model_fn(features, labels, mode, config):
    # Minimal model_fn: increments the global step and records the most
    # recently seen input value, so tests can read back how far the input
    # pipeline has advanced via the checkpoint.
    del labels
    del mode
    del config
    global_step = training_util.get_or_create_global_step()
    update_global_step_op = global_step.assign_add(1)
    latest_feature = variables.Variable(
        0, name='latest_feature', dtype=dtypes.int64)
    store_latest_feature_op = latest_feature.assign(features)
    # Expose both variables under a named collection so _read_vars can find them.
    ops.add_to_collection('my_vars', global_step)
    ops.add_to_collection('my_vars', latest_feature)
    return model_fn.EstimatorSpec(
        mode='train',
        train_op=control_flow_ops.group(
            [update_global_step_op, store_latest_feature_op]),
        loss=constant_op.constant(2.0))

  def _read_vars(self, model_dir):
    """Returns (global_step, latest_feature)."""
    # Load the latest checkpoint into a fresh graph and evaluate the
    # collected variables.
    with ops.Graph().as_default() as g:
      ckpt_path = saver_lib.latest_checkpoint(model_dir)
      meta_filename = ckpt_path + '.meta'
      saver_lib.import_meta_graph(meta_filename)
      saver = saver_lib.Saver()
      with self.test_session(graph=g) as sess:
        saver.restore(sess, ckpt_path)
        return sess.run(ops.get_collection('my_vars'))

  def _build_iterator_saver_hook(self, est):
    # Hook under test: checkpoints the input pipeline alongside the model.
    return iterator_ops.CheckpointInputPipelineHook(est)

  def testReturnDatasetFromInputFn(self):
    # input_fn returns a Dataset; the Estimator builds the iterator itself.

    def _input_fn():
      return dataset_ops.Dataset.range(10)

    est = estimator.Estimator(model_fn=self._model_fn)

    est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)])
    self.assertSequenceEqual(self._read_vars(est.model_dir), (2, 1))
    # Second run resumes from the saved iterator state (features 2 and 3).
    est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)])
    self.assertSequenceEqual(self._read_vars(est.model_dir), (4, 3))

  def testBuildIteratorInInputFn(self):
    # input_fn builds a one-shot iterator explicitly and returns get_next().

    def _input_fn():
      ds = dataset_ops.Dataset.range(10)
      iterator = ds.make_one_shot_iterator()
      return iterator.get_next()

    est = estimator.Estimator(model_fn=self._model_fn)

    est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)])
    self.assertSequenceEqual(self._read_vars(est.model_dir), (2, 1))
    est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)])
    self.assertSequenceEqual(self._read_vars(est.model_dir), (4, 3))

  def testDoNotRestore(self):

    def _input_fn():
      return dataset_ops.Dataset.range(10)

    est = estimator.Estimator(model_fn=self._model_fn)

    est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)])
    self.assertSequenceEqual(self._read_vars(est.model_dir), (2, 1))
    est.train(_input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)])
    self.assertSequenceEqual(self._read_vars(est.model_dir), (4, 3))
    # Hook not provided, input pipeline was not restored.
    est.train(_input_fn, steps=2)
    self.assertSequenceEqual(self._read_vars(est.model_dir), (6, 1))

  def testRaiseErrorIfNoIterator(self):
    # The hook requires a dataset iterator; a plain tensor must be rejected.

    def _input_fn():
      return constant_op.constant(1, dtype=dtypes.int64)

    est = estimator.Estimator(model_fn=self._model_fn)

    with self.assertRaises(ValueError):
      est.train(
          _input_fn, steps=2, hooks=[self._build_iterator_saver_hook(est)])
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
| yanchen036/tensorflow | tensorflow/contrib/data/python/ops/iterator_ops_test.py | Python | apache-2.0 | 4,700 |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
namespace com.google.zxing
{
/// <summary> The general exception class thrown when something goes wrong during decoding of a barcode.
/// This includes, but is not limited to, failing checksums / error correction algorithms, being
/// unable to locate finder timing patterns, and so on.
///
/// Implemented as a cached singleton (see <see cref="Instance"/>) to avoid the cost of
/// allocating an exception per failed decode attempt.
/// </summary>
/// <author> Sean Owen
/// </author>
/// <author>www.Redivivus.in (suraj.supekar@redivivus.in) - Ported from ZXING Java Source
/// </author>
[Serializable]
public sealed class ReaderException:System.Exception
{
    /// <summary>Returns the shared singleton instance; no new exception is allocated per failure.</summary>
    public static ReaderException Instance
    {
        get
        {
            // Commented-out exception-tracking instrumentation from the Java source;
            // kept for reference (see EXCEPTION TRACKING SUPPORT notes below).
            //					Exception e = new Exception();
            //					// Take the stack frame before this one.
            //					StackTraceElement stack = e.getStackTrace()[1];
            //					String key = stack.getClassName() + "." + stack.getMethodName() + "(), line " +
            //					stack.getLineNumber();
            //					if (throwers.containsKey(key)) {
            //					Integer value = throwers.get(key);
            //					value++;
            //					throwers.put(key, value);
            //					} else {
            //					throwers.put(key, 1);
            //					}
            //					exceptionCount++;
            return instance;
        }
    }

    // TODO: Currently we throw up to 400 ReaderExceptions while scanning a single 240x240 image before
    // rejecting it. This involves a lot of overhead and memory allocation, and affects both performance
    // and latency on continuous scan clients. In the future, we should change all the decoders not to
    // throw exceptions for routine events, like not finding a barcode on a given row. Instead, we
    // should return error codes back to the callers, and simply delete this class. In the mean time, I
    // have altered this class to be as lightweight as possible, by ignoring the exception string, and
    // by disabling the generation of stack traces, which is especially time consuming. These are just
    // temporary measures, pending the big cleanup.

    //UPGRADE_NOTE: Final was removed from the declaration of 'instance '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
    private static readonly ReaderException instance = new ReaderException();

    // EXCEPTION TRACKING SUPPORT
    // Identifies who is throwing exceptions and how often. To use:
    //
    // 1. Uncomment these lines and the code below which uses them.
    // 2. Uncomment the two corresponding lines in j2se/CommandLineRunner.decode()
    // 3. Change core to build as Java 1.5 temporarily
    //	private static int exceptionCount = 0;
    //	private static Map<String,Integer> throwers = new HashMap<String,Integer>(32);

    // Private: the class is only ever used through the cached singleton above.
    private ReaderException()
    {
        // do nothing
    }

    //	public static int getExceptionCountAndReset() {
    //	int temp = exceptionCount;
    //	exceptionCount = 0;
    //	return temp;
    //	}
    //
    //	public static String getThrowersAndReset() {
    //	StringBuilder builder = new StringBuilder(1024);
    //	Object[] keys = throwers.keySet().toArray();
    //	for (int x = 0; x < keys.length; x++) {
    //	String key = (String) keys[x];
    //	Integer value = throwers.get(key);
    //	builder.append(key);
    //	builder.append(": ");
    //	builder.append(value);
    //	builder.append("\n");
    //	}
    //	throwers.clear();
    //	return builder.toString();
    //	}

    // Prevent stack traces from being taken
    // srowen says: huh, my IDE is saying this is not an override. native methods can't be overridden?
    // This, at least, does not hurt. Because we use a singleton pattern here, it doesn't matter anyhow.
    //UPGRADE_NOTE: Exception 'java.lang.Throwable' was converted to 'System.Exception' which has different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1100'"
    //UPGRADE_NOTE: The equivalent of method 'java.lang.Throwable.fillInStackTrace' is not an override method. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1143'"
    public System.Exception fillInStackTrace()
    {
        // Intentionally suppresses stack-trace capture (see performance notes above).
        return null;
    }
}
} | kerwinxu/barcodeManager | zxing/csharp/ReaderException.cs | C# | bsd-2-clause | 4,794 |
# Homebrew formula for git-crypt: transparent encryption/decryption of
# files in a git repository.
class GitCrypt < Formula
  desc "Enable transparent encryption/decryption of files in a git repo"
  homepage "https://www.agwa.name/projects/git-crypt/"
  url "https://www.agwa.name/projects/git-crypt/downloads/git-crypt-0.5.0.tar.gz"
  sha256 "0a8f92c0a0a125bf768d0c054d947ca4e4b8d6556454b0e7e87fb907ee17cf06"

  # Pre-built binary bottles, one checksum per supported macOS release.
  bottle do
    cellar :any
    sha256 "8d7f6640e34881ae40a2e949b7755eb9faa711399c37b86892df359b1a368bb2" => :sierra
    sha256 "ed93687aa2996d6171f9090062f24453028c7d9d97e9842a0fee7aee57648979" => :el_capitan
    sha256 "ce33f2d01af41259b6ea9be1e849000bdd08413b1f109268ea65709644d455eb" => :yosemite
    sha256 "2cedd573983fe7ec7387e76f9ffd0ba351e71e19e3382f7365209d1aad0f7e3f" => :mavericks
    sha256 "1bba33a973b90d39140a64193bcdab63b34c3b4f379850ee41ee155325173f4f" => :mountain_lion
  end

  depends_on "openssl"

  def install
    # Upstream Makefile produces a single `git-crypt` binary; install it directly.
    system "make"
    bin.install "git-crypt"
  end

  test do
    # Smoke test: key generation exercises the OpenSSL-backed crypto path.
    system "#{bin}/git-crypt", "keygen", "keyfile"
  end
end
| kuahyeow/homebrew-core | Formula/git-crypt.rb | Ruby | bsd-2-clause | 970 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.mutatorsstack;
import android.graphics.Matrix;
import android.graphics.Path;
import android.graphics.Rect;
import android.graphics.RectF;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
 * The mutator stack containing a list of mutators
 *
 * <p>The mutators can be applied to a {@link io.flutter.plugin.platform.PlatformView} to perform a
 * series mutations. See {@link FlutterMutatorsStack.FlutterMutator} for informations on Mutators.
 */
@Keep
public class FlutterMutatorsStack {
  /**
   * The type of a Mutator See {@link FlutterMutatorsStack.FlutterMutator} for informations on
   * Mutators.
   */
  public enum FlutterMutatorType {
    CLIP_RECT,
    CLIP_RRECT,
    CLIP_PATH,
    TRANSFORM,
    OPACITY
  }

  /**
   * A class represents a mutator
   *
   * <p>A mutator contains information of a single mutation operation that can be applied to a
   * {@link io.flutter.plugin.platform.PlatformView}. Only the fields relevant to the mutator's
   * {@link FlutterMutatorType} are populated by the matching constructor; all others remain null.
   */
  public class FlutterMutator {
    @Nullable private Matrix matrix;
    @Nullable private Rect rect;
    @Nullable private Path path;
    @Nullable private float[] radiis;

    private FlutterMutatorType type;

    /**
     * Initialize a clip rect mutator.
     *
     * @param rect the rect to be clipped.
     */
    public FlutterMutator(Rect rect) {
      this.type = FlutterMutatorType.CLIP_RECT;
      this.rect = rect;
    }

    /**
     * Initialize a clip rrect mutator.
     *
     * @param rect the rect of the rrect
     * @param radiis the radiis of the rrect. Array of 8 values, 4 pairs of [X,Y]. This value cannot
     *     be null.
     */
    public FlutterMutator(Rect rect, float[] radiis) {
      this.type = FlutterMutatorType.CLIP_RRECT;
      this.rect = rect;
      this.radiis = radiis;
    }

    /**
     * Initialize a clip path mutator.
     *
     * @param path the path to be clipped.
     */
    public FlutterMutator(Path path) {
      this.type = FlutterMutatorType.CLIP_PATH;
      this.path = path;
    }

    /**
     * Initialize a transform mutator.
     *
     * @param matrix the transform matrix to apply.
     */
    public FlutterMutator(Matrix matrix) {
      this.type = FlutterMutatorType.TRANSFORM;
      this.matrix = matrix;
    }

    /**
     * Get the mutator type.
     *
     * @return The type of the mutator.
     */
    public FlutterMutatorType getType() {
      return type;
    }

    /**
     * Get the rect of the mutator. The rect is populated by both the clip-rect and the clip-rrect
     * constructors.
     *
     * @return the clipping rect if the type is FlutterMutatorType.CLIP_RECT or
     *     FlutterMutatorType.CLIP_RRECT; otherwise null.
     */
    public Rect getRect() {
      return rect;
    }

    /**
     * Get the radii of the mutator if the {@link #getType()} returns FlutterMutatorType.CLIP_RRECT.
     *
     * <p>Previously this field was write-only; the accessor is provided so clip-rrect consumers can
     * read the corner radii.
     *
     * @return the radii (array of 8 values, 4 pairs of [X,Y]) if the type is
     *     FlutterMutatorType.CLIP_RRECT; otherwise null.
     */
    public float[] getRadiis() {
      return radiis;
    }

    /**
     * Get the path of the mutator if the {@link #getType()} returns FlutterMutatorType.CLIP_PATH.
     *
     * @return the clipping path if the type is FlutterMutatorType.CLIP_PATH; otherwise null.
     */
    public Path getPath() {
      return path;
    }

    /**
     * Get the matrix of the mutator if the {@link #getType()} returns FlutterMutatorType.TRANSFORM.
     *
     * @return the matrix if the type is FlutterMutatorType.TRANSFORM; otherwise null.
     */
    public Matrix getMatrix() {
      return matrix;
    }
  }

  private @NonNull List<FlutterMutator> mutators;

  // Clip operations accumulated so far, each already transformed by the matrix
  // that was in effect when it was pushed.
  private List<Path> finalClippingPaths;
  // Product of all transform mutators pushed so far.
  private Matrix finalMatrix;

  /** Initialize the mutator stack. */
  public FlutterMutatorsStack() {
    this.mutators = new ArrayList<FlutterMutator>();
    finalMatrix = new Matrix();
    finalClippingPaths = new ArrayList<Path>();
  }

  /**
   * Push a transform {@link FlutterMutatorsStack.FlutterMutator} to the stack.
   *
   * @param values the transform matrix to be pushed to the stack. The array matches how a {@link
   *     android.graphics.Matrix} is constructed.
   */
  public void pushTransform(float[] values) {
    Matrix matrix = new Matrix();
    matrix.setValues(values);
    FlutterMutator mutator = new FlutterMutator(matrix);
    mutators.add(mutator);
    // Accumulate into the running transform so later clips are pre-transformed.
    finalMatrix.preConcat(mutator.getMatrix());
  }

  /** Push a clipRect {@link FlutterMutatorsStack.FlutterMutator} to the stack. */
  public void pushClipRect(int left, int top, int right, int bottom) {
    Rect rect = new Rect(left, top, right, bottom);
    FlutterMutator mutator = new FlutterMutator(rect);
    mutators.add(mutator);
    // Convert the rect to a Path transformed by the current accumulated matrix.
    Path path = new Path();
    path.addRect(new RectF(rect), Path.Direction.CCW);
    path.transform(finalMatrix);
    finalClippingPaths.add(path);
  }

  /**
   * Push a clipRRect {@link FlutterMutatorsStack.FlutterMutator} to the stack.
   *
   * @param left left offset of the rrect.
   * @param top top offset of the rrect.
   * @param right right position of the rrect.
   * @param bottom bottom position of the rrect.
   * @param radiis the radiis of the rrect. It must be size of 8, including an x and y for each
   *     corner.
   */
  public void pushClipRRect(int left, int top, int right, int bottom, float[] radiis) {
    Rect rect = new Rect(left, top, right, bottom);
    FlutterMutator mutator = new FlutterMutator(rect, radiis);
    mutators.add(mutator);
    // Convert the rrect to a Path transformed by the current accumulated matrix.
    Path path = new Path();
    path.addRoundRect(new RectF(rect), radiis, Path.Direction.CCW);
    path.transform(finalMatrix);
    finalClippingPaths.add(path);
  }

  /**
   * Get a list of all the raw mutators. The 0 index of the returned list is the top of the stack.
   */
  public List<FlutterMutator> getMutators() {
    return mutators;
  }

  /**
   * Get a list of all the clipping operations. All the clipping operations -- whether it is clip
   * rect, clip rrect, or clip path -- are converted into Paths. The paths are also transformed with
   * the matrix that up to their stack positions. For example: If the stack looks like (from top to
   * bottom): TransA -> ClipA -> TransB -> ClipB, the final paths will look like
   * [TransA*ClipA, TransA*TransB*ClipB].
   *
   * <p>Clipping this list to the parent canvas of a view results the final clipping path.
   */
  public List<Path> getFinalClippingPaths() {
    return finalClippingPaths;
  }

  /**
   * Returns the final matrix. Apply this matrix to the canvas of a view results the final
   * transformation of the view.
   */
  public Matrix getFinalMatrix() {
    return finalMatrix;
  }
}
| rmacnak-google/engine | shell/platform/android/io/flutter/embedding/engine/mutatorsstack/FlutterMutatorsStack.java | Java | bsd-3-clause | 6,643 |
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#include "mitkNavigationDataPassThroughFilter.h"
// Default constructor: no state of its own; pipeline setup is inherited.
mitk::NavigationDataPassThroughFilter::NavigationDataPassThroughFilter()
{
}
// Destructor: nothing to release; smart pointers in the base class clean up.
mitk::NavigationDataPassThroughFilter::~NavigationDataPassThroughFilter()
{
}
/**
 * Forwards every input NavigationData unchanged to the corresponding output.
 * Graft() copies the input's information into the output; the data-valid flag
 * is forwarded explicitly afterwards. Logs and throws (mitkThrow) if any
 * input/output slot is null.
 */
void mitk::NavigationDataPassThroughFilter::GenerateData()
{
  // get each input and transfer the data
  DataObjectPointerArray inputs = this->GetInputs(); //get all inputs
  for ( unsigned int index = 0; index < inputs.size(); ++index )
  {
    // get the needed variables (input and output)
    const mitk::NavigationData* nd = this->GetInput(index);
    mitk::NavigationData* output = this->GetOutput(index);

    if ( ! nd || ! output )
    {
      MITK_ERROR("NavigationDataToNavigationDataFilter")("NavigationDataPassThroughFilter")
        << "Input and output must not be null.";
      mitkThrow() << "Input and output must not be null.";
    }

    output->Graft(nd); // copy all information from input to output
    output->SetDataValid(nd->IsDataValid());
  }
}
| NifTK/MITK | Modules/US/USNavigation/Filter/mitkNavigationDataPassThroughFilter.cpp | C++ | bsd-3-clause | 1,482 |
/*
* Copyright 2013 The Polymer Authors. All rights reserved.
* Use of this source code is goverened by a BSD-style
* license that can be found in the LICENSE file.
*/
suite('ChildNodeInterface', function() {

  // Builds a host <div> with light-DOM children a..e plus a shadow root
  // containing f, g, h, <content>, i, j, k. Returns every node keyed by name,
  // and forces a render pass so distribution has happened.
  function getTree() {
    var t = {div: document.createElement('div')};
    t.div.innerHTML = 'a<b></b>c<d></d>e';

    var node = t.div.firstChild;
    ['a', 'b', 'c', 'd', 'e'].forEach(function(name) {
      t[name] = node;
      node = node.nextSibling;
    });

    t.sr = t.div.createShadowRoot();
    t.sr.innerHTML = 'f<g></g>h<content></content>i<j></j>k';

    node = t.sr.firstChild;
    ['f', 'g', 'h', 'content', 'i', 'j', 'k'].forEach(function(name) {
      t[name] = node;
      node = node.nextSibling;
    });

    t.div.offsetHeight;  // trigger rendering
    return t;
  }

  test('nextElementSibling', function() {
    var t = getTree();
    assert.equal(t.b.nextElementSibling, t.d);
    assert.equal(t.d.nextElementSibling, null);
    assert.equal(t.g.nextElementSibling, t.content);
    assert.equal(t.content.nextElementSibling, t.j);
    assert.equal(t.j.nextElementSibling, null);
  });

  test('previousElementSibling', function() {
    var t = getTree();
    assert.equal(t.b.previousElementSibling, null);
    assert.equal(t.d.previousElementSibling, t.b);
    assert.equal(t.g.previousElementSibling, null);
    assert.equal(t.content.previousElementSibling, t.g);
    assert.equal(t.j.previousElementSibling, t.content);
  });

  test('remove', function() {
    var host = document.createElement('div');
    host.innerHTML = '<a></a>';
    var anchor = host.firstChild;

    anchor.remove();
    assert.equal(host.firstChild, null);
    assert.equal(anchor.parentNode, null);

    // Removing a node that has no parent is a no-op.
    host.remove();
  });
});
| modulexcite/ShadowDOM | test/js/ChildNodeInterface.js | JavaScript | bsd-3-clause | 1,975 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import code
import cpp_util
from model import Platforms
from schema_util import CapitalizeFirstLetter
from schema_util import JsFunctionNameToClassName
import json
import os
import re
def _RemoveDescriptions(node):
"""Returns a copy of |schema| with "description" fields removed.
"""
if isinstance(node, dict):
result = {}
for key, value in node.items():
# Some schemas actually have properties called "description", so only
# remove descriptions that have string values.
if key == 'description' and isinstance(value, basestring):
continue
result[key] = _RemoveDescriptions(value)
return result
if isinstance(node, list):
return [_RemoveDescriptions(v) for v in node]
return node
class CppBundleGenerator(object):
  """This class contains methods to generate code based on multiple schemas.
  """

  def __init__(self,
               root,
               model,
               api_defs,
               cpp_type_generator,
               cpp_namespace,
               source_file_dir,
               impl_dir):
    # |root| is the source-tree root used to resolve implementation headers;
    # |model| holds the parsed namespaces; |api_defs| the raw schema dicts.
    self._root = root
    self._model = model
    self._api_defs = api_defs
    self._cpp_type_generator = cpp_type_generator
    self._cpp_namespace = cpp_namespace
    self._source_file_dir = source_file_dir
    self._impl_dir = impl_dir

    # One sub-generator per output file; each shares this bundle's state.
    self.api_cc_generator = _APICCGenerator(self)
    self.api_h_generator = _APIHGenerator(self)
    self.schemas_cc_generator = _SchemasCCGenerator(self)
    self.schemas_h_generator = _SchemasHGenerator(self)

  def _GenerateHeader(self, file_base, body_code):
    """Generates a code.Code object for a header file

    Parameters:
      - |file_base| - the base of the filename, e.g. 'foo' (for 'foo.h')
      - |body_code| - the code to put in between the multiple inclusion guards"""
    c = code.Code()
    c.Append(cpp_util.CHROMIUM_LICENSE)
    c.Append()
    c.Append(cpp_util.GENERATED_BUNDLE_FILE_MESSAGE % self._source_file_dir)
    ifndef_name = cpp_util.GenerateIfndefName(self._source_file_dir, file_base)
    c.Append()
    c.Append('#ifndef %s' % ifndef_name)
    c.Append('#define %s' % ifndef_name)
    c.Append()
    c.Concat(body_code)
    c.Append()
    c.Append('#endif // %s' % ifndef_name)
    c.Append()
    return c

  def _GetPlatformIfdefs(self, model_object):
    """Generates the "defined" conditional for an #if check if |model_object|
    has platform restrictions. Returns None if there are no restrictions.
    """
    if model_object.platforms is None:
      return None
    ifdefs = []
    for platform in model_object.platforms:
      if platform == Platforms.CHROMEOS:
        ifdefs.append('defined(OS_CHROMEOS)')
      elif platform == Platforms.LINUX:
        ifdefs.append('defined(OS_LINUX)')
      elif platform == Platforms.MAC:
        ifdefs.append('defined(OS_MACOSX)')
      elif platform == Platforms.WIN:
        ifdefs.append('defined(OS_WIN)')
      else:
        raise ValueError("Unsupported platform ifdef: %s" % platform.name)
    return ' || '.join(ifdefs)

  def _GenerateRegisterFunctions(self, namespace_name, function):
    """Generates the registry->RegisterFunction<...>() call for one function,
    wrapped in platform #if guards when the function is platform-restricted.
    """
    c = code.Code()
    function_ifdefs = self._GetPlatformIfdefs(function)
    if function_ifdefs is not None:
      c.Append("#if %s" % function_ifdefs, indent_level=0)

    function_name = JsFunctionNameToClassName(namespace_name, function.name)
    c.Append("registry->RegisterFunction<%sFunction>();" % (
        function_name))

    if function_ifdefs is not None:
      c.Append("#endif // %s" % function_ifdefs, indent_level=0)
    return c

  def _GenerateFunctionRegistryRegisterAll(self):
    """Generates the body of GeneratedFunctionRegistry::RegisterAll, which
    registers every non-nocompile function of every namespace (including the
    functions declared on each namespace's types), honoring #if guards.
    """
    c = code.Code()
    c.Append('// static')
    c.Sblock('void GeneratedFunctionRegistry::RegisterAll('
                 'ExtensionFunctionRegistry* registry) {')
    for namespace in self._model.namespaces.values():
      namespace_ifdefs = self._GetPlatformIfdefs(namespace)
      if namespace_ifdefs is not None:
        c.Append("#if %s" % namespace_ifdefs, indent_level=0)

      # NOTE(review): namespace_name is computed here but not used below;
      # _GenerateRegisterFunctions receives namespace.name directly.
      namespace_name = CapitalizeFirstLetter(namespace.name.replace(
          "experimental.", ""))
      for function in namespace.functions.values():
        if function.nocompile:
          continue
        c.Concat(self._GenerateRegisterFunctions(namespace.name, function))

      for type_ in namespace.types.values():
        for function in type_.functions.values():
          if function.nocompile:
            continue
          namespace_types_name = JsFunctionNameToClassName(
                namespace.name, type_.name)
          c.Concat(self._GenerateRegisterFunctions(namespace_types_name,
                                                   function))

      if namespace_ifdefs is not None:
        c.Append("#endif // %s" % namespace_ifdefs, indent_level=0)
    c.Eblock("}")
    return c
class _APIHGenerator(object):
  """Generates the header for API registration / declaration"""

  def __init__(self, cpp_bundle):
    self._bundle = cpp_bundle

  def Generate(self, namespace):
    """Returns the code.Code for generated_api.h: forward-declares
    ExtensionFunctionRegistry and declares GeneratedFunctionRegistry with its
    static RegisterAll() entry point, wrapped in include guards.
    """
    c = code.Code()
    c.Append('#include <string>')
    c.Append()
    c.Append('#include "base/basictypes.h"')
    c.Append()
    c.Append("class ExtensionFunctionRegistry;")
    c.Append()
    c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
    c.Append()
    c.Append('class GeneratedFunctionRegistry {')
    c.Sblock(' public:')
    c.Append('static void RegisterAll('
                 'ExtensionFunctionRegistry* registry);')
    c.Eblock('};')
    c.Append()
    c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
    return self._bundle._GenerateHeader('generated_api', c)
class _APICCGenerator(object):
  """Generates a code.Code object for the generated API .cc file"""

  def __init__(self, cpp_bundle):
    self._bundle = cpp_bundle

  def Generate(self, namespace):
    """Returns the code.Code for generated_api.cc: includes each namespace's
    implementation header (when present on disk, with platform #if guards) and
    emits the GeneratedFunctionRegistry::RegisterAll definition.

    Raises ValueError if a namespace explicitly names an "implemented_in"
    header that does not exist.
    """
    c = code.Code()
    c.Append(cpp_util.CHROMIUM_LICENSE)
    c.Append()
    c.Append('#include "%s"' % (os.path.join(self._bundle._source_file_dir,
                                             'generated_api.h')))
    c.Append()
    # NOTE(review): the loop variable shadows the |namespace| parameter.
    for namespace in self._bundle._model.namespaces.values():
      namespace_name = namespace.unix_name.replace("experimental_", "")
      implementation_header = namespace.compiler_options.get(
          "implemented_in",
          "%s/%s/%s_api.h" % (self._bundle._impl_dir,
                              namespace_name,
                              namespace_name))
      if not os.path.exists(
          os.path.join(self._bundle._root,
                       os.path.normpath(implementation_header))):
        if "implemented_in" in namespace.compiler_options:
          raise ValueError('Header file for namespace "%s" specified in '
                           'compiler_options not found: %s' %
                           (namespace.unix_name, implementation_header))
        # No header on disk and none promised: silently skip this namespace.
        continue
      ifdefs = self._bundle._GetPlatformIfdefs(namespace)
      if ifdefs is not None:
        c.Append("#if %s" % ifdefs, indent_level=0)

      c.Append('#include "%s"' % implementation_header)

      if ifdefs is not None:
        c.Append("#endif // %s" % ifdefs, indent_level=0)
    c.Append()
    c.Append('#include '
             '"extensions/browser/extension_function_registry.h"')
    c.Append()
    c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
    c.Append()
    c.Concat(self._bundle._GenerateFunctionRegistryRegisterAll())
    c.Append()
    c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
    c.Append()
    return c
class _SchemasHGenerator(object):
  """Generates a code.Code object for the generated schemas .h file"""

  def __init__(self, cpp_bundle):
    self._bundle = cpp_bundle

  def Generate(self, namespace):
    """Returns the code.Code for generated_schemas.h: declares the
    GeneratedSchemas class with its static IsGenerated()/Get() lookups,
    wrapped in include guards.
    """
    c = code.Code()
    c.Append('#include <map>')
    c.Append('#include <string>')
    c.Append()
    c.Append('#include "base/strings/string_piece.h"')
    c.Append()
    c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
    c.Append()
    c.Append('class GeneratedSchemas {')
    c.Sblock(' public:')
    c.Append('// Determines if schema named |name| is generated.')
    c.Append('static bool IsGenerated(std::string name);')
    c.Append()
    c.Append('// Gets the API schema named |name|.')
    c.Append('static base::StringPiece Get(const std::string& name);')
    c.Eblock('};')
    c.Append()
    c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
    return self._bundle._GenerateHeader('generated_schemas', c)
def _FormatNameAsConstant(name):
"""Formats a name to be a C++ constant of the form kConstantName"""
name = '%s%s' % (name[0].upper(), name[1:])
return 'k%s' % re.sub('_[a-z]',
lambda m: m.group(0)[1].upper(),
name.replace('.', '_'))
class _SchemasCCGenerator(object):
  """Generates a code.Code object for the generated schemas .cc file"""

  def __init__(self, cpp_bundle):
    self._bundle = cpp_bundle

  def Generate(self, namespace):
    """Returns the code.Code for generated_schemas.cc: embeds each schema as a
    JSON C-string constant and defines GeneratedSchemas::Get/IsGenerated over
    a lazily-initialized name->schema map.

    NOTE: uses xrange, so this file is Python 2 only.
    """
    c = code.Code()
    c.Append(cpp_util.CHROMIUM_LICENSE)
    c.Append()
    c.Append('#include "%s"' % (os.path.join(self._bundle._source_file_dir,
                                             'generated_schemas.h')))
    c.Append()
    c.Append('#include "base/lazy_instance.h"')
    c.Append()
    c.Append('namespace {')
    for api in self._bundle._api_defs:
      namespace = self._bundle._model.namespaces[api.get('namespace')]
      # JSON parsing code expects lists of schemas, so dump a singleton list.
      json_content = json.dumps([_RemoveDescriptions(api)],
                                separators=(',', ':'))
      # Escape all double-quotes and backslashes. For this to output a valid
      # JSON C string, we need to escape \ and ". Note that some schemas are
      # too large to compile on windows. Split the JSON up into several
      # strings, since apparently that helps.
      max_length = 8192
      segments = [json_content[i:i + max_length].replace('\\', '\\\\')
                                                .replace('"', '\\"')
                  for i in xrange(0, len(json_content), max_length)]
      c.Append('const char %s[] = "%s";' %
          (_FormatNameAsConstant(namespace.name), '" "'.join(segments)))
    c.Append('}')
    c.Concat(cpp_util.OpenNamespace(self._bundle._cpp_namespace))
    c.Append()
    # The Static struct populates the schema map once, on first access.
    c.Sblock('struct Static {')
    c.Sblock('Static() {')
    for api in self._bundle._api_defs:
      namespace = self._bundle._model.namespaces[api.get('namespace')]
      c.Append('schemas["%s"] = %s;' % (namespace.name,
                                       _FormatNameAsConstant(namespace.name)))
    c.Eblock('}')
    c.Append()
    c.Append('std::map<std::string, const char*> schemas;')
    c.Eblock('};')
    c.Append()
    c.Append('base::LazyInstance<Static> g_lazy_instance;')
    c.Append()
    c.Append('// static')
    c.Sblock('base::StringPiece GeneratedSchemas::Get('
                'const std::string& name) {')
    c.Append('return IsGenerated(name) ? '
             'g_lazy_instance.Get().schemas[name] : "";')
    c.Eblock('}')
    c.Append()
    c.Append('// static')
    c.Sblock('bool GeneratedSchemas::IsGenerated(std::string name) {')
    c.Append('return g_lazy_instance.Get().schemas.count(name) > 0;')
    c.Eblock('}')
    c.Append()
    c.Concat(cpp_util.CloseNamespace(self._bundle._cpp_namespace))
    c.Append()
    return c
| boundarydevices/android_external_chromium_org | tools/json_schema_compiler/cpp_bundle_generator.py | Python | bsd-3-clause | 11,513 |
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\HttpFoundation;
/**
* HTTP header utility functions.
*
* @author Christian Schmidt <github@chsc.dk>
*/
class HeaderUtils
{
    public const DISPOSITION_ATTACHMENT = 'attachment';
    public const DISPOSITION_INLINE = 'inline';

    /**
     * This class should not be instantiated.
     */
    private function __construct()
    {
    }

    /**
     * Splits an HTTP header by one or more separators.
     *
     * Example:
     *
     *     HeaderUtils::split("da, en-gb;q=0.8", ",;")
     *     // => [['da'], ['en-gb', 'q=0.8']]
     *
     * @param string $header     HTTP header value
     * @param string $separators List of characters to split on, ordered by
     *                           precedence, e.g. ",", ";=", or ",;="
     *
     * @return array Nested array with as many levels as there are characters in
     *               $separators
     */
    public static function split(string $header, string $separators): array
    {
        $quotedSeparators = preg_quote($separators, '/');

        // Tokenize into quoted-strings, tokens and separator characters (the
        // named "separator" group); grouping by precedence happens below.
        preg_match_all('
            /
                (?!\s)
                    (?:
                        # quoted-string
                        "(?:[^"\\\\]|\\\\.)*(?:"|\\\\|$)
                    |
                        # token
                        [^"'.$quotedSeparators.']+
                    )+
                (?<!\s)
            |
                # separator
                \s*
                (?<separator>['.$quotedSeparators.'])
                \s*
            /x', trim($header), $matches, PREG_SET_ORDER);

        return self::groupParts($matches, $separators);
    }

    /**
     * Combines an array of arrays into one associative array.
     *
     * Each of the nested arrays should have one or two elements. The first
     * value will be used as the keys in the associative array, and the second
     * will be used as the values, or true if the nested array only contains one
     * element. Array keys are lowercased.
     *
     * Example:
     *
     *     HeaderUtils::combine([["foo", "abc"], ["bar"]])
     *     // => ["foo" => "abc", "bar" => true]
     */
    public static function combine(array $parts): array
    {
        $assoc = [];
        foreach ($parts as $part) {
            $name = strtolower($part[0]);
            $value = $part[1] ?? true;
            $assoc[$name] = $value;
        }

        return $assoc;
    }

    /**
     * Joins an associative array into a string for use in an HTTP header.
     *
     * The key and value of each entry are joined with "=", and all entries
     * are joined with the specified separator and an additional space (for
     * readability). Values are quoted if necessary.
     *
     * Example:
     *
     *     HeaderUtils::toString(["foo" => "abc", "bar" => true, "baz" => "a b c"], ",")
     *     // => 'foo=abc, bar, baz="a b c"'
     */
    public static function toString(array $assoc, string $separator): string
    {
        $parts = [];
        foreach ($assoc as $name => $value) {
            if (true === $value) {
                // Boolean true renders as a bare flag (no "=value" part).
                $parts[] = $name;
            } else {
                $parts[] = $name.'='.self::quote($value);
            }
        }

        return implode($separator.' ', $parts);
    }

    /**
     * Encodes a string as a quoted string, if necessary.
     *
     * If a string contains characters not allowed by the "token" construct in
     * the HTTP specification, it is backslash-escaped and enclosed in quotes
     * to match the "quoted-string" construct.
     */
    public static function quote(string $s): string
    {
        // Strings matching the RFC 7230 "token" grammar need no quoting.
        if (preg_match('/^[a-z0-9!#$%&\'*.^_`|~-]+$/i', $s)) {
            return $s;
        }

        return '"'.addcslashes($s, '"\\"').'"';
    }

    /**
     * Decodes a quoted string.
     *
     * If passed an unquoted string that matches the "token" construct (as
     * defined in the HTTP specification), it is passed through verbatimly.
     */
    public static function unquote(string $s): string
    {
        // Drops the surrounding quotes and resolves backslash escapes.
        return preg_replace('/\\\\(.)|"/', '$1', $s);
    }

    /**
     * Generates a HTTP Content-Disposition field-value.
     *
     * @param string $disposition      One of "inline" or "attachment"
     * @param string $filename         A unicode string
     * @param string $filenameFallback A string containing only ASCII characters that
     *                                 is semantically equivalent to $filename. If the filename is already ASCII,
     *                                 it can be omitted, or just copied from $filename
     *
     * @return string A string suitable for use as a Content-Disposition field-value
     *
     * @throws \InvalidArgumentException
     *
     * @see RFC 6266
     */
    public static function makeDisposition(string $disposition, string $filename, string $filenameFallback = ''): string
    {
        if (!\in_array($disposition, [self::DISPOSITION_ATTACHMENT, self::DISPOSITION_INLINE])) {
            throw new \InvalidArgumentException(sprintf('The disposition must be either "%s" or "%s".', self::DISPOSITION_ATTACHMENT, self::DISPOSITION_INLINE));
        }

        if ('' === $filenameFallback) {
            $filenameFallback = $filename;
        }

        // filenameFallback is not ASCII.
        if (!preg_match('/^[\x20-\x7e]*$/', $filenameFallback)) {
            throw new \InvalidArgumentException('The filename fallback must only contain ASCII characters.');
        }

        // percent characters aren't safe in fallback.
        if (false !== strpos($filenameFallback, '%')) {
            throw new \InvalidArgumentException('The filename fallback cannot contain the "%" character.');
        }

        // path separators aren't allowed in either.
        if (false !== strpos($filename, '/') || false !== strpos($filename, '\\') || false !== strpos($filenameFallback, '/') || false !== strpos($filenameFallback, '\\')) {
            throw new \InvalidArgumentException('The filename and the fallback cannot contain the "/" and "\\" characters.');
        }

        $params = ['filename' => $filenameFallback];
        if ($filename !== $filenameFallback) {
            // RFC 5987 extended parameter carries the original UTF-8 name.
            $params['filename*'] = "utf-8''".rawurlencode($filename);
        }

        return $disposition.'; '.self::toString($params, ';');
    }

    /**
     * Recursively groups split() matches by separator precedence.
     *
     * The first character of $separators delimits the outermost level; each
     * resulting group is grouped again with the remaining separator
     * characters. At the innermost level the matched tokens are unquoted.
     */
    private static function groupParts(array $matches, string $separators): array
    {
        $separator = $separators[0];
        $partSeparators = substr($separators, 1);

        $i = 0;
        $partMatches = [];
        foreach ($matches as $match) {
            if (isset($match['separator']) && $match['separator'] === $separator) {
                ++$i;
            } else {
                $partMatches[$i][] = $match;
            }
        }

        $parts = [];
        if ($partSeparators) {
            foreach ($partMatches as $matches) {
                $parts[] = self::groupParts($matches, $partSeparators);
            }
        } else {
            foreach ($partMatches as $matches) {
                $parts[] = self::unquote($matches[0][0]);
            }
        }

        return $parts;
    }
}
| andrerom/symfony | src/Symfony/Component/HttpFoundation/HeaderUtils.php | PHP | mit | 7,369 |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Media.Effects.BevelBitmapEffect.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Media.Effects
{
  // Contract reference class: every member body is a stub that returns a
  // default value. Only the signatures matter to the CodeContracts tooling;
  // none of this code is ever executed.
  sealed public partial class BevelBitmapEffect : BitmapEffect
  {
    #region Methods and constructors
    public BevelBitmapEffect()
    {
    }

    public System.Windows.Media.Effects.BevelBitmapEffect Clone()
    {
      return default(System.Windows.Media.Effects.BevelBitmapEffect);
    }

    public System.Windows.Media.Effects.BevelBitmapEffect CloneCurrentValue()
    {
      return default(System.Windows.Media.Effects.BevelBitmapEffect);
    }

    protected override System.Windows.Freezable CreateInstanceCore()
    {
      return default(System.Windows.Freezable);
    }

    protected override System.Runtime.InteropServices.SafeHandle CreateUnmanagedEffect()
    {
      return default(System.Runtime.InteropServices.SafeHandle);
    }

    protected override void UpdateUnmanagedPropertyState(System.Runtime.InteropServices.SafeHandle unmanagedEffect)
    {
    }
    #endregion

    #region Properties and indexers
    public double BevelWidth
    {
      get
      {
        return default(double);
      }
      set
      {
      }
    }

    public EdgeProfile EdgeProfile
    {
      get
      {
        return default(EdgeProfile);
      }
      set
      {
      }
    }

    public double LightAngle
    {
      get
      {
        return default(double);
      }
      set
      {
      }
    }

    public double Relief
    {
      get
      {
        return default(double);
      }
      set
      {
      }
    }

    public double Smoothness
    {
      get
      {
        return default(double);
      }
      set
      {
      }
    }
    #endregion

    #region Fields
    public readonly static System.Windows.DependencyProperty BevelWidthProperty;
    public readonly static System.Windows.DependencyProperty EdgeProfileProperty;
    public readonly static System.Windows.DependencyProperty LightAngleProperty;
    public readonly static System.Windows.DependencyProperty ReliefProperty;
    public readonly static System.Windows.DependencyProperty SmoothnessProperty;
    #endregion
  }
}
| Microsoft/CodeContracts | Microsoft.Research/Contracts/PresentationCore/Sources/System.Windows.Media.Effects.BevelBitmapEffect.cs | C# | mit | 4,064 |
<?php
/*
* This file is part of the symfony package.
* (c) 2004-2006 Fabien Potencier <fabien.potencier@symfony-project.com>
* (c) 2004-2006 Sean Kerr <sean@code-box.org>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* sfPDODatabase provides connectivity for the PDO database abstraction layer.
*
* @package symfony
* @subpackage database
* @author Daniel Swarbrick (daniel@pressure.net.nz)
* @author Fabien Potencier <fabien.potencier@symfony-project.com>
* @author Sean Kerr <sean@code-box.org>
* @author Dustin Whittle <dustin.whittle@symfony-project.com>
* @version SVN: $Id: sfPDODatabase.class.php 17858 2009-05-01 21:22:50Z FabianLange $
*/
class sfPDODatabase extends sfDatabase
{
  /**
   * Connects to the database.
   *
   * Reads settings from the parameter holder: 'dsn' (required when 'method'
   * is 'dsn'), 'class' (PDO subclass, defaults to PDO), 'username',
   * 'password', 'persistent', plus the optional 'compat', 'nulls' and
   * 'autocommit' attribute switches.
   *
   * @throws <b>sfDatabaseException</b> If a connection could not be created
   */
  public function connect()
  {
    // determine how to get our parameters
    $method = $this->getParameter('method', 'dsn');

    // get parameters
    switch ($method)
    {
      case 'dsn':
        $dsn = $this->getParameter('dsn');
        if ($dsn == null)
        {
          // missing required dsn parameter
          throw new sfDatabaseException('Database configuration specifies method "dsn", but is missing dsn parameter.');
        }
        break;
    }

    try
    {
      $pdo_class = $this->getParameter('class', 'PDO');
      $username = $this->getParameter('username');
      $password = $this->getParameter('password');
      $persistent = $this->getParameter('persistent');

      $options = ($persistent) ? array(PDO::ATTR_PERSISTENT => true) : array();

      $this->connection = new $pdo_class($dsn, $username, $password, $options);
    }
    catch (PDOException $e)
    {
      // re-throw as the framework's database exception type
      throw new sfDatabaseException($e->getMessage());
    }

    // lets generate exceptions instead of silent failures
    if (sfConfig::get('sf_debug'))
    {
      $this->connection->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);
    }
    else
    {
      $this->connection->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_SILENT);
    }

    // compatability
    $compatability = $this->getParameter('compat');
    if ($compatability)
    {
      $this->connection->setAttribute(PDO::ATTR_CASE, PDO::CASE_NATURAL);
    }

    // nulls
    $nulls = $this->getParameter('nulls');
    if ($nulls)
    {
      $this->connection->setAttribute(PDO::ATTR_ORACLE_NULLS, PDO::NULL_EMPTY_STRING);
    }

    // auto commit
    $autocommit = $this->getParameter('autocommit');
    if ($autocommit)
    {
      $this->connection->setAttribute(PDO::ATTR_AUTOCOMMIT, true);
    }

    $this->resource = $this->connection;
  }

  /**
   * Execute the shutdown procedure.
   *
   * Dropping the last reference to the PDO object closes the connection.
   *
   * @return void
   */
  public function shutdown ()
  {
    if ($this->connection !== null)
    {
      @$this->connection = null;
    }
  }

  /**
   * Magic method for calling PDO directly via sfPDODatabase
   *
   * Bug fix: the argument list is now spread with call_user_func_array() so
   * each argument reaches the PDO method individually. Previously the whole
   * $arguments array was passed as a single first parameter, which broke any
   * proxied call taking arguments (e.g. $db->quote($string)).
   *
   * @param string $method
   * @param array $arguments
   * @return mixed
   */
  public function __call($method, $arguments)
  {
    return call_user_func_array(array($this->getConnection(), $method), $arguments);
  }
}
| serg-smirnoff/symfony-pposter | symfony/lib/symfony/database/sfPDODatabase.class.php | PHP | mit | 3,236 |
<?php
// Guard: this helper is CLI-only.
if ('cli' !== php_sapi_name()) {
  die("This script must be called from the command line\n");
}

// First CLI argument is the file to check.
if (empty($_SERVER['argv'][1])) {
  die("Usage: php {$_SERVER['argv'][0]} <file>\n");
}
$path = $_SERVER['argv'][1];

// Accumulator for sniff violations; emitted as JSON at the end.
$result = array('comments' => array());

// Whitelist of extensions to check (default phpcs list)
$extension = pathinfo($path, PATHINFO_EXTENSION);
if (in_array($extension, array('php', 'js', 'inc', 'css'))) {
  // Run each sniff
  // phpcs --encoding=utf-8 --standard=framework/tests/phpcs/tabs.xml
  run_sniff('tabs.xml', $path, $result);
  // phpcs --encoding=utf-8 --tab-width=4 --standard=framework/tests/phpcs/ruleset.xml
  run_sniff('ruleset.xml', $path, $result, '--tab-width=4');
}

echo json_encode($result);
/**
 * Runs phpcs with one coding standard against a file and appends any
 * violations it reports to $result['comments'].
 *
 * @param string $standard   Ruleset file name inside the phpcs/ directory
 * @param string $path       Path of the file being checked
 * @param array  $result     Result accumulator (modified by reference)
 * @param string $extraFlags Extra flags passed verbatim to the phpcs binary
 */
function run_sniff($standard, $path, array &$result, $extraFlags = '') {
  $sniffPath = escapeshellarg(__DIR__ . '/phpcs/' . $standard);
  $checkPath = escapeshellarg($path);

  $output = array();
  exec("phpcs --encoding=utf-8 $extraFlags --standard=$sniffPath --report=xml $checkPath", $output);

  // We can't check the return code as it's non-zero if the sniff finds an error
  if (!$output) {
    return;
  }

  $xml = simplexml_load_string(implode("\n", $output));
  $errors = $xml->xpath('/phpcs/file/error');
  if (!$errors) {
    return;
  }

  // Build stable comment ids of the form standard-path-line-column.
  $sanePath = str_replace('/', '_', $path);
  foreach ($errors as $error) {
    $attributes = $error->attributes();
    $result['comments'][] = array(
      'line' => (int)strval($attributes->line),
      'id' => $standard . '-' . $sanePath . '-' . $attributes->line . '-' . $attributes->column,
      'message' => strval($error)
    );
  }
}
| Widdershin/silverstripe-todo | src/framework/tests/phpcs_runner.php | PHP | mit | 1,596 |
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
* @flow
*/
'use strict';

import {BatchedBridge} from 'react-native/Libraries/ReactPrivate/ReactNativePrivateInterface';

// TODO @sema: Adjust types
import type {ReactNativeType} from './ReactNativeTypes';

let ReactFabric;

// Select the dev or prod renderer bundle. The two require() calls are kept
// as separate literal statements (rather than a computed path) so bundlers
// can statically strip the branch that __DEV__ rules out.
if (__DEV__) {
  ReactFabric = require('../implementations/ReactFabric-dev');
} else {
  ReactFabric = require('../implementations/ReactFabric-prod');
}

// Register the renderer so native code can invoke it over the batched bridge.
BatchedBridge.registerCallableModule('ReactFabric', ReactFabric);

module.exports = (ReactFabric: ReactNativeType);
| VioletLife/react | scripts/rollup/shims/react-native/ReactFabric.js | JavaScript | mit | 698 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Linq;
using System.Security.Cryptography.X509Certificates;
using Test.Cryptography;
using Xunit;
namespace System.Security.Cryptography.Pkcs.Tests
{
public static class TimestampTokenTests
{
// Decodes a complete, well-formed timestamp token and verifies every field
// against the recorded test data. The whole input is the token, so
// TryDecode is expected to consume every byte.
[Theory]
[InlineData(nameof(TimestampTokenTestData.FreeTsaDotOrg1))]
[InlineData(nameof(TimestampTokenTestData.Symantec1))]
public static void ParseDocument(string testDataName)
{
    TimestampTokenTestData testData = TimestampTokenTestData.GetTestData(testDataName);
    TestParseDocument(testData.FullTokenBytes, testData, testData.FullTokenBytes.Length);
}
// Appends trailing bytes after a valid token and verifies that TryDecode
// stops at the end of the token, reporting only the token's length as read.
[Theory]
[InlineData(nameof(TimestampTokenTestData.FreeTsaDotOrg1))]
[InlineData(nameof(TimestampTokenTestData.Symantec1))]
public static void ParseDocument_ExcessData(string testDataName)
{
    TimestampTokenTestData testData = TimestampTokenTestData.GetTestData(testDataName);

    int baseLen = testData.FullTokenBytes.Length;
    byte[] tooMuchData = new byte[baseLen + 30];
    testData.FullTokenBytes.CopyTo(tooMuchData);

    // Look like an octet string of the remainder of the payload. Should be ignored.
    tooMuchData[baseLen] = 0x04;
    tooMuchData[baseLen + 1] = 28;

    TestParseDocument(tooMuchData, testData, baseLen);
}
// Shared verification path for the ParseDocument theories: decodes the token,
// checks the TSTInfo contents against the recorded test data, round-trips it
// to a SignedCms, and exercises signature verification for both data and hash
// inputs — positive and negative cases.
private static void TestParseDocument(
    ReadOnlyMemory<byte> tokenBytes,
    TimestampTokenTestData testData,
    int? expectedBytesRead)
{
    int bytesRead;
    Rfc3161TimestampToken token;

    Assert.True(
        Rfc3161TimestampToken.TryDecode(tokenBytes, out token, out bytesRead),
        "Rfc3161TimestampToken.TryDecode");

    if (expectedBytesRead != null)
    {
        Assert.Equal(expectedBytesRead.Value, bytesRead);
    }

    Assert.NotNull(token);
    TimestampTokenInfoTests.AssertEqual(testData, token.TokenInfo);

    // The token must expose itself as a SignedCms whose encapsulated content
    // is exactly the raw TSTInfo bytes.
    SignedCms signedCms = token.AsSignedCms();
    Assert.NotNull(signedCms);
    Assert.Equal(Oids.TstInfo, signedCms.ContentInfo.ContentType.Value);

    Assert.Equal(
        testData.TokenInfoBytes.ByteArrayToHex(),
        signedCms.ContentInfo.Content.ByteArrayToHex());

    if (testData.EmbeddedSigningCertificate != null)
    {
        Assert.NotNull(signedCms.SignerInfos[0].Certificate);

        Assert.Equal(
            testData.EmbeddedSigningCertificate.Value.ByteArrayToHex(),
            signedCms.SignerInfos[0].Certificate.RawData.ByteArrayToHex());

        // Assert.NoThrow
        signedCms.CheckSignature(true);
    }
    else
    {
        Assert.Null(signedCms.SignerInfos[0].Certificate);

        using (var signerCert = new X509Certificate2(testData.ExternalCertificateBytes))
        {
            // Assert.NoThrow
            signedCms.CheckSignature(
                new X509Certificate2Collection(signerCert),
                true);
        }
    }

    X509Certificate2 returnedCert;
    ReadOnlySpan<byte> messageContentSpan = testData.MessageContent.Span;
    X509Certificate2Collection candidates = null;

    if (testData.EmbeddedSigningCertificate != null)
    {
        // Embedded certificate: no candidate collection should be needed.
        Assert.True(
            token.VerifySignatureForData(messageContentSpan, out returnedCert),
            "token.VerifySignatureForData(correct)");
        Assert.NotNull(returnedCert);
        Assert.Equal(signedCms.SignerInfos[0].Certificate, returnedCert);
    }
    else
    {
        candidates = new X509Certificate2Collection
        {
            new X509Certificate2(testData.ExternalCertificateBytes),
        };

        // With no certificate available, verification must fail...
        Assert.False(
            token.VerifySignatureForData(messageContentSpan, out returnedCert),
            "token.VerifySignatureForData(correct, no cert)");
        Assert.Null(returnedCert);

        // ...and succeed once the candidate collection supplies the signer.
        Assert.True(
            token.VerifySignatureForData(messageContentSpan, out returnedCert, candidates),
            "token.VerifySignatureForData(correct, certs)");
        Assert.NotNull(returnedCert);
        Assert.Equal(candidates[0], returnedCert);
    }

    X509Certificate2 previousCert = returnedCert;

    // Altered data (offset by one byte) must not verify.
    Assert.False(
        token.VerifySignatureForData(messageContentSpan.Slice(1), out returnedCert, candidates),
        "token.VerifySignatureForData(incorrect)");
    Assert.Null(returnedCert);

    byte[] messageHash = testData.HashBytes.ToArray();

    // The correct hash presented under the wrong algorithm id must not verify.
    Assert.False(
        token.VerifySignatureForHash(messageHash, HashAlgorithmName.MD5, out returnedCert, candidates),
        "token.VerifyHash(correct, MD5)");
    Assert.Null(returnedCert);

    Assert.False(
        token.VerifySignatureForHash(messageHash, new Oid(Oids.Md5), out returnedCert, candidates),
        "token.VerifyHash(correct, Oid(MD5))");
    Assert.Null(returnedCert);

    Assert.True(
        token.VerifySignatureForHash(messageHash, new Oid(testData.HashAlgorithmId), out returnedCert, candidates),
        "token.VerifyHash(correct, Oid(algId))");
    Assert.NotNull(returnedCert);
    Assert.Equal(previousCert, returnedCert);

    // A corrupted hash must not verify even under the right algorithm id.
    messageHash[0] ^= 0xFF;

    Assert.False(
        token.VerifySignatureForHash(messageHash, new Oid(testData.HashAlgorithmId), out returnedCert, candidates),
        "token.VerifyHash(incorrect, Oid(algId))");
    Assert.Null(returnedCert);
}
[Fact]
public static void TryDecode_Fails_SignedCmsOfData()
{
Assert.False(
Rfc3161TimestampToken.TryDecode(
SignedDocuments.RsaPkcs1OneSignerIssuerAndSerialNumber,
out Rfc3161TimestampToken token,
out int bytesRead),
"Rfc3161TimestampToken.TryDecode");
Assert.Equal(0, bytesRead);
Assert.Null(token);
}
// Empty input must fail to decode without throwing, reporting zero bytes read.
[Fact]
public static void TryDecode_Fails_Empty()
{
    Assert.False(
        Rfc3161TimestampToken.TryDecode(
            ReadOnlyMemory<byte>.Empty,
            out Rfc3161TimestampToken token,
            out int bytesRead),
        "Rfc3161TimestampToken.TryDecode");

    Assert.Equal(0, bytesRead);
    Assert.Null(token);
}
[Fact]
public static void TryDecode_Fails_EnvelopedCms()
{
byte[] encodedMessage =
("3082010c06092a864886f70d010703a081fe3081fb0201003181c83081c5020100302e301a311830160603550403130f5253"
+ "414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d010101050004818013"
+ "dc0eb2984a445d04a1f6246b8fe41f1d24507548d449d454d5bb5e0638d75ed101bf78c0155a5d208eb746755fbccbc86923"
+ "8443760a9ae94770d6373e0197be23a6a891f0c522ca96b3e8008bf23547474b7e24e7f32e8134df3862d84f4dea2470548e"
+ "c774dd74f149a56cdd966e141122900d00ad9d10ea1848541294a1302b06092a864886f70d010701301406082a864886f70d"
+ "030704089c8119f6cf6b174c8008bcea3a10d0737eb9").HexToByteArray();
Assert.False(
Rfc3161TimestampToken.TryDecode(
encodedMessage,
out Rfc3161TimestampToken token,
out int bytesRead),
"Rfc3161TimestampToken.TryDecode");
Assert.Equal(0, bytesRead);
Assert.Null(token);
}
// A structurally valid SignedCms that carries the TSTInfo content type but a
// garbage payload (a single byte) must be rejected as a timestamp token.
[Fact]
public static void TryDecode_Fails_MalformedToken()
{
    ContentInfo contentInfo = new ContentInfo(
        new Oid(Oids.TstInfo, Oids.TstInfo),
        new byte[] { 1 });

    SignedCms cms = new SignedCms(contentInfo);

    using (X509Certificate2 cert = Certificates.RSAKeyTransferCapi1.TryGetCertificateWithPrivateKey())
    {
        cms.ComputeSignature(new CmsSigner(SubjectIdentifierType.IssuerAndSerialNumber, cert));
    }

    Assert.False(
        Rfc3161TimestampToken.TryDecode(
            cms.Encode(),
            out Rfc3161TimestampToken token,
            out int bytesRead),
        "Rfc3161TimestampToken.TryDecode");

    Assert.Equal(0, bytesRead);
    Assert.Null(token);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain, SigningCertificateOption.ValidHashNoName)]
[InlineData(X509IncludeOption.None, SigningCertificateOption.ValidHashNoName)]
[InlineData(X509IncludeOption.WholeChain, SigningCertificateOption.ValidHashWithName)]
[InlineData(X509IncludeOption.None, SigningCertificateOption.ValidHashWithName)]
public static void MatchV1(X509IncludeOption includeOption, SigningCertificateOption v1Option)
{
CustomBuild_CertMatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
v1Option,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain)]
[InlineData(X509IncludeOption.None)]
public static void CertHashMismatchV1(X509IncludeOption includeOption)
{
CustomBuild_CertMismatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
SigningCertificateOption.InvalidHashNoName,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
[Theory]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.SubjectKeyIdentifier)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.SubjectKeyIdentifier)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.IssuerAndSerialNumber)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.IssuerAndSerialNumber)]
public static void CertMismatchIssuerAndSerialV1(
X509IncludeOption includeOption,
SigningCertificateOption v1Option,
SubjectIdentifierType identifierType)
{
CustomBuild_CertMismatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
v1Option,
SigningCertificateOption.Omit,
includeOption: includeOption,
identifierType: identifierType);
}
[Theory]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashNoName,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashNoName,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithName,
"MD5")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
"MD5")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithName,
"SHA1")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
"SHA1")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithName,
"SHA384")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
"SHA384")]
public static void MatchV2(
X509IncludeOption includeOption,
SigningCertificateOption v2Option,
string hashAlgName)
{
CustomBuild_CertMatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
SigningCertificateOption.Omit,
v2Option,
hashAlgName == null ? default(HashAlgorithmName) : new HashAlgorithmName(hashAlgName),
includeOption);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain, null)]
[InlineData(X509IncludeOption.None, null)]
[InlineData(X509IncludeOption.WholeChain, "MD5")]
[InlineData(X509IncludeOption.None, "MD5")]
[InlineData(X509IncludeOption.WholeChain, "SHA1")]
[InlineData(X509IncludeOption.None, "SHA1")]
[InlineData(X509IncludeOption.WholeChain, "SHA384")]
[InlineData(X509IncludeOption.None, "SHA384")]
public static void CertHashMismatchV2(X509IncludeOption includeOption, string hashAlgName)
{
CustomBuild_CertMismatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
SigningCertificateOption.Omit,
SigningCertificateOption.InvalidHashNoName,
hashAlgName == null ? default(HashAlgorithmName) : new HashAlgorithmName(hashAlgName),
includeOption: includeOption);
}
[Theory]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.SubjectKeyIdentifier,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.SubjectKeyIdentifier,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.SubjectKeyIdentifier,
"SHA384")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.SubjectKeyIdentifier,
"SHA384")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.IssuerAndSerialNumber,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.IssuerAndSerialNumber,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.IssuerAndSerialNumber,
"SHA384")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithInvalidName,
SubjectIdentifierType.IssuerAndSerialNumber,
"SHA384")]
public static void CertMismatchIssuerAndSerialV2(
X509IncludeOption includeOption,
SigningCertificateOption v2Option,
SubjectIdentifierType identifierType,
string hashAlgName)
{
CustomBuild_CertMismatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
SigningCertificateOption.Omit,
v2Option,
hashAlgName == null ? default(HashAlgorithmName) : new HashAlgorithmName(hashAlgName),
includeOption: includeOption,
identifierType: identifierType);
}
[Theory]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashNoName,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashNoName,
"SHA512")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashNoName,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashNoName,
"SHA512")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashWithName,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashWithName,
"SHA384")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashWithName,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.ValidHashWithName,
"SHA384")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashNoName,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashNoName,
"SHA512")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashNoName,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashNoName,
"SHA512")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashWithName,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashWithName,
"SHA384")]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashWithName,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.ValidHashWithName,
"SHA384")]
public static void CertMatchV1AndV2(
X509IncludeOption includeOption,
SigningCertificateOption v1Option,
SigningCertificateOption v2Option,
string hashAlgName)
{
CustomBuild_CertMatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
v1Option,
v2Option,
hashAlgName == null ? default(HashAlgorithmName) : new HashAlgorithmName(hashAlgName),
includeOption);
}
[Theory]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.InvalidHashNoName,
SigningCertificateOption.ValidHashWithName,
SubjectIdentifierType.IssuerAndSerialNumber,
null)]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidSerial,
SigningCertificateOption.ValidHashWithName,
SubjectIdentifierType.IssuerAndSerialNumber,
"SHA384")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.ValidHashWithInvalidName,
SigningCertificateOption.InvalidHashNoName,
SubjectIdentifierType.SubjectKeyIdentifier,
null)]
[InlineData(
X509IncludeOption.None,
SigningCertificateOption.ValidHashWithName,
SigningCertificateOption.InvalidHashNoName,
SubjectIdentifierType.SubjectKeyIdentifier,
"SHA512")]
[InlineData(
X509IncludeOption.WholeChain,
SigningCertificateOption.InvalidHashWithInvalidSerial,
SigningCertificateOption.ValidHashNoName,
SubjectIdentifierType.IssuerAndSerialNumber,
null)]
public static void CertMismatchV1OrV2(
X509IncludeOption includeOption,
SigningCertificateOption v1Option,
SigningCertificateOption v2Option,
SubjectIdentifierType identifierType,
string hashAlgName)
{
CustomBuild_CertMismatch(
Certificates.ValidLookingTsaCert,
new DateTimeOffset(2018, 1, 10, 17, 21, 11, 802, TimeSpan.Zero),
v1Option,
v2Option,
hashAlgName == null ? default(HashAlgorithmName) : new HashAlgorithmName(hashAlgName),
includeOption: includeOption,
identifierType: identifierType);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain)]
[InlineData(X509IncludeOption.None)]
public static void TimestampTooOld(X509IncludeOption includeOption)
{
CertLoader loader = Certificates.ValidLookingTsaCert;
DateTimeOffset referenceTime;
using (X509Certificate2 cert = loader.GetCertificate())
{
referenceTime = cert.NotBefore.AddSeconds(-1);
}
CustomBuild_CertMismatch(
loader,
referenceTime,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain)]
[InlineData(X509IncludeOption.None)]
public static void TimestampTooNew(X509IncludeOption includeOption)
{
CertLoader loader = Certificates.ValidLookingTsaCert;
DateTimeOffset referenceTime;
using (X509Certificate2 cert = loader.GetCertificate())
{
referenceTime = cert.NotAfter.AddSeconds(1);
}
CustomBuild_CertMismatch(
loader,
referenceTime,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain)]
[InlineData(X509IncludeOption.None)]
public static void NoEkuExtension(X509IncludeOption includeOption)
{
CertLoader loader = Certificates.RSA2048SignatureOnly;
DateTimeOffset referenceTime;
using (X509Certificate2 cert = loader.GetCertificate())
{
referenceTime = cert.NotAfter.AddDays(-1);
Assert.Equal(0, cert.Extensions.OfType<X509EnhancedKeyUsageExtension>().Count());
}
CustomBuild_CertMismatch(
loader,
referenceTime,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain)]
[InlineData(X509IncludeOption.None)]
public static void TwoEkuExtensions(X509IncludeOption includeOption)
{
CertLoader loader = Certificates.TwoEkuTsaCert;
DateTimeOffset referenceTime;
using (X509Certificate2 cert = loader.GetCertificate())
{
referenceTime = cert.NotAfter.AddDays(-1);
var ekuExts = cert.Extensions.OfType<X509EnhancedKeyUsageExtension>().ToList();
Assert.Equal(2, ekuExts.Count);
// Make sure we're validating that "early success" doesn't happen.
Assert.Contains(
Oids.TimeStampingPurpose,
ekuExts[0].EnhancedKeyUsages.OfType<Oid>().Select(o => o.Value));
}
CustomBuild_CertMismatch(
loader,
referenceTime,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain)]
[InlineData(X509IncludeOption.None)]
public static void NonCriticalEkuExtension(X509IncludeOption includeOption)
{
CertLoader loader = Certificates.NonCriticalTsaEku;
DateTimeOffset referenceTime;
using (X509Certificate2 cert = loader.GetCertificate())
{
referenceTime = cert.NotAfter.AddDays(-1);
var ekuExts = cert.Extensions.OfType<X509EnhancedKeyUsageExtension>().ToList();
Assert.Equal(1, ekuExts.Count);
Assert.False(ekuExts[0].Critical, "ekuExts[0].Critical");
}
CustomBuild_CertMismatch(
loader,
referenceTime,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
[Theory]
[InlineData(X509IncludeOption.WholeChain)]
[InlineData(X509IncludeOption.None)]
public static void NoTsaEku(X509IncludeOption includeOption)
{
CertLoader loader = Certificates.TlsClientServerCert;
DateTimeOffset referenceTime;
using (X509Certificate2 cert = loader.GetCertificate())
{
referenceTime = cert.NotAfter.AddDays(-1);
}
CustomBuild_CertMismatch(
loader,
referenceTime,
SigningCertificateOption.ValidHashNoName,
SigningCertificateOption.Omit,
includeOption: includeOption);
}
// Builds a custom token whose signing-certificate attributes are expected to
// match the TSA certificate, then asserts that it decodes and that hash
// verification succeeds against that certificate.
private static void CustomBuild_CertMatch(
    CertLoader loader,
    DateTimeOffset referenceTime,
    SigningCertificateOption v1Option,
    SigningCertificateOption v2Option,
    HashAlgorithmName v2AlgorithmName = default,
    X509IncludeOption includeOption = default,
    SubjectIdentifierType identifierType = SubjectIdentifierType.IssuerAndSerialNumber)
{
    byte[] tokenBytes = BuildCustomToken(
        loader,
        referenceTime,
        v1Option,
        v2Option,
        v2AlgorithmName,
        includeOption,
        identifierType);

    Rfc3161TimestampToken token;
    Assert.True(Rfc3161TimestampToken.TryDecode(tokenBytes, out token, out int bytesRead));

    // A matching token must consume the full input and keep the timestamp.
    Assert.Equal(tokenBytes.Length, bytesRead);
    Assert.NotNull(token);
    Assert.Equal(referenceTime, token.TokenInfo.Timestamp);

    using (X509Certificate2 cert = Certificates.ValidLookingTsaCert.GetCertificate())
    {
        Assert.True(
            token.VerifySignatureForHash(
                token.TokenInfo.GetMessageHash().Span,
                token.TokenInfo.HashAlgorithmId,
                out X509Certificate2 signer,
                new X509Certificate2Collection(cert)));

        Assert.Equal(cert, signer);
    }
}
// Builds a custom token whose signing-certificate attributes do NOT match the
// TSA certificate (or whose certificate fails TSA requirements) and asserts
// the failure. Depending on how the mismatch is detectable, failure shows up
// either as TryDecode rejecting the token outright or as verification
// returning false after a successful decode.
private static void CustomBuild_CertMismatch(
    CertLoader loader,
    DateTimeOffset referenceTime,
    SigningCertificateOption v1Option,
    SigningCertificateOption v2Option,
    HashAlgorithmName v2AlgorithmName = default,
    X509IncludeOption includeOption = default,
    SubjectIdentifierType identifierType = SubjectIdentifierType.IssuerAndSerialNumber)
{
    byte[] tokenBytes = BuildCustomToken(
        loader,
        referenceTime,
        v1Option,
        v2Option,
        v2AlgorithmName,
        includeOption,
        identifierType);

    Rfc3161TimestampToken token;

    // When no certificate is embedded the decoder cannot check the cert hash,
    // so decode succeeds and only verification fails...
    bool willParse = includeOption == X509IncludeOption.None;

    if (willParse && identifierType == SubjectIdentifierType.IssuerAndSerialNumber)
    {
        // ...unless the signer is identified by issuer+serial, which is matched
        // against the ESSCertId(V2) IssuerSerial directly and rejects the token.
        switch (v1Option)
        {
            case SigningCertificateOption.ValidHashWithInvalidName:
            case SigningCertificateOption.ValidHashWithInvalidSerial:
            case SigningCertificateOption.InvalidHashWithInvalidName:
            case SigningCertificateOption.InvalidHashWithInvalidSerial:
                willParse = false;
                break;
        }

        switch (v2Option)
        {
            case SigningCertificateOption.ValidHashWithInvalidName:
            case SigningCertificateOption.ValidHashWithInvalidSerial:
            case SigningCertificateOption.InvalidHashWithInvalidName:
            case SigningCertificateOption.InvalidHashWithInvalidSerial:
                willParse = false;
                break;
        }
    }

    if (willParse)
    {
        Assert.True(Rfc3161TimestampToken.TryDecode(tokenBytes, out token, out int bytesRead));
        Assert.NotNull(token);
        Assert.Equal(tokenBytes.Length, bytesRead);

        using (X509Certificate2 cert = loader.GetCertificate())
        {
            // Decoded, but the candidate certificate must be rejected.
            Assert.False(
                token.VerifySignatureForHash(
                    token.TokenInfo.GetMessageHash().Span,
                    token.TokenInfo.HashAlgorithmId,
                    out X509Certificate2 signer,
                    new X509Certificate2Collection(cert)));

            Assert.Null(signer);
        }
    }
    else
    {
        Assert.False(Rfc3161TimestampToken.TryDecode(tokenBytes, out token, out int bytesRead));
        Assert.Null(token);
        Assert.Equal(0, bytesRead);
    }
}
// Hand-builds a signed timestamp token so the tests can produce deliberately
// malformed or mismatched SigningCertificate (ESSCertId, RFC 2634) and
// SigningCertificateV2 (ESSCertIdV2, RFC 5035) signed attributes. The
// attribute payloads are assembled as raw DER byte arrays.
private static byte[] BuildCustomToken(
    CertLoader cert,
    DateTimeOffset timestamp,
    SigningCertificateOption v1Option,
    SigningCertificateOption v2Option,
    HashAlgorithmName v2DigestAlg=default,
    X509IncludeOption includeOption=X509IncludeOption.ExcludeRoot,
    SubjectIdentifierType identifierType=SubjectIdentifierType.IssuerAndSerialNumber)
{
    long accuracyMicroSeconds = (long)(TimeSpan.FromMinutes(1).TotalMilliseconds * 1000);

    // Use the current tick count as a unique, big-endian serial number.
    byte[] serialNumber = BitConverter.GetBytes(DateTimeOffset.UtcNow.Ticks);
    Array.Reverse(serialNumber);

    // Minimal TSTInfo: dummy policy OID and an all-zero SHA-384-sized hash.
    Rfc3161TimestampTokenInfo info = new Rfc3161TimestampTokenInfo(
        new Oid("0.0", "0.0"),
        new Oid(Oids.Sha384),
        new byte[384 / 8],
        serialNumber,
        timestamp,
        accuracyMicroSeconds,
        isOrdering: true);

    ContentInfo contentInfo = new ContentInfo(new Oid(Oids.TstInfo, Oids.TstInfo), info.Encode());
    SignedCms cms = new SignedCms(contentInfo);

    using (X509Certificate2 tsaCert = cert.TryGetCertificateWithPrivateKey())
    {
        CmsSigner signer = new CmsSigner(identifierType, tsaCert)
        {
            IncludeOption = includeOption
        };

        if (v1Option != SigningCertificateOption.Omit)
        {
            ExpandOption(v1Option, out bool validHash, out bool skipIssuerSerial, out bool validName, out bool validSerial);

            // simple SigningCertificate
            // Template: SEQUENCE > SEQUENCE OF > SEQUENCE > OCTET STRING(20 zero bytes).
            byte[] signingCertificateV1Bytes =
                "301A3018301604140000000000000000000000000000000000000000".HexToByteArray();

            if (validHash)
            {
                // ESSCertId always uses SHA-1; overwrite the zero placeholder.
                using (SHA1 hasher = SHA1.Create())
                {
                    byte[] hash = hasher.ComputeHash(tsaCert.RawData);

                    Buffer.BlockCopy(
                        hash,
                        0,
                        signingCertificateV1Bytes,
                        signingCertificateV1Bytes.Length - hash.Length,
                        hash.Length);
                }
            }

            if (!skipIssuerSerial)
            {
                byte[] footer = BuildIssuerAndSerialNumber(tsaCert, validName, validSerial);

                // Patch the three nested SEQUENCE length bytes (offsets 1, 3, 5)
                // to account for the appended IssuerSerial.
                signingCertificateV1Bytes[1] += (byte)footer.Length;
                signingCertificateV1Bytes[3] += (byte)footer.Length;
                signingCertificateV1Bytes[5] += (byte)footer.Length;

                // Single-byte length encoding only works up to 127.
                Assert.InRange(signingCertificateV1Bytes[1], 0, 127);

                signingCertificateV1Bytes = signingCertificateV1Bytes.Concat(footer).ToArray();
            }

            // id-aa-signingCertificate
            signer.SignedAttributes.Add(
                new AsnEncodedData("1.2.840.113549.1.9.16.2.12", signingCertificateV1Bytes));
        }

        if (v2Option != SigningCertificateOption.Omit)
        {
            byte[] attrBytes;
            byte[] algBytes = Array.Empty<byte>();
            byte[] hashBytes;
            byte[] issuerNameBytes = Array.Empty<byte>();

            if (v2DigestAlg != default)
            {
                // Pre-encoded AlgorithmIdentifier SEQUENCEs (with NULL params).
                switch (v2DigestAlg.Name)
                {
                    case "MD5":
                        algBytes = "300C06082A864886F70D02050500".HexToByteArray();
                        break;
                    case "SHA1":
                        algBytes = "300906052B0E03021A0500".HexToByteArray();
                        break;
                    case "SHA256":
                        // Invalid under DER, because it's the default.
                        algBytes = "300D06096086480165030402010500".HexToByteArray();
                        break;
                    case "SHA384":
                        algBytes = "300D06096086480165030402020500".HexToByteArray();
                        break;
                    case "SHA512":
                        algBytes = "300D06096086480165030402030500".HexToByteArray();
                        break;
                    default:
                        throw new NotSupportedException(v2DigestAlg.Name);
                }
            }
            else
            {
                // ESSCertIdV2 default digest algorithm; omitted from the encoding.
                v2DigestAlg = HashAlgorithmName.SHA256;
            }

            hashBytes = tsaCert.GetCertHash(v2DigestAlg);

            ExpandOption(v2Option, out bool validHash, out bool skipIssuerSerial, out bool validName, out bool validSerial);

            if (!validHash)
            {
                hashBytes[0] ^= 0xFF;
            }

            if (!skipIssuerSerial)
            {
                issuerNameBytes = BuildIssuerAndSerialNumber(tsaCert, validName, validSerial);
            }

            // hashBytes hasn't been wrapped in an OCTET STRING yet, so add 2 more.
            int payloadSize = algBytes.Length + hashBytes.Length + issuerNameBytes.Length + 2;
            Assert.InRange(payloadSize, 0, 123);

            attrBytes = new byte[payloadSize + 6];
            int index = 0;

            // SEQUENCE (SigningCertificateV2)
            attrBytes[index++] = 0x30;
            attrBytes[index++] = (byte)(payloadSize + 4);

            // SEQUENCE OF => certs
            attrBytes[index++] = 0x30;
            attrBytes[index++] = (byte)(payloadSize + 2);

            // SEQUENCE (ESSCertIdV2)
            attrBytes[index++] = 0x30;
            attrBytes[index++] = (byte)payloadSize;

            Buffer.BlockCopy(algBytes, 0, attrBytes, index, algBytes.Length);
            index += algBytes.Length;

            // OCTET STRING (Hash)
            attrBytes[index++] = 0x04;
            attrBytes[index++] = (byte)hashBytes.Length;
            Buffer.BlockCopy(hashBytes, 0, attrBytes, index, hashBytes.Length);
            index += hashBytes.Length;

            Buffer.BlockCopy(issuerNameBytes, 0, attrBytes, index, issuerNameBytes.Length);

            // id-aa-signingCertificateV2
            signer.SignedAttributes.Add(
                new AsnEncodedData("1.2.840.113549.1.9.16.2.47", attrBytes));
        }

        cms.ComputeSignature(signer);
    }

    return cms.Encode();
}
// Encodes an ESS IssuerSerial structure (GeneralNames wrapping a directoryName
// plus the certificate serial) for the signing-certificate attributes, with
// optional deliberate name/serial mismatches.
private static byte[] BuildIssuerAndSerialNumber(X509Certificate2 tsaCert, bool validName, bool validSerial)
{
    byte[] issuerNameBytes;

    if (validName)
    {
        issuerNameBytes = tsaCert.IssuerName.RawData;
    }
    else
    {
        issuerNameBytes = new X500DistinguishedName("CN=No Match").RawData;
    }

    // GetSerialNumber() returns little-endian bytes.
    byte[] serialBytes = tsaCert.GetSerialNumber();

    if (validSerial)
    {
        // Reverse into the big-endian order DER INTEGERs use.
        Array.Reverse(serialBytes);
    }
    else
    {
        // If the byte sequence was a palindrome it's still a match,
        // so flip some bits.
        serialBytes[0] ^= 0x7F;
    }

    // All lengths below are encoded as single bytes, so keep the total small.
    if (issuerNameBytes.Length + serialBytes.Length > 80)
    {
        throw new NotSupportedException(
            "Issuer name and serial length are bigger than this code can handle");
    }

    // SEQUENCE
    //   SEQUENCE
    //     CONTEXT-SPECIFIC 4
    //       [IssuerName]
    //   INTEGER
    //     [SerialNumber, big endian]
    byte[] issuerAndSerialNumber = new byte[issuerNameBytes.Length + serialBytes.Length + 8];
    issuerAndSerialNumber[0] = 0x30;
    issuerAndSerialNumber[1] = (byte)(issuerAndSerialNumber.Length - 2);

    issuerAndSerialNumber[2] = 0x30;
    issuerAndSerialNumber[3] = (byte)(issuerNameBytes.Length + 2);

    // [4] = directoryName choice of GeneralName.
    issuerAndSerialNumber[4] = 0xA4;
    issuerAndSerialNumber[5] = (byte)(issuerNameBytes.Length);
    Buffer.BlockCopy(issuerNameBytes, 0, issuerAndSerialNumber, 6, issuerNameBytes.Length);

    issuerAndSerialNumber[issuerNameBytes.Length + 6] = 0x02;
    issuerAndSerialNumber[issuerNameBytes.Length + 7] = (byte)serialBytes.Length;
    Buffer.BlockCopy(serialBytes, 0, issuerAndSerialNumber, issuerNameBytes.Length + 8, serialBytes.Length);

    return issuerAndSerialNumber;
}
// Translates a SigningCertificateOption into the four booleans that drive
// attribute construction: whether the cert hash is correct, whether the
// IssuerSerial is omitted, and — when present — whether its name and serial
// parts match the certificate.
private static void ExpandOption(
    SigningCertificateOption option,
    out bool validHash,
    out bool skipIssuerSerial,
    out bool validName,
    out bool validSerial)
{
    Assert.NotEqual(SigningCertificateOption.Omit, option);

    // Enum members are declared with every "valid hash" value preceding
    // InvalidHashNoName, so an ordinal comparison suffices.
    validHash = option < SigningCertificateOption.InvalidHashNoName;

    skipIssuerSerial =
        option == SigningCertificateOption.ValidHashNoName ||
        option == SigningCertificateOption.InvalidHashNoName;

    if (skipIssuerSerial)
    {
        validName = validSerial = false;
        return;
    }

    switch (option)
    {
        case SigningCertificateOption.ValidHashWithName:
        case SigningCertificateOption.InvalidHashWithName:
            // Fully matching IssuerSerial.
            validName = true;
            validSerial = true;
            break;
        case SigningCertificateOption.ValidHashWithInvalidSerial:
        case SigningCertificateOption.InvalidHashWithInvalidSerial:
            validName = true;
            validSerial = false;
            break;
        default:
            // ValidHashWithInvalidName / InvalidHashWithInvalidName.
            validName = false;
            validSerial = true;
            break;
    }
}
// Enumerates the ways a test can shape the signing-certificate attribute.
// NOTE: declaration order is significant — ExpandOption assumes every member
// declared before InvalidHashNoName uses a valid certificate hash.
public enum SigningCertificateOption
{
    Omit,                         // no signing-certificate attribute at all
    ValidHashNoName,              // valid hash, IssuerSerial omitted
    ValidHashWithName,            // valid hash, correct issuer name and serial
    ValidHashWithInvalidName,     // valid hash, wrong issuer name
    ValidHashWithInvalidSerial,   // valid hash, wrong serial number
    InvalidHashNoName,            // bad hash, IssuerSerial omitted
    InvalidHashWithName,          // bad hash, correct issuer name and serial
    InvalidHashWithInvalidName,   // bad hash, wrong issuer name
    InvalidHashWithInvalidSerial, // bad hash, wrong serial number
}
}
}
| zhenlan/corefx | src/System.Security.Cryptography.Pkcs/tests/Rfc3161/TimestampTokenTests.cs | C# | mit | 42,729 |
<?php
// Shorthand for the platform directory separator.
if(!defined('DS')) define('DS', DIRECTORY_SEPARATOR);
// MB is 1 when the mbstring extension is available, 0 otherwise.
if(!defined('MB')) define('MB', (int)function_exists('mb_get_info'));
// UTF-8 byte-order mark.
if(!defined('BOM')) define('BOM', "\xEF\xBB\xBF");
// polyfill for new sort flag (string placeholder on PHP builds lacking it)
if(!defined('SORT_NATURAL')) define('SORT_NATURAL', 'SORT_NATURAL');
// a super simple autoloader
/**
 * Registers a classmap-backed SPL autoloader.
 *
 * @param array       $classmap map of lowercase class names to file paths
 * @param string|null $base     optional base directory prepended to each path
 */
function load($classmap, $base = null) {
  spl_autoload_register(function($class) use ($classmap, $base) {
    $key = strtolower($class);
    if(!isset($classmap[$key])) return false;
    $file = $classmap[$key];
    // Truthiness check kept on purpose: an empty base is ignored.
    if($base) $file = $base . DS . $file;
    include($file);
  });
}
// auto-load all toolkit classes
// Maps lowercase class names (including namespaced ones such as
// cache\driver\apc) to their file locations under lib/; third-party
// libraries under vendors/ are listed last.
load(array(
  // classes
  'a' => __DIR__ . DS . 'lib' . DS . 'a.php',
  'bitmask' => __DIR__ . DS . 'lib' . DS . 'bitmask.php',
  'brick' => __DIR__ . DS . 'lib' . DS . 'brick.php',
  'c' => __DIR__ . DS . 'lib' . DS . 'c.php',
  'cookie' => __DIR__ . DS . 'lib' . DS . 'cookie.php',
  'cache' => __DIR__ . DS . 'lib' . DS . 'cache.php',
  'cache\\driver' => __DIR__ . DS . 'lib' . DS . 'cache' . DS . 'driver.php',
  'cache\\driver\\apc' => __DIR__ . DS . 'lib' . DS . 'cache' . DS . 'driver' . DS . 'apc.php',
  'cache\\driver\\file' => __DIR__ . DS . 'lib' . DS . 'cache' . DS . 'driver' . DS . 'file.php',
  'cache\\driver\\memcached' => __DIR__ . DS . 'lib' . DS . 'cache' . DS . 'driver' . DS . 'memcached.php',
  'cache\\driver\\mock' => __DIR__ . DS . 'lib' . DS . 'cache' . DS . 'driver' . DS . 'mock.php',
  'cache\\driver\\session' => __DIR__ . DS . 'lib' . DS . 'cache' . DS . 'driver' . DS . 'session.php',
  'cache\\value' => __DIR__ . DS . 'lib' . DS . 'cache' . DS . 'value.php',
  'collection' => __DIR__ . DS . 'lib' . DS . 'collection.php',
  'crypt' => __DIR__ . DS . 'lib' . DS . 'crypt.php',
  'data' => __DIR__ . DS . 'lib' . DS . 'data.php',
  'database' => __DIR__ . DS . 'lib' . DS . 'database.php',
  'database\\query' => __DIR__ . DS . 'lib' . DS . 'database' . DS . 'query.php',
  'db' => __DIR__ . DS . 'lib' . DS . 'db.php',
  'detect' => __DIR__ . DS . 'lib' . DS . 'detect.php',
  'dimensions' => __DIR__ . DS . 'lib' . DS . 'dimensions.php',
  'dir' => __DIR__ . DS . 'lib' . DS . 'dir.php',
  'email' => __DIR__ . DS . 'lib' . DS . 'email.php',
  'embed' => __DIR__ . DS . 'lib' . DS . 'embed.php',
  'error' => __DIR__ . DS . 'lib' . DS . 'error.php',
  'errorreporting' => __DIR__ . DS . 'lib' . DS . 'errorreporting.php',
  'escape' => __DIR__ . DS . 'lib' . DS . 'escape.php',
  'exif' => __DIR__ . DS . 'lib' . DS . 'exif.php',
  'exif\\camera' => __DIR__ . DS . 'lib' . DS . 'exif' . DS . 'camera.php',
  'exif\\location' => __DIR__ . DS . 'lib' . DS . 'exif' . DS . 'location.php',
  'f' => __DIR__ . DS . 'lib' . DS . 'f.php',
  'folder' => __DIR__ . DS . 'lib' . DS . 'folder.php',
  'header' => __DIR__ . DS . 'lib' . DS . 'header.php',
  'html' => __DIR__ . DS . 'lib' . DS . 'html.php',
  'i' => __DIR__ . DS . 'lib' . DS . 'i.php',
  'l' => __DIR__ . DS . 'lib' . DS . 'l.php',
  'media' => __DIR__ . DS . 'lib' . DS . 'media.php',
  'obj' => __DIR__ . DS . 'lib' . DS . 'obj.php',
  'pagination' => __DIR__ . DS . 'lib' . DS . 'pagination.php',
  'password' => __DIR__ . DS . 'lib' . DS . 'password.php',
  'r' => __DIR__ . DS . 'lib' . DS . 'r.php',
  'redirect' => __DIR__ . DS . 'lib' . DS . 'redirect.php',
  'remote' => __DIR__ . DS . 'lib' . DS . 'remote.php',
  'response' => __DIR__ . DS . 'lib' . DS . 'response.php',
  'router' => __DIR__ . DS . 'lib' . DS . 'router.php',
  's' => __DIR__ . DS . 'lib' . DS . 's.php',
  'server' => __DIR__ . DS . 'lib' . DS . 'server.php',
  'silo' => __DIR__ . DS . 'lib' . DS . 'silo.php',
  'sql' => __DIR__ . DS . 'lib' . DS . 'sql.php',
  'str' => __DIR__ . DS . 'lib' . DS . 'str.php',
  'system' => __DIR__ . DS . 'lib' . DS . 'system.php',
  'thumb' => __DIR__ . DS . 'lib' . DS . 'thumb.php',
  'timer' => __DIR__ . DS . 'lib' . DS . 'timer.php',
  'toolkit' => __DIR__ . DS . 'lib' . DS . 'toolkit.php',
  'tpl' => __DIR__ . DS . 'lib' . DS . 'tpl.php',
  'upload' => __DIR__ . DS . 'lib' . DS . 'upload.php',
  'url' => __DIR__ . DS . 'lib' . DS . 'url.php',
  'v' => __DIR__ . DS . 'lib' . DS . 'v.php',
  'visitor' => __DIR__ . DS . 'lib' . DS . 'visitor.php',
  'xml' => __DIR__ . DS . 'lib' . DS . 'xml.php',
  'yaml' => __DIR__ . DS . 'lib' . DS . 'yaml.php',
  // vendors
  'spyc' => __DIR__ . DS . 'vendors' . DS . 'yaml' . DS . 'yaml.php',
  'abeautifulsite\\simpleimage' => __DIR__ . DS . 'vendors' . DS . 'abeautifulsite' . DS . 'SimpleImage.php',
  'mimereader' => __DIR__ . DS . 'vendors' . DS . 'mimereader' . DS . 'mimereader.php',
));
// load all helpers
include(__DIR__ . DS . 'helpers.php'); | muten84/luigibifulco.it | www.luigibifulco.it/blog/kirby/toolkit/bootstrap.php | PHP | cc0-1.0 | 5,950 |
/**
* Copyright (c) 2014-2017 by the respective copyright holders.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.smarthome.core.events;
/**
 * An {@link EventFilter} can be provided by an {@link EventSubscriber} in order
 * to receive specific {@link Event}s by an {@link EventPublisher} if the filter applies.
 *
 * @author Stefan Bußweiler - Initial contribution
 */
public interface EventFilter {

    /**
     * Apply the filter on an event. <p> This method is called for each subscribed {@link Event} of an
     * {@link EventSubscriber}. If the filter applies, the event will be dispatched to the
     * {@link EventSubscriber#receive(Event)} method.
     *
     * @param event the event (not null)
     * @return true if the filter criterion applies
     */
    boolean apply(Event event);
}
| AchimHentschel/smarthome | bundles/core/org.eclipse.smarthome.core/src/main/java/org/eclipse/smarthome/core/events/EventFilter.java | Java | epl-1.0 | 1,055 |
<?php
// Flag this request as an admin-context request before WordPress loads.
define('WP_ADMIN', TRUE);

// Bootstrap WordPress.  ABSPATH is already defined when this file is
// reached from within an existing WordPress load; otherwise fall back to
// the relative path.
if ( defined('ABSPATH') )
	require_once(ABSPATH . 'wp-load.php');
else
	require_once('../wp-load.php');

require_once(ABSPATH . 'wp-admin/includes/admin.php');

// Only authenticated users may trigger admin-post handlers.
if ( !wp_validate_auth_cookie() )
	wp_die(__('Cheatin’ uh?'));

// Form-handler responses must never be cached.
nocache_headers();

do_action('admin_init');

// Dispatch to a hook named after the submitted action,
// e.g. action=foo fires "admin_post_foo".
$action = 'admin_post';
if ( !empty($_REQUEST['action']) )
	$action .= '_' . $_REQUEST['action'];
do_action($action);
?> | localshred/dtraders | wp-admin/admin-post.php | PHP | gpl-2.0 | 432 |
/*
* Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "World.h"
// This is where scripts' loading functions should be declared:
// world
void AddSC_areatrigger_scripts();
void AddSC_emerald_dragons();
void AddSC_generic_creature();
void AddSC_go_scripts();
void AddSC_guards();
void AddSC_item_scripts();
void AddSC_npc_professions();
void AddSC_npc_innkeeper();
void AddSC_npcs_special();
void AddSC_achievement_scripts();
void AddSC_action_ip_logger();
void AddSC_scene_scripts();
// player
void AddSC_chat_log();
void AddSC_duel_reset();
// The name of this function should match:
// void Add${NameOfDirectory}Scripts()
// Registers every script in the scripts/World directory with the script
// management system.  Called once during server start-up.
void AddWorldScripts()
{
    AddSC_areatrigger_scripts();
    AddSC_emerald_dragons();
    AddSC_generic_creature();
    AddSC_go_scripts();
    AddSC_guards();
    AddSC_item_scripts();
    AddSC_npc_professions();
    AddSC_npc_innkeeper();
    AddSC_npcs_special();
    AddSC_achievement_scripts();
    AddSC_chat_log(); // location: scripts\World\chat_log.cpp
    AddSC_scene_scripts();
    // FIXME: This should be moved in a script validation hook.
    // To avoid duplicate code, we check once /*ONLY*/ if logging is permitted or not.
    // The IP-based action logger is only registered when enabled in the
    // world configuration, so disabled installations pay no cost for it.
    if (sWorld->getBoolConfig(CONFIG_IP_BASED_ACTION_LOGGING))
        AddSC_action_ip_logger(); // location: scripts\World\action_ip_logger.cpp
    AddSC_duel_reset();
}
| mohsen-star/TrinityCore-7.2 | src/server/scripts/World/world_script_loader.cpp | C++ | gpl-2.0 | 2,018 |
// binary.cc -- binary input files for gold
// Copyright (C) 2008-2016 Free Software Foundation, Inc.
// Written by Ian Lance Taylor <iant@google.com>.
// This file is part of gold.
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston,
// MA 02110-1301, USA.
#include "gold.h"
#include <cerrno>
#include <cstring>
#include "elfcpp.h"
#include "stringpool.h"
#include "fileread.h"
#include "output.h"
#include "binary.h"
// safe-ctype.h interferes with macros defined by the system <ctype.h>.
// Some C++ system headers might include <ctype.h> and rely on its macro
// definitions being intact. So make sure that safe-ctype.h is included
// only after any C++ system headers, whether directly here (above) or via
// other local header files (e.g. #include <string> in "binary.h").
#include "safe-ctype.h"
// Support for reading binary files as input. These become blobs in
// the final output. These files are treated as though they have a
// single .data section and define three symbols:
// _binary_FILENAME_start, _binary_FILENAME_end, _binary_FILENAME_size.
// The FILENAME is the name of the input file, with any
// non-alphanumeric character changed to an underscore.
// We implement this by creating an ELF file in memory.
namespace gold
{
// class Binary_to_elf.
// Records the target machine, word size, endianness and input file name.
// The in-memory ELF image (data_/filesize_) is built later by convert().
Binary_to_elf::Binary_to_elf(elfcpp::EM machine, int size, bool big_endian,
			     const std::string& filename)
  : elf_machine_(machine), size_(size), big_endian_(big_endian),
    filename_(filename), data_(NULL), filesize_(0)
{
}
// Frees the generated ELF image, if convert() ever built one.
Binary_to_elf::~Binary_to_elf()
{
  // delete[] of a null pointer is a well-defined no-op, so the explicit
  // null check the original carried is unnecessary.
  delete[] this->data_;
}
// Given FILENAME, create a buffer which looks like an ELF file with
// the contents of FILENAME as the contents of the only section.  The
// TASK parameters is mainly for debugging, and records who holds
// locks.

bool
Binary_to_elf::convert(const Task* task)
{
  // Dispatch to the instantiation matching the requested word size and
  // endianness.  Only combinations compiled into this linker exist;
  // anything else is a programming error (gold_unreachable).
  if (this->size_ == 32)
    {
      if (!this->big_endian_)
	{
#ifdef HAVE_TARGET_32_LITTLE
	  return this->sized_convert<32, false>(task);
#else
	  gold_unreachable();
#endif
	}
      else
	{
#ifdef HAVE_TARGET_32_BIG
	  return this->sized_convert<32, true>(task);
#else
	  gold_unreachable();
#endif
	}
    }
  else if (this->size_ == 64)
    {
      if (!this->big_endian_)
	{
#ifdef HAVE_TARGET_64_LITTLE
	  return this->sized_convert<64, false>(task);
#else
	  gold_unreachable();
#endif
	}
      else
	{
#ifdef HAVE_TARGET_64_BIG
	  return this->sized_convert<64, true>(task);
#else
	  gold_unreachable();
#endif
	}
    }
  else
    gold_unreachable();
}
// We are going to create:
// * The ELF file header.
// * Five sections: null section, .data, .symtab, .strtab, .shstrtab
// * The contents of the file.
// * Four symbols: null, begin, end, size.
// * Three symbol names.
// * Four section names.

template<int size, bool big_endian>
bool
Binary_to_elf::sized_convert(const Task* task)
{
  // Read the input file.
  File_read f;
  if (!f.open(task, this->filename_))
    {
      gold_error(_("cannot open %s: %s:"), this->filename_.c_str(),
		 strerror(errno));
      return false;
    }
  section_size_type filesize = convert_to_section_size_type(f.filesize());
  const unsigned char* fileview;
  if (filesize == 0)
    fileview = NULL;
  else
    fileview = f.get_view(0, 0, filesize, false, false);

  // The .data contents are padded up to the target word size.
  unsigned int align;
  if (size == 32)
    align = 4;
  else if (size == 64)
    align = 8;
  else
    gold_unreachable();
  section_size_type aligned_filesize = align_address(filesize, align);

  // Build the stringpool for the symbol table.  The file name is mangled
  // so it can appear inside a C identifier: every non-alphanumeric
  // character becomes an underscore.
  std::string mangled_name = this->filename_;
  for (std::string::iterator p = mangled_name.begin();
       p != mangled_name.end();
       ++p)
    if (!ISALNUM(*p))
      *p = '_';
  mangled_name = "_binary_" + mangled_name;
  std::string start_symbol_name = mangled_name + "_start";
  std::string end_symbol_name = mangled_name + "_end";
  std::string size_symbol_name = mangled_name + "_size";
  Stringpool strtab;
  strtab.add(start_symbol_name.c_str(), false, NULL);
  strtab.add(end_symbol_name.c_str(), false, NULL);
  strtab.add(size_symbol_name.c_str(), false, NULL);
  strtab.set_string_offsets();

  // Build the stringpool for the section name table.
  Stringpool shstrtab;
  shstrtab.add(".data", false, NULL);
  shstrtab.add(".symtab", false, NULL);
  shstrtab.add(".strtab", false, NULL);
  shstrtab.add(".shstrtab", false, NULL);
  shstrtab.set_string_offsets();

  // Work out the size of the generated file, and the offsets of the
  // various sections, and allocate a buffer.
  const int sym_size = elfcpp::Elf_sizes<size>::sym_size;
  size_t output_size = (elfcpp::Elf_sizes<size>::ehdr_size
			+ 5 * elfcpp::Elf_sizes<size>::shdr_size);
  size_t data_offset = output_size;
  output_size += aligned_filesize;
  size_t symtab_offset = output_size;
  output_size += 4 * sym_size;
  size_t strtab_offset = output_size;
  output_size += strtab.get_strtab_size();
  size_t shstrtab_offset = output_size;
  output_size += shstrtab.get_strtab_size();
  unsigned char* buffer = new unsigned char[output_size];

  // Write out the data.
  unsigned char* pout = buffer;
  this->write_file_header<size, big_endian>(&pout);
  this->write_section_header<size, big_endian>("", &shstrtab, elfcpp::SHT_NULL,
					       0, 0, 0, 0, 0,
					       0, 0, &pout);
  // Having the section be named ".data", having it be writable, and
  // giving it an alignment of 1 is because the GNU linker does it
  // that way, and existing linker script expect it.
  this->write_section_header<size, big_endian>(".data", &shstrtab,
					       elfcpp::SHT_PROGBITS,
					       (elfcpp::SHF_ALLOC
						| elfcpp::SHF_WRITE),
					       data_offset,
					       filesize, 0, 0,
					       1, 0, &pout);
  this->write_section_header<size, big_endian>(".symtab", &shstrtab,
					       elfcpp::SHT_SYMTAB,
					       0, symtab_offset, 4 * sym_size,
					       3, 1, align, sym_size, &pout);
  this->write_section_header<size, big_endian>(".strtab", &shstrtab,
					       elfcpp::SHT_STRTAB,
					       0, strtab_offset,
					       strtab.get_strtab_size(),
					       0, 0, 1, 0, &pout);
  this->write_section_header<size, big_endian>(".shstrtab", &shstrtab,
					       elfcpp::SHT_STRTAB,
					       0, shstrtab_offset,
					       shstrtab.get_strtab_size(),
					       0, 0, 1, 0, &pout);
  if (filesize > 0)
    {
      memcpy(pout, fileview, filesize);
      pout += filesize;
      // Zero-fill the alignment padding after the file contents.
      memset(pout, 0, aligned_filesize - filesize);
      pout += aligned_filesize - filesize;
    }
  // Symbol 0 is the mandatory null symbol.  The start symbol (value 0)
  // and end symbol (value == filesize) live in section 1 (.data); the
  // size symbol is absolute with the file size as its value.
  this->write_symbol<size, big_endian>("", &strtab, 0, 0, 0, &pout);
  this->write_symbol<size, big_endian>(start_symbol_name, &strtab, 0, filesize,
				       1, &pout);
  this->write_symbol<size, big_endian>(end_symbol_name, &strtab, filesize, 0,
				       1, &pout);
  this->write_symbol<size, big_endian>(size_symbol_name, &strtab, filesize, 0,
				       elfcpp::SHN_ABS, &pout);
  strtab.write_to_buffer(pout, strtab.get_strtab_size());
  pout += strtab.get_strtab_size();
  shstrtab.write_to_buffer(pout, shstrtab.get_strtab_size());
  pout += shstrtab.get_strtab_size();
  gold_assert(static_cast<size_t>(pout - buffer) == output_size);
  this->data_ = buffer;
  this->filesize_ = output_size;
  f.unlock(task);
  return true;
}
// Write out the file header.  Advances *PPOUT past the bytes written.

template<int size, bool big_endian>
void
Binary_to_elf::write_file_header(unsigned char** ppout)
{
  elfcpp::Ehdr_write<size, big_endian> oehdr(*ppout);
  unsigned char e_ident[elfcpp::EI_NIDENT];
  memset(e_ident, 0, elfcpp::EI_NIDENT);
  e_ident[elfcpp::EI_MAG0] = elfcpp::ELFMAG0;
  e_ident[elfcpp::EI_MAG1] = elfcpp::ELFMAG1;
  e_ident[elfcpp::EI_MAG2] = elfcpp::ELFMAG2;
  e_ident[elfcpp::EI_MAG3] = elfcpp::ELFMAG3;
  if (size == 32)
    e_ident[elfcpp::EI_CLASS] = elfcpp::ELFCLASS32;
  else if (size == 64)
    e_ident[elfcpp::EI_CLASS] = elfcpp::ELFCLASS64;
  else
    gold_unreachable();
  e_ident[elfcpp::EI_DATA] = (big_endian
			      ? elfcpp::ELFDATA2MSB
			      : elfcpp::ELFDATA2LSB);
  e_ident[elfcpp::EI_VERSION] = elfcpp::EV_CURRENT;
  oehdr.put_e_ident(e_ident);
  // A relocatable object with no program headers; the section headers
  // immediately follow the file header.
  oehdr.put_e_type(elfcpp::ET_REL);
  oehdr.put_e_machine(this->elf_machine_);
  oehdr.put_e_version(elfcpp::EV_CURRENT);
  oehdr.put_e_entry(0);
  oehdr.put_e_phoff(0);
  oehdr.put_e_shoff(elfcpp::Elf_sizes<size>::ehdr_size);
  oehdr.put_e_flags(0);
  oehdr.put_e_ehsize(elfcpp::Elf_sizes<size>::ehdr_size);
  oehdr.put_e_phentsize(0);
  oehdr.put_e_phnum(0);
  oehdr.put_e_shentsize(elfcpp::Elf_sizes<size>::shdr_size);
  // Five sections (null, .data, .symtab, .strtab, .shstrtab); the
  // section name table is section 4.
  oehdr.put_e_shnum(5);
  oehdr.put_e_shstrndx(4);

  *ppout += elfcpp::Elf_sizes<size>::ehdr_size;
}
// Write out a section header with the given fields, looking the name up
// in SHSTRTAB.  Advances *PPOUT past the bytes written.

template<int size, bool big_endian>
void
Binary_to_elf::write_section_header(
    const char* name,
    const Stringpool* shstrtab,
    elfcpp::SHT type,
    unsigned int flags,
    section_size_type offset,
    section_size_type section_size,
    unsigned int link,
    unsigned int info,
    unsigned int addralign,
    unsigned int entsize,
    unsigned char** ppout)
{
  elfcpp::Shdr_write<size, big_endian> oshdr(*ppout);
  // The null section's empty name maps to string-table offset 0.
  oshdr.put_sh_name(*name == '\0' ? 0 : shstrtab->get_offset(name));
  oshdr.put_sh_type(type);
  oshdr.put_sh_flags(flags);
  oshdr.put_sh_addr(0);
  oshdr.put_sh_offset(offset);
  oshdr.put_sh_size(section_size);
  oshdr.put_sh_link(link);
  oshdr.put_sh_info(info);
  oshdr.put_sh_addralign(addralign);
  oshdr.put_sh_entsize(entsize);

  *ppout += elfcpp::Elf_sizes<size>::shdr_size;
}
// Write out a symbol table entry.  Advances *PPOUT past the bytes written.

template<int size, bool big_endian>
void
Binary_to_elf::write_symbol(
    const std::string& name,
    const Stringpool* strtab,
    section_size_type value,
    typename elfcpp::Elf_types<32>::Elf_WXword st_size,
    unsigned int shndx,
    unsigned char** ppout)
{
  unsigned char* pout = *ppout;
  elfcpp::Sym_write<size, big_endian> osym(pout);
  osym.put_st_name(name.empty() ? 0 : strtab->get_offset(name.c_str()));
  osym.put_st_value(value);
  osym.put_st_size(st_size);
  // The unnamed entry is the mandatory local null symbol; every named
  // symbol is global and untyped.
  osym.put_st_info(name.empty() ? elfcpp::STB_LOCAL : elfcpp::STB_GLOBAL,
		   elfcpp::STT_NOTYPE);
  osym.put_st_other(elfcpp::STV_DEFAULT, 0);
  osym.put_st_shndx(shndx);

  *ppout += elfcpp::Elf_sizes<size>::sym_size;
}
} // End namespace gold.
| swigger/gdb-ios | gold/binary.cc | C++ | gpl-2.0 | 10,859 |
<?php
namespace app\properties\handlers\datepicker;
use app\properties\handlers\AbstractHandler;
class DatepickerProperty extends AbstractHandler
{
} | rinodung/yii2-shop-cms | properties/handlers/datepicker/DatepickerProperty.php | PHP | gpl-3.0 | 153 |
// -----------------------------------------------------------------------------
// Globals
// Major version of Flash required
var requiredMajorVersion = 8;
// Minor version of Flash required
var requiredMinorVersion = 0;
// Revision of Flash required
var requiredRevision = 0;
// the version of javascript supported
var jsVersion = 1.0;
// -----------------------------------------------------------------------------
// Simple user-agent sniffing used to choose between the VBScript (IE/Win)
// and JavaScript plugin-array detection paths.
var isIE = (navigator.appVersion.indexOf("MSIE") != -1) ? true : false;
var isWin = (navigator.appVersion.toLowerCase().indexOf("win") != -1) ? true : false;
var isOpera = (navigator.userAgent.indexOf("Opera") != -1) ? true : false;
// NOTE(review): overwrites the 1.0 default above unconditionally — presumably
// a leftover from a <script language="javascript1.1"> capability check;
// confirm before relying on jsVersion.
jsVersion = 1.1;
// JavaScript helper required to detect Flash Player PlugIn version information.
// Returns "major.minor.revision" for browsers exposing navigator.plugins,
// a bare Flash major version (2/3/4) for WebTV/MSNTV user agents, or -1 when
// no version can be detected.  The parameter i is unused (kept for
// signature-compatibility with the VBScript counterpart VBGetSwfVer).
// FIX: all working variables are now declared with `var`; the original
// assigned them as implicit globals, which leaks state and throws in
// strict mode.
function JSGetSwfVer(i){
    var flashVer = -1;
    // NS/Opera version >= 3 check for Flash plugin in plugin array
    if (navigator.plugins != null && navigator.plugins.length > 0) {
        if (navigator.plugins["Shockwave Flash 2.0"] || navigator.plugins["Shockwave Flash"]) {
            var swVer2 = navigator.plugins["Shockwave Flash 2.0"] ? " 2.0" : "";
            // description looks like e.g. "Shockwave Flash 9.0 r28"
            var flashDescription = navigator.plugins["Shockwave Flash" + swVer2].description;
            var descArray = flashDescription.split(" ");
            var tempArrayMajor = descArray[2].split(".");
            var versionMajor = tempArrayMajor[0];
            var versionMinor = tempArrayMajor[1];
            var tempArrayMinor;
            if ( descArray[3] != "" ) {
                tempArrayMinor = descArray[3].split("r");
            } else {
                tempArrayMinor = descArray[4].split("r");
            }
            var versionRevision = tempArrayMinor[1] > 0 ? tempArrayMinor[1] : 0;
            flashVer = versionMajor + "." + versionMinor + "." + versionRevision;
        }
        // else: no Flash plugin registered — flashVer stays -1.
    }
    // MSN/WebTV 2.6 supports Flash 4
    else if (navigator.userAgent.toLowerCase().indexOf("webtv/2.6") != -1) flashVer = 4;
    // WebTV 2.5 supports Flash 3
    else if (navigator.userAgent.toLowerCase().indexOf("webtv/2.5") != -1) flashVer = 3;
    // older WebTV supports Flash 2
    else if (navigator.userAgent.toLowerCase().indexOf("webtv") != -1) flashVer = 2;
    // Can't detect in all other cases: flashVer stays -1.
    return flashVer;
}
// When called with reqMajorVer, reqMinorVer, reqRevision returns true if that
// version or greater is available, and false otherwise.
// FIXES: (1) the original fell off the end and returned undefined when every
// probe yielded 0 — now returns an explicit false; (2) locals are declared
// with `var` instead of leaking as implicit globals.
function DetectFlashVer(reqMajorVer, reqMinorVer, reqRevision)
{
    // 7.0r24 is compared as the float 7.24, mirroring versionNum below.
    var reqVer = parseFloat(reqMajorVer + "." + reqRevision);
    // loop backwards through the versions until we find the newest version
    for (var i = 25; i > 0; i--) {
        var versionStr;
        if (isIE && isWin && !isOpera) {
            versionStr = VBGetSwfVer(i);   // VBScript helper defined elsewhere (IE/Windows only)
        } else {
            versionStr = JSGetSwfVer(i);
        }
        if (versionStr == -1 ) {
            return false;
        } else if (versionStr != 0) {
            var versionArray;
            if (isIE && isWin && !isOpera) {
                // ActiveX reports e.g. "WIN 9,0,28,0" — take the comma list.
                var tempArray = versionStr.split(" ");
                var tempString = tempArray[1];
                versionArray = tempString.split(",");
            } else {
                versionArray = versionStr.split(".");
            }
            var versionMajor = versionArray[0];
            var versionMinor = versionArray[1];
            var versionRevision = versionArray[2];
            var versionString = versionMajor + "." + versionRevision; // 7.0r24 == 7.24
            var versionNum = parseFloat(versionString);
            // is the major.revision >= requested major.revision AND the minor version >= requested minor
            if ( (versionMajor > reqMajorVer) && (versionNum >= reqVer) ) {
                return true;
            } else {
                return ((versionNum >= reqVer && versionMinor >= reqMinorVer) ? true : false );
            }
        }
    }
    // No probe produced a usable version string.
    return false;
}
| zhiqinghuang/core | dotCMS/html/js/flash/detectFlash.js | JavaScript | gpl-3.0 | 3,505 |
#
# (c) 2016 Red Hat Inc.
#
# (c) 2017 Dell EMC.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.module_utils._text import to_text, to_bytes
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
    """Terminal (prompt/error handling) plugin for Dell EMC OS6 network devices."""

    # Patterns that mark the end of command output: a device prompt such as
    # "hostname#" / "hostname(config)#", or a Linux-style "[user@host ...]$".
    terminal_stdout_re = [
        re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
        re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$")
    ]

    # Patterns that classify command output as an error response.
    terminal_stderr_re = [
        re.compile(br"% ?Bad secret"),
        re.compile(br"(\bInterface is part of a port-channel\b)"),
        re.compile(br"(\bThe maximum number of users have already been created\b)|(\bUse '-' for range\b)"),
        re.compile(br"(?:incomplete|ambiguous) command", re.I),
        re.compile(br"connection timed out", re.I),
        re.compile(br"'[^']' +returned error code: ?\d+"),
        re.compile(br"Invalid|invalid.*$", re.I),
        re.compile(br"((\bout of range\b)|(\bnot found\b)|(\bCould not\b)|(\bUnable to\b)|(\bCannot\b)|(\bError\b)).*", re.I),
        re.compile(br"((\balready exists\b)|(\bnot exist\b)|(\bnot active\b)|(\bFailed\b)|(\bIncorrect\b)|(\bnot enabled\b)).*", re.I),
    ]

    # The device may ask a "(y/n)" question on first login; answer "y".
    terminal_initial_prompt = br"\(y/n\)"
    terminal_initial_answer = b"y"
    # NOTE(review): attribute spelling ("inital") presumably matches the hook
    # name expected by TerminalBase — verify before "fixing" the typo.
    terminal_inital_prompt_newline = False

    def on_open_shell(self):
        """Disable paging so multi-page output comes back in one response."""
        try:
            self._exec_cli_command(b'terminal length 0')
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')

    def on_become(self, passwd=None):
        """Enter privileged (enable) mode, supplying the enable password if given."""
        # A prompt ending in '#' means we are already privileged.
        if self._get_prompt().endswith(b'#'):
            return

        cmd = {u'command': u'enable'}
        if passwd:
            cmd[u'prompt'] = to_text(r"[\r\n]?password:$", errors='surrogate_or_strict')
            cmd[u'answer'] = passwd

        try:
            self._exec_cli_command(to_bytes(json.dumps(cmd), errors='surrogate_or_strict'))
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to elevate privilege to enable mode')
        # in dellos6 the terminal settings are accepted after the privilege mode
        try:
            self._exec_cli_command(b'terminal length 0')
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')

    def on_unbecome(self):
        """Leave privileged/config mode before the session is closed."""
        prompt = self._get_prompt()
        if prompt is None:
            # if prompt is None most likely the terminal is hung up at a prompt
            return

        if prompt.strip().endswith(b')#'):
            # In a configuration sub-mode: exit config first, then drop privileges.
            self._exec_cli_command(b'end')
            self._exec_cli_command(b'disable')
        elif prompt.endswith(b'#'):
            self._exec_cli_command(b'disable')
| roadmapper/ansible | lib/ansible/plugins/terminal/dellos6.py | Python | gpl-3.0 | 3,474 |
package com.puppycrawl.tools.checkstyle.checks.design;

// Checkstyle test fixture for the MutableException check.  The mixture of
// final and non-final fields in exception-named classes is intentional input
// for the check — do not "clean up" this file; the corresponding test class
// references its structure.
public class InputMutableException {
    public class FooException extends Exception { // exception class exercised by the check
        private final int finalErrorCode;
        private int errorCode = 1; // non-final field in an Exception subclass

        public FooException() {
            finalErrorCode = 1;
        }

        public class FooExceptionThisIsNot extends RuntimeException { // name does not end in "Exception"
            private final int finalErrorCode;
            private int errorCode = 1;

            /** constructor */
            public FooExceptionThisIsNot() {
                finalErrorCode = 1;
            }
        }
    }

    public class BarError extends Throwable { // "*Error" name with a mutable field
        private int errorCode;
    }

    public class BazDoesNotExtendError { // "*Error" name but not a Throwable
        private int errorCode;
    }

    public class CustomProblem extends ThreadDeath { // Throwable subtype without exception-style name
        private int errorCode;

        public class CustomFailure extends ThreadDeath {
            private int errorCode;

            public void someMethod() {
                if(true) {
                    final int i = 0;
                }
            }
        }
    }

    class CustomException extends java.lang.Exception {} // no fields at all

    class CustomMutableException extends java.lang.Exception {
        int errorCode; // package-private mutable field
    }
}
| Bhavik3/checkstyle | src/test/resources/com/puppycrawl/tools/checkstyle/checks/design/InputMutableException.java | Java | lgpl-2.1 | 1,239 |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.css.compiler.passes;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.css.compiler.ast.CssCompilerPass;
import com.google.common.css.compiler.ast.CssKeyNode;
import com.google.common.css.compiler.ast.CssKeyframesNode;
import com.google.common.css.compiler.ast.DefaultTreeVisitor;
import com.google.common.css.compiler.ast.ErrorManager;
import com.google.common.css.compiler.ast.GssError;
import com.google.common.css.compiler.ast.MutatingVisitController;
/**
 * Compiler pass which ensures that @keyframes rules are only allowed if
 * they are enabled. In addition this pass checks if the keys are between
 * 0% and 100%. If CSS simplification is enabled, "from" is replaced by "0%"
 * and "100%" is replaced by "to".
 *
 * @author fbenz@google.com (Florian Benz)
 */
public class ProcessKeyframes extends DefaultTreeVisitor
    implements CssCompilerPass {
  @VisibleForTesting
  static final String KEYFRAMES_NOT_ALLOWED_ERROR_MESSAGE =
      "a @keyframes rule occured but the option for it is disabled";
  @VisibleForTesting
  static final String WRONG_KEY_VALUE_ERROR_MESSAGE =
      "the value of the key is not between 0% and 100%";
  static final String INVALID_NUMBER_ERROR_MESSAGE =
      "the value of the key is invalid (not 'from', 'to', or 'XXX.XXX%')";

  private final MutatingVisitController visitController;
  private final ErrorManager errorManager;
  // Whether @keyframes rules are permitted at all.
  private final boolean keyframesAllowed;
  // Whether keys should be rewritten to their most compact form.
  private final boolean simplifyCss;

  public ProcessKeyframes(MutatingVisitController visitController,
      ErrorManager errorManager,
      boolean keyframesAllowed,
      boolean simplifyCss) {
    this.visitController = visitController;
    this.errorManager = errorManager;
    this.keyframesAllowed = keyframesAllowed;
    this.simplifyCss = simplifyCss;
  }

  @Override
  public boolean enterKeyframesRule(CssKeyframesNode node) {
    if (!keyframesAllowed) {
      errorManager.report(new GssError(KEYFRAMES_NOT_ALLOWED_ERROR_MESSAGE,
          node.getSourceCodeLocation()));
    }
    // Only descend into the rule's children when @keyframes is enabled.
    return keyframesAllowed;
  }

  @Override
  public boolean enterKey(CssKeyNode node) {
    if (!keyframesAllowed) {
      return false;
    }
    String value = node.getKeyValue();
    // -1 is the sentinel for "no percentage" (the key is "from" or "to");
    // compactRepresentation relies on it.
    float percentage = -1;
    if (value.contains("%")) {
      try {
        // parse to a float by excluding '%'
        percentage = Float.parseFloat(value.substring(0, value.length() - 1));
      } catch (NumberFormatException e) {
        // should not happen if the generated parser works correctly
        errorManager.report(new GssError(INVALID_NUMBER_ERROR_MESSAGE,
            node.getSourceCodeLocation()));
        return false;
      }
      if (!checkRangeOfPercentage(node, percentage)) {
        return false;
      }
    } else {
      if (!value.equals("from") && !value.equals("to")) {
        errorManager.report(new GssError(INVALID_NUMBER_ERROR_MESSAGE,
            node.getSourceCodeLocation()));
        return false;
      }
    }
    if (simplifyCss) {
      compactRepresentation(node, percentage);
    }
    return true;
  }

  /**
   * Checks if the percentage is between 0% and 100% inclusive.
   *
   * @param node The {@link CssKeyNode} to get the location in case of an error
   * @param percentage The value represented as a float
   * @return Returns true if there is no error
   */
  private boolean checkRangeOfPercentage(CssKeyNode node, float percentage) {
    // check whether the percentage is between 0% and 100%
    if (percentage < 0 || percentage > 100) {
      errorManager.report(new GssError(WRONG_KEY_VALUE_ERROR_MESSAGE,
          node.getSourceCodeLocation()));
      return false;
    }
    return true;
  }

  /**
   * Shortens the representation of the key.
   *
   * @param node The {@link CssKeyNode} where the percentage belongs to.
   * @param percentage The value represented as a float (-1 for "from"/"to")
   */
  @VisibleForTesting
  void compactRepresentation(CssKeyNode node, float percentage) {
    if (node.getKeyValue().equals("from")) {
      node.setKeyValue("0%");
    } else if (percentage == 100) {
      node.setKeyValue("to");
    } else if (percentage != -1) {
      String percentageStr = Float.toString(percentage);
      if (0 < percentage && percentage < 1) {
        // eliminate an unnecessary leading 0
        percentageStr = percentageStr.substring(1, percentageStr.length());
      }
      // eliminate a trailing zero like in 0.0
      percentageStr = percentageStr.replaceAll("0+$", "");
      if (percentageStr.endsWith(".")) {
        // if the number ends with '.' then eliminate that too
        percentageStr = percentageStr.substring(0, percentageStr.length() - 1);
      }
      node.setKeyValue(percentageStr + "%");
    }
  }

  @Override
  public void runPass() {
    visitController.startVisit(this);
  }
}
| buntarb/closure-stylesheets | src/com/google/common/css/compiler/passes/ProcessKeyframes.java | Java | apache-2.0 | 5,425 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.task.impl;
import com.intellij.task.ProjectTask;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
import java.util.Collections;
/**
 * Skeleton {@link ProjectTask} implementation that keeps track of the tasks
 * this task depends on.
 *
 * @author Vladislav.Soroka
 */
public abstract class AbstractProjectTask implements ProjectTask {
  @NotNull
  private Collection<ProjectTask> dependencies;

  public AbstractProjectTask() {
    this(Collections.emptyList());
  }

  public AbstractProjectTask(@NotNull Collection<ProjectTask> dependencies) {
    this.dependencies = dependencies;
  }

  /** Returns the tasks that must complete before this one. */
  @NotNull
  public Collection<ProjectTask> getDependsOn() {
    return dependencies;
  }

  /** Replaces the set of prerequisite tasks. */
  public void setDependsOn(@NotNull Collection<ProjectTask> dependencies) {
    this.dependencies = dependencies;
  }

  @Override
  @Nls
  public String toString() {
    return getPresentableName();
  }
}
| allotria/intellij-community | platform/lang-impl/src/com/intellij/task/impl/AbstractProjectTask.java | Java | apache-2.0 | 1,005 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.cumulativesum;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT;
public class CumulativeSumPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder<CumulativeSumPipelineAggregationBuilder> {
    public static final String NAME = "cumulative_sum";

    // Optional format pattern applied to the cumulative sums in the response; null means raw values.
    private String format;

    public CumulativeSumPipelineAggregationBuilder(String name, String bucketsPath) {
        super(name, NAME, new String[] { bucketsPath });
    }

    /**
     * Read from a stream.
     */
    public CumulativeSumPipelineAggregationBuilder(StreamInput in) throws IOException {
        super(in, NAME);
        format = in.readOptionalString();
    }

    @Override
    protected final void doWriteTo(StreamOutput out) throws IOException {
        out.writeOptionalString(format);
    }

    /**
     * Sets the format to use on the output of this aggregation.
     */
    public CumulativeSumPipelineAggregationBuilder format(String format) {
        if (format == null) {
            throw new IllegalArgumentException("[format] must not be null: [" + name + "]");
        }
        this.format = format;
        return this;
    }

    /**
     * Gets the format to use on the output of this aggregation.
     */
    public String format() {
        return format;
    }

    /**
     * Returns the {@link DocValueFormat} corresponding to {@link #format()}, falling back to
     * {@link DocValueFormat#RAW} when no format has been configured.
     */
    protected DocValueFormat formatter() {
        if (format != null) {
            return new DocValueFormat.Decimal(format);
        } else {
            return DocValueFormat.RAW;
        }
    }

    @Override
    protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
        return new CumulativeSumPipelineAggregator(name, bucketsPaths, formatter(), metaData);
    }

    @Override
    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggFactories,
            List<PipelineAggregationBuilder> pipelineAggregatorFactories) {
        // A cumulative sum consumes exactly one metric path.
        if (bucketsPaths.length != 1) {
            throw new IllegalStateException(BUCKETS_PATH.getPreferredName()
                    + " must contain a single entry for aggregation [" + name + "]");
        }
        // The parent (date_)histogram must not skip empty buckets, otherwise the running
        // sum would silently jump over missing intervals.
        if (parent instanceof HistogramAggregatorFactory) {
            HistogramAggregatorFactory histoParent = (HistogramAggregatorFactory) parent;
            if (histoParent.minDocCount() != 0) {
                throw new IllegalStateException("parent histogram of cumulative sum aggregation [" + name
                        + "] must have min_doc_count of 0");
            }
        } else if (parent instanceof DateHistogramAggregatorFactory) {
            DateHistogramAggregatorFactory histoParent = (DateHistogramAggregatorFactory) parent;
            if (histoParent.minDocCount() != 0) {
                throw new IllegalStateException("parent histogram of cumulative sum aggregation [" + name
                        + "] must have min_doc_count of 0");
            }
        } else {
            throw new IllegalStateException("cumulative sum aggregation [" + name
                    + "] must have a histogram or date_histogram as parent");
        }
    }

    @Override
    protected final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        if (format != null) {
            builder.field(BucketMetricsParser.FORMAT.getPreferredName(), format);
        }
        return builder;
    }

    /**
     * Parses a {@code cumulative_sum} aggregation definition. {@code buckets_path} may be given
     * either as a single string or as an array of strings (only the first entry is used — see
     * {@link #doValidate}); {@code format} is an optional string.
     */
    public static CumulativeSumPipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser)
            throws IOException {
        XContentParser.Token token;
        String currentFieldName = null;
        String[] bucketsPaths = null;
        String format = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.VALUE_STRING) {
                if (FORMAT.match(currentFieldName)) {
                    format = parser.text();
                } else if (BUCKETS_PATH.match(currentFieldName)) {
                    bucketsPaths = new String[] { parser.text() };
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "].");
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                if (BUCKETS_PATH.match(currentFieldName)) {
                    List<String> paths = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        paths.add(parser.text());
                    }
                    bucketsPaths = paths.toArray(new String[0]);
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "].");
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unexpected token " + token + " in [" + pipelineAggregatorName + "].");
            }
        }
        if (bucketsPaths == null) {
            // Fixed a copy-paste in the error message: this parser belongs to cumulative_sum,
            // not to the derivative aggregation.
            throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + BUCKETS_PATH.getPreferredName()
                    + "] for cumulative sum aggregation [" + pipelineAggregatorName + "]");
        }
        CumulativeSumPipelineAggregationBuilder factory =
                new CumulativeSumPipelineAggregationBuilder(pipelineAggregatorName, bucketsPaths[0]);
        if (format != null) {
            factory.format(format);
        }
        return factory;
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(format);
    }

    @Override
    protected boolean doEquals(Object obj) {
        // Null/class checks are presumably handled by the superclass before delegating here,
        // following the pattern of the other pipeline aggregation builders — confirm.
        CumulativeSumPipelineAggregationBuilder other = (CumulativeSumPipelineAggregationBuilder) obj;
        return Objects.equals(format, other.format);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }
} | fred84/elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java | Java | apache-2.0 | 8,317 |
package configuration
// HAProxy groups the HAProxy-related configuration values: a template path,
// an output path and a reload command.
type HAProxy struct {
TemplatePath string // location of the configuration template
OutputPath string // destination the rendered configuration is written to
ReloadCommand string // presumably the shell command used to reload HAProxy — confirm with callers
}
| utopiansociety/bamboo | configuration/haproxy.go | GO | apache-2.0 | 113 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.scripting.core.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.ops4j.pax.exam.CoreOptions.bundle;
import static org.ops4j.pax.exam.CoreOptions.junitBundles;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.provision;
import static org.ops4j.pax.exam.CoreOptions.systemProperty;
import static org.ops4j.pax.exam.CoreOptions.when;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineFactory;
import org.apache.sling.scripting.api.BindingsValuesProvider;
import org.apache.sling.scripting.api.BindingsValuesProvidersByContext;
import org.apache.sling.scripting.core.it.ScriptingCoreTestSupport;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.util.Filter;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceRegistration;
@RunWith(PaxExam.class)
public class BindingsValuesProvidersByContextIT extends ScriptingCoreTestSupport{
// Service under test, injected by Pax Exam once it becomes available.
@Inject
@Filter(timeout = 300000)
private BindingsValuesProvidersByContext bvpProvider;
@Inject
private BundleContext bundleContext;
// Service registrations created by the running test; released after each test.
private final List<ServiceRegistration> regs = new ArrayList<ServiceRegistration>();
@Before
public void setup() {
regs.clear();
}
@After
public void cleanup() {
// Unregister everything the test registered so tests stay independent.
for(ServiceRegistration reg : regs) {
reg.unregister();
}
}
// Builds the service properties for a provider registration; a null context or
// engine name simply omits the corresponding property.
private Dictionary<String, Object> getProperties(String context, String engineName) {
final Dictionary<String, Object> props = new Hashtable<String, Object>();
if(context != null) {
props.put(BindingsValuesProvider.CONTEXT, context.split(","));
}
if(engineName != null) {
props.put(ScriptEngine.NAME, engineName);
}
return props;
}
// Registers a BindingsValuesProvider whose toString() is the given id, so that
// expected results can be written as comma-joined id lists.
private void addBVP(final String id, String context, String engineName) {
final BindingsValuesProvider bvp = new BindingsValuesProvider() {
@Override
public String toString() {
return id;
}
@Override
public void addBindings(Bindings b) {
}
};
regs.add(bundleContext.registerService(BindingsValuesProvider.class.getName(), bvp, getProperties(context, engineName)));
}
// Like addBVP, but additionally sets an explicit service.ranking property.
private void addBVPWithServiceRanking(final String id, String context, String engineName, int serviceRanking) {
final BindingsValuesProvider bvp = new BindingsValuesProvider() {
@Override
public String toString() {
return id;
}
@Override
public void addBindings(Bindings b) {
}
};
final Dictionary<String, Object> properties = getProperties(context, engineName);
properties.put(Constants.SERVICE_RANKING, serviceRanking);
regs.add(bundleContext.registerService(BindingsValuesProvider.class.getName(), bvp, properties));
}
// Registers a Map-based provider; its toString() is "M_" + id so Map providers
// are distinguishable from BVP services in the expected strings.
private void addMap(final String id, String context, String engineName) {
final Map<String, Object> result = new HashMap<String, Object>() {
private static final long serialVersionUID = 1L;
@Override
public String toString() {
return "M_" + id;
}
};
regs.add(bundleContext.registerService(Map.class.getName(), result, getProperties(context, engineName)));
}
// Minimal ScriptEngineFactory stub that only reports the given engine name.
private ScriptEngineFactory factory(final String engineName) {
return new ScriptEngineFactory() {
@Override
public ScriptEngine getScriptEngine() {
return null;
}
@Override
public String getProgram(String... arg0) {
return null;
}
@Override
public Object getParameter(String arg0) {
return null;
}
@Override
public String getOutputStatement(String arg0) {
return null;
}
@Override
public List<String> getNames() {
final List<String> names = new ArrayList<String>();
names.add(engineName);
return names;
}
@Override
public List<String> getMimeTypes() {
return null;
}
@Override
public String getMethodCallSyntax(String arg0, String arg1, String... arg2) {
return null;
}
@Override
public String getLanguageVersion() {
return null;
}
@Override
public String getLanguageName() {
return null;
}
@Override
public List<String> getExtensions() {
return null;
}
@Override
public String getEngineVersion() {
return null;
}
@Override
public String getEngineName() {
return engineName;
}
};
}
// Joins the toString() of each element with commas, optionally sorted, so a result
// collection can be compared against a single expected string.
private String asString(Collection<?> data, boolean sortList) {
final List<String> maybeSorted = new ArrayList<String>();
for(Object o : data) {
maybeSorted.add(o.toString());
}
if(sortList) {
Collections.sort(maybeSorted);
}
final StringBuilder sb = new StringBuilder();
for(String str : maybeSorted) {
if(sb.length() > 0) {
sb.append(",");
}
sb.append(str);
}
return sb.toString();
}
// Sorted variant used where only membership, not order, is asserted.
private String asString(Collection<?> data) {
return asString(data, true);
}
// Providers registered without an engine name or with "*"/"ANY" apply to every
// engine; the engine-specific provider is expected last in the unsorted result.
@Test
public void testAny() {
addBVP("one", null, "js");
addBVP("two", null, null);
addBVP("three", null, "*");
addBVP("four", null, "ANY");
addBVP("five", null, "basic");
assertEquals("four,one,three,two", asString(bvpProvider.getBindingsValuesProviders(factory("js"), null)));
assertEquals("five,four,three,two", asString(bvpProvider.getBindingsValuesProviders(factory("basic"), null)));
assertEquals("four,three,two", asString(bvpProvider.getBindingsValuesProviders(factory("other"), null)));
final String unsorted = asString(bvpProvider.getBindingsValuesProviders(factory("js"), null), false);
assertTrue("Expecting js language-specific BVP at the end", unsorted.endsWith("one"));
}
// Providers are selected by the combination of context and engine name; a null
// context behaves like "request".
@Test
public void testContextsAndLanguages() {
addBVP("foo", null, "js");
addBVP("bar", null, null);
addBVP("r1", "request", "js");
addBVP("r2", "request", null);
addBVP("o1", "other", "js");
addBVP("o2", "other", null);
addBVP("o3", "other,request", null);
addBVP("o4", "python", null);
addBVP("python", "python", "python");
assertEquals("bar,foo,o3,r1,r2", asString(bvpProvider.getBindingsValuesProviders(factory("js"), "request")));
assertEquals("With default content", "bar,foo,o3,r1,r2", asString(bvpProvider.getBindingsValuesProviders(factory("js"), null)));
assertEquals("o1,o2,o3", asString(bvpProvider.getBindingsValuesProviders(factory("js"), "other")));
assertEquals("o4,python", asString(bvpProvider.getBindingsValuesProviders(factory("python"), "python")));
assertEquals("", asString(bvpProvider.getBindingsValuesProviders(factory("js"), "unusedContext")));
final String unsorted = asString(bvpProvider.getBindingsValuesProviders(factory("python"), "python"), false);
assertTrue("Expecting python language-specific BVP at the end", unsorted.endsWith("python"));
}
// Map-registered providers (prefixed M_) and BVP services can be mixed freely.
@Test
public void testMapsAndBvps() {
addBVP("foo", null, "js");
addMap("bar", null, null);
addMap("r1", "request", "js");
addMap("r2", "request", null);
addMap("o1", "other", "js");
addBVP("o2", "other", null);
addMap("o3", "other,request", null);
addBVP("o4", "python", null);
addMap("python", "python", "python");
assertEquals("M_bar,M_o3,M_r1,M_r2,foo", asString(bvpProvider.getBindingsValuesProviders(factory("js"), "request")));
assertEquals("With default content", "M_bar,M_o3,M_r1,M_r2,foo", asString(bvpProvider.getBindingsValuesProviders(factory("js"), null)));
assertEquals("M_o1,M_o3,o2", asString(bvpProvider.getBindingsValuesProviders(factory("js"), "other")));
assertEquals("", asString(bvpProvider.getBindingsValuesProviders(factory("js"), "unusedContext")));
assertEquals("M_python,o4", asString(bvpProvider.getBindingsValuesProviders(factory("python"), "python")));
final String unsorted = asString(bvpProvider.getBindingsValuesProviders(factory("python"), "python"), false);
assertTrue("Expecting python language-specific BVP at the end", unsorted.endsWith("M_python"));
}
// The service.ranking property defines the returned order regardless of
// registration order: the next three tests register in different orders.
@Test
public void testBVPsWithServiceRankingA() {
addBVPWithServiceRanking("last", null, "js", Integer.MAX_VALUE);
addBVPWithServiceRanking("second", null, "js", 0);
addBVPWithServiceRanking("first", null, "js", Integer.MIN_VALUE);
assertEquals("first,second,last", asString(bvpProvider.getBindingsValuesProviders(factory("js"), null), false));
}
@Test
public void testBVPsWithServiceRankingB() {
addBVPWithServiceRanking("first", null, "js", Integer.MIN_VALUE);
addBVPWithServiceRanking("second", null, "js", 0);
addBVPWithServiceRanking("last", null, "js", Integer.MAX_VALUE);
assertEquals("first,second,last", asString(bvpProvider.getBindingsValuesProviders(factory("js"), null), false));
}
// Generic (engine-less) providers rank before engine-specific ones, each group
// internally ordered by service.ranking.
@Test
public void testBVPsWithServiceRankingC() {
addBVPWithServiceRanking("second", "request", "js", 0);
addBVPWithServiceRanking("first", "request", "js", Integer.MIN_VALUE);
addBVPWithServiceRanking("genericThree", "request", null, 42);
addBVPWithServiceRanking("genericTwo", "request", null, 0);
addBVPWithServiceRanking("last", "request", "js", Integer.MAX_VALUE);
addBVPWithServiceRanking("genericOne", "request", null, -42);
assertEquals("genericOne,genericTwo,genericThree,first,second,last", asString(bvpProvider.getBindingsValuesProviders(factory("js"), "request"), false));
}
}
| tmaret/sling | bundles/scripting/core/src/test/java/org/apache/sling/scripting/core/impl/BindingsValuesProvidersByContextIT.java | Java | apache-2.0 | 11,818 |
// { dg-do run { xfail sparc64-*-elf arm-*-pe } }
// { dg-options "-fexceptions" }
#include <exception>
#include <stdlib.h>
void my_terminate() {
// Custom terminate handler: exiting with status 0 marks this dg test as passed.
exit (0); // Double faults should call terminate
}
struct A {
A() { }
// Throwing from a destructor that runs during stack unwinding is a double fault;
// the runtime must then invoke the terminate handler installed just before the throw.
~A() {
std::set_terminate (my_terminate);
throw 1; // This throws from EH dtor, should call my_terminate
}
};
int main() {
try {
try {
throw 1;
} catch (int i) {
A a; // A hit on this EH dtor went to the wrong place
// Unwinding from this throw destroys 'a'; its throwing dtor must reach my_terminate.
throw 1;
}
} catch (...) {
return 1;
}
// Reaching any return (instead of exiting via my_terminate) means failure.
return 1;
}
| efortuna/AndroidSDKClone | ndk_experimental/tests/device/test-stlport_static-exception/jni/eh25.cpp | C++ | apache-2.0 | 548 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.codeInsight;
import com.intellij.lang.Language;
import com.intellij.lang.injection.MultiHostRegistrar;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyCallExpressionNavigator;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import static com.jetbrains.python.PyStringFormatParser.*;
/**
* @author vlan
*/
public final class PyInjectionUtil {

  /**
   * Outcome of an injection attempt. {@code injected} is true when at least one text range was
   * registered with the host registrar; {@code strict} is true when none of the injected ranges
   * contained formatting substitutions or f-string fragments (i.e. the injected text is complete
   * as-is).
   */
  public static class InjectionResult {
    public static final InjectionResult EMPTY = new InjectionResult(false, true);

    private final boolean myInjected;
    private final boolean myStrict;

    public InjectionResult(boolean injected, boolean strict) {
      myInjected = injected;
      myStrict = strict;
    }

    public boolean isInjected() {
      return myInjected;
    }

    public boolean isStrict() {
      return myStrict;
    }

    /** Combines two results: injected if either was; strict only if both were. */
    public InjectionResult append(@NotNull InjectionResult result) {
      return new InjectionResult(myInjected || result.isInjected(), myStrict && result.isStrict());
    }
  }

  /** PSI element types that may participate in an injectable string literal expression. */
  public static final List<Class<? extends PsiElement>> ELEMENTS_TO_INJECT_IN =
    ContainerUtil.immutableList(PyStringLiteralExpression.class, PyParenthesizedExpression.class, PyBinaryExpression.class,
                                PyCallExpression.class, PsiComment.class);

  private PyInjectionUtil() {}

  /**
   * Returns the largest expression in the specified context that represents a string literal suitable for language injection, possibly
   * with concatenation, parentheses, or formatting.
   */
  @Nullable
  public static PsiElement getLargestStringLiteral(@NotNull PsiElement context) {
    PsiElement element = null;
    // Walk up the PSI tree while each parent still forms part of the same string literal expression.
    for (PsiElement current = context; current != null && isStringLiteralPart(current, element); current = current.getParent()) {
      element = current;
    }
    return element;
  }

  /**
   * Registers language injections in the given registrar for the specified string literal element or its ancestor that contains
   * string concatenations or formatting.
   */
  @NotNull
  public static InjectionResult registerStringLiteralInjection(@NotNull PsiElement element,
                                                               @NotNull MultiHostRegistrar registrar,
                                                               @NotNull Language language) {
    registrar.startInjecting(language);
    final InjectionResult result = processStringLiteral(element, registrar, "", "", Formatting.NONE);
    if (result.isInjected()) {
      registrar.doneInjecting();
    }
    return result;
  }

  /**
   * Returns true when {@code element} can be part of the (possibly concatenated, parenthesized
   * or formatted) string literal expression being grown from {@code context}.
   */
  private static boolean isStringLiteralPart(@NotNull PsiElement element, @Nullable PsiElement context) {
    if (element == context || element instanceof PyStringLiteralExpression || element instanceof PsiComment) {
      return true;
    }
    else if (element instanceof PyParenthesizedExpression) {
      final PyExpression contained = ((PyParenthesizedExpression)element).getContainedExpression();
      return contained != null && isStringLiteralPart(contained, context);
    }
    else if (element instanceof PyBinaryExpression) {
      final PyBinaryExpression expr = (PyBinaryExpression)element;
      final PyExpression left = expr.getLeftExpression();
      final PyExpression right = expr.getRightExpression();
      if (expr.isOperator("+")) {
        return isStringLiteralPart(left, context) || right != null && isStringLiteralPart(right, context);
      }
      else if (expr.isOperator("%")) {
        // For percent formatting only the left operand holds literal text; the right side holds values.
        return right != context && isStringLiteralPart(left, context);
      }
      return false;
    }
    else if (element instanceof PyCallExpression) {
      final PyExpression qualifier = getFormatCallQualifier((PyCallExpression)element);
      return qualifier != null && isStringLiteralPart(qualifier, context);
    }
    else if (element instanceof PyReferenceExpression) {
      final PyCallExpression callExpr = PyCallExpressionNavigator.getPyCallExpressionByCallee(element);
      return callExpr != null && isStringLiteralPart(callExpr, context);
    }
    return false;
  }

  /**
   * Returns the receiver of a {@code <qualifier>.format(...)} call, or null when {@code element}
   * is not such a call.
   */
  @Nullable
  private static PyExpression getFormatCallQualifier(@NotNull PyCallExpression element) {
    final PyExpression callee = element.getCallee();
    if (callee instanceof PyQualifiedExpression) {
      final PyQualifiedExpression qualifiedExpr = (PyQualifiedExpression)callee;
      final PyExpression qualifier = qualifiedExpr.getQualifier();
      if (qualifier != null && PyNames.FORMAT.equals(qualifiedExpr.getReferencedName())) {
        return qualifier;
      }
    }
    return null;
  }

  /**
   * Recursively registers injection ranges for {@code element}, splitting string contents around
   * formatting substitutions; each substitution is stood in for by the {@code missing_value}
   * placeholder so the injected fragment stays parseable.
   */
  @NotNull
  private static InjectionResult processStringLiteral(@NotNull PsiElement element, @NotNull MultiHostRegistrar registrar,
                                                      @NotNull String prefix, @NotNull String suffix, @NotNull Formatting formatting) {
    final String missingValue = "missing_value";
    if (element instanceof PyStringLiteralExpression) {
      boolean injected = false;
      boolean strict = true;
      final PyStringLiteralExpression expr = (PyStringLiteralExpression)element;
      for (PyStringElement stringElem : expr.getStringElements()) {
        final int nodeOffsetInParent = stringElem.getTextOffset() - expr.getTextRange().getStartOffset();
        final TextRange contentRange = stringElem.getContentRange();
        final int contentStartOffset = contentRange.getStartOffset();
        if (formatting != Formatting.NONE || stringElem.isFormatted()) {
          // Each range is relative to the start of the string element
          final List<TextRange> subsRanges;
          if (formatting != Formatting.NONE) {
            final String content = stringElem.getContent();
            subsRanges = StreamEx.of(formatting == Formatting.NEW_STYLE ? parseNewStyleFormat(content) : parsePercentFormat(content))
              .select(SubstitutionChunk.class)
              .map(chunk -> chunk.getTextRange().shiftRight(contentStartOffset))
              .toList();
          }
          else {
            subsRanges = StreamEx.of(((PyFormattedStringElement)stringElem).getFragments())
              .map(PsiElement::getTextRangeInParent)
              .toList();
          }
          if (!subsRanges.isEmpty()) {
            // Substitutions make the injected text incomplete, hence non-strict.
            strict = false;
          }
          // A zero-length sentinel range at the end of the content lets the loop below emit the
          // trailing literal chunk through the same code path as the inner chunks.
          final TextRange sentinel = TextRange.from(contentRange.getEndOffset(), 0);
          final List<TextRange> withSentinel = ContainerUtil.append(subsRanges, sentinel);
          int literalChunkStart = contentStartOffset;
          int literalChunkEnd;
          for (int i = 0; i < withSentinel.size(); i++) {
            final TextRange subRange = withSentinel.get(i);
            literalChunkEnd = subRange.getStartOffset();
            if (literalChunkEnd > literalChunkStart) {
              final String chunkPrefix;
              if (i == 0) {
                chunkPrefix = prefix;
              }
              else if (i == 1 && withSentinel.get(0).getStartOffset() == contentStartOffset) {
                // The content starts with a substitution, so no chunk was emitted at i == 0 and
                // its placeholder must be carried as this chunk's prefix instead.
                chunkPrefix = missingValue;
              }
              else {
                chunkPrefix = "";
              }
              // Every chunk except the last is followed by a substitution and so ends with the
              // placeholder; the last chunk takes the caller-supplied suffix. (This replaces a
              // three-way branch whose final arm was unreachable: i is always either
              // < size - 1 or == size - 1.)
              final String chunkSuffix = i < withSentinel.size() - 1 ? missingValue : suffix;
              final TextRange chunkRange = TextRange.create(literalChunkStart, literalChunkEnd);
              registrar.addPlace(chunkPrefix, chunkSuffix, expr, chunkRange.shiftRight(nodeOffsetInParent));
              injected = true;
            }
            literalChunkStart = subRange.getEndOffset();
          }
        }
        else {
          // Plain literal without any formatting: inject its whole content range.
          registrar.addPlace(prefix, suffix, expr, contentRange.shiftRight(nodeOffsetInParent));
          injected = true;
        }
      }
      return new InjectionResult(injected, strict);
    }
    else if (element instanceof PyParenthesizedExpression) {
      final PyExpression contained = ((PyParenthesizedExpression)element).getContainedExpression();
      if (contained != null) {
        return processStringLiteral(contained, registrar, prefix, suffix, formatting);
      }
    }
    else if (element instanceof PyBinaryExpression) {
      final PyBinaryExpression expr = (PyBinaryExpression)element;
      final PyExpression left = expr.getLeftExpression();
      final PyExpression right = expr.getRightExpression();
      final boolean isLeftString = isStringLiteralPart(left, null);
      if (expr.isOperator("+")) {
        final boolean isRightString = right != null && isStringLiteralPart(right, null);
        InjectionResult result = InjectionResult.EMPTY;
        if (isLeftString) {
          result = result.append(processStringLiteral(left, registrar, prefix, isRightString ? "" : missingValue, formatting));
        }
        if (isRightString) {
          result = result.append(processStringLiteral(right, registrar, isLeftString ? "" : missingValue, suffix, formatting));
        }
        return result;
      }
      else if (expr.isOperator("%")) {
        return processStringLiteral(left, registrar, prefix, suffix, Formatting.PERCENT);
      }
    }
    else if (element instanceof PyCallExpression) {
      final PyExpression qualifier = getFormatCallQualifier((PyCallExpression)element);
      if (qualifier != null) {
        return processStringLiteral(qualifier, registrar, prefix, suffix, Formatting.NEW_STYLE);
      }
    }
    return InjectionResult.EMPTY;
  }

  /** Kind of substitution syntax that applies to the literal being processed. */
  private enum Formatting {
    NONE,
    PERCENT,
    NEW_STYLE
  }
}
| smmribeiro/intellij-community | python/src/com/jetbrains/python/codeInsight/PyInjectionUtil.java | Java | apache-2.0 | 10,214 |
package main;
import com.intellij.util.concurrency.annotations.fake.RequiresEdt;
// Test fixture: a method carrying another annotation (@Deprecated) before the
// threading-model annotation — presumably used to verify the instrumenter copes
// with preceding annotations; confirm against the instrumenter tests.
public class MethodHasOtherAnnotationBefore {
@Deprecated
@RequiresEdt
public Object test() {
return null;
}
} | smmribeiro/intellij-community | plugins/devkit/jps-plugin/testData/threadingModelHelper/instrumenter/MethodHasOtherAnnotationBefore.java | Java | apache-2.0 | 205 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.transform.trait;
import groovy.transform.CompileStatic;
import org.codehaus.groovy.ast.AnnotationNode;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.GenericsType;
import org.codehaus.groovy.ast.MethodNode;
import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.ast.PropertyNode;
import org.codehaus.groovy.ast.expr.ArgumentListExpression;
import org.codehaus.groovy.ast.expr.ArrayExpression;
import org.codehaus.groovy.ast.expr.BinaryExpression;
import org.codehaus.groovy.ast.expr.BooleanExpression;
import org.codehaus.groovy.ast.expr.CastExpression;
import org.codehaus.groovy.ast.expr.ClassExpression;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.MethodCallExpression;
import org.codehaus.groovy.ast.expr.StaticMethodCallExpression;
import org.codehaus.groovy.ast.expr.VariableExpression;
import org.codehaus.groovy.ast.stmt.BlockStatement;
import org.codehaus.groovy.ast.stmt.EmptyStatement;
import org.codehaus.groovy.ast.stmt.ExpressionStatement;
import org.codehaus.groovy.ast.stmt.IfStatement;
import org.codehaus.groovy.ast.stmt.ReturnStatement;
import org.codehaus.groovy.ast.stmt.Statement;
import org.codehaus.groovy.ast.tools.GeneralUtils;
import org.codehaus.groovy.ast.tools.GenericsUtils;
import org.codehaus.groovy.classgen.asm.BytecodeHelper;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.MetaClassHelper;
import org.codehaus.groovy.syntax.SyntaxException;
import org.codehaus.groovy.syntax.Token;
import org.codehaus.groovy.syntax.Types;
import org.codehaus.groovy.transform.ASTTransformationCollectorCodeVisitor;
import org.codehaus.groovy.transform.sc.StaticCompileTransformation;
import org.objectweb.asm.Opcodes;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import static org.codehaus.groovy.ast.tools.GenericsUtils.correctToGenericsSpecRecurse;
/**
* This class contains a static utility method {@link #doExtendTraits(org.codehaus.groovy.ast.ClassNode, org.codehaus.groovy.control.SourceUnit, org.codehaus.groovy.control.CompilationUnit)}
* aimed at generating code for a classnode implementing a trait.
*
* @author Cédric Champeau
* @since 2.3.0
*/
public abstract class TraitComposer {
/**
 * This comparator is used to make sure that generated direct getters appear first in the list of method
 * nodes. Unlike the previous "always return -1 or 1" implementation, it honours the
 * {@link Comparator} contract (antisymmetric, 0 for ties), which prevents
 * "Comparison method violates its general contract!" failures during sorting.
 */
private static final Comparator<MethodNode> GETTER_FIRST_COMPARATOR = new Comparator<MethodNode>() {
    public int compare(final MethodNode o1, final MethodNode o2) {
        boolean firstIsGetter = o1.getName().endsWith(Traits.DIRECT_GETTER_SUFFIX);
        boolean secondIsGetter = o2.getName().endsWith(Traits.DIRECT_GETTER_SUFFIX);
        // Getters sort before non-getters; two getters (or two non-getters) compare equal.
        return Boolean.compare(secondIsGetter, firstIsGetter);
    }
};
public static final ClassNode COMPILESTATIC_CLASSNODE = ClassHelper.make(CompileStatic.class);
/**
 * Given a class node, if this class node implements a trait, then generate all the appropriate
 * code which delegates calls to the trait. It is safe to call this method on a class node which
 * does not implement a trait.
 * @param cNode a class node
 * @param unit the source unit
 */
public static void doExtendTraits(final ClassNode cNode, final SourceUnit unit, final CompilationUnit cu) {
// Interfaces never receive trait delegation code.
if (cNode.isInterface()) return;
boolean isItselfTrait = Traits.isTrait(cNode);
SuperCallTraitTransformer superCallTransformer = new SuperCallTraitTransformer(unit);
if (isItselfTrait) {
// A trait itself is only validated (it may only extend another trait), never composed.
checkTraitAllowed(cNode, unit);
return;
}
// Skip the generated helper classes themselves.
if (!cNode.getNameWithoutPackage().endsWith(Traits.TRAIT_HELPER)) {
List<ClassNode> traits = findTraits(cNode);
for (ClassNode trait : traits) {
TraitHelpersTuple helpers = Traits.findHelpers(trait);
applyTrait(trait, cNode, helpers);
// Rewrite super-calls and re-run the transformation collector after each trait is
// woven in — presumably so transforms on the woven code are picked up; confirm.
superCallTransformer.visitClass(cNode);
if (unit!=null) {
ASTTransformationCollectorCodeVisitor collector = new ASTTransformationCollectorCodeVisitor(unit, cu.getTransformLoader());
collector.visitClass(cNode);
}
}
}
}
private static List<ClassNode> findTraits(ClassNode cNode) {
LinkedHashSet<ClassNode> interfaces = new LinkedHashSet<ClassNode>();
Traits.collectAllInterfacesReverseOrder(cNode, interfaces);
List<ClassNode> traits = new LinkedList<ClassNode>();
for (ClassNode candidate : interfaces) {
if (Traits.isAnnotatedWithTrait(candidate)) {
traits.add(candidate);
}
}
return traits;
}
private static void checkTraitAllowed(final ClassNode bottomTrait, final SourceUnit unit) {
ClassNode superClass = bottomTrait.getSuperClass();
if (superClass==null || ClassHelper.OBJECT_TYPE.equals(superClass)) return;
if (!Traits.isTrait(superClass)) {
unit.addError(new SyntaxException("A trait can only inherit from another trait", superClass.getLineNumber(), superClass.getColumnNumber()));
}
}
    /**
     * Weaves a single trait into the target class node: generates forwarder methods which delegate
     * to the trait helper, adds object/static initializer calls to the helper's init methods, and
     * materializes the trait's fields via the field helper interface.
     * @param trait the trait being applied
     * @param cNode the target class node receiving the generated members
     * @param helpers the helper/field-helper class nodes generated for the trait
     */
    private static void applyTrait(final ClassNode trait, final ClassNode cNode, final TraitHelpersTuple helpers) {
        ClassNode helperClassNode = helpers.getHelper();
        ClassNode fieldHelperClassNode = helpers.getFieldHelper();
        // resolve generics placeholders of the trait against the concrete implementing class
        Map<String,ClassNode> genericsSpec = GenericsUtils.createGenericsSpec(cNode);
        genericsSpec = GenericsUtils.createGenericsSpec(trait, genericsSpec);
        for (MethodNode methodNode : helperClassNode.getAllDeclaredMethods()) {
            String name = methodNode.getName();
            Parameter[] helperMethodParams = methodNode.getParameters();
            boolean isAbstract = methodNode.isAbstract();
            // candidate helper methods are concrete static methods taking at least the receiver
            // ($self) parameter, and whose name is not synthetic (no '$')
            if (!isAbstract && helperMethodParams.length > 0 && ((methodNode.getModifiers() & Opcodes.ACC_STATIC) == Opcodes.ACC_STATIC) && !name.contains("$")) {
                ArgumentListExpression argList = new ArgumentListExpression();
                // first helper argument is the receiver instance
                argList.addExpression(new VariableExpression("this"));
                Parameter[] origParams = new Parameter[helperMethodParams.length - 1];
                Parameter[] params = new Parameter[helperMethodParams.length - 1];
                // drop the leading $self parameter: the forwarder has one parameter less
                System.arraycopy(methodNode.getParameters(), 1, params, 0, params.length);
                Map<String,ClassNode> methodGenericsSpec = new LinkedHashMap<String, ClassNode>(genericsSpec);
                MethodNode originalMethod = trait.getMethod(name, params);
                // Original method may be null in case of a private method
                if (originalMethod!=null) {
                    methodGenericsSpec = GenericsUtils.addMethodGenerics(originalMethod, methodGenericsSpec);
                }
                for (int i = 1; i < helperMethodParams.length; i++) {
                    Parameter parameter = helperMethodParams[i];
                    ClassNode originType = parameter.getOriginType();
                    // replace generics placeholders with the types resolved for this class
                    ClassNode fixedType = correctToGenericsSpecRecurse(methodGenericsSpec, originType);
                    Parameter newParam = new Parameter(fixedType, "arg" + i);
                    List<AnnotationNode> copied = new LinkedList<AnnotationNode>();
                    List<AnnotationNode> notCopied = new LinkedList<AnnotationNode>();
                    GeneralUtils.copyAnnotatedNodeAnnotations(parameter, copied, notCopied);
                    newParam.addAnnotations(copied);
                    params[i - 1] = newParam;
                    origParams[i-1] = parameter;
                    argList.addExpression(new VariableExpression(params[i - 1]));
                }
                createForwarderMethod(trait, cNode, methodNode, originalMethod, helperClassNode, methodGenericsSpec, helperMethodParams, origParams, params, argList);
            }
        }
        // instance initializer: Helper.$init$(this)
        cNode.addObjectInitializerStatements(new ExpressionStatement(
                new MethodCallExpression(
                        new ClassExpression(helperClassNode),
                        Traits.INIT_METHOD,
                        new ArgumentListExpression(new VariableExpression("this")))
        ));
        // static initializer: Helper.$static$init$(TargetClass)
        MethodCallExpression staticInitCall = new MethodCallExpression(
                new ClassExpression(helperClassNode),
                Traits.STATIC_INIT_METHOD,
                new ArgumentListExpression(new ClassExpression(cNode)));
        MethodNode staticInitMethod = new MethodNode(
                Traits.STATIC_INIT_METHOD, Opcodes.ACC_STATIC | Opcodes.ACC_PUBLIC, ClassHelper.VOID_TYPE,
                new Parameter[] {new Parameter(ClassHelper.CLASS_Type,"clazz")}, ClassNode.EMPTY_ARRAY, EmptyStatement.INSTANCE);
        staticInitMethod.setDeclaringClass(helperClassNode);
        staticInitCall.setMethodTarget(staticInitMethod);
        cNode.addStaticInitializerStatements(Collections.<Statement>singletonList(new ExpressionStatement(
                staticInitCall
        )), false);
        if (fieldHelperClassNode != null && !cNode.declaresInterface(fieldHelperClassNode)) {
            // we should implement the field helper interface too
            cNode.addInterface(fieldHelperClassNode);
            // implementation of methods
            List<MethodNode> declaredMethods = fieldHelperClassNode.getAllDeclaredMethods();
            // getters must be processed first so fields are created before setters are generated
            Collections.sort(declaredMethods, GETTER_FIRST_COMPARATOR);
            for (MethodNode methodNode : declaredMethods) {
                String fieldName = methodNode.getName();
                if (fieldName.endsWith(Traits.DIRECT_GETTER_SUFFIX) || fieldName.endsWith(Traits.DIRECT_SETTER_SUFFIX)) {
                    // field helper method names encode the field name and the operation: name$get / name$set
                    int suffixIdx = fieldName.lastIndexOf("$");
                    fieldName = fieldName.substring(0, suffixIdx);
                    String operation = methodNode.getName().substring(suffixIdx + 1);
                    boolean getter = "get".equals(operation);
                    ClassNode returnType = correctToGenericsSpecRecurse(genericsSpec, methodNode.getReturnType());
                    int isStatic = 0;
                    boolean publicField = true;
                    // probe the field helper for the backing field, trying, in order:
                    // instance public, instance private, static public, static private
                    FieldNode helperField = fieldHelperClassNode.getField(Traits.FIELD_PREFIX + Traits.PUBLIC_FIELD_PREFIX + fieldName);
                    if (helperField==null) {
                        publicField = false;
                        helperField = fieldHelperClassNode.getField(Traits.FIELD_PREFIX + Traits.PRIVATE_FIELD_PREFIX + fieldName);
                    }
                    if (helperField==null) {
                        publicField = true;
                        // try to find a static one
                        helperField = fieldHelperClassNode.getField(Traits.STATIC_FIELD_PREFIX+Traits.PUBLIC_FIELD_PREFIX+fieldName);
                        if (helperField==null) {
                            publicField = false;
                            helperField = fieldHelperClassNode.getField(Traits.STATIC_FIELD_PREFIX+Traits.PRIVATE_FIELD_PREFIX +fieldName);
                        }
                        isStatic = Opcodes.ACC_STATIC;
                    }
                    if (getter) {
                        // add field
                        if (helperField!=null) {
                            List<AnnotationNode> copied = new LinkedList<AnnotationNode>();
                            List<AnnotationNode> notCopied = new LinkedList<AnnotationNode>();
                            GeneralUtils.copyAnnotatedNodeAnnotations(helperField, copied, notCopied);
                            FieldNode fieldNode = cNode.addField(fieldName, (publicField?Opcodes.ACC_PUBLIC:Opcodes.ACC_PRIVATE) | isStatic, returnType, null);
                            fieldNode.addAnnotations(copied);
                        }
                    }
                    Parameter[] newParams;
                    if (getter) {
                        newParams = Parameter.EMPTY_ARRAY;
                    } else {
                        // setters take a single value parameter; unresolved placeholders fall back to Object
                        ClassNode originType = methodNode.getParameters()[0].getOriginType();
                        ClassNode fixedType = originType.isGenericsPlaceHolder()?ClassHelper.OBJECT_TYPE:correctToGenericsSpecRecurse(genericsSpec, originType);
                        newParams = new Parameter[]{new Parameter(fixedType, "val")};
                    }
                    // getter body: return field; setter body: field = val
                    Expression fieldExpr = new VariableExpression(cNode.getField(fieldName));
                    Statement body =
                            getter ? new ReturnStatement(fieldExpr) :
                                    new ExpressionStatement(
                                            new BinaryExpression(
                                                    fieldExpr,
                                                    Token.newSymbol(Types.EQUAL, 0, 0),
                                                    new VariableExpression(newParams[0])
                                            )
                                    );
                    MethodNode impl = new MethodNode(
                            methodNode.getName(),
                            Opcodes.ACC_PUBLIC | isStatic,
                            returnType,
                            newParams,
                            ClassNode.EMPTY_ARRAY,
                            body
                    );
                    // compile the generated accessor statically to get direct field access
                    AnnotationNode an = new AnnotationNode(COMPILESTATIC_CLASSNODE);
                    impl.addAnnotation(an);
                    cNode.addTransform(StaticCompileTransformation.class, an);
                    cNode.addMethod(impl);
                }
            }
        }
    }
private static void createForwarderMethod(
ClassNode trait,
ClassNode targetNode,
MethodNode helperMethod,
MethodNode originalMethod,
ClassNode helperClassNode,
Map<String,ClassNode> genericsSpec,
Parameter[] helperMethodParams,
Parameter[] traitMethodParams,
Parameter[] forwarderParams,
ArgumentListExpression helperMethodArgList) {
MethodCallExpression mce = new MethodCallExpression(
new ClassExpression(helperClassNode),
helperMethod.getName(),
helperMethodArgList
);
mce.setImplicitThis(false);
genericsSpec = GenericsUtils.addMethodGenerics(helperMethod,genericsSpec);
ClassNode[] exceptionNodes = correctToGenericsSpecRecurse(genericsSpec, copyExceptions(helperMethod.getExceptions()));
ClassNode fixedReturnType = correctToGenericsSpecRecurse(genericsSpec, helperMethod.getReturnType());
Expression forwardExpression = genericsSpec.isEmpty()?mce:new CastExpression(fixedReturnType,mce);
int access = helperMethod.getModifiers();
// we could rely on the first parameter name ($static$self) but that information is not
// guaranteed to be always present
boolean isHelperForStaticMethod = helperMethodParams[0].getOriginType().equals(ClassHelper.CLASS_Type);
if (Modifier.isPrivate(access) && !isHelperForStaticMethod) {
// do not create forwarder for private methods
// see GROOVY-7213
return;
}
if (!isHelperForStaticMethod) {
access = access ^ Opcodes.ACC_STATIC;
}
MethodNode forwarder = new MethodNode(
helperMethod.getName(),
access,
fixedReturnType,
forwarderParams,
exceptionNodes,
new ExpressionStatement(forwardExpression)
);
List<AnnotationNode> copied = new LinkedList<AnnotationNode>();
List<AnnotationNode> notCopied = Collections.emptyList(); // at this point, should *always* stay empty
GeneralUtils.copyAnnotatedNodeAnnotations(helperMethod, copied, notCopied);
if (!copied.isEmpty()) {
forwarder.addAnnotations(copied);
}
if (originalMethod!=null) {
GenericsType[] newGt = GenericsUtils.applyGenericsContextToPlaceHolders(genericsSpec, originalMethod.getGenericsTypes());
newGt = removeNonPlaceHolders(newGt);
forwarder.setGenericsTypes(newGt);
}
// add a helper annotation indicating that it is a bridge method
AnnotationNode bridgeAnnotation = new AnnotationNode(Traits.TRAITBRIDGE_CLASSNODE);
bridgeAnnotation.addMember("traitClass", new ClassExpression(trait));
bridgeAnnotation.addMember("desc", new ConstantExpression(BytecodeHelper.getMethodDescriptor(helperMethod.getReturnType(), traitMethodParams)));
forwarder.addAnnotation(
bridgeAnnotation
);
if (!shouldSkipMethod(targetNode, forwarder.getName(), forwarderParams)) {
targetNode.addMethod(forwarder);
}
createSuperForwarder(targetNode, forwarder, genericsSpec);
}
private static GenericsType[] removeNonPlaceHolders(GenericsType[] oldTypes) {
if (oldTypes==null || oldTypes.length==0) return oldTypes;
ArrayList<GenericsType> l = new ArrayList<GenericsType>(Arrays.asList(oldTypes));
Iterator<GenericsType> it = l.iterator();
boolean modified = false;
while (it.hasNext()) {
GenericsType gt = it.next();
if (!gt.isPlaceholder()) {
it.remove();
modified = true;
}
}
if (!modified) return oldTypes;
if (l.size()==0) return null;
return l.toArray(new GenericsType[l.size()]);
}
/**
* Creates, if necessary, a super forwarder method, for stackable traits.
* @param forwarder a forwarder method
* @param genericsSpec
*/
private static void createSuperForwarder(ClassNode targetNode, MethodNode forwarder, final Map<String,ClassNode> genericsSpec) {
List<ClassNode> interfaces = new ArrayList<ClassNode>(Traits.collectAllInterfacesReverseOrder(targetNode, new LinkedHashSet<ClassNode>()));
String name = forwarder.getName();
Parameter[] forwarderParameters = forwarder.getParameters();
LinkedHashSet<ClassNode> traits = new LinkedHashSet<ClassNode>();
List<MethodNode> superForwarders = new LinkedList<MethodNode>();
for (ClassNode node : interfaces) {
if (Traits.isTrait(node)) {
MethodNode method = node.getDeclaredMethod(name, forwarderParameters);
if (method!=null) {
// a similar method exists, we need a super bridge
// trait$super$foo(Class currentTrait, ...)
traits.add(node);
superForwarders.add(method);
}
}
}
for (MethodNode superForwarder : superForwarders) {
doCreateSuperForwarder(targetNode, superForwarder, traits.toArray(new ClassNode[traits.size()]), genericsSpec);
}
}
/**
* Creates a method to dispatch to "super traits" in a "stackable" fashion. The generated method looks like this:
* <p>
* <code>ReturnType trait$super$method(Class clazz, Arg1 arg1, Arg2 arg2, ...) {
* if (SomeTrait.is(A) { return SomeOtherTrait$Trait$Helper.method(this, arg1, arg2) }
* super.method(arg1,arg2)
* }</code>
* </p>
* @param targetNode
* @param forwarderMethod
* @param interfacesToGenerateForwarderFor
* @param genericsSpec
*/
private static void doCreateSuperForwarder(ClassNode targetNode, MethodNode forwarderMethod, ClassNode[] interfacesToGenerateForwarderFor, Map<String,ClassNode> genericsSpec) {
Parameter[] parameters = forwarderMethod.getParameters();
Parameter[] superForwarderParams = new Parameter[parameters.length];
for (int i = 0; i < parameters.length; i++) {
Parameter parameter = parameters[i];
ClassNode originType = parameter.getOriginType();
superForwarderParams[i] = new Parameter(correctToGenericsSpecRecurse(genericsSpec, originType), parameter.getName());
}
for (int i = 0; i < interfacesToGenerateForwarderFor.length; i++) {
final ClassNode current = interfacesToGenerateForwarderFor[i];
final ClassNode next = i < interfacesToGenerateForwarderFor.length - 1 ? interfacesToGenerateForwarderFor[i + 1] : null;
String forwarderName = Traits.getSuperTraitMethodName(current, forwarderMethod.getName());
if (targetNode.getDeclaredMethod(forwarderName, superForwarderParams) == null) {
ClassNode returnType = correctToGenericsSpecRecurse(genericsSpec, forwarderMethod.getReturnType());
Statement delegate = next == null ? createSuperFallback(forwarderMethod, returnType) : createDelegatingForwarder(forwarderMethod, next);
MethodNode methodNode = targetNode.addMethod(forwarderName, Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC, returnType, superForwarderParams, ClassNode.EMPTY_ARRAY, delegate);
methodNode.setGenericsTypes(forwarderMethod.getGenericsTypes());
}
}
}
    /**
     * Builds the terminal branch of a super-trait dispatcher. The generated statement looks like:
     * <pre>
     * if (this instanceof GeneratedGroovyProxy) {
     *     (ReturnType) InvokerHelper.invokeMethod(((GeneratedGroovyProxy)this).getProxyTarget(), "method", [args])
     * } else {
     *     super.method(args)
     * }
     * </pre>
     * @param forwarderMethod the forwarder method the fallback is generated for
     * @param returnType the resolved return type of the forwarder
     * @return the generated if/else statement
     */
    private static Statement createSuperFallback(MethodNode forwarderMethod, ClassNode returnType) {
        ArgumentListExpression args = new ArgumentListExpression();
        Parameter[] forwarderMethodParameters = forwarderMethod.getParameters();
        for (final Parameter forwarderMethodParameter : forwarderMethodParameters) {
            args.addExpression(new VariableExpression(forwarderMethodParameter));
        }
        // condition: this instanceof GeneratedGroovyProxy
        BinaryExpression instanceOfExpr = new BinaryExpression(new VariableExpression("this"), Token.newSymbol(Types.KEYWORD_INSTANCEOF, -1, -1), new ClassExpression(Traits.GENERATED_PROXY_CLASSNODE));
        MethodCallExpression superCall = new MethodCallExpression(
                new VariableExpression("super"),
                forwarderMethod.getName(),
                args
        );
        superCall.setImplicitThis(false);
        CastExpression proxyReceiver = new CastExpression(Traits.GENERATED_PROXY_CLASSNODE, new VariableExpression("this"));
        MethodCallExpression getProxy = new MethodCallExpression(proxyReceiver, "getProxyTarget", ArgumentListExpression.EMPTY_ARGUMENTS);
        getProxy.setImplicitThis(true);
        // dynamic dispatch on the proxy target, since its static type is unknown at compile time
        StaticMethodCallExpression proxyCall = new StaticMethodCallExpression(
                ClassHelper.make(InvokerHelper.class),
                "invokeMethod",
                new ArgumentListExpression(getProxy, new ConstantExpression(forwarderMethod.getName()), new ArrayExpression(ClassHelper.OBJECT_TYPE, args.getExpressions()))
        );
        IfStatement stmt = new IfStatement(
                new BooleanExpression(instanceOfExpr),
                new ExpressionStatement(new CastExpression(returnType,proxyCall)),
                new ExpressionStatement(superCall)
        );
        return stmt;
    }
private static Statement createDelegatingForwarder(final MethodNode forwarderMethod, final ClassNode next) {
// generates --> next$Trait$Helper.method(this, arg1, arg2)
TraitHelpersTuple helpers = Traits.findHelpers(next);
ArgumentListExpression args = new ArgumentListExpression();
args.addExpression(new VariableExpression("this"));
Parameter[] forwarderMethodParameters = forwarderMethod.getParameters();
for (final Parameter forwarderMethodParameter : forwarderMethodParameters) {
args.addExpression(new VariableExpression(forwarderMethodParameter));
}
StaticMethodCallExpression delegateCall = new StaticMethodCallExpression(
helpers.getHelper(),
forwarderMethod.getName(),
args
);
Statement result;
if (ClassHelper.VOID_TYPE.equals(forwarderMethod.getReturnType())) {
BlockStatement stmt = new BlockStatement();
stmt.addStatement(new ExpressionStatement(delegateCall));
stmt.addStatement(new ReturnStatement(new ConstantExpression(null)));
result = stmt;
} else {
result = new ReturnStatement(delegateCall);
}
return result;
}
private static ClassNode[] copyExceptions(final ClassNode[] sourceExceptions) {
ClassNode[] exceptionNodes = new ClassNode[sourceExceptions == null ? 0 : sourceExceptions.length];
System.arraycopy(sourceExceptions, 0, exceptionNodes, 0, exceptionNodes.length);
return exceptionNodes;
}
private static boolean shouldSkipMethod(final ClassNode cNode, final String name, final Parameter[] params) {
if (isExistingProperty(name, cNode, params) || cNode.getDeclaredMethod(name, params)!=null) {
// override exists in the weaved class itself
return true;
}
return false;
}
/**
* An utility method which tries to find a method with default implementation (in the Java 8 semantics).
* @param cNode a class node
* @param name the name of the method
* @param params the parameters of the method
* @return a method node corresponding to a default method if it exists
*/
private static MethodNode findDefaultMethodFromInterface(final ClassNode cNode, final String name, final Parameter[] params) {
if (cNode == null) {
return null;
}
if (cNode.isInterface()) {
MethodNode method = cNode.getMethod(name, params);
if (method!=null && !method.isAbstract()) {
// this is a Java 8 only behavior!
return method;
}
}
ClassNode[] interfaces = cNode.getInterfaces();
for (ClassNode anInterface : interfaces) {
MethodNode res = findDefaultMethodFromInterface(anInterface, name, params);
if (res!=null) {
return res;
}
}
return findDefaultMethodFromInterface(cNode.getSuperClass(), name, params);
}
private static boolean isExistingProperty(final String methodName, final ClassNode cNode, final Parameter[] params) {
String propertyName = methodName;
boolean getter = false;
if (methodName.startsWith("get")) {
propertyName = propertyName.substring(3);
getter = true;
} else if (methodName.startsWith("is")) {
propertyName = propertyName.substring(2);
getter = true;
} else if (methodName.startsWith("set")) {
propertyName = propertyName.substring(3);
} else {
return false;
}
if (getter && params.length>0) {
return false;
}
if (!getter && params.length!=1) {
return false;
}
if (propertyName.length()==0) {
return false;
}
propertyName = MetaClassHelper.convertPropertyName(propertyName);
PropertyNode pNode = cNode.getProperty(propertyName);
return pNode != null;
}
}
| antoaravinth/incubator-groovy | src/main/org/codehaus/groovy/transform/trait/TraitComposer.java | Java | apache-2.0 | 28,378 |