repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
spring-cloud/spring-cloud-sleuth | spring-cloud-sleuth-instrumentation/src/main/java/org/springframework/cloud/sleuth/instrument/async/TraceAsyncAspect.java | 2844 | /*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.sleuth.instrument.async;
import java.lang.reflect.Method;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.cloud.sleuth.Span;
import org.springframework.cloud.sleuth.SpanNamer;
import org.springframework.cloud.sleuth.Tracer;
import org.springframework.cloud.sleuth.docs.AssertingSpan;
import org.springframework.cloud.sleuth.internal.SpanNameUtil;
import org.springframework.util.ReflectionUtils;
/**
 * Aspect that creates a new Span for running threads executing methods annotated with
 * {@link org.springframework.scheduling.annotation.Async} annotation.
 *
 * @author Marcin Grzejszczak
 * @since 1.0.0
 * @see Tracer
 */
@Aspect
public class TraceAsyncAspect {

	private final Tracer tracer;

	private final SpanNamer spanNamer;

	public TraceAsyncAspect(Tracer tracer, SpanNamer spanNamer) {
		this.tracer = tracer;
		this.spanNamer = spanNamer;
	}

	/**
	 * Wraps the execution of any {@code @Async}-annotated method in a span, tagging it
	 * with the simple class name and the method name of the intercepted target.
	 * @param pjp the intercepted join point
	 * @return whatever the intercepted method returns
	 * @throws Throwable whatever the intercepted method throws
	 */
	@Around("execution (@org.springframework.scheduling.annotation.Async * *.*(..))")
	public Object traceBackgroundThread(final ProceedingJoinPoint pjp) throws Throwable {
		final String operationName = name(pjp);
		// Continue the currently active span if there is one; otherwise start a new one.
		final Span current = this.tracer.currentSpan();
		final Span spanToUse = current != null ? current : this.tracer.nextSpan();
		AssertingSpan asyncSpan = SleuthAsyncSpan.ASYNC_ANNOTATION_SPAN.wrap(spanToUse).name(operationName);
		try (Tracer.SpanInScope scope = this.tracer.withSpan(asyncSpan.start())) {
			asyncSpan.tag(SleuthAsyncSpan.Tags.CLASS, pjp.getTarget().getClass().getSimpleName())
					.tag(SleuthAsyncSpan.Tags.METHOD, pjp.getSignature().getName());
			return pjp.proceed();
		}
		finally {
			asyncSpan.end();
		}
	}

	// Resolves the span name via the SpanNamer abstraction, with the hyphenated
	// method name as the fallback.
	String name(ProceedingJoinPoint pjp) {
		final String fallback = SpanNameUtil.toLowerHyphen(pjp.getSignature().getName());
		return this.spanNamer.name(getMethod(pjp, pjp.getTarget()), fallback);
	}

	// Looks the intercepted method up on the target's concrete class (not the
	// declaring interface) so annotations on the implementation are visible.
	private Method getMethod(ProceedingJoinPoint pjp, Object object) {
		final MethodSignature signature = (MethodSignature) pjp.getSignature();
		final Method declared = signature.getMethod();
		return ReflectionUtils.findMethod(object.getClass(), declared.getName(), declared.getParameterTypes());
	}

}
| apache-2.0 |
nidi3/graphviz-java | graphviz-java/src/main/java/guru/nidi/graphviz/attribute/Attributes.java | 2520 | /*
* Copyright © 2015 Stefan Niederhauser (nidin@gmx.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package guru.nidi.graphviz.attribute;
import javax.annotation.Nullable;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import static java.util.Arrays.asList;
/**
 * An ordered set of graphviz attributes, applicable to the element kind {@code F}.
 * Implementations materialize their entries by copying them into a {@link MapAttributes}.
 */
public interface Attributes<F extends For> extends Iterable<Entry<String, Object>> {

    /** Creates an attribute list holding the single given key/value pair. */
    static <F extends For> Attributes<F> attr(String key, @Nullable Object value) {
        return new MapAttributes<F>(key, value);
    }

    /** Creates an empty attribute list. */
    static <F extends For> Attributes<F> attrs() {
        return new MapAttributes<>();
    }

    /** Merges the given attribute lists into one; later lists win on duplicate keys. */
    @SafeVarargs
    static <F extends For> Attributes<F> attrs(Attributes<? extends F>... attributes) {
        return attrs(asList(attributes));
    }

    /** Merges the given attribute lists into one; later lists win on duplicate keys. */
    static <F extends For> Attributes<F> attrs(List<Attributes<? extends F>> attributes) {
        final MapAttributes<F> merged = new MapAttributes<>();
        for (final Attributes<? extends F> part : attributes) {
            part.applyTo(merged);
        }
        return merged;
    }

    /** Copies this list's entries into the given target map. */
    Attributes<? super F> applyTo(MapAttributes<? super F> attrs);

    /** Same as {@link #applyTo(MapAttributes)}; only {@link MapAttributes} targets are supported. */
    default Attributes<? super F> applyTo(Attributes<? super F> attrs) {
        if (attrs instanceof MapAttributes) {
            @SuppressWarnings("unchecked") final MapAttributes<? super F> target = (MapAttributes<? super F>) attrs;
            return applyTo(target);
        }
        throw new UnsupportedOperationException("attributes must be a MapAttributes");
    }

    /** Returns an independent copy of this attribute list. */
    default Attributes<F> copy() {
        @SuppressWarnings("unchecked") final Attributes<F> duplicate = (Attributes<F>) applyTo(attrs());
        return duplicate;
    }

    /** Returns the value stored under the given key, or null when absent. */
    @Nullable
    default Object get(String key) {
        final Attributes<? super F> applied = applyTo(new MapAttributes<>());
        return applied.get(key);
    }

    /** Iterates over all key/value entries of this attribute list. */
    @Override
    default Iterator<Entry<String, Object>> iterator() {
        final Attributes<? super F> applied = applyTo(new MapAttributes<>());
        return applied.iterator();
    }

    /** Returns true when this attribute list contains no entries. */
    default boolean isEmpty() {
        final Attributes<? super F> applied = applyTo(new MapAttributes<>());
        return applied.isEmpty();
    }
}
| apache-2.0 |
googleapis/java-monitoring-dashboards | proto-google-cloud-monitoring-dashboard-v1/src/main/java/com/google/monitoring/dashboard/v1/ListDashboardsResponseOrBuilder.java | 3032 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/dashboard/v1/dashboards_service.proto
package com.google.monitoring.dashboard.v1;
// NOTE(review): protoc-generated accessor interface (see the generated-file header
// above) — do not hand-edit; regenerate from dashboards_service.proto instead.
public interface ListDashboardsResponseOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.monitoring.dashboard.v1.ListDashboardsResponse)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* The list of requested dashboards.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Dashboard dashboards = 1;</code>
*/
java.util.List<com.google.monitoring.dashboard.v1.Dashboard> getDashboardsList();
/**
*
*
* <pre>
* The list of requested dashboards.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Dashboard dashboards = 1;</code>
*/
com.google.monitoring.dashboard.v1.Dashboard getDashboards(int index);
/**
*
*
* <pre>
* The list of requested dashboards.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Dashboard dashboards = 1;</code>
*/
int getDashboardsCount();
/**
*
*
* <pre>
* The list of requested dashboards.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Dashboard dashboards = 1;</code>
*/
java.util.List<? extends com.google.monitoring.dashboard.v1.DashboardOrBuilder>
getDashboardsOrBuilderList();
/**
*
*
* <pre>
* The list of requested dashboards.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Dashboard dashboards = 1;</code>
*/
com.google.monitoring.dashboard.v1.DashboardOrBuilder getDashboardsOrBuilder(int index);
/**
*
*
* <pre>
* If there are more results than have been returned, then this field is set
* to a non-empty value. To see the additional results,
* use that value as `page_token` in the next call to this method.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
java.lang.String getNextPageToken();
/**
*
*
* <pre>
* If there are more results than have been returned, then this field is set
* to a non-empty value. To see the additional results,
* use that value as `page_token` in the next call to this method.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
com.google.protobuf.ByteString getNextPageTokenBytes();
}
| apache-2.0 |
zhangleidaniejian/bluemmSite | src/main/java/cn/com/bluemoon/jeesite/common/persistence/dialect/db/SQLServerDialect.java | 1759 | /**
* Copyright © 2012-2014 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package cn.com.bluemoon.jeesite.common.persistence.dialect.db;
import cn.com.bluemoon.jeesite.common.persistence.dialect.Dialect;
/**
 * MS SQL Server pagination dialect. Pagination is implemented by injecting a
 * " top N" fragment right after the SELECT (or SELECT DISTINCT) keyword.
 * Pre-2012 SQL Server has no OFFSET clause, so a non-zero offset is rejected.
 *
 * @author poplar.yfyang
 * @version 1.0 2010-10-10
 * @since JDK 1.5
 */
public class SQLServerDialect implements Dialect {

	public boolean supportsLimit() {
		return true;
	}

	/**
	 * Returns the position right after the SELECT (or SELECT DISTINCT) keyword,
	 * i.e. the insertion point for the " top N" fragment.
	 */
	static int getAfterSelectInsertPoint(String sql) {
		// Lower-case once; the original implementation did this twice per call.
		final String lowerSql = sql.toLowerCase();
		final int selectIndex = lowerSql.indexOf("select");
		final int selectDistinctIndex = lowerSql.indexOf("select distinct");
		// "select distinct" is 15 characters long, "select" is 6.
		return selectIndex + (selectDistinctIndex == selectIndex ? 15 : 6);
	}

	public String getLimitString(String sql, int offset, int limit) {
		return getLimit(sql, offset, limit);
	}

	/**
	 * Turns the given SQL into a paginated statement by inserting " top {limit}"
	 * after the SELECT keyword.
	 *
	 * @param sql    the SQL statement to paginate
	 * @param offset index of the first row; must be 0, SQL Server has no OFFSET
	 * @param limit  maximum number of rows per page
	 * @return the paginated SQL statement
	 * @throws UnsupportedOperationException when offset is greater than 0
	 */
	public String getLimit(String sql, int offset, int limit) {
		if (offset > 0) {
			throw new UnsupportedOperationException("sql server has no offset");
		}
		// StringBuilder: single-threaded use, no need for the synchronized StringBuffer.
		return new StringBuilder(sql.length() + 8)
				.append(sql)
				.insert(getAfterSelectInsertPoint(sql), " top " + limit)
				.toString();
	}
}
| apache-2.0 |
gravitee-io/gateway | gravitee-gateway-core/src/main/java/io/gravitee/gateway/core/processor/AbstractProcessor.java | 1462 | /**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.gateway.core.processor;
import io.gravitee.gateway.api.handler.Handler;
/**
 * Base {@link Processor} implementation that stores the three callback handlers
 * (next, error, exit) in protected fields for subclasses to invoke, and exposes
 * fluent setters returning {@code this} for chaining.
 *
 * @author David BRASSELY (david.brassely at graviteesource.com)
 * @author GraviteeSource Team
 */
public abstract class AbstractProcessor<T> implements Processor<T> {

	/** Next-step handler; populated via {@link #handler(Handler)}. */
	protected Handler<T> next;

	/** Exit handler; populated via {@link #exitHandler(Handler)}. */
	protected Handler<Void> exitHandler;

	/** Failure handler; populated via {@link #errorHandler(Handler)}. */
	protected Handler<ProcessorFailure> errorHandler;

	@Override
	public Processor<T> handler(Handler<T> nextHandler) {
		this.next = nextHandler;
		return this;
	}

	@Override
	public Processor<T> exitHandler(Handler<Void> onExit) {
		this.exitHandler = onExit;
		return this;
	}

	@Override
	public Processor<T> errorHandler(Handler<ProcessorFailure> onError) {
		this.errorHandler = onError;
		return this;
	}

}
| apache-2.0 |
ilya-moskovtsev/imoskovtsev | junior/chapter_003_sql_jdbc/src/main/java/ru/job4j/sqlite/ParseXML.java | 1721 | package ru.job4j.sqlite;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.InputStream;

import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
 * Parsing an XML File Using SAX
 * https://docs.oracle.com/javase/tutorial/jaxp/sax/parsing.html
 */
public class ParseXML {
	private static final Logger LOG = LogManager.getLogger(ParseXML.class.getName());

	/** Name of the attribute whose integer values are summed. */
	public static final String ATTRIBUTE_NAME = "field";

	/**
	 * Sums the integer values of every {@code field} attribute found in the given
	 * XML file. Parse errors are logged and result in the partial sum computed so
	 * far (0 if parsing failed immediately).
	 *
	 * @param xml the XML file to scan
	 * @return the sum of all {@code field} attribute values
	 */
	public int sumFields(File xml) {
		LOG.info(String.format("Will parse %s for %s values and sum them", xml.getName(), ATTRIBUTE_NAME));
		final int[] sum = {0};
		// FIX: the original opened a FileReader that was never closed (resource leak)
		// and decoded the file with the platform charset. A byte stream lets the SAX
		// parser honour the encoding declared in the XML prolog, and
		// try-with-resources guarantees the stream is closed.
		try (InputStream in = new FileInputStream(xml)) {
			SAXParserFactory spf = SAXParserFactory.newInstance();
			spf.setNamespaceAware(true);
			SAXParser saxParser = spf.newSAXParser();
			XMLReader xmlReader = saxParser.getXMLReader();
			xmlReader.setContentHandler(new DefaultHandler() {
				@Override
				public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException {
					// Elements without the attribute contribute 0.
					sum[0] += atts.getValue(ATTRIBUTE_NAME) != null ? Integer.parseInt(atts.getValue(ATTRIBUTE_NAME)) : 0;
				}
			});
			xmlReader.parse(new InputSource(in));
		} catch (Exception e) {
			// Best-effort behaviour preserved: log and fall through with the partial sum.
			LOG.error(e.getMessage(), e);
		}
		LOG.info(String.format("The sum of all fields is %d", sum[0]));
		return sum[0];
	}
}
| apache-2.0 |
jdillon/orientdb | core/src/main/java/com/orientechnologies/orient/core/record/impl/ODocumentHelper.java | 57489 | /*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.record.impl;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.common.util.OPair;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.config.OStorageConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.*;
import com.orientechnologies.orient.core.db.record.ORecordElement.STATUS;
import com.orientechnologies.orient.core.db.record.ridbag.ORidBag;
import com.orientechnologies.orient.core.exception.OQueryParsingException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.serialization.serializer.record.string.ORecordSerializerStringAbstract;
import com.orientechnologies.orient.core.sql.OSQLHelper;
import com.orientechnologies.orient.core.sql.functions.OSQLFunctionRuntime;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeRIDSet;
import java.lang.reflect.Array;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Map.Entry;
/**
* Helper class to manage documents.
*
* @author Luca Garulli
*/
public class ODocumentHelper {
// Virtual attribute names recognised by the field accessors below: each exposes a
// piece of record metadata (identity, version, class, ...) as if it were a field.
public static final String ATTRIBUTE_THIS = "@this";
public static final String ATTRIBUTE_RID = "@rid";
public static final String ATTRIBUTE_RID_ID = "@rid_id";
public static final String ATTRIBUTE_RID_POS = "@rid_pos";
public static final String ATTRIBUTE_VERSION = "@version";
public static final String ATTRIBUTE_CLASS = "@class";
public static final String ATTRIBUTE_TYPE = "@type";
public static final String ATTRIBUTE_SIZE = "@size";
public static final String ATTRIBUTE_FIELDS = "@fields";
public static final String ATTRIBUTE_RAW = "@raw";
// Generic no-argument callback; implementations are supplied by callers.
public static interface ODbRelatedCall<T> {
public T call();
}
// Strategy for mapping one ORID to another.
public static interface RIDMapper {
ORID map(ORID rid);
}
/**
 * Sorts the given result set in place according to the given ORDER BY criteria.
 * A {@code null} result set is silently ignored.
 */
public static void sort(List<? extends OIdentifiable> ioResultSet, List<OPair<String, String>> iOrderCriteria,
    OCommandContext context) {
  if (ioResultSet == null)
    return;
  Collections.sort(ioResultSet, new ODocumentComparator(iOrderCriteria, context));
}
/**
 * Converts {@code iValue} to the requested {@code iFieldType} in the context of the
 * given document: coerces links/records/strings to RIDs and vice versa, wraps plain
 * collections into the document-tracked Set/List implementations, maps enums to
 * ordinal or name, parses date strings using the storage's configured formats, and
 * finally falls back to {@link OType#convert(Object, Class)} for everything else.
 * Throws OQueryParsingException for unparsable date strings and
 * IllegalArgumentException for enum values the target type cannot accept.
 */
@SuppressWarnings("unchecked")
public static <RET> RET convertField(final ODocument iDocument, final String iFieldName, final Class<?> iFieldType, Object iValue) {
// No target type requested: return the value untouched.
if (iFieldType == null)
return (RET) iValue;
// Target is a RID: accept RIDs as-is, parse strings, or extract a record's identity.
if (ORID.class.isAssignableFrom(iFieldType)) {
if (iValue instanceof ORID) {
return (RET) iValue;
} else if (iValue instanceof String) {
return (RET) new ORecordId((String) iValue);
} else if (iValue instanceof ORecord) {
return (RET) ((ORecord) iValue).getIdentity();
}
// Target is a record: RIDs and records pass through, strings become RIDs.
} else if (ORecord.class.isAssignableFrom(iFieldType)) {
if (iValue instanceof ORID || iValue instanceof ORecord) {
return (RET) iValue;
} else if (iValue instanceof String) {
return (RET) new ORecordId((String) iValue);
}
// Target is a Set: wrap non-set values into a document-tracked set, preserving
// laziness when the source is a lazy collection/map.
} else if (Set.class.isAssignableFrom(iFieldType)) {
if (!(iValue instanceof Set)) {
// CONVERT IT TO SET
final Collection<?> newValue;
if (iValue instanceof ORecordLazyList || iValue instanceof ORecordLazyMap)
newValue = new ORecordLazySet(iDocument);
else
newValue = new OTrackedSet<Object>(iDocument);
if (iValue instanceof Collection<?>) {
((Collection<Object>) newValue).addAll((Collection<Object>) iValue);
return (RET) newValue;
} else if (iValue instanceof Map) {
// Only the map's values are collected, keys are dropped.
((Collection<Object>) newValue).addAll(((Map<String, Object>) iValue).values());
return (RET) newValue;
} else if (iValue instanceof String) {
// A comma-separated string becomes a set of its tokens.
final String stringValue = (String) iValue;
if (stringValue != null && !stringValue.isEmpty()) {
final String[] items = stringValue.split(",");
for (String s : items) {
((Collection<Object>) newValue).add(s);
}
}
return (RET) newValue;
} else if (OMultiValue.isMultiValue(iValue)) {
// GENERIC MULTI VALUE
for (Object s : OMultiValue.getMultiValueIterable(iValue)) {
((Collection<Object>) newValue).add(s);
}
return (RET) newValue;
}
} else {
return (RET) iValue;
}
// Target is a List: symmetric to the Set branch above.
} else if (List.class.isAssignableFrom(iFieldType)) {
if (!(iValue instanceof List)) {
// CONVERT IT TO LIST
final Collection<?> newValue;
if (iValue instanceof OMVRBTreeRIDSet || iValue instanceof ORecordLazyMap || iValue instanceof ORecordLazySet)
newValue = new ORecordLazyList(iDocument);
else
newValue = new OTrackedList<Object>(iDocument);
if (iValue instanceof Collection) {
((Collection<Object>) newValue).addAll((Collection<Object>) iValue);
return (RET) newValue;
} else if (iValue instanceof Map) {
((Collection<Object>) newValue).addAll(((Map<String, Object>) iValue).values());
return (RET) newValue;
} else if (iValue instanceof String) {
final String stringValue = (String) iValue;
if (stringValue != null && !stringValue.isEmpty()) {
final String[] items = stringValue.split(",");
for (String s : items) {
((Collection<Object>) newValue).add(s);
}
}
return (RET) newValue;
} else if (OMultiValue.isMultiValue(iValue)) {
// GENERIC MULTI VALUE
for (Object s : OMultiValue.getMultiValueIterable(iValue)) {
((Collection<Object>) newValue).add(s);
}
return (RET) newValue;
}
} else {
return (RET) iValue;
}
// Enum source: numeric targets get the ordinal, everything else the name.
} else if (iValue instanceof Enum) {
// ENUM
if (Number.class.isAssignableFrom(iFieldType))
iValue = ((Enum<?>) iValue).ordinal();
else
iValue = iValue.toString();
if (!(iValue instanceof String) && !iFieldType.isAssignableFrom(iValue.getClass()))
throw new IllegalArgumentException("Property '" + iFieldName + "' of type '" + iFieldType
+ "' cannot accept value of type: " + iValue.getClass());
// Date target: parse strings with the storage-configured date or date-time format.
} else if (Date.class.isAssignableFrom(iFieldType)) {
if (iValue instanceof String && ODatabaseRecordThreadLocal.INSTANCE.isDefined()) {
final OStorageConfiguration config = ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().getConfiguration();
DateFormat formatter = config.getDateFormatInstance();
if (((String) iValue).length() > config.dateFormat.length()) {
// Longer than the plain date format: assume the date-time formatter.
formatter = config.getDateTimeFormatInstance();
}
try {
Date newValue = formatter.parse((String) iValue);
// _fieldValues.put(iFieldName, newValue);
return (RET) newValue;
} catch (ParseException pe) {
final String dateFormat = ((String) iValue).length() > config.dateFormat.length() ? config.dateTimeFormat
: config.dateFormat;
throw new OQueryParsingException("Error on conversion of date '" + iValue + "' using the format: " + dateFormat);
}
}
}
// Fallback: generic OType conversion.
iValue = OType.convert(iValue, iFieldType);
return (RET) iValue;
}
/** Convenience overload of {@code getFieldValue} that evaluates without a command context. */
@SuppressWarnings("unchecked")
public static <RET> RET getFieldValue(Object value, final String iFieldName) {
  final Object resolved = getFieldValue(value, iFieldName, null);
  return (RET) resolved;
}
/**
 * Evaluates a field-access expression against {@code value}, navigating dotted
 * paths ("a.b.c"), index/filter suffixes ("[0]", "[f1,f2]", "[from-to]",
 * "[field = value]"), $context variables and function calls, across records, maps
 * and multi-values. Returns null as soon as any step yields no value.
 *
 * @param value      root object: an OIdentifiable, Map, multi-value or context
 * @param iFieldName the expression to evaluate
 * @param iContext   command context used to resolve $variables and index parts
 * @return the resolved value, or null when the path cannot be followed
 */
@SuppressWarnings("unchecked")
public static <RET> RET getFieldValue(Object value, final String iFieldName, final OCommandContext iContext) {
if (value == null)
return null;
final int fieldNameLength = iFieldName.length();
if (fieldNameLength == 0)
return (RET) value;
OIdentifiable currentRecord = value instanceof OIdentifiable ? (OIdentifiable) value : null;
// A leading '.' is skipped.
int beginPos = iFieldName.charAt(0) == '.' ? 1 : 0;
int nextSeparatorPos = iFieldName.charAt(0) == '.' ? 1 : 0;
// Consume the expression one token at a time; tokens end at '.' or '['.
do {
char nextSeparator = ' ';
for (; nextSeparatorPos < fieldNameLength; ++nextSeparatorPos) {
nextSeparator = iFieldName.charAt(nextSeparatorPos);
if (nextSeparator == '.' || nextSeparator == '[')
break;
}
final String fieldName;
if (nextSeparatorPos < fieldNameLength)
fieldName = iFieldName.substring(beginPos, nextSeparatorPos);
else {
// Last token of the expression.
nextSeparator = ' ';
if (beginPos > 0)
fieldName = iFieldName.substring(beginPos);
else
fieldName = iFieldName;
}
if (nextSeparator == '[') {
// Index expression follows: first resolve the field the index applies to.
if (fieldName != null && fieldName.length() > 0) {
if (currentRecord != null)
value = getIdentifiableValue(currentRecord, fieldName);
else if (value instanceof Map<?, ?>)
value = getMapEntry((Map<String, ?>) value, fieldName);
else if (OMultiValue.isMultiValue(value)) {
// Project the field across every identifiable element of the multi-value.
final HashSet<Object> temp = new HashSet<Object>();
for (Object o : OMultiValue.getMultiValueIterable(value)) {
if (o instanceof OIdentifiable) {
Object r = getFieldValue(o, iFieldName);
if (r != null)
OMultiValue.add(temp, r);
}
}
value = temp;
}
}
if (value == null)
return null;
else if (value instanceof OIdentifiable)
currentRecord = (OIdentifiable) value;
// Extract the text between '[' and ']'.
final int end = iFieldName.indexOf(']', nextSeparatorPos);
if (end == -1)
throw new IllegalArgumentException("Missed closed ']'");
String indexPart = iFieldName.substring(nextSeparatorPos + 1, end);
if (indexPart.length() == 0)
return null;
nextSeparatorPos = end;
if (value instanceof OCommandContext)
value = ((OCommandContext) value).getVariables();
if (value instanceof OIdentifiable) {
// Indexing into a record: [field], [f1,f2,...], [from-to] or [field = value].
final ORecord record = currentRecord != null && currentRecord instanceof OIdentifiable ? ((OIdentifiable) currentRecord)
.getRecord() : null;
final Object index = getIndexPart(iContext, indexPart);
final String indexAsString = index != null ? index.toString() : null;
final List<String> indexParts = OStringSerializerHelper.smartSplit(indexAsString, ',',
OStringSerializerHelper.DEFAULT_IGNORE_CHARS);
final List<String> indexRanges = OStringSerializerHelper.smartSplit(indexAsString, '-', ' ');
final List<String> indexCondition = OStringSerializerHelper.smartSplit(indexAsString, '=', ' ');
if (indexParts.size() == 1 && indexCondition.size() == 1 && indexRanges.size() == 1)
// SINGLE VALUE
value = ((ODocument) record).field(indexAsString);
else if (indexParts.size() > 1) {
// MULTI VALUE
final Object[] values = new Object[indexParts.size()];
for (int i = 0; i < indexParts.size(); ++i) {
values[i] = ((ODocument) record).field(OStringSerializerHelper.getStringContent(indexParts.get(i)));
}
value = values;
} else if (indexRanges.size() > 1) {
// MULTI VALUES RANGE: slice the document's fields by position, bounds clamped.
String from = indexRanges.get(0);
String to = indexRanges.get(1);
final ODocument doc = (ODocument) record;
final String[] fieldNames = doc.fieldNames();
final int rangeFrom = from != null && !from.isEmpty() ? Integer.parseInt(from) : 0;
final int rangeTo = to != null && !to.isEmpty() ? Math.min(Integer.parseInt(to), fieldNames.length - 1)
: fieldNames.length - 1;
final Object[] values = new Object[rangeTo - rangeFrom + 1];
for (int i = rangeFrom; i <= rangeTo; ++i)
values[i - rangeFrom] = doc.field(fieldNames[i]);
value = values;
} else if (!indexCondition.isEmpty()) {
// CONDITION: keep the record only when field = value holds.
final String conditionFieldName = indexCondition.get(0);
Object conditionFieldValue = ORecordSerializerStringAbstract.getTypeValue(indexCondition.get(1));
if (conditionFieldValue instanceof String)
conditionFieldValue = OStringSerializerHelper.getStringContent(conditionFieldValue);
final Object fieldValue = getFieldValue(currentRecord, conditionFieldName);
if (conditionFieldValue != null && fieldValue != null)
conditionFieldValue = OType.convert(conditionFieldValue, fieldValue.getClass());
if (fieldValue == null && !conditionFieldValue.equals("null") || fieldValue != null
&& !fieldValue.equals(conditionFieldValue))
value = null;
}
} else if (value instanceof Map<?, ?>) {
// Indexing into a map: same four index forms as the record branch above.
final Object index = getIndexPart(iContext, indexPart);
final String indexAsString = index != null ? index.toString() : null;
final List<String> indexParts = OStringSerializerHelper.smartSplit(indexAsString, ',',
OStringSerializerHelper.DEFAULT_IGNORE_CHARS);
final List<String> indexRanges = OStringSerializerHelper.smartSplit(indexAsString, '-', ' ');
final List<String> indexCondition = OStringSerializerHelper.smartSplit(indexAsString, '=', ' ');
final Map<String, ?> map = (Map<String, ?>) value;
if (indexParts.size() == 1 && indexCondition.size() == 1 && indexRanges.size() == 1)
// SINGLE VALUE
value = map.get(indexAsString);
else if (indexParts.size() > 1) {
// MULTI VALUE
final Object[] values = new Object[indexParts.size()];
for (int i = 0; i < indexParts.size(); ++i) {
values[i] = map.get(OStringSerializerHelper.getStringContent(indexParts.get(i)));
}
value = values;
} else if (indexRanges.size() > 1) {
// MULTI VALUES RANGE: slice by key position, bounds clamped.
String from = indexRanges.get(0);
String to = indexRanges.get(1);
final List<String> fieldNames = new ArrayList<String>(map.keySet());
final int rangeFrom = from != null && !from.isEmpty() ? Integer.parseInt(from) : 0;
final int rangeTo = to != null && !to.isEmpty() ? Math.min(Integer.parseInt(to), fieldNames.size() - 1) : fieldNames
.size() - 1;
final Object[] values = new Object[rangeTo - rangeFrom + 1];
for (int i = rangeFrom; i <= rangeTo; ++i)
values[i - rangeFrom] = map.get(fieldNames.get(i));
value = values;
} else if (!indexCondition.isEmpty()) {
// CONDITION: keep the map only when field = value holds.
final String conditionFieldName = indexCondition.get(0);
Object conditionFieldValue = ORecordSerializerStringAbstract.getTypeValue(indexCondition.get(1));
if (conditionFieldValue instanceof String)
conditionFieldValue = OStringSerializerHelper.getStringContent(conditionFieldValue);
final Object fieldValue = map.get(conditionFieldName);
if (conditionFieldValue != null && fieldValue != null)
conditionFieldValue = OType.convert(conditionFieldValue, fieldValue.getClass());
if (fieldValue == null && !conditionFieldValue.equals("null") || fieldValue != null
&& !fieldValue.equals(conditionFieldValue))
value = null;
}
} else if (OMultiValue.isMultiValue(value)) {
// Indexing into a generic multi-value (collection/array).
// MULTI VALUE
final Object index = getIndexPart(iContext, indexPart);
final String indexAsString = index != null ? index.toString() : null;
final List<String> indexParts = OStringSerializerHelper.smartSplit(indexAsString, ',');
final List<String> indexRanges = OStringSerializerHelper.smartSplit(indexAsString, '-');
final List<String> indexCondition = OStringSerializerHelper.smartSplit(indexAsString, '=', ' ');
if (indexParts.size() == 1 && indexRanges.size() == 1 && indexCondition.size() == 1) {
// SINGLE VALUE: numeric index, map key or nested field expression.
if (value instanceof Map<?, ?>)
value = getMapEntry((Map<String, ?>) value, index);
else if (Character.isDigit(indexAsString.charAt(0)))
value = OMultiValue.getValue(value, Integer.parseInt(indexAsString));
else
// FILTER BY FIELD
value = getFieldValue(value, indexAsString, iContext);
} else if (indexParts.size() > 1) {
// MULTI VALUES
final Object[] values = new Object[indexParts.size()];
for (int i = 0; i < indexParts.size(); ++i)
values[i] = OMultiValue.getValue(value, Integer.parseInt(indexParts.get(i)));
value = values;
} else if (indexRanges.size() > 1) {
// MULTI VALUES RANGE: positional slice of the multi-value, bounds clamped.
String from = indexRanges.get(0);
String to = indexRanges.get(1);
final int rangeFrom = from != null && !from.isEmpty() ? Integer.parseInt(from) : 0;
final int rangeTo = to != null && !to.isEmpty() ? Math.min(Integer.parseInt(to), OMultiValue.getSize(value) - 1)
: OMultiValue.getSize(value) - 1;
final Object[] values = new Object[rangeTo - rangeFrom + 1];
for (int i = rangeFrom; i <= rangeTo; ++i)
values[i - rangeFrom] = OMultiValue.getValue(value, i);
value = values;
} else if (!indexCondition.isEmpty()) {
// CONDITION: keep only the elements matching field = value.
final String conditionFieldName = indexCondition.get(0);
Object conditionFieldValue = ORecordSerializerStringAbstract.getTypeValue(indexCondition.get(1));
if (conditionFieldValue instanceof String)
conditionFieldValue = OStringSerializerHelper.getStringContent(conditionFieldValue);
final HashSet<Object> values = new HashSet<Object>();
for (Object v : OMultiValue.getMultiValueIterable(value)) {
Object filtered = filterItem(conditionFieldName, conditionFieldValue, v);
if (filtered != null)
if (filtered instanceof Collection<?>)
values.addAll((Collection<? extends Object>) filtered);
else
values.add(filtered);
}
if (values.isEmpty())
// RETURNS NULL
value = null;
else if (values.size() == 1)
// RETURNS THE SINGLE ODOCUMENT
value = values.iterator().next();
else
// RETURNS THE FILTERED COLLECTION
value = values;
}
}
} else {
// Plain token: $variable, function call, inline condition or field access.
if (fieldName.length() == 0) {
// NO FIELD NAME: THIS IS THE CASE OF NOT USEFUL . AFTER A ] OR .
beginPos = ++nextSeparatorPos;
continue;
}
if (fieldName.startsWith("$"))
value = iContext.getVariable(fieldName);
else if (fieldName.contains("("))
value = evaluateFunction(value, fieldName, iContext);
else {
final List<String> indexCondition = OStringSerializerHelper.smartSplit(fieldName, '=', ' ');
if (indexCondition.size() == 2) {
// Inline "field = value" condition without brackets.
final String conditionFieldName = indexCondition.get(0);
Object conditionFieldValue = ORecordSerializerStringAbstract.getTypeValue(indexCondition.get(1));
if (conditionFieldValue instanceof String)
conditionFieldValue = OStringSerializerHelper.getStringContent(conditionFieldValue);
value = filterItem(conditionFieldName, conditionFieldValue, value);
} else if (currentRecord != null) {
// GET THE LINKED OBJECT IF ANY
value = getIdentifiableValue(currentRecord, fieldName);
if (value != null && value instanceof ORecord && ((ORecord) value).getInternalStatus() == STATUS.NOT_LOADED)
// RELOAD IT
((ORecord) value).reload();
} else if (value instanceof Map<?, ?>)
value = getMapEntry((Map<String, ?>) value, fieldName);
else if (OMultiValue.isMultiValue(value)) {
// Project the field across every element; nulls are dropped, collections flattened.
final Set<Object> values = new HashSet<Object>();
for (Object v : OMultiValue.getMultiValueIterable(value)) {
final Object item;
if (v instanceof OIdentifiable)
item = getIdentifiableValue((OIdentifiable) v, fieldName);
else if (v instanceof Map)
item = ((Map<?, ?>) v).get(fieldName);
else
item = null;
if (item != null)
if (item instanceof Collection<?>)
values.addAll((Collection<? extends Object>) item);
else
values.add(item);
}
if (values.isEmpty())
value = null;
else
value = values;
} else
return null;
}
}
// Track the current record for the next path step, then advance past the separator.
if (value instanceof OIdentifiable)
currentRecord = (OIdentifiable) value;
else
currentRecord = null;
beginPos = ++nextSeparatorPos;
} while (nextSeparatorPos < fieldNameLength && value != null);
return (RET) value;
}
/**
 * Interprets the text between '[' and ']' of an index expression.
 * <p>
 * A quoted single value yields its unquoted string content, a '$'-prefixed name is resolved as a
 * context variable (null when absent), and anything else (numbers, comma lists, field names) is
 * returned as-is for the caller to interpret.
 *
 * @param iContext  command context used to resolve '$' variables
 * @param indexPart raw index text, never empty
 * @return the resolved index value, or null when a '$' variable is not defined
 */
protected static Object getIndexPart(final OCommandContext iContext, final String indexPart) {
  final char firstChar = indexPart.charAt(0);

  // Single quoted value (no comma list): strip the quotes
  if (indexPart.indexOf(',') == -1 && (firstChar == '"' || firstChar == '\''))
    return OStringSerializerHelper.getStringContent(indexPart);

  // Context variable: getVariable() already returns null when undefined
  if (firstChar == '$')
    return iContext.getVariable(indexPart);

  // Numeric index, comma list or plain field name: hand back verbatim
  return indexPart;
}
/**
 * Keeps {@code iValue} only when its field {@code iConditionFieldName} equals
 * {@code iConditionFieldValue}. Supports documents (resolved through OIdentifiable) and maps.
 *
 * @param iConditionFieldName  name of the field/entry tested by the condition
 * @param iConditionFieldValue expected value; null matches a null field
 * @param iValue               candidate item (OIdentifiable/ODocument or Map)
 * @return the matching document/map, or null when the condition fails or the type is unsupported
 */
@SuppressWarnings("unchecked")
protected static Object filterItem(final String iConditionFieldName, final Object iConditionFieldValue, final Object iValue) {
  if (iValue instanceof OIdentifiable) {
    final ORecord rec = ((OIdentifiable) iValue).getRecord();
    if (rec instanceof ODocument) {
      final ODocument doc = (ODocument) rec;

      Object fieldValue = doc.field(iConditionFieldName);

      if (iConditionFieldValue == null)
        return fieldValue == null ? doc : null;

      // Convert to the condition's type so e.g. "5" matches Integer 5
      fieldValue = OType.convert(fieldValue, iConditionFieldValue.getClass());
      if (fieldValue != null && fieldValue.equals(iConditionFieldValue))
        return doc;
    }
  } else if (iValue instanceof Map<?, ?>) {
    final Map<String, ?> map = (Map<String, ?>) iValue;
    Object fieldValue = getMapEntry(map, iConditionFieldName);

    // BUG FIX: mirror the document branch. Previously a null condition value reached
    // iConditionFieldValue.getClass() below and threw a NullPointerException.
    if (iConditionFieldValue == null)
      return fieldValue == null ? map : null;

    fieldValue = OType.convert(fieldValue, iConditionFieldValue.getClass());
    if (fieldValue != null && fieldValue.equals(iConditionFieldValue))
      return map;
  }
  return null;
}
/**
 * Retrieves a value from the map following the dotted notation: "address.city" first looks up
 * "address", then descends into the resulting document or nested map with "city".
 *
 * @param iMap the map to read from; null yields null
 * @param iKey the key to retrieve; when a String it may contain '.'-separated sub-fields, any
 *             other type is used as a direct map key
 * @return the resolved value, or null when the map/key is null or any path segment is missing
 */
@SuppressWarnings("unchecked")
public static Object getMapEntry(final Map<String, ?> iMap, final Object iKey) {
  if (iMap == null || iKey == null)
    return null;

  if (iKey instanceof String) {
    final String fullName = (String) iKey;
    final int pos = fullName.indexOf('.');

    // First path segment is the direct map key
    final String firstName = pos > -1 ? fullName.substring(0, pos) : fullName;

    final Object value = iMap.get(firstName);
    if (value == null)
      return null;

    if (pos > -1) {
      // BUG FIX: take the remainder of the path from the ORIGINAL key. The previous code
      // called substring(pos + 1) on the already-truncated first segment (whose length is
      // exactly pos), which threw StringIndexOutOfBoundsException for every dotted key.
      final String restFieldName = fullName.substring(pos + 1);
      if (value instanceof ODocument)
        return getFieldValue(value, restFieldName);
      else if (value instanceof Map<?, ?>)
        return getMapEntry((Map<String, ?>) value, restFieldName);
    }

    return value;
  }

  // Non-string keys: direct lookup, no path handling
  return iMap.get(iKey);
}
/**
 * Resolves a field or '@'-prefixed attribute (e.g. @rid, @class, @size, @raw) against a record.
 *
 * @param iCurrent   record to read from; null yields null
 * @param iFieldName attribute name when starting with '@', otherwise a document field name
 * @return the attribute/field value, or null when the record or field name is missing
 */
public static Object getIdentifiableValue(final OIdentifiable iCurrent, final String iFieldName) {
  // BUG FIX: the null check on iCurrent used to sit AFTER the '@' attribute branch, which had
  // already dereferenced it — '@...' lookups on a null record threw NullPointerException.
  if (iCurrent == null)
    return null;
  if (iFieldName == null || iFieldName.length() == 0)
    return null;

  final char begin = iFieldName.charAt(0);
  if (begin == '@') {
    // RETURN AN ATTRIBUTE
    if (iFieldName.equalsIgnoreCase(ATTRIBUTE_THIS))
      return iCurrent.getRecord();
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_RID))
      return iCurrent.getIdentity();
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_RID_ID))
      return iCurrent.getIdentity().getClusterId();
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_RID_POS))
      return iCurrent.getIdentity().getClusterPosition();
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_VERSION))
      return iCurrent.getRecord().getRecordVersion().getCounter();
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_CLASS))
      return ((ODocument) iCurrent.getRecord()).getClassName();
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_TYPE))
      return Orient.instance().getRecordFactoryManager().getRecordTypeName(ORecordInternal.getRecordType(iCurrent.getRecord()));
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_SIZE)) {
      final byte[] stream = iCurrent.getRecord().toStream();
      return stream != null ? stream.length : 0;
    } else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_FIELDS))
      return ((ODocument) iCurrent.getRecord()).fieldNames();
    else if (iFieldName.equalsIgnoreCase(ATTRIBUTE_RAW))
      return new String(iCurrent.getRecord().toStream());
    // Unknown '@' attribute: fall through and treat it as a plain field name
  }

  // Plain field access: force unmarshalling of the requested field, then read it directly
  final ODocument doc = ((ODocument) iCurrent.getRecord());
  doc.checkForFields(iFieldName);
  return doc._fieldValues.get(iFieldName);
}
/**
 * Applies a built-in inline function (SIZE(), TOUPPERCASE(), SUBSTRING(a,b), ...) to the current
 * value. Function names are matched case-insensitively by prefix; anything unrecognized is
 * resolved as a runtime SQL function via OSQLHelper.
 *
 * @param currentValue value the function operates on; null short-circuits to null
 * @param iFunction    the function call text, e.g. "substring(0,3)"
 * @param iContext     command context used to resolve $current and variables in arguments; may be null
 * @return the function result, or null when the value is null or the function cannot be applied
 */
public static Object evaluateFunction(final Object currentValue, final String iFunction, final OCommandContext iContext) {
  if (currentValue == null)
    return null;

  Object result = null;

  final String function = iFunction.toUpperCase();

  if (function.startsWith("SIZE("))
    result = currentValue instanceof ORecord ? 1 : OMultiValue.getSize(currentValue);
  else if (function.startsWith("LENGTH("))
    result = currentValue.toString().length();
  else if (function.startsWith("TOUPPERCASE("))
    result = currentValue.toString().toUpperCase();
  else if (function.startsWith("TOLOWERCASE("))
    result = currentValue.toString().toLowerCase();
  else if (function.startsWith("TRIM("))
    result = currentValue.toString().trim();
  else if (function.startsWith("TOJSON("))
    result = currentValue instanceof ODocument ? ((ODocument) currentValue).toJSON() : null;
  else if (function.startsWith("KEYS("))
    result = currentValue instanceof Map<?, ?> ? ((Map<?, ?>) currentValue).keySet() : null;
  else if (function.startsWith("VALUES("))
    result = currentValue instanceof Map<?, ?> ? ((Map<?, ?>) currentValue).values() : null;
  else if (function.startsWith("ASSTRING("))
    result = currentValue.toString();
  else if (function.startsWith("ASINTEGER("))
    // valueOf instead of the deprecated Integer(String) constructor; same parse semantics
    result = Integer.valueOf(currentValue.toString());
  else if (function.startsWith("ASFLOAT("))
    result = Float.valueOf(currentValue.toString());
  else if (function.startsWith("ASBOOLEAN(")) {
    if (currentValue instanceof String)
      result = Boolean.valueOf((String) currentValue);
    else if (currentValue instanceof Number) {
      // Only 0 and 1 are mapped; any other number leaves the result null
      final int bValue = ((Number) currentValue).intValue();
      if (bValue == 0)
        result = Boolean.FALSE;
      else if (bValue == 1)
        result = Boolean.TRUE;
    }
  } else if (function.startsWith("ASDATE(")) {
    if (currentValue instanceof Date)
      result = currentValue;
    else if (currentValue instanceof Number)
      result = new Date(((Number) currentValue).longValue());
    else
      try {
        result = ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().getConfiguration().getDateFormatInstance()
            .parse(currentValue.toString());
      } catch (ParseException e) {
        // Unparsable date: keep the result null (best-effort conversion, as before)
      }
  } else if (function.startsWith("ASDATETIME(")) {
    if (currentValue instanceof Date)
      result = currentValue;
    else if (currentValue instanceof Number)
      result = new Date(((Number) currentValue).longValue());
    else
      try {
        result = ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().getConfiguration().getDateTimeFormatInstance()
            .parse(currentValue.toString());
      } catch (ParseException e) {
        // Unparsable datetime: keep the result null (best-effort conversion, as before)
      }
  } else {
    // EXTRACT ARGUMENTS
    final List<String> args = OStringSerializerHelper.getParameters(iFunction.substring(iFunction.indexOf('(')));

    final ORecord currentRecord = iContext != null ? (ORecord) iContext.getVariable("$current") : null;

    // Resolve each argument against the current record / context (variables, field refs, ...)
    for (int i = 0; i < args.size(); ++i) {
      final String arg = args.get(i);
      final Object o = OSQLHelper.getValue(arg, currentRecord, iContext);
      if (o != null)
        args.set(i, o.toString());
    }

    if (function.startsWith("CHARAT("))
      result = currentValue.toString().charAt(Integer.parseInt(args.get(0)));
    else if (function.startsWith("INDEXOF(")) {
      if (args.size() == 1)
        result = currentValue.toString().indexOf(OStringSerializerHelper.getStringContent(args.get(0)));
      else
        result = currentValue.toString().indexOf(OStringSerializerHelper.getStringContent(args.get(0)),
            Integer.parseInt(args.get(1)));
    } else if (function.startsWith("SUBSTRING(")) {
      if (args.size() == 1)
        result = currentValue.toString().substring(Integer.parseInt(args.get(0)));
      else
        result = currentValue.toString().substring(Integer.parseInt(args.get(0)), Integer.parseInt(args.get(1)));
    } else if (function.startsWith("APPEND("))
      result = currentValue.toString() + OStringSerializerHelper.getStringContent(args.get(0));
    else if (function.startsWith("PREFIX("))
      result = OStringSerializerHelper.getStringContent(args.get(0)) + currentValue.toString();
    else if (function.startsWith("FORMAT(")) {
      // Dates use the argument as a SimpleDateFormat pattern, everything else as a format string
      if (currentValue instanceof Date)
        result = new SimpleDateFormat(OStringSerializerHelper.getStringContent(args.get(0))).format(currentValue);
      else
        result = String.format(OStringSerializerHelper.getStringContent(args.get(0)), currentValue.toString());
    } else if (function.startsWith("LEFT(")) {
      // Clamp to the string length so a too-large count is not an error
      final int len = Integer.parseInt(args.get(0));
      final String stringValue = currentValue.toString();
      result = stringValue.substring(0, len <= stringValue.length() ? len : stringValue.length());
    } else if (function.startsWith("RIGHT(")) {
      // Clamp to the string length so a too-large count is not an error
      final int offset = Integer.parseInt(args.get(0));
      final String stringValue = currentValue.toString();
      result = stringValue.substring(offset < stringValue.length() ? stringValue.length() - offset : 0);
    } else {
      // Not a built-in: delegate to the registered SQL function implementations
      final OSQLFunctionRuntime f = OSQLHelper.getFunction(null, iFunction);
      if (f != null)
        result = f.execute(currentRecord, currentRecord, null, iContext);
    }
  }

  return result;
}
/**
 * Copies a single field (key + value) from a source document entry into the clone {@code iCloned}.
 * <p>
 * The value is duplicated according to its concrete runtime type: embedded documents, rid-bags
 * and the tracked/lazy collection implementations each get a copy owned by {@code iCloned},
 * plain java.util lists/sets/maps are shallow-copied into fresh containers, and any other value
 * is stored by reference. The instanceof checks are ordered from most to least specific — do not
 * reorder them.
 *
 * @param iCloned destination (cloned) document that will own the copied value
 * @param iEntry  source field entry (field name -> field value)
 */
@SuppressWarnings("unchecked")
public static void copyFieldValue(final ODocument iCloned, final Entry<String, Object> iEntry) {
  final Object fieldValue = iEntry.getValue();

  if (fieldValue != null) {
    if (fieldValue instanceof ODocument && !((ODocument) fieldValue).getIdentity().isValid()) {
      // EMBEDDED DOCUMENT (invalid identity = not persisted): deep-copy it
      iCloned._fieldValues.put(iEntry.getKey(), ((ODocument) fieldValue).copy());
    } else if (fieldValue instanceof ORidBag) {
      iCloned._fieldValues.put(iEntry.getKey(), ((ORidBag) fieldValue).copy());
    } else if (fieldValue instanceof ORecordLazyList) {
      // LISTS: copy bound to the clone as owner
      iCloned._fieldValues.put(iEntry.getKey(), ((ORecordLazyList) fieldValue).copy(iCloned));
    } else if (fieldValue instanceof ORecordTrackedList) {
      final ORecordTrackedList newList = new ORecordTrackedList(iCloned);
      newList.addAll((ORecordTrackedList) fieldValue);
      iCloned._fieldValues.put(iEntry.getKey(), newList);
    } else if (fieldValue instanceof OTrackedList<?>) {
      final OTrackedList<Object> newList = new OTrackedList<Object>(iCloned);
      newList.addAll((OTrackedList<Object>) fieldValue);
      iCloned._fieldValues.put(iEntry.getKey(), newList);
    } else if (fieldValue instanceof List<?>) {
      // Plain list: shallow copy into a new ArrayList
      iCloned._fieldValues.put(iEntry.getKey(), new ArrayList<Object>((List<Object>) fieldValue));
      // SETS
    } else if (fieldValue instanceof OMVRBTreeRIDSet) {
      iCloned._fieldValues.put(iEntry.getKey(), ((OMVRBTreeRIDSet) fieldValue).copy(iCloned));
    } else if (fieldValue instanceof ORecordLazySet) {
      final ORecordLazySet newList = new ORecordLazySet(iCloned);
      newList.addAll((ORecordLazySet) fieldValue);
      iCloned._fieldValues.put(iEntry.getKey(), newList);
    } else if (fieldValue instanceof ORecordTrackedSet) {
      final ORecordTrackedSet newList = new ORecordTrackedSet(iCloned);
      newList.addAll((ORecordTrackedSet) fieldValue);
      iCloned._fieldValues.put(iEntry.getKey(), newList);
    } else if (fieldValue instanceof OTrackedSet<?>) {
      final OTrackedSet<Object> newList = new OTrackedSet<Object>(iCloned);
      newList.addAll((OTrackedSet<Object>) fieldValue);
      iCloned._fieldValues.put(iEntry.getKey(), newList);
    } else if (fieldValue instanceof Set<?>) {
      // Plain set: shallow copy into a new HashSet
      iCloned._fieldValues.put(iEntry.getKey(), new HashSet<Object>((Set<Object>) fieldValue));
      // MAPS
    } else if (fieldValue instanceof ORecordLazyMap) {
      final ORecordLazyMap newMap = new ORecordLazyMap(iCloned, ((ORecordLazyMap) fieldValue).getRecordType());
      newMap.putAll((ORecordLazyMap) fieldValue);
      iCloned._fieldValues.put(iEntry.getKey(), newMap);
    } else if (fieldValue instanceof OTrackedMap) {
      final OTrackedMap<Object> newMap = new OTrackedMap<Object>(iCloned);
      newMap.putAll((OTrackedMap<Object>) fieldValue);
      iCloned._fieldValues.put(iEntry.getKey(), newMap);
    } else if (fieldValue instanceof Map<?, ?>) {
      // Plain map: shallow copy preserving iteration order
      iCloned._fieldValues.put(iEntry.getKey(), new LinkedHashMap<String, Object>((Map<String, Object>) fieldValue));
    } else
      // Scalars and anything unrecognized: store by reference
      iCloned._fieldValues.put(iEntry.getKey(), fieldValue);
  } else if (iCloned.getImmutableSchemaClass() != null) {
    // Null value: only materialize the entry when the schema marks the property mandatory
    // (fieldValue is null here, so this records the key with an explicit null)
    final OProperty prop = iCloned.getImmutableSchemaClass().getProperty(iEntry.getKey());
    if (prop != null && prop.isMandatory())
      iCloned._fieldValues.put(iEntry.getKey(), fieldValue);
  }
}
/**
 * Compares two items element-wise: documents are compared deeply (loading the other side when it
 * is only a RID and the current document is dirty), everything else as scalars.
 *
 * @return true when the two items are considered identical
 */
public static boolean hasSameContentItem(final Object iCurrent, ODatabaseDocumentInternal iMyDb, final Object iOther,
    final ODatabaseDocumentInternal iOtherDb, RIDMapper ridMapper) {
  // Non-document items are compared as scalar values
  if (!(iCurrent instanceof ODocument))
    return compareScalarValues(iCurrent, iOther, ridMapper);

  final ODocument currentDoc = (ODocument) iCurrent;

  // Other side is already a document: deep content comparison
  if (!(iOther instanceof ORID))
    return ODocumentHelper.hasSameContentOf(currentDoc, iMyDb, (ODocument) iOther, iOtherDb, ridMapper);

  // Other side is a bare RID: a clean document can be compared by identity alone
  if (!currentDoc.isDirty())
    return currentDoc.getIdentity().equals(iOther);

  // Dirty document vs RID: load the other record and compare contents
  final ODocument loadedOther = iOtherDb.load((ORID) iOther);
  return ODocumentHelper.hasSameContentOf(currentDoc, iMyDb, loadedOther, iOtherDb, ridMapper);
}
/**
 * Makes a deep comparison field by field to check if the passed ODocument instance is identical
 * as identity and content to the current one. Instead equals() just checks if the RID are the
 * same. Delegates to the six-argument overload with the identity check enabled.
 *
 * @param iCurrent  first document to compare
 * @param iMyDb     database owning {@code iCurrent} (may be null)
 * @param iOther    ODocument instance to compare against; null is never identical
 * @param iOtherDb  database owning {@code iOther} (may be null)
 * @param ridMapper optional RID translation applied when comparing links
 * @return true if the two document are identical, otherwise false
 * @see #equals(Object)
 */
@SuppressWarnings("unchecked")
public static boolean hasSameContentOf(final ODocument iCurrent, final ODatabaseDocumentInternal iMyDb, final ODocument iOther,
    final ODatabaseDocumentInternal iOtherDb, RIDMapper ridMapper) {
  return hasSameContentOf(iCurrent, iMyDb, iOther, iOtherDb, ridMapper, true);
}
/**
 * Makes a deep comparison field by field to check if the passed ODocument instance is identical
 * in the content to the current one. Instead equals() just checks if the RID are the same.
 *
 * @param iCurrent           first document to compare
 * @param iMyDb              database owning {@code iCurrent}; when non-null, operations on
 *                           {@code iCurrent} are executed with this database bound to the thread
 * @param iOther             ODocument instance to compare against; null is never identical
 * @param iOtherDb           database owning {@code iOther} (may be null)
 * @param ridMapper          optional RID translation applied when comparing links
 * @param iCheckAlsoIdentity when true, documents with a valid but different identity are
 *                           immediately considered different
 * @return true if the two document are identical, otherwise false
 * @see #equals(Object)
 */
@SuppressWarnings("unchecked")
public static boolean hasSameContentOf(final ODocument iCurrent, final ODatabaseDocumentInternal iMyDb, final ODocument iOther,
    final ODatabaseDocumentInternal iOtherDb, RIDMapper ridMapper, final boolean iCheckAlsoIdentity) {
  if (iOther == null)
    return false;

  if (iCheckAlsoIdentity && !iCurrent.equals(iOther) && iCurrent.getIdentity().isValid())
    return false;

  // Reload each side under its own database when the record has not been loaded yet
  if (iMyDb != null)
    makeDbCall(iMyDb, new ODbRelatedCall<Object>() {
      public Object call() {
        if (iCurrent.getInternalStatus() == STATUS.NOT_LOADED)
          iCurrent.reload();
        return null;
      }
    });

  if (iOtherDb != null)
    makeDbCall(iOtherDb, new ODbRelatedCall<Object>() {
      public Object call() {
        if (iOther.getInternalStatus() == STATUS.NOT_LOADED)
          iOther.reload();
        return null;
      }
    });

  // Force field unmarshalling on both sides before touching the raw field maps
  if (iMyDb != null)
    makeDbCall(iMyDb, new ODbRelatedCall<Object>() {
      public Object call() {
        iCurrent.checkForFields();
        return null;
      }
    });
  else
    iCurrent.checkForFields();

  if (iOtherDb != null)
    makeDbCall(iOtherDb, new ODbRelatedCall<Object>() {
      public Object call() {
        iOther.checkForFields();
        return null;
      }
    });
  else
    iOther.checkForFields();

  // Different field counts can never be identical
  if (iCurrent._fieldValues.size() != iOther._fieldValues.size())
    return false;

  // CHECK FIELD-BY-FIELD
  Object myFieldValue;
  Object otherFieldValue;
  for (Entry<String, Object> f : iCurrent._fieldValues.entrySet()) {
    myFieldValue = f.getValue();
    otherFieldValue = iOther._fieldValues.get(f.getKey());

    // Same reference (or both null): nothing more to compare for this field
    if (myFieldValue == otherFieldValue)
      continue;

    // CHECK FOR NULLS
    if (myFieldValue == null) {
      if (otherFieldValue != null)
        return false;
    } else if (otherFieldValue == null)
      return false;

    // Dispatch on the value types: matching container types recurse, everything else is scalar
    if (myFieldValue != null)
      if (myFieldValue instanceof Set && otherFieldValue instanceof Set) {
        if (!compareSets(iMyDb, (Set<?>) myFieldValue, iOtherDb, (Set<?>) otherFieldValue, ridMapper))
          return false;
      } else if (myFieldValue instanceof Collection && otherFieldValue instanceof Collection) {
        if (!compareCollections(iMyDb, (Collection<?>) myFieldValue, iOtherDb, (Collection<?>) otherFieldValue, ridMapper))
          return false;
      } else if (myFieldValue instanceof ORidBag && otherFieldValue instanceof ORidBag) {
        if (!compareBags(iMyDb, (ORidBag) myFieldValue, iOtherDb, (ORidBag) otherFieldValue, ridMapper))
          return false;
      } else if (myFieldValue instanceof Map && otherFieldValue instanceof Map) {
        if (!compareMaps(iMyDb, (Map<Object, Object>) myFieldValue, iOtherDb, (Map<Object, Object>) otherFieldValue, ridMapper))
          return false;
      } else if (myFieldValue instanceof ODocument && otherFieldValue instanceof ODocument) {
        if (!hasSameContentOf((ODocument) myFieldValue, iMyDb, (ODocument) otherFieldValue, iOtherDb, ridMapper))
          return false;
      } else {
        if (!compareScalarValues(myFieldValue, otherFieldValue, ridMapper))
          return false;
      }
  }

  return true;
}
/**
 * Deep-compares two maps key by key: for each key of the first map the second map must contain
 * the same key, with document values compared recursively and anything else as scalars.
 * Comparison is key-based, so iteration order is NOT significant. Every access to a map is
 * wrapped in makeDbCall() so it runs with the owning database bound to the thread.
 *
 * @return true when the maps have the same size and matching entries
 */
public static boolean compareMaps(ODatabaseDocumentInternal iMyDb, Map<Object, Object> myFieldValue,
    ODatabaseDocumentInternal iOtherDb, Map<Object, Object> otherFieldValue, RIDMapper ridMapper) {
  final Map<Object, Object> myMap = myFieldValue;
  final Map<Object, Object> otherMap = otherFieldValue;

  if (myMap.size() != otherMap.size())
    return false;

  // Temporarily disable auto-conversion to records so iteration yields raw links;
  // the original settings are restored in the finally block below
  boolean oldMyAutoConvert = false;
  boolean oldOtherAutoConvert = false;

  if (myMap instanceof ORecordLazyMultiValue) {
    oldMyAutoConvert = ((ORecordLazyMultiValue) myMap).isAutoConvertToRecord();
    ((ORecordLazyMultiValue) myMap).setAutoConvertToRecord(false);
  }

  if (otherMap instanceof ORecordLazyMultiValue) {
    oldOtherAutoConvert = ((ORecordLazyMultiValue) otherMap).isAutoConvertToRecord();
    ((ORecordLazyMultiValue) otherMap).setAutoConvertToRecord(false);
  }

  try {
    final Iterator<Entry<Object, Object>> myEntryIterator = makeDbCall(iMyDb,
        new ODbRelatedCall<Iterator<Entry<Object, Object>>>() {
          public Iterator<Entry<Object, Object>> call() {
            return myMap.entrySet().iterator();
          }
        });

    while (makeDbCall(iMyDb, new ODbRelatedCall<Boolean>() {
      public Boolean call() {
        return myEntryIterator.hasNext();
      }
    })) {
      final Entry<Object, Object> myEntry = makeDbCall(iMyDb, new ODbRelatedCall<Entry<Object, Object>>() {
        public Entry<Object, Object> call() {
          return myEntryIterator.next();
        }
      });
      final Object myKey = makeDbCall(iMyDb, new ODbRelatedCall<Object>() {
        public Object call() {
          return myEntry.getKey();
        }
      });

      // The other map must contain every key of mine (sizes already matched above)
      if (makeDbCall(iOtherDb, new ODbRelatedCall<Boolean>() {
        public Boolean call() {
          return !otherMap.containsKey(myKey);
        }
      }))
        return false;

      if (myEntry.getValue() instanceof ODocument) {
        // Document values: recurse with full content comparison
        if (!hasSameContentOf(makeDbCall(iMyDb, new ODbRelatedCall<ODocument>() {
          public ODocument call() {
            return (ODocument) myEntry.getValue();
          }
        }), iMyDb, makeDbCall(iOtherDb, new ODbRelatedCall<ODocument>() {
          public ODocument call() {
            return (ODocument) otherMap.get(myEntry.getKey());
          }
        }), iOtherDb, ridMapper))
          return false;
      } else {
        // Anything else: scalar comparison (with optional RID mapping)
        final Object myValue = makeDbCall(iMyDb, new ODbRelatedCall<Object>() {
          public Object call() {
            return myEntry.getValue();
          }
        });
        final Object otherValue = makeDbCall(iOtherDb, new ODbRelatedCall<Object>() {
          public Object call() {
            return otherMap.get(myEntry.getKey());
          }
        });

        if (!compareScalarValues(myValue, otherValue, ridMapper))
          return false;
      }
    }
    return true;
  } finally {
    // Restore the auto-convert flags captured above
    if (myMap instanceof ORecordLazyMultiValue)
      ((ORecordLazyMultiValue) myMap).setAutoConvertToRecord(oldMyAutoConvert);

    if (otherMap instanceof ORecordLazyMultiValue)
      ((ORecordLazyMultiValue) otherMap).setAutoConvertToRecord(oldOtherAutoConvert);
  }
}
/**
 * Deep-compares two collections positionally: both iterators are advanced in lockstep and each
 * pair of elements is compared with hasSameContentItem(), so element ORDER matters here (unlike
 * compareSets). Every iteration step runs under the owning database via makeDbCall().
 *
 * @return true when the collections have the same size and pairwise-identical elements
 */
public static boolean compareCollections(ODatabaseDocumentInternal iMyDb, Collection<?> myFieldValue,
    ODatabaseDocumentInternal iOtherDb, Collection<?> otherFieldValue, RIDMapper ridMapper) {
  final Collection<?> myCollection = myFieldValue;
  final Collection<?> otherCollection = otherFieldValue;

  if (myCollection.size() != otherCollection.size())
    return false;

  // Temporarily disable auto-conversion to records; restored in the finally block
  boolean oldMyAutoConvert = false;
  boolean oldOtherAutoConvert = false;

  if (myCollection instanceof ORecordLazyMultiValue) {
    oldMyAutoConvert = ((ORecordLazyMultiValue) myCollection).isAutoConvertToRecord();
    ((ORecordLazyMultiValue) myCollection).setAutoConvertToRecord(false);
  }

  if (otherCollection instanceof ORecordLazyMultiValue) {
    oldOtherAutoConvert = ((ORecordLazyMultiValue) otherCollection).isAutoConvertToRecord();
    ((ORecordLazyMultiValue) otherCollection).setAutoConvertToRecord(false);
  }

  try {
    final Iterator<?> myIterator = makeDbCall(iMyDb, new ODbRelatedCall<Iterator<?>>() {
      public Iterator<?> call() {
        return myCollection.iterator();
      }
    });

    final Iterator<?> otherIterator = makeDbCall(iOtherDb, new ODbRelatedCall<Iterator<?>>() {
      public Iterator<?> call() {
        return otherCollection.iterator();
      }
    });

    // Sizes are equal, so advancing only on myIterator is safe for both
    while (makeDbCall(iMyDb, new ODbRelatedCall<Boolean>() {
      public Boolean call() {
        return myIterator.hasNext();
      }
    })) {
      final Object myNextVal = makeDbCall(iMyDb, new ODbRelatedCall<Object>() {
        public Object call() {
          return myIterator.next();
        }
      });
      final Object otherNextVal = makeDbCall(iOtherDb, new ODbRelatedCall<Object>() {
        public Object call() {
          return otherIterator.next();
        }
      });

      if (!hasSameContentItem(myNextVal, iMyDb, otherNextVal, iOtherDb, ridMapper))
        return false;
    }
    return true;
  } finally {
    // Restore the auto-convert flags captured above
    if (myCollection instanceof ORecordLazyMultiValue)
      ((ORecordLazyMultiValue) myCollection).setAutoConvertToRecord(oldMyAutoConvert);

    if (otherCollection instanceof ORecordLazyMultiValue)
      ((ORecordLazyMultiValue) otherCollection).setAutoConvertToRecord(oldOtherAutoConvert);
  }
}
/**
 * Deep-compares two sets order-insensitively: every element of the first set must match SOME
 * element of the second, found by a fresh linear scan — O(n*m) overall. All set accesses run
 * under the owning database via makeDbCall().
 * <p>
 * NOTE(review): because each my-element rescans otherSet from the start, two different
 * my-elements can be satisfied by the SAME other-element; with proper Set semantics (no
 * duplicates by equals) this is harmless, but for sets of mutually content-equal documents it is
 * more lenient than a strict bijection — confirm this is intended.
 *
 * @return true when the sets have the same size and every my-element has a match
 */
public static boolean compareSets(ODatabaseDocumentInternal iMyDb, Set<?> myFieldValue, ODatabaseDocumentInternal iOtherDb,
    Set<?> otherFieldValue, RIDMapper ridMapper) {
  final Set<?> mySet = myFieldValue;
  final Set<?> otherSet = otherFieldValue;

  // Sizes are read under each set's own database
  final int mySize = makeDbCall(iMyDb, new ODbRelatedCall<Integer>() {
    public Integer call() {
      return mySet.size();
    }
  });

  final int otherSize = makeDbCall(iOtherDb, new ODbRelatedCall<Integer>() {
    public Integer call() {
      return otherSet.size();
    }
  });

  if (mySize != otherSize)
    return false;

  // Temporarily disable auto-conversion to records; restored in the finally block
  boolean oldMyAutoConvert = false;
  boolean oldOtherAutoConvert = false;

  if (mySet instanceof ORecordLazyMultiValue) {
    oldMyAutoConvert = ((ORecordLazyMultiValue) mySet).isAutoConvertToRecord();
    ((ORecordLazyMultiValue) mySet).setAutoConvertToRecord(false);
  }

  if (otherSet instanceof ORecordLazyMultiValue) {
    oldOtherAutoConvert = ((ORecordLazyMultiValue) otherSet).isAutoConvertToRecord();
    ((ORecordLazyMultiValue) otherSet).setAutoConvertToRecord(false);
  }

  try {
    final Iterator<?> myIterator = makeDbCall(iMyDb, new ODbRelatedCall<Iterator<?>>() {
      public Iterator<?> call() {
        return mySet.iterator();
      }
    });

    while (makeDbCall(iMyDb, new ODbRelatedCall<Boolean>() {
      public Boolean call() {
        return myIterator.hasNext();
      }
    })) {
      // A fresh iterator over otherSet for every my-element (linear search)
      final Iterator<?> otherIterator = makeDbCall(iOtherDb, new ODbRelatedCall<Iterator<?>>() {
        public Iterator<?> call() {
          return otherSet.iterator();
        }
      });

      final Object myNextVal = makeDbCall(iMyDb, new ODbRelatedCall<Object>() {
        public Object call() {
          return myIterator.next();
        }
      });

      boolean found = false;
      while (!found && makeDbCall(iOtherDb, new ODbRelatedCall<Boolean>() {
        public Boolean call() {
          return otherIterator.hasNext();
        }
      })) {
        final Object otherNextVal = makeDbCall(iOtherDb, new ODbRelatedCall<Object>() {
          public Object call() {
            return otherIterator.next();
          }
        });

        found = hasSameContentItem(myNextVal, iMyDb, otherNextVal, iOtherDb, ridMapper);
      }

      if (!found)
        return false;
    }
    return true;
  } finally {
    // Restore the auto-convert flags captured above
    if (mySet instanceof ORecordLazyMultiValue)
      ((ORecordLazyMultiValue) mySet).setAutoConvertToRecord(oldMyAutoConvert);

    if (otherSet instanceof ORecordLazyMultiValue)
      ((ORecordLazyMultiValue) otherSet).setAutoConvertToRecord(oldOtherAutoConvert);
  }
}
/**
 * Compares two rid-bags by identity: a working copy of the other bag is built, then for every
 * RID of the first bag (optionally translated through {@code ridMapper}) one occurrence is
 * removed from the copy; the bags match when the copy ends up empty. Auto-conversion to records
 * is disabled on both bags for the duration and restored in the finally block.
 *
 * @return true when both bags contain the same RIDs (same multiplicity)
 */
public static boolean compareBags(ODatabaseDocumentInternal iMyDb, ORidBag myFieldValue, ODatabaseDocumentInternal iOtherDb,
    ORidBag otherFieldValue, RIDMapper ridMapper) {
  final ORidBag myBag = myFieldValue;
  final ORidBag otherBag = otherFieldValue;

  // Sizes are read under each bag's own database
  final int mySize = makeDbCall(iMyDb, new ODbRelatedCall<Integer>() {
    public Integer call() {
      return myBag.size();
    }
  });

  final int otherSize = makeDbCall(iOtherDb, new ODbRelatedCall<Integer>() {
    public Integer call() {
      return otherBag.size();
    }
  });

  if (mySize != otherSize)
    return false;

  boolean oldMyAutoConvert;
  boolean oldOtherAutoConvert;

  oldMyAutoConvert = myBag.isAutoConvertToRecord();
  myBag.setAutoConvertToRecord(false);

  oldOtherAutoConvert = otherBag.isAutoConvertToRecord();
  otherBag.setAutoConvertToRecord(false);

  // Working copy of the other bag; entries are removed as they are matched below
  final ORidBag otherBagCopy = makeDbCall(iOtherDb, new ODbRelatedCall<ORidBag>() {
    @Override
    public ORidBag call() {
      final ORidBag otherRidBag = new ORidBag();
      otherRidBag.setAutoConvertToRecord(false);

      for (OIdentifiable identifiable : otherBag)
        otherRidBag.add(identifiable);

      return otherRidBag;
    }
  });

  try {
    final Iterator<OIdentifiable> myIterator = makeDbCall(iMyDb, new ODbRelatedCall<Iterator<OIdentifiable>>() {
      public Iterator<OIdentifiable> call() {
        return myBag.iterator();
      }
    });

    while (makeDbCall(iMyDb, new ODbRelatedCall<Boolean>() {
      public Boolean call() {
        return myIterator.hasNext();
      }
    })) {
      final OIdentifiable myIdentifiable = makeDbCall(iMyDb, new ODbRelatedCall<OIdentifiable>() {
        @Override
        public OIdentifiable call() {
          return myIterator.next();
        }
      });

      // Translate my RID into the other database's RID space when a mapper is provided
      final ORID otherRid;
      if (ridMapper != null) {
        ORID convertedRid = ridMapper.map(myIdentifiable.getIdentity());
        if (convertedRid != null)
          otherRid = convertedRid;
        else
          otherRid = myIdentifiable.getIdentity();
      } else
        otherRid = myIdentifiable.getIdentity();

      makeDbCall(iOtherDb, new ODbRelatedCall<Object>() {
        @Override
        public Object call() {
          otherBagCopy.remove(otherRid);
          return null;
        }
      });
    }

    // Every entry of the copy was matched exactly when nothing is left
    return makeDbCall(iOtherDb, new ODbRelatedCall<Boolean>() {
      @Override
      public Boolean call() {
        return otherBagCopy.isEmpty();
      }
    });

  } finally {
    // Restore the auto-convert flags captured above
    myBag.setAutoConvertToRecord(oldMyAutoConvert);
    otherBag.setAutoConvertToRecord(oldOtherAutoConvert);
  }
}
/**
 * Compares two scalar values, with special handling for arrays (element-wise via reflection),
 * numbers (widened to long or double when both sides are the same family) and persistent RIDs
 * (my side optionally translated through {@code ridMapper} before equals()).
 *
 * @return true when the two values are considered equal
 */
private static boolean compareScalarValues(Object myValue, Object otherValue, RIDMapper ridMapper) {
  // Null handling: equal only when both sides are null
  if (myValue == null)
    return otherValue == null;
  if (otherValue == null)
    return false;

  final boolean myIsArray = myValue.getClass().isArray();
  final boolean otherIsArray = otherValue.getClass().isArray();

  // Array vs non-array can never match
  if (myIsArray != otherIsArray)
    return false;

  if (myIsArray) {
    // Element-wise comparison via reflection (works for primitive and object arrays)
    final int length = Array.getLength(myValue);
    if (length != Array.getLength(otherValue))
      return false;

    for (int i = 0; i < length; i++) {
      final Object mine = Array.get(myValue, i);
      final Object theirs = Array.get(otherValue, i);
      if (mine == null ? theirs != null : !mine.equals(theirs))
        return false;
    }
    return true;
  }

  if (myValue instanceof Number && otherValue instanceof Number) {
    final Number myNumber = (Number) myValue;
    final Number otherNumber = (Number) otherValue;

    // Same numeric family: compare widened values so e.g. Integer 5 equals Long 5
    if (isInteger(myNumber) && isInteger(otherNumber))
      return myNumber.longValue() == otherNumber.longValue();
    if (isFloat(myNumber) && isFloat(otherNumber))
      return myNumber.doubleValue() == otherNumber.doubleValue();
    // Mixed integer/float pairs deliberately fall through to plain equals()
  }

  // Translate my persistent RID into the other database's RID space before comparing
  if (ridMapper != null && myValue instanceof ORID && otherValue instanceof ORID && ((ORID) myValue).isPersistent()) {
    final ORID mapped = ridMapper.map((ORID) myValue);
    if (mapped != null)
      myValue = mapped;
  }

  return myValue.equals(otherValue);
}
/** True when {@code value} is an integral wrapper type (Long/Integer/Short/Byte). */
private static boolean isInteger(Number value) {
  return value instanceof Long || value instanceof Integer || value instanceof Short || value instanceof Byte;
}
/** True when {@code value} is a floating-point wrapper type (Double/Float). */
private static boolean isFloat(Number value) {
  return value instanceof Double || value instanceof Float;
}
/**
 * Removes every reference to {@code iRid} from the document: direct link fields are nulled (and
 * the document saved), embedded documents are processed recursively, and multi-values
 * (collections, arrays, maps) have matching items removed in place.
 *
 * @param iRid     identity of the deleted record whose references must be cleaned up
 * @param iContent document to scrub
 */
public static void deleteCrossRefs(final ORID iRid, final ODocument iContent) {
  for (String fieldName : iContent.fieldNames()) {
    final Object fieldValue = iContent.field(fieldName);
    if (fieldValue != null) {
      if (fieldValue.equals(iRid)) {
        // REMOVE THE LINK
        iContent.field(fieldName, (ORID) null);
        iContent.save();
      } else if (fieldValue instanceof ODocument && ((ODocument) fieldValue).isEmbedded()) {
        // EMBEDDED DOCUMENT: GO RECURSIVELY
        deleteCrossRefs(iRid, (ODocument) fieldValue);
      } else if (OMultiValue.isMultiValue(fieldValue)) {
        // MULTI-VALUE (COLLECTION, ARRAY OR MAP), CHECK THE CONTENT
        for (final Iterator<?> it = OMultiValue.getMultiValueIterator(fieldValue); it.hasNext();) {
          final Object item = it.next();

          // BUG FIX: compare the individual ITEM to the RID. The previous code compared
          // fieldValue (the whole collection) to iRid, so matching items were never removed.
          if (item != null && item.equals(iRid)) {
            // DELETE ITEM
            it.remove();
          } else if (item instanceof ODocument && ((ODocument) item).isEmbedded()) {
            // EMBEDDED DOCUMENT: GO RECURSIVELY
            deleteCrossRefs(iRid, (ODocument) item);
          }
        }
      }
    }
  }
}
/**
 * Binds {@code database} to the current thread and then runs {@code action} under it.
 *
 * @param database database to make current for this thread
 * @param action   callback to execute; its result is returned unchanged
 * @return whatever {@code action.call()} returns
 */
public static <T> T makeDbCall(final ODatabaseDocumentInternal database, final ODbRelatedCall<T> action) {
  ODatabaseRecordThreadLocal.INSTANCE.set(database);
  return action.call();
}
}
| apache-2.0 |
rickgit/AndroidWidget | uipatterns/CustomView/src/main/java/edu/ptu/customview/animation/SimpleTextView.java | 2871 | package edu.ptu.customview.animation;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.os.Build;
import android.support.annotation.Nullable;
import android.support.annotation.RequiresApi;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.widget.TextView;
import static edu.ptu.customview.animation.LotteryScrollNumView.dpToPx;
/**
 * Custom view that draws three stacked two-digit numbers ("02"/"03"/"04") and lets callers
 * scroll the column vertically through an offset, e.g. for a lottery-style number roll.
 *
 * @author anshu.wang
 * @version 1.0
 * @time 2017/11/8.
 */
public class SimpleTextView extends View {
    private Paint textPaint;
    // Current vertical translation applied to the whole digit column before drawing
    private float mOffset = 0f;
    private Rect mTextBounds = new Rect();
    private int mTextHeight;
    private int mTextCenterX;

    public SimpleTextView(Context context) {
        super(context);
        initPaint();
    }

    public SimpleTextView(Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        initPaint();
    }

    public SimpleTextView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        initPaint();
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public SimpleTextView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        initPaint();
    }

    /** Lazily creates the shared text paint; safe to call from every constructor. */
    private void initPaint() {
        if (textPaint != null)
            return;
        textPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        textPaint.setTextAlign(Paint.Align.CENTER);
        textPaint.setTextSize(dpToPx(getContext(), 23));
        textPaint.setColor(0xffcc00ca);
    }

    /** Converts a dp value to device pixels using the display metrics of {@code context}. */
    public static int dpToPx(Context context, int dp) {
        return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp, context.getResources()
                .getDisplayMetrics());
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Measure a representative two-digit string to size the glyph offsets used in onDraw()
        textPaint.getTextBounds("00", 0, 2, mTextBounds);
        mTextHeight = mTextBounds.height();
        // Fixed 43dp x 43dp view, regardless of the incoming measure specs
        setMeasuredDimension(dpToPx(getContext(), 43), dpToPx(getContext(), 43));
        mTextCenterX = (getMeasuredWidth() - getPaddingLeft() - getPaddingRight()) / 2;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Shift the whole column by the current offset, then draw three stacked numbers
        canvas.translate(0, mOffset);
        canvas.drawText("02", getMeasuredWidth() / 2, -getMeasuredHeight() / 2 - mTextHeight / 2, textPaint);
        canvas.drawText("03", getMeasuredWidth() / 2, 0, textPaint);
        canvas.drawText("04", getMeasuredWidth() / 2, getMeasuredHeight() / 2 + mTextHeight / 2, textPaint);
    }

    /** Replaces the current scroll offset and schedules a redraw. */
    public void setmOffset(float mOffset) {
        this.mOffset = mOffset;
        invalidate();
    }

    /**
     * Adds {@code offset} to the current scroll offset and schedules a redraw.
     * BUG FIX: the previous implementation ignored its parameter and always added 1.
     */
    public void addOffset(float offset) {
        this.mOffset += offset;
        invalidate();
    }
}
| apache-2.0 |
lvjk/excrawler | src/main/java/six/com/crawler/work/downer/exception/RawDataNotFoundException.java | 628 | package six.com.crawler.work.downer.exception;
import six.com.crawler.work.exception.WorkerExceptionType;
/**
 * Exception thrown by the downloader when the raw (source) data cannot be found.
 * (Original Chinese comment: "源数据未找到异常" = "raw data not found exception".)
 *
 * @author weijiyong@tospur.com
 */
public class RawDataNotFoundException extends DownerException{
	/**
	 * Serialization id for this exception type.
	 */
	private static final long serialVersionUID = 7272831990726015341L;

	/**
	 * Creates the exception with the fixed DOWNER_UNFOUND_RAW_DATA_EXCEPTION type.
	 *
	 * @param message detail message describing which raw data was missing
	 */
	public RawDataNotFoundException(String message) {
		super(WorkerExceptionType.DOWNER_UNFOUND_RAW_DATA_EXCEPTION,message);
	}

	/**
	 * Creates the exception with the fixed DOWNER_UNFOUND_RAW_DATA_EXCEPTION type and a cause.
	 *
	 * @param message detail message describing which raw data was missing
	 * @param cause   underlying cause of the failure
	 */
	public RawDataNotFoundException(String message, Throwable cause) {
		super(WorkerExceptionType.DOWNER_UNFOUND_RAW_DATA_EXCEPTION,message, cause);
	}
}
| apache-2.0 |
trasa/aws-sdk-java | aws-java-sdk-codecommit/src/main/java/com/amazonaws/services/codecommit/model/OrderEnum.java | 1561 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.codecommit.model;
/**
 * Sort order for CodeCommit list operations, serialized on the wire as the
 * lowercase strings {@code "ascending"} and {@code "descending"}.
 */
public enum OrderEnum {

    Ascending("ascending"),
    Descending("descending");

    /** Wire representation of this constant. */
    private String value;

    private OrderEnum(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return OrderEnum corresponding to the value
     */
    public static OrderEnum fromValue(String value) {
        if (value == null || value.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }
        // Scan the constants instead of chaining string comparisons.
        for (OrderEnum candidate : OrderEnum.values()) {
            if (candidate.value.equals(value)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("Cannot create enum from "
                + value + " value!");
    }
}
HHYEG2016/Mon | app/src/main/java/com/github/hhyeg2016/mon/data_logger/AppLogger.java | 1731 | package com.github.hhyeg2016.mon.data_logger;
import android.app.usage.UsageEvents;
import android.app.usage.UsageStatsManager;
import android.content.Context;
import com.github.hhyeg2016.mon.data.AppData;
import com.github.hhyeg2016.mon.data_manager.AppDataManager;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
/**
 * Reads the Android usage-stats event log and converts it into the app's
 * {@code AppData} records.
 *
 * Created by Conner on 16-02-21.
 */
public class AppLogger {

    /**
     * Queries {@link UsageStatsManager} for all usage events of the last
     * year and maps each to one {@code AppData} entry (timestamp, shortened
     * package name, event type as a string).
     *
     * NOTE(review): obtaining USAGE_STATS_SERVICE requires the usage-access
     * permission; no check is performed here — confirm callers guard this.
     *
     * @param context context used to look up the system service
     * @return one AppData per recorded event, in the order the system
     *         iterator delivers them
     */
    public static ArrayList<AppData> getAppDataLogs(Context context) {
        int TIME_TO_GO_BACK = 1; // in years
        ArrayList<AppData> appDataArrayList = new ArrayList<>();
        UsageStatsManager usm = (UsageStatsManager) context.getSystemService(Context.USAGE_STATS_SERVICE);
        // Query window: [now - 1 year, now].
        Calendar calendar = Calendar.getInstance();
        long endTime = calendar.getTimeInMillis();
        calendar.add(Calendar.YEAR, -TIME_TO_GO_BACK);
        long startTime = calendar.getTimeInMillis();
        UsageEvents events = usm.queryEvents(startTime, endTime);
        while (events.hasNextEvent()) {
            // getNextEvent() fills the passed-in Event object in place.
            UsageEvents.Event e = new UsageEvents.Event();
            events.getNextEvent(e);
            String temp = e.getPackageName();
            // Keep only the last dot-separated segment of the package name
            // (e.g. "com.example.mail" -> "mail"). With no dot,
            // lastIndexOf('.')+1 == 0 and slice_range returns the whole name.
            AppData appData = new AppData(e.getTimeStamp(), slice_range(temp, (temp.lastIndexOf('.')+1), temp.length()), String.valueOf(e.getEventType()));
            appDataArrayList.add(appData);
        }
        return appDataArrayList;
    }

    /**
     * Python-style slice {@code s[startIndex:endIndex]} with negative-index
     * support.
     *
     * NOTE(review): when startIndex == 0 the whole string is returned and
     * endIndex is ignored — looks intentional for the no-dot package-name
     * case above; confirm before reusing elsewhere.
     *
     * @param s source string
     * @param startIndex inclusive start; negative counts from the end
     * @param endIndex exclusive end; negative counts from the end
     * @return the selected substring
     */
    public static String slice_range(String s, int startIndex, int endIndex) {
        if (startIndex == 0) return s;
        if (startIndex < 0) startIndex = s.length() + startIndex;
        if (endIndex < 0) endIndex = s.length() + endIndex;
        return s.substring(startIndex, endIndex);
    }
}
| apache-2.0 |
eirikrwu/foresty | foresty-server/src/main/java/com/foresty/quartz/job/EventRollingJob.java | 1990 | package com.foresty.quartz.job;
import com.foresty.model.Event;
import com.foresty.repository.EventRepository;
import com.foresty.service.EventService;
import com.foresty.service.EventServiceException;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.util.Calendar;
import java.util.List;
/**
 * Quartz job that "rolls" old events: each execution deletes every event
 * whose start time lies more than {@code rollingDays} days in the past.
 *
 * Created by EveningSun on 14-3-29.
 */
@Component
public class EventRollingJob implements Job {

    private static final Logger LOGGER = LogManager.getLogger(EventRollingJob.class);

    @Autowired
    private EventService eventService;

    @Autowired
    private EventRepository eventRepository;

    // Age threshold in days; events older than this are purged each run.
    // NOTE(review): hard-coded to 1 and never injected/configured — confirm
    // whether this was meant to be a tunable setting.
    private int rollingDays = 1;

    /**
     * Queries all events started before (now - rollingDays) and deletes them
     * one by one through the {@code EventService}.
     *
     * Runs inside a single transaction; any persistence or service failure
     * is wrapped in a {@link JobExecutionException} with the cause preserved.
     *
     * @param context Quartz execution context (unused)
     * @throws JobExecutionException if querying or deleting events fails
     */
    @Override
    @Transactional
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // Cutoff timestamp: now minus rollingDays.
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.DAY_OF_YEAR, -1 * this.rollingDays);
        EventRepository.EventCriteria criteria = new EventRepository.EventCriteria();
        criteria.setMaxStartTime(calendar.getTime());
        try {
            List<Event> events = this.eventRepository.getEventsByCriterion(criteria, null).getContent();
            // Delete through the service (not the repository directly) —
            // presumably so service-level logic applies; confirm.
            for (Event event : events) {
                this.eventService.deleteEventById(event.getId());
            }
            LOGGER.info("Deleted " + events.size() + " events because of events rolling.");
        } catch (DataAccessException e) {
            throw new JobExecutionException("Can't query for events: " + e.getMessage(), e);
        } catch (EventServiceException e) {
            throw new JobExecutionException("Can't delete event: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
southworkscom/azure-sdk-for-java | core/azure-core/src/main/java/com/microsoft/windowsazure/core/utils/XmlUtility.java | 3624 | /**
* Copyright Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoft.windowsazure.core.utils;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.util.ArrayList;
public abstract class XmlUtility {
public static Element getElementByTagNameNS(Node element, String namespace,
String name) {
NodeList elements = element.getChildNodes();
CharSequence colon = ":";
if (elements != null) {
for (int i = 0; i < elements.getLength(); i++) {
if (elements.item(i).getNodeType() == Node.ELEMENT_NODE
&& (elements.item(i).getAttributes().getNamedItemNS("http://www.w3.org/2001/XMLSchema-instance", "nil") == null
|| !"true".equals(elements.item(i).getAttributes().getNamedItemNS("http://www.w3.org/2001/XMLSchema-instance", "nil")))) {
Element currentElement = (Element) elements.item(i);
String nodeName = currentElement.getNodeName();
String nodeNameOnly = nodeName;
if (nodeName.contains(colon)) {
String[] nodeNameSplit = nodeName.split(":");
nodeNameOnly = nodeNameSplit[1];
}
if ((currentElement.getNamespaceURI() == null
|| currentElement.getNamespaceURI().equals(namespace))
&& nodeNameOnly.equals(name)) {
return currentElement;
}
}
}
}
return null;
}
public static ArrayList<Element> getElementsByTagNameNS(Node element,
String namespace, String name) {
ArrayList<Element> childElements = new ArrayList<Element>();
NodeList elements = element.getChildNodes();
if (elements != null) {
for (int i = 0; i < elements.getLength(); i++) {
if (elements.item(i).getNodeType() == Node.ELEMENT_NODE) {
Element currentElement = (Element) elements.item(i);
if ((currentElement.getNamespaceURI() == null
|| currentElement.getNamespaceURI().equals(namespace))
&& currentElement.getNodeName().equals(name)) {
childElements.add(currentElement);
}
}
}
}
return childElements;
}
public static Element getElementByTagName(Node element, String name) {
NodeList elements = element.getChildNodes();
if (elements != null) {
for (int i = 0; i < elements.getLength(); i++) {
if (elements.item(i).getNodeType() == Node.ELEMENT_NODE) {
Element currentElement = (Element) elements.item(i);
if (currentElement.getNodeName().equals(name)) {
return currentElement;
}
}
}
}
return null;
}
}
| apache-2.0 |
riccardove/easyjasub | easyjasub-lib/src/main/java/com/github/riccardove/easyjasub/dictionary/DictionaryJMDictReader.java | 3365 | package com.github.riccardove.easyjasub.dictionary;
/*
* #%L
* easyjasub-lib
* %%
* Copyright (C) 2014 Riccardo Vestrini
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.github.riccardove.easyjasub.CharacterIterator;
import com.github.riccardove.easyjasub.EasyJaSubTrie;
import com.github.riccardove.easyjasub.dictionary.EasyJaSubDictionaryEntry.Sense;
import com.github.riccardove.easyjasub.jmdict.IJMDictSense;
import com.github.riccardove.easyjasub.jmdict.JMDictObserver;
/**
 * JMDict parse callback that builds the dictionary trie: every entry's kanji
 * (keb) and reading (reb) spellings are inserted into the trie, sharing one
 * list of senses; parse errors are collected into a caller-supplied list.
 */
final class DictionaryJMDictReader implements JMDictObserver {

    /**
     * @param trie target trie mapping character sequences to entries; mutated
     *             in place by this reader
     * @param errors sink for human-readable parse error messages
     */
    public DictionaryJMDictReader(
            EasyJaSubTrie<EasyJaSubDictionaryEntry, Character> trie,
            ArrayList<String> errors) {
        this.trie = trie;
        this.errors = errors;
    }

    private final EasyJaSubTrie<EasyJaSubDictionaryEntry, Character> trie;
    private final ArrayList<String> errors;

    /** Records a parse error without aborting the read. */
    @Override
    public void onError(int index, String entseq, String message) {
        errors.add("JMDict error " + index + " " + entseq + " " + message);
    }

    /**
     * Handles one JMDict entry: converts its senses once, then indexes the
     * same sense list under both the kanji and the reading spelling.
     */
    @Override
    public void onEntry(int index, String entseq, String keb, String reb,
            Collection<IJMDictSense> senses) {
        List<EasyJaSubDictionaryEntry.Sense> senseList = getSenseList(senses);
        addEntry(keb, senseList);
        addEntry(reb, senseList);
    }

    // Adds all senses to the trie entry for this spelling; no-op when the
    // spelling is absent (entries may lack either keb or reb).
    private void addEntry(String keb,
            List<EasyJaSubDictionaryEntry.Sense> senseList) {
        if (keb != null) {
            EasyJaSubDictionaryEntry entry = getEntry(keb);
            for (Sense sense : senseList) {
                entry.addSense(sense);
            }
        }
    }

    // Fetches (or lazily creates) the trie entry for a spelling; a fresh
    // entry records the spelling's length.
    private EasyJaSubDictionaryEntry getEntry(String keb) {
        EasyJaSubTrie.Value<EasyJaSubDictionaryEntry> value = trie
                .add(new CharacterIterator(keb));
        EasyJaSubDictionaryEntry entry = value.getValue();
        if (entry == null) {
            entry = new EasyJaSubDictionaryEntry();
            entry.setLength(keb.length());
            value.setValue(entry);
        }
        return entry;
    }

    // Converts JMDict senses to dictionary senses, numbered in input order.
    // Each gloss is first simplified via DictionaryGloss.choose; if every
    // gloss is rejected, the first gloss is kept in long form as a fallback
    // so the sense is never empty.
    private List<EasyJaSubDictionaryEntry.Sense> getSenseList(
            Collection<IJMDictSense> senses) {
        List<EasyJaSubDictionaryEntry.Sense> senseList = new ArrayList<EasyJaSubDictionaryEntry.Sense>();
        int index = 0;
        for (IJMDictSense sense : senses) {
            EasyJaSubDictionaryEntry.Sense s = new Sense(index++);
            boolean glossFound = false;
            for (String gloss : sense.getGloss()) {
                // tries to simplify the gloss to fit a subtitle word
                gloss = DictionaryGloss.choose(gloss, " "); // TODO
                if (gloss != null) {
                    glossFound = true;
                    s.addGloss(gloss);
                }
            }
            if (!glossFound) {
                String gloss = DictionaryGloss.getLong(sense.getGloss()
                        .iterator().next());
                s.addGloss(gloss);
            }
            for (String pos : sense.getPartOfSpeech()) {
                s.addPartOfSpeech(pos);
            }
            senseList.add(s);
        }
        return senseList;
    }
}
zjhzxhz/Algorithm | ID3/Attribute.java | 1943 | public class Attribute {
public Attribute(String attributeName, boolean isContinuous, boolean isIgnored, Object attributeValue) {
this.attributeName = attributeName;
this.isContinuous = isContinuous;
this.isIgnored = isIgnored;
this.attributeValue = attributeValue;
}
public Attribute(String attributeName, boolean isContinuous, boolean isIgnored, Number attributeValue, Number lowerValue, Number upperValue) {
this.attributeName = attributeName;
this.isContinuous = isContinuous;
this.isIgnored = isIgnored;
this.attributeValue = attributeValue;
this.lowerValue = lowerValue;
this.upperValue = upperValue;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
public int hashCode() {
int hashCode = attributeName.hashCode();
if ( isContinuous ) {
hashCode += lowerValue.hashCode() + upperValue.hashCode();
} else {
hashCode += attributeValue.hashCode();
}
return hashCode;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
public boolean equals(Object o) {
if ( o instanceof Attribute ) {
return o.hashCode() == this.hashCode();
}
return false;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
public String toString() {
String description = "";
if ( isIgnored ) {
return description;
}
if ( isContinuous ) {
description = String.format("Attribute: [AttrName = %s, AttrValue = [%s, %s)]",
new Object[] { attributeName, lowerValue, upperValue });
} else {
description = String.format("Attribute: [AttrName = %s, AttrValue = %s]",
new Object[] { attributeName, attributeValue });
}
return description;
}
public final String attributeName;
public final boolean isContinuous;
public final boolean isIgnored;
/**
* For non-continuous value.
*/
public final Object attributeValue;
/**
* For continuous value.
*/
public Number lowerValue;
public Number upperValue;
}
| apache-2.0 |
vdmeer/asciilist | src/main/java/de/vandermeer/asciilist/AsciiListContext.java | 11301 | /* Copyright 2016 Sven van der Meer <vdmeer.sven@mykolab.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.vandermeer.asciilist;
import org.apache.commons.lang3.text.StrBuilder;
import de.vandermeer.asciithemes.TA_Frame;
import de.vandermeer.skb.interfaces.document.IsListContext;
import de.vandermeer.skb.interfaces.transformers.textformat.TextAlignment;
import de.vandermeer.skb.interfaces.translators.CharacterTranslator;
import de.vandermeer.skb.interfaces.translators.HtmlElementTranslator;
import de.vandermeer.skb.interfaces.translators.TargetTranslator;
/**
* Context for an {@link AsciiList}.
*
* @author Sven van der Meer <vdmeer.sven@mykolab.com>
* @version v0.0.5 build 170502 (02-May-17) for Java 1.8
* @since v0.1.0
*/
public interface AsciiListContext extends IsListContext {

	/**
	 * Copies all settings from the given object.
	 * @param ctx the object to copy settings from
	 * @return this to allow chaining
	 */
	AsciiListContext copySettings(AsciiListContext ctx);

	/**
	 * Returns the set text alignment for list items.
	 * @return item text alignment
	 */
	TextAlignment getAlignment();

	/**
	 * Returns a calculated item label string, item label plus all margins and indentations.
	 * Calculated means that the returned item string is using default values.
	 * The return can be used to calculate nested indentations, but not as an actual item string.
	 * To get an actual item string, use {@link #getItemString(AsciiListItem, int)}.
	 * @return calculated item label string
	 */
	default StrBuilder getCalculatedItemString(){
		return this.getItemString(null, 0);
	}

	/**
	 * Returns the character translator
	 * @return character translator
	 */
	CharacterTranslator getCharTranslator();

	/**
	 * Returns the list frame.
	 * @return frame, null if not set
	 */
	TA_Frame getFrame();

	/**
	 * Returns the set frame mode.
	 * @return frame mode
	 */
	int getFrameMode();

	/**
	 * Returns the HTML entity translator.
	 * @return HTML entity
	 */
	HtmlElementTranslator getHtmlElementTranslator();

	/**
	 * Returns the item margin character.
	 * @return item margin character
	 */
	Character getItemChar();

	/**
	 * Returns the item margin.
	 * @return item margin
	 */
	int getItemMargin();

	/**
	 * Returns the item label string, item label plus all margins and indentations.
	 * @param item the list item for which the string should be returned, can be null to use default settings
	 * @param index the index of the label in the item list, can be null to use default settings
	 * @param <LI> list item type
	 * @return item label string, default if no item is provided
	 */
	<LI extends AsciiListItem> StrBuilder getItemString(LI item, int index);

	/**
	 * Returns the left label margin character.
	 * @return left label margin character
	 */
	Character getLabelLeftChar();

	/**
	 * Returns the left label margin.
	 * @return left label margin
	 */
	int getLabelLeftMargin();

	/**
	 * Returns the right label margin character.
	 * @return right label margin character
	 */
	Character getLabelRightChar();

	/**
	 * Returns the right label margin.
	 * @return right label margin
	 */
	int getLabelRightMargin();

	/**
	 * Returns the left label string.
	 * @return left label string, null if not set
	 */
	String getLeftLabelString();

	/**
	 * Returns the level of the list.
	 * @return list level, one if top list, greater than one if it is a nested list
	 */
	int getLevel();

	/**
	 * Returns the list end string
	 * @return list end string
	 */
	String getListEnd();

	/**
	 * Returns the list start string
	 * @return list start string
	 */
	String getListStart();

	/**
	 * Returns the parent index array.
	 * @return parent index array
	 */
	int[] getParentIndex();

	/**
	 * Returns the right label string.
	 * @return right label string, null if not set
	 */
	String getRightLabelString();

	/**
	 * Returns the target translator.
	 * @return target translator, null if not set
	 */
	TargetTranslator getTargetTranslator();

	/**
	 * Returns the left text margin character.
	 * @return left text margin character
	 */
	Character getTextLeftChar();

	/**
	 * Returns the left text margin.
	 * @return left text margin
	 */
	int getTextLeftMargin();

	/**
	 * Returns the right text margin character.
	 * @return right text margin character
	 */
	Character getTextRightChar();

	/**
	 * Returns the right text margin.
	 * @return right text margin
	 */
	int getTextRightMargin();

	/**
	 * Returns the width of item text calculated using the currently set width.
	 * @return text width
	 */
	default int getTextWidth(){
		return this.getTextWidth(this.getWidth());
	}

	/**
	 * Returns the width of item text calculated for the given width.
	 * @param width the maximum width for the list
	 * @return text width
	 */
	default int getTextWidth(int width){
		return (width - this.getCalculatedItemString().length());
	}

	/**
	 * Returns the list width.
	 * @return list width
	 */
	int getWidth();

	/**
	 * Inherit settings from another context.
	 * @param ctx the context to inherit settings from
	 * @return self to allow chaining
	 */
	AsciiListContext inheritSettings(AsciiListContext ctx);

	/**
	 * Sets all converters, margins, characters, indentations, and strings to default values.
	 */
	void init();

	/**
	 * Sets the item text alignment.
	 * @param alignment set alignment
	 * @throws NullPointerException if the argument was null
	 * @return this to allow chaining
	 */
	AsciiListContext setAlignment(TextAlignment alignment);

	/**
	 * Sets the character translator.
	 * It will also remove any other translator set.
	 * Nothing will happen if the argument is null.
	 * @param charTranslator translator
	 * @return this to allow chaining
	 */
	AsciiListContext setCharTranslator(CharacterTranslator charTranslator);

	/**
	 * Sets the list frame.
	 * @param frame new frame, null to reset
	 * @return this to allow chaining
	 */
	AsciiListContext setFrame(TA_Frame frame);

	/**
	 * Sets the frame mode.
	 * @param frameMode new frame mode, only used if 0 or positive integer
	 * @return this to allow chaining
	 */
	AsciiListContext setFrameMode(int frameMode);

	/**
	 * Sets the HTML entity translator.
	 * It will also remove any other translator set.
	 * Nothing will happen if the argument is null.
	 * @param htmlElementTranslator translator
	 * @return this to allow chaining
	 */
	AsciiListContext setHtmlElementTranslator(HtmlElementTranslator htmlElementTranslator);

	/**
	 * Sets the item margin character.
	 * @param c new item margin character, ignored if null
	 * @return this to allow chaining
	 */
	AsciiListContext setItemChar(Character c);

	/**
	 * Sets the item margin.
	 * @param margin new item margin, ignored if negative
	 * @return this to allow chaining
	 */
	AsciiListContext setItemMargin(int margin);

	/**
	 * Sets the left label margin character.
	 * @param c new left label margin character, ignored if null
	 * @return this to allow chaining
	 */
	AsciiListContext setLabelLeftChar(Character c);

	/**
	 * Sets the left label margin.
	 * @param margin new left label margin, ignored if negative
	 * @return this to allow chaining
	 */
	AsciiListContext setLabelLeftMargin(int margin);

	/**
	 * Sets the right label margin character.
	 * @param c new right label margin character, ignored if null
	 * @return this to allow chaining
	 */
	AsciiListContext setLabelRightChar(Character c);

	/**
	 * Sets the right label margin.
	 * @param margin new right label margin, ignored if negative
	 * @return this to allow chaining
	 */
	AsciiListContext setLabelRightMargin(int margin);

	/**
	 * Sets the left label string.
	 * @param leftLabel left label string, null is ok
	 * @return this to allow chaining
	 */
	AsciiListContext setLeftLabelString(String leftLabel);

	/**
	 * Sets the level of the list.
	 * @param level new list level, should only be used if 2 or larger (nested list) using 1 as default
	 * @return self to allow chaining
	 */
	AsciiListContext setLevel(int level);

	/**
	 * Sets the list end string
	 * @param listEnd new list end string, null and blank are ok
	 * @return this to allow chaining
	 */
	AsciiListContext setListEnd(String listEnd);

	/**
	 * Sets the list start string
	 * @param listStart new list start string, null and blank are ok
	 * @return this to allow chaining
	 */
	AsciiListContext setListStart(String listStart);

	/**
	 * Sets the parent index array
	 * @param parentIndex parent index array
	 * @return this to allow chaining
	 */
	AsciiListContext setParents(int[] parentIndex);

	/**
	 * Sets the right label string.
	 * @param rightLabel right label string, null is ok
	 * @return this to allow chaining
	 */
	AsciiListContext setRightLabelString(String rightLabel);

	/**
	 * Sets the target translator.
	 * It will also remove any other translator set.
	 * Nothing will happen if the argument is null.
	 * @param targetTranslator translator
	 * @return this to allow chaining
	 */
	AsciiListContext setTargetTranslator(TargetTranslator targetTranslator);

	/**
	 * Sets the left text margin character.
	 * @param c new left text margin character, ignored if null
	 * @return this to allow chaining
	 */
	AsciiListContext setTextLeftChar(Character c);

	/**
	 * Sets the left text margin.
	 * @param margin new left text margin, ignored if negative
	 * @return this to allow chaining
	 */
	AsciiListContext setTextLeftMargin(int margin);

	/**
	 * Sets the right text margin character.
	 * @param c new right text margin character, ignored if null
	 * @return this to allow chaining
	 */
	AsciiListContext setTextRightChar(Character c);

	/**
	 * Sets the right text margin.
	 * @param margin new right text margin, ignored if negative
	 * @return this to allow chaining
	 */
	AsciiListContext setTextRightMargin(int margin);

	/**
	 * Sets the list width.
	 * @param width new width
	 * @return this to allow chaining
	 * @throws IllegalStateException if the resulting text width was smaller than 3
	 */
	AsciiListContext setWidth(int width);

	/**
	 * Sets a new line separator for the renderer.
	 * @param separator the new separator, ignored if blank
	 * @return self to allow chaining
	 */
	AsciiListContext setLineSeparator(String separator);

	/**
	 * Returns the current set line separator.
	 * @return the line separator, null if none set
	 */
	String getLineSeparator();
}
| apache-2.0 |
vam-google/google-cloud-java | google-cloud-clients/google-cloud-vision/src/test/java/com/google/cloud/vision/v1p4beta1/ImageAnnotatorClientTest.java | 9884 | /*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.vision.v1p4beta1;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.longrunning.Operation;
import com.google.protobuf.Any;
import com.google.protobuf.GeneratedMessageV3;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * GAPIC-generated unit tests for {@code ImageAnnotatorClient}.
 *
 * Pattern per RPC: a success test that queues an expected response on the
 * in-process mock service, calls the client, and verifies both the response
 * and the captured request; and an exception test that queues
 * INVALID_ARGUMENT and verifies the client surfaces it (directly for unary
 * calls, wrapped in ExecutionException for long-running operations).
 *
 * NOTE: generated code — prefer regenerating over hand-editing.
 */
@javax.annotation.Generated("by GAPIC")
public class ImageAnnotatorClientTest {
  // Mock backends shared by all tests; started once per class.
  private static MockProductSearch mockProductSearch;
  private static MockImageAnnotator mockImageAnnotator;
  private static MockServiceHelper serviceHelper;
  // Per-test client wired to the in-process channel.
  private ImageAnnotatorClient client;
  private LocalChannelProvider channelProvider;

  /** Starts the in-process gRPC server hosting both mock services. */
  @BeforeClass
  public static void startStaticServer() {
    mockProductSearch = new MockProductSearch();
    mockImageAnnotator = new MockImageAnnotator();
    serviceHelper =
        new MockServiceHelper(
            "in-process-1", Arrays.<MockGrpcService>asList(mockProductSearch, mockImageAnnotator));
    serviceHelper.start();
  }

  @AfterClass
  public static void stopServer() {
    serviceHelper.stop();
  }

  /** Resets mock state and creates a fresh client with no credentials. */
  @Before
  public void setUp() throws IOException {
    serviceHelper.reset();
    channelProvider = serviceHelper.createChannelProvider();
    ImageAnnotatorSettings settings =
        ImageAnnotatorSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = ImageAnnotatorClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  @Test
  @SuppressWarnings("all")
  public void batchAnnotateImagesTest() {
    BatchAnnotateImagesResponse expectedResponse = BatchAnnotateImagesResponse.newBuilder().build();
    mockImageAnnotator.addResponse(expectedResponse);

    List<AnnotateImageRequest> requests = new ArrayList<>();

    BatchAnnotateImagesResponse actualResponse = client.batchAnnotateImages(requests);
    Assert.assertEquals(expectedResponse, actualResponse);

    // Verify the request the client actually sent, including headers.
    List<GeneratedMessageV3> actualRequests = mockImageAnnotator.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    BatchAnnotateImagesRequest actualRequest = (BatchAnnotateImagesRequest) actualRequests.get(0);

    Assert.assertEquals(requests, actualRequest.getRequestsList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void batchAnnotateImagesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockImageAnnotator.addException(exception);

    try {
      List<AnnotateImageRequest> requests = new ArrayList<>();
      client.batchAnnotateImages(requests);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }

  @Test
  @SuppressWarnings("all")
  public void batchAnnotateFilesTest() {
    BatchAnnotateFilesResponse expectedResponse = BatchAnnotateFilesResponse.newBuilder().build();
    mockImageAnnotator.addResponse(expectedResponse);

    List<AnnotateFileRequest> requests = new ArrayList<>();

    BatchAnnotateFilesResponse actualResponse = client.batchAnnotateFiles(requests);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockImageAnnotator.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    BatchAnnotateFilesRequest actualRequest = (BatchAnnotateFilesRequest) actualRequests.get(0);

    Assert.assertEquals(requests, actualRequest.getRequestsList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void batchAnnotateFilesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockImageAnnotator.addException(exception);

    try {
      List<AnnotateFileRequest> requests = new ArrayList<>();
      client.batchAnnotateFiles(requests);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception
    }
  }

  @Test
  @SuppressWarnings("all")
  public void asyncBatchAnnotateImagesTest() throws Exception {
    AsyncBatchAnnotateImagesResponse expectedResponse =
        AsyncBatchAnnotateImagesResponse.newBuilder().build();
    // Long-running RPC: the mock returns an already-completed Operation
    // carrying the expected response.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("asyncBatchAnnotateImagesTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockImageAnnotator.addResponse(resultOperation);

    List<AnnotateImageRequest> requests = new ArrayList<>();
    OutputConfig outputConfig = OutputConfig.newBuilder().build();

    AsyncBatchAnnotateImagesResponse actualResponse =
        client.asyncBatchAnnotateImagesAsync(requests, outputConfig).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockImageAnnotator.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    AsyncBatchAnnotateImagesRequest actualRequest =
        (AsyncBatchAnnotateImagesRequest) actualRequests.get(0);

    Assert.assertEquals(requests, actualRequest.getRequestsList());
    Assert.assertEquals(outputConfig, actualRequest.getOutputConfig());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void asyncBatchAnnotateImagesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockImageAnnotator.addException(exception);

    try {
      List<AnnotateImageRequest> requests = new ArrayList<>();
      OutputConfig outputConfig = OutputConfig.newBuilder().build();
      client.asyncBatchAnnotateImagesAsync(requests, outputConfig).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      // For LRO methods the failure surfaces as the cause of the
      // ExecutionException thrown by Future.get().
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = (InvalidArgumentException) e.getCause();
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  @SuppressWarnings("all")
  public void asyncBatchAnnotateFilesTest() throws Exception {
    AsyncBatchAnnotateFilesResponse expectedResponse =
        AsyncBatchAnnotateFilesResponse.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("asyncBatchAnnotateFilesTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockImageAnnotator.addResponse(resultOperation);

    List<AsyncAnnotateFileRequest> requests = new ArrayList<>();

    AsyncBatchAnnotateFilesResponse actualResponse =
        client.asyncBatchAnnotateFilesAsync(requests).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<GeneratedMessageV3> actualRequests = mockImageAnnotator.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    AsyncBatchAnnotateFilesRequest actualRequest =
        (AsyncBatchAnnotateFilesRequest) actualRequests.get(0);

    Assert.assertEquals(requests, actualRequest.getRequestsList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  @SuppressWarnings("all")
  public void asyncBatchAnnotateFilesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
    mockImageAnnotator.addException(exception);

    try {
      List<AsyncAnnotateFileRequest> requests = new ArrayList<>();
      client.asyncBatchAnnotateFilesAsync(requests).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = (InvalidArgumentException) e.getCause();
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
}
| apache-2.0 |
alexandrev/mbandroid | mBandroid/src/main/java/com/xandrev/mbandroid/notifications/NotificationManager.java | 3927 | package com.xandrev.mbandroid.notifications;
/**
* Created by alexa on 12/8/2015.
*/
import android.app.Notification;
import android.os.Bundle;
import android.service.notification.NotificationListenerService;
import android.service.notification.StatusBarNotification;
import android.util.Log;
import com.xandrev.mbandroid.services.NotificationLogger;
import com.xandrev.mbandroid.tiles.CommonTile;
import com.xandrev.mbandroid.tiles.TilesManager;
import java.util.ArrayList;
import java.util.List;
public class NotificationManager extends NotificationListenerService {

    private static final String TAG = "NotificationManager";

    /** Forwards matched notifications to the notification log. */
    private NotificationLogger notificationLogger;
    /** Notifications already processed, used to suppress duplicate delivery. */
    // NOTE(review): this list grows without bound for the lifetime of the
    // service; consider evicting entries in onNotificationRemoved().
    private List<StatusBarNotification> dupId;
    private TilesManager tilesManager;

    @Override
    public void onCreate() {
        super.onCreate();
        tilesManager = TilesManager.getInstance(this);
        notificationLogger = NotificationLogger.getInstance();
        dupId = new ArrayList<>();
    }

    /**
     * Dispatches each newly posted notification either to the affected tiles
     * (external notifications) or to the internal cleanup handler.
     */
    @Override
    public void onNotificationPosted(StatusBarNotification sbn) {
        Log.d(TAG, "New notification detected");
        if (sbn == null || dupId.contains(sbn)) {
            return;
        }
        String pack = sbn.getPackageName();
        Log.d(TAG, "Notification source: " + pack);
        Log.d(TAG, "Key: " + sbn.getKey());
        Log.d(TAG, "Id: " + sbn.getId());
        dupId.add(sbn);
        if (!isInternalNotification(sbn)) {
            List<CommonTile> tileList = tilesManager.getTilesAffected(pack);
            if (tileList != null) {
                Log.d(TAG, "Tiles affected: " + tileList.size());
                for (CommonTile tile : tileList) {
                    tile.executeNotification(sbn);
                    notificationLogger.addNotificationLog(tile, sbn);
                }
            }
        } else {
            // Fix: tickerText may legitimately be null; the original called
            // toString() on it before the null check, which crashed with an
            // NPE for internal notifications without a ticker.
            CharSequence ticker = sbn.getNotification().tickerText;
            if (ticker != null && "cleaning_notifications".equals(ticker.toString())) {
                clearAllNotifications(sbn.getNotification());
            }
            cancelNotification(sbn.getKey());
        }
    }

    /**
     * Cancels every notification whose key is listed (comma-separated) in the
     * "android.text" extra of the given internal notification.
     */
    private void clearAllNotifications(Notification notification) {
        if (notification == null) {
            return;
        }
        Bundle extras = notification.extras;
        CharSequence body = extras.getCharSequence("android.text");
        if (body == null) {
            return;
        }
        String text = body.toString();
        Log.d(TAG, "Notification Keys: " + text);
        // String.split never returns null, so no null check is needed here.
        for (String key : text.split(",")) {
            Log.d(TAG, "Notification Key: " + key);
            cancelNotification(key);
        }
    }

    /**
     * Returns true when the notification was posted by this app itself
     * (package com.xandrev.mbandroid), optionally narrowed to the internal
     * "cleaning_notifications" ticker marker.
     */
    private boolean isInternalNotification(StatusBarNotification sbn) {
        boolean out = false;
        if (sbn != null) {
            String packageName = sbn.getPackageName();
            if (packageName != null && !"".equals(packageName)) {
                out = "com.xandrev.mbandroid".equals(packageName);
                // Fix: same NPE as in onNotificationPosted -- read the ticker
                // as a CharSequence and null-check before toString().
                CharSequence ticker = sbn.getNotification().tickerText;
                if (ticker != null && !"".equals(ticker.toString())) {
                    out = "cleaning_notifications".equals(ticker.toString()) && out;
                }
            }
        }
        return out;
    }

    @Override
    public void onNotificationRemoved(StatusBarNotification sbn) {
        Log.d(TAG, "Notification Removed");
    }
}
| apache-2.0 |
ChangsungKim/TestRepository01 | app/controllers/VoteApp.java | 3739 | /*
* Yobi, Project Hosting SW
*
* Copyright 2013 NAVER Corp.
* http://yobi.io
*
* @Author Changsung Kim
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers;
import models.Issue;
import models.User;
import play.mvc.Call;
import play.mvc.With;
import models.Project;
import play.mvc.Result;
import play.mvc.Controller;
import play.db.ebean.Transactional;
import actions.AnonymousCheckAction;
import models.enumeration.ResourceType;
import controllers.annotation.IsCreatable;
import java.util.ArrayList;
import java.util.List;
/**
 * Controller handling voting on issues.
 */
@With(AnonymousCheckAction.class)
public class VoteApp extends Controller {

    /**
     * Registers the current user's vote on the given issue and redirects
     * back to the issue page.
     *
     * @param ownerName   project owner's login name
     * @param projectName project name
     * @param issueNumber issue number within the project
     * @return a redirect to the issue detail page
     */
    @Transactional
    @IsCreatable(ResourceType.ISSUE_COMMENT)
    public static Result vote(String ownerName, String projectName, Long issueNumber) {
        Project project = Project.findByOwnerAndProjectName(ownerName, projectName);
        Issue issue = Issue.findByNumber(project, issueNumber);
        issue.addVoter(UserApp.currentUser());
        Call call = routes.IssueApp.issue(ownerName, projectName, issueNumber);
        return redirect(call);
    }

    /**
     * Removes the current user's vote from the given issue and redirects
     * back to the issue page.
     *
     * @param ownerName   project owner's login name
     * @param projectName project name
     * @param issueNumber issue number within the project
     * @return a redirect to the issue detail page
     */
    @Transactional
    @IsCreatable(ResourceType.ISSUE_COMMENT)
    public static Result unvote(String ownerName, String projectName, Long issueNumber) {
        Project project = Project.findByOwnerAndProjectName(ownerName, projectName);
        Issue issue = Issue.findByNumber(project, issueNumber);
        issue.removeVoter(UserApp.currentUser());
        Call call = routes.IssueApp.issue(ownerName, projectName, issueNumber);
        return redirect(call);
    }

    /** Returns the first {@code size} voters, for avatar display. */
    public static List<User> getVotersForAvatar(List<User> voters, int size) {
        return getSubList(voters, 0, size);
    }

    /** Returns up to {@code size} voters starting at {@code fromIndex}, for name display. */
    public static List<User> getVotersForName(List<User> voters, int fromIndex, int size) {
        return getSubList(voters, fromIndex, fromIndex + size);
    }

    /** Returns a copy of {@code voters} with every occurrence of the current user removed. */
    public static List<User> getVotersExceptCurrentUser(List<User> voters) {
        List<User> result = new ArrayList<User>(voters);
        // Hoisted: one lookup instead of one per loop iteration. Assumes
        // UserApp.currentUser() is stable within a request -- TODO confirm.
        User currentUser = UserApp.currentUser();
        while (result.contains(currentUser)) {
            result.remove(currentUser);
        }
        return result;
    }

    /**
     * Returns a view of {@code voters} between the given indices, clamped to
     * the list bounds so no exception can occur.
     *
     * Fix: the original clamped fromIndex and toIndex independently, so a
     * fromIndex beyond the list size produced fromIndex &gt; toIndex and
     * {@link List#subList} threw IllegalArgumentException, which the old
     * catch block (IndexOutOfBoundsException) did not cover.
     *
     * @param fromIndex inclusive start index (clamped to [0, size])
     * @param toIndex   exclusive end index (clamped to [fromIndex, size])
     * @return the clamped sublist, possibly empty
     */
    private static List<User> getSubList(List<User> voters, int fromIndex, int toIndex) {
        int lo = Math.min(Math.max(0, fromIndex), voters.size());
        int hi = Math.min(Math.max(lo, toIndex), voters.size());
        return voters.subList(lo, hi);
    }
}
| apache-2.0 |
TheClimateCorporation/mirage | library/src/test/java/com/climate/mirage/shadows/NoExecuteAsyncTaskShadow.java | 1576 | package com.climate.mirage.shadows;
import android.os.AsyncTask;
import android.os.Handler;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.annotation.RealObject;
import org.robolectric.shadows.ShadowAsyncTask;
import java.util.concurrent.Executor;
@Implements(AsyncTask.class)
public class NoExecuteAsyncTaskShadow<Params, Progress, Result>
    extends ShadowAsyncTask<Params, Progress, Result> {

  // Robolectric shadow whose execute()/executeOnExecutor() deliberately do
  // nothing, so AsyncTasks started by code under test never actually run.

  @RealObject
  private AsyncTask<Params, Progress, Result> realAsyncTask;

  // Reported status; stays PENDING because execution is suppressed.
  private AsyncTask.Status status = AsyncTask.Status.PENDING;
  private Handler handler = new Handler();

  public NoExecuteAsyncTaskShadow() {
    super();
  }

  @Implementation
  public AsyncTask<Params, Progress, Result> execute(Params... params) {
    // Intentionally does not run the task. The commented-out lines below are
    // an earlier experiment that posted a fake completion on a Handler.
    // status = AsyncTask.Status.FINISHED;
    // handler.postDelayed(delayed, 10);
    return this.realAsyncTask;
  }

  @Implementation
  public AsyncTask<Params, Progress, Result> executeOnExecutor(Executor executor, Params... params) {
    // status = AsyncTask.Status.FINISHED;
    return this.realAsyncTask;
  }

  @Override
  public AsyncTask.Status getStatus() {
    return status;
  }

  // private Runnable delayed = new Runnable() {
  //    @Override
  //    public void run() {
  //        getBridge().onPostExecute(null);
  //    }
  // };
  //
  // private ShadowAsyncTaskBridge<Params, Progress, Result> getBridge() {
  //    return new ShadowAsyncTaskBridge(this.realAsyncTask);
  // }
}
bozzzzo/quark | quarkc/test/emit/expected/java/macro_stuff/src/main/java/macro_stuff_md/macro_stuff_Macro_test_Method.java | 683 | package macro_stuff_md;
public class macro_stuff_Macro_test_Method extends quark.reflect.Method implements io.datawire.quark.runtime.QObject {
    // Reflection metadata for macro_stuff.Macro.test(). This file appears to
    // be machine-generated by the Quark compiler -- do not edit by hand.

    public macro_stuff_Macro_test_Method() {
        // Method named "test", return type quark.void, no parameters.
        super("quark.void", "test", new java.util.ArrayList(java.util.Arrays.asList(new Object[]{})));
    }

    public Object invoke(Object object, java.util.ArrayList<Object> args) {
        // Reflectively dispatches Macro.test() on the given instance;
        // the method is void, so invoke always returns null.
        macro_stuff.Macro obj = (macro_stuff.Macro) (object);
        (obj).test();
        return null;
    }

    public String _getClass() {
        return (String) (null);
    }

    public Object _getField(String name) {
        return null;
    }

    public void _setField(String name, Object value) {}
}
| apache-2.0 |
gbif/occurrence | occurrence-cli/src/main/java/org/gbif/occurrence/cli/dataset/EsDatasetDeleterConfiguration.java | 3095 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.occurrence.cli.dataset;
import org.gbif.common.messaging.config.MessagingConfiguration;
import org.gbif.occurrence.cli.common.GangliaConfiguration;
import java.util.Arrays;
import java.util.StringJoiner;
import javax.validation.Valid;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParametersDelegate;
/** Configuration required to update ES with changes from deleted datasets. */
public class EsDatasetDeleterConfiguration {

  @ParametersDelegate @NotNull @Valid
  public MessagingConfiguration messaging = new MessagingConfiguration();

  @ParametersDelegate @Valid @NotNull
  public GangliaConfiguration ganglia = new GangliaConfiguration();

  @Parameter(names = "--pool-size")
  @Min(1)
  public int poolSize = 1;

  @Parameter(names = "--queue-name")
  @NotNull
  public String queueName;

  @Parameter(names = "--es-hosts")
  @NotNull
  public String[] esHosts;

  @Parameter(names = "--es-index")
  @NotNull
  public String[] esIndex;

  // Timeouts and sniffing intervals are in milliseconds.
  @Parameter(names = "--es-connect-timeout")
  public int esConnectTimeout = 7500;

  @Parameter(names = "--es-socket-timeout")
  public int esSocketTimeout = 125000;

  @Parameter(names = "--es-sniff-interval")
  public int esSniffInterval = 300000;

  @Parameter(names = "--es-sniff-after-failure-delay")
  public int esSniffAfterFailureDelay = 30000;

  @Parameter(names = "--hdfs-site-config")
  @NotNull
  public String hdfsSiteConfig;

  @Parameter(names = "--core-site-config")
  @NotNull
  public String coreSiteConfig;

  @Parameter(names = "--hdfs-view-dir-path")
  @NotNull
  public String hdfsViewDirPath;

  @Parameter(names = "--ingest-dir-path")
  @NotNull
  public String ingestDirPath;

  @Override
  public String toString() {
    return new StringJoiner(
            ", ", EsDatasetDeleterConfiguration.class.getSimpleName() + "[", "]")
        .add("messaging=" + messaging)
        .add("ganglia=" + ganglia)
        .add("poolSize=" + poolSize)
        .add("queueName='" + queueName + "'")
        .add("esHosts=" + Arrays.toString(esHosts))
        .add("esIndex=" + Arrays.toString(esIndex))
        .add("esConnectTimeout=" + esConnectTimeout)
        .add("esSocketTimeout=" + esSocketTimeout)
        .add("esSniffInterval=" + esSniffInterval)
        .add("esSniffAfterFailureDelay=" + esSniffAfterFailureDelay)
        .add("coreSiteConfig=" + coreSiteConfig)
        .add("hdfsSiteConfig=" + hdfsSiteConfig)
        // Fix: these two configured fields were missing from toString().
        .add("hdfsViewDirPath='" + hdfsViewDirPath + "'")
        .add("ingestDirPath='" + ingestDirPath + "'")
        .toString();
  }
}
| apache-2.0 |
ifeegoo/AppX | Android/Android/feature/src/main/java/com/ifeegoo/android/feature/com/ifeegoo/lib/utils/AppManager.java | 2165 | package com.ifeegoo.android.feature.com.ifeegoo.lib.utils;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.util.Log;
import android.content.pm.PackageManager.NameNotFoundException;
public final class AppManager
{
    private static final String TAG = AppManager.class.getSimpleName();

    // Eagerly initialized, so this is never null. The original lazily
    // re-created it inside getInstance(), which was dead code and has been
    // removed (the field is never assigned null anywhere).
    private static AppManager sAppManager = new AppManager();

    // NOTE(review): caching a Context and package metadata in static fields
    // leaks the Context if an Activity context is passed in; callers should
    // pass context.getApplicationContext() -- confirm before tightening.
    private static Context sContext;
    private static PackageManager sPackageManager;
    private static PackageInfo sPackageInfo;
    private static ApplicationInfo sApplicationInfo;

    private AppManager()
    {
    }

    /**
     * Refreshes the cached package metadata from the given context and
     * returns the singleton instance.
     *
     * @param context caller context; when null, a debug message is logged and
     *                the previously cached metadata (if any) is kept
     * @return the shared AppManager instance, never null
     */
    public static AppManager getInstance(Context context)
    {
        if (context != null)
        {
            sContext = context;
            sPackageManager = context.getPackageManager();
            try
            {
                sPackageInfo = sPackageManager.getPackageInfo(sContext.getPackageName(), 0);
                sApplicationInfo = sPackageInfo.applicationInfo;
            } catch (NameNotFoundException nameNotFoundException)
            {
                Log.d(TAG, "Name not found!");
            }
        }
        else
        {
            Log.d(TAG, "The context is null!");
        }
        return sAppManager;
    }

    /** Returns the application label, or "" when metadata is unavailable. */
    public String getName()
    {
        if ((sApplicationInfo != null) && (sContext != null))
        {
            // NOTE(review): labelRes can be 0 when the label is a literal
            // string, and getString(0) would throw. Consider
            // sPackageManager.getApplicationLabel(sApplicationInfo) instead.
            return sContext.getResources().getString(sApplicationInfo.labelRes);
        }
        return "";
    }

    /** Returns the package name, or "" when no context has been supplied yet. */
    public String getPackageName()
    {
        if (sContext != null)
        {
            return sContext.getPackageName();
        }
        return "";
    }

    /** Returns the versionName from the manifest, or "" when unavailable. */
    public String getVersionName()
    {
        if (sPackageInfo != null)
        {
            return sPackageInfo.versionName;
        }
        return "";
    }

    /** Returns the versionCode from the manifest, or 0 when unavailable. */
    public int getVersionCode()
    {
        if (sPackageInfo != null)
        {
            return sPackageInfo.versionCode;
        }
        return 0;
    }
}
| apache-2.0 |
leakingtapan/rof | tst/com/amazon/mqa/datagen/rof/typed/DefaultOptionalFactoryTest.java | 1766 | package com.amazon.mqa.datagen.rof.typed;
import static org.easymock.EasyMock.expect;
import static org.testng.Assert.assertEquals;
import com.amazon.mqa.datagen.supplier.RandomIntegerSupplier;
import com.amazon.mtqa.testutil.MockObjectContainer;
import com.google.common.base.Supplier;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.Optional;
/**
* Unit test for {@link DefaultOptionalFactory}.
*/
/**
 * Unit test for {@link DefaultOptionalFactory}.
 */
public final class DefaultOptionalFactoryTest {

    /** Creates random integers. */
    private final Supplier<Integer> integerFactory = new RandomIntegerSupplier();

    /** Contains mocks. */
    private MockObjectContainer mocks;

    /** Mock typed object factory. */
    private TypedObjectFactory mockTypedObjectFactory;

    /** Class under test. */
    private OptionalFactory factory;

    /**
     * Required test set up.
     */
    @SuppressWarnings("unchecked")
    @BeforeMethod
    public void setUp() {
        mocks = new MockObjectContainer();
        mockTypedObjectFactory = mocks.createMock("mockTypedObjectFactory", TypedObjectFactory.class);
        factory = new DefaultOptionalFactory(mockTypedObjectFactory);
    }

    /**
     * Tests creating Optional.
     *
     * @throws Exception for any failure.
     */
    @Test
    public void testCreateOptional() throws Exception {
        // set up
        final Class<Integer> clazz = Integer.class;
        final Integer expected = integerFactory.get();
        expect(mockTypedObjectFactory.create(clazz)).andReturn(expected);
        mocks.replayAll();

        // exercise
        // Fix: use a bounded wildcard instead of the raw Optional type.
        final Optional<?> actual = factory.create(clazz);

        // verify
        // NOTE(review): consider calling mocks.verifyAll() here so unexpected
        // or missing mock interactions fail the test -- confirm the container
        // does not already verify in a global teardown.
        assertEquals(actual, Optional.of(expected), "wrong optional");
    }
}
| apache-2.0 |
jbeetle/BJAF3.x | src/main/com/beetle/framework/web/controller/VirtualController.java | 2087 | /*
* BJAF - Beetle J2EE Application Framework
* 甲壳虫J2EE企业应用开发框架
* 版权所有2003-2015 余浩东 (www.beetlesoft.net)
*
* 这是一个免费开源的软件,您必须在
*<http://www.apache.org/licenses/LICENSE-2.0>
*协议下合法使用、修改或重新发布。
*
* 感谢您使用、推广本框架,若有建议或问题,欢迎您和我联系。
* 邮件: <yuhaodong@gmail.com/>.
*/
package com.beetle.framework.web.controller;
import com.beetle.framework.web.view.View;
import javax.servlet.http.HttpServletRequest;
/**
* <p>
* Title: BeetleSoft Framework
* </p>
* <p>
* 虚拟控制器,没有具体实现类,扩展属性:<br>
* (1)可通过“frontActionFlag”和“backActionFlag”
* 两个页面参数控制是否处理全局回调,两个参数值为“1”时有效,为"0"时无效 <br>
* (2)默认进行Session检查,可通过"sessionCheck=false"页面参数来关闭Session检查功能
* <p>
* Copyright: Copyright (c) 2003
* </p>
* <p>
* Company: 甲壳虫软件
* <p/>
* </p>
*
* @author 余浩东
* @version 1.0
*/
public class VirtualController extends ControllerImp {
public VirtualController(String simpleViewName, HttpServletRequest request) {
String scf = request.getParameter("sessionCheck");
if (scf == null || scf.equals("true")) {
this.enableSessionCheck();
}
this.simpleViewName = simpleViewName;
this.setCacheSeconds(0);
this.setInstanceCacheFlag(false);
String gfc = request.getParameter("frontActionFlag");
if (gfc != null && gfc.equals("0")) {
this.disableFrontAction();
}
String gbc = request.getParameter("backActionFlag");
if (gbc != null && gbc.equals("0")) {
this.disableBackAction();
}
}
private String simpleViewName;
public View perform(WebInput request) throws ControllerException {
View v = new View(this.simpleViewName);
return v;
}
}
| apache-2.0 |
alexruiz/fest-android | src/main/java/org/fest/assertions/api/android/database/sqlite/SqliteDatabaseAssert.java | 4628 | // Copyright 2013 Square, Inc.
package org.fest.assertions.api.android.database.sqlite;
import android.database.sqlite.SQLiteDatabase;
import org.fest.assertions.api.AbstractAssert;
import static org.fest.assertions.api.Assertions.assertThat;
/** Assertions for {@link SQLiteDatabase} instances. */
public class SqliteDatabaseAssert extends AbstractAssert<SqliteDatabaseAssert, SQLiteDatabase> {

  // Every assertion follows the same pattern: guard against a null actual,
  // read the property from the database under test, and delegate to FEST's
  // assertThat with an overriding error message. All methods return this
  // assert instance so calls can be chained fluently.

  public SqliteDatabaseAssert(SQLiteDatabase actual) {
    super(actual, SqliteDatabaseAssert.class);
  }

  public SqliteDatabaseAssert hasMaximumSize(long size) {
    isNotNull();
    long actualSize = actual.getMaximumSize();
    assertThat(actualSize) //
        .overridingErrorMessage("Expected maximum size <%s> but was <%s>.", size, actualSize) //
        .isEqualTo(size);
    return this;
  }

  public SqliteDatabaseAssert hasPageSize(long size) {
    isNotNull();
    long actualSize = actual.getPageSize();
    assertThat(actualSize) //
        .overridingErrorMessage("Expected page size <%s> but was <%s>.", size, actualSize) //
        .isEqualTo(size);
    return this;
  }

  public SqliteDatabaseAssert hasPath(String path) {
    isNotNull();
    String actualPath = actual.getPath();
    assertThat(actualPath) //
        .overridingErrorMessage("Expected path <%s> but was <%s>.", path, actualPath) //
        .isEqualTo(path);
    return this;
  }

  public SqliteDatabaseAssert hasVersion(int version) {
    isNotNull();
    int actualVersion = actual.getVersion();
    assertThat(actualVersion) //
        .overridingErrorMessage("Expected version <%s> but was <%s>.", version, actualVersion) //
        .isEqualTo(version);
    return this;
  }

  // Positive/negative pairs below assert the boolean properties of the
  // database (transaction state, integrity, locking, openness, read-only
  // mode, write-ahead logging).

  public SqliteDatabaseAssert isInTransaction() {
    isNotNull();
    assertThat(actual.inTransaction()) //
        .overridingErrorMessage("Expected to be in transaction but was not.") //
        .isTrue();
    return this;
  }

  public SqliteDatabaseAssert isNotInTransaction() {
    isNotNull();
    assertThat(actual.inTransaction()) //
        .overridingErrorMessage("Expected to not be in transaction but was.") //
        .isFalse();
    return this;
  }

  public SqliteDatabaseAssert isDatabaseIntegrityOk() {
    isNotNull();
    assertThat(actual.isDatabaseIntegrityOk()) //
        .overridingErrorMessage("Expected database integrity to be OK but was not.") //
        .isTrue();
    return this;
  }

  public SqliteDatabaseAssert isDatabaseIntegrityNotOk() {
    isNotNull();
    assertThat(actual.isDatabaseIntegrityOk()) //
        .overridingErrorMessage("Expected database integrity to not be OK but was.") //
        .isFalse();
    return this;
  }

  public SqliteDatabaseAssert isLockedByCurrentThread() {
    isNotNull();
    assertThat(actual.isDbLockedByCurrentThread()) //
        .overridingErrorMessage("Expected DB to be locked by current thread but was not.") //
        .isTrue();
    return this;
  }

  public SqliteDatabaseAssert isNotLockedByCurrentThread() {
    isNotNull();
    assertThat(actual.isDbLockedByCurrentThread()) //
        .overridingErrorMessage("Expected DB to not be locked but current thread but was.") //
        .isFalse();
    return this;
  }

  public SqliteDatabaseAssert isOpen() {
    isNotNull();
    assertThat(actual.isOpen()) //
        .overridingErrorMessage("Expected DB to be open but was not.") //
        .isTrue();
    return this;
  }

  public SqliteDatabaseAssert isNotOpen() {
    isNotNull();
    assertThat(actual.isOpen()) //
        .overridingErrorMessage("Expected DB to not be open but was.") //
        .isFalse();
    return this;
  }

  public SqliteDatabaseAssert isReadOnly() {
    isNotNull();
    assertThat(actual.isReadOnly()) //
        .overridingErrorMessage("Expected DB to be read only but was not.") //
        .isTrue();
    return this;
  }

  public SqliteDatabaseAssert isNotReadOnly() {
    isNotNull();
    assertThat(actual.isReadOnly()) //
        .overridingErrorMessage("Expected DB to not be read only but was.") //
        .isFalse();
    return this;
  }

  public SqliteDatabaseAssert isWriteAheadLoggingEnabled() {
    isNotNull();
    assertThat(actual.isWriteAheadLoggingEnabled()) //
        .overridingErrorMessage(
            "Expected DB to have write-ahead logging enabled but was disabled.") //
        .isTrue();
    return this;
  }

  public SqliteDatabaseAssert isWriteAheadLoggingDisabled() {
    isNotNull();
    assertThat(actual.isWriteAheadLoggingEnabled()) //
        .overridingErrorMessage(
            "Expected DB to have write-ahead logging disabled but was enabled.") //
        .isFalse();
    return this;
  }
}
| apache-2.0 |
QualiMaster/Infrastructure | CoordinationLayer/src/eu/qualimaster/coordination/commands/AlgorithmChangeCommand.java | 4578 | package eu.qualimaster.coordination.commands;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import eu.qualimaster.common.QMInternal;
import eu.qualimaster.infrastructure.PipelineOptions;
import eu.qualimaster.pipeline.AlgorithmChangeParameter;
/**
* A command to change an algorithm in an algorithm family.
* Please note that abstract names are described here while the CoordinationLayer must translate
* them to actual Execution System commands.
*
* @author Holger Eichelberger
*/
public class AlgorithmChangeCommand extends AbstractPipelineElementCommand {

    private static final long serialVersionUID = 79134352686021725L;

    /** Target algorithm name (abstract name, translated by the CoordinationLayer). */
    private String algorithm;

    // Keys are AlgorithmChangeParameter names; values stay Serializable so
    // the command itself remains serializable for messaging.
    private Map<String, Serializable> parameters = new HashMap<String, Serializable>();

    private PipelineOptions options;

    /**
     * Creates an algorithm change command.
     *
     * @param pipeline the name of the pipeline
     * @param pipelineElement the name of the pipeline element
     * @param algorithm the target name of the algorithm
     */
    public AlgorithmChangeCommand(String pipeline, String pipelineElement, String algorithm) {
        super(pipeline, pipelineElement);
        this.algorithm = algorithm;
    }

    /**
     * Sets an integer parameter.
     *
     * @param param the parameter identifier
     * @param value the value
     * @throws IllegalArgumentException in case that the parameter does not accept an integer (or a string as fallback)
     */
    public void setIntParameter(AlgorithmChangeParameter param, int value) {
        AlgorithmChangeParameter.setIntParameter(parameters, param, value);
    }

    /**
     * Returns an integer parameter.
     *
     * @param param the parameter identifier
     * @param dflt the default value in case that the parameter is not specified or cannot be turned into an integer
     *   (may be <b>null</b>)
     * @return the value of <code>param</code>, <code>dflt</code> if not specified / not an integer value
     */
    public Integer getIntParameter(AlgorithmChangeParameter param, Integer dflt) {
        return AlgorithmChangeParameter.getIntParameter(parameters, param, dflt);
    }

    /**
     * Sets a String parameter.
     *
     * @param param the parameter identifier
     * @param value the value
     * @throws IllegalArgumentException in case that parameter does not accept a String value
     */
    public void setStringParameter(AlgorithmChangeParameter param, String value) {
        AlgorithmChangeParameter.setStringParameter(parameters, param, value);
    }

    /**
     * Returns a String parameter.
     *
     * @param param the parameter identifier
     * @param dflt the default value in case that the parameter is not specified (may be <b>null</b>)
     * @return the value of <code>param</code>, <code>dflt</code> if not specified
     */
    public String getStringParameter(AlgorithmChangeParameter param, String dflt) {
        return AlgorithmChangeParameter.getStringParameter(parameters, param, dflt);
    }

    /**
     * Returns all defined parameters.
     *
     * @return all parameters
     */
    @QMInternal
    public Map<AlgorithmChangeParameter, Serializable> getParameters() {
        return AlgorithmChangeParameter.convert(parameters);
    }

    /**
     * Sets all given parameters.
     *
     * @param parameters the parameters to set
     */
    @QMInternal
    public void setParameters(Map<String, Serializable> parameters) {
        this.parameters.putAll(parameters);
    }

    /**
     * Returns the target algorithm name.
     *
     * @return the name of the target algorithm
     */
    public String getAlgorithm() {
        return algorithm;
    }

    @QMInternal
    @Override
    public CoordinationExecutionResult accept(ICoordinationCommandVisitor visitor) {
        return visitor.visitAlgorithmChangeCommand(this);
    }

    /**
     * Defines the (additional) pipeline options for the new (sub-pipeline) algorithm.
     *
     * @param options the pipeline options (may be <b>null</b>)
     */
    public void setOptions(PipelineOptions options) {
        this.options = options;
    }

    /**
     * Returns (additional) pipeline options to be used for the new (sub-pipeline) algorithm.
     *
     * @return the pipeline options (may be <b>null</b>)
     */
    public PipelineOptions getOptions() {
        return options;
    }
}
| apache-2.0 |
epheng/computer-database | src/test/java/dao/CompanyDAOTest.java | 222 | package dao;
public class CompanyDAOTest {
    // The only test is commented out -- presumably because it requires a live
    // database behind CompanyDAO.getInstance(); re-enable once a test
    // datasource is available (TODO confirm the reason with the author).
    /*
    @Test
    public void listAllCompaniesTest() {
        CompanyDAO companyDao = CompanyDAO.getInstance();
        assertNotNull("list not null", companyDao.listAllCompanies());
    }
    */
}
| apache-2.0 |
agwlvssainokuni/sqlapp | foundation/test/java/cherry/foundation/validator/ZipCodeValidatorTest.java | 3500 | /*
* Copyright 2014,2015 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cherry.foundation.validator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.util.HashMap;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.ConversionService;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.validation.BindingResult;
import org.springframework.validation.Validator;
import org.springframework.web.bind.WebDataBinder;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = "classpath:config/applicationContext-test.xml")
public class ZipCodeValidatorTest {

	@Autowired
	private ConversionService conversionService;

	@Autowired
	private Validator validator;

	/** Binds the given form values onto {@code form} and runs validation. */
	private BindingResult bindAndValidate(Map<String, String> formValues, TestDto form) {
		WebDataBinder binder = new WebDataBinder(form);
		binder.setConversionService(conversionService);
		binder.addValidators(validator);
		binder.bind(new MutablePropertyValues(formValues));
		binder.validate();
		return binder.getBindingResult();
	}

	/** Null, empty, and well-formed zip codes must all pass validation. */
	@Test
	public void testOK() {
		Map<String, String> formValues = new HashMap<>();
		formValues.put("zipCode0", null);
		formValues.put("zipCode1", "");
		formValues.put("zipCode2", "1234567");
		formValues.put("zipCode3", null);
		formValues.put("zipCode4", "");
		formValues.put("zipCode5", "123-4567");

		TestDto form = new TestDto();
		BindingResult result = bindAndValidate(formValues, form);

		assertEquals(0, result.getErrorCount());
		assertNull(form.getZipCode0());
		assertEquals("", form.getZipCode1());
		assertEquals("1234567", form.getZipCode2());
		assertNull(form.getZipCode3());
		assertEquals("", form.getZipCode4());
		assertEquals("123-4567", form.getZipCode5());
	}

	/** Malformed zip codes must produce one binding error per field. */
	@Test
	public void testNG() {
		Map<String, String> formValues = new HashMap<>();
		formValues.put("zipCode0", "123456");
		formValues.put("zipCode1", "12345678");
		formValues.put("zipCode2", "abcdefg");
		formValues.put("zipCode3", "123-456");
		formValues.put("zipCode4", "123-45678");
		formValues.put("zipCode5", "abc-defg");

		BindingResult result = bindAndValidate(formValues, new TestDto());

		assertEquals(6, result.getErrorCount());
	}

	/** Form backing bean covering plain and hyphenated zip-code fields. */
	@Getter
	@Setter
	public static class TestDto {
		@ZipCode
		private String zipCode0;
		@ZipCode
		private String zipCode1;
		@ZipCode
		private String zipCode2;
		@ZipCode(hyphen = true)
		private String zipCode3;
		@ZipCode(hyphen = true)
		private String zipCode4;
		@ZipCode(hyphen = true)
		private String zipCode5;
	}
}
| apache-2.0 |
zhuilu/androidpn-client | src/org/androidpn/client/adapter/UserAdapter.java | 1735 | package org.androidpn.client.adapter;
import java.util.List;
import org.androidpn.client.bean.User;
import org.androidpn.demoapp.R;
import android.content.Context;
import android.os.Handler;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
/**
* Created by dai.sl on 15/3/30.
*/
public class UserAdapter extends BaseAdapter {

    static float screenWidth = 0;

    /** Backing data set rendered by this adapter. */
    private List<User> users;
    private LayoutInflater inflater;
    private Context mContext;
    private Handler mHandler;

    public UserAdapter(List<User> users, Context context, Handler handler) {
        this.users = users;
        this.mContext = context;
        this.mHandler = handler;
        this.inflater = LayoutInflater.from(context);
        screenWidth = context.getResources().getDisplayMetrics().widthPixels;
    }

    @Override
    public int getCount() {
        return users.size();
    }

    @Override
    public Object getItem(int position) {
        return users.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        UserViewHolder holder;
        if (convertView != null) {
            // Recycled row: reuse the holder cached on the view.
            holder = (UserViewHolder) convertView.getTag();
        } else {
            convertView = inflater.inflate(R.layout.show_item, null);
            holder = new UserViewHolder(convertView);
            convertView.setTag(holder);
        }
        holder.photo.setText(users.get(position).getPhoto());
        return convertView;
    }

    /** View-holder caching the row's widgets to avoid repeated findViewById. */
    public static class UserViewHolder {
        TextView photo;

        public UserViewHolder(View convertView) {
            photo = (TextView) convertView.findViewById(R.id.photo);
        }
    }
}
| apache-2.0 |
hotpads/datarouter | datarouter-filesystem/src/main/java/io/datarouter/filesystem/snapshot/reader/block/DecodingBlockLoaderFactory.java | 1966 | /*
* Copyright © 2009 HotPads (admin@hotpads.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.filesystem.snapshot.reader.block;
import javax.inject.Inject;
import javax.inject.Singleton;
import io.datarouter.filesystem.snapshot.block.root.RootBlock;
import io.datarouter.filesystem.snapshot.compress.BlockDecompressor;
import io.datarouter.filesystem.snapshot.compress.BlockDecompressorFactory;
import io.datarouter.filesystem.snapshot.encode.BlockDecoder;
import io.datarouter.filesystem.snapshot.encode.BlockDecoderFactory;
import io.datarouter.filesystem.snapshot.path.SnapshotPaths;
import io.datarouter.filesystem.snapshot.path.SnapshotPathsRegistry;
import io.datarouter.filesystem.snapshot.storage.block.SnapshotBlockStorageReader;
@Singleton
public class DecodingBlockLoaderFactory{

	@Inject
	private SnapshotPathsRegistry pathsRegistry;
	@Inject
	private BlockDecoderFactory blockDecoderFactory;
	@Inject
	private BlockDecompressorFactory blockDecompressorFactory;

	/** Builds a DecodingBlockLoader wired for the given snapshot root block. */
	public DecodingBlockLoader create(RootBlock rootBlock, SnapshotBlockStorageReader snapshotBlockStorageReader){
		SnapshotPaths paths = pathsRegistry.getPaths(rootBlock.pathFormat());
		BlockDecompressor decompressor = blockDecompressorFactory.create(rootBlock);
		BlockDecoder decoder = blockDecoderFactory.create(rootBlock);
		return new DecodingBlockLoader(snapshotBlockStorageReader, paths, decompressor, decoder);
	}
}
| apache-2.0 |
ceylon/ceylon | model/src/org/eclipse/ceylon/compiler/java/metadata/ObjectExprs.java | 955 | /********************************************************************************
* Copyright (c) 2011-2017 Red Hat Inc. and/or its affiliates and others
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* SPDX-License-Identifier: Apache-2.0
********************************************************************************/
package org.eclipse.ceylon.compiler.java.metadata;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Applied to an annotation constructor, encodes the "literal"
* enumerated object expressions in the invocation
*/
// Retained in the class file (not at runtime) so it is readable by the compiler
// when it loads the annotation constructor from a binary, but carries no
// reflection cost.
@Retention(RetentionPolicy.CLASS)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface ObjectExprs {
    /** The enumerated object expressions, one {@link ObjectValue} per literal. */
    ObjectValue[] value();
}
| apache-2.0 |
vespa-engine/vespa | container-core/src/test/java/com/yahoo/metrics/simple/DimensionsCacheTest.java | 4621 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.metrics.simple;
import static org.junit.Assert.*;
import java.util.Collection;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.yahoo.metrics.simple.UntypedMetric.AssumedType;
/**
* Functional test for point persistence layer.
*
* @author Steinar Knutsen
*/
public class DimensionsCacheTest {

    // Capacity handed to the cache under test: it should retain at most this
    // many points per metric when padding a new bucket from an older one.
    private static final int POINTS_TO_KEEP = 3;

    // Recreated for every test in setUp(); nulled in tearDown().
    DimensionCache cache;

    @Before
    public void setUp() throws Exception {
        cache = new DimensionCache(POINTS_TO_KEEP);
    }

    @After
    public void tearDown() throws Exception {
        cache = null;
    }

    // Happy path: after seeding 4 points and then delivering a bucket with a
    // single fresh point, the padded bucket holds exactly POINTS_TO_KEEP points
    // and the freshest measurement survives the trimming.
    @Test
    public final void smokeTest() {
        String metricName = "testMetric";
        Bucket first = new Bucket();
        for (int i = 0; i < 4; ++i) {
            populateSingleValue(metricName, first, i);
        }
        cache.updateDimensionPersistence(null, first);
        Bucket second = new Bucket();
        final int newest = 42;
        populateSingleValue(metricName, second, newest);
        cache.updateDimensionPersistence(first, second);
        assertEquals(POINTS_TO_KEEP, second.getValuesForMetric(metricName).size());
        boolean newestFound = false;
        for (Entry<Point, UntypedMetric> x : second.getValuesForMetric(metricName)) {
            if (x.getValue().getLast() == newest) {
                newestFound = true;
            }
        }
        assertTrue("Kept newest measurement when padding points.", newestFound);
    }

    // Degenerate input: empty buckets (and a null previous bucket) must not
    // throw and must leave the target bucket empty.
    @Test
    public final void testNoBoomWithEmptyBuckets() {
        Bucket check = new Bucket();
        cache.updateDimensionPersistence(null, new Bucket());
        cache.updateDimensionPersistence(null, new Bucket());
        cache.updateDimensionPersistence(new Bucket(), check);
        assertEquals(0, check.entrySet().size());
    }

    // Metrics seen only in the previous bucket are carried forward, so the
    // second bucket ends up containing both metric names.
    @Test
    public final void testUpdateWithNullThenDataThenData() {
        Bucket first = new Bucket();
        populateDimensionLessValue("one", first, 2);
        cache.updateDimensionPersistence(null, first);
        Bucket second = new Bucket();
        populateDimensionLessValue("other", second, 3);
        cache.updateDimensionPersistence(first, second);
        Collection<String> names = second.getAllMetricNames();
        assertEquals(2, names.size());
        assertTrue(names.contains("one"));
        assertTrue(names.contains("other"));
    }

    // A previous bucket with an ancient timestamp should be dropped from the
    // cache: "other" (old) is forgotten while "one" (recent) and "two" (new)
    // are both present in the final bucket.
    @Test
    public final void requireThatOldDataIsForgotten() {
        Bucket first = new Bucket(); // "now" as timestamp
        populateDimensionLessValue("one", first, 2);
        cache.updateDimensionPersistence(first, new Bucket());
        Bucket second = new Bucket(17, 42); // really old timestamp
        populateDimensionLessValue("other", second, 3);
        Bucket third = new Bucket();
        populateDimensionLessValue("two", third, 4);
        cache.updateDimensionPersistence(second, third);
        Collection<String> names = third.getAllMetricNames();
        assertEquals(2, names.size());
        assertTrue(names.contains("one"));
        assertTrue(names.contains("two"));
    }

    // Data survives an intermediate empty bucket: both metrics are replayed
    // into the final bucket even though one update cycle carried no samples.
    @Test
    public final void testUpdateWithNullThenDataThenNoDataThenData() {
        Bucket first = new Bucket();
        Bucket second = new Bucket();
        populateDimensionLessValue("first", first, 1.0d);
        populateDimensionLessValue("second", second, 2.0d);
        cache.updateDimensionPersistence(null, first);
        cache.updateDimensionPersistence(first, new Bucket());
        cache.updateDimensionPersistence(new Bucket(), second);
        assertEquals(2, second.entrySet().size());
        assertTrue(second.getAllMetricNames().contains("first"));
        assertTrue(second.getAllMetricNames().contains("second"));
    }

    // Helper: store a single gauge sample with no dimensions (null Point).
    private void populateDimensionLessValue(String metricName, Bucket bucket, double x) {
        Identifier id = new Identifier(metricName, null);
        Sample wrappedX = new Sample(new Measurement(Double.valueOf(x)), id, AssumedType.GAUGE);
        bucket.put(wrappedX);
    }

    // Helper: store a gauge sample whose single dimension key/value are both
    // derived from i, so each call produces a distinct Point.
    private void populateSingleValue(String metricName, Bucket bucket, int i) {
        Map<String, Integer> m = new TreeMap<>();
        m.put(String.valueOf(i), Integer.valueOf(i));
        Point p = new Point(m);
        Identifier id = new Identifier(metricName, p);
        Sample x = new Sample(new Measurement(Double.valueOf(i)), id, AssumedType.GAUGE);
        bucket.put(x);
    }
}
| apache-2.0 |
bseeger/fcrepo4 | fcrepo-http-commons/src/main/java/org/fcrepo/http/commons/FedoraApplication.java | 2199 | /*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.http.commons;
import org.fcrepo.kernel.api.Transaction;
import org.fcrepo.http.commons.session.TransactionProvider;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.jackson.JacksonFeature;
import org.glassfish.jersey.logging.LoggingFeature;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.process.internal.RequestScoped;
import java.util.logging.Logger;
import static org.slf4j.LoggerFactory.getLogger;
/**
* @author cabeer
* @since 9/22/14
*/
/**
 * Jersey {@link ResourceConfig} for the Fedora HTTP API: scans the org.fcrepo
 * packages for resources/providers and registers the features the API relies on.
 *
 * @author cabeer
 * @since 9/22/14
 */
public class FedoraApplication extends ResourceConfig {

    private static final org.slf4j.Logger LOGGER = getLogger(FedoraApplication.class);

    /**
     * Configures package scanning, the HK2 transaction binder, multipart and
     * Jackson support, and (at debug level only) request/response logging.
     */
    public FedoraApplication() {
        packages("org.fcrepo");
        register(new FactoryBinder());
        register(MultiPartFeature.class);
        register(JacksonFeature.class);
        // Wire-level tracing is noisy and costly, so it is enabled only when
        // debug logging is turned on.
        if (LOGGER.isDebugEnabled()) {
            final Logger julLogger = Logger.getLogger(LoggingFeature.class.getName());
            register(new LoggingFeature(julLogger));
        }
    }

    /** HK2 binder exposing the current {@link Transaction} as a request-scoped injectable. */
    static class FactoryBinder extends AbstractBinder {
        @Override
        protected void configure() {
            bindFactory(TransactionProvider.class).to(Transaction.class).in(RequestScoped.class);
        }
    }
}
| apache-2.0 |
NessComputing/components-ness-metrics | src/main/java/com/nesscomputing/metrics/MetricsModule.java | 1155 | /**
* Copyright (C) 2012 Ness Computing, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nesscomputing.metrics;
import com.google.inject.AbstractModule;
import com.google.inject.Scopes;
import com.nesscomputing.config.ConfigProvider;
import com.yammer.metrics.guice.InstrumentationModule;
/**
 * Guice module wiring up Yammer metrics instrumentation alongside the
 * configuration-driven metrics exporter.
 */
public class MetricsModule extends AbstractModule {
    @Override
    protected void configure() {
        // Pull in the @Timed/@Metered/... AOP support from the yammer-guice bridge.
        install(new InstrumentationModule());
        // The exporter is eager so it starts publishing as soon as the injector is built.
        bind(MetricsExporter.class).asEagerSingleton();
        // Configuration is sourced from the ness-config subsystem; one shared instance.
        bind(MetricsConfiguration.class).toProvider(ConfigProvider.of(MetricsConfiguration.class)).in(Scopes.SINGLETON);
    }
}
| apache-2.0 |
impetus-opensource/Kundera | src/kundera-oracle-nosql/src/test/java/com/impetus/client/oraclenosql/datatypes/tests/StudentOracleNoSQLLongPrimitiveTest.java | 19677 | /**
* Copyright 2013 Impetus Infotech.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.impetus.client.oraclenosql.datatypes.tests;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.impetus.client.oraclenosql.datatypes.entities.StudentOracleNoSQLLongPrimitive;
/**
* @author vivek.mishra
*
*/
/**
 * CRUD and JPQL tests for an entity keyed by a primitive {@code long} against
 * Oracle NoSQL via Kundera.  The public test methods drive the private
 * test* helpers IN ORDER — each helper depends on the state left behind by the
 * previous one (e.g. testMerge renames the max-id student to "Kuldeep" and
 * testDelete asserts that name), so the helpers are not independently runnable.
 *
 * @author vivek.mishra
 *
 */
public class StudentOracleNoSQLLongPrimitiveTest extends OracleNoSQLBase
{
    // NOTE(review): not referenced anywhere in this class — possibly left over
    // from a sibling test; confirm before removing.
    private static final String keyspace = "KunderaTests";

    private EntityManagerFactory emf;

    @Before
    public void setUp() throws Exception
    {
        emf = Persistence.createEntityManagerFactory("oracleNosqlDataTypeTest");
    }

    @After
    public void tearDown() throws Exception
    {
        emf.close();
    }

    // Runs the full scenario reusing one EntityManager across steps.
    @Test
    public void testExecuteUseSameEm()
    {
        testPersist(true);
        testFindById(true);
        testMerge(true);
        // testFindByQuery(true);
        // testNamedQueryUseSameEm(true);
        testDelete(true);
    }

    // Runs the full scenario with a fresh EntityManager per step, forcing reads
    // to hit the datastore rather than the persistence context.
    @Test
    public void testExecute()
    {
        testPersist(false);
        testFindById(false);
        testMerge(false);
        // testFindByQuery(false);
        // testNamedQuery(false);
        testDelete(false);
    }

    // Persists three students keyed by Long.MAX_VALUE, Long.MIN_VALUE and a
    // fixed "random" value supplied by OracleNoSQLBase.
    public void testPersist(boolean useSameEm)
    {
        EntityManager em = emf.createEntityManager();
        // Insert max value of long
        StudentOracleNoSQLLongPrimitive studentMax = new StudentOracleNoSQLLongPrimitive();
        studentMax.setAge((Short) getMaxValue(short.class));
        studentMax.setId((Long) getMaxValue(long.class));
        studentMax.setName((String) getMaxValue(String.class));
        em.persist(studentMax);
        // Insert min value of long
        StudentOracleNoSQLLongPrimitive studentMin = new StudentOracleNoSQLLongPrimitive();
        studentMin.setAge((Short) getMinValue(short.class));
        studentMin.setId((Long) getMinValue(long.class));
        studentMin.setName((String) getMinValue(String.class));
        em.persist(studentMin);
        // Insert random value of long
        StudentOracleNoSQLLongPrimitive student = new StudentOracleNoSQLLongPrimitive();
        student.setAge((Short) getRandomValue(short.class));
        student.setId((Long) getRandomValue(long.class));
        student.setName((String) getRandomValue(String.class));
        em.persist(student);
        em.close();
    }

    // Looks up each of the three persisted students by primary key and checks
    // the non-key fields round-tripped intact.
    public void testFindById(boolean useSameEm)
    {
        EntityManager em = emf.createEntityManager();
        StudentOracleNoSQLLongPrimitive studentMax = em.find(StudentOracleNoSQLLongPrimitive.class, getMaxValue(long.class));
        Assert.assertNotNull(studentMax);
        Assert.assertEquals(getMaxValue(short.class), studentMax.getAge());
        Assert.assertEquals(getMaxValue(String.class), studentMax.getName());
        if (!useSameEm)
        {
            em.close();
            em = emf.createEntityManager();
        }
        StudentOracleNoSQLLongPrimitive studentMin = em.find(StudentOracleNoSQLLongPrimitive.class, getMinValue(long.class));
        Assert.assertNotNull(studentMin);
        Assert.assertEquals(getMinValue(short.class), studentMin.getAge());
        Assert.assertEquals(getMinValue(String.class), studentMin.getName());
        if (!useSameEm)
        {
            em.close();
            em = emf.createEntityManager();
        }
        StudentOracleNoSQLLongPrimitive student = em.find(StudentOracleNoSQLLongPrimitive.class, getRandomValue(long.class));
        Assert.assertNotNull(student);
        Assert.assertEquals(getRandomValue(short.class), student.getAge());
        Assert.assertEquals(getRandomValue(String.class), student.getName());
        em.close();
    }

    // Renames the max-id student to "Kuldeep" via merge; later steps
    // (testDelete, findByName, ...) rely on this rename having happened.
    public void testMerge(boolean useSameEm)
    {
        EntityManager em = emf.createEntityManager();
        StudentOracleNoSQLLongPrimitive student = em.find(StudentOracleNoSQLLongPrimitive.class, getMaxValue(long.class));
        Assert.assertNotNull(student);
        Assert.assertEquals(getMaxValue(short.class), student.getAge());
        Assert.assertEquals(getMaxValue(String.class), student.getName());
        student.setName("Kuldeep");
        em.merge(student);
        if (!useSameEm)
        {
            em.close();
            em = emf.createEntityManager();
        }
        StudentOracleNoSQLLongPrimitive newStudent = em.find(StudentOracleNoSQLLongPrimitive.class, getMaxValue(long.class));
        Assert.assertNotNull(newStudent);
        Assert.assertEquals(getMaxValue(short.class), newStudent.getAge());
        Assert.assertEquals("Kuldeep", newStudent.getName());
        // NOTE(review): unlike the other steps, the EntityManager is not closed
        // here — confirm whether that is intentional.
    }

    // JPQL query suite; currently dead code (callers have it commented out).
    public void testFindByQuery(boolean useSameEm)
    {
        findAllQuery();
        findByName();
        findByAge();
        findByNameAndAgeGTAndLT();
        findByNameAndAgeGTEQAndLTEQ();
        findByNameAndAgeGTAndLTEQ();
        findByNameAndAgeWithOrClause();
        findByAgeAndNameGTAndLT();
        findByNameAndAGEBetween();
        // findByRange();
    }

    // age equality combined with a name range: expects only the min-id student.
    private void findByAgeAndNameGTAndLT()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.age = " + getMinValue(short.class)
                + " and s.name > Amresh and s.name <= " + getMaxValue(String.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(1, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            Assert.assertEquals(getMinValue(long.class), student.getId());
            Assert.assertEquals(getMinValue(short.class), student.getAge());
            Assert.assertEquals(getMinValue(String.class), student.getName());
            count++;
        }
        Assert.assertEquals(1, count);
        em.close();
    }

    // BETWEEN over the whole id range: expects all three students back.
    private void findByRange()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.id between " + getMinValue(long.class) + " and "
                + getMaxValue(long.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(3, students.size());
        int count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            if (student.getId() == ((Long) getMaxValue(long.class)).longValue())
            {
                // Max-id student was renamed by testMerge.
                Assert.assertEquals(getMaxValue(short.class), student.getAge());
                Assert.assertEquals("Kuldeep", student.getName());
                count++;
            }
            else if (student.getId() == ((Long) getMinValue(long.class)).longValue())
            {
                Assert.assertEquals(getMinValue(short.class), student.getAge());
                Assert.assertEquals(getMinValue(String.class), student.getName());
                count++;
            }
            else
            {
                Assert.assertEquals(getRandomValue(long.class), student.getId());
                Assert.assertEquals(getRandomValue(short.class), student.getAge());
                Assert.assertEquals(getRandomValue(String.class), student.getName());
                count++;
            }
        }
        Assert.assertEquals(3, count);
        em.close();
    }

    // name equality + age lower bound: only the max-id ("Kuldeep") student matches.
    // NOTE(review): method name says "OrClause" but the query uses AND.
    private void findByNameAndAgeWithOrClause()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.name = Kuldeep and s.age > " + getMinValue(short.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(1, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            Assert.assertEquals(getMaxValue(long.class), student.getId());
            Assert.assertEquals(getMaxValue(short.class), student.getAge());
            Assert.assertEquals("Kuldeep", student.getName());
            count++;
        }
        Assert.assertEquals(1, count);
        em.close();
    }

    // name equality + half-open age interval (min, max]: only the max-id student.
    private void findByNameAndAgeGTAndLTEQ()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.name = Kuldeep and s.age > " + getMinValue(short.class)
                + " and s.age <= " + getMaxValue(short.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(1, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            Assert.assertEquals(getMaxValue(long.class), student.getId());
            Assert.assertEquals(getMaxValue(short.class), student.getAge());
            Assert.assertEquals("Kuldeep", student.getName());
            count++;
        }
        Assert.assertEquals(1, count);
        em.close();
    }

    // Named-query (bulk update/delete) variants; currently dead code.
    public void testNamedQueryUseSameEm(boolean useSameEm)
    {
        updateNamed(true);
        deleteNamed(true);
    }

    public void testNamedQuery(boolean useSameEm)
    {
        updateNamed(false);
        deleteNamed(false);
    }

    // Removes the max-id student (renamed "Kuldeep" by testMerge) and verifies
    // it can no longer be found.
    public void testDelete(boolean useSameEm)
    {
        EntityManager em = emf.createEntityManager();
        StudentOracleNoSQLLongPrimitive studentMax = em.find(StudentOracleNoSQLLongPrimitive.class, getMaxValue(long.class));
        Assert.assertNotNull(studentMax);
        Assert.assertEquals(getMaxValue(short.class), studentMax.getAge());
        Assert.assertEquals("Kuldeep", studentMax.getName());
        em.remove(studentMax);
        if (!useSameEm)
        {
            em.close();
            em = emf.createEntityManager();
        }
        studentMax = em.find(StudentOracleNoSQLLongPrimitive.class, getMaxValue(long.class));
        Assert.assertNull(studentMax);
        em.close();
    }

    /**
     * Bulk-deletes the student named "Vivek" (set by updateNamed) and asserts
     * the random-id row is gone.
     */
    private void deleteNamed(boolean useSameEm)
    {
        String deleteQuery = "Delete From StudentOracleNoSQLLongPrimitive s where s.name=Vivek";
        EntityManager em = emf.createEntityManager();
        Query q = em.createQuery(deleteQuery);
        q.executeUpdate();
        if (!useSameEm)
        {
            em.close();
            em = emf.createEntityManager();
        }
        StudentOracleNoSQLLongPrimitive newStudent = em.find(StudentOracleNoSQLLongPrimitive.class, getRandomValue(long.class));
        Assert.assertNull(newStudent);
        em.close();
    }

    /**
     * Bulk-renames "Amresh" (the random-id student) to "Vivek" and verifies the
     * rename took effect.
     */
    private void updateNamed(boolean useSameEm)
    {
        EntityManager em = emf.createEntityManager();
        String updateQuery = "Update StudentOracleNoSQLLongPrimitive s SET s.name=Vivek where s.name=Amresh";
        Query q = em.createQuery(updateQuery);
        q.executeUpdate();
        if (!useSameEm)
        {
            em.close();
            em = emf.createEntityManager();
        }
        StudentOracleNoSQLLongPrimitive newStudent = em.find(StudentOracleNoSQLLongPrimitive.class, getRandomValue(long.class));
        Assert.assertNotNull(newStudent);
        Assert.assertEquals(getRandomValue(short.class), newStudent.getAge());
        Assert.assertEquals("Vivek", newStudent.getName());
        em.close();
    }

    // name equality + BETWEEN on age: only the random-id student matches.
    private void findByNameAndAGEBetween()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.name = Amresh and s.age between "
                + getMinValue(short.class) + " and " + getMaxValue(short.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(1, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            Assert.assertEquals(getRandomValue(long.class), student.getId());
            Assert.assertEquals(getRandomValue(short.class), student.getAge());
            Assert.assertEquals(getRandomValue(String.class), student.getName());
            count++;
        }
        Assert.assertEquals(1, count);
        em.close();
    }

    // name equality + open age interval (min, max): only the random-id student.
    private void findByNameAndAgeGTAndLT()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.name = Amresh and s.age > " + getMinValue(short.class)
                + " and s.age < " + getMaxValue(short.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(1, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            Assert.assertEquals(getRandomValue(long.class), student.getId());
            Assert.assertEquals(getRandomValue(short.class), student.getAge());
            Assert.assertEquals(getRandomValue(String.class), student.getName());
            count++;
        }
        Assert.assertEquals(1, count);
        em.close();
    }

    // name equality + closed age interval [min, max]: both "Kuldeep" students.
    private void findByNameAndAgeGTEQAndLTEQ()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.name = Kuldeep and s.age >= "
                + getMinValue(short.class) + " and s.age <= " + getMaxValue(short.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(2, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            if (student.getId() == ((Long) getMaxValue(long.class)).longValue())
            {
                Assert.assertEquals(getMaxValue(short.class), student.getAge());
                Assert.assertEquals("Kuldeep", student.getName());
                count++;
            }
            else
            {
                Assert.assertEquals(getMinValue(long.class), student.getId());
                Assert.assertEquals(getMinValue(short.class), student.getAge());
                Assert.assertEquals(getMinValue(String.class), student.getName());
                count++;
            }
        }
        Assert.assertEquals(2, count);
        em.close();
    }

    // age equality: only the random-id student matches.
    private void findByAge()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.age = " + getRandomValue(short.class);
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(1, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            Assert.assertEquals(getRandomValue(long.class), student.getId());
            Assert.assertEquals(getRandomValue(short.class), student.getAge());
            Assert.assertEquals(getRandomValue(String.class), student.getName());
            count++;
        }
        Assert.assertEquals(1, count);
        em.close();
    }

    /**
     * name equality: expects the max-id and min-id students ("Kuldeep" after
     * the merge rename).
     */
    private void findByName()
    {
        EntityManager em;
        String query;
        Query q;
        List<StudentOracleNoSQLLongPrimitive> students;
        int count;
        em = emf.createEntityManager();
        query = "Select s From StudentOracleNoSQLLongPrimitive s where s.name = Kuldeep";
        q = em.createQuery(query);
        students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(2, students.size());
        count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            if (student.getId() == ((Long) getMaxValue(long.class)).longValue())
            {
                Assert.assertEquals(getMaxValue(short.class), student.getAge());
                Assert.assertEquals("Kuldeep", student.getName());
                count++;
            }
            else
            {
                Assert.assertEquals(getMinValue(long.class), student.getId());
                Assert.assertEquals(getMinValue(short.class), student.getAge());
                Assert.assertEquals(getMinValue(String.class), student.getName());
                count++;
            }
        }
        Assert.assertEquals(2, count);
        em.close();
    }

    /**
     * Unfiltered select: expects all three students, with the max-id one
     * carrying the merged name.
     */
    private void findAllQuery()
    {
        EntityManager em = emf.createEntityManager();
        // Select all query.
        String query = "Select s From StudentOracleNoSQLLongPrimitive s ";
        Query q = em.createQuery(query);
        List<StudentOracleNoSQLLongPrimitive> students = q.getResultList();
        Assert.assertNotNull(students);
        Assert.assertEquals(3, students.size());
        int count = 0;
        for (StudentOracleNoSQLLongPrimitive student : students)
        {
            if (student.getId() == ((Long) getMaxValue(long.class)).longValue())
            {
                Assert.assertEquals(getMaxValue(short.class), student.getAge());
                Assert.assertEquals("Kuldeep", student.getName());
                count++;
            }
            else if (student.getId() == ((Long) getMinValue(long.class)).longValue())
            {
                Assert.assertEquals(getMinValue(short.class), student.getAge());
                Assert.assertEquals(getMinValue(String.class), student.getName());
                count++;
            }
            else
            {
                Assert.assertEquals(getRandomValue(long.class), student.getId());
                Assert.assertEquals(getRandomValue(short.class), student.getAge());
                Assert.assertEquals(getRandomValue(String.class), student.getName());
                count++;
            }
        }
        Assert.assertEquals(3, count);
        em.close();
    }
}
| apache-2.0 |
Wenpei/incubator-systemml | src/main/java/org/apache/sysml/runtime/matrix/sort/IndexSortStitchupMapper.java | 4553 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.matrix.sort;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.sysml.runtime.matrix.SortMR;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
/**
 * Stitch-up mapper for the index-sort job: takes the column-vector blocks
 * produced per sort partition and re-emits them as blocks aligned to the
 * global row-block grid of the output matrix.
 *
 * Incoming keys encode (row block index within the partition, partition id);
 * the partition id is carried in the column index — TODO confirm against the
 * producer of these records.
 */
public class IndexSortStitchupMapper extends MapReduceBase
    implements Mapper<MatrixIndexes, MatrixBlock, MatrixIndexes, MatrixBlock>
{
    // _offsets[p] = global row offset (0-based) where partition p's rows start.
    private long[] _offsets = null;
    // Total number of rows in the output matrix.
    private long _rlen = -1;
    // Rows per block in the output matrix.
    private long _brlen = -1;

    // Reused output buffers to avoid per-record allocation (safe because the
    // MR framework serializes collected values before the next map() call).
    private MatrixBlock _tmpBlk = null;
    private MatrixIndexes _tmpIx = null;

    @Override
    public void map(MatrixIndexes key, MatrixBlock value, OutputCollector<MatrixIndexes, MatrixBlock> out, Reporter reporter)
        throws IOException
    {
        //compute starting cell offset
        // Global 0-based row offset of this block's first cell: partition base
        // offset plus the block's position within the partition.
        int id = (int)key.getColumnIndex();
        long offset = _offsets[id];
        offset += (key.getRowIndex()-1)*_brlen;

        //SPECIAL CASE: block aligned
        // The block starts exactly on a grid boundary and already has the grid
        // block size, so it can be forwarded unchanged under its global index.
        int blksize = computeOutputBlocksize(_rlen, _brlen, offset);
        if( offset%_brlen==0 && value.getNumRows()==blksize )
        {
            _tmpIx.setIndexes(offset/_brlen+1, 1);
            out.collect(_tmpIx, value);
        }
        //GENERAL CASE: not block aligned
        else
        {
            // Row offset of the block's first cell within its target grid block.
            int loffset = (int) (offset%_brlen);

            //multiple output blocks
            // The input rows straddle a grid boundary: split them into the tail
            // of one output block and the head of the next.
            if( value.getNumRows()+loffset>_brlen )
            {
                long tmpnnz = 0;

                //output first part
                _tmpBlk.reset( (int)_brlen, 1 );
                for( int i=0; i<_brlen-loffset; i++ )
                    _tmpBlk.quickSetValue(loffset+i, 0, value.quickGetValue(i, 0));
                tmpnnz += _tmpBlk.getNonZeros();
                _tmpIx.setIndexes(offset/_brlen+1, 1);
                out.collect(_tmpIx, _tmpBlk);

                //output second block
                // Second part starts on the next grid boundary; size may be a
                // partial block if it is the last block of the matrix.
                blksize = computeOutputBlocksize(_rlen, _brlen, offset+(_brlen-loffset));
                _tmpBlk.reset( blksize, 1 );
                for( int i=(int)_brlen-loffset; i<value.getNumRows(); i++ )
                    _tmpBlk.quickSetValue(i-((int)_brlen-loffset), 0, value.quickGetValue(i, 0));
                tmpnnz += _tmpBlk.getNonZeros();
                _tmpIx.setIndexes(offset/_brlen+2, 1);
                out.collect(_tmpIx, _tmpBlk);

                //sanity check for correctly redistributed non-zeros
                if( tmpnnz != value.getNonZeros() )
                    throw new IOException("Number of split non-zeros does not match non-zeros of original block ("+tmpnnz+" vs "+value.getNonZeros()+")");
            }
            //single output block
            // All input rows fall inside one grid block; copy shifted by loffset.
            else
            {
                _tmpBlk.reset( blksize, 1 );
                for( int i=0; i<value.getNumRows(); i++ )
                    _tmpBlk.quickSetValue(loffset+i, 0, value.quickGetValue(i, 0));
                _tmpIx.setIndexes(offset/_brlen+1, 1);
                out.collect(_tmpIx, _tmpBlk);
            }
        }
    }

    @Override
    public void configure(JobConf job)
    {
        super.configure(job);
        // Partition offsets are serialized into the job conf by the driver.
        _offsets = parseOffsets(job.get(SortMR.SORT_INDEXES_OFFSETS));
        _rlen = MRJobConfiguration.getNumRows(job, (byte) 0);
        _brlen = MRJobConfiguration.getNumRowsPerBlock(job, (byte) 0);

        _tmpIx = new MatrixIndexes();
        _tmpBlk = new MatrixBlock((int)_brlen, 1, false);
    }

    /**
     * Parses a bracketed, comma-separated offset list (e.g. "[0, 100, 250]")
     * as produced by Arrays.toString-style serialization in the job conf.
     *
     * @param str serialized offsets, including surrounding brackets
     * @return parsed offsets, one per sort partition
     */
    private static long[] parseOffsets(String str)
    {
        String counts = str.substring(1, str.length() - 1);
        StringTokenizer st = new StringTokenizer(counts, ",");
        int len = st.countTokens();
        long[] ret = new long[len];
        for( int i=0; i<len; i++ )
            ret[i] = Long.parseLong(st.nextToken().trim());
        return ret;
    }

    // Number of rows of the output block containing global row 'offset':
    // _brlen for interior blocks, the remainder for the last block.
    private static int computeOutputBlocksize( long rlen, long brlen, long offset )
    {
        long rix = offset/brlen+1;
        int blksize = (int) Math.min(brlen, rlen-(rix-1)*brlen);
        return blksize;
    }
}
| apache-2.0 |
sladeware/groningen | src/main/java/org/arbeitspferde/groningen/GroningenConfigParamsModule.java | 5035 | /* Copyright 2012 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.arbeitspferde.groningen;
import com.google.inject.AbstractModule;
import com.google.inject.Key;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.ProtocolMessageEnum;
import org.arbeitspferde.groningen.common.BlockScope;
import org.arbeitspferde.groningen.common.SimpleScope;
import org.arbeitspferde.groningen.config.GroningenConfig;
import org.arbeitspferde.groningen.config.NamedConfigParamImpl;
import org.arbeitspferde.groningen.config.PipelineIterationScoped;
import org.arbeitspferde.groningen.proto.Params.GroningenParams;
import java.util.logging.Logger;
/**
* Guice module that handles Groningen's per-pipeline configuration parameters.
*/
/**
 * Guice module that handles Groningen's per-pipeline configuration parameters.
 *
 * <p>Each field of {@link GroningenParams} is exposed as an injectable value,
 * annotated with its field name, inside the pipeline-iteration scope.  The
 * mapping from protobuf field type to Java binding type lives in a single
 * helper so the seeding path ({@link #nailConfigToScope}) and the binding path
 * ({@link #configure}) cannot drift apart.
 */
public class GroningenConfigParamsModule extends AbstractModule {
  private static final Logger log =
      Logger.getLogger(GroningenConfigParamsModule.class.getCanonicalName());

  /**
   * Maps a protobuf field descriptor to the Java class under which its value is
   * seeded and bound in the pipeline-iteration scope.
   *
   * @return the binding type, or {@code null} if the field's type is not
   *     supported as a configuration parameter (caller logs a warning)
   */
  private static Class<?> bindingTypeFor(FieldDescriptor fd) {
    switch (fd.getJavaType()) {
      case ENUM:
        return ProtocolMessageEnum.class;
      case INT:
        return Integer.class;
      case LONG:
        return Long.class;
      case DOUBLE:
        return Double.class;
      case STRING:
        return String.class;
      default:
        return null;
    }
  }

  /**
   * Seeds one particular configuration parameter of the given type into the given scope.
   * Suppressing "unchecked" warnings due to generic type being used for type casting.
   */
  @SuppressWarnings("unchecked")
  private static <T> void nailConfigParamToScope(Class<T> type, FieldDescriptor fd,
      GroningenConfig config, BlockScope scope) {
    T value = (T) config.getParamBlock().getField(fd);
    log.fine(
        String.format("nailing %s (%s) to {%s}", fd.getName(), fd.getJavaType().name(), value));
    scope.seed(Key.get(type, new NamedConfigParamImpl(fd.getName())), value);
  }

  /**
   * Binds given type annotated by the name of the FieldDescriptor to a
   * SimpleScope.seededKeyProvider. This provider throws an exception when the value is
   * injected outside of the desired scope.
   */
  private <T> void bindConfigParamToSeededKeyProvider(Class<T> type, FieldDescriptor fd) {
    bind(Key.get(type, new NamedConfigParamImpl(fd.getName())))
        .toProvider(SimpleScope.<T>seededKeyProvider())
        .in(PipelineIterationScoped.class);
  }

  /**
   * Nails all the per-pipeline configuration parameters to a given Guice scope.
   *
   * @param config Configuration parameters to be fixed for the pipeline run duration.
   * @param scope Guice scope used to fix the parameters.
   */
  public static void nailConfigToScope(GroningenConfig config, BlockScope scope) {
    scope.seed(GroningenConfig.class, config);
    for (FieldDescriptor fd : GroningenParams.getDescriptor().getFields()) {
      Class<?> type = bindingTypeFor(fd);
      if (type == null) {
        log.warning(String.format("unrecognized field descriptor type: %s.", fd.getJavaType()));
      } else {
        nailConfigParamToScope(type, fd, config, scope);
      }
    }
  }

  /**
   * Binds GroningenConfig and every configuration parameter (fields annotated with
   * @NamedConfigParam) to SimpleScope.seededKeyProvider() in the pipeline-iteration
   * scope, which prevents them from being injected outside of the scope
   * (see bindConfigParamToSeededKeyProvider()).
   */
  @Override
  protected void configure() {
    bind(GroningenConfig.class)
        .toProvider(SimpleScope.<GroningenConfig>seededKeyProvider())
        .in(PipelineIterationScoped.class);
    for (FieldDescriptor fd : GroningenParams.getDescriptor().getFields()) {
      Class<?> type = bindingTypeFor(fd);
      if (type == null) {
        log.warning(String.format("unrecognized field descriptor type: %s.", fd.getJavaType()));
      } else {
        bindConfigParamToSeededKeyProvider(type, fd);
      }
    }
  }
}
| apache-2.0 |
osinstom/onos | core/net/src/test/java/org/onosproject/net/pi/impl/PiTranslatorServiceTest.java | 12547 | /*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.pi.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.testing.EqualsTester;
import org.junit.Before;
import org.junit.Test;
import org.onlab.packet.MacAddress;
import org.onlab.util.ImmutableByteSequence;
import org.onosproject.TestApplicationId;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.DefaultApplicationId;
import org.onosproject.core.GroupId;
import org.onosproject.net.DeviceId;
import org.onosproject.net.PortNumber;
import org.onosproject.net.flow.DefaultFlowRule;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flow.instructions.Instructions;
import org.onosproject.net.group.DefaultGroup;
import org.onosproject.net.group.DefaultGroupBucket;
import org.onosproject.net.group.DefaultGroupDescription;
import org.onosproject.net.group.Group;
import org.onosproject.net.group.GroupBucket;
import org.onosproject.net.group.GroupBuckets;
import org.onosproject.net.group.GroupDescription;
import org.onosproject.net.pi.model.PiPipeconf;
import org.onosproject.net.pi.runtime.PiAction;
import org.onosproject.net.pi.runtime.PiActionGroup;
import org.onosproject.net.pi.runtime.PiActionGroupMember;
import org.onosproject.net.pi.runtime.PiActionGroupMemberId;
import org.onosproject.net.pi.runtime.PiActionParam;
import org.onosproject.net.pi.runtime.PiGroupKey;
import org.onosproject.net.pi.runtime.PiTableAction;
import org.onosproject.net.pi.runtime.PiTableEntry;
import org.onosproject.net.pi.runtime.PiTernaryFieldMatch;
import org.onosproject.pipelines.basic.PipeconfLoader;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.onlab.util.ImmutableByteSequence.copyFrom;
import static org.onlab.util.ImmutableByteSequence.fit;
import static org.onosproject.net.group.GroupDescription.Type.SELECT;
import static org.onosproject.net.pi.impl.PiFlowRuleTranslatorImpl.MAX_PI_PRIORITY;
import static org.onosproject.pipelines.basic.BasicConstants.ACT_PRF_WCMP_SELECTOR_ID;
import static org.onosproject.pipelines.basic.BasicConstants.ACT_PRM_PORT_ID;
import static org.onosproject.pipelines.basic.BasicConstants.ACT_SET_EGRESS_PORT_ID;
import static org.onosproject.pipelines.basic.BasicConstants.HDR_ETH_DST_ID;
import static org.onosproject.pipelines.basic.BasicConstants.HDR_ETH_SRC_ID;
import static org.onosproject.pipelines.basic.BasicConstants.HDR_ETH_TYPE_ID;
import static org.onosproject.pipelines.basic.BasicConstants.HDR_IN_PORT_ID;
import static org.onosproject.pipelines.basic.BasicConstants.PORT_BITWIDTH;
import static org.onosproject.pipelines.basic.BasicConstants.TBL_TABLE0_ID;
import static org.onosproject.pipelines.basic.BasicConstants.TBL_WCMP_TABLE_ID;
/**
 * Tests for {@link PiFlowRuleTranslatorImpl} and {@link PiGroupTranslatorImpl}.
 */
@SuppressWarnings("ConstantConditions")
public class PiTranslatorServiceTest {

    private static final short IN_PORT_MASK = 0x01ff; // 9-bit mask
    private static final short ETH_TYPE_MASK = (short) 0xffff;
    private static final DeviceId DEVICE_ID = DeviceId.deviceId("device:dummy:1");
    private static final ApplicationId APP_ID = TestApplicationId.create("dummy");
    private static final GroupId GROUP_ID = GroupId.valueOf(1);
    private static final List<GroupBucket> BUCKET_LIST = ImmutableList.of(outputBucket(1),
                                                                          outputBucket(2),
                                                                          outputBucket(3)
    );
    private static final PiGroupKey GROUP_KEY = new PiGroupKey(TBL_WCMP_TABLE_ID, ACT_PRF_WCMP_SELECTOR_ID,
                                                               GROUP_ID.id());
    private static final GroupBuckets BUCKETS = new GroupBuckets(BUCKET_LIST);
    private static final GroupDescription GROUP_DESC =
            new DefaultGroupDescription(DEVICE_ID, SELECT, BUCKETS, GROUP_KEY, GROUP_ID.id(), APP_ID);
    private static final Group GROUP = new DefaultGroup(GROUP_ID, GROUP_DESC);
    private static final int DEFAULT_MEMBER_WEIGHT = 1;
    private static final int BASE_MEM_ID = 65535;

    private Collection<PiActionGroupMember> expectedMembers;
    private Random random = new Random();
    private PiPipeconf pipeconf;

    @Before
    public void setUp() throws Exception {
        pipeconf = PipeconfLoader.BASIC_PIPECONF;
        expectedMembers = ImmutableSet.of(outputMember(1),
                                          outputMember(2),
                                          outputMember(3));
    }

    /**
     * Translates two equal (but distinct) flow rules and verifies that the
     * resulting PI table entries are equal and faithfully carry over match
     * fields, priority and timeout.
     */
    @Test
    public void testTranslateFlowRules() throws Exception {
        ApplicationId appId = new DefaultApplicationId(1, "test");
        int tableId = 0;
        MacAddress ethDstMac = MacAddress.valueOf(random.nextLong());
        MacAddress ethSrcMac = MacAddress.valueOf(random.nextLong());
        short ethType = (short) (0x0000FFFF & random.nextInt());
        short outPort = (short) random.nextInt(65);
        short inPort = (short) random.nextInt(65);
        int timeout = random.nextInt(100);
        int priority = random.nextInt(100);
        TrafficSelector matchInPort1 = DefaultTrafficSelector
                .builder()
                .matchInPort(PortNumber.portNumber(inPort))
                .matchEthDst(ethDstMac)
                .matchEthSrc(ethSrcMac)
                .matchEthType(ethType)
                .build();
        TrafficTreatment outPort2 = DefaultTrafficTreatment
                .builder()
                .setOutput(PortNumber.portNumber(outPort))
                .build();
        FlowRule rule1 = DefaultFlowRule.builder()
                .forDevice(DEVICE_ID)
                .forTable(tableId)
                .fromApp(appId)
                .withSelector(matchInPort1)
                .withTreatment(outPort2)
                .makeTemporary(timeout)
                .withPriority(priority)
                .build();
        FlowRule rule2 = DefaultFlowRule.builder()
                .forDevice(DEVICE_ID)
                .forTable(tableId)
                .fromApp(appId)
                .withSelector(matchInPort1)
                .withTreatment(outPort2)
                .makeTemporary(timeout)
                .withPriority(priority)
                .build();
        PiTableEntry entry1 = PiFlowRuleTranslatorImpl.translate(rule1, pipeconf, null);
        // BUGFIX: translate rule2 here (the original translated rule1 twice),
        // so the equality groups below actually verify that equal rules
        // produce equal entries rather than just translation determinism.
        PiTableEntry entry2 = PiFlowRuleTranslatorImpl.translate(rule2, pipeconf, null);
        // check equality, i.e. same rules must produce same entries
        new EqualsTester()
                .addEqualityGroup(rule1, rule2)
                .addEqualityGroup(entry1, entry2)
                .testEquals();
        int numMatchParams = pipeconf.pipelineModel().table(TBL_TABLE0_ID).get().matchFields().size();
        // parse values stored in entry1
        PiTernaryFieldMatch inPortParam = (PiTernaryFieldMatch) entry1.matchKey().fieldMatch(HDR_IN_PORT_ID).get();
        PiTernaryFieldMatch ethDstParam = (PiTernaryFieldMatch) entry1.matchKey().fieldMatch(HDR_ETH_DST_ID).get();
        PiTernaryFieldMatch ethSrcParam = (PiTernaryFieldMatch) entry1.matchKey().fieldMatch(HDR_ETH_SRC_ID).get();
        PiTernaryFieldMatch ethTypeParam = (PiTernaryFieldMatch) entry1.matchKey().fieldMatch(HDR_ETH_TYPE_ID).get();
        // A timeout is only expected when the pipeline model supports aging.
        Optional<Double> expectedTimeout = pipeconf.pipelineModel().table(TBL_TABLE0_ID).get().supportsAging()
                ? Optional.of((double) rule1.timeout()) : Optional.empty();
        // check that the number of parameters in the entry is the same as the number of table keys
        assertThat("Incorrect number of match parameters",
                   entry1.matchKey().fieldMatches().size(), is(equalTo(numMatchParams)));
        // check that values stored in entry are the same used for the flow rule
        assertThat("Incorrect inPort match param value",
                   inPortParam.value().asReadOnlyBuffer().getShort(), is(equalTo(inPort)));
        assertThat("Incorrect inPort match param mask",
                   inPortParam.mask().asReadOnlyBuffer().getShort(), is(equalTo(IN_PORT_MASK)));
        assertThat("Incorrect ethDestMac match param value",
                   ethDstParam.value().asArray(), is(equalTo(ethDstMac.toBytes())));
        assertThat("Incorrect ethDestMac match param mask",
                   ethDstParam.mask().asArray(), is(equalTo(MacAddress.BROADCAST.toBytes())));
        assertThat("Incorrect ethSrcMac match param value",
                   ethSrcParam.value().asArray(), is(equalTo(ethSrcMac.toBytes())));
        assertThat("Incorrect ethSrcMac match param mask",
                   ethSrcParam.mask().asArray(), is(equalTo(MacAddress.BROADCAST.toBytes())));
        assertThat("Incorrect ethType match param value",
                   ethTypeParam.value().asReadOnlyBuffer().getShort(), is(equalTo(ethType)));
        assertThat("Incorrect ethType match param mask",
                   ethTypeParam.mask().asReadOnlyBuffer().getShort(), is(equalTo(ETH_TYPE_MASK)));
        // PI priorities are inverted w.r.t. ONOS priorities (higher ONOS = lower PI).
        assertThat("Incorrect priority value",
                   entry1.priority().get(), is(equalTo(MAX_PI_PRIORITY - rule1.priority())));
        assertThat("Incorrect timeout value",
                   entry1.timeout(), is(equalTo(expectedTimeout)));
    }

    /** Builds a SELECT group bucket whose treatment outputs on the given port. */
    private static GroupBucket outputBucket(int portNum) {
        ImmutableByteSequence paramVal = copyFrom(portNum);
        PiActionParam param = new PiActionParam(ACT_PRM_PORT_ID, paramVal);
        PiTableAction action = PiAction.builder().withId(ACT_SET_EGRESS_PORT_ID).withParameter(param).build();
        TrafficTreatment treatment = DefaultTrafficTreatment.builder()
                .add(Instructions.piTableAction(action))
                .build();
        return DefaultGroupBucket.createSelectGroupBucket(treatment);
    }

    /** Builds the PI group member expected from translating {@link #outputBucket(int)}. */
    private static PiActionGroupMember outputMember(int portNum)
            throws ImmutableByteSequence.ByteSequenceTrimException {
        // Port value must be trimmed/fit to the pipeline's port bit-width.
        PiActionParam param = new PiActionParam(ACT_PRM_PORT_ID, fit(copyFrom(portNum), PORT_BITWIDTH));
        PiAction piAction = PiAction.builder()
                .withId(ACT_SET_EGRESS_PORT_ID)
                .withParameter(param).build();
        return PiActionGroupMember.builder()
                .withAction(piAction)
                .withId(PiActionGroupMemberId.of(BASE_MEM_ID + portNum))
                .withWeight(DEFAULT_MEMBER_WEIGHT)
                .build();
    }

    /**
     * Test add group with buckets.
     */
    @Test
    public void testTranslateGroups() throws Exception {
        PiActionGroup piGroup1 = PiGroupTranslatorImpl.translate(GROUP, pipeconf, null);
        PiActionGroup piGroup2 = PiGroupTranslatorImpl.translate(GROUP, pipeconf, null);
        new EqualsTester()
                .addEqualityGroup(piGroup1, piGroup2)
                .testEquals();
        assertThat("Group ID must be equal",
                   piGroup1.id().id(), is(equalTo(GROUP_ID.id())));
        assertThat("Action profile ID must be equal",
                   piGroup1.actionProfileId(), is(equalTo(ACT_PRF_WCMP_SELECTOR_ID)));
        // members installed
        Collection<PiActionGroupMember> members = piGroup1.members();
        assertThat("The number of group members must be equal",
                   piGroup1.members().size(), is(expectedMembers.size()));
        assertThat("Group members must be equal",
                   members.containsAll(expectedMembers) && expectedMembers.containsAll(members));
    }
}
| apache-2.0 |
subclipse/svnclientadapter | cmdline/src/main/java/org/tigris/subversion/svnclientadapter/commandline/CmdLineProperty.java | 2392 | /*******************************************************************************
* Copyright (c) 2003, 2006 svnClientAdapter project and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* svnClientAdapter project committers - initial API and implementation
******************************************************************************/
package org.tigris.subversion.svnclientadapter.commandline;
import java.io.File;
import org.tigris.subversion.svnclientadapter.ISVNProperty;
import org.tigris.subversion.svnclientadapter.SVNUrl;
/**
 * Immutable value object describing a single Subversion property as reported
 * by the command-line client. A property is attached either to a local file
 * or to a repository URL, never to both.
 *
 * @author Philip Schatz (schatz at tigris)
 */
class CmdLineProperty implements ISVNProperty {

    /** Property name, e.g. "svn:ignore". */
    private final String propName;

    /** Property value rendered as a string. */
    private final String propValue;

    /** Local file the property is attached to; null when attached to a URL. */
    private final File file;

    /** Repository URL the property is attached to; null when attached to a file. */
    private final SVNUrl url;

    /** Raw property bytes. */
    private final byte[] data;

    /** Creates a property attached to a local file (stored in absolute form). */
    CmdLineProperty(String name, String value, File file, byte[] data) {
        this.propName = name;
        this.propValue = value;
        this.url = null;
        this.file = file.getAbsoluteFile();
        this.data = data;
    }

    /** Creates a property attached to a repository URL. */
    CmdLineProperty(String name, String value, SVNUrl url, byte[] data) {
        this.propName = name;
        this.propValue = value;
        this.url = url;
        this.file = null;
        this.data = data;
    }

    /** {@inheritDoc} */
    public String getName() {
        return propName;
    }

    /** {@inheritDoc} */
    public String getValue() {
        return propValue;
    }

    /** {@inheritDoc} */
    public File getFile() {
        return file;
    }

    /** {@inheritDoc} */
    public SVNUrl getUrl() {
        return url;
    }

    /** {@inheritDoc} */
    public byte[] getData() {
        return data;
    }
}
| apache-2.0 |
Log10Solutions/querydsl | querydsl-core/src/main/java/com/querydsl/core/types/dsl/MapPath.java | 5339 | /*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.core.types.dsl;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import javax.annotation.Nullable;
import com.querydsl.core.types.*;
/**
* {@code MapPath} represents map paths
*
* @author tiwe
*
* @param <K> key type
* @param <V> value type
*/
public class MapPath<K, V, E extends SimpleExpression<? super V>> extends MapExpressionBase<K, V, E> implements Path<Map<K, V>> {

    private static final long serialVersionUID = -9113333728412016832L;

    /** Type of the map keys. */
    private final Class<K> keyType;

    /** Underlying path implementation this expression delegates to. */
    private final PathImpl<Map<K,V>> pathMixin;

    /** Expression type used for map value access (instantiated reflectively). */
    private final Class<E> queryType;

    /** Cached reflective constructor for {@code queryType}; lazily resolved. */
    @Nullable
    private transient Constructor<E> constructor;

    /** Type of the map values. */
    private final Class<V> valueType;

    protected MapPath(Class<? super K> keyType, Class<? super V> valueType, Class<E> queryType, String variable) {
        this(keyType, valueType, queryType, PathMetadataFactory.forVariable(variable));
    }

    protected MapPath(Class<? super K> keyType, Class<? super V> valueType, Class<E> queryType, Path<?> parent, String property) {
        this(keyType, valueType, queryType, PathMetadataFactory.forProperty(parent, property));
    }

    @SuppressWarnings("unchecked")
    protected MapPath(Class<? super K> keyType, Class<? super V> valueType, Class<E> queryType, PathMetadata metadata) {
        super(new ParameterizedPathImpl<Map<K,V>>((Class) Map.class, metadata, keyType, valueType));
        this.keyType = (Class<K>) keyType;
        this.valueType = (Class<V>) valueType;
        this.queryType = queryType;
        this.pathMixin = (PathImpl<Map<K,V>>) mixin;
    }

    @Override
    public final <R,C> R accept(Visitor<R,C> v, C context) {
        return v.visit(pathMixin, context);
    }

    protected PathMetadata forMapAccess(K key) {
        return PathMetadataFactory.forMapAccess(this, key);
    }

    protected PathMetadata forMapAccess(Expression<K> key) {
        return PathMetadataFactory.forMapAccess(this, key);
    }

    @Override
    public E get(Expression<K> key) {
        return newElement(forMapAccess(key));
    }

    @Override
    public E get(K key) {
        return newElement(forMapAccess(key));
    }

    /**
     * Creates the value expression for the given map-access metadata, translating
     * any reflective failure into an {@link ExpressionException}.
     *
     * <p>Consolidates the exception-translation boilerplate that was previously
     * duplicated in both {@code get} overloads.
     */
    private E newElement(PathMetadata md) {
        try {
            return newInstance(md);
        } catch (NoSuchMethodException e) {
            throw new ExpressionException(e);
        } catch (InstantiationException e) {
            throw new ExpressionException(e);
        } catch (IllegalAccessException e) {
            throw new ExpressionException(e);
        } catch (InvocationTargetException e) {
            throw new ExpressionException(e);
        }
    }

    /**
     * Get the key type
     *
     * @return key type
     */
    public Class<K> getKeyType() {
        return keyType;
    }

    @Override
    public PathMetadata getMetadata() {
        return pathMixin.getMetadata();
    }

    @Override
    public Path<?> getRoot() {
        return pathMixin.getRoot();
    }

    /**
     * Get the value type
     *
     * @return value type
     */
    public Class<V> getValueType() {
        return valueType;
    }

    @Override
    public AnnotatedElement getAnnotatedElement() {
        return pathMixin.getAnnotatedElement();
    }

    // Resolves (once) and invokes the constructor of the query type; "typed" query
    // types additionally take the value class as first constructor argument.
    private E newInstance(PathMetadata pm) throws NoSuchMethodException,
            InstantiationException, IllegalAccessException,
            InvocationTargetException {
        if (constructor == null) {
            if (Constants.isTyped(queryType)) {
                constructor = queryType.getDeclaredConstructor(Class.class, PathMetadata.class);
            } else {
                constructor = queryType.getDeclaredConstructor(PathMetadata.class);
            }
            constructor.setAccessible(true);
        }
        if (Constants.isTyped(queryType)) {
            return constructor.newInstance(getValueType(), pm);
        } else {
            return constructor.newInstance(pm);
        }
    }

    @Override
    public Class<?> getParameter(int index) {
        if (index == 0) {
            return keyType;
        } else if (index == 1) {
            return valueType;
        } else {
            throw new IndexOutOfBoundsException(String.valueOf(index));
        }
    }
}
| apache-2.0 |
trasa/aws-sdk-java | aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/transform/ListUsersRequestMarshaller.java | 2324 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model.transform;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.identitymanagement.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
* ListUsersRequest Marshaller
*/
/**
 * Marshalls a {@link ListUsersRequest} into an HTTP POST against the
 * AmazonIdentityManagement Query API, adding one query parameter per
 * non-null optional field.
 */
public class ListUsersRequestMarshaller implements
        Marshaller<Request<ListUsersRequest>, ListUsersRequest> {

    public Request<ListUsersRequest> marshall(ListUsersRequest listUsersRequest) {
        if (listUsersRequest == null) {
            throw new AmazonClientException(
                    "Invalid argument passed to marshall(...)");
        }

        final Request<ListUsersRequest> marshalled = new DefaultRequest<ListUsersRequest>(
                listUsersRequest, "AmazonIdentityManagement");
        marshalled.setHttpMethod(HttpMethodName.POST);
        marshalled.addParameter("Action", "ListUsers");
        marshalled.addParameter("Version", "2010-05-08");

        // Optional parameters: only emitted when the caller supplied them.
        final String pathPrefix = listUsersRequest.getPathPrefix();
        if (pathPrefix != null) {
            marshalled.addParameter("PathPrefix", StringUtils.fromString(pathPrefix));
        }
        final String marker = listUsersRequest.getMarker();
        if (marker != null) {
            marshalled.addParameter("Marker", StringUtils.fromString(marker));
        }
        final Integer maxItems = listUsersRequest.getMaxItems();
        if (maxItems != null) {
            marshalled.addParameter("MaxItems", StringUtils.fromInteger(maxItems));
        }

        return marshalled;
    }
}
| apache-2.0 |
firejack-open/Firejack-Platform | core/src/main/java/net/firejack/platform/core/domain/TreeNode.java | 3028 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package net.firejack.platform.core.domain;
import net.firejack.platform.core.adapter.MapAdapter;
import net.firejack.platform.core.annotation.Property;
import net.firejack.platform.core.validation.annotation.NotNull;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.codehaus.jackson.map.ser.MapSerializer;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Base domain object for tree-structured entities: tracks the parent id, the
 * number of children, the child nodes themselves and a free-form parameter map.
 * Serialized via JAXB (property access) and Jackson; Spring instantiates it as
 * a prototype-scoped bean.
 */
@Component
@XmlRootElement
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
@XmlAccessorType(XmlAccessType.PROPERTY)
public abstract class TreeNode<E extends TreeNode> extends BaseEntity {
private static final long serialVersionUID = 2336429664970782344L;
// Maps to the "parent.id" property of the backing entity.
@Property(name = "parent.id")
private Long parentId;
// Number of direct children (may be set without loading the children list).
@Property
private Integer childCount;
// Direct child nodes of this node.
@Property
private List<E> children;
// Ad-hoc parameters; marshalled through MapAdapter (XML) and MapSerializer (JSON).
private Map<String, Serializable> parameters = new HashMap<String, Serializable>();
/** Identifier of the parent node; required (validated via @NotNull). */
@NotNull
public Long getParentId() {
return parentId;
}
public void setParentId(Long parentId) {
this.parentId = parentId;
}
public Integer getChildCount() {
return childCount;
}
public void setChildCount(Integer childCount) {
this.childCount = childCount;
}
/** Child nodes, wrapped in a {@code <children>} element in XML output. */
@XmlElementWrapper(name = "children")
public List<E> getChildren() {
return children;
}
public void setChildren(List<E> children) {
this.children = children;
}
/** Free-form parameter map; custom adapters handle XML/JSON marshalling. */
@XmlJavaTypeAdapter(MapAdapter.class)
@JsonSerialize(using = MapSerializer.class)
public Map<String, Serializable> getParameters() {
return parameters;
}
public void setParameters(Map<String, Serializable> parameters) {
this.parameters = parameters;
}
}
| apache-2.0 |
Unicon/cas | core/cas-server-core-configuration/src/main/java/org/apereo/cas/configuration/support/JpaBeans.java | 7037 | package org.apereo.cas.configuration.support;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.configuration.model.support.jpa.AbstractJpaProperties;
import org.apereo.cas.configuration.model.support.jpa.DatabaseProperties;
import org.apereo.cas.configuration.model.support.jpa.JpaConfigDataHolder;
import org.hibernate.cfg.Environment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.lookup.DataSourceLookupFailureException;
import org.springframework.jdbc.datasource.lookup.JndiDataSourceLookup;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import javax.sql.DataSource;
import java.util.Properties;
/**
 * Static factory methods for JPA-related beans: data sources (JNDI or Hikari),
 * Hibernate vendor adapters and entity manager factories.
 *
 * @author Misagh Moayyed
 * @since 5.2.0
 */
public final class JpaBeans {
private static final Logger LOGGER = LoggerFactory.getLogger(JpaBeans.class);
// Utility class; never instantiated.
protected JpaBeans() {
}
/**
 * Get new data source, from JNDI lookup or created via direct configuration
 * of Hikari pool.
 * <p>
 * If jpaProperties contains {@link AbstractJpaProperties#getDataSourceName()} a lookup will be
 * attempted. If the DataSource is not found via JNDI then CAS will attempt to
 * configure a Hikari connection pool.
 * <p>
 * Since the datasource beans are {@link org.springframework.cloud.context.config.annotation.RefreshScope},
 * they will be a proxied by Spring
 * and on some application servers there have been classloading issues. A workaround
 * for this is to use the {@link AbstractJpaProperties#isDataSourceProxy()} setting and then the dataSource will be
 * wrapped in an application level class. If that is an issue, don't do it.
 * <p>
 * If user wants to do lookup as resource, they may include {@code java:/comp/env}
 * in {@code dataSourceName} and put resource reference in web.xml
 * otherwise {@code dataSourceName} is used as JNDI name.
 *
 * @param jpaProperties the jpa properties
 * @return the data source
 */
public static DataSource newDataSource(final AbstractJpaProperties jpaProperties) {
final String dataSourceName = jpaProperties.getDataSourceName();
final boolean proxyDataSource = jpaProperties.isDataSourceProxy();
// Prefer a container-managed data source when a JNDI name is configured.
if (StringUtils.isNotBlank(dataSourceName)) {
try {
final JndiDataSourceLookup dsLookup = new JndiDataSourceLookup();
dsLookup.setResourceRef(false);
final DataSource containerDataSource = dsLookup.getDataSource(dataSourceName);
if (!proxyDataSource) {
return containerDataSource;
}
// Wrap to sidestep classloading issues with Spring's RefreshScope proxies
// on some application servers (see javadoc above).
return new DataSourceProxy(containerDataSource);
} catch (final DataSourceLookupFailureException e) {
// Deliberate fallback: a failed JNDI lookup is not fatal, we build a Hikari pool instead.
LOGGER.warn("Lookup of datasource [{}] failed due to {} "
+ "falling back to configuration via JPA properties.", dataSourceName, e.getMessage());
}
}
try {
// Direct Hikari pool configuration from the JPA properties.
final HikariDataSource bean = new HikariDataSource();
if (StringUtils.isNotBlank(jpaProperties.getDriverClass())) {
bean.setDriverClassName(jpaProperties.getDriverClass());
}
bean.setJdbcUrl(jpaProperties.getUrl());
bean.setUsername(jpaProperties.getUser());
bean.setPassword(jpaProperties.getPassword());
// NOTE(review): getMaxWait() is narrowed from long to int here — confirm
// configured values stay within int range.
bean.setLoginTimeout((int) jpaProperties.getPool().getMaxWait());
bean.setMaximumPoolSize(jpaProperties.getPool().getMaxSize());
bean.setMinimumIdle(jpaProperties.getPool().getMinSize());
bean.setIdleTimeout(jpaProperties.getIdleTimeout());
bean.setLeakDetectionThreshold(jpaProperties.getLeakThreshold());
bean.setInitializationFailTimeout(jpaProperties.getFailFastTimeout());
bean.setIsolateInternalQueries(jpaProperties.isIsolateInternalQueries());
bean.setConnectionTestQuery(jpaProperties.getHealthQuery());
bean.setAllowPoolSuspension(jpaProperties.getPool().isSuspension());
bean.setAutoCommit(jpaProperties.isAutocommit());
bean.setValidationTimeout(jpaProperties.getPool().getTimeoutMillis());
return bean;
} catch (final Exception e) {
LOGGER.error("Error creating DataSource: [{}]", e.getMessage());
throw new IllegalArgumentException(e);
}
}
/**
 * New hibernate jpa vendor adapter.
 *
 * @param databaseProperties the database properties
 * @return the hibernate jpa vendor adapter
 */
public static HibernateJpaVendorAdapter newHibernateJpaVendorAdapter(final DatabaseProperties databaseProperties) {
final HibernateJpaVendorAdapter bean = new HibernateJpaVendorAdapter();
bean.setGenerateDdl(databaseProperties.isGenDdl());
bean.setShowSql(databaseProperties.isShowSql());
return bean;
}
/**
 * New entity manager factory bean, configured from the given holder (vendor
 * adapter, persistence unit, packages to scan, data source) plus Hibernate
 * properties derived from the JPA settings.
 *
 * @param config the config
 * @param jpaProperties the jpa properties
 * @return the local container entity manager factory bean
 */
public static LocalContainerEntityManagerFactoryBean newHibernateEntityManagerFactoryBean(final JpaConfigDataHolder config,
final AbstractJpaProperties jpaProperties) {
final LocalContainerEntityManagerFactoryBean bean = new LocalContainerEntityManagerFactoryBean();
bean.setJpaVendorAdapter(config.getJpaVendorAdapter());
if (StringUtils.isNotBlank(config.getPersistenceUnitName())) {
bean.setPersistenceUnitName(config.getPersistenceUnitName());
}
bean.setPackagesToScan(config.getPackagesToScan().toArray(new String[] {}));
if (config.getDataSource() != null) {
bean.setDataSource(config.getDataSource());
}
// Translate JPA settings into Hibernate properties; any user-supplied
// properties are applied last and may override the defaults above.
final Properties properties = new Properties();
properties.put(Environment.DIALECT, jpaProperties.getDialect());
properties.put(Environment.HBM2DDL_AUTO, jpaProperties.getDdlAuto());
properties.put(Environment.STATEMENT_BATCH_SIZE, jpaProperties.getBatchSize());
if (StringUtils.isNotBlank(jpaProperties.getDefaultCatalog())) {
properties.put(Environment.DEFAULT_CATALOG, jpaProperties.getDefaultCatalog());
}
if (StringUtils.isNotBlank(jpaProperties.getDefaultSchema())) {
properties.put(Environment.DEFAULT_SCHEMA, jpaProperties.getDefaultSchema());
}
properties.put(Environment.ENABLE_LAZY_LOAD_NO_TRANS, Boolean.TRUE);
properties.put(Environment.FORMAT_SQL, Boolean.TRUE);
properties.putAll(jpaProperties.getProperties());
bean.setJpaProperties(properties);
return bean;
}
}
| apache-2.0 |
kantega/Security-api-implementation | ntlm-identity/src/main/java/no/kantega/security/api/impl/ntlm/NtlmIdentityResolver.java | 4029 | package no.kantega.security.api.impl.ntlm;
import jcifs.smb.NtlmPasswordAuthentication;
import no.kantega.security.api.identity.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.Properties;
/**
 * IdentityResolver that exposes an NTLM authentication performed by jcifs'
 * NtlmHttpFilter (which stores an NtlmPasswordAuthentication in the session).
 */
public class NtlmIdentityResolver implements IdentityResolver {

    /** Session attribute under which jcifs' filter stores the NTLM credentials. */
    private static final String NTML_REQUEST_ATTR = "NtlmHttpAuth";

    private static final String TARGET_URI_PARAM = "targetUri";

    private String authenticationContext = "ntlm";

    private Logger log = LoggerFactory.getLogger(getClass());

    /** URL of the NTLM-protected authentication endpoint (relative or absolute). */
    private String ntlmUrl;

    public AuthenticatedIdentity getIdentity(HttpServletRequest httpServletRequest) throws IdentificationFailedException {
        NtlmPasswordAuthentication auth = (NtlmPasswordAuthentication) httpServletRequest.getSession().getAttribute(NTML_REQUEST_ATTR);
        if (auth == null) {
            if (log.isDebugEnabled()) {
                // Fixed typo in the original message ("throgh").
                log.debug("User is not authenticated through NTLM. Is this URL behind the NTLMHttpFilter?");
            }
            return null;
        } else {
            return new NtlmAuthenticatedIdentity(this, auth);
        }
    }

    /**
     * Redirects to the NTLM authentication endpoint, carrying the target URI
     * to return to after authentication. (Method name spelling comes from the
     * IdentityResolver interface.)
     */
    public void initateLogin(LoginContext loginContext) {
        // BUGFIX: the original dereferenced getTargetUri() before its own null
        // fallback could run, throwing NPE when no target URI was set.
        String targetUri = null;
        if (loginContext.getTargetUri() != null) {
            targetUri = loginContext.getTargetUri().toString();
        }
        if (targetUri == null) {
            HttpServletRequest request = loginContext.getRequest();
            targetUri = request.getRequestURL().toString();
        }

        // Relative URLs are resolved against the webapp context path.
        String authenticationService = ntlmUrl.startsWith("/") ? loginContext.getRequest().getContextPath() + ntlmUrl : ntlmUrl;

        String redirectUrl = null;
        try {
            redirectUrl = authenticationService + "?" + TARGET_URI_PARAM + "=" + URLEncoder.encode(targetUri, "utf-8");
            loginContext.getResponse().sendRedirect(redirectUrl);
        } catch (IOException e) {
            throw new RuntimeException("Exception redirecting to uri " + redirectUrl, e);
        }
    }

    /**
     * Removes the NTLM credentials from the session and redirects to the
     * logout target (or "/" when none is given).
     */
    public void initiateLogout(LogoutContext logoutContext) {
        logoutContext.getRequest().getSession().removeAttribute(NTML_REQUEST_ATTR);

        String targetUrl = "/";
        if (logoutContext.getTargetUri() != null) {
            targetUrl = logoutContext.getTargetUri().toASCIIString();
            // Crude scrub of angle brackets before redirecting to a caller-supplied URL.
            targetUrl = targetUrl.replaceAll("<", "");
            targetUrl = targetUrl.replaceAll(">", "");
        }
        try {
            logoutContext.getResponse().sendRedirect(targetUrl);
        } catch (IOException e) {
            // Previously swallowed silently; log so failed logout redirects are diagnosable.
            log.error("Exception redirecting to uri {} after logout", targetUrl, e);
        }
    }

    public String getAuthenticationContext() {
        return authenticationContext;
    }

    public String getAuthenticationContextDescription() {
        return "";
    }

    public String getAuthenticationContextIconUrl() {
        return "";
    }

    public void setAuthenticationContext(String authenticationContext) {
        this.authenticationContext = authenticationContext;
    }

    public void setNtlmUrl(String ntlmUrl) {
        this.ntlmUrl = ntlmUrl;
    }

    /** AuthenticatedIdentity backed by jcifs' NtlmPasswordAuthentication. */
    class NtlmAuthenticatedIdentity implements AuthenticatedIdentity {

        private IdentityResolver resolver;

        private NtlmPasswordAuthentication ntlmAuthentication;

        public NtlmAuthenticatedIdentity(IdentityResolver resolver, NtlmPasswordAuthentication ntlmAuthentication) {
            this.resolver = resolver;
            this.ntlmAuthentication = ntlmAuthentication;
        }

        public String getLanguage() {
            throw new IllegalStateException("Language not implemented");
        }

        public Properties getRawAttributes() {
            throw new IllegalStateException("Raw properties not implemented");
        }

        public IdentityResolver getResolver() {
            return resolver;
        }

        public String getUserId() {
            return ntlmAuthentication.getUsername();
        }

        public String getDomain() {
            return resolver.getAuthenticationContext();
        }
    }
}
| apache-2.0 |
yanchhuong/spring-boot-master | src/main/java/com/heroku/demo/HomeController.java | 2109 | package com.heroku.demo;
/*
* Copyright 2015 Benedikt Ritter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
package com.heroku.demo;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
@Controller
@RequestMapping("/")
public class HomeController {
private RecordRepository repository;
@Autowired
public HomeController(RecordRepository repository) {
this.repository = repository;
}
@RequestMapping(method = RequestMethod.GET)
public String home(ModelMap model) {
List<Record> records = repository.findAll();
model.addAttribute("records", records);
model.addAttribute("insertRecord", new Record());
System.out.println("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
return "home";
}
@RequestMapping(method = RequestMethod.POST)
public String insertData(ModelMap model,
@ModelAttribute("insertRecord") @Valid Record record,
BindingResult result) {
if (!result.hasErrors()) {
repository.save(record);
}
return home(model);
}
}
*/ | apache-2.0 |
lijian17/zxing | demo/BarcodeScanner/src/com/google/zxing/client/android/camera/CameraConfigurationUtils.java | 17863 | package com.google.zxing.client.android.camera;
import android.annotation.TargetApi;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Build;
import android.util.Log;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Utility class for configuring the Android camera.
 *
 * @author lijian-pc
 * @date 2017-8-22 下午6:09:14
 */
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public final class CameraConfigurationUtils {
	private static final String TAG = "CameraConfiguration";
	/** Semicolon (;) pattern used to split flattened camera parameter strings */
	private static final Pattern SEMICOLON = Pattern.compile(";");
	/** Minimum acceptable preview resolution, in pixels */
	private static final int MIN_PREVIEW_PIXELS = 480 * 320; // normal screen
	/** Maximum exposure compensation */
	private static final float MAX_EXPOSURE_COMPENSATION = 1.5f;
	/** Minimum exposure compensation */
	private static final float MIN_EXPOSURE_COMPENSATION = 0.0f;
	/** Maximum tolerated aspect-ratio distortion vs. the screen */
	private static final double MAX_ASPECT_DISTORTION = 0.15;
	/** Minimum frame rate (FPS) */
	private static final int MIN_FPS = 10;
	/** Maximum frame rate (FPS) */
	private static final int MAX_FPS = 20;
	/** Focus/metering area size, in per-mille of the sensor frame */
	private static final int AREA_PER_1000 = 400;

	/**
	 * Private constructor: static utility class, not instantiable.
	 */
	private CameraConfigurationUtils() {
	}

	/**
	 * Chooses and applies the best available focus mode.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 * @param autoFocus
	 *            whether auto focus is requested
	 * @param disableContinuous
	 *            disable continuous-focus modes
	 * @param safeMode
	 *            safe mode (restricts to plain auto focus)
	 */
	public static void setFocus(Camera.Parameters parameters,
			boolean autoFocus, boolean disableContinuous, boolean safeMode) {
		// Focus modes supported by this device
		List<String> supportedFocusModes = parameters.getSupportedFocusModes();
		String focusMode = null;
		// If auto focus was requested
		if (autoFocus) {
			// Safe mode, or continuous focus disabled
			if (safeMode || disableContinuous) {
				focusMode = findSettableValue("对焦模式", supportedFocusModes,
						Camera.Parameters.FOCUS_MODE_AUTO);
			} else {
				focusMode = findSettableValue("对焦模式", supportedFocusModes,
						Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE,// continuous picture
						Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO,// continuous video
						Camera.Parameters.FOCUS_MODE_AUTO);// auto focus
			}
		}
		// Maybe auto focus was selected but is unavailable, so fall back here:
		if (!safeMode && focusMode == null) {
			focusMode = findSettableValue("对焦模式", supportedFocusModes,
					Camera.Parameters.FOCUS_MODE_MACRO,
					Camera.Parameters.FOCUS_MODE_EDOF);
		}
		if (focusMode != null) {
			if (focusMode.equals(parameters.getFocusMode())) {
				Log.i(TAG, "对焦模式已设置为 " + focusMode);
			} else {
				parameters.setFocusMode(focusMode);
			}
		}
	}

	/**
	 * Turns the torch (flash) on or off.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 * @param on
	 *            whether the torch should be on
	 */
	public static void setTorch(Camera.Parameters parameters, boolean on) {
		List<String> supportedFlashModes = parameters.getSupportedFlashModes();
		String flashMode;
		if (on) {
			flashMode = findSettableValue("闪光模式", supportedFlashModes,
					Camera.Parameters.FLASH_MODE_TORCH,
					Camera.Parameters.FLASH_MODE_ON);
		} else {
			flashMode = findSettableValue("闪光模式", supportedFlashModes,
					Camera.Parameters.FLASH_MODE_OFF);
		}
		if (flashMode != null) {
			if (flashMode.equals(parameters.getFlashMode())) {
				Log.i(TAG, "闪光模式已设置为 " + flashMode);
			} else {
				Log.i(TAG, "设置闪光模式为 " + flashMode);
				parameters.setFlashMode(flashMode);
			}
		}
	}

	/**
	 * Applies the best exposure compensation the device supports.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 * @param lightOn
	 *            whether the torch is on (use lower exposure when lit)
	 */
	public static void setBestExposure(Camera.Parameters parameters,
			boolean lightOn) {
		// Get the minimum exposure compensation index.
		int minExposure = parameters.getMinExposureCompensation();
		// Get the maximum exposure compensation index.
		int maxExposure = parameters.getMaxExposureCompensation();
		// Get the exposure compensation step.
		float step = parameters.getExposureCompensationStep();
		if ((minExposure != 0 || maxExposure != 0) && step > 0.0f) {
			// Use a low value while the light is on
			float targetCompensation = lightOn ? MIN_EXPOSURE_COMPENSATION
					: MAX_EXPOSURE_COMPENSATION;
			int compensationSteps = Math.round(targetCompensation / step);
			float actualCompensation = step * compensationSteps;
			// Clamp value:
			compensationSteps = Math.max(
					Math.min(compensationSteps, maxExposure), minExposure);
			if (parameters.getExposureCompensation() == compensationSteps) {
				Log.i(TAG, "曝光补偿已设定为 " + compensationSteps + " / "
						+ actualCompensation);
			} else {
				Log.i(TAG, "设置曝光补偿 " + compensationSteps + " / "
						+ actualCompensation);
				parameters.setExposureCompensation(compensationSteps);
			}
		} else {
			Log.i(TAG, "相机不支持曝光补偿");
		}
	}

	/**
	 * Sets the best preview FPS range using the default bounds.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 */
	public static void setBestPreviewFPS(Camera.Parameters parameters) {
		setBestPreviewFPS(parameters, MIN_FPS, MAX_FPS);
	}

	/**
	 * Sets the best preview FPS range within the given bounds.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 * @param minFPS
	 *            minimum FPS, in frames per second
	 * @param maxFPS
	 *            maximum FPS, in frames per second
	 */
	public static void setBestPreviewFPS(Camera.Parameters parameters,
			int minFPS, int maxFPS) {
		List<int[]> supportedPreviewFpsRanges = parameters
				.getSupportedPreviewFpsRange();
		Log.i(TAG, "支持的FPS范围: " + toString(supportedPreviewFpsRanges));
		if (supportedPreviewFpsRanges != null
				&& !supportedPreviewFpsRanges.isEmpty()) {
			int[] suitableFPSRange = null;
			// Camera API expresses FPS ranges scaled by 1000.
			for (int[] fpsRange : supportedPreviewFpsRanges) {
				int thisMin = fpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
				int thisMax = fpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
				if (thisMin >= minFPS * 1000 && thisMax <= maxFPS * 1000) {
					suitableFPSRange = fpsRange;
					break;
				}
			}
			if (suitableFPSRange == null) {
				Log.i(TAG, "没有合适的FPS范围?");
			} else {
				int[] currentFpsRange = new int[2];
				parameters.getPreviewFpsRange(currentFpsRange);
				if (Arrays.equals(currentFpsRange, suitableFPSRange)) {
					Log.i(TAG, "FPS范围已设置为 " + Arrays.toString(suitableFPSRange));
				} else {
					Log.i(TAG, "将FPS范围设置为 " + Arrays.toString(suitableFPSRange));
					parameters
							.setPreviewFpsRange(
									suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
									suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
				}
			}
		}
	}

	/**
	 * Sets the focus area to the center of the frame, when supported.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 */
	public static void setFocusArea(Camera.Parameters parameters) {
		if (parameters.getMaxNumFocusAreas() > 0) {
			Log.i(TAG, "旧的焦点区域: " + toString(parameters.getFocusAreas()));
			List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
			Log.i(TAG, "设置焦点区域 : " + toString(middleArea));
			parameters.setFocusAreas(middleArea);
		} else {
			Log.i(TAG, "设备不支持对焦区域");
		}
	}

	/**
	 * Sets the metering area to the center of the frame, when supported.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 */
	public static void setMetering(Camera.Parameters parameters) {
		if (parameters.getMaxNumMeteringAreas() > 0) {
			Log.i(TAG, "旧测光区域: " + parameters.getMeteringAreas());
			List<Camera.Area> middleArea = buildMiddleArea(AREA_PER_1000);
			Log.i(TAG, "将测光区域设置为 : " + toString(middleArea));
			parameters.setMeteringAreas(middleArea);
		} else {
			Log.i(TAG, "设备不支持测光区域");
		}
	}

	/**
	 * Builds a single centered camera area of the given per-mille size.
	 * Camera.Area coordinates span [-1000, 1000] on each axis.
	 *
	 * @param areaPer1000 half-extent of the area, per-mille
	 * @return a one-element list holding the centered area with weight 1
	 */
	private static List<Camera.Area> buildMiddleArea(int areaPer1000) {
		return Collections.singletonList(new Camera.Area(new Rect(-areaPer1000,
				-areaPer1000, areaPer1000, areaPer1000), 1));
	}

	/**
	 * Enables video stabilization, when supported.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 */
	public static void setVideoStabilization(Camera.Parameters parameters) {
		if (parameters.isVideoStabilizationSupported()) {
			if (parameters.getVideoStabilization()) {
				Log.i(TAG, "视频稳定已启用");
			} else {
				Log.i(TAG, "启用视频稳定...");
				parameters.setVideoStabilization(true);
			}
		} else {
			Log.i(TAG, "此设备不支持视频稳定");
		}
	}

	/**
	 * Switches the camera into the barcode scene mode, when supported.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 */
	public static void setBarcodeSceneMode(Camera.Parameters parameters) {
		if (Camera.Parameters.SCENE_MODE_BARCODE.equals(parameters
				.getSceneMode())) {
			Log.i(TAG, "条码场景模式已设置");
			return;
		}
		String sceneMode = findSettableValue("场景模式",
				parameters.getSupportedSceneModes(),
				Camera.Parameters.SCENE_MODE_BARCODE);
		if (sceneMode != null) {
			parameters.setSceneMode(sceneMode);
		}
	}

	/**
	 * Applies the zoom step closest to the target ratio, when supported.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 * @param targetZoomRatio
	 *            desired zoom ratio (e.g. 2.0 for 2x)
	 */
	public static void setZoom(Camera.Parameters parameters,
			double targetZoomRatio) {
		if (parameters.isZoomSupported()) {
			Integer zoom = indexOfClosestZoom(parameters, targetZoomRatio);
			if (zoom == null) {
				return;
			}
			if (parameters.getZoom() == zoom) {
				Log.i(TAG, "缩放已设置为 " + zoom);
			} else {
				Log.i(TAG, "设置缩放 " + zoom);
				parameters.setZoom(zoom);
			}
		} else {
			Log.i(TAG, "不支持缩放");
		}
	}

	/**
	 * Finds the index of the supported zoom ratio closest to the target.
	 *
	 * @param parameters
	 *            camera parameters to inspect
	 * @param targetZoomRatio
	 *            desired zoom ratio
	 * @return index into the zoom-ratio list, or null if ratios are invalid
	 */
	private static Integer indexOfClosestZoom(Camera.Parameters parameters,
			double targetZoomRatio) {
		List<Integer> ratios = parameters.getZoomRatios();
		Log.i(TAG, "缩放比例: " + ratios);
		int maxZoom = parameters.getMaxZoom();
		if (ratios == null || ratios.isEmpty() || ratios.size() != maxZoom + 1) {
			Log.w(TAG, "缩放倍数无效!");
			return null;
		}
		// Zoom ratios are reported scaled by 100 (100 == 1x).
		double target100 = 100.0 * targetZoomRatio;
		double smallestDiff = Double.POSITIVE_INFINITY;
		int closestIndex = 0;
		for (int i = 0; i < ratios.size(); i++) {
			double diff = Math.abs(ratios.get(i) - target100);
			if (diff < smallestDiff) {
				smallestDiff = diff;
				closestIndex = i;
			}
		}
		Log.i(TAG, "选择缩放比例 " + (ratios.get(closestIndex) / 100.0));
		return closestIndex;
	}

	/**
	 * Applies the negative (inverted-color) effect, when supported.
	 *
	 * @param parameters
	 *            camera parameters to mutate
	 */
	public static void setInvertColor(Camera.Parameters parameters) {
		if (Camera.Parameters.EFFECT_NEGATIVE.equals(parameters
				.getColorEffect())) {
			Log.i(TAG, "负效果已设定");
			return;
		}
		String colorMode = findSettableValue("颜色效果",
				parameters.getSupportedColorEffects(),
				Camera.Parameters.EFFECT_NEGATIVE);
		if (colorMode != null) {
			parameters.setColorEffect(colorMode);
		}
	}

	/**
	 * Finds the best supported preview size for the given screen resolution:
	 * large enough, close to the screen aspect ratio, ideally an exact match.
	 *
	 * @param parameters
	 *            camera parameters to inspect
	 * @param screenResolution
	 *            screen resolution in pixels
	 * @return chosen preview size as a Point (width, height)
	 */
	public static Point findBestPreviewSizeValue(Camera.Parameters parameters,
			Point screenResolution) {
		List<Camera.Size> rawSupportedSizes = parameters
				.getSupportedPreviewSizes();
		if (rawSupportedSizes == null) {
			Log.w(TAG, "设备返回不支持预览大小; 使用默认值");
			Camera.Size defaultSize = parameters.getPreviewSize();
			if (defaultSize == null) {
				throw new IllegalStateException("参数不包含预览大小!");
			}
			return new Point(defaultSize.width, defaultSize.height);
		}
		// Sort by size, descending
		List<Camera.Size> supportedPreviewSizes = new ArrayList<Camera.Size>(
				rawSupportedSizes);
		Collections.sort(supportedPreviewSizes, new Comparator<Camera.Size>() {
			@Override
			public int compare(Camera.Size a, Camera.Size b) {
				int aPixels = a.height * a.width;
				int bPixels = b.height * b.width;
				if (bPixels < aPixels) {
					return -1;
				}
				if (bPixels > aPixels) {
					return 1;
				}
				return 0;
			}
		});
		if (Log.isLoggable(TAG, Log.INFO)) {
			StringBuilder previewSizesString = new StringBuilder();
			for (Camera.Size supportedPreviewSize : supportedPreviewSizes) {
				previewSizesString.append(supportedPreviewSize.width)
						.append('x').append(supportedPreviewSize.height)
						.append(' ');
			}
			Log.i(TAG, "支持的预览大小: " + previewSizesString);
		}
		double screenAspectRatio = screenResolution.x
				/ (double) screenResolution.y;
		// Remove unsuitable sizes
		Iterator<Camera.Size> it = supportedPreviewSizes.iterator();
		while (it.hasNext()) {
			Camera.Size supportedPreviewSize = it.next();
			int realWidth = supportedPreviewSize.width;
			int realHeight = supportedPreviewSize.height;
			if (realWidth * realHeight < MIN_PREVIEW_PIXELS) {
				it.remove();
				continue;
			}
			// Preview sizes are reported in landscape; flip candidates that are
			// portrait so the aspect-ratio comparison matches the screen.
			boolean isCandidatePortrait = realWidth < realHeight;
			int maybeFlippedWidth = isCandidatePortrait ? realHeight
					: realWidth;
			int maybeFlippedHeight = isCandidatePortrait ? realWidth
					: realHeight;
			double aspectRatio = maybeFlippedWidth
					/ (double) maybeFlippedHeight;
			double distortion = Math.abs(aspectRatio - screenAspectRatio);
			if (distortion > MAX_ASPECT_DISTORTION) {
				it.remove();
				continue;
			}
			if (maybeFlippedWidth == screenResolution.x
					&& maybeFlippedHeight == screenResolution.y) {
				Point exactPoint = new Point(realWidth, realHeight);
				Log.i(TAG, "发现预览大小与屏幕尺寸完全匹配: " + exactPoint);
				return exactPoint;
			}
		}
		// If there is no exact match, use the largest preview size. This was not a
		// good idea on older devices because of the extra computation needed; we are
		// likely to get here on newer Android 4+ devices whose CPUs are more powerful.
		if (!supportedPreviewSizes.isEmpty()) {
			Camera.Size largestPreview = supportedPreviewSizes.get(0);
			Point largestSize = new Point(largestPreview.width,
					largestPreview.height);
			Log.i(TAG, "使用最大的合适预览大小: " + largestSize);
			return largestSize;
		}
		// If nothing is suitable at all, return the current preview size
		Camera.Size defaultPreview = parameters.getPreviewSize();
		if (defaultPreview == null) {
			throw new IllegalStateException("参数不包含预览大小!");
		}
		Point defaultSize = new Point(defaultPreview.width,
				defaultPreview.height);
		Log.i(TAG, "没有合适的预览大小,默认使用: " + defaultSize);
		return defaultSize;
	}

	/**
	 * Returns the first desired value that the device supports, or null.
	 *
	 * @param name
	 *            human-readable setting name, used only for logging
	 * @param supportedValues
	 *            values the device reports as supported (may be null)
	 * @param desiredValues
	 *            candidate values in preference order
	 * @return the first supported candidate, or null if none match
	 */
	private static String findSettableValue(String name,
			Collection<String> supportedValues, String... desiredValues) {
		Log.i(TAG, "请求 " + name + " 来之: " + Arrays.toString(desiredValues));
		Log.i(TAG, "支持的 " + name + " 值: " + supportedValues);
		if (supportedValues != null) {
			for (String desiredValue : desiredValues) {
				if (supportedValues.contains(desiredValue)) {
					Log.i(TAG, "可以设置 " + name + " 至: " + desiredValue);
					return desiredValue;
				}
			}
		}
		Log.i(TAG, "不支持的值匹配");
		return null;
	}

	// Formats a collection of int[] ranges as "[[a, b], [c, d]]" for logging.
	private static String toString(Collection<int[]> arrays) {
		if (arrays == null || arrays.isEmpty()) {
			return "[]";
		}
		StringBuilder buffer = new StringBuilder();
		buffer.append('[');
		Iterator<int[]> it = arrays.iterator();
		while (it.hasNext()) {
			buffer.append(Arrays.toString(it.next()));
			if (it.hasNext()) {
				buffer.append(", ");
			}
		}
		buffer.append(']');
		return buffer.toString();
	}

	// Formats camera areas as "rect:weight " pairs for logging; null-safe.
	private static String toString(Iterable<Camera.Area> areas) {
		if (areas == null) {
			return null;
		}
		StringBuilder result = new StringBuilder();
		for (Camera.Area area : areas) {
			result.append(area.rect).append(':').append(area.weight)
					.append(' ');
		}
		return result.toString();
	}

	// Collects device build info plus the flattened camera parameters.
	public static String collectStats(Camera.Parameters parameters) {
		return collectStats(parameters.flatten());
	}

	/**
	 * Collects diagnostic stats: device build fields followed by the
	 * sorted, semicolon-separated camera parameters.
	 *
	 * @param flattenedParams
	 *            flattened camera parameter string (may be null)
	 * @return newline-separated diagnostic report
	 */
	public static String collectStats(CharSequence flattenedParams) {
		StringBuilder sb = new StringBuilder(1000);
		sb.append("BOARD=").append(Build.BOARD).append('\n');
		sb.append("BRAND=").append(Build.BRAND).append('\n');
		sb.append("CPU_ABI=").append(Build.CPU_ABI).append('\n');
		sb.append("DEVICE=").append(Build.DEVICE).append('\n');
		sb.append("DISPLAY=").append(Build.DISPLAY).append('\n');
		sb.append("FINGERPRINT=").append(Build.FINGERPRINT).append('\n');
		sb.append("HOST=").append(Build.HOST).append('\n');
		sb.append("ID=").append(Build.ID).append('\n');
		sb.append("MANUFACTURER=").append(Build.MANUFACTURER).append('\n');
		sb.append("MODEL=").append(Build.MODEL).append('\n');
		sb.append("PRODUCT=").append(Build.PRODUCT).append('\n');
		sb.append("TAGS=").append(Build.TAGS).append('\n');
		sb.append("TIME=").append(Build.TIME).append('\n');
		sb.append("TYPE=").append(Build.TYPE).append('\n');
		sb.append("USER=").append(Build.USER).append('\n');
		sb.append("VERSION.CODENAME=").append(Build.VERSION.CODENAME)
				.append('\n');
		sb.append("VERSION.INCREMENTAL=").append(Build.VERSION.INCREMENTAL)
				.append('\n');
		sb.append("VERSION.RELEASE=").append(Build.VERSION.RELEASE)
				.append('\n');
		sb.append("VERSION.SDK_INT=").append(Build.VERSION.SDK_INT)
				.append('\n');
		if (flattenedParams != null) {
			String[] params = SEMICOLON.split(flattenedParams);
			Arrays.sort(params);
			for (String param : params) {
				sb.append(param).append('\n');
			}
		}
		return sb.toString();
	}
}
| apache-2.0 |
HuangLS/neo4j | community/kernel/src/main/java/org/neo4j/unsafe/batchinsert/BatchRelationshipIterable.java | 3257 | /*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.unsafe.batchinsert;
import java.util.Iterator;
import org.neo4j.function.Consumers;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.NotFoundException;
import org.neo4j.helpers.collection.PrefetchingIterator;
import org.neo4j.kernel.impl.api.store.StoreNodeRelationshipCursor;
import org.neo4j.kernel.impl.store.InvalidRecordException;
import org.neo4j.kernel.impl.store.NeoStores;
import org.neo4j.kernel.impl.store.record.NodeRecord;
import org.neo4j.kernel.impl.store.record.RelationshipGroupRecord;
import org.neo4j.kernel.impl.store.record.RelationshipRecord;
import static org.neo4j.kernel.impl.locking.LockService.NO_LOCK_SERVICE;
/**
 * Iterable over all relationships (both directions) of a single node, read
 * directly from the store via a {@link StoreNodeRelationshipCursor}.
 * Subclasses decide what to materialize per relationship via
 * {@link #nextFrom(long, int, long, long)}.
 *
 * NOTE(review): all iterators returned by {@link #iterator()} share the one
 * cursor created in the constructor, so only a single pass appears to be
 * safe per instance — matches batch-inserter usage, but confirm before reuse.
 */
abstract class BatchRelationshipIterable<T> implements Iterable<T>
{
    // Shared cursor positioned on the node's relationship chain in the constructor.
    private final StoreNodeRelationshipCursor relationshipCursor;

    /**
     * @param neoStores store access
     * @param nodeId id of the node whose relationships to iterate
     * @throws NotFoundException if the node record cannot be read
     */
    public BatchRelationshipIterable( NeoStores neoStores, long nodeId )
    {
        // Reusable record instances the cursor reads into; ids start at -1 (unset).
        RelationshipRecord relationshipRecord = new RelationshipRecord( -1 );
        RelationshipGroupRecord relationshipGroupRecord = new RelationshipGroupRecord( -1, -1 );
        this.relationshipCursor = new StoreNodeRelationshipCursor(
                relationshipRecord, neoStores,
                relationshipGroupRecord, null,
                Consumers.<StoreNodeRelationshipCursor>noop(), NO_LOCK_SERVICE );

        // TODO There's an opportunity to reuse lots of instances created here, but this isn't a
        // critical path instance so perhaps not necessary a.t.m.
        try
        {
            // Dense nodes store relationships via group records, hence isDense() is passed along.
            NodeRecord nodeRecord = neoStores.getNodeStore().getRecord( nodeId );
            relationshipCursor.init( nodeRecord.isDense(), nodeRecord.getNextRel(), nodeId,
                    Direction.BOTH );
        }
        catch ( InvalidRecordException e )
        {
            // Translate low-level record failure into the public "node missing" exception.
            throw new NotFoundException( "Node " + nodeId + " not found" );
        }
    }

    @Override
    public Iterator<T> iterator()
    {
        return new PrefetchingIterator<T>()
        {
            @Override
            protected T fetchNextOrNull()
            {
                // Advance the shared cursor; null signals end of iteration.
                if ( !relationshipCursor.next() )
                {
                    return null;
                }
                return nextFrom( relationshipCursor.id(), relationshipCursor.type(),
                        relationshipCursor.startNode(), relationshipCursor.endNode() );
            }
        };
    }

    /**
     * Materializes one result element from the raw relationship data.
     *
     * @param relId relationship id
     * @param type relationship type id
     * @param startNode start node id
     * @param endNode end node id
     * @return the element to yield from the iterator
     */
    protected abstract T nextFrom( long relId, int type, long startNode, long endNode );
}
| apache-2.0 |
aleo72/ww-ceem-radar | src/main/java/gov/nasa/worldwindx/examples/ExtrudedShapes.java | 7080 | /*
* Copyright (C) 2012 United States Government as represented by the Administrator of the
* National Aeronautics and Space Administration.
* All Rights Reserved.
*/
package gov.nasa.worldwindx.examples;
import gov.nasa.worldwind.View;
import gov.nasa.worldwind.avlist.AVKey;
import gov.nasa.worldwind.event.*;
import gov.nasa.worldwind.geom.Position;
import gov.nasa.worldwind.layers.RenderableLayer;
import gov.nasa.worldwind.render.*;
import gov.nasa.worldwind.render.airspaces.*;
import gov.nasa.worldwind.render.airspaces.Polygon;
import gov.nasa.worldwind.util.WWIO;
import gov.nasa.worldwindx.examples.util.ExampleUtil;
import java.io.*;
import java.util.*;
import java.util.zip.*;
/**
 * Demonstrates how to create {@link ExtrudedPolygon}s with cap and side textures. The polygon geometry is retrieved
 * from a World Wind data site, as is the image applied to the extruded polygon's sides.
 *
 * @author tag
 * @version $Id: ExtrudedShapes.java 1171 2013-02-11 21:45:02Z dcollins $
 */
public class ExtrudedShapes extends ApplicationTemplate
{
    /** Classpath location of the zipped demo airspace shapes. */
    protected static final String DEMO_AIRSPACES_PATH
        = "gov/nasa/worldwindx/examples/data/AirspaceBuilder-DemoShapes.zip";

    /** Image applied to each side of the extruded polygons. */
    protected static String DEFAULT_IMAGE_URL = "gov/nasa/worldwindx/examples/images/build123sm.jpg";

    public static class AppFrame extends ApplicationTemplate.AppFrame
    {
        public AppFrame()
        {
            super(true, true, false);

            try
            {
                // Create a layer for the shapes.
                RenderableLayer layer = new RenderableLayer();
                layer.setName("Extruded Shapes");
                layer.setPickEnabled(true);

                // Retrieve the geometry from the World Wind demo site.
                List<Airspace> airspaces = new ArrayList<Airspace>();
                loadAirspacesFromPath(DEMO_AIRSPACES_PATH, airspaces);

                // Define attributes for the shapes.
                ShapeAttributes sideAttributes = new BasicShapeAttributes();
                sideAttributes.setInteriorMaterial(Material.LIGHT_GRAY);
                sideAttributes.setOutlineMaterial(Material.DARK_GRAY);
                ShapeAttributes capAttributes = new BasicShapeAttributes(sideAttributes);
                capAttributes.setInteriorMaterial(Material.GRAY);

                // Construct the extruded polygons from the demo data.
                int n = 0, m = 0;
                for (Airspace airspace : airspaces)
                {
                    if (airspace instanceof Polygon) // only polygons in the demo data are used
                    {
                        Polygon pgonAirspace = (Polygon) airspace;

                        // Collect the images to be applied to the shape's sides, one per side.
                        ArrayList<String> textures = new ArrayList<String>();
                        for (int i = 0; i < pgonAirspace.getLocations().size(); i++)
                        {
                            textures.add(DEFAULT_IMAGE_URL);
                        }

                        // Construct the extruded polygon. Use the default texture coordinates.
                        double height = 40; // building height
                        ExtrudedPolygon quad = new ExtrudedPolygon(pgonAirspace.getLocations(), height, textures);

                        // Apply the shape's attributes. Note the separate attributes for cap and sides.
                        quad.setSideAttributes(sideAttributes);
                        quad.setCapAttributes(capAttributes);

                        // Specify a cap for the extruded polygon, specifying its texture coordinates and image.
                        if (pgonAirspace.getLocations().size() == 4)
                        {
                            float[] texCoords = new float[] {0, 0, 1, 0, 1, 1, 0, 1};
                            quad.setCapImageSource("images/32x32-icon-nasa.png", texCoords, 4);
                        }

                        // Add the shape to the layer.
                        layer.addRenderable(quad);

                        ++n;
                        m += ((Polygon) airspace).getLocations().size();
                    }
                }

                System.out.printf("NUM SHAPES = %d, NUM SIDES = %d\n", n, m);

                // Add the layer to the model.
                insertBeforePlacenames(this.getWwd(), layer);

                // Make sure the new layer is shown in the layer manager.
                this.getLayerPanel().update(this.getWwd());

                // Adjust the view so that it looks at the buildings.
                View view = getWwd().getView();
                view.setEyePosition(Position.fromDegrees(47.656, -122.306, 1e3));

                // This is how a select listener would notice that one of the shapes was picked.
                getWwd().addSelectListener(new SelectListener()
                {
                    public void selected(SelectEvent event)
                    {
                        if (event.getTopObject() instanceof ExtrudedPolygon)
                            System.out.println("EXTRUDED POLYGON");
                    }
                });
            }
            catch (Exception e)
            {
                e.printStackTrace();
            }
        }
    }

    /**
     * Loads serialized airspace shapes from a zipped classpath resource into the given collection.
     * Entries are expected to be named "gov.nasa.worldwind.render.airspaces.<Type>[-<displayName>]...xml"
     * and contain a restorable-state string on their first line.
     *
     * @param path classpath location of the zip archive
     * @param airspaces collection receiving the restored airspaces
     */
    protected static void loadAirspacesFromPath(String path, Collection<Airspace> airspaces)
    {
        File file = ExampleUtil.saveResourceToTempFile(path, ".zip");
        if (file == null)
            return;

        ZipFile zipFile = null;
        try
        {
            zipFile = new ZipFile(file);

            // Bug fix: the original for-loop assigned the entry in the UPDATE clause
            // ("entry = e.nextElement()"), which ran the body one step behind the
            // enumeration and silently skipped the LAST entry in the archive. A plain
            // while-loop processes every entry exactly once.
            Enumeration<? extends ZipEntry> entries = zipFile.entries();
            while (entries.hasMoreElements())
            {
                ZipEntry entry = entries.nextElement();

                String name = WWIO.getFilename(entry.getName());
                if (!(name.startsWith("gov.nasa.worldwind.render.airspaces") && name.endsWith(".xml")))
                    continue;

                // The file name encodes the airspace class and, optionally, a display name.
                String[] tokens = name.split("-");

                try
                {
                    Class<?> c = Class.forName(tokens[0]);
                    Airspace airspace = (Airspace) c.newInstance();

                    // The restorable state is serialized on the first line of the entry.
                    BufferedReader input = new BufferedReader(new InputStreamReader(zipFile.getInputStream(entry)));
                    String s = input.readLine();
                    airspace.restoreState(s);
                    airspaces.add(airspace);

                    if (tokens.length >= 2)
                    {
                        airspace.setValue(AVKey.DISPLAY_NAME, tokens[1]);
                    }
                }
                catch (Exception ex)
                {
                    // Best-effort loading: a malformed entry should not abort the rest.
                    ex.printStackTrace();
                }
            }
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
        finally
        {
            // Bug fix: the original never closed the ZipFile, leaking the file handle.
            if (zipFile != null)
            {
                try
                {
                    zipFile.close();
                }
                catch (IOException e)
                {
                    // Nothing useful to do if close fails.
                }
            }
        }
    }

    public static void main(String[] args)
    {
        ApplicationTemplate.start("World Wind Extruded Polygons on Ground", AppFrame.class);
    }
}
| apache-2.0 |
Microsoft-CISL/hadoop-prototype | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java | 73773 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager;
import static org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils.newNodeHeartbeatResponse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.ServerSocketUtil;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenIdentifier;
import org.apache.hadoop.service.Service.STATE;
import org.apache.hadoop.service.ServiceOperations;
import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.SignalContainerCommand;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.client.RMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.UnRegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.UnRegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.NodeHeartbeatResponsePBImpl;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.server.api.records.impl.pb.MasterKeyPBImpl;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerImpl;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService;
import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@SuppressWarnings("rawtypes")
public class TestNodeStatusUpdater {
  // Force mini-cluster mode so the shared DefaultMetricsSystem does not try
  // to start real metrics sources/sinks while these unit tests run.
  // temp fix until metrics system can auto-detect itself running in unit test:
  static {
    DefaultMetricsSystem.setMiniClusterMode(true);
  }
  static final Log LOG = LogFactory.getLog(TestNodeStatusUpdater.class);
  // Scratch directory tree for the mini NodeManager under test; created in
  // setUp() and removed again by deleteBaseDir().
  static final File basedir =
      new File("target", TestNodeStatusUpdater.class.getName());
  static final File nmLocalDir = new File(basedir, "nm0");
  static final File tmpDir = new File(basedir, "tmpDir");
  static final File remoteLogsDir = new File(basedir, "remotelogs");
  static final File logsDir = new File(basedir, "logs");
  private static final RecordFactory recordFactory = RecordFactoryProvider
      .getRecordFactory(null);
  // Number of heartbeats the mock ResourceTrackers have seen; volatile
  // because it is written from the NodeStatusUpdater thread and read here.
  volatile int heartBeatID = 0;
  // First error raised while starting the NM on a background thread.
  volatile Throwable nmStartError = null;
  // Node ids that successfully registered with a mock ResourceTracker.
  private final List<NodeId> registeredNodes = new ArrayList<NodeId>();
  // Set once MyResourceTracker6 finally accepts a registration.
  private boolean triggered = false;
  private Configuration conf;
  private NodeManager nm;
  // Flipped by the mock trackers when an assertion fails on a non-test
  // thread, where a thrown AssertionError would otherwise be swallowed.
  private AtomicBoolean assertionFailedInThread = new AtomicBoolean(false);
@Before
public void setUp() throws IOException {
nmLocalDir.mkdirs();
tmpDir.mkdirs();
logsDir.mkdirs();
remoteLogsDir.mkdirs();
conf = createNMConfig();
}
@After
public void tearDown() {
this.registeredNodes.clear();
heartBeatID = 0;
ServiceOperations.stop(nm);
assertionFailedInThread.set(false);
DefaultMetricsSystem.shutdown();
}
public static MasterKey createMasterKey() {
MasterKey masterKey = new MasterKeyPBImpl();
masterKey.setKeyId(123);
masterKey.setBytes(ByteBuffer.wrap(new byte[] { new Integer(123)
.byteValue() }));
return masterKey;
}
private class MyResourceTracker implements ResourceTracker {
private final Context context;
private boolean signalContainer;
public MyResourceTracker(Context context, boolean signalContainer) {
this.context = context;
this.signalContainer = signalContainer;
}
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException,
IOException {
NodeId nodeId = request.getNodeId();
Resource resource = request.getResource();
LOG.info("Registering " + nodeId.toString());
// NOTE: this really should be checking against the config value
InetSocketAddress expected = NetUtils.getConnectAddress(
conf.getSocketAddr(YarnConfiguration.NM_ADDRESS, null, -1));
Assert.assertEquals(NetUtils.getHostPortString(expected), nodeId.toString());
Assert.assertEquals(5 * 1024, resource.getMemory());
registeredNodes.add(nodeId);
RegisterNodeManagerResponse response = recordFactory
.newRecordInstance(RegisterNodeManagerResponse.class);
response.setContainerTokenMasterKey(createMasterKey());
response.setNMTokenMasterKey(createMasterKey());
return response;
}
private Map<ApplicationId, List<ContainerStatus>> getAppToContainerStatusMap(
List<ContainerStatus> containers) {
Map<ApplicationId, List<ContainerStatus>> map =
new HashMap<ApplicationId, List<ContainerStatus>>();
for (ContainerStatus cs : containers) {
ApplicationId applicationId =
cs.getContainerId().getApplicationAttemptId().getApplicationId();
List<ContainerStatus> appContainers = map.get(applicationId);
if (appContainers == null) {
appContainers = new ArrayList<ContainerStatus>();
map.put(applicationId, appContainers);
}
appContainers.add(cs);
}
return map;
}
@Override
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
throws YarnException, IOException {
NodeStatus nodeStatus = request.getNodeStatus();
LOG.info("Got heartbeat number " + heartBeatID);
NodeManagerMetrics mockMetrics = mock(NodeManagerMetrics.class);
Dispatcher mockDispatcher = mock(Dispatcher.class);
EventHandler mockEventHandler = mock(EventHandler.class);
when(mockDispatcher.getEventHandler()).thenReturn(mockEventHandler);
NMStateStoreService stateStore = new NMNullStateStoreService();
nodeStatus.setResponseId(heartBeatID++);
Map<ApplicationId, List<ContainerStatus>> appToContainers =
getAppToContainerStatusMap(nodeStatus.getContainersStatuses());
List<SignalContainerRequest> containersToSignal = null;
ApplicationId appId1 = ApplicationId.newInstance(0, 1);
ApplicationId appId2 = ApplicationId.newInstance(0, 2);
ContainerId firstContainerID = null;
if (heartBeatID == 1) {
Assert.assertEquals(0, nodeStatus.getContainersStatuses().size());
// Give a container to the NM.
ApplicationAttemptId appAttemptID =
ApplicationAttemptId.newInstance(appId1, 0);
firstContainerID =
ContainerId.newContainerId(appAttemptID, heartBeatID);
ContainerLaunchContext launchContext = recordFactory
.newRecordInstance(ContainerLaunchContext.class);
Resource resource = BuilderUtils.newResource(2, 1);
long currentTime = System.currentTimeMillis();
String user = "testUser";
ContainerTokenIdentifier containerToken = BuilderUtils
.newContainerTokenIdentifier(BuilderUtils.newContainerToken(
firstContainerID, InetAddress.getByName("localhost")
.getCanonicalHostName(), 1234, user, resource,
currentTime + 10000, 123, "password".getBytes(), currentTime));
Container container = new ContainerImpl(conf, mockDispatcher,
stateStore, launchContext, null, mockMetrics, containerToken);
this.context.getContainers().put(firstContainerID, container);
} else if (heartBeatID == 2) {
// Checks on the RM end
Assert.assertEquals("Number of applications should only be one!", 1,
nodeStatus.getContainersStatuses().size());
Assert.assertEquals("Number of container for the app should be one!",
1, appToContainers.get(appId1).size());
// Checks on the NM end
ConcurrentMap<ContainerId, Container> activeContainers =
this.context.getContainers();
Assert.assertEquals(1, activeContainers.size());
if (this.signalContainer) {
containersToSignal = new ArrayList<SignalContainerRequest>();
SignalContainerRequest signalReq = recordFactory
.newRecordInstance(SignalContainerRequest.class);
signalReq.setContainerId(firstContainerID);
signalReq.setCommand(SignalContainerCommand.OUTPUT_THREAD_DUMP);
containersToSignal.add(signalReq);
}
// Give another container to the NM.
ApplicationAttemptId appAttemptID =
ApplicationAttemptId.newInstance(appId2, 0);
ContainerId secondContainerID =
ContainerId.newContainerId(appAttemptID, heartBeatID);
ContainerLaunchContext launchContext = recordFactory
.newRecordInstance(ContainerLaunchContext.class);
long currentTime = System.currentTimeMillis();
String user = "testUser";
Resource resource = BuilderUtils.newResource(3, 1);
ContainerTokenIdentifier containerToken = BuilderUtils
.newContainerTokenIdentifier(BuilderUtils.newContainerToken(
secondContainerID, InetAddress.getByName("localhost")
.getCanonicalHostName(), 1234, user, resource,
currentTime + 10000, 123, "password".getBytes(), currentTime));
Container container = new ContainerImpl(conf, mockDispatcher,
stateStore, launchContext, null, mockMetrics, containerToken);
this.context.getContainers().put(secondContainerID, container);
} else if (heartBeatID == 3) {
// Checks on the RM end
Assert.assertEquals("Number of applications should have two!", 2,
appToContainers.size());
Assert.assertEquals("Number of container for the app-1 should be only one!",
1, appToContainers.get(appId1).size());
Assert.assertEquals("Number of container for the app-2 should be only one!",
1, appToContainers.get(appId2).size());
// Checks on the NM end
ConcurrentMap<ContainerId, Container> activeContainers =
this.context.getContainers();
Assert.assertEquals(2, activeContainers.size());
}
NodeHeartbeatResponse nhResponse = YarnServerBuilderUtils.
newNodeHeartbeatResponse(heartBeatID, null, null, null, null, null,
1000L);
if (containersToSignal != null) {
nhResponse.addAllContainersToSignal(containersToSignal);
}
return nhResponse;
}
@Override
public UnRegisterNodeManagerResponse unRegisterNodeManager(
UnRegisterNodeManagerRequest request) throws YarnException, IOException {
return recordFactory
.newRecordInstance(UnRegisterNodeManagerResponse.class);
}
}
private class MyContainerManager extends ContainerManagerImpl {
public boolean signaled = false;
public MyContainerManager(Context context, ContainerExecutor exec,
DeletionService deletionContext, NodeStatusUpdater nodeStatusUpdater,
NodeManagerMetrics metrics,
LocalDirsHandlerService dirsHandler) {
super(context, exec, deletionContext, nodeStatusUpdater,
metrics, dirsHandler);
}
@Override
public void handle(ContainerManagerEvent event) {
if (event.getType() == ContainerManagerEventType.SIGNAL_CONTAINERS) {
signaled = true;
}
}
}
private class MyNodeStatusUpdater extends NodeStatusUpdaterImpl {
public ResourceTracker resourceTracker;
private Context context;
public MyNodeStatusUpdater(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) {
this(context, dispatcher, healthChecker, metrics, false);
}
public MyNodeStatusUpdater(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics,
boolean signalContainer) {
super(context, dispatcher, healthChecker, metrics);
this.context = context;
resourceTracker = new MyResourceTracker(this.context, signalContainer);
}
@Override
protected ResourceTracker getRMClient() {
return resourceTracker;
}
@Override
protected void stopRMProxy() {
return;
}
}
// Test NodeStatusUpdater sends the right container statuses each time it
// heart beats.
private class MyNodeStatusUpdater2 extends NodeStatusUpdaterImpl {
public ResourceTracker resourceTracker;
public MyNodeStatusUpdater2(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) {
super(context, dispatcher, healthChecker, metrics);
resourceTracker = new MyResourceTracker4(context);
}
@Override
protected ResourceTracker getRMClient() {
return resourceTracker;
}
@Override
protected void stopRMProxy() {
return;
}
}
private class MyNodeStatusUpdater3 extends NodeStatusUpdaterImpl {
public ResourceTracker resourceTracker;
private Context context;
public MyNodeStatusUpdater3(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) {
super(context, dispatcher, healthChecker, metrics);
this.context = context;
this.resourceTracker = new MyResourceTracker3(this.context);
}
@Override
protected ResourceTracker getRMClient() {
return resourceTracker;
}
@Override
protected void stopRMProxy() {
return;
}
@Override
protected boolean isTokenKeepAliveEnabled(Configuration conf) {
return true;
}
}
private class MyNodeStatusUpdater4 extends NodeStatusUpdaterImpl {
private final long rmStartIntervalMS;
private final boolean rmNeverStart;
public ResourceTracker resourceTracker;
public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics,
long rmStartIntervalMS, boolean rmNeverStart) {
super(context, dispatcher, healthChecker, metrics);
this.rmStartIntervalMS = rmStartIntervalMS;
this.rmNeverStart = rmNeverStart;
}
@Override
protected void serviceStart() throws Exception {
//record the startup time
super.serviceStart();
}
@Override
protected ResourceTracker getRMClient() throws IOException {
RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf);
resourceTracker =
(ResourceTracker) RetryProxy.create(ResourceTracker.class,
new MyResourceTracker6(rmStartIntervalMS, rmNeverStart),
retryPolicy);
return resourceTracker;
}
private boolean isTriggered() {
return triggered;
}
@Override
protected void stopRMProxy() {
return;
}
}
private class MyNodeStatusUpdater5 extends NodeStatusUpdaterImpl {
private ResourceTracker resourceTracker;
private Configuration conf;
public MyNodeStatusUpdater5(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics, Configuration conf) {
super(context, dispatcher, healthChecker, metrics);
resourceTracker = new MyResourceTracker5();
this.conf = conf;
}
@Override
protected ResourceTracker getRMClient() {
RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf);
return (ResourceTracker) RetryProxy.create(ResourceTracker.class,
resourceTracker, retryPolicy);
}
@Override
protected void stopRMProxy() {
return;
}
}
private class MyNodeManager extends NodeManager {
private MyNodeStatusUpdater3 nodeStatusUpdater;
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
this.nodeStatusUpdater =
new MyNodeStatusUpdater3(context, dispatcher, healthChecker, metrics);
return this.nodeStatusUpdater;
}
public MyNodeStatusUpdater3 getNodeStatusUpdater() {
return this.nodeStatusUpdater;
}
}
private class MyNodeManager2 extends NodeManager {
public boolean isStopped = false;
private NodeStatusUpdater nodeStatusUpdater;
private CyclicBarrier syncBarrier;
private Configuration conf;
public MyNodeManager2 (CyclicBarrier syncBarrier, Configuration conf) {
this.syncBarrier = syncBarrier;
this.conf = conf;
}
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
nodeStatusUpdater =
new MyNodeStatusUpdater5(context, dispatcher, healthChecker,
metrics, conf);
return nodeStatusUpdater;
}
@Override
protected void serviceStop() throws Exception {
System.out.println("Called stooppppp");
super.serviceStop();
isStopped = true;
ConcurrentMap<ApplicationId, Application> applications =
getNMContext().getApplications();
// ensure that applications are empty
if(!applications.isEmpty()) {
assertionFailedInThread.set(true);
}
syncBarrier.await(10000, TimeUnit.MILLISECONDS);
}
}
//
private class MyResourceTracker2 implements ResourceTracker {
public NodeAction heartBeatNodeAction = NodeAction.NORMAL;
public NodeAction registerNodeAction = NodeAction.NORMAL;
public String shutDownMessage = "";
public String rmVersion = "3.0.1";
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException,
IOException {
RegisterNodeManagerResponse response = recordFactory
.newRecordInstance(RegisterNodeManagerResponse.class);
response.setNodeAction(registerNodeAction );
response.setContainerTokenMasterKey(createMasterKey());
response.setNMTokenMasterKey(createMasterKey());
response.setDiagnosticsMessage(shutDownMessage);
response.setRMVersion(rmVersion);
return response;
}
@Override
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
throws YarnException, IOException {
NodeStatus nodeStatus = request.getNodeStatus();
nodeStatus.setResponseId(heartBeatID++);
NodeHeartbeatResponse nhResponse = YarnServerBuilderUtils.
newNodeHeartbeatResponse(heartBeatID, heartBeatNodeAction, null,
null, null, null, 1000L);
nhResponse.setDiagnosticsMessage(shutDownMessage);
return nhResponse;
}
@Override
public UnRegisterNodeManagerResponse unRegisterNodeManager(
UnRegisterNodeManagerRequest request) throws YarnException, IOException {
return recordFactory
.newRecordInstance(UnRegisterNodeManagerResponse.class);
}
}
private class MyResourceTracker3 implements ResourceTracker {
public NodeAction heartBeatNodeAction = NodeAction.NORMAL;
public NodeAction registerNodeAction = NodeAction.NORMAL;
private Map<ApplicationId, List<Long>> keepAliveRequests =
new HashMap<ApplicationId, List<Long>>();
private ApplicationId appId = BuilderUtils.newApplicationId(1, 1);
private final Context context;
MyResourceTracker3(Context context) {
this.context = context;
}
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException,
IOException {
RegisterNodeManagerResponse response =
recordFactory.newRecordInstance(RegisterNodeManagerResponse.class);
response.setNodeAction(registerNodeAction);
response.setContainerTokenMasterKey(createMasterKey());
response.setNMTokenMasterKey(createMasterKey());
return response;
}
@Override
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
throws YarnException, IOException {
LOG.info("Got heartBeatId: [" + heartBeatID +"]");
NodeStatus nodeStatus = request.getNodeStatus();
nodeStatus.setResponseId(heartBeatID++);
NodeHeartbeatResponse nhResponse = YarnServerBuilderUtils.
newNodeHeartbeatResponse(heartBeatID, heartBeatNodeAction, null,
null, null, null, 1000L);
if (nodeStatus.getKeepAliveApplications() != null
&& nodeStatus.getKeepAliveApplications().size() > 0) {
for (ApplicationId appId : nodeStatus.getKeepAliveApplications()) {
List<Long> list = keepAliveRequests.get(appId);
if (list == null) {
list = new LinkedList<Long>();
keepAliveRequests.put(appId, list);
}
list.add(System.currentTimeMillis());
}
}
if (heartBeatID == 2) {
LOG.info("Sending FINISH_APP for application: [" + appId + "]");
this.context.getApplications().put(appId, mock(Application.class));
nhResponse.addAllApplicationsToCleanup(Collections.singletonList(appId));
}
return nhResponse;
}
@Override
public UnRegisterNodeManagerResponse unRegisterNodeManager(
UnRegisterNodeManagerRequest request) throws YarnException, IOException {
return recordFactory
.newRecordInstance(UnRegisterNodeManagerResponse.class);
}
}
  // Test NodeStatusUpdater sends the right container statuses each time it
  // heart beats.
  // Credentials that MyResourceTracker4 ships back to the NM via
  // setSystemCredentialsForApps(); populated in that tracker's constructor.
  private Credentials expectedCredentials = new Credentials();
  // ResourceTracker that drives a five-heartbeat state machine verifying the
  // NM re-sends completed-container statuses after a lost heartbeat
  // (heartbeat 1 deliberately throws), stops re-sending them once a
  // heartbeat succeeds, and drops a container once the AM has pulled it.
  // Assertion failures are recorded via assertionFailedInThread because they
  // occur on the NodeStatusUpdater thread, not the test thread.
  private class MyResourceTracker4 implements ResourceTracker {
    public NodeAction registerNodeAction = NodeAction.NORMAL;
    public NodeAction heartBeatNodeAction = NodeAction.NORMAL;
    private Context context;
    // Fixed container statuses the test expects the NM to report:
    // 2/4 stay RUNNING, 3/5 are COMPLETE (createContainerStatus is a
    // sibling helper defined elsewhere in this file).
    private final ContainerStatus containerStatus2 =
        createContainerStatus(2, ContainerState.RUNNING);
    private final ContainerStatus containerStatus3 =
        createContainerStatus(3, ContainerState.COMPLETE);
    private final ContainerStatus containerStatus4 =
        createContainerStatus(4, ContainerState.RUNNING);
    private final ContainerStatus containerStatus5 =
        createContainerStatus(5, ContainerState.COMPLETE);
    public MyResourceTracker4(Context context) {
      // create app Credentials
      org.apache.hadoop.security.token.Token<DelegationTokenIdentifier> token1 =
          new org.apache.hadoop.security.token.Token<DelegationTokenIdentifier>();
      token1.setKind(new Text("kind1"));
      expectedCredentials.addToken(new Text("token1"), token1);
      this.context = context;
    }
    @Override
    public RegisterNodeManagerResponse registerNodeManager(
        RegisterNodeManagerRequest request) throws YarnException, IOException {
      RegisterNodeManagerResponse response =
          recordFactory.newRecordInstance(RegisterNodeManagerResponse.class);
      response.setNodeAction(registerNodeAction);
      response.setContainerTokenMasterKey(createMasterKey());
      response.setNMTokenMasterKey(createMasterKey());
      return response;
    }
    @Override
    public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
        throws YarnException, IOException {
      // Containers the (simulated) AM has already pulled; sent back to the
      // NM so it can purge them from its context.
      List<ContainerId> finishedContainersPulledByAM = new ArrayList
          <ContainerId>();
      try {
        if (heartBeatID == 0) {
          // Heartbeat 0: nothing running yet.
          Assert.assertEquals(0, request.getNodeStatus().getContainersStatuses()
              .size());
          Assert.assertEquals(0, context.getContainers().size());
        } else if (heartBeatID == 1) {
          // Heartbeat 1: containers 2 (RUNNING) and 3 (COMPLETE) reported.
          List<ContainerStatus> statuses =
              request.getNodeStatus().getContainersStatuses();
          Assert.assertEquals(2, statuses.size());
          Assert.assertEquals(2, context.getContainers().size());
          boolean container2Exist = false, container3Exist = false;
          for (ContainerStatus status : statuses) {
            if (status.getContainerId().equals(
                containerStatus2.getContainerId())) {
              Assert.assertTrue(status.getState().equals(
                  containerStatus2.getState()));
              container2Exist = true;
            }
            if (status.getContainerId().equals(
                containerStatus3.getContainerId())) {
              Assert.assertTrue(status.getState().equals(
                  containerStatus3.getState()));
              container3Exist = true;
            }
          }
          Assert.assertTrue(container2Exist && container3Exist);
          // should throw exception that can be retried by the
          // nodeStatusUpdaterRunnable, otherwise nm just shuts down and the
          // test passes.
          throw new YarnRuntimeException("Lost the heartbeat response");
        } else if (heartBeatID == 2 || heartBeatID == 3) {
          List<ContainerStatus> statuses =
              request.getNodeStatus().getContainersStatuses();
          if (heartBeatID == 2) {
            // NM should send completed containers again, since the last
            // heartbeat is lost.
            Assert.assertEquals(4, statuses.size());
          } else {
            // NM should not send completed containers again, since the last
            // heartbeat is successful.
            Assert.assertEquals(2, statuses.size());
          }
          Assert.assertEquals(4, context.getContainers().size());
          boolean container2Exist = false, container3Exist = false,
              container4Exist = false, container5Exist = false;
          for (ContainerStatus status : statuses) {
            if (status.getContainerId().equals(
                containerStatus2.getContainerId())) {
              Assert.assertTrue(status.getState().equals(
                  containerStatus2.getState()));
              container2Exist = true;
            }
            if (status.getContainerId().equals(
                containerStatus3.getContainerId())) {
              Assert.assertTrue(status.getState().equals(
                  containerStatus3.getState()));
              container3Exist = true;
            }
            if (status.getContainerId().equals(
                containerStatus4.getContainerId())) {
              Assert.assertTrue(status.getState().equals(
                  containerStatus4.getState()));
              container4Exist = true;
            }
            if (status.getContainerId().equals(
                containerStatus5.getContainerId())) {
              Assert.assertTrue(status.getState().equals(
                  containerStatus5.getState()));
              container5Exist = true;
            }
          }
          if (heartBeatID == 2) {
            Assert.assertTrue(container2Exist && container3Exist
                && container4Exist && container5Exist);
          } else {
            // NM do not send completed containers again
            Assert.assertTrue(container2Exist && !container3Exist
                && container4Exist && !container5Exist);
          }
          if (heartBeatID == 3) {
            // Simulate the AM acknowledging container 3 so the NM can drop
            // it from its context before the next heartbeat.
            finishedContainersPulledByAM.add(containerStatus3.getContainerId());
          }
        } else if (heartBeatID == 4) {
          List<ContainerStatus> statuses =
              request.getNodeStatus().getContainersStatuses();
          Assert.assertEquals(2, statuses.size());
          // Container 3 is acked by AM, hence removed from context
          Assert.assertEquals(3, context.getContainers().size());
          boolean container3Exist = false;
          for (ContainerStatus status : statuses) {
            if (status.getContainerId().equals(
                containerStatus3.getContainerId())) {
              container3Exist = true;
            }
          }
          Assert.assertFalse(container3Exist);
        }
      } catch (AssertionError error) {
        // Record failures seen on the heartbeat thread; the test thread
        // checks assertionFailedInThread afterwards.
        error.printStackTrace();
        assertionFailedInThread.set(true);
      } finally {
        heartBeatID++;
      }
      NodeStatus nodeStatus = request.getNodeStatus();
      nodeStatus.setResponseId(heartBeatID);
      NodeHeartbeatResponse nhResponse =
          YarnServerBuilderUtils.newNodeHeartbeatResponse(heartBeatID,
              heartBeatNodeAction, null, null, null, null, 1000L);
      nhResponse.addContainersToBeRemovedFromNM(finishedContainersPulledByAM);
      // Ship the expected app credentials back with the heartbeat response
      // so the NM-side credential propagation can be verified.
      Map<ApplicationId, ByteBuffer> appCredentials =
          new HashMap<ApplicationId, ByteBuffer>();
      DataOutputBuffer dob = new DataOutputBuffer();
      expectedCredentials.writeTokenStorageToStream(dob);
      ByteBuffer byteBuffer1 =
          ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
      appCredentials.put(ApplicationId.newInstance(1234, 1), byteBuffer1);
      nhResponse.setSystemCredentialsForApps(appCredentials);
      return nhResponse;
    }
    @Override
    public UnRegisterNodeManagerResponse unRegisterNodeManager(
        UnRegisterNodeManagerRequest request) throws YarnException, IOException {
      return recordFactory
          .newRecordInstance(UnRegisterNodeManagerResponse.class);
    }
  }
private class MyResourceTracker5 implements ResourceTracker {
public NodeAction registerNodeAction = NodeAction.NORMAL;
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException,
IOException {
RegisterNodeManagerResponse response = recordFactory
.newRecordInstance(RegisterNodeManagerResponse.class);
response.setNodeAction(registerNodeAction );
response.setContainerTokenMasterKey(createMasterKey());
response.setNMTokenMasterKey(createMasterKey());
return response;
}
@Override
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
throws YarnException, IOException {
heartBeatID++;
if(heartBeatID == 1) {
// EOFException should be retried as well.
throw new EOFException("NodeHeartbeat exception");
}
else {
throw new java.net.ConnectException(
"NodeHeartbeat exception");
}
}
@Override
public UnRegisterNodeManagerResponse unRegisterNodeManager(
UnRegisterNodeManagerRequest request) throws YarnException, IOException {
return recordFactory
.newRecordInstance(UnRegisterNodeManagerResponse.class);
}
}
private class MyResourceTracker6 implements ResourceTracker {
private long rmStartIntervalMS;
private boolean rmNeverStart;
private final long waitStartTime;
public MyResourceTracker6(long rmStartIntervalMS, boolean rmNeverStart) {
this.rmStartIntervalMS = rmStartIntervalMS;
this.rmNeverStart = rmNeverStart;
this.waitStartTime = System.currentTimeMillis();
}
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException, IOException,
IOException {
if (System.currentTimeMillis() - waitStartTime <= rmStartIntervalMS
|| rmNeverStart) {
throw new java.net.ConnectException("Faking RM start failure as start "
+ "delay timer has not expired.");
} else {
NodeId nodeId = request.getNodeId();
Resource resource = request.getResource();
LOG.info("Registering " + nodeId.toString());
// NOTE: this really should be checking against the config value
InetSocketAddress expected = NetUtils.getConnectAddress(
conf.getSocketAddr(YarnConfiguration.NM_ADDRESS, null, -1));
Assert.assertEquals(NetUtils.getHostPortString(expected),
nodeId.toString());
Assert.assertEquals(5 * 1024, resource.getMemory());
registeredNodes.add(nodeId);
RegisterNodeManagerResponse response = recordFactory
.newRecordInstance(RegisterNodeManagerResponse.class);
triggered = true;
return response;
}
}
@Override
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
throws YarnException, IOException {
NodeStatus nodeStatus = request.getNodeStatus();
nodeStatus.setResponseId(heartBeatID++);
NodeHeartbeatResponse nhResponse = YarnServerBuilderUtils.
newNodeHeartbeatResponse(heartBeatID, NodeAction.NORMAL, null,
null, null, null, 1000L);
return nhResponse;
}
@Override
public UnRegisterNodeManagerResponse unRegisterNodeManager(
UnRegisterNodeManagerRequest request) throws YarnException, IOException {
return recordFactory
.newRecordInstance(UnRegisterNodeManagerResponse.class);
}
}
  // Clears any NM start error left over from a previous test run.
  @Before
  public void clearError() {
    nmStartError = null;
  }
@After
public void deleteBaseDir() throws IOException {
FileContext lfs = FileContext.getLocalFSFileContext();
lfs.delete(new Path(basedir.getPath()), true);
}
@Test(timeout = 90000)
public void testRecentlyFinishedContainers() throws Exception {
NodeManager nm = new NodeManager();
YarnConfiguration conf = new YarnConfiguration();
conf.set(
NodeStatusUpdaterImpl.YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS,
"10000");
nm.init(conf);
NodeStatusUpdaterImpl nodeStatusUpdater =
(NodeStatusUpdaterImpl) nm.getNodeStatusUpdater();
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 0);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
nm.getNMContext().getApplications().putIfAbsent(appId,
mock(Application.class));
nm.getNMContext().getContainers().putIfAbsent(cId, mock(Container.class));
nodeStatusUpdater.addCompletedContainer(cId);
Assert.assertTrue(nodeStatusUpdater.isContainerRecentlyStopped(cId));
nm.getNMContext().getContainers().remove(cId);
long time1 = System.currentTimeMillis();
int waitInterval = 15;
while (waitInterval-- > 0
&& nodeStatusUpdater.isContainerRecentlyStopped(cId)) {
nodeStatusUpdater.removeVeryOldStoppedContainersFromCache();
Thread.sleep(1000);
}
long time2 = System.currentTimeMillis();
// By this time the container will be removed from cache. need to verify.
Assert.assertFalse(nodeStatusUpdater.isContainerRecentlyStopped(cId));
Assert.assertTrue((time2 - time1) >= 10000 && (time2 - time1) <= 250000);
}
@Test(timeout = 90000)
public void testRemovePreviousCompletedContainersFromContext() throws Exception {
NodeManager nm = new NodeManager();
YarnConfiguration conf = new YarnConfiguration();
conf.set(
NodeStatusUpdaterImpl
.YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS,
"10000");
nm.init(conf);
NodeStatusUpdaterImpl nodeStatusUpdater =
(NodeStatusUpdaterImpl) nm.getNodeStatusUpdater();
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 0);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
Token containerToken =
BuilderUtils.newContainerToken(cId, "anyHost", 1234, "anyUser",
BuilderUtils.newResource(1024, 1), 0, 123,
"password".getBytes(), 0);
Container anyCompletedContainer = new ContainerImpl(conf, null,
null, null, null, null,
BuilderUtils.newContainerTokenIdentifier(containerToken)) {
@Override
public ContainerState getCurrentState() {
return ContainerState.COMPLETE;
}
@Override
public org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState getContainerState() {
return org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState.DONE;
}
};
ContainerId runningContainerId =
ContainerId.newContainerId(appAttemptId, 3);
Token runningContainerToken =
BuilderUtils.newContainerToken(runningContainerId, "anyHost",
1234, "anyUser", BuilderUtils.newResource(1024, 1), 0, 123,
"password".getBytes(), 0);
Container runningContainer =
new ContainerImpl(conf, null, null, null, null, null,
BuilderUtils.newContainerTokenIdentifier(runningContainerToken)) {
@Override
public ContainerState getCurrentState() {
return ContainerState.RUNNING;
}
@Override
public org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState getContainerState() {
return org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState.RUNNING;
}
};
nm.getNMContext().getApplications().putIfAbsent(appId,
mock(Application.class));
nm.getNMContext().getContainers().put(cId, anyCompletedContainer);
nm.getNMContext().getContainers()
.put(runningContainerId, runningContainer);
Assert.assertEquals(2, nodeStatusUpdater.getContainerStatuses().size());
List<ContainerId> ackedContainers = new ArrayList<ContainerId>();
ackedContainers.add(cId);
ackedContainers.add(runningContainerId);
nodeStatusUpdater.removeOrTrackCompletedContainersFromContext(ackedContainers);
Set<ContainerId> containerIdSet = new HashSet<ContainerId>();
List<ContainerStatus> containerStatuses = nodeStatusUpdater.getContainerStatuses();
for (ContainerStatus status : containerStatuses) {
containerIdSet.add(status.getContainerId());
}
Assert.assertEquals(1, containerStatuses.size());
// completed container is removed;
Assert.assertFalse(containerIdSet.contains(cId));
// running container is not removed;
Assert.assertTrue(containerIdSet.contains(runningContainerId));
}
@Test(timeout = 10000)
public void testCompletedContainersIsRecentlyStopped() throws Exception {
NodeManager nm = new NodeManager();
nm.init(conf);
NodeStatusUpdaterImpl nodeStatusUpdater =
(NodeStatusUpdaterImpl) nm.getNodeStatusUpdater();
ApplicationId appId = ApplicationId.newInstance(0, 0);
Application completedApp = mock(Application.class);
when(completedApp.getApplicationState()).thenReturn(
ApplicationState.FINISHED);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 0);
ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
Token containerToken =
BuilderUtils.newContainerToken(containerId, "host", 1234, "user",
BuilderUtils.newResource(1024, 1), 0, 123,
"password".getBytes(), 0);
Container completedContainer = new ContainerImpl(conf, null,
null, null, null, null,
BuilderUtils.newContainerTokenIdentifier(containerToken)) {
@Override
public ContainerState getCurrentState() {
return ContainerState.COMPLETE;
}
};
nm.getNMContext().getApplications().putIfAbsent(appId, completedApp);
nm.getNMContext().getContainers().put(containerId, completedContainer);
Assert.assertEquals(1, nodeStatusUpdater.getContainerStatuses().size());
Assert.assertTrue(nodeStatusUpdater.isContainerRecentlyStopped(
containerId));
}
@Test
// Verifies that a completed container keeps being reported while its
// application is finishing, and even after the application is removed from
// the context, so the RM still learns about it if a heartbeat was lost.
public void testCleanedupApplicationContainerCleanup() throws IOException {
NodeManager nm = new NodeManager();
YarnConfiguration conf = new YarnConfiguration();
// Very long retention window so "recently stopped" tracking never expires
// during the test.
conf.set(NodeStatusUpdaterImpl
.YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS,
"1000000");
nm.init(conf);
NodeStatusUpdaterImpl nodeStatusUpdater =
(NodeStatusUpdaterImpl) nm.getNodeStatusUpdater();
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 0);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 1);
Token containerToken =
BuilderUtils.newContainerToken(cId, "anyHost", 1234, "anyUser",
BuilderUtils.newResource(1024, 1), 0, 123,
"password".getBytes(), 0);
// Container stubbed to always report COMPLETE.
Container anyCompletedContainer = new ContainerImpl(conf, null,
null, null, null, null,
BuilderUtils.newContainerTokenIdentifier(containerToken)) {
@Override
public ContainerState getCurrentState() {
return ContainerState.COMPLETE;
}
};
Application application = mock(Application.class);
when(application.getApplicationState()).thenReturn(ApplicationState.RUNNING);
nm.getNMContext().getApplications().putIfAbsent(appId, application);
nm.getNMContext().getContainers().put(cId, anyCompletedContainer);
Assert.assertEquals(1, nodeStatusUpdater.getContainerStatuses().size());
when(application.getApplicationState()).thenReturn(
ApplicationState.FINISHING_CONTAINERS_WAIT);
// The completed container will be saved in case of lost heartbeat.
// NOTE(review): the assertion is repeated, presumably to show that a second
// status query (simulating a re-sent heartbeat) still contains the
// container — TODO confirm this duplication is intentional.
Assert.assertEquals(1, nodeStatusUpdater.getContainerStatuses().size());
Assert.assertEquals(1, nodeStatusUpdater.getContainerStatuses().size());
nm.getNMContext().getContainers().put(cId, anyCompletedContainer);
nm.getNMContext().getApplications().remove(appId);
// The completed container will be saved in case of lost heartbeat.
Assert.assertEquals(1, nodeStatusUpdater.getContainerStatuses().size());
Assert.assertEquals(1, nodeStatusUpdater.getContainerStatuses().size());
}
@Test
// Starts a full NodeManager on a background thread and verifies that it
// heartbeats and registers exactly once with the (mock) ResourceManager.
public void testNMRegistration() throws InterruptedException, IOException {
nm = new NodeManager() {
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
return new MyNodeStatusUpdater(context, dispatcher, healthChecker,
metrics);
}
};
YarnConfiguration conf = createNMConfig();
nm.init(conf);
// verify that the last service is the nodeStatusUpdater (ie registration
// with RM)
Object[] services = nm.getServices().toArray();
Object lastService = services[services.length-1];
Assert.assertTrue("last service is NOT the node status updater",
lastService instanceof NodeStatusUpdater);
// Start asynchronously; failures are captured in nmStartError so the main
// thread can fail the test with the real cause.
new Thread() {
public void run() {
try {
nm.start();
} catch (Throwable e) {
TestNodeStatusUpdater.this.nmStartError = e;
throw new YarnRuntimeException(e);
}
}
}.start();
System.out.println(" ----- thread already started.."
+ nm.getServiceState());
// Poll (up to ~100s) until the NM leaves INITED or an error is recorded.
int waitCount = 0;
while (nm.getServiceState() == STATE.INITED && waitCount++ != 50) {
LOG.info("Waiting for NM to start..");
if (nmStartError != null) {
LOG.error("Error during startup. ", nmStartError);
Assert.fail(nmStartError.getCause().getMessage());
}
Thread.sleep(2000);
}
if (nm.getServiceState() != STATE.STARTED) {
// NM could have failed.
Assert.fail("NodeManager failed to start");
}
// Wait for at least 4 heartbeats before checking registration.
waitCount = 0;
while (heartBeatID <= 3 && waitCount++ != 200) {
Thread.sleep(1000);
}
Assert.assertFalse(heartBeatID <= 3);
Assert.assertEquals("Number of registered NMs is wrong!!", 1,
this.registeredNodes.size());
nm.stop();
}
/**
 * Verifies that stopping the NM is reentrant: the RM orders a SHUTDOWN via
 * heartbeat while the test also calls {@code nm.stop()} directly, and the
 * application cleanup hook must still run exactly once.
 */
@Test
public void testStopReentrant() throws Exception {
  // Counts invocations of cleanUpApplicationsOnNMShutDown().
  final AtomicInteger numCleanups = new AtomicInteger(0);
  nm = new NodeManager() {
    @Override
    protected NodeStatusUpdater createNodeStatusUpdater(Context context,
        Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
      MyNodeStatusUpdater myNodeStatusUpdater = new MyNodeStatusUpdater(
          context, dispatcher, healthChecker, metrics);
      // Tracker tells the NM to shut down on its first heartbeat.
      MyResourceTracker2 myResourceTracker2 = new MyResourceTracker2();
      myResourceTracker2.heartBeatNodeAction = NodeAction.SHUTDOWN;
      myNodeStatusUpdater.resourceTracker = myResourceTracker2;
      return myNodeStatusUpdater;
    }

    @Override
    protected ContainerManagerImpl createContainerManager(Context context,
        ContainerExecutor exec, DeletionService del,
        NodeStatusUpdater nodeStatusUpdater,
        ApplicationACLsManager aclsManager,
        LocalDirsHandlerService dirsHandler) {
      return new ContainerManagerImpl(context, exec, del, nodeStatusUpdater,
          metrics, dirsHandler) {
        @Override
        public void cleanUpApplicationsOnNMShutDown() {
          super.cleanUpApplicationsOnNMShutDown();
          numCleanups.incrementAndGet();
        }
      };
    }
  };

  YarnConfiguration conf = createNMConfig();
  nm.init(conf);
  nm.start();
  // Wait (up to ~100s) for at least one heartbeat so SHUTDOWN is delivered.
  int waitCount = 0;
  while (heartBeatID < 1 && waitCount++ != 200) {
    Thread.sleep(500);
  }
  Assert.assertFalse(heartBeatID < 1);

  // Meanwhile call stop directly as the shutdown hook would
  nm.stop();

  // NM takes a while to reach the STOPPED state.
  waitCount = 0;
  while (nm.getServiceState() != STATE.STOPPED && waitCount++ != 20) {
    LOG.info("Waiting for NM to stop..");
    Thread.sleep(1000);
  }
  Assert.assertEquals(STATE.STOPPED, nm.getServiceState());
  // Fixed argument order: JUnit's assertEquals takes (expected, actual);
  // the arguments were previously swapped, which would produce a misleading
  // failure message. Cleanup must have happened exactly once despite the
  // two shutdown paths.
  Assert.assertEquals(1, numCleanups.get());
}
@Test
// Verifies that an RM-issued SHUTDOWN heartbeat action marks the node as
// decommissioned and drives the NM to the STOPPED state on its own.
// NOTE(review): method name misspells "Decommission"; renaming would change
// the public test name, so it is left as-is.
public void testNodeDecommision() throws Exception {
nm = getNodeManager(NodeAction.SHUTDOWN);
YarnConfiguration conf = createNMConfig();
nm.init(conf);
Assert.assertEquals(STATE.INITED, nm.getServiceState());
nm.start();
// Wait (up to ~100s) for the first heartbeat, which carries SHUTDOWN.
int waitCount = 0;
while (heartBeatID < 1 && waitCount++ != 200) {
Thread.sleep(500);
}
Assert.assertFalse(heartBeatID < 1);
Assert.assertTrue(nm.getNMContext().getDecommissioned());
// NM takes a while to reach the STOPPED state.
waitCount = 0;
while (nm.getServiceState() != STATE.STOPPED && waitCount++ != 20) {
LOG.info("Waiting for NM to stop..");
Thread.sleep(1000);
}
Assert.assertEquals(STATE.STOPPED, nm.getServiceState());
}
/**
 * NodeManager variant that delegates creation of its NodeStatusUpdater to
 * the abstract {@link #createUpdater} factory and keeps a reference to the
 * created instance so tests can inspect it afterwards.
 */
private abstract class NodeManagerWithCustomNodeStatusUpdater extends NodeManager {
  // The updater handed out by createNodeStatusUpdater(); null before init.
  private NodeStatusUpdater updater;

  private NodeManagerWithCustomNodeStatusUpdater() {
  }

  @Override
  protected NodeStatusUpdater createNodeStatusUpdater(Context context,
      Dispatcher dispatcher,
      NodeHealthCheckerService healthChecker) {
    this.updater = createUpdater(context, dispatcher, healthChecker);
    return this.updater;
  }

  /** Returns the updater created during service init, or null if not yet created. */
  public NodeStatusUpdater getUpdater() {
    return this.updater;
  }

  /** Factory hook implemented by each test to supply a custom updater. */
  abstract NodeStatusUpdater createUpdater(Context context,
      Dispatcher dispatcher,
      NodeHealthCheckerService healthChecker);
}
@Test
// Verifies that when the RM answers the *registration* call with SHUTDOWN,
// NM startup fails with the RM-provided shutdown message.
public void testNMShutdownForRegistrationFailure() throws Exception {
nm = new NodeManagerWithCustomNodeStatusUpdater() {
@Override
protected NodeStatusUpdater createUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
MyNodeStatusUpdater nodeStatusUpdater = new MyNodeStatusUpdater(
context, dispatcher, healthChecker, metrics);
MyResourceTracker2 myResourceTracker2 = new MyResourceTracker2();
// Fail at registration time (not heartbeat) with a custom message.
myResourceTracker2.registerNodeAction = NodeAction.SHUTDOWN;
myResourceTracker2.shutDownMessage = "RM Shutting Down Node";
nodeStatusUpdater.resourceTracker = myResourceTracker2;
return nodeStatusUpdater;
}
};
// "Recieved" matches the (misspelled) message produced by the production
// code; do not correct it here or the assertion would fail.
verifyNodeStartFailure(
"Recieved SHUTDOWN signal from Resourcemanager, "
+ "Registration of NodeManager failed, "
+ "Message from ResourceManager: RM Shutting Down Node");
}
@Test (timeout = 150000)
// Exercises the NM's RM-connection retry logic:
// 1) RM never comes up -> NM start must fail, but only after retrying for
//    at least RESOURCEMANAGER_CONNECT_MAX_WAIT_MS.
// 2) RM comes up after rmStartIntervalMS -> NM start must succeed, and the
//    time to connect must fall inside [rmStartIntervalMS, +delta).
public void testNMConnectionToRM() throws Exception {
final long delta = 50000;
final long connectionWaitMs = 5000;
final long connectionRetryIntervalMs = 1000;
//Waiting for rmStartIntervalMS, RM will be started
final long rmStartIntervalMS = 2*1000;
conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_MS,
connectionWaitMs);
conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS,
connectionRetryIntervalMs);
//Test NM try to connect to RM Several times, but finally fail
NodeManagerWithCustomNodeStatusUpdater nmWithUpdater;
nm = nmWithUpdater = new NodeManagerWithCustomNodeStatusUpdater() {
@Override
protected NodeStatusUpdater createUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
// 'true' -> the fake RM never becomes reachable.
NodeStatusUpdater nodeStatusUpdater = new MyNodeStatusUpdater4(
context, dispatcher, healthChecker, metrics,
rmStartIntervalMS, true);
return nodeStatusUpdater;
}
};
nm.init(conf);
long waitStartTime = System.currentTimeMillis();
try {
nm.start();
Assert.fail("NM should have failed to start due to RM connect failure");
} catch(Exception e) {
long t = System.currentTimeMillis();
long duration = t - waitStartTime;
boolean waitTimeValid = (duration >= connectionWaitMs)
&& (duration < (connectionWaitMs + delta));
if(!waitTimeValid) {
//either the exception was too early, or it had a different cause.
//reject with the inner stack trace
throw new Exception("NM should have tried re-connecting to RM during " +
"period of at least " + connectionWaitMs + " ms, but " +
"stopped retrying within " + (connectionWaitMs + delta) +
" ms: " + e, e);
}
}
//Test NM connect to RM, fail at first several attempts,
//but finally success.
nm = nmWithUpdater = new NodeManagerWithCustomNodeStatusUpdater() {
@Override
protected NodeStatusUpdater createUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
// 'false' -> the fake RM becomes reachable after rmStartIntervalMS.
NodeStatusUpdater nodeStatusUpdater = new MyNodeStatusUpdater4(
context, dispatcher, healthChecker, metrics, rmStartIntervalMS,
false);
return nodeStatusUpdater;
}
};
nm.init(conf);
NodeStatusUpdater updater = nmWithUpdater.getUpdater();
Assert.assertNotNull("Updater not yet created ", updater);
waitStartTime = System.currentTimeMillis();
try {
nm.start();
} catch (Exception ex){
LOG.error("NM should have started successfully " +
"after connecting to RM.", ex);
throw ex;
}
long duration = System.currentTimeMillis() - waitStartTime;
MyNodeStatusUpdater4 myUpdater = (MyNodeStatusUpdater4) updater;
Assert.assertTrue("NM started before updater triggered",
myUpdater.isTriggered());
Assert.assertTrue("NM should have connected to RM after "
+"the start interval of " + rmStartIntervalMS
+": actual " + duration
+ " " + myUpdater,
(duration >= rmStartIntervalMS));
Assert.assertTrue("NM should have connected to RM less than "
+ (rmStartIntervalMS + delta)
+" milliseconds of RM starting up: actual " + duration
+ " " + myUpdater,
(duration < (rmStartIntervalMS + delta)));
}
/**
 * Verifies that if for some reason NM fails to start ContainerManager RPC
 * server, RM is oblivious to NM's presence. The behaviour is like this
 * because otherwise, NM will report to RM even if all its servers are not
 * started properly, RM will think that the NM is alive and will retire the NM
 * only after NM_EXPIRY interval. See MAPREDUCE-2749.
 */
@Test
public void testNoRegistrationWhenNMServicesFail() throws Exception {
nm = new NodeManager() {
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
return new MyNodeStatusUpdater(context, dispatcher, healthChecker,
metrics);
}
@Override
protected ContainerManagerImpl createContainerManager(Context context,
ContainerExecutor exec, DeletionService del,
NodeStatusUpdater nodeStatusUpdater,
ApplicationACLsManager aclsManager,
LocalDirsHandlerService diskhandler) {
return new ContainerManagerImpl(context, exec, del, nodeStatusUpdater,
metrics, diskhandler) {
@Override
protected void serviceStart() {
// Simulating failure of starting RPC server
throw new YarnRuntimeException("Starting of RPC Server failed");
}
};
}
};
// Expect startup failure with the simulated message, and zero registered
// nodes afterwards (checked inside verifyNodeStartFailure).
verifyNodeStartFailure("Starting of RPC Server failed");
}
@Test
// Verifies that with log aggregation enabled the NM sends application
// keep-alive requests while the app is known, and stops sending them once
// the application is removed from the context.
public void testApplicationKeepAlive() throws Exception {
MyNodeManager nm = new MyNodeManager();
try {
YarnConfiguration conf = createNMConfig();
conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
conf.setLong(YarnConfiguration.RM_NM_EXPIRY_INTERVAL_MS,
4000l);
nm.init(conf);
nm.start();
// HB 2 -> app cancelled by RM.
while (heartBeatID < 12) {
Thread.sleep(1000l);
}
MyResourceTracker3 rt =
(MyResourceTracker3) nm.getNodeStatusUpdater().getRMClient();
rt.context.getApplications().remove(rt.appId);
Assert.assertEquals(1, rt.keepAliveRequests.size());
int numKeepAliveRequests = rt.keepAliveRequests.get(rt.appId).size();
LOG.info("Number of Keep Alive Requests: [" + numKeepAliveRequests + "]");
// 2 or 3 accepted because keep-alive cadence vs. heartbeat timing is racy.
Assert.assertTrue(numKeepAliveRequests == 2 || numKeepAliveRequests == 3);
while (heartBeatID < 20) {
Thread.sleep(1000l);
}
// No further keep-alives after the app was removed above.
int numKeepAliveRequests2 = rt.keepAliveRequests.get(rt.appId).size();
Assert.assertEquals(numKeepAliveRequests, numKeepAliveRequests2);
} finally {
if (nm.getServiceState() == STATE.STARTED)
nm.stop();
}
}
/**
 * Test completed containerStatus get back up when heart beat lost, and will
 * be sent via next heart beat.
 */
@Test(timeout = 200000)
public void testCompletedContainerStatusBackup() throws Exception {
nm = new NodeManager() {
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
// MyNodeStatusUpdater2 performs the per-heartbeat assertions and
// records failures in assertionFailedInThread.
MyNodeStatusUpdater2 myNodeStatusUpdater =
new MyNodeStatusUpdater2(context, dispatcher, healthChecker,
metrics);
return myNodeStatusUpdater;
}
@Override
protected NMContext createNMContext(
NMContainerTokenSecretManager containerTokenSecretManager,
NMTokenSecretManagerInNM nmTokenSecretManager,
NMStateStoreService store) {
// MyNMContext feeds different container sets on each heartbeat.
return new MyNMContext(containerTokenSecretManager,
nmTokenSecretManager);
}
};
YarnConfiguration conf = createNMConfig();
nm.init(conf);
nm.start();
// Wait (up to 10s) for at least 5 heartbeats to have happened.
int waitCount = 0;
while (heartBeatID <= 4 && waitCount++ != 20) {
Thread.sleep(500);
}
if (heartBeatID <= 4) {
Assert.fail("Failed to get all heartbeats in time, " +
"heartbeatID:" + heartBeatID);
}
// Assertions ran on the updater thread; surface any recorded failure here.
if(assertionFailedInThread.get()) {
Assert.fail("ContainerStatus Backup failed");
}
// System credentials delivered via heartbeat must be visible in the context.
Assert.assertNotNull(nm.getNMContext().getSystemCredentialsForApps()
.get(ApplicationId.newInstance(1234, 1)).getToken(new Text("token1")));
nm.stop();
}
@Test(timeout = 200000)
// Starts a container, forces the status-updater's RM connection to fail so
// the NM shuts itself down, and verifies the container is cleaned up and the
// expected number of heartbeats occurred.
public void testNodeStatusUpdaterRetryAndNMShutdown()
throws Exception {
final long connectionWaitSecs = 1000;
final long connectionRetryIntervalMs = 1000;
int port = ServerSocketUtil.getPort(49156, 10);
YarnConfiguration conf = createNMConfig(port);
conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_MS,
connectionWaitSecs);
conf.setLong(YarnConfiguration
.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS,
connectionRetryIntervalMs);
conf.setLong(YarnConfiguration.NM_SLEEP_DELAY_BEFORE_SIGKILL_MS, 5000);
conf.setLong(YarnConfiguration.NM_LOG_RETAIN_SECONDS, 1);
// Barrier synchronises this thread with MyNodeManager2's shutdown path.
CyclicBarrier syncBarrier = new CyclicBarrier(2);
nm = new MyNodeManager2(syncBarrier, conf);
nm.init(conf);
nm.start();
// start a container
ContainerId cId = TestNodeManagerShutdown.createContainerId();
FileContext localFS = FileContext.getLocalFSFileContext();
TestNodeManagerShutdown.startContainer(nm, cId, localFS, nmLocalDir,
new File("start_file.txt"), port);
try {
syncBarrier.await(10000, TimeUnit.MILLISECONDS);
} catch (Exception e) {
// Deliberately ignored: a barrier timeout is tolerated here; the
// assertions below still validate the shutdown outcome.
}
Assert.assertFalse("Containers not cleaned up when NM stopped",
assertionFailedInThread.get());
Assert.assertTrue(((MyNodeManager2) nm).isStopped);
Assert.assertTrue("calculate heartBeatCount based on" +
" connectionWaitSecs and RetryIntervalSecs", heartBeatID == 2);
}
@Test
// Verifies the NM still starts (with a warning, not a failure) when the RM
// reports a version below NM_RESOURCEMANAGER_MINIMUM_VERSION.
public void testRMVersionLessThanMinimum() throws InterruptedException,
IOException {
// NOTE(review): numCleanups is counted but never asserted in this test;
// it mirrors testStopReentrant's container manager override.
final AtomicInteger numCleanups = new AtomicInteger(0);
YarnConfiguration conf = createNMConfig();
conf.set(YarnConfiguration.NM_RESOURCEMANAGER_MINIMUM_VERSION, "3.0.0");
nm = new NodeManager() {
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
MyNodeStatusUpdater myNodeStatusUpdater = new MyNodeStatusUpdater(
context, dispatcher, healthChecker, metrics);
MyResourceTracker2 myResourceTracker2 = new MyResourceTracker2();
myResourceTracker2.heartBeatNodeAction = NodeAction.NORMAL;
// RM advertises exactly the minimum version.
myResourceTracker2.rmVersion = "3.0.0";
myNodeStatusUpdater.resourceTracker = myResourceTracker2;
return myNodeStatusUpdater;
}
@Override
protected ContainerManagerImpl createContainerManager(Context context,
ContainerExecutor exec, DeletionService del,
NodeStatusUpdater nodeStatusUpdater,
ApplicationACLsManager aclsManager,
LocalDirsHandlerService dirsHandler) {
return new ContainerManagerImpl(context, exec, del, nodeStatusUpdater,
metrics, dirsHandler) {
@Override
public void cleanUpApplicationsOnNMShutDown() {
super.cleanUpApplicationsOnNMShutDown();
numCleanups.incrementAndGet();
}
};
}
};
nm.init(conf);
nm.start();
// NM takes a while to reach the STARTED state.
int waitCount = 0;
while (nm.getServiceState() != STATE.STARTED && waitCount++ != 20) {
LOG.info("Waiting for NM to stop..");
Thread.sleep(1000);
}
Assert.assertTrue(nm.getServiceState() == STATE.STARTED);
nm.stop();
}
//Verify that signalContainer request can be dispatched from
//NodeStatusUpdaterImpl to ContainerManagerImpl.
@Test
public void testSignalContainerToContainerManager() throws Exception {
nm = new NodeManager() {
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
// 'true' enables the signal-container heartbeat response path.
return new MyNodeStatusUpdater(
context, dispatcher, healthChecker, metrics, true);
}
@Override
protected ContainerManagerImpl createContainerManager(Context context,
ContainerExecutor exec, DeletionService del,
NodeStatusUpdater nodeStatusUpdater,
ApplicationACLsManager aclsManager,
LocalDirsHandlerService diskhandler) {
// MyContainerManager records receipt of the signal in 'signaled'.
return new MyContainerManager(context, exec, del, nodeStatusUpdater,
metrics, diskhandler);
}
};
YarnConfiguration conf = createNMConfig();
nm.init(conf);
nm.start();
System.out.println(" ----- thread already started.."
+ nm.getServiceState());
// Wait (up to ~20s) for the NM to leave INITED, surfacing startup errors.
int waitCount = 0;
while (nm.getServiceState() == STATE.INITED && waitCount++ != 20) {
LOG.info("Waiting for NM to start..");
if (nmStartError != null) {
LOG.error("Error during startup. ", nmStartError);
Assert.fail(nmStartError.getCause().getMessage());
}
Thread.sleep(1000);
}
if (nm.getServiceState() != STATE.STARTED) {
// NM could have failed.
Assert.fail("NodeManager failed to start");
}
// Allow at least 4 heartbeats so the signal request can flow through.
waitCount = 0;
while (heartBeatID <= 3 && waitCount++ != 20) {
Thread.sleep(500);
}
Assert.assertFalse(heartBeatID <= 3);
Assert.assertEquals("Number of registered NMs is wrong!!", 1,
this.registeredNodes.size());
MyContainerManager containerManager =
(MyContainerManager)nm.getContainerManager();
Assert.assertTrue(containerManager.signaled);
nm.stop();
}
@Test
// Regression test: concurrent serialization of a NodeHeartbeatResponse that
// shares one system-credentials map must not throw (e.g. from unsynchronized
// protobuf conversion). Ten threads each build and serialize 100 responses.
public void testConcurrentAccessToSystemCredentials(){
final Map<ApplicationId, ByteBuffer> testCredentials = new HashMap<>();
ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[300]);
ApplicationId applicationId = ApplicationId.newInstance(123456, 120);
testCredentials.put(applicationId, byteBuffer);
// Collects failures from worker threads; 'stop' short-circuits the others
// once any thread has failed.
final List<Throwable> exceptions = Collections.synchronizedList(new
ArrayList<Throwable>());
final int NUM_THREADS = 10;
final CountDownLatch allDone = new CountDownLatch(NUM_THREADS);
final ExecutorService threadPool = Executors.newFixedThreadPool(
NUM_THREADS);
final AtomicBoolean stop = new AtomicBoolean(false);
try {
for (int i = 0; i < NUM_THREADS; i++) {
threadPool.submit(new Runnable() {
@Override
public void run() {
try {
for (int i = 0; i < 100 && !stop.get(); i++) {
NodeHeartbeatResponse nodeHeartBeatResponse =
newNodeHeartbeatResponse(0, NodeAction.NORMAL,
null, null, null, null, 0);
nodeHeartBeatResponse.setSystemCredentialsForApps(
testCredentials);
// getProto() is where concurrent serialization used to fail.
NodeHeartbeatResponseProto proto =
((NodeHeartbeatResponsePBImpl)nodeHeartBeatResponse)
.getProto();
Assert.assertNotNull(proto);
}
} catch (Throwable t) {
exceptions.add(t);
stop.set(true);
} finally {
allDone.countDown();
}
}
});
}
int testTimeout = 2;
Assert.assertTrue("Timeout waiting for more than " + testTimeout + " " +
"seconds",
allDone.await(testTimeout, TimeUnit.SECONDS));
} catch (InterruptedException ie) {
exceptions.add(ie);
} finally {
threadPool.shutdownNow();
}
Assert.assertTrue("Test failed with exception(s)" + exceptions,
exceptions.isEmpty());
}
// Add new containers info into NM context each time node heart beats.
// Heartbeats 1 and 2 each add one RUNNING and one COMPLETE container;
// heartbeats 3-4 keep the set unchanged; later heartbeats clear it.
// Relies on the enclosing test's shared heartBeatID counter.
private class MyNMContext extends NMContext {
public MyNMContext(
NMContainerTokenSecretManager containerTokenSecretManager,
NMTokenSecretManagerInNM nmTokenSecretManager) {
super(containerTokenSecretManager, nmTokenSecretManager, null, null,
new NMNullStateStoreService());
}
@Override
public ConcurrentMap<ContainerId, Container> getContainers() {
if (heartBeatID == 0) {
return containers;
} else if (heartBeatID == 1) {
ContainerStatus containerStatus2 =
createContainerStatus(2, ContainerState.RUNNING);
putMockContainer(containerStatus2);
ContainerStatus containerStatus3 =
createContainerStatus(3, ContainerState.COMPLETE);
putMockContainer(containerStatus3);
return containers;
} else if (heartBeatID == 2) {
ContainerStatus containerStatus4 =
createContainerStatus(4, ContainerState.RUNNING);
putMockContainer(containerStatus4);
ContainerStatus containerStatus5 =
createContainerStatus(5, ContainerState.COMPLETE);
putMockContainer(containerStatus5);
return containers;
} else if (heartBeatID == 3 || heartBeatID == 4) {
return containers;
} else {
containers.clear();
return containers;
}
}
// Registers a mock container (and a mock owning application) in the context.
private void putMockContainer(ContainerStatus containerStatus) {
Container container = getMockContainer(containerStatus);
containers.put(containerStatus.getContainerId(), container);
applications.putIfAbsent(containerStatus.getContainerId()
.getApplicationAttemptId().getApplicationId(),
mock(Application.class));
}
}
/**
 * Builds a {@link ContainerStatus} for container {@code id} of a fixed test
 * application (app 0/1, attempt 1) in the given state, with a descriptive
 * diagnostics string and a 1GB/1-vcore resource.
 */
public static ContainerStatus createContainerStatus(int id,
    ContainerState containerState) {
  ApplicationId applicationId = ApplicationId.newInstance(0, 1);
  ApplicationAttemptId applicationAttemptId =
      ApplicationAttemptId.newInstance(applicationId, 1);
  // Fixed misspelled local variable name ("contaierId" -> "containerId").
  ContainerId containerId =
      ContainerId.newContainerId(applicationAttemptId, id);
  ContainerStatus containerStatus =
      BuilderUtils.newContainerStatus(containerId, containerState,
          "test_containerStatus: id=" + id + ", containerState: "
              + containerState, 0, Resource.newInstance(1024, 1));
  return containerStatus;
}
/**
 * Wraps a {@link ContainerStatus} in a Mockito container mock whose state,
 * id and cloneAndGetContainerStatus() reflect the given status. COMPLETE
 * statuses map to the internal DONE state, RUNNING to RUNNING; other states
 * leave getContainerState() unstubbed (returns the mock default).
 */
public static Container getMockContainer(ContainerStatus containerStatus) {
  ContainerImpl container = mock(ContainerImpl.class);
  when(container.cloneAndGetContainerStatus()).thenReturn(containerStatus);
  when(container.getCurrentState()).thenReturn(containerStatus.getState());
  when(container.getContainerId()).thenReturn(
      containerStatus.getContainerId());
  // Enum constants are singletons, so compare with == (idiomatic and
  // null-safe) instead of equals().
  if (containerStatus.getState() == ContainerState.COMPLETE) {
    when(container.getContainerState())
        .thenReturn(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState.DONE);
  } else if (containerStatus.getState() == ContainerState.RUNNING) {
    when(container.getContainerState())
        .thenReturn(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState.RUNNING);
  }
  return container;
}
/**
 * Starts the already-constructed {@code nm}, expects startup to fail with a
 * message containing {@code errMessage}, and asserts the NM ends up STOPPED
 * with no node registered at the RM.
 */
private void verifyNodeStartFailure(String errMessage) throws Exception {
  Assert.assertNotNull("nm is null", nm);
  nm.init(createNMConfig());
  try {
    nm.start();
    Assert.fail("NM should have failed to start. Didn't get exception!!");
  } catch (Exception e) {
    // A simple contains() check on the propagated message is less brittle
    // than comparing nested causes for equality; rethrow on mismatch so the
    // real failure surfaces with its stack trace.
    if (!e.getMessage().contains(errMessage)) {
      throw e;
    }
  }

  // the service should be stopped
  Assert.assertEquals("NM state is wrong!", STATE.STOPPED,
      nm.getServiceState());
  Assert.assertEquals("Number of registered nodes is wrong!", 0,
      this.registeredNodes.size());
}
/**
 * Builds a minimal NM configuration for tests: NM bound to localhost on the
 * given port, localizer on an ephemeral port, 5GB of memory, and the
 * local/log/remote-log directories wired to the test directories.
 */
private YarnConfiguration createNMConfig(int port) throws IOException {
  String host = null;
  try {
    host = InetAddress.getByName("localhost").getCanonicalHostName();
  } catch (UnknownHostException e) {
    Assert.fail("Unable to get localhost address: " + e.getMessage());
  }
  YarnConfiguration conf = new YarnConfiguration();
  conf.setInt(YarnConfiguration.NM_PMEM_MB, 5 * 1024); // 5GB
  conf.set(YarnConfiguration.NM_ADDRESS, host + ":" + port);
  conf.set(YarnConfiguration.NM_LOCALIZER_ADDRESS,
      host + ":" + ServerSocketUtil.getPort(49160, 10));
  conf.set(YarnConfiguration.NM_LOG_DIRS, logsDir.getAbsolutePath());
  conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
      remoteLogsDir.getAbsolutePath());
  conf.set(YarnConfiguration.NM_LOCAL_DIRS, nmLocalDir.getAbsolutePath());
  conf.setLong(YarnConfiguration.NM_LOG_RETAIN_SECONDS, 1);
  return conf;
}
// Convenience overload: same configuration on a free port from 49170-49179.
private YarnConfiguration createNMConfig() throws IOException {
return createNMConfig(ServerSocketUtil.getPort(49170, 10));
}
/**
 * Returns a NodeManager whose status updater talks to a MyResourceTracker2
 * that answers every heartbeat with the supplied {@code nodeHeartBeatAction}.
 */
private NodeManager getNodeManager(final NodeAction nodeHeartBeatAction) {
  return new NodeManager() {
    @Override
    protected NodeStatusUpdater createNodeStatusUpdater(Context context,
        Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
      // Wire a canned resource tracker into the test updater.
      MyResourceTracker2 tracker = new MyResourceTracker2();
      tracker.heartBeatNodeAction = nodeHeartBeatAction;
      MyNodeStatusUpdater updater = new MyNodeStatusUpdater(
          context, dispatcher, healthChecker, metrics);
      updater.resourceTracker = tracker;
      return updater;
    }
  };
}
}
| apache-2.0 |
j-coll/opencga | opencga-server/src/test/java/org/opencb/opencga/server/rest/analysis/VariantWebServiceTest.java | 2198 | /*
* Copyright 2015-2020 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.server.rest.analysis;
import org.junit.Test;
import org.opencb.commons.datastore.core.QueryParam;
import org.opencb.opencga.core.models.variant.VariantQueryParams;
import org.opencb.opencga.storage.core.variant.adaptors.VariantQueryParam;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.opencb.opencga.analysis.variant.manager.VariantCatalogQueryUtils.VARIANT_CATALOG_QUERY_PARAMS;
public class VariantWebServiceTest {
@Test
// Checks the REST query-parameter bean (VariantQueryParams) and the
// storage/catalog query-parameter registries stay in sync in both directions:
// every bean field maps to a known param, and every known param has a field.
public void testVariantQueryParams() {
Set<String> other = VARIANT_CATALOG_QUERY_PARAMS.stream().map(QueryParam::key).collect(Collectors.toSet());
// Fields intentionally present only on the REST bean.
List<String> excluded = Arrays.asList("chromosome", "sort");
Map<String, Class<?>> fields = new VariantQueryParams().fields();
for (String field : fields.keySet()) {
if (excluded.contains(field)) {
continue;
}
// NOTE(review): assumes VariantQueryParam.valueOf is a custom lookup that
// returns null for unknown keys (an enum's valueOf would throw) — confirm.
VariantQueryParam queryParam = VariantQueryParam.valueOf(field);
if (queryParam == null) {
if (!other.contains(field)) {
fail("Extra field " + field);
}
}
}
// Reverse direction: every storage and catalog param must be a bean field.
for (VariantQueryParam value : VariantQueryParam.values()) {
assertTrue(fields.containsKey(value.key()));
}
for (QueryParam value : VARIANT_CATALOG_QUERY_PARAMS) {
assertTrue(fields.containsKey(value.key()));
}
}
} | apache-2.0 |
Temperance2015/MyWeather | app/src/main/java/com/temperance2015/myweather/db/MyWeatherOpenHelper.java | 1428 | package com.temperance2015.myweather.db;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
/**
* Created by Isabel on 2015/9/23.
*/
/**
 * SQLiteOpenHelper that creates the weather app's location schema:
 * Province, City and County tables linked by foreign-key-style id columns
 * (province_id on City, city_id on County; not enforced by SQLite here).
 */
public class MyWeatherOpenHelper extends SQLiteOpenHelper {
// DDL for the Province table.
public static final String CREATE_PROVINCE = "create table Province ("
+ "id integer primary key autoincrement, "
+ "province_name text, "
+ "province_code text)";
// DDL for the City table; province_id references Province.id.
public static final String CREATE_CITY = "create table City ("
+ "id integer primary key autoincrement, "
+ "city_name text, "
+ "city_code text, "
+ "province_id integer)";
// DDL for the County table; city_id references City.id.
public static final String CREATE_COUNTY = "create table County ("
+ "id integer primary key autoincrement, "
+ "county_name text, "
+ "county_code text, "
+ "city_id integer)";
public MyWeatherOpenHelper(Context context,String name,SQLiteDatabase.CursorFactory factory,int version){
super(context,name,factory,version);
}
// Creates all three tables on first database open.
@Override
public void onCreate(SQLiteDatabase db){
db.execSQL(CREATE_PROVINCE);
db.execSQL(CREATE_CITY);
db.execSQL(CREATE_COUNTY);
}
// Intentionally a no-op: no schema migrations are defined for this app.
@Override
public void onUpgrade(SQLiteDatabase db,int oldVersion,int newVersion){
}
}
| apache-2.0 |
McLeodMoores/starling | projects/analytics/src/main/java/com/opengamma/analytics/financial/equity/EquityTrsDataBundle.java | 3298 | /**
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.equity;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.ObjectUtils;
import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderInterface;
import com.opengamma.analytics.financial.provider.description.interestrate.ParameterProviderInterface;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.ForwardSensitivity;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.tuple.DoublesPair;
/**
* Data bundle with one equity price and a multi-curve provider.
*/
/**
 * Data bundle pairing a single spot equity price with a multi-curve provider.
 * Immutable except for whatever mutability the wrapped provider exposes;
 * most {@link ParameterProviderInterface} calls are delegated to the curves.
 */
public class EquityTrsDataBundle implements ParameterProviderInterface {
  /** The equity price **/
  private final double _spotEquity;
  // TODO: Should this be replace by a map of LegalEntity/price (to be able to handle several equities in the same object).
  /** The multi-curve provider */
  private final MulticurveProviderInterface _curves;

  /**
   * @param spotEquity
   *          the spot equity price
   * @param curves
   *          discounting curves, not null
   */
  public EquityTrsDataBundle(final double spotEquity, final MulticurveProviderInterface curves) {
    ArgumentChecker.notNull(curves, "curves");
    _spotEquity = spotEquity;
    _curves = curves;
  }

  /**
   * Gets the spot equity price.
   *
   * @return the spot equity price
   */
  public double getSpotEquity() {
    return _spotEquity;
  }

  /**
   * Gets the curves.
   *
   * @return the curves
   */
  public MulticurveProviderInterface getCurves() {
    return _curves;
  }

  @Override
  public int hashCode() {
    // Same algorithm (and therefore same values) as the conventional
    // 31-based accumulation over the curves hash and the double bits.
    final long bits = Double.doubleToLongBits(_spotEquity);
    final int result = 31 + _curves.hashCode();
    return 31 * result + (int) (bits ^ bits >>> 32);
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof EquityTrsDataBundle)) {
      return false;
    }
    final EquityTrsDataBundle other = (EquityTrsDataBundle) obj;
    return Double.compare(_spotEquity, other._spotEquity) == 0
        && ObjectUtils.equals(_curves, other._curves);
  }

  @Override
  public ParameterProviderInterface copy() {
    // Copies the provider; the price is a primitive and needs no copy.
    return new EquityTrsDataBundle(_spotEquity, _curves.copy());
  }

  @Override
  public MulticurveProviderInterface getMulticurveProvider() {
    return _curves.getMulticurveProvider();
  }

  @Override
  public double[] parameterSensitivity(final String name, final List<DoublesPair> pointSensitivity) {
    return _curves.parameterSensitivity(name, pointSensitivity);
  }

  @Override
  public double[] parameterForwardSensitivity(final String name, final List<ForwardSensitivity> pointSensitivity) {
    return _curves.parameterForwardSensitivity(name, pointSensitivity);
  }

  @Override
  public Set<String> getAllCurveNames() {
    return _curves.getAllCurveNames();
  }
}
| apache-2.0 |
open-telemetry/opentelemetry-java | sdk/metrics/src/main/java/io/opentelemetry/sdk/metrics/exemplar/FilteredExemplarReservoir.java | 1299 | /*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.exemplar;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import java.util.List;
/** Implementation of a reservoir that has a pre-filter on measurements. */
class FilteredExemplarReservoir implements ExemplarReservoir {
  // Decides which offered measurements are worth sampling at all.
  private final ExemplarFilter filter;
  // Underlying reservoir that actually stores sampled measurements.
  private final ExemplarReservoir delegate;

  FilteredExemplarReservoir(ExemplarFilter filter, ExemplarReservoir reservoir) {
    this.filter = filter;
    this.delegate = reservoir;
  }

  @Override
  public void offerMeasurement(long value, Attributes attributes, Context context) {
    if (!filter.shouldSampleMeasurement(value, attributes, context)) {
      return; // Filtered out: drop without touching the delegate reservoir.
    }
    delegate.offerMeasurement(value, attributes, context);
  }

  @Override
  public void offerMeasurement(double value, Attributes attributes, Context context) {
    if (!filter.shouldSampleMeasurement(value, attributes, context)) {
      return; // Filtered out: drop without touching the delegate reservoir.
    }
    delegate.offerMeasurement(value, attributes, context);
  }

  @Override
  public List<ExemplarData> collectAndReset(Attributes pointAttributes) {
    // Collection is not filtered: whatever the delegate sampled is returned and reset.
    return delegate.collectAndReset(pointAttributes);
  }
}
| apache-2.0 |
thomasmaurel/ensj-healthcheck | src/org/ensembl/healthcheck/testcase/eg_core/DuplicateObjectXref.java | 2193 | /*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
* Copyright [2016-2018] EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* File: DisplayXrefIdTest.java
* Created by: dstaines
* Created on: May 27, 2009
* CVS: $$
*/
package org.ensembl.healthcheck.testcase.eg_core;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.ReportManager;
/**
 * Test for where xrefs have been added twice to the same Ensembl object.
 * This is acceptable where analysis_id is different e.g. GO terms from 2 pipelines
 * @author dstaines
 *
 */
public class DuplicateObjectXref extends AbstractEgCoreTestCase {

  // Counts groups of object_xref rows that are identical in every distinguishing column
  // (target object, accession, external db, info, ontology linkage) and occur more than once.
  private final static String DUPLICATE_OBJ_XREF = "select count(*) from (select count(*) from xref x join object_xref ox using (xref_id) left outer join ontology_xref ontx using (object_xref_id) group by ox.ensembl_id, ox.ensembl_object_type,x.dbprimary_acc,x.external_db_id,x.info_type,x.info_text,ontx.source_xref_id,ontx.linkage_type having count(*)>1) cc";

  protected boolean runTest(DatabaseRegistryEntry dbre) {
    final int nDupOX = getTemplate(dbre).queryForDefaultObject(DUPLICATE_OBJ_XREF, Integer.class);
    final boolean passes = nDupOX <= 0;
    if (!passes) {
      // Report the offending count together with the query so the failure is reproducible.
      ReportManager.problem(this, dbre.getConnection(), nDupOX + " duplicates found in object_xref: " + DUPLICATE_OBJ_XREF);
    }
    return passes;
  }

  /* (non-Javadoc)
   * @see org.ensembl.healthcheck.testcase.AbstractTemplatedTestCase#getEgDescription()
   */
  @Override
  protected String getEgDescription() {
    return "Test for where object_xrefs have been added twice";
  }
}
| apache-2.0 |
ecarm002/incubator-asterixdb | asterixdb/asterix-app/src/test/java/org/apache/asterix/app/resource/PlanStagesGeneratorTest.java | 17110 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.app.resource;
import static org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag.GROUP;
import static org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag.INNERJOIN;
import static org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag.LEFTOUTERJOIN;
import static org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag.ORDER;
import static org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode.LOCAL;
import static org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode.PARTITIONED;
import static org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode.UNPARTITIONED;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
import org.apache.asterix.utils.ResourceUtils;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.ExternalGroupByPOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.HashPartitionExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.OneToOneExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.PreclusteredGroupByPOperator;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.api.job.resource.IClusterCapacity;
import org.junit.Assert;
import org.junit.Test;
public class PlanStagesGeneratorTest {
private static final Set<LogicalOperatorTag> BLOCKING_OPERATORS =
new HashSet<>(Arrays.asList(INNERJOIN, LEFTOUTERJOIN, ORDER));
private static final long MEMORY_BUDGET = 33554432L;
private static final int FRAME_SIZE = 32768;
private static final int FRAME_LIMIT = (int) (MEMORY_BUDGET / FRAME_SIZE);
private static final int PARALLELISM = 10;
private static final long MAX_BUFFER_PER_CONNECTION = 1L;
@Test
public void noBlockingPlan() throws AlgebricksException {
EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
ets.setExecutionMode(UNPARTITIONED);
AssignOperator assignOperator = new AssignOperator(Collections.emptyList(), null);
assignOperator.setExecutionMode(UNPARTITIONED);
assignOperator.getInputs().add(new MutableObject<>(ets));
ExchangeOperator exchange = new ExchangeOperator();
exchange.setExecutionMode(UNPARTITIONED);
exchange.setPhysicalOperator(new OneToOneExchangePOperator());
exchange.getInputs().add(new MutableObject<>(assignOperator));
DistributeResultOperator resultOperator = new DistributeResultOperator(null, null);
resultOperator.setExecutionMode(UNPARTITIONED);
resultOperator.getInputs().add(new MutableObject<>(exchange));
ALogicalPlanImpl plan = new ALogicalPlanImpl(Collections.singletonList(new MutableObject(resultOperator)));
List<PlanStage> stages = ResourceUtils.getStages(plan);
// ensure a single stage plan
final int expectedStages = 1;
Assert.assertEquals(expectedStages, stages.size());
validateStages(stages, resultOperator, exchange, ets, assignOperator);
// frame size for every operator
final long expectedMemory = stages.get(0).getOperators().size() * FRAME_SIZE;
assertRequiredMemory(stages, expectedMemory);
}
@Test
public void testNonBlockingGroupByOrderBy() throws AlgebricksException {
EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
ets.setExecutionMode(PARTITIONED);
DataSourceScanOperator scanOperator = new DataSourceScanOperator(Collections.emptyList(), null);
scanOperator.setExecutionMode(PARTITIONED);
scanOperator.getInputs().add(new MutableObject<>(ets));
ExchangeOperator exchange = new ExchangeOperator();
exchange.setExecutionMode(PARTITIONED);
exchange.setPhysicalOperator(new OneToOneExchangePOperator());
exchange.getInputs().add(new MutableObject<>(scanOperator));
GroupByOperator groupByOperator = new GroupByOperator();
groupByOperator.setExecutionMode(PARTITIONED);
groupByOperator
.setPhysicalOperator(new PreclusteredGroupByPOperator(Collections.emptyList(), true, FRAME_LIMIT));
groupByOperator.getInputs().add(new MutableObject<>(exchange));
OrderOperator orderOperator = new OrderOperator();
orderOperator.setExecutionMode(PARTITIONED);
orderOperator.getInputs().add(new MutableObject<>(groupByOperator));
DistributeResultOperator resultOperator = new DistributeResultOperator(null, null);
resultOperator.setExecutionMode(PARTITIONED);
resultOperator.getInputs().add(new MutableObject<>(orderOperator));
ALogicalPlanImpl plan = new ALogicalPlanImpl(Collections.singletonList(new MutableObject(resultOperator)));
final List<PlanStage> stages = ResourceUtils.getStages(plan);
validateStages(stages, ets, exchange, groupByOperator, orderOperator, resultOperator);
// ensure 3 stage (root to order, order to group by, group by to ets)
final int expectedStages = 2;
Assert.assertEquals(expectedStages, stages.size());
// dominating stage should have orderBy, orderBy's input (groupby), groupby's input (exchange),
// exchange's input (scanOperator), and scanOperator's input (ets)
long orderOperatorRequiredMemory = FRAME_LIMIT * FRAME_SIZE * PARALLELISM;
long groupByOperatorRequiredMemory = FRAME_LIMIT * FRAME_SIZE * PARALLELISM;
long exchangeRequiredMemory = PARALLELISM * FRAME_SIZE;
long scanOperatorRequiredMemory = PARALLELISM * FRAME_SIZE;
long etsRequiredMemory = FRAME_SIZE * PARALLELISM;
final long expectedMemory = orderOperatorRequiredMemory + groupByOperatorRequiredMemory + exchangeRequiredMemory
+ scanOperatorRequiredMemory + etsRequiredMemory;
assertRequiredMemory(stages, expectedMemory);
}
@Test
public void testJoinGroupby() throws AlgebricksException {
EmptyTupleSourceOperator ets1 = new EmptyTupleSourceOperator();
ets1.setExecutionMode(PARTITIONED);
DataSourceScanOperator scanOperator1 = new DataSourceScanOperator(Collections.emptyList(), null);
scanOperator1.setExecutionMode(PARTITIONED);
scanOperator1.getInputs().add(new MutableObject<>(ets1));
EmptyTupleSourceOperator ets2 = new EmptyTupleSourceOperator();
ets1.setExecutionMode(PARTITIONED);
DataSourceScanOperator scanOperator2 = new DataSourceScanOperator(Collections.emptyList(), null);
scanOperator2.setExecutionMode(PARTITIONED);
scanOperator2.getInputs().add(new MutableObject<>(ets2));
InnerJoinOperator firstJoin = new InnerJoinOperator(new MutableObject<>(ConstantExpression.TRUE));
firstJoin.setExecutionMode(PARTITIONED);
firstJoin.getInputs().add(new MutableObject<>(scanOperator1));
firstJoin.getInputs().add(new MutableObject<>(scanOperator2));
ExchangeOperator exchangeOperator1 = new ExchangeOperator();
exchangeOperator1.setExecutionMode(PARTITIONED);
exchangeOperator1.setPhysicalOperator(new HashPartitionExchangePOperator(Collections.emptyList(), null));
exchangeOperator1.getInputs().add(new MutableObject<>(firstJoin));
EmptyTupleSourceOperator ets3 = new EmptyTupleSourceOperator();
ets1.setExecutionMode(PARTITIONED);
GroupByOperator groupByOperator = new GroupByOperator();
groupByOperator
.setPhysicalOperator(new ExternalGroupByPOperator(Collections.emptyList(), FRAME_LIMIT, FRAME_LIMIT));
groupByOperator.setExecutionMode(LOCAL);
groupByOperator.getInputs().add(new MutableObject<>(ets3));
ExchangeOperator exchangeOperator2 = new ExchangeOperator();
exchangeOperator2.setExecutionMode(PARTITIONED);
exchangeOperator2.setPhysicalOperator(new HashPartitionExchangePOperator(Collections.emptyList(), null));
exchangeOperator2.getInputs().add(new MutableObject<>(groupByOperator));
LeftOuterJoinOperator secondJoin = new LeftOuterJoinOperator(new MutableObject<>(ConstantExpression.TRUE));
secondJoin.setExecutionMode(PARTITIONED);
secondJoin.getInputs().add(new MutableObject<>(exchangeOperator1));
secondJoin.getInputs().add(new MutableObject<>(exchangeOperator2));
DistributeResultOperator resultOperator = new DistributeResultOperator(null, null);
resultOperator.setExecutionMode(PARTITIONED);
resultOperator.getInputs().add(new MutableObject<>(secondJoin));
ALogicalPlanImpl plan = new ALogicalPlanImpl(Collections.singletonList(new MutableObject(resultOperator)));
List<PlanStage> stages = ResourceUtils.getStages(plan);
final int expectedStages = 4;
Assert.assertEquals(expectedStages, stages.size());
validateStages(stages, ets1, scanOperator1, ets2, scanOperator2, firstJoin, exchangeOperator1, ets3,
groupByOperator, exchangeOperator2, secondJoin, resultOperator);
// dominating stage should have the following operators:
// resultOperator, its input (secondJoin), secondJoin's first input (exchangeOperator1), exchangeOperator1's
// input (firstJoin), firstJoin's first input (scanOperator1), and scanOperator1's input (ets1)
long resultOperatorRequiredMemory = FRAME_SIZE * PARALLELISM;
long secondJoinRequiredMemory = FRAME_LIMIT * FRAME_SIZE * PARALLELISM;
long exchangeOperator1RequiredMemory = 2 * MAX_BUFFER_PER_CONNECTION * PARALLELISM * PARALLELISM * FRAME_SIZE;
long firstJoinRequiredMemory = FRAME_LIMIT * FRAME_SIZE * PARALLELISM;
long scanOperator1RequiredMemory = FRAME_SIZE * PARALLELISM;
long ets1RequiredMemory = FRAME_SIZE * PARALLELISM;
long expectedMemory = resultOperatorRequiredMemory + secondJoinRequiredMemory + exchangeOperator1RequiredMemory
+ firstJoinRequiredMemory + scanOperator1RequiredMemory + ets1RequiredMemory;
assertRequiredMemory(stages, expectedMemory);
}
@Test
public void testReplicateSortJoin() throws AlgebricksException {
EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
ets.setExecutionMode(PARTITIONED);
DataSourceScanOperator scanOperator = new DataSourceScanOperator(Collections.emptyList(), null);
scanOperator.setExecutionMode(PARTITIONED);
scanOperator.getInputs().add(new MutableObject<>(ets));
ReplicateOperator replicateOperator = new ReplicateOperator(2);
replicateOperator.setExecutionMode(PARTITIONED);
replicateOperator.getInputs().add(new MutableObject<>(scanOperator));
OrderOperator order1 = new OrderOperator();
order1.setExecutionMode(PARTITIONED);
order1.setPhysicalOperator(new OneToOneExchangePOperator());
order1.getInputs().add(new MutableObject<>(replicateOperator));
OrderOperator order2 = new OrderOperator();
order2.setExecutionMode(PARTITIONED);
order2.setPhysicalOperator(new OneToOneExchangePOperator());
order2.getInputs().add(new MutableObject<>(replicateOperator));
LeftOuterJoinOperator secondJoin = new LeftOuterJoinOperator(new MutableObject<>(ConstantExpression.TRUE));
secondJoin.setExecutionMode(PARTITIONED);
secondJoin.getInputs().add(new MutableObject<>(order1));
secondJoin.getInputs().add(new MutableObject<>(order2));
DistributeResultOperator resultOperator = new DistributeResultOperator(null, null);
resultOperator.setExecutionMode(PARTITIONED);
resultOperator.getInputs().add(new MutableObject<>(secondJoin));
ALogicalPlanImpl plan = new ALogicalPlanImpl(Collections.singletonList(new MutableObject(resultOperator)));
List<PlanStage> stages = ResourceUtils.getStages(plan);
final int expectedStages = 3;
Assert.assertEquals(expectedStages, stages.size());
validateStages(stages);
// dominating stage should have the following operators:
// secondJoin, secondJoin's second input (order2), order2's input (replicate),
// replicate's input (scanOperator), scanOperator's input (ets)
long secondJoinRequiredMemory = FRAME_LIMIT * FRAME_SIZE * PARALLELISM;
long order2RequiredMemory = FRAME_LIMIT * FRAME_SIZE * PARALLELISM;
long replicateOperatorRequiredMemory = FRAME_SIZE * PARALLELISM;
long scanOperator1RequiredMemory = FRAME_SIZE * PARALLELISM;
long etsRequiredMemory = FRAME_SIZE * PARALLELISM;
long expectedMemory = secondJoinRequiredMemory + order2RequiredMemory + replicateOperatorRequiredMemory
+ scanOperator1RequiredMemory + etsRequiredMemory;
assertRequiredMemory(stages, expectedMemory);
}
private void validateStages(List<PlanStage> stages, ILogicalOperator... operators) {
// ensure all operators appear
Stream.of(operators).forEach(op -> ensureOperatorExists(stages, op));
// ensure the correct count
for (PlanStage stage : stages) {
stage.getOperators().forEach(op -> validateOperatorStages(stages, op));
}
}
private void ensureOperatorExists(List<PlanStage> stages, ILogicalOperator operator) {
final long actual = stages.stream().map(PlanStage::getOperators).filter(op -> op.contains(operator)).count();
Assert.assertTrue(actual > 0);
}
private void validateOperatorStages(List<PlanStage> stages, ILogicalOperator operator) {
if (stages.size() == 1) {
return;
}
long expectedAppearances = BLOCKING_OPERATORS.contains(operator.getOperatorTag()) ? 2 : 1;
if (operator.getOperatorTag() == GROUP) {
GroupByOperator groupByOperator = (GroupByOperator) operator;
if (groupByOperator.getPhysicalOperator().getOperatorTag() == PhysicalOperatorTag.EXTERNAL_GROUP_BY
|| groupByOperator.getPhysicalOperator().getOperatorTag() == PhysicalOperatorTag.SORT_GROUP_BY) {
expectedAppearances = 2;
}
}
final long actual = stages.stream().map(PlanStage::getOperators).filter(op -> op.contains(operator)).count();
Assert.assertEquals(expectedAppearances, actual);
}
private void assertRequiredMemory(List<PlanStage> stages, long expectedMemory) {
final IClusterCapacity clusterCapacity = ResourceUtils.getStageBasedRequiredCapacity(stages, PARALLELISM,
FRAME_LIMIT, FRAME_LIMIT, FRAME_LIMIT, FRAME_LIMIT, FRAME_SIZE);
Assert.assertEquals(clusterCapacity.getAggregatedMemoryByteSize(), expectedMemory);
}
}
| apache-2.0 |
stevenhva/InfoLearn_OpenOLAT | src/main/java/org/olat/NewControllerFactory.java | 9812 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
* <p>
* Initial code contributed and copyrighted by<br>
* JGS goodsolutions GmbH, http://www.goodsolutions.ch
* <p>
*/
package org.olat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.olat.core.gui.UserRequest;
import org.olat.core.gui.Windows;
import org.olat.core.gui.components.Window;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.dtabs.DTab;
import org.olat.core.gui.control.generic.dtabs.DTabs;
import org.olat.core.id.OLATResourceable;
import org.olat.core.id.context.BusinessControl;
import org.olat.core.id.context.BusinessControlFactory;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.ContextEntryControllerCreator;
import org.olat.core.id.context.TabContext;
import org.olat.core.logging.AssertException;
import org.olat.core.logging.LogDelegator;
import org.olat.core.util.UserSession;
import org.olat.core.util.resource.OresHelper;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryManager;
/**
* Description:<br>
* input: e.g. [repoentry:123] or [repoentry:123][CourseNode:456] or ...
*
*
* <P>
* Initial Date: 16.06.2006 <br>
*
* @author Felix Jost
*/
public class NewControllerFactory extends LogDelegator {
private static NewControllerFactory INSTANCE = new NewControllerFactory();
// map of controller creators, setted by Spring configuration
private Map<String, ContextEntryControllerCreator> contextEntryControllerCreators = new HashMap<String, ContextEntryControllerCreator>();
/**
* Get an instance of the new controller factory
*
* @return
*/
public static NewControllerFactory getInstance() {
return INSTANCE;
}
/**
* Singleton constructor
*/
private NewControllerFactory() {
//
}
/**
* Add a context entry controller creator for a specific key. This is used to
* add new creators at runtime, e.g. from a self contained module. It is
* allowed to overwrite existing ContextEntryControllerCreator. Use the
* canLaunch() method to check if for a certain key something is already
* defined.
*
* @param key
* @param controllerCreator
*/
public synchronized void addContextEntryControllerCreator(String key, ContextEntryControllerCreator controllerCreator) {
ContextEntryControllerCreator oldCreator = contextEntryControllerCreators.get(key);
contextEntryControllerCreators.put(key, controllerCreator);
// Add config logging to console
logInfo("Adding context entry controller creator for key::" + key + " and value::" + controllerCreator.getClass().getCanonicalName()
+ (oldCreator == null ? "" : " replaceing existing controller creator ::" + oldCreator.getClass().getCanonicalName()), null);
}
/**
* Check if a context entry controller creator is available for the given key
*
* @param key
* @return true: key is known; false: key can not be used
*/
public boolean canLaunch(String key) {
return contextEntryControllerCreators.containsKey(key);
}
/**
* Check first context entry can be launched
* a further check is mostly not possible, as it gets validated through the BC-stack while building the controller-chain
*
* return true: if this will be launchable at least for the first step.
*/
public boolean validateCEWithContextControllerCreator(final UserRequest ureq, final WindowControl wControl, ContextEntry ce){
String firstType = ce.getOLATResourceable().getResourceableTypeName();
if (canLaunch(firstType)){
return contextEntryControllerCreators.get(firstType).validateContextEntryAndShowError(ce, ureq, wControl);
}
return false;
}
/**
* Launch a controller in a tab or a site with the business path
* @param businessPath
* @param ureq
* @param origControl
*/
public boolean launch(String businessPath, UserRequest ureq, WindowControl origControl) {
BusinessControl bc = BusinessControlFactory.getInstance().createFromString(businessPath);
WindowControl bwControl = BusinessControlFactory.getInstance().createBusinessWindowControl(bc, origControl);
return launch(ureq, bwControl);
}
private ContextEntryControllerCreator getContextEntryControllerCreator(String type) {
ContextEntryControllerCreator typeHandler = contextEntryControllerCreators.get(type);
if(typeHandler != null) {
return typeHandler.clone();
}
return null;
}
/**
* Launch a controller in a tab or site in the given window from a user
* request url
*
* @param ureq
* @param wControl
*/
public boolean launch(UserRequest ureq, WindowControl wControl) {
BusinessControl bc = wControl.getBusinessControl();
ContextEntry mainCe = bc.popLauncherContextEntry();
OLATResourceable ores = mainCe.getOLATResourceable();
// Check for RepositoryEntry resource
boolean ceConsumed = false;
RepositoryEntry re = null;
if (ores.getResourceableTypeName().equals(OresHelper.calculateTypeName(RepositoryEntry.class))) {
if(ores instanceof RepositoryEntry) {
re = (RepositoryEntry)ores;
ores = re.getOlatResource();
ceConsumed = true;
} else {
// It is a repository-entry => get OLATResourceable from RepositoryEntry
RepositoryManager repom = RepositoryManager.getInstance();
re = repom.lookupRepositoryEntry(ores.getResourceableId());
if (re != null){
ores = re.getOlatResource();
ceConsumed = true;
mainCe.upgradeOLATResourceable(re);
}
}
}
// was brasato:: DTabs dts = wControl.getDTabs();
UserSession usess = ureq.getUserSession();
Window window = Windows.getWindows(usess).getWindow(ureq);
if (window == null) {
logDebug("Found no window for jumpin => take WindowBackOffice", null);
window = wControl.getWindowBackOffice().getWindow();
}
DTabs dts = window.getDTabs();
DTab dt = dts.getDTab(ores);
if (dt != null) {
// tab already open => close it
dts.removeDTab(ureq, dt);// disposes also dt and controllers
}
String firstType = mainCe.getOLATResourceable().getResourceableTypeName();
// String firstTypeId = ClassToId.getInstance().lookup() BusinessGroup
ContextEntryControllerCreator typeHandler = getContextEntryControllerCreator(firstType);
if (typeHandler == null) {
logWarn("Cannot found an handler for context entry: " + mainCe, null);
return false;//simply return and don't throw a red screen
}
if (!typeHandler.validateContextEntryAndShowError(mainCe, ureq, wControl)){
//simply return and don't throw a red screen
return false;
}
//fxdiff BAKS-7 Resume function
String siteClassName = typeHandler.getSiteClassName(mainCe, ureq);
// open in existing site
if (siteClassName != null) {
// use special activation key to trigger the activate method
//fxdiff BAKS-7 Resume function
List<ContextEntry> entries = new ArrayList<ContextEntry>();
if (bc.hasContextEntry()) {
ContextEntry subContext = bc.popLauncherContextEntry();
if (subContext != null) {
entries.add(subContext);
while(bc.hasContextEntry()) {
entries.add(bc.popLauncherContextEntry());
}
}
} else if (!ceConsumed) {
//the olatresourceable is not in a dynamic tab but in a fix one
if(ores != null) {
entries.add(BusinessControlFactory.getInstance().createContextEntry(ores));
}
}
TabContext context = typeHandler.getTabContext(ureq, ores, mainCe, entries);
dts.activateStatic(ureq, siteClassName, context.getContext());
return true;
} else {
List<ContextEntry> entries = new ArrayList<ContextEntry>();
while(bc.hasContextEntry()) {
entries.add(bc.popLauncherContextEntry());
}
TabContext context = typeHandler.getTabContext(ureq, ores, mainCe, entries);
// or create new tab
//String tabName = typeHandler.getTabName(mainCe, ureq);
// create and add Tab
dt = dts.createDTab(context.getTabResource(), re, context.getName());
if (dt == null) {
// user error message is generated in BaseFullWebappController, nothing to do here
return false;
} else {
WindowControl bwControl = BusinessControlFactory.getInstance().createBusinessWindowControl(bc, dt.getWindowControl());
usess.addToHistory(ureq, bc);
Controller launchC = typeHandler.createController(mainCe, ureq, bwControl);
if (launchC == null) {
throw new AssertException("ControllerFactory could not create a controller to be launched. Please validate businesspath "
+ bc.getAsString() + " for type " + typeHandler.getClass().getName() + " in advance with validateContextEntryAndShowError().");
}
dt.setController(launchC);
if(dts.addDTab(ureq, dt)) {
dts.activate(ureq, dt, context.getContext()); // null: do not activate to a certain view
return true;
} else {
return false;
}
}
}
}
} | apache-2.0 |
datalayer/zeppelin-R | zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java | 5972 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.lang.NullArgumentException;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
import org.apache.zeppelin.dep.Dependency;
import org.apache.zeppelin.dep.DependencyResolver;
import org.apache.zeppelin.interpreter.mock.MockInterpreter1;
import org.apache.zeppelin.interpreter.mock.MockInterpreter2;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.sonatype.aether.RepositoryException;
public class InterpreterFactoryTest {
  // Factory under test, rebuilt for every test method in setUp().
  private InterpreterFactory factory;
  // Per-test scratch directory acting as ZEPPELIN_HOME; removed in tearDown().
  private File tmpDir;
  private ZeppelinConfiguration conf;
  private InterpreterContext context;
  private DependencyResolver depResolver;

  /**
   * Creates a throw-away ZEPPELIN_HOME, registers the two mock interpreters and builds a
   * fresh factory.
   * NOTE(review): the System.setProperty() calls are never restored, so they leak into
   * subsequent test classes in the same JVM — consider saving/restoring in tearDown.
   */
  @Before
  public void setUp() throws Exception {
    tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
    tmpDir.mkdirs();
    new File(tmpDir, "conf").mkdirs();
    MockInterpreter1.register("mock1", "org.apache.zeppelin.interpreter.mock.MockInterpreter1");
    MockInterpreter2.register("mock2", "org.apache.zeppelin.interpreter.mock.MockInterpreter2");
    System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), tmpDir.getAbsolutePath());
    System.setProperty(ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1,org.apache.zeppelin.interpreter.mock.MockInterpreter2");
    conf = new ZeppelinConfiguration();
    depResolver = new DependencyResolver(tmpDir.getAbsolutePath() + "/local-repo");
    factory = new InterpreterFactory(conf, new InterpreterOption(false), null, null, depResolver);
    context = new InterpreterContext("note", "id", "title", "text", null, null, null, null, null, null);
  }

  /** Removes the scratch ZEPPELIN_HOME created in setUp(). */
  @After
  public void tearDown() throws Exception {
    delete(tmpDir);
  }

  // Recursively deletes a file or directory tree; silently ignores deletion failures.
  private void delete(File file){
    if(file.isFile()) file.delete();
    else if(file.isDirectory()){
      File [] files = file.listFiles();
      if(files!=null && files.length>0){
        for(File f : files){
          delete(f);
        }
      }
      file.delete();
    }
  }

  /**
   * Interpreters are opened lazily on first use, unknown ids resolve to null, and a
   * restart returns the interpreter to the unopened state.
   */
  @Test
  public void testBasic() {
    List<String> all = factory.getDefaultInterpreterSettingList();
    // get interpreter
    Interpreter repl1 = factory.get(all.get(0)).getInterpreterGroup().getFirst();
    assertFalse(((LazyOpenInterpreter) repl1).isOpen());
    repl1.interpret("repl1", context);
    assertTrue(((LazyOpenInterpreter) repl1).isOpen());

    // try to get unavailable interpreter
    assertNull(factory.get("unknown"));

    // restart interpreter
    factory.restart(all.get(0));
    repl1 = factory.get(all.get(0)).getInterpreterGroup().getFirst();
    assertFalse(((LazyOpenInterpreter) repl1).isOpen());
  }

  /**
   * The default setting list reflects registration order, and adding a setting replaces
   * the second default entry's name with the user-provided one.
   */
  @Test
  public void testFactoryDefaultList() throws IOException, RepositoryException {
    // get default list from default setting
    List<String> all = factory.getDefaultInterpreterSettingList();
    assertEquals(2, all.size());
    assertEquals(factory.get(all.get(0)).getInterpreterGroup().getFirst().getClassName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1");

    // add setting
    factory.add("a mock", "mock2", new LinkedList<Dependency>(), new InterpreterOption(false), new Properties());
    all = factory.getDefaultInterpreterSettingList();
    assertEquals(2, all.size());
    assertEquals("mock1", factory.get(all.get(0)).getName());
    assertEquals("a mock", factory.get(all.get(1)).getName());
  }

  /**
   * Null option/properties passed to add() must raise NullArgumentException.
   * NOTE(review): if no exception is thrown the assertions are skipped and the test
   * silently passes — a fail() after each add() call would make the expectation explicit.
   */
  @Test
  public void testExceptions() throws InterpreterException, IOException, RepositoryException {
    List<String> all = factory.getDefaultInterpreterSettingList();
    // add setting with null option & properties expected nullArgumentException.class
    try {
      factory.add("a mock", "mock2", new LinkedList<Dependency>(), null, new Properties());
    } catch(NullArgumentException e) {
      assertEquals("Test null option" , e.getMessage(),new NullArgumentException("option").getMessage());
    }
    try {
      factory.add("a mock", "mock2", new LinkedList<Dependency>(), new InterpreterOption(false), null);
    } catch (NullArgumentException e){
      assertEquals("Test null properties" , e.getMessage(),new NullArgumentException("properties").getMessage());
    }
  }

  /**
   * Settings are persisted to disk on add and can be reloaded by a second factory
   * instance reading the same configuration.
   */
  @Test
  public void testSaveLoad() throws IOException, RepositoryException {
    // interpreter settings
    assertEquals(2, factory.get().size());

    // check if file saved
    assertTrue(new File(conf.getInterpreterSettingPath()).exists());

    factory.add("newsetting", "mock1", new LinkedList<Dependency>(), new InterpreterOption(false), new Properties());
    assertEquals(3, factory.get().size());

    InterpreterFactory factory2 = new InterpreterFactory(conf, null, null, null, depResolver);
    assertEquals(3, factory2.get().size());
  }
}
| apache-2.0 |
Leao/CodeColors | codecolors-core/src/main/java/io/leao/codecolors/core/editor/CcMultiEditorSet.java | 931 | package io.leao.codecolors.core.editor;
import java.util.Set;
import io.leao.codecolors.core.CcCore;
import io.leao.codecolors.core.color.CcColorStateList;
import io.leao.codecolors.core.util.CcTempUtils;
/**
 * Multi-color editor that applies every queued {@link CcEditor} in one pass
 * and invalidates only the colors whose state actually changed.
 */
public class CcMultiEditorSet extends CcMultiEditor<CcMultiEditorSet> {

    /**
     * Applies each pending editor to its color (looked up by resource id) and,
     * if any color changed, triggers a single invalidation for all of them.
     */
    public void submit() {
        // Temporary set borrowed from a pool; recycled at the end of the method.
        Set<CcColorStateList> changedColors = CcTempUtils.getColorSet();
        for (int colorResId : mEditors.keySet()) {
            CcColorStateList color = CcCore.getColorManager().getColor(colorResId);
            // Unknown resource ids are silently skipped.
            if (color != null) {
                CcEditor editor = mEditors.get(colorResId);
                // set() reports whether the editor actually modified the color.
                boolean changed = color.set(editor);
                if (changed) {
                    changedColors.add(color);
                }
            }
        }
        if (changedColors.size() > 0) {
            invalidate(changedColors);
        }
        CcTempUtils.recycleColorSet(changedColors);
    }
}
gaapt/deepdive | mln/src/tuffy/util/MathMan.java | 366 | package tuffy.util;
/** Small math helpers used by the MLN grounding code. */
public class MathMan {

    /** Scales {@code total} by {@code ratio}, truncating toward zero. */
    public static int prorate(int total, double ratio) {
        return (int) (total * ratio);
    }

    /**
     * Converts a probability into a log-odds weight.
     * Probabilities outside [0, 1] map to 0; the endpoints 0 and 1 map to
     * minus/plus the configured hard-clause weight.
     */
    public static double getLogOddsWeight(double prob) {
        if (prob < 0 || prob > 1) {
            return 0;
        }
        if (prob == 0) {
            return -Config.hard_weight;
        }
        if (prob == 1) {
            return Config.hard_weight;
        }
        double odds = prob / (1 - prob);
        return Math.log(odds);
    }
}
| apache-2.0 |
wolfdog007/aruzhev | tracker/src/main/java/ru/job4j/start/StubInput.java | 1290 | package ru.job4j.start;
/**
* Implements stub class with users input.
*
* @author Ruzhev Alexander
* @since 17.04.2017
*/
public class StubInput implements Input {

    /** Canned user answers, handed out in order. */
    private String[] answers;

    /** Index of the next answer to return. */
    private int position = 0;

    /**
     * Constructor.
     *
     * @param answers array of canned answers.
     */
    public StubInput(String[] answers) {
        this.answers = answers;
    }

    /**
     * Returns the next canned answer; the question itself is ignored.
     *
     * @param question question shown to the user
     * @return the next canned answer
     */
    public String ask(String question) {
        String answer = this.answers[this.position];
        this.position++;
        return answer;
    }

    /**
     * Returns the next canned answer parsed as a menu key, validating it
     * against the allowed range.
     *
     * @param question question shown to the user
     * @param range allowed menu keys
     * @return the chosen key if it is inside {@code range}
     * @throws MenuOutException if the answer is not in {@code range}
     */
    public int ask(String question, int[] range) {
        int key = Integer.valueOf(this.ask(question));
        for (int value : range) {
            if (value == key) {
                return key;
            }
        }
        throw new MenuOutException("Out of menu range.");
    }
}
| apache-2.0 |
haikuowuya/android_system_code | src/com/sun/org/apache/xml/internal/serializer/utils/SerializerMessages_zh_CN.java | 4957 | /*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: SerializerMessages_zh_CN.java,v 1.1.4.1 2005/09/08 11:03:18 suresh_emailid Exp $
*/
package com.sun.org.apache.xml.internal.serializer.utils;
import java.util.ListResourceBundle;
/**
 * Simplified Chinese (zh_CN) translations of the XML serializer error
 * messages, keyed by the constants in {@code MsgKey}. Entries still marked
 * "needs translation" are intentionally absent and fall back to the base
 * bundle. The escaped string values are runtime data and must not be edited.
 */
public class SerializerMessages_zh_CN extends ListResourceBundle {

  /** Returns the {key, localized message} pairs for this bundle. */
  public Object[][] getContents() {
    Object[][] contents = new Object[][] {
      // BAD_MSGKEY needs translation
      // BAD_MSGFORMAT needs translation
      { MsgKey.ER_SERIALIZER_NOT_CONTENTHANDLER,
        "\u4e32\u884c\u5668\u7c7b\u201c{0}\u201d\u4e0d\u5b9e\u73b0 org.xml.sax.ContentHandler."},
      { MsgKey.ER_RESOURCE_COULD_NOT_FIND,
        "\u627e\u4e0d\u5230\u8d44\u6e90 [ {0} ]\u3002\n {1}"},
      { MsgKey.ER_RESOURCE_COULD_NOT_LOAD,
        "\u8d44\u6e90 [ {0} ] \u65e0\u6cd5\u88c5\u5165\uff1a{1} \n {2} \n {3}"},
      { MsgKey.ER_BUFFER_SIZE_LESSTHAN_ZERO,
        "\u7f13\u51b2\u533a\u5927\u5c0f <=0"},
      { MsgKey.ER_INVALID_UTF16_SURROGATE,
        "\u68c0\u6d4b\u5230\u65e0\u6548\u7684 UTF-16 \u66ff\u4ee3\u8005\uff1a{0}\uff1f"},
      { MsgKey.ER_OIERROR,
        "IO \u9519\u8bef"},
      { MsgKey.ER_ILLEGAL_ATTRIBUTE_POSITION,
        "\u5728\u751f\u6210\u5b50\u8282\u70b9\u4e4b\u540e\u6216\u5728\u751f\u6210\u5143\u7d20\u4e4b\u524d\u65e0\u6cd5\u6dfb\u52a0\u5c5e\u6027 {0}\u3002\u5c06\u5ffd\u7565\u5c5e\u6027\u3002"},
      { MsgKey.ER_NAMESPACE_PREFIX,
        "\u6ca1\u6709\u8bf4\u660e\u540d\u79f0\u7a7a\u95f4\u524d\u7f00\u201c{0}\u201d\u3002"},
      // ER_STRAY_ATTRIBUTE needs translation
      { MsgKey.ER_STRAY_NAMESPACE,
        "\u540d\u79f0\u7a7a\u95f4\u8bf4\u660e\u201c{0}\u201d=\u201c{1}\u201d\u5728\u5143\u7d20\u5916\u3002"},
      { MsgKey.ER_COULD_NOT_LOAD_RESOURCE,
        "\u65e0\u6cd5\u88c5\u5165\u201c{0}\u201d\uff08\u68c0\u67e5 CLASSPATH\uff09\uff0c\u73b0\u5728\u53ea\u4f7f\u7528\u7f3a\u7701\u503c"},
      // ER_ILLEGAL_CHARACTER needs translation
      { MsgKey.ER_COULD_NOT_LOAD_METHOD_PROPERTY,
        "\u65e0\u6cd5\u4e3a\u8f93\u51fa\u65b9\u6cd5\u201c{1}\u201d\u88c5\u8f7d\u5c5e\u6027\u6587\u4ef6\u201c{0}\u201d\uff08\u68c0\u67e5 CLASSPATH\uff09"},
      { MsgKey.ER_INVALID_PORT,
        "\u65e0\u6548\u7684\u7aef\u53e3\u53f7"},
      { MsgKey.ER_PORT_WHEN_HOST_NULL,
        "\u4e3b\u673a\u4e3a\u7a7a\u65f6\uff0c\u65e0\u6cd5\u8bbe\u7f6e\u7aef\u53e3"},
      { MsgKey.ER_HOST_ADDRESS_NOT_WELLFORMED,
        "\u4e3b\u673a\u4e0d\u662f\u683c\u5f0f\u826f\u597d\u7684\u5730\u5740"},
      { MsgKey.ER_SCHEME_NOT_CONFORMANT,
        "\u6a21\u5f0f\u4e0d\u4e00\u81f4\u3002"},
      { MsgKey.ER_SCHEME_FROM_NULL_STRING,
        "\u65e0\u6cd5\u4ece\u7a7a\u5b57\u7b26\u4e32\u8bbe\u7f6e\u6a21\u5f0f"},
      { MsgKey.ER_PATH_CONTAINS_INVALID_ESCAPE_SEQUENCE,
        "\u8def\u5f84\u5305\u542b\u65e0\u6548\u7684\u8f6c\u4e49\u5e8f\u5217"},
      { MsgKey.ER_PATH_INVALID_CHAR,
        "\u8def\u5f84\u5305\u542b\u975e\u6cd5\u5b57\u7b26\uff1a{0}"},
      { MsgKey.ER_FRAG_INVALID_CHAR,
        "\u7247\u6bb5\u5305\u542b\u65e0\u6548\u7684\u5b57\u7b26"},
      { MsgKey.ER_FRAG_WHEN_PATH_NULL,
        "\u8def\u5f84\u4e3a\u7a7a\u65f6\uff0c\u65e0\u6cd5\u8bbe\u7f6e\u7247\u6bb5"},
      { MsgKey.ER_FRAG_FOR_GENERIC_URI,
        "\u53ea\u80fd\u4e3a\u4e00\u822c URI \u8bbe\u7f6e\u7247\u6bb5"},
      { MsgKey.ER_NO_SCHEME_IN_URI,
        "\u5728 URI \u4e2d\u627e\u4e0d\u5230\u6a21\u5f0f\uff1a{0}"},
      { MsgKey.ER_CANNOT_INIT_URI_EMPTY_PARMS,
        "\u65e0\u6cd5\u4ee5\u7a7a\u53c2\u6570\u521d\u59cb\u5316 URI"},
      { MsgKey.ER_NO_FRAGMENT_STRING_IN_PATH,
        "\u8def\u5f84\u548c\u7247\u6bb5\u4e2d\u90fd\u65e0\u6cd5\u6307\u5b9a\u7247\u6bb5"},
      { MsgKey.ER_NO_QUERY_STRING_IN_PATH,
        "\u8def\u5f84\u548c\u67e5\u8be2\u5b57\u7b26\u4e32\u4e2d\u4e0d\u80fd\u6307\u5b9a\u67e5\u8be2\u5b57\u7b26\u4e32"},
      { MsgKey.ER_NO_PORT_IF_NO_HOST,
        "\u5982\u679c\u6ca1\u6709\u6307\u5b9a\u4e3b\u673a\uff0c\u5219\u4e0d\u53ef\u4ee5\u6307\u5b9a\u7aef\u53e3"},
      { MsgKey.ER_NO_USERINFO_IF_NO_HOST,
        "\u5982\u679c\u6ca1\u6709\u6307\u5b9a\u4e3b\u673a\uff0c\u5219\u4e0d\u53ef\u4ee5\u6307\u5b9a Userinfo"},
      { MsgKey.ER_SCHEME_REQUIRED,
        "\u6a21\u5f0f\u662f\u5fc5\u9700\u7684\uff01"}
    };
    return contents;
  }
}
| apache-2.0 |
atmelino/JATexperimental | src/jat/application/missionPlan/MissionPlanEvents.java | 8358 | /* JAT: Java Astrodynamics Toolkit
*
Copyright 2012 Tobias Berthold
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package jat.application.missionPlan;
import jat.core.cm.TwoBodyAPL;
import jat.core.ephemeris.DE405Body.body;
import jat.core.ephemeris.DE405Plus;
import jat.core.spacetime.TimeAPL;
import jat.core.util.jatMessages;
import jat.coreNOSA.cm.Constants;
import jat.coreNOSA.cm.Lambert;
import jat.coreNOSA.cm.LambertException;
import jat.coreNOSA.spacetime.CalDate;
import jat.jat3D.Colors;
import jat.jat3D.Sphere3D;
import jat.jat3D.TwoBodyOrbit3D;
import jat.jat3D.behavior.jat_Rotate;
import jat.jat3D.plot3D.Rainbow3f;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.IOException;
import java.util.Calendar;
import javax.swing.JOptionPane;
import javax.swing.Timer;
import javax.vecmath.Vector3f;
/**
 * Swing listener driving the mission-plan screen: handles the toolbar
 * buttons (stop/rewind/forward, add flight, manage flights), the periodic
 * animation timer ticks, the real-time checkbox, and updates of the 3D scene.
 */
class MissionPlanEvents implements ActionListener, ItemListener {
	MissionPlanMain mpMain;
	MissionPlanGUI mpGUI;
	MissionPlanParameters param;
	jat_Rotate jat_rotate;
	jatMessages messages;
	// Fires actionPerformed periodically to advance the simulation clock.
	public Timer timer;
	// Tick counter; also used to generate unique flight names.
	int i;
	// Simulation seconds added per tick; sign encodes playback direction.
	int time_advance = 10; // seconds
	DE405Plus Eph; // Ephemeris class
	Flight f;
	Rainbow3f rainbow = new Rainbow3f();
	ManageFlightsDialog myDialog;
	// Direction flag for the automatic camera sweep.
	boolean directionDown;

	public MissionPlanEvents(MissionPlanMain mpMain) {
		this.mpMain = mpMain;
		this.param = mpMain.mpParam;
		this.Eph = param.Eph;
		messages = param.messages;
		// 50 ms period drives both button handling and the animation loop.
		timer = new Timer(50, this);
		// timer = new Timer(1000, this);
		// timer.start();
	}

	/**
	 * Handles both button presses and timer ticks (all routed here because
	 * this class is the single ActionListener for the screen).
	 */
	public void actionPerformed(ActionEvent ev) {
		this.mpGUI = mpMain.mpGUI;
		this.jat_rotate = mpMain.mpPlot.jat_rotate;
		i++;
		if (ev.getSource() == mpGUI.btn_stop) {
			time_advance = 0;
		}
		// Rewind: halve forward speed, or double backward speed, or start at -10.
		if (ev.getSource() == mpGUI.btn_rewind) {
			int sign = (int) Math.signum(time_advance);
			switch (sign) {
			case -1:
				time_advance *= 2;
				break;
			case -0:
				time_advance = -10;
				break;
			case 1:
				time_advance /= 2;
				break;
			}
		}
		// Forward: mirror image of the rewind logic.
		if (ev.getSource() == mpGUI.btn_forward) {
			int sign = (int) Math.signum(time_advance);
			switch (sign) {
			case -1:
				time_advance /= 2;
				break;
			case -0:
				time_advance = 10;
				break;
			case 1:
				time_advance *= 2;
				break;
			}
		}
		if (ev.getSource() == mpGUI.btnAddFlight) {
			messages.addln("[MissionPlanEvents add flight]");
			// System.out.println("add flight");
			AddFlightDialog myDialog = new AddFlightDialog(mpMain);
			// Get the resulting dates and delta-v's and add trajectory to
			// plot
			if (myDialog.pcpMain.pReturn.DepartureDate > 0.) {
				try {
					f = new Flight();
					f.flightName = "flight" + i;
					// retrieve selected values from dialog and store
					f.departure_planet = myDialog.pcpMain.pReturn.departure_planet;
					f.departurePlanetName = body.name[f.departure_planet.ordinal()];
					f.arrival_planet = myDialog.pcpMain.pReturn.arrival_planet;
					f.arrivalPlanetName = body.name[f.arrival_planet.ordinal()];
					f.departureDate = new TimeAPL(myDialog.pcpMain.pReturn.DepartureDate);
					f.arrivalDate = new TimeAPL(myDialog.pcpMain.pReturn.ArrivalDate);
					f.mu = Constants.GM_Sun / 1.e9;
					// Time of flight in seconds (dates differ in days).
					f.tof = TimeAPL.minus(f.arrivalDate, f.departureDate) * 86400.0;
					f.lambert = new Lambert(Constants.GM_Sun / 1.e9);
					// Planet states at departure and arrival from the ephemeris.
					f.r0 = Eph.get_planet_pos(f.departure_planet, f.departureDate);
					f.v0 = Eph.get_planet_vel(f.departure_planet, f.departureDate);
					f.rf = Eph.get_planet_pos(f.arrival_planet, f.arrivalDate);
					f.vf = Eph.get_planet_vel(f.arrival_planet, f.arrivalDate);
					try {
						// Solve the Lambert problem for the transfer orbit.
						f.totaldv = f.lambert.compute(f.r0, f.v0, f.rf, f.vf, f.tof);
						messages.addln("[MissionPlanEvents] total DV " + f.totaldv + "km/s");
						// totaldv = -1;
						// apply the first delta-v
						f.dv0 = f.lambert.deltav0;
						f.v0 = f.v0.plus(f.dv0);
						// System.out.println(ssmain.flightList.size());
						TwoBodyAPL temp = new TwoBodyAPL(f.mu, f.r0, f.v0);
						f.t0_on_orbit = temp.t_from_ta();
						// VectorN rot_r0 = f.r0;
						// VectorN rot_v0 = f.v0;
						f.color = rainbow.colorFor(10 * mpMain.flightList.size());
						// f.orbit = new TwoBodyOrbit3D(f.mu, rot_r0, rot_v0,
						// f.t0_on_orbit, f.t0_on_orbit + f.tof,
						// f.color);
						f.orbit = new TwoBodyOrbit3D(f.mu, f.r0, f.v0, f.t0_on_orbit, f.t0_on_orbit + f.tof, f.color);
						mpMain.mpPlot.jatScene.add(f.orbit, f.flightName);
						f.satellite = new Sphere3D(5000000.f, Colors.gold);
						mpMain.mpPlot.jatScene.add(f.satellite, f.satelliteName);
						mpMain.flightList.add(f);
					} catch (LambertException e) {
						messages.addln("Lambert failed"); // totaldv = -1;
						// System.out.println(e.message);
						// e.printStackTrace();
					}
				} catch (IOException e1) {
					e1.printStackTrace();
				}
			}
		} // End of button Add Flight pressed
		if (ev.getSource() == mpGUI.btnManageFlights) {
			// System.out.println("manage flights");
			myDialog = new ManageFlightsDialog(mpMain);
		}
		// Periodic timer events
		// System.out.println("alive");
		// Advance the simulation clock: either track wall-clock time or step
		// by time_advance seconds per tick.
		CalDate caldate;
		if (mpGUI.realtime_chk.isSelected()) {
			Calendar cal = Calendar.getInstance();
			int Y, M, D, h, m, s;
			Y = cal.get(Calendar.YEAR);
			M = cal.get(Calendar.MONTH);
			D = cal.get(Calendar.DAY_OF_MONTH);
			h = cal.get(Calendar.HOUR_OF_DAY);
			m = cal.get(Calendar.MINUTE);
			s = cal.get(Calendar.SECOND);
			caldate = new CalDate(Y, M, D, h, m, s);
			param.simulationDate = new TimeAPL(caldate);
		} else {
			param.simulationDate.step_seconds(time_advance);
			mpGUI.timestepfield.setText("" + time_advance);
			caldate = new CalDate(param.simulationDate.mjd_utc());
		}
		// Mirror the current simulation date into the GUI fields.
		mpGUI.yearfield.setText("" + caldate.year());
		mpGUI.monthfield.setText("" + caldate.month());
		mpGUI.dayfield.setText("" + caldate.day());
		mpGUI.hourfield.setText("" + caldate.hour());
		mpGUI.minutefield.setText("" + caldate.min());
		mpGUI.secondfield.setText("" + (int) caldate.sec());
		update_scene(param.simulationDate);
		// Optional slow camera sweep, reversing near the poles.
		if (mpGUI.chckbxCameraRotate.isSelected()) {
			Vector3f sphereCoord = jat_rotate.getV_current_sphere();
			// System.out.println(sphereCoord.x + " " + sphereCoord.y + " " +
			// sphereCoord.z);
			if (sphereCoord.z > 1)
				directionDown = true;
			if (sphereCoord.z < -1)
				directionDown = false;
			if (directionDown)
				jat_rotate.jat_rotate(.005f, -.002f);
			else
				jat_rotate.jat_rotate(.005f, .002f);
		}
		if (messages.changed) {
			messages.printMessages();
			messages.printMessagesToTextArea(mpMain.textArea);
		}
	}// End of ActionPerformed

	/**
	 * Enables/disables the playback buttons when the real-time checkbox
	 * toggles (manual stepping makes no sense while tracking the wall clock).
	 */
	public void itemStateChanged(ItemEvent e) {
		Object source = e.getItemSelectable();
		if (source == mpGUI.realtime_chk) {
			if (mpGUI.realtime_chk.isSelected()) {
				mpGUI.btn_stop.setEnabled(false);
				mpGUI.btn_forward.setEnabled(false);
				mpGUI.btn_rewind.setEnabled(false);
			} else {
				mpGUI.btn_stop.setEnabled(true);
				mpGUI.btn_forward.setEnabled(true);
				mpGUI.btn_rewind.setEnabled(true);
			}
		}
	}

	/**
	 * Moves planets and flight satellites to their positions at the given
	 * simulation time. Satellites outside their flight window are parked at
	 * the origin.
	 */
	void update_scene(TimeAPL mytime) {
		try {
			// Planets 1..5 only; index 0 is presumably not drawn — TODO confirm.
			for (int i = 1; i < 6; i++) {
				if (param.planetOnOff[i]) {
					mpMain.mpPlot.planets[i].set_position(Eph.get_planet_pos(body.fromInt(i), mytime));
				}
			}
		} catch (IOException e) {
			JOptionPane.showMessageDialog(mpGUI, "DE405 Ephemeris data file not found.");
			e.printStackTrace();
			System.exit(0);
			// e.printStackTrace();
		}
		for (int i = 0; i < mpMain.flightList.size(); i++) {
			double satelliteTime;
			Flight f = mpMain.flightList.get(i);
			// Elapsed time since departure, in days.
			satelliteTime = TimeAPL.minus(mytime, f.departureDate);
			mpMain.mpGUI.viewdistancefield.setText("" + satelliteTime);
			if (satelliteTime > 0 && satelliteTime < f.tof / 86400.) {
				f.satellite.set_position(f.orbit.sat.position(satelliteTime * 86400));
			} else
				f.satellite.set_position(0, 0, 0);
		}
	}
}
| apache-2.0 |
1527115168/coolweather | app/src/main/java/xp/com/coolweather/db/City.java | 829 | package xp.com.coolweather.db;
import org.litepal.crud.DataSupport;
/**
* Created by xp on 2017/5/18.
*/
/**
 * LitePal entity for a city row: id, display name, weather-service code,
 * and a foreign key to the owning province.
 */
public class City extends DataSupport {
    // LitePal primary key.
    private int id;
    // Display name of the city.
    private String cityName;
    // Code used when querying the weather service for this city.
    private int cityCode;
    // Foreign key referencing the owning Province record.
    private int provinceId;

    public int getProvinceId() {
        return provinceId;
    }

    public void setProvinceId(int provinceId) {
        this.provinceId = provinceId;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getCityName() {
        return cityName;
    }

    public void setCityName(String cityName) {
        this.cityName = cityName;
    }

    public int getCityCode() {
        return cityCode;
    }

    public void setCityCode(int cityCode) {
        this.cityCode = cityCode;
    }
}
| apache-2.0 |
sajavadi/pinot | pinot-tools/src/main/java/com/linkedin/pinot/tools/scan/query/SegmentQueryProcessor.java | 11201 | /**
* Copyright (C) 2014-2016 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.tools.scan.query;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.linkedin.pinot.common.request.AggregationInfo;
import com.linkedin.pinot.common.request.BrokerRequest;
import com.linkedin.pinot.common.request.FilterOperator;
import com.linkedin.pinot.common.request.GroupBy;
import com.linkedin.pinot.common.segment.ReadMode;
import com.linkedin.pinot.common.utils.request.FilterQueryTree;
import com.linkedin.pinot.common.utils.request.RequestUtils;
import com.linkedin.pinot.core.common.BlockMultiValIterator;
import com.linkedin.pinot.core.common.BlockSingleValIterator;
import com.linkedin.pinot.core.query.utils.Pair;
import com.linkedin.pinot.core.segment.index.ColumnMetadata;
import com.linkedin.pinot.core.segment.index.IndexSegmentImpl;
import com.linkedin.pinot.core.segment.index.SegmentMetadataImpl;
import com.linkedin.pinot.core.segment.index.loader.Loaders;
import com.linkedin.pinot.core.segment.index.readers.Dictionary;
class SegmentQueryProcessor {
private static final Logger LOGGER = LoggerFactory.getLogger(SegmentQueryProcessor.class);
private File _segmentDir;
private Set<String> _mvColumns;
private Map<String, int[]> _mvColumnArrayMap;
private final SegmentMetadataImpl _metadata;
private final IndexSegmentImpl _indexSegment;
private final String _tableName;
private final String _segmentName;
private final int _totalDocs;
SegmentQueryProcessor(File segmentDir)
throws Exception {
_segmentDir = segmentDir;
_indexSegment = (IndexSegmentImpl) Loaders.IndexSegment.load(_segmentDir, ReadMode.mmap);
_metadata = new SegmentMetadataImpl(_segmentDir);
_tableName = _metadata.getTableName();
_segmentName = _metadata.getName();
_totalDocs = _metadata.getTotalDocs();
_mvColumns = new HashSet<>();
_mvColumnArrayMap = new HashMap<>();
for (ColumnMetadata columnMetadata : _metadata.getColumnMetadataMap().values()) {
String column = columnMetadata.getColumnName();
if (!columnMetadata.isSingleValue()) {
_mvColumns.add(column);
}
_mvColumnArrayMap.put(column, new int[columnMetadata.getMaxNumberOfMultiValues()]);
}
}
public void close() {
_metadata.close();
_indexSegment.destroy();
}
public ResultTable process(BrokerRequest brokerRequest)
throws Exception {
if (pruneSegment(brokerRequest)) {
return null;
}
LOGGER.debug("Processing segment: {}", _segmentName);
FilterQueryTree filterQueryTree = RequestUtils.generateFilterQueryTree(brokerRequest);
List<Integer> filteredDocIds = filterDocIds(filterQueryTree, null);
ResultTable result = null;
if (brokerRequest.isSetAggregationsInfo()) {
// Aggregation only
if (!brokerRequest.isSetGroupBy()) {
Aggregation aggregation =
new Aggregation(_indexSegment, _metadata, filteredDocIds, brokerRequest.getAggregationsInfo(), null, 10);
result = aggregation.run();
} else { // Aggregation GroupBy
GroupBy groupBy = brokerRequest.getGroupBy();
Aggregation aggregation =
new Aggregation(_indexSegment, _metadata, filteredDocIds, brokerRequest.getAggregationsInfo(),
groupBy.getColumns(), groupBy.getTopN());
result = aggregation.run();
}
} else {// Only Selection
if (brokerRequest.isSetSelections()) {
List<String> columns = brokerRequest.getSelections().getSelectionColumns();
if (columns.contains("*")) {
columns = Arrays.asList(_indexSegment.getColumnNames());
}
List<Pair> selectionColumns = new ArrayList<>();
Set<String> columSet = new HashSet<>();
// Collect a unique list of columns, in case input has duplicates.
for (String column : columns) {
if (!columSet.contains(column)) {
selectionColumns.add(new Pair(column, null));
columSet.add(column);
}
}
Selection selection = new Selection(_indexSegment, _metadata, filteredDocIds, selectionColumns);
result = selection.run();
}
}
result.setNumDocsScanned(filteredDocIds.size());
result.setTotalDocs(_totalDocs);
return result;
}
private boolean pruneSegment(BrokerRequest brokerRequest) {
// Check if segment belongs to the table being queried.
if (!_tableName.equals(brokerRequest.getQuerySource().getTableName())) {
LOGGER.debug("Skipping segment {} from different table {}", _segmentName, _tableName);
return true;
}
// Check if any column in the query does not exist in the segment.
Set<String> allColumns = _metadata.getAllColumns();
if (brokerRequest.isSetAggregationsInfo()) {
for (AggregationInfo aggregationInfo : brokerRequest.getAggregationsInfo()) {
Map<String, String> aggregationParams = aggregationInfo.getAggregationParams();
for (String column : aggregationParams.values()) {
if (column != null && !column.isEmpty() && !column.equals("*") && !allColumns.contains(column)) {
LOGGER.debug("Skipping segment '{}', as it does not have column '{}'", _metadata.getName(), column);
return true;
}
}
GroupBy groupBy = brokerRequest.getGroupBy();
if (groupBy != null) {
for (String column : groupBy.getColumns()) {
if (!allColumns.contains(column)) {
LOGGER.debug("Skipping segment '{}', as it does not have column '{}'", _metadata.getName(), column);
return true;
}
}
}
}
} else {
if (brokerRequest.isSetSelections()) {
for (String column : brokerRequest.getSelections().getSelectionColumns()) {
if (!allColumns.contains(column)) {
LOGGER.debug("Skipping segment '{}', as it does not have column '{}'", _metadata.getName(), column);
return true;
}
}
}
}
return false;
}
private List<Integer> filterDocIds(FilterQueryTree filterQueryTree, List<Integer> inputDocIds) {
// If no filter predicate, return the input without filtering.
if (filterQueryTree == null) {
List<Integer> allDocs = new ArrayList<>(_totalDocs);
for (int i = 0; i < _totalDocs; ++i) {
allDocs.add(i);
}
return allDocs;
}
final List<FilterQueryTree> childFilters = filterQueryTree.getChildren();
final boolean isLeaf = (childFilters == null) || childFilters.isEmpty();
if (isLeaf) {
FilterOperator filterType = filterQueryTree.getOperator();
String column = filterQueryTree.getColumn();
final List<String> value = filterQueryTree.getValue();
return getMatchingDocIds(inputDocIds, filterType, column, value);
}
List<Integer> result = filterDocIds(childFilters.get(0), inputDocIds);
final FilterOperator operator = filterQueryTree.getOperator();
for (int i = 1; i < childFilters.size(); ++i) {
// List<Integer> childResult = operator.equals(FilterOperator.AND) ? filterDocIds(childFilters.get(i), result)
// : filterDocIds(childFilters.get(i), inputDocIds);
List<Integer> childResult = filterDocIds(childFilters.get(i), inputDocIds);
result = combine(result, childResult, operator);
}
return result;
}
private List<Integer> combine(List<Integer> operand1, List<Integer> operand2, FilterOperator operator) {
List<Integer> result = new ArrayList<>();
Set<Integer> set = new HashSet<>();
switch (operator) {
case AND:
set.addAll(operand1);
for (Integer docId : operand2) {
if (set.contains(docId)) {
result.add(docId);
}
}
break;
case OR:
set.addAll(operand1);
set.addAll(operand2);
result.addAll(set);
break;
default:
throw new RuntimeException("Unsupported combine operator");
}
return result;
}
List<Integer> getMatchingDocIds(List<Integer> inputDocIds, FilterOperator filterType, String column,
List<String> value) {
Dictionary dictionaryReader = _indexSegment.getDictionaryFor(column);
PredicateFilter predicateFilter;
switch (filterType) {
case EQUALITY:
predicateFilter = new EqualsPredicateFilter(dictionaryReader, value.get(0));
break;
case NOT:
predicateFilter = new NotPredicateFilter(dictionaryReader, value.get(0));
break;
case IN:
predicateFilter = new InPredicateFilter(dictionaryReader, value);
break;
case NOT_IN:
predicateFilter = new NotInPredicateFilter(dictionaryReader, value);
break;
case RANGE:
predicateFilter = new RangePredicateFilter(dictionaryReader, value);
break;
case REGEXP_LIKE:
default:
throw new UnsupportedOperationException("Unsupported filterType:" + filterType);
}
return evaluatePredicate(inputDocIds, column, predicateFilter);
}
private List<Integer> evaluatePredicate(List<Integer> inputDocIds, String column, PredicateFilter predicateFilter) {
List<Integer> result = new ArrayList<>();
if (!_mvColumns.contains(column)) {
BlockSingleValIterator bvIter =
(BlockSingleValIterator) _indexSegment.getDataSource(column).getNextBlock().getBlockValueSet().iterator();
int i = 0;
while (bvIter.hasNext() && (inputDocIds == null || i < inputDocIds.size())) {
int docId = (inputDocIds != null) ? inputDocIds.get(i++) : i++;
bvIter.skipTo(docId);
if (predicateFilter.apply(bvIter.nextIntVal())) {
result.add(docId);
}
}
} else {
BlockMultiValIterator bvIter =
(BlockMultiValIterator) _indexSegment.getDataSource(column).getNextBlock().getBlockValueSet().iterator();
int i = 0;
while (bvIter.hasNext() && (inputDocIds == null || i < inputDocIds.size())) {
int docId = (inputDocIds != null) ? inputDocIds.get(i++) : i++;
bvIter.skipTo(docId);
int[] dictIds = _mvColumnArrayMap.get(column);
int numMVValues = bvIter.nextIntVal(dictIds);
if (predicateFilter.apply(dictIds, numMVValues)) {
result.add(docId);
}
}
}
return result;
}
public String getSegmentName() {
return _segmentName;
}
} | apache-2.0 |
openwide-java/artifact-listener | maven-artifact-notifier-webapp/src/main/java/fr/openwide/maven/artifact/notifier/web/application/notification/service/WebappNotificationUrlBuilderServiceImpl.java | 4381 | package fr.openwide.maven.artifact.notifier.web.application.notification.service;
import java.util.concurrent.Callable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import fr.openwide.core.wicket.more.link.descriptor.generator.IPageLinkGenerator;
import fr.openwide.core.wicket.more.model.GenericEntityModel;
import fr.openwide.core.wicket.more.notification.service.AbstractNotificationUrlBuilderServiceImpl;
import fr.openwide.core.wicket.more.notification.service.IWicketContextExecutor;
import fr.openwide.maven.artifact.notifier.core.business.artifact.model.Artifact;
import fr.openwide.maven.artifact.notifier.core.business.notification.service.INotificationUrlBuilderService;
import fr.openwide.maven.artifact.notifier.core.business.user.model.EmailAddress;
import fr.openwide.maven.artifact.notifier.core.business.user.model.User;
import fr.openwide.maven.artifact.notifier.core.config.application.MavenArtifactNotifierConfigurer;
import fr.openwide.maven.artifact.notifier.web.application.artifact.page.ArtifactDescriptionPage;
import fr.openwide.maven.artifact.notifier.web.application.navigation.page.AboutPage;
import fr.openwide.maven.artifact.notifier.web.application.navigation.page.HomePage;
import fr.openwide.maven.artifact.notifier.web.application.navigation.page.ViewProfilePage;
import fr.openwide.maven.artifact.notifier.web.application.navigation.util.LinkUtils;
import fr.openwide.maven.artifact.notifier.web.application.notification.page.ConfirmEmailNotificationPage;
import fr.openwide.maven.artifact.notifier.web.application.notification.page.ConfirmRegistrationNotificationPage;
import fr.openwide.maven.artifact.notifier.web.application.notification.page.DeleteEmailNotificationPage;
import fr.openwide.maven.artifact.notifier.web.application.notification.page.ResetPasswordNotificationPage;
@Service("webappNotificationUrlBuilderService")
/**
 * Builds absolute URLs for notification e-mails by rendering Wicket page
 * links inside a Wicket request context (via the inherited buildUrl helpers).
 */
public class WebappNotificationUrlBuilderServiceImpl extends AbstractNotificationUrlBuilderServiceImpl implements INotificationUrlBuilderService {

	@Autowired
	private MavenArtifactNotifierConfigurer configurer;

	@Autowired
	public WebappNotificationUrlBuilderServiceImpl(IWicketContextExecutor wicketExecutor) {
		super(wicketExecutor);
	}

	/** URL of the application home page. */
	@Override
	public String getHomeUrl() {
		// Callable defers link creation until a Wicket context is available.
		Callable<IPageLinkGenerator> pageLinkGeneratorTask = new Callable<IPageLinkGenerator>() {
			@Override
			public IPageLinkGenerator call() throws Exception {
				return HomePage.linkDescriptor();
			}
		};
		return buildUrl(pageLinkGeneratorTask);
	}

	/** URL of the "about" page. */
	@Override
	public String getAboutUrl() {
		Callable<IPageLinkGenerator> pageLinkGeneratorTask = new Callable<IPageLinkGenerator>() {
			@Override
			public IPageLinkGenerator call() throws Exception {
				return AboutPage.linkDescriptor();
			}
		};
		return buildUrl(pageLinkGeneratorTask);
	}

	/** External GitHub project URL, taken from configuration (not a Wicket page). */
	@Override
	public String getGitHubUrl() {
		return configurer.getLinkGitHubProject();
	}

	/** URL of the current user's profile page. */
	@Override
	public String getProfileUrl() {
		Callable<IPageLinkGenerator> pageLinkGeneratorTask = new Callable<IPageLinkGenerator>() {
			@Override
			public IPageLinkGenerator call() throws Exception {
				return ViewProfilePage.linkDescriptor();
			}
		};
		return buildUrl(pageLinkGeneratorTask);
	}

	/** Registration-confirmation URL, keyed by the user's hash parameters. */
	@Override
	public String getConfirmRegistrationUrl(User user) {
		return buildUrl(ConfirmRegistrationNotificationPage.class, LinkUtils.getUserHashPageParameters(user));
	}

	/** Password-reset URL, keyed by the user's hash parameters. */
	@Override
	public String getResetPasswordUrl(User user) {
		return buildUrl(ResetPasswordNotificationPage.class, LinkUtils.getUserHashPageParameters(user));
	}

	/** E-mail-confirmation URL, keyed by the address' hash parameters. */
	@Override
	public String getConfirmEmailUrl(EmailAddress emailAddress) {
		return buildUrl(ConfirmEmailNotificationPage.class, LinkUtils.getEmailHashPageParameters(emailAddress));
	}

	/** E-mail-deletion URL, keyed by the address' hash parameters. */
	@Override
	public String getDeleteEmailUrl(EmailAddress emailAddress) {
		return buildUrl(DeleteEmailNotificationPage.class, LinkUtils.getEmailHashPageParameters(emailAddress));
	}

	/** URL of the description page for the given artifact. */
	@Override
	public String getArtifactDescriptionUrl(final Artifact artifact) {
		Callable<IPageLinkGenerator> pageLinkGeneratorTask = new Callable<IPageLinkGenerator>() {
			@Override
			public IPageLinkGenerator call() throws Exception {
				return ArtifactDescriptionPage.linkDescriptor(new GenericEntityModel<Long, Artifact>(artifact));
			}
		};
		return buildUrl(pageLinkGeneratorTask);
	}
}
| apache-2.0 |
iritgo/iritgo-aktera | aktera-journal/src/main/java/de/iritgo/aktera/journal/module/ModuleCreateHandler.java | 3154 | /**
* This file is part of the Iritgo/Aktera Framework.
*
* Copyright (C) 2005-2011 Iritgo Technologies.
* Copyright (C) 2003-2005 BueroByte GbR.
*
* Iritgo licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.iritgo.aktera.journal.module;
import de.iritgo.aktera.model.ModelException;
import de.iritgo.aktera.model.ModelRequest;
import de.iritgo.aktera.persist.CreateHandler;
import de.iritgo.aktera.persist.PersistenceException;
import de.iritgo.aktera.persist.PersistentFactory;
import org.apache.avalon.framework.logger.Logger;
import java.sql.Connection;
import java.sql.SQLException;
/**
 * Database creation handler for the journal module: creates the journal tables,
 * an index for chronological queries, and default security entries.
 */
public class ModuleCreateHandler extends CreateHandler
{
	/**
	 * Creates the journal database tables.
	 *
	 * @see de.iritgo.aktera.persist.CreateHandler#createTables(ModelRequest,
	 *      de.iritgo.aktera.persist.PersistentFactory, java.sql.Connection,
	 *      Logger)
	 */
	@Override
	public void createTables(ModelRequest request, PersistentFactory persistentFactory, Connection connection,
					Logger logger) throws ModelException, PersistenceException, SQLException
	{
		// One row per journal event.
		createTable("JournalEntry", "id serial primary key", "occurredat timestamp not null", "producerId int4",
						"producerType varchar(255)", "ownerId int4", "ownerType varchar(255)", "ownerGroupId int4",
						"ownerGroupType varchar(255)", "extendedInfoId int4", "extendedInfoType varchar(255)",
						"message varchar(255)", "shortMessage varchar(255)", "searchableText varchar(255)",
						"tags varchar(255)", "misc varchar(255)", "rawData varchar(255)", "primaryType varchar(255)",
						"secondaryType varchar(255)", "newFlag boolean");
		// Journal queries are filtered/sorted by time, so index the timestamp column.
		createIndex("JournalEntry", "occurredat");
		// Generic auxiliary data records (typed slots: timestamps, ints, strings).
		createTable("JournalData", "id serial primary key", "type varchar(255)", "category varchar(255)",
						"occurredAt timestamp not null",
						"timestamp1 timestamp", "timestamp2 timestamp", "key int4", "integer1 int4", "integer2 int4",
						"string1 varchar(255)", "string2 varchar(255)", "string3 varchar(255)", "string4 varchar(255)",
						"string5 varchar(255)", "string6 varchar(255)");
	}

	/**
	 * Creates default security entries granting the "user" group access to the
	 * journal listing and entry commands.
	 */
	@Override
	public void createData(PersistentFactory persistentFactory, Connection connection, Logger logger,
					ModelRequest request) throws ModelException, PersistenceException, SQLException
	{
		createInstanceSecurity("de.iritgo.aktera.ui.listing.List", "aktera.journal.list", "user", "*");
		createInstanceSecurity("de.iritgo.aktera.ui.listing.List", "aktera.journal" + ".list.notvisible", "user", "*");
		createComponentSecurity("aktera.journal.delete-journal-entry", "user", "*");
		createComponentSecurity("aktera.journal.execute-journal-entry", "user", "*");
	}
}
| apache-2.0 |
datianshi/embed-tomcat-mvc | src/test/java/com/shaozhending/test/embedtomcat/TestConfig.java | 328 | package com.shaozhending.test.embedtomcat;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;
/**
 * Spring configuration supplying beans for the embedded-Tomcat tests.
 */
@Configuration
public class TestConfig {

    /**
     * @return a default {@link RestTemplate} bean for issuing HTTP calls in tests
     */
    @Bean
    public RestTemplate getTemplate() {
        final RestTemplate restTemplate = new RestTemplate();
        return restTemplate;
    }
}
| apache-2.0 |
hekate-io/hekate | hekate-core/src/test/java/io/hekate/messaging/retry/RetryPolicyTest.java | 2089 | /*
* Copyright 2022 The Hekate Project
*
* The Hekate Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.hekate.messaging.retry;
import io.hekate.HekateTestBase;
import org.junit.Before;
import org.junit.Test;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for the convenience (default) methods of {@code RetryPolicy}: each test
 * enables the real default-method implementation on a mock and verifies that it
 * delegates to the expected primitive method with the right arguments.
 */
public class RetryPolicyTest extends HekateTestBase {
    // Mocked so that only the default method under test runs for real.
    private RetryPolicy<?> policy;

    @Before
    public void setUp() throws Exception {
        policy = mock(RetryPolicy.class);
    }

    @Test
    public void testFixedDelay() {
        // Let the default method execute its real implementation on the mock.
        when(policy.withFixedDelay(anyLong())).thenCallRealMethod();
        policy.withFixedDelay(100500);
        // Must delegate to withBackoff(...) with a constant-delay backoff policy.
        verify(policy).withBackoff(argThat(arg -> arg.delayBeforeRetry(1) == 100500));
    }

    @Test
    public void testExponentialDelay() {
        when(policy.withExponentialDelay(anyLong(), anyLong())).thenCallRealMethod();
        policy.withExponentialDelay(100500, 500100);
        // Must delegate to withBackoff(...) with an exponential policy carrying both bounds.
        verify(policy).withBackoff(argThat(arg -> {
            ExponentialBackoffPolicy exp = (ExponentialBackoffPolicy)arg;
            return exp.baseDelay() == 100500 && exp.maxDelay() == 500100;
        }));
    }

    @Test
    public void testUnlimitedAttempts() {
        when(policy.unlimitedAttempts()).thenCallRealMethod();
        policy.unlimitedAttempts();
        // "Unlimited" is encoded as maxAttempts(-1).
        verify(policy).maxAttempts(eq(-1));
    }
}
| apache-2.0 |
tensorflow/java | tensorflow-core/tensorflow-core-api/src/gen/java/org/tensorflow/op/core/ImmutableConst.java | 4060 | /* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================*/
// This class has been generated, DO NOT EDIT!
package org.tensorflow.op.core;
import java.util.Arrays;
import org.tensorflow.GraphOperation;
import org.tensorflow.Operand;
import org.tensorflow.Operation;
import org.tensorflow.OperationBuilder;
import org.tensorflow.Output;
import org.tensorflow.ndarray.Shape;
import org.tensorflow.op.Operands;
import org.tensorflow.op.RawOp;
import org.tensorflow.op.RawOpInputs;
import org.tensorflow.op.Scope;
import org.tensorflow.op.annotation.Endpoint;
import org.tensorflow.op.annotation.OpInputsMetadata;
import org.tensorflow.op.annotation.OpMetadata;
import org.tensorflow.op.annotation.Operator;
import org.tensorflow.proto.framework.DataType;
import org.tensorflow.types.family.TType;
/**
 * Returns immutable tensor from memory region.
 * The current implementation memmaps the tensor from a file.
 *
 * <p>NOTE: generated op wrapper (see file header) — do not edit by hand.
 *
 * @param <T> data type for {@code tensor} output
 */
@OpMetadata(
    opType = ImmutableConst.OP_NAME,
    inputsClass = ImmutableConst.Inputs.class
)
@Operator
public final class ImmutableConst<T extends TType> extends RawOp implements Operand<T> {
  /**
   * The name of this op, as known by TensorFlow core engine
   */
  public static final String OP_NAME = "ImmutableConst";

  // The op's single output; bound in the constructor from the wrapped native operation.
  private Output<T> tensor;

  // Wraps an already-built native Operation (does not create a new one).
  public ImmutableConst(Operation operation) {
    super(operation, OP_NAME);
    int outputIdx = 0;
    tensor = operation.output(outputIdx++);
  }

  /**
   * Factory method to create a class wrapping a new ImmutableConst operation.
   *
   * @param scope current scope
   * @param dtype Type of the returned tensor.
   * @param shape Shape of the returned tensor.
   * @param memoryRegionName Name of readonly memory region used by the tensor, see
   * NewReadOnlyMemoryRegionFromFile in tensorflow::Env.
   * @param <T> data type for {@code ImmutableConst} output and operands
   * @return a new instance of ImmutableConst
   */
  @Endpoint(
      describeByClass = true
  )
  public static <T extends TType> ImmutableConst<T> create(Scope scope, Class<T> dtype, Shape shape,
      String memoryRegionName) {
    OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "ImmutableConst");
    // All three inputs are graph-level attributes, not tensor inputs.
    opBuilder.setAttr("dtype", Operands.toDataType(dtype));
    opBuilder.setAttr("shape", shape);
    opBuilder.setAttr("memory_region_name", memoryRegionName);
    return new ImmutableConst<>(opBuilder.build());
  }

  /**
   * Gets tensor.
   *
   * @return tensor.
   */
  public Output<T> tensor() {
    return tensor;
  }

  @Override
  public Output<T> asOutput() {
    return tensor;
  }

  /**
   * Typed view of the attributes of an {@code ImmutableConst} graph operation.
   */
  @OpInputsMetadata(
      outputsClass = ImmutableConst.class
  )
  public static class Inputs extends RawOpInputs<ImmutableConst<?>> {
    /**
     * Type of the returned tensor.
     */
    public final DataType dtype;

    /**
     * Shape of the returned tensor.
     */
    public final Shape shape;

    /**
     * Name of readonly memory region used by the tensor, see
     * NewReadOnlyMemoryRegionFromFile in tensorflow::Env.
     */
    public final String memoryRegionName;

    public Inputs(GraphOperation op) {
      super(new ImmutableConst<>(op), op, Arrays.asList("dtype", "shape", "memory_region_name"));
      int inputIndex = 0;
      dtype = op.attributes().getAttrType("dtype");
      shape = op.attributes().getAttrShape("shape");
      memoryRegionName = op.attributes().getAttrString("memory_region_name");
    }
  }
}
| apache-2.0 |
whiskeyfei/SimpleNews.io | app/src/main/java/com/kong/app/blog/BlogFragment.java | 2270 | package com.kong.app.blog;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.TabLayout;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.View;
import com.kong.app.blog.model.Feed;
import com.kong.app.news.ListBaseFragment;
import com.kong.app.news.adapter.IRVPagerView;
import com.kong.app.news.adapter.RVPagerAdapter;
import com.kong.home.tab.event.SelectRepeatEvent;
import java.util.List;
/**
 * Fragment displaying blog posts as tabs backed by a {@link ViewPager}.
 * MVP "view" half of {@link BlogContract}: the presenter is created in
 * {@link #onCreate} and pushes data back through {@link #onSuccess(Feed)}.
 */
public class BlogFragment extends ListBaseFragment implements BlogContract.View {
    private static final String TAG = "BlogFragment";
    // Injected via setPresenter() when BlogPresenter is constructed in onCreate().
    private BlogContract.Presenter mPresenter;

    /** Standard fragment factory; keeps argument handling in one place. */
    public static BlogFragment newInstance() {
        Bundle args = new Bundle();
        BlogFragment fragment = new BlogFragment();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // The presenter registers itself on this view via setPresenter().
        new BlogPresenter(this);
    }

    @Override
    public void setPresenter(BlogContract.Presenter presenter) {
        mPresenter = presenter;
    }

    @Override
    public void onCreateView() {
        // Kick off data loading once the base-class view hierarchy exists.
        mPresenter.start();
    }

    @Override
    public int getCurrentType() {
        return SelectRepeatEvent.BLOGINDEX;
    }

    /** Called by the presenter once the feed has been fetched successfully. */
    @Override
    public void onSuccess(Feed feed) {
        Log.i(TAG, "onSuccess:" + feed);
        setupViewPager(mViewPager, feed.getPosts());
        // Keep every page alive so switching tabs does not re-create content.
        mViewPager.setOffscreenPageLimit(mViewPager.getAdapter().getCount());
        mTablayout.setupWithViewPager(mViewPager);
        mTablayout.setTabMode(TabLayout.MODE_SCROLLABLE);
        mTablayout.setVisibility(View.VISIBLE);
    }

    // Builds one pager page per post.
    // NOTE(review): mViewPager/mTablayout/mIRVPagerViews are presumably
    // initialized by ListBaseFragment before onCreateView() runs — confirm.
    private void setupViewPager(ViewPager viewPager, List<Feed.PostsBean> posts) {
        final RVPagerAdapter adapter = new RVPagerAdapter();
        for (Feed.PostsBean postBean : posts) {
            IRVPagerView view = new BlogContentView(getActivity()).setPostsBeans(postBean);
            mIRVPagerViews.add(view);
        }
        adapter.setIRVPagerViews(mIRVPagerViews);
        viewPager.setAdapter(adapter);
    }

    @Override
    public void showProgress() {
        // No-op: no progress UI is wired up for this screen.
    }

    @Override
    public void hideProgress() {
        // No-op: no progress UI is wired up for this screen.
    }
}
robsoncardosoti/flowable-engine | modules/flowable-dmn-engine/src/main/java/org/flowable/dmn/engine/impl/hitpolicy/EvaluateRuleValidityBehavior.java | 842 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.dmn.engine.impl.hitpolicy;
import org.flowable.dmn.engine.impl.mvel.MvelExecutionContext;
/**
 * Hit-policy hook: validates a decision-table rule during evaluation.
 * How violations are reported is implementation-defined.
 *
 * @author Yvo Swillens
 */
public interface EvaluateRuleValidityBehavior {

    /**
     * Validates the rule identified by {@code ruleNumber} against the current
     * evaluation state.
     *
     * @param ruleNumber number of the rule being evaluated
     * @param executionContext MVEL execution context holding the evaluation state
     */
    void evaluateRuleValidity(int ruleNumber, MvelExecutionContext executionContext);
}
| apache-2.0 |
vam-google/google-cloud-java | google-api-grpc/proto-google-cloud-vision-v1p3beta1/src/main/java/com/google/cloud/vision/v1p3beta1/ProductSearchResultsOrBuilder.java | 5223 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p3beta1/product_search.proto
package com.google.cloud.vision.v1p3beta1;
/**
 * Read-only accessor view of a {@code ProductSearchResults} message, implemented
 * by both the message class and its builder.
 *
 * <p>NOTE: generated from {@code google/cloud/vision/v1p3beta1/product_search.proto}
 * (see file header) — regenerate with protoc instead of editing by hand.
 */
public interface ProductSearchResultsOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.vision.v1p3beta1.ProductSearchResults)
    com.google.protobuf.MessageOrBuilder {
  /**
   * Product category. [Deprecated] Use {@code product_category}.
   *
   * <code>.google.cloud.vision.v1p3beta1.ProductSearchCategory category = 1;</code>
   *
   * @return the raw wire (numeric) value of the {@code category} enum
   */
  int getCategoryValue();

  /**
   * Product category. [Deprecated] Use {@code product_category}.
   *
   * <code>.google.cloud.vision.v1p3beta1.ProductSearchCategory category = 1;</code>
   *
   * @return the {@code category} enum value
   */
  com.google.cloud.vision.v1p3beta1.ProductSearchCategory getCategory();

  /**
   * Product category. Supported values are {@code bag} and {@code shoe}.
   * [Deprecated] {@code product_category} is provided in each Product.
   *
   * <code>string product_category = 4;</code>
   *
   * @return the {@code product_category} field
   */
  java.lang.String getProductCategory();

  /**
   * Same field as {@link #getProductCategory()}, returned as raw bytes.
   *
   * <code>string product_category = 4;</code>
   *
   * @return the {@code product_category} field as UTF-8 bytes
   */
  com.google.protobuf.ByteString getProductCategoryBytes();

  /**
   * Timestamp of the index which provided these results. Changes made after
   * this time are not reflected in the current results.
   *
   * <code>.google.protobuf.Timestamp index_time = 2;</code>
   *
   * @return whether the {@code index_time} field is set
   */
  boolean hasIndexTime();

  /**
   * Timestamp of the index which provided these results.
   *
   * <code>.google.protobuf.Timestamp index_time = 2;</code>
   *
   * @return the {@code index_time} message (default instance when unset)
   */
  com.google.protobuf.Timestamp getIndexTime();

  /**
   * Timestamp of the index which provided these results.
   *
   * <code>.google.protobuf.Timestamp index_time = 2;</code>
   *
   * @return an OrBuilder view of {@code index_time}
   */
  com.google.protobuf.TimestampOrBuilder getIndexTimeOrBuilder();

  /**
   * List of detected products.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo products = 3;
   * </code>
   *
   * @return the full {@code products} list
   */
  java.util.List<com.google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo>
      getProductsList();

  /**
   * List of detected products.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo products = 3;
   * </code>
   *
   * @return the {@code products} element at {@code index}
   */
  com.google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo getProducts(int index);

  /**
   * List of detected products.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo products = 3;
   * </code>
   *
   * @return the number of {@code products} elements
   */
  int getProductsCount();

  /**
   * List of detected products.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo products = 3;
   * </code>
   *
   * @return OrBuilder views of the {@code products} elements
   */
  java.util.List<
          ? extends com.google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfoOrBuilder>
      getProductsOrBuilderList();

  /**
   * List of detected products.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo products = 3;
   * </code>
   *
   * @return an OrBuilder view of the {@code products} element at {@code index}
   */
  com.google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfoOrBuilder getProductsOrBuilder(
      int index);

  /**
   * List of results, one for each product match.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.Result results = 5;</code>
   *
   * @return the full {@code results} list
   */
  java.util.List<com.google.cloud.vision.v1p3beta1.ProductSearchResults.Result> getResultsList();

  /**
   * List of results, one for each product match.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.Result results = 5;</code>
   *
   * @return the {@code results} element at {@code index}
   */
  com.google.cloud.vision.v1p3beta1.ProductSearchResults.Result getResults(int index);

  /**
   * List of results, one for each product match.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.Result results = 5;</code>
   *
   * @return the number of {@code results} elements
   */
  int getResultsCount();

  /**
   * List of results, one for each product match.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.Result results = 5;</code>
   *
   * @return OrBuilder views of the {@code results} elements
   */
  java.util.List<? extends com.google.cloud.vision.v1p3beta1.ProductSearchResults.ResultOrBuilder>
      getResultsOrBuilderList();

  /**
   * List of results, one for each product match.
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.ProductSearchResults.Result results = 5;</code>
   *
   * @return an OrBuilder view of the {@code results} element at {@code index}
   */
  com.google.cloud.vision.v1p3beta1.ProductSearchResults.ResultOrBuilder getResultsOrBuilder(
      int index);
}
| apache-2.0 |
kanawish/caster-samples | libCore/src/main/java/com/kanawish/functional/PlainConsumer3.java | 524 | package com.kanawish.functional;
/**
 * Functional interface for a callback that consumes three values at the same
 * time. Note: {@link #accept} declares no checked exceptions, so implementations
 * may only throw unchecked ones.
 *
 * @param <T1> the first value type
 * @param <T2> the second value type
 * @param <T3> the third value type
 */
public interface PlainConsumer3<T1, T2, T3> {
    /**
     * Consume the input parameters.
     * @param t1 the first parameter
     * @param t2 the second parameter
     * @param t3 the third parameter
     */
    void accept(T1 t1, T2 t2, T3 t3);
}
| apache-2.0 |
TremoloSecurity/Scale | scale/scale-passwordreset-classes/src/main/java/com/tremolosecurity/scale/passwordreset/validate/BasicValidator.java | 4707 | /*
Copyright 2015 Tremolo Security, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.tremolosecurity.scale.passwordreset.validate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import com.tremolosecurity.saml.Attribute;
import com.tremolosecurity.scale.passwordreset.PasswordValidator;
import com.tremolosecurity.scale.user.UserObj;
/**
 * Basic password validator enforcing a length range and character-class rules.
 *
 * Configuration attributes (all optional):
 * <ul>
 *   <li>{@code minChars} (default 0) / {@code maxChars} (default -1 = unlimited)</li>
 *   <li>{@code requireLower} / {@code requireUpper} / {@code requireNumber} /
 *       {@code requireSpecial} (all default true)</li>
 *   <li>{@code minRequirements} — minimum number of distinct character classes;
 *       0 means every enabled class is individually required (default -1)</li>
 * </ul>
 */
public class BasicValidator implements PasswordValidator {

	// Length bounds; maxChars == -1 disables the upper bound.
	int minChars;
	int maxChars;
	// Minimum number of distinct character classes; -1 when unset, 0 means
	// "every enabled class is individually required".
	int minReqs;

	boolean requireUpper;
	boolean requireLower;
	boolean requireNumber;
	boolean requireSpecial;

	@Override
	public void init(HashMap<String, Attribute> initParams) throws Exception {
		this.minChars = intParam(initParams, "minChars", 0);
		this.maxChars = intParam(initParams, "maxChars", -1);
		this.requireLower = booleanParam(initParams, "requireLower", true);
		this.requireUpper = booleanParam(initParams, "requireUpper", true);
		this.requireNumber = booleanParam(initParams, "requireNumber", true);
		this.requireSpecial = booleanParam(initParams, "requireSpecial", true);
		this.minReqs = intParam(initParams, "minRequirements", -1);
	}

	/** Reads an integer attribute, falling back to the default when absent. */
	private static int intParam(HashMap<String, Attribute> params, String name, int defaultValue) {
		Attribute attr = params.get(name);
		if (attr == null) {
			return defaultValue;
		}
		return Integer.parseInt(attr.getValues().get(0));
	}

	/** Reads a boolean attribute ("true"/"false"), falling back to the default when absent. */
	private static boolean booleanParam(HashMap<String, Attribute> params, String name, boolean defaultValue) {
		Attribute attr = params.get(name);
		if (attr == null) {
			return defaultValue;
		}
		return attr.getValues().get(0).equalsIgnoreCase("true");
	}

	/**
	 * Validates the password.
	 *
	 * @param password the candidate password
	 * @param user the user the password belongs to (not used by this validator)
	 * @return human-readable error messages; empty when the password is valid
	 */
	@Override
	public List<String> validate(String password, UserObj user) {
		ArrayList<String> errors = new ArrayList<String>();

		// Length checks short-circuit: complexity is not evaluated on a
		// password of the wrong length (preserves original behavior).
		if (password.length() < this.minChars) {
			errors.add("Password must be at least " + this.minChars
					+ " characters long");
			return errors;
		}

		if (this.maxChars > -1 && password.length() > this.maxChars) {
			// BUGFIX: message previously printed this.minChars instead of this.maxChars.
			errors.add("Password can not be more then " + this.maxChars
					+ " characters long");
			return errors;
		}

		// Count how many distinct character classes appear in the password.
		boolean hasLower = false;
		boolean hasUpper = false;
		boolean hasSpecial = false;
		boolean hasNumber = false;
		int numReqs = 0;

		for (char c : password.toCharArray()) {
			if (Character.isLowerCase(c)) {
				if (! hasLower) {
					hasLower = true;
					numReqs++;
				}
			} else if (Character.isUpperCase(c)) {
				if (! hasUpper) {
					hasUpper = true;
					numReqs++;
				}
			} else if (Character.isDigit(c)) {
				if (! hasNumber) {
					hasNumber = true;
					numReqs++;
				}
			} else {
				// Anything that is not a letter or digit counts as "special".
				if (! hasSpecial) {
					hasSpecial = true;
					numReqs++;
				}
			}
		}

		if (this.minReqs == 0) {
			// minRequirements == 0: every enabled character class is mandatory.
			checkForErrors(errors, hasLower, hasUpper, hasSpecial, hasNumber);
		} else {
			// NOTE(review): when minRequirements is absent it defaults to -1, so
			// numReqs < minReqs can never hold and no complexity errors are
			// reported at all — confirm that is intended.
			if (numReqs < this.minReqs) {
				// StringBuilder replaces StringBuffer: the builder is method-local,
				// so no synchronization is needed; output is identical.
				StringBuilder b = new StringBuilder();
				b.append("At least " + this.minReqs + " of ");
				if (this.requireLower) {
					b.append("lower case character,");
				}
				if (this.requireUpper) {
					b.append("upper case character,");
				}
				if (this.requireNumber) {
					b.append("numeric character,");
				}
				if (this.requireSpecial) {
					b.append("special character,");
				}
				// Drop the trailing comma.
				errors.add(b.toString().substring(0, b.toString().length() - 1));
				checkForErrors(errors, hasLower, hasUpper, hasSpecial,
						hasNumber);
			}
		}

		return errors;
	}

	/** Adds one error message per enabled-but-unsatisfied character class. */
	private void checkForErrors(ArrayList<String> errors, boolean hasLower,
			boolean hasUpper, boolean hasSpecial, boolean hasNumber) {
		if (this.requireLower && !hasLower) {
			errors.add("At least one lowercase letter is required");
		}
		if (this.requireUpper && !hasUpper) {
			errors.add("At least one uppercase letter is required");
		}
		if (this.requireNumber && !hasNumber) {
			errors.add("At least one number is required");
		}
		if (this.requireSpecial && !hasSpecial) {
			errors.add("At least one non-alphanumeric character is required");
		}
	}
}
| apache-2.0 |
ctripcorp/x-pipe | redis/redis-console/src/test/java/com/ctrip/xpipe/redis/console/healthcheck/nonredis/cluster/impl/UnitTestClusterHealthMonitorManager.java | 2053 | package com.ctrip.xpipe.redis.console.healthcheck.nonredis.cluster.impl;
import com.ctrip.xpipe.api.observer.Observer;
import com.ctrip.xpipe.redis.checker.healthcheck.RedisHealthCheckInstance;
import com.ctrip.xpipe.redis.console.healthcheck.nonredis.cluster.ClusterHealthMonitorManager;
import com.ctrip.xpipe.redis.console.healthcheck.nonredis.cluster.ClusterHealthState;
import com.ctrip.xpipe.spring.AbstractProfile;
import com.google.common.collect.Sets;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.Map;
import java.util.Set;
/**
 * Test-profile stub of {@link ClusterHealthMonitorManager}: health-state
 * callbacks are no-ops and the warning-cluster queries return canned data
 * ("cluster1") for the "down-ish" states.
 */
@Component
@Profile(AbstractProfile.PROFILE_NAME_TEST)
public class UnitTestClusterHealthMonitorManager implements ClusterHealthMonitorManager {

    // Canned warning clusters; concurrent set since callers may query from any thread.
    private Set<String> warningClusters = Sets.newConcurrentHashSet();

    @PostConstruct
    public void postConstruction() {
        warningClusters.add("cluster1");
    }

    /** No-op in the test profile. */
    @Override
    public void healthCheckMasterDown(RedisHealthCheckInstance instance) {
    }

    /** No-op in the test profile. */
    @Override
    public void healthCheckMasterUp(RedisHealthCheckInstance instance) {
    }

    /** No-op in the test profile. */
    @Override
    public void outerClientMasterDown(String clusterId, String shardId) {
    }

    /** No-op in the test profile. */
    @Override
    public void outerClientMasterUp(String clusterId, String shardId) {
    }

    @Override
    public Set<String> getWarningClusters(ClusterHealthState state) {
        // Only these three states count as "warning"; return a copy so callers
        // cannot mutate the internal set.
        if(state.equals(ClusterHealthState.HALF_DOWN)
                || state.equals(ClusterHealthState.QUARTER_DOWN)
                || state.equals(ClusterHealthState.LEAST_ONE_DOWN)) {
            return Sets.newHashSet(warningClusters);
        }
        return Sets.newHashSet();
    }

    /** Not used in tests; intentionally returns null. */
    @Override
    public Observer createHealthStatusObserver() {
        return null;
    }

    /** No-op in the test profile. */
    @Override
    public void updateHealthCheckWarningShards(Map<String, Set<String>> warningClusterShards) {
    }

    /** Not used in tests; intentionally returns null. */
    @Override
    public Map<String, Set<String>> getAllClusterWarningShards() {
        return null;
    }
}
| apache-2.0 |
LeThoSon/ui_pattern | app/src/main/java/it/thoson/uipattern/facebook/FacebookActivity.java | 3584 | package it.thoson.uipattern.facebook;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import com.facebook.AccessToken;
import com.facebook.CallbackManager;
import com.facebook.FacebookCallback;
import com.facebook.FacebookException;
import com.facebook.FacebookSdk;
import com.facebook.login.LoginManager;
import com.facebook.login.LoginResult;
import com.facebook.login.widget.LoginButton;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import it.thoson.uipattern.R;
public class FacebookActivity extends AppCompatActivity {
private TextView info;
private LoginButton loginButton;
private Button btnLogin;
private CallbackManager callbackManager;
public static final String TAG = "FacebookActivity";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_facebook);
FacebookSdk.sdkInitialize(getApplicationContext());
callbackManager = CallbackManager.Factory.create();
LoginManager.getInstance().registerCallback(callbackManager,
new FacebookCallback<LoginResult>() {
@Override
public void onSuccess(LoginResult loginResult) {
// App code
Log.i(TAG, "LoginManager onSuccess");
}
@Override
public void onCancel() {
// App code
Log.i(TAG, "LoginManager onCancel");
}
@Override
public void onError(FacebookException exception) {
// App code
Log.i(TAG, "LoginManager onError");
}
});
info = (TextView)findViewById(R.id.tv_info);
if (AccessToken.getCurrentAccessToken() != null) {
info.setText("Token: "+AccessToken.getCurrentAccessToken().getToken().toString());
}
loginButton = (LoginButton)findViewById(R.id.login_button);
loginButton.setReadPermissions("");
loginButton.registerCallback(callbackManager, new FacebookCallback<LoginResult>() {
@Override
public void onSuccess(LoginResult loginResult) {
Log.i(TAG, "onSuccess");
LoginResult temp = loginResult;
}
@Override
public void onCancel() {
Log.i(TAG, "onCancel");
}
@Override
public void onError(FacebookException error) {
Log.i(TAG, "onError");
}
});
btnLogin = (Button) findViewById(R.id.btn_logout);
btnLogin.setText(AccessToken.getCurrentAccessToken() == null ? "Log in" : "Log out");
btnLogin.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
List<String> arr = new ArrayList<String>();
LoginManager.getInstance().logInWithPublishPermissions(FacebookActivity.this, arr);
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
callbackManager.onActivityResult(requestCode, resultCode, data);
}
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-redshift/src/main/java/com/amazonaws/services/redshift/model/transform/InProgressTableRestoreQuotaExceededExceptionUnmarshaller.java | 1720 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshift.model.transform;
import org.w3c.dom.Node;
import javax.annotation.Generated;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.transform.StandardErrorUnmarshaller;
import com.amazonaws.services.redshift.model.InProgressTableRestoreQuotaExceededException;
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InProgressTableRestoreQuotaExceededExceptionUnmarshaller extends StandardErrorUnmarshaller {

    public InProgressTableRestoreQuotaExceededExceptionUnmarshaller() {
        super(InProgressTableRestoreQuotaExceededException.class);
    }

    /**
     * Unmarshalls the XML error node into the typed exception, or returns
     * {@code null} when the error code is not the one this unmarshaller handles
     * (allowing the next unmarshaller in the chain to try).
     */
    @Override
    public AmazonServiceException unmarshall(Node node) throws Exception {
        // Bail out if this isn't the right error code that this
        // marshaller understands
        String errorCode = parseErrorCode(node);
        if (errorCode == null || !errorCode.equals("InProgressTableRestoreQuotaExceededFault"))
            return null;
        InProgressTableRestoreQuotaExceededException e = (InProgressTableRestoreQuotaExceededException) super.unmarshall(node);
        return e;
    }
}
| apache-2.0 |
cheng-li/pyramid | core/src/main/java/edu/neu/ccs/pyramid/application/BRLREN.java | 17104 | package edu.neu.ccs.pyramid.application;
import com.fasterxml.jackson.databind.ObjectMapper;
import edu.neu.ccs.pyramid.classification.logistic_regression.LogisticRegression;
import edu.neu.ccs.pyramid.classification.logistic_regression.LogisticRegressionInspector;
import edu.neu.ccs.pyramid.configuration.Config;
import edu.neu.ccs.pyramid.dataset.*;
import edu.neu.ccs.pyramid.eval.MAP;
import edu.neu.ccs.pyramid.eval.MLMeasures;
import edu.neu.ccs.pyramid.feature.Feature;
import edu.neu.ccs.pyramid.feature.FeatureList;
import edu.neu.ccs.pyramid.feature.TopFeatures;
import edu.neu.ccs.pyramid.multilabel_classification.MultiLabelClassifier;
import edu.neu.ccs.pyramid.multilabel_classification.cbm.*;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGBInspector;
import edu.neu.ccs.pyramid.optimization.EarlyStopper;
import edu.neu.ccs.pyramid.util.ListUtil;
import edu.neu.ccs.pyramid.util.Pair;
import edu.neu.ccs.pyramid.util.PrintUtil;
import edu.neu.ccs.pyramid.util.Serialization;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.time.StopWatch;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.*;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class BRLREN {
private static boolean VERBOSE = false;
    /**
     * Programmatic entry point: configures file logging (when "output.log" is
     * set), creates the output directory and, when "train" is enabled, loads
     * the data sets and runs training.
     *
     * @param config pyramid configuration driving all behavior
     * @throws Exception on configuration, I/O or training failure
     */
    public static void main(Config config) throws Exception{
        Logger logger = Logger.getAnonymousLogger();
        String logFile = config.getString("output.log");
        FileHandler fileHandler = null;
        if (!logFile.isEmpty()){
            // Mirror all log output to the configured file as well.
            new File(logFile).getParentFile().mkdirs();
            //todo should append?
            fileHandler = new FileHandler(logFile, true);
            java.util.logging.Formatter formatter = new SimpleFormatter();
            fileHandler.setFormatter(formatter);
            logger.addHandler(fileHandler);
            logger.setUseParentHandlers(false);
        }
//        logger.info(config.toString());
        VERBOSE = config.getBoolean("output.verbose");
        new File(config.getString("output.dir")).mkdirs();
        if (config.getBoolean("train")){
            logger.info("============================================================");
            HyperParameters hyperParameters = new HyperParameters(config);
            MultiLabelClfDataSet trainSet = loadTrainData(config);
            MultiLabelClfDataSet validSet = loadValidData(config);
            train(config, hyperParameters, trainSet, validSet, logger);
            logger.info("============================================================");
        }
        // Release the log file handle so buffered output is flushed.
        if (fileHandler!=null){
            fileHandler.close();
        }
    }
public static void main(String[] args) throws Exception {
if (args.length != 1) {
throw new IllegalArgumentException("Please specify a properties file.");
}
Config config = new Config(args[0]);
main(config);
}
private static void train(Config config, HyperParameters hyperParameters, MultiLabelClfDataSet trainSet, MultiLabelClfDataSet validSet,
Logger logger) throws Exception{
List<Integer> unobservedLabels = DataSetUtil.unobservedLabels(trainSet);
if (!unobservedLabels.isEmpty()){
logger.info("The following labels do not actually appear in the training set and therefore cannot be learned:");
logger.info(ListUtil.toSimpleString(unobservedLabels));
FileUtils.writeStringToFile(Paths.get(config.getString("output.dir"),"model_predictions",config.getString("output.modelFolder"),"analysis","unobserved_labels.txt").toFile(), ListUtil.toSimpleString(unobservedLabels));
}
String output = config.getString("output.dir");
EarlyStopper earlyStopper = loadNewEarlyStopper();
StopWatch stopWatch = new StopWatch();
stopWatch.start();
CBM cbm = newCBM(config,trainSet, hyperParameters, logger);
ENCBMOptimizer optimizer = getOptimizer(config, hyperParameters, cbm, trainSet);
logger.info("Initializing the model");
if (config.getBoolean("train.randomInitialize")) {
optimizer.randInitialize();
} else {
optimizer.initialize();
}
logger.info("Initialization done");
AccPredictor accPredictor = new AccPredictor(cbm);
accPredictor.setComponentContributionThreshold(config.getDouble("predict.piThreshold"));
int interval = 1;
for (int iter=1;true;iter++){
logger.info("Training progress: iteration "+iter );
optimizer.iterate();
if (iter%interval==0){
MLMeasures validMeasures = new MLMeasures(accPredictor,validSet);
if (VERBOSE){
logger.info("validation performance");
logger.info(validMeasures.toString());
}
earlyStopper.add(iter,validMeasures.getInstanceAverage().getAccuracy());
if (earlyStopper.getBestIteration()==iter){
Serialization.serialize(cbm, Paths.get(output,"model_predictions",config.getString("output.modelFolder"),"models","classifier"));
}
if (earlyStopper.shouldStop()){
if (VERBOSE){
logger.info("Early Stopper: the training should stop now!");
logger.info("Early Stopper: best iteration found = "+earlyStopper.getBestIteration());
logger.info("Early Stopper: best validation performance = "+earlyStopper.getBestValue());
}
break;
}
}
}
logger.info("training done!");
logger.info("time spent on training = "+stopWatch);
List<MultiLabel> support = DataSetUtil.gatherMultiLabels(trainSet);
Serialization.serialize(support, Paths.get(output,"model_predictions",config.getString("output.modelFolder"),"models","support"));
CBM bestModel = (CBM) Serialization.deserialize(Paths.get(output,"model_predictions",config.getString("output.modelFolder"),"models","classifier"));
boolean topFeaturesToFile = true;
if (topFeaturesToFile){
File analysisFolder = Paths.get(output, "model_predictions",config.getString("output.modelFolder"),"analysis").toFile();
analysisFolder.mkdirs();
logger.info("start writing top features");
List<TopFeatures> topFeaturesList = IntStream.range(0,bestModel.getNumClasses())
.mapToObj(k ->topFeatures(bestModel, trainSet.getFeatureList(), trainSet.getLabelTranslator(), k, 100)).collect(Collectors.toList());
ObjectMapper mapper = new ObjectMapper();
String file = "top_features.json";
mapper.writeValue(new File(analysisFolder,file), topFeaturesList);
StringBuilder sb = new StringBuilder();
for (int l=0;l<bestModel.getNumClasses();l++){
sb.append("-------------------------").append("\n");
sb.append(bestModel.getLabelTranslator().toExtLabel(l)).append(":").append("\n");
for (Feature feature: topFeaturesList.get(l).getTopFeatures()){
sb.append(feature.simpleString()).append(", ");
}
sb.append("\n");
}
FileUtils.writeStringToFile(new File(analysisFolder, "top_features.txt"), sb.toString());
logger.info("finish writing top features");
}
}
private static TopFeatures topFeatures(CBM cbm, FeatureList featureList, LabelTranslator mlLabelTranslator, int classIndex,
int limit){
LogisticRegression logisticRegression = (LogisticRegression) cbm.getBinaryClassifiers()[0][classIndex];
logisticRegression.setFeatureList(featureList);
List<String> labels = new ArrayList<>();
labels.add("not_"+mlLabelTranslator.toExtLabel(classIndex));
labels.add(mlLabelTranslator.toExtLabel(classIndex));
LabelTranslator labelTranslator = new LabelTranslator(labels);
logisticRegression.setLabelTranslator(labelTranslator);
TopFeatures topFeatures = LogisticRegressionInspector.topFeatures(logisticRegression, 1,limit);
return topFeatures;
}
//todo currently only for br
private static void featureImportance(Config config, CBM cbm, FeatureList featureList, LabelTranslator mlLabelTranslator,
Logger logger) throws Exception{
logger.info("number of selected features in all labels (union)= "+CBMInspector.usedFeatures(cbm).size());
int[] featuresByEach = CBMInspector.usedFeaturesByEachLabel(cbm);
double average = Arrays.stream(featuresByEach).average().getAsDouble();
logger.info("average number of selected features in each label ="+average);
StringBuilder sbcount = new StringBuilder();
for (int l=0;l<featuresByEach.length;l++){
sbcount.append(mlLabelTranslator.toExtLabel(l)).append(":").append(featuresByEach[l]).append("\n");
}
String output = config.getString("output.dir");
Paths.get(output, "model_predictions",config.getString("output.modelFolder"),"analysis").toFile().mkdirs();
try (BufferedWriter bw = new BufferedWriter(new FileWriter(Paths.get(output, "model_predictions",config.getString("output.modelFolder"),"analysis","top_features.txt").toFile()))){
for (int l=0;l<cbm.getNumClasses();l++){
if (cbm.getBinaryClassifiers()[0][l] instanceof LogisticRegression){
LogisticRegression logisticRegression = (LogisticRegression) cbm.getBinaryClassifiers()[0][l];
logisticRegression.setFeatureList(featureList);
List<String> labels = new ArrayList<>();
labels.add("not_"+mlLabelTranslator.toExtLabel(l));
labels.add(mlLabelTranslator.toExtLabel(l));
LabelTranslator labelTranslator = new LabelTranslator(labels);
logisticRegression.setLabelTranslator(labelTranslator);
TopFeatures topFeatures = LogisticRegressionInspector.topFeatures(logisticRegression, 1,Integer.MAX_VALUE);
bw.write("label "+l+" ("+mlLabelTranslator.toExtLabel(l)+")");
bw.write(": ");
for (int f=0;f<topFeatures.getTopFeatures().size();f++){
Feature feature = topFeatures.getTopFeatures().get(f);
double utility = topFeatures.getUtilities().get(f);
if (utility!=0){
bw.write(""+feature.getIndex());
// bw.write(" (");
// bw.write(feature.getName());
// bw.write(")");
bw.write(":");
bw.write(""+utility);
bw.write(", ");
}
}
bw.write("\n");
}
}
}
logger.info("feature count in each label is saved to the file "+Paths.get(output, "model_predictions",config.getString("output.modelFolder"),"analysis","feature_count_in_each_label.txt").toFile().getAbsolutePath());
FileUtils.writeStringToFile(Paths.get(output, "model_predictions",config.getString("output.modelFolder"),"analysis","feature_count_in_each_label.txt").toFile(),sbcount.toString());
}
private static ENCBMOptimizer getOptimizer(Config config, HyperParameters hyperParameters, CBM cbm, MultiLabelClfDataSet trainSet){
ENCBMOptimizer optimizer = new ENCBMOptimizer(cbm, trainSet);
if (config.getBoolean("train.useInstanceWeights")){
double[] instanceWeights = loadInstanceWeights(config);
optimizer.setInstanceWeights(instanceWeights);
}
optimizer.setLineSearch(config.getBoolean("train.elasticnet.lineSearch"));
optimizer.setRegularizationBinary(hyperParameters.penalty);
optimizer.setRegularizationMultiClass(hyperParameters.penalty);
optimizer.setL1RatioBinary(hyperParameters.l1Ratio);
optimizer.setL1RatioMultiClass(hyperParameters.l1Ratio);
optimizer.setActiveSet(config.getBoolean("train.elasticnet.activeSet"));
optimizer.setBinaryUpdatesPerIter(config.getInt("train.updatesPerIteration"));
optimizer.setMulticlassUpdatesPerIter(config.getInt("train.updatesPerIteration"));
optimizer.setSkipDataThreshold(config.getDouble("train.skipDataThreshold"));
optimizer.setSkipLabelThreshold(config.getDouble("train.skipLabelThreshold"));
optimizer.setMaxNumLinearRegUpdates(config.getInt("train.maxNumLinearRegUpdates"));
//
return optimizer;
}
private static CBM newCBM(Config config, MultiLabelClfDataSet trainSet, HyperParameters hyperParameters,
Logger logger){
CBM cbm;
cbm = CBM.getBuilder()
.setNumClasses(trainSet.getNumClasses())
.setNumFeatures(trainSet.getNumFeatures())
.setNumComponents(hyperParameters.numComponents)
.setMultiClassClassifierType("elasticnet")
.setBinaryClassifierType("elasticnet")
.setDense(true)
.build();
cbm.setLabelTranslator(trainSet.getLabelTranslator());
String allowEmpty = config.getString("predict.allowEmpty");
switch (allowEmpty){
case "true":
cbm.setAllowEmpty(true);
break;
case "false":
cbm.setAllowEmpty(false);
break;
case "auto":
Set<MultiLabel> seen = DataSetUtil.gatherMultiLabels(trainSet).stream().collect(Collectors.toSet());
MultiLabel empty = new MultiLabel();
if (seen.contains(empty)){
cbm.setAllowEmpty(true);
if (VERBOSE){
logger.info("training set contains empty labels, automatically set predict.allowEmpty = true");
}
} else {
cbm.setAllowEmpty(false);
if (VERBOSE){
logger.info("training set does not contain empty labels, automatically set predict.allowEmpty = false");
}
}
break;
default:
throw new IllegalArgumentException("unknown value for predict.allowEmpty");
}
return cbm;
}
private static EarlyStopper loadNewEarlyStopper(){
int patience = 5;
EarlyStopper earlyStopper = new EarlyStopper(EarlyStopper.Goal.MAXIMIZE,patience);
earlyStopper.setMinimumIterations(5);
return earlyStopper;
}
private static MultiLabelClfDataSet loadValidData(Config config) throws Exception{
MultiLabelClfDataSet validSet = TRECFormat.loadMultiLabelClfDataSetAutoSparseSequential(config.getString("input.validData"));
return validSet;
}
private static MultiLabelClfDataSet loadTrainData(Config config) throws Exception{
MultiLabelClfDataSet trainSet = TRECFormat.loadMultiLabelClfDataSetAutoSparseSequential(config.getString("input.trainData"));
return trainSet;
}
private static double[] loadInstanceWeights(Config config){
File file = new File(config.getString("input.trainData"),"instance_weights.txt");
double[] weights = new double[0];
try {
weights = FileUtils.readLines(file).stream().mapToDouble(Double::parseDouble).toArray();
} catch (IOException e) {
e.printStackTrace();
}
return weights;
}
private static class HyperParameters{
double penalty;
double l1Ratio;
int numComponents;
HyperParameters() {
}
HyperParameters(Config config) {
penalty = config.getDouble("train.penalty");
l1Ratio = config.getDouble("train.l1Ratio");
numComponents = config.getInt("train.numComponents");
}
Config asConfig(){
Config config = new Config();
config.setDouble("train.penalty", penalty);
config.setDouble("train.l1Ratio", l1Ratio);
config.setInt("train.numComponents", numComponents);
return config;
}
}
private static boolean containsNovelClass(MultiLabel multiLabel, List<Integer> novelLabels){
for (int l:novelLabels){
if (multiLabel.matchClass(l)){
return true;
}
}
return false;
}
}
| apache-2.0 |
googleapis/java-network-management | google-cloud-network-management/src/main/java/com/google/cloud/networkmanagement/v1/stub/ReachabilityServiceStubSettings.java | 32774 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.networkmanagement.v1.stub;
import static com.google.cloud.networkmanagement.v1.ReachabilityServiceClient.ListConnectivityTestsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.networkmanagement.v1.ConnectivityTest;
import com.google.cloud.networkmanagement.v1.CreateConnectivityTestRequest;
import com.google.cloud.networkmanagement.v1.DeleteConnectivityTestRequest;
import com.google.cloud.networkmanagement.v1.GetConnectivityTestRequest;
import com.google.cloud.networkmanagement.v1.ListConnectivityTestsRequest;
import com.google.cloud.networkmanagement.v1.ListConnectivityTestsResponse;
import com.google.cloud.networkmanagement.v1.OperationMetadata;
import com.google.cloud.networkmanagement.v1.RerunConnectivityTestRequest;
import com.google.cloud.networkmanagement.v1.UpdateConnectivityTestRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link ReachabilityServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (networkmanagement.googleapis.com) and default port (443) are
* used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getConnectivityTest to 30 seconds:
*
* <pre>{@code
* ReachabilityServiceStubSettings.Builder reachabilityServiceSettingsBuilder =
* ReachabilityServiceStubSettings.newBuilder();
* reachabilityServiceSettingsBuilder
* .getConnectivityTestSettings()
* .setRetrySettings(
* reachabilityServiceSettingsBuilder
* .getConnectivityTestSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* ReachabilityServiceStubSettings reachabilityServiceSettings =
* reachabilityServiceSettingsBuilder.build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class ReachabilityServiceStubSettings extends StubSettings<ReachabilityServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Immutable per-RPC call settings, snapshotted from the Builder. Each
  // long-running RPC (create/update/rerun/delete) carries both a
  // UnaryCallSettings for the initial call that returns an Operation, and an
  // OperationCallSettings for polling that Operation to completion.
  private final PagedCallSettings<
          ListConnectivityTestsRequest,
          ListConnectivityTestsResponse,
          ListConnectivityTestsPagedResponse>
      listConnectivityTestsSettings;
  private final UnaryCallSettings<GetConnectivityTestRequest, ConnectivityTest>
      getConnectivityTestSettings;
  private final UnaryCallSettings<CreateConnectivityTestRequest, Operation>
      createConnectivityTestSettings;
  private final OperationCallSettings<
          CreateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
      createConnectivityTestOperationSettings;
  private final UnaryCallSettings<UpdateConnectivityTestRequest, Operation>
      updateConnectivityTestSettings;
  private final OperationCallSettings<
          UpdateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
      updateConnectivityTestOperationSettings;
  private final UnaryCallSettings<RerunConnectivityTestRequest, Operation>
      rerunConnectivityTestSettings;
  private final OperationCallSettings<
          RerunConnectivityTestRequest, ConnectivityTest, OperationMetadata>
      rerunConnectivityTestOperationSettings;
  private final UnaryCallSettings<DeleteConnectivityTestRequest, Operation>
      deleteConnectivityTestSettings;
  // Delete resolves to Empty rather than a ConnectivityTest.
  private final OperationCallSettings<DeleteConnectivityTestRequest, Empty, OperationMetadata>
      deleteConnectivityTestOperationSettings;
  // Teaches the paging machinery how to walk ListConnectivityTests pages: how to
  // inject page tokens/sizes into requests and how to pull the next token and
  // the resources out of responses.
  private static final PagedListDescriptor<
          ListConnectivityTestsRequest, ListConnectivityTestsResponse, ConnectivityTest>
      LIST_CONNECTIVITY_TESTS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListConnectivityTestsRequest, ListConnectivityTestsResponse, ConnectivityTest>() {
            @Override
            public String emptyToken() {
              // The empty string requests the first page.
              return "";
            }

            @Override
            public ListConnectivityTestsRequest injectToken(
                ListConnectivityTestsRequest payload, String token) {
              return ListConnectivityTestsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListConnectivityTestsRequest injectPageSize(
                ListConnectivityTestsRequest payload, int pageSize) {
              return ListConnectivityTestsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListConnectivityTestsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListConnectivityTestsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<ConnectivityTest> extractResources(
                ListConnectivityTestsResponse payload) {
              // Never hand callers a null iterable; substitute an empty list.
              return payload.getResourcesList() == null
                  ? ImmutableList.<ConnectivityTest>of()
                  : payload.getResourcesList();
            }
          };
  // Wraps a raw ListConnectivityTests future into the lazily-paging
  // ListConnectivityTestsPagedResponse, using the descriptor above.
  private static final PagedListResponseFactory<
          ListConnectivityTestsRequest,
          ListConnectivityTestsResponse,
          ListConnectivityTestsPagedResponse>
      LIST_CONNECTIVITY_TESTS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListConnectivityTestsRequest,
              ListConnectivityTestsResponse,
              ListConnectivityTestsPagedResponse>() {
            @Override
            public ApiFuture<ListConnectivityTestsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListConnectivityTestsRequest, ListConnectivityTestsResponse> callable,
                ListConnectivityTestsRequest request,
                ApiCallContext context,
                ApiFuture<ListConnectivityTestsResponse> futureResponse) {
              PageContext<
                      ListConnectivityTestsRequest, ListConnectivityTestsResponse, ConnectivityTest>
                  pageContext =
                      PageContext.create(
                          callable, LIST_CONNECTIVITY_TESTS_PAGE_STR_DESC, request, context);
              return ListConnectivityTestsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  /** Returns the object with the settings used for calls to listConnectivityTests. */
  public PagedCallSettings<
          ListConnectivityTestsRequest,
          ListConnectivityTestsResponse,
          ListConnectivityTestsPagedResponse>
      listConnectivityTestsSettings() {
    return listConnectivityTestsSettings;
  }

  /** Returns the object with the settings used for calls to getConnectivityTest. */
  public UnaryCallSettings<GetConnectivityTestRequest, ConnectivityTest>
      getConnectivityTestSettings() {
    return getConnectivityTestSettings;
  }

  /** Returns the settings for the initial createConnectivityTest call (returns an Operation). */
  public UnaryCallSettings<CreateConnectivityTestRequest, Operation>
      createConnectivityTestSettings() {
    return createConnectivityTestSettings;
  }

  /** Returns the long-running-operation settings for createConnectivityTest (polling/timeouts). */
  public OperationCallSettings<CreateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
      createConnectivityTestOperationSettings() {
    return createConnectivityTestOperationSettings;
  }

  /** Returns the settings for the initial updateConnectivityTest call (returns an Operation). */
  public UnaryCallSettings<UpdateConnectivityTestRequest, Operation>
      updateConnectivityTestSettings() {
    return updateConnectivityTestSettings;
  }

  /** Returns the long-running-operation settings for updateConnectivityTest (polling/timeouts). */
  public OperationCallSettings<UpdateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
      updateConnectivityTestOperationSettings() {
    return updateConnectivityTestOperationSettings;
  }

  /** Returns the settings for the initial rerunConnectivityTest call (returns an Operation). */
  public UnaryCallSettings<RerunConnectivityTestRequest, Operation>
      rerunConnectivityTestSettings() {
    return rerunConnectivityTestSettings;
  }

  /** Returns the long-running-operation settings for rerunConnectivityTest (polling/timeouts). */
  public OperationCallSettings<RerunConnectivityTestRequest, ConnectivityTest, OperationMetadata>
      rerunConnectivityTestOperationSettings() {
    return rerunConnectivityTestOperationSettings;
  }

  /** Returns the settings for the initial deleteConnectivityTest call (returns an Operation). */
  public UnaryCallSettings<DeleteConnectivityTestRequest, Operation>
      deleteConnectivityTestSettings() {
    return deleteConnectivityTestSettings;
  }

  /** Returns the long-running-operation settings for deleteConnectivityTest (polling/timeouts). */
  public OperationCallSettings<DeleteConnectivityTestRequest, Empty, OperationMetadata>
      deleteConnectivityTestOperationSettings() {
    return deleteConnectivityTestOperationSettings;
  }
  /**
   * Creates the transport-specific stub for these settings. Only the gRPC
   * transport is supported; any other configured transport name fails fast.
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public ReachabilityServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcReachabilityServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "networkmanagement.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "networkmanagement.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    // Unbounded inbound message size; responses can carry large traces.
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns the default TransportChannelProvider (gRPC) for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  /** Returns a builder for the default client header provider (gapic/grpc version tokens). */
  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(ReachabilityServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }
  /** Returns a new builder for this class, pre-populated with the default settings. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class that reuses the given ClientContext. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Snapshots the builder's per-RPC settings into this immutable settings
   * instance; base StubSettings state (endpoint, credentials, …) is copied by
   * the super constructor.
   */
  protected ReachabilityServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    listConnectivityTestsSettings = settingsBuilder.listConnectivityTestsSettings().build();
    getConnectivityTestSettings = settingsBuilder.getConnectivityTestSettings().build();
    createConnectivityTestSettings = settingsBuilder.createConnectivityTestSettings().build();
    createConnectivityTestOperationSettings =
        settingsBuilder.createConnectivityTestOperationSettings().build();
    updateConnectivityTestSettings = settingsBuilder.updateConnectivityTestSettings().build();
    updateConnectivityTestOperationSettings =
        settingsBuilder.updateConnectivityTestOperationSettings().build();
    rerunConnectivityTestSettings = settingsBuilder.rerunConnectivityTestSettings().build();
    rerunConnectivityTestOperationSettings =
        settingsBuilder.rerunConnectivityTestOperationSettings().build();
    deleteConnectivityTestSettings = settingsBuilder.deleteConnectivityTestSettings().build();
    deleteConnectivityTestOperationSettings =
        settingsBuilder.deleteConnectivityTestOperationSettings().build();
  }
/** Builder for ReachabilityServiceStubSettings. */
public static class Builder
extends StubSettings.Builder<ReachabilityServiceStubSettings, Builder> {
    // All unary-style builders, collected so a retry policy can be applied to
    // every method at once via applyToAllUnaryMethods.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListConnectivityTestsRequest,
            ListConnectivityTestsResponse,
            ListConnectivityTestsPagedResponse>
        listConnectivityTestsSettings;
    private final UnaryCallSettings.Builder<GetConnectivityTestRequest, ConnectivityTest>
        getConnectivityTestSettings;
    private final UnaryCallSettings.Builder<CreateConnectivityTestRequest, Operation>
        createConnectivityTestSettings;
    private final OperationCallSettings.Builder<
            CreateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
        createConnectivityTestOperationSettings;
    private final UnaryCallSettings.Builder<UpdateConnectivityTestRequest, Operation>
        updateConnectivityTestSettings;
    private final OperationCallSettings.Builder<
            UpdateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
        updateConnectivityTestOperationSettings;
    private final UnaryCallSettings.Builder<RerunConnectivityTestRequest, Operation>
        rerunConnectivityTestSettings;
    private final OperationCallSettings.Builder<
            RerunConnectivityTestRequest, ConnectivityTest, OperationMetadata>
        rerunConnectivityTestOperationSettings;
    private final UnaryCallSettings.Builder<DeleteConnectivityTestRequest, Operation>
        deleteConnectivityTestSettings;
    private final OperationCallSettings.Builder<
            DeleteConnectivityTestRequest, Empty, OperationMetadata>
        deleteConnectivityTestOperationSettings;

    // Named retry policies shared by initDefaults. "no_retry_0_codes" is an
    // empty set of retryable status codes, i.e. no call is retried.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "no_retry_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      // "no_retry_0_params": a single attempt with a flat 60s RPC/total timeout.
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeout(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(60000L))
              .build();
      definitions.put("no_retry_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    /** Creates a builder with no ClientContext; all settings start from defaults. */
    protected Builder() {
      this(((ClientContext) null));
    }

    /**
     * Creates a builder with fresh per-RPC settings builders and applies the
     * generated default retry/timeout policies via initDefaults.
     */
    protected Builder(ClientContext clientContext) {
      super(clientContext);
      listConnectivityTestsSettings =
          PagedCallSettings.newBuilder(LIST_CONNECTIVITY_TESTS_PAGE_STR_FACT);
      getConnectivityTestSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createConnectivityTestSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createConnectivityTestOperationSettings = OperationCallSettings.newBuilder();
      updateConnectivityTestSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateConnectivityTestOperationSettings = OperationCallSettings.newBuilder();
      rerunConnectivityTestSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      rerunConnectivityTestOperationSettings = OperationCallSettings.newBuilder();
      deleteConnectivityTestSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteConnectivityTestOperationSettings = OperationCallSettings.newBuilder();
      // Operation settings are excluded: this list only carries the unary-style builders.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listConnectivityTestsSettings,
              getConnectivityTestSettings,
              createConnectivityTestSettings,
              updateConnectivityTestSettings,
              rerunConnectivityTestSettings,
              deleteConnectivityTestSettings);
      initDefaults(this);
    }
    /**
     * Creates a builder seeded from an existing settings instance (used by
     * toBuilder); note initDefaults is intentionally NOT re-applied here, so the
     * instance's customized values are preserved.
     */
    protected Builder(ReachabilityServiceStubSettings settings) {
      super(settings);
      listConnectivityTestsSettings = settings.listConnectivityTestsSettings.toBuilder();
      getConnectivityTestSettings = settings.getConnectivityTestSettings.toBuilder();
      createConnectivityTestSettings = settings.createConnectivityTestSettings.toBuilder();
      createConnectivityTestOperationSettings =
          settings.createConnectivityTestOperationSettings.toBuilder();
      updateConnectivityTestSettings = settings.updateConnectivityTestSettings.toBuilder();
      updateConnectivityTestOperationSettings =
          settings.updateConnectivityTestOperationSettings.toBuilder();
      rerunConnectivityTestSettings = settings.rerunConnectivityTestSettings.toBuilder();
      rerunConnectivityTestOperationSettings =
          settings.rerunConnectivityTestOperationSettings.toBuilder();
      deleteConnectivityTestSettings = settings.deleteConnectivityTestSettings.toBuilder();
      deleteConnectivityTestOperationSettings =
          settings.deleteConnectivityTestOperationSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listConnectivityTestsSettings,
              getConnectivityTestSettings,
              createConnectivityTestSettings,
              updateConnectivityTestSettings,
              rerunConnectivityTestSettings,
              deleteConnectivityTestSettings);
    }
    /**
     * Builds the stock configuration: default gRPC transport, application
     * default credentials, standard client headers, the default (and mTLS)
     * endpoints, and the generated per-RPC retry policies.
     */
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }
private static Builder initDefaults(Builder builder) {
builder
.listConnectivityTestsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
builder
.getConnectivityTestSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
builder
.createConnectivityTestSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
builder
.updateConnectivityTestSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
builder
.rerunConnectivityTestSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
builder
.deleteConnectivityTestSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
builder
.createConnectivityTestOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<CreateConnectivityTestRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(ConnectivityTest.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelay(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelay(Duration.ofMillis(45000L))
.setInitialRpcTimeout(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeout(Duration.ZERO)
.setTotalTimeout(Duration.ofMillis(300000L))
.build()));
builder
.updateConnectivityTestOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<UpdateConnectivityTestRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(ConnectivityTest.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelay(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelay(Duration.ofMillis(45000L))
.setInitialRpcTimeout(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeout(Duration.ZERO)
.setTotalTimeout(Duration.ofMillis(300000L))
.build()));
builder
.rerunConnectivityTestOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<RerunConnectivityTestRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(ConnectivityTest.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelay(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelay(Duration.ofMillis(45000L))
.setInitialRpcTimeout(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeout(Duration.ZERO)
.setTotalTimeout(Duration.ofMillis(300000L))
.build()));
builder
.deleteConnectivityTestOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<DeleteConnectivityTestRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelay(Duration.ofMillis(5000L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelay(Duration.ofMillis(45000L))
.setInitialRpcTimeout(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeout(Duration.ZERO)
.setTotalTimeout(Duration.ofMillis(300000L))
.build()));
return builder;
}
/**
* Applies the given settings updater function to all of the unary API methods in this service.
*
* <p>Note: This method does not support applying settings to streaming methods.
*/
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
return this;
}
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
return unaryMethodSettingsBuilders;
}
/** Returns the builder for the settings used for calls to listConnectivityTests. */
public PagedCallSettings.Builder<
ListConnectivityTestsRequest,
ListConnectivityTestsResponse,
ListConnectivityTestsPagedResponse>
listConnectivityTestsSettings() {
return listConnectivityTestsSettings;
}
/** Returns the builder for the settings used for calls to getConnectivityTest. */
public UnaryCallSettings.Builder<GetConnectivityTestRequest, ConnectivityTest>
getConnectivityTestSettings() {
return getConnectivityTestSettings;
}
/** Returns the builder for the settings used for calls to createConnectivityTest. */
public UnaryCallSettings.Builder<CreateConnectivityTestRequest, Operation>
createConnectivityTestSettings() {
return createConnectivityTestSettings;
}
/** Returns the builder for the settings used for calls to createConnectivityTest. */
@BetaApi(
"The surface for use by generated code is not stable yet and may change in the future.")
public OperationCallSettings.Builder<
CreateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
createConnectivityTestOperationSettings() {
return createConnectivityTestOperationSettings;
}
/** Returns the builder for the settings used for calls to updateConnectivityTest. */
public UnaryCallSettings.Builder<UpdateConnectivityTestRequest, Operation>
updateConnectivityTestSettings() {
return updateConnectivityTestSettings;
}
/** Returns the builder for the settings used for calls to updateConnectivityTest. */
@BetaApi(
"The surface for use by generated code is not stable yet and may change in the future.")
public OperationCallSettings.Builder<
UpdateConnectivityTestRequest, ConnectivityTest, OperationMetadata>
updateConnectivityTestOperationSettings() {
return updateConnectivityTestOperationSettings;
}
/** Returns the builder for the settings used for calls to rerunConnectivityTest. */
public UnaryCallSettings.Builder<RerunConnectivityTestRequest, Operation>
rerunConnectivityTestSettings() {
return rerunConnectivityTestSettings;
}
/** Returns the builder for the settings used for calls to rerunConnectivityTest. */
@BetaApi(
"The surface for use by generated code is not stable yet and may change in the future.")
public OperationCallSettings.Builder<
RerunConnectivityTestRequest, ConnectivityTest, OperationMetadata>
rerunConnectivityTestOperationSettings() {
return rerunConnectivityTestOperationSettings;
}
/** Returns the builder for the settings used for calls to deleteConnectivityTest. */
public UnaryCallSettings.Builder<DeleteConnectivityTestRequest, Operation>
deleteConnectivityTestSettings() {
return deleteConnectivityTestSettings;
}
/** Returns the builder for the settings used for calls to deleteConnectivityTest. */
@BetaApi(
"The surface for use by generated code is not stable yet and may change in the future.")
public OperationCallSettings.Builder<DeleteConnectivityTestRequest, Empty, OperationMetadata>
deleteConnectivityTestOperationSettings() {
return deleteConnectivityTestOperationSettings;
}
    /** Builds the immutable settings object from the current state of this builder. */
    @Override
    public ReachabilityServiceStubSettings build() throws IOException {
      return new ReachabilityServiceStubSettings(this);
    }
}
}
| apache-2.0 |
lliuwuzhou/coolweather | src/com/coolweather/app/util/Utility.java | 3526 | package com.coolweather.app.util;
import java.util.Date;
import java.text.SimpleDateFormat;
import java.util.Locale;
import org.json.JSONObject;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import com.coolweather.app.db.CoolWeatherDB;
import com.coolweather.app.model.City;
import com.coolweather.app.model.County;
import com.coolweather.app.model.Province;
public class Utility {

    /**
     * Parses the province list response, formatted as {@code "code|name,code|name,..."},
     * and persists every province through {@code coolWeatherDB}.
     *
     * @param coolWeatherDB database helper used to save the parsed provinces
     * @param response raw server response text
     * @return true if the response was non-empty, false otherwise
     */
    public static boolean handleProvinceResponse(CoolWeatherDB coolWeatherDB, String response) {
        if (TextUtils.isEmpty(response)) {
            return false;
        }
        // split() never returns null, so no null check is needed on the result.
        String[] allProvinces = response.split(",");
        if (allProvinces.length == 0) {
            return false;
        }
        for (String item : allProvinces) {
            String[] fields = item.split("\\|");
            if (fields.length < 2) {
                // Skip malformed entries instead of crashing with an index error.
                continue;
            }
            Province province = new Province();
            province.setProvinceCode(fields[0]);
            province.setProvinceName(fields[1]);
            coolWeatherDB.saveProvince(province);
        }
        return true;
    }

    /**
     * Parses the city list response ({@code "code|name,..."}) for one province and persists
     * every city through {@code coolWeatherDB}.
     *
     * @param coolWeatherDB database helper used to save the parsed cities
     * @param response raw server response text
     * @param provinceId id of the province the cities belong to
     * @return true if the response was non-empty, false otherwise
     */
    public static boolean handleCityResponse(CoolWeatherDB coolWeatherDB, String response, int provinceId) {
        if (TextUtils.isEmpty(response)) {
            return false;
        }
        String[] allCities = response.split(",");
        if (allCities.length == 0) {
            return false;
        }
        for (String item : allCities) {
            String[] fields = item.split("\\|");
            if (fields.length < 2) {
                // Skip malformed entries instead of crashing with an index error.
                continue;
            }
            City city = new City();
            city.setCityCode(fields[0]);
            city.setCityName(fields[1]);
            city.setProvinceid(provinceId);
            coolWeatherDB.saveCity(city);
        }
        return true;
    }

    /**
     * Parses the county list response ({@code "code|name,..."}) for one city and persists
     * every county through {@code coolWeatherDB}.
     *
     * @param coolWeatherDB database helper used to save the parsed counties
     * @param response raw server response text
     * @param cityId id of the city the counties belong to
     * @return true if the response was non-empty, false otherwise
     */
    public static boolean handleCountyResponse(CoolWeatherDB coolWeatherDB, String response, int cityId) {
        if (TextUtils.isEmpty(response)) {
            return false;
        }
        String[] allCounties = response.split(",");
        if (allCounties.length == 0) {
            return false;
        }
        for (String item : allCounties) {
            String[] fields = item.split("\\|");
            if (fields.length < 2) {
                // Skip malformed entries instead of crashing with an index error.
                continue;
            }
            County county = new County();
            county.setCountyCode(fields[0]);
            county.setCountyName(fields[1]);
            county.setCityId(cityId);
            coolWeatherDB.saveCounty(county);
        }
        return true;
    }

    /**
     * Parses the weather JSON response and stores the extracted fields in the default
     * SharedPreferences. The response is expected to contain a {@code "weatherinfo"} object
     * with the keys {@code city}, {@code cityid}, {@code temp1}, {@code temp2},
     * {@code weather} and {@code ptime}.
     *
     * <p>Parsing failures are printed and otherwise ignored (best-effort).
     *
     * @param context context used to resolve the default SharedPreferences
     * @param response raw JSON response text
     */
    public static void handleWeatherResponse(Context context, String response)
    {
        try {
            JSONObject jsonObject = new JSONObject(response);
            JSONObject weatherInfo = jsonObject.getJSONObject("weatherinfo");
            saveWeatherInfo(context,
                    weatherInfo.getString("city"),
                    weatherInfo.getString("cityid"),
                    weatherInfo.getString("temp1"),
                    weatherInfo.getString("temp2"),
                    weatherInfo.getString("weather"),
                    weatherInfo.getString("ptime"));
        } catch (Exception e) {
            // Best-effort: a malformed response must not crash the app.
            e.printStackTrace();
        }
    }

    /**
     * Writes the given weather fields, plus the current local date, into the default
     * SharedPreferences and marks a city as selected.
     *
     * @param context context used to resolve the default SharedPreferences
     * @param cityName display name of the city
     * @param weatherCode server-side id used to refresh this city's weather
     * @param temp1 low temperature text
     * @param temp2 high temperature text
     * @param weatherDesp weather description text
     * @param publishTime time the data was published by the server
     */
    public static void saveWeatherInfo(Context context, String cityName, String weatherCode,
            String temp1, String temp2, String weatherDesp, String publishTime)
    {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy年M月d日", Locale.CHINA);
        SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(context).edit();
        editor.putBoolean("city_selected", true);
        editor.putString("city_name", cityName);
        editor.putString("weather_code", weatherCode);
        editor.putString("temp1", temp1);
        editor.putString("temp2", temp2);
        editor.putString("weather_desp", weatherDesp);
        editor.putString("publish_time", publishTime);
        editor.putString("current_time", sdf.format(new Date()));
        // commit() is kept (synchronous write) to preserve the original semantics.
        editor.commit();
    }
}
| apache-2.0 |
madebyatomicrobot/vinyl | vinyl-samples/src/main/java/com/madebyatomicrobot/vinyl/samples/multipleparent/Child.java | 189 | package com.madebyatomicrobot.vinyl.samples.multipleparent;
import com.madebyatomicrobot.vinyl.annotations.Record;
/**
 * Sample record interface that extends two parent record interfaces ({@link Dad} and
 * {@link Mom}), presumably to exercise multiple-parent inheritance in the vinyl
 * {@code @Record} processor — confirm against the generator's documentation.
 */
@Record
public interface Child extends Dad, Mom {
    /** The child's own property, in addition to whatever {@link Dad} and {@link Mom} declare. */
    String child();
}
| apache-2.0 |
quarkusio/quarkus | extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/kafka/KafkaEventObserverTest.java | 854 | package io.quarkus.micrometer.runtime.binder.kafka;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import io.micrometer.core.instrument.binder.kafka.KafkaClientMetrics;
import io.quarkus.runtime.ShutdownEvent;
class KafkaEventObserverTest {

    /**
     * On application shutdown the observer must close every registered Kafka client
     * metrics binder, not just one of them.
     */
    @Test
    void testAllKafkaClientMetricsClosed() {
        KafkaEventObserver observer = new KafkaEventObserver();

        KafkaClientMetrics metricsA = Mockito.mock(KafkaClientMetrics.class);
        KafkaClientMetrics metricsB = Mockito.mock(KafkaClientMetrics.class);
        observer.clientMetrics.put(metricsA, metricsA);
        observer.clientMetrics.put(metricsB, metricsB);

        observer.onStop(new ShutdownEvent());

        Mockito.verify(metricsA).close();
        Mockito.verify(metricsB).close();
    }
}
| apache-2.0 |
sdw2330976/Research-jetty-9.2.5 | jetty-plus/src/main/java/org/eclipse/jetty/plus/annotation/ContainerInitializer.java | 7536 | //
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.plus.annotation;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.ServletContainerInitializer;
import org.eclipse.jetty.util.ConcurrentHashSet;
import org.eclipse.jetty.util.Loader;
import org.eclipse.jetty.util.StringUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.webapp.WebAppContext;
public class ContainerInitializer
{
    private static final Logger LOG = Log.getLogger(ContainerInitializer.class);

    /**
     * Pattern matching the output of {@link #toString()}. Compiled once instead of on every
     * invocation of the parsing constructor.
     */
    private static final Pattern TOSTRING_PATTERN =
        Pattern.compile("ContainerInitializer\\{(.*),interested=(.*),applicable=(.*),annotated=(.*)\\}");

    /** The wrapped ServletContainerInitializer to invoke at startup. */
    final protected ServletContainerInitializer _target;
    /** The classes/annotations the initializer declared interest in (may be null). */
    final protected Class<?>[] _interestedTypes;
    /** Names of all classes to pass to onStartup: annotated classes plus their subtypes. */
    final protected Set<String> _applicableTypeNames = new ConcurrentHashSet<String>();
    /** Names of classes that directly carry an annotation of interest. */
    final protected Set<String> _annotatedTypeNames = new ConcurrentHashSet<String>();

    public ContainerInitializer (ServletContainerInitializer target, Class<?>[] classes)
    {
        _target = target;
        _interestedTypes = classes;
    }

    /**
     * Reconstructs a ContainerInitializer from a string previously produced by
     * {@link #toString()}, resolving all class names through the given loader.
     *
     * @param loader the class loader used to resolve the initializer and its interested types
     * @param toString the serialized form to parse
     * @throws IllegalArgumentException if the string does not match the expected format or a
     *             named class cannot be loaded/instantiated
     */
    public ContainerInitializer (ClassLoader loader, String toString)
    {
        Matcher m = TOSTRING_PATTERN.matcher(toString);
        if (!m.matches())
            throw new IllegalArgumentException(toString);

        try
        {
            _target = (ServletContainerInitializer)loader.loadClass(m.group(1)).newInstance();
            String[] interested = StringUtil.arrayFromString(m.group(2));
            _interestedTypes = new Class<?>[interested.length];
            for (int i = 0; i < interested.length; i++)
                _interestedTypes[i] = loader.loadClass(interested[i]);
            for (String s : StringUtil.arrayFromString(m.group(3)))
                _applicableTypeNames.add(s);
            for (String s : StringUtil.arrayFromString(m.group(4)))
                _annotatedTypeNames.add(s);
        }
        catch (Exception e)
        {
            throw new IllegalArgumentException(toString, e);
        }
    }

    public ServletContainerInitializer getTarget ()
    {
        return _target;
    }

    /** Returns the types the initializer declared interest in (was raw {@code Class[]}). */
    public Class<?>[] getInterestedTypes ()
    {
        return _interestedTypes;
    }

    /**
     * A class has been found that has an annotation of interest
     * to this initializer.
     *
     * @param className the fully-qualified name of the annotated class
     */
    public void addAnnotatedTypeName (String className)
    {
        _annotatedTypeNames.add(className);
    }

    public Set<String> getAnnotatedTypeNames ()
    {
        return Collections.unmodifiableSet(_annotatedTypeNames);
    }

    /** Records a class name that should be passed to the initializer's onStartup. */
    public void addApplicableTypeName (String className)
    {
        _applicableTypeNames.add(className);
    }

    public Set<String> getApplicableTypeNames ()
    {
        return Collections.unmodifiableSet(_applicableTypeNames);
    }

    /**
     * Loads all applicable classes via the context's classloader and invokes the wrapped
     * initializer's {@code onStartup}. Extended listener types are enabled only for the
     * duration of the call, and the thread's context classloader is always restored.
     *
     * @param context the web application being started
     * @throws Exception if a class cannot be loaded or onStartup fails
     */
    public void callStartup(WebAppContext context)
        throws Exception
    {
        if (_target == null)
            return;

        Set<Class<?>> classes = new HashSet<Class<?>>();
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(context.getClassLoader());

        try
        {
            for (String s : _applicableTypeNames)
                classes.add(Loader.loadClass(context.getClass(), s));

            context.getServletContext().setExtendedListenerTypes(true);

            // Single onStartup call; the duplicated debug/non-debug branches are collapsed
            // and the (cheap) nanoTime sample is taken unconditionally.
            long start = System.nanoTime();
            _target.onStartup(classes, context.getServletContext());
            if (LOG.isDebugEnabled())
                LOG.debug("ContainerInitializer {} called in {}ms", _target.getClass().getName(),
                          TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
        }
        finally
        {
            context.getServletContext().setExtendedListenerTypes(false);
            Thread.currentThread().setContextClassLoader(oldLoader);
        }
    }

    public String toString()
    {
        List<String> interested = Collections.emptyList();
        if (_interestedTypes != null)
        {
            interested = new ArrayList<>(_interestedTypes.length);
            for (Class<?> c : _interestedTypes)
                interested.add(c.getName());
        }
        return String.format("ContainerInitializer{%s,interested=%s,applicable=%s,annotated=%s}",_target.getClass().getName(),interested,_applicableTypeNames,_annotatedTypeNames);
    }

    /**
     * Expands the set of applicable classes: every annotated class plus, via the supplied
     * inheritance map, everything that extends or implements an annotated class or one of the
     * (non-annotation) HandlesTypes classes.
     *
     * @param context the web application (unused here, kept for API compatibility)
     * @param classMap map from class name to the names of classes that directly extend/implement it
     */
    public void resolveClasses(WebAppContext context, Map<String, Set<String>> classMap)
    {
        //We have already found the classes that directly have an annotation that was in the HandlesTypes
        //annotation of the ServletContainerInitializer. For each of those classes, walk the inheritance
        //hierarchy to find classes that extend or implement them.
        Set<String> annotatedClassNames = getAnnotatedTypeNames();
        if (annotatedClassNames != null && !annotatedClassNames.isEmpty())
        {
            for (String name : annotatedClassNames)
            {
                //add the class that has the annotation
                addApplicableTypeName(name);

                //find and add the classes that inherit the annotation
                //(the map is already typed, no cast needed)
                addInheritedTypes(classMap, classMap.get(name));
            }
        }

        //Now we need to look at the HandlesTypes classes that were not annotations. We need to
        //find all classes that extend or implement them.
        if (getInterestedTypes() != null)
        {
            for (Class<?> c : getInterestedTypes())
            {
                if (!c.isAnnotation())
                {
                    //find and add the classes that implement or extend the class.
                    //but not including the class itself
                    addInheritedTypes(classMap, classMap.get(c.getName()));
                }
            }
        }
    }

    /** Recursively adds {@code names} and all their subtypes (per {@code classMap}) as applicable. */
    private void addInheritedTypes(Map<String, Set<String>> classMap, Set<String> names)
    {
        if (names == null || names.isEmpty())
            return;

        for (String s : names)
        {
            //add the name of the class
            addApplicableTypeName(s);

            //walk the hierarchy and find all types that extend or implement the class
            addInheritedTypes(classMap, classMap.get(s));
        }
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-waf/src/main/java/com/amazonaws/services/waf/model/CreateRuleResult.java | 6510 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.waf.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result returned by a {@code CreateRule} call.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/waf-regional-2016-11-28/CreateRule" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateRuleResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The {@link Rule} returned in the {@code CreateRule} response. */
    private Rule rule;

    /**
     * The {@code ChangeToken} that was used to submit the {@code CreateRule} request; it can also
     * be passed to {@code GetChangeTokenStatus} to query the status of the request.
     */
    private String changeToken;

    /** Sets the {@link Rule} returned in the {@code CreateRule} response. */
    public void setRule(Rule rule) {
        this.rule = rule;
    }

    /** Returns the {@link Rule} returned in the {@code CreateRule} response. */
    public Rule getRule() {
        return rule;
    }

    /**
     * Fluent variant of {@link #setRule(Rule)}.
     *
     * @return this object, so method calls can be chained
     */
    public CreateRuleResult withRule(Rule rule) {
        setRule(rule);
        return this;
    }

    /** Sets the change token that was used to submit the {@code CreateRule} request. */
    public void setChangeToken(String changeToken) {
        this.changeToken = changeToken;
    }

    /** Returns the change token that was used to submit the {@code CreateRule} request. */
    public String getChangeToken() {
        return changeToken;
    }

    /**
     * Fluent variant of {@link #setChangeToken(String)}.
     *
     * @return this object, so method calls can be chained
     */
    public CreateRuleResult withChangeToken(String changeToken) {
        setChangeToken(changeToken);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and debugging.
     * The output format matches the generated SDK convention.
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getRule() != null) {
            buf.append("Rule: ").append(getRule()).append(",");
        }
        if (getChangeToken() != null) {
            buf.append("ChangeToken: ").append(getChangeToken());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof CreateRuleResult)) {
            return false;
        }
        CreateRuleResult other = (CreateRuleResult) obj;
        return fieldEquals(getRule(), other.getRule())
                && fieldEquals(getChangeToken(), other.getChangeToken());
    }

    /** Null-safe equality check for a single field. */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (null -> 0) as the generated form, so hash values are unchanged.
        int hashCode = 1;
        hashCode = 31 * hashCode + (getRule() == null ? 0 : getRule().hashCode());
        hashCode = 31 * hashCode + (getChangeToken() == null ? 0 : getChangeToken().hashCode());
        return hashCode;
    }

    @Override
    public CreateRuleResult clone() {
        try {
            return (CreateRuleResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| apache-2.0 |
eBay/mTracker | build-service/src/main/java/com/ccoe/build/tracking/BatchUpdateCategoryJob.java | 2365 | /*
Copyright [2013-2014] eBay Software Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.ccoe.build.tracking;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.ccoe.build.core.filter.SessionTransformer;
import com.ccoe.build.core.model.Session;
import com.ccoe.build.core.utils.StringUtils;
import com.ccoe.build.tracking.jdbc.SessionJDBCTemplate;
public class BatchUpdateCategoryJob implements Job {

	/**
	 * Spring context owning the JDBC beans. Renamed from {@code context}, which was shadowed
	 * by the {@code JobExecutionContext} parameter of {@link #execute}, and made final.
	 * It is intentionally kept open for the lifetime of this job instance.
	 */
	private final ApplicationContext springContext;

	/** Template used for all session reads and batch category updates. */
	private final SessionJDBCTemplate sessionJDBCTemplate;

	/** Bootstraps the Spring JDBC configuration and resolves the session template bean. */
	public BatchUpdateCategoryJob() {
		springContext = new ClassPathXmlApplicationContext("tracking-spring-jdbc-config.xml");
		sessionJDBCTemplate = (SessionJDBCTemplate) springContext.getBean("sessionJDBCTemplate");
	}

	/**
	 * Loads all "exp" sessions that have a null category, derives a category for each via
	 * {@link SessionTransformer}, and batch-updates only the sessions that received one.
	 *
	 * @param jobContext Quartz execution context; not used (may be null when run from main)
	 */
	public void execute(JobExecutionContext jobContext) {
		System.out.println("[INFO] " + new Date() + " Start executing BatchUpdateCategoryJob...");
		List<Session> sessions = sessionJDBCTemplate.getExpSessionWithNullCategory();
		SessionTransformer transformer = new SessionTransformer();
		List<Session> batchUpdates = new ArrayList<Session>();

		for (Session session : sessions) {
			transformer.tranform(session);
			if (!StringUtils.isEmpty(session.getCategory())) {
				System.out.println("Updating " + session.getId() + " --> " + session.getCategory());
				batchUpdates.add(session);
			} else {
				System.out.println("NO category " + session.getId());
			}
		}
		sessionJDBCTemplate.batchUpdateCategory(batchUpdates);
		System.out.println("DONE!");
	}

	/** Convenience entry point for running the batch update outside the Quartz scheduler. */
	public static void main(String[] args) {
		BatchUpdateCategoryJob job = new BatchUpdateCategoryJob();
		job.execute(null);
	}
}
| apache-2.0 |
glorycloud/GloryMail | CloudyMail/lib_src/org/apache/commons/lang/text/CompositeFormat.java | 3866 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang.text;
import java.text.FieldPosition;
import java.text.Format;
import java.text.ParseException;
import java.text.ParsePosition;
/**
* Formats using one formatter and parses using a different formatter. An
* example of use for this would be a webapp where data is taken in one way and
* stored in a database another way.
*
* @author Apache Software Foundation
* @author Archimedes Trajano
* @version $Id: CompositeFormat.java 905636 2010-02-02 14:03:32Z niallp $
*/
public class CompositeFormat extends Format {

    /**
     * Required for serialization support.
     *
     * @see java.io.Serializable
     */
    private static final long serialVersionUID = -4329119827877627683L;

    /** Delegate used exclusively for parsing input. */
    private final Format parser;
    /** Delegate used exclusively for producing formatted output. */
    private final Format formatter;

    /**
     * Builds a format whose {@code parseObject} method delegates to one {@link Format}
     * implementation and whose {@code format} method delegates to another. Useful, for
     * example, when data is accepted in one representation but stored in another.
     *
     * @param parser the delegate used for parsing
     * @param formatter the delegate used for formatting
     */
    public CompositeFormat(Format parser, Format formatter) {
        this.parser = parser;
        this.formatter = formatter;
    }

    /**
     * Delegates to the formatter instance.
     *
     * @param obj the object to format
     * @param toAppendTo the {@link StringBuffer} the output is appended to
     * @param pos the FieldPosition to use (or ignore)
     * @return {@code toAppendTo}
     * @see Format#format(Object, StringBuffer, FieldPosition)
     */
    @Override
    public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) {
        return this.formatter.format(obj, toAppendTo, pos);
    }

    /**
     * Delegates to the parser instance.
     *
     * @param source the String to parse
     * @param pos position to start parsing from; updated on success (index) or failure (error index)
     * @return the parsed Object
     * @see Format#parseObject(String, ParsePosition)
     */
    @Override
    public Object parseObject(String source, ParsePosition pos) {
        return this.parser.parseObject(source, pos);
    }

    /** @return the parsing delegate */
    public Format getParser() {
        return this.parser;
    }

    /** @return the formatting delegate */
    public Format getFormatter() {
        return this.formatter;
    }

    /**
     * Parses a String with the parser delegate, then formats the result with the
     * formatter delegate.
     *
     * @param input the String to reformat
     * @return the reformatted String
     * @throws ParseException if the parser delegate cannot parse {@code input}
     */
    public String reformat(String input) throws ParseException {
        Object parsed = parseObject(input);
        return format(parsed);
    }
}
| apache-2.0 |
didouard/beelee | presentation/src/main/java/com/fernandocejas/android10/sample/presentation/AndroidApplication.java | 2130 | /**
* Copyright (C) 2015 Fernando Cejas Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fernandocejas.android10.sample.presentation;
import android.app.Application;
import com.fernandocejas.android10.sample.presentation.db.Message;
import com.fernandocejas.android10.sample.presentation.db.Room;
import com.fernandocejas.android10.sample.presentation.db.User;
import com.fernandocejas.android10.sample.presentation.internal.di.components.ApplicationComponent;
import com.fernandocejas.android10.sample.presentation.internal.di.components.DaggerApplicationComponent;
import com.fernandocejas.android10.sample.presentation.internal.di.modules.ApplicationModule;
import com.parse.Parse;
import com.parse.ParseObject;
/**
 * Main Android {@link Application}. On process start it bootstraps the Parse
 * SDK (local datastore + subclass registration) and then builds the Dagger
 * dependency-injection graph used by the rest of the presentation layer.
 */
public class AndroidApplication extends Application {

  private ApplicationComponent applicationComponent;

  @Override
  public void onCreate() {
    super.onCreate();
    // Parse must be configured before anything that might touch persisted objects.
    this.configureParse();
    this.buildInjectionGraph();
  }

  /** Builds the application-scoped Dagger component. */
  private void buildInjectionGraph() {
    this.applicationComponent =
        DaggerApplicationComponent.builder()
            .applicationModule(new ApplicationModule(this))
            .build();
  }

  /** Enables the Parse local datastore, registers subclasses and initializes the SDK. */
  private void configureParse() {
    Parse.enableLocalDatastore(this);
    ParseObject.registerSubclass(Room.class);
    ParseObject.registerSubclass(Message.class);
    ParseObject.registerSubclass(User.class);
    Parse.initialize(this, getString(R.string.parse_app_id), getString(R.string.parse_client_key));
  }

  /** @return the application-scoped Dagger component built in onCreate */
  public ApplicationComponent getApplicationComponent() {
    return this.applicationComponent;
  }
}
| apache-2.0 |
Muni10/flyway | flyway-core/src/test/java/org/flywaydb/core/internal/dbsupport/sqlserver/SQLServerMigrationTestCase.java | 14902 | /*
* Copyright 2010-2017 Boxfuse GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flywaydb.core.internal.dbsupport.sqlserver;
import org.flywaydb.core.api.FlywayException;
import org.flywaydb.core.api.MigrationState;
import org.flywaydb.core.internal.dbsupport.FlywaySqlScriptException;
import org.flywaydb.core.api.MigrationVersion;
import org.flywaydb.core.internal.dbsupport.Schema;
import org.flywaydb.core.internal.dbsupport.SqlScript;
import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource;
import org.flywaydb.core.migration.MigrationTestCase;
import org.junit.Ignore;
import org.junit.Test;
import java.sql.CallableStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
/**
* Test to demonstrate the migration functionality using SQL Server.
*/
@SuppressWarnings({"JavaDoc"})
public abstract class SQLServerMigrationTestCase extends MigrationTestCase {
@Override
protected String getQuoteLocation() {
return "migration/quote";
}
@Test
public void failedMigration() throws Exception {
String tableName = "before_the_error";
flyway.setLocations("migration/failed");
Map<String, String> placeholders = new HashMap<String, String>();
placeholders.put("tableName", dbSupport.quote(tableName));
flyway.setPlaceholders(placeholders);
try {
flyway.migrate();
fail();
} catch (FlywaySqlScriptException e) {
// root cause of exception must be defined, and it should be FlywaySqlScriptException
assertNotNull(e.getCause());
assertTrue(e.getCause() instanceof SQLException);
// and make sure the failed statement was properly recorded
// Normal DB should fail at line 21. SqlServer fails at line 17 as statements are executed in batches.
assertEquals(17, e.getLineNumber());
assertTrue(e.getStatement().contains("THIS IS NOT VALID SQL"));
}
}
/**
* Tests clean and migrate for SQL Server Stored Procedures.
*/
@Test
public void storedProcedure() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/procedure");
flyway.migrate();
assertEquals("Hello", jdbcTemplate.queryForString("SELECT value FROM test_data"));
flyway.clean();
// Running migrate again on an unclean database, triggers duplicate object exceptions.
flyway.migrate();
}
/**
* Tests clean and migrate for SQL Server Functions.
*/
@Test
public void function() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/function");
flyway.migrate();
// Test inlined function.
jdbcTemplate.execute("INSERT INTO test_data (value) VALUES ('Hello')");
List<String> reverse = jdbcTemplate.queryForStringList("SELECT * from reverseInlineFunc();");
assertEquals(1, reverse.size());
assertEquals("olleH", reverse.get(0));
// Test table valued-function.
final int count = 10;
List<String> integers = jdbcTemplate.queryForStringList("SELECT * from dbo.positiveIntegers(?)", String.valueOf(count));
assertEquals(count, integers.size());
for (int i = 1; i <= 10; i++) {
assertEquals(i, Integer.parseInt(integers.get(i - 1)));
}
flyway.clean();
// Running migrate again on an unclean database, triggers duplicate object exceptions.
flyway.migrate();
}
/**
* Tests clean and migrate for SQL Server Triggers.
*/
@Test
public void trigger() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/trigger");
flyway.migrate();
assertEquals(3, jdbcTemplate.queryForInt("SELECT priority FROM customers where name='MS Internet Explorer Team'"));
flyway.clean();
// Running migrate again on an unclean database, triggers duplicate object exceptions.
flyway.migrate();
}
/**
* Tests clean and migrate for SQL Server Views.
*/
@Test
public void view() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/view");
flyway.migrate();
assertEquals(150, jdbcTemplate.queryForInt("SELECT value FROM v"));
flyway.clean();
// Running migrate again on an unclean database, triggers duplicate object exceptions.
flyway.migrate();
}
/**
* Tests clean and migrate for SQL Server Types.
*/
@Test
public void type() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/type");
flyway.migrate();
flyway.clean();
// Running migrate again on an unclean database, triggers duplicate object exceptions.
flyway.migrate();
}
/**
* Tests clean and migrate for SQL Server assemblies.
*/
@Test
public void assembly() throws Exception {
CallableStatement stmt = jdbcTemplate.getConnection().prepareCall("EXEC sp_configure 'clr enabled', 1; RECONFIGURE;");
stmt.execute();
try {
flyway.setLocations("migration/dbsupport/sqlserver/sql/assembly");
flyway.migrate();
// CLR procedure.
stmt = jdbcTemplate.getConnection().prepareCall("EXEC helloFromProc ?, ?");
stmt.setString(1, "Alice");
stmt.registerOutParameter(2, Types.VARCHAR);
stmt.execute();
assertEquals("Hello Alice", stmt.getString(2));
// CLR function.
assertEquals("Hello Bob", jdbcTemplate.queryForString("SELECT dbo.helloFromFunc('Bob');"));
List<String> greetings = jdbcTemplate.queryForStringList("SELECT * FROM dbo.helloFromTableValuedFunction(3, 'Charlie')");
assertEquals(3, greetings.size());
for (String greeting : greetings) {
assertEquals("Hello Charlie", greeting);
}
String[] names = new String[]{"Dave", "Erin", "Faythe"};
for (String name : names) {
jdbcTemplate.execute("INSERT INTO names (name) VALUES (?)", name);
}
// CLR trigger.
greetings = jdbcTemplate.queryForStringList("SELECT * FROM triggered_greetings");
assertEquals(names.length, greetings.size());
for (String name : names) {
assertTrue(greetings.remove("Hello " + name));
}
// User aggregate.
greetings = jdbcTemplate.queryForStringList("SELECT dbo.helloAll(name) FROM names");
assertEquals(1, greetings.size());
assertEquals("Hello Dave, Erin, Faythe", greetings.get(0));
flyway.clean();
// Running migrate again on an unclean database, triggers duplicate object exceptions.
flyway.migrate();
} finally {
try {
jdbcTemplate.getConnection().prepareCall("EXEC sp_configure 'clr enabled', 0; RECONFIGURE;");
} catch (Exception e) {
// Swallow.
}
}
}
/**
* Tests clean and migrate for SQL Server unicode strings.
*/
@Test
public void nvarchar() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/nvarchar");
flyway.migrate();
flyway.clean();
}
/**
* Tests clean and migrate for SQL Server sequences.
*/
@Test
public void sequence() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/sequence");
flyway.migrate();
flyway.clean();
flyway.migrate();
}
/**
* Tests clean and migrate for default constraints with functions.
*/
@Test
public void defaultConstraints() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/default");
flyway.migrate();
flyway.clean();
}
/**
* Tests migrate error for pk constraints.
*/
@Test(expected = FlywayException.class)
public void pkConstraints() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/pkConstraint");
flyway.migrate();
}
/**
* Tests clean and migrate for synonyms.
*/
@Test
public void synonym() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/synonym");
flyway.migrate();
flyway.clean();
flyway.migrate();
}
@Test
public void itShouldCleanCheckConstraint() throws Exception {
// given
flyway.setLocations("migration/dbsupport/sqlserver/sql/checkConstraint");
flyway.migrate();
// when
flyway.clean();
// then
int pendingMigrations = flyway.info().pending().length;
assertEquals(3, pendingMigrations);
}
/**
* Tests a large migration that has been reported to hang on SqlServer 2005.
*/
@Ignore("Axel: Fails due to nested transaction being opened in script, causing outer transaction not to receive COMMIT statement")
@Test
public void large() throws Exception {
flyway.setLocations("migration/dbsupport/sqlserver/sql/large",
"org.flywaydb.core.internal.dbsupport.sqlserver.large");
flyway.setTarget(MigrationVersion.fromVersion("3.1.0"));
flyway.migrate();
assertEquals("3.1.0", flyway.info().current().getVersion().toString());
assertEquals(MigrationState.SUCCESS, flyway.info().current().getState());
assertTrue(jdbcTemplate.queryForInt("SELECT COUNT(*) FROM dbo.CHANGELOG") > 0);
}
/**
* Tests that dml errors that occur in the middle of a batch are correctly detected
* see issue 718
*/
@Test
public void dmlErrorsCorrectlyDetected() throws Exception {
String tableName = "sample_table";
flyway.setLocations("migration/dbsupport/sqlserver/sql/dmlErrorDetection");
Map<String, String> placeholders = new HashMap<String, String>();
placeholders.put("tableName", dbSupport.quote(tableName));
flyway.setPlaceholders(placeholders);
try {
flyway.migrate();
fail("This migration should have failed and this point shouldn't have been reached");
} catch (FlywaySqlScriptException e) {
// root cause of exception must be defined, and it should be FlywaySqlScriptException
assertNotNull(e.getCause());
assertTrue(e.getCause() instanceof SQLException);
// and make sure the failed statement was properly recorded
assertEquals(23, e.getLineNumber());
assertTrue(e.getStatement().contains("INSERT INTO"));
assertTrue(e.getStatement().contains("VALUES(1)"));
}
}
@Test
public void msDBToolsIgnoredForEmpty() throws Exception {
Schema schema = dbSupport.getOriginalSchema();
new SqlScript(new ClassPathResource("migration/dbsupport/sqlserver/createMSDBTools.sql",
Thread.currentThread().getContextClassLoader()).loadAsString("UTF-8"), dbSupport).
execute(jdbcTemplate);
try {
assertTrue("MS DB tools must be ignored in empty check.", schema.empty());
} finally {
try {
new SqlScript(new ClassPathResource("migration/dbsupport/sqlserver/dropMSDBTools.sql",
Thread.currentThread().getContextClassLoader()).loadAsString("UTF-8"), dbSupport).
execute(jdbcTemplate);
} catch (Exception e) {
// Swallow to prevent override of test raised exception.
}
}
}
@Test
public void msDBToolsNotCleared() throws Exception {
Schema schema = dbSupport.getOriginalSchema();
new SqlScript(new ClassPathResource("migration/dbsupport/sqlserver/createMSDBTools.sql",
Thread.currentThread().getContextClassLoader()).loadAsString("UTF-8"), dbSupport).
execute(jdbcTemplate);
try {
final String queryObjectCount = "SELECT COUNT(*) from sys.all_objects";
int initialObjectsCount = jdbcTemplate.queryForInt(queryObjectCount);
schema.clean();
int finalObjectCount = jdbcTemplate.queryForInt(queryObjectCount);
assertEquals("Cleaning the schema must not delete MS DB Tools objects.", initialObjectsCount, finalObjectCount);
} finally {
try {
new SqlScript(new ClassPathResource("migration/dbsupport/sqlserver/dropMSDBTools.sql",
Thread.currentThread().getContextClassLoader()).loadAsString("UTF-8"), dbSupport).
execute(jdbcTemplate);
} catch (Exception e) {
// Swallow to prevent override of test raised exception.
}
}
}
@Override
@Ignore("Not supported on SQL Server")
public void setCurrentSchema() throws Exception {
//Skip
}
@Override
protected void createFlyway3MetadataTable() throws Exception {
jdbcTemplate.execute("CREATE TABLE [schema_version] (\n" +
" [version_rank] INT NOT NULL,\n" +
" [installed_rank] INT NOT NULL,\n" +
" [version] NVARCHAR(50) NOT NULL,\n" +
" [description] NVARCHAR(200),\n" +
" [type] NVARCHAR(20) NOT NULL,\n" +
" [script] NVARCHAR(1000) NOT NULL,\n" +
" [checksum] INT,\n" +
" [installed_by] NVARCHAR(100) NOT NULL,\n" +
" [installed_on] DATETIME NOT NULL DEFAULT GETDATE(),\n" +
" [execution_time] INT NOT NULL,\n" +
" [success] BIT NOT NULL\n" +
")");
jdbcTemplate.execute("ALTER TABLE [schema_version] ADD CONSTRAINT [schema_version_pk] PRIMARY KEY ([version])");
jdbcTemplate.execute("CREATE INDEX [schema_version_vr_idx] ON [schema_version] ([version_rank])");
jdbcTemplate.execute("CREATE INDEX [schema_version_ir_idx] ON [schema_version] ([installed_rank])");
jdbcTemplate.execute("CREATE INDEX [schema_version_s_idx] ON [schema_version] ([success])");
}
} | apache-2.0 |
widoriezebos/Thoth | thoth-modules/thoth-core/src/test/java/net/riezebos/thoth/content/search/util/WriteResult.java | 1028 | /* Copyright (c) 2020 W.T.J. Riezebos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.riezebos.thoth.content.search.util;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
/**
 * Immutable pairing of a Lucene {@link Term} and the {@link Document} written
 * for it; used by search tests to capture index-writer interactions.
 */
public class WriteResult {
    // Both fields are set once in the constructor; final makes the pairing immutable.
    private final Term term;
    private final Document document;

    /**
     * @param term the term the document was written under
     * @param document the document that was written
     */
    public WriteResult(Term term, Document document) {
        this.term = term;
        this.document = document;
    }

    /** @return the term the document was written under */
    public Term getTerm() {
        return term;
    }

    /** @return the document that was written */
    public Document getDocument() {
        return document;
    }
}
| apache-2.0 |
masonmei/java-agent | plugins/httpclient3/src/main/java/com/baidu/oped/apm/plugin/httpclient3/HttpClient3CallContext.java | 2573 | /*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.baidu.oped.apm.plugin.httpclient3;
/**
 * Mutable per-call context for HttpClient 3 instrumentation. Records the
 * begin/end timestamps and failure flags of the read and write phases and can
 * compute the elapsed time of each phase (clamped at zero).
 *
 * @author jaehong.kim
 */
public class HttpClient3CallContext {
    private long readBeginTime;
    private long readEndTime;
    private boolean readFail;

    private long writeBeginTime;
    private long writeEndTime;
    private boolean writeFail;

    public void setReadBeginTime(long readBeginTime) {
        this.readBeginTime = readBeginTime;
    }

    public void setReadEndTime(long readEndTime) {
        this.readEndTime = readEndTime;
    }

    public boolean isReadFail() {
        return readFail;
    }

    public void setReadFail(boolean readFail) {
        this.readFail = readFail;
    }

    public void setWriteBeginTime(long writeBeginTime) {
        this.writeBeginTime = writeBeginTime;
    }

    public void setWriteEndTime(long writeEndTime) {
        this.writeEndTime = writeEndTime;
    }

    public boolean isWriteFail() {
        return writeFail;
    }

    public void setWriteFail(boolean writeFail) {
        this.writeFail = writeFail;
    }

    /** @return elapsed write time (end - begin), never negative */
    public long getWriteElapsedTime() {
        return elapsed(writeBeginTime, writeEndTime);
    }

    /** @return elapsed read time (end - begin), never negative */
    public long getReadElapsedTime() {
        return elapsed(readBeginTime, readEndTime);
    }

    // Shared clamp: a non-positive delta (unset or inverted timestamps) reports as 0.
    private static long elapsed(long begin, long end) {
        return Math.max(end - begin, 0L);
    }

    @Override
    public String toString() {
        return "{readBeginTime=" + readBeginTime
                + ", readEndTime=" + readEndTime
                + ", readFail=" + readFail
                + ", writeBeginTime=" + writeBeginTime
                + ", writeEndTime=" + writeEndTime
                + ", writeFail=" + writeFail
                + "}";
    }
}
sergej-samsonow/code-generator | producer/pojo/src/main/java/com/github/sergejsamsonow/codegenerator/producer/pojo/model/PojoProperty.java | 467 | package com.github.sergejsamsonow.codegenerator.producer.pojo.model;
import java.util.Set;
/**
 * Describes a single POJO property for code generation: its field name, type
 * information, initialization code, and the derived accessor method names.
 */
public interface PojoProperty {

    // NOTE: interface methods are implicitly public; the redundant modifier was removed.

    /** @return the Java field name of the property */
    String getFieldName();

    /** @return true if the property is a list-typed property */
    boolean isList();

    /** @return presumably true if the contained type is a simple (built-in) type — TODO confirm against implementations */
    boolean isSimpleTypeContainer();

    /** @return the contained (element) type name */
    String getContainedType();

    /** @return the field initialization code snippet */
    String getInitCode();

    /** @return the set of type names that must be imported for this property */
    Set<String> getImportedTypes();

    /** @return the declared type used for the field and accessor signatures */
    String getDeclarationType();

    /** @return the getter method name derived from the field name */
    String getGetterName();

    /** @return the setter method name derived from the field name */
    String getSetterName();
}
kawakicchi/developer-tools | src/main/java/com/github/kawakicchi/developer/dbviewer/component/DBObjectTypePanel.java | 4617 | package com.github.kawakicchi.developer.dbviewer.component;
import java.awt.Insets;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import com.github.kawakicchi.developer.dbviewer.model.DatabaseModel;
import com.github.kawakicchi.developer.dbviewer.model.UserEntity;
/**
 * Swing panel with a database-user combo box, an object-type combo box and a
 * filter text field. Registered {@link DBObjectTypeListener}s are notified
 * whenever the user or type selection changes.
 */
public class DBObjectTypePanel extends JPanel {

    /** serialVersionUID */
    private static final long serialVersionUID = -1545074734344651645L;

    // Fixed layout metrics (the panel uses a null layout and positions components manually).
    private static final int COMPONENT_LABEL_WIDTH = 40;
    private static final int COMPONENT_HEIGHT = 24;
    private static final int COMPONENT_MARGIN = 6;
    // Renamed from the misspelled COMPOENNT_INTERVAL (private constant, no external impact).
    private static final int COMPONENT_INTERVAL = 2;

    // Guarded by synchronized(listeners) for add and iteration.
    private List<DBObjectTypeListener> listeners;

    private JLabel lblUser;
    private JComboBox<String> cmbUser;
    private JLabel lblType;
    private JComboBox<String> cmbType;
    private JLabel lblFilt;
    private JTextField txtFilt;

    public DBObjectTypePanel() {
        listeners = new ArrayList<DBObjectTypeListener>();

        setLayout(null);

        int x = COMPONENT_MARGIN;
        int y = COMPONENT_MARGIN;
        lblUser = new JLabel("User");
        lblUser.setLocation(x, y);
        lblUser.setSize(COMPONENT_LABEL_WIDTH, COMPONENT_HEIGHT);
        add(lblUser);
        cmbUser = new JComboBox<String>();
        cmbUser.setLocation(x + COMPONENT_LABEL_WIDTH, y);
        add(cmbUser);

        y += COMPONENT_HEIGHT + COMPONENT_INTERVAL;
        lblType = new JLabel("Type");
        lblType.setLocation(x, y);
        lblType.setSize(COMPONENT_LABEL_WIDTH, COMPONENT_HEIGHT);
        add(lblType);
        cmbType = new JComboBox<String>();
        cmbType.setLocation(x + COMPONENT_LABEL_WIDTH, y);
        add(cmbType);

        y += COMPONENT_HEIGHT + COMPONENT_INTERVAL;
        lblFilt = new JLabel("Filter");
        lblFilt.setLocation(x, y);
        lblFilt.setSize(COMPONENT_LABEL_WIDTH, COMPONENT_HEIGHT);
        add(lblFilt);
        txtFilt = new JTextField();
        txtFilt.setLocation(x + COMPONENT_LABEL_WIDTH, y);
        add(txtFilt);

        // Only SELECTED events fire notifications, so a selection change produces one callback, not two.
        cmbUser.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(final ItemEvent e) {
                if (e.getStateChange() == ItemEvent.SELECTED) {
                    callDBObjectTypeChanged();
                }
            }
        });
        cmbType.addItemListener(new ItemListener() {
            @Override
            public void itemStateChanged(final ItemEvent e) {
                if (e.getStateChange() == ItemEvent.SELECTED) {
                    callDBObjectTypeChanged();
                }
            }
        });

        // Stretch the input components to the panel width whenever the panel is shown or resized.
        addComponentListener(new ComponentAdapter() {
            @Override
            public void componentShown(ComponentEvent e) {
                resize();
            }

            @Override
            public void componentResized(ComponentEvent e) {
                resize();
            }

            private void resize() {
                Insets insets = getInsets();
                int width = getWidth() - (insets.left + insets.right);
                cmbUser.setSize(width - (COMPONENT_LABEL_WIDTH + COMPONENT_MARGIN * 2), COMPONENT_HEIGHT);
                cmbType.setSize(width - (COMPONENT_LABEL_WIDTH + COMPONENT_MARGIN * 2), COMPONENT_HEIGHT);
                txtFilt.setSize(width - (COMPONENT_LABEL_WIDTH + COMPONENT_MARGIN * 2), COMPONENT_HEIGHT);
            }
        });
    }

    /** Registers a listener to be notified when the user or type selection changes. */
    public final void addDBObjectTypeListener(final DBObjectTypeListener listener) {
        synchronized (listeners) {
            listeners.add(listener);
        }
    }

    /**
     * Repopulates both combo boxes from the given database model.
     * SQLExceptions are currently only printed; callers get no failure signal.
     */
    public void setDatabaseModel(final DatabaseModel model) {
        try {
            cmbUser.removeAllItems();
            List<UserEntity> users = model.getUserList();
            for (UserEntity user : users) {
                cmbUser.addItem(user.getName());
            }

            cmbType.removeAllItems();
            List<String> types = model.getObjectTypeList();
            for (String type : types) {
                cmbType.addItem(type);
            }
        } catch (SQLException ex) {
            // NOTE(review): consider surfacing this to the caller instead of only printing.
            ex.printStackTrace();
        }
    }

    /** Selects the given user in the user combo box. */
    public void setUser(final String user) {
        cmbUser.setSelectedItem(user);
    }

    /** @return the selected user name; throws NPE if nothing is selected — callers must select first */
    public String getUser() {
        return cmbUser.getSelectedItem().toString();
    }

    /** @return the selected object type; throws NPE if nothing is selected — callers must select first */
    public String getType() {
        return cmbType.getSelectedItem().toString();
    }

    /**
     * Notifies all registered listeners of a selection change.
     * Renamed from the misspelled callDBObjectTypeChenged (private method, no external impact).
     */
    private void callDBObjectTypeChanged() {
        synchronized (listeners) {
            for (DBObjectTypeListener listener : listeners) {
                listener.dbObjectTypeChanged(this);
            }
        }
    }

    /** Callback interface for user/type selection changes. */
    public static interface DBObjectTypeListener {
        public void dbObjectTypeChanged(final DBObjectTypePanel panel);
    }
}
| apache-2.0 |
SAP/sap_mobile_platform_extend_functionality | customization/sample.persistence.jpa/model/src/main/java/com/sap/mobile/platform/server/sample/persistence/jpa/log/CustomSessionLog.java | 1154 | /**
* (c) 2013 SAP AG or an SAP affiliate company. All rights reserved.
*
* No part of this publication may be reproduced or transmitted in any form or for any purpose
* without the express permission of SAP AG. The information contained herein may be changed
* without prior notice.
*/
package com.sap.mobile.platform.server.sample.persistence.jpa.log;
import org.eclipse.persistence.logging.AbstractSessionLog;
import org.eclipse.persistence.logging.SessionLog;
import org.eclipse.persistence.logging.SessionLogEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * EclipseLink {@link SessionLog} implementation that forwards persistence log
 * entries to SLF4J.
 *
 * @author all
 */
public final class CustomSessionLog extends AbstractSessionLog implements
		SessionLog {
	/**
	 * SLF4J adapter used as the log sink for all forwarded entries.
	 */
	public static final Logger LOGGER = LoggerFactory
			.getLogger(CustomSessionLog.class);

	/**
	 * Routes a single EclipseLink log entry to SLF4J, mapping the entry level:
	 * SEVERE to error, WARNING to warn, INFO to info, and every other level
	 * (the default branch) to debug.
	 */
	@Override
	public void log(final SessionLogEntry sle) {
		switch (sle.getLevel()) {
		case SEVERE:
			LOGGER.error(sle.getMessage());
			break;
		case WARNING:
			LOGGER.warn(sle.getMessage());
			break;
		case INFO:
			LOGGER.info(sle.getMessage());
			break;
		default:
			// FINE/FINER/FINEST and any unrecognized level fall through to debug.
			LOGGER.debug(sle.getMessage());
			break;
		}
	}
}
| apache-2.0 |
adamjshook/accumulo | test/src/test/java/org/apache/accumulo/test/functional/TableChangeStateIT.java | 15812 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.test.functional;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.impl.Tables;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.master.state.tables.TableState;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.zookeeper.ZooUtil;
import org.apache.accumulo.fate.AdminUtil;
import org.apache.accumulo.fate.ZooStore;
import org.apache.accumulo.fate.zookeeper.IZooReaderWriter;
import org.apache.accumulo.harness.AccumuloClusterIT;
import org.apache.accumulo.server.zookeeper.ZooReaderWriterFactory;
import org.apache.hadoop.io.Text;
import org.apache.zookeeper.KeeperException;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* ACCUMULO-4574. Test to verify that changing table state to online / offline {@link org.apache.accumulo.core.client.admin.TableOperations#online(String)} when
* the table is already in that state returns without blocking.
*/
public class TableChangeStateIT extends AccumuloClusterIT {
private static final Logger log = LoggerFactory.getLogger(TableChangeStateIT.class);
private static final int NUM_ROWS = 1000;
private static final long SLOW_SCAN_SLEEP_MS = 100L;
private Connector connector;
  @Before
  public void setup() {
    // Cache the cluster connector once per test method; all helpers use it.
    connector = getConnector();
  }
@Override
protected int defaultTimeoutSeconds() {
return 4 * 60;
}
  /**
   * Validate that {@code TableOperations} online operation does not block when table is already online and fate transaction lock is held by other operations.
   * The test creates, populates a table and then runs a compaction with a slow iterator so that operation takes long enough to simulate the condition. After
   * the online operation while compaction is running completes, the test is complete and the compaction is canceled so that other tests can run.
   *
   * @throws Exception
   *           any exception is a test failure.
   */
  @Test
  public void changeTableStateTest() throws Exception {

    // NOTE(review): this pool is never shut down; each test method leaks its threads until JVM exit.
    ExecutorService pool = Executors.newCachedThreadPool();

    String tableName = getUniqueNames(1)[0];

    createData(tableName);

    assertEquals("verify table online after created", TableState.ONLINE, getTableState(tableName));

    // Timing 1: online an already-online table — should complete without blocking.
    OnLineCallable onlineOp = new OnLineCallable(tableName);

    Future<OnlineOpTiming> task = pool.submit(onlineOp);

    OnlineOpTiming timing1 = task.get();

    log.trace("Online 1 in {} ms", TimeUnit.MILLISECONDS.convert(timing1.runningTime(), TimeUnit.NANOSECONDS));

    assertEquals("verify table is still online", TableState.ONLINE, getTableState(tableName));

    // Timing 2: verify that offline then online functions as expected.
    connector.tableOperations().offline(tableName, true);
    assertEquals("verify table is offline", TableState.OFFLINE, getTableState(tableName));

    onlineOp = new OnLineCallable(tableName);

    task = pool.submit(onlineOp);

    OnlineOpTiming timing2 = task.get();

    log.trace("Online 2 in {} ms", TimeUnit.MILLISECONDS.convert(timing2.runningTime(), TimeUnit.NANOSECONDS));

    assertEquals("verify table is back online", TableState.ONLINE, getTableState(tableName));

    // launch a full table compaction with the slow iterator to ensure table lock is acquired and held by the compaction
    Future<?> compactTask = pool.submit(new SlowCompactionRunner(tableName));

    assertTrue("verify that compaction running and fate transaction exists", blockUntilCompactionRunning(tableName));

    // Timing 3: try to set online while fate transaction is in progress - before ACCUMULO-4574 this would block
    onlineOp = new OnLineCallable(tableName);

    task = pool.submit(onlineOp);

    OnlineOpTiming timing3 = task.get();

    // The online call must return well before the slow compaction would finish;
    // otherwise it was blocked by the compaction's fate transaction lock.
    assertTrue("online should take less time than expected compaction time",
        timing3.runningTime() < TimeUnit.NANOSECONDS.convert(NUM_ROWS * SLOW_SCAN_SLEEP_MS, TimeUnit.MILLISECONDS));

    assertEquals("verify table is still online", TableState.ONLINE, getTableState(tableName));

    assertTrue("verify compaction still running and fate transaction still exists", blockUntilCompactionRunning(tableName));

    // test complete, cancel compaction and move on.
    connector.tableOperations().cancelCompaction(tableName);

    log.debug("Success: Timing results for online commands.");
    log.debug("Time for unblocked online {} ms", TimeUnit.MILLISECONDS.convert(timing1.runningTime(), TimeUnit.NANOSECONDS));
    log.debug("Time for online when offline {} ms", TimeUnit.MILLISECONDS.convert(timing2.runningTime(), TimeUnit.NANOSECONDS));
    log.debug("Time for blocked online {} ms", TimeUnit.MILLISECONDS.convert(timing3.runningTime(), TimeUnit.NANOSECONDS));

    // block if compaction still running
    compactTask.get();

  }
  /**
   * Blocks current thread until at least one active compaction is reported by a
   * tablet server, polling every 250 ms.
   *
   * @param tableName
   *          the table whose fate transactions are checked once a compaction is seen
   * @return true if compaction and associate fate found.
   */
  private boolean blockUntilCompactionRunning(final String tableName) {

    int runningCompactions = 0;

    List<String> tservers = connector.instanceOperations().getTabletServers();

    /*
     * wait for compaction to start - The compaction will acquire a fate transaction lock that used to block a subsequent online command while the fate
     * transaction lock was held.
     */
    while (runningCompactions == 0) {

      try {

        for (String tserver : tservers) {
          runningCompactions += connector.instanceOperations().getActiveCompactions(tserver).size();
          log.trace("tserver {}, running compactions {}", tservers, runningCompactions);
        }

      } catch (AccumuloSecurityException | AccumuloException ex) {
        throw new IllegalStateException("failed to get active compactions, test fails.", ex);
      }

      try {
        Thread.sleep(250);
      } catch (InterruptedException ex) {
        // reassert interrupt
        // NOTE(review): the loop keeps polling after re-interrupting; the next sleep() will
        // throw immediately, turning this into a busy loop — consider returning/aborting here.
        Thread.currentThread().interrupt();
      }
    }

    // Validate that there is a compaction fate transaction - otherwise test is invalid.
    return findFate(tableName);
  }
/**
 * Checks fates in zookeeper looking for a transaction associated with a compaction as a double
 * check that the test will be valid because the running compaction does have a fate transaction
 * lock.
 *
 * @param tableName
 *          the name of the table expected to have a compaction fate transaction
 * @return true if a corresponding fate transaction was found, false otherwise
 * @throws IllegalStateException
 *           if the fate information cannot be read from zookeeper
 */
private boolean findFate(final String tableName) {
  Instance instance = connector.getInstance();
  AdminUtil<String> admin = new AdminUtil<>(false);
  try {
    String tableId = Tables.getTableId(instance, tableName);
    log.trace("tid: {}", tableId);
    String secret = cluster.getSiteConfiguration().get(Property.INSTANCE_SECRET);
    IZooReaderWriter zk = new ZooReaderWriterFactory().getZooReaderWriter(instance.getZooKeepers(), instance.getZooKeepersSessionTimeOut(), secret);
    ZooStore<String> zs = new ZooStore<>(ZooUtil.getRoot(instance) + Constants.ZFATE, zk);
    AdminUtil.FateStatus fateStatus = admin.getStatus(zs, zk, ZooUtil.getRoot(instance) + Constants.ZTABLE_LOCKS + "/" + tableId, null, null);
    for (AdminUtil.TransactionStatus tx : fateStatus.getTransactions()) {
      // a compaction fate transaction runs CompactionDriver with a CompactRange step.
      if (tx.getTop().contains("CompactionDriver") && tx.getDebug().contains("CompactRange")) {
        return true;
      }
    }
  } catch (InterruptedException ex) {
    // preserve the interrupt status before converting to a test failure.
    Thread.currentThread().interrupt();
    throw new IllegalStateException(ex);
  } catch (KeeperException | TableNotFoundException ex) {
    throw new IllegalStateException(ex);
  }
  // did not find appropriate fate transaction for compaction.
  return false;
}
/**
 * Looks up the current state (ONLINE, OFFLINE, ...) of the named table.
 *
 * @param tableName
 *          the table name
 * @return the current table state
 * @throws TableNotFoundException
 *           if the table does not exist
 */
private TableState getTableState(String tableName) throws TableNotFoundException {
  final String id = Tables.getTableId(connector.getInstance(), tableName);
  final TableState state = Tables.getTableState(connector.getInstance(), id);
  log.trace("tableName: '{}': tableId {}, current state: {}", tableName, id, state);
  return state;
}
/**
 * Create the provided table and populate with some data using a batch writer. The table is
 * scanned to ensure it was populated as expected.
 *
 * @param tableName
 *          the name of the table
 * @throws IllegalStateException
 *           if the table cannot be created/populated, or the scanned row count does not match
 *           {@code NUM_ROWS}
 */
private void createData(final String tableName) {
  try {
    // create table.
    connector.tableOperations().create(tableName);
    // try-with-resources guarantees the writer is flushed and closed even if a mutation fails.
    try (BatchWriter bw = connector.createBatchWriter(tableName, new BatchWriterConfig())) {
      // populate rows 00000..NUM_ROWS-1, cycling the column family over col1..col3.
      for (int i = 0; i < NUM_ROWS; i++) {
        Mutation m = new Mutation(new Text(String.format("%05d", i)));
        m.put(new Text("col" + Integer.toString((i % 3) + 1)), new Text("qual"), new Value("junk".getBytes(UTF_8)));
        bw.addMutation(m);
      }
    }
    long startTimestamp = System.nanoTime();
    int count = 0;
    // validate expected data was created and exists in table; scanner is closed on all paths.
    try (Scanner scanner = connector.createScanner(tableName, Authorizations.EMPTY)) {
      for (Map.Entry<Key,Value> elt : scanner) {
        String expected = String.format("%05d", count);
        assert (elt.getKey().getRow().toString().equals(expected));
        count++;
      }
    }
    log.trace("Scan time for {} rows {} ms", NUM_ROWS, TimeUnit.MILLISECONDS.convert((System.nanoTime() - startTimestamp), TimeUnit.NANOSECONDS));
    if (count != NUM_ROWS) {
      throw new IllegalStateException(String.format("Number of rows %1$d does not match expected %2$d", count, NUM_ROWS));
    }
  } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException | TableExistsException ex) {
    throw new IllegalStateException("Create data failed with exception", ex);
  }
}
/**
 * Provides timing information for the online operation.
 */
private static class OnlineOpTiming {

  // start timestamp (System.nanoTime) captured at construction; never reassigned.
  private final long started;
  // completion timestamp (System.nanoTime); 0 until setComplete() is called.
  private long completed = 0L;

  OnlineOpTiming() {
    started = System.nanoTime();
  }

  /**
   * stop timing and set completion flag.
   */
  void setComplete() {
    completed = System.nanoTime();
  }

  /**
   * @return running time in nanoseconds; meaningful only after {@link #setComplete()} has been
   *         called.
   */
  long runningTime() {
    return completed - started;
  }
}
/**
 * Runs the online operation in a separate thread and gathers timing information.
 */
private class OnLineCallable implements Callable<OnlineOpTiming> {

  final String tableName;

  /**
   * Create an instance of this class to set the provided table online.
   *
   * @param tableName
   *          The table name that will be set online.
   */
  OnLineCallable(final String tableName) {
    this.tableName = tableName;
  }

  @Override
  public OnlineOpTiming call() throws Exception {
    final OnlineOpTiming timing = new OnlineOpTiming();
    log.trace("Setting {} online", tableName);
    // wait flag is true, so this call blocks until the table transition completes.
    connector.tableOperations().online(tableName, true);
    timing.setComplete();
    log.trace("Online completed in {} ms", TimeUnit.MILLISECONDS.convert(timing.runningTime(), TimeUnit.NANOSECONDS));
    return timing;
  }
}
/**
 * Instance to create / run a compaction using a slow iterator.
 */
private class SlowCompactionRunner implements Runnable {

  private final String tableName;

  /**
   * Create an instance of this class.
   *
   * @param tableName
   *          the name of the table that will be compacted with the slow iterator.
   */
  SlowCompactionRunner(final String tableName) {
    this.tableName = tableName;
  }

  @Override
  public void run() {
    long startTimestamp = System.nanoTime();
    IteratorSetting slow = new IteratorSetting(30, "slow", SlowIterator.class);
    SlowIterator.setSleepTime(slow, SLOW_SCAN_SLEEP_MS);
    List<IteratorSetting> compactIterators = new ArrayList<>();
    compactIterators.add(slow);
    log.trace("Slow iterator {}", slow.toString());
    try {
      log.trace("Start compaction");
      connector.tableOperations().compact(tableName, new Text("0"), new Text("z"), compactIterators, true, true);
      log.trace("Compaction wait is complete");
      log.trace("Slow compaction of {} rows took {} ms", NUM_ROWS, TimeUnit.MILLISECONDS.convert((System.nanoTime() - startTimestamp), TimeUnit.NANOSECONDS));
      // validate that number of rows matches expected.
      startTimestamp = System.nanoTime();
      int count = 0;
      // validate expected data created and exists in table; close the scanner on all paths.
      try (Scanner scanner = connector.createScanner(tableName, Authorizations.EMPTY)) {
        for (Map.Entry<Key,Value> elt : scanner) {
          String expected = String.format("%05d", count);
          assert (elt.getKey().getRow().toString().equals(expected));
          count++;
        }
      }
      log.trace("After compaction, scan time for {} rows {} ms", NUM_ROWS,
          TimeUnit.MILLISECONDS.convert((System.nanoTime() - startTimestamp), TimeUnit.NANOSECONDS));
      if (count != NUM_ROWS) {
        throw new IllegalStateException(String.format("After compaction, number of rows %1$d does not match expected %2$d", count, NUM_ROWS));
      }
    } catch (TableNotFoundException ex) {
      throw new IllegalStateException("test failed, table " + tableName + " does not exist", ex);
    } catch (AccumuloSecurityException ex) {
      throw new IllegalStateException("test failed, could not add iterator due to security exception", ex);
    } catch (AccumuloException ex) {
      // test cancels compaction on complete, so a cancellation is expected and ignored.
      // getMessage() may be null for other failures - guard before matching to avoid an NPE
      // masking the real cause.
      String msg = ex.getMessage();
      if (msg == null || !msg.contains("Compaction canceled")) {
        throw new IllegalStateException("test failed with an Accumulo exception", ex);
      }
    }
  }
}
}
| apache-2.0 |
treasure-data/digdag | digdag-standards/src/main/java/io/digdag/standards/operator/param/RedisServerClientConnection.java | 521 | package io.digdag.standards.operator.param;
import io.digdag.spi.ParamServerClientConnection;
import redis.clients.jedis.Jedis;
/**
 * A {@link ParamServerClientConnection} backed by Redis.
 * <p>
 * Wraps a single {@link Jedis} client handle and identifies itself with the
 * type string {@code "redis"}.
 */
public class RedisServerClientConnection implements ParamServerClientConnection<Jedis>
{
    /** The wrapped Jedis client handle. */
    private final Jedis jedis;

    public RedisServerClientConnection(Jedis connection)
    {
        this.jedis = connection;
    }

    /** Returns the underlying Jedis connection. */
    @Override
    public Jedis get()
    {
        return jedis;
    }

    /** Returns the connection type identifier, {@code "redis"}. */
    @Override
    public String getType()
    {
        return "redis";
    }
}
| apache-2.0 |
parsingdata/metal | core/src/main/java/io/parsingdata/metal/expression/value/SingleValueExpression.java | 1736 | /*
* Copyright 2013-2021 Netherlands Forensic Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.parsingdata.metal.expression.value;
import java.util.Optional;
import io.parsingdata.metal.data.ImmutableList;
import io.parsingdata.metal.data.ParseState;
import io.parsingdata.metal.encoding.Encoding;
/**
* Interface for all SingleValueExpression implementations.
* <p>
* A SingleValueExpression is an expression that is evaluated by executing its
* {@link #evalSingle(ParseState, Encoding)} method. It yields an {@link Optional}
* {@link Value} object.
* <p>
* As context, it receives the current <code>ParseState</code> object as
* well as the current <code>Encoding</code> object.
*/
@SuppressWarnings("FunctionalInterfaceMethodChanged") // Deliberate: this is exactly the pattern error-prone's advice suggests
@FunctionalInterface
public interface SingleValueExpression extends ValueExpression {

    Optional<Value> evalSingle(ParseState parseState, Encoding encoding);

    /**
     * Adapts {@link #evalSingle(ParseState, Encoding)} to the list-based
     * {@link ValueExpression#eval(ParseState, Encoding)} contract: a present
     * value becomes a single-element list, an absent one an empty list.
     */
    @Override
    default ImmutableList<Value> eval(ParseState parseState, Encoding encoding) {
        final Optional<Value> single = evalSingle(parseState, encoding);
        return single
            .map(value -> ImmutableList.create(value))
            .orElseGet(() -> new ImmutableList<>());
    }
}
| apache-2.0 |
sialcasa/mvvmFX | examples/mini-examples/scopes-example/src/main/java/de/saxsys/mvvmfx/examples/scopesexample/model/DocumentRepository.java | 615 | package de.saxsys.mvvmfx.examples.scopesexample.model;
import javax.inject.Singleton;
import java.util.*;
/**
 * In-memory store of {@link Document} instances, keyed by document id.
 * <p>
 * NOTE(review): backed by a plain {@link HashMap}; access is not synchronized,
 * and {@link #findAll()} returns a live view over the stored values.
 */
@Singleton
public class DocumentRepository {

    private final Map<String, Document> documentsById = new HashMap<>();

    /** Looks up a document by id; empty when nothing is stored under that id. */
    public Optional<Document> findById(String id) {
        return Optional.ofNullable(documentsById.get(id));
    }

    /** All stored documents, as a live view of the underlying map's values. */
    public Collection<Document> findAll() {
        return documentsById.values();
    }

    /** Stores the document, replacing any existing entry with the same id. */
    public void persist(Document document) {
        documentsById.put(document.getId(), document);
    }

    /** Removes the given document; matching is by id only. */
    public void remove(Document document) {
        remove(document.getId());
    }

    /** Removes the document stored under the given id, if present. */
    public void remove(String id) {
        documentsById.remove(id);
    }
}
| apache-2.0 |
dongaihua/highlight-elasticsearch | src/main/java/org/elasticsearch/common/lucene/Lucene.java | 15172 | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.*;
import org.apache.lucene.index.SegmentInfos.FindSegmentsFile;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Field;
/**
 * Static helpers for working with Lucene from Elasticsearch: version parsing,
 * shared default analyzers, segment/index inspection, and (de)serialization of
 * {@link TopDocs}, {@link SortField.Type} and {@link Explanation} over the
 * Elasticsearch stream protocol.
 * <p>
 * Not instantiable; all members are static. The read/write method pairs below
 * define a wire format and MUST be kept symmetric.
 */
public class Lucene {

    // Lucene version this build targets; analyzer and query-parser versions track it.
    public static final Version VERSION = Version.LUCENE_42;

    public static final Version ANALYZER_VERSION = VERSION;
    public static final Version QUERYPARSER_VERSION = VERSION;

    // Globally-scoped default analyzers, shared across the node.
    public static final NamedAnalyzer STANDARD_ANALYZER = new NamedAnalyzer("_standard", AnalyzerScope.GLOBAL, new StandardAnalyzer(ANALYZER_VERSION));
    public static final NamedAnalyzer KEYWORD_ANALYZER = new NamedAnalyzer("_keyword", AnalyzerScope.GLOBAL, new KeywordAnalyzer());

    // Sentinel document id meaning "no document".
    public static final int NO_DOC = -1;

    // NOTE(review): public, non-final, and arrays are mutable - this shared
    // instance could be replaced or modified by any caller; consider final.
    public static ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0];

    /**
     * Maps a textual Lucene version ("4.2" down to "3.0") to the matching
     * {@link Version} constant.
     *
     * @param version        the textual version, may be null
     * @param defaultVersion returned when {@code version} is null or unrecognized
     * @param logger         receives a warning for unrecognized version strings
     * @return the matching {@link Version}, or {@code defaultVersion}
     */
    public static Version parseVersion(@Nullable String version, Version defaultVersion, ESLogger logger) {
        if (version == null) {
            return defaultVersion;
        }
        if ("4.2".equals(version)) {
            return Version.LUCENE_42;
        }
        if ("4.1".equals(version)) {
            return Version.LUCENE_41;
        }
        if ("4.0".equals(version)) {
            return Version.LUCENE_40;
        }
        if ("3.6".equals(version)) {
            return Version.LUCENE_36;
        }
        if ("3.5".equals(version)) {
            return Version.LUCENE_35;
        }
        if ("3.4".equals(version)) {
            return Version.LUCENE_34;
        }
        if ("3.3".equals(version)) {
            return Version.LUCENE_33;
        }
        if ("3.2".equals(version)) {
            return Version.LUCENE_32;
        }
        if ("3.1".equals(version)) {
            return Version.LUCENE_31;
        }
        if ("3.0".equals(version)) {
            return Version.LUCENE_30;
        }
        logger.warn("no version match {}, default to {}", version, defaultVersion);
        return defaultVersion;
    }

    /**
     * Reads the segments infos, failing if it fails to load
     */
    public static SegmentInfos readSegmentInfos(Directory directory) throws IOException {
        final SegmentInfos sis = new SegmentInfos();
        sis.read(directory);
        return sis;
    }

    /**
     * Counts the documents matching {@code query} on {@code searcher}.
     * The query is wrapped in a {@link ConstantScoreQuery} because scores are
     * not needed for counting.
     */
    public static long count(IndexSearcher searcher, Query query) throws IOException {
        TotalHitCountCollector countCollector = new TotalHitCountCollector();
        // we don't need scores, so wrap it in a constant score query
        if (!(query instanceof ConstantScoreQuery)) {
            query = new ConstantScoreQuery(query);
        }
        searcher.search(query, countCollector);
        return countCollector.getTotalHits();
    }

    /**
     * Closes the index writer, returning <tt>false</tt> if it failed to close.
     * A null writer counts as successfully closed; the IOException is
     * intentionally swallowed (best-effort close).
     */
    public static boolean safeClose(IndexWriter writer) {
        if (writer == null) {
            return true;
        }
        try {
            writer.close();
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Deserializes a {@link TopDocs} previously written by
     * {@link #writeTopDocs(StreamOutput, TopDocs, int)}.
     * <p>
     * Wire format: a leading boolean ("has docs"; null is returned when false),
     * then a boolean discriminating {@link TopFieldDocs} from plain TopDocs.
     * Field-doc sort values are tagged with a type byte 0-9
     * (null, String, Integer, Long, Float, Double, Byte, Short, Boolean,
     * BytesRef) - must stay in sync with the writer below.
     *
     * @return the deserialized TopDocs, or null when none were written
     */
    public static TopDocs readTopDocs(StreamInput in) throws IOException {
        if (!in.readBoolean()) {
            // no docs
            return null;
        }
        if (in.readBoolean()) {
            int totalHits = in.readVInt();
            float maxScore = in.readFloat();

            SortField[] fields = new SortField[in.readVInt()];
            for (int i = 0; i < fields.length; i++) {
                String field = null;
                if (in.readBoolean()) {
                    field = in.readString();
                }
                fields[i] = new SortField(field, readSortType(in), in.readBoolean());
            }

            FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()];
            for (int i = 0; i < fieldDocs.length; i++) {
                Comparable[] cFields = new Comparable[in.readVInt()];
                for (int j = 0; j < cFields.length; j++) {
                    byte type = in.readByte();
                    if (type == 0) {
                        cFields[j] = null;
                    } else if (type == 1) {
                        cFields[j] = in.readString();
                    } else if (type == 2) {
                        cFields[j] = in.readInt();
                    } else if (type == 3) {
                        cFields[j] = in.readLong();
                    } else if (type == 4) {
                        cFields[j] = in.readFloat();
                    } else if (type == 5) {
                        cFields[j] = in.readDouble();
                    } else if (type == 6) {
                        cFields[j] = in.readByte();
                    } else if (type == 7) {
                        cFields[j] = in.readShort();
                    } else if (type == 8) {
                        cFields[j] = in.readBoolean();
                    } else if (type == 9) {
                        cFields[j] = in.readBytesRef();
                    } else {
                        throw new IOException("Can't match type [" + type + "]");
                    }
                }
                fieldDocs[i] = new FieldDoc(in.readVInt(), in.readFloat(), cFields);
            }
            return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore);
        } else {
            int totalHits = in.readVInt();
            float maxScore = in.readFloat();

            ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()];
            for (int i = 0; i < scoreDocs.length; i++) {
                scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat());
            }
            return new TopDocs(totalHits, scoreDocs, maxScore);
        }
    }

    /**
     * Serializes {@code topDocs}, skipping the first {@code from} score docs.
     * Counterpart of {@link #readTopDocs(StreamInput)} - the formats must match
     * exactly, including the type-tag bytes for sort-field values.
     *
     * @param from number of leading score docs to omit from the stream
     */
    public static void writeTopDocs(StreamOutput out, TopDocs topDocs, int from) throws IOException {
        // negative remainder means there is nothing to write: emit the "no docs" marker.
        if (topDocs.scoreDocs.length - from < 0) {
            out.writeBoolean(false);
            return;
        }
        out.writeBoolean(true);
        if (topDocs instanceof TopFieldDocs) {
            out.writeBoolean(true);
            TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs;

            out.writeVInt(topDocs.totalHits);
            out.writeFloat(topDocs.getMaxScore());

            out.writeVInt(topFieldDocs.fields.length);
            for (SortField sortField : topFieldDocs.fields) {
                if (sortField.getField() == null) {
                    out.writeBoolean(false);
                } else {
                    out.writeBoolean(true);
                    out.writeString(sortField.getField());
                }
                // custom comparator sources carry their own reduced sort type.
                if (sortField.getComparatorSource() != null) {
                    writeSortType(out, ((IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource()).reducedType());
                } else {
                    writeSortType(out, sortField.getType());
                }
                out.writeBoolean(sortField.getReverse());
            }

            out.writeVInt(topDocs.scoreDocs.length - from);
            int index = 0;
            for (ScoreDoc doc : topFieldDocs.scoreDocs) {
                if (index++ < from) {
                    continue;
                }
                FieldDoc fieldDoc = (FieldDoc) doc;
                out.writeVInt(fieldDoc.fields.length);
                for (Object field : fieldDoc.fields) {
                    if (field == null) {
                        out.writeByte((byte) 0);
                    } else {
                        Class type = field.getClass();
                        if (type == String.class) {
                            out.writeByte((byte) 1);
                            out.writeString((String) field);
                        } else if (type == Integer.class) {
                            out.writeByte((byte) 2);
                            out.writeInt((Integer) field);
                        } else if (type == Long.class) {
                            out.writeByte((byte) 3);
                            out.writeLong((Long) field);
                        } else if (type == Float.class) {
                            out.writeByte((byte) 4);
                            out.writeFloat((Float) field);
                        } else if (type == Double.class) {
                            out.writeByte((byte) 5);
                            out.writeDouble((Double) field);
                        } else if (type == Byte.class) {
                            out.writeByte((byte) 6);
                            out.writeByte((Byte) field);
                        } else if (type == Short.class) {
                            out.writeByte((byte) 7);
                            out.writeShort((Short) field);
                        } else if (type == Boolean.class) {
                            out.writeByte((byte) 8);
                            out.writeBoolean((Boolean) field);
                        } else if (type == BytesRef.class) {
                            out.writeByte((byte) 9);
                            out.writeBytesRef((BytesRef) field);
                        } else {
                            throw new IOException("Can't handle sort field value of type [" + type + "]");
                        }
                    }
                }

                out.writeVInt(doc.doc);
                out.writeFloat(doc.score);
            }
        } else {
            out.writeBoolean(false);
            out.writeVInt(topDocs.totalHits);
            out.writeFloat(topDocs.getMaxScore());

            out.writeVInt(topDocs.scoreDocs.length - from);
            int index = 0;
            for (ScoreDoc doc : topDocs.scoreDocs) {
                if (index++ < from) {
                    continue;
                }
                out.writeVInt(doc.doc);
                out.writeFloat(doc.score);
            }
        }
    }

    // LUCENE 4 UPGRADE: We might want to maintain our own ordinal, instead of Lucene's ordinal
    // NOTE(review): serializing Lucene's enum ordinal ties the wire format to
    // the enum declaration order of the Lucene version in use.
    public static SortField.Type readSortType(StreamInput in) throws IOException {
        return SortField.Type.values()[in.readVInt()];
    }

    public static void writeSortType(StreamOutput out, SortField.Type sortType) throws IOException {
        out.writeVInt(sortType.ordinal());
    }

    /**
     * Recursively deserializes an {@link Explanation} tree written by
     * {@link #writeExplanation(StreamOutput, Explanation)}.
     */
    public static Explanation readExplanation(StreamInput in) throws IOException {
        float value = in.readFloat();
        String description = in.readString();
        Explanation explanation = new Explanation(value, description);
        if (in.readBoolean()) {
            int size = in.readVInt();
            for (int i = 0; i < size; i++) {
                explanation.addDetail(readExplanation(in));
            }
        }
        return explanation;
    }

    /**
     * Recursively serializes an {@link Explanation} tree: value, description,
     * then a boolean "has details" flag followed by the detail count and each
     * child explanation.
     */
    public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException {
        out.writeFloat(explanation.getValue());
        out.writeString(explanation.getDescription());
        Explanation[] subExplanations = explanation.getDetails();
        if (subExplanations == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeVInt(subExplanations.length);
            for (Explanation subExp : subExplanations) {
                writeExplanation(out, subExp);
            }
        }
    }

    // Reflective handle to SegmentReader's package-private "si" field, used by
    // getSegmentInfo() below.
    private static final Field segmentReaderSegmentInfoField;

    static {
        Field segmentReaderSegmentInfoFieldX = null;
        try {
            segmentReaderSegmentInfoFieldX = SegmentReader.class.getDeclaredField("si");
            segmentReaderSegmentInfoFieldX.setAccessible(true);
        } catch (NoSuchFieldException e) {
            // NOTE(review): failure is only printed; the field stays null and
            // getSegmentInfo() would then throw NPE on first use - consider
            // logging or failing fast instead.
            e.printStackTrace();
        }
        segmentReaderSegmentInfoField = segmentReaderSegmentInfoFieldX;
    }

    /**
     * Extracts the {@link SegmentInfoPerCommit} from a {@link SegmentReader}
     * via reflection; returns null when the field cannot be accessed.
     */
    public static SegmentInfoPerCommit getSegmentInfo(SegmentReader reader) {
        try {
            return (SegmentInfoPerCommit) segmentReaderSegmentInfoField.get(reader);
        } catch (IllegalAccessException e) {
            return null;
        }
    }

    /**
     * Collector that only records whether at least one document was collected.
     * {@link #setScorer(Scorer)} clears the flag, so the result reflects the
     * most recent search; call {@link #reset()} to clear it explicitly.
     */
    public static class ExistsCollector extends Collector {

        private boolean exists;

        public void reset() {
            exists = false;
        }

        public boolean exists() {
            return exists;
        }

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            this.exists = false;
        }

        @Override
        public void collect(int doc) throws IOException {
            exists = true;
        }

        @Override
        public void setNextReader(AtomicReaderContext context) throws IOException {
        }

        @Override
        public boolean acceptsDocsOutOfOrder() {
            return true;
        }
    }

    // static utility class - not instantiable.
    private Lucene() {

    }

    // LUCENE UPGRADE this is a workaround for LUCENE-4870
    // Returns true when a readable segments file exists in the directory;
    // tolerates transient access/too-many-open-files errors when the file is
    // known to exist.
    public static final boolean indexExists(final Directory directory) {
        try {
            new FindSegmentsFile(directory) {
                @Override
                protected Object doBody(String segmentFileName) throws IOException {
                    try {
                        new SegmentInfos().read(directory, segmentFileName);
                    } catch (FileNotFoundException ex) {
                        if (!directory.fileExists(segmentFileName)) {
                            throw ex;
                        }
                        // this is ok - we might have run into a access
                        // exception here or even worse a too many open files exception.
                    }
                    return null;
                }
            }.run();
            return true;
        } catch (IOException ioe) {
            return false;
        }
    }
}
| apache-2.0 |
knutwalker/google-closure-compiler | src/com/google/javascript/jscomp/InlineProperties.java | 10618 | /*
* Copyright 2012 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.TypeValidator.TypeMismatch;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.ObjectType;
import java.util.Map;
import java.util.Set;
/**
* InlineProperties attempts to find references to properties that are known to
* be constants and inline the known value.
*
* This pass relies on type information to find these property references and
* properties are assumed to be constant if either:
* - the property is assigned unconditionally in the instance constructor
* - the property is assigned unconditionally to the type's prototype
*
* The current implementation only inlines immutable values (as defined by
* NodeUtil.isImmutableValue).
*
* @author johnlenz@google.com (John Lenz)
*/
public class InlineProperties implements CompilerPass {

    private final AbstractCompiler compiler;

    /** Candidate constant property: its owner type and its (immutable) value node. */
    static class PropertyInfo {
        PropertyInfo(JSType type, Node value) {
            this.type = type;
            this.value = value;
        }
        final JSType type;
        final Node value;
    }

    // Sentinel marking a property name as never inlinable.
    private static final PropertyInfo INVALIDATED = new PropertyInfo(
        null, null);

    // Property name -> candidate info; INVALIDATED once any disqualifying
    // reference to that name is seen.
    private final Map<String, PropertyInfo> props = Maps.newHashMap();

    // Types whose properties must never be inlined (built in the constructor).
    private Set<JSType> invalidatingTypes;

    InlineProperties(AbstractCompiler compiler) {
        this.compiler = compiler;
        buildInvalidatingTypeSet();
    }

    // TODO(johnlenz): this is a direct copy of the invalidation code
    // from AmbiguateProperties, if in the end we don't need to modify it
    // we should move it to a common location.
    // Seeds invalidatingTypes with the native types that can never be trusted
    // for property inlining, plus both sides of every recorded type mismatch.
    private void buildInvalidatingTypeSet() {
        JSTypeRegistry registry = compiler.getTypeRegistry();
        invalidatingTypes = Sets.newHashSet(
            registry.getNativeType(JSTypeNative.ALL_TYPE),
            registry.getNativeType(JSTypeNative.NO_OBJECT_TYPE),
            registry.getNativeType(JSTypeNative.NO_TYPE),
            registry.getNativeType(JSTypeNative.NULL_TYPE),
            registry.getNativeType(JSTypeNative.VOID_TYPE),
            registry.getNativeType(JSTypeNative.FUNCTION_FUNCTION_TYPE),
            registry.getNativeType(JSTypeNative.FUNCTION_INSTANCE_TYPE),
            registry.getNativeType(JSTypeNative.FUNCTION_PROTOTYPE),
            registry.getNativeType(JSTypeNative.GLOBAL_THIS),
            registry.getNativeType(JSTypeNative.OBJECT_TYPE),
            registry.getNativeType(JSTypeNative.OBJECT_PROTOTYPE),
            registry.getNativeType(JSTypeNative.OBJECT_FUNCTION_TYPE),
            registry.getNativeType(JSTypeNative.TOP_LEVEL_PROTOTYPE),
            registry.getNativeType(JSTypeNative.UNKNOWN_TYPE));

        for (TypeMismatch mis : compiler.getTypeValidator().getMismatches()) {
            addInvalidatingType(mis.typeA);
            addInvalidatingType(mis.typeB);
        }
    }

    /**
     * Invalidates the given type, so that no properties on it will be renamed.
     * Union types invalidate each alternate; instance types also invalidate
     * their implicit prototype.
     */
    private void addInvalidatingType(JSType type) {
        type = type.restrictByNotNullOrUndefined();
        if (type.isUnionType()) {
            for (JSType alt : type.toMaybeUnionType().getAlternates()) {
                addInvalidatingType(alt);
            }
        }

        invalidatingTypes.add(type);
        ObjectType objType = ObjectType.cast(type);
        if (objType != null && objType.isInstanceType()) {
            invalidatingTypes.add(objType.getImplicitPrototype());
        }
    }

    /** Returns true if properties on this type should not be renamed. */
    private boolean isInvalidatingType(JSType type) {
        if (type.isUnionType()) {
            type = type.restrictByNotNullOrUndefined();
            if (type.isUnionType()) {
                // a union is invalidating when any of its alternates is.
                for (JSType alt : type.toMaybeUnionType().getAlternates()) {
                    if (isInvalidatingType(alt)) {
                        return true;
                    }
                }
                return false;
            }
        }
        ObjectType objType = ObjectType.cast(type);
        return objType == null
            || invalidatingTypes.contains(objType)
            || !objType.hasReferenceName()
            || objType.isUnknownType()
            || objType.isEmptyType() /* unresolved types */
            || objType.isEnumType()
            || objType.autoboxesTo() != null;
    }

    /**
     * This method gets the JSType from the Node argument and verifies that it is
     * present. Falls back to the unknown type when the node carries no type.
     */
    private JSType getJSType(Node n) {
        JSType jsType = n.getJSType();
        if (jsType == null) {
            return compiler.getTypeRegistry().getNativeType(
                JSTypeNative.UNKNOWN_TYPE);
        } else {
            return jsType;
        }
    }

    /**
     * Runs the pass in two traversals: first gather (and invalidate) candidate
     * constant properties, then replace surviving property reads with their
     * constant values.
     */
    @Override
    public void process(Node externs, Node root) {
        NodeTraversal.traverseRoots(
            compiler, new GatherCandidates(), externs, root);
        NodeTraversal.traverseRoots(
            compiler, new ReplaceCandidates(), externs, root);
    }

    /**
     * First pass: records property assignments that look like constants and
     * invalidates any property name that is referenced in a way that makes
     * inlining unsafe (extern references, writes outside a valid definition
     * site, deletes, object-literal keys).
     */
    class GatherCandidates extends AbstractPostOrderCallback {

        @Override
        public void visit(NodeTraversal t, Node n, Node parent) {
            boolean invalidatingPropRef = false;
            String propName = null;
            if (n.isGetProp()) {
                propName = n.getLastChild().getString();
                if (t.getInput().isExtern()) {
                    // Any extern reference invalidates
                    invalidatingPropRef = true;
                } else if (parent.isAssign()) {
                    invalidatingPropRef = !maybeCandidateDefinition(t, n, parent);
                } else if (NodeUtil.isLValue(n)) {
                    // Other LValue references invalidate
                    invalidatingPropRef = true;
                } else if (parent.isDelProp()) {
                    // Deletes invalidate
                    invalidatingPropRef = true;
                } else {
                    // A property read doesn't invalidate
                    invalidatingPropRef = false;
                }
            } else if (n.isStringKey()) {
                propName = n.getString();
                if (t.getInput().isExtern()) {
                    // Any extern reference invalidates
                    invalidatingPropRef = true;
                } else {
                    // For now, any object literal key invalidates
                    // TODO(johnlenz): support prototype properties like:
                    //   foo.prototype = { a: 1, b: 2 };
                    invalidatingPropRef = true;
                }
            }

            if (invalidatingPropRef) {
                Preconditions.checkNotNull(propName);
                invalidateProperty(propName);
            }
        }

        /**
         * @return Whether this is a valid definition for a candidate property.
         */
        private boolean maybeCandidateDefinition(
            NodeTraversal t, Node n, Node parent) {
            Preconditions.checkState(n.isGetProp() && parent.isAssign());
            boolean isCandidate = false;
            Node src = n.getFirstChild();
            String propName = n.getLastChild().getString();

            Node value = parent.getLastChild();
            if (src.isThis()) {
                // This is a simple assignment like:
                //    this.foo = 1;
                if (inContructor(t)) {
                    // This maybe a valid assignment.
                    isCandidate = maybeStoreCandidateValue(
                        getJSType(src), propName, value);
                }
            } else if (t.inGlobalScope()
                && src.isGetProp()
                && src.getLastChild().getString().equals("prototype")) {
                // This is a prototype assignment like:
                //    x.prototype.foo = 1;
                JSType instanceType = maybeGetInstanceTypeFromPrototypeRef(src);
                if (instanceType != null) {
                    isCandidate = maybeStoreCandidateValue(
                        instanceType, propName, value);
                }
            }
            return isCandidate;
        }

        // Resolves "x" in "x.prototype" to the instance type of the constructor
        // x, or null when x is not a constructor.
        private JSType maybeGetInstanceTypeFromPrototypeRef(Node src) {
            JSType ownerType = getJSType(src.getFirstChild());
            if (ownerType.isFunctionType() && ownerType.isConstructor()) {
                FunctionType functionType = ((FunctionType) ownerType);
                return functionType.getInstanceType();
            }
            return null;
        }

        // Marks the property name as never inlinable.
        private void invalidateProperty(String propName) {
            props.put(propName, INVALIDATED);
        }

        /**
         * Records a candidate (type, value) for the property name. Only the
         * first definition is kept; the value must be immutable and executed
         * exactly once, and the owner type must not be invalidating.
         *
         * @return whether the value was stored as a candidate
         */
        private boolean maybeStoreCandidateValue(
            JSType type, String propName, Node value) {
            Preconditions.checkNotNull(value);
            if (!props.containsKey(propName)
                && !isInvalidatingType(type)
                && NodeUtil.isImmutableValue(value)
                && NodeUtil.isExecutedExactlyOnce(value)) {
                props.put(propName, new PropertyInfo(type, value));
                return true;
            }
            return false;
        }

        // True when the current scope root is a function documented as an
        // @constructor. (Name is a pre-existing misspelling of "inConstructor".)
        private boolean inContructor(NodeTraversal t) {
            Node root = t.getScopeRoot();
            JSDocInfo info = NodeUtil.getBestJSDocInfo(root);
            return info != null && info.isConstructor();
        }
    }

    /**
     * Second pass: replaces reads of surviving candidate properties with a
     * clone of their constant value, preserving any side effects of the
     * receiver expression via a comma expression.
     */
    class ReplaceCandidates extends AbstractPostOrderCallback {
        @Override
        public void visit(NodeTraversal t, Node n, Node parent) {
            if (n.isGetProp() && !NodeUtil.isLValue(n)) {
                Node target = n.getFirstChild();
                String propName = n.getLastChild().getString();
                PropertyInfo info = props.get(propName);
                if (info != null
                    && info != INVALIDATED
                    && isMatchingType(target, info.type)) {
                    Node replacement = info.value.cloneTree();
                    // keep the receiver's side effects: (receiver, constant)
                    if (NodeUtil.mayHaveSideEffects(n.getFirstChild(), compiler)) {
                        replacement = IR.comma(n.removeFirstChild(), replacement).srcref(n);
                    }
                    parent.replaceChild(n, replacement);
                    compiler.reportCodeChange();
                }
            }
        }

        // True when the read target's type is a non-invalidating subtype of the
        // candidate definition's owner type.
        private boolean isMatchingType(Node n, JSType src) {
            src = src.restrictByNotNullOrUndefined();
            JSType dest = getJSType(n).restrictByNotNullOrUndefined();
            if (!isInvalidatingType(dest)
                && dest.isSubtype(src)) {
                return true;
            }
            return false;
        }
    }
}
| apache-2.0 |